stage2: small fixes + adjustments to std tests
parent 95ab942184
commit 488e1e5f51

11 changed files with 50 additions and 47 deletions

@@ -5,7 +5,6 @@ pub const gzip = @import("compress/gzip.zig");
 pub const zlib = @import("compress/zlib.zig");
 
 test {
-    if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest;
     _ = deflate;
     _ = gzip;
     _ = zlib;

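Note: the module-level gate above is removed so the compress tests run under the self-hosted backends, while later hunks add the same gate to the few tests that still fail there. A minimal sketch of that gating pattern (test name hypothetical):

    const builtin = @import("builtin");

    test "stage1-only fixture (hypothetical)" {
        // Skip everywhere except the bootstrap compiler; the test runner
        // reports the test as skipped rather than failed.
        if (builtin.zig_backend != .stage1) return error.SkipZigTest;
        // ...body that only stage1 can currently compile...
    }
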
@@ -254,7 +254,10 @@ pub fn Compressor(comptime WriterType: anytype) type {
 
         // Inner writer wrapped in a HuffmanBitWriter
         hm_bw: hm_bw.HuffmanBitWriter(WriterType) = undefined,
-        bulk_hasher: fn ([]u8, []u32) u32,
+        bulk_hasher: if (@import("builtin").zig_backend == .stage1)
+            fn ([]u8, []u32) u32
+        else
+            *const fn ([]u8, []u32) u32,
 
         sync: bool, // requesting flush
         best_speed_enc: *fast.DeflateFast, // Encoder for best_speed

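The bulk_hasher change above works around the self-hosted compiler's rule that function values stored in struct fields must be function pointers; the field's type is selected at comptime based on the backend. A self-contained sketch of the same pattern, with hypothetical names and a simplified signature:

    const std = @import("std");
    const builtin = @import("builtin");

    fn sumBytes(data: []const u8) u32 {
        var total: u32 = 0;
        for (data) |b| total += b;
        return total;
    }

    // Stage1 stores bare `fn` values in fields; the self-hosted backends
    // require `*const fn` pointers, so pick the field type at comptime.
    const Hasher = struct {
        hash_fn: if (builtin.zig_backend == .stage1)
            fn ([]const u8) u32
        else
            *const fn ([]const u8) u32,
    };

    test "backend-conditional function field (hypothetical)" {
        const h = Hasher{ .hash_fn = sumBytes };
        try std.testing.expectEqual(@as(u32, 6), h.hash_fn(&.{ 1, 2, 3 }));
    }
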
@@ -122,11 +122,8 @@ fn testToFromWithLevelAndLimit(level: deflate.Compression, input: []const u8, li
         try expect(compressed.items.len <= limit);
     }
 
-    var decomp = try decompressor(
-        testing.allocator,
-        io.fixedBufferStream(compressed.items).reader(),
-        null,
-    );
+    var fib = io.fixedBufferStream(compressed.items);
+    var decomp = try decompressor(testing.allocator, fib.reader(), null);
     defer decomp.deinit();
 
     var decompressed = try testing.allocator.alloc(u8, input.len);

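The recurring `var fib = ...; fib.reader()` rewrite in this and the following hunks avoids calling `.reader()` on a temporary: `io.fixedBufferStream(...)` returns the stream by value, and a reader taken from the unnamed temporary would point at storage that dies at the end of the statement. A minimal sketch of the corrected pattern, assuming the std.io API of this era:

    const std = @import("std");
    const io = std.io;

    test "reader over a named stream (sketch)" {
        // Keep the stream alive in a local so the reader's context pointer
        // stays valid; `io.fixedBufferStream("hello").reader()` would point
        // at a temporary.
        var fib = io.fixedBufferStream("hello");
        const reader = fib.reader();

        var buf: [8]u8 = undefined;
        const n = try reader.readAll(&buf);
        try std.testing.expectEqualStrings("hello", buf[0..n]);
    }
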
@@ -136,7 +133,9 @@ fn testToFromWithLevelAndLimit(level: deflate.Compression, input: []const u8, li
     try expect(read == input.len);
     try expect(mem.eql(u8, input, decompressed));
 
-    try testSync(level, input);
+    if (builtin.zig_backend == .stage1) {
+        try testSync(level, input);
+    }
 }
 
 fn testToFromWithLimit(input: []const u8, limit: [11]u32) !void {

@@ -180,6 +179,7 @@ test "deflate/inflate" {
 }
 
 test "very long sparse chunk" {
+    if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest;
     // A SparseReader returns a stream consisting of 0s ending with 65,536 (1<<16) 1s.
     // This tests missing hash references in a very large input.
     const SparseReader = struct {

@@ -377,6 +377,7 @@ test "compressor dictionary" {
 // Update the hash for best_speed only if d.index < d.maxInsertIndex
 // See https://golang.org/issue/2508
 test "Go non-regression test for 2508" {
+    if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest;
     var comp = try compressor(
         testing.allocator,
         io.null_writer,

@@ -475,21 +476,16 @@ test "inflate reset" {
         try comp.close();
     }
 
-    var decomp = try decompressor(
-        testing.allocator,
-        io.fixedBufferStream(compressed_strings[0].items).reader(),
-        null,
-    );
+    var fib = io.fixedBufferStream(compressed_strings[0].items);
+    var decomp = try decompressor(testing.allocator, fib.reader(), null);
     defer decomp.deinit();
 
     var decompressed_0: []u8 = try decomp.reader()
         .readAllAlloc(testing.allocator, math.maxInt(usize));
     defer testing.allocator.free(decompressed_0);
 
-    try decomp.reset(
-        io.fixedBufferStream(compressed_strings[1].items).reader(),
-        null,
-    );
+    fib = io.fixedBufferStream(compressed_strings[1].items);
+    try decomp.reset(fib.reader(), null);
 
     var decompressed_1: []u8 = try decomp.reader()
         .readAllAlloc(testing.allocator, math.maxInt(usize));

@@ -530,21 +526,16 @@ test "inflate reset dictionary" {
         try comp.close();
     }
 
-    var decomp = try decompressor(
-        testing.allocator,
-        io.fixedBufferStream(compressed_strings[0].items).reader(),
-        dict,
-    );
+    var fib = io.fixedBufferStream(compressed_strings[0].items);
+    var decomp = try decompressor(testing.allocator, fib.reader(), dict);
     defer decomp.deinit();
 
     var decompressed_0: []u8 = try decomp.reader()
         .readAllAlloc(testing.allocator, math.maxInt(usize));
     defer testing.allocator.free(decompressed_0);
 
-    try decomp.reset(
-        io.fixedBufferStream(compressed_strings[1].items).reader(),
-        dict,
-    );
+    fib = io.fixedBufferStream(compressed_strings[1].items);
+    try decomp.reset(fib.reader(), dict);
 
     var decompressed_1: []u8 = try decomp.reader()
         .readAllAlloc(testing.allocator, math.maxInt(usize));

@@ -334,7 +334,10 @@ pub fn Decompressor(comptime ReaderType: type) type {
 
         // Next step in the decompression,
        // and decompression state.
-        step: fn (*Self) Error!void,
+        step: if (@import("builtin").zig_backend == .stage1)
+            fn (*Self) Error!void
+        else
+            *const fn (*Self) Error!void,
         step_state: DecompressorState,
         final: bool,
         err: ?Error,

@@ -479,7 +482,13 @@
         }
 
         pub fn close(self: *Self) ?Error {
-            if (self.err == Error.EndOfStreamWithNoError) {
+            if (@import("builtin").zig_backend == .stage1) {
+                if (self.err == Error.EndOfStreamWithNoError) {
+                    return null;
+                }
+                return self.err;
+            }
+            if (self.err == @as(?Error, error.EndOfStreamWithNoError)) {
                 return null;
             }
             return self.err;

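In close(), stage1 accepted comparing the optional `self.err` directly against a bare error; the stage2 path coerces the right-hand side to `?Error` so both operands share one type. A version-stable equivalent via optional unwrap (names hypothetical):

    const std = @import("std");

    const Error = error{ EndOfStreamWithNoError, CorruptInput };

    fn isCleanEof(err: ?Error) bool {
        // Unwrapping sidesteps optional-vs-error comparison rules entirely.
        if (err) |e| return e == error.EndOfStreamWithNoError;
        return false;
    }

    test "clean EOF detection (hypothetical)" {
        try std.testing.expect(isCleanEof(error.EndOfStreamWithNoError));
        try std.testing.expect(!isCleanEof(error.CorruptInput));
        try std.testing.expect(!isCleanEof(null));
    }
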
@@ -920,7 +929,8 @@ test "truncated input" {
     };
 
     for (tests) |t| {
-        var r = io.fixedBufferStream(t.input).reader();
+        var fib = io.fixedBufferStream(t.input);
+        const r = fib.reader();
         var z = try decompressor(testing.allocator, r, null);
         defer z.deinit();
         var zr = z.reader();

@@ -959,7 +969,8 @@ test "Go non-regression test for 9842" {
     };
 
     for (tests) |t| {
-        const reader = std.io.fixedBufferStream(t.input).reader();
+        var fib = std.io.fixedBufferStream(t.input);
+        const reader = fib.reader();
         var decomp = try decompressor(testing.allocator, reader, null);
         defer decomp.deinit();
 

@@ -1017,7 +1028,8 @@ test "inflate A Tale of Two Cities (1859) intro" {
         \\
     ;
 
-    const reader = std.io.fixedBufferStream(&compressed).reader();
+    var fib = std.io.fixedBufferStream(&compressed);
+    const reader = fib.reader();
     var decomp = try decompressor(testing.allocator, reader, null);
     defer decomp.deinit();
 

@@ -1082,7 +1094,8 @@ test "fuzzing" {
 
 fn decompress(input: []const u8) !void {
     const allocator = testing.allocator;
-    const reader = std.io.fixedBufferStream(input).reader();
+    var fib = std.io.fixedBufferStream(input);
+    const reader = fib.reader();
     var decomp = try decompressor(allocator, reader, null);
     defer decomp.deinit();
     var output = try decomp.reader().readAllAlloc(allocator, math.maxInt(usize));

@@ -78,11 +78,8 @@ test "best speed" {
         var decompressed = try testing.allocator.alloc(u8, want.items.len);
         defer testing.allocator.free(decompressed);
 
-        var decomp = try inflate.decompressor(
-            testing.allocator,
-            io.fixedBufferStream(compressed.items).reader(),
-            null,
-        );
+        var fib = io.fixedBufferStream(compressed.items);
+        var decomp = try inflate.decompressor(testing.allocator, fib.reader(), null);
         defer decomp.deinit();
 
         var read = try decomp.reader().readAll(decompressed);

@@ -122,13 +119,13 @@ test "best speed max match offset" {
     // zeros1 is between 0 and 30 zeros.
     // The difference between the two abc's will be offset, which
     // is max_match_offset plus or minus a small adjustment.
-    var src_len: usize = @intCast(usize, offset + abc.len + @intCast(i32, extra));
+    var src_len: usize = @intCast(usize, offset + @as(i32, abc.len) + @intCast(i32, extra));
     var src = try testing.allocator.alloc(u8, src_len);
     defer testing.allocator.free(src);
 
     mem.copy(u8, src, abc);
     if (!do_match_before) {
-        var src_offset: usize = @intCast(usize, offset - xyz.len);
+        var src_offset: usize = @intCast(usize, offset - @as(i32, xyz.len));
         mem.copy(u8, src[src_offset..], xyz);
     }
     var src_offset: usize = @intCast(usize, offset);

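The `@as(i32, ...)` insertions keep every operand of the offset arithmetic in one signed type before the final conversion to usize; stage2 is stricter than stage1 about mixing usize and i32 operands. A sketch of the idea, using the two-argument @intCast builtin of the Zig version this commit targets and made-up values:

    const expect = @import("std").testing.expect;

    test "signed offset arithmetic (sketch)" {
        const abc = "abcdefgh"; // abc.len is a comptime-known 8
        const offset: i32 = 100;
        const extra: u32 = 3;
        // Coerce every operand to i32, then convert the (known-positive)
        // result to usize in a single place.
        const src_len: usize = @intCast(usize, offset + @as(i32, abc.len) + @intCast(i32, extra));
        try expect(src_len == 111);
    }
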
@@ -149,11 +146,8 @@ test "best speed max match offset" {
     var decompressed = try testing.allocator.alloc(u8, src.len);
     defer testing.allocator.free(decompressed);
 
-    var decomp = try inflate.decompressor(
-        testing.allocator,
-        io.fixedBufferStream(compressed.items).reader(),
-        null,
-    );
+    var fib = io.fixedBufferStream(compressed.items);
+    var decomp = try inflate.decompressor(testing.allocator, fib.reader(), null);
     defer decomp.deinit();
     var read = try decomp.reader().readAll(decompressed);
     _ = decomp.close();

@@ -13,7 +13,6 @@ pub const net = struct {
 };
 
 test {
-    if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest;
     inline for (.{ os, net }) |module| {
         std.testing.refAllDecls(module);
     }

@@ -117,6 +117,7 @@ pub const Reactor = struct {
 };
 
 test "reactor/linux: drive async tcp client/listener pair" {
+    if (@import("builtin").zig_backend != .stage1) return error.SkipZigTest;
     if (native_os.tag != .linux) return error.SkipZigTest;
 
     const ip = std.x.net.ip;

@@ -381,7 +381,7 @@ pub const IPv6 = extern struct {
             });
         }
 
-        const zero_span = span: {
+        const zero_span: struct { from: usize, to: usize } = span: {
             var i: usize = 0;
             while (i < self.octets.len) : (i += 2) {
                 if (self.octets[i] == 0 and self.octets[i + 1] == 0) break;

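Annotating zero_span gives the labeled block an explicit result type instead of relying on inference from the `break :span` operands, which the self-hosted compiler could not yet do here. A minimal sketch of the annotated-block pattern:

    const std = @import("std");

    test "labeled block with explicit result type" {
        // The annotation tells the compiler what `break :span .{ ... }`
        // should coerce to.
        const zero_span: struct { from: usize, to: usize } = span: {
            break :span .{ .from = 2, .to = 6 };
        };
        try std.testing.expectEqual(@as(usize, 4), zero_span.to - zero_span.from);
    }
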
@@ -19702,7 +19702,8 @@ fn coerce(
             // pointer to tuple to slice
             if (inst_ty.isSinglePointer() and
                 inst_ty.childType().isTuple() and
-                !dest_info.mutable and dest_info.size == .Slice)
+                (!dest_info.mutable or inst_ty.ptrIsMutable() or inst_ty.childType().tupleFields().types.len == 0) and
+                dest_info.size == .Slice)
             {
                 return sema.coerceTupleToSlicePtrs(block, dest_ty, dest_ty_src, inst, inst_src);
             }

@@ -1062,6 +1062,7 @@ pub const Value = extern union {
         sema_kit: ?Module.WipAnalysis,
     ) Module.CompileError!BigIntConst {
         switch (val.tag()) {
             .null_value,
             .zero,
             .bool_false,
+            .the_only_possible_value, // i0, u0

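The new switch arm covers values whose type has exactly one possible value; zero-bit integers such as u0 and i0 can only ever hold 0, so their big-int conversion has a fixed answer. A tiny illustration:

    const std = @import("std");

    test "u0 and i0 have a single possible value" {
        const a: u0 = 0; // the only representable value
        const b: i0 = 0;
        try std.testing.expect(@as(u8, a) == 0);
        try std.testing.expect(@as(i8, b) == 0);
    }
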
@@ -1426,6 +1426,7 @@ test "coerce undefined single-item pointer of array to error union of slice" {
 }
 
 test "pointer to empty struct literal to mutable slice" {
+    if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
     var x: []i32 = &.{};
     try expect(x.len == 0);
 }