std.ArrayList: popOrNull() -> pop() [v2] (#22720)

Authored by Meghan Denny on 2025-02-09 20:21:31 -08:00; committed by GitHub
parent 75df7e502c
commit 9142482372
29 changed files with 162 additions and 177 deletions
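
In short: `popOrNull()` is removed and `pop()` now returns `?T`, yielding null on an empty list instead of asserting non-emptiness. Call sites either unwrap with `.?`, fall back with `orelse`, or drain with a `while` capture, as the hunks below show. A minimal standalone sketch of those patterns against the post-commit API (the test name, values, and variable names are illustrative, not taken from the diff):

const std = @import("std");

test "ArrayList.pop returns an optional" {
    var list = std.ArrayList(u32).init(std.testing.allocator);
    defer list.deinit();
    try list.append(1);
    try list.append(2);

    // Where non-emptiness is known, `.?` restores the old asserting behavior.
    try std.testing.expectEqual(@as(u32, 2), list.pop().?);
    // Where the list may be empty, fall back with `orelse` ...
    try std.testing.expectEqual(@as(u32, 1), list.pop() orelse 0);
    // ... or drain with a `while` capture, which stops at null.
    while (list.pop()) |item| _ = item;
    try std.testing.expect(list.pop() == null);
}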


@ -2446,7 +2446,7 @@ pub fn expandedSlice(pp: *const Preprocessor, tok: anytype) []const u8 {
/// Concat two tokens and add the result to pp.generated
fn pasteTokens(pp: *Preprocessor, lhs_toks: *ExpandBuf, rhs_toks: []const TokenWithExpansionLocs) Error!void {
const lhs = while (lhs_toks.popOrNull()) |lhs| {
const lhs = while (lhs_toks.pop()) |lhs| {
if ((pp.comp.langopts.preserve_comments_in_macros and lhs.id == .comment) or
(lhs.id != .macro_ws and lhs.id != .comment))
break lhs;
@ -2676,7 +2676,7 @@ fn defineFn(pp: *Preprocessor, tokenizer: *Tokenizer, define_tok: RawToken, macr
tok = tokenizer.nextNoWS();
if (tok.id == .ellipsis) {
try pp.err(tok, .gnu_va_macro);
gnu_var_args = params.pop();
gnu_var_args = params.pop().?;
const r_paren = tokenizer.nextNoWS();
if (r_paren.id != .r_paren) {
try pp.err(r_paren, .missing_paren_param_list);


@ -103,7 +103,7 @@ fn diagnosticHandler(self: *GCC, pp: *Preprocessor, start_idx: TokenIndex) Pragm
try pp.comp.diagnostics.set(str[2..], new_kind);
},
.push => try self.options_stack.append(pp.comp.gpa, pp.comp.diagnostics.options),
.pop => pp.comp.diagnostics.options = self.options_stack.popOrNull() orelse self.original_options,
.pop => pp.comp.diagnostics.options = self.options_stack.pop() orelse self.original_options,
}
}


@ -149,7 +149,7 @@ fn pop(pack: *Pack, p: *Parser, maybe_label: ?[]const u8) void {
}
}
} else {
const prev = pack.stack.popOrNull() orelse {
const prev = pack.stack.pop() orelse {
p.pragma_pack = 2;
return;
};


@ -1190,7 +1190,7 @@ pub fn printErrorMessages(
const ttyconf = options.ttyconf;
try ttyconf.setColor(stderr, .dim);
var indent: usize = 0;
while (step_stack.popOrNull()) |s| : (indent += 1) {
while (step_stack.pop()) |s| : (indent += 1) {
if (indent > 0) {
try stderr.writer().writeByteNTimes(' ', (indent - 1) * 3);
try printChildNodePrefix(stderr, ttyconf);


@ -816,7 +816,7 @@ fn isThematicBreak(line: []const u8) bool {
}
fn closeLastBlock(p: *Parser) !void {
const b = p.pending_blocks.pop();
const b = p.pending_blocks.pop().?;
const node = switch (b.tag) {
.list => list: {
assert(b.string_start == p.scratch_string.items.len);


@ -616,7 +616,7 @@ fn expand_variables_cmake(
// no open bracket, preserve as a literal
break :blk;
}
const open_pos = var_stack.pop();
const open_pos = var_stack.pop().?;
if (source_offset == open_pos.source) {
source_offset += open_var.len;
}


@ -289,10 +289,10 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?u29) type {
/// Asserts that the list is not empty.
/// Asserts that the index is in bounds.
pub fn swapRemove(self: *Self, i: usize) T {
if (self.items.len - 1 == i) return self.pop();
if (self.items.len - 1 == i) return self.pop().?;
const old_item = self.items[i];
self.items[i] = self.pop();
self.items[i] = self.pop().?;
return old_item;
}
@ -555,23 +555,15 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?u29) type {
return self.items[prev_len..][0..n];
}
/// Remove and return the last element from the list.
/// Invalidates element pointers to the removed element.
/// Asserts that the list is not empty.
pub fn pop(self: *Self) T {
/// Remove and return the last element from the list, or return `null` if list is empty.
/// Invalidates element pointers to the removed element, if any.
pub fn pop(self: *Self) ?T {
if (self.items.len == 0) return null;
const val = self.items[self.items.len - 1];
self.items.len -= 1;
return val;
}
/// Remove and return the last element from the list, or
/// return `null` if list is empty.
/// Invalidates element pointers to the removed element, if any.
pub fn popOrNull(self: *Self) ?T {
if (self.items.len == 0) return null;
return self.pop();
}
/// Returns a slice of all the items plus the extra capacity, whose memory
/// contents are `undefined`.
pub fn allocatedSlice(self: Self) Slice {
@ -897,10 +889,10 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?u29) typ
/// Asserts that the list is not empty.
/// Asserts that the index is in bounds.
pub fn swapRemove(self: *Self, i: usize) T {
if (self.items.len - 1 == i) return self.pop();
if (self.items.len - 1 == i) return self.pop().?;
const old_item = self.items[i];
self.items[i] = self.pop();
self.items[i] = self.pop().?;
return old_item;
}
@ -1190,22 +1182,15 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?u29) typ
}
/// Remove and return the last element from the list.
/// If the list is empty, returns `null`.
/// Invalidates pointers to last element.
/// Asserts that the list is not empty.
pub fn pop(self: *Self) T {
pub fn pop(self: *Self) ?T {
if (self.items.len == 0) return null;
const val = self.items[self.items.len - 1];
self.items.len -= 1;
return val;
}
/// Remove and return the last element from the list.
/// If the list is empty, returns `null`.
/// Invalidates pointers to last element.
pub fn popOrNull(self: *Self) ?T {
if (self.items.len == 0) return null;
return self.pop();
}
/// Returns a slice of all the items plus the extra capacity, whose memory
/// contents are `undefined`.
pub fn allocatedSlice(self: Self) Slice {
@ -2184,7 +2169,7 @@ test "ArrayList(u0)" {
try testing.expectEqual(count, 3);
}
test "ArrayList(?u32).popOrNull()" {
test "ArrayList(?u32).pop()" {
const a = testing.allocator;
var list = ArrayList(?u32).init(a);
@ -2195,10 +2180,10 @@ test "ArrayList(?u32).popOrNull()" {
try list.append(2);
try testing.expectEqual(list.items.len, 3);
try testing.expect(list.popOrNull().? == @as(u32, 2));
try testing.expect(list.popOrNull().? == @as(u32, 1));
try testing.expect(list.popOrNull().? == null);
try testing.expect(list.popOrNull() == null);
try testing.expect(list.pop().? == @as(u32, 2));
try testing.expect(list.pop().? == @as(u32, 1));
try testing.expect(list.pop().? == null);
try testing.expect(list.pop() == null);
}
test "ArrayList(u32).getLast()" {

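The unmanaged variant changed in the same set of hunks above (`ArrayListAlignedUnmanaged`), so both list types now share the optional contract. A small sketch of what that means for callers, assuming the post-commit API (names and values here are illustrative): unwrapping with `.?` is checked illegal behavior on an empty list (a panic in safe builds), which is how call sites such as `swapRemove` keep their old assertion, while code that used `popOrNull` keeps its meaning by simply calling `pop`.

const std = @import("std");

test "ArrayListUnmanaged.pop follows the same optional contract" {
    const gpa = std.testing.allocator;
    var stack: std.ArrayListUnmanaged(u8) = .empty;
    defer stack.deinit(gpa);
    try stack.append(gpa, 'a');

    // `.?` keeps the old "asserts not empty" semantics; it panics in safe
    // builds if the list is empty.
    try std.testing.expectEqual(@as(u8, 'a'), stack.pop().?);
    // An empty list now yields null rather than tripping an assertion.
    try std.testing.expect(stack.pop() == null);
}
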

@ -527,14 +527,14 @@ pub fn StackMachine(comptime options: Options) type {
},
OP.@"and" => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const a = try self.stack.pop().asIntegral();
const a = try self.stack.pop().?.asIntegral();
self.stack.items[self.stack.items.len - 1] = .{
.generic = a & try self.stack.items[self.stack.items.len - 1].asIntegral(),
};
},
OP.div => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const a: isize = @bitCast(try self.stack.pop().asIntegral());
const a: isize = @bitCast(try self.stack.pop().?.asIntegral());
const b: isize = @bitCast(try self.stack.items[self.stack.items.len - 1].asIntegral());
self.stack.items[self.stack.items.len - 1] = .{
.generic = @bitCast(try std.math.divTrunc(isize, b, a)),
@ -542,14 +542,14 @@ pub fn StackMachine(comptime options: Options) type {
},
OP.minus => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const b = try self.stack.pop().asIntegral();
const b = try self.stack.pop().?.asIntegral();
self.stack.items[self.stack.items.len - 1] = .{
.generic = try std.math.sub(addr_type, try self.stack.items[self.stack.items.len - 1].asIntegral(), b),
};
},
OP.mod => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const a: isize = @bitCast(try self.stack.pop().asIntegral());
const a: isize = @bitCast(try self.stack.pop().?.asIntegral());
const b: isize = @bitCast(try self.stack.items[self.stack.items.len - 1].asIntegral());
self.stack.items[self.stack.items.len - 1] = .{
.generic = @bitCast(@mod(b, a)),
@ -557,7 +557,7 @@ pub fn StackMachine(comptime options: Options) type {
},
OP.mul => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const a: isize = @bitCast(try self.stack.pop().asIntegral());
const a: isize = @bitCast(try self.stack.pop().?.asIntegral());
const b: isize = @bitCast(try self.stack.items[self.stack.items.len - 1].asIntegral());
self.stack.items[self.stack.items.len - 1] = .{
.generic = @bitCast(@mulWithOverflow(a, b)[0]),
@ -581,14 +581,14 @@ pub fn StackMachine(comptime options: Options) type {
},
OP.@"or" => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const a = try self.stack.pop().asIntegral();
const a = try self.stack.pop().?.asIntegral();
self.stack.items[self.stack.items.len - 1] = .{
.generic = a | try self.stack.items[self.stack.items.len - 1].asIntegral(),
};
},
OP.plus => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const b = try self.stack.pop().asIntegral();
const b = try self.stack.pop().?.asIntegral();
self.stack.items[self.stack.items.len - 1] = .{
.generic = try std.math.add(addr_type, try self.stack.items[self.stack.items.len - 1].asIntegral(), b),
};
@ -602,7 +602,7 @@ pub fn StackMachine(comptime options: Options) type {
},
OP.shl => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const a = try self.stack.pop().asIntegral();
const a = try self.stack.pop().?.asIntegral();
const b = try self.stack.items[self.stack.items.len - 1].asIntegral();
self.stack.items[self.stack.items.len - 1] = .{
.generic = std.math.shl(usize, b, a),
@ -610,7 +610,7 @@ pub fn StackMachine(comptime options: Options) type {
},
OP.shr => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const a = try self.stack.pop().asIntegral();
const a = try self.stack.pop().?.asIntegral();
const b = try self.stack.items[self.stack.items.len - 1].asIntegral();
self.stack.items[self.stack.items.len - 1] = .{
.generic = std.math.shr(usize, b, a),
@ -618,7 +618,7 @@ pub fn StackMachine(comptime options: Options) type {
},
OP.shra => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const a = try self.stack.pop().asIntegral();
const a = try self.stack.pop().?.asIntegral();
const b: isize = @bitCast(try self.stack.items[self.stack.items.len - 1].asIntegral());
self.stack.items[self.stack.items.len - 1] = .{
.generic = @bitCast(std.math.shr(isize, b, a)),
@ -626,7 +626,7 @@ pub fn StackMachine(comptime options: Options) type {
},
OP.xor => {
if (self.stack.items.len < 2) return error.InvalidExpression;
const a = try self.stack.pop().asIntegral();
const a = try self.stack.pop().?.asIntegral();
self.stack.items[self.stack.items.len - 1] = .{
.generic = a ^ try self.stack.items[self.stack.items.len - 1].asIntegral(),
};
@ -641,7 +641,7 @@ pub fn StackMachine(comptime options: Options) type {
OP.ne,
=> {
if (self.stack.items.len < 2) return error.InvalidExpression;
const a = self.stack.pop();
const a = self.stack.pop().?;
const b = self.stack.items[self.stack.items.len - 1];
if (a == .generic and b == .generic) {
@ -667,7 +667,7 @@ pub fn StackMachine(comptime options: Options) type {
const branch_offset = operand.?.branch_offset;
const condition = if (opcode == OP.bra) blk: {
if (self.stack.items.len == 0) return error.InvalidExpression;
break :blk try self.stack.pop().asIntegral() != 0;
break :blk try self.stack.pop().?.asIntegral() != 0;
} else true;
if (condition) {
@ -1080,7 +1080,7 @@ test "DWARF expressions" {
for (0..32) |i| {
const expected = 31 - i;
try testing.expectEqual(expected, stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(expected, stack_machine.stack.pop().?.generic);
}
}
@ -1141,7 +1141,7 @@ test "DWARF expressions" {
_ = try stack_machine.run(program.items, allocator, context, 0);
const const_type = stack_machine.stack.popOrNull().?.const_type;
const const_type = stack_machine.stack.pop().?.const_type;
try testing.expectEqual(die_offset, const_type.type_offset);
try testing.expectEqualSlices(u8, type_bytes, const_type.value_bytes);
@ -1162,7 +1162,7 @@ test "DWARF expressions" {
};
inline for (expected) |e| {
try testing.expectEqual(@as(e[0], e[1]), @as(e[2], @bitCast(stack_machine.stack.popOrNull().?.generic)));
try testing.expectEqual(@as(e[0], e[1]), @as(e[2], @bitCast(stack_machine.stack.pop().?.generic)));
}
}
@ -1199,14 +1199,14 @@ test "DWARF expressions" {
_ = try stack_machine.run(program.items, allocator, context, 0);
const regval_type = stack_machine.stack.popOrNull().?.regval_type;
const regval_type = stack_machine.stack.pop().?.regval_type;
try testing.expectEqual(@as(usize, 400), regval_type.type_offset);
try testing.expectEqual(@as(u8, @sizeOf(usize)), regval_type.type_size);
try testing.expectEqual(@as(usize, 0xee), regval_type.value);
try testing.expectEqual(@as(usize, 303), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 202), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 101), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 303), stack_machine.stack.pop().?.generic);
try testing.expectEqual(@as(usize, 202), stack_machine.stack.pop().?.generic);
try testing.expectEqual(@as(usize, 101), stack_machine.stack.pop().?.generic);
} else |err| {
switch (err) {
error.UnimplementedArch,
@ -1227,15 +1227,15 @@ test "DWARF expressions" {
try b.writeConst(writer, u8, 1);
try b.writeOpcode(writer, OP.dup);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 1), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 1), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 1), stack_machine.stack.pop().?.generic);
try testing.expectEqual(@as(usize, 1), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
try b.writeConst(writer, u8, 1);
try b.writeOpcode(writer, OP.drop);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expect(stack_machine.stack.popOrNull() == null);
try testing.expect(stack_machine.stack.pop() == null);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1244,7 +1244,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u8, 6);
try b.writePick(writer, 2);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 4), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 4), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1253,7 +1253,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u8, 6);
try b.writeOpcode(writer, OP.over);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 5), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 5), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1261,8 +1261,8 @@ test "DWARF expressions" {
try b.writeConst(writer, u8, 6);
try b.writeOpcode(writer, OP.swap);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 5), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 6), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 5), stack_machine.stack.pop().?.generic);
try testing.expectEqual(@as(usize, 6), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1271,9 +1271,9 @@ test "DWARF expressions" {
try b.writeConst(writer, u8, 6);
try b.writeOpcode(writer, OP.rot);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 5), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 4), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 6), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 5), stack_machine.stack.pop().?.generic);
try testing.expectEqual(@as(usize, 4), stack_machine.stack.pop().?.generic);
try testing.expectEqual(@as(usize, 6), stack_machine.stack.pop().?.generic);
const deref_target: usize = @truncate(0xffeeffee_ffeeffee);
@ -1282,7 +1282,7 @@ test "DWARF expressions" {
try b.writeAddr(writer, @intFromPtr(&deref_target));
try b.writeOpcode(writer, OP.deref);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(deref_target, stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(deref_target, stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1290,14 +1290,14 @@ test "DWARF expressions" {
try b.writeAddr(writer, @intFromPtr(&deref_target));
try b.writeOpcode(writer, OP.xderef);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(deref_target, stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(deref_target, stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
try b.writeAddr(writer, @intFromPtr(&deref_target));
try b.writeDerefSize(writer, 1);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, @as(*const u8, @ptrCast(&deref_target)).*), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, @as(*const u8, @ptrCast(&deref_target)).*), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1305,7 +1305,7 @@ test "DWARF expressions" {
try b.writeAddr(writer, @intFromPtr(&deref_target));
try b.writeXDerefSize(writer, 1);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, @as(*const u8, @ptrCast(&deref_target)).*), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, @as(*const u8, @ptrCast(&deref_target)).*), stack_machine.stack.pop().?.generic);
const type_offset: usize = @truncate(0xaabbaabb_aabbaabb);
@ -1314,7 +1314,7 @@ test "DWARF expressions" {
try b.writeAddr(writer, @intFromPtr(&deref_target));
try b.writeDerefType(writer, 1, type_offset);
_ = try stack_machine.run(program.items, allocator, context, null);
const deref_type = stack_machine.stack.popOrNull().?.regval_type;
const deref_type = stack_machine.stack.pop().?.regval_type;
try testing.expectEqual(type_offset, deref_type.type_offset);
try testing.expectEqual(@as(u8, 1), deref_type.type_size);
try testing.expectEqual(@as(usize, @as(*const u8, @ptrCast(&deref_target)).*), deref_type.value);
@ -1325,7 +1325,7 @@ test "DWARF expressions" {
try b.writeAddr(writer, @intFromPtr(&deref_target));
try b.writeXDerefType(writer, 1, type_offset);
_ = try stack_machine.run(program.items, allocator, context, null);
const xderef_type = stack_machine.stack.popOrNull().?.regval_type;
const xderef_type = stack_machine.stack.pop().?.regval_type;
try testing.expectEqual(type_offset, xderef_type.type_offset);
try testing.expectEqual(@as(u8, 1), xderef_type.type_size);
try testing.expectEqual(@as(usize, @as(*const u8, @ptrCast(&deref_target)).*), xderef_type.value);
@ -1336,7 +1336,7 @@ test "DWARF expressions" {
program.clearRetainingCapacity();
try b.writeOpcode(writer, OP.push_object_address);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, @intFromPtr(context.object_address.?)), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, @intFromPtr(context.object_address.?)), stack_machine.stack.pop().?.generic);
// TODO: Test OP.form_tls_address
@ -1346,7 +1346,7 @@ test "DWARF expressions" {
program.clearRetainingCapacity();
try b.writeOpcode(writer, OP.call_frame_cfa);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(context.cfa.?, stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(context.cfa.?, stack_machine.stack.pop().?.generic);
}
// Arithmetic and Logical Operations
@ -1358,7 +1358,7 @@ test "DWARF expressions" {
try b.writeConst(writer, i16, -4096);
try b.writeOpcode(writer, OP.abs);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 4096), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 4096), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1366,7 +1366,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u16, 0xf0ff);
try b.writeOpcode(writer, OP.@"and");
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 0xf00f), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 0xf00f), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1374,7 +1374,7 @@ test "DWARF expressions" {
try b.writeConst(writer, i16, 100);
try b.writeOpcode(writer, OP.div);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(isize, -404 / 100), @as(isize, @bitCast(stack_machine.stack.popOrNull().?.generic)));
try testing.expectEqual(@as(isize, -404 / 100), @as(isize, @bitCast(stack_machine.stack.pop().?.generic)));
stack_machine.reset();
program.clearRetainingCapacity();
@ -1382,7 +1382,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u16, 50);
try b.writeOpcode(writer, OP.minus);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 150), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 150), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1390,7 +1390,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u16, 100);
try b.writeOpcode(writer, OP.mod);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 23), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 23), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1398,7 +1398,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u16, 0xee);
try b.writeOpcode(writer, OP.mul);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 0xed12), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 0xed12), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1407,15 +1407,15 @@ test "DWARF expressions" {
try b.writeConst(writer, i16, -6);
try b.writeOpcode(writer, OP.neg);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 6), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(isize, -5), @as(isize, @bitCast(stack_machine.stack.popOrNull().?.generic)));
try testing.expectEqual(@as(usize, 6), stack_machine.stack.pop().?.generic);
try testing.expectEqual(@as(isize, -5), @as(isize, @bitCast(stack_machine.stack.pop().?.generic)));
stack_machine.reset();
program.clearRetainingCapacity();
try b.writeConst(writer, u16, 0xff0f);
try b.writeOpcode(writer, OP.not);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(~@as(usize, 0xff0f), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(~@as(usize, 0xff0f), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1423,7 +1423,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u16, 0xf0ff);
try b.writeOpcode(writer, OP.@"or");
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 0xffff), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 0xffff), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1431,14 +1431,14 @@ test "DWARF expressions" {
try b.writeConst(writer, i16, 100);
try b.writeOpcode(writer, OP.plus);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 502), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 502), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
try b.writeConst(writer, u16, 4096);
try b.writePlusUconst(writer, @as(usize, 8192));
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 4096 + 8192), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 4096 + 8192), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1446,7 +1446,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u16, 1);
try b.writeOpcode(writer, OP.shl);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 0xfff << 1), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 0xfff << 1), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1454,7 +1454,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u16, 1);
try b.writeOpcode(writer, OP.shr);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 0xfff >> 1), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 0xfff >> 1), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1462,7 +1462,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u16, 1);
try b.writeOpcode(writer, OP.shr);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, @bitCast(@as(isize, 0xfff) >> 1)), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, @bitCast(@as(isize, 0xfff) >> 1)), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1470,7 +1470,7 @@ test "DWARF expressions" {
try b.writeConst(writer, u16, 0xff0f);
try b.writeOpcode(writer, OP.xor);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 0x0ff0), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 0x0ff0), stack_machine.stack.pop().?.generic);
}
// Control Flow Operations
@ -1499,9 +1499,9 @@ test "DWARF expressions" {
try b.writeConst(writer, u16, 0);
try b.writeOpcode(writer, e[0]);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, e[3]), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, e[2]), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, e[1]), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, e[3]), stack_machine.stack.pop().?.generic);
try testing.expectEqual(@as(usize, e[2]), stack_machine.stack.pop().?.generic);
try testing.expectEqual(@as(usize, e[1]), stack_machine.stack.pop().?.generic);
}
stack_machine.reset();
@ -1510,7 +1510,7 @@ test "DWARF expressions" {
try b.writeSkip(writer, 1);
try b.writeLiteral(writer, 3);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 2), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 2), stack_machine.stack.pop().?.generic);
stack_machine.reset();
program.clearRetainingCapacity();
@ -1522,9 +1522,9 @@ test "DWARF expressions" {
try b.writeLiteral(writer, 4);
try b.writeLiteral(writer, 5);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 5), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 4), stack_machine.stack.popOrNull().?.generic);
try testing.expect(stack_machine.stack.popOrNull() == null);
try testing.expectEqual(@as(usize, 5), stack_machine.stack.pop().?.generic);
try testing.expectEqual(@as(usize, 4), stack_machine.stack.pop().?.generic);
try testing.expect(stack_machine.stack.pop() == null);
// TODO: Test call2, call4, call_ref once implemented
@ -1548,7 +1548,7 @@ test "DWARF expressions" {
try b.writeConstType(writer, @as(usize, 0), &value_bytes);
try b.writeConvert(writer, @as(usize, 0));
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(value, stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(value, stack_machine.stack.pop().?.generic);
// Reinterpret to generic type
stack_machine.reset();
@ -1556,7 +1556,7 @@ test "DWARF expressions" {
try b.writeConstType(writer, @as(usize, 0), &value_bytes);
try b.writeReinterpret(writer, @as(usize, 0));
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(value, stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(value, stack_machine.stack.pop().?.generic);
// Reinterpret to new type
const die_offset: usize = 0xffee;
@ -1566,7 +1566,7 @@ test "DWARF expressions" {
try b.writeConstType(writer, @as(usize, 0), &value_bytes);
try b.writeReinterpret(writer, die_offset);
_ = try stack_machine.run(program.items, allocator, context, null);
const const_type = stack_machine.stack.popOrNull().?.const_type;
const const_type = stack_machine.stack.pop().?.const_type;
try testing.expectEqual(die_offset, const_type.type_offset);
stack_machine.reset();
@ -1574,7 +1574,7 @@ test "DWARF expressions" {
try b.writeLiteral(writer, 0);
try b.writeReinterpret(writer, die_offset);
_ = try stack_machine.run(program.items, allocator, context, null);
const regval_type = stack_machine.stack.popOrNull().?.regval_type;
const regval_type = stack_machine.stack.pop().?.regval_type;
try testing.expectEqual(die_offset, regval_type.type_offset);
}
@ -1586,7 +1586,7 @@ test "DWARF expressions" {
program.clearRetainingCapacity();
try b.writeOpcode(writer, OP.nop);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expect(stack_machine.stack.popOrNull() == null);
try testing.expect(stack_machine.stack.pop() == null);
// Sub-expression
{
@ -1599,7 +1599,7 @@ test "DWARF expressions" {
program.clearRetainingCapacity();
try b.writeEntryValue(writer, sub_program.items);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 3), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 3), stack_machine.stack.pop().?.generic);
}
// Register location description
@ -1626,7 +1626,7 @@ test "DWARF expressions" {
program.clearRetainingCapacity();
try b.writeEntryValue(writer, sub_program.items);
_ = try stack_machine.run(program.items, allocator, context, null);
try testing.expectEqual(@as(usize, 0xee), stack_machine.stack.popOrNull().?.generic);
try testing.expectEqual(@as(usize, 0xee), stack_machine.stack.pop().?.generic);
} else |err| {
switch (err) {
error.UnimplementedArch,


@ -2138,7 +2138,7 @@ pub const VirtualMachine = struct {
self.current_row.copy_on_write = true;
},
.restore_state => {
const restored_columns = self.stack.popOrNull() orelse return error.InvalidOperation;
const restored_columns = self.stack.pop() orelse return error.InvalidOperation;
self.columns.shrinkRetainingCapacity(self.columns.items.len - self.current_row.columns.len);
try self.columns.ensureUnusedCapacity(allocator, restored_columns.len);


@ -682,7 +682,7 @@ pub const Walker = struct {
// walking if they want, which means that we need to pop the directory
// that errored from the stack. Otherwise, all future `next` calls would
// likely just fail with the same error.
var item = self.stack.pop();
var item = self.stack.pop().?;
if (self.stack.items.len != 0) {
item.iter.dir.close();
}
@ -718,7 +718,7 @@ pub const Walker = struct {
.kind = base.kind,
};
} else {
var item = self.stack.pop();
var item = self.stack.pop().?;
if (self.stack.items.len != 0) {
item.iter.dir.close();
}


@ -681,7 +681,7 @@ pub fn testAllocatorAlignedShrink(base_allocator: mem.Allocator) !void {
try stuff_to_free.append(slice);
slice = try allocator.alignedAlloc(u8, 16, alloc_size);
}
while (stuff_to_free.popOrNull()) |item| {
while (stuff_to_free.pop()) |item| {
allocator.free(item);
}
slice[0] = 0x12;


@ -1070,7 +1070,7 @@ test "small allocations - free in reverse order" {
try list.append(ptr);
}
while (list.popOrNull()) |ptr| {
while (list.pop()) |ptr| {
allocator.destroy(ptr);
}
}
@ -1227,7 +1227,7 @@ test "shrink large object to large object with larger alignment" {
try stuff_to_free.append(slice);
slice = try allocator.alignedAlloc(u8, 16, alloc_size);
}
while (stuff_to_free.popOrNull()) |item| {
while (stuff_to_free.pop()) |item| {
allocator.free(item);
}
slice[0] = 0x12;
@ -1299,7 +1299,7 @@ test "realloc large object to larger alignment" {
try stuff_to_free.append(slice);
slice = try allocator.alignedAlloc(u8, 16, default_page_size * 2 + 50);
}
while (stuff_to_free.popOrNull()) |item| {
while (stuff_to_free.pop()) |item| {
allocator.free(item);
}
slice[0] = 0x12;


@ -124,7 +124,7 @@ pub const Value = union(enum) {
.array_begin => {
try stack.append(Value{ .array = Array.init(allocator) });
},
.array_end => return try handleCompleteValue(&stack, allocator, source, stack.pop(), options) orelse continue,
.array_end => return try handleCompleteValue(&stack, allocator, source, stack.pop().?, options) orelse continue,
else => unreachable,
}
@ -171,7 +171,7 @@ fn handleCompleteValue(stack: *Array, allocator: Allocator, source: anytype, val
switch (try source.nextAllocMax(allocator, .alloc_always, options.max_value_len.?)) {
.object_end => {
// This object is complete.
value = stack.pop();
value = stack.pop().?;
// Effectively recurse now that we have a complete value.
if (stack.items.len == 0) return value;
continue;


@ -657,7 +657,7 @@ pub const CObject = struct {
.end_block => |block| switch (@as(BlockId, @enumFromInt(block.id))) {
.Meta => {},
.Diag => {
var wip_diag = stack.pop();
var wip_diag = stack.pop().?;
errdefer wip_diag.deinit(gpa);
const src_ranges = try wip_diag.src_ranges.toOwnedSlice(gpa);
@ -5915,7 +5915,7 @@ pub fn addCCArgs(
try san_arg.appendSlice(arena, "fuzzer-no-link,");
}
// Chop off the trailing comma and append to argv.
if (san_arg.popOrNull()) |_| {
if (san_arg.pop()) |_| {
try argv.append(san_arg.items);
// These args have to be added after the `-fsanitize` arg or


@ -929,7 +929,7 @@ pub fn addDependency(ip: *InternPool, gpa: Allocator, depender: AnalUnit, depend
}
// Prepend a new dependency.
const new_index: DepEntry.Index, const ptr = if (ip.free_dep_entries.popOrNull()) |new_index| new: {
const new_index: DepEntry.Index, const ptr = if (ip.free_dep_entries.pop()) |new_index| new: {
break :new .{ new_index, &ip.dep_entries.items[@intFromEnum(new_index)] };
} else .{ @enumFromInt(ip.dep_entries.items.len), ip.dep_entries.addOneAssumeCapacity() };
if (deps.unwrap()) |old_first| {
@ -960,7 +960,7 @@ pub fn addDependency(ip: *InternPool, gpa: Allocator, depender: AnalUnit, depend
}
// Prepend a new dependency.
const new_index: DepEntry.Index, const ptr = if (ip.free_dep_entries.popOrNull()) |new_index| new: {
const new_index: DepEntry.Index, const ptr = if (ip.free_dep_entries.pop()) |new_index| new: {
break :new .{ new_index, &ip.dep_entries.items[@intFromEnum(new_index)] };
} else .{ @enumFromInt(ip.dep_entries.items.len), ip.dep_entries.addOneAssumeCapacity() };
if (gop.found_existing) {


@ -127,7 +127,7 @@ pub const JobQueue = struct {
// `Fetch` instances are allocated in prior ones' arenas.
// Sorry, I know it's a bit weird, but it slightly simplifies the
// critical section.
while (jq.all_fetches.popOrNull()) |f| f.deinit();
while (jq.all_fetches.pop()) |f| f.deinit();
jq.all_fetches.deinit(gpa);
jq.* = undefined;
}


@ -3912,7 +3912,7 @@ fn resolveComptimeKnownAllocPtr(sema: *Sema, block: *Block, alloc: Air.Inst.Ref,
const tmp_air = sema.getTmpAir();
while (to_map.popOrNull()) |air_ptr| {
while (to_map.pop()) |air_ptr| {
if (ptr_mapping.contains(air_ptr)) continue;
const PointerMethod = union(enum) {
same_addr,
@ -38422,7 +38422,7 @@ pub fn flushExports(sema: *Sema) !void {
// `sema.exports` is completed; store the data into the `Zcu`.
if (sema.exports.items.len == 1) {
try zcu.single_exports.ensureUnusedCapacity(gpa, 1);
const export_idx: Zcu.Export.Index = zcu.free_exports.popOrNull() orelse idx: {
const export_idx: Zcu.Export.Index = zcu.free_exports.pop() orelse idx: {
_ = try zcu.all_exports.addOne(gpa);
break :idx @enumFromInt(zcu.all_exports.items.len - 1);
};


@ -3130,7 +3130,7 @@ pub fn mapOldZirToNew(
}
}
while (match_stack.popOrNull()) |match_item| {
while (match_stack.pop()) |match_item| {
// First, a check: if the number of captures of this type has changed, we can't map it, because
// we wouldn't know how to correlate type information with the last update.
// Synchronizes with logic in `Zcu.PerThread.recreateStructType` etc.
@ -3412,7 +3412,7 @@ pub fn addUnitReference(zcu: *Zcu, src_unit: AnalUnit, referenced_unit: AnalUnit
try zcu.reference_table.ensureUnusedCapacity(gpa, 1);
const ref_idx = zcu.free_references.popOrNull() orelse idx: {
const ref_idx = zcu.free_references.pop() orelse idx: {
_ = try zcu.all_references.addOne(gpa);
break :idx zcu.all_references.items.len - 1;
};
@ -3437,7 +3437,7 @@ pub fn addTypeReference(zcu: *Zcu, src_unit: AnalUnit, referenced_type: InternPo
try zcu.type_reference_table.ensureUnusedCapacity(gpa, 1);
const ref_idx = zcu.free_type_references.popOrNull() orelse idx: {
const ref_idx = zcu.free_type_references.pop() orelse idx: {
_ = try zcu.all_type_references.addOne(gpa);
break :idx zcu.all_type_references.items.len - 1;
};


@ -572,7 +572,7 @@ fn gen(self: *Self) !void {
// dbg_epilogue_begin) is the last exitlude jump
// relocation (which would just jump one instruction
// further), it can be safely removed
self.mir_instructions.orderedRemove(self.exitlude_jump_relocs.pop());
self.mir_instructions.orderedRemove(self.exitlude_jump_relocs.pop().?);
}
for (self.exitlude_jump_relocs.items) |jmp_reloc| {
@ -4694,7 +4694,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) InnerError!void {
try self.branch_stack.append(.{});
errdefer {
_ = self.branch_stack.pop();
_ = self.branch_stack.pop().?;
}
try self.ensureProcessDeathCapacity(liveness_condbr.then_deaths.len);
@ -4705,7 +4705,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) InnerError!void {
// Revert to the previous register and stack allocation state.
var saved_then_branch = self.branch_stack.pop();
var saved_then_branch = self.branch_stack.pop().?;
defer saved_then_branch.deinit(self.gpa);
self.register_manager.registers = parent_registers;
@ -4800,7 +4800,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) InnerError!void {
}
{
var item = self.branch_stack.pop();
var item = self.branch_stack.pop().?;
item.deinit(self.gpa);
}
@ -5059,7 +5059,7 @@ fn lowerBlock(self: *Self, inst: Air.Inst.Index, body: []const Air.Inst.Index) !
// If the last Mir instruction is the last relocation (which
// would just jump one instruction further), it can be safely
// removed
self.mir_instructions.orderedRemove(relocs.pop());
self.mir_instructions.orderedRemove(relocs.pop().?);
}
for (relocs.items) |reloc| {
try self.performReloc(reloc);
@ -5125,7 +5125,7 @@ fn airSwitch(self: *Self, inst: Air.Inst.Index) InnerError!void {
try self.branch_stack.append(.{});
errdefer {
_ = self.branch_stack.pop();
_ = self.branch_stack.pop().?;
}
try self.ensureProcessDeathCapacity(liveness.deaths[case.idx].len);
@ -5135,7 +5135,7 @@ fn airSwitch(self: *Self, inst: Air.Inst.Index) InnerError!void {
try self.genBody(case.body);
// Revert to the previous register and stack allocation state.
var saved_case_branch = self.branch_stack.pop();
var saved_case_branch = self.branch_stack.pop().?;
defer saved_case_branch.deinit(self.gpa);
self.register_manager.registers = parent_registers;
@ -5163,7 +5163,7 @@ fn airSwitch(self: *Self, inst: Air.Inst.Index) InnerError!void {
try self.branch_stack.append(.{});
errdefer {
_ = self.branch_stack.pop();
_ = self.branch_stack.pop().?;
}
const else_deaths = liveness.deaths.len - 1;
@ -5174,7 +5174,7 @@ fn airSwitch(self: *Self, inst: Air.Inst.Index) InnerError!void {
try self.genBody(else_body);
// Revert to the previous register and stack allocation state.
var saved_case_branch = self.branch_stack.pop();
var saved_case_branch = self.branch_stack.pop().?;
defer saved_case_branch.deinit(self.gpa);
self.register_manager.registers = parent_registers;


@ -568,7 +568,7 @@ fn gen(self: *Self) !void {
// dbg_epilogue_begin) is the last exitlude jump
// relocation (which would just jump one instruction
// further), it can be safely removed
self.mir_instructions.orderedRemove(self.exitlude_jump_relocs.pop());
self.mir_instructions.orderedRemove(self.exitlude_jump_relocs.pop().?);
}
for (self.exitlude_jump_relocs.items) |jmp_reloc| {
@ -4669,7 +4669,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
try self.branch_stack.append(.{});
errdefer {
_ = self.branch_stack.pop();
_ = self.branch_stack.pop().?;
}
try self.ensureProcessDeathCapacity(liveness_condbr.then_deaths.len);
@ -4680,7 +4680,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
// Revert to the previous register and stack allocation state.
var saved_then_branch = self.branch_stack.pop();
var saved_then_branch = self.branch_stack.pop().?;
defer saved_then_branch.deinit(self.gpa);
self.register_manager.registers = parent_registers;
@ -4775,7 +4775,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
}
{
var item = self.branch_stack.pop();
var item = self.branch_stack.pop().?;
item.deinit(self.gpa);
}
@ -5009,7 +5009,7 @@ fn lowerBlock(self: *Self, inst: Air.Inst.Index, body: []const Air.Inst.Index) !
// If the last Mir instruction is the last relocation (which
// would just jump one instruction further), it can be safely
// removed
self.mir_instructions.orderedRemove(relocs.pop());
self.mir_instructions.orderedRemove(relocs.pop().?);
}
for (relocs.items) |reloc| {
try self.performReloc(reloc);
@ -5074,7 +5074,7 @@ fn airSwitch(self: *Self, inst: Air.Inst.Index) !void {
try self.branch_stack.append(.{});
errdefer {
_ = self.branch_stack.pop();
_ = self.branch_stack.pop().?;
}
try self.ensureProcessDeathCapacity(liveness.deaths[case.idx].len);
@ -5084,7 +5084,7 @@ fn airSwitch(self: *Self, inst: Air.Inst.Index) !void {
try self.genBody(case.body);
// Revert to the previous register and stack allocation state.
var saved_case_branch = self.branch_stack.pop();
var saved_case_branch = self.branch_stack.pop().?;
defer saved_case_branch.deinit(self.gpa);
self.register_manager.registers = parent_registers;
@ -5112,7 +5112,7 @@ fn airSwitch(self: *Self, inst: Air.Inst.Index) !void {
try self.branch_stack.append(.{});
errdefer {
_ = self.branch_stack.pop();
_ = self.branch_stack.pop().?;
}
const else_deaths = liveness.deaths.len - 1;
@ -5123,7 +5123,7 @@ fn airSwitch(self: *Self, inst: Air.Inst.Index) !void {
try self.genBody(else_body);
// Revert to the previous register and stack allocation state.
var saved_case_branch = self.branch_stack.pop();
var saved_case_branch = self.branch_stack.pop().?;
defer saved_case_branch.deinit(self.gpa);
self.register_manager.registers = parent_registers;


@ -392,7 +392,7 @@ fn gen(self: *Self) !void {
// dbg_epilogue_begin) is the last exitlude jump
// relocation (which would just jump two instructions
// further), it can be safely removed
const index = self.exitlude_jump_relocs.pop();
const index = self.exitlude_jump_relocs.pop().?;
// First, remove the delay slot, then remove
// the branch instruction itself.
@ -1147,7 +1147,7 @@ fn lowerBlock(self: *Self, inst: Air.Inst.Index, body: []const Air.Inst.Index) !
// If the last Mir instruction is the last relocation (which
// would just jump two instruction further), it can be safely
// removed
const index = relocs.pop();
const index = relocs.pop().?;
// First, remove the delay slot, then remove
// the branch instruction itself.
@ -1501,7 +1501,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
try self.branch_stack.append(.{});
errdefer {
_ = self.branch_stack.pop();
_ = self.branch_stack.pop().?;
}
try self.ensureProcessDeathCapacity(liveness_condbr.then_deaths.len);
@ -1512,7 +1512,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
// Revert to the previous register and stack allocation state.
var saved_then_branch = self.branch_stack.pop();
var saved_then_branch = self.branch_stack.pop().?;
defer saved_then_branch.deinit(self.gpa);
self.register_manager.registers = parent_registers;
@ -1608,7 +1608,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
}
{
var item = self.branch_stack.pop();
var item = self.branch_stack.pop().?;
item.deinit(self.gpa);
}


@ -1121,11 +1121,11 @@ fn allocLocal(cg: *CodeGen, ty: Type) InnerError!WValue {
const zcu = cg.pt.zcu;
const valtype = typeToValtype(ty, zcu, cg.target);
const index_or_null = switch (valtype) {
.i32 => cg.free_locals_i32.popOrNull(),
.i64 => cg.free_locals_i64.popOrNull(),
.f32 => cg.free_locals_f32.popOrNull(),
.f64 => cg.free_locals_f64.popOrNull(),
.v128 => cg.free_locals_v128.popOrNull(),
.i32 => cg.free_locals_i32.pop(),
.i64 => cg.free_locals_i64.pop(),
.f32 => cg.free_locals_f32.pop(),
.f64 => cg.free_locals_f64.pop(),
.v128 => cg.free_locals_v128.pop(),
};
if (index_or_null) |index| {
log.debug("reusing local ({d}) of type {}", .{ index, valtype });
@ -1309,7 +1309,7 @@ fn functionInner(cg: *CodeGen, any_returns: bool) InnerError!Function {
try cg.branches.append(cg.gpa, .{});
// clean up outer branch
defer {
var outer_branch = cg.branches.pop();
var outer_branch = cg.branches.pop().?;
outer_branch.deinit(cg.gpa);
assert(cg.branches.items.len == 0); // missing branch merge
}
@ -3482,7 +3482,7 @@ fn airCondBr(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void {
cg.branches.appendAssumeCapacity(.{});
try cg.currentBranch().values.ensureUnusedCapacity(cg.gpa, @as(u32, @intCast(liveness_condbr.else_deaths.len)));
defer {
var else_stack = cg.branches.pop();
var else_stack = cg.branches.pop().?;
else_stack.deinit(cg.gpa);
}
try cg.genBody(else_body);
@ -3494,7 +3494,7 @@ fn airCondBr(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void {
cg.branches.appendAssumeCapacity(.{});
try cg.currentBranch().values.ensureUnusedCapacity(cg.gpa, @as(u32, @intCast(liveness_condbr.then_deaths.len)));
defer {
var then_stack = cg.branches.pop();
var then_stack = cg.branches.pop().?;
then_stack.deinit(cg.gpa);
}
try cg.genBody(then_body);
@ -4132,7 +4132,7 @@ fn airSwitchBr(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void {
cg.branches.appendAssumeCapacity(.{});
try cg.currentBranch().values.ensureUnusedCapacity(cg.gpa, liveness.deaths[index].len);
defer {
var case_branch = cg.branches.pop();
var case_branch = cg.branches.pop().?;
case_branch.deinit(cg.gpa);
}
try cg.genBody(case.body);
@ -4144,7 +4144,7 @@ fn airSwitchBr(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void {
const else_deaths = liveness.deaths.len - 1;
try cg.currentBranch().values.ensureUnusedCapacity(cg.gpa, liveness.deaths[else_deaths].len);
defer {
var else_branch = cg.branches.pop();
var else_branch = cg.branches.pop().?;
else_branch.deinit(cg.gpa);
}
try cg.genBody(else_body);
@ -6459,7 +6459,7 @@ fn lowerTry(
try cg.branches.append(cg.gpa, .{});
try cg.currentBranch().values.ensureUnusedCapacity(cg.gpa, liveness.else_deaths.len + liveness.then_deaths.len);
defer {
var branch = cg.branches.pop();
var branch = cg.branches.pop().?;
branch.deinit(cg.gpa);
}
try cg.genBody(body);


@ -1888,7 +1888,7 @@ pub fn resolveInputs(
// that this library search logic can be applied to them.
mem.reverse(UnresolvedInput, unresolved_inputs.items);
syslib: while (unresolved_inputs.popOrNull()) |unresolved_input| {
syslib: while (unresolved_inputs.pop()) |unresolved_input| {
const name_query: UnresolvedInput.NameQuery = switch (unresolved_input) {
.name_query => |nq| nq,
.ambiguous_name => |an| an: {


@ -707,7 +707,7 @@ pub fn allocateSymbol(coff: *Coff) !u32 {
try coff.locals.ensureUnusedCapacity(gpa, 1);
const index = blk: {
if (coff.locals_free_list.popOrNull()) |index| {
if (coff.locals_free_list.pop()) |index| {
log.debug(" (reusing symbol index {d})", .{index});
break :blk index;
} else {
@ -735,7 +735,7 @@ fn allocateGlobal(coff: *Coff) !u32 {
try coff.globals.ensureUnusedCapacity(gpa, 1);
const index = blk: {
if (coff.globals_free_list.popOrNull()) |index| {
if (coff.globals_free_list.pop()) |index| {
log.debug(" (reusing global index {d})", .{index});
break :blk index;
} else {
@ -861,7 +861,7 @@ fn writeAtom(coff: *Coff, atom_index: Atom.Index, code: []u8) !void {
try coff.pwriteAll(code, file_offset);
// Now we can mark the relocs as resolved.
while (relocs.popOrNull()) |reloc| {
while (relocs.pop()) |reloc| {
reloc.dirty = false;
}
}
@ -3670,7 +3670,7 @@ const ImportTable = struct {
fn addImport(itab: *ImportTable, allocator: Allocator, target: SymbolWithLoc) !ImportIndex {
try itab.entries.ensureUnusedCapacity(allocator, 1);
const index: u32 = blk: {
if (itab.free_list.popOrNull()) |index| {
if (itab.free_list.pop()) |index| {
log.debug(" (reusing import entry index {d})", .{index});
break :blk index;
} else {


@ -363,7 +363,7 @@ pub const Section = struct {
fn popUnit(sec: *Section, gpa: std.mem.Allocator) void {
const unit_index: Unit.Index = @enumFromInt(sec.units.items.len - 1);
sec.unlinkUnit(unit_index);
var unit = sec.units.pop();
var unit = sec.units.pop().?;
unit.deinit(gpa);
}
@ -1559,7 +1559,7 @@ pub const WipNav = struct {
pub fn leaveBlock(wip_nav: *WipNav, code_off: u64) UpdateError!void {
const block_bytes = comptime uleb128Bytes(@intFromEnum(AbbrevCode.block));
const block = wip_nav.blocks.pop();
const block = wip_nav.blocks.pop().?;
if (wip_nav.any_children)
try uleb128(wip_nav.debug_info.writer(wip_nav.dwarf.gpa), @intFromEnum(AbbrevCode.null))
else
@ -1599,7 +1599,7 @@ pub const WipNav = struct {
pub fn leaveInlineFunc(wip_nav: *WipNav, func: InternPool.Index, code_off: u64) UpdateError!void {
const inlined_func_bytes = comptime uleb128Bytes(@intFromEnum(AbbrevCode.inlined_func));
const block = wip_nav.blocks.pop();
const block = wip_nav.blocks.pop().?;
if (wip_nav.any_children)
try uleb128(wip_nav.debug_info.writer(wip_nav.dwarf.gpa), @intFromEnum(AbbrevCode.null))
else
@ -2054,7 +2054,7 @@ pub const WipNav = struct {
fn updateLazy(wip_nav: *WipNav, src_loc: Zcu.LazySrcLoc) UpdateError!void {
const ip = &wip_nav.pt.zcu.intern_pool;
while (wip_nav.pending_lazy.popOrNull()) |val| switch (ip.typeOf(val)) {
while (wip_nav.pending_lazy.pop()) |val| switch (ip.typeOf(val)) {
.type_type => try wip_nav.dwarf.updateLazyType(wip_nav.pt, src_loc, val, &wip_nav.pending_lazy),
else => try wip_nav.dwarf.updateLazyValue(wip_nav.pt, src_loc, val, &wip_nav.pending_lazy),
};


@ -515,7 +515,7 @@ fn updateFinish(self: *Plan9, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index
fn allocateSymbolIndex(self: *Plan9) !usize {
const gpa = self.base.comp.gpa;
if (self.syms_index_free_list.popOrNull()) |i| {
if (self.syms_index_free_list.pop()) |i| {
return i;
} else {
_ = try self.syms.addOne(gpa);
@ -524,7 +524,7 @@ fn allocateSymbolIndex(self: *Plan9) !usize {
}
fn allocateGotIndex(self: *Plan9) usize {
if (self.got_index_free_list.popOrNull()) |i| {
if (self.got_index_free_list.pop()) |i| {
return i;
} else {
self.got_len += 1;


@ -13,7 +13,7 @@ pub fn TableSection(comptime Entry: type) type {
pub fn allocateEntry(self: *Self, allocator: Allocator, entry: Entry) Allocator.Error!Index {
try self.entries.ensureUnusedCapacity(allocator, 1);
const index = blk: {
if (self.free_list.popOrNull()) |index| {
if (self.free_list.pop()) |index| {
log.debug(" (reusing entry index {d})", .{index});
break :blk index;
} else {


@ -192,7 +192,7 @@ pub fn main() !void {
var dir_stack = std.ArrayList([]const u8).init(allocator);
try dir_stack.append(target_include_dir);
while (dir_stack.popOrNull()) |full_dir_name| {
while (dir_stack.pop()) |full_dir_name| {
var dir = std.fs.cwd().openDir(full_dir_name, .{ .iterate = true }) catch |err| switch (err) {
error.FileNotFound => continue :search,
error.AccessDenied => continue :search,
@ -273,14 +273,14 @@ pub fn main() !void {
}
}
std.mem.sort(*Contents, contents_list.items, {}, Contents.hitCountLessThan);
const best_contents = contents_list.popOrNull().?;
const best_contents = contents_list.pop().?;
if (best_contents.hit_count > 1) {
// worth it to make it generic
const full_path = try std.fs.path.join(allocator, &[_][]const u8{ out_dir, generic_name, path_kv.key_ptr.* });
try std.fs.cwd().makePath(std.fs.path.dirname(full_path).?);
try std.fs.cwd().writeFile(.{ .sub_path = full_path, .data = best_contents.bytes });
best_contents.is_generic = true;
while (contents_list.popOrNull()) |contender| {
while (contents_list.pop()) |contender| {
if (contender.hit_count > 1) {
const this_missed_bytes = contender.hit_count * contender.bytes.len;
missed_opportunity_bytes += this_missed_bytes;


@ -189,7 +189,7 @@ pub fn main() !void {
var dir_stack = std.ArrayList([]const u8).init(arena);
try dir_stack.append(target_include_dir);
while (dir_stack.popOrNull()) |full_dir_name| {
while (dir_stack.pop()) |full_dir_name| {
var dir = std.fs.cwd().openDir(full_dir_name, .{ .iterate = true }) catch |err| switch (err) {
error.FileNotFound => continue :search,
error.AccessDenied => continue :search,
@ -270,14 +270,14 @@ pub fn main() !void {
}
}
std.mem.sort(*Contents, contents_list.items, {}, Contents.hitCountLessThan);
const best_contents = contents_list.popOrNull().?;
const best_contents = contents_list.pop().?;
if (best_contents.hit_count > 1) {
// worth it to make it generic
const full_path = try std.fs.path.join(arena, &[_][]const u8{ out_dir, generic_name, path_kv.key_ptr.* });
try std.fs.cwd().makePath(std.fs.path.dirname(full_path).?);
try std.fs.cwd().writeFile(.{ .sub_path = full_path, .data = best_contents.bytes });
best_contents.is_generic = true;
while (contents_list.popOrNull()) |contender| {
while (contents_list.pop()) |contender| {
if (contender.hit_count > 1) {
const this_missed_bytes = contender.hit_count * contender.bytes.len;
missed_opportunity_bytes += this_missed_bytes;