stage2: move zir.Code to become root level fields of zir.zig
next commit will do the rename
parent 798ad631f3
commit 0170a242bb
4 changed files with 426 additions and 427 deletions
src/AstGen.zig (6 changed lines)

@@ -1,7 +1,7 @@
-//! A Work-In-Progress `zir.Code`. This is a shared parent of all
-//! `GenZir` scopes. Once the `zir.Code` is produced, this struct
+//! A Work-In-Progress `Zir`. This is a shared parent of all
+//! `GenZir` scopes. Once the `Zir` is produced, this struct
 //! is deinitialized.
-//! The `GenZir.finish` function converts this to a `zir.Code`.
+//! The `GenZir.finish` function converts this to a `Zir`.
 
 const AstGen = @This();
src/Module.zig (206 changed lines)
@@ -21,7 +21,7 @@ const TypedValue = @import("TypedValue.zig");
 const Package = @import("Package.zig");
 const link = @import("link.zig");
 const ir = @import("ir.zig");
-const zir = @import("zir.zig");
+const Zir = @import("zir.zig"); // TODO rename this to Zir
 const trace = @import("tracy.zig").trace;
 const AstGen = @import("AstGen.zig");
 const Sema = @import("Sema.zig");
@@ -464,7 +464,7 @@ pub const Fn = struct {
 /// The first N elements of `extra` are indexes into `string_bytes` to
 /// a null-terminated string.
 /// This memory is managed with gpa, must be freed when the function is freed.
-zir: zir.Code,
+zir: Zir,
 /// undefined unless analysis state is `success`.
 body: ir.Body,
 state: Analysis,
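The doc comment above describes how parameter names are stored for a function's ZIR: the first N entries of `extra` index into `string_bytes`. A minimal reading sketch (the names `fn_zir` and `param_i` are assumptions for illustration, not code from this commit):

    // fn_zir: Zir, param_i: usize with param_i < N (the parameter count)
    const name_index = fn_zir.extra[param_i];
    const param_name: [:0]const u8 = fn_zir.nullTerminatedString(name_index);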
@@ -808,7 +808,7 @@ pub const Scope = struct {
 /// This `Block` maps a block ZIR instruction to the corresponding
 /// TZIR instruction for break instruction analysis.
 pub const Label = struct {
-zir_block: zir.Inst.Index,
+zir_block: Zir.Inst.Index,
 merges: Merges,
 };
@@ -834,7 +834,7 @@ pub const Scope = struct {
 
 /// For debugging purposes.
 pub fn dump(block: *Block, mod: Module) void {
-zir.dumpBlock(mod, block);
+Zir.dumpBlock(mod, block);
 }
 
 pub fn makeSubBlock(parent: *Block) Block {
@@ -1045,7 +1045,7 @@ pub const Scope = struct {
 };
 
 /// This is a temporary structure; references to it are valid only
-/// while constructing a `zir.Code`.
+/// while constructing a `Zir`.
 pub const GenZir = struct {
 pub const base_tag: Tag = .gen_zir;
 base: Scope = Scope{ .tag = base_tag },
@@ -1056,16 +1056,16 @@ pub const Scope = struct {
 astgen: *AstGen,
 /// Keeps track of the list of instructions in this scope only. Indexes
 /// to instructions in `astgen`.
-instructions: ArrayListUnmanaged(zir.Inst.Index) = .{},
+instructions: ArrayListUnmanaged(Zir.Inst.Index) = .{},
 label: ?Label = null,
-break_block: zir.Inst.Index = 0,
-continue_block: zir.Inst.Index = 0,
+break_block: Zir.Inst.Index = 0,
+continue_block: Zir.Inst.Index = 0,
 /// Only valid when setBreakResultLoc is called.
 break_result_loc: AstGen.ResultLoc = undefined,
 /// When a block has a pointer result location, here it is.
-rl_ptr: zir.Inst.Ref = .none,
+rl_ptr: Zir.Inst.Ref = .none,
 /// When a block has a type result location, here it is.
-rl_ty_inst: zir.Inst.Ref = .none,
+rl_ty_inst: Zir.Inst.Ref = .none,
 /// Keeps track of how many branches of a block did not actually
 /// consume the result location. astgen uses this to figure out
 /// whether to rely on break instructions or writing to the result
@@ -1077,25 +1077,25 @@ pub const Scope = struct {
 break_count: usize = 0,
 /// Tracks `break :foo bar` instructions so they can possibly be elided later if
 /// the labeled block ends up not needing a result location pointer.
-labeled_breaks: ArrayListUnmanaged(zir.Inst.Index) = .{},
+labeled_breaks: ArrayListUnmanaged(Zir.Inst.Index) = .{},
 /// Tracks `store_to_block_ptr` instructions that correspond to break instructions
 /// so they can possibly be elided later if the labeled block ends up not needing
 /// a result location pointer.
-labeled_store_to_block_ptr_list: ArrayListUnmanaged(zir.Inst.Index) = .{},
+labeled_store_to_block_ptr_list: ArrayListUnmanaged(Zir.Inst.Index) = .{},
 
 pub const Label = struct {
 token: ast.TokenIndex,
-block_inst: zir.Inst.Index,
+block_inst: Zir.Inst.Index,
 used: bool = false,
 };
 
 /// Only valid to call on the top of the `GenZir` stack. Completes the
-/// `AstGen` into a `zir.Code`. Leaves the `AstGen` in an
+/// `AstGen` into a `Zir`. Leaves the `AstGen` in an
 /// initialized, but empty, state.
-pub fn finish(gz: *GenZir) !zir.Code {
+pub fn finish(gz: *GenZir) !Zir {
 const gpa = gz.astgen.mod.gpa;
 try gz.setBlockBody(0);
-return zir.Code{
+return Zir{
 .instructions = gz.astgen.instructions.toOwnedSlice(),
 .string_bytes = gz.astgen.string_bytes.toOwnedSlice(gpa),
 .extra = gz.astgen.extra.toOwnedSlice(gpa),
@@ -1148,24 +1148,24 @@ pub const Scope = struct {
 }
 }
 
-pub fn setBoolBrBody(gz: GenZir, inst: zir.Inst.Index) !void {
+pub fn setBoolBrBody(gz: GenZir, inst: Zir.Inst.Index) !void {
 const gpa = gz.astgen.mod.gpa;
 try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
-@typeInfo(zir.Inst.Block).Struct.fields.len + gz.instructions.items.len);
+@typeInfo(Zir.Inst.Block).Struct.fields.len + gz.instructions.items.len);
 const zir_datas = gz.astgen.instructions.items(.data);
 zir_datas[inst].bool_br.payload_index = gz.astgen.addExtraAssumeCapacity(
-zir.Inst.Block{ .body_len = @intCast(u32, gz.instructions.items.len) },
+Zir.Inst.Block{ .body_len = @intCast(u32, gz.instructions.items.len) },
 );
 gz.astgen.extra.appendSliceAssumeCapacity(gz.instructions.items);
 }
 
-pub fn setBlockBody(gz: GenZir, inst: zir.Inst.Index) !void {
+pub fn setBlockBody(gz: GenZir, inst: Zir.Inst.Index) !void {
 const gpa = gz.astgen.mod.gpa;
 try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
-@typeInfo(zir.Inst.Block).Struct.fields.len + gz.instructions.items.len);
+@typeInfo(Zir.Inst.Block).Struct.fields.len + gz.instructions.items.len);
 const zir_datas = gz.astgen.instructions.items(.data);
 zir_datas[inst].pl_node.payload_index = gz.astgen.addExtraAssumeCapacity(
-zir.Inst.Block{ .body_len = @intCast(u32, gz.instructions.items.len) },
+Zir.Inst.Block{ .body_len = @intCast(u32, gz.instructions.items.len) },
 );
 gz.astgen.extra.appendSliceAssumeCapacity(gz.instructions.items);
 }
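Both setters above write a `Zir.Inst.Block` payload into `extra`, immediately followed by `body_len` instruction indices. A hypothetical reader for that layout (an illustration of the convention, not a function from this commit):

    fn blockBody(code: Zir, payload_index: usize) []const Zir.Inst.Index {
        const block = code.extraData(Zir.Inst.Block, payload_index);
        // The trailing body starts at `end`, right after the payload fields.
        return code.extra[block.end..][0..block.data.body_len];
    }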
@@ -1180,12 +1180,12 @@ pub const Scope = struct {
 return str_index;
 }
 
-pub fn addFnTypeCc(gz: *GenZir, tag: zir.Inst.Tag, args: struct {
+pub fn addFnTypeCc(gz: *GenZir, tag: Zir.Inst.Tag, args: struct {
 src_node: ast.Node.Index,
-param_types: []const zir.Inst.Ref,
-ret_ty: zir.Inst.Ref,
-cc: zir.Inst.Ref,
-}) !zir.Inst.Ref {
+param_types: []const Zir.Inst.Ref,
+ret_ty: Zir.Inst.Ref,
+cc: Zir.Inst.Ref,
+}) !Zir.Inst.Ref {
 assert(args.src_node != 0);
 assert(args.ret_ty != .none);
 assert(args.cc != .none);
@@ -1193,16 +1193,16 @@ pub const Scope = struct {
 try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
 try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
 try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
-@typeInfo(zir.Inst.FnTypeCc).Struct.fields.len + args.param_types.len);
+@typeInfo(Zir.Inst.FnTypeCc).Struct.fields.len + args.param_types.len);
 
-const payload_index = gz.astgen.addExtraAssumeCapacity(zir.Inst.FnTypeCc{
+const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.FnTypeCc{
 .return_type = args.ret_ty,
 .cc = args.cc,
 .param_types_len = @intCast(u32, args.param_types.len),
 });
 gz.astgen.appendRefsAssumeCapacity(args.param_types);
 
-const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
+const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
 gz.astgen.instructions.appendAssumeCapacity(.{
 .tag = tag,
 .data = .{ .pl_node = .{
@@ -1214,26 +1214,26 @@ pub const Scope = struct {
 return gz.astgen.indexToRef(new_index);
 }
 
-pub fn addFnType(gz: *GenZir, tag: zir.Inst.Tag, args: struct {
+pub fn addFnType(gz: *GenZir, tag: Zir.Inst.Tag, args: struct {
 src_node: ast.Node.Index,
-ret_ty: zir.Inst.Ref,
-param_types: []const zir.Inst.Ref,
-}) !zir.Inst.Ref {
+ret_ty: Zir.Inst.Ref,
+param_types: []const Zir.Inst.Ref,
+}) !Zir.Inst.Ref {
 assert(args.src_node != 0);
 assert(args.ret_ty != .none);
 const gpa = gz.astgen.mod.gpa;
 try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
 try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
 try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
-@typeInfo(zir.Inst.FnType).Struct.fields.len + args.param_types.len);
+@typeInfo(Zir.Inst.FnType).Struct.fields.len + args.param_types.len);
 
-const payload_index = gz.astgen.addExtraAssumeCapacity(zir.Inst.FnType{
+const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.FnType{
 .return_type = args.ret_ty,
 .param_types_len = @intCast(u32, args.param_types.len),
 });
 gz.astgen.appendRefsAssumeCapacity(args.param_types);
 
-const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
+const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
 gz.astgen.instructions.appendAssumeCapacity(.{
 .tag = tag,
 .data = .{ .pl_node = .{
@@ -1247,27 +1247,27 @@ pub const Scope = struct {
 
 pub fn addCall(
 gz: *GenZir,
-tag: zir.Inst.Tag,
-callee: zir.Inst.Ref,
-args: []const zir.Inst.Ref,
+tag: Zir.Inst.Tag,
+callee: Zir.Inst.Ref,
+args: []const Zir.Inst.Ref,
 /// Absolute node index. This function does the conversion to offset from Decl.
 src_node: ast.Node.Index,
-) !zir.Inst.Ref {
+) !Zir.Inst.Ref {
 assert(callee != .none);
 assert(src_node != 0);
 const gpa = gz.astgen.mod.gpa;
 try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
 try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
 try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
-@typeInfo(zir.Inst.Call).Struct.fields.len + args.len);
+@typeInfo(Zir.Inst.Call).Struct.fields.len + args.len);
 
-const payload_index = gz.astgen.addExtraAssumeCapacity(zir.Inst.Call{
+const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Call{
 .callee = callee,
 .args_len = @intCast(u32, args.len),
 });
 gz.astgen.appendRefsAssumeCapacity(args);
 
-const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
+const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
 gz.astgen.instructions.appendAssumeCapacity(.{
 .tag = tag,
 .data = .{ .pl_node = .{
@@ -1279,19 +1279,19 @@ pub const Scope = struct {
 return gz.astgen.indexToRef(new_index);
 }
 
-/// Note that this returns a `zir.Inst.Index` not a ref.
+/// Note that this returns a `Zir.Inst.Index` not a ref.
 /// Leaves the `payload_index` field undefined.
 pub fn addBoolBr(
 gz: *GenZir,
-tag: zir.Inst.Tag,
-lhs: zir.Inst.Ref,
-) !zir.Inst.Index {
+tag: Zir.Inst.Tag,
+lhs: Zir.Inst.Ref,
+) !Zir.Inst.Index {
 assert(lhs != .none);
 const gpa = gz.astgen.mod.gpa;
 try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
 try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
 
-const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
+const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
 gz.astgen.instructions.appendAssumeCapacity(.{
 .tag = tag,
 .data = .{ .bool_br = .{
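As the doc comment notes, `addBoolBr` hands back an instruction index rather than a ref, because the caller still has to attach a body to it. A usage sketch (the call site shown here is an assumption, pieced together from the functions in this diff):

    // `tag` would be one of the boolean-branch instruction tags.
    const bool_br = try gz.addBoolBr(tag, lhs);
    // ...emit the right-hand side into a sub-scope `rhs_scope`...
    try rhs_scope.setBoolBrBody(bool_br);
    // Only now is it converted to a ref for use as a value.
    const result = gz.astgen.indexToRef(bool_br);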
@@ -1303,14 +1303,14 @@ pub const Scope = struct {
 return new_index;
 }
 
-pub fn addInt(gz: *GenZir, integer: u64) !zir.Inst.Ref {
+pub fn addInt(gz: *GenZir, integer: u64) !Zir.Inst.Ref {
 return gz.add(.{
 .tag = .int,
 .data = .{ .int = integer },
 });
 }
 
-pub fn addFloat(gz: *GenZir, number: f32, src_node: ast.Node.Index) !zir.Inst.Ref {
+pub fn addFloat(gz: *GenZir, number: f32, src_node: ast.Node.Index) !Zir.Inst.Ref {
 return gz.add(.{
 .tag = .float,
 .data = .{ .float = .{
@@ -1322,11 +1322,11 @@ pub const Scope = struct {
 
 pub fn addUnNode(
 gz: *GenZir,
-tag: zir.Inst.Tag,
-operand: zir.Inst.Ref,
+tag: Zir.Inst.Tag,
+operand: Zir.Inst.Ref,
 /// Absolute node index. This function does the conversion to offset from Decl.
 src_node: ast.Node.Index,
-) !zir.Inst.Ref {
+) !Zir.Inst.Ref {
 assert(operand != .none);
 return gz.add(.{
 .tag = tag,
@@ -1339,17 +1339,17 @@ pub const Scope = struct {
 
 pub fn addPlNode(
 gz: *GenZir,
-tag: zir.Inst.Tag,
+tag: Zir.Inst.Tag,
 /// Absolute node index. This function does the conversion to offset from Decl.
 src_node: ast.Node.Index,
 extra: anytype,
-) !zir.Inst.Ref {
+) !Zir.Inst.Ref {
 const gpa = gz.astgen.mod.gpa;
 try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
 try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
 
 const payload_index = try gz.astgen.addExtra(extra);
-const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
+const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
 gz.astgen.instructions.appendAssumeCapacity(.{
 .tag = tag,
 .data = .{ .pl_node = .{
@@ -1363,19 +1363,19 @@ pub const Scope = struct {
 
 pub fn addArrayTypeSentinel(
 gz: *GenZir,
-len: zir.Inst.Ref,
-sentinel: zir.Inst.Ref,
-elem_type: zir.Inst.Ref,
-) !zir.Inst.Ref {
+len: Zir.Inst.Ref,
+sentinel: Zir.Inst.Ref,
+elem_type: Zir.Inst.Ref,
+) !Zir.Inst.Ref {
 const gpa = gz.astgen.mod.gpa;
 try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
 try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
 
-const payload_index = try gz.astgen.addExtra(zir.Inst.ArrayTypeSentinel{
+const payload_index = try gz.astgen.addExtra(Zir.Inst.ArrayTypeSentinel{
 .sentinel = sentinel,
 .elem_type = elem_type,
 });
-const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
+const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
 gz.astgen.instructions.appendAssumeCapacity(.{
 .tag = .array_type_sentinel,
 .data = .{ .array_type_sentinel = .{
@@ -1389,11 +1389,11 @@ pub const Scope = struct {
 
 pub fn addUnTok(
 gz: *GenZir,
-tag: zir.Inst.Tag,
-operand: zir.Inst.Ref,
+tag: Zir.Inst.Tag,
+operand: Zir.Inst.Ref,
 /// Absolute token index. This function does the conversion to Decl offset.
 abs_tok_index: ast.TokenIndex,
-) !zir.Inst.Ref {
+) !Zir.Inst.Ref {
 assert(operand != .none);
 return gz.add(.{
 .tag = tag,
@@ -1406,11 +1406,11 @@ pub const Scope = struct {
 
 pub fn addStrTok(
 gz: *GenZir,
-tag: zir.Inst.Tag,
+tag: Zir.Inst.Tag,
 str_index: u32,
 /// Absolute token index. This function does the conversion to Decl offset.
 abs_tok_index: ast.TokenIndex,
-) !zir.Inst.Ref {
+) !Zir.Inst.Ref {
 return gz.add(.{
 .tag = tag,
 .data = .{ .str_tok = .{
@@ -1422,10 +1422,10 @@ pub const Scope = struct {
 
 pub fn addBreak(
 gz: *GenZir,
-tag: zir.Inst.Tag,
-break_block: zir.Inst.Index,
-operand: zir.Inst.Ref,
-) !zir.Inst.Index {
+tag: Zir.Inst.Tag,
+break_block: Zir.Inst.Index,
+operand: Zir.Inst.Ref,
+) !Zir.Inst.Index {
 return gz.addAsIndex(.{
 .tag = tag,
 .data = .{ .@"break" = .{
@@ -1437,10 +1437,10 @@ pub const Scope = struct {
 
 pub fn addBin(
 gz: *GenZir,
-tag: zir.Inst.Tag,
-lhs: zir.Inst.Ref,
-rhs: zir.Inst.Ref,
-) !zir.Inst.Ref {
+tag: Zir.Inst.Tag,
+lhs: Zir.Inst.Ref,
+rhs: Zir.Inst.Ref,
+) !Zir.Inst.Ref {
 assert(lhs != .none);
 assert(rhs != .none);
 return gz.add(.{
@@ -1454,10 +1454,10 @@ pub const Scope = struct {
 
 pub fn addDecl(
 gz: *GenZir,
-tag: zir.Inst.Tag,
+tag: Zir.Inst.Tag,
 decl_index: u32,
 src_node: ast.Node.Index,
-) !zir.Inst.Ref {
+) !Zir.Inst.Ref {
 return gz.add(.{
 .tag = tag,
 .data = .{ .pl_node = .{
@@ -1469,10 +1469,10 @@ pub const Scope = struct {
 
 pub fn addNode(
 gz: *GenZir,
-tag: zir.Inst.Tag,
+tag: Zir.Inst.Tag,
 /// Absolute node index. This function does the conversion to offset from Decl.
 src_node: ast.Node.Index,
-) !zir.Inst.Ref {
+) !Zir.Inst.Ref {
 return gz.add(.{
 .tag = tag,
 .data = .{ .node = gz.astgen.decl.nodeIndexToRelative(src_node) },
@@ -1482,9 +1482,9 @@ pub const Scope = struct {
 /// Asserts that `str` is 8 or fewer bytes.
 pub fn addSmallStr(
 gz: *GenZir,
-tag: zir.Inst.Tag,
+tag: Zir.Inst.Tag,
 str: []const u8,
-) !zir.Inst.Ref {
+) !Zir.Inst.Ref {
 var buf: [9]u8 = undefined;
 mem.copy(u8, &buf, str);
 buf[str.len] = 0;
@@ -1495,11 +1495,11 @@ pub const Scope = struct {
 });
 }
 
-/// Note that this returns a `zir.Inst.Index` not a ref.
+/// Note that this returns a `Zir.Inst.Index` not a ref.
 /// Does *not* append the block instruction to the scope.
 /// Leaves the `payload_index` field undefined.
-pub fn addBlock(gz: *GenZir, tag: zir.Inst.Tag, node: ast.Node.Index) !zir.Inst.Index {
-const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
+pub fn addBlock(gz: *GenZir, tag: Zir.Inst.Tag, node: ast.Node.Index) !Zir.Inst.Index {
+const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
 const gpa = gz.astgen.mod.gpa;
 try gz.astgen.instructions.append(gpa, .{
 .tag = tag,
@@ -1511,12 +1511,12 @@ pub const Scope = struct {
 return new_index;
 }
 
-/// Note that this returns a `zir.Inst.Index` not a ref.
+/// Note that this returns a `Zir.Inst.Index` not a ref.
 /// Leaves the `payload_index` field undefined.
-pub fn addCondBr(gz: *GenZir, tag: zir.Inst.Tag, node: ast.Node.Index) !zir.Inst.Index {
+pub fn addCondBr(gz: *GenZir, tag: Zir.Inst.Tag, node: ast.Node.Index) !Zir.Inst.Index {
 const gpa = gz.astgen.mod.gpa;
 try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
-const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
+const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
 try gz.astgen.instructions.append(gpa, .{
 .tag = tag,
 .data = .{ .pl_node = .{
@@ -1528,16 +1528,16 @@ pub const Scope = struct {
 return new_index;
 }
 
-pub fn add(gz: *GenZir, inst: zir.Inst) !zir.Inst.Ref {
+pub fn add(gz: *GenZir, inst: Zir.Inst) !Zir.Inst.Ref {
 return gz.astgen.indexToRef(try gz.addAsIndex(inst));
 }
 
-pub fn addAsIndex(gz: *GenZir, inst: zir.Inst) !zir.Inst.Index {
+pub fn addAsIndex(gz: *GenZir, inst: Zir.Inst) !Zir.Inst.Index {
 const gpa = gz.astgen.mod.gpa;
 try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
 try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
 
-const new_index = @intCast(zir.Inst.Index, gz.astgen.instructions.len);
+const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
 gz.astgen.instructions.appendAssumeCapacity(inst);
 gz.instructions.appendAssumeCapacity(new_index);
 return new_index;
@@ -1554,7 +1554,7 @@ pub const Scope = struct {
 parent: *Scope,
 gen_zir: *GenZir,
 name: []const u8,
-inst: zir.Inst.Ref,
+inst: Zir.Inst.Ref,
 /// Source location of the corresponding variable declaration.
 src: LazySrcLoc,
 };
@@ -1569,7 +1569,7 @@ pub const Scope = struct {
 parent: *Scope,
 gen_zir: *GenZir,
 name: []const u8,
-ptr: zir.Inst.Ref,
+ptr: Zir.Inst.Ref,
 /// Source location of the corresponding variable declaration.
 src: LazySrcLoc,
 };
@@ -2511,7 +2511,7 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
 var analysis_arena = std.heap.ArenaAllocator.init(mod.gpa);
 defer analysis_arena.deinit();
 
-var code: zir.Code = blk: {
+var code: Zir = blk: {
 var astgen = try AstGen.init(mod, decl, &analysis_arena.allocator);
 defer astgen.deinit();
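The pattern here, repeated in the next hunk, is the producer side of the renamed type: AstGen owns temporary buffers, and the top GenZir scope's `finish()` moves them out into a standalone `Zir` that outlives the AstGen. A condensed sketch of that flow (simplified; the GenZir field initialization is elided and assumed, not verbatim code from this commit):

    var astgen = try AstGen.init(mod, decl, &analysis_arena.allocator);
    defer astgen.deinit();
    // A GenZir scope tied to that AstGen (remaining fields elided here).
    var gen_scope: Scope.GenZir = .{ .astgen = &astgen };
    // AstGen appends instructions through gen_scope, then the buffers are
    // moved out into a standalone value:
    const zir_code: Zir = try gen_scope.finish();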
@@ -2578,7 +2578,7 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
 var analysis_arena = std.heap.ArenaAllocator.init(mod.gpa);
 defer analysis_arena.deinit();
 
-var code: zir.Code = blk: {
+var code: Zir = blk: {
 var astgen = try AstGen.init(mod, decl, &analysis_arena.allocator);
 defer astgen.deinit();
@@ -2676,7 +2676,7 @@ fn astgenAndSemaFn(
 }
 break :blk count;
 };
-const param_types = try fn_type_scope_arena.allocator.alloc(zir.Inst.Ref, param_count);
+const param_types = try fn_type_scope_arena.allocator.alloc(Zir.Inst.Ref, param_count);
 
 var is_var_args = false;
 {
@@ -2782,7 +2782,7 @@ fn astgenAndSemaFn(
 else
 false;
 
-const cc: zir.Inst.Ref = if (fn_proto.ast.callconv_expr != 0)
+const cc: Zir.Inst.Ref = if (fn_proto.ast.callconv_expr != 0)
 // TODO instead of enum literal type, this needs to be the
 // std.builtin.CallingConvention enum. We need to implement importing other files
 // and enums in order to fix this.
@@ -2797,8 +2797,8 @@ fn astgenAndSemaFn(
 else
 .none;
 
-const fn_type_inst: zir.Inst.Ref = if (cc != .none) fn_type: {
-const tag: zir.Inst.Tag = if (is_var_args) .fn_type_cc_var_args else .fn_type_cc;
+const fn_type_inst: Zir.Inst.Ref = if (cc != .none) fn_type: {
+const tag: Zir.Inst.Tag = if (is_var_args) .fn_type_cc_var_args else .fn_type_cc;
 break :fn_type try fn_type_scope.addFnTypeCc(tag, .{
 .src_node = fn_proto.ast.proto_node,
 .ret_ty = return_type_inst,
@@ -2806,7 +2806,7 @@ fn astgenAndSemaFn(
 .cc = cc,
 });
 } else fn_type: {
-const tag: zir.Inst.Tag = if (is_var_args) .fn_type_var_args else .fn_type;
+const tag: Zir.Inst.Tag = if (is_var_args) .fn_type_var_args else .fn_type;
 break :fn_type try fn_type_scope.addFnType(tag, .{
 .src_node = fn_proto.ast.proto_node,
 .ret_ty = return_type_inst,
@@ -2890,10 +2890,10 @@ fn astgenAndSemaFn(
 const new_func = try decl_arena.allocator.create(Fn);
 const fn_payload = try decl_arena.allocator.create(Value.Payload.Function);
 
-const fn_zir: zir.Code = blk: {
+const fn_zir: Zir = blk: {
 // We put the ZIR inside the Decl arena.
 var astgen = try AstGen.init(mod, decl, &decl_arena.allocator);
-astgen.ref_start_index = @intCast(u32, zir.Inst.Ref.typed_value_map.len + param_count);
+astgen.ref_start_index = @intCast(u32, Zir.Inst.Ref.typed_value_map.len + param_count);
 defer astgen.deinit();
 
 var gen_scope: Scope.GenZir = .{
@@ -2920,7 +2920,7 @@ fn astgenAndSemaFn(
 .gen_zir = &gen_scope,
 .name = param_name,
 // Implicit const list first, then implicit arg list.
-.inst = @intToEnum(zir.Inst.Ref, @intCast(u32, zir.Inst.Ref.typed_value_map.len + i)),
+.inst = @intToEnum(Zir.Inst.Ref, @intCast(u32, Zir.Inst.Ref.typed_value_map.len + i)),
 .src = decl.tokSrcLoc(name_token),
 };
 params_scope = &sub_scope.base;
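The two hunks above expose the numbering scheme behind `Zir.Inst.Ref`: the first `typed_value_map.len` refs name well-known constants, the next `param_count` refs name the function's parameters, and refs from `ref_start_index` on correspond to instruction indices. Under that assumption, the conversion used throughout (`indexToRef`) would look roughly like this; the body is an inference from the calls above, not code shown in this commit:

    fn indexToRef(astgen: AstGen, index: Zir.Inst.Index) Zir.Inst.Ref {
        return @intToEnum(Zir.Inst.Ref, astgen.ref_start_index + index);
    }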
src/Sema.zig (474 changed lines): diff suppressed because it is too large.
src/zir.zig (167 changed lines)
@@ -1,5 +1,14 @@
-//! Zig Intermediate Representation. Astgen.zig converts AST nodes to these
-//! untyped IR instructions. Next, Sema.zig processes these into TZIR.
+//! The minimum amount of information needed to represent a list of ZIR instructions.
+//! Once this structure is completed, it can be used to generate TZIR, followed by
+//! machine code, without any memory access into the AST tree token list, node list,
+//! or source bytes. Exceptions include:
+//! * Compile errors, which may need to reach into these data structures to
+//! create a useful report.
+//! * In the future, possibly inline assembly, which needs to get parsed and
+//! handled by the codegen backend, and errors reported there. However for now,
+//! inline assembly is not an exception.
 
 const std = @import("std");
 const mem = std.mem;
@@ -9,6 +18,7 @@ const BigIntConst = std.math.big.int.Const;
 const BigIntMutable = std.math.big.int.Mutable;
 const ast = std.zig.ast;
 
+const Zir = @This();
 const Type = @import("type.zig").Type;
 const Value = @import("value.zig").Value;
 const TypedValue = @import("TypedValue.zig");
@@ -16,96 +26,85 @@ const ir = @import("ir.zig");
 const Module = @import("Module.zig");
 const LazySrcLoc = Module.LazySrcLoc;
 
-/// The minimum amount of information needed to represent a list of ZIR instructions.
-/// Once this structure is completed, it can be used to generate TZIR, followed by
-/// machine code, without any memory access into the AST tree token list, node list,
-/// or source bytes. Exceptions include:
-/// * Compile errors, which may need to reach into these data structures to
-/// create a useful report.
-/// * In the future, possibly inline assembly, which needs to get parsed and
-/// handled by the codegen backend, and errors reported there. However for now,
-/// inline assembly is not an exception.
-pub const Code = struct {
-/// There is always implicitly a `block` instruction at index 0.
-/// This is so that `break_inline` can break from the root block.
-instructions: std.MultiArrayList(Inst).Slice,
-/// In order to store references to strings in fewer bytes, we copy all
-/// string bytes into here. String bytes can be null. It is up to whomever
-/// is referencing the data here whether they want to store both index and length,
-/// thus allowing null bytes, or store only index, and use null-termination. The
-/// `string_bytes` array is agnostic to either usage.
-string_bytes: []u8,
-/// The meaning of this data is determined by `Inst.Tag` value.
-extra: []u32,
+/// There is always implicitly a `block` instruction at index 0.
+/// This is so that `break_inline` can break from the root block.
+instructions: std.MultiArrayList(Inst).Slice,
+/// In order to store references to strings in fewer bytes, we copy all
+/// string bytes into here. String bytes can be null. It is up to whomever
+/// is referencing the data here whether they want to store both index and length,
+/// thus allowing null bytes, or store only index, and use null-termination. The
+/// `string_bytes` array is agnostic to either usage.
+string_bytes: []u8,
+/// The meaning of this data is determined by `Inst.Tag` value.
+extra: []u32,
 
-/// Returns the requested data, as well as the new index which is at the start of the
-/// trailers for the object.
-pub fn extraData(code: Code, comptime T: type, index: usize) struct { data: T, end: usize } {
-const fields = std.meta.fields(T);
-var i: usize = index;
-var result: T = undefined;
-inline for (fields) |field| {
-@field(result, field.name) = switch (field.field_type) {
-u32 => code.extra[i],
-Inst.Ref => @intToEnum(Inst.Ref, code.extra[i]),
-else => unreachable,
-};
-i += 1;
-}
-return .{
-.data = result,
-.end = i,
-};
-}
+/// Returns the requested data, as well as the new index which is at the start of the
+/// trailers for the object.
+pub fn extraData(code: Zir, comptime T: type, index: usize) struct { data: T, end: usize } {
+const fields = std.meta.fields(T);
+var i: usize = index;
+var result: T = undefined;
+inline for (fields) |field| {
+@field(result, field.name) = switch (field.field_type) {
+u32 => code.extra[i],
+Inst.Ref => @intToEnum(Inst.Ref, code.extra[i]),
+else => unreachable,
+};
+i += 1;
+}
+return .{
+.data = result,
+.end = i,
+};
+}
 
-/// Given an index into `string_bytes` returns the null-terminated string found there.
-pub fn nullTerminatedString(code: Code, index: usize) [:0]const u8 {
-var end: usize = index;
-while (code.string_bytes[end] != 0) {
-end += 1;
-}
-return code.string_bytes[index..end :0];
-}
+/// Given an index into `string_bytes` returns the null-terminated string found there.
+pub fn nullTerminatedString(code: Zir, index: usize) [:0]const u8 {
+var end: usize = index;
+while (code.string_bytes[end] != 0) {
+end += 1;
+}
+return code.string_bytes[index..end :0];
+}
 
-pub fn refSlice(code: Code, start: usize, len: usize) []Inst.Ref {
-const raw_slice = code.extra[start..][0..len];
-return @bitCast([]Inst.Ref, raw_slice);
-}
+pub fn refSlice(code: Zir, start: usize, len: usize) []Inst.Ref {
+const raw_slice = code.extra[start..][0..len];
+return @bitCast([]Inst.Ref, raw_slice);
+}
 
-pub fn deinit(code: *Code, gpa: *Allocator) void {
-code.instructions.deinit(gpa);
-gpa.free(code.string_bytes);
-gpa.free(code.extra);
-code.* = undefined;
-}
+pub fn deinit(code: *Zir, gpa: *Allocator) void {
+code.instructions.deinit(gpa);
+gpa.free(code.string_bytes);
+gpa.free(code.extra);
+code.* = undefined;
+}
 
-/// For debugging purposes, like dumpFn but for unanalyzed zir blocks
-pub fn dump(
-code: Code,
-gpa: *Allocator,
-kind: []const u8,
-scope: *Module.Scope,
-param_count: usize,
-) !void {
-var arena = std.heap.ArenaAllocator.init(gpa);
-defer arena.deinit();
+/// For debugging purposes, like dumpFn but for unanalyzed zir blocks
+pub fn dump(
+code: Zir,
+gpa: *Allocator,
+kind: []const u8,
+scope: *Module.Scope,
+param_count: usize,
+) !void {
+var arena = std.heap.ArenaAllocator.init(gpa);
+defer arena.deinit();
 
-var writer: Writer = .{
-.gpa = gpa,
-.arena = &arena.allocator,
-.scope = scope,
-.code = code,
-.indent = 0,
-.param_count = param_count,
-};
+var writer: Writer = .{
+.gpa = gpa,
+.arena = &arena.allocator,
+.scope = scope,
+.code = code,
+.indent = 0,
+.param_count = param_count,
+};
 
-const decl_name = scope.srcDecl().?.name;
-const stderr = std.io.getStdErr().writer();
-try stderr.print("ZIR {s} {s} %0 ", .{ kind, decl_name });
-try writer.writeInstToStream(stderr, 0);
-try stderr.print(" // end ZIR {s} {s}\n\n", .{ kind, decl_name });
-}
-};
+const decl_name = scope.srcDecl().?.name;
+const stderr = std.io.getStdErr().writer();
+try stderr.print("ZIR {s} {s} %0 ", .{ kind, decl_name });
+try writer.writeInstToStream(stderr, 0);
+try stderr.print(" // end ZIR {s} {s}\n\n", .{ kind, decl_name });
+}
 
 /// These are untyped instructions generated from an Abstract Syntax Tree.
 /// The data here is immutable because it is possible to have multiple
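On the consuming side, `extraData` plus `refSlice` recover a payload and its trailing operands. For example, reading back the `Zir.Inst.Call` payload written by `addCall` in src/Module.zig could look like this (a hypothetical helper, named here only for illustration):

    fn callArgs(code: Zir, payload_index: usize) []Zir.Inst.Ref {
        const call = code.extraData(Zir.Inst.Call, payload_index);
        // The argument refs trail the payload fields.
        return code.refSlice(call.end, call.data.args_len);
    }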
@@ -885,7 +884,7 @@ pub const Inst = struct {
 }
 };
 
-/// The position of a ZIR instruction within the `Code` instructions array.
+/// The position of a ZIR instruction within the `Zir` instructions array.
 pub const Index = u32;
 
 /// A reference to a TypedValue, parameter of the current function,
@@ -1236,7 +1235,7 @@ pub const Inst = struct {
 /// Number of bytes in the string.
 len: u32,
 
-pub fn get(self: @This(), code: Code) []const u8 {
+pub fn get(self: @This(), code: Zir) []const u8 {
 return code.string_bytes[self.start..][0..self.len];
 }
 },
@@ -1257,7 +1256,7 @@ pub const Inst = struct {
 /// Offset from Decl AST token index.
 src_tok: u32,
 
-pub fn get(self: @This(), code: Code) [:0]const u8 {
+pub fn get(self: @This(), code: Zir) [:0]const u8 {
 return code.nullTerminatedString(self.start);
 }
@@ -1609,7 +1608,7 @@ const Writer = struct {
 gpa: *Allocator,
 arena: *Allocator,
 scope: *Module.Scope,
-code: Code,
+code: Zir,
 indent: usize,
 param_count: usize,