author    Andrew Kelley <andrew@ziglang.org>    2021-01-31 20:15:08 -0800
committer GitHub <noreply@github.com>           2021-01-31 20:15:08 -0800
commit    bf76501b5d46277d3706a1f0b92ba52f2a47d894 (patch)
tree      7a40c904b9246092009c02f5b09a2a8732b69175 /src
parent    fdc875ed0080cd2542a854a8cd6c627b25e9b7a4 (diff)
parent    0f5eda973e0c17b3f792cdb06674bf8d2863c8fb (diff)
Merge pull request #7847 from ziglang/astgen-rl-rework
stage2: rework astgen result locations
Diffstat (limited to 'src')
-rw-r--r--  src/Module.zig      73
-rw-r--r--  src/astgen.zig    1285
-rw-r--r--  src/codegen.zig     57
-rw-r--r--  src/ir.zig          56
-rw-r--r--  src/zir.zig        275
-rw-r--r--  src/zir_sema.zig   895
6 files changed, 1280 insertions, 1361 deletions
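
For orientation: a "result location" tells an expression where to put its value, and this commit reworks how astgen decides between writing through a result pointer and passing values via break instructions. A minimal user-level sketch of the two strategies, in ordinary Zig (illustrative only, not compiler code):

    const assert = @import("std").debug.assert;

    test "result location sketch" {
        // The array initializer writes its elements through a pointer
        // result location instead of building a temporary and copying.
        const a: [2]u32 = .{ 1, 2 };
        // A labeled block can instead communicate its result through
        // `break` operands, which are then peer-type-resolved.
        const x = blk: {
            if (a[0] > a[1]) break :blk a[0];
            break :blk a[1];
        };
        assert(x == 2);
    }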
diff --git a/src/Module.zig b/src/Module.zig
index fa9722814e..b495afb336 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -375,6 +375,10 @@ pub const Scope = struct {
}
}
+ pub fn isComptime(self: *Scope) bool {
+ return self.getGenZIR().force_comptime;
+ }
+
pub fn ownerDecl(self: *Scope) ?*Decl {
return switch (self.tag) {
.block => self.cast(Block).?.owner_decl,
@@ -671,14 +675,36 @@ pub const Scope = struct {
};
pub const Merges = struct {
- results: ArrayListUnmanaged(*Inst),
block_inst: *Inst.Block,
+ /// Kept as a separate array list from `br_list` so that it can be passed
+ /// directly to resolvePeerTypes.
+ results: ArrayListUnmanaged(*Inst),
+ /// Keeps track of the break instructions so that the operand can be replaced
+ /// if we need to add type coercion at the end of block analysis.
+ /// Same indexes, capacity, and length as `results`.
+ br_list: ArrayListUnmanaged(*Inst.Br),
};
/// For debugging purposes.
pub fn dump(self: *Block, mod: Module) void {
zir.dumpBlock(mod, self);
}
+
+ pub fn makeSubBlock(parent: *Block) Block {
+ return .{
+ .parent = parent,
+ .inst_table = parent.inst_table,
+ .func = parent.func,
+ .owner_decl = parent.owner_decl,
+ .src_decl = parent.src_decl,
+ .instructions = .{},
+ .arena = parent.arena,
+ .label = null,
+ .inlining = parent.inlining,
+ .is_comptime = parent.is_comptime,
+ .branch_quota = parent.branch_quota,
+ };
+ }
};
/// This is a temporary structure, references to it are valid only
@@ -690,13 +716,32 @@ pub const Scope = struct {
parent: *Scope,
decl: *Decl,
arena: *Allocator,
+ force_comptime: bool,
/// The first N instructions in a function body ZIR are arg instructions.
instructions: std.ArrayListUnmanaged(*zir.Inst) = .{},
label: ?Label = null,
break_block: ?*zir.Inst.Block = null,
continue_block: ?*zir.Inst.Block = null,
- /// only valid if label != null or (continue_block and break_block) != null
+ /// Only valid when setBlockResultLoc is called.
break_result_loc: astgen.ResultLoc = undefined,
+ /// When a block has a pointer result location, here it is.
+ rl_ptr: ?*zir.Inst = null,
+ /// Keeps track of how many branches of a block did not actually
+ /// consume the result location. astgen uses this to figure out
+ /// whether to rely on break instructions or on writes to the result
+ /// pointer to communicate the result value.
+ rvalue_rl_count: usize = 0,
+ /// Keeps track of how many break instructions there are. When astgen is finished
+ /// with a block, it can check this against rvalue_rl_count to find out whether
+ /// the break instructions should be downgraded to break_void.
+ break_count: usize = 0,
+ /// Tracks `break :foo bar` instructions so they can possibly be elided later if
+ /// the labeled block ends up not needing a result location pointer.
+ labeled_breaks: std.ArrayListUnmanaged(*zir.Inst.Break) = .{},
+ /// Tracks `store_to_block_ptr` instructions that correspond to break instructions
+ /// so they can possibly be elided later if the labeled block ends up not needing
+ /// a result location pointer.
+ labeled_store_to_block_ptr_list: std.ArrayListUnmanaged(*zir.Inst.BinOp) = .{},
pub const Label = struct {
token: ast.TokenIndex,
@@ -968,6 +1013,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.decl = decl,
.arena = &fn_type_scope_arena.allocator,
.parent = &decl.container.base,
+ .force_comptime = true,
};
defer fn_type_scope.instructions.deinit(self.gpa);
@@ -1131,6 +1177,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.decl = decl,
.arena = &decl_arena.allocator,
.parent = &decl.container.base,
+ .force_comptime = false,
};
defer gen_scope.instructions.deinit(self.gpa);
@@ -1171,7 +1218,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
!gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn())
{
const src = tree.token_locs[body_block.rbrace].start;
- _ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .returnvoid);
+ _ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .return_void);
}
if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
@@ -1329,6 +1376,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.decl = decl,
.arena = &gen_scope_arena.allocator,
.parent = &decl.container.base,
+ .force_comptime = false,
};
defer gen_scope.instructions.deinit(self.gpa);
@@ -1388,6 +1436,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.decl = decl,
.arena = &type_scope_arena.allocator,
.parent = &decl.container.base,
+ .force_comptime = true,
};
defer type_scope.instructions.deinit(self.gpa);
@@ -1457,13 +1506,15 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
decl.analysis = .in_progress;
- // A comptime decl does not store any value so we can just deinit this arena after analysis is done.
+ // A comptime decl does not store any value so we can just deinit
+ // this arena after analysis is done.
var analysis_arena = std.heap.ArenaAllocator.init(self.gpa);
defer analysis_arena.deinit();
var gen_scope: Scope.GenZIR = .{
.decl = decl,
.arena = &analysis_arena.allocator,
.parent = &decl.container.base,
+ .force_comptime = true,
};
defer gen_scope.instructions.deinit(self.gpa);
@@ -2100,7 +2151,7 @@ pub fn addBr(
src: usize,
target_block: *Inst.Block,
operand: *Inst,
-) !*Inst {
+) !*Inst.Br {
const inst = try scope_block.arena.create(Inst.Br);
inst.* = .{
.base = .{
@@ -2112,7 +2163,7 @@ pub fn addBr(
.block = target_block,
};
try scope_block.instructions.append(self.gpa, &inst.base);
- return &inst.base;
+ return inst;
}
pub fn addCondBr(
@@ -3466,18 +3517,18 @@ pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic
};
const ok_body: ir.Body = .{
- .instructions = try parent_block.arena.alloc(*Inst, 1), // Only need space for the brvoid.
+ .instructions = try parent_block.arena.alloc(*Inst, 1), // Only need space for the br_void.
};
- const brvoid = try parent_block.arena.create(Inst.BrVoid);
- brvoid.* = .{
+ const br_void = try parent_block.arena.create(Inst.BrVoid);
+ br_void.* = .{
.base = .{
- .tag = .brvoid,
+ .tag = .br_void,
.ty = Type.initTag(.noreturn),
.src = ok.src,
},
.block = block_inst,
};
- ok_body.instructions[0] = &brvoid.base;
+ ok_body.instructions[0] = &br_void.base;
var fail_block: Scope.Block = .{
.parent = parent_block,
diff --git a/src/astgen.zig b/src/astgen.zig
index a74b83de44..dfc5f06ddc 100644
--- a/src/astgen.zig
+++ b/src/astgen.zig
@@ -14,25 +14,45 @@ const InnerError = Module.InnerError;
pub const ResultLoc = union(enum) {
/// The expression is the right-hand side of assignment to `_`. Only the side-effects of the
- /// expression should be generated.
+ /// expression should be generated. The result instruction from the expression must
+ /// be ignored.
discard,
/// The expression has an inferred type, and it will be evaluated as an rvalue.
none,
/// The expression must generate a pointer rather than a value. For example, the left hand side
/// of an assignment uses this kind of result location.
ref,
- /// The expression will be type coerced into this type, but it will be evaluated as an rvalue.
+ /// The expression will be coerced into this type, but it will be evaluated as an rvalue.
ty: *zir.Inst,
- /// The expression must store its result into this typed pointer.
+ /// The expression must store its result into this typed pointer. The result instruction
+ /// from the expression must be ignored.
ptr: *zir.Inst,
/// The expression must store its result into this allocation, which has an inferred type.
+ /// The result instruction from the expression must be ignored.
inferred_ptr: *zir.Inst.Tag.alloc_inferred.Type(),
/// The expression must store its result into this pointer, which is a typed pointer that
/// has been bitcasted to whatever the expression's type is.
+ /// The result instruction from the expression must be ignored.
bitcasted_ptr: *zir.Inst.UnOp,
/// There is a pointer for the expression to store its result into; however, its type
/// is inferred based on peer type resolution for a `zir.Inst.Block`.
- block_ptr: *zir.Inst.Block,
+ /// The result instruction from the expression must be ignored.
+ block_ptr: *Module.Scope.GenZIR,
+
+ pub const Strategy = struct {
+ elide_store_to_block_ptr_instructions: bool,
+ tag: Tag,
+
+ pub const Tag = enum {
+ /// Both branches will use break_void; result location is used to communicate the
+ /// result instruction.
+ break_void,
+ /// Use break statements to pass the block result value, and call rvalue() at
+ /// the end depending on rl. Also elide the store_to_block_ptr instructions
+ /// depending on rl.
+ break_operand,
+ };
+ };
};
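
A rough mapping from Zig source constructs to these result locations, as a hedged illustration (user-level code; `foo` is a hypothetical helper):

    fn foo() u32 {
        return 1;
    }

    fn sketch() void {
        _ = foo(); // .discard: only side effects are kept
        const a = foo(); // .none: inferred type, plain rvalue
        const b: u32 = foo(); // .ty: coerced to u32, still an rvalue
        var c: [2]u32 = undefined;
        c = .{ a, b }; // LHS evaluated with .ref; RHS stores via .ptr
        var d = foo(); // roughly .inferred_ptr when memory may be needed
        // A block or if feeding one of the pointer locations above is
        // given .block_ptr so peer type resolution can run on its breaks.
        d = if (b > a) foo() else a;
        _ = c;
        _ = d;
    }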
pub fn typeExpr(mod: *Module, scope: *Scope, type_node: *ast.Node) InnerError!*zir.Inst {
@@ -179,6 +199,9 @@ fn lvalExpr(mod: *Module, scope: *Scope, node: *ast.Node) InnerError!*zir.Inst {
}
/// Turn Zig AST into untyped ZIR instructions.
+/// When `rl` is discard, ptr, inferred_ptr, bitcasted_ptr, or block_ptr, the
+/// result instruction may only be inspected with isNoReturn(); it must not
+/// otherwise be used.
pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerError!*zir.Inst {
switch (node.tag) {
.Root => unreachable, // Top-level declaration.
@@ -197,20 +220,20 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.FieldInitializer => unreachable, // Handled explicitly.
.ContainerField => unreachable, // Handled explicitly.
- .Assign => return rlWrapVoid(mod, scope, rl, node, try assign(mod, scope, node.castTag(.Assign).?)),
- .AssignBitAnd => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitAnd).?, .bitand)),
- .AssignBitOr => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitOr).?, .bitor)),
- .AssignBitShiftLeft => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftLeft).?, .shl)),
- .AssignBitShiftRight => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftRight).?, .shr)),
- .AssignBitXor => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitXor).?, .xor)),
- .AssignDiv => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignDiv).?, .div)),
- .AssignSub => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSub).?, .sub)),
- .AssignSubWrap => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSubWrap).?, .subwrap)),
- .AssignMod => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMod).?, .mod_rem)),
- .AssignAdd => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAdd).?, .add)),
- .AssignAddWrap => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAddWrap).?, .addwrap)),
- .AssignMul => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMul).?, .mul)),
- .AssignMulWrap => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMulWrap).?, .mulwrap)),
+ .Assign => return rvalueVoid(mod, scope, rl, node, try assign(mod, scope, node.castTag(.Assign).?)),
+ .AssignBitAnd => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitAnd).?, .bit_and)),
+ .AssignBitOr => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitOr).?, .bit_or)),
+ .AssignBitShiftLeft => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftLeft).?, .shl)),
+ .AssignBitShiftRight => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftRight).?, .shr)),
+ .AssignBitXor => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitXor).?, .xor)),
+ .AssignDiv => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignDiv).?, .div)),
+ .AssignSub => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSub).?, .sub)),
+ .AssignSubWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSubWrap).?, .subwrap)),
+ .AssignMod => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMod).?, .mod_rem)),
+ .AssignAdd => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAdd).?, .add)),
+ .AssignAddWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAddWrap).?, .addwrap)),
+ .AssignMul => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMul).?, .mul)),
+ .AssignMulWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMulWrap).?, .mulwrap)),
.Add => return simpleBinOp(mod, scope, rl, node.castTag(.Add).?, .add),
.AddWrap => return simpleBinOp(mod, scope, rl, node.castTag(.AddWrap).?, .addwrap),
@@ -220,8 +243,8 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.MulWrap => return simpleBinOp(mod, scope, rl, node.castTag(.MulWrap).?, .mulwrap),
.Div => return simpleBinOp(mod, scope, rl, node.castTag(.Div).?, .div),
.Mod => return simpleBinOp(mod, scope, rl, node.castTag(.Mod).?, .mod_rem),
- .BitAnd => return simpleBinOp(mod, scope, rl, node.castTag(.BitAnd).?, .bitand),
- .BitOr => return simpleBinOp(mod, scope, rl, node.castTag(.BitOr).?, .bitor),
+ .BitAnd => return simpleBinOp(mod, scope, rl, node.castTag(.BitAnd).?, .bit_and),
+ .BitOr => return simpleBinOp(mod, scope, rl, node.castTag(.BitOr).?, .bit_or),
.BitShiftLeft => return simpleBinOp(mod, scope, rl, node.castTag(.BitShiftLeft).?, .shl),
.BitShiftRight => return simpleBinOp(mod, scope, rl, node.castTag(.BitShiftRight).?, .shr),
.BitXor => return simpleBinOp(mod, scope, rl, node.castTag(.BitXor).?, .xor),
@@ -239,15 +262,15 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.BoolAnd => return boolBinOp(mod, scope, rl, node.castTag(.BoolAnd).?),
.BoolOr => return boolBinOp(mod, scope, rl, node.castTag(.BoolOr).?),
- .BoolNot => return rlWrap(mod, scope, rl, try boolNot(mod, scope, node.castTag(.BoolNot).?)),
- .BitNot => return rlWrap(mod, scope, rl, try bitNot(mod, scope, node.castTag(.BitNot).?)),
- .Negation => return rlWrap(mod, scope, rl, try negation(mod, scope, node.castTag(.Negation).?, .sub)),
- .NegationWrap => return rlWrap(mod, scope, rl, try negation(mod, scope, node.castTag(.NegationWrap).?, .subwrap)),
+ .BoolNot => return rvalue(mod, scope, rl, try boolNot(mod, scope, node.castTag(.BoolNot).?)),
+ .BitNot => return rvalue(mod, scope, rl, try bitNot(mod, scope, node.castTag(.BitNot).?)),
+ .Negation => return rvalue(mod, scope, rl, try negation(mod, scope, node.castTag(.Negation).?, .sub)),
+ .NegationWrap => return rvalue(mod, scope, rl, try negation(mod, scope, node.castTag(.NegationWrap).?, .subwrap)),
.Identifier => return try identifier(mod, scope, rl, node.castTag(.Identifier).?),
- .Asm => return rlWrap(mod, scope, rl, try assembly(mod, scope, node.castTag(.Asm).?)),
- .StringLiteral => return rlWrap(mod, scope, rl, try stringLiteral(mod, scope, node.castTag(.StringLiteral).?)),
- .IntegerLiteral => return rlWrap(mod, scope, rl, try integerLiteral(mod, scope, node.castTag(.IntegerLiteral).?)),
+ .Asm => return rvalue(mod, scope, rl, try assembly(mod, scope, node.castTag(.Asm).?)),
+ .StringLiteral => return rvalue(mod, scope, rl, try stringLiteral(mod, scope, node.castTag(.StringLiteral).?)),
+ .IntegerLiteral => return rvalue(mod, scope, rl, try integerLiteral(mod, scope, node.castTag(.IntegerLiteral).?)),
.BuiltinCall => return builtinCall(mod, scope, rl, node.castTag(.BuiltinCall).?),
.Call => return callExpr(mod, scope, rl, node.castTag(.Call).?),
.Unreachable => return unreach(mod, scope, node.castTag(.Unreachable).?),
@@ -255,38 +278,38 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.If => return ifExpr(mod, scope, rl, node.castTag(.If).?),
.While => return whileExpr(mod, scope, rl, node.castTag(.While).?),
.Period => return field(mod, scope, rl, node.castTag(.Period).?),
- .Deref => return rlWrap(mod, scope, rl, try deref(mod, scope, node.castTag(.Deref).?)),
- .AddressOf => return rlWrap(mod, scope, rl, try addressOf(mod, scope, node.castTag(.AddressOf).?)),
- .FloatLiteral => return rlWrap(mod, scope, rl, try floatLiteral(mod, scope, node.castTag(.FloatLiteral).?)),
- .UndefinedLiteral => return rlWrap(mod, scope, rl, try undefLiteral(mod, scope, node.castTag(.UndefinedLiteral).?)),
- .BoolLiteral => return rlWrap(mod, scope, rl, try boolLiteral(mod, scope, node.castTag(.BoolLiteral).?)),
- .NullLiteral => return rlWrap(mod, scope, rl, try nullLiteral(mod, scope, node.castTag(.NullLiteral).?)),
- .OptionalType => return rlWrap(mod, scope, rl, try optionalType(mod, scope, node.castTag(.OptionalType).?)),
+ .Deref => return rvalue(mod, scope, rl, try deref(mod, scope, node.castTag(.Deref).?)),
+ .AddressOf => return rvalue(mod, scope, rl, try addressOf(mod, scope, node.castTag(.AddressOf).?)),
+ .FloatLiteral => return rvalue(mod, scope, rl, try floatLiteral(mod, scope, node.castTag(.FloatLiteral).?)),
+ .UndefinedLiteral => return rvalue(mod, scope, rl, try undefLiteral(mod, scope, node.castTag(.UndefinedLiteral).?)),
+ .BoolLiteral => return rvalue(mod, scope, rl, try boolLiteral(mod, scope, node.castTag(.BoolLiteral).?)),
+ .NullLiteral => return rvalue(mod, scope, rl, try nullLiteral(mod, scope, node.castTag(.NullLiteral).?)),
+ .OptionalType => return rvalue(mod, scope, rl, try optionalType(mod, scope, node.castTag(.OptionalType).?)),
.UnwrapOptional => return unwrapOptional(mod, scope, rl, node.castTag(.UnwrapOptional).?),
- .Block => return rlWrapVoid(mod, scope, rl, node, try blockExpr(mod, scope, node.castTag(.Block).?)),
+ .Block => return rvalueVoid(mod, scope, rl, node, try blockExpr(mod, scope, node.castTag(.Block).?)),
.LabeledBlock => return labeledBlockExpr(mod, scope, rl, node.castTag(.LabeledBlock).?, .block),
- .Break => return rlWrap(mod, scope, rl, try breakExpr(mod, scope, node.castTag(.Break).?)),
- .Continue => return rlWrap(mod, scope, rl, try continueExpr(mod, scope, node.castTag(.Continue).?)),
- .PtrType => return rlWrap(mod, scope, rl, try ptrType(mod, scope, node.castTag(.PtrType).?)),
+ .Break => return rvalue(mod, scope, rl, try breakExpr(mod, scope, node.castTag(.Break).?)),
+ .Continue => return rvalue(mod, scope, rl, try continueExpr(mod, scope, node.castTag(.Continue).?)),
+ .PtrType => return rvalue(mod, scope, rl, try ptrType(mod, scope, node.castTag(.PtrType).?)),
.GroupedExpression => return expr(mod, scope, rl, node.castTag(.GroupedExpression).?.expr),
- .ArrayType => return rlWrap(mod, scope, rl, try arrayType(mod, scope, node.castTag(.ArrayType).?)),
- .ArrayTypeSentinel => return rlWrap(mod, scope, rl, try arrayTypeSentinel(mod, scope, node.castTag(.ArrayTypeSentinel).?)),
- .EnumLiteral => return rlWrap(mod, scope, rl, try enumLiteral(mod, scope, node.castTag(.EnumLiteral).?)),
- .MultilineStringLiteral => return rlWrap(mod, scope, rl, try multilineStrLiteral(mod, scope, node.castTag(.MultilineStringLiteral).?)),
- .CharLiteral => return rlWrap(mod, scope, rl, try charLiteral(mod, scope, node.castTag(.CharLiteral).?)),
- .SliceType => return rlWrap(mod, scope, rl, try sliceType(mod, scope, node.castTag(.SliceType).?)),
- .ErrorUnion => return rlWrap(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.ErrorUnion).?, .error_union_type)),
- .MergeErrorSets => return rlWrap(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.MergeErrorSets).?, .merge_error_sets)),
- .AnyFrameType => return rlWrap(mod, scope, rl, try anyFrameType(mod, scope, node.castTag(.AnyFrameType).?)),
- .ErrorSetDecl => return rlWrap(mod, scope, rl, try errorSetDecl(mod, scope, node.castTag(.ErrorSetDecl).?)),
- .ErrorType => return rlWrap(mod, scope, rl, try errorType(mod, scope, node.castTag(.ErrorType).?)),
+ .ArrayType => return rvalue(mod, scope, rl, try arrayType(mod, scope, node.castTag(.ArrayType).?)),
+ .ArrayTypeSentinel => return rvalue(mod, scope, rl, try arrayTypeSentinel(mod, scope, node.castTag(.ArrayTypeSentinel).?)),
+ .EnumLiteral => return rvalue(mod, scope, rl, try enumLiteral(mod, scope, node.castTag(.EnumLiteral).?)),
+ .MultilineStringLiteral => return rvalue(mod, scope, rl, try multilineStrLiteral(mod, scope, node.castTag(.MultilineStringLiteral).?)),
+ .CharLiteral => return rvalue(mod, scope, rl, try charLiteral(mod, scope, node.castTag(.CharLiteral).?)),
+ .SliceType => return rvalue(mod, scope, rl, try sliceType(mod, scope, node.castTag(.SliceType).?)),
+ .ErrorUnion => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.ErrorUnion).?, .error_union_type)),
+ .MergeErrorSets => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.MergeErrorSets).?, .merge_error_sets)),
+ .AnyFrameType => return rvalue(mod, scope, rl, try anyFrameType(mod, scope, node.castTag(.AnyFrameType).?)),
+ .ErrorSetDecl => return rvalue(mod, scope, rl, try errorSetDecl(mod, scope, node.castTag(.ErrorSetDecl).?)),
+ .ErrorType => return rvalue(mod, scope, rl, try errorType(mod, scope, node.castTag(.ErrorType).?)),
.For => return forExpr(mod, scope, rl, node.castTag(.For).?),
.ArrayAccess => return arrayAccess(mod, scope, rl, node.castTag(.ArrayAccess).?),
- .Slice => return rlWrap(mod, scope, rl, try sliceExpr(mod, scope, node.castTag(.Slice).?)),
+ .Slice => return rvalue(mod, scope, rl, try sliceExpr(mod, scope, node.castTag(.Slice).?)),
.Catch => return catchExpr(mod, scope, rl, node.castTag(.Catch).?),
.Comptime => return comptimeKeyword(mod, scope, rl, node.castTag(.Comptime).?),
.OrElse => return orelseExpr(mod, scope, rl, node.castTag(.OrElse).?),
- .Switch => return switchExpr(mod, scope, rl, node.castTag(.Switch).?),
+ .Switch => return mod.failNode(scope, node, "TODO implement astgen.expr for .Switch", .{}),
.ContainerDecl => return containerDecl(mod, scope, rl, node.castTag(.ContainerDecl).?),
.Defer => return mod.failNode(scope, node, "TODO implement astgen.expr for .Defer", .{}),
@@ -311,11 +334,19 @@ fn comptimeKeyword(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.C
return comptimeExpr(mod, scope, rl, node.expr);
}
-pub fn comptimeExpr(mod: *Module, parent_scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerError!*zir.Inst {
- const tree = parent_scope.tree();
- const src = tree.token_locs[node.firstToken()].start;
+pub fn comptimeExpr(
+ mod: *Module,
+ parent_scope: *Scope,
+ rl: ResultLoc,
+ node: *ast.Node,
+) InnerError!*zir.Inst {
+ // If we are already in a comptime scope, no need to make another one.
+ if (parent_scope.isComptime()) {
+ return expr(mod, parent_scope, rl, node);
+ }
- // Optimization for labeled blocks: don't need to have 2 layers of blocks, we can reuse the existing one.
+ // Optimization for labeled blocks: we don't need two layers of blocks;
+ // we can reuse the existing one.
if (node.castTag(.LabeledBlock)) |block_node| {
return labeledBlockExpr(mod, parent_scope, rl, block_node, .block_comptime);
}
@@ -325,6 +356,7 @@ pub fn comptimeExpr(mod: *Module, parent_scope: *Scope, rl: ResultLoc, node: *as
.parent = parent_scope,
.decl = parent_scope.ownerDecl().?,
.arena = parent_scope.arena(),
+ .force_comptime = true,
.instructions = .{},
};
defer block_scope.instructions.deinit(mod.gpa);
@@ -333,6 +365,9 @@ pub fn comptimeExpr(mod: *Module, parent_scope: *Scope, rl: ResultLoc, node: *as
// instruction is the block's result value.
_ = try expr(mod, &block_scope.base, rl, node);
+ const tree = parent_scope.tree();
+ const src = tree.token_locs[node.firstToken()].start;
+
const block = try addZIRInstBlock(mod, parent_scope, src, .block_comptime_flat, .{
.instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
});
@@ -340,7 +375,11 @@ pub fn comptimeExpr(mod: *Module, parent_scope: *Scope, rl: ResultLoc, node: *as
return &block.base;
}
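
In user terms, the isComptime() short-circuit above means a `comptime` expression nested in an already-comptime scope no longer gets a second block_comptime wrapper. Illustrative sketch:

    const x = comptime blk: {
        // Already inside a comptime scope: the nested `comptime` below
        // is generated directly, with no extra block_comptime.
        const y = comptime 1 + 2;
        break :blk y * 2;
    };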
-fn breakExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst {
+fn breakExpr(
+ mod: *Module,
+ parent_scope: *Scope,
+ node: *ast.Node.ControlFlowExpression,
+) InnerError!*zir.Inst {
const tree = parent_scope.tree();
const src = tree.token_locs[node.ltoken].start;
@@ -366,25 +405,31 @@ fn breakExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpr
continue;
};
- if (node.getRHS()) |rhs| {
- // Most result location types can be forwarded directly; however
- // if we need to write to a pointer which has an inferred type,
- // proper type inference requires peer type resolution on the block's
- // break operand expressions.
- const branch_rl: ResultLoc = switch (gen_zir.break_result_loc) {
- .discard, .none, .ty, .ptr, .ref => gen_zir.break_result_loc,
- .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block_inst },
- };
- const operand = try expr(mod, parent_scope, branch_rl, rhs);
- return try addZIRInst(mod, parent_scope, src, zir.Inst.Break, .{
+ const rhs = node.getRHS() orelse {
+ return addZirInstTag(mod, parent_scope, src, .break_void, .{
.block = block_inst,
- .operand = operand,
- }, .{});
- } else {
- return try addZIRInst(mod, parent_scope, src, zir.Inst.BreakVoid, .{
- .block = block_inst,
- }, .{});
+ });
+ };
+ gen_zir.break_count += 1;
+ const prev_rvalue_rl_count = gen_zir.rvalue_rl_count;
+ const operand = try expr(mod, parent_scope, gen_zir.break_result_loc, rhs);
+ const have_store_to_block = gen_zir.rvalue_rl_count != prev_rvalue_rl_count;
+ const br = try addZirInstTag(mod, parent_scope, src, .@"break", .{
+ .block = block_inst,
+ .operand = operand,
+ });
+ if (gen_zir.break_result_loc == .block_ptr) {
+ try gen_zir.labeled_breaks.append(mod.gpa, br.castTag(.@"break").?);
+
+ if (have_store_to_block) {
+ const inst_list = parent_scope.getGenZIR().instructions.items;
+ const last_inst = inst_list[inst_list.len - 2];
+ const store_inst = last_inst.castTag(.store_to_block_ptr).?;
+ assert(store_inst.positionals.lhs == gen_zir.rl_ptr.?);
+ try gen_zir.labeled_store_to_block_ptr_list.append(mod.gpa, store_inst);
+ }
}
+ return br;
},
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
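
For orientation, the bookkeeping above counts every `break` that carries an operand and records the matching store_to_block_ptr instruction, so that once the whole block has been generated, either the stores or the break operands can be elided. An illustrative user-level sketch (`pick` and `compute` are hypothetical):

    fn pick(cond: bool) u32 {
        return blk: {
            // Each `break :blk ...` bumps break_count; if evaluating
            // the operand stored through the block's result pointer,
            // that store_to_block_ptr is remembered so it can later be
            // elided (or the break downgraded to break_void).
            if (cond) break :blk 1;
            break :blk compute();
        };
    }

    fn compute() u32 {
        return 2;
    }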
@@ -424,9 +469,9 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowE
continue;
}
- return addZIRInst(mod, parent_scope, src, zir.Inst.BreakVoid, .{
+ return addZirInstTag(mod, parent_scope, src, .break_void, .{
.block = continue_block,
- }, .{});
+ });
},
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
@@ -526,28 +571,65 @@ fn labeledBlockExpr(
.parent = parent_scope,
.decl = parent_scope.ownerDecl().?,
.arena = gen_zir.arena,
+ .force_comptime = parent_scope.isComptime(),
.instructions = .{},
- .break_result_loc = rl,
// TODO @as here is working around a stage1 miscompilation bug :(
.label = @as(?Scope.GenZIR.Label, Scope.GenZIR.Label{
.token = block_node.label,
.block_inst = block_inst,
}),
};
+ setBlockResultLoc(&block_scope, rl);
defer block_scope.instructions.deinit(mod.gpa);
+ defer block_scope.labeled_breaks.deinit(mod.gpa);
+ defer block_scope.labeled_store_to_block_ptr_list.deinit(mod.gpa);
try blockExprStmts(mod, &block_scope.base, &block_node.base, block_node.statements());
+
if (!block_scope.label.?.used) {
return mod.fail(parent_scope, tree.token_locs[block_node.label].start, "unused block label", .{});
}
- block_inst.positionals.body.instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items);
try gen_zir.instructions.append(mod.gpa, &block_inst.base);
- return &block_inst.base;
+ const strat = rlStrategy(rl, &block_scope);
+ switch (strat.tag) {
+ .break_void => {
+ // The code took advantage of the result location as a pointer.
+ // Turn the break instructions into break_void instructions.
+ for (block_scope.labeled_breaks.items) |br| {
+ br.base.tag = .break_void;
+ }
+ // TODO: technically not needed since we changed the tag to break_void, but
+ // it would be better still to elide the instructions that are in this list.
+ try copyBodyNoEliding(&block_inst.positionals.body, block_scope);
+
+ return &block_inst.base;
+ },
+ .break_operand => {
+ // All break operands are values that did not use the result location pointer.
+ if (strat.elide_store_to_block_ptr_instructions) {
+ for (block_scope.labeled_store_to_block_ptr_list.items) |inst| {
+ inst.base.tag = .void_value;
+ }
+ // TODO: technically not needed since we changed the tag to void_value, but
+ // it would be better still to elide the instructions that are in this list.
+ }
+ try copyBodyNoEliding(&block_inst.positionals.body, block_scope);
+ switch (rl) {
+ .ref => return &block_inst.base,
+ else => return rvalue(mod, parent_scope, rl, &block_inst.base),
+ }
+ },
+ }
}
-fn blockExprStmts(mod: *Module, parent_scope: *Scope, node: *ast.Node, statements: []*ast.Node) !void {
+fn blockExprStmts(
+ mod: *Module,
+ parent_scope: *Scope,
+ node: *ast.Node,
+ statements: []*ast.Node,
+) !void {
const tree = parent_scope.tree();
var block_arena = std.heap.ArenaAllocator.init(mod.gpa);
@@ -563,8 +645,8 @@ fn blockExprStmts(mod: *Module, parent_scope: *Scope, node: *ast.Node, statement
scope = try varDecl(mod, scope, var_decl_node, &block_arena.allocator);
},
.Assign => try assign(mod, scope, statement.castTag(.Assign).?),
- .AssignBitAnd => try assignOp(mod, scope, statement.castTag(.AssignBitAnd).?, .bitand),
- .AssignBitOr => try assignOp(mod, scope, statement.castTag(.AssignBitOr).?, .bitor),
+ .AssignBitAnd => try assignOp(mod, scope, statement.castTag(.AssignBitAnd).?, .bit_and),
+ .AssignBitOr => try assignOp(mod, scope, statement.castTag(.AssignBitOr).?, .bit_or),
.AssignBitShiftLeft => try assignOp(mod, scope, statement.castTag(.AssignBitShiftLeft).?, .shl),
.AssignBitShiftRight => try assignOp(mod, scope, statement.castTag(.AssignBitShiftRight).?, .shr),
.AssignBitXor => try assignOp(mod, scope, statement.castTag(.AssignBitXor).?, .xor),
@@ -644,6 +726,7 @@ fn varDecl(
// Detect shadowing of namespace declarations.
if (mod.lookupDeclName(scope, ident_name)) |_| {
+ // TODO: add a note pointing at the other definition
return mod.fail(scope, name_src, "redefinition of '{s}'", .{ident_name});
}
const init_node = node.getInitNode() orelse
@@ -651,36 +734,103 @@ fn varDecl(
switch (tree.token_ids[node.mut_token]) {
.Keyword_const => {
- var resolve_inferred_alloc: ?*zir.Inst = null;
// Depending on the AST node type of the initialization expression, we may need an lvalue
// or an rvalue as a result location. If it is an rvalue, we can use the instruction as
// the variable, with no memory location needed.
- const result_loc = if (nodeMayNeedMemoryLocation(init_node, scope)) r: {
- if (node.getTypeNode()) |type_node| {
- const type_inst = try typeExpr(mod, scope, type_node);
- const alloc = try addZIRUnOp(mod, scope, name_src, .alloc, type_inst);
- break :r ResultLoc{ .ptr = alloc };
- } else {
- const alloc = try addZIRNoOpT(mod, scope, name_src, .alloc_inferred);
- resolve_inferred_alloc = &alloc.base;
- break :r ResultLoc{ .inferred_ptr = alloc };
- }
- } else r: {
- if (node.getTypeNode()) |type_node|
- break :r ResultLoc{ .ty = try typeExpr(mod, scope, type_node) }
+ if (!nodeMayNeedMemoryLocation(init_node, scope)) {
+ const result_loc: ResultLoc = if (node.getTypeNode()) |type_node|
+ .{ .ty = try typeExpr(mod, scope, type_node) }
else
- break :r .none;
+ .none;
+ const init_inst = try expr(mod, scope, result_loc, init_node);
+ const sub_scope = try block_arena.create(Scope.LocalVal);
+ sub_scope.* = .{
+ .parent = scope,
+ .gen_zir = scope.getGenZIR(),
+ .name = ident_name,
+ .inst = init_inst,
+ };
+ return &sub_scope.base;
+ }
+
+ // Detect whether the initialization expression actually uses the
+ // result location pointer.
+ var init_scope: Scope.GenZIR = .{
+ .parent = scope,
+ .decl = scope.ownerDecl().?,
+ .arena = scope.arena(),
+ .force_comptime = scope.isComptime(),
+ .instructions = .{},
};
- const init_inst = try expr(mod, scope, result_loc, init_node);
+ defer init_scope.instructions.deinit(mod.gpa);
+
+ var resolve_inferred_alloc: ?*zir.Inst = null;
+ var opt_type_inst: ?*zir.Inst = null;
+ if (node.getTypeNode()) |type_node| {
+ const type_inst = try typeExpr(mod, &init_scope.base, type_node);
+ opt_type_inst = type_inst;
+ init_scope.rl_ptr = try addZIRUnOp(mod, &init_scope.base, name_src, .alloc, type_inst);
+ } else {
+ const alloc = try addZIRNoOpT(mod, &init_scope.base, name_src, .alloc_inferred);
+ resolve_inferred_alloc = &alloc.base;
+ init_scope.rl_ptr = &alloc.base;
+ }
+ const init_result_loc: ResultLoc = .{ .block_ptr = &init_scope };
+ const init_inst = try expr(mod, &init_scope.base, init_result_loc, init_node);
+ const parent_zir = &scope.getGenZIR().instructions;
+ if (init_scope.rvalue_rl_count == 1) {
+ // Result location pointer not used. We don't need an alloc for this
+ // const local, and type inference becomes trivial.
+ // Move the init_scope instructions into the parent scope, eliding
+ // the alloc instruction and the store_to_block_ptr instruction.
+ const expected_len = parent_zir.items.len + init_scope.instructions.items.len - 2;
+ try parent_zir.ensureCapacity(mod.gpa, expected_len);
+ for (init_scope.instructions.items) |src_inst| {
+ if (src_inst == init_scope.rl_ptr.?) continue;
+ if (src_inst.castTag(.store_to_block_ptr)) |store| {
+ if (store.positionals.lhs == init_scope.rl_ptr.?) continue;
+ }
+ parent_zir.appendAssumeCapacity(src_inst);
+ }
+ assert(parent_zir.items.len == expected_len);
+ const casted_init = if (opt_type_inst) |type_inst|
+ try addZIRBinOp(mod, scope, type_inst.src, .as, type_inst, init_inst)
+ else
+ init_inst;
+
+ const sub_scope = try block_arena.create(Scope.LocalVal);
+ sub_scope.* = .{
+ .parent = scope,
+ .gen_zir = scope.getGenZIR(),
+ .name = ident_name,
+ .inst = casted_init,
+ };
+ return &sub_scope.base;
+ }
+ // The initialization expression took advantage of the result location
+ // of the const local. In this case we will create an alloc and a LocalPtr for it.
+ // Move the init_scope instructions into the parent scope, swapping
+ // store_to_block_ptr for store_to_inferred_ptr.
+ const expected_len = parent_zir.items.len + init_scope.instructions.items.len;
+ try parent_zir.ensureCapacity(mod.gpa, expected_len);
+ for (init_scope.instructions.items) |src_inst| {
+ if (src_inst.castTag(.store_to_block_ptr)) |store| {
+ if (store.positionals.lhs == init_scope.rl_ptr.?) {
+ src_inst.tag = .store_to_inferred_ptr;
+ }
+ }
+ parent_zir.appendAssumeCapacity(src_inst);
+ }
+ assert(parent_zir.items.len == expected_len);
if (resolve_inferred_alloc) |inst| {
_ = try addZIRUnOp(mod, scope, name_src, .resolve_inferred_alloc, inst);
}
- const sub_scope = try block_arena.create(Scope.LocalVal);
+ const sub_scope = try block_arena.create(Scope.LocalPtr);
sub_scope.* = .{
.parent = scope,
.gen_zir = scope.getGenZIR(),
.name = ident_name,
- .inst = init_inst,
+ .ptr = init_scope.rl_ptr.?,
};
return &sub_scope.base;
},
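
The two paths above, in user terms (a hedged sketch; it assumes scalar arithmetic needs no memory location while an aggregate initializer does):

    fn demo() void {
        // Plain rvalue initializer: rvalue_rl_count == 1, so the alloc
        // and store_to_block_ptr are elided and `a` becomes a LocalVal
        // bound directly to the init instruction.
        const a = 1 + 2;
        // The aggregate initializer writes through the result pointer:
        // the alloc is kept, store_to_block_ptr is swapped for
        // store_to_inferred_ptr, and `b` becomes a LocalPtr.
        const b = [_]u32{ 1, 2, 3 };
        _ = a;
        _ = b;
    }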
@@ -751,14 +901,14 @@ fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerErr
.val = Value.initTag(.bool_type),
});
const operand = try expr(mod, scope, .{ .ty = bool_type }, node.rhs);
- return addZIRUnOp(mod, scope, src, .boolnot, operand);
+ return addZIRUnOp(mod, scope, src, .bool_not, operand);
}
fn bitNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.op_token].start;
const operand = try expr(mod, scope, .none, node.rhs);
- return addZIRUnOp(mod, scope, src, .bitnot, operand);
+ return addZIRUnOp(mod, scope, src, .bit_not, operand);
}
fn negation(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp, op_inst_tag: zir.Inst.Tag) InnerError!*zir.Inst {
@@ -971,6 +1121,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con
.parent = scope,
.decl = scope.ownerDecl().?,
.arena = scope.arena(),
+ .force_comptime = scope.isComptime(),
.instructions = .{},
};
defer gen_scope.instructions.deinit(mod.gpa);
@@ -1101,7 +1252,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con
if (rl == .ref) {
return addZIRInst(mod, scope, src, zir.Inst.DeclRef, .{ .decl = decl }, .{});
} else {
- return rlWrap(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.DeclVal, .{
+ return rvalue(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.DeclVal, .{
.decl = decl,
}, .{}));
}
@@ -1207,24 +1358,15 @@ fn orelseCatchExpr(
.parent = scope,
.decl = scope.ownerDecl().?,
.arena = scope.arena(),
+ .force_comptime = scope.isComptime(),
.instructions = .{},
};
+ setBlockResultLoc(&block_scope, rl);
defer block_scope.instructions.deinit(mod.gpa);
- const block = try addZIRInstBlock(mod, scope, src, .block, .{
- .instructions = undefined, // populated below
- });
-
- // Most result location types can be forwarded directly; however
- // if we need to write to a pointer which has an inferred type,
- // proper type inference requires peer type resolution on the if's
- // branches.
- const branch_rl: ResultLoc = switch (rl) {
- .discard, .none, .ty, .ptr, .ref => rl,
- .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block },
- };
// This could be a pointer or value depending on the `rl` parameter.
- const operand = try expr(mod, &block_scope.base, branch_rl, lhs);
+ block_scope.break_count += 1;
+ const operand = try expr(mod, &block_scope.base, block_scope.break_result_loc, lhs);
const cond = try addZIRUnOp(mod, &block_scope.base, src, cond_op, operand);
const condbr = try addZIRInstSpecial(mod, &block_scope.base, src, zir.Inst.CondBr, .{
@@ -1233,18 +1375,22 @@ fn orelseCatchExpr(
.else_body = undefined, // populated below
}, .{});
+ const block = try addZIRInstBlock(mod, scope, src, .block, .{
+ .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
+ });
+
var then_scope: Scope.GenZIR = .{
.parent = &block_scope.base,
.decl = block_scope.decl,
.arena = block_scope.arena,
+ .force_comptime = block_scope.force_comptime,
.instructions = .{},
};
defer then_scope.instructions.deinit(mod.gpa);
var err_val_scope: Scope.LocalVal = undefined;
const then_sub_scope = blk: {
- const payload = payload_node orelse
- break :blk &then_scope.base;
+ const payload = payload_node orelse break :blk &then_scope.base;
const err_name = tree.tokenSlice(payload.castTag(.Payload).?.error_symbol.firstToken());
if (mem.eql(u8, err_name, "_"))
@@ -1259,32 +1405,113 @@ fn orelseCatchExpr(
break :blk &err_val_scope.base;
};
- _ = try addZIRInst(mod, &then_scope.base, src, zir.Inst.Break, .{
- .block = block,
- .operand = try expr(mod, then_sub_scope, branch_rl, rhs),
- }, .{});
+ block_scope.break_count += 1;
+ const then_result = try expr(mod, then_sub_scope, block_scope.break_result_loc, rhs);
var else_scope: Scope.GenZIR = .{
.parent = &block_scope.base,
.decl = block_scope.decl,
.arena = block_scope.arena,
+ .force_comptime = block_scope.force_comptime,
.instructions = .{},
};
defer else_scope.instructions.deinit(mod.gpa);
// This could be a pointer or value depending on `unwrap_op`.
const unwrapped_payload = try addZIRUnOp(mod, &else_scope.base, src, unwrap_op, operand);
- _ = try addZIRInst(mod, &else_scope.base, src, zir.Inst.Break, .{
- .block = block,
- .operand = unwrapped_payload,
- }, .{});
- // All branches have been generated, add the instructions to the block.
- block.positionals.body.instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items);
+ return finishThenElseBlock(
+ mod,
+ scope,
+ rl,
+ &block_scope,
+ &then_scope,
+ &else_scope,
+ &condbr.positionals.then_body,
+ &condbr.positionals.else_body,
+ src,
+ src,
+ then_result,
+ unwrapped_payload,
+ block,
+ block,
+ );
+}
- condbr.positionals.then_body = .{ .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items) };
- condbr.positionals.else_body = .{ .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items) };
- return &block.base;
+fn finishThenElseBlock(
+ mod: *Module,
+ parent_scope: *Scope,
+ rl: ResultLoc,
+ block_scope: *Scope.GenZIR,
+ then_scope: *Scope.GenZIR,
+ else_scope: *Scope.GenZIR,
+ then_body: *zir.Body,
+ else_body: *zir.Body,
+ then_src: usize,
+ else_src: usize,
+ then_result: *zir.Inst,
+ else_result: ?*zir.Inst,
+ main_block: *zir.Inst.Block,
+ then_break_block: *zir.Inst.Block,
+) InnerError!*zir.Inst {
+ // We now have enough information to decide whether the result instruction should
+ // be communicated via result location pointer or break instructions.
+ const strat = rlStrategy(rl, block_scope);
+ switch (strat.tag) {
+ .break_void => {
+ if (!then_result.tag.isNoReturn()) {
+ _ = try addZirInstTag(mod, &then_scope.base, then_src, .break_void, .{
+ .block = then_break_block,
+ });
+ }
+ if (else_result) |inst| {
+ if (!inst.tag.isNoReturn()) {
+ _ = try addZirInstTag(mod, &else_scope.base, else_src, .break_void, .{
+ .block = main_block,
+ });
+ }
+ } else {
+ _ = try addZirInstTag(mod, &else_scope.base, else_src, .break_void, .{
+ .block = main_block,
+ });
+ }
+ assert(!strat.elide_store_to_block_ptr_instructions);
+ try copyBodyNoEliding(then_body, then_scope.*);
+ try copyBodyNoEliding(else_body, else_scope.*);
+ return &main_block.base;
+ },
+ .break_operand => {
+ if (!then_result.tag.isNoReturn()) {
+ _ = try addZirInstTag(mod, &then_scope.base, then_src, .@"break", .{
+ .block = then_break_block,
+ .operand = then_result,
+ });
+ }
+ if (else_result) |inst| {
+ if (!inst.tag.isNoReturn()) {
+ _ = try addZirInstTag(mod, &else_scope.base, else_src, .@"break", .{
+ .block = main_block,
+ .operand = inst,
+ });
+ }
+ } else {
+ _ = try addZirInstTag(mod, &else_scope.base, else_src, .break_void, .{
+ .block = main_block,
+ });
+ }
+ if (strat.elide_store_to_block_ptr_instructions) {
+ try copyBodyWithElidedStoreBlockPtr(then_body, then_scope.*);
+ try copyBodyWithElidedStoreBlockPtr(else_body, else_scope.*);
+ } else {
+ try copyBodyNoEliding(then_body, then_scope.*);
+ try copyBodyNoEliding(else_body, else_scope.*);
+ }
+ switch (rl) {
+ .ref => return &main_block.base,
+ else => return rvalue(mod, parent_scope, rl, &main_block.base),
+ }
+ },
+ }
}
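
An illustrative example of the two strategies finishThenElseBlock chooses between (user-level Zig, not from this diff):

    fn choose(cond: bool) u32 {
        // With a simple typed result location, both branches break with
        // an operand (break_operand) and rvalue() applies the coercion;
        // if the branches had written through a result pointer instead,
        // the breaks would be downgraded to break_void.
        return if (cond) 10 else 20;
    }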
/// Return whether the identifier names of two tokens are equal. Resolves @""
@@ -1308,7 +1535,7 @@ pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleI
.field_name = field_name,
});
}
- return rlWrap(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val, .{
+ return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val, .{
.object = try expr(mod, scope, .none, node.lhs),
.field_name = field_name,
}));
@@ -1338,7 +1565,7 @@ fn namedField(
.field_name = try comptimeExpr(mod, scope, string_rl, params[1]),
});
}
- return rlWrap(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val_named, .{
+ return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val_named, .{
.object = try expr(mod, scope, .none, params[0]),
.field_name = try comptimeExpr(mod, scope, string_rl, params[1]),
}));
@@ -1359,7 +1586,7 @@ fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Array
.index = try expr(mod, scope, index_rl, node.index_expr),
});
}
- return rlWrap(mod, scope, rl, try addZirInstTag(mod, scope, src, .elem_val, .{
+ return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .elem_val, .{
.array = try expr(mod, scope, .none, node.lhs),
.index = try expr(mod, scope, index_rl, node.index_expr),
}));
@@ -1416,7 +1643,7 @@ fn simpleBinOp(
const rhs = try expr(mod, scope, .none, infix_node.rhs);
const result = try addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs);
- return rlWrap(mod, scope, rl, result);
+ return rvalue(mod, scope, rl, result);
}
fn boolBinOp(
@@ -1436,6 +1663,7 @@ fn boolBinOp(
.parent = scope,
.decl = scope.ownerDecl().?,
.arena = scope.arena(),
+ .force_comptime = scope.isComptime(),
.instructions = .{},
};
defer block_scope.instructions.deinit(mod.gpa);
@@ -1455,6 +1683,7 @@ fn boolBinOp(
.parent = scope,
.decl = block_scope.decl,
.arena = block_scope.arena,
+ .force_comptime = block_scope.force_comptime,
.instructions = .{},
};
defer rhs_scope.instructions.deinit(mod.gpa);
@@ -1469,6 +1698,7 @@ fn boolBinOp(
.parent = scope,
.decl = block_scope.decl,
.arena = block_scope.arena,
+ .force_comptime = block_scope.force_comptime,
.instructions = .{},
};
defer const_scope.instructions.deinit(mod.gpa);
@@ -1498,7 +1728,7 @@ fn boolBinOp(
condbr.positionals.else_body = .{ .instructions = try rhs_scope.arena.dupe(*zir.Inst, rhs_scope.instructions.items) };
}
- return rlWrap(mod, scope, rl, &block.base);
+ return rvalue(mod, scope, rl, &block.base);
}
const CondKind = union(enum) {
@@ -1582,8 +1812,10 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
.parent = scope,
.decl = scope.ownerDecl().?,
.arena = scope.arena(),
+ .force_comptime = scope.isComptime(),
.instructions = .{},
};
+ setBlockResultLoc(&block_scope, rl);
defer block_scope.instructions.deinit(mod.gpa);
const tree = scope.tree();
@@ -1605,6 +1837,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
.parent = scope,
.decl = block_scope.decl,
.arena = block_scope.arena,
+ .force_comptime = block_scope.force_comptime,
.instructions = .{},
};
defer then_scope.instructions.deinit(mod.gpa);
@@ -1612,62 +1845,81 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
// declare payload to the then_scope
const then_sub_scope = try cond_kind.thenSubScope(mod, &then_scope, then_src, if_node.payload);
- // Most result location types can be forwarded directly; however
- // if we need to write to a pointer which has an inferred type,
- // proper type inference requires peer type resolution on the if's
- // branches.
- const branch_rl: ResultLoc = switch (rl) {
- .discard, .none, .ty, .ptr, .ref => rl,
- .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block },
- };
-
- const then_result = try expr(mod, then_sub_scope, branch_rl, if_node.body);
- if (!then_result.tag.isNoReturn()) {
- _ = try addZIRInst(mod, then_sub_scope, then_src, zir.Inst.Break, .{
- .block = block,
- .operand = then_result,
- }, .{});
- }
- condbr.positionals.then_body = .{
- .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items),
- };
+ block_scope.break_count += 1;
+ const then_result = try expr(mod, then_sub_scope, block_scope.break_result_loc, if_node.body);
+ // We hold off on the break instructions as well as copying the then/else
+ // instructions into place until we know whether to keep store_to_block_ptr
+ // instructions or not.
var else_scope: Scope.GenZIR = .{
.parent = scope,
.decl = block_scope.decl,
.arena = block_scope.arena,
+ .force_comptime = block_scope.force_comptime,
.instructions = .{},
};
defer else_scope.instructions.deinit(mod.gpa);
- if (if_node.@"else") |else_node| {
- const else_src = tree.token_locs[else_node.body.lastToken()].start;
+ var else_src: usize = undefined;
+ var else_sub_scope: *Module.Scope = undefined;
+ const else_result: ?*zir.Inst = if (if_node.@"else") |else_node| blk: {
+ else_src = tree.token_locs[else_node.body.lastToken()].start;
// declare payload to the else_scope
- const else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload);
+ else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload);
+
+ block_scope.break_count += 1;
+ break :blk try expr(mod, else_sub_scope, block_scope.break_result_loc, else_node.body);
+ } else blk: {
+ else_src = tree.token_locs[if_node.lastToken()].start;
+ else_sub_scope = &else_scope.base;
+ break :blk null;
+ };
- const else_result = try expr(mod, else_sub_scope, branch_rl, else_node.body);
- if (!else_result.tag.isNoReturn()) {
- _ = try addZIRInst(mod, else_sub_scope, else_src, zir.Inst.Break, .{
- .block = block,
- .operand = else_result,
- }, .{});
+ return finishThenElseBlock(
+ mod,
+ scope,
+ rl,
+ &block_scope,
+ &then_scope,
+ &else_scope,
+ &condbr.positionals.then_body,
+ &condbr.positionals.else_body,
+ then_src,
+ else_src,
+ then_result,
+ else_result,
+ block,
+ block,
+ );
+}
+
+/// Expects to find exactly 1 .store_to_block_ptr instruction.
+fn copyBodyWithElidedStoreBlockPtr(body: *zir.Body, scope: Module.Scope.GenZIR) !void {
+ body.* = .{
+ .instructions = try scope.arena.alloc(*zir.Inst, scope.instructions.items.len - 1),
+ };
+ var dst_index: usize = 0;
+ for (scope.instructions.items) |src_inst| {
+ if (src_inst.tag != .store_to_block_ptr) {
+ body.instructions[dst_index] = src_inst;
+ dst_index += 1;
}
- } else {
- // TODO Optimization opportunity: we can avoid an allocation and a memcpy here
- // by directly allocating the body for this one instruction.
- const else_src = tree.token_locs[if_node.lastToken()].start;
- _ = try addZIRInst(mod, &else_scope.base, else_src, zir.Inst.BreakVoid, .{
- .block = block,
- }, .{});
}
- condbr.positionals.else_body = .{
- .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
- };
+ assert(dst_index == body.instructions.len);
+}
- return &block.base;
+fn copyBodyNoEliding(body: *zir.Body, scope: Module.Scope.GenZIR) !void {
+ body.* = .{
+ .instructions = try scope.arena.dupe(*zir.Inst, scope.instructions.items),
+ };
}
-fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.While) InnerError!*zir.Inst {
+fn whileExpr(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ while_node: *ast.Node.While,
+) InnerError!*zir.Inst {
var cond_kind: CondKind = .bool;
if (while_node.payload) |_| cond_kind = .{ .optional = null };
if (while_node.@"else") |else_node| {
@@ -1683,27 +1935,21 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
if (while_node.inline_token) |tok|
return mod.failTok(scope, tok, "TODO inline while", .{});
- var expr_scope: Scope.GenZIR = .{
+ var loop_scope: Scope.GenZIR = .{
.parent = scope,
.decl = scope.ownerDecl().?,
.arena = scope.arena(),
+ .force_comptime = scope.isComptime(),
.instructions = .{},
};
- defer expr_scope.instructions.deinit(mod.gpa);
-
- var loop_scope: Scope.GenZIR = .{
- .parent = &expr_scope.base,
- .decl = expr_scope.decl,
- .arena = expr_scope.arena,
- .instructions = .{},
- .break_result_loc = rl,
- };
+ setBlockResultLoc(&loop_scope, rl);
defer loop_scope.instructions.deinit(mod.gpa);
var continue_scope: Scope.GenZIR = .{
.parent = &loop_scope.base,
.decl = loop_scope.decl,
.arena = loop_scope.arena,
+ .force_comptime = loop_scope.force_comptime,
.instructions = .{},
};
defer continue_scope.instructions.deinit(mod.gpa);
@@ -1731,11 +1977,21 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
if (while_node.continue_expr) |cont_expr| {
_ = try expr(mod, &loop_scope.base, .{ .ty = void_type }, cont_expr);
}
- const loop = try addZIRInstLoop(mod, &expr_scope.base, while_src, .{
- .instructions = try expr_scope.arena.dupe(*zir.Inst, loop_scope.instructions.items),
- });
+ const loop = try scope.arena().create(zir.Inst.Loop);
+ loop.* = .{
+ .base = .{
+ .tag = .loop,
+ .src = while_src,
+ },
+ .positionals = .{
+ .body = .{
+ .instructions = try scope.arena().dupe(*zir.Inst, loop_scope.instructions.items),
+ },
+ },
+ .kw_args = .{},
+ };
const while_block = try addZIRInstBlock(mod, scope, while_src, .block, .{
- .instructions = try expr_scope.arena.dupe(*zir.Inst, expr_scope.instructions.items),
+ .instructions = try scope.arena().dupe(*zir.Inst, &[1]*zir.Inst{&loop.base}),
});
loop_scope.break_block = while_block;
loop_scope.continue_block = cond_block;
@@ -1751,6 +2007,7 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
.parent = &continue_scope.base,
.decl = continue_scope.decl,
.arena = continue_scope.arena,
+ .force_comptime = continue_scope.force_comptime,
.instructions = .{},
};
defer then_scope.instructions.deinit(mod.gpa);
@@ -1758,61 +2015,51 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
// declare payload to the then_scope
const then_sub_scope = try cond_kind.thenSubScope(mod, &then_scope, then_src, while_node.payload);
- // Most result location types can be forwarded directly; however
- // if we need to write to a pointer which has an inferred type,
- // proper type inference requires peer type resolution on the while's
- // branches.
- const branch_rl: ResultLoc = switch (rl) {
- .discard, .none, .ty, .ptr, .ref => rl,
- .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = while_block },
- };
-
- const then_result = try expr(mod, then_sub_scope, branch_rl, while_node.body);
- if (!then_result.tag.isNoReturn()) {
- _ = try addZIRInst(mod, then_sub_scope, then_src, zir.Inst.Break, .{
- .block = cond_block,
- .operand = then_result,
- }, .{});
- }
- condbr.positionals.then_body = .{
- .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items),
- };
+ loop_scope.break_count += 1;
+ const then_result = try expr(mod, then_sub_scope, loop_scope.break_result_loc, while_node.body);
var else_scope: Scope.GenZIR = .{
.parent = &continue_scope.base,
.decl = continue_scope.decl,
.arena = continue_scope.arena,
+ .force_comptime = continue_scope.force_comptime,
.instructions = .{},
};
defer else_scope.instructions.deinit(mod.gpa);
- if (while_node.@"else") |else_node| {
- const else_src = tree.token_locs[else_node.body.lastToken()].start;
+ var else_src: usize = undefined;
+ const else_result: ?*zir.Inst = if (while_node.@"else") |else_node| blk: {
+ else_src = tree.token_locs[else_node.body.lastToken()].start;
// declare payload to the else_scope
const else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload);
- const else_result = try expr(mod, else_sub_scope, branch_rl, else_node.body);
- if (!else_result.tag.isNoReturn()) {
- _ = try addZIRInst(mod, else_sub_scope, else_src, zir.Inst.Break, .{
- .block = while_block,
- .operand = else_result,
- }, .{});
- }
- } else {
- const else_src = tree.token_locs[while_node.lastToken()].start;
- _ = try addZIRInst(mod, &else_scope.base, else_src, zir.Inst.BreakVoid, .{
- .block = while_block,
- }, .{});
- }
- condbr.positionals.else_body = .{
- .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
+ loop_scope.break_count += 1;
+ break :blk try expr(mod, else_sub_scope, loop_scope.break_result_loc, else_node.body);
+ } else blk: {
+ else_src = tree.token_locs[while_node.lastToken()].start;
+ break :blk null;
};
if (loop_scope.label) |some| {
if (!some.used) {
return mod.fail(scope, tree.token_locs[some.token].start, "unused while label", .{});
}
}
- return &while_block.base;
+ return finishThenElseBlock(
+ mod,
+ scope,
+ rl,
+ &loop_scope,
+ &then_scope,
+ &else_scope,
+ &condbr.positionals.then_body,
+ &condbr.positionals.else_body,
+ then_src,
+ else_src,
+ then_result,
+ else_result,
+ while_block,
+ cond_block,
+ );
}
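
A while/else whose result flows through the machinery above, as an illustrative user-level sketch (`firstEven` is hypothetical):

    fn firstEven(xs: []const u32) ?u32 {
        var i: usize = 0;
        // The body's `break` supplies one peer, the `else` operand the
        // other; finishThenElseBlock decides how the block's result is
        // communicated.
        return while (i < xs.len) : (i += 1) {
            if (xs[i] % 2 == 0) break xs[i];
        } else null;
    }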
fn forExpr(
@@ -1828,48 +2075,42 @@ fn forExpr(
if (for_node.inline_token) |tok|
return mod.failTok(scope, tok, "TODO inline for", .{});
- var for_scope: Scope.GenZIR = .{
- .parent = scope,
- .decl = scope.ownerDecl().?,
- .arena = scope.arena(),
- .instructions = .{},
- };
- defer for_scope.instructions.deinit(mod.gpa);
-
// set up variables and constants
const tree = scope.tree();
const for_src = tree.token_locs[for_node.for_token].start;
const index_ptr = blk: {
- const usize_type = try addZIRInstConst(mod, &for_scope.base, for_src, .{
+ const usize_type = try addZIRInstConst(mod, scope, for_src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.usize_type),
});
- const index_ptr = try addZIRUnOp(mod, &for_scope.base, for_src, .alloc, usize_type);
+ const index_ptr = try addZIRUnOp(mod, scope, for_src, .alloc, usize_type);
// initialize to zero
- const zero = try addZIRInstConst(mod, &for_scope.base, for_src, .{
+ const zero = try addZIRInstConst(mod, scope, for_src, .{
.ty = Type.initTag(.usize),
.val = Value.initTag(.zero),
});
- _ = try addZIRBinOp(mod, &for_scope.base, for_src, .store, index_ptr, zero);
+ _ = try addZIRBinOp(mod, scope, for_src, .store, index_ptr, zero);
break :blk index_ptr;
};
- const array_ptr = try expr(mod, &for_scope.base, .ref, for_node.array_expr);
+ const array_ptr = try expr(mod, scope, .ref, for_node.array_expr);
const cond_src = tree.token_locs[for_node.array_expr.firstToken()].start;
- const len = try addZIRUnOp(mod, &for_scope.base, cond_src, .indexable_ptr_len, array_ptr);
+ const len = try addZIRUnOp(mod, scope, cond_src, .indexable_ptr_len, array_ptr);
var loop_scope: Scope.GenZIR = .{
- .parent = &for_scope.base,
- .decl = for_scope.decl,
- .arena = for_scope.arena,
+ .parent = scope,
+ .decl = scope.ownerDecl().?,
+ .arena = scope.arena(),
+ .force_comptime = scope.isComptime(),
.instructions = .{},
- .break_result_loc = rl,
};
+ setBlockResultLoc(&loop_scope, rl);
defer loop_scope.instructions.deinit(mod.gpa);
var cond_scope: Scope.GenZIR = .{
.parent = &loop_scope.base,
.decl = loop_scope.decl,
.arena = loop_scope.arena,
+ .force_comptime = loop_scope.force_comptime,
.instructions = .{},
};
defer cond_scope.instructions.deinit(mod.gpa);
@@ -1896,12 +2137,21 @@ fn forExpr(
const index_plus_one = try addZIRBinOp(mod, &loop_scope.base, for_src, .add, index_2, one);
_ = try addZIRBinOp(mod, &loop_scope.base, for_src, .store, index_ptr, index_plus_one);
- // looping stuff
- const loop = try addZIRInstLoop(mod, &for_scope.base, for_src, .{
- .instructions = try for_scope.arena.dupe(*zir.Inst, loop_scope.instructions.items),
- });
+ const loop = try scope.arena().create(zir.Inst.Loop);
+ loop.* = .{
+ .base = .{
+ .tag = .loop,
+ .src = for_src,
+ },
+ .positionals = .{
+ .body = .{
+ .instructions = try scope.arena().dupe(*zir.Inst, loop_scope.instructions.items),
+ },
+ },
+ .kw_args = .{},
+ };
const for_block = try addZIRInstBlock(mod, scope, for_src, .block, .{
- .instructions = try for_scope.arena.dupe(*zir.Inst, for_scope.instructions.items),
+ .instructions = try scope.arena().dupe(*zir.Inst, &[1]*zir.Inst{&loop.base}),
});
loop_scope.break_block = for_block;
loop_scope.continue_block = cond_block;
@@ -1918,19 +2168,11 @@ fn forExpr(
.parent = &cond_scope.base,
.decl = cond_scope.decl,
.arena = cond_scope.arena,
+ .force_comptime = cond_scope.force_comptime,
.instructions = .{},
};
defer then_scope.instructions.deinit(mod.gpa);
- // Most result location types can be forwarded directly; however
- // if we need to write to a pointer which has an inferred type,
- // proper type inference requires peer type resolution on the while's
- // branches.
- const branch_rl: ResultLoc = switch (rl) {
- .discard, .none, .ty, .ptr, .ref => rl,
- .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = for_block },
- };
-
var index_scope: Scope.LocalPtr = undefined;
const then_sub_scope = blk: {
const payload = for_node.payload.castTag(.PointerIndexPayload).?;
@@ -1959,319 +2201,49 @@ fn forExpr(
break :blk &index_scope.base;
};
- const then_result = try expr(mod, then_sub_scope, branch_rl, for_node.body);
- if (!then_result.tag.isNoReturn()) {
- _ = try addZIRInst(mod, then_sub_scope, then_src, zir.Inst.Break, .{
- .block = cond_block,
- .operand = then_result,
- }, .{});
- }
- condbr.positionals.then_body = .{
- .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items),
- };
+ loop_scope.break_count += 1;
+ const then_result = try expr(mod, then_sub_scope, loop_scope.break_result_loc, for_node.body);
// else branch
var else_scope: Scope.GenZIR = .{
.parent = &cond_scope.base,
.decl = cond_scope.decl,
.arena = cond_scope.arena,
+ .force_comptime = cond_scope.force_comptime,
.instructions = .{},
};
defer else_scope.instructions.deinit(mod.gpa);
- if (for_node.@"else") |else_node| {
- const else_src = tree.token_locs[else_node.body.lastToken()].start;
- const else_result = try expr(mod, &else_scope.base, branch_rl, else_node.body);
- if (!else_result.tag.isNoReturn()) {
- _ = try addZIRInst(mod, &else_scope.base, else_src, zir.Inst.Break, .{
- .block = for_block,
- .operand = else_result,
- }, .{});
- }
- } else {
- const else_src = tree.token_locs[for_node.lastToken()].start;
- _ = try addZIRInst(mod, &else_scope.base, else_src, zir.Inst.BreakVoid, .{
- .block = for_block,
- }, .{});
- }
- condbr.positionals.else_body = .{
- .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
+ var else_src: usize = undefined;
+ const else_result: ?*zir.Inst = if (for_node.@"else") |else_node| blk: {
+ else_src = tree.token_locs[else_node.body.lastToken()].start;
+ loop_scope.break_count += 1;
+ break :blk try expr(mod, &else_scope.base, loop_scope.break_result_loc, else_node.body);
+ } else blk: {
+ else_src = tree.token_locs[for_node.lastToken()].start;
+ break :blk null;
};
if (loop_scope.label) |some| {
if (!some.used) {
return mod.fail(scope, tree.token_locs[some.token].start, "unused for label", .{});
}
}
- return &for_block.base;
-}
-
-fn getRangeNode(node: *ast.Node) ?*ast.Node.SimpleInfixOp {
- var cur = node;
- while (true) {
- switch (cur.tag) {
- .Range => return @fieldParentPtr(ast.Node.SimpleInfixOp, "base", cur),
- .GroupedExpression => cur = @fieldParentPtr(ast.Node.GroupedExpression, "base", cur).expr,
- else => return null,
- }
- }
-}
-
-fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node.Switch) InnerError!*zir.Inst {
- var block_scope: Scope.GenZIR = .{
- .parent = scope,
- .decl = scope.ownerDecl().?,
- .arena = scope.arena(),
- .instructions = .{},
- };
- defer block_scope.instructions.deinit(mod.gpa);
-
- const tree = scope.tree();
- const switch_src = tree.token_locs[switch_node.switch_token].start;
- const target_ptr = try expr(mod, &block_scope.base, .ref, switch_node.expr);
- const target = try addZIRUnOp(mod, &block_scope.base, target_ptr.src, .deref, target_ptr);
- // Add the switch instruction here so that it comes before any range checks.
- const switch_inst = (try addZIRInst(mod, &block_scope.base, switch_src, zir.Inst.SwitchBr, .{
- .target_ptr = target_ptr,
- .cases = undefined, // populated below
- .items = &[_]*zir.Inst{}, // populated below
- .else_body = undefined, // populated below
- }, .{})).castTag(.switchbr).?;
-
- var items = std.ArrayList(*zir.Inst).init(mod.gpa);
- defer items.deinit();
- var cases = std.ArrayList(zir.Inst.SwitchBr.Case).init(mod.gpa);
- defer cases.deinit();
-
- // Add comptime block containing all prong items first,
- const item_block = try addZIRInstBlock(mod, scope, switch_src, .block_comptime_flat, .{
- .instructions = undefined, // populated below
- });
- // then add block containing the switch.
- const block = try addZIRInstBlock(mod, scope, switch_src, .block, .{
- .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
- });
-
- // Most result location types can be forwarded directly; however
- // if we need to write to a pointer which has an inferred type,
- // proper type inference requires peer type resolution on the switch case.
- const case_rl: ResultLoc = switch (rl) {
- .discard, .none, .ty, .ptr, .ref => rl,
- .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block },
- };
-
- var item_scope: Scope.GenZIR = .{
- .parent = scope,
- .decl = scope.ownerDecl().?,
- .arena = scope.arena(),
- .instructions = .{},
- };
- defer item_scope.instructions.deinit(mod.gpa);
-
- var case_scope: Scope.GenZIR = .{
- .parent = scope,
- .decl = block_scope.decl,
- .arena = block_scope.arena,
- .instructions = .{},
- };
- defer case_scope.instructions.deinit(mod.gpa);
-
- var else_scope: Scope.GenZIR = .{
- .parent = scope,
- .decl = block_scope.decl,
- .arena = block_scope.arena,
- .instructions = .{},
- };
- defer else_scope.instructions.deinit(mod.gpa);
-
- // first we gather all the switch items and check else/'_' prongs
- var else_src: ?usize = null;
- var underscore_src: ?usize = null;
- var first_range: ?*zir.Inst = null;
- var special_case: ?*ast.Node.SwitchCase = null;
- for (switch_node.cases()) |uncasted_case| {
- const case = uncasted_case.castTag(.SwitchCase).?;
- const case_src = tree.token_locs[case.firstToken()].start;
- // reset without freeing to reduce allocations.
- case_scope.instructions.items.len = 0;
- assert(case.items_len != 0);
-
- // Check for else/_ prong, those are handled last.
- if (case.items_len == 1 and case.items()[0].tag == .SwitchElse) {
- if (else_src) |src| {
- const msg = msg: {
- const msg = try mod.errMsg(
- scope,
- case_src,
- "multiple else prongs in switch expression",
- .{},
- );
- errdefer msg.destroy(mod.gpa);
- try mod.errNote(scope, src, msg, "previous else prong is here", .{});
- break :msg msg;
- };
- return mod.failWithOwnedErrorMsg(scope, msg);
- }
- else_src = case_src;
- special_case = case;
- continue;
- } else if (case.items_len == 1 and case.items()[0].tag == .Identifier and
- mem.eql(u8, tree.tokenSlice(case.items()[0].firstToken()), "_"))
- {
- if (underscore_src) |src| {
- const msg = msg: {
- const msg = try mod.errMsg(
- scope,
- case_src,
- "multiple '_' prongs in switch expression",
- .{},
- );
- errdefer msg.destroy(mod.gpa);
- try mod.errNote(scope, src, msg, "previous '_' prong is here", .{});
- break :msg msg;
- };
- return mod.failWithOwnedErrorMsg(scope, msg);
- }
- underscore_src = case_src;
- special_case = case;
- continue;
- }
-
- if (else_src) |some_else| {
- if (underscore_src) |some_underscore| {
- const msg = msg: {
- const msg = try mod.errMsg(
- scope,
- switch_src,
- "else and '_' prong in switch expression",
- .{},
- );
- errdefer msg.destroy(mod.gpa);
- try mod.errNote(scope, some_else, msg, "else prong is here", .{});
- try mod.errNote(scope, some_underscore, msg, "'_' prong is here", .{});
- break :msg msg;
- };
- return mod.failWithOwnedErrorMsg(scope, msg);
- }
- }
-
- // If this is a simple one item prong then it is handled by the switchbr.
- if (case.items_len == 1 and getRangeNode(case.items()[0]) == null) {
- const item = try expr(mod, &item_scope.base, .none, case.items()[0]);
- try items.append(item);
- try switchCaseExpr(mod, &case_scope.base, case_rl, block, case);
-
- try cases.append(.{
- .item = item,
- .body = .{ .instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items) },
- });
- continue;
- }
-
- // TODO if the case has few items and no ranges it might be better
- // to just handle them as switch prongs.
-
- // Check if the target matches any of the items.
- // 1, 2, 3..6 will result in
- // target == 1 or target == 2 or (target >= 3 and target <= 6)
- var any_ok: ?*zir.Inst = null;
- for (case.items()) |item| {
- if (getRangeNode(item)) |range| {
- const start = try expr(mod, &item_scope.base, .none, range.lhs);
- const end = try expr(mod, &item_scope.base, .none, range.rhs);
- const range_src = tree.token_locs[range.op_token].start;
- const range_inst = try addZIRBinOp(mod, &item_scope.base, range_src, .switch_range, start, end);
- try items.append(range_inst);
- if (first_range == null) first_range = range_inst;
-
- // target >= start and target <= end
- const range_start_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .cmp_gte, target, start);
- const range_end_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .cmp_lte, target, end);
- const range_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .booland, range_start_ok, range_end_ok);
-
- if (any_ok) |some| {
- any_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .boolor, some, range_ok);
- } else {
- any_ok = range_ok;
- }
- continue;
- }
-
- const item_inst = try expr(mod, &item_scope.base, .none, item);
- try items.append(item_inst);
- const cpm_ok = try addZIRBinOp(mod, &else_scope.base, item_inst.src, .cmp_eq, target, item_inst);
-
- if (any_ok) |some| {
- any_ok = try addZIRBinOp(mod, &else_scope.base, item_inst.src, .boolor, some, cpm_ok);
- } else {
- any_ok = cpm_ok;
- }
- }
-
- const condbr = try addZIRInstSpecial(mod, &case_scope.base, case_src, zir.Inst.CondBr, .{
- .condition = any_ok.?,
- .then_body = undefined, // populated below
- .else_body = undefined, // populated below
- }, .{});
- const cond_block = try addZIRInstBlock(mod, &else_scope.base, case_src, .block, .{
- .instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items),
- });
-
- // reset cond_scope for then_body
- case_scope.instructions.items.len = 0;
- try switchCaseExpr(mod, &case_scope.base, case_rl, block, case);
- condbr.positionals.then_body = .{
- .instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items),
- };
-
- // reset cond_scope for else_body
- case_scope.instructions.items.len = 0;
- _ = try addZIRInst(mod, &case_scope.base, case_src, zir.Inst.BreakVoid, .{
- .block = cond_block,
- }, .{});
- condbr.positionals.else_body = .{
- .instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items),
- };
- }
-
- // Generate else block or a break last to finish the block.
- if (special_case) |case| {
- try switchCaseExpr(mod, &else_scope.base, case_rl, block, case);
- } else {
- // Not handling all possible cases is a compile error.
- _ = try addZIRNoOp(mod, &else_scope.base, switch_src, .unreach_nocheck);
- }
-
- // All items have been generated, add the instructions to the comptime block.
- item_block.positionals.body = .{
- .instructions = try block_scope.arena.dupe(*zir.Inst, item_scope.instructions.items),
- };
-
- // Actually populate switch instruction values.
- if (else_src != null) switch_inst.kw_args.special_prong = .@"else";
- if (underscore_src != null) switch_inst.kw_args.special_prong = .underscore;
- switch_inst.positionals.cases = try block_scope.arena.dupe(zir.Inst.SwitchBr.Case, cases.items);
- switch_inst.positionals.items = try block_scope.arena.dupe(*zir.Inst, items.items);
- switch_inst.kw_args.range = first_range;
- switch_inst.positionals.else_body = .{
- .instructions = try block_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
- };
- return &block.base;
-}
-
-fn switchCaseExpr(mod: *Module, scope: *Scope, rl: ResultLoc, block: *zir.Inst.Block, case: *ast.Node.SwitchCase) !void {
- const tree = scope.tree();
- const case_src = tree.token_locs[case.firstToken()].start;
- if (case.payload != null) {
- return mod.fail(scope, case_src, "TODO switch case payload capture", .{});
- }
-
- const case_body = try expr(mod, scope, rl, case.expr);
- if (!case_body.tag.isNoReturn()) {
- _ = try addZIRInst(mod, scope, case_src, zir.Inst.Break, .{
- .block = block,
- .operand = case_body,
- }, .{});
- }
+ return finishThenElseBlock(
+ mod,
+ scope,
+ rl,
+ &loop_scope,
+ &then_scope,
+ &else_scope,
+ &condbr.positionals.then_body,
+ &condbr.positionals.else_body,
+ then_src,
+ else_src,
+ then_result,
+ else_result,
+ for_block,
+ cond_block,
+ );
}
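Both whileExpr and forExpr now funnel into the same helper. Its definition is outside this hunk, but given the rlStrategy comments later in this diff, finishThenElseBlock presumably decides between the two block strategies roughly like this (a sketch; the body below is invented, only the strategy names come from this commit):

// switch (rlStrategy(rl, loop_scope).tag) {
//     // results become operands of `break` instructions
//     .break_operand => { ... },
//     // results reach the caller through rl_ptr stores; breaks are
//     // downgraded to `break_void`
//     .break_void => { ... },
// }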
fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst {
@@ -2288,7 +2260,7 @@ fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerE
return addZIRUnOp(mod, scope, src, .@"return", operand);
}
} else {
- return addZIRNoOp(mod, scope, src, .returnvoid);
+ return addZIRNoOp(mod, scope, src, .return_void);
}
}
@@ -2305,7 +2277,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
if (getSimplePrimitiveValue(ident_name)) |typed_value| {
const result = try addZIRInstConst(mod, scope, src, typed_value);
- return rlWrap(mod, scope, rl, result);
+ return rvalue(mod, scope, rl, result);
}
if (ident_name.len >= 2) integer: {
@@ -2327,7 +2299,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
32 => if (is_signed) Value.initTag(.i32_type) else Value.initTag(.u32_type),
64 => if (is_signed) Value.initTag(.i64_type) else Value.initTag(.u64_type),
else => {
- return rlWrap(mod, scope, rl, try addZIRInstConst(mod, scope, src, .{
+ return rvalue(mod, scope, rl, try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = try Value.Tag.int_type.create(scope.arena(), .{
.signed = is_signed,
@@ -2340,7 +2312,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
.ty = Type.initTag(.type),
.val = val,
});
- return rlWrap(mod, scope, rl, result);
+ return rvalue(mod, scope, rl, result);
}
}
@@ -2351,7 +2323,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
.local_val => {
const local_val = s.cast(Scope.LocalVal).?;
if (mem.eql(u8, local_val.name, ident_name)) {
- return rlWrap(mod, scope, rl, local_val.inst);
+ return rvalue(mod, scope, rl, local_val.inst);
}
s = local_val.parent;
},
@@ -2360,7 +2332,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
if (mem.eql(u8, local_ptr.name, ident_name)) {
if (rl == .ref) return local_ptr.ptr;
const loaded = try addZIRUnOp(mod, scope, src, .deref, local_ptr.ptr);
- return rlWrap(mod, scope, rl, loaded);
+ return rvalue(mod, scope, rl, loaded);
}
s = local_ptr.parent;
},
@@ -2373,7 +2345,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
if (rl == .ref) {
return addZIRInst(mod, scope, src, zir.Inst.DeclRef, .{ .decl = decl }, .{});
} else {
- return rlWrap(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.DeclVal, .{
+ return rvalue(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.DeclVal, .{
.decl = decl,
}, .{}));
}
@@ -2590,7 +2562,7 @@ fn simpleCast(
const dest_type = try typeExpr(mod, scope, params[0]);
const rhs = try expr(mod, scope, .none, params[1]);
const result = try addZIRBinOp(mod, scope, src, inst_tag, dest_type, rhs);
- return rlWrap(mod, scope, rl, result);
+ return rvalue(mod, scope, rl, result);
}
fn ptrToInt(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
@@ -2601,31 +2573,30 @@ fn ptrToInt(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError
return addZIRUnOp(mod, scope, src, .ptrtoint, operand);
}
-fn as(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
+fn as(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ call: *ast.Node.BuiltinCall,
+) InnerError!*zir.Inst {
try ensureBuiltinParamCount(mod, scope, call, 2);
const tree = scope.tree();
const src = tree.token_locs[call.builtin_token].start;
const params = call.params();
const dest_type = try typeExpr(mod, scope, params[0]);
switch (rl) {
- .none => return try expr(mod, scope, .{ .ty = dest_type }, params[1]),
- .discard => {
- const result = try expr(mod, scope, .{ .ty = dest_type }, params[1]);
- _ = try addZIRUnOp(mod, scope, result.src, .ensure_result_non_error, result);
- return result;
- },
- .ref => {
- const result = try expr(mod, scope, .{ .ty = dest_type }, params[1]);
- return addZIRUnOp(mod, scope, result.src, .ref, result);
- },
- .ty => |result_ty| {
+ .none, .discard, .ref, .ty => {
const result = try expr(mod, scope, .{ .ty = dest_type }, params[1]);
- return addZIRBinOp(mod, scope, src, .as, result_ty, result);
+ return rvalue(mod, scope, rl, result);
},
+
.ptr => |result_ptr| {
- const casted_result_ptr = try addZIRBinOp(mod, scope, src, .coerce_result_ptr, dest_type, result_ptr);
- return expr(mod, scope, .{ .ptr = casted_result_ptr }, params[1]);
+ return asRlPtr(mod, scope, rl, src, result_ptr, params[1], dest_type);
+ },
+ .block_ptr => |block_scope| {
+ return asRlPtr(mod, scope, rl, src, block_scope.rl_ptr.?, params[1], dest_type);
},
+
.bitcasted_ptr => |bitcasted_ptr| {
// TODO here we should be able to resolve the inference; we now have a type for the result.
return mod.failTok(scope, call.builtin_token, "TODO implement @as with result location @bitCast", .{});
@@ -2634,13 +2605,50 @@ fn as(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) I
// TODO here we should be able to resolve the inference; we now have a type for the result.
return mod.failTok(scope, call.builtin_token, "TODO implement @as with inferred-type result location pointer", .{});
},
- .block_ptr => |block_ptr| {
- const casted_block_ptr = try addZIRInst(mod, scope, src, zir.Inst.CoerceResultBlockPtr, .{
- .dest_type = dest_type,
- .block = block_ptr,
- }, .{});
- return expr(mod, scope, .{ .ptr = casted_block_ptr }, params[1]);
- },
+ }
+}
+
+fn asRlPtr(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ src: usize,
+ result_ptr: *zir.Inst,
+ operand_node: *ast.Node,
+ dest_type: *zir.Inst,
+) InnerError!*zir.Inst {
+ // Detect whether this expr() call goes into rvalue() to store the result into the
+ // result location. If it does, elide the coerce_result_ptr instruction
+ // as well as the store instruction, instead passing the result as an rvalue.
+ var as_scope: Scope.GenZIR = .{
+ .parent = scope,
+ .decl = scope.ownerDecl().?,
+ .arena = scope.arena(),
+ .force_comptime = scope.isComptime(),
+ .instructions = .{},
+ };
+ defer as_scope.instructions.deinit(mod.gpa);
+
+ as_scope.rl_ptr = try addZIRBinOp(mod, &as_scope.base, src, .coerce_result_ptr, dest_type, result_ptr);
+ const result = try expr(mod, &as_scope.base, .{ .block_ptr = &as_scope }, operand_node);
+ const parent_zir = &scope.getGenZIR().instructions;
+ if (as_scope.rvalue_rl_count == 1) {
+ // Busted! This expression didn't actually need a pointer.
+ const expected_len = parent_zir.items.len + as_scope.instructions.items.len - 2;
+ try parent_zir.ensureCapacity(mod.gpa, expected_len);
+ for (as_scope.instructions.items) |src_inst| {
+ if (src_inst == as_scope.rl_ptr.?) continue;
+ if (src_inst.castTag(.store_to_block_ptr)) |store| {
+ if (store.positionals.lhs == as_scope.rl_ptr.?) continue;
+ }
+ parent_zir.appendAssumeCapacity(src_inst);
+ }
+ assert(parent_zir.items.len == expected_len);
+ const casted_result = try addZIRBinOp(mod, scope, dest_type.src, .as, dest_type, result);
+ return rvalue(mod, scope, rl, casted_result);
+ } else {
+ try parent_zir.appendSlice(mod.gpa, as_scope.instructions.items);
+ return result;
}
}
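To see what this elision buys, consider a cast whose operand never consumes the result pointer. A rough sketch (the %-names and ZIR spellings are illustrative, not the printer's exact output):

// const x: u32 = @as(u32, 1 + 2);
//
// asRlPtr first speculates on the pointer strategy:
//   %p = coerce_result_ptr(u32, %x_ptr)
//   %v = add(1, 2)
//   %s = store_to_block_ptr(%p, %v)
//
// The operand reached rvalue() exactly once (rvalue_rl_count == 1), so %p
// and the store through it are skipped while copying instructions into the
// parent scope, and the result is re-expressed as a plain rvalue:
//   %v = add(1, 2)
//   %c = as(u32, %v)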
@@ -2703,7 +2711,7 @@ fn compileError(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerE
const src = tree.token_locs[call.builtin_token].start;
const params = call.params();
const target = try expr(mod, scope, .none, params[0]);
- return addZIRUnOp(mod, scope, src, .compileerror, target);
+ return addZIRUnOp(mod, scope, src, .compile_error, target);
}
fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
@@ -2728,12 +2736,12 @@ fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCal
return mod.failTok(scope, call.builtin_token, "expected at least 1 argument, found 0", .{});
}
if (params.len == 1) {
- return rlWrap(mod, scope, rl, try addZIRUnOp(mod, scope, src, .typeof, try expr(mod, scope, .none, params[0])));
+ return rvalue(mod, scope, rl, try addZIRUnOp(mod, scope, src, .typeof, try expr(mod, scope, .none, params[0])));
}
var items = try arena.alloc(*zir.Inst, params.len);
for (params) |param, param_i|
items[param_i] = try expr(mod, scope, .none, param);
- return rlWrap(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.TypeOfPeer, .{ .items = items }, .{}));
+ return rvalue(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.TypeOfPeer, .{ .items = items }, .{}));
}
fn compileLog(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
const tree = scope.tree();
@@ -2756,7 +2764,7 @@ fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.Built
// Also, some builtins have a variable number of parameters.
if (mem.eql(u8, builtin_name, "@ptrToInt")) {
- return rlWrap(mod, scope, rl, try ptrToInt(mod, scope, call));
+ return rvalue(mod, scope, rl, try ptrToInt(mod, scope, call));
} else if (mem.eql(u8, builtin_name, "@as")) {
return as(mod, scope, rl, call);
} else if (mem.eql(u8, builtin_name, "@floatCast")) {
@@ -2769,9 +2777,9 @@ fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.Built
return typeOf(mod, scope, rl, call);
} else if (mem.eql(u8, builtin_name, "@breakpoint")) {
const src = tree.token_locs[call.builtin_token].start;
- return rlWrap(mod, scope, rl, try addZIRNoOp(mod, scope, src, .breakpoint));
+ return rvalue(mod, scope, rl, try addZIRNoOp(mod, scope, src, .breakpoint));
} else if (mem.eql(u8, builtin_name, "@import")) {
- return rlWrap(mod, scope, rl, try import(mod, scope, call));
+ return rvalue(mod, scope, rl, try import(mod, scope, call));
} else if (mem.eql(u8, builtin_name, "@compileError")) {
return compileError(mod, scope, call);
} else if (mem.eql(u8, builtin_name, "@setEvalBranchQuota")) {
@@ -2806,13 +2814,13 @@ fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Call) In
.args = args,
}, .{});
// TODO function call with result location
- return rlWrap(mod, scope, rl, result);
+ return rvalue(mod, scope, rl, result);
}
fn unreach(mod: *Module, scope: *Scope, unreach_node: *ast.Node.OneToken) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[unreach_node.token].start;
- return addZIRNoOp(mod, scope, src, .@"unreachable");
+ return addZIRNoOp(mod, scope, src, .unreachable_safe);
}
fn getSimplePrimitiveValue(name: []const u8) ?TypedValue {
@@ -3099,7 +3107,7 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool {
/// result locations must call this function on their result.
/// As an example, if the `ResultLoc` is `ptr`, it will write the result to the pointer.
/// If the `ResultLoc` is `ty`, it will coerce the result to the type.
-fn rlWrap(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerError!*zir.Inst {
+fn rvalue(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerError!*zir.Inst {
switch (rl) {
.none => return result,
.discard => {
@@ -3113,42 +3121,97 @@ fn rlWrap(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerEr
},
.ty => |ty_inst| return addZIRBinOp(mod, scope, result.src, .as, ty_inst, result),
.ptr => |ptr_inst| {
- const casted_result = try addZIRInst(mod, scope, result.src, zir.Inst.CoerceToPtrElem, .{
- .ptr = ptr_inst,
- .value = result,
- }, .{});
- _ = try addZIRBinOp(mod, scope, result.src, .store, ptr_inst, casted_result);
- return casted_result;
+ _ = try addZIRBinOp(mod, scope, result.src, .store, ptr_inst, result);
+ return result;
},
.bitcasted_ptr => |bitcasted_ptr| {
- return mod.fail(scope, result.src, "TODO implement rlWrap .bitcasted_ptr", .{});
+ return mod.fail(scope, result.src, "TODO implement rvalue .bitcasted_ptr", .{});
},
.inferred_ptr => |alloc| {
_ = try addZIRBinOp(mod, scope, result.src, .store_to_inferred_ptr, &alloc.base, result);
return result;
},
- .block_ptr => |block_ptr| {
- return mod.fail(scope, result.src, "TODO implement rlWrap .block_ptr", .{});
+ .block_ptr => |block_scope| {
+ block_scope.rvalue_rl_count += 1;
+ _ = try addZIRBinOp(mod, scope, result.src, .store_to_block_ptr, block_scope.rl_ptr.?, result);
+ return result;
},
}
}
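The visible simplification in rvalue is the `.ptr` branch; before/after as a sketch (placeholder %-names; the element-type coercion is presumably performed later, when zir_sema analyzes the `store`):

// rl == .{ .ptr = %dest }, result == %v
//   before: %c = coerce_to_ptr_elem(%dest, %v)
//           store(%dest, %c)
//   after:  store(%dest, %v)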
-fn rlWrapVoid(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node, result: void) InnerError!*zir.Inst {
+fn rvalueVoid(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node, result: void) InnerError!*zir.Inst {
const src = scope.tree().token_locs[node.firstToken()].start;
const void_inst = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.void),
.val = Value.initTag(.void_value),
});
- return rlWrap(mod, scope, rl, void_inst);
+ return rvalue(mod, scope, rl, void_inst);
+}
+
+fn rlStrategy(rl: ResultLoc, block_scope: *Scope.GenZIR) ResultLoc.Strategy {
+ var elide_store_to_block_ptr_instructions = false;
+ switch (rl) {
+ // In this branch there will not be any store_to_block_ptr instructions.
+ .discard, .none, .ty, .ref => return .{
+ .tag = .break_operand,
+ .elide_store_to_block_ptr_instructions = false,
+ },
+ // The pointer got passed through to the sub-expressions, so we will use
+ // break_void here.
+ // In this branch there will not be any store_to_block_ptr instructions.
+ .ptr => return .{
+ .tag = .break_void,
+ .elide_store_to_block_ptr_instructions = false,
+ },
+ .inferred_ptr, .bitcasted_ptr, .block_ptr => {
+ if (block_scope.rvalue_rl_count == block_scope.break_count) {
+ // No branch of the block consumed the result location, so we can
+ // use break instructions to create an rvalue.
+ return .{
+ .tag = .break_operand,
+ .elide_store_to_block_ptr_instructions = true,
+ };
+ } else {
+ // Allow the store_to_block_ptr instructions to remain so that
+ // semantic analysis can turn them into bitcasts.
+ return .{
+ .tag = .break_void,
+ .elide_store_to_block_ptr_instructions = false,
+ };
+ }
+ },
+ }
}
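A minimal sketch of a consumer of this strategy (the applyStrategy helper is hypothetical, shown only to make the two tags concrete; finishThenElseBlock presumably plays this role in astgen.zig):

fn applyStrategy(block_scope: *Scope.GenZIR, rl: ResultLoc) void {
    const strategy = rlStrategy(rl, block_scope);
    if (strategy.elide_store_to_block_ptr_instructions) {
        // Drop store_to_block_ptr instructions whose LHS is
        // block_scope.rl_ptr; they were speculative bookkeeping.
    }
    switch (strategy.tag) {
        // The result travels through the result pointer; break
        // instructions carry no useful operand.
        .break_void => {},
        // The break operands carry the result; the block's type is the
        // peer-resolved type of all of them.
        .break_operand => {},
    }
}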
-/// TODO go over all the callsites and see where we can introduce "by-value" ZIR instructions
-/// to save ZIR memory. For example, see DeclVal vs DeclRef.
-/// Do not add additional callsites to this function.
-fn rlWrapPtr(mod: *Module, scope: *Scope, rl: ResultLoc, ptr: *zir.Inst) InnerError!*zir.Inst {
- if (rl == .ref) return ptr;
+fn setBlockResultLoc(block_scope: *Scope.GenZIR, parent_rl: ResultLoc) void {
+ // Depending on whether the result location is a pointer or value, different
+ // ZIR needs to be generated. In the former case we rely on storing to the
+ // pointer to communicate the result, and use break_void; in the latter case
+ // the block break instructions will have the result values.
+ // One more complication: when the result location is a pointer, we detect
+ // the scenario where the result location is not consumed. In this case
+ // we emit ZIR so that the block break instructions carry the result values,
+ // and then call rvalue() on the block result to pass the value to the
+ // result location.
+ switch (parent_rl) {
+ .discard, .none, .ty, .ptr, .ref => {
+ block_scope.break_result_loc = parent_rl;
+ },
+
+ .inferred_ptr => |ptr| {
+ block_scope.rl_ptr = &ptr.base;
+ block_scope.break_result_loc = .{ .block_ptr = block_scope };
+ },
+
+ .bitcasted_ptr => |ptr| {
+ block_scope.rl_ptr = &ptr.base;
+ block_scope.break_result_loc = .{ .block_ptr = block_scope };
+ },
- return rlWrap(mod, scope, rl, try addZIRUnOp(mod, scope, ptr.src, .deref, ptr));
+ .block_ptr => |parent_block_scope| {
+ block_scope.rl_ptr = parent_block_scope.rl_ptr.?;
+ block_scope.break_result_loc = .{ .block_ptr = block_scope };
+ },
+ }
}
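To make the mapping concrete, two cases sketched as comments (illustrative names; %alloc stands for the inferred-alloc instruction):

// var x = if (c) a else b;
//   parent rl:  .inferred_ptr (%alloc)
//   effect:     block_scope.rl_ptr = &%alloc.base
//   branch rl:  .{ .block_ptr = block_scope }
//
// const y: u32 = if (c) a else b;
//   parent rl:  .ty (u32)
//   effect:     forwarded unchanged; branches break with u32-coerced operands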
pub fn addZirInstTag(
diff --git a/src/codegen.zig b/src/codegen.zig
index 1ca2bb2abe..362b04ab26 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -840,14 +840,15 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.arg => return self.genArg(inst.castTag(.arg).?),
.assembly => return self.genAsm(inst.castTag(.assembly).?),
.bitcast => return self.genBitCast(inst.castTag(.bitcast).?),
- .bitand => return self.genBitAnd(inst.castTag(.bitand).?),
- .bitor => return self.genBitOr(inst.castTag(.bitor).?),
+ .bit_and => return self.genBitAnd(inst.castTag(.bit_and).?),
+ .bit_or => return self.genBitOr(inst.castTag(.bit_or).?),
.block => return self.genBlock(inst.castTag(.block).?),
.br => return self.genBr(inst.castTag(.br).?),
+ .br_block_flat => return self.genBrBlockFlat(inst.castTag(.br_block_flat).?),
.breakpoint => return self.genBreakpoint(inst.src),
- .brvoid => return self.genBrVoid(inst.castTag(.brvoid).?),
- .booland => return self.genBoolOp(inst.castTag(.booland).?),
- .boolor => return self.genBoolOp(inst.castTag(.boolor).?),
+ .br_void => return self.genBrVoid(inst.castTag(.br_void).?),
+ .bool_and => return self.genBoolOp(inst.castTag(.bool_and).?),
+ .bool_or => return self.genBoolOp(inst.castTag(.bool_or).?),
.call => return self.genCall(inst.castTag(.call).?),
.cmp_lt => return self.genCmp(inst.castTag(.cmp_lt).?, .lt),
.cmp_lte => return self.genCmp(inst.castTag(.cmp_lte).?, .lte),
@@ -1097,7 +1098,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
if (inst.base.isUnused())
return MCValue.dead;
switch (arch) {
- .arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bitand),
+ .arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bit_and),
else => return self.fail(inst.base.src, "TODO implement bitwise and for {}", .{self.target.cpu.arch}),
}
}
@@ -1107,7 +1108,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
if (inst.base.isUnused())
return MCValue.dead;
switch (arch) {
- .arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bitor),
+ .arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bit_or),
else => return self.fail(inst.base.src, "TODO implement bitwise or for {}", .{self.target.cpu.arch}),
}
}
@@ -1371,10 +1372,10 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.rsb(.al, dst_reg, dst_reg, operand).toU32());
}
},
- .booland, .bitand => {
+ .bool_and, .bit_and => {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.@"and"(.al, dst_reg, dst_reg, operand).toU32());
},
- .boolor, .bitor => {
+ .bool_or, .bit_or => {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.orr(.al, dst_reg, dst_reg, operand).toU32());
},
.not, .xor => {
@@ -2441,17 +2442,14 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}
+ fn genBrBlockFlat(self: *Self, inst: *ir.Inst.BrBlockFlat) !MCValue {
+ try self.genBody(inst.body);
+ const last = inst.body.instructions[inst.body.instructions.len - 1];
+ return self.br(inst.base.src, inst.block, last);
+ }
+
fn genBr(self: *Self, inst: *ir.Inst.Br) !MCValue {
- if (inst.operand.ty.hasCodeGenBits()) {
- const operand = try self.resolveInst(inst.operand);
- const block_mcv = @bitCast(MCValue, inst.block.codegen.mcv);
- if (block_mcv == .none) {
- inst.block.codegen.mcv = @bitCast(AnyMCValue, operand);
- } else {
- try self.setRegOrMem(inst.base.src, inst.block.base.ty, block_mcv, operand);
- }
- }
- return self.brVoid(inst.base.src, inst.block);
+ return self.br(inst.base.src, inst.block, inst.operand);
}
fn genBrVoid(self: *Self, inst: *ir.Inst.BrVoid) !MCValue {
@@ -2464,20 +2462,33 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
switch (arch) {
.x86_64 => switch (inst.base.tag) {
// lhs AND rhs
- .booland => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 4, 0x20),
+ .bool_and => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 4, 0x20),
// lhs OR rhs
- .boolor => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 1, 0x08),
+ .bool_or => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 1, 0x08),
else => unreachable, // Not a boolean operation
},
.arm, .armeb => switch (inst.base.tag) {
- .booland => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .booland),
- .boolor => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .boolor),
+ .bool_and => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bool_and),
+ .bool_or => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bool_or),
else => unreachable, // Not a boolean operation
},
else => return self.fail(inst.base.src, "TODO implement boolean operations for {}", .{self.target.cpu.arch}),
}
}
+ fn br(self: *Self, src: usize, block: *ir.Inst.Block, operand: *ir.Inst) !MCValue {
+ if (operand.ty.hasCodeGenBits()) {
+ const operand_mcv = try self.resolveInst(operand);
+ const block_mcv = @bitCast(MCValue, block.codegen.mcv);
+ if (block_mcv == .none) {
+ block.codegen.mcv = @bitCast(AnyMCValue, operand_mcv);
+ } else {
+ try self.setRegOrMem(src, block.base.ty, block_mcv, operand_mcv);
+ }
+ }
+ return self.brVoid(src, block);
+ }
+
fn brVoid(self: *Self, src: usize, block: *ir.Inst.Block) !MCValue {
// Emit a jump with a relocation. It will be patched up after the block ends.
try block.codegen.relocs.ensureCapacity(self.gpa, block.codegen.relocs.items.len + 1);
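The shared br helper encodes a simple convention, sketched here (approximate; MCValue bookkeeping elided): the first break out of a block stakes out the block's result location, and every later break copies its operand into that same location.

// break #1: block.codegen.mcv = operand_mcv             // choose the location
// break #2: setRegOrMem(src, block.ty, block_mcv, op)   // copy into it
// both then fall through to brVoid for the jump + relocation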
diff --git a/src/ir.zig b/src/ir.zig
index 89698bdd84..408efc3bba 100644
--- a/src/ir.zig
+++ b/src/ir.zig
@@ -56,13 +56,20 @@ pub const Inst = struct {
alloc,
arg,
assembly,
- bitand,
+ bit_and,
bitcast,
- bitor,
+ bit_or,
block,
br,
+ /// Same as `br` except the operand is a list of instructions to be treated as
+ /// a flat block; that is, there is only one break instruction from the block,
+ /// implied to come after the last instruction, with the last instruction as
+ /// the break operand.
+ /// This instruction exists for late-stage semantic analysis patch-ups, to
+ /// replace one br operand with multiple instructions without moving anything
+ /// else around.
+ br_block_flat,
breakpoint,
- brvoid,
+ br_void,
call,
cmp_lt,
cmp_lte,
@@ -85,8 +92,8 @@ pub const Inst = struct {
is_err,
// *E!T => bool
is_err_ptr,
- booland,
- boolor,
+ bool_and,
+ bool_or,
/// Read a value from a pointer.
load,
loop,
@@ -147,10 +154,10 @@ pub const Inst = struct {
.cmp_gt,
.cmp_neq,
.store,
- .booland,
- .boolor,
- .bitand,
- .bitor,
+ .bool_and,
+ .bool_or,
+ .bit_and,
+ .bit_or,
.xor,
=> BinOp,
@@ -158,7 +165,8 @@ pub const Inst = struct {
.assembly => Assembly,
.block => Block,
.br => Br,
- .brvoid => BrVoid,
+ .br_block_flat => BrBlockFlat,
+ .br_void => BrVoid,
.call => Call,
.condbr => CondBr,
.constant => Constant,
@@ -251,7 +259,8 @@ pub const Inst = struct {
pub fn breakBlock(base: *Inst) ?*Block {
return switch (base.tag) {
.br => base.castTag(.br).?.block,
- .brvoid => base.castTag(.brvoid).?.block,
+ .br_void => base.castTag(.br_void).?.block,
+ .br_block_flat => base.castTag(.br_block_flat).?.block,
else => null,
};
}
@@ -355,6 +364,27 @@ pub const Inst = struct {
}
};
+ pub const convertable_br_size = std.math.max(@sizeOf(BrBlockFlat), @sizeOf(Br));
+ pub const convertable_br_align = std.math.max(@alignOf(BrBlockFlat), @alignOf(Br));
+ comptime {
+ assert(@byteOffsetOf(BrBlockFlat, "base") == @byteOffsetOf(Br, "base"));
+ }
+
+ pub const BrBlockFlat = struct {
+ pub const base_tag = Tag.br_block_flat;
+
+ base: Inst,
+ block: *Block,
+ body: Body,
+
+ pub fn operandCount(self: *const BrBlockFlat) usize {
+ return 0;
+ }
+ pub fn getOperand(self: *const BrBlockFlat, index: usize) ?*Inst {
+ return null;
+ }
+ };
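In TZIR terms (approximate rendering, not the dumper's exact output), br_block_flat splices its body in place and breaks with the last instruction, matching how genBrBlockFlat lowers it in codegen.zig earlier in this diff:

// br_block_flat(%block, {
//   %0 = bitcast(u32, %x)
// })
// behaves as:
//   %0 = bitcast(u32, %x)
//   br(%block, %0)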
+
pub const Br = struct {
pub const base_tag = Tag.br;
@@ -363,7 +393,7 @@ pub const Inst = struct {
operand: *Inst,
pub fn operandCount(self: *const Br) usize {
- return 0;
+ return 1;
}
pub fn getOperand(self: *const Br, index: usize) ?*Inst {
if (index == 0)
@@ -373,7 +403,7 @@ pub const Inst = struct {
};
pub const BrVoid = struct {
- pub const base_tag = Tag.brvoid;
+ pub const base_tag = Tag.br_void;
base: Inst,
block: *Block,
diff --git a/src/zir.zig b/src/zir.zig
index 9e5830e79a..2559fcdc8e 100644
--- a/src/zir.zig
+++ b/src/zir.zig
@@ -59,7 +59,7 @@ pub const Inst = struct {
/// Inline assembly.
@"asm",
/// Bitwise AND. `&`
- bitand,
+ bit_and,
/// TODO delete this instruction, it has no purpose.
bitcast,
/// An arbitrary typed pointer is pointer-casted to a new Pointer.
@@ -71,9 +71,9 @@ pub const Inst = struct {
/// The new result location pointer has an inferred type.
bitcast_result_ptr,
/// Bitwise NOT. `~`
- bitnot,
+ bit_not,
/// Bitwise OR. `|`
- bitor,
+ bit_or,
/// A labeled block of code, which can return a value.
block,
/// A block of code, which can return a value. There are no instructions that break out of
@@ -83,17 +83,17 @@ pub const Inst = struct {
block_comptime,
/// Same as `block_flat` but additionally makes the inner instructions execute at comptime.
block_comptime_flat,
- /// Boolean AND. See also `bitand`.
- booland,
- /// Boolean NOT. See also `bitnot`.
- boolnot,
- /// Boolean OR. See also `bitor`.
- boolor,
+ /// Boolean AND. See also `bit_and`.
+ bool_and,
+ /// Boolean NOT. See also `bit_not`.
+ bool_not,
+ /// Boolean OR. See also `bit_or`.
+ bool_or,
/// Return a value from a `Block`.
@"break",
breakpoint,
/// Same as `break` but without an operand; the operand is assumed to be the void value.
- breakvoid,
+ break_void,
/// Function call.
call,
/// `<`
@@ -112,16 +112,10 @@ pub const Inst = struct {
/// as type coercion from the new element type to the old element type.
/// LHS is destination element type, RHS is result pointer.
coerce_result_ptr,
- /// This instruction does a `coerce_result_ptr` operation on a `Block`'s
- /// result location pointer, whose type is inferred by peer type resolution on the
- /// `Block`'s corresponding `break` instructions.
- coerce_result_block_ptr,
- /// Equivalent to `as(ptr_child_type(typeof(ptr)), value)`.
- coerce_to_ptr_elem,
/// Emit an error message and fail compilation.
- compileerror,
+ compile_error,
/// Log compile time variables and emit an error message.
- compilelog,
+ compile_log,
/// Conditional branch. Splits control flow based on a boolean condition value.
condbr,
/// Special case, has no textual representation.
@@ -135,11 +129,11 @@ pub const Inst = struct {
/// Declares the beginning of a statement. Used for debug info.
dbg_stmt,
/// Represents a pointer to a global decl.
- declref,
+ decl_ref,
/// Represents a pointer to a global decl by string name.
- declref_str,
- /// Equivalent to a declref followed by deref.
- declval,
+ decl_ref_str,
+ /// Equivalent to a decl_ref followed by deref.
+ decl_val,
/// Load the value from a pointer.
deref,
/// Arithmetic division. Asserts no integer overflow.
@@ -185,7 +179,7 @@ pub const Inst = struct {
/// can hold the same mathematical value.
intcast,
/// Make an integer type out of signedness and bit count.
- inttype,
+ int_type,
/// Return a boolean false if an optional is null. `x != null`
is_non_null,
/// Return a boolean true if an optional is null. `x == null`
@@ -232,7 +226,7 @@ pub const Inst = struct {
/// Sends control flow back to the function's callee. Takes an operand as the return value.
@"return",
/// Same as `return` but there is no operand; the operand is implicitly the void value.
- returnvoid,
+ return_void,
/// Changes the maximum number of backwards branches that compile-time
/// code execution can use before giving up and making a compile error.
set_eval_branch_quota,
@@ -270,6 +264,9 @@ pub const Inst = struct {
/// Write a value to a pointer. For loading, see `deref`.
store,
/// Same as `store` but the type of the value being stored will be used to infer
+ /// the block type. The LHS is the pointer to store to.
+ store_to_block_ptr,
+ /// Same as `store` but the type of the value being stored will be used to infer
/// the pointer type.
store_to_inferred_ptr,
/// String Literal. Makes an anonymous Decl and then takes a pointer to it.
@@ -286,11 +283,11 @@ pub const Inst = struct {
typeof_peer,
/// Asserts control-flow will not reach this instruction. Not safety checked - the compiler
/// will assume the correctness of this instruction.
- unreach_nocheck,
+ unreachable_unsafe,
/// Asserts control-flow will not reach this instruction. In safety-checked modes,
/// this will generate a call to the panic function unless it can be proven unreachable
/// by the compiler.
- @"unreachable",
+ unreachable_safe,
/// Bitwise XOR. `^`
xor,
/// Create an optional type '?T'
@@ -339,12 +336,8 @@ pub const Inst = struct {
enum_literal,
/// Create an enum type.
enum_type,
- /// A switch expression.
- switchbr,
- /// A range in a switch case, `lhs...rhs`.
- /// Only checks that `lhs >= rhs` if they are ints, everything else is
- /// validated by the .switch instruction.
- switch_range,
+ /// Does nothing; returns a void value.
+ void_value,
pub fn Type(tag: Tag) type {
return switch (tag) {
@@ -352,17 +345,18 @@ pub const Inst = struct {
.alloc_inferred_mut,
.breakpoint,
.dbg_stmt,
- .returnvoid,
+ .return_void,
.ret_ptr,
.ret_type,
- .unreach_nocheck,
- .@"unreachable",
+ .unreachable_unsafe,
+ .unreachable_safe,
+ .void_value,
=> NoOp,
.alloc,
.alloc_mut,
- .boolnot,
- .compileerror,
+ .bool_not,
+ .compile_error,
.deref,
.@"return",
.is_null,
@@ -400,7 +394,7 @@ pub const Inst = struct {
.err_union_code_ptr,
.ensure_err_payload_void,
.anyframe_type,
- .bitnot,
+ .bit_not,
.import,
.set_eval_branch_quota,
.indexable_ptr_len,
@@ -411,10 +405,10 @@ pub const Inst = struct {
.array_cat,
.array_mul,
.array_type,
- .bitand,
- .bitor,
- .booland,
- .boolor,
+ .bit_and,
+ .bit_or,
+ .bool_and,
+ .bool_or,
.div,
.mod_rem,
.mul,
@@ -422,6 +416,7 @@ pub const Inst = struct {
.shl,
.shr,
.store,
+ .store_to_block_ptr,
.store_to_inferred_ptr,
.sub,
.subwrap,
@@ -440,7 +435,6 @@ pub const Inst = struct {
.error_union_type,
.merge_error_sets,
.slice_start,
- .switch_range,
=> BinOp,
.block,
@@ -452,19 +446,17 @@ pub const Inst = struct {
.arg => Arg,
.array_type_sentinel => ArrayTypeSentinel,
.@"break" => Break,
- .breakvoid => BreakVoid,
+ .break_void => BreakVoid,
.call => Call,
- .coerce_to_ptr_elem => CoerceToPtrElem,
- .declref => DeclRef,
- .declref_str => DeclRefStr,
- .declval => DeclVal,
- .coerce_result_block_ptr => CoerceResultBlockPtr,
- .compilelog => CompileLog,
+ .decl_ref => DeclRef,
+ .decl_ref_str => DeclRefStr,
+ .decl_val => DeclVal,
+ .compile_log => CompileLog,
.loop => Loop,
.@"const" => Const,
.str => Str,
.int => Int,
- .inttype => IntType,
+ .int_type => IntType,
.field_ptr, .field_val => Field,
.field_ptr_named, .field_val_named => FieldNamed,
.@"asm" => Asm,
@@ -479,7 +471,6 @@ pub const Inst = struct {
.enum_literal => EnumLiteral,
.error_set => ErrorSet,
.slice => Slice,
- .switchbr => SwitchBr,
.typeof_peer => TypeOfPeer,
.container_field_named => ContainerFieldNamed,
.container_field_typed => ContainerFieldTyped,
@@ -508,18 +499,18 @@ pub const Inst = struct {
.arg,
.as,
.@"asm",
- .bitand,
+ .bit_and,
.bitcast,
.bitcast_ref,
.bitcast_result_ptr,
- .bitor,
+ .bit_or,
.block,
.block_flat,
.block_comptime,
.block_comptime_flat,
- .boolnot,
- .booland,
- .boolor,
+ .bool_not,
+ .bool_and,
+ .bool_or,
.breakpoint,
.call,
.cmp_lt,
@@ -529,13 +520,11 @@ pub const Inst = struct {
.cmp_gt,
.cmp_neq,
.coerce_result_ptr,
- .coerce_result_block_ptr,
- .coerce_to_ptr_elem,
.@"const",
.dbg_stmt,
- .declref,
- .declref_str,
- .declval,
+ .decl_ref,
+ .decl_ref_str,
+ .decl_val,
.deref,
.div,
.elem_ptr,
@@ -552,7 +541,7 @@ pub const Inst = struct {
.fntype,
.int,
.intcast,
- .inttype,
+ .int_type,
.is_non_null,
.is_null,
.is_non_null_ptr,
@@ -579,6 +568,7 @@ pub const Inst = struct {
.mut_slice_type,
.const_slice_type,
.store,
+ .store_to_block_ptr,
.store_to_inferred_ptr,
.str,
.sub,
@@ -602,31 +592,30 @@ pub const Inst = struct {
.merge_error_sets,
.anyframe_type,
.error_union_type,
- .bitnot,
+ .bit_not,
.error_set,
.slice,
.slice_start,
.import,
- .switch_range,
.typeof_peer,
.resolve_inferred_alloc,
.set_eval_branch_quota,
- .compilelog,
+ .compile_log,
.enum_type,
.union_type,
.struct_type,
+ .void_value,
=> false,
.@"break",
- .breakvoid,
+ .break_void,
.condbr,
- .compileerror,
+ .compile_error,
.@"return",
- .returnvoid,
- .unreach_nocheck,
- .@"unreachable",
+ .return_void,
+ .unreachable_unsafe,
+ .unreachable_safe,
.loop,
- .switchbr,
.container_field_named,
.container_field_typed,
.container_field,
@@ -717,7 +706,7 @@ pub const Inst = struct {
};
pub const BreakVoid = struct {
- pub const base_tag = Tag.breakvoid;
+ pub const base_tag = Tag.break_void;
base: Inst,
positionals: struct {
@@ -739,19 +728,8 @@ pub const Inst = struct {
},
};
- pub const CoerceToPtrElem = struct {
- pub const base_tag = Tag.coerce_to_ptr_elem;
- base: Inst,
-
- positionals: struct {
- ptr: *Inst,
- value: *Inst,
- },
- kw_args: struct {},
- };
-
pub const DeclRef = struct {
- pub const base_tag = Tag.declref;
+ pub const base_tag = Tag.decl_ref;
base: Inst,
positionals: struct {
@@ -761,7 +739,7 @@ pub const Inst = struct {
};
pub const DeclRefStr = struct {
- pub const base_tag = Tag.declref_str;
+ pub const base_tag = Tag.decl_ref_str;
base: Inst,
positionals: struct {
@@ -771,7 +749,7 @@ pub const Inst = struct {
};
pub const DeclVal = struct {
- pub const base_tag = Tag.declval;
+ pub const base_tag = Tag.decl_val;
base: Inst,
positionals: struct {
@@ -780,19 +758,8 @@ pub const Inst = struct {
kw_args: struct {},
};
- pub const CoerceResultBlockPtr = struct {
- pub const base_tag = Tag.coerce_result_block_ptr;
- base: Inst,
-
- positionals: struct {
- dest_type: *Inst,
- block: *Block,
- },
- kw_args: struct {},
- };
-
pub const CompileLog = struct {
- pub const base_tag = Tag.compilelog;
+ pub const base_tag = Tag.compile_log;
base: Inst,
positionals: struct {
@@ -905,7 +872,7 @@ pub const Inst = struct {
};
pub const IntType = struct {
- pub const base_tag = Tag.inttype;
+ pub const base_tag = Tag.int_type;
base: Inst,
positionals: struct {
@@ -1114,32 +1081,6 @@ pub const Inst = struct {
},
};
- pub const SwitchBr = struct {
- pub const base_tag = Tag.switchbr;
- base: Inst,
-
- positionals: struct {
- target_ptr: *Inst,
- /// List of all individual items and ranges
- items: []*Inst,
- cases: []Case,
- else_body: Body,
- },
- kw_args: struct {
- /// Pointer to first range if such exists.
- range: ?*Inst = null,
- special_prong: enum {
- none,
- @"else",
- underscore,
- } = .none,
- },
-
- pub const Case = struct {
- item: *Inst,
- body: Body,
- };
- };
pub const TypeOfPeer = struct {
pub const base_tag = .typeof_peer;
base: Inst,
@@ -1473,7 +1414,7 @@ const Writer = struct {
TypedValue => return stream.print("TypedValue{{ .ty = {}, .val = {}}}", .{ param.ty, param.val }),
*IrModule.Decl => return stream.print("Decl({s})", .{param.name}),
*Inst.Block => {
- const name = self.block_table.get(param).?;
+ const name = self.block_table.get(param) orelse "!BADREF!";
return stream.print("\"{}\"", .{std.zig.fmtEscapes(name)});
},
*Inst.Loop => {
@@ -1490,26 +1431,6 @@ const Writer = struct {
}
try stream.writeByte(']');
},
- []Inst.SwitchBr.Case => {
- if (param.len == 0) {
- return stream.writeAll("{}");
- }
- try stream.writeAll("{\n");
- for (param) |*case, i| {
- if (i != 0) {
- try stream.writeAll(",\n");
- }
- try stream.writeByteNTimes(' ', self.indent);
- self.indent += 2;
- try self.writeParamToStream(stream, &case.item);
- try stream.writeAll(" => ");
- try self.writeParamToStream(stream, &case.body);
- self.indent -= 2;
- }
- try stream.writeByte('\n');
- try stream.writeByteNTimes(' ', self.indent - 2);
- try stream.writeByte('}');
- },
else => |T| @compileError("unimplemented: rendering parameter of type " ++ @typeName(T)),
}
}
@@ -1641,10 +1562,10 @@ const DumpTzir = struct {
.cmp_gt,
.cmp_neq,
.store,
- .booland,
- .boolor,
- .bitand,
- .bitor,
+ .bool_and,
+ .bool_or,
+ .bit_and,
+ .bit_or,
.xor,
=> {
const bin_op = inst.cast(ir.Inst.BinOp).?;
@@ -1660,9 +1581,15 @@ const DumpTzir = struct {
try dtz.findConst(br.operand);
},
- .brvoid => {
- const brvoid = inst.castTag(.brvoid).?;
- try dtz.findConst(&brvoid.block.base);
+ .br_block_flat => {
+ const br_block_flat = inst.castTag(.br_block_flat).?;
+ try dtz.findConst(&br_block_flat.block.base);
+ try dtz.fetchInstsAndResolveConsts(br_block_flat.body);
+ },
+
+ .br_void => {
+ const br_void = inst.castTag(.br_void).?;
+ try dtz.findConst(&br_void.block.base);
},
.block => {
@@ -1753,10 +1680,10 @@ const DumpTzir = struct {
.cmp_gt,
.cmp_neq,
.store,
- .booland,
- .boolor,
- .bitand,
- .bitor,
+ .bool_and,
+ .bool_or,
+ .bit_and,
+ .bit_or,
.xor,
=> {
const bin_op = inst.cast(ir.Inst.BinOp).?;
@@ -1805,9 +1732,27 @@ const DumpTzir = struct {
}
},
- .brvoid => {
- const brvoid = inst.castTag(.brvoid).?;
- const kinky = try dtz.writeInst(writer, &brvoid.block.base);
+ .br_block_flat => {
+ const br_block_flat = inst.castTag(.br_block_flat).?;
+ const block_kinky = try dtz.writeInst(writer, &br_block_flat.block.base);
+ if (block_kinky != null) {
+ try writer.writeAll(", { // Instruction does not dominate all uses!\n");
+ } else {
+ try writer.writeAll(", {\n");
+ }
+
+ const old_indent = dtz.indent;
+ dtz.indent += 2;
+ try dtz.dumpBody(br_block_flat.body, writer);
+ dtz.indent = old_indent;
+
+ try writer.writeByteNTimes(' ', dtz.indent);
+ try writer.writeAll("})\n");
+ },
+
+ .br_void => {
+ const br_void = inst.castTag(.br_void).?;
+ const kinky = try dtz.writeInst(writer, &br_void.block.base);
if (kinky) |_| {
try writer.writeAll(") // Instruction does not dominate all uses!\n");
} else {
@@ -1818,7 +1763,7 @@ const DumpTzir = struct {
.block => {
const block = inst.castTag(.block).?;
- try writer.writeAll("\n");
+ try writer.writeAll("{\n");
const old_indent = dtz.indent;
dtz.indent += 2;
@@ -1826,7 +1771,7 @@ const DumpTzir = struct {
dtz.indent = old_indent;
try writer.writeByteNTimes(' ', dtz.indent);
- try writer.writeAll(")\n");
+ try writer.writeAll("})\n");
},
.condbr => {
@@ -1856,7 +1801,7 @@ const DumpTzir = struct {
.loop => {
const loop = inst.castTag(.loop).?;
- try writer.writeAll("\n");
+ try writer.writeAll("{\n");
const old_indent = dtz.indent;
dtz.indent += 2;
@@ -1864,7 +1809,7 @@ const DumpTzir = struct {
dtz.indent = old_indent;
try writer.writeByteNTimes(' ', dtz.indent);
- try writer.writeAll(")\n");
+ try writer.writeAll("})\n");
},
.call => {
diff --git a/src/zir_sema.zig b/src/zir_sema.zig
index 0caaa2a03f..301b95ad97 100644
--- a/src/zir_sema.zig
+++ b/src/zir_sema.zig
@@ -28,144 +28,132 @@ const Decl = Module.Decl;
pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*Inst {
switch (old_inst.tag) {
- .alloc => return analyzeInstAlloc(mod, scope, old_inst.castTag(.alloc).?),
- .alloc_mut => return analyzeInstAllocMut(mod, scope, old_inst.castTag(.alloc_mut).?),
- .alloc_inferred => return analyzeInstAllocInferred(
- mod,
- scope,
- old_inst.castTag(.alloc_inferred).?,
- .inferred_alloc_const,
- ),
- .alloc_inferred_mut => return analyzeInstAllocInferred(
- mod,
- scope,
- old_inst.castTag(.alloc_inferred_mut).?,
- .inferred_alloc_mut,
- ),
- .arg => return analyzeInstArg(mod, scope, old_inst.castTag(.arg).?),
- .bitcast_ref => return bitCastRef(mod, scope, old_inst.castTag(.bitcast_ref).?),
- .bitcast_result_ptr => return bitCastResultPtr(mod, scope, old_inst.castTag(.bitcast_result_ptr).?),
- .block => return analyzeInstBlock(mod, scope, old_inst.castTag(.block).?, false),
- .block_comptime => return analyzeInstBlock(mod, scope, old_inst.castTag(.block_comptime).?, true),
- .block_flat => return analyzeInstBlockFlat(mod, scope, old_inst.castTag(.block_flat).?, false),
- .block_comptime_flat => return analyzeInstBlockFlat(mod, scope, old_inst.castTag(.block_comptime_flat).?, true),
- .@"break" => return analyzeInstBreak(mod, scope, old_inst.castTag(.@"break").?),
- .breakpoint => return analyzeInstBreakpoint(mod, scope, old_inst.castTag(.breakpoint).?),
- .breakvoid => return analyzeInstBreakVoid(mod, scope, old_inst.castTag(.breakvoid).?),
- .call => return call(mod, scope, old_inst.castTag(.call).?),
- .coerce_result_block_ptr => return analyzeInstCoerceResultBlockPtr(mod, scope, old_inst.castTag(.coerce_result_block_ptr).?),
- .coerce_result_ptr => return analyzeInstCoerceResultPtr(mod, scope, old_inst.castTag(.coerce_result_ptr).?),
- .coerce_to_ptr_elem => return analyzeInstCoerceToPtrElem(mod, scope, old_inst.castTag(.coerce_to_ptr_elem).?),
- .compileerror => return analyzeInstCompileError(mod, scope, old_inst.castTag(.compileerror).?),
- .compilelog => return analyzeInstCompileLog(mod, scope, old_inst.castTag(.compilelog).?),
- .@"const" => return analyzeInstConst(mod, scope, old_inst.castTag(.@"const").?),
- .dbg_stmt => return analyzeInstDbgStmt(mod, scope, old_inst.castTag(.dbg_stmt).?),
- .declref => return declRef(mod, scope, old_inst.castTag(.declref).?),
- .declref_str => return analyzeInstDeclRefStr(mod, scope, old_inst.castTag(.declref_str).?),
- .declval => return declVal(mod, scope, old_inst.castTag(.declval).?),
- .ensure_result_used => return analyzeInstEnsureResultUsed(mod, scope, old_inst.castTag(.ensure_result_used).?),
- .ensure_result_non_error => return analyzeInstEnsureResultNonError(mod, scope, old_inst.castTag(.ensure_result_non_error).?),
- .indexable_ptr_len => return indexablePtrLen(mod, scope, old_inst.castTag(.indexable_ptr_len).?),
- .ref => return ref(mod, scope, old_inst.castTag(.ref).?),
- .resolve_inferred_alloc => return analyzeInstResolveInferredAlloc(mod, scope, old_inst.castTag(.resolve_inferred_alloc).?),
- .ret_ptr => return analyzeInstRetPtr(mod, scope, old_inst.castTag(.ret_ptr).?),
- .ret_type => return analyzeInstRetType(mod, scope, old_inst.castTag(.ret_type).?),
- .store_to_inferred_ptr => return analyzeInstStoreToInferredPtr(mod, scope, old_inst.castTag(.store_to_inferred_ptr).?),
- .single_const_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.single_const_ptr_type).?, false, .One),
- .single_mut_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.single_mut_ptr_type).?, true, .One),
- .many_const_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.many_const_ptr_type).?, false, .Many),
- .many_mut_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.many_mut_ptr_type).?, true, .Many),
- .c_const_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.c_const_ptr_type).?, false, .C),
- .c_mut_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.c_mut_ptr_type).?, true, .C),
- .const_slice_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.const_slice_type).?, false, .Slice),
- .mut_slice_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.mut_slice_type).?, true, .Slice),
- .ptr_type => return analyzeInstPtrType(mod, scope, old_inst.castTag(.ptr_type).?),
- .store => return analyzeInstStore(mod, scope, old_inst.castTag(.store).?),
- .set_eval_branch_quota => return analyzeInstSetEvalBranchQuota(mod, scope, old_inst.castTag(.set_eval_branch_quota).?),
- .str => return analyzeInstStr(mod, scope, old_inst.castTag(.str).?),
- .int => return analyzeInstInt(mod, scope, old_inst.castTag(.int).?),
- .inttype => return analyzeInstIntType(mod, scope, old_inst.castTag(.inttype).?),
- .loop => return analyzeInstLoop(mod, scope, old_inst.castTag(.loop).?),
- .param_type => return analyzeInstParamType(mod, scope, old_inst.castTag(.param_type).?),
- .ptrtoint => return analyzeInstPtrToInt(mod, scope, old_inst.castTag(.ptrtoint).?),
- .field_ptr => return fieldPtr(mod, scope, old_inst.castTag(.field_ptr).?),
- .field_val => return fieldVal(mod, scope, old_inst.castTag(.field_val).?),
- .field_ptr_named => return fieldPtrNamed(mod, scope, old_inst.castTag(.field_ptr_named).?),
- .field_val_named => return fieldValNamed(mod, scope, old_inst.castTag(.field_val_named).?),
- .deref => return analyzeInstDeref(mod, scope, old_inst.castTag(.deref).?),
- .as => return analyzeInstAs(mod, scope, old_inst.castTag(.as).?),
- .@"asm" => return analyzeInstAsm(mod, scope, old_inst.castTag(.@"asm").?),
- .@"unreachable" => return analyzeInstUnreachable(mod, scope, old_inst.castTag(.@"unreachable").?, true),
- .unreach_nocheck => return analyzeInstUnreachable(mod, scope, old_inst.castTag(.unreach_nocheck).?, false),
- .@"return" => return analyzeInstRet(mod, scope, old_inst.castTag(.@"return").?),
- .returnvoid => return analyzeInstRetVoid(mod, scope, old_inst.castTag(.returnvoid).?),
- .@"fn" => return analyzeInstFn(mod, scope, old_inst.castTag(.@"fn").?),
- .@"export" => return analyzeInstExport(mod, scope, old_inst.castTag(.@"export").?),
- .primitive => return analyzeInstPrimitive(mod, scope, old_inst.castTag(.primitive).?),
- .fntype => return analyzeInstFnType(mod, scope, old_inst.castTag(.fntype).?),
- .intcast => return analyzeInstIntCast(mod, scope, old_inst.castTag(.intcast).?),
- .bitcast => return analyzeInstBitCast(mod, scope, old_inst.castTag(.bitcast).?),
- .floatcast => return analyzeInstFloatCast(mod, scope, old_inst.castTag(.floatcast).?),
- .elem_ptr => return elemPtr(mod, scope, old_inst.castTag(.elem_ptr).?),
- .elem_val => return elemVal(mod, scope, old_inst.castTag(.elem_val).?),
- .add => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.add).?),
- .addwrap => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.addwrap).?),
- .sub => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.sub).?),
- .subwrap => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.subwrap).?),
- .mul => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.mul).?),
- .mulwrap => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.mulwrap).?),
- .div => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.div).?),
- .mod_rem => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.mod_rem).?),
- .array_cat => return analyzeInstArrayCat(mod, scope, old_inst.castTag(.array_cat).?),
- .array_mul => return analyzeInstArrayMul(mod, scope, old_inst.castTag(.array_mul).?),
- .bitand => return analyzeInstBitwise(mod, scope, old_inst.castTag(.bitand).?),
- .bitnot => return analyzeInstBitNot(mod, scope, old_inst.castTag(.bitnot).?),
- .bitor => return analyzeInstBitwise(mod, scope, old_inst.castTag(.bitor).?),
- .xor => return analyzeInstBitwise(mod, scope, old_inst.castTag(.xor).?),
- .shl => return analyzeInstShl(mod, scope, old_inst.castTag(.shl).?),
- .shr => return analyzeInstShr(mod, scope, old_inst.castTag(.shr).?),
- .cmp_lt => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_lt).?, .lt),
- .cmp_lte => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_lte).?, .lte),
- .cmp_eq => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_eq).?, .eq),
- .cmp_gte => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_gte).?, .gte),
- .cmp_gt => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_gt).?, .gt),
- .cmp_neq => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_neq).?, .neq),
- .condbr => return analyzeInstCondBr(mod, scope, old_inst.castTag(.condbr).?),
- .is_null => return isNull(mod, scope, old_inst.castTag(.is_null).?, false),
- .is_non_null => return isNull(mod, scope, old_inst.castTag(.is_non_null).?, true),
- .is_null_ptr => return isNullPtr(mod, scope, old_inst.castTag(.is_null_ptr).?, false),
- .is_non_null_ptr => return isNullPtr(mod, scope, old_inst.castTag(.is_non_null_ptr).?, true),
- .is_err => return isErr(mod, scope, old_inst.castTag(.is_err).?),
- .is_err_ptr => return isErrPtr(mod, scope, old_inst.castTag(.is_err_ptr).?),
- .boolnot => return analyzeInstBoolNot(mod, scope, old_inst.castTag(.boolnot).?),
- .typeof => return analyzeInstTypeOf(mod, scope, old_inst.castTag(.typeof).?),
- .typeof_peer => return analyzeInstTypeOfPeer(mod, scope, old_inst.castTag(.typeof_peer).?),
- .optional_type => return analyzeInstOptionalType(mod, scope, old_inst.castTag(.optional_type).?),
- .optional_payload_safe => return optionalPayload(mod, scope, old_inst.castTag(.optional_payload_safe).?, true),
- .optional_payload_unsafe => return optionalPayload(mod, scope, old_inst.castTag(.optional_payload_unsafe).?, false),
- .optional_payload_safe_ptr => return optionalPayloadPtr(mod, scope, old_inst.castTag(.optional_payload_safe_ptr).?, true),
- .optional_payload_unsafe_ptr => return optionalPayloadPtr(mod, scope, old_inst.castTag(.optional_payload_unsafe_ptr).?, false),
- .err_union_payload_safe => return errorUnionPayload(mod, scope, old_inst.castTag(.err_union_payload_safe).?, true),
- .err_union_payload_unsafe => return errorUnionPayload(mod, scope, old_inst.castTag(.err_union_payload_unsafe).?, false),
- .err_union_payload_safe_ptr => return errorUnionPayloadPtr(mod, scope, old_inst.castTag(.err_union_payload_safe_ptr).?, true),
- .err_union_payload_unsafe_ptr => return errorUnionPayloadPtr(mod, scope, old_inst.castTag(.err_union_payload_unsafe_ptr).?, false),
- .err_union_code => return errorUnionCode(mod, scope, old_inst.castTag(.err_union_code).?),
- .err_union_code_ptr => return errorUnionCodePtr(mod, scope, old_inst.castTag(.err_union_code_ptr).?),
- .ensure_err_payload_void => return analyzeInstEnsureErrPayloadVoid(mod, scope, old_inst.castTag(.ensure_err_payload_void).?),
- .array_type => return analyzeInstArrayType(mod, scope, old_inst.castTag(.array_type).?),
- .array_type_sentinel => return analyzeInstArrayTypeSentinel(mod, scope, old_inst.castTag(.array_type_sentinel).?),
- .enum_literal => return analyzeInstEnumLiteral(mod, scope, old_inst.castTag(.enum_literal).?),
- .merge_error_sets => return analyzeInstMergeErrorSets(mod, scope, old_inst.castTag(.merge_error_sets).?),
- .error_union_type => return analyzeInstErrorUnionType(mod, scope, old_inst.castTag(.error_union_type).?),
- .anyframe_type => return analyzeInstAnyframeType(mod, scope, old_inst.castTag(.anyframe_type).?),
- .error_set => return analyzeInstErrorSet(mod, scope, old_inst.castTag(.error_set).?),
- .slice => return analyzeInstSlice(mod, scope, old_inst.castTag(.slice).?),
- .slice_start => return analyzeInstSliceStart(mod, scope, old_inst.castTag(.slice_start).?),
- .import => return analyzeInstImport(mod, scope, old_inst.castTag(.import).?),
- .switchbr => return analyzeInstSwitchBr(mod, scope, old_inst.castTag(.switchbr).?),
- .switch_range => return analyzeInstSwitchRange(mod, scope, old_inst.castTag(.switch_range).?),
- .booland => return analyzeInstBoolOp(mod, scope, old_inst.castTag(.booland).?),
- .boolor => return analyzeInstBoolOp(mod, scope, old_inst.castTag(.boolor).?),
+ .alloc => return zirAlloc(mod, scope, old_inst.castTag(.alloc).?),
+ .alloc_mut => return zirAllocMut(mod, scope, old_inst.castTag(.alloc_mut).?),
+ .alloc_inferred => return zirAllocInferred(mod, scope, old_inst.castTag(.alloc_inferred).?, .inferred_alloc_const),
+ .alloc_inferred_mut => return zirAllocInferred(mod, scope, old_inst.castTag(.alloc_inferred_mut).?, .inferred_alloc_mut),
+ .arg => return zirArg(mod, scope, old_inst.castTag(.arg).?),
+ .bitcast_ref => return zirBitcastRef(mod, scope, old_inst.castTag(.bitcast_ref).?),
+ .bitcast_result_ptr => return zirBitcastResultPtr(mod, scope, old_inst.castTag(.bitcast_result_ptr).?),
+ .block => return zirBlock(mod, scope, old_inst.castTag(.block).?, false),
+ .block_comptime => return zirBlock(mod, scope, old_inst.castTag(.block_comptime).?, true),
+ .block_flat => return zirBlockFlat(mod, scope, old_inst.castTag(.block_flat).?, false),
+ .block_comptime_flat => return zirBlockFlat(mod, scope, old_inst.castTag(.block_comptime_flat).?, true),
+ .@"break" => return zirBreak(mod, scope, old_inst.castTag(.@"break").?),
+ .breakpoint => return zirBreakpoint(mod, scope, old_inst.castTag(.breakpoint).?),
+ .break_void => return zirBreakVoid(mod, scope, old_inst.castTag(.break_void).?),
+ .call => return zirCall(mod, scope, old_inst.castTag(.call).?),
+ .coerce_result_ptr => return zirCoerceResultPtr(mod, scope, old_inst.castTag(.coerce_result_ptr).?),
+ .compile_error => return zirCompileError(mod, scope, old_inst.castTag(.compile_error).?),
+ .compile_log => return zirCompileLog(mod, scope, old_inst.castTag(.compile_log).?),
+ .@"const" => return zirConst(mod, scope, old_inst.castTag(.@"const").?),
+ .dbg_stmt => return zirDbgStmt(mod, scope, old_inst.castTag(.dbg_stmt).?),
+ .decl_ref => return zirDeclRef(mod, scope, old_inst.castTag(.decl_ref).?),
+ .decl_ref_str => return zirDeclRefStr(mod, scope, old_inst.castTag(.decl_ref_str).?),
+ .decl_val => return zirDeclVal(mod, scope, old_inst.castTag(.decl_val).?),
+ .ensure_result_used => return zirEnsureResultUsed(mod, scope, old_inst.castTag(.ensure_result_used).?),
+ .ensure_result_non_error => return zirEnsureResultNonError(mod, scope, old_inst.castTag(.ensure_result_non_error).?),
+ .indexable_ptr_len => return zirIndexablePtrLen(mod, scope, old_inst.castTag(.indexable_ptr_len).?),
+ .ref => return zirRef(mod, scope, old_inst.castTag(.ref).?),
+ .resolve_inferred_alloc => return zirResolveInferredAlloc(mod, scope, old_inst.castTag(.resolve_inferred_alloc).?),
+ .ret_ptr => return zirRetPtr(mod, scope, old_inst.castTag(.ret_ptr).?),
+ .ret_type => return zirRetType(mod, scope, old_inst.castTag(.ret_type).?),
+ .store_to_block_ptr => return zirStoreToBlockPtr(mod, scope, old_inst.castTag(.store_to_block_ptr).?),
+ .store_to_inferred_ptr => return zirStoreToInferredPtr(mod, scope, old_inst.castTag(.store_to_inferred_ptr).?),
+ .single_const_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.single_const_ptr_type).?, false, .One),
+ .single_mut_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.single_mut_ptr_type).?, true, .One),
+ .many_const_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.many_const_ptr_type).?, false, .Many),
+ .many_mut_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.many_mut_ptr_type).?, true, .Many),
+ .c_const_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.c_const_ptr_type).?, false, .C),
+ .c_mut_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.c_mut_ptr_type).?, true, .C),
+ .const_slice_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.const_slice_type).?, false, .Slice),
+ .mut_slice_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.mut_slice_type).?, true, .Slice),
+ .ptr_type => return zirPtrType(mod, scope, old_inst.castTag(.ptr_type).?),
+ .store => return zirStore(mod, scope, old_inst.castTag(.store).?),
+ .set_eval_branch_quota => return zirSetEvalBranchQuota(mod, scope, old_inst.castTag(.set_eval_branch_quota).?),
+ .str => return zirStr(mod, scope, old_inst.castTag(.str).?),
+ .int => return zirInt(mod, scope, old_inst.castTag(.int).?),
+ .int_type => return zirIntType(mod, scope, old_inst.castTag(.int_type).?),
+ .loop => return zirLoop(mod, scope, old_inst.castTag(.loop).?),
+ .param_type => return zirParamType(mod, scope, old_inst.castTag(.param_type).?),
+ .ptrtoint => return zirPtrtoint(mod, scope, old_inst.castTag(.ptrtoint).?),
+ .field_ptr => return zirFieldPtr(mod, scope, old_inst.castTag(.field_ptr).?),
+ .field_val => return zirFieldVal(mod, scope, old_inst.castTag(.field_val).?),
+ .field_ptr_named => return zirFieldPtrNamed(mod, scope, old_inst.castTag(.field_ptr_named).?),
+ .field_val_named => return zirFieldValNamed(mod, scope, old_inst.castTag(.field_val_named).?),
+ .deref => return zirDeref(mod, scope, old_inst.castTag(.deref).?),
+ .as => return zirAs(mod, scope, old_inst.castTag(.as).?),
+ .@"asm" => return zirAsm(mod, scope, old_inst.castTag(.@"asm").?),
+ .unreachable_safe => return zirUnreachable(mod, scope, old_inst.castTag(.unreachable_safe).?, true),
+ .unreachable_unsafe => return zirUnreachable(mod, scope, old_inst.castTag(.unreachable_unsafe).?, false),
+ .@"return" => return zirReturn(mod, scope, old_inst.castTag(.@"return").?),
+ .return_void => return zirReturnVoid(mod, scope, old_inst.castTag(.return_void).?),
+ .@"fn" => return zirFn(mod, scope, old_inst.castTag(.@"fn").?),
+ .@"export" => return zirExport(mod, scope, old_inst.castTag(.@"export").?),
+ .primitive => return zirPrimitive(mod, scope, old_inst.castTag(.primitive).?),
+ .fntype => return zirFnType(mod, scope, old_inst.castTag(.fntype).?),
+ .intcast => return zirIntcast(mod, scope, old_inst.castTag(.intcast).?),
+ .bitcast => return zirBitcast(mod, scope, old_inst.castTag(.bitcast).?),
+ .floatcast => return zirFloatcast(mod, scope, old_inst.castTag(.floatcast).?),
+ .elem_ptr => return zirElemPtr(mod, scope, old_inst.castTag(.elem_ptr).?),
+ .elem_val => return zirElemVal(mod, scope, old_inst.castTag(.elem_val).?),
+ .add => return zirArithmetic(mod, scope, old_inst.castTag(.add).?),
+ .addwrap => return zirArithmetic(mod, scope, old_inst.castTag(.addwrap).?),
+ .sub => return zirArithmetic(mod, scope, old_inst.castTag(.sub).?),
+ .subwrap => return zirArithmetic(mod, scope, old_inst.castTag(.subwrap).?),
+ .mul => return zirArithmetic(mod, scope, old_inst.castTag(.mul).?),
+ .mulwrap => return zirArithmetic(mod, scope, old_inst.castTag(.mulwrap).?),
+ .div => return zirArithmetic(mod, scope, old_inst.castTag(.div).?),
+ .mod_rem => return zirArithmetic(mod, scope, old_inst.castTag(.mod_rem).?),
+ .array_cat => return zirArrayCat(mod, scope, old_inst.castTag(.array_cat).?),
+ .array_mul => return zirArrayMul(mod, scope, old_inst.castTag(.array_mul).?),
+ .bit_and => return zirBitwise(mod, scope, old_inst.castTag(.bit_and).?),
+ .bit_not => return zirBitNot(mod, scope, old_inst.castTag(.bit_not).?),
+ .bit_or => return zirBitwise(mod, scope, old_inst.castTag(.bit_or).?),
+ .xor => return zirBitwise(mod, scope, old_inst.castTag(.xor).?),
+ .shl => return zirShl(mod, scope, old_inst.castTag(.shl).?),
+ .shr => return zirShr(mod, scope, old_inst.castTag(.shr).?),
+ .cmp_lt => return zirCmp(mod, scope, old_inst.castTag(.cmp_lt).?, .lt),
+ .cmp_lte => return zirCmp(mod, scope, old_inst.castTag(.cmp_lte).?, .lte),
+ .cmp_eq => return zirCmp(mod, scope, old_inst.castTag(.cmp_eq).?, .eq),
+ .cmp_gte => return zirCmp(mod, scope, old_inst.castTag(.cmp_gte).?, .gte),
+ .cmp_gt => return zirCmp(mod, scope, old_inst.castTag(.cmp_gt).?, .gt),
+ .cmp_neq => return zirCmp(mod, scope, old_inst.castTag(.cmp_neq).?, .neq),
+ .condbr => return zirCondbr(mod, scope, old_inst.castTag(.condbr).?),
+ .is_null => return zirIsNull(mod, scope, old_inst.castTag(.is_null).?, false),
+ .is_non_null => return zirIsNull(mod, scope, old_inst.castTag(.is_non_null).?, true),
+ .is_null_ptr => return zirIsNullPtr(mod, scope, old_inst.castTag(.is_null_ptr).?, false),
+ .is_non_null_ptr => return zirIsNullPtr(mod, scope, old_inst.castTag(.is_non_null_ptr).?, true),
+ .is_err => return zirIsErr(mod, scope, old_inst.castTag(.is_err).?),
+ .is_err_ptr => return zirIsErrPtr(mod, scope, old_inst.castTag(.is_err_ptr).?),
+ .bool_not => return zirBoolNot(mod, scope, old_inst.castTag(.bool_not).?),
+ .typeof => return zirTypeof(mod, scope, old_inst.castTag(.typeof).?),
+ .typeof_peer => return zirTypeofPeer(mod, scope, old_inst.castTag(.typeof_peer).?),
+ .optional_type => return zirOptionalType(mod, scope, old_inst.castTag(.optional_type).?),
+ .optional_payload_safe => return zirOptionalPayload(mod, scope, old_inst.castTag(.optional_payload_safe).?, true),
+ .optional_payload_unsafe => return zirOptionalPayload(mod, scope, old_inst.castTag(.optional_payload_unsafe).?, false),
+ .optional_payload_safe_ptr => return zirOptionalPayloadPtr(mod, scope, old_inst.castTag(.optional_payload_safe_ptr).?, true),
+ .optional_payload_unsafe_ptr => return zirOptionalPayloadPtr(mod, scope, old_inst.castTag(.optional_payload_unsafe_ptr).?, false),
+ .err_union_payload_safe => return zirErrUnionPayload(mod, scope, old_inst.castTag(.err_union_payload_safe).?, true),
+ .err_union_payload_unsafe => return zirErrUnionPayload(mod, scope, old_inst.castTag(.err_union_payload_unsafe).?, false),
+ .err_union_payload_safe_ptr => return zirErrUnionPayloadPtr(mod, scope, old_inst.castTag(.err_union_payload_safe_ptr).?, true),
+ .err_union_payload_unsafe_ptr => return zirErrUnionPayloadPtr(mod, scope, old_inst.castTag(.err_union_payload_unsafe_ptr).?, false),
+ .err_union_code => return zirErrUnionCode(mod, scope, old_inst.castTag(.err_union_code).?),
+ .err_union_code_ptr => return zirErrUnionCodePtr(mod, scope, old_inst.castTag(.err_union_code_ptr).?),
+ .ensure_err_payload_void => return zirEnsureErrPayloadVoid(mod, scope, old_inst.castTag(.ensure_err_payload_void).?),
+ .array_type => return zirArrayType(mod, scope, old_inst.castTag(.array_type).?),
+ .array_type_sentinel => return zirArrayTypeSentinel(mod, scope, old_inst.castTag(.array_type_sentinel).?),
+ .enum_literal => return zirEnumLiteral(mod, scope, old_inst.castTag(.enum_literal).?),
+ .merge_error_sets => return zirMergeErrorSets(mod, scope, old_inst.castTag(.merge_error_sets).?),
+ .error_union_type => return zirErrorUnionType(mod, scope, old_inst.castTag(.error_union_type).?),
+ .anyframe_type => return zirAnyframeType(mod, scope, old_inst.castTag(.anyframe_type).?),
+ .error_set => return zirErrorSet(mod, scope, old_inst.castTag(.error_set).?),
+ .slice => return zirSlice(mod, scope, old_inst.castTag(.slice).?),
+ .slice_start => return zirSliceStart(mod, scope, old_inst.castTag(.slice_start).?),
+ .import => return zirImport(mod, scope, old_inst.castTag(.import).?),
+ .bool_and => return zirBoolOp(mod, scope, old_inst.castTag(.bool_and).?),
+ .bool_or => return zirBoolOp(mod, scope, old_inst.castTag(.bool_or).?),
+ .void_value => return mod.constVoid(scope, old_inst.src),
.container_field_named,
.container_field_typed,
@@ -258,7 +246,7 @@ pub fn resolveInstConst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerE
};
}
-fn analyzeInstConst(mod: *Module, scope: *Scope, const_inst: *zir.Inst.Const) InnerError!*Inst {
+fn zirConst(mod: *Module, scope: *Scope, const_inst: *zir.Inst.Const) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// Move the TypedValue from old memory to new memory. This allows freeing the ZIR instructions
@@ -275,44 +263,25 @@ fn analyzeConstInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError
};
}
-fn analyzeInstCoerceResultBlockPtr(
- mod: *Module,
- scope: *Scope,
- inst: *zir.Inst.CoerceResultBlockPtr,
-) InnerError!*Inst {
- const tracy = trace(@src());
- defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstCoerceResultBlockPtr", .{});
-}
-
-fn bitCastRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirBitcastRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement zir_sema.bitCastRef", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zir_sema.zirBitcastRef", .{});
}
-fn bitCastResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirBitcastResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement zir_sema.bitCastResultPtr", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zir_sema.zirBitcastResultPtr", .{});
}
-fn analyzeInstCoerceResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirCoerceResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstCoerceResultPtr", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirCoerceResultPtr", .{});
}
-/// Equivalent to `as(ptr_child_type(typeof(ptr)), value)`.
-fn analyzeInstCoerceToPtrElem(mod: *Module, scope: *Scope, inst: *zir.Inst.CoerceToPtrElem) InnerError!*Inst {
- const tracy = trace(@src());
- defer tracy.end();
- const ptr = try resolveInst(mod, scope, inst.positionals.ptr);
- const operand = try resolveInst(mod, scope, inst.positionals.value);
- return mod.coerce(scope, ptr.ty.elemType(), operand);
-}
-
-fn analyzeInstRetPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
+fn zirRetPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
@@ -322,7 +291,7 @@ fn analyzeInstRetPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerErr
return mod.addNoOp(b, inst.base.src, ptr_type, .alloc);
}
-fn ref(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -330,7 +299,7 @@ fn ref(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
return mod.analyzeRef(scope, inst.base.src, operand);
}
-fn analyzeInstRetType(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
+fn zirRetType(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
@@ -339,7 +308,7 @@ fn analyzeInstRetType(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerEr
return mod.constType(scope, inst.base.src, ret_type);
}
-fn analyzeInstEnsureResultUsed(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirEnsureResultUsed(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
@@ -349,7 +318,7 @@ fn analyzeInstEnsureResultUsed(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp
}
}
-fn analyzeInstEnsureResultNonError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirEnsureResultNonError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
@@ -359,7 +328,7 @@ fn analyzeInstEnsureResultNonError(mod: *Module, scope: *Scope, inst: *zir.Inst.
}
}
-fn indexablePtrLen(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirIndexablePtrLen(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -389,7 +358,7 @@ fn indexablePtrLen(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError
return mod.analyzeDeref(scope, inst.base.src, result_ptr, result_ptr.src);
}
-fn analyzeInstAlloc(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirAlloc(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const var_type = try resolveType(mod, scope, inst.positionals.operand);
@@ -398,7 +367,7 @@ fn analyzeInstAlloc(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerErro
return mod.addNoOp(b, inst.base.src, ptr_type, .alloc);
}
-fn analyzeInstAllocMut(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirAllocMut(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const var_type = try resolveType(mod, scope, inst.positionals.operand);
@@ -408,7 +377,7 @@ fn analyzeInstAllocMut(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerE
return mod.addNoOp(b, inst.base.src, ptr_type, .alloc);
}
-fn analyzeInstAllocInferred(
+fn zirAllocInferred(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.NoOp,
@@ -437,7 +406,7 @@ fn analyzeInstAllocInferred(
return result;
}
-fn analyzeInstResolveInferredAlloc(
+fn zirResolveInferredAlloc(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.UnOp,
@@ -466,28 +435,46 @@ fn analyzeInstResolveInferredAlloc(
return mod.constVoid(scope, inst.base.src);
}
-fn analyzeInstStoreToInferredPtr(
+fn zirStoreToBlockPtr(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.BinOp,
) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
+ const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
+ const value = try resolveInst(mod, scope, inst.positionals.rhs);
+ const ptr_ty = try mod.simplePtrType(scope, inst.base.src, value.ty, true, .One);
+ // TODO detect when this store should be done at compile-time. For example,
+ // if expressions should force it when the condition is compile-time known.
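+ // The block result pointer may not yet carry its final, peer-resolved type,
+ // so we bitcast it to a single-item pointer of the stored value's type first.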
+ const b = try mod.requireRuntimeBlock(scope, inst.base.src);
+ const bitcasted_ptr = try mod.addUnOp(b, inst.base.src, ptr_ty, .bitcast, ptr);
+ return mod.storePtr(scope, inst.base.src, bitcasted_ptr, value);
+}
+
+fn zirStoreToInferredPtr(
+ mod: *Module,
+ scope: *Scope,
+ inst: *zir.Inst.BinOp,
+) InnerError!*Inst {
+ const tracy = trace(@src());
+ defer tracy.end();
+
const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
const value = try resolveInst(mod, scope, inst.positionals.rhs);
const inferred_alloc = ptr.castTag(.constant).?.val.castTag(.inferred_alloc).?;
// Add the stored instruction to the set we will use to resolve peer types
// for the inferred allocation.
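+ // (Sketch: in `var x = if (c) a else b;` each branch stores through the
+ // inferred pointer, so both value types land in this list.)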
try inferred_alloc.data.stored_inst_list.append(scope.arena(), value);
- // Create a new alloc with exactly the type the pointer wants.
- // Later it gets cleaned up by aliasing the alloc we are supposed to be storing to.
+ // Create a runtime bitcast instruction with exactly the type the pointer wants.
const ptr_ty = try mod.simplePtrType(scope, inst.base.src, value.ty, true, .One);
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
const bitcasted_ptr = try mod.addUnOp(b, inst.base.src, ptr_ty, .bitcast, ptr);
return mod.storePtr(scope, inst.base.src, bitcasted_ptr, value);
}
-fn analyzeInstSetEvalBranchQuota(
+fn zirSetEvalBranchQuota(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.UnOp,
@@ -499,15 +486,16 @@ fn analyzeInstSetEvalBranchQuota(
return mod.constVoid(scope, inst.base.src);
}
-fn analyzeInstStore(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirStore(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
const value = try resolveInst(mod, scope, inst.positionals.rhs);
return mod.storePtr(scope, inst.base.src, ptr, value);
}
-fn analyzeInstParamType(mod: *Module, scope: *Scope, inst: *zir.Inst.ParamType) InnerError!*Inst {
+fn zirParamType(mod: *Module, scope: *Scope, inst: *zir.Inst.ParamType) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const fn_inst = try resolveInst(mod, scope, inst.positionals.func);
@@ -516,7 +504,7 @@ fn analyzeInstParamType(mod: *Module, scope: *Scope, inst: *zir.Inst.ParamType)
const fn_ty: Type = switch (fn_inst.ty.zigTypeTag()) {
.Fn => fn_inst.ty,
.BoundFn => {
- return mod.fail(scope, fn_inst.src, "TODO implement analyzeInstParamType for method call syntax", .{});
+ return mod.fail(scope, fn_inst.src, "TODO implement zirParamType for method call syntax", .{});
},
else => {
return mod.fail(scope, fn_inst.src, "expected function, found '{}'", .{fn_inst.ty});
@@ -538,7 +526,7 @@ fn analyzeInstParamType(mod: *Module, scope: *Scope, inst: *zir.Inst.ParamType)
return mod.constType(scope, inst.base.src, param_type);
}
-fn analyzeInstStr(mod: *Module, scope: *Scope, str_inst: *zir.Inst.Str) InnerError!*Inst {
+fn zirStr(mod: *Module, scope: *Scope, str_inst: *zir.Inst.Str) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// The bytes references memory inside the ZIR module, which can get deallocated
@@ -557,14 +545,14 @@ fn analyzeInstStr(mod: *Module, scope: *Scope, str_inst: *zir.Inst.Str) InnerErr
return mod.analyzeDeclRef(scope, str_inst.base.src, new_decl);
}
-fn analyzeInstInt(mod: *Module, scope: *Scope, inst: *zir.Inst.Int) InnerError!*Inst {
+fn zirInt(mod: *Module, scope: *Scope, inst: *zir.Inst.Int) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.constIntBig(scope, inst.base.src, Type.initTag(.comptime_int), inst.positionals.int);
}
-fn analyzeInstExport(mod: *Module, scope: *Scope, export_inst: *zir.Inst.Export) InnerError!*Inst {
+fn zirExport(mod: *Module, scope: *Scope, export_inst: *zir.Inst.Export) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const symbol_name = try resolveConstString(mod, scope, export_inst.positionals.symbol_name);
@@ -574,14 +562,14 @@ fn analyzeInstExport(mod: *Module, scope: *Scope, export_inst: *zir.Inst.Export)
return mod.constVoid(scope, export_inst.base.src);
}
-fn analyzeInstCompileError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirCompileError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const msg = try resolveConstString(mod, scope, inst.positionals.operand);
return mod.fail(scope, inst.base.src, "{s}", .{msg});
}
-fn analyzeInstCompileLog(mod: *Module, scope: *Scope, inst: *zir.Inst.CompileLog) InnerError!*Inst {
+fn zirCompileLog(mod: *Module, scope: *Scope, inst: *zir.Inst.CompileLog) InnerError!*Inst {
var managed = mod.compile_log_text.toManaged(mod.gpa);
defer mod.compile_log_text = managed.moveToUnmanaged();
const writer = managed.writer();
@@ -608,7 +596,7 @@ fn analyzeInstCompileLog(mod: *Module, scope: *Scope, inst: *zir.Inst.CompileLog
return mod.constVoid(scope, inst.base.src);
}
-fn analyzeInstArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
+fn zirArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
@@ -631,7 +619,7 @@ fn analyzeInstArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*
return mod.addArg(b, inst.base.src, param_type, name);
}
-fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError!*Inst {
+fn zirLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const parent_block = scope.cast(Scope.Block).?;
@@ -672,25 +660,14 @@ fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError
return &loop_inst.base;
}
-fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst {
+fn zirBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const parent_block = scope.cast(Scope.Block).?;
- var child_block: Scope.Block = .{
- .parent = parent_block,
- .inst_table = parent_block.inst_table,
- .func = parent_block.func,
- .owner_decl = parent_block.owner_decl,
- .src_decl = parent_block.src_decl,
- .instructions = .{},
- .arena = parent_block.arena,
- .label = null,
- .inlining = parent_block.inlining,
- .is_comptime = parent_block.is_comptime or is_comptime,
- .branch_quota = parent_block.branch_quota,
- };
+ var child_block = parent_block.makeSubBlock();
defer child_block.instructions.deinit(mod.gpa);
+ child_block.is_comptime = child_block.is_comptime or is_comptime;
try analyzeBody(mod, &child_block, inst.positionals.body);
@@ -704,9 +681,15 @@ fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_c
return resolveInst(mod, scope, last_zir_inst);
}
-fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst {
+fn zirBlock(
+ mod: *Module,
+ scope: *Scope,
+ inst: *zir.Inst.Block,
+ is_comptime: bool,
+) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
const parent_block = scope.cast(Scope.Block).?;
// Reserve space for a Block instruction so that generated Break instructions can
@@ -735,6 +718,7 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
.zir_block = inst,
.merges = .{
.results = .{},
+ .br_list = .{},
.block_inst = block_inst,
},
}),
@@ -746,6 +730,7 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_compt
defer child_block.instructions.deinit(mod.gpa);
defer merges.results.deinit(mod.gpa);
+ defer merges.br_list.deinit(mod.gpa);
try analyzeBody(mod, &child_block, inst.positionals.body);
@@ -779,49 +764,127 @@ fn analyzeBlockBody(
const last_inst = child_block.instructions.items[last_inst_index];
if (last_inst.breakBlock()) |br_block| {
if (br_block == merges.block_inst) {
- // No need for a block instruction. We can put the new instructions directly into the parent block.
- // Here we omit the break instruction.
+ // No need for a block instruction. We can put the new instructions directly
+ // into the parent block. Here we omit the break instruction.
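+ // (For example, a labeled block whose only exit is a single break at its end
+ // collapses into straight-line code in the parent block.)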
const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items[0..last_inst_index]);
try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
return merges.results.items[0];
}
}
}
- // It should be impossible to have the number of results be > 1 in a comptime scope.
- assert(!child_block.is_comptime); // We should have already got a compile error in the condbr condition.
+ // It is impossible to have the number of results be > 1 in a comptime scope.
+ assert(!child_block.is_comptime); // We should have already gotten a compile error in the condbr condition.
// Need to set the type and emit the Block instruction. This allows machine code generation
// to emit a jump instruction to after the block when it encounters the break.
try parent_block.instructions.append(mod.gpa, &merges.block_inst.base);
- merges.block_inst.base.ty = try mod.resolvePeerTypes(scope, merges.results.items);
- merges.block_inst.body = .{ .instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items) };
+ const resolved_ty = try mod.resolvePeerTypes(scope, merges.results.items);
+ merges.block_inst.base.ty = resolved_ty;
+ merges.block_inst.body = .{
+ .instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items),
+ };
+ // Now that the block has its type resolved, we need to go back into all the break
+ // instructions, and insert type coercion on the operands.
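+ // (Illustrative sketch: in `blk: { if (c) break :blk a; break :blk b; }` where
+ // `a` and `b` are runtime values of different integer types, the block resolves
+ // to the peer type and the narrower break operand is coerced here.)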
+ for (merges.br_list.items) |br| {
+ if (br.operand.ty.eql(resolved_ty)) {
+ // No type coercion needed.
+ continue;
+ }
+ var coerce_block = parent_block.makeSubBlock();
+ defer coerce_block.instructions.deinit(mod.gpa);
+ const coerced_operand = try mod.coerce(&coerce_block.base, resolved_ty, br.operand);
+ // If no instructions were produced, such as in the case of a coercion of a
+ // constant value to a new type, we can simply point the br operand to it.
+ if (coerce_block.instructions.items.len == 0) {
+ br.operand = coerced_operand;
+ continue;
+ }
+ assert(coerce_block.instructions.items[coerce_block.instructions.items.len - 1] == coerced_operand);
+ // Here we depend on the br instruction having been over-allocated (if necessary)
+ // inside analyzeBreak so that it can be converted into a br_block_flat instruction.
+ const br_src = br.base.src;
+ const br_ty = br.base.ty;
+ const br_block_flat = @ptrCast(*Inst.BrBlockFlat, br);
+ br_block_flat.* = .{
+ .base = .{
+ .src = br_src,
+ .ty = br_ty,
+ .tag = .br_block_flat,
+ },
+ .block = merges.block_inst,
+ .body = .{
+ .instructions = try parent_block.arena.dupe(*Inst, coerce_block.instructions.items),
+ },
+ };
+ }
return &merges.block_inst.base;
}
-fn analyzeInstBreakpoint(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
+fn zirBreakpoint(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
return mod.addNoOp(b, inst.base.src, Type.initTag(.void), .breakpoint);
}
-fn analyzeInstBreak(mod: *Module, scope: *Scope, inst: *zir.Inst.Break) InnerError!*Inst {
+fn zirBreak(mod: *Module, scope: *Scope, inst: *zir.Inst.Break) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
const operand = try resolveInst(mod, scope, inst.positionals.operand);
const block = inst.positionals.block;
return analyzeBreak(mod, scope, inst.base.src, block, operand);
}
-fn analyzeInstBreakVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.BreakVoid) InnerError!*Inst {
+fn zirBreakVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.BreakVoid) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
const block = inst.positionals.block;
const void_inst = try mod.constVoid(scope, inst.base.src);
return analyzeBreak(mod, scope, inst.base.src, block, void_inst);
}
-fn analyzeInstDbgStmt(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
+fn analyzeBreak(
+ mod: *Module,
+ scope: *Scope,
+ src: usize,
+ zir_block: *zir.Inst.Block,
+ operand: *Inst,
+) InnerError!*Inst {
+ var opt_block = scope.cast(Scope.Block);
+ while (opt_block) |block| {
+ if (block.label) |*label| {
+ if (label.zir_block == zir_block) {
+ const b = try mod.requireFunctionBlock(scope, src);
+ // Here we add a br instruction, but we over-allocate a little bit
+ // (if necessary) to make it possible to convert the instruction into
+ // a br_block_flat instruction later.
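+ // (Assumption: convertable_br_size covers the larger br_block_flat layout,
+ // so analyzeBlockBody can rewrite this br in place without moving it.)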
+ const br = @ptrCast(*Inst.Br, try b.arena.alignedAlloc(
+ u8,
+ Inst.convertable_br_align,
+ Inst.convertable_br_size,
+ ));
+ br.* = .{
+ .base = .{
+ .tag = .br,
+ .ty = Type.initTag(.noreturn),
+ .src = src,
+ },
+ .operand = operand,
+ .block = label.merges.block_inst,
+ };
+ try b.instructions.append(mod.gpa, &br.base);
+ try label.merges.results.append(mod.gpa, operand);
+ try label.merges.br_list.append(mod.gpa, br);
+ return &br.base;
+ }
+ }
+ opt_block = block.parent;
+ } else unreachable; // The target labeled block must be in the scope chain.
+}
+
+fn zirDbgStmt(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
if (scope.cast(Scope.Block)) |b| {
@@ -832,26 +895,26 @@ fn analyzeInstDbgStmt(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerEr
return mod.constVoid(scope, inst.base.src);
}
-fn analyzeInstDeclRefStr(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRefStr) InnerError!*Inst {
+fn zirDeclRefStr(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRefStr) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const decl_name = try resolveConstString(mod, scope, inst.positionals.name);
return mod.analyzeDeclRefByName(scope, inst.base.src, decl_name);
}
-fn declRef(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRef) InnerError!*Inst {
+fn zirDeclRef(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRef) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.analyzeDeclRef(scope, inst.base.src, inst.positionals.decl);
}
-fn declVal(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclVal) InnerError!*Inst {
+fn zirDeclVal(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclVal) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.analyzeDeclVal(scope, inst.base.src, inst.positionals.decl);
}
-fn call(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
+fn zirCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -965,6 +1028,7 @@ fn call(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
.casted_args = casted_args,
.merges = .{
.results = .{},
+ .br_list = .{},
.block_inst = block_inst,
},
};
@@ -989,6 +1053,7 @@ fn call(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
defer child_block.instructions.deinit(mod.gpa);
defer merges.results.deinit(mod.gpa);
+ defer merges.br_list.deinit(mod.gpa);
try mod.emitBackwardBranch(&child_block, inst.base.src);
@@ -1002,7 +1067,7 @@ fn call(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
return mod.addCall(b, inst.base.src, ret_type, func, casted_args);
}
-fn analyzeInstFn(mod: *Module, scope: *Scope, fn_inst: *zir.Inst.Fn) InnerError!*Inst {
+fn zirFn(mod: *Module, scope: *Scope, fn_inst: *zir.Inst.Fn) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const fn_type = try resolveType(mod, scope, fn_inst.positionals.fn_type);
@@ -1019,13 +1084,13 @@ fn analyzeInstFn(mod: *Module, scope: *Scope, fn_inst: *zir.Inst.Fn) InnerError!
});
}
-fn analyzeInstIntType(mod: *Module, scope: *Scope, inttype: *zir.Inst.IntType) InnerError!*Inst {
+fn zirIntType(mod: *Module, scope: *Scope, inttype: *zir.Inst.IntType) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.fail(scope, inttype.base.src, "TODO implement inttype", .{});
}
-fn analyzeInstOptionalType(mod: *Module, scope: *Scope, optional: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirOptionalType(mod: *Module, scope: *Scope, optional: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const child_type = try resolveType(mod, scope, optional.positionals.operand);
@@ -1033,7 +1098,7 @@ fn analyzeInstOptionalType(mod: *Module, scope: *Scope, optional: *zir.Inst.UnOp
return mod.constType(scope, optional.base.src, try mod.optionalType(scope, child_type));
}
-fn analyzeInstArrayType(mod: *Module, scope: *Scope, array: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirArrayType(mod: *Module, scope: *Scope, array: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// TODO these should be lazily evaluated
@@ -1043,7 +1108,7 @@ fn analyzeInstArrayType(mod: *Module, scope: *Scope, array: *zir.Inst.BinOp) Inn
return mod.constType(scope, array.base.src, try mod.arrayType(scope, len.val.toUnsignedInt(), null, elem_type));
}
-fn analyzeInstArrayTypeSentinel(mod: *Module, scope: *Scope, array: *zir.Inst.ArrayTypeSentinel) InnerError!*Inst {
+fn zirArrayTypeSentinel(mod: *Module, scope: *Scope, array: *zir.Inst.ArrayTypeSentinel) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// TODO these should be lazily evaluated
@@ -1054,7 +1119,7 @@ fn analyzeInstArrayTypeSentinel(mod: *Module, scope: *Scope, array: *zir.Inst.Ar
return mod.constType(scope, array.base.src, try mod.arrayType(scope, len.val.toUnsignedInt(), sentinel.val, elem_type));
}
-fn analyzeInstErrorUnionType(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirErrorUnionType(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const error_union = try resolveType(mod, scope, inst.positionals.lhs);
@@ -1067,7 +1132,7 @@ fn analyzeInstErrorUnionType(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp)
return mod.constType(scope, inst.base.src, try mod.errorUnionType(scope, error_union, payload));
}
-fn analyzeInstAnyframeType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirAnyframeType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const return_type = try resolveType(mod, scope, inst.positionals.operand);
@@ -1075,7 +1140,7 @@ fn analyzeInstAnyframeType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) In
return mod.constType(scope, inst.base.src, try mod.anyframeType(scope, return_type));
}
-fn analyzeInstErrorSet(mod: *Module, scope: *Scope, inst: *zir.Inst.ErrorSet) InnerError!*Inst {
+fn zirErrorSet(mod: *Module, scope: *Scope, inst: *zir.Inst.ErrorSet) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// The declarations arena will store the hashmap.
@@ -1107,13 +1172,13 @@ fn analyzeInstErrorSet(mod: *Module, scope: *Scope, inst: *zir.Inst.ErrorSet) In
return mod.analyzeDeclVal(scope, inst.base.src, new_decl);
}
-fn analyzeInstMergeErrorSets(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirMergeErrorSets(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.fail(scope, inst.base.src, "TODO implement merge_error_sets", .{});
}
-fn analyzeInstEnumLiteral(mod: *Module, scope: *Scope, inst: *zir.Inst.EnumLiteral) InnerError!*Inst {
+fn zirEnumLiteral(mod: *Module, scope: *Scope, inst: *zir.Inst.EnumLiteral) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const duped_name = try scope.arena().dupe(u8, inst.positionals.name);
@@ -1124,7 +1189,7 @@ fn analyzeInstEnumLiteral(mod: *Module, scope: *Scope, inst: *zir.Inst.EnumLiter
}
/// Pointer in, pointer out.
-fn optionalPayloadPtr(
+fn zirOptionalPayloadPtr(
mod: *Module,
scope: *Scope,
unwrap: *zir.Inst.UnOp,
@@ -1165,7 +1230,7 @@ fn optionalPayloadPtr(
}
/// Value in, value out.
-fn optionalPayload(
+fn zirOptionalPayload(
mod: *Module,
scope: *Scope,
unwrap: *zir.Inst.UnOp,
@@ -1201,40 +1266,40 @@ fn optionalPayload(
}
/// Value in, value out
-fn errorUnionPayload(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst {
+fn zirErrUnionPayload(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.errorUnionPayload", .{});
+ return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.zirErrUnionPayload", .{});
}
/// Pointer in, pointer out
-fn errorUnionPayloadPtr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst {
+fn zirErrUnionPayloadPtr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.errorUnionPayloadPtr", .{});
+ return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.zirErrUnionPayloadPtr", .{});
}
/// Value in, value out
-fn errorUnionCode(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirErrUnionCode(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.errorUnionCode", .{});
+ return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.zirErrUnionCode", .{});
}
/// Pointer in, value out
-fn errorUnionCodePtr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirErrUnionCodePtr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.errorUnionCodePtr", .{});
+ return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.zirErrUnionCodePtr", .{});
}
-fn analyzeInstEnsureErrPayloadVoid(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirEnsureErrPayloadVoid(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, unwrap.base.src, "TODO implement analyzeInstEnsureErrPayloadVoid", .{});
+ return mod.fail(scope, unwrap.base.src, "TODO implement zirEnsureErrPayloadVoid", .{});
}
-fn analyzeInstFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*Inst {
+fn zirFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const return_type = try resolveType(mod, scope, fntype.positionals.return_type);
@@ -1277,13 +1342,13 @@ fn analyzeInstFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) Inne
return mod.constType(scope, fntype.base.src, fn_ty);
}
-fn analyzeInstPrimitive(mod: *Module, scope: *Scope, primitive: *zir.Inst.Primitive) InnerError!*Inst {
+fn zirPrimitive(mod: *Module, scope: *Scope, primitive: *zir.Inst.Primitive) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.constInst(scope, primitive.base.src, primitive.positionals.tag.toTypedValue());
}
-fn analyzeInstAs(mod: *Module, scope: *Scope, as: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirAs(mod: *Module, scope: *Scope, as: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const dest_type = try resolveType(mod, scope, as.positionals.lhs);
@@ -1291,7 +1356,7 @@ fn analyzeInstAs(mod: *Module, scope: *Scope, as: *zir.Inst.BinOp) InnerError!*I
return mod.coerce(scope, dest_type, new_inst);
}
-fn analyzeInstPtrToInt(mod: *Module, scope: *Scope, ptrtoint: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirPtrtoint(mod: *Module, scope: *Scope, ptrtoint: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, ptrtoint.positionals.operand);
@@ -1304,7 +1369,7 @@ fn analyzeInstPtrToInt(mod: *Module, scope: *Scope, ptrtoint: *zir.Inst.UnOp) In
return mod.addUnOp(b, ptrtoint.base.src, ty, .ptrtoint, ptr);
}
-fn fieldVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst {
+fn zirFieldVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1315,7 +1380,7 @@ fn fieldVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst
return mod.analyzeDeref(scope, inst.base.src, result_ptr, result_ptr.src);
}
-fn fieldPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst {
+fn zirFieldPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1324,7 +1389,7 @@ fn fieldPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst
return mod.namedFieldPtr(scope, inst.base.src, object_ptr, field_name, inst.base.src);
}
-fn fieldValNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerError!*Inst {
+fn zirFieldValNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1336,7 +1401,7 @@ fn fieldValNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerE
return mod.analyzeDeref(scope, inst.base.src, result_ptr, result_ptr.src);
}
-fn fieldPtrNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerError!*Inst {
+fn zirFieldPtrNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1346,7 +1411,7 @@ fn fieldPtrNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerE
return mod.namedFieldPtr(scope, inst.base.src, object_ptr, field_name, fsrc);
}
-fn analyzeInstIntCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirIntcast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const dest_type = try resolveType(mod, scope, inst.positionals.lhs);
@@ -1384,7 +1449,7 @@ fn analyzeInstIntCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerE
return mod.fail(scope, inst.base.src, "TODO implement analyze widen or shorten int", .{});
}
-fn analyzeInstBitCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirBitcast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const dest_type = try resolveType(mod, scope, inst.positionals.lhs);
@@ -1392,7 +1457,7 @@ fn analyzeInstBitCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerE
return mod.bitcast(scope, dest_type, operand);
}
-fn analyzeInstFloatCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirFloatcast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const dest_type = try resolveType(mod, scope, inst.positionals.lhs);
@@ -1430,7 +1495,7 @@ fn analyzeInstFloatCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) Inne
return mod.fail(scope, inst.base.src, "TODO implement analyze widen or shorten float", .{});
}
-fn elemVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
+fn zirElemVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1441,7 +1506,7 @@ fn elemVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
return mod.analyzeDeref(scope, inst.base.src, result_ptr, result_ptr.src);
}
-fn elemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
+fn zirElemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1450,7 +1515,7 @@ fn elemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
return mod.elemPtr(scope, inst.base.src, array_ptr, elem_index);
}
-fn analyzeInstSlice(mod: *Module, scope: *Scope, inst: *zir.Inst.Slice) InnerError!*Inst {
+fn zirSlice(mod: *Module, scope: *Scope, inst: *zir.Inst.Slice) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const array_ptr = try resolveInst(mod, scope, inst.positionals.array_ptr);
@@ -1461,7 +1526,7 @@ fn analyzeInstSlice(mod: *Module, scope: *Scope, inst: *zir.Inst.Slice) InnerErr
return mod.analyzeSlice(scope, inst.base.src, array_ptr, start, end, sentinel);
}
-fn analyzeInstSliceStart(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirSliceStart(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const array_ptr = try resolveInst(mod, scope, inst.positionals.lhs);
@@ -1470,235 +1535,7 @@ fn analyzeInstSliceStart(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) Inn
return mod.analyzeSlice(scope, inst.base.src, array_ptr, start, null, null);
}
-fn analyzeInstSwitchRange(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
- const tracy = trace(@src());
- defer tracy.end();
- const start = try resolveInst(mod, scope, inst.positionals.lhs);
- const end = try resolveInst(mod, scope, inst.positionals.rhs);
-
- switch (start.ty.zigTypeTag()) {
- .Int, .ComptimeInt => {},
- else => return mod.constVoid(scope, inst.base.src),
- }
- switch (end.ty.zigTypeTag()) {
- .Int, .ComptimeInt => {},
- else => return mod.constVoid(scope, inst.base.src),
- }
- if (start.value()) |start_val| {
- if (end.value()) |end_val| {
- if (start_val.compare(.gte, end_val)) {
- return mod.fail(scope, inst.base.src, "range start value must be smaller than the end value", .{});
- }
- }
- }
- return mod.constVoid(scope, inst.base.src);
-}
-
-fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) InnerError!*Inst {
- const tracy = trace(@src());
- defer tracy.end();
- const target_ptr = try resolveInst(mod, scope, inst.positionals.target_ptr);
- const target = try mod.analyzeDeref(scope, inst.base.src, target_ptr, inst.positionals.target_ptr.src);
- try validateSwitch(mod, scope, target, inst);
-
- if (try mod.resolveDefinedValue(scope, target)) |target_val| {
- for (inst.positionals.cases) |case| {
- const resolved = try resolveInst(mod, scope, case.item);
- const casted = try mod.coerce(scope, target.ty, resolved);
- const item = try mod.resolveConstValue(scope, casted);
-
- if (target_val.eql(item)) {
- try analyzeBody(mod, scope.cast(Scope.Block).?, case.body);
- return mod.constNoReturn(scope, inst.base.src);
- }
- }
- try analyzeBody(mod, scope.cast(Scope.Block).?, inst.positionals.else_body);
- return mod.constNoReturn(scope, inst.base.src);
- }
-
- if (inst.positionals.cases.len == 0) {
- // No cases; just analyze else_body.
- try analyzeBody(mod, scope.cast(Scope.Block).?, inst.positionals.else_body);
- return mod.constNoReturn(scope, inst.base.src);
- }
-
- const parent_block = try mod.requireRuntimeBlock(scope, inst.base.src);
- const cases = try parent_block.arena.alloc(Inst.SwitchBr.Case, inst.positionals.cases.len);
-
- var case_block: Scope.Block = .{
- .parent = parent_block,
- .inst_table = parent_block.inst_table,
- .func = parent_block.func,
- .owner_decl = parent_block.owner_decl,
- .src_decl = parent_block.src_decl,
- .instructions = .{},
- .arena = parent_block.arena,
- .inlining = parent_block.inlining,
- .is_comptime = parent_block.is_comptime,
- .branch_quota = parent_block.branch_quota,
- };
- defer case_block.instructions.deinit(mod.gpa);
-
- for (inst.positionals.cases) |case, i| {
- // Reset without freeing.
- case_block.instructions.items.len = 0;
-
- const resolved = try resolveInst(mod, scope, case.item);
- const casted = try mod.coerce(scope, target.ty, resolved);
- const item = try mod.resolveConstValue(scope, casted);
-
- try analyzeBody(mod, &case_block, case.body);
-
- cases[i] = .{
- .item = item,
- .body = .{ .instructions = try parent_block.arena.dupe(*Inst, case_block.instructions.items) },
- };
- }
-
- case_block.instructions.items.len = 0;
- try analyzeBody(mod, &case_block, inst.positionals.else_body);
-
- const else_body: ir.Body = .{
- .instructions = try parent_block.arena.dupe(*Inst, case_block.instructions.items),
- };
-
- return mod.addSwitchBr(parent_block, inst.base.src, target_ptr, cases, else_body);
-}
-
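-// The loop above reuses one case_block for every case by setting
-// `instructions.items.len = 0`, which empties the list while keeping its
-// allocated capacity. Minimal sketch of the same pattern (a `gpa` allocator
-// is assumed):
-//
-//     var list = std.ArrayListUnmanaged(u8){};
-//     defer list.deinit(gpa);
-//     try list.append(gpa, 42);
-//     list.items.len = 0; // reset without freeing; capacity is retained
-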
-fn validateSwitch(mod: *Module, scope: *Scope, target: *Inst, inst: *zir.Inst.SwitchBr) InnerError!void {
- // validate usage of '_' prongs
- if (inst.kw_args.special_prong == .underscore and target.ty.zigTypeTag() != .Enum) {
- return mod.fail(scope, inst.base.src, "'_' prong only allowed when switching on non-exhaustive enums", .{});
- // TODO notes "'_' prong here" inst.positionals.cases[last].src
- }
-
- // check that target type supports ranges
- if (inst.kw_args.range) |range_inst| {
- switch (target.ty.zigTypeTag()) {
- .Int, .ComptimeInt => {},
- else => {
- return mod.fail(scope, target.src, "ranges not allowed when switching on type {}", .{target.ty});
- // TODO notes "range used here" range_inst.src
- },
- }
- }
-
- // check for duplicate items and a missing else prong
- switch (target.ty.zigTypeTag()) {
- .Enum => return mod.fail(scope, inst.base.src, "TODO validateSwitch .Enum", .{}),
- .ErrorSet => return mod.fail(scope, inst.base.src, "TODO validateSwitch .ErrorSet", .{}),
- .Union => return mod.fail(scope, inst.base.src, "TODO validateSwitch .Union", .{}),
- .Int, .ComptimeInt => {
- var range_set = @import("RangeSet.zig").init(mod.gpa);
- defer range_set.deinit();
-
- for (inst.positionals.items) |item| {
- const maybe_src = if (item.castTag(.switch_range)) |range| blk: {
- const start_resolved = try resolveInst(mod, scope, range.positionals.lhs);
- const start_casted = try mod.coerce(scope, target.ty, start_resolved);
- const end_resolved = try resolveInst(mod, scope, range.positionals.rhs);
- const end_casted = try mod.coerce(scope, target.ty, end_resolved);
-
- break :blk try range_set.add(
- try mod.resolveConstValue(scope, start_casted),
- try mod.resolveConstValue(scope, end_casted),
- item.src,
- );
- } else blk: {
- const resolved = try resolveInst(mod, scope, item);
- const casted = try mod.coerce(scope, target.ty, resolved);
- const value = try mod.resolveConstValue(scope, casted);
- break :blk try range_set.add(value, value, item.src);
- };
-
- if (maybe_src) |previous_src| {
- return mod.fail(scope, item.src, "duplicate switch value", .{});
- // TODO notes "previous value is here" previous_src
- }
- }
-
- if (target.ty.zigTypeTag() == .Int) {
- var arena = std.heap.ArenaAllocator.init(mod.gpa);
- defer arena.deinit();
-
- const start = try target.ty.minInt(&arena, mod.getTarget());
- const end = try target.ty.maxInt(&arena, mod.getTarget());
- if (try range_set.spans(start, end)) {
- if (inst.kw_args.special_prong == .@"else") {
- return mod.fail(scope, inst.base.src, "unreachable else prong, all cases already handled", .{});
- }
- return;
- }
- }
-
- if (inst.kw_args.special_prong != .@"else") {
- return mod.fail(scope, inst.base.src, "switch must handle all possibilities", .{});
- }
- },
- .Bool => {
- var true_count: u8 = 0;
- var false_count: u8 = 0;
- for (inst.positionals.items) |item| {
- const resolved = try resolveInst(mod, scope, item);
- const casted = try mod.coerce(scope, Type.initTag(.bool), resolved);
- if ((try mod.resolveConstValue(scope, casted)).toBool()) {
- true_count += 1;
- } else {
- false_count += 1;
- }
-
- if (true_count + false_count > 2) {
- return mod.fail(scope, item.src, "duplicate switch value", .{});
- }
- }
- if ((true_count + false_count < 2) and inst.kw_args.special_prong != .@"else") {
- return mod.fail(scope, inst.base.src, "switch must handle all possibilities", .{});
- }
- if ((true_count + false_count == 2) and inst.kw_args.special_prong == .@"else") {
- return mod.fail(scope, inst.base.src, "unreachable else prong, all cases already handled", .{});
- }
- },
- .EnumLiteral, .Void, .Fn, .Pointer, .Type => {
- if (inst.kw_args.special_prong != .@"else") {
- return mod.fail(scope, inst.base.src, "else prong required when switching on type '{}'", .{target.ty});
- }
-
- var seen_values = std.HashMap(Value, usize, Value.hash, Value.eql, std.hash_map.DefaultMaxLoadPercentage).init(mod.gpa);
- defer seen_values.deinit();
-
- for (inst.positionals.items) |item| {
- const resolved = try resolveInst(mod, scope, item);
- const casted = try mod.coerce(scope, target.ty, resolved);
- const val = try mod.resolveConstValue(scope, casted);
-
- if (try seen_values.fetchPut(val, item.src)) |prev| {
- return mod.fail(scope, item.src, "duplicate switch value", .{});
- // TODO notes "previous value here" prev.value
- }
- }
- },
-
- .ErrorUnion,
- .NoReturn,
- .Array,
- .Struct,
- .Undefined,
- .Null,
- .Optional,
- .BoundFn,
- .Opaque,
- .Vector,
- .Frame,
- .AnyFrame,
- .ComptimeFloat,
- .Float,
- => {
- return mod.fail(scope, target.src, "invalid switch target type '{}'", .{target.ty});
- },
- }
-}
-
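-// Sketch of the RangeSet calls exercised above (the *_val values are assumed
-// to be comptime-resolved already):
-//
-//     var range_set = @import("RangeSet.zig").init(mod.gpa);
-//     defer range_set.deinit();
-//     // `add` returns the src of a previously added overlapping entry,
-//     // which is how duplicate switch values are detected:
-//     const prev = try range_set.add(first_val, last_val, src);
-//     // `spans` reports whether the whole integer range is covered,
-//     // which makes an `else` prong unreachable:
-//     const exhaustive = try range_set.spans(min_val, max_val);
-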
-fn analyzeInstImport(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirImport(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveConstString(mod, scope, inst.positionals.operand);
@@ -1718,19 +1555,19 @@ fn analyzeInstImport(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerErr
return mod.constType(scope, inst.base.src, file_scope.root_container.ty);
}
-fn analyzeInstShl(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirShl(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstShl", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirShl", .{});
}
-fn analyzeInstShr(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirShr(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstShr", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirShr", .{});
}
-fn analyzeInstBitwise(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirBitwise(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1784,8 +1621,8 @@ fn analyzeInstBitwise(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerE
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
const ir_tag = switch (inst.base.tag) {
- .bitand => Inst.Tag.bitand,
- .bitor => Inst.Tag.bitor,
+ .bit_and => Inst.Tag.bit_and,
+ .bit_or => Inst.Tag.bit_or,
.xor => Inst.Tag.xor,
else => unreachable,
};
@@ -1793,25 +1630,25 @@ fn analyzeInstBitwise(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerE
return mod.addBinOp(b, inst.base.src, scalar_type, ir_tag, casted_lhs, casted_rhs);
}
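// The zir tags renamed above map one-to-one onto IR tags, so (hypothetical)
// `a & b`, `a | b`, and `a ^ b` lower to .bit_and, .bit_or, and .xor
// respectively once both operands are runtime-known.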
-fn analyzeInstBitNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirBitNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstBitNot", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirBitNot", .{});
}
-fn analyzeInstArrayCat(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirArrayCat(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstArrayCat", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirArrayCat", .{});
}
-fn analyzeInstArrayMul(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirArrayMul(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstArrayMul", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirArrayMul", .{});
}
-fn analyzeInstArithmetic(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirArithmetic(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1912,14 +1749,14 @@ fn analyzeInstComptimeOp(mod: *Module, scope: *Scope, res_type: Type, inst: *zir
});
}
-fn analyzeInstDeref(mod: *Module, scope: *Scope, deref: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirDeref(mod: *Module, scope: *Scope, deref: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, deref.positionals.operand);
return mod.analyzeDeref(scope, deref.base.src, ptr, deref.positionals.operand.src);
}
-fn analyzeInstAsm(mod: *Module, scope: *Scope, assembly: *zir.Inst.Asm) InnerError!*Inst {
+fn zirAsm(mod: *Module, scope: *Scope, assembly: *zir.Inst.Asm) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const return_type = try resolveType(mod, scope, assembly.positionals.return_type);
@@ -1960,7 +1797,7 @@ fn analyzeInstAsm(mod: *Module, scope: *Scope, assembly: *zir.Inst.Asm) InnerErr
return &inst.base;
}
-fn analyzeInstCmp(
+fn zirCmp(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.BinOp,
@@ -2018,14 +1855,14 @@ fn analyzeInstCmp(
return mod.fail(scope, inst.base.src, "TODO implement more cmp analysis", .{});
}
-fn analyzeInstTypeOf(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirTypeof(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
return mod.constType(scope, inst.base.src, operand.ty);
}
-fn analyzeInstTypeOfPeer(mod: *Module, scope: *Scope, inst: *zir.Inst.TypeOfPeer) InnerError!*Inst {
+fn zirTypeofPeer(mod: *Module, scope: *Scope, inst: *zir.Inst.TypeOfPeer) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
var insts_to_res = try mod.gpa.alloc(*ir.Inst, inst.positionals.items.len);
@@ -2037,7 +1874,7 @@ fn analyzeInstTypeOfPeer(mod: *Module, scope: *Scope, inst: *zir.Inst.TypeOfPeer
return mod.constType(scope, inst.base.src, pt_res);
}
-fn analyzeInstBoolNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirBoolNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const uncasted_operand = try resolveInst(mod, scope, inst.positionals.operand);
@@ -2050,7 +1887,7 @@ fn analyzeInstBoolNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerEr
return mod.addUnOp(b, inst.base.src, bool_type, .not, operand);
}
-fn analyzeInstBoolOp(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirBoolOp(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const bool_type = Type.initTag(.bool);
@@ -2059,7 +1896,7 @@ fn analyzeInstBoolOp(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerEr
const uncasted_rhs = try resolveInst(mod, scope, inst.positionals.rhs);
const rhs = try mod.coerce(scope, bool_type, uncasted_rhs);
- const is_bool_or = inst.base.tag == .boolor;
+ const is_bool_or = inst.base.tag == .bool_or;
if (lhs.value()) |lhs_val| {
if (rhs.value()) |rhs_val| {
@@ -2071,17 +1908,17 @@ fn analyzeInstBoolOp(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerEr
}
}
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
- return mod.addBinOp(b, inst.base.src, bool_type, if (is_bool_or) .boolor else .booland, lhs, rhs);
+ return mod.addBinOp(b, inst.base.src, bool_type, if (is_bool_or) .bool_or else .bool_and, lhs, rhs);
}
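// When both operands are comptime-known, the branch above folds the result to
// a constant bool; otherwise a runtime instruction is emitted, e.g. a
// (hypothetical) `a or b` with runtime `b` becomes a .bool_or bin-op.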
-fn isNull(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
+fn zirIsNull(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
return mod.analyzeIsNull(scope, inst.base.src, operand, invert_logic);
}
-fn isNullPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
+fn zirIsNullPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, inst.positionals.operand);
@@ -2089,14 +1926,14 @@ fn isNullPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bo
return mod.analyzeIsNull(scope, inst.base.src, loaded, invert_logic);
}
-fn isErr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirIsErr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
return mod.analyzeIsErr(scope, inst.base.src, operand);
}
-fn isErrPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirIsErrPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, inst.positionals.operand);
@@ -2104,7 +1941,7 @@ fn isErrPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst
return mod.analyzeIsErr(scope, inst.base.src, loaded);
}
-fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerError!*Inst {
+fn zirCondbr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const uncasted_cond = try resolveInst(mod, scope, inst.positionals.condition);
@@ -2153,7 +1990,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
return mod.addCondBr(parent_block, inst.base.src, cond, then_body, else_body);
}
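// e.g. a (hypothetical) `if (runtime_cond) x += 1;` arrives here as a bool
// `cond` plus separately analyzed then_body/else_body.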
-fn analyzeInstUnreachable(
+fn zirUnreachable(
mod: *Module,
scope: *Scope,
unreach: *zir.Inst.NoOp,
@@ -2170,7 +2007,7 @@ fn analyzeInstUnreachable(
}
}
-fn analyzeInstRet(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirReturn(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
@@ -2179,13 +2016,14 @@ fn analyzeInstRet(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!
if (b.inlining) |inlining| {
// We are inlining a function call; rewrite the `ret` as a `break`.
try inlining.merges.results.append(mod.gpa, operand);
- return mod.addBr(b, inst.base.src, inlining.merges.block_inst, operand);
+ const br = try mod.addBr(b, inst.base.src, inlining.merges.block_inst, operand);
+ return &br.base;
}
return mod.addUnOp(b, inst.base.src, Type.initTag(.noreturn), .ret, operand);
}
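// With the change above, addBr now returns the typed *Inst.Br, so the caller
// unwraps `&br.base`; when inlining, a (hypothetical) `return x;` in the
// callee thus becomes a `br` to the call's merge block, with `x` also
// recorded in merges.results.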
-fn analyzeInstRetVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
+fn zirReturnVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
@@ -2193,7 +2031,8 @@ fn analyzeInstRetVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerEr
// We are inlining a function call; rewrite the `retvoid` as a `breakvoid`.
const void_inst = try mod.constVoid(scope, inst.base.src);
try inlining.merges.results.append(mod.gpa, void_inst);
- return mod.addBr(b, inst.base.src, inlining.merges.block_inst, void_inst);
+ const br = try mod.addBr(b, inst.base.src, inlining.merges.block_inst, void_inst);
+ return &br.base;
}
if (b.func) |func| {
@@ -2216,27 +2055,7 @@ fn floatOpAllowed(tag: zir.Inst.Tag) bool {
};
}
-fn analyzeBreak(
- mod: *Module,
- scope: *Scope,
- src: usize,
- zir_block: *zir.Inst.Block,
- operand: *Inst,
-) InnerError!*Inst {
- var opt_block = scope.cast(Scope.Block);
- while (opt_block) |block| {
- if (block.label) |*label| {
- if (label.zir_block == zir_block) {
- try label.merges.results.append(mod.gpa, operand);
- const b = try mod.requireFunctionBlock(scope, src);
- return mod.addBr(b, src, label.merges.block_inst, operand);
- }
- }
- opt_block = block.parent;
- } else unreachable;
-}
-
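-// The removed analyzeBreak walked `block.parent` upward to the scope whose
-// `label.zir_block` matched the break target, appended the operand to that
-// label's merges.results, and emitted a `br` to its merge block.
-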
-fn analyzeInstSimplePtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, mutable: bool, size: std.builtin.TypeInfo.Pointer.Size) InnerError!*Inst {
+fn zirSimplePtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, mutable: bool, size: std.builtin.TypeInfo.Pointer.Size) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const elem_type = try resolveType(mod, scope, inst.positionals.operand);
@@ -2244,7 +2063,7 @@ fn analyzeInstSimplePtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, m
return mod.constType(scope, inst.base.src, ty);
}
-fn analyzeInstPtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.PtrType) InnerError!*Inst {
+fn zirPtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.PtrType) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// TODO lazy values