author     Andrew Kelley <andrew@ziglang.org>   2021-01-20 20:37:44 -0700
committer  Andrew Kelley <andrew@ziglang.org>   2021-01-31 21:09:22 -0700
commit     b7452fe35f514d4c04aae4582bc8071bc9e70f1b (patch)
tree       b125fa735b9d2f9ea3791687f8ef22d75aca590a /src
parent     fdc875ed0080cd2542a854a8cd6c627b25e9b7a4 (diff)
stage2: rework astgen result locations
Motivating test case:

```zig
export fn _start() noreturn {
    var x: u64 = 1;
    var y: u32 = 2;
    var thing: u32 = 1;
    const result = if (thing == 1) x else y;
    exit();
}
```

The main idea here is for astgen to output ideal ZIR depending on whether or not
the sub-expressions of a block consume the result location. Here, neither `x`
nor `y` consumes the result location of the conditional expression block, so the
ZIR should communicate the result of the condbr using break instructions, not
through the result location pointer. With this commit, this is accomplished:

```
%22 = alloc_inferred()
%23 = block({
  %24 = const(TypedValue{ .ty = type, .val = bool})
  %25 = deref(%18)
  %26 = const(TypedValue{ .ty = comptime_int, .val = 1})
  %27 = cmp_eq(%25, %26)
  %28 = as(%24, %27)
  %29 = condbr(%28, {
    %30 = deref(%4)
    < there is no longer a store instruction here >
    %31 = break("label_23", %30)
  }, {
    %32 = deref(%11)
    < there is no longer a store instruction here >
    %33 = break("label_23", %32)
  })
})
%34 = store_to_inferred_ptr(%22, %23) <-- the store is only here
%35 = resolve_inferred_alloc(%22)
```

However, if the result location does get consumed, the break instructions change
to break_void, and the result value is communicated only by the stores, not by
the break instructions.

Implementation:

 * The GenZIR scope used for conditional branches now has an optional result
   location pointer field and a count of how many times the result location
   ended up being an rvalue (not consumed).
 * When rvalue() is called on a result location for a block, it increments this
   counter. After generating the branches of a block, astgen for the conditional
   branch checks this count; if it is 2, the store_to_block_ptr instructions are
   elided and rvalue() is called on the block result (which accounts for peer
   type resolution on the break operands).

astgen has many functions disabled until they can be reworked with these new
semantics. That will be done before merging the branch.

There are some new rules for astgen to follow regarding result locations and
what you are allowed/required to do depending on which one is passed to expr().
See the updated doc comments of ResultLoc for details.

I also changed naming conventions of some things in this commit, sorry about that.
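To make the strategy selection described above easier to follow, here is a minimal
sketch in Zig. It is illustrative only, not the compiler's actual code:
`chooseStrategy`, `rvalue_rl_count`, and `branch_count` are stand-ins for logic
that lives inline in astgen's `ifExpr` in the diff below, where the branch count
is the constant 2 (then and else).

```zig
/// Illustrative sketch only -- restates the rule described above; the real
/// logic is inlined in astgen's ifExpr (see the diff below).
const Strategy = enum {
    /// Both branches end in break_void; the result is communicated through
    /// the result location pointer (store_to_block_ptr stays in place).
    break_void,
    /// The break instructions carry the block's result values; the
    /// store_to_block_ptr instructions are elided and rvalue() is applied
    /// to the block result afterwards.
    break_operand,
};

/// Hypothetical helper: decide the strategy from how many branches treated
/// the result location as an rvalue (i.e. did not consume it).
fn chooseStrategy(rvalue_rl_count: usize, branch_count: usize) Strategy {
    if (rvalue_rl_count == branch_count) {
        // No branch consumed the result location, so the break operands can
        // carry the result and the stores are unnecessary.
        return .break_operand;
    }
    // At least one branch stored through the result pointer; rely on it.
    return .break_void;
}
```

In the actual change, a `.ptr` result location also forces `break_void`, because
the caller's pointer was forwarded directly to the sub-expressions.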
Diffstat (limited to 'src')
-rw-r--r--  src/Module.zig    |   9
-rw-r--r--  src/astgen.zig    | 402
-rw-r--r--  src/codegen.zig   |  24
-rw-r--r--  src/ir.zig        |  16
-rw-r--r--  src/zir.zig       | 155
-rw-r--r--  src/zir_sema.zig  | 572
6 files changed, 649 insertions(+), 529 deletions(-)
diff --git a/src/Module.zig b/src/Module.zig
index fa9722814e..2dc84a93a9 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -697,6 +697,13 @@ pub const Scope = struct {
continue_block: ?*zir.Inst.Block = null,
/// only valid if label != null or (continue_block and break_block) != null
break_result_loc: astgen.ResultLoc = undefined,
+ /// When a block has a pointer result location, here it is.
+ rl_ptr: ?*zir.Inst = null,
+ /// Keeps track of how many branches of a block did not actually
+ /// consume the result location. astgen uses this to figure out
+ /// whether to rely on break instructions or writing to the result
+ /// pointer for the result instruction.
+ rvalue_rl_count: usize = 0,
pub const Label = struct {
token: ast.TokenIndex,
@@ -1171,7 +1178,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
!gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn())
{
const src = tree.token_locs[body_block.rbrace].start;
- _ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .returnvoid);
+ _ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .return_void);
}
if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
diff --git a/src/astgen.zig b/src/astgen.zig
index a74b83de44..617937aa82 100644
--- a/src/astgen.zig
+++ b/src/astgen.zig
@@ -14,25 +14,30 @@ const InnerError = Module.InnerError;
pub const ResultLoc = union(enum) {
/// The expression is the right-hand side of assignment to `_`. Only the side-effects of the
- /// expression should be generated.
+ /// expression should be generated. The result instruction from the expression must
+ /// be ignored.
discard,
/// The expression has an inferred type, and it will be evaluated as an rvalue.
none,
/// The expression must generate a pointer rather than a value. For example, the left hand side
/// of an assignment uses this kind of result location.
ref,
- /// The expression will be type coerced into this type, but it will be evaluated as an rvalue.
+ /// The expression will be coerced into this type, but it will be evaluated as an rvalue.
ty: *zir.Inst,
- /// The expression must store its result into this typed pointer.
+ /// The expression must store its result into this typed pointer. The result instruction
+ /// from the expression must be ignored.
ptr: *zir.Inst,
/// The expression must store its result into this allocation, which has an inferred type.
+ /// The result instruction from the expression must be ignored.
inferred_ptr: *zir.Inst.Tag.alloc_inferred.Type(),
/// The expression must store its result into this pointer, which is a typed pointer that
/// has been bitcasted to whatever the expression's type is.
+ /// The result instruction from the expression must be ignored.
bitcasted_ptr: *zir.Inst.UnOp,
/// There is a pointer for the expression to store its result into, however, its type
/// is inferred based on peer type resolution for a `zir.Inst.Block`.
- block_ptr: *zir.Inst.Block,
+ /// The result instruction from the expression must be ignored.
+ block_ptr: *Module.Scope.GenZIR,
};
pub fn typeExpr(mod: *Module, scope: *Scope, type_node: *ast.Node) InnerError!*zir.Inst {
@@ -179,6 +184,9 @@ fn lvalExpr(mod: *Module, scope: *Scope, node: *ast.Node) InnerError!*zir.Inst {
}
/// Turn Zig AST into untyped ZIR instructions.
+/// When `rl` is discard, ptr, inferred_ptr, bitcasted_ptr, or block_ptr, the
+/// result instruction can only be used to inspect whether it isNoReturn();
+/// it must not otherwise be used.
pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerError!*zir.Inst {
switch (node.tag) {
.Root => unreachable, // Top-level declaration.
@@ -197,20 +205,20 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.FieldInitializer => unreachable, // Handled explicitly.
.ContainerField => unreachable, // Handled explicitly.
- .Assign => return rlWrapVoid(mod, scope, rl, node, try assign(mod, scope, node.castTag(.Assign).?)),
- .AssignBitAnd => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitAnd).?, .bitand)),
- .AssignBitOr => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitOr).?, .bitor)),
- .AssignBitShiftLeft => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftLeft).?, .shl)),
- .AssignBitShiftRight => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftRight).?, .shr)),
- .AssignBitXor => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitXor).?, .xor)),
- .AssignDiv => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignDiv).?, .div)),
- .AssignSub => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSub).?, .sub)),
- .AssignSubWrap => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSubWrap).?, .subwrap)),
- .AssignMod => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMod).?, .mod_rem)),
- .AssignAdd => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAdd).?, .add)),
- .AssignAddWrap => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAddWrap).?, .addwrap)),
- .AssignMul => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMul).?, .mul)),
- .AssignMulWrap => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMulWrap).?, .mulwrap)),
+ .Assign => return rvalueVoid(mod, scope, rl, node, try assign(mod, scope, node.castTag(.Assign).?)),
+ .AssignBitAnd => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitAnd).?, .bit_and)),
+ .AssignBitOr => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitOr).?, .bit_or)),
+ .AssignBitShiftLeft => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftLeft).?, .shl)),
+ .AssignBitShiftRight => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftRight).?, .shr)),
+ .AssignBitXor => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitXor).?, .xor)),
+ .AssignDiv => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignDiv).?, .div)),
+ .AssignSub => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSub).?, .sub)),
+ .AssignSubWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSubWrap).?, .subwrap)),
+ .AssignMod => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMod).?, .mod_rem)),
+ .AssignAdd => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAdd).?, .add)),
+ .AssignAddWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAddWrap).?, .addwrap)),
+ .AssignMul => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMul).?, .mul)),
+ .AssignMulWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMulWrap).?, .mulwrap)),
.Add => return simpleBinOp(mod, scope, rl, node.castTag(.Add).?, .add),
.AddWrap => return simpleBinOp(mod, scope, rl, node.castTag(.AddWrap).?, .addwrap),
@@ -220,8 +228,8 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.MulWrap => return simpleBinOp(mod, scope, rl, node.castTag(.MulWrap).?, .mulwrap),
.Div => return simpleBinOp(mod, scope, rl, node.castTag(.Div).?, .div),
.Mod => return simpleBinOp(mod, scope, rl, node.castTag(.Mod).?, .mod_rem),
- .BitAnd => return simpleBinOp(mod, scope, rl, node.castTag(.BitAnd).?, .bitand),
- .BitOr => return simpleBinOp(mod, scope, rl, node.castTag(.BitOr).?, .bitor),
+ .BitAnd => return simpleBinOp(mod, scope, rl, node.castTag(.BitAnd).?, .bit_and),
+ .BitOr => return simpleBinOp(mod, scope, rl, node.castTag(.BitOr).?, .bit_or),
.BitShiftLeft => return simpleBinOp(mod, scope, rl, node.castTag(.BitShiftLeft).?, .shl),
.BitShiftRight => return simpleBinOp(mod, scope, rl, node.castTag(.BitShiftRight).?, .shr),
.BitXor => return simpleBinOp(mod, scope, rl, node.castTag(.BitXor).?, .xor),
@@ -239,15 +247,15 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.BoolAnd => return boolBinOp(mod, scope, rl, node.castTag(.BoolAnd).?),
.BoolOr => return boolBinOp(mod, scope, rl, node.castTag(.BoolOr).?),
- .BoolNot => return rlWrap(mod, scope, rl, try boolNot(mod, scope, node.castTag(.BoolNot).?)),
- .BitNot => return rlWrap(mod, scope, rl, try bitNot(mod, scope, node.castTag(.BitNot).?)),
- .Negation => return rlWrap(mod, scope, rl, try negation(mod, scope, node.castTag(.Negation).?, .sub)),
- .NegationWrap => return rlWrap(mod, scope, rl, try negation(mod, scope, node.castTag(.NegationWrap).?, .subwrap)),
+ .BoolNot => return rvalue(mod, scope, rl, try boolNot(mod, scope, node.castTag(.BoolNot).?)),
+ .BitNot => return rvalue(mod, scope, rl, try bitNot(mod, scope, node.castTag(.BitNot).?)),
+ .Negation => return rvalue(mod, scope, rl, try negation(mod, scope, node.castTag(.Negation).?, .sub)),
+ .NegationWrap => return rvalue(mod, scope, rl, try negation(mod, scope, node.castTag(.NegationWrap).?, .subwrap)),
.Identifier => return try identifier(mod, scope, rl, node.castTag(.Identifier).?),
- .Asm => return rlWrap(mod, scope, rl, try assembly(mod, scope, node.castTag(.Asm).?)),
- .StringLiteral => return rlWrap(mod, scope, rl, try stringLiteral(mod, scope, node.castTag(.StringLiteral).?)),
- .IntegerLiteral => return rlWrap(mod, scope, rl, try integerLiteral(mod, scope, node.castTag(.IntegerLiteral).?)),
+ .Asm => return rvalue(mod, scope, rl, try assembly(mod, scope, node.castTag(.Asm).?)),
+ .StringLiteral => return rvalue(mod, scope, rl, try stringLiteral(mod, scope, node.castTag(.StringLiteral).?)),
+ .IntegerLiteral => return rvalue(mod, scope, rl, try integerLiteral(mod, scope, node.castTag(.IntegerLiteral).?)),
.BuiltinCall => return builtinCall(mod, scope, rl, node.castTag(.BuiltinCall).?),
.Call => return callExpr(mod, scope, rl, node.castTag(.Call).?),
.Unreachable => return unreach(mod, scope, node.castTag(.Unreachable).?),
@@ -255,34 +263,34 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
.If => return ifExpr(mod, scope, rl, node.castTag(.If).?),
.While => return whileExpr(mod, scope, rl, node.castTag(.While).?),
.Period => return field(mod, scope, rl, node.castTag(.Period).?),
- .Deref => return rlWrap(mod, scope, rl, try deref(mod, scope, node.castTag(.Deref).?)),
- .AddressOf => return rlWrap(mod, scope, rl, try addressOf(mod, scope, node.castTag(.AddressOf).?)),
- .FloatLiteral => return rlWrap(mod, scope, rl, try floatLiteral(mod, scope, node.castTag(.FloatLiteral).?)),
- .UndefinedLiteral => return rlWrap(mod, scope, rl, try undefLiteral(mod, scope, node.castTag(.UndefinedLiteral).?)),
- .BoolLiteral => return rlWrap(mod, scope, rl, try boolLiteral(mod, scope, node.castTag(.BoolLiteral).?)),
- .NullLiteral => return rlWrap(mod, scope, rl, try nullLiteral(mod, scope, node.castTag(.NullLiteral).?)),
- .OptionalType => return rlWrap(mod, scope, rl, try optionalType(mod, scope, node.castTag(.OptionalType).?)),
+ .Deref => return rvalue(mod, scope, rl, try deref(mod, scope, node.castTag(.Deref).?)),
+ .AddressOf => return rvalue(mod, scope, rl, try addressOf(mod, scope, node.castTag(.AddressOf).?)),
+ .FloatLiteral => return rvalue(mod, scope, rl, try floatLiteral(mod, scope, node.castTag(.FloatLiteral).?)),
+ .UndefinedLiteral => return rvalue(mod, scope, rl, try undefLiteral(mod, scope, node.castTag(.UndefinedLiteral).?)),
+ .BoolLiteral => return rvalue(mod, scope, rl, try boolLiteral(mod, scope, node.castTag(.BoolLiteral).?)),
+ .NullLiteral => return rvalue(mod, scope, rl, try nullLiteral(mod, scope, node.castTag(.NullLiteral).?)),
+ .OptionalType => return rvalue(mod, scope, rl, try optionalType(mod, scope, node.castTag(.OptionalType).?)),
.UnwrapOptional => return unwrapOptional(mod, scope, rl, node.castTag(.UnwrapOptional).?),
- .Block => return rlWrapVoid(mod, scope, rl, node, try blockExpr(mod, scope, node.castTag(.Block).?)),
+ .Block => return rvalueVoid(mod, scope, rl, node, try blockExpr(mod, scope, node.castTag(.Block).?)),
.LabeledBlock => return labeledBlockExpr(mod, scope, rl, node.castTag(.LabeledBlock).?, .block),
- .Break => return rlWrap(mod, scope, rl, try breakExpr(mod, scope, node.castTag(.Break).?)),
- .Continue => return rlWrap(mod, scope, rl, try continueExpr(mod, scope, node.castTag(.Continue).?)),
- .PtrType => return rlWrap(mod, scope, rl, try ptrType(mod, scope, node.castTag(.PtrType).?)),
+ .Break => return rvalue(mod, scope, rl, try breakExpr(mod, scope, node.castTag(.Break).?)),
+ .Continue => return rvalue(mod, scope, rl, try continueExpr(mod, scope, node.castTag(.Continue).?)),
+ .PtrType => return rvalue(mod, scope, rl, try ptrType(mod, scope, node.castTag(.PtrType).?)),
.GroupedExpression => return expr(mod, scope, rl, node.castTag(.GroupedExpression).?.expr),
- .ArrayType => return rlWrap(mod, scope, rl, try arrayType(mod, scope, node.castTag(.ArrayType).?)),
- .ArrayTypeSentinel => return rlWrap(mod, scope, rl, try arrayTypeSentinel(mod, scope, node.castTag(.ArrayTypeSentinel).?)),
- .EnumLiteral => return rlWrap(mod, scope, rl, try enumLiteral(mod, scope, node.castTag(.EnumLiteral).?)),
- .MultilineStringLiteral => return rlWrap(mod, scope, rl, try multilineStrLiteral(mod, scope, node.castTag(.MultilineStringLiteral).?)),
- .CharLiteral => return rlWrap(mod, scope, rl, try charLiteral(mod, scope, node.castTag(.CharLiteral).?)),
- .SliceType => return rlWrap(mod, scope, rl, try sliceType(mod, scope, node.castTag(.SliceType).?)),
- .ErrorUnion => return rlWrap(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.ErrorUnion).?, .error_union_type)),
- .MergeErrorSets => return rlWrap(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.MergeErrorSets).?, .merge_error_sets)),
- .AnyFrameType => return rlWrap(mod, scope, rl, try anyFrameType(mod, scope, node.castTag(.AnyFrameType).?)),
- .ErrorSetDecl => return rlWrap(mod, scope, rl, try errorSetDecl(mod, scope, node.castTag(.ErrorSetDecl).?)),
- .ErrorType => return rlWrap(mod, scope, rl, try errorType(mod, scope, node.castTag(.ErrorType).?)),
+ .ArrayType => return rvalue(mod, scope, rl, try arrayType(mod, scope, node.castTag(.ArrayType).?)),
+ .ArrayTypeSentinel => return rvalue(mod, scope, rl, try arrayTypeSentinel(mod, scope, node.castTag(.ArrayTypeSentinel).?)),
+ .EnumLiteral => return rvalue(mod, scope, rl, try enumLiteral(mod, scope, node.castTag(.EnumLiteral).?)),
+ .MultilineStringLiteral => return rvalue(mod, scope, rl, try multilineStrLiteral(mod, scope, node.castTag(.MultilineStringLiteral).?)),
+ .CharLiteral => return rvalue(mod, scope, rl, try charLiteral(mod, scope, node.castTag(.CharLiteral).?)),
+ .SliceType => return rvalue(mod, scope, rl, try sliceType(mod, scope, node.castTag(.SliceType).?)),
+ .ErrorUnion => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.ErrorUnion).?, .error_union_type)),
+ .MergeErrorSets => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.MergeErrorSets).?, .merge_error_sets)),
+ .AnyFrameType => return rvalue(mod, scope, rl, try anyFrameType(mod, scope, node.castTag(.AnyFrameType).?)),
+ .ErrorSetDecl => return rvalue(mod, scope, rl, try errorSetDecl(mod, scope, node.castTag(.ErrorSetDecl).?)),
+ .ErrorType => return rvalue(mod, scope, rl, try errorType(mod, scope, node.castTag(.ErrorType).?)),
.For => return forExpr(mod, scope, rl, node.castTag(.For).?),
.ArrayAccess => return arrayAccess(mod, scope, rl, node.castTag(.ArrayAccess).?),
- .Slice => return rlWrap(mod, scope, rl, try sliceExpr(mod, scope, node.castTag(.Slice).?)),
+ .Slice => return rvalue(mod, scope, rl, try sliceExpr(mod, scope, node.castTag(.Slice).?)),
.Catch => return catchExpr(mod, scope, rl, node.castTag(.Catch).?),
.Comptime => return comptimeKeyword(mod, scope, rl, node.castTag(.Comptime).?),
.OrElse => return orelseExpr(mod, scope, rl, node.castTag(.OrElse).?),
@@ -341,6 +349,9 @@ pub fn comptimeExpr(mod: *Module, parent_scope: *Scope, rl: ResultLoc, node: *as
}
fn breakExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst {
+ if (true) {
+ @panic("TODO reimplement this");
+ }
const tree = parent_scope.tree();
const src = tree.token_locs[node.ltoken].start;
@@ -563,8 +574,8 @@ fn blockExprStmts(mod: *Module, parent_scope: *Scope, node: *ast.Node, statement
scope = try varDecl(mod, scope, var_decl_node, &block_arena.allocator);
},
.Assign => try assign(mod, scope, statement.castTag(.Assign).?),
- .AssignBitAnd => try assignOp(mod, scope, statement.castTag(.AssignBitAnd).?, .bitand),
- .AssignBitOr => try assignOp(mod, scope, statement.castTag(.AssignBitOr).?, .bitor),
+ .AssignBitAnd => try assignOp(mod, scope, statement.castTag(.AssignBitAnd).?, .bit_and),
+ .AssignBitOr => try assignOp(mod, scope, statement.castTag(.AssignBitOr).?, .bit_or),
.AssignBitShiftLeft => try assignOp(mod, scope, statement.castTag(.AssignBitShiftLeft).?, .shl),
.AssignBitShiftRight => try assignOp(mod, scope, statement.castTag(.AssignBitShiftRight).?, .shr),
.AssignBitXor => try assignOp(mod, scope, statement.castTag(.AssignBitXor).?, .xor),
@@ -644,6 +655,7 @@ fn varDecl(
// Namespace vars shadowing detection
if (mod.lookupDeclName(scope, ident_name)) |_| {
+ // TODO add note for other definition
return mod.fail(scope, name_src, "redefinition of '{s}'", .{ident_name});
}
const init_node = node.getInitNode() orelse
@@ -751,14 +763,14 @@ fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerErr
.val = Value.initTag(.bool_type),
});
const operand = try expr(mod, scope, .{ .ty = bool_type }, node.rhs);
- return addZIRUnOp(mod, scope, src, .boolnot, operand);
+ return addZIRUnOp(mod, scope, src, .bool_not, operand);
}
fn bitNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.op_token].start;
const operand = try expr(mod, scope, .none, node.rhs);
- return addZIRUnOp(mod, scope, src, .bitnot, operand);
+ return addZIRUnOp(mod, scope, src, .bit_not, operand);
}
fn negation(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp, op_inst_tag: zir.Inst.Tag) InnerError!*zir.Inst {
@@ -1101,7 +1113,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con
if (rl == .ref) {
return addZIRInst(mod, scope, src, zir.Inst.DeclRef, .{ .decl = decl }, .{});
} else {
- return rlWrap(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.DeclVal, .{
+ return rvalue(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.DeclVal, .{
.decl = decl,
}, .{}));
}
@@ -1200,6 +1212,9 @@ fn orelseCatchExpr(
rhs: *ast.Node,
payload_node: ?*ast.Node,
) InnerError!*zir.Inst {
+ if (true) {
+ @panic("TODO reimplement this");
+ }
const tree = scope.tree();
const src = tree.token_locs[op_token].start;
@@ -1308,7 +1323,7 @@ pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleI
.field_name = field_name,
});
}
- return rlWrap(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val, .{
+ return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val, .{
.object = try expr(mod, scope, .none, node.lhs),
.field_name = field_name,
}));
@@ -1338,7 +1353,7 @@ fn namedField(
.field_name = try comptimeExpr(mod, scope, string_rl, params[1]),
});
}
- return rlWrap(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val_named, .{
+ return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val_named, .{
.object = try expr(mod, scope, .none, params[0]),
.field_name = try comptimeExpr(mod, scope, string_rl, params[1]),
}));
@@ -1359,7 +1374,7 @@ fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Array
.index = try expr(mod, scope, index_rl, node.index_expr),
});
}
- return rlWrap(mod, scope, rl, try addZirInstTag(mod, scope, src, .elem_val, .{
+ return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .elem_val, .{
.array = try expr(mod, scope, .none, node.lhs),
.index = try expr(mod, scope, index_rl, node.index_expr),
}));
@@ -1416,7 +1431,7 @@ fn simpleBinOp(
const rhs = try expr(mod, scope, .none, infix_node.rhs);
const result = try addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs);
- return rlWrap(mod, scope, rl, result);
+ return rvalue(mod, scope, rl, result);
}
fn boolBinOp(
@@ -1498,7 +1513,7 @@ fn boolBinOp(
condbr.positionals.else_body = .{ .instructions = try rhs_scope.arena.dupe(*zir.Inst, rhs_scope.instructions.items) };
}
- return rlWrap(mod, scope, rl, &block.base);
+ return rvalue(mod, scope, rl, &block.base);
}
const CondKind = union(enum) {
@@ -1578,6 +1593,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
cond_kind = .{ .err_union = null };
}
}
+ const block_branch_count = 2; // then and else
var block_scope: Scope.GenZIR = .{
.parent = scope,
.decl = scope.ownerDecl().?,
@@ -1600,6 +1616,33 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
.instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
});
+ // Depending on whether the result location is a pointer or value, different
+ // ZIR needs to be generated. In the former case we rely on storing to the
+ // pointer to communicate the result, and use breakvoid; in the latter case
+ // the block break instructions will have the result values.
+ // One more complication: when the result location is a pointer, we detect
+ // the scenario where the result location is not consumed. In this case
+ // we emit ZIR for the block break instructions to have the result values,
+ // and then rvalue() on that to pass the value to the result location.
+ const branch_rl: ResultLoc = switch (rl) {
+ .discard, .none, .ty, .ptr, .ref => rl,
+
+ .inferred_ptr => |ptr| blk: {
+ block_scope.rl_ptr = &ptr.base;
+ break :blk .{ .block_ptr = &block_scope };
+ },
+
+ .bitcasted_ptr => |ptr| blk: {
+ block_scope.rl_ptr = &ptr.base;
+ break :blk .{ .block_ptr = &block_scope };
+ },
+
+ .block_ptr => |parent_block_scope| blk: {
+ block_scope.rl_ptr = parent_block_scope.rl_ptr.?;
+ break :blk .{ .block_ptr = &block_scope };
+ },
+ };
+
const then_src = tree.token_locs[if_node.body.lastToken()].start;
var then_scope: Scope.GenZIR = .{
.parent = scope,
@@ -1612,25 +1655,10 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
// declare payload to the then_scope
const then_sub_scope = try cond_kind.thenSubScope(mod, &then_scope, then_src, if_node.payload);
- // Most result location types can be forwarded directly; however
- // if we need to write to a pointer which has an inferred type,
- // proper type inference requires peer type resolution on the if's
- // branches.
- const branch_rl: ResultLoc = switch (rl) {
- .discard, .none, .ty, .ptr, .ref => rl,
- .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block },
- };
-
const then_result = try expr(mod, then_sub_scope, branch_rl, if_node.body);
- if (!then_result.tag.isNoReturn()) {
- _ = try addZIRInst(mod, then_sub_scope, then_src, zir.Inst.Break, .{
- .block = block,
- .operand = then_result,
- }, .{});
- }
- condbr.positionals.then_body = .{
- .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items),
- };
+ // We hold off on the break instructions as well as copying the then/else
+ // instructions into place until we know whether to keep store_to_block_ptr
+ // instructions or not.
var else_scope: Scope.GenZIR = .{
.parent = scope,
@@ -1640,34 +1668,127 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
};
defer else_scope.instructions.deinit(mod.gpa);
- if (if_node.@"else") |else_node| {
- const else_src = tree.token_locs[else_node.body.lastToken()].start;
+ var else_src: usize = undefined;
+ var else_sub_scope: *Module.Scope = undefined;
+ const else_result: ?*zir.Inst = if (if_node.@"else") |else_node| blk: {
+ else_src = tree.token_locs[else_node.body.lastToken()].start;
// declare payload to the then_scope
- const else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload);
+ else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload);
+
+ break :blk try expr(mod, else_sub_scope, branch_rl, else_node.body);
+ } else blk: {
+ else_src = tree.token_locs[if_node.lastToken()].start;
+ else_sub_scope = &else_scope.base;
+ block_scope.rvalue_rl_count += 1;
+ break :blk null;
+ };
- const else_result = try expr(mod, else_sub_scope, branch_rl, else_node.body);
- if (!else_result.tag.isNoReturn()) {
- _ = try addZIRInst(mod, else_sub_scope, else_src, zir.Inst.Break, .{
- .block = block,
- .operand = else_result,
- }, .{});
- }
- } else {
- // TODO Optimization opportunity: we can avoid an allocation and a memcpy here
- // by directly allocating the body for this one instruction.
- const else_src = tree.token_locs[if_node.lastToken()].start;
- _ = try addZIRInst(mod, &else_scope.base, else_src, zir.Inst.BreakVoid, .{
- .block = block,
- }, .{});
+ // We now have enough information to decide whether the result instruction should
+ // be communicated via result location pointer or break instructions.
+ const Strategy = enum {
+ /// Both branches will use break_void; result location is used to communicate the
+ /// result instruction.
+ break_void,
+ /// Use break statements to pass the block result value, and call rvalue() at
+ /// the end depending on rl. Also elide the store_to_block_ptr instructions
+ /// depending on rl.
+ break_operand,
+ };
+ var elide_store_to_block_ptr_instructions = false;
+ const strategy: Strategy = switch (rl) {
+ // In this branch there will not be any store_to_block_ptr instructions.
+ .discard, .none, .ty, .ref => .break_operand,
+ // The pointer got passed through to the sub-expressions, so we will use
+ // break_void here.
+ // In this branch there will not be any store_to_block_ptr instructions.
+ .ptr => .break_void,
+ .inferred_ptr, .bitcasted_ptr, .block_ptr => blk: {
+ if (block_scope.rvalue_rl_count == 2) {
+ // Neither prong of the if consumed the result location, so we can
+ // use break instructions to create an rvalue.
+ elide_store_to_block_ptr_instructions = true;
+ break :blk Strategy.break_operand;
+ } else {
+ // Allow the store_to_block_ptr instructions to remain so that
+ // semantic analysis can turn them into bitcasts.
+ break :blk Strategy.break_void;
+ }
+ },
+ };
+ switch (strategy) {
+ .break_void => {
+ if (!then_result.tag.isNoReturn()) {
+ _ = try addZIRNoOp(mod, then_sub_scope, then_src, .break_void);
+ }
+ if (else_result) |inst| {
+ if (!inst.tag.isNoReturn()) {
+ _ = try addZIRNoOp(mod, else_sub_scope, else_src, .break_void);
+ }
+ } else {
+ _ = try addZIRNoOp(mod, else_sub_scope, else_src, .break_void);
+ }
+ assert(!elide_store_to_block_ptr_instructions);
+ try copyBodyNoEliding(&condbr.positionals.then_body, then_scope);
+ try copyBodyNoEliding(&condbr.positionals.else_body, else_scope);
+ return &block.base;
+ },
+ .break_operand => {
+ if (!then_result.tag.isNoReturn()) {
+ _ = try addZirInstTag(mod, then_sub_scope, then_src, .@"break", .{
+ .block = block,
+ .operand = then_result,
+ });
+ }
+ if (else_result) |inst| {
+ if (!inst.tag.isNoReturn()) {
+ _ = try addZirInstTag(mod, else_sub_scope, else_src, .@"break", .{
+ .block = block,
+ .operand = inst,
+ });
+ }
+ } else {
+ _ = try addZIRNoOp(mod, else_sub_scope, else_src, .break_void);
+ }
+ if (elide_store_to_block_ptr_instructions) {
+ try copyBodyWithElidedStoreBlockPtr(&condbr.positionals.then_body, then_scope);
+ try copyBodyWithElidedStoreBlockPtr(&condbr.positionals.else_body, else_scope);
+ } else {
+ try copyBodyNoEliding(&condbr.positionals.then_body, then_scope);
+ try copyBodyNoEliding(&condbr.positionals.else_body, else_scope);
+ }
+ switch (rl) {
+ .ref => return &block.base,
+ else => return rvalue(mod, scope, rl, &block.base),
+ }
+ },
}
- condbr.positionals.else_body = .{
- .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
+}
+
+/// Expects to find exactly 1 .store_to_block_ptr instruction.
+fn copyBodyWithElidedStoreBlockPtr(body: *zir.Body, scope: Module.Scope.GenZIR) !void {
+ body.* = .{
+ .instructions = try scope.arena.alloc(*zir.Inst, scope.instructions.items.len - 1),
};
+ var dst_index: usize = 0;
+ for (scope.instructions.items) |src_inst| {
+ if (src_inst.tag != .store_to_block_ptr) {
+ body.instructions[dst_index] = src_inst;
+ dst_index += 1;
+ }
+ }
+ assert(dst_index == body.instructions.len);
+}
- return &block.base;
+fn copyBodyNoEliding(body: *zir.Body, scope: Module.Scope.GenZIR) !void {
+ body.* = .{
+ .instructions = try scope.arena.dupe(*zir.Inst, scope.instructions.items),
+ };
}
fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.While) InnerError!*zir.Inst {
+ if (true) {
+ @panic("TODO reimplement this");
+ }
var cond_kind: CondKind = .bool;
if (while_node.payload) |_| cond_kind = .{ .optional = null };
if (while_node.@"else") |else_node| {
@@ -1821,6 +1942,9 @@ fn forExpr(
rl: ResultLoc,
for_node: *ast.Node.For,
) InnerError!*zir.Inst {
+ if (true) {
+ @panic("TODO reimplement this");
+ }
if (for_node.label) |label| {
try checkLabelRedefinition(mod, scope, label);
}
@@ -2017,6 +2141,9 @@ fn getRangeNode(node: *ast.Node) ?*ast.Node.SimpleInfixOp {
}
fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node.Switch) InnerError!*zir.Inst {
+ if (true) {
+ @panic("TODO reimplement this");
+ }
var block_scope: Scope.GenZIR = .{
.parent = scope,
.decl = scope.ownerDecl().?,
@@ -2186,10 +2313,10 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
// target >= start and target <= end
const range_start_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .cmp_gte, target, start);
const range_end_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .cmp_lte, target, end);
- const range_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .booland, range_start_ok, range_end_ok);
+ const range_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .bool_and, range_start_ok, range_end_ok);
if (any_ok) |some| {
- any_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .boolor, some, range_ok);
+ any_ok = try addZIRBinOp(mod, &else_scope.base, range_src, .bool_or, some, range_ok);
} else {
any_ok = range_ok;
}
@@ -2201,7 +2328,7 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
const cpm_ok = try addZIRBinOp(mod, &else_scope.base, item_inst.src, .cmp_eq, target, item_inst);
if (any_ok) |some| {
- any_ok = try addZIRBinOp(mod, &else_scope.base, item_inst.src, .boolor, some, cpm_ok);
+ any_ok = try addZIRBinOp(mod, &else_scope.base, item_inst.src, .bool_or, some, cpm_ok);
} else {
any_ok = cpm_ok;
}
@@ -2238,7 +2365,7 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
try switchCaseExpr(mod, &else_scope.base, case_rl, block, case);
} else {
// Not handling all possible cases is a compile error.
- _ = try addZIRNoOp(mod, &else_scope.base, switch_src, .unreach_nocheck);
+ _ = try addZIRNoOp(mod, &else_scope.base, switch_src, .unreachable_unsafe);
}
// All items have been generated, add the instructions to the comptime block.
@@ -2288,7 +2415,7 @@ fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerE
return addZIRUnOp(mod, scope, src, .@"return", operand);
}
} else {
- return addZIRNoOp(mod, scope, src, .returnvoid);
+ return addZIRNoOp(mod, scope, src, .return_void);
}
}
@@ -2305,7 +2432,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
if (getSimplePrimitiveValue(ident_name)) |typed_value| {
const result = try addZIRInstConst(mod, scope, src, typed_value);
- return rlWrap(mod, scope, rl, result);
+ return rvalue(mod, scope, rl, result);
}
if (ident_name.len >= 2) integer: {
@@ -2327,7 +2454,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
32 => if (is_signed) Value.initTag(.i32_type) else Value.initTag(.u32_type),
64 => if (is_signed) Value.initTag(.i64_type) else Value.initTag(.u64_type),
else => {
- return rlWrap(mod, scope, rl, try addZIRInstConst(mod, scope, src, .{
+ return rvalue(mod, scope, rl, try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = try Value.Tag.int_type.create(scope.arena(), .{
.signed = is_signed,
@@ -2340,7 +2467,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
.ty = Type.initTag(.type),
.val = val,
});
- return rlWrap(mod, scope, rl, result);
+ return rvalue(mod, scope, rl, result);
}
}
@@ -2351,7 +2478,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
.local_val => {
const local_val = s.cast(Scope.LocalVal).?;
if (mem.eql(u8, local_val.name, ident_name)) {
- return rlWrap(mod, scope, rl, local_val.inst);
+ return rvalue(mod, scope, rl, local_val.inst);
}
s = local_val.parent;
},
@@ -2360,7 +2487,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
if (mem.eql(u8, local_ptr.name, ident_name)) {
if (rl == .ref) return local_ptr.ptr;
const loaded = try addZIRUnOp(mod, scope, src, .deref, local_ptr.ptr);
- return rlWrap(mod, scope, rl, loaded);
+ return rvalue(mod, scope, rl, loaded);
}
s = local_ptr.parent;
},
@@ -2373,7 +2500,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
if (rl == .ref) {
return addZIRInst(mod, scope, src, zir.Inst.DeclRef, .{ .decl = decl }, .{});
} else {
- return rlWrap(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.DeclVal, .{
+ return rvalue(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.DeclVal, .{
.decl = decl,
}, .{}));
}
@@ -2590,7 +2717,7 @@ fn simpleCast(
const dest_type = try typeExpr(mod, scope, params[0]);
const rhs = try expr(mod, scope, .none, params[1]);
const result = try addZIRBinOp(mod, scope, src, inst_tag, dest_type, rhs);
- return rlWrap(mod, scope, rl, result);
+ return rvalue(mod, scope, rl, result);
}
fn ptrToInt(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
@@ -2634,11 +2761,11 @@ fn as(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) I
// TODO here we should be able to resolve the inference; we now have a type for the result.
return mod.failTok(scope, call.builtin_token, "TODO implement @as with inferred-type result location pointer", .{});
},
- .block_ptr => |block_ptr| {
- const casted_block_ptr = try addZIRInst(mod, scope, src, zir.Inst.CoerceResultBlockPtr, .{
+ .block_ptr => |block_scope| {
+ const casted_block_ptr = try addZirInstTag(mod, scope, src, .coerce_result_block_ptr, .{
.dest_type = dest_type,
- .block = block_ptr,
- }, .{});
+ .block_ptr = block_scope.rl_ptr.?,
+ });
return expr(mod, scope, .{ .ptr = casted_block_ptr }, params[1]);
},
}
@@ -2703,7 +2830,7 @@ fn compileError(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerE
const src = tree.token_locs[call.builtin_token].start;
const params = call.params();
const target = try expr(mod, scope, .none, params[0]);
- return addZIRUnOp(mod, scope, src, .compileerror, target);
+ return addZIRUnOp(mod, scope, src, .compile_error, target);
}
fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
@@ -2728,12 +2855,12 @@ fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCal
return mod.failTok(scope, call.builtin_token, "expected at least 1 argument, found 0", .{});
}
if (params.len == 1) {
- return rlWrap(mod, scope, rl, try addZIRUnOp(mod, scope, src, .typeof, try expr(mod, scope, .none, params[0])));
+ return rvalue(mod, scope, rl, try addZIRUnOp(mod, scope, src, .typeof, try expr(mod, scope, .none, params[0])));
}
var items = try arena.alloc(*zir.Inst, params.len);
for (params) |param, param_i|
items[param_i] = try expr(mod, scope, .none, param);
- return rlWrap(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.TypeOfPeer, .{ .items = items }, .{}));
+ return rvalue(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.TypeOfPeer, .{ .items = items }, .{}));
}
fn compileLog(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
const tree = scope.tree();
@@ -2756,7 +2883,7 @@ fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.Built
// Also, some builtins have a variable number of parameters.
if (mem.eql(u8, builtin_name, "@ptrToInt")) {
- return rlWrap(mod, scope, rl, try ptrToInt(mod, scope, call));
+ return rvalue(mod, scope, rl, try ptrToInt(mod, scope, call));
} else if (mem.eql(u8, builtin_name, "@as")) {
return as(mod, scope, rl, call);
} else if (mem.eql(u8, builtin_name, "@floatCast")) {
@@ -2769,9 +2896,9 @@ fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.Built
return typeOf(mod, scope, rl, call);
} else if (mem.eql(u8, builtin_name, "@breakpoint")) {
const src = tree.token_locs[call.builtin_token].start;
- return rlWrap(mod, scope, rl, try addZIRNoOp(mod, scope, src, .breakpoint));
+ return rvalue(mod, scope, rl, try addZIRNoOp(mod, scope, src, .breakpoint));
} else if (mem.eql(u8, builtin_name, "@import")) {
- return rlWrap(mod, scope, rl, try import(mod, scope, call));
+ return rvalue(mod, scope, rl, try import(mod, scope, call));
} else if (mem.eql(u8, builtin_name, "@compileError")) {
return compileError(mod, scope, call);
} else if (mem.eql(u8, builtin_name, "@setEvalBranchQuota")) {
@@ -2806,13 +2933,13 @@ fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Call) In
.args = args,
}, .{});
// TODO function call with result location
- return rlWrap(mod, scope, rl, result);
+ return rvalue(mod, scope, rl, result);
}
fn unreach(mod: *Module, scope: *Scope, unreach_node: *ast.Node.OneToken) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[unreach_node.token].start;
- return addZIRNoOp(mod, scope, src, .@"unreachable");
+ return addZIRNoOp(mod, scope, src, .unreachable_safe);
}
fn getSimplePrimitiveValue(name: []const u8) ?TypedValue {
@@ -3099,7 +3226,7 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool {
/// result locations must call this function on their result.
/// As an example, if the `ResultLoc` is `ptr`, it will write the result to the pointer.
/// If the `ResultLoc` is `ty`, it will coerce the result to the type.
-fn rlWrap(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerError!*zir.Inst {
+fn rvalue(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerError!*zir.Inst {
switch (rl) {
.none => return result,
.discard => {
@@ -3113,42 +3240,31 @@ fn rlWrap(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerEr
},
.ty => |ty_inst| return addZIRBinOp(mod, scope, result.src, .as, ty_inst, result),
.ptr => |ptr_inst| {
- const casted_result = try addZIRInst(mod, scope, result.src, zir.Inst.CoerceToPtrElem, .{
- .ptr = ptr_inst,
- .value = result,
- }, .{});
- _ = try addZIRBinOp(mod, scope, result.src, .store, ptr_inst, casted_result);
- return casted_result;
+ _ = try addZIRBinOp(mod, scope, result.src, .store, ptr_inst, result);
+ return result;
},
.bitcasted_ptr => |bitcasted_ptr| {
- return mod.fail(scope, result.src, "TODO implement rlWrap .bitcasted_ptr", .{});
+ return mod.fail(scope, result.src, "TODO implement rvalue .bitcasted_ptr", .{});
},
.inferred_ptr => |alloc| {
_ = try addZIRBinOp(mod, scope, result.src, .store_to_inferred_ptr, &alloc.base, result);
return result;
},
- .block_ptr => |block_ptr| {
- return mod.fail(scope, result.src, "TODO implement rlWrap .block_ptr", .{});
+ .block_ptr => |block_scope| {
+ block_scope.rvalue_rl_count += 1;
+ _ = try addZIRBinOp(mod, scope, result.src, .store_to_block_ptr, block_scope.rl_ptr.?, result);
+ return result;
},
}
}
-fn rlWrapVoid(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node, result: void) InnerError!*zir.Inst {
+fn rvalueVoid(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node, result: void) InnerError!*zir.Inst {
const src = scope.tree().token_locs[node.firstToken()].start;
const void_inst = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.void),
.val = Value.initTag(.void_value),
});
- return rlWrap(mod, scope, rl, void_inst);
-}
-
-/// TODO go over all the callsites and see where we can introduce "by-value" ZIR instructions
-/// to save ZIR memory. For example, see DeclVal vs DeclRef.
-/// Do not add additional callsites to this function.
-fn rlWrapPtr(mod: *Module, scope: *Scope, rl: ResultLoc, ptr: *zir.Inst) InnerError!*zir.Inst {
- if (rl == .ref) return ptr;
-
- return rlWrap(mod, scope, rl, try addZIRUnOp(mod, scope, ptr.src, .deref, ptr));
+ return rvalue(mod, scope, rl, void_inst);
}
pub fn addZirInstTag(
diff --git a/src/codegen.zig b/src/codegen.zig
index 1ca2bb2abe..a7b067f7e1 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -840,14 +840,14 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.arg => return self.genArg(inst.castTag(.arg).?),
.assembly => return self.genAsm(inst.castTag(.assembly).?),
.bitcast => return self.genBitCast(inst.castTag(.bitcast).?),
- .bitand => return self.genBitAnd(inst.castTag(.bitand).?),
- .bitor => return self.genBitOr(inst.castTag(.bitor).?),
+ .bit_and => return self.genBitAnd(inst.castTag(.bit_and).?),
+ .bit_or => return self.genBitOr(inst.castTag(.bit_or).?),
.block => return self.genBlock(inst.castTag(.block).?),
.br => return self.genBr(inst.castTag(.br).?),
.breakpoint => return self.genBreakpoint(inst.src),
.brvoid => return self.genBrVoid(inst.castTag(.brvoid).?),
- .booland => return self.genBoolOp(inst.castTag(.booland).?),
- .boolor => return self.genBoolOp(inst.castTag(.boolor).?),
+ .bool_and => return self.genBoolOp(inst.castTag(.bool_and).?),
+ .bool_or => return self.genBoolOp(inst.castTag(.bool_or).?),
.call => return self.genCall(inst.castTag(.call).?),
.cmp_lt => return self.genCmp(inst.castTag(.cmp_lt).?, .lt),
.cmp_lte => return self.genCmp(inst.castTag(.cmp_lte).?, .lte),
@@ -1097,7 +1097,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
if (inst.base.isUnused())
return MCValue.dead;
switch (arch) {
- .arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bitand),
+ .arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bit_and),
else => return self.fail(inst.base.src, "TODO implement bitwise and for {}", .{self.target.cpu.arch}),
}
}
@@ -1107,7 +1107,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
if (inst.base.isUnused())
return MCValue.dead;
switch (arch) {
- .arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bitor),
+ .arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bit_or),
else => return self.fail(inst.base.src, "TODO implement bitwise or for {}", .{self.target.cpu.arch}),
}
}
@@ -1371,10 +1371,10 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.rsb(.al, dst_reg, dst_reg, operand).toU32());
}
},
- .booland, .bitand => {
+ .bool_and, .bit_and => {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.@"and"(.al, dst_reg, dst_reg, operand).toU32());
},
- .boolor, .bitor => {
+ .bool_or, .bit_or => {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.orr(.al, dst_reg, dst_reg, operand).toU32());
},
.not, .xor => {
@@ -2464,14 +2464,14 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
switch (arch) {
.x86_64 => switch (inst.base.tag) {
// lhs AND rhs
- .booland => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 4, 0x20),
+ .bool_and => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 4, 0x20),
// lhs OR rhs
- .boolor => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 1, 0x08),
+ .bool_or => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 1, 0x08),
else => unreachable, // Not a boolean operation
},
.arm, .armeb => switch (inst.base.tag) {
- .booland => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .booland),
- .boolor => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .boolor),
+ .bool_and => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bool_and),
+ .bool_or => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bool_or),
else => unreachable, // Not a boolean operation
},
else => return self.fail(inst.base.src, "TODO implement boolean operations for {}", .{self.target.cpu.arch}),
diff --git a/src/ir.zig b/src/ir.zig
index 89698bdd84..b1147871f4 100644
--- a/src/ir.zig
+++ b/src/ir.zig
@@ -56,9 +56,9 @@ pub const Inst = struct {
alloc,
arg,
assembly,
- bitand,
+ bit_and,
bitcast,
- bitor,
+ bit_or,
block,
br,
breakpoint,
@@ -85,8 +85,8 @@ pub const Inst = struct {
is_err,
// *E!T => bool
is_err_ptr,
- booland,
- boolor,
+ bool_and,
+ bool_or,
/// Read a value from a pointer.
load,
loop,
@@ -147,10 +147,10 @@ pub const Inst = struct {
.cmp_gt,
.cmp_neq,
.store,
- .booland,
- .boolor,
- .bitand,
- .bitor,
+ .bool_and,
+ .bool_or,
+ .bit_and,
+ .bit_or,
.xor,
=> BinOp,
diff --git a/src/zir.zig b/src/zir.zig
index 9e5830e79a..07fc64b65a 100644
--- a/src/zir.zig
+++ b/src/zir.zig
@@ -59,7 +59,7 @@ pub const Inst = struct {
/// Inline assembly.
@"asm",
/// Bitwise AND. `&`
- bitand,
+ bit_and,
/// TODO delete this instruction, it has no purpose.
bitcast,
/// An arbitrary typed pointer is pointer-casted to a new Pointer.
@@ -71,9 +71,9 @@ pub const Inst = struct {
/// The new result location pointer has an inferred type.
bitcast_result_ptr,
/// Bitwise NOT. `~`
- bitnot,
+ bit_not,
/// Bitwise OR. `|`
- bitor,
+ bit_or,
/// A labeled block of code, which can return a value.
block,
/// A block of code, which can return a value. There are no instructions that break out of
@@ -83,17 +83,17 @@ pub const Inst = struct {
block_comptime,
/// Same as `block_flat` but additionally makes the inner instructions execute at comptime.
block_comptime_flat,
- /// Boolean AND. See also `bitand`.
- booland,
- /// Boolean NOT. See also `bitnot`.
- boolnot,
- /// Boolean OR. See also `bitor`.
- boolor,
+ /// Boolean AND. See also `bit_and`.
+ bool_and,
+ /// Boolean NOT. See also `bit_not`.
+ bool_not,
+ /// Boolean OR. See also `bit_or`.
+ bool_or,
/// Return a value from a `Block`.
@"break",
breakpoint,
/// Same as `break` but without an operand; the operand is assumed to be the void value.
- breakvoid,
+ break_void,
/// Function call.
call,
/// `<`
@@ -116,12 +116,10 @@ pub const Inst = struct {
/// result location pointer, whose type is inferred by peer type resolution on the
/// `Block`'s corresponding `break` instructions.
coerce_result_block_ptr,
- /// Equivalent to `as(ptr_child_type(typeof(ptr)), value)`.
- coerce_to_ptr_elem,
/// Emit an error message and fail compilation.
- compileerror,
+ compile_error,
/// Log compile time variables and emit an error message.
- compilelog,
+ compile_log,
/// Conditional branch. Splits control flow based on a boolean condition value.
condbr,
/// Special case, has no textual representation.
@@ -135,11 +133,11 @@ pub const Inst = struct {
/// Declares the beginning of a statement. Used for debug info.
dbg_stmt,
/// Represents a pointer to a global decl.
- declref,
+ decl_ref,
/// Represents a pointer to a global decl by string name.
- declref_str,
- /// Equivalent to a declref followed by deref.
- declval,
+ decl_ref_str,
+ /// Equivalent to a decl_ref followed by deref.
+ decl_val,
/// Load the value from a pointer.
deref,
/// Arithmetic division. Asserts no integer overflow.
@@ -185,7 +183,7 @@ pub const Inst = struct {
/// can hold the same mathematical value.
intcast,
/// Make an integer type out of signedness and bit count.
- inttype,
+ int_type,
/// Return a boolean false if an optional is null. `x != null`
is_non_null,
/// Return a boolean true if an optional is null. `x == null`
@@ -232,7 +230,7 @@ pub const Inst = struct {
/// Sends control flow back to the function's callee. Takes an operand as the return value.
@"return",
/// Same as `return` but there is no operand; the operand is implicitly the void value.
- returnvoid,
+ return_void,
/// Changes the maximum number of backwards branches that compile-time
/// code execution can use before giving up and making a compile error.
set_eval_branch_quota,
@@ -270,6 +268,10 @@ pub const Inst = struct {
/// Write a value to a pointer. For loading, see `deref`.
store,
/// Same as `store` but the type of the value being stored will be used to infer
+ /// the block type. The LHS is a block instruction, whose result location is
+ /// being stored to.
+ store_to_block_ptr,
+ /// Same as `store` but the type of the value being stored will be used to infer
/// the pointer type.
store_to_inferred_ptr,
/// String Literal. Makes an anonymous Decl and then takes a pointer to it.
@@ -286,11 +288,11 @@ pub const Inst = struct {
typeof_peer,
/// Asserts control-flow will not reach this instruction. Not safety checked - the compiler
/// will assume the correctness of this instruction.
- unreach_nocheck,
+ unreachable_unsafe,
/// Asserts control-flow will not reach this instruction. In safety-checked modes,
/// this will generate a call to the panic function unless it can be proven unreachable
/// by the compiler.
- @"unreachable",
+ unreachable_safe,
/// Bitwise XOR. `^`
xor,
/// Create an optional type '?T'
@@ -352,17 +354,17 @@ pub const Inst = struct {
.alloc_inferred_mut,
.breakpoint,
.dbg_stmt,
- .returnvoid,
+ .return_void,
.ret_ptr,
.ret_type,
- .unreach_nocheck,
- .@"unreachable",
+ .unreachable_unsafe,
+ .unreachable_safe,
=> NoOp,
.alloc,
.alloc_mut,
- .boolnot,
- .compileerror,
+ .bool_not,
+ .compile_error,
.deref,
.@"return",
.is_null,
@@ -400,7 +402,7 @@ pub const Inst = struct {
.err_union_code_ptr,
.ensure_err_payload_void,
.anyframe_type,
- .bitnot,
+ .bit_not,
.import,
.set_eval_branch_quota,
.indexable_ptr_len,
@@ -411,10 +413,10 @@ pub const Inst = struct {
.array_cat,
.array_mul,
.array_type,
- .bitand,
- .bitor,
- .booland,
- .boolor,
+ .bit_and,
+ .bit_or,
+ .bool_and,
+ .bool_or,
.div,
.mod_rem,
.mul,
@@ -422,6 +424,7 @@ pub const Inst = struct {
.shl,
.shr,
.store,
+ .store_to_block_ptr,
.store_to_inferred_ptr,
.sub,
.subwrap,
@@ -452,19 +455,18 @@ pub const Inst = struct {
.arg => Arg,
.array_type_sentinel => ArrayTypeSentinel,
.@"break" => Break,
- .breakvoid => BreakVoid,
+ .break_void => BreakVoid,
.call => Call,
- .coerce_to_ptr_elem => CoerceToPtrElem,
- .declref => DeclRef,
- .declref_str => DeclRefStr,
- .declval => DeclVal,
+ .decl_ref => DeclRef,
+ .decl_ref_str => DeclRefStr,
+ .decl_val => DeclVal,
.coerce_result_block_ptr => CoerceResultBlockPtr,
- .compilelog => CompileLog,
+ .compile_log => CompileLog,
.loop => Loop,
.@"const" => Const,
.str => Str,
.int => Int,
- .inttype => IntType,
+ .int_type => IntType,
.field_ptr, .field_val => Field,
.field_ptr_named, .field_val_named => FieldNamed,
.@"asm" => Asm,
@@ -508,18 +510,18 @@ pub const Inst = struct {
.arg,
.as,
.@"asm",
- .bitand,
+ .bit_and,
.bitcast,
.bitcast_ref,
.bitcast_result_ptr,
- .bitor,
+ .bit_or,
.block,
.block_flat,
.block_comptime,
.block_comptime_flat,
- .boolnot,
- .booland,
- .boolor,
+ .bool_not,
+ .bool_and,
+ .bool_or,
.breakpoint,
.call,
.cmp_lt,
@@ -530,12 +532,11 @@ pub const Inst = struct {
.cmp_neq,
.coerce_result_ptr,
.coerce_result_block_ptr,
- .coerce_to_ptr_elem,
.@"const",
.dbg_stmt,
- .declref,
- .declref_str,
- .declval,
+ .decl_ref,
+ .decl_ref_str,
+ .decl_val,
.deref,
.div,
.elem_ptr,
@@ -552,7 +553,7 @@ pub const Inst = struct {
.fntype,
.int,
.intcast,
- .inttype,
+ .int_type,
.is_non_null,
.is_null,
.is_non_null_ptr,
@@ -579,6 +580,7 @@ pub const Inst = struct {
.mut_slice_type,
.const_slice_type,
.store,
+ .store_to_block_ptr,
.store_to_inferred_ptr,
.str,
.sub,
@@ -602,7 +604,7 @@ pub const Inst = struct {
.merge_error_sets,
.anyframe_type,
.error_union_type,
- .bitnot,
+ .bit_not,
.error_set,
.slice,
.slice_start,
@@ -611,20 +613,20 @@ pub const Inst = struct {
.typeof_peer,
.resolve_inferred_alloc,
.set_eval_branch_quota,
- .compilelog,
+ .compile_log,
.enum_type,
.union_type,
.struct_type,
=> false,
.@"break",
- .breakvoid,
+ .break_void,
.condbr,
- .compileerror,
+ .compile_error,
.@"return",
- .returnvoid,
- .unreach_nocheck,
- .@"unreachable",
+ .return_void,
+ .unreachable_unsafe,
+ .unreachable_safe,
.loop,
.switchbr,
.container_field_named,
@@ -717,7 +719,7 @@ pub const Inst = struct {
};
pub const BreakVoid = struct {
- pub const base_tag = Tag.breakvoid;
+ pub const base_tag = Tag.break_void;
base: Inst,
positionals: struct {
@@ -739,19 +741,8 @@ pub const Inst = struct {
},
};
- pub const CoerceToPtrElem = struct {
- pub const base_tag = Tag.coerce_to_ptr_elem;
- base: Inst,
-
- positionals: struct {
- ptr: *Inst,
- value: *Inst,
- },
- kw_args: struct {},
- };
-
pub const DeclRef = struct {
- pub const base_tag = Tag.declref;
+ pub const base_tag = Tag.decl_ref;
base: Inst,
positionals: struct {
@@ -761,7 +752,7 @@ pub const Inst = struct {
};
pub const DeclRefStr = struct {
- pub const base_tag = Tag.declref_str;
+ pub const base_tag = Tag.decl_ref_str;
base: Inst,
positionals: struct {
@@ -771,7 +762,7 @@ pub const Inst = struct {
};
pub const DeclVal = struct {
- pub const base_tag = Tag.declval;
+ pub const base_tag = Tag.decl_val;
base: Inst,
positionals: struct {
@@ -786,13 +777,13 @@ pub const Inst = struct {
positionals: struct {
dest_type: *Inst,
- block: *Block,
+ block_ptr: *Inst,
},
kw_args: struct {},
};
pub const CompileLog = struct {
- pub const base_tag = Tag.compilelog;
+ pub const base_tag = Tag.compile_log;
base: Inst,
positionals: struct {
@@ -905,7 +896,7 @@ pub const Inst = struct {
};
pub const IntType = struct {
- pub const base_tag = Tag.inttype;
+ pub const base_tag = Tag.int_type;
base: Inst,
positionals: struct {
@@ -1641,10 +1632,10 @@ const DumpTzir = struct {
.cmp_gt,
.cmp_neq,
.store,
- .booland,
- .boolor,
- .bitand,
- .bitor,
+ .bool_and,
+ .bool_or,
+ .bit_and,
+ .bit_or,
.xor,
=> {
const bin_op = inst.cast(ir.Inst.BinOp).?;
@@ -1753,10 +1744,10 @@ const DumpTzir = struct {
.cmp_gt,
.cmp_neq,
.store,
- .booland,
- .boolor,
- .bitand,
- .bitor,
+ .bool_and,
+ .bool_or,
+ .bit_and,
+ .bit_or,
.xor,
=> {
const bin_op = inst.cast(ir.Inst.BinOp).?;
diff --git a/src/zir_sema.zig b/src/zir_sema.zig
index 0caaa2a03f..ca8255df94 100644
--- a/src/zir_sema.zig
+++ b/src/zir_sema.zig
@@ -28,144 +28,134 @@ const Decl = Module.Decl;
pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*Inst {
switch (old_inst.tag) {
- .alloc => return analyzeInstAlloc(mod, scope, old_inst.castTag(.alloc).?),
- .alloc_mut => return analyzeInstAllocMut(mod, scope, old_inst.castTag(.alloc_mut).?),
- .alloc_inferred => return analyzeInstAllocInferred(
- mod,
- scope,
- old_inst.castTag(.alloc_inferred).?,
- .inferred_alloc_const,
- ),
- .alloc_inferred_mut => return analyzeInstAllocInferred(
- mod,
- scope,
- old_inst.castTag(.alloc_inferred_mut).?,
- .inferred_alloc_mut,
- ),
- .arg => return analyzeInstArg(mod, scope, old_inst.castTag(.arg).?),
- .bitcast_ref => return bitCastRef(mod, scope, old_inst.castTag(.bitcast_ref).?),
- .bitcast_result_ptr => return bitCastResultPtr(mod, scope, old_inst.castTag(.bitcast_result_ptr).?),
- .block => return analyzeInstBlock(mod, scope, old_inst.castTag(.block).?, false),
- .block_comptime => return analyzeInstBlock(mod, scope, old_inst.castTag(.block_comptime).?, true),
- .block_flat => return analyzeInstBlockFlat(mod, scope, old_inst.castTag(.block_flat).?, false),
- .block_comptime_flat => return analyzeInstBlockFlat(mod, scope, old_inst.castTag(.block_comptime_flat).?, true),
- .@"break" => return analyzeInstBreak(mod, scope, old_inst.castTag(.@"break").?),
- .breakpoint => return analyzeInstBreakpoint(mod, scope, old_inst.castTag(.breakpoint).?),
- .breakvoid => return analyzeInstBreakVoid(mod, scope, old_inst.castTag(.breakvoid).?),
- .call => return call(mod, scope, old_inst.castTag(.call).?),
- .coerce_result_block_ptr => return analyzeInstCoerceResultBlockPtr(mod, scope, old_inst.castTag(.coerce_result_block_ptr).?),
- .coerce_result_ptr => return analyzeInstCoerceResultPtr(mod, scope, old_inst.castTag(.coerce_result_ptr).?),
- .coerce_to_ptr_elem => return analyzeInstCoerceToPtrElem(mod, scope, old_inst.castTag(.coerce_to_ptr_elem).?),
- .compileerror => return analyzeInstCompileError(mod, scope, old_inst.castTag(.compileerror).?),
- .compilelog => return analyzeInstCompileLog(mod, scope, old_inst.castTag(.compilelog).?),
- .@"const" => return analyzeInstConst(mod, scope, old_inst.castTag(.@"const").?),
- .dbg_stmt => return analyzeInstDbgStmt(mod, scope, old_inst.castTag(.dbg_stmt).?),
- .declref => return declRef(mod, scope, old_inst.castTag(.declref).?),
- .declref_str => return analyzeInstDeclRefStr(mod, scope, old_inst.castTag(.declref_str).?),
- .declval => return declVal(mod, scope, old_inst.castTag(.declval).?),
- .ensure_result_used => return analyzeInstEnsureResultUsed(mod, scope, old_inst.castTag(.ensure_result_used).?),
- .ensure_result_non_error => return analyzeInstEnsureResultNonError(mod, scope, old_inst.castTag(.ensure_result_non_error).?),
- .indexable_ptr_len => return indexablePtrLen(mod, scope, old_inst.castTag(.indexable_ptr_len).?),
- .ref => return ref(mod, scope, old_inst.castTag(.ref).?),
- .resolve_inferred_alloc => return analyzeInstResolveInferredAlloc(mod, scope, old_inst.castTag(.resolve_inferred_alloc).?),
- .ret_ptr => return analyzeInstRetPtr(mod, scope, old_inst.castTag(.ret_ptr).?),
- .ret_type => return analyzeInstRetType(mod, scope, old_inst.castTag(.ret_type).?),
- .store_to_inferred_ptr => return analyzeInstStoreToInferredPtr(mod, scope, old_inst.castTag(.store_to_inferred_ptr).?),
- .single_const_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.single_const_ptr_type).?, false, .One),
- .single_mut_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.single_mut_ptr_type).?, true, .One),
- .many_const_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.many_const_ptr_type).?, false, .Many),
- .many_mut_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.many_mut_ptr_type).?, true, .Many),
- .c_const_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.c_const_ptr_type).?, false, .C),
- .c_mut_ptr_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.c_mut_ptr_type).?, true, .C),
- .const_slice_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.const_slice_type).?, false, .Slice),
- .mut_slice_type => return analyzeInstSimplePtrType(mod, scope, old_inst.castTag(.mut_slice_type).?, true, .Slice),
- .ptr_type => return analyzeInstPtrType(mod, scope, old_inst.castTag(.ptr_type).?),
- .store => return analyzeInstStore(mod, scope, old_inst.castTag(.store).?),
- .set_eval_branch_quota => return analyzeInstSetEvalBranchQuota(mod, scope, old_inst.castTag(.set_eval_branch_quota).?),
- .str => return analyzeInstStr(mod, scope, old_inst.castTag(.str).?),
- .int => return analyzeInstInt(mod, scope, old_inst.castTag(.int).?),
- .inttype => return analyzeInstIntType(mod, scope, old_inst.castTag(.inttype).?),
- .loop => return analyzeInstLoop(mod, scope, old_inst.castTag(.loop).?),
- .param_type => return analyzeInstParamType(mod, scope, old_inst.castTag(.param_type).?),
- .ptrtoint => return analyzeInstPtrToInt(mod, scope, old_inst.castTag(.ptrtoint).?),
- .field_ptr => return fieldPtr(mod, scope, old_inst.castTag(.field_ptr).?),
- .field_val => return fieldVal(mod, scope, old_inst.castTag(.field_val).?),
- .field_ptr_named => return fieldPtrNamed(mod, scope, old_inst.castTag(.field_ptr_named).?),
- .field_val_named => return fieldValNamed(mod, scope, old_inst.castTag(.field_val_named).?),
- .deref => return analyzeInstDeref(mod, scope, old_inst.castTag(.deref).?),
- .as => return analyzeInstAs(mod, scope, old_inst.castTag(.as).?),
- .@"asm" => return analyzeInstAsm(mod, scope, old_inst.castTag(.@"asm").?),
- .@"unreachable" => return analyzeInstUnreachable(mod, scope, old_inst.castTag(.@"unreachable").?, true),
- .unreach_nocheck => return analyzeInstUnreachable(mod, scope, old_inst.castTag(.unreach_nocheck).?, false),
- .@"return" => return analyzeInstRet(mod, scope, old_inst.castTag(.@"return").?),
- .returnvoid => return analyzeInstRetVoid(mod, scope, old_inst.castTag(.returnvoid).?),
- .@"fn" => return analyzeInstFn(mod, scope, old_inst.castTag(.@"fn").?),
- .@"export" => return analyzeInstExport(mod, scope, old_inst.castTag(.@"export").?),
- .primitive => return analyzeInstPrimitive(mod, scope, old_inst.castTag(.primitive).?),
- .fntype => return analyzeInstFnType(mod, scope, old_inst.castTag(.fntype).?),
- .intcast => return analyzeInstIntCast(mod, scope, old_inst.castTag(.intcast).?),
- .bitcast => return analyzeInstBitCast(mod, scope, old_inst.castTag(.bitcast).?),
- .floatcast => return analyzeInstFloatCast(mod, scope, old_inst.castTag(.floatcast).?),
- .elem_ptr => return elemPtr(mod, scope, old_inst.castTag(.elem_ptr).?),
- .elem_val => return elemVal(mod, scope, old_inst.castTag(.elem_val).?),
- .add => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.add).?),
- .addwrap => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.addwrap).?),
- .sub => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.sub).?),
- .subwrap => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.subwrap).?),
- .mul => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.mul).?),
- .mulwrap => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.mulwrap).?),
- .div => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.div).?),
- .mod_rem => return analyzeInstArithmetic(mod, scope, old_inst.castTag(.mod_rem).?),
- .array_cat => return analyzeInstArrayCat(mod, scope, old_inst.castTag(.array_cat).?),
- .array_mul => return analyzeInstArrayMul(mod, scope, old_inst.castTag(.array_mul).?),
- .bitand => return analyzeInstBitwise(mod, scope, old_inst.castTag(.bitand).?),
- .bitnot => return analyzeInstBitNot(mod, scope, old_inst.castTag(.bitnot).?),
- .bitor => return analyzeInstBitwise(mod, scope, old_inst.castTag(.bitor).?),
- .xor => return analyzeInstBitwise(mod, scope, old_inst.castTag(.xor).?),
- .shl => return analyzeInstShl(mod, scope, old_inst.castTag(.shl).?),
- .shr => return analyzeInstShr(mod, scope, old_inst.castTag(.shr).?),
- .cmp_lt => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_lt).?, .lt),
- .cmp_lte => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_lte).?, .lte),
- .cmp_eq => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_eq).?, .eq),
- .cmp_gte => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_gte).?, .gte),
- .cmp_gt => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_gt).?, .gt),
- .cmp_neq => return analyzeInstCmp(mod, scope, old_inst.castTag(.cmp_neq).?, .neq),
- .condbr => return analyzeInstCondBr(mod, scope, old_inst.castTag(.condbr).?),
- .is_null => return isNull(mod, scope, old_inst.castTag(.is_null).?, false),
- .is_non_null => return isNull(mod, scope, old_inst.castTag(.is_non_null).?, true),
- .is_null_ptr => return isNullPtr(mod, scope, old_inst.castTag(.is_null_ptr).?, false),
- .is_non_null_ptr => return isNullPtr(mod, scope, old_inst.castTag(.is_non_null_ptr).?, true),
- .is_err => return isErr(mod, scope, old_inst.castTag(.is_err).?),
- .is_err_ptr => return isErrPtr(mod, scope, old_inst.castTag(.is_err_ptr).?),
- .boolnot => return analyzeInstBoolNot(mod, scope, old_inst.castTag(.boolnot).?),
- .typeof => return analyzeInstTypeOf(mod, scope, old_inst.castTag(.typeof).?),
- .typeof_peer => return analyzeInstTypeOfPeer(mod, scope, old_inst.castTag(.typeof_peer).?),
- .optional_type => return analyzeInstOptionalType(mod, scope, old_inst.castTag(.optional_type).?),
- .optional_payload_safe => return optionalPayload(mod, scope, old_inst.castTag(.optional_payload_safe).?, true),
- .optional_payload_unsafe => return optionalPayload(mod, scope, old_inst.castTag(.optional_payload_unsafe).?, false),
- .optional_payload_safe_ptr => return optionalPayloadPtr(mod, scope, old_inst.castTag(.optional_payload_safe_ptr).?, true),
- .optional_payload_unsafe_ptr => return optionalPayloadPtr(mod, scope, old_inst.castTag(.optional_payload_unsafe_ptr).?, false),
- .err_union_payload_safe => return errorUnionPayload(mod, scope, old_inst.castTag(.err_union_payload_safe).?, true),
- .err_union_payload_unsafe => return errorUnionPayload(mod, scope, old_inst.castTag(.err_union_payload_unsafe).?, false),
- .err_union_payload_safe_ptr => return errorUnionPayloadPtr(mod, scope, old_inst.castTag(.err_union_payload_safe_ptr).?, true),
- .err_union_payload_unsafe_ptr => return errorUnionPayloadPtr(mod, scope, old_inst.castTag(.err_union_payload_unsafe_ptr).?, false),
- .err_union_code => return errorUnionCode(mod, scope, old_inst.castTag(.err_union_code).?),
- .err_union_code_ptr => return errorUnionCodePtr(mod, scope, old_inst.castTag(.err_union_code_ptr).?),
- .ensure_err_payload_void => return analyzeInstEnsureErrPayloadVoid(mod, scope, old_inst.castTag(.ensure_err_payload_void).?),
- .array_type => return analyzeInstArrayType(mod, scope, old_inst.castTag(.array_type).?),
- .array_type_sentinel => return analyzeInstArrayTypeSentinel(mod, scope, old_inst.castTag(.array_type_sentinel).?),
- .enum_literal => return analyzeInstEnumLiteral(mod, scope, old_inst.castTag(.enum_literal).?),
- .merge_error_sets => return analyzeInstMergeErrorSets(mod, scope, old_inst.castTag(.merge_error_sets).?),
- .error_union_type => return analyzeInstErrorUnionType(mod, scope, old_inst.castTag(.error_union_type).?),
- .anyframe_type => return analyzeInstAnyframeType(mod, scope, old_inst.castTag(.anyframe_type).?),
- .error_set => return analyzeInstErrorSet(mod, scope, old_inst.castTag(.error_set).?),
- .slice => return analyzeInstSlice(mod, scope, old_inst.castTag(.slice).?),
- .slice_start => return analyzeInstSliceStart(mod, scope, old_inst.castTag(.slice_start).?),
- .import => return analyzeInstImport(mod, scope, old_inst.castTag(.import).?),
- .switchbr => return analyzeInstSwitchBr(mod, scope, old_inst.castTag(.switchbr).?),
- .switch_range => return analyzeInstSwitchRange(mod, scope, old_inst.castTag(.switch_range).?),
- .booland => return analyzeInstBoolOp(mod, scope, old_inst.castTag(.booland).?),
- .boolor => return analyzeInstBoolOp(mod, scope, old_inst.castTag(.boolor).?),
+ .alloc => return zirAlloc(mod, scope, old_inst.castTag(.alloc).?),
+ .alloc_mut => return zirAllocMut(mod, scope, old_inst.castTag(.alloc_mut).?),
+ .alloc_inferred => return zirAllocInferred(mod, scope, old_inst.castTag(.alloc_inferred).?, .inferred_alloc_const),
+ .alloc_inferred_mut => return zirAllocInferred(mod, scope, old_inst.castTag(.alloc_inferred_mut).?, .inferred_alloc_mut),
+ .arg => return zirArg(mod, scope, old_inst.castTag(.arg).?),
+ .bitcast_ref => return zirBitcastRef(mod, scope, old_inst.castTag(.bitcast_ref).?),
+ .bitcast_result_ptr => return zirBitcastResultPtr(mod, scope, old_inst.castTag(.bitcast_result_ptr).?),
+ .block => return zirBlock(mod, scope, old_inst.castTag(.block).?, false),
+ .block_comptime => return zirBlock(mod, scope, old_inst.castTag(.block_comptime).?, true),
+ .block_flat => return zirBlockFlat(mod, scope, old_inst.castTag(.block_flat).?, false),
+ .block_comptime_flat => return zirBlockFlat(mod, scope, old_inst.castTag(.block_comptime_flat).?, true),
+ .@"break" => return zirBreak(mod, scope, old_inst.castTag(.@"break").?),
+ .breakpoint => return zirBreakpoint(mod, scope, old_inst.castTag(.breakpoint).?),
+ .break_void => return zirBreakVoid(mod, scope, old_inst.castTag(.break_void).?),
+ .call => return zirCall(mod, scope, old_inst.castTag(.call).?),
+ .coerce_result_block_ptr => return zirCoerceResultBlockPtr(mod, scope, old_inst.castTag(.coerce_result_block_ptr).?),
+ .coerce_result_ptr => return zirCoerceResultPtr(mod, scope, old_inst.castTag(.coerce_result_ptr).?),
+ .compile_error => return zirCompileError(mod, scope, old_inst.castTag(.compile_error).?),
+ .compile_log => return zirCompileLog(mod, scope, old_inst.castTag(.compile_log).?),
+ .@"const" => return zirConst(mod, scope, old_inst.castTag(.@"const").?),
+ .dbg_stmt => return zirDbgStmt(mod, scope, old_inst.castTag(.dbg_stmt).?),
+ .decl_ref => return zirDeclRef(mod, scope, old_inst.castTag(.decl_ref).?),
+ .decl_ref_str => return zirDeclRefStr(mod, scope, old_inst.castTag(.decl_ref_str).?),
+ .decl_val => return zirDeclVal(mod, scope, old_inst.castTag(.decl_val).?),
+ .ensure_result_used => return zirEnsureResultUsed(mod, scope, old_inst.castTag(.ensure_result_used).?),
+ .ensure_result_non_error => return zirEnsureResultNonError(mod, scope, old_inst.castTag(.ensure_result_non_error).?),
+ .indexable_ptr_len => return zirIndexablePtrLen(mod, scope, old_inst.castTag(.indexable_ptr_len).?),
+ .ref => return zirRef(mod, scope, old_inst.castTag(.ref).?),
+ .resolve_inferred_alloc => return zirResolveInferredAlloc(mod, scope, old_inst.castTag(.resolve_inferred_alloc).?),
+ .ret_ptr => return zirRetPtr(mod, scope, old_inst.castTag(.ret_ptr).?),
+ .ret_type => return zirRetType(mod, scope, old_inst.castTag(.ret_type).?),
+ .store_to_block_ptr => return zirStoreToBlockPtr(mod, scope, old_inst.castTag(.store_to_block_ptr).?),
+ .store_to_inferred_ptr => return zirStoreToInferredPtr(mod, scope, old_inst.castTag(.store_to_inferred_ptr).?),
+ .single_const_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.single_const_ptr_type).?, false, .One),
+ .single_mut_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.single_mut_ptr_type).?, true, .One),
+ .many_const_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.many_const_ptr_type).?, false, .Many),
+ .many_mut_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.many_mut_ptr_type).?, true, .Many),
+ .c_const_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.c_const_ptr_type).?, false, .C),
+ .c_mut_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.c_mut_ptr_type).?, true, .C),
+ .const_slice_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.const_slice_type).?, false, .Slice),
+ .mut_slice_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.mut_slice_type).?, true, .Slice),
+ .ptr_type => return zirPtrType(mod, scope, old_inst.castTag(.ptr_type).?),
+ .store => return zirStore(mod, scope, old_inst.castTag(.store).?),
+ .set_eval_branch_quota => return zirSetEvalBranchQuota(mod, scope, old_inst.castTag(.set_eval_branch_quota).?),
+ .str => return zirStr(mod, scope, old_inst.castTag(.str).?),
+ .int => return zirInt(mod, scope, old_inst.castTag(.int).?),
+ .int_type => return zirIntType(mod, scope, old_inst.castTag(.int_type).?),
+ .loop => return zirLoop(mod, scope, old_inst.castTag(.loop).?),
+ .param_type => return zirParamType(mod, scope, old_inst.castTag(.param_type).?),
+ .ptrtoint => return zirPtrtoint(mod, scope, old_inst.castTag(.ptrtoint).?),
+ .field_ptr => return zirFieldPtr(mod, scope, old_inst.castTag(.field_ptr).?),
+ .field_val => return zirFieldVal(mod, scope, old_inst.castTag(.field_val).?),
+ .field_ptr_named => return zirFieldPtrNamed(mod, scope, old_inst.castTag(.field_ptr_named).?),
+ .field_val_named => return zirFieldValNamed(mod, scope, old_inst.castTag(.field_val_named).?),
+ .deref => return zirDeref(mod, scope, old_inst.castTag(.deref).?),
+ .as => return zirAs(mod, scope, old_inst.castTag(.as).?),
+ .@"asm" => return zirAsm(mod, scope, old_inst.castTag(.@"asm").?),
+ .unreachable_safe => return zirUnreachable(mod, scope, old_inst.castTag(.unreachable_safe).?, true),
+ .unreachable_unsafe => return zirUnreachable(mod, scope, old_inst.castTag(.unreachable_unsafe).?, false),
+ .@"return" => return zirReturn(mod, scope, old_inst.castTag(.@"return").?),
+ .return_void => return zirReturnVoid(mod, scope, old_inst.castTag(.return_void).?),
+ .@"fn" => return zirFn(mod, scope, old_inst.castTag(.@"fn").?),
+ .@"export" => return zirExport(mod, scope, old_inst.castTag(.@"export").?),
+ .primitive => return zirPrimitive(mod, scope, old_inst.castTag(.primitive).?),
+ .fntype => return zirFnType(mod, scope, old_inst.castTag(.fntype).?),
+ .intcast => return zirIntcast(mod, scope, old_inst.castTag(.intcast).?),
+ .bitcast => return zirBitcast(mod, scope, old_inst.castTag(.bitcast).?),
+ .floatcast => return zirFloatcast(mod, scope, old_inst.castTag(.floatcast).?),
+ .elem_ptr => return zirElemPtr(mod, scope, old_inst.castTag(.elem_ptr).?),
+ .elem_val => return zirElemVal(mod, scope, old_inst.castTag(.elem_val).?),
+ .add => return zirArithmetic(mod, scope, old_inst.castTag(.add).?),
+ .addwrap => return zirArithmetic(mod, scope, old_inst.castTag(.addwrap).?),
+ .sub => return zirArithmetic(mod, scope, old_inst.castTag(.sub).?),
+ .subwrap => return zirArithmetic(mod, scope, old_inst.castTag(.subwrap).?),
+ .mul => return zirArithmetic(mod, scope, old_inst.castTag(.mul).?),
+ .mulwrap => return zirArithmetic(mod, scope, old_inst.castTag(.mulwrap).?),
+ .div => return zirArithmetic(mod, scope, old_inst.castTag(.div).?),
+ .mod_rem => return zirArithmetic(mod, scope, old_inst.castTag(.mod_rem).?),
+ .array_cat => return zirArrayCat(mod, scope, old_inst.castTag(.array_cat).?),
+ .array_mul => return zirArrayMul(mod, scope, old_inst.castTag(.array_mul).?),
+ .bit_and => return zirBitwise(mod, scope, old_inst.castTag(.bit_and).?),
+ .bit_not => return zirBitNot(mod, scope, old_inst.castTag(.bit_not).?),
+ .bit_or => return zirBitwise(mod, scope, old_inst.castTag(.bit_or).?),
+ .xor => return zirBitwise(mod, scope, old_inst.castTag(.xor).?),
+ .shl => return zirShl(mod, scope, old_inst.castTag(.shl).?),
+ .shr => return zirShr(mod, scope, old_inst.castTag(.shr).?),
+ .cmp_lt => return zirCmp(mod, scope, old_inst.castTag(.cmp_lt).?, .lt),
+ .cmp_lte => return zirCmp(mod, scope, old_inst.castTag(.cmp_lte).?, .lte),
+ .cmp_eq => return zirCmp(mod, scope, old_inst.castTag(.cmp_eq).?, .eq),
+ .cmp_gte => return zirCmp(mod, scope, old_inst.castTag(.cmp_gte).?, .gte),
+ .cmp_gt => return zirCmp(mod, scope, old_inst.castTag(.cmp_gt).?, .gt),
+ .cmp_neq => return zirCmp(mod, scope, old_inst.castTag(.cmp_neq).?, .neq),
+ .condbr => return zirCondbr(mod, scope, old_inst.castTag(.condbr).?),
+ .is_null => return zirIsNull(mod, scope, old_inst.castTag(.is_null).?, false),
+ .is_non_null => return zirIsNull(mod, scope, old_inst.castTag(.is_non_null).?, true),
+ .is_null_ptr => return zirIsNullPtr(mod, scope, old_inst.castTag(.is_null_ptr).?, false),
+ .is_non_null_ptr => return zirIsNullPtr(mod, scope, old_inst.castTag(.is_non_null_ptr).?, true),
+ .is_err => return zirIsErr(mod, scope, old_inst.castTag(.is_err).?),
+ .is_err_ptr => return zirIsErrPtr(mod, scope, old_inst.castTag(.is_err_ptr).?),
+ .bool_not => return zirBoolNot(mod, scope, old_inst.castTag(.bool_not).?),
+ .typeof => return zirTypeof(mod, scope, old_inst.castTag(.typeof).?),
+ .typeof_peer => return zirTypeofPeer(mod, scope, old_inst.castTag(.typeof_peer).?),
+ .optional_type => return zirOptionalType(mod, scope, old_inst.castTag(.optional_type).?),
+ .optional_payload_safe => return zirOptionalPayload(mod, scope, old_inst.castTag(.optional_payload_safe).?, true),
+ .optional_payload_unsafe => return zirOptionalPayload(mod, scope, old_inst.castTag(.optional_payload_unsafe).?, false),
+ .optional_payload_safe_ptr => return zirOptionalPayloadPtr(mod, scope, old_inst.castTag(.optional_payload_safe_ptr).?, true),
+ .optional_payload_unsafe_ptr => return zirOptionalPayloadPtr(mod, scope, old_inst.castTag(.optional_payload_unsafe_ptr).?, false),
+ .err_union_payload_safe => return zirErrUnionPayload(mod, scope, old_inst.castTag(.err_union_payload_safe).?, true),
+ .err_union_payload_unsafe => return zirErrUnionPayload(mod, scope, old_inst.castTag(.err_union_payload_unsafe).?, false),
+ .err_union_payload_safe_ptr => return zirErrUnionPayloadPtr(mod, scope, old_inst.castTag(.err_union_payload_safe_ptr).?, true),
+ .err_union_payload_unsafe_ptr => return zirErrUnionPayloadPtr(mod, scope, old_inst.castTag(.err_union_payload_unsafe_ptr).?, false),
+ .err_union_code => return zirErrUnionCode(mod, scope, old_inst.castTag(.err_union_code).?),
+ .err_union_code_ptr => return zirErrUnionCodePtr(mod, scope, old_inst.castTag(.err_union_code_ptr).?),
+ .ensure_err_payload_void => return zirEnsureErrPayloadVoid(mod, scope, old_inst.castTag(.ensure_err_payload_void).?),
+ .array_type => return zirArrayType(mod, scope, old_inst.castTag(.array_type).?),
+ .array_type_sentinel => return zirArrayTypeSentinel(mod, scope, old_inst.castTag(.array_type_sentinel).?),
+ .enum_literal => return zirEnumLiteral(mod, scope, old_inst.castTag(.enum_literal).?),
+ .merge_error_sets => return zirMergeErrorSets(mod, scope, old_inst.castTag(.merge_error_sets).?),
+ .error_union_type => return zirErrorUnionType(mod, scope, old_inst.castTag(.error_union_type).?),
+ .anyframe_type => return zirAnyframeType(mod, scope, old_inst.castTag(.anyframe_type).?),
+ .error_set => return zirErrorSet(mod, scope, old_inst.castTag(.error_set).?),
+ .slice => return zirSlice(mod, scope, old_inst.castTag(.slice).?),
+ .slice_start => return zirSliceStart(mod, scope, old_inst.castTag(.slice_start).?),
+ .import => return zirImport(mod, scope, old_inst.castTag(.import).?),
+ .switchbr => return zirSwitchbr(mod, scope, old_inst.castTag(.switchbr).?),
+ .switch_range => return zirSwitchRange(mod, scope, old_inst.castTag(.switch_range).?),
+ .bool_and => return zirBoolOp(mod, scope, old_inst.castTag(.bool_and).?),
+ .bool_or => return zirBoolOp(mod, scope, old_inst.castTag(.bool_or).?),
.container_field_named,
.container_field_typed,
@@ -258,7 +248,7 @@ pub fn resolveInstConst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerE
};
}
-fn analyzeInstConst(mod: *Module, scope: *Scope, const_inst: *zir.Inst.Const) InnerError!*Inst {
+fn zirConst(mod: *Module, scope: *Scope, const_inst: *zir.Inst.Const) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// Move the TypedValue from old memory to new memory. This allows freeing the ZIR instructions
@@ -275,44 +265,35 @@ fn analyzeConstInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError
};
}
-fn analyzeInstCoerceResultBlockPtr(
+fn zirCoerceResultBlockPtr(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.CoerceResultBlockPtr,
) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstCoerceResultBlockPtr", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirCoerceResultBlockPtr", .{});
}
-fn bitCastRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirBitcastRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement zir_sema.bitCastRef", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zir_sema.zirBitcastRef", .{});
}
-fn bitCastResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirBitcastResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement zir_sema.bitCastResultPtr", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zir_sema.zirBitcastResultPtr", .{});
}
-fn analyzeInstCoerceResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirCoerceResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstCoerceResultPtr", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirCoerceResultPtr", .{});
}
-/// Equivalent to `as(ptr_child_type(typeof(ptr)), value)`.
-fn analyzeInstCoerceToPtrElem(mod: *Module, scope: *Scope, inst: *zir.Inst.CoerceToPtrElem) InnerError!*Inst {
- const tracy = trace(@src());
- defer tracy.end();
- const ptr = try resolveInst(mod, scope, inst.positionals.ptr);
- const operand = try resolveInst(mod, scope, inst.positionals.value);
- return mod.coerce(scope, ptr.ty.elemType(), operand);
-}
-
-fn analyzeInstRetPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
+fn zirRetPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
@@ -322,7 +303,7 @@ fn analyzeInstRetPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerErr
return mod.addNoOp(b, inst.base.src, ptr_type, .alloc);
}
-fn ref(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -330,7 +311,7 @@ fn ref(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
return mod.analyzeRef(scope, inst.base.src, operand);
}
-fn analyzeInstRetType(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
+fn zirRetType(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
@@ -339,7 +320,7 @@ fn analyzeInstRetType(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerEr
return mod.constType(scope, inst.base.src, ret_type);
}
-fn analyzeInstEnsureResultUsed(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirEnsureResultUsed(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
@@ -349,7 +330,7 @@ fn analyzeInstEnsureResultUsed(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp
}
}
-fn analyzeInstEnsureResultNonError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirEnsureResultNonError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
@@ -359,7 +340,7 @@ fn analyzeInstEnsureResultNonError(mod: *Module, scope: *Scope, inst: *zir.Inst.
}
}
-fn indexablePtrLen(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirIndexablePtrLen(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -389,7 +370,7 @@ fn indexablePtrLen(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError
return mod.analyzeDeref(scope, inst.base.src, result_ptr, result_ptr.src);
}
-fn analyzeInstAlloc(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirAlloc(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const var_type = try resolveType(mod, scope, inst.positionals.operand);
@@ -398,7 +379,7 @@ fn analyzeInstAlloc(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerErro
return mod.addNoOp(b, inst.base.src, ptr_type, .alloc);
}
-fn analyzeInstAllocMut(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirAllocMut(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const var_type = try resolveType(mod, scope, inst.positionals.operand);
@@ -408,7 +389,7 @@ fn analyzeInstAllocMut(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerE
return mod.addNoOp(b, inst.base.src, ptr_type, .alloc);
}
-fn analyzeInstAllocInferred(
+fn zirAllocInferred(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.NoOp,
@@ -437,7 +418,7 @@ fn analyzeInstAllocInferred(
return result;
}
-fn analyzeInstResolveInferredAlloc(
+fn zirResolveInferredAlloc(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.UnOp,
@@ -466,28 +447,44 @@ fn analyzeInstResolveInferredAlloc(
return mod.constVoid(scope, inst.base.src);
}
-fn analyzeInstStoreToInferredPtr(
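+/// Stores a value through a block's result location pointer. The pointer is
+/// first bitcast to a single-item mutable pointer of the value's type so the
+/// store is well-typed.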
+fn zirStoreToBlockPtr(
+    mod: *Module,
+    scope: *Scope,
+    inst: *zir.Inst.BinOp,
+) InnerError!*Inst {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
+    const value = try resolveInst(mod, scope, inst.positionals.rhs);
+    const ptr_ty = try mod.simplePtrType(scope, inst.base.src, value.ty, true, .One);
+    const b = try mod.requireRuntimeBlock(scope, inst.base.src);
+    const bitcasted_ptr = try mod.addUnOp(b, inst.base.src, ptr_ty, .bitcast, ptr);
+    return mod.storePtr(scope, inst.base.src, bitcasted_ptr, value);
+}
+
+fn zirStoreToInferredPtr(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.BinOp,
) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
const value = try resolveInst(mod, scope, inst.positionals.rhs);
const inferred_alloc = ptr.castTag(.constant).?.val.castTag(.inferred_alloc).?;
// Add the stored instruction to the set we will use to resolve peer types
// for the inferred allocation.
try inferred_alloc.data.stored_inst_list.append(scope.arena(), value);
- // Create a new alloc with exactly the type the pointer wants.
- // Later it gets cleaned up by aliasing the alloc we are supposed to be storing to.
+ // Create a runtime bitcast instruction with exactly the type the pointer wants.
const ptr_ty = try mod.simplePtrType(scope, inst.base.src, value.ty, true, .One);
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
const bitcasted_ptr = try mod.addUnOp(b, inst.base.src, ptr_ty, .bitcast, ptr);
return mod.storePtr(scope, inst.base.src, bitcasted_ptr, value);
}
-fn analyzeInstSetEvalBranchQuota(
+fn zirSetEvalBranchQuota(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.UnOp,
@@ -499,15 +496,16 @@ fn analyzeInstSetEvalBranchQuota(
return mod.constVoid(scope, inst.base.src);
}
-fn analyzeInstStore(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirStore(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
const value = try resolveInst(mod, scope, inst.positionals.rhs);
return mod.storePtr(scope, inst.base.src, ptr, value);
}
-fn analyzeInstParamType(mod: *Module, scope: *Scope, inst: *zir.Inst.ParamType) InnerError!*Inst {
+fn zirParamType(mod: *Module, scope: *Scope, inst: *zir.Inst.ParamType) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const fn_inst = try resolveInst(mod, scope, inst.positionals.func);
@@ -516,7 +514,7 @@ fn analyzeInstParamType(mod: *Module, scope: *Scope, inst: *zir.Inst.ParamType)
const fn_ty: Type = switch (fn_inst.ty.zigTypeTag()) {
.Fn => fn_inst.ty,
.BoundFn => {
- return mod.fail(scope, fn_inst.src, "TODO implement analyzeInstParamType for method call syntax", .{});
+ return mod.fail(scope, fn_inst.src, "TODO implement zirParamType for method call syntax", .{});
},
else => {
return mod.fail(scope, fn_inst.src, "expected function, found '{}'", .{fn_inst.ty});
@@ -538,7 +536,7 @@ fn analyzeInstParamType(mod: *Module, scope: *Scope, inst: *zir.Inst.ParamType)
return mod.constType(scope, inst.base.src, param_type);
}
-fn analyzeInstStr(mod: *Module, scope: *Scope, str_inst: *zir.Inst.Str) InnerError!*Inst {
+fn zirStr(mod: *Module, scope: *Scope, str_inst: *zir.Inst.Str) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// The bytes references memory inside the ZIR module, which can get deallocated
@@ -557,14 +555,14 @@ fn analyzeInstStr(mod: *Module, scope: *Scope, str_inst: *zir.Inst.Str) InnerErr
return mod.analyzeDeclRef(scope, str_inst.base.src, new_decl);
}
-fn analyzeInstInt(mod: *Module, scope: *Scope, inst: *zir.Inst.Int) InnerError!*Inst {
+fn zirInt(mod: *Module, scope: *Scope, inst: *zir.Inst.Int) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.constIntBig(scope, inst.base.src, Type.initTag(.comptime_int), inst.positionals.int);
}
-fn analyzeInstExport(mod: *Module, scope: *Scope, export_inst: *zir.Inst.Export) InnerError!*Inst {
+fn zirExport(mod: *Module, scope: *Scope, export_inst: *zir.Inst.Export) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const symbol_name = try resolveConstString(mod, scope, export_inst.positionals.symbol_name);
@@ -574,14 +572,14 @@ fn analyzeInstExport(mod: *Module, scope: *Scope, export_inst: *zir.Inst.Export)
return mod.constVoid(scope, export_inst.base.src);
}
-fn analyzeInstCompileError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirCompileError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const msg = try resolveConstString(mod, scope, inst.positionals.operand);
return mod.fail(scope, inst.base.src, "{s}", .{msg});
}
-fn analyzeInstCompileLog(mod: *Module, scope: *Scope, inst: *zir.Inst.CompileLog) InnerError!*Inst {
+fn zirCompileLog(mod: *Module, scope: *Scope, inst: *zir.Inst.CompileLog) InnerError!*Inst {
var managed = mod.compile_log_text.toManaged(mod.gpa);
defer mod.compile_log_text = managed.moveToUnmanaged();
const writer = managed.writer();
@@ -608,7 +606,7 @@ fn analyzeInstCompileLog(mod: *Module, scope: *Scope, inst: *zir.Inst.CompileLog
return mod.constVoid(scope, inst.base.src);
}
-fn analyzeInstArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
+fn zirArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
@@ -631,7 +629,7 @@ fn analyzeInstArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*
return mod.addArg(b, inst.base.src, param_type, name);
}
-fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError!*Inst {
+fn zirLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const parent_block = scope.cast(Scope.Block).?;
@@ -672,7 +670,7 @@ fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError
return &loop_inst.base;
}
-fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst {
+fn zirBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const parent_block = scope.cast(Scope.Block).?;
@@ -704,9 +702,15 @@ fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_c
return resolveInst(mod, scope, last_zir_inst);
}
-fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst {
+fn zirBlock(
+    mod: *Module,
+    scope: *Scope,
+    inst: *zir.Inst.Block,
+    is_comptime: bool,
+) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
const parent_block = scope.cast(Scope.Block).?;
// Reserve space for a Block instruction so that generated Break instructions can
@@ -798,30 +802,52 @@ fn analyzeBlockBody(
return &merges.block_inst.base;
}
-fn analyzeInstBreakpoint(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
+fn zirBreakpoint(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
return mod.addNoOp(b, inst.base.src, Type.initTag(.void), .breakpoint);
}
-fn analyzeInstBreak(mod: *Module, scope: *Scope, inst: *zir.Inst.Break) InnerError!*Inst {
+fn zirBreak(mod: *Module, scope: *Scope, inst: *zir.Inst.Break) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
const operand = try resolveInst(mod, scope, inst.positionals.operand);
const block = inst.positionals.block;
return analyzeBreak(mod, scope, inst.base.src, block, operand);
}
-fn analyzeInstBreakVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.BreakVoid) InnerError!*Inst {
+fn zirBreakVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.BreakVoid) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
const block = inst.positionals.block;
const void_inst = try mod.constVoid(scope, inst.base.src);
return analyzeBreak(mod, scope, inst.base.src, block, void_inst);
}
-fn analyzeInstDbgStmt(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
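+/// Walks the enclosing block scopes to find the labeled block that `zir_block`
+/// refers to, records `operand` in that block's merge results (used later for
+/// peer type resolution of the break operands), and emits a `br` to the
+/// block's merge point.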
+fn analyzeBreak(
+    mod: *Module,
+    scope: *Scope,
+    src: usize,
+    zir_block: *zir.Inst.Block,
+    operand: *Inst,
+) InnerError!*Inst {
+    var opt_block = scope.cast(Scope.Block);
+    while (opt_block) |block| {
+        if (block.label) |*label| {
+            if (label.zir_block == zir_block) {
+                try label.merges.results.append(mod.gpa, operand);
+                const b = try mod.requireFunctionBlock(scope, src);
+                return mod.addBr(b, src, label.merges.block_inst, operand);
+            }
+        }
+        opt_block = block.parent;
+    } else unreachable;
+}
+
+fn zirDbgStmt(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
if (scope.cast(Scope.Block)) |b| {
@@ -832,26 +858,26 @@ fn analyzeInstDbgStmt(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerEr
return mod.constVoid(scope, inst.base.src);
}
-fn analyzeInstDeclRefStr(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRefStr) InnerError!*Inst {
+fn zirDeclRefStr(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRefStr) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const decl_name = try resolveConstString(mod, scope, inst.positionals.name);
return mod.analyzeDeclRefByName(scope, inst.base.src, decl_name);
}
-fn declRef(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRef) InnerError!*Inst {
+fn zirDeclRef(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRef) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.analyzeDeclRef(scope, inst.base.src, inst.positionals.decl);
}
-fn declVal(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclVal) InnerError!*Inst {
+fn zirDeclVal(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclVal) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.analyzeDeclVal(scope, inst.base.src, inst.positionals.decl);
}
-fn call(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
+fn zirCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1002,7 +1028,7 @@ fn call(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
return mod.addCall(b, inst.base.src, ret_type, func, casted_args);
}
-fn analyzeInstFn(mod: *Module, scope: *Scope, fn_inst: *zir.Inst.Fn) InnerError!*Inst {
+fn zirFn(mod: *Module, scope: *Scope, fn_inst: *zir.Inst.Fn) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const fn_type = try resolveType(mod, scope, fn_inst.positionals.fn_type);
@@ -1019,13 +1045,13 @@ fn analyzeInstFn(mod: *Module, scope: *Scope, fn_inst: *zir.Inst.Fn) InnerError!
});
}
-fn analyzeInstIntType(mod: *Module, scope: *Scope, inttype: *zir.Inst.IntType) InnerError!*Inst {
+fn zirIntType(mod: *Module, scope: *Scope, inttype: *zir.Inst.IntType) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.fail(scope, inttype.base.src, "TODO implement inttype", .{});
}
-fn analyzeInstOptionalType(mod: *Module, scope: *Scope, optional: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirOptionalType(mod: *Module, scope: *Scope, optional: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const child_type = try resolveType(mod, scope, optional.positionals.operand);
@@ -1033,7 +1059,7 @@ fn analyzeInstOptionalType(mod: *Module, scope: *Scope, optional: *zir.Inst.UnOp
return mod.constType(scope, optional.base.src, try mod.optionalType(scope, child_type));
}
-fn analyzeInstArrayType(mod: *Module, scope: *Scope, array: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirArrayType(mod: *Module, scope: *Scope, array: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// TODO these should be lazily evaluated
@@ -1043,7 +1069,7 @@ fn analyzeInstArrayType(mod: *Module, scope: *Scope, array: *zir.Inst.BinOp) Inn
return mod.constType(scope, array.base.src, try mod.arrayType(scope, len.val.toUnsignedInt(), null, elem_type));
}
-fn analyzeInstArrayTypeSentinel(mod: *Module, scope: *Scope, array: *zir.Inst.ArrayTypeSentinel) InnerError!*Inst {
+fn zirArrayTypeSentinel(mod: *Module, scope: *Scope, array: *zir.Inst.ArrayTypeSentinel) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// TODO these should be lazily evaluated
@@ -1054,7 +1080,7 @@ fn analyzeInstArrayTypeSentinel(mod: *Module, scope: *Scope, array: *zir.Inst.Ar
return mod.constType(scope, array.base.src, try mod.arrayType(scope, len.val.toUnsignedInt(), sentinel.val, elem_type));
}
-fn analyzeInstErrorUnionType(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirErrorUnionType(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const error_union = try resolveType(mod, scope, inst.positionals.lhs);
@@ -1067,7 +1093,7 @@ fn analyzeInstErrorUnionType(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp)
return mod.constType(scope, inst.base.src, try mod.errorUnionType(scope, error_union, payload));
}
-fn analyzeInstAnyframeType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirAnyframeType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const return_type = try resolveType(mod, scope, inst.positionals.operand);
@@ -1075,7 +1101,7 @@ fn analyzeInstAnyframeType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) In
return mod.constType(scope, inst.base.src, try mod.anyframeType(scope, return_type));
}
-fn analyzeInstErrorSet(mod: *Module, scope: *Scope, inst: *zir.Inst.ErrorSet) InnerError!*Inst {
+fn zirErrorSet(mod: *Module, scope: *Scope, inst: *zir.Inst.ErrorSet) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// The declarations arena will store the hashmap.
@@ -1107,13 +1133,13 @@ fn analyzeInstErrorSet(mod: *Module, scope: *Scope, inst: *zir.Inst.ErrorSet) In
return mod.analyzeDeclVal(scope, inst.base.src, new_decl);
}
-fn analyzeInstMergeErrorSets(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirMergeErrorSets(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.fail(scope, inst.base.src, "TODO implement merge_error_sets", .{});
}
-fn analyzeInstEnumLiteral(mod: *Module, scope: *Scope, inst: *zir.Inst.EnumLiteral) InnerError!*Inst {
+fn zirEnumLiteral(mod: *Module, scope: *Scope, inst: *zir.Inst.EnumLiteral) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const duped_name = try scope.arena().dupe(u8, inst.positionals.name);
@@ -1124,7 +1150,7 @@ fn analyzeInstEnumLiteral(mod: *Module, scope: *Scope, inst: *zir.Inst.EnumLiter
}
/// Pointer in, pointer out.
-fn optionalPayloadPtr(
+fn zirOptionalPayloadPtr(
mod: *Module,
scope: *Scope,
unwrap: *zir.Inst.UnOp,
@@ -1165,7 +1191,7 @@ fn optionalPayloadPtr(
}
/// Value in, value out.
-fn optionalPayload(
+fn zirOptionalPayload(
mod: *Module,
scope: *Scope,
unwrap: *zir.Inst.UnOp,
@@ -1201,40 +1227,40 @@ fn optionalPayload(
}
/// Value in, value out
-fn errorUnionPayload(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst {
+fn zirErrUnionPayload(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.errorUnionPayload", .{});
+ return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.zirErrUnionPayload", .{});
}
/// Pointer in, pointer out
-fn errorUnionPayloadPtr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst {
+fn zirErrUnionPayloadPtr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.errorUnionPayloadPtr", .{});
+ return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.zirErrUnionPayloadPtr", .{});
}
/// Value in, value out
-fn errorUnionCode(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirErrUnionCode(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.errorUnionCode", .{});
+ return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.zirErrUnionCode", .{});
}
/// Pointer in, value out
-fn errorUnionCodePtr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirErrUnionCodePtr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.errorUnionCodePtr", .{});
+ return mod.fail(scope, unwrap.base.src, "TODO implement zir_sema.zirErrUnionCodePtr", .{});
}
-fn analyzeInstEnsureErrPayloadVoid(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirEnsureErrPayloadVoid(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, unwrap.base.src, "TODO implement analyzeInstEnsureErrPayloadVoid", .{});
+ return mod.fail(scope, unwrap.base.src, "TODO implement zirEnsureErrPayloadVoid", .{});
}
-fn analyzeInstFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*Inst {
+fn zirFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const return_type = try resolveType(mod, scope, fntype.positionals.return_type);
@@ -1277,13 +1303,13 @@ fn analyzeInstFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) Inne
return mod.constType(scope, fntype.base.src, fn_ty);
}
-fn analyzeInstPrimitive(mod: *Module, scope: *Scope, primitive: *zir.Inst.Primitive) InnerError!*Inst {
+fn zirPrimitive(mod: *Module, scope: *Scope, primitive: *zir.Inst.Primitive) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.constInst(scope, primitive.base.src, primitive.positionals.tag.toTypedValue());
}
-fn analyzeInstAs(mod: *Module, scope: *Scope, as: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirAs(mod: *Module, scope: *Scope, as: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const dest_type = try resolveType(mod, scope, as.positionals.lhs);
@@ -1291,7 +1317,7 @@ fn analyzeInstAs(mod: *Module, scope: *Scope, as: *zir.Inst.BinOp) InnerError!*I
return mod.coerce(scope, dest_type, new_inst);
}
-fn analyzeInstPtrToInt(mod: *Module, scope: *Scope, ptrtoint: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirPtrtoint(mod: *Module, scope: *Scope, ptrtoint: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, ptrtoint.positionals.operand);
@@ -1304,7 +1330,7 @@ fn analyzeInstPtrToInt(mod: *Module, scope: *Scope, ptrtoint: *zir.Inst.UnOp) In
return mod.addUnOp(b, ptrtoint.base.src, ty, .ptrtoint, ptr);
}
-fn fieldVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst {
+fn zirFieldVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1315,7 +1341,7 @@ fn fieldVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst
return mod.analyzeDeref(scope, inst.base.src, result_ptr, result_ptr.src);
}
-fn fieldPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst {
+fn zirFieldPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1324,7 +1350,7 @@ fn fieldPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Field) InnerError!*Inst
return mod.namedFieldPtr(scope, inst.base.src, object_ptr, field_name, inst.base.src);
}
-fn fieldValNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerError!*Inst {
+fn zirFieldValNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1336,7 +1362,7 @@ fn fieldValNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerE
return mod.analyzeDeref(scope, inst.base.src, result_ptr, result_ptr.src);
}
-fn fieldPtrNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerError!*Inst {
+fn zirFieldPtrNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1346,7 +1372,7 @@ fn fieldPtrNamed(mod: *Module, scope: *Scope, inst: *zir.Inst.FieldNamed) InnerE
return mod.namedFieldPtr(scope, inst.base.src, object_ptr, field_name, fsrc);
}
-fn analyzeInstIntCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirIntcast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const dest_type = try resolveType(mod, scope, inst.positionals.lhs);
@@ -1384,7 +1410,7 @@ fn analyzeInstIntCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerE
return mod.fail(scope, inst.base.src, "TODO implement analyze widen or shorten int", .{});
}
-fn analyzeInstBitCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirBitcast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const dest_type = try resolveType(mod, scope, inst.positionals.lhs);
@@ -1392,7 +1418,7 @@ fn analyzeInstBitCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerE
return mod.bitcast(scope, dest_type, operand);
}
-fn analyzeInstFloatCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirFloatcast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const dest_type = try resolveType(mod, scope, inst.positionals.lhs);
@@ -1430,7 +1456,7 @@ fn analyzeInstFloatCast(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) Inne
return mod.fail(scope, inst.base.src, "TODO implement analyze widen or shorten float", .{});
}
-fn elemVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
+fn zirElemVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1441,7 +1467,7 @@ fn elemVal(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
return mod.analyzeDeref(scope, inst.base.src, result_ptr, result_ptr.src);
}
-fn elemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
+fn zirElemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1450,7 +1476,7 @@ fn elemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.Elem) InnerError!*Inst {
return mod.elemPtr(scope, inst.base.src, array_ptr, elem_index);
}
-fn analyzeInstSlice(mod: *Module, scope: *Scope, inst: *zir.Inst.Slice) InnerError!*Inst {
+fn zirSlice(mod: *Module, scope: *Scope, inst: *zir.Inst.Slice) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const array_ptr = try resolveInst(mod, scope, inst.positionals.array_ptr);
@@ -1461,7 +1487,7 @@ fn analyzeInstSlice(mod: *Module, scope: *Scope, inst: *zir.Inst.Slice) InnerErr
return mod.analyzeSlice(scope, inst.base.src, array_ptr, start, end, sentinel);
}
-fn analyzeInstSliceStart(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirSliceStart(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const array_ptr = try resolveInst(mod, scope, inst.positionals.lhs);
@@ -1470,7 +1496,7 @@ fn analyzeInstSliceStart(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) Inn
return mod.analyzeSlice(scope, inst.base.src, array_ptr, start, null, null);
}
-fn analyzeInstSwitchRange(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirSwitchRange(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const start = try resolveInst(mod, scope, inst.positionals.lhs);
@@ -1494,7 +1520,7 @@ fn analyzeInstSwitchRange(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) In
return mod.constVoid(scope, inst.base.src);
}
-fn analyzeInstSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) InnerError!*Inst {
+fn zirSwitchbr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const target_ptr = try resolveInst(mod, scope, inst.positionals.target_ptr);
@@ -1698,7 +1724,7 @@ fn validateSwitch(mod: *Module, scope: *Scope, target: *Inst, inst: *zir.Inst.Sw
}
}
-fn analyzeInstImport(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirImport(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveConstString(mod, scope, inst.positionals.operand);
@@ -1718,19 +1744,19 @@ fn analyzeInstImport(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerErr
return mod.constType(scope, inst.base.src, file_scope.root_container.ty);
}
-fn analyzeInstShl(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirShl(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstShl", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirShl", .{});
}
-fn analyzeInstShr(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirShr(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstShr", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirShr", .{});
}
-fn analyzeInstBitwise(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirBitwise(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1784,8 +1810,8 @@ fn analyzeInstBitwise(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerE
    const b = try mod.requireRuntimeBlock(scope, inst.base.src);
    const ir_tag = switch (inst.base.tag) {
-        .bitand => Inst.Tag.bitand,
-        .bitor => Inst.Tag.bitor,
+        .bit_and => Inst.Tag.bit_and,
+        .bit_or => Inst.Tag.bit_or,
        .xor => Inst.Tag.xor,
        else => unreachable,
    };
@@ -1793,25 +1819,25 @@ fn analyzeInstBitwise(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerE
return mod.addBinOp(b, inst.base.src, scalar_type, ir_tag, casted_lhs, casted_rhs);
}
-fn analyzeInstBitNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirBitNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstBitNot", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirBitNot", .{});
}
-fn analyzeInstArrayCat(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirArrayCat(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstArrayCat", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirArrayCat", .{});
}
-fn analyzeInstArrayMul(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirArrayMul(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- return mod.fail(scope, inst.base.src, "TODO implement analyzeInstArrayMul", .{});
+ return mod.fail(scope, inst.base.src, "TODO implement zirArrayMul", .{});
}
-fn analyzeInstArithmetic(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirArithmetic(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -1912,14 +1938,14 @@ fn analyzeInstComptimeOp(mod: *Module, scope: *Scope, res_type: Type, inst: *zir
});
}
-fn analyzeInstDeref(mod: *Module, scope: *Scope, deref: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirDeref(mod: *Module, scope: *Scope, deref: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, deref.positionals.operand);
return mod.analyzeDeref(scope, deref.base.src, ptr, deref.positionals.operand.src);
}
-fn analyzeInstAsm(mod: *Module, scope: *Scope, assembly: *zir.Inst.Asm) InnerError!*Inst {
+fn zirAsm(mod: *Module, scope: *Scope, assembly: *zir.Inst.Asm) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const return_type = try resolveType(mod, scope, assembly.positionals.return_type);
@@ -1960,7 +1986,7 @@ fn analyzeInstAsm(mod: *Module, scope: *Scope, assembly: *zir.Inst.Asm) InnerErr
    return &inst.base;
}
-fn analyzeInstCmp(
+fn zirCmp(
    mod: *Module,
    scope: *Scope,
    inst: *zir.Inst.BinOp,
@@ -2018,14 +2044,14 @@ fn analyzeInstCmp(
return mod.fail(scope, inst.base.src, "TODO implement more cmp analysis", .{});
}
-fn analyzeInstTypeOf(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirTypeof(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
return mod.constType(scope, inst.base.src, operand.ty);
}
-fn analyzeInstTypeOfPeer(mod: *Module, scope: *Scope, inst: *zir.Inst.TypeOfPeer) InnerError!*Inst {
+fn zirTypeofPeer(mod: *Module, scope: *Scope, inst: *zir.Inst.TypeOfPeer) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
var insts_to_res = try mod.gpa.alloc(*ir.Inst, inst.positionals.items.len);
@@ -2037,7 +2063,7 @@ fn analyzeInstTypeOfPeer(mod: *Module, scope: *Scope, inst: *zir.Inst.TypeOfPeer
return mod.constType(scope, inst.base.src, pt_res);
}
-fn analyzeInstBoolNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirBoolNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const uncasted_operand = try resolveInst(mod, scope, inst.positionals.operand);
@@ -2050,7 +2076,7 @@ fn analyzeInstBoolNot(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerEr
return mod.addUnOp(b, inst.base.src, bool_type, .not, operand);
}
-fn analyzeInstBoolOp(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+fn zirBoolOp(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const bool_type = Type.initTag(.bool);
@@ -2059,7 +2085,7 @@ fn analyzeInstBoolOp(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerEr
    const uncasted_rhs = try resolveInst(mod, scope, inst.positionals.rhs);
    const rhs = try mod.coerce(scope, bool_type, uncasted_rhs);
-    const is_bool_or = inst.base.tag == .boolor;
+    const is_bool_or = inst.base.tag == .bool_or;
    if (lhs.value()) |lhs_val| {
        if (rhs.value()) |rhs_val| {
@@ -2071,17 +2097,17 @@ fn analyzeInstBoolOp(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerEr
        }
    }
    const b = try mod.requireRuntimeBlock(scope, inst.base.src);
-    return mod.addBinOp(b, inst.base.src, bool_type, if (is_bool_or) .boolor else .booland, lhs, rhs);
+    return mod.addBinOp(b, inst.base.src, bool_type, if (is_bool_or) .bool_or else .bool_and, lhs, rhs);
}
-fn isNull(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
+fn zirIsNull(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
return mod.analyzeIsNull(scope, inst.base.src, operand, invert_logic);
}
-fn isNullPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
+fn zirIsNullPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, inst.positionals.operand);
@@ -2089,14 +2115,14 @@ fn isNullPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bo
return mod.analyzeIsNull(scope, inst.base.src, loaded, invert_logic);
}
-fn isErr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirIsErr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
return mod.analyzeIsErr(scope, inst.base.src, operand);
}
-fn isErrPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirIsErrPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, inst.positionals.operand);
@@ -2104,7 +2130,7 @@ fn isErrPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst
return mod.analyzeIsErr(scope, inst.base.src, loaded);
}
-fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerError!*Inst {
+fn zirCondbr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const uncasted_cond = try resolveInst(mod, scope, inst.positionals.condition);
@@ -2153,7 +2179,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
    return mod.addCondBr(parent_block, inst.base.src, cond, then_body, else_body);
}
-fn analyzeInstUnreachable(
+fn zirUnreachable(
    mod: *Module,
    scope: *Scope,
    unreach: *zir.Inst.NoOp,
@@ -2170,7 +2196,7 @@ fn analyzeInstUnreachable(
}
}
-fn analyzeInstRet(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+fn zirReturn(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
@@ -2185,7 +2211,7 @@ fn analyzeInstRet(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!
return mod.addUnOp(b, inst.base.src, Type.initTag(.noreturn), .ret, operand);
}
-fn analyzeInstRetVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
+fn zirReturnVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
@@ -2216,27 +2242,7 @@ fn floatOpAllowed(tag: zir.Inst.Tag) bool {
    };
}
-fn analyzeBreak(
-    mod: *Module,
-    scope: *Scope,
-    src: usize,
-    zir_block: *zir.Inst.Block,
-    operand: *Inst,
-) InnerError!*Inst {
-    var opt_block = scope.cast(Scope.Block);
-    while (opt_block) |block| {
-        if (block.label) |*label| {
-            if (label.zir_block == zir_block) {
-                try label.merges.results.append(mod.gpa, operand);
-                const b = try mod.requireFunctionBlock(scope, src);
-                return mod.addBr(b, src, label.merges.block_inst, operand);
-            }
-        }
-        opt_block = block.parent;
-    } else unreachable;
-}
-
-fn analyzeInstSimplePtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, mutable: bool, size: std.builtin.TypeInfo.Pointer.Size) InnerError!*Inst {
+fn zirSimplePtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, mutable: bool, size: std.builtin.TypeInfo.Pointer.Size) InnerError!*Inst {
    const tracy = trace(@src());
    defer tracy.end();
    const elem_type = try resolveType(mod, scope, inst.positionals.operand);
@@ -2244,7 +2250,7 @@ fn analyzeInstSimplePtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, m
return mod.constType(scope, inst.base.src, ty);
}
-fn analyzeInstPtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.PtrType) InnerError!*Inst {
+fn zirPtrType(mod: *Module, scope: *Scope, inst: *zir.Inst.PtrType) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// TODO lazy values