| author | Jacob Young <jacobly0@users.noreply.github.com> | 2023-03-14 19:57:42 -0400 |
|---|---|---|
| committer | Jacob Young <jacobly0@users.noreply.github.com> | 2023-03-15 01:04:21 -0400 |
| commit | bb6b9c19e06320d8617f8858dde1ae6a7cf7fc5e (patch) | |
| tree | 4c97b97424677a7edb129b9044e4855ce8946e62 /src/arch | |
| parent | c51930b060cf66b21c78b97e53fd71b153a5e60c (diff) | |
| download | zig-bb6b9c19e06320d8617f8858dde1ae6a7cf7fc5e.tar.gz zig-bb6b9c19e06320d8617f8858dde1ae6a7cf7fc5e.zip | |
x86_64: fix lowering of non-pointer optional is null
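
A minimal sketch of the kind of code this fix targets, assuming a check like `x.* == null` on a pointer to a non-pointer-like optional reaches the `airIsNullPtr`/`airIsNonNullPtr` paths changed below; the function and values here are illustrative, not taken from the commit or its tests:

```zig
const std = @import("std");

// Hypothetical reproducer: `?u32` is not pointer-like (it carries a separate
// null tag), so the backend must load the optional into a temporary sized for
// the element type instead of reusing the operand pointer as the value.
fn isNullViaPtr(x: *const ?u32) bool {
    return x.* == null;
}

pub fn main() void {
    const a: ?u32 = 5;
    const b: ?u32 = null;
    std.debug.print("{} {}\n", .{ isNullViaPtr(&a), isNullViaPtr(&b) });
}
```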
Diffstat (limited to 'src/arch')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | src/arch/x86_64/CodeGen.zig | 43 |

1 file changed, 23 insertions, 20 deletions
diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig
index 12c5071462..588782d838 100644
--- a/src/arch/x86_64/CodeGen.zig
+++ b/src/arch/x86_64/CodeGen.zig
@@ -208,7 +208,7 @@ const Branch = struct {
 };
 
 const StackAllocation = struct {
-    inst: Air.Inst.Index,
+    inst: ?Air.Inst.Index,
     /// TODO do we need size? should be determined by inst.ty.abiSize(self.target.*)
     size: u32,
 };
@@ -1109,7 +1109,7 @@ fn ensureProcessDeathCapacity(self: *Self, additional_count: usize) !void {
     try table.ensureUnusedCapacity(self.gpa, additional_count);
 }
 
-fn allocMem(self: *Self, inst: Air.Inst.Index, abi_size: u32, abi_align: u32) !u32 {
+fn allocMem(self: *Self, inst: ?Air.Inst.Index, abi_size: u32, abi_align: u32) !u32 {
     if (abi_align > self.stack_align)
         self.stack_align = abi_align;
     // TODO find a free slot instead of always appending
@@ -1142,7 +1142,14 @@ fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
 }
 
 fn allocRegOrMem(self: *Self, inst: Air.Inst.Index, reg_ok: bool) !MCValue {
-    const elem_ty = self.air.typeOfIndex(inst);
+    return self.allocRegOrMemAdvanced(self.air.typeOfIndex(inst), inst, reg_ok);
+}
+
+fn allocTempRegOrMem(self: *Self, elem_ty: Type, reg_ok: bool) !MCValue {
+    return self.allocRegOrMemAdvanced(elem_ty, null, reg_ok);
+}
+
+fn allocRegOrMemAdvanced(self: *Self, elem_ty: Type, inst: ?Air.Inst.Index, reg_ok: bool) !MCValue {
     const abi_size = math.cast(u32, elem_ty.abiSize(self.target.*)) orelse {
         const mod = self.bin_file.options.module.?;
         return self.fail("type '{}' too big to fit into stack frame", .{elem_ty.fmt(mod)});
@@ -4571,18 +4578,16 @@ fn airIsNullPtr(self: *Self, inst: Air.Inst.Index) !void {
     };
     defer if (operand_ptr_lock) |lock| self.register_manager.unlockReg(lock);
 
-    const operand: MCValue = blk: {
-        if (self.reuseOperand(inst, un_op, 0, operand_ptr)) {
-            // The MCValue that holds the pointer can be re-used as the value.
-            break :blk operand_ptr;
-        } else {
-            break :blk try self.allocRegOrMem(inst, true);
-        }
-    };
     const ptr_ty = self.air.typeOf(un_op);
+    const elem_ty = ptr_ty.childType();
+    const operand = if (elem_ty.isPtrLikeOptional() and self.reuseOperand(inst, un_op, 0, operand_ptr))
+        // The MCValue that holds the pointer can be re-used as the value.
+        operand_ptr
+    else
+        try self.allocTempRegOrMem(elem_ty, true);
     try self.load(operand, operand_ptr, ptr_ty);
 
-    const result = try self.isNull(inst, ptr_ty.elemType(), operand);
+    const result = try self.isNull(inst, elem_ty, operand);
     return self.finishAir(inst, result, .{ un_op, .none, .none });
 }
 
@@ -4611,15 +4616,13 @@ fn airIsNonNullPtr(self: *Self, inst: Air.Inst.Index) !void {
     };
     defer if (operand_ptr_lock) |lock| self.register_manager.unlockReg(lock);
 
-    const operand: MCValue = blk: {
-        if (self.reuseOperand(inst, un_op, 0, operand_ptr)) {
-            // The MCValue that holds the pointer can be re-used as the value.
-            break :blk operand_ptr;
-        } else {
-            break :blk try self.allocRegOrMem(inst, true);
-        }
-    };
     const ptr_ty = self.air.typeOf(un_op);
+    const elem_ty = ptr_ty.childType();
+    const operand = if (elem_ty.isPtrLikeOptional() and self.reuseOperand(inst, un_op, 0, operand_ptr))
+        // The MCValue that holds the pointer can be re-used as the value.
+        operand_ptr
+    else
+        try self.allocTempRegOrMem(elem_ty, true);
     try self.load(operand, operand_ptr, ptr_ty);
 
-    const result = try self.isNonNull(inst, ptr_ty.elemType(), operand);
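
The enabling refactor above splits allocation into `allocRegOrMem` (tied to an AIR instruction) and the new `allocTempRegOrMem` (a temporary sized for an arbitrary type, passing `null` as the instruction), both forwarding to `allocRegOrMemAdvanced`. A stripped-down sketch of that dispatch shape, using invented stand-in types (`Index`, `Slot`) rather than the compiler's own:

```zig
const std = @import("std");

// Stand-ins for Air.Inst.Index and the stack-slot bookkeeping; only the
// optional-index dispatch pattern is the point here.
const Index = u32;

const Slot = struct {
    inst: ?Index, // null => temporary scratch slot, not tracked per-instruction
    size: u32,
};

fn allocForInst(inst: Index, size: u32) Slot {
    return allocAdvanced(size, inst); // tracked allocation
}

fn allocTemp(size: u32) Slot {
    return allocAdvanced(size, null); // untracked temporary
}

fn allocAdvanced(size: u32, inst: ?Index) Slot {
    return .{ .inst = inst, .size = size };
}

test "temporaries carry no instruction index" {
    try std.testing.expectEqual(@as(?Index, 7), allocForInst(7, 4).inst);
    try std.testing.expectEqual(@as(?Index, null), allocTemp(4).inst);
}
```

In the patch itself, that temporary is what `try self.load(operand, operand_ptr, ptr_ty)` fills when the pointee optional cannot simply alias the operand pointer.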
