From 8258530c39c347ca83fdbe7575460712960a83f3 Mon Sep 17 00:00:00 2001 From: mlugg Date: Sun, 9 Apr 2023 01:32:53 +0100 Subject: Liveness: control flow analysis This is a partial rewrite of Liveness, so has some other notable changes: - A proper multi-pass system to prevent code duplication - Better logging - Minor bugfixes --- src/codegen/c.zig | 5 ----- 1 file changed, 5 deletions(-) (limited to 'src/codegen') diff --git a/src/codegen/c.zig b/src/codegen/c.zig index f659827f09..9a89574b38 100644 --- a/src/codegen/c.zig +++ b/src/codegen/c.zig @@ -4637,17 +4637,12 @@ fn airLoop(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const loop = f.air.extraData(Air.Block, ty_pl.payload); const body = f.air.extra[loop.end..][0..loop.data.body_len]; - const liveness_loop = f.liveness.getLoop(inst); const writer = f.object.writer(); try writer.writeAll("for (;;) "); try genBody(f, body); try writer.writeByte('\n'); - for (liveness_loop.deaths) |operand| { - try die(f, inst, Air.indexToRef(operand)); - } - return .none; } -- cgit v1.2.3 From 407dc6eee4660bb0744c55f4565be77501ec7d37 Mon Sep 17 00:00:00 2001 From: mlugg Date: Fri, 14 Apr 2023 21:38:32 +0100 Subject: Liveness: avoid emitting unused instructions or marking their operands as used Backends want to avoid emitting unused instructions which do not have side effects: to that end, they all have `Liveness.isUnused` checks for many instructions. However, checking this in the backends avoids a lot of potential optimizations. For instance, if a nested field is loaded, then the first field access would still be emitted, since its result is used by the next access (which is then unreferenced). To elide more instructions, Liveness can track this data instead. For operands which do not have to be lowered (i.e. are not side effecting and are not something special like `arg), Liveness can ignore their operand usages, and push the unused information further up, potentially marking many more instructions as unreferenced. In doing this, I also uncovered a bug in the LLVM backend relating to discarding the result of `@cVaArg`, which this change fixes. A behaviour test has been added to cover it. --- src/Air.zig | 214 ++++++++++ src/Liveness.zig | 67 ++-- src/Liveness/Verify.zig | 945 +++++++++++++++++++++++---------------------- src/codegen/llvm.zig | 193 +-------- test/behavior/var_args.zig | 16 + 5 files changed, 756 insertions(+), 679 deletions(-) (limited to 'src/codegen') diff --git a/src/Air.zig b/src/Air.zig index b8771b2a61..19ba576a5f 100644 --- a/src/Air.zig +++ b/src/Air.zig @@ -1375,3 +1375,217 @@ pub fn nullTerminatedString(air: Air, index: usize) [:0]const u8 { } return bytes[0..end :0]; } + +/// Returns whether the given instruction must always be lowered, for instance because it can cause +/// side effects. If an instruction does not need to be lowered, and Liveness determines its result +/// is unused, backends should avoid lowering it. 
+pub fn mustLower(air: Air, inst: Air.Inst.Index) bool { + const data = air.instructions.items(.data)[inst]; + return switch (air.instructions.items(.tag)[inst]) { + .arg, + .block, + .loop, + .br, + .trap, + .breakpoint, + .call, + .call_always_tail, + .call_never_tail, + .call_never_inline, + .cond_br, + .switch_br, + .@"try", + .try_ptr, + .dbg_stmt, + .dbg_block_begin, + .dbg_block_end, + .dbg_inline_begin, + .dbg_inline_end, + .dbg_var_ptr, + .dbg_var_val, + .ret, + .ret_load, + .store, + .unreach, + .optional_payload_ptr_set, + .errunion_payload_ptr_set, + .set_union_tag, + .memset, + .memcpy, + .cmpxchg_weak, + .cmpxchg_strong, + .fence, + .atomic_store_unordered, + .atomic_store_monotonic, + .atomic_store_release, + .atomic_store_seq_cst, + .atomic_rmw, + .prefetch, + .wasm_memory_grow, + .set_err_return_trace, + .vector_store_elem, + .c_va_arg, + .c_va_copy, + .c_va_end, + .c_va_start, + => true, + + .add, + .add_optimized, + .addwrap, + .addwrap_optimized, + .add_sat, + .sub, + .sub_optimized, + .subwrap, + .subwrap_optimized, + .sub_sat, + .mul, + .mul_optimized, + .mulwrap, + .mulwrap_optimized, + .mul_sat, + .div_float, + .div_float_optimized, + .div_trunc, + .div_trunc_optimized, + .div_floor, + .div_floor_optimized, + .div_exact, + .div_exact_optimized, + .rem, + .rem_optimized, + .mod, + .mod_optimized, + .ptr_add, + .ptr_sub, + .max, + .min, + .add_with_overflow, + .sub_with_overflow, + .mul_with_overflow, + .shl_with_overflow, + .alloc, + .ret_ptr, + .bit_and, + .bit_or, + .shr, + .shr_exact, + .shl, + .shl_exact, + .shl_sat, + .xor, + .not, + .bitcast, + .ret_addr, + .frame_addr, + .clz, + .ctz, + .popcount, + .byte_swap, + .bit_reverse, + .sqrt, + .sin, + .cos, + .tan, + .exp, + .exp2, + .log, + .log2, + .log10, + .fabs, + .floor, + .ceil, + .round, + .trunc_float, + .neg, + .neg_optimized, + .cmp_lt, + .cmp_lt_optimized, + .cmp_lte, + .cmp_lte_optimized, + .cmp_eq, + .cmp_eq_optimized, + .cmp_gte, + .cmp_gte_optimized, + .cmp_gt, + .cmp_gt_optimized, + .cmp_neq, + .cmp_neq_optimized, + .cmp_vector, + .cmp_vector_optimized, + .constant, + .const_ty, + .is_null, + .is_non_null, + .is_null_ptr, + .is_non_null_ptr, + .is_err, + .is_non_err, + .is_err_ptr, + .is_non_err_ptr, + .bool_and, + .bool_or, + .ptrtoint, + .bool_to_int, + .fptrunc, + .fpext, + .intcast, + .trunc, + .optional_payload, + .optional_payload_ptr, + .wrap_optional, + .unwrap_errunion_payload, + .unwrap_errunion_err, + .unwrap_errunion_payload_ptr, + .unwrap_errunion_err_ptr, + .wrap_errunion_payload, + .wrap_errunion_err, + .struct_field_ptr, + .struct_field_ptr_index_0, + .struct_field_ptr_index_1, + .struct_field_ptr_index_2, + .struct_field_ptr_index_3, + .struct_field_val, + .get_union_tag, + .slice, + .slice_len, + .slice_ptr, + .ptr_slice_len_ptr, + .ptr_slice_ptr_ptr, + .array_elem_val, + .slice_elem_ptr, + .ptr_elem_ptr, + .array_to_slice, + .float_to_int, + .float_to_int_optimized, + .int_to_float, + .reduce, + .reduce_optimized, + .splat, + .shuffle, + .select, + .is_named_enum_value, + .tag_name, + .error_name, + .error_set_has_value, + .aggregate_init, + .union_init, + .mul_add, + .field_parent_ptr, + .wasm_memory_size, + .cmp_lt_errors_len, + .err_return_trace, + .addrspace_cast, + .save_err_return_trace_index, + .work_item_id, + .work_group_size, + .work_group_id, + => false, + + .assembly => @truncate(u1, air.extraData(Air.Asm, data.ty_pl.payload).data.flags >> 31) != 0, + .load => air.typeOf(data.ty_op.operand).isVolatilePtr(), + .slice_elem_val, .ptr_elem_val => 
air.typeOf(data.bin_op.lhs).isVolatilePtr(), + .atomic_load => air.typeOf(data.atomic_load.ptr).isVolatilePtr(), + }; +} diff --git a/src/Liveness.zig b/src/Liveness.zig index 6c522bab88..6c0a799476 100644 --- a/src/Liveness.zig +++ b/src/Liveness.zig @@ -1333,40 +1333,47 @@ fn analyzeOperands( .main_analysis => { const usize_index = (inst * bpi) / @bitSizeOf(usize); - var tomb_bits: Bpi = 0; - + // This logic must synchronize with `will_die_immediately` in `AnalyzeBigOperands.init`. + var immediate_death = false; if (data.branch_deaths.remove(inst)) { log.debug("[{}] %{}: resolved branch death to birth (immediate death)", .{ pass, inst }); - tomb_bits |= @as(Bpi, 1) << (bpi - 1); + immediate_death = true; assert(!data.live_set.contains(inst)); } else if (data.live_set.remove(inst)) { log.debug("[{}] %{}: removed from live set", .{ pass, inst }); } else { log.debug("[{}] %{}: immediate death", .{ pass, inst }); - tomb_bits |= @as(Bpi, 1) << (bpi - 1); + immediate_death = true; } - // Note that it's important we iterate over the operands backwards, so that if a dying - // operand is used multiple times we mark its last use as its death. - var i = operands.len; - while (i > 0) { - i -= 1; - const op_ref = operands[i]; - const operand = Air.refToIndex(op_ref) orelse continue; - - // Don't compute any liveness for constants - switch (inst_tags[operand]) { - .constant, .const_ty => continue, - else => {}, - } + var tomb_bits: Bpi = @as(Bpi, @boolToInt(immediate_death)) << (bpi - 1); + + // If our result is unused and the instruction doesn't need to be lowered, backends will + // skip the lowering of this instruction, so we don't want to record uses of operands. + // That way, we can mark as many instructions as possible unused. + if (!immediate_death or a.air.mustLower(inst)) { + // Note that it's important we iterate over the operands backwards, so that if a dying + // operand is used multiple times we mark its last use as its death. 
+ var i = operands.len; + while (i > 0) { + i -= 1; + const op_ref = operands[i]; + const operand = Air.refToIndex(op_ref) orelse continue; + + // Don't compute any liveness for constants + switch (inst_tags[operand]) { + .constant, .const_ty => continue, + else => {}, + } - const mask = @as(Bpi, 1) << @intCast(OperandInt, i); + const mask = @as(Bpi, 1) << @intCast(OperandInt, i); - if ((try data.live_set.fetchPut(gpa, operand, {})) == null) { - log.debug("[{}] %{}: added %{} to live set (operand dies here)", .{ pass, inst, operand }); - tomb_bits |= mask; - if (data.branch_deaths.remove(operand)) { - log.debug("[{}] %{}: resolved branch death of %{} to this usage", .{ pass, inst, operand }); + if ((try data.live_set.fetchPut(gpa, operand, {})) == null) { + log.debug("[{}] %{}: added %{} to live set (operand dies here)", .{ pass, inst, operand }); + tomb_bits |= mask; + if (data.branch_deaths.remove(operand)) { + log.debug("[{}] %{}: resolved branch death of %{} to this usage", .{ pass, inst, operand }); + } } } } @@ -1975,6 +1982,9 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type { small: [bpi - 1]Air.Inst.Ref = .{.none} ** (bpi - 1), extra_tombs: []u32, + // Only used in `LivenessPass.main_analysis` + will_die_immediately: bool, + const Self = @This(); fn init( @@ -1994,12 +2004,18 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type { std.mem.set(u32, extra_tombs, 0); + const will_die_immediately: bool = switch (pass) { + .loop_analysis => false, // track everything, since we don't have full liveness information yet + .main_analysis => data.branch_deaths.contains(inst) and !data.live_set.contains(inst), + }; + return .{ .a = a, .data = data, .inst = inst, .operands_remaining = @intCast(u32, total_operands), .extra_tombs = extra_tombs, + .will_die_immediately = will_die_immediately, }; } @@ -2022,6 +2038,11 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type { else => {}, } + // If our result is unused and the instruction doesn't need to be lowered, backends will + // skip the lowering of this instruction, so we don't want to record uses of operands. + // That way, we can mark as many instructions as possible unused. 
+ if (big.will_die_immediately and !big.a.air.mustLower(big.inst)) return; + const extra_byte = (big.operands_remaining - (bpi - 1)) / 31; const extra_bit = @intCast(u5, big.operands_remaining - (bpi - 1) - extra_byte * 31); diff --git a/src/Liveness/Verify.zig b/src/Liveness/Verify.zig index ef103cd3a1..6c1e72392d 100644 --- a/src/Liveness/Verify.zig +++ b/src/Liveness/Verify.zig @@ -29,518 +29,525 @@ const LiveMap = std.AutoHashMapUnmanaged(Air.Inst.Index, void); fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void { const tag = self.air.instructions.items(.tag); const data = self.air.instructions.items(.data); - for (body) |inst| switch (tag[inst]) { - // no operands - .arg, - .alloc, - .ret_ptr, - .constant, - .const_ty, - .breakpoint, - .dbg_stmt, - .dbg_inline_begin, - .dbg_inline_end, - .dbg_block_begin, - .dbg_block_end, - .fence, - .ret_addr, - .frame_addr, - .wasm_memory_size, - .err_return_trace, - .save_err_return_trace_index, - .c_va_start, - .work_item_id, - .work_group_size, - .work_group_id, - => try self.verifyInst(inst, .{ .none, .none, .none }), - - .trap, .unreach => { - try self.verifyInst(inst, .{ .none, .none, .none }); - // This instruction terminates the function, so everything should be dead - if (self.live.count() > 0) return invalid("%{}: instructions still alive", .{inst}); - }, + for (body) |inst| { + if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) { + // This instruction will not be lowered and should be ignored. + continue; + } + + switch (tag[inst]) { + // no operands + .arg, + .alloc, + .ret_ptr, + .constant, + .const_ty, + .breakpoint, + .dbg_stmt, + .dbg_inline_begin, + .dbg_inline_end, + .dbg_block_begin, + .dbg_block_end, + .fence, + .ret_addr, + .frame_addr, + .wasm_memory_size, + .err_return_trace, + .save_err_return_trace_index, + .c_va_start, + .work_item_id, + .work_group_size, + .work_group_id, + => try self.verifyInst(inst, .{ .none, .none, .none }), + + .trap, .unreach => { + try self.verifyInst(inst, .{ .none, .none, .none }); + // This instruction terminates the function, so everything should be dead + if (self.live.count() > 0) return invalid("%{}: instructions still alive", .{inst}); + }, + + // unary + .not, + .bitcast, + .load, + .fpext, + .fptrunc, + .intcast, + .trunc, + .optional_payload, + .optional_payload_ptr, + .optional_payload_ptr_set, + .errunion_payload_ptr_set, + .wrap_optional, + .unwrap_errunion_payload, + .unwrap_errunion_err, + .unwrap_errunion_payload_ptr, + .unwrap_errunion_err_ptr, + .wrap_errunion_payload, + .wrap_errunion_err, + .slice_ptr, + .slice_len, + .ptr_slice_len_ptr, + .ptr_slice_ptr_ptr, + .struct_field_ptr_index_0, + .struct_field_ptr_index_1, + .struct_field_ptr_index_2, + .struct_field_ptr_index_3, + .array_to_slice, + .float_to_int, + .float_to_int_optimized, + .int_to_float, + .get_union_tag, + .clz, + .ctz, + .popcount, + .byte_swap, + .bit_reverse, + .splat, + .error_set_has_value, + .addrspace_cast, + .c_va_arg, + .c_va_copy, + => { + const ty_op = data[inst].ty_op; + try self.verifyInst(inst, .{ ty_op.operand, .none, .none }); + }, + .is_null, + .is_non_null, + .is_null_ptr, + .is_non_null_ptr, + .is_err, + .is_non_err, + .is_err_ptr, + .is_non_err_ptr, + .ptrtoint, + .bool_to_int, + .is_named_enum_value, + .tag_name, + .error_name, + .sqrt, + .sin, + .cos, + .tan, + .exp, + .exp2, + .log, + .log2, + .log10, + .fabs, + .floor, + .ceil, + .round, + .trunc_float, + .neg, + .neg_optimized, + .cmp_lt_errors_len, + .set_err_return_trace, + .c_va_end, + => { + const un_op 
= data[inst].un_op; + try self.verifyInst(inst, .{ un_op, .none, .none }); + }, + .ret, + .ret_load, + => { + const un_op = data[inst].un_op; + try self.verifyInst(inst, .{ un_op, .none, .none }); + // This instruction terminates the function, so everything should be dead + if (self.live.count() > 0) return invalid("%{}: instructions still alive", .{inst}); + }, + .dbg_var_ptr, + .dbg_var_val, + .wasm_memory_grow, + => { + const pl_op = data[inst].pl_op; + try self.verifyInst(inst, .{ pl_op.operand, .none, .none }); + }, + .prefetch => { + const prefetch = data[inst].prefetch; + try self.verifyInst(inst, .{ prefetch.ptr, .none, .none }); + }, + .reduce, + .reduce_optimized, + => { + const reduce = data[inst].reduce; + try self.verifyInst(inst, .{ reduce.operand, .none, .none }); + }, + .union_init => { + const ty_pl = data[inst].ty_pl; + const extra = self.air.extraData(Air.UnionInit, ty_pl.payload).data; + try self.verifyInst(inst, .{ extra.init, .none, .none }); + }, + .struct_field_ptr, .struct_field_val => { + const ty_pl = data[inst].ty_pl; + const extra = self.air.extraData(Air.StructField, ty_pl.payload).data; + try self.verifyInst(inst, .{ extra.struct_operand, .none, .none }); + }, + .field_parent_ptr => { + const ty_pl = data[inst].ty_pl; + const extra = self.air.extraData(Air.FieldParentPtr, ty_pl.payload).data; + try self.verifyInst(inst, .{ extra.field_ptr, .none, .none }); + }, + .atomic_load => { + const atomic_load = data[inst].atomic_load; + try self.verifyInst(inst, .{ atomic_load.ptr, .none, .none }); + }, + + // binary + .add, + .add_optimized, + .addwrap, + .addwrap_optimized, + .add_sat, + .sub, + .sub_optimized, + .subwrap, + .subwrap_optimized, + .sub_sat, + .mul, + .mul_optimized, + .mulwrap, + .mulwrap_optimized, + .mul_sat, + .div_float, + .div_float_optimized, + .div_trunc, + .div_trunc_optimized, + .div_floor, + .div_floor_optimized, + .div_exact, + .div_exact_optimized, + .rem, + .rem_optimized, + .mod, + .mod_optimized, + .bit_and, + .bit_or, + .xor, + .cmp_lt, + .cmp_lt_optimized, + .cmp_lte, + .cmp_lte_optimized, + .cmp_eq, + .cmp_eq_optimized, + .cmp_gte, + .cmp_gte_optimized, + .cmp_gt, + .cmp_gt_optimized, + .cmp_neq, + .cmp_neq_optimized, + .bool_and, + .bool_or, + .store, + .array_elem_val, + .slice_elem_val, + .ptr_elem_val, + .shl, + .shl_exact, + .shl_sat, + .shr, + .shr_exact, + .atomic_store_unordered, + .atomic_store_monotonic, + .atomic_store_release, + .atomic_store_seq_cst, + .set_union_tag, + .min, + .max, + => { + const bin_op = data[inst].bin_op; + try self.verifyInst(inst, .{ bin_op.lhs, bin_op.rhs, .none }); + }, + .add_with_overflow, + .sub_with_overflow, + .mul_with_overflow, + .shl_with_overflow, + .ptr_add, + .ptr_sub, + .ptr_elem_ptr, + .slice_elem_ptr, + .slice, + => { + const ty_pl = data[inst].ty_pl; + const extra = self.air.extraData(Air.Bin, ty_pl.payload).data; + try self.verifyInst(inst, .{ extra.lhs, extra.rhs, .none }); + }, + .shuffle => { + const ty_pl = data[inst].ty_pl; + const extra = self.air.extraData(Air.Shuffle, ty_pl.payload).data; + try self.verifyInst(inst, .{ extra.a, extra.b, .none }); + }, + .cmp_vector, + .cmp_vector_optimized, + => { + const ty_pl = data[inst].ty_pl; + const extra = self.air.extraData(Air.VectorCmp, ty_pl.payload).data; + try self.verifyInst(inst, .{ extra.lhs, extra.rhs, .none }); + }, + .atomic_rmw => { + const pl_op = data[inst].pl_op; + const extra = self.air.extraData(Air.AtomicRmw, pl_op.payload).data; + try self.verifyInst(inst, .{ pl_op.operand, extra.operand, .none }); + }, + + // 
ternary + .select => { + const pl_op = data[inst].pl_op; + const extra = self.air.extraData(Air.Bin, pl_op.payload).data; + try self.verifyInst(inst, .{ pl_op.operand, extra.lhs, extra.rhs }); + }, + .mul_add => { + const pl_op = data[inst].pl_op; + const extra = self.air.extraData(Air.Bin, pl_op.payload).data; + try self.verifyInst(inst, .{ extra.lhs, extra.rhs, pl_op.operand }); + }, + .vector_store_elem => { + const vector_store_elem = data[inst].vector_store_elem; + const extra = self.air.extraData(Air.Bin, vector_store_elem.payload).data; + try self.verifyInst(inst, .{ vector_store_elem.vector_ptr, extra.lhs, extra.rhs }); + }, + .memset, + .memcpy, + => { + const pl_op = data[inst].pl_op; + const extra = self.air.extraData(Air.Bin, pl_op.payload).data; + try self.verifyInst(inst, .{ pl_op.operand, extra.lhs, extra.rhs }); + }, + .cmpxchg_strong, + .cmpxchg_weak, + => { + const ty_pl = data[inst].ty_pl; + const extra = self.air.extraData(Air.Cmpxchg, ty_pl.payload).data; + try self.verifyInst(inst, .{ extra.ptr, extra.expected_value, extra.new_value }); + }, + + // big tombs + .aggregate_init => { + const ty_pl = data[inst].ty_pl; + const aggregate_ty = self.air.getRefType(ty_pl.ty); + const len = @intCast(usize, aggregate_ty.arrayLen()); + const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]); + + var bt = self.liveness.iterateBigTomb(inst); + for (elements) |element| { + try self.verifyOperand(inst, element, bt.feed()); + } + try self.verifyInst(inst, .{ .none, .none, .none }); + }, + .call, .call_always_tail, .call_never_tail, .call_never_inline => { + const pl_op = data[inst].pl_op; + const extra = self.air.extraData(Air.Call, pl_op.payload); + const args = @ptrCast( + []const Air.Inst.Ref, + self.air.extra[extra.end..][0..extra.data.args_len], + ); - // unary - .not, - .bitcast, - .load, - .fpext, - .fptrunc, - .intcast, - .trunc, - .optional_payload, - .optional_payload_ptr, - .optional_payload_ptr_set, - .errunion_payload_ptr_set, - .wrap_optional, - .unwrap_errunion_payload, - .unwrap_errunion_err, - .unwrap_errunion_payload_ptr, - .unwrap_errunion_err_ptr, - .wrap_errunion_payload, - .wrap_errunion_err, - .slice_ptr, - .slice_len, - .ptr_slice_len_ptr, - .ptr_slice_ptr_ptr, - .struct_field_ptr_index_0, - .struct_field_ptr_index_1, - .struct_field_ptr_index_2, - .struct_field_ptr_index_3, - .array_to_slice, - .float_to_int, - .float_to_int_optimized, - .int_to_float, - .get_union_tag, - .clz, - .ctz, - .popcount, - .byte_swap, - .bit_reverse, - .splat, - .error_set_has_value, - .addrspace_cast, - .c_va_arg, - .c_va_copy, - => { - const ty_op = data[inst].ty_op; - try self.verifyInst(inst, .{ ty_op.operand, .none, .none }); - }, - .is_null, - .is_non_null, - .is_null_ptr, - .is_non_null_ptr, - .is_err, - .is_non_err, - .is_err_ptr, - .is_non_err_ptr, - .ptrtoint, - .bool_to_int, - .is_named_enum_value, - .tag_name, - .error_name, - .sqrt, - .sin, - .cos, - .tan, - .exp, - .exp2, - .log, - .log2, - .log10, - .fabs, - .floor, - .ceil, - .round, - .trunc_float, - .neg, - .neg_optimized, - .cmp_lt_errors_len, - .set_err_return_trace, - .c_va_end, - => { - const un_op = data[inst].un_op; - try self.verifyInst(inst, .{ un_op, .none, .none }); - }, - .ret, - .ret_load, - => { - const un_op = data[inst].un_op; - try self.verifyInst(inst, .{ un_op, .none, .none }); - // This instruction terminates the function, so everything should be dead - if (self.live.count() > 0) return invalid("%{}: instructions still alive", .{inst}); - }, - .dbg_var_ptr, - 
.dbg_var_val, - .wasm_memory_grow, - => { - const pl_op = data[inst].pl_op; - try self.verifyInst(inst, .{ pl_op.operand, .none, .none }); - }, - .prefetch => { - const prefetch = data[inst].prefetch; - try self.verifyInst(inst, .{ prefetch.ptr, .none, .none }); - }, - .reduce, - .reduce_optimized, - => { - const reduce = data[inst].reduce; - try self.verifyInst(inst, .{ reduce.operand, .none, .none }); - }, - .union_init => { - const ty_pl = data[inst].ty_pl; - const extra = self.air.extraData(Air.UnionInit, ty_pl.payload).data; - try self.verifyInst(inst, .{ extra.init, .none, .none }); - }, - .struct_field_ptr, .struct_field_val => { - const ty_pl = data[inst].ty_pl; - const extra = self.air.extraData(Air.StructField, ty_pl.payload).data; - try self.verifyInst(inst, .{ extra.struct_operand, .none, .none }); - }, - .field_parent_ptr => { - const ty_pl = data[inst].ty_pl; - const extra = self.air.extraData(Air.FieldParentPtr, ty_pl.payload).data; - try self.verifyInst(inst, .{ extra.field_ptr, .none, .none }); - }, - .atomic_load => { - const atomic_load = data[inst].atomic_load; - try self.verifyInst(inst, .{ atomic_load.ptr, .none, .none }); - }, + var bt = self.liveness.iterateBigTomb(inst); + try self.verifyOperand(inst, pl_op.operand, bt.feed()); + for (args) |arg| { + try self.verifyOperand(inst, arg, bt.feed()); + } + try self.verifyInst(inst, .{ .none, .none, .none }); + }, + .assembly => { + const ty_pl = data[inst].ty_pl; + const extra = self.air.extraData(Air.Asm, ty_pl.payload); + var extra_i = extra.end; + const outputs = @ptrCast( + []const Air.Inst.Ref, + self.air.extra[extra_i..][0..extra.data.outputs_len], + ); + extra_i += outputs.len; + const inputs = @ptrCast( + []const Air.Inst.Ref, + self.air.extra[extra_i..][0..extra.data.inputs_len], + ); + extra_i += inputs.len; - // binary - .add, - .add_optimized, - .addwrap, - .addwrap_optimized, - .add_sat, - .sub, - .sub_optimized, - .subwrap, - .subwrap_optimized, - .sub_sat, - .mul, - .mul_optimized, - .mulwrap, - .mulwrap_optimized, - .mul_sat, - .div_float, - .div_float_optimized, - .div_trunc, - .div_trunc_optimized, - .div_floor, - .div_floor_optimized, - .div_exact, - .div_exact_optimized, - .rem, - .rem_optimized, - .mod, - .mod_optimized, - .bit_and, - .bit_or, - .xor, - .cmp_lt, - .cmp_lt_optimized, - .cmp_lte, - .cmp_lte_optimized, - .cmp_eq, - .cmp_eq_optimized, - .cmp_gte, - .cmp_gte_optimized, - .cmp_gt, - .cmp_gt_optimized, - .cmp_neq, - .cmp_neq_optimized, - .bool_and, - .bool_or, - .store, - .array_elem_val, - .slice_elem_val, - .ptr_elem_val, - .shl, - .shl_exact, - .shl_sat, - .shr, - .shr_exact, - .atomic_store_unordered, - .atomic_store_monotonic, - .atomic_store_release, - .atomic_store_seq_cst, - .set_union_tag, - .min, - .max, - => { - const bin_op = data[inst].bin_op; - try self.verifyInst(inst, .{ bin_op.lhs, bin_op.rhs, .none }); - }, - .add_with_overflow, - .sub_with_overflow, - .mul_with_overflow, - .shl_with_overflow, - .ptr_add, - .ptr_sub, - .ptr_elem_ptr, - .slice_elem_ptr, - .slice, - => { - const ty_pl = data[inst].ty_pl; - const extra = self.air.extraData(Air.Bin, ty_pl.payload).data; - try self.verifyInst(inst, .{ extra.lhs, extra.rhs, .none }); - }, - .shuffle => { - const ty_pl = data[inst].ty_pl; - const extra = self.air.extraData(Air.Shuffle, ty_pl.payload).data; - try self.verifyInst(inst, .{ extra.a, extra.b, .none }); - }, - .cmp_vector, - .cmp_vector_optimized, - => { - const ty_pl = data[inst].ty_pl; - const extra = self.air.extraData(Air.VectorCmp, ty_pl.payload).data; - try 
self.verifyInst(inst, .{ extra.lhs, extra.rhs, .none }); - }, - .atomic_rmw => { - const pl_op = data[inst].pl_op; - const extra = self.air.extraData(Air.AtomicRmw, pl_op.payload).data; - try self.verifyInst(inst, .{ pl_op.operand, extra.operand, .none }); - }, + var bt = self.liveness.iterateBigTomb(inst); + for (outputs) |output| { + if (output != .none) { + try self.verifyOperand(inst, output, bt.feed()); + } + } + for (inputs) |input| { + try self.verifyOperand(inst, input, bt.feed()); + } + try self.verifyInst(inst, .{ .none, .none, .none }); + }, - // ternary - .select => { - const pl_op = data[inst].pl_op; - const extra = self.air.extraData(Air.Bin, pl_op.payload).data; - try self.verifyInst(inst, .{ pl_op.operand, extra.lhs, extra.rhs }); - }, - .mul_add => { - const pl_op = data[inst].pl_op; - const extra = self.air.extraData(Air.Bin, pl_op.payload).data; - try self.verifyInst(inst, .{ extra.lhs, extra.rhs, pl_op.operand }); - }, - .vector_store_elem => { - const vector_store_elem = data[inst].vector_store_elem; - const extra = self.air.extraData(Air.Bin, vector_store_elem.payload).data; - try self.verifyInst(inst, .{ vector_store_elem.vector_ptr, extra.lhs, extra.rhs }); - }, - .memset, - .memcpy, - => { - const pl_op = data[inst].pl_op; - const extra = self.air.extraData(Air.Bin, pl_op.payload).data; - try self.verifyInst(inst, .{ pl_op.operand, extra.lhs, extra.rhs }); - }, - .cmpxchg_strong, - .cmpxchg_weak, - => { - const ty_pl = data[inst].ty_pl; - const extra = self.air.extraData(Air.Cmpxchg, ty_pl.payload).data; - try self.verifyInst(inst, .{ extra.ptr, extra.expected_value, extra.new_value }); - }, + // control flow + .@"try" => { + const pl_op = data[inst].pl_op; + const extra = self.air.extraData(Air.Try, pl_op.payload); + const try_body = self.air.extra[extra.end..][0..extra.data.body_len]; - // big tombs - .aggregate_init => { - const ty_pl = data[inst].ty_pl; - const aggregate_ty = self.air.getRefType(ty_pl.ty); - const len = @intCast(usize, aggregate_ty.arrayLen()); - const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]); + const cond_br_liveness = self.liveness.getCondBr(inst); - var bt = self.liveness.iterateBigTomb(inst); - for (elements) |element| { - try self.verifyOperand(inst, element, bt.feed()); - } - try self.verifyInst(inst, .{ .none, .none, .none }); - }, - .call, .call_always_tail, .call_never_tail, .call_never_inline => { - const pl_op = data[inst].pl_op; - const extra = self.air.extraData(Air.Call, pl_op.payload); - const args = @ptrCast( - []const Air.Inst.Ref, - self.air.extra[extra.end..][0..extra.data.args_len], - ); - - var bt = self.liveness.iterateBigTomb(inst); - try self.verifyOperand(inst, pl_op.operand, bt.feed()); - for (args) |arg| { - try self.verifyOperand(inst, arg, bt.feed()); - } - try self.verifyInst(inst, .{ .none, .none, .none }); - }, - .assembly => { - const ty_pl = data[inst].ty_pl; - const extra = self.air.extraData(Air.Asm, ty_pl.payload); - var extra_i = extra.end; - const outputs = @ptrCast( - []const Air.Inst.Ref, - self.air.extra[extra_i..][0..extra.data.outputs_len], - ); - extra_i += outputs.len; - const inputs = @ptrCast( - []const Air.Inst.Ref, - self.air.extra[extra_i..][0..extra.data.inputs_len], - ); - extra_i += inputs.len; - - var bt = self.liveness.iterateBigTomb(inst); - for (outputs) |output| { - if (output != .none) { - try self.verifyOperand(inst, output, bt.feed()); - } - } - for (inputs) |input| { - try self.verifyOperand(inst, input, bt.feed()); - } - try 
self.verifyInst(inst, .{ .none, .none, .none }); - }, + try self.verifyOperand(inst, pl_op.operand, self.liveness.operandDies(inst, 0)); - // control flow - .@"try" => { - const pl_op = data[inst].pl_op; - const extra = self.air.extraData(Air.Try, pl_op.payload); - const try_body = self.air.extra[extra.end..][0..extra.data.body_len]; + var live = try self.live.clone(self.gpa); + defer live.deinit(self.gpa); - const cond_br_liveness = self.liveness.getCondBr(inst); + for (cond_br_liveness.else_deaths) |death| try self.verifyDeath(inst, death); + try self.verifyBody(try_body); - try self.verifyOperand(inst, pl_op.operand, self.liveness.operandDies(inst, 0)); + self.live.deinit(self.gpa); + self.live = live.move(); - var live = try self.live.clone(self.gpa); - defer live.deinit(self.gpa); + for (cond_br_liveness.then_deaths) |death| try self.verifyDeath(inst, death); - for (cond_br_liveness.else_deaths) |death| try self.verifyDeath(inst, death); - try self.verifyBody(try_body); + try self.verifyInst(inst, .{ .none, .none, .none }); + }, + .try_ptr => { + const ty_pl = data[inst].ty_pl; + const extra = self.air.extraData(Air.TryPtr, ty_pl.payload); + const try_body = self.air.extra[extra.end..][0..extra.data.body_len]; - self.live.deinit(self.gpa); - self.live = live.move(); + const cond_br_liveness = self.liveness.getCondBr(inst); - for (cond_br_liveness.then_deaths) |death| try self.verifyDeath(inst, death); + try self.verifyOperand(inst, extra.data.ptr, self.liveness.operandDies(inst, 0)); - try self.verifyInst(inst, .{ .none, .none, .none }); - }, - .try_ptr => { - const ty_pl = data[inst].ty_pl; - const extra = self.air.extraData(Air.TryPtr, ty_pl.payload); - const try_body = self.air.extra[extra.end..][0..extra.data.body_len]; - - const cond_br_liveness = self.liveness.getCondBr(inst); + var live = try self.live.clone(self.gpa); + defer live.deinit(self.gpa); - try self.verifyOperand(inst, extra.data.ptr, self.liveness.operandDies(inst, 0)); + for (cond_br_liveness.else_deaths) |death| try self.verifyDeath(inst, death); + try self.verifyBody(try_body); - var live = try self.live.clone(self.gpa); - defer live.deinit(self.gpa); + self.live.deinit(self.gpa); + self.live = live.move(); - for (cond_br_liveness.else_deaths) |death| try self.verifyDeath(inst, death); - try self.verifyBody(try_body); + for (cond_br_liveness.then_deaths) |death| try self.verifyDeath(inst, death); - self.live.deinit(self.gpa); - self.live = live.move(); + try self.verifyInst(inst, .{ .none, .none, .none }); + }, + .br => { + const br = data[inst].br; + const gop = try self.blocks.getOrPut(self.gpa, br.block_inst); - for (cond_br_liveness.then_deaths) |death| try self.verifyDeath(inst, death); + try self.verifyOperand(inst, br.operand, self.liveness.operandDies(inst, 0)); + if (gop.found_existing) { + try self.verifyMatchingLiveness(br.block_inst, gop.value_ptr.*); + } else { + gop.value_ptr.* = try self.live.clone(self.gpa); + } + try self.verifyInst(inst, .{ .none, .none, .none }); + }, + .block => { + const ty_pl = data[inst].ty_pl; + const block_ty = self.air.getRefType(ty_pl.ty); + const extra = self.air.extraData(Air.Block, ty_pl.payload); + const block_body = self.air.extra[extra.end..][0..extra.data.body_len]; + const block_liveness = self.liveness.getBlock(inst); + + var orig_live = try self.live.clone(self.gpa); + defer orig_live.deinit(self.gpa); - try self.verifyInst(inst, .{ .none, .none, .none }); - }, - .br => { - const br = data[inst].br; - const gop = try self.blocks.getOrPut(self.gpa, 
br.block_inst); + assert(!self.blocks.contains(inst)); + try self.verifyBody(block_body); - try self.verifyOperand(inst, br.operand, self.liveness.operandDies(inst, 0)); - if (gop.found_existing) { - try self.verifyMatchingLiveness(br.block_inst, gop.value_ptr.*); - } else { - gop.value_ptr.* = try self.live.clone(self.gpa); - } - try self.verifyInst(inst, .{ .none, .none, .none }); - }, - .block => { - const ty_pl = data[inst].ty_pl; - const block_ty = self.air.getRefType(ty_pl.ty); - const extra = self.air.extraData(Air.Block, ty_pl.payload); - const block_body = self.air.extra[extra.end..][0..extra.data.body_len]; - const block_liveness = self.liveness.getBlock(inst); + // Liveness data after the block body is garbage, but we want to + // restore it to verify deaths + self.live.deinit(self.gpa); + self.live = orig_live.move(); - var orig_live = try self.live.clone(self.gpa); - defer orig_live.deinit(self.gpa); + for (block_liveness.deaths) |death| try self.verifyDeath(inst, death); - assert(!self.blocks.contains(inst)); - try self.verifyBody(block_body); + if (block_ty.isNoReturn()) { + assert(!self.blocks.contains(inst)); + } else { + var live = self.blocks.fetchRemove(inst).?.value; + defer live.deinit(self.gpa); - // Liveness data after the block body is garbage, but we want to - // restore it to verify deaths - self.live.deinit(self.gpa); - self.live = orig_live.move(); + try self.verifyMatchingLiveness(inst, live); + } - for (block_liveness.deaths) |death| try self.verifyDeath(inst, death); + try self.verifyInst(inst, .{ .none, .none, .none }); + }, + .loop => { + const ty_pl = data[inst].ty_pl; + const extra = self.air.extraData(Air.Block, ty_pl.payload); + const loop_body = self.air.extra[extra.end..][0..extra.data.body_len]; - if (block_ty.isNoReturn()) { - assert(!self.blocks.contains(inst)); - } else { - var live = self.blocks.fetchRemove(inst).?.value; + var live = try self.live.clone(self.gpa); defer live.deinit(self.gpa); - try self.verifyMatchingLiveness(inst, live); - } + try self.verifyBody(loop_body); - try self.verifyInst(inst, .{ .none, .none, .none }); - }, - .loop => { - const ty_pl = data[inst].ty_pl; - const extra = self.air.extraData(Air.Block, ty_pl.payload); - const loop_body = self.air.extra[extra.end..][0..extra.data.body_len]; + // The same stuff should be alive after the loop as before it + try self.verifyMatchingLiveness(inst, live); - var live = try self.live.clone(self.gpa); - defer live.deinit(self.gpa); + try self.verifyInst(inst, .{ .none, .none, .none }); + }, + .cond_br => { + const pl_op = data[inst].pl_op; + const extra = self.air.extraData(Air.CondBr, pl_op.payload); + const then_body = self.air.extra[extra.end..][0..extra.data.then_body_len]; + const else_body = self.air.extra[extra.end + then_body.len ..][0..extra.data.else_body_len]; + const cond_br_liveness = self.liveness.getCondBr(inst); - try self.verifyBody(loop_body); + try self.verifyOperand(inst, pl_op.operand, self.liveness.operandDies(inst, 0)); - // The same stuff should be alive after the loop as before it - try self.verifyMatchingLiveness(inst, live); + var live = try self.live.clone(self.gpa); + defer live.deinit(self.gpa); - try self.verifyInst(inst, .{ .none, .none, .none }); - }, - .cond_br => { - const pl_op = data[inst].pl_op; - const extra = self.air.extraData(Air.CondBr, pl_op.payload); - const then_body = self.air.extra[extra.end..][0..extra.data.then_body_len]; - const else_body = self.air.extra[extra.end + then_body.len ..][0..extra.data.else_body_len]; - const 
cond_br_liveness = self.liveness.getCondBr(inst); + for (cond_br_liveness.then_deaths) |death| try self.verifyDeath(inst, death); + try self.verifyBody(then_body); - try self.verifyOperand(inst, pl_op.operand, self.liveness.operandDies(inst, 0)); + self.live.deinit(self.gpa); + self.live = live.move(); - var live = try self.live.clone(self.gpa); - defer live.deinit(self.gpa); + for (cond_br_liveness.else_deaths) |death| try self.verifyDeath(inst, death); + try self.verifyBody(else_body); - for (cond_br_liveness.then_deaths) |death| try self.verifyDeath(inst, death); - try self.verifyBody(then_body); + try self.verifyInst(inst, .{ .none, .none, .none }); + }, + .switch_br => { + const pl_op = data[inst].pl_op; + const switch_br = self.air.extraData(Air.SwitchBr, pl_op.payload); + var extra_index = switch_br.end; + var case_i: u32 = 0; + const switch_br_liveness = try self.liveness.getSwitchBr( + self.gpa, + inst, + switch_br.data.cases_len + 1, + ); + defer self.gpa.free(switch_br_liveness.deaths); - self.live.deinit(self.gpa); - self.live = live.move(); + try self.verifyOperand(inst, pl_op.operand, self.liveness.operandDies(inst, 0)); - for (cond_br_liveness.else_deaths) |death| try self.verifyDeath(inst, death); - try self.verifyBody(else_body); + var live = self.live.move(); + defer live.deinit(self.gpa); - try self.verifyInst(inst, .{ .none, .none, .none }); - }, - .switch_br => { - const pl_op = data[inst].pl_op; - const switch_br = self.air.extraData(Air.SwitchBr, pl_op.payload); - var extra_index = switch_br.end; - var case_i: u32 = 0; - const switch_br_liveness = try self.liveness.getSwitchBr( - self.gpa, - inst, - switch_br.data.cases_len + 1, - ); - defer self.gpa.free(switch_br_liveness.deaths); - - try self.verifyOperand(inst, pl_op.operand, self.liveness.operandDies(inst, 0)); - - var live = self.live.move(); - defer live.deinit(self.gpa); - - while (case_i < switch_br.data.cases_len) : (case_i += 1) { - const case = self.air.extraData(Air.SwitchBr.Case, extra_index); - const items = @ptrCast( - []const Air.Inst.Ref, - self.air.extra[case.end..][0..case.data.items_len], - ); - const case_body = self.air.extra[case.end + items.len ..][0..case.data.body_len]; - extra_index = case.end + items.len + case_body.len; + while (case_i < switch_br.data.cases_len) : (case_i += 1) { + const case = self.air.extraData(Air.SwitchBr.Case, extra_index); + const items = @ptrCast( + []const Air.Inst.Ref, + self.air.extra[case.end..][0..case.data.items_len], + ); + const case_body = self.air.extra[case.end + items.len ..][0..case.data.body_len]; + extra_index = case.end + items.len + case_body.len; - self.live.deinit(self.gpa); - self.live = try live.clone(self.gpa); + self.live.deinit(self.gpa); + self.live = try live.clone(self.gpa); - for (switch_br_liveness.deaths[case_i]) |death| try self.verifyDeath(inst, death); - try self.verifyBody(case_body); - } + for (switch_br_liveness.deaths[case_i]) |death| try self.verifyDeath(inst, death); + try self.verifyBody(case_body); + } - const else_body = self.air.extra[extra_index..][0..switch_br.data.else_body_len]; - if (else_body.len > 0) { - self.live.deinit(self.gpa); - self.live = try live.clone(self.gpa); + const else_body = self.air.extra[extra_index..][0..switch_br.data.else_body_len]; + if (else_body.len > 0) { + self.live.deinit(self.gpa); + self.live = try live.clone(self.gpa); - for (switch_br_liveness.deaths[case_i]) |death| try self.verifyDeath(inst, death); - try self.verifyBody(else_body); - } + for (switch_br_liveness.deaths[case_i]) 
|death| try self.verifyDeath(inst, death); + try self.verifyBody(else_body); + } - try self.verifyInst(inst, .{ .none, .none, .none }); - }, - }; + try self.verifyInst(inst, .{ .none, .none, .none }); + }, + } + } } fn verifyDeath(self: *Verify, inst: Air.Inst.Index, operand: Air.Inst.Index) Error!void { diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig index 47b47cc807..fc3f14b7c6 100644 --- a/src/codegen/llvm.zig +++ b/src/codegen/llvm.zig @@ -4523,6 +4523,10 @@ pub const FuncGen = struct { fn genBody(self: *FuncGen, body: []const Air.Inst.Index) Error!void { const air_tags = self.air.instructions.items(.tag); for (body, 0..) |inst, i| { + if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) { + continue; + } + const opt_value: ?*llvm.Value = switch (air_tags[inst]) { // zig fmt: off .add => try self.airAdd(inst, false), @@ -5166,8 +5170,6 @@ pub const FuncGen = struct { } fn airCVaArg(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const list = try self.resolveInst(ty_op.operand); const arg_ty = self.air.getRefType(ty_op.ty); @@ -5177,8 +5179,6 @@ pub const FuncGen = struct { } fn airCVaCopy(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const src_list = try self.resolveInst(ty_op.operand); const va_list_ty = self.air.getRefType(ty_op.ty); @@ -5226,8 +5226,6 @@ pub const FuncGen = struct { } fn airCVaStart(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const va_list_ty = self.air.typeOfIndex(inst); const llvm_va_list_ty = try self.dg.lowerType(va_list_ty); @@ -5254,7 +5252,6 @@ pub const FuncGen = struct { } fn airCmp(self: *FuncGen, inst: Air.Inst.Index, op: math.CompareOperator, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -5266,7 +5263,6 @@ pub const FuncGen = struct { } fn airCmpVector(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; @@ -5281,8 +5277,6 @@ pub const FuncGen = struct { } fn airCmpLtErrorsLen(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = try self.resolveInst(un_op); const llvm_fn = try self.getCmpLtErrorsLenFunction(); @@ -5650,9 +5644,6 @@ pub const FuncGen = struct { } fn airArrayToSlice(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand_ty = self.air.typeOf(ty_op.operand); const array_ty = operand_ty.childType(); @@ -5674,9 +5665,6 @@ pub const FuncGen = struct { } fn airIntToFloat(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); @@ -5733,9 +5721,6 @@ pub const FuncGen = struct { } fn airFloatToInt(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if 
(self.liveness.isUnused(inst)) - return null; - self.builder.setFastMath(want_fast_math); const target = self.dg.module.getTarget(); @@ -5792,16 +5777,12 @@ pub const FuncGen = struct { } fn airSliceField(self: *FuncGen, inst: Air.Inst.Index, index: c_uint) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); return self.builder.buildExtractValue(operand, index, ""); } fn airPtrSliceFieldPtr(self: *FuncGen, inst: Air.Inst.Index, index: c_uint) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const slice_ptr = try self.resolveInst(ty_op.operand); const slice_ptr_ty = self.air.typeOf(ty_op.operand); @@ -5814,8 +5795,6 @@ pub const FuncGen = struct { const inst = body_tail[0]; const bin_op = self.air.instructions.items(.data)[inst].bin_op; const slice_ty = self.air.typeOf(bin_op.lhs); - if (!slice_ty.isVolatilePtr() and self.liveness.isUnused(inst)) return null; - const slice = try self.resolveInst(bin_op.lhs); const index = try self.resolveInst(bin_op.rhs); const elem_ty = slice_ty.childType(); @@ -5835,7 +5814,6 @@ pub const FuncGen = struct { } fn airSliceElemPtr(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data; const slice_ty = self.air.typeOf(bin_op.lhs); @@ -5850,7 +5828,6 @@ pub const FuncGen = struct { fn airArrayElemVal(self: *FuncGen, body_tail: []const Air.Inst.Index) !?*llvm.Value { const inst = body_tail[0]; - if (self.liveness.isUnused(inst)) return null; const bin_op = self.air.instructions.items(.data)[inst].bin_op; const array_ty = self.air.typeOf(bin_op.lhs); @@ -5881,8 +5858,6 @@ pub const FuncGen = struct { const inst = body_tail[0]; const bin_op = self.air.instructions.items(.data)[inst].bin_op; const ptr_ty = self.air.typeOf(bin_op.lhs); - if (!ptr_ty.isVolatilePtr() and self.liveness.isUnused(inst)) return null; - const elem_ty = ptr_ty.childType(); const llvm_elem_ty = try self.dg.lowerPtrElemTy(elem_ty); const base_ptr = try self.resolveInst(bin_op.lhs); @@ -5908,8 +5883,6 @@ pub const FuncGen = struct { } fn airPtrElemPtr(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data; const ptr_ty = self.air.typeOf(bin_op.lhs); @@ -5934,9 +5907,6 @@ pub const FuncGen = struct { } fn airStructFieldPtr(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const struct_field = self.air.extraData(Air.StructField, ty_pl.payload).data; const struct_ptr = try self.resolveInst(struct_field.struct_operand); @@ -5949,8 +5919,6 @@ pub const FuncGen = struct { inst: Air.Inst.Index, field_index: u32, ) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const struct_ptr = try self.resolveInst(ty_op.operand); const struct_ptr_ty = self.air.typeOf(ty_op.operand); @@ -5959,8 +5927,6 @@ pub const FuncGen = struct { fn airStructFieldVal(self: *FuncGen, body_tail: []const Air.Inst.Index) !?*llvm.Value { const inst = body_tail[0]; - if 
(self.liveness.isUnused(inst)) return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const struct_field = self.air.extraData(Air.StructField, ty_pl.payload).data; const struct_ty = self.air.typeOf(struct_field.struct_operand); @@ -6060,8 +6026,6 @@ pub const FuncGen = struct { } fn airFieldParentPtr(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const extra = self.air.extraData(Air.FieldParentPtr, ty_pl.payload).data; @@ -6083,9 +6047,6 @@ pub const FuncGen = struct { } fn airNot(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); @@ -6263,8 +6224,6 @@ pub const FuncGen = struct { const clobbers_len = @truncate(u31, extra.data.flags); var extra_i: usize = extra.end; - if (!is_volatile and self.liveness.isUnused(inst)) return null; - const outputs = @ptrCast([]const Air.Inst.Ref, self.air.extra[extra_i..][0..extra.data.outputs_len]); extra_i += outputs.len; const inputs = @ptrCast([]const Air.Inst.Ref, self.air.extra[extra_i..][0..extra.data.inputs_len]); @@ -6610,8 +6569,6 @@ pub const FuncGen = struct { operand_is_ptr: bool, pred: llvm.IntPredicate, ) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = try self.resolveInst(un_op); const operand_ty = self.air.typeOf(un_op); @@ -6659,8 +6616,6 @@ pub const FuncGen = struct { op: llvm.IntPredicate, operand_is_ptr: bool, ) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = try self.resolveInst(un_op); const operand_ty = self.air.typeOf(un_op); @@ -6701,8 +6656,6 @@ pub const FuncGen = struct { } fn airOptionalPayloadPtr(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); const optional_ty = self.air.typeOf(ty_op.operand).childType(); @@ -6756,8 +6709,6 @@ pub const FuncGen = struct { fn airOptionalPayload(self: *FuncGen, body_tail: []const Air.Inst.Index) !?*llvm.Value { const inst = body_tail[0]; - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); const optional_ty = self.air.typeOf(ty_op.operand); @@ -6780,8 +6731,6 @@ pub const FuncGen = struct { operand_is_ptr: bool, ) !?*llvm.Value { const inst = body_tail[0]; - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); const operand_ty = self.air.typeOf(ty_op.operand); @@ -6817,9 +6766,6 @@ pub const FuncGen = struct { inst: Air.Inst.Index, operand_is_ptr: bool, ) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); const operand_ty = self.air.typeOf(ty_op.operand); @@ -6893,8 +6839,6 @@ pub const FuncGen = struct { } fn airSaveErrReturnTraceIndex(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const target = 
self.dg.module.getTarget(); const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; @@ -6911,8 +6855,6 @@ pub const FuncGen = struct { } fn airWrapOptional(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const payload_ty = self.air.typeOf(ty_op.operand); const non_null_bit = self.context.intType(8).constInt(1, .False); @@ -6943,8 +6885,6 @@ pub const FuncGen = struct { } fn airWrapErrUnionPayload(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const err_un_ty = self.air.typeOfIndex(inst); const operand = try self.resolveInst(ty_op.operand); @@ -6978,8 +6918,6 @@ pub const FuncGen = struct { } fn airWrapErrUnionErr(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const err_un_ty = self.air.typeOfIndex(inst); const payload_ty = err_un_ty.errorUnionPayload(); @@ -7015,8 +6953,6 @@ pub const FuncGen = struct { } fn airWasmMemorySize(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const pl_op = self.air.instructions.items(.data)[inst].pl_op; const index = pl_op.payload; const llvm_u32 = self.context.intType(32); @@ -7061,8 +6997,6 @@ pub const FuncGen = struct { } fn airMin(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); const rhs = try self.resolveInst(bin_op.rhs); @@ -7074,8 +7008,6 @@ pub const FuncGen = struct { } fn airMax(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); const rhs = try self.resolveInst(bin_op.rhs); @@ -7087,8 +7019,6 @@ pub const FuncGen = struct { } fn airSlice(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data; const ptr = try self.resolveInst(bin_op.lhs); @@ -7103,7 +7033,6 @@ pub const FuncGen = struct { } fn airAdd(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7118,7 +7047,6 @@ pub const FuncGen = struct { } fn airAddWrap(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7129,8 +7057,6 @@ pub const FuncGen = struct { } fn airAddSat(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); const rhs = try self.resolveInst(bin_op.rhs); @@ -7144,7 +7070,6 @@ pub const FuncGen = struct { } fn airSub(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; 
self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7159,7 +7084,6 @@ pub const FuncGen = struct { } fn airSubWrap(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7170,8 +7094,6 @@ pub const FuncGen = struct { } fn airSubSat(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); const rhs = try self.resolveInst(bin_op.rhs); @@ -7184,7 +7106,6 @@ pub const FuncGen = struct { } fn airMul(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7199,7 +7120,6 @@ pub const FuncGen = struct { } fn airMulWrap(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7210,8 +7130,6 @@ pub const FuncGen = struct { } fn airMulSat(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); const rhs = try self.resolveInst(bin_op.rhs); @@ -7224,7 +7142,6 @@ pub const FuncGen = struct { } fn airDivFloat(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7236,7 +7153,6 @@ pub const FuncGen = struct { } fn airDivTrunc(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7254,7 +7170,6 @@ pub const FuncGen = struct { } fn airDivFloor(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7287,7 +7202,6 @@ pub const FuncGen = struct { } fn airDivExact(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7302,7 +7216,6 @@ pub const FuncGen = struct { } fn airRem(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7317,7 +7230,6 @@ pub const FuncGen = struct { } fn airMod(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const bin_op = self.air.instructions.items(.data)[inst].bin_op; @@ -7347,8 +7259,6 @@ pub const FuncGen = struct { } fn airPtrAdd(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { 
- if (self.liveness.isUnused(inst)) return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data; const base_ptr = try self.resolveInst(bin_op.lhs); @@ -7368,8 +7278,6 @@ pub const FuncGen = struct { } fn airPtrSub(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data; const base_ptr = try self.resolveInst(bin_op.lhs); @@ -7395,9 +7303,6 @@ pub const FuncGen = struct { signed_intrinsic: []const u8, unsigned_intrinsic: []const u8, ) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const extra = self.air.extraData(Air.Bin, ty_pl.payload).data; @@ -7686,8 +7591,6 @@ pub const FuncGen = struct { } fn airMulAdd(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const pl_op = self.air.instructions.items(.data)[inst].pl_op; const extra = self.air.extraData(Air.Bin, pl_op.payload).data; @@ -7700,9 +7603,6 @@ pub const FuncGen = struct { } fn airShlWithOverflow(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const extra = self.air.extraData(Air.Bin, ty_pl.payload).data; @@ -7759,8 +7659,6 @@ pub const FuncGen = struct { } fn airAnd(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); const rhs = try self.resolveInst(bin_op.rhs); @@ -7768,8 +7666,6 @@ pub const FuncGen = struct { } fn airOr(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); const rhs = try self.resolveInst(bin_op.rhs); @@ -7777,8 +7673,6 @@ pub const FuncGen = struct { } fn airXor(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); const rhs = try self.resolveInst(bin_op.rhs); @@ -7786,8 +7680,6 @@ pub const FuncGen = struct { } fn airShlExact(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); @@ -7809,8 +7701,6 @@ pub const FuncGen = struct { } fn airShl(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); @@ -7831,8 +7721,6 @@ pub const FuncGen = struct { } fn airShlSat(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const bin_op = self.air.instructions.items(.data)[inst].bin_op; const lhs = try self.resolveInst(bin_op.lhs); @@ -7876,8 +7764,6 @@ pub const FuncGen = struct { } fn airShr(self: *FuncGen, inst: Air.Inst.Index, is_exact: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const bin_op = self.air.instructions.items(.data)[inst].bin_op; const 
lhs = try self.resolveInst(bin_op.lhs); @@ -7912,9 +7798,6 @@ pub const FuncGen = struct { } fn airIntCast(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const target = self.dg.module.getTarget(); const ty_op = self.air.instructions.items(.data)[inst].ty_op; const dest_ty = self.air.typeOfIndex(inst); @@ -7937,8 +7820,6 @@ pub const FuncGen = struct { } fn airTrunc(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); const dest_llvm_ty = try self.dg.lowerType(self.air.typeOfIndex(inst)); @@ -7946,9 +7827,6 @@ pub const FuncGen = struct { } fn airFptrunc(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); const operand_ty = self.air.typeOf(ty_op.operand); @@ -7978,9 +7856,6 @@ pub const FuncGen = struct { } fn airFpext(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); const operand_ty = self.air.typeOf(ty_op.operand); @@ -8010,9 +7885,6 @@ pub const FuncGen = struct { } fn airPtrToInt(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = try self.resolveInst(un_op); const dest_llvm_ty = try self.dg.lowerType(self.air.typeOfIndex(inst)); @@ -8020,8 +7892,6 @@ pub const FuncGen = struct { } fn airBitCast(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand_ty = self.air.typeOf(ty_op.operand); const inst_ty = self.air.typeOfIndex(inst); @@ -8137,9 +8007,6 @@ pub const FuncGen = struct { } fn airBoolToInt(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) - return null; - const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = try self.resolveInst(un_op); return operand; @@ -8189,7 +8056,6 @@ pub const FuncGen = struct { } fn airAlloc(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; const ptr_ty = self.air.typeOfIndex(inst); const pointee_type = ptr_ty.childType(); if (!pointee_type.isFnOrHasRuntimeBitsIgnoreComptime()) return self.dg.lowerPtrToVoid(ptr_ty); @@ -8201,7 +8067,6 @@ pub const FuncGen = struct { } fn airRetPtr(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; const ptr_ty = self.air.typeOfIndex(inst); const ret_ty = ptr_ty.childType(); if (!ret_ty.isFnOrHasRuntimeBitsIgnoreComptime()) return self.dg.lowerPtrToVoid(ptr_ty); @@ -8289,8 +8154,6 @@ pub const FuncGen = struct { const ptr = try fg.resolveInst(ty_op.operand); elide: { - if (ptr_info.@"volatile") break :elide; - if (fg.liveness.isUnused(inst)) return null; if (!isByRef(ptr_info.pointee_type)) break :elide; if (!canElideLoad(fg, body_tail)) break :elide; return ptr; @@ -8314,8 +8177,7 @@ pub const FuncGen = struct { } fn airRetAddr(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - + _ = inst; 
const llvm_usize = try self.dg.lowerType(Type.usize); const target = self.dg.module.getTarget(); if (!target_util.supportsReturnAddress(target)) { @@ -8331,8 +8193,7 @@ pub const FuncGen = struct { } fn airFrameAddress(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - + _ = inst; const llvm_i32 = self.context.intType(32); const llvm_fn_name = "llvm.frameaddress.p0"; const llvm_fn = self.dg.object.llvm_module.getNamedFunction(llvm_fn_name) orelse blk: { @@ -8462,8 +8323,6 @@ pub const FuncGen = struct { const ptr = try self.resolveInst(atomic_load.ptr); const ptr_ty = self.air.typeOf(atomic_load.ptr); const ptr_info = ptr_ty.ptrInfo().data; - if (!ptr_info.@"volatile" and self.liveness.isUnused(inst)) - return null; const elem_ty = ptr_info.pointee_type; if (!elem_ty.hasRuntimeBitsIgnoreComptime()) return null; @@ -8577,8 +8436,6 @@ pub const FuncGen = struct { } fn airGetUnionTag(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const un_ty = self.air.typeOf(ty_op.operand); const target = self.dg.module.getTarget(); @@ -8603,8 +8460,6 @@ pub const FuncGen = struct { } fn airUnaryOp(self: *FuncGen, inst: Air.Inst.Index, comptime op: FloatOp) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = try self.resolveInst(un_op); const operand_ty = self.air.typeOf(un_op); @@ -8613,7 +8468,6 @@ pub const FuncGen = struct { } fn airNeg(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const un_op = self.air.instructions.items(.data)[inst].un_op; @@ -8624,8 +8478,6 @@ pub const FuncGen = struct { } fn airClzCtz(self: *FuncGen, inst: Air.Inst.Index, llvm_fn_name: []const u8) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand_ty = self.air.typeOf(ty_op.operand); const operand = try self.resolveInst(ty_op.operand); @@ -8652,8 +8504,6 @@ pub const FuncGen = struct { } fn airBitOp(self: *FuncGen, inst: Air.Inst.Index, llvm_fn_name: []const u8) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand_ty = self.air.typeOf(ty_op.operand); const operand = try self.resolveInst(ty_op.operand); @@ -8679,8 +8529,6 @@ pub const FuncGen = struct { } fn airByteSwap(self: *FuncGen, inst: Air.Inst.Index, llvm_fn_name: []const u8) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const target = self.dg.module.getTarget(); const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand_ty = self.air.typeOf(ty_op.operand); @@ -8734,8 +8582,6 @@ pub const FuncGen = struct { } fn airErrorSetHasValue(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); const error_set_ty = self.air.getRefType(ty_op.ty); @@ -8781,8 +8627,6 @@ pub const FuncGen = struct { } fn airIsNamedEnumValue(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = try 
self.resolveInst(un_op); const enum_ty = self.air.typeOf(un_op); @@ -8862,8 +8706,6 @@ pub const FuncGen = struct { } fn airTagName(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = try self.resolveInst(un_op); const enum_ty = self.air.typeOf(un_op); @@ -8995,8 +8837,6 @@ pub const FuncGen = struct { } fn airErrorName(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = try self.resolveInst(un_op); const slice_ty = self.air.typeOfIndex(inst); @@ -9011,8 +8851,6 @@ pub const FuncGen = struct { } fn airSplat(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const scalar = try self.resolveInst(ty_op.operand); const vector_ty = self.air.typeOfIndex(inst); @@ -9021,8 +8859,6 @@ pub const FuncGen = struct { } fn airSelect(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const pl_op = self.air.instructions.items(.data)[inst].pl_op; const extra = self.air.extraData(Air.Bin, pl_op.payload).data; const pred = try self.resolveInst(pl_op.operand); @@ -9033,8 +8869,6 @@ pub const FuncGen = struct { } fn airShuffle(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const extra = self.air.extraData(Air.Shuffle, ty_pl.payload).data; const a = try self.resolveInst(extra.a); @@ -9134,7 +8968,6 @@ pub const FuncGen = struct { } fn airReduce(self: *FuncGen, inst: Air.Inst.Index, want_fast_math: bool) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; self.builder.setFastMath(want_fast_math); const target = self.dg.module.getTarget(); @@ -9221,8 +9054,6 @@ pub const FuncGen = struct { } fn airAggregateInit(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const result_ty = self.air.typeOfIndex(inst); const len = @intCast(usize, result_ty.arrayLen()); @@ -9360,8 +9191,6 @@ pub const FuncGen = struct { } fn airUnionInit(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const extra = self.air.extraData(Air.UnionInit, ty_pl.payload).data; const union_ty = self.air.typeOfIndex(inst); @@ -9566,8 +9395,6 @@ pub const FuncGen = struct { } fn airAddrSpaceCast(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const ty_op = self.air.instructions.items(.data)[inst].ty_op; const inst_ty = self.air.typeOfIndex(inst); const operand = try self.resolveInst(ty_op.operand); @@ -9592,8 +9419,6 @@ pub const FuncGen = struct { } fn airWorkItemId(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const target = self.dg.module.getTarget(); assert(target.cpu.arch == .amdgcn); // TODO is to port this function to other GPU architectures @@ -9603,8 +9428,6 @@ pub const FuncGen = struct { } fn airWorkGroupSize(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const target = 
self.dg.module.getTarget(); assert(target.cpu.arch == .amdgcn); // TODO is to port this function to other GPU architectures @@ -9634,8 +9457,6 @@ pub const FuncGen = struct { } fn airWorkGroupId(self: *FuncGen, inst: Air.Inst.Index) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const target = self.dg.module.getTarget(); assert(target.cpu.arch == .amdgcn); // TODO is to port this function to other GPU architectures @@ -9756,8 +9577,6 @@ pub const FuncGen = struct { struct_ptr_ty: Type, field_index: u32, ) !?*llvm.Value { - if (self.liveness.isUnused(inst)) return null; - const target = self.dg.object.target; const struct_ty = struct_ptr_ty.childType(); switch (struct_ty.zigTypeTag()) { diff --git a/test/behavior/var_args.zig b/test/behavior/var_args.zig index cdfbcc9188..21a5729dc0 100644 --- a/test/behavior/var_args.zig +++ b/test/behavior/var_args.zig @@ -215,3 +215,19 @@ test "copy VaList" { try std.testing.expectEqual(@as(c_int, 3), S.add(1, @as(c_int, 1))); try std.testing.expectEqual(@as(c_int, 9), S.add(2, @as(c_int, 1), @as(c_int, 2))); } + +test "unused VaList arg" { + const S = struct { + fn thirdArg(dummy: c_int, ...) callconv(.C) c_int { + _ = dummy; + + var ap = @cVaStart(); + defer @cVaEnd(&ap); + + _ = @cVaArg(&ap, c_int); + return @cVaArg(&ap, c_int); + } + }; + const x = S.thirdArg(0, @as(c_int, 1), @as(c_int, 2)); + try std.testing.expectEqual(@as(c_int, 2), x); +} -- cgit v1.2.3 From 6fc524de4267fce0d381215bfda9c2caca9a4f07 Mon Sep 17 00:00:00 2001 From: mlugg Date: Sun, 16 Apr 2023 04:00:23 +0100 Subject: cbe: integrate new Liveness behaviour --- src/codegen/c.zig | 527 +++++++++++++----------------------------------------- 1 file changed, 121 insertions(+), 406 deletions(-) (limited to 'src/codegen') diff --git a/src/codegen/c.zig b/src/codegen/c.zig index 9a89574b38..d2d594c901 100644 --- a/src/codegen/c.zig +++ b/src/codegen/c.zig @@ -78,7 +78,6 @@ const LoopDepth = u16; const Local = struct { cty_idx: CType.Index, alignas: CType.AlignAs, - is_in_clone: bool, pub fn getType(local: Local) LocalType { return .{ .cty_idx = local.cty_idx, .alignas = local.alignas }; @@ -275,16 +274,13 @@ pub const Function = struct { /// All the locals, to be emitted at the top of the function. locals: std.ArrayListUnmanaged(Local) = .{}, /// Which locals are available for reuse, based on Type. - /// Only locals in the last stack entry are available for reuse, - /// other entries will become available on loop exit. free_locals_map: LocalsMap = .{}, - is_in_clone: bool = false, /// Locals which will not be freed by Liveness. This is used after a /// Function body is lowered in order to make `free_locals_map` have /// 100% of the locals within so that it can be used to render the block /// of variable declarations at the top of a function, sorted descending /// by type alignment. - /// The value is whether the alloc is static or not. + /// The value is whether the alloc needs to be emitted in the header. allocs: std.AutoArrayHashMapUnmanaged(LocalIndex, bool) = .{}, /// Needed for memory used by the keys of free_locals_map entries. 
arena: std.heap.ArenaAllocator, @@ -302,7 +298,7 @@ pub const Function = struct { const alignment = 0; const decl_c_value = try f.allocLocalValue(ty, alignment); const gpa = f.object.dg.gpa; - try f.allocs.put(gpa, decl_c_value.new_local, true); + try f.allocs.put(gpa, decl_c_value.new_local, false); try writer.writeAll("static "); try f.object.dg.renderTypeAndName(writer, ty, decl_c_value, Const, alignment, .complete); try writer.writeAll(" = "); @@ -323,14 +319,15 @@ pub const Function = struct { }; } - /// Skips the reuse logic. + /// Skips the reuse logic. This function should be used for any persistent allocation, i.e. + /// those which go into `allocs`. This function does not add the resulting local into `allocs`; + /// that responsibility lies with the caller. fn allocLocalValue(f: *Function, ty: Type, alignment: u32) !CValue { const gpa = f.object.dg.gpa; const target = f.object.dg.module.getTarget(); try f.locals.append(gpa, .{ .cty_idx = try f.typeToIndex(ty, .complete), .alignas = CType.AlignAs.init(alignment, ty.abiAlignment(target)), - .is_in_clone = f.is_in_clone, }); return .{ .new_local = @intCast(LocalIndex, f.locals.items.len - 1) }; } @@ -341,7 +338,8 @@ pub const Function = struct { return result; } - /// Only allocates the local; does not print anything. + /// Only allocates the local; does not print anything. Will attempt to re-use locals, so should + /// not be used for persistent locals (i.e. those in `allocs`). fn allocAlignedLocal(f: *Function, ty: Type, _: CQualifiers, alignment: u32) !CValue { const target = f.object.dg.module.getTarget(); if (f.free_locals_map.getPtr(.{ @@ -2586,7 +2584,7 @@ pub fn genFunc(f: *Function) !void { f.free_locals_map.clearRetainingCapacity(); const main_body = f.air.getMainBody(); - try genBody(f, main_body); + try genBodyResolveState(f, undefined, &.{}, main_body, false); try o.indent_writer.insertNewline(); @@ -2597,8 +2595,8 @@ pub fn genFunc(f: *Function) !void { // alignment, descending. const free_locals = &f.free_locals_map; assert(f.value_map.count() == 0); // there must not be any unfreed locals - for (f.allocs.keys(), f.allocs.values()) |local_index, value| { - if (value) continue; // static + for (f.allocs.keys(), f.allocs.values()) |local_index, should_emit| { + if (!should_emit) continue; const local = f.locals.items[local_index]; log.debug("inserting local {d} into free_locals", .{local_index}); const gop = try free_locals.getOrPut(gpa, local.getType()); @@ -2715,6 +2713,10 @@ pub fn genHeader(dg: *DeclGen) error{ AnalysisFail, OutOfMemory }!void { } } +/// Generate code for an entire body which ends with a `noreturn` instruction. The states of +/// `value_map` and `free_locals_map` are undefined after the generation, and new locals may not +/// have been added to `free_locals_map`. For a version of this function that restores this state, +/// see `genBodyResolveState`. fn genBody(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutOfMemory }!void { const writer = f.object.writer(); if (body.len == 0) { @@ -2728,10 +2730,69 @@ fn genBody(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutO } } +/// Generate code for an entire body which ends with a `noreturn` instruction. The states of +/// `value_map` and `free_locals_map` are restored to their original values, and any non-allocated +/// locals introduced within the body are correctly added to `free_locals_map`. Operands in +/// `leading_deaths` have their deaths processed before the body is generated. 
+/// A scope is introduced (using braces) only if `inner` is `false`. +/// If `leading_deaths` is empty, `inst` may be `undefined`. +fn genBodyResolveState(f: *Function, inst: Air.Inst.Index, leading_deaths: []const Air.Inst.Index, body: []const Air.Inst.Index, inner: bool) error{ AnalysisFail, OutOfMemory }!void { + if (body.len == 0) { + // Don't go to the expense of cloning everything! + if (!inner) try f.object.writer().writeAll("{}"); + return; + } + + // TODO: we can probably avoid the copies in some other common cases too. + + const gpa = f.object.dg.gpa; + + // Save the original value_map and free_locals_map so that we can restore them after the body. + var old_value_map = try f.value_map.clone(); + defer old_value_map.deinit(); + var old_free_locals = try cloneFreeLocalsMap(gpa, &f.free_locals_map); + defer deinitFreeLocalsMap(gpa, &old_free_locals); + + // Remember how many locals there were before entering the body so that we can free any that + // were newly introduced. Any new locals must necessarily be logically free after the then + // branch is complete. + const pre_locals_len = @intCast(LocalIndex, f.locals.items.len); + + for (leading_deaths) |death| { + try die(f, inst, Air.indexToRef(death)); + } + + if (inner) { + try genBodyInner(f, body); + } else { + try genBody(f, body); + } + + f.value_map.deinit(); + f.value_map = old_value_map.move(); + deinitFreeLocalsMap(gpa, &f.free_locals_map); + f.free_locals_map = old_free_locals.move(); + + // Now, use the lengths we stored earlier to detect any locals the body generated, and free + // them, unless they were used to store allocs. + + for (pre_locals_len..f.locals.items.len) |local_i| { + const local_index = @intCast(LocalIndex, local_i); + if (f.allocs.contains(local_index)) { + continue; + } + try freeLocal(f, inst, local_index, 0); + } +} + fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutOfMemory }!void { const air_tags = f.air.instructions.items(.tag); for (body) |inst| { + if (f.liveness.isUnused(inst) and !f.air.mustLower(inst)) { + continue; + } + const result_value = switch (air_tags[inst]) { // zig fmt: off .constant => unreachable, // excluded from function bodies @@ -3009,11 +3070,6 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, fn airSliceField(f: *Function, inst: Air.Inst.Index, is_ptr: bool, field_name: []const u8) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const inst_ty = f.air.typeOfIndex(inst); const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -3032,10 +3088,7 @@ fn airSliceField(f: *Function, inst: Air.Inst.Index, is_ptr: bool, field_name: [ fn airPtrElemVal(f: *Function, inst: Air.Inst.Index) !CValue { const inst_ty = f.air.typeOfIndex(inst); const bin_op = f.air.instructions.items(.data)[inst].bin_op; - const ptr_ty = f.air.typeOf(bin_op.lhs); - if ((!ptr_ty.isVolatilePtr() and f.liveness.isUnused(inst)) or - !inst_ty.hasRuntimeBitsIgnoreComptime()) - { + if (!inst_ty.hasRuntimeBitsIgnoreComptime()) { try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); return .none; } @@ -3074,11 +3127,6 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - return .none; - } 
- const inst_ty = f.air.typeOfIndex(inst); const ptr_ty = f.air.typeOf(bin_op.lhs); const child_ty = ptr_ty.childType(); @@ -3116,10 +3164,7 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue { fn airSliceElemVal(f: *Function, inst: Air.Inst.Index) !CValue { const inst_ty = f.air.typeOfIndex(inst); const bin_op = f.air.instructions.items(.data)[inst].bin_op; - const slice_ty = f.air.typeOf(bin_op.lhs); - if ((!slice_ty.isVolatilePtr() and f.liveness.isUnused(inst)) or - !inst_ty.hasRuntimeBitsIgnoreComptime()) - { + if (!inst_ty.hasRuntimeBitsIgnoreComptime()) { try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); return .none; } @@ -3158,11 +3203,6 @@ fn airSliceElemPtr(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - return .none; - } - const slice_ty = f.air.typeOf(bin_op.lhs); const child_ty = slice_ty.elemType2(); const slice = try f.resolveInst(bin_op.lhs); @@ -3188,7 +3228,7 @@ fn airSliceElemPtr(f: *Function, inst: Air.Inst.Index) !CValue { fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue { const bin_op = f.air.instructions.items(.data)[inst].bin_op; const inst_ty = f.air.typeOfIndex(inst); - if (f.liveness.isUnused(inst) or !inst_ty.hasRuntimeBitsIgnoreComptime()) { + if (!inst_ty.hasRuntimeBitsIgnoreComptime()) { try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); return .none; } @@ -3224,40 +3264,34 @@ fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue { } fn airAlloc(f: *Function, inst: Air.Inst.Index) !CValue { - if (f.liveness.isUnused(inst)) return .none; - const inst_ty = f.air.typeOfIndex(inst); const elem_type = inst_ty.elemType(); if (!elem_type.isFnOrHasRuntimeBitsIgnoreComptime()) return .{ .undef = inst_ty }; const target = f.object.dg.module.getTarget(); - const local = try f.allocAlignedLocal( + const local = try f.allocLocalValue( elem_type, - CQualifiers.init(.{ .@"const" = inst_ty.isConstPtr() }), inst_ty.ptrAlignment(target), ); log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.new_local }); const gpa = f.object.dg.module.gpa; - try f.allocs.put(gpa, local.new_local, false); + try f.allocs.put(gpa, local.new_local, true); return .{ .local_ref = local.new_local }; } fn airRetPtr(f: *Function, inst: Air.Inst.Index) !CValue { - if (f.liveness.isUnused(inst)) return .none; - const inst_ty = f.air.typeOfIndex(inst); const elem_ty = inst_ty.elemType(); if (!elem_ty.isFnOrHasRuntimeBitsIgnoreComptime()) return .{ .undef = inst_ty }; const target = f.object.dg.module.getTarget(); - const local = try f.allocAlignedLocal( + const local = try f.allocLocalValue( elem_ty, - CQualifiers.init(.{ .@"const" = inst_ty.isConstPtr() }), inst_ty.ptrAlignment(target), ); log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.new_local }); const gpa = f.object.dg.module.gpa; - try f.allocs.put(gpa, local.new_local, false); + try f.allocs.put(gpa, local.new_local, true); return .{ .local_ref = local.new_local }; } @@ -3293,9 +3327,7 @@ fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue { const ptr_info = ptr_scalar_ty.ptrInfo().data; const src_ty = ptr_info.pointee_type; - if (!src_ty.hasRuntimeBitsIgnoreComptime() or - (!ptr_info.@"volatile" and f.liveness.isUnused(inst))) - { + if (!src_ty.hasRuntimeBitsIgnoreComptime()) { try reap(f, inst, &.{ty_op.operand}); return .none; } @@ -3442,11 +3474,6 @@ fn airRet(f: *Function, inst: 
Air.Inst.Index, is_ptr: bool) !CValue { fn airIntCast(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -3470,10 +3497,6 @@ fn airIntCast(f: *Function, inst: Air.Inst.Index) !CValue { fn airTrunc(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -3569,10 +3592,6 @@ fn airTrunc(f: *Function, inst: Air.Inst.Index) !CValue { fn airBoolToInt(f: *Function, inst: Air.Inst.Index) !CValue { const un_op = f.air.instructions.items(.data)[inst].un_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{un_op}); - return .none; - } const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); const writer = f.object.writer(); @@ -3746,11 +3765,6 @@ fn airOverflow(f: *Function, inst: Air.Inst.Index, operation: []const u8, info: const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - return .none; - } - const lhs = try f.resolveInst(bin_op.lhs); const rhs = try f.resolveInst(bin_op.rhs); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); @@ -3790,11 +3804,6 @@ fn airNot(f: *Function, inst: Air.Inst.Index) !CValue { const scalar_ty = operand_ty.scalarType(); if (scalar_ty.tag() != .bool) return try airUnBuiltinCall(f, inst, "not", .bits); - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const op = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -3829,11 +3838,6 @@ fn airBinOp( if ((scalar_ty.isInt() and scalar_ty.bitSize(target) > 64) or scalar_ty.isRuntimeFloat()) return try airBinBuiltinCall(f, inst, operation, info); - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - return .none; - } - const lhs = try f.resolveInst(bin_op.lhs); const rhs = try f.resolveInst(bin_op.rhs); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); @@ -3865,11 +3869,6 @@ fn airCmpOp( data: anytype, operator: std.math.CompareOperator, ) !CValue { - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ data.lhs, data.rhs }); - return .none; - } - const operand_ty = f.air.typeOf(data.lhs); const scalar_ty = operand_ty.scalarType(); @@ -3918,11 +3917,6 @@ fn airEquality( ) !CValue { const bin_op = f.air.instructions.items(.data)[inst].bin_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - return .none; - } - const operand_ty = f.air.typeOf(bin_op.lhs); const target = f.object.dg.module.getTarget(); const operand_bits = operand_ty.bitSize(target); @@ -3987,11 +3981,6 @@ fn airEquality( fn airCmpLtErrorsLen(f: *Function, inst: Air.Inst.Index) !CValue { const un_op = f.air.instructions.items(.data)[inst].un_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{un_op}); - return .none; - } - const inst_ty = f.air.typeOfIndex(inst); const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); @@ -4008,10 +3997,6 @@ fn airCmpLtErrorsLen(f: *Function, inst: Air.Inst.Index) !CValue { fn airPtrAddSub(f: *Function, inst: 
Air.Inst.Index, operator: u8) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - return .none; - } const lhs = try f.resolveInst(bin_op.lhs); const rhs = try f.resolveInst(bin_op.rhs); @@ -4059,11 +4044,6 @@ fn airPtrAddSub(f: *Function, inst: Air.Inst.Index, operator: u8) !CValue { fn airMinMax(f: *Function, inst: Air.Inst.Index, operator: u8, operation: []const u8) !CValue { const bin_op = f.air.instructions.items(.data)[inst].bin_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - return .none; - } - const inst_ty = f.air.typeOfIndex(inst); const inst_scalar_ty = inst_ty.scalarType(); @@ -4107,11 +4087,6 @@ fn airSlice(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - return .none; - } - const ptr = try f.resolveInst(bin_op.lhs); const len = try f.resolveInst(bin_op.rhs); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); @@ -4316,6 +4291,7 @@ fn airBlock(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const extra = f.air.extraData(Air.Block, ty_pl.payload); const body = f.air.extra[extra.end..][0..extra.data.body_len]; + const liveness_block = f.liveness.getBlock(inst); const block_id: usize = f.next_block_index; f.next_block_index += 1; @@ -4332,7 +4308,15 @@ fn airBlock(f: *Function, inst: Air.Inst.Index) !CValue { .result = result, }); - try genBodyInner(f, body); + try genBodyResolveState(f, inst, &.{}, body, true); + + assert(f.blocks.remove(inst)); + + // The body might result in some values we had beforehand being killed + for (liveness_block.deaths) |death| { + try die(f, inst, Air.indexToRef(death)); + } + try f.object.indent_writer.insertNewline(); // label might be unused, add a dummy goto // label must be followed by an expression, add an empty one. @@ -4366,6 +4350,7 @@ fn lowerTry( ) !CValue { const err_union = try f.resolveInst(operand); const result_ty = f.air.typeOfIndex(inst); + const liveness_condbr = f.liveness.getCondBr(inst); const writer = f.object.writer(); const payload_ty = err_union_ty.errorUnionPayload(); const payload_has_bits = payload_ty.hasRuntimeBitsIgnoreComptime(); @@ -4389,10 +4374,15 @@ fn lowerTry( } try writer.writeByte(')'); - try genBody(f, body); + try genBodyResolveState(f, inst, liveness_condbr.else_deaths, body, false); try f.object.indent_writer.insertNewline(); } + // Now we have the "then branch" (in terms of the liveness data); process any deaths. 
+ for (liveness_condbr.then_deaths) |death| { + try die(f, inst, Air.indexToRef(death)); + } + if (!payload_has_bits) { if (!operand_is_ptr) { return .none; @@ -4466,10 +4456,6 @@ fn airBr(f: *Function, inst: Air.Inst.Index) !CValue { fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; const dest_ty = f.air.typeOfIndex(inst); - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -4593,7 +4579,6 @@ fn airBreakpoint(writer: anytype) !CValue { } fn airRetAddr(f: *Function, inst: Air.Inst.Index) !CValue { - if (f.liveness.isUnused(inst)) return .none; const writer = f.object.writer(); const local = try f.allocLocal(inst, Type.usize); try f.writeCValue(writer, local, .Other); @@ -4604,7 +4589,6 @@ fn airRetAddr(f: *Function, inst: Air.Inst.Index) !CValue { } fn airFrameAddress(f: *Function, inst: Air.Inst.Index) !CValue { - if (f.liveness.isUnused(inst)) return .none; const writer = f.object.writer(); const local = try f.allocLocal(inst, Type.usize); try f.writeCValue(writer, local, .Other); @@ -4640,7 +4624,7 @@ fn airLoop(f: *Function, inst: Air.Inst.Index) !CValue { const writer = f.object.writer(); try writer.writeAll("for (;;) "); - try genBody(f, body); + try genBody(f, body); // no need to restore state, we're noreturn try writer.writeByte('\n'); return .none; @@ -4656,61 +4640,24 @@ fn airCondBr(f: *Function, inst: Air.Inst.Index) !CValue { const liveness_condbr = f.liveness.getCondBr(inst); const writer = f.object.writer(); - // Keep using the original for the then branch; use a clone of the value - // map for the else branch. - const gpa = f.object.dg.gpa; - var cloned_map = try f.value_map.clone(); - defer cloned_map.deinit(); - var cloned_frees = try cloneFreeLocalsMap(gpa, &f.free_locals_map); - defer deinitFreeLocalsMap(gpa, &cloned_frees); - - // Remember how many locals there were before entering the then branch so - // that we can notice and use them in the else branch. Any new locals must - // necessarily be free already after the then branch is complete. - const pre_locals_len = @intCast(LocalIndex, f.locals.items.len); - // Remember how many allocs there were before entering the then branch so - // that we can notice and make sure not to use them in the else branch. - // Any new allocs must be removed from the free list. - const pre_allocs_len = @intCast(LocalIndex, f.allocs.count()); - const was_in_clone = f.is_in_clone; - f.is_in_clone = true; - - for (liveness_condbr.then_deaths) |operand| { - try die(f, inst, Air.indexToRef(operand)); - } - try writer.writeAll("if ("); try f.writeCValue(writer, cond, .Other); try writer.writeAll(") "); - try genBody(f, then_body); - // TODO: If body ends in goto, elide the else block? 
- const needs_else = then_body.len <= 0 or f.air.instructions.items(.tag)[then_body[then_body.len - 1]] != .br; - if (needs_else) { - try writer.writeAll(" else "); - } else { - try writer.writeByte('\n'); - } + try genBodyResolveState(f, inst, liveness_condbr.then_deaths, then_body, false); - f.value_map.deinit(); - f.value_map = cloned_map.move(); - const free_locals = &f.free_locals_map; - deinitFreeLocalsMap(gpa, free_locals); - free_locals.* = cloned_frees.move(); - f.is_in_clone = was_in_clone; - for (liveness_condbr.else_deaths) |operand| { - try die(f, inst, Air.indexToRef(operand)); - } - - try noticeBranchFrees(f, pre_locals_len, pre_allocs_len, inst); + // We don't need to use `genBodyResolveState` for the else block, because this instruction is + // noreturn so must terminate a body, therefore we don't need to leave `value_map` or + // `free_locals_map` well defined (our parent is responsible for doing that). - if (needs_else) { - try genBody(f, else_body); - } else { - try genBodyInner(f, else_body); + for (liveness_condbr.else_deaths) |death| { + try die(f, inst, Air.indexToRef(death)); } - try f.object.indent_writer.insertNewline(); + // We never actually need an else block, because our branches are noreturn so must (for + // instance) `br` to a block (label). + + try genBodyInner(f, else_body); return .none; } @@ -4741,9 +4688,8 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue { const liveness = try f.liveness.getSwitchBr(gpa, inst, switch_br.data.cases_len + 1); defer gpa.free(liveness.deaths); - // On the final iteration we do not clone the map. This ensures that - // lowering proceeds after the switch_br taking into account the - // mutations to the liveness information. + // On the final iteration we do not need to fix any state. This is because, like in the `else` + // branch of a `cond_br`, our parent has to do it for this entire body anyway. const last_case_i = switch_br.data.cases_len - @boolToInt(switch_br.data.else_body_len == 0); var extra_index: usize = switch_br.end; @@ -4767,56 +4713,23 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue { try writer.writeByte(' '); if (case_i != last_case_i) { - const old_value_map = f.value_map; - f.value_map = try old_value_map.clone(); - var free_locals = &f.free_locals_map; - const old_free_locals = free_locals.*; - free_locals.* = try cloneFreeLocalsMap(gpa, free_locals); - - // Remember how many locals there were before entering each branch so that - // we can notice and use them in subsequent branches. Any new locals must - // necessarily be free already after the previous branch is complete. - const pre_locals_len = @intCast(LocalIndex, f.locals.items.len); - // Remember how many allocs there were before entering each branch so that - // we can notice and make sure not to use them in subsequent branches. - // Any new allocs must be removed from the free list. 
- const pre_allocs_len = @intCast(LocalIndex, f.allocs.count()); - const was_in_clone = f.is_in_clone; - f.is_in_clone = true; - - { - defer { - f.is_in_clone = was_in_clone; - f.value_map.deinit(); - deinitFreeLocalsMap(gpa, free_locals); - f.value_map = old_value_map; - free_locals.* = old_free_locals; - } - - for (liveness.deaths[case_i]) |operand| { - try die(f, inst, Air.indexToRef(operand)); - } - - try genBody(f, case_body); - } - - try noticeBranchFrees(f, pre_locals_len, pre_allocs_len, inst); + try genBodyResolveState(f, inst, liveness.deaths[case_i], case_body, false); } else { - for (liveness.deaths[case_i]) |operand| { - try die(f, inst, Air.indexToRef(operand)); + for (liveness.deaths[case_i]) |death| { + try die(f, inst, Air.indexToRef(death)); } try genBody(f, case_body); } // The case body must be noreturn so we don't need to insert a break. - } const else_body = f.air.extra[extra_index..][0..switch_br.data.else_body_len]; try f.object.indent_writer.insertNewline(); if (else_body.len > 0) { - for (liveness.deaths[liveness.deaths.len - 1]) |operand| { - try die(f, inst, Air.indexToRef(operand)); + // Note that this must be the last case (i.e. the `last_case_i` case was not hit above) + for (liveness.deaths[liveness.deaths.len - 1]) |death| { + try die(f, inst, Air.indexToRef(death)); } try writer.writeAll("default: "); try genBody(f, else_body); @@ -4843,6 +4756,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { const extra = f.air.extraData(Air.Asm, ty_pl.payload); const is_volatile = @truncate(u1, extra.data.flags >> 31) != 0; const clobbers_len = @truncate(u31, extra.data.flags); + const gpa = f.object.dg.gpa; var extra_i: usize = extra.end; const outputs = @ptrCast([]const Air.Inst.Ref, f.air.extra[extra_i..][0..extra.data.outputs_len]); extra_i += outputs.len; @@ -4850,8 +4764,6 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { extra_i += inputs.len; const result = result: { - if (!is_volatile and f.liveness.isUnused(inst)) break :result .none; - const writer = f.object.writer(); const inst_ty = f.air.typeOfIndex(inst); const local = if (inst_ty.hasRuntimeBitsIgnoreComptime()) local: { @@ -4887,6 +4799,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { try writer.writeAll("register "); const alignment = 0; const local_value = try f.allocLocalValue(output_ty, alignment); + try f.allocs.put(gpa, local_value.new_local, false); try f.object.dg.renderTypeAndName(writer, output_ty, local_value, .{}, alignment, .complete); try writer.writeAll(" __asm(\""); try writer.writeAll(constraint["={".len .. 
constraint.len - "}".len]); @@ -4919,6 +4832,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { if (is_reg) try writer.writeAll("register "); const alignment = 0; const local_value = try f.allocLocalValue(input_ty, alignment); + try f.allocs.put(gpa, local_value.new_local, false); try f.object.dg.renderTypeAndName(writer, input_ty, local_value, Const, alignment, .complete); if (is_reg) { try writer.writeAll(" __asm(\""); @@ -5101,11 +5015,6 @@ fn airIsNull( ) !CValue { const un_op = f.air.instructions.items(.data)[inst].un_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{un_op}); - return .none; - } - const writer = f.object.writer(); const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); @@ -5151,11 +5060,6 @@ fn airIsNull( fn airOptionalPayload(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); const opt_ty = f.air.typeOf(ty_op.operand); @@ -5203,11 +5107,6 @@ fn airOptionalPayload(f: *Function, inst: Air.Inst.Index) !CValue { fn airOptionalPayloadPtr(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const writer = f.object.writer(); const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -5337,11 +5236,6 @@ fn airStructFieldPtr(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const extra = f.air.extraData(Air.StructField, ty_pl.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{extra.struct_operand}); - return .none; - } - const container_ptr_val = try f.resolveInst(extra.struct_operand); try reap(f, inst, &.{extra.struct_operand}); const container_ptr_ty = f.air.typeOf(extra.struct_operand); @@ -5351,11 +5245,6 @@ fn airStructFieldPtr(f: *Function, inst: Air.Inst.Index) !CValue { fn airStructFieldPtrIndex(f: *Function, inst: Air.Inst.Index, index: u8) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const container_ptr_val = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); const container_ptr_ty = f.air.typeOf(ty_op.operand); @@ -5366,11 +5255,6 @@ fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const extra = f.air.extraData(Air.FieldParentPtr, ty_pl.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{extra.field_ptr}); - return .none; - } - const target = f.object.dg.module.getTarget(); const container_ptr_ty = f.air.typeOfIndex(inst); const container_ty = container_ptr_ty.childType(); @@ -5489,11 +5373,6 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const extra = f.air.extraData(Air.StructField, ty_pl.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{extra.struct_operand}); - return .none; - } - const inst_ty = f.air.typeOfIndex(inst); if (!inst_ty.hasRuntimeBitsIgnoreComptime()) { try reap(f, inst, &.{extra.struct_operand}); @@ -5639,11 +5518,6 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue 
{ fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const inst_ty = f.air.typeOfIndex(inst); const operand = try f.resolveInst(ty_op.operand); const operand_ty = f.air.typeOf(ty_op.operand); @@ -5676,11 +5550,6 @@ fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue { fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const inst_ty = f.air.typeOfIndex(inst); const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -5718,11 +5587,6 @@ fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValu fn airWrapOptional(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const inst_ty = f.air.typeOfIndex(inst); const payload = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -5764,10 +5628,6 @@ fn airWrapOptional(f: *Function, inst: Air.Inst.Index) !CValue { fn airWrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } const writer = f.object.writer(); const operand = try f.resolveInst(ty_op.operand); @@ -5831,7 +5691,7 @@ fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue { } fn airErrReturnTrace(f: *Function, inst: Air.Inst.Index) !CValue { - if (f.liveness.isUnused(inst)) return .none; + _ = inst; return f.fail("TODO: C backend: implement airErrReturnTrace", .{}); } @@ -5847,10 +5707,6 @@ fn airSaveErrReturnTraceIndex(f: *Function, inst: Air.Inst.Index) !CValue { fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } const inst_ty = f.air.typeOfIndex(inst); const payload_ty = inst_ty.errorUnionPayload(); @@ -5885,11 +5741,6 @@ fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue { fn airIsErr(f: *Function, inst: Air.Inst.Index, is_ptr: bool, operator: []const u8) !CValue { const un_op = f.air.instructions.items(.data)[inst].un_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{un_op}); - return .none; - } - const writer = f.object.writer(); const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); @@ -5923,11 +5774,6 @@ fn airIsErr(f: *Function, inst: Air.Inst.Index, is_ptr: bool, operator: []const fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); const inst_ty = f.air.typeOfIndex(inst); @@ -5961,11 +5807,6 @@ fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue { fn airFloatCast(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, 
&.{ty_op.operand}); - return .none; - } - const inst_ty = f.air.typeOfIndex(inst); const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -6009,11 +5850,6 @@ fn airFloatCast(f: *Function, inst: Air.Inst.Index) !CValue { fn airPtrToInt(f: *Function, inst: Air.Inst.Index) !CValue { const un_op = f.air.instructions.items(.data)[inst].un_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{un_op}); - return .none; - } - const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); const inst_ty = f.air.typeOfIndex(inst); @@ -6037,11 +5873,6 @@ fn airUnBuiltinCall( ) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); const inst_ty = f.air.typeOfIndex(inst); @@ -6085,11 +5916,6 @@ fn airBinBuiltinCall( ) !CValue { const bin_op = f.air.instructions.items(.data)[inst].bin_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - return .none; - } - const operand_ty = f.air.typeOf(bin_op.lhs); const operand_cty = try f.typeToCType(operand_ty, .complete); const is_big = operand_cty.tag() == .array; @@ -6142,11 +5968,6 @@ fn airCmpBuiltinCall( operation: enum { cmp, operator }, info: BuiltinInfo, ) !CValue { - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ data.lhs, data.rhs }); - return .none; - } - const lhs = try f.resolveInst(data.lhs); const rhs = try f.resolveInst(data.rhs); try reap(f, inst, &.{ data.lhs, data.rhs }); @@ -6317,9 +6138,6 @@ fn airAtomicLoad(f: *Function, inst: Air.Inst.Index) !CValue { const ptr = try f.resolveInst(atomic_load.ptr); try reap(f, inst, &.{atomic_load.ptr}); const ptr_ty = f.air.typeOf(atomic_load.ptr); - if (!ptr_ty.isVolatilePtr() and f.liveness.isUnused(inst)) { - return .none; - } const inst_ty = f.air.typeOfIndex(inst); const writer = f.object.writer(); @@ -6463,11 +6281,6 @@ fn airSetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue { fn airGetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -6491,11 +6304,6 @@ fn airGetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue { fn airTagName(f: *Function, inst: Air.Inst.Index) !CValue { const un_op = f.air.instructions.items(.data)[inst].un_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{un_op}); - return .none; - } - const inst_ty = f.air.typeOfIndex(inst); const enum_ty = f.air.typeOf(un_op); const operand = try f.resolveInst(un_op); @@ -6516,11 +6324,6 @@ fn airTagName(f: *Function, inst: Air.Inst.Index) !CValue { fn airErrorName(f: *Function, inst: Air.Inst.Index) !CValue { const un_op = f.air.instructions.items(.data)[inst].un_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{un_op}); - return .none; - } - const writer = f.object.writer(); const inst_ty = f.air.typeOfIndex(inst); const operand = try f.resolveInst(un_op); @@ -6537,11 +6340,6 @@ fn airErrorName(f: *Function, inst: Air.Inst.Index) !CValue { fn airSplat(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } - const operand = try 
f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); @@ -6573,11 +6371,6 @@ fn airSelect(f: *Function, inst: Air.Inst.Index) !CValue { const pl_op = f.air.instructions.items(.data)[inst].pl_op; const extra = f.air.extraData(Air.Bin, pl_op.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ pl_op.operand, extra.lhs, extra.rhs }); - return .none; - } - const pred = try f.resolveInst(pl_op.operand); const lhs = try f.resolveInst(extra.lhs); const rhs = try f.resolveInst(extra.rhs); @@ -6609,11 +6402,6 @@ fn airShuffle(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const extra = f.air.extraData(Air.Shuffle, ty_pl.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ extra.a, extra.b }); - return .none; - } - const mask = f.air.values[extra.mask]; const lhs = try f.resolveInst(extra.a); const rhs = try f.resolveInst(extra.b); @@ -6655,11 +6443,6 @@ fn airShuffle(f: *Function, inst: Air.Inst.Index) !CValue { fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue { const reduce = f.air.instructions.items(.data)[inst].reduce; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{reduce.operand}); - return .none; - } - const target = f.object.dg.module.getTarget(); const scalar_ty = f.air.typeOfIndex(inst); const operand = try f.resolveInst(reduce.operand); @@ -6831,8 +6614,6 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue { } } - if (f.liveness.isUnused(inst)) return .none; - const target = f.object.dg.module.getTarget(); const writer = f.object.writer(); @@ -6999,11 +6780,6 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[inst].ty_pl; const extra = f.air.extraData(Air.UnionInit, ty_pl.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{extra.init}); - return .none; - } - const union_ty = f.air.typeOfIndex(inst); const target = f.object.dg.module.getTarget(); const union_obj = union_ty.cast(Type.Payload.Union).?.data; @@ -7070,8 +6846,6 @@ fn airPrefetch(f: *Function, inst: Air.Inst.Index) !CValue { } fn airWasmMemorySize(f: *Function, inst: Air.Inst.Index) !CValue { - if (f.liveness.isUnused(inst)) return .none; - const pl_op = f.air.instructions.items(.data)[inst].pl_op; const writer = f.object.writer(); @@ -7104,10 +6878,6 @@ fn airWasmMemoryGrow(f: *Function, inst: Air.Inst.Index) !CValue { fn airFloatNeg(f: *Function, inst: Air.Inst.Index) !CValue { const un_op = f.air.instructions.items(.data)[inst].un_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{un_op}); - return .none; - } const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); @@ -7133,10 +6903,6 @@ fn airFloatNeg(f: *Function, inst: Air.Inst.Index) !CValue { fn airUnFloatOp(f: *Function, inst: Air.Inst.Index, operation: []const u8) !CValue { const un_op = f.air.instructions.items(.data)[inst].un_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{un_op}); - return .none; - } const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); @@ -7164,10 +6930,6 @@ fn airUnFloatOp(f: *Function, inst: Air.Inst.Index, operation: []const u8) !CVal fn airBinFloatOp(f: *Function, inst: Air.Inst.Index, operation: []const u8) !CValue { const bin_op = f.air.instructions.items(.data)[inst].bin_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - return .none; - } const lhs = try f.resolveInst(bin_op.lhs); const rhs = try 
f.resolveInst(bin_op.rhs); @@ -7200,10 +6962,6 @@ fn airBinFloatOp(f: *Function, inst: Air.Inst.Index, operation: []const u8) !CVa fn airMulAdd(f: *Function, inst: Air.Inst.Index) !CValue { const pl_op = f.air.instructions.items(.data)[inst].pl_op; const bin_op = f.air.extraData(Air.Bin, pl_op.payload).data; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs, pl_op.operand }); - return .none; - } const mulend1 = try f.resolveInst(bin_op.lhs); const mulend2 = try f.resolveInst(bin_op.rhs); @@ -7236,8 +6994,6 @@ fn airMulAdd(f: *Function, inst: Air.Inst.Index) !CValue { } fn airCVaStart(f: *Function, inst: Air.Inst.Index) !CValue { - if (f.liveness.isUnused(inst)) return .none; - const inst_ty = f.air.typeOfIndex(inst); const fn_cty = try f.typeToCType(f.object.dg.decl.?.ty, .complete); const param_len = fn_cty.castTag(.varargs_function).?.data.param_types.len; @@ -7256,10 +7012,6 @@ fn airCVaStart(f: *Function, inst: Air.Inst.Index) !CValue { fn airCVaArg(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } const inst_ty = f.air.typeOfIndex(inst); const va_list = try f.resolveInst(ty_op.operand); @@ -7291,10 +7043,6 @@ fn airCVaEnd(f: *Function, inst: Air.Inst.Index) !CValue { fn airCVaCopy(f: *Function, inst: Air.Inst.Index) !CValue { const ty_op = f.air.instructions.items(.data)[inst].ty_op; - if (f.liveness.isUnused(inst)) { - try reap(f, inst, &.{ty_op.operand}); - return .none; - } const inst_ty = f.air.typeOfIndex(inst); const va_list = try f.resolveInst(ty_op.operand); @@ -7858,7 +7606,6 @@ fn freeLocal(f: *Function, inst: Air.Inst.Index, local_index: LocalIndex, ref_in const gpa = f.object.dg.gpa; const local = &f.locals.items[local_index]; log.debug("%{d}: freeing t{d} (operand %{d})", .{ inst, local_index, ref_inst }); - if (f.is_in_clone != local.is_in_clone) return; const gop = try f.free_locals_map.getOrPut(gpa, local.getType()); if (!gop.found_existing) gop.value_ptr.* = .{}; if (std.debug.runtime_safety) { @@ -7916,35 +7663,3 @@ fn deinitFreeLocalsMap(gpa: mem.Allocator, map: *LocalsMap) void { } map.deinit(gpa); } - -fn noticeBranchFrees( - f: *Function, - pre_locals_len: LocalIndex, - pre_allocs_len: LocalIndex, - inst: Air.Inst.Index, -) !void { - for (f.locals.items[pre_locals_len..], pre_locals_len..) 
|*local, local_i| { - const local_index = @intCast(LocalIndex, local_i); - if (f.allocs.contains(local_index)) { - if (std.debug.runtime_safety) { - // new allocs are no longer freeable, so make sure they aren't in the free list - if (f.free_locals_map.getPtr(local.getType())) |locals_list| { - assert(!locals_list.contains(local_index)); - } - } - continue; - } - - // free cloned locals from other branches at current cloned-ness - std.debug.assert(local.is_in_clone or !f.is_in_clone); - local.is_in_clone = f.is_in_clone; - try freeLocal(f, inst, local_index, 0); - } - - for (f.allocs.keys()[pre_allocs_len..]) |local_i| { - const local_index = @intCast(LocalIndex, local_i); - const local = &f.locals.items[local_index]; - // new allocs are no longer freeable, so remove them from the free list - if (f.free_locals_map.getPtr(local.getType())) |locals_list| _ = locals_list.swapRemove(local_index); - } -} -- cgit v1.2.3 From 6f09a7041ecf328f761df60fae007c800ee1e2ec Mon Sep 17 00:00:00 2001 From: mlugg Date: Wed, 19 Apr 2023 01:58:39 +0100 Subject: Begin integrating new liveness analysis into remaining backends --- src/arch/aarch64/CodeGen.zig | 5 +++++ src/arch/arm/CodeGen.zig | 5 +++++ src/arch/riscv64/CodeGen.zig | 5 +++++ src/arch/sparc64/CodeGen.zig | 5 +++++ src/arch/x86_64/CodeGen.zig | 5 +++++ src/codegen/spirv.zig | 5 +++++ 6 files changed, 30 insertions(+) (limited to 'src/codegen') diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig index f2116689e9..1acc11d7e8 100644 --- a/src/arch/aarch64/CodeGen.zig +++ b/src/arch/aarch64/CodeGen.zig @@ -655,6 +655,11 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { const air_tags = self.air.instructions.items(.tag); for (body) |inst| { + // TODO: remove now-redundant isUnused calls from AIR handler functions + if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) { + continue; + } + const old_air_bookkeeping = self.air_bookkeeping; try self.ensureProcessDeathCapacity(Liveness.bpi); diff --git a/src/arch/arm/CodeGen.zig b/src/arch/arm/CodeGen.zig index 70144551d1..661e713b1c 100644 --- a/src/arch/arm/CodeGen.zig +++ b/src/arch/arm/CodeGen.zig @@ -639,6 +639,11 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { const air_tags = self.air.instructions.items(.tag); for (body) |inst| { + // TODO: remove now-redundant isUnused calls from AIR handler functions + if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) { + continue; + } + const old_air_bookkeeping = self.air_bookkeeping; try self.ensureProcessDeathCapacity(Liveness.bpi); diff --git a/src/arch/riscv64/CodeGen.zig b/src/arch/riscv64/CodeGen.zig index 16d9548da7..f0ab8b3317 100644 --- a/src/arch/riscv64/CodeGen.zig +++ b/src/arch/riscv64/CodeGen.zig @@ -473,6 +473,11 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { const air_tags = self.air.instructions.items(.tag); for (body) |inst| { + // TODO: remove now-redundant isUnused calls from AIR handler functions + if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) { + continue; + } + const old_air_bookkeeping = self.air_bookkeeping; try self.ensureProcessDeathCapacity(Liveness.bpi); diff --git a/src/arch/sparc64/CodeGen.zig b/src/arch/sparc64/CodeGen.zig index 197234c13b..2bcc1e1c4e 100644 --- a/src/arch/sparc64/CodeGen.zig +++ b/src/arch/sparc64/CodeGen.zig @@ -489,6 +489,11 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { const air_tags = self.air.instructions.items(.tag); for (body) |inst| { + 
// TODO: remove now-redundant isUnused calls from AIR handler functions + if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) { + continue; + } + const old_air_bookkeeping = self.air_bookkeeping; try self.ensureProcessDeathCapacity(Liveness.bpi); diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig index 33ff45d27d..5620ee2ad2 100644 --- a/src/arch/x86_64/CodeGen.zig +++ b/src/arch/x86_64/CodeGen.zig @@ -905,6 +905,11 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { const air_tags = self.air.instructions.items(.tag); for (body) |inst| { + // TODO: remove now-redundant isUnused calls from AIR handler functions + if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) { + continue; + } + const old_air_bookkeeping = self.air_bookkeeping; try self.ensureProcessDeathCapacity(Liveness.bpi); if (builtin.mode == .Debug) { diff --git a/src/codegen/spirv.zig b/src/codegen/spirv.zig index 58da5539ac..87b72c6726 100644 --- a/src/codegen/spirv.zig +++ b/src/codegen/spirv.zig @@ -1507,6 +1507,11 @@ pub const DeclGen = struct { } fn genInst(self: *DeclGen, inst: Air.Inst.Index) !void { + // TODO: remove now-redundant isUnused calls from AIR handler functions + if (self.liveness.isUnused(inst) and !self.air.mustLower(inst)) { + return; + } + const air_tags = self.air.instructions.items(.tag); const maybe_result_id: ?IdRef = switch (air_tags[inst]) { // zig fmt: off -- cgit v1.2.3
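Editor's note: the final commit above installs the same guard at the top of each backend's instruction loop — skip an instruction when Liveness reports it unused *and* `Air.mustLower` does not force it to be lowered — so individual AIR handlers no longer need their own early-return checks (hence the TODO comments). The following is a minimal, self-contained sketch of that rule, not code from the compiler: the `Inst` struct, its fields, and the sample values are invented purely for illustration.

```zig
const std = @import("std");

// Toy stand-in for an AIR instruction; the real backends query
// Liveness.isUnused and Air.mustLower instead of these fields.
const Inst = struct {
    name: []const u8,
    unused: bool, // what Liveness.isUnused would report
    must_lower: bool, // what Air.mustLower would report (side effects, volatile, etc.)
};

fn genBody(body: []const Inst) void {
    for (body) |inst| {
        // Elide instructions whose results are never used, unless something
        // forces them to be lowered anyway.
        if (inst.unused and !inst.must_lower) continue;
        std.debug.print("lowering {s}\n", .{inst.name});
    }
}

pub fn main() void {
    genBody(&.{
        .{ .name = "add", .unused = true, .must_lower = false }, // elided
        .{ .name = "store", .unused = true, .must_lower = true }, // still lowered
        .{ .name = "load", .unused = false, .must_lower = false }, // lowered: result is used
    });
}
```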