| author | Andrew Kelley <andrew@ziglang.org> | 2023-06-24 16:58:19 -0700 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-06-24 16:58:19 -0700 |
| commit | 146b79af153bbd5dafda0ba12a040385c7fc58f8 | |
| tree | 67e3db8b444d65c667e314770fc983a7fc8ba293 /src/Liveness.zig | |
| parent | 13853bef0df3c90633021850cc6d6abaeea03282 | |
| parent | 21ac0beb436f49fe49c6982a872f2dc48e4bea5e | |
Merge pull request #16163 from mlugg/feat/builtins-infer-dest-ty
Infer destination type of cast builtins using result type
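For readers outside the PR thread: this merge adapts `src/Liveness.zig` to the new builtin cast syntax, in which builtins such as `@intCast`, `@truncate`, and `@ptrCast` no longer take the destination type as a first argument and instead infer it from the result type. A minimal before/after sketch (hypothetical values, not code from this diff):

```zig
const std = @import("std");

pub fn main() void {
    const wide: u64 = 200;

    // Old form (pre-change): destination type passed as the first argument.
    //   const small = @intCast(u8, wide);
    //   const low   = @truncate(u8, wide);

    // New form: the builtin takes only the operand; the destination type
    // comes from the result location. @as supplies it explicitly when no
    // result type is otherwise in scope:
    const small = @as(u8, @intCast(wide));
    const low = @as(u8, @truncate(wide));

    // When the result type is already known, @as is unnecessary:
    const byte: u8 = @intCast(wide);

    std.debug.print("{} {} {}\n", .{ small, low, byte });
}
```

Because the cast sites in Liveness.zig rarely have a result type in scope, the diff supplies one explicitly with `@as`, which is why every change below takes the `@as(T, @intCast(...))` form.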
Diffstat (limited to 'src/Liveness.zig')
| -rw-r--r-- | src/Liveness.zig | 68 |
1 file changed, 34 insertions(+), 34 deletions(-)
```diff
diff --git a/src/Liveness.zig b/src/Liveness.zig
index 1141b8620c..ab7c612758 100644
--- a/src/Liveness.zig
+++ b/src/Liveness.zig
@@ -178,14 +178,14 @@ pub fn analyze(gpa: Allocator, air: Air, intern_pool: *const InternPool) Allocat
 
 pub fn getTombBits(l: Liveness, inst: Air.Inst.Index) Bpi {
     const usize_index = (inst * bpi) / @bitSizeOf(usize);
-    return @truncate(Bpi, l.tomb_bits[usize_index] >>
-        @intCast(Log2Int(usize), (inst % (@bitSizeOf(usize) / bpi)) * bpi));
+    return @as(Bpi, @truncate(l.tomb_bits[usize_index] >>
+        @as(Log2Int(usize), @intCast((inst % (@bitSizeOf(usize) / bpi)) * bpi))));
 }
 
 pub fn isUnused(l: Liveness, inst: Air.Inst.Index) bool {
     const usize_index = (inst * bpi) / @bitSizeOf(usize);
     const mask = @as(usize, 1) <<
-        @intCast(Log2Int(usize), (inst % (@bitSizeOf(usize) / bpi)) * bpi + (bpi - 1));
+        @as(Log2Int(usize), @intCast((inst % (@bitSizeOf(usize) / bpi)) * bpi + (bpi - 1)));
     return (l.tomb_bits[usize_index] & mask) != 0;
 }
 
@@ -193,7 +193,7 @@ pub fn operandDies(l: Liveness, inst: Air.Inst.Index, operand: OperandInt) bool
     assert(operand < bpi - 1);
     const usize_index = (inst * bpi) / @bitSizeOf(usize);
     const mask = @as(usize, 1) <<
-        @intCast(Log2Int(usize), (inst % (@bitSizeOf(usize) / bpi)) * bpi + operand);
+        @as(Log2Int(usize), @intCast((inst % (@bitSizeOf(usize) / bpi)) * bpi + operand));
     return (l.tomb_bits[usize_index] & mask) != 0;
 }
 
@@ -201,7 +201,7 @@ pub fn clearOperandDeath(l: Liveness, inst: Air.Inst.Index, operand: OperandInt)
     assert(operand < bpi - 1);
     const usize_index = (inst * bpi) / @bitSizeOf(usize);
     const mask = @as(usize, 1) <<
-        @intCast(Log2Int(usize), (inst % (@bitSizeOf(usize) / bpi)) * bpi + operand);
+        @as(Log2Int(usize), @intCast((inst % (@bitSizeOf(usize) / bpi)) * bpi + operand));
     l.tomb_bits[usize_index] &= ~mask;
 }
 
@@ -484,11 +484,11 @@ pub fn categorizeOperand(
             const inst_data = air_datas[inst].pl_op;
             const callee = inst_data.operand;
             const extra = air.extraData(Air.Call, inst_data.payload);
-            const args = @ptrCast([]const Air.Inst.Ref, air.extra[extra.end..][0..extra.data.args_len]);
+            const args = @as([]const Air.Inst.Ref, @ptrCast(air.extra[extra.end..][0..extra.data.args_len]));
             if (args.len + 1 <= bpi - 1) {
                 if (callee == operand_ref) return matchOperandSmallIndex(l, inst, 0, .write);
                 for (args, 0..) |arg, i| {
-                    if (arg == operand_ref) return matchOperandSmallIndex(l, inst, @intCast(OperandInt, i + 1), .write);
+                    if (arg == operand_ref) return matchOperandSmallIndex(l, inst, @as(OperandInt, @intCast(i + 1)), .write);
                 }
                 return .write;
             }
@@ -535,12 +535,12 @@ pub fn categorizeOperand(
         .aggregate_init => {
             const ty_pl = air_datas[inst].ty_pl;
             const aggregate_ty = air.getRefType(ty_pl.ty);
-            const len = @intCast(usize, aggregate_ty.arrayLenIp(ip));
-            const elements = @ptrCast([]const Air.Inst.Ref, air.extra[ty_pl.payload..][0..len]);
+            const len = @as(usize, @intCast(aggregate_ty.arrayLenIp(ip)));
+            const elements = @as([]const Air.Inst.Ref, @ptrCast(air.extra[ty_pl.payload..][0..len]));
             if (elements.len <= bpi - 1) {
                 for (elements, 0..) |elem, i| {
-                    if (elem == operand_ref) return matchOperandSmallIndex(l, inst, @intCast(OperandInt, i), .none);
+                    if (elem == operand_ref) return matchOperandSmallIndex(l, inst, @as(OperandInt, @intCast(i)), .none);
                 }
                 return .none;
             }
 
@@ -808,20 +808,20 @@ pub const BigTomb = struct {
         const small_tombs = bpi - 1;
         if (this_bit_index < small_tombs) {
-            const dies = @truncate(u1, bt.tomb_bits >> @intCast(Liveness.OperandInt, this_bit_index)) != 0;
+            const dies = @as(u1, @truncate(bt.tomb_bits >> @as(Liveness.OperandInt, @intCast(this_bit_index)))) != 0;
             return dies;
         }
 
         const big_bit_index = this_bit_index - small_tombs;
         while (big_bit_index - bt.extra_offset * 31 >= 31) {
-            if (@truncate(u1, bt.extra[bt.extra_start + bt.extra_offset] >> 31) != 0) {
+            if (@as(u1, @truncate(bt.extra[bt.extra_start + bt.extra_offset] >> 31)) != 0) {
                 bt.reached_end = true;
                 return false;
             }
             bt.extra_offset += 1;
         }
-        const dies = @truncate(u1, bt.extra[bt.extra_start + bt.extra_offset] >>
-            @intCast(u5, big_bit_index - bt.extra_offset * 31)) != 0;
+        const dies = @as(u1, @truncate(bt.extra[bt.extra_start + bt.extra_offset] >>
+            @as(u5, @intCast(big_bit_index - bt.extra_offset * 31)))) != 0;
         return dies;
     }
 };
 
@@ -838,7 +838,7 @@ const Analysis = struct {
     fn storeTombBits(a: *Analysis, inst: Air.Inst.Index, tomb_bits: Bpi) void {
         const usize_index = (inst * bpi) / @bitSizeOf(usize);
         a.tomb_bits[usize_index] |= @as(usize, tomb_bits) <<
-            @intCast(Log2Int(usize), (inst % (@bitSizeOf(usize) / bpi)) * bpi);
+            @as(Log2Int(usize), @intCast((inst % (@bitSizeOf(usize) / bpi)) * bpi));
     }
 
     fn addExtra(a: *Analysis, extra: anytype) Allocator.Error!u32 {
@@ -849,7 +849,7 @@ const Analysis = struct {
 
     fn addExtraAssumeCapacity(a: *Analysis, extra: anytype) u32 {
         const fields = std.meta.fields(@TypeOf(extra));
-        const result = @intCast(u32, a.extra.items.len);
+        const result = @as(u32, @intCast(a.extra.items.len));
         inline for (fields) |field| {
             a.extra.appendAssumeCapacity(switch (field.type) {
                 u32 => @field(extra, field.name),
@@ -1108,7 +1108,7 @@ fn analyzeInst(
             const inst_data = inst_datas[inst].pl_op;
             const callee = inst_data.operand;
             const extra = a.air.extraData(Air.Call, inst_data.payload);
-            const args = @ptrCast([]const Air.Inst.Ref, a.air.extra[extra.end..][0..extra.data.args_len]);
+            const args = @as([]const Air.Inst.Ref, @ptrCast(a.air.extra[extra.end..][0..extra.data.args_len]));
             if (args.len + 1 <= bpi - 1) {
                 var buf = [1]Air.Inst.Ref{.none} ** (bpi - 1);
                 buf[0] = callee;
@@ -1146,8 +1146,8 @@ fn analyzeInst(
         .aggregate_init => {
             const ty_pl = inst_datas[inst].ty_pl;
             const aggregate_ty = a.air.getRefType(ty_pl.ty);
-            const len = @intCast(usize, aggregate_ty.arrayLenIp(ip));
-            const elements = @ptrCast([]const Air.Inst.Ref, a.air.extra[ty_pl.payload..][0..len]);
+            const len = @as(usize, @intCast(aggregate_ty.arrayLenIp(ip)));
+            const elements = @as([]const Air.Inst.Ref, @ptrCast(a.air.extra[ty_pl.payload..][0..len]));
 
             if (elements.len <= bpi - 1) {
                 var buf = [1]Air.Inst.Ref{.none} ** (bpi - 1);
@@ -1200,9 +1200,9 @@ fn analyzeInst(
         .assembly => {
             const extra = a.air.extraData(Air.Asm, inst_datas[inst].ty_pl.payload);
             var extra_i: usize = extra.end;
-            const outputs = @ptrCast([]const Air.Inst.Ref, a.air.extra[extra_i..][0..extra.data.outputs_len]);
+            const outputs = @as([]const Air.Inst.Ref, @ptrCast(a.air.extra[extra_i..][0..extra.data.outputs_len]));
             extra_i += outputs.len;
-            const inputs = @ptrCast([]const Air.Inst.Ref, a.air.extra[extra_i..][0..extra.data.inputs_len]);
+            const inputs = @as([]const Air.Inst.Ref, @ptrCast(a.air.extra[extra_i..][0..extra.data.inputs_len]));
             extra_i += inputs.len;
 
             const num_operands = simple: {
@@ -1310,7 +1310,7 @@ fn analyzeOperands(
                 // Don't compute any liveness for constants
                 if (inst_tags[operand] == .interned) continue;
 
-                const mask = @as(Bpi, 1) << @intCast(OperandInt, i);
+                const mask = @as(Bpi, 1) << @as(OperandInt, @intCast(i));
 
                 if ((try data.live_set.fetchPut(gpa, operand, {})) == null) {
                     log.debug("[{}] %{}: added %{} to live set (operand dies here)", .{ pass, inst, operand });
@@ -1320,7 +1320,7 @@ fn analyzeOperands(
             }
 
             a.tomb_bits[usize_index] |= @as(usize, tomb_bits) <<
-                @intCast(Log2Int(usize), (inst % (@bitSizeOf(usize) / bpi)) * bpi);
+                @as(Log2Int(usize), @intCast((inst % (@bitSizeOf(usize) / bpi)) * bpi));
         },
    }
 }
@@ -1472,7 +1472,7 @@ fn analyzeInstLoop(
             const num_breaks = data.breaks.count();
             try a.extra.ensureUnusedCapacity(gpa, 1 + num_breaks);
 
-            const extra_index = @intCast(u32, a.extra.items.len);
+            const extra_index = @as(u32, @intCast(a.extra.items.len));
             a.extra.appendAssumeCapacity(num_breaks);
 
             var it = data.breaks.keyIterator();
@@ -1523,7 +1523,7 @@ fn analyzeInstLoop(
            // This is necessarily not in the same control flow branch, because loops are noreturn
 
            data.live_set.clearRetainingCapacity();
-           try data.live_set.ensureUnusedCapacity(gpa, @intCast(u32, loop_live.len));
+           try data.live_set.ensureUnusedCapacity(gpa, @as(u32, @intCast(loop_live.len)));
            for (loop_live) |alive| {
                data.live_set.putAssumeCapacity(alive, {});
            }
@@ -1647,8 +1647,8 @@ fn analyzeInstCondBr(
             log.debug("[{}] %{}: new live set is {}", .{ pass, inst, fmtInstSet(&data.live_set) });
 
             // Write the mirrored deaths to `extra`
-            const then_death_count = @intCast(u32, then_mirrored_deaths.items.len);
-            const else_death_count = @intCast(u32, else_mirrored_deaths.items.len);
+            const then_death_count = @as(u32, @intCast(then_mirrored_deaths.items.len));
+            const else_death_count = @as(u32, @intCast(else_mirrored_deaths.items.len));
             try a.extra.ensureUnusedCapacity(gpa, std.meta.fields(CondBr).len + then_death_count + else_death_count);
             const extra_index = a.addExtraAssumeCapacity(CondBr{
                 .then_death_count = then_death_count,
@@ -1758,12 +1758,12 @@ fn analyzeInstSwitchBr(
                 log.debug("[{}] %{}: new live set is {}", .{ pass, inst, fmtInstSet(&data.live_set) });
             }
 
-            const else_death_count = @intCast(u32, mirrored_deaths[ncases].items.len);
+            const else_death_count = @as(u32, @intCast(mirrored_deaths[ncases].items.len));
             const extra_index = try a.addExtra(SwitchBr{
                 .else_death_count = else_death_count,
             });
             for (mirrored_deaths[0..ncases]) |mirrored| {
-                const num = @intCast(u32, mirrored.items.len);
+                const num = @as(u32, @intCast(mirrored.items.len));
                 try a.extra.ensureUnusedCapacity(gpa, num + 1);
                 a.extra.appendAssumeCapacity(num);
                 a.extra.appendSliceAssumeCapacity(mirrored.items);
@@ -1798,7 +1798,7 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type {
             inst: Air.Inst.Index,
             total_operands: usize,
         ) !Self {
-            const extra_operands = @intCast(u32, total_operands) -| (bpi - 1);
+            const extra_operands = @as(u32, @intCast(total_operands)) -| (bpi - 1);
             const max_extra_tombs = (extra_operands + 30) / 31;
 
             const extra_tombs: []u32 = switch (pass) {
@@ -1818,7 +1818,7 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type {
                 .a = a,
                 .data = data,
                 .inst = inst,
-                .operands_remaining = @intCast(u32, total_operands),
+                .operands_remaining = @as(u32, @intCast(total_operands)),
                 .extra_tombs = extra_tombs,
                 .will_die_immediately = will_die_immediately,
             };
@@ -1847,7 +1847,7 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type {
             if (big.will_die_immediately and !big.a.air.mustLower(big.inst, ip)) return;
 
             const extra_byte = (big.operands_remaining - (bpi - 1)) / 31;
-            const extra_bit = @intCast(u5, big.operands_remaining - (bpi - 1) - extra_byte * 31);
+            const extra_bit = @as(u5, @intCast(big.operands_remaining - (bpi - 1) - extra_byte * 31));
 
             const gpa = big.a.gpa;
 
@@ -1881,7 +1881,7 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type {
                     // keep at least one.
                     var num: usize = big.extra_tombs.len;
                     while (num > 1) {
-                        if (@truncate(u31, big.extra_tombs[num - 1]) != 0) {
+                        if (@as(u31, @truncate(big.extra_tombs[num - 1])) != 0) {
                             // Some operand dies here
                             break;
                         }
@@ -1892,7 +1892,7 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type {
 
                     const extra_tombs = big.extra_tombs[0..num];
 
-                    const extra_index = @intCast(u32, big.a.extra.items.len);
+                    const extra_index = @as(u32, @intCast(big.a.extra.items.len));
                     try big.a.extra.appendSlice(gpa, extra_tombs);
                     try big.a.special.put(gpa, big.inst, extra_index);
                 },
```
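The expressions these hunks wrap are all variations of one tomb-bit addressing scheme: each instruction owns `bpi` bits inside a flat `[]usize`. A standalone sketch of the index arithmetic, assuming `bpi = 4` for illustration (the real constant lives in `src/Liveness.zig`, and the instruction index here is arbitrary):

```zig
const std = @import("std");

// Illustrative constant: tomb bits per instruction. Assumed to be 4 here;
// the actual value is defined in src/Liveness.zig.
const bpi = 4;

pub fn main() void {
    const inst: u32 = 37; // arbitrary instruction index

    // Each usize word packs @bitSizeOf(usize) / bpi instructions' tomb bits.
    const usize_index = (inst * bpi) / @bitSizeOf(usize);

    // Bit offset of this instruction's bits within that word. This is the
    // shift amount the diff now writes as @as(Log2Int(usize), @intCast(...)):
    // the shift operand's type (u6 on 64-bit targets) must be supplied
    // explicitly, since the shift operator alone does not pin it down.
    const shift: std.math.Log2Int(usize) = @intCast((inst % (@bitSizeOf(usize) / bpi)) * bpi);

    std.debug.print("inst {} -> word {}, bit offset {}\n", .{ inst, usize_index, shift });
}
```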
