author     Jacob Young <jacobly0@users.noreply.github.com>   2023-08-12 01:56:39 -0400
committer  Jacob Young <jacobly0@users.noreply.github.com>   2023-08-12 02:22:26 -0400
commit     41575fa868f4b75b79bf5f774b66bb9a7b1ab142 (patch)
tree       f7038b14c37cf43074f52001c2a151ace5796ee9
parent     ce7acf1296f7d38e6ee1b24e7aa85864198dd8fa (diff)
AstGen: fix src loc for invalid switch expression rls coercions
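
When a switch expression's result location supplies a result type, each prong's
break operand is coerced by rewriting the prong's store_to_block_ptr into a
coercion instruction. That rewrite used a plain `as`, which carries no source
node of its own, so a failing coercion was reported at an imprecise location.
The fixup now reads the break's operand_src_node and, when one is present,
emits `as_node` with a `Zir.Inst.As` payload at that node, so the error lands
on the offending prong expression (see the expected locations in the new test
below). A minimal sketch of the affected pattern, with illustrative names only:

    const Enum = enum(u8) { first, second, _ };

    export fn repro(enum_value: Enum) u8 {
        const result: u8 = switch (enum_value) {
            // error: type 'u8' cannot represent integer value '256'
            // (now reported at this prong's value)
            .first => 256,
            .second => 0,
            else => 0,
        };
        return result;
    }
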
-rw-r--r--  src/AstGen.zig  177
-rw-r--r--  test/cases/compile_errors/invalid_switch_expr_result_location_coercion.zig  46
2 files changed, 148 insertions, 75 deletions
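
In AstGen.zig the prong fixup also changes shape: extra capacity is reserved up
front for one `Zir.Inst.As` payload per prong, and the fixup loop is wrapped in
`inline for (.{ .body, .breaks }) |pass|` so it runs once to copy prong bodies
into `extra` and once to patch break operands. A standalone sketch of that
comptime two-pass pattern (simplified, made-up names, not code from the
commit):

    const std = @import("std");

    pub fn main() void {
        var emitted: usize = 0;
        var patched: usize = 0;

        // `inline for` over a tuple of enum literals unrolls at compile time,
        // so `pass` is comptime-known and the branch not taken in each
        // instantiation compiles away.
        inline for (.{ .body, .breaks }) |pass| {
            for (0..3) |i| {
                if (pass == .body) {
                    emitted += i; // first pass: emit/copy the prong bodies
                } else {
                    patched += 1; // second pass: fix up the break operands
                }
            }
        }

        std.debug.print("emitted={d} patched={d}\n", .{ emitted, patched });
    }
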
diff --git a/src/AstGen.zig b/src/AstGen.zig
index 9bbf14da2d..20df50c908 100644
--- a/src/AstGen.zig
+++ b/src/AstGen.zig
@@ -7178,7 +7178,8 @@ fn switchExpr(
try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.SwitchBlock).Struct.fields.len +
@intFromBool(multi_cases_len != 0) +
@intFromBool(any_has_tag_capture) +
- payloads.items.len - case_table_end);
+ payloads.items.len - case_table_end +
+ (case_table_end - case_table_start) * @typeInfo(Zir.Inst.As).Struct.fields.len);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.SwitchBlock{
.operand = raw_operand,
@@ -7205,82 +7206,108 @@ fn switchExpr(
zir_datas[switch_block].pl_node.payload_index = payload_index;
const strat = ri.rl.strategy(&block_scope);
- for (payloads.items[case_table_start..case_table_end], 0..) |start_index, i| {
- var body_len_index = start_index;
- var end_index = start_index;
- const table_index = case_table_start + i;
- if (table_index < scalar_case_table) {
- end_index += 1;
- } else if (table_index < multi_case_table) {
- body_len_index += 1;
- end_index += 2;
- } else {
- body_len_index += 2;
- const items_len = payloads.items[start_index];
- const ranges_len = payloads.items[start_index + 1];
- end_index += 3 + items_len + 2 * ranges_len;
- }
-
- const body_len = @as(Zir.Inst.SwitchBlock.ProngInfo, @bitCast(payloads.items[body_len_index])).body_len;
- end_index += body_len;
-
- switch (strat.tag) {
- .break_operand => blk: {
- // Switch expressions return `true` for `nodeMayNeedMemoryLocation` thus
- // `elide_store_to_block_ptr_instructions` will either be true,
- // or all prongs are noreturn.
- if (!strat.elide_store_to_block_ptr_instructions)
- break :blk;
-
- // There will necessarily be a store_to_block_ptr for
- // all prongs, except for prongs that ended with a noreturn instruction.
- // Elide all the `store_to_block_ptr` instructions.
-
- // The break instructions need to have their operands coerced if the
- // switch's result location is a `ty`. In this case we overwrite the
- // `store_to_block_ptr` instruction with an `as` instruction and repurpose
- // it as the break operand.
- if (body_len < 2)
- break :blk;
-
- var store_index = end_index - 2;
- while (true) : (store_index -= 1) switch (zir_tags[payloads.items[store_index]]) {
- .dbg_block_end, .dbg_block_begin, .dbg_stmt, .dbg_var_val, .dbg_var_ptr => {},
- else => break,
- };
- const store_inst = payloads.items[store_index];
- if (zir_tags[store_inst] != .store_to_block_ptr or
- zir_datas[store_inst].bin.lhs != block_scope.rl_ptr)
- break :blk;
- const break_inst = payloads.items[end_index - 1];
- if (block_scope.rl_ty_inst != .none) {
- zir_tags[store_inst] = .as;
- zir_datas[store_inst].bin = .{
- .lhs = block_scope.rl_ty_inst,
- .rhs = zir_datas[break_inst].@"break".operand,
+ inline for (.{ .body, .breaks }) |pass| {
+ for (payloads.items[case_table_start..case_table_end], 0..) |start_index, i| {
+ var body_len_index = start_index;
+ var end_index = start_index;
+ const table_index = case_table_start + i;
+ if (table_index < scalar_case_table) {
+ end_index += 1;
+ } else if (table_index < multi_case_table) {
+ body_len_index += 1;
+ end_index += 2;
+ } else {
+ body_len_index += 2;
+ const items_len = payloads.items[start_index];
+ const ranges_len = payloads.items[start_index + 1];
+ end_index += 3 + items_len + 2 * ranges_len;
+ }
+
+ const prong_info: Zir.Inst.SwitchBlock.ProngInfo = @bitCast(payloads.items[body_len_index]);
+ end_index += prong_info.body_len;
+
+ switch (strat.tag) {
+ .break_operand => blk: {
+ // Switch expressions return `true` for `nodeMayNeedMemoryLocation` thus
+ // `elide_store_to_block_ptr_instructions` will either be true,
+ // or all prongs are noreturn.
+ if (!strat.elide_store_to_block_ptr_instructions)
+ break :blk;
+
+ // There will necessarily be a store_to_block_ptr for
+ // all prongs, except for prongs that ended with a noreturn instruction.
+ // Elide all the `store_to_block_ptr` instructions.
+
+ // The break instructions need to have their operands coerced if the
+ // switch's result location is a `ty`. In this case we overwrite the
+ // `store_to_block_ptr` instruction with an `as` instruction and repurpose
+ // it as the break operand.
+ if (prong_info.body_len < 2)
+ break :blk;
+
+ var store_index = end_index - 2;
+ while (true) : (store_index -= 1) switch (zir_tags[payloads.items[store_index]]) {
+ .dbg_block_end, .dbg_block_begin, .dbg_stmt, .dbg_var_val, .dbg_var_ptr => {},
+ else => break,
};
- zir_datas[break_inst].@"break".operand = indexToRef(store_inst);
- } else {
- payloads.items[body_len_index] -= 1;
- astgen.extra.appendSliceAssumeCapacity(payloads.items[start_index .. end_index - 2]);
- astgen.extra.appendAssumeCapacity(break_inst);
- continue;
- }
- },
- .break_void => {
- assert(!strat.elide_store_to_block_ptr_instructions);
- const last_inst = payloads.items[end_index - 1];
- if (zir_tags[last_inst] == .@"break") {
- const break_data = &zir_datas[last_inst].@"break";
- const block_inst = astgen.extra.items[
- break_data.payload_index + std.meta.fieldIndex(Zir.Inst.Break, "block_inst").?
- ];
- if (block_inst == switch_block) break_data.operand = .void_value;
- }
- },
- }
+ const store_inst = payloads.items[store_index];
+ if (zir_tags[store_inst] != .store_to_block_ptr or
+ zir_datas[store_inst].bin.lhs != block_scope.rl_ptr)
+ break :blk;
+ const break_inst = payloads.items[end_index - 1];
+ if (block_scope.rl_ty_inst != .none) {
+ if (pass == .breaks) {
+ const break_data = &zir_datas[break_inst].@"break";
+ const break_src: i32 = @bitCast(astgen.extra.items[
+ break_data.payload_index +
+ std.meta.fieldIndex(Zir.Inst.Break, "operand_src_node").?
+ ]);
+ if (break_src == Zir.Inst.Break.no_src_node) {
+ zir_tags[store_inst] = .as;
+ zir_datas[store_inst].bin = .{
+ .lhs = block_scope.rl_ty_inst,
+ .rhs = break_data.operand,
+ };
+ } else {
+ zir_tags[store_inst] = .as_node;
+ zir_datas[store_inst] = .{ .pl_node = .{
+ .src_node = break_src,
+ .payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.As{
+ .dest_type = block_scope.rl_ty_inst,
+ .operand = break_data.operand,
+ }),
+ } };
+ }
+ break_data.operand = indexToRef(store_inst);
+ }
+ } else {
+ if (pass == .body) {
+ payloads.items[body_len_index] -= 1;
+ astgen.extra.appendSliceAssumeCapacity(
+ payloads.items[start_index .. end_index - 2],
+ );
+ astgen.extra.appendAssumeCapacity(break_inst);
+ }
+ continue;
+ }
+ },
+ .break_void => if (pass == .breaks) {
+ assert(!strat.elide_store_to_block_ptr_instructions);
+ const last_inst = payloads.items[end_index - 1];
+ if (zir_tags[last_inst] == .@"break") {
+ const break_data = &zir_datas[last_inst].@"break";
+ const block_inst = astgen.extra.items[
+ break_data.payload_index +
+ std.meta.fieldIndex(Zir.Inst.Break, "block_inst").?
+ ];
+ if (block_inst == switch_block) break_data.operand = .void_value;
+ }
+ },
+ }
- astgen.extra.appendSliceAssumeCapacity(payloads.items[start_index..end_index]);
+ if (pass == .body)
+ astgen.extra.appendSliceAssumeCapacity(payloads.items[start_index..end_index]);
+ }
}
const block_ref = indexToRef(switch_block);
diff --git a/test/cases/compile_errors/invalid_switch_expr_result_location_coercion.zig b/test/cases/compile_errors/invalid_switch_expr_result_location_coercion.zig
new file mode 100644
index 0000000000..d0fc87ae8d
--- /dev/null
+++ b/test/cases/compile_errors/invalid_switch_expr_result_location_coercion.zig
@@ -0,0 +1,46 @@
+const Enum = enum(u8) { first, second, _ };
+
+export fn invalidFirstProng(enum_value: Enum) u8 {
+ const result: u8 = switch (enum_value) {
+ .first => 256,
+ .second => 0,
+ else => 0,
+ };
+ return result;
+}
+
+export fn invalidSecondProng(enum_value: Enum) u8 {
+ const result: u8 = switch (enum_value) {
+ .first => 0,
+ .second => 256,
+ _ => 0,
+ };
+ return result;
+}
+
+export fn invalidElseProng(enum_value: Enum) u8 {
+ const result: u8 = switch (enum_value) {
+ .first => 0,
+ .second => 0,
+ else => 256,
+ };
+ return result;
+}
+
+export fn invalidNonExhaustiveProng(enum_value: Enum) u8 {
+ const result: u8 = switch (enum_value) {
+ .first => 0,
+ .second => 0,
+ _ => 256,
+ };
+ return result;
+}
+
+// error
+// backend=stage2
+// target=native
+//
+// :5:19: error: type 'u8' cannot represent integer value '256'
+// :15:20: error: type 'u8' cannot represent integer value '256'
+// :25:17: error: type 'u8' cannot represent integer value '256'
+// :34:14: error: type 'u8' cannot represent integer value '256'