| field | value |
|---|---|
| author | Andrew Kelley <andrew@ziglang.org>, 2021-06-23 18:19:17 -0700 |
| committer | Andrew Kelley <andrew@ziglang.org>, 2021-07-02 13:26:50 -0700 |
| commit | 125b85d7375b96b4847f6ead51c853cdc0567506 (patch) |
| tree | 34a947e48f84ce6455d3ff133828a8e0e5614ce8 /src |
| parent | d84b386f6034278c8a9e8c3d2b0975ac541584aa (diff) |
| download | zig-125b85d7375b96b4847f6ead51c853cdc0567506.tar.gz, zig-125b85d7375b96b4847f6ead51c853cdc0567506.zip |
move "unreachable code" error from stage1 to stage2
* AstGen: implement "unreachable code" error for blocks. This works at
the statement level.
* stage1: remove the "unreachable code" error implementation, which
  means removing the `is_gen` field from IrInstSrc. This is one small
  step towards a smaller memory footprint for stage1. The benefit won't
  be realized until a future commit, because this flag occupied what was
  otherwise struct padding, so removing it alone does not shrink IrInstSrc.
There may be a regression here with the "union has no associated enum"
error, and there is a regression with the following code:
```zig
const a = noreturn;
```
A future commit will address these regressions.
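To illustrate the new statement-level check, here is a minimal sketch of the kind of code that is now rejected during AstGen instead of during stage1 analysis. The error and note strings come from the `failNodeNotes` call in the `AstGen.zig` hunk below; the function names are made up for this example:

```zig
export fn entry() void {
    return;
    foo(); // error: unreachable code
           // note: control flow is diverted here (attached to the `return` above)
}

fn foo() void {}
```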
Diffstat (limited to 'src')

| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/AstGen.zig | 70 |
| -rw-r--r-- | src/stage1/all_types.hpp | 3 |
| -rw-r--r-- | src/stage1/astgen.cpp | 121 |
| -rw-r--r-- | src/stage1/ir.cpp | 16 |

4 files changed, 107 insertions, 103 deletions
```diff
diff --git a/src/AstGen.zig b/src/AstGen.zig
index 4254481a55..6c5f2b5dae 100644
--- a/src/AstGen.zig
+++ b/src/AstGen.zig
@@ -1570,7 +1570,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: ast.Node.Index) Inn
                 const defer_scope = scope.cast(Scope.Defer).?;
                 scope = defer_scope.parent;
                 const expr_node = node_datas[defer_scope.defer_node].rhs;
-                try unusedResultExpr(parent_gz, defer_scope.parent, expr_node);
+                _ = try unusedResultExpr(parent_gz, defer_scope.parent, expr_node);
             },
             .defer_error => scope = scope.cast(Scope.Defer).?.parent,
             .top => unreachable,
@@ -1623,7 +1623,7 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: ast.Node.Index)
                 const defer_scope = scope.cast(Scope.Defer).?;
                 scope = defer_scope.parent;
                 const expr_node = node_datas[defer_scope.defer_node].rhs;
-                try unusedResultExpr(parent_gz, defer_scope.parent, expr_node);
+                _ = try unusedResultExpr(parent_gz, defer_scope.parent, expr_node);
             },
             .defer_error => scope = scope.cast(Scope.Defer).?.parent,
             .namespace => break,
@@ -1785,8 +1785,23 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const ast.Nod
     var block_arena = std.heap.ArenaAllocator.init(gz.astgen.gpa);
     defer block_arena.deinit();
 
+    var noreturn_src_node: ast.Node.Index = 0;
     var scope = parent_scope;
     for (statements) |statement| {
+        if (noreturn_src_node != 0) {
+            return astgen.failNodeNotes(
+                statement,
+                "unreachable code",
+                .{},
+                &[_]u32{
+                    try astgen.errNoteNode(
+                        noreturn_src_node,
+                        "control flow is diverted here",
+                        .{},
+                    ),
+                },
+            );
+        }
         switch (node_tags[statement]) {
             // zig fmt: off
             .global_var_decl => scope = try varDecl(gz, scope, statement, &block_arena.allocator, tree.globalVarDecl(statement)),
@@ -1814,7 +1829,7 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const ast.Nod
             .assign_mul      => try assignOp(gz, scope, statement, .mul),
             .assign_mul_wrap => try assignOp(gz, scope, statement, .mulwrap),
 
-            else => try unusedResultExpr(gz, scope, statement),
+            else => noreturn_src_node = try unusedResultExpr(gz, scope, statement),
             // zig fmt: on
         }
     }
@@ -1823,11 +1838,14 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const ast.Nod
     try checkUsed(gz, parent_scope, scope);
 }
 
-fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) InnerError!void {
+/// Returns AST source node of the thing that is noreturn if the statement is definitely `noreturn`.
+/// Otherwise returns 0.
+fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) InnerError!ast.Node.Index {
     try emitDbgNode(gz, statement);
     // We need to emit an error if the result is not `noreturn` or `void`, but
     // we want to avoid adding the ZIR instruction if possible for performance.
     const maybe_unused_result = try expr(gz, scope, .none, statement);
+    var noreturn_src_node: ast.Node.Index = 0;
     const elide_check = if (gz.refToIndex(maybe_unused_result)) |inst| b: {
         // Note that this array becomes invalid after appending more items to it
         // in the above while loop.
@@ -2061,15 +2079,7 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) Inner
             .extended,
             => break :b false,
 
-            // ZIR instructions that are always either `noreturn` or `void`.
-            .breakpoint,
-            .fence,
-            .dbg_stmt,
-            .ensure_result_used,
-            .ensure_result_non_error,
-            .@"export",
-            .set_eval_branch_quota,
-            .ensure_err_payload_void,
+            // ZIR instructions that are always `noreturn`.
             .@"break",
             .break_inline,
             .condbr,
@@ -2078,16 +2088,30 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) Inner
             .ret_node,
             .ret_coerce,
             .@"unreachable",
+            .repeat,
+            .repeat_inline,
+            .panic,
+            => {
+                noreturn_src_node = statement;
+                break :b true;
+            },
+
+            // ZIR instructions that are always `void`.
+            .breakpoint,
+            .fence,
+            .dbg_stmt,
+            .ensure_result_used,
+            .ensure_result_non_error,
+            .@"export",
+            .set_eval_branch_quota,
+            .ensure_err_payload_void,
             .store,
             .store_node,
             .store_to_block_ptr,
             .store_to_inferred_ptr,
             .resolve_inferred_alloc,
-            .repeat,
-            .repeat_inline,
             .validate_struct_init_ptr,
             .validate_array_init_ptr,
-            .panic,
             .set_align_stack,
             .set_cold,
             .set_float_mode,
@@ -2097,15 +2121,19 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) Inner
     } else switch (maybe_unused_result) {
         .none => unreachable,
 
-        .void_value,
-        .unreachable_value,
-        => true,
+        .unreachable_value => b: {
+            noreturn_src_node = statement;
+            break :b true;
+        },
+
+        .void_value => true,
 
         else => false,
     };
     if (!elide_check) {
         _ = try gz.addUnNode(.ensure_result_used, maybe_unused_result, statement);
     }
+    return noreturn_src_node;
 }
 
 fn genDefers(
@@ -2132,7 +2160,7 @@ fn genDefers(
                 const prev_in_defer = gz.in_defer;
                 gz.in_defer = true;
                 defer gz.in_defer = prev_in_defer;
-                try unusedResultExpr(gz, defer_scope.parent, expr_node);
+                _ = try unusedResultExpr(gz, defer_scope.parent, expr_node);
             },
             .defer_error => {
                 const defer_scope = scope.cast(Scope.Defer).?;
@@ -2142,7 +2170,7 @@ fn genDefers(
                 const prev_in_defer = gz.in_defer;
                 gz.in_defer = true;
                 defer gz.in_defer = prev_in_defer;
-                try unusedResultExpr(gz, defer_scope.parent, expr_node);
+                _ = try unusedResultExpr(gz, defer_scope.parent, expr_node);
             },
             .namespace => unreachable,
             .top => unreachable,
diff --git a/src/stage1/all_types.hpp b/src/stage1/all_types.hpp
index 34cd773abe..a05230058c 100644
--- a/src/stage1/all_types.hpp
+++ b/src/stage1/all_types.hpp
@@ -2733,9 +2733,6 @@ struct IrInstSrc {
     IrInst base;
 
     IrInstSrcId id;
-    // true if this instruction was generated by zig and not from user code
-    // this matters for the "unreachable code" compile error
-    bool is_gen;
     bool is_noreturn;
 
     // When analyzing IR, instructions that point to this instruction in the "old ir"
diff --git a/src/stage1/astgen.cpp b/src/stage1/astgen.cpp
index b69cd480c6..a208f42516 100644
--- a/src/stage1/astgen.cpp
+++ b/src/stage1/astgen.cpp
@@ -2557,7 +2557,6 @@ static IrInstSrc *ir_build_reset_result(Stage1AstGen *ag, Scope *scope, AstNode
 {
     IrInstSrcResetResult *instruction = ir_build_instruction<IrInstSrcResetResult>(ag, scope, source_node);
     instruction->result_loc = result_loc;
-    instruction->base.is_gen = true;
 
     return &instruction->base;
 }
@@ -2737,7 +2736,6 @@ static IrInstSrc *ir_build_alloca_src(Stage1AstGen *ag, Scope *scope, AstNode *s
         IrInstSrc *align, const char *name_hint, IrInstSrc *is_comptime)
 {
     IrInstSrcAlloca *instruction = ir_build_instruction<IrInstSrcAlloca>(ag, scope, source_node);
-    instruction->base.is_gen = true;
     instruction->align = align;
     instruction->name_hint = name_hint;
     instruction->is_comptime = is_comptime;
@@ -2752,7 +2750,6 @@ static IrInstSrc *ir_build_end_expr(Stage1AstGen *ag, Scope *scope, AstNode *sou
         IrInstSrc *value, ResultLoc *result_loc)
 {
     IrInstSrcEndExpr *instruction = ir_build_instruction<IrInstSrcEndExpr>(ag, scope, source_node);
-    instruction->base.is_gen = true;
     instruction->value = value;
     instruction->result_loc = result_loc;
 
@@ -2885,11 +2882,6 @@ static void ir_count_defers(Stage1AstGen *ag, Scope *inner_scope, Scope *outer_s
     }
 }
 
-static IrInstSrc *ir_mark_gen(IrInstSrc *instruction) {
-    instruction->is_gen = true;
-    return instruction;
-}
-
 static bool astgen_defers_for_block(Stage1AstGen *ag, Scope *inner_scope, Scope *outer_scope, bool *is_noreturn, IrInstSrc *err_value) {
     Scope *scope = inner_scope;
     if (is_noreturn != nullptr) *is_noreturn = false;
@@ -2948,8 +2940,8 @@ static bool astgen_defers_for_block(Stage1AstGen *ag, Scope *inner_scope, Scope
             if (defer_expr_value->is_noreturn) {
                 if (is_noreturn != nullptr) *is_noreturn = true;
             } else {
-                ir_mark_gen(ir_build_check_statement_is_void(ag, defer_expr_scope, defer_expr_node,
-                    defer_expr_value));
+                ir_build_check_statement_is_void(ag, defer_expr_scope, defer_expr_node,
+                    defer_expr_value);
             }
             scope = scope->parent;
             continue;
@@ -3047,7 +3039,7 @@ static IrInstSrc *astgen_return(Stage1AstGen *ag, Scope *scope, AstNode *node, L
             ir_build_end_expr(ag, scope, node, return_value, &result_loc_ret->base);
         }
 
-        ir_mark_gen(ir_build_add_implicit_return_type(ag, scope, node, return_value, result_loc_ret));
+        ir_build_add_implicit_return_type(ag, scope, node, return_value, result_loc_ret);
 
         size_t defer_counts[2];
         ir_count_defers(ag, scope, outer_scope, defer_counts);
@@ -3074,7 +3066,7 @@ static IrInstSrc *astgen_return(Stage1AstGen *ag, Scope *scope, AstNode *node, L
             is_comptime = ir_build_test_comptime(ag, scope, node, is_err);
         }
 
-        ir_mark_gen(ir_build_cond_br(ag, scope, node, is_err, err_block, ok_block, is_comptime));
+        ir_build_cond_br(ag, scope, node, is_err, err_block, ok_block, is_comptime);
 
         Stage1ZirBasicBlock *ret_stmt_block = ir_create_basic_block(ag, scope, "RetStmt");
         ir_set_cursor_at_end_and_append_block(ag, err_block);
@@ -3112,12 +3104,12 @@ static IrInstSrc *astgen_return(Stage1AstGen *ag, Scope *scope, AstNode *node, L
             } else {
                 is_comptime = ir_build_test_comptime(ag, scope, node, is_err_val);
             }
-            ir_mark_gen(ir_build_cond_br(ag, scope, node, is_err_val, return_block, continue_block, is_comptime));
+            ir_build_cond_br(ag, scope, node, is_err_val, return_block, continue_block, is_comptime);
 
             ir_set_cursor_at_end_and_append_block(ag, return_block);
             IrInstSrc *err_val_ptr = ir_build_unwrap_err_code_src(ag, scope, node, err_union_ptr);
             IrInstSrc *err_val = ir_build_load_ptr(ag, scope, node, err_val_ptr);
-            ir_mark_gen(ir_build_add_implicit_return_type(ag, scope, node, err_val, nullptr));
+            ir_build_add_implicit_return_type(ag, scope, node, err_val, nullptr);
             IrInstSrcSpillBegin *spill_begin = ir_build_spill_begin_src(ag, scope, node, err_val, SpillIdRetErrCode);
             ResultLocReturn *result_loc_ret = heap::c_allocator.create<ResultLocReturn>();
@@ -3338,7 +3330,7 @@ static IrInstSrc *astgen_block(Stage1AstGen *ag, Scope *parent_scope, AstNode *b
             child_scope = decl_var_instruction->var->child_scope;
         } else if (!is_continuation_unreachable) {
             // this statement's value must be void
-            ir_mark_gen(ir_build_check_statement_is_void(ag, child_scope, statement_node, statement_value));
+            ir_build_check_statement_is_void(ag, child_scope, statement_node, statement_value);
         }
     }
 
@@ -3364,7 +3356,7 @@ static IrInstSrc *astgen_block(Stage1AstGen *ag, Scope *parent_scope, AstNode *b
             return ir_expr_wrap(ag, parent_scope, phi, result_loc);
         } else {
             incoming_blocks.append(ag->current_basic_block);
-            IrInstSrc *else_expr_result = ir_mark_gen(ir_build_const_void(ag, parent_scope, block_node));
+            IrInstSrc *else_expr_result = ir_build_const_void(ag, parent_scope, block_node);
 
             if (scope_block->peer_parent != nullptr) {
                 ResultLocPeer *peer_result = create_peer_result(scope_block->peer_parent);
@@ -3387,13 +3379,13 @@ static IrInstSrc *astgen_block(Stage1AstGen *ag, Scope *parent_scope, AstNode *b
 
     IrInstSrc *result;
     if (block_node->data.block.name != nullptr) {
-        ir_mark_gen(ir_build_br(ag, parent_scope, block_node, scope_block->end_block, scope_block->is_comptime));
+        ir_build_br(ag, parent_scope, block_node, scope_block->end_block, scope_block->is_comptime);
         ir_set_cursor_at_end_and_append_block(ag, scope_block->end_block);
         IrInstSrc *phi = ir_build_phi(ag, parent_scope, block_node, incoming_blocks.length, incoming_blocks.items,
                 incoming_values.items, scope_block->peer_parent);
         result = ir_expr_wrap(ag, parent_scope, phi, result_loc);
     } else {
-        IrInstSrc *void_inst = ir_mark_gen(ir_build_const_void(ag, child_scope, block_node));
+        IrInstSrc *void_inst = ir_build_const_void(ag, child_scope, block_node);
         result = ir_lval_wrap(ag, parent_scope, void_inst, lval, result_loc);
     }
     if (!is_return_from_fn)
@@ -3402,14 +3394,14 @@ static IrInstSrc *astgen_block(Stage1AstGen *ag, Scope *parent_scope, AstNode *b
 
     // no need for save_err_ret_addr because this cannot return error
     // only generate unconditional defers
-    ir_mark_gen(ir_build_add_implicit_return_type(ag, child_scope, block_node, result, nullptr));
+    ir_build_add_implicit_return_type(ag, child_scope, block_node, result, nullptr);
     ResultLocReturn *result_loc_ret = heap::c_allocator.create<ResultLocReturn>();
     result_loc_ret->base.id = ResultLocIdReturn;
     ir_build_reset_result(ag, parent_scope, block_node, &result_loc_ret->base);
-    ir_mark_gen(ir_build_end_expr(ag, parent_scope, block_node, result, &result_loc_ret->base));
+    ir_build_end_expr(ag, parent_scope, block_node, result, &result_loc_ret->base);
     if (!astgen_defers_for_block(ag, child_scope, outer_block_scope, nullptr, nullptr))
         return ag->codegen->invalid_inst_src;
-    return ir_mark_gen(ir_build_return_src(ag, child_scope, result->base.source_node, result));
+    return ir_build_return_src(ag, child_scope, result->base.source_node, result);
 }
 
 static IrInstSrc *astgen_bin_op_id(Stage1AstGen *ag, Scope *scope, AstNode *node, IrBinOp op_id) {
@@ -3628,7 +3620,7 @@ static IrInstSrc *astgen_orelse(Stage1AstGen *ag, Scope *parent_scope, AstNode *
         return ag->codegen->invalid_inst_src;
     Stage1ZirBasicBlock *after_null_block = ag->current_basic_block;
     if (!instr_is_unreachable(null_result))
-        ir_mark_gen(ir_build_br(ag, parent_scope, node, end_block, is_comptime));
+        ir_build_br(ag, parent_scope, node, end_block, is_comptime);
 
     ir_set_cursor_at_end_and_append_block(ag, ok_block);
     IrInstSrc *unwrapped_ptr = ir_build_optional_unwrap_ptr(ag, parent_scope, node, maybe_ptr, false);
@@ -5395,7 +5387,7 @@ static IrInstSrc *astgen_if_bool_expr(Stage1AstGen *ag, Scope *scope, AstNode *n
         return ag->codegen->invalid_inst_src;
     Stage1ZirBasicBlock *after_then_block = ag->current_basic_block;
     if (!instr_is_unreachable(then_expr_result))
-        ir_mark_gen(ir_build_br(ag, scope, node, endif_block, is_comptime));
+        ir_build_br(ag, scope, node, endif_block, is_comptime);
 
     ir_set_cursor_at_end_and_append_block(ag, else_block);
     IrInstSrc *else_expr_result;
@@ -5409,7 +5401,7 @@ static IrInstSrc *astgen_if_bool_expr(Stage1AstGen *ag, Scope *scope, AstNode *n
     }
     Stage1ZirBasicBlock *after_else_block = ag->current_basic_block;
     if (!instr_is_unreachable(else_expr_result))
-        ir_mark_gen(ir_build_br(ag, scope, node, endif_block, is_comptime));
+        ir_build_br(ag, scope, node, endif_block, is_comptime);
 
     ir_set_cursor_at_end_and_append_block(ag, endif_block);
     IrInstSrc **incoming_values = heap::c_allocator.allocate<IrInstSrc *>(2);
@@ -5954,12 +5946,11 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
         IrInstSrc *is_err = ir_build_test_err_src(ag, scope, node->data.while_expr.condition, err_val_ptr,
                 true, false);
         Stage1ZirBasicBlock *after_cond_block = ag->current_basic_block;
-        IrInstSrc *void_else_result = else_node ? nullptr : ir_mark_gen(ir_build_const_void(ag, scope, node));
+        IrInstSrc *void_else_result = else_node ? nullptr : ir_build_const_void(ag, scope, node);
         IrInstSrc *cond_br_inst;
         if (!instr_is_unreachable(is_err)) {
             cond_br_inst = ir_build_cond_br(ag, scope, node->data.while_expr.condition, is_err,
                     else_block, body_block, is_comptime);
-            cond_br_inst->is_gen = true;
         } else {
             // for the purposes of the source instruction to ir_build_result_peers
             cond_br_inst = ag->current_basic_block->instruction_list.last();
@@ -6005,8 +5996,8 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
         }
 
         if (!instr_is_unreachable(body_result)) {
-            ir_mark_gen(ir_build_check_statement_is_void(ag, payload_scope, node->data.while_expr.body, body_result));
-            ir_mark_gen(ir_build_br(ag, payload_scope, node, continue_block, is_comptime));
+            ir_build_check_statement_is_void(ag, payload_scope, node->data.while_expr.body, body_result);
+            ir_build_br(ag, payload_scope, node, continue_block, is_comptime);
         }
 
         if (continue_expr_node) {
@@ -6015,8 +6006,8 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
             if (expr_result == ag->codegen->invalid_inst_src)
                 return expr_result;
             if (!instr_is_unreachable(expr_result)) {
-                ir_mark_gen(ir_build_check_statement_is_void(ag, payload_scope, continue_expr_node, expr_result));
-                ir_mark_gen(ir_build_br(ag, payload_scope, node, cond_block, is_comptime));
+                ir_build_check_statement_is_void(ag, payload_scope, continue_expr_node, expr_result);
+                ir_build_br(ag, payload_scope, node, cond_block, is_comptime);
             }
         }
 
@@ -6041,7 +6032,7 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
             if (else_result == ag->codegen->invalid_inst_src)
                 return else_result;
             if (!instr_is_unreachable(else_result))
-                ir_mark_gen(ir_build_br(ag, scope, node, end_block, is_comptime));
+                ir_build_br(ag, scope, node, end_block, is_comptime);
         Stage1ZirBasicBlock *after_else_block = ag->current_basic_block;
         ir_set_cursor_at_end_and_append_block(ag, end_block);
         if (else_result) {
@@ -6075,12 +6066,11 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
         IrInstSrc *maybe_val = ir_build_load_ptr(ag, scope, node->data.while_expr.condition, maybe_val_ptr);
         IrInstSrc *is_non_null = ir_build_test_non_null_src(ag, scope, node->data.while_expr.condition, maybe_val);
         Stage1ZirBasicBlock *after_cond_block = ag->current_basic_block;
-        IrInstSrc *void_else_result = else_node ? nullptr : ir_mark_gen(ir_build_const_void(ag, scope, node));
+        IrInstSrc *void_else_result = else_node ? nullptr : ir_build_const_void(ag, scope, node);
         IrInstSrc *cond_br_inst;
         if (!instr_is_unreachable(is_non_null)) {
             cond_br_inst = ir_build_cond_br(ag, scope, node->data.while_expr.condition, is_non_null,
                     body_block, else_block, is_comptime);
-            cond_br_inst->is_gen = true;
         } else {
             // for the purposes of the source instruction to ir_build_result_peers
             cond_br_inst = ag->current_basic_block->instruction_list.last();
@@ -6123,8 +6113,8 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
         }
 
         if (!instr_is_unreachable(body_result)) {
-            ir_mark_gen(ir_build_check_statement_is_void(ag, child_scope, node->data.while_expr.body, body_result));
-            ir_mark_gen(ir_build_br(ag, child_scope, node, continue_block, is_comptime));
+            ir_build_check_statement_is_void(ag, child_scope, node->data.while_expr.body, body_result);
+            ir_build_br(ag, child_scope, node, continue_block, is_comptime);
         }
 
         if (continue_expr_node) {
@@ -6133,8 +6123,8 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
             if (expr_result == ag->codegen->invalid_inst_src)
                 return expr_result;
             if (!instr_is_unreachable(expr_result)) {
-                ir_mark_gen(ir_build_check_statement_is_void(ag, child_scope, continue_expr_node, expr_result));
-                ir_mark_gen(ir_build_br(ag, child_scope, node, cond_block, is_comptime));
+                ir_build_check_statement_is_void(ag, child_scope, continue_expr_node, expr_result);
+                ir_build_br(ag, child_scope, node, cond_block, is_comptime);
             }
         }
 
@@ -6151,7 +6141,7 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
             if (else_result == ag->codegen->invalid_inst_src)
                 return else_result;
             if (!instr_is_unreachable(else_result))
-                ir_mark_gen(ir_build_br(ag, scope, node, end_block, is_comptime));
+                ir_build_br(ag, scope, node, end_block, is_comptime);
         }
         Stage1ZirBasicBlock *after_else_block = ag->current_basic_block;
         ir_set_cursor_at_end_and_append_block(ag, end_block);
@@ -6175,12 +6165,11 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
         if (cond_val == ag->codegen->invalid_inst_src)
             return cond_val;
         Stage1ZirBasicBlock *after_cond_block = ag->current_basic_block;
-        IrInstSrc *void_else_result = else_node ? nullptr : ir_mark_gen(ir_build_const_void(ag, scope, node));
+        IrInstSrc *void_else_result = else_node ? nullptr : ir_build_const_void(ag, scope, node);
         IrInstSrc *cond_br_inst;
         if (!instr_is_unreachable(cond_val)) {
             cond_br_inst = ir_build_cond_br(ag, scope, node->data.while_expr.condition, cond_val,
                     body_block, else_block, is_comptime);
-            cond_br_inst->is_gen = true;
         } else {
             // for the purposes of the source instruction to ir_build_result_peers
             cond_br_inst = ag->current_basic_block->instruction_list.last();
@@ -6219,8 +6208,8 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
         }
 
         if (!instr_is_unreachable(body_result)) {
-            ir_mark_gen(ir_build_check_statement_is_void(ag, scope, node->data.while_expr.body, body_result));
-            ir_mark_gen(ir_build_br(ag, scope, node, continue_block, is_comptime));
+            ir_build_check_statement_is_void(ag, scope, node->data.while_expr.body, body_result);
+            ir_build_br(ag, scope, node, continue_block, is_comptime);
        }
 
         if (continue_expr_node) {
@@ -6229,8 +6218,8 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
            if (expr_result == ag->codegen->invalid_inst_src)
                 return expr_result;
             if (!instr_is_unreachable(expr_result)) {
-                ir_mark_gen(ir_build_check_statement_is_void(ag, scope, continue_expr_node, expr_result));
-                ir_mark_gen(ir_build_br(ag, scope, node, cond_block, is_comptime));
+                ir_build_check_statement_is_void(ag, scope, continue_expr_node, expr_result);
+                ir_build_br(ag, scope, node, cond_block, is_comptime);
             }
         }
 
@@ -6248,7 +6237,7 @@ static IrInstSrc *astgen_while_expr(Stage1AstGen *ag, Scope *scope, AstNode *nod
             if (else_result == ag->codegen->invalid_inst_src)
                 return else_result;
             if (!instr_is_unreachable(else_result))
-                ir_mark_gen(ir_build_br(ag, scope, node, end_block, is_comptime));
+                ir_build_br(ag, scope, node, end_block, is_comptime);
         }
         Stage1ZirBasicBlock *after_else_block = ag->current_basic_block;
         ir_set_cursor_at_end_and_append_block(ag, end_block);
@@ -6332,9 +6321,9 @@ static IrInstSrc *astgen_for_expr(Stage1AstGen *ag, Scope *parent_scope, AstNode
     IrInstSrc *index_val = ir_build_load_ptr(ag, &spill_scope->base, node, index_ptr);
     IrInstSrc *cond = ir_build_bin_op(ag, parent_scope, node, IrBinOpCmpLessThan, index_val, len_val, false);
     Stage1ZirBasicBlock *after_cond_block = ag->current_basic_block;
-    IrInstSrc *void_else_value = else_node ? nullptr : ir_mark_gen(ir_build_const_void(ag, parent_scope, node));
-    IrInstSrc *cond_br_inst = ir_mark_gen(ir_build_cond_br(ag, parent_scope, node, cond,
-        body_block, else_block, is_comptime));
+    IrInstSrc *void_else_value = else_node ? nullptr : ir_build_const_void(ag, parent_scope, node);
+    IrInstSrc *cond_br_inst = ir_build_cond_br(ag, parent_scope, node, cond,
+        body_block, else_block, is_comptime);
 
     ResultLocPeerParent *peer_parent = ir_build_result_peers(ag, cond_br_inst, end_block, result_loc, is_comptime);
 
@@ -6377,8 +6366,8 @@ static IrInstSrc *astgen_for_expr(Stage1AstGen *ag, Scope *parent_scope, AstNode
     }
 
     if (!instr_is_unreachable(body_result)) {
-        ir_mark_gen(ir_build_check_statement_is_void(ag, child_scope, node->data.for_expr.body, body_result));
-        ir_mark_gen(ir_build_br(ag, child_scope, node, continue_block, is_comptime));
+        ir_build_check_statement_is_void(ag, child_scope, node->data.for_expr.body, body_result);
+        ir_build_br(ag, child_scope, node, continue_block, is_comptime);
     }
 
     ir_set_cursor_at_end_and_append_block(ag, continue_block);
@@ -6399,7 +6388,7 @@ static IrInstSrc *astgen_for_expr(Stage1AstGen *ag, Scope *parent_scope, AstNode
         if (else_result == ag->codegen->invalid_inst_src)
             return else_result;
         if (!instr_is_unreachable(else_result))
-            ir_mark_gen(ir_build_br(ag, parent_scope, node, end_block, is_comptime));
+            ir_build_br(ag, parent_scope, node, end_block, is_comptime);
     }
     Stage1ZirBasicBlock *after_else_block = ag->current_basic_block;
     ir_set_cursor_at_end_and_append_block(ag, end_block);
@@ -6719,7 +6708,7 @@ static IrInstSrc *astgen_if_optional_expr(Stage1AstGen *ag, Scope *scope, AstNod
         return then_expr_result;
     Stage1ZirBasicBlock *after_then_block = ag->current_basic_block;
     if (!instr_is_unreachable(then_expr_result))
-        ir_mark_gen(ir_build_br(ag, scope, node, endif_block, is_comptime));
+        ir_build_br(ag, scope, node, endif_block, is_comptime);
 
     ir_set_cursor_at_end_and_append_block(ag, else_block);
     IrInstSrc *else_expr_result;
@@ -6733,7 +6722,7 @@ static IrInstSrc *astgen_if_optional_expr(Stage1AstGen *ag, Scope *scope, AstNod
     }
     Stage1ZirBasicBlock *after_else_block = ag->current_basic_block;
     if (!instr_is_unreachable(else_expr_result))
-        ir_mark_gen(ir_build_br(ag, scope, node, endif_block, is_comptime));
+        ir_build_br(ag, scope, node, endif_block, is_comptime);
 
     ir_set_cursor_at_end_and_append_block(ag, endif_block);
     IrInstSrc **incoming_values = heap::c_allocator.allocate<IrInstSrc *>(2);
@@ -6802,7 +6791,7 @@ static IrInstSrc *astgen_if_err_expr(Stage1AstGen *ag, Scope *scope, AstNode *no
         return then_expr_result;
     Stage1ZirBasicBlock *after_then_block = ag->current_basic_block;
     if (!instr_is_unreachable(then_expr_result))
-        ir_mark_gen(ir_build_br(ag, scope, node, endif_block, is_comptime));
+        ir_build_br(ag, scope, node, endif_block, is_comptime);
 
     ir_set_cursor_at_end_and_append_block(ag, else_block);
 
@@ -6831,7 +6820,7 @@ static IrInstSrc *astgen_if_err_expr(Stage1AstGen *ag, Scope *scope, AstNode *no
     }
     Stage1ZirBasicBlock *after_else_block = ag->current_basic_block;
     if (!instr_is_unreachable(else_expr_result))
-        ir_mark_gen(ir_build_br(ag, scope, node, endif_block, is_comptime));
+        ir_build_br(ag, scope, node, endif_block, is_comptime);
 
     ir_set_cursor_at_end_and_append_block(ag, endif_block);
     IrInstSrc **incoming_values = heap::c_allocator.allocate<IrInstSrc *>(2);
@@ -6893,7 +6882,7 @@ static bool astgen_switch_prong_expr(Stage1AstGen *ag, Scope *scope, AstNode *sw
     if (expr_result == ag->codegen->invalid_inst_src)
         return false;
     if (!instr_is_unreachable(expr_result))
-        ir_mark_gen(ir_build_br(ag, scope, switch_node, end_block, is_comptime));
+        ir_build_br(ag, scope, switch_node, end_block, is_comptime);
     incoming_blocks->append(ag->current_basic_block);
     incoming_values->append(expr_result);
     return true;
@@ -7008,8 +6997,8 @@ static IrInstSrc *astgen_switch_expr(Stage1AstGen *ag, Scope *scope, AstNode *no
             assert(ok_bit);
             assert(last_item_node);
-            IrInstSrc *br_inst = ir_mark_gen(ir_build_cond_br(ag, scope, last_item_node, ok_bit,
-                range_block_yes, range_block_no, is_comptime));
+            IrInstSrc *br_inst = ir_build_cond_br(ag, scope, last_item_node, ok_bit,
+                range_block_yes, range_block_no, is_comptime);
             if (peer_parent->base.source_instruction == nullptr) {
                 peer_parent->base.source_instruction = br_inst;
             }
@@ -7349,14 +7338,14 @@ static IrInstSrc *astgen_continue(Stage1AstGen *ag, Scope *continue_scope, AstNo
 
     for (size_t i = 0; i < runtime_scopes.length; i += 1) {
         ScopeRuntime *scope_runtime = runtime_scopes.at(i);
-        ir_mark_gen(ir_build_check_runtime_scope(ag, continue_scope, node, scope_runtime->is_comptime, is_comptime));
+        ir_build_check_runtime_scope(ag, continue_scope, node, scope_runtime->is_comptime, is_comptime);
     }
     runtime_scopes.deinit();
 
     Stage1ZirBasicBlock *dest_block = loop_scope->continue_block;
     if (!astgen_defers_for_block(ag, continue_scope, dest_block->scope, nullptr, nullptr))
         return ag->codegen->invalid_inst_src;
-    return ir_mark_gen(ir_build_br(ag, continue_scope, node, dest_block, is_comptime));
+    return ir_build_br(ag, continue_scope, node, dest_block, is_comptime);
 }
 
 static IrInstSrc *astgen_error_type(Stage1AstGen *ag, Scope *scope, AstNode *node) {
@@ -7482,7 +7471,7 @@ static IrInstSrc *astgen_catch(Stage1AstGen *ag, Scope *parent_scope, AstNode *n
         return ag->codegen->invalid_inst_src;
     Stage1ZirBasicBlock *after_err_block = ag->current_basic_block;
     if (!instr_is_unreachable(err_result))
-        ir_mark_gen(ir_build_br(ag, parent_scope, node, end_block, is_comptime));
+        ir_build_br(ag, parent_scope, node, end_block, is_comptime);
 
     ir_set_cursor_at_end_and_append_block(ag, ok_block);
     IrInstSrc *unwrapped_ptr = ir_build_unwrap_err_payload_src(ag, parent_scope, node, err_union_ptr, false, false);
@@ -7757,9 +7746,9 @@ static IrInstSrc *astgen_suspend(Stage1AstGen *ag, Scope *parent_scope, AstNode
     IrInstSrc *susp_res = astgen_node(ag, node->data.suspend.block, child_scope);
     if (susp_res == ag->codegen->invalid_inst_src)
         return ag->codegen->invalid_inst_src;
-    ir_mark_gen(ir_build_check_statement_is_void(ag, child_scope, node->data.suspend.block, susp_res));
+    ir_build_check_statement_is_void(ag, child_scope, node->data.suspend.block, susp_res);
 
-    return ir_mark_gen(ir_build_suspend_finish_src(ag, parent_scope, node, begin));
+    return ir_build_suspend_finish_src(ag, parent_scope, node, begin);
 }
 
 static IrInstSrc *astgen_node_raw(Stage1AstGen *ag, AstNode *node, Scope *scope,
@@ -8073,13 +8062,13 @@ bool stage1_astgen(CodeGen *codegen, AstNode *node, Scope *scope, Stage1Zir *sta
     }
 
     if (!instr_is_unreachable(result)) {
-        ir_mark_gen(ir_build_add_implicit_return_type(ag, scope, result->base.source_node, result, nullptr));
+        ir_build_add_implicit_return_type(ag, scope, result->base.source_node, result, nullptr);
         // no need for save_err_ret_addr because this cannot return error
         ResultLocReturn *result_loc_ret = heap::c_allocator.create<ResultLocReturn>();
         result_loc_ret->base.id = ResultLocIdReturn;
         ir_build_reset_result(ag, scope, node, &result_loc_ret->base);
-        ir_mark_gen(ir_build_end_expr(ag, scope, node, result, &result_loc_ret->base));
-        ir_mark_gen(ir_build_return_src(ag, scope, result->base.source_node, result));
+        ir_build_end_expr(ag, scope, node, result, &result_loc_ret->base);
+        ir_build_return_src(ag, scope, result->base.source_node, result);
     }
 
     return true;
diff --git a/src/stage1/ir.cpp b/src/stage1/ir.cpp
index 2200e8380d..0c915f5c35 100644
--- a/src/stage1/ir.cpp
+++ b/src/stage1/ir.cpp
@@ -5407,16 +5407,6 @@ static void ir_finish_bb(IrAnalyze *ira) {
                     ira->new_irb.current_basic_block->debug_id);
         }
     }
-    ira->instruction_index += 1;
-    while (ira->instruction_index < ira->zir_current_basic_block->instruction_list.length) {
-        IrInstSrc *next_instruction = ira->zir_current_basic_block->instruction_list.at(ira->instruction_index);
-        if (!next_instruction->is_gen) {
-            ir_add_error(ira, &next_instruction->base, buf_sprintf("unreachable code"));
-            break;
-        }
-        ira->instruction_index += 1;
-    }
-
     ir_start_next_bb(ira);
 }
 
@@ -15934,7 +15924,7 @@ static IrInstGen *ir_analyze_instruction_pop_count(IrAnalyze *ira, IrInstSrcPopC
     return ir_build_pop_count_gen(ira, &instruction->base.base, return_type, op);
 }
 
-static IrInstGen *ir_analyze_union_tag(IrAnalyze *ira, IrInst* source_instr, IrInstGen *value, bool is_gen) {
+static IrInstGen *ir_analyze_union_tag(IrAnalyze *ira, IrInst* source_instr, IrInstGen *value) {
     if (type_is_invalid(value->value->type))
         return ira->codegen->invalid_inst_gen;
 
@@ -15943,7 +15933,7 @@ static IrInstGen *ir_analyze_union_tag(IrAnalyze *ira, IrInst* source_instr, IrI
             buf_sprintf("expected enum or union type, found '%s'", buf_ptr(&value->value->type->name)));
         return ira->codegen->invalid_inst_gen;
     }
-    if (!value->value->type->data.unionation.have_explicit_tag_type && !is_gen) {
+    if (!value->value->type->data.unionation.have_explicit_tag_type) {
         ErrorMsg *msg = ir_add_error(ira, source_instr,
             buf_sprintf("union has no associated enum"));
         if (value->value->type->data.unionation.decl_node != nullptr) {
             add_error_note(ira->codegen, msg, value->value->type->data.unionation.decl_node,
@@ -16906,7 +16896,7 @@ static IrInstGen *ir_analyze_instruction_enum_tag_name(IrAnalyze *ira, IrInstSrc
     }
 
     if (target_type->id == ZigTypeIdUnion) {
-        target = ir_analyze_union_tag(ira, &instruction->base.base, target, instruction->base.is_gen);
+        target = ir_analyze_union_tag(ira, &instruction->base.base, target);
         if (type_is_invalid(target->value->type))
             return ira->codegen->invalid_inst_gen;
         target_type = target->value->type;
```
