| author | Andrew Kelley <andrew@ziglang.org> | 2021-03-22 23:46:51 -0700 |
|---|---|---|
| committer | Andrew Kelley <andrew@ziglang.org> | 2021-03-22 23:47:13 -0700 |
| commit | d24be85be88737db8399b492931647056c547614 | |
| tree | e6079e363a2b5bcdc1b100b1149c313b13197f41 /src | |
| parent | 568f333681e6ecf8c60c5bbe04ea1e494d966d48 | |
| download | zig-d24be85be88737db8399b492931647056c547614.tar.gz, zig-d24be85be88737db8399b492931647056c547614.zip | |
stage2: fix `if` expressions
Diffstat (limited to 'src')
| -rw-r--r-- | src/Module.zig | 14 |
| -rw-r--r-- | src/Sema.zig | 28 |
| -rw-r--r-- | src/astgen.zig | 238 |
| -rw-r--r-- | src/zir.zig | 123 |
4 files changed, 269 insertions, 134 deletions
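
The heart of the change is a new encoding for block breaks: `@"break"` and `break_void_node` carry the target block as a plain instruction index (`block_inst`) plus an operand or a source node offset, and astgen decides whether a then/else arm still needs a trailing break via the new `WipZirCode.refIsNoReturn`, which maps a `zir.Inst.Ref` back to an instruction whenever the ref is at or above `ref_start_index`. A minimal sketch of that ref-to-instruction mapping, with simplified stand-in types; only the names `Ref`, `ref_start_index`, `refIsNoReturn`, and the instruction tags are taken from the diff below, and the numeric values are made up for illustration:

```zig
const std = @import("std");
const assert = std.debug.assert;

const Ref = u32;

const Tag = enum {
    add,
    @"break",
    break_void_node,

    fn isNoReturn(tag: Tag) bool {
        return switch (tag) {
            .@"break", .break_void_node => true,
            else => false,
        };
    }
};

/// Simplified stand-in for the per-function WipZirCode from the diff.
const WipZirCode = struct {
    /// Refs below this value name constants or parameters; refs at or
    /// above it name instructions, at index `ref - ref_start_index`.
    ref_start_index: u32,
    tags: []const Tag,

    fn refIsNoReturn(wzc: WipZirCode, ref: Ref) bool {
        if (ref >= wzc.ref_start_index) {
            return wzc.tags[ref - wzc.ref_start_index].isNoReturn();
        }
        return false; // constants and parameters never terminate a block
    }
};

pub fn main() void {
    const wzc = WipZirCode{
        .ref_start_index = 10,
        .tags = &[_]Tag{ .add, .@"break" },
    };
    assert(!wzc.refIsNoReturn(3)); // ref 3: a constant, below ref_start_index
    assert(!wzc.refIsNoReturn(10)); // ref 10 -> instruction %0 (add)
    assert(wzc.refIsNoReturn(11)); // ref 11 -> instruction %1 (break)
}
```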
diff --git a/src/Module.zig b/src/Module.zig index 960f2175d8..33422ae011 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -1307,6 +1307,7 @@ pub const Scope = struct { /// Note that this returns a `zir.Inst.Index` not a ref. /// Leaves the `payload_index` field undefined. pub fn addCondBr(gz: *GenZir, node: ast.Node.Index) !zir.Inst.Index { + try gz.instructions.ensureCapacity(gz.zir_code.gpa, gz.instructions.items.len + 1); const new_index = @intCast(zir.Inst.Index, gz.zir_code.instructions.len); try gz.zir_code.instructions.append(gz.zir_code.gpa, .{ .tag = .condbr, @@ -1315,6 +1316,7 @@ pub const Scope = struct { .payload_index = undefined, } }, }); + gz.instructions.appendAssumeCapacity(new_index); return new_index; } @@ -1398,6 +1400,14 @@ pub const WipZirCode = struct { return result; } + pub fn refIsNoReturn(wzc: WipZirCode, zir_inst_ref: zir.Inst.Ref) bool { + if (zir_inst_ref >= wzc.ref_start_index) { + const zir_inst = zir_inst_ref - wzc.ref_start_index; + return wzc.instructions.items(.tag)[zir_inst].isNoReturn(); + } + return false; + } + pub fn deinit(wzc: *WipZirCode) void { wzc.instructions.deinit(wzc.gpa); wzc.extra.deinit(wzc.gpa); @@ -2290,6 +2300,7 @@ fn astgenAndSemaFn( .decl = decl, .arena = &decl_arena.allocator, .gpa = mod.gpa, + .ref_start_index = @intCast(u32, zir.const_inst_list.len + param_count), }; defer wip_zir_code.deinit(); @@ -3199,6 +3210,9 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn) !void { }; defer inner_block.instructions.deinit(mod.gpa); + // TZIR currently requires the arg parameters to be the first N instructions + try inner_block.instructions.appendSlice(mod.gpa, param_inst_list); + func.state = .in_progress; log.debug("set {s} to in_progress", .{decl.name}); diff --git a/src/Sema.zig b/src/Sema.zig index 7e5ce9a001..d9a80610e6 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -218,7 +218,7 @@ pub fn analyzeBody(sema: *Sema, block: *Scope.Block, body: []const zir.Inst.Inde // tail call them here. 
.condbr => return sema.zirCondbr(block, inst), .@"break" => return sema.zirBreak(block, inst), - .break_void_tok => return sema.zirBreakVoidTok(block, inst), + .break_void_node => return sema.zirBreakVoidNode(block, inst), .break_flat => return sema.code.instructions.items(.data)[inst].un_node.operand, .compile_error => return sema.zirCompileError(block, inst), .ret_coerce => return sema.zirRetTok(block, inst, true), @@ -955,20 +955,18 @@ fn zirBreak(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!z const tracy = trace(@src()); defer tracy.end(); - const bin_inst = sema.code.instructions.items(.data)[inst].bin; - const operand = try sema.resolveInst(bin_inst.rhs); - const zir_block = bin_inst.lhs; - return sema.analyzeBreak(block, sema.src, zir_block, operand); + const inst_data = sema.code.instructions.items(.data)[inst].@"break"; + const operand = try sema.resolveInst(inst_data.operand); + return sema.analyzeBreak(block, sema.src, inst_data.block_inst, operand); } -fn zirBreakVoidTok(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!zir.Inst.Index { +fn zirBreakVoidNode(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!zir.Inst.Index { const tracy = trace(@src()); defer tracy.end(); - const inst_data = sema.code.instructions.items(.data)[inst].un_tok; - const zir_block = inst_data.operand; + const inst_data = sema.code.instructions.items(.data)[inst].break_void_node; const void_inst = try sema.mod.constVoid(sema.arena, .unneeded); - return sema.analyzeBreak(block, inst_data.src(), zir_block, void_inst); + return sema.analyzeBreak(block, inst_data.src(), inst_data.block_inst, void_inst); } fn analyzeBreak( @@ -982,7 +980,6 @@ fn analyzeBreak( while (true) { if (block.label) |*label| { if (label.zir_block == zir_block) { - try sema.requireFunctionBlock(block, src); // Here we add a br instruction, but we over-allocate a little bit // (if necessary) to make it possible to convert the instruction into // a br_block_flat instruction later. 
@@ -1000,7 +997,7 @@ fn analyzeBreak( .operand = operand, .block = label.merges.block_inst, }; - try block.instructions.append(sema.gpa, &br.base); + try start_block.instructions.append(sema.gpa, &br.base); try label.merges.results.append(sema.gpa, operand); try label.merges.br_list.append(sema.gpa, br); return always_noreturn; @@ -2613,10 +2610,11 @@ fn zirCmp( const tracy = trace(@src()); defer tracy.end(); - const src: LazySrcLoc = .todo; - const bin_inst = sema.code.instructions.items(.data)[inst].bin; - const lhs = try sema.resolveInst(bin_inst.lhs); - const rhs = try sema.resolveInst(bin_inst.rhs); + const inst_data = sema.code.instructions.items(.data)[inst].pl_node; + const extra = sema.code.extraData(zir.Inst.Bin, inst_data.payload_index).data; + const src: LazySrcLoc = inst_data.src(); + const lhs = try sema.resolveInst(extra.lhs); + const rhs = try sema.resolveInst(extra.rhs); const is_equality_cmp = switch (op) { .eq, .neq => true, diff --git a/src/astgen.zig b/src/astgen.zig index 2237534fa8..0aee5caeee 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -428,12 +428,12 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In .if_simple => return ifExpr(mod, scope, rl, node, tree.ifSimple(node)), .@"if" => return ifExpr(mod, scope, rl, node, tree.ifFull(node)), - .while_simple => return whileExpr(mod, scope, rl, tree.whileSimple(node)), - .while_cont => return whileExpr(mod, scope, rl, tree.whileCont(node)), - .@"while" => return whileExpr(mod, scope, rl, tree.whileFull(node)), + .while_simple => return whileExpr(mod, scope, rl, node, tree.whileSimple(node)), + .while_cont => return whileExpr(mod, scope, rl, node, tree.whileCont(node)), + .@"while" => return whileExpr(mod, scope, rl, node, tree.whileFull(node)), - .for_simple => return forExpr(mod, scope, rl, tree.forSimple(node)), - .@"for" => return forExpr(mod, scope, rl, tree.forFull(node)), + .for_simple => return forExpr(mod, scope, rl, node, tree.forSimple(node)), + .@"for" => return forExpr(mod, scope, rl, node, tree.forFull(node)), // TODO handling these separately would actually be simpler & have fewer branches // once we have a ZIR instruction for each of these 3 cases. @@ -956,7 +956,7 @@ fn labeledBlockExpr( // The code took advantage of the result location as a pointer. // Turn the break instruction operands into void. for (block_scope.labeled_breaks.items) |br| { - zir_datas[br].bin.rhs = 0; + zir_datas[br].@"break".operand = @enumToInt(zir.Const.void_value); } // TODO technically not needed since we changed the tag to break_void but // would be better still to elide the ones that are in this list. 
@@ -1169,7 +1169,7 @@ fn blockExprStmts( .compile_log, .ensure_err_payload_void, .@"break", - .break_void_tok, + .break_void_node, .break_flat, .condbr, .compile_error, @@ -1749,13 +1749,13 @@ fn orelseCatchExpr( return finishThenElseBlock( mod, - scope, - rl, &block_scope, + rl, + node, &then_scope, &else_scope, - &condbr.positionals.then_body, - &condbr.positionals.else_body, + condbr, + cond, src, src, then_result, @@ -1767,75 +1767,87 @@ fn orelseCatchExpr( fn finishThenElseBlock( mod: *Module, - parent_scope: *Scope, - rl: ResultLoc, block_scope: *Scope.GenZir, + rl: ResultLoc, + node: ast.Node.Index, then_scope: *Scope.GenZir, else_scope: *Scope.GenZir, - then_body: *zir.Body, - else_body: *zir.Body, - then_src: usize, - else_src: usize, + condbr: zir.Inst.Index, + cond: zir.Inst.Ref, + then_src: ast.Node.Index, + else_src: ast.Node.Index, then_result: zir.Inst.Ref, - else_result: ?*zir.Inst, - main_block: zir.Inst.Ref.Block, - then_break_block: zir.Inst.Ref.Block, + else_result: zir.Inst.Ref, + main_block: zir.Inst.Index, + then_break_block: zir.Inst.Index, ) InnerError!zir.Inst.Ref { // We now have enough information to decide whether the result instruction should // be communicated via result location pointer or break instructions. const strat = rlStrategy(rl, block_scope); + const wzc = block_scope.zir_code; switch (strat.tag) { .break_void => { - if (!then_result.tag.isNoReturn()) { - _ = try addZirInstTag(mod, &then_scope.base, then_src, .break_void, .{ - .block = then_break_block, + if (!wzc.refIsNoReturn(then_result)) { + _ = try then_scope.add(.{ + .tag = .break_void_node, + .data = .{ .break_void_node = .{ + .src_node = wzc.decl.nodeIndexToRelative(then_src), + .block_inst = then_break_block, + } }, }); } - if (else_result) |inst| { - if (!inst.tag.isNoReturn()) { - _ = try addZirInstTag(mod, &else_scope.base, else_src, .break_void, .{ - .block = main_block, - }); - } - } else { - _ = try addZirInstTag(mod, &else_scope.base, else_src, .break_void, .{ - .block = main_block, + const elide_else = if (else_result != 0) wzc.refIsNoReturn(else_result) else false; + if (!elide_else) { + _ = try else_scope.add(.{ + .tag = .break_void_node, + .data = .{ .break_void_node = .{ + .src_node = wzc.decl.nodeIndexToRelative(else_src), + .block_inst = main_block, + } }, }); } assert(!strat.elide_store_to_block_ptr_instructions); - try then_scope.setBlockBody(then_body); - try else_scope.setBlockBody(else_body); - return &main_block.base; + try setCondBrPayload(condbr, cond, then_scope, else_scope); + return wzc.ref_start_index + main_block; }, .break_operand => { - if (!then_result.tag.isNoReturn()) { - _ = try addZirInstTag(mod, &then_scope.base, then_src, .@"break", .{ - .block = then_break_block, - .operand = then_result, + if (!wzc.refIsNoReturn(then_result)) { + _ = try then_scope.add(.{ + .tag = .@"break", + .data = .{ .@"break" = .{ + .block_inst = then_break_block, + .operand = then_result, + } }, }); } - if (else_result) |inst| { - if (!inst.tag.isNoReturn()) { - _ = try addZirInstTag(mod, &else_scope.base, else_src, .@"break", .{ - .block = main_block, - .operand = inst, + if (else_result != 0) { + if (!wzc.refIsNoReturn(else_result)) { + _ = try else_scope.add(.{ + .tag = .@"break", + .data = .{ .@"break" = .{ + .block_inst = main_block, + .operand = else_result, + } }, }); } } else { - _ = try addZirInstTag(mod, &else_scope.base, else_src, .break_void, .{ - .block = main_block, + _ = try else_scope.add(.{ + .tag = .break_void_node, + .data = .{ .break_void_node = .{ + .src_node 
= wzc.decl.nodeIndexToRelative(else_src), + .block_inst = main_block, + } }, }); } if (strat.elide_store_to_block_ptr_instructions) { - try copyBodyWithElidedStoreBlockPtr(then_body, then_scope.*); - try copyBodyWithElidedStoreBlockPtr(else_body, else_scope.*); + try setCondBrPayloadElideBlockStorePtr(condbr, cond, then_scope, else_scope); } else { - try then_scope.setBlockBody(then_body); - try else_scope.setBlockBody(else_body); + try setCondBrPayload(condbr, cond, then_scope, else_scope); } + const block_ref = wzc.ref_start_index + main_block; switch (rl) { - .ref => return &main_block.base, - else => return rvalue(mod, parent_scope, rl, &main_block.base), + .ref => return block_ref, + else => return rvalue(mod, &block_scope.base, rl, block_ref, node), } }, } @@ -1951,18 +1963,18 @@ fn simpleBinOp( mod: *Module, scope: *Scope, rl: ResultLoc, - infix_node: ast.Node.Index, + node: ast.Node.Index, op_inst_tag: zir.Inst.Tag, ) InnerError!zir.Inst.Ref { - const tree = scope.tree(); + const gz = scope.getGenZir(); + const tree = gz.tree(); const node_datas = tree.nodes.items(.data); - const gz = scope.getGenZir(); - const result = try gz.addPlNode(op_inst_tag, infix_node, zir.Inst.Bin{ - .lhs = try expr(mod, scope, .none, node_datas[infix_node].lhs), - .rhs = try expr(mod, scope, .none, node_datas[infix_node].rhs), + const result = try gz.addPlNode(op_inst_tag, node, zir.Inst.Bin{ + .lhs = try expr(mod, scope, .none, node_datas[node].lhs), + .rhs = try expr(mod, scope, .none, node_datas[node].rhs), }); - return rvalue(mod, scope, rl, result, infix_node); + return rvalue(mod, scope, rl, result, node); } fn boolBinOp( @@ -2000,7 +2012,6 @@ fn ifExpr( node: ast.Node.Index, if_full: ast.full.If, ) InnerError!zir.Inst.Ref { - if (true) @panic("TODO update for zir-memory-layout"); const parent_gz = scope.getGenZir(); var block_scope: Scope.GenZir = .{ .parent = scope, @@ -2011,8 +2022,6 @@ fn ifExpr( setBlockResultLoc(&block_scope, rl); defer block_scope.instructions.deinit(mod.gpa); - const tree = parent_gz.tree(); - const cond = c: { // TODO https://github.com/ziglang/zig/issues/7929 if (if_full.error_token) |error_token| { @@ -2031,11 +2040,9 @@ fn ifExpr( try parent_gz.instructions.append(mod.gpa, block); try block_scope.setBlockBody(block); - const then_src = token_starts[tree.lastToken(if_full.ast.then_expr)]; var then_scope: Scope.GenZir = .{ .parent = scope, - .decl = block_scope.decl, - .arena = block_scope.arena, + .zir_code = parent_gz.zir_code, .force_comptime = block_scope.force_comptime, .instructions = .{}, }; @@ -2052,36 +2059,38 @@ fn ifExpr( var else_scope: Scope.GenZir = .{ .parent = scope, - .decl = block_scope.decl, - .arena = block_scope.arena, + .zir_code = parent_gz.zir_code, .force_comptime = block_scope.force_comptime, .instructions = .{}, }; defer else_scope.instructions.deinit(mod.gpa); const else_node = if_full.ast.else_expr; - const else_info: struct { src: usize, result: ?*zir.Inst } = if (else_node != 0) blk: { + const else_info: struct { + src: ast.Node.Index, + result: zir.Inst.Ref, + } = if (else_node != 0) blk: { block_scope.break_count += 1; const sub_scope = &else_scope.base; break :blk .{ - .src = token_starts[tree.lastToken(else_node)], + .src = else_node, .result = try expr(mod, sub_scope, block_scope.break_result_loc, else_node), }; } else .{ - .src = token_starts[tree.lastToken(if_full.ast.then_expr)], - .result = null, + .src = if_full.ast.then_expr, + .result = 0, }; return finishThenElseBlock( mod, - scope, - rl, &block_scope, + rl, + node, &then_scope, 
&else_scope, - &condbr.positionals.then_body, - &condbr.positionals.else_body, - then_src, + condbr, + cond, + if_full.ast.then_expr, else_info.src, then_result, else_info.result, @@ -2090,25 +2099,63 @@ fn ifExpr( ); } -/// Expects to find exactly 1 .store_to_block_ptr instruction. -fn copyBodyWithElidedStoreBlockPtr(body: *zir.Body, scope: Module.Scope.GenZir) !void { - body.* = .{ - .instructions = try scope.arena.alloc(zir.Inst.Ref, scope.instructions.items.len - 1), - }; - var dst_index: usize = 0; - for (scope.instructions.items) |src_inst| { - if (src_inst.tag != .store_to_block_ptr) { - body.instructions[dst_index] = src_inst; - dst_index += 1; +fn setCondBrPayload( + condbr: zir.Inst.Index, + cond: zir.Inst.Ref, + then_scope: *Scope.GenZir, + else_scope: *Scope.GenZir, +) !void { + const wzc = then_scope.zir_code; + + try wzc.extra.ensureCapacity(wzc.gpa, wzc.extra.items.len + + @typeInfo(zir.Inst.CondBr).Struct.fields.len + + then_scope.instructions.items.len + else_scope.instructions.items.len); + + const zir_datas = wzc.instructions.items(.data); + zir_datas[condbr].pl_node.payload_index = wzc.addExtraAssumeCapacity(zir.Inst.CondBr{ + .condition = cond, + .then_body_len = @intCast(u32, then_scope.instructions.items.len), + .else_body_len = @intCast(u32, else_scope.instructions.items.len), + }); + wzc.extra.appendSliceAssumeCapacity(then_scope.instructions.items); + wzc.extra.appendSliceAssumeCapacity(else_scope.instructions.items); +} + +/// If `elide_block_store_ptr` is set, expects to find exactly 1 .store_to_block_ptr instruction. +fn setCondBrPayloadElideBlockStorePtr( + condbr: zir.Inst.Index, + cond: zir.Inst.Ref, + then_scope: *Scope.GenZir, + else_scope: *Scope.GenZir, +) !void { + const wzc = then_scope.zir_code; + + try wzc.extra.ensureCapacity(wzc.gpa, wzc.extra.items.len + + @typeInfo(zir.Inst.CondBr).Struct.fields.len + + then_scope.instructions.items.len + else_scope.instructions.items.len - 2); + + const zir_datas = wzc.instructions.items(.data); + zir_datas[condbr].pl_node.payload_index = wzc.addExtraAssumeCapacity(zir.Inst.CondBr{ + .condition = cond, + .then_body_len = @intCast(u32, then_scope.instructions.items.len - 1), + .else_body_len = @intCast(u32, else_scope.instructions.items.len - 1), + }); + + const zir_tags = wzc.instructions.items(.tag); + for ([_]*Scope.GenZir{ then_scope, else_scope }) |scope| { + for (scope.instructions.items) |src_inst| { + if (zir_tags[src_inst] != .store_to_block_ptr) { + wzc.extra.appendAssumeCapacity(src_inst); + } } } - assert(dst_index == body.instructions.len); } fn whileExpr( mod: *Module, scope: *Scope, rl: ResultLoc, + node: ast.Node.Index, while_full: ast.full.While, ) InnerError!zir.Inst.Ref { if (true) @panic("TODO update for zir-memory-layout"); @@ -2245,13 +2292,13 @@ fn whileExpr( } return finishThenElseBlock( mod, - scope, - rl, &loop_scope, + rl, + node, &then_scope, &else_scope, - &condbr.positionals.then_body, - &condbr.positionals.else_body, + condbr, + cond, then_src, else_info.src, then_result, @@ -2265,6 +2312,7 @@ fn forExpr( mod: *Module, scope: *Scope, rl: ResultLoc, + node: ast.Node.Index, for_full: ast.full.While, ) InnerError!zir.Inst.Ref { if (true) @panic("TODO update for zir-memory-layout"); @@ -2442,13 +2490,13 @@ fn forExpr( } return finishThenElseBlock( mod, - scope, - rl, &loop_scope, + rl, + node, &then_scope, &else_scope, - &condbr.positionals.then_body, - &condbr.positionals.else_body, + condbr, + cond, then_src, else_info.src, then_result, diff --git a/src/zir.zig b/src/zir.zig index 
995cbba339..7a963ee097 100644 --- a/src/zir.zig +++ b/src/zir.zig @@ -90,7 +90,7 @@ pub const Code = struct { .arena = &arena.allocator, .scope = scope, .code = code, - .indent = 4, + .indent = 2, .param_count = param_count, }; @@ -469,15 +469,13 @@ pub const Inst = struct { /// Uses the `bool_br` union field. bool_br_or, /// Return a value from a block. - /// Uses the `bin` union field: `lhs` is `Index` to the block (*not* `Ref`!), - /// `rhs` is operand. + /// Uses the `break` union field. /// Uses the source information from previous instruction. @"break", - /// Same as `break` but has source information in the form of a token, and + /// Same as `break` but has source information in the form of an AST node, and /// the operand is assumed to be the void value. - /// Uses the `un_tok` union field. - /// Note that the block operand is a `Index`, not `Ref`. - break_void_tok, + /// Uses the `break_void_node` union field. + break_void_node, /// Return a value from a block. This is a special form that is only valid /// when there is exactly 1 break from a block (this one). This instruction /// allows using the return value from `Sema.analyzeBody`. The block is @@ -997,7 +995,7 @@ pub const Inst = struct { => false, .@"break", - .break_void_tok, + .break_void_node, .break_flat, .condbr, .compile_error, @@ -1023,10 +1021,9 @@ pub const Inst = struct { /// This logic is implemented in `Sema.resolveRef`. pub const Ref = u32; - /// For instructions whose payload fits into 8 bytes, this is used. - /// When an instruction's payload does not fit, bin_op is used, and - /// lhs and rhs refer to `Tag`-specific values, with one of the operands - /// used to index into a separate array specific to that instruction. + /// All instructions have an 8-byte payload, which is contained within + /// this union. `Tag` determines which union field is active, as well as + /// how to interpret the data within. pub const Data = union { /// Used for unary operators, with an AST node source location. un_node: struct { @@ -1161,6 +1158,20 @@ pub const Inst = struct { return .{ .node_offset = self.src_node }; } }, + break_void_node: struct { + /// Offset from Decl AST node index. + /// `Tag` determines which kind of AST node this points to. + src_node: i32, + block_inst: Index, + + pub fn src(self: @This()) LazySrcLoc { + return .{ .node_offset = self.src_node }; + } + }, + @"break": struct { + block_inst: Index, + operand: Ref, + }, // Make sure we don't accidentally add a field to make this union // bigger than expected. 
Note that in Debug builds, Zig is allowed @@ -1368,7 +1379,6 @@ const Writer = struct { .break_flat, => try self.writeUnNode(stream, inst), - .break_void_tok, .is_non_null, .is_null, .is_non_null_ptr, @@ -1394,16 +1404,11 @@ const Writer = struct { .int => try self.writeInt(stream, inst), .str => try self.writeStr(stream, inst), .elided => try stream.writeAll(")"), + .break_void_node => try self.writeBreakVoidNode(stream, inst), .@"asm", .asm_volatile, - .block, - .block_comptime, - .call, - .call_chkused, - .call_compile_time, .compile_log, - .condbr, .elem_ptr_node, .elem_val_node, .field_ptr, @@ -1441,6 +1446,17 @@ const Writer = struct { .xor, => try self.writePlNodeBin(stream, inst), + .call, + .call_chkused, + .call_compile_time, + => try self.writePlNodeCall(stream, inst), + + .block, + .block_comptime, + => try self.writePlNodeBlock(stream, inst), + + .condbr => try self.writePlNodeCondBr(stream, inst), + .as_node => try self.writeAs(stream, inst), .breakpoint, @@ -1531,7 +1547,8 @@ const Writer = struct { inst: Inst.Index, ) (@TypeOf(stream).Error || error{OutOfMemory})!void { const inst_data = self.code.instructions.items(.data)[inst].param_type; - try stream.writeAll("TODO)"); + try self.writeInstRef(stream, inst_data.callee); + try stream.print(", {d})", .{inst_data.param_index}); } fn writePtrTypeSimple( @@ -1591,6 +1608,53 @@ const Writer = struct { try self.writeSrc(stream, inst_data.src()); } + fn writePlNodeCall(self: *Writer, stream: anytype, inst: Inst.Index) !void { + const inst_data = self.code.instructions.items(.data)[inst].pl_node; + const extra = self.code.extraData(Inst.Call, inst_data.payload_index); + const args = self.code.extra[extra.end..][0..extra.data.args_len]; + try self.writeInstRef(stream, extra.data.callee); + try stream.writeAll(", ["); + for (args) |arg, i| { + if (i != 0) try stream.writeAll(", "); + try self.writeInstRef(stream, arg); + } + try stream.writeAll("]) "); + try self.writeSrc(stream, inst_data.src()); + } + + fn writePlNodeBlock(self: *Writer, stream: anytype, inst: Inst.Index) !void { + const inst_data = self.code.instructions.items(.data)[inst].pl_node; + const extra = self.code.extraData(Inst.Block, inst_data.payload_index); + const body = self.code.extra[extra.end..][0..extra.data.body_len]; + try stream.writeAll("{\n"); + self.indent += 2; + try self.writeBody(stream, body); + self.indent -= 2; + try stream.writeByteNTimes(' ', self.indent); + try stream.writeAll("}) "); + try self.writeSrc(stream, inst_data.src()); + } + + fn writePlNodeCondBr(self: *Writer, stream: anytype, inst: Inst.Index) !void { + const inst_data = self.code.instructions.items(.data)[inst].pl_node; + const extra = self.code.extraData(Inst.CondBr, inst_data.payload_index); + const then_body = self.code.extra[extra.end..][0..extra.data.then_body_len]; + const else_body = self.code.extra[extra.end + then_body.len ..][0..extra.data.else_body_len]; + try self.writeInstRef(stream, extra.data.condition); + try stream.writeAll(", {\n"); + self.indent += 2; + try self.writeBody(stream, then_body); + self.indent -= 2; + try stream.writeByteNTimes(' ', self.indent); + try stream.writeAll("}, {\n"); + self.indent += 2; + try self.writeBody(stream, else_body); + self.indent -= 2; + try stream.writeByteNTimes(' ', self.indent); + try stream.writeAll("}) "); + try self.writeSrc(stream, inst_data.src()); + } + fn writeAs(self: *Writer, stream: anytype, inst: Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[inst].pl_node; const extra = 
self.code.extraData(Inst.As, inst_data.payload_index).data; @@ -1671,6 +1735,13 @@ const Writer = struct { return self.writeFnTypeCommon(stream, param_types, inst_data.return_type, var_args, cc); } + fn writeBreakVoidNode(self: *Writer, stream: anytype, inst: Inst.Index) !void { + const inst_data = self.code.instructions.items(.data)[inst].break_void_node; + try self.writeInstIndex(stream, inst_data.block_inst); + try stream.writeAll(") "); + try self.writeSrc(stream, inst_data.src()); + } + fn writeUnreachable(self: *Writer, stream: anytype, inst: Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[inst].@"unreachable"; const safety_str = if (inst_data.safety) "safe" else "unsafe"; @@ -1686,12 +1757,12 @@ const Writer = struct { var_args: bool, cc: Inst.Ref, ) (@TypeOf(stream).Error || error{OutOfMemory})!void { - try stream.writeAll("("); + try stream.writeAll("["); for (param_types) |param_type, i| { if (i != 0) try stream.writeAll(", "); try self.writeInstRef(stream, param_type); } - try stream.writeAll("), "); + try stream.writeAll("], "); try self.writeInstRef(stream, ret_ty); try self.writeOptionalInstRef(stream, ", cc=", cc); try self.writeFlag(stream, ", var_args", var_args); @@ -1707,7 +1778,7 @@ const Writer = struct { try stream.print("\"{}\")", .{std.zig.fmtEscapes(str)}); } - fn writeInstRef(self: *Writer, stream: anytype, inst: Inst.Index) !void { + fn writeInstRef(self: *Writer, stream: anytype, inst: Inst.Ref) !void { var i: usize = inst; if (i < const_inst_list.len) { @@ -1720,7 +1791,11 @@ const Writer = struct { } i -= self.param_count; - return stream.print("%{d}", .{i}); + return self.writeInstIndex(stream, @intCast(Inst.Index, i)); + } + + fn writeInstIndex(self: *Writer, stream: anytype, inst: Inst.Index) !void { + return stream.print("%{d}", .{inst}); } fn writeOptionalInstRef( |
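
The `setCondBrPayload` / `writePlNodeCondBr` pair above comes down to one layout decision: the condbr instruction's `pl_node.payload_index` points into the shared `extra` array at a three-word `zir.Inst.CondBr` header (condition, then_body_len, else_body_len), and the then-body and else-body instruction indices trail immediately after it. A minimal encode/decode sketch of that layout, written in the same vintage of Zig as the diff (two-argument `@intCast`); the helpers `addCondBrPayload` and `condBrBodies`, the fixed-size buffer, and the sample values are illustrative stand-ins rather than compiler code:

```zig
const std = @import("std");
const assert = std.debug.assert;

const Ref = u32;
const Index = u32;

/// Append the zir.Inst.CondBr header (condition, then_body_len, else_body_len)
/// to `extra`, followed by the then-body and else-body instruction indices,
/// and return the payload index that the condbr's pl_node data would store.
/// `extra` and `len` stand in for the compiler's managed extra array.
fn addCondBrPayload(
    extra: []u32,
    len: *u32,
    cond: Ref,
    then_body: []const Index,
    else_body: []const Index,
) u32 {
    const payload_index = len.*;
    extra[len.*] = cond;
    extra[len.* + 1] = @intCast(u32, then_body.len);
    extra[len.* + 2] = @intCast(u32, else_body.len);
    len.* += 3;
    for (then_body) |inst| {
        extra[len.*] = inst;
        len.* += 1;
    }
    for (else_body) |inst| {
        extra[len.*] = inst;
        len.* += 1;
    }
    return payload_index;
}

/// Mirror of what writePlNodeCondBr does with code.extraData + code.extra.
fn condBrBodies(extra: []const u32, payload_index: u32) struct {
    condition: Ref,
    then_body: []const Index,
    else_body: []const Index,
} {
    const then_len = extra[payload_index + 1];
    const else_len = extra[payload_index + 2];
    const end = payload_index + 3; // the CondBr header is three u32s
    return .{
        .condition = extra[payload_index],
        .then_body = extra[end..][0..then_len],
        .else_body = extra[end + then_len ..][0..else_len],
    };
}

pub fn main() void {
    var extra: [16]u32 = undefined;
    var extra_len: u32 = 0;

    const idx = addCondBrPayload(&extra, &extra_len, 42, &[_]Index{ 7, 8 }, &[_]Index{9});
    const cb = condBrBodies(extra[0..extra_len], idx);
    assert(cb.condition == 42);
    assert(cb.then_body.len == 2 and cb.then_body[1] == 8);
    assert(cb.else_body.len == 1 and cb.else_body[0] == 9);
}
```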
