author     Andrew Kelley <andrew@ziglang.org>    2021-03-19 23:06:19 -0700
committer  Andrew Kelley <andrew@ziglang.org>    2021-03-19 23:15:18 -0700
commit     56677f2f2da41af5999b84b7f740d7bc463d1032 (patch)
tree       acc501152db65974b9e381d72de6c6dc385ad667 /src
parent     937c43ddf1297f355cc535adf3ec08f9f741b6c8 (diff)
astgen: support blocks
We are now passing this test:

```zig
export fn _start() noreturn {}
```

```
test.zig:1:30: error: expected noreturn, found void
```

I ran into an issue where we get an integer overflow trying to compute
node index offsets from the containing Decl. The problem is that the
parser adds the Decl node after adding the child nodes. For some things
it is easy to reserve the node index and then set it later; for this
case, however, it is not a trivial code change, because whether we want
to add a new node at all depends on the tokens seen after parsing the
decl.

Possible strategies here:

1. Rework the parser code to make sure that Decl nodes come before
   their child nodes in the AST node array.
2. Use signed integers for Decl node offsets.
3. Just flip the order of subtraction and addition: expect the Decl
   node index to be greater than the child node indexes.

I opted for (3) because it seems like the simplest thing to do. We'll
want to unify the logic for computing the offsets, though, because if
the logic gets repeated, it will probably get repeated wrong.
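Here is a minimal sketch of the failure mode and the fix in (3). The
function name and values are hypothetical, not the compiler's code;
`ast.Node.Index` is a `u32`, and the parser appends the Decl node after
its children, so `decl_node > child_node` in the failing case:

```zig
const std = @import("std");

// Hypothetical helper illustrating strategy (3). Before the change, the
// subtraction was `child_node - decl_node`, which underflows (a safety
// panic in Debug builds) because child_node is the smaller index.
fn nodeOffset(decl_node: u32, child_node: u32) u32 {
    // Flipped operands: the Decl node index is expected to be greater
    // than its children's indexes, so this cannot underflow.
    return decl_node - child_node;
}

pub fn main() void {
    const decl_node: u32 = 10; // added last by the parser
    const child_node: u32 = 7;
    std.debug.print("offset = {d}\n", .{nodeOffset(decl_node, child_node)});
}
```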
Diffstat (limited to 'src')
-rw-r--r--  src/Compilation.zig    4
-rw-r--r--  src/Module.zig       242
-rw-r--r--  src/Sema.zig          60
-rw-r--r--  src/astgen.zig       114
-rw-r--r--  src/zir.zig           40
5 files changed, 375 insertions(+), 85 deletions(-)
diff --git a/src/Compilation.zig b/src/Compilation.zig
index 41acd04ef4..30fcdefc99 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -317,7 +317,7 @@ pub const AllErrors = struct {
for (notes) |*note, i| {
const module_note = module_err_msg.notes[i];
const source = try module_note.src_loc.fileScope().getSource(module);
- const byte_offset = try module_note.src_loc.byteOffset(module);
+ const byte_offset = try module_note.src_loc.byteOffset();
const loc = std.zig.findLineColumn(source, byte_offset);
const sub_file_path = module_note.src_loc.fileScope().sub_file_path;
note.* = .{
@@ -331,7 +331,7 @@ pub const AllErrors = struct {
};
}
const source = try module_err_msg.src_loc.fileScope().getSource(module);
- const byte_offset = try module_err_msg.src_loc.byteOffset(module);
+ const byte_offset = try module_err_msg.src_loc.byteOffset();
const loc = std.zig.findLineColumn(source, byte_offset);
const sub_file_path = module_err_msg.src_loc.fileScope().sub_file_path;
try errors.append(.{
diff --git a/src/Module.zig b/src/Module.zig
index 050b634180..30b454b12d 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -241,6 +241,10 @@ pub const Decl = struct {
return .{ .token_offset = token_index - decl.srcToken() };
}
+ pub fn nodeSrcLoc(decl: *Decl, node_index: ast.Node.Index) LazySrcLoc {
+ return .{ .node_offset = node_index - decl.srcNode() };
+ }
+
pub fn srcLoc(decl: *Decl) SrcLoc {
return .{
.container = .{ .decl = decl },
@@ -1003,10 +1007,14 @@ pub const Scope = struct {
};
}
- pub fn tokSrcLoc(gz: *GenZir, token_index: ast.TokenIndex) LazySrcLoc {
+ pub fn tokSrcLoc(gz: GenZir, token_index: ast.TokenIndex) LazySrcLoc {
return gz.zir_code.decl.tokSrcLoc(token_index);
}
+ pub fn nodeSrcLoc(gz: GenZir, node_index: ast.Node.Index) LazySrcLoc {
+ return gz.zir_code.decl.nodeSrcLoc(node_index);
+ }
+
pub fn addFnTypeCc(gz: *GenZir, tag: zir.Inst.Tag, args: struct {
param_types: []const zir.Inst.Ref,
ret_ty: zir.Inst.Ref,
@@ -1092,6 +1100,30 @@ pub const Scope = struct {
});
}
+ pub fn addPlNode(
+ gz: *GenZir,
+ tag: zir.Inst.Tag,
+ /// Absolute node index. This function does the conversion to offset from Decl.
+ abs_node_index: ast.Node.Index,
+ extra: anytype,
+ ) !zir.Inst.Ref {
+ const gpa = gz.zir_code.gpa;
+ try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
+ try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
+
+ const payload_index = try gz.zir_code.addExtra(extra);
+ const new_index = @intCast(zir.Inst.Index, gz.zir_code.instructions.len);
+ gz.zir_code.instructions.appendAssumeCapacity(.{
+ .tag = tag,
+ .data = .{ .pl_node = .{
+ .src_node = gz.zir_code.decl.srcNode() - abs_node_index,
+ .payload_index = payload_index,
+ } },
+ });
+ gz.instructions.appendAssumeCapacity(new_index);
+ return new_index + gz.zir_code.ref_start_index;
+ }
+
pub fn addUnTok(
gz: *GenZir,
tag: zir.Inst.Tag,
@@ -1165,6 +1197,21 @@ pub const Scope = struct {
});
}
+ /// Note that this returns a `zir.Inst.Index` not a ref.
+ /// Does *not* append the block instruction to the scope.
+ /// Leaves the `payload_index` field undefined.
+ pub fn addBlock(gz: *GenZir, tag: zir.Inst.Tag, node: ast.Node.Index) !zir.Inst.Index {
+ const new_index = @intCast(zir.Inst.Index, gz.zir_code.instructions.len);
+ try gz.zir_code.instructions.append(gz.zir_code.gpa, .{
+ .tag = tag,
+ .data = .{ .pl_node = .{
+ .src_node = node - gz.zir_code.decl.srcNode(),
+ .payload_index = undefined,
+ } },
+ });
+ return new_index;
+ }
+
fn add(gz: *GenZir, inst: zir.Inst) !zir.Inst.Ref {
const gpa = gz.zir_code.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
@@ -1188,6 +1235,8 @@ pub const Scope = struct {
gen_zir: *GenZir,
name: []const u8,
inst: zir.Inst.Index,
+ /// Source location of the corresponding variable declaration.
+ src: LazySrcLoc,
};
/// This could be a `const` or `var` local. It has a pointer instead of a value.
@@ -1201,6 +1250,8 @@ pub const Scope = struct {
gen_zir: *GenZir,
name: []const u8,
ptr: zir.Inst.Index,
+ /// Source location of the corresponding variable declaration.
+ src: LazySrcLoc,
};
pub const Nosuspend = struct {
@@ -1246,6 +1297,169 @@ pub const WipZirCode = struct {
return result;
}
+ /// Returns `true` if and only if the instruction *always* has a void type, or
+ /// *always* has a NoReturn type. Function calls return false because
+ /// the answer depends on their type.
+ /// This is used to elide unnecessary `ensure_result_used` instructions.
+ pub fn isVoidOrNoReturn(wzc: WipZirCode, inst_ref: zir.Inst.Ref) bool {
+ if (inst_ref >= wzc.ref_start_index) {
+ const inst = inst_ref - wzc.ref_start_index;
+ const tags = wzc.instructions.items(.tag);
+ switch (tags[inst]) {
+ .@"const" => {
+ const tv = wzc.instructions.items(.data)[inst].@"const";
+ return switch (tv.ty.zigTypeTag()) {
+ .NoReturn, .Void => true,
+ else => false,
+ };
+ },
+
+ .add,
+ .addwrap,
+ .alloc,
+ .alloc_mut,
+ .alloc_inferred,
+ .alloc_inferred_mut,
+ .array_cat,
+ .array_mul,
+ .array_type,
+ .array_type_sentinel,
+ .indexable_ptr_len,
+ .as,
+ .as_node,
+ .@"asm",
+ .asm_volatile,
+ .bit_and,
+ .bitcast,
+ .bitcast_ref,
+ .bitcast_result_ptr,
+ .bit_or,
+ .block,
+ .block_flat,
+ .block_comptime,
+ .block_comptime_flat,
+ .bool_not,
+ .bool_and,
+ .bool_or,
+ .call,
+ .call_async_kw,
+ .call_no_async,
+ .call_compile_time,
+ .call_none,
+ .cmp_lt,
+ .cmp_lte,
+ .cmp_eq,
+ .cmp_gte,
+ .cmp_gt,
+ .cmp_neq,
+ .coerce_result_ptr,
+ .decl_ref,
+ .decl_val,
+ .deref_node,
+ .div,
+ .elem_ptr,
+ .elem_val,
+ .elem_ptr_node,
+ .elem_val_node,
+ .floatcast,
+ .field_ptr,
+ .field_val,
+ .field_ptr_named,
+ .field_val_named,
+ .fn_type,
+ .fn_type_var_args,
+ .fn_type_cc,
+ .fn_type_cc_var_args,
+ .int,
+ .intcast,
+ .int_type,
+ .is_non_null,
+ .is_null,
+ .is_non_null_ptr,
+ .is_null_ptr,
+ .is_err,
+ .is_err_ptr,
+ .mod_rem,
+ .mul,
+ .mulwrap,
+ .param_type,
+ .ptrtoint,
+ .ref,
+ .ret_ptr,
+ .ret_type,
+ .shl,
+ .shr,
+ .store,
+ .store_to_block_ptr,
+ .store_to_inferred_ptr,
+ .str,
+ .sub,
+ .subwrap,
+ .typeof,
+ .xor,
+ .optional_type,
+ .optional_type_from_ptr_elem,
+ .optional_payload_safe,
+ .optional_payload_unsafe,
+ .optional_payload_safe_ptr,
+ .optional_payload_unsafe_ptr,
+ .err_union_payload_safe,
+ .err_union_payload_unsafe,
+ .err_union_payload_safe_ptr,
+ .err_union_payload_unsafe_ptr,
+ .err_union_code,
+ .err_union_code_ptr,
+ .ptr_type,
+ .ptr_type_simple,
+ .enum_literal,
+ .enum_literal_small,
+ .merge_error_sets,
+ .anyframe_type,
+ .error_union_type,
+ .bit_not,
+ .error_set,
+ .error_value,
+ .slice_start,
+ .slice_end,
+ .slice_sentinel,
+ .import,
+ .typeof_peer,
+ .resolve_inferred_alloc,
+ .@"resume",
+ .@"await",
+ .nosuspend_await,
+ => return false,
+
+ .breakpoint,
+ .dbg_stmt_node,
+ .ensure_result_used,
+ .ensure_result_non_error,
+ .set_eval_branch_quota,
+ .compile_log,
+ .ensure_err_payload_void,
+ .@"break",
+ .break_void_tok,
+ .condbr,
+ .compile_error,
+ .ret_node,
+ .ret_tok,
+ .ret_coerce,
+ .unreachable_unsafe,
+ .unreachable_safe,
+ .loop,
+ .suspend_block,
+ .suspend_block_one,
+ .elided,
+ => return true,
+ }
+ }
+ return switch (inst_ref) {
+ @enumToInt(zir.Const.unused) => unreachable,
+ @enumToInt(zir.Const.void_value), @enumToInt(zir.Const.unreachable_value) => true,
+ else => false,
+ };
+ }
+
pub fn deinit(wzc: *WipZirCode) void {
wzc.instructions.deinit(wzc.gpa);
wzc.extra.deinit(wzc.gpa);
@@ -1348,7 +1562,7 @@ pub const SrcLoc = struct {
};
}
- pub fn byteOffset(src_loc: SrcLoc, mod: *Module) !u32 {
+ pub fn byteOffset(src_loc: SrcLoc) !u32 {
switch (src_loc.lazy) {
.unneeded => unreachable,
.todo => unreachable,
@@ -1373,14 +1587,14 @@ pub const SrcLoc = struct {
.token_offset => |tok_off| {
const decl = src_loc.container.decl;
const tok_index = decl.srcToken() + tok_off;
- const tree = src_loc.container.file_scope.base.tree();
+ const tree = decl.container.file_scope.base.tree();
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
.node_offset => |node_off| {
const decl = src_loc.container.decl;
const node_index = decl.srcNode() + node_off;
- const tree = src_loc.container.file_scope.base.tree();
+ const tree = decl.container.file_scope.base.tree();
const tok_index = tree.firstToken(node_index);
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
@@ -1826,7 +2040,7 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
const code = try gen_scope.finish();
if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
- code.dump(mod.gpa, "comptime_block", decl.name, 0) catch {};
+ code.dump(mod.gpa, "comptime_block", &gen_scope.base, 0) catch {};
}
break :blk code;
};
@@ -2047,7 +2261,7 @@ fn astgenAndSemaFn(
const fn_type_code = try fn_type_scope.finish();
if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
- fn_type_code.dump(mod.gpa, "fn_type", decl.name, 0) catch {};
+ fn_type_code.dump(mod.gpa, "fn_type", &fn_type_scope.base, 0) catch {};
}
var fn_type_sema: Sema = .{
@@ -2146,6 +2360,7 @@ fn astgenAndSemaFn(
.name = param_name,
// Implicit const list first, then implicit arg list.
.inst = @intCast(u32, zir.const_inst_list.len + i),
+ .src = decl.tokSrcLoc(name_token),
};
params_scope = &sub_scope.base;
@@ -2164,13 +2379,16 @@ fn astgenAndSemaFn(
!wip_zir_code.instructions.items(.tag)[gen_scope.instructions.items.len - 1]
.isNoReturn())
{
- const void_operand = @enumToInt(zir.Const.void_value);
- _ = try gen_scope.addUnTok(.ret_tok, void_operand, tree.lastToken(body_node));
+ // astgen uses result location semantics to coerce return operands.
+ // Since we are adding the return instruction here, we must handle the coercion.
+ // We do this by using the `ret_coerce` instruction.
+ const void_inst: zir.Inst.Ref = @enumToInt(zir.Const.void_value);
+ _ = try gen_scope.addUnTok(.ret_coerce, void_inst, tree.lastToken(body_node));
}
const code = try gen_scope.finish();
if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
- code.dump(mod.gpa, "fn_body", decl.name, param_count) catch {};
+ code.dump(mod.gpa, "fn_body", &gen_scope.base, param_count) catch {};
}
break :blk code;
@@ -2347,7 +2565,7 @@ fn astgenAndSemaVarDecl(
);
const code = try gen_scope.finish();
if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
- code.dump(mod.gpa, "var_init", decl.name, 0) catch {};
+ code.dump(mod.gpa, "var_init", &gen_scope.base, 0) catch {};
}
var sema: Sema = .{
@@ -2409,7 +2627,7 @@ fn astgenAndSemaVarDecl(
const var_type = try astgen.typeExpr(mod, &type_scope.base, var_decl.ast.type_node);
const code = try type_scope.finish();
if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
- code.dump(mod.gpa, "var_type", decl.name, 0) catch {};
+ code.dump(mod.gpa, "var_type", &type_scope.base, 0) catch {};
}
var sema: Sema = .{
@@ -3475,7 +3693,7 @@ pub fn failNode(
args: anytype,
) InnerError {
const decl_node = scope.srcDecl().?.srcNode();
- const src: LazySrcLoc = .{ .node_offset = node_index - decl_node };
+ const src: LazySrcLoc = .{ .node_offset = decl_node - node_index };
return mod.fail(scope, src, format, args);
}
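The `isVoidOrNoReturn` helper above first checks whether a ref points
past the constant list before mapping it back to an instruction index.
A standalone sketch of that `Ref`/`Index` mapping follows; the value of
`ref_start_index` is assumed for illustration (in the compiler it is
derived from the length of the pre-defined constant list):

```zig
const std = @import("std");

// Assumed value; stands in for WipZirCode.ref_start_index.
const ref_start_index: u32 = 32;

// A Ref at or above ref_start_index names an instruction in the
// current WipZirCode; anything below it names a pre-defined constant.
fn toRef(inst_index: u32) u32 {
    return inst_index + ref_start_index;
}

fn toIndex(inst_ref: u32) ?u32 {
    if (inst_ref >= ref_start_index) return inst_ref - ref_start_index;
    return null; // a zir.Const, not an instruction
}

pub fn main() void {
    const index: u32 = 5;
    const ref = toRef(index);
    std.debug.print("index {d} -> ref {d} -> index {d}\n", .{
        index, ref, toIndex(ref).?,
    });
}
```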
diff --git a/src/Sema.zig b/src/Sema.zig
index ed3b441e61..ba07da3fdf 100644
--- a/src/Sema.zig
+++ b/src/Sema.zig
@@ -108,6 +108,7 @@ pub fn analyzeBody(sema: *Sema, block: *Scope.Block, body: []const zir.Inst.Inde
.dbg_stmt_node => try sema.zirDbgStmtNode(block, zir_inst),
.decl_ref => try sema.zirDeclRef(block, zir_inst),
.decl_val => try sema.zirDeclVal(block, zir_inst),
+ .elided => continue,
.ensure_result_used => try sema.zirEnsureResultUsed(block, zir_inst),
.ensure_result_non_error => try sema.zirEnsureResultNonError(block, zir_inst),
.indexable_ptr_len => try sema.zirIndexablePtrLen(block, zir_inst),
@@ -133,11 +134,13 @@ pub fn analyzeBody(sema: *Sema, block: *Scope.Block, body: []const zir.Inst.Inde
.field_val_named => try sema.zirFieldValNamed(block, zir_inst),
.deref_node => try sema.zirDerefNode(block, zir_inst),
.as => try sema.zirAs(block, zir_inst),
+ .as_node => try sema.zirAsNode(block, zir_inst),
.@"asm" => try sema.zirAsm(block, zir_inst, false),
.asm_volatile => try sema.zirAsm(block, zir_inst, true),
.unreachable_safe => try sema.zirUnreachable(block, zir_inst, true),
.unreachable_unsafe => try sema.zirUnreachable(block, zir_inst, false),
- .ret_tok => try sema.zirRetTok(block, zir_inst),
+ .ret_coerce => try sema.zirRetTok(block, zir_inst, true),
+ .ret_tok => try sema.zirRetTok(block, zir_inst, false),
.ret_node => try sema.zirRetNode(block, zir_inst),
.fn_type => try sema.zirFnType(block, zir_inst, false),
.fn_type_cc => try sema.zirFnTypeCc(block, zir_inst, false),
@@ -1004,7 +1007,7 @@ fn zirDbgStmtNode(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerE
const src_node = sema.code.instructions.items(.data)[inst].node;
const src: LazySrcLoc = .{ .node_offset = src_node };
const src_loc = src.toSrcLoc(&block.base);
- const abs_byte_off = try src_loc.byteOffset(sema.mod);
+ const abs_byte_off = try src_loc.byteOffset();
return block.addDbgStmt(src, abs_byte_off);
}
@@ -1767,9 +1770,29 @@ fn zirAs(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Ins
defer tracy.end();
const bin_inst = sema.code.instructions.items(.data)[inst].bin;
- const dest_type = try sema.resolveType(block, .todo, bin_inst.lhs);
- const tzir_inst = try sema.resolveInst(bin_inst.rhs);
- return sema.coerce(block, dest_type, tzir_inst, .todo);
+ return sema.analyzeAs(block, .unneeded, bin_inst.lhs, bin_inst.rhs);
+}
+
+fn zirAsNode(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
+ const src = inst_data.src();
+ const extra = sema.code.extraData(zir.Inst.As, inst_data.payload_index).data;
+ return sema.analyzeAs(block, src, extra.dest_type, extra.operand);
+}
+
+fn analyzeAs(
+ sema: *Sema,
+ block: *Scope.Block,
+ src: LazySrcLoc,
+ zir_dest_type: zir.Inst.Ref,
+ zir_operand: zir.Inst.Ref,
+) InnerError!*Inst {
+ const dest_type = try sema.resolveType(block, src, zir_dest_type);
+ const operand = try sema.resolveInst(zir_operand);
+ return sema.coerce(block, dest_type, operand, src);
}
fn zirPtrtoint(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
@@ -2850,7 +2873,12 @@ fn zirUnreachable(
}
}
-fn zirRetTok(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
+fn zirRetTok(
+ sema: *Sema,
+ block: *Scope.Block,
+ inst: zir.Inst.Index,
+ need_coercion: bool,
+) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -2858,7 +2886,7 @@ fn zirRetTok(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!
const operand = try sema.resolveInst(inst_data.operand);
const src = inst_data.src();
- return sema.analyzeRet(block, operand, src);
+ return sema.analyzeRet(block, operand, src, need_coercion);
}
fn zirRetNode(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
@@ -2869,10 +2897,16 @@ fn zirRetNode(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError
const operand = try sema.resolveInst(inst_data.operand);
const src = inst_data.src();
- return sema.analyzeRet(block, operand, src);
+ return sema.analyzeRet(block, operand, src, false);
}
-fn analyzeRet(sema: *Sema, block: *Scope.Block, operand: *Inst, src: LazySrcLoc) InnerError!*Inst {
+fn analyzeRet(
+ sema: *Sema,
+ block: *Scope.Block,
+ operand: *Inst,
+ src: LazySrcLoc,
+ need_coercion: bool,
+) InnerError!*Inst {
if (block.inlining) |inlining| {
// We are inlining a function call; rewrite the `ret` as a `break`.
try inlining.merges.results.append(sema.gpa, operand);
@@ -2880,7 +2914,13 @@ fn analyzeRet(sema: *Sema, block: *Scope.Block, operand: *Inst, src: LazySrcLoc)
return &br.base;
}
- try sema.requireFunctionBlock(block, src);
+ if (need_coercion) {
+ if (sema.func) |func| {
+ const fn_ty = func.owner_decl.typed_value.most_recent.typed_value.ty;
+ const casted_operand = try sema.coerce(block, fn_ty.fnReturnType(), operand, src);
+ return block.addUnOp(src, Type.initTag(.noreturn), .ret, casted_operand);
+ }
+ }
return block.addUnOp(src, Type.initTag(.noreturn), .ret, operand);
}
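The `need_coercion` branch above is what produces the error from the
commit message: the implicit return at the end of an empty body carries
the void value, and `ret_coerce` asks Sema to coerce it to the declared
return type. A toy model of that check (the enum and function here are
stand-ins, not the compiler's `Type` or `coerce`):

```zig
const std = @import("std");

// Toy stand-in for the return-type coercion done by analyzeRet when
// need_coercion is set.
const TypeTag = enum { void_type, noreturn_type };

fn coerceRet(dest: TypeTag, operand: TypeTag) !void {
    // coerce(noreturn, void) fails, which surfaces as
    // "test.zig:1:30: error: expected noreturn, found void".
    if (dest == .noreturn_type and operand != .noreturn_type)
        return error.ExpectedNoreturnFoundVoid;
}

pub fn main() void {
    // Models `export fn _start() noreturn {}` from the commit message.
    coerceRet(.noreturn_type, .void_type) catch |err| {
        std.debug.print("error: {s}\n", .{@errorName(err)});
    };
}
```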
diff --git a/src/astgen.zig b/src/astgen.zig
index d93ffa1966..428a5c7ff8 100644
--- a/src/astgen.zig
+++ b/src/astgen.zig
@@ -497,7 +497,6 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
}
},
.block_two, .block_two_semicolon => {
- if (true) @panic("TODO update for zir-memory-layout");
const statements = [2]ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
if (node_datas[node].lhs == 0) {
return blockExpr(mod, scope, rl, node, statements[0..0]);
@@ -508,7 +507,6 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
}
},
.block, .block_semicolon => {
- if (true) @panic("TODO update for zir-memory-layout");
const statements = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs];
return blockExpr(mod, scope, rl, node, statements);
},
@@ -808,7 +806,7 @@ fn breakExpr(
},
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
- .gen_suspend => scope = scope.cast(Scope.GenZIR).?.parent,
+ .gen_suspend => scope = scope.cast(Scope.GenZir).?.parent,
.gen_nosuspend => scope = scope.cast(Scope.Nosuspend).?.parent,
else => if (break_label != 0) {
const label_name = try mod.identifierTokenString(parent_scope, break_label);
@@ -864,7 +862,7 @@ fn continueExpr(
},
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
- .gen_suspend => scope = scope.cast(Scope.GenZIR).?.parent,
+ .gen_suspend => scope = scope.cast(Scope.GenZir).?.parent,
.gen_nosuspend => scope = scope.cast(Scope.Nosuspend).?.parent,
else => if (break_label != 0) {
const label_name = try mod.identifierTokenString(parent_scope, break_label);
@@ -939,7 +937,7 @@ fn checkLabelRedefinition(mod: *Module, parent_scope: *Scope, label: ast.TokenIn
},
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
- .gen_suspend => scope = scope.cast(Scope.GenZIR).?.parent,
+ .gen_suspend => scope = scope.cast(Scope.GenZir).?.parent,
.gen_nosuspend => scope = scope.cast(Scope.Nosuspend).?.parent,
else => return,
}
@@ -971,25 +969,14 @@ fn labeledBlockExpr(
try checkLabelRedefinition(mod, parent_scope, label_token);
- // Create the Block ZIR instruction so that we can put it into the GenZir struct
+ // Reserve the Block ZIR instruction index so that we can put it into the GenZir struct
// so that break statements can reference it.
- const gen_zir = parent_scope.getGenZir();
- const block_inst = try gen_zir.arena.create(zir.Inst.Block);
- block_inst.* = .{
- .base = .{
- .tag = zir_tag,
- .src = src,
- },
- .positionals = .{
- .body = .{ .instructions = undefined },
- },
- .kw_args = .{},
- };
+ const gz = parent_scope.getGenZir();
+ const block_inst = try gz.addBlock(zir_tag, block_node);
var block_scope: Scope.GenZir = .{
.parent = parent_scope,
- .decl = parent_scope.ownerDecl().?,
- .arena = gen_zir.arena,
+ .zir_code = gz.zir_code,
.force_comptime = parent_scope.isComptime(),
.instructions = .{},
// TODO @as here is working around a stage1 miscompilation bug :(
@@ -1009,35 +996,40 @@ fn labeledBlockExpr(
return mod.failTok(parent_scope, label_token, "unused block label", .{});
}
- try gen_zir.instructions.append(mod.gpa, &block_inst.base);
+ try gz.instructions.append(mod.gpa, block_inst);
+
+ const zir_tags = gz.zir_code.instructions.items(.tag);
+ const zir_datas = gz.zir_code.instructions.items(.data);
const strat = rlStrategy(rl, &block_scope);
switch (strat.tag) {
.break_void => {
// The code took advantage of the result location as a pointer.
- // Turn the break instructions into break_void instructions.
+ // Turn the break instruction operands into void.
for (block_scope.labeled_breaks.items) |br| {
- br.base.tag = .break_void;
+ zir_datas[br].bin.rhs = 0;
}
// TODO technically not needed since we changed the tag to break_void but
// would be better still to elide the ones that are in this list.
- try copyBodyNoEliding(&block_inst.positionals.body, block_scope);
+ try copyBodyNoEliding(block_inst, block_scope);
- return &block_inst.base;
+ return gz.zir_code.ref_start_index + block_inst;
},
.break_operand => {
// All break operands are values that did not use the result location pointer.
if (strat.elide_store_to_block_ptr_instructions) {
for (block_scope.labeled_store_to_block_ptr_list.items) |inst| {
- inst.base.tag = .void_value;
+ zir_tags[inst] = .elided;
+ zir_datas[inst] = undefined;
}
- // TODO technically not needed since we changed the tag to void_value but
+ // TODO technically not needed since we changed the tag to elided but
// would be better still to elide the ones that are in this list.
}
- try copyBodyNoEliding(&block_inst.positionals.body, block_scope);
+ try copyBodyNoEliding(block_inst, block_scope);
+ const block_ref = gz.zir_code.ref_start_index + block_inst;
switch (rl) {
- .ref => return &block_inst.base,
- else => return rvalue(mod, parent_scope, rl, &block_inst.base),
+ .ref => return block_ref,
+ else => return rvalue(mod, parent_scope, rl, block_ref, block_node),
}
},
}
@@ -1057,15 +1049,16 @@ fn blockExprStmts(
var block_arena = std.heap.ArenaAllocator.init(mod.gpa);
defer block_arena.deinit();
+ const gz = parent_scope.getGenZir();
+
var scope = parent_scope;
for (statements) |statement| {
- const src = token_starts[tree.firstToken(statement)];
- _ = try addZIRNoOp(mod, scope, src, .dbg_stmt);
+ _ = try gz.addNode(.dbg_stmt_node, statement);
switch (node_tags[statement]) {
- .global_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.globalVarDecl(statement)),
- .local_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.localVarDecl(statement)),
- .simple_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.simpleVarDecl(statement)),
- .aligned_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.alignedVarDecl(statement)),
+ .global_var_decl => scope = try varDecl(mod, scope, statement, &block_arena.allocator, tree.globalVarDecl(statement)),
+ .local_var_decl => scope = try varDecl(mod, scope, statement, &block_arena.allocator, tree.localVarDecl(statement)),
+ .simple_var_decl => scope = try varDecl(mod, scope, statement, &block_arena.allocator, tree.simpleVarDecl(statement)),
+ .aligned_var_decl => scope = try varDecl(mod, scope, statement, &block_arena.allocator, tree.alignedVarDecl(statement)),
.assign => try assign(mod, scope, statement),
.assign_bit_and => try assignOp(mod, scope, statement, .bit_and),
@@ -1084,8 +1077,8 @@ fn blockExprStmts(
else => {
const possibly_unused_result = try expr(mod, scope, .none, statement);
- if (!possibly_unused_result.tag.isNoReturn()) {
- _ = try addZIRUnOp(mod, scope, src, .ensure_result_used, possibly_unused_result);
+ if (!gz.zir_code.isVoidOrNoReturn(possibly_unused_result)) {
+ _ = try gz.addUnNode(.ensure_result_used, possibly_unused_result, statement);
}
},
}
@@ -1095,22 +1088,24 @@ fn blockExprStmts(
fn varDecl(
mod: *Module,
scope: *Scope,
+ node: ast.Node.Index,
block_arena: *Allocator,
var_decl: ast.full.VarDecl,
) InnerError!*Scope {
+ if (true) @panic("TODO update for zir-memory-layout");
+
if (var_decl.comptime_token) |comptime_token| {
return mod.failTok(scope, comptime_token, "TODO implement comptime locals", .{});
}
if (var_decl.ast.align_node != 0) {
return mod.failNode(scope, var_decl.ast.align_node, "TODO implement alignment on locals", .{});
}
+ const gz = scope.getGenZir();
const tree = scope.tree();
- const main_tokens = tree.nodes.items(.main_token);
- const token_starts = tree.tokens.items(.start);
const token_tags = tree.tokens.items(.tag);
const name_token = var_decl.ast.mut_token + 1;
- const name_src = token_starts[name_token];
+ const name_src = gz.tokSrcLoc(name_token);
const ident_name = try mod.identifierTokenString(scope, name_token);
// Local variables shadowing detection, including function parameters.
@@ -1125,7 +1120,7 @@ fn varDecl(
ident_name,
});
errdefer msg.destroy(mod.gpa);
- try mod.errNote(scope, local_val.inst.src, msg, "previous definition is here", .{});
+ try mod.errNote(scope, local_val.src, msg, "previous definition is here", .{});
break :msg msg;
};
return mod.failWithOwnedErrorMsg(scope, msg);
@@ -1140,7 +1135,7 @@ fn varDecl(
ident_name,
});
errdefer msg.destroy(mod.gpa);
- try mod.errNote(scope, local_ptr.ptr.src, msg, "previous definition is here", .{});
+ try mod.errNote(scope, local_ptr.src, msg, "previous definition is here", .{});
break :msg msg;
};
return mod.failWithOwnedErrorMsg(scope, msg);
@@ -1176,9 +1171,10 @@ fn varDecl(
const sub_scope = try block_arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = scope,
- .gen_zir = scope.getGenZir(),
+ .gen_zir = gz,
.name = ident_name,
.inst = init_inst,
+ .src = gz.nodeSrcLoc(node),
};
return &sub_scope.base;
}
@@ -1207,7 +1203,7 @@ fn varDecl(
}
const init_result_loc: ResultLoc = .{ .block_ptr = &init_scope };
const init_inst = try expr(mod, &init_scope.base, init_result_loc, var_decl.ast.init_node);
- const parent_zir = &scope.getGenZir().instructions;
+ const parent_zir = &gz.instructions;
if (init_scope.rvalue_rl_count == 1) {
// Result location pointer not used. We don't need an alloc for this
// const local, and type inference becomes trivial.
@@ -1231,7 +1227,7 @@ fn varDecl(
const sub_scope = try block_arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = scope,
- .gen_zir = scope.getGenZir(),
+ .gen_zir = gz,
.name = ident_name,
.inst = casted_init,
};
@@ -1258,7 +1254,7 @@ fn varDecl(
const sub_scope = try block_arena.create(Scope.LocalPtr);
sub_scope.* = .{
.parent = scope,
- .gen_zir = scope.getGenZir(),
+ .gen_zir = gz,
.name = ident_name,
.ptr = init_scope.rl_ptr.?,
};
@@ -1285,9 +1281,10 @@ fn varDecl(
const sub_scope = try block_arena.create(Scope.LocalPtr);
sub_scope.* = .{
.parent = scope,
- .gen_zir = scope.getGenZir(),
+ .gen_zir = gz,
.name = ident_name,
.ptr = var_data.alloc,
+ .src = gz.nodeSrcLoc(node),
};
return &sub_scope.base;
},
@@ -2078,10 +2075,10 @@ fn copyBodyWithElidedStoreBlockPtr(body: *zir.Body, scope: Module.Scope.GenZir)
assert(dst_index == body.instructions.len);
}
-fn copyBodyNoEliding(body: *zir.Body, scope: Module.Scope.GenZir) !void {
- body.* = .{
- .instructions = try scope.arena.dupe(zir.Inst.Ref, scope.instructions.items),
- };
+fn copyBodyNoEliding(block_inst: zir.Inst.Index, gz: Module.Scope.GenZir) !void {
+ const zir_datas = gz.zir_code.instructions.items(.data);
+ zir_datas[block_inst].pl_node.payload_index = @intCast(u32, gz.zir_code.extra.items.len);
+ try gz.zir_code.extra.appendSlice(gz.zir_code.gpa, gz.instructions.items);
}
fn whileExpr(
@@ -3515,7 +3512,7 @@ fn suspendExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!zir
return mod.failWithOwnedErrorMsg(scope, msg);
}
- var suspend_scope: Scope.GenZIR = .{
+ var suspend_scope: Scope.GenZir = .{
.base = .{ .tag = .gen_suspend },
.parent = scope,
.decl = scope.ownerDecl().?,
@@ -3864,7 +3861,10 @@ fn rvalue(
const src_token = tree.firstToken(src_node);
return gz.addUnTok(.ref, result, src_token);
},
- .ty => |ty_inst| return gz.addBin(.as, ty_inst, result),
+ .ty => |ty_inst| return gz.addPlNode(.as_node, src_node, zir.Inst.As{
+ .dest_type = ty_inst,
+ .operand = result,
+ }),
.ptr => |ptr_inst| {
_ = try gz.addBin(.store, ptr_inst, result);
return result;
@@ -3953,17 +3953,17 @@ fn setBlockResultLoc(block_scope: *Scope.GenZir, parent_rl: ResultLoc) void {
},
.inferred_ptr => |ptr| {
- block_scope.rl_ptr = &ptr.base;
+ block_scope.rl_ptr = ptr;
block_scope.break_result_loc = .{ .block_ptr = block_scope };
},
.bitcasted_ptr => |ptr| {
- block_scope.rl_ptr = &ptr.base;
+ block_scope.rl_ptr = ptr;
block_scope.break_result_loc = .{ .block_ptr = block_scope };
},
.block_ptr => |parent_block_scope| {
- block_scope.rl_ptr = parent_block_scope.rl_ptr.?;
+ block_scope.rl_ptr = parent_block_scope.rl_ptr;
block_scope.break_result_loc = .{ .block_ptr = block_scope };
},
}
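`copyBodyNoEliding` above no longer dupes the body into an arena: it
appends the block's instruction indexes to `extra` and records where
they start in the block's `pl_node.payload_index`. A standalone sketch
of that layout, with a fixed-size buffer standing in for
`zir_code.extra` and made-up instruction indexes:

```zig
const std = @import("std");

var extra_buf: [16]u32 = undefined; // stands in for zir_code.extra
var extra_len: usize = 0;

fn appendSlice(items: []const u32) void {
    for (items) |item| {
        extra_buf[extra_len] = item;
        extra_len += 1;
    }
}

pub fn main() void {
    // Hypothetical block body: the indexes collected in gz.instructions.
    const block_body = [_]u32{ 4, 5, 7 };
    // What gets written into the block's pl_node.payload_index field.
    const payload_index = extra_len;
    appendSlice(&block_body);
    std.debug.print("payload_index = {d}\n", .{payload_index});
    for (extra_buf[payload_index..extra_len]) |inst| {
        std.debug.print("  body inst {d}\n", .{inst});
    }
}
```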
diff --git a/src/zir.zig b/src/zir.zig
index 87e87b08a9..13ec67b5c0 100644
--- a/src/zir.zig
+++ b/src/zir.zig
@@ -72,7 +72,7 @@ pub const Code = struct {
code: Code,
gpa: *Allocator,
kind: []const u8,
- decl_name: [*:0]const u8,
+ scope: *Module.Scope,
param_count: usize,
) !void {
var arena = std.heap.ArenaAllocator.init(gpa);
@@ -81,11 +81,13 @@ pub const Code = struct {
var writer: Writer = .{
.gpa = gpa,
.arena = &arena.allocator,
+ .scope = scope,
.code = code,
.indent = 4,
.param_count = param_count,
};
+ const decl_name = scope.srcDecl().?.name;
const stderr = std.io.getStdErr().writer();
try stderr.print("ZIR {s} {s} {{\n", .{ kind, decl_name });
@@ -416,9 +418,12 @@ pub const Inst = struct {
/// error if the indexable object is not indexable.
/// Uses the `un_node` field. The AST node is the for loop node.
indexable_ptr_len,
- /// Type coercion.
+ /// Type coercion. No source location attached.
/// Uses the `bin` field.
as,
+ /// Type coercion to the function's return type.
+ /// Uses the `pl_node` field. Payload is `As`. AST node could be many things.
+ as_node,
/// Inline assembly. Non-volatile.
/// Uses the `pl_node` union field. Payload is `Asm`. AST node is the assembly node.
@"asm",
@@ -464,12 +469,14 @@ pub const Inst = struct {
/// Uses the `bin` field.
bool_or,
/// Return a value from a block.
- /// Uses the `bin` union field: `lhs` is `Ref` to the block, `rhs` is operand.
+ /// Uses the `bin` union field: `lhs` is `Index` to the block (*not* `Ref`!),
+ /// `rhs` is operand.
/// Uses the source information from previous instruction.
@"break",
/// Same as `break` but has source information in the form of a token, and
/// the operand is assumed to be the void value.
/// Uses the `un_tok` union field.
+ /// Note that the block operand is an `Index`, not a `Ref`.
break_void_tok,
/// Uses the `node` union field.
breakpoint,
@@ -543,6 +550,9 @@ pub const Inst = struct {
/// Same as `elem_val` except also stores a source location node.
/// Uses the `pl_node` union field. AST node is a[b] syntax. Payload is `Bin`.
elem_val_node,
+ /// This instruction has been deleted late in the astgen phase. It must
+ /// be ignored, and the corresponding `Data` is undefined.
+ elided,
/// Emits a compile error if the operand is not `void`.
/// Uses the `un_node` field.
ensure_result_used,
@@ -671,6 +681,9 @@ pub const Inst = struct {
/// Includes a token source location.
/// Uses the `un_tok` union field.
ret_tok,
+ /// Same as `ret_tok` except the operand needs to get coerced to the function's
+ /// return type.
+ ret_coerce,
/// Changes the maximum number of backwards branches that compile-time
/// code execution can use before giving up and making a compile error.
/// Uses the `un_node` union field.
@@ -704,6 +717,7 @@ pub const Inst = struct {
store,
/// Same as `store` but the type of the value being stored will be used to infer
/// the block type. The LHS is the pointer to store to.
+ /// Uses the `bin` union field.
store_to_block_ptr,
/// Same as `store` but the type of the value being stored will be used to infer
/// the pointer type.
@@ -854,6 +868,7 @@ pub const Inst = struct {
.array_type_sentinel,
.indexable_ptr_len,
.as,
+ .as_node,
.@"asm",
.asm_volatile,
.bit_and,
@@ -963,6 +978,7 @@ pub const Inst = struct {
.@"resume",
.@"await",
.nosuspend_await,
+ .elided,
=> false,
.@"break",
@@ -971,6 +987,7 @@ pub const Inst = struct {
.compile_error,
.ret_node,
.ret_tok,
+ .ret_coerce,
.unreachable_unsafe,
.unreachable_safe,
.loop,
@@ -1242,11 +1259,17 @@ pub const Inst = struct {
lhs: Ref,
field_name: Ref,
};
+
+ pub const As = struct {
+ dest_type: Ref,
+ operand: Ref,
+ };
};
const Writer = struct {
gpa: *Allocator,
arena: *Allocator,
+ scope: *Module.Scope,
code: Code,
indent: usize,
param_count: usize,
@@ -1325,6 +1348,7 @@ const Writer = struct {
.is_err_ptr,
.ref,
.ret_tok,
+ .ret_coerce,
.typeof,
.optional_type,
.optional_type_from_ptr_elem,
@@ -1348,6 +1372,7 @@ const Writer = struct {
.ptr_type => try self.writePtrType(stream, inst),
.int => try self.writeInt(stream, inst),
.str => try self.writeStr(stream, inst),
+ .elided => try stream.writeAll(")"),
.@"asm",
.asm_volatile,
@@ -1374,6 +1399,7 @@ const Writer = struct {
.slice_sentinel,
.typeof_peer,
.suspend_block,
+ .as_node,
=> try self.writePlNode(stream, inst),
.breakpoint,
@@ -1641,6 +1667,12 @@ const Writer = struct {
}
fn writeSrc(self: *Writer, stream: anytype, src: LazySrcLoc) !void {
- try stream.print("TODOsrc({s})", .{@tagName(src)});
+ const tree = self.scope.tree();
+ const src_loc = src.toSrcLoc(self.scope);
+ const abs_byte_off = try src_loc.byteOffset();
+ const delta_line = std.zig.findLineColumn(tree.source, abs_byte_off);
+ try stream.print("{s}:{d}:{d}", .{
+ @tagName(src), delta_line.line + 1, delta_line.column + 1,
+ });
}
};