aboutsummaryrefslogtreecommitdiff
path: root/lib/std
diff options
context:
space:
mode:
authormlugg <mlugg@mlugg.co.uk>2024-02-06 01:55:22 +0000
committermlugg <mlugg@mlugg.co.uk>2024-03-06 21:26:37 +0000
commita6ca20b9a1dfc7b6e8d004cb166c0714bb8db2db (patch)
tree8ecc7aae4a07f9e5a2fe1d2f2acfe0f40f5ea673 /lib/std
parent90ab8ea9e681a4ffac0b4dc500e3ec489014e12f (diff)
downloadzig-a6ca20b9a1dfc7b6e8d004cb166c0714bb8db2db.tar.gz
zig-a6ca20b9a1dfc7b6e8d004cb166c0714bb8db2db.zip
compiler: change representation of closures
This changes the representation of closures in Zir and Sema. Rather than a pair of instructions `closure_capture` and `closure_get`, the system now works as follows: * Each ZIR type declaration (`struct_decl` etc) contains a list of captures in the form of ZIR indices (or, for efficiency, direct references to parent captures). This is an ordered list; indexes into it are used to refer to captured values. * The `extended(closure_get)` ZIR instruction refers to a value in this list via a 16-bit index (limiting this index to 16 bits allows us to store this in `extended`). * `Module.Namespace` has a new field `captures` which contains the list of values captured in a given namespace. This is initialized based on the ZIR capture list whenever a type declaration is analyzed. This change eliminates `CaptureScope` from semantic analysis, which is a nice simplification; but the main motivation here is that this change is a prerequisite for #18816.
Diffstat (limited to 'lib/std')
-rw-r--r--lib/std/zig/AstGen.zig245
-rw-r--r--lib/std/zig/Zir.zig147
2 files changed, 248 insertions, 144 deletions
diff --git a/lib/std/zig/AstGen.zig b/lib/std/zig/AstGen.zig
index af9bde4917..dfb93e0590 100644
--- a/lib/std/zig/AstGen.zig
+++ b/lib/std/zig/AstGen.zig
@@ -2205,7 +2205,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
},
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
- .namespace, .enum_namespace => break,
+ .namespace => break,
.defer_normal, .defer_error => scope = scope.cast(Scope.Defer).?.parent,
.top => unreachable,
}
@@ -2279,7 +2279,7 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index)
try parent_gz.addDefer(defer_scope.index, defer_scope.len);
},
.defer_error => scope = scope.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => break,
+ .namespace => break,
.top => unreachable,
}
}
@@ -2412,7 +2412,7 @@ fn checkLabelRedefinition(astgen: *AstGen, parent_scope: *Scope, label: Ast.Toke
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
.defer_normal, .defer_error => scope = scope.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => break,
+ .namespace => break,
.top => unreachable,
}
}
@@ -2790,7 +2790,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.@"resume",
.@"await",
.ret_err_value_code,
- .closure_get,
.ret_ptr,
.ret_type,
.for_len,
@@ -2860,7 +2859,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.store_to_inferred_ptr,
.resolve_inferred_alloc,
.set_runtime_safety,
- .closure_capture,
.memcpy,
.memset,
.validate_deref,
@@ -2928,7 +2926,7 @@ fn countDefers(outer_scope: *Scope, inner_scope: *Scope) struct {
const have_err_payload = defer_scope.remapped_err_code != .none;
need_err_code = need_err_code or have_err_payload;
},
- .namespace, .enum_namespace => unreachable,
+ .namespace => unreachable,
.top => unreachable,
}
}
@@ -2998,7 +2996,7 @@ fn genDefers(
.normal_only => continue,
}
},
- .namespace, .enum_namespace => unreachable,
+ .namespace => unreachable,
.top => unreachable,
}
}
@@ -3042,7 +3040,7 @@ fn checkUsed(gz: *GenZir, outer_scope: *Scope, inner_scope: *Scope) InnerError!v
scope = s.parent;
},
.defer_normal, .defer_error => scope = scope.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => unreachable,
+ .namespace => unreachable,
.top => unreachable,
}
}
@@ -4732,7 +4730,7 @@ fn testDecl(
},
.gen_zir => s = s.cast(GenZir).?.parent,
.defer_normal, .defer_error => s = s.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => {
+ .namespace => {
const ns = s.cast(Scope.Namespace).?;
if (ns.decls.get(name_str_index)) |i| {
if (found_already) |f| {
@@ -4849,10 +4847,10 @@ fn structDeclInner(
try gz.setStruct(decl_inst, .{
.src_node = node,
.layout = layout,
+ .captures_len = 0,
.fields_len = 0,
.decls_len = 0,
- .backing_int_ref = .none,
- .backing_int_body_len = 0,
+ .has_backing_int = false,
.known_non_opv = false,
.known_comptime_only = false,
.is_tuple = false,
@@ -5142,10 +5140,10 @@ fn structDeclInner(
try gz.setStruct(decl_inst, .{
.src_node = node,
.layout = layout,
+ .captures_len = @intCast(namespace.captures.count()),
.fields_len = field_count,
.decls_len = decl_count,
- .backing_int_ref = backing_int_ref,
- .backing_int_body_len = @intCast(backing_int_body_len),
+ .has_backing_int = backing_int_ref != .none,
.known_non_opv = known_non_opv,
.known_comptime_only = known_comptime_only,
.is_tuple = is_tuple,
@@ -5159,15 +5157,22 @@ fn structDeclInner(
const decls_slice = wip_members.declsSlice();
const fields_slice = wip_members.fieldsSlice();
const bodies_slice = astgen.scratch.items[bodies_start..];
- try astgen.extra.ensureUnusedCapacity(gpa, backing_int_body_len +
- decls_slice.len + fields_slice.len + bodies_slice.len);
- astgen.extra.appendSliceAssumeCapacity(astgen.scratch.items[scratch_top..][0..backing_int_body_len]);
+ try astgen.extra.ensureUnusedCapacity(gpa, backing_int_body_len + 2 +
+ decls_slice.len + namespace.captures.count() + fields_slice.len + bodies_slice.len);
+ astgen.extra.appendSliceAssumeCapacity(@ptrCast(namespace.captures.keys()));
+ if (backing_int_ref != .none) {
+ astgen.extra.appendAssumeCapacity(@intCast(backing_int_body_len));
+ if (backing_int_body_len == 0) {
+ astgen.extra.appendAssumeCapacity(@intFromEnum(backing_int_ref));
+ } else {
+ astgen.extra.appendSliceAssumeCapacity(astgen.scratch.items[scratch_top..][0..backing_int_body_len]);
+ }
+ }
astgen.extra.appendSliceAssumeCapacity(decls_slice);
astgen.extra.appendSliceAssumeCapacity(fields_slice);
astgen.extra.appendSliceAssumeCapacity(bodies_slice);
block_scope.unstack();
- try gz.addNamespaceCaptures(&namespace);
return decl_inst.toRef();
}
@@ -5368,6 +5373,7 @@ fn unionDeclInner(
.src_node = node,
.layout = layout,
.tag_type = arg_inst,
+ .captures_len = @intCast(namespace.captures.count()),
.body_len = body_len,
.fields_len = field_count,
.decls_len = decl_count,
@@ -5379,13 +5385,13 @@ fn unionDeclInner(
wip_members.finishBits(bits_per_field);
const decls_slice = wip_members.declsSlice();
const fields_slice = wip_members.fieldsSlice();
- try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + body_len + fields_slice.len);
+ try astgen.extra.ensureUnusedCapacity(gpa, namespace.captures.count() + decls_slice.len + body_len + fields_slice.len);
+ astgen.extra.appendSliceAssumeCapacity(@ptrCast(namespace.captures.keys()));
astgen.extra.appendSliceAssumeCapacity(decls_slice);
astgen.appendBodyWithFixups(body);
astgen.extra.appendSliceAssumeCapacity(fields_slice);
block_scope.unstack();
- try gz.addNamespaceCaptures(&namespace);
return decl_inst.toRef();
}
@@ -5555,7 +5561,7 @@ fn containerDecl(
defer block_scope.unstack();
_ = try astgen.scanDecls(&namespace, container_decl.ast.members);
- namespace.base.tag = .enum_namespace;
+ namespace.base.tag = .namespace;
const arg_inst: Zir.Inst.Ref = if (container_decl.ast.arg != 0)
try comptimeExpr(&block_scope, &namespace.base, coerced_type_ri, container_decl.ast.arg)
@@ -5586,7 +5592,6 @@ fn containerDecl(
if (member_node == counts.nonexhaustive_node)
continue;
fields_hasher.update(tree.getNodeSource(member_node));
- namespace.base.tag = .namespace;
var member = switch (try containerMember(&block_scope, &namespace.base, &wip_members, member_node)) {
.decl => continue,
.field => |field| field,
@@ -5630,7 +5635,6 @@ fn containerDecl(
},
);
}
- namespace.base.tag = .enum_namespace;
const tag_value_inst = try expr(&block_scope, &namespace.base, .{ .rl = .{ .ty = arg_inst } }, member.ast.value_expr);
wip_members.appendToField(@intFromEnum(tag_value_inst));
}
@@ -5676,6 +5680,7 @@ fn containerDecl(
.src_node = node,
.nonexhaustive = nonexhaustive,
.tag_type = arg_inst,
+ .captures_len = @intCast(namespace.captures.count()),
.body_len = body_len,
.fields_len = @intCast(counts.total_fields),
.decls_len = @intCast(counts.decls),
@@ -5685,13 +5690,13 @@ fn containerDecl(
wip_members.finishBits(bits_per_field);
const decls_slice = wip_members.declsSlice();
const fields_slice = wip_members.fieldsSlice();
- try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + body_len + fields_slice.len);
+ try astgen.extra.ensureUnusedCapacity(gpa, namespace.captures.count() + decls_slice.len + body_len + fields_slice.len);
+ astgen.extra.appendSliceAssumeCapacity(@ptrCast(namespace.captures.keys()));
astgen.extra.appendSliceAssumeCapacity(decls_slice);
astgen.appendBodyWithFixups(body);
astgen.extra.appendSliceAssumeCapacity(fields_slice);
block_scope.unstack();
- try gz.addNamespaceCaptures(&namespace);
return rvalue(gz, ri, decl_inst.toRef(), node);
},
.keyword_opaque => {
@@ -5733,16 +5738,17 @@ fn containerDecl(
try gz.setOpaque(decl_inst, .{
.src_node = node,
+ .captures_len = @intCast(namespace.captures.count()),
.decls_len = decl_count,
});
wip_members.finishBits(0);
const decls_slice = wip_members.declsSlice();
- try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len);
+ try astgen.extra.ensureUnusedCapacity(gpa, namespace.captures.count() + decls_slice.len);
+ astgen.extra.appendSliceAssumeCapacity(@ptrCast(namespace.captures.keys()));
astgen.extra.appendSliceAssumeCapacity(decls_slice);
block_scope.unstack();
- try gz.addNamespaceCaptures(&namespace);
return rvalue(gz, ri, decl_inst.toRef(), node);
},
else => unreachable,
@@ -8238,12 +8244,12 @@ fn localVarRef(
ident_token: Ast.TokenIndex,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
- const gpa = astgen.gpa;
const name_str_index = try astgen.identAsString(ident_token);
var s = scope;
var found_already: ?Ast.Node.Index = null; // we have found a decl with the same name already
var num_namespaces_out: u32 = 0;
- var capturing_namespace: ?*Scope.Namespace = null;
+ // defined when `num_namespaces_out != 0`
+ var capturing_namespace: *Scope.Namespace = undefined;
while (true) switch (s.tag) {
.local_val => {
const local_val = s.cast(Scope.LocalVal).?;
@@ -8257,15 +8263,14 @@ fn localVarRef(
local_val.used = ident_token;
}
- const value_inst = try tunnelThroughClosure(
+ const value_inst = if (num_namespaces_out != 0) try tunnelThroughClosure(
gz,
ident,
num_namespaces_out,
capturing_namespace,
local_val.inst,
local_val.token_src,
- gpa,
- );
+ ) else local_val.inst;
return rvalueNoCoercePreRef(gz, ri, value_inst, ident);
}
@@ -8285,19 +8290,18 @@ fn localVarRef(
const ident_name = try astgen.identifierTokenString(ident_token);
return astgen.failNodeNotes(ident, "mutable '{s}' not accessible from here", .{ident_name}, &.{
try astgen.errNoteTok(local_ptr.token_src, "declared mutable here", .{}),
- try astgen.errNoteNode(capturing_namespace.?.node, "crosses namespace boundary here", .{}),
+ try astgen.errNoteNode(capturing_namespace.node, "crosses namespace boundary here", .{}),
});
}
- const ptr_inst = try tunnelThroughClosure(
+ const ptr_inst = if (num_namespaces_out != 0) try tunnelThroughClosure(
gz,
ident,
num_namespaces_out,
capturing_namespace,
local_ptr.ptr,
local_ptr.token_src,
- gpa,
- );
+ ) else local_ptr.ptr;
switch (ri.rl) {
.ref, .ref_coerced_ty => {
@@ -8314,7 +8318,7 @@ fn localVarRef(
},
.gen_zir => s = s.cast(GenZir).?.parent,
.defer_normal, .defer_error => s = s.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => {
+ .namespace => {
const ns = s.cast(Scope.Namespace).?;
if (ns.decls.get(name_str_index)) |i| {
if (found_already) |f| {
@@ -8326,7 +8330,7 @@ fn localVarRef(
// We found a match but must continue looking for ambiguous references to decls.
found_already = i;
}
- if (s.tag == .namespace) num_namespaces_out += 1;
+ num_namespaces_out += 1;
capturing_namespace = ns;
s = ns.parent;
},
@@ -8348,41 +8352,70 @@ fn localVarRef(
}
}
-/// Adds a capture to a namespace, if needed.
-/// Returns the index of the closure_capture instruction.
+/// Access a ZIR instruction through closure. May tunnel through arbitrarily
+/// many namespaces, adding closure captures as required.
+/// Returns the index of the `closure_get` instruction added to `gz`.
fn tunnelThroughClosure(
gz: *GenZir,
+ /// The node which references the value to be captured.
inner_ref_node: Ast.Node.Index,
+ /// The number of namespaces being tunnelled through. At least 1.
num_tunnels: u32,
- ns: ?*Scope.Namespace,
+ /// The namespace being captured from.
+ ns: *Scope.Namespace,
+ /// The value being captured.
value: Zir.Inst.Ref,
+ /// The token of the value's declaration.
token: Ast.TokenIndex,
- gpa: Allocator,
) !Zir.Inst.Ref {
- // For trivial values, we don't need a tunnel.
- // Just return the ref.
- if (num_tunnels == 0 or value.toIndex() == null) {
+ const value_inst = value.toIndex() orelse {
+ // For trivial values, we don't need a tunnel; just return the ref.
return value;
+ };
+
+ const astgen = gz.astgen;
+ const gpa = astgen.gpa;
+
+ // Otherwise we need a tunnel. First, figure out the path of namespaces we
+ // are tunneling through. This is usually only going to be one or two, so
+ // use an SFBA to optimize for the common case.
+ var sfba = std.heap.stackFallback(@sizeOf(usize) * 2, astgen.arena);
+ var intermediate_tunnels = try sfba.get().alloc(*Scope.Namespace, num_tunnels - 1);
+
+ {
+ var i: usize = num_tunnels - 1;
+ var scope: *Scope = gz.parent;
+ while (i > 0) {
+ if (scope.cast(Scope.Namespace)) |mid_ns| {
+ i -= 1;
+ intermediate_tunnels[i] = mid_ns;
+ }
+ scope = scope.parent().?;
+ }
}
- // Otherwise we need a tunnel. Check if this namespace
- // already has one for this value.
- const gop = try ns.?.captures.getOrPut(gpa, value.toIndex().?);
- if (!gop.found_existing) {
- // Make a new capture for this value but don't add it to the declaring_gz yet
- try gz.astgen.instructions.append(gz.astgen.gpa, .{
- .tag = .closure_capture,
- .data = .{ .un_tok = .{
- .operand = value,
- .src_tok = ns.?.declaring_gz.?.tokenIndexToRelative(token),
- } },
+ // Now that we know the scopes we're tunneling through, begin adding
+ // captures as required, starting with the outermost namespace.
+ var cur_capture_index = std.math.cast(
+ u16,
+ (try ns.captures.getOrPut(gpa, Zir.Inst.Capture.wrap(.{ .inst = value_inst }))).index,
+ ) orelse return astgen.failNodeNotes(ns.node, "this compiler implementation only supports up to 65536 captures per namespace", .{}, &.{
+ try astgen.errNoteTok(token, "captured value here", .{}),
+ try astgen.errNoteNode(inner_ref_node, "value used here", .{}),
+ });
+
+ for (intermediate_tunnels) |tunnel_ns| {
+ cur_capture_index = std.math.cast(
+ u16,
+ (try tunnel_ns.captures.getOrPut(gpa, Zir.Inst.Capture.wrap(.{ .nested = cur_capture_index }))).index,
+ ) orelse return astgen.failNodeNotes(tunnel_ns.node, "this compiler implementation only supports up to 65536 captures per namespace", .{}, &.{
+ try astgen.errNoteTok(token, "captured value here", .{}),
+ try astgen.errNoteNode(inner_ref_node, "value used here", .{}),
});
- gop.value_ptr.* = @enumFromInt(gz.astgen.instructions.len - 1);
}
- // Add an instruction to get the value from the closure into
- // our current context
- return try gz.addInstNode(.closure_get, gop.value_ptr.*, inner_ref_node);
+ // Add an instruction to get the value from the closure.
+ return gz.addExtendedNodeSmall(.closure_get, inner_ref_node, cur_capture_index);
}
fn stringLiteral(
@@ -9095,7 +9128,7 @@ fn builtinCall(
},
.gen_zir => s = s.cast(GenZir).?.parent,
.defer_normal, .defer_error => s = s.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => {
+ .namespace => {
const ns = s.cast(Scope.Namespace).?;
if (ns.decls.get(decl_name)) |i| {
if (found_already) |f| {
@@ -11605,7 +11638,7 @@ const Scope = struct {
}
if (T == Namespace) {
switch (base.tag) {
- .namespace, .enum_namespace => return @fieldParentPtr(T, "base", base),
+ .namespace => return @fieldParentPtr(T, "base", base),
else => return null,
}
}
@@ -11621,7 +11654,7 @@ const Scope = struct {
.local_val => base.cast(LocalVal).?.parent,
.local_ptr => base.cast(LocalPtr).?.parent,
.defer_normal, .defer_error => base.cast(Defer).?.parent,
- .namespace, .enum_namespace => base.cast(Namespace).?.parent,
+ .namespace => base.cast(Namespace).?.parent,
.top => null,
};
}
@@ -11633,7 +11666,6 @@ const Scope = struct {
defer_normal,
defer_error,
namespace,
- enum_namespace,
top,
};
@@ -11725,9 +11757,8 @@ const Scope = struct {
/// Only valid during astgen.
declaring_gz: ?*GenZir,
- /// Map from the raw captured value to the instruction
- /// ref of the capture for decls in this namespace
- captures: std.AutoArrayHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{},
+ /// Set of captures used by this namespace.
+ captures: std.AutoArrayHashMapUnmanaged(Zir.Inst.Capture, void) = .{},
fn deinit(self: *Namespace, gpa: Allocator) void {
self.decls.deinit(gpa);
@@ -11787,12 +11818,6 @@ const GenZir = struct {
// Set if this GenZir is a defer or it is inside a defer.
any_defer_node: Ast.Node.Index = 0,
- /// Namespace members are lazy. When executing a decl within a namespace,
- /// any references to external instructions need to be treated specially.
- /// This list tracks those references. See also .closure_capture and .closure_get.
- /// Keys are the raw instruction index, values are the closure_capture instruction.
- captures: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{},
-
const unstacked_top = std.math.maxInt(usize);
/// Call unstack before adding any new instructions to containing GenZir.
fn unstack(self: *GenZir) void {
@@ -12534,6 +12559,30 @@ const GenZir = struct {
return new_index.toRef();
}
+ fn addExtendedNodeSmall(
+ gz: *GenZir,
+ opcode: Zir.Inst.Extended,
+ src_node: Ast.Node.Index,
+ small: u16,
+ ) !Zir.Inst.Ref {
+ const astgen = gz.astgen;
+ const gpa = astgen.gpa;
+
+ try gz.instructions.ensureUnusedCapacity(gpa, 1);
+ try astgen.instructions.ensureUnusedCapacity(gpa, 1);
+ const new_index: Zir.Inst.Index = @enumFromInt(astgen.instructions.len);
+ astgen.instructions.appendAssumeCapacity(.{
+ .tag = .extended,
+ .data = .{ .extended = .{
+ .opcode = opcode,
+ .small = small,
+ .operand = @bitCast(gz.nodeIndexToRelative(src_node)),
+ } },
+ });
+ gz.instructions.appendAssumeCapacity(new_index);
+ return new_index.toRef();
+ }
+
fn addUnTok(
gz: *GenZir,
tag: Zir.Inst.Tag,
@@ -12957,10 +13006,10 @@ const GenZir = struct {
fn setStruct(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
src_node: Ast.Node.Index,
+ captures_len: u32,
fields_len: u32,
decls_len: u32,
- backing_int_ref: Zir.Inst.Ref,
- backing_int_body_len: u32,
+ has_backing_int: bool,
layout: std.builtin.Type.ContainerLayout,
known_non_opv: bool,
known_comptime_only: bool,
@@ -12978,7 +13027,7 @@ const GenZir = struct {
const fields_hash_arr: [4]u32 = @bitCast(args.fields_hash);
- try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.StructDecl).Struct.fields.len + 4);
+ try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.StructDecl).Struct.fields.len + 3);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.StructDecl{
.fields_hash_0 = fields_hash_arr[0],
.fields_hash_1 = fields_hash_arr[1],
@@ -12987,26 +13036,24 @@ const GenZir = struct {
.src_node = gz.nodeIndexToRelative(args.src_node),
});
+ if (args.captures_len != 0) {
+ astgen.extra.appendAssumeCapacity(args.captures_len);
+ }
if (args.fields_len != 0) {
astgen.extra.appendAssumeCapacity(args.fields_len);
}
if (args.decls_len != 0) {
astgen.extra.appendAssumeCapacity(args.decls_len);
}
- if (args.backing_int_ref != .none) {
- astgen.extra.appendAssumeCapacity(args.backing_int_body_len);
- if (args.backing_int_body_len == 0) {
- astgen.extra.appendAssumeCapacity(@intFromEnum(args.backing_int_ref));
- }
- }
astgen.instructions.set(@intFromEnum(inst), .{
.tag = .extended,
.data = .{ .extended = .{
.opcode = .struct_decl,
.small = @bitCast(Zir.Inst.StructDecl.Small{
+ .has_captures_len = args.captures_len != 0,
.has_fields_len = args.fields_len != 0,
.has_decls_len = args.decls_len != 0,
- .has_backing_int = args.backing_int_ref != .none,
+ .has_backing_int = args.has_backing_int,
.known_non_opv = args.known_non_opv,
.known_comptime_only = args.known_comptime_only,
.is_tuple = args.is_tuple,
@@ -13024,6 +13071,7 @@ const GenZir = struct {
fn setUnion(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
src_node: Ast.Node.Index,
tag_type: Zir.Inst.Ref,
+ captures_len: u32,
body_len: u32,
fields_len: u32,
decls_len: u32,
@@ -13039,7 +13087,7 @@ const GenZir = struct {
const fields_hash_arr: [4]u32 = @bitCast(args.fields_hash);
- try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.UnionDecl).Struct.fields.len + 4);
+ try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.UnionDecl).Struct.fields.len + 5);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.UnionDecl{
.fields_hash_0 = fields_hash_arr[0],
.fields_hash_1 = fields_hash_arr[1],
@@ -13051,6 +13099,9 @@ const GenZir = struct {
if (args.tag_type != .none) {
astgen.extra.appendAssumeCapacity(@intFromEnum(args.tag_type));
}
+ if (args.captures_len != 0) {
+ astgen.extra.appendAssumeCapacity(args.captures_len);
+ }
if (args.body_len != 0) {
astgen.extra.appendAssumeCapacity(args.body_len);
}
@@ -13066,6 +13117,7 @@ const GenZir = struct {
.opcode = .union_decl,
.small = @bitCast(Zir.Inst.UnionDecl.Small{
.has_tag_type = args.tag_type != .none,
+ .has_captures_len = args.captures_len != 0,
.has_body_len = args.body_len != 0,
.has_fields_len = args.fields_len != 0,
.has_decls_len = args.decls_len != 0,
@@ -13082,6 +13134,7 @@ const GenZir = struct {
fn setEnum(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
src_node: Ast.Node.Index,
tag_type: Zir.Inst.Ref,
+ captures_len: u32,
body_len: u32,
fields_len: u32,
decls_len: u32,
@@ -13095,7 +13148,7 @@ const GenZir = struct {
const fields_hash_arr: [4]u32 = @bitCast(args.fields_hash);
- try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.EnumDecl).Struct.fields.len + 4);
+ try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.EnumDecl).Struct.fields.len + 5);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.EnumDecl{
.fields_hash_0 = fields_hash_arr[0],
.fields_hash_1 = fields_hash_arr[1],
@@ -13107,6 +13160,9 @@ const GenZir = struct {
if (args.tag_type != .none) {
astgen.extra.appendAssumeCapacity(@intFromEnum(args.tag_type));
}
+ if (args.captures_len != 0) {
+ astgen.extra.appendAssumeCapacity(args.captures_len);
+ }
if (args.body_len != 0) {
astgen.extra.appendAssumeCapacity(args.body_len);
}
@@ -13122,6 +13178,7 @@ const GenZir = struct {
.opcode = .enum_decl,
.small = @bitCast(Zir.Inst.EnumDecl.Small{
.has_tag_type = args.tag_type != .none,
+ .has_captures_len = args.captures_len != 0,
.has_body_len = args.body_len != 0,
.has_fields_len = args.fields_len != 0,
.has_decls_len = args.decls_len != 0,
@@ -13135,6 +13192,7 @@ const GenZir = struct {
fn setOpaque(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
src_node: Ast.Node.Index,
+ captures_len: u32,
decls_len: u32,
}) !void {
const astgen = gz.astgen;
@@ -13142,11 +13200,14 @@ const GenZir = struct {
assert(args.src_node != 0);
- try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.OpaqueDecl).Struct.fields.len + 1);
+ try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.OpaqueDecl).Struct.fields.len + 2);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.OpaqueDecl{
.src_node = gz.nodeIndexToRelative(args.src_node),
});
+ if (args.captures_len != 0) {
+ astgen.extra.appendAssumeCapacity(args.captures_len);
+ }
if (args.decls_len != 0) {
astgen.extra.appendAssumeCapacity(args.decls_len);
}
@@ -13155,6 +13216,7 @@ const GenZir = struct {
.data = .{ .extended = .{
.opcode = .opaque_decl,
.small = @bitCast(Zir.Inst.OpaqueDecl.Small{
+ .has_captures_len = args.captures_len != 0,
.has_decls_len = args.decls_len != 0,
.name_strategy = gz.anon_name_strategy,
}),
@@ -13197,15 +13259,6 @@ const GenZir = struct {
}
}
- fn addNamespaceCaptures(gz: *GenZir, namespace: *Scope.Namespace) !void {
- if (namespace.captures.count() > 0) {
- try gz.instructions.ensureUnusedCapacity(gz.astgen.gpa, namespace.captures.count());
- for (namespace.captures.values()) |capture| {
- gz.instructions.appendAssumeCapacity(capture);
- }
- }
- }
-
fn addDbgVar(gz: *GenZir, tag: Zir.Inst.Tag, name: Zir.NullTerminatedString, inst: Zir.Inst.Ref) !void {
if (gz.is_comptime) return;
@@ -13305,7 +13358,7 @@ fn detectLocalShadowing(
}
s = local_ptr.parent;
},
- .namespace, .enum_namespace => {
+ .namespace => {
outer_scope = true;
const ns = s.cast(Scope.Namespace).?;
const decl_node = ns.decls.get(ident_name) orelse {
@@ -13478,7 +13531,7 @@ fn scanDecls(astgen: *AstGen, namespace: *Scope.Namespace, members: []const Ast.
}
s = local_ptr.parent;
},
- .namespace, .enum_namespace => s = s.cast(Scope.Namespace).?.parent,
+ .namespace => s = s.cast(Scope.Namespace).?.parent,
.gen_zir => s = s.cast(GenZir).?.parent,
.defer_normal, .defer_error => s = s.cast(Scope.Defer).?.parent,
.top => break,
diff --git a/lib/std/zig/Zir.zig b/lib/std/zig/Zir.zig
index 117212ccbb..d46f22fec9 100644
--- a/lib/std/zig/Zir.zig
+++ b/lib/std/zig/Zir.zig
@@ -1004,17 +1004,6 @@ pub const Inst = struct {
@"resume",
@"await",
- /// When a type or function refers to a comptime value from an outer
- /// scope, that forms a closure over comptime value. The outer scope
- /// will record a capture of that value, which encodes its current state
- /// and marks it to persist. Uses `un_tok` field. Operand is the
- /// instruction value to capture.
- closure_capture,
- /// The inner scope of a closure uses closure_get to retrieve the value
- /// stored by the outer scope. Uses `inst_node` field. Operand is the
- /// closure_capture instruction ref.
- closure_get,
-
/// A defer statement.
/// Uses the `defer` union field.
@"defer",
@@ -1251,8 +1240,6 @@ pub const Inst = struct {
.@"await",
.ret_err_value_code,
.extended,
- .closure_get,
- .closure_capture,
.ret_ptr,
.ret_type,
.@"try",
@@ -1542,8 +1529,6 @@ pub const Inst = struct {
.@"resume",
.@"await",
.ret_err_value_code,
- .closure_get,
- .closure_capture,
.@"break",
.break_inline,
.condbr,
@@ -1829,9 +1814,6 @@ pub const Inst = struct {
.@"resume" = .un_node,
.@"await" = .un_node,
- .closure_capture = .un_tok,
- .closure_get = .inst_node,
-
.@"defer" = .@"defer",
.defer_err_code = .defer_err_code,
@@ -2074,6 +2056,10 @@ pub const Inst = struct {
/// `operand` is payload index to `RestoreErrRetIndex`.
/// `small` is undefined.
restore_err_ret_index,
+ /// Retrieves a value from the current type declaration scope's closure.
+ /// `operand` is `src_node: i32`.
+ /// `small` is closure index.
+ closure_get,
/// Used as a placeholder instruction which is just a dummy index for Sema to replace
/// with a specific value. For instance, this is used for the capture of an `errdefer`.
/// This should never appear in a body.
@@ -2949,7 +2935,7 @@ pub const Inst = struct {
/// These are stored in trailing data in `extra` for each prong.
pub const ProngInfo = packed struct(u32) {
body_len: u28,
- capture: Capture,
+ capture: ProngInfo.Capture,
is_inline: bool,
has_tag_capture: bool,
@@ -3013,19 +2999,21 @@ pub const Inst = struct {
};
/// Trailing:
- /// 0. fields_len: u32, // if has_fields_len
- /// 1. decls_len: u32, // if has_decls_len
- /// 2. backing_int_body_len: u32, // if has_backing_int
- /// 3. backing_int_ref: Ref, // if has_backing_int and backing_int_body_len is 0
- /// 4. backing_int_body_inst: Inst, // if has_backing_int and backing_int_body_len is > 0
- /// 5. decl: Index, // for every decls_len; points to a `declaration` instruction
- /// 6. flags: u32 // for every 8 fields
+ /// 0. captures_len: u32 // if has_captures_len
+ /// 1. fields_len: u32, // if has_fields_len
+ /// 2. decls_len: u32, // if has_decls_len
+ /// 3. capture: Capture // for every captures_len
+ /// 4. backing_int_body_len: u32, // if has_backing_int
+ /// 5. backing_int_ref: Ref, // if has_backing_int and backing_int_body_len is 0
+ /// 6. backing_int_body_inst: Inst, // if has_backing_int and backing_int_body_len is > 0
+ /// 7. decl: Index, // for every decls_len; points to a `declaration` instruction
+ /// 8. flags: u32 // for every 8 fields
/// - sets of 4 bits:
/// 0b000X: whether corresponding field has an align expression
/// 0b00X0: whether corresponding field has a default expression
/// 0b0X00: whether corresponding field is comptime
/// 0bX000: whether corresponding field has a type expression
- /// 7. fields: { // for every fields_len
+ /// 9. fields: { // for every fields_len
/// field_name: u32, // if !is_tuple
/// doc_comment: NullTerminatedString, // .empty if no doc comment
/// field_type: Ref, // if corresponding bit is not set. none means anytype.
@@ -3033,7 +3021,7 @@ pub const Inst = struct {
/// align_body_len: u32, // if corresponding bit is set
/// init_body_len: u32, // if corresponding bit is set
/// }
- /// 8. bodies: { // for every fields_len
+ /// 10. bodies: { // for every fields_len
/// field_type_body_inst: Inst, // for each field_type_body_len
/// align_body_inst: Inst, // for each align_body_len
/// init_body_inst: Inst, // for each init_body_len
@@ -3052,6 +3040,7 @@ pub const Inst = struct {
}
pub const Small = packed struct {
+ has_captures_len: bool,
has_fields_len: bool,
has_decls_len: bool,
has_backing_int: bool,
@@ -3063,10 +3052,35 @@ pub const Inst = struct {
any_default_inits: bool,
any_comptime_fields: bool,
any_aligned_fields: bool,
- _: u3 = undefined,
+ _: u2 = undefined,
};
};
+ /// Represents a single value being captured in a type declaration's closure.
+ /// If the high bit is 0, this represents a `Zir.Inst.Index`.
+ /// If the high bit is 1, this represents an index into the parent closure's capture list.
+ pub const Capture = enum(u32) {
+ _,
+ pub const Unwrapped = union(enum) {
+ inst: Zir.Inst.Index,
+ nested: u16,
+ };
+ pub fn wrap(cap: Unwrapped) Capture {
+ return switch (cap) {
+ .inst => |inst| @enumFromInt(@intFromEnum(inst)),
+ .nested => |idx| @enumFromInt((1 << 31) | @as(u32, idx)),
+ };
+ }
+ pub fn unwrap(cap: Capture) Unwrapped {
+ const raw = @intFromEnum(cap);
+ const tag: u1 = @intCast(raw >> 31);
+ return switch (tag) {
+ 0 => .{ .inst = @enumFromInt(raw) },
+ 1 => .{ .nested = @truncate(raw) },
+ };
+ }
+ };
+
pub const NameStrategy = enum(u2) {
/// Use the same name as the parent declaration name.
/// e.g. `const Foo = struct {...};`.
@@ -3098,14 +3112,16 @@ pub const Inst = struct {
/// Trailing:
/// 0. tag_type: Ref, // if has_tag_type
- /// 1. body_len: u32, // if has_body_len
- /// 2. fields_len: u32, // if has_fields_len
- /// 3. decls_len: u32, // if has_decls_len
- /// 4. decl: Index, // for every decls_len; points to a `declaration` instruction
- /// 5. inst: Index // for every body_len
- /// 6. has_bits: u32 // for every 32 fields
+ /// 1. captures_len: u32, // if has_captures_len
+ /// 2. body_len: u32, // if has_body_len
+ /// 3. fields_len: u32, // if has_fields_len
+ /// 4. decls_len: u32, // if has_decls_len
+ /// 5. capture: Capture // for every captures_len
+ /// 6. decl: Index, // for every decls_len; points to a `declaration` instruction
+ /// 7. inst: Index // for every body_len
+ /// 8. has_bits: u32 // for every 32 fields
/// - the bit is whether corresponding field has an value expression
- /// 7. fields: { // for every fields_len
+ /// 9. fields: { // for every fields_len
/// field_name: u32,
/// doc_comment: u32, // .empty if no doc_comment
/// value: Ref, // if corresponding bit is set
@@ -3125,29 +3141,32 @@ pub const Inst = struct {
pub const Small = packed struct {
has_tag_type: bool,
+ has_captures_len: bool,
has_body_len: bool,
has_fields_len: bool,
has_decls_len: bool,
name_strategy: NameStrategy,
nonexhaustive: bool,
- _: u9 = undefined,
+ _: u8 = undefined,
};
};
/// Trailing:
/// 0. tag_type: Ref, // if has_tag_type
- /// 1. body_len: u32, // if has_body_len
- /// 2. fields_len: u32, // if has_fields_len
- /// 3. decls_len: u32, // if has_decls_len
- /// 4. decl: Index, // for every decls_len; points to a `declaration` instruction
- /// 5. inst: Index // for every body_len
- /// 6. has_bits: u32 // for every 8 fields
+ /// 1. captures_len: u32 // if has_captures_len
+ /// 2. body_len: u32, // if has_body_len
+ /// 3. fields_len: u32, // if has_fields_len
+ /// 4. decls_len: u32, // if has_decls_len
+ /// 5. capture: Capture // for every captures_len
+ /// 6. decl: Index, // for every decls_len; points to a `declaration` instruction
+ /// 7. inst: Index // for every body_len
+ /// 8. has_bits: u32 // for every 8 fields
/// - sets of 4 bits:
/// 0b000X: whether corresponding field has a type expression
/// 0b00X0: whether corresponding field has a align expression
/// 0b0X00: whether corresponding field has a tag value expression
/// 0bX000: unused
- /// 7. fields: { // for every fields_len
+ /// 9. fields: { // for every fields_len
/// field_name: NullTerminatedString, // null terminated string index
/// doc_comment: NullTerminatedString, // .empty if no doc comment
/// field_type: Ref, // if corresponding bit is set
@@ -3170,6 +3189,7 @@ pub const Inst = struct {
pub const Small = packed struct {
has_tag_type: bool,
+ has_captures_len: bool,
has_body_len: bool,
has_fields_len: bool,
has_decls_len: bool,
@@ -3183,13 +3203,15 @@ pub const Inst = struct {
/// true | false | union(T) { }
auto_enum_tag: bool,
any_aligned_fields: bool,
- _: u6 = undefined,
+ _: u5 = undefined,
};
};
/// Trailing:
- /// 0. decls_len: u32, // if has_decls_len
- /// 1. decl: Index, // for every decls_len; points to a `declaration` instruction
+ /// 0. captures_len: u32, // if has_captures_len
+ /// 1. decls_len: u32, // if has_decls_len
+ /// 2. capture: Capture, // for every captures_len
+ /// 3. decl: Index, // for every decls_len; points to a `declaration` instruction
pub const OpaqueDecl = struct {
src_node: i32,
@@ -3198,9 +3220,10 @@ pub const Inst = struct {
}
pub const Small = packed struct {
+ has_captures_len: bool,
has_decls_len: bool,
name_strategy: NameStrategy,
- _: u13 = undefined,
+ _: u12 = undefined,
};
};
@@ -3502,6 +3525,11 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
.struct_decl => {
const small: Inst.StructDecl.Small = @bitCast(extended.small);
var extra_index: u32 = @intCast(extended.operand + @typeInfo(Inst.StructDecl).Struct.fields.len);
+ const captures_len = if (small.has_captures_len) captures_len: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :captures_len captures_len;
+ } else 0;
extra_index += @intFromBool(small.has_fields_len);
const decls_len = if (small.has_decls_len) decls_len: {
const decls_len = zir.extra[extra_index];
@@ -3509,6 +3537,8 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
break :decls_len decls_len;
} else 0;
+ extra_index += captures_len;
+
if (small.has_backing_int) {
const backing_int_body_len = zir.extra[extra_index];
extra_index += 1; // backing_int_body_len
@@ -3529,6 +3559,11 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
const small: Inst.EnumDecl.Small = @bitCast(extended.small);
var extra_index: u32 = @intCast(extended.operand + @typeInfo(Inst.EnumDecl).Struct.fields.len);
extra_index += @intFromBool(small.has_tag_type);
+ const captures_len = if (small.has_captures_len) captures_len: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :captures_len captures_len;
+ } else 0;
extra_index += @intFromBool(small.has_body_len);
extra_index += @intFromBool(small.has_fields_len);
const decls_len = if (small.has_decls_len) decls_len: {
@@ -3537,6 +3572,8 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
break :decls_len decls_len;
} else 0;
+ extra_index += captures_len;
+
return .{
.extra_index = extra_index,
.decls_remaining = decls_len,
@@ -3547,6 +3584,11 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
const small: Inst.UnionDecl.Small = @bitCast(extended.small);
var extra_index: u32 = @intCast(extended.operand + @typeInfo(Inst.UnionDecl).Struct.fields.len);
extra_index += @intFromBool(small.has_tag_type);
+ const captures_len = if (small.has_captures_len) captures_len: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :captures_len captures_len;
+ } else 0;
extra_index += @intFromBool(small.has_body_len);
extra_index += @intFromBool(small.has_fields_len);
const decls_len = if (small.has_decls_len) decls_len: {
@@ -3555,6 +3597,8 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
break :decls_len decls_len;
} else 0;
+ extra_index += captures_len;
+
return .{
.extra_index = extra_index,
.decls_remaining = decls_len,
@@ -3569,6 +3613,13 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
extra_index += 1;
break :decls_len decls_len;
} else 0;
+ // NOTE(review): the OpaqueDecl doc comment places captures_len at trailing
+ // position 0 and decls_len at position 1, but decls_len is read first here,
+ // unlike the struct/enum/union branches — verify this read order against
+ // what AstGen actually emits for opaque declarations.
+ const captures_len = if (small.has_captures_len) captures_len: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :captures_len captures_len;
+ } else 0;
+
+ extra_index += captures_len;
return .{
.extra_index = extra_index,