author    Loris Cro <kappaloris@gmail.com>  2022-08-23 18:50:36 +0200
committer GitHub <noreply@github.com>       2022-08-23 18:50:36 +0200
commit    c8d04fea157444f88485bd69b42f58b5c8f12b1e (patch)
tree      6f979ca279de8d96951c55975978e92c1008e048 /src
parent    583175dc1d8d14bc92838476eb226f0e91077abc (diff)
parent    b32e5a14ce20ae2943576de76d1af4c8bcc9dd72 (diff)
Merge pull request #12598 from ziglang/autodoc-anonstruct-wip
Autodoc: anon_init_struct support
Diffstat (limited to 'src')
-rw-r--r--  src/Autodoc.zig | 163
1 file changed, 126 insertions(+), 37 deletions(-)
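
For context, the `struct_init_anon` ZIR instruction that this change teaches Autodoc to handle is produced by anonymous struct literals in Zig source code. A minimal illustrative sketch follows; the declaration and field names are made up for this example and do not appear in the commit:

    const std = @import("std");

    // An anonymous struct literal: the field set is inferred from the
    // initializer itself rather than coming from a named struct type.
    // This is the kind of expression that lowers to `struct_init_anon`,
    // whose fields Autodoc walks one by one in the diff below.
    pub const example = .{
        .name = "autodoc",
        .len = 3,
    };

    test "fields of an anonymous struct literal" {
        try std.testing.expectEqualStrings("autodoc", example.name);
        try std.testing.expect(example.len == 3);
    }
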
diff --git a/src/Autodoc.zig b/src/Autodoc.zig
index f983bdfdf0..f8560407fe 100644
--- a/src/Autodoc.zig
+++ b/src/Autodoc.zig
@@ -69,6 +69,8 @@ pub fn generateZirData(self: *Autodoc) !void {
}
}
+ log.debug("Ref map size: {}", .{Ref.typed_value_map.len});
+
const root_src_dir = self.module.main_pkg.root_src_directory;
const root_src_path = self.module.main_pkg.root_src_path;
const joined_src_path = try root_src_dir.join(self.arena, &.{root_src_path});
@@ -159,6 +161,9 @@ pub fn generateZirData(self: *Autodoc) !void {
.void_type => .{
.Void = .{ .name = tmpbuf.toOwnedSlice() },
},
+ .type_info_type => .{
+ .ComptimeExpr = .{ .name = tmpbuf.toOwnedSlice() },
+ },
.type_type => .{
.Type = .{ .name = tmpbuf.toOwnedSlice() },
},
@@ -608,6 +613,7 @@ const DocData = struct {
type: usize, // index in `types`
this: usize, // index in `types`
declRef: usize, // index in `decls`
+ builtinField: enum { len, ptr },
fieldRef: FieldRef,
refPath: []Expr,
int: struct {
@@ -697,20 +703,26 @@ const DocData = struct {
var jsw = std.json.writeStream(w, 15);
try jsw.beginObject();
try jsw.objectField(@tagName(active_tag));
- inline for (comptime std.meta.fields(Expr)) |case| {
- if (@field(Expr, case.name) == active_tag) {
- switch (active_tag) {
- .int => {
- if (self.int.negated) try w.writeAll("-");
- try jsw.emitNumber(self.int.value);
- },
- .int_big => {
+ switch (self) {
+ .int => {
+ if (self.int.negated) try w.writeAll("-");
+ try jsw.emitNumber(self.int.value);
+ },
+ .int_big => {
- //@panic("TODO: json serialization of big ints!");
- //if (v.negated) try w.writeAll("-");
- //try jsw.emitNumber(v.value);
- },
- else => {
+ //@panic("TODO: json serialization of big ints!");
+ //if (v.negated) try w.writeAll("-");
+ //try jsw.emitNumber(v.value);
+ },
+ .builtinField => {
+ try jsw.emitString(@tagName(self.builtinField));
+ },
+ else => {
+ inline for (comptime std.meta.fields(Expr)) |case| {
+ // TODO: this is super ugly, fix once `inline else` is a thing
+ if (comptime std.mem.eql(u8, case.name, "builtinField"))
+ continue;
+ if (@field(Expr, case.name) == active_tag) {
try std.json.stringify(@field(self, case.name), opt, w);
jsw.state_index -= 1;
// TODO: we should not reach into the state of the
@@ -719,9 +731,9 @@ const DocData = struct {
// would be nice to have a proper integration
// between the json writer and the generic
// std.json.stringify implementation
- },
+ }
}
- }
+ },
}
try jsw.endObject();
}
@@ -1905,31 +1917,60 @@ fn walkInstruction(
const extra = file.zir.extraData(Zir.Inst.Field, pl_node.payload_index);
var path: std.ArrayListUnmanaged(DocData.Expr) = .{};
- var lhs = @enumToInt(extra.data.lhs) - Ref.typed_value_map.len; // underflow = need to handle Refs
-
try path.append(self.arena, .{
.string = file.zir.nullTerminatedString(extra.data.field_name_start),
});
+
// Put inside path the starting index of each decl name that
- // we encounter as we navigate through all the field_vals
- while (tags[lhs] == .field_val or
- tags[lhs] == .field_call_bind or
- tags[lhs] == .field_ptr or
- tags[lhs] == .field_type)
- {
- const lhs_extra = file.zir.extraData(
- Zir.Inst.Field,
- data[lhs].pl_node.payload_index,
- );
+ // we encounter as we navigate through all the field_*s
+ const lhs_ref = blk: {
+ var lhs_extra = extra;
+ while (true) {
+ if (@enumToInt(lhs_extra.data.lhs) < Ref.typed_value_map.len) {
+ break :blk lhs_extra.data.lhs;
+ }
- try path.append(self.arena, .{
- .string = file.zir.nullTerminatedString(lhs_extra.data.field_name_start),
- });
- lhs = @enumToInt(lhs_extra.data.lhs) - Ref.typed_value_map.len; // underflow = need to handle Refs
- }
+ const lhs = @enumToInt(lhs_extra.data.lhs) - Ref.typed_value_map.len;
+ if (tags[lhs] != .field_val and
+ tags[lhs] != .field_call_bind and
+ tags[lhs] != .field_ptr and
+ tags[lhs] != .field_type) break :blk lhs_extra.data.lhs;
+
+ lhs_extra = file.zir.extraData(
+ Zir.Inst.Field,
+ data[lhs].pl_node.payload_index,
+ );
+
+ try path.append(self.arena, .{
+ .string = file.zir.nullTerminatedString(lhs_extra.data.field_name_start),
+ });
+ }
+ };
+ // If the lhs is a `call` instruction, it means that we're inside
+ // a function call and we're referring to one of its arguments.
+ // We can't just blindly analyze the instruction or we will
+ // start recursing forever.
+ // TODO: add proper resolution of the container type for `calls`
+ // TODO: we're testing lhs as an instruction twice (above and
+ // below this todo); maybe a cleaner solution would
+ // avoid that.
// TODO: double check that we really don't need type info here
- const wr = try self.walkInstruction(file, parent_scope, lhs, false);
+
+ const wr = blk: {
+ if (@enumToInt(lhs_ref) >= Ref.typed_value_map.len) {
+ const lhs_inst = @enumToInt(lhs_ref) - Ref.typed_value_map.len;
+ if (tags[lhs_inst] == .call) {
+ break :blk DocData.WalkResult{
+ .expr = .{
+ .comptimeExpr = 0,
+ },
+ };
+ }
+ }
+
+ break :blk try self.walkRef(file, parent_scope, lhs_ref, false);
+ };
try path.append(self.arena, wr.expr);
// This way the data in `path` has the same ordering that the ref
@@ -1948,7 +1989,7 @@ fn walkInstruction(
// - (2) Paths can sometimes never resolve fully. This means that
// any value that depends on that will have to become a
// comptimeExpr.
- try self.tryResolveRefPath(file, lhs, path.items);
+ try self.tryResolveRefPath(file, inst_index, path.items);
return DocData.WalkResult{ .expr = .{ .refPath = path.items } };
},
.int_type => {
@@ -2053,6 +2094,33 @@ fn walkInstruction(
);
return self.cteTodo(@tagName(tags[inst_index]));
},
+ .struct_init_anon => {
+ const pl_node = data[inst_index].pl_node;
+ const extra = file.zir.extraData(Zir.Inst.StructInitAnon, pl_node.payload_index);
+
+ const field_vals = try self.arena.alloc(
+ DocData.Expr.FieldVal,
+ extra.data.fields_len,
+ );
+
+ var idx = extra.end;
+ for (field_vals) |*fv| {
+ const init_extra = file.zir.extraData(Zir.Inst.StructInitAnon.Item, idx);
+ const field_name = file.zir.nullTerminatedString(init_extra.data.field_name);
+ const value = try self.walkRef(
+ file,
+ parent_scope,
+ init_extra.data.init,
+ need_type,
+ );
+ fv.* = .{ .name = field_name, .val = value };
+ idx = init_extra.end;
+ }
+
+ return DocData.WalkResult{
+ .expr = .{ .@"struct" = field_vals },
+ };
+ },
.error_set_decl => {
const pl_node = data[inst_index].pl_node;
const extra = file.zir.extraData(Zir.Inst.ErrorSetDecl, pl_node.payload_index);
@@ -3072,6 +3140,10 @@ fn tryResolveRefPath(
.{ @tagName(self.types.items[t_index]), resolved_parent },
);
},
+ .ComptimeExpr => {
+ // Same as the comptimeExpr branch above
+ break :outer;
+ },
.Unanalyzed => {
// This decl path is pending completion
{
@@ -3094,6 +3166,20 @@ fn tryResolveRefPath(
return;
},
+ .Array => {
+ if (std.mem.eql(u8, child_string, "len")) {
+ path[i + 1] = .{
+ .builtinField = .len,
+ };
+ } else {
+ panicWithContext(
+ file,
+ inst_index,
+ "TODO: handle `{s}` in tryResolveDeclPath.type.Array\nInfo: {}",
+ .{ child_string, resolved_parent },
+ );
+ }
+ },
.Enum => |t_enum| {
for (t_enum.pubDecls) |d| {
// TODO: this could be improved a lot
@@ -3820,9 +3906,12 @@ fn walkRef(
} else if (enum_value < Ref.typed_value_map.len) {
switch (ref) {
else => {
- std.debug.panic("TODO: handle {s} in `walkRef`\n", .{
- @tagName(ref),
- });
+ panicWithContext(
+ file,
+ 0,
+ "TODO: handle {s} in walkRef",
+ .{@tagName(ref)},
+ );
},
.undef => {
return DocData.WalkResult{ .expr = .@"undefined" };