aboutsummaryrefslogtreecommitdiff
path: root/src/Autodoc.zig
diff options
context:
space:
mode:
authorLoris Cro <kappaloris@gmail.com>2023-06-18 09:06:40 +0200
committerGitHub <noreply@github.com>2023-06-18 09:06:40 +0200
commit216ef10dc471e4db60a30208be178d6c59efeaaf (patch)
tree8c239dab283ae9cb3b7fe099bae240bcc53f894e /src/Autodoc.zig
parent0fc1d396495c1ab482197021dedac8bea3f9401c (diff)
parent729a051e9e38674233190aea23c0ac8c134f2d67 (diff)
downloadzig-216ef10dc471e4db60a30208be178d6c59efeaaf.tar.gz
zig-216ef10dc471e4db60a30208be178d6c59efeaaf.zip
Merge branch 'master' into autodoc-searchkey
Diffstat (limited to 'src/Autodoc.zig')
-rw-r--r--src/Autodoc.zig319
1 file changed, 202 insertions, 117 deletions
diff --git a/src/Autodoc.zig b/src/Autodoc.zig
index f73e718a4d..b34432fe26 100644
--- a/src/Autodoc.zig
+++ b/src/Autodoc.zig
@@ -8,6 +8,7 @@ const CompilationModule = @import("Module.zig");
const File = CompilationModule.File;
const Module = @import("Package.zig");
const Tokenizer = std.zig.Tokenizer;
+const InternPool = @import("InternPool.zig");
const Zir = @import("Zir.zig");
const Ref = Zir.Inst.Ref;
const log = std.log.scoped(.autodoc);
@@ -95,8 +96,6 @@ pub fn generateZirData(self: *Autodoc) !void {
}
}
- log.debug("Ref map size: {}", .{Ref.typed_value_map.len});
-
const root_src_dir = self.comp_module.main_pkg.root_src_directory;
const root_src_path = self.comp_module.main_pkg.root_src_path;
const joined_src_path = try root_src_dir.join(self.arena, &.{root_src_path});
@@ -108,18 +107,20 @@ pub fn generateZirData(self: *Autodoc) !void {
const file = self.comp_module.import_table.get(abs_root_src_path).?; // file is expected to be present in the import table
// Append all the types in Zir.Inst.Ref.
{
- try self.types.append(self.arena, .{
- .ComptimeExpr = .{ .name = "ComptimeExpr" },
- });
-
- // this skips Ref.none but it's ok because we replaced it with ComptimeExpr
- var i: u32 = 1;
- while (i <= @enumToInt(Ref.anyerror_void_error_union_type)) : (i += 1) {
+ comptime std.debug.assert(@enumToInt(InternPool.Index.first_type) == 0);
+ var i: u32 = 0;
+ while (i <= @enumToInt(InternPool.Index.last_type)) : (i += 1) {
+ const ip_index = @intToEnum(InternPool.Index, i);
var tmpbuf = std.ArrayList(u8).init(self.arena);
- try Ref.typed_value_map[i].val.fmtDebug().format("", .{}, tmpbuf.writer());
+ if (ip_index == .generic_poison_type) {
+ // Not a real type, doesn't have a normal name
+ try tmpbuf.writer().writeAll("(generic poison)");
+ } else {
+ try ip_index.toType().fmt(self.comp_module).format("", .{}, tmpbuf.writer());
+ }
try self.types.append(
self.arena,
- switch (@intToEnum(Ref, i)) {
+ switch (ip_index) {
else => blk: {
// TODO: map the remaining refs to a correct type
// instead of just assigning "array" to them.
@@ -295,7 +296,7 @@ pub fn generateZirData(self: *Autodoc) !void {
try std.json.stringify(
data,
.{
- .whitespace = .{ .indent = .None, .separator = false },
+ .whitespace = .{ .indent = .none, .separator = false },
.emit_null_optional_fields = true,
},
out,
@@ -444,7 +445,7 @@ const DocData = struct {
w: anytype,
) !void {
var jsw = std.json.writeStream(w, 15);
- if (opts.whitespace) |ws| jsw.whitespace = ws;
+ jsw.whitespace = opts.whitespace;
try jsw.beginObject();
inline for (comptime std.meta.tags(std.meta.FieldEnum(DocData))) |f| {
const f_name = @tagName(f);
@@ -495,7 +496,7 @@ const DocData = struct {
w: anytype,
) !void {
var jsw = std.json.writeStream(w, 15);
- if (opts.whitespace) |ws| jsw.whitespace = ws;
+ jsw.whitespace = opts.whitespace;
try jsw.beginObject();
inline for (comptime std.meta.tags(std.meta.FieldEnum(DocModule))) |f| {
@@ -529,7 +530,7 @@ const DocData = struct {
w: anytype,
) !void {
var jsw = std.json.writeStream(w, 15);
- if (opts.whitespace) |ws| jsw.whitespace = ws;
+ jsw.whitespace = opts.whitespace;
try jsw.beginArray();
inline for (comptime std.meta.fields(Decl)) |f| {
try jsw.arrayElem();
@@ -556,7 +557,7 @@ const DocData = struct {
w: anytype,
) !void {
var jsw = std.json.writeStream(w, 15);
- if (opts.whitespace) |ws| jsw.whitespace = ws;
+ jsw.whitespace = opts.whitespace;
try jsw.beginArray();
inline for (comptime std.meta.fields(AstNode)) |f| {
try jsw.arrayElem();
@@ -604,9 +605,11 @@ const DocData = struct {
pubDecls: []usize = &.{}, // index into decls
field_types: []Expr = &.{}, // (use src->fields to find names)
+ field_defaults: []?Expr = &.{}, // default values if specified
+ backing_int: ?Expr = null, // backing integer if specified
is_tuple: bool,
line_number: usize,
parent_container: ?usize, // index into `types`
+ layout: ?Expr, // if different than Auto
},
ComptimeExpr: struct { name: []const u8 },
ComptimeFloat: struct { name: []const u8 },
@@ -644,6 +647,7 @@ const DocData = struct {
tag: ?Expr, // tag type if specified
auto_enum: bool, // tag is an auto enum
parent_container: ?usize, // index into `types`
+ layout: ?Expr, // if different than Auto
},
Fn: struct {
name: []const u8,
@@ -686,7 +690,7 @@ const DocData = struct {
) !void {
const active_tag = std.meta.activeTag(self);
var jsw = std.json.writeStream(w, 15);
- if (opts.whitespace) |ws| jsw.whitespace = ws;
+ jsw.whitespace = opts.whitespace;
try jsw.beginArray();
try jsw.arrayElem();
try jsw.emitNumber(@enumToInt(active_tag));
@@ -755,6 +759,7 @@ const DocData = struct {
string: []const u8, // direct value
sliceIndex: usize,
slice: Slice,
+ sliceLength: SliceLength,
cmpxchgIndex: usize,
cmpxchg: Cmpxchg,
builtin: Builtin,
@@ -791,6 +796,12 @@ const DocData = struct {
end: ?usize = null,
sentinel: ?usize = null, // index in `exprs`
};
+ const SliceLength = struct {
+ lhs: usize,
+ start: usize,
+ len: usize,
+ sentinel: ?usize = null,
+ };
const Cmpxchg = struct {
name: []const u8,
type: usize,
@@ -821,7 +832,7 @@ const DocData = struct {
) @TypeOf(w).Error!void {
const active_tag = std.meta.activeTag(self);
var jsw = std.json.writeStream(w, 15);
- if (opts.whitespace) |ws| jsw.whitespace = ws;
+ jsw.whitespace = opts.whitespace;
try jsw.beginObject();
if (active_tag == .declIndex) {
try jsw.objectField("declRef");
@@ -996,6 +1007,12 @@ fn walkInstruction(
};
}
+ const maybe_tldoc_comment = try self.getTLDocComment(new_file.file);
+ try self.ast_nodes.append(self.arena, .{
+ .name = path,
+ .docs = maybe_tldoc_comment,
+ });
+
result.value_ptr.* = self.types.items.len;
var new_scope = Scope{
@@ -1024,7 +1041,7 @@ fn walkInstruction(
.ret_load => {
const un_node = data[inst_index].un_node;
const res_ptr_ref = un_node.operand;
- const res_ptr_inst = @enumToInt(res_ptr_ref) - Ref.typed_value_map.len;
+ const res_ptr_inst = Zir.refToIndex(res_ptr_ref).?;
// TODO: this instruction doesn't let us know trivially if there's
// branching involved or not. For now here's the strat:
// We search backwards until `ret_ptr` for `store_node`,
@@ -1287,6 +1304,68 @@ fn walkInstruction(
.expr = .{ .sliceIndex = slice_index },
};
},
+ .slice_length => {
+ const pl_node = data[inst_index].pl_node;
+ const extra = file.zir.extraData(Zir.Inst.SliceLength, pl_node.payload_index);
+
+ const slice_index = self.exprs.items.len;
+ try self.exprs.append(self.arena, .{ .slice = .{ .lhs = 0, .start = 0 } });
+
+ var lhs: DocData.WalkResult = try self.walkRef(
+ file,
+ parent_scope,
+ parent_src,
+ extra.data.lhs,
+ false,
+ );
+ var start: DocData.WalkResult = try self.walkRef(
+ file,
+ parent_scope,
+ parent_src,
+ extra.data.start,
+ false,
+ );
+ var len: DocData.WalkResult = try self.walkRef(
+ file,
+ parent_scope,
+ parent_src,
+ extra.data.len,
+ false,
+ );
+ var sentinel_opt: ?DocData.WalkResult = if (extra.data.sentinel != .none)
+ try self.walkRef(
+ file,
+ parent_scope,
+ parent_src,
+ extra.data.sentinel,
+ false,
+ )
+ else
+ null;
+
+ const lhs_index = self.exprs.items.len;
+ try self.exprs.append(self.arena, lhs.expr);
+ const start_index = self.exprs.items.len;
+ try self.exprs.append(self.arena, start.expr);
+ const len_index = self.exprs.items.len;
+ try self.exprs.append(self.arena, len.expr);
+ const sentinel_index = if (sentinel_opt) |sentinel| sentinel_index: {
+ const index = self.exprs.items.len;
+ try self.exprs.append(self.arena, sentinel.expr);
+ break :sentinel_index index;
+ } else null;
+ self.exprs.items[slice_index] = .{ .sliceLength = .{
+ .lhs = lhs_index,
+ .start = start_index,
+ .len = len_index,
+ .sentinel = sentinel_index,
+ } };
+
+ return DocData.WalkResult{
+ .typeRef = self.decls.items[lhs.expr.declRef.Analyzed].value.typeRef,
+ .expr = .{ .sliceIndex = slice_index },
+ };
+ },
// @check array_cat and array_mul
.add,
@@ -1415,8 +1494,6 @@ fn walkInstruction(
.frame_type,
.frame_size,
.ptr_to_int,
- .min,
- .max,
.bit_not,
// @check
.clz,
@@ -1467,6 +1544,8 @@ fn walkInstruction(
.offset_of,
.splat,
.reduce,
+ .min,
+ .max,
=> {
const pl_node = data[inst_index].pl_node;
const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
@@ -1876,12 +1955,14 @@ fn walkInstruction(
// WIP
const pl_node = data[inst_index].pl_node;
const extra = file.zir.extraData(Zir.Inst.SwitchBlock, pl_node.payload_index);
- const cond_index = self.exprs.items.len;
- _ = try self.walkRef(file, parent_scope, parent_src, extra.data.operand, false);
+ const switch_cond = try self.walkRef(file, parent_scope, parent_src, extra.data.operand, false);
+ const cond_index = self.exprs.items.len;
+ try self.exprs.append(self.arena, switch_cond.expr);
+ _ = cond_index;
- const ast_index = self.ast_nodes.items.len;
- const type_index = self.types.items.len - 1;
+ // const ast_index = self.ast_nodes.items.len;
+ // const type_index = self.types.items.len - 1;
// const ast_line = self.ast_nodes.items[ast_index - 1];
@@ -1894,41 +1975,22 @@ fn walkInstruction(
// log.debug("{s}", .{sep});
const switch_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .switchOp = .{
- .cond_index = cond_index,
- .file_name = file.sub_file_path,
- .src = ast_index,
- .outer_decl = type_index,
- } });
- return DocData.WalkResult{
- .typeRef = .{ .type = @enumToInt(Ref.type_type) },
- .expr = .{ .switchIndex = switch_index },
- };
- },
- .switch_cond => {
- const un_node = data[inst_index].un_node;
- const operand = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- need_type,
- );
- const operand_index = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
+ // const src_loc = try self.srcLocInfo(file, pl_node.src_node, parent_src);
- // const ast_index = self.ast_nodes.items.len;
- // const sep = "=" ** 200;
- // log.debug("{s}", .{sep});
- // log.debug("SWITCH COND", .{});
- // log.debug("ast index = {}", .{ast_index});
- // log.debug("ast previous = {}", .{self.ast_nodes.items[ast_index - 1]});
- // log.debug("{s}", .{sep});
+ const switch_expr = try self.getBlockSource(file, parent_src, pl_node.src_node);
+ try self.exprs.append(self.arena, .{ .comptimeExpr = self.comptime_exprs.items.len });
+ try self.comptime_exprs.append(self.arena, .{ .code = switch_expr });
+ // try self.exprs.append(self.arena, .{ .switchOp = .{
+ // .cond_index = cond_index,
+ // .file_name = file.sub_file_path,
+ // .src = ast_index,
+ // .outer_decl = type_index,
+ // } });
return DocData.WalkResult{
- .typeRef = operand.typeRef,
- .expr = .{ .typeOf = operand_index },
+ .typeRef = .{ .type = @enumToInt(Ref.type_type) },
+ .expr = .{ .switchIndex = switch_index },
};
},
@@ -2055,7 +2117,7 @@ fn walkInstruction(
.expr = .{ .declRef = decl_status },
};
},
- .field_val, .field_call_bind, .field_ptr, .field_type => {
+ .field_val, .field_ptr, .field_type => {
// TODO: field type uses Zir.Inst.FieldType, it just happens to have the
// same layout as Zir.Inst.Field :^)
const pl_node = data[inst_index].pl_node;
@@ -2071,13 +2133,11 @@ fn walkInstruction(
const lhs_ref = blk: {
var lhs_extra = extra;
while (true) {
- if (@enumToInt(lhs_extra.data.lhs) < Ref.typed_value_map.len) {
+ const lhs = Zir.refToIndex(lhs_extra.data.lhs) orelse {
break :blk lhs_extra.data.lhs;
- }
+ };
- const lhs = @enumToInt(lhs_extra.data.lhs) - Ref.typed_value_map.len;
if (tags[lhs] != .field_val and
- tags[lhs] != .field_call_bind and
tags[lhs] != .field_ptr and
tags[lhs] != .field_type) break :blk lhs_extra.data.lhs;
@@ -2103,9 +2163,8 @@ fn walkInstruction(
// TODO: double check that we really don't need type info here
const wr = blk: {
- if (@enumToInt(lhs_ref) >= Ref.typed_value_map.len) {
- const lhs_inst = @enumToInt(lhs_ref) - Ref.typed_value_map.len;
- if (tags[lhs_inst] == .call) {
+ if (Zir.refToIndex(lhs_ref)) |lhs_inst| {
+ if (tags[lhs_inst] == .call or tags[lhs_inst] == .field_call) {
break :blk DocData.WalkResult{
.expr = .{
.comptimeExpr = 0,
@@ -2153,11 +2212,14 @@ fn walkInstruction(
};
},
.block => {
- const res = DocData.WalkResult{ .expr = .{
- .comptimeExpr = self.comptime_exprs.items.len,
- } };
+ const res = DocData.WalkResult{
+ .typeRef = .{ .type = @enumToInt(Ref.type_type) },
+ .expr = .{ .comptimeExpr = self.comptime_exprs.items.len },
+ };
+ const pl_node = data[inst_index].pl_node;
+ const block_expr = try self.getBlockSource(file, parent_src, pl_node.src_node);
try self.comptime_exprs.append(self.arena, .{
- .code = "if (...) { ... }",
+ .code = block_expr,
});
return res;
},
@@ -2167,11 +2229,14 @@ fn walkInstruction(
parent_scope,
parent_src,
getBlockInlineBreak(file.zir, inst_index) orelse {
- const res = DocData.WalkResult{ .expr = .{
- .comptimeExpr = self.comptime_exprs.items.len,
- } };
+ const res = DocData.WalkResult{
+ .typeRef = .{ .type = @enumToInt(Ref.type_type) },
+ .expr = .{ .comptimeExpr = self.comptime_exprs.items.len },
+ };
+ const pl_node = data[inst_index].pl_node;
+ const block_inline_expr = try self.getBlockSource(file, parent_src, pl_node.src_node);
try self.comptime_exprs.append(self.arena, .{
- .code = "if (...) { ... }",
+ .code = block_inline_expr,
});
return res;
},
@@ -2375,7 +2440,19 @@ fn walkInstruction(
return DocData.WalkResult{
.typeRef = if (callee.typeRef) |tr| switch (tr) {
- .type => |func_type_idx| self.types.items[func_type_idx].Fn.ret,
+ .type => |func_type_idx| switch (self.types.items[func_type_idx]) {
+ .Fn => |func| func.ret,
+ else => blk: {
+ printWithContext(
+ file,
+ inst_index,
+ "unexpected callee type in walkInstruction.call: `{s}`\n",
+ .{@tagName(self.types.items[func_type_idx])},
+ );
+
+ break :blk null;
+ },
+ },
else => null,
} else null,
.expr = .{ .call = call_slot_index },
@@ -2573,12 +2650,12 @@ fn walkInstruction(
// We delay analysis because union tags can refer to
// decls defined inside the union itself.
- const tag_type_ref: Ref = if (small.has_tag_type) blk: {
+ const tag_type_ref: ?Ref = if (small.has_tag_type) blk: {
const tag_type = file.zir.extra[extra_index];
extra_index += 1;
const tag_ref = @intToEnum(Ref, tag_type);
break :blk tag_ref;
- } else .none;
+ } else null;
const body_len = if (small.has_body_len) blk: {
const body_len = file.zir.extra[extra_index];
@@ -2592,6 +2669,11 @@ fn walkInstruction(
break :blk fields_len;
} else 0;
+ const layout_expr: ?DocData.Expr = switch (small.layout) {
+ .Auto => null,
+ else => .{ .enumLiteral = @tagName(small.layout) },
+ };
+
var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
@@ -2605,13 +2687,13 @@ fn walkInstruction(
);
// Analyze the tag once all decls have been analyzed
- const tag_type = try self.walkRef(
+ const tag_type = if (tag_type_ref) |tt_ref| (try self.walkRef(
file,
&scope,
parent_src,
- tag_type_ref,
+ tt_ref,
false,
- );
+ )).expr else null;
// Fields
extra_index += body_len;
@@ -2643,9 +2725,10 @@ fn walkInstruction(
.privDecls = priv_decl_indexes.items,
.pubDecls = decl_indexes.items,
.fields = field_type_refs.items,
- .tag = tag_type.expr,
+ .tag = tag_type,
.auto_enum = small.auto_enum_tag,
.parent_container = parent_scope.enclosing_type,
+ .layout = layout_expr,
},
};
@@ -2834,17 +2917,33 @@ fn walkInstruction(
break :blk fields_len;
} else 0;
- // TODO: Expose explicit backing integer types in some way.
+ // We don't care about decls yet
+ if (small.has_decls_len) extra_index += 1;
+
+ var backing_int: ?DocData.Expr = null;
if (small.has_backing_int) {
const backing_int_body_len = file.zir.extra[extra_index];
extra_index += 1; // backing_int_body_len
if (backing_int_body_len == 0) {
+ const backing_int_ref = @intToEnum(Ref, file.zir.extra[extra_index]);
+ const backing_int_res = try self.walkRef(file, &scope, src_info, backing_int_ref, true);
+ backing_int = backing_int_res.expr;
extra_index += 1; // backing_int_ref
} else {
+ const backing_int_body = file.zir.extra[extra_index..][0..backing_int_body_len];
+ const break_inst = backing_int_body[backing_int_body.len - 1];
+ const operand = data[break_inst].@"break".operand;
+ const backing_int_res = try self.walkRef(file, &scope, src_info, operand, true);
+ backing_int = backing_int_res.expr;
extra_index += backing_int_body_len; // backing_int_body_inst
}
}
+ const layout_expr: ?DocData.Expr = switch (small.layout) {
+ .Auto => null,
+ else => .{ .enumLiteral = @tagName(small.layout) },
+ };
+
var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
@@ -2883,8 +2982,10 @@ fn walkInstruction(
.field_types = field_type_refs.items,
.field_defaults = field_default_refs.items,
.is_tuple = small.is_tuple,
+ .backing_int = backing_int,
.line_number = self.ast_nodes.items[self_ast_node_index].line,
.parent_container = parent_scope.enclosing_type,
+ .layout = layout_expr,
},
};
if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
@@ -3201,29 +3302,7 @@ fn analyzeDecl(
const decl_src = try self.srcLocInfo(file, value_pl_node.src_node, parent_src);
const name: []const u8 = switch (decl_name_index) {
- 0, 1 => unreachable, // comptime or usingnamespace decl
- 2 => {
- unreachable;
- // decl test
- // const decl_status = scope.resolveDeclName(doc_comment_index);
- // const decl_being_tested = decl_status.Analyzed;
- // const func_index = getBlockInlineBreak(file.zir, value_index).?;
-
- // const pl_node = data[Zir.refToIndex(func_index).?].pl_node;
- // const fn_src = try self.srcLocInfo(file, pl_node.src_node, decl_src);
- // const tree = try file.getTree(self.comp_module.gpa);
- // const test_source_code = tree.getNodeSource(fn_src.src_node);
-
- // const ast_node_index = self.ast_nodes.items.len;
- // try self.ast_nodes.append(self.arena, .{
- // .file = 0,
- // .line = 0,
- // .col = 0,
- // .code = test_source_code,
- // });
- // self.decls.items[decl_being_tested].decltest = ast_node_index;
- // continue;
- },
+ 0, 1, 2 => unreachable, // comptime or usingnamespace decl, decltest
else => blk: {
if (file.zir.string_bytes[decl_name_index] == 0) {
// test decl
@@ -3359,15 +3438,10 @@ fn analyzeDecltest(
const value_index = file.zir.extra[d.sub_index + 6];
const decl_name_index = file.zir.extra[d.sub_index + 7];
- // This is known to work because decl values are always block_inlines
const value_pl_node = data[value_index].pl_node;
const decl_src = try self.srcLocInfo(file, value_pl_node.src_node, parent_src);
- const func_index = getBlockInlineBreak(file.zir, value_index).?;
- const pl_node = data[Zir.refToIndex(func_index).?].pl_node;
- const fn_src = try self.srcLocInfo(file, pl_node.src_node, decl_src);
- const tree = try file.getTree(self.comp_module.gpa);
- const test_source_code = tree.getNodeSource(fn_src.src_node);
+ const test_source_code = try self.getBlockSource(file, parent_src, value_pl_node.src_node);
const decl_name: ?[]const u8 = if (decl_name_index != 0)
file.zir.nullTerminatedString(decl_name_index)
@@ -4572,16 +4646,19 @@ fn walkRef(
ref: Ref,
need_type: bool, // true when the caller needs also a typeRef for the return value
) AutodocErrors!DocData.WalkResult {
- const enum_value = @enumToInt(ref);
- if (enum_value <= @enumToInt(Ref.anyerror_void_error_union_type)) {
+ if (ref == .none) {
+ return .{ .expr = .{ .comptimeExpr = 0 } };
+ } else if (@enumToInt(ref) <= @enumToInt(InternPool.Index.last_type)) {
// We can just return a type that indexes into `types` with the
// enum value because in the beginning we pre-filled `types` with
// the types that are listed in `Ref`.
return DocData.WalkResult{
.typeRef = .{ .type = @enumToInt(std.builtin.TypeId.Type) },
- .expr = .{ .type = enum_value },
+ .expr = .{ .type = @enumToInt(ref) },
};
- } else if (enum_value < Ref.typed_value_map.len) {
+ } else if (Zir.refToIndex(ref)) |zir_index| {
+ return self.walkInstruction(file, parent_scope, parent_src, zir_index, need_type);
+ } else {
switch (ref) {
else => {
panicWithContext(
@@ -4674,9 +4751,6 @@ fn walkRef(
// } };
// },
}
- } else {
- const zir_index = enum_value - Ref.typed_value_map.len;
- return self.walkInstruction(file, parent_scope, parent_src, zir_index, need_type);
}
}
@@ -4814,6 +4888,17 @@ fn declIsVar(
return (tags[tok_idx] == .keyword_var);
}
+fn getBlockSource(
+ self: Autodoc,
+ file: *File,
+ parent_src: SrcLocInfo,
+ block_src_node: i32,
+) AutodocErrors![]const u8 {
+ const tree = try file.getTree(self.comp_module.gpa);
+ const block_src = try self.srcLocInfo(file, block_src_node, parent_src);
+ return tree.getNodeSource(block_src.src_node);
+}
+
fn getTLDocComment(self: *Autodoc, file: *File) ![]const u8 {
const source = (try file.getSource(self.comp_module.gpa)).bytes;
var tokenizer = Tokenizer.init(source);
@@ -4839,7 +4924,7 @@ fn findGuidePaths(self: *Autodoc, file: *File, str: []const u8) ![]const u8 {
// TODO: this algo is kinda inefficient
- var it = std.mem.split(u8, str, "\n");
+ var it = std.mem.splitScalar(u8, str, '\n');
while (it.next()) |line| {
const trimmed_line = std.mem.trim(u8, line, " ");
if (std.mem.startsWith(u8, trimmed_line, guide_prefix)) {