diff options
| author | Vexu <git@vexu.eu> | 2020-07-11 22:04:38 +0300 |
|---|---|---|
| committer | Vexu <git@vexu.eu> | 2020-07-12 00:54:07 +0300 |
| commit | be1507a7afe4c8869abdbab67a32ede6afe3d938 (patch) | |
| tree | 88be868d2b791d3bd7ab5cbd479b11066ece55d2 /doc | |
| parent | 3e095d8ef32fc93f5050cead708849846d626d1d (diff) | |
| download | zig-be1507a7afe4c8869abdbab67a32ede6afe3d938.tar.gz zig-be1507a7afe4c8869abdbab67a32ede6afe3d938.zip | |
update compile error tests and some doc comments
Diffstat (limited to 'doc')
| -rw-r--r-- | doc/docgen.zig | 11 |
1 file changed, 6 insertions, 5 deletions
diff --git a/doc/docgen.zig b/doc/docgen.zig
index e2acfae768..af4d2530d0 100644
--- a/doc/docgen.zig
+++ b/doc/docgen.zig
@@ -212,7 +212,7 @@ const Tokenizer = struct {
     }
 };
 
-fn parseError(tokenizer: *Tokenizer, token: Token, comptime fmt: []const u8, args: var) anyerror {
+fn parseError(tokenizer: *Tokenizer, token: Token, comptime fmt: []const u8, args: anytype) anyerror {
     const loc = tokenizer.getTokenLocation(token);
     const args_prefix = .{ tokenizer.source_file_name, loc.line + 1, loc.column + 1 };
     warn("{}:{}:{}: error: " ++ fmt ++ "\n", args_prefix ++ args);
@@ -634,7 +634,7 @@ fn escapeHtml(allocator: *mem.Allocator, input: []const u8) ![]u8 {
     return buf.toOwnedSlice();
 }
 
-fn writeEscaped(out: var, input: []const u8) !void {
+fn writeEscaped(out: anytype, input: []const u8) !void {
     for (input) |c| {
         try switch (c) {
             '&' => out.writeAll("&amp;"),
@@ -765,7 +765,7 @@ fn isType(name: []const u8) bool {
     return false;
 }
 
-fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Token, raw_src: []const u8) !void {
+fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: anytype, source_token: Token, raw_src: []const u8) !void {
     const src = mem.trim(u8, raw_src, " \n");
     try out.writeAll("<code class=\"zig\">");
     var tokenizer = std.zig.Tokenizer.init(src);
@@ -825,6 +825,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
             .Keyword_volatile,
             .Keyword_allowzero,
             .Keyword_while,
+            .Keyword_anytype,
             => {
                 try out.writeAll("<span class=\"tok-kw\">");
                 try writeEscaped(out, src[token.loc.start..token.loc.end]);
@@ -977,12 +978,12 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
     try out.writeAll("</code>");
 }
 
-fn tokenizeAndPrint(docgen_tokenizer: *Tokenizer, out: var, source_token: Token) !void {
+fn tokenizeAndPrint(docgen_tokenizer: *Tokenizer, out: anytype, source_token: Token) !void {
     const raw_src = docgen_tokenizer.buffer[source_token.start..source_token.end];
     return tokenizeAndPrintRaw(docgen_tokenizer, out, source_token, raw_src);
 }
 
-fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var, zig_exe: []const u8) !void {
+fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: anytype, zig_exe: []const u8) !void {
     var code_progress_index: usize = 0;
 
     var env_map = try process.getEnvMap(allocator);
