| | | |
|---|---|---|
| author | Andrew Kelley <andrew@ziglang.org> | 2019-05-30 12:07:55 -0400 |
| committer | Andrew Kelley <andrew@ziglang.org> | 2019-05-30 12:07:55 -0400 |
| commit | 7878f9660fcb8fa64fda0b2b67320fe9ddc2727b (patch) | |
| tree | e9286a58127e707c0c06b302347e7d49079e343e /src-self-hosted | |
| parent | 5954d5235fd0d029034d68f82bb7831ff47506f8 (diff) | |
| download | zig-7878f9660fcb8fa64fda0b2b67320fe9ddc2727b.tar.gz, zig-7878f9660fcb8fa64fda0b2b67320fe9ddc2727b.zip | |
dep tokenizer: run zig fmt and move exports to canonical location
Diffstat (limited to 'src-self-hosted')

| | | |
|---|---|---|
| -rw-r--r-- | src-self-hosted/dep_tokenizer.zig | 117 |
| -rw-r--r-- | src-self-hosted/stage1.zig | 60 |

2 files changed, 75 insertions, 102 deletions
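For context, the `Tokenizer` that stays behind in dep_tokenizer.zig keeps its plain Zig API; below is a minimal sketch, not part of this commit, of how that API is driven, assuming the `init`/`next`/`deinit` signatures visible in the diff and 2019-era Zig (`std.debug.warn`, `std.heap.c_allocator`); `dumpDepFile` is a hypothetical name:

```zig
const std = @import("std");
const Tokenizer = @import("dep_tokenizer.zig").Tokenizer;

// Hypothetical driver: tokenize a make-style .d file and print every
// target/prereq pathname the tokenizer yields. On error, the wrapper in
// the diff below reads the message from tokenizer.error_text instead.
fn dumpDepFile(bytes: []const u8) !void {
    var tokenizer = Tokenizer.init(std.heap.c_allocator, bytes);
    defer tokenizer.deinit();
    while (try tokenizer.next()) |token| {
        switch (token.id) {
            .target => std.debug.warn("target = {}\n", token.bytes),
            .prereq => std.debug.warn("prereq = {}\n", token.bytes),
        }
    }
}
```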
In the two removed lines marked below, `<CR>` stands for the raw carriage-return byte that the new `\x0d` escape replaces; it rendered invisibly on the original page.

```diff
diff --git a/src-self-hosted/dep_tokenizer.zig b/src-self-hosted/dep_tokenizer.zig
index 9ce965fff7..0dde6da71c 100644
--- a/src-self-hosted/dep_tokenizer.zig
+++ b/src-self-hosted/dep_tokenizer.zig
@@ -359,63 +359,6 @@ pub const Tokenizer = struct {
     };
 };
 
-export fn stage2_DepTokenizer_init(input: [*]const u8, len: usize) stage2_DepTokenizer {
-    const t = std.heap.c_allocator.create(Tokenizer) catch @panic("failed to create .d tokenizer");
-    t.* = Tokenizer.init(std.heap.c_allocator, input[0..len]);
-    return stage2_DepTokenizer{
-        .handle = t,
-    };
-}
-
-export fn stage2_DepTokenizer_deinit(self: *stage2_DepTokenizer) void {
-    self.handle.deinit();
-}
-
-export fn stage2_DepTokenizer_next(self: *stage2_DepTokenizer) stage2_DepNextResult {
-    const otoken = self.handle.next() catch {
-        const textz = std.Buffer.init(&self.handle.arena.allocator, self.handle.error_text) catch @panic("failed to create .d tokenizer error text");
-        return stage2_DepNextResult{
-            .type_id = .error_,
-            .textz = textz.toSlice().ptr,
-        };
-    };
-    const token = otoken orelse {
-        return stage2_DepNextResult{
-            .type_id = .null_,
-            .textz = undefined,
-        };
-    };
-    const textz = std.Buffer.init(&self.handle.arena.allocator, token.bytes) catch @panic("failed to create .d tokenizer token text");
-    return stage2_DepNextResult{
-        .type_id = switch (token.id) {
-            .target => stage2_DepNextResult.TypeId.target,
-            .prereq => stage2_DepNextResult.TypeId.prereq,
-        },
-        .textz = textz.toSlice().ptr,
-    };
-}
-
-export const stage2_DepTokenizer = extern struct {
-    handle: *Tokenizer,
-};
-
-export const stage2_DepNextResult = extern struct {
-    type_id: TypeId,
-
-    // when type_id == error --> error text
-    // when type_id == null --> undefined
-    // when type_id == target --> target pathname
-    // when type_id == prereq --> prereq pathname
-    textz: [*]const u8,
-
-    export const TypeId = extern enum {
-        error_,
-        null_,
-        target,
-        prereq,
-    };
-};
-
 test "empty file" {
     try depTokenizer("", "");
 }
@@ -469,78 +412,54 @@ test "empty target linefeeds" {
     const expect = "target = {foo.o}";
     try depTokenizer(
         \\foo.o:
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:
         \\
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:
         \\
-    ,
-        expect
-    );
+    , expect);
 }
 
 test "empty target linefeeds + continuations" {
     const expect = "target = {foo.o}";
     try depTokenizer(
         \\foo.o:\
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:\
         \\
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:\
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:\
         \\
-    ,
-        expect
-    );
+    , expect);
 }
 
 test "empty target linefeeds + hspace + continuations" {
     const expect = "target = {foo.o}";
     try depTokenizer(
         \\foo.o: \
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o: \
         \\
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o: \
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o: \
         \\
-    ,
-        expect
-    );
+    , expect);
 }
 
 test "prereq" {
@@ -572,15 +491,11 @@ test "prereq continuation" {
     try depTokenizer(
         \\foo.o: foo.h\
         \\bar.h
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o: foo.h\
         \\bar.h
-    ,
-        expect
-    );
+    , expect);
 }
 
 test "multiple prereqs" {
@@ -907,14 +822,14 @@ test "error target - continuation expecting end-of-line" {
         \\target = {foo.o}
         \\ERROR: illegal char 'x' at position 8: continuation expecting end-of-line
     );
-    try depTokenizer("foo.o: \\<CR>x",
+    try depTokenizer("foo.o: \\\x0dx",
         \\target = {foo.o}
         \\ERROR: illegal char 'x' at position 9: continuation expecting end-of-line
     );
 }
 
 test "error prereq - continuation expecting end-of-line" {
-    try depTokenizer("foo.o: foo.h\\<CR>x",
+    try depTokenizer("foo.o: foo.h\\\x0dx",
         \\target = {foo.o}
         \\ERROR: illegal char 'x' at position 14: continuation expecting end-of-line
     );
diff --git a/src-self-hosted/stage1.zig b/src-self-hosted/stage1.zig
index b5beb02f10..bc1a746154 100644
--- a/src-self-hosted/stage1.zig
+++ b/src-self-hosted/stage1.zig
@@ -15,13 +15,14 @@ const self_hosted_main = @import("main.zig");
 const Args = arg.Args;
 const Flag = arg.Flag;
 const errmsg = @import("errmsg.zig");
+const DepTokenizer = @import("dep_tokenizer.zig").Tokenizer;
 
 var stderr_file: fs.File = undefined;
 var stderr: *io.OutStream(fs.File.WriteError) = undefined;
 var stdout: *io.OutStream(fs.File.WriteError) = undefined;
 
 comptime {
-    _ = @import("dep_tokenizer.zig");
+    _ = @import("dep_tokenizer.zig");
 }
 
 // ABI warning
@@ -397,3 +398,60 @@ fn printErrMsgToFile(
     try stream.writeByteNTimes('~', last_token.end - first_token.start);
     try stream.write("\n");
 }
+
+export fn stage2_DepTokenizer_init(input: [*]const u8, len: usize) stage2_DepTokenizer {
+    const t = std.heap.c_allocator.create(DepTokenizer) catch @panic("failed to create .d tokenizer");
+    t.* = DepTokenizer.init(std.heap.c_allocator, input[0..len]);
+    return stage2_DepTokenizer{
+        .handle = t,
+    };
+}
+
+export fn stage2_DepTokenizer_deinit(self: *stage2_DepTokenizer) void {
+    self.handle.deinit();
+}
+
+export fn stage2_DepTokenizer_next(self: *stage2_DepTokenizer) stage2_DepNextResult {
+    const otoken = self.handle.next() catch {
+        const textz = std.Buffer.init(&self.handle.arena.allocator, self.handle.error_text) catch @panic("failed to create .d tokenizer error text");
+        return stage2_DepNextResult{
+            .type_id = .error_,
+            .textz = textz.toSlice().ptr,
+        };
+    };
+    const token = otoken orelse {
+        return stage2_DepNextResult{
+            .type_id = .null_,
+            .textz = undefined,
+        };
+    };
+    const textz = std.Buffer.init(&self.handle.arena.allocator, token.bytes) catch @panic("failed to create .d tokenizer token text");
+    return stage2_DepNextResult{
+        .type_id = switch (token.id) {
+            .target => stage2_DepNextResult.TypeId.target,
+            .prereq => stage2_DepNextResult.TypeId.prereq,
+        },
+        .textz = textz.toSlice().ptr,
+    };
+}
+
+export const stage2_DepTokenizer = extern struct {
+    handle: *DepTokenizer,
+};
+
+export const stage2_DepNextResult = extern struct {
+    type_id: TypeId,
+
+    // when type_id == error --> error text
+    // when type_id == null --> undefined
+    // when type_id == target --> target pathname
+    // when type_id == prereq --> prereq pathname
+    textz: [*]const u8,
+
+    export const TypeId = extern enum {
+        error_,
+        null_,
+        target,
+        prereq,
+    };
+};
```