aboutsummaryrefslogtreecommitdiff
path: root/std
diff options
context:
space:
mode:
Diffstat (limited to 'std')
-rw-r--r--std/array_list.zig66
-rw-r--r--std/atomic/index.zig7
-rw-r--r--std/atomic/queue.zig126
-rw-r--r--std/atomic/stack.zig131
-rw-r--r--std/buf_map.zig63
-rw-r--r--std/buf_set.zig28
-rw-r--r--std/buffer.zig28
-rw-r--r--std/build.zig15
-rw-r--r--std/c/darwin.zig31
-rw-r--r--std/c/index.zig12
-rw-r--r--std/c/linux.zig5
-rw-r--r--std/crypto/throughput_test.zig27
-rw-r--r--std/debug/index.zig25
-rw-r--r--std/elf.zig611
-rw-r--r--std/endian.zig25
-rw-r--r--std/event.zig235
-rw-r--r--std/fmt/errol/index.zig79
-rw-r--r--std/fmt/index.zig570
-rw-r--r--std/hash_map.zig68
-rw-r--r--std/heap.zig135
-rw-r--r--std/index.zig12
-rw-r--r--std/io.zig5
-rw-r--r--std/io_test.zig18
-rw-r--r--std/json.zig1304
-rw-r--r--std/json_test.zig1942
-rw-r--r--std/linked_list.zig1
-rw-r--r--std/math/complex/abs.zig18
-rw-r--r--std/math/complex/acos.zig21
-rw-r--r--std/math/complex/acosh.zig21
-rw-r--r--std/math/complex/arg.zig18
-rw-r--r--std/math/complex/asin.zig27
-rw-r--r--std/math/complex/asinh.zig22
-rw-r--r--std/math/complex/atan.zig130
-rw-r--r--std/math/complex/atanh.zig22
-rw-r--r--std/math/complex/conj.zig17
-rw-r--r--std/math/complex/cos.zig21
-rw-r--r--std/math/complex/cosh.zig165
-rw-r--r--std/math/complex/exp.zig140
-rw-r--r--std/math/complex/index.zig171
-rw-r--r--std/math/complex/ldexp.zig75
-rw-r--r--std/math/complex/log.zig23
-rw-r--r--std/math/complex/pow.zig22
-rw-r--r--std/math/complex/proj.zig24
-rw-r--r--std/math/complex/sin.zig22
-rw-r--r--std/math/complex/sinh.zig164
-rw-r--r--std/math/complex/sqrt.zig133
-rw-r--r--std/math/complex/tan.zig22
-rw-r--r--std/math/complex/tanh.zig111
-rw-r--r--std/math/exp.zig5
-rw-r--r--std/math/index.zig31
-rw-r--r--std/math/ln.zig2
-rw-r--r--std/math/log2.zig7
-rw-r--r--std/math/sqrt.zig295
-rw-r--r--std/math/x86_64/sqrt.zig15
-rw-r--r--std/mem.zig133
-rw-r--r--std/net.zig283
-rw-r--r--std/os/darwin.zig30
-rw-r--r--std/os/epoch.zig26
-rw-r--r--std/os/file.zig45
-rw-r--r--std/os/index.zig1104
-rw-r--r--std/os/linux/i386.zig505
-rw-r--r--std/os/linux/index.zig689
-rw-r--r--std/os/linux/test.zig1
-rw-r--r--std/os/linux/vdso.zig89
-rw-r--r--std/os/linux/x86_64.zig21
-rw-r--r--std/os/test.zig44
-rw-r--r--std/os/time.zig288
-rw-r--r--std/os/windows/index.zig19
-rw-r--r--std/rand/index.zig24
-rw-r--r--std/rand/ziggurat.zig146
-rw-r--r--std/segmented_list.zig379
-rw-r--r--std/special/bootstrap.zig27
-rw-r--r--std/special/bootstrap_lib.zig3
-rw-r--r--std/special/builtin.zig281
-rw-r--r--std/special/compiler_rt/fixuint.zig2
-rw-r--r--std/special/compiler_rt/fixunsdfdi_test.zig2
-rw-r--r--std/special/compiler_rt/fixunsdfsi_test.zig2
-rw-r--r--std/special/compiler_rt/fixunsdfti_test.zig2
-rw-r--r--std/special/compiler_rt/fixunssfdi_test.zig2
-rw-r--r--std/special/compiler_rt/fixunssfsi_test.zig2
-rw-r--r--std/special/compiler_rt/fixunssfti_test.zig2
-rw-r--r--std/special/compiler_rt/fixunstfdi_test.zig2
-rw-r--r--std/special/compiler_rt/fixunstfsi_test.zig2
-rw-r--r--std/special/compiler_rt/fixunstfti_test.zig2
-rw-r--r--std/special/compiler_rt/index.zig26
-rw-r--r--std/special/compiler_rt/udivmod.zig2
-rw-r--r--std/special/compiler_rt/udivmodti4.zig6
-rw-r--r--std/special/compiler_rt/udivti3.zig9
-rw-r--r--std/special/compiler_rt/umodti3.zig10
-rw-r--r--std/unicode.zig163
-rw-r--r--std/zig/ast.zig2561
-rw-r--r--std/zig/bench.zig38
-rw-r--r--std/zig/index.zig8
-rw-r--r--std/zig/parse.zig3503
-rw-r--r--std/zig/parser.zig1733
-rw-r--r--std/zig/parser_test.zig1183
-rw-r--r--std/zig/render.zig1270
-rw-r--r--std/zig/tokenizer.zig420
98 files changed, 18521 insertions, 3881 deletions
diff --git a/std/array_list.zig b/std/array_list.zig
index 2a44b66518..f1881cd7f3 100644
--- a/std/array_list.zig
+++ b/std/array_list.zig
@@ -28,11 +28,11 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type{
};
}
- pub fn deinit(l: &Self) void {
+ pub fn deinit(l: &const Self) void {
l.allocator.free(l.items);
}
- pub fn toSlice(l: &Self) []align(A) T {
+ pub fn toSlice(l: &const Self) []align(A) T {
return l.items[0..l.len];
}
@@ -44,6 +44,10 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type{
return l.toSliceConst()[n];
}
+ pub fn count(self: &const Self) usize {
+ return self.len;
+ }
+
/// ArrayList takes ownership of the passed in slice. The slice must have been
/// allocated with `allocator`.
/// Deinitialize with `deinit` or use `toOwnedSlice`.
@@ -128,6 +132,27 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type{
return null;
return self.pop();
}
+
+ pub const Iterator = struct {
+ list: &const Self,
+ // how many items have we returned
+ count: usize,
+
+ pub fn next(it: &Iterator) ?T {
+ if (it.count >= it.list.len) return null;
+ const val = it.list.at(it.count);
+ it.count += 1;
+ return val;
+ }
+
+ pub fn reset(it: &Iterator) void {
+ it.count = 0;
+ }
+ };
+
+ pub fn iterator(self: &const Self) Iterator {
+ return Iterator { .list = self, .count = 0 };
+ }
};
}
@@ -143,6 +168,14 @@ test "basic ArrayList test" {
assert(list.items[i] == i32(i + 1));
}}
+ for (list.toSlice()) |v, i| {
+ assert(v == i32(i + 1));
+ }
+
+ for (list.toSliceConst()) |v, i| {
+ assert(v == i32(i + 1));
+ }
+
assert(list.pop() == 10);
assert(list.len == 9);
@@ -157,6 +190,35 @@ test "basic ArrayList test" {
assert(list.len == 9);
}
+test "iterator ArrayList test" {
+ var list = ArrayList(i32).init(debug.global_allocator);
+ defer list.deinit();
+
+ try list.append(1);
+ try list.append(2);
+ try list.append(3);
+
+ var count : i32 = 0;
+ var it = list.iterator();
+ while (it.next()) |next| {
+ assert(next == count + 1);
+ count += 1;
+ }
+
+ assert(count == 3);
+ assert(it.next() == null);
+ it.reset();
+ count = 0;
+ while (it.next()) |next| {
+ assert(next == count + 1);
+ count += 1;
+ if (count == 2) break;
+ }
+
+ it.reset();
+ assert(?? it.next() == 1);
+}
+
test "insert ArrayList test" {
var list = ArrayList(i32).init(debug.global_allocator);
defer list.deinit();
diff --git a/std/atomic/index.zig b/std/atomic/index.zig
new file mode 100644
index 0000000000..9d556a6415
--- /dev/null
+++ b/std/atomic/index.zig
@@ -0,0 +1,7 @@
+pub const Stack = @import("stack.zig").Stack;
+pub const Queue = @import("queue.zig").Queue;
+
+test "std.atomic" {
+ _ = @import("stack.zig").Stack;
+ _ = @import("queue.zig").Queue;
+}
diff --git a/std/atomic/queue.zig b/std/atomic/queue.zig
new file mode 100644
index 0000000000..e25c8e6b17
--- /dev/null
+++ b/std/atomic/queue.zig
@@ -0,0 +1,126 @@
+const builtin = @import("builtin");
+const AtomicOrder = builtin.AtomicOrder;
+const AtomicRmwOp = builtin.AtomicRmwOp;
+
+/// Many reader, many writer, non-allocating, thread-safe, lock-free
+pub fn Queue(comptime T: type) type {
+ return struct {
+ head: &Node,
+ tail: &Node,
+ root: Node,
+
+ pub const Self = this;
+
+ pub const Node = struct {
+ next: ?&Node,
+ data: T,
+ };
+
+ // TODO: well defined copy elision: https://github.com/zig-lang/zig/issues/287
+ pub fn init(self: &Self) void {
+ self.root.next = null;
+ self.head = &self.root;
+ self.tail = &self.root;
+ }
+
+ pub fn put(self: &Self, node: &Node) void {
+ node.next = null;
+
+ const tail = @atomicRmw(&Node, &self.tail, AtomicRmwOp.Xchg, node, AtomicOrder.SeqCst);
+ _ = @atomicRmw(?&Node, &tail.next, AtomicRmwOp.Xchg, node, AtomicOrder.SeqCst);
+ }
+
+ pub fn get(self: &Self) ?&Node {
+ var head = @atomicLoad(&Node, &self.head, AtomicOrder.SeqCst);
+ while (true) {
+ const node = head.next ?? return null;
+ head = @cmpxchgWeak(&Node, &self.head, head, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? return node;
+ }
+ }
+ };
+}
+
+const std = @import("std");
+const Context = struct {
+ allocator: &std.mem.Allocator,
+ queue: &Queue(i32),
+ put_sum: isize,
+ get_sum: isize,
+ get_count: usize,
+ puts_done: u8, // TODO make this a bool
+};
+
+// TODO add lazy evaluated build options and then put puts_per_thread behind
+// some option such as: "AggressiveMultithreadedFuzzTest". In the AppVeyor
+// CI we would use a less aggressive setting since at 1 core, while we still
+// want this test to pass, we need a smaller value since there is so much thrashing
+// we would also use a less aggressive setting when running in valgrind
+const puts_per_thread = 500;
+const put_thread_count = 3;
+
+test "std.atomic.queue" {
+ var direct_allocator = std.heap.DirectAllocator.init();
+ defer direct_allocator.deinit();
+
+ var plenty_of_memory = try direct_allocator.allocator.alloc(u8, 300 * 1024);
+ defer direct_allocator.allocator.free(plenty_of_memory);
+
+ var fixed_buffer_allocator = std.heap.ThreadSafeFixedBufferAllocator.init(plenty_of_memory);
+ var a = &fixed_buffer_allocator.allocator;
+
+ var queue: Queue(i32) = undefined;
+ queue.init();
+ var context = Context {
+ .allocator = a,
+ .queue = &queue,
+ .put_sum = 0,
+ .get_sum = 0,
+ .puts_done = 0,
+ .get_count = 0,
+ };
+
+ var putters: [put_thread_count]&std.os.Thread = undefined;
+ for (putters) |*t| {
+ *t = try std.os.spawnThread(&context, startPuts);
+ }
+ var getters: [put_thread_count]&std.os.Thread = undefined;
+ for (getters) |*t| {
+ *t = try std.os.spawnThread(&context, startGets);
+ }
+
+ for (putters) |t| t.wait();
+ _ = @atomicRmw(u8, &context.puts_done, builtin.AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst);
+ for (getters) |t| t.wait();
+
+ std.debug.assert(context.put_sum == context.get_sum);
+ std.debug.assert(context.get_count == puts_per_thread * put_thread_count);
+}
+
+fn startPuts(ctx: &Context) u8 {
+ var put_count: usize = puts_per_thread;
+ var r = std.rand.DefaultPrng.init(0xdeadbeef);
+ while (put_count != 0) : (put_count -= 1) {
+ std.os.time.sleep(0, 1); // let the os scheduler be our fuzz
+ const x = @bitCast(i32, r.random.scalar(u32));
+ const node = ctx.allocator.create(Queue(i32).Node) catch unreachable;
+ node.data = x;
+ ctx.queue.put(node);
+ _ = @atomicRmw(isize, &ctx.put_sum, builtin.AtomicRmwOp.Add, x, AtomicOrder.SeqCst);
+ }
+ return 0;
+}
+
+fn startGets(ctx: &Context) u8 {
+ while (true) {
+ while (ctx.queue.get()) |node| {
+ std.os.time.sleep(0, 1); // let the os scheduler be our fuzz
+ _ = @atomicRmw(isize, &ctx.get_sum, builtin.AtomicRmwOp.Add, node.data, builtin.AtomicOrder.SeqCst);
+ _ = @atomicRmw(usize, &ctx.get_count, builtin.AtomicRmwOp.Add, 1, builtin.AtomicOrder.SeqCst);
+ }
+
+ if (@atomicLoad(u8, &ctx.puts_done, builtin.AtomicOrder.SeqCst) == 1) {
+ break;
+ }
+ }
+ return 0;
+}
diff --git a/std/atomic/stack.zig b/std/atomic/stack.zig
new file mode 100644
index 0000000000..4a3dbef32b
--- /dev/null
+++ b/std/atomic/stack.zig
@@ -0,0 +1,131 @@
+const builtin = @import("builtin");
+const AtomicOrder = builtin.AtomicOrder;
+
+/// Many reader, many writer, non-allocating, thread-safe, lock-free
+pub fn Stack(comptime T: type) type {
+ return struct {
+ root: ?&Node,
+
+ pub const Self = this;
+
+ pub const Node = struct {
+ next: ?&Node,
+ data: T,
+ };
+
+ pub fn init() Self {
+ return Self {
+ .root = null,
+ };
+ }
+
+ /// push operation, but only if you are the first item in the stack. if you did not succeed in
+ /// being the first item in the stack, returns the other item that was there.
+ pub fn pushFirst(self: &Self, node: &Node) ?&Node {
+ node.next = null;
+ return @cmpxchgStrong(?&Node, &self.root, null, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst);
+ }
+
+ pub fn push(self: &Self, node: &Node) void {
+ var root = @atomicLoad(?&Node, &self.root, AtomicOrder.SeqCst);
+ while (true) {
+ node.next = root;
+ root = @cmpxchgWeak(?&Node, &self.root, root, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? break;
+ }
+ }
+
+ pub fn pop(self: &Self) ?&Node {
+ var root = @atomicLoad(?&Node, &self.root, AtomicOrder.SeqCst);
+ while (true) {
+ root = @cmpxchgWeak(?&Node, &self.root, root, (root ?? return null).next, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? return root;
+ }
+ }
+
+ pub fn isEmpty(self: &Self) bool {
+ return @atomicLoad(?&Node, &self.root, AtomicOrder.SeqCst) == null;
+ }
+ };
+}
+
+const std = @import("std");
+const Context = struct {
+ allocator: &std.mem.Allocator,
+ stack: &Stack(i32),
+ put_sum: isize,
+ get_sum: isize,
+ get_count: usize,
+ puts_done: u8, // TODO make this a bool
+};
+// TODO add lazy evaluated build options and then put puts_per_thread behind
+// some option such as: "AggressiveMultithreadedFuzzTest". In the AppVeyor
+// CI we would use a less aggressive setting since at 1 core, while we still
+// want this test to pass, we need a smaller value since there is so much thrashing
+// we would also use a less aggressive setting when running in valgrind
+const puts_per_thread = 500;
+const put_thread_count = 3;
+
+test "std.atomic.stack" {
+ var direct_allocator = std.heap.DirectAllocator.init();
+ defer direct_allocator.deinit();
+
+ var plenty_of_memory = try direct_allocator.allocator.alloc(u8, 300 * 1024);
+ defer direct_allocator.allocator.free(plenty_of_memory);
+
+ var fixed_buffer_allocator = std.heap.ThreadSafeFixedBufferAllocator.init(plenty_of_memory);
+ var a = &fixed_buffer_allocator.allocator;
+
+ var stack = Stack(i32).init();
+ var context = Context {
+ .allocator = a,
+ .stack = &stack,
+ .put_sum = 0,
+ .get_sum = 0,
+ .puts_done = 0,
+ .get_count = 0,
+ };
+
+ var putters: [put_thread_count]&std.os.Thread = undefined;
+ for (putters) |*t| {
+ *t = try std.os.spawnThread(&context, startPuts);
+ }
+ var getters: [put_thread_count]&std.os.Thread = undefined;
+ for (getters) |*t| {
+ *t = try std.os.spawnThread(&context, startGets);
+ }
+
+ for (putters) |t| t.wait();
+ _ = @atomicRmw(u8, &context.puts_done, builtin.AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst);
+ for (getters) |t| t.wait();
+
+ std.debug.assert(context.put_sum == context.get_sum);
+ std.debug.assert(context.get_count == puts_per_thread * put_thread_count);
+}
+
+fn startPuts(ctx: &Context) u8 {
+ var put_count: usize = puts_per_thread;
+ var r = std.rand.DefaultPrng.init(0xdeadbeef);
+ while (put_count != 0) : (put_count -= 1) {
+ std.os.time.sleep(0, 1); // let the os scheduler be our fuzz
+ const x = @bitCast(i32, r.random.scalar(u32));
+ const node = ctx.allocator.create(Stack(i32).Node) catch unreachable;
+ node.data = x;
+ ctx.stack.push(node);
+ _ = @atomicRmw(isize, &ctx.put_sum, builtin.AtomicRmwOp.Add, x, AtomicOrder.SeqCst);
+ }
+ return 0;
+}
+
+fn startGets(ctx: &Context) u8 {
+ while (true) {
+ while (ctx.stack.pop()) |node| {
+ std.os.time.sleep(0, 1); // let the os scheduler be our fuzz
+ _ = @atomicRmw(isize, &ctx.get_sum, builtin.AtomicRmwOp.Add, node.data, builtin.AtomicOrder.SeqCst);
+ _ = @atomicRmw(usize, &ctx.get_count, builtin.AtomicRmwOp.Add, 1, builtin.AtomicOrder.SeqCst);
+ }
+
+ if (@atomicLoad(u8, &ctx.puts_done, builtin.AtomicOrder.SeqCst) == 1) {
+ break;
+ }
+ }
+ return 0;
+}
diff --git a/std/buf_map.zig b/std/buf_map.zig
index a58df4b2db..57c5830bbe 100644
--- a/std/buf_map.zig
+++ b/std/buf_map.zig
@@ -1,6 +1,8 @@
-const HashMap = @import("hash_map.zig").HashMap;
-const mem = @import("mem.zig");
+const std = @import("index.zig");
+const HashMap = std.HashMap;
+const mem = std.mem;
const Allocator = mem.Allocator;
+const assert = std.debug.assert;
/// BufMap copies keys and values before they go into the map, and
/// frees them when they get removed.
@@ -16,10 +18,10 @@ pub const BufMap = struct {
return self;
}
- pub fn deinit(self: &BufMap) void {
+ pub fn deinit(self: &const BufMap) void {
var it = self.hash_map.iterator();
while (true) {
- const entry = it.next() ?? break;
+ const entry = it.next() ?? break;
self.free(entry.key);
self.free(entry.value);
}
@@ -28,21 +30,15 @@ pub const BufMap = struct {
}
pub fn set(self: &BufMap, key: []const u8, value: []const u8) !void {
- if (self.hash_map.get(key)) |entry| {
- const value_copy = try self.copy(value);
- errdefer self.free(value_copy);
- _ = try self.hash_map.put(key, value_copy);
- self.free(entry.value);
- } else {
- const key_copy = try self.copy(key);
- errdefer self.free(key_copy);
- const value_copy = try self.copy(value);
- errdefer self.free(value_copy);
- _ = try self.hash_map.put(key_copy, value_copy);
- }
+ self.delete(key);
+ const key_copy = try self.copy(key);
+ errdefer self.free(key_copy);
+ const value_copy = try self.copy(value);
+ errdefer self.free(value_copy);
+ _ = try self.hash_map.put(key_copy, value_copy);
}
- pub fn get(self: &BufMap, key: []const u8) ?[]const u8 {
+ pub fn get(self: &const BufMap, key: []const u8) ?[]const u8 {
const entry = self.hash_map.get(key) ?? return null;
return entry.value;
}
@@ -54,20 +50,41 @@ pub const BufMap = struct {
}
pub fn count(self: &const BufMap) usize {
- return self.hash_map.size;
+ return self.hash_map.count();
}
pub fn iterator(self: &const BufMap) BufMapHashMap.Iterator {
return self.hash_map.iterator();
}
- fn free(self: &BufMap, value: []const u8) void {
+ fn free(self: &const BufMap, value: []const u8) void {
self.hash_map.allocator.free(value);
}
- fn copy(self: &BufMap, value: []const u8) ![]const u8 {
- const result = try self.hash_map.allocator.alloc(u8, value.len);
- mem.copy(u8, result, value);
- return result;
+ fn copy(self: &const BufMap, value: []const u8) ![]const u8 {
+ return mem.dupe(self.hash_map.allocator, u8, value);
}
};
+
+test "BufMap" {
+ var direct_allocator = std.heap.DirectAllocator.init();
+ defer direct_allocator.deinit();
+
+ var bufmap = BufMap.init(&direct_allocator.allocator);
+ defer bufmap.deinit();
+
+ try bufmap.set("x", "1");
+ assert(mem.eql(u8, ??bufmap.get("x"), "1"));
+ assert(1 == bufmap.count());
+
+ try bufmap.set("x", "2");
+ assert(mem.eql(u8, ??bufmap.get("x"), "2"));
+ assert(1 == bufmap.count());
+
+ try bufmap.set("x", "3");
+ assert(mem.eql(u8, ??bufmap.get("x"), "3"));
+ assert(1 == bufmap.count());
+
+ bufmap.delete("x");
+ assert(0 == bufmap.count());
+}
diff --git a/std/buf_set.zig b/std/buf_set.zig
index 618b985c41..1badb5bf18 100644
--- a/std/buf_set.zig
+++ b/std/buf_set.zig
@@ -1,6 +1,8 @@
+const std = @import("index.zig");
const HashMap = @import("hash_map.zig").HashMap;
const mem = @import("mem.zig");
const Allocator = mem.Allocator;
+const assert = std.debug.assert;
pub const BufSet = struct {
hash_map: BufSetHashMap,
@@ -14,10 +16,10 @@ pub const BufSet = struct {
return self;
}
- pub fn deinit(self: &BufSet) void {
+ pub fn deinit(self: &const BufSet) void {
var it = self.hash_map.iterator();
while (true) {
- const entry = it.next() ?? break;
+ const entry = it.next() ?? break;
self.free(entry.key);
}
@@ -38,7 +40,7 @@ pub const BufSet = struct {
}
pub fn count(self: &const BufSet) usize {
- return self.hash_map.size;
+ return self.hash_map.count();
}
pub fn iterator(self: &const BufSet) BufSetHashMap.Iterator {
@@ -49,14 +51,30 @@ pub const BufSet = struct {
return self.hash_map.allocator;
}
- fn free(self: &BufSet, value: []const u8) void {
+ fn free(self: &const BufSet, value: []const u8) void {
self.hash_map.allocator.free(value);
}
- fn copy(self: &BufSet, value: []const u8) ![]const u8 {
+ fn copy(self: &const BufSet, value: []const u8) ![]const u8 {
const result = try self.hash_map.allocator.alloc(u8, value.len);
mem.copy(u8, result, value);
return result;
}
};
+test "BufSet" {
+ var direct_allocator = std.heap.DirectAllocator.init();
+ defer direct_allocator.deinit();
+
+ var bufset = BufSet.init(&direct_allocator.allocator);
+ defer bufset.deinit();
+
+ try bufset.put("x");
+ assert(bufset.count() == 1);
+ bufset.delete("x");
+ assert(bufset.count() == 0);
+
+ try bufset.put("x");
+ try bufset.put("y");
+ try bufset.put("z");
+}
diff --git a/std/buffer.zig b/std/buffer.zig
index e0892d5933..42fec7f988 100644
--- a/std/buffer.zig
+++ b/std/buffer.zig
@@ -66,7 +66,7 @@ pub const Buffer = struct {
self.list.deinit();
}
- pub fn toSlice(self: &Buffer) []u8 {
+ pub fn toSlice(self: &const Buffer) []u8 {
return self.list.toSlice()[0..self.len()];
}
@@ -99,26 +99,10 @@ pub const Buffer = struct {
mem.copy(u8, self.list.toSlice()[old_len..], m);
}
- // TODO: remove, use OutStream for this
- pub fn appendFormat(self: &Buffer, comptime format: []const u8, args: ...) !void {
- return fmt.format(self, append, format, args);
- }
-
- // TODO: remove, use OutStream for this
pub fn appendByte(self: &Buffer, byte: u8) !void {
- return self.appendByteNTimes(byte, 1);
- }
-
- // TODO: remove, use OutStream for this
- pub fn appendByteNTimes(self: &Buffer, byte: u8, count: usize) !void {
- var prev_size: usize = self.len();
- const new_size = prev_size + count;
- try self.resize(new_size);
-
- var i: usize = prev_size;
- while (i < new_size) : (i += 1) {
- self.list.items[i] = byte;
- }
+ const old_len = self.len();
+ try self.resize(old_len + 1);
+ self.list.toSlice()[old_len] = byte;
}
pub fn eql(self: &const Buffer, m: []const u8) bool {
@@ -154,7 +138,7 @@ test "simple Buffer" {
var buf = try Buffer.init(debug.global_allocator, "");
assert(buf.len() == 0);
try buf.append("hello");
- try buf.appendByte(' ');
+ try buf.append(" ");
try buf.append("world");
assert(buf.eql("hello world"));
assert(mem.eql(u8, cstr.toSliceConst(buf.toSliceConst().ptr), buf.toSliceConst()));
@@ -166,5 +150,5 @@ test "simple Buffer" {
assert(buf.endsWith("orld"));
try buf2.resize(4);
- assert(buf.startsWith(buf2.toSliceConst()));
+ assert(buf.startsWith(buf2.toSlice()));
}
diff --git a/std/build.zig b/std/build.zig
index a4d745e450..a312b28a6f 100644
--- a/std/build.zig
+++ b/std/build.zig
@@ -426,15 +426,18 @@ pub const Builder = struct {
const release_safe = self.option(bool, "release-safe", "optimizations on and safety on") ?? false;
const release_fast = self.option(bool, "release-fast", "optimizations on and safety off") ?? false;
+ const release_small = self.option(bool, "release-small", "size optimizations on and safety off") ?? false;
- const mode = if (release_safe and !release_fast)
+ const mode = if (release_safe and !release_fast and !release_small)
builtin.Mode.ReleaseSafe
- else if (release_fast and !release_safe)
+ else if (release_fast and !release_safe and !release_small)
builtin.Mode.ReleaseFast
- else if (!release_fast and !release_safe)
+ else if (release_small and !release_fast and !release_safe)
+ builtin.Mode.ReleaseSmall
+ else if (!release_fast and !release_safe and !release_small)
builtin.Mode.Debug
else x: {
- warn("Both -Drelease-safe and -Drelease-fast specified");
+ warn("Multiple release modes (of -Drelease-safe, -Drelease-fast and -Drelease-small)");
self.markInvalidUserInput();
break :x builtin.Mode.Debug;
};
@@ -1229,6 +1232,7 @@ pub const LibExeObjStep = struct {
builtin.Mode.Debug => {},
builtin.Mode.ReleaseSafe => zig_args.append("--release-safe") catch unreachable,
builtin.Mode.ReleaseFast => zig_args.append("--release-fast") catch unreachable,
+ builtin.Mode.ReleaseSmall => zig_args.append("--release-small") catch unreachable,
}
zig_args.append("--cache-dir") catch unreachable;
@@ -1369,7 +1373,7 @@ pub const LibExeObjStep = struct {
args.append("ssp-buffer-size=4") catch unreachable;
}
},
- builtin.Mode.ReleaseFast => {
+ builtin.Mode.ReleaseFast, builtin.Mode.ReleaseSmall => {
args.append("-O2") catch unreachable;
args.append("-fno-stack-protector") catch unreachable;
},
@@ -1706,6 +1710,7 @@ pub const TestStep = struct {
builtin.Mode.Debug => {},
builtin.Mode.ReleaseSafe => try zig_args.append("--release-safe"),
builtin.Mode.ReleaseFast => try zig_args.append("--release-fast"),
+ builtin.Mode.ReleaseSmall => try zig_args.append("--release-small"),
}
switch (self.target) {
diff --git a/std/c/darwin.zig b/std/c/darwin.zig
index aa49dfa3df..7ac57514c9 100644
--- a/std/c/darwin.zig
+++ b/std/c/darwin.zig
@@ -3,10 +3,28 @@ pub extern "c" fn _NSGetExecutablePath(buf: &u8, bufsize: &u32) c_int;
pub extern "c" fn __getdirentries64(fd: c_int, buf_ptr: &u8, buf_len: usize, basep: &i64) usize;
+pub extern "c" fn mach_absolute_time() u64;
+pub extern "c" fn mach_timebase_info(tinfo: ?&mach_timebase_info_data) void;
+
pub use @import("../os/darwin_errno.zig");
pub const _errno = __error;
+pub const timeval = extern struct {
+ tv_sec: isize,
+ tv_usec: isize,
+};
+
+pub const timezone = extern struct {
+ tz_minuteswest: i32,
+ tz_dsttime: i32,
+};
+
+pub const mach_timebase_info_data = struct {
+ numer: u32,
+ denom: u32,
+};
+
/// Renamed to Stat to not conflict with the stat function.
pub const Stat = extern struct {
dev: i32,
@@ -55,3 +73,16 @@ pub const dirent = extern struct {
d_type: u8,
d_name: u8, // field address is address of first byte of name
};
+
+pub const sockaddr = extern struct {
+ sa_len: u8,
+ sa_family: sa_family_t,
+ sa_data: [14]u8,
+};
+
+pub const sa_family_t = u8;
+
+pub const pthread_attr_t = extern struct {
+ __sig: c_long,
+ __opaque: [56]u8,
+};
diff --git a/std/c/index.zig b/std/c/index.zig
index 369ea2b358..34269d2aa2 100644
--- a/std/c/index.zig
+++ b/std/c/index.zig
@@ -28,6 +28,7 @@ pub extern "c" fn unlink(path: &const u8) c_int;
pub extern "c" fn getcwd(buf: &u8, size: usize) ?&u8;
pub extern "c" fn waitpid(pid: c_int, stat_loc: &c_int, options: c_int) c_int;
pub extern "c" fn fork() c_int;
+pub extern "c" fn access(path: &const u8, mode: c_uint) c_int;
pub extern "c" fn pipe(fds: &c_int) c_int;
pub extern "c" fn mkdir(path: &const u8, mode: c_uint) c_int;
pub extern "c" fn symlink(existing: &const u8, new: &const u8) c_int;
@@ -40,6 +41,7 @@ pub extern "c" fn dup2(old_fd: c_int, new_fd: c_int) c_int;
pub extern "c" fn readlink(noalias path: &const u8, noalias buf: &u8, bufsize: usize) isize;
pub extern "c" fn realpath(noalias file_name: &const u8, noalias resolved_name: &u8) ?&u8;
pub extern "c" fn sigprocmask(how: c_int, noalias set: &const sigset_t, noalias oset: ?&sigset_t) c_int;
+pub extern "c" fn gettimeofday(tv: ?&timeval, tz: ?&timezone) c_int;
pub extern "c" fn sigaction(sig: c_int, noalias act: &const Sigaction, noalias oact: ?&Sigaction) c_int;
pub extern "c" fn nanosleep(rqtp: &const timespec, rmtp: ?&timespec) c_int;
pub extern "c" fn setreuid(ruid: c_uint, euid: c_uint) c_int;
@@ -51,3 +53,13 @@ pub extern "c" fn malloc(usize) ?&c_void;
pub extern "c" fn realloc(&c_void, usize) ?&c_void;
pub extern "c" fn free(&c_void) void;
pub extern "c" fn posix_memalign(memptr: &&c_void, alignment: usize, size: usize) c_int;
+
+pub extern "pthread" fn pthread_create(noalias newthread: &pthread_t,
+ noalias attr: ?&const pthread_attr_t, start_routine: extern fn(?&c_void) ?&c_void,
+ noalias arg: ?&c_void) c_int;
+pub extern "pthread" fn pthread_attr_init(attr: &pthread_attr_t) c_int;
+pub extern "pthread" fn pthread_attr_setstack(attr: &pthread_attr_t, stackaddr: &c_void, stacksize: usize) c_int;
+pub extern "pthread" fn pthread_attr_destroy(attr: &pthread_attr_t) c_int;
+pub extern "pthread" fn pthread_join(thread: pthread_t, arg_return: ?&?&c_void) c_int;
+
+pub const pthread_t = &@OpaqueType();
diff --git a/std/c/linux.zig b/std/c/linux.zig
index b2ac05eba5..7810fec130 100644
--- a/std/c/linux.zig
+++ b/std/c/linux.zig
@@ -3,3 +3,8 @@ pub use @import("../os/linux/errno.zig");
pub extern "c" fn getrandom(buf_ptr: &u8, buf_len: usize, flags: c_uint) c_int;
extern "c" fn __errno_location() &c_int;
pub const _errno = __errno_location;
+
+pub const pthread_attr_t = extern struct {
+ __size: [56]u8,
+ __align: c_long,
+};
diff --git a/std/crypto/throughput_test.zig b/std/crypto/throughput_test.zig
index 60610411b5..0756f9a4eb 100644
--- a/std/crypto/throughput_test.zig
+++ b/std/crypto/throughput_test.zig
@@ -1,22 +1,17 @@
// Modify the HashFunction variable to the one wanted to test.
//
-// NOTE: The throughput measurement may be slightly lower than other measurements since we run
-// through our block alignment functions as well. Be aware when comparing against other tests.
-//
// ```
-// zig build-exe --release-fast --library c throughput_test.zig
+// zig build-exe --release-fast throughput_test.zig
// ./throughput_test
// ```
-const HashFunction = @import("md5.zig").Md5;
-const BytesToHash = 1024 * Mb;
const std = @import("std");
+const time = std.os.time;
+const Timer = time.Timer;
+const HashFunction = @import("md5.zig").Md5;
-const c = @cImport({
- @cInclude("time.h");
-});
-
-const Mb = 1024 * 1024;
+const MiB = 1024 * 1024;
+const BytesToHash = 1024 * MiB;
pub fn main() !void {
var stdout_file = try std.io.getStdOut();
@@ -29,15 +24,15 @@ pub fn main() !void {
var h = HashFunction.init();
var offset: usize = 0;
- const start = c.clock();
+ var timer = try Timer.start();
+ const start = timer.lap();
while (offset < BytesToHash) : (offset += block.len) {
h.update(block[0..]);
}
- const end = c.clock();
+ const end = timer.read();
- const elapsed_s = f64((end - start) * c.CLOCKS_PER_SEC) / 1000000;
+ const elapsed_s = f64(end - start) / time.ns_per_s;
const throughput = u64(BytesToHash / elapsed_s);
- try stdout.print("{}: ", @typeName(HashFunction));
- try stdout.print("{} Mb/s\n", throughput);
+ try stdout.print("{}: {} MiB/s\n", @typeName(HashFunction), throughput / (1 * MiB));
}
diff --git a/std/debug/index.zig b/std/debug/index.zig
index a573dc5549..9057f157de 100644
--- a/std/debug/index.zig
+++ b/std/debug/index.zig
@@ -38,7 +38,7 @@ pub fn getSelfDebugInfo() !&ElfStackTrace {
if (self_debug_info) |info| {
return info;
} else {
- const info = try openSelfDebugInfo(global_allocator);
+ const info = try openSelfDebugInfo(getDebugInfoAllocator());
self_debug_info = info;
return info;
}
@@ -51,7 +51,7 @@ pub fn dumpCurrentStackTrace(start_addr: ?usize) void {
stderr.print("Unable to dump stack trace: Unable to open debug info: {}\n", @errorName(err)) catch return;
return;
};
- writeCurrentStackTrace(stderr, global_allocator, debug_info, stderr_file.isTty(), start_addr) catch |err| {
+ writeCurrentStackTrace(stderr, getDebugInfoAllocator(), debug_info, stderr_file.isTty(), start_addr) catch |err| {
stderr.print("Unable to dump stack trace: {}\n", @errorName(err)) catch return;
return;
};
@@ -64,7 +64,7 @@ pub fn dumpStackTrace(stack_trace: &const builtin.StackTrace) void {
stderr.print("Unable to dump stack trace: Unable to open debug info: {}\n", @errorName(err)) catch return;
return;
};
- writeStackTrace(stack_trace, stderr, global_allocator, debug_info, stderr_file.isTty()) catch |err| {
+ writeStackTrace(stack_trace, stderr, getDebugInfoAllocator(), debug_info, stderr_file.isTty()) catch |err| {
stderr.print("Unable to dump stack trace: {}\n", @errorName(err)) catch return;
return;
};
@@ -592,8 +592,8 @@ fn getString(st: &ElfStackTrace, offset: u64) ![]u8 {
}
fn readAllocBytes(allocator: &mem.Allocator, in_stream: var, size: usize) ![]u8 {
- const buf = try global_allocator.alloc(u8, size);
- errdefer global_allocator.free(buf);
+ const buf = try allocator.alloc(u8, size);
+ errdefer allocator.free(buf);
if ((try in_stream.read(buf)) < size) return error.EndOfFile;
return buf;
}
@@ -1126,6 +1126,21 @@ fn readILeb128(in_stream: var) !i64 {
}
}
+/// This should only be used in temporary test programs.
pub const global_allocator = &global_fixed_allocator.allocator;
var global_fixed_allocator = std.heap.FixedBufferAllocator.init(global_allocator_mem[0..]);
var global_allocator_mem: [100 * 1024]u8 = undefined;
+
+
+// TODO make thread safe
+var debug_info_allocator: ?&mem.Allocator = null;
+var debug_info_direct_allocator: std.heap.DirectAllocator = undefined;
+var debug_info_arena_allocator: std.heap.ArenaAllocator = undefined;
+fn getDebugInfoAllocator() &mem.Allocator {
+ if (debug_info_allocator) |a| return a;
+
+ debug_info_direct_allocator = std.heap.DirectAllocator.init();
+ debug_info_arena_allocator = std.heap.ArenaAllocator.init(&debug_info_direct_allocator.allocator);
+ debug_info_allocator = &debug_info_arena_allocator.allocator;
+ return &debug_info_arena_allocator.allocator;
+}
diff --git a/std/elf.zig b/std/elf.zig
index 7e20fa000f..1764829bc8 100644
--- a/std/elf.zig
+++ b/std/elf.zig
@@ -7,6 +7,246 @@ const mem = std.mem;
const debug = std.debug;
const InStream = std.stream.InStream;
+pub const AT_NULL = 0;
+pub const AT_IGNORE = 1;
+pub const AT_EXECFD = 2;
+pub const AT_PHDR = 3;
+pub const AT_PHENT = 4;
+pub const AT_PHNUM = 5;
+pub const AT_PAGESZ = 6;
+pub const AT_BASE = 7;
+pub const AT_FLAGS = 8;
+pub const AT_ENTRY = 9;
+pub const AT_NOTELF = 10;
+pub const AT_UID = 11;
+pub const AT_EUID = 12;
+pub const AT_GID = 13;
+pub const AT_EGID = 14;
+pub const AT_CLKTCK = 17;
+pub const AT_PLATFORM = 15;
+pub const AT_HWCAP = 16;
+pub const AT_FPUCW = 18;
+pub const AT_DCACHEBSIZE = 19;
+pub const AT_ICACHEBSIZE = 20;
+pub const AT_UCACHEBSIZE = 21;
+pub const AT_IGNOREPPC = 22;
+pub const AT_SECURE = 23;
+pub const AT_BASE_PLATFORM = 24;
+pub const AT_RANDOM = 25;
+pub const AT_HWCAP2 = 26;
+pub const AT_EXECFN = 31;
+pub const AT_SYSINFO = 32;
+pub const AT_SYSINFO_EHDR = 33;
+pub const AT_L1I_CACHESHAPE = 34;
+pub const AT_L1D_CACHESHAPE = 35;
+pub const AT_L2_CACHESHAPE = 36;
+pub const AT_L3_CACHESHAPE = 37;
+pub const AT_L1I_CACHESIZE = 40;
+pub const AT_L1I_CACHEGEOMETRY = 41;
+pub const AT_L1D_CACHESIZE = 42;
+pub const AT_L1D_CACHEGEOMETRY = 43;
+pub const AT_L2_CACHESIZE = 44;
+pub const AT_L2_CACHEGEOMETRY = 45;
+pub const AT_L3_CACHESIZE = 46;
+pub const AT_L3_CACHEGEOMETRY = 47;
+
+pub const DT_NULL = 0;
+pub const DT_NEEDED = 1;
+pub const DT_PLTRELSZ = 2;
+pub const DT_PLTGOT = 3;
+pub const DT_HASH = 4;
+pub const DT_STRTAB = 5;
+pub const DT_SYMTAB = 6;
+pub const DT_RELA = 7;
+pub const DT_RELASZ = 8;
+pub const DT_RELAENT = 9;
+pub const DT_STRSZ = 10;
+pub const DT_SYMENT = 11;
+pub const DT_INIT = 12;
+pub const DT_FINI = 13;
+pub const DT_SONAME = 14;
+pub const DT_RPATH = 15;
+pub const DT_SYMBOLIC = 16;
+pub const DT_REL = 17;
+pub const DT_RELSZ = 18;
+pub const DT_RELENT = 19;
+pub const DT_PLTREL = 20;
+pub const DT_DEBUG = 21;
+pub const DT_TEXTREL = 22;
+pub const DT_JMPREL = 23;
+pub const DT_BIND_NOW = 24;
+pub const DT_INIT_ARRAY = 25;
+pub const DT_FINI_ARRAY = 26;
+pub const DT_INIT_ARRAYSZ = 27;
+pub const DT_FINI_ARRAYSZ = 28;
+pub const DT_RUNPATH = 29;
+pub const DT_FLAGS = 30;
+pub const DT_ENCODING = 32;
+pub const DT_PREINIT_ARRAY = 32;
+pub const DT_PREINIT_ARRAYSZ = 33;
+pub const DT_SYMTAB_SHNDX = 34;
+pub const DT_NUM = 35;
+pub const DT_LOOS = 0x6000000d;
+pub const DT_HIOS = 0x6ffff000;
+pub const DT_LOPROC = 0x70000000;
+pub const DT_HIPROC = 0x7fffffff;
+pub const DT_PROCNUM = DT_MIPS_NUM;
+
+pub const DT_VALRNGLO = 0x6ffffd00;
+pub const DT_GNU_PRELINKED = 0x6ffffdf5;
+pub const DT_GNU_CONFLICTSZ = 0x6ffffdf6;
+pub const DT_GNU_LIBLISTSZ = 0x6ffffdf7;
+pub const DT_CHECKSUM = 0x6ffffdf8;
+pub const DT_PLTPADSZ = 0x6ffffdf9;
+pub const DT_MOVEENT = 0x6ffffdfa;
+pub const DT_MOVESZ = 0x6ffffdfb;
+pub const DT_FEATURE_1 = 0x6ffffdfc;
+pub const DT_POSFLAG_1 = 0x6ffffdfd;
+
+pub const DT_SYMINSZ = 0x6ffffdfe;
+pub const DT_SYMINENT = 0x6ffffdff;
+pub const DT_VALRNGHI = 0x6ffffdff;
+pub const DT_VALNUM = 12;
+
+pub const DT_ADDRRNGLO = 0x6ffffe00;
+pub const DT_GNU_HASH = 0x6ffffef5;
+pub const DT_TLSDESC_PLT = 0x6ffffef6;
+pub const DT_TLSDESC_GOT = 0x6ffffef7;
+pub const DT_GNU_CONFLICT = 0x6ffffef8;
+pub const DT_GNU_LIBLIST = 0x6ffffef9;
+pub const DT_CONFIG = 0x6ffffefa;
+pub const DT_DEPAUDIT = 0x6ffffefb;
+pub const DT_AUDIT = 0x6ffffefc;
+pub const DT_PLTPAD = 0x6ffffefd;
+pub const DT_MOVETAB = 0x6ffffefe;
+pub const DT_SYMINFO = 0x6ffffeff;
+pub const DT_ADDRRNGHI = 0x6ffffeff;
+pub const DT_ADDRNUM = 11;
+
+
+pub const DT_VERSYM = 0x6ffffff0;
+
+pub const DT_RELACOUNT = 0x6ffffff9;
+pub const DT_RELCOUNT = 0x6ffffffa;
+
+
+pub const DT_FLAGS_1 = 0x6ffffffb;
+pub const DT_VERDEF = 0x6ffffffc;
+
+pub const DT_VERDEFNUM = 0x6ffffffd;
+pub const DT_VERNEED = 0x6ffffffe;
+
+pub const DT_VERNEEDNUM = 0x6fffffff;
+pub const DT_VERSIONTAGNUM = 16;
+
+
+
+pub const DT_AUXILIARY = 0x7ffffffd;
+pub const DT_FILTER = 0x7fffffff;
+pub const DT_EXTRANUM = 3;
+
+
+pub const DT_SPARC_REGISTER = 0x70000001;
+pub const DT_SPARC_NUM = 2;
+
+pub const DT_MIPS_RLD_VERSION = 0x70000001;
+pub const DT_MIPS_TIME_STAMP = 0x70000002;
+pub const DT_MIPS_ICHECKSUM = 0x70000003;
+pub const DT_MIPS_IVERSION = 0x70000004;
+pub const DT_MIPS_FLAGS = 0x70000005;
+pub const DT_MIPS_BASE_ADDRESS = 0x70000006;
+pub const DT_MIPS_MSYM = 0x70000007;
+pub const DT_MIPS_CONFLICT = 0x70000008;
+pub const DT_MIPS_LIBLIST = 0x70000009;
+pub const DT_MIPS_LOCAL_GOTNO = 0x7000000a;
+pub const DT_MIPS_CONFLICTNO = 0x7000000b;
+pub const DT_MIPS_LIBLISTNO = 0x70000010;
+pub const DT_MIPS_SYMTABNO = 0x70000011;
+pub const DT_MIPS_UNREFEXTNO = 0x70000012;
+pub const DT_MIPS_GOTSYM = 0x70000013;
+pub const DT_MIPS_HIPAGENO = 0x70000014;
+pub const DT_MIPS_RLD_MAP = 0x70000016;
+pub const DT_MIPS_DELTA_CLASS = 0x70000017;
+pub const DT_MIPS_DELTA_CLASS_NO = 0x70000018;
+
+pub const DT_MIPS_DELTA_INSTANCE = 0x70000019;
+pub const DT_MIPS_DELTA_INSTANCE_NO = 0x7000001a;
+
+pub const DT_MIPS_DELTA_RELOC = 0x7000001b;
+pub const DT_MIPS_DELTA_RELOC_NO = 0x7000001c;
+
+pub const DT_MIPS_DELTA_SYM = 0x7000001d;
+
+pub const DT_MIPS_DELTA_SYM_NO = 0x7000001e;
+
+pub const DT_MIPS_DELTA_CLASSSYM = 0x70000020;
+
+pub const DT_MIPS_DELTA_CLASSSYM_NO = 0x70000021;
+
+pub const DT_MIPS_CXX_FLAGS = 0x70000022;
+pub const DT_MIPS_PIXIE_INIT = 0x70000023;
+pub const DT_MIPS_SYMBOL_LIB = 0x70000024;
+pub const DT_MIPS_LOCALPAGE_GOTIDX = 0x70000025;
+pub const DT_MIPS_LOCAL_GOTIDX = 0x70000026;
+pub const DT_MIPS_HIDDEN_GOTIDX = 0x70000027;
+pub const DT_MIPS_PROTECTED_GOTIDX = 0x70000028;
+pub const DT_MIPS_OPTIONS = 0x70000029;
+pub const DT_MIPS_INTERFACE = 0x7000002a;
+pub const DT_MIPS_DYNSTR_ALIGN = 0x7000002b;
+pub const DT_MIPS_INTERFACE_SIZE = 0x7000002c;
+pub const DT_MIPS_RLD_TEXT_RESOLVE_ADDR = 0x7000002d;
+
+pub const DT_MIPS_PERF_SUFFIX = 0x7000002e;
+
+pub const DT_MIPS_COMPACT_SIZE = 0x7000002f;
+pub const DT_MIPS_GP_VALUE = 0x70000030;
+pub const DT_MIPS_AUX_DYNAMIC = 0x70000031;
+
+pub const DT_MIPS_PLTGOT = 0x70000032;
+
+pub const DT_MIPS_RWPLT = 0x70000034;
+pub const DT_MIPS_RLD_MAP_REL = 0x70000035;
+pub const DT_MIPS_NUM = 0x36;
+
+pub const DT_ALPHA_PLTRO = (DT_LOPROC + 0);
+pub const DT_ALPHA_NUM = 1;
+
+pub const DT_PPC_GOT = (DT_LOPROC + 0);
+pub const DT_PPC_OPT = (DT_LOPROC + 1);
+pub const DT_PPC_NUM = 2;
+
+pub const DT_PPC64_GLINK = (DT_LOPROC + 0);
+pub const DT_PPC64_OPD = (DT_LOPROC + 1);
+pub const DT_PPC64_OPDSZ = (DT_LOPROC + 2);
+pub const DT_PPC64_OPT = (DT_LOPROC + 3);
+pub const DT_PPC64_NUM = 4;
+
+pub const DT_IA_64_PLT_RESERVE = (DT_LOPROC + 0);
+pub const DT_IA_64_NUM = 1;
+
+pub const DT_NIOS2_GP = 0x70000002;
+
+pub const PT_NULL = 0;
+pub const PT_LOAD = 1;
+pub const PT_DYNAMIC = 2;
+pub const PT_INTERP = 3;
+pub const PT_NOTE = 4;
+pub const PT_SHLIB = 5;
+pub const PT_PHDR = 6;
+pub const PT_TLS = 7;
+pub const PT_NUM = 8;
+pub const PT_LOOS = 0x60000000;
+pub const PT_GNU_EH_FRAME = 0x6474e550;
+pub const PT_GNU_STACK = 0x6474e551;
+pub const PT_GNU_RELRO = 0x6474e552;
+pub const PT_LOSUNW = 0x6ffffffa;
+pub const PT_SUNWBSS = 0x6ffffffa;
+pub const PT_SUNWSTACK = 0x6ffffffb;
+pub const PT_HISUNW = 0x6fffffff;
+pub const PT_HIOS = 0x6fffffff;
+pub const PT_LOPROC = 0x70000000;
+pub const PT_HIPROC = 0x7fffffff;
+
pub const SHT_NULL = 0;
pub const SHT_PROGBITS = 1;
pub const SHT_SYMTAB = 2;
@@ -31,6 +271,45 @@ pub const SHT_HIPROC = 0x7fffffff;
pub const SHT_LOUSER = 0x80000000;
pub const SHT_HIUSER = 0xffffffff;
+pub const STB_LOCAL = 0;
+pub const STB_GLOBAL = 1;
+pub const STB_WEAK = 2;
+pub const STB_NUM = 3;
+pub const STB_LOOS = 10;
+pub const STB_GNU_UNIQUE = 10;
+pub const STB_HIOS = 12;
+pub const STB_LOPROC = 13;
+pub const STB_HIPROC = 15;
+
+pub const STB_MIPS_SPLIT_COMMON = 13;
+
+pub const STT_NOTYPE = 0;
+pub const STT_OBJECT = 1;
+pub const STT_FUNC = 2;
+pub const STT_SECTION = 3;
+pub const STT_FILE = 4;
+pub const STT_COMMON = 5;
+pub const STT_TLS = 6;
+pub const STT_NUM = 7;
+pub const STT_LOOS = 10;
+pub const STT_GNU_IFUNC = 10;
+pub const STT_HIOS = 12;
+pub const STT_LOPROC = 13;
+pub const STT_HIPROC = 15;
+
+pub const STT_SPARC_REGISTER = 13;
+
+pub const STT_PARISC_MILLICODE = 13;
+
+pub const STT_HP_OPAQUE = (STT_LOOS + 0x1);
+pub const STT_HP_STUB = (STT_LOOS + 0x2);
+
+pub const STT_ARM_TFUNC = STT_LOPROC;
+pub const STT_ARM_16BIT = STT_HIPROC;
+
+pub const VER_FLG_BASE = 0x1;
+pub const VER_FLG_WEAK = 0x2;
+
pub const FileType = enum {
Relocatable,
Executable,
@@ -266,3 +545,335 @@ pub const Elf = struct {
try elf.in_file.seekTo(elf_section.offset);
}
};
+
+pub const EI_NIDENT = 16;
+pub const Elf32_Half = u16;
+pub const Elf64_Half = u16;
+pub const Elf32_Word = u32;
+pub const Elf32_Sword = i32;
+pub const Elf64_Word = u32;
+pub const Elf64_Sword = i32;
+pub const Elf32_Xword = u64;
+pub const Elf32_Sxword = i64;
+pub const Elf64_Xword = u64;
+pub const Elf64_Sxword = i64;
+pub const Elf32_Addr = u32;
+pub const Elf64_Addr = u64;
+pub const Elf32_Off = u32;
+pub const Elf64_Off = u64;
+pub const Elf32_Section = u16;
+pub const Elf64_Section = u16;
+pub const Elf32_Versym = Elf32_Half;
+pub const Elf64_Versym = Elf64_Half;
+pub const Elf32_Ehdr = extern struct {
+ e_ident: [EI_NIDENT]u8,
+ e_type: Elf32_Half,
+ e_machine: Elf32_Half,
+ e_version: Elf32_Word,
+ e_entry: Elf32_Addr,
+ e_phoff: Elf32_Off,
+ e_shoff: Elf32_Off,
+ e_flags: Elf32_Word,
+ e_ehsize: Elf32_Half,
+ e_phentsize: Elf32_Half,
+ e_phnum: Elf32_Half,
+ e_shentsize: Elf32_Half,
+ e_shnum: Elf32_Half,
+ e_shstrndx: Elf32_Half,
+};
+pub const Elf64_Ehdr = extern struct {
+ e_ident: [EI_NIDENT]u8,
+ e_type: Elf64_Half,
+ e_machine: Elf64_Half,
+ e_version: Elf64_Word,
+ e_entry: Elf64_Addr,
+ e_phoff: Elf64_Off,
+ e_shoff: Elf64_Off,
+ e_flags: Elf64_Word,
+ e_ehsize: Elf64_Half,
+ e_phentsize: Elf64_Half,
+ e_phnum: Elf64_Half,
+ e_shentsize: Elf64_Half,
+ e_shnum: Elf64_Half,
+ e_shstrndx: Elf64_Half,
+};
+pub const Elf32_Shdr = extern struct {
+ sh_name: Elf32_Word,
+ sh_type: Elf32_Word,
+ sh_flags: Elf32_Word,
+ sh_addr: Elf32_Addr,
+ sh_offset: Elf32_Off,
+ sh_size: Elf32_Word,
+ sh_link: Elf32_Word,
+ sh_info: Elf32_Word,
+ sh_addralign: Elf32_Word,
+ sh_entsize: Elf32_Word,
+};
+pub const Elf64_Shdr = extern struct {
+ sh_name: Elf64_Word,
+ sh_type: Elf64_Word,
+ sh_flags: Elf64_Xword,
+ sh_addr: Elf64_Addr,
+ sh_offset: Elf64_Off,
+ sh_size: Elf64_Xword,
+ sh_link: Elf64_Word,
+ sh_info: Elf64_Word,
+ sh_addralign: Elf64_Xword,
+ sh_entsize: Elf64_Xword,
+};
+pub const Elf32_Chdr = extern struct {
+ ch_type: Elf32_Word,
+ ch_size: Elf32_Word,
+ ch_addralign: Elf32_Word,
+};
+pub const Elf64_Chdr = extern struct {
+ ch_type: Elf64_Word,
+ ch_reserved: Elf64_Word,
+ ch_size: Elf64_Xword,
+ ch_addralign: Elf64_Xword,
+};
+pub const Elf32_Sym = extern struct {
+ st_name: Elf32_Word,
+ st_value: Elf32_Addr,
+ st_size: Elf32_Word,
+ st_info: u8,
+ st_other: u8,
+ st_shndx: Elf32_Section,
+};
+pub const Elf64_Sym = extern struct {
+ st_name: Elf64_Word,
+ st_info: u8,
+ st_other: u8,
+ st_shndx: Elf64_Section,
+ st_value: Elf64_Addr,
+ st_size: Elf64_Xword,
+};
+pub const Elf32_Syminfo = extern struct {
+ si_boundto: Elf32_Half,
+ si_flags: Elf32_Half,
+};
+pub const Elf64_Syminfo = extern struct {
+ si_boundto: Elf64_Half,
+ si_flags: Elf64_Half,
+};
+pub const Elf32_Rel = extern struct {
+ r_offset: Elf32_Addr,
+ r_info: Elf32_Word,
+};
+pub const Elf64_Rel = extern struct {
+ r_offset: Elf64_Addr,
+ r_info: Elf64_Xword,
+};
+pub const Elf32_Rela = extern struct {
+ r_offset: Elf32_Addr,
+ r_info: Elf32_Word,
+ r_addend: Elf32_Sword,
+};
+pub const Elf64_Rela = extern struct {
+ r_offset: Elf64_Addr,
+ r_info: Elf64_Xword,
+ r_addend: Elf64_Sxword,
+};
+pub const Elf32_Phdr = extern struct {
+ p_type: Elf32_Word,
+ p_offset: Elf32_Off,
+ p_vaddr: Elf32_Addr,
+ p_paddr: Elf32_Addr,
+ p_filesz: Elf32_Word,
+ p_memsz: Elf32_Word,
+ p_flags: Elf32_Word,
+ p_align: Elf32_Word,
+};
+pub const Elf64_Phdr = extern struct {
+ p_type: Elf64_Word,
+ p_flags: Elf64_Word,
+ p_offset: Elf64_Off,
+ p_vaddr: Elf64_Addr,
+ p_paddr: Elf64_Addr,
+ p_filesz: Elf64_Xword,
+ p_memsz: Elf64_Xword,
+ p_align: Elf64_Xword,
+};
+pub const Elf32_Dyn = extern struct {
+ d_tag: Elf32_Sword,
+ d_un: extern union {
+ d_val: Elf32_Word,
+ d_ptr: Elf32_Addr,
+ },
+};
+pub const Elf64_Dyn = extern struct {
+ d_tag: Elf64_Sxword,
+ d_un: extern union {
+ d_val: Elf64_Xword,
+ d_ptr: Elf64_Addr,
+ },
+};
+pub const Elf32_Verdef = extern struct {
+ vd_version: Elf32_Half,
+ vd_flags: Elf32_Half,
+ vd_ndx: Elf32_Half,
+ vd_cnt: Elf32_Half,
+ vd_hash: Elf32_Word,
+ vd_aux: Elf32_Word,
+ vd_next: Elf32_Word,
+};
+pub const Elf64_Verdef = extern struct {
+ vd_version: Elf64_Half,
+ vd_flags: Elf64_Half,
+ vd_ndx: Elf64_Half,
+ vd_cnt: Elf64_Half,
+ vd_hash: Elf64_Word,
+ vd_aux: Elf64_Word,
+ vd_next: Elf64_Word,
+};
+pub const Elf32_Verdaux = extern struct {
+ vda_name: Elf32_Word,
+ vda_next: Elf32_Word,
+};
+pub const Elf64_Verdaux = extern struct {
+ vda_name: Elf64_Word,
+ vda_next: Elf64_Word,
+};
+pub const Elf32_Verneed = extern struct {
+ vn_version: Elf32_Half,
+ vn_cnt: Elf32_Half,
+ vn_file: Elf32_Word,
+ vn_aux: Elf32_Word,
+ vn_next: Elf32_Word,
+};
+pub const Elf64_Verneed = extern struct {
+ vn_version: Elf64_Half,
+ vn_cnt: Elf64_Half,
+ vn_file: Elf64_Word,
+ vn_aux: Elf64_Word,
+ vn_next: Elf64_Word,
+};
+pub const Elf32_Vernaux = extern struct {
+ vna_hash: Elf32_Word,
+ vna_flags: Elf32_Half,
+ vna_other: Elf32_Half,
+ vna_name: Elf32_Word,
+ vna_next: Elf32_Word,
+};
+pub const Elf64_Vernaux = extern struct {
+ vna_hash: Elf64_Word,
+ vna_flags: Elf64_Half,
+ vna_other: Elf64_Half,
+ vna_name: Elf64_Word,
+ vna_next: Elf64_Word,
+};
+pub const Elf32_auxv_t = extern struct {
+ a_type: u32,
+ a_un: extern union {
+ a_val: u32,
+ },
+};
+pub const Elf64_auxv_t = extern struct {
+ a_type: u64,
+ a_un: extern union {
+ a_val: u64,
+ },
+};
+pub const Elf32_Nhdr = extern struct {
+ n_namesz: Elf32_Word,
+ n_descsz: Elf32_Word,
+ n_type: Elf32_Word,
+};
+pub const Elf64_Nhdr = extern struct {
+ n_namesz: Elf64_Word,
+ n_descsz: Elf64_Word,
+ n_type: Elf64_Word,
+};
+pub const Elf32_Move = extern struct {
+ m_value: Elf32_Xword,
+ m_info: Elf32_Word,
+ m_poffset: Elf32_Word,
+ m_repeat: Elf32_Half,
+ m_stride: Elf32_Half,
+};
+pub const Elf64_Move = extern struct {
+ m_value: Elf64_Xword,
+ m_info: Elf64_Xword,
+ m_poffset: Elf64_Xword,
+ m_repeat: Elf64_Half,
+ m_stride: Elf64_Half,
+};
+pub const Elf32_gptab = extern union {
+ gt_header: extern struct {
+ gt_current_g_value: Elf32_Word,
+ gt_unused: Elf32_Word,
+ },
+ gt_entry: extern struct {
+ gt_g_value: Elf32_Word,
+ gt_bytes: Elf32_Word,
+ },
+};
+pub const Elf32_RegInfo = extern struct {
+ ri_gprmask: Elf32_Word,
+ ri_cprmask: [4]Elf32_Word,
+ ri_gp_value: Elf32_Sword,
+};
+pub const Elf_Options = extern struct {
+ kind: u8,
+ size: u8,
+ @"section": Elf32_Section,
+ info: Elf32_Word,
+};
+pub const Elf_Options_Hw = extern struct {
+ hwp_flags1: Elf32_Word,
+ hwp_flags2: Elf32_Word,
+};
+pub const Elf32_Lib = extern struct {
+ l_name: Elf32_Word,
+ l_time_stamp: Elf32_Word,
+ l_checksum: Elf32_Word,
+ l_version: Elf32_Word,
+ l_flags: Elf32_Word,
+};
+pub const Elf64_Lib = extern struct {
+ l_name: Elf64_Word,
+ l_time_stamp: Elf64_Word,
+ l_checksum: Elf64_Word,
+ l_version: Elf64_Word,
+ l_flags: Elf64_Word,
+};
+pub const Elf32_Conflict = Elf32_Addr;
+pub const Elf_MIPS_ABIFlags_v0 = extern struct {
+ version: Elf32_Half,
+ isa_level: u8,
+ isa_rev: u8,
+ gpr_size: u8,
+ cpr1_size: u8,
+ cpr2_size: u8,
+ fp_abi: u8,
+ isa_ext: Elf32_Word,
+ ases: Elf32_Word,
+ flags1: Elf32_Word,
+ flags2: Elf32_Word,
+};
+
+pub const Ehdr = switch(@sizeOf(usize)) {
+ 4 => Elf32_Ehdr,
+ 8 => Elf64_Ehdr,
+ else => @compileError("expected pointer size of 32 or 64"),
+};
+pub const Phdr = switch(@sizeOf(usize)) {
+ 4 => Elf32_Phdr,
+ 8 => Elf64_Phdr,
+ else => @compileError("expected pointer size of 32 or 64"),
+};
+pub const Sym = switch(@sizeOf(usize)) {
+ 4 => Elf32_Sym,
+ 8 => Elf64_Sym,
+ else => @compileError("expected pointer size of 32 or 64"),
+};
+pub const Verdef = switch(@sizeOf(usize)) {
+ 4 => Elf32_Verdef,
+ 8 => Elf64_Verdef,
+ else => @compileError("expected pointer size of 32 or 64"),
+};
+pub const Verdaux = switch(@sizeOf(usize)) {
+ 4 => Elf32_Verdaux,
+ 8 => Elf64_Verdaux,
+ else => @compileError("expected pointer size of 32 or 64"),
+};
diff --git a/std/endian.zig b/std/endian.zig
deleted file mode 100644
index 121505d24d..0000000000
--- a/std/endian.zig
+++ /dev/null
@@ -1,25 +0,0 @@
-const mem = @import("mem.zig");
-const builtin = @import("builtin");
-
-pub fn swapIfLe(comptime T: type, x: T) T {
- return swapIf(builtin.Endian.Little, T, x);
-}
-
-pub fn swapIfBe(comptime T: type, x: T) T {
- return swapIf(builtin.Endian.Big, T, x);
-}
-
-pub fn swapIf(endian: builtin.Endian, comptime T: type, x: T) T {
- return if (builtin.endian == endian) swap(T, x) else x;
-}
-
-pub fn swap(comptime T: type, x: T) T {
- var buf: [@sizeOf(T)]u8 = undefined;
- mem.writeInt(buf[0..], x, builtin.Endian.Little);
- return mem.readInt(buf, T, builtin.Endian.Big);
-}
-
-test "swap" {
- const debug = @import("debug/index.zig");
- debug.assert(swap(u32, 0xDEADBEEF) == 0xEFBEADDE);
-}
diff --git a/std/event.zig b/std/event.zig
new file mode 100644
index 0000000000..bdad7fcc18
--- /dev/null
+++ b/std/event.zig
@@ -0,0 +1,235 @@
+const std = @import("index.zig");
+const builtin = @import("builtin");
+const assert = std.debug.assert;
+const event = this;
+const mem = std.mem;
+const posix = std.os.posix;
+
+pub const TcpServer = struct {
+ handleRequestFn: async<&mem.Allocator> fn (&TcpServer, &const std.net.Address, &const std.os.File) void,
+
+ loop: &Loop,
+ sockfd: i32,
+ accept_coro: ?promise,
+ listen_address: std.net.Address,
+
+ waiting_for_emfile_node: PromiseNode,
+
+ const PromiseNode = std.LinkedList(promise).Node;
+
+ pub fn init(loop: &Loop) !TcpServer {
+ const sockfd = try std.os.posixSocket(posix.AF_INET,
+ posix.SOCK_STREAM|posix.SOCK_CLOEXEC|posix.SOCK_NONBLOCK,
+ posix.PROTO_tcp);
+ errdefer std.os.close(sockfd);
+
+ // TODO can't initialize handler coroutine here because we need well defined copy elision
+ return TcpServer {
+ .loop = loop,
+ .sockfd = sockfd,
+ .accept_coro = null,
+ .handleRequestFn = undefined,
+ .waiting_for_emfile_node = undefined,
+ .listen_address = undefined,
+ };
+ }
+
+ pub fn listen(self: &TcpServer, address: &const std.net.Address,
+ handleRequestFn: async<&mem.Allocator> fn (&TcpServer, &const std.net.Address, &const std.os.File)void) !void
+ {
+ self.handleRequestFn = handleRequestFn;
+
+ try std.os.posixBind(self.sockfd, &address.os_addr);
+ try std.os.posixListen(self.sockfd, posix.SOMAXCONN);
+ self.listen_address = std.net.Address.initPosix(try std.os.posixGetSockName(self.sockfd));
+
+ self.accept_coro = try async<self.loop.allocator> TcpServer.handler(self);
+ errdefer cancel ??self.accept_coro;
+
+ try self.loop.addFd(self.sockfd, ??self.accept_coro);
+ errdefer self.loop.removeFd(self.sockfd);
+
+ }
+
+ pub fn deinit(self: &TcpServer) void {
+ self.loop.removeFd(self.sockfd);
+ if (self.accept_coro) |accept_coro| cancel accept_coro;
+ std.os.close(self.sockfd);
+ }
+
+ pub async fn handler(self: &TcpServer) void {
+ while (true) {
+ var accepted_addr: std.net.Address = undefined;
+ if (std.os.posixAccept(self.sockfd, &accepted_addr.os_addr,
+ posix.SOCK_NONBLOCK | posix.SOCK_CLOEXEC)) |accepted_fd|
+ {
+ var socket = std.os.File.openHandle(accepted_fd);
+ _ = async<self.loop.allocator> self.handleRequestFn(self, accepted_addr, socket) catch |err| switch (err) {
+ error.OutOfMemory => {
+ socket.close();
+ continue;
+ },
+ };
+ } else |err| switch (err) {
+ error.WouldBlock => {
+ suspend; // we will get resumed by epoll_wait in the event loop
+ continue;
+ },
+ error.ProcessFdQuotaExceeded => {
+ errdefer std.os.emfile_promise_queue.remove(&self.waiting_for_emfile_node);
+ suspend |p| {
+ self.waiting_for_emfile_node = PromiseNode.init(p);
+ std.os.emfile_promise_queue.append(&self.waiting_for_emfile_node);
+ }
+ continue;
+ },
+ error.ConnectionAborted,
+ error.FileDescriptorClosed => continue,
+
+ error.PageFault => unreachable,
+ error.InvalidSyscall => unreachable,
+ error.FileDescriptorNotASocket => unreachable,
+ error.OperationNotSupported => unreachable,
+
+ error.SystemFdQuotaExceeded,
+ error.SystemResources,
+ error.ProtocolFailure,
+ error.BlockedByFirewall,
+ error.Unexpected => {
+ @panic("TODO handle this error");
+ },
+ }
+ }
+ }
+};
+
+pub const Loop = struct {
+ allocator: &mem.Allocator,
+ epollfd: i32,
+ keep_running: bool,
+
+ fn init(allocator: &mem.Allocator) !Loop {
+ const epollfd = try std.os.linuxEpollCreate(std.os.linux.EPOLL_CLOEXEC);
+ return Loop {
+ .keep_running = true,
+ .allocator = allocator,
+ .epollfd = epollfd,
+ };
+ }
+
+ pub fn addFd(self: &Loop, fd: i32, prom: promise) !void {
+ var ev = std.os.linux.epoll_event {
+ .events = std.os.linux.EPOLLIN|std.os.linux.EPOLLOUT|std.os.linux.EPOLLET,
+ .data = std.os.linux.epoll_data {
+ .ptr = @ptrToInt(prom),
+ },
+ };
+ try std.os.linuxEpollCtl(self.epollfd, std.os.linux.EPOLL_CTL_ADD, fd, &ev);
+ }
+
+ pub fn removeFd(self: &Loop, fd: i32) void {
+ std.os.linuxEpollCtl(self.epollfd, std.os.linux.EPOLL_CTL_DEL, fd, undefined) catch {};
+ }
+
+ async fn waitFd(self: &Loop, fd: i32) !void {
+ defer self.removeFd(fd);
+ suspend |p| {
+ try self.addFd(fd, p);
+ }
+ }
+
+ pub fn stop(self: &Loop) void {
+ // TODO make atomic
+ self.keep_running = false;
+ // TODO activate an fd in the epoll set
+ }
+
+ pub fn run(self: &Loop) void {
+ while (self.keep_running) {
+ var events: [16]std.os.linux.epoll_event = undefined;
+ const count = std.os.linuxEpollWait(self.epollfd, events[0..], -1);
+ for (events[0..count]) |ev| {
+ const p = @intToPtr(promise, ev.data.ptr);
+ resume p;
+ }
+ }
+ }
+};
+
+pub async fn connect(loop: &Loop, _address: &const std.net.Address) !std.os.File {
+ var address = *_address; // TODO https://github.com/zig-lang/zig/issues/733
+
+ const sockfd = try std.os.posixSocket(posix.AF_INET, posix.SOCK_STREAM|posix.SOCK_CLOEXEC|posix.SOCK_NONBLOCK, posix.PROTO_tcp);
+ errdefer std.os.close(sockfd);
+
+ try std.os.posixConnectAsync(sockfd, &address.os_addr);
+ try await try async loop.waitFd(sockfd);
+ try std.os.posixGetSockOptConnectError(sockfd);
+
+ return std.os.File.openHandle(sockfd);
+}
+
+test "listen on a port, send bytes, receive bytes" {
+ if (builtin.os != builtin.Os.linux) {
+ // TODO build abstractions for other operating systems
+ return;
+ }
+ const MyServer = struct {
+ tcp_server: TcpServer,
+
+ const Self = this;
+
+ async<&mem.Allocator> fn handler(tcp_server: &TcpServer, _addr: &const std.net.Address,
+ _socket: &const std.os.File) void
+ {
+ const self = @fieldParentPtr(Self, "tcp_server", tcp_server);
+ var socket = *_socket; // TODO https://github.com/zig-lang/zig/issues/733
+ defer socket.close();
+ const next_handler = async errorableHandler(self, _addr, socket) catch |err| switch (err) {
+ error.OutOfMemory => @panic("unable to handle connection: out of memory"),
+ };
+ (await next_handler) catch |err| {
+ std.debug.panic("unable to handle connection: {}\n", err);
+ };
+ suspend |p| { cancel p; }
+ }
+
+ async fn errorableHandler(self: &Self, _addr: &const std.net.Address,
+ _socket: &const std.os.File) !void
+ {
+ const addr = *_addr; // TODO https://github.com/zig-lang/zig/issues/733
+ var socket = *_socket; // TODO https://github.com/zig-lang/zig/issues/733
+
+ var adapter = std.io.FileOutStream.init(&socket);
+ var stream = &adapter.stream;
+ try stream.print("hello from server\n");
+ }
+ };
+
+ const ip4addr = std.net.parseIp4("127.0.0.1") catch unreachable;
+ const addr = std.net.Address.initIp4(ip4addr, 0);
+
+ var loop = try Loop.init(std.debug.global_allocator);
+ var server = MyServer {
+ .tcp_server = try TcpServer.init(&loop),
+ };
+ defer server.tcp_server.deinit();
+ try server.tcp_server.listen(addr, MyServer.handler);
+
+ const p = try async<std.debug.global_allocator> doAsyncTest(&loop, server.tcp_server.listen_address);
+ defer cancel p;
+ loop.run();
+}
+
+async fn doAsyncTest(loop: &Loop, address: &const std.net.Address) void {
+ errdefer @panic("test failure");
+
+ var socket_file = try await try async event.connect(loop, address);
+ defer socket_file.close();
+
+ var buf: [512]u8 = undefined;
+ const amt_read = try socket_file.read(buf[0..]);
+ const msg = buf[0..amt_read];
+ assert(mem.eql(u8, msg, "hello from server\n"));
+ loop.stop();
+}
diff --git a/std/fmt/errol/index.zig b/std/fmt/errol/index.zig
index 42287bd25b..00c69cd294 100644
--- a/std/fmt/errol/index.zig
+++ b/std/fmt/errol/index.zig
@@ -12,13 +12,79 @@ pub const FloatDecimal = struct {
exp: i32,
};
+pub const RoundMode = enum {
+ // Round only the fractional portion (e.g. 1234.23 has precision 2)
+ Decimal,
+ // Round the entire whole/fractional portion (e.g. 1.23423e3 has precision 5)
+ Scientific,
+};
+
+/// Round a FloatDecimal as returned by errol3 to the specified fractional precision.
+/// All digits after the specified precision should be considered invalid.
+pub fn roundToPrecision(float_decimal: &FloatDecimal, precision: usize, mode: RoundMode) void {
+ // The round digit refers to the index which we should look at to determine
+ // whether we need to round to match the specified precision.
+ var round_digit: usize = 0;
+
+ switch (mode) {
+ RoundMode.Decimal => {
+ if (float_decimal.exp >= 0) {
+ round_digit = precision + usize(float_decimal.exp);
+ } else {
+ // if a small negative exp, then adjust we need to offset by the number
+ // of leading zeros that will occur.
+ const min_exp_required = usize(-float_decimal.exp);
+ if (precision > min_exp_required) {
+ round_digit = precision - min_exp_required;
+ }
+ }
+ },
+ RoundMode.Scientific => {
+ round_digit = 1 + precision;
+ },
+ }
+
+ // It suffices to look at just this digit. We don't round and propagate say 0.04999 to 0.05
+ // first, and then to 0.1 in the case of a {.1} single precision.
+
+ // Find the digit which will signify the round point and start rounding backwards.
+ if (round_digit < float_decimal.digits.len and float_decimal.digits[round_digit] - '0' >= 5) {
+ assert(round_digit >= 0);
+
+ var i = round_digit;
+ while (true) {
+ if (i == 0) {
+ // Rounded all the way past the start. This was of the form 9.999...
+ // Slot the new digit in place and increase the exponent.
+ float_decimal.exp += 1;
+
+ // Re-size the buffer to use the reserved leading byte.
+ const one_before = @intToPtr(&u8, @ptrToInt(&float_decimal.digits[0]) - 1);
+ float_decimal.digits = one_before[0..float_decimal.digits.len + 1];
+ float_decimal.digits[0] = '1';
+ return;
+ }
+
+ i -= 1;
+
+ const new_value = (float_decimal.digits[i] - '0' + 1) % 10;
+ float_decimal.digits[i] = new_value + '0';
+
+ // must continue rounding until non-9
+ if (new_value != 0) {
+ return;
+ }
+ }
+ }
+}
+
/// Corrected Errol3 double to ASCII conversion.
pub fn errol3(value: f64, buffer: []u8) FloatDecimal {
const bits = @bitCast(u64, value);
const i = tableLowerBound(bits);
if (i < enum3.len and enum3[i] == bits) {
const data = enum3_data[i];
- const digits = buffer[0..data.str.len];
+ const digits = buffer[1..data.str.len + 1];
mem.copy(u8, digits, data.str);
return FloatDecimal {
.digits = digits,
@@ -98,7 +164,11 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
}
// digit generation
- var buf_index: usize = 0;
+
+ // We generate digits starting at index 1. If rounding a buffer later then it may be
+ // required to generate a preceeding digit in some cases (9.999) in which case we use
+ // the 0-index for this extra digit.
+ var buf_index: usize = 1;
while (true) {
var hdig = u8(math.floor(high.val));
if ((high.val == f64(hdig)) and (high.off < 0))
@@ -128,7 +198,7 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
buf_index += 1;
return FloatDecimal {
- .digits = buffer[0..buf_index],
+ .digits = buffer[1..buf_index],
.exp = exp,
};
}
@@ -189,6 +259,9 @@ fn gethi(in: f64) f64 {
/// Normalize the number by factoring in the error.
/// @hp: The float pair.
fn hpNormalize(hp: &HP) void {
+ // Required to avoid segfaults causing buffer overrun during errol3 digit output termination.
+ @setFloatMode(this, @import("builtin").FloatMode.Strict);
+
const val = hp.val;
hp.val += hp.off;
diff --git a/std/fmt/index.zig b/std/fmt/index.zig
index bd5b5710e0..43e758038f 100644
--- a/std/fmt/index.zig
+++ b/std/fmt/index.zig
@@ -4,29 +4,31 @@ const debug = std.debug;
const assert = debug.assert;
const mem = std.mem;
const builtin = @import("builtin");
-const errol3 = @import("errol/index.zig").errol3;
+const errol = @import("errol/index.zig");
const max_int_digits = 65;
-const State = enum { // TODO put inside format function and make sure the name and debug info is correct
- Start,
- OpenBrace,
- CloseBrace,
- Integer,
- IntegerWidth,
- Float,
- FloatWidth,
- Character,
- Buf,
- BufWidth,
-};
-
/// Renders fmt string with args, calling output with slices of bytes.
/// If `output` returns an error, the error is returned from `format` and
/// `output` is not called again.
pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8) Errors!void,
comptime fmt: []const u8, args: ...) Errors!void
{
+ const State = enum {
+ Start,
+ OpenBrace,
+ CloseBrace,
+ Integer,
+ IntegerWidth,
+ Float,
+ FloatWidth,
+ FloatScientific,
+ FloatScientificWidth,
+ Character,
+ Buf,
+ BufWidth,
+ };
+
comptime var start_index = 0;
comptime var state = State.Start;
comptime var next_arg = 0;
@@ -86,7 +88,11 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
},
's' => {
state = State.Buf;
- },'.' => {
+ },
+ 'e' => {
+ state = State.FloatScientific;
+ },
+ '.' => {
state = State.Float;
},
else => @compileError("Unknown format character: " ++ []u8{c}),
@@ -132,9 +138,33 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
'0' ... '9' => {},
else => @compileError("Unexpected character in format string: " ++ []u8{c}),
},
+ State.FloatScientific => switch (c) {
+ '}' => {
+ try formatFloatScientific(args[next_arg], null, context, Errors, output);
+ next_arg += 1;
+ state = State.Start;
+ start_index = i + 1;
+ },
+ '0' ... '9' => {
+ width_start = i;
+ state = State.FloatScientificWidth;
+ },
+ else => @compileError("Unexpected character in format string: " ++ []u8{c}),
+ },
+ State.FloatScientificWidth => switch (c) {
+ '}' => {
+ width = comptime (parseUnsigned(usize, fmt[width_start..i], 10) catch unreachable);
+ try formatFloatScientific(args[next_arg], width, context, Errors, output);
+ next_arg += 1;
+ state = State.Start;
+ start_index = i + 1;
+ },
+ '0' ... '9' => {},
+ else => @compileError("Unexpected character in format string: " ++ []u8{c}),
+ },
State.Float => switch (c) {
'}' => {
- try formatFloatDecimal(args[next_arg], 0, context, Errors, output);
+ try formatFloatDecimal(args[next_arg], null, context, Errors, output);
next_arg += 1;
state = State.Start;
start_index = i + 1;
@@ -198,7 +228,7 @@ pub fn formatValue(value: var, context: var, comptime Errors: type, output: fn(@
return formatInt(value, 10, false, 0, context, Errors, output);
},
builtin.TypeId.Float => {
- return formatFloat(value, context, Errors, output);
+ return formatFloatScientific(value, null, context, Errors, output);
},
builtin.TypeId.Void => {
return output(context, "void");
@@ -256,81 +286,237 @@ pub fn formatBuf(buf: []const u8, width: usize,
}
}
-pub fn formatFloat(value: var, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8)Errors!void) Errors!void {
+// Print a float in scientific notation to the specified precision. Null uses full precision.
+// It should be the case that every full precision, printed value can be re-parsed back to the
+// same type unambiguously.
+pub fn formatFloatScientific(value: var, maybe_precision: ?usize, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8)Errors!void) Errors!void {
var x = f64(value);
// Errol doesn't handle these special cases.
- if (math.isNan(x)) {
- return output(context, "NaN");
- }
if (math.signbit(x)) {
try output(context, "-");
x = -x;
}
+
+ if (math.isNan(x)) {
+ return output(context, "nan");
+ }
if (math.isPositiveInf(x)) {
- return output(context, "Infinity");
+ return output(context, "inf");
}
if (x == 0.0) {
- return output(context, "0.0");
+ try output(context, "0");
+
+ if (maybe_precision) |precision| {
+ if (precision != 0) {
+ try output(context, ".");
+ var i: usize = 0;
+ while (i < precision) : (i += 1) {
+ try output(context, "0");
+ }
+ }
+ } else {
+ try output(context, ".0");
+ }
+
+ try output(context, "e+00");
+ return;
}
var buffer: [32]u8 = undefined;
- const float_decimal = errol3(x, buffer[0..]);
- try output(context, float_decimal.digits[0..1]);
- try output(context, ".");
- if (float_decimal.digits.len > 1) {
- const num_digits = if (@typeOf(value) == f32)
- math.min(usize(9), float_decimal.digits.len)
- else
- float_decimal.digits.len;
- try output(context, float_decimal.digits[1 .. num_digits]);
+ var float_decimal = errol.errol3(x, buffer[0..]);
+
+ if (maybe_precision) |precision| {
+ errol.roundToPrecision(&float_decimal, precision, errol.RoundMode.Scientific);
+
+ try output(context, float_decimal.digits[0..1]);
+
+ // {e0} case prints no `.`
+ if (precision != 0) {
+ try output(context, ".");
+
+ var printed: usize = 0;
+ if (float_decimal.digits.len > 1) {
+ const num_digits = math.min(float_decimal.digits.len, precision + 1);
+ try output(context, float_decimal.digits[1 .. num_digits]);
+ printed += num_digits - 1;
+ }
+
+ while (printed < precision) : (printed += 1) {
+ try output(context, "0");
+ }
+ }
} else {
- try output(context, "0");
+ try output(context, float_decimal.digits[0..1]);
+ try output(context, ".");
+ if (float_decimal.digits.len > 1) {
+ const num_digits = if (@typeOf(value) == f32)
+ math.min(usize(9), float_decimal.digits.len)
+ else
+ float_decimal.digits.len;
+
+ try output(context, float_decimal.digits[1 .. num_digits]);
+ } else {
+ try output(context, "0");
+ }
}
- if (float_decimal.exp != 1) {
- try output(context, "e");
- try formatInt(float_decimal.exp - 1, 10, false, 0, context, Errors, output);
+ try output(context, "e");
+ const exp = float_decimal.exp - 1;
+
+ if (exp >= 0) {
+ try output(context, "+");
+ if (exp > -10 and exp < 10) {
+ try output(context, "0");
+ }
+ try formatInt(exp, 10, false, 0, context, Errors, output);
+ } else {
+ try output(context, "-");
+ if (exp > -10 and exp < 10) {
+ try output(context, "0");
+ }
+ try formatInt(-exp, 10, false, 0, context, Errors, output);
}
}
-pub fn formatFloatDecimal(value: var, precision: usize, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8)Errors!void) Errors!void {
+// Print a float of the format x.yyyyy where the number of y is specified by the precision argument.
+// By default floats are printed at full precision (no rounding).
+pub fn formatFloatDecimal(value: var, maybe_precision: ?usize, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8)Errors!void) Errors!void {
var x = f64(value);
// Errol doesn't handle these special cases.
- if (math.isNan(x)) {
- return output(context, "NaN");
- }
if (math.signbit(x)) {
try output(context, "-");
x = -x;
}
+
+ if (math.isNan(x)) {
+ return output(context, "nan");
+ }
if (math.isPositiveInf(x)) {
- return output(context, "Infinity");
+ return output(context, "inf");
}
if (x == 0.0) {
- return output(context, "0.0");
+ try output(context, "0");
+
+ if (maybe_precision) |precision| {
+ if (precision != 0) {
+ try output(context, ".");
+ var i: usize = 0;
+ while (i < precision) : (i += 1) {
+ try output(context, "0");
+ }
+ } else {
+ try output(context, ".0");
+ }
+ } else {
+ try output(context, "0");
+ }
+
+ return;
}
+ // non-special case, use errol3
var buffer: [32]u8 = undefined;
- const float_decimal = errol3(x, buffer[0..]);
-
- const num_left_digits = if (float_decimal.exp > 0) usize(float_decimal.exp) else 1;
-
- try output(context, float_decimal.digits[0 .. num_left_digits]);
- try output(context, ".");
- if (float_decimal.digits.len > 1) {
- const num_valid_digtis = if (@typeOf(value) == f32) math.min(usize(7), float_decimal.digits.len)
- else
- float_decimal.digits.len;
-
- const num_right_digits = if (precision != 0)
- math.min(precision, (num_valid_digtis-num_left_digits))
- else
- num_valid_digtis - num_left_digits;
- try output(context, float_decimal.digits[num_left_digits .. (num_left_digits + num_right_digits)]);
+ var float_decimal = errol.errol3(x, buffer[0..]);
+
+ if (maybe_precision) |precision| {
+ errol.roundToPrecision(&float_decimal, precision, errol.RoundMode.Decimal);
+
+ // exp < 0 means the leading is always 0 as errol result is normalized.
+ var num_digits_whole = if (float_decimal.exp > 0) usize(float_decimal.exp) else 0;
+
+ // the actual slice into the buffer, we may need to zero-pad between num_digits_whole and this.
+ var num_digits_whole_no_pad = math.min(num_digits_whole, float_decimal.digits.len);
+
+ if (num_digits_whole > 0) {
+ // We may have to zero pad, for instance 1e4 requires zero padding.
+ try output(context, float_decimal.digits[0 .. num_digits_whole_no_pad]);
+
+ var i = num_digits_whole_no_pad;
+ while (i < num_digits_whole) : (i += 1) {
+ try output(context, "0");
+ }
+ } else {
+ try output(context , "0");
+ }
+
+ // {.0} special case doesn't want a trailing '.'
+ if (precision == 0) {
+ return;
+ }
+
+ try output(context, ".");
+
+ // Keep track of fractional count printed for case where we pre-pad then post-pad with 0's.
+ var printed: usize = 0;
+
+ // Zero-fill until we reach significant digits or run out of precision.
+ if (float_decimal.exp <= 0) {
+ const zero_digit_count = usize(-float_decimal.exp);
+ const zeros_to_print = math.min(zero_digit_count, precision);
+
+ var i: usize = 0;
+ while (i < zeros_to_print) : (i += 1) {
+ try output(context, "0");
+ printed += 1;
+ }
+
+ if (printed >= precision) {
+ return;
+ }
+ }
+
+ // Remaining fractional portion, zero-padding if insufficient.
+ debug.assert(precision >= printed);
+ if (num_digits_whole_no_pad + precision - printed < float_decimal.digits.len) {
+ try output(context, float_decimal.digits[num_digits_whole_no_pad .. num_digits_whole_no_pad + precision - printed]);
+ return;
+ } else {
+ try output(context, float_decimal.digits[num_digits_whole_no_pad ..]);
+ printed += float_decimal.digits.len - num_digits_whole_no_pad;
+
+ while (printed < precision) : (printed += 1) {
+ try output(context, "0");
+ }
+ }
} else {
- try output(context, "0");
+ // exp < 0 means the leading is always 0 as errol result is normalized.
+ var num_digits_whole = if (float_decimal.exp > 0) usize(float_decimal.exp) else 0;
+
+ // the actual slice into the buffer, we may need to zero-pad between num_digits_whole and this.
+ var num_digits_whole_no_pad = math.min(num_digits_whole, float_decimal.digits.len);
+
+ if (num_digits_whole > 0) {
+ // We may have to zero pad, for instance 1e4 requires zero padding.
+ try output(context, float_decimal.digits[0 .. num_digits_whole_no_pad]);
+
+ var i = num_digits_whole_no_pad;
+ while (i < num_digits_whole) : (i += 1) {
+ try output(context, "0");
+ }
+ } else {
+ try output(context , "0");
+ }
+
+ // Omit `.` if no fractional portion
+ if (float_decimal.exp >= 0 and num_digits_whole_no_pad == float_decimal.digits.len) {
+ return;
+ }
+
+ try output(context, ".");
+
+ // Zero-fill until we reach significant digits or run out of precision.
+ if (float_decimal.exp < 0) {
+ const zero_digit_count = usize(-float_decimal.exp);
+
+ var i: usize = 0;
+ while (i < zero_digit_count) : (i += 1) {
+ try output(context, "0");
+ }
+ }
+
+ try output(context, float_decimal.digits[num_digits_whole_no_pad ..]);
}
}
@@ -465,7 +651,7 @@ pub fn parseUnsigned(comptime T: type, buf: []const u8, radix: u8) ParseUnsigned
return x;
}
-fn charToDigit(c: u8, radix: u8) (error{InvalidCharacter}!u8) {
+pub fn charToDigit(c: u8, radix: u8) (error{InvalidCharacter}!u8) {
const value = switch (c) {
'0' ... '9' => c - '0',
'A' ... 'Z' => c - 'A' + 10,
@@ -593,70 +779,210 @@ test "fmt.format" {
const result = try bufPrint(buf1[0..], "pointer: {}\n", &value);
assert(mem.startsWith(u8, result, "pointer: Struct@"));
}
-
- // TODO get these tests passing in release modes
- // https://github.com/zig-lang/zig/issues/564
- if (builtin.mode == builtin.Mode.Debug) {
- {
- var buf1: [32]u8 = undefined;
- const value: f32 = 12.34;
- const result = try bufPrint(buf1[0..], "f32: {}\n", value);
- assert(mem.eql(u8, result, "f32: 1.23400001e1\n"));
- }
- {
- var buf1: [32]u8 = undefined;
- const value: f64 = -12.34e10;
- const result = try bufPrint(buf1[0..], "f64: {}\n", value);
- assert(mem.eql(u8, result, "f64: -1.234e11\n"));
- }
- {
- var buf1: [32]u8 = undefined;
- const result = try bufPrint(buf1[0..], "f64: {}\n", math.nan_f64);
- assert(mem.eql(u8, result, "f64: NaN\n"));
- }
- {
- var buf1: [32]u8 = undefined;
- const result = try bufPrint(buf1[0..], "f64: {}\n", math.inf_f64);
- assert(mem.eql(u8, result, "f64: Infinity\n"));
- }
- {
- var buf1: [32]u8 = undefined;
- const result = try bufPrint(buf1[0..], "f64: {}\n", -math.inf_f64);
- assert(mem.eql(u8, result, "f64: -Infinity\n"));
- }
- {
- var buf1: [32]u8 = undefined;
- const value: f32 = 1.1234;
- const result = try bufPrint(buf1[0..], "f32: {.1}\n", value);
- assert(mem.eql(u8, result, "f32: 1.1\n"));
- }
- {
- var buf1: [32]u8 = undefined;
- const value: f32 = 1234.567;
- const result = try bufPrint(buf1[0..], "f32: {.2}\n", value);
- assert(mem.eql(u8, result, "f32: 1234.56\n"));
- }
- {
- var buf1: [32]u8 = undefined;
- const value: f32 = -11.1234;
- const result = try bufPrint(buf1[0..], "f32: {.4}\n", value);
- // -11.1234 is converted to f64 -11.12339... internally (errol3() function takes f64).
- // -11.12339... is truncated to -11.1233
- assert(mem.eql(u8, result, "f32: -11.1233\n"));
- }
- {
- var buf1: [32]u8 = undefined;
- const value: f32 = 91.12345;
- const result = try bufPrint(buf1[0..], "f32: {.}\n", value);
- assert(mem.eql(u8, result, "f32: 91.12345\n"));
- }
- {
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f32 = 1.34;
+ const result = try bufPrint(buf1[0..], "f32: {e}\n", value);
+ assert(mem.eql(u8, result, "f32: 1.34000003e+00\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f32 = 12.34;
+ const result = try bufPrint(buf1[0..], "f32: {e}\n", value);
+ assert(mem.eql(u8, result, "f32: 1.23400001e+01\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = -12.34e10;
+ const result = try bufPrint(buf1[0..], "f64: {e}\n", value);
+ assert(mem.eql(u8, result, "f64: -1.234e+11\n"));
+ }
+ {
+ // This fails on release due to a minor rounding difference.
+ // --release-fast outputs 9.999960000000001e-40 vs. the expected.
+ if (builtin.mode == builtin.Mode.Debug) {
var buf1: [32]u8 = undefined;
- const value: f64 = 91.12345678901235;
- const result = try bufPrint(buf1[0..], "f64: {.10}\n", value);
- assert(mem.eql(u8, result, "f64: 91.1234567890\n"));
+ const value: f64 = 9.999960e-40;
+ const result = try bufPrint(buf1[0..], "f64: {e}\n", value);
+ assert(mem.eql(u8, result, "f64: 9.99996e-40\n"));
}
-
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = 1.409706e-42;
+ const result = try bufPrint(buf1[0..], "f64: {e5}\n", value);
+ assert(mem.eql(u8, result, "f64: 1.40971e-42\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = @bitCast(f32, u32(814313563));
+ const result = try bufPrint(buf1[0..], "f64: {e5}\n", value);
+ assert(mem.eql(u8, result, "f64: 1.00000e-09\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = @bitCast(f32, u32(1006632960));
+ const result = try bufPrint(buf1[0..], "f64: {e5}\n", value);
+ assert(mem.eql(u8, result, "f64: 7.81250e-03\n"));
+ }
+ {
+ // libc rounds 1.000005e+05 to 1.00000e+05 but zig does 1.00001e+05.
+ // In fact, libc doesn't round a lot of 5 cases up when one past the precision point.
+ var buf1: [32]u8 = undefined;
+ const value: f64 = @bitCast(f32, u32(1203982400));
+ const result = try bufPrint(buf1[0..], "f64: {e5}\n", value);
+ assert(mem.eql(u8, result, "f64: 1.00001e+05\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const result = try bufPrint(buf1[0..], "f64: {}\n", math.nan_f64);
+ assert(mem.eql(u8, result, "f64: nan\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const result = try bufPrint(buf1[0..], "f64: {}\n", -math.nan_f64);
+ assert(mem.eql(u8, result, "f64: -nan\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const result = try bufPrint(buf1[0..], "f64: {}\n", math.inf_f64);
+ assert(mem.eql(u8, result, "f64: inf\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const result = try bufPrint(buf1[0..], "f64: {}\n", -math.inf_f64);
+ assert(mem.eql(u8, result, "f64: -inf\n"));
+ }
+ {
+ var buf1: [64]u8 = undefined;
+ const value: f64 = 1.52314e+29;
+ const result = try bufPrint(buf1[0..], "f64: {.}\n", value);
+ assert(mem.eql(u8, result, "f64: 152314000000000000000000000000\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f32 = 1.1234;
+ const result = try bufPrint(buf1[0..], "f32: {.1}\n", value);
+ assert(mem.eql(u8, result, "f32: 1.1\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f32 = 1234.567;
+ const result = try bufPrint(buf1[0..], "f32: {.2}\n", value);
+ assert(mem.eql(u8, result, "f32: 1234.57\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f32 = -11.1234;
+ const result = try bufPrint(buf1[0..], "f32: {.4}\n", value);
+ // -11.1234 is converted to f64 -11.12339... internally (errol3() function takes f64).
+ // -11.12339... is rounded back up to -11.1234
+ assert(mem.eql(u8, result, "f32: -11.1234\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f32 = 91.12345;
+ const result = try bufPrint(buf1[0..], "f32: {.5}\n", value);
+ assert(mem.eql(u8, result, "f32: 91.12345\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = 91.12345678901235;
+ const result = try bufPrint(buf1[0..], "f64: {.10}\n", value);
+ assert(mem.eql(u8, result, "f64: 91.1234567890\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = 0.0;
+ const result = try bufPrint(buf1[0..], "f64: {.5}\n", value);
+ assert(mem.eql(u8, result, "f64: 0.00000\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = 5.700;
+ const result = try bufPrint(buf1[0..], "f64: {.0}\n", value);
+ assert(mem.eql(u8, result, "f64: 6\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = 9.999;
+ const result = try bufPrint(buf1[0..], "f64: {.1}\n", value);
+ assert(mem.eql(u8, result, "f64: 10.0\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = 1.0;
+ const result = try bufPrint(buf1[0..], "f64: {.3}\n", value);
+ assert(mem.eql(u8, result, "f64: 1.000\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = 0.0003;
+ const result = try bufPrint(buf1[0..], "f64: {.8}\n", value);
+ assert(mem.eql(u8, result, "f64: 0.00030000\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = 1.40130e-45;
+ const result = try bufPrint(buf1[0..], "f64: {.5}\n", value);
+ assert(mem.eql(u8, result, "f64: 0.00000\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = 9.999960e-40;
+ const result = try bufPrint(buf1[0..], "f64: {.5}\n", value);
+ assert(mem.eql(u8, result, "f64: 0.00000\n"));
+ }
+ // libc checks
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = f64(@bitCast(f32, u32(916964781)));
+ const result = try bufPrint(buf1[0..], "f64: {.5}\n", value);
+ assert(mem.eql(u8, result, "f64: 0.00001\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = f64(@bitCast(f32, u32(925353389)));
+ const result = try bufPrint(buf1[0..], "f64: {.5}\n", value);
+ assert(mem.eql(u8, result, "f64: 0.00001\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = f64(@bitCast(f32, u32(1036831278)));
+ const result = try bufPrint(buf1[0..], "f64: {.5}\n", value);
+ assert(mem.eql(u8, result, "f64: 0.10000\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = f64(@bitCast(f32, u32(1065353133)));
+ const result = try bufPrint(buf1[0..], "f64: {.5}\n", value);
+ assert(mem.eql(u8, result, "f64: 1.00000\n"));
+ }
+ {
+ var buf1: [32]u8 = undefined;
+ const value: f64 = f64(@bitCast(f32, u32(1092616192)));
+ const result = try bufPrint(buf1[0..], "f64: {.5}\n", value);
+ assert(mem.eql(u8, result, "f64: 10.00000\n"));
+ }
+ // libc differences
+ {
+ var buf1: [32]u8 = undefined;
+ // This is 0.015625 exactly according to gdb. We thus round down,
+ // however glibc rounds up for some reason. This occurs for all
+ // floats of the form x.yyyy25 on a precision point.
+ const value: f64 = f64(@bitCast(f32, u32(1015021568)));
+ const result = try bufPrint(buf1[0..], "f64: {.5}\n", value);
+ assert(mem.eql(u8, result, "f64: 0.01563\n"));
+ }
+ // std-windows-x86_64-Debug-bare test case fails
+ {
+ // errol3 rounds to ... 630 but libc rounds to ...632. Grisu3
+ // also rounds to 630 so I'm inclined to believe libc is not
+ // optimal here.
+ var buf1: [32]u8 = undefined;
+ const value: f64 = f64(@bitCast(f32, u32(1518338049)));
+ const result = try bufPrint(buf1[0..], "f64: {.5}\n", value);
+ assert(mem.eql(u8, result, "f64: 18014400656965630.00000\n"));
}
}
diff --git a/std/hash_map.zig b/std/hash_map.zig
index becced64ff..2a178d9d44 100644
--- a/std/hash_map.zig
+++ b/std/hash_map.zig
@@ -54,6 +54,14 @@ pub fn HashMap(comptime K: type, comptime V: type,
}
unreachable; // no next item
}
+
+ // Reset the iterator to the initial index
+ pub fn reset(it: &Iterator) void {
+ it.count = 0;
+ it.index = 0;
+ // Resetting the modification count too
+ it.initial_modification_count = it.hm.modification_count;
+ }
};
pub fn init(allocator: &Allocator) Self {
@@ -66,7 +74,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
};
}
- pub fn deinit(hm: &Self) void {
+ pub fn deinit(hm: &const Self) void {
hm.allocator.free(hm.entries);
}
@@ -79,6 +87,10 @@ pub fn HashMap(comptime K: type, comptime V: type,
hm.incrementModificationCount();
}
+ pub fn count(hm: &const Self) usize {
+ return hm.size;
+ }
+
/// Returns the value that was already there.
pub fn put(hm: &Self, key: K, value: &const V) !?V {
if (hm.entries.len == 0) {
@@ -102,18 +114,19 @@ pub fn HashMap(comptime K: type, comptime V: type,
return hm.internalPut(key, value);
}
- pub fn get(hm: &Self, key: K) ?&Entry {
+ pub fn get(hm: &const Self, key: K) ?&Entry {
if (hm.entries.len == 0) {
return null;
}
return hm.internalGet(key);
}
- pub fn contains(hm: &Self, key: K) bool {
+ pub fn contains(hm: &const Self, key: K) bool {
return hm.get(key) != null;
}
pub fn remove(hm: &Self, key: K) ?&Entry {
+ if (hm.entries.len == 0) return null;
hm.incrementModificationCount();
const start_index = hm.keyToIndex(key);
{var roll_over: usize = 0; while (roll_over <= hm.max_distance_from_start_index) : (roll_over += 1) {
@@ -217,7 +230,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
unreachable; // put into a full map
}
- fn internalGet(hm: &Self, key: K) ?&Entry {
+ fn internalGet(hm: &const Self, key: K) ?&Entry {
const start_index = hm.keyToIndex(key);
{var roll_over: usize = 0; while (roll_over <= hm.max_distance_from_start_index) : (roll_over += 1) {
const index = (start_index + roll_over) % hm.entries.len;
@@ -229,14 +242,17 @@ pub fn HashMap(comptime K: type, comptime V: type,
return null;
}
- fn keyToIndex(hm: &Self, key: K) usize {
+ fn keyToIndex(hm: &const Self, key: K) usize {
return usize(hash(key)) % hm.entries.len;
}
};
}
test "basic hash map usage" {
- var map = HashMap(i32, i32, hash_i32, eql_i32).init(debug.global_allocator);
+ var direct_allocator = std.heap.DirectAllocator.init();
+ defer direct_allocator.deinit();
+
+ var map = HashMap(i32, i32, hash_i32, eql_i32).init(&direct_allocator.allocator);
defer map.deinit();
assert((map.put(1, 11) catch unreachable) == null);
@@ -248,12 +264,52 @@ test "basic hash map usage" {
assert(??(map.put(5, 66) catch unreachable) == 55);
assert(??(map.put(5, 55) catch unreachable) == 66);
+ assert(map.contains(2));
assert((??map.get(2)).value == 22);
_ = map.remove(2);
assert(map.remove(2) == null);
assert(map.get(2) == null);
}
+test "iterator hash map" {
+ var direct_allocator = std.heap.DirectAllocator.init();
+ defer direct_allocator.deinit();
+
+ var reset_map = HashMap(i32, i32, hash_i32, eql_i32).init(&direct_allocator.allocator);
+ defer reset_map.deinit();
+
+ assert((reset_map.put(1, 11) catch unreachable) == null);
+ assert((reset_map.put(2, 22) catch unreachable) == null);
+ assert((reset_map.put(3, 33) catch unreachable) == null);
+
+ var keys = []i32 { 1, 2, 3 };
+ var values = []i32 { 11, 22, 33 };
+
+ var it = reset_map.iterator();
+ var count : usize = 0;
+ while (it.next()) |next| {
+ assert(next.key == keys[count]);
+ assert(next.value == values[count]);
+ count += 1;
+ }
+
+ assert(count == 3);
+ assert(it.next() == null);
+ it.reset();
+ count = 0;
+ while (it.next()) |next| {
+ assert(next.key == keys[count]);
+ assert(next.value == values[count]);
+ count += 1;
+ if (count == 2) break;
+ }
+
+ it.reset();
+ var entry = ?? it.next();
+ assert(entry.key == keys[0]);
+ assert(entry.value == values[0]);
+}
+
fn hash_i32(x: i32) u32 {
return @bitCast(u32, x);
}
diff --git a/std/heap.zig b/std/heap.zig
index ca6736af1e..bfdf62f658 100644
--- a/std/heap.zig
+++ b/std/heap.zig
@@ -47,13 +47,6 @@ pub const DirectAllocator = struct {
const HeapHandle = if (builtin.os == Os.windows) os.windows.HANDLE else void;
- //pub const canary_bytes = []u8 {48, 239, 128, 46, 18, 49, 147, 9, 195, 59, 203, 3, 245, 54, 9, 122};
- //pub const want_safety = switch (builtin.mode) {
- // builtin.Mode.Debug => true,
- // builtin.Mode.ReleaseSafe => true,
- // else => false,
- //};
-
pub fn init() DirectAllocator {
return DirectAllocator {
.allocator = Allocator {
@@ -79,19 +72,38 @@ pub const DirectAllocator = struct {
switch (builtin.os) {
Os.linux, Os.macosx, Os.ios => {
- assert(alignment <= os.page_size);
const p = os.posix;
- const addr = p.mmap(null, n, p.PROT_READ|p.PROT_WRITE,
- p.MAP_PRIVATE|p.MAP_ANONYMOUS, -1, 0);
- if (addr == p.MAP_FAILED) {
- return error.OutOfMemory;
- }
- return @intToPtr(&u8, addr)[0..n];
+ const alloc_size = if(alignment <= os.page_size) n else n + alignment;
+ const addr = p.mmap(null, alloc_size, p.PROT_READ|p.PROT_WRITE,
+ p.MAP_PRIVATE|p.MAP_ANONYMOUS, -1, 0);
+ if(addr == p.MAP_FAILED) return error.OutOfMemory;
+
+ if(alloc_size == n) return @intToPtr(&u8, addr)[0..n];
+
+ var aligned_addr = addr & ~usize(alignment - 1);
+ aligned_addr += alignment;
+
+ //We can unmap the unused portions of our mmap, but we must only
+ // pass munmap bytes that exist outside our allocated pages or it
+ // will happily eat us too
+
+ //Since alignment > page_size, we are by definition on a page boundary
+ const unused_start = addr;
+ const unused_len = aligned_addr - 1 - unused_start;
+
+ var err = p.munmap(unused_start, unused_len);
+ debug.assert(p.getErrno(err) == 0);
+
+ //It is impossible that there is an unoccupied page at the top of our
+ // mmap.
+
+ return @intToPtr(&u8, aligned_addr)[0..n];
},
Os.windows => {
const amt = n + alignment + @sizeOf(usize);
const heap_handle = self.heap_handle ?? blk: {
- const hh = os.windows.HeapCreate(os.windows.HEAP_NO_SERIALIZE, amt, 0) ?? return error.OutOfMemory;
+ const hh = os.windows.HeapCreate(os.windows.HEAP_NO_SERIALIZE, amt, 0)
+ ?? return error.OutOfMemory;
self.heap_handle = hh;
break :blk hh;
};
@@ -120,7 +132,7 @@ pub const DirectAllocator = struct {
const rem = @rem(new_addr_end, os.page_size);
const new_addr_end_rounded = new_addr_end + if (rem == 0) 0 else (os.page_size - rem);
if (old_addr_end > new_addr_end_rounded) {
- _ = os.posix.munmap(@intToPtr(&u8, new_addr_end_rounded), old_addr_end - new_addr_end_rounded);
+ _ = os.posix.munmap(new_addr_end_rounded, old_addr_end - new_addr_end_rounded);
}
return old_mem[0..new_size];
}
@@ -158,7 +170,7 @@ pub const DirectAllocator = struct {
switch (builtin.os) {
Os.linux, Os.macosx, Os.ios => {
- _ = os.posix.munmap(bytes.ptr, bytes.len);
+ _ = os.posix.munmap(@ptrToInt(bytes.ptr), bytes.len);
},
Os.windows => {
const record_addr = @ptrToInt(bytes.ptr) + bytes.len;
@@ -279,7 +291,7 @@ pub const FixedBufferAllocator = struct {
fn alloc(allocator: &Allocator, n: usize, alignment: u29) ![]u8 {
const self = @fieldParentPtr(FixedBufferAllocator, "allocator", allocator);
- const addr = @ptrToInt(&self.buffer[self.end_index]);
+ const addr = @ptrToInt(self.buffer.ptr) + self.end_index;
const rem = @rem(addr, alignment);
const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
const adjusted_index = self.end_index + march_forward_bytes;
@@ -306,6 +318,54 @@ pub const FixedBufferAllocator = struct {
fn free(allocator: &Allocator, bytes: []u8) void { }
};
+/// lock free
+pub const ThreadSafeFixedBufferAllocator = struct {
+ allocator: Allocator,
+ end_index: usize,
+ buffer: []u8,
+
+ pub fn init(buffer: []u8) ThreadSafeFixedBufferAllocator {
+ return ThreadSafeFixedBufferAllocator {
+ .allocator = Allocator {
+ .allocFn = alloc,
+ .reallocFn = realloc,
+ .freeFn = free,
+ },
+ .buffer = buffer,
+ .end_index = 0,
+ };
+ }
+
+ fn alloc(allocator: &Allocator, n: usize, alignment: u29) ![]u8 {
+ const self = @fieldParentPtr(ThreadSafeFixedBufferAllocator, "allocator", allocator);
+ var end_index = @atomicLoad(usize, &self.end_index, builtin.AtomicOrder.SeqCst);
+ while (true) {
+ const addr = @ptrToInt(self.buffer.ptr) + end_index;
+ const rem = @rem(addr, alignment);
+ const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
+ const adjusted_index = end_index + march_forward_bytes;
+ const new_end_index = adjusted_index + n;
+ if (new_end_index > self.buffer.len) {
+ return error.OutOfMemory;
+ }
+ end_index = @cmpxchgWeak(usize, &self.end_index, end_index, new_end_index,
+ builtin.AtomicOrder.SeqCst, builtin.AtomicOrder.SeqCst) ?? return self.buffer[adjusted_index .. new_end_index];
+ }
+ }
+
+ fn realloc(allocator: &Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
+ if (new_size <= old_mem.len) {
+ return old_mem[0..new_size];
+ } else {
+ const result = try alloc(allocator, new_size, alignment);
+ mem.copy(u8, result, old_mem);
+ return result;
+ }
+ }
+
+ fn free(allocator: &Allocator, bytes: []u8) void { }
+};
+
test "c_allocator" {
@@ -322,6 +382,7 @@ test "DirectAllocator" {
const allocator = &direct_allocator.allocator;
try testAllocator(allocator);
+ try testAllocatorLargeAlignment(allocator);
}
test "ArenaAllocator" {
@@ -332,6 +393,7 @@ test "ArenaAllocator" {
defer arena_allocator.deinit();
try testAllocator(&arena_allocator.allocator);
+ try testAllocatorLargeAlignment(&arena_allocator.allocator);
}
var test_fixed_buffer_allocator_memory: [30000 * @sizeOf(usize)]u8 = undefined;
@@ -339,6 +401,14 @@ test "FixedBufferAllocator" {
var fixed_buffer_allocator = FixedBufferAllocator.init(test_fixed_buffer_allocator_memory[0..]);
try testAllocator(&fixed_buffer_allocator.allocator);
+ try testAllocatorLargeAlignment(&fixed_buffer_allocator.allocator);
+}
+
+test "ThreadSafeFixedBufferAllocator" {
+ var fixed_buffer_allocator = ThreadSafeFixedBufferAllocator.init(test_fixed_buffer_allocator_memory[0..]);
+
+ try testAllocator(&fixed_buffer_allocator.allocator);
+ try testAllocatorLargeAlignment(&fixed_buffer_allocator.allocator);
}
fn testAllocator(allocator: &mem.Allocator) !void {
@@ -360,3 +430,32 @@ fn testAllocator(allocator: &mem.Allocator) !void {
allocator.free(slice);
}
+
+fn testAllocatorLargeAlignment(allocator: &mem.Allocator) mem.Allocator.Error!void {
+ //Maybe a platform's page_size is actually the same as or
+ // very near usize?
+ if(os.page_size << 2 > @maxValue(usize)) return;
+
+ const USizeShift = @IntType(false, std.math.log2(usize.bit_count));
+ const large_align = u29(os.page_size << 2);
+
+ var align_mask: usize = undefined;
+ _ = @shlWithOverflow(usize, ~usize(0), USizeShift(@ctz(large_align)), &align_mask);
+
+ var slice = try allocator.allocFn(allocator, 500, large_align);
+ debug.assert(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));
+
+ slice = try allocator.reallocFn(allocator, slice, 100, large_align);
+ debug.assert(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));
+
+ slice = try allocator.reallocFn(allocator, slice, 5000, large_align);
+ debug.assert(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));
+
+ slice = try allocator.reallocFn(allocator, slice, 10, large_align);
+ debug.assert(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));
+
+ slice = try allocator.reallocFn(allocator, slice, 20000, large_align);
+ debug.assert(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));
+
+ allocator.free(slice);
+}
diff --git a/std/index.zig b/std/index.zig
index f2af70b28b..8abfa3db88 100644
--- a/std/index.zig
+++ b/std/index.zig
@@ -7,7 +7,9 @@ pub const BufferOutStream = @import("buffer.zig").BufferOutStream;
pub const HashMap = @import("hash_map.zig").HashMap;
pub const LinkedList = @import("linked_list.zig").LinkedList;
pub const IntrusiveLinkedList = @import("linked_list.zig").IntrusiveLinkedList;
+pub const SegmentedList = @import("segmented_list.zig").SegmentedList;
+pub const atomic = @import("atomic/index.zig");
pub const base64 = @import("base64.zig");
pub const build = @import("build.zig");
pub const c = @import("c/index.zig");
@@ -17,11 +19,12 @@ pub const debug = @import("debug/index.zig");
pub const dwarf = @import("dwarf.zig");
pub const elf = @import("elf.zig");
pub const empty_import = @import("empty.zig");
-pub const endian = @import("endian.zig");
+pub const event = @import("event.zig");
pub const fmt = @import("fmt/index.zig");
pub const hash = @import("hash/index.zig");
pub const heap = @import("heap.zig");
pub const io = @import("io.zig");
+pub const json = @import("json.zig");
pub const macho = @import("macho.zig");
pub const math = @import("math/index.zig");
pub const mem = @import("mem.zig");
@@ -34,12 +37,14 @@ pub const zig = @import("zig/index.zig");
test "std" {
// run tests from these
+ _ = @import("atomic/index.zig");
_ = @import("array_list.zig");
_ = @import("buf_map.zig");
_ = @import("buf_set.zig");
_ = @import("buffer.zig");
_ = @import("hash_map.zig");
_ = @import("linked_list.zig");
+ _ = @import("segmented_list.zig");
_ = @import("base64.zig");
_ = @import("build.zig");
@@ -50,15 +55,16 @@ test "std" {
_ = @import("dwarf.zig");
_ = @import("elf.zig");
_ = @import("empty.zig");
- _ = @import("endian.zig");
+ _ = @import("event.zig");
_ = @import("fmt/index.zig");
_ = @import("hash/index.zig");
_ = @import("io.zig");
+ _ = @import("json.zig");
_ = @import("macho.zig");
_ = @import("math/index.zig");
_ = @import("mem.zig");
- _ = @import("heap.zig");
_ = @import("net.zig");
+ _ = @import("heap.zig");
_ = @import("os/index.zig");
_ = @import("rand/index.zig");
_ = @import("sort.zig");
diff --git a/std/io.zig b/std/io.zig
index 93d50e6709..7b72af15e4 100644
--- a/std/io.zig
+++ b/std/io.zig
@@ -486,6 +486,11 @@ pub fn readLine(buf: []u8) !usize {
while (true) {
const byte = stream.readByte() catch return error.EndOfFile;
switch (byte) {
+ '\r' => {
+ // trash the following \n
+ _ = stream.readByte() catch return error.EndOfFile;
+ return index;
+ },
'\n' => return index,
else => {
if (index == buf.len) return error.InputTooLong;
diff --git a/std/io_test.zig b/std/io_test.zig
index 89959b7b54..5f53556785 100644
--- a/std/io_test.zig
+++ b/std/io_test.zig
@@ -1,6 +1,5 @@
const std = @import("index.zig");
const io = std.io;
-const allocator = std.debug.global_allocator;
const DefaultPrng = std.rand.DefaultPrng;
const assert = std.debug.assert;
const mem = std.mem;
@@ -8,6 +7,9 @@ const os = std.os;
const builtin = @import("builtin");
test "write a file, read it, then delete it" {
+ var raw_bytes: [200 * 1024]u8 = undefined;
+ var allocator = &std.heap.FixedBufferAllocator.init(raw_bytes[0..]).allocator;
+
var data: [1024]u8 = undefined;
var prng = DefaultPrng.init(1234);
prng.random.bytes(data[0..]);
@@ -44,3 +46,17 @@ test "write a file, read it, then delete it" {
}
try os.deleteFile(allocator, tmp_file_name);
}
+
+test "BufferOutStream" {
+ var bytes: [100]u8 = undefined;
+ var allocator = &std.heap.FixedBufferAllocator.init(bytes[0..]).allocator;
+
+ var buffer = try std.Buffer.initSize(allocator, 0);
+ var buf_stream = &std.io.BufferOutStream.init(&buffer).stream;
+
+ const x: i32 = 42;
+ const y: i32 = 1234;
+ try buf_stream.print("x: {}\ny: {}\n", x, y);
+
+ assert(mem.eql(u8, buffer.toSlice(), "x: 42\ny: 1234\n"));
+}
diff --git a/std/json.zig b/std/json.zig
new file mode 100644
index 0000000000..6f853501ed
--- /dev/null
+++ b/std/json.zig
@@ -0,0 +1,1304 @@
+// JSON parser conforming to RFC8259.
+//
+// https://tools.ietf.org/html/rfc8259
+
+const std = @import("index.zig");
+const mem = std.mem;
+
+const u1 = @IntType(false, 1);
+const u256 = @IntType(false, 256);
+
+// A single token slice into the parent string.
+//
+// Use `token.slice()` on the input at the current position to get the current slice.
+pub const Token = struct {
+ id: Id,
+ // How many bytes do we skip before counting
+ offset: u1,
+ // Whether string contains a \uXXXX sequence and cannot be zero-copied
+ string_has_escape: bool,
+ // Whether number is simple and can be represented by an integer (i.e. no `.` or `e`)
+ number_is_integer: bool,
+    // How many bytes behind the current position the start of this token is.
+ count: usize,
+
+ pub const Id = enum {
+ ObjectBegin,
+ ObjectEnd,
+ ArrayBegin,
+ ArrayEnd,
+ String,
+ Number,
+ True,
+ False,
+ Null,
+ };
+
+ pub fn init(id: Id, count: usize, offset: u1) Token {
+ return Token {
+ .id = id,
+ .offset = offset,
+ .string_has_escape = false,
+ .number_is_integer = true,
+ .count = count,
+ };
+ }
+
+ pub fn initString(count: usize, has_unicode_escape: bool) Token {
+ return Token {
+ .id = Id.String,
+ .offset = 0,
+ .string_has_escape = has_unicode_escape,
+ .number_is_integer = true,
+ .count = count,
+ };
+ }
+
+ pub fn initNumber(count: usize, number_is_integer: bool) Token {
+ return Token {
+ .id = Id.Number,
+ .offset = 0,
+ .string_has_escape = false,
+ .number_is_integer = number_is_integer,
+ .count = count,
+ };
+ }
+
+    // A marker token is a zero-length token used to mark structural boundaries.
+ pub fn initMarker(id: Id) Token {
+ return Token {
+ .id = id,
+ .offset = 0,
+ .string_has_escape = false,
+ .number_is_integer = true,
+ .count = 0,
+ };
+ }
+
+ // Slice into the underlying input string.
+ pub fn slice(self: &const Token, input: []const u8, i: usize) []const u8 {
+ return input[i + self.offset - self.count .. i + self.offset];
+ }
+};
+
+// A small streaming JSON parser. This accepts input one byte at a time and returns tokens as
+// they are encountered. No copies or allocations are performed during parsing and the entire
+// parsing state requires ~40-50 bytes of stack space.
+//
+// Conforms strictly to RFC8259.
+const StreamingJsonParser = struct {
+ // Current state
+ state: State,
+ // How many bytes we have counted for the current token
+ count: usize,
+ // What state to follow after parsing a string (either property or value string)
+ after_string_state: State,
+ // What state to follow after parsing a value (either top-level or value end)
+ after_value_state: State,
+ // If we stopped now, would the complete parsed string to now be a valid json string
+ complete: bool,
+ // Current token flags to pass through to the next generated, see Token.
+ string_has_escape: bool,
+ number_is_integer: bool,
+
+ // Bit-stack for nested object/map literals (max 255 nestings).
+ stack: u256,
+ stack_used: u8,
+
+ const object_bit = 0;
+ const array_bit = 1;
+ const max_stack_size = @maxValue(u8);
+
+ pub fn init() StreamingJsonParser {
+ var p: StreamingJsonParser = undefined;
+ p.reset();
+ return p;
+ }
+
+ pub fn reset(p: &StreamingJsonParser) void {
+ p.state = State.TopLevelBegin;
+ p.count = 0;
+ // Set before ever read in main transition function
+ p.after_string_state = undefined;
+ p.after_value_state = State.ValueEnd; // handle end of values normally
+ p.stack = 0;
+ p.stack_used = 0;
+ p.complete = false;
+ p.string_has_escape = false;
+ p.number_is_integer = true;
+ }
+
+ pub const State = enum {
+ // These must be first with these explicit values as we rely on them for indexing the
+ // bit-stack directly and avoiding a branch.
+ ObjectSeparator = 0,
+ ValueEnd = 1,
+
+ TopLevelBegin,
+ TopLevelEnd,
+
+ ValueBegin,
+ ValueBeginNoClosing,
+
+ String,
+ StringUtf8Byte3,
+ StringUtf8Byte2,
+ StringUtf8Byte1,
+ StringEscapeCharacter,
+ StringEscapeHexUnicode4,
+ StringEscapeHexUnicode3,
+ StringEscapeHexUnicode2,
+ StringEscapeHexUnicode1,
+
+ Number,
+ NumberMaybeDotOrExponent,
+ NumberMaybeDigitOrDotOrExponent,
+ NumberFractionalRequired,
+ NumberFractional,
+ NumberMaybeExponent,
+ NumberExponent,
+ NumberExponentDigitsRequired,
+ NumberExponentDigits,
+
+ TrueLiteral1,
+ TrueLiteral2,
+ TrueLiteral3,
+
+ FalseLiteral1,
+ FalseLiteral2,
+ FalseLiteral3,
+ FalseLiteral4,
+
+ NullLiteral1,
+ NullLiteral2,
+ NullLiteral3,
+
+ // Only call this function to generate array/object final state.
+ pub fn fromInt(x: var) State {
+ std.debug.assert(x == 0 or x == 1);
+ const T = @TagType(State);
+ return State(T(x));
+ }
+ };
+
+ pub const Error = error {
+ InvalidTopLevel,
+ TooManyNestedItems,
+ TooManyClosingItems,
+ InvalidValueBegin,
+ InvalidValueEnd,
+ UnbalancedBrackets,
+ UnbalancedBraces,
+ UnexpectedClosingBracket,
+ UnexpectedClosingBrace,
+ InvalidNumber,
+ InvalidSeparator,
+ InvalidLiteral,
+ InvalidEscapeCharacter,
+ InvalidUnicodeHexSymbol,
+ InvalidUtf8Byte,
+ InvalidTopLevelTrailing,
+ InvalidControlCharacter,
+ };
+
+ // Give another byte to the parser and obtain any new tokens. This may (rarely) return two
+ // tokens. token2 is always null if token1 is null.
+ //
+ // There is currently no error recovery on a bad stream.
+ pub fn feed(p: &StreamingJsonParser, c: u8, token1: &?Token, token2: &?Token) Error!void {
+ *token1 = null;
+ *token2 = null;
+ p.count += 1;
+
+ // unlikely
+ if (try p.transition(c, token1)) {
+ _ = try p.transition(c, token2);
+ }
+ }
+
+ // Perform a single transition on the state machine and return any possible token.
+ fn transition(p: &StreamingJsonParser, c: u8, token: &?Token) Error!bool {
+ switch (p.state) {
+ State.TopLevelBegin => switch (c) {
+ '{' => {
+ p.stack <<= 1;
+ p.stack |= object_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ObjectSeparator;
+
+ *token = Token.initMarker(Token.Id.ObjectBegin);
+ },
+ '[' => {
+ p.stack <<= 1;
+ p.stack |= array_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ValueEnd;
+
+ *token = Token.initMarker(Token.Id.ArrayBegin);
+ },
+ '-' => {
+ p.number_is_integer = true;
+ p.state = State.Number;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ '0' => {
+ p.number_is_integer = true;
+ p.state = State.NumberMaybeDotOrExponent;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ '1' ... '9' => {
+ p.number_is_integer = true;
+ p.state = State.NumberMaybeDigitOrDotOrExponent;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ '"' => {
+ p.state = State.String;
+ p.after_value_state = State.TopLevelEnd;
+ // We don't actually need the following since after_value_state should override.
+ p.after_string_state = State.ValueEnd;
+ p.string_has_escape = false;
+ p.count = 0;
+ },
+ 't' => {
+ p.state = State.TrueLiteral1;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ 'f' => {
+ p.state = State.FalseLiteral1;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ 'n' => {
+ p.state = State.NullLiteral1;
+ p.after_value_state = State.TopLevelEnd;
+ p.count = 0;
+ },
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidTopLevel;
+ },
+ },
+
+ State.TopLevelEnd => switch (c) {
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidTopLevelTrailing;
+ },
+ },
+
+ State.ValueBegin => switch (c) {
+ // NOTE: These are shared in ValueEnd as well, think we can reorder states to
+ // be a bit clearer and avoid this duplication.
+ '}' => {
+ // unlikely
+ if (p.stack & 1 != object_bit) {
+ return error.UnexpectedClosingBracket;
+ }
+ if (p.stack_used == 0) {
+ return error.TooManyClosingItems;
+ }
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.fromInt(p.stack & 1);
+
+ p.stack >>= 1;
+ p.stack_used -= 1;
+
+ switch (p.stack_used) {
+ 0 => {
+ p.complete = true;
+ p.state = State.TopLevelEnd;
+ },
+ else => {},
+ }
+
+ *token = Token.initMarker(Token.Id.ObjectEnd);
+ },
+ ']' => {
+ if (p.stack & 1 != array_bit) {
+ return error.UnexpectedClosingBrace;
+ }
+ if (p.stack_used == 0) {
+ return error.TooManyClosingItems;
+ }
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.fromInt(p.stack & 1);
+
+ p.stack >>= 1;
+ p.stack_used -= 1;
+
+ switch (p.stack_used) {
+ 0 => {
+ p.complete = true;
+ p.state = State.TopLevelEnd;
+ },
+ else => {},
+ }
+
+ *token = Token.initMarker(Token.Id.ArrayEnd);
+ },
+ '{' => {
+ if (p.stack_used == max_stack_size) {
+ return error.TooManyNestedItems;
+ }
+
+ p.stack <<= 1;
+ p.stack |= object_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ObjectSeparator;
+
+ *token = Token.initMarker(Token.Id.ObjectBegin);
+ },
+ '[' => {
+ if (p.stack_used == max_stack_size) {
+ return error.TooManyNestedItems;
+ }
+
+ p.stack <<= 1;
+ p.stack |= array_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ValueEnd;
+
+ *token = Token.initMarker(Token.Id.ArrayBegin);
+ },
+ '-' => {
+ p.state = State.Number;
+ p.count = 0;
+ },
+ '0' => {
+ p.state = State.NumberMaybeDotOrExponent;
+ p.count = 0;
+ },
+ '1' ... '9' => {
+ p.state = State.NumberMaybeDigitOrDotOrExponent;
+ p.count = 0;
+ },
+ '"' => {
+ p.state = State.String;
+ p.count = 0;
+ },
+ 't' => {
+ p.state = State.TrueLiteral1;
+ p.count = 0;
+ },
+ 'f' => {
+ p.state = State.FalseLiteral1;
+ p.count = 0;
+ },
+ 'n' => {
+ p.state = State.NullLiteral1;
+ p.count = 0;
+ },
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidValueBegin;
+ },
+ },
+
+ // TODO: A bit of duplication here and in the following state, redo.
+ State.ValueBeginNoClosing => switch (c) {
+ '{' => {
+ if (p.stack_used == max_stack_size) {
+ return error.TooManyNestedItems;
+ }
+
+ p.stack <<= 1;
+ p.stack |= object_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ObjectSeparator;
+
+ *token = Token.initMarker(Token.Id.ObjectBegin);
+ },
+ '[' => {
+ if (p.stack_used == max_stack_size) {
+ return error.TooManyNestedItems;
+ }
+
+ p.stack <<= 1;
+ p.stack |= array_bit;
+ p.stack_used += 1;
+
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ValueEnd;
+
+ *token = Token.initMarker(Token.Id.ArrayBegin);
+ },
+ '-' => {
+ p.state = State.Number;
+ p.count = 0;
+ },
+ '0' => {
+ p.state = State.NumberMaybeDotOrExponent;
+ p.count = 0;
+ },
+ '1' ... '9' => {
+ p.state = State.NumberMaybeDigitOrDotOrExponent;
+ p.count = 0;
+ },
+ '"' => {
+ p.state = State.String;
+ p.count = 0;
+ },
+ 't' => {
+ p.state = State.TrueLiteral1;
+ p.count = 0;
+ },
+ 'f' => {
+ p.state = State.FalseLiteral1;
+ p.count = 0;
+ },
+ 'n' => {
+ p.state = State.NullLiteral1;
+ p.count = 0;
+ },
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidValueBegin;
+ },
+ },
+
+ State.ValueEnd => switch (c) {
+ ',' => {
+ p.after_string_state = State.fromInt(p.stack & 1);
+ p.state = State.ValueBeginNoClosing;
+ },
+ ']' => {
+ if (p.stack_used == 0) {
+ return error.UnbalancedBrackets;
+ }
+
+ p.state = State.ValueEnd;
+ p.after_string_state = State.fromInt(p.stack & 1);
+
+ p.stack >>= 1;
+ p.stack_used -= 1;
+
+ if (p.stack_used == 0) {
+ p.complete = true;
+ p.state = State.TopLevelEnd;
+ }
+
+ *token = Token.initMarker(Token.Id.ArrayEnd);
+ },
+ '}' => {
+ if (p.stack_used == 0) {
+ return error.UnbalancedBraces;
+ }
+
+ p.state = State.ValueEnd;
+ p.after_string_state = State.fromInt(p.stack & 1);
+
+ p.stack >>= 1;
+ p.stack_used -= 1;
+
+ if (p.stack_used == 0) {
+ p.complete = true;
+ p.state = State.TopLevelEnd;
+ }
+
+ *token = Token.initMarker(Token.Id.ObjectEnd);
+ },
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidValueEnd;
+ },
+ },
+
+ State.ObjectSeparator => switch (c) {
+ ':' => {
+ p.state = State.ValueBegin;
+ p.after_string_state = State.ValueEnd;
+ },
+ 0x09, 0x0A, 0x0D, 0x20 => {
+ // whitespace
+ },
+ else => {
+ return error.InvalidSeparator;
+ },
+ },
+
+ State.String => switch (c) {
+ 0x00 ... 0x1F => {
+ return error.InvalidControlCharacter;
+ },
+ '"' => {
+ p.state = p.after_string_state;
+ if (p.after_value_state == State.TopLevelEnd) {
+ p.state = State.TopLevelEnd;
+ p.complete = true;
+ }
+
+ *token = Token.initString(p.count - 1, p.string_has_escape);
+ },
+ '\\' => {
+ p.state = State.StringEscapeCharacter;
+ },
+ 0x20, 0x21, 0x23 ... 0x5B, 0x5D ... 0x7F => {
+ // non-control ascii
+ },
+ 0xC0 ... 0xDF => {
+ p.state = State.StringUtf8Byte1;
+ },
+ 0xE0 ... 0xEF => {
+ p.state = State.StringUtf8Byte2;
+ },
+ 0xF0 ... 0xFF => {
+ p.state = State.StringUtf8Byte3;
+ },
+ else => {
+ return error.InvalidUtf8Byte;
+ },
+ },
+
+ State.StringUtf8Byte3 => switch (c >> 6) {
+ 0b10 => p.state = State.StringUtf8Byte2,
+ else => return error.InvalidUtf8Byte,
+ },
+
+ State.StringUtf8Byte2 => switch (c >> 6) {
+ 0b10 => p.state = State.StringUtf8Byte1,
+ else => return error.InvalidUtf8Byte,
+ },
+
+ State.StringUtf8Byte1 => switch (c >> 6) {
+ 0b10 => p.state = State.String,
+ else => return error.InvalidUtf8Byte,
+ },
+
+ State.StringEscapeCharacter => switch (c) {
+ // NOTE: '/' is allowed as an escaped character but it also is allowed
+ // as unescaped according to the RFC. There is a reported errata which suggests
+ // removing the non-escaped variant but it makes more sense to simply disallow
+ // it as an escape code here.
+ //
+            // The current JSONTestSuite tests rely on both of these behaviours being present
+ // however, so we default to the status quo where both are accepted until this
+ // is further clarified.
+ '"', '\\', '/', 'b', 'f', 'n', 'r', 't' => {
+ p.string_has_escape = true;
+ p.state = State.String;
+ },
+ 'u' => {
+ p.string_has_escape = true;
+ p.state = State.StringEscapeHexUnicode4;
+ },
+ else => {
+ return error.InvalidEscapeCharacter;
+ },
+ },
+
+ State.StringEscapeHexUnicode4 => switch (c) {
+ '0' ... '9', 'A' ... 'F', 'a' ... 'f' => {
+ p.state = State.StringEscapeHexUnicode3;
+ },
+ else => return error.InvalidUnicodeHexSymbol,
+ },
+
+ State.StringEscapeHexUnicode3 => switch (c) {
+ '0' ... '9', 'A' ... 'F', 'a' ... 'f' => {
+ p.state = State.StringEscapeHexUnicode2;
+ },
+ else => return error.InvalidUnicodeHexSymbol,
+ },
+
+ State.StringEscapeHexUnicode2 => switch (c) {
+ '0' ... '9', 'A' ... 'F', 'a' ... 'f' => {
+ p.state = State.StringEscapeHexUnicode1;
+ },
+ else => return error.InvalidUnicodeHexSymbol,
+ },
+
+ State.StringEscapeHexUnicode1 => switch (c) {
+ '0' ... '9', 'A' ... 'F', 'a' ... 'f' => {
+ p.state = State.String;
+ },
+ else => return error.InvalidUnicodeHexSymbol,
+ },
+
+ State.Number => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '0' => {
+ p.state = State.NumberMaybeDotOrExponent;
+ },
+ '1' ... '9' => {
+ p.state = State.NumberMaybeDigitOrDotOrExponent;
+ },
+ else => {
+ return error.InvalidNumber;
+ },
+ }
+ },
+
+ State.NumberMaybeDotOrExponent => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '.' => {
+ p.number_is_integer = false;
+ p.state = State.NumberFractionalRequired;
+ },
+ 'e', 'E' => {
+ p.number_is_integer = false;
+ p.state = State.NumberExponent;
+ },
+ else => {
+ p.state = p.after_value_state;
+ *token = Token.initNumber(p.count, p.number_is_integer);
+ return true;
+ },
+ }
+ },
+
+ State.NumberMaybeDigitOrDotOrExponent => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '.' => {
+ p.number_is_integer = false;
+ p.state = State.NumberFractionalRequired;
+ },
+ 'e', 'E' => {
+ p.number_is_integer = false;
+ p.state = State.NumberExponent;
+ },
+ '0' ... '9' => {
+ // another digit
+ },
+ else => {
+ p.state = p.after_value_state;
+ *token = Token.initNumber(p.count, p.number_is_integer);
+ return true;
+ },
+ }
+ },
+
+ State.NumberFractionalRequired => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '0' ... '9' => {
+ p.state = State.NumberFractional;
+ },
+ else => {
+ return error.InvalidNumber;
+ },
+ }
+ },
+
+ State.NumberFractional => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '0' ... '9' => {
+ // another digit
+ },
+ 'e', 'E' => {
+ p.number_is_integer = false;
+ p.state = State.NumberExponent;
+ },
+ else => {
+ p.state = p.after_value_state;
+ *token = Token.initNumber(p.count, p.number_is_integer);
+ return true;
+ },
+ }
+ },
+
+ State.NumberMaybeExponent => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ 'e', 'E' => {
+ p.number_is_integer = false;
+ p.state = State.NumberExponent;
+ },
+ else => {
+ p.state = p.after_value_state;
+ *token = Token.initNumber(p.count, p.number_is_integer);
+ return true;
+ },
+ }
+ },
+
+ State.NumberExponent => switch (c) {
+ '-', '+', => {
+ p.complete = false;
+ p.state = State.NumberExponentDigitsRequired;
+ },
+ '0' ... '9' => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ p.state = State.NumberExponentDigits;
+ },
+ else => {
+ return error.InvalidNumber;
+ },
+ },
+
+ State.NumberExponentDigitsRequired => switch (c) {
+ '0' ... '9' => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ p.state = State.NumberExponentDigits;
+ },
+ else => {
+ return error.InvalidNumber;
+ },
+ },
+
+ State.NumberExponentDigits => {
+ p.complete = p.after_value_state == State.TopLevelEnd;
+ switch (c) {
+ '0' ... '9' => {
+ // another digit
+ },
+ else => {
+ p.state = p.after_value_state;
+ *token = Token.initNumber(p.count, p.number_is_integer);
+ return true;
+ },
+ }
+ },
+
+ State.TrueLiteral1 => switch (c) {
+ 'r' => p.state = State.TrueLiteral2,
+ else => return error.InvalidLiteral,
+ },
+
+ State.TrueLiteral2 => switch (c) {
+ 'u' => p.state = State.TrueLiteral3,
+ else => return error.InvalidLiteral,
+ },
+
+ State.TrueLiteral3 => switch (c) {
+ 'e' => {
+ p.state = p.after_value_state;
+ p.complete = p.state == State.TopLevelEnd;
+ *token = Token.init(Token.Id.True, p.count + 1, 1);
+ },
+ else => {
+ return error.InvalidLiteral;
+ },
+ },
+
+ State.FalseLiteral1 => switch (c) {
+ 'a' => p.state = State.FalseLiteral2,
+ else => return error.InvalidLiteral,
+ },
+
+ State.FalseLiteral2 => switch (c) {
+ 'l' => p.state = State.FalseLiteral3,
+ else => return error.InvalidLiteral,
+ },
+
+ State.FalseLiteral3 => switch (c) {
+ 's' => p.state = State.FalseLiteral4,
+ else => return error.InvalidLiteral,
+ },
+
+ State.FalseLiteral4 => switch (c) {
+ 'e' => {
+ p.state = p.after_value_state;
+ p.complete = p.state == State.TopLevelEnd;
+ *token = Token.init(Token.Id.False, p.count + 1, 1);
+ },
+ else => {
+ return error.InvalidLiteral;
+ },
+ },
+
+ State.NullLiteral1 => switch (c) {
+ 'u' => p.state = State.NullLiteral2,
+ else => return error.InvalidLiteral,
+ },
+
+ State.NullLiteral2 => switch (c) {
+ 'l' => p.state = State.NullLiteral3,
+ else => return error.InvalidLiteral,
+ },
+
+ State.NullLiteral3 => switch (c) {
+ 'l' => {
+ p.state = p.after_value_state;
+ p.complete = p.state == State.TopLevelEnd;
+ *token = Token.init(Token.Id.Null, p.count + 1, 1);
+ },
+ else => {
+ return error.InvalidLiteral;
+ },
+ },
+ }
+
+ return false;
+ }
+};
+
+// Validate a JSON string. This does not limit number precision so a decoder may not necessarily
+// be able to decode the string even if this returns true.
+pub fn validate(s: []const u8) bool {
+ var p = StreamingJsonParser.init();
+
+ for (s) |c, i| {
+ var token1: ?Token = undefined;
+ var token2: ?Token = undefined;
+
+ p.feed(c, &token1, &token2) catch |err| {
+ return false;
+ };
+ }
+
+ return p.complete;
+}
+
+const Allocator = std.mem.Allocator;
+const ArenaAllocator = std.heap.ArenaAllocator;
+const ArrayList = std.ArrayList;
+const HashMap = std.HashMap;
+
+pub const ValueTree = struct {
+ arena: ArenaAllocator,
+ root: Value,
+
+ pub fn deinit(self: &ValueTree) void {
+ self.arena.deinit();
+ }
+};
+
+pub const ObjectMap = HashMap([]const u8, Value, mem.hash_slice_u8, mem.eql_slice_u8);
+
+pub const Value = union(enum) {
+ Null,
+ Bool: bool,
+ Integer: i64,
+ Float: f64,
+ String: []const u8,
+ Array: ArrayList(Value),
+ Object: ObjectMap,
+
+ pub fn dump(self: &const Value) void {
+ switch (*self) {
+ Value.Null => {
+ std.debug.warn("null");
+ },
+ Value.Bool => |inner| {
+ std.debug.warn("{}", inner);
+ },
+ Value.Integer => |inner| {
+ std.debug.warn("{}", inner);
+ },
+ Value.Float => |inner| {
+ std.debug.warn("{.5}", inner);
+ },
+ Value.String => |inner| {
+ std.debug.warn("\"{}\"", inner);
+ },
+ Value.Array => |inner| {
+ var not_first = false;
+ std.debug.warn("[");
+ for (inner.toSliceConst()) |value| {
+ if (not_first) {
+ std.debug.warn(",");
+ }
+ not_first = true;
+ value.dump();
+ }
+ std.debug.warn("]");
+ },
+ Value.Object => |inner| {
+ var not_first = false;
+ std.debug.warn("{{");
+ var it = inner.iterator();
+
+ while (it.next()) |entry| {
+ if (not_first) {
+ std.debug.warn(",");
+ }
+ not_first = true;
+ std.debug.warn("\"{}\":", entry.key);
+ entry.value.dump();
+ }
+ std.debug.warn("}}");
+ },
+ }
+ }
+
+ pub fn dumpIndent(self: &const Value, indent: usize) void {
+ if (indent == 0) {
+ self.dump();
+ } else {
+ self.dumpIndentLevel(indent, 0);
+ }
+ }
+
+ fn dumpIndentLevel(self: &const Value, indent: usize, level: usize) void {
+ switch (*self) {
+ Value.Null => {
+ std.debug.warn("null");
+ },
+ Value.Bool => |inner| {
+ std.debug.warn("{}", inner);
+ },
+ Value.Integer => |inner| {
+ std.debug.warn("{}", inner);
+ },
+ Value.Float => |inner| {
+ std.debug.warn("{.5}", inner);
+ },
+ Value.String => |inner| {
+ std.debug.warn("\"{}\"", inner);
+ },
+ Value.Array => |inner| {
+ var not_first = false;
+ std.debug.warn("[\n");
+
+ for (inner.toSliceConst()) |value| {
+ if (not_first) {
+ std.debug.warn(",\n");
+ }
+ not_first = true;
+ padSpace(level + indent);
+ value.dumpIndentLevel(indent, level + indent);
+ }
+ std.debug.warn("\n");
+ padSpace(level);
+ std.debug.warn("]");
+ },
+ Value.Object => |inner| {
+ var not_first = false;
+ std.debug.warn("{{\n");
+ var it = inner.iterator();
+
+ while (it.next()) |entry| {
+ if (not_first) {
+ std.debug.warn(",\n");
+ }
+ not_first = true;
+ padSpace(level + indent);
+ std.debug.warn("\"{}\": ", entry.key);
+ entry.value.dumpIndentLevel(indent, level + indent);
+ }
+ std.debug.warn("\n");
+ padSpace(level);
+ std.debug.warn("}}");
+ },
+ }
+ }
+
+ fn padSpace(indent: usize) void {
+ var i: usize = 0;
+ while (i < indent) : (i += 1) {
+ std.debug.warn(" ");
+ }
+ }
+};
+
+// A non-stream JSON parser which constructs a tree of Value's.
+const JsonParser = struct {
+ allocator: &Allocator,
+ state: State,
+ copy_strings: bool,
+ // Stores parent nodes and un-combined Values.
+ stack: ArrayList(Value),
+
+ const State = enum {
+ ObjectKey,
+ ObjectValue,
+ ArrayValue,
+ Simple,
+ };
+
+ pub fn init(allocator: &Allocator, copy_strings: bool) JsonParser {
+ return JsonParser {
+ .allocator = allocator,
+ .state = State.Simple,
+ .copy_strings = copy_strings,
+ .stack = ArrayList(Value).init(allocator),
+ };
+ }
+
+ pub fn deinit(p: &JsonParser) void {
+ p.stack.deinit();
+ }
+
+ pub fn reset(p: &JsonParser) void {
+ p.state = State.Simple;
+ p.stack.shrink(0);
+ }
+
+ pub fn parse(p: &JsonParser, input: []const u8) !ValueTree {
+ var mp = StreamingJsonParser.init();
+
+ var arena = ArenaAllocator.init(p.allocator);
+ errdefer arena.deinit();
+
+ for (input) |c, i| {
+ var mt1: ?Token = undefined;
+ var mt2: ?Token = undefined;
+
+ try mp.feed(c, &mt1, &mt2);
+ if (mt1) |t1| {
+ try p.transition(&arena.allocator, input, i, t1);
+
+ if (mt2) |t2| {
+ try p.transition(&arena.allocator, input, i, t2);
+ }
+ }
+ }
+
+ // Handle top-level lonely number values.
+ {
+ const i = input.len;
+ var mt1: ?Token = undefined;
+ var mt2: ?Token = undefined;
+
+ try mp.feed(' ', &mt1, &mt2);
+ if (mt1) |t1| {
+ try p.transition(&arena.allocator, input, i, t1);
+ }
+ }
+
+ if (!mp.complete) {
+ return error.IncompleteJsonInput;
+ }
+
+ std.debug.assert(p.stack.len == 1);
+
+ return ValueTree {
+ .arena = arena,
+ .root = p.stack.at(0),
+ };
+ }
+
+ // Even though p.allocator exists, we take an explicit allocator so that allocation state
+    // can be cleaned up on error correctly during a `parse` call.
+ fn transition(p: &JsonParser, allocator: &Allocator, input: []const u8, i: usize, token: &const Token) !void {
+ switch (p.state) {
+ State.ObjectKey => switch (token.id) {
+ Token.Id.ObjectEnd => {
+ if (p.stack.len == 1) {
+ return;
+ }
+
+ var value = p.stack.pop();
+ try p.pushToParent(value);
+ },
+ Token.Id.String => {
+ try p.stack.append(try p.parseString(allocator, token, input, i));
+ p.state = State.ObjectValue;
+ },
+ else => {
+ unreachable;
+ },
+ },
+ State.ObjectValue => {
+ var object = &p.stack.items[p.stack.len - 2].Object;
+ var key = p.stack.items[p.stack.len - 1].String;
+
+ switch (token.id) {
+ Token.Id.ObjectBegin => {
+ try p.stack.append(Value { .Object = ObjectMap.init(allocator) });
+ p.state = State.ObjectKey;
+ },
+ Token.Id.ArrayBegin => {
+ try p.stack.append(Value { .Array = ArrayList(Value).init(allocator) });
+ p.state = State.ArrayValue;
+ },
+ Token.Id.String => {
+ _ = try object.put(key, try p.parseString(allocator, token, input, i));
+ _ = p.stack.pop();
+ p.state = State.ObjectKey;
+ },
+ Token.Id.Number => {
+ _ = try object.put(key, try p.parseNumber(token, input, i));
+ _ = p.stack.pop();
+ p.state = State.ObjectKey;
+ },
+ Token.Id.True => {
+ _ = try object.put(key, Value { .Bool = true });
+ _ = p.stack.pop();
+ p.state = State.ObjectKey;
+ },
+ Token.Id.False => {
+ _ = try object.put(key, Value { .Bool = false });
+ _ = p.stack.pop();
+ p.state = State.ObjectKey;
+ },
+ Token.Id.Null => {
+ _ = try object.put(key, Value.Null);
+ _ = p.stack.pop();
+ p.state = State.ObjectKey;
+ },
+ else => {
+ unreachable;
+ },
+ }
+ },
+ State.ArrayValue => {
+ var array = &p.stack.items[p.stack.len - 1].Array;
+
+ switch (token.id) {
+ Token.Id.ArrayEnd => {
+ if (p.stack.len == 1) {
+ return;
+ }
+
+ var value = p.stack.pop();
+ try p.pushToParent(value);
+ },
+ Token.Id.ObjectBegin => {
+ try p.stack.append(Value { .Object = ObjectMap.init(allocator) });
+ p.state = State.ObjectKey;
+ },
+ Token.Id.ArrayBegin => {
+ try p.stack.append(Value { .Array = ArrayList(Value).init(allocator) });
+ p.state = State.ArrayValue;
+ },
+ Token.Id.String => {
+ try array.append(try p.parseString(allocator, token, input, i));
+ },
+ Token.Id.Number => {
+ try array.append(try p.parseNumber(token, input, i));
+ },
+ Token.Id.True => {
+ try array.append(Value { .Bool = true });
+ },
+ Token.Id.False => {
+ try array.append(Value { .Bool = false });
+ },
+ Token.Id.Null => {
+ try array.append(Value.Null);
+ },
+ else => {
+ unreachable;
+ },
+ }
+ },
+ State.Simple => switch (token.id) {
+ Token.Id.ObjectBegin => {
+ try p.stack.append(Value { .Object = ObjectMap.init(allocator) });
+ p.state = State.ObjectKey;
+ },
+ Token.Id.ArrayBegin => {
+ try p.stack.append(Value { .Array = ArrayList(Value).init(allocator) });
+ p.state = State.ArrayValue;
+ },
+ Token.Id.String => {
+ try p.stack.append(try p.parseString(allocator, token, input, i));
+ },
+ Token.Id.Number => {
+ try p.stack.append(try p.parseNumber(token, input, i));
+ },
+ Token.Id.True => {
+ try p.stack.append(Value { .Bool = true });
+ },
+ Token.Id.False => {
+ try p.stack.append(Value { .Bool = false });
+ },
+ Token.Id.Null => {
+ try p.stack.append(Value.Null);
+ },
+ Token.Id.ObjectEnd, Token.Id.ArrayEnd => {
+ unreachable;
+ },
+ },
+ }
+ }
+
+ fn pushToParent(p: &JsonParser, value: &const Value) !void {
+ switch (p.stack.at(p.stack.len - 1)) {
+ // Object Parent -> [ ..., object, <key>, value ]
+ Value.String => |key| {
+ _ = p.stack.pop();
+
+ var object = &p.stack.items[p.stack.len - 1].Object;
+ _ = try object.put(key, value);
+ p.state = State.ObjectKey;
+ },
+ // Array Parent -> [ ..., <array>, value ]
+ Value.Array => |*array| {
+ try array.append(value);
+ p.state = State.ArrayValue;
+ },
+ else => {
+ unreachable;
+ },
+ }
+ }
+
+ fn parseString(p: &JsonParser, allocator: &Allocator, token: &const Token, input: []const u8, i: usize) !Value {
+ // TODO: We don't strictly have to copy values which do not contain any escape
+ // characters if flagged with the option.
+ const slice = token.slice(input, i);
+ return Value { .String = try mem.dupe(p.allocator, u8, slice) };
+ }
+
+ fn parseNumber(p: &JsonParser, token: &const Token, input: []const u8, i: usize) !Value {
+ return if (token.number_is_integer)
+ Value { .Integer = try std.fmt.parseInt(i64, token.slice(input, i), 10) }
+ else
+ @panic("TODO: fmt.parseFloat not yet implemented")
+ ;
+ }
+};
+
+const debug = std.debug;
+
+test "json parser dynamic" {
+ var p = JsonParser.init(std.debug.global_allocator, false);
+ defer p.deinit();
+
+ const s =
+ \\{
+ \\ "Image": {
+ \\ "Width": 800,
+ \\ "Height": 600,
+ \\ "Title": "View from 15th Floor",
+ \\ "Thumbnail": {
+ \\ "Url": "http://www.example.com/image/481989943",
+ \\ "Height": 125,
+ \\ "Width": 100
+ \\ },
+ \\ "Animated" : false,
+ \\ "IDs": [116, 943, 234, 38793]
+ \\ }
+ \\}
+ ;
+
+ var tree = try p.parse(s);
+ defer tree.deinit();
+
+ var root = tree.root;
+
+ var image = (??root.Object.get("Image")).value;
+
+ const width = (??image.Object.get("Width")).value;
+ debug.assert(width.Integer == 800);
+
+ const height = (??image.Object.get("Height")).value;
+ debug.assert(height.Integer == 600);
+
+ const title = (??image.Object.get("Title")).value;
+ debug.assert(mem.eql(u8, title.String, "View from 15th Floor"));
+
+ const animated = (??image.Object.get("Animated")).value;
+ debug.assert(animated.Bool == false);
+}
diff --git a/std/json_test.zig b/std/json_test.zig
new file mode 100644
index 0000000000..90a2ddbd50
--- /dev/null
+++ b/std/json_test.zig
@@ -0,0 +1,1942 @@
+// RFC 8529 conformance tests.
+//
+// Tests are taken from https://github.com/nst/JSONTestSuite
+// Read also http://seriot.ch/parsing_json.php for a good overview.
+
+const std = @import("index.zig");
+
+fn ok(comptime s: []const u8) void {
+ std.debug.assert(std.json.validate(s));
+}
+
+fn err(comptime s: []const u8) void {
+ std.debug.assert(!std.json.validate(s));
+}
+
+fn any(comptime s: []const u8) void {
+ std.debug.assert(true);
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+
+test "y_array_arraysWithSpaces" {
+ ok(
+ \\[[] ]
+ );
+}
+
+test "y_array_empty" {
+ ok(
+ \\[]
+ );
+}
+
+test "y_array_empty-string" {
+ ok(
+ \\[""]
+ );
+}
+
+test "y_array_ending_with_newline" {
+ ok(
+ \\["a"]
+ );
+}
+
+test "y_array_false" {
+ ok(
+ \\[false]
+ );
+}
+
+test "y_array_heterogeneous" {
+ ok(
+ \\[null, 1, "1", {}]
+ );
+}
+
+test "y_array_null" {
+ ok(
+ \\[null]
+ );
+}
+
+test "y_array_with_1_and_newline" {
+ ok(
+ \\[1
+ \\]
+ );
+}
+
+test "y_array_with_leading_space" {
+ ok(
+ \\ [1]
+ );
+}
+
+test "y_array_with_several_null" {
+ ok(
+ \\[1,null,null,null,2]
+ );
+}
+
+test "y_array_with_trailing_space" {
+ ok(
+ "[2] "
+ );
+}
+
+test "y_number_0e+1" {
+ ok(
+ \\[0e+1]
+ );
+}
+
+test "y_number_0e1" {
+ ok(
+ \\[0e1]
+ );
+}
+
+test "y_number_after_space" {
+ ok(
+ \\[ 4]
+ );
+}
+
+test "y_number_double_close_to_zero" {
+ ok(
+ \\[-0.000000000000000000000000000000000000000000000000000000000000000000000000000001]
+ );
+}
+
+test "y_number_int_with_exp" {
+ ok(
+ \\[20e1]
+ );
+}
+
+test "y_number" {
+ ok(
+ \\[123e65]
+ );
+}
+
+test "y_number_minus_zero" {
+ ok(
+ \\[-0]
+ );
+}
+
+test "y_number_negative_int" {
+ ok(
+ \\[-123]
+ );
+}
+
+test "y_number_negative_one" {
+ ok(
+ \\[-1]
+ );
+}
+
+test "y_number_negative_zero" {
+ ok(
+ \\[-0]
+ );
+}
+
+test "y_number_real_capital_e" {
+ ok(
+ \\[1E22]
+ );
+}
+
+test "y_number_real_capital_e_neg_exp" {
+ ok(
+ \\[1E-2]
+ );
+}
+
+test "y_number_real_capital_e_pos_exp" {
+ ok(
+ \\[1E+2]
+ );
+}
+
+test "y_number_real_exponent" {
+ ok(
+ \\[123e45]
+ );
+}
+
+test "y_number_real_fraction_exponent" {
+ ok(
+ \\[123.456e78]
+ );
+}
+
+test "y_number_real_neg_exp" {
+ ok(
+ \\[1e-2]
+ );
+}
+
+test "y_number_real_pos_exponent" {
+ ok(
+ \\[1e+2]
+ );
+}
+
+test "y_number_simple_int" {
+ ok(
+ \\[123]
+ );
+}
+
+test "y_number_simple_real" {
+ ok(
+ \\[123.456789]
+ );
+}
+
+test "y_object_basic" {
+ ok(
+ \\{"asd":"sdf"}
+ );
+}
+
+test "y_object_duplicated_key_and_value" {
+ ok(
+ \\{"a":"b","a":"b"}
+ );
+}
+
+test "y_object_duplicated_key" {
+ ok(
+ \\{"a":"b","a":"c"}
+ );
+}
+
+test "y_object_empty" {
+ ok(
+ \\{}
+ );
+}
+
+test "y_object_empty_key" {
+ ok(
+ \\{"":0}
+ );
+}
+
+test "y_object_escaped_null_in_key" {
+ ok(
+ \\{"foo\u0000bar": 42}
+ );
+}
+
+test "y_object_extreme_numbers" {
+ ok(
+ \\{ "min": -1.0e+28, "max": 1.0e+28 }
+ );
+}
+
+test "y_object" {
+ ok(
+ \\{"asd":"sdf", "dfg":"fgh"}
+ );
+}
+
+test "y_object_long_strings" {
+ ok(
+ \\{"x":[{"id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}], "id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}
+ );
+}
+
+test "y_object_simple" {
+ ok(
+ \\{"a":[]}
+ );
+}
+
+test "y_object_string_unicode" {
+ ok(
+ \\{"title":"\u041f\u043e\u043b\u0442\u043e\u0440\u0430 \u0417\u0435\u043c\u043b\u0435\u043a\u043e\u043f\u0430" }
+ );
+}
+
+test "y_object_with_newlines" {
+ ok(
+ \\{
+ \\"a": "b"
+ \\}
+ );
+}
+
+test "y_string_1_2_3_bytes_UTF-8_sequences" {
+ ok(
+ \\["\u0060\u012a\u12AB"]
+ );
+}
+
+test "y_string_accepted_surrogate_pair" {
+ ok(
+ \\["\uD801\udc37"]
+ );
+}
+
+test "y_string_accepted_surrogate_pairs" {
+ ok(
+ \\["\ud83d\ude39\ud83d\udc8d"]
+ );
+}
+
+test "y_string_allowed_escapes" {
+ ok(
+ \\["\"\\\/\b\f\n\r\t"]
+ );
+}
+
+test "y_string_backslash_and_u_escaped_zero" {
+ ok(
+ \\["\\u0000"]
+ );
+}
+
+test "y_string_backslash_doublequotes" {
+ ok(
+ \\["\""]
+ );
+}
+
+test "y_string_comments" {
+ ok(
+ \\["a/*b*/c/*d//e"]
+ );
+}
+
+test "y_string_double_escape_a" {
+ ok(
+ \\["\\a"]
+ );
+}
+
+test "y_string_double_escape_n" {
+ ok(
+ \\["\\n"]
+ );
+}
+
+test "y_string_escaped_control_character" {
+ ok(
+ \\["\u0012"]
+ );
+}
+
+test "y_string_escaped_noncharacter" {
+ ok(
+ \\["\uFFFF"]
+ );
+}
+
+test "y_string_in_array" {
+ ok(
+ \\["asd"]
+ );
+}
+
+test "y_string_in_array_with_leading_space" {
+ ok(
+ \\[ "asd"]
+ );
+}
+
+test "y_string_last_surrogates_1_and_2" {
+ ok(
+ \\["\uDBFF\uDFFF"]
+ );
+}
+
+test "y_string_nbsp_uescaped" {
+ ok(
+ \\["new\u00A0line"]
+ );
+}
+
+test "y_string_nonCharacterInUTF-8_U+10FFFF" {
+ ok(
+ \\["􏿿"]
+ );
+}
+
+test "y_string_nonCharacterInUTF-8_U+FFFF" {
+ ok(
+ \\["￿"]
+ );
+}
+
+test "y_string_null_escape" {
+ ok(
+ \\["\u0000"]
+ );
+}
+
+test "y_string_one-byte-utf-8" {
+ ok(
+ \\["\u002c"]
+ );
+}
+
+test "y_string_pi" {
+ ok(
+ \\["π"]
+ );
+}
+
+test "y_string_reservedCharacterInUTF-8_U+1BFFF" {
+ ok(
+ \\["𛿿"]
+ );
+}
+
+test "y_string_simple_ascii" {
+ ok(
+ \\["asd "]
+ );
+}
+
+test "y_string_space" {
+ ok(
+ \\" "
+ );
+}
+
+test "y_string_surrogates_U+1D11E_MUSICAL_SYMBOL_G_CLEF" {
+ ok(
+ \\["\uD834\uDd1e"]
+ );
+}
+
+test "y_string_three-byte-utf-8" {
+ ok(
+ \\["\u0821"]
+ );
+}
+
+test "y_string_two-byte-utf-8" {
+ ok(
+ \\["\u0123"]
+ );
+}
+
+test "y_string_u+2028_line_sep" {
+ ok(
+ \\["
"]
+ );
+}
+
+test "y_string_u+2029_par_sep" {
+ ok(
+ \\["
"]
+ );
+}
+
+test "y_string_uescaped_newline" {
+ ok(
+ \\["new\u000Aline"]
+ );
+}
+
+test "y_string_uEscape" {
+ ok(
+ \\["\u0061\u30af\u30EA\u30b9"]
+ );
+}
+
+test "y_string_unescaped_char_delete" {
+ ok(
+ \\[""]
+ );
+}
+
+test "y_string_unicode_2" {
+ ok(
+ \\["⍂㈴⍂"]
+ );
+}
+
+test "y_string_unicodeEscapedBackslash" {
+ ok(
+ \\["\u005C"]
+ );
+}
+
+test "y_string_unicode_escaped_double_quote" {
+ ok(
+ \\["\u0022"]
+ );
+}
+
+test "y_string_unicode" {
+ ok(
+ \\["\uA66D"]
+ );
+}
+
+test "y_string_unicode_U+10FFFE_nonchar" {
+ ok(
+ \\["\uDBFF\uDFFE"]
+ );
+}
+
+test "y_string_unicode_U+1FFFE_nonchar" {
+ ok(
+ \\["\uD83F\uDFFE"]
+ );
+}
+
+test "y_string_unicode_U+200B_ZERO_WIDTH_SPACE" {
+ ok(
+ \\["\u200B"]
+ );
+}
+
+test "y_string_unicode_U+2064_invisible_plus" {
+ ok(
+ \\["\u2064"]
+ );
+}
+
+test "y_string_unicode_U+FDD0_nonchar" {
+ ok(
+ \\["\uFDD0"]
+ );
+}
+
+test "y_string_unicode_U+FFFE_nonchar" {
+ ok(
+ \\["\uFFFE"]
+ );
+}
+
+test "y_string_utf8" {
+ ok(
+ \\["€𝄞"]
+ );
+}
+
+test "y_string_with_del_character" {
+ ok(
+ \\["aa"]
+ );
+}
+
+test "y_structure_lonely_false" {
+ ok(
+ \\false
+ );
+}
+
+test "y_structure_lonely_int" {
+ ok(
+ \\42
+ );
+}
+
+test "y_structure_lonely_negative_real" {
+ ok(
+ \\-0.1
+ );
+}
+
+test "y_structure_lonely_null" {
+ ok(
+ \\null
+ );
+}
+
+test "y_structure_lonely_string" {
+ ok(
+ \\"asd"
+ );
+}
+
+test "y_structure_lonely_true" {
+ ok(
+ \\true
+ );
+}
+
+test "y_structure_string_empty" {
+ ok(
+ \\""
+ );
+}
+
+test "y_structure_trailing_newline" {
+ ok(
+ \\["a"]
+ );
+}
+
+test "y_structure_true_in_array" {
+ ok(
+ \\[true]
+ );
+}
+
+test "y_structure_whitespace_array" {
+ ok(
+ " [] "
+ );
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+
+test "n_array_1_true_without_comma" {
+ err(
+ \\[1 true]
+ );
+}
+
+test "n_array_a_invalid_utf8" {
+ err(
+ \\[aå]
+ );
+}
+
+test "n_array_colon_instead_of_comma" {
+ err(
+ \\["": 1]
+ );
+}
+
+test "n_array_comma_after_close" {
+ //err(
+ // \\[""],
+ //);
+}
+
+test "n_array_comma_and_number" {
+ err(
+ \\[,1]
+ );
+}
+
+test "n_array_double_comma" {
+ err(
+ \\[1,,2]
+ );
+}
+
+test "n_array_double_extra_comma" {
+ err(
+ \\["x",,]
+ );
+}
+
+test "n_array_extra_close" {
+ err(
+ \\["x"]]
+ );
+}
+
+test "n_array_extra_comma" {
+ //err(
+ // \\["",]
+ //);
+}
+
+test "n_array_incomplete_invalid_value" {
+ err(
+ \\[x
+ );
+}
+
+test "n_array_incomplete" {
+ err(
+ \\["x"
+ );
+}
+
+test "n_array_inner_array_no_comma" {
+ err(
+ \\[3[4]]
+ );
+}
+
+test "n_array_invalid_utf8" {
+ err(
+ \\[ÿ]
+ );
+}
+
+test "n_array_items_separated_by_semicolon" {
+ err(
+ \\[1:2]
+ );
+}
+
+test "n_array_just_comma" {
+ err(
+ \\[,]
+ );
+}
+
+test "n_array_just_minus" {
+ err(
+ \\[-]
+ );
+}
+
+test "n_array_missing_value" {
+ err(
+ \\[ , ""]
+ );
+}
+
+test "n_array_newlines_unclosed" {
+ err(
+ \\["a",
+ \\4
+ \\,1,
+ );
+}
+
+
+test "n_array_number_and_comma" {
+ err(
+ \\[1,]
+ );
+}
+
+test "n_array_number_and_several_commas" {
+ err(
+ \\[1,,]
+ );
+}
+
+test "n_array_spaces_vertical_tab_formfeed" {
+ err(
+ \\[" a"\f]
+ );
+}
+
+test "n_array_star_inside" {
+ err(
+ \\[*]
+ );
+}
+
+test "n_array_unclosed" {
+ err(
+ \\[""
+ );
+}
+
+test "n_array_unclosed_trailing_comma" {
+ err(
+ \\[1,
+ );
+}
+
+test "n_array_unclosed_with_new_lines" {
+ err(
+ \\[1,
+ \\1
+ \\,1
+ );
+}
+
+test "n_array_unclosed_with_object_inside" {
+ err(
+ \\[{}
+ );
+}
+
+test "n_incomplete_false" {
+ err(
+ \\[fals]
+ );
+}
+
+test "n_incomplete_null" {
+ err(
+ \\[nul]
+ );
+}
+
+test "n_incomplete_true" {
+ err(
+ \\[tru]
+ );
+}
+
+test "n_multidigit_number_then_00" {
+ err(
+ \\123
+ );
+}
+
+test "n_number_0.1.2" {
+ err(
+ \\[0.1.2]
+ );
+}
+
+test "n_number_-01" {
+ err(
+ \\[-01]
+ );
+}
+
+test "n_number_0.3e" {
+ err(
+ \\[0.3e]
+ );
+}
+
+test "n_number_0.3e+" {
+ err(
+ \\[0.3e+]
+ );
+}
+
+test "n_number_0_capital_E" {
+ err(
+ \\[0E]
+ );
+}
+
+test "n_number_0_capital_E+" {
+ err(
+ \\[0E+]
+ );
+}
+
+test "n_number_0.e1" {
+ err(
+ \\[0.e1]
+ );
+}
+
+test "n_number_0e" {
+ err(
+ \\[0e]
+ );
+}
+
+test "n_number_0e+" {
+ err(
+ \\[0e+]
+ );
+}
+
+test "n_number_1_000" {
+ err(
+ \\[1 000.0]
+ );
+}
+
+test "n_number_1.0e-" {
+ err(
+ \\[1.0e-]
+ );
+}
+
+test "n_number_1.0e" {
+ err(
+ \\[1.0e]
+ );
+}
+
+test "n_number_1.0e+" {
+ err(
+ \\[1.0e+]
+ );
+}
+
+test "n_number_-1.0." {
+ err(
+ \\[-1.0.]
+ );
+}
+
+test "n_number_1eE2" {
+ err(
+ \\[1eE2]
+ );
+}
+
+test "n_number_.-1" {
+ err(
+ \\[.-1]
+ );
+}
+
+test "n_number_+1" {
+ err(
+ \\[+1]
+ );
+}
+
+test "n_number_.2e-3" {
+ err(
+ \\[.2e-3]
+ );
+}
+
+test "n_number_2.e-3" {
+ err(
+ \\[2.e-3]
+ );
+}
+
+test "n_number_2.e+3" {
+ err(
+ \\[2.e+3]
+ );
+}
+
+test "n_number_2.e3" {
+ err(
+ \\[2.e3]
+ );
+}
+
+test "n_number_-2." {
+ err(
+ \\[-2.]
+ );
+}
+
+test "n_number_9.e+" {
+ err(
+ \\[9.e+]
+ );
+}
+
+test "n_number_expression" {
+ err(
+ \\[1+2]
+ );
+}
+
+test "n_number_hex_1_digit" {
+ err(
+ \\[0x1]
+ );
+}
+
+test "n_number_hex_2_digits" {
+ err(
+ \\[0x42]
+ );
+}
+
+test "n_number_infinity" {
+ err(
+ \\[Infinity]
+ );
+}
+
+test "n_number_+Inf" {
+ err(
+ \\[+Inf]
+ );
+}
+
+test "n_number_Inf" {
+ err(
+ \\[Inf]
+ );
+}
+
+test "n_number_invalid+-" {
+ err(
+ \\[0e+-1]
+ );
+}
+
+test "n_number_invalid-negative-real" {
+ err(
+ \\[-123.123foo]
+ );
+}
+
+test "n_number_invalid-utf-8-in-bigger-int" {
+ err(
+ \\[123å]
+ );
+}
+
+test "n_number_invalid-utf-8-in-exponent" {
+ err(
+ \\[1e1å]
+ );
+}
+
+test "n_number_invalid-utf-8-in-int" {
+ err(
+ \\[0å]
+ );
+}
+
+
+test "n_number_++" {
+ err(
+ \\[++1234]
+ );
+}
+
+test "n_number_minus_infinity" {
+ err(
+ \\[-Infinity]
+ );
+}
+
+test "n_number_minus_sign_with_trailing_garbage" {
+ err(
+ \\[-foo]
+ );
+}
+
+test "n_number_minus_space_1" {
+ err(
+ \\[- 1]
+ );
+}
+
+test "n_number_-NaN" {
+ err(
+ \\[-NaN]
+ );
+}
+
+test "n_number_NaN" {
+ err(
+ \\[NaN]
+ );
+}
+
+test "n_number_neg_int_starting_with_zero" {
+ err(
+ \\[-012]
+ );
+}
+
+test "n_number_neg_real_without_int_part" {
+ err(
+ \\[-.123]
+ );
+}
+
+test "n_number_neg_with_garbage_at_end" {
+ err(
+ \\[-1x]
+ );
+}
+
+test "n_number_real_garbage_after_e" {
+ err(
+ \\[1ea]
+ );
+}
+
+test "n_number_real_with_invalid_utf8_after_e" {
+ err(
+ \\[1eå]
+ );
+}
+
+test "n_number_real_without_fractional_part" {
+ err(
+ \\[1.]
+ );
+}
+
+test "n_number_starting_with_dot" {
+ err(
+ \\[.123]
+ );
+}
+
+test "n_number_U+FF11_fullwidth_digit_one" {
+ err(
+ \\[1]
+ );
+}
+
+test "n_number_with_alpha_char" {
+ err(
+ \\[1.8011670033376514H-308]
+ );
+}
+
+test "n_number_with_alpha" {
+ err(
+ \\[1.2a-3]
+ );
+}
+
+test "n_number_with_leading_zero" {
+ err(
+ \\[012]
+ );
+}
+
+test "n_object_bad_value" {
+ err(
+ \\["x", truth]
+ );
+}
+
+test "n_object_bracket_key" {
+ err(
+ \\{[: "x"}
+ );
+}
+
+test "n_object_comma_instead_of_colon" {
+ err(
+ \\{"x", null}
+ );
+}
+
+test "n_object_double_colon" {
+ err(
+ \\{"x"::"b"}
+ );
+}
+
+test "n_object_emoji" {
+ err(
+ \\{🇨🇭}
+ );
+}
+
+test "n_object_garbage_at_end" {
+ err(
+ \\{"a":"a" 123}
+ );
+}
+
+test "n_object_key_with_single_quotes" {
+ err(
+ \\{key: 'value'}
+ );
+}
+
+test "n_object_lone_continuation_byte_in_key_and_trailing_comma" {
+ err(
+ \\{"¹":"0",}
+ );
+}
+
+test "n_object_missing_colon" {
+ err(
+ \\{"a" b}
+ );
+}
+
+test "n_object_missing_key" {
+ err(
+ \\{:"b"}
+ );
+}
+
+test "n_object_missing_semicolon" {
+ err(
+ \\{"a" "b"}
+ );
+}
+
+test "n_object_missing_value" {
+ err(
+ \\{"a":
+ );
+}
+
+test "n_object_no-colon" {
+ err(
+ \\{"a"
+ );
+}
+
+test "n_object_non_string_key_but_huge_number_instead" {
+ err(
+ \\{9999E9999:1}
+ );
+}
+
+test "n_object_non_string_key" {
+ err(
+ \\{1:1}
+ );
+}
+
+test "n_object_repeated_null_null" {
+ err(
+ \\{null:null,null:null}
+ );
+}
+
+test "n_object_several_trailing_commas" {
+ err(
+ \\{"id":0,,,,,}
+ );
+}
+
+test "n_object_single_quote" {
+ err(
+ \\{'a':0}
+ );
+}
+
+test "n_object_trailing_comma" {
+ err(
+ \\{"id":0,}
+ );
+}
+
+test "n_object_trailing_comment" {
+ err(
+ \\{"a":"b"}/**/
+ );
+}
+
+test "n_object_trailing_comment_open" {
+ err(
+ \\{"a":"b"}/**//
+ );
+}
+
+test "n_object_trailing_comment_slash_open_incomplete" {
+ err(
+ \\{"a":"b"}/
+ );
+}
+
+test "n_object_trailing_comment_slash_open" {
+ err(
+ \\{"a":"b"}//
+ );
+}
+
+test "n_object_two_commas_in_a_row" {
+ err(
+ \\{"a":"b",,"c":"d"}
+ );
+}
+
+test "n_object_unquoted_key" {
+ err(
+ \\{a: "b"}
+ );
+}
+
+test "n_object_unterminated-value" {
+ err(
+ \\{"a":"a
+ );
+ }
+
+test "n_object_with_single_string" {
+ err(
+ \\{ "foo" : "bar", "a" }
+ );
+}
+
+test "n_object_with_trailing_garbage" {
+ err(
+ \\{"a":"b"}#
+ );
+}
+
+test "n_single_space" {
+ err(
+ " "
+ );
+}
+
+test "n_string_1_surrogate_then_escape" {
+ err(
+ \\["\uD800\"]
+ );
+}
+
+test "n_string_1_surrogate_then_escape_u1" {
+ err(
+ \\["\uD800\u1"]
+ );
+}
+
+test "n_string_1_surrogate_then_escape_u1x" {
+ err(
+ \\["\uD800\u1x"]
+ );
+}
+
+test "n_string_1_surrogate_then_escape_u" {
+ err(
+ \\["\uD800\u"]
+ );
+}
+
+test "n_string_accentuated_char_no_quotes" {
+ err(
+ \\[é]
+ );
+}
+
+test "n_string_backslash_00" {
+ err(
+ \\["\
+ );
+}
+
+test "n_string_escaped_backslash_bad" {
+ err(
+ \\["\\\"]
+ );
+}
+
+test "n_string_escaped_ctrl_char_tab" {
+ err(
+ \\["\ "]
+ );
+}
+
+test "n_string_escaped_emoji" {
+ err(
+ \\["\🌀"]
+ );
+}
+
+test "n_string_escape_x" {
+ err(
+ \\["\x00"]
+ );
+}
+
+test "n_string_incomplete_escaped_character" {
+ err(
+ \\["\u00A"]
+ );
+}
+
+test "n_string_incomplete_escape" {
+ err(
+ \\["\"]
+ );
+}
+
+test "n_string_incomplete_surrogate_escape_invalid" {
+ err(
+ \\["\uD800\uD800\x"]
+ );
+}
+
+test "n_string_incomplete_surrogate" {
+ err(
+ \\["\uD834\uDd"]
+ );
+}
+
+test "n_string_invalid_backslash_esc" {
+ err(
+ \\["\a"]
+ );
+}
+
+test "n_string_invalid_unicode_escape" {
+ err(
+ \\["\uqqqq"]
+ );
+}
+
+test "n_string_invalid_utf8_after_escape" {
+ err(
+ \\["\å"]
+ );
+}
+
+test "n_string_invalid-utf-8-in-escape" {
+ err(
+ \\["\uå"]
+ );
+}
+
+test "n_string_leading_uescaped_thinspace" {
+ err(
+ \\[\u0020"asd"]
+ );
+}
+
+test "n_string_no_quotes_with_bad_escape" {
+ err(
+ \\[\n]
+ );
+}
+
+test "n_string_single_doublequote" {
+ err(
+ \\"
+ );
+}
+
+test "n_string_single_quote" {
+ err(
+ \\['single quote']
+ );
+}
+
+test "n_string_single_string_no_double_quotes" {
+ err(
+ \\abc
+ );
+}
+
+test "n_string_start_escape_unclosed" {
+ err(
+ \\["\
+ );
+}
+
+test "n_string_unescaped_crtl_char" {
+ err(
+ \\["a
+ );
+}
+
+test "n_string_unescaped_newline" {
+ err(
+ \\["new
+ \\line"]
+ );
+}
+
+test "n_string_unescaped_tab" {
+ err(
+ \\[" "]
+ );
+}
+
+test "n_string_unicode_CapitalU" {
+ err(
+ \\"\UA66D"
+ );
+}
+
+test "n_string_with_trailing_garbage" {
+ err(
+ \\""x
+ );
+}
+
+test "n_structure_100000_opening_arrays" {
+ err(
+ "[" ** 100000
+ );
+}
+
+test "n_structure_angle_bracket_." {
+ err(
+ \\<.>
+ );
+}
+
+test "n_structure_angle_bracket_null" {
+ err(
+ \\[<null>]
+ );
+}
+
+test "n_structure_array_trailing_garbage" {
+ err(
+ \\[1]x
+ );
+}
+
+test "n_structure_array_with_extra_array_close" {
+ err(
+ \\[1]]
+ );
+}
+
+test "n_structure_array_with_unclosed_string" {
+ err(
+ \\["asd]
+ );
+}
+
+test "n_structure_ascii-unicode-identifier" {
+ err(
+ \\aå
+ );
+}
+
+test "n_structure_capitalized_True" {
+ err(
+ \\[True]
+ );
+}
+
+test "n_structure_close_unopened_array" {
+ err(
+ \\1]
+ );
+}
+
+test "n_structure_comma_instead_of_closing_brace" {
+ err(
+ \\{"x": true,
+ );
+}
+
+test "n_structure_double_array" {
+ err(
+ \\[][]
+ );
+}
+
+test "n_structure_end_array" {
+ err(
+ \\]
+ );
+}
+
+test "n_structure_incomplete_UTF8_BOM" {
+ err(
+ \\ï»{}
+ );
+}
+
+test "n_structure_lone-invalid-utf-8" {
+ err(
+ \\å
+ );
+}
+
+test "n_structure_lone-open-bracket" {
+ err(
+ \\[
+ );
+}
+
+test "n_structure_no_data" {
+ err(
+ \\
+ );
+}
+
+test "n_structure_null-byte-outside-string" {
+ err(
+ \\[
+ );
+}
+
+test "n_structure_number_with_trailing_garbage" {
+ err(
+ \\2@
+ );
+}
+
+test "n_structure_object_followed_by_closing_object" {
+ err(
+ \\{}}
+ );
+}
+
+test "n_structure_object_unclosed_no_value" {
+ err(
+ \\{"":
+ );
+}
+
+test "n_structure_object_with_comment" {
+ err(
+ \\{"a":/*comment*/"b"}
+ );
+}
+
+test "n_structure_object_with_trailing_garbage" {
+ err(
+ \\{"a": true} "x"
+ );
+}
+
+test "n_structure_open_array_apostrophe" {
+ err(
+ \\['
+ );
+}
+
+test "n_structure_open_array_comma" {
+ err(
+ \\[,
+ );
+}
+
+test "n_structure_open_array_object" {
+ err(
+ "[{\"\":" ** 50000
+ );
+}
+
+test "n_structure_open_array_open_object" {
+ err(
+ \\[{
+ );
+}
+
+test "n_structure_open_array_open_string" {
+ err(
+ \\["a
+ );
+}
+
+test "n_structure_open_array_string" {
+ err(
+ \\["a"
+ );
+}
+
+test "n_structure_open_object_close_array" {
+ err(
+ \\{]
+ );
+}
+
+test "n_structure_open_object_comma" {
+ err(
+ \\{,
+ );
+}
+
+test "n_structure_open_object" {
+ err(
+ \\{
+ );
+}
+
+test "n_structure_open_object_open_array" {
+ err(
+ \\{[
+ );
+}
+
+test "n_structure_open_object_open_string" {
+ err(
+ \\{"a
+ );
+}
+
+test "n_structure_open_object_string_with_apostrophes" {
+ err(
+ \\{'a'
+ );
+}
+
+test "n_structure_open_open" {
+ err(
+ \\["\{["\{["\{["\{
+ );
+}
+
+test "n_structure_single_eacute" {
+ err(
+ \\é
+ );
+}
+
+test "n_structure_single_star" {
+ err(
+ \\*
+ );
+}
+
+test "n_structure_trailing_#" {
+ err(
+ \\{"a":"b"}#{}
+ );
+}
+
+test "n_structure_U+2060_word_joined" {
+ err(
+ \\[⁠]
+ );
+}
+
+test "n_structure_uescaped_LF_before_string" {
+ err(
+ \\[\u000A""]
+ );
+}
+
+test "n_structure_unclosed_array" {
+ err(
+ \\[1
+ );
+}
+
+test "n_structure_unclosed_array_partial_null" {
+ err(
+ \\[ false, nul
+ );
+}
+
+test "n_structure_unclosed_array_unfinished_false" {
+ err(
+ \\[ true, fals
+ );
+}
+
+test "n_structure_unclosed_array_unfinished_true" {
+ err(
+ \\[ false, tru
+ );
+}
+
+test "n_structure_unclosed_object" {
+ err(
+ \\{"asd":"asd"
+ );
+}
+
+test "n_structure_unicode-identifier" {
+ err(
+ \\Ã¥
+ );
+}
+
+test "n_structure_UTF8_BOM_no_data" {
+ err(
+ \\
+ );
+}
+
+test "n_structure_whitespace_formfeed" {
+ err(
+ \\[ ]
+ );
+}
+
+test "n_structure_whitespace_U+2060_word_joiner" {
+ err(
+ \\[⁠]
+ );
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+
+test "i_number_double_huge_neg_exp" {
+ any(
+ \\[123.456e-789]
+ );
+}
+
+test "i_number_huge_exp" {
+ any(
+ \\[0.4e00669999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999969999999006]
+ );
+}
+
+test "i_number_neg_int_huge_exp" {
+ any(
+ \\[-1e+9999]
+ );
+}
+
+test "i_number_pos_double_huge_exp" {
+ any(
+ \\[1.5e+9999]
+ );
+}
+
+test "i_number_real_neg_overflow" {
+ any(
+ \\[-123123e100000]
+ );
+}
+
+test "i_number_real_pos_overflow" {
+ any(
+ \\[123123e100000]
+ );
+}
+
+test "i_number_real_underflow" {
+ any(
+ \\[123e-10000000]
+ );
+}
+
+test "i_number_too_big_neg_int" {
+ any(
+ \\[-123123123123123123123123123123]
+ );
+}
+
+test "i_number_too_big_pos_int" {
+ any(
+ \\[100000000000000000000]
+ );
+}
+
+test "i_number_very_big_negative_int" {
+ any(
+ \\[-237462374673276894279832749832423479823246327846]
+ );
+}
+
+test "i_object_key_lone_2nd_surrogate" {
+ any(
+ \\{"\uDFAA":0}
+ );
+}
+
+test "i_string_1st_surrogate_but_2nd_missing" {
+ any(
+ \\["\uDADA"]
+ );
+}
+
+test "i_string_1st_valid_surrogate_2nd_invalid" {
+ any(
+ \\["\uD888\u1234"]
+ );
+}
+
+test "i_string_incomplete_surrogate_and_escape_valid" {
+ any(
+ \\["\uD800\n"]
+ );
+}
+
+test "i_string_incomplete_surrogate_pair" {
+ any(
+ \\["\uDd1ea"]
+ );
+}
+
+test "i_string_incomplete_surrogates_escape_valid" {
+ any(
+ \\["\uD800\uD800\n"]
+ );
+}
+
+test "i_string_invalid_lonely_surrogate" {
+ any(
+ \\["\ud800"]
+ );
+}
+
+test "i_string_invalid_surrogate" {
+ any(
+ \\["\ud800abc"]
+ );
+}
+
+test "i_string_invalid_utf-8" {
+ any(
+ \\["ÿ"]
+ );
+}
+
+test "i_string_inverted_surrogates_U+1D11E" {
+ any(
+ \\["\uDd1e\uD834"]
+ );
+}
+
+test "i_string_iso_latin_1" {
+ any(
+ \\["é"]
+ );
+}
+
+test "i_string_lone_second_surrogate" {
+ any(
+ \\["\uDFAA"]
+ );
+}
+
+test "i_string_lone_utf8_continuation_byte" {
+ any(
+ \\[""]
+ );
+}
+
+test "i_string_not_in_unicode_range" {
+ any(
+ \\["ô¿¿¿"]
+ );
+}
+
+test "i_string_overlong_sequence_2_bytes" {
+ any(
+ \\["À¯"]
+ );
+}
+
+test "i_string_overlong_sequence_6_bytes" {
+ any(
+ \\["üƒ¿¿¿¿"]
+ );
+}
+
+test "i_string_overlong_sequence_6_bytes_null" {
+ any(
+ \\["ü€€€€€"]
+ );
+}
+
+test "i_string_truncated-utf-8" {
+ any(
+ \\["àÿ"]
+ );
+}
+
+test "i_string_utf16BE_no_BOM" {
+ any(
+ \\
+ );
+}
+
+test "i_string_utf16LE_no_BOM" {
+ any(
+ \\[
+ );
+}
+
+test "i_string_UTF-16LE_with_BOM" {
+ any(
+ \\ÿþ[
+ );
+}
+
+test "i_string_UTF-8_invalid_sequence" {
+ any(
+ \\["日шú"]
+ );
+}
+
+test "i_string_UTF8_surrogate_U+D800" {
+ any(
+ \\["í €"]
+ );
+}
+
+test "i_structure_500_nested_arrays" {
+ any(
+ ("[" ** 500) ++ ("]" ** 500)
+ );
+}
+
+test "i_structure_UTF-8_BOM_empty_object" {
+ any(
+ \\{}
+ );
+}
diff --git a/std/linked_list.zig b/std/linked_list.zig
index c916a53133..45595f3efb 100644
--- a/std/linked_list.zig
+++ b/std/linked_list.zig
@@ -161,6 +161,7 @@ fn BaseLinkedList(comptime T: type, comptime ParentType: type, comptime field_na
}
list.len -= 1;
+ assert(list.len == 0 or (list.first != null and list.last != null));
}
/// Remove and return the last node in the list.
diff --git a/std/math/complex/abs.zig b/std/math/complex/abs.zig
new file mode 100644
index 0000000000..4cd095c46b
--- /dev/null
+++ b/std/math/complex/abs.zig
@@ -0,0 +1,18 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+pub fn abs(z: var) @typeOf(z.re) {
+ const T = @typeOf(z.re);
+ return math.hypot(T, z.re, z.im);
+}
+
+const epsilon = 0.0001;
+
+test "complex.cabs" {
+ const a = Complex(f32).new(5, 3);
+ const c = abs(a);
+ debug.assert(math.approxEq(f32, c, 5.83095, epsilon));
+}
diff --git a/std/math/complex/acos.zig b/std/math/complex/acos.zig
new file mode 100644
index 0000000000..a5760b4ace
--- /dev/null
+++ b/std/math/complex/acos.zig
@@ -0,0 +1,21 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+pub fn acos(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ const q = cmath.asin(z);
+ return Complex(T).new(T(math.pi) / 2 - q.re, -q.im);
+}
+
+const epsilon = 0.0001;
+
+test "complex.cacos" {
+ const a = Complex(f32).new(5, 3);
+ const c = acos(a);
+
+ debug.assert(math.approxEq(f32, c.re, 0.546975, epsilon));
+ debug.assert(math.approxEq(f32, c.im, -2.452914, epsilon));
+}
diff --git a/std/math/complex/acosh.zig b/std/math/complex/acosh.zig
new file mode 100644
index 0000000000..8dd91b2836
--- /dev/null
+++ b/std/math/complex/acosh.zig
@@ -0,0 +1,21 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+pub fn acosh(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ const q = cmath.acos(z);
+ return Complex(T).new(-q.im, q.re);
+}
+
+const epsilon = 0.0001;
+
+test "complex.cacosh" {
+ const a = Complex(f32).new(5, 3);
+ const c = acosh(a);
+
+ debug.assert(math.approxEq(f32, c.re, 2.452914, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 0.546975, epsilon));
+}
diff --git a/std/math/complex/arg.zig b/std/math/complex/arg.zig
new file mode 100644
index 0000000000..f24512ac73
--- /dev/null
+++ b/std/math/complex/arg.zig
@@ -0,0 +1,18 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+pub fn arg(z: var) @typeOf(z.re) {
+ const T = @typeOf(z.re);
+ return math.atan2(T, z.im, z.re);
+}
+
+const epsilon = 0.0001;
+
+test "complex.carg" {
+ const a = Complex(f32).new(5, 3);
+ const c = arg(a);
+ debug.assert(math.approxEq(f32, c, 0.540420, epsilon));
+}
diff --git a/std/math/complex/asin.zig b/std/math/complex/asin.zig
new file mode 100644
index 0000000000..584a3a1a9b
--- /dev/null
+++ b/std/math/complex/asin.zig
@@ -0,0 +1,27 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+pub fn asin(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ const x = z.re;
+ const y = z.im;
+
+ const p = Complex(T).new(1.0 - (x - y) * (x + y), -2.0 * x * y);
+ const q = Complex(T).new(-y, x);
+ const r = cmath.log(q.add(cmath.sqrt(p)));
+
+ return Complex(T).new(r.im, -r.re);
+}
+
+const epsilon = 0.0001;
+
+test "complex.casin" {
+ const a = Complex(f32).new(5, 3);
+ const c = asin(a);
+
+ debug.assert(math.approxEq(f32, c.re, 1.023822, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 2.452914, epsilon));
+}
diff --git a/std/math/complex/asinh.zig b/std/math/complex/asinh.zig
new file mode 100644
index 0000000000..0c4dc2b6e4
--- /dev/null
+++ b/std/math/complex/asinh.zig
@@ -0,0 +1,22 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+pub fn asinh(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ const q = Complex(T).new(-z.im, z.re);
+ const r = cmath.asin(q);
+ return Complex(T).new(r.im, -r.re);
+}
+
+const epsilon = 0.0001;
+
+test "complex.casinh" {
+ const a = Complex(f32).new(5, 3);
+ const c = asinh(a);
+
+ debug.assert(math.approxEq(f32, c.re, 2.459831, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 0.533999, epsilon));
+}
diff --git a/std/math/complex/atan.zig b/std/math/complex/atan.zig
new file mode 100644
index 0000000000..b7bbf930eb
--- /dev/null
+++ b/std/math/complex/atan.zig
@@ -0,0 +1,130 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+pub fn atan(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ return switch (T) {
+ f32 => atan32(z),
+ f64 => atan64(z),
+ else => @compileError("atan not implemented for " ++ @typeName(z)),
+ };
+}
+
+fn redupif32(x: f32) f32 {
+ const DP1 = 3.140625;
+ const DP2 = 9.67502593994140625e-4;
+ const DP3 = 1.509957990978376432e-7;
+
+ var t = x / math.pi;
+ if (t >= 0.0) {
+ t += 0.5;
+ } else {
+ t -= 0.5;
+ }
+
+ const u = f32(i32(t));
+ return ((x - u * DP1) - u * DP2) - t * DP3;
+}
+
+fn atan32(z: &const Complex(f32)) Complex(f32) {
+ const maxnum = 1.0e38;
+
+ const x = z.re;
+ const y = z.im;
+
+ if ((x == 0.0) and (y > 1.0)) {
+ // overflow
+ return Complex(f32).new(maxnum, maxnum);
+ }
+
+ const x2 = x * x;
+ var a = 1.0 - x2 - (y * y);
+ if (a == 0.0) {
+ // overflow
+ return Complex(f32).new(maxnum, maxnum);
+ }
+
+ var t = 0.5 * math.atan2(f32, 2.0 * x, a);
+ var w = redupif32(t);
+
+ t = y - 1.0;
+ a = x2 + t * t;
+ if (a == 0.0) {
+ // overflow
+ return Complex(f32).new(maxnum, maxnum);
+ }
+
+ t = y + 1.0;
+ a = (x2 + (t * t)) / a;
+ return Complex(f32).new(w, 0.25 * math.ln(a));
+}
+
+fn redupif64(x: f64) f64 {
+ const DP1 = 3.14159265160560607910;
+ const DP2 = 1.98418714791870343106e-9;
+ const DP3 = 1.14423774522196636802e-17;
+
+ var t = x / math.pi;
+ if (t >= 0.0) {
+ t += 0.5;
+ } else {
+ t -= 0.5;
+ }
+
+ const u = f64(i64(t));
+ return ((x - u * DP1) - u * DP2) - t * DP3;
+}
+
+fn atan64(z: &const Complex(f64)) Complex(f64) {
+ const maxnum = 1.0e308;
+
+ const x = z.re;
+ const y = z.im;
+
+ if ((x == 0.0) and (y > 1.0)) {
+ // overflow
+ return Complex(f64).new(maxnum, maxnum);
+ }
+
+ const x2 = x * x;
+ var a = 1.0 - x2 - (y * y);
+ if (a == 0.0) {
+ // overflow
+ return Complex(f64).new(maxnum, maxnum);
+ }
+
+ var t = 0.5 * math.atan2(f64, 2.0 * x, a);
+ var w = redupif64(t);
+
+ t = y - 1.0;
+ a = x2 + t * t;
+ if (a == 0.0) {
+ // overflow
+ return Complex(f64).new(maxnum, maxnum);
+ }
+
+ t = y + 1.0;
+ a = (x2 + (t * t)) / a;
+ return Complex(f64).new(w, 0.25 * math.ln(a));
+}
+
+const epsilon = 0.0001;
+
+test "complex.catan32" {
+ const a = Complex(f32).new(5, 3);
+ const c = atan(a);
+
+ debug.assert(math.approxEq(f32, c.re, 1.423679, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 0.086569, epsilon));
+}
+
+test "complex.catan64" {
+ const a = Complex(f64).new(5, 3);
+ const c = atan(a);
+
+ debug.assert(math.approxEq(f64, c.re, 1.423679, epsilon));
+ debug.assert(math.approxEq(f64, c.im, 0.086569, epsilon));
+}
diff --git a/std/math/complex/atanh.zig b/std/math/complex/atanh.zig
new file mode 100644
index 0000000000..f70c741765
--- /dev/null
+++ b/std/math/complex/atanh.zig
@@ -0,0 +1,22 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+pub fn atanh(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ const q = Complex(T).new(-z.im, z.re);
+ const r = cmath.atan(q);
+ return Complex(T).new(r.im, -r.re);
+}
+
+const epsilon = 0.0001;
+
+test "complex.catanh" {
+ const a = Complex(f32).new(5, 3);
+ const c = atanh(a);
+
+ debug.assert(math.approxEq(f32, c.re, 0.146947, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 1.480870, epsilon));
+}
diff --git a/std/math/complex/conj.zig b/std/math/complex/conj.zig
new file mode 100644
index 0000000000..ad3e8b5036
--- /dev/null
+++ b/std/math/complex/conj.zig
@@ -0,0 +1,17 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+pub fn conj(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ return Complex(T).new(z.re, -z.im);
+}
+
+test "complex.conj" {
+ const a = Complex(f32).new(5, 3);
+ const c = a.conjugate();
+
+ debug.assert(c.re == 5 and c.im == -3);
+}
diff --git a/std/math/complex/cos.zig b/std/math/complex/cos.zig
new file mode 100644
index 0000000000..96e4ffcdb0
--- /dev/null
+++ b/std/math/complex/cos.zig
@@ -0,0 +1,21 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+pub fn cos(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ const p = Complex(T).new(-z.im, z.re);
+ return cmath.cosh(p);
+}
+
+const epsilon = 0.0001;
+
+test "complex.ccos" {
+ const a = Complex(f32).new(5, 3);
+ const c = cos(a);
+
+ debug.assert(math.approxEq(f32, c.re, 2.855815, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 9.606383, epsilon));
+}
diff --git a/std/math/complex/cosh.zig b/std/math/complex/cosh.zig
new file mode 100644
index 0000000000..96eac68556
--- /dev/null
+++ b/std/math/complex/cosh.zig
@@ -0,0 +1,165 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+const ldexp_cexp = @import("ldexp.zig").ldexp_cexp;
+
+// Complex hyperbolic cosine; dispatches on the component float type.
+pub fn cosh(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ return switch (T) {
+ f32 => cosh32(z),
+ f64 => cosh64(z),
+ // NOTE(review): @typeName(z) passes a value where a type is expected;
+ // should likely be @typeName(T). Only reached for unsupported T.
+ else => @compileError("cosh not implemented for " ++ @typeName(z)),
+ };
+}
+
+// f32 complex cosh, following the FreeBSD libm ccoshf algorithm: branch on
+// |x| to avoid overflow in exp, with explicit handling of zero/inf/nan parts.
+fn cosh32(z: &const Complex(f32)) Complex(f32) {
+ const x = z.re;
+ const y = z.im;
+
+ const hx = @bitCast(u32, x);
+ const ix = hx & 0x7fffffff;
+
+ const hy = @bitCast(u32, y);
+ const iy = hy & 0x7fffffff;
+
+ if (ix < 0x7f800000 and iy < 0x7f800000) {
+ if (iy == 0) {
+ // x * y keeps the sign of the zero imaginary part correct
+ // (consistent with cosh64 below).
+ return Complex(f32).new(math.cosh(x), x * y);
+ }
+ // small x: normal case
+ if (ix < 0x41100000) {
+ return Complex(f32).new(math.cosh(x) * math.cos(y), math.sinh(x) * math.sin(y));
+ }
+
+ // |x|>= 9, so cosh(x) ~= exp(|x|)
+ if (ix < 0x42b17218) {
+ // x < 88.7: exp(|x|) won't overflow.
+ // cosh is even, so the real part is positive h*cos(y); the sign
+ // of x lands on the imaginary (sinh-like) part. The original had
+ // these two swapped.
+ const h = math.exp(math.fabs(x)) * 0.5;
+ return Complex(f32).new(h * math.cos(y), math.copysign(f32, h, x) * math.sin(y));
+ }
+ // x < 192.7: scale to avoid overflow
+ else if (ix < 0x4340b1e7) {
+ // Use the scaled result r; the original discarded r and returned
+ // the raw inputs.
+ const v = Complex(f32).new(math.fabs(x), y);
+ const r = ldexp_cexp(v, -1);
+ return Complex(f32).new(r.re, r.im * math.copysign(f32, 1, x));
+ }
+ // x >= 192.7: result always overflows
+ else {
+ const h = 0x1p127 * x;
+ return Complex(f32).new(h * h * math.cos(y), h * math.sin(y));
+ }
+ }
+
+ // cosh(+-0 + i inf|nan) = nan +- 0i
+ if (ix == 0 and iy >= 0x7f800000) {
+ return Complex(f32).new(y - y, math.copysign(f32, 0, x * (y - y)));
+ }
+
+ // cosh(inf|nan + i0)
+ if (iy == 0 and ix >= 0x7f800000) {
+ if (hx & 0x7fffff == 0) {
+ return Complex(f32).new(x * x, math.copysign(f32, 0, x) * y);
+ }
+ return Complex(f32).new(x, math.copysign(f32, 0, (x + x) * y));
+ }
+
+ // cosh(finite + i inf|nan) = nan + i nan
+ if (ix < 0x7f800000 and iy >= 0x7f800000) {
+ return Complex(f32).new(y - y, x * (y - y));
+ }
+
+ // cosh(inf + iy) and cosh(inf + i inf|nan)
+ if (ix >= 0x7f800000 and (hx & 0x7fffff) == 0) {
+ if (iy >= 0x7f800000) {
+ return Complex(f32).new(x * x, x * (y - y));
+ }
+ return Complex(f32).new((x * x) * math.cos(y), x * math.sin(y));
+ }
+
+ // remaining nan combinations
+ return Complex(f32).new((x * x) * (y - y), (x + x) * (y - y));
+}
+
+// f64 complex cosh, following the FreeBSD libm ccosh algorithm.
+fn cosh64(z: &const Complex(f64)) Complex(f64) {
+ const x = z.re;
+ const y = z.im;
+
+ const fx = @bitCast(u64, x);
+ const hx = u32(fx >> 32);
+ const lx = @truncate(u32, fx);
+ const ix = hx & 0x7fffffff;
+
+ const fy = @bitCast(u64, y);
+ const hy = u32(fy >> 32);
+ const ly = @truncate(u32, fy);
+ const iy = hy & 0x7fffffff;
+
+ // nearly non-exceptional case where x, y are finite
+ if (ix < 0x7ff00000 and iy < 0x7ff00000) {
+ if (iy | ly == 0) {
+ return Complex(f64).new(math.cosh(x), x * y);
+ }
+ // small x: normal case
+ if (ix < 0x40360000) {
+ return Complex(f64).new(math.cosh(x) * math.cos(y), math.sinh(x) * math.sin(y));
+ }
+
+ // |x|>= 22, so cosh(x) ~= exp(|x|)
+ if (ix < 0x40862e42) {
+ // x < 710: exp(|x|) won't overflow
+ const h = math.exp(math.fabs(x)) * 0.5;
+ return Complex(f64).new(h * math.cos(y), math.copysign(f64, h, x) * math.sin(y));
+ }
+ // x < 1455: scale to avoid overflow
+ else if (ix < 0x4096bbaa) {
+ // Use the scaled result r; the original discarded r and returned
+ // the raw inputs.
+ const v = Complex(f64).new(math.fabs(x), y);
+ const r = ldexp_cexp(v, -1);
+ return Complex(f64).new(r.re, r.im * math.copysign(f64, 1, x));
+ }
+ // x >= 1455: result always overflows
+ else {
+ // Multiply by x so the sign of x propagates to the imaginary part
+ // (as in cosh32 above); the original omitted the "* x".
+ const h = 0x1p1023 * x;
+ return Complex(f64).new(h * h * math.cos(y), h * math.sin(y));
+ }
+ }
+
+ if (ix | lx == 0 and iy >= 0x7ff00000) {
+ return Complex(f64).new(y - y, math.copysign(f64, 0, x * (y - y)));
+ }
+
+ if (iy | ly == 0 and ix >= 0x7ff00000) {
+ if ((hx & 0xfffff) | lx == 0) {
+ return Complex(f64).new(x * x, math.copysign(f64, 0, x) * y);
+ }
+ return Complex(f64).new(x * x, math.copysign(f64, 0, (x + x) * y));
+ }
+
+ if (ix < 0x7ff00000 and iy >= 0x7ff00000) {
+ return Complex(f64).new(y - y, x * (y - y));
+ }
+
+ if (ix >= 0x7ff00000 and (hx & 0xfffff) | lx == 0) {
+ if (iy >= 0x7ff00000) {
+ return Complex(f64).new(x * x, x * (y - y));
+ }
+ return Complex(f64).new(x * x * math.cos(y), x * math.sin(y));
+ }
+
+ return Complex(f64).new((x * x) * (y - y), (x + x) * (y - y));
+}
+
+// Tolerance for approximate float comparisons in the tests below.
+const epsilon = 0.0001;
+
+test "complex.ccosh32" {
+ const a = Complex(f32).new(5, 3);
+ const c = cosh(a);
+
+ debug.assert(math.approxEq(f32, c.re, -73.467300, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 10.471557, epsilon));
+}
+
+test "complex.ccosh64" {
+ const a = Complex(f64).new(5, 3);
+ const c = cosh(a);
+
+ debug.assert(math.approxEq(f64, c.re, -73.467300, epsilon));
+ debug.assert(math.approxEq(f64, c.im, 10.471557, epsilon));
+}
diff --git a/std/math/complex/exp.zig b/std/math/complex/exp.zig
new file mode 100644
index 0000000000..03f7f9e41b
--- /dev/null
+++ b/std/math/complex/exp.zig
@@ -0,0 +1,140 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+const ldexp_cexp = @import("ldexp.zig").ldexp_cexp;
+
+// Complex exponential; dispatches on the component float type.
+pub fn exp(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+
+ return switch (T) {
+ f32 => exp32(z),
+ f64 => exp64(z),
+ // NOTE(review): @typeName(z) passes a value where a type is expected;
+ // should likely be @typeName(T). Only reached for unsupported T.
+ else => @compileError("exp not implemented for " ++ @typeName(z)),
+ };
+}
+
+// f32 complex exp, following the FreeBSD libm cexpf algorithm: handle pure
+// real/imaginary and inf/nan cases explicitly, and scale via ldexp_cexp when
+// exp(x) alone would overflow but exp(z) is still finite.
+fn exp32(z: &const Complex(f32)) Complex(f32) {
+ @setFloatMode(this, @import("builtin").FloatMode.Strict);
+
+ const exp_overflow = 0x42b17218; // max_exp * ln2 ~= 88.72283955
+ const cexp_overflow = 0x43400074; // (max_exp - min_denom_exp) * ln2
+
+ const x = z.re;
+ const y = z.im;
+
+ const hy = @bitCast(u32, y) & 0x7fffffff;
+ // cexp(x + i0) = exp(x) + i0
+ if (hy == 0) {
+ return Complex(f32).new(math.exp(x), y);
+ }
+
+ const hx = @bitCast(u32, x);
+ // cexp(0 + iy) = cos(y) + isin(y)
+ if ((hx & 0x7fffffff) == 0) {
+ return Complex(f32).new(math.cos(y), math.sin(y));
+ }
+
+ if (hy >= 0x7f800000) {
+ // cexp(finite|nan +- i inf|nan) = nan + i nan
+ if ((hx & 0x7fffffff) != 0x7f800000) {
+ return Complex(f32).new(y - y, y - y);
+ }
+ // cexp(-inf +- i inf|nan) = 0 + i0
+ else if (hx & 0x80000000 != 0) {
+ return Complex(f32).new(0, 0);
+ }
+ // cexp(+inf +- i inf|nan) = inf + i nan
+ else {
+ return Complex(f32).new(x, y - y);
+ }
+ }
+
+ // 88.7 <= x <= 192 so must scale
+ if (hx >= exp_overflow and hx <= cexp_overflow) {
+ return ldexp_cexp(z, 0);
+ }
+ // - x < exp_overflow => exp(x) won't overflow (common)
+ // - x > cexp_overflow, so exp(x) * s overflows for s > 0
+ // - x = +-inf
+ // - x = nan
+ else {
+ const exp_x = math.exp(x);
+ return Complex(f32).new(exp_x * math.cos(y), exp_x * math.sin(y));
+ }
+}
+
+// f64 complex exp, following the FreeBSD libm cexp algorithm.
+fn exp64(z: &const Complex(f64)) Complex(f64) {
+ const exp_overflow = 0x40862e42; // high bits of max_exp * ln2 ~= 710
+ const cexp_overflow = 0x4096b8e4; // (max_exp - min_denorm_exp) * ln2
+
+ const x = z.re;
+ const y = z.im;
+
+ const fy = @bitCast(u64, y);
+ const hy = u32(fy >> 32) & 0x7fffffff;
+ const ly = @truncate(u32, fy);
+
+ // cexp(x + i0) = exp(x) + i0
+ if (hy | ly == 0) {
+ return Complex(f64).new(math.exp(x), y);
+ }
+
+ const fx = @bitCast(u64, x);
+ const hx = u32(fx >> 32);
+ const lx = @truncate(u32, fx);
+
+ // cexp(0 + iy) = cos(y) + isin(y)
+ if ((hx & 0x7fffffff) | lx == 0) {
+ return Complex(f64).new(math.cos(y), math.sin(y));
+ }
+
+ if (hy >= 0x7ff00000) {
+ // cexp(finite|nan +- i inf|nan) = nan + i nan
+ if (lx != 0 or (hx & 0x7fffffff) != 0x7ff00000) {
+ return Complex(f64).new(y - y, y - y);
+ }
+ // cexp(-inf +- i inf|nan) = 0 + i0
+ else if (hx & 0x80000000 != 0) {
+ return Complex(f64).new(0, 0);
+ }
+ // cexp(+inf +- i inf|nan) = inf + i nan
+ else {
+ return Complex(f64).new(x, y - y);
+ }
+ }
+
+ // 709.7 <= x <= 1454.3 so must scale
+ if (hx >= exp_overflow and hx <= cexp_overflow) {
+ // ldexp_cexp returns a value, so return it directly; the original
+ // wrote "return *r;", dereferencing a non-pointer.
+ return ldexp_cexp(z, 0);
+ }
+ // - x < exp_overflow => exp(x) won't overflow (common)
+ // - x > cexp_overflow, so exp(x) * s overflows for s > 0
+ // - x = +-inf
+ // - x = nan
+ else {
+ const exp_x = math.exp(x);
+ return Complex(f64).new(exp_x * math.cos(y), exp_x * math.sin(y));
+ }
+}
+
+const epsilon = 0.0001;
+
+test "complex.cexp32" {
+ const a = Complex(f32).new(5, 3);
+ const c = exp(a);
+
+ debug.assert(math.approxEq(f32, c.re, -146.927917, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 20.944065, epsilon));
+}
+
+test "complex.cexp64" {
+ // Use f64 so this test actually exercises exp64 (it previously
+ // constructed a Complex(f32), duplicating the test above).
+ const a = Complex(f64).new(5, 3);
+ const c = exp(a);
+
+ debug.assert(math.approxEq(f64, c.re, -146.927917, epsilon));
+ debug.assert(math.approxEq(f64, c.im, 20.944065, epsilon));
+}
diff --git a/std/math/complex/index.zig b/std/math/complex/index.zig
new file mode 100644
index 0000000000..a4d493307e
--- /dev/null
+++ b/std/math/complex/index.zig
@@ -0,0 +1,171 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+
+pub const abs = @import("abs.zig").abs;
+pub const acosh = @import("acosh.zig").acosh;
+pub const acos = @import("acos.zig").acos;
+pub const arg = @import("arg.zig").arg;
+pub const asinh = @import("asinh.zig").asinh;
+pub const asin = @import("asin.zig").asin;
+pub const atanh = @import("atanh.zig").atanh;
+pub const atan = @import("atan.zig").atan;
+pub const conj = @import("conj.zig").conj;
+pub const cosh = @import("cosh.zig").cosh;
+pub const cos = @import("cos.zig").cos;
+pub const exp = @import("exp.zig").exp;
+pub const log = @import("log.zig").log;
+pub const pow = @import("pow.zig").pow;
+pub const proj = @import("proj.zig").proj;
+pub const sinh = @import("sinh.zig").sinh;
+pub const sin = @import("sin.zig").sin;
+pub const sqrt = @import("sqrt.zig").sqrt;
+pub const tanh = @import("tanh.zig").tanh;
+pub const tan = @import("tan.zig").tan;
+
+/// Cartesian complex number with components of float type T.
+pub fn Complex(comptime T: type) type {
+ return struct {
+ const Self = this;
+
+ re: T,
+ im: T,
+
+ /// Constructs re + im*i.
+ pub fn new(re: T, im: T) Self {
+ return Self {
+ .re = re,
+ .im = im,
+ };
+ }
+
+ /// Component-wise addition.
+ pub fn add(self: &const Self, other: &const Self) Self {
+ return Self {
+ .re = self.re + other.re,
+ .im = self.im + other.im,
+ };
+ }
+
+ /// Component-wise subtraction (self - other).
+ pub fn sub(self: &const Self, other: &const Self) Self {
+ return Self {
+ .re = self.re - other.re,
+ .im = self.im - other.im,
+ };
+ }
+
+ /// Complex product: (a+bi)(c+di) = (ac-bd) + (bc+ad)i.
+ pub fn mul(self: &const Self, other: &const Self) Self {
+ return Self {
+ .re = self.re * other.re - self.im * other.im,
+ .im = self.im * other.re + self.re * other.im,
+ };
+ }
+
+ /// Complex quotient via multiplication by the conjugate of other.
+ /// NOTE(review): naive formula; can overflow/underflow for extreme
+ /// magnitudes (no Smith scaling).
+ pub fn div(self: &const Self, other: &const Self) Self {
+ const re_num = self.re * other.re + self.im * other.im;
+ const im_num = self.im * other.re - self.re * other.im;
+ const den = other.re * other.re + other.im * other.im;
+
+ return Self {
+ .re = re_num / den,
+ .im = im_num / den,
+ };
+ }
+
+ /// Complex conjugate: negated imaginary part.
+ pub fn conjugate(self: &const Self) Self {
+ return Self {
+ .re = self.re,
+ .im = -self.im,
+ };
+ }
+
+ /// Multiplicative inverse: conj(z) / |z|^2.
+ pub fn reciprocal(self: &const Self) Self {
+ const m = self.re * self.re + self.im * self.im;
+ return Self {
+ .re = self.re / m,
+ .im = -self.im / m,
+ };
+ }
+
+ /// Euclidean magnitude |z| = sqrt(re^2 + im^2).
+ /// NOTE(review): squares may overflow for large components; cmath.abs
+ /// uses a hypot-style computation instead.
+ pub fn magnitude(self: &const Self) T {
+ return math.sqrt(self.re * self.re + self.im * self.im);
+ }
+ };
+}
+
+// Tolerance for approximate float comparisons in the tests below.
+const epsilon = 0.0001;
+
+test "complex.add" {
+ const a = Complex(f32).new(5, 3);
+ const b = Complex(f32).new(2, 7);
+ const c = a.add(b);
+
+ debug.assert(c.re == 7 and c.im == 10);
+}
+
+test "complex.sub" {
+ const a = Complex(f32).new(5, 3);
+ const b = Complex(f32).new(2, 7);
+ const c = a.sub(b);
+
+ debug.assert(c.re == 3 and c.im == -4);
+}
+
+test "complex.mul" {
+ const a = Complex(f32).new(5, 3);
+ const b = Complex(f32).new(2, 7);
+ const c = a.mul(b);
+
+ debug.assert(c.re == -11 and c.im == 41);
+}
+
+test "complex.div" {
+ const a = Complex(f32).new(5, 3);
+ const b = Complex(f32).new(2, 7);
+ const c = a.div(b);
+
+ debug.assert(math.approxEq(f32, c.re, f32(31)/53, epsilon) and
+ math.approxEq(f32, c.im, f32(-29)/53, epsilon));
+}
+
+test "complex.conjugate" {
+ const a = Complex(f32).new(5, 3);
+ const c = a.conjugate();
+
+ debug.assert(c.re == 5 and c.im == -3);
+}
+
+test "complex.reciprocal" {
+ const a = Complex(f32).new(5, 3);
+ const c = a.reciprocal();
+
+ debug.assert(math.approxEq(f32, c.re, f32(5)/34, epsilon) and
+ math.approxEq(f32, c.im, f32(-3)/34, epsilon));
+}
+
+test "complex.magnitude" {
+ const a = Complex(f32).new(5, 3);
+ const c = a.magnitude();
+
+ debug.assert(math.approxEq(f32, c, 5.83095, epsilon));
+}
+
+// Reference every submodule so their test blocks are compiled and run.
+test "complex.cmath" {
+ _ = @import("abs.zig");
+ _ = @import("acosh.zig");
+ _ = @import("acos.zig");
+ _ = @import("arg.zig");
+ _ = @import("asinh.zig");
+ _ = @import("asin.zig");
+ _ = @import("atanh.zig");
+ _ = @import("atan.zig");
+ _ = @import("conj.zig");
+ _ = @import("cosh.zig");
+ _ = @import("cos.zig");
+ _ = @import("exp.zig");
+ _ = @import("log.zig");
+ _ = @import("pow.zig");
+ _ = @import("proj.zig");
+ _ = @import("sinh.zig");
+ _ = @import("sin.zig");
+ _ = @import("sqrt.zig");
+ _ = @import("tanh.zig");
+ _ = @import("tan.zig");
+}
diff --git a/std/math/complex/ldexp.zig b/std/math/complex/ldexp.zig
new file mode 100644
index 0000000000..4fb5a6815f
--- /dev/null
+++ b/std/math/complex/ldexp.zig
@@ -0,0 +1,75 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+// Computes exp(z) * 2^expt without intermediate overflow in exp(z.re);
+// dispatches on the component float type.
+pub fn ldexp_cexp(z: var, expt: i32) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+
+ return switch (T) {
+ f32 => ldexp_cexp32(z, expt),
+ f64 => ldexp_cexp64(z, expt),
+ // callers only reach here via f32/f64 code paths
+ else => unreachable,
+ };
+}
+
+// Computes exp(x) as a finite mantissa-like value plus a base-2 exponent in
+// *expt, by first dividing x down by exp(k*ln2) so exp never overflows, then
+// rebiasing the exponent field. Mirrors FreeBSD libm's __frexp_expf.
+fn frexp_exp32(x: f32, expt: &i32) f32 {
+ const k = 235; // reduction constant
+ const kln2 = 162.88958740; // k * ln2
+
+ const exp_x = math.exp(x - kln2);
+ const hx = @bitCast(u32, exp_x);
+ // extract the unbiased exponent, add back the reduction k
+ *expt = i32(hx >> 23) - (0x7f + 127) + k;
+ // force the stored exponent to a fixed large-but-finite value
+ return @bitCast(f32, (hx & 0x7fffff) | ((0x7f + 127) << 23));
+}
+
+// f32 scaled complex exp: exp(z) * 2^expt, splitting the exponent between two
+// scale factors so neither is infinite on its own.
+fn ldexp_cexp32(z: &const Complex(f32), expt: i32) Complex(f32) {
+ var ex_expt: i32 = undefined;
+ const exp_x = frexp_exp32(z.re, &ex_expt);
+ const exptf = expt + ex_expt;
+
+ // build 2^half_expt directly in the f32 exponent field (bits 23..30)
+ const half_expt1 = @divTrunc(exptf, 2);
+ const scale1 = @bitCast(f32, (0x7f + half_expt1) << 23);
+
+ const half_expt2 = exptf - half_expt1;
+ const scale2 = @bitCast(f32, (0x7f + half_expt2) << 23);
+
+ return Complex(f32).new(
+ math.cos(z.im) * exp_x * scale1 * scale2,
+ math.sin(z.im) * exp_x * scale1 * scale2,
+ );
+}
+
+// f64 counterpart of frexp_exp32; mirrors FreeBSD libm's __frexp_exp.
+fn frexp_exp64(x: f64, expt: &i32) f64 {
+ const k = 1799; // reduction constant
+ const kln2 = 1246.97177782734161156; // k * ln2
+
+ const exp_x = math.exp(x - kln2);
+
+ // NOTE(review): the exponent is taken from the bits of x below, but the
+ // FreeBSD reference extracts it from exp_x — verify against upstream.
+ const fx = @bitCast(u64, x);
+ const hx = u32(fx >> 32);
+ const lx = @truncate(u32, fx);
+
+ *expt = i32(hx >> 20) - (0x3ff + 1023) + k;
+
+ // force the stored exponent to a fixed large-but-finite value
+ const high_word = (hx & 0xfffff) | ((0x3ff + 1023) << 20);
+ return @bitCast(f64, (u64(high_word) << 32) | lx);
+}
+
+// f64 scaled complex exp: exp(z) * 2^expt, splitting the exponent between two
+// scale factors so neither is infinite on its own. Mirrors FreeBSD libm's
+// __ldexp_cexp.
+fn ldexp_cexp64(z: &const Complex(f64), expt: i32) Complex(f64) {
+ var ex_expt: i32 = undefined;
+ const exp_x = frexp_exp64(z.re, &ex_expt);
+ const exptf = i64(expt + ex_expt);
+
+ // Build 2^half_expt in the f64 exponent field. The biased exponent lives
+ // in bits 52..62, i.e. shifted by 20 within the HIGH 32-bit word, so the
+ // shift is (20 + 32); the original shifted by only 20, leaving the
+ // exponent bits in the low word and producing a denormal-sized scale.
+ const half_expt1 = @divTrunc(exptf, 2);
+ const scale1 = @bitCast(f64, (0x3ff + half_expt1) << (20 + 32));
+
+ const half_expt2 = exptf - half_expt1;
+ const scale2 = @bitCast(f64, (0x3ff + half_expt2) << (20 + 32));
+
+ return Complex(f64).new(
+ math.cos(z.im) * exp_x * scale1 * scale2,
+ math.sin(z.im) * exp_x * scale1 * scale2,
+ );
+}
diff --git a/std/math/complex/log.zig b/std/math/complex/log.zig
new file mode 100644
index 0000000000..a4a1d1664f
--- /dev/null
+++ b/std/math/complex/log.zig
@@ -0,0 +1,23 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+// Principal complex logarithm: ln|z| + i*arg(z).
+pub fn log(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ const r = cmath.abs(z);
+ const phi = cmath.arg(z);
+
+ return Complex(T).new(math.ln(r), phi);
+}
+
+const epsilon = 0.0001;
+
+test "complex.clog" {
+ const a = Complex(f32).new(5, 3);
+ const c = log(a);
+
+ debug.assert(math.approxEq(f32, c.re, 1.763180, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 0.540419, epsilon));
+}
diff --git a/std/math/complex/pow.zig b/std/math/complex/pow.zig
new file mode 100644
index 0000000000..bef9fde542
--- /dev/null
+++ b/std/math/complex/pow.zig
@@ -0,0 +1,22 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+// Complex power z^c via exp(c * log(z)). T is the Complex(...) type itself
+// (not the component type), unlike the other functions in this package.
+pub fn pow(comptime T: type, z: &const T, c: &const T) T {
+ const p = cmath.log(z);
+ const q = c.mul(p);
+ return cmath.exp(q);
+}
+
+const epsilon = 0.0001;
+
+test "complex.cpow" {
+ const a = Complex(f32).new(5, 3);
+ const b = Complex(f32).new(2.3, -1.3);
+ const c = pow(Complex(f32), a, b);
+
+ debug.assert(math.approxEq(f32, c.re, 58.049110, epsilon));
+ debug.assert(math.approxEq(f32, c.im, -101.003433, epsilon));
+}
diff --git a/std/math/complex/proj.zig b/std/math/complex/proj.zig
new file mode 100644
index 0000000000..b6c4cc046e
--- /dev/null
+++ b/std/math/complex/proj.zig
@@ -0,0 +1,24 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+// Projection onto the Riemann sphere: any z with an infinite component maps
+// to (+inf, +-0); finite z is returned unchanged.
+pub fn proj(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+
+ if (math.isInf(z.re) or math.isInf(z.im)) {
+ return Complex(T).new(math.inf(T), math.copysign(T, 0, z.re));
+ }
+
+ return Complex(T).new(z.re, z.im);
+}
+
+const epsilon = 0.0001;
+
+test "complex.cproj" {
+ const a = Complex(f32).new(5, 3);
+ const c = proj(a);
+
+ debug.assert(c.re == 5 and c.im == 3);
+}
diff --git a/std/math/complex/sin.zig b/std/math/complex/sin.zig
new file mode 100644
index 0000000000..d32b771d3b
--- /dev/null
+++ b/std/math/complex/sin.zig
@@ -0,0 +1,22 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+// Complex sine via the identity sin(z) = -i * sinh(i*z); p = i*z.
+pub fn sin(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ const p = Complex(T).new(-z.im, z.re);
+ const q = cmath.sinh(p);
+ return Complex(T).new(q.im, -q.re);
+}
+
+const epsilon = 0.0001;
+
+test "complex.csin" {
+ const a = Complex(f32).new(5, 3);
+ const c = sin(a);
+
+ debug.assert(math.approxEq(f32, c.re, -9.654126, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 2.841692, epsilon));
+}
diff --git a/std/math/complex/sinh.zig b/std/math/complex/sinh.zig
new file mode 100644
index 0000000000..09a62ca058
--- /dev/null
+++ b/std/math/complex/sinh.zig
@@ -0,0 +1,164 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+const ldexp_cexp = @import("ldexp.zig").ldexp_cexp;
+
+// Complex hyperbolic sine; dispatches on the component float type.
+pub fn sinh(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ return switch (T) {
+ f32 => sinh32(z),
+ f64 => sinh64(z),
+ // Message named the wrong function ("tan") and passed the value z to
+ // @typeName, which expects a type.
+ else => @compileError("sinh not implemented for " ++ @typeName(T)),
+ };
+}
+
+// f32 complex sinh, following the FreeBSD libm csinhf algorithm: branch on
+// |x| to avoid overflow in exp, with explicit handling of zero/inf/nan parts.
+fn sinh32(z: &const Complex(f32)) Complex(f32) {
+ const x = z.re;
+ const y = z.im;
+
+ const hx = @bitCast(u32, x);
+ const ix = hx & 0x7fffffff;
+
+ const hy = @bitCast(u32, y);
+ const iy = hy & 0x7fffffff;
+
+ if (ix < 0x7f800000 and iy < 0x7f800000) {
+ if (iy == 0) {
+ return Complex(f32).new(math.sinh(x), y);
+ }
+ // small x: normal case
+ if (ix < 0x41100000) {
+ return Complex(f32).new(math.sinh(x) * math.cos(y), math.cosh(x) * math.sin(y));
+ }
+
+ // |x|>= 9, so cosh(x) ~= exp(|x|)
+ if (ix < 0x42b17218) {
+ // x < 88.7: exp(|x|) won't overflow
+ const h = math.exp(math.fabs(x)) * 0.5;
+ return Complex(f32).new(math.copysign(f32, h, x) * math.cos(y), h * math.sin(y));
+ }
+ // x < 192.7: scale to avoid overflow
+ else if (ix < 0x4340b1e7) {
+ // Use the scaled result r; the original discarded r and returned
+ // the raw inputs.
+ const v = Complex(f32).new(math.fabs(x), y);
+ const r = ldexp_cexp(v, -1);
+ return Complex(f32).new(r.re * math.copysign(f32, 1, x), r.im);
+ }
+ // x >= 192.7: result always overflows
+ else {
+ const h = 0x1p127 * x;
+ return Complex(f32).new(h * math.cos(y), h * h * math.sin(y));
+ }
+ }
+
+ // sinh(+-0 + i inf|nan)
+ if (ix == 0 and iy >= 0x7f800000) {
+ return Complex(f32).new(math.copysign(f32, 0, x * (y - y)), y - y);
+ }
+
+ // sinh(inf|nan + i0)
+ if (iy == 0 and ix >= 0x7f800000) {
+ if (hx & 0x7fffff == 0) {
+ return Complex(f32).new(x, y);
+ }
+ return Complex(f32).new(x, math.copysign(f32, 0, y));
+ }
+
+ // sinh(finite + i inf|nan) = nan + i nan
+ if (ix < 0x7f800000 and iy >= 0x7f800000) {
+ return Complex(f32).new(y - y, x * (y - y));
+ }
+
+ // sinh(inf + iy) and sinh(inf + i inf|nan)
+ if (ix >= 0x7f800000 and (hx & 0x7fffff) == 0) {
+ if (iy >= 0x7f800000) {
+ return Complex(f32).new(x * x, x * (y - y));
+ }
+ return Complex(f32).new(x * math.cos(y), math.inf_f32 * math.sin(y));
+ }
+
+ // remaining nan combinations
+ return Complex(f32).new((x * x) * (y - y), (x + x) * (y - y));
+}
+
+// f64 complex sinh, following the FreeBSD libm csinh algorithm.
+fn sinh64(z: &const Complex(f64)) Complex(f64) {
+ const x = z.re;
+ const y = z.im;
+
+ const fx = @bitCast(u64, x);
+ const hx = u32(fx >> 32);
+ const lx = @truncate(u32, fx);
+ const ix = hx & 0x7fffffff;
+
+ const fy = @bitCast(u64, y);
+ const hy = u32(fy >> 32);
+ const ly = @truncate(u32, fy);
+ const iy = hy & 0x7fffffff;
+
+ if (ix < 0x7ff00000 and iy < 0x7ff00000) {
+ if (iy | ly == 0) {
+ return Complex(f64).new(math.sinh(x), y);
+ }
+ // small x: normal case
+ if (ix < 0x40360000) {
+ return Complex(f64).new(math.sinh(x) * math.cos(y), math.cosh(x) * math.sin(y));
+ }
+
+ // |x|>= 22, so cosh(x) ~= exp(|x|)
+ if (ix < 0x40862e42) {
+ // x < 710: exp(|x|) won't overflow
+ const h = math.exp(math.fabs(x)) * 0.5;
+ return Complex(f64).new(math.copysign(f64, h, x) * math.cos(y), h * math.sin(y));
+ }
+ // x < 1455: scale to avoid overflow
+ else if (ix < 0x4096bbaa) {
+ // Use the scaled result r; the original discarded r and returned
+ // the raw inputs.
+ const v = Complex(f64).new(math.fabs(x), y);
+ const r = ldexp_cexp(v, -1);
+ return Complex(f64).new(r.re * math.copysign(f64, 1, x), r.im);
+ }
+ // x >= 1455: result always overflows
+ else {
+ const h = 0x1p1023 * x;
+ return Complex(f64).new(h * math.cos(y), h * h * math.sin(y));
+ }
+ }
+
+ if (ix | lx == 0 and iy >= 0x7ff00000) {
+ return Complex(f64).new(math.copysign(f64, 0, x * (y - y)), y - y);
+ }
+
+ if (iy | ly == 0 and ix >= 0x7ff00000) {
+ if ((hx & 0xfffff) | lx == 0) {
+ return Complex(f64).new(x, y);
+ }
+ return Complex(f64).new(x, math.copysign(f64, 0, y));
+ }
+
+ if (ix < 0x7ff00000 and iy >= 0x7ff00000) {
+ return Complex(f64).new(y - y, x * (y - y));
+ }
+
+ if (ix >= 0x7ff00000 and (hx & 0xfffff) | lx == 0) {
+ if (iy >= 0x7ff00000) {
+ return Complex(f64).new(x * x, x * (y - y));
+ }
+ return Complex(f64).new(x * math.cos(y), math.inf_f64 * math.sin(y));
+ }
+
+ return Complex(f64).new((x * x) * (y - y), (x + x) * (y - y));
+}
+
+// Tolerance for approximate float comparisons in the tests below.
+const epsilon = 0.0001;
+
+test "complex.csinh32" {
+ const a = Complex(f32).new(5, 3);
+ const c = sinh(a);
+
+ debug.assert(math.approxEq(f32, c.re, -73.460617, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 10.472508, epsilon));
+}
+
+test "complex.csinh64" {
+ const a = Complex(f64).new(5, 3);
+ const c = sinh(a);
+
+ debug.assert(math.approxEq(f64, c.re, -73.460617, epsilon));
+ debug.assert(math.approxEq(f64, c.im, 10.472508, epsilon));
+}
diff --git a/std/math/complex/sqrt.zig b/std/math/complex/sqrt.zig
new file mode 100644
index 0000000000..afda69f7c9
--- /dev/null
+++ b/std/math/complex/sqrt.zig
@@ -0,0 +1,133 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+// TODO when #733 is solved this can be @typeOf(z) instead of Complex(@typeOf(z.re))
+// Principal complex square root; dispatches on the component float type.
+pub fn sqrt(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+
+ return switch (T) {
+ f32 => sqrt32(z),
+ f64 => sqrt64(z),
+ // NOTE(review): @typeName(z) passes a value where a type is expected;
+ // should likely be @typeName(T). Only reached for unsupported T.
+ else => @compileError("sqrt not implemented for " ++ @typeName(z)),
+ };
+}
+
+// f32 complex sqrt, following the FreeBSD libm csqrtf approach: compute in
+// f64 so the intermediate hypot/sum cannot overflow f32 range.
+fn sqrt32(z: &const Complex(f32)) Complex(f32) {
+ const x = z.re;
+ const y = z.im;
+
+ if (x == 0 and y == 0) {
+ return Complex(f32).new(0, y);
+ }
+ if (math.isInf(y)) {
+ return Complex(f32).new(math.inf(f32), y);
+ }
+ if (math.isNan(x)) {
+ // raise invalid if y is not nan
+ const t = (y - y) / (y - y);
+ return Complex(f32).new(x, t);
+ }
+ if (math.isInf(x)) {
+ // sqrt(inf + i nan) = inf + nan i
+ // sqrt(inf + iy) = inf + i0
+ // sqrt(-inf + i nan) = nan +- inf i
+ // sqrt(-inf + iy) = 0 + inf i
+ if (math.signbit(x)) {
+ return Complex(f32).new(math.fabs(x - y), math.copysign(f32, x, y));
+ } else {
+ return Complex(f32).new(x, math.copysign(f32, y - y, y));
+ }
+ }
+
+ // y = nan special case is handled fine below
+
+ // double-precision avoids overflow with correct rounding.
+ const dx = f64(x);
+ const dy = f64(y);
+
+ // sqrt(z) = t + i*y/(2t) for x >= 0, with t = sqrt((x + |z|)/2);
+ // mirrored through the imaginary axis for x < 0.
+ if (dx >= 0) {
+ const t = math.sqrt((dx + math.hypot(f64, dx, dy)) * 0.5);
+ return Complex(f32).new(f32(t), f32(dy / (2.0 * t)));
+ } else {
+ const t = math.sqrt((-dx + math.hypot(f64, dx, dy)) * 0.5);
+ return Complex(f32).new(f32(math.fabs(y) / (2.0 * t)), f32(math.copysign(f64, t, y)));
+ }
+}
+
+// f64 complex sqrt, following the FreeBSD libm csqrt approach: pre-scale
+// very large inputs to avoid overflow in the intermediate hypot/sum.
+fn sqrt64(z: &const Complex(f64)) Complex(f64) {
+ // may encounter overflow for im,re >= DBL_MAX / (1 + sqrt(2))
+ const threshold = 0x1.a827999fcef32p+1022;
+
+ var x = z.re;
+ var y = z.im;
+
+ if (x == 0 and y == 0) {
+ return Complex(f64).new(0, y);
+ }
+ if (math.isInf(y)) {
+ return Complex(f64).new(math.inf(f64), y);
+ }
+ if (math.isNan(x)) {
+ // raise invalid if y is not nan
+ const t = (y - y) / (y - y);
+ return Complex(f64).new(x, t);
+ }
+ if (math.isInf(x)) {
+ // sqrt(inf + i nan) = inf + nan i
+ // sqrt(inf + iy) = inf + i0
+ // sqrt(-inf + i nan) = nan +- inf i
+ // sqrt(-inf + iy) = 0 + inf i
+ if (math.signbit(x)) {
+ return Complex(f64).new(math.fabs(x - y), math.copysign(f64, x, y));
+ } else {
+ return Complex(f64).new(x, math.copysign(f64, y - y, y));
+ }
+ }
+
+ // y = nan special case is handled fine below
+
+ // scale to avoid overflow
+ if (math.fabs(x) >= threshold or math.fabs(y) >= threshold) {
+ x *= 0.25;
+ y *= 0.25;
+ scale = true;
+ }
+
+ // sqrt(z) = t + i*y/(2t) for x >= 0, with t = sqrt((x + |z|)/2);
+ // mirrored through the imaginary axis for x < 0.
+ var result: Complex(f64) = undefined;
+ if (x >= 0) {
+ const t = math.sqrt((x + math.hypot(f64, x, y)) * 0.5);
+ result = Complex(f64).new(t, y / (2.0 * t));
+ } else {
+ const t = math.sqrt((-x + math.hypot(f64, x, y)) * 0.5);
+ result = Complex(f64).new(math.fabs(y) / (2.0 * t), math.copysign(f64, t, y));
+ }
+
+ // undo the 0.25 input scaling: sqrt(4*z) = 2*sqrt(z)
+ if (scale) {
+ result.re *= 2;
+ result.im *= 2;
+ }
+
+ return result;
+}
+
+// Tolerance for approximate float comparisons in the tests below.
+const epsilon = 0.0001;
+
+test "complex.csqrt32" {
+ const a = Complex(f32).new(5, 3);
+ const c = sqrt(a);
+
+ debug.assert(math.approxEq(f32, c.re, 2.327117, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 0.644574, epsilon));
+}
+
+test "complex.csqrt64" {
+ const a = Complex(f64).new(5, 3);
+ const c = sqrt(a);
+
+ debug.assert(math.approxEq(f64, c.re, 2.3271175190399496, epsilon));
+ debug.assert(math.approxEq(f64, c.im, 0.6445742373246469, epsilon));
+}
diff --git a/std/math/complex/tan.zig b/std/math/complex/tan.zig
new file mode 100644
index 0000000000..4ea5182fa7
--- /dev/null
+++ b/std/math/complex/tan.zig
@@ -0,0 +1,22 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+// Complex tangent via the identity tan(z) = -i * tanh(i*z):
+// q = i*z, r = tanh(q), result = -i*r.
+pub fn tan(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ const q = Complex(T).new(-z.im, z.re);
+ const r = cmath.tanh(q);
+ return Complex(T).new(r.im, -r.re);
+}
+
+const epsilon = 0.0001;
+
+test "complex.ctan" {
+ const a = Complex(f32).new(5, 3);
+ const c = tan(a);
+
+ debug.assert(math.approxEq(f32, c.re, -0.002708233, epsilon));
+ debug.assert(math.approxEq(f32, c.im, 1.004165, epsilon));
+}
diff --git a/std/math/complex/tanh.zig b/std/math/complex/tanh.zig
new file mode 100644
index 0000000000..6af62f48ae
--- /dev/null
+++ b/std/math/complex/tanh.zig
@@ -0,0 +1,111 @@
+const std = @import("../../index.zig");
+const debug = std.debug;
+const math = std.math;
+const cmath = math.complex;
+const Complex = cmath.Complex;
+
+// Complex hyperbolic tangent; dispatches on the component float type.
+pub fn tanh(z: var) Complex(@typeOf(z.re)) {
+ const T = @typeOf(z.re);
+ return switch (T) {
+ f32 => tanh32(z),
+ f64 => tanh64(z),
+ // Message named the wrong function ("tan") and passed the value z to
+ // @typeName, which expects a type.
+ else => @compileError("tanh not implemented for " ++ @typeName(T)),
+ };
+}
+
+// f32 complex tanh, following the FreeBSD libm ctanhf algorithm
+// (Kahan's method for the finite case).
+fn tanh32(z: &const Complex(f32)) Complex(f32) {
+ const x = z.re;
+ const y = z.im;
+
+ const hx = @bitCast(u32, x);
+ const ix = hx & 0x7fffffff;
+
+ // x is inf or nan
+ if (ix >= 0x7f800000) {
+ if (ix & 0x7fffff != 0) {
+ // x is nan
+ const r = if (y == 0) y else x * y;
+ return Complex(f32).new(x, r);
+ }
+ // x is +-inf: tanh -> +-1; strip one exponent bit to turn inf into +-1
+ const xx = @bitCast(f32, hx - 0x40000000);
+ const r = if (math.isInf(y)) y else math.sin(y) * math.cos(y);
+ return Complex(f32).new(xx, math.copysign(f32, 0, r));
+ }
+
+ if (!math.isFinite(y)) {
+ const r = if (ix != 0) y - y else x;
+ return Complex(f32).new(r, y - y);
+ }
+
+ // x >= 11
+ if (ix >= 0x41300000) {
+ // tanh(x) ~= +-1; imaginary part decays like 4*sin(y)*cos(y)*exp(-2|x|)
+ const exp_mx = math.exp(-math.fabs(x));
+ return Complex(f32).new(math.copysign(f32, 1, x), 4 * math.sin(y) * math.cos(y) * exp_mx * exp_mx);
+ }
+
+ // Kahan's algorithm
+ const t = math.tan(y);
+ const beta = 1.0 + t * t;
+ const s = math.sinh(x);
+ const rho = math.sqrt(1 + s * s);
+ const den = 1 + beta * s * s;
+
+ return Complex(f32).new((beta * rho * s) / den, t / den);
+}
+
+// f64 complex tanh, following the FreeBSD libm ctanh algorithm
+// (Kahan's method for the finite case).
+fn tanh64(z: &const Complex(f64)) Complex(f64) {
+ const x = z.re;
+ const y = z.im;
+
+ const fx = @bitCast(u64, x);
+ const hx = u32(fx >> 32);
+ const lx = @truncate(u32, fx);
+ const ix = hx & 0x7fffffff;
+
+ // x is inf or nan
+ if (ix >= 0x7ff00000) {
+ // The f64 high-word mantissa mask is 0xfffff (20 bits), not the f32
+ // mask 0x7fffff the original used, which misclassified some infs/nans.
+ if ((ix & 0xfffff) | lx != 0) {
+ // x is nan
+ const r = if (y == 0) y else x * y;
+ return Complex(f64).new(x, r);
+ }
+
+ // x is +-inf: tanh -> +-1; strip one exponent bit to turn inf into +-1
+ const xx = @bitCast(f64, (u64(hx - 0x40000000) << 32) | lx);
+ const r = if (math.isInf(y)) y else math.sin(y) * math.cos(y);
+ return Complex(f64).new(xx, math.copysign(f64, 0, r));
+ }
+
+ if (!math.isFinite(y)) {
+ const r = if (ix != 0) y - y else x;
+ return Complex(f64).new(r, y - y);
+ }
+
+ // x >= 22
+ if (ix >= 0x40360000) {
+ // tanh(x) ~= +-1; imaginary part decays like 4*sin(y)*cos(y)*exp(-2|x|)
+ const exp_mx = math.exp(-math.fabs(x));
+ return Complex(f64).new(math.copysign(f64, 1, x), 4 * math.sin(y) * math.cos(y) * exp_mx * exp_mx);
+ }
+
+ // Kahan's algorithm
+ const t = math.tan(y);
+ const beta = 1.0 + t * t;
+ const s = math.sinh(x);
+ const rho = math.sqrt(1 + s * s);
+ const den = 1 + beta * s * s;
+
+ return Complex(f64).new((beta * rho * s) / den, t / den);
+}
+
+// Tolerance for approximate float comparisons in the tests below.
+const epsilon = 0.0001;
+
+test "complex.ctanh32" {
+ const a = Complex(f32).new(5, 3);
+ const c = tanh(a);
+
+ debug.assert(math.approxEq(f32, c.re, 0.999913, epsilon));
+ debug.assert(math.approxEq(f32, c.im, -0.000025, epsilon));
+}
+
+test "complex.ctanh64" {
+ const a = Complex(f64).new(5, 3);
+ const c = tanh(a);
+
+ debug.assert(math.approxEq(f64, c.re, 0.999913, epsilon));
+ debug.assert(math.approxEq(f64, c.im, -0.000025, epsilon));
+}
diff --git a/std/math/exp.zig b/std/math/exp.zig
index 4032930a43..21aa558c57 100644
--- a/std/math/exp.zig
+++ b/std/math/exp.zig
@@ -6,6 +6,7 @@
const std = @import("../index.zig");
const math = std.math;
const assert = std.debug.assert;
+const builtin = @import("builtin");
pub fn exp(x: var) @typeOf(x) {
const T = @typeOf(x);
@@ -17,6 +18,8 @@ pub fn exp(x: var) @typeOf(x) {
}
fn exp32(x_: f32) f32 {
+ @setFloatMode(this, builtin.FloatMode.Strict);
+
const half = []f32 { 0.5, -0.5 };
const ln2hi = 6.9314575195e-1;
const ln2lo = 1.4286067653e-6;
@@ -94,6 +97,8 @@ fn exp32(x_: f32) f32 {
}
fn exp64(x_: f64) f64 {
+ @setFloatMode(this, builtin.FloatMode.Strict);
+
const half = []const f64 { 0.5, -0.5 };
const ln2hi: f64 = 6.93147180369123816490e-01;
const ln2lo: f64 = 1.90821492927058770002e-10;
diff --git a/std/math/index.zig b/std/math/index.zig
index 477dafcbcc..a549a6bb61 100644
--- a/std/math/index.zig
+++ b/std/math/index.zig
@@ -129,6 +129,9 @@ pub const cos = @import("cos.zig").cos;
pub const sin = @import("sin.zig").sin;
pub const tan = @import("tan.zig").tan;
+pub const complex = @import("complex/index.zig");
+pub const Complex = complex.Complex;
+
test "math" {
_ = @import("nan.zig");
_ = @import("isnan.zig");
@@ -172,6 +175,8 @@ test "math" {
_ = @import("sin.zig");
_ = @import("cos.zig");
_ = @import("tan.zig");
+
+ _ = @import("complex/index.zig");
}
@@ -553,6 +558,32 @@ test "math.floorPowerOfTwo" {
comptime testFloorPowerOfTwo();
}
+// Floor of log2 for a nonzero unsigned integer: index of the highest set bit.
+pub fn log2_int(comptime T: type, x: T) Log2Int(T) {
+ assert(x != 0);
+ return Log2Int(T)(T.bit_count - 1 - @clz(x));
+}
+
+// Ceiling of log2 for a nonzero unsigned integer: equals log2_int(x) when x
+// is a power of two, otherwise one more.
+pub fn log2_int_ceil(comptime T: type, x: T) Log2Int(T) {
+ assert(x != 0);
+ const log2_val = log2_int(T, x);
+ if (T(1) << log2_val == x)
+ return log2_val;
+ return log2_val + 1;
+}
+
+test "std.math.log2_int_ceil" {
+ assert(log2_int_ceil(u32, 1) == 0);
+ assert(log2_int_ceil(u32, 2) == 1);
+ assert(log2_int_ceil(u32, 3) == 2);
+ assert(log2_int_ceil(u32, 4) == 2);
+ assert(log2_int_ceil(u32, 5) == 3);
+ assert(log2_int_ceil(u32, 6) == 3);
+ assert(log2_int_ceil(u32, 7) == 3);
+ assert(log2_int_ceil(u32, 8) == 3);
+ assert(log2_int_ceil(u32, 9) == 4);
+ assert(log2_int_ceil(u32, 10) == 4);
+}
+
fn testFloorPowerOfTwo() void {
assert(floorPowerOfTwo(u32, 63) == 32);
assert(floorPowerOfTwo(u32, 64) == 64);
diff --git a/std/math/ln.zig b/std/math/ln.zig
index c349ed7c6f..d09494b998 100644
--- a/std/math/ln.zig
+++ b/std/math/ln.zig
@@ -89,6 +89,8 @@ pub fn ln_32(x_: f32) f32 {
}
pub fn ln_64(x_: f64) f64 {
+ @setFloatMode(this, @import("builtin").FloatMode.Strict);
+
const ln2_hi: f64 = 6.93147180369123816490e-01;
const ln2_lo: f64 = 1.90821492927058770002e-10;
const Lg1: f64 = 6.666666666666735130e-01;
diff --git a/std/math/log2.zig b/std/math/log2.zig
index 998d6d6c5e..d5bbe385c2 100644
--- a/std/math/log2.zig
+++ b/std/math/log2.zig
@@ -31,17 +31,12 @@ pub fn log2(x: var) @typeOf(x) {
return result;
},
TypeId.Int => {
- return log2_int(T, x);
+ return math.log2_int(T, x);
},
else => @compileError("log2 not implemented for " ++ @typeName(T)),
}
}
-pub fn log2_int(comptime T: type, x: T) T {
- assert(x != 0);
- return T.bit_count - 1 - T(@clz(x));
-}
-
pub fn log2_32(x_: f32) f32 {
const ivln2hi: f32 = 1.4428710938e+00;
const ivln2lo: f32 = -1.7605285393e-04;
diff --git a/std/math/sqrt.zig b/std/math/sqrt.zig
index 690f8b6901..982bd28b72 100644
--- a/std/math/sqrt.zig
+++ b/std/math/sqrt.zig
@@ -14,26 +14,8 @@ const TypeId = builtin.TypeId;
pub fn sqrt(x: var) (if (@typeId(@typeOf(x)) == TypeId.Int) @IntType(false, @typeOf(x).bit_count / 2) else @typeOf(x)) {
const T = @typeOf(x);
switch (@typeId(T)) {
- TypeId.FloatLiteral => {
- return T(sqrt64(x));
- },
- TypeId.Float => {
- switch (T) {
- f32 => {
- switch (builtin.arch) {
- builtin.Arch.x86_64 => return @import("x86_64/sqrt.zig").sqrt32(x),
- else => return sqrt32(x),
- }
- },
- f64 => {
- switch (builtin.arch) {
- builtin.Arch.x86_64 => return @import("x86_64/sqrt.zig").sqrt64(x),
- else => return sqrt64(x),
- }
- },
- else => @compileError("sqrt not implemented for " ++ @typeName(T)),
- }
- },
+ TypeId.FloatLiteral => return T(@sqrt(f64, x)), // TODO upgrade to f128
+ TypeId.Float => return @sqrt(T, x),
TypeId.IntLiteral => comptime {
if (x > @maxValue(u128)) {
@compileError("sqrt not implemented for comptime_int greater than 128 bits");
@@ -43,269 +25,58 @@ pub fn sqrt(x: var) (if (@typeId(@typeOf(x)) == TypeId.Int) @IntType(false, @typ
}
return T(sqrt_int(u128, x));
},
- TypeId.Int => {
- return sqrt_int(T, x);
- },
+ TypeId.Int => return sqrt_int(T, x),
else => @compileError("sqrt not implemented for " ++ @typeName(T)),
}
}
-fn sqrt32(x: f32) f32 {
- const tiny: f32 = 1.0e-30;
- const sign: i32 = @bitCast(i32, u32(0x80000000));
- var ix: i32 = @bitCast(i32, x);
-
- if ((ix & 0x7F800000) == 0x7F800000) {
- return x * x + x; // sqrt(nan) = nan, sqrt(+inf) = +inf, sqrt(-inf) = snan
- }
-
- // zero
- if (ix <= 0) {
- if (ix & ~sign == 0) {
- return x; // sqrt (+-0) = +-0
- }
- if (ix < 0) {
- return math.snan(f32);
- }
- }
-
- // normalize
- var m = ix >> 23;
- if (m == 0) {
- // subnormal
- var i: i32 = 0;
- while (ix & 0x00800000 == 0) : (i += 1) {
- ix <<= 1;
- }
- m -= i - 1;
- }
-
- m -= 127; // unbias exponent
- ix = (ix & 0x007FFFFF) | 0x00800000;
-
- if (m & 1 != 0) { // odd m, double x to even
- ix += ix;
- }
-
- m >>= 1; // m = [m / 2]
-
- // sqrt(x) bit by bit
- ix += ix;
- var q: i32 = 0; // q = sqrt(x)
- var s: i32 = 0;
- var r: i32 = 0x01000000; // r = moving bit right -> left
-
- while (r != 0) {
- const t = s + r;
- if (t <= ix) {
- s = t + r;
- ix -= t;
- q += r;
- }
- ix += ix;
- r >>= 1;
- }
-
- // floating add to find rounding direction
- if (ix != 0) {
- var z = 1.0 - tiny; // inexact
- if (z >= 1.0) {
- z = 1.0 + tiny;
- if (z > 1.0) {
- q += 2;
- } else {
- if (q & 1 != 0) {
- q += 1;
- }
- }
- }
- }
-
- ix = (q >> 1) + 0x3f000000;
- ix += m << 23;
- return @bitCast(f32, ix);
-}
-
-// NOTE: The original code is full of implicit signed -> unsigned assumptions and u32 wraparound
-// behaviour. Most intermediate i32 values are changed to u32 where appropriate but there are
-// potentially some edge cases remaining that are not handled in the same way.
-fn sqrt64(x: f64) f64 {
- const tiny: f64 = 1.0e-300;
- const sign: u32 = 0x80000000;
- const u = @bitCast(u64, x);
-
- var ix0 = u32(u >> 32);
- var ix1 = u32(u & 0xFFFFFFFF);
-
- // sqrt(nan) = nan, sqrt(+inf) = +inf, sqrt(-inf) = nan
- if (ix0 & 0x7FF00000 == 0x7FF00000) {
- return x * x + x;
- }
-
- // sqrt(+-0) = +-0
- if (x == 0.0) {
- return x;
- }
- // sqrt(-ve) = snan
- if (ix0 & sign != 0) {
- return math.snan(f64);
- }
-
- // normalize x
- var m = i32(ix0 >> 20);
- if (m == 0) {
- // subnormal
- while (ix0 == 0) {
- m -= 21;
- ix0 |= ix1 >> 11;
- ix1 <<= 21;
- }
-
- // subnormal
- var i: u32 = 0;
- while (ix0 & 0x00100000 == 0) : (i += 1) {
- ix0 <<= 1;
- }
- m -= i32(i) - 1;
- ix0 |= ix1 >> u5(32 - i);
- ix1 <<= u5(i);
- }
-
- // unbias exponent
- m -= 1023;
- ix0 = (ix0 & 0x000FFFFF) | 0x00100000;
- if (m & 1 != 0) {
- ix0 += ix0 + (ix1 >> 31);
- ix1 = ix1 +% ix1;
- }
- m >>= 1;
-
- // sqrt(x) bit by bit
- ix0 += ix0 + (ix1 >> 31);
- ix1 = ix1 +% ix1;
-
- var q: u32 = 0;
- var q1: u32 = 0;
- var s0: u32 = 0;
- var s1: u32 = 0;
- var r: u32 = 0x00200000;
- var t: u32 = undefined;
- var t1: u32 = undefined;
-
- while (r != 0) {
- t = s0 +% r;
- if (t <= ix0) {
- s0 = t + r;
- ix0 -= t;
- q += r;
- }
- ix0 = ix0 +% ix0 +% (ix1 >> 31);
- ix1 = ix1 +% ix1;
- r >>= 1;
- }
-
- r = sign;
- while (r != 0) {
- t = s1 +% r;
- t = s0;
- if (t < ix0 or (t == ix0 and t1 <= ix1)) {
- s1 = t1 +% r;
- if (t1 & sign == sign and s1 & sign == 0) {
- s0 += 1;
- }
- ix0 -= t;
- if (ix1 < t1) {
- ix0 -= 1;
- }
- ix1 = ix1 -% t1;
- q1 += r;
- }
- ix0 = ix0 +% ix0 +% (ix1 >> 31);
- ix1 = ix1 +% ix1;
- r >>= 1;
- }
-
- // rounding direction
- if (ix0 | ix1 != 0) {
- var z = 1.0 - tiny; // raise inexact
- if (z >= 1.0) {
- z = 1.0 + tiny;
- if (q1 == 0xFFFFFFFF) {
- q1 = 0;
- q += 1;
- } else if (z > 1.0) {
- if (q1 == 0xFFFFFFFE) {
- q += 1;
- }
- q1 += 2;
- } else {
- q1 += q1 & 1;
- }
- }
- }
-
- ix0 = (q >> 1) + 0x3FE00000;
- ix1 = q1 >> 1;
- if (q & 1 != 0) {
- ix1 |= 0x80000000;
- }
-
- // NOTE: musl here appears to rely on signed twos-complement wraparound. +% has the same
- // behaviour at least.
- var iix0 = i32(ix0);
- iix0 = iix0 +% (m << 20);
-
- const uz = (u64(iix0) << 32) | ix1;
- return @bitCast(f64, uz);
-}
-
test "math.sqrt" {
- assert(sqrt(f32(0.0)) == sqrt32(0.0));
- assert(sqrt(f64(0.0)) == sqrt64(0.0));
+ assert(sqrt(f32(0.0)) == @sqrt(f32, 0.0));
+ assert(sqrt(f64(0.0)) == @sqrt(f64, 0.0));
}
test "math.sqrt32" {
const epsilon = 0.000001;
- assert(sqrt32(0.0) == 0.0);
- assert(math.approxEq(f32, sqrt32(2.0), 1.414214, epsilon));
- assert(math.approxEq(f32, sqrt32(3.6), 1.897367, epsilon));
- assert(sqrt32(4.0) == 2.0);
- assert(math.approxEq(f32, sqrt32(7.539840), 2.745877, epsilon));
- assert(math.approxEq(f32, sqrt32(19.230934), 4.385309, epsilon));
- assert(sqrt32(64.0) == 8.0);
- assert(math.approxEq(f32, sqrt32(64.1), 8.006248, epsilon));
- assert(math.approxEq(f32, sqrt32(8942.230469), 94.563370, epsilon));
+ assert(@sqrt(f32, 0.0) == 0.0);
+ assert(math.approxEq(f32, @sqrt(f32, 2.0), 1.414214, epsilon));
+ assert(math.approxEq(f32, @sqrt(f32, 3.6), 1.897367, epsilon));
+ assert(@sqrt(f32, 4.0) == 2.0);
+ assert(math.approxEq(f32, @sqrt(f32, 7.539840), 2.745877, epsilon));
+ assert(math.approxEq(f32, @sqrt(f32, 19.230934), 4.385309, epsilon));
+ assert(@sqrt(f32, 64.0) == 8.0);
+ assert(math.approxEq(f32, @sqrt(f32, 64.1), 8.006248, epsilon));
+ assert(math.approxEq(f32, @sqrt(f32, 8942.230469), 94.563370, epsilon));
}
test "math.sqrt64" {
const epsilon = 0.000001;
- assert(sqrt64(0.0) == 0.0);
- assert(math.approxEq(f64, sqrt64(2.0), 1.414214, epsilon));
- assert(math.approxEq(f64, sqrt64(3.6), 1.897367, epsilon));
- assert(sqrt64(4.0) == 2.0);
- assert(math.approxEq(f64, sqrt64(7.539840), 2.745877, epsilon));
- assert(math.approxEq(f64, sqrt64(19.230934), 4.385309, epsilon));
- assert(sqrt64(64.0) == 8.0);
- assert(math.approxEq(f64, sqrt64(64.1), 8.006248, epsilon));
- assert(math.approxEq(f64, sqrt64(8942.230469), 94.563367, epsilon));
+ assert(@sqrt(f64, 0.0) == 0.0);
+ assert(math.approxEq(f64, @sqrt(f64, 2.0), 1.414214, epsilon));
+ assert(math.approxEq(f64, @sqrt(f64, 3.6), 1.897367, epsilon));
+ assert(@sqrt(f64, 4.0) == 2.0);
+ assert(math.approxEq(f64, @sqrt(f64, 7.539840), 2.745877, epsilon));
+ assert(math.approxEq(f64, @sqrt(f64, 19.230934), 4.385309, epsilon));
+ assert(@sqrt(f64, 64.0) == 8.0);
+ assert(math.approxEq(f64, @sqrt(f64, 64.1), 8.006248, epsilon));
+ assert(math.approxEq(f64, @sqrt(f64, 8942.230469), 94.563367, epsilon));
}
test "math.sqrt32.special" {
- assert(math.isPositiveInf(sqrt32(math.inf(f32))));
- assert(sqrt32(0.0) == 0.0);
- assert(sqrt32(-0.0) == -0.0);
- assert(math.isNan(sqrt32(-1.0)));
- assert(math.isNan(sqrt32(math.nan(f32))));
+ assert(math.isPositiveInf(@sqrt(f32, math.inf(f32))));
+ assert(@sqrt(f32, 0.0) == 0.0);
+ assert(@sqrt(f32, -0.0) == -0.0);
+ assert(math.isNan(@sqrt(f32, -1.0)));
+ assert(math.isNan(@sqrt(f32, math.nan(f32))));
}
test "math.sqrt64.special" {
- assert(math.isPositiveInf(sqrt64(math.inf(f64))));
- assert(sqrt64(0.0) == 0.0);
- assert(sqrt64(-0.0) == -0.0);
- assert(math.isNan(sqrt64(-1.0)));
- assert(math.isNan(sqrt64(math.nan(f64))));
+ assert(math.isPositiveInf(@sqrt(f64, math.inf(f64))));
+ assert(@sqrt(f64, 0.0) == 0.0);
+ assert(@sqrt(f64, -0.0) == -0.0);
+ assert(math.isNan(@sqrt(f64, -1.0)));
+ assert(math.isNan(@sqrt(f64, math.nan(f64))));
}
fn sqrt_int(comptime T: type, value: T) @IntType(false, T.bit_count / 2) {
diff --git a/std/math/x86_64/sqrt.zig b/std/math/x86_64/sqrt.zig
deleted file mode 100644
index ad9ce0c96c..0000000000
--- a/std/math/x86_64/sqrt.zig
+++ /dev/null
@@ -1,15 +0,0 @@
-pub fn sqrt32(x: f32) f32 {
- return asm (
- \\sqrtss %%xmm0, %%xmm0
- : [ret] "={xmm0}" (-> f32)
- : [x] "{xmm0}" (x)
- );
-}
-
-pub fn sqrt64(x: f64) f64 {
- return asm (
- \\sqrtsd %%xmm0, %%xmm0
- : [ret] "={xmm0}" (-> f64)
- : [x] "{xmm0}" (x)
- );
-}
diff --git a/std/mem.zig b/std/mem.zig
index 97cb35ae65..d874f8a6c9 100644
--- a/std/mem.zig
+++ b/std/mem.zig
@@ -3,6 +3,7 @@ const debug = std.debug;
const assert = debug.assert;
const math = std.math;
const builtin = @import("builtin");
+const mem = this;
pub const Allocator = struct {
const Error = error {OutOfMemory};
@@ -10,6 +11,8 @@ pub const Allocator = struct {
/// Allocate byte_count bytes and return them in a slice, with the
/// slice's pointer aligned at least to alignment bytes.
/// The returned newly allocated memory is undefined.
+ /// `alignment` is guaranteed to be >= 1
+ /// `alignment` is guaranteed to be a power of 2
allocFn: fn (self: &Allocator, byte_count: usize, alignment: u29) Error![]u8,
/// If `new_byte_count > old_mem.len`:
@@ -17,20 +20,37 @@ pub const Allocator = struct {
/// * alignment >= alignment of old_mem.ptr
///
/// If `new_byte_count <= old_mem.len`:
- /// * this function must return successfully.
+ /// * this function must return successfully.
/// * alignment <= alignment of old_mem.ptr
///
/// The returned newly allocated memory is undefined.
+ /// `alignment` is guaranteed to be >= 1
+ /// `alignment` is guaranteed to be a power of 2
reallocFn: fn (self: &Allocator, old_mem: []u8, new_byte_count: usize, alignment: u29) Error![]u8,
/// Guaranteed: `old_mem.len` is the same as what was returned from `allocFn` or `reallocFn`
freeFn: fn (self: &Allocator, old_mem: []u8) void,
fn create(self: &Allocator, comptime T: type) !&T {
+ if (@sizeOf(T) == 0) return &{};
const slice = try self.alloc(T, 1);
return &slice[0];
}
+    // TODO once #733 is solved, this will replace create
+    /// Allocates storage for one value of `@typeOf(init).Child` and copies
+    /// `*init` into it. Returns a pointer the caller must release via this
+    /// allocator. Zero-sized types allocate nothing.
+    fn construct(self: &Allocator, init: var) t: {
+        // TODO this is a workaround for type getting parsed as Error!&const T
+        const T = @typeOf(init).Child;
+        break :t Error!&T;
+    } {
+        const T = @typeOf(init).Child;
+        // Zero-sized type: no allocation is needed, any pointer value works.
+        if (@sizeOf(T) == 0) return &{};
+        const slice = try self.alloc(T, 1);
+        const ptr = &slice[0];
+        *ptr = *init;
+        return ptr;
+    }
+
fn destroy(self: &Allocator, ptr: var) void {
self.free(ptr[0..1]);
}
@@ -48,7 +68,7 @@ pub const Allocator = struct {
const byte_count = math.mul(usize, @sizeOf(T), n) catch return Error.OutOfMemory;
const byte_slice = try self.allocFn(self, byte_count, alignment);
assert(byte_slice.len == byte_count);
- // This loop should get optimized out in ReleaseFast mode
+ // This loop gets optimized out in ReleaseFast mode
for (byte_slice) |*byte| {
*byte = undefined;
}
@@ -75,7 +95,7 @@ pub const Allocator = struct {
const byte_slice = try self.reallocFn(self, old_byte_slice, byte_count, alignment);
assert(byte_slice.len == byte_count);
if (n > old_mem.len) {
- // This loop should get optimized out in ReleaseFast mode
+ // This loop gets optimized out in ReleaseFast mode
for (byte_slice[old_byte_slice.len..]) |*byte| {
*byte = undefined;
}
@@ -169,6 +189,20 @@ pub fn dupe(allocator: &Allocator, comptime T: type, m: []const T) ![]T {
return new_buf;
}
+/// Remove values from the beginning of a slice.
+/// Returns a sub-slice of `slice`; no copy is made.
+pub fn trimLeft(comptime T: type, slice: []const T, values_to_strip: []const T) []const T {
+    var begin: usize = 0;
+    // Advance past every leading element that occurs in values_to_strip.
+    while (begin < slice.len and indexOfScalar(T, values_to_strip, slice[begin]) != null) : (begin += 1) {}
+    return slice[begin..];
+}
+
+/// Remove values from the end of a slice.
+/// Returns a sub-slice of `slice`; no copy is made.
+pub fn trimRight(comptime T: type, slice: []const T, values_to_strip: []const T) []const T {
+    var end: usize = slice.len;
+    // Back up over every trailing element that occurs in values_to_strip.
+    while (end > 0 and indexOfScalar(T, values_to_strip, slice[end - 1]) != null) : (end -= 1) {}
+    return slice[0..end];
+}
+
/// Remove values from the beginning and end of a slice.
pub fn trim(comptime T: type, slice: []const T, values_to_strip: []const T) []const T {
var begin: usize = 0;
@@ -179,6 +213,8 @@ pub fn trim(comptime T: type, slice: []const T, values_to_strip: []const T) []co
}
test "mem.trim" {
+ assert(eql(u8, trimLeft(u8, " foo\n ", " \n"), "foo\n "));
+ assert(eql(u8, trimRight(u8, " foo\n ", " \n"), " foo"));
assert(eql(u8, trim(u8, " foo\n ", " \n"), "foo"));
assert(eql(u8, trim(u8, "foo", " \n"), "foo"));
}
@@ -188,6 +224,17 @@ pub fn indexOfScalar(comptime T: type, slice: []const T, value: T) ?usize {
return indexOfScalarPos(T, slice, 0, value);
}
+/// Linear search for the last index of a scalar value inside a slice.
+/// Returns null when the value does not occur.
+pub fn lastIndexOfScalar(comptime T: type, slice: []const T, value: T) ?usize {
+    // Scan backwards so the first hit is the last occurrence.
+    var i: usize = slice.len;
+    while (i != 0) {
+        i -= 1;
+        if (slice[i] == value)
+            return i;
+    }
+    return null;
+}
+
pub fn indexOfScalarPos(comptime T: type, slice: []const T, start_index: usize, value: T) ?usize {
var i: usize = start_index;
while (i < slice.len) : (i += 1) {
@@ -201,6 +248,18 @@ pub fn indexOfAny(comptime T: type, slice: []const T, values: []const T) ?usize
return indexOfAnyPos(T, slice, 0, values);
}
+/// Returns the last index in `slice` of any element of `values`,
+/// or null if none occurs. Backwards linear scan.
+pub fn lastIndexOfAny(comptime T: type, slice: []const T, values: []const T) ?usize {
+    var i: usize = slice.len;
+    while (i != 0) {
+        i -= 1;
+        for (values) |value| {
+            if (slice[i] == value)
+                return i;
+        }
+    }
+    return null;
+}
+
pub fn indexOfAnyPos(comptime T: type, slice: []const T, start_index: usize, values: []const T) ?usize {
var i: usize = start_index;
while (i < slice.len) : (i += 1) {
@@ -216,6 +275,22 @@ pub fn indexOf(comptime T: type, haystack: []const T, needle: []const T) ?usize
return indexOfPos(T, haystack, 0, needle);
}
+/// Find the index in a slice of a sub-slice, searching from the end backwards.
+/// To start looking at a different index, slice the haystack first.
+/// Returns null when the needle does not occur. An empty needle matches at
+/// haystack.len - 0, i.e. the last candidate position.
+/// TODO is there even a better algorithm for this?
+pub fn lastIndexOf(comptime T: type, haystack: []const T, needle: []const T) ?usize {
+    if (needle.len > haystack.len)
+        return null;
+
+    // Start at the last position where the needle could still fit, walk left.
+    var i: usize = haystack.len - needle.len;
+    while (true) : (i -= 1) {
+        if (mem.eql(T, haystack[i..i+needle.len], needle))
+            return i;
+        // Checked after the comparison so index 0 is still tested.
+        if (i == 0)
+            return null;
+    }
+}
+
// TODO boyer-moore algorithm
pub fn indexOfPos(comptime T: type, haystack: []const T, start_index: usize, needle: []const T) ?usize {
if (needle.len > haystack.len)
@@ -232,9 +307,19 @@ pub fn indexOfPos(comptime T: type, haystack: []const T, start_index: usize, nee
test "mem.indexOf" {
assert(??indexOf(u8, "one two three four", "four") == 14);
+ assert(??lastIndexOf(u8, "one two three two four", "two") == 14);
assert(indexOf(u8, "one two three four", "gour") == null);
+ assert(lastIndexOf(u8, "one two three four", "gour") == null);
assert(??indexOf(u8, "foo", "foo") == 0);
+ assert(??lastIndexOf(u8, "foo", "foo") == 0);
assert(indexOf(u8, "foo", "fool") == null);
+ assert(lastIndexOf(u8, "foo", "lfoo") == null);
+ assert(lastIndexOf(u8, "foo", "fool") == null);
+
+ assert(??indexOf(u8, "foo foo", "foo") == 0);
+ assert(??lastIndexOf(u8, "foo foo", "foo") == 4);
+ assert(??lastIndexOfAny(u8, "boo, cat", "abo") == 6);
+ assert(??lastIndexOfScalar(u8, "boo", 'o') == 2);
}
/// Reads an integer from memory with size equal to bytes.len.
@@ -354,9 +439,24 @@ pub fn startsWith(comptime T: type, haystack: []const T, needle: []const T) bool
return if (needle.len > haystack.len) false else eql(T, haystack[0 .. needle.len], needle);
}
+test "mem.startsWith" {
+ assert(startsWith(u8, "Bob", "Bo"));
+ assert(!startsWith(u8, "Needle in haystack", "haystack"));
+}
+
+/// Returns true if `haystack` ends with the elements of `needle`.
+/// A needle longer than the haystack never matches.
+pub fn endsWith(comptime T: type, haystack: []const T, needle: []const T) bool {
+    return if (needle.len > haystack.len) false else eql(T, haystack[haystack.len - needle.len ..], needle);
+}
+
+
+test "mem.endsWith" {
+ assert(endsWith(u8, "Needle in haystack", "haystack"));
+ assert(!endsWith(u8, "Bob", "Bo"));
+}
+
pub const SplitIterator = struct {
buffer: []const u8,
- split_bytes: []const u8,
+ split_bytes: []const u8,
index: usize,
pub fn next(self: &SplitIterator) ?[]const u8 {
@@ -550,3 +650,28 @@ test "std.mem.rotate" {
assert(eql(i32, arr, []i32{ 1, 2, 4, 5, 3 }));
}
+
+// TODO: When https://github.com/zig-lang/zig/issues/649 is solved these can be done by
+// endian-casting the pointer and then dereferencing
+
+/// Byte-swaps x only when the host is little-endian; identity otherwise.
+pub fn endianSwapIfLe(comptime T: type, x: T) T {
+    return endianSwapIf(builtin.Endian.Little, T, x);
+}
+
+/// Byte-swaps x only when the host is big-endian; identity otherwise.
+pub fn endianSwapIfBe(comptime T: type, x: T) T {
+    return endianSwapIf(builtin.Endian.Big, T, x);
+}
+
+/// Byte-swaps x if the host endianness equals `endian`; otherwise returns x
+/// unchanged.
+pub fn endianSwapIf(endian: builtin.Endian, comptime T: type, x: T) T {
+    return if (builtin.endian == endian) endianSwap(T, x) else x;
+}
+
+/// Unconditionally reverses the byte order of x by writing it as
+/// little-endian and reading it back as big-endian.
+pub fn endianSwap(comptime T: type, x: T) T {
+    var buf: [@sizeOf(T)]u8 = undefined;
+    mem.writeInt(buf[0..], x, builtin.Endian.Little);
+    return mem.readInt(buf, T, builtin.Endian.Big);
+}
+
+test "std.mem.endianSwap" {
+ assert(endianSwap(u32, 0xDEADBEEF) == 0xEFBEADDE);
+}
diff --git a/std/net.zig b/std/net.zig
index 1140b6449b..8e1b8d97b2 100644
--- a/std/net.zig
+++ b/std/net.zig
@@ -1,143 +1,120 @@
const std = @import("index.zig");
-const linux = std.os.linux;
+const builtin = @import("builtin");
const assert = std.debug.assert;
-const endian = std.endian;
+const net = this;
+const posix = std.os.posix;
+const mem = std.mem;
-// TODO don't trust this file, it bit rotted. start over
-
-const Connection = struct {
- socket_fd: i32,
-
- pub fn send(c: Connection, buf: []const u8) !usize {
- const send_ret = linux.sendto(c.socket_fd, buf.ptr, buf.len, 0, null, 0);
- const send_err = linux.getErrno(send_ret);
- switch (send_err) {
- 0 => return send_ret,
- linux.EINVAL => unreachable,
- linux.EFAULT => unreachable,
- linux.ECONNRESET => return error.ConnectionReset,
- linux.EINTR => return error.SigInterrupt,
- // TODO there are more possible errors
- else => return error.Unexpected,
- }
- }
-
- pub fn recv(c: Connection, buf: []u8) ![]u8 {
- const recv_ret = linux.recvfrom(c.socket_fd, buf.ptr, buf.len, 0, null, null);
- const recv_err = linux.getErrno(recv_ret);
- switch (recv_err) {
- 0 => return buf[0..recv_ret],
- linux.EINVAL => unreachable,
- linux.EFAULT => unreachable,
- linux.ENOTSOCK => return error.NotSocket,
- linux.EINTR => return error.SigInterrupt,
- linux.ENOMEM => return error.OutOfMemory,
- linux.ECONNREFUSED => return error.ConnectionRefused,
- linux.EBADF => return error.BadFd,
- // TODO more error values
- else => return error.Unexpected,
- }
- }
-
- pub fn close(c: Connection) !void {
- switch (linux.getErrno(linux.close(c.socket_fd))) {
- 0 => return,
- linux.EBADF => unreachable,
- linux.EINTR => return error.SigInterrupt,
- linux.EIO => return error.Io,
- else => return error.Unexpected,
- }
- }
+pub const TmpWinAddr = struct {
+ family: u8,
+ data: [14]u8,
};
-const Address = struct {
- family: u16,
- scope_id: u32,
- addr: [16]u8,
- sort_key: i32,
+pub const OsAddress = switch (builtin.os) {
+ builtin.Os.windows => TmpWinAddr,
+ else => posix.sockaddr,
};
-pub fn lookup(hostname: []const u8, out_addrs: []Address) ![]Address {
- if (hostname.len == 0) {
-
- unreachable; // TODO
+pub const Address = struct {
+ os_addr: OsAddress,
+
+ pub fn initIp4(ip4: u32, port: u16) Address {
+ return Address {
+ .os_addr = posix.sockaddr {
+ .in = posix.sockaddr_in {
+ .family = posix.AF_INET,
+ .port = std.mem.endianSwapIfLe(u16, port),
+ .addr = ip4,
+ .zero = []u8{0} ** 8,
+ },
+ },
+ };
}
- unreachable; // TODO
-}
+    /// Builds an Address from an IPv6 address + port; the port is stored in
+    /// network byte order (swapped on little-endian hosts).
+    // NOTE(review): the Address struct declared above has only an `os_addr`
+    // field, yet this initializer also sets a top-level `.family` — that
+    // looks like a stray leftover that should not compile; verify and remove.
+    pub fn initIp6(ip6: &const Ip6Addr, port: u16) Address {
+        return Address {
+            .family = posix.AF_INET6,
+            .os_addr = posix.sockaddr {
+                .in6 = posix.sockaddr_in6 {
+                    .family = posix.AF_INET6,
+                    .port = std.mem.endianSwapIfLe(u16, port),
+                    .flowinfo = 0,
+                    .addr = ip6.addr,
+                    .scope_id = ip6.scope_id,
+                },
+            },
+        };
+    }
-pub fn connectAddr(addr: &Address, port: u16) !Connection {
- const socket_ret = linux.socket(addr.family, linux.SOCK_STREAM, linux.PROTO_tcp);
- const socket_err = linux.getErrno(socket_ret);
- if (socket_err > 0) {
- // TODO figure out possible errors from socket()
- return error.Unexpected;
+ pub fn initPosix(addr: &const posix.sockaddr) Address {
+ return Address {
+ .os_addr = *addr,
+ };
}
- const socket_fd = i32(socket_ret);
- const connect_ret = if (addr.family == linux.AF_INET) x: {
- var os_addr: linux.sockaddr_in = undefined;
- os_addr.family = addr.family;
- os_addr.port = endian.swapIfLe(u16, port);
- @memcpy((&u8)(&os_addr.addr), &addr.addr[0], 4);
- @memset(&os_addr.zero[0], 0, @sizeOf(@typeOf(os_addr.zero)));
- break :x linux.connect(socket_fd, (&linux.sockaddr)(&os_addr), @sizeOf(linux.sockaddr_in));
- } else if (addr.family == linux.AF_INET6) x: {
- var os_addr: linux.sockaddr_in6 = undefined;
- os_addr.family = addr.family;
- os_addr.port = endian.swapIfLe(u16, port);
- os_addr.flowinfo = 0;
- os_addr.scope_id = addr.scope_id;
- @memcpy(&os_addr.addr[0], &addr.addr[0], 16);
- break :x linux.connect(socket_fd, (&linux.sockaddr)(&os_addr), @sizeOf(linux.sockaddr_in6));
- } else {
- unreachable;
- };
- const connect_err = linux.getErrno(connect_ret);
- if (connect_err > 0) {
- switch (connect_err) {
- linux.ETIMEDOUT => return error.TimedOut,
- else => {
- // TODO figure out possible errors from connect()
- return error.Unexpected;
+ pub fn format(self: &const Address, out_stream: var) !void {
+ switch (self.os_addr.in.family) {
+ posix.AF_INET => {
+ const native_endian_port = std.mem.endianSwapIfLe(u16, self.os_addr.in.port);
+ const bytes = ([]const u8)((&self.os_addr.in.addr)[0..1]);
+ try out_stream.print("{}.{}.{}.{}:{}", bytes[0], bytes[1], bytes[2], bytes[3], native_endian_port);
+ },
+ posix.AF_INET6 => {
+ const native_endian_port = std.mem.endianSwapIfLe(u16, self.os_addr.in6.port);
+ try out_stream.print("[TODO render ip6 address]:{}", native_endian_port);
},
+ else => try out_stream.write("(unrecognized address family)"),
}
}
+};
- return Connection {
- .socket_fd = socket_fd,
- };
-}
-
-pub fn connect(hostname: []const u8, port: u16) !Connection {
- var addrs_buf: [1]Address = undefined;
- const addrs_slice = try lookup(hostname, addrs_buf[0..]);
- const main_addr = &addrs_slice[0];
-
- return connectAddr(main_addr, port);
-}
+pub fn parseIp4(buf: []const u8) !u32 {
+ var result: u32 = undefined;
+ const out_ptr = ([]u8)((&result)[0..1]);
-pub fn parseIpLiteral(buf: []const u8) !Address {
+ var x: u8 = 0;
+ var index: u8 = 0;
+ var saw_any_digits = false;
+ for (buf) |c| {
+ if (c == '.') {
+ if (!saw_any_digits) {
+ return error.InvalidCharacter;
+ }
+ if (index == 3) {
+ return error.InvalidEnd;
+ }
+ out_ptr[index] = x;
+ index += 1;
+ x = 0;
+ saw_any_digits = false;
+ } else if (c >= '0' and c <= '9') {
+ saw_any_digits = true;
+ const digit = c - '0';
+ if (@mulWithOverflow(u8, x, 10, &x)) {
+ return error.Overflow;
+ }
+ if (@addWithOverflow(u8, x, digit, &x)) {
+ return error.Overflow;
+ }
+ } else {
+ return error.InvalidCharacter;
+ }
+ }
+ if (index == 3 and saw_any_digits) {
+ out_ptr[index] = x;
+ return result;
+ }
- return error.InvalidIpLiteral;
+ return error.Incomplete;
}
-fn hexDigit(c: u8) u8 {
- // TODO use switch with range
- if ('0' <= c and c <= '9') {
- return c - '0';
- } else if ('A' <= c and c <= 'Z') {
- return c - 'A' + 10;
- } else if ('a' <= c and c <= 'z') {
- return c - 'a' + 10;
- } else {
- return @maxValue(u8);
- }
-}
+pub const Ip6Addr = struct {
+ scope_id: u32,
+ addr: [16]u8,
+};
-fn parseIp6(buf: []const u8) !Address {
- var result: Address = undefined;
- result.family = linux.AF_INET6;
+pub fn parseIp6(buf: []const u8) !Ip6Addr {
+ var result: Ip6Addr = undefined;
result.scope_id = 0;
const ip_slice = result.addr[0..];
@@ -156,14 +133,14 @@ fn parseIp6(buf: []const u8) !Address {
return error.Overflow;
}
} else {
- return error.InvalidChar;
+ return error.InvalidCharacter;
}
} else if (c == ':') {
if (!saw_any_digits) {
- return error.InvalidChar;
+ return error.InvalidCharacter;
}
if (index == 14) {
- return error.JunkAtEnd;
+ return error.InvalidEnd;
}
ip_slice[index] = @truncate(u8, x >> 8);
index += 1;
@@ -174,7 +151,7 @@ fn parseIp6(buf: []const u8) !Address {
saw_any_digits = false;
} else if (c == '%') {
if (!saw_any_digits) {
- return error.InvalidChar;
+ return error.InvalidCharacter;
}
if (index == 14) {
ip_slice[index] = @truncate(u8, x >> 8);
@@ -185,10 +162,7 @@ fn parseIp6(buf: []const u8) !Address {
scope_id = true;
saw_any_digits = false;
} else {
- const digit = hexDigit(c);
- if (digit == @maxValue(u8)) {
- return error.InvalidChar;
- }
+ const digit = try std.fmt.charToDigit(c, 16);
if (@mulWithOverflow(u16, x, 16, &x)) {
return error.Overflow;
}
@@ -216,42 +190,27 @@ fn parseIp6(buf: []const u8) !Address {
return error.Incomplete;
}
-fn parseIp4(buf: []const u8) !u32 {
- var result: u32 = undefined;
- const out_ptr = ([]u8)((&result)[0..1]);
+test "std.net.parseIp4" {
+ assert((try parseIp4("127.0.0.1")) == std.mem.endianSwapIfLe(u32, 0x7f000001));
- var x: u8 = 0;
- var index: u8 = 0;
- var saw_any_digits = false;
- for (buf) |c| {
- if (c == '.') {
- if (!saw_any_digits) {
- return error.InvalidChar;
- }
- if (index == 3) {
- return error.JunkAtEnd;
- }
- out_ptr[index] = x;
- index += 1;
- x = 0;
- saw_any_digits = false;
- } else if (c >= '0' and c <= '9') {
- saw_any_digits = true;
- const digit = c - '0';
- if (@mulWithOverflow(u8, x, 10, &x)) {
- return error.Overflow;
- }
- if (@addWithOverflow(u8, x, digit, &x)) {
- return error.Overflow;
- }
- } else {
- return error.InvalidChar;
- }
- }
- if (index == 3 and saw_any_digits) {
- out_ptr[index] = x;
- return result;
+ testParseIp4Fail("256.0.0.1", error.Overflow);
+ testParseIp4Fail("x.0.0.1", error.InvalidCharacter);
+ testParseIp4Fail("127.0.0.1.1", error.InvalidEnd);
+ testParseIp4Fail("127.0.0.", error.Incomplete);
+ testParseIp4Fail("100..0.1", error.InvalidCharacter);
+}
+
+fn testParseIp4Fail(buf: []const u8, expected_err: error) void {
+ if (parseIp4(buf)) |_| {
+ @panic("expected error");
+ } else |e| {
+ assert(e == expected_err);
}
+}
- return error.Incomplete;
+test "std.net.parseIp6" {
+ const addr = try parseIp6("FF01:0:0:0:0:0:0:FB");
+ assert(addr.addr[0] == 0xff);
+ assert(addr.addr[1] == 0x01);
+ assert(addr.addr[2] == 0x00);
}
diff --git a/std/os/darwin.zig b/std/os/darwin.zig
index f8b1fbed3b..0a62b03ab2 100644
--- a/std/os/darwin.zig
+++ b/std/os/darwin.zig
@@ -41,6 +41,11 @@ pub const SA_64REGSET = 0x0200; /// signal handler with SA_SIGINFO args with 64
pub const O_LARGEFILE = 0x0000;
pub const O_PATH = 0x0000;
+pub const F_OK = 0;
+pub const X_OK = 1;
+pub const W_OK = 2;
+pub const R_OK = 4;
+
pub const O_RDONLY = 0x0000; /// open for reading only
pub const O_WRONLY = 0x0001; /// open for writing only
pub const O_RDWR = 0x0002; /// open for reading and writing
@@ -179,7 +184,7 @@ pub fn write(fd: i32, buf: &const u8, nbyte: usize) usize {
return errnoWrap(c.write(fd, @ptrCast(&const c_void, buf), nbyte));
}
-pub fn mmap(address: ?&u8, length: usize, prot: usize, flags: usize, fd: i32,
+pub fn mmap(address: ?&u8, length: usize, prot: usize, flags: u32, fd: i32,
offset: isize) usize
{
const ptr_result = c.mmap(@ptrCast(&c_void, address), length,
@@ -188,8 +193,8 @@ pub fn mmap(address: ?&u8, length: usize, prot: usize, flags: usize, fd: i32,
return errnoWrap(isize_result);
}
-pub fn munmap(address: &u8, length: usize) usize {
- return errnoWrap(c.munmap(@ptrCast(&c_void, address), length));
+pub fn munmap(address: usize, length: usize) usize {
+ return errnoWrap(c.munmap(@intToPtr(&c_void, address), length));
}
pub fn unlink(path: &const u8) usize {
@@ -209,6 +214,10 @@ pub fn fork() usize {
return errnoWrap(c.fork());
}
+pub fn access(path: &const u8, mode: u32) usize {
+ return errnoWrap(c.access(path, mode));
+}
+
pub fn pipe(fds: &[2]i32) usize {
comptime assert(i32.bit_count == c_int.bit_count);
return errnoWrap(c.pipe(@ptrCast(&c_int, fds)));
@@ -251,6 +260,10 @@ pub fn readlink(noalias path: &const u8, noalias buf_ptr: &u8, buf_len: usize) u
return errnoWrap(c.readlink(path, buf_ptr, buf_len));
}
+pub fn gettimeofday(tv: ?&timeval, tz: ?&timezone) usize {
+ return errnoWrap(c.gettimeofday(tv, tz));
+}
+
pub fn nanosleep(req: &const timespec, rem: ?&timespec) usize {
return errnoWrap(c.nanosleep(req, rem));
}
@@ -301,6 +314,9 @@ pub const timespec = c.timespec;
pub const Stat = c.Stat;
pub const dirent = c.dirent;
+pub const sa_family_t = c.sa_family_t;
+pub const sockaddr = c.sockaddr;
+
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with the syscall.
pub const Sigaction = struct {
handler: extern fn(i32)void,
@@ -318,3 +334,11 @@ pub fn sigaddset(set: &sigset_t, signo: u5) void {
fn errnoWrap(value: isize) usize {
return @bitCast(usize, if (value == -1) -isize(*c._errno()) else value);
}
+
+
+pub const timezone = c.timezone;
+pub const timeval = c.timeval;
+pub const mach_timebase_info_data = c.mach_timebase_info_data;
+
+pub const mach_absolute_time = c.mach_absolute_time;
+pub const mach_timebase_info = c.mach_timebase_info;
diff --git a/std/os/epoch.zig b/std/os/epoch.zig
new file mode 100644
index 0000000000..e1256c1374
--- /dev/null
+++ b/std/os/epoch.zig
@@ -0,0 +1,26 @@
+/// Epoch reference times in terms of their difference from
+/// posix epoch in seconds.
+pub const posix = 0; //Jan 01, 1970 AD
+pub const dos = 315532800; //Jan 01, 1980 AD
+pub const ios = 978307200; //Jan 01, 2001 AD
+pub const openvms = -3506716800; //Nov 17, 1858 AD
+pub const zos = -2208988800; //Jan 01, 1900 AD
+pub const windows = -11644473600; //Jan 01, 1601 AD
+pub const amiga = 252460800; //Jan 01, 1978 AD
+pub const pickos = -63244800; //Dec 31, 1967 AD
+pub const gps = 315964800; //Jan 06, 1980 AD
+pub const clr = -62135769600; //Jan 01, 0001 AD
+
+pub const unix = posix;
+pub const android = posix;
+pub const os2 = dos;
+pub const bios = dos;
+pub const vfat = dos;
+pub const ntfs = windows;
+pub const ntp = zos;
+pub const jbase = pickos;
+pub const aros = amiga;
+pub const morphos = amiga;
+pub const brew = gps;
+pub const atsc = gps;
+pub const go = clr; \ No newline at end of file
diff --git a/std/os/file.zig b/std/os/file.zig
index eed3a443b9..61fc2b1455 100644
--- a/std/os/file.zig
+++ b/std/os/file.zig
@@ -85,6 +85,47 @@ pub const File = struct {
};
}
+ pub fn access(allocator: &mem.Allocator, path: []const u8, file_mode: os.FileMode) !bool {
+ const path_with_null = try std.cstr.addNullByte(allocator, path);
+ defer allocator.free(path_with_null);
+
+ if (is_posix) {
+ // mode is ignored and is always F_OK for now
+ const result = posix.access(path_with_null.ptr, posix.F_OK);
+ const err = posix.getErrno(result);
+ if (err > 0) {
+ return switch (err) {
+ posix.EACCES => error.PermissionDenied,
+ posix.EROFS => error.PermissionDenied,
+ posix.ELOOP => error.PermissionDenied,
+ posix.ETXTBSY => error.PermissionDenied,
+ posix.ENOTDIR => error.NotFound,
+ posix.ENOENT => error.NotFound,
+
+ posix.ENAMETOOLONG => error.NameTooLong,
+ posix.EINVAL => error.BadMode,
+ posix.EFAULT => error.BadPathName,
+ posix.EIO => error.Io,
+ posix.ENOMEM => error.SystemResources,
+ else => os.unexpectedErrorPosix(err),
+ };
+ }
+ return true;
+ } else if (is_windows) {
+ if (os.windows.PathFileExists(path_with_null.ptr) == os.windows.TRUE) {
+ return true;
+ }
+
+ const err = windows.GetLastError();
+ return switch (err) {
+ windows.ERROR.FILE_NOT_FOUND => error.NotFound,
+ windows.ERROR.ACCESS_DENIED => error.PermissionDenied,
+ else => os.unexpectedErrorWindows(err),
+ };
+ } else {
+ @compileError("TODO implement access for this OS");
+ }
+ }
/// Upon success, the stream is in an uninitialized state. To continue using it,
/// you must use the open() function.
@@ -245,7 +286,9 @@ pub const File = struct {
};
}
- return stat.mode;
+ // TODO: we should be able to cast u16 to ModeError!u32, making this
+ // explicit cast not necessary
+ return os.FileMode(stat.mode);
} else if (is_windows) {
return {};
} else {
diff --git a/std/os/index.zig b/std/os/index.zig
index 4b74af035e..93c5f70f1e 100644
--- a/std/os/index.zig
+++ b/std/os/index.zig
@@ -2,22 +2,42 @@ const std = @import("../index.zig");
const builtin = @import("builtin");
const Os = builtin.Os;
const is_windows = builtin.os == Os.windows;
+const is_posix = switch (builtin.os) {
+ builtin.Os.linux,
+ builtin.Os.macosx => true,
+ else => false,
+};
const os = this;
+test "std.os" {
+ _ = @import("child_process.zig");
+ _ = @import("darwin.zig");
+ _ = @import("darwin_errno.zig");
+ _ = @import("get_user_id.zig");
+ _ = @import("linux/index.zig");
+ _ = @import("path.zig");
+ _ = @import("test.zig");
+ _ = @import("time.zig");
+ _ = @import("windows/index.zig");
+}
+
pub const windows = @import("windows/index.zig");
pub const darwin = @import("darwin.zig");
pub const linux = @import("linux/index.zig");
pub const zen = @import("zen.zig");
-pub const posix = switch(builtin.os) {
+pub const posix = switch (builtin.os) {
Os.linux => linux,
- Os.macosx, Os.ios => darwin,
+ Os.macosx,
+ Os.ios => darwin,
Os.zen => zen,
else => @compileError("Unsupported OS"),
};
+pub const net = @import("net.zig");
pub const ChildProcess = @import("child_process.zig").ChildProcess;
pub const path = @import("path.zig");
pub const File = @import("file.zig").File;
+pub const time = @import("time.zig");
pub const FileMode = switch (builtin.os) {
Os.windows => void,
@@ -40,7 +60,7 @@ pub const windowsWrite = windows_util.windowsWrite;
pub const windowsIsCygwinPty = windows_util.windowsIsCygwinPty;
pub const windowsOpen = windows_util.windowsOpen;
pub const windowsLoadDll = windows_util.windowsLoadDll;
-pub const windowsUnloadDll = windows_util.windowsUnloadDll;
+pub const windowsUnloadDll = windows_util.windowsUnloadDll;
pub const createWindowsEnvBlock = windows_util.createWindowsEnvBlock;
pub const WindowsWaitError = windows_util.WaitError;
@@ -79,9 +99,9 @@ pub fn getRandomBytes(buf: []u8) !void {
switch (err) {
posix.EINVAL => unreachable,
posix.EFAULT => unreachable,
- posix.EINTR => continue,
+ posix.EINTR => continue,
posix.ENOSYS => {
- const fd = try posixOpenC(c"/dev/urandom", posix.O_RDONLY|posix.O_CLOEXEC, 0);
+ const fd = try posixOpenC(c"/dev/urandom", posix.O_RDONLY | posix.O_CLOEXEC, 0);
defer close(fd);
try posixRead(fd, buf);
@@ -92,8 +112,9 @@ pub fn getRandomBytes(buf: []u8) !void {
}
return;
},
- Os.macosx, Os.ios => {
- const fd = try posixOpenC(c"/dev/urandom", posix.O_RDONLY|posix.O_CLOEXEC, 0);
+ Os.macosx,
+ Os.ios => {
+ const fd = try posixOpenC(c"/dev/urandom", posix.O_RDONLY | posix.O_CLOEXEC, 0);
defer close(fd);
try posixRead(fd, buf);
@@ -116,7 +137,20 @@ pub fn getRandomBytes(buf: []u8) !void {
}
},
Os.zen => {
- const randomness = []u8 {42, 1, 7, 12, 22, 17, 99, 16, 26, 87, 41, 45};
+ const randomness = []u8 {
+ 42,
+ 1,
+ 7,
+ 12,
+ 22,
+ 17,
+ 99,
+ 16,
+ 26,
+ 87,
+ 41,
+ 45,
+ };
var i: usize = 0;
while (i < buf.len) : (i += 1) {
if (i > randomness.len) return error.Unknown;
@@ -141,7 +175,9 @@ pub fn abort() noreturn {
c.abort();
}
switch (builtin.os) {
- Os.linux, Os.macosx, Os.ios => {
+ Os.linux,
+ Os.macosx,
+ Os.ios => {
_ = posix.raise(posix.SIGABRT);
_ = posix.raise(posix.SIGKILL);
while (true) {}
@@ -163,7 +199,9 @@ pub fn exit(status: u8) noreturn {
c.exit(status);
}
switch (builtin.os) {
- Os.linux, Os.macosx, Os.ios => {
+ Os.linux,
+ Os.macosx,
+ Os.ios => {
posix.exit(status);
},
Os.windows => {
@@ -173,6 +211,13 @@ pub fn exit(status: u8) noreturn {
}
}
+/// When a file descriptor is closed on linux, it pops the first
+/// node from this queue and resumes it.
+/// Async functions which get the EMFILE error code can suspend,
+/// putting their coroutine handle into this list.
+/// TODO make this an atomic linked list
+pub var emfile_promise_queue = std.LinkedList(promise).init();
+
/// Closes the file handle. Keeps trying if it gets interrupted by a signal.
pub fn close(handle: FileHandle) void {
if (is_windows) {
@@ -180,10 +225,12 @@ pub fn close(handle: FileHandle) void {
} else {
while (true) {
const err = posix.getErrno(posix.close(handle));
- if (err == posix.EINTR) {
- continue;
- } else {
- return;
+ switch (err) {
+ posix.EINTR => continue,
+ else => {
+ if (emfile_promise_queue.popFirst()) |p| resume p.data;
+ return;
+ },
}
}
}
@@ -203,12 +250,14 @@ pub fn posixRead(fd: i32, buf: []u8) !void {
if (err > 0) {
return switch (err) {
posix.EINTR => continue,
- posix.EINVAL, posix.EFAULT => unreachable,
+ posix.EINVAL,
+ posix.EFAULT => unreachable,
posix.EAGAIN => error.WouldBlock,
posix.EBADF => error.FileClosed,
posix.EIO => error.InputOutput,
posix.EISDIR => error.IsDir,
- posix.ENOBUFS, posix.ENOMEM => error.SystemResources,
+ posix.ENOBUFS,
+ posix.ENOMEM => error.SystemResources,
else => unexpectedErrorPosix(err),
};
}
@@ -242,18 +291,19 @@ pub fn posixWrite(fd: i32, bytes: []const u8) !void {
const write_err = posix.getErrno(rc);
if (write_err > 0) {
return switch (write_err) {
- posix.EINTR => continue,
- posix.EINVAL, posix.EFAULT => unreachable,
+ posix.EINTR => continue,
+ posix.EINVAL,
+ posix.EFAULT => unreachable,
posix.EAGAIN => PosixWriteError.WouldBlock,
posix.EBADF => PosixWriteError.FileClosed,
posix.EDESTADDRREQ => PosixWriteError.DestinationAddressRequired,
posix.EDQUOT => PosixWriteError.DiskQuota,
- posix.EFBIG => PosixWriteError.FileTooBig,
- posix.EIO => PosixWriteError.InputOutput,
+ posix.EFBIG => PosixWriteError.FileTooBig,
+ posix.EIO => PosixWriteError.InputOutput,
posix.ENOSPC => PosixWriteError.NoSpaceLeft,
- posix.EPERM => PosixWriteError.AccessDenied,
- posix.EPIPE => PosixWriteError.BrokenPipe,
- else => unexpectedErrorPosix(write_err),
+ posix.EPERM => PosixWriteError.AccessDenied,
+ posix.EPIPE => PosixWriteError.BrokenPipe,
+ else => unexpectedErrorPosix(write_err),
};
}
index += rc;
@@ -299,7 +349,8 @@ pub fn posixOpenC(file_path: &const u8, flags: u32, perm: usize) !i32 {
posix.EFAULT => unreachable,
posix.EINVAL => unreachable,
posix.EACCES => return PosixOpenError.AccessDenied,
- posix.EFBIG, posix.EOVERFLOW => return PosixOpenError.FileTooBig,
+ posix.EFBIG,
+ posix.EOVERFLOW => return PosixOpenError.FileTooBig,
posix.EISDIR => return PosixOpenError.IsDir,
posix.ELOOP => return PosixOpenError.SymLinkLoop,
posix.EMFILE => return PosixOpenError.ProcessFdQuotaExceeded,
@@ -324,7 +375,8 @@ pub fn posixDup2(old_fd: i32, new_fd: i32) !void {
const err = posix.getErrno(posix.dup2(old_fd, new_fd));
if (err > 0) {
return switch (err) {
- posix.EBUSY, posix.EINTR => continue,
+ posix.EBUSY,
+ posix.EINTR => continue,
posix.EMFILE => error.ProcessFdQuotaExceeded,
posix.EINVAL => unreachable,
else => unexpectedErrorPosix(err),
@@ -359,7 +411,7 @@ pub fn createNullDelimitedEnvMap(allocator: &Allocator, env_map: &const BufMap)
pub fn freeNullDelimitedEnvMap(allocator: &Allocator, envp_buf: []?&u8) void {
for (envp_buf) |env| {
- const env_buf = if (env) |ptr| ptr[0 .. cstr.len(ptr) + 1] else break;
+ const env_buf = if (env) |ptr| ptr[0..cstr.len(ptr) + 1] else break;
allocator.free(env_buf);
}
allocator.free(envp_buf);
@@ -370,9 +422,7 @@ pub fn freeNullDelimitedEnvMap(allocator: &Allocator, envp_buf: []?&u8) void {
/// pointers after the args and after the environment variables.
/// `argv[0]` is the executable path.
/// This function also uses the PATH environment variable to get the full path to the executable.
-pub fn posixExecve(argv: []const []const u8, env_map: &const BufMap,
- allocator: &Allocator) !void
-{
+pub fn posixExecve(argv: []const []const u8, env_map: &const BufMap, allocator: &Allocator) !void {
const argv_buf = try allocator.alloc(?&u8, argv.len + 1);
mem.set(?&u8, argv_buf, null);
defer {
@@ -411,7 +461,7 @@ pub fn posixExecve(argv: []const []const u8, env_map: &const BufMap,
while (it.next()) |search_path| {
mem.copy(u8, path_buf, search_path);
path_buf[search_path.len] = '/';
- mem.copy(u8, path_buf[search_path.len + 1 ..], exe_path);
+ mem.copy(u8, path_buf[search_path.len + 1..], exe_path);
path_buf[search_path.len + exe_path.len + 1] = 0;
err = posix.getErrno(posix.execve(path_buf.ptr, argv_buf.ptr, envp_buf.ptr));
assert(err > 0);
@@ -443,10 +493,17 @@ fn posixExecveErrnoToErr(err: usize) PosixExecveError {
assert(err > 0);
return switch (err) {
posix.EFAULT => unreachable,
- posix.E2BIG, posix.EMFILE, posix.ENAMETOOLONG, posix.ENFILE, posix.ENOMEM => error.SystemResources,
- posix.EACCES, posix.EPERM => error.AccessDenied,
- posix.EINVAL, posix.ENOEXEC => error.InvalidExe,
- posix.EIO, posix.ELOOP => error.FileSystem,
+ posix.E2BIG,
+ posix.EMFILE,
+ posix.ENAMETOOLONG,
+ posix.ENFILE,
+ posix.ENOMEM => error.SystemResources,
+ posix.EACCES,
+ posix.EPERM => error.AccessDenied,
+ posix.EINVAL,
+ posix.ENOEXEC => error.InvalidExe,
+ posix.EIO,
+ posix.ELOOP => error.FileSystem,
posix.EISDIR => error.IsDir,
posix.ENOENT => error.FileNotFound,
posix.ENOTDIR => error.NotDir,
@@ -455,6 +512,7 @@ fn posixExecveErrnoToErr(err: usize) PosixExecveError {
};
}
+pub var linux_aux_raw = []usize {0} ** 38;
pub var posix_environ_raw: []&u8 = undefined;
/// Caller must free result when done.
@@ -468,8 +526,7 @@ pub fn getEnvMap(allocator: &Allocator) !BufMap {
var i: usize = 0;
while (true) {
- if (ptr[i] == 0)
- return result;
+ if (ptr[i] == 0) return result;
const key_start = i;
@@ -507,8 +564,7 @@ pub fn getEnvPosix(key: []const u8) ?[]const u8 {
var line_i: usize = 0;
while (ptr[line_i] != 0 and ptr[line_i] != '=') : (line_i += 1) {}
const this_key = ptr[0..line_i];
- if (!mem.eql(u8, key, this_key))
- continue;
+ if (!mem.eql(u8, key, this_key)) continue;
var end_i: usize = line_i;
while (ptr[end_i] != 0) : (end_i += 1) {}
@@ -661,8 +717,10 @@ pub fn symLinkPosix(allocator: &Allocator, existing_path: []const u8, new_path:
const err = posix.getErrno(posix.symlink(existing_buf.ptr, new_buf.ptr));
if (err > 0) {
return switch (err) {
- posix.EFAULT, posix.EINVAL => unreachable,
- posix.EACCES, posix.EPERM => error.AccessDenied,
+ posix.EFAULT,
+ posix.EINVAL => unreachable,
+ posix.EACCES,
+ posix.EPERM => error.AccessDenied,
posix.EDQUOT => error.DiskQuota,
posix.EEXIST => error.PathAlreadyExists,
posix.EIO => error.FileSystem,
@@ -679,9 +737,7 @@ pub fn symLinkPosix(allocator: &Allocator, existing_path: []const u8, new_path:
}
// here we replace the standard +/ with -_ so that it can be used in a file name
-const b64_fs_encoder = base64.Base64Encoder.init(
- "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_",
- base64.standard_pad_char);
+const b64_fs_encoder = base64.Base64Encoder.init("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_", base64.standard_pad_char);
pub fn atomicSymLink(allocator: &Allocator, existing_path: []const u8, new_path: []const u8) !void {
if (symLink(allocator, existing_path, new_path)) {
@@ -700,7 +756,7 @@ pub fn atomicSymLink(allocator: &Allocator, existing_path: []const u8, new_path:
tmp_path[dirname.len] = os.path.sep;
while (true) {
try getRandomBytes(rand_buf[0..]);
- b64_fs_encoder.encode(tmp_path[dirname.len + 1 ..], rand_buf);
+ b64_fs_encoder.encode(tmp_path[dirname.len + 1..], rand_buf);
if (symLink(allocator, existing_path, tmp_path)) {
return rename(allocator, tmp_path, new_path);
@@ -709,7 +765,6 @@ pub fn atomicSymLink(allocator: &Allocator, existing_path: []const u8, new_path:
else => return err, // TODO zig should know this set does not include PathAlreadyExists
}
}
-
}
pub fn deleteFile(allocator: &Allocator, file_path: []const u8) !void {
@@ -732,7 +787,8 @@ pub fn deleteFileWindows(allocator: &Allocator, file_path: []const u8) !void {
return switch (err) {
windows.ERROR.FILE_NOT_FOUND => error.FileNotFound,
windows.ERROR.ACCESS_DENIED => error.AccessDenied,
- windows.ERROR.FILENAME_EXCED_RANGE, windows.ERROR.INVALID_PARAMETER => error.NameTooLong,
+ windows.ERROR.FILENAME_EXCED_RANGE,
+ windows.ERROR.INVALID_PARAMETER => error.NameTooLong,
else => unexpectedErrorWindows(err),
};
}
@@ -748,9 +804,11 @@ pub fn deleteFilePosix(allocator: &Allocator, file_path: []const u8) !void {
const err = posix.getErrno(posix.unlink(buf.ptr));
if (err > 0) {
return switch (err) {
- posix.EACCES, posix.EPERM => error.AccessDenied,
+ posix.EACCES,
+ posix.EPERM => error.AccessDenied,
posix.EBUSY => error.FileBusy,
- posix.EFAULT, posix.EINVAL => unreachable,
+ posix.EFAULT,
+ posix.EINVAL => unreachable,
posix.EIO => error.FileSystem,
posix.EISDIR => error.IsDir,
posix.ELOOP => error.SymLinkLoop,
@@ -828,7 +886,7 @@ pub const AtomicFile = struct {
while (true) {
try getRandomBytes(rand_buf[0..]);
- b64_fs_encoder.encode(tmp_path[dirname.len + 1 ..], rand_buf);
+ b64_fs_encoder.encode(tmp_path[dirname.len + 1..], rand_buf);
const file = os.File.openWriteNoClobber(allocator, tmp_path, mode) catch |err| switch (err) {
error.PathAlreadyExists => continue,
@@ -879,7 +937,7 @@ pub fn rename(allocator: &Allocator, old_path: []const u8, new_path: []const u8)
new_buf[new_path.len] = 0;
if (is_windows) {
- const flags = windows.MOVEFILE_REPLACE_EXISTING|windows.MOVEFILE_WRITE_THROUGH;
+ const flags = windows.MOVEFILE_REPLACE_EXISTING | windows.MOVEFILE_WRITE_THROUGH;
if (windows.MoveFileExA(old_buf.ptr, new_buf.ptr, flags) == 0) {
const err = windows.GetLastError();
return switch (err) {
@@ -890,10 +948,12 @@ pub fn rename(allocator: &Allocator, old_path: []const u8, new_path: []const u8)
const err = posix.getErrno(posix.rename(old_buf.ptr, new_buf.ptr));
if (err > 0) {
return switch (err) {
- posix.EACCES, posix.EPERM => error.AccessDenied,
+ posix.EACCES,
+ posix.EPERM => error.AccessDenied,
posix.EBUSY => error.FileBusy,
posix.EDQUOT => error.DiskQuota,
- posix.EFAULT, posix.EINVAL => unreachable,
+ posix.EFAULT,
+ posix.EINVAL => unreachable,
posix.EISDIR => error.IsDir,
posix.ELOOP => error.SymLinkLoop,
posix.EMLINK => error.LinkQuotaExceeded,
@@ -902,7 +962,8 @@ pub fn rename(allocator: &Allocator, old_path: []const u8, new_path: []const u8)
posix.ENOTDIR => error.NotDir,
posix.ENOMEM => error.SystemResources,
posix.ENOSPC => error.NoSpaceLeft,
- posix.EEXIST, posix.ENOTEMPTY => error.PathAlreadyExists,
+ posix.EEXIST,
+ posix.ENOTEMPTY => error.PathAlreadyExists,
posix.EROFS => error.ReadOnlyFileSystem,
posix.EXDEV => error.RenameAcrossMountPoints,
else => unexpectedErrorPosix(err),
@@ -940,7 +1001,8 @@ pub fn makeDirPosix(allocator: &Allocator, dir_path: []const u8) !void {
const err = posix.getErrno(posix.mkdir(path_buf.ptr, 0o755));
if (err > 0) {
return switch (err) {
- posix.EACCES, posix.EPERM => error.AccessDenied,
+ posix.EACCES,
+ posix.EPERM => error.AccessDenied,
posix.EDQUOT => error.DiskQuota,
posix.EEXIST => error.PathAlreadyExists,
posix.EFAULT => unreachable,
@@ -970,27 +1032,23 @@ pub fn makePath(allocator: &Allocator, full_path: []const u8) !void {
// TODO stat the file and return an error if it's not a directory
// this is important because otherwise a dangling symlink
// could cause an infinite loop
- if (end_index == resolved_path.len)
- return;
+ if (end_index == resolved_path.len) return;
} else if (err == error.FileNotFound) {
// march end_index backward until next path component
while (true) {
end_index -= 1;
- if (os.path.isSep(resolved_path[end_index]))
- break;
+ if (os.path.isSep(resolved_path[end_index])) break;
}
continue;
} else {
return err;
}
};
- if (end_index == resolved_path.len)
- return;
+ if (end_index == resolved_path.len) return;
// march end_index forward until next path component
while (true) {
end_index += 1;
- if (end_index == resolved_path.len or os.path.isSep(resolved_path[end_index]))
- break;
+ if (end_index == resolved_path.len or os.path.isSep(resolved_path[end_index])) break;
}
}
}
@@ -1007,15 +1065,18 @@ pub fn deleteDir(allocator: &Allocator, dir_path: []const u8) !void {
const err = posix.getErrno(posix.rmdir(path_buf.ptr));
if (err > 0) {
return switch (err) {
- posix.EACCES, posix.EPERM => error.AccessDenied,
+ posix.EACCES,
+ posix.EPERM => error.AccessDenied,
posix.EBUSY => error.FileBusy,
- posix.EFAULT, posix.EINVAL => unreachable,
+ posix.EFAULT,
+ posix.EINVAL => unreachable,
posix.ELOOP => error.SymLinkLoop,
posix.ENAMETOOLONG => error.NameTooLong,
posix.ENOENT => error.FileNotFound,
posix.ENOMEM => error.SystemResources,
posix.ENOTDIR => error.NotDir,
- posix.EEXIST, posix.ENOTEMPTY => error.DirNotEmpty,
+ posix.EEXIST,
+ posix.ENOTEMPTY => error.DirNotEmpty,
posix.EROFS => error.ReadOnlyFileSystem,
else => unexpectedErrorPosix(err),
};
@@ -1025,7 +1086,7 @@ pub fn deleteDir(allocator: &Allocator, dir_path: []const u8) !void {
/// Whether ::full_path describes a symlink, file, or directory, this function
/// removes it. If it cannot be removed because it is a non-empty directory,
/// this function recursively removes its entries and then tries again.
-// TODO non-recursive implementation
+/// TODO non-recursive implementation
const DeleteTreeError = error {
OutOfMemory,
AccessDenied,
@@ -1067,8 +1128,7 @@ pub fn deleteTree(allocator: &Allocator, full_path: []const u8) DeleteTreeError!
error.NotDir,
error.FileSystem,
error.FileBusy,
- error.Unexpected
- => return err,
+ error.Unexpected => return err,
}
{
var dir = Dir.open(allocator, full_path) catch |err| switch (err) {
@@ -1092,8 +1152,7 @@ pub fn deleteTree(allocator: &Allocator, full_path: []const u8) DeleteTreeError!
error.SystemResources,
error.NoSpaceLeft,
error.PathAlreadyExists,
- error.Unexpected
- => return err,
+ error.Unexpected => return err,
};
defer dir.close();
@@ -1123,7 +1182,8 @@ pub const Dir = struct {
end_index: usize,
const darwin_seek_t = switch (builtin.os) {
- Os.macosx, Os.ios => i64,
+ Os.macosx,
+ Os.ios => i64,
else => void,
};
@@ -1147,12 +1207,14 @@ pub const Dir = struct {
pub fn open(allocator: &Allocator, dir_path: []const u8) !Dir {
const fd = switch (builtin.os) {
Os.windows => @compileError("TODO support Dir.open for windows"),
- Os.linux => try posixOpen(allocator, dir_path, posix.O_RDONLY|posix.O_DIRECTORY|posix.O_CLOEXEC, 0),
- Os.macosx, Os.ios => try posixOpen(allocator, dir_path, posix.O_RDONLY|posix.O_NONBLOCK|posix.O_DIRECTORY|posix.O_CLOEXEC, 0),
+ Os.linux => try posixOpen(allocator, dir_path, posix.O_RDONLY | posix.O_DIRECTORY | posix.O_CLOEXEC, 0),
+ Os.macosx,
+ Os.ios => try posixOpen(allocator, dir_path, posix.O_RDONLY | posix.O_NONBLOCK | posix.O_DIRECTORY | posix.O_CLOEXEC, 0),
else => @compileError("Dir.open is not supported for this platform"),
};
const darwin_seek_init = switch (builtin.os) {
- Os.macosx, Os.ios => 0,
+ Os.macosx,
+ Os.ios => 0,
else => {},
};
return Dir {
@@ -1175,7 +1237,8 @@ pub const Dir = struct {
pub fn next(self: &Dir) !?Entry {
switch (builtin.os) {
Os.linux => return self.nextLinux(),
- Os.macosx, Os.ios => return self.nextDarwin(),
+ Os.macosx,
+ Os.ios => return self.nextDarwin(),
Os.windows => return self.nextWindows(),
else => @compileError("Dir.next not supported on " ++ @tagName(builtin.os)),
}
@@ -1189,12 +1252,13 @@ pub const Dir = struct {
}
while (true) {
- const result = posix.getdirentries64(self.fd, self.buf.ptr, self.buf.len,
- &self.darwin_seek);
+ const result = posix.getdirentries64(self.fd, self.buf.ptr, self.buf.len, &self.darwin_seek);
const err = posix.getErrno(result);
if (err > 0) {
switch (err) {
- posix.EBADF, posix.EFAULT, posix.ENOTDIR => unreachable,
+ posix.EBADF,
+ posix.EFAULT,
+ posix.ENOTDIR => unreachable,
posix.EINVAL => {
self.buf = try self.allocator.realloc(u8, self.buf, self.buf.len * 2);
continue;
@@ -1202,14 +1266,13 @@ pub const Dir = struct {
else => return unexpectedErrorPosix(err),
}
}
- if (result == 0)
- return null;
+ if (result == 0) return null;
self.index = 0;
self.end_index = result;
break;
}
}
- const darwin_entry = @ptrCast(& align(1) posix.dirent, &self.buf[self.index]);
+ const darwin_entry = @ptrCast(&align(1) posix.dirent, &self.buf[self.index]);
const next_index = self.index + darwin_entry.d_reclen;
self.index = next_index;
@@ -1254,7 +1317,9 @@ pub const Dir = struct {
const err = posix.getErrno(result);
if (err > 0) {
switch (err) {
- posix.EBADF, posix.EFAULT, posix.ENOTDIR => unreachable,
+ posix.EBADF,
+ posix.EFAULT,
+ posix.ENOTDIR => unreachable,
posix.EINVAL => {
self.buf = try self.allocator.realloc(u8, self.buf, self.buf.len * 2);
continue;
@@ -1262,14 +1327,13 @@ pub const Dir = struct {
else => return unexpectedErrorPosix(err),
}
}
- if (result == 0)
- return null;
+ if (result == 0) return null;
self.index = 0;
self.end_index = result;
break;
}
}
- const linux_entry = @ptrCast(& align(1) posix.dirent, &self.buf[self.index]);
+ const linux_entry = @ptrCast(&align(1) posix.dirent, &self.buf[self.index]);
const next_index = self.index + linux_entry.d_reclen;
self.index = next_index;
@@ -1338,7 +1402,8 @@ pub fn readLink(allocator: &Allocator, pathname: []const u8) ![]u8 {
if (err > 0) {
return switch (err) {
posix.EACCES => error.AccessDenied,
- posix.EFAULT, posix.EINVAL => unreachable,
+ posix.EFAULT,
+ posix.EINVAL => unreachable,
posix.EIO => error.FileSystem,
posix.ELOOP => error.SymLinkLoop,
posix.ENAMETOOLONG => error.NameTooLong,
@@ -1356,50 +1421,6 @@ pub fn readLink(allocator: &Allocator, pathname: []const u8) ![]u8 {
}
}
-pub fn sleep(seconds: usize, nanoseconds: usize) void {
- switch(builtin.os) {
- Os.linux, Os.macosx, Os.ios => {
- posixSleep(u63(seconds), u63(nanoseconds));
- },
- Os.windows => {
- const milliseconds = seconds * 1000 + nanoseconds / 1000000;
- windows.Sleep(windows.DWORD(milliseconds));
- },
- else => @compileError("Unsupported OS"),
- }
-}
-
-const u63 = @IntType(false, 63);
-pub fn posixSleep(seconds: u63, nanoseconds: u63) void {
- var req = posix.timespec {
- .tv_sec = seconds,
- .tv_nsec = nanoseconds,
- };
- var rem: posix.timespec = undefined;
- while (true) {
- const ret_val = posix.nanosleep(&req, &rem);
- const err = posix.getErrno(ret_val);
- if (err == 0) return;
- switch (err) {
- posix.EFAULT => unreachable,
- posix.EINVAL => {
- // Sometimes Darwin returns EINVAL for no reason.
- // We treat it as a spurious wakeup.
- return;
- },
- posix.EINTR => {
- req = rem;
- continue;
- },
- else => return,
- }
- }
-}
-
-test "os.sleep" {
- sleep(0, 1);
-}
-
pub fn posix_setuid(uid: u32) !void {
const err = posix.getErrno(posix.setuid(uid));
if (err == 0) return;
@@ -1475,8 +1496,7 @@ pub const ArgIteratorPosix = struct {
}
pub fn next(self: &ArgIteratorPosix) ?[]const u8 {
- if (self.index == self.count)
- return null;
+ if (self.index == self.count) return null;
const s = raw[self.index];
self.index += 1;
@@ -1484,8 +1504,7 @@ pub const ArgIteratorPosix = struct {
}
pub fn skip(self: &ArgIteratorPosix) bool {
- if (self.index == self.count)
- return false;
+ if (self.index == self.count) return false;
self.index += 1;
return true;
@@ -1503,7 +1522,9 @@ pub const ArgIteratorWindows = struct {
quote_count: usize,
seen_quote_count: usize,
- pub const NextError = error{OutOfMemory};
+ pub const NextError = error {
+ OutOfMemory,
+ };
pub fn init() ArgIteratorWindows {
return initWithCmdLine(windows.GetCommandLineA());
@@ -1526,7 +1547,8 @@ pub const ArgIteratorWindows = struct {
const byte = self.cmd_line[self.index];
switch (byte) {
0 => return null,
- ' ', '\t' => continue,
+ ' ',
+ '\t' => continue,
else => break,
}
}
@@ -1540,7 +1562,8 @@ pub const ArgIteratorWindows = struct {
const byte = self.cmd_line[self.index];
switch (byte) {
0 => return false,
- ' ', '\t' => continue,
+ ' ',
+ '\t' => continue,
else => break,
}
}
@@ -1559,7 +1582,8 @@ pub const ArgIteratorWindows = struct {
'\\' => {
backslash_count += 1;
},
- ' ', '\t' => {
+ ' ',
+ '\t' => {
if (self.seen_quote_count % 2 == 0 or self.seen_quote_count == self.quote_count) {
return true;
}
@@ -1599,7 +1623,8 @@ pub const ArgIteratorWindows = struct {
'\\' => {
backslash_count += 1;
},
- ' ', '\t' => {
+ ' ',
+ '\t' => {
try self.emitBackslashes(&buf, backslash_count);
backslash_count = 0;
if (self.seen_quote_count % 2 == 1 and self.seen_quote_count != self.quote_count) {
@@ -1643,7 +1668,6 @@ pub const ArgIteratorWindows = struct {
}
}
}
-
};
pub const ArgIterator = struct {
@@ -1658,7 +1682,7 @@ pub const ArgIterator = struct {
}
pub const NextError = ArgIteratorWindows.NextError;
-
+
/// You must free the returned memory when done.
pub fn next(self: &ArgIterator, allocator: &Allocator) ?(NextError![]u8) {
if (builtin.os == Os.windows) {
@@ -1733,15 +1757,47 @@ pub fn argsFree(allocator: &mem.Allocator, args_alloc: []const []u8) void {
}
test "windows arg parsing" {
- testWindowsCmdLine(c"a b\tc d", [][]const u8{"a", "b", "c", "d"});
- testWindowsCmdLine(c"\"abc\" d e", [][]const u8{"abc", "d", "e"});
- testWindowsCmdLine(c"a\\\\\\b d\"e f\"g h", [][]const u8{"a\\\\\\b", "de fg", "h"});
- testWindowsCmdLine(c"a\\\\\\\"b c d", [][]const u8{"a\\\"b", "c", "d"});
- testWindowsCmdLine(c"a\\\\\\\\\"b c\" d e", [][]const u8{"a\\\\b c", "d", "e"});
- testWindowsCmdLine(c"a b\tc \"d f", [][]const u8{"a", "b", "c", "\"d", "f"});
-
- testWindowsCmdLine(c"\".\\..\\zig-cache\\build\" \"bin\\zig.exe\" \".\\..\" \".\\..\\zig-cache\" \"--help\"",
- [][]const u8{".\\..\\zig-cache\\build", "bin\\zig.exe", ".\\..", ".\\..\\zig-cache", "--help"});
+ testWindowsCmdLine(c"a b\tc d", [][]const u8 {
+ "a",
+ "b",
+ "c",
+ "d",
+ });
+ testWindowsCmdLine(c"\"abc\" d e", [][]const u8 {
+ "abc",
+ "d",
+ "e",
+ });
+ testWindowsCmdLine(c"a\\\\\\b d\"e f\"g h", [][]const u8 {
+ "a\\\\\\b",
+ "de fg",
+ "h",
+ });
+ testWindowsCmdLine(c"a\\\\\\\"b c d", [][]const u8 {
+ "a\\\"b",
+ "c",
+ "d",
+ });
+ testWindowsCmdLine(c"a\\\\\\\\\"b c\" d e", [][]const u8 {
+ "a\\\\b c",
+ "d",
+ "e",
+ });
+ testWindowsCmdLine(c"a b\tc \"d f", [][]const u8 {
+ "a",
+ "b",
+ "c",
+ "\"d",
+ "f",
+ });
+
+ testWindowsCmdLine(c"\".\\..\\zig-cache\\build\" \"bin\\zig.exe\" \".\\..\" \".\\..\\zig-cache\" \"--help\"", [][]const u8 {
+ ".\\..\\zig-cache\\build",
+ "bin\\zig.exe",
+ ".\\..",
+ ".\\..\\zig-cache",
+ "--help",
+ });
}
fn testWindowsCmdLine(input_cmd_line: &const u8, expected_args: []const []const u8) void {
@@ -1753,27 +1809,16 @@ fn testWindowsCmdLine(input_cmd_line: &const u8, expected_args: []const []const
assert(it.next(debug.global_allocator) == null);
}
-test "std.os" {
- _ = @import("child_process.zig");
- _ = @import("darwin_errno.zig");
- _ = @import("darwin.zig");
- _ = @import("get_user_id.zig");
- _ = @import("linux/errno.zig");
- //_ = @import("linux_i386.zig");
- _ = @import("linux/x86_64.zig");
- _ = @import("linux/index.zig");
- _ = @import("path.zig");
- _ = @import("windows/index.zig");
- _ = @import("test.zig");
-}
-
-
// TODO make this a build variable that you can set
const unexpected_error_tracing = false;
+const UnexpectedError = error {
+ /// The Operating System returned an undocumented error code.
+ Unexpected,
+};
/// Call this when you made a syscall or something that sets errno
/// and you get an unexpected error.
-pub fn unexpectedErrorPosix(errno: usize) (error{Unexpected}) {
+pub fn unexpectedErrorPosix(errno: usize) UnexpectedError {
if (unexpected_error_tracing) {
debug.warn("unexpected errno: {}\n", errno);
debug.dumpCurrentStackTrace(null);
@@ -1783,7 +1828,7 @@ pub fn unexpectedErrorPosix(errno: usize) (error{Unexpected}) {
/// Call this when you made a windows DLL call or something that does SetLastError
/// and you get an unexpected error.
-pub fn unexpectedErrorWindows(err: windows.DWORD) (error{Unexpected}) {
+pub fn unexpectedErrorWindows(err: windows.DWORD) UnexpectedError {
if (unexpected_error_tracing) {
debug.warn("unexpected GetLastError(): {}\n", err);
debug.dumpCurrentStackTrace(null);
@@ -1799,7 +1844,8 @@ pub fn openSelfExe() !os.File {
var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
return os.File.openRead(&fixed_allocator.allocator, proc_file_path);
},
- Os.macosx, Os.ios => {
+ Os.macosx,
+ Os.ios => {
var fixed_buffer_mem: [darwin.PATH_MAX * 2]u8 = undefined;
var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
const self_exe_path = try selfExePath(&fixed_allocator.allocator);
@@ -1811,8 +1857,10 @@ pub fn openSelfExe() !os.File {
test "openSelfExe" {
switch (builtin.os) {
- Os.linux, Os.macosx, Os.ios => (try openSelfExe()).close(),
- else => return, // Unsupported OS.
+ Os.linux,
+ Os.macosx,
+ Os.ios => (try openSelfExe()).close(),
+ else => return, // Unsupported OS.
}
}
@@ -1849,7 +1897,8 @@ pub fn selfExePath(allocator: &mem.Allocator) ![]u8 {
try out_path.resize(new_len);
}
},
- Os.macosx, Os.ios => {
+ Os.macosx,
+ Os.ios => {
var u32_len: u32 = 0;
const ret1 = c._NSGetExecutablePath(undefined, &u32_len);
assert(ret1 != 0);
@@ -1877,7 +1926,9 @@ pub fn selfExeDirPath(allocator: &mem.Allocator) ![]u8 {
const dir = path.dirname(full_exe_path);
return allocator.shrink(u8, full_exe_path, dir.len);
},
- Os.windows, Os.macosx, Os.ios => {
+ Os.windows,
+ Os.macosx,
+ Os.ios => {
const self_exe_path = try selfExePath(allocator);
errdefer allocator.free(self_exe_path);
const dirname = os.path.dirname(self_exe_path);
@@ -1898,3 +1949,712 @@ pub fn isTty(handle: FileHandle) bool {
}
}
}
+
+pub const PosixSocketError = error {
+    /// Permission to create a socket of the specified type and/or
+    /// protocol is denied.
+    PermissionDenied,
+
+    /// The implementation does not support the specified address family.
+    AddressFamilyNotSupported,
+
+    /// Unknown protocol, or protocol family not available.
+    ProtocolFamilyNotAvailable,
+
+    /// The per-process limit on the number of open file descriptors has been reached.
+    ProcessFdQuotaExceeded,
+
+    /// The system-wide limit on the total number of open files has been reached.
+    SystemFdQuotaExceeded,
+
+    /// Insufficient memory is available. The socket cannot be created until sufficient
+    /// resources are freed.
+    SystemResources,
+
+    /// The protocol type or the specified protocol is not supported within this domain.
+    ProtocolNotSupported,
+};
+
+/// Create an endpoint for communication (socket(2)) and return the new
+/// socket's file descriptor. Errno values not covered by PosixSocketError
+/// are funneled through unexpectedErrorPosix.
+pub fn posixSocket(domain: u32, socket_type: u32, protocol: u32) !i32 {
+    const rc = posix.socket(domain, socket_type, protocol);
+    const err = posix.getErrno(rc);
+    switch (err) {
+        0 => return i32(rc),
+        posix.EACCES => return PosixSocketError.PermissionDenied,
+        posix.EAFNOSUPPORT => return PosixSocketError.AddressFamilyNotSupported,
+        posix.EINVAL => return PosixSocketError.ProtocolFamilyNotAvailable,
+        posix.EMFILE => return PosixSocketError.ProcessFdQuotaExceeded,
+        posix.ENFILE => return PosixSocketError.SystemFdQuotaExceeded,
+        posix.ENOBUFS,
+        posix.ENOMEM => return PosixSocketError.SystemResources,
+        posix.EPROTONOSUPPORT => return PosixSocketError.ProtocolNotSupported,
+        else => return unexpectedErrorPosix(err),
+    }
+}
+
+pub const PosixBindError = error {
+    /// The address is protected, and the user is not the superuser.
+    /// For UNIX domain sockets: Search permission is denied on a component
+    /// of the path prefix.
+    AccessDenied,
+
+    /// The given address is already in use, or in the case of Internet domain sockets,
+    /// The port number was specified as zero in the socket
+    /// address structure, but, upon attempting to bind to an ephemeral port, it was
+    /// determined that all port numbers in the ephemeral port range are currently in
+    /// use. See the discussion of /proc/sys/net/ipv4/ip_local_port_range ip(7).
+    AddressInUse,
+
+    /// sockfd is not a valid file descriptor.
+    InvalidFileDescriptor,
+
+    /// The socket is already bound to an address, or addrlen is wrong, or addr is not
+    /// a valid address for this socket's domain.
+    InvalidSocketOrAddress,
+
+    /// The file descriptor sockfd does not refer to a socket.
+    FileDescriptorNotASocket,
+
+    /// A nonexistent interface was requested or the requested address was not local.
+    AddressNotAvailable,
+
+    /// addr points outside the user's accessible address space.
+    PageFault,
+
+    /// Too many symbolic links were encountered in resolving addr.
+    SymLinkLoop,
+
+    /// addr is too long.
+    NameTooLong,
+
+    /// A component in the directory prefix of the socket pathname does not exist.
+    FileNotFound,
+
+    /// Insufficient kernel memory was available.
+    SystemResources,
+
+    /// A component of the path prefix is not a directory.
+    NotDir,
+
+    /// The socket inode would reside on a read-only filesystem.
+    ReadOnlyFileSystem,
+
+    Unexpected,
+};
+
+/// Bind a name to a socket (bind(2)).
+/// addr is `&const T` where T is one of the sockaddr;
+/// the length passed to the kernel is always @sizeOf(posix.sockaddr).
+pub fn posixBind(fd: i32, addr: &const posix.sockaddr) PosixBindError!void {
+    const rc = posix.bind(fd, addr, @sizeOf(posix.sockaddr));
+    const err = posix.getErrno(rc);
+    switch (err) {
+        0 => return,
+        posix.EACCES => return PosixBindError.AccessDenied,
+        posix.EADDRINUSE => return PosixBindError.AddressInUse,
+        posix.EBADF => return PosixBindError.InvalidFileDescriptor,
+        posix.EINVAL => return PosixBindError.InvalidSocketOrAddress,
+        posix.ENOTSOCK => return PosixBindError.FileDescriptorNotASocket,
+        posix.EADDRNOTAVAIL => return PosixBindError.AddressNotAvailable,
+        posix.EFAULT => return PosixBindError.PageFault,
+        posix.ELOOP => return PosixBindError.SymLinkLoop,
+        posix.ENAMETOOLONG => return PosixBindError.NameTooLong,
+        posix.ENOENT => return PosixBindError.FileNotFound,
+        posix.ENOMEM => return PosixBindError.SystemResources,
+        posix.ENOTDIR => return PosixBindError.NotDir,
+        posix.EROFS => return PosixBindError.ReadOnlyFileSystem,
+        else => return unexpectedErrorPosix(err),
+    }
+}
+
+/// Errors returned by posixListen. Declared `pub` (like every sibling error
+/// set in this file) so that external callers of the public posixListen can
+/// name the set when switching on its errors.
+pub const PosixListenError = error {
+    /// Another socket is already listening on the same port.
+    /// For Internet domain sockets, the socket referred to by sockfd had not previously
+    /// been bound to an address and, upon attempting to bind it to an ephemeral port, it
+    /// was determined that all port numbers in the ephemeral port range are currently in
+    /// use. See the discussion of /proc/sys/net/ipv4/ip_local_port_range in ip(7).
+    AddressInUse,
+
+    /// The argument sockfd is not a valid file descriptor.
+    InvalidFileDescriptor,
+
+    /// The file descriptor sockfd does not refer to a socket.
+    FileDescriptorNotASocket,
+
+    /// The socket is not of a type that supports the listen() operation.
+    OperationNotSupported,
+
+    Unexpected,
+};
+
+/// Mark the socket sockfd as a passive socket that will accept incoming
+/// connections (listen(2)). backlog bounds the queue of pending connections.
+pub fn posixListen(sockfd: i32, backlog: u32) PosixListenError!void {
+    const rc = posix.listen(sockfd, backlog);
+    const err = posix.getErrno(rc);
+    switch (err) {
+        0 => return,
+        posix.EADDRINUSE => return PosixListenError.AddressInUse,
+        posix.EBADF => return PosixListenError.InvalidFileDescriptor,
+        posix.ENOTSOCK => return PosixListenError.FileDescriptorNotASocket,
+        posix.EOPNOTSUPP => return PosixListenError.OperationNotSupported,
+        else => return unexpectedErrorPosix(err),
+    }
+}
+
+pub const PosixAcceptError = error {
+    /// The socket is marked nonblocking and no connections are present to be accepted.
+    WouldBlock,
+
+    /// sockfd is not an open file descriptor.
+    FileDescriptorClosed,
+
+    ConnectionAborted,
+
+    /// The addr argument is not in a writable part of the user address space.
+    PageFault,
+
+    /// Socket is not listening for connections, or addrlen is invalid (e.g., is negative),
+    /// or invalid value in flags.
+    InvalidSyscall,
+
+    /// The per-process limit on the number of open file descriptors has been reached.
+    ProcessFdQuotaExceeded,
+
+    /// The system-wide limit on the total number of open files has been reached.
+    SystemFdQuotaExceeded,
+
+    /// Not enough free memory. This often means that the memory allocation is limited
+    /// by the socket buffer limits, not by the system memory.
+    SystemResources,
+
+    /// The file descriptor sockfd does not refer to a socket.
+    FileDescriptorNotASocket,
+
+    /// The referenced socket is not of type SOCK_STREAM.
+    OperationNotSupported,
+
+    ProtocolFailure,
+
+    /// Firewall rules forbid connection.
+    BlockedByFirewall,
+
+    Unexpected,
+};
+
+/// Accept a connection on the listening socket fd (accept4(2)), retrying on
+/// EINTR. On success returns the new connection's file descriptor; addr is
+/// passed to the kernel to receive the peer address.
+pub fn posixAccept(fd: i32, addr: &posix.sockaddr, flags: u32) PosixAcceptError!i32 {
+    while (true) {
+        var sockaddr_size = u32(@sizeOf(posix.sockaddr));
+        const rc = posix.accept4(fd, addr, &sockaddr_size, flags);
+        const err = posix.getErrno(rc);
+        // Note: prong order in a Zig switch is irrelevant; the `else` prong
+        // only catches values not matched by any listed prong below.
+        switch (err) {
+            0 => return i32(rc),
+            posix.EINTR => continue,
+            else => return unexpectedErrorPosix(err),
+
+            posix.EAGAIN => return PosixAcceptError.WouldBlock,
+            posix.EBADF => return PosixAcceptError.FileDescriptorClosed,
+            posix.ECONNABORTED => return PosixAcceptError.ConnectionAborted,
+            posix.EFAULT => return PosixAcceptError.PageFault,
+            posix.EINVAL => return PosixAcceptError.InvalidSyscall,
+            posix.EMFILE => return PosixAcceptError.ProcessFdQuotaExceeded,
+            posix.ENFILE => return PosixAcceptError.SystemFdQuotaExceeded,
+            posix.ENOBUFS,
+            posix.ENOMEM => return PosixAcceptError.SystemResources,
+            posix.ENOTSOCK => return PosixAcceptError.FileDescriptorNotASocket,
+            posix.EOPNOTSUPP => return PosixAcceptError.OperationNotSupported,
+            posix.EPROTO => return PosixAcceptError.ProtocolFailure,
+            posix.EPERM => return PosixAcceptError.BlockedByFirewall,
+        }
+    }
+}
+
+pub const LinuxEpollCreateError = error {
+    /// Invalid value specified in flags.
+    InvalidSyscall,
+
+    /// The per-user limit on the number of epoll instances imposed by
+    /// /proc/sys/fs/epoll/max_user_instances was encountered. See epoll(7) for further
+    /// details.
+    /// Or, The per-process limit on the number of open file descriptors has been reached.
+    ProcessFdQuotaExceeded,
+
+    /// The system-wide limit on the total number of open files has been reached.
+    SystemFdQuotaExceeded,
+
+    /// There was insufficient memory to create the kernel object.
+    SystemResources,
+
+    Unexpected,
+};
+
+/// Create a new epoll instance (epoll_create1(2)) and return its file descriptor.
+pub fn linuxEpollCreate(flags: u32) LinuxEpollCreateError!i32 {
+    const rc = posix.epoll_create1(flags);
+    const err = posix.getErrno(rc);
+    switch (err) {
+        0 => return i32(rc),
+        else => return unexpectedErrorPosix(err),
+
+        posix.EINVAL => return LinuxEpollCreateError.InvalidSyscall,
+        posix.EMFILE => return LinuxEpollCreateError.ProcessFdQuotaExceeded,
+        posix.ENFILE => return LinuxEpollCreateError.SystemFdQuotaExceeded,
+        posix.ENOMEM => return LinuxEpollCreateError.SystemResources,
+    }
+}
+
+pub const LinuxEpollCtlError = error {
+    /// epfd or fd is not a valid file descriptor.
+    InvalidFileDescriptor,
+
+    /// op was EPOLL_CTL_ADD, and the supplied file descriptor fd is already registered
+    /// with this epoll instance.
+    FileDescriptorAlreadyPresentInSet,
+
+    /// epfd is not an epoll file descriptor, or fd is the same as epfd, or the requested
+    /// operation op is not supported by this interface, or
+    /// An invalid event type was specified along with EPOLLEXCLUSIVE in events, or
+    /// op was EPOLL_CTL_MOD and events included EPOLLEXCLUSIVE, or
+    /// op was EPOLL_CTL_MOD and the EPOLLEXCLUSIVE flag has previously been applied to
+    /// this epfd, fd pair, or
+    /// EPOLLEXCLUSIVE was specified in event and fd refers to an epoll instance.
+    InvalidSyscall,
+
+    /// fd refers to an epoll instance and this EPOLL_CTL_ADD operation would result in a
+    /// circular loop of epoll instances monitoring one another.
+    OperationCausesCircularLoop,
+
+    /// op was EPOLL_CTL_MOD or EPOLL_CTL_DEL, and fd is not registered with this epoll
+    /// instance.
+    FileDescriptorNotRegistered,
+
+    /// There was insufficient memory to handle the requested op control operation.
+    SystemResources,
+
+    /// The limit imposed by /proc/sys/fs/epoll/max_user_watches was encountered while
+    /// trying to register (EPOLL_CTL_ADD) a new file descriptor on an epoll instance.
+    /// See epoll(7) for further details.
+    UserResourceLimitReached,
+
+    /// The target file fd does not support epoll. This error can occur if fd refers to,
+    /// for example, a regular file or a directory.
+    FileDescriptorIncompatibleWithEpoll,
+
+    Unexpected,
+};
+
+/// Add, modify, or remove (per op) the entry for fd in the epoll instance epfd
+/// (epoll_ctl(2)).
+pub fn linuxEpollCtl(epfd: i32, op: u32, fd: i32, event: &linux.epoll_event) LinuxEpollCtlError!void {
+    const rc = posix.epoll_ctl(epfd, op, fd, event);
+    const err = posix.getErrno(rc);
+    switch (err) {
+        0 => return,
+        else => return unexpectedErrorPosix(err),
+
+        posix.EBADF => return LinuxEpollCtlError.InvalidFileDescriptor,
+        posix.EEXIST => return LinuxEpollCtlError.FileDescriptorAlreadyPresentInSet,
+        posix.EINVAL => return LinuxEpollCtlError.InvalidSyscall,
+        posix.ELOOP => return LinuxEpollCtlError.OperationCausesCircularLoop,
+        posix.ENOENT => return LinuxEpollCtlError.FileDescriptorNotRegistered,
+        posix.ENOMEM => return LinuxEpollCtlError.SystemResources,
+        posix.ENOSPC => return LinuxEpollCtlError.UserResourceLimitReached,
+        posix.EPERM => return LinuxEpollCtlError.FileDescriptorIncompatibleWithEpoll,
+    }
+}
+
+/// Wait for events on the epoll instance epfd (epoll_wait(2)), retrying on
+/// EINTR. Returns the syscall's return value (per epoll_wait(2), the number
+/// of ready events). The other errno values indicate programmer error
+/// (bad fd, bad pointer, invalid arguments) and are treated as unreachable.
+pub fn linuxEpollWait(epfd: i32, events: []linux.epoll_event, timeout: i32) usize {
+    while (true) {
+        const rc = posix.epoll_wait(epfd, events.ptr, u32(events.len), timeout);
+        const err = posix.getErrno(rc);
+        switch (err) {
+            0 => return rc,
+            posix.EINTR => continue,
+            posix.EBADF => unreachable,
+            posix.EFAULT => unreachable,
+            posix.EINVAL => unreachable,
+            else => unreachable,
+        }
+    }
+}
+
+pub const PosixGetSockNameError = error {
+    /// Insufficient resources were available in the system to perform the operation.
+    SystemResources,
+
+    Unexpected,
+};
+
+/// Return the current address to which the socket sockfd is bound
+/// (getsockname(2)). Bad fd / bad pointer / invalid argument errnos are
+/// programmer error and treated as unreachable.
+pub fn posixGetSockName(sockfd: i32) PosixGetSockNameError!posix.sockaddr {
+    var addr: posix.sockaddr = undefined;
+    var addrlen: posix.socklen_t = @sizeOf(posix.sockaddr);
+    const rc = posix.getsockname(sockfd, &addr, &addrlen);
+    const err = posix.getErrno(rc);
+    switch (err) {
+        0 => return addr,
+        else => return unexpectedErrorPosix(err),
+
+        posix.EBADF => unreachable,
+        posix.EFAULT => unreachable,
+        posix.EINVAL => unreachable,
+        posix.ENOTSOCK => unreachable,
+        posix.ENOBUFS => return PosixGetSockNameError.SystemResources,
+    }
+}
+
+pub const PosixConnectError = error {
+    /// For UNIX domain sockets, which are identified by pathname: Write permission is denied on the socket
+    /// file, or search permission is denied for one of the directories in the path prefix.
+    /// or
+    /// The user tried to connect to a broadcast address without having the socket broadcast flag enabled or
+    /// the connection request failed because of a local firewall rule.
+    PermissionDenied,
+
+    /// Local address is already in use.
+    AddressInUse,
+
+    /// (Internet domain sockets) The socket referred to by sockfd had not previously been bound to an
+    /// address and, upon attempting to bind it to an ephemeral port, it was determined that all port numbers
+    /// in the ephemeral port range are currently in use. See the discussion of
+    /// /proc/sys/net/ipv4/ip_local_port_range in ip(7).
+    AddressNotAvailable,
+
+    /// The passed address didn't have the correct address family in its sa_family field.
+    AddressFamilyNotSupported,
+
+    /// Insufficient entries in the routing cache.
+    SystemResources,
+
+    /// A connect() on a stream socket found no one listening on the remote address.
+    ConnectionRefused,
+
+    /// Network is unreachable.
+    NetworkUnreachable,
+
+    /// Timeout while attempting connection. The server may be too busy to accept new connections. Note
+    /// that for IP sockets the timeout may be very long when syncookies are enabled on the server.
+    ConnectionTimedOut,
+
+    Unexpected,
+};
+
+/// Initiate a connection on the socket sockfd (connect(2)), retrying on EINTR.
+/// Intended for blocking sockets: EINPROGRESS is treated as unreachable here;
+/// for nonblocking sockets use posixConnectAsync instead.
+pub fn posixConnect(sockfd: i32, sockaddr: &const posix.sockaddr) PosixConnectError!void {
+    while (true) {
+        const rc = posix.connect(sockfd, sockaddr, @sizeOf(posix.sockaddr));
+        const err = posix.getErrno(rc);
+        switch (err) {
+            0 => return,
+            else => return unexpectedErrorPosix(err),
+
+            posix.EACCES => return PosixConnectError.PermissionDenied,
+            posix.EPERM => return PosixConnectError.PermissionDenied,
+            posix.EADDRINUSE => return PosixConnectError.AddressInUse,
+            posix.EADDRNOTAVAIL => return PosixConnectError.AddressNotAvailable,
+            posix.EAFNOSUPPORT => return PosixConnectError.AddressFamilyNotSupported,
+            posix.EAGAIN => return PosixConnectError.SystemResources,
+            posix.EALREADY => unreachable, // The socket is nonblocking and a previous connection attempt has not yet been completed.
+            posix.EBADF => unreachable, // sockfd is not a valid open file descriptor.
+            posix.ECONNREFUSED => return PosixConnectError.ConnectionRefused,
+            posix.EFAULT => unreachable, // The socket structure address is outside the user's address space.
+            posix.EINPROGRESS => unreachable, // The socket is nonblocking and the connection cannot be completed immediately.
+            posix.EINTR => continue,
+            posix.EISCONN => unreachable, // The socket is already connected.
+            posix.ENETUNREACH => return PosixConnectError.NetworkUnreachable,
+            posix.ENOTSOCK => unreachable, // The file descriptor sockfd does not refer to a socket.
+            posix.EPROTOTYPE => unreachable, // The socket type does not support the requested communications protocol.
+            posix.ETIMEDOUT => return PosixConnectError.ConnectionTimedOut,
+        }
+    }
+}
+
+/// Same as posixConnect except it is for blocking socket file descriptors.
+/// It expects to receive EINPROGRESS.
+pub fn posixConnectAsync(sockfd: i32, sockaddr: &const posix.sockaddr) PosixConnectError!void {
+    while (true) {
+        const rc = posix.connect(sockfd, sockaddr, @sizeOf(posix.sockaddr));
+        const err = posix.getErrno(rc);
+        switch (err) {
+            0,
+            posix.EINPROGRESS => return,
+            else => return unexpectedErrorPosix(err),
+
+            posix.EACCES => return PosixConnectError.PermissionDenied,
+            posix.EPERM => return PosixConnectError.PermissionDenied,
+            posix.EADDRINUSE => return PosixConnectError.AddressInUse,
+            posix.EADDRNOTAVAIL => return PosixConnectError.AddressNotAvailable,
+            posix.EAFNOSUPPORT => return PosixConnectError.AddressFamilyNotSupported,
+            posix.EAGAIN => return PosixConnectError.SystemResources,
+            posix.EALREADY => unreachable, // The socket is nonblocking and a previous connection attempt has not yet been completed.
+            posix.EBADF => unreachable, // sockfd is not a valid open file descriptor.
+            posix.ECONNREFUSED => return PosixConnectError.ConnectionRefused,
+            posix.EFAULT => unreachable, // The socket structure address is outside the user's address space.
+            posix.EINTR => continue,
+            posix.EISCONN => unreachable, // The socket is already connected.
+            posix.ENETUNREACH => return PosixConnectError.NetworkUnreachable,
+            posix.ENOTSOCK => unreachable, // The file descriptor sockfd does not refer to a socket.
+            posix.EPROTOTYPE => unreachable, // The socket type does not support the requested communications protocol.
+            posix.ETIMEDOUT => return PosixConnectError.ConnectionTimedOut,
+        }
+    }
+}
+
+/// Retrieve and clear the pending error status of sockfd via
+/// getsockopt(SO_ERROR), as done after an asynchronous connect
+/// (see posixConnectAsync). Returns successfully when no error is pending;
+/// otherwise maps the queued errno to a PosixConnectError.
+pub fn posixGetSockOptConnectError(sockfd: i32) PosixConnectError!void {
+    var err_code: i32 = undefined;
+    var size: u32 = @sizeOf(i32);
+    const rc = posix.getsockopt(sockfd, posix.SOL_SOCKET, posix.SO_ERROR, @ptrCast(&u8, &err_code), &size);
+    assert(size == 4);
+    const err = posix.getErrno(rc);
+    switch (err) {
+        0 => switch (err_code) {
+            0 => return,
+            // BUG FIX: this inner switch dispatches on err_code (the queued
+            // socket error); `err` is necessarily 0 on this path, so reporting
+            // it would hide the real error. Report err_code instead.
+            else => return unexpectedErrorPosix(usize(err_code)),
+
+            posix.EACCES => return PosixConnectError.PermissionDenied,
+            posix.EPERM => return PosixConnectError.PermissionDenied,
+            posix.EADDRINUSE => return PosixConnectError.AddressInUse,
+            posix.EADDRNOTAVAIL => return PosixConnectError.AddressNotAvailable,
+            posix.EAFNOSUPPORT => return PosixConnectError.AddressFamilyNotSupported,
+            posix.EAGAIN => return PosixConnectError.SystemResources,
+            posix.EALREADY => unreachable, // The socket is nonblocking and a previous connection attempt has not yet been completed.
+            posix.EBADF => unreachable, // sockfd is not a valid open file descriptor.
+            posix.ECONNREFUSED => return PosixConnectError.ConnectionRefused,
+            posix.EFAULT => unreachable, // The socket structure address is outside the user's address space.
+            posix.EISCONN => unreachable, // The socket is already connected.
+            posix.ENETUNREACH => return PosixConnectError.NetworkUnreachable,
+            posix.ENOTSOCK => unreachable, // The file descriptor sockfd does not refer to a socket.
+            posix.EPROTOTYPE => unreachable, // The socket type does not support the requested communications protocol.
+            posix.ETIMEDOUT => return PosixConnectError.ConnectionTimedOut,
+        },
+        else => return unexpectedErrorPosix(err),
+        posix.EBADF => unreachable, // The argument sockfd is not a valid file descriptor.
+        posix.EFAULT => unreachable, // The address pointed to by optval or optlen is not in a valid part of the process address space.
+        posix.EINVAL => unreachable,
+        posix.ENOPROTOOPT => unreachable, // The option is unknown at the level indicated.
+        posix.ENOTSOCK => unreachable, // The file descriptor sockfd does not refer to a socket.
+    }
+}
+
+/// Handle to a thread created by spawnThread. The caller must call wait()
+/// exactly once to join the thread and release its resources.
+pub const Thread = struct {
+    data: Data,
+
+    // pthreads is used whenever we are on a posix system and linking libc.
+    pub const use_pthreads = is_posix and builtin.link_libc;
+    // Per-backend bookkeeping: pthread handle, raw clone() pid, or win32 handle,
+    // plus whatever is needed to free the thread's stack/allocation in wait().
+    const Data = if (use_pthreads) struct {
+        handle: c.pthread_t,
+        stack_addr: usize,
+        stack_len: usize,
+    } else switch (builtin.os) {
+        builtin.Os.linux => struct {
+            pid: i32,
+            stack_addr: usize,
+            stack_len: usize,
+        },
+        builtin.Os.windows => struct {
+            handle: windows.HANDLE,
+            alloc_start: &c_void,
+            heap_handle: windows.HANDLE,
+        },
+        else => @compileError("Unsupported OS"),
+    };
+
+    /// Block until the thread finishes, then release its stack/allocation.
+    pub fn wait(self: &const Thread) void {
+        if (use_pthreads) {
+            const err = c.pthread_join(self.data.handle, null);
+            switch (err) {
+                0 => {},
+                posix.EINVAL => unreachable,
+                posix.ESRCH => unreachable,
+                posix.EDEADLK => unreachable,
+                else => unreachable,
+            }
+            assert(posix.munmap(self.data.stack_addr, self.data.stack_len) == 0);
+        } else switch (builtin.os) {
+            builtin.Os.linux => {
+                // The kernel clears data.pid on exit (CLONE_CHILD_CLEARTID in
+                // spawnThread) and wakes the futex; loop until we observe 0.
+                while (true) {
+                    const pid_value = @atomicLoad(i32, &self.data.pid, builtin.AtomicOrder.SeqCst);
+                    if (pid_value == 0) break;
+                    const rc = linux.futex_wait(@ptrToInt(&self.data.pid), linux.FUTEX_WAIT, pid_value, null);
+                    switch (linux.getErrno(rc)) {
+                        0 => continue,
+                        posix.EINTR => continue,
+                        posix.EAGAIN => continue,
+                        else => unreachable,
+                    }
+                }
+                assert(posix.munmap(self.data.stack_addr, self.data.stack_len) == 0);
+            },
+            builtin.Os.windows => {
+                assert(windows.WaitForSingleObject(self.data.handle, windows.INFINITE) == windows.WAIT_OBJECT_0);
+                assert(windows.CloseHandle(self.data.handle) != 0);
+                assert(windows.HeapFree(self.data.heap_handle, 0, self.data.alloc_start) != 0);
+            },
+            else => @compileError("Unsupported OS"),
+        }
+    }
+};
+
+/// Errors that spawnThread can return.
+pub const SpawnThreadError = error {
+    /// A system-imposed limit on the number of threads was encountered.
+    /// There are a number of limits that may trigger this error:
+    /// * the RLIMIT_NPROC soft resource limit (set via setrlimit(2)),
+    ///   which limits the number of processes and threads for a real
+    ///   user ID, was reached;
+    /// * the kernel's system-wide limit on the number of processes and
+    ///   threads, /proc/sys/kernel/threads-max, was reached (see
+    ///   proc(5));
+    /// * the maximum number of PIDs, /proc/sys/kernel/pid_max, was
+    ///   reached (see proc(5)); or
+    /// * the PID limit (pids.max) imposed by the cgroup "process
+    ///   number" (PIDs) controller was reached.
+    ThreadQuotaExceeded,
+
+    /// The kernel cannot allocate sufficient memory to allocate a task structure
+    /// for the child, or to copy those parts of the caller's context that need to
+    /// be copied.
+    SystemResources,
+
+    /// Not enough userland memory to spawn the thread.
+    OutOfMemory,
+
+    Unexpected,
+};
+
+/// Spawn a new thread executing startFn(context).
+/// fn startFn(@typeOf(context)) T
+/// where T is u8, noreturn, void, or !void
+/// caller must call wait on the returned thread
+pub fn spawnThread(context: var, comptime startFn: var) SpawnThreadError!&Thread {
+    // TODO compile-time call graph analysis to determine stack upper bound
+    // https://github.com/zig-lang/zig/issues/157
+    const default_stack_size = 8 * 1024 * 1024;
+
+    const Context = @typeOf(context);
+    comptime assert(@ArgType(@typeOf(startFn), 0) == Context);
+
+    if (builtin.os == builtin.Os.windows) {
+        const WinThread = struct {
+            const OuterContext = struct {
+                thread: Thread,
+                inner: Context,
+            };
+            extern fn threadMain(arg: windows.LPVOID) windows.DWORD {
+                if (@sizeOf(Context) == 0) {
+                    return startFn({});
+                } else {
+                    return startFn(*@ptrCast(&Context, @alignCast(@alignOf(Context), arg)));
+                }
+            }
+        };
+
+        // The Thread struct and a copy of the context live together in one
+        // HeapAlloc'd block; wait() frees it via HeapFree.
+        const heap_handle = windows.GetProcessHeap() ?? return SpawnThreadError.OutOfMemory;
+        const byte_count = @alignOf(WinThread.OuterContext) + @sizeOf(WinThread.OuterContext);
+        const bytes_ptr = windows.HeapAlloc(heap_handle, 0, byte_count) ?? return SpawnThreadError.OutOfMemory;
+        errdefer assert(windows.HeapFree(heap_handle, 0, bytes_ptr) != 0);
+        const bytes = @ptrCast(&u8, bytes_ptr)[0..byte_count];
+        const outer_context = std.heap.FixedBufferAllocator.init(bytes).allocator.create(WinThread.OuterContext) catch unreachable;
+        outer_context.inner = context;
+        outer_context.thread.data.heap_handle = heap_handle;
+        outer_context.thread.data.alloc_start = bytes_ptr;
+
+        const parameter = if (@sizeOf(Context) == 0) null else @ptrCast(&c_void, &outer_context.inner);
+        outer_context.thread.data.handle = windows.CreateThread(null, default_stack_size, WinThread.threadMain, parameter, 0, null) ?? {
+            const err = windows.GetLastError();
+            return switch (err) {
+                else => os.unexpectedErrorWindows(err),
+            };
+        };
+        return &outer_context.thread;
+    }
+
+    const MainFuncs = struct {
+        extern fn linuxThreadMain(ctx_addr: usize) u8 {
+            if (@sizeOf(Context) == 0) {
+                return startFn({});
+            } else {
+                return startFn(*@intToPtr(&const Context, ctx_addr));
+            }
+        }
+        extern fn posixThreadMain(ctx: ?&c_void) ?&c_void {
+            if (@sizeOf(Context) == 0) {
+                _ = startFn({});
+                return null;
+            } else {
+                _ = startFn(*@ptrCast(&const Context, @alignCast(@alignOf(Context), ctx)));
+                return null;
+            }
+        }
+    };
+
+    const MAP_GROWSDOWN = if (builtin.os == builtin.Os.linux) linux.MAP_GROWSDOWN else 0;
+
+    // mmap one region serving as both the thread's stack and the storage for
+    // the copied context and the Thread struct (carved off the high end).
+    const mmap_len = default_stack_size;
+    const stack_addr = posix.mmap(null, mmap_len, posix.PROT_READ | posix.PROT_WRITE, posix.MAP_PRIVATE | posix.MAP_ANONYMOUS | MAP_GROWSDOWN, -1, 0);
+    if (stack_addr == posix.MAP_FAILED) return error.OutOfMemory;
+    errdefer assert(posix.munmap(stack_addr, mmap_len) == 0);
+
+    var stack_end: usize = stack_addr + mmap_len;
+    var arg: usize = undefined;
+    if (@sizeOf(Context) != 0) {
+        // Copy the context to the top of the new stack, suitably aligned.
+        stack_end -= @sizeOf(Context);
+        stack_end -= stack_end % @alignOf(Context);
+        assert(stack_end >= stack_addr);
+        const context_ptr = @alignCast(@alignOf(Context), @intToPtr(&Context, stack_end));
+        *context_ptr = context;
+        arg = stack_end;
+    }
+
+    // Place the Thread struct below the context on the new stack.
+    stack_end -= @sizeOf(Thread);
+    stack_end -= stack_end % @alignOf(Thread);
+    assert(stack_end >= stack_addr);
+    const thread_ptr = @alignCast(@alignOf(Thread), @intToPtr(&Thread, stack_end));
+
+    thread_ptr.data.stack_addr = stack_addr;
+    thread_ptr.data.stack_len = mmap_len;
+
+    if (builtin.os == builtin.Os.windows) {
+        // use windows API directly
+        @compileError("TODO support spawnThread for Windows");
+    } else if (Thread.use_pthreads) {
+        // use pthreads
+        var attr: c.pthread_attr_t = undefined;
+        if (c.pthread_attr_init(&attr) != 0) return SpawnThreadError.SystemResources;
+        defer assert(c.pthread_attr_destroy(&attr) == 0);
+
+        // align to page
+        stack_end -= stack_end % os.page_size;
+        assert(c.pthread_attr_setstack(&attr, @intToPtr(&c_void, stack_addr), stack_end - stack_addr) == 0);
+
+        const err = c.pthread_create(&thread_ptr.data.handle, &attr, MainFuncs.posixThreadMain, @intToPtr(&c_void, arg));
+        switch (err) {
+            0 => return thread_ptr,
+            posix.EAGAIN => return SpawnThreadError.SystemResources,
+            posix.EPERM => unreachable,
+            posix.EINVAL => unreachable,
+            else => return unexpectedErrorPosix(usize(err)),
+        }
+    } else if (builtin.os == builtin.Os.linux) {
+        // use linux API directly. TODO use posix.CLONE_SETTLS and initialize thread local storage correctly
+        // CLONE_PARENT_SETTID / CLONE_CHILD_CLEARTID both point at data.pid so
+        // Thread.wait() can futex-wait on it until the kernel zeroes it.
+        const flags = posix.CLONE_VM | posix.CLONE_FS | posix.CLONE_FILES | posix.CLONE_SIGHAND | posix.CLONE_THREAD | posix.CLONE_SYSVSEM | posix.CLONE_PARENT_SETTID | posix.CLONE_CHILD_CLEARTID | posix.CLONE_DETACHED;
+        const newtls: usize = 0;
+        const rc = posix.clone(MainFuncs.linuxThreadMain, stack_end, flags, arg, &thread_ptr.data.pid, newtls, &thread_ptr.data.pid);
+        const err = posix.getErrno(rc);
+        switch (err) {
+            0 => return thread_ptr,
+            posix.EAGAIN => return SpawnThreadError.ThreadQuotaExceeded,
+            posix.EINVAL => unreachable,
+            posix.ENOMEM => return SpawnThreadError.SystemResources,
+            posix.ENOSPC => unreachable,
+            posix.EPERM => unreachable,
+            posix.EUSERS => unreachable,
+            else => return unexpectedErrorPosix(err),
+        }
+    } else {
+        @compileError("Unsupported OS");
+    }
+}
+
+/// Wait for the process pid to change state (waitpid(2) with no options),
+/// retrying on EINTR, and return the raw status value.
+pub fn posixWait(pid: i32) i32 {
+    var status: i32 = undefined;
+    while (true) {
+        const err = posix.getErrno(posix.waitpid(pid, &status, 0));
+        switch (err) {
+            0 => return status,
+            posix.EINTR => continue,
+            posix.ECHILD => unreachable, // The process specified does not exist. It would be a race condition to handle this error.
+            posix.EINVAL => unreachable, // The options argument was invalid
+            else => unreachable,
+        }
+    }
+}
diff --git a/std/os/linux/i386.zig b/std/os/linux/i386.zig
deleted file mode 100644
index 7450ad34fa..0000000000
--- a/std/os/linux/i386.zig
+++ /dev/null
@@ -1,505 +0,0 @@
-const std = @import("../../index.zig");
-const linux = std.os.linux;
-const socklen_t = linux.socklen_t;
-const iovec = linux.iovec;
-
-pub const SYS_restart_syscall = 0;
-pub const SYS_exit = 1;
-pub const SYS_fork = 2;
-pub const SYS_read = 3;
-pub const SYS_write = 4;
-pub const SYS_open = 5;
-pub const SYS_close = 6;
-pub const SYS_waitpid = 7;
-pub const SYS_creat = 8;
-pub const SYS_link = 9;
-pub const SYS_unlink = 10;
-pub const SYS_execve = 11;
-pub const SYS_chdir = 12;
-pub const SYS_time = 13;
-pub const SYS_mknod = 14;
-pub const SYS_chmod = 15;
-pub const SYS_lchown = 16;
-pub const SYS_break = 17;
-pub const SYS_oldstat = 18;
-pub const SYS_lseek = 19;
-pub const SYS_getpid = 20;
-pub const SYS_mount = 21;
-pub const SYS_umount = 22;
-pub const SYS_setuid = 23;
-pub const SYS_getuid = 24;
-pub const SYS_stime = 25;
-pub const SYS_ptrace = 26;
-pub const SYS_alarm = 27;
-pub const SYS_oldfstat = 28;
-pub const SYS_pause = 29;
-pub const SYS_utime = 30;
-pub const SYS_stty = 31;
-pub const SYS_gtty = 32;
-pub const SYS_access = 33;
-pub const SYS_nice = 34;
-pub const SYS_ftime = 35;
-pub const SYS_sync = 36;
-pub const SYS_kill = 37;
-pub const SYS_rename = 38;
-pub const SYS_mkdir = 39;
-pub const SYS_rmdir = 40;
-pub const SYS_dup = 41;
-pub const SYS_pipe = 42;
-pub const SYS_times = 43;
-pub const SYS_prof = 44;
-pub const SYS_brk = 45;
-pub const SYS_setgid = 46;
-pub const SYS_getgid = 47;
-pub const SYS_signal = 48;
-pub const SYS_geteuid = 49;
-pub const SYS_getegid = 50;
-pub const SYS_acct = 51;
-pub const SYS_umount2 = 52;
-pub const SYS_lock = 53;
-pub const SYS_ioctl = 54;
-pub const SYS_fcntl = 55;
-pub const SYS_mpx = 56;
-pub const SYS_setpgid = 57;
-pub const SYS_ulimit = 58;
-pub const SYS_oldolduname = 59;
-pub const SYS_umask = 60;
-pub const SYS_chroot = 61;
-pub const SYS_ustat = 62;
-pub const SYS_dup2 = 63;
-pub const SYS_getppid = 64;
-pub const SYS_getpgrp = 65;
-pub const SYS_setsid = 66;
-pub const SYS_sigaction = 67;
-pub const SYS_sgetmask = 68;
-pub const SYS_ssetmask = 69;
-pub const SYS_setreuid = 70;
-pub const SYS_setregid = 71;
-pub const SYS_sigsuspend = 72;
-pub const SYS_sigpending = 73;
-pub const SYS_sethostname = 74;
-pub const SYS_setrlimit = 75;
-pub const SYS_getrlimit = 76;
-pub const SYS_getrusage = 77;
-pub const SYS_gettimeofday = 78;
-pub const SYS_settimeofday = 79;
-pub const SYS_getgroups = 80;
-pub const SYS_setgroups = 81;
-pub const SYS_select = 82;
-pub const SYS_symlink = 83;
-pub const SYS_oldlstat = 84;
-pub const SYS_readlink = 85;
-pub const SYS_uselib = 86;
-pub const SYS_swapon = 87;
-pub const SYS_reboot = 88;
-pub const SYS_readdir = 89;
-pub const SYS_mmap = 90;
-pub const SYS_munmap = 91;
-pub const SYS_truncate = 92;
-pub const SYS_ftruncate = 93;
-pub const SYS_fchmod = 94;
-pub const SYS_fchown = 95;
-pub const SYS_getpriority = 96;
-pub const SYS_setpriority = 97;
-pub const SYS_profil = 98;
-pub const SYS_statfs = 99;
-pub const SYS_fstatfs = 100;
-pub const SYS_ioperm = 101;
-pub const SYS_socketcall = 102;
-pub const SYS_syslog = 103;
-pub const SYS_setitimer = 104;
-pub const SYS_getitimer = 105;
-pub const SYS_stat = 106;
-pub const SYS_lstat = 107;
-pub const SYS_fstat = 108;
-pub const SYS_olduname = 109;
-pub const SYS_iopl = 110;
-pub const SYS_vhangup = 111;
-pub const SYS_idle = 112;
-pub const SYS_vm86old = 113;
-pub const SYS_wait4 = 114;
-pub const SYS_swapoff = 115;
-pub const SYS_sysinfo = 116;
-pub const SYS_ipc = 117;
-pub const SYS_fsync = 118;
-pub const SYS_sigreturn = 119;
-pub const SYS_clone = 120;
-pub const SYS_setdomainname = 121;
-pub const SYS_uname = 122;
-pub const SYS_modify_ldt = 123;
-pub const SYS_adjtimex = 124;
-pub const SYS_mprotect = 125;
-pub const SYS_sigprocmask = 126;
-pub const SYS_create_module = 127;
-pub const SYS_init_module = 128;
-pub const SYS_delete_module = 129;
-pub const SYS_get_kernel_syms = 130;
-pub const SYS_quotactl = 131;
-pub const SYS_getpgid = 132;
-pub const SYS_fchdir = 133;
-pub const SYS_bdflush = 134;
-pub const SYS_sysfs = 135;
-pub const SYS_personality = 136;
-pub const SYS_afs_syscall = 137;
-pub const SYS_setfsuid = 138;
-pub const SYS_setfsgid = 139;
-pub const SYS__llseek = 140;
-pub const SYS_getdents = 141;
-pub const SYS__newselect = 142;
-pub const SYS_flock = 143;
-pub const SYS_msync = 144;
-pub const SYS_readv = 145;
-pub const SYS_writev = 146;
-pub const SYS_getsid = 147;
-pub const SYS_fdatasync = 148;
-pub const SYS__sysctl = 149;
-pub const SYS_mlock = 150;
-pub const SYS_munlock = 151;
-pub const SYS_mlockall = 152;
-pub const SYS_munlockall = 153;
-pub const SYS_sched_setparam = 154;
-pub const SYS_sched_getparam = 155;
-pub const SYS_sched_setscheduler = 156;
-pub const SYS_sched_getscheduler = 157;
-pub const SYS_sched_yield = 158;
-pub const SYS_sched_get_priority_max = 159;
-pub const SYS_sched_get_priority_min = 160;
-pub const SYS_sched_rr_get_interval = 161;
-pub const SYS_nanosleep = 162;
-pub const SYS_mremap = 163;
-pub const SYS_setresuid = 164;
-pub const SYS_getresuid = 165;
-pub const SYS_vm86 = 166;
-pub const SYS_query_module = 167;
-pub const SYS_poll = 168;
-pub const SYS_nfsservctl = 169;
-pub const SYS_setresgid = 170;
-pub const SYS_getresgid = 171;
-pub const SYS_prctl = 172;
-pub const SYS_rt_sigreturn = 173;
-pub const SYS_rt_sigaction = 174;
-pub const SYS_rt_sigprocmask = 175;
-pub const SYS_rt_sigpending = 176;
-pub const SYS_rt_sigtimedwait = 177;
-pub const SYS_rt_sigqueueinfo = 178;
-pub const SYS_rt_sigsuspend = 179;
-pub const SYS_pread64 = 180;
-pub const SYS_pwrite64 = 181;
-pub const SYS_chown = 182;
-pub const SYS_getcwd = 183;
-pub const SYS_capget = 184;
-pub const SYS_capset = 185;
-pub const SYS_sigaltstack = 186;
-pub const SYS_sendfile = 187;
-pub const SYS_getpmsg = 188;
-pub const SYS_putpmsg = 189;
-pub const SYS_vfork = 190;
-pub const SYS_ugetrlimit = 191;
-pub const SYS_mmap2 = 192;
-pub const SYS_truncate64 = 193;
-pub const SYS_ftruncate64 = 194;
-pub const SYS_stat64 = 195;
-pub const SYS_lstat64 = 196;
-pub const SYS_fstat64 = 197;
-pub const SYS_lchown32 = 198;
-pub const SYS_getuid32 = 199;
-pub const SYS_getgid32 = 200;
-pub const SYS_geteuid32 = 201;
-pub const SYS_getegid32 = 202;
-pub const SYS_setreuid32 = 203;
-pub const SYS_setregid32 = 204;
-pub const SYS_getgroups32 = 205;
-pub const SYS_setgroups32 = 206;
-pub const SYS_fchown32 = 207;
-pub const SYS_setresuid32 = 208;
-pub const SYS_getresuid32 = 209;
-pub const SYS_setresgid32 = 210;
-pub const SYS_getresgid32 = 211;
-pub const SYS_chown32 = 212;
-pub const SYS_setuid32 = 213;
-pub const SYS_setgid32 = 214;
-pub const SYS_setfsuid32 = 215;
-pub const SYS_setfsgid32 = 216;
-pub const SYS_pivot_root = 217;
-pub const SYS_mincore = 218;
-pub const SYS_madvise = 219;
-pub const SYS_madvise1 = 219;
-pub const SYS_getdents64 = 220;
-pub const SYS_fcntl64 = 221;
-pub const SYS_gettid = 224;
-pub const SYS_readahead = 225;
-pub const SYS_setxattr = 226;
-pub const SYS_lsetxattr = 227;
-pub const SYS_fsetxattr = 228;
-pub const SYS_getxattr = 229;
-pub const SYS_lgetxattr = 230;
-pub const SYS_fgetxattr = 231;
-pub const SYS_listxattr = 232;
-pub const SYS_llistxattr = 233;
-pub const SYS_flistxattr = 234;
-pub const SYS_removexattr = 235;
-pub const SYS_lremovexattr = 236;
-pub const SYS_fremovexattr = 237;
-pub const SYS_tkill = 238;
-pub const SYS_sendfile64 = 239;
-pub const SYS_futex = 240;
-pub const SYS_sched_setaffinity = 241;
-pub const SYS_sched_getaffinity = 242;
-pub const SYS_set_thread_area = 243;
-pub const SYS_get_thread_area = 244;
-pub const SYS_io_setup = 245;
-pub const SYS_io_destroy = 246;
-pub const SYS_io_getevents = 247;
-pub const SYS_io_submit = 248;
-pub const SYS_io_cancel = 249;
-pub const SYS_fadvise64 = 250;
-pub const SYS_exit_group = 252;
-pub const SYS_lookup_dcookie = 253;
-pub const SYS_epoll_create = 254;
-pub const SYS_epoll_ctl = 255;
-pub const SYS_epoll_wait = 256;
-pub const SYS_remap_file_pages = 257;
-pub const SYS_set_tid_address = 258;
-pub const SYS_timer_create = 259;
-pub const SYS_timer_settime = SYS_timer_create+1;
-pub const SYS_timer_gettime = SYS_timer_create+2;
-pub const SYS_timer_getoverrun = SYS_timer_create+3;
-pub const SYS_timer_delete = SYS_timer_create+4;
-pub const SYS_clock_settime = SYS_timer_create+5;
-pub const SYS_clock_gettime = SYS_timer_create+6;
-pub const SYS_clock_getres = SYS_timer_create+7;
-pub const SYS_clock_nanosleep = SYS_timer_create+8;
-pub const SYS_statfs64 = 268;
-pub const SYS_fstatfs64 = 269;
-pub const SYS_tgkill = 270;
-pub const SYS_utimes = 271;
-pub const SYS_fadvise64_64 = 272;
-pub const SYS_vserver = 273;
-pub const SYS_mbind = 274;
-pub const SYS_get_mempolicy = 275;
-pub const SYS_set_mempolicy = 276;
-pub const SYS_mq_open = 277;
-pub const SYS_mq_unlink = SYS_mq_open+1;
-pub const SYS_mq_timedsend = SYS_mq_open+2;
-pub const SYS_mq_timedreceive = SYS_mq_open+3;
-pub const SYS_mq_notify = SYS_mq_open+4;
-pub const SYS_mq_getsetattr = SYS_mq_open+5;
-pub const SYS_kexec_load = 283;
-pub const SYS_waitid = 284;
-pub const SYS_add_key = 286;
-pub const SYS_request_key = 287;
-pub const SYS_keyctl = 288;
-pub const SYS_ioprio_set = 289;
-pub const SYS_ioprio_get = 290;
-pub const SYS_inotify_init = 291;
-pub const SYS_inotify_add_watch = 292;
-pub const SYS_inotify_rm_watch = 293;
-pub const SYS_migrate_pages = 294;
-pub const SYS_openat = 295;
-pub const SYS_mkdirat = 296;
-pub const SYS_mknodat = 297;
-pub const SYS_fchownat = 298;
-pub const SYS_futimesat = 299;
-pub const SYS_fstatat64 = 300;
-pub const SYS_unlinkat = 301;
-pub const SYS_renameat = 302;
-pub const SYS_linkat = 303;
-pub const SYS_symlinkat = 304;
-pub const SYS_readlinkat = 305;
-pub const SYS_fchmodat = 306;
-pub const SYS_faccessat = 307;
-pub const SYS_pselect6 = 308;
-pub const SYS_ppoll = 309;
-pub const SYS_unshare = 310;
-pub const SYS_set_robust_list = 311;
-pub const SYS_get_robust_list = 312;
-pub const SYS_splice = 313;
-pub const SYS_sync_file_range = 314;
-pub const SYS_tee = 315;
-pub const SYS_vmsplice = 316;
-pub const SYS_move_pages = 317;
-pub const SYS_getcpu = 318;
-pub const SYS_epoll_pwait = 319;
-pub const SYS_utimensat = 320;
-pub const SYS_signalfd = 321;
-pub const SYS_timerfd_create = 322;
-pub const SYS_eventfd = 323;
-pub const SYS_fallocate = 324;
-pub const SYS_timerfd_settime = 325;
-pub const SYS_timerfd_gettime = 326;
-pub const SYS_signalfd4 = 327;
-pub const SYS_eventfd2 = 328;
-pub const SYS_epoll_create1 = 329;
-pub const SYS_dup3 = 330;
-pub const SYS_pipe2 = 331;
-pub const SYS_inotify_init1 = 332;
-pub const SYS_preadv = 333;
-pub const SYS_pwritev = 334;
-pub const SYS_rt_tgsigqueueinfo = 335;
-pub const SYS_perf_event_open = 336;
-pub const SYS_recvmmsg = 337;
-pub const SYS_fanotify_init = 338;
-pub const SYS_fanotify_mark = 339;
-pub const SYS_prlimit64 = 340;
-pub const SYS_name_to_handle_at = 341;
-pub const SYS_open_by_handle_at = 342;
-pub const SYS_clock_adjtime = 343;
-pub const SYS_syncfs = 344;
-pub const SYS_sendmmsg = 345;
-pub const SYS_setns = 346;
-pub const SYS_process_vm_readv = 347;
-pub const SYS_process_vm_writev = 348;
-pub const SYS_kcmp = 349;
-pub const SYS_finit_module = 350;
-pub const SYS_sched_setattr = 351;
-pub const SYS_sched_getattr = 352;
-pub const SYS_renameat2 = 353;
-pub const SYS_seccomp = 354;
-pub const SYS_getrandom = 355;
-pub const SYS_memfd_create = 356;
-pub const SYS_bpf = 357;
-pub const SYS_execveat = 358;
-pub const SYS_socket = 359;
-pub const SYS_socketpair = 360;
-pub const SYS_bind = 361;
-pub const SYS_connect = 362;
-pub const SYS_listen = 363;
-pub const SYS_accept4 = 364;
-pub const SYS_getsockopt = 365;
-pub const SYS_setsockopt = 366;
-pub const SYS_getsockname = 367;
-pub const SYS_getpeername = 368;
-pub const SYS_sendto = 369;
-pub const SYS_sendmsg = 370;
-pub const SYS_recvfrom = 371;
-pub const SYS_recvmsg = 372;
-pub const SYS_shutdown = 373;
-pub const SYS_userfaultfd = 374;
-pub const SYS_membarrier = 375;
-pub const SYS_mlock2 = 376;
-
-
-pub const O_CREAT = 0o100;
-pub const O_EXCL = 0o200;
-pub const O_NOCTTY = 0o400;
-pub const O_TRUNC = 0o1000;
-pub const O_APPEND = 0o2000;
-pub const O_NONBLOCK = 0o4000;
-pub const O_DSYNC = 0o10000;
-pub const O_SYNC = 0o4010000;
-pub const O_RSYNC = 0o4010000;
-pub const O_DIRECTORY = 0o200000;
-pub const O_NOFOLLOW = 0o400000;
-pub const O_CLOEXEC = 0o2000000;
-
-pub const O_ASYNC = 0o20000;
-pub const O_DIRECT = 0o40000;
-pub const O_LARGEFILE = 0o100000;
-pub const O_NOATIME = 0o1000000;
-pub const O_PATH = 0o10000000;
-pub const O_TMPFILE = 0o20200000;
-pub const O_NDELAY = O_NONBLOCK;
-
-pub const F_DUPFD = 0;
-pub const F_GETFD = 1;
-pub const F_SETFD = 2;
-pub const F_GETFL = 3;
-pub const F_SETFL = 4;
-
-pub const F_SETOWN = 8;
-pub const F_GETOWN = 9;
-pub const F_SETSIG = 10;
-pub const F_GETSIG = 11;
-
-pub const F_GETLK = 12;
-pub const F_SETLK = 13;
-pub const F_SETLKW = 14;
-
-pub const F_SETOWN_EX = 15;
-pub const F_GETOWN_EX = 16;
-
-pub const F_GETOWNER_UIDS = 17;
-
-pub inline fn syscall0(number: usize) usize {
- return asm volatile ("int $0x80"
- : [ret] "={eax}" (-> usize)
- : [number] "{eax}" (number));
-}
-
-pub inline fn syscall1(number: usize, arg1: usize) usize {
- return asm volatile ("int $0x80"
- : [ret] "={eax}" (-> usize)
- : [number] "{eax}" (number),
- [arg1] "{ebx}" (arg1));
-}
-
-pub inline fn syscall2(number: usize, arg1: usize, arg2: usize) usize {
- return asm volatile ("int $0x80"
- : [ret] "={eax}" (-> usize)
- : [number] "{eax}" (number),
- [arg1] "{ebx}" (arg1),
- [arg2] "{ecx}" (arg2));
-}
-
-pub inline fn syscall3(number: usize, arg1: usize, arg2: usize, arg3: usize) usize {
- return asm volatile ("int $0x80"
- : [ret] "={eax}" (-> usize)
- : [number] "{eax}" (number),
- [arg1] "{ebx}" (arg1),
- [arg2] "{ecx}" (arg2),
- [arg3] "{edx}" (arg3));
-}
-
-pub inline fn syscall4(number: usize, arg1: usize, arg2: usize, arg3: usize, arg4: usize) usize {
- return asm volatile ("int $0x80"
- : [ret] "={eax}" (-> usize)
- : [number] "{eax}" (number),
- [arg1] "{ebx}" (arg1),
- [arg2] "{ecx}" (arg2),
- [arg3] "{edx}" (arg3),
- [arg4] "{esi}" (arg4));
-}
-
-pub inline fn syscall5(number: usize, arg1: usize, arg2: usize, arg3: usize,
- arg4: usize, arg5: usize) usize
-{
- return asm volatile ("int $0x80"
- : [ret] "={eax}" (-> usize)
- : [number] "{eax}" (number),
- [arg1] "{ebx}" (arg1),
- [arg2] "{ecx}" (arg2),
- [arg3] "{edx}" (arg3),
- [arg4] "{esi}" (arg4),
- [arg5] "{edi}" (arg5));
-}
-
-pub inline fn syscall6(number: usize, arg1: usize, arg2: usize, arg3: usize,
- arg4: usize, arg5: usize, arg6: usize) usize
-{
- return asm volatile ("int $0x80"
- : [ret] "={eax}" (-> usize)
- : [number] "{eax}" (number),
- [arg1] "{ebx}" (arg1),
- [arg2] "{ecx}" (arg2),
- [arg3] "{edx}" (arg3),
- [arg4] "{esi}" (arg4),
- [arg5] "{edi}" (arg5),
- [arg6] "{ebp}" (arg6));
-}
-
-pub nakedcc fn restore() void {
- asm volatile (
- \\popl %%eax
- \\movl $119, %%eax
- \\int $0x80
- :
- :
- : "rcx", "r11");
-}
-
-pub nakedcc fn restore_rt() void {
- asm volatile ("int $0x80"
- :
- : [number] "{eax}" (usize(SYS_rt_sigreturn))
- : "rcx", "r11");
-}
diff --git a/std/os/linux/index.zig b/std/os/linux/index.zig
index 8fd8bcbe78..368f074b9b 100644
--- a/std/os/linux/index.zig
+++ b/std/os/linux/index.zig
@@ -1,6 +1,7 @@
const std = @import("../../index.zig");
const assert = std.debug.assert;
const builtin = @import("builtin");
+const vdso = @import("vdso.zig");
pub use switch (builtin.arch) {
builtin.Arch.x86_64 => @import("x86_64.zig"),
builtin.Arch.i386 => @import("i386.zig"),
@@ -14,6 +15,22 @@ pub const STDIN_FILENO = 0;
pub const STDOUT_FILENO = 1;
pub const STDERR_FILENO = 2;
+pub const FUTEX_WAIT = 0;
+pub const FUTEX_WAKE = 1;
+pub const FUTEX_FD = 2;
+pub const FUTEX_REQUEUE = 3;
+pub const FUTEX_CMP_REQUEUE = 4;
+pub const FUTEX_WAKE_OP = 5;
+pub const FUTEX_LOCK_PI = 6;
+pub const FUTEX_UNLOCK_PI = 7;
+pub const FUTEX_TRYLOCK_PI = 8;
+pub const FUTEX_WAIT_BITSET = 9;
+
+pub const FUTEX_PRIVATE_FLAG = 128;
+
+pub const FUTEX_CLOCK_REALTIME = 256;
+
+
pub const PROT_NONE = 0;
pub const PROT_READ = 1;
pub const PROT_WRITE = 2;
@@ -38,6 +55,11 @@ pub const MAP_STACK = 0x20000;
pub const MAP_HUGETLB = 0x40000;
pub const MAP_FILE = 0;
+pub const F_OK = 0;
+pub const X_OK = 1;
+pub const W_OK = 2;
+pub const R_OK = 4;
+
pub const WNOHANG = 1;
pub const WUNTRACED = 2;
pub const WSTOPPED = 2;
@@ -101,17 +123,6 @@ pub const SIG_BLOCK = 0;
pub const SIG_UNBLOCK = 1;
pub const SIG_SETMASK = 2;
-pub const SOCK_STREAM = 1;
-pub const SOCK_DGRAM = 2;
-pub const SOCK_RAW = 3;
-pub const SOCK_RDM = 4;
-pub const SOCK_SEQPACKET = 5;
-pub const SOCK_DCCP = 6;
-pub const SOCK_PACKET = 10;
-pub const SOCK_CLOEXEC = 0o2000000;
-pub const SOCK_NONBLOCK = 0o4000;
-
-
pub const PROTO_ip = 0o000;
pub const PROTO_icmp = 0o001;
pub const PROTO_igmp = 0o002;
@@ -149,6 +160,20 @@ pub const PROTO_encap = 0o142;
pub const PROTO_pim = 0o147;
pub const PROTO_raw = 0o377;
+pub const SHUT_RD = 0;
+pub const SHUT_WR = 1;
+pub const SHUT_RDWR = 2;
+
+pub const SOCK_STREAM = 1;
+pub const SOCK_DGRAM = 2;
+pub const SOCK_RAW = 3;
+pub const SOCK_RDM = 4;
+pub const SOCK_SEQPACKET = 5;
+pub const SOCK_DCCP = 6;
+pub const SOCK_PACKET = 10;
+pub const SOCK_CLOEXEC = 0o2000000;
+pub const SOCK_NONBLOCK = 0o4000;
+
pub const PF_UNSPEC = 0;
pub const PF_LOCAL = 1;
pub const PF_UNIX = PF_LOCAL;
@@ -193,7 +218,10 @@ pub const PF_CAIF = 37;
pub const PF_ALG = 38;
pub const PF_NFC = 39;
pub const PF_VSOCK = 40;
-pub const PF_MAX = 41;
+pub const PF_KCM = 41;
+pub const PF_QIPCRTR = 42;
+pub const PF_SMC = 43;
+pub const PF_MAX = 44;
pub const AF_UNSPEC = PF_UNSPEC;
pub const AF_LOCAL = PF_LOCAL;
@@ -239,8 +267,137 @@ pub const AF_CAIF = PF_CAIF;
pub const AF_ALG = PF_ALG;
pub const AF_NFC = PF_NFC;
pub const AF_VSOCK = PF_VSOCK;
+pub const AF_KCM = PF_KCM;
+pub const AF_QIPCRTR = PF_QIPCRTR;
+pub const AF_SMC = PF_SMC;
pub const AF_MAX = PF_MAX;
+pub const SO_DEBUG = 1;
+pub const SO_REUSEADDR = 2;
+pub const SO_TYPE = 3;
+pub const SO_ERROR = 4;
+pub const SO_DONTROUTE = 5;
+pub const SO_BROADCAST = 6;
+pub const SO_SNDBUF = 7;
+pub const SO_RCVBUF = 8;
+pub const SO_KEEPALIVE = 9;
+pub const SO_OOBINLINE = 10;
+pub const SO_NO_CHECK = 11;
+pub const SO_PRIORITY = 12;
+pub const SO_LINGER = 13;
+pub const SO_BSDCOMPAT = 14;
+pub const SO_REUSEPORT = 15;
+pub const SO_PASSCRED = 16;
+pub const SO_PEERCRED = 17;
+pub const SO_RCVLOWAT = 18;
+pub const SO_SNDLOWAT = 19;
+pub const SO_RCVTIMEO = 20;
+pub const SO_SNDTIMEO = 21;
+pub const SO_ACCEPTCONN = 30;
+pub const SO_SNDBUFFORCE = 32;
+pub const SO_RCVBUFFORCE = 33;
+pub const SO_PROTOCOL = 38;
+pub const SO_DOMAIN = 39;
+
+pub const SO_SECURITY_AUTHENTICATION = 22;
+pub const SO_SECURITY_ENCRYPTION_TRANSPORT = 23;
+pub const SO_SECURITY_ENCRYPTION_NETWORK = 24;
+
+pub const SO_BINDTODEVICE = 25;
+
+pub const SO_ATTACH_FILTER = 26;
+pub const SO_DETACH_FILTER = 27;
+pub const SO_GET_FILTER = SO_ATTACH_FILTER;
+
+pub const SO_PEERNAME = 28;
+pub const SO_TIMESTAMP = 29;
+pub const SCM_TIMESTAMP = SO_TIMESTAMP;
+
+pub const SO_PEERSEC = 31;
+pub const SO_PASSSEC = 34;
+pub const SO_TIMESTAMPNS = 35;
+pub const SCM_TIMESTAMPNS = SO_TIMESTAMPNS;
+pub const SO_MARK = 36;
+pub const SO_TIMESTAMPING = 37;
+pub const SCM_TIMESTAMPING = SO_TIMESTAMPING;
+pub const SO_RXQ_OVFL = 40;
+pub const SO_WIFI_STATUS = 41;
+pub const SCM_WIFI_STATUS = SO_WIFI_STATUS;
+pub const SO_PEEK_OFF = 42;
+pub const SO_NOFCS = 43;
+pub const SO_LOCK_FILTER = 44;
+pub const SO_SELECT_ERR_QUEUE = 45;
+pub const SO_BUSY_POLL = 46;
+pub const SO_MAX_PACING_RATE = 47;
+pub const SO_BPF_EXTENSIONS = 48;
+pub const SO_INCOMING_CPU = 49;
+pub const SO_ATTACH_BPF = 50;
+pub const SO_DETACH_BPF = SO_DETACH_FILTER;
+pub const SO_ATTACH_REUSEPORT_CBPF = 51;
+pub const SO_ATTACH_REUSEPORT_EBPF = 52;
+pub const SO_CNX_ADVICE = 53;
+pub const SCM_TIMESTAMPING_OPT_STATS = 54;
+pub const SO_MEMINFO = 55;
+pub const SO_INCOMING_NAPI_ID = 56;
+pub const SO_COOKIE = 57;
+pub const SCM_TIMESTAMPING_PKTINFO = 58;
+pub const SO_PEERGROUPS = 59;
+pub const SO_ZEROCOPY = 60;
+
+pub const SOL_SOCKET = 1;
+
+pub const SOL_IP = 0;
+pub const SOL_IPV6 = 41;
+pub const SOL_ICMPV6 = 58;
+
+pub const SOL_RAW = 255;
+pub const SOL_DECNET = 261;
+pub const SOL_X25 = 262;
+pub const SOL_PACKET = 263;
+pub const SOL_ATM = 264;
+pub const SOL_AAL = 265;
+pub const SOL_IRDA = 266;
+pub const SOL_NETBEUI = 267;
+pub const SOL_LLC = 268;
+pub const SOL_DCCP = 269;
+pub const SOL_NETLINK = 270;
+pub const SOL_TIPC = 271;
+pub const SOL_RXRPC = 272;
+pub const SOL_PPPOL2TP = 273;
+pub const SOL_BLUETOOTH = 274;
+pub const SOL_PNPIPE = 275;
+pub const SOL_RDS = 276;
+pub const SOL_IUCV = 277;
+pub const SOL_CAIF = 278;
+pub const SOL_ALG = 279;
+pub const SOL_NFC = 280;
+pub const SOL_KCM = 281;
+pub const SOL_TLS = 282;
+
+pub const SOMAXCONN = 128;
+
+pub const MSG_OOB = 0x0001;
+pub const MSG_PEEK = 0x0002;
+pub const MSG_DONTROUTE = 0x0004;
+pub const MSG_CTRUNC = 0x0008;
+pub const MSG_PROXY = 0x0010;
+pub const MSG_TRUNC = 0x0020;
+pub const MSG_DONTWAIT = 0x0040;
+pub const MSG_EOR = 0x0080;
+pub const MSG_WAITALL = 0x0100;
+pub const MSG_FIN = 0x0200;
+pub const MSG_SYN = 0x0400;
+pub const MSG_CONFIRM = 0x0800;
+pub const MSG_RST = 0x1000;
+pub const MSG_ERRQUEUE = 0x2000;
+pub const MSG_NOSIGNAL = 0x4000;
+pub const MSG_MORE = 0x8000;
+pub const MSG_WAITFORONE = 0x10000;
+pub const MSG_BATCH = 0x40000;
+pub const MSG_ZEROCOPY = 0x4000000;
+pub const MSG_FASTOPEN = 0x20000000;
+pub const MSG_CMSG_CLOEXEC = 0x40000000;
+
pub const DT_UNKNOWN = 0;
pub const DT_FIFO = 1;
pub const DT_CHR = 2;
@@ -343,6 +500,126 @@ pub const CLOCK_BOOTTIME_ALARM = 9;
pub const CLOCK_SGI_CYCLE = 10;
pub const CLOCK_TAI = 11;
+pub const CSIGNAL = 0x000000ff;
+pub const CLONE_VM = 0x00000100;
+pub const CLONE_FS = 0x00000200;
+pub const CLONE_FILES = 0x00000400;
+pub const CLONE_SIGHAND = 0x00000800;
+pub const CLONE_PTRACE = 0x00002000;
+pub const CLONE_VFORK = 0x00004000;
+pub const CLONE_PARENT = 0x00008000;
+pub const CLONE_THREAD = 0x00010000;
+pub const CLONE_NEWNS = 0x00020000;
+pub const CLONE_SYSVSEM = 0x00040000;
+pub const CLONE_SETTLS = 0x00080000;
+pub const CLONE_PARENT_SETTID = 0x00100000;
+pub const CLONE_CHILD_CLEARTID = 0x00200000;
+pub const CLONE_DETACHED = 0x00400000;
+pub const CLONE_UNTRACED = 0x00800000;
+pub const CLONE_CHILD_SETTID = 0x01000000;
+pub const CLONE_NEWCGROUP = 0x02000000;
+pub const CLONE_NEWUTS = 0x04000000;
+pub const CLONE_NEWIPC = 0x08000000;
+pub const CLONE_NEWUSER = 0x10000000;
+pub const CLONE_NEWPID = 0x20000000;
+pub const CLONE_NEWNET = 0x40000000;
+pub const CLONE_IO = 0x80000000;
+
+pub const MS_RDONLY = 1;
+pub const MS_NOSUID = 2;
+pub const MS_NODEV = 4;
+pub const MS_NOEXEC = 8;
+pub const MS_SYNCHRONOUS = 16;
+pub const MS_REMOUNT = 32;
+pub const MS_MANDLOCK = 64;
+pub const MS_DIRSYNC = 128;
+pub const MS_NOATIME = 1024;
+pub const MS_NODIRATIME = 2048;
+pub const MS_BIND = 4096;
+pub const MS_MOVE = 8192;
+pub const MS_REC = 16384;
+pub const MS_SILENT = 32768;
+pub const MS_POSIXACL = (1<<16);
+pub const MS_UNBINDABLE = (1<<17);
+pub const MS_PRIVATE = (1<<18);
+pub const MS_SLAVE = (1<<19);
+pub const MS_SHARED = (1<<20);
+pub const MS_RELATIME = (1<<21);
+pub const MS_KERNMOUNT = (1<<22);
+pub const MS_I_VERSION = (1<<23);
+pub const MS_STRICTATIME = (1<<24);
+pub const MS_LAZYTIME = (1<<25);
+pub const MS_NOREMOTELOCK = (1<<27);
+pub const MS_NOSEC = (1<<28);
+pub const MS_BORN = (1<<29);
+pub const MS_ACTIVE = (1<<30);
+pub const MS_NOUSER = (1<<31);
+
+pub const MS_RMT_MASK = (MS_RDONLY|MS_SYNCHRONOUS|MS_MANDLOCK|MS_I_VERSION|MS_LAZYTIME);
+
+pub const MS_MGC_VAL = 0xc0ed0000;
+pub const MS_MGC_MSK = 0xffff0000;
+
+pub const MNT_FORCE = 1;
+pub const MNT_DETACH = 2;
+pub const MNT_EXPIRE = 4;
+pub const UMOUNT_NOFOLLOW = 8;
+
+
+pub const S_IFMT = 0o170000;
+
+pub const S_IFDIR = 0o040000;
+pub const S_IFCHR = 0o020000;
+pub const S_IFBLK = 0o060000;
+pub const S_IFREG = 0o100000;
+pub const S_IFIFO = 0o010000;
+pub const S_IFLNK = 0o120000;
+pub const S_IFSOCK = 0o140000;
+
+pub const S_ISUID = 0o4000;
+pub const S_ISGID = 0o2000;
+pub const S_ISVTX = 0o1000;
+pub const S_IRUSR = 0o400;
+pub const S_IWUSR = 0o200;
+pub const S_IXUSR = 0o100;
+pub const S_IRWXU = 0o700;
+pub const S_IRGRP = 0o040;
+pub const S_IWGRP = 0o020;
+pub const S_IXGRP = 0o010;
+pub const S_IRWXG = 0o070;
+pub const S_IROTH = 0o004;
+pub const S_IWOTH = 0o002;
+pub const S_IXOTH = 0o001;
+pub const S_IRWXO = 0o007;
+
+pub fn S_ISREG(m: u32) bool {
+ return m & S_IFMT == S_IFREG;
+}
+
+pub fn S_ISDIR(m: u32) bool {
+ return m & S_IFMT == S_IFDIR;
+}
+
+pub fn S_ISCHR(m: u32) bool {
+ return m & S_IFMT == S_IFCHR;
+}
+
+pub fn S_ISBLK(m: u32) bool {
+ return m & S_IFMT == S_IFBLK;
+}
+
+pub fn S_ISFIFO(m: u32) bool {
+ return m & S_IFMT == S_IFIFO;
+}
+
+pub fn S_ISLNK(m: u32) bool {
+ return m & S_IFMT == S_IFLNK;
+}
+
+pub fn S_ISSOCK(m: u32) bool {
+ return m & S_IFMT == S_IFSOCK;
+}
+
pub const TFD_NONBLOCK = O_NONBLOCK;
pub const TFD_CLOEXEC = O_CLOEXEC;
@@ -380,6 +657,10 @@ pub fn chdir(path: &const u8) usize {
return syscall1(SYS_chdir, @ptrToInt(path));
}
+pub fn chroot(path: &const u8) usize {
+ return syscall1(SYS_chroot, @ptrToInt(path));
+}
+
pub fn execve(path: &const u8, argv: &const ?&const u8, envp: &const ?&const u8) usize {
return syscall3(SYS_execve, @ptrToInt(path), @ptrToInt(argv), @ptrToInt(envp));
}
@@ -388,6 +669,10 @@ pub fn fork() usize {
return syscall0(SYS_fork);
}
+pub fn futex_wait(uaddr: usize, futex_op: u32, val: i32, timeout: ?&timespec) usize {
+ return syscall4(SYS_futex, uaddr, futex_op, @bitCast(u32, val), @ptrToInt(timeout));
+}
+
pub fn getcwd(buf: &u8, size: usize) usize {
return syscall2(SYS_getcwd, @ptrToInt(buf), size);
}
@@ -409,13 +694,25 @@ pub fn mkdir(path: &const u8, mode: u32) usize {
return syscall2(SYS_mkdir, @ptrToInt(path), mode);
}
-pub fn mmap(address: ?&u8, length: usize, prot: usize, flags: usize, fd: i32, offset: isize) usize {
+pub fn mount(special: &const u8, dir: &const u8, fstype: &const u8, flags: usize, data: usize) usize {
+ return syscall5(SYS_mount, @ptrToInt(special), @ptrToInt(dir), @ptrToInt(fstype), flags, data);
+}
+
+pub fn umount(special: &const u8) usize {
+ return syscall2(SYS_umount2, @ptrToInt(special), 0);
+}
+
+pub fn umount2(special: &const u8, flags: u32) usize {
+ return syscall2(SYS_umount2, @ptrToInt(special), flags);
+}
+
+pub fn mmap(address: ?&u8, length: usize, prot: usize, flags: u32, fd: i32, offset: isize) usize {
return syscall6(SYS_mmap, @ptrToInt(address), length, prot, flags, usize(fd),
@bitCast(usize, offset));
}
-pub fn munmap(address: &u8, length: usize) usize {
- return syscall2(SYS_munmap, @ptrToInt(address), length);
+pub fn munmap(address: usize, length: usize) usize {
+ return syscall2(SYS_munmap, address, length);
}
pub fn read(fd: i32, buf: &u8, count: usize) usize {
@@ -434,6 +731,10 @@ pub fn pread(fd: i32, buf: &u8, count: usize, offset: usize) usize {
return syscall4(SYS_pread, usize(fd), @ptrToInt(buf), count, offset);
}
+pub fn access(path: &const u8, mode: u32) usize {
+ return syscall2(SYS_access, @ptrToInt(path), mode);
+}
+
pub fn pipe(fd: &[2]i32) usize {
return pipe2(fd, 0);
}
@@ -466,6 +767,16 @@ pub fn openat(dirfd: i32, path: &const u8, flags: usize, mode: usize) usize {
return syscall4(SYS_openat, usize(dirfd), @ptrToInt(path), flags, mode);
}
+/// See also `clone` (from the arch-specific include)
+pub fn clone5(flags: usize, child_stack_ptr: usize, parent_tid: &i32, child_tid: &i32, newtls: usize) usize {
+ return syscall5(SYS_clone, flags, child_stack_ptr, @ptrToInt(parent_tid), @ptrToInt(child_tid), newtls);
+}
+
+/// See also `clone` (from the arch-specific include)
+pub fn clone2(flags: usize, child_stack_ptr: usize) usize {
+ return syscall2(SYS_clone, flags, child_stack_ptr);
+}
+
pub fn close(fd: i32) usize {
return syscall1(SYS_close, usize(fd));
}
@@ -495,6 +806,45 @@ pub fn waitpid(pid: i32, status: &i32, options: i32) usize {
return syscall4(SYS_wait4, @bitCast(usize, isize(pid)), @ptrToInt(status), @bitCast(usize, isize(options)), 0);
}
+pub fn clock_gettime(clk_id: i32, tp: &timespec) usize {
+ if (VDSO_CGT_SYM.len != 0) {
+ const f = @atomicLoad(@typeOf(init_vdso_clock_gettime), &vdso_clock_gettime, builtin.AtomicOrder.Unordered);
+ if (@ptrToInt(f) != 0) {
+ const rc = f(clk_id, tp);
+ switch (rc) {
+ 0, @bitCast(usize, isize(-EINVAL)) => return rc,
+ else => {},
+ }
+ }
+ }
+ return syscall2(SYS_clock_gettime, @bitCast(usize, isize(clk_id)), @ptrToInt(tp));
+}
+var vdso_clock_gettime = init_vdso_clock_gettime;
+extern fn init_vdso_clock_gettime(clk: i32, ts: &timespec) usize {
+ const addr = vdso.lookup(VDSO_CGT_VER, VDSO_CGT_SYM);
+ var f = @intToPtr(@typeOf(init_vdso_clock_gettime), addr);
+ _ = @cmpxchgStrong(@typeOf(init_vdso_clock_gettime), &vdso_clock_gettime, init_vdso_clock_gettime, f,
+ builtin.AtomicOrder.Monotonic, builtin.AtomicOrder.Monotonic);
+ if (@ptrToInt(f) == 0) return @bitCast(usize, isize(-ENOSYS));
+ return f(clk, ts);
+}
+
+pub fn clock_getres(clk_id: i32, tp: &timespec) usize {
+ return syscall2(SYS_clock_getres, @bitCast(usize, isize(clk_id)), @ptrToInt(tp));
+}
+
+pub fn clock_settime(clk_id: i32, tp: &const timespec) usize {
+ return syscall2(SYS_clock_settime, @bitCast(usize, isize(clk_id)), @ptrToInt(tp));
+}
+
+pub fn gettimeofday(tv: &timeval, tz: &timezone) usize {
+ return syscall2(SYS_gettimeofday, @ptrToInt(tv), @ptrToInt(tz));
+}
+
+pub fn settimeofday(tv: &const timeval, tz: &const timezone) usize {
+ return syscall2(SYS_settimeofday, @ptrToInt(tv), @ptrToInt(tz));
+}
+
pub fn nanosleep(req: &const timespec, rem: ?&timespec) usize {
return syscall2(SYS_nanosleep, @ptrToInt(req), @ptrToInt(rem));
}
@@ -515,6 +865,58 @@ pub fn setregid(rgid: u32, egid: u32) usize {
return syscall2(SYS_setregid, rgid, egid);
}
+pub fn getuid() u32 {
+ return u32(syscall0(SYS_getuid));
+}
+
+pub fn getgid() u32 {
+ return u32(syscall0(SYS_getgid));
+}
+
+pub fn geteuid() u32 {
+ return u32(syscall0(SYS_geteuid));
+}
+
+pub fn getegid() u32 {
+ return u32(syscall0(SYS_getegid));
+}
+
+pub fn seteuid(euid: u32) usize {
+ return syscall1(SYS_seteuid, euid);
+}
+
+pub fn setegid(egid: u32) usize {
+ return syscall1(SYS_setegid, egid);
+}
+
+pub fn getresuid(ruid: &u32, euid: &u32, suid: &u32) usize {
+ return syscall3(SYS_getresuid, @ptrToInt(ruid), @ptrToInt(euid), @ptrToInt(suid));
+}
+
+pub fn getresgid(rgid: &u32, egid: &u32, sgid: &u32) usize {
+ return syscall3(SYS_getresgid, @ptrToInt(rgid), @ptrToInt(egid), @ptrToInt(sgid));
+}
+
+pub fn setresuid(ruid: u32, euid: u32, suid: u32) usize {
+ return syscall3(SYS_setresuid, ruid, euid, suid);
+}
+
+pub fn setresgid(rgid: u32, egid: u32, sgid: u32) usize {
+ return syscall3(SYS_setresgid, rgid, egid, sgid);
+}
+
+pub fn getgroups(size: usize, list: &u32) usize {
+ return syscall2(SYS_getgroups, size, @ptrToInt(list));
+}
+
+pub fn setgroups(size: usize, list: &const u32) usize {
+ return syscall2(SYS_setgroups, size, @ptrToInt(list));
+}
+
+pub fn getpid() i32 {
+ return @bitCast(i32, u32(syscall0(SYS_getpid)));
+}
+
pub fn sigprocmask(flags: u32, noalias set: &const sigset_t, noalias oldset: ?&sigset_t) usize {
return syscall4(SYS_rt_sigprocmask, flags, @ptrToInt(set), @ptrToInt(oldset), NSIG/8);
}
@@ -599,30 +1001,27 @@ pub fn sigismember(set: &const sigset_t, sig: u6) bool {
return ((*set)[usize(s) / usize.bit_count] & (usize(1) << (s & (usize.bit_count - 1)))) != 0;
}
-
+pub const in_port_t = u16;
pub const sa_family_t = u16;
pub const socklen_t = u32;
-pub const in_addr = u32;
-pub const in6_addr = [16]u8;
-pub const sockaddr = extern struct {
- family: sa_family_t,
- port: u16,
- data: [12]u8,
+pub const sockaddr = extern union {
+ in: sockaddr_in,
+ in6: sockaddr_in6,
};
pub const sockaddr_in = extern struct {
family: sa_family_t,
- port: u16,
- addr: in_addr,
+ port: in_port_t,
+ addr: u32,
zero: [8]u8,
};
pub const sockaddr_in6 = extern struct {
family: sa_family_t,
- port: u16,
+ port: in_port_t,
flowinfo: u32,
- addr: in6_addr,
+ addr: [16]u8,
scope_id: u32,
};
@@ -639,16 +1038,16 @@ pub fn getpeername(fd: i32, noalias addr: &sockaddr, noalias len: &socklen_t) us
return syscall3(SYS_getpeername, usize(fd), @ptrToInt(addr), @ptrToInt(len));
}
-pub fn socket(domain: i32, socket_type: i32, protocol: i32) usize {
- return syscall3(SYS_socket, usize(domain), usize(socket_type), usize(protocol));
+pub fn socket(domain: u32, socket_type: u32, protocol: u32) usize {
+ return syscall3(SYS_socket, domain, socket_type, protocol);
}
-pub fn setsockopt(fd: i32, level: i32, optname: i32, optval: &const u8, optlen: socklen_t) usize {
- return syscall5(SYS_setsockopt, usize(fd), usize(level), usize(optname), usize(optval), @ptrToInt(optlen));
+pub fn setsockopt(fd: i32, level: u32, optname: u32, optval: &const u8, optlen: socklen_t) usize {
+ return syscall5(SYS_setsockopt, usize(fd), level, optname, usize(optval), @ptrToInt(optlen));
}
-pub fn getsockopt(fd: i32, level: i32, optname: i32, noalias optval: &u8, noalias optlen: &socklen_t) usize {
- return syscall5(SYS_getsockopt, usize(fd), usize(level), usize(optname), @ptrToInt(optval), @ptrToInt(optlen));
+pub fn getsockopt(fd: i32, level: u32, optname: u32, noalias optval: &u8, noalias optlen: &socklen_t) usize {
+ return syscall5(SYS_getsockopt, usize(fd), level, optname, @ptrToInt(optval), @ptrToInt(optlen));
}
pub fn sendmsg(fd: i32, msg: &const msghdr, flags: u32) usize {
@@ -677,8 +1076,8 @@ pub fn bind(fd: i32, addr: &const sockaddr, len: socklen_t) usize {
return syscall3(SYS_bind, usize(fd), @ptrToInt(addr), usize(len));
}
-pub fn listen(fd: i32, backlog: i32) usize {
- return syscall2(SYS_listen, usize(fd), usize(backlog));
+pub fn listen(fd: i32, backlog: u32) usize {
+ return syscall2(SYS_listen, usize(fd), backlog);
}
pub fn sendto(fd: i32, buf: &const u8, len: usize, flags: u32, addr: ?&const sockaddr, alen: socklen_t) usize {
@@ -697,46 +1096,83 @@ pub fn accept4(fd: i32, noalias addr: &sockaddr, noalias len: &socklen_t, flags:
return syscall4(SYS_accept4, usize(fd), @ptrToInt(addr), @ptrToInt(len), flags);
}
-// error NameTooLong;
-// error SystemResources;
-// error Io;
-//
-// pub fn if_nametoindex(name: []u8) !u32 {
-// var ifr: ifreq = undefined;
-//
-// if (name.len >= ifr.ifr_name.len) {
-// return error.NameTooLong;
-// }
-//
-// const socket_ret = socket(AF_UNIX, SOCK_DGRAM|SOCK_CLOEXEC, 0);
-// const socket_err = getErrno(socket_ret);
-// if (socket_err > 0) {
-// return error.SystemResources;
-// }
-// const socket_fd = i32(socket_ret);
-// @memcpy(&ifr.ifr_name[0], &name[0], name.len);
-// ifr.ifr_name[name.len] = 0;
-// const ioctl_ret = ioctl(socket_fd, SIOCGIFINDEX, &ifr);
-// close(socket_fd);
-// const ioctl_err = getErrno(ioctl_ret);
-// if (ioctl_err > 0) {
-// return error.Io;
-// }
-// return ifr.ifr_ifindex;
-// }
-
pub fn fstat(fd: i32, stat_buf: &Stat) usize {
return syscall2(SYS_fstat, usize(fd), @ptrToInt(stat_buf));
}
-pub const epoll_data = extern union {
+pub fn stat(pathname: &const u8, statbuf: &Stat) usize {
+ return syscall2(SYS_stat, @ptrToInt(pathname), @ptrToInt(statbuf));
+}
+
+pub fn lstat(pathname: &const u8, statbuf: &Stat) usize {
+ return syscall2(SYS_lstat, @ptrToInt(pathname), @ptrToInt(statbuf));
+}
+
+pub fn listxattr(path: &const u8, list: &u8, size: usize) usize {
+ return syscall3(SYS_listxattr, @ptrToInt(path), @ptrToInt(list), size);
+}
+
+pub fn llistxattr(path: &const u8, list: &u8, size: usize) usize {
+ return syscall3(SYS_llistxattr, @ptrToInt(path), @ptrToInt(list), size);
+}
+
+pub fn flistxattr(fd: usize, list: &u8, size: usize) usize {
+ return syscall3(SYS_flistxattr, fd, @ptrToInt(list), size);
+}
+
+pub fn getxattr(path: &const u8, name: &const u8, value: &void, size: usize) usize {
+ return syscall4(SYS_getxattr, @ptrToInt(path), @ptrToInt(name), @ptrToInt(value), size);
+}
+
+pub fn lgetxattr(path: &const u8, name: &const u8, value: &void, size: usize) usize {
+ return syscall4(SYS_lgetxattr, @ptrToInt(path), @ptrToInt(name), @ptrToInt(value), size);
+}
+
+pub fn fgetxattr(fd: usize, name: &const u8, value: &void, size: usize) usize {
+ return syscall4(SYS_fgetxattr, fd, @ptrToInt(name), @ptrToInt(value), size);
+}
+
+pub fn setxattr(path: &const u8, name: &const u8, value: &const void,
+ size: usize, flags: usize) usize {
+
+ return syscall5(SYS_setxattr, @ptrToInt(path), @ptrToInt(name), @ptrToInt(value),
+ size, flags);
+}
+
+pub fn lsetxattr(path: &const u8, name: &const u8, value: &const void,
+ size: usize, flags: usize) usize {
+
+ return syscall5(SYS_lsetxattr, @ptrToInt(path), @ptrToInt(name), @ptrToInt(value),
+ size, flags);
+}
+
+pub fn fsetxattr(fd: usize, name: &const u8, value: &const void,
+ size: usize, flags: usize) usize {
+
+ return syscall5(SYS_fsetxattr, fd, @ptrToInt(name), @ptrToInt(value),
+ size, flags);
+}
+
+pub fn removexattr(path: &const u8, name: &const u8) usize {
+ return syscall2(SYS_removexattr, @ptrToInt(path), @ptrToInt(name));
+}
+
+pub fn lremovexattr(path: &const u8, name: &const u8) usize {
+ return syscall2(SYS_lremovexattr, @ptrToInt(path), @ptrToInt(name));
+}
+
+pub fn fremovexattr(fd: usize, name: &const u8) usize {
+ return syscall2(SYS_fremovexattr, fd, @ptrToInt(name));
+}
+
+pub const epoll_data = packed union {
ptr: usize,
fd: i32,
@"u32": u32,
@"u64": u64,
};
-pub const epoll_event = extern struct {
+pub const epoll_event = packed struct {
events: u32,
data: epoll_data,
};
@@ -749,7 +1185,7 @@ pub fn epoll_create1(flags: usize) usize {
return syscall1(SYS_epoll_create1, flags);
}
-pub fn epoll_ctl(epoll_fd: i32, op: i32, fd: i32, ev: &epoll_event) usize {
+pub fn epoll_ctl(epoll_fd: i32, op: u32, fd: i32, ev: &epoll_event) usize {
return syscall4(SYS_epoll_ctl, usize(epoll_fd), usize(op), usize(fd), @ptrToInt(ev));
}
@@ -774,9 +1210,126 @@ pub fn timerfd_settime(fd: i32, flags: u32, new_value: &const itimerspec, old_va
return syscall4(SYS_timerfd_settime, usize(fd), usize(flags), @ptrToInt(new_value), @ptrToInt(old_value));
}
-test "import linux test" {
- // TODO lazy analysis should prevent this test from being compiled on windows, but
- // it is still compiled on windows
+pub const _LINUX_CAPABILITY_VERSION_1 = 0x19980330;
+pub const _LINUX_CAPABILITY_U32S_1 = 1;
+
+pub const _LINUX_CAPABILITY_VERSION_2 = 0x20071026;
+pub const _LINUX_CAPABILITY_U32S_2 = 2;
+
+pub const _LINUX_CAPABILITY_VERSION_3 = 0x20080522;
+pub const _LINUX_CAPABILITY_U32S_3 = 2;
+
+pub const VFS_CAP_REVISION_MASK = 0xFF000000;
+pub const VFS_CAP_REVISION_SHIFT = 24;
+pub const VFS_CAP_FLAGS_MASK = ~VFS_CAP_REVISION_MASK;
+pub const VFS_CAP_FLAGS_EFFECTIVE = 0x000001;
+
+pub const VFS_CAP_REVISION_1 = 0x01000000;
+pub const VFS_CAP_U32_1 = 1;
+pub const XATTR_CAPS_SZ_1 = @sizeOf(u32)*(1 + 2*VFS_CAP_U32_1);
+
+pub const VFS_CAP_REVISION_2 = 0x02000000;
+pub const VFS_CAP_U32_2 = 2;
+pub const XATTR_CAPS_SZ_2 = @sizeOf(u32)*(1 + 2*VFS_CAP_U32_2);
+
+pub const XATTR_CAPS_SZ = XATTR_CAPS_SZ_2;
+pub const VFS_CAP_U32 = VFS_CAP_U32_2;
+pub const VFS_CAP_REVISION = VFS_CAP_REVISION_2;
+
+pub const vfs_cap_data = extern struct {
+ //all of these are mandated as little endian
+ //when on disk.
+ const Data = struct {
+ permitted: u32,
+ inheritable: u32,
+ };
+
+ magic_etc: u32,
+ data: [VFS_CAP_U32]Data,
+};
+
+
+pub const CAP_CHOWN = 0;
+pub const CAP_DAC_OVERRIDE = 1;
+pub const CAP_DAC_READ_SEARCH = 2;
+pub const CAP_FOWNER = 3;
+pub const CAP_FSETID = 4;
+pub const CAP_KILL = 5;
+pub const CAP_SETGID = 6;
+pub const CAP_SETUID = 7;
+pub const CAP_SETPCAP = 8;
+pub const CAP_LINUX_IMMUTABLE = 9;
+pub const CAP_NET_BIND_SERVICE = 10;
+pub const CAP_NET_BROADCAST = 11;
+pub const CAP_NET_ADMIN = 12;
+pub const CAP_NET_RAW = 13;
+pub const CAP_IPC_LOCK = 14;
+pub const CAP_IPC_OWNER = 15;
+pub const CAP_SYS_MODULE = 16;
+pub const CAP_SYS_RAWIO = 17;
+pub const CAP_SYS_CHROOT = 18;
+pub const CAP_SYS_PTRACE = 19;
+pub const CAP_SYS_PACCT = 20;
+pub const CAP_SYS_ADMIN = 21;
+pub const CAP_SYS_BOOT = 22;
+pub const CAP_SYS_NICE = 23;
+pub const CAP_SYS_RESOURCE = 24;
+pub const CAP_SYS_TIME = 25;
+pub const CAP_SYS_TTY_CONFIG = 26;
+pub const CAP_MKNOD = 27;
+pub const CAP_LEASE = 28;
+pub const CAP_AUDIT_WRITE = 29;
+pub const CAP_AUDIT_CONTROL = 30;
+pub const CAP_SETFCAP = 31;
+pub const CAP_MAC_OVERRIDE = 32;
+pub const CAP_MAC_ADMIN = 33;
+pub const CAP_SYSLOG = 34;
+pub const CAP_WAKE_ALARM = 35;
+pub const CAP_BLOCK_SUSPEND = 36;
+pub const CAP_AUDIT_READ = 37;
+pub const CAP_LAST_CAP = CAP_AUDIT_READ;
+
+pub fn cap_valid(x: u8) bool {
+ return x >= 0 and x <= CAP_LAST_CAP;
+}
+
+pub fn CAP_TO_MASK(cap: u8) u32 {
+ return u32(1) << u5(cap & 31);
+}
+
+pub fn CAP_TO_INDEX(cap: u8) u8 {
+ return cap >> 5;
+}
+
+pub const cap_t = extern struct {
+ hdrp: &cap_user_header_t,
+ datap: &cap_user_data_t,
+};
+
+pub const cap_user_header_t = extern struct {
+ version: u32,
+ pid: usize,
+};
+
+pub const cap_user_data_t = extern struct {
+ effective: u32,
+ permitted: u32,
+ inheritable: u32,
+};
+
+pub fn unshare(flags: usize) usize {
+ return syscall1(SYS_unshare, usize(flags));
+}
+
+pub fn capget(hdrp: &cap_user_header_t, datap: &cap_user_data_t) usize {
+ return syscall2(SYS_capget, @ptrToInt(hdrp), @ptrToInt(datap));
+}
+
+pub fn capset(hdrp: &cap_user_header_t, datap: &const cap_user_data_t) usize {
+ return syscall2(SYS_capset, @ptrToInt(hdrp), @ptrToInt(datap));
+}
+
+test "import" {
if (builtin.os == builtin.Os.linux) {
_ = @import("test.zig");
}
diff --git a/std/os/linux/test.zig b/std/os/linux/test.zig
index e427fd5d59..18a6e5f19f 100644
--- a/std/os/linux/test.zig
+++ b/std/os/linux/test.zig
@@ -1,4 +1,5 @@
const std = @import("../../index.zig");
+const builtin = @import("builtin");
const linux = std.os.linux;
const assert = std.debug.assert;
diff --git a/std/os/linux/vdso.zig b/std/os/linux/vdso.zig
new file mode 100644
index 0000000000..f4fb513af9
--- /dev/null
+++ b/std/os/linux/vdso.zig
@@ -0,0 +1,89 @@
+const std = @import("../../index.zig");
+const elf = std.elf;
+const linux = std.os.linux;
+const cstr = std.cstr;
+const mem = std.mem;
+
+pub fn lookup(vername: []const u8, name: []const u8) usize {
+ const vdso_addr = std.os.linux_aux_raw[std.elf.AT_SYSINFO_EHDR];
+ if (vdso_addr == 0) return 0;
+
+ const eh = @intToPtr(&elf.Ehdr, vdso_addr);
+ var ph_addr: usize = vdso_addr + eh.e_phoff;
+ const ph = @intToPtr(&elf.Phdr, ph_addr);
+
+ var maybe_dynv: ?&usize = null;
+ var base: usize = @maxValue(usize);
+ {
+ var i: usize = 0;
+ while (i < eh.e_phnum) : ({i += 1; ph_addr += eh.e_phentsize;}) {
+ const this_ph = @intToPtr(&elf.Phdr, ph_addr);
+ switch (this_ph.p_type) {
+ elf.PT_LOAD => base = vdso_addr + this_ph.p_offset - this_ph.p_vaddr,
+ elf.PT_DYNAMIC => maybe_dynv = @intToPtr(&usize, vdso_addr + this_ph.p_offset),
+ else => {},
+ }
+ }
+ }
+ const dynv = maybe_dynv ?? return 0;
+ if (base == @maxValue(usize)) return 0;
+
+ var maybe_strings: ?&u8 = null;
+ var maybe_syms: ?&elf.Sym = null;
+ var maybe_hashtab: ?&linux.Elf_Symndx = null;
+ var maybe_versym: ?&u16 = null;
+ var maybe_verdef: ?&elf.Verdef = null;
+
+ {
+ var i: usize = 0;
+ while (dynv[i] != 0) : (i += 2) {
+ const p = base + dynv[i + 1];
+ switch (dynv[i]) {
+ elf.DT_STRTAB => maybe_strings = @intToPtr(&u8, p),
+ elf.DT_SYMTAB => maybe_syms = @intToPtr(&elf.Sym, p),
+ elf.DT_HASH => maybe_hashtab = @intToPtr(&linux.Elf_Symndx, p),
+ elf.DT_VERSYM => maybe_versym = @intToPtr(&u16, p),
+ elf.DT_VERDEF => maybe_verdef = @intToPtr(&elf.Verdef, p),
+ else => {},
+ }
+ }
+ }
+
+ const strings = maybe_strings ?? return 0;
+ const syms = maybe_syms ?? return 0;
+ const hashtab = maybe_hashtab ?? return 0;
+ if (maybe_verdef == null) maybe_versym = null;
+
+
+ const OK_TYPES = (1<<elf.STT_NOTYPE | 1<<elf.STT_OBJECT | 1<<elf.STT_FUNC | 1<<elf.STT_COMMON);
+ const OK_BINDS = (1<<elf.STB_GLOBAL | 1<<elf.STB_WEAK | 1<<elf.STB_GNU_UNIQUE);
+
+ var i: usize = 0;
+ while (i < hashtab[1]) : (i += 1) {
+ if (0==(u32(1)<<u5(syms[i].st_info&0xf) & OK_TYPES)) continue;
+ if (0==(u32(1)<<u5(syms[i].st_info>>4) & OK_BINDS)) continue;
+ if (0==syms[i].st_shndx) continue;
+ if (!mem.eql(u8, name, cstr.toSliceConst(&strings[syms[i].st_name]))) continue;
+ if (maybe_versym) |versym| {
+ if (!checkver(??maybe_verdef, versym[i], vername, strings))
+ continue;
+ }
+ return base + syms[i].st_value;
+ }
+
+ return 0;
+}
+
+fn checkver(def_arg: &elf.Verdef, vsym_arg: i32, vername: []const u8, strings: &u8) bool {
+ var def = def_arg;
+ const vsym = @bitCast(u32, vsym_arg) & 0x7fff;
+ while (true) {
+ if (0==(def.vd_flags & elf.VER_FLG_BASE) and (def.vd_ndx & 0x7fff) == vsym)
+ break;
+ if (def.vd_next == 0)
+ return false;
+ def = @intToPtr(&elf.Verdef, @ptrToInt(def) + def.vd_next);
+ }
+ const aux = @intToPtr(&elf.Verdaux, @ptrToInt(def ) + def.vd_aux);
+ return mem.eql(u8, vername, cstr.toSliceConst(&strings[aux.vda_name]));
+}
diff --git a/std/os/linux/x86_64.zig b/std/os/linux/x86_64.zig
index cfb2231df9..544b2365ce 100644
--- a/std/os/linux/x86_64.zig
+++ b/std/os/linux/x86_64.zig
@@ -371,6 +371,13 @@ pub const F_GETOWN_EX = 16;
pub const F_GETOWNER_UIDS = 17;
+
+pub const VDSO_USEFUL = true;
+pub const VDSO_CGT_SYM = "__vdso_clock_gettime";
+pub const VDSO_CGT_VER = "LINUX_2.6";
+pub const VDSO_GETCPU_SYM = "__vdso_getcpu";
+pub const VDSO_GETCPU_VER = "LINUX_2.6";
+
pub fn syscall0(number: usize) usize {
return asm volatile ("syscall"
: [ret] "={rax}" (-> usize)
@@ -443,6 +450,9 @@ pub fn syscall6(number: usize, arg1: usize, arg2: usize, arg3: usize, arg4: usiz
: "rcx", "r11");
}
+/// This matches the libc clone function.
+pub extern fn clone(func: extern fn(arg: usize) u8, stack: usize, flags: usize, arg: usize, ptid: &i32, tls: usize, ctid: &i32) usize;
+
pub nakedcc fn restore_rt() void {
return asm volatile ("syscall"
:
@@ -489,6 +499,16 @@ pub const timespec = extern struct {
tv_nsec: isize,
};
+pub const timeval = extern struct {
+ tv_sec: isize,
+ tv_usec: isize,
+};
+
+pub const timezone = extern struct {
+ tz_minuteswest: i32,
+ tz_dsttime: i32,
+};
+
pub const dirent = extern struct {
d_ino: usize,
d_off: usize,
@@ -496,3 +516,4 @@ pub const dirent = extern struct {
d_name: u8, // field address is the address of first byte of name
};
+pub const Elf_Symndx = u32;
diff --git a/std/os/test.zig b/std/os/test.zig
index 9c718d5b6b..56d6e8b309 100644
--- a/std/os/test.zig
+++ b/std/os/test.zig
@@ -6,6 +6,8 @@ const io = std.io;
const a = std.debug.global_allocator;
const builtin = @import("builtin");
+const AtomicRmwOp = builtin.AtomicRmwOp;
+const AtomicOrder = builtin.AtomicOrder;
test "makePath, put some files in it, deleteTree" {
if (builtin.os == builtin.Os.windows) {
@@ -23,3 +25,45 @@ test "makePath, put some files in it, deleteTree" {
assert(err == error.PathNotFound);
}
}
+
+test "access file" {
+ if (builtin.os == builtin.Os.windows) {
+ return;
+ }
+
+ try os.makePath(a, "os_test_tmp");
+ if (os.File.access(a, "os_test_tmp/file.txt", os.default_file_mode)) |ok| {
+ unreachable;
+ } else |err| {
+ assert(err == error.NotFound);
+ }
+
+ try io.writeFile(a, "os_test_tmp/file.txt", "");
+ assert((try os.File.access(a, "os_test_tmp/file.txt", os.default_file_mode)) == true);
+ try os.deleteTree(a, "os_test_tmp");
+}
+
+test "spawn threads" {
+ var shared_ctx: i32 = 1;
+
+ const thread1 = try std.os.spawnThread({}, start1);
+ const thread2 = try std.os.spawnThread(&shared_ctx, start2);
+ const thread3 = try std.os.spawnThread(&shared_ctx, start2);
+ const thread4 = try std.os.spawnThread(&shared_ctx, start2);
+
+ thread1.wait();
+ thread2.wait();
+ thread3.wait();
+ thread4.wait();
+
+ assert(shared_ctx == 4);
+}
+
+fn start1(ctx: void) u8 {
+ return 0;
+}
+
+fn start2(ctx: &i32) u8 {
+ _ = @atomicRmw(i32, ctx, AtomicRmwOp.Add, 1, AtomicOrder.SeqCst);
+ return 0;
+}
diff --git a/std/os/time.zig b/std/os/time.zig
new file mode 100644
index 0000000000..4fd2c4e924
--- /dev/null
+++ b/std/os/time.zig
@@ -0,0 +1,288 @@
+const std = @import("../index.zig");
+const builtin = @import("builtin");
+const Os = builtin.Os;
+const debug = std.debug;
+
+const windows = std.os.windows;
+const linux = std.os.linux;
+const darwin = std.os.darwin;
+const posix = std.os.posix;
+
+pub const epoch = @import("epoch.zig");
+
+/// Sleep for the specified duration
+pub fn sleep(seconds: usize, nanoseconds: usize) void {
+ switch (builtin.os) {
+ Os.linux, Os.macosx, Os.ios => {
+ posixSleep(u63(seconds), u63(nanoseconds));
+ },
+ Os.windows => {
+ const ns_per_ms = ns_per_s / ms_per_s;
+ const milliseconds = seconds * ms_per_s + nanoseconds / ns_per_ms;
+ windows.Sleep(windows.DWORD(milliseconds));
+ },
+ else => @compileError("Unsupported OS"),
+ }
+}
+
+const u63 = @IntType(false, 63);
+pub fn posixSleep(seconds: u63, nanoseconds: u63) void {
+ var req = posix.timespec {
+ .tv_sec = seconds,
+ .tv_nsec = nanoseconds,
+ };
+ var rem: posix.timespec = undefined;
+ while (true) {
+ const ret_val = posix.nanosleep(&req, &rem);
+ const err = posix.getErrno(ret_val);
+ if (err == 0) return;
+ switch (err) {
+ posix.EFAULT => unreachable,
+ posix.EINVAL => {
+ // Sometimes Darwin returns EINVAL for no reason.
+ // We treat it as a spurious wakeup.
+ return;
+ },
+ posix.EINTR => {
+ req = rem;
+ continue;
+ },
+ else => return,
+ }
+ }
+}
+
+/// Get the posix timestamp, UTC, in seconds
+pub fn timestamp() u64 {
+ return @divFloor(milliTimestamp(), ms_per_s);
+}
+
+/// Get the posix timestamp, UTC, in milliseconds
+pub const milliTimestamp = switch (builtin.os) {
+ Os.windows => milliTimestampWindows,
+ Os.linux => milliTimestampPosix,
+ Os.macosx, Os.ios => milliTimestampDarwin,
+ else => @compileError("Unsupported OS"),
+};
+
+fn milliTimestampWindows() u64 {
+ //FileTime has a granularity of 100 nanoseconds
+ // and uses the NTFS/Windows epoch
+ var ft: i64 = undefined;
+ windows.GetSystemTimeAsFileTime(&ft);
+ const hns_per_ms = (ns_per_s / 100) / ms_per_s;
+ const epoch_adj = epoch.windows * ms_per_s;
+ return u64(@divFloor(ft, hns_per_ms) + epoch_adj);
+}
+
+fn milliTimestampDarwin() u64 {
+ //Sources suggest MacOS 10.12 has support for
+ // posix clock_gettime.
+ var tv: darwin.timeval = undefined;
+ var err = darwin.gettimeofday(&tv, null);
+ debug.assert(err == 0);
+ const sec_ms = u64(tv.tv_sec) * ms_per_s;
+ const usec_ms = @divFloor(u64(tv.tv_usec), us_per_s / ms_per_s);
+ return u64(sec_ms) + u64(usec_ms);
+}
+
+fn milliTimestampPosix() u64 {
+ //From what I can tell there's no reason clock_gettime
+ // should ever fail for us with CLOCK_REALTIME,
+ // seccomp aside.
+ var ts: posix.timespec = undefined;
+ const err = posix.clock_gettime(posix.CLOCK_REALTIME, &ts);
+ debug.assert(err == 0);
+ const sec_ms = u64(ts.tv_sec) * ms_per_s;
+ const nsec_ms = @divFloor(u64(ts.tv_nsec), ns_per_s / ms_per_s);
+ return sec_ms + nsec_ms;
+}
+
+/// Divisions of a second
+pub const ns_per_s = 1000000000;
+pub const us_per_s = 1000000;
+pub const ms_per_s = 1000;
+pub const cs_per_s = 100;
+
+/// Common time divisions
+pub const s_per_min = 60;
+pub const s_per_hour = s_per_min * 60;
+pub const s_per_day = s_per_hour * 24;
+pub const s_per_week = s_per_day * 7;
+
+
+/// A monotonic high-performance timer.
+/// Timer.start() must be called to initialize the struct, which captures
+/// the counter frequency on windows and darwin, records the resolution,
+/// and gives the user an opportunity to check for the existence of
+/// monotonic clocks without forcing them to check for error on each read.
+/// .resolution is in nanoseconds on all platforms but .start_time's meaning
+/// depends on the OS. On Windows and Darwin it is a hardware counter
+/// value that requires calculation to convert to a meaningful unit.
+pub const Timer = struct {
+
+ //if we used resolution's value when performing the
+ // performance counter calc on windows/darwin, it would
+ // be less precise
+ frequency: switch (builtin.os) {
+ Os.windows => u64,
+ Os.macosx, Os.ios => darwin.mach_timebase_info_data,
+ else => void,
+ },
+ resolution: u64,
+ start_time: u64,
+
+
+ //At some point we may change our minds on RAW, but for now we're
+ // sticking with posix standard MONOTONIC. For more information, see:
+ // https://github.com/zig-lang/zig/pull/933
+ //
+ //const monotonic_clock_id = switch(builtin.os) {
+ // Os.linux => linux.CLOCK_MONOTONIC_RAW,
+ // else => posix.CLOCK_MONOTONIC,
+ //};
+ const monotonic_clock_id = posix.CLOCK_MONOTONIC;
+
+
+ /// Initialize the timer structure.
+ //This gives us an opportunity to grab the counter frequency in windows.
+ //On Windows: QueryPerformanceCounter will succeed on anything >= XP/2000.
+ //On Posix: CLOCK_MONOTONIC will only fail if the monotonic counter is not
+ // supported, or if the timespec pointer is out of bounds, which should be
+ // impossible here barring cosmic rays or other such occurrences of
+ // incredibly bad luck.
+ //On Darwin: This cannot fail, as far as I am able to tell.
+ const TimerError = error{TimerUnsupported, Unexpected};
+ pub fn start() TimerError!Timer {
+ var self: Timer = undefined;
+
+ switch (builtin.os) {
+ Os.windows => {
+ var freq: i64 = undefined;
+ var err = windows.QueryPerformanceFrequency(&freq);
+ if (err == windows.FALSE) return error.TimerUnsupported;
+ self.frequency = u64(freq);
+ self.resolution = @divFloor(ns_per_s, self.frequency);
+
+ var start_time: i64 = undefined;
+ err = windows.QueryPerformanceCounter(&start_time);
+ debug.assert(err != windows.FALSE);
+ self.start_time = u64(start_time);
+ },
+ Os.linux => {
+ //On Linux, seccomp can do arbitrary things to our ability to call
+ // syscalls, including return any errno value it wants and
+ // inconsistently throwing errors. Since we can't account for
+ // abuses of seccomp in a reasonable way, we'll assume that if
+ // seccomp is going to block us it will at least do so consistently
+ var ts: posix.timespec = undefined;
+ var result = posix.clock_getres(monotonic_clock_id, &ts);
+ var errno = posix.getErrno(result);
+ switch (errno) {
+ 0 => {},
+ posix.EINVAL => return error.TimerUnsupported,
+ else => return std.os.unexpectedErrorPosix(errno),
+ }
+ self.resolution = u64(ts.tv_sec) * u64(ns_per_s) + u64(ts.tv_nsec);
+
+ result = posix.clock_gettime(monotonic_clock_id, &ts);
+ errno = posix.getErrno(result);
+ if (errno != 0) return std.os.unexpectedErrorPosix(errno);
+ self.start_time = u64(ts.tv_sec) * u64(ns_per_s) + u64(ts.tv_nsec);
+ },
+ Os.macosx, Os.ios => {
+ darwin.mach_timebase_info(&self.frequency);
+ self.resolution = @divFloor(self.frequency.numer, self.frequency.denom);
+ self.start_time = darwin.mach_absolute_time();
+ },
+ else => @compileError("Unsupported OS"),
+ }
+ return self;
+ }
+
+ /// Reads the timer value since start or the last reset in nanoseconds
+ pub fn read(self: &Timer) u64 {
+ var clock = clockNative() - self.start_time;
+ return switch (builtin.os) {
+ Os.windows => @divFloor(clock * ns_per_s, self.frequency),
+ Os.linux => clock,
+ Os.macosx, Os.ios => @divFloor(clock * self.frequency.numer, self.frequency.denom),
+ else => @compileError("Unsupported OS"),
+ };
+ }
+
+ /// Resets the timer value to 0/now.
+ pub fn reset(self: &Timer) void
+ {
+ self.start_time = clockNative();
+ }
+
+ /// Returns the current value of the timer in nanoseconds, then resets it
+ pub fn lap(self: &Timer) u64 {
+ var now = clockNative();
+ var lap_time = self.read();
+ self.start_time = now;
+ return lap_time;
+ }
+
+
+ const clockNative = switch (builtin.os) {
+ Os.windows => clockWindows,
+ Os.linux => clockLinux,
+ Os.macosx, Os.ios => clockDarwin,
+ else => @compileError("Unsupported OS"),
+ };
+
+ fn clockWindows() u64 {
+ var result: i64 = undefined;
+ var err = windows.QueryPerformanceCounter(&result);
+ debug.assert(err != windows.FALSE);
+ return u64(result);
+ }
+
+ fn clockDarwin() u64 {
+ return darwin.mach_absolute_time();
+ }
+
+ fn clockLinux() u64 {
+ var ts: posix.timespec = undefined;
+ var result = posix.clock_gettime(monotonic_clock_id, &ts);
+ debug.assert(posix.getErrno(result) == 0);
+ return u64(ts.tv_sec) * u64(ns_per_s) + u64(ts.tv_nsec);
+ }
+};
+
+
+
+
+
+test "os.time.sleep" {
+ sleep(0, 1);
+}
+
+test "os.time.timestamp" {
+ const ns_per_ms = (ns_per_s / ms_per_s);
+ const margin = 50;
+
+ const time_0 = milliTimestamp();
+ sleep(0, ns_per_ms);
+ const time_1 = milliTimestamp();
+ const interval = time_1 - time_0;
+ debug.assert(interval > 0 and interval < margin);
+}
+
+test "os.time.Timer" {
+ const ns_per_ms = (ns_per_s / ms_per_s);
+ const margin = ns_per_ms * 50;
+
+ var timer = try Timer.start();
+ sleep(0, 10 * ns_per_ms);
+ const time_0 = timer.read();
+ debug.assert(time_0 > 0 and time_0 < margin);
+
+ const time_1 = timer.lap();
+ debug.assert(time_1 >= time_0);
+
+ timer.reset();
+ debug.assert(timer.read() < time_1);
+}
diff --git a/std/os/windows/index.zig b/std/os/windows/index.zig
index 2709cf2a78..e13ed0f131 100644
--- a/std/os/windows/index.zig
+++ b/std/os/windows/index.zig
@@ -28,6 +28,9 @@ pub extern "kernel32" stdcallcc fn CreateProcessA(lpApplicationName: ?LPCSTR, lp
pub extern "kernel32" stdcallcc fn CreateSymbolicLinkA(lpSymlinkFileName: LPCSTR, lpTargetFileName: LPCSTR,
dwFlags: DWORD) BOOLEAN;
+
+pub extern "kernel32" stdcallcc fn CreateThread(lpThreadAttributes: ?LPSECURITY_ATTRIBUTES, dwStackSize: SIZE_T, lpStartAddress: LPTHREAD_START_ROUTINE, lpParameter: ?LPVOID, dwCreationFlags: DWORD, lpThreadId: ?LPDWORD) ?HANDLE;
+
pub extern "kernel32" stdcallcc fn DeleteFileA(lpFileName: LPCSTR) BOOL;
pub extern "kernel32" stdcallcc fn ExitProcess(exit_code: UINT) noreturn;
@@ -61,6 +64,8 @@ pub extern "kernel32" stdcallcc fn GetFinalPathNameByHandleA(hFile: HANDLE, lpsz
pub extern "kernel32" stdcallcc fn GetProcessHeap() ?HANDLE;
+pub extern "kernel32" stdcallcc fn GetSystemTimeAsFileTime(?&FILETIME) void;
+
pub extern "kernel32" stdcallcc fn HeapCreate(flOptions: DWORD, dwInitialSize: SIZE_T, dwMaximumSize: SIZE_T) ?HANDLE;
pub extern "kernel32" stdcallcc fn HeapDestroy(hHeap: HANDLE) BOOL;
pub extern "kernel32" stdcallcc fn HeapReAlloc(hHeap: HANDLE, dwFlags: DWORD, lpMem: &c_void, dwBytes: SIZE_T) ?&c_void;
@@ -77,6 +82,12 @@ pub extern "kernel32" stdcallcc fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem
pub extern "kernel32" stdcallcc fn MoveFileExA(lpExistingFileName: LPCSTR, lpNewFileName: LPCSTR,
dwFlags: DWORD) BOOL;
+
+pub extern "kernel32" stdcallcc fn QueryPerformanceCounter(lpPerformanceCount: &LARGE_INTEGER) BOOL;
+
+pub extern "kernel32" stdcallcc fn QueryPerformanceFrequency(lpFrequency: &LARGE_INTEGER) BOOL;
+
+pub extern "kernel32" stdcallcc fn PathFileExists(pszPath: ?LPCTSTR) BOOL;
pub extern "kernel32" stdcallcc fn ReadFile(in_hFile: HANDLE, out_lpBuffer: &c_void,
in_nNumberOfBytesToRead: DWORD, out_lpNumberOfBytesRead: &DWORD,
@@ -137,6 +148,7 @@ pub const UNICODE = false;
pub const WCHAR = u16;
pub const WORD = u16;
pub const LARGE_INTEGER = i64;
+pub const FILETIME = i64;
pub const TRUE = 1;
pub const FALSE = 0;
@@ -308,3 +320,10 @@ pub const FILE_END = 2;
pub const HEAP_CREATE_ENABLE_EXECUTE = 0x00040000;
pub const HEAP_GENERATE_EXCEPTIONS = 0x00000004;
pub const HEAP_NO_SERIALIZE = 0x00000001;
+
+pub const PTHREAD_START_ROUTINE = extern fn(LPVOID) DWORD;
+pub const LPTHREAD_START_ROUTINE = PTHREAD_START_ROUTINE;
+
+test "import" {
+ _ = @import("util.zig");
+}
diff --git a/std/rand/index.zig b/std/rand/index.zig
index 6a746fce92..bd6209009e 100644
--- a/std/rand/index.zig
+++ b/std/rand/index.zig
@@ -19,6 +19,7 @@ const builtin = @import("builtin");
const assert = std.debug.assert;
const mem = std.mem;
const math = std.math;
+const ziggurat = @import("ziggurat.zig");
// When you need fast unbiased random numbers
pub const DefaultPrng = Xoroshiro128;
@@ -109,15 +110,28 @@ pub const Random = struct {
}
}
- /// Return a floating point value normally distributed in the range [0, 1].
+ /// Return a floating point value normally distributed with mean = 0, stddev = 1.
+ ///
+ /// To use different parameters, use: floatNorm(...) * desiredStddev + desiredMean.
pub fn floatNorm(r: &Random, comptime T: type) T {
- // TODO(tiehuis): See https://www.doornik.com/research/ziggurat.pdf
- @compileError("floatNorm is unimplemented");
+ const value = ziggurat.next_f64(r, ziggurat.NormDist);
+ switch (T) {
+ f32 => return f32(value),
+ f64 => return value,
+ else => @compileError("unknown floating point type"),
+ }
}
- /// Return a exponentially distributed float between (0, @maxValue(f64))
+ /// Return an exponentially distributed float with a rate parameter of 1.
+ ///
+ /// To use a different rate parameter, use: floatExp(...) / desiredRate.
pub fn floatExp(r: &Random, comptime T: type) T {
- @compileError("floatExp is unimplemented");
+ const value = ziggurat.next_f64(r, ziggurat.ExpDist);
+ switch (T) {
+ f32 => return f32(value),
+ f64 => return value,
+ else => @compileError("unknown floating point type"),
+ }
}
/// Shuffle a slice into a random order.
diff --git a/std/rand/ziggurat.zig b/std/rand/ziggurat.zig
new file mode 100644
index 0000000000..7790b71d26
--- /dev/null
+++ b/std/rand/ziggurat.zig
@@ -0,0 +1,146 @@
+// Implements ZIGNOR [1].
+//
+// [1]: Jurgen A. Doornik (2005). [*An Improved Ziggurat Method to Generate Normal Random Samples*]
+// (https://www.doornik.com/research/ziggurat.pdf). Nuffield College, Oxford.
+//
+// rust/rand used as a reference;
+//
+// NOTE: This seems interesting but reference code is a bit hard to grok:
+// https://sbarral.github.io/etf.
+
+const std = @import("../index.zig");
+const math = std.math;
+const Random = std.rand.Random;
+
+pub fn next_f64(random: &Random, comptime tables: &const ZigTable) f64 {
+ while (true) {
+ // We manually construct a float from parts as we can avoid an extra random lookup here by
+ // using the unused exponent for the lookup table entry.
+ const bits = random.scalar(u64);
+ const i = usize(bits & 0xff);
+
+ const u = blk: {
+ if (tables.is_symmetric) {
+ // Generate a value in the range [2, 4) and scale into [-1, 1)
+ const repr = ((0x3ff + 1) << 52) | (bits >> 12);
+ break :blk @bitCast(f64, repr) - 3.0;
+ } else {
+ // Generate a value in the range [1, 2) and scale into (0, 1)
+ const repr = (0x3ff << 52) | (bits >> 12);
+ break :blk @bitCast(f64, repr) - (1.0 - math.f64_epsilon / 2.0);
+ }
+ };
+
+ const x = u * tables.x[i];
+ const test_x = if (tables.is_symmetric) math.fabs(x) else x;
+
+ // equivalent to |u| < tables.x[i+1] / tables.x[i] (or u < tables.x[i+1] / tables.x[i])
+ if (test_x < tables.x[i + 1]) {
+ return x;
+ }
+
+ if (i == 0) {
+ return tables.zero_case(random, u);
+ }
+
+ // equivalent to f1 + DRanU() * (f0 - f1) < 1
+ if (tables.f[i + 1] + (tables.f[i] - tables.f[i + 1]) * random.float(f64) < tables.pdf(x)) {
+ return x;
+ }
+ }
+}
+
+pub const ZigTable = struct {
+ r: f64,
+ x: [257]f64,
+ f: [257]f64,
+
+ // probability density function used as a fallback
+ pdf: fn(f64) f64,
+ // whether the distribution is symmetric
+ is_symmetric: bool,
+ // fallback calculation in the case we are in the 0 block
+ zero_case: fn(&Random, f64) f64,
+};
+
+// zigNorInit
+fn ZigTableGen(comptime is_symmetric: bool, comptime r: f64, comptime v: f64, comptime f: fn(f64) f64,
+ comptime f_inv: fn(f64) f64, comptime zero_case: fn(&Random, f64) f64) ZigTable {
+ var tables: ZigTable = undefined;
+
+ tables.is_symmetric = is_symmetric;
+ tables.r = r;
+ tables.pdf = f;
+ tables.zero_case = zero_case;
+
+ tables.x[0] = v / f(r);
+ tables.x[1] = r;
+
+ for (tables.x[2..256]) |*entry, i| {
+ const last = tables.x[2 + i - 1];
+ *entry = f_inv(v / last + f(last));
+ }
+ tables.x[256] = 0;
+
+ for (tables.f[0..]) |*entry, i| {
+ *entry = f(tables.x[i]);
+ }
+
+ return tables;
+}
+
+// N(0, 1)
+pub const NormDist = blk: {
+ @setEvalBranchQuota(30000);
+ break :blk ZigTableGen(true, norm_r, norm_v, norm_f, norm_f_inv, norm_zero_case);
+};
+
+const norm_r = 3.6541528853610088;
+const norm_v = 0.00492867323399;
+
+fn norm_f(x: f64) f64 { return math.exp(-x * x / 2.0); }
+fn norm_f_inv(y: f64) f64 { return math.sqrt(-2.0 * math.ln(y)); }
+fn norm_zero_case(random: &Random, u: f64) f64 {
+ var x: f64 = 1;
+ var y: f64 = 0;
+
+ while (-2.0 * y < x * x) {
+ x = math.ln(random.float(f64)) / norm_r;
+ y = math.ln(random.float(f64));
+ }
+
+ if (u < 0) {
+ return x - norm_r;
+ } else {
+ return norm_r - x;
+ }
+}
+
+test "ziggurat normal dist sanity" {
+ var prng = std.rand.DefaultPrng.init(0);
+ var i: usize = 0;
+ while (i < 1000) : (i += 1) {
+ _ = prng.random.floatNorm(f64);
+ }
+}
+
+// Exp(1)
+pub const ExpDist = blk: {
+ @setEvalBranchQuota(30000);
+ break :blk ZigTableGen(false, exp_r, exp_v, exp_f, exp_f_inv, exp_zero_case);
+};
+
+const exp_r = 7.69711747013104972;
+const exp_v = 0.0039496598225815571993;
+
+fn exp_f(x: f64) f64 { return math.exp(-x); }
+fn exp_f_inv(y: f64) f64 { return -math.ln(y); }
+fn exp_zero_case(random: &Random, _: f64) f64 { return exp_r - math.ln(random.float(f64)); }
+
+test "ziggurat exp dist sanity" {
+ var prng = std.rand.DefaultPrng.init(0);
+ var i: usize = 0;
+ while (i < 1000) : (i += 1) {
+ _ = prng.random.floatExp(f64);
+ }
+}
diff --git a/std/segmented_list.zig b/std/segmented_list.zig
new file mode 100644
index 0000000000..a89d332556
--- /dev/null
+++ b/std/segmented_list.zig
@@ -0,0 +1,379 @@
+const std = @import("index.zig");
+const assert = std.debug.assert;
+const Allocator = std.mem.Allocator;
+
+// Imagine that `fn at(self: &Self, index: usize) &T` is a customer asking for a box
+// from a warehouse, based on a flat array, boxes ordered from 0 to N - 1.
+// But the warehouse actually stores boxes in shelves of increasing powers of 2 sizes.
+// So when the customer requests a box index, we have to translate it to shelf index
+// and box index within that shelf. Illustration:
+//
+// customer indexes:
+// shelf 0: 0
+// shelf 1: 1 2
+// shelf 2: 3 4 5 6
+// shelf 3: 7 8 9 10 11 12 13 14
+// shelf 4: 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
+// shelf 5: 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62
+// ...
+//
+// warehouse indexes:
+// shelf 0: 0
+// shelf 1: 0 1
+// shelf 2: 0 1 2 3
+// shelf 3: 0 1 2 3 4 5 6 7
+// shelf 4: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+// shelf 5: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
+// ...
+//
+// With this arrangement, here are the equations to get the shelf index and
+// box index based on customer box index:
+//
+// shelf_index = floor(log2(customer_index + 1))
+// shelf_count = ceil(log2(box_count + 1))
+// box_index = customer_index + 1 - 2 ** shelf_index
+// shelf_size = 2 ** shelf_index
+//
+// Now we complicate it a little bit further by adding a preallocated shelf, which must be
+// a power of 2:
+// prealloc=4
+//
+// customer indexes:
+// prealloc: 0 1 2 3
+// shelf 0: 4 5 6 7 8 9 10 11
+// shelf 1: 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27
+// shelf 2: 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59
+// ...
+//
+// warehouse indexes:
+// prealloc: 0 1 2 3
+// shelf 0: 0 1 2 3 4 5 6 7
+// shelf 1: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+// shelf 2: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
+// ...
+//
+// Now the equations are:
+//
+// shelf_index = floor(log2(customer_index + prealloc)) - log2(prealloc) - 1
+// shelf_count = ceil(log2(box_count + prealloc)) - log2(prealloc) - 1
+// box_index = customer_index + prealloc - 2 ** (log2(prealloc) + 1 + shelf_index)
+// shelf_size = prealloc * 2 ** (shelf_index + 1)
+
+/// This is a stack data structure where pointers to indexes have the same lifetime as the data structure
+/// itself, unlike ArrayList where push() invalidates all existing element pointers.
+/// The tradeoff is that elements are not guaranteed to be contiguous. For that, use ArrayList.
+/// Note however that most elements are contiguous, making this data structure cache-friendly.
+///
+/// Because it never has to copy elements from an old location to a new location, it does not require
+/// its elements to be copyable, and it avoids wasting memory when backed by an ArenaAllocator.
+/// Note that the push() and pop() convenience methods perform a copy, but you can instead use
+/// addOne(), at(), setCapacity(), and shrinkCapacity() to avoid copying items.
+///
+/// This data structure has O(1) push and O(1) pop.
+///
+/// It supports preallocated elements, making it especially well suited when the expected maximum
+/// size is small. `prealloc_item_count` must be 0, or a power of 2.
+pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type {
+ return struct {
+ const Self = this;
+ const prealloc_exp = blk: {
+ // we don't use the prealloc_exp constant when prealloc_item_count is 0.
+ assert(prealloc_item_count != 0);
+
+ const value = std.math.log2_int(usize, prealloc_item_count);
+ assert((1 << value) == prealloc_item_count); // prealloc_item_count must be a power of 2
+ break :blk @typeOf(1)(value);
+ };
+ const ShelfIndex = std.math.Log2Int(usize);
+
+ prealloc_segment: [prealloc_item_count]T,
+ dynamic_segments: []&T,
+ allocator: &Allocator,
+ len: usize,
+
+ pub const prealloc_count = prealloc_item_count;
+
+ /// Deinitialize with `deinit`
+ pub fn init(allocator: &Allocator) Self {
+ return Self {
+ .allocator = allocator,
+ .len = 0,
+ .prealloc_segment = undefined,
+ .dynamic_segments = []&T{},
+ };
+ }
+
+ pub fn deinit(self: &Self) void {
+ self.freeShelves(ShelfIndex(self.dynamic_segments.len), 0);
+ self.allocator.free(self.dynamic_segments);
+ *self = undefined;
+ }
+
+ pub fn at(self: &Self, i: usize) &T {
+ assert(i < self.len);
+ return self.uncheckedAt(i);
+ }
+
+ pub fn count(self: &const Self) usize {
+ return self.len;
+ }
+
+ pub fn push(self: &Self, item: &const T) !void {
+ const new_item_ptr = try self.addOne();
+ *new_item_ptr = *item;
+ }
+
+ pub fn pushMany(self: &Self, items: []const T) !void {
+ for (items) |item| {
+ try self.push(item);
+ }
+ }
+
+ pub fn pop(self: &Self) ?T {
+ if (self.len == 0)
+ return null;
+
+ const index = self.len - 1;
+ const result = *self.uncheckedAt(index);
+ self.len = index;
+ return result;
+ }
+
+ pub fn addOne(self: &Self) !&T {
+ const new_length = self.len + 1;
+ try self.growCapacity(new_length);
+ const result = self.uncheckedAt(self.len);
+ self.len = new_length;
+ return result;
+ }
+
+ /// Grows or shrinks capacity to match usage.
+ pub fn setCapacity(self: &Self, new_capacity: usize) !void {
+ if (new_capacity <= usize(1) << (prealloc_exp + self.dynamic_segments.len)) {
+ return self.shrinkCapacity(new_capacity);
+ } else {
+ return self.growCapacity(new_capacity);
+ }
+ }
+
+ /// Only grows capacity, or retains current capacity
+ pub fn growCapacity(self: &Self, new_capacity: usize) !void {
+ const new_cap_shelf_count = shelfCount(new_capacity);
+ const old_shelf_count = ShelfIndex(self.dynamic_segments.len);
+ if (new_cap_shelf_count > old_shelf_count) {
+ self.dynamic_segments = try self.allocator.realloc(&T, self.dynamic_segments, new_cap_shelf_count);
+ var i = old_shelf_count;
+ errdefer {
+ self.freeShelves(i, old_shelf_count);
+ self.dynamic_segments = self.allocator.shrink(&T, self.dynamic_segments, old_shelf_count);
+ }
+ while (i < new_cap_shelf_count) : (i += 1) {
+ self.dynamic_segments[i] = (try self.allocator.alloc(T, shelfSize(i))).ptr;
+ }
+ }
+ }
+
+ /// Only shrinks capacity or retains current capacity
+ pub fn shrinkCapacity(self: &Self, new_capacity: usize) void {
+ if (new_capacity <= prealloc_item_count) {
+ const len = ShelfIndex(self.dynamic_segments.len);
+ self.freeShelves(len, 0);
+ self.allocator.free(self.dynamic_segments);
+ self.dynamic_segments = []&T{};
+ return;
+ }
+
+ const new_cap_shelf_count = shelfCount(new_capacity);
+ const old_shelf_count = ShelfIndex(self.dynamic_segments.len);
+ assert(new_cap_shelf_count <= old_shelf_count);
+ if (new_cap_shelf_count == old_shelf_count) {
+ return;
+ }
+
+ self.freeShelves(old_shelf_count, new_cap_shelf_count);
+ self.dynamic_segments = self.allocator.shrink(&T, self.dynamic_segments, new_cap_shelf_count);
+ }
+
+ pub fn uncheckedAt(self: &Self, index: usize) &T {
+ if (index < prealloc_item_count) {
+ return &self.prealloc_segment[index];
+ }
+ const shelf_index = shelfIndex(index);
+ const box_index = boxIndex(index, shelf_index);
+ return &self.dynamic_segments[shelf_index][box_index];
+ }
+
+ fn shelfCount(box_count: usize) ShelfIndex {
+ if (prealloc_item_count == 0) {
+ return std.math.log2_int_ceil(usize, box_count + 1);
+ }
+ return std.math.log2_int_ceil(usize, box_count + prealloc_item_count) - prealloc_exp - 1;
+ }
+
+ fn shelfSize(shelf_index: ShelfIndex) usize {
+ if (prealloc_item_count == 0) {
+ return usize(1) << shelf_index;
+ }
+ return usize(1) << (shelf_index + (prealloc_exp + 1));
+ }
+
+ fn shelfIndex(list_index: usize) ShelfIndex {
+ if (prealloc_item_count == 0) {
+ return std.math.log2_int(usize, list_index + 1);
+ }
+ return std.math.log2_int(usize, list_index + prealloc_item_count) - prealloc_exp - 1;
+ }
+
+ fn boxIndex(list_index: usize, shelf_index: ShelfIndex) usize {
+ if (prealloc_item_count == 0) {
+ return (list_index + 1) - (usize(1) << shelf_index);
+ }
+ return list_index + prealloc_item_count - (usize(1) << ((prealloc_exp + 1) + shelf_index));
+ }
+
+ fn freeShelves(self: &Self, from_count: ShelfIndex, to_count: ShelfIndex) void {
+ var i = from_count;
+ while (i != to_count) {
+ i -= 1;
+ self.allocator.free(self.dynamic_segments[i][0..shelfSize(i)]);
+ }
+ }
+
+ pub const Iterator = struct {
+ list: &Self,
+ index: usize,
+ box_index: usize,
+ shelf_index: ShelfIndex,
+ shelf_size: usize,
+
+ pub fn next(it: &Iterator) ?&T {
+ if (it.index >= it.list.len)
+ return null;
+ if (it.index < prealloc_item_count) {
+ const ptr = &it.list.prealloc_segment[it.index];
+ it.index += 1;
+ if (it.index == prealloc_item_count) {
+ it.box_index = 0;
+ it.shelf_index = 0;
+ it.shelf_size = prealloc_item_count * 2;
+ }
+ return ptr;
+ }
+
+ const ptr = &it.list.dynamic_segments[it.shelf_index][it.box_index];
+ it.index += 1;
+ it.box_index += 1;
+ if (it.box_index == it.shelf_size) {
+ it.shelf_index += 1;
+ it.box_index = 0;
+ it.shelf_size *= 2;
+ }
+ return ptr;
+ }
+
+ pub fn prev(it: &Iterator) ?&T {
+ if (it.index == 0)
+ return null;
+
+ it.index -= 1;
+ if (it.index < prealloc_item_count)
+ return &it.list.prealloc_segment[it.index];
+
+ if (it.box_index == 0) {
+ it.shelf_index -= 1;
+ it.shelf_size /= 2;
+ it.box_index = it.shelf_size - 1;
+ } else {
+ it.box_index -= 1;
+ }
+
+ return &it.list.dynamic_segments[it.shelf_index][it.box_index];
+ }
+
+ pub fn peek(it: &Iterator) ?&T {
+ if (it.index >= it.list.len)
+ return null;
+ if (it.index < prealloc_item_count)
+ return &it.list.prealloc_segment[it.index];
+
+ return &it.list.dynamic_segments[it.shelf_index][it.box_index];
+ }
+ };
+
+ pub fn iterator(self: &Self, start_index: usize) Iterator {
+ var it = Iterator {
+ .list = self,
+ .index = start_index,
+ .shelf_index = undefined,
+ .box_index = undefined,
+ .shelf_size = undefined,
+ };
+ if (start_index >= prealloc_item_count) {
+ it.shelf_index = shelfIndex(start_index);
+ it.box_index = boxIndex(start_index, it.shelf_index);
+ it.shelf_size = shelfSize(it.shelf_index);
+ }
+ return it;
+ }
+ };
+}
+
+test "std.SegmentedList" {
+ var da = std.heap.DirectAllocator.init();
+ defer da.deinit();
+ var a = &da.allocator;
+
+ try testSegmentedList(0, a);
+ try testSegmentedList(1, a);
+ try testSegmentedList(2, a);
+ try testSegmentedList(4, a);
+ try testSegmentedList(8, a);
+ try testSegmentedList(16, a);
+}
+
+fn testSegmentedList(comptime prealloc: usize, allocator: &Allocator) !void {
+ var list = SegmentedList(i32, prealloc).init(allocator);
+ defer list.deinit();
+
+ {var i: usize = 0; while (i < 100) : (i += 1) {
+ try list.push(i32(i + 1));
+ assert(list.len == i + 1);
+ }}
+
+ {var i: usize = 0; while (i < 100) : (i += 1) {
+ assert(*list.at(i) == i32(i + 1));
+ }}
+
+ {
+ var it = list.iterator(0);
+ var x: i32 = 0;
+ while (it.next()) |item| {
+ x += 1;
+ assert(*item == x);
+ }
+ assert(x == 100);
+ while (it.prev()) |item| : (x -= 1) {
+ assert(*item == x);
+ }
+ assert(x == 0);
+ }
+
+ assert(??list.pop() == 100);
+ assert(list.len == 99);
+
+ try list.pushMany([]i32 { 1, 2, 3 });
+ assert(list.len == 102);
+ assert(??list.pop() == 3);
+ assert(??list.pop() == 2);
+ assert(??list.pop() == 1);
+ assert(list.len == 99);
+
+ try list.pushMany([]const i32 {});
+ assert(list.len == 99);
+
+ var i: i32 = 99;
+ while (list.pop()) |item| : (i -= 1) {
+ assert(item == i);
+ list.shrinkCapacity(list.len);
+ }
+}
diff --git a/std/special/bootstrap.zig b/std/special/bootstrap.zig
index d2c22c13e1..1dc7e24869 100644
--- a/std/special/bootstrap.zig
+++ b/std/special/bootstrap.zig
@@ -48,22 +48,33 @@ extern fn WinMainCRTStartup() noreturn {
fn posixCallMainAndExit() noreturn {
const argc = *argc_ptr;
const argv = @ptrCast(&&u8, &argc_ptr[1]);
- const envp = @ptrCast(&?&u8, &argv[argc + 1]);
+ const envp_nullable = @ptrCast(&?&u8, &argv[argc + 1]);
+ var envp_count: usize = 0;
+ while (envp_nullable[envp_count]) |_| : (envp_count += 1) {}
+ const envp = @ptrCast(&&u8, envp_nullable)[0..envp_count];
+ if (builtin.os == builtin.Os.linux) {
+ const auxv = &@ptrCast(&usize, envp.ptr)[envp_count + 1];
+ var i: usize = 0;
+ while (auxv[i] != 0) : (i += 2) {
+ if (auxv[i] < std.os.linux_aux_raw.len) std.os.linux_aux_raw[auxv[i]] = auxv[i+1];
+ }
+ std.debug.assert(std.os.linux_aux_raw[std.elf.AT_PAGESZ] == std.os.page_size);
+ }
+
std.os.posix.exit(callMainWithArgs(argc, argv, envp));
}
-fn callMainWithArgs(argc: usize, argv: &&u8, envp: &?&u8) u8 {
+fn callMainWithArgs(argc: usize, argv: &&u8, envp: []&u8) u8 {
std.os.ArgIteratorPosix.raw = argv[0..argc];
-
- var env_count: usize = 0;
- while (envp[env_count] != null) : (env_count += 1) {}
- std.os.posix_environ_raw = @ptrCast(&&u8, envp)[0..env_count];
-
+ std.os.posix_environ_raw = envp;
return callMain();
}
extern fn main(c_argc: i32, c_argv: &&u8, c_envp: &?&u8) i32 {
- return callMainWithArgs(usize(c_argc), c_argv, c_envp);
+ var env_count: usize = 0;
+ while (c_envp[env_count] != null) : (env_count += 1) {}
+ const envp = @ptrCast(&&u8, c_envp)[0..env_count];
+ return callMainWithArgs(usize(c_argc), c_argv, envp);
}
fn callMain() u8 {
diff --git a/std/special/bootstrap_lib.zig b/std/special/bootstrap_lib.zig
index 40b6588838..f55aaed96a 100644
--- a/std/special/bootstrap_lib.zig
+++ b/std/special/bootstrap_lib.zig
@@ -1,9 +1,10 @@
// This file is included in the compilation unit when exporting a library on windows.
const std = @import("std");
+const builtin = @import("builtin");
comptime {
- @export("_DllMainCRTStartup", _DllMainCRTStartup);
+ @export("_DllMainCRTStartup", _DllMainCRTStartup, builtin.GlobalLinkage.Strong);
}
stdcallcc fn _DllMainCRTStartup(hinstDLL: std.os.windows.HINSTANCE, fdwReason: std.os.windows.DWORD,
diff --git a/std/special/builtin.zig b/std/special/builtin.zig
index 268d0ab545..a5126bc4f3 100644
--- a/std/special/builtin.zig
+++ b/std/special/builtin.zig
@@ -14,37 +14,91 @@ pub fn panic(msg: []const u8, error_return_trace: ?&builtin.StackTrace) noreturn
}
}
-// Note that memset does not return `dest`, like the libc API.
-// The semantics of memset is dictated by the corresponding
-// LLVM intrinsics, not by the libc API.
-export fn memset(dest: ?&u8, c: u8, n: usize) void {
+export fn memset(dest: ?&u8, c: u8, n: usize) ?&u8 {
@setRuntimeSafety(false);
var index: usize = 0;
while (index != n) : (index += 1)
(??dest)[index] = c;
+
+ return dest;
}
-// Note that memcpy does not return `dest`, like the libc API.
-// The semantics of memcpy is dictated by the corresponding
-// LLVM intrinsics, not by the libc API.
-export fn memcpy(noalias dest: ?&u8, noalias src: ?&const u8, n: usize) void {
+export fn memcpy(noalias dest: ?&u8, noalias src: ?&const u8, n: usize) ?&u8 {
@setRuntimeSafety(false);
var index: usize = 0;
while (index != n) : (index += 1)
(??dest)[index] = (??src)[index];
+
+ return dest;
+}
+
+export fn memmove(dest: ?&u8, src: ?&const u8, n: usize) ?&u8 {
+ @setRuntimeSafety(false);
+
+ if (@ptrToInt(dest) < @ptrToInt(src)) {
+ var index: usize = 0;
+ while (index != n) : (index += 1) {
+ (??dest)[index] = (??src)[index];
+ }
+ } else {
+ var index = n;
+ while (index != 0) {
+ index -= 1;
+ (??dest)[index] = (??src)[index];
+ }
+ }
+
+ return dest;
}
comptime {
- if (builtin.mode != builtin.Mode.ReleaseFast and builtin.os != builtin.Os.windows) {
+ if (builtin.mode != builtin.Mode.ReleaseFast and
+ builtin.mode != builtin.Mode.ReleaseSmall and
+ builtin.os != builtin.Os.windows) {
@export("__stack_chk_fail", __stack_chk_fail, builtin.GlobalLinkage.Strong);
}
+ if (builtin.os == builtin.Os.linux and builtin.arch == builtin.Arch.x86_64) {
+ @export("clone", clone, builtin.GlobalLinkage.Strong);
+ }
}
extern fn __stack_chk_fail() noreturn {
@panic("stack smashing detected");
}
+// TODO we should be able to put this directly in std/linux/x86_64.zig but
+// it causes a segfault in release mode. this is a workaround of calling it
+// across .o file boundaries. fix comptime @ptrCast of nakedcc functions.
+nakedcc fn clone() void {
+ asm volatile (
+ \\ xor %%eax,%%eax
+ \\ mov $56,%%al
+ \\ mov %%rdi,%%r11
+ \\ mov %%rdx,%%rdi
+ \\ mov %%r8,%%rdx
+ \\ mov %%r9,%%r8
+ \\ mov 8(%%rsp),%%r10
+ \\ mov %%r11,%%r9
+ \\ and $-16,%%rsi
+ \\ sub $8,%%rsi
+ \\ mov %%rcx,(%%rsi)
+ \\ syscall
+ \\ test %%eax,%%eax
+ \\ jnz 1f
+ \\ xor %%ebp,%%ebp
+ \\ pop %%rdi
+ \\ call *%%r9
+ \\ mov %%eax,%%edi
+ \\ xor %%eax,%%eax
+ \\ mov $60,%%al
+ \\ syscall
+ \\ hlt
+ \\1: ret
+ \\
+ );
+}
+
const math = @import("../math/index.zig");
export fn fmodf(x: f32, y: f32) f32 { return generic_fmod(f32, x, y); }
@@ -142,3 +196,212 @@ fn isNan(comptime T: type, bits: T) bool {
unreachable;
}
}
+
+// NOTE: The original code is full of implicit signed -> unsigned assumptions and u32 wraparound
+// behaviour. Most intermediate i32 values are changed to u32 where appropriate but there are
+// potentially some edge cases remaining that are not handled in the same way.
+export fn sqrt(x: f64) f64 {
+ const tiny: f64 = 1.0e-300;
+ const sign: u32 = 0x80000000;
+ const u = @bitCast(u64, x);
+
+ var ix0 = u32(u >> 32);
+ var ix1 = u32(u & 0xFFFFFFFF);
+
+ // sqrt(nan) = nan, sqrt(+inf) = +inf, sqrt(-inf) = nan
+ if (ix0 & 0x7FF00000 == 0x7FF00000) {
+ return x * x + x;
+ }
+
+ // sqrt(+-0) = +-0
+ if (x == 0.0) {
+ return x;
+ }
+ // sqrt(-ve) = snan
+ if (ix0 & sign != 0) {
+ return math.snan(f64);
+ }
+
+ // normalize x
+ var m = i32(ix0 >> 20);
+ if (m == 0) {
+ // subnormal
+ while (ix0 == 0) {
+ m -= 21;
+ ix0 |= ix1 >> 11;
+ ix1 <<= 21;
+ }
+
+ // subnormal
+ var i: u32 = 0;
+ while (ix0 & 0x00100000 == 0) : (i += 1) {
+ ix0 <<= 1;
+ }
+ m -= i32(i) - 1;
+ ix0 |= ix1 >> u5(32 - i);
+ ix1 <<= u5(i);
+ }
+
+ // unbias exponent
+ m -= 1023;
+ ix0 = (ix0 & 0x000FFFFF) | 0x00100000;
+ if (m & 1 != 0) {
+ ix0 += ix0 + (ix1 >> 31);
+ ix1 = ix1 +% ix1;
+ }
+ m >>= 1;
+
+ // sqrt(x) bit by bit
+ ix0 += ix0 + (ix1 >> 31);
+ ix1 = ix1 +% ix1;
+
+ var q: u32 = 0;
+ var q1: u32 = 0;
+ var s0: u32 = 0;
+ var s1: u32 = 0;
+ var r: u32 = 0x00200000;
+ var t: u32 = undefined;
+ var t1: u32 = undefined;
+
+ while (r != 0) {
+ t = s0 +% r;
+ if (t <= ix0) {
+ s0 = t + r;
+ ix0 -= t;
+ q += r;
+ }
+ ix0 = ix0 +% ix0 +% (ix1 >> 31);
+ ix1 = ix1 +% ix1;
+ r >>= 1;
+ }
+
+ r = sign;
+ while (r != 0) {
+ t = s1 +% r;
+ t = s0;
+ if (t < ix0 or (t == ix0 and t1 <= ix1)) {
+ s1 = t1 +% r;
+ if (t1 & sign == sign and s1 & sign == 0) {
+ s0 += 1;
+ }
+ ix0 -= t;
+ if (ix1 < t1) {
+ ix0 -= 1;
+ }
+ ix1 = ix1 -% t1;
+ q1 += r;
+ }
+ ix0 = ix0 +% ix0 +% (ix1 >> 31);
+ ix1 = ix1 +% ix1;
+ r >>= 1;
+ }
+
+ // rounding direction
+ if (ix0 | ix1 != 0) {
+ var z = 1.0 - tiny; // raise inexact
+ if (z >= 1.0) {
+ z = 1.0 + tiny;
+ if (q1 == 0xFFFFFFFF) {
+ q1 = 0;
+ q += 1;
+ } else if (z > 1.0) {
+ if (q1 == 0xFFFFFFFE) {
+ q += 1;
+ }
+ q1 += 2;
+ } else {
+ q1 += q1 & 1;
+ }
+ }
+ }
+
+ ix0 = (q >> 1) + 0x3FE00000;
+ ix1 = q1 >> 1;
+ if (q & 1 != 0) {
+ ix1 |= 0x80000000;
+ }
+
+ // NOTE: musl here appears to rely on signed twos-complement wraparound. +% has the same
+ // behaviour at least.
+ var iix0 = i32(ix0);
+ iix0 = iix0 +% (m << 20);
+
+ const uz = (u64(iix0) << 32) | ix1;
+ return @bitCast(f64, uz);
+}
+
+export fn sqrtf(x: f32) f32 {
+ const tiny: f32 = 1.0e-30;
+ const sign: i32 = @bitCast(i32, u32(0x80000000));
+ var ix: i32 = @bitCast(i32, x);
+
+ if ((ix & 0x7F800000) == 0x7F800000) {
+ return x * x + x; // sqrt(nan) = nan, sqrt(+inf) = +inf, sqrt(-inf) = snan
+ }
+
+ // zero
+ if (ix <= 0) {
+ if (ix & ~sign == 0) {
+ return x; // sqrt (+-0) = +-0
+ }
+ if (ix < 0) {
+ return math.snan(f32);
+ }
+ }
+
+ // normalize
+ var m = ix >> 23;
+ if (m == 0) {
+ // subnormal
+ var i: i32 = 0;
+ while (ix & 0x00800000 == 0) : (i += 1) {
+ ix <<= 1;
+ }
+ m -= i - 1;
+ }
+
+ m -= 127; // unbias exponent
+ ix = (ix & 0x007FFFFF) | 0x00800000;
+
+ if (m & 1 != 0) { // odd m, double x to even
+ ix += ix;
+ }
+
+ m >>= 1; // m = [m / 2]
+
+ // sqrt(x) bit by bit
+ ix += ix;
+ var q: i32 = 0; // q = sqrt(x)
+ var s: i32 = 0;
+ var r: i32 = 0x01000000; // r = moving bit right -> left
+
+ while (r != 0) {
+ const t = s + r;
+ if (t <= ix) {
+ s = t + r;
+ ix -= t;
+ q += r;
+ }
+ ix += ix;
+ r >>= 1;
+ }
+
+ // floating add to find rounding direction
+ if (ix != 0) {
+ var z = 1.0 - tiny; // inexact
+ if (z >= 1.0) {
+ z = 1.0 + tiny;
+ if (z > 1.0) {
+ q += 2;
+ } else {
+ if (q & 1 != 0) {
+ q += 1;
+ }
+ }
+ }
+ }
+
+ ix = (q >> 1) + 0x3f000000;
+ ix += m << 23;
+ return @bitCast(f32, ix);
+}
diff --git a/std/special/compiler_rt/fixuint.zig b/std/special/compiler_rt/fixuint.zig
index b01bc48118..37cec446bc 100644
--- a/std/special/compiler_rt/fixuint.zig
+++ b/std/special/compiler_rt/fixuint.zig
@@ -1,5 +1,5 @@
const is_test = @import("builtin").is_test;
-const Log2Int = @import("../../math/index.zig").Log2Int;
+const Log2Int = @import("std").math.Log2Int;
pub fn fixuint(comptime fp_t: type, comptime fixuint_t: type, a: fp_t) fixuint_t {
@setRuntimeSafety(is_test);
diff --git a/std/special/compiler_rt/fixunsdfdi_test.zig b/std/special/compiler_rt/fixunsdfdi_test.zig
index 3443a4938e..e59d09f8de 100644
--- a/std/special/compiler_rt/fixunsdfdi_test.zig
+++ b/std/special/compiler_rt/fixunsdfdi_test.zig
@@ -1,5 +1,5 @@
const __fixunsdfdi = @import("fixunsdfdi.zig").__fixunsdfdi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunsdfdi(a: f64, expected: u64) void {
const x = __fixunsdfdi(a);
diff --git a/std/special/compiler_rt/fixunsdfsi_test.zig b/std/special/compiler_rt/fixunsdfsi_test.zig
index 3c74bc5f4c..db6e32e23d 100644
--- a/std/special/compiler_rt/fixunsdfsi_test.zig
+++ b/std/special/compiler_rt/fixunsdfsi_test.zig
@@ -1,5 +1,5 @@
const __fixunsdfsi = @import("fixunsdfsi.zig").__fixunsdfsi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunsdfsi(a: f64, expected: u32) void {
const x = __fixunsdfsi(a);
diff --git a/std/special/compiler_rt/fixunsdfti_test.zig b/std/special/compiler_rt/fixunsdfti_test.zig
index 3cb7687887..7283b35c0e 100644
--- a/std/special/compiler_rt/fixunsdfti_test.zig
+++ b/std/special/compiler_rt/fixunsdfti_test.zig
@@ -1,5 +1,5 @@
const __fixunsdfti = @import("fixunsdfti.zig").__fixunsdfti;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunsdfti(a: f64, expected: u128) void {
const x = __fixunsdfti(a);
diff --git a/std/special/compiler_rt/fixunssfdi_test.zig b/std/special/compiler_rt/fixunssfdi_test.zig
index de27323777..e4e6c1736d 100644
--- a/std/special/compiler_rt/fixunssfdi_test.zig
+++ b/std/special/compiler_rt/fixunssfdi_test.zig
@@ -1,5 +1,5 @@
const __fixunssfdi = @import("fixunssfdi.zig").__fixunssfdi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunssfdi(a: f32, expected: u64) void {
const x = __fixunssfdi(a);
diff --git a/std/special/compiler_rt/fixunssfsi_test.zig b/std/special/compiler_rt/fixunssfsi_test.zig
index 47ed21d4f4..614c648dfe 100644
--- a/std/special/compiler_rt/fixunssfsi_test.zig
+++ b/std/special/compiler_rt/fixunssfsi_test.zig
@@ -1,5 +1,5 @@
const __fixunssfsi = @import("fixunssfsi.zig").__fixunssfsi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunssfsi(a: f32, expected: u32) void {
const x = __fixunssfsi(a);
diff --git a/std/special/compiler_rt/fixunssfti_test.zig b/std/special/compiler_rt/fixunssfti_test.zig
index 3033eb0def..43ad527f53 100644
--- a/std/special/compiler_rt/fixunssfti_test.zig
+++ b/std/special/compiler_rt/fixunssfti_test.zig
@@ -1,5 +1,5 @@
const __fixunssfti = @import("fixunssfti.zig").__fixunssfti;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunssfti(a: f32, expected: u128) void {
const x = __fixunssfti(a);
diff --git a/std/special/compiler_rt/fixunstfdi_test.zig b/std/special/compiler_rt/fixunstfdi_test.zig
index d1f5f6496a..dd0869195a 100644
--- a/std/special/compiler_rt/fixunstfdi_test.zig
+++ b/std/special/compiler_rt/fixunstfdi_test.zig
@@ -1,5 +1,5 @@
const __fixunstfdi = @import("fixunstfdi.zig").__fixunstfdi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunstfdi(a: f128, expected: u64) void {
const x = __fixunstfdi(a);
diff --git a/std/special/compiler_rt/fixunstfsi_test.zig b/std/special/compiler_rt/fixunstfsi_test.zig
index 8bdf36d9d4..f682191994 100644
--- a/std/special/compiler_rt/fixunstfsi_test.zig
+++ b/std/special/compiler_rt/fixunstfsi_test.zig
@@ -1,5 +1,5 @@
const __fixunstfsi = @import("fixunstfsi.zig").__fixunstfsi;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunstfsi(a: f128, expected: u32) void {
const x = __fixunstfsi(a);
diff --git a/std/special/compiler_rt/fixunstfti_test.zig b/std/special/compiler_rt/fixunstfti_test.zig
index d9eb60e59b..9128ac6c08 100644
--- a/std/special/compiler_rt/fixunstfti_test.zig
+++ b/std/special/compiler_rt/fixunstfti_test.zig
@@ -1,5 +1,5 @@
const __fixunstfti = @import("fixunstfti.zig").__fixunstfti;
-const assert = @import("../../index.zig").debug.assert;
+const assert = @import("std").debug.assert;
fn test__fixunstfti(a: f128, expected: u128) void {
const x = __fixunstfti(a);
diff --git a/std/special/compiler_rt/index.zig b/std/special/compiler_rt/index.zig
index 81fe1ffec1..9da9c3f083 100644
--- a/std/special/compiler_rt/index.zig
+++ b/std/special/compiler_rt/index.zig
@@ -32,10 +32,6 @@ comptime {
@export("__fixunstfti", @import("fixunstfti.zig").__fixunstfti, linkage);
@export("__udivmoddi4", @import("udivmoddi4.zig").__udivmoddi4, linkage);
- @export("__udivmodti4", @import("udivmodti4.zig").__udivmodti4, linkage);
-
- @export("__udivti3", @import("udivti3.zig").__udivti3, linkage);
- @export("__umodti3", @import("umodti3.zig").__umodti3, linkage);
@export("__udivsi3", __udivsi3, linkage);
@export("__udivdi3", __udivdi3, linkage);
@@ -62,13 +58,21 @@ comptime {
@export("__chkstk", __chkstk, strong_linkage);
@export("___chkstk_ms", ___chkstk_ms, linkage);
}
+ @export("__udivti3", @import("udivti3.zig").__udivti3_windows_x86_64, linkage);
+ @export("__udivmodti4", @import("udivmodti4.zig").__udivmodti4_windows_x86_64, linkage);
+ @export("__umodti3", @import("umodti3.zig").__umodti3_windows_x86_64, linkage);
},
else => {},
}
+ } else {
+ @export("__udivti3", @import("udivti3.zig").__udivti3, linkage);
+ @export("__udivmodti4", @import("udivmodti4.zig").__udivmodti4, linkage);
+ @export("__umodti3", @import("umodti3.zig").__umodti3, linkage);
}
}
-const assert = @import("../../index.zig").debug.assert;
+const std = @import("std");
+const assert = std.debug.assert;
const __udivmoddi4 = @import("udivmoddi4.zig").__udivmoddi4;
@@ -77,12 +81,22 @@ const __udivmoddi4 = @import("udivmoddi4.zig").__udivmoddi4;
pub fn panic(msg: []const u8, error_return_trace: ?&builtin.StackTrace) noreturn {
@setCold(true);
if (is_test) {
- @import("std").debug.panic("{}", msg);
+ std.debug.panic("{}", msg);
} else {
unreachable;
}
}
+pub fn setXmm0(comptime T: type, value: T) void {
+ comptime assert(builtin.arch == builtin.Arch.x86_64);
+ const aligned_value: T align(16) = value;
+ asm volatile (
+ \\movaps (%[ptr]), %%xmm0
+ :
+ : [ptr] "r" (&aligned_value)
+ : "xmm0");
+}
+
extern fn __udivdi3(a: u64, b: u64) u64 {
@setRuntimeSafety(is_test);
return __udivmoddi4(a, b, null);
diff --git a/std/special/compiler_rt/udivmod.zig b/std/special/compiler_rt/udivmod.zig
index 07eaef583c..7820c7beb0 100644
--- a/std/special/compiler_rt/udivmod.zig
+++ b/std/special/compiler_rt/udivmod.zig
@@ -9,7 +9,7 @@ pub fn udivmod(comptime DoubleInt: type, a: DoubleInt, b: DoubleInt, maybe_rem:
const SingleInt = @IntType(false, @divExact(DoubleInt.bit_count, 2));
const SignedDoubleInt = @IntType(true, DoubleInt.bit_count);
- const Log2SingleInt = @import("../../math/index.zig").Log2Int(SingleInt);
+ const Log2SingleInt = @import("std").math.Log2Int(SingleInt);
const n = *@ptrCast(&const [2]SingleInt, &a); // TODO issue #421
const d = *@ptrCast(&const [2]SingleInt, &b); // TODO issue #421
diff --git a/std/special/compiler_rt/udivmodti4.zig b/std/special/compiler_rt/udivmodti4.zig
index 196d067aef..f8fdebe4db 100644
--- a/std/special/compiler_rt/udivmodti4.zig
+++ b/std/special/compiler_rt/udivmodti4.zig
@@ -1,11 +1,17 @@
const udivmod = @import("udivmod.zig").udivmod;
const builtin = @import("builtin");
+const compiler_rt = @import("index.zig");
pub extern fn __udivmodti4(a: u128, b: u128, maybe_rem: ?&u128) u128 {
@setRuntimeSafety(builtin.is_test);
return udivmod(u128, a, b, maybe_rem);
}
+pub extern fn __udivmodti4_windows_x86_64(a: &const u128, b: &const u128, maybe_rem: ?&u128) void {
+ @setRuntimeSafety(builtin.is_test);
+ compiler_rt.setXmm0(u128, udivmod(u128, *a, *b, maybe_rem));
+}
+
test "import udivmodti4" {
_ = @import("udivmodti4_test.zig");
}
diff --git a/std/special/compiler_rt/udivti3.zig b/std/special/compiler_rt/udivti3.zig
index eaecbac4d2..ad0f09e733 100644
--- a/std/special/compiler_rt/udivti3.zig
+++ b/std/special/compiler_rt/udivti3.zig
@@ -1,7 +1,12 @@
-const __udivmodti4 = @import("udivmodti4.zig").__udivmodti4;
+const udivmodti4 = @import("udivmodti4.zig");
const builtin = @import("builtin");
pub extern fn __udivti3(a: u128, b: u128) u128 {
@setRuntimeSafety(builtin.is_test);
- return __udivmodti4(a, b, null);
+ return udivmodti4.__udivmodti4(a, b, null);
+}
+
+pub extern fn __udivti3_windows_x86_64(a: &const u128, b: &const u128) void {
+ @setRuntimeSafety(builtin.is_test);
+ udivmodti4.__udivmodti4_windows_x86_64(a, b, null);
}
diff --git a/std/special/compiler_rt/umodti3.zig b/std/special/compiler_rt/umodti3.zig
index 26b306efa9..3e8b80058e 100644
--- a/std/special/compiler_rt/umodti3.zig
+++ b/std/special/compiler_rt/umodti3.zig
@@ -1,9 +1,15 @@
-const __udivmodti4 = @import("udivmodti4.zig").__udivmodti4;
+const udivmodti4 = @import("udivmodti4.zig");
const builtin = @import("builtin");
+const compiler_rt = @import("index.zig");
pub extern fn __umodti3(a: u128, b: u128) u128 {
@setRuntimeSafety(builtin.is_test);
var r: u128 = undefined;
- _ = __udivmodti4(a, b, &r);
+ _ = udivmodti4.__udivmodti4(a, b, &r);
return r;
}
+
+pub extern fn __umodti3_windows_x86_64(a: &const u128, b: &const u128) void {
+ @setRuntimeSafety(builtin.is_test);
+ compiler_rt.setXmm0(u128, __umodti3(*a, *b));
+}
diff --git a/std/unicode.zig b/std/unicode.zig
index 356df824f0..300e129647 100644
--- a/std/unicode.zig
+++ b/std/unicode.zig
@@ -1,6 +1,16 @@
const std = @import("./index.zig");
const debug = std.debug;
+/// Returns how many bytes the UTF-8 representation would require
+/// for the given codepoint.
+pub fn utf8CodepointSequenceLength(c: u32) !u3 {
+ if (c < 0x80) return u3(1);
+ if (c < 0x800) return u3(2);
+ if (c < 0x10000) return u3(3);
+ if (c < 0x110000) return u3(4);
+ return error.CodepointTooLarge;
+}
+
/// Given the first byte of a UTF-8 codepoint,
/// returns a number 1-4 indicating the total length of the codepoint in bytes.
/// If this byte does not match the form of a UTF-8 start byte, returns Utf8InvalidStartByte.
@@ -12,11 +22,47 @@ pub fn utf8ByteSequenceLength(first_byte: u8) !u3 {
return error.Utf8InvalidStartByte;
}
+/// Encodes the given codepoint into a UTF-8 byte sequence.
+/// c: the codepoint.
+/// out: the out buffer to write to. Must have a len >= utf8CodepointSequenceLength(c).
+/// Errors: if c cannot be encoded in UTF-8.
+/// Returns: the number of bytes written to out.
+pub fn utf8Encode(c: u32, out: []u8) !u3 {
+ const length = try utf8CodepointSequenceLength(c);
+ debug.assert(out.len >= length);
+ switch (length) {
+ // The pattern for each is the same
+ // - Increasing the initial shift by 6 each time
+ // - Each time after the first shorten the shifted
+ // value to a max of 0b111111 (63)
+ 1 => out[0] = u8(c), // Can just do 0 + codepoint for initial range
+ 2 => {
+ out[0] = u8(0b11000000 | (c >> 6));
+ out[1] = u8(0b10000000 | (c & 0b111111));
+ },
+ 3 => {
+ if (0xd800 <= c and c <= 0xdfff) return error.Utf8CannotEncodeSurrogateHalf;
+ out[0] = u8(0b11100000 | (c >> 12));
+ out[1] = u8(0b10000000 | ((c >> 6) & 0b111111));
+ out[2] = u8(0b10000000 | (c & 0b111111));
+ },
+ 4 => {
+ out[0] = u8(0b11110000 | (c >> 18));
+ out[1] = u8(0b10000000 | ((c >> 12) & 0b111111));
+ out[2] = u8(0b10000000 | ((c >> 6) & 0b111111));
+ out[3] = u8(0b10000000 | (c & 0b111111));
+ },
+ else => unreachable,
+ }
+ return length;
+}
+
+const Utf8DecodeError = Utf8Decode2Error || Utf8Decode3Error || Utf8Decode4Error;
/// Decodes the UTF-8 codepoint encoded in the given slice of bytes.
/// bytes.len must be equal to utf8ByteSequenceLength(bytes[0]) catch unreachable.
/// If you already know the length at comptime, you can call one of
/// utf8Decode2,utf8Decode3,utf8Decode4 directly instead of this function.
-pub fn utf8Decode(bytes: []const u8) !u32 {
+pub fn utf8Decode(bytes: []const u8) Utf8DecodeError!u32 {
return switch (bytes.len) {
1 => u32(bytes[0]),
2 => utf8Decode2(bytes),
@@ -25,7 +71,12 @@ pub fn utf8Decode(bytes: []const u8) !u32 {
else => unreachable,
};
}
-pub fn utf8Decode2(bytes: []const u8) !u32 {
+
+const Utf8Decode2Error = error{
+ Utf8ExpectedContinuation,
+ Utf8OverlongEncoding,
+};
+pub fn utf8Decode2(bytes: []const u8) Utf8Decode2Error!u32 {
debug.assert(bytes.len == 2);
debug.assert(bytes[0] & 0b11100000 == 0b11000000);
var value: u32 = bytes[0] & 0b00011111;
@@ -38,7 +89,13 @@ pub fn utf8Decode2(bytes: []const u8) !u32 {
return value;
}
-pub fn utf8Decode3(bytes: []const u8) !u32 {
+
+const Utf8Decode3Error = error{
+ Utf8ExpectedContinuation,
+ Utf8OverlongEncoding,
+ Utf8EncodesSurrogateHalf,
+};
+pub fn utf8Decode3(bytes: []const u8) Utf8Decode3Error!u32 {
debug.assert(bytes.len == 3);
debug.assert(bytes[0] & 0b11110000 == 0b11100000);
var value: u32 = bytes[0] & 0b00001111;
@@ -56,7 +113,13 @@ pub fn utf8Decode3(bytes: []const u8) !u32 {
return value;
}
-pub fn utf8Decode4(bytes: []const u8) !u32 {
+
+const Utf8Decode4Error = error{
+ Utf8ExpectedContinuation,
+ Utf8OverlongEncoding,
+ Utf8CodepointTooLarge,
+};
+pub fn utf8Decode4(bytes: []const u8) Utf8Decode4Error!u32 {
debug.assert(bytes.len == 4);
debug.assert(bytes[0] & 0b11111000 == 0b11110000);
var value: u32 = bytes[0] & 0b00000111;
@@ -158,19 +221,67 @@ const Utf8Iterator = struct {
pub fn nextCodepoint(it: &Utf8Iterator) ?u32 {
const slice = it.nextCodepointSlice() ?? return null;
- const r = switch (slice.len) {
- 1 => u32(slice[0]),
- 2 => utf8Decode2(slice),
- 3 => utf8Decode3(slice),
- 4 => utf8Decode4(slice),
+ switch (slice.len) {
+ 1 => return u32(slice[0]),
+ 2 => return utf8Decode2(slice) catch unreachable,
+ 3 => return utf8Decode3(slice) catch unreachable,
+ 4 => return utf8Decode4(slice) catch unreachable,
else => unreachable,
- };
-
- return r catch unreachable;
+ }
}
};
+test "utf8 encode" {
+ comptime testUtf8Encode() catch unreachable;
+ try testUtf8Encode();
+}
+fn testUtf8Encode() !void {
+ // A few taken from wikipedia a few taken elsewhere
+ var array: [4]u8 = undefined;
+ debug.assert((try utf8Encode(try utf8Decode("€"), array[0..])) == 3);
+ debug.assert(array[0] == 0b11100010);
+ debug.assert(array[1] == 0b10000010);
+ debug.assert(array[2] == 0b10101100);
+
+ debug.assert((try utf8Encode(try utf8Decode("$"), array[0..])) == 1);
+ debug.assert(array[0] == 0b00100100);
+
+ debug.assert((try utf8Encode(try utf8Decode("¢"), array[0..])) == 2);
+ debug.assert(array[0] == 0b11000010);
+ debug.assert(array[1] == 0b10100010);
+
+ debug.assert((try utf8Encode(try utf8Decode("𐍈"), array[0..])) == 4);
+ debug.assert(array[0] == 0b11110000);
+ debug.assert(array[1] == 0b10010000);
+ debug.assert(array[2] == 0b10001101);
+ debug.assert(array[3] == 0b10001000);
+}
+
+test "utf8 encode error" {
+ comptime testUtf8EncodeError();
+ testUtf8EncodeError();
+}
+fn testUtf8EncodeError() void {
+ var array: [4]u8 = undefined;
+ testErrorEncode(0xd800, array[0..], error.Utf8CannotEncodeSurrogateHalf);
+ testErrorEncode(0xdfff, array[0..], error.Utf8CannotEncodeSurrogateHalf);
+ testErrorEncode(0x110000, array[0..], error.CodepointTooLarge);
+ testErrorEncode(0xffffffff, array[0..], error.CodepointTooLarge);
+}
+
+fn testErrorEncode(codePoint: u32, array: []u8, expectedErr: error) void {
+ if (utf8Encode(codePoint, array)) |_| {
+ unreachable;
+ } else |err| {
+ debug.assert(err == expectedErr);
+ }
+}
+
test "utf8 iterator on ascii" {
+ comptime testUtf8IteratorOnAscii();
+ testUtf8IteratorOnAscii();
+}
+fn testUtf8IteratorOnAscii() void {
const s = Utf8View.initComptime("abc");
var it1 = s.iterator();
@@ -187,6 +298,10 @@ test "utf8 iterator on ascii" {
}
test "utf8 view bad" {
+ comptime testUtf8ViewBad();
+ testUtf8ViewBad();
+}
+fn testUtf8ViewBad() void {
// Compile-time error.
// const s3 = Utf8View.initComptime("\xfe\xf2");
@@ -195,6 +310,10 @@ test "utf8 view bad" {
}
test "utf8 view ok" {
+ comptime testUtf8ViewOk();
+ testUtf8ViewOk();
+}
+fn testUtf8ViewOk() void {
const s = Utf8View.initComptime("東京市");
var it1 = s.iterator();
@@ -211,6 +330,10 @@ test "utf8 view ok" {
}
test "bad utf8 slice" {
+ comptime testBadUtf8Slice();
+ testBadUtf8Slice();
+}
+fn testBadUtf8Slice() void {
debug.assert(utf8ValidateSlice("abc"));
debug.assert(!utf8ValidateSlice("abc\xc0"));
debug.assert(!utf8ValidateSlice("abc\xc0abc"));
@@ -218,6 +341,10 @@ test "bad utf8 slice" {
}
test "valid utf8" {
+ comptime testValidUtf8();
+ testValidUtf8();
+}
+fn testValidUtf8() void {
testValid("\x00", 0x0);
testValid("\x20", 0x20);
testValid("\x7f", 0x7f);
@@ -233,6 +360,10 @@ test "valid utf8" {
}
test "invalid utf8 continuation bytes" {
+ comptime testInvalidUtf8ContinuationBytes();
+ testInvalidUtf8ContinuationBytes();
+}
+fn testInvalidUtf8ContinuationBytes() void {
// unexpected continuation
testError("\x80", error.Utf8InvalidStartByte);
testError("\xbf", error.Utf8InvalidStartByte);
@@ -261,6 +392,10 @@ test "invalid utf8 continuation bytes" {
}
test "overlong utf8 codepoint" {
+ comptime testOverlongUtf8Codepoint();
+ testOverlongUtf8Codepoint();
+}
+fn testOverlongUtf8Codepoint() void {
testError("\xc0\x80", error.Utf8OverlongEncoding);
testError("\xc1\xbf", error.Utf8OverlongEncoding);
testError("\xe0\x80\x80", error.Utf8OverlongEncoding);
@@ -270,6 +405,10 @@ test "overlong utf8 codepoint" {
}
test "misc invalid utf8" {
+ comptime testMiscInvalidUtf8();
+ testMiscInvalidUtf8();
+}
+fn testMiscInvalidUtf8() void {
// codepoint out of bounds
testError("\xf4\x90\x80\x80", error.Utf8CodepointTooLarge);
testError("\xf7\xbf\xbf\xbf", error.Utf8CodepointTooLarge);
diff --git a/std/zig/ast.zig b/std/zig/ast.zig
index 715a333c0f..a92555731d 100644
--- a/std/zig/ast.zig
+++ b/std/zig/ast.zig
@@ -1,647 +1,2176 @@
const std = @import("../index.zig");
const assert = std.debug.assert;
-const ArrayList = std.ArrayList;
-const Token = std.zig.Token;
+const SegmentedList = std.SegmentedList;
const mem = std.mem;
+const Token = std.zig.Token;
+
+pub const TokenIndex = usize;
+
+pub const Tree = struct {
+ source: []const u8,
+ tokens: TokenList,
+ root_node: &Node.Root,
+ arena_allocator: std.heap.ArenaAllocator,
+ errors: ErrorList,
+
+ pub const TokenList = SegmentedList(Token, 64);
+ pub const ErrorList = SegmentedList(Error, 0);
+
+ pub fn deinit(self: &Tree) void {
+ self.arena_allocator.deinit();
+ }
+
+ pub fn renderError(self: &Tree, parse_error: &Error, stream: var) !void {
+ return parse_error.render(&self.tokens, stream);
+ }
+
+ pub fn tokenSlice(self: &Tree, token_index: TokenIndex) []const u8 {
+ return self.tokenSlicePtr(self.tokens.at(token_index));
+ }
+
+ pub fn tokenSlicePtr(self: &Tree, token: &const Token) []const u8 {
+ return self.source[token.start..token.end];
+ }
+
+ pub const Location = struct {
+ line: usize,
+ column: usize,
+ line_start: usize,
+ line_end: usize,
+ };
+
+ pub fn tokenLocationPtr(self: &Tree, start_index: usize, token: &const Token) Location {
+ var loc = Location {
+ .line = 0,
+ .column = 0,
+ .line_start = start_index,
+ .line_end = self.source.len,
+ };
+ const token_start = token.start;
+ for (self.source[start_index..]) |c, i| {
+ if (i + start_index == token_start) {
+ loc.line_end = i + start_index;
+ while (loc.line_end < self.source.len and self.source[loc.line_end] != '\n') : (loc.line_end += 1) {}
+ return loc;
+ }
+ if (c == '\n') {
+ loc.line += 1;
+ loc.column = 0;
+ loc.line_start = i + 1;
+ } else {
+ loc.column += 1;
+ }
+ }
+ return loc;
+ }
+
+ pub fn tokenLocation(self: &Tree, start_index: usize, token_index: TokenIndex) Location {
+ return self.tokenLocationPtr(start_index, self.tokens.at(token_index));
+ }
+};
+
+pub const Error = union(enum) {
+ InvalidToken: InvalidToken,
+ ExpectedVarDeclOrFn: ExpectedVarDeclOrFn,
+ ExpectedAggregateKw: ExpectedAggregateKw,
+ UnattachedDocComment: UnattachedDocComment,
+ ExpectedEqOrSemi: ExpectedEqOrSemi,
+ ExpectedSemiOrLBrace: ExpectedSemiOrLBrace,
+ ExpectedLabelable: ExpectedLabelable,
+ ExpectedInlinable: ExpectedInlinable,
+ ExpectedAsmOutputReturnOrType: ExpectedAsmOutputReturnOrType,
+ ExpectedCall: ExpectedCall,
+ ExpectedCallOrFnProto: ExpectedCallOrFnProto,
+ ExpectedSliceOrRBracket: ExpectedSliceOrRBracket,
+ ExtraAlignQualifier: ExtraAlignQualifier,
+ ExtraConstQualifier: ExtraConstQualifier,
+ ExtraVolatileQualifier: ExtraVolatileQualifier,
+ ExpectedPrimaryExpr: ExpectedPrimaryExpr,
+ ExpectedToken: ExpectedToken,
+ ExpectedCommaOrEnd: ExpectedCommaOrEnd,
+
+ pub fn render(self: &Error, tokens: &Tree.TokenList, stream: var) !void {
+ switch (*self) {
+ // TODO https://github.com/zig-lang/zig/issues/683
+ @TagType(Error).InvalidToken => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedVarDeclOrFn => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedAggregateKw => |*x| return x.render(tokens, stream),
+ @TagType(Error).UnattachedDocComment => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedEqOrSemi => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedSemiOrLBrace => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedLabelable => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedInlinable => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedAsmOutputReturnOrType => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedCall => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedCallOrFnProto => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedSliceOrRBracket => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExtraAlignQualifier => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExtraConstQualifier => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExtraVolatileQualifier => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedPrimaryExpr => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedToken => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedCommaOrEnd => |*x| return x.render(tokens, stream),
+ }
+ }
+
+ pub fn loc(self: &Error) TokenIndex {
+ switch (*self) {
+ // TODO https://github.com/zig-lang/zig/issues/683
+ @TagType(Error).InvalidToken => |x| return x.token,
+ @TagType(Error).ExpectedVarDeclOrFn => |x| return x.token,
+ @TagType(Error).ExpectedAggregateKw => |x| return x.token,
+ @TagType(Error).UnattachedDocComment => |x| return x.token,
+ @TagType(Error).ExpectedEqOrSemi => |x| return x.token,
+ @TagType(Error).ExpectedSemiOrLBrace => |x| return x.token,
+ @TagType(Error).ExpectedLabelable => |x| return x.token,
+ @TagType(Error).ExpectedInlinable => |x| return x.token,
+ @TagType(Error).ExpectedAsmOutputReturnOrType => |x| return x.token,
+ @TagType(Error).ExpectedCall => |x| return x.node.firstToken(),
+ @TagType(Error).ExpectedCallOrFnProto => |x| return x.node.firstToken(),
+ @TagType(Error).ExpectedSliceOrRBracket => |x| return x.token,
+ @TagType(Error).ExtraAlignQualifier => |x| return x.token,
+ @TagType(Error).ExtraConstQualifier => |x| return x.token,
+ @TagType(Error).ExtraVolatileQualifier => |x| return x.token,
+ @TagType(Error).ExpectedPrimaryExpr => |x| return x.token,
+ @TagType(Error).ExpectedToken => |x| return x.token,
+ @TagType(Error).ExpectedCommaOrEnd => |x| return x.token,
+ }
+ }
+
+ pub const InvalidToken = SingleTokenError("Invalid token {}");
+ pub const ExpectedVarDeclOrFn = SingleTokenError("Expected variable declaration or function, found {}");
+ pub const ExpectedAggregateKw = SingleTokenError("Expected " ++
+ @tagName(Token.Id.Keyword_struct) ++ ", " ++ @tagName(Token.Id.Keyword_union) ++ ", or " ++
+ @tagName(Token.Id.Keyword_enum) ++ ", found {}");
+ pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found {}");
+ pub const ExpectedSemiOrLBrace = SingleTokenError("Expected ';' or '{{', found {}");
+ pub const ExpectedLabelable = SingleTokenError("Expected 'while', 'for', 'inline', 'suspend', or '{{', found {}");
+ pub const ExpectedInlinable = SingleTokenError("Expected 'while' or 'for', found {}");
+ pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or " ++
+ @tagName(Token.Id.Identifier) ++ ", found {}");
+ pub const ExpectedSliceOrRBracket = SingleTokenError("Expected ']' or '..', found {}");
+ pub const ExpectedPrimaryExpr = SingleTokenError("Expected primary expression, found {}");
+
+ pub const UnattachedDocComment = SimpleError("Unattached documentation comment");
+ pub const ExtraAlignQualifier = SimpleError("Extra align qualifier");
+ pub const ExtraConstQualifier = SimpleError("Extra const qualifier");
+ pub const ExtraVolatileQualifier = SimpleError("Extra volatile qualifier");
+
+ pub const ExpectedCall = struct {
+ node: &Node,
+
+ pub fn render(self: &ExpectedCall, tokens: &Tree.TokenList, stream: var) !void {
+ return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ ", found {}",
+ @tagName(self.node.id));
+ }
+ };
+
+ pub const ExpectedCallOrFnProto = struct {
+ node: &Node,
+
+ pub fn render(self: &ExpectedCallOrFnProto, tokens: &Tree.TokenList, stream: var) !void {
+ return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ " or " ++
+ @tagName(Node.Id.FnProto) ++ ", found {}", @tagName(self.node.id));
+ }
+ };
+
+ pub const ExpectedToken = struct {
+ token: TokenIndex,
+ expected_id: @TagType(Token.Id),
+
+ pub fn render(self: &ExpectedToken, tokens: &Tree.TokenList, stream: var) !void {
+ const token_name = @tagName(tokens.at(self.token).id);
+ return stream.print("expected {}, found {}", @tagName(self.expected_id), token_name);
+ }
+ };
+
+ pub const ExpectedCommaOrEnd = struct {
+ token: TokenIndex,
+ end_id: @TagType(Token.Id),
+
+ pub fn render(self: &ExpectedCommaOrEnd, tokens: &Tree.TokenList, stream: var) !void {
+ const token_name = @tagName(tokens.at(self.token).id);
+ return stream.print("expected ',' or {}, found {}", @tagName(self.end_id), token_name);
+ }
+ };
+
+ fn SingleTokenError(comptime msg: []const u8) type {
+ return struct {
+ const ThisError = this;
+
+ token: TokenIndex,
+
+ pub fn render(self: &ThisError, tokens: &Tree.TokenList, stream: var) !void {
+ const token_name = @tagName(tokens.at(self.token).id);
+ return stream.print(msg, token_name);
+ }
+ };
+ }
+
+ fn SimpleError(comptime msg: []const u8) type {
+ return struct {
+ const ThisError = this;
+
+ token: TokenIndex,
+
+ pub fn render(self: &ThisError, tokens: &Tree.TokenList, stream: var) !void {
+ return stream.write(msg);
+ }
+ };
+ }
+};
pub const Node = struct {
id: Id,
- comment: ?&NodeLineComment,
pub const Id = enum {
+ // Top level
Root,
+ Use,
+ TestDecl,
+
+ // Statements
VarDecl,
- Identifier,
- FnProto,
- ParamDecl,
- Block,
+ Defer,
+
+ // Operators
InfixOp,
PrefixOp,
+ SuffixOp,
+
+ // Control flow
+ Switch,
+ While,
+ For,
+ If,
+ ControlFlowExpression,
+ Suspend,
+
+ // Type expressions
+ VarType,
+ ErrorType,
+ FnProto,
+ PromiseType,
+
+ // Primary expressions
IntegerLiteral,
FloatLiteral,
StringLiteral,
+ MultilineStringLiteral,
+ CharLiteral,
+ BoolLiteral,
+ NullLiteral,
UndefinedLiteral,
+ ThisLiteral,
+ Unreachable,
+ Identifier,
+ GroupedExpression,
BuiltinCall,
- Call,
+ ErrorSetDecl,
+ ContainerDecl,
+ Asm,
+ Comptime,
+ Block,
+
+ // Misc
LineComment,
- TestDecl,
+ DocComment,
+ SwitchCase,
+ SwitchElse,
+ Else,
+ Payload,
+ PointerPayload,
+ PointerIndexPayload,
+ StructField,
+ UnionTag,
+ EnumTag,
+ ErrorTag,
+ AsmInput,
+ AsmOutput,
+ AsyncAttribute,
+ ParamDecl,
+ FieldInitializer,
};
- pub fn iterate(base: &Node, index: usize) ?&Node {
- return switch (base.id) {
- Id.Root => @fieldParentPtr(NodeRoot, "base", base).iterate(index),
- Id.VarDecl => @fieldParentPtr(NodeVarDecl, "base", base).iterate(index),
- Id.Identifier => @fieldParentPtr(NodeIdentifier, "base", base).iterate(index),
- Id.FnProto => @fieldParentPtr(NodeFnProto, "base", base).iterate(index),
- Id.ParamDecl => @fieldParentPtr(NodeParamDecl, "base", base).iterate(index),
- Id.Block => @fieldParentPtr(NodeBlock, "base", base).iterate(index),
- Id.InfixOp => @fieldParentPtr(NodeInfixOp, "base", base).iterate(index),
- Id.PrefixOp => @fieldParentPtr(NodePrefixOp, "base", base).iterate(index),
- Id.IntegerLiteral => @fieldParentPtr(NodeIntegerLiteral, "base", base).iterate(index),
- Id.FloatLiteral => @fieldParentPtr(NodeFloatLiteral, "base", base).iterate(index),
- Id.StringLiteral => @fieldParentPtr(NodeStringLiteral, "base", base).iterate(index),
- Id.UndefinedLiteral => @fieldParentPtr(NodeUndefinedLiteral, "base", base).iterate(index),
- Id.BuiltinCall => @fieldParentPtr(NodeBuiltinCall, "base", base).iterate(index),
- Id.Call => @fieldParentPtr(NodeCall, "base", base).iterate(index),
- Id.LineComment => @fieldParentPtr(NodeLineComment, "base", base).iterate(index),
- Id.TestDecl => @fieldParentPtr(NodeTestDecl, "base", base).iterate(index),
- };
+ pub fn cast(base: &Node, comptime T: type) ?&T {
+ if (base.id == comptime typeToId(T)) {
+ return @fieldParentPtr(T, "base", base);
+ }
+ return null;
}
- pub fn firstToken(base: &Node) Token {
- return switch (base.id) {
- Id.Root => @fieldParentPtr(NodeRoot, "base", base).firstToken(),
- Id.VarDecl => @fieldParentPtr(NodeVarDecl, "base", base).firstToken(),
- Id.Identifier => @fieldParentPtr(NodeIdentifier, "base", base).firstToken(),
- Id.FnProto => @fieldParentPtr(NodeFnProto, "base", base).firstToken(),
- Id.ParamDecl => @fieldParentPtr(NodeParamDecl, "base", base).firstToken(),
- Id.Block => @fieldParentPtr(NodeBlock, "base", base).firstToken(),
- Id.InfixOp => @fieldParentPtr(NodeInfixOp, "base", base).firstToken(),
- Id.PrefixOp => @fieldParentPtr(NodePrefixOp, "base", base).firstToken(),
- Id.IntegerLiteral => @fieldParentPtr(NodeIntegerLiteral, "base", base).firstToken(),
- Id.FloatLiteral => @fieldParentPtr(NodeFloatLiteral, "base", base).firstToken(),
- Id.StringLiteral => @fieldParentPtr(NodeStringLiteral, "base", base).firstToken(),
- Id.UndefinedLiteral => @fieldParentPtr(NodeUndefinedLiteral, "base", base).firstToken(),
- Id.BuiltinCall => @fieldParentPtr(NodeBuiltinCall, "base", base).firstToken(),
- Id.Call => @fieldParentPtr(NodeCall, "base", base).firstToken(),
- Id.LineComment => @fieldParentPtr(NodeLineComment, "base", base).firstToken(),
- Id.TestDecl => @fieldParentPtr(NodeTestDecl, "base", base).firstToken(),
- };
+ pub fn iterate(base: &Node, index: usize) ?&Node {
+ comptime var i = 0;
+ inline while (i < @memberCount(Id)) : (i += 1) {
+ if (base.id == @field(Id, @memberName(Id, i))) {
+ const T = @field(Node, @memberName(Id, i));
+ return @fieldParentPtr(T, "base", base).iterate(index);
+ }
+ }
+ unreachable;
}
- pub fn lastToken(base: &Node) Token {
- return switch (base.id) {
- Id.Root => @fieldParentPtr(NodeRoot, "base", base).lastToken(),
- Id.VarDecl => @fieldParentPtr(NodeVarDecl, "base", base).lastToken(),
- Id.Identifier => @fieldParentPtr(NodeIdentifier, "base", base).lastToken(),
- Id.FnProto => @fieldParentPtr(NodeFnProto, "base", base).lastToken(),
- Id.ParamDecl => @fieldParentPtr(NodeParamDecl, "base", base).lastToken(),
- Id.Block => @fieldParentPtr(NodeBlock, "base", base).lastToken(),
- Id.InfixOp => @fieldParentPtr(NodeInfixOp, "base", base).lastToken(),
- Id.PrefixOp => @fieldParentPtr(NodePrefixOp, "base", base).lastToken(),
- Id.IntegerLiteral => @fieldParentPtr(NodeIntegerLiteral, "base", base).lastToken(),
- Id.FloatLiteral => @fieldParentPtr(NodeFloatLiteral, "base", base).lastToken(),
- Id.StringLiteral => @fieldParentPtr(NodeStringLiteral, "base", base).lastToken(),
- Id.UndefinedLiteral => @fieldParentPtr(NodeUndefinedLiteral, "base", base).lastToken(),
- Id.BuiltinCall => @fieldParentPtr(NodeBuiltinCall, "base", base).lastToken(),
- Id.Call => @fieldParentPtr(NodeCall, "base", base).lastToken(),
- Id.LineComment => @fieldParentPtr(NodeLineComment, "base", base).lastToken(),
- Id.TestDecl => @fieldParentPtr(NodeTestDecl, "base", base).lastToken(),
- };
+ pub fn firstToken(base: &Node) TokenIndex {
+ comptime var i = 0;
+ inline while (i < @memberCount(Id)) : (i += 1) {
+ if (base.id == @field(Id, @memberName(Id, i))) {
+ const T = @field(Node, @memberName(Id, i));
+ return @fieldParentPtr(T, "base", base).firstToken();
+ }
+ }
+ unreachable;
}
-};
-
-pub const NodeRoot = struct {
- base: Node,
- decls: ArrayList(&Node),
- eof_token: Token,
- pub fn iterate(self: &NodeRoot, index: usize) ?&Node {
- if (index < self.decls.len) {
- return self.decls.items[self.decls.len - index - 1];
+ pub fn lastToken(base: &Node) TokenIndex {
+ comptime var i = 0;
+ inline while (i < @memberCount(Id)) : (i += 1) {
+ if (base.id == @field(Id, @memberName(Id, i))) {
+ const T = @field(Node, @memberName(Id, i));
+ return @fieldParentPtr(T, "base", base).lastToken();
+ }
}
- return null;
+ unreachable;
}
- pub fn firstToken(self: &NodeRoot) Token {
- return if (self.decls.len == 0) self.eof_token else self.decls.at(0).firstToken();
+ pub fn typeToId(comptime T: type) Id {
+ comptime var i = 0;
+ inline while (i < @memberCount(Id)) : (i += 1) {
+ if (T == @field(Node, @memberName(Id, i))) {
+ return @field(Id, @memberName(Id, i));
+ }
+ }
+ unreachable;
}
- pub fn lastToken(self: &NodeRoot) Token {
- return if (self.decls.len == 0) self.eof_token else self.decls.at(self.decls.len - 1).lastToken();
+ pub fn requireSemiColon(base: &const Node) bool {
+ var n = base;
+ while (true) {
+ switch (n.id) {
+ Id.Root,
+ Id.StructField,
+ Id.UnionTag,
+ Id.EnumTag,
+ Id.ParamDecl,
+ Id.Block,
+ Id.Payload,
+ Id.PointerPayload,
+ Id.PointerIndexPayload,
+ Id.Switch,
+ Id.SwitchCase,
+ Id.SwitchElse,
+ Id.FieldInitializer,
+ Id.DocComment,
+ Id.LineComment,
+ Id.TestDecl => return false,
+ Id.While => {
+ const while_node = @fieldParentPtr(While, "base", n);
+ if (while_node.@"else") |@"else"| {
+ n = @"else".base;
+ continue;
+ }
+
+ return while_node.body.id != Id.Block;
+ },
+ Id.For => {
+ const for_node = @fieldParentPtr(For, "base", n);
+ if (for_node.@"else") |@"else"| {
+ n = @"else".base;
+ continue;
+ }
+
+ return for_node.body.id != Id.Block;
+ },
+ Id.If => {
+ const if_node = @fieldParentPtr(If, "base", n);
+ if (if_node.@"else") |@"else"| {
+ n = @"else".base;
+ continue;
+ }
+
+ return if_node.body.id != Id.Block;
+ },
+ Id.Else => {
+ const else_node = @fieldParentPtr(Else, "base", n);
+ n = else_node.body;
+ continue;
+ },
+ Id.Defer => {
+ const defer_node = @fieldParentPtr(Defer, "base", n);
+ return defer_node.expr.id != Id.Block;
+ },
+ Id.Comptime => {
+ const comptime_node = @fieldParentPtr(Comptime, "base", n);
+ return comptime_node.expr.id != Id.Block;
+ },
+ Id.Suspend => {
+ const suspend_node = @fieldParentPtr(Suspend, "base", n);
+ if (suspend_node.body) |body| {
+ return body.id != Id.Block;
+ }
+
+ return true;
+ },
+ else => return true,
+ }
+ }
}
-};
-pub const NodeVarDecl = struct {
- base: Node,
- visib_token: ?Token,
- name_token: Token,
- eq_token: Token,
- mut_token: Token,
- comptime_token: ?Token,
- extern_token: ?Token,
- lib_name: ?&Node,
- type_node: ?&Node,
- align_node: ?&Node,
- init_node: ?&Node,
- semicolon_token: Token,
-
- pub fn iterate(self: &NodeVarDecl, index: usize) ?&Node {
- var i = index;
-
- if (self.type_node) |type_node| {
- if (i < 1) return type_node;
+
+ pub const Root = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ decls: DeclList,
+ eof_token: TokenIndex,
+
+ pub const DeclList = SegmentedList(&Node, 4);
+
+ pub fn iterate(self: &Root, index: usize) ?&Node {
+ if (index < self.decls.len) {
+ return self.decls.items[self.decls.len - index - 1];
+ }
+ return null;
+ }
+
+ pub fn firstToken(self: &Root) TokenIndex {
+ return if (self.decls.len == 0) self.eof_token else (*self.decls.at(0)).firstToken();
+ }
+
+ pub fn lastToken(self: &Root) TokenIndex {
+ return if (self.decls.len == 0) self.eof_token else (*self.decls.at(self.decls.len - 1)).lastToken();
+ }
+ };
+
+ pub const VarDecl = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ visib_token: ?TokenIndex,
+ name_token: TokenIndex,
+ eq_token: TokenIndex,
+ mut_token: TokenIndex,
+ comptime_token: ?TokenIndex,
+ extern_export_token: ?TokenIndex,
+ lib_name: ?&Node,
+ type_node: ?&Node,
+ align_node: ?&Node,
+ init_node: ?&Node,
+ semicolon_token: TokenIndex,
+
+ pub fn iterate(self: &VarDecl, index: usize) ?&Node {
+ var i = index;
+
+ if (self.type_node) |type_node| {
+ if (i < 1) return type_node;
+ i -= 1;
+ }
+
+ if (self.align_node) |align_node| {
+ if (i < 1) return align_node;
+ i -= 1;
+ }
+
+ if (self.init_node) |init_node| {
+ if (i < 1) return init_node;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &VarDecl) TokenIndex {
+ if (self.visib_token) |visib_token| return visib_token;
+ if (self.comptime_token) |comptime_token| return comptime_token;
+ if (self.extern_export_token) |extern_export_token| return extern_export_token;
+ assert(self.lib_name == null);
+ return self.mut_token;
+ }
+
+ pub fn lastToken(self: &VarDecl) TokenIndex {
+ return self.semicolon_token;
+ }
+ };
+
+ pub const Use = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ visib_token: ?TokenIndex,
+ expr: &Node,
+ semicolon_token: TokenIndex,
+
+ pub fn iterate(self: &Use, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.expr;
+ i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &Use) TokenIndex {
+ if (self.visib_token) |visib_token| return visib_token;
+ return self.expr.firstToken();
+ }
+
+ pub fn lastToken(self: &Use) TokenIndex {
+ return self.semicolon_token;
+ }
+ };
+
+ pub const ErrorSetDecl = struct {
+ base: Node,
+ error_token: TokenIndex,
+ decls: DeclList,
+ rbrace_token: TokenIndex,
+
+ pub const DeclList = SegmentedList(&Node, 2);
+
+ pub fn iterate(self: &ErrorSetDecl, index: usize) ?&Node {
+ var i = index;
+
+ if (i < self.decls.len) return *self.decls.at(i);
+ i -= self.decls.len;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &ErrorSetDecl) TokenIndex {
+ return self.error_token;
+ }
+
+ pub fn lastToken(self: &ErrorSetDecl) TokenIndex {
+ return self.rbrace_token;
+ }
+ };
+
+ pub const ContainerDecl = struct {
+ base: Node,
+ ltoken: TokenIndex,
+ layout: Layout,
+ kind: Kind,
+ init_arg_expr: InitArg,
+ fields_and_decls: DeclList,
+ rbrace_token: TokenIndex,
+
+ pub const DeclList = Root.DeclList;
+
+ const Layout = enum {
+ Auto,
+ Extern,
+ Packed,
+ };
+
+ const Kind = enum {
+ Struct,
+ Enum,
+ Union,
+ };
+
+ const InitArg = union(enum) {
+ None,
+ Enum: ?&Node,
+ Type: &Node,
+ };
+
+ pub fn iterate(self: &ContainerDecl, index: usize) ?&Node {
+ var i = index;
+
+ switch (self.init_arg_expr) {
+ InitArg.Type => |t| {
+ if (i < 1) return t;
+ i -= 1;
+ },
+ InitArg.None,
+ InitArg.Enum => { }
+ }
+
+ if (i < self.fields_and_decls.len) return *self.fields_and_decls.at(i);
+ i -= self.fields_and_decls.len;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &ContainerDecl) TokenIndex {
+ return self.ltoken;
+ }
+
+ pub fn lastToken(self: &ContainerDecl) TokenIndex {
+ return self.rbrace_token;
+ }
+ };
+
+ pub const StructField = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ visib_token: ?TokenIndex,
+ name_token: TokenIndex,
+ type_expr: &Node,
+
+ pub fn iterate(self: &StructField, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.type_expr;
+ i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &StructField) TokenIndex {
+ if (self.visib_token) |visib_token| return visib_token;
+ return self.name_token;
+ }
+
+ pub fn lastToken(self: &StructField) TokenIndex {
+ return self.type_expr.lastToken();
+ }
+ };
+
+ pub const UnionTag = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ name_token: TokenIndex,
+ type_expr: ?&Node,
+ value_expr: ?&Node,
+
+ pub fn iterate(self: &UnionTag, index: usize) ?&Node {
+ var i = index;
+
+ if (self.type_expr) |type_expr| {
+ if (i < 1) return type_expr;
+ i -= 1;
+ }
+
+ if (self.value_expr) |value_expr| {
+ if (i < 1) return value_expr;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &UnionTag) TokenIndex {
+ return self.name_token;
+ }
+
+ pub fn lastToken(self: &UnionTag) TokenIndex {
+ if (self.value_expr) |value_expr| {
+ return value_expr.lastToken();
+ }
+ if (self.type_expr) |type_expr| {
+ return type_expr.lastToken();
+ }
+
+ return self.name_token;
+ }
+ };
+
+ pub const EnumTag = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ name_token: TokenIndex,
+ value: ?&Node,
+
+ pub fn iterate(self: &EnumTag, index: usize) ?&Node {
+ var i = index;
+
+ if (self.value) |value| {
+ if (i < 1) return value;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &EnumTag) TokenIndex {
+ return self.name_token;
+ }
+
+ pub fn lastToken(self: &EnumTag) TokenIndex {
+ if (self.value) |value| {
+ return value.lastToken();
+ }
+
+ return self.name_token;
+ }
+ };
+
+ pub const ErrorTag = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ name_token: TokenIndex,
+
+ pub fn iterate(self: &ErrorTag, index: usize) ?&Node {
+ var i = index;
+
+ if (self.doc_comments) |comments| {
+ if (i < 1) return &comments.base;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &ErrorTag) TokenIndex {
+ return self.name_token;
+ }
+
+ pub fn lastToken(self: &ErrorTag) TokenIndex {
+ return self.name_token;
+ }
+ };
+
+ pub const Identifier = struct {
+ base: Node,
+ token: TokenIndex,
+
+ pub fn iterate(self: &Identifier, index: usize) ?&Node {
+ return null;
+ }
+
+ pub fn firstToken(self: &Identifier) TokenIndex {
+ return self.token;
+ }
+
+ pub fn lastToken(self: &Identifier) TokenIndex {
+ return self.token;
+ }
+ };
+
+ pub const AsyncAttribute = struct {
+ base: Node,
+ async_token: TokenIndex,
+ allocator_type: ?&Node,
+ rangle_bracket: ?TokenIndex,
+
+ pub fn iterate(self: &AsyncAttribute, index: usize) ?&Node {
+ var i = index;
+
+ if (self.allocator_type) |allocator_type| {
+ if (i < 1) return allocator_type;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &AsyncAttribute) TokenIndex {
+ return self.async_token;
+ }
+
+ pub fn lastToken(self: &AsyncAttribute) TokenIndex {
+ if (self.rangle_bracket) |rangle_bracket| {
+ return rangle_bracket;
+ }
+
+ return self.async_token;
+ }
+ };
+
+ pub const FnProto = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ visib_token: ?TokenIndex,
+ fn_token: TokenIndex,
+ name_token: ?TokenIndex,
+ params: ParamList,
+ return_type: ReturnType,
+ var_args_token: ?TokenIndex,
+ extern_export_inline_token: ?TokenIndex,
+ cc_token: ?TokenIndex,
+ async_attr: ?&AsyncAttribute,
+ body_node: ?&Node,
+ lib_name: ?&Node, // populated if this is an extern declaration
+ align_expr: ?&Node, // populated if align(A) is present
+
+ pub const ParamList = SegmentedList(&Node, 2);
+
+ pub const ReturnType = union(enum) {
+ Explicit: &Node,
+ InferErrorSet: &Node,
+ };
+
+ pub fn iterate(self: &FnProto, index: usize) ?&Node {
+ var i = index;
+
+ if (self.body_node) |body_node| {
+ if (i < 1) return body_node;
+ i -= 1;
+ }
+
+ switch (self.return_type) {
+ // TODO allow this and next prong to share bodies since the types are the same
+ ReturnType.Explicit => |node| {
+ if (i < 1) return node;
+ i -= 1;
+ },
+ ReturnType.InferErrorSet => |node| {
+ if (i < 1) return node;
+ i -= 1;
+ },
+ }
+
+ if (self.align_expr) |align_expr| {
+ if (i < 1) return align_expr;
+ i -= 1;
+ }
+
+ if (i < self.params.len) return self.params.items[self.params.len - i - 1];
+ i -= self.params.len;
+
+ if (self.lib_name) |lib_name| {
+ if (i < 1) return lib_name;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &FnProto) TokenIndex {
+ if (self.visib_token) |visib_token| return visib_token;
+ if (self.extern_export_inline_token) |extern_export_inline_token| return extern_export_inline_token;
+ assert(self.lib_name == null);
+ if (self.cc_token) |cc_token| return cc_token;
+ return self.fn_token;
+ }
+
+ pub fn lastToken(self: &FnProto) TokenIndex {
+ if (self.body_node) |body_node| return body_node.lastToken();
+ switch (self.return_type) {
+ // TODO allow this and next prong to share bodies since the types are the same
+ ReturnType.Explicit => |node| return node.lastToken(),
+ ReturnType.InferErrorSet => |node| return node.lastToken(),
+ }
+ }
+ };
+
+ pub const PromiseType = struct {
+ base: Node,
+ promise_token: TokenIndex,
+ result: ?Result,
+
+ pub const Result = struct {
+ arrow_token: TokenIndex,
+ return_type: &Node,
+ };
+
+ pub fn iterate(self: &PromiseType, index: usize) ?&Node {
+ var i = index;
+
+ if (self.result) |result| {
+ if (i < 1) return result.return_type;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &PromiseType) TokenIndex {
+ return self.promise_token;
+ }
+
+ pub fn lastToken(self: &PromiseType) TokenIndex {
+ if (self.result) |result| return result.return_type.lastToken();
+ return self.promise_token;
+ }
+ };
+
+ pub const ParamDecl = struct {
+ base: Node,
+ comptime_token: ?TokenIndex,
+ noalias_token: ?TokenIndex,
+ name_token: ?TokenIndex,
+ type_node: &Node,
+ var_args_token: ?TokenIndex,
+
+ pub fn iterate(self: &ParamDecl, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.type_node;
i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &ParamDecl) TokenIndex {
+ if (self.comptime_token) |comptime_token| return comptime_token;
+ if (self.noalias_token) |noalias_token| return noalias_token;
+ if (self.name_token) |name_token| return name_token;
+ return self.type_node.firstToken();
+ }
+
+ pub fn lastToken(self: &ParamDecl) TokenIndex {
+ if (self.var_args_token) |var_args_token| return var_args_token;
+ return self.type_node.lastToken();
+ }
+ };
+
+ pub const Block = struct {
+ base: Node,
+ label: ?TokenIndex,
+ lbrace: TokenIndex,
+ statements: StatementList,
+ rbrace: TokenIndex,
+
+ pub const StatementList = Root.DeclList;
+
+ pub fn iterate(self: &Block, index: usize) ?&Node {
+ var i = index;
+
+ if (i < self.statements.len) return self.statements.items[i];
+ i -= self.statements.len;
+
+ return null;
}
- if (self.align_node) |align_node| {
- if (i < 1) return align_node;
+ pub fn firstToken(self: &Block) TokenIndex {
+ if (self.label) |label| {
+ return label;
+ }
+
+ return self.lbrace;
+ }
+
+ pub fn lastToken(self: &Block) TokenIndex {
+ return self.rbrace;
+ }
+ };
+
+ pub const Defer = struct {
+ base: Node,
+ defer_token: TokenIndex,
+ kind: Kind,
+ expr: &Node,
+
+ const Kind = enum {
+ Error,
+ Unconditional,
+ };
+
+ pub fn iterate(self: &Defer, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.expr;
i -= 1;
+
+ return null;
}
- if (self.init_node) |init_node| {
- if (i < 1) return init_node;
+ pub fn firstToken(self: &Defer) TokenIndex {
+ return self.defer_token;
+ }
+
+ pub fn lastToken(self: &Defer) TokenIndex {
+ return self.expr.lastToken();
+ }
+ };
+
+ pub const Comptime = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ comptime_token: TokenIndex,
+ expr: &Node,
+
+ pub fn iterate(self: &Comptime, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.expr;
i -= 1;
+
+ return null;
}
- return null;
- }
+ pub fn firstToken(self: &Comptime) TokenIndex {
+ return self.comptime_token;
+ }
- pub fn firstToken(self: &NodeVarDecl) Token {
- if (self.visib_token) |visib_token| return visib_token;
- if (self.comptime_token) |comptime_token| return comptime_token;
- if (self.extern_token) |extern_token| return extern_token;
- assert(self.lib_name == null);
- return self.mut_token;
- }
+ pub fn lastToken(self: &Comptime) TokenIndex {
+ return self.expr.lastToken();
+ }
+ };
- pub fn lastToken(self: &NodeVarDecl) Token {
- return self.semicolon_token;
- }
-};
+ pub const Payload = struct {
+ base: Node,
+ lpipe: TokenIndex,
+ error_symbol: &Node,
+ rpipe: TokenIndex,
-pub const NodeIdentifier = struct {
- base: Node,
- name_token: Token,
+ pub fn iterate(self: &Payload, index: usize) ?&Node {
+ var i = index;
- pub fn iterate(self: &NodeIdentifier, index: usize) ?&Node {
- return null;
- }
+ if (i < 1) return self.error_symbol;
+ i -= 1;
- pub fn firstToken(self: &NodeIdentifier) Token {
- return self.name_token;
- }
+ return null;
+ }
- pub fn lastToken(self: &NodeIdentifier) Token {
- return self.name_token;
- }
-};
+ pub fn firstToken(self: &Payload) TokenIndex {
+ return self.lpipe;
+ }
+
+ pub fn lastToken(self: &Payload) TokenIndex {
+ return self.rpipe;
+ }
+ };
-pub const NodeFnProto = struct {
- base: Node,
- visib_token: ?Token,
- fn_token: Token,
- name_token: ?Token,
- params: ArrayList(&Node),
- return_type: ReturnType,
- var_args_token: ?Token,
- extern_token: ?Token,
- inline_token: ?Token,
- cc_token: ?Token,
- body_node: ?&Node,
- lib_name: ?&Node, // populated if this is an extern declaration
- align_expr: ?&Node, // populated if align(A) is present
-
- pub const ReturnType = union(enum) {
- Explicit: &Node,
- Infer: Token,
- InferErrorSet: &Node,
- };
-
- pub fn iterate(self: &NodeFnProto, index: usize) ?&Node {
- var i = index;
-
- if (self.body_node) |body_node| {
- if (i < 1) return body_node;
+ pub const PointerPayload = struct {
+ base: Node,
+ lpipe: TokenIndex,
+ ptr_token: ?TokenIndex,
+ value_symbol: &Node,
+ rpipe: TokenIndex,
+
+ pub fn iterate(self: &PointerPayload, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.value_symbol;
i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &PointerPayload) TokenIndex {
+ return self.lpipe;
}
- switch (self.return_type) {
- // TODO allow this and next prong to share bodies since the types are the same
- ReturnType.Explicit => |node| {
- if (i < 1) return node;
+ pub fn lastToken(self: &PointerPayload) TokenIndex {
+ return self.rpipe;
+ }
+ };
+
+ pub const PointerIndexPayload = struct {
+ base: Node,
+ lpipe: TokenIndex,
+ ptr_token: ?TokenIndex,
+ value_symbol: &Node,
+ index_symbol: ?&Node,
+ rpipe: TokenIndex,
+
+ pub fn iterate(self: &PointerIndexPayload, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.value_symbol;
+ i -= 1;
+
+ if (self.index_symbol) |index_symbol| {
+ if (i < 1) return index_symbol;
i -= 1;
- },
- ReturnType.InferErrorSet => |node| {
- if (i < 1) return node;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &PointerIndexPayload) TokenIndex {
+ return self.lpipe;
+ }
+
+ pub fn lastToken(self: &PointerIndexPayload) TokenIndex {
+ return self.rpipe;
+ }
+ };
+
+ pub const Else = struct {
+ base: Node,
+ else_token: TokenIndex,
+ payload: ?&Node,
+ body: &Node,
+
+ pub fn iterate(self: &Else, index: usize) ?&Node {
+ var i = index;
+
+ if (self.payload) |payload| {
+ if (i < 1) return payload;
i -= 1;
- },
- ReturnType.Infer => {},
+ }
+
+ if (i < 1) return self.body;
+ i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &Else) TokenIndex {
+ return self.else_token;
}
- if (self.align_expr) |align_expr| {
- if (i < 1) return align_expr;
+ pub fn lastToken(self: &Else) TokenIndex {
+ return self.body.lastToken();
+ }
+ };
+
+ pub const Switch = struct {
+ base: Node,
+ switch_token: TokenIndex,
+ expr: &Node,
+ /// these can be SwitchCase nodes or LineComment nodes
+ cases: CaseList,
+ rbrace: TokenIndex,
+
+ pub const CaseList = SegmentedList(&Node, 2);
+
+ pub fn iterate(self: &Switch, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.expr;
i -= 1;
+
+ if (i < self.cases.len) return *self.cases.at(i);
+ i -= self.cases.len;
+
+ return null;
}
- if (i < self.params.len) return self.params.items[self.params.len - i - 1];
- i -= self.params.len;
+ pub fn firstToken(self: &Switch) TokenIndex {
+ return self.switch_token;
+ }
- if (self.lib_name) |lib_name| {
- if (i < 1) return lib_name;
+ pub fn lastToken(self: &Switch) TokenIndex {
+ return self.rbrace;
+ }
+ };
+
+ pub const SwitchCase = struct {
+ base: Node,
+ items: ItemList,
+ payload: ?&Node,
+ expr: &Node,
+
+ pub const ItemList = SegmentedList(&Node, 1);
+
+ pub fn iterate(self: &SwitchCase, index: usize) ?&Node {
+ var i = index;
+
+ if (i < self.items.len) return *self.items.at(i);
+ i -= self.items.len;
+
+ if (self.payload) |payload| {
+ if (i < 1) return payload;
+ i -= 1;
+ }
+
+ if (i < 1) return self.expr;
i -= 1;
+
+ return null;
}
- return null;
- }
+ pub fn firstToken(self: &SwitchCase) TokenIndex {
+ return (*self.items.at(0)).firstToken();
+ }
- pub fn firstToken(self: &NodeFnProto) Token {
- if (self.visib_token) |visib_token| return visib_token;
- if (self.extern_token) |extern_token| return extern_token;
- assert(self.lib_name == null);
- if (self.inline_token) |inline_token| return inline_token;
- if (self.cc_token) |cc_token| return cc_token;
- return self.fn_token;
- }
+ pub fn lastToken(self: &SwitchCase) TokenIndex {
+ return self.expr.lastToken();
+ }
+ };
+
+ pub const SwitchElse = struct {
+ base: Node,
+ token: TokenIndex,
- pub fn lastToken(self: &NodeFnProto) Token {
- if (self.body_node) |body_node| return body_node.lastToken();
- switch (self.return_type) {
- // TODO allow this and next prong to share bodies since the types are the same
- ReturnType.Explicit => |node| return node.lastToken(),
- ReturnType.InferErrorSet => |node| return node.lastToken(),
- ReturnType.Infer => |token| return token,
+ pub fn iterate(self: &SwitchElse, index: usize) ?&Node {
+ return null;
}
- }
-};
-pub const NodeParamDecl = struct {
- base: Node,
- comptime_token: ?Token,
- noalias_token: ?Token,
- name_token: ?Token,
- type_node: &Node,
- var_args_token: ?Token,
+ pub fn firstToken(self: &SwitchElse) TokenIndex {
+ return self.token;
+ }
- pub fn iterate(self: &NodeParamDecl, index: usize) ?&Node {
- var i = index;
+ pub fn lastToken(self: &SwitchElse) TokenIndex {
+ return self.token;
+ }
+ };
- if (i < 1) return self.type_node;
- i -= 1;
+ pub const While = struct {
+ base: Node,
+ label: ?TokenIndex,
+ inline_token: ?TokenIndex,
+ while_token: TokenIndex,
+ condition: &Node,
+ payload: ?&Node,
+ continue_expr: ?&Node,
+ body: &Node,
+ @"else": ?&Else,
+
+ pub fn iterate(self: &While, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.condition;
+ i -= 1;
- return null;
- }
+ if (self.payload) |payload| {
+ if (i < 1) return payload;
+ i -= 1;
+ }
- pub fn firstToken(self: &NodeParamDecl) Token {
- if (self.comptime_token) |comptime_token| return comptime_token;
- if (self.noalias_token) |noalias_token| return noalias_token;
- if (self.name_token) |name_token| return name_token;
- return self.type_node.firstToken();
- }
+ if (self.continue_expr) |continue_expr| {
+ if (i < 1) return continue_expr;
+ i -= 1;
+ }
- pub fn lastToken(self: &NodeParamDecl) Token {
- if (self.var_args_token) |var_args_token| return var_args_token;
- return self.type_node.lastToken();
- }
-};
+ if (i < 1) return self.body;
+ i -= 1;
-pub const NodeBlock = struct {
- base: Node,
- begin_token: Token,
- end_token: Token,
- statements: ArrayList(&Node),
+ if (self.@"else") |@"else"| {
+ if (i < 1) return &@"else".base;
+ i -= 1;
+ }
- pub fn iterate(self: &NodeBlock, index: usize) ?&Node {
- var i = index;
+ return null;
+ }
- if (i < self.statements.len) return self.statements.items[i];
- i -= self.statements.len;
+ pub fn firstToken(self: &While) TokenIndex {
+ if (self.label) |label| {
+ return label;
+ }
- return null;
- }
+ if (self.inline_token) |inline_token| {
+ return inline_token;
+ }
- pub fn firstToken(self: &NodeBlock) Token {
- return self.begin_token;
- }
+ return self.while_token;
+ }
- pub fn lastToken(self: &NodeBlock) Token {
- return self.end_token;
- }
-};
+ pub fn lastToken(self: &While) TokenIndex {
+ if (self.@"else") |@"else"| {
+ return @"else".body.lastToken();
+ }
-pub const NodeInfixOp = struct {
- base: Node,
- op_token: Token,
- lhs: &Node,
- op: InfixOp,
- rhs: &Node,
-
- const InfixOp = enum {
- Add,
- AddWrap,
- ArrayCat,
- ArrayMult,
- Assign,
- AssignBitAnd,
- AssignBitOr,
- AssignBitShiftLeft,
- AssignBitShiftRight,
- AssignBitXor,
- AssignDiv,
- AssignMinus,
- AssignMinusWrap,
- AssignMod,
- AssignPlus,
- AssignPlusWrap,
- AssignTimes,
- AssignTimesWarp,
- BangEqual,
- BitAnd,
- BitOr,
- BitShiftLeft,
- BitShiftRight,
- BitXor,
- BoolAnd,
- BoolOr,
- Div,
- EqualEqual,
- ErrorUnion,
- GreaterOrEqual,
- GreaterThan,
- LessOrEqual,
- LessThan,
- MergeErrorSets,
- Mod,
- Mult,
- MultWrap,
- Period,
- Sub,
- SubWrap,
- UnwrapMaybe,
- };
-
- pub fn iterate(self: &NodeInfixOp, index: usize) ?&Node {
- var i = index;
-
- if (i < 1) return self.lhs;
- i -= 1;
-
- switch (self.op) {
- InfixOp.Add,
- InfixOp.AddWrap,
- InfixOp.ArrayCat,
- InfixOp.ArrayMult,
- InfixOp.Assign,
- InfixOp.AssignBitAnd,
- InfixOp.AssignBitOr,
- InfixOp.AssignBitShiftLeft,
- InfixOp.AssignBitShiftRight,
- InfixOp.AssignBitXor,
- InfixOp.AssignDiv,
- InfixOp.AssignMinus,
- InfixOp.AssignMinusWrap,
- InfixOp.AssignMod,
- InfixOp.AssignPlus,
- InfixOp.AssignPlusWrap,
- InfixOp.AssignTimes,
- InfixOp.AssignTimesWarp,
- InfixOp.BangEqual,
- InfixOp.BitAnd,
- InfixOp.BitOr,
- InfixOp.BitShiftLeft,
- InfixOp.BitShiftRight,
- InfixOp.BitXor,
- InfixOp.BoolAnd,
- InfixOp.BoolOr,
- InfixOp.Div,
- InfixOp.EqualEqual,
- InfixOp.ErrorUnion,
- InfixOp.GreaterOrEqual,
- InfixOp.GreaterThan,
- InfixOp.LessOrEqual,
- InfixOp.LessThan,
- InfixOp.MergeErrorSets,
- InfixOp.Mod,
- InfixOp.Mult,
- InfixOp.MultWrap,
- InfixOp.Period,
- InfixOp.Sub,
- InfixOp.SubWrap,
- InfixOp.UnwrapMaybe => {},
- }
-
- if (i < 1) return self.rhs;
- i -= 1;
+ return self.body.lastToken();
+ }
+ };
- return null;
- }
+ pub const For = struct {
+ base: Node,
+ label: ?TokenIndex,
+ inline_token: ?TokenIndex,
+ for_token: TokenIndex,
+ array_expr: &Node,
+ payload: ?&Node,
+ body: &Node,
+ @"else": ?&Else,
- pub fn firstToken(self: &NodeInfixOp) Token {
- return self.lhs.firstToken();
- }
+ pub fn iterate(self: &For, index: usize) ?&Node {
+ var i = index;
- pub fn lastToken(self: &NodeInfixOp) Token {
- return self.rhs.lastToken();
- }
-};
+ if (i < 1) return self.array_expr;
+ i -= 1;
+
+ if (self.payload) |payload| {
+ if (i < 1) return payload;
+ i -= 1;
+ }
+
+ if (i < 1) return self.body;
+ i -= 1;
+
+ if (self.@"else") |@"else"| {
+ if (i < 1) return &@"else".base;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &For) TokenIndex {
+ if (self.label) |label| {
+ return label;
+ }
+
+ if (self.inline_token) |inline_token| {
+ return inline_token;
+ }
+
+ return self.for_token;
+ }
+
+ pub fn lastToken(self: &For) TokenIndex {
+ if (self.@"else") |@"else"| {
+ return @"else".body.lastToken();
+ }
+
+ return self.body.lastToken();
+ }
+ };
+
+ pub const If = struct {
+ base: Node,
+ if_token: TokenIndex,
+ condition: &Node,
+ payload: ?&Node,
+ body: &Node,
+ @"else": ?&Else,
+
+ pub fn iterate(self: &If, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.condition;
+ i -= 1;
+
+ if (self.payload) |payload| {
+ if (i < 1) return payload;
+ i -= 1;
+ }
+
+ if (i < 1) return self.body;
+ i -= 1;
+
+ if (self.@"else") |@"else"| {
+ if (i < 1) return &@"else".base;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &If) TokenIndex {
+ return self.if_token;
+ }
+
+ pub fn lastToken(self: &If) TokenIndex {
+ if (self.@"else") |@"else"| {
+ return @"else".body.lastToken();
+ }
+
+ return self.body.lastToken();
+ }
+ };
+
+ pub const InfixOp = struct {
+ base: Node,
+ op_token: TokenIndex,
+ lhs: &Node,
+ op: Op,
+ rhs: &Node,
+
+ pub const Op = union(enum) {
+ Add,
+ AddWrap,
+ ArrayCat,
+ ArrayMult,
+ Assign,
+ AssignBitAnd,
+ AssignBitOr,
+ AssignBitShiftLeft,
+ AssignBitShiftRight,
+ AssignBitXor,
+ AssignDiv,
+ AssignMinus,
+ AssignMinusWrap,
+ AssignMod,
+ AssignPlus,
+ AssignPlusWrap,
+ AssignTimes,
+ AssignTimesWarp,
+ BangEqual,
+ BitAnd,
+ BitOr,
+ BitShiftLeft,
+ BitShiftRight,
+ BitXor,
+ BoolAnd,
+ BoolOr,
+ Catch: ?&Node,
+ Div,
+ EqualEqual,
+ ErrorUnion,
+ GreaterOrEqual,
+ GreaterThan,
+ LessOrEqual,
+ LessThan,
+ MergeErrorSets,
+ Mod,
+ Mult,
+ MultWrap,
+ Period,
+ Range,
+ Sub,
+ SubWrap,
+ UnwrapMaybe,
+ };
+
+ pub fn iterate(self: &InfixOp, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.lhs;
+ i -= 1;
+
+ switch (self.op) {
+ Op.Catch => |maybe_payload| {
+ if (maybe_payload) |payload| {
+ if (i < 1) return payload;
+ i -= 1;
+ }
+ },
+
+ Op.Add,
+ Op.AddWrap,
+ Op.ArrayCat,
+ Op.ArrayMult,
+ Op.Assign,
+ Op.AssignBitAnd,
+ Op.AssignBitOr,
+ Op.AssignBitShiftLeft,
+ Op.AssignBitShiftRight,
+ Op.AssignBitXor,
+ Op.AssignDiv,
+ Op.AssignMinus,
+ Op.AssignMinusWrap,
+ Op.AssignMod,
+ Op.AssignPlus,
+ Op.AssignPlusWrap,
+ Op.AssignTimes,
+ Op.AssignTimesWarp,
+ Op.BangEqual,
+ Op.BitAnd,
+ Op.BitOr,
+ Op.BitShiftLeft,
+ Op.BitShiftRight,
+ Op.BitXor,
+ Op.BoolAnd,
+ Op.BoolOr,
+ Op.Div,
+ Op.EqualEqual,
+ Op.ErrorUnion,
+ Op.GreaterOrEqual,
+ Op.GreaterThan,
+ Op.LessOrEqual,
+ Op.LessThan,
+ Op.MergeErrorSets,
+ Op.Mod,
+ Op.Mult,
+ Op.MultWrap,
+ Op.Period,
+ Op.Range,
+ Op.Sub,
+ Op.SubWrap,
+ Op.UnwrapMaybe => {},
+ }
+
+ if (i < 1) return self.rhs;
+ i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &InfixOp) TokenIndex {
+ return self.lhs.firstToken();
+ }
+
+ pub fn lastToken(self: &InfixOp) TokenIndex {
+ return self.rhs.lastToken();
+ }
+ };
+
+ pub const PrefixOp = struct {
+ base: Node,
+ op_token: TokenIndex,
+ op: Op,
+ rhs: &Node,
+
+ const Op = union(enum) {
+ AddrOf: AddrOfInfo,
+ ArrayType: &Node,
+ Await,
+ BitNot,
+ BoolNot,
+ Cancel,
+ Deref,
+ MaybeType,
+ Negation,
+ NegationWrap,
+ Resume,
+ SliceType: AddrOfInfo,
+ Try,
+ UnwrapMaybe,
+ };
+
+ const AddrOfInfo = struct {
+ align_expr: ?&Node,
+ bit_offset_start_token: ?TokenIndex,
+ bit_offset_end_token: ?TokenIndex,
+ const_token: ?TokenIndex,
+ volatile_token: ?TokenIndex,
+ };
+
+ pub fn iterate(self: &PrefixOp, index: usize) ?&Node {
+ var i = index;
+
+ switch (self.op) {
+ Op.SliceType => |addr_of_info| {
+ if (addr_of_info.align_expr) |align_expr| {
+ if (i < 1) return align_expr;
+ i -= 1;
+ }
+ },
+ Op.AddrOf => |addr_of_info| {
+ if (addr_of_info.align_expr) |align_expr| {
+ if (i < 1) return align_expr;
+ i -= 1;
+ }
+ },
+ Op.ArrayType => |size_expr| {
+ if (i < 1) return size_expr;
+ i -= 1;
+ },
+ Op.Await,
+ Op.BitNot,
+ Op.BoolNot,
+ Op.Cancel,
+ Op.Deref,
+ Op.MaybeType,
+ Op.Negation,
+ Op.NegationWrap,
+ Op.Try,
+ Op.Resume,
+ Op.UnwrapMaybe => {},
+ }
+
+ if (i < 1) return self.rhs;
+ i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &PrefixOp) TokenIndex {
+ return self.op_token;
+ }
+
+ pub fn lastToken(self: &PrefixOp) TokenIndex {
+ return self.rhs.lastToken();
+ }
+ };
+
+ pub const FieldInitializer = struct {
+ base: Node,
+ period_token: TokenIndex,
+ name_token: TokenIndex,
+ expr: &Node,
+
+ pub fn iterate(self: &FieldInitializer, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.expr;
+ i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &FieldInitializer) TokenIndex {
+ return self.period_token;
+ }
+
+ pub fn lastToken(self: &FieldInitializer) TokenIndex {
+ return self.expr.lastToken();
+ }
+ };
+
+ pub const SuffixOp = struct {
+ base: Node,
+ lhs: &Node,
+ op: Op,
+ rtoken: TokenIndex,
+
+ pub const Op = union(enum) {
+ Call: Call,
+ ArrayAccess: &Node,
+ Slice: Slice,
+ ArrayInitializer: InitList,
+ StructInitializer: InitList,
+
+ pub const InitList = SegmentedList(&Node, 2);
+
+ pub const Call = struct {
+ params: ParamList,
+ async_attr: ?&AsyncAttribute,
+
+ pub const ParamList = SegmentedList(&Node, 2);
+ };
+
+ pub const Slice = struct {
+ start: &Node,
+ end: ?&Node,
+ };
+ };
+
+ pub fn iterate(self: &SuffixOp, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.lhs;
+ i -= 1;
+
+ switch (self.op) {
+ Op.Call => |call_info| {
+ if (i < call_info.params.len) return *call_info.params.at(i);
+ i -= call_info.params.len;
+ },
+ Op.ArrayAccess => |index_expr| {
+ if (i < 1) return index_expr;
+ i -= 1;
+ },
+ Op.Slice => |range| {
+ if (i < 1) return range.start;
+ i -= 1;
+
+ if (range.end) |end| {
+ if (i < 1) return end;
+ i -= 1;
+ }
+ },
+ Op.ArrayInitializer => |exprs| {
+ if (i < exprs.len) return *exprs.at(i);
+ i -= exprs.len;
+ },
+ Op.StructInitializer => |fields| {
+ if (i < fields.len) return *fields.at(i);
+ i -= fields.len;
+ },
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &SuffixOp) TokenIndex {
+ return self.lhs.firstToken();
+ }
+
+ pub fn lastToken(self: &SuffixOp) TokenIndex {
+ return self.rtoken;
+ }
+ };
+
+ pub const GroupedExpression = struct {
+ base: Node,
+ lparen: TokenIndex,
+ expr: &Node,
+ rparen: TokenIndex,
+
+ pub fn iterate(self: &GroupedExpression, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.expr;
+ i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &GroupedExpression) TokenIndex {
+ return self.lparen;
+ }
+
+ pub fn lastToken(self: &GroupedExpression) TokenIndex {
+ return self.rparen;
+ }
+ };
+
+ pub const ControlFlowExpression = struct {
+ base: Node,
+ ltoken: TokenIndex,
+ kind: Kind,
+ rhs: ?&Node,
+
+ const Kind = union(enum) {
+ Break: ?&Node,
+ Continue: ?&Node,
+ Return,
+ };
+
+ pub fn iterate(self: &ControlFlowExpression, index: usize) ?&Node {
+ var i = index;
+
+ switch (self.kind) {
+ Kind.Break => |maybe_label| {
+ if (maybe_label) |label| {
+ if (i < 1) return label;
+ i -= 1;
+ }
+ },
+ Kind.Continue => |maybe_label| {
+ if (maybe_label) |label| {
+ if (i < 1) return label;
+ i -= 1;
+ }
+ },
+ Kind.Return => {},
+ }
+
+ if (self.rhs) |rhs| {
+ if (i < 1) return rhs;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &ControlFlowExpression) TokenIndex {
+ return self.ltoken;
+ }
+
+ pub fn lastToken(self: &ControlFlowExpression) TokenIndex {
+ if (self.rhs) |rhs| {
+ return rhs.lastToken();
+ }
+
+ switch (self.kind) {
+ Kind.Break => |maybe_label| {
+ if (maybe_label) |label| {
+ return label.lastToken();
+ }
+ },
+ Kind.Continue => |maybe_label| {
+ if (maybe_label) |label| {
+ return label.lastToken();
+ }
+ },
+ Kind.Return => return self.ltoken,
+ }
+
+ return self.ltoken;
+ }
+ };
+
+ pub const Suspend = struct {
+ base: Node,
+ label: ?TokenIndex,
+ suspend_token: TokenIndex,
+ payload: ?&Node,
+ body: ?&Node,
+
+ pub fn iterate(self: &Suspend, index: usize) ?&Node {
+ var i = index;
+
+ if (self.payload) |payload| {
+ if (i < 1) return payload;
+ i -= 1;
+ }
+
+ if (self.body) |body| {
+ if (i < 1) return body;
+ i -= 1;
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: &Suspend) TokenIndex {
+ if (self.label) |label| return label;
+ return self.suspend_token;
+ }
+
+ pub fn lastToken(self: &Suspend) TokenIndex {
+ if (self.body) |body| {
+ return body.lastToken();
+ }
+
+ if (self.payload) |payload| {
+ return payload.lastToken();
+ }
+
+ return self.suspend_token;
+ }
+ };
+
+ pub const IntegerLiteral = struct {
+ base: Node,
+ token: TokenIndex,
+
+ pub fn iterate(self: &IntegerLiteral, index: usize) ?&Node {
+ return null;
+ }
+
+ pub fn firstToken(self: &IntegerLiteral) TokenIndex {
+ return self.token;
+ }
+
+ pub fn lastToken(self: &IntegerLiteral) TokenIndex {
+ return self.token;
+ }
+ };
+
+ pub const FloatLiteral = struct {
+ base: Node,
+ token: TokenIndex,
+
+ pub fn iterate(self: &FloatLiteral, index: usize) ?&Node {
+ return null;
+ }
+
+ pub fn firstToken(self: &FloatLiteral) TokenIndex {
+ return self.token;
+ }
+
+ pub fn lastToken(self: &FloatLiteral) TokenIndex {
+ return self.token;
+ }
+ };
+
+ pub const BuiltinCall = struct {
+ base: Node,
+ builtin_token: TokenIndex,
+ params: ParamList,
+ rparen_token: TokenIndex,
+
+ pub const ParamList = SegmentedList(&Node, 2);
+
+ pub fn iterate(self: &BuiltinCall, index: usize) ?&Node {
+ var i = index;
+
+ if (i < self.params.len) return *self.params.at(i);
+ i -= self.params.len;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &BuiltinCall) TokenIndex {
+ return self.builtin_token;
+ }
+
+ pub fn lastToken(self: &BuiltinCall) TokenIndex {
+ return self.rparen_token;
+ }
+ };
+
+ pub const StringLiteral = struct {
+ base: Node,
+ token: TokenIndex,
+
+ pub fn iterate(self: &StringLiteral, index: usize) ?&Node {
+ return null;
+ }
+
+ pub fn firstToken(self: &StringLiteral) TokenIndex {
+ return self.token;
+ }
+
+ pub fn lastToken(self: &StringLiteral) TokenIndex {
+ return self.token;
+ }
+ };
+
+ pub const MultilineStringLiteral = struct {
+ base: Node,
+ lines: LineList,
+
+ pub const LineList = SegmentedList(TokenIndex, 4);
+
+ pub fn iterate(self: &MultilineStringLiteral, index: usize) ?&Node {
+ return null;
+ }
+
+ pub fn firstToken(self: &MultilineStringLiteral) TokenIndex {
+ return *self.lines.at(0);
+ }
+
+ pub fn lastToken(self: &MultilineStringLiteral) TokenIndex {
+ return *self.lines.at(self.lines.len - 1);
+ }
+ };
+
+ pub const CharLiteral = struct {
+ base: Node,
+ token: TokenIndex,
-pub const NodePrefixOp = struct {
- base: Node,
- op_token: Token,
- op: PrefixOp,
- rhs: &Node,
-
- const PrefixOp = union(enum) {
- AddrOf: AddrOfInfo,
- BitNot,
- BoolNot,
- Deref,
- Negation,
- NegationWrap,
- Return,
- Try,
- UnwrapMaybe,
- };
- const AddrOfInfo = struct {
- align_expr: ?&Node,
- bit_offset_start_token: ?Token,
- bit_offset_end_token: ?Token,
- const_token: ?Token,
- volatile_token: ?Token,
- };
-
- pub fn iterate(self: &NodePrefixOp, index: usize) ?&Node {
- var i = index;
-
- switch (self.op) {
- PrefixOp.AddrOf => |addr_of_info| {
- if (addr_of_info.align_expr) |align_expr| {
- if (i < 1) return align_expr;
+ pub fn iterate(self: &CharLiteral, index: usize) ?&Node {
+ return null;
+ }
+
+ pub fn firstToken(self: &CharLiteral) TokenIndex {
+ return self.token;
+ }
+
+ pub fn lastToken(self: &CharLiteral) TokenIndex {
+ return self.token;
+ }
+ };
+
+ pub const BoolLiteral = struct {
+ base: Node,
+ token: TokenIndex,
+
+ pub fn iterate(self: &BoolLiteral, index: usize) ?&Node {
+ return null;
+ }
+
+ pub fn firstToken(self: &BoolLiteral) TokenIndex {
+ return self.token;
+ }
+
+ pub fn lastToken(self: &BoolLiteral) TokenIndex {
+ return self.token;
+ }
+ };
+
+ pub const NullLiteral = struct {
+ base: Node,
+ token: TokenIndex,
+
+ pub fn iterate(self: &NullLiteral, index: usize) ?&Node {
+ return null;
+ }
+
+ pub fn firstToken(self: &NullLiteral) TokenIndex {
+ return self.token;
+ }
+
+ pub fn lastToken(self: &NullLiteral) TokenIndex {
+ return self.token;
+ }
+ };
+
+ pub const UndefinedLiteral = struct {
+ base: Node,
+ token: TokenIndex,
+
+ pub fn iterate(self: &UndefinedLiteral, index: usize) ?&Node {
+ return null;
+ }
+
+ pub fn firstToken(self: &UndefinedLiteral) TokenIndex {
+ return self.token;
+ }
+
+ pub fn lastToken(self: &UndefinedLiteral) TokenIndex {
+ return self.token;
+ }
+ };
+
+ pub const ThisLiteral = struct {
+ base: Node,
+ token: TokenIndex,
+
+ pub fn iterate(self: &ThisLiteral, index: usize) ?&Node {
+ return null;
+ }
+
+ pub fn firstToken(self: &ThisLiteral) TokenIndex {
+ return self.token;
+ }
+
+ pub fn lastToken(self: &ThisLiteral) TokenIndex {
+ return self.token;
+ }
+ };
+
+ pub const AsmOutput = struct {
+ base: Node,
+ symbolic_name: &Node,
+ constraint: &Node,
+ kind: Kind,
+
+ const Kind = union(enum) {
+ Variable: &Identifier,
+ Return: &Node
+ };
+
+ pub fn iterate(self: &AsmOutput, index: usize) ?&Node {
+ var i = index;
+
+ if (i < 1) return self.symbolic_name;
+ i -= 1;
+
+ if (i < 1) return self.constraint;
+ i -= 1;
+
+ switch (self.kind) {
+ Kind.Variable => |variable_name| {
+ if (i < 1) return &variable_name.base;
+ i -= 1;
+ },
+ Kind.Return => |return_type| {
+ if (i < 1) return return_type;
i -= 1;
}
- },
- PrefixOp.BitNot,
- PrefixOp.BoolNot,
- PrefixOp.Deref,
- PrefixOp.Negation,
- PrefixOp.NegationWrap,
- PrefixOp.Return,
- PrefixOp.Try,
- PrefixOp.UnwrapMaybe => {},
+ }
+
+ return null;
}
- if (i < 1) return self.rhs;
- i -= 1;
+ pub fn firstToken(self: &AsmOutput) TokenIndex {
+ return self.symbolic_name.firstToken();
+ }
- return null;
- }
+ pub fn lastToken(self: &AsmOutput) TokenIndex {
+ return switch (self.kind) {
+ Kind.Variable => |variable_name| variable_name.lastToken(),
+ Kind.Return => |return_type| return_type.lastToken(),
+ };
+ }
+ };
- pub fn firstToken(self: &NodePrefixOp) Token {
- return self.op_token;
- }
+ pub const AsmInput = struct {
+ base: Node,
+ symbolic_name: &Node,
+ constraint: &Node,
+ expr: &Node,
- pub fn lastToken(self: &NodePrefixOp) Token {
- return self.rhs.lastToken();
- }
-};
+ pub fn iterate(self: &AsmInput, index: usize) ?&Node {
+ var i = index;
-pub const NodeIntegerLiteral = struct {
- base: Node,
- token: Token,
+ if (i < 1) return self.symbolic_name;
+ i -= 1;
- pub fn iterate(self: &NodeIntegerLiteral, index: usize) ?&Node {
- return null;
- }
+ if (i < 1) return self.constraint;
+ i -= 1;
- pub fn firstToken(self: &NodeIntegerLiteral) Token {
- return self.token;
- }
+ if (i < 1) return self.expr;
+ i -= 1;
- pub fn lastToken(self: &NodeIntegerLiteral) Token {
- return self.token;
- }
-};
+ return null;
+ }
-pub const NodeFloatLiteral = struct {
- base: Node,
- token: Token,
+ pub fn firstToken(self: &AsmInput) TokenIndex {
+ return self.symbolic_name.firstToken();
+ }
- pub fn iterate(self: &NodeFloatLiteral, index: usize) ?&Node {
- return null;
- }
+ pub fn lastToken(self: &AsmInput) TokenIndex {
+ return self.expr.lastToken();
+ }
+ };
- pub fn firstToken(self: &NodeFloatLiteral) Token {
- return self.token;
- }
+ pub const Asm = struct {
+ base: Node,
+ asm_token: TokenIndex,
+ volatile_token: ?TokenIndex,
+ template: &Node,
+ outputs: OutputList,
+ inputs: InputList,
+ clobbers: ClobberList,
+ rparen: TokenIndex,
- pub fn lastToken(self: &NodeFloatLiteral) Token {
- return self.token;
- }
-};
+ const OutputList = SegmentedList(&AsmOutput, 2);
+ const InputList = SegmentedList(&AsmInput, 2);
+ const ClobberList = SegmentedList(&Node, 2);
-pub const NodeBuiltinCall = struct {
- base: Node,
- builtin_token: Token,
- params: ArrayList(&Node),
- rparen_token: Token,
+ pub fn iterate(self: &Asm, index: usize) ?&Node {
+ var i = index;
- pub fn iterate(self: &NodeBuiltinCall, index: usize) ?&Node {
- var i = index;
+ if (i < self.outputs.len) return &(*self.outputs.at(index)).base;
+ i -= self.outputs.len;
- if (i < self.params.len) return self.params.at(i);
- i -= self.params.len;
+ if (i < self.inputs.len) return &(*self.inputs.at(index)).base;
+ i -= self.inputs.len;
- return null;
- }
+ if (i < self.clobbers.len) return *self.clobbers.at(index);
+ i -= self.clobbers.len;
- pub fn firstToken(self: &NodeBuiltinCall) Token {
- return self.builtin_token;
- }
+ return null;
+ }
- pub fn lastToken(self: &NodeBuiltinCall) Token {
- return self.rparen_token;
- }
-};
+ pub fn firstToken(self: &Asm) TokenIndex {
+ return self.asm_token;
+ }
-pub const NodeCall = struct {
- base: Node,
- callee: &Node,
- params: ArrayList(&Node),
- rparen_token: Token,
+ pub fn lastToken(self: &Asm) TokenIndex {
+ return self.rparen;
+ }
+ };
- pub fn iterate(self: &NodeCall, index: usize) ?&Node {
- var i = index;
+ pub const Unreachable = struct {
+ base: Node,
+ token: TokenIndex,
- if (i < 1) return self.callee;
- i -= 1;
+ pub fn iterate(self: &Unreachable, index: usize) ?&Node {
+ return null;
+ }
- if (i < self.params.len) return self.params.at(i);
- i -= self.params.len;
+ pub fn firstToken(self: &Unreachable) TokenIndex {
+ return self.token;
+ }
- return null;
- }
+ pub fn lastToken(self: &Unreachable) TokenIndex {
+ return self.token;
+ }
+ };
- pub fn firstToken(self: &NodeCall) Token {
- return self.callee.firstToken();
- }
+ pub const ErrorType = struct {
+ base: Node,
+ token: TokenIndex,
- pub fn lastToken(self: &NodeCall) Token {
- return self.rparen_token;
- }
-};
+ pub fn iterate(self: &ErrorType, index: usize) ?&Node {
+ return null;
+ }
-pub const NodeStringLiteral = struct {
- base: Node,
- token: Token,
+ pub fn firstToken(self: &ErrorType) TokenIndex {
+ return self.token;
+ }
- pub fn iterate(self: &NodeStringLiteral, index: usize) ?&Node {
- return null;
- }
+ pub fn lastToken(self: &ErrorType) TokenIndex {
+ return self.token;
+ }
+ };
- pub fn firstToken(self: &NodeStringLiteral) Token {
- return self.token;
- }
+ pub const VarType = struct {
+ base: Node,
+ token: TokenIndex,
- pub fn lastToken(self: &NodeStringLiteral) Token {
- return self.token;
- }
-};
+ pub fn iterate(self: &VarType, index: usize) ?&Node {
+ return null;
+ }
-pub const NodeUndefinedLiteral = struct {
- base: Node,
- token: Token,
+ pub fn firstToken(self: &VarType) TokenIndex {
+ return self.token;
+ }
- pub fn iterate(self: &NodeUndefinedLiteral, index: usize) ?&Node {
- return null;
- }
+ pub fn lastToken(self: &VarType) TokenIndex {
+ return self.token;
+ }
+ };
- pub fn firstToken(self: &NodeUndefinedLiteral) Token {
- return self.token;
- }
+ pub const LineComment = struct {
+ base: Node,
+ token: TokenIndex,
- pub fn lastToken(self: &NodeUndefinedLiteral) Token {
- return self.token;
- }
-};
+ pub fn iterate(self: &LineComment, index: usize) ?&Node {
+ return null;
+ }
-pub const NodeLineComment = struct {
- base: Node,
- lines: ArrayList(Token),
+ pub fn firstToken(self: &LineComment) TokenIndex {
+ return self.token;
+ }
- pub fn iterate(self: &NodeLineComment, index: usize) ?&Node {
- return null;
- }
+ pub fn lastToken(self: &LineComment) TokenIndex {
+ return self.token;
+ }
+ };
- pub fn firstToken(self: &NodeLineComment) Token {
- return self.lines.at(0);
- }
+ pub const DocComment = struct {
+ base: Node,
+ lines: LineList,
- pub fn lastToken(self: &NodeLineComment) Token {
- return self.lines.at(self.lines.len - 1);
- }
-};
+ pub const LineList = SegmentedList(TokenIndex, 4);
-pub const NodeTestDecl = struct {
- base: Node,
- test_token: Token,
- name_token: Token,
- body_node: &Node,
+ pub fn iterate(self: &DocComment, index: usize) ?&Node {
+ return null;
+ }
- pub fn iterate(self: &NodeTestDecl, index: usize) ?&Node {
- var i = index;
+ pub fn firstToken(self: &DocComment) TokenIndex {
+ return *self.lines.at(0);
+ }
- if (i < 1) return self.body_node;
- i -= 1;
+ pub fn lastToken(self: &DocComment) TokenIndex {
+ return *self.lines.at(self.lines.len - 1);
+ }
+ };
- return null;
- }
+ pub const TestDecl = struct {
+ base: Node,
+ doc_comments: ?&DocComment,
+ test_token: TokenIndex,
+ name: &Node,
+ body_node: &Node,
- pub fn firstToken(self: &NodeTestDecl) Token {
- return self.test_token;
- }
+ pub fn iterate(self: &TestDecl, index: usize) ?&Node {
+ var i = index;
- pub fn lastToken(self: &NodeTestDecl) Token {
- return self.body_node.lastToken();
- }
+ if (i < 1) return self.body_node;
+ i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: &TestDecl) TokenIndex {
+ return self.test_token;
+ }
+
+ pub fn lastToken(self: &TestDecl) TokenIndex {
+ return self.body_node.lastToken();
+ }
+ };
};
diff --git a/std/zig/bench.zig b/std/zig/bench.zig
new file mode 100644
index 0000000000..c3b6b0d3d3
--- /dev/null
+++ b/std/zig/bench.zig
@@ -0,0 +1,38 @@
+const std = @import("std");
+const mem = std.mem;
+const warn = std.debug.warn;
+const Tokenizer = std.zig.Tokenizer;
+const Parser = std.zig.Parser;
+const io = std.io;
+
+const source = @embedFile("../os/index.zig");
+var fixed_buffer_mem: [10 * 1024 * 1024]u8 = undefined;
+
+pub fn main() !void {
+ var i: usize = 0;
+ var timer = try std.os.time.Timer.start();
+ const start = timer.lap();
+ const iterations = 100;
+ var memory_used: usize = 0;
+ while (i < iterations) : (i += 1) {
+ memory_used += testOnce();
+ }
+ const end = timer.read();
+ memory_used /= iterations;
+ const elapsed_s = f64(end - start) / std.os.time.ns_per_s;
+ const bytes_per_sec = f64(source.len * iterations) / elapsed_s;
+ const mb_per_sec = bytes_per_sec / (1024 * 1024);
+
+ var stdout_file = try std.io.getStdOut();
+ const stdout = &std.io.FileOutStream.init(&stdout_file).stream;
+ try stdout.print("{.3} MB/s, {} KB used \n", mb_per_sec, memory_used / 1024);
+}
+
+fn testOnce() usize {
+ var fixed_buf_alloc = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
+ var allocator = &fixed_buf_alloc.allocator;
+ var tokenizer = Tokenizer.init(source);
+ var parser = Parser.init(&tokenizer, allocator, "(memory buffer)");
+ _ = parser.parse() catch @panic("parse failure");
+ return fixed_buf_alloc.end_index;
+}
diff --git a/std/zig/index.zig b/std/zig/index.zig
index 32699935d9..4dd68fa8b3 100644
--- a/std/zig/index.zig
+++ b/std/zig/index.zig
@@ -1,11 +1,13 @@
const tokenizer = @import("tokenizer.zig");
pub const Token = tokenizer.Token;
pub const Tokenizer = tokenizer.Tokenizer;
-pub const Parser = @import("parser.zig").Parser;
+pub const parse = @import("parse.zig").parse;
+pub const render = @import("render.zig").render;
pub const ast = @import("ast.zig");
test "std.zig tests" {
- _ = @import("tokenizer.zig");
- _ = @import("parser.zig");
_ = @import("ast.zig");
+ _ = @import("parse.zig");
+ _ = @import("render.zig");
+ _ = @import("tokenizer.zig");
}
diff --git a/std/zig/parse.zig b/std/zig/parse.zig
new file mode 100644
index 0000000000..c96893fd96
--- /dev/null
+++ b/std/zig/parse.zig
@@ -0,0 +1,3503 @@
+const std = @import("../index.zig");
+const assert = std.debug.assert;
+const mem = std.mem;
+const ast = std.zig.ast;
+const Tokenizer = std.zig.Tokenizer;
+const Token = std.zig.Token;
+const TokenIndex = ast.TokenIndex;
+const Error = ast.Error;
+
+/// Returns an AST tree, allocated with the parser's allocator.
+/// Result should be freed with tree.deinit() when there are
+/// no more references to any AST nodes of the tree.
+pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
+ var tree_arena = std.heap.ArenaAllocator.init(allocator);
+ errdefer tree_arena.deinit();
+
+ var stack = std.ArrayList(State).init(allocator);
+ defer stack.deinit();
+
+ const arena = &tree_arena.allocator;
+ const root_node = try createNode(arena, ast.Node.Root,
+ ast.Node.Root {
+ .base = undefined,
+ .decls = ast.Node.Root.DeclList.init(arena),
+ .doc_comments = null,
+ // initialized when we get the eof token
+ .eof_token = undefined,
+ }
+ );
+
+ var tree = ast.Tree {
+ .source = source,
+ .root_node = root_node,
+ .arena_allocator = tree_arena,
+ .tokens = ast.Tree.TokenList.init(arena),
+ .errors = ast.Tree.ErrorList.init(arena),
+ };
+
+ var tokenizer = Tokenizer.init(tree.source);
+ while (true) {
+ const token_ptr = try tree.tokens.addOne();
+ *token_ptr = tokenizer.next();
+ if (token_ptr.id == Token.Id.Eof)
+ break;
+ }
+ var tok_it = tree.tokens.iterator(0);
+
+ try stack.append(State.TopLevel);
+
+ while (true) {
+ // This gives us 1 free push that can't fail
+ const state = stack.pop();
+
+ switch (state) {
+ State.TopLevel => {
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try root_node.decls.push(&line_comment.base);
+ }
+
+ const comments = try eatDocComments(arena, &tok_it, &tree);
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_test => {
+ stack.append(State.TopLevel) catch unreachable;
+
+ const block = try arena.construct(ast.Node.Block {
+ .base = ast.Node {
+ .id = ast.Node.Id.Block,
+ },
+ .label = null,
+ .lbrace = undefined,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ });
+ const test_node = try arena.construct(ast.Node.TestDecl {
+ .base = ast.Node {
+ .id = ast.Node.Id.TestDecl,
+ },
+ .doc_comments = comments,
+ .test_token = token_index,
+ .name = undefined,
+ .body_node = &block.base,
+ });
+ try root_node.decls.push(&test_node.base);
+ try stack.append(State { .Block = block });
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.LBrace,
+ .ptr = &block.rbrace,
+ }
+ });
+ try stack.append(State { .StringLiteral = OptionalCtx { .Required = &test_node.name } });
+ continue;
+ },
+ Token.Id.Eof => {
+ root_node.eof_token = token_index;
+ root_node.doc_comments = comments;
+ return tree;
+ },
+ Token.Id.Keyword_pub => {
+ stack.append(State.TopLevel) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &root_node.decls,
+ .visib_token = token_index,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ }
+ });
+ continue;
+ },
+ Token.Id.Keyword_comptime => {
+ const block = try createNode(arena, ast.Node.Block,
+ ast.Node.Block {
+ .base = undefined,
+ .label = null,
+ .lbrace = undefined,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ }
+ );
+ const node = try arena.construct(ast.Node.Comptime {
+ .base = ast.Node {
+ .id = ast.Node.Id.Comptime,
+ },
+ .comptime_token = token_index,
+ .expr = &block.base,
+ .doc_comments = comments,
+ });
+ try root_node.decls.push(&node.base);
+
+ stack.append(State.TopLevel) catch unreachable;
+ try stack.append(State { .Block = block });
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.LBrace,
+ .ptr = &block.rbrace,
+ }
+ });
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ stack.append(State.TopLevel) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &root_node.decls,
+ .visib_token = null,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ }
+ });
+ continue;
+ },
+ }
+ },
+ State.TopLevelExtern => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_export, Token.Id.Keyword_inline => {
+ stack.append(State {
+ .TopLevelDecl = TopLevelDeclCtx {
+ .decls = ctx.decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = AnnotatedToken {
+ .index = token_index,
+ .ptr = token_ptr,
+ },
+ .lib_name = null,
+ .comments = ctx.comments,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_extern => {
+ stack.append(State {
+ .TopLevelLibname = TopLevelDeclCtx {
+ .decls = ctx.decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = AnnotatedToken {
+ .index = token_index,
+ .ptr = token_ptr,
+ },
+ .lib_name = null,
+ .comments = ctx.comments,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ stack.append(State { .TopLevelDecl = ctx }) catch unreachable;
+ continue;
+ }
+ }
+ },
+ State.TopLevelLibname => |ctx| {
+ const lib_name = blk: {
+ const lib_name_token = nextToken(&tok_it, &tree);
+ const lib_name_token_index = lib_name_token.index;
+ const lib_name_token_ptr = lib_name_token.ptr;
+ break :blk (try parseStringLiteral(arena, &tok_it, lib_name_token_ptr, lib_name_token_index, &tree)) ?? {
+ putBackToken(&tok_it, &tree);
+ break :blk null;
+ };
+ };
+
+ stack.append(State {
+ .TopLevelDecl = TopLevelDeclCtx {
+ .decls = ctx.decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = ctx.extern_export_inline_token,
+ .lib_name = lib_name,
+ .comments = ctx.comments,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ State.TopLevelDecl => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_use => {
+ if (ctx.extern_export_inline_token) |annotated_token| {
+ *(try tree.errors.addOne()) = Error {
+ .InvalidToken = Error.InvalidToken { .token = annotated_token.index },
+ };
+ return tree;
+ }
+
+ const node = try arena.construct(ast.Node.Use {
+ .base = ast.Node {.id = ast.Node.Id.Use },
+ .visib_token = ctx.visib_token,
+ .expr = undefined,
+ .semicolon_token = undefined,
+ .doc_comments = ctx.comments,
+ });
+ try ctx.decls.push(&node.base);
+
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Semicolon,
+ .ptr = &node.semicolon_token,
+ }
+ }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ continue;
+ },
+ Token.Id.Keyword_var, Token.Id.Keyword_const => {
+ if (ctx.extern_export_inline_token) |annotated_token| {
+ if (annotated_token.ptr.id == Token.Id.Keyword_inline) {
+ *(try tree.errors.addOne()) = Error {
+ .InvalidToken = Error.InvalidToken { .token = annotated_token.index },
+ };
+ return tree;
+ }
+ }
+
+ try stack.append(State {
+ .VarDecl = VarDeclCtx {
+ .comments = ctx.comments,
+ .visib_token = ctx.visib_token,
+ .lib_name = ctx.lib_name,
+ .comptime_token = null,
+ .extern_export_token = if (ctx.extern_export_inline_token) |at| at.index else null,
+ .mut_token = token_index,
+ .list = ctx.decls
+ }
+ });
+ continue;
+ },
+ Token.Id.Keyword_fn, Token.Id.Keyword_nakedcc,
+ Token.Id.Keyword_stdcallcc, Token.Id.Keyword_async => {
+ const fn_proto = try arena.construct(ast.Node.FnProto {
+ .base = ast.Node {
+ .id = ast.Node.Id.FnProto,
+ },
+ .doc_comments = ctx.comments,
+ .visib_token = ctx.visib_token,
+ .name_token = null,
+ .fn_token = undefined,
+ .params = ast.Node.FnProto.ParamList.init(arena),
+ .return_type = undefined,
+ .var_args_token = null,
+ .extern_export_inline_token = if (ctx.extern_export_inline_token) |at| at.index else null,
+ .cc_token = null,
+ .async_attr = null,
+ .body_node = null,
+ .lib_name = ctx.lib_name,
+ .align_expr = null,
+ });
+ try ctx.decls.push(&fn_proto.base);
+ stack.append(State { .FnDef = fn_proto }) catch unreachable;
+ try stack.append(State { .FnProto = fn_proto });
+
+ switch (token_ptr.id) {
+ Token.Id.Keyword_nakedcc, Token.Id.Keyword_stdcallcc => {
+ fn_proto.cc_token = token_index;
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Keyword_fn,
+ .ptr = &fn_proto.fn_token,
+ }
+ });
+ continue;
+ },
+ Token.Id.Keyword_async => {
+ const async_node = try createNode(arena, ast.Node.AsyncAttribute,
+ ast.Node.AsyncAttribute {
+ .base = undefined,
+ .async_token = token_index,
+ .allocator_type = null,
+ .rangle_bracket = null,
+ }
+ );
+ fn_proto.async_attr = async_node;
+
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Keyword_fn,
+ .ptr = &fn_proto.fn_token,
+ }
+ });
+ try stack.append(State { .AsyncAllocator = async_node });
+ continue;
+ },
+ Token.Id.Keyword_fn => {
+ fn_proto.fn_token = token_index;
+ continue;
+ },
+ else => unreachable,
+ }
+ },
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedVarDeclOrFn = Error.ExpectedVarDeclOrFn { .token = token_index },
+ };
+ return tree;
+ },
+ }
+ },
+ State.TopLevelExternOrField => |ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Identifier)) |identifier| {
+ std.debug.assert(ctx.container_decl.kind == ast.Node.ContainerDecl.Kind.Struct);
+ const node = try arena.construct(ast.Node.StructField {
+ .base = ast.Node {
+ .id = ast.Node.Id.StructField,
+ },
+ .doc_comments = ctx.comments,
+ .visib_token = ctx.visib_token,
+ .name_token = identifier,
+ .type_expr = undefined,
+ });
+ const node_ptr = try ctx.container_decl.fields_and_decls.addOne();
+ *node_ptr = &node.base;
+
+ stack.append(State { .FieldListCommaOrEnd = ctx.container_decl }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.type_expr } });
+ try stack.append(State { .ExpectToken = Token.Id.Colon });
+ continue;
+ }
+
+ stack.append(State{ .ContainerDecl = ctx.container_decl }) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &ctx.container_decl.fields_and_decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = ctx.comments,
+ }
+ });
+ continue;
+ },
+
+ State.FieldInitValue => |ctx| {
+ const eq_tok = nextToken(&tok_it, &tree);
+ const eq_tok_index = eq_tok.index;
+ const eq_tok_ptr = eq_tok.ptr;
+ if (eq_tok_ptr.id != Token.Id.Equal) {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ stack.append(State { .Expression = ctx }) catch unreachable;
+ continue;
+ },
+
+ State.ContainerKind => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ const node = try createToCtxNode(arena, ctx.opt_ctx, ast.Node.ContainerDecl,
+ ast.Node.ContainerDecl {
+ .base = undefined,
+ .ltoken = ctx.ltoken,
+ .layout = ctx.layout,
+ .kind = switch (token_ptr.id) {
+ Token.Id.Keyword_struct => ast.Node.ContainerDecl.Kind.Struct,
+ Token.Id.Keyword_union => ast.Node.ContainerDecl.Kind.Union,
+ Token.Id.Keyword_enum => ast.Node.ContainerDecl.Kind.Enum,
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedAggregateKw = Error.ExpectedAggregateKw { .token = token_index },
+ };
+ return tree;
+ },
+ },
+ .init_arg_expr = ast.Node.ContainerDecl.InitArg.None,
+ .fields_and_decls = ast.Node.ContainerDecl.DeclList.init(arena),
+ .rbrace_token = undefined,
+ }
+ );
+
+ stack.append(State { .ContainerDecl = node }) catch unreachable;
+ try stack.append(State { .ExpectToken = Token.Id.LBrace });
+ try stack.append(State { .ContainerInitArgStart = node });
+ continue;
+ },
+
+ State.ContainerInitArgStart => |container_decl| {
+ if (eatToken(&tok_it, &tree, Token.Id.LParen) == null) {
+ continue;
+ }
+
+ stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
+ try stack.append(State { .ContainerInitArg = container_decl });
+ continue;
+ },
+
+ State.ContainerInitArg => |container_decl| {
+ const init_arg_token = nextToken(&tok_it, &tree);
+ const init_arg_token_index = init_arg_token.index;
+ const init_arg_token_ptr = init_arg_token.ptr;
+ switch (init_arg_token_ptr.id) {
+ Token.Id.Keyword_enum => {
+ container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg {.Enum = null};
+ const lparen_tok = nextToken(&tok_it, &tree);
+ const lparen_tok_index = lparen_tok.index;
+ const lparen_tok_ptr = lparen_tok.ptr;
+ if (lparen_tok_ptr.id == Token.Id.LParen) {
+ try stack.append(State { .ExpectToken = Token.Id.RParen } );
+ try stack.append(State { .Expression = OptionalCtx {
+ .RequiredNull = &container_decl.init_arg_expr.Enum,
+ } });
+ } else {
+ putBackToken(&tok_it, &tree);
+ }
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg { .Type = undefined };
+ stack.append(State { .Expression = OptionalCtx { .Required = &container_decl.init_arg_expr.Type } }) catch unreachable;
+ },
+ }
+ continue;
+ },
+
+ State.ContainerDecl => |container_decl| {
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try container_decl.fields_and_decls.push(&line_comment.base);
+ }
+
+ const comments = try eatDocComments(arena, &tok_it, &tree);
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Identifier => {
+ switch (container_decl.kind) {
+ ast.Node.ContainerDecl.Kind.Struct => {
+ const node = try arena.construct(ast.Node.StructField {
+ .base = ast.Node {
+ .id = ast.Node.Id.StructField,
+ },
+ .doc_comments = comments,
+ .visib_token = null,
+ .name_token = token_index,
+ .type_expr = undefined,
+ });
+ const node_ptr = try container_decl.fields_and_decls.addOne();
+ *node_ptr = &node.base;
+
+ try stack.append(State { .FieldListCommaOrEnd = container_decl });
+ try stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.type_expr } });
+ try stack.append(State { .ExpectToken = Token.Id.Colon });
+ continue;
+ },
+ ast.Node.ContainerDecl.Kind.Union => {
+ const node = try arena.construct(ast.Node.UnionTag {
+ .base = ast.Node {.id = ast.Node.Id.UnionTag },
+ .name_token = token_index,
+ .type_expr = null,
+ .value_expr = null,
+ .doc_comments = comments,
+ });
+ try container_decl.fields_and_decls.push(&node.base);
+
+ stack.append(State { .FieldListCommaOrEnd = container_decl }) catch unreachable;
+ try stack.append(State { .FieldInitValue = OptionalCtx { .RequiredNull = &node.value_expr } });
+ try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &node.type_expr } });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ continue;
+ },
+ ast.Node.ContainerDecl.Kind.Enum => {
+ const node = try arena.construct(ast.Node.EnumTag {
+ .base = ast.Node { .id = ast.Node.Id.EnumTag },
+ .name_token = token_index,
+ .value = null,
+ .doc_comments = comments,
+ });
+ try container_decl.fields_and_decls.push(&node.base);
+
+ stack.append(State { .FieldListCommaOrEnd = container_decl }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &node.value } });
+ try stack.append(State { .IfToken = Token.Id.Equal });
+ continue;
+ },
+ }
+ },
+ Token.Id.Keyword_pub => {
+ switch (container_decl.kind) {
+ ast.Node.ContainerDecl.Kind.Struct => {
+ try stack.append(State {
+ .TopLevelExternOrField = TopLevelExternOrFieldCtx {
+ .visib_token = token_index,
+ .container_decl = container_decl,
+ .comments = comments,
+ }
+ });
+ continue;
+ },
+ else => {
+ stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &container_decl.fields_and_decls,
+ .visib_token = token_index,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ }
+ });
+ continue;
+ }
+ }
+ },
+ Token.Id.Keyword_export => {
+ stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &container_decl.fields_and_decls,
+ .visib_token = token_index,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ }
+ });
+ continue;
+ },
+ Token.Id.RBrace => {
+ if (comments != null) {
+ *(try tree.errors.addOne()) = Error {
+ .UnattachedDocComment = Error.UnattachedDocComment { .token = token_index },
+ };
+ return tree;
+ }
+ container_decl.rbrace_token = token_index;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &container_decl.fields_and_decls,
+ .visib_token = null,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ }
+ });
+ continue;
+ }
+ }
+ },
+
+
+ State.VarDecl => |ctx| {
+ const var_decl = try arena.construct(ast.Node.VarDecl {
+ .base = ast.Node {
+ .id = ast.Node.Id.VarDecl,
+ },
+ .doc_comments = ctx.comments,
+ .visib_token = ctx.visib_token,
+ .mut_token = ctx.mut_token,
+ .comptime_token = ctx.comptime_token,
+ .extern_export_token = ctx.extern_export_token,
+ .type_node = null,
+ .align_node = null,
+ .init_node = null,
+ .lib_name = ctx.lib_name,
+ // initialized later
+ .name_token = undefined,
+ .eq_token = undefined,
+ .semicolon_token = undefined,
+ });
+ try ctx.list.push(&var_decl.base);
+
+ try stack.append(State { .VarDeclAlign = var_decl });
+ try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &var_decl.type_node} });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Identifier,
+ .ptr = &var_decl.name_token,
+ }
+ });
+ continue;
+ },
+ State.VarDeclAlign => |var_decl| {
+ try stack.append(State { .VarDeclEq = var_decl });
+
+ const next_token = nextToken(&tok_it, &tree);
+ const next_token_index = next_token.index;
+ const next_token_ptr = next_token.ptr;
+ if (next_token_ptr.id == Token.Id.Keyword_align) {
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &var_decl.align_node} });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ continue;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ },
+ State.VarDeclEq => |var_decl| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Equal => {
+ var_decl.eq_token = token_index;
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Semicolon,
+ .ptr = &var_decl.semicolon_token,
+ },
+ }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &var_decl.init_node } });
+ continue;
+ },
+ Token.Id.Semicolon => {
+ var_decl.semicolon_token = token_index;
+ continue;
+ },
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedEqOrSemi = Error.ExpectedEqOrSemi { .token = token_index },
+ };
+ return tree;
+ }
+ }
+ },
+
+
+ State.FnDef => |fn_proto| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch(token_ptr.id) {
+ Token.Id.LBrace => {
+ const block = try arena.construct(ast.Node.Block {
+ .base = ast.Node { .id = ast.Node.Id.Block },
+ .label = null,
+ .lbrace = token_index,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ });
+ fn_proto.body_node = &block.base;
+ stack.append(State { .Block = block }) catch unreachable;
+ continue;
+ },
+ Token.Id.Semicolon => continue,
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedSemiOrLBrace = Error.ExpectedSemiOrLBrace { .token = token_index },
+ };
+ return tree;
+ },
+ }
+ },
+ State.FnProto => |fn_proto| {
+ stack.append(State { .FnProtoAlign = fn_proto }) catch unreachable;
+ try stack.append(State { .ParamDecl = fn_proto });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+
+ if (eatToken(&tok_it, &tree, Token.Id.Identifier)) |name_token| {
+ fn_proto.name_token = name_token;
+ }
+ continue;
+ },
+ State.FnProtoAlign => |fn_proto| {
+ stack.append(State { .FnProtoReturnType = fn_proto }) catch unreachable;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_align)) |align_token| {
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &fn_proto.align_expr } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ }
+ continue;
+ },
+ State.FnProtoReturnType => |fn_proto| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Bang => {
+ fn_proto.return_type = ast.Node.FnProto.ReturnType { .InferErrorSet = undefined };
+ stack.append(State {
+ .TypeExprBegin = OptionalCtx { .Required = &fn_proto.return_type.InferErrorSet },
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ // TODO: this is a special case. Remove this when #760 is fixed
+ if (token_ptr.id == Token.Id.Keyword_error) {
+ if ((??tok_it.peek()).id == Token.Id.LBrace) {
+ const error_type_node = try arena.construct(ast.Node.ErrorType {
+ .base = ast.Node { .id = ast.Node.Id.ErrorType },
+ .token = token_index,
+ });
+ fn_proto.return_type = ast.Node.FnProto.ReturnType {
+ .Explicit = &error_type_node.base,
+ };
+ continue;
+ }
+ }
+
+ putBackToken(&tok_it, &tree);
+ fn_proto.return_type = ast.Node.FnProto.ReturnType { .Explicit = undefined };
+ stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &fn_proto.return_type.Explicit }, }) catch unreachable;
+ continue;
+ },
+ }
+ },
+
+
+ State.ParamDecl => |fn_proto| {
+ if (eatToken(&tok_it, &tree, Token.Id.RParen)) |_| {
+ continue;
+ }
+ const param_decl = try arena.construct(ast.Node.ParamDecl {
+ .base = ast.Node {.id = ast.Node.Id.ParamDecl },
+ .comptime_token = null,
+ .noalias_token = null,
+ .name_token = null,
+ .type_node = undefined,
+ .var_args_token = null,
+ });
+ try fn_proto.params.push(&param_decl.base);
+
+ stack.append(State {
+ .ParamDeclEnd = ParamDeclEndCtx {
+ .param_decl = param_decl,
+ .fn_proto = fn_proto,
+ }
+ }) catch unreachable;
+ try stack.append(State { .ParamDeclName = param_decl });
+ try stack.append(State { .ParamDeclAliasOrComptime = param_decl });
+ continue;
+ },
+ State.ParamDeclAliasOrComptime => |param_decl| {
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_comptime)) |comptime_token| {
+ param_decl.comptime_token = comptime_token;
+ } else if (eatToken(&tok_it, &tree, Token.Id.Keyword_noalias)) |noalias_token| {
+ param_decl.noalias_token = noalias_token;
+ }
+ continue;
+ },
+ State.ParamDeclName => |param_decl| {
+ // TODO: Here, we eat two tokens in one state. This means that we can't have
+ // comments between these two tokens.
+ if (eatToken(&tok_it, &tree, Token.Id.Identifier)) |ident_token| {
+ if (eatToken(&tok_it, &tree, Token.Id.Colon)) |_| {
+ param_decl.name_token = ident_token;
+ } else {
+ putBackToken(&tok_it, &tree);
+ }
+ }
+ continue;
+ },
+ State.ParamDeclEnd => |ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Ellipsis3)) |ellipsis3| {
+ ctx.param_decl.var_args_token = ellipsis3;
+ stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
+ continue;
+ }
+
+ try stack.append(State { .ParamDeclComma = ctx.fn_proto });
+ try stack.append(State {
+ .TypeExprBegin = OptionalCtx { .Required = &ctx.param_decl.type_node }
+ });
+ continue;
+ },
+ State.ParamDeclComma => |fn_proto| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RParen)) {
+ ExpectCommaOrEndResult.end_token => |t| {
+ if (t == null) {
+ stack.append(State { .ParamDecl = fn_proto }) catch unreachable;
+ }
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+
+ State.MaybeLabeledExpression => |ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Colon)) |_| {
+ stack.append(State {
+ .LabeledExpression = LabelCtx {
+ .label = ctx.label,
+ .opt_ctx = ctx.opt_ctx,
+ }
+ }) catch unreachable;
+ continue;
+ }
+
+ _ = try createToCtxLiteral(arena, ctx.opt_ctx, ast.Node.Identifier, ctx.label);
+ continue;
+ },
+ State.LabeledExpression => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.LBrace => {
+ const block = try createToCtxNode(arena, ctx.opt_ctx, ast.Node.Block,
+ ast.Node.Block {
+ .base = undefined,
+ .label = ctx.label,
+ .lbrace = token_index,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ }
+ );
+ stack.append(State { .Block = block }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_while => {
+ stack.append(State {
+ .While = LoopCtx {
+ .label = ctx.label,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_for => {
+ stack.append(State {
+ .For = LoopCtx {
+ .label = ctx.label,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_suspend => {
+ const node = try arena.construct(ast.Node.Suspend {
+ .base = ast.Node {
+ .id = ast.Node.Id.Suspend,
+ },
+ .label = ctx.label,
+ .suspend_token = token_index,
+ .payload = null,
+ .body = null,
+ });
+ ctx.opt_ctx.store(&node.base);
+ stack.append(State { .SuspendBody = node }) catch unreachable;
+ try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
+ continue;
+ },
+ Token.Id.Keyword_inline => {
+ stack.append(State {
+ .Inline = InlineCtx {
+ .label = ctx.label,
+ .inline_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ }
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ if (ctx.opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedLabelable = Error.ExpectedLabelable { .token = token_index },
+ };
+ return tree;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ },
+ }
+ },
+ State.Inline => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_while => {
+ stack.append(State {
+ .While = LoopCtx {
+ .inline_token = ctx.inline_token,
+ .label = ctx.label,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_for => {
+ stack.append(State {
+ .For = LoopCtx {
+ .inline_token = ctx.inline_token,
+ .label = ctx.label,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ }
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ if (ctx.opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedInlinable = Error.ExpectedInlinable { .token = token_index },
+ };
+ return tree;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ },
+ }
+ },
+ State.While => |ctx| {
+ const node = try createToCtxNode(arena, ctx.opt_ctx, ast.Node.While,
+ ast.Node.While {
+ .base = undefined,
+ .label = ctx.label,
+ .inline_token = ctx.inline_token,
+ .while_token = ctx.loop_token,
+ .condition = undefined,
+ .payload = null,
+ .continue_expr = null,
+ .body = undefined,
+ .@"else" = null,
+ }
+ );
+ stack.append(State { .Else = &node.@"else" }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
+ try stack.append(State { .WhileContinueExpr = &node.continue_expr });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.condition } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ continue;
+ },
+ State.WhileContinueExpr => |dest| {
+ stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
+ try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .RequiredNull = dest } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ continue;
+ },
+ State.For => |ctx| {
+ const node = try createToCtxNode(arena, ctx.opt_ctx, ast.Node.For,
+ ast.Node.For {
+ .base = undefined,
+ .label = ctx.label,
+ .inline_token = ctx.inline_token,
+ .for_token = ctx.loop_token,
+ .array_expr = undefined,
+ .payload = null,
+ .body = undefined,
+ .@"else" = null,
+ }
+ );
+ stack.append(State { .Else = &node.@"else" }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
+ try stack.append(State { .PointerIndexPayload = OptionalCtx { .Optional = &node.payload } });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.array_expr } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ continue;
+ },
+ State.Else => |dest| {
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_else)) |else_token| {
+ const node = try createNode(arena, ast.Node.Else,
+ ast.Node.Else {
+ .base = undefined,
+ .else_token = else_token,
+ .payload = null,
+ .body = undefined,
+ }
+ );
+ *dest = node;
+
+ stack.append(State { .Expression = OptionalCtx { .Required = &node.body } }) catch unreachable;
+ try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
+ continue;
+ } else {
+ continue;
+ }
+ },
+
+
+ State.Block => |block| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.RBrace => {
+ block.rbrace = token_index;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ stack.append(State { .Block = block }) catch unreachable;
+
+ var any_comments = false;
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try block.statements.push(&line_comment.base);
+ any_comments = true;
+ }
+ if (any_comments) continue;
+
+ try stack.append(State { .Statement = block });
+ continue;
+ },
+ }
+ },
+ State.Statement => |block| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_comptime => {
+ stack.append(State {
+ .ComptimeStatement = ComptimeStatementCtx {
+ .comptime_token = token_index,
+ .block = block,
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_var, Token.Id.Keyword_const => {
+ stack.append(State {
+ .VarDecl = VarDeclCtx {
+ .comments = null,
+ .visib_token = null,
+ .comptime_token = null,
+ .extern_export_token = null,
+ .lib_name = null,
+ .mut_token = token_index,
+ .list = &block.statements,
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_defer, Token.Id.Keyword_errdefer => {
+ const node = try arena.construct(ast.Node.Defer {
+ .base = ast.Node {
+ .id = ast.Node.Id.Defer,
+ },
+ .defer_token = token_index,
+ .kind = switch (token_ptr.id) {
+ Token.Id.Keyword_defer => ast.Node.Defer.Kind.Unconditional,
+ Token.Id.Keyword_errdefer => ast.Node.Defer.Kind.Error,
+ else => unreachable,
+ },
+ .expr = undefined,
+ });
+ const node_ptr = try block.statements.addOne();
+ *node_ptr = &node.base;
+
+ stack.append(State { .Semicolon = node_ptr }) catch unreachable;
+ try stack.append(State { .AssignmentExpressionBegin = OptionalCtx{ .Required = &node.expr } });
+ continue;
+ },
+ Token.Id.LBrace => {
+ const inner_block = try arena.construct(ast.Node.Block {
+ .base = ast.Node { .id = ast.Node.Id.Block },
+ .label = null,
+ .lbrace = token_index,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ });
+ try block.statements.push(&inner_block.base);
+
+ stack.append(State { .Block = inner_block }) catch unreachable;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ const statement = try block.statements.addOne();
+ try stack.append(State { .Semicolon = statement });
+ try stack.append(State { .AssignmentExpressionBegin = OptionalCtx{ .Required = statement } });
+ continue;
+ }
+ }
+ },
+ State.ComptimeStatement => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_var, Token.Id.Keyword_const => {
+ stack.append(State {
+ .VarDecl = VarDeclCtx {
+ .comments = null,
+ .visib_token = null,
+ .comptime_token = ctx.comptime_token,
+ .extern_export_token = null,
+ .lib_name = null,
+ .mut_token = token_index,
+ .list = &ctx.block.statements,
+ }
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ putBackToken(&tok_it, &tree);
+ const statement = try ctx.block.statements.addOne();
+ try stack.append(State { .Semicolon = statement });
+ try stack.append(State { .Expression = OptionalCtx { .Required = statement } });
+ continue;
+ }
+ }
+ },
+ State.Semicolon => |node_ptr| {
+ const node = *node_ptr;
+ if (node.requireSemiColon()) {
+ stack.append(State { .ExpectToken = Token.Id.Semicolon }) catch unreachable;
+ continue;
+ }
+ continue;
+ },
+
+ State.AsmOutputItems => |items| {
+ const lbracket = nextToken(&tok_it, &tree);
+ const lbracket_index = lbracket.index;
+ const lbracket_ptr = lbracket.ptr;
+ if (lbracket_ptr.id != Token.Id.LBracket) {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+
+ const node = try createNode(arena, ast.Node.AsmOutput,
+ ast.Node.AsmOutput {
+ .base = undefined,
+ .symbolic_name = undefined,
+ .constraint = undefined,
+ .kind = undefined,
+ }
+ );
+ try items.push(node);
+
+ stack.append(State { .AsmOutputItems = items }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.Comma });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .AsmOutputReturnOrType = node });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.constraint } });
+ try stack.append(State { .ExpectToken = Token.Id.RBracket });
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.symbolic_name } });
+ continue;
+ },
+ State.AsmOutputReturnOrType => |node| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Identifier => {
+ node.kind = ast.Node.AsmOutput.Kind { .Variable = try createLiteral(arena, ast.Node.Identifier, token_index) };
+ continue;
+ },
+ Token.Id.Arrow => {
+ node.kind = ast.Node.AsmOutput.Kind { .Return = undefined };
+ try stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.kind.Return } });
+ continue;
+ },
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedAsmOutputReturnOrType = Error.ExpectedAsmOutputReturnOrType {
+ .token = token_index,
+ },
+ };
+ return tree;
+ },
+ }
+ },
+ State.AsmInputItems => |items| {
+ const lbracket = nextToken(&tok_it, &tree);
+ const lbracket_index = lbracket.index;
+ const lbracket_ptr = lbracket.ptr;
+ if (lbracket_ptr.id != Token.Id.LBracket) {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+
+ const node = try createNode(arena, ast.Node.AsmInput,
+ ast.Node.AsmInput {
+ .base = undefined,
+ .symbolic_name = undefined,
+ .constraint = undefined,
+ .expr = undefined,
+ }
+ );
+ try items.push(node);
+
+ stack.append(State { .AsmInputItems = items }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.Comma });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.constraint } });
+ try stack.append(State { .ExpectToken = Token.Id.RBracket });
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.symbolic_name } });
+ continue;
+ },
+ State.AsmClobberItems => |items| {
+ stack.append(State { .AsmClobberItems = items }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.Comma });
+ try stack.append(State { .StringLiteral = OptionalCtx { .Required = try items.addOne() } });
+ continue;
+ },
+
+
+ State.ExprListItemOrEnd => |list_state| {
+ if (eatToken(&tok_it, &tree, list_state.end)) |token_index| {
+ *list_state.ptr = token_index;
+ continue;
+ }
+
+ stack.append(State { .ExprListCommaOrEnd = list_state }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = try list_state.list.addOne() } });
+ continue;
+ },
+ State.ExprListCommaOrEnd => |list_state| {
+ switch (expectCommaOrEnd(&tok_it, &tree, list_state.end)) {
+ ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
+ *list_state.ptr = end;
+ continue;
+ } else {
+ stack.append(State { .ExprListItemOrEnd = list_state }) catch unreachable;
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+ State.FieldInitListItemOrEnd => |list_state| {
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try list_state.list.push(&line_comment.base);
+ }
+
+ if (eatToken(&tok_it, &tree, Token.Id.RBrace)) |rbrace| {
+ *list_state.ptr = rbrace;
+ continue;
+ }
+
+ const node = try arena.construct(ast.Node.FieldInitializer {
+ .base = ast.Node {
+ .id = ast.Node.Id.FieldInitializer,
+ },
+ .period_token = undefined,
+ .name_token = undefined,
+ .expr = undefined,
+ });
+ try list_state.list.push(&node.base);
+
+ stack.append(State { .FieldInitListCommaOrEnd = list_state }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx{ .Required = &node.expr } });
+ try stack.append(State { .ExpectToken = Token.Id.Equal });
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Identifier,
+ .ptr = &node.name_token,
+ }
+ });
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Period,
+ .ptr = &node.period_token,
+ }
+ });
+ continue;
+ },
+ State.FieldInitListCommaOrEnd => |list_state| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RBrace)) {
+ ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
+ *list_state.ptr = end;
+ continue;
+ } else {
+ stack.append(State { .FieldInitListItemOrEnd = list_state }) catch unreachable;
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+ State.FieldListCommaOrEnd => |container_decl| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RBrace)) {
+ ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
+ container_decl.rbrace_token = end;
+ continue;
+ } else {
+ try stack.append(State { .ContainerDecl = container_decl });
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+ State.ErrorTagListItemOrEnd => |list_state| {
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try list_state.list.push(&line_comment.base);
+ }
+
+ if (eatToken(&tok_it, &tree, Token.Id.RBrace)) |rbrace| {
+ *list_state.ptr = rbrace;
+ continue;
+ }
+
+ const node_ptr = try list_state.list.addOne();
+
+ try stack.append(State { .ErrorTagListCommaOrEnd = list_state });
+ try stack.append(State { .ErrorTag = node_ptr });
+ continue;
+ },
+ State.ErrorTagListCommaOrEnd => |list_state| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RBrace)) {
+ ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
+ *list_state.ptr = end;
+ continue;
+ } else {
+ stack.append(State { .ErrorTagListItemOrEnd = list_state }) catch unreachable;
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+ State.SwitchCaseOrEnd => |list_state| {
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try list_state.list.push(&line_comment.base);
+ }
+
+ if (eatToken(&tok_it, &tree, Token.Id.RBrace)) |rbrace| {
+ *list_state.ptr = rbrace;
+ continue;
+ }
+
+ const comments = try eatDocComments(arena, &tok_it, &tree);
+ const node = try arena.construct(ast.Node.SwitchCase {
+ .base = ast.Node {
+ .id = ast.Node.Id.SwitchCase,
+ },
+ .items = ast.Node.SwitchCase.ItemList.init(arena),
+ .payload = null,
+ .expr = undefined,
+ });
+ try list_state.list.push(&node.base);
+ try stack.append(State { .SwitchCaseCommaOrEnd = list_state });
+ try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .Required = &node.expr } });
+ try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
+ try stack.append(State { .SwitchCaseFirstItem = &node.items });
+
+ continue;
+ },
+
+ State.SwitchCaseCommaOrEnd => |list_state| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RParen)) {
+ ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
+ *list_state.ptr = end;
+ continue;
+ } else {
+ try stack.append(State { .SwitchCaseOrEnd = list_state });
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+
+ State.SwitchCaseFirstItem => |case_items| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id == Token.Id.Keyword_else) {
+ const else_node = try arena.construct(ast.Node.SwitchElse {
+ .base = ast.Node{ .id = ast.Node.Id.SwitchElse},
+ .token = token_index,
+ });
+ try case_items.push(&else_node.base);
+
+ try stack.append(State { .ExpectToken = Token.Id.EqualAngleBracketRight });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ try stack.append(State { .SwitchCaseItem = case_items });
+ continue;
+ }
+ },
+ State.SwitchCaseItem => |case_items| {
+ stack.append(State { .SwitchCaseItemCommaOrEnd = case_items }) catch unreachable;
+ try stack.append(State { .RangeExpressionBegin = OptionalCtx { .Required = try case_items.addOne() } });
+ },
+ State.SwitchCaseItemCommaOrEnd => |case_items| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.EqualAngleBracketRight)) {
+ ExpectCommaOrEndResult.end_token => |t| {
+ if (t == null) {
+ stack.append(State { .SwitchCaseItem = case_items }) catch unreachable;
+ }
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ continue;
+ },
+
+
+ State.SuspendBody => |suspend_node| {
+ if (suspend_node.payload != null) {
+ try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .RequiredNull = &suspend_node.body } });
+ }
+ continue;
+ },
+ State.AsyncAllocator => |async_node| {
+ if (eatToken(&tok_it, &tree, Token.Id.AngleBracketLeft) == null) {
+ continue;
+ }
+
+ async_node.rangle_bracket = TokenIndex(0);
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.AngleBracketRight,
+ .ptr = &??async_node.rangle_bracket,
+ }
+ });
+ try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &async_node.allocator_type } });
+ continue;
+ },
+ State.AsyncEnd => |ctx| {
+ const node = ctx.ctx.get() ?? continue;
+
+ switch (node.id) {
+ ast.Node.Id.FnProto => {
+ const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", node);
+ fn_proto.async_attr = ctx.attribute;
+ continue;
+ },
+ ast.Node.Id.SuffixOp => {
+ const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", node);
+ if (suffix_op.op == @TagType(ast.Node.SuffixOp.Op).Call) {
+ suffix_op.op.Call.async_attr = ctx.attribute;
+ continue;
+ }
+
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedCall = Error.ExpectedCall { .node = node },
+ };
+ return tree;
+ },
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedCallOrFnProto = Error.ExpectedCallOrFnProto { .node = node },
+ };
+ return tree;
+ }
+ }
+ },
+
+
+ State.ExternType => |ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_fn)) |fn_token| {
+ const fn_proto = try arena.construct(ast.Node.FnProto {
+ .base = ast.Node {
+ .id = ast.Node.Id.FnProto,
+ },
+ .doc_comments = ctx.comments,
+ .visib_token = null,
+ .name_token = null,
+ .fn_token = fn_token,
+ .params = ast.Node.FnProto.ParamList.init(arena),
+ .return_type = undefined,
+ .var_args_token = null,
+ .extern_export_inline_token = ctx.extern_token,
+ .cc_token = null,
+ .async_attr = null,
+ .body_node = null,
+ .lib_name = null,
+ .align_expr = null,
+ });
+ ctx.opt_ctx.store(&fn_proto.base);
+ stack.append(State { .FnProto = fn_proto }) catch unreachable;
+ continue;
+ }
+
+ stack.append(State {
+ .ContainerKind = ContainerKindCtx {
+ .opt_ctx = ctx.opt_ctx,
+ .ltoken = ctx.extern_token,
+ .layout = ast.Node.ContainerDecl.Layout.Extern,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ State.SliceOrArrayAccess => |node| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Ellipsis2 => {
+ const start = node.op.ArrayAccess;
+ node.op = ast.Node.SuffixOp.Op {
+ .Slice = ast.Node.SuffixOp.Op.Slice {
+ .start = start,
+ .end = null,
+ }
+ };
+
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.RBracket,
+ .ptr = &node.rtoken,
+ }
+ }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Optional = &node.op.Slice.end } });
+ continue;
+ },
+ Token.Id.RBracket => {
+ node.rtoken = token_index;
+ continue;
+ },
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedSliceOrRBracket = Error.ExpectedSliceOrRBracket { .token = token_index },
+ };
+ return tree;
+ }
+ }
+ },
+ State.SliceOrArrayType => |node| {
+ if (eatToken(&tok_it, &tree, Token.Id.RBracket)) |_| {
+ node.op = ast.Node.PrefixOp.Op {
+ .SliceType = ast.Node.PrefixOp.AddrOfInfo {
+ .align_expr = null,
+ .bit_offset_start_token = null,
+ .bit_offset_end_token = null,
+ .const_token = null,
+ .volatile_token = null,
+ }
+ };
+ stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ try stack.append(State { .AddrOfModifiers = &node.op.SliceType });
+ continue;
+ }
+
+ node.op = ast.Node.PrefixOp.Op { .ArrayType = undefined };
+ stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ try stack.append(State { .ExpectToken = Token.Id.RBracket });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.op.ArrayType } });
+ continue;
+ },
+ State.AddrOfModifiers => |addr_of_info| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_align => {
+ stack.append(state) catch unreachable;
+ if (addr_of_info.align_expr != null) {
+ *(try tree.errors.addOne()) = Error {
+ .ExtraAlignQualifier = Error.ExtraAlignQualifier { .token = token_index },
+ };
+ return tree;
+ }
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &addr_of_info.align_expr} });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ continue;
+ },
+ Token.Id.Keyword_const => {
+ stack.append(state) catch unreachable;
+ if (addr_of_info.const_token != null) {
+ *(try tree.errors.addOne()) = Error {
+ .ExtraConstQualifier = Error.ExtraConstQualifier { .token = token_index },
+ };
+ return tree;
+ }
+ addr_of_info.const_token = token_index;
+ continue;
+ },
+ Token.Id.Keyword_volatile => {
+ stack.append(state) catch unreachable;
+ if (addr_of_info.volatile_token != null) {
+ *(try tree.errors.addOne()) = Error {
+ .ExtraVolatileQualifier = Error.ExtraVolatileQualifier { .token = token_index },
+ };
+ return tree;
+ }
+ addr_of_info.volatile_token = token_index;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ continue;
+ },
+ }
+ },
+
+
+ State.Payload => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id != Token.Id.Pipe) {
+ if (opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = Token.Id.Pipe,
+ },
+ };
+ return tree;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.Payload,
+ ast.Node.Payload {
+ .base = undefined,
+ .lpipe = token_index,
+ .error_symbol = undefined,
+ .rpipe = undefined
+ }
+ );
+
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Pipe,
+ .ptr = &node.rpipe,
+ }
+ }) catch unreachable;
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.error_symbol } });
+ continue;
+ },
+ State.PointerPayload => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id != Token.Id.Pipe) {
+ if (opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = Token.Id.Pipe,
+ },
+ };
+ return tree;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.PointerPayload,
+ ast.Node.PointerPayload {
+ .base = undefined,
+ .lpipe = token_index,
+ .ptr_token = null,
+ .value_symbol = undefined,
+ .rpipe = undefined
+ }
+ );
+
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Pipe,
+ .ptr = &node.rpipe,
+ }
+ });
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.value_symbol } });
+ try stack.append(State {
+ .OptionalTokenSave = OptionalTokenSave {
+ .id = Token.Id.Asterisk,
+ .ptr = &node.ptr_token,
+ }
+ });
+ continue;
+ },
+ State.PointerIndexPayload => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id != Token.Id.Pipe) {
+ if (opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = Token.Id.Pipe,
+ },
+ };
+ return tree;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.PointerIndexPayload,
+ ast.Node.PointerIndexPayload {
+ .base = undefined,
+ .lpipe = token_index,
+ .ptr_token = null,
+ .value_symbol = undefined,
+ .index_symbol = null,
+ .rpipe = undefined
+ }
+ );
+
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Pipe,
+ .ptr = &node.rpipe,
+ }
+ }) catch unreachable;
+ try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.index_symbol } });
+ try stack.append(State { .IfToken = Token.Id.Comma });
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.value_symbol } });
+ try stack.append(State {
+ .OptionalTokenSave = OptionalTokenSave {
+ .id = Token.Id.Asterisk,
+ .ptr = &node.ptr_token,
+ }
+ });
+ continue;
+ },
+
+
+ State.Expression => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_return, Token.Id.Keyword_break, Token.Id.Keyword_continue => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.ControlFlowExpression,
+ ast.Node.ControlFlowExpression {
+ .base = undefined,
+ .ltoken = token_index,
+ .kind = undefined,
+ .rhs = null,
+ }
+ );
+
+ stack.append(State { .Expression = OptionalCtx { .Optional = &node.rhs } }) catch unreachable;
+
+ switch (token_ptr.id) {
+ Token.Id.Keyword_break => {
+ node.kind = ast.Node.ControlFlowExpression.Kind { .Break = null };
+ try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.kind.Break } });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ },
+ Token.Id.Keyword_continue => {
+ node.kind = ast.Node.ControlFlowExpression.Kind { .Continue = null };
+ try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.kind.Continue } });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ },
+ Token.Id.Keyword_return => {
+ node.kind = ast.Node.ControlFlowExpression.Kind.Return;
+ },
+ else => unreachable,
+ }
+ continue;
+ },
+ Token.Id.Keyword_try, Token.Id.Keyword_cancel, Token.Id.Keyword_resume => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.PrefixOp,
+ ast.Node.PrefixOp {
+ .base = undefined,
+ .op_token = token_index,
+ .op = switch (token_ptr.id) {
+ Token.Id.Keyword_try => ast.Node.PrefixOp.Op { .Try = void{} },
+ Token.Id.Keyword_cancel => ast.Node.PrefixOp.Op { .Cancel = void{} },
+ Token.Id.Keyword_resume => ast.Node.PrefixOp.Op { .Resume = void{} },
+ else => unreachable,
+ },
+ .rhs = undefined,
+ }
+ );
+
+ stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ continue;
+ },
+ else => {
+ if (!try parseBlockExpr(&stack, arena, opt_ctx, token_ptr, token_index)) {
+ putBackToken(&tok_it, &tree);
+ stack.append(State { .UnwrapExpressionBegin = opt_ctx }) catch unreachable;
+ }
+ continue;
+ }
+ }
+ },
+ State.RangeExpressionBegin => |opt_ctx| {
+ stack.append(State { .RangeExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .Expression = opt_ctx });
+ continue;
+ },
+ State.RangeExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Ellipsis3)) |ellipsis3| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = ellipsis3,
+ .op = ast.Node.InfixOp.Op.Range,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ continue;
+ }
+ },
+ State.AssignmentExpressionBegin => |opt_ctx| {
+ stack.append(State { .AssignmentExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .Expression = opt_ctx });
+ continue;
+ },
+
+ State.AssignmentExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToAssignment(token_ptr.id)) |ass_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = ass_id,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .AssignmentExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.UnwrapExpressionBegin => |opt_ctx| {
+ stack.append(State { .UnwrapExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BoolOrExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.UnwrapExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToUnwrapExpr(token_ptr.id)) |unwrap_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = unwrap_id,
+ .rhs = undefined,
+ }
+ );
+
+ stack.append(State { .UnwrapExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } });
+
+ if (node.op == ast.Node.InfixOp.Op.Catch) {
+ try stack.append(State { .Payload = OptionalCtx { .Optional = &node.op.Catch } });
+ }
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.BoolOrExpressionBegin => |opt_ctx| {
+ stack.append(State { .BoolOrExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BoolAndExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BoolOrExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_or)) |or_token| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = or_token,
+ .op = ast.Node.InfixOp.Op.BoolOr,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BoolOrExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .BoolAndExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.BoolAndExpressionBegin => |opt_ctx| {
+ stack.append(State { .BoolAndExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .ComparisonExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BoolAndExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_and)) |and_token| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = and_token,
+ .op = ast.Node.InfixOp.Op.BoolAnd,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BoolAndExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .ComparisonExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.ComparisonExpressionBegin => |opt_ctx| {
+ stack.append(State { .ComparisonExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BinaryOrExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.ComparisonExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToComparison(token_ptr.id)) |comp_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = comp_id,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .ComparisonExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .BinaryOrExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.BinaryOrExpressionBegin => |opt_ctx| {
+ stack.append(State { .BinaryOrExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BinaryXorExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BinaryOrExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Pipe)) |pipe| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = pipe,
+ .op = ast.Node.InfixOp.Op.BitOr,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BinaryOrExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .BinaryXorExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.BinaryXorExpressionBegin => |opt_ctx| {
+ stack.append(State { .BinaryXorExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BinaryAndExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BinaryXorExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Caret)) |caret| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = caret,
+ .op = ast.Node.InfixOp.Op.BitXor,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BinaryXorExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .BinaryAndExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.BinaryAndExpressionBegin => |opt_ctx| {
+ stack.append(State { .BinaryAndExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BitShiftExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BinaryAndExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Ampersand)) |ampersand| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = ampersand,
+ .op = ast.Node.InfixOp.Op.BitAnd,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BinaryAndExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .BitShiftExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.BitShiftExpressionBegin => |opt_ctx| {
+ stack.append(State { .BitShiftExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .AdditionExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BitShiftExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToBitShift(token_ptr.id)) |bitshift_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = bitshift_id,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BitShiftExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .AdditionExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.AdditionExpressionBegin => |opt_ctx| {
+ stack.append(State { .AdditionExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .MultiplyExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.AdditionExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToAddition(token_ptr.id)) |add_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = add_id,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .AdditionExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .MultiplyExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.MultiplyExpressionBegin => |opt_ctx| {
+ stack.append(State { .MultiplyExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .CurlySuffixExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.MultiplyExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToMultiply(token_ptr.id)) |mult_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = mult_id,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .MultiplyExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .CurlySuffixExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.CurlySuffixExpressionBegin => |opt_ctx| {
+ stack.append(State { .CurlySuffixExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.LBrace });
+ try stack.append(State { .TypeExprBegin = opt_ctx });
+ continue;
+ },
+
+ State.CurlySuffixExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if ((??tok_it.peek()).id == Token.Id.Period) {
+ const node = try arena.construct(ast.Node.SuffixOp {
+ .base = ast.Node { .id = ast.Node.Id.SuffixOp },
+ .lhs = lhs,
+ .op = ast.Node.SuffixOp.Op {
+ .StructInitializer = ast.Node.SuffixOp.Op.InitList.init(arena),
+ },
+ .rtoken = undefined,
+ });
+ opt_ctx.store(&node.base);
+
+ stack.append(State { .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.LBrace });
+ try stack.append(State {
+ .FieldInitListItemOrEnd = ListSave(@typeOf(node.op.StructInitializer)) {
+ .list = &node.op.StructInitializer,
+ .ptr = &node.rtoken,
+ }
+ });
+ continue;
+ }
+
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.SuffixOp,
+ ast.Node.SuffixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op = ast.Node.SuffixOp.Op {
+ .ArrayInitializer = ast.Node.SuffixOp.Op.InitList.init(arena),
+ },
+ .rtoken = undefined,
+ }
+ );
+ stack.append(State { .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.LBrace });
+ try stack.append(State {
+ .ExprListItemOrEnd = ExprListCtx {
+ .list = &node.op.ArrayInitializer,
+ .end = Token.Id.RBrace,
+ .ptr = &node.rtoken,
+ }
+ });
+ continue;
+ },
+
+ State.TypeExprBegin => |opt_ctx| {
+ stack.append(State { .TypeExprEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .PrefixOpExpression = opt_ctx });
+ continue;
+ },
+
+ State.TypeExprEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Bang)) |bang| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = bang,
+ .op = ast.Node.InfixOp.Op.ErrorUnion,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .TypeExprEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .PrefixOpExpression = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.PrefixOpExpression => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToPrefixOp(token_ptr.id)) |prefix_id| {
+ var node = try createToCtxNode(arena, opt_ctx, ast.Node.PrefixOp,
+ ast.Node.PrefixOp {
+ .base = undefined,
+ .op_token = token_index,
+ .op = prefix_id,
+ .rhs = undefined,
+ }
+ );
+
+ // Treat '**' token as two derefs
+ if (token_ptr.id == Token.Id.AsteriskAsterisk) {
+ const child = try createNode(arena, ast.Node.PrefixOp,
+ ast.Node.PrefixOp {
+ .base = undefined,
+ .op_token = token_index,
+ .op = prefix_id,
+ .rhs = undefined,
+ }
+ );
+ node.rhs = &child.base;
+ node = child;
+ }
+
+ stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ if (node.op == ast.Node.PrefixOp.Op.AddrOf) {
+ try stack.append(State { .AddrOfModifiers = &node.op.AddrOf });
+ }
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ stack.append(State { .SuffixOpExpressionBegin = opt_ctx }) catch unreachable;
+ continue;
+ }
+ },
+
+ State.SuffixOpExpressionBegin => |opt_ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_async)) |async_token| {
+ const async_node = try createNode(arena, ast.Node.AsyncAttribute,
+ ast.Node.AsyncAttribute {
+ .base = undefined,
+ .async_token = async_token,
+ .allocator_type = null,
+ .rangle_bracket = null,
+ }
+ );
+ stack.append(State {
+ .AsyncEnd = AsyncEndCtx {
+ .ctx = opt_ctx,
+ .attribute = async_node,
+ }
+ }) catch unreachable;
+ try stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() });
+ try stack.append(State { .PrimaryExpression = opt_ctx.toRequired() });
+ try stack.append(State { .AsyncAllocator = async_node });
+ continue;
+ }
+
+ stack.append(State { .SuffixOpExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .PrimaryExpression = opt_ctx });
+ continue;
+ },
+
+ State.SuffixOpExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.LParen => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.SuffixOp,
+ ast.Node.SuffixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op = ast.Node.SuffixOp.Op {
+ .Call = ast.Node.SuffixOp.Op.Call {
+ .params = ast.Node.SuffixOp.Op.Call.ParamList.init(arena),
+ .async_attr = null,
+ }
+ },
+ .rtoken = undefined,
+ }
+ );
+ stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State {
+ .ExprListItemOrEnd = ExprListCtx {
+ .list = &node.op.Call.params,
+ .end = Token.Id.RParen,
+ .ptr = &node.rtoken,
+ }
+ });
+ continue;
+ },
+ Token.Id.LBracket => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.SuffixOp,
+ ast.Node.SuffixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op = ast.Node.SuffixOp.Op {
+ .ArrayAccess = undefined,
+ },
+ .rtoken = undefined
+ }
+ );
+ stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .SliceOrArrayAccess = node });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.op.ArrayAccess }});
+ continue;
+ },
+ Token.Id.Period => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = ast.Node.InfixOp.Op.Period,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ continue;
+ },
+ }
+ },
+
+ State.PrimaryExpression => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ switch (token.ptr.id) {
+ Token.Id.IntegerLiteral => {
+ // Fix: an integer-literal token must produce an IntegerLiteral
+ // node, not a StringLiteral node (cf. the FloatLiteral and
+ // CharLiteral prongs below, which use their matching node types;
+ // string literals are handled separately via parseStringLiteral).
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.IntegerLiteral, token.index);
+ continue;
+ },
+ Token.Id.FloatLiteral => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.FloatLiteral, token.index);
+ continue;
+ },
+ Token.Id.CharLiteral => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.CharLiteral, token.index);
+ continue;
+ },
+ Token.Id.Keyword_undefined => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.UndefinedLiteral, token.index);
+ continue;
+ },
+ Token.Id.Keyword_true, Token.Id.Keyword_false => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.BoolLiteral, token.index);
+ continue;
+ },
+ Token.Id.Keyword_null => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.NullLiteral, token.index);
+ continue;
+ },
+ Token.Id.Keyword_this => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.ThisLiteral, token.index);
+ continue;
+ },
+ Token.Id.Keyword_var => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.VarType, token.index);
+ continue;
+ },
+ Token.Id.Keyword_unreachable => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.Unreachable, token.index);
+ continue;
+ },
+ Token.Id.Keyword_promise => {
+ const node = try arena.construct(ast.Node.PromiseType {
+ .base = ast.Node {
+ .id = ast.Node.Id.PromiseType,
+ },
+ .promise_token = token.index,
+ .result = null,
+ });
+ opt_ctx.store(&node.base);
+ const next_token = nextToken(&tok_it, &tree);
+ const next_token_index = next_token.index;
+ const next_token_ptr = next_token.ptr;
+ if (next_token_ptr.id != Token.Id.Arrow) {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ node.result = ast.Node.PromiseType.Result {
+ .arrow_token = next_token_index,
+ .return_type = undefined,
+ };
+ const return_type_ptr = &((??node.result).return_type);
+ try stack.append(State { .Expression = OptionalCtx { .Required = return_type_ptr, } });
+ continue;
+ },
+ Token.Id.StringLiteral, Token.Id.MultilineStringLiteralLine => {
+ opt_ctx.store((try parseStringLiteral(arena, &tok_it, token.ptr, token.index, &tree)) ?? unreachable);
+ continue;
+ },
+ Token.Id.LParen => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.GroupedExpression,
+ ast.Node.GroupedExpression {
+ .base = undefined,
+ .lparen = token.index,
+ .expr = undefined,
+ .rparen = undefined,
+ }
+ );
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.RParen,
+ .ptr = &node.rparen,
+ }
+ }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ continue;
+ },
+ Token.Id.Builtin => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.BuiltinCall,
+ ast.Node.BuiltinCall {
+ .base = undefined,
+ .builtin_token = token.index,
+ .params = ast.Node.BuiltinCall.ParamList.init(arena),
+ .rparen_token = undefined,
+ }
+ );
+ stack.append(State {
+ .ExprListItemOrEnd = ExprListCtx {
+ .list = &node.params,
+ .end = Token.Id.RParen,
+ .ptr = &node.rparen_token,
+ }
+ }) catch unreachable;
+ try stack.append(State { .ExpectToken = Token.Id.LParen, });
+ continue;
+ },
+ Token.Id.LBracket => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.PrefixOp,
+ ast.Node.PrefixOp {
+ .base = undefined,
+ .op_token = token.index,
+ .op = undefined,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .SliceOrArrayType = node }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_error => {
+ stack.append(State {
+ .ErrorTypeOrSetDecl = ErrorTypeOrSetDeclCtx {
+ .error_token = token.index,
+ .opt_ctx = opt_ctx
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_packed => {
+ stack.append(State {
+ .ContainerKind = ContainerKindCtx {
+ .opt_ctx = opt_ctx,
+ .ltoken = token.index,
+ .layout = ast.Node.ContainerDecl.Layout.Packed,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_extern => {
+ stack.append(State {
+ .ExternType = ExternTypeCtx {
+ .opt_ctx = opt_ctx,
+ .extern_token = token.index,
+ .comments = null,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_struct, Token.Id.Keyword_union, Token.Id.Keyword_enum => {
+ putBackToken(&tok_it, &tree);
+ stack.append(State {
+ .ContainerKind = ContainerKindCtx {
+ .opt_ctx = opt_ctx,
+ .ltoken = token.index,
+ .layout = ast.Node.ContainerDecl.Layout.Auto,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Identifier => {
+ stack.append(State {
+ .MaybeLabeledExpression = MaybeLabeledExpressionCtx {
+ .label = token.index,
+ .opt_ctx = opt_ctx
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_fn => {
+ const fn_proto = try arena.construct(ast.Node.FnProto {
+ .base = ast.Node {
+ .id = ast.Node.Id.FnProto,
+ },
+ .doc_comments = null,
+ .visib_token = null,
+ .name_token = null,
+ .fn_token = token.index,
+ .params = ast.Node.FnProto.ParamList.init(arena),
+ .return_type = undefined,
+ .var_args_token = null,
+ .extern_export_inline_token = null,
+ .cc_token = null,
+ .async_attr = null,
+ .body_node = null,
+ .lib_name = null,
+ .align_expr = null,
+ });
+ opt_ctx.store(&fn_proto.base);
+ stack.append(State { .FnProto = fn_proto }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_nakedcc, Token.Id.Keyword_stdcallcc => {
+ const fn_proto = try arena.construct(ast.Node.FnProto {
+ .base = ast.Node {
+ .id = ast.Node.Id.FnProto,
+ },
+ .doc_comments = null,
+ .visib_token = null,
+ .name_token = null,
+ .fn_token = undefined,
+ .params = ast.Node.FnProto.ParamList.init(arena),
+ .return_type = undefined,
+ .var_args_token = null,
+ .extern_export_inline_token = null,
+ .cc_token = token.index,
+ .async_attr = null,
+ .body_node = null,
+ .lib_name = null,
+ .align_expr = null,
+ });
+ opt_ctx.store(&fn_proto.base);
+ stack.append(State { .FnProto = fn_proto }) catch unreachable;
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Keyword_fn,
+ .ptr = &fn_proto.fn_token
+ }
+ });
+ continue;
+ },
+ Token.Id.Keyword_asm => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.Asm,
+ ast.Node.Asm {
+ .base = undefined,
+ .asm_token = token.index,
+ .volatile_token = null,
+ .template = undefined,
+ .outputs = ast.Node.Asm.OutputList.init(arena),
+ .inputs = ast.Node.Asm.InputList.init(arena),
+ .clobbers = ast.Node.Asm.ClobberList.init(arena),
+ .rparen = undefined,
+ }
+ );
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.RParen,
+ .ptr = &node.rparen,
+ }
+ }) catch unreachable;
+ try stack.append(State { .AsmClobberItems = &node.clobbers });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ try stack.append(State { .AsmInputItems = &node.inputs });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ try stack.append(State { .AsmOutputItems = &node.outputs });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.template } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ try stack.append(State {
+ .OptionalTokenSave = OptionalTokenSave {
+ .id = Token.Id.Keyword_volatile,
+ .ptr = &node.volatile_token,
+ }
+ });
+ },
+ Token.Id.Keyword_inline => {
+ stack.append(State {
+ .Inline = InlineCtx {
+ .label = null,
+ .inline_token = token.index,
+ .opt_ctx = opt_ctx,
+ }
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ if (!try parseBlockExpr(&stack, arena, opt_ctx, token.ptr, token.index)) {
+ putBackToken(&tok_it, &tree);
+ if (opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedPrimaryExpr = Error.ExpectedPrimaryExpr { .token = token.index },
+ };
+ return tree;
+ }
+ }
+ continue;
+ }
+ }
+ },
+
+
+ State.ErrorTypeOrSetDecl => |ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.LBrace) == null) {
+ _ = try createToCtxLiteral(arena, ctx.opt_ctx, ast.Node.ErrorType, ctx.error_token);
+ continue;
+ }
+
+ const node = try arena.construct(ast.Node.ErrorSetDecl {
+ .base = ast.Node {
+ .id = ast.Node.Id.ErrorSetDecl,
+ },
+ .error_token = ctx.error_token,
+ .decls = ast.Node.ErrorSetDecl.DeclList.init(arena),
+ .rbrace_token = undefined,
+ });
+ ctx.opt_ctx.store(&node.base);
+
+ stack.append(State {
+ .ErrorTagListItemOrEnd = ListSave(@typeOf(node.decls)) {
+ .list = &node.decls,
+ .ptr = &node.rbrace_token,
+ }
+ }) catch unreachable;
+ continue;
+ },
+ State.StringLiteral => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ opt_ctx.store(
+ (try parseStringLiteral(arena, &tok_it, token_ptr, token_index, &tree)) ?? {
+ putBackToken(&tok_it, &tree);
+ if (opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedPrimaryExpr = Error.ExpectedPrimaryExpr { .token = token_index },
+ };
+ return tree;
+ }
+
+ continue;
+ }
+ );
+ },
+
+ State.Identifier => |opt_ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Identifier)) |ident_token| {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.Identifier, ident_token);
+ continue;
+ }
+
+ if (opt_ctx != OptionalCtx.Optional) {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = Token.Id.Identifier,
+ },
+ };
+ return tree;
+ }
+ },
+
+ State.ErrorTag => |node_ptr| {
+ const comments = try eatDocComments(arena, &tok_it, &tree);
+ const ident_token = nextToken(&tok_it, &tree);
+ const ident_token_index = ident_token.index;
+ const ident_token_ptr = ident_token.ptr;
+ if (ident_token_ptr.id != Token.Id.Identifier) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = ident_token_index,
+ .expected_id = Token.Id.Identifier,
+ },
+ };
+ return tree;
+ }
+
+ const node = try arena.construct(ast.Node.ErrorTag {
+ .base = ast.Node {
+ .id = ast.Node.Id.ErrorTag,
+ },
+ .doc_comments = comments,
+ .name_token = ident_token_index,
+ });
+ *node_ptr = &node.base;
+ continue;
+ },
+
+ State.ExpectToken => |token_id| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id != token_id) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = token_id,
+ },
+ };
+ return tree;
+ }
+ continue;
+ },
+ State.ExpectTokenSave => |expect_token_save| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id != expect_token_save.id) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = expect_token_save.id,
+ },
+ };
+ return tree;
+ }
+ *expect_token_save.ptr = token_index;
+ continue;
+ },
+ State.IfToken => |token_id| {
+ if (eatToken(&tok_it, &tree, token_id)) |_| {
+ continue;
+ }
+
+ _ = stack.pop();
+ continue;
+ },
+ State.IfTokenSave => |if_token_save| {
+ if (eatToken(&tok_it, &tree, if_token_save.id)) |token_index| {
+ *if_token_save.ptr = token_index;
+ continue;
+ }
+
+ _ = stack.pop();
+ continue;
+ },
+ State.OptionalTokenSave => |optional_token_save| {
+ if (eatToken(&tok_it, &tree, optional_token_save.id)) |token_index| {
+ *optional_token_save.ptr = token_index;
+ continue;
+ }
+
+ continue;
+ },
+ }
+ }
+}
+
+/// A token pointer paired with its index into the tree's token list.
+const AnnotatedToken = struct {
+ ptr: &Token,
+ index: TokenIndex,
+};
+
+/// Context threaded through the TopLevel* parser states: the decl list to
+/// append to plus any already-consumed modifier tokens and doc comments.
+const TopLevelDeclCtx = struct {
+ decls: &ast.Node.Root.DeclList,
+ visib_token: ?TokenIndex,
+ extern_export_inline_token: ?AnnotatedToken,
+ lib_name: ?&ast.Node,
+ comments: ?&ast.Node.DocComment,
+};
+
+/// Context for the VarDecl state: tokens consumed before the declaration
+/// body, and the list that receives the finished node.
+const VarDeclCtx = struct {
+ mut_token: TokenIndex,
+ visib_token: ?TokenIndex,
+ comptime_token: ?TokenIndex,
+ extern_export_token: ?TokenIndex,
+ lib_name: ?&ast.Node,
+ list: &ast.Node.Root.DeclList,
+ comments: ?&ast.Node.DocComment,
+};
+
+/// Context for the TopLevelExternOrField state inside a container decl.
+const TopLevelExternOrFieldCtx = struct {
+ visib_token: TokenIndex,
+ container_decl: &ast.Node.ContainerDecl,
+ comments: ?&ast.Node.DocComment,
+};
+
+/// Context for the ExternType state: the `extern` token already consumed
+/// and the destination for the node that follows it.
+const ExternTypeCtx = struct {
+ opt_ctx: OptionalCtx,
+ extern_token: TokenIndex,
+ comments: ?&ast.Node.DocComment,
+};
+
+/// Context for the ContainerKind state: where the container decl starts
+/// (`ltoken`) and which layout keyword (packed/extern/auto) preceded it.
+const ContainerKindCtx = struct {
+ opt_ctx: OptionalCtx,
+ ltoken: TokenIndex,
+ layout: ast.Node.ContainerDecl.Layout,
+};
+
+/// Expect a token of `id` and store its index through `ptr` on success.
+const ExpectTokenSave = struct {
+ id: @TagType(Token.Id),
+ ptr: &TokenIndex,
+};
+
+/// Like ExpectTokenSave, but the token may be absent (`ptr` stays null).
+const OptionalTokenSave = struct {
+ id: @TagType(Token.Id),
+ ptr: &?TokenIndex,
+};
+
+/// Context for parsing a comma-separated expression list: the list to
+/// fill, the token id that terminates it, and where to record that
+/// closing token's index.
+const ExprListCtx = struct {
+ list: &ast.Node.SuffixOp.Op.InitList,
+ end: Token.Id,
+ ptr: &TokenIndex,
+};
+
+/// Returns a context type pairing a node list of type `List` with the
+/// slot that receives the list's closing token index.
+fn ListSave(comptime List: type) type {
+ return struct {
+ list: &List,
+ ptr: &TokenIndex,
+ };
+}
+
+/// Context for an identifier that may turn out to be a block/loop label.
+const MaybeLabeledExpressionCtx = struct {
+ label: TokenIndex,
+ opt_ctx: OptionalCtx,
+};
+
+/// Context for a labeled expression; `label` is null when unlabeled.
+const LabelCtx = struct {
+ label: ?TokenIndex,
+ opt_ctx: OptionalCtx,
+};
+
+/// Context for an `inline` prefix on a loop, with optional label.
+const InlineCtx = struct {
+ label: ?TokenIndex,
+ inline_token: ?TokenIndex,
+ opt_ctx: OptionalCtx,
+};
+
+/// Context for the While/For states: label and `inline` (if any) plus
+/// the loop keyword's token index.
+const LoopCtx = struct {
+ label: ?TokenIndex,
+ inline_token: ?TokenIndex,
+ loop_token: TokenIndex,
+ opt_ctx: OptionalCtx,
+};
+
+/// Context for the AsyncEnd state: the destination ctx and the async
+/// attribute node to attach to the parsed call/fn-proto.
+const AsyncEndCtx = struct {
+ ctx: OptionalCtx,
+ attribute: &ast.Node.AsyncAttribute,
+};
+
+/// Context for deciding between the `error` type and `error { ... }` set.
+const ErrorTypeOrSetDeclCtx = struct {
+ opt_ctx: OptionalCtx,
+ error_token: TokenIndex,
+};
+
+/// Context for finishing one parameter declaration of a fn proto.
+const ParamDeclEndCtx = struct {
+ fn_proto: &ast.Node.FnProto,
+ param_decl: &ast.Node.ParamDecl,
+};
+
+/// Context for a `comptime` statement inside a block.
+const ComptimeStatementCtx = struct {
+ comptime_token: TokenIndex,
+ block: &ast.Node.Block,
+};
+
+/// Destination slot for a node produced by an expression state.
+/// Optional: the result may legitimately be absent (slot is `?&ast.Node`).
+/// RequiredNull: same optional slot, but a value must be present by the
+/// time `get` is called. Required: a plain `&ast.Node` slot.
+const OptionalCtx = union(enum) {
+ Optional: &?&ast.Node,
+ RequiredNull: &?&ast.Node,
+ Required: &&ast.Node,
+
+ /// Write `value` through whichever destination pointer this ctx wraps.
+ pub fn store(self: &const OptionalCtx, value: &ast.Node) void {
+ switch (*self) {
+ OptionalCtx.Optional => |ptr| *ptr = value,
+ OptionalCtx.RequiredNull => |ptr| *ptr = value,
+ OptionalCtx.Required => |ptr| *ptr = value,
+ }
+ }
+
+ /// Read the stored node back; only the Optional case can yield null.
+ /// RequiredNull asserts (??) that a value was already stored.
+ pub fn get(self: &const OptionalCtx) ?&ast.Node {
+ switch (*self) {
+ OptionalCtx.Optional => |ptr| return *ptr,
+ OptionalCtx.RequiredNull => |ptr| return ??*ptr,
+ OptionalCtx.Required => |ptr| return *ptr,
+ }
+ }
+
+ /// Same slot, but now mandatory: Optional is upgraded to RequiredNull;
+ /// the other two cases are returned unchanged.
+ pub fn toRequired(self: &const OptionalCtx) OptionalCtx {
+ switch (*self) {
+ OptionalCtx.Optional => |ptr| {
+ return OptionalCtx { .RequiredNull = ptr };
+ },
+ OptionalCtx.RequiredNull => |ptr| return *self,
+ OptionalCtx.Required => |ptr| return *self,
+ }
+ }
+};
+
+/// Pairs a node slot with doc comments to attach to it once it is parsed.
+const AddCommentsCtx = struct {
+ node_ptr: &&ast.Node,
+ comments: ?&ast.Node.DocComment,
+};
+
+/// One frame of the explicit parser stack. The parser is a state machine:
+/// the main loop pops a State, consumes tokens, and pushes follow-up
+/// states (expression states are grouped Begin/End by precedence level,
+/// from Expression down through PrimaryExpression).
+const State = union(enum) {
+ TopLevel,
+ TopLevelExtern: TopLevelDeclCtx,
+ TopLevelLibname: TopLevelDeclCtx,
+ TopLevelDecl: TopLevelDeclCtx,
+ TopLevelExternOrField: TopLevelExternOrFieldCtx,
+
+ ContainerKind: ContainerKindCtx,
+ ContainerInitArgStart: &ast.Node.ContainerDecl,
+ ContainerInitArg: &ast.Node.ContainerDecl,
+ ContainerDecl: &ast.Node.ContainerDecl,
+
+ VarDecl: VarDeclCtx,
+ VarDeclAlign: &ast.Node.VarDecl,
+ VarDeclEq: &ast.Node.VarDecl,
+
+ FnDef: &ast.Node.FnProto,
+ FnProto: &ast.Node.FnProto,
+ FnProtoAlign: &ast.Node.FnProto,
+ FnProtoReturnType: &ast.Node.FnProto,
+
+ ParamDecl: &ast.Node.FnProto,
+ ParamDeclAliasOrComptime: &ast.Node.ParamDecl,
+ ParamDeclName: &ast.Node.ParamDecl,
+ ParamDeclEnd: ParamDeclEndCtx,
+ ParamDeclComma: &ast.Node.FnProto,
+
+ MaybeLabeledExpression: MaybeLabeledExpressionCtx,
+ LabeledExpression: LabelCtx,
+ Inline: InlineCtx,
+ While: LoopCtx,
+ WhileContinueExpr: &?&ast.Node,
+ For: LoopCtx,
+ Else: &?&ast.Node.Else,
+
+ Block: &ast.Node.Block,
+ Statement: &ast.Node.Block,
+ ComptimeStatement: ComptimeStatementCtx,
+ Semicolon: &&ast.Node,
+
+ AsmOutputItems: &ast.Node.Asm.OutputList,
+ AsmOutputReturnOrType: &ast.Node.AsmOutput,
+ AsmInputItems: &ast.Node.Asm.InputList,
+ AsmClobberItems: &ast.Node.Asm.ClobberList,
+
+ ExprListItemOrEnd: ExprListCtx,
+ ExprListCommaOrEnd: ExprListCtx,
+ FieldInitListItemOrEnd: ListSave(ast.Node.SuffixOp.Op.InitList),
+ FieldInitListCommaOrEnd: ListSave(ast.Node.SuffixOp.Op.InitList),
+ FieldListCommaOrEnd: &ast.Node.ContainerDecl,
+ FieldInitValue: OptionalCtx,
+ ErrorTagListItemOrEnd: ListSave(ast.Node.ErrorSetDecl.DeclList),
+ ErrorTagListCommaOrEnd: ListSave(ast.Node.ErrorSetDecl.DeclList),
+ SwitchCaseOrEnd: ListSave(ast.Node.Switch.CaseList),
+ SwitchCaseCommaOrEnd: ListSave(ast.Node.Switch.CaseList),
+ SwitchCaseFirstItem: &ast.Node.SwitchCase.ItemList,
+ SwitchCaseItem: &ast.Node.SwitchCase.ItemList,
+ SwitchCaseItemCommaOrEnd: &ast.Node.SwitchCase.ItemList,
+
+ SuspendBody: &ast.Node.Suspend,
+ AsyncAllocator: &ast.Node.AsyncAttribute,
+ AsyncEnd: AsyncEndCtx,
+
+ ExternType: ExternTypeCtx,
+ SliceOrArrayAccess: &ast.Node.SuffixOp,
+ SliceOrArrayType: &ast.Node.PrefixOp,
+ AddrOfModifiers: &ast.Node.PrefixOp.AddrOfInfo,
+
+ Payload: OptionalCtx,
+ PointerPayload: OptionalCtx,
+ PointerIndexPayload: OptionalCtx,
+
+ Expression: OptionalCtx,
+ RangeExpressionBegin: OptionalCtx,
+ RangeExpressionEnd: OptionalCtx,
+ AssignmentExpressionBegin: OptionalCtx,
+ AssignmentExpressionEnd: OptionalCtx,
+ UnwrapExpressionBegin: OptionalCtx,
+ UnwrapExpressionEnd: OptionalCtx,
+ BoolOrExpressionBegin: OptionalCtx,
+ BoolOrExpressionEnd: OptionalCtx,
+ BoolAndExpressionBegin: OptionalCtx,
+ BoolAndExpressionEnd: OptionalCtx,
+ ComparisonExpressionBegin: OptionalCtx,
+ ComparisonExpressionEnd: OptionalCtx,
+ BinaryOrExpressionBegin: OptionalCtx,
+ BinaryOrExpressionEnd: OptionalCtx,
+ BinaryXorExpressionBegin: OptionalCtx,
+ BinaryXorExpressionEnd: OptionalCtx,
+ BinaryAndExpressionBegin: OptionalCtx,
+ BinaryAndExpressionEnd: OptionalCtx,
+ BitShiftExpressionBegin: OptionalCtx,
+ BitShiftExpressionEnd: OptionalCtx,
+ AdditionExpressionBegin: OptionalCtx,
+ AdditionExpressionEnd: OptionalCtx,
+ MultiplyExpressionBegin: OptionalCtx,
+ MultiplyExpressionEnd: OptionalCtx,
+ CurlySuffixExpressionBegin: OptionalCtx,
+ CurlySuffixExpressionEnd: OptionalCtx,
+ TypeExprBegin: OptionalCtx,
+ TypeExprEnd: OptionalCtx,
+ PrefixOpExpression: OptionalCtx,
+ SuffixOpExpressionBegin: OptionalCtx,
+ SuffixOpExpressionEnd: OptionalCtx,
+ PrimaryExpression: OptionalCtx,
+
+ ErrorTypeOrSetDecl: ErrorTypeOrSetDeclCtx,
+ StringLiteral: OptionalCtx,
+ Identifier: OptionalCtx,
+ ErrorTag: &&ast.Node,
+
+
+ IfToken: @TagType(Token.Id),
+ IfTokenSave: ExpectTokenSave,
+ ExpectToken: @TagType(Token.Id),
+ ExpectTokenSave: ExpectTokenSave,
+ OptionalTokenSave: OptionalTokenSave,
+};
+
+/// Consume a run of consecutive doc-comment tokens, collecting their
+/// token indices into a single DocComment node. Returns null when the
+/// next token is not a doc comment.
+fn eatDocComments(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) !?&ast.Node.DocComment {
+ var result: ?&ast.Node.DocComment = null;
+ while (true) {
+ if (eatToken(tok_it, tree, Token.Id.DocComment)) |line_comment| {
+ const node = blk: {
+ if (result) |comment_node| {
+ break :blk comment_node;
+ } else {
+ // First line seen: lazily allocate the DocComment node.
+ const comment_node = try arena.construct(ast.Node.DocComment {
+ .base = ast.Node {
+ .id = ast.Node.Id.DocComment,
+ },
+ .lines = ast.Node.DocComment.LineList.init(arena),
+ });
+ result = comment_node;
+ break :blk comment_node;
+ }
+ };
+ try node.lines.push(line_comment);
+ continue;
+ }
+ break;
+ }
+ return result;
+}
+
+/// Consume a single line-comment token, returning a LineComment node,
+/// or null when the next token is not a line comment.
+fn eatLineComment(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) !?&ast.Node.LineComment {
+ const token = eatToken(tok_it, tree, Token.Id.LineComment) ?? return null;
+ return try arena.construct(ast.Node.LineComment {
+ .base = ast.Node {
+ .id = ast.Node.Id.LineComment,
+ },
+ .token = token,
+ });
+}
+
+/// Build the AST node for a string-literal token: a plain StringLiteral
+/// node, or a MultilineStringLiteral node that greedily absorbs all
+/// consecutive multiline lines. Returns null for any other token id
+/// (the caller decides whether that is an error).
+fn parseStringLiteral(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator,
+ token_ptr: &const Token, token_index: TokenIndex, tree: &ast.Tree) !?&ast.Node
+{
+ switch (token_ptr.id) {
+ Token.Id.StringLiteral => {
+ return &(try createLiteral(arena, ast.Node.StringLiteral, token_index)).base;
+ },
+ Token.Id.MultilineStringLiteralLine => {
+ const node = try arena.construct(ast.Node.MultilineStringLiteral {
+ .base = ast.Node { .id = ast.Node.Id.MultilineStringLiteral },
+ .lines = ast.Node.MultilineStringLiteral.LineList.init(arena),
+ });
+ try node.lines.push(token_index);
+ // Keep consuming lines; put back the first non-multiline token.
+ while (true) {
+ const multiline_str = nextToken(tok_it, tree);
+ const multiline_str_index = multiline_str.index;
+ const multiline_str_ptr = multiline_str.ptr;
+ if (multiline_str_ptr.id != Token.Id.MultilineStringLiteralLine) {
+ putBackToken(tok_it, tree);
+ break;
+ }
+
+ try node.lines.push(multiline_str_index);
+ }
+
+ return &node.base;
+ },
+ // TODO: We shouldn't need a cast, but:
+ // zig: /home/jc/Documents/zig/src/ir.cpp:7962: TypeTableEntry* ir_resolve_peer_types(IrAnalyze*, AstNode*, IrInstruction**, size_t): Assertion `err_set_type != nullptr' failed.
+ else => return (?&ast.Node)(null),
+ }
+}
+
+fn parseBlockExpr(stack: &std.ArrayList(State), arena: &mem.Allocator, ctx: &const OptionalCtx,
+ token_ptr: &const Token, token_index: TokenIndex) !bool {
+ switch (token_ptr.id) {
+ Token.Id.Keyword_suspend => {
+ const node = try createToCtxNode(arena, ctx, ast.Node.Suspend,
+ ast.Node.Suspend {
+ .base = undefined,
+ .label = null,
+ .suspend_token = token_index,
+ .payload = null,
+ .body = null,
+ }
+ );
+
+ stack.append(State { .SuspendBody = node }) catch unreachable;
+ try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
+ return true;
+ },
+ Token.Id.Keyword_if => {
+ const node = try createToCtxNode(arena, ctx, ast.Node.If,
+ ast.Node.If {
+ .base = undefined,
+ .if_token = token_index,
+ .condition = undefined,
+ .payload = null,
+ .body = undefined,
+ .@"else" = null,
+ }
+ );
+
+ stack.append(State { .Else = &node.@"else" }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
+ try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.condition } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ return true;
+ },
+ Token.Id.Keyword_while => {
+ stack.append(State {
+ .While = LoopCtx {
+ .label = null,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = *ctx,
+ }
+ }) catch unreachable;
+ return true;
+ },
+ Token.Id.Keyword_for => {
+ stack.append(State {
+ .For = LoopCtx {
+ .label = null,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = *ctx,
+ }
+ }) catch unreachable;
+ return true;
+ },
+ Token.Id.Keyword_switch => {
+ const node = try arena.construct(ast.Node.Switch {
+ .base = ast.Node {
+ .id = ast.Node.Id.Switch,
+ },
+ .switch_token = token_index,
+ .expr = undefined,
+ .cases = ast.Node.Switch.CaseList.init(arena),
+ .rbrace = undefined,
+ });
+ ctx.store(&node.base);
+
+ stack.append(State {
+ .SwitchCaseOrEnd = ListSave(@typeOf(node.cases)) {
+ .list = &node.cases,
+ .ptr = &node.rbrace,
+ },
+ }) catch unreachable;
+ try stack.append(State { .ExpectToken = Token.Id.LBrace });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ return true;
+ },
+ Token.Id.Keyword_comptime => {
+ const node = try createToCtxNode(arena, ctx, ast.Node.Comptime,
+ ast.Node.Comptime {
+ .base = undefined,
+ .comptime_token = token_index,
+ .expr = undefined,
+ .doc_comments = null,
+ }
+ );
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ return true;
+ },
+ Token.Id.LBrace => {
+ const block = try arena.construct(ast.Node.Block {
+ .base = ast.Node {.id = ast.Node.Id.Block },
+ .label = null,
+ .lbrace = token_index,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ });
+ ctx.store(&block.base);
+ stack.append(State { .Block = block }) catch unreachable;
+ return true;
+ },
+ else => {
+ return false;
+ }
+ }
+}
+
+const ExpectCommaOrEndResult = union(enum) {
+ end_token: ?TokenIndex,
+ parse_error: Error,
+};
+
+fn expectCommaOrEnd(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, end: @TagType(Token.Id)) ExpectCommaOrEndResult {
+ const token = nextToken(tok_it, tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Comma => return ExpectCommaOrEndResult { .end_token = null},
+ else => {
+ if (end == token_ptr.id) {
+ return ExpectCommaOrEndResult { .end_token = token_index };
+ }
+
+ return ExpectCommaOrEndResult {
+ .parse_error = Error {
+ .ExpectedCommaOrEnd = Error.ExpectedCommaOrEnd {
+ .token = token_index,
+ .end_id = end,
+ },
+ },
+ };
+ },
+ }
+}
+
+fn tokenIdToAssignment(id: &const Token.Id) ?ast.Node.InfixOp.Op {
+ // TODO: We have to cast all cases because of this:
+ // error: expected type '?InfixOp', found '?@TagType(InfixOp)'
+ return switch (*id) {
+ Token.Id.AmpersandEqual => ast.Node.InfixOp.Op { .AssignBitAnd = {} },
+ Token.Id.AngleBracketAngleBracketLeftEqual => ast.Node.InfixOp.Op { .AssignBitShiftLeft = {} },
+ Token.Id.AngleBracketAngleBracketRightEqual => ast.Node.InfixOp.Op { .AssignBitShiftRight = {} },
+ Token.Id.AsteriskEqual => ast.Node.InfixOp.Op { .AssignTimes = {} },
+ Token.Id.AsteriskPercentEqual => ast.Node.InfixOp.Op { .AssignTimesWarp = {} },
+ Token.Id.CaretEqual => ast.Node.InfixOp.Op { .AssignBitXor = {} },
+ Token.Id.Equal => ast.Node.InfixOp.Op { .Assign = {} },
+ Token.Id.MinusEqual => ast.Node.InfixOp.Op { .AssignMinus = {} },
+ Token.Id.MinusPercentEqual => ast.Node.InfixOp.Op { .AssignMinusWrap = {} },
+ Token.Id.PercentEqual => ast.Node.InfixOp.Op { .AssignMod = {} },
+ Token.Id.PipeEqual => ast.Node.InfixOp.Op { .AssignBitOr = {} },
+ Token.Id.PlusEqual => ast.Node.InfixOp.Op { .AssignPlus = {} },
+ Token.Id.PlusPercentEqual => ast.Node.InfixOp.Op { .AssignPlusWrap = {} },
+ Token.Id.SlashEqual => ast.Node.InfixOp.Op { .AssignDiv = {} },
+ else => null,
+ };
+}
+
+fn tokenIdToUnwrapExpr(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+ return switch (id) {
+ Token.Id.Keyword_catch => ast.Node.InfixOp.Op { .Catch = null },
+ Token.Id.QuestionMarkQuestionMark => ast.Node.InfixOp.Op { .UnwrapMaybe = void{} },
+ else => null,
+ };
+}
+
+fn tokenIdToComparison(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+ return switch (id) {
+ Token.Id.BangEqual => ast.Node.InfixOp.Op { .BangEqual = void{} },
+ Token.Id.EqualEqual => ast.Node.InfixOp.Op { .EqualEqual = void{} },
+ Token.Id.AngleBracketLeft => ast.Node.InfixOp.Op { .LessThan = void{} },
+ Token.Id.AngleBracketLeftEqual => ast.Node.InfixOp.Op { .LessOrEqual = void{} },
+ Token.Id.AngleBracketRight => ast.Node.InfixOp.Op { .GreaterThan = void{} },
+ Token.Id.AngleBracketRightEqual => ast.Node.InfixOp.Op { .GreaterOrEqual = void{} },
+ else => null,
+ };
+}
+
+fn tokenIdToBitShift(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+ return switch (id) {
+ Token.Id.AngleBracketAngleBracketLeft => ast.Node.InfixOp.Op { .BitShiftLeft = void{} },
+ Token.Id.AngleBracketAngleBracketRight => ast.Node.InfixOp.Op { .BitShiftRight = void{} },
+ else => null,
+ };
+}
+
+fn tokenIdToAddition(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+ return switch (id) {
+ Token.Id.Minus => ast.Node.InfixOp.Op { .Sub = void{} },
+ Token.Id.MinusPercent => ast.Node.InfixOp.Op { .SubWrap = void{} },
+ Token.Id.Plus => ast.Node.InfixOp.Op { .Add = void{} },
+ Token.Id.PlusPercent => ast.Node.InfixOp.Op { .AddWrap = void{} },
+ Token.Id.PlusPlus => ast.Node.InfixOp.Op { .ArrayCat = void{} },
+ else => null,
+ };
+}
+
+fn tokenIdToMultiply(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+ return switch (id) {
+ Token.Id.Slash => ast.Node.InfixOp.Op { .Div = void{} },
+ Token.Id.Asterisk => ast.Node.InfixOp.Op { .Mult = void{} },
+ Token.Id.AsteriskAsterisk => ast.Node.InfixOp.Op { .ArrayMult = void{} },
+ Token.Id.AsteriskPercent => ast.Node.InfixOp.Op { .MultWrap = void{} },
+ Token.Id.Percent => ast.Node.InfixOp.Op { .Mod = void{} },
+ Token.Id.PipePipe => ast.Node.InfixOp.Op { .MergeErrorSets = void{} },
+ else => null,
+ };
+}
+
+fn tokenIdToPrefixOp(id: @TagType(Token.Id)) ?ast.Node.PrefixOp.Op {
+ return switch (id) {
+ Token.Id.Bang => ast.Node.PrefixOp.Op { .BoolNot = void{} },
+ Token.Id.Tilde => ast.Node.PrefixOp.Op { .BitNot = void{} },
+ Token.Id.Minus => ast.Node.PrefixOp.Op { .Negation = void{} },
+ Token.Id.MinusPercent => ast.Node.PrefixOp.Op { .NegationWrap = void{} },
+ Token.Id.Asterisk, Token.Id.AsteriskAsterisk => ast.Node.PrefixOp.Op { .Deref = void{} },
+ Token.Id.Ampersand => ast.Node.PrefixOp.Op {
+ .AddrOf = ast.Node.PrefixOp.AddrOfInfo {
+ .align_expr = null,
+ .bit_offset_start_token = null,
+ .bit_offset_end_token = null,
+ .const_token = null,
+ .volatile_token = null,
+ },
+ },
+ Token.Id.QuestionMark => ast.Node.PrefixOp.Op { .MaybeType = void{} },
+ Token.Id.QuestionMarkQuestionMark => ast.Node.PrefixOp.Op { .UnwrapMaybe = void{} },
+ Token.Id.Keyword_await => ast.Node.PrefixOp.Op { .Await = void{} },
+ Token.Id.Keyword_try => ast.Node.PrefixOp.Op { .Try = void{ } },
+ else => null,
+ };
+}
+
+fn createNode(arena: &mem.Allocator, comptime T: type, init_to: &const T) !&T {
+ const node = try arena.create(T);
+ *node = *init_to;
+ node.base = blk: {
+ const id = ast.Node.typeToId(T);
+ break :blk ast.Node {
+ .id = id,
+ };
+ };
+
+ return node;
+}
+
+fn createToCtxNode(arena: &mem.Allocator, opt_ctx: &const OptionalCtx, comptime T: type, init_to: &const T) !&T {
+ const node = try createNode(arena, T, init_to);
+ opt_ctx.store(&node.base);
+
+ return node;
+}
+
+fn createLiteral(arena: &mem.Allocator, comptime T: type, token_index: TokenIndex) !&T {
+ return createNode(arena, T,
+ T {
+ .base = undefined,
+ .token = token_index,
+ }
+ );
+}
+
+fn createToCtxLiteral(arena: &mem.Allocator, opt_ctx: &const OptionalCtx, comptime T: type, token_index: TokenIndex) !&T {
+ const node = try createLiteral(arena, T, token_index);
+ opt_ctx.store(&node.base);
+
+ return node;
+}
+
+fn eatToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, id: @TagType(Token.Id)) ?TokenIndex {
+ const token = nextToken(tok_it, tree);
+
+ if (token.ptr.id == id)
+ return token.index;
+
+ putBackToken(tok_it, tree);
+ return null;
+}
+
+fn nextToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) AnnotatedToken {
+ const result = AnnotatedToken {
+ .index = tok_it.index,
+ .ptr = ??tok_it.next(),
+ };
+ // possibly skip a following same line token
+ const token = tok_it.next() ?? return result;
+ if (token.id != Token.Id.LineComment) {
+ putBackToken(tok_it, tree);
+ return result;
+ }
+ const loc = tree.tokenLocationPtr(result.ptr.end, token);
+ if (loc.line != 0) {
+ putBackToken(tok_it, tree);
+ }
+ return result;
+}
+
+fn putBackToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) void {
+ const prev_tok = ??tok_it.prev();
+ if (prev_tok.id == Token.Id.LineComment) {
+ const minus2_tok = tok_it.prev() ?? return;
+ const loc = tree.tokenLocationPtr(minus2_tok.end, prev_tok);
+ if (loc.line != 0) {
+ _ = tok_it.next();
+ }
+ }
+}
+
+const RenderAstFrame = struct {
+ node: &ast.Node,
+ indent: usize,
+};
+
+pub fn renderAst(allocator: &mem.Allocator, tree: &const ast.Tree, stream: var) !void {
+ var stack = std.ArrayList(State).init(allocator);
+ defer stack.deinit();
+
+ try stack.append(RenderAstFrame {
+ .node = &root_node.base,
+ .indent = 0,
+ });
+
+ while (stack.popOrNull()) |frame| {
+ {
+ var i: usize = 0;
+ while (i < frame.indent) : (i += 1) {
+ try stream.print(" ");
+ }
+ }
+ try stream.print("{}\n", @tagName(frame.node.id));
+ var child_i: usize = 0;
+ while (frame.node.iterate(child_i)) |child| : (child_i += 1) {
+ try stack.append(RenderAstFrame {
+ .node = child,
+ .indent = frame.indent + 2,
+ });
+ }
+ }
+}
+
+test "std.zig.parser" {
+ _ = @import("parser_test.zig");
+}
diff --git a/std/zig/parser.zig b/std/zig/parser.zig
deleted file mode 100644
index 62c62ed185..0000000000
--- a/std/zig/parser.zig
+++ /dev/null
@@ -1,1733 +0,0 @@
-const std = @import("../index.zig");
-const assert = std.debug.assert;
-const ArrayList = std.ArrayList;
-const mem = std.mem;
-const ast = std.zig.ast;
-const Tokenizer = std.zig.Tokenizer;
-const Token = std.zig.Token;
-const builtin = @import("builtin");
-const io = std.io;
-
-// TODO when we make parse errors into error types instead of printing directly,
-// get rid of this
-const warn = std.debug.warn;
-
-pub const Parser = struct {
- util_allocator: &mem.Allocator,
- tokenizer: &Tokenizer,
- put_back_tokens: [2]Token,
- put_back_count: usize,
- source_file_name: []const u8,
- pending_line_comment_node: ?&ast.NodeLineComment,
-
- pub const Tree = struct {
- root_node: &ast.NodeRoot,
- arena_allocator: std.heap.ArenaAllocator,
-
- pub fn deinit(self: &Tree) void {
- self.arena_allocator.deinit();
- }
- };
-
- // This memory contents are used only during a function call. It's used to repurpose memory;
- // we reuse the same bytes for the stack data structure used by parsing, tree rendering, and
- // source rendering.
- const utility_bytes_align = @alignOf( union { a: RenderAstFrame, b: State, c: RenderState } );
- utility_bytes: []align(utility_bytes_align) u8,
-
- /// allocator must outlive the returned Parser and all the parse trees you create with it.
- pub fn init(tokenizer: &Tokenizer, allocator: &mem.Allocator, source_file_name: []const u8) Parser {
- return Parser {
- .util_allocator = allocator,
- .tokenizer = tokenizer,
- .put_back_tokens = undefined,
- .put_back_count = 0,
- .source_file_name = source_file_name,
- .utility_bytes = []align(utility_bytes_align) u8{},
- .pending_line_comment_node = null,
- };
- }
-
- pub fn deinit(self: &Parser) void {
- self.util_allocator.free(self.utility_bytes);
- }
-
- const TopLevelDeclCtx = struct {
- visib_token: ?Token,
- extern_token: ?Token,
- };
-
- const DestPtr = union(enum) {
- Field: &&ast.Node,
- NullableField: &?&ast.Node,
- List: &ArrayList(&ast.Node),
-
- pub fn store(self: &const DestPtr, value: &ast.Node) !void {
- switch (*self) {
- DestPtr.Field => |ptr| *ptr = value,
- DestPtr.NullableField => |ptr| *ptr = value,
- DestPtr.List => |list| try list.append(value),
- }
- }
- };
-
- const ExpectTokenSave = struct {
- id: Token.Id,
- ptr: &Token,
- };
-
- const State = union(enum) {
- TopLevel,
- TopLevelExtern: ?Token,
- TopLevelDecl: TopLevelDeclCtx,
- Expression: DestPtr,
- ExpectOperand,
- Operand: &ast.Node,
- AfterOperand,
- InfixOp: &ast.NodeInfixOp,
- PrefixOp: &ast.NodePrefixOp,
- SuffixOp: &ast.Node,
- AddrOfModifiers: &ast.NodePrefixOp.AddrOfInfo,
- TypeExpr: DestPtr,
- VarDecl: &ast.NodeVarDecl,
- VarDeclAlign: &ast.NodeVarDecl,
- VarDeclEq: &ast.NodeVarDecl,
- ExpectToken: @TagType(Token.Id),
- ExpectTokenSave: ExpectTokenSave,
- FnProto: &ast.NodeFnProto,
- FnProtoAlign: &ast.NodeFnProto,
- FnProtoReturnType: &ast.NodeFnProto,
- ParamDecl: &ast.NodeFnProto,
- ParamDeclComma,
- FnDef: &ast.NodeFnProto,
- Block: &ast.NodeBlock,
- Statement: &ast.NodeBlock,
- ExprListItemOrEnd: &ArrayList(&ast.Node),
- ExprListCommaOrEnd: &ArrayList(&ast.Node),
- };
-
- /// Returns an AST tree, allocated with the parser's allocator.
- /// Result should be freed with tree.deinit() when there are
- /// no more references to any AST nodes of the tree.
- pub fn parse(self: &Parser) !Tree {
- var stack = self.initUtilityArrayList(State);
- defer self.deinitUtilityArrayList(stack);
-
- var arena_allocator = std.heap.ArenaAllocator.init(self.util_allocator);
- errdefer arena_allocator.deinit();
-
- const arena = &arena_allocator.allocator;
- const root_node = try self.createRoot(arena);
-
- try stack.append(State.TopLevel);
-
- while (true) {
- //{
- // const token = self.getNextToken();
- // warn("{} ", @tagName(token.id));
- // self.putBackToken(token);
- // var i: usize = stack.len;
- // while (i != 0) {
- // i -= 1;
- // warn("{} ", @tagName(stack.items[i]));
- // }
- // warn("\n");
- //}
-
- // look for line comments
- while (true) {
- const token = self.getNextToken();
- if (token.id == Token.Id.LineComment) {
- const node = blk: {
- if (self.pending_line_comment_node) |comment_node| {
- break :blk comment_node;
- } else {
- const comment_node = try arena.create(ast.NodeLineComment);
- *comment_node = ast.NodeLineComment {
- .base = ast.Node {
- .id = ast.Node.Id.LineComment,
- .comment = null,
- },
- .lines = ArrayList(Token).init(arena),
- };
- self.pending_line_comment_node = comment_node;
- break :blk comment_node;
- }
- };
- try node.lines.append(token);
- continue;
- }
- self.putBackToken(token);
- break;
- }
-
- // This gives us 1 free append that can't fail
- const state = stack.pop();
-
- switch (state) {
- State.TopLevel => {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_pub, Token.Id.Keyword_export => {
- stack.append(State { .TopLevelExtern = token }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_test => {
- stack.append(State.TopLevel) catch unreachable;
-
- const name_token = self.getNextToken();
- if (name_token.id != Token.Id.StringLiteral)
- return self.parseError(token, "expected {}, found {}", @tagName(Token.Id.StringLiteral), @tagName(name_token.id));
-
- const lbrace = self.getNextToken();
- if (lbrace.id != Token.Id.LBrace)
- return self.parseError(token, "expected {}, found {}", @tagName(Token.Id.LBrace), @tagName(name_token.id));
-
- const block = try self.createBlock(arena, token);
- const test_decl = try self.createAttachTestDecl(arena, &root_node.decls, token, name_token, block);
- try stack.append(State { .Block = block });
- continue;
- },
- Token.Id.Eof => {
- root_node.eof_token = token;
- return Tree {.root_node = root_node, .arena_allocator = arena_allocator};
- },
- else => {
- self.putBackToken(token);
- stack.append(State { .TopLevelExtern = null }) catch unreachable;
- continue;
- },
- }
- },
- State.TopLevelExtern => |visib_token| {
- const token = self.getNextToken();
- if (token.id == Token.Id.Keyword_extern) {
- stack.append(State {
- .TopLevelDecl = TopLevelDeclCtx {
- .visib_token = visib_token,
- .extern_token = token,
- },
- }) catch unreachable;
- continue;
- }
- self.putBackToken(token);
- stack.append(State {
- .TopLevelDecl = TopLevelDeclCtx {
- .visib_token = visib_token,
- .extern_token = null,
- },
- }) catch unreachable;
- continue;
- },
- State.TopLevelDecl => |ctx| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_var, Token.Id.Keyword_const => {
- stack.append(State.TopLevel) catch unreachable;
- // TODO shouldn't need these casts
- const var_decl_node = try self.createAttachVarDecl(arena, &root_node.decls, ctx.visib_token,
- token, (?Token)(null), ctx.extern_token);
- try stack.append(State { .VarDecl = var_decl_node });
- continue;
- },
- Token.Id.Keyword_fn => {
- stack.append(State.TopLevel) catch unreachable;
- // TODO shouldn't need these casts
- const fn_proto = try self.createAttachFnProto(arena, &root_node.decls, token,
- ctx.extern_token, (?Token)(null), ctx.visib_token, (?Token)(null));
- try stack.append(State { .FnDef = fn_proto });
- try stack.append(State { .FnProto = fn_proto });
- continue;
- },
- Token.Id.StringLiteral => {
- @panic("TODO extern with string literal");
- },
- Token.Id.Keyword_nakedcc, Token.Id.Keyword_stdcallcc => {
- stack.append(State.TopLevel) catch unreachable;
- const fn_token = try self.eatToken(Token.Id.Keyword_fn);
- // TODO shouldn't need this cast
- const fn_proto = try self.createAttachFnProto(arena, &root_node.decls, fn_token,
- ctx.extern_token, (?Token)(token), (?Token)(null), (?Token)(null));
- try stack.append(State { .FnDef = fn_proto });
- try stack.append(State { .FnProto = fn_proto });
- continue;
- },
- else => return self.parseError(token, "expected variable declaration or function, found {}", @tagName(token.id)),
- }
- },
- State.VarDecl => |var_decl| {
- var_decl.name_token = try self.eatToken(Token.Id.Identifier);
- stack.append(State { .VarDeclAlign = var_decl }) catch unreachable;
-
- const next_token = self.getNextToken();
- if (next_token.id == Token.Id.Colon) {
- try stack.append(State { .TypeExpr = DestPtr {.NullableField = &var_decl.type_node} });
- continue;
- }
-
- self.putBackToken(next_token);
- continue;
- },
- State.VarDeclAlign => |var_decl| {
- stack.append(State { .VarDeclEq = var_decl }) catch unreachable;
-
- const next_token = self.getNextToken();
- if (next_token.id == Token.Id.Keyword_align) {
- _ = try self.eatToken(Token.Id.LParen);
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = DestPtr{.NullableField = &var_decl.align_node} });
- continue;
- }
-
- self.putBackToken(next_token);
- continue;
- },
- State.VarDeclEq => |var_decl| {
- const token = self.getNextToken();
- if (token.id == Token.Id.Equal) {
- var_decl.eq_token = token;
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Semicolon,
- .ptr = &var_decl.semicolon_token,
- },
- }) catch unreachable;
- try stack.append(State {
- .Expression = DestPtr {.NullableField = &var_decl.init_node},
- });
- continue;
- }
- if (token.id == Token.Id.Semicolon) {
- var_decl.semicolon_token = token;
- continue;
- }
- return self.parseError(token, "expected '=' or ';', found {}", @tagName(token.id));
- },
- State.ExpectToken => |token_id| {
- _ = try self.eatToken(token_id);
- continue;
- },
-
- State.ExpectTokenSave => |expect_token_save| {
- *expect_token_save.ptr = try self.eatToken(expect_token_save.id);
- continue;
- },
-
- State.Expression => |dest_ptr| {
- // save the dest_ptr for later
- stack.append(state) catch unreachable;
- try stack.append(State.ExpectOperand);
- continue;
- },
- State.ExpectOperand => {
- // we'll either get an operand (like 1 or x),
- // or a prefix operator (like ~ or return).
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_return => {
- try stack.append(State { .PrefixOp = try self.createPrefixOp(arena, token,
- ast.NodePrefixOp.PrefixOp.Return) });
- try stack.append(State.ExpectOperand);
- continue;
- },
- Token.Id.Keyword_try => {
- try stack.append(State { .PrefixOp = try self.createPrefixOp(arena, token,
- ast.NodePrefixOp.PrefixOp.Try) });
- try stack.append(State.ExpectOperand);
- continue;
- },
- Token.Id.Minus => {
- try stack.append(State { .PrefixOp = try self.createPrefixOp(arena, token,
- ast.NodePrefixOp.PrefixOp.Negation) });
- try stack.append(State.ExpectOperand);
- continue;
- },
- Token.Id.MinusPercent => {
- try stack.append(State { .PrefixOp = try self.createPrefixOp(arena, token,
- ast.NodePrefixOp.PrefixOp.NegationWrap) });
- try stack.append(State.ExpectOperand);
- continue;
- },
- Token.Id.Tilde => {
- try stack.append(State { .PrefixOp = try self.createPrefixOp(arena, token,
- ast.NodePrefixOp.PrefixOp.BitNot) });
- try stack.append(State.ExpectOperand);
- continue;
- },
- Token.Id.QuestionMarkQuestionMark => {
- try stack.append(State { .PrefixOp = try self.createPrefixOp(arena, token,
- ast.NodePrefixOp.PrefixOp.UnwrapMaybe) });
- try stack.append(State.ExpectOperand);
- continue;
- },
- Token.Id.Bang => {
- try stack.append(State { .PrefixOp = try self.createPrefixOp(arena, token,
- ast.NodePrefixOp.PrefixOp.BoolNot) });
- try stack.append(State.ExpectOperand);
- continue;
- },
- Token.Id.Asterisk => {
- try stack.append(State { .PrefixOp = try self.createPrefixOp(arena, token,
- ast.NodePrefixOp.PrefixOp.Deref) });
- try stack.append(State.ExpectOperand);
- continue;
- },
- Token.Id.Ampersand => {
- const prefix_op = try self.createPrefixOp(arena, token, ast.NodePrefixOp.PrefixOp{
- .AddrOf = ast.NodePrefixOp.AddrOfInfo {
- .align_expr = null,
- .bit_offset_start_token = null,
- .bit_offset_end_token = null,
- .const_token = null,
- .volatile_token = null,
- }
- });
- try stack.append(State { .PrefixOp = prefix_op });
- try stack.append(State.ExpectOperand);
- try stack.append(State { .AddrOfModifiers = &prefix_op.op.AddrOf });
- continue;
- },
- Token.Id.Identifier => {
- try stack.append(State {
- .Operand = &(try self.createIdentifier(arena, token)).base
- });
- try stack.append(State.AfterOperand);
- continue;
- },
- Token.Id.IntegerLiteral => {
- try stack.append(State {
- .Operand = &(try self.createIntegerLiteral(arena, token)).base
- });
- try stack.append(State.AfterOperand);
- continue;
- },
- Token.Id.FloatLiteral => {
- try stack.append(State {
- .Operand = &(try self.createFloatLiteral(arena, token)).base
- });
- try stack.append(State.AfterOperand);
- continue;
- },
- Token.Id.Keyword_undefined => {
- try stack.append(State {
- .Operand = &(try self.createUndefined(arena, token)).base
- });
- try stack.append(State.AfterOperand);
- continue;
- },
- Token.Id.Builtin => {
- const node = try arena.create(ast.NodeBuiltinCall);
- *node = ast.NodeBuiltinCall {
- .base = self.initNode(ast.Node.Id.BuiltinCall),
- .builtin_token = token,
- .params = ArrayList(&ast.Node).init(arena),
- .rparen_token = undefined,
- };
- try stack.append(State {
- .Operand = &node.base
- });
- try stack.append(State.AfterOperand);
- try stack.append(State {.ExprListItemOrEnd = &node.params });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.LParen,
- .ptr = &node.rparen_token,
- },
- });
- continue;
- },
- Token.Id.StringLiteral => {
- const node = try arena.create(ast.NodeStringLiteral);
- *node = ast.NodeStringLiteral {
- .base = self.initNode(ast.Node.Id.StringLiteral),
- .token = token,
- };
- try stack.append(State {
- .Operand = &node.base
- });
- try stack.append(State.AfterOperand);
- continue;
- },
-
- else => return self.parseError(token, "expected primary expression, found {}", @tagName(token.id)),
- }
- },
-
- State.AfterOperand => {
- // we'll either get an infix operator (like != or ^),
- // or a postfix operator (like () or {}),
- // otherwise this expression is done (like on a ; or else).
- var token = self.getNextToken();
- if (tokenIdToInfixOp(token.id)) |infix_id| {
- try stack.append(State {
- .InfixOp = try self.createInfixOp(arena, token, infix_id)
- });
- try stack.append(State.ExpectOperand);
- continue;
-
- } else if (token.id == Token.Id.LParen) {
- self.putBackToken(token);
-
- const node = try arena.create(ast.NodeCall);
- *node = ast.NodeCall {
- .base = self.initNode(ast.Node.Id.Call),
- .callee = undefined,
- .params = ArrayList(&ast.Node).init(arena),
- .rparen_token = undefined,
- };
- try stack.append(State { .SuffixOp = &node.base });
- try stack.append(State.AfterOperand);
- try stack.append(State {.ExprListItemOrEnd = &node.params });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.LParen,
- .ptr = &node.rparen_token,
- },
- });
- continue;
-
- // TODO: Parse postfix operator
- } else {
- // no postfix/infix operator after this operand.
- self.putBackToken(token);
-
- var expression = popSuffixOp(&stack);
- while (true) {
- switch (stack.pop()) {
- State.Expression => |dest_ptr| {
- // we're done
- try dest_ptr.store(expression);
- break;
- },
- State.InfixOp => |infix_op| {
- infix_op.rhs = expression;
- infix_op.lhs = popSuffixOp(&stack);
- expression = &infix_op.base;
- continue;
- },
- State.PrefixOp => |prefix_op| {
- prefix_op.rhs = expression;
- expression = &prefix_op.base;
- continue;
- },
- else => unreachable,
- }
- }
- continue;
- }
- },
-
- State.ExprListItemOrEnd => |params| {
- var token = self.getNextToken();
- switch (token.id) {
- Token.Id.RParen => continue,
- else => {
- self.putBackToken(token);
- stack.append(State { .ExprListCommaOrEnd = params }) catch unreachable;
- try stack.append(State { .Expression = DestPtr{.List = params} });
- },
- }
- },
-
- State.ExprListCommaOrEnd => |params| {
- var token = self.getNextToken();
- switch (token.id) {
- Token.Id.Comma => {
- stack.append(State { .ExprListItemOrEnd = params }) catch unreachable;
- },
- Token.Id.RParen => continue,
- else => return self.parseError(token, "expected ',' or ')', found {}", @tagName(token.id)),
- }
- },
-
- State.AddrOfModifiers => |addr_of_info| {
- var token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_align => {
- stack.append(state) catch unreachable;
- if (addr_of_info.align_expr != null) return self.parseError(token, "multiple align qualifiers");
- _ = try self.eatToken(Token.Id.LParen);
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = DestPtr{.NullableField = &addr_of_info.align_expr} });
- continue;
- },
- Token.Id.Keyword_const => {
- stack.append(state) catch unreachable;
- if (addr_of_info.const_token != null) return self.parseError(token, "duplicate qualifier: const");
- addr_of_info.const_token = token;
- continue;
- },
- Token.Id.Keyword_volatile => {
- stack.append(state) catch unreachable;
- if (addr_of_info.volatile_token != null) return self.parseError(token, "duplicate qualifier: volatile");
- addr_of_info.volatile_token = token;
- continue;
- },
- else => {
- self.putBackToken(token);
- continue;
- },
- }
- },
-
- State.TypeExpr => |dest_ptr| {
- const token = self.getNextToken();
- if (token.id == Token.Id.Keyword_var) {
- @panic("TODO param with type var");
- }
- self.putBackToken(token);
-
- stack.append(State { .Expression = dest_ptr }) catch unreachable;
- continue;
- },
-
- State.FnProto => |fn_proto| {
- stack.append(State { .FnProtoAlign = fn_proto }) catch unreachable;
- try stack.append(State { .ParamDecl = fn_proto });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
-
- const next_token = self.getNextToken();
- if (next_token.id == Token.Id.Identifier) {
- fn_proto.name_token = next_token;
- continue;
- }
- self.putBackToken(next_token);
- continue;
- },
-
- State.FnProtoAlign => |fn_proto| {
- const token = self.getNextToken();
- if (token.id == Token.Id.Keyword_align) {
- @panic("TODO fn proto align");
- }
- self.putBackToken(token);
- stack.append(State {
- .FnProtoReturnType = fn_proto,
- }) catch unreachable;
- continue;
- },
-
- State.FnProtoReturnType => |fn_proto| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_var => {
- fn_proto.return_type = ast.NodeFnProto.ReturnType { .Infer = token };
- },
- Token.Id.Bang => {
- fn_proto.return_type = ast.NodeFnProto.ReturnType { .InferErrorSet = undefined };
- stack.append(State {
- .TypeExpr = DestPtr {.Field = &fn_proto.return_type.InferErrorSet},
- }) catch unreachable;
- },
- else => {
- self.putBackToken(token);
- fn_proto.return_type = ast.NodeFnProto.ReturnType { .Explicit = undefined };
- stack.append(State {
- .TypeExpr = DestPtr {.Field = &fn_proto.return_type.Explicit},
- }) catch unreachable;
- },
- }
- if (token.id == Token.Id.Keyword_align) {
- @panic("TODO fn proto align");
- }
- continue;
- },
-
- State.ParamDecl => |fn_proto| {
- var token = self.getNextToken();
- if (token.id == Token.Id.RParen) {
- continue;
- }
- const param_decl = try self.createAttachParamDecl(arena, &fn_proto.params);
- if (token.id == Token.Id.Keyword_comptime) {
- param_decl.comptime_token = token;
- token = self.getNextToken();
- } else if (token.id == Token.Id.Keyword_noalias) {
- param_decl.noalias_token = token;
- token = self.getNextToken();
- }
- if (token.id == Token.Id.Identifier) {
- const next_token = self.getNextToken();
- if (next_token.id == Token.Id.Colon) {
- param_decl.name_token = token;
- token = self.getNextToken();
- } else {
- self.putBackToken(next_token);
- }
- }
- if (token.id == Token.Id.Ellipsis3) {
- param_decl.var_args_token = token;
- stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
- continue;
- } else {
- self.putBackToken(token);
- }
-
- stack.append(State { .ParamDecl = fn_proto }) catch unreachable;
- try stack.append(State.ParamDeclComma);
- try stack.append(State {
- .TypeExpr = DestPtr {.Field = &param_decl.type_node}
- });
- continue;
- },
-
- State.ParamDeclComma => {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.RParen => {
- _ = stack.pop(); // pop off the ParamDecl
- continue;
- },
- Token.Id.Comma => continue,
- else => return self.parseError(token, "expected ',' or ')', found {}", @tagName(token.id)),
- }
- },
-
- State.FnDef => |fn_proto| {
- const token = self.getNextToken();
- switch(token.id) {
- Token.Id.LBrace => {
- const block = try self.createBlock(arena, token);
- fn_proto.body_node = &block.base;
- stack.append(State { .Block = block }) catch unreachable;
- continue;
- },
- Token.Id.Semicolon => continue,
- else => return self.parseError(token, "expected ';' or '{{', found {}", @tagName(token.id)),
- }
- },
-
- State.Block => |block| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.RBrace => {
- block.end_token = token;
- continue;
- },
- else => {
- self.putBackToken(token);
- stack.append(State { .Block = block }) catch unreachable;
- try stack.append(State { .Statement = block });
- continue;
- },
- }
- },
-
- State.Statement => |block| {
- {
- // Look for comptime var, comptime const
- const comptime_token = self.getNextToken();
- if (comptime_token.id == Token.Id.Keyword_comptime) {
- const mut_token = self.getNextToken();
- if (mut_token.id == Token.Id.Keyword_var or mut_token.id == Token.Id.Keyword_const) {
- // TODO shouldn't need these casts
- const var_decl = try self.createAttachVarDecl(arena, &block.statements, (?Token)(null),
- mut_token, (?Token)(comptime_token), (?Token)(null));
- try stack.append(State { .VarDecl = var_decl });
- continue;
- }
- self.putBackToken(mut_token);
- }
- self.putBackToken(comptime_token);
- }
- {
- // Look for const, var
- const mut_token = self.getNextToken();
- if (mut_token.id == Token.Id.Keyword_var or mut_token.id == Token.Id.Keyword_const) {
- // TODO shouldn't need these casts
- const var_decl = try self.createAttachVarDecl(arena, &block.statements, (?Token)(null),
- mut_token, (?Token)(null), (?Token)(null));
- try stack.append(State { .VarDecl = var_decl });
- continue;
- }
- self.putBackToken(mut_token);
- }
-
- stack.append(State { .ExpectToken = Token.Id.Semicolon }) catch unreachable;
- try stack.append(State { .Expression = DestPtr{.List = &block.statements} });
- continue;
- },
-
- // These are data, not control flow.
- State.InfixOp => unreachable,
- State.PrefixOp => unreachable,
- State.SuffixOp => unreachable,
- State.Operand => unreachable,
- }
- }
- }
-
- fn popSuffixOp(stack: &ArrayList(State)) &ast.Node {
- var expression: &ast.Node = undefined;
- var left_leaf_ptr: &&ast.Node = &expression;
- while (true) {
- switch (stack.pop()) {
- State.SuffixOp => |suffix_op| {
- switch (suffix_op.id) {
- ast.Node.Id.Call => {
- const call = @fieldParentPtr(ast.NodeCall, "base", suffix_op);
- *left_leaf_ptr = &call.base;
- left_leaf_ptr = &call.callee;
- continue;
- },
- else => unreachable,
- }
- },
- State.Operand => |operand| {
- *left_leaf_ptr = operand;
- break;
- },
- else => unreachable,
- }
- }
-
- return expression;
- }
-
- fn tokenIdToInfixOp(id: &const Token.Id) ?ast.NodeInfixOp.InfixOp {
- return switch (*id) {
- Token.Id.Ampersand => ast.NodeInfixOp.InfixOp.BitAnd,
- Token.Id.AmpersandEqual => ast.NodeInfixOp.InfixOp.AssignBitAnd,
- Token.Id.AngleBracketAngleBracketLeft => ast.NodeInfixOp.InfixOp.BitShiftLeft,
- Token.Id.AngleBracketAngleBracketLeftEqual => ast.NodeInfixOp.InfixOp.AssignBitShiftLeft,
- Token.Id.AngleBracketAngleBracketRight => ast.NodeInfixOp.InfixOp.BitShiftRight,
- Token.Id.AngleBracketAngleBracketRightEqual => ast.NodeInfixOp.InfixOp.AssignBitShiftRight,
- Token.Id.AngleBracketLeft => ast.NodeInfixOp.InfixOp.LessThan,
- Token.Id.AngleBracketLeftEqual => ast.NodeInfixOp.InfixOp.LessOrEqual,
- Token.Id.AngleBracketRight => ast.NodeInfixOp.InfixOp.GreaterThan,
- Token.Id.AngleBracketRightEqual => ast.NodeInfixOp.InfixOp.GreaterOrEqual,
- Token.Id.Asterisk => ast.NodeInfixOp.InfixOp.Mult,
- Token.Id.AsteriskAsterisk => ast.NodeInfixOp.InfixOp.ArrayMult,
- Token.Id.AsteriskEqual => ast.NodeInfixOp.InfixOp.AssignTimes,
- Token.Id.AsteriskPercent => ast.NodeInfixOp.InfixOp.MultWrap,
- Token.Id.AsteriskPercentEqual => ast.NodeInfixOp.InfixOp.AssignTimesWarp,
- Token.Id.Bang => ast.NodeInfixOp.InfixOp.ErrorUnion,
- Token.Id.BangEqual => ast.NodeInfixOp.InfixOp.BangEqual,
- Token.Id.Caret => ast.NodeInfixOp.InfixOp.BitXor,
- Token.Id.CaretEqual => ast.NodeInfixOp.InfixOp.AssignBitXor,
- Token.Id.Equal => ast.NodeInfixOp.InfixOp.Assign,
- Token.Id.EqualEqual => ast.NodeInfixOp.InfixOp.EqualEqual,
- Token.Id.Keyword_and => ast.NodeInfixOp.InfixOp.BoolAnd,
- Token.Id.Keyword_or => ast.NodeInfixOp.InfixOp.BoolOr,
- Token.Id.Minus => ast.NodeInfixOp.InfixOp.Sub,
- Token.Id.MinusEqual => ast.NodeInfixOp.InfixOp.AssignMinus,
- Token.Id.MinusPercent => ast.NodeInfixOp.InfixOp.SubWrap,
- Token.Id.MinusPercentEqual => ast.NodeInfixOp.InfixOp.AssignMinusWrap,
- Token.Id.Percent => ast.NodeInfixOp.InfixOp.Mod,
- Token.Id.PercentEqual => ast.NodeInfixOp.InfixOp.AssignMod,
- Token.Id.Period => ast.NodeInfixOp.InfixOp.Period,
- Token.Id.Pipe => ast.NodeInfixOp.InfixOp.BitOr,
- Token.Id.PipeEqual => ast.NodeInfixOp.InfixOp.AssignBitOr,
- Token.Id.PipePipe => ast.NodeInfixOp.InfixOp.MergeErrorSets,
- Token.Id.Plus => ast.NodeInfixOp.InfixOp.Add,
- Token.Id.PlusEqual => ast.NodeInfixOp.InfixOp.AssignPlus,
- Token.Id.PlusPercent => ast.NodeInfixOp.InfixOp.AddWrap,
- Token.Id.PlusPercentEqual => ast.NodeInfixOp.InfixOp.AssignPlusWrap,
- Token.Id.PlusPlus => ast.NodeInfixOp.InfixOp.ArrayCat,
- Token.Id.QuestionMarkQuestionMark => ast.NodeInfixOp.InfixOp.UnwrapMaybe,
- Token.Id.Slash => ast.NodeInfixOp.InfixOp.Div,
- Token.Id.SlashEqual => ast.NodeInfixOp.InfixOp.AssignDiv,
- else => null,
- };
- }
-
- fn initNode(self: &Parser, id: ast.Node.Id) ast.Node {
- if (self.pending_line_comment_node) |comment_node| {
- self.pending_line_comment_node = null;
- return ast.Node {.id = id, .comment = comment_node};
- }
- return ast.Node {.id = id, .comment = null };
- }
-
- fn createRoot(self: &Parser, arena: &mem.Allocator) !&ast.NodeRoot {
- const node = try arena.create(ast.NodeRoot);
-
- *node = ast.NodeRoot {
- .base = self.initNode(ast.Node.Id.Root),
- .decls = ArrayList(&ast.Node).init(arena),
- // initialized when we get the eof token
- .eof_token = undefined,
- };
- return node;
- }
-
- fn createVarDecl(self: &Parser, arena: &mem.Allocator, visib_token: &const ?Token, mut_token: &const Token,
- comptime_token: &const ?Token, extern_token: &const ?Token) !&ast.NodeVarDecl
- {
- const node = try arena.create(ast.NodeVarDecl);
-
- *node = ast.NodeVarDecl {
- .base = self.initNode(ast.Node.Id.VarDecl),
- .visib_token = *visib_token,
- .mut_token = *mut_token,
- .comptime_token = *comptime_token,
- .extern_token = *extern_token,
- .type_node = null,
- .align_node = null,
- .init_node = null,
- .lib_name = null,
- // initialized later
- .name_token = undefined,
- .eq_token = undefined,
- .semicolon_token = undefined,
- };
- return node;
- }
-
- fn createTestDecl(self: &Parser, arena: &mem.Allocator, test_token: &const Token, name_token: &const Token,
- block: &ast.NodeBlock) !&ast.NodeTestDecl
- {
- const node = try arena.create(ast.NodeTestDecl);
-
- *node = ast.NodeTestDecl {
- .base = self.initNode(ast.Node.Id.TestDecl),
- .test_token = *test_token,
- .name_token = *name_token,
- .body_node = &block.base,
- };
- return node;
- }
-
- fn createFnProto(self: &Parser, arena: &mem.Allocator, fn_token: &const Token, extern_token: &const ?Token,
- cc_token: &const ?Token, visib_token: &const ?Token, inline_token: &const ?Token) !&ast.NodeFnProto
- {
- const node = try arena.create(ast.NodeFnProto);
-
- *node = ast.NodeFnProto {
- .base = self.initNode(ast.Node.Id.FnProto),
- .visib_token = *visib_token,
- .name_token = null,
- .fn_token = *fn_token,
- .params = ArrayList(&ast.Node).init(arena),
- .return_type = undefined,
- .var_args_token = null,
- .extern_token = *extern_token,
- .inline_token = *inline_token,
- .cc_token = *cc_token,
- .body_node = null,
- .lib_name = null,
- .align_expr = null,
- };
- return node;
- }
-
- fn createParamDecl(self: &Parser, arena: &mem.Allocator) !&ast.NodeParamDecl {
- const node = try arena.create(ast.NodeParamDecl);
-
- *node = ast.NodeParamDecl {
- .base = self.initNode(ast.Node.Id.ParamDecl),
- .comptime_token = null,
- .noalias_token = null,
- .name_token = null,
- .type_node = undefined,
- .var_args_token = null,
- };
- return node;
- }
-
- fn createBlock(self: &Parser, arena: &mem.Allocator, begin_token: &const Token) !&ast.NodeBlock {
- const node = try arena.create(ast.NodeBlock);
-
- *node = ast.NodeBlock {
- .base = self.initNode(ast.Node.Id.Block),
- .begin_token = *begin_token,
- .end_token = undefined,
- .statements = ArrayList(&ast.Node).init(arena),
- };
- return node;
- }
-
- fn createInfixOp(self: &Parser, arena: &mem.Allocator, op_token: &const Token, op: &const ast.NodeInfixOp.InfixOp) !&ast.NodeInfixOp {
- const node = try arena.create(ast.NodeInfixOp);
-
- *node = ast.NodeInfixOp {
- .base = self.initNode(ast.Node.Id.InfixOp),
- .op_token = *op_token,
- .lhs = undefined,
- .op = *op,
- .rhs = undefined,
- };
- return node;
- }
-
- fn createPrefixOp(self: &Parser, arena: &mem.Allocator, op_token: &const Token, op: &const ast.NodePrefixOp.PrefixOp) !&ast.NodePrefixOp {
- const node = try arena.create(ast.NodePrefixOp);
-
- *node = ast.NodePrefixOp {
- .base = self.initNode(ast.Node.Id.PrefixOp),
- .op_token = *op_token,
- .op = *op,
- .rhs = undefined,
- };
- return node;
- }
-
- fn createIdentifier(self: &Parser, arena: &mem.Allocator, name_token: &const Token) !&ast.NodeIdentifier {
- const node = try arena.create(ast.NodeIdentifier);
-
- *node = ast.NodeIdentifier {
- .base = self.initNode(ast.Node.Id.Identifier),
- .name_token = *name_token,
- };
- return node;
- }
-
- fn createIntegerLiteral(self: &Parser, arena: &mem.Allocator, token: &const Token) !&ast.NodeIntegerLiteral {
- const node = try arena.create(ast.NodeIntegerLiteral);
-
- *node = ast.NodeIntegerLiteral {
- .base = self.initNode(ast.Node.Id.IntegerLiteral),
- .token = *token,
- };
- return node;
- }
-
- fn createFloatLiteral(self: &Parser, arena: &mem.Allocator, token: &const Token) !&ast.NodeFloatLiteral {
- const node = try arena.create(ast.NodeFloatLiteral);
-
- *node = ast.NodeFloatLiteral {
- .base = self.initNode(ast.Node.Id.FloatLiteral),
- .token = *token,
- };
- return node;
- }
-
- fn createUndefined(self: &Parser, arena: &mem.Allocator, token: &const Token) !&ast.NodeUndefinedLiteral {
- const node = try arena.create(ast.NodeUndefinedLiteral);
-
- *node = ast.NodeUndefinedLiteral {
- .base = self.initNode(ast.Node.Id.UndefinedLiteral),
- .token = *token,
- };
- return node;
- }
-
- fn createAttachIdentifier(self: &Parser, arena: &mem.Allocator, dest_ptr: &const DestPtr, name_token: &const Token) !&ast.NodeIdentifier {
- const node = try self.createIdentifier(arena, name_token);
- try dest_ptr.store(&node.base);
- return node;
- }
-
- fn createAttachParamDecl(self: &Parser, arena: &mem.Allocator, list: &ArrayList(&ast.Node)) !&ast.NodeParamDecl {
- const node = try self.createParamDecl(arena);
- try list.append(&node.base);
- return node;
- }
-
- fn createAttachFnProto(self: &Parser, arena: &mem.Allocator, list: &ArrayList(&ast.Node), fn_token: &const Token,
- extern_token: &const ?Token, cc_token: &const ?Token, visib_token: &const ?Token,
- inline_token: &const ?Token) !&ast.NodeFnProto
- {
- const node = try self.createFnProto(arena, fn_token, extern_token, cc_token, visib_token, inline_token);
- try list.append(&node.base);
- return node;
- }
-
- fn createAttachVarDecl(self: &Parser, arena: &mem.Allocator, list: &ArrayList(&ast.Node),
- visib_token: &const ?Token, mut_token: &const Token, comptime_token: &const ?Token,
- extern_token: &const ?Token) !&ast.NodeVarDecl
- {
- const node = try self.createVarDecl(arena, visib_token, mut_token, comptime_token, extern_token);
- try list.append(&node.base);
- return node;
- }
-
- fn createAttachTestDecl(self: &Parser, arena: &mem.Allocator, list: &ArrayList(&ast.Node),
- test_token: &const Token, name_token: &const Token, block: &ast.NodeBlock) !&ast.NodeTestDecl
- {
- const node = try self.createTestDecl(arena, test_token, name_token, block);
- try list.append(&node.base);
- return node;
- }
-
- fn parseError(self: &Parser, token: &const Token, comptime fmt: []const u8, args: ...) (error{ParseError}) {
- const loc = self.tokenizer.getTokenLocation(token);
- warn("{}:{}:{}: error: " ++ fmt ++ "\n", self.source_file_name, token.line + 1, token.column + 1, args);
- warn("{}\n", self.tokenizer.buffer[loc.line_start..loc.line_end]);
- {
- var i: usize = 0;
- while (i < token.column) : (i += 1) {
- warn(" ");
- }
- }
- {
- const caret_count = token.end - token.start;
- var i: usize = 0;
- while (i < caret_count) : (i += 1) {
- warn("~");
- }
- }
- warn("\n");
- return error.ParseError;
- }
-
- fn expectToken(self: &Parser, token: &const Token, id: @TagType(Token.Id)) !void {
- if (token.id != id) {
- return self.parseError(token, "expected {}, found {}", @tagName(id), @tagName(token.id));
- }
- }
-
- fn eatToken(self: &Parser, id: @TagType(Token.Id)) !Token {
- const token = self.getNextToken();
- try self.expectToken(token, id);
- return token;
- }
-
- fn putBackToken(self: &Parser, token: &const Token) void {
- self.put_back_tokens[self.put_back_count] = *token;
- self.put_back_count += 1;
- }
-
- fn getNextToken(self: &Parser) Token {
- if (self.put_back_count != 0) {
- const put_back_index = self.put_back_count - 1;
- const put_back_token = self.put_back_tokens[put_back_index];
- self.put_back_count = put_back_index;
- return put_back_token;
- } else {
- return self.tokenizer.next();
- }
- }
-
- const RenderAstFrame = struct {
- node: &ast.Node,
- indent: usize,
- };
-
- pub fn renderAst(self: &Parser, stream: var, root_node: &ast.NodeRoot) !void {
- var stack = self.initUtilityArrayList(RenderAstFrame);
- defer self.deinitUtilityArrayList(stack);
-
- try stack.append(RenderAstFrame {
- .node = &root_node.base,
- .indent = 0,
- });
-
- while (stack.popOrNull()) |frame| {
- {
- var i: usize = 0;
- while (i < frame.indent) : (i += 1) {
- try stream.print(" ");
- }
- }
- try stream.print("{}\n", @tagName(frame.node.id));
- var child_i: usize = 0;
- while (frame.node.iterate(child_i)) |child| : (child_i += 1) {
- try stack.append(RenderAstFrame {
- .node = child,
- .indent = frame.indent + 2,
- });
- }
- }
- }
-
- const RenderState = union(enum) {
- TopLevelDecl: &ast.Node,
- FnProtoRParen: &ast.NodeFnProto,
- ParamDecl: &ast.Node,
- Text: []const u8,
- Expression: &ast.Node,
- VarDecl: &ast.NodeVarDecl,
- Statement: &ast.Node,
- PrintIndent,
- Indent: usize,
- };
-
- pub fn renderSource(self: &Parser, stream: var, root_node: &ast.NodeRoot) !void {
- var stack = self.initUtilityArrayList(RenderState);
- defer self.deinitUtilityArrayList(stack);
-
- {
- try stack.append(RenderState { .Text = "\n"});
-
- var i = root_node.decls.len;
- while (i != 0) {
- i -= 1;
- const decl = root_node.decls.items[i];
- try stack.append(RenderState {.TopLevelDecl = decl});
- if (i != 0) {
- try stack.append(RenderState {
- .Text = blk: {
- const prev_node = root_node.decls.at(i - 1);
- const prev_line_index = prev_node.lastToken().line;
- const this_line_index = decl.firstToken().line;
- if (this_line_index - prev_line_index >= 2) {
- break :blk "\n\n";
- }
- break :blk "\n";
- },
- });
- }
- }
- }
-
- const indent_delta = 4;
- var indent: usize = 0;
- while (stack.popOrNull()) |state| {
- switch (state) {
- RenderState.TopLevelDecl => |decl| {
- switch (decl.id) {
- ast.Node.Id.FnProto => {
- const fn_proto = @fieldParentPtr(ast.NodeFnProto, "base", decl);
- if (fn_proto.visib_token) |visib_token| {
- switch (visib_token.id) {
- Token.Id.Keyword_pub => try stream.print("pub "),
- Token.Id.Keyword_export => try stream.print("export "),
- else => unreachable,
- }
- }
- if (fn_proto.extern_token) |extern_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(extern_token));
- }
- try stream.print("fn");
-
- if (fn_proto.name_token) |name_token| {
- try stream.print(" {}", self.tokenizer.getTokenSlice(name_token));
- }
-
- try stream.print("(");
-
- if (fn_proto.body_node == null) {
- try stack.append(RenderState { .Text = ";" });
- }
-
- try stack.append(RenderState { .FnProtoRParen = fn_proto});
- var i = fn_proto.params.len;
- while (i != 0) {
- i -= 1;
- const param_decl_node = fn_proto.params.items[i];
- try stack.append(RenderState { .ParamDecl = param_decl_node});
- if (i != 0) {
- try stack.append(RenderState { .Text = ", " });
- }
- }
- },
- ast.Node.Id.VarDecl => {
- const var_decl = @fieldParentPtr(ast.NodeVarDecl, "base", decl);
- try stack.append(RenderState { .VarDecl = var_decl});
- },
- ast.Node.Id.TestDecl => {
- const test_decl = @fieldParentPtr(ast.NodeTestDecl, "base", decl);
- try stream.print("test {} ", self.tokenizer.getTokenSlice(test_decl.name_token));
- try stack.append(RenderState { .Expression = test_decl.body_node });
- },
- else => unreachable,
- }
- },
-
- RenderState.VarDecl => |var_decl| {
- if (var_decl.visib_token) |visib_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(visib_token));
- }
- if (var_decl.extern_token) |extern_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(extern_token));
- if (var_decl.lib_name != null) {
- @panic("TODO");
- }
- }
- if (var_decl.comptime_token) |comptime_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(comptime_token));
- }
- try stream.print("{} ", self.tokenizer.getTokenSlice(var_decl.mut_token));
- try stream.print("{}", self.tokenizer.getTokenSlice(var_decl.name_token));
-
- try stack.append(RenderState { .Text = ";" });
- if (var_decl.init_node) |init_node| {
- try stack.append(RenderState { .Expression = init_node });
- try stack.append(RenderState { .Text = " = " });
- }
- if (var_decl.align_node) |align_node| {
- try stack.append(RenderState { .Text = ")" });
- try stack.append(RenderState { .Expression = align_node });
- try stack.append(RenderState { .Text = " align(" });
- }
- if (var_decl.type_node) |type_node| {
- try stream.print(": ");
- try stack.append(RenderState { .Expression = type_node });
- }
- },
-
- RenderState.ParamDecl => |base| {
- const param_decl = @fieldParentPtr(ast.NodeParamDecl, "base", base);
- if (param_decl.comptime_token) |comptime_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(comptime_token));
- }
- if (param_decl.noalias_token) |noalias_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(noalias_token));
- }
- if (param_decl.name_token) |name_token| {
- try stream.print("{}: ", self.tokenizer.getTokenSlice(name_token));
- }
- if (param_decl.var_args_token) |var_args_token| {
- try stream.print("{}", self.tokenizer.getTokenSlice(var_args_token));
- } else {
- try stack.append(RenderState { .Expression = param_decl.type_node});
- }
- },
- RenderState.Text => |bytes| {
- try stream.write(bytes);
- },
- RenderState.Expression => |base| switch (base.id) {
- ast.Node.Id.Identifier => {
- const identifier = @fieldParentPtr(ast.NodeIdentifier, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(identifier.name_token));
- },
- ast.Node.Id.Block => {
- const block = @fieldParentPtr(ast.NodeBlock, "base", base);
- if (block.statements.len == 0) {
- try stream.write("{}");
- } else {
- try stream.write("{");
- try stack.append(RenderState { .Text = "}"});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent});
- try stack.append(RenderState { .Text = "\n"});
- var i = block.statements.len;
- while (i != 0) {
- i -= 1;
- const statement_node = block.statements.items[i];
- try stack.append(RenderState { .Statement = statement_node});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent + indent_delta});
- try stack.append(RenderState {
- .Text = blk: {
- if (i != 0) {
- const prev_statement_node = block.statements.items[i - 1];
- const prev_line_index = prev_statement_node.lastToken().line;
- const this_line_index = statement_node.firstToken().line;
- if (this_line_index - prev_line_index >= 2) {
- break :blk "\n\n";
- }
- }
- break :blk "\n";
- },
- });
- }
- }
- },
- ast.Node.Id.InfixOp => {
- const prefix_op_node = @fieldParentPtr(ast.NodeInfixOp, "base", base);
- try stack.append(RenderState { .Expression = prefix_op_node.rhs });
- const text = switch (prefix_op_node.op) {
- ast.NodeInfixOp.InfixOp.Add => " + ",
- ast.NodeInfixOp.InfixOp.AddWrap => " +% ",
- ast.NodeInfixOp.InfixOp.ArrayCat => " ++ ",
- ast.NodeInfixOp.InfixOp.ArrayMult => " ** ",
- ast.NodeInfixOp.InfixOp.Assign => " = ",
- ast.NodeInfixOp.InfixOp.AssignBitAnd => " &= ",
- ast.NodeInfixOp.InfixOp.AssignBitOr => " |= ",
- ast.NodeInfixOp.InfixOp.AssignBitShiftLeft => " <<= ",
- ast.NodeInfixOp.InfixOp.AssignBitShiftRight => " >>= ",
- ast.NodeInfixOp.InfixOp.AssignBitXor => " ^= ",
- ast.NodeInfixOp.InfixOp.AssignDiv => " /= ",
- ast.NodeInfixOp.InfixOp.AssignMinus => " -= ",
- ast.NodeInfixOp.InfixOp.AssignMinusWrap => " -%= ",
- ast.NodeInfixOp.InfixOp.AssignMod => " %= ",
- ast.NodeInfixOp.InfixOp.AssignPlus => " += ",
- ast.NodeInfixOp.InfixOp.AssignPlusWrap => " +%= ",
- ast.NodeInfixOp.InfixOp.AssignTimes => " *= ",
- ast.NodeInfixOp.InfixOp.AssignTimesWarp => " *%= ",
- ast.NodeInfixOp.InfixOp.BangEqual => " != ",
- ast.NodeInfixOp.InfixOp.BitAnd => " & ",
- ast.NodeInfixOp.InfixOp.BitOr => " | ",
- ast.NodeInfixOp.InfixOp.BitShiftLeft => " << ",
- ast.NodeInfixOp.InfixOp.BitShiftRight => " >> ",
- ast.NodeInfixOp.InfixOp.BitXor => " ^ ",
- ast.NodeInfixOp.InfixOp.BoolAnd => " and ",
- ast.NodeInfixOp.InfixOp.BoolOr => " or ",
- ast.NodeInfixOp.InfixOp.Div => " / ",
- ast.NodeInfixOp.InfixOp.EqualEqual => " == ",
- ast.NodeInfixOp.InfixOp.ErrorUnion => "!",
- ast.NodeInfixOp.InfixOp.GreaterOrEqual => " >= ",
- ast.NodeInfixOp.InfixOp.GreaterThan => " > ",
- ast.NodeInfixOp.InfixOp.LessOrEqual => " <= ",
- ast.NodeInfixOp.InfixOp.LessThan => " < ",
- ast.NodeInfixOp.InfixOp.MergeErrorSets => " || ",
- ast.NodeInfixOp.InfixOp.Mod => " % ",
- ast.NodeInfixOp.InfixOp.Mult => " * ",
- ast.NodeInfixOp.InfixOp.MultWrap => " *% ",
- ast.NodeInfixOp.InfixOp.Period => ".",
- ast.NodeInfixOp.InfixOp.Sub => " - ",
- ast.NodeInfixOp.InfixOp.SubWrap => " -% ",
- ast.NodeInfixOp.InfixOp.UnwrapMaybe => " ?? ",
- };
-
- try stack.append(RenderState { .Text = text });
- try stack.append(RenderState { .Expression = prefix_op_node.lhs });
- },
- ast.Node.Id.PrefixOp => {
- const prefix_op_node = @fieldParentPtr(ast.NodePrefixOp, "base", base);
- try stack.append(RenderState { .Expression = prefix_op_node.rhs });
- switch (prefix_op_node.op) {
- ast.NodePrefixOp.PrefixOp.AddrOf => |addr_of_info| {
- try stream.write("&");
- if (addr_of_info.volatile_token != null) {
- try stack.append(RenderState { .Text = "volatile "});
- }
- if (addr_of_info.const_token != null) {
- try stack.append(RenderState { .Text = "const "});
- }
- if (addr_of_info.align_expr) |align_expr| {
- try stream.print("align(");
- try stack.append(RenderState { .Text = ") "});
- try stack.append(RenderState { .Expression = align_expr});
- }
- },
- ast.NodePrefixOp.PrefixOp.BitNot => try stream.write("~"),
- ast.NodePrefixOp.PrefixOp.BoolNot => try stream.write("!"),
- ast.NodePrefixOp.PrefixOp.Deref => try stream.write("*"),
- ast.NodePrefixOp.PrefixOp.Negation => try stream.write("-"),
- ast.NodePrefixOp.PrefixOp.NegationWrap => try stream.write("-%"),
- ast.NodePrefixOp.PrefixOp.Return => try stream.write("return "),
- ast.NodePrefixOp.PrefixOp.Try => try stream.write("try "),
- ast.NodePrefixOp.PrefixOp.UnwrapMaybe => try stream.write("??"),
- }
- },
- ast.Node.Id.IntegerLiteral => {
- const integer_literal = @fieldParentPtr(ast.NodeIntegerLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(integer_literal.token));
- },
- ast.Node.Id.FloatLiteral => {
- const float_literal = @fieldParentPtr(ast.NodeFloatLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(float_literal.token));
- },
- ast.Node.Id.StringLiteral => {
- const string_literal = @fieldParentPtr(ast.NodeStringLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(string_literal.token));
- },
- ast.Node.Id.UndefinedLiteral => {
- const undefined_literal = @fieldParentPtr(ast.NodeUndefinedLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(undefined_literal.token));
- },
- ast.Node.Id.BuiltinCall => {
- const builtin_call = @fieldParentPtr(ast.NodeBuiltinCall, "base", base);
- try stream.print("{}(", self.tokenizer.getTokenSlice(builtin_call.builtin_token));
- try stack.append(RenderState { .Text = ")"});
- var i = builtin_call.params.len;
- while (i != 0) {
- i -= 1;
- const param_node = builtin_call.params.at(i);
- try stack.append(RenderState { .Expression = param_node});
- if (i != 0) {
- try stack.append(RenderState { .Text = ", " });
- }
- }
- },
- ast.Node.Id.Call => {
- const call = @fieldParentPtr(ast.NodeCall, "base", base);
- try stack.append(RenderState { .Text = ")"});
- var i = call.params.len;
- while (i != 0) {
- i -= 1;
- const param_node = call.params.at(i);
- try stack.append(RenderState { .Expression = param_node});
- if (i != 0) {
- try stack.append(RenderState { .Text = ", " });
- }
- }
- try stack.append(RenderState { .Text = "("});
- try stack.append(RenderState { .Expression = call.callee });
- },
- ast.Node.Id.FnProto => @panic("TODO fn proto in an expression"),
- ast.Node.Id.LineComment => @panic("TODO render line comment in an expression"),
-
- ast.Node.Id.Root,
- ast.Node.Id.VarDecl,
- ast.Node.Id.TestDecl,
- ast.Node.Id.ParamDecl => unreachable,
- },
- RenderState.FnProtoRParen => |fn_proto| {
- try stream.print(")");
- if (fn_proto.align_expr != null) {
- @panic("TODO");
- }
- try stream.print(" ");
- if (fn_proto.body_node) |body_node| {
- try stack.append(RenderState { .Expression = body_node});
- try stack.append(RenderState { .Text = " "});
- }
- switch (fn_proto.return_type) {
- ast.NodeFnProto.ReturnType.Explicit => |node| {
- try stack.append(RenderState { .Expression = node});
- },
- ast.NodeFnProto.ReturnType.Infer => {
- try stream.print("var");
- },
- ast.NodeFnProto.ReturnType.InferErrorSet => |node| {
- try stream.print("!");
- try stack.append(RenderState { .Expression = node});
- },
- }
- },
- RenderState.Statement => |base| {
- if (base.comment) |comment| {
- for (comment.lines.toSliceConst()) |line_token| {
- try stream.print("{}\n", self.tokenizer.getTokenSlice(line_token));
- try stream.writeByteNTimes(' ', indent);
- }
- }
- switch (base.id) {
- ast.Node.Id.VarDecl => {
- const var_decl = @fieldParentPtr(ast.NodeVarDecl, "base", base);
- try stack.append(RenderState { .VarDecl = var_decl});
- },
- else => {
- try stack.append(RenderState { .Text = ";"});
- try stack.append(RenderState { .Expression = base});
- },
- }
- },
- RenderState.Indent => |new_indent| indent = new_indent,
- RenderState.PrintIndent => try stream.writeByteNTimes(' ', indent),
- }
- }
- }
-
- fn initUtilityArrayList(self: &Parser, comptime T: type) ArrayList(T) {
- const new_byte_count = self.utility_bytes.len - self.utility_bytes.len % @sizeOf(T);
- self.utility_bytes = self.util_allocator.alignedShrink(u8, utility_bytes_align, self.utility_bytes, new_byte_count);
- const typed_slice = ([]T)(self.utility_bytes);
- return ArrayList(T) {
- .allocator = self.util_allocator,
- .items = typed_slice,
- .len = 0,
- };
- }
-
- fn deinitUtilityArrayList(self: &Parser, list: var) void {
- self.utility_bytes = ([]align(utility_bytes_align) u8)(list.items);
- }
-
-};
-
-var fixed_buffer_mem: [100 * 1024]u8 = undefined;
-
-fn testParse(source: []const u8, allocator: &mem.Allocator) ![]u8 {
- var tokenizer = Tokenizer.init(source);
- var parser = Parser.init(&tokenizer, allocator, "(memory buffer)");
- defer parser.deinit();
-
- var tree = try parser.parse();
- defer tree.deinit();
-
- var buffer = try std.Buffer.initSize(allocator, 0);
- errdefer buffer.deinit();
-
- var buffer_out_stream = io.BufferOutStream.init(&buffer);
- try parser.renderSource(&buffer_out_stream.stream, tree.root_node);
- return buffer.toOwnedSlice();
-}
-
-fn testCanonical(source: []const u8) !void {
- const needed_alloc_count = x: {
- // Try it once with unlimited memory, make sure it works
- var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
- var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, @maxValue(usize));
- const result_source = try testParse(source, &failing_allocator.allocator);
- if (!mem.eql(u8, result_source, source)) {
- warn("\n====== expected this output: =========\n");
- warn("{}", source);
- warn("\n======== instead found this: =========\n");
- warn("{}", result_source);
- warn("\n======================================\n");
- return error.TestFailed;
- }
- failing_allocator.allocator.free(result_source);
- break :x failing_allocator.index;
- };
-
- var fail_index: usize = 0;
- while (fail_index < needed_alloc_count) : (fail_index += 1) {
- var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
- var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, fail_index);
- if (testParse(source, &failing_allocator.allocator)) |_| {
- return error.NondeterministicMemoryUsage;
- } else |err| switch (err) {
- error.OutOfMemory => {
- if (failing_allocator.allocated_bytes != failing_allocator.freed_bytes) {
- warn("\nfail_index: {}/{}\nallocated bytes: {}\nfreed bytes: {}\nallocations: {}\ndeallocations: {}\n",
- fail_index, needed_alloc_count,
- failing_allocator.allocated_bytes, failing_allocator.freed_bytes,
- failing_allocator.index, failing_allocator.deallocations);
- return error.MemoryLeakDetected;
- }
- },
- error.ParseError => @panic("test failed"),
- }
- }
-}
-
-test "zig fmt" {
- try testCanonical(
- \\const std = @import("std");
- \\
- \\pub fn main() !void {
- \\ // If this program is run without stdout attached, exit with an error.
- \\ // another comment
- \\ var stdout_file = try std.io.getStdOut;
- \\}
- \\
- );
-
- try testCanonical(
- \\const std = @import("std");
- \\
- \\pub fn main() !void {
- \\ var stdout_file = try std.io.getStdOut;
- \\ var stdout_file = try std.io.getStdOut;
- \\
- \\ var stdout_file = try std.io.getStdOut;
- \\ var stdout_file = try std.io.getStdOut;
- \\}
- \\
- );
-
- try testCanonical(
- \\pub fn main() !void {}
- \\pub fn main() var {}
- \\pub fn main() i32 {}
- \\
- );
-
- try testCanonical(
- \\const std = @import("std");
- \\const std = @import();
- \\
- );
-
- try testCanonical(
- \\extern fn puts(s: &const u8) c_int;
- \\
- );
-
- try testCanonical(
- \\const a = b;
- \\pub const a = b;
- \\var a = b;
- \\pub var a = b;
- \\const a: i32 = b;
- \\pub const a: i32 = b;
- \\var a: i32 = b;
- \\pub var a: i32 = b;
- \\
- );
-
- try testCanonical(
- \\extern var foo: c_int;
- \\
- );
-
- try testCanonical(
- \\var foo: c_int align(1);
- \\
- );
-
- try testCanonical(
- \\fn main(argc: c_int, argv: &&u8) c_int {
- \\ const a = b;
- \\}
- \\
- );
-
- try testCanonical(
- \\fn foo(argc: c_int, argv: &&u8) c_int {
- \\ return 0;
- \\}
- \\
- );
-
- try testCanonical(
- \\extern fn f1(s: &align(&u8) u8) c_int;
- \\
- );
-
- try testCanonical(
- \\extern fn f1(s: &&align(1) &const &volatile u8) c_int;
- \\extern fn f2(s: &align(1) const &align(1) volatile &const volatile u8) c_int;
- \\extern fn f3(s: &align(1) const volatile u8) c_int;
- \\
- );
-
- try testCanonical(
- \\fn f1(a: bool, b: bool) bool {
- \\ a != b;
- \\ return a == b;
- \\}
- \\
- );
-
- try testCanonical(
- \\test "test name" {
- \\ const a = 1;
- \\ var b = 1;
- \\}
- \\
- );
-
- try testCanonical(
- \\test "infix operators" {
- \\ var i = undefined;
- \\ i = 2;
- \\ i *= 2;
- \\ i |= 2;
- \\ i ^= 2;
- \\ i <<= 2;
- \\ i >>= 2;
- \\ i &= 2;
- \\ i *= 2;
- \\ i *%= 2;
- \\ i -= 2;
- \\ i -%= 2;
- \\ i += 2;
- \\ i +%= 2;
- \\ i /= 2;
- \\ i %= 2;
- \\ _ = i == i;
- \\ _ = i != i;
- \\ _ = i != i;
- \\ _ = i.i;
- \\ _ = i || i;
- \\ _ = i!i;
- \\ _ = i ** i;
- \\ _ = i ++ i;
- \\ _ = i ?? i;
- \\ _ = i % i;
- \\ _ = i / i;
- \\ _ = i *% i;
- \\ _ = i * i;
- \\ _ = i -% i;
- \\ _ = i - i;
- \\ _ = i +% i;
- \\ _ = i + i;
- \\ _ = i << i;
- \\ _ = i >> i;
- \\ _ = i & i;
- \\ _ = i ^ i;
- \\ _ = i | i;
- \\ _ = i >= i;
- \\ _ = i <= i;
- \\ _ = i > i;
- \\ _ = i < i;
- \\ _ = i and i;
- \\ _ = i or i;
- \\}
- \\
- );
-
- try testCanonical(
- \\test "prefix operators" {
- \\ --%~??!*&0;
- \\}
- \\
- );
-
- try testCanonical(
- \\test "test calls" {
- \\ a();
- \\ a(1);
- \\ a(1, 2);
- \\ a(1, 2) + a(1, 2);
- \\}
- \\
- );
-}
diff --git a/std/zig/parser_test.zig b/std/zig/parser_test.zig
new file mode 100644
index 0000000000..29b231a4db
--- /dev/null
+++ b/std/zig/parser_test.zig
@@ -0,0 +1,1183 @@
+test "zig fmt: same-line comment after a statement" {
+ try testCanonical(
+ \\test "" {
+ \\ a = b;
+ \\ debug.assert(H.digest_size <= H.block_size); // HMAC makes this assumption
+ \\ a = b;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: same-line comment after var decl in struct" {
+ try testCanonical(
+ \\pub const vfs_cap_data = extern struct {
+ \\ const Data = struct {}; // when on disk.
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: same-line comment after field decl" {
+ try testCanonical(
+ \\pub const dirent = extern struct {
+ \\ d_name: u8,
+ \\ d_name: u8, // comment 1
+ \\ d_name: u8,
+ \\ d_name: u8, // comment 2
+ \\ d_name: u8,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: same-line comment after switch prong" {
+ try testCanonical(
+ \\test "" {
+ \\ switch (err) {
+ \\ error.PathAlreadyExists => {}, // comment 2
+ \\ else => return err, // comment 1
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: same-line comment after non-block if expression" {
+ try testCanonical(
+ \\comptime {
+ \\ if (sr > n_uword_bits - 1) // d > r
+ \\ return 0;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: same-line comment on comptime expression" {
+ try testCanonical(
+ \\test "" {
+ \\ comptime assert(@typeId(T) == builtin.TypeId.Int); // must pass an integer to absInt
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: switch with empty body" {
+ try testCanonical(
+ \\test "" {
+ \\ foo() catch |err| switch (err) {};
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: float literal with exponent" {
+ try testCanonical(
+ \\pub const f64_true_min = 4.94065645841246544177e-324;
+ \\
+ );
+}
+
+test "zig fmt: line comments in struct initializer" {
+ try testCanonical(
+ \\fn foo() void {
+ \\ return Self{
+ \\ .a = b,
+ \\
+ \\ // Initialize these two fields to buffer_size so that
+ \\ // in `readFn` we treat the state as being able to read
+ \\ .start_index = buffer_size,
+ \\ .end_index = buffer_size,
+ \\
+ \\ // middle
+ \\
+ \\ .a = b,
+ \\
+ \\ // end
+ \\ };
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: doc comments before struct field" {
+ try testCanonical(
+ \\pub const Allocator = struct {
+ \\ /// Allocate byte_count bytes and return them in a slice, with the
+ \\ /// slice's pointer aligned at least to alignment bytes.
+ \\ allocFn: fn() void,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: error set declaration" {
+ try testCanonical(
+ \\const E = error{
+ \\ A,
+ \\ B,
+ \\
+ \\ C,
+ \\};
+ \\
+ \\const Error = error{
+ \\ /// no more memory
+ \\ OutOfMemory,
+ \\};
+ \\
+ \\const Error = error{
+ \\ /// no more memory
+ \\ OutOfMemory,
+ \\
+ \\ /// another
+ \\ Another,
+ \\
+ \\ // end
+ \\};
+ \\
+ \\const Error = error{OutOfMemory};
+ \\const Error = error{};
+ \\
+ );
+}
+
+test "zig fmt: union(enum(u32)) with assigned enum values" {
+ try testCanonical(
+ \\const MultipleChoice = union(enum(u32)) {
+ \\ A = 20,
+ \\ B = 40,
+ \\ C = 60,
+ \\ D = 1000,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: labeled suspend" {
+ try testCanonical(
+ \\fn foo() void {
+ \\ s: suspend |p| {
+ \\ break :s;
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: comments before error set decl" {
+ try testCanonical(
+ \\const UnexpectedError = error{
+ \\ /// The Operating System returned an undocumented error code.
+ \\ Unexpected,
+ \\ // another
+ \\ Another,
+ \\
+ \\ // in between
+ \\
+ \\ // at end
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: comments before switch prong" {
+ try testCanonical(
+ \\test "" {
+ \\ switch (err) {
+ \\ error.PathAlreadyExists => continue,
+ \\
+ \\ // comment 1
+ \\
+ \\ // comment 2
+ \\ else => return err,
+ \\ // at end
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: comments before var decl in struct" {
+ try testCanonical(
+ \\pub const vfs_cap_data = extern struct {
+ \\ // All of these are mandated as little endian
+ \\ // when on disk.
+ \\ const Data = struct {
+ \\ permitted: u32,
+ \\ inheritable: u32,
+ \\ };
+ \\
+ \\ // in between
+ \\
+ \\ /// All of these are mandated as little endian
+ \\ /// when on disk.
+ \\ const Data = struct {
+ \\ permitted: u32,
+ \\ inheritable: u32,
+ \\ };
+ \\
+ \\ // at end
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: array literal with 1 item on 1 line" {
+ try testCanonical(
+ \\var s = []const u64{0} ** 25;
+ \\
+ );
+}
+
+test "zig fmt: comments before global variables" {
+ try testCanonical(
+ \\/// Foo copies keys and values before they go into the map, and
+ \\/// frees them when they get removed.
+ \\pub const Foo = struct {};
+ \\
+ );
+}
+
+test "zig fmt: comments in statements" {
+ try testCanonical(
+ \\test "std" {
+ \\ // statement comment
+ \\ _ = @import("foo/bar.zig");
+ \\
+ \\ // middle
+ \\ // middle2
+ \\
+ \\ // end
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: comments before test decl" {
+ try testCanonical(
+ \\/// top level doc comment
+ \\test "hi" {}
+ \\
+ \\// top level normal comment
+ \\test "hi" {}
+ \\
+ \\// middle
+ \\
+ \\// end
+ \\
+ );
+}
+
+test "zig fmt: preserve spacing" {
+ try testCanonical(
+ \\const std = @import("std");
+ \\
+ \\pub fn main() !void {
+ \\ var stdout_file = try std.io.getStdOut;
+ \\ var stdout_file = try std.io.getStdOut;
+ \\
+ \\ var stdout_file = try std.io.getStdOut;
+ \\ var stdout_file = try std.io.getStdOut;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: return types" {
+ try testCanonical(
+ \\pub fn main() !void {}
+ \\pub fn main() var {}
+ \\pub fn main() i32 {}
+ \\
+ );
+}
+
+test "zig fmt: imports" {
+ try testCanonical(
+ \\const std = @import("std");
+ \\const std = @import();
+ \\
+ );
+}
+
+test "zig fmt: global declarations" {
+ try testCanonical(
+ \\const a = b;
+ \\pub const a = b;
+ \\var a = b;
+ \\pub var a = b;
+ \\const a: i32 = b;
+ \\pub const a: i32 = b;
+ \\var a: i32 = b;
+ \\pub var a: i32 = b;
+ \\extern const a: i32 = b;
+ \\pub extern const a: i32 = b;
+ \\extern var a: i32 = b;
+ \\pub extern var a: i32 = b;
+ \\extern "a" const a: i32 = b;
+ \\pub extern "a" const a: i32 = b;
+ \\extern "a" var a: i32 = b;
+ \\pub extern "a" var a: i32 = b;
+ \\
+ );
+}
+
+test "zig fmt: extern declaration" {
+ try testCanonical(
+ \\extern var foo: c_int;
+ \\
+ );
+}
+
+test "zig fmt: alignment" {
+ try testCanonical(
+ \\var foo: c_int align(1);
+ \\
+ );
+}
+
+test "zig fmt: C main" {
+ try testCanonical(
+ \\fn main(argc: c_int, argv: &&u8) c_int {
+ \\ const a = b;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: return" {
+ try testCanonical(
+ \\fn foo(argc: c_int, argv: &&u8) c_int {
+ \\ return 0;
+ \\}
+ \\
+ \\fn bar() void {
+ \\ return;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: pointer attributes" {
+ try testCanonical(
+ \\extern fn f1(s: &align(&u8) u8) c_int;
+ \\extern fn f2(s: &&align(1) &const &volatile u8) c_int;
+ \\extern fn f3(s: &align(1) const &align(1) volatile &const volatile u8) c_int;
+ \\extern fn f4(s: &align(1) const volatile u8) c_int;
+ \\
+ );
+}
+
+test "zig fmt: slice attributes" {
+ try testCanonical(
+ \\extern fn f1(s: &align(&u8) u8) c_int;
+ \\extern fn f2(s: &&align(1) &const &volatile u8) c_int;
+ \\extern fn f3(s: &align(1) const &align(1) volatile &const volatile u8) c_int;
+ \\extern fn f4(s: &align(1) const volatile u8) c_int;
+ \\
+ );
+}
+
+test "zig fmt: test declaration" {
+ try testCanonical(
+ \\test "test name" {
+ \\ const a = 1;
+ \\ var b = 1;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: infix operators" {
+ try testCanonical(
+ \\test "infix operators" {
+ \\ var i = undefined;
+ \\ i = 2;
+ \\ i *= 2;
+ \\ i |= 2;
+ \\ i ^= 2;
+ \\ i <<= 2;
+ \\ i >>= 2;
+ \\ i &= 2;
+ \\ i *= 2;
+ \\ i *%= 2;
+ \\ i -= 2;
+ \\ i -%= 2;
+ \\ i += 2;
+ \\ i +%= 2;
+ \\ i /= 2;
+ \\ i %= 2;
+ \\ _ = i == i;
+ \\ _ = i != i;
+ \\ _ = i != i;
+ \\ _ = i.i;
+ \\ _ = i || i;
+ \\ _ = i!i;
+ \\ _ = i ** i;
+ \\ _ = i ++ i;
+ \\ _ = i ?? i;
+ \\ _ = i % i;
+ \\ _ = i / i;
+ \\ _ = i *% i;
+ \\ _ = i * i;
+ \\ _ = i -% i;
+ \\ _ = i - i;
+ \\ _ = i +% i;
+ \\ _ = i + i;
+ \\ _ = i << i;
+ \\ _ = i >> i;
+ \\ _ = i & i;
+ \\ _ = i ^ i;
+ \\ _ = i | i;
+ \\ _ = i >= i;
+ \\ _ = i <= i;
+ \\ _ = i > i;
+ \\ _ = i < i;
+ \\ _ = i and i;
+ \\ _ = i or i;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: precedence" {
+ try testCanonical(
+ \\test "precedence" {
+ \\ a!b();
+ \\ (a!b)();
+ \\ !a!b;
+ \\ !(a!b);
+ \\ !a{};
+ \\ !(a{});
+ \\ a + b{};
+ \\ (a + b){};
+ \\ a << b + c;
+ \\ (a << b) + c;
+ \\ a & b << c;
+ \\ (a & b) << c;
+ \\ a ^ b & c;
+ \\ (a ^ b) & c;
+ \\ a | b ^ c;
+ \\ (a | b) ^ c;
+ \\ a == b | c;
+ \\ (a == b) | c;
+ \\ a and b == c;
+ \\ (a and b) == c;
+ \\ a or b and c;
+ \\ (a or b) and c;
+ \\ (a or b) and c;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: prefix operators" {
+ try testCanonical(
+ \\test "prefix operators" {
+ \\ try return --%~??!*&0;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: call expression" {
+ try testCanonical(
+ \\test "test calls" {
+ \\ a();
+ \\ a(1);
+ \\ a(1, 2);
+ \\ a(1, 2) + a(1, 2);
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: var args" {
+ try testCanonical(
+ \\fn print(args: ...) void {}
+ \\
+ );
+}
+
+test "zig fmt: var type" {
+ try testCanonical(
+ \\fn print(args: var) var {}
+ \\const Var = var;
+ \\const i: var = 0;
+ \\
+ );
+}
+
+test "zig fmt: functions" {
+ try testCanonical(
+ \\extern fn puts(s: &const u8) c_int;
+ \\extern "c" fn puts(s: &const u8) c_int;
+ \\export fn puts(s: &const u8) c_int;
+ \\inline fn puts(s: &const u8) c_int;
+ \\pub extern fn puts(s: &const u8) c_int;
+ \\pub extern "c" fn puts(s: &const u8) c_int;
+ \\pub export fn puts(s: &const u8) c_int;
+ \\pub inline fn puts(s: &const u8) c_int;
+ \\pub extern fn puts(s: &const u8) align(2 + 2) c_int;
+ \\pub extern "c" fn puts(s: &const u8) align(2 + 2) c_int;
+ \\pub export fn puts(s: &const u8) align(2 + 2) c_int;
+ \\pub inline fn puts(s: &const u8) align(2 + 2) c_int;
+ \\
+ );
+}
+
+test "zig fmt: multiline string" {
+ try testCanonical(
+ \\test "" {
+ \\ const s1 =
+ \\ \\one
+ \\ \\two)
+ \\ \\three
+ \\ ;
+ \\ const s2 =
+ \\ c\\one
+ \\ c\\two)
+ \\ c\\three
+ \\ ;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: values" {
+ try testCanonical(
+ \\test "values" {
+ \\ 1;
+ \\ 1.0;
+ \\ "string";
+ \\ c"cstring";
+ \\ 'c';
+ \\ true;
+ \\ false;
+ \\ null;
+ \\ undefined;
+ \\ error;
+ \\ this;
+ \\ unreachable;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: indexing" {
+ try testCanonical(
+ \\test "test index" {
+ \\ a[0];
+ \\ a[0 + 5];
+ \\ a[0..];
+ \\ a[0..5];
+ \\ a[a[0]];
+ \\ a[a[0..]];
+ \\ a[a[0..5]];
+ \\ a[a[0]..];
+ \\ a[a[0..5]..];
+ \\ a[a[0]..a[0]];
+ \\ a[a[0..5]..a[0]];
+ \\ a[a[0..5]..a[0..5]];
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: struct declaration" {
+ try testCanonical(
+ \\const S = struct {
+ \\ const Self = this;
+ \\ f1: u8,
+ \\ pub f3: u8,
+ \\
+ \\ fn method(self: &Self) Self {
+ \\ return *self;
+ \\ }
+ \\
+ \\ f2: u8,
+ \\};
+ \\
+ \\const Ps = packed struct {
+ \\ a: u8,
+ \\ pub b: u8,
+ \\
+ \\ c: u8,
+ \\};
+ \\
+ \\const Es = extern struct {
+ \\ a: u8,
+ \\ pub b: u8,
+ \\
+ \\ c: u8,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: enum declaration" {
+ try testCanonical(
+ \\const E = enum {
+ \\ Ok,
+ \\ SomethingElse = 0,
+ \\};
+ \\
+ \\const E2 = enum(u8) {
+ \\ Ok,
+ \\ SomethingElse = 255,
+ \\ SomethingThird,
+ \\};
+ \\
+ \\const Ee = extern enum {
+ \\ Ok,
+ \\ SomethingElse,
+ \\ SomethingThird,
+ \\};
+ \\
+ \\const Ep = packed enum {
+ \\ Ok,
+ \\ SomethingElse,
+ \\ SomethingThird,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: union declaration" {
+ try testCanonical(
+ \\const U = union {
+ \\ Int: u8,
+ \\ Float: f32,
+ \\ None,
+ \\ Bool: bool,
+ \\};
+ \\
+ \\const Ue = union(enum) {
+ \\ Int: u8,
+ \\ Float: f32,
+ \\ None,
+ \\ Bool: bool,
+ \\};
+ \\
+ \\const E = enum {
+ \\ Int,
+ \\ Float,
+ \\ None,
+ \\ Bool,
+ \\};
+ \\
+ \\const Ue2 = union(E) {
+ \\ Int: u8,
+ \\ Float: f32,
+ \\ None,
+ \\ Bool: bool,
+ \\};
+ \\
+ \\const Eu = extern union {
+ \\ Int: u8,
+ \\ Float: f32,
+ \\ None,
+ \\ Bool: bool,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: arrays" {
+ try testCanonical(
+ \\test "test array" {
+ \\ const a: [2]u8 = [2]u8{
+ \\ 1,
+ \\ 2,
+ \\ };
+ \\ const a: [2]u8 = []u8{
+ \\ 1,
+ \\ 2,
+ \\ };
+ \\ const a: [0]u8 = []u8{};
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: container initializers" {
+ try testCanonical(
+ \\const a0 = []u8{};
+ \\const a1 = []u8{1};
+ \\const a2 = []u8{
+ \\ 1,
+ \\ 2,
+ \\ 3,
+ \\ 4,
+ \\};
+ \\const s0 = S{};
+ \\const s1 = S{ .a = 1 };
+ \\const s2 = S{
+ \\ .a = 1,
+ \\ .b = 2,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: catch" {
+ try testCanonical(
+ \\test "catch" {
+ \\ const a: error!u8 = 0;
+ \\ _ = a catch return;
+ \\ _ = a catch |err| return;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: blocks" {
+ try testCanonical(
+ \\test "blocks" {
+ \\ {
+ \\ const a = 0;
+ \\ const b = 0;
+ \\ }
+ \\
+ \\ blk: {
+ \\ const a = 0;
+ \\ const b = 0;
+ \\ }
+ \\
+ \\ const r = blk: {
+ \\ const a = 0;
+ \\ const b = 0;
+ \\ };
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: switch" {
+ try testCanonical(
+ \\test "switch" {
+ \\ switch (0) {
+ \\ 0 => {},
+ \\ 1 => unreachable,
+ \\ 2,
+ \\ 3 => {},
+ \\ 4 ... 7 => {},
+ \\ 1 + 4 * 3 + 22 => {},
+ \\ else => {
+ \\ const a = 1;
+ \\ const b = a;
+ \\ },
+ \\ }
+ \\
+ \\ const res = switch (0) {
+ \\ 0 => 0,
+ \\ 1 => 2,
+ \\ 1 => a = 4,
+ \\ else => 4,
+ \\ };
+ \\
+ \\ const Union = union(enum) {
+ \\ Int: i64,
+ \\ Float: f64,
+ \\ };
+ \\
+ \\ switch (u) {
+ \\ Union.Int => |int| {},
+ \\ Union.Float => |*float| unreachable,
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: while" {
+ try testCanonical(
+ \\test "while" {
+ \\ while (10 < 1) {
+ \\ unreachable;
+ \\ }
+ \\
+ \\ while (10 < 1)
+ \\ unreachable;
+ \\
+ \\ var i: usize = 0;
+ \\ while (i < 10) : (i += 1) {
+ \\ continue;
+ \\ }
+ \\
+ \\ i = 0;
+ \\ while (i < 10) : (i += 1)
+ \\ continue;
+ \\
+ \\ i = 0;
+ \\ var j: usize = 0;
+ \\ while (i < 10) : ({
+ \\ i += 1;
+ \\ j += 1;
+ \\ }) {
+ \\ continue;
+ \\ }
+ \\
+ \\ var a: ?u8 = 2;
+ \\ while (a) |v| : (a = null) {
+ \\ continue;
+ \\ }
+ \\
+ \\ while (a) |v| : (a = null)
+ \\ unreachable;
+ \\
+ \\ label: while (10 < 0) {
+ \\ unreachable;
+ \\ }
+ \\
+ \\ const res = while (0 < 10) {
+ \\ break 7;
+ \\ } else {
+ \\ unreachable;
+ \\ };
+ \\
+ \\ const res = while (0 < 10)
+ \\ break 7
+ \\ else
+ \\ unreachable;
+ \\
+ \\ var a: error!u8 = 0;
+ \\ while (a) |v| {
+ \\ a = error.Err;
+ \\ } else |err| {
+ \\ i = 1;
+ \\ }
+ \\
+ \\ comptime var k: usize = 0;
+ \\ inline while (i < 10) : (i += 1)
+ \\ j += 2;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: for" {
+ try testCanonical(
+ \\test "for" {
+ \\ for (a) |v| {
+ \\ continue;
+ \\ }
+ \\
+ \\ for (a) |v|
+ \\ continue;
+ \\
+ \\ for (a) |*v|
+ \\ continue;
+ \\
+ \\ for (a) |v, i| {
+ \\ continue;
+ \\ }
+ \\
+ \\ for (a) |v, i|
+ \\ continue;
+ \\
+ \\ const res = for (a) |v, i| {
+ \\ break v;
+ \\ } else {
+ \\ unreachable;
+ \\ };
+ \\
+ \\ var num: usize = 0;
+ \\ inline for (a) |v, i| {
+ \\ num += v;
+ \\ num += i;
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: if" {
+ try testCanonical(
+ \\test "if" {
+ \\ if (10 < 0) {
+ \\ unreachable;
+ \\ }
+ \\
+ \\ if (10 < 0) unreachable;
+ \\
+ \\ if (10 < 0) {
+ \\ unreachable;
+ \\ } else {
+ \\ const a = 20;
+ \\ }
+ \\
+ \\ if (10 < 0) {
+ \\ unreachable;
+ \\ } else if (5 < 0) {
+ \\ unreachable;
+ \\ } else {
+ \\ const a = 20;
+ \\ }
+ \\
+ \\ const is_world_broken = if (10 < 0) true else false;
+ \\ const some_number = 1 + if (10 < 0) 2 else 3;
+ \\
+ \\ const a: ?u8 = 10;
+ \\ const b: ?u8 = null;
+ \\ if (a) |v| {
+ \\ const some = v;
+ \\ } else if (b) |*v| {
+ \\ unreachable;
+ \\ } else {
+ \\ const some = 10;
+ \\ }
+ \\
+ \\ const non_null_a = if (a) |v| v else 0;
+ \\
+ \\ const a_err: error!u8 = 0;
+ \\ if (a_err) |v| {
+ \\ const p = v;
+ \\ } else |err| {
+ \\ unreachable;
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: defer" {
+ try testCanonical(
+ \\test "defer" {
+ \\ var i: usize = 0;
+ \\ defer i = 1;
+ \\ defer {
+ \\ i += 2;
+ \\ i *= i;
+ \\ }
+ \\
+ \\ errdefer i += 3;
+ \\ errdefer {
+ \\ i += 2;
+ \\ i /= i;
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: comptime" {
+ try testCanonical(
+ \\fn a() u8 {
+ \\ return 5;
+ \\}
+ \\
+ \\fn b(comptime i: u8) u8 {
+ \\ return i;
+ \\}
+ \\
+ \\const av = comptime a();
+ \\const av2 = comptime blk: {
+ \\ var res = a();
+ \\ res *= b(2);
+ \\ break :blk res;
+ \\};
+ \\
+ \\comptime {
+ \\ _ = a();
+ \\}
+ \\
+ \\test "comptime" {
+ \\ const av3 = comptime a();
+ \\ const av4 = comptime blk: {
+ \\ var res = a();
+ \\ res *= a();
+ \\ break :blk res;
+ \\ };
+ \\
+ \\ comptime var i = 0;
+ \\ comptime {
+ \\ i = a();
+ \\ i += b(i);
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: fn type" {
+ try testCanonical(
+ \\fn a(i: u8) u8 {
+ \\ return i + 1;
+ \\}
+ \\
+ \\const a: fn(u8) u8 = undefined;
+ \\const b: extern fn(u8) u8 = undefined;
+ \\const c: nakedcc fn(u8) u8 = undefined;
+ \\const ap: fn(u8) u8 = a;
+ \\
+ );
+}
+
+test "zig fmt: inline asm" {
+ try testCanonical(
+ \\pub fn syscall1(number: usize, arg1: usize) usize {
+ \\ return asm volatile ("syscall"
+ \\ : [ret] "={rax}" (-> usize)
+ \\ : [number] "{rax}" (number),
+ \\ [arg1] "{rdi}" (arg1)
+ \\ : "rcx", "r11");
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: coroutines" {
+ try testCanonical(
+ \\async fn simpleAsyncFn() void {
+ \\ const a = async a.b();
+ \\ x += 1;
+ \\ suspend;
+ \\ x += 1;
+ \\ suspend |p| {}
+ \\ const p: promise->void = async simpleAsyncFn() catch unreachable;
+ \\ await p;
+ \\}
+ \\
+ \\test "coroutine suspend, resume, cancel" {
+ \\ const p: promise = try async<std.debug.global_allocator> testAsyncSeq();
+ \\ resume p;
+ \\ cancel p;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: Block after if" {
+ try testCanonical(
+ \\test "Block after if" {
+ \\ if (true) {
+ \\ const a = 0;
+ \\ }
+ \\
+ \\ {
+ \\ const a = 0;
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: use" {
+ try testCanonical(
+ \\use @import("std");
+ \\pub use @import("std");
+ \\
+ );
+}
+
+test "zig fmt: string identifier" {
+ try testCanonical(
+ \\const @"a b" = @"c d".@"e f";
+ \\fn @"g h"() void {}
+ \\
+ );
+}
+
+test "zig fmt: error return" {
+ try testCanonical(
+ \\fn err() error {
+ \\ call();
+ \\ return error.InvalidArgs;
+ \\}
+ \\
+ );
+}
+
+const std = @import("std");
+const mem = std.mem;
+const warn = std.debug.warn;
+const io = std.io;
+
+var fixed_buffer_mem: [100 * 1024]u8 = undefined;
+
+fn testParse(source: []const u8, allocator: &mem.Allocator) ![]u8 {
+ var stderr_file = try io.getStdErr();
+ var stderr = &io.FileOutStream.init(&stderr_file).stream;
+
+ var tree = try std.zig.parse(allocator, source);
+ defer tree.deinit();
+
+ var error_it = tree.errors.iterator(0);
+ while (error_it.next()) |parse_error| {
+ const token = tree.tokens.at(parse_error.loc());
+ const loc = tree.tokenLocation(0, parse_error.loc());
+ try stderr.print("(memory buffer):{}:{}: error: ", loc.line + 1, loc.column + 1);
+ try tree.renderError(parse_error, stderr);
+ try stderr.print("\n{}\n", source[loc.line_start..loc.line_end]);
+ {
+ var i: usize = 0;
+ while (i < loc.column) : (i += 1) {
+ try stderr.write(" ");
+ }
+ }
+ {
+ const caret_count = token.end - token.start;
+ var i: usize = 0;
+ while (i < caret_count) : (i += 1) {
+ try stderr.write("~");
+ }
+ }
+ try stderr.write("\n");
+ }
+ if (tree.errors.len != 0) {
+ return error.ParseError;
+ }
+
+ var buffer = try std.Buffer.initSize(allocator, 0);
+ errdefer buffer.deinit();
+
+ var buffer_out_stream = io.BufferOutStream.init(&buffer);
+ try std.zig.render(allocator, &buffer_out_stream.stream, &tree);
+ return buffer.toOwnedSlice();
+}
+
+fn testTransform(source: []const u8, expected_source: []const u8) !void {
+ const needed_alloc_count = x: {
+ // Try it once with unlimited memory, make sure it works
+ var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
+ var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, @maxValue(usize));
+ const result_source = try testParse(source, &failing_allocator.allocator);
+ if (!mem.eql(u8, result_source, expected_source)) {
+ warn("\n====== expected this output: =========\n");
+ warn("{}", expected_source);
+ warn("\n======== instead found this: =========\n");
+ warn("{}", result_source);
+ warn("\n======================================\n");
+ return error.TestFailed;
+ }
+ failing_allocator.allocator.free(result_source);
+ break :x failing_allocator.index;
+ };
+
+ var fail_index: usize = 0;
+ while (fail_index < needed_alloc_count) : (fail_index += 1) {
+ var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
+ var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, fail_index);
+ if (testParse(source, &failing_allocator.allocator)) |_| {
+ return error.NondeterministicMemoryUsage;
+ } else |err| switch (err) {
+ error.OutOfMemory => {
+ if (failing_allocator.allocated_bytes != failing_allocator.freed_bytes) {
+ warn("\nfail_index: {}/{}\nallocated bytes: {}\nfreed bytes: {}\nallocations: {}\ndeallocations: {}\n",
+ fail_index, needed_alloc_count,
+ failing_allocator.allocated_bytes, failing_allocator.freed_bytes,
+ failing_allocator.index, failing_allocator.deallocations);
+ return error.MemoryLeakDetected;
+ }
+ },
+ error.ParseError => @panic("test failed"),
+ else => @panic("test failed"),
+ }
+ }
+}
+
+fn testCanonical(source: []const u8) !void {
+ return testTransform(source, source);
+}
+
diff --git a/std/zig/render.zig b/std/zig/render.zig
new file mode 100644
index 0000000000..cced30cd60
--- /dev/null
+++ b/std/zig/render.zig
@@ -0,0 +1,1270 @@
+const std = @import("../index.zig");
+const assert = std.debug.assert;
+const mem = std.mem;
+const ast = std.zig.ast;
+const Token = std.zig.Token;
+
+const RenderState = union(enum) {
+ TopLevelDecl: &ast.Node,
+ ParamDecl: &ast.Node,
+ Text: []const u8,
+ Expression: &ast.Node,
+ VarDecl: &ast.Node.VarDecl,
+ Statement: &ast.Node,
+ PrintIndent,
+ Indent: usize,
+ MaybeSemiColon: &ast.Node,
+ Token: ast.TokenIndex,
+ NonBreakToken: ast.TokenIndex,
+};
+
+const indent_delta = 4;
+
+pub fn render(allocator: &mem.Allocator, stream: var, tree: &ast.Tree) !void {
+ var stack = std.ArrayList(RenderState).init(allocator);
+ defer stack.deinit();
+
+ {
+ try stack.append(RenderState { .Text = "\n"});
+
+ var i = tree.root_node.decls.len;
+ while (i != 0) {
+ i -= 1;
+ const decl = *tree.root_node.decls.at(i);
+ try stack.append(RenderState {.TopLevelDecl = decl});
+ if (i != 0) {
+ try stack.append(RenderState {
+ .Text = blk: {
+ const prev_node = *tree.root_node.decls.at(i - 1);
+ const prev_node_last_token = tree.tokens.at(prev_node.lastToken());
+ const loc = tree.tokenLocation(prev_node_last_token.end, decl.firstToken());
+ if (loc.line >= 2) {
+ break :blk "\n\n";
+ }
+ break :blk "\n";
+ },
+ });
+ }
+ }
+ }
+
+ var indent: usize = 0;
+ while (stack.popOrNull()) |state| {
+ switch (state) {
+ RenderState.TopLevelDecl => |decl| {
+ switch (decl.id) {
+ ast.Node.Id.FnProto => {
+ const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
+ try renderComments(tree, stream, fn_proto, indent);
+
+ if (fn_proto.body_node) |body_node| {
+ stack.append(RenderState { .Expression = body_node}) catch unreachable;
+ try stack.append(RenderState { .Text = " "});
+ } else {
+ stack.append(RenderState { .Text = ";" }) catch unreachable;
+ }
+
+ try stack.append(RenderState { .Expression = decl });
+ },
+ ast.Node.Id.Use => {
+ const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl);
+ if (use_decl.visib_token) |visib_token| {
+ try stream.print("{} ", tree.tokenSlice(visib_token));
+ }
+ try stream.print("use ");
+ try stack.append(RenderState { .Text = ";" });
+ try stack.append(RenderState { .Expression = use_decl.expr });
+ },
+ ast.Node.Id.VarDecl => {
+ const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl);
+ try renderComments(tree, stream, var_decl, indent);
+ try stack.append(RenderState { .VarDecl = var_decl});
+ },
+ ast.Node.Id.TestDecl => {
+ const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl);
+ try renderComments(tree, stream, test_decl, indent);
+ try stream.print("test ");
+ try stack.append(RenderState { .Expression = test_decl.body_node });
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Expression = test_decl.name });
+ },
+ ast.Node.Id.StructField => {
+ const field = @fieldParentPtr(ast.Node.StructField, "base", decl);
+ try renderComments(tree, stream, field, indent);
+ if (field.visib_token) |visib_token| {
+ try stream.print("{} ", tree.tokenSlice(visib_token));
+ }
+ try stream.print("{}: ", tree.tokenSlice(field.name_token));
+ try stack.append(RenderState { .Token = field.lastToken() + 1 });
+ try stack.append(RenderState { .Expression = field.type_expr});
+ },
+ ast.Node.Id.UnionTag => {
+ const tag = @fieldParentPtr(ast.Node.UnionTag, "base", decl);
+ try renderComments(tree, stream, tag, indent);
+ try stream.print("{}", tree.tokenSlice(tag.name_token));
+
+ try stack.append(RenderState { .Text = "," });
+
+ if (tag.value_expr) |value_expr| {
+ try stack.append(RenderState { .Expression = value_expr });
+ try stack.append(RenderState { .Text = " = " });
+ }
+
+ if (tag.type_expr) |type_expr| {
+ try stream.print(": ");
+ try stack.append(RenderState { .Expression = type_expr});
+ }
+ },
+ ast.Node.Id.EnumTag => {
+ const tag = @fieldParentPtr(ast.Node.EnumTag, "base", decl);
+ try renderComments(tree, stream, tag, indent);
+ try stream.print("{}", tree.tokenSlice(tag.name_token));
+
+ try stack.append(RenderState { .Text = "," });
+ if (tag.value) |value| {
+ try stream.print(" = ");
+ try stack.append(RenderState { .Expression = value});
+ }
+ },
+ ast.Node.Id.ErrorTag => {
+ const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", decl);
+ try renderComments(tree, stream, tag, indent);
+ try stream.print("{}", tree.tokenSlice(tag.name_token));
+ },
+ ast.Node.Id.Comptime => {
+ try stack.append(RenderState { .MaybeSemiColon = decl });
+ try stack.append(RenderState { .Expression = decl });
+ },
+ ast.Node.Id.LineComment => {
+ const line_comment_node = @fieldParentPtr(ast.Node.LineComment, "base", decl);
+ try stream.write(tree.tokenSlice(line_comment_node.token));
+ },
+ else => unreachable,
+ }
+ },
+
+ RenderState.VarDecl => |var_decl| {
+ try stack.append(RenderState { .Token = var_decl.semicolon_token });
+ if (var_decl.init_node) |init_node| {
+ try stack.append(RenderState { .Expression = init_node });
+ const text = if (init_node.id == ast.Node.Id.MultilineStringLiteral) " =" else " = ";
+ try stack.append(RenderState { .Text = text });
+ }
+ if (var_decl.align_node) |align_node| {
+ try stack.append(RenderState { .Text = ")" });
+ try stack.append(RenderState { .Expression = align_node });
+ try stack.append(RenderState { .Text = " align(" });
+ }
+ if (var_decl.type_node) |type_node| {
+ try stack.append(RenderState { .Expression = type_node });
+ try stack.append(RenderState { .Text = ": " });
+ }
+ try stack.append(RenderState { .Text = tree.tokenSlice(var_decl.name_token) });
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Text = tree.tokenSlice(var_decl.mut_token) });
+
+ if (var_decl.comptime_token) |comptime_token| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Text = tree.tokenSlice(comptime_token) });
+ }
+
+ if (var_decl.extern_export_token) |extern_export_token| {
+ if (var_decl.lib_name != null) {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Expression = ??var_decl.lib_name });
+ }
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Text = tree.tokenSlice(extern_export_token) });
+ }
+
+ if (var_decl.visib_token) |visib_token| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Text = tree.tokenSlice(visib_token) });
+ }
+ },
+
+ RenderState.ParamDecl => |base| {
+ const param_decl = @fieldParentPtr(ast.Node.ParamDecl, "base", base);
+ if (param_decl.comptime_token) |comptime_token| {
+ try stream.print("{} ", tree.tokenSlice(comptime_token));
+ }
+ if (param_decl.noalias_token) |noalias_token| {
+ try stream.print("{} ", tree.tokenSlice(noalias_token));
+ }
+ if (param_decl.name_token) |name_token| {
+ try stream.print("{}: ", tree.tokenSlice(name_token));
+ }
+ if (param_decl.var_args_token) |var_args_token| {
+ try stream.print("{}", tree.tokenSlice(var_args_token));
+ } else {
+ try stack.append(RenderState { .Expression = param_decl.type_node});
+ }
+ },
+ RenderState.Text => |bytes| {
+ try stream.write(bytes);
+ },
+ RenderState.Expression => |base| switch (base.id) {
+ ast.Node.Id.Identifier => {
+ const identifier = @fieldParentPtr(ast.Node.Identifier, "base", base);
+ try stream.print("{}", tree.tokenSlice(identifier.token));
+ },
+ ast.Node.Id.Block => {
+ const block = @fieldParentPtr(ast.Node.Block, "base", base);
+ if (block.label) |label| {
+ try stream.print("{}: ", tree.tokenSlice(label));
+ }
+
+ if (block.statements.len == 0) {
+ try stream.write("{}");
+ } else {
+ try stream.write("{");
+ try stack.append(RenderState { .Text = "}"});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent});
+ try stack.append(RenderState { .Text = "\n"});
+ var i = block.statements.len;
+ while (i != 0) {
+ i -= 1;
+ const statement_node = *block.statements.at(i);
+ try stack.append(RenderState { .Statement = statement_node});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent + indent_delta});
+ try stack.append(RenderState {
+ .Text = blk: {
+ if (i != 0) {
+ const prev_node = *block.statements.at(i - 1);
+ const prev_node_last_token_end = tree.tokens.at(prev_node.lastToken()).end;
+ const loc = tree.tokenLocation(prev_node_last_token_end, statement_node.firstToken());
+ if (loc.line >= 2) {
+ break :blk "\n\n";
+ }
+ }
+ break :blk "\n";
+ },
+ });
+ }
+ }
+ },
+ ast.Node.Id.Defer => {
+ const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base);
+ try stream.print("{} ", tree.tokenSlice(defer_node.defer_token));
+ try stack.append(RenderState { .Expression = defer_node.expr });
+ },
+ ast.Node.Id.Comptime => {
+ const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", base);
+ try stream.print("{} ", tree.tokenSlice(comptime_node.comptime_token));
+ try stack.append(RenderState { .Expression = comptime_node.expr });
+ },
+ ast.Node.Id.AsyncAttribute => {
+ const async_attr = @fieldParentPtr(ast.Node.AsyncAttribute, "base", base);
+ try stream.print("{}", tree.tokenSlice(async_attr.async_token));
+
+ if (async_attr.allocator_type) |allocator_type| {
+ try stack.append(RenderState { .Text = ">" });
+ try stack.append(RenderState { .Expression = allocator_type });
+ try stack.append(RenderState { .Text = "<" });
+ }
+ },
+ ast.Node.Id.Suspend => {
+ const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base);
+ if (suspend_node.label) |label| {
+ try stream.print("{}: ", tree.tokenSlice(label));
+ }
+ try stream.print("{}", tree.tokenSlice(suspend_node.suspend_token));
+
+ if (suspend_node.body) |body| {
+ try stack.append(RenderState { .Expression = body });
+ try stack.append(RenderState { .Text = " " });
+ }
+
+ if (suspend_node.payload) |payload| {
+ try stack.append(RenderState { .Expression = payload });
+ try stack.append(RenderState { .Text = " " });
+ }
+ },
+ ast.Node.Id.InfixOp => {
+ const prefix_op_node = @fieldParentPtr(ast.Node.InfixOp, "base", base);
+ try stack.append(RenderState { .Expression = prefix_op_node.rhs });
+
+ if (prefix_op_node.op == ast.Node.InfixOp.Op.Catch) {
+ if (prefix_op_node.op.Catch) |payload| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Expression = payload });
+ }
+ try stack.append(RenderState { .Text = " catch " });
+ } else {
+ const text = switch (prefix_op_node.op) {
+ ast.Node.InfixOp.Op.Add => " + ",
+ ast.Node.InfixOp.Op.AddWrap => " +% ",
+ ast.Node.InfixOp.Op.ArrayCat => " ++ ",
+ ast.Node.InfixOp.Op.ArrayMult => " ** ",
+ ast.Node.InfixOp.Op.Assign => " = ",
+ ast.Node.InfixOp.Op.AssignBitAnd => " &= ",
+ ast.Node.InfixOp.Op.AssignBitOr => " |= ",
+ ast.Node.InfixOp.Op.AssignBitShiftLeft => " <<= ",
+ ast.Node.InfixOp.Op.AssignBitShiftRight => " >>= ",
+ ast.Node.InfixOp.Op.AssignBitXor => " ^= ",
+ ast.Node.InfixOp.Op.AssignDiv => " /= ",
+ ast.Node.InfixOp.Op.AssignMinus => " -= ",
+ ast.Node.InfixOp.Op.AssignMinusWrap => " -%= ",
+ ast.Node.InfixOp.Op.AssignMod => " %= ",
+ ast.Node.InfixOp.Op.AssignPlus => " += ",
+ ast.Node.InfixOp.Op.AssignPlusWrap => " +%= ",
+ ast.Node.InfixOp.Op.AssignTimes => " *= ",
+ ast.Node.InfixOp.Op.AssignTimesWarp => " *%= ",
+ ast.Node.InfixOp.Op.BangEqual => " != ",
+ ast.Node.InfixOp.Op.BitAnd => " & ",
+ ast.Node.InfixOp.Op.BitOr => " | ",
+ ast.Node.InfixOp.Op.BitShiftLeft => " << ",
+ ast.Node.InfixOp.Op.BitShiftRight => " >> ",
+ ast.Node.InfixOp.Op.BitXor => " ^ ",
+ ast.Node.InfixOp.Op.BoolAnd => " and ",
+ ast.Node.InfixOp.Op.BoolOr => " or ",
+ ast.Node.InfixOp.Op.Div => " / ",
+ ast.Node.InfixOp.Op.EqualEqual => " == ",
+ ast.Node.InfixOp.Op.ErrorUnion => "!",
+ ast.Node.InfixOp.Op.GreaterOrEqual => " >= ",
+ ast.Node.InfixOp.Op.GreaterThan => " > ",
+ ast.Node.InfixOp.Op.LessOrEqual => " <= ",
+ ast.Node.InfixOp.Op.LessThan => " < ",
+ ast.Node.InfixOp.Op.MergeErrorSets => " || ",
+ ast.Node.InfixOp.Op.Mod => " % ",
+ ast.Node.InfixOp.Op.Mult => " * ",
+ ast.Node.InfixOp.Op.MultWrap => " *% ",
+ ast.Node.InfixOp.Op.Period => ".",
+ ast.Node.InfixOp.Op.Sub => " - ",
+ ast.Node.InfixOp.Op.SubWrap => " -% ",
+ ast.Node.InfixOp.Op.UnwrapMaybe => " ?? ",
+ ast.Node.InfixOp.Op.Range => " ... ",
+ ast.Node.InfixOp.Op.Catch => unreachable,
+ };
+
+ try stack.append(RenderState { .Text = text });
+ }
+ try stack.append(RenderState { .Expression = prefix_op_node.lhs });
+ },
+ ast.Node.Id.PrefixOp => {
+ const prefix_op_node = @fieldParentPtr(ast.Node.PrefixOp, "base", base);
+ try stack.append(RenderState { .Expression = prefix_op_node.rhs });
+ switch (prefix_op_node.op) {
+ ast.Node.PrefixOp.Op.AddrOf => |addr_of_info| {
+ try stream.write("&");
+ if (addr_of_info.volatile_token != null) {
+ try stack.append(RenderState { .Text = "volatile "});
+ }
+ if (addr_of_info.const_token != null) {
+ try stack.append(RenderState { .Text = "const "});
+ }
+ if (addr_of_info.align_expr) |align_expr| {
+ try stream.print("align(");
+ try stack.append(RenderState { .Text = ") "});
+ try stack.append(RenderState { .Expression = align_expr});
+ }
+ },
+ ast.Node.PrefixOp.Op.SliceType => |addr_of_info| {
+ try stream.write("[]");
+ if (addr_of_info.volatile_token != null) {
+ try stack.append(RenderState { .Text = "volatile "});
+ }
+ if (addr_of_info.const_token != null) {
+ try stack.append(RenderState { .Text = "const "});
+ }
+ if (addr_of_info.align_expr) |align_expr| {
+ try stream.print("align(");
+ try stack.append(RenderState { .Text = ") "});
+ try stack.append(RenderState { .Expression = align_expr});
+ }
+ },
+ ast.Node.PrefixOp.Op.ArrayType => |array_index| {
+ try stack.append(RenderState { .Text = "]"});
+ try stack.append(RenderState { .Expression = array_index});
+ try stack.append(RenderState { .Text = "["});
+ },
+ ast.Node.PrefixOp.Op.BitNot => try stream.write("~"),
+ ast.Node.PrefixOp.Op.BoolNot => try stream.write("!"),
+ ast.Node.PrefixOp.Op.Deref => try stream.write("*"),
+ ast.Node.PrefixOp.Op.Negation => try stream.write("-"),
+ ast.Node.PrefixOp.Op.NegationWrap => try stream.write("-%"),
+ ast.Node.PrefixOp.Op.Try => try stream.write("try "),
+ ast.Node.PrefixOp.Op.UnwrapMaybe => try stream.write("??"),
+ ast.Node.PrefixOp.Op.MaybeType => try stream.write("?"),
+ ast.Node.PrefixOp.Op.Await => try stream.write("await "),
+ ast.Node.PrefixOp.Op.Cancel => try stream.write("cancel "),
+ ast.Node.PrefixOp.Op.Resume => try stream.write("resume "),
+ }
+ },
+ ast.Node.Id.SuffixOp => {
+ const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", base);
+
+ switch (suffix_op.op) {
+ @TagType(ast.Node.SuffixOp.Op).Call => |*call_info| {
+ try stack.append(RenderState { .Text = ")"});
+ var i = call_info.params.len;
+ while (i != 0) {
+ i -= 1;
+ const param_node = *call_info.params.at(i);
+ try stack.append(RenderState { .Expression = param_node});
+ if (i != 0) {
+ try stack.append(RenderState { .Text = ", " });
+ }
+ }
+ try stack.append(RenderState { .Text = "("});
+ try stack.append(RenderState { .Expression = suffix_op.lhs });
+
+ if (call_info.async_attr) |async_attr| {
+ try stack.append(RenderState { .Text = " "});
+ try stack.append(RenderState { .Expression = &async_attr.base });
+ }
+ },
+ ast.Node.SuffixOp.Op.ArrayAccess => |index_expr| {
+ try stack.append(RenderState { .Text = "]"});
+ try stack.append(RenderState { .Expression = index_expr});
+ try stack.append(RenderState { .Text = "["});
+ try stack.append(RenderState { .Expression = suffix_op.lhs });
+ },
+ @TagType(ast.Node.SuffixOp.Op).Slice => |range| {
+ try stack.append(RenderState { .Text = "]"});
+ if (range.end) |end| {
+ try stack.append(RenderState { .Expression = end});
+ }
+ try stack.append(RenderState { .Text = ".."});
+ try stack.append(RenderState { .Expression = range.start});
+ try stack.append(RenderState { .Text = "["});
+ try stack.append(RenderState { .Expression = suffix_op.lhs });
+ },
+ ast.Node.SuffixOp.Op.StructInitializer => |*field_inits| {
+ if (field_inits.len == 0) {
+ try stack.append(RenderState { .Text = "{}" });
+ try stack.append(RenderState { .Expression = suffix_op.lhs });
+ continue;
+ }
+ if (field_inits.len == 1) {
+ const field_init = *field_inits.at(0);
+
+ try stack.append(RenderState { .Text = " }" });
+ try stack.append(RenderState { .Expression = field_init });
+ try stack.append(RenderState { .Text = "{ " });
+ try stack.append(RenderState { .Expression = suffix_op.lhs });
+ continue;
+ }
+ try stack.append(RenderState { .Text = "}"});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent });
+ try stack.append(RenderState { .Text = "\n" });
+ var i = field_inits.len;
+ while (i != 0) {
+ i -= 1;
+ const field_init = *field_inits.at(i);
+ if (field_init.id != ast.Node.Id.LineComment) {
+ try stack.append(RenderState { .Text = "," });
+ }
+ try stack.append(RenderState { .Expression = field_init });
+ try stack.append(RenderState.PrintIndent);
+ if (i != 0) {
+ try stack.append(RenderState { .Text = blk: {
+ const prev_node = *field_inits.at(i - 1);
+ const prev_node_last_token_end = tree.tokens.at(prev_node.lastToken()).end;
+ const loc = tree.tokenLocation(prev_node_last_token_end, field_init.firstToken());
+ if (loc.line >= 2) {
+ break :blk "\n\n";
+ }
+ break :blk "\n";
+ }});
+ }
+ }
+ try stack.append(RenderState { .Indent = indent + indent_delta });
+ try stack.append(RenderState { .Text = "{\n"});
+ try stack.append(RenderState { .Expression = suffix_op.lhs });
+ },
+ ast.Node.SuffixOp.Op.ArrayInitializer => |*exprs| {
+ if (exprs.len == 0) {
+ try stack.append(RenderState { .Text = "{}" });
+ try stack.append(RenderState { .Expression = suffix_op.lhs });
+ continue;
+ }
+ if (exprs.len == 1) {
+ const expr = *exprs.at(0);
+
+ try stack.append(RenderState { .Text = "}" });
+ try stack.append(RenderState { .Expression = expr });
+ try stack.append(RenderState { .Text = "{" });
+ try stack.append(RenderState { .Expression = suffix_op.lhs });
+ continue;
+ }
+
+ try stack.append(RenderState { .Text = "}"});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent });
+ var i = exprs.len;
+ while (i != 0) {
+ i -= 1;
+ const expr = *exprs.at(i);
+ try stack.append(RenderState { .Text = ",\n" });
+ try stack.append(RenderState { .Expression = expr });
+ try stack.append(RenderState.PrintIndent);
+ }
+ try stack.append(RenderState { .Indent = indent + indent_delta });
+ try stack.append(RenderState { .Text = "{\n"});
+ try stack.append(RenderState { .Expression = suffix_op.lhs });
+ },
+ }
+ },
+ ast.Node.Id.ControlFlowExpression => {
+ const flow_expr = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", base);
+
+ if (flow_expr.rhs) |rhs| {
+ try stack.append(RenderState { .Expression = rhs });
+ try stack.append(RenderState { .Text = " " });
+ }
+
+ switch (flow_expr.kind) {
+ ast.Node.ControlFlowExpression.Kind.Break => |maybe_label| {
+ try stream.print("break");
+ if (maybe_label) |label| {
+ try stream.print(" :");
+ try stack.append(RenderState { .Expression = label });
+ }
+ },
+ ast.Node.ControlFlowExpression.Kind.Continue => |maybe_label| {
+ try stream.print("continue");
+ if (maybe_label) |label| {
+ try stream.print(" :");
+ try stack.append(RenderState { .Expression = label });
+ }
+ },
+ ast.Node.ControlFlowExpression.Kind.Return => {
+ try stream.print("return");
+ },
+
+ }
+ },
+ ast.Node.Id.Payload => {
+ const payload = @fieldParentPtr(ast.Node.Payload, "base", base);
+ try stack.append(RenderState { .Text = "|"});
+ try stack.append(RenderState { .Expression = payload.error_symbol });
+ try stack.append(RenderState { .Text = "|"});
+ },
+ ast.Node.Id.PointerPayload => {
+ const payload = @fieldParentPtr(ast.Node.PointerPayload, "base", base);
+ try stack.append(RenderState { .Text = "|"});
+ try stack.append(RenderState { .Expression = payload.value_symbol });
+
+ if (payload.ptr_token) |ptr_token| {
+ try stack.append(RenderState { .Text = tree.tokenSlice(ptr_token) });
+ }
+
+ try stack.append(RenderState { .Text = "|"});
+ },
+ ast.Node.Id.PointerIndexPayload => {
+ const payload = @fieldParentPtr(ast.Node.PointerIndexPayload, "base", base);
+ try stack.append(RenderState { .Text = "|"});
+
+ if (payload.index_symbol) |index_symbol| {
+ try stack.append(RenderState { .Expression = index_symbol });
+ try stack.append(RenderState { .Text = ", "});
+ }
+
+ try stack.append(RenderState { .Expression = payload.value_symbol });
+
+ if (payload.ptr_token) |ptr_token| {
+ try stack.append(RenderState { .Text = tree.tokenSlice(ptr_token) });
+ }
+
+ try stack.append(RenderState { .Text = "|"});
+ },
+ ast.Node.Id.GroupedExpression => {
+ const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", base);
+ try stack.append(RenderState { .Text = ")"});
+ try stack.append(RenderState { .Expression = grouped_expr.expr });
+ try stack.append(RenderState { .Text = "("});
+ },
+ ast.Node.Id.FieldInitializer => {
+ const field_init = @fieldParentPtr(ast.Node.FieldInitializer, "base", base);
+ try stream.print(".{} = ", tree.tokenSlice(field_init.name_token));
+ try stack.append(RenderState { .Expression = field_init.expr });
+ },
+ ast.Node.Id.IntegerLiteral => {
+ const integer_literal = @fieldParentPtr(ast.Node.IntegerLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(integer_literal.token));
+ },
+ ast.Node.Id.FloatLiteral => {
+ const float_literal = @fieldParentPtr(ast.Node.FloatLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(float_literal.token));
+ },
+ ast.Node.Id.StringLiteral => {
+ const string_literal = @fieldParentPtr(ast.Node.StringLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(string_literal.token));
+ },
+ ast.Node.Id.CharLiteral => {
+ const char_literal = @fieldParentPtr(ast.Node.CharLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(char_literal.token));
+ },
+ ast.Node.Id.BoolLiteral => {
+ const bool_literal = @fieldParentPtr(ast.Node.CharLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(bool_literal.token));
+ },
+ ast.Node.Id.NullLiteral => {
+ const null_literal = @fieldParentPtr(ast.Node.NullLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(null_literal.token));
+ },
+ ast.Node.Id.ThisLiteral => {
+ const this_literal = @fieldParentPtr(ast.Node.ThisLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(this_literal.token));
+ },
+ ast.Node.Id.Unreachable => {
+ const unreachable_node = @fieldParentPtr(ast.Node.Unreachable, "base", base);
+ try stream.print("{}", tree.tokenSlice(unreachable_node.token));
+ },
+ ast.Node.Id.ErrorType => {
+ const error_type = @fieldParentPtr(ast.Node.ErrorType, "base", base);
+ try stream.print("{}", tree.tokenSlice(error_type.token));
+ },
+ ast.Node.Id.VarType => {
+ const var_type = @fieldParentPtr(ast.Node.VarType, "base", base);
+ try stream.print("{}", tree.tokenSlice(var_type.token));
+ },
+ ast.Node.Id.ContainerDecl => {
+ const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base);
+
+ switch (container_decl.layout) {
+ ast.Node.ContainerDecl.Layout.Packed => try stream.print("packed "),
+ ast.Node.ContainerDecl.Layout.Extern => try stream.print("extern "),
+ ast.Node.ContainerDecl.Layout.Auto => { },
+ }
+
+ switch (container_decl.kind) {
+ ast.Node.ContainerDecl.Kind.Struct => try stream.print("struct"),
+ ast.Node.ContainerDecl.Kind.Enum => try stream.print("enum"),
+ ast.Node.ContainerDecl.Kind.Union => try stream.print("union"),
+ }
+
+ if (container_decl.fields_and_decls.len == 0) {
+ try stack.append(RenderState { .Text = "{}"});
+ } else {
+ try stack.append(RenderState { .Text = "}"});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent });
+ try stack.append(RenderState { .Text = "\n"});
+
+ var i = container_decl.fields_and_decls.len;
+ while (i != 0) {
+ i -= 1;
+ const node = *container_decl.fields_and_decls.at(i);
+ try stack.append(RenderState { .TopLevelDecl = node});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState {
+ .Text = blk: {
+ if (i != 0) {
+ const prev_node = *container_decl.fields_and_decls.at(i - 1);
+ const prev_node_last_token_end = tree.tokens.at(prev_node.lastToken()).end;
+ const loc = tree.tokenLocation(prev_node_last_token_end, node.firstToken());
+ if (loc.line >= 2) {
+ break :blk "\n\n";
+ }
+ }
+ break :blk "\n";
+ },
+ });
+ }
+ try stack.append(RenderState { .Indent = indent + indent_delta});
+ try stack.append(RenderState { .Text = "{"});
+ }
+
+ switch (container_decl.init_arg_expr) {
+ ast.Node.ContainerDecl.InitArg.None => try stack.append(RenderState { .Text = " "}),
+ ast.Node.ContainerDecl.InitArg.Enum => |enum_tag_type| {
+ if (enum_tag_type) |expr| {
+ try stack.append(RenderState { .Text = ")) "});
+ try stack.append(RenderState { .Expression = expr});
+ try stack.append(RenderState { .Text = "(enum("});
+ } else {
+ try stack.append(RenderState { .Text = "(enum) "});
+ }
+ },
+ ast.Node.ContainerDecl.InitArg.Type => |type_expr| {
+ try stack.append(RenderState { .Text = ") "});
+ try stack.append(RenderState { .Expression = type_expr});
+ try stack.append(RenderState { .Text = "("});
+ },
+ }
+ },
+ ast.Node.Id.ErrorSetDecl => {
+ const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base);
+
+ if (err_set_decl.decls.len == 0) {
+ try stream.write("error{}");
+ continue;
+ }
+
+ if (err_set_decl.decls.len == 1) blk: {
+ const node = *err_set_decl.decls.at(0);
+
+ // if there are any doc comments or same line comments
+ // don't try to put it all on one line
+ if (node.cast(ast.Node.ErrorTag)) |tag| {
+ if (tag.doc_comments != null) break :blk;
+ } else {
+ break :blk;
+ }
+
+
+ try stream.write("error{");
+ try stack.append(RenderState { .Text = "}" });
+ try stack.append(RenderState { .TopLevelDecl = node });
+ continue;
+ }
+
+ try stream.write("error{");
+
+ try stack.append(RenderState { .Text = "}"});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent });
+ try stack.append(RenderState { .Text = "\n"});
+
+ var i = err_set_decl.decls.len;
+ while (i != 0) {
+ i -= 1;
+ const node = *err_set_decl.decls.at(i);
+ if (node.id != ast.Node.Id.LineComment) {
+ try stack.append(RenderState { .Text = "," });
+ }
+ try stack.append(RenderState { .TopLevelDecl = node });
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState {
+ .Text = blk: {
+ if (i != 0) {
+ const prev_node = *err_set_decl.decls.at(i - 1);
+ const prev_node_last_token_end = tree.tokens.at(prev_node.lastToken()).end;
+ const loc = tree.tokenLocation(prev_node_last_token_end, node.firstToken());
+ if (loc.line >= 2) {
+ break :blk "\n\n";
+ }
+ }
+ break :blk "\n";
+ },
+ });
+ }
+ try stack.append(RenderState { .Indent = indent + indent_delta});
+ },
+ ast.Node.Id.MultilineStringLiteral => {
+ const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base);
+ try stream.print("\n");
+
+ var i : usize = 0;
+ while (i < multiline_str_literal.lines.len) : (i += 1) {
+ const t = *multiline_str_literal.lines.at(i);
+ try stream.writeByteNTimes(' ', indent + indent_delta);
+ try stream.print("{}", tree.tokenSlice(t));
+ }
+ try stream.writeByteNTimes(' ', indent);
+ },
+ ast.Node.Id.UndefinedLiteral => {
+ const undefined_literal = @fieldParentPtr(ast.Node.UndefinedLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(undefined_literal.token));
+ },
+ ast.Node.Id.BuiltinCall => {
+ const builtin_call = @fieldParentPtr(ast.Node.BuiltinCall, "base", base);
+ try stream.print("{}(", tree.tokenSlice(builtin_call.builtin_token));
+ try stack.append(RenderState { .Text = ")"});
+ var i = builtin_call.params.len;
+ while (i != 0) {
+ i -= 1;
+ const param_node = *builtin_call.params.at(i);
+ try stack.append(RenderState { .Expression = param_node});
+ if (i != 0) {
+ try stack.append(RenderState { .Text = ", " });
+ }
+ }
+ },
+ ast.Node.Id.FnProto => {
+ const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base);
+
+ switch (fn_proto.return_type) {
+ ast.Node.FnProto.ReturnType.Explicit => |node| {
+ try stack.append(RenderState { .Expression = node});
+ },
+ ast.Node.FnProto.ReturnType.InferErrorSet => |node| {
+ try stack.append(RenderState { .Expression = node});
+ try stack.append(RenderState { .Text = "!"});
+ },
+ }
+
+ if (fn_proto.align_expr) |align_expr| {
+ try stack.append(RenderState { .Text = ") " });
+ try stack.append(RenderState { .Expression = align_expr});
+ try stack.append(RenderState { .Text = "align(" });
+ }
+
+ try stack.append(RenderState { .Text = ") " });
+ var i = fn_proto.params.len;
+ while (i != 0) {
+ i -= 1;
+ const param_decl_node = *fn_proto.params.at(i);
+ try stack.append(RenderState { .ParamDecl = param_decl_node});
+ if (i != 0) {
+ try stack.append(RenderState { .Text = ", " });
+ }
+ }
+
+ try stack.append(RenderState { .Text = "(" });
+ if (fn_proto.name_token) |name_token| {
+ try stack.append(RenderState { .Text = tree.tokenSlice(name_token) });
+ try stack.append(RenderState { .Text = " " });
+ }
+
+ try stack.append(RenderState { .Text = "fn" });
+
+ if (fn_proto.async_attr) |async_attr| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Expression = &async_attr.base });
+ }
+
+ if (fn_proto.cc_token) |cc_token| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Text = tree.tokenSlice(cc_token) });
+ }
+
+ if (fn_proto.lib_name) |lib_name| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Expression = lib_name });
+ }
+ if (fn_proto.extern_export_inline_token) |extern_export_inline_token| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Text = tree.tokenSlice(extern_export_inline_token) });
+ }
+
+ if (fn_proto.visib_token) |visib_token_index| {
+ const visib_token = tree.tokens.at(visib_token_index);
+ assert(visib_token.id == Token.Id.Keyword_pub or visib_token.id == Token.Id.Keyword_export);
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Text = tree.tokenSlice(visib_token_index) });
+ }
+ },
+ ast.Node.Id.PromiseType => {
+ const promise_type = @fieldParentPtr(ast.Node.PromiseType, "base", base);
+ try stream.write(tree.tokenSlice(promise_type.promise_token));
+ if (promise_type.result) |result| {
+ try stream.write(tree.tokenSlice(result.arrow_token));
+ try stack.append(RenderState { .Expression = result.return_type});
+ }
+ },
+ ast.Node.Id.LineComment => {
+ const line_comment_node = @fieldParentPtr(ast.Node.LineComment, "base", base);
+ try stream.write(tree.tokenSlice(line_comment_node.token));
+ },
+ ast.Node.Id.DocComment => unreachable, // doc comments are attached to nodes
+ ast.Node.Id.Switch => {
+ const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base);
+
+ try stream.print("{} (", tree.tokenSlice(switch_node.switch_token));
+
+ if (switch_node.cases.len == 0) {
+ try stack.append(RenderState { .Text = ") {}"});
+ try stack.append(RenderState { .Expression = switch_node.expr });
+ continue;
+ }
+
+ try stack.append(RenderState { .Text = "}"});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent });
+ try stack.append(RenderState { .Text = "\n"});
+
+ var i = switch_node.cases.len;
+ while (i != 0) {
+ i -= 1;
+ const node = *switch_node.cases.at(i);
+ try stack.append(RenderState { .Expression = node});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState {
+ .Text = blk: {
+ if (i != 0) {
+ const prev_node = *switch_node.cases.at(i - 1);
+ const prev_node_last_token_end = tree.tokens.at(prev_node.lastToken()).end;
+ const loc = tree.tokenLocation(prev_node_last_token_end, node.firstToken());
+ if (loc.line >= 2) {
+ break :blk "\n\n";
+ }
+ }
+ break :blk "\n";
+ },
+ });
+ }
+ try stack.append(RenderState { .Indent = indent + indent_delta});
+ try stack.append(RenderState { .Text = ") {"});
+ try stack.append(RenderState { .Expression = switch_node.expr });
+ },
+ ast.Node.Id.SwitchCase => {
+ const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base);
+
+ try stack.append(RenderState { .Token = switch_case.lastToken() + 1 });
+ try stack.append(RenderState { .Expression = switch_case.expr });
+ if (switch_case.payload) |payload| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Expression = payload });
+ }
+ try stack.append(RenderState { .Text = " => "});
+
+ var i = switch_case.items.len;
+ while (i != 0) {
+ i -= 1;
+ try stack.append(RenderState { .Expression = *switch_case.items.at(i) });
+
+ if (i != 0) {
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Text = ",\n" });
+ }
+ }
+ },
+ ast.Node.Id.SwitchElse => {
+ const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base);
+ try stream.print("{}", tree.tokenSlice(switch_else.token));
+ },
+ ast.Node.Id.Else => {
+ const else_node = @fieldParentPtr(ast.Node.Else, "base", base);
+ try stream.print("{}", tree.tokenSlice(else_node.else_token));
+
+ switch (else_node.body.id) {
+ ast.Node.Id.Block, ast.Node.Id.If,
+ ast.Node.Id.For, ast.Node.Id.While,
+ ast.Node.Id.Switch => {
+ try stream.print(" ");
+ try stack.append(RenderState { .Expression = else_node.body });
+ },
+ else => {
+ try stack.append(RenderState { .Indent = indent });
+ try stack.append(RenderState { .Expression = else_node.body });
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent + indent_delta });
+ try stack.append(RenderState { .Text = "\n" });
+ }
+ }
+
+ if (else_node.payload) |payload| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Expression = payload });
+ }
+ },
+ ast.Node.Id.While => {
+ const while_node = @fieldParentPtr(ast.Node.While, "base", base);
+ if (while_node.label) |label| {
+ try stream.print("{}: ", tree.tokenSlice(label));
+ }
+
+ if (while_node.inline_token) |inline_token| {
+ try stream.print("{} ", tree.tokenSlice(inline_token));
+ }
+
+ try stream.print("{} ", tree.tokenSlice(while_node.while_token));
+
+ if (while_node.@"else") |@"else"| {
+ try stack.append(RenderState { .Expression = &@"else".base });
+
+ if (while_node.body.id == ast.Node.Id.Block) {
+ try stack.append(RenderState { .Text = " " });
+ } else {
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Text = "\n" });
+ }
+ }
+
+ if (while_node.body.id == ast.Node.Id.Block) {
+ try stack.append(RenderState { .Expression = while_node.body });
+ try stack.append(RenderState { .Text = " " });
+ } else {
+ try stack.append(RenderState { .Indent = indent });
+ try stack.append(RenderState { .Expression = while_node.body });
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent + indent_delta });
+ try stack.append(RenderState { .Text = "\n" });
+ }
+
+ if (while_node.continue_expr) |continue_expr| {
+ try stack.append(RenderState { .Text = ")" });
+ try stack.append(RenderState { .Expression = continue_expr });
+ try stack.append(RenderState { .Text = ": (" });
+ try stack.append(RenderState { .Text = " " });
+ }
+
+ if (while_node.payload) |payload| {
+ try stack.append(RenderState { .Expression = payload });
+ try stack.append(RenderState { .Text = " " });
+ }
+
+ try stack.append(RenderState { .Text = ")" });
+ try stack.append(RenderState { .Expression = while_node.condition });
+ try stack.append(RenderState { .Text = "(" });
+ },
+ ast.Node.Id.For => {
+ const for_node = @fieldParentPtr(ast.Node.For, "base", base);
+ if (for_node.label) |label| {
+ try stream.print("{}: ", tree.tokenSlice(label));
+ }
+
+ if (for_node.inline_token) |inline_token| {
+ try stream.print("{} ", tree.tokenSlice(inline_token));
+ }
+
+ try stream.print("{} ", tree.tokenSlice(for_node.for_token));
+
+ if (for_node.@"else") |@"else"| {
+ try stack.append(RenderState { .Expression = &@"else".base });
+
+ if (for_node.body.id == ast.Node.Id.Block) {
+ try stack.append(RenderState { .Text = " " });
+ } else {
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Text = "\n" });
+ }
+ }
+
+ if (for_node.body.id == ast.Node.Id.Block) {
+ try stack.append(RenderState { .Expression = for_node.body });
+ try stack.append(RenderState { .Text = " " });
+ } else {
+ try stack.append(RenderState { .Indent = indent });
+ try stack.append(RenderState { .Expression = for_node.body });
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent + indent_delta });
+ try stack.append(RenderState { .Text = "\n" });
+ }
+
+ if (for_node.payload) |payload| {
+ try stack.append(RenderState { .Expression = payload });
+ try stack.append(RenderState { .Text = " " });
+ }
+
+ try stack.append(RenderState { .Text = ")" });
+ try stack.append(RenderState { .Expression = for_node.array_expr });
+ try stack.append(RenderState { .Text = "(" });
+ },
+ ast.Node.Id.If => {
+ const if_node = @fieldParentPtr(ast.Node.If, "base", base);
+ try stream.print("{} ", tree.tokenSlice(if_node.if_token));
+
+ switch (if_node.body.id) {
+ ast.Node.Id.Block, ast.Node.Id.If,
+ ast.Node.Id.For, ast.Node.Id.While,
+ ast.Node.Id.Switch => {
+ if (if_node.@"else") |@"else"| {
+ try stack.append(RenderState { .Expression = &@"else".base });
+
+ if (if_node.body.id == ast.Node.Id.Block) {
+ try stack.append(RenderState { .Text = " " });
+ } else {
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Text = "\n" });
+ }
+ }
+ },
+ else => {
+ if (if_node.@"else") |@"else"| {
+ try stack.append(RenderState { .Expression = @"else".body });
+
+ if (@"else".payload) |payload| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Expression = payload });
+ }
+
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Text = tree.tokenSlice(@"else".else_token) });
+ try stack.append(RenderState { .Text = " " });
+ }
+ }
+ }
+
+ try stack.append(RenderState { .Expression = if_node.body });
+
+ if (if_node.payload) |payload| {
+ try stack.append(RenderState { .Text = " " });
+ try stack.append(RenderState { .Expression = payload });
+ }
+
+ try stack.append(RenderState { .NonBreakToken = if_node.condition.lastToken() + 1 });
+ try stack.append(RenderState { .Expression = if_node.condition });
+ try stack.append(RenderState { .Text = "(" });
+ },
+ ast.Node.Id.Asm => {
+ const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base);
+ try stream.print("{} ", tree.tokenSlice(asm_node.asm_token));
+
+ if (asm_node.volatile_token) |volatile_token| {
+ try stream.print("{} ", tree.tokenSlice(volatile_token));
+ }
+
+ try stack.append(RenderState { .Indent = indent });
+ try stack.append(RenderState { .Text = ")" });
+ {
+ var i = asm_node.clobbers.len;
+ while (i != 0) {
+ i -= 1;
+ try stack.append(RenderState { .Expression = *asm_node.clobbers.at(i) });
+
+ if (i != 0) {
+ try stack.append(RenderState { .Text = ", " });
+ }
+ }
+ }
+ try stack.append(RenderState { .Text = ": " });
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent + indent_delta });
+ try stack.append(RenderState { .Text = "\n" });
+ {
+ var i = asm_node.inputs.len;
+ while (i != 0) {
+ i -= 1;
+ const node = *asm_node.inputs.at(i);
+ try stack.append(RenderState { .Expression = &node.base});
+
+ if (i != 0) {
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState {
+ .Text = blk: {
+ const prev_node = *asm_node.inputs.at(i - 1);
+ const prev_node_last_token_end = tree.tokens.at(prev_node.lastToken()).end;
+ const loc = tree.tokenLocation(prev_node_last_token_end, node.firstToken());
+ if (loc.line >= 2) {
+ break :blk "\n\n";
+ }
+ break :blk "\n";
+ },
+ });
+ try stack.append(RenderState { .Text = "," });
+ }
+ }
+ }
+ try stack.append(RenderState { .Indent = indent + indent_delta + 2});
+ try stack.append(RenderState { .Text = ": "});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent + indent_delta});
+ try stack.append(RenderState { .Text = "\n" });
+ {
+ var i = asm_node.outputs.len;
+ while (i != 0) {
+ i -= 1;
+ const node = *asm_node.outputs.at(i);
+ try stack.append(RenderState { .Expression = &node.base});
+
+ if (i != 0) {
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState {
+ .Text = blk: {
+ const prev_node = *asm_node.outputs.at(i - 1);
+ const prev_node_last_token_end = tree.tokens.at(prev_node.lastToken()).end;
+ const loc = tree.tokenLocation(prev_node_last_token_end, node.firstToken());
+ if (loc.line >= 2) {
+ break :blk "\n\n";
+ }
+ break :blk "\n";
+ },
+ });
+ try stack.append(RenderState { .Text = "," });
+ }
+ }
+ }
+ try stack.append(RenderState { .Indent = indent + indent_delta + 2});
+ try stack.append(RenderState { .Text = ": "});
+ try stack.append(RenderState.PrintIndent);
+ try stack.append(RenderState { .Indent = indent + indent_delta});
+ try stack.append(RenderState { .Text = "\n" });
+ try stack.append(RenderState { .Expression = asm_node.template });
+ try stack.append(RenderState { .Text = "(" });
+ },
+ ast.Node.Id.AsmInput => {
+ const asm_input = @fieldParentPtr(ast.Node.AsmInput, "base", base);
+
+ try stack.append(RenderState { .Text = ")"});
+ try stack.append(RenderState { .Expression = asm_input.expr});
+ try stack.append(RenderState { .Text = " ("});
+ try stack.append(RenderState { .Expression = asm_input.constraint });
+ try stack.append(RenderState { .Text = "] "});
+ try stack.append(RenderState { .Expression = asm_input.symbolic_name });
+ try stack.append(RenderState { .Text = "["});
+ },
+ ast.Node.Id.AsmOutput => {
+ const asm_output = @fieldParentPtr(ast.Node.AsmOutput, "base", base);
+
+ try stack.append(RenderState { .Text = ")"});
+ switch (asm_output.kind) {
+ ast.Node.AsmOutput.Kind.Variable => |variable_name| {
+ try stack.append(RenderState { .Expression = &variable_name.base});
+ },
+ ast.Node.AsmOutput.Kind.Return => |return_type| {
+ try stack.append(RenderState { .Expression = return_type});
+ try stack.append(RenderState { .Text = "-> "});
+ },
+ }
+ try stack.append(RenderState { .Text = " ("});
+ try stack.append(RenderState { .Expression = asm_output.constraint });
+ try stack.append(RenderState { .Text = "] "});
+ try stack.append(RenderState { .Expression = asm_output.symbolic_name });
+ try stack.append(RenderState { .Text = "["});
+ },
+
+ ast.Node.Id.StructField,
+ ast.Node.Id.UnionTag,
+ ast.Node.Id.EnumTag,
+ ast.Node.Id.ErrorTag,
+ ast.Node.Id.Root,
+ ast.Node.Id.VarDecl,
+ ast.Node.Id.Use,
+ ast.Node.Id.TestDecl,
+ ast.Node.Id.ParamDecl => unreachable,
+ },
+ RenderState.Statement => |base| {
+ switch (base.id) {
+ ast.Node.Id.VarDecl => {
+ const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", base);
+ try stack.append(RenderState { .VarDecl = var_decl});
+ },
+ else => {
+ try stack.append(RenderState { .MaybeSemiColon = base });
+ try stack.append(RenderState { .Expression = base });
+ },
+ }
+ },
+ RenderState.Indent => |new_indent| indent = new_indent,
+ RenderState.PrintIndent => try stream.writeByteNTimes(' ', indent),
+ RenderState.Token => |token_index| try renderToken(tree, stream, token_index, indent, true),
+ RenderState.NonBreakToken => |token_index| try renderToken(tree, stream, token_index, indent, false),
+ RenderState.MaybeSemiColon => |base| {
+ if (base.requireSemiColon()) {
+ const semicolon_index = base.lastToken() + 1;
+ assert(tree.tokens.at(semicolon_index).id == Token.Id.Semicolon);
+ try renderToken(tree, stream, semicolon_index, indent, true);
+ }
+ },
+ }
+ }
+}
+
+// Write the source text of the token at `token_index` to `stream`.
+// If the immediately-following token is a line comment sitting on the same
+// source line, it is emitted after a single space so it stays attached to
+// this token in the rendered output.
+// `line_break` == false means the caller expects rendering to continue on
+// the same line; in that case a trailing space is emitted — or, after an
+// inline comment (which runs to end-of-line), a newline plus one extra
+// indentation level so the following text is not swallowed by the comment.
+fn renderToken(tree: &ast.Tree, stream: var, token_index: ast.TokenIndex, indent: usize, line_break: bool) !void {
+    const token = tree.tokens.at(token_index);
+    try stream.write(tree.tokenSlicePtr(token));
+
+    // Peek at the next token to detect a trailing line comment.
+    const next_token = tree.tokens.at(token_index + 1);
+    if (next_token.id == Token.Id.LineComment) {
+        const loc = tree.tokenLocationPtr(token.end, next_token);
+        if (loc.line == 0) {
+            // The comment is on the same line as the token just written.
+            try stream.print(" {}", tree.tokenSlicePtr(next_token));
+            if (!line_break) {
+                // Anything meant to follow on this line must move to the
+                // next one, since the line comment consumes the rest of it.
+                try stream.write("\n");
+                try stream.writeByteNTimes(' ', indent + indent_delta);
+                return;
+            }
+        }
+    }
+
+    if (!line_break) {
+        try stream.writeByte(' ');
+    }
+}
+
+// Emit the doc comments attached to `node` (if any), one line per comment
+// token, re-indenting to `indent` after each line so that whatever is
+// rendered next lines up under the comment block.
+fn renderComments(tree: &ast.Tree, stream: var, node: var, indent: usize) !void {
+    // `??` is the old-Zig null-coalescing operator (today's `orelse`):
+    // return early when the node carries no doc comments.
+    const comment = node.doc_comments ?? return;
+    var it = comment.lines.iterator(0);
+    while (it.next()) |line_token_index| {
+        // `*line_token_index` is the old-Zig pointer dereference syntax.
+        try stream.print("{}\n", tree.tokenSlice(*line_token_index));
+        try stream.writeByteNTimes(' ', indent);
+    }
+}
+
diff --git a/std/zig/tokenizer.zig b/std/zig/tokenizer.zig
index 7a13d89975..b0e5014a1a 100644
--- a/std/zig/tokenizer.zig
+++ b/std/zig/tokenizer.zig
@@ -5,56 +5,61 @@ pub const Token = struct {
id: Id,
start: usize,
end: usize,
- line: usize,
- column: usize,
- const KeywordId = struct {
+ const Keyword = struct {
bytes: []const u8,
id: Id,
};
- const keywords = []KeywordId {
- KeywordId{.bytes="align", .id = Id.Keyword_align},
- KeywordId{.bytes="and", .id = Id.Keyword_and},
- KeywordId{.bytes="asm", .id = Id.Keyword_asm},
- KeywordId{.bytes="break", .id = Id.Keyword_break},
- KeywordId{.bytes="catch", .id = Id.Keyword_catch},
- KeywordId{.bytes="comptime", .id = Id.Keyword_comptime},
- KeywordId{.bytes="const", .id = Id.Keyword_const},
- KeywordId{.bytes="continue", .id = Id.Keyword_continue},
- KeywordId{.bytes="defer", .id = Id.Keyword_defer},
- KeywordId{.bytes="else", .id = Id.Keyword_else},
- KeywordId{.bytes="enum", .id = Id.Keyword_enum},
- KeywordId{.bytes="error", .id = Id.Keyword_error},
- KeywordId{.bytes="export", .id = Id.Keyword_export},
- KeywordId{.bytes="extern", .id = Id.Keyword_extern},
- KeywordId{.bytes="false", .id = Id.Keyword_false},
- KeywordId{.bytes="fn", .id = Id.Keyword_fn},
- KeywordId{.bytes="for", .id = Id.Keyword_for},
- KeywordId{.bytes="if", .id = Id.Keyword_if},
- KeywordId{.bytes="inline", .id = Id.Keyword_inline},
- KeywordId{.bytes="nakedcc", .id = Id.Keyword_nakedcc},
- KeywordId{.bytes="noalias", .id = Id.Keyword_noalias},
- KeywordId{.bytes="null", .id = Id.Keyword_null},
- KeywordId{.bytes="or", .id = Id.Keyword_or},
- KeywordId{.bytes="packed", .id = Id.Keyword_packed},
- KeywordId{.bytes="pub", .id = Id.Keyword_pub},
- KeywordId{.bytes="return", .id = Id.Keyword_return},
- KeywordId{.bytes="section", .id = Id.Keyword_section},
- KeywordId{.bytes="stdcallcc", .id = Id.Keyword_stdcallcc},
- KeywordId{.bytes="struct", .id = Id.Keyword_struct},
- KeywordId{.bytes="switch", .id = Id.Keyword_switch},
- KeywordId{.bytes="test", .id = Id.Keyword_test},
- KeywordId{.bytes="this", .id = Id.Keyword_this},
- KeywordId{.bytes="true", .id = Id.Keyword_true},
- KeywordId{.bytes="try", .id = Id.Keyword_try},
- KeywordId{.bytes="undefined", .id = Id.Keyword_undefined},
- KeywordId{.bytes="union", .id = Id.Keyword_union},
- KeywordId{.bytes="unreachable", .id = Id.Keyword_unreachable},
- KeywordId{.bytes="use", .id = Id.Keyword_use},
- KeywordId{.bytes="var", .id = Id.Keyword_var},
- KeywordId{.bytes="volatile", .id = Id.Keyword_volatile},
- KeywordId{.bytes="while", .id = Id.Keyword_while},
+ const keywords = []Keyword {
+ Keyword{.bytes="align", .id = Id.Keyword_align},
+ Keyword{.bytes="and", .id = Id.Keyword_and},
+ Keyword{.bytes="asm", .id = Id.Keyword_asm},
+ Keyword{.bytes="async", .id = Id.Keyword_async},
+ Keyword{.bytes="await", .id = Id.Keyword_await},
+ Keyword{.bytes="break", .id = Id.Keyword_break},
+ Keyword{.bytes="catch", .id = Id.Keyword_catch},
+ Keyword{.bytes="cancel", .id = Id.Keyword_cancel},
+ Keyword{.bytes="comptime", .id = Id.Keyword_comptime},
+ Keyword{.bytes="const", .id = Id.Keyword_const},
+ Keyword{.bytes="continue", .id = Id.Keyword_continue},
+ Keyword{.bytes="defer", .id = Id.Keyword_defer},
+ Keyword{.bytes="else", .id = Id.Keyword_else},
+ Keyword{.bytes="enum", .id = Id.Keyword_enum},
+ Keyword{.bytes="errdefer", .id = Id.Keyword_errdefer},
+ Keyword{.bytes="error", .id = Id.Keyword_error},
+ Keyword{.bytes="export", .id = Id.Keyword_export},
+ Keyword{.bytes="extern", .id = Id.Keyword_extern},
+ Keyword{.bytes="false", .id = Id.Keyword_false},
+ Keyword{.bytes="fn", .id = Id.Keyword_fn},
+ Keyword{.bytes="for", .id = Id.Keyword_for},
+ Keyword{.bytes="if", .id = Id.Keyword_if},
+ Keyword{.bytes="inline", .id = Id.Keyword_inline},
+ Keyword{.bytes="nakedcc", .id = Id.Keyword_nakedcc},
+ Keyword{.bytes="noalias", .id = Id.Keyword_noalias},
+ Keyword{.bytes="null", .id = Id.Keyword_null},
+ Keyword{.bytes="or", .id = Id.Keyword_or},
+ Keyword{.bytes="packed", .id = Id.Keyword_packed},
+ Keyword{.bytes="promise", .id = Id.Keyword_promise},
+ Keyword{.bytes="pub", .id = Id.Keyword_pub},
+ Keyword{.bytes="resume", .id = Id.Keyword_resume},
+ Keyword{.bytes="return", .id = Id.Keyword_return},
+ Keyword{.bytes="section", .id = Id.Keyword_section},
+ Keyword{.bytes="stdcallcc", .id = Id.Keyword_stdcallcc},
+ Keyword{.bytes="struct", .id = Id.Keyword_struct},
+ Keyword{.bytes="suspend", .id = Id.Keyword_suspend},
+ Keyword{.bytes="switch", .id = Id.Keyword_switch},
+ Keyword{.bytes="test", .id = Id.Keyword_test},
+ Keyword{.bytes="this", .id = Id.Keyword_this},
+ Keyword{.bytes="true", .id = Id.Keyword_true},
+ Keyword{.bytes="try", .id = Id.Keyword_try},
+ Keyword{.bytes="undefined", .id = Id.Keyword_undefined},
+ Keyword{.bytes="union", .id = Id.Keyword_union},
+ Keyword{.bytes="unreachable", .id = Id.Keyword_unreachable},
+ Keyword{.bytes="use", .id = Id.Keyword_use},
+ Keyword{.bytes="var", .id = Id.Keyword_var},
+ Keyword{.bytes="volatile", .id = Id.Keyword_volatile},
+ Keyword{.bytes="while", .id = Id.Keyword_while},
};
fn getKeyword(bytes: []const u8) ?Id {
@@ -72,7 +77,8 @@ pub const Token = struct {
Invalid,
Identifier,
StringLiteral: StrLitKind,
- StringIdentifier,
+ MultilineStringLiteralLine: StrLitKind,
+ CharLiteral,
Eof,
Builtin,
Bang,
@@ -81,6 +87,7 @@ pub const Token = struct {
PipeEqual,
Equal,
EqualEqual,
+ EqualAngleBracketRight,
BangEqual,
LParen,
RParen,
@@ -89,6 +96,8 @@ pub const Token = struct {
PercentEqual,
LBrace,
RBrace,
+ LBracket,
+ RBracket,
Period,
Ellipsis2,
Ellipsis3,
@@ -129,10 +138,14 @@ pub const Token = struct {
IntegerLiteral,
FloatLiteral,
LineComment,
+ DocComment,
Keyword_align,
Keyword_and,
Keyword_asm,
+ Keyword_async,
+ Keyword_await,
Keyword_break,
+ Keyword_cancel,
Keyword_catch,
Keyword_comptime,
Keyword_const,
@@ -140,6 +153,7 @@ pub const Token = struct {
Keyword_defer,
Keyword_else,
Keyword_enum,
+ Keyword_errdefer,
Keyword_error,
Keyword_export,
Keyword_extern,
@@ -153,11 +167,14 @@ pub const Token = struct {
Keyword_null,
Keyword_or,
Keyword_packed,
+ Keyword_promise,
Keyword_pub,
+ Keyword_resume,
Keyword_return,
Keyword_section,
Keyword_stdcallcc,
Keyword_struct,
+ Keyword_suspend,
Keyword_switch,
Keyword_test,
Keyword_this,
@@ -176,33 +193,8 @@ pub const Token = struct {
pub const Tokenizer = struct {
buffer: []const u8,
index: usize,
- line: usize,
- column: usize,
pending_invalid_token: ?Token,
- pub const LineLocation = struct {
- line_start: usize,
- line_end: usize,
- };
-
- pub fn getTokenLocation(self: &Tokenizer, token: &const Token) LineLocation {
- var loc = LineLocation {
- .line_start = 0,
- .line_end = self.buffer.len,
- };
- for (self.buffer) |c, i| {
- if (i == token.start) {
- loc.line_end = i;
- while (loc.line_end < self.buffer.len and self.buffer[loc.line_end] != '\n') : (loc.line_end += 1) {}
- return loc;
- }
- if (c == '\n') {
- loc.line_start = i + 1;
- }
- }
- return loc;
- }
-
/// For debugging purposes
pub fn dump(self: &Tokenizer, token: &const Token) void {
std.debug.warn("{} \"{}\"\n", @tagName(token.id), self.buffer[token.start..token.end]);
@@ -212,8 +204,6 @@ pub const Tokenizer = struct {
return Tokenizer {
.buffer = buffer,
.index = 0,
- .line = 0,
- .column = 0,
.pending_invalid_token = null,
};
}
@@ -225,6 +215,12 @@ pub const Tokenizer = struct {
C,
StringLiteral,
StringLiteralBackslash,
+ MultilineStringLiteralLine,
+ MultilineStringLiteralLineBackslash,
+ CharLiteral,
+ CharLiteralBackslash,
+ CharLiteralEnd,
+ Backslash,
Equal,
Bang,
Pipe,
@@ -233,7 +229,10 @@ pub const Tokenizer = struct {
Asterisk,
AsteriskPercent,
Slash,
+ LineCommentStart,
LineComment,
+ DocCommentStart,
+ DocComment,
Zero,
IntegerLiteral,
IntegerLiteralWithRadix,
@@ -261,26 +260,22 @@ pub const Tokenizer = struct {
self.pending_invalid_token = null;
return token;
}
+ const start_index = self.index;
var state = State.Start;
var result = Token {
.id = Token.Id.Eof,
.start = self.index,
.end = undefined,
- .line = self.line,
- .column = self.column,
};
- while (self.index < self.buffer.len) {
+ while (self.index < self.buffer.len) : (self.index += 1) {
const c = self.buffer[self.index];
switch (state) {
State.Start => switch (c) {
' ' => {
result.start = self.index + 1;
- result.column += 1;
},
'\n' => {
result.start = self.index + 1;
- result.line += 1;
- result.column = 0;
},
'c' => {
state = State.C;
@@ -290,6 +285,9 @@ pub const Tokenizer = struct {
state = State.StringLiteral;
result.id = Token.Id { .StringLiteral = Token.StrLitKind.Normal };
},
+ '\'' => {
+ state = State.CharLiteral;
+ },
'a'...'b', 'd'...'z', 'A'...'Z', '_' => {
state = State.Identifier;
result.id = Token.Id.Identifier;
@@ -316,6 +314,16 @@ pub const Tokenizer = struct {
self.index += 1;
break;
},
+ '[' => {
+ result.id = Token.Id.LBracket;
+ self.index += 1;
+ break;
+ },
+ ']' => {
+ result.id = Token.Id.RBracket;
+ self.index += 1;
+ break;
+ },
';' => {
result.id = Token.Id.Semicolon;
self.index += 1;
@@ -352,6 +360,10 @@ pub const Tokenizer = struct {
'^' => {
state = State.Caret;
},
+ '\\' => {
+ state = State.Backslash;
+ result.id = Token.Id { .MultilineStringLiteralLine = Token.StrLitKind.Normal };
+ },
'{' => {
result.id = Token.Id.LBrace;
self.index += 1;
@@ -396,7 +408,7 @@ pub const Tokenizer = struct {
State.SawAtSign => switch (c) {
'"' => {
- result.id = Token.Id.StringIdentifier;
+ result.id = Token.Id.Identifier;
state = State.StringLiteral;
},
else => {
@@ -532,8 +544,17 @@ pub const Tokenizer = struct {
'a'...'z', 'A'...'Z', '_', '0'...'9' => {},
else => break,
},
+ State.Backslash => switch (c) {
+ '\\' => {
+ state = State.MultilineStringLiteralLine;
+ },
+ else => break,
+ },
State.C => switch (c) {
- '\\' => @panic("TODO"),
+ '\\' => {
+ state = State.Backslash;
+ result.id = Token.Id { .MultilineStringLiteralLine = Token.StrLitKind.C };
+ },
'"' => {
state = State.StringLiteral;
result.id = Token.Id { .StringLiteral = Token.StrLitKind.C };
@@ -562,6 +583,64 @@ pub const Tokenizer = struct {
},
},
+ State.CharLiteral => switch (c) {
+ '\\' => {
+ state = State.CharLiteralBackslash;
+ },
+ '\'' => {
+ result.id = Token.Id.Invalid;
+ break;
+ },
+ else => {
+ if (c < 0x20 or c == 0x7f) {
+ result.id = Token.Id.Invalid;
+ break;
+ }
+
+ state = State.CharLiteralEnd;
+ }
+ },
+
+ State.CharLiteralBackslash => switch (c) {
+ '\n' => {
+ result.id = Token.Id.Invalid;
+ break;
+ },
+ else => {
+ state = State.CharLiteralEnd;
+ },
+ },
+
+ State.CharLiteralEnd => switch (c) {
+ '\'' => {
+ result.id = Token.Id.CharLiteral;
+ self.index += 1;
+ break;
+ },
+ else => {
+ result.id = Token.Id.Invalid;
+ break;
+ },
+ },
+
+ State.MultilineStringLiteralLine => switch (c) {
+ '\\' => {
+ state = State.MultilineStringLiteralLineBackslash;
+ },
+ '\n' => {
+ self.index += 1;
+ break;
+ },
+ else => self.checkLiteralCharacter(),
+ },
+
+ State.MultilineStringLiteralLineBackslash => switch (c) {
+ '\n' => break, // Look for this error later.
+ else => {
+ state = State.MultilineStringLiteralLine;
+ },
+ },
+
State.Bang => switch (c) {
'=' => {
result.id = Token.Id.BangEqual;
@@ -597,6 +676,11 @@ pub const Tokenizer = struct {
self.index += 1;
break;
},
+ '>' => {
+ result.id = Token.Id.EqualAngleBracketRight;
+ self.index += 1;
+ break;
+ },
else => {
result.id = Token.Id.Equal;
break;
@@ -713,8 +797,8 @@ pub const Tokenizer = struct {
State.Slash => switch (c) {
'/' => {
+ state = State.LineCommentStart;
result.id = Token.Id.LineComment;
- state = State.LineComment;
},
'=' => {
result.id = Token.Id.SlashEqual;
@@ -726,7 +810,31 @@ pub const Tokenizer = struct {
break;
},
},
- State.LineComment => switch (c) {
+ State.LineCommentStart => switch (c) {
+ '/' => {
+ state = State.DocCommentStart;
+ },
+ '\n' => break,
+ else => {
+ state = State.LineComment;
+ self.checkLiteralCharacter();
+ },
+ },
+ State.DocCommentStart => switch (c) {
+ '/' => {
+ state = State.LineComment;
+ },
+ '\n' => {
+ result.id = Token.Id.DocComment;
+ break;
+ },
+ else => {
+ state = State.DocComment;
+ result.id = Token.Id.DocComment;
+ self.checkLiteralCharacter();
+ },
+ },
+ State.LineComment, State.DocComment => switch (c) {
'\n' => break,
else => self.checkLiteralCharacter(),
},
@@ -773,10 +881,10 @@ pub const Tokenizer = struct {
},
},
State.FloatFraction => switch (c) {
- 'p', 'P' => {
+ 'p', 'P', 'e', 'E' => {
state = State.FloatExponentUnsigned;
},
- '0'...'9', 'a'...'f', 'A'...'F' => {},
+ '0'...'9' => {},
else => break,
},
State.FloatExponentUnsigned => switch (c) {
@@ -794,14 +902,6 @@ pub const Tokenizer = struct {
else => break,
},
}
-
- self.index += 1;
- if (c == '\n') {
- self.line += 1;
- self.column = 0;
- } else {
- self.column += 1;
- }
} else if (self.index == self.buffer.len) {
switch (state) {
State.Start,
@@ -811,6 +911,7 @@ pub const Tokenizer = struct {
State.FloatFraction,
State.FloatExponentNumber,
State.StringLiteral, // find this error later
+ State.MultilineStringLiteralLine,
State.Builtin => {},
State.Identifier => {
@@ -818,13 +919,22 @@ pub const Tokenizer = struct {
result.id = id;
}
},
+ State.LineCommentStart,
State.LineComment => {
- result.id = Token.Id.Eof;
+ result.id = Token.Id.LineComment;
+ },
+ State.DocComment, State.DocCommentStart => {
+ result.id = Token.Id.DocComment;
},
State.NumberDot,
State.FloatExponentUnsigned,
State.SawAtSign,
+ State.Backslash,
+ State.MultilineStringLiteralLineBackslash,
+ State.CharLiteral,
+ State.CharLiteralBackslash,
+ State.CharLiteralEnd,
State.StringLiteralBackslash => {
result.id = Token.Id.Invalid;
},
@@ -894,6 +1004,7 @@ pub const Tokenizer = struct {
},
}
}
+
if (result.id == Token.Id.Eof) {
if (self.pending_invalid_token) |token| {
self.pending_invalid_token = null;
@@ -905,10 +1016,6 @@ pub const Tokenizer = struct {
return result;
}
- pub fn getTokenSlice(self: &const Tokenizer, token: &const Token) []const u8 {
- return self.buffer[token.start..token.end];
- }
-
fn checkLiteralCharacter(self: &Tokenizer) void {
if (self.pending_invalid_token != null) return;
const invalid_length = self.getInvalidCharacterLength();
@@ -917,8 +1024,6 @@ pub const Tokenizer = struct {
.id = Token.Id.Invalid,
.start = self.index,
.end = self.index + invalid_length,
- .line = self.line,
- .column = self.column,
};
}
@@ -968,9 +1073,25 @@ test "tokenizer" {
});
}
+test "tokenizer - float literal" {
+ testTokenize("a = 4.94065645841246544177e-324;\n", []Token.Id {
+ Token.Id.Identifier,
+ Token.Id.Equal,
+ Token.Id.FloatLiteral,
+ Token.Id.Semicolon,
+ });
+}
+
+test "tokenizer - chars" {
+ testTokenize("'c'", []Token.Id {Token.Id.CharLiteral});
+}
+
test "tokenizer - invalid token characters" {
testTokenize("#", []Token.Id{Token.Id.Invalid});
testTokenize("`", []Token.Id{Token.Id.Invalid});
+ testTokenize("'c", []Token.Id {Token.Id.Invalid});
+ testTokenize("'", []Token.Id {Token.Id.Invalid});
+ testTokenize("''", []Token.Id {Token.Id.Invalid, Token.Id.Invalid});
}
test "tokenizer - invalid literal/comment characters" {
@@ -979,41 +1100,77 @@ test "tokenizer - invalid literal/comment characters" {
Token.Id.Invalid,
});
testTokenize("//\x00", []Token.Id {
+ Token.Id.LineComment,
Token.Id.Invalid,
});
testTokenize("//\x1f", []Token.Id {
+ Token.Id.LineComment,
Token.Id.Invalid,
});
testTokenize("//\x7f", []Token.Id {
+ Token.Id.LineComment,
Token.Id.Invalid,
});
}
test "tokenizer - utf8" {
- testTokenize("//\xc2\x80", []Token.Id{});
- testTokenize("//\xf4\x8f\xbf\xbf", []Token.Id{});
+ testTokenize("//\xc2\x80", []Token.Id{Token.Id.LineComment});
+ testTokenize("//\xf4\x8f\xbf\xbf", []Token.Id{Token.Id.LineComment});
}
test "tokenizer - invalid utf8" {
- testTokenize("//\x80", []Token.Id{Token.Id.Invalid});
- testTokenize("//\xbf", []Token.Id{Token.Id.Invalid});
- testTokenize("//\xf8", []Token.Id{Token.Id.Invalid});
- testTokenize("//\xff", []Token.Id{Token.Id.Invalid});
- testTokenize("//\xc2\xc0", []Token.Id{Token.Id.Invalid});
- testTokenize("//\xe0", []Token.Id{Token.Id.Invalid});
- testTokenize("//\xf0", []Token.Id{Token.Id.Invalid});
- testTokenize("//\xf0\x90\x80\xc0", []Token.Id{Token.Id.Invalid});
+ testTokenize("//\x80", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
+ testTokenize("//\xbf", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
+ testTokenize("//\xf8", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
+ testTokenize("//\xff", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
+ testTokenize("//\xc2\xc0", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
+ testTokenize("//\xe0", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
+ testTokenize("//\xf0", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
+ testTokenize("//\xf0\x90\x80\xc0", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
}
test "tokenizer - illegal unicode codepoints" {
// unicode newline characters.U+0085, U+2028, U+2029
- testTokenize("//\xc2\x84", []Token.Id{});
- testTokenize("//\xc2\x85", []Token.Id{Token.Id.Invalid});
- testTokenize("//\xc2\x86", []Token.Id{});
- testTokenize("//\xe2\x80\xa7", []Token.Id{});
- testTokenize("//\xe2\x80\xa8", []Token.Id{Token.Id.Invalid});
- testTokenize("//\xe2\x80\xa9", []Token.Id{Token.Id.Invalid});
- testTokenize("//\xe2\x80\xaa", []Token.Id{});
+ testTokenize("//\xc2\x84", []Token.Id{Token.Id.LineComment});
+ testTokenize("//\xc2\x85", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
+ testTokenize("//\xc2\x86", []Token.Id{Token.Id.LineComment});
+ testTokenize("//\xe2\x80\xa7", []Token.Id{Token.Id.LineComment});
+ testTokenize("//\xe2\x80\xa8", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
+ testTokenize("//\xe2\x80\xa9", []Token.Id{
+ Token.Id.LineComment,
+ Token.Id.Invalid,
+ });
+ testTokenize("//\xe2\x80\xaa", []Token.Id{Token.Id.LineComment});
}
test "tokenizer - string identifier and builtin fns" {
@@ -1022,7 +1179,7 @@ test "tokenizer - string identifier and builtin fns" {
,
[]Token.Id{
Token.Id.Keyword_const,
- Token.Id.StringIdentifier,
+ Token.Id.Identifier,
Token.Id.Equal,
Token.Id.Builtin,
Token.Id.LParen,
@@ -1040,11 +1197,36 @@ test "tokenizer - pipe and then invalid" {
});
}
+test "tokenizer - line comment and doc comment" {
+ testTokenize("//", []Token.Id{Token.Id.LineComment});
+ testTokenize("// a / b", []Token.Id{Token.Id.LineComment});
+ testTokenize("// /", []Token.Id{Token.Id.LineComment});
+ testTokenize("/// a", []Token.Id{Token.Id.DocComment});
+ testTokenize("///", []Token.Id{Token.Id.DocComment});
+ testTokenize("////", []Token.Id{Token.Id.LineComment});
+}
+
+test "tokenizer - line comment followed by identifier" {
+ testTokenize(
+ \\ Unexpected,
+ \\ // another
+ \\ Another,
+ , []Token.Id{
+ Token.Id.Identifier,
+ Token.Id.Comma,
+ Token.Id.LineComment,
+ Token.Id.Identifier,
+ Token.Id.Comma,
+ });
+}
+
fn testTokenize(source: []const u8, expected_tokens: []const Token.Id) void {
var tokenizer = Tokenizer.init(source);
for (expected_tokens) |expected_token_id| {
const token = tokenizer.next();
- std.debug.assert(@TagType(Token.Id)(token.id) == @TagType(Token.Id)(expected_token_id));
+ if (@TagType(Token.Id)(token.id) != @TagType(Token.Id)(expected_token_id)) {
+ std.debug.panic("expected {}, found {}\n", @tagName(@TagType(Token.Id)(expected_token_id)), @tagName(@TagType(Token.Id)(token.id)));
+ }
switch (expected_token_id) {
Token.Id.StringLiteral => |expected_kind| {
std.debug.assert(expected_kind == switch (token.id) { Token.Id.StringLiteral => |kind| kind, else => unreachable });