author     Andrew Kelley <superjoe30@gmail.com>    2018-07-14 18:27:51 -0400
committer  Andrew Kelley <superjoe30@gmail.com>    2018-07-14 18:27:51 -0400
commit     4d920cee6e8be2f2ae2cfd9067358c65b977568a (patch)
tree       2c04de6151b7448dec9958d0a91234ea0ba9a15d /std/atomic/stack.zig
parent     da3acacc14331a6be33445c3bfd204e2cccabddd (diff)
parent     28c3d4809bc6d497ac81892bc7eb03b95d8c2b32 (diff)
Merge remote-tracking branch 'origin/master' into llvm7
Diffstat (limited to 'std/atomic/stack.zig')
-rw-r--r--  std/atomic/stack.zig | 32
1 file changed, 20 insertions, 12 deletions
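The merged change below replaces the lock-free cmpxchg loops in push() and pop() with a simple spinlock: a u8 lock field acquired by spinning on an atomic exchange and released via defer when the function returns. For reference, here is a minimal usage sketch of the Stack(T) API as it stands after this change (illustration only, not part of the commit; the std.atomic.Stack import path and the Node field layout are assumptions):

const std = @import("std");
const Stack = std.atomic.Stack; // assumed re-export of Stack(T) from std/atomic/stack.zig

test "push and pop a single node" {
    const S = Stack(i32);
    var stack = S.init();

    // Nodes are caller-owned; the stack itself never allocates.
    var node = S.Node{
        .next = null,
        .data = 42,
    };

    stack.push(&node);
    const popped = stack.pop() orelse unreachable;
    std.debug.assert(popped.data == 42);
    std.debug.assert(stack.isEmpty());
}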
diff --git a/std/atomic/stack.zig b/std/atomic/stack.zig
index d74bee8e8b..16d5c9503b 100644
--- a/std/atomic/stack.zig
+++ b/std/atomic/stack.zig
@@ -1,10 +1,13 @@
+const assert = std.debug.assert;
const builtin = @import("builtin");
const AtomicOrder = builtin.AtomicOrder;
-/// Many reader, many writer, non-allocating, thread-safe, lock-free
+/// Many reader, many writer, non-allocating, thread-safe
+/// Uses a spinlock to protect push() and pop()
pub fn Stack(comptime T: type) type {
return struct {
root: ?*Node,
+ lock: u8,
pub const Self = this;
@@ -14,7 +17,10 @@ pub fn Stack(comptime T: type) type {
};
pub fn init() Self {
- return Self{ .root = null };
+ return Self{
+ .root = null,
+ .lock = 0,
+ };
}
/// push operation, but only if you are the first item in the stack. if you did not succeed in
@@ -25,18 +31,20 @@ pub fn Stack(comptime T: type) type {
}
pub fn push(self: *Self, node: *Node) void {
- var root = @atomicLoad(?*Node, &self.root, AtomicOrder.SeqCst);
- while (true) {
- node.next = root;
- root = @cmpxchgWeak(?*Node, &self.root, root, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst) orelse break;
- }
+ while (@atomicRmw(u8, &self.lock, builtin.AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst) != 0) {}
+ defer assert(@atomicRmw(u8, &self.lock, builtin.AtomicRmwOp.Xchg, 0, AtomicOrder.SeqCst) == 1);
+
+ node.next = self.root;
+ self.root = node;
}
pub fn pop(self: *Self) ?*Node {
- var root = @atomicLoad(?*Node, &self.root, AtomicOrder.SeqCst);
- while (true) {
- root = @cmpxchgWeak(?*Node, &self.root, root, (root orelse return null).next, AtomicOrder.SeqCst, AtomicOrder.SeqCst) orelse return root;
- }
+ while (@atomicRmw(u8, &self.lock, builtin.AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst) != 0) {}
+ defer assert(@atomicRmw(u8, &self.lock, builtin.AtomicRmwOp.Xchg, 0, AtomicOrder.SeqCst) == 1);
+
+ const root = self.root orelse return null;
+ self.root = root.next;
+ return root;
}
pub fn isEmpty(self: *Self) bool {
@@ -45,7 +53,7 @@ pub fn Stack(comptime T: type) type {
};
}
-const std = @import("std");
+const std = @import("../index.zig");
const Context = struct {
allocator: *std.mem.Allocator,
stack: *Stack(i32),