Skip to content

Instantly share code, notes, and snippets.

@lassade
Created August 9, 2024 22:41
Show Gist options
  • Save lassade/d2b46b98b97e757c97c12f098c2f5f1f to your computer and use it in GitHub Desktop.
reference counted implementation for critique
const std = @import("std");
const Atomic = std.atomic.Value;
// `core` is a stand-in for a larger upper-level namespace in the real project;
// only the process-wide allocator is needed here.
// NOTE(review): `allocator` starts out `undefined` — it must be assigned
// (as the test below does) before any allocation path runs; confirm callers
// guarantee this.
const core = struct {
    pub var allocator: std.mem.Allocator = undefined;
};
/// Lock-free intrusive stack (Treiber stack). Nodes are caller-owned; the
/// stack only links them through their embedded `Node`.
///
/// NOTE(review): this stack is ABA-prone — `pop` reads `node.next` and then
/// CASes on `top`; if the node is popped, recycled, and re-pushed by other
/// threads in between, the CAS can succeed with a stale `next`. Slots in this
/// file ARE recycled (via the Rc free queue), so the hazard looks reachable
/// under contention — confirm.
/// NOTE(review): all operations use `.monotonic`; publishing a node whose
/// payload was just written normally needs release on push / acquire on pop —
/// verify against the intended memory model.
pub const Stack = extern struct {
    top: Atomic(?*Node) = .{ .raw = null },

    /// Intrusive link embedded in each element.
    const Node = extern struct {
        next: ?*@This() = null,
    };

    /// Pushes `node` onto the stack. `node` must not currently be linked.
    pub fn push(self: *@This(), node: *Node) void {
        var top = self.top.load(.monotonic);
        while (true) {
            // Link ourselves above the current top, then try to install.
            node.next = top;
            // On failure, cmpxchgWeak returns the observed top and we retry.
            top = self.top.cmpxchgWeak(top, node, .monotonic, .monotonic) orelse break;
        }
    }

    /// Pops the most recently pushed node, or returns null when empty.
    pub fn pop(self: *@This()) ?*Node {
        var top = self.top.load(.monotonic);
        while (top) |node| {
            const prev = node.next;
            // Success: we unlinked `node`. Failure: retry with the new top.
            top = self.top.cmpxchgWeak(top, prev, .monotonic, .monotonic) orelse {
                return node;
            };
        }
        return null;
    }
};
/// Header describing one raw bucket allocation made by a pool allocator.
/// Linked into `State.allocations` through `node` so every bucket can be
/// released at once on shutdown.
const Allocation = struct {
    node: Stack.Node, // intrusive link into the global allocation registry
    size: usize, // total byte size originally requested from `core.allocator`
    n: usize, // number of `Inner` slots carved out of this bucket
};
/// Global registry of every bucket allocation made by the pooled Rc
/// allocators, so they can all be freed together at shutdown.
const State = struct {
    allocations: Stack = .{},

    /// Frees every bucket recorded in `allocations`.
    /// Must only run after all Rc/Weak handles are dead — freed slots are not
    /// unlinked from their per-type free queues.
    pub fn deinit(self: *@This()) void {
        // Atomically detach the whole list, then walk it unsynchronized.
        var it = self.allocations.top.swap(null, .monotonic);
        while (it) |node| {
            it = node.next;
            const allocation: *Allocation = @alignCast(@fieldParentPtr("node", node));
            // This relies on `Allocation` being the first field of the bucket
            // header, so `allocation` is also the start of the raw allocation.
            // NOTE(review): buckets are allocated with @alignOf(Allocator.Node)
            // (which can exceed @alignOf(Allocation) when T is over-aligned),
            // but freed here at @alignOf(Allocation) — alloc/free alignments
            // should match; confirm.
            core.allocator.free(@as([*]align(@alignOf(Allocation)) u8, @ptrCast(allocation))[0..allocation.size]);
        }
    }
};

/// Process-wide allocation registry (see `State.deinit`).
pub var state: State = .{};
/// atomic reference counted object
///
/// Slots are served from a per-T pool (`Allocator` below) rather than from
/// `core.allocator` directly; raw buckets are registered in the global
/// `state` so they can be released on shutdown.
pub fn Rc(comptime T: type) type {
    return struct {
        inner: *Inner,

        /// Takes ownership of `value`, storing it in a pooled slot with a
        /// strong count of 1.
        /// NOTE(review): allocation failure aborts via `catch unreachable`
        /// because the signature cannot carry an error — confirm intended.
        pub fn init(value: T) @This() {
            const inner = allocator.create() catch unreachable;
            inner.value = value;
            return .{ .inner = inner };
        }

        /// Creates a weak handle; the caller must still hold a live strong
        /// reference (asserted).
        pub fn weaken(self: *const @This()) Weak(T) {
            std.debug.assert(self.inner.strong.load(.monotonic) != 0);
            _ = self.inner.weak.fetchAdd(1, .monotonic);
            return .{ .inner = self.inner };
        }

        /// Bumps the strong count and returns a second handle.
        pub fn clone(self: @This()) @This() {
            const c = self.inner.strong.fetchAdd(1, .monotonic);
            std.debug.assert(c > 0);
            return self;
        }

        /// Drops one strong reference; the last strong drop recycles the slot
        /// unless weak handles are still alive.
        /// NOTE(review): the strong-drop/weak-load pair here races with
        /// `Weak.deinit`'s weak-drop/strong-load — both sides can observe the
        /// other counter at zero and BOTH call `free`, double-recycling the
        /// slot. An Arc-style protocol (all strong handles sharing one
        /// implicit weak reference) closes this window; confirm and rework.
        pub fn deinit(self: @This()) void {
            if (self.inner.strong.fetchSub(1, .monotonic) != 1) return;
            if (self.inner.weak.load(.monotonic) != 0) return;
            allocator.free(self.inner);
        }

        const Inner = struct {
            node: Stack.Node, // intrusive link used while the slot sits in the free queue
            strong: Atomic(u32),
            weak: Atomic(u32),
            value: T,
        };

        /// Pool allocator: `Inner` slots are carved out of bucket allocations
        /// and recycled through a lock-free free queue.
        const Allocator = struct {
            hold: Atomic(bool) = .{ .raw = false }, // spin flag guarding pool growth
            capacity: usize = 0, // total slots ever carved; mutated only under `hold`
            free_queue: Stack = .{}, // available (never-used or released) slots
            nodes: Stack = .{}, // every bucket ever allocated
            /// Bucket header: registry link, pool link, then the `Inner` array
            /// starting at `pad` (which only supplies alignment).
            const Node = struct {
                allocation: Allocation,
                node: Stack.Node,
                pad: void align(@alignOf(Inner)),
            };

            /// Pops a free slot, growing the pool when empty, and returns it
            /// with `strong == 1` and `weak == 0`.
            fn create(self: *@This()) !*Inner {
                while (true) {
                    if (self.free_queue.pop()) |node| {
                        const inner: *Inner = @fieldParentPtr("node", node);
                        // Revive the slot; it must have been fully released.
                        const c = inner.strong.swap(1, .monotonic);
                        std.debug.assert(c == 0);
                        return inner;
                    }
                    // Only one thread at a time may grow the pool; losers spin
                    // back to the free queue the winner is about to refill.
                    if (self.hold.swap(true, .monotonic)) {
                        continue;
                    }
                    defer _ = self.hold.swap(false, .monotonic);
                    // Grow by half the current capacity, with a floor of 8.
                    const capacity = self.capacity;
                    const n = capacity / 2 + 8;
                    self.capacity += n;
                    const size = @sizeOf(Node) + @sizeOf(Inner) * n;
                    const raw = try core.allocator.alignedAlloc(u8, @alignOf(Node), size);
                    const bucket: *Node = @ptrCast(raw.ptr);
                    bucket.allocation.node = .{};
                    bucket.allocation.size = size;
                    bucket.allocation.n = n;
                    bucket.node = .{};
                    self.nodes.push(&bucket.node);
                    state.allocations.push(&bucket.allocation.node);
                    const data = @as([*]Inner, @ptrCast(&bucket.pad));
                    // Push slots in reverse so they pop in ascending address
                    // order; the wrapping decrement walks n-1 .. 0 and exits
                    // once the index wraps past zero to maxInt(usize) >= n.
                    var i = n - 1;
                    while (i < n) : (i -%= 1) {
                        const inner = &data[i];
                        inner.node = .{};
                        inner.strong = .{ .raw = 0 };
                        inner.weak = .{ .raw = 0 };
                        self.free_queue.push(&inner.node);
                    }
                }
            }

            /// Destroys the payload, then recycles the slot.
            fn free(self: *@This(), inner: *Inner) void {
                // BUGFIX: run the payload's `deinit` BEFORE publishing the
                // slot to the lock-free free queue. The previous order pushed
                // first, so another thread could pop and start reusing the
                // slot while the old value was still being destroyed.
                if (std.meta.hasMethod(T, "deinit")) inner.value.deinit();
                self.free_queue.push(&inner.node);
            }
        };

        // One pool per instantiated T (comptime-generated global).
        var allocator = Allocator{};
    };
}
/// Weak counterpart of `Rc(T)`: keeps the slot's weak count alive but does
/// not keep the value alive.
pub fn Weak(comptime T: type) type {
    return struct {
        inner: *Rc(T).Inner,

        /// Attempts to obtain a strong handle. Returns null if the value has
        /// already been released (strong count reached zero).
        pub fn upgrade(self: @This()) ?Rc(T) {
            // Increment-if-nonzero CAS loop: never resurrects a dead value.
            var c = self.inner.strong.load(.monotonic);
            while (c != 0) {
                c = self.inner.strong.cmpxchgWeak(c, c + 1, .monotonic, .monotonic) orelse {
                    return .{ .inner = self.inner };
                };
            }
            return null;
        }

        /// Drops one weak reference; the last weak drop recycles the slot if
        /// no strong references remain.
        /// NOTE(review): this weak-drop/strong-load pair mirrors
        /// `Rc.deinit`'s strong-drop/weak-load — when the last strong and
        /// last weak handles are dropped concurrently, both sides can pass
        /// their checks and both call `free` (double-recycle). Confirm and
        /// rework (see note on `Rc.deinit`).
        pub fn deinit(self: @This()) void {
            if (self.inner.weak.fetchSub(1, .monotonic) != 1) return;
            if (self.inner.strong.load(.monotonic) != 0) return;
            Rc(T).allocator.free(self.inner);
        }
    };
}
test {
    core.allocator = std.testing.allocator;
    // Registered first so it runs last: releases the pool buckets after every
    // individual handle has been dropped.
    defer state.deinit();

    const Texture = struct {
        size: [2]usize = .{ 0, 0 },
        data: []u8 = &.{},
        mip_count: usize = 0,
        mips: [8][]u8 = undefined,
        pub fn deinit(self: *const @This()) void {
            core.allocator.free(self.data);
        }
    };

    // Build an 8x8 grayscale texture with two mip levels behind an Rc handle.
    const texture = Rc(Texture).init(.{});
    const pixels = &texture.inner.value;
    pixels.size = .{ 8, 8 };
    pixels.data = try core.allocator.alloc(u8, 64 + 16);
    pixels.mip_count = 2;
    pixels.mips[0] = pixels.data[0..64];
    pixels.mips[1] = pixels.data[64..];
    @memset(pixels.data, 0xff);
    // Dropping the last strong handle runs Texture.deinit via the pool.
    defer texture.deinit();
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment