diff --git a/.github/workflows/static.yml b/.github/workflows/static.yml index ef25eeb..caf318f 100644 --- a/.github/workflows/static.yml +++ b/.github/workflows/static.yml @@ -30,7 +30,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Pages uses: actions/configure-pages@v3 - name: Setup Zig diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 042eca6..70990bc 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -11,7 +11,7 @@ jobs: test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: goto-bus-stop/setup-zig@v2 with: version: 0.12.0 diff --git a/.gitignore b/.gitignore index 00a7078..5c10486 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,3 @@ -docs/ zig-cache/ zig-out/ kcov-out diff --git a/Justfile b/Justfile index 61847fd..d97152b 100644 --- a/Justfile +++ b/Justfile @@ -1,9 +1,11 @@ coverage: clean - zig build test - kcov --include-pattern=src/main.zig,src/tests.zig kcov-out zig-cache/o/**/test + zig build test -Doptimize=Debug + kcov --include-pattern=src/root.zig,src/tests.zig kcov-out zig-cache/o/**/test docs: zig build + xdg-open http://localhost:3000/ + bun run docs/index.ts test: zig test src/tests.zig diff --git a/README.md b/README.md index 85e361b..110342d 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ ![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/Aandreba/zigrc/tests.yml) -[![Docs](https://img.shields.io/badge/docs-zig-blue)](https://aandreba.github.io/zigrc/) +[![Docs](https://img.shields.io/badge/docs-zig-blue)](https://aandreba.github.io/zigrc/#zig-rc.main) # zigrc diff --git a/build.zig b/build.zig index dfbeba0..62fa75e 100644 --- a/build.zig +++ b/build.zig @@ -27,7 +27,7 @@ fn build_v10(b: *std.build.Builder) void { const coverage = b.option(bool, "coverage", "Generate test coverage") orelse false; // Docs - const docs = b.addStaticLibrary("zig-rc", "src/main.zig"); + const docs = b.addStaticLibrary("zig-rc", "src/root.zig"); docs.emit_docs = .emit; docs.setBuildMode(mode); docs.install(); @@ -39,7 +39,7 @@ fn build_v10(b: *std.build.Builder) void { if (coverage) { main_tests.setExecCmd(&[_]?[]const u8{ "kcov", - "--include-pattern=src/main.zig,src/tests.zig", + "--include-pattern=src/root.zig,src/tests.zig", "kcov-out", null, // to get zig to use the --test-cmd-bin flag }); @@ -72,7 +72,7 @@ fn build_v11(b: *std.Build) void { // Docs const docs = b.addStaticLibrary(.{ .name = "zig-rc", - .root_source_file = std.build.LazyPath.relative("src/main.zig"), + .root_source_file = std.build.LazyPath.relative("src/root.zig"), .target = target, .optimize = optimize, }); @@ -96,7 +96,7 @@ fn build_v11(b: *std.Build) void { if (coverage) { main_tests.setExecCmd(&[_]?[]const u8{ "kcov", - "--include-pattern=src/main.zig,src/tests.zig", + "--include-pattern=src/root.zig,src/tests.zig", "kcov-out", null, // to get zig to use the --test-cmd-bin flag }); @@ -131,7 +131,7 @@ fn build_v12(b: *std.Build) void { // Docs const docs = b.addStaticLibrary(.{ .name = "zig-rc", - .root_source_file = b.path("src/main.zig"), + .root_source_file = b.path("src/root.zig"), .target = target, .optimize = optimize, }); @@ -155,7 +155,7 @@ fn build_v12(b: *std.Build) void { if (coverage) { main_tests.setExecCmd(&[_]?[]const u8{ "kcov", - "--include-pattern=src/main.zig,src/tests.zig", + "--include-pattern=src/root.zig,src/tests.zig", "kcov-out", null, // to get zig to use the 
--test-cmd-bin flag }); diff --git a/build.zig.zon b/build.zig.zon new file mode 100644 index 0000000..4c0e3f8 --- /dev/null +++ b/build.zig.zon @@ -0,0 +1,61 @@ +.{ + .name = "zigrc", + // This is a [Semantic Version](https://semver.org/). + // In a future version of Zig it will be used for package deduplication. + .version = "0.5.0", + + // This field is optional. + // This is currently advisory only; Zig does not yet do anything + // with this value. + .minimum_zig_version = "0.12.0", + + // This field is optional. + // Each dependency must either provide a `url` and `hash`, or a `path`. + // `zig build --fetch` can be used to fetch all dependencies of a package, recursively. + // Once all dependencies are fetched, `zig build` no longer requires + // internet connectivity. + .dependencies = .{ + // See `zig fetch --save ` for a command-line interface for adding dependencies. + //.example = .{ + // // When updating this field to a new URL, be sure to delete the corresponding + // // `hash`, otherwise you are communicating that you expect to find the old hash at + // // the new URL. + // .url = "https://example.com/foo.tar.gz", + // + // // This is computed from the file contents of the directory of files that is + // // obtained after fetching `url` and applying the inclusion rules given by + // // `paths`. + // // + // // This field is the source of truth; packages do not come from a `url`; they + // // come from a `hash`. `url` is just one of many possible mirrors for how to + // // obtain a package matching this `hash`. + // // + // // Uses the [multihash](https://multiformats.io/multihash/) format. + // .hash = "...", + // + // // When this is provided, the package is found in a directory relative to the + // // build root. In this case the package's hash is irrelevant and therefore not + // // computed. This field and `url` are mutually exclusive. + // .path = "foo", + + // // When this is set to `true`, a package is declared to be lazily + // // fetched. This makes the dependency only get fetched if it is + // // actually used. + // .lazy = false, + //}, + }, + + // Specifies the set of files and directories that are included in this package. + // Only files and directories listed here are included in the `hash` that + // is computed for this package. + // Paths are relative to the build root. Use the empty string (`""`) to refer to + // the build root itself. + // A directory listed here means that all files within, recursively, are included. + .paths = .{ + "build.zig", + "build.zig.zon", + "src", + "LICENSE", + "README.md", + }, +} diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 0000000..1822831 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,2 @@ +node_modules/ +bun.lockb diff --git a/docs/index.ts b/docs/index.ts new file mode 100644 index 0000000..9e26a7d --- /dev/null +++ b/docs/index.ts @@ -0,0 +1,9 @@ +const server = Bun.serve({ + fetch(req) { + const url = new URL(req.url); + const path = url.pathname === "/" ? 
"/index.html" : url.pathname; + return new Response(Bun.file(`./zig-out/docs${path}`)); + }, +}); + +console.log(`Server started at ${server.port}`); diff --git a/docs/package.json b/docs/package.json new file mode 100644 index 0000000..e788e86 --- /dev/null +++ b/docs/package.json @@ -0,0 +1,11 @@ +{ + "name": "docs", + "module": "index.ts", + "type": "module", + "devDependencies": { + "bun-types": "latest" + }, + "peerDependencies": { + "typescript": "^5.0.0" + } +} \ No newline at end of file diff --git a/docs/tsconfig.json b/docs/tsconfig.json new file mode 100644 index 0000000..d34b2e1 --- /dev/null +++ b/docs/tsconfig.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + // add Bun type definitions + "types": ["bun-types"], + + // enable latest features + "lib": ["esnext"], + "module": "esnext", + "target": "esnext", + + "moduleResolution": "bundler", + "noEmit": true, + "allowImportingTsExtensions": true, + "moduleDetection": "force", + + "jsx": "react-jsx", // support JSX + "allowJs": true, // allow importing `.js` from `.ts` + "esModuleInterop": true, // allow default imports for CommonJS modules + + // best practices + "strict": true, + "forceConsistentCasingInFileNames": true, + "skipLibCheck": true + } +} diff --git a/src/example.zig b/src/example.zig index d5dfba3..d086a59 100644 --- a/src/example.zig +++ b/src/example.zig @@ -1,5 +1,5 @@ const std = @import("std"); -const rc = @import("main.zig"); +const rc = @import("root.zig"); const Thread = std.Thread; const Mutex = Thread.Mutex; diff --git a/src/main.zig b/src/main.zig deleted file mode 100644 index bcfaccb..0000000 --- a/src/main.zig +++ /dev/null @@ -1,489 +0,0 @@ -const std = @import("std"); -const builtin = @import("builtin"); - -/// DEPRECATED: It's now simply equal to `!builtin.single_threaded` -pub const atomic_arc = !builtin.single_threaded; - -/// A single threaded, strong reference to a reference-counted value. -pub fn Rc(comptime T: type) type { - return struct { - value: *T, - alloc: std.mem.Allocator, - - const Self = @This(); - const Inner = struct { - strong: usize, - weak: usize, - value: T, - - fn innerSize() comptime_int { - return @sizeOf(@This()); - } - - fn innerAlign() comptime_int { - return @alignOf(@This()); - } - }; - - /// Creates a new reference-counted value. - pub fn init(alloc: std.mem.Allocator, t: T) std.mem.Allocator.Error!Self { - const inner = try alloc.create(Inner); - inner.* = Inner{ .strong = 1, .weak = 1, .value = t }; - return Self{ .value = &inner.value, .alloc = alloc }; - } - - /// Constructs a new `Rc` while giving you a `Weak` to the allocation, - /// to allow you to construct a `T` which holds a weak pointer to itself. - pub fn initCyclic(alloc: std.mem.Allocator, comptime data_fn: fn (*Weak) T) std.mem.Allocator.Error!Self { - const inner = try alloc.create(Inner); - inner.* = Inner{ .strong = 0, .weak = 1, .value = undefined }; - - // Strong references should collectively own a shared weak reference, - // so don't run the destructor for our old weak reference. - var weak = Weak{ .inner = inner, .alloc = alloc }; - - // It's important we don't give up ownership of the weak pointer, or - // else the memory might be freed by the time `data_fn` returns. If - // we really wanted to pass ownership, we could create an additional - // weak pointer for ourselves, but this would result in additional - // updates to the weak reference count which might not be necessary - // otherwise. 
- inner.value = data_fn(&weak); - - std.debug.assert(inner.strong == 0); - inner.strong = 1; - - return Self{ .value = &inner.value, .alloc = alloc }; - } - - /// Gets the number of strong references to this value. - pub fn strongCount(self: *const Self) usize { - return self.innerPtr().strong; - } - - /// Gets the number of weak references to this value. - pub fn weakCount(self: *const Self) usize { - return self.innerPtr().weak - 1; - } - - /// Increments the strong count. - pub fn retain(self: *Self) Self { - self.innerPtr().strong += 1; - return self.*; - } - - /// Creates a new weak reference to the pointed value - pub fn downgrade(self: *Self) Weak { - return Weak.init(self); - } - - /// Decrements the reference count, deallocating if the weak count reaches zero. - /// The continued use of the pointer after calling `release` is undefined behaviour. - pub fn release(self: Self) void { - const ptr = self.innerPtr(); - - ptr.strong -= 1; - if (ptr.strong == 0) { - ptr.weak -= 1; - if (ptr.weak == 0) { - self.alloc.destroy(ptr); - } - } - } - - /// Decrements the reference count, deallocating the weak count reaches zero, - /// and executing `f` if the strong count reaches zero. - /// The continued use of the pointer after calling `release` is undefined behaviour. - pub fn releaseWithFn(self: Self, comptime f: fn (T) void) void { - const ptr = self.innerPtr(); - - ptr.strong -= 1; - if (ptr.strong == 0) { - f(self.value.*); - - ptr.weak -= 1; - if (ptr.weak == 0) { - self.alloc.destroy(ptr); - } - } - } - - /// Returns the inner value, if the `Rc` has exactly one strong reference. - /// Otherwise, `null` is returned. - /// This will succeed even if there are outstanding weak references. - /// The continued use of the pointer if the method successfully returns `T` is undefined behaviour. - pub fn tryUnwrap(self: Self) ?T { - const ptr = self.innerPtr(); - - if (ptr.strong == 1) { - ptr.strong = 0; - const tmp = self.value.*; - - ptr.weak -= 1; - if (ptr.weak == 0) { - self.alloc.destroy(ptr); - } - - return tmp; - } - - return null; - } - - /// Total size (in bytes) of the reference counted value on the heap. - /// This value accounts for the extra memory required to count the references. - pub fn innerSize() comptime_int { - return Inner.innerSize(); - } - - /// Alignment (in bytes) of the reference counted value on the heap. - /// This value accounts for the extra memory required to count the references. - pub fn innerAlign() comptime_int { - return Inner.innerAlign(); - } - - inline fn innerPtr(self: *const Self) *Inner { - return @alignCast(@fieldParentPtr("value", self.value)); - } - - /// A single threaded, weak reference to a reference-counted value. - pub const Weak = struct { - inner: ?*Inner = null, - alloc: std.mem.Allocator, - - /// Creates a new weak reference. - pub fn init(parent: *Rc(T)) Weak { - const ptr = parent.innerPtr(); - ptr.weak += 1; - return Weak{ .inner = ptr, .alloc = parent.alloc }; - } - - /// Creates a new weak reference object from a pointer to it's underlying value, - /// without increasing the weak count. - pub fn fromValuePtr(value: *T, alloc: std.mem.Allocator) Weak { - return .{ .inner = @fieldParentPtr("value", value), .alloc = alloc }; - } - - /// Gets the number of strong references to this value. - pub fn strongCount(self: *const Weak) usize { - return (self.innerPtr() orelse return 0).strong; - } - - /// Gets the number of weak references to this value. 
- pub fn weakCount(self: *const Weak) usize { - const ptr = self.innerPtr() orelse return 1; - if (ptr.strong == 0) { - return ptr.weak; - } else { - return ptr.weak - 1; - } - } - - /// Increments the weak count. - pub fn retain(self: *Weak) Weak { - if (self.innerPtr()) |ptr| { - ptr.weak += 1; - } - return self.*; - } - - /// Attempts to upgrade the weak pointer to an `Rc`, delaying dropping of the inner value if successful. - /// - /// Returns `null` if the inner value has since been dropped. - pub fn upgrade(self: *Weak) ?Rc(T) { - const ptr = self.innerPtr() orelse return null; - - if (ptr.strong == 0) { - ptr.weak -= 1; - if (ptr.weak == 0) { - self.alloc.destroy(ptr); - self.inner = null; - } - return null; - } - - ptr.strong += 1; - return Rc(T){ - .value = &ptr.value, - .alloc = self.alloc, - }; - } - - /// Decrements the weak reference count, deallocating if it reaches zero. - /// The continued use of the pointer after calling `release` is undefined behaviour. - pub fn release(self: Weak) void { - if (self.innerPtr()) |ptr| { - ptr.weak -= 1; - if (ptr.weak == 0) { - self.alloc.destroy(ptr); - } - } - } - - /// Total size (in bytes) of the reference counted value on the heap. - /// This value accounts for the extra memory required to count the references, - /// and is valid for single and multi-threaded refrence counters. - pub fn innerSize() comptime_int { - return Inner.innerSize(); - } - - /// Alignment (in bytes) of the reference counted value on the heap. - /// This value accounts for the extra memory required to count the references, - /// and is valid for single and multi-threaded refrence counters. - pub fn innerAlign() comptime_int { - return Inner.innerAlign(); - } - - inline fn innerPtr(self: *const Weak) ?*Inner { - return @as(?*Inner, @ptrCast(self.inner)); - } - }; - }; -} - -/// A multi-threaded, strong reference to a reference-counted value. -pub fn Arc(comptime T: type) type { - if (!atomic_arc) { - return Rc(T); - } - - return struct { - value: *T, - alloc: std.mem.Allocator, - - const Self = @This(); - const Inner = struct { - strong: usize align(std.atomic.cache_line), - weak: usize align(std.atomic.cache_line), - value: T, - - fn innerSize() comptime_int { - return @sizeOf(@This()); - } - - fn innerAlign() comptime_int { - return @alignOf(@This()); - } - }; - - /// Creates a new reference-counted value. - pub fn init(alloc: std.mem.Allocator, t: T) std.mem.Allocator.Error!Self { - const inner = try alloc.create(Inner); - inner.* = Inner{ .strong = 1, .weak = 1, .value = t }; - return Self{ .value = &inner.value, .alloc = alloc }; - } - - /// Constructs a new `Arc` while giving you a `Aweak` to the allocation, - /// to allow you to construct a `T` which holds a weak pointer to itself. - pub fn initCyclic(alloc: std.mem.Allocator, comptime data_fn: fn (*Weak) T) std.mem.Allocator.Error!Self { - const inner = try alloc.create(Inner); - inner.* = Inner{ .strong = 0, .weak = 1, .value = undefined }; - - // Strong references should collectively own a shared weak reference, - // so don't run the destructor for our old weak reference. - var weak = Weak{ .inner = inner, .alloc = alloc }; - - // It's important we don't give up ownership of the weak pointer, or - // else the memory might be freed by the time `data_fn` returns. If - // we really wanted to pass ownership, we could create an additional - // weak pointer for ourselves, but this would result in additional - // updates to the weak reference count which might not be necessary - // otherwise. 
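// Usage sketch for `initCyclic` (assumes the module is imported the way
// src/tests.zig imports it): the `*Weak` handed to the callback is the shared
// weak reference owned by the strong handles, so the callback may copy or
// retain it, but must not release it.
const std = @import("std");
const zigrc = @import("root.zig");

fn makeAnswer(weak: *zigrc.Arc(i32).Weak) i32 {
    _ = weak; // a real callback would store a retained copy inside the value
    return 42;
}

test "initCyclic sketch" {
    const answer = try zigrc.Arc(i32).initCyclic(std.testing.allocator, makeAnswer);
    defer answer.release();
    try std.testing.expectEqual(@as(i32, 42), answer.value.*);
}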
- inner.value = data_fn(&weak); - - std.debug.assert(@atomicRmw(usize, &inner.strong, .Add, 1, .release) == 0); - return Self{ .value = &inner.value, .alloc = alloc }; - } - - /// Gets the number of strong references to this value. - pub fn strongCount(self: *const Self) usize { - return @atomicLoad(usize, &self.innerPtr().strong, .acquire); - } - - /// Gets the number of weak references to this value. - pub fn weakCount(self: *const Self) usize { - return @atomicLoad(usize, &self.innerPtr().weak, .acquire) - 1; - } - - /// Increments the strong count. - pub fn retain(self: *Self) Self { - _ = @atomicRmw(usize, &self.innerPtr().strong, .Add, 1, .acq_rel); - return self.*; - } - - /// Creates a new weak reference to the pointed value. - pub fn downgrade(self: *Self) Weak { - return Weak.init(self); - } - - /// Decrements the reference count, deallocating if the weak count reaches zero. - /// The continued use of the pointer after calling `release` is undefined behaviour. - pub fn release(self: Self) void { - const ptr = self.innerPtr(); - - if (@atomicRmw(usize, &ptr.strong, .Sub, 1, .acq_rel) == 1) { - if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { - self.alloc.destroy(ptr); - } - } - } - - /// Decrements the reference count, deallocating the weak count reaches zero, - /// and executing `f` if the strong count reaches zero. - /// The continued use of the pointer after calling `release` is undefined behaviour. - pub fn releaseWithFn(self: Self, comptime f: fn (T) void) void { - const ptr = self.innerPtr(); - - if (@atomicRmw(usize, &ptr.strong, .Sub, 1, .acq_rel) == 1) { - f(self.value.*); - if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { - self.alloc.destroy(ptr); - } - } - } - - /// Returns the inner value, if the `Arc` has exactly one strong reference. - /// Otherwise, `null` is returned. - /// This will succeed even if there are outstanding weak references. - /// The continued use of the pointer if the method successfully returns `T` is undefined behaviour. - pub fn tryUnwrap(self: Self) ?T { - const ptr = self.innerPtr(); - - if (@cmpxchgStrong(usize, &ptr.strong, 1, 0, .monotonic, .monotonic) == null) { - ptr.strong = 0; - const tmp = self.value.*; - if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { - self.alloc.destroy(ptr); - } - return tmp; - } - - return null; - } - - /// Total size (in bytes) of the reference counted value on the heap. - /// This value accounts for the extra memory required to count the references. - pub fn innerSize() comptime_int { - return Inner.innerSize(); - } - - /// Alignment (in bytes) of the reference counted value on the heap. - /// This value accounts for the extra memory required to count the references. - pub fn innerAlign() comptime_int { - return Inner.innerAlign(); - } - - inline fn innerPtr(self: *const Self) *Inner { - return @alignCast(@fieldParentPtr("value", self.value)); - } - - /// A multi-threaded, weak reference to a reference-counted value. - pub const Weak = struct { - inner: ?*Inner = null, - alloc: std.mem.Allocator, - - /// Creates a new weak reference. - pub fn init(parent: *Arc(T)) Weak { - const ptr = parent.innerPtr(); - _ = @atomicRmw(usize, &ptr.weak, .Add, 1, .acq_rel); - return Weak{ .inner = ptr, .alloc = parent.alloc }; - } - - /// Creates a new weak reference object from a pointer to it's underlying value, - /// without increasing the weak count. 
- pub fn fromValuePtr(value: *T, alloc: std.mem.Allocator) Weak { - return .{ .inner = @fieldParentPtr("value", value), .alloc = alloc }; - } - - /// Gets the number of strong references to this value. - pub fn strongCount(self: *const Weak) usize { - const ptr = self.innerPtr() orelse return 0; - return @atomicLoad(usize, &ptr.strong, .acquire); - } - - /// Gets the number of weak references to this value. - pub fn weakCount(self: *const Weak) usize { - const ptr = self.innerPtr() orelse return 1; - const weak = @atomicLoad(usize, &ptr.weak, .acquire); - - if (@atomicLoad(usize, &ptr.strong, .acquire) == 0) { - return weak; - } else { - return weak - 1; - } - } - - /// Increments the weak count. - pub fn retain(self: *Weak) Weak { - if (self.innerPtr()) |ptr| { - _ = @atomicRmw(usize, &ptr.weak, .Add, 1, .acq_rel); - } - return self.*; - } - - /// Attempts to upgrade the weak pointer to an `Arc`, delaying dropping of the inner value if successful. - /// - /// Returns `null` if the inner value has since been dropped. - pub fn upgrade(self: *Weak) ?Arc(T) { - const ptr = self.innerPtr() orelse return null; - - while (true) { - const prev = @atomicLoad(usize, &ptr.strong, .acquire); - - if (prev == 0) { - if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { - self.alloc.destroy(ptr); - self.inner = null; - } - return null; - } - - if (@cmpxchgStrong(usize, &ptr.strong, prev, prev + 1, .acquire, .monotonic) == null) { - return Arc(T){ - .value = &ptr.value, - .alloc = self.alloc, - }; - } - - std.atomic.spinLoopHint(); - } - } - - /// Decrements the weak reference count, deallocating if it reaches zero. - /// The continued use of the pointer after calling `release` is undefined behaviour. - pub fn release(self: Weak) void { - if (self.innerPtr()) |ptr| { - if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { - self.alloc.destroy(ptr); - } - } - } - - /// Total size (in bytes) of the reference counted value on the heap. - /// This value accounts for the extra memory required to count the references. - pub fn innerSize() comptime_int { - return Inner.innerSize(); - } - - /// Alignment (in bytes) of the reference counted value on the heap. - /// This value accounts for the extra memory required to count the references. - pub fn innerAlign() comptime_int { - return Inner.innerAlign(); - } - - inline fn innerPtr(self: *const Weak) ?*Inner { - return @as(?*Inner, @ptrCast(self.inner)); - } - }; - }; -} - -/// Creates a new `Rc` inferring the type of `value` -pub fn rc(alloc: std.mem.Allocator, value: anytype) std.mem.Allocator.Error!Rc(@TypeOf(value)) { - return Rc(@TypeOf(value)).init(alloc, value); -} - -/// Creates a new `Arc` inferring the type of `value` -pub fn arc(alloc: std.mem.Allocator, value: anytype) std.mem.Allocator.Error!Arc(@TypeOf(value)) { - return Arc(@TypeOf(value)).init(alloc, value); -} diff --git a/src/root.zig b/src/root.zig new file mode 100644 index 0000000..25ef4da --- /dev/null +++ b/src/root.zig @@ -0,0 +1,923 @@ +const std = @import("std"); +const builtin = @import("builtin"); +const Allocator = std.mem.Allocator; + +/// A single threaded, strong reference to a reference-counted value. +pub fn Rc(comptime T: type) type { + return RcAligned(T, null); +} + +/// A single threaded, strong reference to a reference-counted value. 
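// Usage sketch for the managed `Rc` (assumes the module is imported the way
// src/tests.zig imports it): every `retain` must be paired with a `release`.
const std = @import("std");
const zigrc = @import("root.zig");

test "Rc usage sketch" {
    const five = try zigrc.Rc(i32).init(std.testing.allocator, 5);
    const copy = five.retain(); // second strong reference to the same allocation
    try std.testing.expectEqual(@as(usize, 2), five.strongCount());
    try std.testing.expectEqual(@as(i32, 5), copy.value.*);
    copy.release();
    five.release(); // last strong reference frees the allocation
}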
+pub fn RcAligned(comptime T: type, comptime alignment: ?u29) type { + if (alignment) |a| { + if (a == @alignOf(T)) { + return RcAligned(T, null); + } + } + + return struct { + value: if (alignment) |a| *align(a) T else *T, + alloc: Allocator, + + const Self = @This(); + const Unmanaged = RcAlignedUnmanaged(T, alignment); + const Inner = Unmanaged.Inner; + + /// Creates a new reference-counted value. + pub fn init(alloc: Allocator, t: T) Allocator.Error!Self { + return Self{ + .value = (try Unmanaged.init(alloc, t)).value, + .alloc = alloc, + }; + } + + /// Constructs a new `Rc` while giving you a `Weak` to the allocation, + /// to allow you to construct a `T` which holds a weak pointer to itself. + pub fn initCyclic(alloc: Allocator, comptime data_fn: fn (*Weak) T) Allocator.Error!Self { + const inner = try alloc.create(Inner); + inner.* = Inner{ .strong = 0, .weak = 1, .value = undefined }; + + // Strong references should collectively own a shared weak reference, + // so don't run the destructor for our old weak reference. + var weak = Weak{ .inner = inner, .alloc = alloc }; + + // It's important we don't give up ownership of the weak pointer, or + // else the memory might be freed by the time `data_fn` returns. If + // we really wanted to pass ownership, we could create an additional + // weak pointer for ourselves, but this would result in additional + // updates to the weak reference count which might not be necessary + // otherwise. + inner.value = data_fn(&weak); + + std.debug.assert(inner.strong == 0); + inner.strong = 1; + + return Self{ .value = &inner.value, .alloc = alloc }; + } + + /// Gets the number of strong references to this value. + pub fn strongCount(self: Self) usize { + return self.asUnmanaged().strongCount(); + } + + /// Gets the number of weak references to this value. + pub fn weakCount(self: Self) usize { + return self.asUnmanaged().weakCount(); + } + + /// Increments the strong count. + pub fn retain(self: Self) Self { + _ = self.asUnmanaged().retain(); + return self; + } + + /// Creates a new weak reference to the pointed value + pub fn downgrade(self: Self) Weak { + return Weak.init(self); + } + + /// Decrements the reference count, deallocating if the weak count reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn release(self: Self) void { + return self.asUnmanaged().release(self.alloc); + } + + /// Decrements the reference count, deallocating the weak count reaches zero, + /// and executing `f` if the strong count reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn releaseWithFn(self: Self, comptime f: fn (T) void) void { + return self.asUnmanaged().releaseWithFn(self.alloc, f); + } + + /// Returns the inner value, if the `Rc` has exactly one strong reference. + /// Otherwise, `null` is returned. + /// This will succeed even if there are outstanding weak references. + /// The continued use of the pointer if the method successfully returns `T` is undefined behaviour. + pub fn tryUnwrap(self: Self) ?T { + return self.asUnmanaged().tryUnwrap(self.alloc); + } + + /// Total size (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. + pub fn innerSize() comptime_int { + return Inner.innerSize(); + } + + /// Alignment (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. 
+ pub fn innerAlign() comptime_int { + return Inner.innerAlign(); + } + + inline fn asUnmanaged(self: Self) Unmanaged { + return .{ .value = self.value }; + } + + inline fn innerPtr(self: Self) *Inner { + return @alignCast(@fieldParentPtr("value", self.value)); + } + + /// A single threaded, weak reference to a reference-counted value. + pub const Weak = struct { + inner: ?*Inner = null, + alloc: Allocator, + + const WeakUnmanaged = Unmanaged.Weak; + + /// Creates a new weak reference. + pub fn init(parent: RcAligned(T, alignment)) Weak { + return Weak{ + .inner = WeakUnmanaged.init(parent.asUnmanaged()).inner, + .alloc = parent.alloc, + }; + } + + /// Creates a new weak reference object from a pointer to it's underlying value, + /// without increasing the weak count. + pub fn fromValuePtr(value: if (alignment) |a| *align(a) T else *T) Weak { + return .{ .inner = @fieldParentPtr("value", value) }; + } + + /// Gets the number of strong references to this value. + pub fn strongCount(self: Weak) usize { + return self.asUnmanaged().strongCount(); + } + + /// Gets the number of weak references to this value. + pub fn weakCount(self: Weak) usize { + return self.asUnmanaged().weakCount(); + } + + /// Increments the weak count. + pub fn retain(self: Weak) Weak { + _ = self.asUnmanaged().retain(); + return self; + } + + /// Attempts to upgrade the weak pointer to an `Rc`, delaying dropping of the inner value if successful. + /// + /// Returns `null` if the inner value has since been dropped. + pub fn upgrade(self: *Weak) ?RcAligned(T, alignment) { + const ptr = self.innerPtr() orelse return null; + + if (ptr.strong == 0) { + ptr.weak -= 1; + if (ptr.weak == 0) { + self.alloc.destroy(ptr); + self.inner = null; + } + return null; + } + + ptr.strong += 1; + return .{ + .value = &ptr.value, + .alloc = self.alloc, + }; + } + + /// Decrements the weak reference count, deallocating if it reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn release(self: Weak) void { + return self.asUnmanaged().release(self.alloc); + } + + /// Total size (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references, + /// and is valid for single and multi-threaded refrence counters. + pub fn innerSize() comptime_int { + return Inner.innerSize(); + } + + /// Alignment (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references, + /// and is valid for single and multi-threaded refrence counters. + pub fn innerAlign() comptime_int { + return Inner.innerAlign(); + } + + inline fn asUnmanaged(self: Weak) WeakUnmanaged { + return .{ .inner = self.inner }; + } + + inline fn innerPtr(self: Weak) ?*Inner { + return @as(?*Inner, @ptrCast(self.inner)); + } + }; + }; +} + +/// A multi-threaded, strong reference to a reference-counted value. +pub fn Arc(comptime T: type) type { + return ArcAligned(T, null); +} + +/// A multi-threaded, strong reference to a reference-counted value. 
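// Usage sketch for `Arc` (assumes a multi-threaded build and the same import
// style as src/tests.zig): each thread owns one strong reference and releases
// it when done.
const std = @import("std");
const zigrc = @import("root.zig");

fn worker(shared: zigrc.Arc(usize)) void {
    defer shared.release();
    std.debug.print("worker sees {}\n", .{shared.value.*});
}

test "Arc across threads sketch" {
    const shared = try zigrc.Arc(usize).init(std.testing.allocator, 123);
    const thread = try std.Thread.spawn(.{}, worker, .{shared.retain()});
    thread.join();
    shared.release();
}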
+pub fn ArcAligned(comptime T: type, comptime alignment: ?u29) type { + if (builtin.single_threaded) return RcAligned(T, alignment); + if (alignment) |a| { + if (a == @alignOf(T)) { + return ArcAligned(T, null); + } + } + + return struct { + value: if (alignment) |a| *align(a) T else *T, + alloc: Allocator, + + const Self = @This(); + const Unmanaged = ArcAlignedUnmanaged(T, alignment); + const Inner = Unmanaged.Inner; + + /// Creates a new reference-counted value. + pub fn init(alloc: Allocator, t: T) Allocator.Error!Self { + return Self{ + .value = (try Unmanaged.init(alloc, t)).value, + .alloc = alloc, + }; + } + + /// Constructs a new `Arc` while giving you a `Aweak` to the allocation, + /// to allow you to construct a `T` which holds a weak pointer to itself. + pub fn initCyclic(alloc: Allocator, comptime data_fn: fn (*Weak) T) Allocator.Error!Self { + const inner = try alloc.create(Inner); + inner.* = Inner{ .strong = 0, .weak = 1, .value = undefined }; + + // Strong references should collectively own a shared weak reference, + // so don't run the destructor for our old weak reference. + var weak = Weak{ .inner = inner, .alloc = alloc }; + + // It's important we don't give up ownership of the weak pointer, or + // else the memory might be freed by the time `data_fn` returns. If + // we really wanted to pass ownership, we could create an additional + // weak pointer for ourselves, but this would result in additional + // updates to the weak reference count which might not be necessary + // otherwise. + inner.value = data_fn(&weak); + + std.debug.assert(@atomicRmw(usize, &inner.strong, .Add, 1, .release) == 0); + return Self{ .value = &inner.value, .alloc = alloc }; + } + + /// Gets the number of strong references to this value. + pub fn strongCount(self: Self) usize { + return self.asUnmanaged().strongCount(); + } + + /// Gets the number of weak references to this value. + pub fn weakCount(self: Self) usize { + return self.asUnmanaged().weakCount(); + } + + /// Increments the strong count. + pub fn retain(self: Self) Self { + _ = self.asUnmanaged().retain(); + return self; + } + + /// Creates a new weak reference to the pointed value. + pub fn downgrade(self: Self) Weak { + return Weak.init(self); + } + + /// Decrements the reference count, deallocating if the weak count reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn release(self: Self) void { + return self.asUnmanaged().release(self.alloc); + } + + /// Decrements the reference count, deallocating the weak count reaches zero, + /// and executing `f` if the strong count reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn releaseWithFn(self: Self, comptime f: fn (T) void) void { + return self.asUnmanaged().releaseWithFn(self.alloc, f); + } + + /// Returns the inner value, if the `Arc` has exactly one strong reference. + /// Otherwise, `null` is returned. + /// This will succeed even if there are outstanding weak references. + /// The continued use of the pointer if the method successfully returns `T` is undefined behaviour. + pub fn tryUnwrap(self: Self) ?T { + return self.asUnmanaged().tryUnwrap(self.alloc); + } + + /// Total size (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. + pub fn innerSize() comptime_int { + return Inner.innerSize(); + } + + /// Alignment (in bytes) of the reference counted value on the heap. 
+ /// This value accounts for the extra memory required to count the references. + pub fn innerAlign() comptime_int { + return Inner.innerAlign(); + } + + inline fn asUnmanaged(self: Self) Unmanaged { + return .{ .value = self.value }; + } + + inline fn innerPtr(self: Self) *Inner { + return @alignCast(@fieldParentPtr("value", self.value)); + } + + /// A multi-threaded, weak reference to a reference-counted value. + pub const Weak = struct { + inner: ?*Inner = null, + alloc: Allocator, + + const UnmanagedWeak = Unmanaged.Weak; + + /// Creates a new weak reference. + pub fn init(parent: ArcAligned(T, alignment)) Weak { + return Weak{ + .inner = UnmanagedWeak.init(parent.asUnmanaged()).inner, + .alloc = parent.alloc, + }; + } + + /// Creates a new weak reference object from a pointer to it's underlying value, + /// without increasing the weak count. + pub fn fromValuePtr(value: if (alignment) |a| *align(a) T else *T, alloc: Allocator) Weak { + return .{ .inner = @fieldParentPtr("value", value), .alloc = alloc }; + } + + /// Gets the number of strong references to this value. + pub fn strongCount(self: Weak) usize { + return self.asUnmanaged().strongCount(); + } + + /// Gets the number of weak references to this value. + pub fn weakCount(self: Weak) usize { + return self.asUnmanaged().weakCount(); + } + + /// Increments the weak count. + pub fn retain(self: Weak) Weak { + _ = self.asUnmanaged().retain(); + return self; + } + + /// Attempts to upgrade the weak pointer to an `Arc`, delaying dropping of the inner value if successful. + /// + /// Returns `null` if the inner value has since been dropped. + pub fn upgrade(self: *Weak) ?ArcAligned(T, alignment) { + const ptr = self.innerPtr() orelse return null; + + while (true) { + const prev = @atomicLoad(usize, &ptr.strong, .acquire); + + if (prev == 0) { + if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { + self.alloc.destroy(ptr); + self.inner = null; + } + return null; + } + + if (@cmpxchgStrong(usize, &ptr.strong, prev, prev + 1, .acquire, .monotonic) == null) { + return .{ + .value = &ptr.value, + .alloc = self.alloc, + }; + } + + std.atomic.spinLoopHint(); + } + } + + /// Decrements the weak reference count, deallocating if it reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn release(self: Weak) void { + return self.asUnmanaged().release(self.alloc); + } + + /// Total size (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. + pub fn innerSize() comptime_int { + return Inner.innerSize(); + } + + /// Alignment (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. + pub fn innerAlign() comptime_int { + return Inner.innerAlign(); + } + + inline fn asUnmanaged(self: Weak) UnmanagedWeak { + return .{ .inner = self.inner }; + } + + inline fn innerPtr(self: Weak) ?*Inner { + return @as(?*Inner, @ptrCast(self.inner)); + } + }; + }; +} + +/// A single threaded, strong reference to a reference-counted value. +pub fn RcUnmanaged(comptime T: type) type { + return RcAlignedUnmanaged(T, null); +} + +/// A single threaded, strong reference to a reference-counted value. 
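// Usage sketch for the unmanaged variants (assumes the module is imported the
// way src/tests.zig imports it): the allocator is not stored, so the caller
// passes the same allocator used in `init` back to `release`.
const std = @import("std");
const zigrc = @import("root.zig");

test "RcUnmanaged sketch" {
    const alloc = std.testing.allocator;
    const counter = try zigrc.RcUnmanaged(u32).init(alloc, 0);
    counter.value.* += 1;
    try std.testing.expectEqual(@as(u32, 1), counter.value.*);
    counter.release(alloc);
}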
+pub fn RcAlignedUnmanaged(comptime T: type, comptime alignment: ?u29) type { + if (alignment) |a| { + if (a == @alignOf(T)) { + return RcAlignedUnmanaged(T, null); + } + } + + return struct { + value: if (alignment) |a| *align(a) T else *T, + + const Self = @This(); + const Inner = struct { + strong: usize, + weak: usize, + value: T align(alignment orelse @alignOf(T)), + + fn innerSize() comptime_int { + return @sizeOf(@This()); + } + + fn innerAlign() comptime_int { + return @alignOf(@This()); + } + }; + + /// Creates a new reference-counted value. + pub fn init(alloc: Allocator, t: T) Allocator.Error!Self { + const inner = try alloc.create(Inner); + inner.* = Inner{ .strong = 1, .weak = 1, .value = t }; + return Self{ .value = &inner.value }; + } + + /// Constructs a new `Rc` while giving you a `Weak` to the allocation, + /// to allow you to construct a `T` which holds a weak pointer to itself. + pub fn initCyclic(alloc: Allocator, comptime data_fn: fn (*Weak) T) Allocator.Error!Self { + const inner = try alloc.create(Inner); + inner.* = Inner{ .strong = 0, .weak = 1, .value = undefined }; + + // Strong references should collectively own a shared weak reference, + // so don't run the destructor for our old weak reference. + var weak = Weak{ .inner = inner, .alloc = alloc }; + + // It's important we don't give up ownership of the weak pointer, or + // else the memory might be freed by the time `data_fn` returns. If + // we really wanted to pass ownership, we could create an additional + // weak pointer for ourselves, but this would result in additional + // updates to the weak reference count which might not be necessary + // otherwise. + inner.value = data_fn(&weak); + + std.debug.assert(inner.strong == 0); + inner.strong = 1; + + return Self{ .value = &inner.value, .alloc = alloc }; + } + + /// Gets the number of strong references to this value. + pub fn strongCount(self: Self) usize { + return self.innerPtr().strong; + } + + /// Gets the number of weak references to this value. + pub fn weakCount(self: Self) usize { + return self.innerPtr().weak - 1; + } + + /// Increments the strong count. + pub fn retain(self: Self) Self { + self.innerPtr().strong += 1; + return self; + } + + /// Creates a new weak reference to the pointed value + pub fn downgrade(self: Self) Weak { + return Weak.init(self); + } + + /// Decrements the reference count, deallocating if the weak count reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn release(self: Self, allocator: Allocator) void { + const ptr = self.innerPtr(); + + ptr.strong -= 1; + if (ptr.strong == 0) { + ptr.weak -= 1; + if (ptr.weak == 0) { + allocator.destroy(ptr); + } + } + } + + /// Decrements the reference count, deallocating the weak count reaches zero, + /// and executing `f` if the strong count reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn releaseWithFn(self: Self, allocator: Allocator, comptime f: fn (T) void) void { + const ptr = self.innerPtr(); + + ptr.strong -= 1; + if (ptr.strong == 0) { + f(self.value.*); + + ptr.weak -= 1; + if (ptr.weak == 0) { + allocator.destroy(ptr); + } + } + } + + /// Returns the inner value, if the `Rc` has exactly one strong reference. + /// Otherwise, `null` is returned. + /// This will succeed even if there are outstanding weak references. + /// The continued use of the pointer if the method successfully returns `T` is undefined behaviour. 
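// Usage sketch for `tryUnwrap` (assumes the module is imported the way
// src/tests.zig imports it): it only succeeds while the strong count is exactly
// one, and the handle must not be used after a successful unwrap.
const std = @import("std");
const zigrc = @import("root.zig");

test "tryUnwrap sketch" {
    const alloc = std.testing.allocator;
    const boxed = try zigrc.RcUnmanaged([]const u8).init(alloc, "hello");
    const extra = boxed.retain();
    try std.testing.expect(extra.tryUnwrap(alloc) == null); // two strong refs: fails
    extra.release(alloc);
    try std.testing.expect(boxed.tryUnwrap(alloc) != null); // sole owner: succeeds and frees
}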
+ pub fn tryUnwrap(self: Self, allocator: Allocator) ?T { + const ptr = self.innerPtr(); + + if (ptr.strong == 1) { + ptr.strong = 0; + const tmp = self.value.*; + + ptr.weak -= 1; + if (ptr.weak == 0) { + allocator.destroy(ptr); + } + + return tmp; + } + + return null; + } + + /// Total size (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. + pub fn innerSize() comptime_int { + return Inner.innerSize(); + } + + /// Alignment (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. + pub fn innerAlign() comptime_int { + return Inner.innerAlign(); + } + + inline fn innerPtr(self: Self) *Inner { + return @alignCast(@fieldParentPtr("value", self.value)); + } + + /// A single threaded, weak reference to a reference-counted value. + pub const Weak = struct { + inner: ?*Inner = null, + + /// Creates a new weak reference. + pub fn init(parent: RcAlignedUnmanaged(T, alignment)) Weak { + const ptr = parent.innerPtr(); + ptr.weak += 1; + return Weak{ .inner = ptr }; + } + + /// Creates a new weak reference object from a pointer to it's underlying value, + /// without increasing the weak count. + pub fn fromValuePtr(value: *T) Weak { + return .{ .inner = @fieldParentPtr("value", value) }; + } + + /// Gets the number of strong references to this value. + pub fn strongCount(self: Weak) usize { + return (self.innerPtr() orelse return 0).strong; + } + + /// Gets the number of weak references to this value. + pub fn weakCount(self: Weak) usize { + const ptr = self.innerPtr() orelse return 1; + if (ptr.strong == 0) { + return ptr.weak; + } else { + return ptr.weak - 1; + } + } + + /// Increments the weak count. + pub fn retain(self: Weak) Weak { + if (self.innerPtr()) |ptr| { + ptr.weak += 1; + } + return self; + } + + /// Attempts to upgrade the weak pointer to an `Rc`, delaying dropping of the inner value if successful. + /// + /// Returns `null` if the inner value has since been dropped. + pub fn upgrade(self: *Weak, allocator: Allocator) ?RcAlignedUnmanaged(T, alignment) { + const ptr = self.innerPtr() orelse return null; + + if (ptr.strong == 0) { + ptr.weak -= 1; + if (ptr.weak == 0) { + allocator.destroy(ptr); + self.inner = null; + } + return null; + } + + ptr.strong += 1; + return RcAlignedUnmanaged(T, alignment){ + .value = &ptr.value, + }; + } + + /// Decrements the weak reference count, deallocating if it reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn release(self: Weak, allocator: Allocator) void { + if (self.innerPtr()) |ptr| { + ptr.weak -= 1; + if (ptr.weak == 0) { + allocator.destroy(ptr); + } + } + } + + /// Total size (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references, + /// and is valid for single and multi-threaded refrence counters. + pub fn innerSize() comptime_int { + return Inner.innerSize(); + } + + /// Alignment (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references, + /// and is valid for single and multi-threaded refrence counters. + pub fn innerAlign() comptime_int { + return Inner.innerAlign(); + } + + inline fn innerPtr(self: Weak) ?*Inner { + return @as(?*Inner, @ptrCast(self.inner)); + } + }; + }; +} + +/// A multi-threaded, strong reference to a reference-counted value. 
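// Usage sketch for the `*Aligned*` variants (assumes the module is imported the
// way src/tests.zig imports it); the 64-byte alignment is only an illustrative choice.
const std = @import("std");
const zigrc = @import("root.zig");

test "aligned Arc sketch" {
    const alloc = std.testing.allocator;
    const aligned = try zigrc.ArcAlignedUnmanaged(u8, 64).init(alloc, 7);
    defer aligned.release(alloc);
    try std.testing.expect(@intFromPtr(aligned.value) % 64 == 0);
}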
+pub fn ArcUnmanaged(comptime T: type) type { + return ArcAlignedUnmanaged(T, null); +} + +/// A multi-threaded, strong reference to a reference-counted value. +pub fn ArcAlignedUnmanaged(comptime T: type, comptime alignment: ?u29) type { + if (builtin.single_threaded) return RcAlignedUnmanaged(T, alignment); + if (alignment) |a| { + if (a == @alignOf(T)) { + return ArcAlignedUnmanaged(T, null); + } + } + + return struct { + value: if (alignment) |a| *align(a) T else *T, + + const Self = @This(); + const Inner = struct { + strong: usize align(std.atomic.cache_line), + weak: usize align(std.atomic.cache_line), + value: T align(alignment orelse @alignOf(T)), + + fn innerSize() comptime_int { + return @sizeOf(@This()); + } + + fn innerAlign() comptime_int { + return @alignOf(@This()); + } + }; + + /// Creates a new reference-counted value. + pub fn init(alloc: Allocator, t: T) Allocator.Error!Self { + const inner = try alloc.create(Inner); + inner.* = Inner{ .strong = 1, .weak = 1, .value = t }; + return Self{ .value = &inner.value }; + } + + /// Constructs a new `Arc` while giving you a `Aweak` to the allocation, + /// to allow you to construct a `T` which holds a weak pointer to itself. + pub fn initCyclic(alloc: Allocator, comptime data_fn: fn (*Weak) T) Allocator.Error!Self { + const inner = try alloc.create(Inner); + inner.* = Inner{ .strong = 0, .weak = 1, .value = undefined }; + + // Strong references should collectively own a shared weak reference, + // so don't run the destructor for our old weak reference. + var weak = Weak{ .inner = inner, .alloc = alloc }; + + // It's important we don't give up ownership of the weak pointer, or + // else the memory might be freed by the time `data_fn` returns. If + // we really wanted to pass ownership, we could create an additional + // weak pointer for ourselves, but this would result in additional + // updates to the weak reference count which might not be necessary + // otherwise. + inner.value = data_fn(&weak); + + std.debug.assert(@atomicRmw(usize, &inner.strong, .Add, 1, .release) == 0); + return Self{ .value = &inner.value, .alloc = alloc }; + } + + /// Gets the number of strong references to this value. + pub fn strongCount(self: Self) usize { + return @atomicLoad(usize, &self.innerPtr().strong, .acquire); + } + + /// Gets the number of weak references to this value. + pub fn weakCount(self: Self) usize { + return @atomicLoad(usize, &self.innerPtr().weak, .acquire) - 1; + } + + /// Increments the strong count. + pub fn retain(self: Self) Self { + _ = @atomicRmw(usize, &self.innerPtr().strong, .Add, 1, .acq_rel); + return self; + } + + /// Creates a new weak reference to the pointed value. + pub fn downgrade(self: Self) Weak { + return Weak.init(self); + } + + /// Decrements the reference count, deallocating if the weak count reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn release(self: Self, allocator: Allocator) void { + const ptr = self.innerPtr(); + + if (@atomicRmw(usize, &ptr.strong, .Sub, 1, .acq_rel) == 1) { + if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { + allocator.destroy(ptr); + } + } + } + + /// Decrements the reference count, deallocating the weak count reaches zero, + /// and executing `f` if the strong count reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. 
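// Usage sketch for `releaseWithFn` on an unmanaged `Arc` (assumes the module is
// imported the way src/tests.zig imports it): the destructor runs once the last
// strong reference is released.
const std = @import("std");
const zigrc = @import("root.zig");

fn deinitList(list: std.ArrayList(u8)) void {
    list.deinit();
}

test "releaseWithFn sketch" {
    const alloc = std.testing.allocator;
    var list = std.ArrayList(u8).init(alloc);
    try list.appendSlice("shared");
    const shared = try zigrc.unmanagedArc(alloc, list);
    shared.releaseWithFn(alloc, deinitList); // frees the list, then the control block
}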
+ pub fn releaseWithFn(self: Self, allocator: Allocator, comptime f: fn (T) void) void { + const ptr = self.innerPtr(); + + if (@atomicRmw(usize, &ptr.strong, .Sub, 1, .acq_rel) == 1) { + f(self.value.*); + if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { + allocator.destroy(ptr); + } + } + } + + /// Returns the inner value, if the `Arc` has exactly one strong reference. + /// Otherwise, `null` is returned. + /// This will succeed even if there are outstanding weak references. + /// The continued use of the pointer if the method successfully returns `T` is undefined behaviour. + pub fn tryUnwrap(self: Self, allocator: Allocator) ?T { + const ptr = self.innerPtr(); + + if (@cmpxchgStrong(usize, &ptr.strong, 1, 0, .monotonic, .monotonic) == null) { + ptr.strong = 0; + const tmp = self.value.*; + if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { + allocator.destroy(ptr); + } + return tmp; + } + + return null; + } + + /// Total size (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. + pub fn innerSize() comptime_int { + return Inner.innerSize(); + } + + /// Alignment (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. + pub fn innerAlign() comptime_int { + return Inner.innerAlign(); + } + + inline fn innerPtr(self: Self) *Inner { + return @alignCast(@fieldParentPtr("value", self.value)); + } + + /// A multi-threaded, weak reference to a reference-counted value. + pub const Weak = struct { + inner: ?*Inner = null, + + /// Creates a new weak reference. + pub fn init(parent: ArcAlignedUnmanaged(T, alignment)) Weak { + const ptr = parent.innerPtr(); + _ = @atomicRmw(usize, &ptr.weak, .Add, 1, .acq_rel); + return Weak{ .inner = ptr }; + } + + /// Creates a new weak reference object from a pointer to it's underlying value, + /// without increasing the weak count. + pub fn fromValuePtr(value: *T) Weak { + return .{ .inner = @fieldParentPtr("value", value) }; + } + + /// Gets the number of strong references to this value. + pub fn strongCount(self: Weak) usize { + const ptr = self.innerPtr() orelse return 0; + return @atomicLoad(usize, &ptr.strong, .acquire); + } + + /// Gets the number of weak references to this value. + pub fn weakCount(self: Weak) usize { + const ptr = self.innerPtr() orelse return 1; + const weak = @atomicLoad(usize, &ptr.weak, .acquire); + + if (@atomicLoad(usize, &ptr.strong, .acquire) == 0) { + return weak; + } else { + return weak - 1; + } + } + + /// Increments the weak count. + pub fn retain(self: Weak) Weak { + if (self.innerPtr()) |ptr| { + _ = @atomicRmw(usize, &ptr.weak, .Add, 1, .acq_rel); + } + return self; + } + + /// Attempts to upgrade the weak pointer to an `Arc`, delaying dropping of the inner value if successful. + /// + /// Returns `null` if the inner value has since been dropped. 
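// Usage sketch for weak references (assumes the module is imported the way
// src/tests.zig imports it): a `Weak` does not keep the value alive, and
// `upgrade` returns null once the last strong reference is gone.
const std = @import("std");
const zigrc = @import("root.zig");

test "weak upgrade sketch" {
    const strong = try zigrc.Rc(u64).init(std.testing.allocator, 10);
    var weak = strong.downgrade();
    if (weak.upgrade()) |again| { // still alive: take a new strong reference
        defer again.release();
        try std.testing.expectEqual(@as(u64, 10), again.value.*);
    }
    strong.release();
    try std.testing.expect(weak.upgrade() == null); // value already dropped
    weak.release();
}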
+ pub fn upgrade(self: *Weak, allocator: Allocator) ?ArcAlignedUnmanaged(T, alignment) { + const ptr = self.innerPtr() orelse return null; + + while (true) { + const prev = @atomicLoad(usize, &ptr.strong, .acquire); + + if (prev == 0) { + if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { + allocator.destroy(ptr); + self.inner = null; + } + return null; + } + + if (@cmpxchgStrong(usize, &ptr.strong, prev, prev + 1, .acquire, .monotonic) == null) { + return ArcAlignedUnmanaged(T, alignment){ + .value = &ptr.value, + }; + } + + std.atomic.spinLoopHint(); + } + } + + /// Decrements the weak reference count, deallocating if it reaches zero. + /// The continued use of the pointer after calling `release` is undefined behaviour. + pub fn release(self: Weak, allocator: Allocator) void { + if (self.innerPtr()) |ptr| { + if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) { + allocator.destroy(ptr); + } + } + } + + /// Total size (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. + pub fn innerSize() comptime_int { + return Inner.innerSize(); + } + + /// Alignment (in bytes) of the reference counted value on the heap. + /// This value accounts for the extra memory required to count the references. + pub fn innerAlign() comptime_int { + return Inner.innerAlign(); + } + + inline fn innerPtr(self: Weak) ?*Inner { + return @as(?*Inner, @ptrCast(self.inner)); + } + }; + }; +} + +/// Creates a new `Rc` inferring the type of `value` +pub fn rc(alloc: Allocator, value: anytype) Allocator.Error!Rc(@TypeOf(value)) { + return Rc(@TypeOf(value)).init(alloc, value); +} + +/// Creates a new `Arc` inferring the type of `value` +pub fn arc(alloc: Allocator, value: anytype) Allocator.Error!Arc(@TypeOf(value)) { + return Arc(@TypeOf(value)).init(alloc, value); +} + +/// Creates a new `Rc` inferring the type of `value` +pub fn unmanagedRc(alloc: Allocator, value: anytype) Allocator.Error!RcUnmanaged(@TypeOf(value)) { + return RcUnmanaged(@TypeOf(value)).init(alloc, value); +} + +/// Creates a new `Arc` inferring the type of `value` +pub fn unmanagedArc(alloc: Allocator, value: anytype) Allocator.Error!ArcUnmanaged(@TypeOf(value)) { + return ArcUnmanaged(@TypeOf(value)).init(alloc, value); +} diff --git a/src/tests.zig b/src/tests.zig index f07ec58..6834a56 100644 --- a/src/tests.zig +++ b/src/tests.zig @@ -1,5 +1,5 @@ const std = @import("std"); -const rc = @import("main.zig"); +const rc = @import("root.zig"); const expect = std.testing.expect; const alloc = std.testing.allocator;
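// Usage sketch for the type-inferring helpers (assumes the module is imported
// the way src/tests.zig imports it).
const std = @import("std");
const zigrc = @import("root.zig");

test "helper constructors sketch" {
    const alloc = std.testing.allocator;

    const a = try zigrc.rc(alloc, @as(i32, 1)); // Rc(i32)
    defer a.release();

    const b = try zigrc.arc(alloc, @as(i32, 2)); // Arc(i32)
    defer b.release();

    const c = try zigrc.unmanagedRc(alloc, @as(i32, 3)); // RcUnmanaged(i32)
    defer c.release(alloc);

    try std.testing.expect(a.value.* + b.value.* == c.value.*);
}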