diff --git a/build.zig b/build.zig index 83d4a6639..8a769ea8c 100644 --- a/build.zig +++ b/build.zig @@ -228,6 +228,19 @@ pub fn build(b: *std.Build) !void { main_step.dependOn(&run.step); } + { + const main_step = b.step("trace-test", "Run trace tests."); + + var opts = getDefaultOptions(); + opts.trackGlobalRc = true; + opts.applyOverrides(); + + const step = try addTraceTest(b, opts); + const run = b.addRunArtifact(step); + run.has_side_effects = no_cache; + main_step.dependOn(&run.step); + } + { const main_step = b.step("test", "Run all tests."); diff --git a/docs/docs.md b/docs/docs.md index 354d9ea82..95613b57e 100644 --- a/docs/docs.md +++ b/docs/docs.md @@ -3470,6 +3470,7 @@ void myNodeFinalizer(CLVM* vm, void* obj) { * [ARC.](#arc) * [Reference counting.](#reference-counting) + * [Object destructor.](#object-destructor) * [Optimizations.](#optimizations) * [Closures.](#closures-1) * [Fibers.](#fibers-1) @@ -3490,7 +3491,17 @@ In Cyber, there are [primitive and object](#basic-types) values. Primitives don' Objects are managed by ARC. Each object has its own reference counter. Upon creating a new object, it receives a reference count of 1. When the object is copied, it's **retained** and the reference count increments by 1. When an object value is removed from it's parent or is no longer reachable in the current stack frame, it is **released** and the reference count decrements by 1. -Once the reference count reaches 0 the object begins its destruction procedure. First, child references are released thereby decrementing their reference counts by 1. If the object is a host object, it will invoke its `finalizer` function. Afterwards, the object is freed from memory. +Once the reference count reaches 0 the object begins its [destruction](#object-destructor) procedure. + +### Object destructor. +An object's destructor invoked from ARC performs the following in order: +1. Release child references thereby decrementing their reference counts by 1. 
If any child reference counts reach 0, their destructors are invoked.
+2. If the object has a finalizer, it's invoked.
+3. The object is freed from memory.
+
+If the destructor is invoked by the GC instead of ARC, cyclable child references are not released in step 1.
+Since objects freed by the GC either belong to a reference cycle or branched from one, the GC will still end up invoking the destructor of all unreachable objects.
+This implies that the destructor order is not reliable, but destructors are guaranteed to be invoked for all unreachable objects.
 
 ### Optimizations.
 The compiler can reduce the number of retain/release ops since it can infer value types even though they are dynamically typed to the user. Arguments passed to functions are only retained depending on the analysis from the callsite.
diff --git a/src/arc.zig b/src/arc.zig
index 2b4eaa96a..7cea2cb32 100644
--- a/src/arc.zig
+++ b/src/arc.zig
@@ -41,7 +41,7 @@ pub fn release(vm: *cy.VM, val: cy.Value) void {
         }
         if (obj.head.rc == 0) {
             // Free children and the object.
- @call(.never_inline, cy.heap.freeObject, .{vm, obj, true, false, true}); + @call(.never_inline, cy.heap.freeObject, .{vm, obj, false}); if (cy.Trace) { if (vm.countFrees) { @@ -97,12 +97,7 @@ pub fn releaseObject(vm: *cy.VM, obj: *cy.HeapObject) void { vm.c.trace.numReleaseAttempts += 1; } if (obj.head.rc == 0) { - @call(.never_inline, cy.heap.freeObject, .{vm, obj, true, false, true}); - if (cy.Trace) { - if (vm.countFrees) { - vm.numFreed += 1; - } - } + @call(.never_inline, cy.heap.freeObject, .{vm, obj, false}); } } @@ -127,9 +122,7 @@ pub inline fn retainObject(self: *cy.VM, obj: *cy.HeapObject) void { obj.head.rc += 1; if (cy.Trace) { checkRetainDanglingPointer(self, obj); - if (cy.TraceRC) { - log.tracevIf(log_mem, "{} +1 retain: {s}", .{obj.head.rc, self.getTypeName(obj.getTypeId())}); - } + log.tracevIf(log_mem, "{} +1 retain: {s}", .{obj.head.rc, self.getTypeName(obj.getTypeId())}); } if (cy.TrackGlobalRC) { self.c.refCounts += 1; @@ -172,9 +165,7 @@ pub inline fn retain(self: *cy.VM, val: cy.Value) void { const obj = val.asHeapObject(); if (cy.Trace) { checkRetainDanglingPointer(self, obj); - if (cy.TraceRC) { - log.tracev("{} +1 retain: {s}", .{obj.head.rc, self.getTypeName(obj.getTypeId())}); - } + log.tracevIf(log_mem, "{} +1 retain: {s}", .{obj.head.rc, self.getTypeName(obj.getTypeId())}); } obj.head.rc += 1; if (cy.TrackGlobalRC) { @@ -194,9 +185,7 @@ pub inline fn retainInc(self: *cy.VM, val: cy.Value, inc: u32) void { const obj = val.asHeapObject(); if (cy.Trace) { checkRetainDanglingPointer(self, obj); - if (cy.TraceRC) { - log.tracev("{} +{} retain: {s}", .{obj.head.rc, inc, self.getTypeName(obj.getTypeId())}); - } + log.tracevIf(log_mem, "{} +{} retain: {s}", .{obj.head.rc, inc, self.getTypeName(obj.getTypeId())}); } obj.head.rc += inc; if (cy.TrackGlobalRC) { @@ -252,17 +241,9 @@ fn performMark(vm: *cy.VM) !void { fn performSweep(vm: *cy.VM) !c.GCResult { log.tracev("Perform sweep.", .{}); // Collect cyc nodes and release their children 
(child cyc nodes are skipped). - if (cy.Trace) { - vm.countFrees = true; - vm.numFreed = 0; - } - defer { - if (cy.Trace) { - vm.countFrees = false; - } - } - var cycObjs: std.ArrayListUnmanaged(*cy.HeapObject) = .{}; - defer cycObjs.deinit(vm.alloc); + // TODO: Report `num_freed` after flattening recursive release. + const num_freed: u32 = 0; + var num_cyc_freed: u32 = 0; log.tracev("Sweep heap pages.", .{}); for (vm.heapPages.items()) |page| { @@ -270,9 +251,16 @@ fn performSweep(vm: *cy.VM) !c.GCResult { while (i < page.objects.len) { const obj = &page.objects[i]; if (obj.freeSpan.typeId != cy.NullId) { - if (obj.isGcConfirmedCyc()) { - try cycObjs.append(vm.alloc, obj); - cy.heap.freeObject(vm, obj, true, true, false); + if (obj.isNoMarkCyc()) { + log.tracev("gc free: {s}, rc={}", .{vm.getTypeName(obj.getTypeId()), obj.head.rc}); + if (cy.Trace) { + checkDoubleFree(vm, obj); + } + if (cy.TrackGlobalRC) { + vm.c.refCounts -= obj.head.rc; + } + cy.heap.freeObject(vm, obj, true); + num_cyc_freed += 1; } else if (obj.isGcMarked()) { obj.resetGcMarked(); } @@ -288,37 +276,32 @@ fn performSweep(vm: *cy.VM) !c.GCResult { log.tracev("Sweep non-pool cyc nodes.", .{}); var mbNode: ?*cy.heap.DListNode = vm.cyclableHead; while (mbNode) |node| { + // Obtain next before node is freed. + mbNode = node.next; + const obj = node.getHeapObject(); - if (obj.isGcConfirmedCyc()) { - try cycObjs.append(vm.alloc, obj); - cy.heap.freeObject(vm, obj, true, true, false); + if (obj.isNoMarkCyc()) { + log.tracev("gc free: {s}, rc={}", .{vm.getTypeName(obj.getTypeId()), obj.head.rc}); + if (cy.Trace) { + checkDoubleFree(vm, obj); + } + if (cy.TrackGlobalRC) { + vm.c.refCounts -= obj.head.rc; + } + cy.heap.freeObject(vm, obj, true); + num_cyc_freed += 1; } else if (obj.isGcMarked()) { obj.resetGcMarked(); } - mbNode = node.next; - } - - // Free cyc nodes. 
- for (cycObjs.items) |obj| { - log.tracev("cyc free: {s}, rc={}", .{vm.getTypeName(obj.getTypeId()), obj.head.rc}); - if (cy.Trace) { - checkDoubleFree(vm, obj); - } - if (cy.TrackGlobalRC) { - vm.c.refCounts -= obj.head.rc; - } - // No need to bother with their refcounts. - cy.heap.freeObject(vm, obj, false, false, true); } if (cy.Trace) { - vm.c.trace.numCycFrees += @intCast(cycObjs.items.len); - vm.numFreed += @intCast(cycObjs.items.len); + vm.c.trace.numCycFrees += num_cyc_freed; } const res = c.GCResult{ - .numCycFreed = @intCast(cycObjs.items.len), - .numObjFreed = if (cy.Trace) vm.numFreed else 0, + .numCycFreed = num_cyc_freed, + .numObjFreed = num_freed, }; log.tracev("gc result: num cyc {}, num obj {}", .{res.numCycFreed, res.numObjFreed}); return res; @@ -396,7 +379,7 @@ fn markValue(vm: *cy.VM, v: cy.Value) void { } }, bt.MapIter => { - markValue(vm, cy.Value.initNoCycPtr(obj.mapIter.map)); + markValue(vm, obj.mapIter.map); }, bt.Closure => { const vals = obj.closure.getCapturedValuesPtr()[0..obj.closure.numCaptured]; diff --git a/src/builtins/bindings.zig b/src/builtins/bindings.zig index b262ff100..564d40723 100644 --- a/src/builtins/bindings.zig +++ b/src/builtins/bindings.zig @@ -419,12 +419,12 @@ pub fn listResize(vm: *cy.VM, args: [*]const Value, _: u8) Value { pub fn mapIterator(vm: *cy.VM, args: [*]const Value, _: u8) Value { const obj = args[0].asHeapObject(); vm.retainObject(obj); - return vm.allocMapIterator(&obj.map) catch fatal(); + return vm.allocMapIterator(args[0]) catch fatal(); } pub fn mapIteratorNext(vm: *cy.VM, args: [*]const Value, _: u8) Value { const obj = args[0].asHeapObject(); - const map: *cy.ValueMap = @ptrCast(&obj.mapIter.map.inner); + const map: *cy.ValueMap = @ptrCast(&obj.mapIter.map.castHeapObject(*cy.heap.Map).inner); if (map.next(&obj.mapIter.nextIdx)) |entry| { vm.retain(entry.key); vm.retain(entry.value); diff --git a/src/heap.zig b/src/heap.zig index 01bee09d0..464c19a47 100644 --- a/src/heap.zig +++ 
b/src/heap.zig @@ -161,7 +161,7 @@ pub const HeapObject = extern union { self.head.typeId = self.head.typeId & ~vmc.GC_MARK_MASK; } - pub inline fn isGcConfirmedCyc(self: *HeapObject) bool { + pub inline fn isNoMarkCyc(self: *HeapObject) bool { return (self.head.typeId & vmc.GC_MARK_CYC_TYPE_MASK) == vmc.CYC_TYPE_MASK; } @@ -354,7 +354,7 @@ pub const Map = extern struct { pub const MapIterator = extern struct { typeId: cy.TypeId, rc: u32, - map: *Map, + map: Value, nextIdx: u32, }; @@ -817,9 +817,7 @@ pub fn allocExternalObject(vm: *cy.VM, size: usize, comptime cyclable: bool) !*H if (cy.Malloc == .zig) { @as(*u64, @ptrCast(slice.ptr + PayloadSize - ZigLenSize)).* = size; } - if (cy.TraceRC) { - cy.arc.log.tracev("0 +1 alloc external object: {*}", .{slice.ptr + PayloadSize}); - } + cy.arc.log.tracevIf(log_mem, "0 +1 alloc external object: {*}", .{slice.ptr + PayloadSize}); if (cy.TrackGlobalRC) { vm.c.refCounts += 1; } @@ -848,9 +846,7 @@ pub fn allocPoolObject(self: *cy.VM) !*HeapObject { self.heapFreeTail = self.heapFreeHead; } } - if (cy.TraceRC) { - cy.arc.log.tracev("0 +1 alloc pool object: {*}", .{ptr}); - } + cy.arc.log.tracevIf(log_mem, "0 +1 alloc pool object: {*}", .{ptr}); if (cy.TrackGlobalRC) { self.c.refCounts += 1; } @@ -1242,7 +1238,7 @@ pub fn allocMap(self: *cy.VM, keyIdxs: []const align(1) u16, vals: []const Value } /// Assumes map is already retained for the iterator. -pub fn allocMapIterator(self: *cy.VM, map: *Map) !Value { +pub fn allocMapIterator(self: *cy.VM, map: Value) !Value { const obj = try allocPoolObject(self); obj.mapIter = .{ .typeId = bt.MapIter | vmc.CYC_TYPE_MASK, @@ -1888,11 +1884,10 @@ pub fn allocFuncFromSym(self: *cy.VM, func: rt.FuncSymbol) !Value { } } +/// `skip_cyc_children` is important for reducing the work for the GC mark/sweep. /// Use comptime options to keep closely related logic together. /// TODO: flatten recursion. 
-pub fn freeObject(vm: *cy.VM, obj: *HeapObject, - comptime releaseChildren: bool, comptime skipCycChildren: bool, comptime free: bool, -) void { +pub fn freeObject(vm: *cy.VM, obj: *HeapObject, comptime skip_cyc_children: bool) void { if (cy.Trace) { if (obj.isFreed()) { cy.panicFmt("Double free object: {*} Should have been discovered in release op.", .{obj}); @@ -1906,245 +1901,175 @@ pub fn freeObject(vm: *cy.VM, obj: *HeapObject, const typeId = obj.getTypeId(); switch (typeId) { bt.Tuple => { - if (releaseChildren) { - for (obj.tuple.getElemsPtr()[0..obj.tuple.len]) |it| { - if (skipCycChildren and it.isGcConfirmedCyc()) { - continue; - } - cy.arc.release(vm, it); + for (obj.tuple.getElemsPtr()[0..obj.tuple.len]) |it| { + if (skip_cyc_children and it.isCycPointer()) { + continue; } + cy.arc.release(vm, it); } - if (free) { - if (obj.tuple.len <= 3) { - freePoolObject(vm, obj); - } else { - freeExternalObject(vm, obj, (2 + obj.tuple.len) * @sizeOf(Value), true); - } + if (obj.tuple.len <= 3) { + freePoolObject(vm, obj); + } else { + freeExternalObject(vm, obj, (2 + obj.tuple.len) * @sizeOf(Value), true); } }, bt.Range => { - if (free) { - freePoolObject(vm, obj); - } + freePoolObject(vm, obj); }, bt.Map => { const map = cy.ptrAlignCast(*MapInner, &obj.map.inner); - if (releaseChildren) { - var iter = map.iterator(); - while (iter.next()) |entry| { - if (skipCycChildren) { - if (!entry.key.isGcConfirmedCyc()) { - cy.arc.release(vm, entry.key); - } - if (!entry.value.isGcConfirmedCyc()) { - cy.arc.release(vm, entry.value); - } - } else { + var iter = map.iterator(); + while (iter.next()) |entry| { + if (skip_cyc_children) { + if (!entry.key.isCycPointer()) { cy.arc.release(vm, entry.key); + } + if (!entry.value.isCycPointer()) { cy.arc.release(vm, entry.value); } + } else { + cy.arc.release(vm, entry.key); + cy.arc.release(vm, entry.value); } } - if (free) { - map.deinit(vm.alloc); - freePoolObject(vm, obj); - } + map.deinit(vm.alloc); + freePoolObject(vm, obj); 
}, bt.MapIter => { - if (releaseChildren) { - if (skipCycChildren) { - if (!@as(*HeapObject, @ptrCast(@alignCast(obj.mapIter.map))).isGcConfirmedCyc()) { - cy.arc.releaseObject(vm, cy.ptrAlignCast(*HeapObject, obj.mapIter.map)); - } - } else { - cy.arc.releaseObject(vm, cy.ptrAlignCast(*HeapObject, obj.mapIter.map)); - } - } - if (free) { - freePoolObject(vm, obj); + if (!skip_cyc_children or !obj.mapIter.map.isCycPointer()) { + cy.arc.release(vm, obj.mapIter.map); } + freePoolObject(vm, obj); }, bt.Closure => { - if (releaseChildren) { - const src = obj.closure.getCapturedValuesPtr()[0..obj.closure.numCaptured]; - for (src) |capturedVal| { - if (skipCycChildren and capturedVal.isGcConfirmedCyc()) { - continue; - } - cy.arc.release(vm, capturedVal); + const src = obj.closure.getCapturedValuesPtr()[0..obj.closure.numCaptured]; + for (src) |capturedVal| { + if (skip_cyc_children and capturedVal.isCycPointer()) { + continue; } + cy.arc.release(vm, capturedVal); } - if (free) { - if (obj.closure.numCaptured <= 2) { - freePoolObject(vm, obj); - } else { - freeExternalObject(vm, obj, (3 + obj.closure.numCaptured) * @sizeOf(Value), true); - } + if (obj.closure.numCaptured <= 2) { + freePoolObject(vm, obj); + } else { + freeExternalObject(vm, obj, (3 + obj.closure.numCaptured) * @sizeOf(Value), true); } }, bt.Lambda => { - if (free) { - freePoolObject(vm, obj); - } + freePoolObject(vm, obj); }, bt.String => { switch (obj.string.getType()) { .astring => { - if (free) { - const len = obj.string.len(); - if (len <= DefaultStringInternMaxByteLen) { - - // Check both the key and value to make sure this object is the intern entry. - // TODO: Use a flag bit instead of a map query. - const key = obj.astring.getSlice(); - if (vm.strInterns.get(key)) |val| { - if (val == obj) { - _ = vm.strInterns.remove(key); - } + const len = obj.string.len(); + if (len <= DefaultStringInternMaxByteLen) { + + // Check both the key and value to make sure this object is the intern entry. 
+ // TODO: Use a flag bit instead of a map query. + const key = obj.astring.getSlice(); + if (vm.strInterns.get(key)) |val| { + if (val == obj) { + _ = vm.strInterns.remove(key); } } - if (len <= MaxPoolObjectAstringByteLen) { - freePoolObject(vm, obj); - } else { - freeExternalObject(vm, obj, Astring.BufOffset + len, false); - } + } + if (len <= MaxPoolObjectAstringByteLen) { + freePoolObject(vm, obj); + } else { + freeExternalObject(vm, obj, Astring.BufOffset + len, false); } }, .ustring => { - if (free) { - const len = obj.string.len(); - if (len <= DefaultStringInternMaxByteLen) { - const key = obj.ustring.getSlice(); - if (vm.strInterns.get(key)) |val| { - if (val == obj) { - _ = vm.strInterns.remove(key); - } + const len = obj.string.len(); + if (len <= DefaultStringInternMaxByteLen) { + const key = obj.ustring.getSlice(); + if (vm.strInterns.get(key)) |val| { + if (val == obj) { + _ = vm.strInterns.remove(key); } } - if (len <= MaxPoolObjectUstringByteLen) { - freePoolObject(vm, obj); - } else { - freeExternalObject(vm, obj, Ustring.BufOffset + len, false); - } + } + if (len <= MaxPoolObjectUstringByteLen) { + freePoolObject(vm, obj); + } else { + freeExternalObject(vm, obj, Ustring.BufOffset + len, false); } }, .aslice => { - if (releaseChildren) { - if (obj.aslice.parent) |parent| { - cy.arc.releaseObject(vm, parent); - } - } - if (free) { - freePoolObject(vm, obj); + if (obj.aslice.parent) |parent| { + cy.arc.releaseObject(vm, parent); } + freePoolObject(vm, obj); }, .uslice => { - if (releaseChildren) { - if (obj.uslice.parent) |parent| { - cy.arc.releaseObject(vm, parent); - } - } - if (free) { - freePoolObject(vm, obj); + if (obj.uslice.parent) |parent| { + cy.arc.releaseObject(vm, parent); } + freePoolObject(vm, obj); }, } }, bt.Array => { if (obj.array.isSlice()) { - if (releaseChildren) { - if (obj.arraySlice.getParentPtr()) |parent| { - cy.arc.releaseObject(vm, parent); - } - } - if (free) { - freePoolObject(vm, obj); + if 
(obj.arraySlice.getParentPtr()) |parent| { + cy.arc.releaseObject(vm, parent); } + freePoolObject(vm, obj); } else { - if (free) { - const len = obj.array.len(); - if (len <= MaxPoolObjectArrayByteLen) { - freePoolObject(vm, obj); - } else { - freeExternalObject(vm, obj, Array.BufOffset + len, false); - } + const len = obj.array.len(); + if (len <= MaxPoolObjectArrayByteLen) { + freePoolObject(vm, obj); + } else { + freeExternalObject(vm, obj, Array.BufOffset + len, false); } } }, bt.Fiber => { - if (releaseChildren) { - const fiber: *vmc.Fiber = @ptrCast(obj); - // TODO: isCyc. - cy.fiber.releaseFiberStack(vm, fiber) catch |err| { - cy.panicFmt("release fiber: {}", .{err}); - }; - } - if (free) { - cy.fiber.freeFiberPanic(vm, @ptrCast(obj)); - freeExternalObject(vm, obj, @sizeOf(vmc.Fiber), true); - } + const fiber: *vmc.Fiber = @ptrCast(obj); + // TODO: isCyc. + cy.fiber.releaseFiberStack(vm, fiber) catch |err| { + cy.panicFmt("release fiber: {}", .{err}); + }; + cy.fiber.freeFiberPanic(vm, @ptrCast(obj)); + freeExternalObject(vm, obj, @sizeOf(vmc.Fiber), true); }, bt.Box => { - if (releaseChildren) { - if (skipCycChildren) { - if (!obj.box.val.isGcConfirmedCyc()) { - cy.arc.release(vm, obj.box.val); - } - } else { - cy.arc.release(vm, obj.box.val); - } - } - if (free) { - freePoolObject(vm, obj); + if (!skip_cyc_children or !obj.box.val.isCycPointer()) { + cy.arc.release(vm, obj.box.val); } + freePoolObject(vm, obj); }, bt.ExternFunc => { - if (releaseChildren) { - cy.arc.releaseObject(vm, obj.externFunc.tccState.asHeapObject()); - cy.arc.releaseObject(vm, obj.externFunc.func.asHeapObject()); - } - if (free) { - freePoolObject(vm, obj); - } + cy.arc.releaseObject(vm, obj.externFunc.tccState.asHeapObject()); + cy.arc.releaseObject(vm, obj.externFunc.func.asHeapObject()); + freePoolObject(vm, obj); }, bt.HostFunc => { - if (releaseChildren) { - if (obj.hostFunc.hasTccState) { - cy.arc.releaseObject(vm, obj.hostFunc.tccState.asHeapObject()); - } - } - if (free) { - 
freePoolObject(vm, obj); + if (obj.hostFunc.hasTccState) { + cy.arc.releaseObject(vm, obj.hostFunc.tccState.asHeapObject()); } + freePoolObject(vm, obj); }, bt.TccState => { if (cy.hasFFI) { - if (free) { - tcc.tcc_delete(obj.tccState.state); - if (obj.tccState.hasDynLib) { - obj.tccState.lib.close(); - vm.alloc.destroy(obj.tccState.lib); - } - freePoolObject(vm, obj); + tcc.tcc_delete(obj.tccState.state); + if (obj.tccState.hasDynLib) { + obj.tccState.lib.close(); + vm.alloc.destroy(obj.tccState.lib); } + freePoolObject(vm, obj); } else { unreachable; } }, bt.Pointer => { - if (free) { - freePoolObject(vm, obj); - } + freePoolObject(vm, obj); }, bt.Type => { - if (free) { - freePoolObject(vm, obj); - } + freePoolObject(vm, obj); }, bt.MetaType => { - if (free) { - freePoolObject(vm, obj); - } + freePoolObject(vm, obj); }, else => { if (cy.Trace) { @@ -2161,105 +2086,83 @@ pub fn freeObject(vm: *cy.VM, obj: *HeapObject, const entry = vm.c.types[typeId]; switch (entry.kind) { .option => { - if (releaseChildren) { - const child = obj.object.getValuesConstPtr()[1]; - if (!skipCycChildren or !child.isGcConfirmedCyc()) { - cy.arc.release(vm, child); - } - } - if (free) { - freePoolObject(vm, obj); + const child = obj.object.getValuesConstPtr()[1]; + if (!skip_cyc_children or !child.isCycPointer()) { + cy.arc.release(vm, child); } + freePoolObject(vm, obj); }, .@"struct" => { const numFields = entry.data.@"struct".numFields; - if (releaseChildren) { - for (obj.object.getValuesConstPtr()[0..numFields]) |child| { - if (skipCycChildren and child.isGcConfirmedCyc()) { - continue; - } - cy.arc.release(vm, child); + for (obj.object.getValuesConstPtr()[0..numFields]) |child| { + if (skip_cyc_children and child.isCycPointer()) { + continue; } + cy.arc.release(vm, child); } - if (free) { - if (numFields <= 4) { - freePoolObject(vm, obj); - } else { - freeExternalObject(vm, obj, (1 + numFields) * @sizeOf(Value), true); - } + if (numFields <= 4) { + freePoolObject(vm, obj); + } 
else { + freeExternalObject(vm, obj, (1 + numFields) * @sizeOf(Value), true); } }, .object => { const numFields = entry.data.object.numFields; - if (releaseChildren) { - for (obj.object.getValuesConstPtr()[0..numFields]) |child| { - if (skipCycChildren and child.isGcConfirmedCyc()) { - continue; - } - cy.arc.release(vm, child); + for (obj.object.getValuesConstPtr()[0..numFields]) |child| { + if (skip_cyc_children and child.isCycPointer()) { + continue; } + cy.arc.release(vm, child); } - if (free) { - if (numFields <= 4) { - freePoolObject(vm, obj); - } else { - freeExternalObject(vm, obj, (1 + numFields) * @sizeOf(Value), true); - } + if (numFields <= 4) { + freePoolObject(vm, obj); + } else { + freeExternalObject(vm, obj, (1 + numFields) * @sizeOf(Value), true); } }, .choice => { - if (releaseChildren) { - const value = obj.object.getValuesConstPtr()[1]; - if (skipCycChildren and value.isGcConfirmedCyc()) { - // nop. - } else { - cy.arc.release(vm, value); - } - } - if (free) { - freePoolObject(vm, obj); + const value = obj.object.getValuesConstPtr()[1]; + if (!skip_cyc_children or !value.isCycPointer()) { + cy.arc.release(vm, value); } + freePoolObject(vm, obj); }, .custom => { - if (releaseChildren) { - if (entry.info.custom_pre) { - if (entry.data.custom.finalizerFn) |finalizer| { - finalizer(@ptrCast(vm), @ptrFromInt(@intFromPtr(obj) + 8)); - } + if (entry.info.custom_pre) { + if (entry.data.custom.finalizerFn) |finalizer| { + finalizer(@ptrCast(vm), @ptrFromInt(@intFromPtr(obj) + 8)); } - if (entry.data.custom.getChildrenFn) |getChildren| { - const children = getChildren(@ptrCast(vm), @ptrFromInt(@intFromPtr(obj) + 8)); - for (Value.fromSliceC(children)) |child| { - if (skipCycChildren and child.isGcConfirmedCyc()) { - continue; - } - cy.arc.release(vm, child); + } + if (entry.data.custom.getChildrenFn) |getChildren| { + const children = getChildren(@ptrCast(vm), @ptrFromInt(@intFromPtr(obj) + 8)); + for (Value.fromSliceC(children)) |child| { + if 
(skip_cyc_children and child.isCycPointer()) { + continue; } + cy.arc.release(vm, child); } } - if (free) { - if (!entry.info.custom_pre) { - if (entry.data.custom.finalizerFn) |finalizer| { - finalizer(@ptrCast(vm), @ptrFromInt(@intFromPtr(obj) + 8)); - } + if (!entry.info.custom_pre) { + if (entry.data.custom.finalizerFn) |finalizer| { + finalizer(@ptrCast(vm), @ptrFromInt(@intFromPtr(obj) + 8)); } - if (!obj.isExternalObject()) { - freePoolObject(vm, obj); + } + if (!obj.isExternalObject()) { + freePoolObject(vm, obj); + } else { + if (obj.isCyclable()) { + if (cy.Malloc == .zig) { + const size = (@as([*]u64, @ptrCast(obj)) - 1)[0]; + freeExternalObject(vm, obj, @intCast(size), true); + } else { + freeExternalObject(vm, obj, 1, true); + } } else { - if (obj.isCyclable()) { - if (cy.Malloc == .zig) { - const size = (@as([*]u64, @ptrCast(obj)) - 1)[0]; - freeExternalObject(vm, obj, @intCast(size), true); - } else { - freeExternalObject(vm, obj, 1, true); - } + if (cy.Malloc == .zig) { + const size = (@as([*]u64, @ptrCast(obj)) - 1)[0]; + freeExternalObject(vm, obj, @intCast(size), false); } else { - if (cy.Malloc == .zig) { - const size = (@as([*]u64, @ptrCast(obj)) - 1)[0]; - freeExternalObject(vm, obj, @intCast(size), false); - } else { - freeExternalObject(vm, obj, 1, false); - } + freeExternalObject(vm, obj, 1, false); } } } diff --git a/src/string.zig b/src/string.zig index 9ebc30814..70f618325 100644 --- a/src/string.zig +++ b/src/string.zig @@ -227,7 +227,7 @@ pub const HeapArrayBuilder = struct { // Free pool object. 
oldObj.array.headerAndLen = self.len; - cy.heap.freeObject(self.vm, oldObj, true, false, true); + cy.heap.freeObject(self.vm, oldObj, false); } } diff --git a/src/vm.zig b/src/vm.zig index f62fd8269..db3aa16d2 100644 --- a/src/vm.zig +++ b/src/vm.zig @@ -2704,7 +2704,7 @@ fn zDumpEvalOp(vm: *VM, pc: [*]const cy.Inst) callconv(.C) void { } pub fn zFreeObject(vm: *cy.VM, obj: *HeapObject) callconv(.C) void { - cy.heap.freeObject(vm, obj, true, false, true); + cy.heap.freeObject(vm, obj, false); } fn zEnd(vm: *cy.VM, pc: [*]const cy.Inst) callconv(.C) void {