diff --git a/build.zig b/build.zig
index e220e795f..52d23e329 100644
--- a/build.zig
+++ b/build.zig
@@ -283,6 +283,7 @@ pub const Options = struct {
     target: std.zig.CrossTarget,
     optimize: std.builtin.OptimizeMode,
     useMimalloc: bool,
+    gc: bool,
 };
 
 // deps: struct {
@@ -296,6 +297,7 @@ fn getDefaultOptions(target: std.zig.CrossTarget, optimize: std.builtin.Optimize
         .trace = trace,
         .target = target,
         .optimize = optimize,
+        .gc = true,
 
         // Use mimalloc for fast builds.
         .useMimalloc = optimize == .ReleaseFast and !target.getCpuArch().isWasm(),
@@ -328,6 +330,7 @@ fn createBuildOptions(b: *std.build.Builder, opts: Options) !*std.build.Step.Opt
     build_options.addOption(bool, "trace", opts.trace);
     build_options.addOption(bool, "trackGlobalRC", opts.trackGlobalRc);
     build_options.addOption(bool, "is32Bit", is32Bit(opts.target));
+    build_options.addOption(bool, "gc", opts.gc);
     build_options.addOption([]const u8, "full_version", b.fmt("Cyber {s} build-{s}-{s}", .{Version, buildTag, commitTag}));
     return build_options;
 }
@@ -440,6 +443,11 @@ pub fn buildCVM(alloc: std.mem.Allocator, step: *std.build.CompileStep, opts: Op
     } else {
         try cflags.append("-DIS_32BIT=0");
     }
+    if (opts.gc) {
+        try cflags.append("-DHAS_GC=1");
+    } else {
+        try cflags.append("-DHAS_GC=0");
+    }
 
     // Disable traps for arithmetic/bitshift overflows.
     // Note that changing this alone doesn't clear the build cache.
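The `gc` flag flows into both halves of the VM: Zig code reads it as `build_options.gc` (re-exported below as `cy.hasGC` in src/cyber.zig), and the C VM sees it as `-DHAS_GC`. The defaults above always set `.gc = true`; a minimal sketch of how the switch could additionally be exposed on the command line — the `-Dgc` wiring is an assumption, not part of this diff:

    // Sketch: let users toggle the cycle detector at build time with -Dgc=false.
    const gc = b.option(bool, "gc", "Enable the cycle-detecting GC.") orelse true;
    var opts = getDefaultOptions(target, optimize);
    opts.gc = gc;
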
{}\n", &.{cy.fmt.v(obj.getTypeId())}); cy.fatal(); } } @@ -33,7 +38,11 @@ pub fn release(vm: *cy.VM, val: cy.Value) linksection(cy.HotSection) void { vm.trace.numReleases += 1; } if (obj.head.rc == 0) { - @call(.never_inline, cy.heap.freeObject, .{vm, obj}); + // Free children and the object. + @call(.never_inline, cy.heap.freeObject, .{vm, obj, true, false, true, trackFrees}); + if (trackFrees) { + numObjectsFreed += 1; + } } } else { log.tracev("release: {}, nop", .{val.getUserTag()}); @@ -41,7 +50,7 @@ pub fn release(vm: *cy.VM, val: cy.Value) linksection(cy.HotSection) void { } pub fn isObjectAlreadyFreed(vm: *cy.VM, obj: *cy.HeapObject) bool { - if (obj.head.typeId == cy.NullId) { + if (obj.isFreed()) { // Can check structId for pool objects since they are still in memory. return true; } @@ -68,6 +77,10 @@ pub fn checkDoubleFree(vm: *cy.VM, obj: *cy.HeapObject) void { } pub fn releaseObject(vm: *cy.VM, obj: *cy.HeapObject) linksection(cy.HotSection) void { + releaseObjectExt(vm, obj, false); +} + +pub fn releaseObjectExt(vm: *cy.VM, obj: *cy.HeapObject, comptime trackFrees: bool) linksection(cy.HotSection) void { if (cy.Trace) { checkDoubleFree(vm, obj); } @@ -81,7 +94,10 @@ pub fn releaseObject(vm: *cy.VM, obj: *cy.HeapObject) linksection(cy.HotSection) vm.trace.numReleaseAttempts += 1; } if (obj.head.rc == 0) { - @call(.never_inline, cy.heap.freeObject, .{vm, obj}); + @call(.never_inline, cy.heap.freeObject, .{vm, obj, true, false, true, trackFrees}); + if (trackFrees) { + numObjectsFreed += 1; + } } } @@ -167,33 +183,6 @@ pub inline fn retainInc(self: *cy.VM, val: cy.Value, inc: u32) linksection(cy.Ho } } -pub fn forceRelease(self: *cy.VM, obj: *cy.HeapObject) void { - if (cy.Trace) { - self.trace.numForceReleases += 1; - } - switch (obj.head.typeId) { - rt.ListT => { - const list = cy.ptrAlignCast(*cy.List(cy.Value), &obj.list.list); - list.deinit(self.alloc); - cy.heap.freePoolObject(self, obj); - if (cy.TrackGlobalRC) { - self.refCounts -= obj.head.rc; - } - }, - rt.MapT => { - const map = cy.ptrAlignCast(*cy.MapInner, &obj.map.inner); - map.deinit(self.alloc); - cy.heap.freePoolObject(self, obj); - if (cy.TrackGlobalRC) { - self.refCounts -= obj.head.rc; - } - }, - else => { - return cy.panic("unsupported struct type"); - }, - } -} - pub fn getGlobalRC(self: *const cy.VM) usize { if (cy.TrackGlobalRC) { return self.refCounts; @@ -202,78 +191,227 @@ pub fn getGlobalRC(self: *const cy.VM) usize { } } -/// Performs an iteration over the heap pages to check whether there are retain cycles. -pub fn checkMemory(self: *cy.VM) !bool { - var nodes: std.AutoHashMapUnmanaged(*cy.HeapObject, RcNode) = .{}; - defer nodes.deinit(self.alloc); - - var cycleRoots: std.ArrayListUnmanaged(*cy.HeapObject) = .{}; - defer cycleRoots.deinit(self.alloc); - - // No concept of root vars yet. Just report any existing retained objects. - // First construct the graph. - for (self.heapPages.items()) |page| { - for (page.objects[1..]) |*obj| { - if (obj.head.typeId != cy.NullId) { - try nodes.put(self.alloc, obj, .{ - .visited = false, - .entered = false, - }); - } +const GCResult = struct { + numCycFreed: u32, + numObjFreed: u32, +}; + +/// Mark-sweep leveraging refcounts and deals only with cyclable objects. +/// 1. Looks at all root nodes from the stack and globals. +/// Traverse the children and sets the mark flag to true. +/// Only cyclable objects that aren't marked are visited. +/// 2. Sweep iterates all cyclable objects. 
+/// If the mark flag is not set: +/// - the object's children are released exlcuding confirmed child cyc objects. +/// - the object is queued to be freed later. +/// If the mark flag is set, reset the flag for the next gc run. +/// TODO: Allocate using separate pages for cyclable and non-cyclable objects, +/// so only cyclable objects are iterated. +pub fn performGC(vm: *cy.VM) !GCResult { + log.tracev("Run gc.", .{}); + try performMark(vm); + + // Make sure dummy node has mark bit. + cy.vm.dummyCyclableHead.typeId = vmc.GC_MARK_MASK | rt.NoneT; + + return try performSweep(vm); +} + +fn performMark(vm: *cy.VM) !void { + try markMainStackRoots(vm); + + // Mark globals. + for (vm.varSyms.items()) |sym| { + if (sym.value.isCycPointer()) { + markValue(vm, sym.value); } } - const S = struct { - fn visit(alloc: std.mem.Allocator, graph: *std.AutoHashMapUnmanaged(*cy.HeapObject, RcNode), cycleRoots_: *std.ArrayListUnmanaged(*cy.HeapObject), obj: *cy.HeapObject, node: *RcNode) bool { - if (node.visited) { - return false; - } - if (node.entered) { - return true; - } - node.entered = true; - - switch (obj.head.typeId) { - rt.ListT => { - const list = cy.ptrAlignCast(*cy.List(cy.Value), &obj.list.list); - for (list.items()) |it| { - if (it.isPointer()) { - const ptr = it.asHeapObject(); - if (visit(alloc, graph, cycleRoots_, ptr, graph.getPtr(ptr).?)) { - cycleRoots_.append(alloc, obj) catch cy.fatal(); - return true; - } - } - } - }, - else => { - }, +} + +var numObjectsFreed: u32 = 0; + +fn performSweep(vm: *cy.VM) !GCResult { + // Collect cyc nodes and release their children (child cyc nodes are skipped). + numObjectsFreed = 0; + var cycObjs: std.ArrayListUnmanaged(*cy.HeapObject) = .{}; + defer cycObjs.deinit(vm.alloc); + for (vm.heapPages.items()) |page| { + var i: u32 = 1; + while (i < page.objects.len) { + const obj = &page.objects[i]; + if (obj.freeSpan.typeId != cy.NullId) { + if (obj.isGcConfirmedCyc()) { + try cycObjs.append(vm.alloc, obj); + cy.heap.freeObject(vm, obj, true, true, false, true); + } else if (obj.isGcMarked()) { + obj.resetGcMarked(); + } + i += 1; + } else { + // Freespan, skip to end. + i += obj.freeSpan.len; } - node.entered = false; - node.visited = true; - return false; } + } + + // Traverse non-pool cyc nodes. + var mbNode: ?*cy.heap.DListNode = vm.cyclableHead; + while (mbNode) |node| { + const obj = node.getHeapObject(); + if (obj.isGcConfirmedCyc()) { + try cycObjs.append(vm.alloc, obj); + cy.heap.freeObject(vm, obj, true, true, false, true); + } else if (obj.isGcMarked()) { + obj.resetGcMarked(); + } + mbNode = node.next; + } + + // Free cyc nodes. + for (cycObjs.items) |obj| { + log.tracev("cyc free: {s}, rc={}", .{vm.getTypeName(obj.getTypeId()), obj.head.rc}); + if (cy.Trace) { + checkDoubleFree(vm, obj); + } + if (cy.TrackGlobalRC) { + vm.refCounts -= obj.head.rc; + } + // No need to bother with their refcounts. 
+ cy.heap.freeObject(vm, obj, false, false, true, false); + } + + if (cy.Trace) { + vm.trace.numCycFrees += @intCast(cycObjs.items.len); + } + + return GCResult{ + .numCycFreed = @intCast(cycObjs.items.len), + .numObjFreed = @intCast(cycObjs.items.len + numObjectsFreed), }; - var iter = nodes.iterator(); - while (iter.next()) |*entry| { - if (S.visit(self.alloc, &nodes, &cycleRoots, entry.key_ptr.*, entry.value_ptr)) { - if (cy.Trace) { - self.trace.numRetainCycles = 1; - self.trace.numRetainCycleRoots = @intCast(cycleRoots.items.len); +} + +fn markMainStackRoots(vm: *cy.VM) !void { + if (vm.pc[0].opcode() == .end) { + return; + } + + var pcOff = cy.fiber.getInstOffset(vm.ops.ptr, vm.pc); + var fpOff = cy.fiber.getStackOffset(vm.stack.ptr, vm.framePtr); + + while (true) { + const symIdx = cy.debug.indexOfDebugSym(vm, pcOff) orelse return error.NoDebugSym; + const sym = cy.debug.getDebugSymByIndex(vm, symIdx); + const tempIdx = cy.debug.getDebugTempIndex(vm, symIdx); + const endLocalsPc = cy.debug.debugSymToEndLocalsPc(vm, sym); + log.debug("mark frame: {} {} {} {}", .{pcOff, vm.ops[pcOff].opcode(), tempIdx, endLocalsPc}); + + if (tempIdx != cy.NullId) { + const fp = vm.stack.ptr + fpOff; + log.tracev("mark temps", .{}); + var curIdx = tempIdx; + while (curIdx != cy.NullId) { + log.tracev("mark reg: {}", .{vm.unwindTempRegs[curIdx]}); + const v = fp[vm.unwindTempRegs[curIdx]]; + if (v.isCycPointer()) { + markValue(vm, v); + } + curIdx = vm.unwindTempPrevIndexes[curIdx]; } - for (cycleRoots.items) |root| { - // Force release. - forceRelease(self, root); + } + + if (endLocalsPc != cy.NullId) { + if (vm.ops[endLocalsPc].opcode() == .releaseN) { + const numLocals = vm.ops[endLocalsPc+1].val; + for (vm.ops[endLocalsPc+2..endLocalsPc+2+numLocals]) |local| { + const v = vm.stack[fpOff + local.val]; + if (v.isCycPointer()) { + markValue(vm, v); + } + } } - return false; + } + if (fpOff == 0) { + // Done, at main block. + return; + } else { + // Unwind. + pcOff = cy.fiber.getInstOffset(vm.ops.ptr, vm.stack[fpOff + 2].retPcPtr) - vm.stack[fpOff + 1].retInfoCallInstOffset(); + fpOff = cy.fiber.getStackOffset(vm.stack.ptr, vm.stack[fpOff + 3].retFramePtr); } } - return true; } -const RcNode = struct { - visited: bool, - entered: bool, -}; +/// Assumes `v` is a cyclable pointer. +fn markValue(vm: *cy.VM, v: cy.Value) void { + const obj = v.asHeapObject(); + if (!obj.isGcMarked()) { + obj.setGcMarked(); + } else { + // Already marked. + return; + } + // Visit children. + const typeId = obj.getTypeId(); + switch (typeId) { + rt.ListT => { + const items = obj.list.items(); + for (items) |it| { + if (it.isCycPointer()) { + markValue(vm, it); + } + } + }, + rt.MapT => { + const map = obj.map.map(); + var iter = map.iterator(); + while (iter.next()) |entry| { + if (entry.key.isCycPointer()) { + markValue(vm, entry.key); + } + if (entry.value.isCycPointer()) { + markValue(vm, entry.value); + } + } + }, + rt.ListIteratorT => { + markValue(vm, cy.Value.initPtr(obj.listIter.list)); + }, + rt.MapIteratorT => { + markValue(vm, cy.Value.initPtr(obj.mapIter.map)); + }, + rt.ClosureT => { + const vals = obj.closure.getCapturedValuesPtr()[0..obj.closure.numCaptured]; + for (vals) |val| { + // TODO: Can this be assumed to always be a Box value? + if (val.isCycPointer()) { + markValue(vm, val); + } + } + }, + rt.BoxT => { + if (obj.box.val.isCycPointer()) { + markValue(vm, obj.box.val); + } + }, + rt.FiberT => { + // TODO: Visit other fiber stacks. 
+ }, + else => { + if (typeId < rt.NumBuiltinTypes) { + // Skip, non-cyclable object. + } else { + // User type. + const numMembers = vm.types.buf[typeId].numFields; + const members = obj.object.getValuesConstPtr()[0..numMembers]; + for (members) |m| { + if (m.isCycPointer()) { + markValue(vm, m); + } + } + } + }, + } +} pub fn checkGlobalRC(vm: *cy.VM) !void { const rc = getGlobalRC(vm); @@ -286,7 +424,7 @@ pub fn checkGlobalRC(vm: *cy.VM) !void { while (iter.next()) |it| { const trace = it.value_ptr.*; if (trace.freePc == cy.NullId) { - const typeName = vm.getTypeName(it.key_ptr.*.head.typeId); + const typeName = vm.getTypeName(it.key_ptr.*.getTypeId()); const msg = try std.fmt.bufPrint(&buf, "Init alloc: {*}, type: {s}, rc: {} at pc: {}\nval={s}", .{ it.key_ptr.*, typeName, it.key_ptr.*.head.rc, trace.allocPc, vm.valueToTempString(cy.Value.initPtr(it.key_ptr.*)), diff --git a/src/builtins/bindings.zig b/src/builtins/bindings.zig index fcbe8300a..b7009e77a 100644 --- a/src/builtins/bindings.zig +++ b/src/builtins/bindings.zig @@ -2191,7 +2191,7 @@ pub fn fileReadToEnd(vm: *cy.UserVM, recv: Value, _: [*]const Value, _: u8) link pub fn fileOrDirStat(vm: *cy.UserVM, recv: Value, _: [*]const Value, _: u8) linksection(StdSection) Value { const obj = recv.asHeapObject(); - if (obj.head.typeId == rt.FileT) { + if (obj.getTypeId() == rt.FileT) { if (obj.file.closed) { return prepareThrowSymbol(vm, .Closed); } diff --git a/src/builtins/core.zig b/src/builtins/core.zig index 798b58a83..60f99939b 100644 --- a/src/builtins/core.zig +++ b/src/builtins/core.zig @@ -60,6 +60,7 @@ pub fn initModule(self: *cy.VMcompiler, modId: cy.ModuleId) anyerror!void { try b.setFunc("panic", &.{ bt.Any }, bt.None, panic); try b.setFunc("parseCyber", &.{ bt.Any }, bt.Map, parseCyber); try b.setFunc("parseCyon", &.{ bt.Any }, bt.Any, parseCyon); + try b.setFunc("performGC", &.{}, bt.Map, performGC); try b.setFunc("print", &.{bt.Any}, bt.None, print); try b.setFunc("prints", &.{bt.Any}, bt.None, prints); if (cy.hasStdFiles) { @@ -161,9 +162,6 @@ pub fn copy(vm: *cy.UserVM, args: [*]const Value, _: u8) linksection(cy.StdSecti pub fn errorReport(vm: *cy.UserVM, _: [*]const Value, _: u8) linksection(cy.StdSection) Value { const ivm = vm.internal(); - // Unwind the fp to before the function call. 
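The mark and cyclable flags live in the high bits of the object's `typeId` word, which is why `getTypeId` (added to HeapObject in src/heap.zig below) masks with `vmc.TYPE_MASK`. The mask constants come from vm_c.zig and are not shown in this diff; a sketch with assumed bit positions that satisfies the identities the code relies on — `GC_MARK_CYC_TYPE_MASK` covering both flags, and "confirmed cyc" meaning cyclable but unmarked:

    // Assumed layout: bit 31 = GC mark, bit 30 = cyclable, low bits = type id.
    const GC_MARK_MASK: u32 = 0x80000000;
    const CYC_TYPE_MASK: u32 = 0x40000000;
    const GC_MARK_CYC_TYPE_MASK: u32 = GC_MARK_MASK | CYC_TYPE_MASK;
    const TYPE_MASK: u32 = 0x3fffffff;

    fn isGcConfirmedCyc(typeId: u32) bool {
        // Cyclable bit set, mark bit clear: not reachable from any root.
        return (typeId & GC_MARK_CYC_TYPE_MASK) == CYC_TYPE_MASK;
    }
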
diff --git a/src/builtins/bindings.zig b/src/builtins/bindings.zig
index fcbe8300a..b7009e77a 100644
--- a/src/builtins/bindings.zig
+++ b/src/builtins/bindings.zig
@@ -2191,7 +2191,7 @@ pub fn fileReadToEnd(vm: *cy.UserVM, recv: Value, _: [*]const Value, _: u8) link
 
 pub fn fileOrDirStat(vm: *cy.UserVM, recv: Value, _: [*]const Value, _: u8) linksection(StdSection) Value {
     const obj = recv.asHeapObject();
-    if (obj.head.typeId == rt.FileT) {
+    if (obj.getTypeId() == rt.FileT) {
         if (obj.file.closed) {
             return prepareThrowSymbol(vm, .Closed);
         }
diff --git a/src/builtins/core.zig b/src/builtins/core.zig
index 798b58a83..60f99939b 100644
--- a/src/builtins/core.zig
+++ b/src/builtins/core.zig
@@ -60,6 +60,7 @@ pub fn initModule(self: *cy.VMcompiler, modId: cy.ModuleId) anyerror!void {
     try b.setFunc("panic", &.{ bt.Any }, bt.None, panic);
     try b.setFunc("parseCyber", &.{ bt.Any }, bt.Map, parseCyber);
     try b.setFunc("parseCyon", &.{ bt.Any }, bt.Any, parseCyon);
+    try b.setFunc("performGC", &.{}, bt.Map, performGC);
     try b.setFunc("print", &.{bt.Any}, bt.None, print);
     try b.setFunc("prints", &.{bt.Any}, bt.None, prints);
     if (cy.hasStdFiles) {
@@ -161,9 +162,6 @@ pub fn copy(vm: *cy.UserVM, args: [*]const Value, _: u8) linksection(cy.StdSecti
 pub fn errorReport(vm: *cy.UserVM, _: [*]const Value, _: u8) linksection(cy.StdSection) Value {
     const ivm = vm.internal();
 
-    // Unwind the fp to before the function call.
-    ivm.framePtr -= 4;
-
     cy.debug.buildStackTrace(ivm) catch |err| {
         log.debug("unexpected {}", .{err});
         fatal();
@@ -587,6 +585,20 @@ fn stdMapPut(vm: *cy.UserVM, obj: *cy.HeapObject, key: Value, value: Value) void
     map.put(vm.allocator(), ivm, key, value) catch cy.fatal();
 }
 
+pub fn performGC(vm: *cy.UserVM, _: [*]const Value, _: u8) linksection(cy.StdSection) Value {
+    const res = cy.arc.performGC(vm.internal()) catch cy.fatal();
+    const map = vm.allocEmptyMap() catch cy.fatal();
+    const cycKey = vm.allocAstring("numCycFreed") catch cy.fatal();
+    const objKey = vm.allocAstring("numObjFreed") catch cy.fatal();
+    defer {
+        vm.release(cycKey);
+        vm.release(objKey);
+    }
+    map.asHeapObject().map.set(vm.internal(), cycKey, Value.initInt(@intCast(res.numCycFreed))) catch cy.fatal();
+    map.asHeapObject().map.set(vm.internal(), objKey, Value.initInt(@intCast(res.numObjFreed))) catch cy.fatal();
+    return map;
+}
+
 pub fn print(vm: *cy.UserVM, args: [*]const Value, _: u8) linksection(cy.StdSection) Value {
     const str = vm.valueToTempRawString(args[0]);
     if (cy.isWasmFreestanding) {
diff --git a/src/builtins/os_ffi.zig b/src/builtins/os_ffi.zig
index 74f31a0b3..6d6feaba6 100644
--- a/src/builtins/os_ffi.zig
+++ b/src/builtins/os_ffi.zig
@@ -477,7 +477,7 @@ pub fn bindLib(vm: *cy.UserVM, args: [*]const Value, config: BindLibConfig) !Val
         \\#define int16_t short
         \\#define uint16_t unsigned short
         \\#define uint32_t unsigned int
-        \\#define PointerMask 0xFFFC000000000000
+        \\#define PointerMask 0xFFFE000000000000
         \\typedef struct UserVM *UserVM;
         \\extern void icyRelease(UserVM*, uint64_t);
         \\extern void* icyGetPtr(uint64_t);
diff --git a/src/cyber.zig b/src/cyber.zig
index 92563413c..8980ffcde 100644
--- a/src/cyber.zig
+++ b/src/cyber.zig
@@ -155,6 +155,7 @@ pub const isWasmFreestanding = isWasm and builtin.os.tag == .freestanding;
 pub const is32Bit = build_options.is32Bit;
 pub const hasJit = !isWasm;
 pub const hasStdFiles = !isWasm;
+pub const hasGC = build_options.gc;
 
 const build_options = @import("build_options");
 pub const Trace = build_options.trace;
diff --git a/src/fiber.zig b/src/fiber.zig
index ad75f0893..92ecbe55e 100644
--- a/src/fiber.zig
+++ b/src/fiber.zig
@@ -41,10 +41,10 @@ pub fn allocFiber(vm: *cy.VM, pc: usize, args: []const cy.Value, initialStackSiz
     // Assumes call start local is at 1.
     std.mem.copy(Value, stack[5..5+args.len], args);
 
-    const obj: *vmc.Fiber = @ptrCast(try cy.heap.allocExternalObject(vm, @sizeOf(vmc.Fiber)));
+    const obj: *vmc.Fiber = @ptrCast(try cy.heap.allocExternalObject(vm, @sizeOf(vmc.Fiber), true));
     const parentDstLocal = cy.NullU8;
     obj.* = .{
-        .typeId = rt.FiberT,
+        .typeId = rt.FiberT | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .stackPtr = @ptrCast(stack.ptr),
         .stackLen = @intCast(stack.len),
@@ -63,7 +63,7 @@ pub fn allocFiber(vm: *cy.VM, pc: usize, args: []const cy.Value, initialStackSiz
         .prevFiber = undefined,
     };
 
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 /// Since this is called from a coresume expression, the fiber should already be retained.
@@ -192,7 +192,7 @@ pub fn releaseFiberStack(vm: *cy.VM, fiber: *cy.Fiber) !void {
 /// TODO: See if releaseFiberStack can reuse the same code.
 pub fn unwindReleaseStack(vm: *cy.VM, stack: []const Value, startFramePtr: [*]const Value, startPc: [*]const cy.Inst) !void {
     var pcOffset = getInstOffset(vm.ops.ptr, startPc);
-    var fpOffset = getStackOffset(vm.stack.ptr, startFramePtr);
+    var fpOffset = getStackOffset(stack.ptr, startFramePtr);
 
     while (true) {
         const symIdx = cy.debug.indexOfDebugSym(vm, pcOffset) orelse return error.NoDebugSym;
@@ -202,7 +202,9 @@ pub fn unwindReleaseStack(vm: *cy.VM, stack: []const Value, startFramePtr: [*]co
         log.debug("release frame: {} {} {} {}", .{pcOffset, vm.ops[pcOffset].opcode(), tempIdx, endLocalsPc});
 
         // Release temporaries in the current frame.
-        cy.arc.runTempReleaseOps(vm, vm.stack.ptr + fpOffset, tempIdx);
+        if (tempIdx != cy.NullId) {
+            cy.arc.runTempReleaseOps(vm, vm.stack.ptr + fpOffset, tempIdx);
+        }
 
         if (endLocalsPc != cy.NullId) {
             cy.arc.runBlockEndReleaseOps(vm, stack, fpOffset, endLocalsPc);
@@ -257,6 +259,13 @@ pub fn unwindThrowUntilFramePtr(vm: *cy.VM, startFp: [*]const Value, pc: [*]cons
         const tempIdx = cy.debug.getDebugTempIndex(vm, symIdx);
         log.debug("release temps: {} {}, {}", .{pcOffset, vm.ops[pcOffset].opcode(), tempIdx});
         cy.arc.runTempReleaseOps(vm, vm.stack.ptr + fpOffset, tempIdx);
+
+        // Frame inside the try block.
+        const sym = cy.debug.getDebugSymByIndex(vm, symIdx);
+        try vm.throwTrace.append(vm.alloc, .{
+            .pcOffset = sym.pc,
+            .fpOffset = fpOffset,
+        });
     }
 
 pub fn throw(vm: *cy.VM, startFp: [*]Value, pc: [*]const cy.Inst, err: Value) !?PcSp {
diff --git a/src/heap.zig b/src/heap.zig
index b04d95224..7d48fe266 100644
--- a/src/heap.zig
+++ b/src/heap.zig
@@ -122,8 +122,36 @@ pub const HeapObject = extern union {
     pointer: Pointer,
     metatype: MetaType,
 
+    pub inline fn getTypeId(self: HeapObject) u32 {
+        return self.head.typeId & vmc.TYPE_MASK;
+    }
+
+    pub inline fn isFreed(self: HeapObject) bool {
+        return self.head.typeId == cy.NullId;
+    }
+
+    pub inline fn isGcMarked(self: HeapObject) bool {
+        return (self.head.typeId & vmc.GC_MARK_MASK) == vmc.GC_MARK_MASK;
+    }
+
+    pub inline fn setGcMarked(self: *HeapObject) void {
+        self.head.typeId = self.head.typeId | vmc.GC_MARK_MASK;
+    }
+
+    pub inline fn resetGcMarked(self: *HeapObject) void {
+        self.head.typeId = self.head.typeId & ~vmc.GC_MARK_MASK;
+    }
+
+    pub inline fn isGcConfirmedCyc(self: *HeapObject) bool {
+        return (self.head.typeId & vmc.GC_MARK_CYC_TYPE_MASK) == vmc.CYC_TYPE_MASK;
+    }
+
+    pub inline fn getDListNode(self: *HeapObject) *DListNode {
+        return @ptrCast(@as([*]DListNode, @ptrCast(self)) - 1);
+    }
+
     pub fn getUserTag(self: *const HeapObject) cy.ValueUserTag {
-        switch (self.head.typeId) {
+        switch (self.getTypeId()) {
             rt.ListT => return .list,
             rt.MapT => return .map,
             rt.AstringT => return .string,
@@ -159,7 +187,7 @@ pub const MetaType = extern struct {
 };
 
 pub const List = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     list: extern struct {
         ptr: [*]Value,
@@ -193,7 +221,7 @@ pub const List = extern struct {
 };
 
 pub const ListIterator = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     list: *List,
     nextIdx: u32,
@@ -201,7 +229,7 @@ pub const ListIterator = extern struct {
 
 pub const MapInner = cy.ValueMap;
 pub const Map = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     inner: extern struct {
         metadata: ?[*]u64,
@@ -233,14 +261,14 @@ pub const Map = extern struct {
 };
 
 pub const MapIterator = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     map: *Map,
     nextIdx: u32,
 };
 
 pub const Closure = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     funcPc: u32,
     numParams: u8,
@@ -259,7 +287,7 @@ pub const Closure = extern struct {
 };
 
 const Lambda = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     funcPc: u32,
     numParams: u8,
@@ -272,7 +300,7 @@ const Lambda = extern struct {
 
 pub const MaxPoolObjectAstringByteLen = 28;
 
 pub const Astring = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     len: u32,
     bufStart: u8,
@@ -292,7 +320,7 @@ pub const Astring = extern struct {
 
 pub const MaxPoolObjectUstringByteLen = 16;
 
 const Ustring = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     len: u32,
     charLen: u32,
@@ -328,7 +356,7 @@ pub const StringSlice = struct {
 
 pub const MaxPoolObjectRawStringByteLen = 28;
 
 pub const RawString = extern struct {
-    structId: if (cy.isWasm) rt.TypeId else rt.TypeId align(8),
+    typeId: if (cy.isWasm) rt.TypeId else rt.TypeId align(8),
     rc: u32,
     len: u32,
     bufStart: u8,
@@ -345,7 +373,7 @@ pub const RawString = extern struct {
 };
 
 const RawStringSlice = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     buf: [*]const u8,
     len: u32,
@@ -358,7 +386,7 @@ const RawStringSlice = extern struct {
 };
 
 pub const Object = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     firstValue: Value,
@@ -380,13 +408,13 @@ pub const Object = extern struct {
 };
 
 const Box = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     val: Value,
 };
 
 const NativeFunc1 = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     func: *const fn (*cy.UserVM, [*]const Value, u8) Value,
     numParams: u32,
@@ -396,14 +424,14 @@ const NativeFunc1 = extern struct {
 };
 
 const TccState = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     state: *tcc.TCCState,
     lib: *std.DynLib,
 };
 
 const File = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     readBuf: [*]u8,
     /// Can be up to 8 bytes on windows, otherwise 4 bytes.
@@ -433,7 +461,7 @@ const File = extern struct {
 };
 
 pub const DirIterator = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     dir: *Dir,
     inner: extern union {
@@ -445,7 +473,7 @@ pub const DirIterator = extern struct {
 };
 
 pub const Dir = extern struct {
-    structId: rt.TypeId align(8),
+    typeId: rt.TypeId align(8),
     rc: u32,
     /// Padding to make Dir.fd match the offset of File.fd.
     padding: usize = 0,
@@ -477,7 +505,7 @@ pub const Dir = extern struct {
 };
 
 pub const Pointer = extern struct {
-    structId: rt.TypeId,
+    typeId: rt.TypeId,
     rc: u32,
     ptr: ?*anyopaque,
 };
@@ -535,14 +563,37 @@ const HeapObjectList = struct {
     tail: *HeapObject,
 };
 
-pub fn allocExternalObject(vm: *cy.VM, size: usize) !*HeapObject {
+pub const DListNode = extern struct {
+    prev: ?*DListNode,
+    next: ?*DListNode,
+
+    pub fn getHeapObject(self: *DListNode) *HeapObject {
+        return @ptrCast(@alignCast(@as([*]DListNode, @ptrCast(self)) + 1));
+    }
+};
+
+pub fn allocExternalObject(vm: *cy.VM, size: usize, comptime cyclable: bool) !*HeapObject {
     // Align with HeapObject so it can be cast.
-    const slice = try vm.alloc.alignedAlloc(u8, @alignOf(HeapObject), size);
+    const addToCyclableList = comptime (cy.hasGC and cyclable);
+    const slice = try vm.alloc.alignedAlloc(u8, @alignOf(HeapObject), if (addToCyclableList) size + @sizeOf(DListNode) else size);
     defer {
         if (cy.Trace) {
-            cy.heap.traceAlloc(vm, @ptrCast(slice.ptr));
+            if (addToCyclableList) {
+                cy.heap.traceAlloc(vm, @ptrCast(slice.ptr + @sizeOf(DListNode)));
+            } else {
+                cy.heap.traceAlloc(vm, @ptrCast(slice.ptr));
+            }
         }
     }
+    if (addToCyclableList) {
+        const node: *DListNode = @ptrCast(slice.ptr);
+        vm.cyclableHead.prev = node;
+        node.* = .{
+            .prev = null,
+            .next = vm.cyclableHead,
+        };
+        vm.cyclableHead = node;
+    }
     if (cy.TrackGlobalRC) {
         vm.refCounts += 1;
     }
@@ -550,7 +601,11 @@ pub fn allocExternalObject(vm: *cy.VM, size: usize) !*HeapObject {
         vm.trace.numRetains += 1;
         vm.trace.numRetainAttempts += 1;
     }
-    return @ptrCast(slice.ptr);
+    if (addToCyclableList) {
+        return @ptrCast(slice.ptr + @sizeOf(DListNode));
+    } else {
+        return @ptrCast(slice.ptr);
+    }
 }
 
 /// Assumes new object will have an RC = 1.
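When a cyclable object lives outside the pool pages, the sweep can't find it by scanning pages, so the allocator above threads it onto an intrusive doubly-linked list. The node is placed directly before the object header, which is what makes `HeapObject.getDListNode` (one pointer subtraction) and `DListNode.getHeapObject` (one pointer addition) inverses of each other. The layout implied by the alloc/free code:

    // [ DListNode { prev, next } ][ HeapObject header | payload ... ]
    //   ^ slice.ptr                ^ pointer returned to callers
    //
    // freeExternalObject (below) must therefore rewind by @sizeOf(DListNode)
    // to recover the original allocation before freeing it.
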
@@ -598,30 +653,53 @@ pub fn allocPoolObject(self: *cy.VM) linksection(cy.HotSection) !*HeapObject {
     }
 }
 
-fn freeExternalObject(vm: *cy.VM, obj: *HeapObject, len: usize) void {
+fn freeExternalObject(vm: *cy.VM, obj: *HeapObject, len: usize, comptime cyclable: bool) void {
+    // Unlink.
+    if (cy.hasGC) {
+        if (cyclable) {
+            const node = obj.getDListNode();
+            if (node.prev) |prev| {
+                prev.next = node.next;
+                if (node.next) |next| {
+                    next.prev = prev;
+                }
+            } else {
+                // It's the head.
+                vm.cyclableHead = node.next.?;
+                node.next.?.prev = null;
+            }
+        }
+    }
     if (cy.Trace) {
         if (vm.objectTraceMap.getPtr(obj)) |trace| {
             trace.freePc = vm.debugPc;
-            trace.freeTypeId = obj.head.typeId;
+            trace.freeTypeId = obj.getTypeId();
         } else {
-            log.debug("Missing object trace {*} {}", .{obj, obj.head.typeId});
+            log.debug("Missing object trace {*} {}", .{obj, obj.getTypeId()});
         }
     }
-    const slice = @as([*]align(@alignOf(HeapObject)) u8, @ptrCast(obj))[0..len];
-    vm.alloc.free(slice);
+    if (cy.hasGC and cyclable) {
+        const slice = (@as([*]align(@alignOf(HeapObject)) u8, @ptrCast(obj)) - @sizeOf(DListNode))[0..len + @sizeOf(DListNode)];
+        vm.alloc.free(slice);
+    } else {
+        const slice = @as([*]align(@alignOf(HeapObject)) u8, @ptrCast(obj))[0..len];
+        vm.alloc.free(slice);
+    }
 }
 
+/// typeId should be cleared in trace mode since tracking may still hold a reference to the object.
+/// The gc also needs it for now to traverse objects in pages.
 pub fn freePoolObject(vm: *cy.VM, obj: *HeapObject) linksection(cy.HotSection) void {
     if (cy.Trace) {
         if (vm.objectTraceMap.getPtr(obj)) |trace| {
             trace.freePc = vm.debugPc;
-            trace.freeTypeId = obj.head.typeId;
+            trace.freeTypeId = obj.getTypeId();
         } else {
-            log.debug("Missing object trace {*} {}", .{obj, obj.head.typeId});
+            log.debug("Missing object trace {*} {}", .{obj, obj.getTypeId()});
         }
     }
     const prev = &(@as([*]HeapObject, @ptrCast(obj)) - 1)[0];
-    if (prev.head.typeId == NullId) {
+    if (prev.freeSpan.typeId == NullId) {
         // Left is a free span. Extend length.
         prev.freeSpan.start.freeSpan.len += 1;
         obj.freeSpan.start = prev.freeSpan.start;
@@ -651,6 +729,7 @@ pub fn freePoolObject(vm: *cy.VM, obj: *HeapObject) linksection(cy.HotSection) v
             .start = obj,
             .next = vm.heapFreeHead,
         };
+        // Update MRU free list head.
         vm.heapFreeHead = obj;
     }
 }
@@ -670,7 +749,7 @@ pub fn allocMetaType(self: *cy.VM, symType: u8, symId: u32) !Value {
 
 pub fn allocEmptyList(self: *cy.VM) linksection(cy.Section) !Value {
     const obj = try allocPoolObject(self);
     obj.list = .{
-        .structId = rt.ListT,
+        .typeId = rt.ListT | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .list = .{
             .ptr = undefined,
@@ -678,13 +757,13 @@ pub fn allocEmptyList(self: *cy.VM) linksection(cy.Section) !Value {
             .cap = 0,
         },
     };
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocOwnedList(self: *cy.VM, elems: []Value) !Value {
     const obj = try allocPoolObject(self);
     obj.list = .{
-        .structId = rt.ListT,
+        .typeId = rt.ListT | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .list = .{
             .ptr = elems.ptr,
@@ -692,13 +771,13 @@ pub fn allocOwnedList(self: *cy.VM, elems: []Value) !Value {
             .cap = elems.len,
         },
     };
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocListFill(self: *cy.VM, val: Value, n: u32) linksection(cy.StdSection) !Value {
     const obj = try allocPoolObject(self);
     obj.list = .{
-        .structId = rt.ListT,
+        .typeId = rt.ListT | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .list = .{
             .ptr = undefined,
@@ -718,13 +797,13 @@ pub fn allocListFill(self: *cy.VM, val: Value, n: u32) linksection(cy.StdSection
             list.buf[i] = cy.value.shallowCopy(self, val);
         }
     }
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocList(self: *cy.VM, elems: []const Value) linksection(cy.HotSection) !Value {
     const obj = try allocPoolObject(self);
     obj.list = .{
-        .structId = rt.ListT,
+        .typeId = rt.ListT | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .list = .{
             .ptr = undefined,
@@ -737,25 +816,25 @@ pub fn allocList(self: *cy.VM, elems: []const Value) linksection(cy.HotSection)
     try list.ensureTotalCapacityPrecise(self.alloc, elems.len);
     list.len = elems.len;
     std.mem.copy(Value, list.items(), elems);
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 /// Assumes list is already retained for the iterator.
 pub fn allocListIterator(self: *cy.VM, list: *List) linksection(cy.HotSection) !Value {
     const obj = try allocPoolObject(self);
     obj.listIter = .{
-        .structId = rt.ListIteratorT,
+        .typeId = rt.ListIteratorT | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .list = list,
         .nextIdx = 0,
     };
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocEmptyMap(self: *cy.VM) !Value {
     const obj = try allocPoolObject(self);
     obj.map = .{
-        .structId = rt.MapT,
+        .typeId = rt.MapT | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .inner = .{
             .metadata = null,
@@ -765,13 +844,13 @@ pub fn allocEmptyMap(self: *cy.VM) !Value {
             .available = 0,
         },
     };
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocMap(self: *cy.VM, keyIdxs: []const align(1) u16, vals: []const Value) !Value {
     const obj = try allocPoolObject(self);
     obj.map = .{
-        .structId = rt.MapT,
+        .typeId = rt.MapT | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .inner = .{
             .metadata = null,
@@ -796,19 +875,19 @@ pub fn allocMap(self: *cy.VM, keyIdxs: []const align(1) u16, vals: []const Value
         }
     }
 
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 /// Assumes map is already retained for the iterator.
 pub fn allocMapIterator(self: *cy.VM, map: *Map) linksection(cy.HotSection) !Value {
     const obj = try allocPoolObject(self);
     obj.mapIter = .{
-        .structId = rt.MapIteratorT,
+        .typeId = rt.MapIteratorT | vmc.CYC_TYPE_MASK,
        .rc = 1,
         .map = map,
         .nextIdx = 0,
     };
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 /// Captured values are retained during alloc.
@@ -823,7 +902,7 @@ pub fn allocClosure(
         obj = try allocExternalObject(self, (2 + capturedVals.len) * @sizeOf(Value));
     }
     obj.closure = .{
-        .structId = rt.ClosureT,
+        .typeId = rt.ClosureT | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .funcPc = @intCast(funcPc),
         .numParams = numParams,
@@ -843,13 +922,13 @@ pub fn allocClosure(
         cy.arc.retain(self, fp[local.val]);
         dst[i] = fp[local.val];
     }
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocLambda(self: *cy.VM, funcPc: usize, numParams: u8, stackSize: u8, funcSigId: u16) !Value {
     const obj = try allocPoolObject(self);
     obj.lambda = .{
-        .structId = rt.LambdaT,
+        .typeId = rt.LambdaT,
         .rc = 1,
         .funcPc = @intCast(funcPc),
         .numParams = numParams,
@@ -888,10 +967,10 @@ pub fn allocRawStringConcat(self: *cy.VM, str: []const u8, str2: []const u8) lin
     if (len <= MaxPoolObjectRawStringByteLen) {
         obj = try allocPoolObject(self);
     } else {
-        obj = try allocExternalObject(self, len + RawString.BufOffset);
+        obj = try allocExternalObject(self, len + RawString.BufOffset, false);
     }
     obj.rawstring = .{
-        .structId = rt.RawstringT,
+        .typeId = rt.RawstringT,
         .rc = 1,
         .len = len,
         .bufStart = undefined,
@@ -1082,10 +1161,10 @@ pub fn allocUnsetAstringObject(self: *cy.VM, len: usize) linksection(cy.Section)
     if (len <= MaxPoolObjectAstringByteLen) {
         obj = try allocPoolObject(self);
     } else {
-        obj = try allocExternalObject(self, len + Astring.BufOffset);
+        obj = try allocExternalObject(self, len + Astring.BufOffset, false);
     }
     obj.astring = .{
-        .structId = rt.AstringT,
+        .typeId = rt.AstringT,
         .rc = 1,
         .len = @intCast(len),
         .bufStart = undefined,
@@ -1146,10 +1225,10 @@ pub fn allocUnsetUstringObject(self: *cy.VM, len: usize, charLen: u32) linksecti
     if (len <= MaxPoolObjectUstringByteLen) {
         obj = try allocPoolObject(self);
     } else {
-        obj = try allocExternalObject(self, len + Ustring.BufOffset);
+        obj = try allocExternalObject(self, len + Ustring.BufOffset, false);
     }
     obj.ustring = .{
-        .structId = rt.UstringT,
+        .typeId = rt.UstringT,
         .rc = 1,
         .len = @intCast(len),
         .charLen = charLen,
@@ -1195,10 +1274,10 @@ pub fn allocUnsetRawStringObject(self: *cy.VM, len: usize) linksection(cy.Sectio
     if (len <= MaxPoolObjectRawStringByteLen) {
         obj = try allocPoolObject(self);
     } else {
-        obj = try allocExternalObject(self, len + RawString.BufOffset);
+        obj = try allocExternalObject(self, len + RawString.BufOffset, false);
     }
     obj.rawstring = .{
-        .structId = rt.RawstringT,
+        .typeId = rt.RawstringT,
         .rc = 1,
         .len = @intCast(len),
         .bufStart = undefined,
@@ -1216,7 +1295,7 @@ pub fn allocRawString(self: *cy.VM, str: []const u8) linksection(cy.Section) !Va
 pub fn allocRawStringSlice(self: *cy.VM, slice: []const u8, parent: *HeapObject) !Value {
     const obj = try allocPoolObject(self);
     obj.rawstringSlice = .{
-        .structId = rt.RawstringSliceT,
+        .typeId = rt.RawstringSliceT,
         .rc = 1,
         .buf = slice.ptr,
         .len = @intCast(slice.len),
@@ -1228,17 +1307,17 @@ pub fn allocRawStringSlice(self: *cy.VM, slice: []const u8, parent: *HeapObject)
 pub fn allocBox(vm: *cy.VM, val: Value) !Value {
     const obj = try allocPoolObject(vm);
     obj.box = .{
-        .structId = rt.BoxT,
+        .typeId = rt.BoxT | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .val = val,
     };
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocNativeFunc1(self: *cy.VM, func: *const fn (*cy.UserVM, [*]const Value, u8) Value, numParams: u32, funcSigId: cy.sema.FuncSigId, tccState: ?Value) !Value {
     const obj = try allocPoolObject(self);
     obj.nativeFunc1 = .{
-        .structId = rt.NativeFuncT,
+        .typeId = rt.NativeFuncT,
         .rc = 1,
         .func = func,
         .numParams = numParams,
@@ -1256,7 +1335,7 @@ pub fn allocNativeFunc1(self: *cy.VM, func: *const fn (*cy.UserVM, [*]const Valu
 pub fn allocTccState(self: *cy.VM, state: *tcc.TCCState, lib: *std.DynLib) linksection(cy.StdSection) !Value {
     const obj = try allocPoolObject(self);
     obj.tccState = .{
-        .structId = rt.TccStateT,
+        .typeId = rt.TccStateT,
         .rc = 1,
         .state = state,
         .lib = lib,
@@ -1267,7 +1346,7 @@ pub fn allocTccState(self: *cy.VM, state: *tcc.TCCState, lib: *std.DynLib) links
 pub fn allocPointer(self: *cy.VM, ptr: ?*anyopaque) !Value {
     const obj = try allocPoolObject(self);
     obj.pointer = .{
-        .structId = rt.PointerT,
+        .typeId = rt.PointerT,
         .rc = 1,
         .ptr = ptr,
     };
@@ -1277,7 +1356,7 @@ pub fn allocPointer(self: *cy.VM, ptr: ?*anyopaque) !Value {
 pub fn allocFile(self: *cy.VM, fd: std.os.fd_t) linksection(cy.StdSection) !Value {
     const obj = try allocPoolObject(self);
     obj.file = .{
-        .structId = rt.FileT,
+        .typeId = rt.FileT,
         .rc = 1,
         .fd = fd,
         .curPos = 0,
@@ -1294,7 +1373,7 @@ pub fn allocFile(self: *cy.VM, fd: std.os.fd_t) linksection(cy.StdSection) !Valu
 pub fn allocDir(self: *cy.VM, fd: std.os.fd_t, iterable: bool) linksection(cy.StdSection) !Value {
     const obj = try allocPoolObject(self);
     obj.dir = .{
-        .structId = rt.DirT,
+        .typeId = rt.DirT,
         .rc = 1,
         .fd = fd,
         .iterable = iterable,
@@ -1304,9 +1383,9 @@ pub fn allocDir(self: *cy.VM, fd: std.os.fd_t, iterable: bool) linksection(cy.St
 }
 
 pub fn allocDirIterator(self: *cy.VM, dir: *Dir, recursive: bool) linksection(cy.StdSection) !Value {
-    const obj: *cy.DirIterator = @ptrCast(try allocExternalObject(self, @sizeOf(cy.DirIterator)));
+    const obj: *cy.DirIterator = @ptrCast(try allocExternalObject(self, @sizeOf(cy.DirIterator), false));
     obj.* = .{
-        .structId = rt.DirIteratorT,
+        .typeId = rt.DirIteratorT,
         .rc = 1,
         .dir = dir,
         .inner = undefined,
@@ -1324,34 +1403,34 @@ pub fn allocDirIterator(self: *cy.VM, dir: *Dir, recursive: bool) linksection(cy
 
 /// Allocates an object outside of the object pool.
 pub fn allocObject(self: *cy.VM, sid: rt.TypeId, fields: []const Value) !Value {
-    // First slot holds the structId and rc.
-    const obj: *Object = @ptrCast(try allocExternalObject(self, (1 + fields.len) * @sizeOf(Value)));
+    // First slot holds the typeId and rc.
+    const obj: *Object = @ptrCast(try allocExternalObject(self, (1 + fields.len) * @sizeOf(Value), true));
     obj.* = .{
-        .structId = sid,
+        .typeId = sid | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .firstValue = undefined,
     };
     const dst = obj.getValuesPtr();
     std.mem.copy(Value, dst[0..fields.len], fields);
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocEmptyObject(self: *cy.VM, sid: rt.TypeId, numFields: u32) !Value {
-    // First slot holds the structId and rc.
-    const obj: *Object = @ptrCast(try allocExternalObject(self, (1 + numFields) * @sizeOf(Value)));
+    // First slot holds the typeId and rc.
+    const obj: *Object = @ptrCast(try allocExternalObject(self, (1 + numFields) * @sizeOf(Value), true));
     obj.* = .{
-        .structId = sid,
+        .typeId = sid | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .firstValue = undefined,
     };
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocObjectSmall(self: *cy.VM, sid: rt.TypeId, fields: []const Value) !Value {
     const obj = try allocPoolObject(self);
     obj.object = .{
-        .structId = sid,
+        .typeId = sid | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .firstValue = undefined,
     };
@@ -1359,17 +1438,17 @@ pub fn allocObjectSmall(self: *cy.VM, sid: rt.TypeId, fields: []const Value) !Va
     const dst = obj.object.getValuesPtr();
     std.mem.copy(Value, dst[0..fields.len], fields);
 
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocEmptyObjectSmall(self: *cy.VM, sid: rt.TypeId) !Value {
     const obj = try allocPoolObject(self);
     obj.object = .{
-        .structId = sid,
+        .typeId = sid | vmc.CYC_TYPE_MASK,
         .rc = 1,
         .firstValue = undefined,
     };
-    return Value.initPtr(obj);
+    return Value.initCycPtr(obj);
 }
 
 pub fn allocFuncFromSym(self: *cy.VM, symId: cy.vm.SymbolId) !Value {
@@ -1395,196 +1474,302 @@ pub fn allocFuncFromSym(self: *cy.VM, symId: cy.vm.SymbolId) !Value {
     }
 }
 
-pub fn freeObject(vm: *cy.VM, obj: *HeapObject) linksection(cy.HotSection) void {
+/// Use comptime options to keep closely related logic together.
+/// TODO: flatten recursion.
+pub fn freeObject(vm: *cy.VM, obj: *HeapObject,
+    comptime freeChildren: bool, comptime skipCycChildren: bool, comptime free: bool, comptime trackFrees: bool,
+) linksection(cy.HotSection) void {
     if (cy.Trace) {
-        if (obj.head.typeId == cy.NullId) {
+        if (obj.isFreed()) {
             cy.panicFmt("Double free object: {*} Should have been discovered in release op.", .{obj});
         } else {
-            if (cy.verbose) {
-                log.debug("free type={}({s})", .{
-                    obj.head.typeId, vm.getTypeName(obj.head.typeId),
-                });
-            }
+            log.tracev("free type={}({s})", .{
+                obj.getTypeId(), vm.getTypeName(obj.getTypeId()),
+            });
         }
     }
-    switch (obj.head.typeId) {
+    switch (obj.getTypeId()) {
         rt.ListT => {
             const list = cy.ptrAlignCast(*cy.List(Value), &obj.list.list);
-            for (list.items()) |it| {
-                cy.arc.release(vm, it);
+            if (freeChildren) {
+                for (list.items()) |it| {
+                    if (skipCycChildren and it.isGcConfirmedCyc()) {
+                        continue;
+                    }
+                    cy.arc.releaseExt(vm, it, trackFrees);
+                }
+            }
+            if (free) {
+                list.deinit(vm.alloc);
+                freePoolObject(vm, obj);
             }
-            list.deinit(vm.alloc);
-            freePoolObject(vm, obj);
         },
         rt.ListIteratorT => {
-            cy.arc.releaseObject(vm, cy.ptrAlignCast(*HeapObject, obj.listIter.list));
-            freePoolObject(vm, obj);
+            if (freeChildren) {
+                if (skipCycChildren) {
+                    if (!@as(*HeapObject, @ptrCast(@alignCast(obj.listIter.list))).isGcConfirmedCyc()) {
+                        cy.arc.releaseObjectExt(vm, cy.ptrAlignCast(*HeapObject, obj.listIter.list), trackFrees);
+                    }
+                } else {
+                    cy.arc.releaseObjectExt(vm, cy.ptrAlignCast(*HeapObject, obj.listIter.list), trackFrees);
+                }
+            }
+            if (free) {
+                freePoolObject(vm, obj);
+            }
         },
         rt.MapT => {
             const map = cy.ptrAlignCast(*MapInner, &obj.map.inner);
-            var iter = map.iterator();
-            while (iter.next()) |entry| {
-                cy.arc.release(vm, entry.key);
-                cy.arc.release(vm, entry.value);
+            if (freeChildren) {
+                var iter = map.iterator();
+                while (iter.next()) |entry| {
+                    if (skipCycChildren) {
+                        if (!entry.key.isGcConfirmedCyc()) {
+                            cy.arc.releaseExt(vm, entry.key, trackFrees);
+                        }
+                        if (!entry.value.isGcConfirmedCyc()) {
+                            cy.arc.releaseExt(vm, entry.value, trackFrees);
+                        }
+                    } else {
+                        cy.arc.releaseExt(vm, entry.key, trackFrees);
+                        cy.arc.releaseExt(vm, entry.value, trackFrees);
+                    }
+                }
+            }
+            if (free) {
+                map.deinit(vm.alloc);
+                freePoolObject(vm, obj);
             }
-            map.deinit(vm.alloc);
-            freePoolObject(vm, obj);
         },
         rt.MapIteratorT => {
-            cy.arc.releaseObject(vm, cy.ptrAlignCast(*HeapObject, obj.mapIter.map));
-            freePoolObject(vm, obj);
+            if (freeChildren) {
+                if (skipCycChildren) {
+                    if (!@as(*HeapObject, @ptrCast(@alignCast(obj.mapIter.map))).isGcConfirmedCyc()) {
+                        cy.arc.releaseObjectExt(vm, cy.ptrAlignCast(*HeapObject, obj.mapIter.map), trackFrees);
+                    }
+                } else {
+                    cy.arc.releaseObjectExt(vm, cy.ptrAlignCast(*HeapObject, obj.mapIter.map), trackFrees);
+                }
+            }
+            if (free) {
+                freePoolObject(vm, obj);
+            }
         },
         rt.ClosureT => {
-            const src = obj.closure.getCapturedValuesPtr()[0..obj.closure.numCaptured];
-            for (src) |capturedVal| {
-                cy.arc.release(vm, capturedVal);
+            if (freeChildren) {
+                const src = obj.closure.getCapturedValuesPtr()[0..obj.closure.numCaptured];
+                for (src) |capturedVal| {
+                    if (skipCycChildren and capturedVal.isGcConfirmedCyc()) {
+                        continue;
+                    }
+                    cy.arc.releaseExt(vm, capturedVal, trackFrees);
+                }
             }
-            if (obj.closure.numCaptured <= 3) {
-                freePoolObject(vm, obj);
-            } else {
-                freeExternalObject(vm, obj, (2 + obj.closure.numCaptured) * @sizeOf(Value));
+            if (free) {
+                if (obj.closure.numCaptured <= 3) {
+                    freePoolObject(vm, obj);
+                } else {
+                    freeExternalObject(vm, obj, (2 + obj.closure.numCaptured) * @sizeOf(Value), true);
+                }
             }
         },
         rt.LambdaT => {
-            freePoolObject(vm, obj);
+            if (free) {
+                freePoolObject(vm, obj);
+            }
         },
         rt.AstringT => {
-            if (obj.astring.len <= DefaultStringInternMaxByteLen) {
-                // Check both the key and value to make sure this object is the intern entry.
-                const key = obj.astring.getConstSlice();
-                if (vm.strInterns.get(key)) |val| {
-                    if (val == obj) {
-                        _ = vm.strInterns.remove(key);
+            if (free) {
+                if (obj.astring.len <= DefaultStringInternMaxByteLen) {
+                    // Check both the key and value to make sure this object is the intern entry.
+                    const key = obj.astring.getConstSlice();
+                    if (vm.strInterns.get(key)) |val| {
+                        if (val == obj) {
+                            _ = vm.strInterns.remove(key);
+                        }
                     }
                 }
-            }
-            if (obj.astring.len <= MaxPoolObjectAstringByteLen) {
-                freePoolObject(vm, obj);
-            } else {
-                freeExternalObject(vm, obj, Astring.BufOffset + obj.astring.len);
+                if (obj.astring.len <= MaxPoolObjectAstringByteLen) {
+                    freePoolObject(vm, obj);
+                } else {
+                    freeExternalObject(vm, obj, Astring.BufOffset + obj.astring.len, false);
+                }
             }
         },
         rt.UstringT => {
-            if (obj.ustring.len <= DefaultStringInternMaxByteLen) {
-                const key = obj.ustring.getConstSlice();
-                if (vm.strInterns.get(key)) |val| {
-                    if (val == obj) {
-                        _ = vm.strInterns.remove(key);
+            if (free) {
+                if (obj.ustring.len <= DefaultStringInternMaxByteLen) {
+                    const key = obj.ustring.getConstSlice();
+                    if (vm.strInterns.get(key)) |val| {
+                        if (val == obj) {
+                            _ = vm.strInterns.remove(key);
+                        }
                     }
                 }
-            }
-            if (obj.ustring.len <= MaxPoolObjectUstringByteLen) {
-                freePoolObject(vm, obj);
-            } else {
-                freeExternalObject(vm, obj, Ustring.BufOffset + obj.ustring.len);
+                if (obj.ustring.len <= MaxPoolObjectUstringByteLen) {
+                    freePoolObject(vm, obj);
+                } else {
+                    freeExternalObject(vm, obj, Ustring.BufOffset + obj.ustring.len, false);
+                }
             }
         },
         rt.StringSliceT => {
-            if (cy.heap.StringSlice.getParentPtr(obj.stringSlice)) |parent| {
-                cy.arc.releaseObject(vm, parent);
+            if (freeChildren) {
+                if (cy.heap.StringSlice.getParentPtr(obj.stringSlice)) |parent| {
+                    cy.arc.releaseObjectExt(vm, parent, trackFrees);
+                }
+            }
+            if (free) {
+                freePoolObject(vm, obj);
             }
-            freePoolObject(vm, obj);
         },
         rt.RawstringT => {
-            if (obj.rawstring.len <= MaxPoolObjectRawStringByteLen) {
-                freePoolObject(vm, obj);
-            } else {
-                freeExternalObject(vm, obj, RawString.BufOffset + obj.rawstring.len);
+            if (free) {
+                if (obj.rawstring.len <= MaxPoolObjectRawStringByteLen) {
+                    freePoolObject(vm, obj);
+                } else {
+                    freeExternalObject(vm, obj, RawString.BufOffset + obj.rawstring.len, false);
+                }
             }
         },
         rt.RawstringSliceT => {
-            const parent: *cy.HeapObject = @ptrCast(obj.rawstringSlice.parent);
-            cy.arc.releaseObject(vm, parent);
-            freePoolObject(vm, obj);
+            if (freeChildren) {
+                const parent: *cy.HeapObject = @ptrCast(obj.rawstringSlice.parent);
+                cy.arc.releaseObjectExt(vm, parent, trackFrees);
+            }
+            if (free) {
+                freePoolObject(vm, obj);
+            }
         },
         rt.FiberT => {
-            const fiber: *vmc.Fiber = @ptrCast(obj);
-            cy.fiber.releaseFiberStack(vm, fiber) catch |err| {
-                cy.panicFmt("release fiber: {}", .{err});
-            };
-            freeExternalObject(vm, obj, @sizeOf(vmc.Fiber));
+            if (freeChildren) {
+                const fiber: *vmc.Fiber = @ptrCast(obj);
+                // TODO: isCyc + trackFrees.
+                cy.fiber.releaseFiberStack(vm, fiber) catch |err| {
+                    cy.panicFmt("release fiber: {}", .{err});
+                };
+            }
+            if (free) {
+                freeExternalObject(vm, obj, @sizeOf(vmc.Fiber), true);
+            }
         },
         rt.BoxT => {
-            cy.arc.release(vm, obj.box.val);
-            freePoolObject(vm, obj);
+            if (freeChildren) {
+                if (skipCycChildren) {
+                    if (!obj.box.val.isGcConfirmedCyc()) {
+                        cy.arc.releaseExt(vm, obj.box.val, trackFrees);
+                    }
+                } else {
+                    cy.arc.releaseExt(vm, obj.box.val, trackFrees);
+                }
+            }
+            if (free) {
+                freePoolObject(vm, obj);
+            }
         },
         rt.NativeFuncT => {
-            if (obj.nativeFunc1.hasTccState) {
-                cy.arc.releaseObject(vm, obj.nativeFunc1.tccState.asHeapObject());
+            if (freeChildren) {
+                if (obj.nativeFunc1.hasTccState) {
+                    cy.arc.releaseObjectExt(vm, obj.nativeFunc1.tccState.asHeapObject(), trackFrees);
+                }
+            }
+            if (free) {
+                freePoolObject(vm, obj);
             }
-            freePoolObject(vm, obj);
         },
         rt.TccStateT => {
             if (cy.hasJit) {
-                tcc.tcc_delete(obj.tccState.state);
-                obj.tccState.lib.close();
-                vm.alloc.destroy(obj.tccState.lib);
-                freePoolObject(vm, obj);
+                if (free) {
+                    tcc.tcc_delete(obj.tccState.state);
+                    obj.tccState.lib.close();
+                    vm.alloc.destroy(obj.tccState.lib);
+                    freePoolObject(vm, obj);
+                }
             } else {
                 unreachable;
             }
         },
         rt.PointerT => {
-            freePoolObject(vm, obj);
+            if (free) {
+                freePoolObject(vm, obj);
+            }
         },
         rt.FileT => {
-            if (cy.hasStdFiles) {
-                if (obj.file.hasReadBuf) {
-                    vm.alloc.free(obj.file.readBuf[0..obj.file.readBufCap]);
+            if (free) {
+                if (cy.hasStdFiles) {
+                    if (obj.file.hasReadBuf) {
+                        vm.alloc.free(obj.file.readBuf[0..obj.file.readBufCap]);
+                    }
+                    obj.file.close();
                 }
-                obj.file.close();
+                freePoolObject(vm, obj);
             }
-            freePoolObject(vm, obj);
         },
         rt.DirT => {
-            if (cy.hasStdFiles) {
-                obj.dir.close();
+            if (free) {
+                if (cy.hasStdFiles) {
+                    obj.dir.close();
+                }
+                freePoolObject(vm, obj);
             }
-            freePoolObject(vm, obj);
         },
         rt.DirIteratorT => {
             if (cy.hasStdFiles) {
                 var dir: *DirIterator = @ptrCast(obj);
-                if (dir.recursive) {
-                    const walker = cy.ptrAlignCast(*std.fs.IterableDir.Walker, &dir.inner.walker);
-                    walker.deinit();
+                if (free) {
+                    if (dir.recursive) {
+                        const walker = cy.ptrAlignCast(*std.fs.IterableDir.Walker, &dir.inner.walker);
+                        walker.deinit();
+                    }
+                }
+                if (freeChildren) {
+                    cy.arc.releaseObjectExt(vm, @ptrCast(dir.dir), trackFrees);
                 }
-                cy.arc.releaseObject(vm, @ptrCast(dir.dir));
             }
-            freeExternalObject(vm, obj, @sizeOf(DirIterator));
+            if (free) {
+                freeExternalObject(vm, obj, @sizeOf(DirIterator), false);
+            }
         },
         rt.MetaTypeT => {
-            freePoolObject(vm, obj);
+            if (free) {
+                freePoolObject(vm, obj);
+            }
         },
         else => {
             // Struct deinit.
             if (cy.Trace) {
-                if (cy.verbose) {
-                    log.debug("free {s}", .{vm.getTypeName(obj.head.typeId)});
+                log.debug("free {s}", .{vm.getTypeName(obj.getTypeId())});
                 }
                 // Check range.
-                if (obj.head.typeId >= vm.types.len) {
-                    log.debug("unsupported struct type {}", .{obj.head.typeId});
+                if (obj.getTypeId() >= vm.types.len) {
+                    log.debug("unsupported struct type {}", .{obj.getTypeId()});
                     cy.fatal();
                 }
             }
-            const numFields = vm.types.buf[obj.head.typeId].numFields;
-            for (obj.object.getValuesConstPtr()[0..numFields]) |child| {
-                cy.arc.release(vm, child);
+            const numFields = vm.types.buf[obj.getTypeId()].numFields;
+            if (freeChildren) {
+                for (obj.object.getValuesConstPtr()[0..numFields]) |child| {
+                    if (skipCycChildren and child.isGcConfirmedCyc()) {
+                        continue;
+                    }
+                    cy.arc.releaseExt(vm, child, trackFrees);
+                }
             }
-            if (numFields <= 4) {
-                freePoolObject(vm, obj);
-            } else {
-                freeExternalObject(vm, obj, (1 + numFields) * @sizeOf(Value));
+            if (free) {
+                if (numFields <= 4) {
+                    freePoolObject(vm, obj);
+                } else {
+                    freeExternalObject(vm, obj, (1 + numFields) * @sizeOf(Value), true);
+                }
             }
         },
     }
 }
 
 pub fn traceAlloc(vm: *cy.VM, ptr: *HeapObject) void {
-    // log.debug("alloc {*} {} {}", .{ptr, ptr.head.typeId, vm.debugPc});
+    // log.debug("alloc {*} {} {}", .{ptr, ptr.getTypeId(), vm.debugPc});
     vm.objectTraceMap.put(vm.alloc, ptr, .{
         .allocPc = vm.debugPc,
         .freePc = cy.NullId,
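Because `freeChildren`, `skipCycChildren`, `free`, and `trackFrees` are comptime parameters, each call site of `freeObject` compiles to a specialized function with the dead branches removed, so the hot ARC release path pays nothing for the GC-only logic. The three configurations used in this diff:

    // Normal release (arc.releaseExt): release children, free the object.
    cy.heap.freeObject(vm, obj, true, false, true, trackFrees);
    // Sweep phase: release only non-cyc children, defer the actual free.
    cy.heap.freeObject(vm, obj, true, true, false, true);
    // Final pass over confirmed cycle members: just reclaim the memory.
    cy.heap.freeObject(vm, obj, false, false, true, false);
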
@@ -1616,10 +1801,10 @@ test "Free object invalidation." {
         try t.eq(obj2.head.typeId, cy.NullId);
 
         // Free external object invalidates object pointer.
-        obj = try allocExternalObject(&vm, 40);
+        obj = try allocExternalObject(&vm, 40, false);
         obj.head.typeId = 100;
         vm.debugPc = 123;
-        freeExternalObject(&vm, obj, 40);
+        freeExternalObject(&vm, obj, 40, false);
         try t.eq(cy.arc.isObjectAlreadyFreed(&vm, obj), true);
     }
 }
@@ -1630,12 +1815,47 @@ test "heap internals." {
         try t.eq(@sizeOf(MapInner), 32);
         try t.eq(@alignOf(List), 4);
         try t.eq(@alignOf(ListIterator), 4);
         try t.eq(@alignOf(DirIterator), 4);
+        try t.eq(@sizeOf(List), 20);
+        try t.eq(@sizeOf(ListIterator), 16);
+        try t.eq(@sizeOf(Map), 32);
+        try t.eq(@sizeOf(MapIterator), 16);
+        try t.eq(@sizeOf(RawStringSlice), 24);
+        try t.eq(@sizeOf(Pointer), 12);
     } else {
         try t.eq(@sizeOf(MapInner), 32);
         try t.eq(@alignOf(List), 8);
         try t.eq(@alignOf(ListIterator), 8);
         try t.eq(@alignOf(DirIterator), 8);
+
+        if (builtin.os.tag != .windows) {
+            try t.eq(@sizeOf(List), 32);
+            try t.eq(@sizeOf(ListIterator), 24);
+            try t.eq(@sizeOf(Map), 40);
+            try t.eq(@sizeOf(MapIterator), 24);
+            try t.eq(@sizeOf(RawStringSlice), 32);
+            try t.eq(@sizeOf(Pointer), 16);
+        }
     }
+    if (builtin.os.tag != .windows) {
+        try t.eq(@sizeOf(Closure), 32);
+        try t.eq(@sizeOf(Lambda), 24);
+        try t.eq(@sizeOf(Astring), 16);
+        try t.eq(@sizeOf(Ustring), 28);
+        try t.eq(@sizeOf(vmc.StringSlice), 40);
+        try t.eq(@sizeOf(RawString), 16);
+        try t.eq(@sizeOf(Object), 16);
+        try t.eq(@sizeOf(Box), 16);
+        try t.eq(@sizeOf(NativeFunc1), 40);
+        try t.eq(@sizeOf(MetaType), 16);
+        if (cy.hasJit) {
+            try t.eq(@sizeOf(TccState), 24);
+        }
+        if (cy.hasStdFiles) {
+            try t.eq(@sizeOf(File), 40);
+            try t.eq(@sizeOf(Dir), 24);
+        }
+    }
+
     try t.eq(@sizeOf(HeapObject), 40);
     try t.eq(@alignOf(HeapObject), 8);
     try t.eq(@sizeOf(HeapPage), 40 * 102);
@@ -1646,39 +1866,39 @@ test "heap internals." {
     try t.eq(@alignOf(Dir), 8);
     var dir: Dir = undefined;
-    try t.eq(@intFromPtr(&dir.structId), @intFromPtr(&dir));
+    try t.eq(@intFromPtr(&dir.typeId), @intFromPtr(&dir));
     try t.eq(@intFromPtr(&dir.rc), @intFromPtr(&dir) + 4);
 
     var dirIter: DirIterator = undefined;
-    try t.eq(@intFromPtr(&dirIter.structId), @intFromPtr(&dirIter));
+    try t.eq(@intFromPtr(&dirIter.typeId), @intFromPtr(&dirIter));
     try t.eq(@intFromPtr(&dirIter.rc), @intFromPtr(&dirIter) + 4);
 
     const rstr = RawString{
-        .structId = rt.RawstringT,
+        .typeId = rt.RawstringT,
         .rc = 1,
         .len = 1,
         .bufStart = undefined,
     };
-    try t.eq(@intFromPtr(&rstr.structId), @intFromPtr(&rstr));
+    try t.eq(@intFromPtr(&rstr.typeId), @intFromPtr(&rstr));
     try t.eq(@intFromPtr(&rstr.rc), @intFromPtr(&rstr) + 4);
     try t.eq(@intFromPtr(&rstr.len), @intFromPtr(&rstr) + 8);
     try t.eq(RawString.BufOffset, 12);
     try t.eq(@intFromPtr(&rstr.bufStart), @intFromPtr(&rstr) + RawString.BufOffset);
 
     const astr = Astring{
-        .structId = rt.AstringT,
+        .typeId = rt.AstringT,
         .rc = 1,
         .len = 1,
         .bufStart = undefined,
     };
-    try t.eq(@intFromPtr(&astr.structId), @intFromPtr(&astr));
+    try t.eq(@intFromPtr(&astr.typeId), @intFromPtr(&astr));
     try t.eq(@intFromPtr(&astr.rc), @intFromPtr(&astr) + 4);
     try t.eq(@intFromPtr(&astr.len), @intFromPtr(&astr) + 8);
     try t.eq(Astring.BufOffset, 12);
     try t.eq(@intFromPtr(&astr.bufStart), @intFromPtr(&astr) + Astring.BufOffset);
 
     const ustr = Ustring{
-        .structId = rt.UstringT,
+        .typeId = rt.UstringT,
         .rc = 1,
         .len = 1,
         .charLen = 1,
@@ -1686,7 +1906,7 @@ test "heap internals." {
         .mruCharIdx = 0,
         .bufStart = undefined,
     };
-    try t.eq(@intFromPtr(&ustr.structId), @intFromPtr(&ustr));
+    try t.eq(@intFromPtr(&ustr.typeId), @intFromPtr(&ustr));
     try t.eq(@intFromPtr(&ustr.rc), @intFromPtr(&ustr) + 4);
     try t.eq(@intFromPtr(&ustr.len), @intFromPtr(&ustr) + 8);
     try t.eq(@intFromPtr(&ustr.charLen), @intFromPtr(&ustr) + 12);
@@ -1695,6 +1915,6 @@ test "heap internals." {
     try t.eq(Ustring.BufOffset, 24);
     try t.eq(@intFromPtr(&ustr.bufStart), @intFromPtr(&ustr) + Ustring.BufOffset);
 
-    try t.eq(@offsetOf(List, "structId"), 0);
+    try t.eq(@offsetOf(List, "typeId"), 0);
     try t.eq(@offsetOf(List, "rc"), 4);
 }
\ No newline at end of file
diff --git a/src/runtime.zig b/src/runtime.zig
index 6d783f3c9..69f56e20d 100644
--- a/src/runtime.zig
+++ b/src/runtime.zig
@@ -42,6 +42,7 @@ pub const MetaTypeT: TypeId = vmc.TYPE_METATYPE;
 pub const AnyT: TypeId = 29;
 pub const StringUnionT: TypeId = 30;
 pub const RawstringUnionT: TypeId = 31;
+pub const NumBuiltinTypes: TypeId = 32;
 
 pub const TypeKey = cy.hash.KeyU64;
 
diff --git a/src/string.zig b/src/string.zig
index 3ba21e59d..0fc2792ea 100644
--- a/src/string.zig
+++ b/src/string.zig
@@ -22,7 +22,7 @@ pub const HeapStringBuilder = struct {
     pub fn init(vm: *cy.VM) !HeapStringBuilder {
         const obj = try vm.allocPoolObject();
         obj.astring = .{
-            .structId = rt.AstringT,
+            .typeId = rt.AstringT,
             .rc = 1,
             .len = cy.MaxPoolObjectStringByteLen,
             .bufStart = undefined,
@@ -90,7 +90,7 @@ pub const HeapStringBuilder = struct {
             const objSlice = try alloc.alignedAlloc(u8, @alignOf(cy.HeapObject), 16 + newCap);
             const obj: *cy.HeapObject = @ptrCast(objSlice.ptr);
             obj.astring = .{
-                .structId = if (self.isAstring) rt.AstringT else rt.UstringT,
+                .typeId = if (self.isAstring) rt.AstringT else rt.UstringT,
                 .rc = 1,
                 .len = 0,
                 .bufStart = undefined,
@@ -105,7 +105,7 @@ pub const HeapStringBuilder = struct {
             const objSlice = try alloc.alignedAlloc(u8, @alignOf(cy.HeapObject), 16 + newCap);
             const obj: *cy.HeapObject = @ptrCast(objSlice.ptr);
             obj.astring = .{
-                .structId = if (self.isAstring) rt.AstringT else rt.UstringT,
+                .typeId = if (self.isAstring) rt.AstringT else rt.UstringT,
                 .rc = 1,
                 .len = 0,
                 .bufStart = undefined,
@@ -140,7 +140,7 @@ pub const HeapRawStringBuilder = struct {
     pub fn init(vm: *cy.VM) !HeapRawStringBuilder {
         const obj = try cy.heap.allocPoolObject(vm);
         obj.rawstring = .{
-            .structId = rt.RawstringT,
+            .typeId = rt.RawstringT,
             .rc = 1,
             .len = cy.MaxPoolObjectRawStringByteLen,
             .bufStart = undefined,
@@ -161,7 +161,7 @@ pub const HeapRawStringBuilder = struct {
         if (self.hasObject) {
             const obj = self.getHeapObject();
             obj.rawstring.len = self.len;
-            cy.heap.freeObject(self.vm, obj);
+            cy.heap.freeObject(self.vm, obj, true, false, true, false);
             self.hasObject = false;
         }
     }
@@ -201,7 +201,7 @@ pub const HeapRawStringBuilder = struct {
         const objSlice = try alloc.alignedAlloc(u8, @alignOf(cy.HeapObject), cy.RawString.BufOffset + newCap);
         const obj: *cy.HeapObject = @ptrCast(objSlice.ptr);
         obj.rawstring = .{
-            .structId = rt.RawstringT,
+            .typeId = rt.RawstringT,
             .rc = 1,
             .len = 0,
             .bufStart = undefined,
@@ -216,7 +216,7 @@ pub const HeapRawStringBuilder = struct {
         const objSlice = try alloc.alignedAlloc(u8, @alignOf(cy.HeapObject), cy.RawString.BufOffset + newCap);
         const obj: *cy.HeapObject = @ptrCast(objSlice.ptr);
         obj.rawstring = .{
-            .structId = rt.RawstringT,
+            .typeId = rt.RawstringT,
             .rc = 1,
             .len = 0,
             .bufStart = undefined,
@@ -226,7 +226,7 @@ pub const HeapRawStringBuilder = struct {
 
         // Free pool object.
         oldObj.rawstring.len = self.len;
-        cy.heap.freeObject(self.vm, oldObj);
+        cy.heap.freeObject(self.vm, oldObj, true, false, true, false);
     }
 }
 
diff --git a/src/value.zig b/src/value.zig
index b9ca00bb7..2bb65b86a 100644
--- a/src/value.zig
+++ b/src/value.zig
@@ -12,7 +12,6 @@ const vmc = @import("vm_c.zig");
 
 const SignMask: u64 = 1 << 63;
 const TaggedValueMask: u64 = 0x7ffc000000000000;
-const PointerMask: u64 = TaggedValueMask | SignMask;
 
 const IntegerMask: u64 = 1 << 48;
 const TaggedIntegerMask: u64 = TaggedValueMask | IntegerMask;
@@ -150,17 +149,17 @@ pub const Value = packed union {
     pub fn otherToF64(self: *const Value) linksection(cy.HotSection) !f64 {
         if (self.isPointer()) {
             const obj = self.asHeapObject();
-            if (obj.head.typeId == rt.AstringT) {
+            if (obj.getTypeId() == rt.AstringT) {
                 const str = obj.astring.getConstSlice();
                 return std.fmt.parseFloat(f64, str) catch 0;
-            } else if (obj.head.typeId == rt.UstringT) {
+            } else if (obj.getTypeId() == rt.UstringT) {
                 const str = obj.ustring.getConstSlice();
                 return std.fmt.parseFloat(f64, str) catch 0;
-            } else if (obj.head.typeId == rt.StringSliceT) {
+            } else if (obj.getTypeId() == rt.StringSliceT) {
                 const str = cy.heap.StringSlice.getConstSlice(obj.stringSlice);
                 return std.fmt.parseFloat(f64, str) catch 0;
             } else {
-                log.debug("unsupported conv to number: {}", .{obj.head.typeId});
+                log.debug("unsupported conv to number: {}", .{obj.getTypeId()});
                 return error.Unsupported;
             }
         } else {
@@ -191,14 +190,14 @@ pub const Value = packed union {
         if (!self.isPointer()) {
             return false;
         }
-        const typeId = self.asHeapObject().head.typeId;
+        const typeId = self.asHeapObject().getTypeId();
         return typeId == rt.RawstringT or typeId == rt.RawstringSliceT;
     }
 
     pub fn isString(self: *const Value) linksection(cy.HotSection) bool {
         if (self.isPointer()) {
             const obj = self.asHeapObject();
-            return obj.head.typeId == rt.AstringT or obj.head.typeId == rt.UstringT or obj.head.typeId == rt.StringSliceT;
+            return obj.getTypeId() == rt.AstringT or obj.getTypeId() == rt.UstringT or obj.getTypeId() == rt.StringSliceT;
         } else {
             return self.assumeNotPtrIsStaticString();
         }
@@ -258,7 +257,7 @@ pub const Value = packed union {
         const bits = self.val & TaggedPrimitiveMask;
         if (bits >= TaggedValueMask) {
             if (self.isPointer()) {
-                return self.asHeapObject().head.typeId;
+                return self.asHeapObject().getTypeId();
             } else {
                 if (bits >= TaggedIntegerMask) {
                     return rt.IntegerT;
@@ -286,53 +285,63 @@ pub const Value = packed union {
     }
 
     pub inline fn isPointer(self: *const Value) bool {
-        // Only a pointer if nan bits and sign bit are set.
-        return self.val & PointerMask == PointerMask;
+        return self.val >= vmc.NOCYC_POINTER_MASK;
+    }
+
+    pub inline fn isCycPointer(self: *const Value) bool {
+        return self.val >= vmc.CYC_POINTER_MASK;
     }
 
     pub inline fn isObjectType(self: *const Value, typeId: rt.TypeId) bool {
-        return isPointer(self) and self.asHeapObject().head.typeId == typeId;
+        return isPointer(self) and self.asHeapObject().getTypeId() == typeId;
     }
 
     pub inline fn isPointerT(self: *const Value) bool {
-        return self.isPointer() and self.asHeapObject().head.typeId == rt.PointerT;
+        return self.isPointer() and self.asHeapObject().getTypeId() == rt.PointerT;
     }
 
     pub inline fn isMap(self: *const Value) bool {
-        return self.isPointer() and self.asHeapObject().head.typeId == rt.MapT;
+        return self.isPointer() and self.asHeapObject().getTypeId() == rt.MapT;
    }
 
     pub inline fn isList(self: *const Value) bool {
-        return self.isPointer() and self.asHeapObject().head.typeId == rt.ListT;
+        return self.isPointer() and self.asHeapObject().getTypeId() == rt.ListT;
     }
 
     pub inline fn isBox(self: *const Value) bool {
-        return self.isPointer() and self.asHeapObject().head.typeId == rt.BoxT;
+        return self.isPointer() and self.asHeapObject().getTypeId() == rt.BoxT;
     }
 
     pub inline fn isClosure(self: *const Value) bool {
-        return self.isPointer() and self.asHeapObject().head.typeId == rt.ClosureT;
+        return self.isPointer() and self.asHeapObject().getTypeId() == rt.ClosureT;
     }
 
     pub inline fn asRawString(self: *const Value) []const u8 {
         const obj = self.asHeapObject();
-        if (obj.head.typeId == rt.RawstringT) {
+        if (obj.getTypeId() == rt.RawstringT) {
             return obj.rawstring.getConstSlice();
-        } else if (obj.head.typeId == rt.RawstringSliceT) {
+        } else if (obj.getTypeId() == rt.RawstringSliceT) {
             return obj.rawstringSlice.getConstSlice();
         } else unreachable;
     }
 
+    pub fn isGcConfirmedCyc(self: *const Value) bool {
+        if (self.isCycPointer()) {
+            return self.asHeapObject().isGcConfirmedCyc();
+        }
+        return false;
+    }
+
     pub inline fn asHeapObject(self: *const Value) *cy.HeapObject {
-        return @ptrFromInt(@as(usize, @intCast(self.val & ~PointerMask)));
+        return @ptrFromInt(@as(usize, @intCast(self.val & ~vmc.POINTER_MASK)));
     }
 
     pub inline fn asPointer(self: *const Value, comptime Ptr: type) Ptr {
-        return @ptrFromInt(@as(usize, @intCast(self.val & ~PointerMask)));
+        return @ptrFromInt(@as(usize, @intCast(self.val & ~vmc.POINTER_MASK)));
     }
 
     pub inline fn asAnyOpaque(self: *const Value) ?*anyopaque {
-        return @ptrFromInt(@as(usize, @intCast(self.val & ~PointerMask)));
+        return @ptrFromInt(@as(usize, @intCast(self.val & ~vmc.POINTER_MASK)));
     }
 
     pub inline fn asBool(self: *const Value) linksection(cy.HotSection) bool {
@@ -395,7 +404,6 @@ pub const Value = packed union {
     }
 
     pub inline fn initRaw(val: u64) Value {
-        @setRuntimeSafety(debug);
         return .{ .val = val };
     }
 
@@ -408,8 +416,11 @@ pub const Value = packed union {
     }
 
     pub inline fn initPtr(ptr: ?*anyopaque) Value {
-        @setRuntimeSafety(debug);
-        return .{ .val = PointerMask | @intFromPtr(ptr) };
+        return .{ .val = vmc.NOCYC_POINTER_MASK | @intFromPtr(ptr) };
+    }
+
+    pub inline fn initCycPtr(ptr: ?*anyopaque) Value {
+        return .{ .val = vmc.CYC_POINTER_MASK | @intFromPtr(ptr) };
     }
 
     pub inline fn initStaticAstring(start: u32, len: u15) Value {
@@ -481,7 +492,7 @@ pub const Value = packed union {
         else => {
             if (self.isPointer()) {
                 const obj = self.asHeapObject();
-                switch (obj.head.typeId) {
+                switch (obj.getTypeId()) {
                     rt.ListT => log.info("List {*} len={}", .{obj, obj.list.list.len}),
                     rt.MapT => log.info("Map {*} size={}", .{obj,
obj.map.inner.size}), rt.AstringT => { @@ -506,7 +517,7 @@ pub const Value = packed union { rt.NativeFuncT => return log.info("NativeFunc {*}", .{obj}), rt.PointerT => return log.info("Pointer {*} ptr={*}", .{obj, obj.pointer.ptr}), else => { - log.info("HeapObject {*} {}", .{obj, obj.head.typeId}); + log.info("HeapObject {*} {}", .{obj, obj.getTypeId()}); }, } } else { @@ -593,7 +604,7 @@ pub const ValueUserTag = enum { pub fn shallowCopy(vm: *cy.VM, val: Value) linksection(cy.StdSection) Value { if (val.isPointer()) { const obj = val.asHeapObject(); - switch (obj.head.typeId) { + switch (obj.getTypeId()) { rt.ListT => { const list = cy.ptrAlignCast(*cy.List(Value), &obj.list.list); const new = cy.heap.allocList(vm, list.items()) catch cy.fatal(); @@ -649,13 +660,13 @@ pub fn shallowCopy(vm: *cy.VM, val: Value) linksection(cy.StdSection) Value { fmt.panic("Unsupported copy pointer.", &.{}); }, else => { - const numFields = @as(*const cy.VM, @ptrCast(vm)).types.buf[obj.head.typeId].numFields; + const numFields = @as(*const cy.VM, @ptrCast(vm)).types.buf[obj.getTypeId()].numFields; const fields = obj.object.getValuesConstPtr()[0..numFields]; var new: Value = undefined; if (numFields <= 4) { - new = cy.heap.allocObjectSmall(vm, obj.head.typeId, fields) catch cy.fatal(); + new = cy.heap.allocObjectSmall(vm, obj.getTypeId(), fields) catch cy.fatal(); } else { - new = cy.heap.allocObject(vm, obj.head.typeId, fields) catch cy.fatal(); + new = cy.heap.allocObject(vm, obj.getTypeId(), fields) catch cy.fatal(); } for (fields) |field| { cy.arc.retain(vm, field); @@ -694,7 +705,7 @@ test "value internals." { try t.eq(StaticUstringMask, 0x7FFC000400000000); try t.eq(NoneMask, 0x7FFC000000000000); try t.eq(TrueMask, 0x7FFC000100000001); - try t.eq(PointerMask, 0xFFFC000000000000); + try t.eq(vmc.POINTER_MASK, 0xFFFE000000000000); // Check Zig/C struct compat. try t.eq(@sizeOf(Value), @sizeOf(vmc.Value)); diff --git a/src/vm.c b/src/vm.c index fd727596b..317e2e27c 100644 --- a/src/vm.c +++ b/src/vm.c @@ -19,59 +19,6 @@ do { if (!(cond)) zFatal(); } while (false) #define BITCAST(type, x) (((union {typeof(x) src; type dst;})(x)).dst) -// 1000000000000000: Most significant bit. -#define SIGN_MASK ((u64)1 << 63) - -// 0111111111110000 (7FF0): +INF (from math.inf, +/zero, overflow) -// 1111111111110000 (FFF0): -INF (from math.neginf, -/zero, overflow) -// 0111111111111000 (7FF8): QNAN (from neg op on -QNAN, math.nan, zero/zero non-intel, QNAN arithmetic) -// 1111111111111000 (FFF8): -QNAN (from neg op on QNAN, zero/zero intel, -QNAN arithmetic, -QNAN/zero) -// Intel uses the sign bit for an indefinite real or -QNAN. - -// 0111111111111100 (7FFC): QNAN and one extra bit to the right. -#define TAGGED_VALUE_MASK ((u64)0x7ffc000000000000) - -// 0000000000000001 -#define INTEGER_MASK ((u64)1 << 48) - -// 0111111111111101 0000000000000111 -#define TAGGED_PRIMITIVE_MASK (TAGGED_VALUE_MASK | PRIMITIVE_MASK) -// 0000000000000001 0000000000000111: Bits relevant to the primitive's type. -#define PRIMITIVE_MASK (TAGGED_VALUE_MASK | ((u64)TAG_MASK << 32) | INTEGER_MASK) - -// 1111111111111100: TaggedMask + Sign bit indicates a pointer value. 
-#define POINTER_MASK (TAGGED_VALUE_MASK | SIGN_MASK) - -// 0111111111111101 -#define TAGGED_INTEGER_MASK (TAGGED_VALUE_MASK | INTEGER_MASK) - -// 0111111111111100 0000000000000000 -#define NONE_MASK (TAGGED_VALUE_MASK | ((u64)TAG_NONE << 32)) - -// 0111111111111100 0000000000000001 -#define BOOLEAN_MASK (TAGGED_VALUE_MASK | ((u64)TAG_BOOLEAN << 32)) - -#define TAG_MASK (((uint32_t)1 << 3) - 1) -#define TAG_NONE ((uint8_t)0) -#define TAG_BOOLEAN ((uint8_t)1) -#define TAG_ERROR ((uint8_t)2) -#define TAG_STATIC_ASTRING ((uint8_t)3) -#define TAG_STATIC_USTRING ((uint8_t)4) -#define TAG_ENUM ((uint8_t)5) -#define TAG_SYMBOL ((uint8_t)6) -#define FALSE_MASK BOOLEAN_MASK -#define TRUE_BIT_MASK ((uint64_t)1) -#define TRUE_MASK (BOOLEAN_MASK | TRUE_BIT_MASK) - -#define ERROR_MASK (TAGGED_VALUE_MASK | ((u64)TAG_ERROR << 32)) -#define ENUM_MASK (TAGGED_VALUE_MASK | ((u64)TAG_ENUM << 32)) -#define SYMBOL_MASK (TAGGED_VALUE_MASK | ((u64)TAG_SYMBOL << 32)) -#define STATIC_ASTRING_MASK (TAGGED_VALUE_MASK | ((u64)TAG_STATIC_ASTRING << 32)) -#define STATIC_USTRING_MASK (TAGGED_VALUE_MASK | ((u64)TAG_STATIC_USTRING << 32)) -#define BEFORE_TAG_MASK ((u32)(0x00007fff << 3)) -#define NULL_U32 UINT32_MAX -#define NULL_U8 UINT8_MAX - // Construct value. // _BitInt zeroes padding bits after cast. @@ -87,7 +34,8 @@ #define VALUE_FALSE FALSE_MASK #define VALUE_INTERRUPT (ERROR_MASK | 0xffff) #define VALUE_RAW(u) u -#define VALUE_PTR(ptr) (POINTER_MASK | (uint64_t)ptr) +#define VALUE_PTR(ptr) (NOCYC_POINTER_MASK | (u64)ptr) +#define VALUE_CYCPTR(ptr) (CYC_POINTER_MASK | (u64)ptr) #define VALUE_STATIC_STRING_SLICE(v) ((IndexSlice){ .start = v & 0xffffffff, .len = (((u32)(v >> 32)) & BEFORE_TAG_MASK) >> 3 }) #define VALUE_SYMBOL(symId) (SYMBOL_MASK | symId) @@ -106,19 +54,20 @@ #define VALUE_RETINFO_RETFLAG(v) ((v & 0xff00) >> 8) #define VALUE_IS_BOOLEAN(v) ((v & (TAGGED_PRIMITIVE_MASK | SIGN_MASK)) == BOOLEAN_MASK) -#define VALUE_IS_POINTER(v) (v >= POINTER_MASK) -#define VALUE_IS_CLOSURE(v) (VALUE_IS_POINTER(v) && (VALUE_AS_HEAPOBJECT(v)->head.typeId == TYPE_CLOSURE)) -#define VALUE_IS_BOX(v) (VALUE_IS_POINTER(v) && (VALUE_AS_HEAPOBJECT(v)->head.typeId == TYPE_BOX)) +#define VALUE_IS_POINTER(v) (v >= NOCYC_POINTER_MASK) +#define VALUE_IS_CLOSURE(v) (VALUE_IS_POINTER(v) && (OBJ_TYPEID(VALUE_AS_HEAPOBJECT(v)) == TYPE_CLOSURE)) +#define VALUE_IS_BOX(v) (VALUE_IS_POINTER(v) && (OBJ_TYPEID(VALUE_AS_HEAPOBJECT(v)) == TYPE_BOX)) #define VALUE_IS_NONE(v) (v == NONE_MASK) #define VALUE_IS_FLOAT(v) ((v & TAGGED_VALUE_MASK) != TAGGED_VALUE_MASK) #define VALUE_IS_ERROR(v) ((v & (TAGGED_PRIMITIVE_MASK | SIGN_MASK)) == ERROR_MASK) -#define VALUE_IS_LIST(v) (VALUE_IS_POINTER(v) && (VALUE_AS_HEAPOBJECT(v)->head.typeId == TYPE_LIST)) -#define VALUE_IS_MAP(v) (VALUE_IS_POINTER(v) && (VALUE_AS_HEAPOBJECT(v)->head.typeId == TYPE_MAP)) +#define VALUE_IS_LIST(v) (VALUE_IS_POINTER(v) && (OBJ_TYPEID(VALUE_AS_HEAPOBJECT(v)) == TYPE_LIST)) +#define VALUE_IS_MAP(v) (VALUE_IS_POINTER(v) && (OBJ_TYPEID(VALUE_AS_HEAPOBJECT(v)) == TYPE_MAP)) #define VALUE_BOTH_FLOATS(a, b) (VALUE_IS_FLOAT(a) && VALUE_IS_FLOAT(b)) #define VALUE_IS_INTEGER(v) ((v & (TAGGED_INTEGER_MASK | SIGN_MASK)) == TAGGED_INTEGER_MASK) #define VALUE_BOTH_INTEGERS(a, b) ((a & b & (TAGGED_INTEGER_MASK | SIGN_MASK)) == TAGGED_INTEGER_MASK) #define VALUE_ALL3_INTEGERS(a, b, c) ((a & b & c & (TAGGED_INTEGER_MASK | SIGN_MASK)) == TAGGED_INTEGER_MASK) +#define OBJ_TYPEID(o) (o->head.typeId & TYPE_MASK) #define FMT_STRZ(s) ((FmtValue){ .type = FMT_TYPE_STRING, .data = { .string = s }, 
.data2 = { .string = strlen(s) }}) #define FMT_STR(s) ((FmtValue){ .type = FMT_TYPE_STRING, .data = { .string = s.ptr }, .data2 = { .string = s.len }}) @@ -136,7 +85,7 @@ static inline bool valueAssumeNotPtrIsStaticString(Value v) { static inline bool valueIsString(Value v) { if (VALUE_IS_POINTER(v)) { HeapObject* obj = VALUE_AS_HEAPOBJECT(v); - return (obj->head.typeId == TYPE_ASTRING) || (obj->head.typeId == TYPE_USTRING) || (obj->head.typeId == TYPE_STRING_SLICE); + return (OBJ_TYPEID(obj) == TYPE_ASTRING) || (OBJ_TYPEID(obj) == TYPE_USTRING) || (OBJ_TYPEID(obj) == TYPE_STRING_SLICE); } else { return valueAssumeNotPtrIsStaticString(v); } @@ -146,7 +95,7 @@ static inline bool valueIsRawString(Value v) { if (!VALUE_IS_POINTER(v)) { return false; } - TypeId typeId = VALUE_AS_HEAPOBJECT(v)->head.typeId; + TypeId typeId = OBJ_TYPEID(VALUE_AS_HEAPOBJECT(v)); return (typeId == TYPE_RAWSTRING) || (typeId == TYPE_RAWSTRING_SLICE); } @@ -202,7 +151,7 @@ static inline void release(VM* vm, Value val) { } static inline void releaseObject(VM* vm, HeapObject* obj) { - VLOG("release obj: {}, rc={}\n", FMT_STR(getTypeName(vm, obj->head.typeId)), FMT_U32(obj->head.rc)); + VLOG("release obj: {}, rc={}\n", FMT_STR(getTypeName(vm, OBJ_TYPEID(obj))), FMT_U32(obj->head.rc)); #if (TRACE) zCheckDoubleFree(vm, obj); #endif @@ -210,7 +159,7 @@ static inline void releaseObject(VM* vm, HeapObject* obj) { #if TRACK_GLOBAL_RC #if TRACE if (vm->refCounts == 0) { - PRINT("Double free. {}\n", FMT_U32(obj->head.typeId)); + PRINT("Double free. {}\n", FMT_U32(OBJ_TYPEID(obj))); zFatal(); } #endif @@ -229,7 +178,7 @@ static inline void retainObject(VM* vm, HeapObject* obj) { obj->head.rc += 1; #if TRACE zCheckRetainDanglingPointer(vm, obj); - VLOG("retain {} rc={}\n", FMT_STR(getTypeName(vm, obj->head.typeId)), FMT_U32(obj->head.rc)); + VLOG("retain {} rc={}\n", FMT_STR(getTypeName(vm, OBJ_TYPEID(obj))), FMT_U32(obj->head.rc)); #endif #if TRACK_GLOBAL_RC vm->refCounts += 1; @@ -275,7 +224,7 @@ static inline TypeId getTypeId(Value val) { if (bits >= TAGGED_VALUE_MASK) { // Tagged. if (VALUE_IS_POINTER(val)) { - return VALUE_AS_HEAPOBJECT(val)->head.typeId; + return OBJ_TYPEID(VALUE_AS_HEAPOBJECT(val)); } else { if (bits >= TAGGED_INTEGER_MASK) { return TYPE_INTEGER; @@ -308,10 +257,10 @@ static inline uint32_t stackOffset(VM* vm, Value* stack) { static inline uint8_t getFieldOffset(VM* vm, HeapObject* obj, uint32_t symId) { FieldSymbolMap* symMap = ((FieldSymbolMap*)vm->fieldSyms.buf) + symId; - if (obj->head.typeId == symMap->mruTypeId) { + if (OBJ_TYPEID(obj) == symMap->mruTypeId) { return (uint8_t)symMap->mruOffset; } else { - return zGetFieldOffsetFromTable(vm, obj->head.typeId, symId); + return zGetFieldOffsetFromTable(vm, OBJ_TYPEID(obj), symId); } } @@ -348,19 +297,19 @@ static inline FuncSig getResolvedFuncSig(VM* vm, FuncSigId id) { static inline ValueResult allocObject(VM* vm, TypeId typeId, Value* fields, u8 numFields) { // First slot holds the typeId and rc. 
- HeapObjectResult res = zAllocExternalObject(vm, (1 + numFields) * sizeof(Value)); + HeapObjectResult res = zAllocExternalCycObject(vm, (1 + numFields) * sizeof(Value)); if (UNLIKELY(res.code != RES_CODE_SUCCESS)) { return (ValueResult){ .code = res.code }; } res.obj->object = (Object){ - .typeId = typeId, + .typeId = typeId | CYC_TYPE_MASK, .rc = 1, }; Value* dst = objectGetValuesPtr(&res.obj->object); memcpy(dst, fields, numFields * sizeof(Value)); - return (ValueResult){ .val = VALUE_PTR(res.obj), .code = RES_CODE_SUCCESS }; + return (ValueResult){ .val = VALUE_CYCPTR(res.obj), .code = RES_CODE_SUCCESS }; } static inline ValueResult allocEmptyMap(VM* vm) { @@ -369,7 +318,7 @@ static inline ValueResult allocEmptyMap(VM* vm) { return (ValueResult){ .code = res.code }; } res.obj->map = (Map){ - .typeId = TYPE_MAP, + .typeId = TYPE_MAP | CYC_TYPE_MASK, .rc = 1, .inner = { .metadata = 0, @@ -379,7 +328,7 @@ static inline ValueResult allocEmptyMap(VM* vm) { .available = 0, }, }; - return (ValueResult){ .val = VALUE_PTR(res.obj), .code = RES_CODE_SUCCESS }; + return (ValueResult){ .val = VALUE_CYCPTR(res.obj), .code = RES_CODE_SUCCESS }; } static inline ValueResult allocClosure( @@ -390,13 +339,13 @@ static inline ValueResult allocClosure( if (numCapturedVals <= 2) { res = zAllocPoolObject(vm); } else { - res = zAllocExternalObject(vm, (2 + numCapturedVals) * sizeof(Value)); + res = zAllocExternalCycObject(vm, (2 + numCapturedVals) * sizeof(Value)); } if (UNLIKELY(res.code != RES_CODE_SUCCESS)) { return (ValueResult){ .code = res.code }; } res.obj->closure = (Closure){ - .typeId = TYPE_CLOSURE, + .typeId = TYPE_CLOSURE | CYC_TYPE_MASK, .rc = 1, .funcPc = funcPc, .numParams = numParams, @@ -417,7 +366,7 @@ static inline ValueResult allocClosure( retain(vm, fp[local]); dst[i] = fp[local]; } - return (ValueResult){ .val = VALUE_PTR(res.obj), .code = RES_CODE_SUCCESS }; + return (ValueResult){ .val = VALUE_CYCPTR(res.obj), .code = RES_CODE_SUCCESS }; } static inline ValueResult allocLambda(VM* vm, uint32_t funcPc, uint8_t numParams, uint8_t stackSize, uint16_t rFuncSigId) { @@ -442,11 +391,11 @@ static inline ValueResult allocBox(VM* vm, Value val) { return (ValueResult){ .code = res.code }; } res.obj->box = (Box){ - .typeId = TYPE_BOX, + .typeId = TYPE_BOX | CYC_TYPE_MASK, .rc = 1, .val = val, }; - return (ValueResult){ .val = VALUE_PTR(res.obj), .code = RES_CODE_SUCCESS }; + return (ValueResult){ .val = VALUE_CYCPTR(res.obj), .code = RES_CODE_SUCCESS }; } static inline ValueResult allocMetaType(VM* vm, uint8_t symType, uint32_t symId) { @@ -1362,7 +1311,7 @@ ResultCode execBytecode(VM* vm) { if (offset != NULL_U8) { \ stack[dst] = objectGetField((Object*)obj, offset); \ pc[0] = FIELD_BODY_IC_##v; \ - WRITE_U16(5, obj->head.typeId); \ + WRITE_U16(5, OBJ_TYPEID(obj)); \ pc[7] = offset; \ } else { \ stack[dst] = zGetFieldFallback(vm, obj, ((FieldSymbolMap*)vm->fieldSyms.buf)[symId].nameId); \ @@ -1383,7 +1332,7 @@ ResultCode execBytecode(VM* vm) { uint8_t dst = pc[2]; if (VALUE_IS_POINTER(recv)) { HeapObject* obj = VALUE_AS_HEAPOBJECT(recv); - if (obj->head.typeId == READ_U16(5)) { + if (OBJ_TYPEID(obj) == READ_U16(5)) { stack[dst] = objectGetField((Object*)obj, pc[7]); pc += 8; NEXT(); @@ -1407,7 +1356,7 @@ ResultCode execBytecode(VM* vm) { uint8_t dst = pc[2]; if (VALUE_IS_POINTER(recv)) { HeapObject* obj = VALUE_AS_HEAPOBJECT(recv); - if (obj->head.typeId == READ_U16(5)) { + if (OBJ_TYPEID(obj) == READ_U16(5)) { stack[dst] = objectGetField((Object*)obj, pc[7]); retain(vm, stack[dst]); pc += 8; 
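Taken together, the allocation changes above establish the convention the collector relies on: any object that can participate in a reference cycle gets CYC_TYPE_MASK OR'd into its typeId and is boxed with VALUE_CYCPTR, so both the boxed value and the object header can be classified with a single mask test. Below is a minimal standalone Zig sketch of that round-trip; the mask constants are copied from the vm.h hunk later in this patch, and the helper functions are hypothetical stand-ins for Value.initPtr/initCycPtr, not the VM's API.

const std = @import("std");

// Masks copied from the vm.h hunk below.
const NOCYC_POINTER_MASK: u64 = 0xFFFC000000000000; // TAGGED_VALUE_MASK | SIGN_MASK
const CYC_POINTER_MASK: u64 = NOCYC_POINTER_MASK | (1 << 49);
const POINTER_MASK: u64 = CYC_POINTER_MASK;
const TYPE_MASK: u32 = 0x3fffffff;
const CYC_TYPE_MASK: u32 = 0x40000000;

// Hypothetical helpers mirroring Value.initPtr/initCycPtr in value.zig.
fn initPtr(addr: usize) u64 {
    return NOCYC_POINTER_MASK | @as(u64, addr);
}
fn initCycPtr(addr: usize) u64 {
    return CYC_POINTER_MASK | @as(u64, addr);
}
fn isPointer(val: u64) bool {
    return val >= NOCYC_POINTER_MASK;
}
fn isCycPointer(val: u64) bool {
    return val >= CYC_POINTER_MASK;
}
fn asAddr(val: u64) usize {
    return @as(usize, @intCast(val & ~POINTER_MASK));
}

test "pointer tags round-trip" {
    const addr: usize = 0x10000; // 48-bit user-space addresses fit below bit 49.
    try std.testing.expect(isPointer(initPtr(addr)) and !isCycPointer(initPtr(addr)));
    try std.testing.expect(isCycPointer(initCycPtr(addr)));
    try std.testing.expectEqual(addr, asAddr(initCycPtr(addr)));
    try std.testing.expectEqual(addr, asAddr(initPtr(addr)));

    // Object headers carry the same distinction: allocObject above stores
    // typeId | CYC_TYPE_MASK, and OBJ_TYPEID recovers the raw id.
    const typeId: u32 = 7;
    try std.testing.expectEqual(typeId, (typeId | CYC_TYPE_MASK) & TYPE_MASK);
}

Because CYC_POINTER_MASK sits numerically above NOCYC_POINTER_MASK, the hot-path check val >= NOCYC_POINTER_MASK still accepts both pointer flavors with one comparison, while val >= CYC_POINTER_MASK narrows to cyclable objects only.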
@@ -1599,7 +1548,7 @@ ResultCode execBytecode(VM* vm) { *lastValue = val; pc[0] = CodeSetFieldReleaseIC; - WRITE_U16(4, obj->head.typeId); + WRITE_U16(4, OBJ_TYPEID(obj)); pc[6] = offset; pc += 7; NEXT(); @@ -1616,7 +1565,7 @@ ResultCode execBytecode(VM* vm) { Value recv = stack[pc[1]]; if (VALUE_IS_POINTER(recv)) { HeapObject* obj = VALUE_AS_HEAPOBJECT(recv); - if (obj->head.typeId == READ_U16(4)) { + if (OBJ_TYPEID(obj) == READ_U16(4)) { Value* lastValue = objectGetFieldPtr((Object*)obj, pc[6]); release(vm, *lastValue); *lastValue = stack[pc[2]]; @@ -1732,7 +1681,7 @@ ResultCode execBytecode(VM* vm) { Value fiber = stack[pc[1]]; if (VALUE_IS_POINTER(fiber)) { HeapObject* obj = VALUE_AS_HEAPOBJECT(fiber); - if (obj->head.typeId == TYPE_FIBER) { + if (OBJ_TYPEID(obj) == TYPE_FIBER) { if ((Fiber*)obj != vm->curFiber) { if (obj->fiber.pcOffset != NULL_U32) { PcSp res = zPushFiber(vm, pcOffset(vm, pc + 3), stack, (Fiber*)obj, pc[2]); @@ -1788,7 +1737,7 @@ ResultCode execBytecode(VM* vm) { #endif HeapObject* obj = VALUE_AS_HEAPOBJECT(box); #if TRACE - ASSERT(obj->head.typeId == TYPE_BOX); + ASSERT(OBJ_TYPEID(obj) == TYPE_BOX); #endif obj->box.val = rval; pc += 3; @@ -1802,7 +1751,7 @@ ResultCode execBytecode(VM* vm) { #endif HeapObject* obj = VALUE_AS_HEAPOBJECT(box); #if TRACE - ASSERT(obj->head.typeId == TYPE_BOX); + ASSERT(OBJ_TYPEID(obj) == TYPE_BOX); #endif release(vm, obj->box.val); obj->box.val = rval; diff --git a/src/vm.h b/src/vm.h index a321c569f..f7e4ce852 100644 --- a/src/vm.h +++ b/src/vm.h @@ -21,6 +21,76 @@ typedef struct IndexSlice { u32 len; } IndexSlice; +// 1000000000000000: Most significant bit. +#define SIGN_MASK ((u64)1 << 63) + +// 0111111111110000 (7FF0): +INF (from math.inf, +/zero, overflow) +// 1111111111110000 (FFF0): -INF (from math.neginf, -/zero, overflow) +// 0111111111111000 (7FF8): QNAN (from neg op on -QNAN, math.nan, zero/zero non-intel, QNAN arithmetic) +// 1111111111111000 (FFF8): -QNAN (from neg op on QNAN, zero/zero intel, -QNAN arithmetic, -QNAN/zero) +// Intel uses the sign bit for an indefinite real or -QNAN. + +// 0111111111111100 (7FFC): QNAN and one extra bit to the right. +#define TAGGED_VALUE_MASK ((u64)0x7ffc000000000000) + +// 0000000000000001 +#define INTEGER_MASK ((u64)1 << 48) + +// 0111111111111101 0000000000000111 +#define TAGGED_PRIMITIVE_MASK (TAGGED_VALUE_MASK | PRIMITIVE_MASK) +// 0000000000000001 0000000000000111: Bits relevant to the primitive's type. +#define PRIMITIVE_MASK (TAGGED_VALUE_MASK | ((u64)TAG_MASK << 32) | INTEGER_MASK) + +// 1111111111111100: TaggedMask + Sign bit indicates a pointer value. +#define NOCYC_POINTER_MASK (TAGGED_VALUE_MASK | SIGN_MASK) + +// 1111111111111110: Extra bit indicating cyclable pointer.
+#define CYC_POINTER_MASK (NOCYC_POINTER_MASK | ((u64)1 << 49)) + +#define POINTER_MASK (CYC_POINTER_MASK) + +// 0111111111111101 +#define TAGGED_INTEGER_MASK (TAGGED_VALUE_MASK | INTEGER_MASK) + +// 0111111111111100 0000000000000000 +#define NONE_MASK (TAGGED_VALUE_MASK | ((u64)TAG_NONE << 32)) + +// 0111111111111100 0000000000000001 +#define BOOLEAN_MASK (TAGGED_VALUE_MASK | ((u64)TAG_BOOLEAN << 32)) + +#define TAG_MASK (((uint32_t)1 << 3) - 1) +#define TAG_NONE ((uint8_t)0) +#define TAG_BOOLEAN ((uint8_t)1) +#define TAG_ERROR ((uint8_t)2) +#define TAG_STATIC_ASTRING ((uint8_t)3) +#define TAG_STATIC_USTRING ((uint8_t)4) +#define TAG_ENUM ((uint8_t)5) +#define TAG_SYMBOL ((uint8_t)6) +#define FALSE_MASK BOOLEAN_MASK +#define TRUE_BIT_MASK ((uint64_t)1) +#define TRUE_MASK (BOOLEAN_MASK | TRUE_BIT_MASK) + +#define ERROR_MASK (TAGGED_VALUE_MASK | ((u64)TAG_ERROR << 32)) +#define ENUM_MASK (TAGGED_VALUE_MASK | ((u64)TAG_ENUM << 32)) +#define SYMBOL_MASK (TAGGED_VALUE_MASK | ((u64)TAG_SYMBOL << 32)) +#define STATIC_ASTRING_MASK (TAGGED_VALUE_MASK | ((u64)TAG_STATIC_ASTRING << 32)) +#define STATIC_USTRING_MASK (TAGGED_VALUE_MASK | ((u64)TAG_STATIC_USTRING << 32)) +#define BEFORE_TAG_MASK ((u32)(0x00007fff << 3)) +#define NULL_U32 UINT32_MAX +#define NULL_U8 UINT8_MAX + +// 0011111111111111 +#define TYPE_MASK ((u32)0x3fffffff) + +// 0100000000000000: Cyclable type bit. +#define CYC_TYPE_MASK ((u32)0x40000000) + +// 1000000000000000: Mark bit. +#define GC_MARK_MASK ((u32)0x80000000) + +// 1100000000000000 +#define GC_MARK_CYC_TYPE_MASK ((u32)0xC0000000) + typedef enum { FMT_TYPE_CHAR, FMT_TYPE_STRING, @@ -453,8 +523,8 @@ typedef struct List { typedef union HeapObject { struct { - uint32_t typeId; - uint32_t rc; + u32 typeId; + u32 rc; } head; Fiber fiber; Object object; @@ -585,9 +655,9 @@ typedef struct TraceInfo { u32 numRetainAttempts; u32 numReleases; u32 numReleaseAttempts; - u32 numForceReleases; - u32 numRetainCycles; - u32 numRetainCycleRoots; + + // Number of cyc objects freed by the GC. + u32 numCycFrees; } TraceInfo; typedef enum { @@ -655,6 +725,9 @@ typedef struct VM { #if TRACE HeapObject* heapFreeTail; #endif +#if HAS_GC + void* cyclableHead; +#endif ZCyList tryStack; @@ -828,6 +901,7 @@ Value zEvalCompareNot(VM* vm, Value left, Value right); PcSpResult zCall(VM* vm, Inst* pc, Value* stack, Value callee, uint8_t startLocal, uint8_t numArgs, Value retInfo); HeapObjectResult zAllocPoolObject(VM* vm); HeapObjectResult zAllocExternalObject(VM* vm, size_t size); +HeapObjectResult zAllocExternalCycObject(VM* vm, size_t size); ValueResult zAllocStringTemplate(VM* vm, Inst* strs, u8 strCount, Value* vals, u8 valCount); ValueResult zAllocMap(VM* vm, u16* keyIdxs, Value* vals, u32 numEntries); Value zGetFieldFallback(VM* vm, HeapObject* obj, NameId nameId); diff --git a/src/vm.zig b/src/vm.zig index 0339de0ab..38cdb84d7 100644 --- a/src/vm.zig +++ b/src/vm.zig @@ -62,9 +62,13 @@ pub const VM = struct { /// Object heap pages. heapPages: cy.List(*cy.heap.HeapPage), heapFreeHead: ?*HeapObject, - /// Tail is only used in debug mode. + /// Tail is only used in trace mode. heapFreeTail: if (cy.Trace) ?*HeapObject else void, + /// GC: Points to the first cyclable object. + /// Always contains one dummy node to avoid null checking.
+ cyclableHead: if (cy.hasGC) *cy.heap.DListNode else void, + tryStack: cy.List(vmc.TryFrame), refCounts: if (cy.TrackGlobalRC) usize else void, @@ -199,6 +203,7 @@ pub const VM = struct { .heapPages = .{}, .heapFreeHead = null, .heapFreeTail = if (cy.Trace) null else undefined, + .cyclableHead = if (cy.hasGC) @ptrCast(&dummyCyclableHead) else {}, .pc = undefined, .framePtr = undefined, .tryStack = .{}, @@ -299,6 +304,7 @@ pub const VM = struct { for (self.varSyms.items()) |vsym| { release(self, vsym.value); } + self.varSyms.clearRetainingCapacity(); { var iter = self.funcSymDeps.iterator(); @@ -563,11 +569,9 @@ pub const VM = struct { self.trace.totalOpCounts = 0; self.trace.numReleases = 0; self.trace.numReleaseAttempts = 0; - self.trace.numForceReleases = 0; self.trace.numRetains = 0; self.trace.numRetainAttempts = 0; - self.trace.numRetainCycles = 0; - self.trace.numRetainCycleRoots = 0; + self.trace.numCycFrees = 0; } tt = cy.debug.timer(); @@ -637,6 +641,9 @@ pub const VM = struct { } pub fn getTypeName(vm: *const cy.VM, typeId: rt.TypeId) []const u8 { + if (typeId == cy.NullId >> 2) { + return "danglingObject"; + } const vmType = vm.types.buf[typeId]; return vmType.namePtr[0..vmType.nameLen]; } @@ -1129,7 +1136,7 @@ pub const VM = struct { const obj = recv.asHeapObject(); const symMap = &self.fieldSyms.buf[fieldId]; - if (obj.head.typeId == symMap.mruTypeId) { + if (obj.getTypeId() == symMap.mruTypeId) { obj.object.getValuePtr(symMap.mruOffset).* = val; } else { const offset = self.getFieldOffset(obj, fieldId); @@ -1171,10 +1178,10 @@ pub const VM = struct { pub fn getFieldOffset(self: *VM, obj: *HeapObject, symId: SymbolId) linksection(cy.HotSection) u8 { const symMap = self.fieldSyms.buf[symId]; - if (obj.head.typeId == symMap.mruTypeId) { + if (obj.getTypeId() == symMap.mruTypeId) { return @intCast(symMap.mruOffset); } else { - return @call(.never_inline, VM.getFieldOffsetFromTable, .{self, obj.head.typeId, symId}); + return @call(.never_inline, VM.getFieldOffsetFromTable, .{self, obj.getTypeId(), symId}); } } @@ -1228,13 +1235,13 @@ pub const VM = struct { fn getFieldFallback(self: *const VM, obj: *const HeapObject, nameId: sema.NameSymId) linksection(cy.HotSection) Value { @setCold(true); const name = sema.getName(self.compiler, nameId); - if (obj.head.typeId == rt.MapT) { + if (obj.getTypeId() == rt.MapT) { const map = cy.ptrAlignCast(*const cy.MapInner, &obj.map.inner); if (map.getByString(self, name)) |val| { return val; } else return Value.None; } else { - log.debug("Missing symbol for object: {}", .{obj.head.typeId}); + log.debug("Missing symbol for object: {}", .{obj.getTypeId()}); return Value.None; } } @@ -1254,7 +1261,7 @@ pub const VM = struct { @as(*align(1) u48, @ptrCast(pc + 6)).* = @intCast(@intFromPtr(sym.inner.nativeFunc1)); self.pc = pc; - self.framePtr = newFramePtr; + self.framePtr = framePtr; const res = sym.inner.nativeFunc1(@ptrCast(self), @ptrCast(newFramePtr + 4), numArgs); if (res.isInterrupt()) { return error.Panic; @@ -1434,15 +1441,15 @@ pub const VM = struct { pub fn tryValueAsComparableString(self: *const VM, val: Value) linksection(cy.Section) ?[]const u8 { if (val.isPointer()) { const obj = val.asHeapObject(); - if (obj.head.typeId == rt.AstringT) { + if (obj.getTypeId() == rt.AstringT) { return obj.astring.getConstSlice(); - } else if (obj.head.typeId == rt.UstringT) { + } else if (obj.getTypeId() == rt.UstringT) { return obj.ustring.getConstSlice(); - } else if (obj.head.typeId == rt.StringSliceT) { + } else if (obj.getTypeId() == 
rt.StringSliceT) { return cy.heap.StringSlice.getConstSlice(obj.stringSlice); - } else if (obj.head.typeId == rt.RawstringT) { + } else if (obj.getTypeId() == rt.RawstringT) { return obj.rawstring.getConstSlice(); - } else if (obj.head.typeId == rt.RawstringSliceT) { + } else if (obj.getTypeId() == rt.RawstringSliceT) { return obj.rawstringSlice.getConstSlice(); } else return null; } else { @@ -1487,11 +1494,11 @@ pub const VM = struct { pub fn valueAsString(self: *const VM, val: Value) linksection(cy.Section) []const u8 { if (val.isPointer()) { const obj = val.asHeapObject(); - if (obj.head.typeId == rt.AstringT) { + if (obj.getTypeId() == rt.AstringT) { return obj.astring.getConstSlice(); - } else if (obj.head.typeId == rt.UstringT) { + } else if (obj.getTypeId() == rt.UstringT) { return obj.ustring.getConstSlice(); - } else if (obj.head.typeId == rt.StringSliceT) { + } else if (obj.getTypeId() == rt.StringSliceT) { return cy.heap.StringSlice.getConstSlice(obj.stringSlice); } else unreachable; } else { @@ -1712,7 +1719,7 @@ pub const VM = struct { return slice; }, else => { - const vmType = self.types.buf[obj.head.typeId]; + const vmType = self.types.buf[obj.getTypeId()]; const buf = vmType.namePtr[0..vmType.nameLen]; if (getCharLen) { outCharLen.* = @intCast(buf.len); @@ -1743,15 +1750,15 @@ pub const StringType = enum { fn getComparableStringType(val: Value) ?StringType { if (val.isPointer()) { const obj = val.asHeapObject(); - if (obj.head.typeId == rt.AstringT) { + if (obj.getTypeId() == rt.AstringT) { return .astring; - } else if (obj.head.typeId == rt.UstringT) { + } else if (obj.getTypeId() == rt.UstringT) { return .ustring; - } else if (obj.head.typeId == rt.StringSliceT) { + } else if (obj.getTypeId() == rt.StringSliceT) { return .slice; - } else if (obj.head.typeId == rt.RawstringT) { + } else if (obj.getTypeId() == rt.RawstringT) { return .rawstring; - } else if (obj.head.typeId == rt.RawstringSliceT) { + } else if (obj.getTypeId() == rt.RawstringSliceT) { return .rawSlice; } return null; @@ -1890,6 +1897,9 @@ test "vm internals." 
{ try t.eq(@offsetOf(VM, "strInterns"), @offsetOf(vmc.VM, "strInterns")); try t.eq(@offsetOf(VM, "heapPages"), @offsetOf(vmc.VM, "heapPages")); try t.eq(@offsetOf(VM, "heapFreeHead"), @offsetOf(vmc.VM, "heapFreeHead")); + if (cy.hasGC) { + try t.eq(@offsetOf(VM, "cyclableHead"), @offsetOf(vmc.VM, "cyclableHead")); + } try t.eq(@offsetOf(VM, "tryStack"), @offsetOf(vmc.VM, "tryStack")); if (cy.TrackGlobalRC) { try t.eq(@offsetOf(VM, "refCounts"), @offsetOf(vmc.VM, "refCounts")); @@ -2102,7 +2112,7 @@ fn evalLoop(vm: *VM) linksection(cy.HotSection) error{StackOverflow, OutOfMemory const dst = pc[2].val; if (recv.isPointer()) { const obj = recv.asHeapObject(); - if (obj.head.typeId == @as(*align (1) u16, @ptrCast(pc + 5)).*) { + if (obj.getTypeId() == @as(*align (1) u16, @ptrCast(pc + 5)).*) { framePtr[dst] = obj.object.getValue(pc[7].val); pc += 8; continue; @@ -2728,7 +2738,7 @@ fn evalLoop(vm: *VM) linksection(cy.HotSection) error{StackOverflow, OutOfMemory const recv = framePtr[pc[1].val]; if (recv.isPointer()) { const obj = recv.asHeapObject(); - if (obj.head.typeId == @as(*align (1) u16, @ptrCast(pc + 4)).*) { + if (obj.getTypeId() == @as(*align (1) u16, @ptrCast(pc + 4)).*) { const lastValue = obj.object.getValuePtr(pc[6].val); release(vm, lastValue.*); lastValue.* = framePtr[pc[2].val]; @@ -2752,7 +2762,7 @@ fn evalLoop(vm: *VM) linksection(cy.HotSection) error{StackOverflow, OutOfMemory const dst = pc[2].val; if (recv.isPointer()) { const obj = recv.asHeapObject(); - if (obj.head.typeId == @as(*align (1) u16, @ptrCast(pc + 5)).*) { + if (obj.getTypeId() == @as(*align (1) u16, @ptrCast(pc + 5)).*) { framePtr[dst] = obj.object.getValue(pc[7].val); retain(vm, framePtr[dst]); pc += 8; @@ -2863,7 +2873,7 @@ fn evalLoop(vm: *VM) linksection(cy.HotSection) error{StackOverflow, OutOfMemory framePtr[dst] = obj.object.getValue(offset); // Inline cache. pc[0] = cy.Inst.initOpCode(.fieldIC); - @as(*align (1) u16, @ptrCast(pc + 5)).* = @intCast(obj.head.typeId); + @as(*align (1) u16, @ptrCast(pc + 5)).* = @intCast(obj.getTypeId()); pc[7] = cy.Inst{ .val = offset }; } else { const sym = vm.fieldSyms.buf[symId]; @@ -2890,7 +2900,7 @@ fn evalLoop(vm: *VM) linksection(cy.HotSection) error{StackOverflow, OutOfMemory framePtr[dst] = obj.object.getValue(offset); // Inline cache. pc[0] = cy.Inst.initOpCode(.fieldRetainIC); - @as(*align (1) u16, @ptrCast(pc + 5)).* = @intCast(obj.head.typeId); + @as(*align (1) u16, @ptrCast(pc + 5)).* = @intCast(obj.getTypeId()); pc[7] = cy.Inst { .val = offset }; } else { const sym = vm.fieldSyms.buf[symId]; @@ -2920,7 +2930,7 @@ fn evalLoop(vm: *VM) linksection(cy.HotSection) error{StackOverflow, OutOfMemory // Inline cache. 
pc[0] = cy.Inst.initOpCode(.setFieldReleaseIC); - @as(*align (1) u16, @ptrCast(pc + 4)).* = @intCast(obj.head.typeId); + @as(*align (1) u16, @ptrCast(pc + 4)).* = @intCast(obj.getTypeId()); pc[6] = cy.Inst { .val = offset }; pc += 7; continue; @@ -3165,7 +3175,7 @@ fn evalLoop(vm: *VM) linksection(cy.HotSection) error{StackOverflow, OutOfMemory } const obj = box.asHeapObject(); if (builtin.mode == .Debug) { - std.debug.assert(obj.head.typeId == rt.BoxT); + std.debug.assert(obj.getTypeId() == rt.BoxT); } obj.box.val = rval; pc += 3; @@ -3182,7 +3192,7 @@ fn evalLoop(vm: *VM) linksection(cy.HotSection) error{StackOverflow, OutOfMemory } const obj = box.asHeapObject(); if (builtin.mode == .Debug) { - std.debug.assert(obj.head.typeId == rt.BoxT); + std.debug.assert(obj.getTypeId() == rt.BoxT); } @call(.never_inline, release, .{vm, obj.box.val}); obj.box.val = rval; @@ -3587,7 +3597,7 @@ const FieldEntry = struct { pub fn call(vm: *VM, pc: [*]cy.Inst, framePtr: [*]Value, callee: Value, startLocal: u8, numArgs: u8, retInfo: Value) !cy.fiber.PcSp { if (callee.isPointer()) { const obj = callee.asHeapObject(); - switch (obj.head.typeId) { + switch (obj.getTypeId()) { rt.ClosureT => { if (numArgs != obj.closure.numParams) { log.debug("params/args mismatch {} {}", .{numArgs, obj.lambda.numParams}); @@ -3659,7 +3669,7 @@ pub fn call(vm: *VM, pc: [*]cy.Inst, framePtr: [*]Value, callee: Value, startLoc pub fn callNoInline(vm: *VM, pc: *[*]cy.Inst, framePtr: *[*]Value, callee: Value, startLocal: u8, numArgs: u8, retInfo: Value) !void { if (callee.isPointer()) { const obj = callee.asHeapObject(); - switch (obj.head.typeId) { + switch (obj.getTypeId()) { rt.ClosureT => { if (numArgs != obj.closure.numParams) { cy.panic("params/args mismatch"); @@ -3958,7 +3968,7 @@ pub fn dumpValue(vm: *const VM, val: Value) void { } else { if (val.isPointer()) { const obj = val.asHeapObject(); - switch (obj.head.typeId) { + switch (obj.getTypeId()) { rt.ListT => fmt.printStdout("List {} len={}\n", &.{v(obj), v(obj.list.list.len)}), rt.MapT => fmt.printStdout("Map {} size={}\n", &.{v(obj), v(obj.map.inner.size)}), rt.AstringT => { @@ -3982,8 +3992,8 @@ pub fn dumpValue(vm: *const VM, val: Value) void { rt.FiberT => fmt.printStdout("Fiber {}\n", &.{v(obj)}), rt.NativeFuncT => fmt.printStdout("NativeFunc {}\n", &.{v(obj)}), else => { - const vmType = vm.types.buf[obj.head.typeId]; - fmt.printStdout("HeapObject {} {} {}\n", &.{v(obj), v(obj.head.typeId), v(vmType.namePtr[0..vmType.nameLen])}); + const vmType = vm.types.buf[obj.getTypeId()]; + fmt.printStdout("HeapObject {} {} {}\n", &.{v(obj), v(obj.getTypeId()), v(vmType.namePtr[0..vmType.nameLen])}); }, } } else { @@ -4172,7 +4182,7 @@ fn isAssignFuncSigCompat(vm: *VM, srcFuncSigId: sema.FuncSigId, dstFuncSigId: se fn setStaticFunc(vm: *VM, symId: SymbolId, val: Value) linksection(cy.Section) !void { if (val.isPointer()) { const obj = val.asHeapObject(); - switch (obj.head.typeId) { + switch (obj.getTypeId()) { rt.NativeFuncT => { const dstRFuncSigId = getFuncSigIdOfSym(vm, symId); if (!isAssignFuncSigCompat(vm, obj.nativeFunc1.funcSigId, dstRFuncSigId)) { @@ -4230,7 +4240,7 @@ fn setStaticFunc(vm: *VM, symId: SymbolId, val: Value) linksection(cy.Section) ! // Don't set func sym dep since the closure is assigned into the func sym entry. 
}, else => { - return vm.panicFmt("Assigning to static function with unsupported type {}.", &.{v(obj.head.typeId)}); + return vm.panicFmt("Assigning to static function with unsupported type {}.", &.{v(obj.getTypeId())}); } } } else { @@ -4558,7 +4568,7 @@ export fn zDumpEvalOp(vm: *const VM, pc: [*]const cy.Inst) void { } export fn zFreeObject(vm: *cy.VM, obj: *HeapObject) linksection(cy.HotSection) void { - cy.heap.freeObject(vm, obj); + cy.heap.freeObject(vm, obj, true, false, true, false); } export fn zEnd(vm: *cy.VM, pc: [*]const cy.Inst) void { @@ -4732,8 +4742,21 @@ export fn zAllocPoolObject(vm: *cy.VM) vmc.HeapObjectResult { }; } +export fn zAllocExternalCycObject(vm: *cy.VM, size: usize) vmc.HeapObjectResult { + const obj = cy.heap.allocExternalObject(vm, size, true) catch { + return .{ + .obj = undefined, + .code = vmc.RES_CODE_UNKNOWN, + }; + }; + return .{ + .obj = @ptrCast(obj), + .code = vmc.RES_CODE_SUCCESS, + }; +} + export fn zAllocExternalObject(vm: *cy.VM, size: usize) vmc.HeapObjectResult { - const obj = cy.heap.allocExternalObject(vm, size) catch { + const obj = cy.heap.allocExternalObject(vm, size, false) catch { return .{ .obj = undefined, .code = vmc.RES_CODE_UNKNOWN, @@ -4917,4 +4940,16 @@ comptime { @export(c_strlen, .{ .name = "strlen", .linkage = .Strong }); @export(c_pow, .{ .name = "pow", .linkage = .Strong }); } -} \ No newline at end of file +} + +const DummyCyclableNode = extern struct { + prev: ?*cy.heap.DListNode, + next: ?*cy.heap.DListNode, + typeId: u32, +}; +pub var dummyCyclableHead = DummyCyclableNode{ + .prev = null, + .next = null, + // This will be marked automatically before sweep, so it's never considered as a cyc object. + .typeId = vmc.GC_MARK_MASK | rt.NoneT, +}; \ No newline at end of file diff --git a/src/vm_c.zig b/src/vm_c.zig index 1df8a090a..0496bcf9f 100644 --- a/src/vm_c.zig +++ b/src/vm_c.zig @@ -8,6 +8,7 @@ const c = @cImport({ @cDefine("TRACK_GLOBAL_RC", if (build_options.trackGlobalRC) "1" else "0"); @cDefine("TRACE", if (build_options.trace) "1" else "0"); @cDefine("IS_32BIT", if (cy.is32Bit) "1" else "0"); + @cDefine("HAS_GC", if (cy.hasGC) "1" else "0"); @cInclude("vm.h"); }); diff --git a/test/behavior_test.zig b/test/behavior_test.zig index a68b48226..38966931c 100644 --- a/test/behavior_test.zig +++ b/test/behavior_test.zig @@ -2715,4 +2715,73 @@ test "Arithmetic operators." { }}.func); try evalPass(.{}, @embedFile("arithmetic_op_test.cy")); -} \ No newline at end of file +} + +test "ARC cycles." { + // GC is able to detect reference cycle. + _ = try evalPass(.{}, + \\import t 'test' + \\func foo(): + \\ var a = [] + \\ var b = [] + \\ a.append(b) + \\ b.append(a) + \\ var res = performGC() + \\ -- Cycle still alive in the current stack so no gc. + \\ t.eq(res['numCycFreed'], 0) + \\ t.eq(res['numObjFreed'], 0) + \\foo() + \\var res = performGC() + \\t.eq(res['numCycFreed'], 2) + \\t.eq(res['numObjFreed'], 2) + ); + + // Reference cycle but still reachable from a root value. + _ = try evalPass(.{ .cleanupGC = true }, + \\import t 'test' + \\var g: none + \\var a = [] + \\var b = [] + \\a.append(b) + \\b.append(a) + \\g = a + \\var res = performGC() + \\t.eq(res['numCycFreed'], 0) + \\t.eq(res['numObjFreed'], 0) + ); + + // Reference cycle with child non cyclable. 
+ _ = try evalPass(.{}, + \\import t 'test' + \\func foo(): + \\ var a = [] + \\ var b = [] + \\ a.append(b) + \\ b.append(a) + \\ a.append(pointer(1)) + \\foo() + \\var res = performGC() + \\t.eq(res['numCycFreed'], 2) + \\t.eq(res['numObjFreed'], 3) + ); + + // Reference cycle with non pool objects. + _ = try evalPass(.{}, + \\import t 'test' + \\type T object: + \\ a any + \\ b any + \\ c any + \\ d any + \\ e any + \\func foo(): + \\ var a = T{} + \\ var b = T{} + \\ a.c = b + \\ b.c = a + \\foo() + \\var res = performGC() + \\t.eq(res['numCycFreed'], 2) + \\t.eq(res['numObjFreed'], 2) + ); +} diff --git a/test/setup.zig b/test/setup.zig index fc1e5a365..d9a7ad4e2 100644 --- a/test/setup.zig +++ b/test/setup.zig @@ -18,6 +18,9 @@ pub const Config = struct { checkGlobalRc: bool = true, + // Whether to performGC at end of eval. + cleanupGC: bool = false, + preEval: ?*const fn (run: *VMrunner) void = null, debug: bool = false, @@ -289,6 +292,10 @@ pub fn eval(config: Config, src: []const u8, optCb: ?*const fn (*VMrunner, EvalR run.vm.internal().compiler.deinitRtObjects(); run.vm.internal().deinitRtObjects(); + if (config.cleanupGC) { + _ = try cy.arc.performGC(run.vm.internal()); + } + if (config.checkGlobalRc) { try cy.arc.checkGlobalRC(run.vm.internal()); } diff --git a/test/trace_test.zig b/test/trace_test.zig index 94fbb638f..cf557d8bd 100644 --- a/test/trace_test.zig +++ b/test/trace_test.zig @@ -94,21 +94,6 @@ test "ARC." { try t.eq(trace.numRetains, 2); try t.eq(trace.numReleases, 2); - // vm.checkMemory is able to detect retain cycle. - val = try run.eval( - \\var a = [] - \\var b = [] - \\a.append(b) - \\b.append(a) - ); - trace = run.getTrace(); - try t.eq(trace.numRetains, 6); - try t.eq(trace.numReleases, 4); - try t.eq(trace.numForceReleases, 0); - try t.eq(try run.checkMemory(), false); - try t.eq(trace.numRetainCycles, 1); - try t.eq(trace.numRetainCycleRoots, 2); - try t.eq(trace.numForceReleases, 2); } test "ARC for static variable declarations." { @@ -337,10 +322,6 @@ const VMrunner = struct { self.vm.release(val); } - fn checkMemory(self: *VMrunner) !bool { - return self.vm.checkMemory(); - } - fn compile(self: *VMrunner, src: []const u8) !cy.ByteCodeBuffer { return self.vm.compile(src); }
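The trace_test hunks above retire the old checkMemory cycle detector in favor of performGC. To make the dummy-head trick concrete, here is a short self-contained Zig sketch; the Node type and sweepShouldFree predicate are hypothetical stand-ins (the real sweep lives behind cy.arc.performGC), and the exact sweep condition is an assumption inferred from the GC_MARK_CYC_TYPE_MASK definition in vm.h.

const std = @import("std");

// Mask constants copied from the vm.h hunk in this patch.
const TYPE_MASK: u32 = 0x3fffffff;
const CYC_TYPE_MASK: u32 = 0x40000000;
const GC_MARK_MASK: u32 = 0x80000000;
const GC_MARK_CYC_TYPE_MASK: u32 = 0xC0000000;

// Hypothetical stand-in for a cyclable object's intrusive list node + header.
const Node = struct {
    prev: ?*Node,
    next: ?*Node,
    typeId: u32,
};

// Assumed sweep predicate: free a node only when it is a cyclable object
// (CYC bit set) that the mark phase did not reach (mark bit clear).
fn sweepShouldFree(n: *const Node) bool {
    return (n.typeId & GC_MARK_CYC_TYPE_MASK) == CYC_TYPE_MASK;
}

test "pre-marked dummy head is never swept" {
    // Mirrors dummyCyclableHead above: its mark bit is pre-set, so a sweep
    // can walk the whole list without null or special-case checks.
    var dummy = Node{ .prev = null, .next = null, .typeId = GC_MARK_MASK };
    var obj = Node{ .prev = &dummy, .next = null, .typeId = 5 | CYC_TYPE_MASK };
    dummy.next = &obj;

    // An unreached cyclable object is swept; the dummy never is.
    try std.testing.expect(sweepShouldFree(&obj));
    try std.testing.expect(!sweepShouldFree(&dummy));

    // If the mark phase reaches the object, it survives the sweep.
    obj.typeId |= GC_MARK_MASK;
    try std.testing.expect(!sweepShouldFree(&obj));

    // Survivors then have their mark cleared for the next collection
    // (assumption), leaving the raw type id intact.
    obj.typeId &= ~GC_MARK_MASK;
    try std.testing.expectEqual(@as(u32, 5), obj.typeId & TYPE_MASK);
}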