diff --git a/CHANGELOG.md b/CHANGELOG.md
index a8336f4..6c357cc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,13 @@

 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

+### [0.0.1-alpha.112](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.111...v0.0.1-alpha.112) (2024-10-03)
+
+
+### Bug Fixes
+
+* double spend error ([3bed620](https://github.com/DIG-Network/dig-chia-sdk/commit/3bed620363e86979cfb2c3035cc7b4501a115b17))
+
 ### [0.0.1-alpha.111](https://github.com/DIG-Network/dig-chia-sdk/compare/v0.0.1-alpha.110...v0.0.1-alpha.111) (2024-10-02)
diff --git a/package-lock.json b/package-lock.json
index 81d201c..211bc63 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,15 +1,15 @@
 {
   "name": "@dignetwork/dig-sdk",
-  "version": "0.0.1-alpha.111",
+  "version": "0.0.1-alpha.112",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@dignetwork/dig-sdk",
-      "version": "0.0.1-alpha.111",
+      "version": "0.0.1-alpha.112",
       "license": "ISC",
       "dependencies": {
-        "@dignetwork/datalayer-driver": "^0.1.28",
+        "@dignetwork/datalayer-driver": "^0.1.29",
         "archiver": "^7.0.1",
         "axios": "^1.7.7",
         "bip39": "^3.1.0",
@@ -58,12 +58,12 @@
       }
     },
     "node_modules/@babel/code-frame": {
-      "version": "7.24.7",
-      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz",
-      "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==",
+      "version": "7.25.7",
+      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.25.7.tgz",
+      "integrity": "sha512-0xZJFNE5XMpENsgfHYTw8FbX4kv53mFLn2i3XPoq69LyhYSCBJtitaHx9QnsVTrsogI4Z3+HtEfZ2/GFPOtf5g==",
       "dev": true,
       "dependencies": {
-        "@babel/highlight": "^7.24.7",
+        "@babel/highlight": "^7.25.7",
         "picocolors": "^1.0.0"
       },
       "engines": {
@@ -71,21 +71,21 @@
       }
     },
     "node_modules/@babel/helper-validator-identifier": {
-      "version": "7.24.7",
-      "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz",
-      "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==",
+      "version": "7.25.7",
+      "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.7.tgz",
+      "integrity": "sha512-AM6TzwYqGChO45oiuPqwL2t20/HdMC1rTPAesnBCgPCSF1x3oN9MVUwQV2iyz4xqWrctwK5RNC8LV22kaQCNYg==",
       "dev": true,
       "engines": {
         "node": ">=6.9.0"
       }
     },
     "node_modules/@babel/highlight": {
-      "version": "7.24.7",
-      "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz",
-      "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==",
+      "version": "7.25.7",
+      "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.25.7.tgz",
+      "integrity": "sha512-iYyACpW3iW8Fw+ZybQK+drQre+ns/tKpXbNESfrhNnPLIklLbXr7MYJ6gPEd0iETGLOK+SxMjVvKb/ffmk+FEw==",
       "dev": true,
       "dependencies": {
-        "@babel/helper-validator-identifier": "^7.24.7",
+        "@babel/helper-validator-identifier": "^7.25.7",
         "chalk": "^2.4.2",
         "js-tokens": "^4.0.0",
         "picocolors": "^1.0.0"
@@ -178,24 +178,24 @@
       }
     },
     "node_modules/@dignetwork/datalayer-driver": {
-      "version": "0.1.28",
-      "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver/-/datalayer-driver-0.1.28.tgz",
-      "integrity": "sha512-wAtrh0kcVJXL6ngYdr622E2btWN+Ap9piwwvKkhLQ7DxRll7ssppVprfl79SXBoQiPrEJ4NEaC8N6TvYGiSNRg==",
+      "version": "0.1.29",
+      "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver/-/datalayer-driver-0.1.29.tgz",
+      "integrity": "sha512-yXkEBns2jRqHkCj4oqgNL5y5lwLdfLz8yu0fqAr4m1lpCiwbUmAwndmQa2xWx56y4L2n+ixQFTJ+DP1i4TxtsA==",
       "engines": {
         "node": ">= 10"
       },
       "optionalDependencies": {
-        "@dignetwork/datalayer-driver-darwin-arm64": "0.1.28",
-        "@dignetwork/datalayer-driver-darwin-x64": "0.1.28",
-        "@dignetwork/datalayer-driver-linux-arm64-gnu": "0.1.28",
-        "@dignetwork/datalayer-driver-linux-x64-gnu": "0.1.28",
-        "@dignetwork/datalayer-driver-win32-x64-msvc": "0.1.28"
+        "@dignetwork/datalayer-driver-darwin-arm64": "0.1.29",
+        "@dignetwork/datalayer-driver-darwin-x64": "0.1.29",
+        "@dignetwork/datalayer-driver-linux-arm64-gnu": "0.1.29",
+        "@dignetwork/datalayer-driver-linux-x64-gnu": "0.1.29",
+        "@dignetwork/datalayer-driver-win32-x64-msvc": "0.1.29"
       }
     },
     "node_modules/@dignetwork/datalayer-driver-darwin-arm64": {
-      "version": "0.1.28",
-      "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver-darwin-arm64/-/datalayer-driver-darwin-arm64-0.1.28.tgz",
-      "integrity": "sha512-ky11BOCnIBLXnZOYn+OkOydGRbbMErhPF2StqJz+3zfN+gO+zyfU3B9fUdCrPaaF6blWmRrgrevmyORZhyXHyQ==",
+      "version": "0.1.29",
+      "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver-darwin-arm64/-/datalayer-driver-darwin-arm64-0.1.29.tgz",
+      "integrity": "sha512-Ev1pEAkrgJyuZv4MqG8hB2kHdFGReW6rkJJfqDK3n61baf8Rx48H9E+JqpBZk31Jp+ZbxIrGzU0sa8VyndMfWA==",
       "cpu": [
         "arm64"
       ],
@@ -208,9 +208,9 @@
       }
     },
     "node_modules/@dignetwork/datalayer-driver-darwin-x64": {
-      "version": "0.1.28",
-      "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver-darwin-x64/-/datalayer-driver-darwin-x64-0.1.28.tgz",
-      "integrity": "sha512-DCBiT2R4Aj06Bz8QGRE24Cw8+4w6PlMCSlfvXqZS/TffVFoBUfiVxv0c638Qz23DU+eocFeGLDjkUmv4Jg26FA==",
+      "version": "0.1.29",
+      "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver-darwin-x64/-/datalayer-driver-darwin-x64-0.1.29.tgz",
+      "integrity": "sha512-XF4KxRPLj3rGABDafzol5kbfGftwSIFLDBmHW3BK4Nkhxr4FzwJ2JiTBwhR7vrDHH9B/g4EmE6ldTm1oq8Wslg==",
       "cpu": [
         "x64"
       ],
@@ -223,9 +223,9 @@
       }
     },
     "node_modules/@dignetwork/datalayer-driver-linux-arm64-gnu": {
-      "version": "0.1.28",
-      "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver-linux-arm64-gnu/-/datalayer-driver-linux-arm64-gnu-0.1.28.tgz",
-      "integrity": "sha512-dFwXN9SCNfC37h+jzRLR/g1nU2nmsslkw5HSZzTLf/WorV2agqlExXE42a+lrGN8jIdOiemVIjmB5pH5iudyNw==",
+      "version": "0.1.29",
+      "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver-linux-arm64-gnu/-/datalayer-driver-linux-arm64-gnu-0.1.29.tgz",
+      "integrity": "sha512-jrxK7nj+Zx7iZY1+M4Q30p4pWVFN9kKk+Jk7ka0+Ts3Eo4tIgz/lV88QTkArwPfZCRAxixpeNV687CdYcpd0UA==",
       "cpu": [
         "arm64"
       ],
@@ -238,9 +238,9 @@
       }
     },
     "node_modules/@dignetwork/datalayer-driver-linux-x64-gnu": {
-      "version": "0.1.28",
-      "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver-linux-x64-gnu/-/datalayer-driver-linux-x64-gnu-0.1.28.tgz",
-      "integrity": "sha512-o5bor9R3owWwmiuGXrtbtY6Oeqd+LiNyfKPc6sWD6pVbwvYWZHrQOSIUIacDvw7IDSHqth9bUwavTmzvc7hI7g==",
+      "version": "0.1.29",
+      "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver-linux-x64-gnu/-/datalayer-driver-linux-x64-gnu-0.1.29.tgz",
+      "integrity": "sha512-YuvEExvIEaBSmfxuJcrvFEg4GtNRJhfLnBZOMQIud4Q9hwtQMzXTN8kz7NZSE46eJ0tGuOPfV/1nZuXUxl04XQ==",
       "cpu": [
         "x64"
       ],
@@ -253,9 +253,9 @@
       }
     },
"node_modules/@dignetwork/datalayer-driver-win32-x64-msvc": { - "version": "0.1.28", - "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver-win32-x64-msvc/-/datalayer-driver-win32-x64-msvc-0.1.28.tgz", - "integrity": "sha512-+YhvKQcIFgRCt1Ovuxpwae+sbhLNNr1oHNQrsp4dqTsmNt59FYkuRHFPwrIfLfSO2J8Pz2nSAa2vLqsWH3j5rA==", + "version": "0.1.29", + "resolved": "https://registry.npmjs.org/@dignetwork/datalayer-driver-win32-x64-msvc/-/datalayer-driver-win32-x64-msvc-0.1.29.tgz", + "integrity": "sha512-+YbvJCrPQwG8lWaE7ftAEUpxs48LI3bptODCG/9gQ7j/N0V8HyyavNvDDdSGV+uPrQKeM85ZQBPCyo+Bml8llw==", "cpu": [ "x64" ], @@ -805,9 +805,9 @@ } }, "node_modules/@types/lodash": { - "version": "4.17.9", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.9.tgz", - "integrity": "sha512-w9iWudx1XWOHW5lQRS9iKpK/XuRhnN+0T7HvdCCd802FYkT1AMTnxndJHGrNJwRoRHkslGr4S29tjm1cT7x/7w==", + "version": "4.17.10", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.10.tgz", + "integrity": "sha512-YpS0zzoduEhuOWjAotS6A5AVCva7X4lVlYLF0FYHAY9sdraBfnatttHItlWeZdGhuEkf+OzMNg2ZYAx8t+52uQ==", "dev": true }, "node_modules/@types/methods": { @@ -843,9 +843,9 @@ "dev": true }, "node_modules/@types/node": { - "version": "22.7.3", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.3.tgz", - "integrity": "sha512-qXKfhXXqGTyBskvWEzJZPUxSslAiLaB6JGP1ic/XTH9ctGgzdgYguuLP1C601aRTSDNlLb0jbKqXjZ48GNraSA==", + "version": "22.7.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.4.tgz", + "integrity": "sha512-y+NPi1rFzDs1NdQHHToqeiX2TIS79SWEAw9GYhkkx8bD0ChpfqC+n2j5OXOCpzfojBEBt6DnEnnG9MY0zk1XLg==", "dependencies": { "undici-types": "~6.19.2" } diff --git a/package.json b/package.json index f70ef8d..ffff611 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@dignetwork/dig-sdk", - "version": "0.0.1-alpha.111", + "version": "0.0.1-alpha.112", "description": "", "type": "commonjs", "main": "./dist/index.js", @@ -25,7 +25,7 @@ "LICENSE" ], "dependencies": { - "@dignetwork/datalayer-driver": "^0.1.28", + "@dignetwork/datalayer-driver": "^0.1.29", "archiver": "^7.0.1", "axios": "^1.7.7", "bip39": "^3.1.0", diff --git a/src/DataIntegrityTree/DataIntegrityTree.ts b/src/DataIntegrityTree/DataIntegrityTree.ts index 38343f3..fef1a63 100644 --- a/src/DataIntegrityTree/DataIntegrityTree.ts +++ b/src/DataIntegrityTree/DataIntegrityTree.ts @@ -60,7 +60,7 @@ class DataIntegrityTree { private storeBaseDir: string; private storeDir: string; private dataDir: string; - public files: Map; + public files: Map; private tree: MerkleTree; constructor(storeId: string, options: DataIntegrityTreeOptions = {}) { @@ -208,6 +208,8 @@ class DataIntegrityTree { } const tempFilePath = path.join(tempDir, `${crypto.randomUUID()}.gz`); + let totalBytes = 0; + return new Promise((resolve, reject) => { const tempWriteStream = fs.createWriteStream(tempFilePath); @@ -215,6 +217,11 @@ class DataIntegrityTree { uncompressedHash.update(chunk); }); + // Now listen to the gzip stream for compressed data size + gzip.on("data", (chunk) => { + totalBytes += chunk.length; // This counts compressed bytes + }); + readStream.pipe(gzip).pipe(tempWriteStream); tempWriteStream.on("finish", async () => { @@ -255,6 +262,7 @@ class DataIntegrityTree { this.files.set(key, { hash: combinedHash, sha256: sha256, + bytes: totalBytes.toString(), }); await new Promise((resolve) => setTimeout(resolve, 100)); this._rebuildTree(); diff --git a/src/DigNetwork/DigPeer.ts b/src/DigNetwork/DigPeer.ts index e956e70..d7d1de7 
--- a/src/DigNetwork/DigPeer.ts
+++ b/src/DigNetwork/DigPeer.ts
@@ -17,7 +17,6 @@ import {
 } from "@dignetwork/datalayer-driver";
 import { FullNodePeer } from "../blockchain";
 import { Wallet } from "../blockchain";
-import { selectUnspentCoins } from "../blockchain/coins";

 export class DigPeer {
   private ipAddress: string;
@@ -94,12 +93,11 @@ export class DigPeer {
       (acc, output) => acc + output.amount,
       BigInt(0)
     );
-    const coins = await selectUnspentCoins(
+    const coins = await wallet.selectUnspentCoins(
       peer,
       totalAmount,
       totalFee,
-      [],
-      walletName
+      []
     );

     const coinSpends = await sendXch(
diff --git a/src/blockchain/DataStore.ts b/src/blockchain/DataStore.ts
index 61f76d4..9788cb5 100644
--- a/src/blockchain/DataStore.ts
+++ b/src/blockchain/DataStore.ts
@@ -22,8 +22,9 @@ import {
   MIN_HEIGHT_HEADER_HASH,
   getActiveStoreId,
   STORE_PATH,
+  USER_DIR_PATH,
 } from "../utils/config";
-import { selectUnspentCoins, calculateFeeForCoinSpends } from "./coins";
+import { calculateFeeForCoinSpends } from "./coins";
 import { RootHistoryItem, DatFile } from "../types";
 import { green, red } from "colorette";
 import { getFilePathFromSha256 } from "../utils/hashUtils";
@@ -38,6 +39,7 @@ import { DataStoreSerializer } from "./DataStoreSerializer";
 import NodeCache from "node-cache";
 import { MAIN_NET_GENISES_CHALLENGE } from "../utils/config";
 import { StoreInfoCacheUpdater } from "./StoreInfoCacheUpdater";
+import { Environment } from "../utils";

 // Initialize the cache with a TTL of 180 seconds (3 minutes)
 const rootHistoryCache = new NodeCache({ stdTTL: 180 });
@@ -173,7 +175,7 @@ export class DataStore {
     const publicSyntheticKey = await wallet.getPublicSyntheticKey();
     const ownerSyntheicPuzzleHash = syntheticKeyToPuzzleHash(publicSyntheticKey);

-    const storeCreationCoins = await selectUnspentCoins(
+    const storeCreationCoins = await wallet.selectUnspentCoins(
       peer,
       BigInt(1),
       BigInt(0)
@@ -312,39 +314,42 @@ export class DataStore {
     latestHash: Buffer;
   }> {
     try {
-      // Kick off the updater instance in the backgounf if not already running
-      StoreInfoCacheUpdater.initInstance();
-
       // Initialize the cache for the current storeId's coin info
       const storeCoinCache = new FileCache<{
         latestStore: ReturnType<DataStoreSerializer["serialize"]>;
         latestHeight: number;
         latestHash: string;
-      }>(`stores`);
-
-      // Try to get cached store info
-      const cachedInfo = storeCoinCache.get(this.storeId);
-
-      if (cachedInfo) {
-        // If we have cached info, return it directly
-        const { latestStore, latestHeight } =
-          DataStoreSerializer.deserialize({
-            latestStore: cachedInfo.latestStore,
-            latestHeight: cachedInfo.latestHeight.toString(),
-            latestHash: cachedInfo.latestHash,
-          });
-
-        return {
-          latestStore,
-          latestHeight: cachedInfo.latestHeight,
-          latestHash: Buffer.from(cachedInfo.latestHash, "hex"),
-        };
+      }>(`stores`, USER_DIR_PATH);
+
+      if (!Environment.CLI_MODE) {
+        // Kick off the updater instance in the background if not already running
+        StoreInfoCacheUpdater.initInstance();
+
+        // Try to get cached store info
+        const cachedInfo = storeCoinCache.get(this.storeId);
+
+        if (cachedInfo) {
+          // If we have cached info, return it directly
+          const { latestStore } = DataStoreSerializer.deserialize(
+            {
+              latestStore: cachedInfo.latestStore,
+              latestHeight: cachedInfo.latestHeight.toString(),
+              latestHash: cachedInfo.latestHash,
+            }
+          );
+
+          return {
+            latestStore,
+            latestHeight: cachedInfo.latestHeight,
+            latestHash: Buffer.from(cachedInfo.latestHash, "hex"),
+          };
+        }
       }
-
+
       // If no cached info, proceed to fetch and cache it
       // Use getCreationHeight to retrieve height and hash information
       const { createdAtHeight, createdAtHash } = await this.getCreationHeight();
-
+
       // Sync store from peer
       const peer = await FullNodePeer.connect();
       const { latestStore, latestHeight } = await peer.syncStoreFromLauncherId(
         this.storeId,
         createdAtHeight,
         createdAtHash,
         false
       );
-
+
       const latestHash = await peer.getHeaderHash(latestHeight);
-
+
       // Serialize the latest store info for caching
       const serializedLatestStore = new DataStoreSerializer(
         latestStore,
         latestHeight,
         latestHash
       ).serialize();
-
+
       // Cache the latest store info
       storeCoinCache.set(this.storeId, {
         latestStore: serializedLatestStore,
         latestHeight,
         latestHash: latestHash.toString("hex"),
       });
-
+
       return { latestStore, latestHeight, latestHash };
     } catch (error) {
       console.trace("Failed to fetch coin info", error);
       throw error;
     }
-  }
+  }

   public async cacheStoreCreationHeight(): Promise<{
     createdAtHeight: number;
@@ -536,6 +541,7 @@ export class DataStore {
     const publicSyntheticKey = await wallet.getPublicSyntheticKey();

     const { latestStore } = await this.fetchCoinInfo();
+
     const updateStoreResponse = updateStoreMetadata(
       latestStore,
       metadata.rootHash,
@@ -548,7 +554,7 @@ export class DataStore {
     );

     const fee = await calculateFeeForCoinSpends(peer, null);
-    const unspentCoins = await selectUnspentCoins(peer, BigInt(0), fee);
+    const unspentCoins = await wallet.selectUnspentCoins(peer, BigInt(0), fee);
     const feeCoinSpends = await addFee(
       publicSyntheticKey,
       unspentCoins,
diff --git a/src/blockchain/ServerCoin.ts b/src/blockchain/ServerCoin.ts
index 04f8495..ac3a83b 100644
--- a/src/blockchain/ServerCoin.ts
+++ b/src/blockchain/ServerCoin.ts
@@ -9,7 +9,6 @@ import {
   Coin,
 } from "@dignetwork/datalayer-driver";
 import { FullNodePeer } from "./FullNodePeer";
-import { selectUnspentCoins } from "./coins";
 import { Wallet } from "./Wallet";
 import { NconfManager } from "../utils/NconfManager";
 import { CoinData, ServerCoinData } from "../types";
@@ -39,7 +38,7 @@ export class ServerCoin {
     const peer = await FullNodePeer.connect();
     const wallet = await Wallet.load("default");
     const publicSyntheticKey = await wallet.getPublicSyntheticKey();
-    const serverCoinCreationCoins = await selectUnspentCoins(
+    const serverCoinCreationCoins = await wallet.selectUnspentCoins(
       peer,
       BigInt(serverCoinCollateral),
       BigInt(1000000)
@@ -135,7 +134,7 @@ export class ServerCoin {
       );
     }

-    const feeCoins = await selectUnspentCoins(peer, BigInt(0), BigInt(1000000));
+    const feeCoins = await wallet.selectUnspentCoins(peer, BigInt(0), BigInt(1000000));

     const coin = {
       amount: BigInt(serverCoin.coin.amount),
diff --git a/src/blockchain/StoreInfoCacheUpdater.ts b/src/blockchain/StoreInfoCacheUpdater.ts
index 91582ef..f763b06 100644
--- a/src/blockchain/StoreInfoCacheUpdater.ts
+++ b/src/blockchain/StoreInfoCacheUpdater.ts
@@ -1,10 +1,14 @@
 import { FullNodePeer } from "./FullNodePeer";
-import { FileCache } from "../utils";
+import { FileCache, USER_DIR_PATH, DIG_FOLDER_PATH } from "../utils";
 import { DataStoreSerializer } from "./DataStoreSerializer";
 import { withTimeout } from "../utils";
-import * as lockfile from 'proper-lockfile';
-import * as path from 'path';
-import { DIG_FOLDER_PATH } from "../utils";
+import * as lockfile from "proper-lockfile";
+import * as path from "path";
+import {
+  getCoinId,
+  Peer,
+  getMainnetGenesisChallenge,
+} from "@dignetwork/datalayer-driver";

 export class StoreInfoCacheUpdater {
   private static instance: StoreInfoCacheUpdater;
@@ -13,165 +17,203 @@ export class StoreInfoCacheUpdater {
     latestStore: ReturnType<DataStoreSerializer["serialize"]>;
     latestHeight: number;
     latestHash: string;
   }>;
-  private updateInterval: number;
-  private lockFilePath: string; // Lock file path in DIG_FOLDER_PATH
-  private releaseLock: (() => Promise<void>) | null = null; // Holds the release function for cleanup
+  private monitors: Map<string, Promise<void>> = new Map();
+  private lockFilePath: string;
+  private releaseLock: (() => Promise<void>) | null = null;
+  private isMonitoring: boolean = true;

-  private constructor(updateIntervalInMinutes: number = 5) {
-    this.storeCoinCache = new FileCache(`stores`);
-    this.updateInterval = updateIntervalInMinutes * 60 * 1000; // Convert minutes to milliseconds
+  private constructor() {
+    this.storeCoinCache = new FileCache(`stores`, USER_DIR_PATH);

     // Construct lock file path using the path module
-    this.lockFilePath = path.join(DIG_FOLDER_PATH, 'store-info-cache.lock');
+    this.lockFilePath = path.join(DIG_FOLDER_PATH, "store-info-cache.lock");

-    // Start the cache updater using setTimeout
-    this.scheduleNextUpdate();
-
-    // Set up process exit handlers for cleanup
-    this.setupExitHandlers();
+    // Start monitors for existing storeIds
+    this.startMonitors();
   }

   public static initInstance(): StoreInfoCacheUpdater {
     if (!StoreInfoCacheUpdater.instance) {
+      console.log("Initializing DataStore Monitor");
       StoreInfoCacheUpdater.instance = new StoreInfoCacheUpdater();
     }
     return StoreInfoCacheUpdater.instance;
   }

-  private scheduleNextUpdate() {
-    setTimeout(() => this.checkAndUpdateCache(), this.updateInterval);
-  }
-
-  private async checkAndUpdateCache() {
+  private async startMonitors() {
     try {
+      // Check if the lockfile is already held
       const isLocked = await lockfile.check(this.lockFilePath, {
-        stale: this.updateInterval,
+        realpath: false,
       });
-
-      if (!isLocked) {
-        await this.updateCache();
+      if (isLocked) {
+        // Another process is already running the monitors; skip starting monitors
+        console.log(
+          "Another process is already running the StoreInfoCacheUpdater."
+        );
+        return;
       }
-      // Else, lock is held; skip update without logging
-    } catch (error) {
-      console.error("Error checking lockfile:", error);
-    } finally {
-      // Schedule the next update regardless of whether we updated or not
-      this.scheduleNextUpdate();
-    }
-  }

-  private async updateCache() {
-    try {
       // Attempt to acquire the lock
-      const release = await lockfile.lock(this.lockFilePath, {
+      this.releaseLock = await lockfile.lock(this.lockFilePath, {
         retries: {
-          retries: 0, // No retries since we already checked the lock
+          retries: 0, // No retries since we only need one lock
         },
-        stale: this.updateInterval, // Lock expires after the update interval
+        stale: 60000, // Lock expires after 1 minute (adjust as needed)
+        realpath: false, // Ensure lockfile uses the exact path
       });

-      // Store the release function for cleanup during process exit
-      this.releaseLock = release;
+      const storeIds = this.storeCoinCache.getCachedKeys();

-      try {
-        const storeIds = this.storeCoinCache.getCachedKeys();
-
-        for (const storeId of storeIds) {
-          try {
-            const cachedInfo = this.storeCoinCache.get(storeId);
-
-            if (!cachedInfo) {
-              continue;
-            }
-
-            // Deserialize the cached store info
-            const { latestStore: serializedStore, latestHeight, latestHash } =
-              cachedInfo;
-
-            const { latestStore: previousInfo } = DataStoreSerializer.deserialize({
-              latestStore: serializedStore,
-              latestHeight: latestHeight.toString(),
-              latestHash: latestHash,
-            });
-
-            // Wrap the connection with a timeout
-            const peer = await withTimeout(
-              FullNodePeer.connect(),
-              60000,
-              "Timeout connecting to FullNodePeer"
-            );
-
-            // Wrap the syncStore call with a timeout
-            const { latestStore, latestHeight: newHeight } = await withTimeout(
-              peer.syncStore(
-                previousInfo,
-                latestHeight,
-                Buffer.from(latestHash, "hex"),
-                false
-              ),
-              60000,
-              `Timeout syncing store for storeId ${storeId}`
-            );
-
-            // Wrap the getHeaderHash call with a timeout
-            const latestHashBuffer = await withTimeout(
-              peer.getHeaderHash(newHeight),
-              60000,
-              `Timeout getting header hash for height ${newHeight}`
-            );
-
-            // Serialize the updated store data for caching
-            const serializedLatestStore = new DataStoreSerializer(
-              latestStore,
-              newHeight,
-              latestHashBuffer
-            ).serialize();
-
-            // Recache the updated store info
-            this.storeCoinCache.set(storeId, {
-              latestStore: serializedLatestStore,
-              latestHeight: newHeight,
-              latestHash: latestHashBuffer.toString("hex"),
-            });
-          } catch (error) {
-            console.error(`Failed to update cache for storeId ${storeId}:`, error);
-            // Continue with the next storeId
-          }
+      for (const storeId of storeIds) {
+        // Check if a monitor is already running for this storeId
+        if (!this.monitors.has(storeId)) {
+          // Start monitoring in the background
+          const monitorPromise = this.monitorStore(storeId);
+          this.monitors.set(storeId, monitorPromise);
         }
       }
-      } finally {
-        // Always release the lock after finishing the update
-        await this.releaseLock?.();
-        this.releaseLock = null;
-      }
+
+      // Wait for all monitors to settle
+      const monitorPromises = Array.from(this.monitors.values());
+
+      await Promise.all(monitorPromises);
     } catch (error: any) {
-      if (error.code === 'ELOCKED') {
-        // Another process acquired the lock; skip without logging
-      } else {
-
+      console.error("Monitor system encountered an error:", error);
+    } finally {
+      // Release the lock
+      if (this.releaseLock) {
+        try {
+          await this.releaseLock();
+          console.log("Lock released successfully.");
+        } catch (releaseError) {
+          console.error("Error releasing the lock:", releaseError);
+        }
+      }
     }
   }

-  private setupExitHandlers() {
-    const cleanup = async () => {
-      if (this.releaseLock) {
+  // Monitor a single store's coin
+  private async monitorStore(storeId: string): Promise<void> {
+    while (this.isMonitoring) {
+      let peer: Peer | null = null;
+      try {
+        // Connect to a peer
+        peer = await withTimeout(
+          FullNodePeer.connect(),
+          60000,
+          "Timeout connecting to FullNodePeer"
+        );
+
+        // Get the latest store info (from cache if available)
+        const cachedInfo = this.storeCoinCache.get(storeId);
+        if (!cachedInfo) {
+          // If no cached info, skip and wait before retrying
+          console.error(`No cached info for storeId ${storeId}`);
+          await new Promise((resolve) => setTimeout(resolve, 5000));
+          continue;
+        }
+
+        const {
+          latestStore: serializedStore,
+          latestHeight,
+          latestHash,
+        } = cachedInfo;
+
+        const { latestStore } = DataStoreSerializer.deserialize({
+          latestStore: serializedStore,
+          latestHeight: latestHeight.toString(),
+          latestHash: latestHash,
+        });
+
+        // Get the coinId associated with the store
+        const coinId = getCoinId(latestStore.coin);
+
+        // Wait for the coin to be spent
+        await peer.waitForCoinToBeSpent(
+          coinId,
+          latestHeight,
+          Buffer.from(latestHash, "hex")
+        );
+
+        let updatedStore, newHeight;
+
+        try {
-        await this.releaseLock();
-        console.log("Lock released successfully on process exit.");
-      } catch (error) {
-        console.error("Failed to release lock on exit:", error);
+          // When resolved, sync the store
+          //const { latestStore: updatedStore, latestHeight: newHeight } = await withTimeout(
+          const storeInfo = await withTimeout(
+            peer.syncStore(
+              latestStore,
+              latestHeight,
+              Buffer.from(latestHash, "hex"),
+              false // withHistory
+            ),
+            60000,
+            `Timeout syncing store for storeId ${storeId}`
+          );
+
+          updatedStore = storeInfo.latestStore;
+          newHeight = storeInfo.latestHeight;
+        } catch {
+          const genesisChallenge = await getMainnetGenesisChallenge();
+          const storeInfo = await withTimeout(
+            peer.syncStore(latestStore, null, genesisChallenge, false),
+            60000,
+            `Timeout syncing store for storeId ${storeId}`
+          );
+
+          updatedStore = storeInfo.latestStore;
+          newHeight = storeInfo.latestHeight;
+        }
+
+        // Get the latest header hash
+        const latestHashBuffer = await withTimeout(
+          peer.getHeaderHash(newHeight),
+          60000,
+          `Timeout getting header hash for height ${newHeight}`
+        );
+
+        // Serialize the updated store data for caching
+        const serializedLatestStore = new DataStoreSerializer(
+          updatedStore,
+          newHeight,
+          latestHashBuffer
+        ).serialize();
+
+        // Update the cache
+        this.storeCoinCache.set(storeId, {
+          latestStore: serializedLatestStore,
+          latestHeight: newHeight,
+          latestHash: latestHashBuffer.toString("hex"),
+        });
+
+        peer = null;
+
+        // Continue monitoring
+      } catch (error) {
+        console.error(`Error monitoring store ${storeId}:`, error);
+
+        // Close the peer connection if it's open
+        if (peer) {
+          peer = null;
+        }
+
+        // Determine if the error is unrecoverable
+        if (this.isUnrecoverableError(error)) {
+          this.isMonitoring = false; // Signal other monitors to stop
+          throw error; // Propagate error up to stop monitoring
         }
+
+        // Wait before retrying
+        await new Promise((resolve) => setTimeout(resolve, 5000));
       }
-    };
-
-    // Listen for process exit events and call cleanup
-    process.on('SIGINT', cleanup); // Catch CTRL+C
-    process.on('SIGTERM', cleanup); // Catch termination signals
-    process.on('exit', cleanup); // On normal exit
-    process.on('uncaughtException', async (error) => {
-      console.error("Uncaught exception, cleaning up:", error);
-      await cleanup();
-      process.exit(1); // Ensure process exits after handling exception
-    });
+    }
+  }
+
+  private isUnrecoverableError(error: any): boolean {
+    // Determine whether the error is unrecoverable
+    // For this example, we'll treat any unexpected error as unrecoverable
+    // You can customize this logic based on your application's needs
+    return true;
   }
 }
diff --git a/src/blockchain/Wallet.ts b/src/blockchain/Wallet.ts
index 1c3357e..3e573bc 100644
--- a/src/blockchain/Wallet.ts
+++ b/src/blockchain/Wallet.ts
@@ -227,7 +227,8 @@ export class Wallet {
     omitCoins: Coin[] = []
   ): Promise<Coin[]> {
     const cache = new FileCache<{ coinId: string; expiry: number }>(
-      path.join(USER_DIR_PATH, "reserved_coins")
+      "reserved_coins",
+      USER_DIR_PATH
     );
     const ownerPuzzleHash = await this.getOwnerPuzzleHash();
@@ -264,6 +265,7 @@ export class Wallet {
     );

     const selectedCoins = selectCoins(unspentCoins, feeBigInt + coinAmount);
+
     return selectedCoins;
   };
diff --git a/src/blockchain/coins.ts b/src/blockchain/coins.ts
index b2480aa..0244ffa 100644
--- a/src/blockchain/coins.ts
+++ b/src/blockchain/coins.ts
@@ -55,83 +55,6 @@ export const calculateFeeForCoinSpends = async (
   */
 };

-export const selectUnspentCoins = async (
-  peer: Peer,
-  coinAmount: bigint,
-  feeBigInt: bigint,
-  omitCoins: Coin[] = [],
-  walletName: string = "default"
-): Promise<Coin[]> => {
-  // Initialize the cache for reserved coins
-  const cache = new FileCache<{ coinId: string; expiry: number }>("reserved_coins");
-
-  // Get all cached reserved coins
-  const cachedReservedCoins = cache.getCachedKeys();
-
-  // Filter expired reserved coins and omit valid reservations
-  const now = Date.now();
-  const omitCoinIds = omitCoins.map((coin) => getCoinId(coin).toString("hex"));
-
-  const validReservedCoins = cachedReservedCoins.map((coinId) => {
-    const reservation = cache.get(coinId);
-    if (reservation && reservation.expiry > now) {
-      // Valid reservation, add to omit list
-      omitCoinIds.push(coinId);
-      return true;
-    } else {
-      // Reservation expired, remove it
-      cache.delete(coinId);
-      return false;
-    }
-  });
-
-  // Fetch all unspent coins from the peer
-  const wallet = await Wallet.load(walletName);
-  const ownerPuzzleHash = await wallet.getOwnerPuzzleHash();
-
-  const coinsResp = await peer.getAllUnspentCoins(
-    ownerPuzzleHash,
-    MIN_HEIGHT,
-    Buffer.from(MIN_HEIGHT_HEADER_HASH, "hex")
-  );
-
-  if (Environment.DEBUG) {
-    console.log("Unspent Coins:", coinsResp); // Debugging
-  }
-
-  const unspentCoins = coinsResp.coins;
-
-  // Filter out the omitted coins
-  const filteredUnspentCoins = unspentCoins.filter(
-    (coin) => !omitCoinIds.includes(getCoinId(coin).toString("hex"))
-  );
-
-  if (Environment.DEBUG) {
-    console.log("Unspent Coins after filtering:", filteredUnspentCoins); // Debugging
-  }
-
-  // Select coins needed for the transaction
-  const selectedCoins = selectCoins(filteredUnspentCoins, feeBigInt + coinAmount);
-
-  if (Environment.DEBUG) {
-    console.log("Selected Coins:", selectedCoins); // Debugging
-  }
-
-  // If no coins are selected, throw an error
-  if (selectedCoins.length === 0) {
-    throw new Error("No unspent coins available.");
-  }
-
-  // Cache the selected coins as reserved for the next 5 minutes
-  selectedCoins.forEach((coin) => {
-    const coinId = getCoinId(coin).toString("hex");
-    cache.set(coinId, { coinId, expiry: Date.now() + CACHE_DURATION });
-  });
-
-  return selectedCoins;
-};
-
-
 export const isCoinSpendable = async (
   peer: Peer,
   coinId: Buffer
diff --git a/src/utils/Environment.ts b/src/utils/Environment.ts
index 728caa7..730dd11 100644
--- a/src/utils/Environment.ts
+++ b/src/utils/Environment.ts
@@ -2,6 +2,8 @@ import fs from "fs";
 import path from "path";

 export class Environment {
+  private static cliMode: boolean = false;
+
   // Helper to validate if a string is a valid IP address (IPv4)
   private static isValidIp(ip: string): boolean {
     const ipPattern =
@@ -92,4 +94,12 @@ export class Environment {
     const value = process.env["REMOTE_NODE"];
     return value === "1" ? true : value === "0" ? false : undefined;
   }
+
+  static set CLI_MODE(mode: boolean) {
+    this.cliMode = mode;
+  }
+
+  static get CLI_MODE(): boolean {
+    return Environment.cliMode;
+  }
 }
diff --git a/src/utils/FileCache.ts b/src/utils/FileCache.ts
index a55017c..1f5b557 100644
--- a/src/utils/FileCache.ts
+++ b/src/utils/FileCache.ts
@@ -5,8 +5,8 @@ import { DIG_FOLDER_PATH } from "./config";
 export class FileCache<T> {
   private cacheDir: string;

-  constructor(relativeFilePath: string) {
-    this.cacheDir = path.join(DIG_FOLDER_PATH, relativeFilePath);
+  constructor(relativeFilePath: string, baseDir: string = DIG_FOLDER_PATH) {
+    this.cacheDir = path.join(baseDir, relativeFilePath);
     this.ensureDirectoryExists();
   }
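
---

Editor's note: the sketch below is not part of the commit. It is a minimal illustration of how the relocated coin-selection API is called after this change, assembled only from call sites visible in the diff (`Wallet.load("default")`, `wallet.selectUnspentCoins(peer, coinAmount, fee, omitCoins?)`, `FullNodePeer.connect()`, and the new `Environment.CLI_MODE` toggle). The function name and the amounts are placeholders.

```ts
// Sketch (assumes the internal module layout shown in this diff): coin
// selection now lives on the Wallet instance instead of the free
// selectUnspentCoins() helper that this commit deletes from coins.ts.
import { FullNodePeer } from "./FullNodePeer";
import { Wallet } from "./Wallet";
import { Environment } from "../utils";

async function reserveCoinsForSpend() {
  // CLI processes skip the background StoreInfoCacheUpdater
  // (see the Environment.CLI_MODE gate added to DataStore.fetchCoinInfo).
  Environment.CLI_MODE = true;

  const peer = await FullNodePeer.connect();
  // The wallet identity is fixed at load time, so no walletName
  // argument is threaded through selectUnspentCoins anymore.
  const wallet = await Wallet.load("default");

  // Placeholder amounts in mojos; omitCoins defaults to [].
  const coins = await wallet.selectUnspentCoins(
    peer,
    BigInt(1000),    // coinAmount
    BigInt(1000000)  // fee
  );
  return coins;
}
```

Design note: moving selection onto `Wallet` also moves the `reserved_coins` reservation cache to a `FileCache` rooted at `USER_DIR_PATH`, so every process of the same user consults one reservation list before picking coins. That is plausibly the mechanism behind the "double spend error" fix named in the changelog, since concurrent spends can no longer select the same unspent coins from divergent cache locations.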