From 69b70c8f6d18457cf326ffabb100556d71b60791 Mon Sep 17 00:00:00 2001 From: Brian Botha Date: Fri, 18 Oct 2024 16:43:42 +1100 Subject: [PATCH 01/14] wip: applying cancellability to handlers --- src/git/http.ts | 18 +++++++++++------- src/git/utils.ts | 10 +++++++--- src/nodes/agent/handlers/VaultsGitPackGet.ts | 2 ++ src/nodes/agent/handlers/VaultsScan.ts | 4 ++++ src/vaults/VaultManager.ts | 14 +++++++++----- 5 files changed, 33 insertions(+), 15 deletions(-) diff --git a/src/git/http.ts b/src/git/http.ts index bd81dac36..87d45cf6e 100644 --- a/src/git/http.ts +++ b/src/git/http.ts @@ -10,6 +10,7 @@ import { Buffer } from 'buffer'; import git from 'isomorphic-git'; import * as gitUtils from './utils'; import * as utils from '../utils'; +import {ContextCancellable} from "@matrixai/contexts"; /** * Reference discovery @@ -118,7 +119,7 @@ async function* advertiseRefGenerator({ efs: EncryptedFS; dir: string; gitDir: string; -}): AsyncGenerator { +}, ctx: ContextCancellable): AsyncGenerator { // Providing side-band-64, symref for the HEAD and agent name capabilities const capabilityList = [ gitUtils.SIDE_BAND_64_CAPABILITY, @@ -134,14 +135,14 @@ async function* advertiseRefGenerator({ efs, dir, gitDir, - }); + }, ctx ); // PKT-LINE("# service=$servicename" LF) yield packetLineBuffer(gitUtils.REFERENCE_DISCOVERY_HEADER); // "0000" yield gitUtils.FLUSH_PACKET_BUFFER; // Ref_list - yield* referenceListGenerator(objectGenerator, capabilityList); + yield* referenceListGenerator(objectGenerator, capabilityList, ctx); // "0000" yield gitUtils.FLUSH_PACKET_BUFFER; } @@ -165,6 +166,7 @@ async function* advertiseRefGenerator({ async function* referenceListGenerator( objectGenerator: AsyncGenerator<[Reference, ObjectId], void, void>, capabilities: CapabilityList, + ctx: ContextCancellable, ): AsyncGenerator { // Cap-list = capability *(SP capability) const capabilitiesListBuffer = Buffer.from( @@ -175,6 +177,7 @@ async function* referenceListGenerator( // *ref_record let first = true; for await (const [name, objectId] of objectGenerator) { + ctx.signal.throwIfAborted(); if (first) { // PKT-LINE(obj-id SP name NUL cap_list LF) yield packetLineBuffer( @@ -351,7 +354,7 @@ async function* generatePackRequest({ dir: string; gitDir: string; body: Array; -}): AsyncGenerator { +}, ctx: ContextCancellable): AsyncGenerator { const [wants, haves, _capabilities] = await parsePackRequest(body); const objectIds = await gitUtils.listObjects({ efs: efs, @@ -359,7 +362,7 @@ async function* generatePackRequest({ gitDir: gitDir, wants, haves, - }); + }, ctx); // Reply that we have no common history and that we need to send everything yield packetLineBuffer(gitUtils.NAK_BUFFER); // Send everything over in pack format @@ -368,7 +371,7 @@ async function* generatePackRequest({ dir, gitDir, objectIds, - }); + }, ctx); // Send dummy progress data yield packetLineBuffer( gitUtils.DUMMY_PROGRESS_BUFFER, @@ -396,7 +399,7 @@ async function* generatePackData({ gitDir: string; objectIds: Array; chunkSize?: number; -}): AsyncGenerator { +}, ctx: ContextCancellable): AsyncGenerator { let packFile: PackObjectsResult; // In case of errors we don't want to throw them. This will result in the error being thrown into `isometric-git` // when it consumes the response. It handles this by logging out the error which we don't want to happen. @@ -423,6 +426,7 @@ async function* generatePackData({ // Streaming the packFile as chunks of the length specified by the `chunkSize`. 
// Each line is formatted as a `PKT-LINE` do { + ctx.signal.throwIfAborted(); const subBuffer = packFileBuffer.subarray(0, chunkSize); packFileBuffer = packFileBuffer.subarray(chunkSize); yield packetLineBuffer(subBuffer, gitUtils.CHANNEL_DATA); diff --git a/src/git/utils.ts b/src/git/utils.ts index dd5f914d6..db81065d7 100644 --- a/src/git/utils.ts +++ b/src/git/utils.ts @@ -13,6 +13,7 @@ import git from 'isomorphic-git'; import { requestTypes } from './types'; import * as utils from '../utils'; import * as validationErrors from '../validation/errors'; +import {ContextCancellable} from "@matrixai/contexts"; // Constants // Total number of bytes per pack line minus the 4 size bytes and 1 channel byte @@ -75,7 +76,7 @@ async function* listReferencesGenerator({ efs: EncryptedFS; dir: string; gitDir: string; -}): AsyncGenerator<[Reference, ObjectId], void, void> { +}, ctx: ContextCancellable): AsyncGenerator<[Reference, ObjectId], void, void> { const refs: Array<[string, Promise]> = await git .listBranches({ fs: efs, @@ -84,6 +85,7 @@ async function* listReferencesGenerator({ }) .then((refs) => { return refs.map((ref) => { + ctx.signal.throwIfAborted(); return [ `${REFERENCES_STRING}${ref}`, git.resolveRef({ fs: efs, dir, gitdir: gitDir, ref: ref }), @@ -99,6 +101,7 @@ async function* listReferencesGenerator({ }); yield [HEAD_REFERENCE, resolvedHead]; for (const [key, refP] of refs) { + ctx.signal.throwIfAborted(); yield [key, await refP]; } } @@ -155,7 +158,7 @@ async function listObjects({ gitDir: string; wants: ObjectIdList; haves: ObjectIdList; -}): Promise { +}, ctx: ContextCancellable): Promise { const commits = new Set(); const trees = new Set(); const blobs = new Set(); @@ -163,6 +166,7 @@ async function listObjects({ const havesSet: Set = new Set(haves); async function walk(objectId: ObjectId, type: ObjectType): Promise { + ctx.signal.throwIfAborted(); // If object was listed as a have then we don't need to walk over it if (havesSet.has(objectId)) return; switch (type) { @@ -245,7 +249,7 @@ async function listObjectsAll({ }: { fs: EncryptedFS; gitDir: string; -}) { +}): Promise> { const objectsDirPath = path.join(gitDir, objectsDirName); const objectSet: Set = new Set(); const objectDirs = await fs.promises.readdir(objectsDirPath); diff --git a/src/nodes/agent/handlers/VaultsGitPackGet.ts b/src/nodes/agent/handlers/VaultsGitPackGet.ts index dcdc6846e..22259244a 100644 --- a/src/nodes/agent/handlers/VaultsGitPackGet.ts +++ b/src/nodes/agent/handlers/VaultsGitPackGet.ts @@ -1,5 +1,6 @@ import type { DB } from '@matrixai/db'; import type { JSONObject, JSONRPCRequest } from '@matrixai/rpc'; +import type {ContextTimed} from '@matrixai/contexts'; import type { VaultName } from '../../../vaults/types'; import type ACL from '../../../acl/ACL'; import type VaultManager from '../../../vaults/VaultManager'; @@ -24,6 +25,7 @@ class VaultsGitPackGet extends RawHandler<{ input: [JSONRPCRequest, ReadableStream], _cancel, meta, + ctx: ContextTimed, ): Promise<[JSONObject, ReadableStream]> => { const { vaultManager, acl, db } = this.container; const [headerMessage, inputStream] = input; diff --git a/src/nodes/agent/handlers/VaultsScan.ts b/src/nodes/agent/handlers/VaultsScan.ts index 9d2922cfe..5a89e9680 100644 --- a/src/nodes/agent/handlers/VaultsScan.ts +++ b/src/nodes/agent/handlers/VaultsScan.ts @@ -1,4 +1,5 @@ import type { DB } from '@matrixai/db'; +import type {ContextTimed} from '@matrixai/contexts'; import type { AgentRPCRequestParams, AgentRPCResponseResult, @@ -25,6 +26,7 @@ class 
VaultsScan extends ServerHandler< input: AgentRPCRequestParams, _cancel, meta, + ctx: ContextTimed, ): AsyncGenerator> { const { vaultManager, db } = this.container; const requestingNodeId = agentUtils.nodeIdFromMeta(meta); @@ -36,6 +38,7 @@ class VaultsScan extends ServerHandler< > { const listResponse = vaultManager.handleScanVaults( requestingNodeId, + ctx, tran, ); for await (const { @@ -43,6 +46,7 @@ class VaultsScan extends ServerHandler< vaultName, vaultPermissions, } of listResponse) { + ctx.signal.throwIfAborted(); yield { vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId), vaultName, diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 30a7183bd..8048f442c 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -40,6 +40,7 @@ import * as nodesUtils from '../nodes/utils'; import * as keysUtils from '../keys/utils'; import config from '../config'; import { mkdirExists } from '../utils/utils'; +import {ContextCancellable} from "@matrixai/contexts"; /** * Object map pattern for each vault @@ -838,12 +839,13 @@ class VaultManager { public async *handlePackRequest( vaultId: VaultId, body: Array, + ctx: ContextCancellable, tran?: DBTransaction, ): AsyncGenerator { if (tran == null) { // Lambda to maintain `this` context const handlePackRequest = (tran: DBTransaction) => - this.handlePackRequest(vaultId, body, tran); + this.handlePackRequest(vaultId, body, ctx, tran); return yield* this.db.withTransactionG(async function* (tran) { return yield* handlePackRequest(tran); }); @@ -853,8 +855,8 @@ class VaultManager { const efs = this.efs; yield* withG( [ - this.vaultLocks.lock([vaultId.toString(), RWLockWriter, 'read']), - vault.getLock().read(), + this.vaultLocks.lock([vaultId.toString(), RWLockWriter, 'read'], ctx), + vault.getLock().read(ctx), ], async function* (): AsyncGenerator { yield* gitHttp.generatePackRequest({ @@ -862,7 +864,7 @@ class VaultManager { dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), gitDir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), body: body, - }); + }, ctx); }, ); } @@ -900,6 +902,7 @@ class VaultManager { */ public async *handleScanVaults( nodeId: NodeId, + ctx: ContextCancellable, tran?: DBTransaction, ): AsyncGenerator<{ vaultId: VaultId; @@ -909,7 +912,7 @@ class VaultManager { if (tran == null) { // Lambda to maintain `this` context const handleScanVaults = (tran: DBTransaction) => - this.handleScanVaults(nodeId, tran); + this.handleScanVaults(nodeId, ctx, tran); return yield* this.db.withTransactionG(async function* (tran) { return yield* handleScanVaults(tran); }); @@ -932,6 +935,7 @@ class VaultManager { // Getting the list of vaults const vaults = permissions.vaults; for (const vaultIdString of Object.keys(vaults)) { + ctx.signal.throwIfAborted(); // Getting vault permissions const vaultId = IdInternal.fromString(vaultIdString); const vaultPermissions = Object.keys( From fcbea023becb1439d86ae601c3e9d42bd1f376c4 Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Tue, 19 Nov 2024 14:04:15 +1100 Subject: [PATCH 02/14] fix: build --- src/git/http.ts | 122 +++++++++++-------- src/git/utils.ts | 5 +- src/nodes/agent/handlers/VaultsGitInfoGet.ts | 9 +- src/nodes/agent/handlers/VaultsGitPackGet.ts | 10 +- src/nodes/agent/handlers/VaultsScan.ts | 12 +- src/vaults/VaultManager.ts | 7 +- 6 files changed, 95 insertions(+), 70 deletions(-) diff --git a/src/git/http.ts b/src/git/http.ts index 87d45cf6e..e65fd4fd1 100644 --- a/src/git/http.ts +++ b/src/git/http.ts @@ -1,3 +1,4 @@ +import type { 
ContextCancellable } from '@matrixai/contexts'; import type { CapabilityList, Reference, @@ -10,7 +11,6 @@ import { Buffer } from 'buffer'; import git from 'isomorphic-git'; import * as gitUtils from './utils'; import * as utils from '../utils'; -import {ContextCancellable} from "@matrixai/contexts"; /** * Reference discovery @@ -111,15 +111,18 @@ import {ContextCancellable} from "@matrixai/contexts"; * * `referenceList` is called for generating the `ref_list` stage. */ -async function* advertiseRefGenerator({ - efs, - dir, - gitDir, -}: { - efs: EncryptedFS; - dir: string; - gitDir: string; -}, ctx: ContextCancellable): AsyncGenerator { +async function* advertiseRefGenerator( + { + efs, + dir, + gitDir, + }: { + efs: EncryptedFS; + dir: string; + gitDir: string; + }, + ctx: ContextCancellable, +): AsyncGenerator { // Providing side-band-64, symref for the HEAD and agent name capabilities const capabilityList = [ gitUtils.SIDE_BAND_64_CAPABILITY, @@ -131,11 +134,14 @@ async function* advertiseRefGenerator({ }), gitUtils.AGENT_CAPABILITY, ]; - const objectGenerator = gitUtils.listReferencesGenerator({ - efs, - dir, - gitDir, - }, ctx ); + const objectGenerator = gitUtils.listReferencesGenerator( + { + efs, + dir, + gitDir, + }, + ctx, + ); // PKT-LINE("# service=$servicename" LF) yield packetLineBuffer(gitUtils.REFERENCE_DISCOVERY_HEADER); @@ -344,34 +350,43 @@ async function parsePackRequest( * It will respond with the `PKT-LINE(NAK_BUFFER)` and then the `packFile` data chunked into lines for the stream. * */ -async function* generatePackRequest({ - efs, - dir, - gitDir, - body, -}: { - efs: EncryptedFS; - dir: string; - gitDir: string; - body: Array; -}, ctx: ContextCancellable): AsyncGenerator { - const [wants, haves, _capabilities] = await parsePackRequest(body); - const objectIds = await gitUtils.listObjects({ - efs: efs, +async function* generatePackRequest( + { + efs, dir, - gitDir: gitDir, - wants, - haves, - }, ctx); + gitDir, + body, + }: { + efs: EncryptedFS; + dir: string; + gitDir: string; + body: Array; + }, + ctx: ContextCancellable, +): AsyncGenerator { + const [wants, haves, _capabilities] = await parsePackRequest(body); + const objectIds = await gitUtils.listObjects( + { + efs: efs, + dir, + gitDir: gitDir, + wants, + haves, + }, + // ctx, + ); // Reply that we have no common history and that we need to send everything yield packetLineBuffer(gitUtils.NAK_BUFFER); // Send everything over in pack format - yield* generatePackData({ - efs: efs, - dir, - gitDir, - objectIds, - }, ctx); + yield* generatePackData( + { + efs: efs, + dir, + gitDir, + objectIds, + }, + ctx, + ); // Send dummy progress data yield packetLineBuffer( gitUtils.DUMMY_PROGRESS_BUFFER, @@ -387,19 +402,22 @@ async function* generatePackRequest({ * The `packFile` is chunked into the `packetLineBuffer` with the size defined by `chunkSize`. * */ -async function* generatePackData({ - efs, - dir, - gitDir, - objectIds, - chunkSize = gitUtils.PACK_CHUNK_SIZE, -}: { - efs: EncryptedFS; - dir: string; - gitDir: string; - objectIds: Array; - chunkSize?: number; -}, ctx: ContextCancellable): AsyncGenerator { +async function* generatePackData( + { + efs, + dir, + gitDir, + objectIds, + chunkSize = gitUtils.PACK_CHUNK_SIZE, + }: { + efs: EncryptedFS; + dir: string; + gitDir: string; + objectIds: Array; + chunkSize?: number; + }, + ctx: ContextCancellable, +): AsyncGenerator { let packFile: PackObjectsResult; // In case of errors we don't want to throw them. 
This will result in the error being thrown into `isometric-git` // when it consumes the response. It handles this by logging out the error which we don't want to happen. diff --git a/src/git/utils.ts b/src/git/utils.ts index db81065d7..ce1e17227 100644 --- a/src/git/utils.ts +++ b/src/git/utils.ts @@ -158,7 +158,8 @@ async function listObjects({ gitDir: string; wants: ObjectIdList; haves: ObjectIdList; -}, ctx: ContextCancellable): Promise { +}/*, ctx: ContextCancellable*/): Promise { + // TODO: add support for ctx const commits = new Set(); const trees = new Set(); const blobs = new Set(); @@ -166,7 +167,7 @@ async function listObjects({ const havesSet: Set = new Set(haves); async function walk(objectId: ObjectId, type: ObjectType): Promise { - ctx.signal.throwIfAborted(); + // ctx.signal.throwIfAborted(); // If object was listed as a have then we don't need to walk over it if (havesSet.has(objectId)) return; switch (type) { diff --git a/src/nodes/agent/handlers/VaultsGitInfoGet.ts b/src/nodes/agent/handlers/VaultsGitInfoGet.ts index 8c0518713..65ae55ed2 100644 --- a/src/nodes/agent/handlers/VaultsGitInfoGet.ts +++ b/src/nodes/agent/handlers/VaultsGitInfoGet.ts @@ -25,9 +25,9 @@ class VaultsGitInfoGet extends RawHandler<{ }> { public handle = async ( input: [JSONRPCRequest, ReadableStream], - _cancel, + _cancel: (reason?: any) => void, meta: Record | undefined, - _ctx: ContextTimed, // TODO: use + ctx: ContextTimed, ): Promise<[JSONObject, ReadableStream]> => { const { db, vaultManager, acl } = this.container; const [headerMessage, inputStream] = input; @@ -91,7 +91,10 @@ class VaultsGitInfoGet extends RawHandler<{ let handleInfoRequestGen: AsyncGenerator; const stream = new ReadableStream({ start: async () => { - handleInfoRequestGen = vaultManager.handleInfoRequest(data.vaultId); + handleInfoRequestGen = vaultManager.handleInfoRequest( + data.vaultId, + ctx, + ); }, pull: async (controller) => { const result = await handleInfoRequestGen.next(); diff --git a/src/nodes/agent/handlers/VaultsGitPackGet.ts b/src/nodes/agent/handlers/VaultsGitPackGet.ts index 22259244a..3b658a67d 100644 --- a/src/nodes/agent/handlers/VaultsGitPackGet.ts +++ b/src/nodes/agent/handlers/VaultsGitPackGet.ts @@ -1,6 +1,6 @@ import type { DB } from '@matrixai/db'; -import type { JSONObject, JSONRPCRequest } from '@matrixai/rpc'; -import type {ContextTimed} from '@matrixai/contexts'; +import type { JSONObject, JSONRPCRequest, JSONValue } from '@matrixai/rpc'; +import type { ContextTimed } from '@matrixai/contexts'; import type { VaultName } from '../../../vaults/types'; import type ACL from '../../../acl/ACL'; import type VaultManager from '../../../vaults/VaultManager'; @@ -23,8 +23,8 @@ class VaultsGitPackGet extends RawHandler<{ }> { public handle = async ( input: [JSONRPCRequest, ReadableStream], - _cancel, - meta, + _cancel: (reason: any) => void, + meta: Record, ctx: ContextTimed, ): Promise<[JSONObject, ReadableStream]> => { const { vaultManager, acl, db } = this.container; @@ -79,7 +79,7 @@ class VaultsGitPackGet extends RawHandler<{ for await (const message of inputStream) { body.push(Buffer.from(message)); } - packRequestGen = vaultManager.handlePackRequest(vaultId, body); + packRequestGen = vaultManager.handlePackRequest(vaultId, body, ctx); }, pull: async (controller) => { const next = await packRequestGen.next(); diff --git a/src/nodes/agent/handlers/VaultsScan.ts b/src/nodes/agent/handlers/VaultsScan.ts index 5a89e9680..77c39db59 100644 --- a/src/nodes/agent/handlers/VaultsScan.ts +++ 
b/src/nodes/agent/handlers/VaultsScan.ts @@ -1,11 +1,12 @@ import type { DB } from '@matrixai/db'; -import type {ContextTimed} from '@matrixai/contexts'; +import type { ContextTimed } from '@matrixai/contexts'; import type { AgentRPCRequestParams, AgentRPCResponseResult, VaultsScanMessage, } from '../types'; import type VaultManager from '../../../vaults/VaultManager'; +import type { JSONValue } from '@matrixai/rpc'; import { ServerHandler } from '@matrixai/rpc'; import * as agentErrors from '../errors'; import * as agentUtils from '../utils'; @@ -23,12 +24,13 @@ class VaultsScan extends ServerHandler< AgentRPCResponseResult > { public handle = async function* ( - input: AgentRPCRequestParams, - _cancel, - meta, + _input: AgentRPCRequestParams, + _cancel: (reason?: any) => void, + meta: Record | undefined, ctx: ContextTimed, ): AsyncGenerator> { - const { vaultManager, db } = this.container; + const { vaultManager, db }: { vaultManager: VaultManager; db: DB } = + this.container; const requestingNodeId = agentUtils.nodeIdFromMeta(meta); if (requestingNodeId == null) { throw new agentErrors.ErrorAgentNodeIdMissing(); diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 8048f442c..4e39d0e13 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -40,7 +40,7 @@ import * as nodesUtils from '../nodes/utils'; import * as keysUtils from '../keys/utils'; import config from '../config'; import { mkdirExists } from '../utils/utils'; -import {ContextCancellable} from "@matrixai/contexts"; +import {ContextCancellable, ContextTimed} from "@matrixai/contexts"; /** * Object map pattern for each vault @@ -804,11 +804,12 @@ class VaultManager { @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async *handleInfoRequest( vaultId: VaultId, + ctx: ContextTimed, tran?: DBTransaction, ): AsyncGenerator { if (tran == null) { const handleInfoRequest = (tran: DBTransaction) => - this.handleInfoRequest(vaultId, tran); + this.handleInfoRequest(vaultId, ctx, tran); return yield* this.db.withTransactionG(async function* (tran) { return yield* handleInfoRequest(tran); }); @@ -826,7 +827,7 @@ class VaultManager { efs, dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), gitDir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), - }); + }, ctx); }, ); } From 8f50de2314349b0db3f22bbda1026bc0969a484b Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Tue, 10 Dec 2024 14:39:07 +1100 Subject: [PATCH 03/14] chore: added ctx to most of the vault domain --- src/client/handlers/VaultsSecretsRemove.ts | 7 ++- src/git/http.ts | 2 +- src/git/utils.ts | 55 +++++++++-------- src/vaults/VaultInternal.ts | 70 +++++++++++++++------- src/vaults/VaultManager.ts | 49 ++++++++------- tests/git/http.test.ts | 20 +++---- tests/git/utils.test.ts | 26 +++++--- tests/git/utils.ts | 22 ++++--- tests/vaults/VaultInternal.test.ts | 37 ++++++++---- tests/vaults/VaultManager.test.ts | 24 +++++--- 10 files changed, 188 insertions(+), 124 deletions(-) diff --git a/src/client/handlers/VaultsSecretsRemove.ts b/src/client/handlers/VaultsSecretsRemove.ts index eb8796ef5..8722bcd91 100644 --- a/src/client/handlers/VaultsSecretsRemove.ts +++ b/src/client/handlers/VaultsSecretsRemove.ts @@ -1,5 +1,7 @@ +import type { ContextCancellable } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; import type { ResourceAcquire } from '@matrixai/resources'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -31,6 +33,9 @@ 
class VaultsSecretsRemove extends DuplexHandler< SecretsRemoveHeaderMessage | SecretIdentifierMessageTagged > >, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextCancellable, ): AsyncGenerator> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; @@ -59,7 +64,7 @@ class VaultsSecretsRemove extends DuplexHandler< } const acquire = await vaultManager.withVaults( [vaultId], - async (vault) => vault.acquireWrite(), + async (vault) => vault.acquireWrite(ctx), ); vaultAcquires.push(acquire); } diff --git a/src/git/http.ts b/src/git/http.ts index e65fd4fd1..b062ec098 100644 --- a/src/git/http.ts +++ b/src/git/http.ts @@ -373,7 +373,7 @@ async function* generatePackRequest( wants, haves, }, - // ctx, + ctx, ); // Reply that we have no common history and that we need to send everything yield packetLineBuffer(gitUtils.NAK_BUFFER); diff --git a/src/git/utils.ts b/src/git/utils.ts index ce1e17227..38eb9096e 100644 --- a/src/git/utils.ts +++ b/src/git/utils.ts @@ -1,3 +1,4 @@ +import type { ContextCancellable } from '@matrixai/contexts'; import type { Capability, CapabilityList, @@ -13,7 +14,6 @@ import git from 'isomorphic-git'; import { requestTypes } from './types'; import * as utils from '../utils'; import * as validationErrors from '../validation/errors'; -import {ContextCancellable} from "@matrixai/contexts"; // Constants // Total number of bytes per pack line minus the 4 size bytes and 1 channel byte @@ -68,15 +68,18 @@ const DUMMY_PROGRESS_BUFFER = Buffer.from('progress is at 50%', BUFFER_FORMAT); * This will generate references and the objects they point to as a tuple. * `HEAD` is always yielded first along with all branches. */ -async function* listReferencesGenerator({ - efs, - dir, - gitDir, -}: { - efs: EncryptedFS; - dir: string; - gitDir: string; -}, ctx: ContextCancellable): AsyncGenerator<[Reference, ObjectId], void, void> { +async function* listReferencesGenerator( + { + efs, + dir, + gitDir, + }: { + efs: EncryptedFS; + dir: string; + gitDir: string; + }, + ctx: ContextCancellable, +): AsyncGenerator<[Reference, ObjectId], void, void> { const refs: Array<[string, Promise]> = await git .listBranches({ fs: efs, @@ -146,20 +149,22 @@ async function referenceCapability({ * The walk is preformed recursively and concurrently using promises. * Inspecting the git data structure objects is done using `isomorphic-git`. 
*/ -async function listObjects({ - efs, - dir, - gitDir, - wants, - haves, -}: { - efs: EncryptedFS; - dir: string; - gitDir: string; - wants: ObjectIdList; - haves: ObjectIdList; -}/*, ctx: ContextCancellable*/): Promise { - // TODO: add support for ctx +async function listObjects( + { + efs, + dir, + gitDir, + wants, + haves, + }: { + efs: EncryptedFS; + dir: string; + gitDir: string; + wants: ObjectIdList; + haves: ObjectIdList; + }, + ctx: ContextCancellable, +): Promise { const commits = new Set(); const trees = new Set(); const blobs = new Set(); @@ -167,7 +172,7 @@ async function listObjects({ const havesSet: Set = new Set(haves); async function walk(objectId: ObjectId, type: ObjectType): Promise { - // ctx.signal.throwIfAborted(); + ctx.signal.throwIfAborted(); // If object was listed as a have then we don't need to walk over it if (havesSet.has(objectId)) return; switch (type) { diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts index af778590d..33163be1c 100644 --- a/src/vaults/VaultInternal.ts +++ b/src/vaults/VaultInternal.ts @@ -1,5 +1,6 @@ import type { ReadCommitResult } from 'isomorphic-git'; import type { EncryptedFS } from 'encryptedfs'; +import type { ContextCancellable } from '@matrixai/contexts'; import type { DB, DBTransaction, LevelPath } from '@matrixai/db'; import type { RPCClient } from '@matrixai/rpc'; import type { ResourceAcquire, ResourceRelease } from '@matrixai/resources'; @@ -27,11 +28,12 @@ import { ready, } from '@matrixai/async-init/dist/CreateDestroyStartStop'; import { withF, withG } from '@matrixai/resources'; +import { context, cancellable } from '@matrixai/contexts/dist/decorators'; import { RWLockWriter } from '@matrixai/async-locks'; +import { tagLast } from './types'; import * as vaultsUtils from './utils'; import * as vaultsErrors from './errors'; import * as vaultsEvents from './events'; -import { tagLast } from './types'; import * as ids from '../ids'; import * as nodesUtils from '../nodes/utils'; import * as gitUtils from '../git/utils'; @@ -441,13 +443,20 @@ class VaultInternal { /** * With context handler for using a vault in a writable context. 
*/ + public async writeF( + f: (fs: FileSystemWritable) => Promise, + ctx?: Partial, + tran?: DBTransaction, + ): Promise; @ready(new vaultsErrors.ErrorVaultNotRunning()) + @cancellable(true) public async writeF( f: (fs: FileSystemWritable) => Promise, + @context ctx: ContextCancellable, tran?: DBTransaction, ): Promise { if (tran == null) { - return this.db.withTransactionF((tran) => this.writeF(f, tran)); + return this.db.withTransactionF((tran) => this.writeF(f, ctx, tran)); } return withF([this.lock.write()], async () => { @@ -475,7 +484,7 @@ class VaultInternal { try { await f(this.efsVault); // After doing mutation we need to commit the new history - await this.createCommit(); + await this.createCommit(ctx); } catch (e) { // Error implies dirty state await this.cleanWorkingDirectory(); @@ -494,15 +503,16 @@ class VaultInternal { @ready(new vaultsErrors.ErrorVaultNotRunning()) public writeG( g: (fs: FileSystemWritable) => AsyncGenerator, + ctx: ContextCancellable, tran?: DBTransaction, ): AsyncGenerator { if (tran == null) { - return this.db.withTransactionG((tran) => this.writeG(g, tran)); + return this.db.withTransactionG((tran) => this.writeG(g, ctx, tran)); } const efsVault = this.efsVault; const vaultMetadataDbPath = this.vaultMetadataDbPath; - const createCommit = () => this.createCommit(); + const createCommit = () => this.createCommit(ctx); const cleanWorkingDirectory = () => this.cleanWorkingDirectory(); return withG([this.lock.write()], async function* () { if ( @@ -559,6 +569,7 @@ class VaultInternal { */ @ready(new vaultsErrors.ErrorVaultNotRunning()) public acquireWrite( + ctx: ContextCancellable, tran?: DBTransaction, ): ResourceAcquire { return async () => { @@ -593,7 +604,7 @@ class VaultInternal { if (e == null) { try { // After doing mutation we need to commit the new history - await this.createCommit(); + await this.createCommit(ctx); } catch (e_) { e = e_; // Error implies dirty state @@ -968,7 +979,7 @@ class VaultInternal { * and the old history is removed from the old canonical head to the branch point. This is to maintain the strict * non-branching linear history. */ - protected async createCommit() { + protected async createCommit(ctx: ContextCancellable) { // Forced wait for 1 ms to allow difference in mTime between file changes await utils.sleep(1); // Checking if commit is appending or branching @@ -1080,7 +1091,7 @@ class VaultInternal { }); // We clean old history if a commit was made on previous version if (headRef !== masterRef) { - await this.garbageCollectGitObjectsLocal(masterRef, headRef); + await this.garbageCollectGitObjectsLocal(masterRef, headRef, ctx); } } } @@ -1131,7 +1142,13 @@ class VaultInternal { * This will walk the current canonicalBranch history and delete any objects that are not a part of it. * This is costly since it will compare the walked tree with all existing objects. 
*/ - protected async garbageCollectGitObjectsGlobal() { + protected async garbageCollectGitObjectsGlobal( + ctx?: Partial, + ): Promise; + @cancellable(true) + protected async garbageCollectGitObjectsGlobal( + @context ctx: ContextCancellable, + ) { const objectIdsAll = await gitUtils.listObjectsAll({ fs: this.efs, gitDir: this.vaultGitDir, @@ -1143,13 +1160,16 @@ class VaultInternal { gitdir: this.vaultGitDir, ref: vaultsUtils.canonicalBranch, }); - const reachableObjects = await gitUtils.listObjects({ - efs: this.efs, - dir: this.vaultDataDir, - gitDir: this.vaultGitDir, - wants: [masterRef], - haves: [], - }); + const reachableObjects = await gitUtils.listObjects( + { + efs: this.efs, + dir: this.vaultDataDir, + gitDir: this.vaultGitDir, + wants: [masterRef], + haves: [], + }, + ctx, + ); // Walk from head to all reachable objects for (const objectReachable of reachableObjects) { objects.delete(objectReachable); @@ -1172,14 +1192,18 @@ class VaultInternal { protected async garbageCollectGitObjectsLocal( startId: string, stopId: string, + ctx: ContextCancellable, ) { - const objects = await gitUtils.listObjects({ - efs: this.efs, - dir: this.vaultDataDir, - gitDir: this.vaultGitDir, - wants: [startId], - haves: [stopId], - }); + const objects = await gitUtils.listObjects( + { + efs: this.efs, + dir: this.vaultDataDir, + gitDir: this.vaultGitDir, + wants: [startId], + haves: [stopId], + }, + ctx, + ); const deletePs: Array> = []; for (const objectId of objects) { deletePs.push( diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 4e39d0e13..32e7d2b02 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -1,4 +1,6 @@ import type { DBTransaction, LevelPath } from '@matrixai/db'; +import type { LockRequest } from '@matrixai/async-locks'; +import type { ContextCancellable, ContextTimed } from '@matrixai/contexts'; import type { VaultId, VaultName, @@ -17,12 +19,10 @@ import type NotificationsManager from '../notifications/NotificationsManager'; import type ACL from '../acl/ACL'; import type { RemoteInfo } from './VaultInternal'; import type { VaultAction } from './types'; -import type { LockRequest } from '@matrixai/async-locks'; import type { Key } from '../keys/types'; import path from 'path'; import { DB } from '@matrixai/db'; import { EncryptedFS, errors as encryptedFsErrors } from 'encryptedfs'; -import Logger from '@matrixai/logger'; import { CreateDestroyStartStop, ready, @@ -30,17 +30,17 @@ import { import { IdInternal } from '@matrixai/id'; import { withF, withG } from '@matrixai/resources'; import { LockBox, RWLockWriter } from '@matrixai/async-locks'; +import Logger from '@matrixai/logger'; import VaultInternal from './VaultInternal'; import * as vaultsEvents from './events'; import * as vaultsUtils from './utils'; import * as vaultsErrors from './errors'; +import config from '../config'; +import { mkdirExists } from '../utils/utils'; import * as utils from '../utils'; import * as gitHttp from '../git/http'; import * as nodesUtils from '../nodes/utils'; import * as keysUtils from '../keys/utils'; -import config from '../config'; -import { mkdirExists } from '../utils/utils'; -import {ContextCancellable, ContextTimed} from "@matrixai/contexts"; /** * Object map pattern for each vault @@ -55,6 +55,7 @@ type VaultMetadata = { }; interface VaultManager extends CreateDestroyStartStop {} + @CreateDestroyStartStop( new vaultsErrors.ErrorVaultManagerRunning(), new vaultsErrors.ErrorVaultManagerDestroyed(), @@ -823,11 +824,14 @@ class VaultManager 
{ ], async function* (): AsyncGenerator { // Read the commit state of the vault - yield* gitHttp.advertiseRefGenerator({ - efs, - dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), - gitDir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), - }, ctx); + yield* gitHttp.advertiseRefGenerator( + { + efs, + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitDir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), + }, + ctx, + ); }, ); } @@ -860,12 +864,15 @@ class VaultManager { vault.getLock().read(ctx), ], async function* (): AsyncGenerator { - yield* gitHttp.generatePackRequest({ - efs, - dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), - gitDir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), - body: body, - }, ctx); + yield* gitHttp.generatePackRequest( + { + efs, + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitDir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), + body: body, + }, + ctx, + ); }, ); } @@ -913,7 +920,7 @@ class VaultManager { if (tran == null) { // Lambda to maintain `this` context const handleScanVaults = (tran: DBTransaction) => - this.handleScanVaults(nodeId, ctx, tran); + this.handleScanVaults(nodeId, ctx, tran); return yield* this.db.withTransactionG(async function* (tran) { return yield* handleScanVaults(tran); }); @@ -1045,11 +1052,11 @@ class VaultManager { * @param tran */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async *withVaultsG( + public async *withVaultsG( vaultIds: Array, - g: (...args: Array) => AsyncGenerator, + g: (...args: Array) => AsyncGenerator, tran?: DBTransaction, - ): AsyncGenerator { + ): AsyncGenerator { if (tran == null) { return yield* this.db.withTransactionG((tran) => this.withVaultsG(vaultIds, g, tran), @@ -1066,7 +1073,7 @@ class VaultManager { const vaultThis = this; return yield* this.vaultLocks.withG( ...vaultLocks, - async function* (): AsyncGenerator { + async function* (): AsyncGenerator { // Getting the vaults while locked const vaults = await Promise.all( vaultIds.map(async (vaultId) => { diff --git a/tests/git/http.test.ts b/tests/git/http.test.ts index 0bb81b696..228ab11cd 100644 --- a/tests/git/http.test.ts +++ b/tests/git/http.test.ts @@ -115,7 +115,9 @@ describe('Git Http', () => { }, ], }); - const gen = gitHttp.advertiseRefGenerator(gitDirs); + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; + const gen = gitHttp.advertiseRefGenerator(gitDirs, ctx); let response = ''; for await (const result of gen) { response += result.toString(); @@ -195,10 +197,9 @@ describe('Git Http', () => { ], }); const objectIds = await gitUtils.listObjectsAll(gitDirs); - const gen = gitHttp.generatePackData({ - ...gitDirs, - objectIds, - }); + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; + const gen = gitHttp.generatePackData({ ...gitDirs, objectIds }, ctx); let acc = Buffer.alloc(0); for await (const line of gen) { acc = Buffer.concat([acc, line.subarray(5)]); @@ -246,10 +247,9 @@ describe('Git Http', () => { }, ], }); - const gen = gitHttp.generatePackRequest({ - ...gitDirs, - body: [], - }); + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; + const gen = gitHttp.generatePackRequest({ ...gitDirs, body: [] }, ctx); let response = ''; for await (const line of gen) { response += line.toString(); @@ -321,7 +321,7 @@ describe('Git Http', () => { (await 
fs.promises.readFile(path.join(newDirs.dir, 'file2'))).toString(), ).toBe('this is another file'); }); - test('end to end Pull', async () => { + test('end to end pull', async () => { await gitTestUtils.createGitRepo({ ...gitDirs, author: 'tester', diff --git a/tests/git/utils.test.ts b/tests/git/utils.test.ts index 69b960bfd..d1ceb6ede 100644 --- a/tests/git/utils.test.ts +++ b/tests/git/utils.test.ts @@ -2,8 +2,8 @@ import fs from 'fs'; import os from 'os'; import path from 'path'; import git from 'isomorphic-git'; -import { test } from '@fast-check/jest'; import fc from 'fast-check'; +import { test } from '@fast-check/jest'; import * as gitUtils from '@/git/utils'; import * as validationErrors from '@/validation/errors'; import * as gitTestUtils from './utils'; @@ -81,9 +81,12 @@ describe('Git utils', () => { }) )[0].oid; const expectedReferences = ['HEAD', 'refs/heads/master']; - for await (const [reference, objectId] of gitUtils.listReferencesGenerator({ - ...gitDirs, - })) { + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; + for await (const [reference, objectId] of gitUtils.listReferencesGenerator( + { ...gitDirs }, + ctx, + )) { expect(reference).toBeOneOf(expectedReferences); expect(objectId).toBe(headObjectId); } @@ -178,11 +181,16 @@ describe('Git utils', () => { }) ).map((v) => v.oid); - const objectList = await gitUtils.listObjects({ - ...gitDirs, - wants: commitIds, - haves: [], - }); + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; + const objectList = await gitUtils.listObjects( + { + ...gitDirs, + wants: commitIds, + haves: [], + }, + ctx, + ); const expectedObjectIds = await gitUtils.listObjectsAll(gitDirs); // Found objects should include all the commits expect(objectList).toIncludeAllMembers(commitIds); diff --git a/tests/git/utils.ts b/tests/git/utils.ts index 1311208ae..d5b6d81d9 100644 --- a/tests/git/utils.ts +++ b/tests/git/utils.ts @@ -153,15 +153,15 @@ function request({ headers: POJO; body: Array; }) => { - // Console.log('body', body.map(v => v.toString())) + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; switch (method) { case 'GET': { // Send back the GET request info response - const advertiseRefGen = gitHttp.advertiseRefGenerator({ - efs, - dir, - gitDir, - }); + const advertiseRefGen = gitHttp.advertiseRefGenerator( + { efs, dir, gitDir }, + ctx, + ); return { url: url, @@ -173,12 +173,10 @@ function request({ }; } case 'POST': { - const packGen = gitHttp.generatePackRequest({ - efs, - dir, - gitDir, - body, - }); + const packGen = gitHttp.generatePackRequest( + { efs, dir, gitDir, body }, + ctx, + ); return { url: url, method: method, diff --git a/tests/vaults/VaultInternal.test.ts b/tests/vaults/VaultInternal.test.ts index 2d487da30..ed3f6a689 100644 --- a/tests/vaults/VaultInternal.test.ts +++ b/tests/vaults/VaultInternal.test.ts @@ -1,20 +1,21 @@ +import type { ContextCancellable } from '@matrixai/contexts'; import type { VaultId } from '@/vaults/types'; import type { Vault } from '@/vaults/Vault'; -import type KeyRing from '@/keys/KeyRing'; import type { LevelPath } from '@matrixai/db'; import type { Key } from '@/keys/types'; +import type KeyRing from '@/keys/KeyRing'; import os from 'os'; import path from 'path'; import fs from 'fs'; +import git from 'isomorphic-git'; +import { EncryptedFS } from 'encryptedfs'; import { DB } from '@matrixai/db'; import { withF } from '@matrixai/resources'; import 
Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { EncryptedFS } from 'encryptedfs'; -import git from 'isomorphic-git'; import { tagLast } from '@/vaults/types'; +import { sleep } from '@/utils'; import VaultInternal from '@/vaults/VaultInternal'; import * as vaultsErrors from '@/vaults/errors'; -import { sleep } from '@/utils'; import * as keysUtils from '@/keys/utils'; import * as vaultsUtils from '@/vaults/utils'; import * as utils from '@/utils'; @@ -642,7 +643,7 @@ describe('VaultInternal', () => { const commit = (await vault.log())[0].commitId; const gen = vault.writeG(async function* (efs): AsyncGenerator { yield await efs.writeFile('secret-1', 'secret-content'); - }); + }, {} as ContextCancellable); for await (const _ of gen) { // Do nothing } @@ -652,7 +653,9 @@ describe('VaultInternal', () => { }); test('no commit added if no mutation in writeG', async () => { const commit = (await vault.log())[0].commitId; - const gen = vault.writeG(async function* (_efs): AsyncGenerator {}); + const gen = vault.writeG(async function* ( + _efs, + ): AsyncGenerator {}, {} as ContextCancellable); for await (const _ of gen) { // Do nothing } @@ -665,7 +668,7 @@ describe('VaultInternal', () => { const gen = vault.writeG(async function* (efs): AsyncGenerator { yield await efs.writeFile(secret1.name, secret1.content); yield await efs.rename('notValid', 'randomName'); // Throws - }); + }, {} as ContextCancellable); // Failing commit operation await expect(() => consumeGenerator(gen)).rejects.toThrow(); @@ -734,8 +737,10 @@ describe('VaultInternal', () => { for (const logElement of log) { refs.push(await quickCommit(logElement.commitId, `secret-${num++}`)); } + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; // @ts-ignore: protected method - await vault.garbageCollectGitObjectsGlobal(); + await vault.garbageCollectGitObjectsGlobal(ctx); for (const ref of refs) { await expect( @@ -787,7 +792,7 @@ describe('VaultInternal', () => { yield; finished = true; yield; - }); + }, {} as ContextCancellable); const runP = consumeGenerator(writeGen); await sleep(waitDelay); expect(finished).toBe(false); @@ -801,7 +806,7 @@ describe('VaultInternal', () => { yield; finished = true; yield; - }); + }, {} as ContextCancellable); const runP2 = consumeGenerator(writeGen2); await sleep(waitDelay); await releaseRead(); @@ -917,7 +922,9 @@ describe('VaultInternal', () => { await releaseRead(); }); test('can acquire a write resource', async () => { - const acquireWrite = vault.acquireWrite(); + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; + const acquireWrite = vault.acquireWrite(ctx); await withF([acquireWrite], async ([efs]) => { await efs.writeFile(secret1.name, secret1.content); }); @@ -927,10 +934,12 @@ describe('VaultInternal', () => { }); }); test('acquiring write resource respects write locking', async () => { + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; const lock = vault.getLock(); const [releaseWrite] = await lock.write()(); let finished = false; - const writeP = withF([vault.acquireWrite()], async () => { + const writeP = withF([vault.acquireWrite(ctx)], async () => { finished = true; }); await sleep(waitDelay); @@ -940,10 +949,12 @@ describe('VaultInternal', () => { expect(finished).toBe(true); }); test('acquiring write resource respects read locking', async () => { + const abortController = new AbortController(); + const ctx = { signal: 
abortController.signal }; const lock = vault.getLock(); const [releaseRead] = await lock.read()(); let finished = false; - const writeP = withF([vault.acquireWrite()], async () => { + const writeP = withF([vault.acquireWrite(ctx)], async () => { finished = true; }); await sleep(waitDelay); diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index 1032ba16e..5b8809417 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -12,12 +12,12 @@ import type { AgentServerManifest } from '@/nodes/agent/handlers'; import fs from 'fs'; import os from 'os'; import path from 'path'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import git from 'isomorphic-git'; import { IdInternal } from '@matrixai/id'; import { DB } from '@matrixai/db'; import { destroyed, running } from '@matrixai/async-init'; -import git from 'isomorphic-git'; import { RWLockWriter } from '@matrixai/async-locks'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import TaskManager from '@/tasks/TaskManager'; import ACL from '@/acl/ACL'; import GestaltGraph from '@/gestalts/GestaltGraph'; @@ -26,12 +26,12 @@ import NodeConnectionManager from '@/nodes/NodeConnectionManager'; import KeyRing from '@/keys/KeyRing'; import PolykeyAgent from '@/PolykeyAgent'; import VaultManager from '@/vaults/VaultManager'; -import * as vaultsErrors from '@/vaults/errors'; import NodeGraph from '@/nodes/NodeGraph'; -import * as vaultsUtils from '@/vaults/utils'; -import { sleep } from '@/utils'; import VaultInternal from '@/vaults/VaultInternal'; +import { sleep } from '@/utils'; import * as keysUtils from '@/keys/utils'; +import * as vaultsErrors from '@/vaults/errors'; +import * as vaultsUtils from '@/vaults/utils'; import * as nodeTestUtils from '../nodes/utils'; import * as testUtils from '../utils'; import * as tlsTestsUtils from '../utils/tls'; @@ -351,8 +351,10 @@ describe('VaultManager', () => { await acl.setVaultAction(vault2, nodeId1, 'clone'); // No permissions for vault3 - // scanning vaults - const gen = vaultManager.handleScanVaults(nodeId1); + // Scanning vaults + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; + const gen = vaultManager.handleScanVaults(nodeId1, ctx); const vaults: Record = {}; for await (const vault of gen) { vaults[vault.vaultId] = [vault.vaultName, vault.vaultPermissions]; @@ -363,14 +365,18 @@ describe('VaultManager', () => { // Should throw due to no permission await expect(async () => { - for await (const _ of vaultManager.handleScanVaults(nodeId2)) { + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; + for await (const _ of vaultManager.handleScanVaults(nodeId2, ctx)) { // Should throw } }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); // Should throw due to lack of scan permission await gestaltGraph.setGestaltAction(['node', nodeId2], 'notify'); await expect(async () => { - for await (const _ of vaultManager.handleScanVaults(nodeId2)) { + const abortController = new AbortController(); + const ctx = { signal: abortController.signal }; + for await (const _ of vaultManager.handleScanVaults(nodeId2, ctx)) { // Should throw } }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); From 52e6ff1156579f9b0e06bcb4e9b0a6b9f58c97f2 Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Thu, 12 Dec 2024 15:44:40 +1100 Subject: [PATCH 04/14] feat: added cancellation for most `polykey vaults` commands --- 
src/client/handlers/AgentStatus.ts | 2 +- src/client/handlers/AuditEventsGet.ts | 5 +- .../handlers/GestaltsActionsGetByIdentity.ts | 2 +- src/client/handlers/VaultsClone.ts | 27 +-- src/client/handlers/VaultsCreate.ts | 13 +- src/client/handlers/VaultsDelete.ts | 16 +- src/client/handlers/VaultsList.ts | 14 +- src/client/handlers/VaultsLog.ts | 16 +- src/client/handlers/VaultsPermissionGet.ts | 12 +- src/client/handlers/VaultsPermissionSet.ts | 36 ++-- src/client/handlers/VaultsPull.ts | 40 ++--- src/client/handlers/VaultsRename.ts | 15 +- src/client/handlers/VaultsScan.ts | 33 +--- src/gestalts/GestaltGraph.ts | 2 +- src/vaults/VaultInternal.ts | 71 +++++--- src/vaults/VaultManager.ts | 158 +++++++++++++----- tests/vaults/VaultManager.test.ts | 15 +- 17 files changed, 278 insertions(+), 199 deletions(-) diff --git a/src/client/handlers/AgentStatus.ts b/src/client/handlers/AgentStatus.ts index 249af6e58..4ee6017a2 100644 --- a/src/client/handlers/AgentStatus.ts +++ b/src/client/handlers/AgentStatus.ts @@ -5,8 +5,8 @@ import type { } from '../types'; import type PolykeyAgent from '../../PolykeyAgent'; import { UnaryHandler } from '@matrixai/rpc'; -import * as nodesUtils from '../../nodes/utils'; import config from '../../config'; +import * as nodesUtils from '../../nodes/utils'; class AgentStatus extends UnaryHandler< { diff --git a/src/client/handlers/AuditEventsGet.ts b/src/client/handlers/AuditEventsGet.ts index 8abeefcf5..308bb05c7 100644 --- a/src/client/handlers/AuditEventsGet.ts +++ b/src/client/handlers/AuditEventsGet.ts @@ -1,4 +1,5 @@ import type { ContextTimed } from '@matrixai/contexts'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult } from '../types'; import type { AuditEvent, @@ -42,8 +43,8 @@ class AuditEventsGet extends ServerHandler< }> & { paths: Array; }, - _cancel, - _meta, + _cancel: (reason?: any) => void, + _meta: Record, ctx: ContextTimed, ): AsyncGenerator> { const { audit }: { audit: Audit } = this.container; diff --git a/src/client/handlers/GestaltsActionsGetByIdentity.ts b/src/client/handlers/GestaltsActionsGetByIdentity.ts index 5b5544e4d..33d118e04 100644 --- a/src/client/handlers/GestaltsActionsGetByIdentity.ts +++ b/src/client/handlers/GestaltsActionsGetByIdentity.ts @@ -8,9 +8,9 @@ import type GestaltGraph from '../../gestalts/GestaltGraph'; import type { GestaltAction } from '../../gestalts/types'; import type { IdentityId, ProviderId } from '../../ids'; import { UnaryHandler } from '@matrixai/rpc'; -import * as ids from '../../ids'; import { validateSync } from '../../validation'; import { matchSync } from '../../utils'; +import * as ids from '../../ids'; class GestaltsActionsGetByIdentity extends UnaryHandler< { diff --git a/src/client/handlers/VaultsClone.ts b/src/client/handlers/VaultsClone.ts index ff2b99376..1a5386dde 100644 --- a/src/client/handlers/VaultsClone.ts +++ b/src/client/handlers/VaultsClone.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -6,11 +8,8 @@ import type { SuccessMessage, } from '../types'; import type VaultManager from '../../vaults/VaultManager'; -import type { NodeId } from '../../ids'; import { UnaryHandler } from '@matrixai/rpc'; import * as ids from '../../ids'; -import { validateSync } from '../../validation'; -import { matchSync } from '../../utils'; class VaultsClone extends 
UnaryHandler< { @@ -22,27 +21,15 @@ class VaultsClone extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; - const { - nodeId, - }: { - nodeId: NodeId; - } = validateSync( - (keyPath, value) => { - return matchSync(keyPath)( - [['nodeId'], () => ids.parseNodeId(value)], - () => value, - ); - }, - { - nodeId: input.nodeIdEncoded, - }, - ); - // Vault id + const nodeId = ids.parseNodeId(input.nodeIdEncoded); await db.withTransactionF(async (tran) => { - await vaultManager.cloneVault(nodeId, input.nameOrId, tran); + await vaultManager.cloneVault(nodeId, input.nameOrId, ctx, tran); }); return { type: 'success', success: true }; }; diff --git a/src/client/handlers/VaultsCreate.ts b/src/client/handlers/VaultsCreate.ts index cd7f503d0..cc5d3a60f 100644 --- a/src/client/handlers/VaultsCreate.ts +++ b/src/client/handlers/VaultsCreate.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -19,17 +21,16 @@ class VaultsCreate extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; - const vaultId = await db.withTransactionF((tran) => - vaultManager.createVault(input.vaultName, tran), + vaultManager.createVault(input.vaultName, ctx, tran), ); - - return { - vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId), - }; + return { vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId) }; }; } diff --git a/src/client/handlers/VaultsDelete.ts b/src/client/handlers/VaultsDelete.ts index d341d4f66..f1d050a5e 100644 --- a/src/client/handlers/VaultsDelete.ts +++ b/src/client/handlers/VaultsDelete.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -6,10 +8,9 @@ import type { VaultIdentifierMessage, } from '../types'; import type VaultManager from '../../vaults/VaultManager'; -import type { VaultName } from '../../vaults/types'; import { UnaryHandler } from '@matrixai/rpc'; -import * as vaultsUtils from '../../vaults/utils'; import * as vaultsErrors from '../../vaults/errors'; +import * as vaultsUtils from '../../vaults/utils'; class VaultsDelete extends UnaryHandler< { @@ -21,20 +22,25 @@ class VaultsDelete extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; await db.withTransactionF(async (tran) => { const vaultIdFromName = await vaultManager.getVaultId( - input.nameOrId as VaultName, + input.nameOrId, tran, ); const vaultId = vaultIdFromName ?? 
vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } - await vaultManager.destroyVault(vaultId, tran); + await vaultManager.destroyVault(vaultId, ctx, tran); }); return { type: 'success', success: true }; }; diff --git a/src/client/handlers/VaultsList.ts b/src/client/handlers/VaultsList.ts index fb0c0aa3d..724f46a2a 100644 --- a/src/client/handlers/VaultsList.ts +++ b/src/client/handlers/VaultsList.ts @@ -1,3 +1,4 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; import type { ClientRPCRequestParams, @@ -5,6 +6,7 @@ import type { VaultListMessage, } from '../types'; import type VaultManager from '../../vaults/VaultManager'; +import type { JSONValue } from '@matrixai/rpc'; import { ServerHandler } from '@matrixai/rpc'; import * as vaultsUtils from '../../vaults/utils'; @@ -17,21 +19,21 @@ class VaultsList extends ServerHandler< ClientRPCResponseResult > { public handle = async function* ( - _input, - _cancel, - _meta, - ctx, + _input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { if (ctx.signal.aborted) throw ctx.signal.reason; const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; const vaults = await db.withTransactionF((tran) => - vaultManager.listVaults(tran), + vaultManager.listVaults(ctx, tran), ); for await (const [vaultName, vaultId] of vaults) { if (ctx.signal.aborted) throw ctx.signal.reason; yield { - vaultName, + vaultName: vaultName, vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId), }; } diff --git a/src/client/handlers/VaultsLog.ts b/src/client/handlers/VaultsLog.ts index 591b6ca9c..6b47ae98c 100644 --- a/src/client/handlers/VaultsLog.ts +++ b/src/client/handlers/VaultsLog.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -6,7 +8,6 @@ import type { VaultsLogMessage, } from '../types'; import type VaultManager from '../../vaults/VaultManager'; -import type { VaultName } from '../../vaults/types'; import { ServerHandler } from '@matrixai/rpc'; import * as vaultsUtils from '../../vaults/utils'; import * as vaultsErrors from '../../vaults/errors'; @@ -21,22 +22,24 @@ class VaultsLog extends ServerHandler< > { public handle = async function* ( input: ClientRPCRequestParams, - _cancel, - _meta, - ctx, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { if (ctx.signal.aborted) throw ctx.signal.reason; const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; const log = await db.withTransactionF(async (tran) => { const vaultIdFromName = await vaultManager.getVaultId( - input.nameOrId as VaultName, + input.nameOrId, tran, ); const vaultId = vaultIdFromName ?? 
vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } // Getting the log return await vaultManager.withVaults( @@ -44,6 +47,7 @@ class VaultsLog extends ServerHandler< async (vault) => { return await vault.log(input.commitId, input.depth); }, + ctx, tran, ); }); diff --git a/src/client/handlers/VaultsPermissionGet.ts b/src/client/handlers/VaultsPermissionGet.ts index 9e08d6bff..0ee38d8f0 100644 --- a/src/client/handlers/VaultsPermissionGet.ts +++ b/src/client/handlers/VaultsPermissionGet.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -26,9 +28,9 @@ class VaultsPermissionGet extends ServerHandler< > { public handle = async function* ( input: ClientRPCRequestParams, - _cancel, - _meta, - ctx, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { if (ctx.signal.aborted) throw ctx.signal.reason; const { @@ -45,7 +47,9 @@ class VaultsPermissionGet extends ServerHandler< const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } // Getting permissions return [await acl.getVaultPerm(vaultId, tran), vaultId]; diff --git a/src/client/handlers/VaultsPermissionSet.ts b/src/client/handlers/VaultsPermissionSet.ts index 358fd8340..3e7ee34ec 100644 --- a/src/client/handlers/VaultsPermissionSet.ts +++ b/src/client/handlers/VaultsPermissionSet.ts @@ -6,8 +6,7 @@ import type { SuccessMessage, } from '../types'; import type ACL from '../../acl/ACL'; -import type { VaultAction, VaultActions } from '../../vaults/types'; -import type { NodeId } from '../../ids'; +import type { VaultActions } from '../../vaults/types'; import type VaultManager from '../../vaults/VaultManager'; import type NotificationsManager from '../../notifications/NotificationsManager'; import type GestaltGraph from '../../gestalts/GestaltGraph'; @@ -15,8 +14,6 @@ import { UnaryHandler } from '@matrixai/rpc'; import * as ids from '../../ids'; import * as vaultsUtils from '../../vaults/utils'; import * as vaultsErrors from '../../vaults/errors'; -import { validateSync } from '../../validation'; -import { matchSync } from '../../utils'; class VaultsPermissionSet extends UnaryHandler< { @@ -53,30 +50,21 @@ class VaultsPermissionSet extends UnaryHandler< const vaultId = vaultIdFromName ?? 
vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } - const { - nodeId, - actions, - }: { - nodeId: NodeId; - actions: Array; - } = validateSync( - (keyPath, value) => { - return matchSync(keyPath)( - [['nodeId'], () => ids.parseNodeId(value)], - [['actions'], () => value.map(vaultsUtils.parseVaultAction)], - () => value, - ); - }, - { - nodeId: input.nodeIdEncoded, - actions: input.vaultPermissionList, - }, + const nodeId = ids.parseNodeId(input.nodeIdEncoded); + const actions = input.vaultPermissionList.map( + vaultsUtils.parseVaultAction, ); // Checking if vault exists const vaultMeta = await vaultManager.getVaultMeta(vaultId, tran); - if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + if (!vaultMeta) { + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); + } // Setting permissions const actionsSet: VaultActions = {}; await gestaltGraph.setGestaltAction(['node', nodeId], 'scan', tran); diff --git a/src/client/handlers/VaultsPull.ts b/src/client/handlers/VaultsPull.ts index 328ea8a78..bf2eb9b39 100644 --- a/src/client/handlers/VaultsPull.ts +++ b/src/client/handlers/VaultsPull.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -6,14 +8,11 @@ import type { VaultsPullMessage, } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { NodeId } from '../../ids'; import type VaultManager from '../../vaults/VaultManager'; import { UnaryHandler } from '@matrixai/rpc'; import * as ids from '../../ids'; import * as vaultsUtils from '../../vaults/utils'; import * as vaultsErrors from '../../vaults/errors'; -import { validateSync } from '../../validation'; -import { matchSync } from '../../utils'; class VaultsPull extends UnaryHandler< { @@ -25,12 +24,14 @@ class VaultsPull extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; - let pullVaultId; - pullVaultId = vaultsUtils.decodeVaultId(input.pullVault); - pullVaultId = pullVaultId ?? input.pullVault; + const pullVaultId = + vaultsUtils.decodeVaultId(input.pullVault) ?? input.pullVault; await db.withTransactionF(async (tran) => { const vaultIdFromName = await vaultManager.getVaultId( input.nameOrId as VaultName, @@ -39,28 +40,19 @@ class VaultsPull extends UnaryHandler< const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } - const { - nodeId, - }: { - nodeId: NodeId | undefined; - } = validateSync( - (keyPath, value) => { - return matchSync(keyPath)( - [['nodeId'], () => (value ? ids.parseNodeId(value) : undefined)], - () => value, - ); - }, - { - nodeId: input.nodeIdEncoded, - }, - ); + const nodeId = input.nodeIdEncoded + ? 
ids.parseNodeId(input.nodeIdEncoded) + : undefined; await vaultManager.pullVault({ - vaultId, + vaultId: vaultId, pullNodeId: nodeId, pullVaultNameOrId: pullVaultId, - tran, + ctx: ctx, + tran: tran, }); }); return { type: 'success', success: true }; diff --git a/src/client/handlers/VaultsRename.ts b/src/client/handlers/VaultsRename.ts index 3720e792e..555f7361e 100644 --- a/src/client/handlers/VaultsRename.ts +++ b/src/client/handlers/VaultsRename.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -20,6 +22,9 @@ class VaultsRename extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; @@ -31,12 +36,12 @@ class VaultsRename extends UnaryHandler< const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } - await vaultManager.renameVault(vaultId, input.newName, tran); - return { - vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId), - }; + await vaultManager.renameVault(vaultId, input.newName, ctx, tran); + return { vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId) }; }); }; } diff --git a/src/client/handlers/VaultsScan.ts b/src/client/handlers/VaultsScan.ts index 7d0880399..87b432f51 100644 --- a/src/client/handlers/VaultsScan.ts +++ b/src/client/handlers/VaultsScan.ts @@ -1,3 +1,5 @@ +import type { ContextTimed } from '@matrixai/contexts'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -5,11 +7,8 @@ import type { VaultsScanMessage, } from '../types'; import type VaultManager from '../../vaults/VaultManager'; -import type { NodeId } from '../../ids'; import { ServerHandler } from '@matrixai/rpc'; import * as ids from '../../ids'; -import { validateSync } from '../../validation'; -import { matchSync } from '../../utils'; class VaultsScan extends ServerHandler< { @@ -20,36 +19,22 @@ class VaultsScan extends ServerHandler< > { public handle = async function* ( input: ClientRPCRequestParams, - _cancel, - _meta, - ctx, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { if (ctx.signal.aborted) throw ctx.signal.reason; const { vaultManager }: { vaultManager: VaultManager } = this.container; - const { - nodeId, - }: { - nodeId: NodeId; - } = validateSync( - (keyPath, value) => { - return matchSync(keyPath)( - [['nodeId'], () => ids.parseNodeId(value)], - () => value, - ); - }, - { - nodeId: input.nodeIdEncoded, - }, - ); + const nodeId = ids.parseNodeId(input.nodeIdEncoded); for await (const { vaultIdEncoded, vaultName, vaultPermissions, - } of vaultManager.scanVaults(nodeId)) { + } of vaultManager.scanVaults(nodeId, ctx)) { if (ctx.signal.aborted) throw ctx.signal.reason; yield { - vaultName, - vaultIdEncoded, + vaultName: vaultName, + vaultIdEncoded: vaultIdEncoded, permissions: vaultPermissions, }; } diff --git a/src/gestalts/GestaltGraph.ts b/src/gestalts/GestaltGraph.ts index 30a1aa068..efe129876 100644 --- a/src/gestalts/GestaltGraph.ts +++ b/src/gestalts/GestaltGraph.ts @@ -1149,7 +1149,7 @@ class GestaltGraph { 
return; } default: - never(`type must be either "node" or "identity" got "${type}"`); + never(`type must be either "node" or "identity", got "${type}"`); } } diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts index 33163be1c..201230cb4 100644 --- a/src/vaults/VaultInternal.ts +++ b/src/vaults/VaultInternal.ts @@ -1,6 +1,6 @@ import type { ReadCommitResult } from 'isomorphic-git'; import type { EncryptedFS } from 'encryptedfs'; -import type { ContextCancellable } from '@matrixai/contexts'; +import type { ContextTimed } from '@matrixai/contexts'; import type { DB, DBTransaction, LevelPath } from '@matrixai/db'; import type { RPCClient } from '@matrixai/rpc'; import type { ResourceAcquire, ResourceRelease } from '@matrixai/resources'; @@ -28,7 +28,7 @@ import { ready, } from '@matrixai/async-init/dist/CreateDestroyStartStop'; import { withF, withG } from '@matrixai/resources'; -import { context, cancellable } from '@matrixai/contexts/dist/decorators'; +import { context, timedCancellable } from '@matrixai/contexts/dist/decorators'; import { RWLockWriter } from '@matrixai/async-locks'; import { tagLast } from './types'; import * as vaultsUtils from './utils'; @@ -70,8 +70,9 @@ class VaultInternal { vaultsDbPath, keyRing, efs, - logger = new Logger(this.name), fresh = false, + ctx, + logger = new Logger(this.name), tran, }: { vaultId: VaultId; @@ -80,8 +81,9 @@ class VaultInternal { vaultsDbPath: LevelPath; keyRing: KeyRing; efs: EncryptedFS; - logger?: Logger; fresh?: boolean; + ctx: ContextTimed; + logger?: Logger; tran?: DBTransaction; }): Promise { if (tran == null) { @@ -93,8 +95,9 @@ class VaultInternal { vaultsDbPath, keyRing, efs, - logger, fresh, + ctx, + logger, tran, }), ); @@ -110,7 +113,7 @@ class VaultInternal { efs, logger, }); - await vault.start({ fresh, vaultName, tran }); + await vault.start({ fresh, vaultName, ctx, tran }); logger.info(`Created ${this.name} - ${vaultIdEncoded}`); return vault; } @@ -127,6 +130,7 @@ class VaultInternal { keyRing, nodeManager, efs, + ctx, logger = new Logger(this.name), tran, }: { @@ -138,6 +142,7 @@ class VaultInternal { efs: EncryptedFS; keyRing: KeyRing; nodeManager: NodeManager; + ctx: ContextTimed; logger?: Logger; tran?: DBTransaction; }): Promise { @@ -152,6 +157,7 @@ class VaultInternal { keyRing, nodeManager, efs, + ctx, logger, tran, }), @@ -173,12 +179,12 @@ class VaultInternal { const [vaultName, remoteVaultId]: [VaultName, VaultId] = await nodeManager.withConnF(targetNodeId, async (connection) => { const client = connection.getClient(); - const [request, vaultName, remoteVaultId] = await vault.request( client, targetVaultNameOrId, 'clone', ); + // TODO: ability to cancel git clone await git.clone({ fs: efs, http: { request }, @@ -195,7 +201,7 @@ class VaultInternal { remoteVault: vaultsUtils.encodeVaultId(remoteVaultId), }; - await vault.start({ vaultName, tran }); + await vault.start({ vaultName, ctx, tran }); // Setting the remote in the metadata await tran.put( [...vault.vaultMetadataDbPath, VaultInternal.remoteKey], @@ -256,26 +262,28 @@ class VaultInternal { } /** - * * @param fresh Clears all state before starting * @param vaultName Name of the vault, Only used when creating a new vault + * @param ctx * @param tran */ public async start({ fresh = false, vaultName, + ctx, tran, }: { fresh?: boolean; vaultName?: VaultName; + ctx?: ContextTimed; tran?: DBTransaction; } = {}): Promise { if (tran == null) { return await this.db.withTransactionF((tran) => - this.start_(fresh, tran, vaultName), + 
this.start_(fresh, tran, ctx, vaultName), ); } - return await this.start_(fresh, tran, vaultName); + return await this.start_(fresh, tran, ctx, vaultName); } /** @@ -285,8 +293,16 @@ class VaultInternal { protected async start_( fresh: boolean, tran: DBTransaction, + ctx?: Partial, vaultName?: VaultName, - ) { + ): Promise; + @timedCancellable(true) + protected async start_( + fresh: boolean, + tran: DBTransaction, + @context ctx: ContextTimed, + vaultName?: VaultName, + ): Promise { this.logger.info( `Starting ${this.constructor.name} - ${this.vaultIdEncoded}`, ); @@ -308,7 +324,7 @@ class VaultInternal { await vaultsUtils.mkdirExists(this.efs, this.vaultDataDir); await vaultsUtils.mkdirExists(this.efs, this.vaultGitDir); await this.setupMeta({ vaultName, tran }); - await this.setupGit(tran); + await this.setupGit(ctx, tran); this.efsVault = await this.efs.chroot(this.vaultDataDir); this.logger.info( `Started ${this.constructor.name} - ${this.vaultIdEncoded}`, @@ -445,14 +461,14 @@ class VaultInternal { */ public async writeF( f: (fs: FileSystemWritable) => Promise, - ctx?: Partial, + ctx?: Partial, tran?: DBTransaction, ): Promise; @ready(new vaultsErrors.ErrorVaultNotRunning()) - @cancellable(true) + @timedCancellable(true) public async writeF( f: (fs: FileSystemWritable) => Promise, - @context ctx: ContextCancellable, + @context ctx: ContextTimed, tran?: DBTransaction, ): Promise { if (tran == null) { @@ -503,7 +519,7 @@ class VaultInternal { @ready(new vaultsErrors.ErrorVaultNotRunning()) public writeG( g: (fs: FileSystemWritable) => AsyncGenerator, - ctx: ContextCancellable, + ctx: ContextTimed, tran?: DBTransaction, ): AsyncGenerator { if (tran == null) { @@ -569,7 +585,7 @@ class VaultInternal { */ @ready(new vaultsErrors.ErrorVaultNotRunning()) public acquireWrite( - ctx: ContextCancellable, + ctx: ContextTimed, tran?: DBTransaction, ): ResourceAcquire { return async () => { @@ -786,7 +802,10 @@ class VaultInternal { * If the vault is in a dirty state then we clean up the working directory * or any history not part of the canonicalBranch. */ - protected async setupGit(tran: DBTransaction): Promise { + protected async setupGit( + ctx: ContextTimed, + tran: DBTransaction, + ): Promise { // Initialization is idempotent // It works even with an existing git repository await git.init({ @@ -843,7 +862,7 @@ class VaultInternal { // This ensures that any uncommitted state is dropped await this.cleanWorkingDirectory(); // Do global GC operation - await this.garbageCollectGitObjectsGlobal(); + await this.garbageCollectGitObjectsGlobal(ctx); // Setting dirty back to false await tran.put( @@ -979,7 +998,7 @@ class VaultInternal { * and the old history is removed from the old canonical head to the branch point. This is to maintain the strict * non-branching linear history. */ - protected async createCommit(ctx: ContextCancellable) { + protected async createCommit(ctx: ContextTimed) { // Forced wait for 1 ms to allow difference in mTime between file changes await utils.sleep(1); // Checking if commit is appending or branching @@ -1143,12 +1162,10 @@ class VaultInternal { * This is costly since it will compare the walked tree with all existing objects. 
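
The hunks above convert `start`/`start_` (and, further down, the `VaultManager` methods) to a recurring shape: a plain overload signature accepting an optional `ctx?: Partial<ContextTimed>`, followed by an implementation decorated with `@timedCancellable(true)` whose `@context` parameter receives the fully populated context. A minimal sketch of that shape, using the same decorator imports the patch uses; the `Example` class and `doWork` method are placeholders, not part of the codebase:

import type { ContextTimed } from '@matrixai/contexts';
import { context, timedCancellable } from '@matrixai/contexts/dist/decorators';

class Example {
  // Public overload: callers may omit ctx or pass a partial one.
  public async doWork(input: string, ctx?: Partial<ContextTimed>): Promise<string>;
  // Implementation: the decorator constructs the timer and abort signal,
  // so the body can assume a complete ContextTimed.
  @timedCancellable(true)
  public async doWork(
    input: string,
    @context ctx: ContextTimed,
  ): Promise<string> {
    ctx.signal.throwIfAborted();
    return input.toUpperCase();
  }
}

The overload keeps the call sites ergonomic (no explicit context needed) while the decorated implementation guarantees a usable signal and timer internally.
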
*/ protected async garbageCollectGitObjectsGlobal( - ctx?: Partial, + ctx?: Partial, ): Promise; - @cancellable(true) - protected async garbageCollectGitObjectsGlobal( - @context ctx: ContextCancellable, - ) { + @timedCancellable(true) + protected async garbageCollectGitObjectsGlobal(@context ctx: ContextTimed) { const objectIdsAll = await gitUtils.listObjectsAll({ fs: this.efs, gitDir: this.vaultGitDir, @@ -1192,7 +1209,7 @@ class VaultInternal { protected async garbageCollectGitObjectsLocal( startId: string, stopId: string, - ctx: ContextCancellable, + ctx: ContextTimed, ) { const objects = await gitUtils.listObjects( { diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 32e7d2b02..59870a438 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -1,6 +1,6 @@ -import type { DBTransaction, LevelPath } from '@matrixai/db'; import type { LockRequest } from '@matrixai/async-locks'; -import type { ContextCancellable, ContextTimed } from '@matrixai/contexts'; +import type { ContextTimed } from '@matrixai/contexts'; +import type { DBTransaction, LevelPath } from '@matrixai/db'; import type { VaultId, VaultName, @@ -30,6 +30,7 @@ import { import { IdInternal } from '@matrixai/id'; import { withF, withG } from '@matrixai/resources'; import { LockBox, RWLockWriter } from '@matrixai/async-locks'; +import { context, timedCancellable } from '@matrixai/contexts/dist/decorators'; import Logger from '@matrixai/logger'; import VaultInternal from './VaultInternal'; import * as vaultsEvents from './events'; @@ -314,14 +315,21 @@ class VaultManager { * Constructs a new vault instance with a given name and * stores it in memory */ + public async createVault( + vaultName: VaultName, + ctx?: Partial, + tran?: DBTransaction, + ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @timedCancellable(true) public async createVault( vaultName: VaultName, + @context ctx: ContextTimed, tran?: DBTransaction, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.createVault(vaultName, tran), + this.createVault(vaultName, ctx, tran), ); } // Adding vault to name map @@ -342,7 +350,7 @@ class VaultManager { ); const vaultIdString = vaultId.toString() as VaultIdString; return await this.vaultLocks.withF( - [vaultId.toString(), RWLockWriter, 'write'], + [vaultId.toString(), RWLockWriter, 'write', ctx], async () => { // Creating vault const vault = await VaultInternal.createVaultInternal({ @@ -354,6 +362,7 @@ class VaultManager { db: this.db, vaultsDbPath: this.vaultsDbPath, fresh: true, + ctx, tran, }); // Adding vault to object map @@ -407,19 +416,26 @@ class VaultManager { * Removes the metadata and EFS state of a vault using a * given VaultId */ + public async destroyVault( + vaultId: VaultId, + ctx?: Partial, + tran?: DBTransaction, + ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @timedCancellable(true) public async destroyVault( vaultId: VaultId, + @context ctx: ContextTimed, tran?: DBTransaction, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.destroyVault(vaultId, tran), + this.destroyVault(vaultId, ctx, tran), ); } await this.vaultLocks.withF( - [vaultId.toString(), RWLockWriter, 'write'], + [vaultId.toString(), RWLockWriter, 'write', ctx], async () => { await tran.lock([...this.vaultsDbPath, vaultId].join('')); // Ensure protection from write skew @@ -479,10 +495,18 @@ class VaultManager { * Lists the vault name and associated VaultId of all * the vaults stored */ + public 
async listVaults( + ctx?: Partial, + tran?: DBTransaction, + ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async listVaults(tran?: DBTransaction): Promise { + @timedCancellable(true) + public async listVaults( + @context ctx: ContextTimed, + tran?: DBTransaction, + ): Promise { if (tran == null) { - return this.db.withTransactionF((tran) => this.listVaults(tran)); + return this.db.withTransactionF((tran) => this.listVaults(ctx, tran)); } const vaults: VaultList = new Map(); @@ -490,6 +514,7 @@ class VaultManager { for await (const [vaultNameBuffer, vaultIdBuffer] of tran.iterator( this.vaultsNamesDbPath, )) { + if (ctx.signal.aborted) throw ctx.signal.reason; const vaultName = vaultNameBuffer.toString() as VaultName; const vaultId = IdInternal.fromBuffer(vaultIdBuffer); vaults.set(vaultName, vaultId); @@ -500,20 +525,28 @@ class VaultManager { /** * Changes the vault name metadata of a VaultId */ + public async renameVault( + vaultId: VaultId, + newVaultName: VaultName, + ctx?: Partial, + tran?: DBTransaction, + ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @timedCancellable(true) public async renameVault( vaultId: VaultId, newVaultName: VaultName, + @context ctx: ContextTimed, tran?: DBTransaction, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.renameVault(vaultId, newVaultName, tran), + this.renameVault(vaultId, newVaultName, ctx, tran), ); } await this.vaultLocks.withF( - [vaultId.toString(), RWLockWriter, 'write'], + [vaultId.toString(), RWLockWriter, 'write', ctx], async () => { await tran.lock( [...this.vaultsNamesDbPath, newVaultName] @@ -684,15 +717,23 @@ class VaultManager { * Clones the contents of a remote vault into a new local * vault instance */ + public async cloneVault( + nodeId: NodeId, + vaultNameOrId: VaultId | VaultName, + ctx?: Partial, + tran?: DBTransaction, + ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @timedCancellable(true) public async cloneVault( nodeId: NodeId, vaultNameOrId: VaultId | VaultName, + @context ctx: ContextTimed, tran?: DBTransaction, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.cloneVault(nodeId, vaultNameOrId, tran), + this.cloneVault(nodeId, vaultNameOrId, ctx, tran), ); } @@ -707,14 +748,15 @@ class VaultManager { const vault = await VaultInternal.cloneVaultInternal({ targetNodeId: nodeId, targetVaultNameOrId: vaultNameOrId, - vaultId, + vaultId: vaultId, db: this.db, nodeManager: this.nodeManager, vaultsDbPath: this.vaultsDbPath, keyRing: this.keyRing, efs: this.efs, + ctx: ctx, logger: this.logger.getChild(VaultInternal.name), - tran, + tran: tran, }); this.vaultMap.set(vaultIdString, vault); const vaultMetadata = (await this.getVaultMeta(vaultId, tran))!; @@ -765,20 +807,38 @@ class VaultManager { * Pulls the contents of a remote vault into an existing vault * instance */ - public async pullVault({ - vaultId, - pullNodeId, - pullVaultNameOrId, - tran, - }: { - vaultId: VaultId; - pullNodeId?: NodeId; - pullVaultNameOrId?: VaultId | VaultName; - tran?: DBTransaction; - }): Promise { + public async pullVault( + { + vaultId, + pullNodeId, + pullVaultNameOrId, + tran, + }: { + vaultId: VaultId; + pullNodeId?: NodeId; + pullVaultNameOrId?: VaultId | VaultName; + tran?: DBTransaction; + }, + ctx?: Partial, + ): Promise; + @timedCancellable(true) + public async pullVault( + { + vaultId, + pullNodeId, + pullVaultNameOrId, + tran, + }: { + vaultId: VaultId; + pullNodeId?: NodeId; + pullVaultNameOrId?: 
VaultId | VaultName; + tran?: DBTransaction; + }, + @context ctx: ContextTimed, + ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.pullVault({ vaultId, pullNodeId, pullVaultNameOrId, tran }), + this.pullVault({ vaultId, pullNodeId, pullVaultNameOrId, tran }, ctx), ); } @@ -787,7 +847,7 @@ class VaultManager { [vaultId.toString(), RWLockWriter, 'write'], async () => { await tran.lock([...this.vaultsDbPath, vaultId].join('')); - const vault = await this.getVault(vaultId, tran); + const vault = await this.getVault(vaultId, tran, ctx); await vault.pullVault({ nodeManager: this.nodeManager, pullNodeId, @@ -844,7 +904,7 @@ class VaultManager { public async *handlePackRequest( vaultId: VaultId, body: Array, - ctx: ContextCancellable, + ctx: ContextTimed, tran?: DBTransaction, ): AsyncGenerator { if (tran == null) { @@ -880,7 +940,10 @@ class VaultManager { /** * Retrieves all the vaults for a peers node */ - public async *scanVaults(targetNodeId: NodeId): AsyncGenerator<{ + public async *scanVaults( + targetNodeId: NodeId, + ctx: ContextTimed, + ): AsyncGenerator<{ vaultName: VaultName; vaultIdEncoded: VaultIdEncoded; vaultPermissions: VaultAction[]; @@ -894,12 +957,14 @@ class VaultManager { vaultPermissions: VaultAction[]; }> { const client = connection.getClient(); - const genReadable = await client.methods.vaultsScan({}); + const genReadable = await client.methods.vaultsScan({}, ctx); for await (const vault of genReadable) { - const vaultName = vault.vaultName; - const vaultIdEncoded = vault.vaultIdEncoded; - const vaultPermissions = vault.vaultPermissions; - yield { vaultName, vaultIdEncoded, vaultPermissions }; + ctx.signal.throwIfAborted(); + yield { + vaultName: vault.vaultName, + vaultIdEncoded: vault.vaultIdEncoded, + vaultPermissions: vault.vaultPermissions, + }; } }, ); @@ -910,7 +975,7 @@ class VaultManager { */ public async *handleScanVaults( nodeId: NodeId, - ctx: ContextCancellable, + ctx: ContextTimed, tran?: DBTransaction, ): AsyncGenerator<{ vaultId: VaultId; @@ -977,10 +1042,17 @@ class VaultManager { return vaultId; } + protected async getVault( + vaultId: VaultId, + tran: DBTransaction, + ctx?: Partial, + ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @timedCancellable(true) protected async getVault( vaultId: VaultId, tran: DBTransaction, + @context ctx: ContextTimed, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => this.getVault(vaultId, tran)); @@ -997,13 +1069,14 @@ class VaultManager { } // 2. if the state exists then create, add to map and return that const newVault = await VaultInternal.createVaultInternal({ - vaultId, + vaultId: vaultId, keyRing: this.keyRing, efs: this.efs, logger: this.logger.getChild(VaultInternal.name), db: this.db, vaultsDbPath: this.vaultsDbPath, - tran, + ctx: ctx, + tran: tran, }); this.vaultMap.set(vaultIdString, newVault); return newVault; @@ -1013,24 +1086,33 @@ class VaultManager { * Takes a function and runs it with the listed vaults. 
locking is handled automatically * @param vaultIds List of vault ID for vaults you wish to use * @param f Function you wish to run with the provided vaults + * @param ctx * @param tran */ + public async withVaults( + vaultIds: VaultId[], + f: (...args: Vault[]) => Promise, + ctx?: Partial, + tran?: DBTransaction, + ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @timedCancellable(true) public async withVaults( vaultIds: VaultId[], f: (...args: Vault[]) => Promise, + @context ctx: ContextTimed, tran?: DBTransaction, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.withVaults(vaultIds, f, tran), + this.withVaults(vaultIds, f, ctx, tran), ); } // Obtaining locks const vaultLocks: Array> = vaultIds.map( (vaultId) => { - return [vaultId.toString(), RWLockWriter, 'read']; + return [vaultId.toString(), RWLockWriter, 'read', ctx]; }, ); // Running the function with locking diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index 5b8809417..92d4801ae 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -1,3 +1,4 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { NodeId } from '@/ids/types'; import type { VaultAction, @@ -353,7 +354,7 @@ describe('VaultManager', () => { // Scanning vaults const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; + const ctx = { signal: abortController.signal } as ContextTimed; const gen = vaultManager.handleScanVaults(nodeId1, ctx); const vaults: Record = {}; for await (const vault of gen) { @@ -366,7 +367,7 @@ describe('VaultManager', () => { // Should throw due to no permission await expect(async () => { const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; + const ctx = { signal: abortController.signal } as ContextTimed; for await (const _ of vaultManager.handleScanVaults(nodeId2, ctx)) { // Should throw } @@ -375,7 +376,7 @@ describe('VaultManager', () => { await gestaltGraph.setGestaltAction(['node', nodeId2], 'notify'); await expect(async () => { const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; + const ctx = { signal: abortController.signal } as ContextTimed; for await (const _ of vaultManager.handleScanVaults(nodeId2, ctx)) { // Should throw } @@ -1498,7 +1499,9 @@ describe('VaultManager', () => { // Should throw due to no permission const testFun = async () => { - for await (const _ of vaultManager.scanVaults(targetNodeId)) { + const abortController = new AbortController(); + const ctx = { signal: abortController.signal } as ContextTimed; + for await (const _ of vaultManager.scanVaults(targetNodeId, ctx)) { // Should throw } }; @@ -1526,7 +1529,9 @@ describe('VaultManager', () => { await remoteKeynode1.acl.setVaultAction(vault2, nodeId1, 'clone'); // No permissions for vault3 - const gen = vaultManager.scanVaults(targetNodeId); + const abortController = new AbortController(); + const ctx = { signal: abortController.signal } as ContextTimed; + const gen = vaultManager.scanVaults(targetNodeId, ctx); const vaults: Record = {}; for await (const vault of gen) { vaults[vault.vaultIdEncoded] = [ From 677a72f7bf7aa81b768690904ec794c19d46d6d5 Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Thu, 12 Dec 2024 18:24:05 +1100 Subject: [PATCH 05/14] chore: added ctx to most of `secrets` commands --- src/client/handlers/VaultsPull.ts | 16 +++++++++------- src/client/handlers/VaultsSecretsCat.ts | 10 
++++++++-- src/client/handlers/VaultsSecretsEnv.ts | 17 ++++++++++------- src/client/handlers/VaultsSecretsMkdir.ts | 12 ++++++++++-- src/client/handlers/VaultsSecretsNew.ts | 10 +++++++++- src/client/handlers/VaultsSecretsNewDir.ts | 6 ++++++ src/client/handlers/VaultsSecretsRemove.ts | 4 ++-- src/client/handlers/VaultsSecretsRename.ts | 10 +++++++++- src/client/handlers/VaultsSecretsStat.ts | 10 +++++++++- src/client/handlers/VaultsSecretsWriteFile.ts | 6 ++++++ src/client/handlers/VaultsVersion.ts | 6 ++++++ src/vaults/VaultManager.ts | 6 +++--- 12 files changed, 87 insertions(+), 26 deletions(-) diff --git a/src/client/handlers/VaultsPull.ts b/src/client/handlers/VaultsPull.ts index bf2eb9b39..c7282e4b6 100644 --- a/src/client/handlers/VaultsPull.ts +++ b/src/client/handlers/VaultsPull.ts @@ -47,13 +47,15 @@ class VaultsPull extends UnaryHandler< const nodeId = input.nodeIdEncoded ? ids.parseNodeId(input.nodeIdEncoded) : undefined; - await vaultManager.pullVault({ - vaultId: vaultId, - pullNodeId: nodeId, - pullVaultNameOrId: pullVaultId, - ctx: ctx, - tran: tran, - }); + await vaultManager.pullVault( + { + vaultId: vaultId, + pullNodeId: nodeId, + pullVaultNameOrId: pullVaultId, + tran: tran, + }, + ctx, + ); }); return { type: 'success', success: true }; }; diff --git a/src/client/handlers/VaultsSecretsCat.ts b/src/client/handlers/VaultsSecretsCat.ts index e75be0c30..f0b5c3233 100644 --- a/src/client/handlers/VaultsSecretsCat.ts +++ b/src/client/handlers/VaultsSecretsCat.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -26,6 +28,9 @@ class VaultsSecretsCat extends DuplexHandler< input: AsyncIterableIterator< ClientRPCRequestParams >, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; @@ -34,8 +39,8 @@ class VaultsSecretsCat extends DuplexHandler< > { // As we need to preserve the order of parameters, we need to loop over // them individually, as grouping them would make them go out of order. - for await (const secretIdentiferMessage of input) { - const { nameOrId, secretName } = secretIdentiferMessage; + for await (const secretIdentifierMessage of input) { + const { nameOrId, secretName } = secretIdentifierMessage; const vaultIdFromName = await vaultManager.getVaultId(nameOrId, tran); const vaultId = vaultIdFromName ?? 
vaultsUtils.decodeVaultId(nameOrId); if (vaultId == null) throw new vaultsErrors.ErrorVaultsVaultUndefined(); @@ -63,6 +68,7 @@ class VaultsSecretsCat extends DuplexHandler< throw e; } }, + ctx, tran, ); } diff --git a/src/client/handlers/VaultsSecretsEnv.ts b/src/client/handlers/VaultsSecretsEnv.ts index 58cb1b79d..625eb25af 100644 --- a/src/client/handlers/VaultsSecretsEnv.ts +++ b/src/client/handlers/VaultsSecretsEnv.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -22,24 +24,24 @@ class VaultsSecretsEnv extends DuplexHandler< input: AsyncIterableIterator< ClientRPCRequestParams >, - _cancel, - _meta, - ctx, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; - return yield* db.withTransactionG(async function* (tran): AsyncGenerator< ClientRPCResponseResult > { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); for await (const secretIdentifierMessage of input) { const { nameOrId, secretName } = secretIdentifierMessage; const vaultIdFromName = await vaultManager.getVaultId(nameOrId, tran); const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${nameOrId}" does not exist`, + ); } const secrets = await vaultManager.withVaults( [vaultId], @@ -72,6 +74,7 @@ class VaultsSecretsEnv extends DuplexHandler< return results; }); }, + ctx, tran, ); for (const { filePath, value } of secrets) { diff --git a/src/client/handlers/VaultsSecretsMkdir.ts b/src/client/handlers/VaultsSecretsMkdir.ts index da8b2c00c..9f65821ce 100644 --- a/src/client/handlers/VaultsSecretsMkdir.ts +++ b/src/client/handlers/VaultsSecretsMkdir.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -22,6 +24,9 @@ class VaultsSecretsMkdir extends DuplexHandler< > { public handle = async function* ( input: AsyncIterableIterator>, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; @@ -38,10 +43,12 @@ class VaultsSecretsMkdir extends DuplexHandler< const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${nameOrId}" does not exist`, + ); } // Write directories. This doesn't need to be grouped by vault names, - // as no commit is created for empty directories anyways. The + // as no commit is created for empty directories anyway. The // vaultOps.mkdir() method also returns an object of type // SuccessOrErrorMessage. As such, we can return the result without // doing any type conversion or extra processing. 
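
The secrets handlers above all follow the same streaming pattern: iterate the incoming message stream inside a single transaction and consult the handler's abort signal between items so a cancelled call stops promptly. A stripped-down sketch of that loop, with a made-up `process` callback and generic message types standing in for the vault lookups:

// Hypothetical duplex handler core: one response per request message,
// checking for cancellation between messages.
async function* handleMessages<I, O>(
  input: AsyncIterable<I>,
  process: (message: I) => Promise<O>,
  ctx: { signal: AbortSignal },
): AsyncGenerator<O> {
  for await (const message of input) {
    // Bail out between messages if the caller aborted the RPC call.
    ctx.signal.throwIfAborted();
    yield await process(message);
  }
}
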
@@ -68,6 +75,7 @@ class VaultsSecretsMkdir extends DuplexHandler< } } }, + ctx, tran, ); } diff --git a/src/client/handlers/VaultsSecretsNew.ts b/src/client/handlers/VaultsSecretsNew.ts index 33b21fd12..72290e714 100644 --- a/src/client/handlers/VaultsSecretsNew.ts +++ b/src/client/handlers/VaultsSecretsNew.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -21,6 +23,9 @@ class VaultsSecretsNew extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; @@ -32,7 +37,9 @@ class VaultsSecretsNew extends UnaryHandler< const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } const content = Buffer.from(input.secretContent, 'binary'); await vaultManager.withVaults( @@ -40,6 +47,7 @@ class VaultsSecretsNew extends UnaryHandler< async (vault) => { await vaultOps.addSecret(vault, input.secretName, content); }, + ctx, tran, ); }); diff --git a/src/client/handlers/VaultsSecretsNewDir.ts b/src/client/handlers/VaultsSecretsNewDir.ts index 22d436914..274efe1b0 100644 --- a/src/client/handlers/VaultsSecretsNewDir.ts +++ b/src/client/handlers/VaultsSecretsNewDir.ts @@ -1,5 +1,7 @@ import type { FileSystem } from 'types'; +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -23,6 +25,9 @@ class VaultsSecretsNewDir extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, @@ -44,6 +49,7 @@ class VaultsSecretsNewDir extends UnaryHandler< async (vault) => { await vaultOps.addSecretDirectory(vault, input.dirName, fs); }, + ctx, tran, ); }); diff --git a/src/client/handlers/VaultsSecretsRemove.ts b/src/client/handlers/VaultsSecretsRemove.ts index 8722bcd91..b085ad0da 100644 --- a/src/client/handlers/VaultsSecretsRemove.ts +++ b/src/client/handlers/VaultsSecretsRemove.ts @@ -1,4 +1,4 @@ -import type { ContextCancellable } from '@matrixai/contexts'; +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; import type { ResourceAcquire } from '@matrixai/resources'; import type { JSONValue } from '@matrixai/rpc'; @@ -35,7 +35,7 @@ class VaultsSecretsRemove extends DuplexHandler< >, _cancel: (reason?: any) => void, _meta: Record, - ctx: ContextCancellable, + ctx: ContextTimed, ): AsyncGenerator> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; diff --git a/src/client/handlers/VaultsSecretsRename.ts b/src/client/handlers/VaultsSecretsRename.ts index 433b409ee..407a76d7f 100644 --- a/src/client/handlers/VaultsSecretsRename.ts +++ b/src/client/handlers/VaultsSecretsRename.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, 
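
Each unary handler in these hunks gains the same trailing parameters, even where unused, so the timed context is available for forwarding into `VaultManager`. A hedged sketch of that handler shape, assuming the three type parameters are container, request params, and response result as the surrounding handlers suggest; the concrete types here are illustrative only:

import type { ContextTimed } from '@matrixai/contexts';
import type { JSONValue } from '@matrixai/rpc';
import { UnaryHandler } from '@matrixai/rpc';

class ExampleUnary extends UnaryHandler<
  { counter: number },  // Container (illustrative)
  { input: string },    // Request params (illustrative)
  { output: string }    // Response result (illustrative)
> {
  public handle = async (
    input: { input: string },
    _cancel: (reason?: any) => void,
    _meta: Record<string, JSONValue> | undefined,
    ctx: ContextTimed,
  ): Promise<{ output: string }> => {
    // Respect cancellation before doing any work.
    ctx.signal.throwIfAborted();
    return { output: input.input };
  };
}
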
@@ -21,6 +23,9 @@ class VaultsSecretsRename extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; @@ -32,7 +37,9 @@ class VaultsSecretsRename extends UnaryHandler< const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } await vaultManager.withVaults( [vaultId], @@ -43,6 +50,7 @@ class VaultsSecretsRename extends UnaryHandler< input.newSecretName, ); }, + ctx, tran, ); }); diff --git a/src/client/handlers/VaultsSecretsStat.ts b/src/client/handlers/VaultsSecretsStat.ts index 456ff611e..8bbb8d6b0 100644 --- a/src/client/handlers/VaultsSecretsStat.ts +++ b/src/client/handlers/VaultsSecretsStat.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -21,6 +23,9 @@ class VaultsSecretsStat extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; @@ -32,7 +37,9 @@ class VaultsSecretsStat extends UnaryHandler< const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } const secretName = input.secretName; const stat = await vaultManager.withVaults( @@ -40,6 +47,7 @@ class VaultsSecretsStat extends UnaryHandler< async (vault) => { return await vaultOps.statSecret(vault, secretName); }, + ctx, tran, ); return { diff --git a/src/client/handlers/VaultsSecretsWriteFile.ts b/src/client/handlers/VaultsSecretsWriteFile.ts index 7a1cc7d5d..62dc7764c 100644 --- a/src/client/handlers/VaultsSecretsWriteFile.ts +++ b/src/client/handlers/VaultsSecretsWriteFile.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -21,6 +23,9 @@ class VaultsSecretsWriteFile extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; @@ -40,6 +45,7 @@ class VaultsSecretsWriteFile extends UnaryHandler< async (vault) => { await vaultOps.writeSecret(vault, input.secretName, secretContent); }, + ctx, tran, ); }); diff --git a/src/client/handlers/VaultsVersion.ts b/src/client/handlers/VaultsVersion.ts index f32daaa9f..48c8aa83b 100644 --- a/src/client/handlers/VaultsVersion.ts +++ b/src/client/handlers/VaultsVersion.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -20,6 +22,9 @@ class VaultsVersion extends UnaryHandler< > { public 
handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; @@ -42,6 +47,7 @@ class VaultsVersion extends UnaryHandler< const currentVersionId = (await vault.log(versionId, 0))[0]?.commitId; return [latestOid, currentVersionId]; }, + ctx, tran, ); // Checking if latest version ID diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 59870a438..fc120bf30 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -850,9 +850,9 @@ class VaultManager { const vault = await this.getVault(vaultId, tran, ctx); await vault.pullVault({ nodeManager: this.nodeManager, - pullNodeId, - pullVaultNameOrId, - tran, + pullNodeId: pullNodeId, + pullVaultNameOrId: pullVaultNameOrId, + tran: tran, }); }, ); From 7f00cf76b8f0d3dde09c0f230d1944feb1820641 Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Fri, 13 Dec 2024 17:16:14 +1100 Subject: [PATCH 06/14] chore: cleaned up `VaultInternal.ts` --- src/client/handlers/VaultsSecretsCat.ts | 5 +- src/git/utils.ts | 25 +- src/vaults/VaultInternal.ts | 373 +++++++++++++----------- src/vaults/VaultManager.ts | 86 +++--- src/vaults/VaultOps.ts | 1 + 5 files changed, 268 insertions(+), 222 deletions(-) diff --git a/src/client/handlers/VaultsSecretsCat.ts b/src/client/handlers/VaultsSecretsCat.ts index f0b5c3233..9db4d664c 100644 --- a/src/client/handlers/VaultsSecretsCat.ts +++ b/src/client/handlers/VaultsSecretsCat.ts @@ -43,7 +43,10 @@ class VaultsSecretsCat extends DuplexHandler< const { nameOrId, secretName } = secretIdentifierMessage; const vaultIdFromName = await vaultManager.getVaultId(nameOrId, tran); const vaultId = vaultIdFromName ?? 
vaultsUtils.decodeVaultId(nameOrId); - if (vaultId == null) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + if (vaultId == null) + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${nameOrId}" does not exist`, + ); yield await vaultManager.withVaults( [vaultId], async (vault) => { diff --git a/src/git/utils.ts b/src/git/utils.ts index 38eb9096e..169976257 100644 --- a/src/git/utils.ts +++ b/src/git/utils.ts @@ -1,4 +1,4 @@ -import type { ContextCancellable } from '@matrixai/contexts'; +import type { ContextTimed } from '@matrixai/contexts'; import type { Capability, CapabilityList, @@ -78,7 +78,7 @@ async function* listReferencesGenerator( dir: string; gitDir: string; }, - ctx: ContextCancellable, + ctx: ContextTimed, ): AsyncGenerator<[Reference, ObjectId], void, void> { const refs: Array<[string, Promise]> = await git .listBranches({ @@ -163,7 +163,7 @@ async function listObjects( wants: ObjectIdList; haves: ObjectIdList; }, - ctx: ContextCancellable, + ctx: ContextTimed, ): Promise { const commits = new Set(); const trees = new Set(); @@ -249,17 +249,21 @@ const excludedDirs = ['pack', 'info']; /** * Walks the filesystem to list out all git objects in the objects directory */ -async function listObjectsAll({ - fs, - gitDir, -}: { - fs: EncryptedFS; - gitDir: string; -}): Promise> { +async function listObjectsAll( + { + fs, + gitDir, + }: { + fs: EncryptedFS; + gitDir: string; + }, + ctx: ContextTimed, +): Promise> { const objectsDirPath = path.join(gitDir, objectsDirName); const objectSet: Set = new Set(); const objectDirs = await fs.promises.readdir(objectsDirPath); for (const objectDir of objectDirs) { + ctx.signal.throwIfAborted(); if (typeof objectDir !== 'string') { utils.never('objectDir should be a string'); } @@ -268,6 +272,7 @@ async function listObjectsAll({ path.join(objectsDirPath, objectDir), ); for (const objectId of objectIds) { + ctx.signal.throwIfAborted(); objectSet.add(objectDir + objectId); } } diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts index 201230cb4..d3f84b010 100644 --- a/src/vaults/VaultInternal.ts +++ b/src/vaults/VaultInternal.ts @@ -1,6 +1,6 @@ -import type { ReadCommitResult } from 'isomorphic-git'; import type { EncryptedFS } from 'encryptedfs'; -import type { ContextTimed } from '@matrixai/contexts'; +import type { ReadCommitResult } from 'isomorphic-git'; +import type { ContextTimed, ContextTimedInput } from '@matrixai/contexts'; import type { DB, DBTransaction, LevelPath } from '@matrixai/db'; import type { RPCClient } from '@matrixai/rpc'; import type { ResourceAcquire, ResourceRelease } from '@matrixai/resources'; @@ -15,11 +15,11 @@ import type { VaultName, VaultRef, } from './types'; -import type KeyRing from '../keys/KeyRing'; +import type { POJO } from '../types'; import type { NodeId, NodeIdEncoded } from '../ids/types'; +import type KeyRing from '../keys/KeyRing'; import type NodeManager from '../nodes/NodeManager'; import type agentClientManifest from '../nodes/agent/callers'; -import type { POJO } from '../types'; import path from 'path'; import git from 'isomorphic-git'; import Logger from '@matrixai/logger'; @@ -27,17 +27,17 @@ import { CreateDestroyStartStop, ready, } from '@matrixai/async-init/dist/CreateDestroyStartStop'; -import { withF, withG } from '@matrixai/resources'; -import { context, timedCancellable } from '@matrixai/contexts/dist/decorators'; import { RWLockWriter } from '@matrixai/async-locks'; +import { context, timedCancellable } from '@matrixai/contexts/dist/decorators'; +import { 
withF, withG } from '@matrixai/resources'; import { tagLast } from './types'; -import * as vaultsUtils from './utils'; import * as vaultsErrors from './errors'; import * as vaultsEvents from './events'; +import * as vaultsUtils from './utils'; import * as ids from '../ids'; +import * as utils from '../utils'; import * as nodesUtils from '../nodes/utils'; import * as gitUtils from '../git/utils'; -import * as utils from '../utils'; type RemoteInfo = { remoteNode: NodeIdEncoded; @@ -45,6 +45,7 @@ type RemoteInfo = { }; interface VaultInternal extends CreateDestroyStartStop {} + @CreateDestroyStartStop( new vaultsErrors.ErrorVaultRunning(), new vaultsErrors.ErrorVaultDestroyed(), @@ -63,43 +64,45 @@ class VaultInternal { * If no state already exists then state for the vault is initialized. * If state already exists then this just creates the `VaultInternal` instance for managing that state. */ - public static async createVaultInternal({ - vaultId, - vaultName, - db, - vaultsDbPath, - keyRing, - efs, - fresh = false, - ctx, - logger = new Logger(this.name), - tran, - }: { - vaultId: VaultId; - vaultName?: VaultName; - db: DB; - vaultsDbPath: LevelPath; - keyRing: KeyRing; - efs: EncryptedFS; - fresh?: boolean; - ctx: ContextTimed; - logger?: Logger; - tran?: DBTransaction; - }): Promise { + public static async createVaultInternal( + { + vaultId, + vaultName, + db, + vaultsDbPath, + keyRing, + efs, + fresh = false, + logger = new Logger(this.name), + }: { + vaultId: VaultId; + vaultName?: VaultName; + db: DB; + vaultsDbPath: LevelPath; + keyRing: KeyRing; + efs: EncryptedFS; + fresh?: boolean; + logger?: Logger; + }, + ctx: ContextTimed, + tran?: DBTransaction, + ): Promise { if (tran == null) { return await db.withTransactionF((tran) => - this.createVaultInternal({ - vaultId, - vaultName, - db, - vaultsDbPath, - keyRing, - efs, - fresh, + this.createVaultInternal( + { + vaultId, + vaultName, + db, + vaultsDbPath, + keyRing, + efs, + fresh, + logger, + }, ctx, - logger, tran, - }), + ), ); } @@ -113,7 +116,7 @@ class VaultInternal { efs, logger, }); - await vault.start({ fresh, vaultName, ctx, tran }); + await vault.start({ fresh, vaultName }, ctx, tran); logger.info(`Created ${this.name} - ${vaultIdEncoded}`); return vault; } @@ -121,46 +124,48 @@ class VaultInternal { /** * Will create a new vault by cloning the vault from a remote node. 
*/ - public static async cloneVaultInternal({ - targetNodeId, - targetVaultNameOrId, - vaultId, - db, - vaultsDbPath, - keyRing, - nodeManager, - efs, - ctx, - logger = new Logger(this.name), - tran, - }: { - targetNodeId: NodeId; - targetVaultNameOrId: VaultId | VaultName; - vaultId: VaultId; - db: DB; - vaultsDbPath: LevelPath; - efs: EncryptedFS; - keyRing: KeyRing; - nodeManager: NodeManager; - ctx: ContextTimed; - logger?: Logger; - tran?: DBTransaction; - }): Promise { + public static async cloneVaultInternal( + { + targetNodeId, + targetVaultNameOrId, + vaultId, + db, + vaultsDbPath, + efs, + keyRing, + nodeManager, + logger = new Logger(this.name), + }: { + targetNodeId: NodeId; + targetVaultNameOrId: VaultId | VaultName; + vaultId: VaultId; + db: DB; + vaultsDbPath: LevelPath; + efs: EncryptedFS; + keyRing: KeyRing; + nodeManager: NodeManager; + logger?: Logger; + }, + ctx: ContextTimed, + tran?: DBTransaction, + ): Promise { if (tran == null) { return await db.withTransactionF((tran) => - this.cloneVaultInternal({ - targetNodeId, - targetVaultNameOrId, - vaultId, - db, - vaultsDbPath, - keyRing, - nodeManager, - efs, + this.cloneVaultInternal( + { + targetNodeId, + targetVaultNameOrId, + vaultId, + db, + vaultsDbPath, + efs, + keyRing, + nodeManager, + logger, + }, ctx, - logger, tran, - }), + ), ); } const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); @@ -184,7 +189,6 @@ class VaultInternal { targetVaultNameOrId, 'clone', ); - // TODO: ability to cancel git clone await git.clone({ fs: efs, http: { request }, @@ -201,7 +205,7 @@ class VaultInternal { remoteVault: vaultsUtils.encodeVaultId(remoteVaultId), }; - await vault.start({ vaultName, ctx, tran }); + await vault.start({ vaultName }, ctx, tran); // Setting the remote in the metadata await tran.put( [...vault.vaultMetadataDbPath, VaultInternal.remoteKey], @@ -267,23 +271,35 @@ class VaultInternal { * @param ctx * @param tran */ - public async start({ - fresh = false, - vaultName, - ctx, - tran, - }: { - fresh?: boolean; - vaultName?: VaultName; - ctx?: ContextTimed; - tran?: DBTransaction; - } = {}): Promise { + public async start( + { + fresh = false, + vaultName, + }: { + fresh?: boolean; + vaultName?: VaultName; + }, + ctx?: Partial, + tran?: DBTransaction, + ): Promise; + @timedCancellable(true) + public async start( + { + fresh = false, + vaultName, + }: { + fresh?: boolean; + vaultName?: VaultName; + } = {}, + @context ctx: ContextTimed, + tran?: DBTransaction, + ): Promise { if (tran == null) { return await this.db.withTransactionF((tran) => - this.start_(fresh, tran, ctx, vaultName), + this.start_({ fresh, vaultName }, tran, ctx), ); } - return await this.start_(fresh, tran, ctx, vaultName); + return await this.start_({ fresh, vaultName }, tran, ctx); } /** @@ -291,17 +307,15 @@ class VaultInternal { * create a DBTransaction. 
*/ protected async start_( - fresh: boolean, - tran: DBTransaction, - ctx?: Partial, - vaultName?: VaultName, - ): Promise; - @timedCancellable(true) - protected async start_( - fresh: boolean, + { + fresh, + vaultName, + }: { + fresh: boolean; + vaultName?: VaultName; + }, tran: DBTransaction, - @context ctx: ContextTimed, - vaultName?: VaultName, + ctx: ContextTimed, ): Promise { this.logger.info( `Starting ${this.constructor.name} - ${this.vaultIdEncoded}`, @@ -323,7 +337,7 @@ class VaultInternal { await vaultsUtils.mkdirExists(this.efs, this.vaultIdEncoded); await vaultsUtils.mkdirExists(this.efs, this.vaultDataDir); await vaultsUtils.mkdirExists(this.efs, this.vaultGitDir); - await this.setupMeta({ vaultName, tran }); + await this.setupMeta({ vaultName }, tran); await this.setupGit(ctx, tran); this.efsVault = await this.efs.chroot(this.vaultDataDir); this.logger.info( @@ -351,7 +365,7 @@ class VaultInternal { * We use a protected destroy method to avoid the `async-init` lifecycle deadlocking when doing the recursive call to * create a DBTransaction. */ - protected async destroy_(tran: DBTransaction) { + protected async destroy_(tran: DBTransaction): Promise { this.logger.info( `Destroying ${this.constructor.name} - ${this.vaultIdEncoded}`, ); @@ -369,10 +383,27 @@ class VaultInternal { ); } + public async log( + { + ref = 'HEAD', + limit, + }: { + ref: string | VaultRef; + limit?: number; + }, + ctx?: Partial, + ): Promise>; @ready(new vaultsErrors.ErrorVaultNotRunning()) + @timedCancellable(true) public async log( - ref: string | VaultRef = 'HEAD', - limit?: number, + { + ref = 'HEAD', + limit, + }: { + ref: string | VaultRef; + limit?: number; + }, + @context ctx: ContextTimed, ): Promise> { vaultsUtils.assertRef(ref); if (ref === vaultsUtils.tagLast) { @@ -382,10 +413,11 @@ class VaultInternal { fs: this.efs, dir: this.vaultDataDir, gitdir: this.vaultGitDir, - ref, + ref: ref, depth: limit, }); return commits.map(({ oid, commit }: ReadCommitResult) => { + ctx.signal.throwIfAborted(); return { commitId: oid as CommitId, parent: commit.parent as Array, @@ -403,8 +435,8 @@ class VaultInternal { } /** - * Checks out the vault repository to specific commit ID or special tags - * This changes the working directory and updates the HEAD reference + * Checks out the vault repository to specific commit ID or special tags. + * This changes the working directory and updates the HEAD reference. */ @ready(new vaultsErrors.ErrorVaultNotRunning()) public async version(ref: string | VaultRef = tagLast): Promise { @@ -417,7 +449,7 @@ class VaultInternal { fs: this.efs, dir: this.vaultDataDir, gitdir: this.vaultGitDir, - ref, + ref: ref, force: true, }); } catch (e) { @@ -461,7 +493,7 @@ class VaultInternal { */ public async writeF( f: (fs: FileSystemWritable) => Promise, - ctx?: Partial, + ctx?: Partial, tran?: DBTransaction, ): Promise; @ready(new vaultsErrors.ErrorVaultNotRunning()) @@ -503,7 +535,7 @@ class VaultInternal { await this.createCommit(ctx); } catch (e) { // Error implies dirty state - await this.cleanWorkingDirectory(); + await this.cleanWorkingDirectory(ctx); throw e; } await tran.put( @@ -528,8 +560,9 @@ class VaultInternal { const efsVault = this.efsVault; const vaultMetadataDbPath = this.vaultMetadataDbPath; - const createCommit = () => this.createCommit(ctx); - const cleanWorkingDirectory = () => this.cleanWorkingDirectory(); + // In AsyncGenerators, "this" refers to the generator itself, so we alias + // "this" and use the alias to access protected methods. 
+ const parentThis = this; return withG([this.lock.write()], async function* () { if ( (await tran.get([...vaultMetadataDbPath, VaultInternal.remoteKey])) != @@ -543,19 +576,16 @@ class VaultInternal { ); await tran.put([...vaultMetadataDbPath, VaultInternal.dirtyKey], true); + // Create the commit let result: TReturn; - // Do what you need to do here, create the commit try { result = yield* g(efsVault); - // At the end of the generator - // you need to do this - // but just before - // you need to finish it up - // After doing mutation we need to commit the new history - await createCommit(); + // After doing mutation we need to commit the new history. You need to + // do this at the end of the generator. + await parentThis.createCommit(ctx); } catch (e) { // Error implies dirty state - await cleanWorkingDirectory(); + await parentThis.cleanWorkingDirectory(ctx); throw e; } await tran.put([...vaultMetadataDbPath, VaultInternal.dirtyKey], false); @@ -624,7 +654,7 @@ class VaultInternal { } catch (e_) { e = e_; // Error implies dirty state - await this.cleanWorkingDirectory(); + await this.cleanWorkingDirectory(ctx); } } // For some reason, the transaction type doesn't properly waterfall @@ -646,25 +676,28 @@ class VaultInternal { * If `pullNodeId` and `pullVaultNameOrId` it uses that for the remote instead. */ @ready(new vaultsErrors.ErrorVaultNotRunning()) - public async pullVault({ - nodeManager, - pullNodeId, - pullVaultNameOrId, - tran, - }: { - nodeManager: NodeManager; - pullNodeId?: NodeId; - pullVaultNameOrId?: VaultId | VaultName; - tran?: DBTransaction; - }): Promise { + public async pullVault( + { + nodeManager, + pullNodeId, + pullVaultNameOrId, + }: { + nodeManager: NodeManager; + pullNodeId?: NodeId; + pullVaultNameOrId?: VaultId | VaultName; + }, + tran?: DBTransaction, + ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.pullVault({ - nodeManager, - pullNodeId, - pullVaultNameOrId, + this.pullVault( + { + nodeManager, + pullNodeId, + pullVaultNameOrId, + }, tran, - }), + ), ); } @@ -720,9 +753,7 @@ class VaultInternal { singleBranch: true, fastForward: true, fastForwardOnly: true, - author: { - name: nodesUtils.encodeNodeId(pullNodeId!), - }, + author: { name: nodesUtils.encodeNodeId(pullNodeId!) }, }); }); return remoteVaultId; @@ -759,13 +790,10 @@ class VaultInternal { * Creates a `dirty` boolean in the database to track dirty state of the vault. * Also adds the vault's name to the database. */ - protected async setupMeta({ - vaultName, - tran, - }: { - vaultName?: VaultName; - tran: DBTransaction; - }): Promise { + protected async setupMeta( + { vaultName }: { vaultName?: VaultName }, + tran: DBTransaction, + ): Promise { // Set up dirty key defaulting to false if ( (await tran.get([ @@ -860,7 +888,7 @@ class VaultInternal { ) { // Force checkout out to the latest commit // This ensures that any uncommitted state is dropped - await this.cleanWorkingDirectory(); + await this.cleanWorkingDirectory(ctx); // Do global GC operation await this.garbageCollectGitObjectsGlobal(ctx); @@ -943,7 +971,7 @@ class VaultInternal { const vaultsGitPackGetStream = await client.methods.vaultsGitPackGet({ nameOrId: result.vaultIdEncoded as string, - vaultAction, + vaultAction: vaultAction, }); return [ @@ -998,7 +1026,7 @@ class VaultInternal { * and the old history is removed from the old canonical head to the branch point. This is to maintain the strict * non-branching linear history. 
*/ - protected async createCommit(ctx: ContextTimed) { + protected async createCommit(ctx: ContextTimed): Promise { // Forced wait for 1 ms to allow difference in mTime between file changes await utils.sleep(1); // Checking if commit is appending or branching @@ -1037,7 +1065,7 @@ class VaultInternal { stageStatus, ] of statusMatrix) { /* - Type StatusRow = [Filename, HeadStatus, WorkdirStatus, StageStatus] + Type StatusRow = [Filename, HeadStatus, WorkdirStatus, StageStatus]. The HeadStatus status is either absent (0) or present (1). The WorkdirStatus status is either absent (0), identical to HEAD (1), or different from HEAD (2). The StageStatus status is either absent (0), identical to HEAD (1), identical to WORKDIR (2), or different from WORKDIR (3). @@ -1057,6 +1085,7 @@ class VaultInternal { ] ``` */ + ctx.signal.throwIfAborted(); const status = `${HEADStatus}${workingDirStatus}${stageStatus}`; switch (status) { case '022': // Added, staged @@ -1067,15 +1096,15 @@ class VaultInternal { case '122': // Modified, staged message.push(`${filePath} modified`); break; - case '101': // Deleted, unStaged - // need to stage the deletion with remove + case '101': // Deleted, unstaged + // Need to stage the deletion with remove await git.remove({ fs: this.efs, dir: this.vaultDataDir, gitdir: this.vaultGitDir, filepath: filePath, }); - // Fall through + // Fallthrough case '100': // Deleted, staged message.push(`${filePath} deleted`); break; @@ -1093,9 +1122,7 @@ class VaultInternal { fs: this.efs, dir: this.vaultDataDir, gitdir: this.vaultGitDir, - author: { - name: nodeIdEncoded, - }, + author: { name: nodeIdEncoded }, message: message.toString(), ref: 'HEAD', }); @@ -1120,7 +1147,7 @@ class VaultInternal { * This will remove any un-committed changes since any untracked or modified files outside a commit is dirty state. * Dirty state should only happen if the usual commit procedure was interrupted ungracefully. */ - protected async cleanWorkingDirectory() { + protected async cleanWorkingDirectory(ctx: ContextTimed): Promise { // Check the status matrix for any un-staged file changes // which are considered dirty commits const statusMatrix = await git.statusMatrix({ @@ -1129,8 +1156,9 @@ class VaultInternal { gitdir: this.vaultGitDir, }); for await (const [filePath, , workingDirStatus] of statusMatrix) { - // For all files stage all changes, this is needed - // so that we can check out all untracked files as well + ctx.signal.throwIfAborted(); + // Stage all changes across all files. This is needed so that we can + // checkout all untracked files as well. if (workingDirStatus === 0) { await git.remove({ fs: this.efs, @@ -1162,14 +1190,15 @@ class VaultInternal { * This is costly since it will compare the walked tree with all existing objects. 
*/ protected async garbageCollectGitObjectsGlobal( - ctx?: Partial, - ): Promise; - @timedCancellable(true) - protected async garbageCollectGitObjectsGlobal(@context ctx: ContextTimed) { - const objectIdsAll = await gitUtils.listObjectsAll({ - fs: this.efs, - gitDir: this.vaultGitDir, - }); + ctx: ContextTimed, + ): Promise { + const objectIdsAll = await gitUtils.listObjectsAll( + { + fs: this.efs, + gitDir: this.vaultGitDir, + }, + ctx, + ); const objects = new Set(objectIdsAll); const masterRef = await git.resolveRef({ fs: this.efs, @@ -1210,7 +1239,7 @@ class VaultInternal { startId: string, stopId: string, ctx: ContextTimed, - ) { + ): Promise { const objects = await gitUtils.listObjects( { efs: this.efs, diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index fc120bf30..b7c62c4f3 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -353,18 +353,20 @@ class VaultManager { [vaultId.toString(), RWLockWriter, 'write', ctx], async () => { // Creating vault - const vault = await VaultInternal.createVaultInternal({ - vaultId, - vaultName, - keyRing: this.keyRing, - efs: this.efs, - logger: this.logger.getChild(VaultInternal.name), - db: this.db, - vaultsDbPath: this.vaultsDbPath, - fresh: true, + const vault = await VaultInternal.createVaultInternal( + { + vaultId: vaultId, + vaultName: vaultName, + keyRing: this.keyRing, + efs: this.efs, + db: this.db, + vaultsDbPath: this.vaultsDbPath, + fresh: true, + logger: this.logger.getChild(VaultInternal.name), + }, ctx, tran, - }); + ); // Adding vault to object map this.vaultMap.set(vaultIdString, vault); return vault.vaultId; @@ -745,19 +747,21 @@ class VaultManager { return await this.vaultLocks.withF( [vaultId.toString(), RWLockWriter, 'write'], async () => { - const vault = await VaultInternal.cloneVaultInternal({ - targetNodeId: nodeId, - targetVaultNameOrId: vaultNameOrId, - vaultId: vaultId, - db: this.db, - nodeManager: this.nodeManager, - vaultsDbPath: this.vaultsDbPath, - keyRing: this.keyRing, - efs: this.efs, - ctx: ctx, - logger: this.logger.getChild(VaultInternal.name), - tran: tran, - }); + const vault = await VaultInternal.cloneVaultInternal( + { + targetNodeId: nodeId, + targetVaultNameOrId: vaultNameOrId, + vaultId: vaultId, + db: this.db, + nodeManager: this.nodeManager, + vaultsDbPath: this.vaultsDbPath, + keyRing: this.keyRing, + efs: this.efs, + logger: this.logger.getChild(VaultInternal.name), + }, + ctx, + tran, + ); this.vaultMap.set(vaultIdString, vault); const vaultMetadata = (await this.getVaultMeta(vaultId, tran))!; const baseVaultName = vaultMetadata.vaultName; @@ -848,12 +852,14 @@ class VaultManager { async () => { await tran.lock([...this.vaultsDbPath, vaultId].join('')); const vault = await this.getVault(vaultId, tran, ctx); - await vault.pullVault({ - nodeManager: this.nodeManager, - pullNodeId: pullNodeId, - pullVaultNameOrId: pullVaultNameOrId, - tran: tran, - }); + await vault.pullVault( + { + nodeManager: this.nodeManager, + pullNodeId: pullNodeId, + pullVaultNameOrId: pullVaultNameOrId, + }, + tran, + ); }, ); } @@ -1068,16 +1074,18 @@ class VaultManager { ); } // 2. 
if the state exists then create, add to map and return that - const newVault = await VaultInternal.createVaultInternal({ - vaultId: vaultId, - keyRing: this.keyRing, - efs: this.efs, - logger: this.logger.getChild(VaultInternal.name), - db: this.db, - vaultsDbPath: this.vaultsDbPath, - ctx: ctx, - tran: tran, - }); + const newVault = await VaultInternal.createVaultInternal( + { + vaultId: vaultId, + keyRing: this.keyRing, + efs: this.efs, + db: this.db, + vaultsDbPath: this.vaultsDbPath, + logger: this.logger.getChild(VaultInternal.name), + }, + ctx, + tran, + ); this.vaultMap.set(vaultIdString, newVault); return newVault; } diff --git a/src/vaults/VaultOps.ts b/src/vaults/VaultOps.ts index 6d2ea6079..03211814a 100644 --- a/src/vaults/VaultOps.ts +++ b/src/vaults/VaultOps.ts @@ -83,6 +83,7 @@ async function renameSecret( /** * Returns the contents of a secret */ +// TODO: use contexts async function getSecret(vault: Vault, secretName: string): Promise { try { return await vault.readF(async (efs) => { From c066a3f63958825290ae1216ec42cb249d5152e3 Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Thu, 19 Dec 2024 16:26:40 +1100 Subject: [PATCH 07/14] chore: added ctx to `VaultInternal` and `VaultManager` [ci skip] --- src/client/handlers/VaultsSecretsCat.ts | 6 +- src/client/handlers/VaultsSecretsRemove.ts | 8 +- src/git/utils.ts | 12 +- src/nodes/NodeManager.ts | 2 +- src/vaults/VaultInternal.ts | 439 +++++++++++++-------- src/vaults/VaultManager.ts | 228 ++++++----- src/vaults/VaultOps.ts | 70 ++-- src/vaults/utils.ts | 36 +- 8 files changed, 467 insertions(+), 334 deletions(-) diff --git a/src/client/handlers/VaultsSecretsCat.ts b/src/client/handlers/VaultsSecretsCat.ts index 9db4d664c..4544585f1 100644 --- a/src/client/handlers/VaultsSecretsCat.ts +++ b/src/client/handlers/VaultsSecretsCat.ts @@ -37,8 +37,8 @@ class VaultsSecretsCat extends DuplexHandler< yield* db.withTransactionG(async function* (tran): AsyncGenerator< ClientRPCResponseResult > { - // As we need to preserve the order of parameters, we need to loop over - // them individually, as grouping them would make them go out of order. + // To preserve the order of parameters, we need to loop over them + // individually, as grouping them would make them go out of order. for await (const secretIdentifierMessage of input) { const { nameOrId, secretName } = secretIdentifierMessage; const vaultIdFromName = await vaultManager.getVaultId(nameOrId, tran); @@ -71,8 +71,8 @@ class VaultsSecretsCat extends DuplexHandler< throw e; } }, - ctx, tran, + ctx, ); } }); diff --git a/src/client/handlers/VaultsSecretsRemove.ts b/src/client/handlers/VaultsSecretsRemove.ts index b085ad0da..099f6c734 100644 --- a/src/client/handlers/VaultsSecretsRemove.ts +++ b/src/client/handlers/VaultsSecretsRemove.ts @@ -62,9 +62,11 @@ class VaultsSecretsRemove extends DuplexHandler< `Vault ${vaultName} does not exist`, ); } + // The resource acquisition will automatically create a transaction and + // release it when cleaning up. const acquire = await vaultManager.withVaults( [vaultId], - async (vault) => vault.acquireWrite(ctx), + async (vault) => vault.acquireWrite(undefined, ctx), ); vaultAcquires.push(acquire); } @@ -113,8 +115,8 @@ class VaultsSecretsRemove extends DuplexHandler< e.code === 'ENOTEMPTY' || e.code === 'EINVAL' ) { - // EINVAL can be triggered if removing the root of the - // vault is attempted. + // EINVAL can be triggered if removing the root of the vault is + // attempted. 
yield { type: 'error', code: e.code, diff --git a/src/git/utils.ts b/src/git/utils.ts index 169976257..c4dde6ae2 100644 --- a/src/git/utils.ts +++ b/src/git/utils.ts @@ -83,7 +83,7 @@ async function* listReferencesGenerator( const refs: Array<[string, Promise]> = await git .listBranches({ fs: efs, - dir, + dir: dir, gitdir: gitDir, }) .then((refs) => { @@ -98,7 +98,7 @@ async function* listReferencesGenerator( // HEAD always comes first const resolvedHead = await git.resolveRef({ fs: efs, - dir, + dir: dir, gitdir: gitDir, ref: HEAD_REFERENCE, }); @@ -128,7 +128,7 @@ async function referenceCapability({ try { const resolvedHead = await git.resolveRef({ fs: efs, - dir, + dir: dir, gitdir: gitDir, ref: reference, depth: 2, @@ -181,7 +181,7 @@ async function listObjects( commits.add(objectId); const readCommitResult = await git.readCommit({ fs: efs, - dir, + dir: dir, gitdir: gitDir, oid: objectId, }); @@ -198,7 +198,7 @@ async function listObjects( trees.add(objectId); const readTreeResult = await git.readTree({ fs: efs, - dir, + dir: dir, gitdir: gitDir, oid: objectId, }); @@ -219,7 +219,7 @@ async function listObjects( tags.add(objectId); const readTagResult = await git.readTag({ fs: efs, - dir, + dir: dir, gitdir: gitDir, oid: objectId, }); diff --git a/src/nodes/NodeManager.ts b/src/nodes/NodeManager.ts index e76ede91d..cb01f510e 100644 --- a/src/nodes/NodeManager.ts +++ b/src/nodes/NodeManager.ts @@ -494,7 +494,7 @@ class NodeManager { * Perform some function on another node over the network with a connection. * Will either retrieve an existing connection, or create a new one if it * doesn't exist. - * for use with normal arrow function + * For use with normal arrow function * @param nodeId Id of target node to communicate with * @param f Function to handle communication * @param ctx diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts index d3f84b010..ddc1c4296 100644 --- a/src/vaults/VaultInternal.ts +++ b/src/vaults/VaultInternal.ts @@ -28,6 +28,7 @@ import { ready, } from '@matrixai/async-init/dist/CreateDestroyStartStop'; import { RWLockWriter } from '@matrixai/async-locks'; +import { timedCancellable as timedCancellableF } from '@matrixai/contexts/dist/functions'; import { context, timedCancellable } from '@matrixai/contexts/dist/decorators'; import { withF, withG } from '@matrixai/resources'; import { tagLast } from './types'; @@ -60,9 +61,10 @@ interface VaultInternal extends CreateDestroyStartStop {} ) class VaultInternal { /** - * Creates a VaultInternal. - * If no state already exists then state for the vault is initialized. - * If state already exists then this just creates the `VaultInternal` instance for managing that state. + * Creates a VaultInternal. + * If no state already exists then a new state for the vault is initialized. + * If state already exists then this just creates the `VaultInternal` + * instance for managing that state. 
*/ public static async createVaultInternal( { @@ -84,8 +86,32 @@ class VaultInternal { fresh?: boolean; logger?: Logger; }, - ctx: ContextTimed, tran?: DBTransaction, + ctx?: Partial, + ): Promise; + @timedCancellable(true) + public static async createVaultInternal( + { + vaultId, + vaultName, + db, + vaultsDbPath, + keyRing, + efs, + fresh = false, + logger = new Logger(this.name), + }: { + vaultId: VaultId; + vaultName?: VaultName; + db: DB; + vaultsDbPath: LevelPath; + keyRing: KeyRing; + efs: EncryptedFS; + fresh?: boolean; + logger?: Logger; + }, + tran: DBTransaction, + @context ctx: ContextTimed, ): Promise { if (tran == null) { return await db.withTransactionF((tran) => @@ -100,8 +126,8 @@ class VaultInternal { fresh, logger, }, - ctx, tran, + ctx, ), ); } @@ -116,7 +142,7 @@ class VaultInternal { efs, logger, }); - await vault.start({ fresh, vaultName }, ctx, tran); + await vault.start({ fresh, vaultName }, tran, ctx); logger.info(`Created ${this.name} - ${vaultIdEncoded}`); return vault; } @@ -146,8 +172,34 @@ class VaultInternal { nodeManager: NodeManager; logger?: Logger; }, - ctx: ContextTimed, tran?: DBTransaction, + ctx?: Partial, + ): Promise; + @timedCancellable(true) + public static async cloneVaultInternal( + { + targetNodeId, + targetVaultNameOrId, + vaultId, + db, + vaultsDbPath, + efs, + keyRing, + nodeManager, + logger = new Logger(this.name), + }: { + targetNodeId: NodeId; + targetVaultNameOrId: VaultId | VaultName; + vaultId: VaultId; + db: DB; + vaultsDbPath: LevelPath; + efs: EncryptedFS; + keyRing: KeyRing; + nodeManager: NodeManager; + logger?: Logger; + }, + tran: DBTransaction, + @context ctx: ContextTimed, ): Promise { if (tran == null) { return await db.withTransactionF((tran) => @@ -163,11 +215,12 @@ class VaultInternal { nodeManager, logger, }, - ctx, tran, + ctx, ), ); } + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); logger.info(`Cloning ${this.name} - ${vaultIdEncoded}`); const vault = new this({ @@ -178,34 +231,38 @@ class VaultInternal { efs, logger, }); - // Make the directory where the .git files will be auto generated and - // where the contents will be cloned to ('contents' file) + // Make the directory where the .git files will be auto generated and where + // the contents will be cloned to ('contents' file) await efs.mkdir(vault.vaultDataDir, { recursive: true }); const [vaultName, remoteVaultId]: [VaultName, VaultId] = - await nodeManager.withConnF(targetNodeId, async (connection) => { - const client = connection.getClient(); - const [request, vaultName, remoteVaultId] = await vault.request( - client, - targetVaultNameOrId, - 'clone', - ); - await git.clone({ - fs: efs, - http: { request }, - dir: vault.vaultDataDir, - gitdir: vault.vaultGitDir, - url: 'http://', - singleBranch: true, - ref: vaultsUtils.canonicalBranchRef, - }); - return [vaultName, remoteVaultId]; - }); + await nodeManager.withConnF( + targetNodeId, + async (connection) => { + const client = connection.getClient(); + const [request, vaultName, remoteVaultId] = await vault.request( + client, + targetVaultNameOrId, + 'clone', + ); + await git.clone({ + fs: efs, + http: { request }, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + url: 'http://', + singleBranch: true, + ref: vaultsUtils.canonicalBranchRef, + }); + return [vaultName, remoteVaultId]; + }, + ctx, + ); const remote: RemoteInfo = { remoteNode: nodesUtils.encodeNodeId(targetNodeId), remoteVault: vaultsUtils.encodeVaultId(remoteVaultId), }; - await vault.start({ vaultName }, ctx, tran); + await 
vault.start({ vaultName }, tran, ctx); // Setting the remote in the metadata await tran.put( [...vault.vaultMetadataDbPath, VaultInternal.remoteKey], @@ -266,56 +323,56 @@ class VaultInternal { } /** - * @param fresh Clears all state before starting - * @param vaultName Name of the vault, Only used when creating a new vault - * @param ctx + * @param fresh Should the state be cleared before starting? + * @param vaultName Name of the vault. Only used when creating a new vault. * @param tran + * @param ctx */ public async start( { - fresh = false, vaultName, - }: { - fresh?: boolean; - vaultName?: VaultName; - }, - ctx?: Partial, - tran?: DBTransaction, - ): Promise; - @timedCancellable(true) - public async start( - { fresh = false, - vaultName, }: { - fresh?: boolean; vaultName?: VaultName; + fresh?: boolean; } = {}, - @context ctx: ContextTimed, tran?: DBTransaction, + ctx?: ContextTimed, ): Promise { if (tran == null) { return await this.db.withTransactionF((tran) => - this.start_({ fresh, vaultName }, tran, ctx), + this.start_({ vaultName, fresh }, tran, ctx), ); } - return await this.start_({ fresh, vaultName }, tran, ctx); + return await this.start_({ vaultName, fresh }, tran, ctx); } /** - * We use a protected start method to avoid the `async-init` lifecycle deadlocking when doing the recursive call to - * create a DBTransaction. + * We use a protected start method to avoid the `async-init` lifecycle + * deadlocking when doing the recursive call to create a DBTransaction. */ protected async start_( { - fresh, vaultName, + fresh, }: { + vaultName?: VaultName; fresh: boolean; + }, + tran?: DBTransaction, + ctx?: Partial, + ): Promise; + @timedCancellable(true) + protected async start_( + { + vaultName, + fresh, + }: { vaultName?: VaultName; + fresh: boolean; }, tran: DBTransaction, - ctx: ContextTimed, + @context ctx: ContextTimed, ): Promise { this.logger.info( `Starting ${this.constructor.name} - ${this.vaultIdEncoded}`, @@ -338,7 +395,7 @@ class VaultInternal { await vaultsUtils.mkdirExists(this.efs, this.vaultDataDir); await vaultsUtils.mkdirExists(this.efs, this.vaultGitDir); await this.setupMeta({ vaultName }, tran); - await this.setupGit(ctx, tran); + await this.setupGit(tran, ctx); this.efsVault = await this.efs.chroot(this.vaultDataDir); this.logger.info( `Started ${this.constructor.name} - ${this.vaultIdEncoded}`, @@ -362,8 +419,8 @@ class VaultInternal { } /** - * We use a protected destroy method to avoid the `async-init` lifecycle deadlocking when doing the recursive call to - * create a DBTransaction. + * We use a protected start method to avoid the `async-init` lifecycle + * deadlocking when doing the recursive call to create a DBTransaction. */ protected async destroy_(tran: DBTransaction): Promise { this.logger.info( @@ -376,7 +433,6 @@ class VaultInternal { }); } catch (e) { if (e.code !== 'ENOENT') throw e; - // Otherwise ignore } this.logger.info( `Destroyed ${this.constructor.name} - ${this.vaultIdEncoded}`, @@ -476,7 +532,8 @@ class VaultInternal { } /** - * With context handler for using a vault in a read-only context for a generator. + * With context handler for using a vault in a read-only context for a + * generator. 
*/ @ready(new vaultsErrors.ErrorVaultNotRunning()) public readG( @@ -493,18 +550,18 @@ class VaultInternal { */ public async writeF( f: (fs: FileSystemWritable) => Promise, - ctx?: Partial, tran?: DBTransaction, + ctx?: Partial, ): Promise; @ready(new vaultsErrors.ErrorVaultNotRunning()) @timedCancellable(true) public async writeF( f: (fs: FileSystemWritable) => Promise, + tran: DBTransaction, @context ctx: ContextTimed, - tran?: DBTransaction, ): Promise { if (tran == null) { - return this.db.withTransactionF((tran) => this.writeF(f, ctx, tran)); + return this.db.withTransactionF((tran) => this.writeF(f, tran, ctx)); } return withF([this.lock.write()], async () => { @@ -512,10 +569,9 @@ class VaultInternal { [...this.vaultMetadataDbPath, VaultInternal.dirtyKey].join(''), ); - // This should really be an internal property - // get whether this is remote, and the remote address - // if it is, we consider this repo an "attached repo" - // this vault is a "mirrored" vault + // This should really be an internal property. Check whether this is the + // remote address. If it is, we consider this repo an "attached repo". + // This vault is a "mirrored" vault. if ( (await tran.get([ ...this.vaultMetadataDbPath, @@ -548,14 +604,29 @@ class VaultInternal { /** * With context handler for using a vault in a writable context for a generator. */ + public writeG( + g: (fs: FileSystemWritable) => AsyncGenerator, + tran?: DBTransaction, + ctx?: ContextTimed, + ): AsyncGenerator; @ready(new vaultsErrors.ErrorVaultNotRunning()) public writeG( g: (fs: FileSystemWritable) => AsyncGenerator, + tran: DBTransaction, ctx: ContextTimed, - tran?: DBTransaction, ): AsyncGenerator { if (tran == null) { - return this.db.withTransactionG((tran) => this.writeG(g, ctx, tran)); + return this.db.withTransactionG((tran) => this.writeG(g, tran, ctx)); + } + + // TODO: check if this works. it probably doesnt cuz of the generator + if (ctx == null) { + const parentThis = this; + const f = async function (ctx: ContextTimed) { + parentThis.writeG(g, tran, ctx); + }; + // Call the method with a created context + timedCancellableF(f, true)(); } const efsVault = this.efsVault; @@ -594,7 +665,7 @@ class VaultInternal { } /** - * Acquire a read-only lock on this vault + * Acquire a read-only lock on this vault. */ @ready(new vaultsErrors.ErrorVaultNotRunning()) public acquireRead(): ResourceAcquire { @@ -611,69 +682,87 @@ class VaultInternal { } /** - * Acquire a read-write lock on this vault + * Acquire a read-write lock on this vault. 
*/ @ready(new vaultsErrors.ErrorVaultNotRunning()) public acquireWrite( - ctx: ContextTimed, tran?: DBTransaction, + ctx?: ContextTimed, ): ResourceAcquire { return async () => { let releaseTran: ResourceRelease | undefined = undefined; const acquire = this.lock.write(); const [release] = await acquire(); + + const acquireF = async ( + ctx: ContextTimed, + ): Promise<[(e?: Error) => Promise, EncryptedFS]> => { + if (tran == null) { + utils.never( + 'This method should not be called without a valid transaction', + ); + } + await tran.lock( + [...this.vaultMetadataDbPath, VaultInternal.dirtyKey].join(''), + ); + if ( + (await tran.get([ + ...this.vaultMetadataDbPath, + VaultInternal.remoteKey, + ])) != null + ) { + // Mirrored vaults are immutable + throw new vaultsErrors.ErrorVaultRemoteDefined(); + } + await tran.put( + [...this.vaultMetadataDbPath, VaultInternal.dirtyKey], + true, + ); + return [ + async (e?: Error) => { + if (e == null) { + try { + // After doing mutation we need to commit the new history + await this.createCommit(ctx); + } catch (e_) { + e = e_; + // Error implies dirty state + await this.cleanWorkingDirectory(ctx); + } + } + // For some reason, the transaction type doesn't properly waterfall + // down to here. + await tran!.put( + [...this.vaultMetadataDbPath, VaultInternal.dirtyKey], + false, + ); + if (releaseTran != null) await releaseTran(e); + await release(e); + }, + this.efsVault, + ]; + }; + if (tran == null) { const acquireTran = this.db.transaction(); [releaseTran, tran] = await acquireTran(); } - // The returned transaction can be undefined, too. We won't handle those - // cases. + // The returned transaction can be undefined. We won't handle those cases. if (tran == null) utils.never('Acquired transactions cannot be null'); - await tran.lock( - [...this.vaultMetadataDbPath, VaultInternal.dirtyKey].join(''), - ); - if ( - (await tran.get([ - ...this.vaultMetadataDbPath, - VaultInternal.remoteKey, - ])) != null - ) { - // Mirrored vaults are immutable - throw new vaultsErrors.ErrorVaultRemoteDefined(); + + // TODO: check if this works + if (ctx == null) { + const fTimedCancellable = timedCancellableF(acquireF, true); + return fTimedCancellable(); + } else { + return acquireF(ctx); } - await tran.put( - [...this.vaultMetadataDbPath, VaultInternal.dirtyKey], - true, - ); - return [ - async (e?: Error) => { - if (e == null) { - try { - // After doing mutation we need to commit the new history - await this.createCommit(ctx); - } catch (e_) { - e = e_; - // Error implies dirty state - await this.cleanWorkingDirectory(ctx); - } - } - // For some reason, the transaction type doesn't properly waterfall - // down to here. - await tran!.put( - [...this.vaultMetadataDbPath, VaultInternal.dirtyKey], - false, - ); - if (releaseTran != null) await releaseTran(e); - await release(e); - }, - this.efsVault, - ]; }; } /** - * Pulls changes to a vault from the vault's default remote. - * If `pullNodeId` and `pullVaultNameOrId` it uses that for the remote instead. + * Pulls changes to a vault from the vault's default remote. If `pullNodeId` + * and `pullVaultNameOrId` it uses that for the remote instead. 
*/ @ready(new vaultsErrors.ErrorVaultNotRunning()) public async pullVault( @@ -687,6 +776,7 @@ class VaultInternal { pullVaultNameOrId?: VaultId | VaultName; }, tran?: DBTransaction, + ctx?: ContextTimed, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => @@ -697,10 +787,26 @@ class VaultInternal { pullVaultNameOrId, }, tran, + ctx, ), ); } + if (ctx == null) { + const f = async (ctx: ContextTimed) => { + return this.pullVault( + { + nodeManager, + pullNodeId, + pullVaultNameOrId, + }, + tran, + ctx, + ); + }; + return timedCancellableF(f, true)(); + } + // Keeps track of whether the metadata needs changing to avoid unnecessary db ops // 0 = no change, 1 = change with vault ID, 2 = change with vault name let metaChange = 0; @@ -758,10 +864,11 @@ class VaultInternal { }); return remoteVaultId; }, + ctx, ); } catch (e) { - // If the error flag set, and we have the generalised SmartHttpError from - // isomorphic git then we need to throw the polykey error + // If the error flag is set, and we have the generalised SmartHttpError from + // isomorphic git, then we need to throw the Polykey error. if (e instanceof git.Errors.MergeNotSupportedError) { throw new vaultsErrors.ErrorVaultsMergeConflict(e.message, { cause: e, @@ -822,20 +929,20 @@ class VaultInternal { } // Dirty: boolean - // name: string | undefined + // Name: string | undefined } /** * Does an idempotent initialization of the git repository for the vault. * If the vault is in a dirty state then we clean up the working directory - * or any history not part of the canonicalBranch. + * or any history not part of the canonical branch. */ protected async setupGit( - ctx: ContextTimed, tran: DBTransaction, + ctx: ContextTimed, ): Promise { - // Initialization is idempotent - // It works even with an existing git repository + // Initialization is idempotent. It works even with an existing git + // repository. await git.init({ fs: this.efs, dir: this.vaultDataDir, @@ -853,8 +960,8 @@ class VaultInternal { }); commitIdLatest = commits[0]?.oid as CommitId | undefined; } catch (e) { - // Initialized repositories do not have any commits - // It complains that `refs/heads/master` file does not exist + // Initialized repositories do not have any commits. It complains that + // `refs/heads/master` file does not exist. if (!(e instanceof git.Errors.NotFoundError)) { throw e; } @@ -879,17 +986,17 @@ class VaultInternal { force: true, }); } else { - // Checking for dirty + // Checking for dirty state if ( (await tran.get([ ...this.vaultMetadataDbPath, VaultInternal.dirtyKey, ])) === true ) { - // Force checkout out to the latest commit - // This ensures that any uncommitted state is dropped + // Force checkout out to the latest commit. This ensures that any + // uncommitted state is dropped. A global garbage collection is + // executed immediately after. await this.cleanWorkingDirectory(ctx); - // Do global GC operation await this.garbageCollectGitObjectsGlobal(ctx); // Setting dirty back to false @@ -903,19 +1010,20 @@ class VaultInternal { } /** - * Creates a request arrow function that implements an api that `isomorphic-git` expects to use when making a http - * request. It makes RPC calls to `vaultsGitInfoGet` for the ref advertisement phase and `vaultsGitPackGet` for the - * git pack phase. + * Creates a request arrow function that implements an API that `isomorphic-git` + * expects to use when making a HTTP request. 
It makes RPC calls to + * `vaultsGitInfoGet` for the ref advertisement phase and `vaultsGitPackGet` + * for the git pack phase. * - * `vaultsGitInfoGet` wraps a call to `gitHttp.advertiseRefGenerator` and `vaultsGitPackGet` to - * `gitHttp.generatePackRequest`. + * `vaultsGitInfoGet` wraps a call to `gitHttp.advertiseRefGenerator` and + * `vaultsGitPackGet` to `gitHttp.generatePackRequest`. * * ``` * ┌─────────┐ ┌───────────────────────────┐ * │ │ │ │ * ┌──────────────────────┐ │ RPC │ │ │ * │ │ │ │ │ *advertiseRefGenerator() │ - * │ ├────────┼─────────┼────► │ + * │ ├────────┼─────────┼────▶ │ * │ vault.request() │ │ │ │ │ * │ │ │ │ └────┬──────────────────────┘ * │ ├──┐ │ │ │ @@ -1022,8 +1130,9 @@ class VaultInternal { /** * Creates a commit while moving the canonicalBranch reference to that new commit. - * If the commit creates a branch from the canonical history. Then the new commit becomes the new canonical history - * and the old history is removed from the old canonical head to the branch point. This is to maintain the strict + * If the commit creates a branch from the canonical history. Then the new commit + * becomes the new canonical history and the old history is removed from the old + * canonical head to the branch point. This is to maintain the strict * non-branching linear history. */ protected async createCommit(ctx: ContextTimed): Promise { @@ -1064,26 +1173,28 @@ class VaultInternal { workingDirStatus, stageStatus, ] of statusMatrix) { - /* - Type StatusRow = [Filename, HeadStatus, WorkdirStatus, StageStatus]. - The HeadStatus status is either absent (0) or present (1). - The WorkdirStatus status is either absent (0), identical to HEAD (1), or different from HEAD (2). - The StageStatus status is either absent (0), identical to HEAD (1), identical to WORKDIR (2), or different from WORKDIR (3). - - ```js - // example StatusMatrix - [ - ["a.txt", 0, 2, 0], // new, untracked - ["b.txt", 0, 2, 2], // added, staged - ["c.txt", 0, 2, 3], // added, staged, with unstaged changes - ["d.txt", 1, 1, 1], // unmodified - ["e.txt", 1, 2, 1], // modified, unstaged - ["f.txt", 1, 2, 2], // modified, staged - ["g.txt", 1, 2, 3], // modified, staged, with unstaged changes - ["h.txt", 1, 0, 1], // deleted, unstaged - ["i.txt", 1, 0, 0], // deleted, staged - ] - ``` + /** + * Type StatusRow = [Filename, HeadStatus, WorkdirStatus, StageStatus]. + * The HeadStatus status is either absent (0) or present (1). + * The WorkdirStatus status is either absent (0), identical to HEAD (1), + * or different from HEAD (2). + * The StageStatus status is either absent (0), identical to HEAD (1), + * identical to WORKDIR (2), or different from WORKDIR (3). 
+ * + * ```js + * // Example StatusMatrix + * [ + * ["a.txt", 0, 2, 0], // new, untracked + * ["b.txt", 0, 2, 2], // added, staged + * ["c.txt", 0, 2, 3], // added, staged, unstaged changes + * ["d.txt", 1, 1, 1], // unmodified + * ["e.txt", 1, 2, 1], // modified, unstaged + * ["f.txt", 1, 2, 2], // modified, staged + * ["g.txt", 1, 2, 3], // modified, unstaged, unstaged changes + * ["h.txt", 1, 0, 1], // deleted, unstaged + * ["i.txt", 1, 0, 0], // deleted, staged + * ] + * ``` */ ctx.signal.throwIfAborted(); const status = `${HEADStatus}${workingDirStatus}${stageStatus}`; @@ -1109,7 +1220,8 @@ class VaultInternal { message.push(`${filePath} deleted`); break; default: - // We don't handle untracked and partially staged files since we add all files to staging before processing + // We don't handle untracked and partially staged files since we add + // all files to staging before processing. utils.never( `Status ${status} is unhandled because it was unexpected state`, ); @@ -1143,9 +1255,10 @@ class VaultInternal { } /** - * Cleans the git working directory by checking out the canonicalBranch. - * This will remove any un-committed changes since any untracked or modified files outside a commit is dirty state. - * Dirty state should only happen if the usual commit procedure was interrupted ungracefully. + * Cleans the git working directory by checking out the canonical branch. + * This will remove any un-committed changes since any untracked or modified + * files outside a commit is dirty state. Dirty state should only happen if + * the usual commit procedure was interrupted ungracefully. */ protected async cleanWorkingDirectory(ctx: ContextTimed): Promise { // Check the status matrix for any un-staged file changes @@ -1186,8 +1299,9 @@ class VaultInternal { } /** - * This will walk the current canonicalBranch history and delete any objects that are not a part of it. - * This is costly since it will compare the walked tree with all existing objects. + * This will walk the current canonical branch history and delete any objects + * that are not a part of it. This is costly since it will compare the walked + * tree with all existing objects. */ protected async garbageCollectGitObjectsGlobal( ctx: ContextTimed, @@ -1220,8 +1334,8 @@ class VaultInternal { for (const objectReachable of reachableObjects) { objects.delete(objectReachable); } - // Any objects left in `objects` was unreachable, thus they are a part of orphaned branches - // So we want to delete them. + // Any objects left in `objects` was unreachable, thus they are a part of + // orphaned branches, so we want to delete them. const deletePs: Array> = []; for (const objectId of objects) { deletePs.push( @@ -1232,8 +1346,9 @@ class VaultInternal { } /** - * This will walk from the `startId` to the `StopId` deleting objects as it goes. - * This is smarter since it only walks over the old history and not everything. + * This will walk from the `startId` to the `StopId` deleting objects as it + * goes. This is smarter since it only walks over the old history and not + * everything. 
*/ protected async garbageCollectGitObjectsLocal( startId: string, diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index b7c62c4f3..86b35f343 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -1,5 +1,5 @@ import type { LockRequest } from '@matrixai/async-locks'; -import type { ContextTimed } from '@matrixai/contexts'; +import type { ContextTimed, ContextTimedInput } from '@matrixai/contexts'; import type { DBTransaction, LevelPath } from '@matrixai/db'; import type { VaultId, @@ -44,7 +44,7 @@ import * as nodesUtils from '../nodes/utils'; import * as keysUtils from '../keys/utils'; /** - * Object map pattern for each vault + * Object map pattern for each vault. */ type VaultMap = Map; @@ -92,7 +92,7 @@ class VaultManager { fs?: FileSystem; logger?: Logger; fresh?: boolean; - }) { + }): Promise { logger.info(`Creating ${this.name}`); logger.info(`Setting vaults path to ${vaultsPath}`); const vaultManager = new this({ @@ -186,7 +186,6 @@ class VaultManager { let efs: EncryptedFS; try { efsDb = await DB.createDB({ - fresh, crypto: { key: vaultKey, ops: { @@ -206,11 +205,12 @@ class VaultManager { }, dbPath: this.efsPath, logger: this.logger.getChild('EFS Database'), + fresh: fresh, }); efs = await EncryptedFS.createEncryptedFS({ - fresh, db: efsDb, logger: this.logger.getChild('EncryptedFileSystem'), + fresh: fresh, }); } catch (e) { if (e instanceof encryptedFsErrors.ErrorEncryptedFSKey) { @@ -233,7 +233,7 @@ class VaultManager { this.efs = efs; this.logger.info(`Started ${this.constructor.name}`); } catch (e) { - this.logger.warn(`Failed Starting ${this.constructor.name}`); + this.logger.warn(`Failed starting ${this.constructor.name}`); await this.efs?.stop(); await this.efsDb?.stop(); throw e; @@ -243,9 +243,8 @@ class VaultManager { public async stop(): Promise { this.logger.info(`Stopping ${this.constructor.name}`); - - // Iterate over vaults in memory and destroy them, ensuring that - // the working directory commit state is saved + // Iterate over vaults in memory and destroy them, ensuring that the working + // directory commit state is saved. const promises: Array> = []; for (const vaultIdString of this.vaultMap.keys()) { const vaultId = IdInternal.fromString(vaultIdString); @@ -271,7 +270,6 @@ class VaultManager { public async destroy(): Promise { this.logger.info(`Destroying ${this.constructor.name}`); await this.efsDb.start({ - fresh: false, crypto: { key: this.vaultKey, ops: { @@ -289,6 +287,7 @@ class VaultManager { }, }, }, + fresh: false, }); await this.efs.destroy(); await this.efsDb.stop(); @@ -312,24 +311,23 @@ class VaultManager { } /** - * Constructs a new vault instance with a given name and - * stores it in memory + * Constructs a new Vault instance with a given name and stores it in memory. 
*/ public async createVault( vaultName: VaultName, - ctx?: Partial, tran?: DBTransaction, + ctx?: Partial, ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) @timedCancellable(true) public async createVault( vaultName: VaultName, + tran: DBTransaction, @context ctx: ContextTimed, - tran?: DBTransaction, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.createVault(vaultName, ctx, tran), + this.createVault(vaultName, tran, ctx), ); } // Adding vault to name map @@ -350,7 +348,7 @@ class VaultManager { ); const vaultIdString = vaultId.toString() as VaultIdString; return await this.vaultLocks.withF( - [vaultId.toString(), RWLockWriter, 'write', ctx], + [vaultId.toString(), RWLockWriter, 'write'], async () => { // Creating vault const vault = await VaultInternal.createVaultInternal( @@ -364,8 +362,8 @@ class VaultManager { fresh: true, logger: this.logger.getChild(VaultInternal.name), }, - ctx, tran, + ctx, ); // Adding vault to object map this.vaultMap.set(vaultIdString, vault); @@ -375,8 +373,8 @@ class VaultManager { } /** - * Retrieves the vault metadata using the VaultId - * and parses it to return the associated vault name + * Retrieves the vault metadata using the VaultId and parses it to return the + * associated vault name. */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async getVaultMeta( @@ -415,29 +413,28 @@ class VaultManager { } /** - * Removes the metadata and EFS state of a vault using a - * given VaultId + * Removes the metadata and EFS state of a vault using a given VaultId. */ public async destroyVault( vaultId: VaultId, - ctx?: Partial, tran?: DBTransaction, + ctx?: Partial, ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) @timedCancellable(true) public async destroyVault( vaultId: VaultId, + tran: DBTransaction, @context ctx: ContextTimed, - tran?: DBTransaction, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.destroyVault(vaultId, ctx, tran), + this.destroyVault(vaultId, tran, ctx), ); } await this.vaultLocks.withF( - [vaultId.toString(), RWLockWriter, 'write', ctx], + [vaultId.toString(), RWLockWriter, 'write'], async () => { await tran.lock([...this.vaultsDbPath, vaultId].join('')); // Ensure protection from write skew @@ -453,7 +450,7 @@ class VaultManager { `Destroying Vault ${vaultsUtils.encodeVaultId(vaultId)}`, ); const vaultIdString = vaultId.toString() as VaultIdString; - const vault = await this.getVault(vaultId, tran); + const vault = await this.getVault(vaultId, tran, ctx); // Destroying vault state and metadata await vault.stop(); await vault.destroy(tran); @@ -467,15 +464,22 @@ class VaultManager { } /** - * Removes vault from the vault map + * Removes a vault from the vault map. 
*/ - @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async closeVault( vaultId: VaultId, tran?: DBTransaction, + ctx?: Partial, + ): Promise; + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @timedCancellable(true) + public async closeVault( + vaultId: VaultId, + tran: DBTransaction, + @context ctx: ContextTimed, ): Promise { if (tran == null) { - return this.db.withTransactionF((tran) => this.closeVault(vaultId, tran)); + return this.db.withTransactionF((tran) => this.closeVault(vaultId, tran, ctx)); } if ((await this.getVaultName(vaultId, tran)) == null) { @@ -486,7 +490,7 @@ class VaultManager { [vaultId.toString(), RWLockWriter, 'write'], async () => { await tran.lock([...this.vaultsDbPath, vaultId].join('')); - const vault = await this.getVault(vaultId, tran); + const vault = await this.getVault(vaultId, tran, ctx); await vault.stop(); this.vaultMap.delete(vaultIdString); }, @@ -494,8 +498,7 @@ class VaultManager { } /** - * Lists the vault name and associated VaultId of all - * the vaults stored + * Lists the vault name and associated VaultId of all the stored vaults. */ public async listVaults( ctx?: Partial, @@ -525,30 +528,30 @@ class VaultManager { } /** - * Changes the vault name metadata of a VaultId + * Changes the vault name metadata of a VaultId. */ public async renameVault( vaultId: VaultId, newVaultName: VaultName, - ctx?: Partial, tran?: DBTransaction, + ctx?: Partial, ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) @timedCancellable(true) public async renameVault( vaultId: VaultId, newVaultName: VaultName, + tran: DBTransaction, @context ctx: ContextTimed, - tran?: DBTransaction, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.renameVault(vaultId, newVaultName, ctx, tran), + this.renameVault(vaultId, newVaultName, tran, ctx), ); } await this.vaultLocks.withF( - [vaultId.toString(), RWLockWriter, 'write', ctx], + [vaultId.toString(), RWLockWriter, 'write'], async () => { await tran.lock( [...this.vaultsNamesDbPath, newVaultName] @@ -593,7 +596,7 @@ class VaultManager { } /** - * Retrieves the VaultId associated with a vault name + * Retrieves the VaultId associated with a vault name. */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async getVaultId( @@ -616,7 +619,7 @@ class VaultManager { } /** - * Retrieves the vault name associated with a VaultId + * Retrieves the vault name associated with a VaultId. */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async getVaultName( @@ -656,8 +659,8 @@ class VaultManager { } /** - * Sets clone, pull and scan permissions of a vault for a - * gestalt and send a notification to this gestalt + * Sets clone, pull and scan permissions of a vault for a gestalt and send a + * notification to this gestalt. 
*/ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async shareVault( @@ -673,13 +676,12 @@ class VaultManager { const vaultMeta = await this.getVaultMeta(vaultId, tran); if (vaultMeta == null) throw new vaultsErrors.ErrorVaultsVaultUndefined(); - // NodeId permissions translated to other nodes in - // a gestalt by other domains + // NodeId permissions translated to other nodes in a gestalt by other domains await this.gestaltGraph.setGestaltAction(['node', nodeId], 'scan', tran); await this.acl.setVaultAction(vaultId, nodeId, 'pull', tran); await this.acl.setVaultAction(vaultId, nodeId, 'clone', tran); await this.notificationsManager.sendNotification({ - nodeId, + nodeId: nodeId, data: { type: 'VaultShare', vaultId: vaultsUtils.encodeVaultId(vaultId), @@ -693,8 +695,7 @@ class VaultManager { } /** - * Unsets clone, pull and scan permissions of a vault for a - * gestalt + * Unsets clone, pull and scan permissions of a vault for a gestalt. */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async unshareVault( @@ -722,20 +723,20 @@ class VaultManager { public async cloneVault( nodeId: NodeId, vaultNameOrId: VaultId | VaultName, - ctx?: Partial, tran?: DBTransaction, + ctx?: Partial, ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) @timedCancellable(true) public async cloneVault( nodeId: NodeId, vaultNameOrId: VaultId | VaultName, + tran: DBTransaction, @context ctx: ContextTimed, - tran?: DBTransaction, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.cloneVault(nodeId, vaultNameOrId, ctx, tran), + this.cloneVault(nodeId, vaultNameOrId, tran, ctx), ); } @@ -759,8 +760,8 @@ class VaultManager { efs: this.efs, logger: this.logger.getChild(VaultInternal.name), }, - ctx, tran, + ctx, ); this.vaultMap.set(vaultIdString, vault); const vaultMetadata = (await this.getVaultMeta(vaultId, tran))!; @@ -808,21 +809,19 @@ class VaultManager { } /** - * Pulls the contents of a remote vault into an existing vault - * instance + * Pulls the contents of a remote vault into an existing vault instance. */ public async pullVault( { vaultId, pullNodeId, pullVaultNameOrId, - tran, }: { vaultId: VaultId; pullNodeId?: NodeId; pullVaultNameOrId?: VaultId | VaultName; - tran?: DBTransaction; }, + tran?: DBTransaction, ctx?: Partial, ): Promise; @timedCancellable(true) @@ -831,18 +830,17 @@ class VaultManager { vaultId, pullNodeId, pullVaultNameOrId, - tran, }: { vaultId: VaultId; pullNodeId?: NodeId; pullVaultNameOrId?: VaultId | VaultName; - tran?: DBTransaction; }, + tran: DBTransaction, @context ctx: ContextTimed, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.pullVault({ vaultId, pullNodeId, pullVaultNameOrId, tran }, ctx), + this.pullVault({ vaultId, pullNodeId, pullVaultNameOrId }, tran, ctx), ); } @@ -859,36 +857,43 @@ class VaultManager { pullVaultNameOrId: pullVaultNameOrId, }, tran, + ctx, ); }, ); } /** - * Handler for receiving http GET requests when being - * cloned or pulled from + * Handler for receiving http GET requests when being cloned or pulled from. 
*/ + public handleInfoRequest( + vaultId: VaultId, + tran?: DBTransaction, + ctx?: Partial, + ): AsyncGenerator; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async *handleInfoRequest( vaultId: VaultId, - ctx: ContextTimed, - tran?: DBTransaction, + tran: DBTransaction, + @context ctx: ContextTimed, ): AsyncGenerator { if (tran == null) { const handleInfoRequest = (tran: DBTransaction) => - this.handleInfoRequest(vaultId, ctx, tran); + this.handleInfoRequest(vaultId, tran, ctx); return yield* this.db.withTransactionG(async function* (tran) { return yield* handleInfoRequest(tran); }); } + const efs = this.efs; - const vault = await this.getVault(vaultId, tran); + const vault = await this.getVault(vaultId, tran, ctx); return yield* withG( [ - this.vaultLocks.lock([vaultId.toString(), RWLockWriter, 'read']), + this.vaultLocks.lock([vaultId.toString(), RWLockWriter, 'read'], ctx), vault.getLock().read(), ], async function* (): AsyncGenerator { + ctx.signal.throwIfAborted(); // Read the commit state of the vault yield* gitHttp.advertiseRefGenerator( { @@ -903,26 +908,31 @@ class VaultManager { } /** - * Handler for receiving http POST requests when being - * cloned or pulled from + * Handler for receiving http POST requests when being cloned or pulled from. */ + public handlePackRequest( + vaultId: VaultId, + body: Array, + tran?: DBTransaction, + ctx?: Partial, + ): AsyncGenerator; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async *handlePackRequest( vaultId: VaultId, body: Array, - ctx: ContextTimed, - tran?: DBTransaction, + tran: DBTransaction, + @context ctx: ContextTimed, ): AsyncGenerator { if (tran == null) { // Lambda to maintain `this` context const handlePackRequest = (tran: DBTransaction) => - this.handlePackRequest(vaultId, body, ctx, tran); + this.handlePackRequest(vaultId, body, tran, ctx); return yield* this.db.withTransactionG(async function* (tran) { return yield* handlePackRequest(tran); }); } - const vault = await this.getVault(vaultId, tran); + const vault = await this.getVault(vaultId, tran, ctx); const efs = this.efs; yield* withG( [ @@ -930,6 +940,7 @@ class VaultManager { vault.getLock().read(ctx), ], async function* (): AsyncGenerator { + ctx.signal.throwIfAborted(); yield* gitHttp.generatePackRequest( { efs, @@ -944,11 +955,19 @@ class VaultManager { } /** - * Retrieves all the vaults for a peers node + * Retrieves all the vaults for a peer's node. */ + public scanVaults( + targetNodeId: NodeId, + ctx?: Partial, + ): AsyncGenerator<{ + vaultName: VaultName; + vaultIdEncoded: VaultIdEncoded; + vaultPermissions: VaultAction[]; + }>; public async *scanVaults( targetNodeId: NodeId, - ctx: ContextTimed, + @context ctx: ContextTimed, ): AsyncGenerator<{ vaultName: VaultName; vaultIdEncoded: VaultIdEncoded; @@ -973,16 +992,26 @@ class VaultManager { }; } }, + ctx, ); } /** * Returns all the shared vaults for a NodeId. 
*/ - public async *handleScanVaults( + public handleScanVaults( nodeId: NodeId, - ctx: ContextTimed, tran?: DBTransaction, + ctx?: Partial, + ): AsyncGenerator<{ + vaultId: VaultId; + vaultName: VaultName; + vaultPermissions: VaultAction[]; + }>; + public async *handleScanVaults( + nodeId: NodeId, + tran: DBTransaction, + @context ctx: ContextTimed, ): AsyncGenerator<{ vaultId: VaultId; vaultName: VaultName; @@ -991,7 +1020,7 @@ class VaultManager { if (tran == null) { // Lambda to maintain `this` context const handleScanVaults = (tran: DBTransaction) => - this.handleScanVaults(nodeId, ctx, tran); + this.handleScanVaults(nodeId, tran, ctx); return yield* this.db.withTransactionG(async function* (tran) { return yield* handleScanVaults(tran); }); @@ -1048,32 +1077,27 @@ class VaultManager { return vaultId; } - protected async getVault( - vaultId: VaultId, - tran: DBTransaction, - ctx?: Partial, - ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - @timedCancellable(true) protected async getVault( vaultId: VaultId, tran: DBTransaction, - @context ctx: ContextTimed, + ctx: ContextTimed, ): Promise { if (tran == null) { - return this.db.withTransactionF((tran) => this.getVault(vaultId, tran)); + return this.db.withTransactionF((tran) => this.getVault(vaultId, tran, ctx)); } + const vaultIdString = vaultId.toString() as VaultIdString; - // 1. get the vault, if it exists then return that + // 1. Try to get the vault. If it exists then return that. const vault = this.vaultMap.get(vaultIdString); if (vault != null) return vault; - // No vault or state exists then we throw error? + // If no vault or state exists, then we throw an error if ((await this.getVaultMeta(vaultId, tran)) == null) { throw new vaultsErrors.ErrorVaultsVaultUndefined( `Vault ${vaultsUtils.encodeVaultId(vaultId)} doesn't exist`, ); } - // 2. if the state exists then create, add to map and return that + // 2. If the state doesn't exist then create it, add to map and return that. const newVault = await VaultInternal.createVaultInternal( { vaultId: vaultId, @@ -1083,44 +1107,45 @@ class VaultManager { vaultsDbPath: this.vaultsDbPath, logger: this.logger.getChild(VaultInternal.name), }, - ctx, tran, + ctx, ); this.vaultMap.set(vaultIdString, newVault); return newVault; } /** - * Takes a function and runs it with the listed vaults. locking is handled automatically + * Takes a function and runs it with the listed vaults. Locking is handled + * automatically. 
* @param vaultIds List of vault ID for vaults you wish to use * @param f Function you wish to run with the provided vaults - * @param ctx * @param tran + * @param ctx */ public async withVaults( vaultIds: VaultId[], f: (...args: Vault[]) => Promise, - ctx?: Partial, tran?: DBTransaction, + ctx?: Partial, ): Promise; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) @timedCancellable(true) public async withVaults( vaultIds: VaultId[], f: (...args: Vault[]) => Promise, + tran: DBTransaction, @context ctx: ContextTimed, - tran?: DBTransaction, ): Promise { if (tran == null) { return this.db.withTransactionF((tran) => - this.withVaults(vaultIds, f, ctx, tran), + this.withVaults(vaultIds, f, tran, ctx), ); } // Obtaining locks const vaultLocks: Array> = vaultIds.map( (vaultId) => { - return [vaultId.toString(), RWLockWriter, 'read', ctx]; + return [vaultId.toString(), RWLockWriter, 'read']; }, ); // Running the function with locking @@ -1128,7 +1153,7 @@ class VaultManager { // Getting the vaults while locked const vaults = await Promise.all( vaultIds.map(async (vaultId) => { - return await this.getVault(vaultId, tran); + return await this.getVault(vaultId, tran, ctx); }), ); return await f(...vaults); @@ -1136,16 +1161,25 @@ class VaultManager { } /** - * Takes a generator and runs it with the listed vaults. locking is handled automatically + * Takes a generator and runs it with the listed vaults. Locking is handled + * automatically. * @param vaultIds List of vault ID for vaults you wish to use * @param g Generator you wish to run with the provided vaults * @param tran + * @param ctx */ + public withVaultsG( + vaultIds: Array, + g: (...args: Array) => AsyncGenerator, + tran?: DBTransaction, + ctx?: Partial, + ): AsyncGenerator; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async *withVaultsG( vaultIds: Array, g: (...args: Array) => AsyncGenerator, - tran?: DBTransaction, + tran: DBTransaction, + @context ctx: ContextTimed, ): AsyncGenerator { if (tran == null) { return yield* this.db.withTransactionG((tran) => @@ -1167,7 +1201,7 @@ class VaultManager { // Getting the vaults while locked const vaults = await Promise.all( vaultIds.map(async (vaultId) => { - return await vaultThis.getVault(vaultId, tran); + return await vaultThis.getVault(vaultId, tran, ctx); }), ); return yield* g(...vaults); diff --git a/src/vaults/VaultOps.ts b/src/vaults/VaultOps.ts index 03211814a..06efbc98e 100644 --- a/src/vaults/VaultOps.ts +++ b/src/vaults/VaultOps.ts @@ -1,9 +1,7 @@ -/** - * Adds a secret to the vault - */ import type Logger from '@matrixai/logger'; -import type { Vault } from './Vault'; +import type { ContextTimed } from '@matrixai/contexts'; import type { Stat } from 'encryptedfs'; +import type { Vault } from './Vault'; import path from 'path'; import * as vaultsErrors from './errors'; import * as vaultsUtils from './utils'; @@ -12,50 +10,30 @@ type FileOptions = { recursive?: boolean; }; -async function addSecret( - vault: Vault, - secretName: string, - content: Buffer | string, - logger?: Logger, -): Promise { - await vault.writeF(async (efs) => { - if (await efs.exists(secretName)) { - throw new vaultsErrors.ErrorSecretsSecretDefined( - `${secretName} already exists, try updating instead`, - ); - } - - // Create the directory to the secret if it doesn't exist - await vaultsUtils.mkdirExists(efs, path.dirname(secretName)); - // Write the secret into the vault - await efs.writeFile(secretName, content); - }); - - logger?.info(`Added secret ${secretName} to vault 
${vault.vaultId}`); -} - /** - * Changes the contents of a secret + * Adds a secret to the vault */ -async function updateSecret( +async function addSecret( vault: Vault, secretName: string, content: Buffer | string, logger?: Logger, + ctx?: ContextTimed, ): Promise { - await vault.writeF(async (efs) => { - // Throw error if secret does not exist - if (!(await efs.exists(secretName))) { - throw new vaultsErrors.ErrorSecretsSecretUndefined( - 'Secret does not exist, try adding it instead.', - ); - } - - // Write secret into vault - await efs.writeFile(secretName, content); - }); - - logger?.info(`Updated secret ${secretName} in vault ${vault.vaultId}`); + await vault.writeF( + async (efs) => { + if (await efs.exists(secretName)) { + throw new vaultsErrors.ErrorSecretsSecretDefined( + `A secret with name '${secretName}' already exists`, + ); + } + await vaultsUtils.mkdirExists(efs, path.dirname(secretName)); + await efs.writeFile(secretName, content); + }, + undefined, + ctx, + ); + logger?.info(`Added secret ${secretName} to vault ${vault.vaultId}`); } /** @@ -76,14 +54,13 @@ async function renameSecret( await efs.rename(secretName, secretNameNew); }); logger?.info( - `Renamed secret at ${secretName} to ${secretNameNew} in vault ${vault.vaultId}`, + `Renamed secret ${secretName} to ${secretNameNew} in vault ${vault.vaultId}`, ); } /** * Returns the contents of a secret */ -// TODO: use contexts async function getSecret(vault: Vault, secretName: string): Promise { try { return await vault.readF(async (efs) => { @@ -164,8 +141,8 @@ async function deleteSecret( } /** - * Adds an empty directory to the root of the vault. - * i.e. mkdir("folder", { recursive = false }) creates the "/folder" directory + * Adds an empty directory to the root of the vault. Note that efs does not + * track empty directories. */ async function mkdir( vault: Vault, @@ -186,7 +163,7 @@ async function mkdir( logger?.error(`Failed to create directory '${dirPath}'. Reason: ${e.code}`); if (e.code === 'ENOENT' && !recursive) { throw new vaultsErrors.ErrorVaultsRecursive( - `Could not create direcotry '${dirPath}' without recursive option`, + `Could not create directory '${dirPath}' without recursive option`, { cause: e }, ); } @@ -298,7 +275,6 @@ async function writeSecret( export { addSecret, - updateSecret, renameSecret, getSecret, statSecret, diff --git a/src/vaults/utils.ts b/src/vaults/utils.ts index a946127b1..7dd0de702 100644 --- a/src/vaults/utils.ts +++ b/src/vaults/utils.ts @@ -16,16 +16,16 @@ import * as nodesUtils from '../nodes/utils'; import * as validationErrors from '../validation/errors'; /** - * Vault history is designed for linear-history - * The canonical branch represents the one and only true timeline - * In the future, we can introduce non-linear history - * Where branches are automatically made when new timelines are created + * Vault history is designed for linear-history. + * The canonical branch represents the one and only true timeline. + * In the future, we can introduce non-linear history where branches are + * automatically made when new timelines are created. */ const canonicalBranch = 'master'; const canonicalBranchRef = 'refs/heads/' + canonicalBranch; /** - * Vault reference can be HEAD, any of the special tags or a commit ID + * Vault reference can be HEAD, any of the special tags, or a commit ID. 
*/ function validateRef(ref: any): ref is VaultRef { return refs.includes(ref) || validateCommitId(ref); @@ -38,7 +38,8 @@ function assertRef(ref: any): asserts ref is VaultRef { } /** - * Commit ids are SHA1 hashes encoded as 40-character long lowercase hexadecimal strings + * Commit IDs are SHA1 hashes encoded as 40-character long lowercase + * hexadecimal strings. */ function validateCommitId(commitId: any): commitId is CommitId { return /^[a-f0-9]{40}$/.test(commitId); @@ -70,7 +71,7 @@ async function* readDirRecursively( async function* walkFs( efs: FileSystemReadable, path: string = '.', -): AsyncGenerator { +): AsyncGenerator { const shortList: Array = [path]; let path_: Path | undefined = undefined; while ((path_ = shortList.shift()) != null) { @@ -102,7 +103,11 @@ function parseVaultAction(data: any): VaultAction { return data; } -async function deleteObject(fs: EncryptedFS, gitdir: string, ref: string) { +async function deleteObject( + fs: EncryptedFS, + gitdir: string, + ref: string, +): Promise { const bucket = ref.slice(0, 2); const shortRef = ref.slice(2); const objectPath = path.join(gitdir, 'objects', bucket, shortRef); @@ -113,32 +118,33 @@ async function deleteObject(fs: EncryptedFS, gitdir: string, ref: string) { } } -async function mkdirExists(efs: FileSystemWritable, directory: string) { +async function mkdirExists( + efs: FileSystemWritable, + directory: string, +): Promise { try { await efs.mkdir(directory, { recursive: true }); } catch (e) { - if (e.code !== 'EEXIST') { - throw e; - } + if (e.code !== 'EEXIST') throw e; } } /** - * Converts a `Buffer` to a `Uint8Array` without copying the contents + * Converts a `Buffer` to a `Uint8Array` without copying the contents. */ function bufferToUint8ArrayCopyless(data: Buffer): Uint8Array { return new Uint8Array(data.buffer, data.byteOffset, data.byteLength); } /** - * Converts a `Uint8Array` to a `Buffer` without copying the contents + * Converts a `Uint8Array` to a `Buffer` without copying the contents. */ function uint8ArrayToBufferCopyless(data: Uint8Array): Buffer { return Buffer.from(data.buffer, data.byteOffset, data.byteLength); } /** - * Concatenates `Buffers` or `Uint8Array`s into a `Uint8Array` + * Concatenates `Buffers` or `Uint8Array`s into a `Uint8Array`. 
*/ function uint8ArrayConcat(list: Array): Uint8Array { return bufferToUint8ArrayCopyless(Buffer.concat(list)); From fda4be8a1df925bc249303ad407313b6b87e3b8b Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Tue, 7 Jan 2025 15:29:33 +1100 Subject: [PATCH 08/14] feat: added context cancellation to the entire `vault` domain --- src/acl/ACL.ts | 4 +- src/client/handlers/IdentitiesAuthenticate.ts | 14 +- .../handlers/IdentitiesAuthenticatedGet.ts | 16 +- .../handlers/IdentitiesInfoConnectedGet.ts | 13 +- src/client/handlers/IdentitiesInfoGet.ts | 14 +- src/client/handlers/KeysCertsChainGet.ts | 16 +- src/client/handlers/KeysCertsGet.ts | 4 +- src/client/handlers/KeysEncrypt.ts | 4 +- src/client/handlers/KeysKeyPairRenew.ts | 2 - src/client/handlers/KeysVerify.ts | 2 +- src/client/handlers/NodesAdd.ts | 11 +- src/client/handlers/NodesClaim.ts | 6 +- src/client/handlers/NodesFind.ts | 18 +- src/client/handlers/NodesGetAll.ts | 16 +- src/client/handlers/NodesListConnections.ts | 16 +- src/client/handlers/NodesPing.ts | 2 +- .../handlers/NotificationsInboxClear.ts | 5 +- src/client/handlers/NotificationsInboxRead.ts | 17 +- .../handlers/NotificationsOutboxRead.ts | 17 +- src/client/handlers/NotificationsSend.ts | 4 +- src/client/handlers/VaultsClone.ts | 23 +- src/client/handlers/VaultsCreate.ts | 2 +- src/client/handlers/VaultsDelete.ts | 4 +- src/client/handlers/VaultsList.ts | 4 +- src/client/handlers/VaultsLog.ts | 8 +- src/client/handlers/VaultsPermissionGet.ts | 4 +- src/client/handlers/VaultsPermissionSet.ts | 2 +- src/client/handlers/VaultsPermissionUnset.ts | 2 +- src/client/handlers/VaultsPull.ts | 7 +- src/client/handlers/VaultsRename.ts | 2 +- src/client/handlers/VaultsScan.ts | 4 +- src/client/handlers/VaultsSecretsCat.ts | 3 +- src/client/handlers/VaultsSecretsEnv.ts | 2 +- src/client/handlers/VaultsSecretsMkdir.ts | 10 +- src/client/handlers/VaultsSecretsNew.ts | 4 +- src/client/handlers/VaultsSecretsNewDir.ts | 12 +- src/client/handlers/VaultsSecretsRemove.ts | 8 +- src/client/handlers/VaultsSecretsRename.ts | 6 +- src/client/handlers/VaultsSecretsStat.ts | 2 +- src/client/handlers/VaultsSecretsWriteFile.ts | 12 +- src/client/handlers/VaultsVersion.ts | 14 +- src/client/types.ts | 17 +- src/git/http.ts | 24 +- src/nodes/NodeManager.ts | 27 +- src/nodes/agent/handlers/VaultsGitInfoGet.ts | 4 +- src/nodes/agent/handlers/VaultsGitPackGet.ts | 8 +- src/nodes/agent/handlers/VaultsScan.ts | 2 +- src/validation/index.ts | 3 +- src/vaults/VaultInternal.ts | 41 +-- src/vaults/VaultManager.ts | 20 +- src/vaults/VaultOps.ts | 206 ++++++----- tests/git/http.test.ts | 321 +++++++++--------- tests/git/utils.test.ts | 214 ++++++------ tests/vaults/VaultInternal.test.ts | 229 +++++++------ tests/vaults/VaultManager.test.ts | 22 +- tests/vaults/VaultOps/updatesecret.test.ts | 141 -------- 56 files changed, 809 insertions(+), 806 deletions(-) delete mode 100644 tests/vaults/VaultOps/updatesecret.test.ts diff --git a/src/acl/ACL.ts b/src/acl/ACL.ts index beb4fc4fa..47ace1f3f 100644 --- a/src/acl/ACL.ts +++ b/src/acl/ACL.ts @@ -614,8 +614,8 @@ class ACL { [...this.aclNodesDbPath, nodeId.toBuffer()], true, ); - // Skip if the nodeId doesn't exist - // this means that it previously been removed + // Skip if the nodeId doesn't exist. This means that it has previously + // been removed. 
if (permId == null) { continue; } diff --git a/src/client/handlers/IdentitiesAuthenticate.ts b/src/client/handlers/IdentitiesAuthenticate.ts index 6f0c3cd61..f5ec068d8 100644 --- a/src/client/handlers/IdentitiesAuthenticate.ts +++ b/src/client/handlers/IdentitiesAuthenticate.ts @@ -1,3 +1,5 @@ +import type { ContextTimed } from '@matrixai/contexts'; +import type { JSONValue } from '@matrixai/rpc'; import type { AuthProcessMessage, ClientRPCRequestParams, @@ -21,11 +23,11 @@ class IdentitiesAuthenticate extends ServerHandler< public timeout = 120000; // 2 Minutes public handle = async function* ( input: ClientRPCRequestParams<{ providerId: string }>, - _cancel, - _meta, - ctx, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const { identitiesManager }: { identitiesManager: IdentitiesManager } = this.container; const { @@ -52,7 +54,7 @@ class IdentitiesAuthenticate extends ServerHandler< if (authFlowResult.done) { never('authFlow signalled done too soon'); } - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); yield { request: { url: authFlowResult.value.url, @@ -63,7 +65,7 @@ class IdentitiesAuthenticate extends ServerHandler< if (!authFlowResult.done) { never('authFlow did not signal done when expected'); } - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); yield { response: { identityId: authFlowResult.value, diff --git a/src/client/handlers/IdentitiesAuthenticatedGet.ts b/src/client/handlers/IdentitiesAuthenticatedGet.ts index dd6de73e8..11f43ffe7 100644 --- a/src/client/handlers/IdentitiesAuthenticatedGet.ts +++ b/src/client/handlers/IdentitiesAuthenticatedGet.ts @@ -1,3 +1,5 @@ +import type { ContextTimed } from '@matrixai/contexts'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -19,11 +21,11 @@ class IdentitiesAuthenticatedGet extends ServerHandler< > { public handle = async function* ( input: ClientRPCRequestParams<{ providerId?: string }>, - _cancel, - _meta, - ctx, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const { identitiesManager }: { identitiesManager: IdentitiesManager } = this.container; let providerId: ProviderId | undefined; @@ -46,12 +48,10 @@ class IdentitiesAuthenticatedGet extends ServerHandler< : [providerId]; for (const providerId of providerIds) { const provider = identitiesManager.getProvider(providerId); - if (provider == null) { - continue; - } + if (provider == null) continue; const identities = await provider.getAuthIdentityIds(); for (const identityId of identities) { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); yield { providerId: provider.id, identityId: identityId, diff --git a/src/client/handlers/IdentitiesInfoConnectedGet.ts b/src/client/handlers/IdentitiesInfoConnectedGet.ts index 09178cded..5dbe759c6 100644 --- a/src/client/handlers/IdentitiesInfoConnectedGet.ts +++ b/src/client/handlers/IdentitiesInfoConnectedGet.ts @@ -1,3 +1,5 @@ +import type { ContextTimed } from '@matrixai/contexts'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -22,11 +24,11 @@ class IdentitiesInfoConnectedGet extends ServerHandler< > { public handle = async function* ( input: 
ClientRPCRequestParams, - _cancel, - _meta, - ctx, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const { identitiesManager }: { identitiesManager: IdentitiesManager } = this.container; const { @@ -71,6 +73,7 @@ class IdentitiesInfoConnectedGet extends ServerHandler< } const identities: Array> = []; for (const providerId of providerIds) { + ctx.signal.throwIfAborted(); // Get provider from id const provider = identitiesManager.getProvider(providerId); if (provider === undefined) { @@ -94,7 +97,7 @@ class IdentitiesInfoConnectedGet extends ServerHandler< let count = 0; for (const gen of identities) { for await (const identity of gen) { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); if (input.limit !== undefined && count >= input.limit) break; yield { providerId: identity.providerId, diff --git a/src/client/handlers/IdentitiesInfoGet.ts b/src/client/handlers/IdentitiesInfoGet.ts index a85ff6d21..80ded3ee8 100644 --- a/src/client/handlers/IdentitiesInfoGet.ts +++ b/src/client/handlers/IdentitiesInfoGet.ts @@ -1,3 +1,5 @@ +import type { ContextTimed } from '@matrixai/contexts'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -8,11 +10,11 @@ import type { IdentityId, ProviderId } from '../../ids'; import type IdentitiesManager from '../../identities/IdentitiesManager'; import type { IdentityData } from '../../identities/types'; import { ServerHandler } from '@matrixai/rpc'; +import { validateSync } from '../../validation'; +import { matchSync } from '../../utils'; import * as ids from '../../ids'; import * as identitiesErrors from '../../identities/errors'; import * as identitiesUtils from '../../identities/utils'; -import { validateSync } from '../../validation'; -import { matchSync } from '../../utils'; class IdentitiesInfoGet extends ServerHandler< { @@ -23,9 +25,9 @@ class IdentitiesInfoGet extends ServerHandler< > { public handle = async function* ( input: ClientRPCRequestParams, - _cancel, - _meta, - ctx, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { if (ctx.signal.aborted) throw ctx.signal.reason; const { identitiesManager }: { identitiesManager: IdentitiesManager } = @@ -86,7 +88,7 @@ class IdentitiesInfoGet extends ServerHandler< input.limit = identities.length; } for (let i = 0; i < input.limit; i++) { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const identity = identities[i]; if (identity !== undefined) { if (identitiesUtils.matchIdentityData(identity, searchTerms)) { diff --git a/src/client/handlers/KeysCertsChainGet.ts b/src/client/handlers/KeysCertsChainGet.ts index c3466c550..ef3ca5ff5 100644 --- a/src/client/handlers/KeysCertsChainGet.ts +++ b/src/client/handlers/KeysCertsChainGet.ts @@ -1,3 +1,5 @@ +import type { ContextTimed } from '@matrixai/contexts'; +import type { JSONValue } from '@matrixai/rpc'; import type { CertMessage, ClientRPCRequestParams, @@ -14,17 +16,15 @@ class KeysCertsChainGet extends ServerHandler< ClientRPCResponseResult > { public handle = async function* ( - _input, - _cancel, - _meta, - ctx, + _input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { const { certManager }: { certManager: CertManager } = this.container; for (const certPEM of await 
certManager.getCertPEMsChain()) { - if (ctx.signal.aborted) throw ctx.signal.reason; - yield { - cert: certPEM, - }; + ctx.signal.throwIfAborted(); + yield { cert: certPEM }; } }; } diff --git a/src/client/handlers/KeysCertsGet.ts b/src/client/handlers/KeysCertsGet.ts index 7684af221..f699f4527 100644 --- a/src/client/handlers/KeysCertsGet.ts +++ b/src/client/handlers/KeysCertsGet.ts @@ -16,9 +16,7 @@ class KeysCertsGet extends UnaryHandler< public handle = async (): Promise> => { const { certManager }: { certManager: CertManager } = this.container; const cert = await certManager.getCurrentCertPEM(); - return { - cert, - }; + return { cert }; }; } diff --git a/src/client/handlers/KeysEncrypt.ts b/src/client/handlers/KeysEncrypt.ts index 11598460c..fc8618635 100644 --- a/src/client/handlers/KeysEncrypt.ts +++ b/src/client/handlers/KeysEncrypt.ts @@ -7,7 +7,7 @@ import type { import type KeyRing from '../../keys/KeyRing'; import type { PublicKey } from '../../keys/types'; import { UnaryHandler } from '@matrixai/rpc'; -import { never } from '../../utils'; +import * as utils from '../../utils'; import * as keysUtils from '../../keys/utils'; import * as keysErrors from '../../keys/errors'; @@ -27,7 +27,7 @@ class KeysEncrypt extends UnaryHandler< try { const jwk = input.publicKeyJwk; publicKey = keysUtils.publicKeyFromJWK(jwk); - if (publicKey == null) never('failed to get public key from JWK'); + if (publicKey == null) utils.never('failed to get public key from JWK'); } catch (e) { throw new keysErrors.ErrorPublicKeyParse(undefined, { cause: e }); } diff --git a/src/client/handlers/KeysKeyPairRenew.ts b/src/client/handlers/KeysKeyPairRenew.ts index 29e3c298b..3e22f9602 100644 --- a/src/client/handlers/KeysKeyPairRenew.ts +++ b/src/client/handlers/KeysKeyPairRenew.ts @@ -17,11 +17,9 @@ class KeysKeyPairRenew extends UnaryHandler< input: ClientRPCRequestParams, ): Promise => { const { certManager }: { certManager: CertManager } = this.container; - // Other domains will be updated accordingly via the `EventBus` so we // only need to modify the KeyManager await certManager.renewCertWithNewKeyPair(input.password); - return {}; }; } diff --git a/src/client/handlers/KeysVerify.ts b/src/client/handlers/KeysVerify.ts index 2f6c0b171..0609f897f 100644 --- a/src/client/handlers/KeysVerify.ts +++ b/src/client/handlers/KeysVerify.ts @@ -35,7 +35,7 @@ class KeysVerify extends UnaryHandler< Buffer.from(input.data, 'binary'), Buffer.from(input.signature, 'binary') as Signature, ); - return { type: 'success', success: success }; + return { success: success }; }; } diff --git a/src/client/handlers/NodesAdd.ts b/src/client/handlers/NodesAdd.ts index 68d4985da..84991763b 100644 --- a/src/client/handlers/NodesAdd.ts +++ b/src/client/handlers/NodesAdd.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -8,11 +10,11 @@ import type { NodeId } from '../../ids'; import type { Host, Port } from '../../network/types'; import type NodeManager from '../../nodes/NodeManager'; import { UnaryHandler } from '@matrixai/rpc'; +import { matchSync } from '../../utils'; +import { validateSync } from '../../validation'; import * as ids from '../../ids'; import * as networkUtils from '../../network/utils'; import * as nodeErrors from '../../nodes/errors'; -import { matchSync } from '../../utils'; -import { validateSync } from '../../validation'; class 
NodesAdd extends UnaryHandler< { @@ -24,6 +26,9 @@ class NodesAdd extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): Promise => { const { db, nodeManager }: { db: DB; nodeManager: NodeManager } = this.container; @@ -72,8 +77,8 @@ class NodesAdd extends UnaryHandler< true, input.force ?? false, 1500, - undefined, tran, + ctx, ), ); return {}; diff --git a/src/client/handlers/NodesClaim.ts b/src/client/handlers/NodesClaim.ts index 9056f07bd..deed7352c 100644 --- a/src/client/handlers/NodesClaim.ts +++ b/src/client/handlers/NodesClaim.ts @@ -41,11 +41,11 @@ class NodesClaim extends UnaryHandler< }, ); await db.withTransactionF(async (tran) => { - // Attempt to claim the node, - // if there is no permission then we get an error + // Attempt to claim the node. If there is no permission then we get an + // error. await nodeManager.claimNode(nodeId, tran); }); - return { type: 'success', success: true }; + return { success: true }; }; } diff --git a/src/client/handlers/NodesFind.ts b/src/client/handlers/NodesFind.ts index d8de9d7ba..904ed7137 100644 --- a/src/client/handlers/NodesFind.ts +++ b/src/client/handlers/NodesFind.ts @@ -1,3 +1,5 @@ +import type { JSONValue } from '@matrixai/rpc'; +import type { ContextTimed } from '@matrixai/contexts'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -6,12 +8,11 @@ import type { } from '../types'; import type { NodeId } from '../../ids'; import type NodeManager from '../../nodes/NodeManager'; -import type { ContextTimed } from '@matrixai/contexts'; import { UnaryHandler } from '@matrixai/rpc'; -import * as ids from '../../ids'; -import * as nodesErrors from '../../nodes/errors'; import { validateSync } from '../../validation'; import { matchSync } from '../../utils'; +import * as ids from '../../ids'; +import * as nodesErrors from '../../nodes/errors'; class NodesFind extends UnaryHandler< { @@ -22,8 +23,8 @@ class NodesFind extends UnaryHandler< > { public handle = async ( input: ClientRPCRequestParams, - _cancel, - _meta, + _cancel: (reason?: any) => void, + _meta: Record, ctx: ContextTimed, ): Promise> => { const { nodeManager }: { nodeManager: NodeManager } = this.container; @@ -42,12 +43,7 @@ class NodesFind extends UnaryHandler< nodeId: input.nodeIdEncoded, }, ); - const result = await nodeManager.findNode( - { - nodeId: nodeId, - }, - ctx, - ); + const result = await nodeManager.findNode({ nodeId: nodeId }, ctx); if (result == null) { throw new nodesErrors.ErrorNodeGraphNodeIdNotFound(); } diff --git a/src/client/handlers/NodesGetAll.ts b/src/client/handlers/NodesGetAll.ts index 1477edc2d..f031411d8 100644 --- a/src/client/handlers/NodesGetAll.ts +++ b/src/client/handlers/NodesGetAll.ts @@ -1,3 +1,5 @@ +import type { ContextTimed } from '@matrixai/contexts'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -15,20 +17,18 @@ class NodesGetAll extends ServerHandler< ClientRPCResponseResult > { public handle = async function* ( - _input, - _cancel, - _meta, - ctx, + _input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const { nodeGraph } = this.container; for await (const [index, bucket] of nodeGraph.getBuckets()) { for (const [id, nodeContact] of bucket) { const encodedId = nodesUtils.encodeNodeId(id); // For 
every node in every bucket, add it to our message - if (ctx.signal.aborted) { - throw ctx.signal.reason; - } + ctx.signal.throwIfAborted(); yield { bucketIndex: index, nodeIdEncoded: encodedId, diff --git a/src/client/handlers/NodesListConnections.ts b/src/client/handlers/NodesListConnections.ts index 0e7710189..2fcfb8c87 100644 --- a/src/client/handlers/NodesListConnections.ts +++ b/src/client/handlers/NodesListConnections.ts @@ -4,6 +4,8 @@ import type { NodeConnectionMessage, } from '../types'; import type NodeConnectionManager from '../../nodes/NodeConnectionManager'; +import type { ContextTimed } from '@matrixai/contexts'; +import type { JSONValue } from '@matrixai/rpc'; import { ServerHandler } from '@matrixai/rpc'; import * as nodesUtils from '../../nodes/utils'; @@ -15,17 +17,19 @@ class NodesListConnections extends ServerHandler< ClientRPCResponseResult > { public handle = async function* ( - _input, - _cancel, - _meta, - ctx, + _input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { const { nodeConnectionManager, - }: { nodeConnectionManager: NodeConnectionManager } = this.container; + }: { + nodeConnectionManager: NodeConnectionManager; + } = this.container; const connections = nodeConnectionManager.listConnections(); for (const connection of connections) { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); yield { host: connection.address.host, hostname: connection.address.hostname ?? '', diff --git a/src/client/handlers/NodesPing.ts b/src/client/handlers/NodesPing.ts index 7d9e16f03..e89b91056 100644 --- a/src/client/handlers/NodesPing.ts +++ b/src/client/handlers/NodesPing.ts @@ -38,7 +38,7 @@ class NodesPing extends UnaryHandler< }, ); const result = await nodeManager.pingNode(nodeId); - return { type: 'success', success: result != null }; + return { success: result != null }; }; } diff --git a/src/client/handlers/NotificationsInboxClear.ts b/src/client/handlers/NotificationsInboxClear.ts index 1789a5bc0..38f0d4c26 100644 --- a/src/client/handlers/NotificationsInboxClear.ts +++ b/src/client/handlers/NotificationsInboxClear.ts @@ -15,7 +15,10 @@ class NotificationsInboxClear extends UnaryHandler< const { db, notificationsManager, - }: { db: DB; notificationsManager: NotificationsManager } = this.container; + }: { + db: DB; + notificationsManager: NotificationsManager; + } = this.container; await db.withTransactionF((tran) => notificationsManager.clearInboxNotifications(tran), ); diff --git a/src/client/handlers/NotificationsInboxRead.ts b/src/client/handlers/NotificationsInboxRead.ts index 232204d0d..9d45458a7 100644 --- a/src/client/handlers/NotificationsInboxRead.ts +++ b/src/client/handlers/NotificationsInboxRead.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -20,15 +22,18 @@ class NotificationsInboxRead extends ServerHandler< > { public handle( input: ClientRPCRequestParams, - _cancel, - _meta, - ctx, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const { db, notificationsManager, - }: { db: DB; notificationsManager: NotificationsManager } = this.container; + }: { + db: DB; + notificationsManager: NotificationsManager; + } = this.container; const { seek, seekEnd, 
unread, order, limit } = input; let seek_: NotificationId | number | undefined; @@ -55,7 +60,7 @@ class NotificationsInboxRead extends ServerHandler< tran, }); for await (const notification of notifications) { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); yield { notification: notification, }; diff --git a/src/client/handlers/NotificationsOutboxRead.ts b/src/client/handlers/NotificationsOutboxRead.ts index 8160f1d1b..da9d7473c 100644 --- a/src/client/handlers/NotificationsOutboxRead.ts +++ b/src/client/handlers/NotificationsOutboxRead.ts @@ -1,4 +1,6 @@ +import type { ContextTimed } from '@matrixai/contexts'; import type { DB } from '@matrixai/db'; +import type { JSONValue } from '@matrixai/rpc'; import type { ClientRPCRequestParams, ClientRPCResponseResult, @@ -20,15 +22,18 @@ class NotificationsOutboxRead extends ServerHandler< > { public handle( input: ClientRPCRequestParams, - _cancel, - _meta, - ctx, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const { db, notificationsManager, - }: { db: DB; notificationsManager: NotificationsManager } = this.container; + }: { + db: DB; + notificationsManager: NotificationsManager; + } = this.container; const { seek, seekEnd, order, limit } = input; let seek_: NotificationId | number | undefined; @@ -54,7 +59,7 @@ class NotificationsOutboxRead extends ServerHandler< tran, }); for await (const notification of notifications) { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const taskInfo = await notificationsManager.getOutboxNotificationTaskInfoById( notificationsUtils.decodeNotificationId( diff --git a/src/client/handlers/NotificationsSend.ts b/src/client/handlers/NotificationsSend.ts index 86de7d3e9..6628c6a22 100644 --- a/src/client/handlers/NotificationsSend.ts +++ b/src/client/handlers/NotificationsSend.ts @@ -23,7 +23,9 @@ class NotificationsSend extends UnaryHandler< ): Promise => { const { notificationsManager, - }: { notificationsManager: NotificationsManager } = this.container; + }: { + notificationsManager: NotificationsManager; + } = this.container; const { nodeId, }: { diff --git a/src/client/handlers/VaultsClone.ts b/src/client/handlers/VaultsClone.ts index 1a5386dde..30ed70788 100644 --- a/src/client/handlers/VaultsClone.ts +++ b/src/client/handlers/VaultsClone.ts @@ -7,8 +7,11 @@ import type { CloneMessage, SuccessMessage, } from '../types'; +import type { NodeId } from '../../ids'; import type VaultManager from '../../vaults/VaultManager'; import { UnaryHandler } from '@matrixai/rpc'; +import { validateSync } from '../../validation'; +import { matchSync } from '../../utils'; import * as ids from '../../ids'; class VaultsClone extends UnaryHandler< @@ -27,11 +30,25 @@ class VaultsClone extends UnaryHandler< ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; - const nodeId = ids.parseNodeId(input.nodeIdEncoded); + const { + nodeId, + }: { + nodeId: NodeId; + } = validateSync( + (keyPath, value) => { + return matchSync(keyPath)( + [['nodeId'], () => ids.parseNodeId(value)], + () => value, + ); + }, + { + nodeId: input.nodeIdEncoded, + }, + ); await db.withTransactionF(async (tran) => { - await vaultManager.cloneVault(nodeId, input.nameOrId, ctx, tran); + await vaultManager.cloneVault(nodeId, input.nameOrId, tran, ctx); }); - return { type: 'success', success: true }; + return { success: 
true }; }; } diff --git a/src/client/handlers/VaultsCreate.ts b/src/client/handlers/VaultsCreate.ts index cc5d3a60f..bc578b18f 100644 --- a/src/client/handlers/VaultsCreate.ts +++ b/src/client/handlers/VaultsCreate.ts @@ -28,7 +28,7 @@ class VaultsCreate extends UnaryHandler< const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; const vaultId = await db.withTransactionF((tran) => - vaultManager.createVault(input.vaultName, ctx, tran), + vaultManager.createVault(input.vaultName, tran, ctx), ); return { vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId) }; }; diff --git a/src/client/handlers/VaultsDelete.ts b/src/client/handlers/VaultsDelete.ts index f1d050a5e..98c1d511f 100644 --- a/src/client/handlers/VaultsDelete.ts +++ b/src/client/handlers/VaultsDelete.ts @@ -40,9 +40,9 @@ class VaultsDelete extends UnaryHandler< `Vault "${input.nameOrId}" does not exist`, ); } - await vaultManager.destroyVault(vaultId, ctx, tran); + await vaultManager.destroyVault(vaultId, tran, ctx); }); - return { type: 'success', success: true }; + return { success: true }; }; } diff --git a/src/client/handlers/VaultsList.ts b/src/client/handlers/VaultsList.ts index 724f46a2a..967975124 100644 --- a/src/client/handlers/VaultsList.ts +++ b/src/client/handlers/VaultsList.ts @@ -24,14 +24,14 @@ class VaultsList extends ServerHandler< _meta: Record, ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; + ctx.signal.throwIfAborted(); const vaults = await db.withTransactionF((tran) => vaultManager.listVaults(ctx, tran), ); for await (const [vaultName, vaultId] of vaults) { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); yield { vaultName: vaultName, vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId), diff --git a/src/client/handlers/VaultsLog.ts b/src/client/handlers/VaultsLog.ts index 6b47ae98c..7218c929d 100644 --- a/src/client/handlers/VaultsLog.ts +++ b/src/client/handlers/VaultsLog.ts @@ -26,7 +26,7 @@ class VaultsLog extends ServerHandler< _meta: Record, ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; const log = await db.withTransactionF(async (tran) => { @@ -45,14 +45,14 @@ class VaultsLog extends ServerHandler< return await vaultManager.withVaults( [vaultId], async (vault) => { - return await vault.log(input.commitId, input.depth); + return await vault.log(input.commitId ?? 
'HEAD', input.depth, ctx); }, - ctx, tran, + ctx, ); }); for (const entry of log) { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); yield { commitId: entry.commitId, committer: entry.committer.name, diff --git a/src/client/handlers/VaultsPermissionGet.ts b/src/client/handlers/VaultsPermissionGet.ts index 0ee38d8f0..d7b8eee13 100644 --- a/src/client/handlers/VaultsPermissionGet.ts +++ b/src/client/handlers/VaultsPermissionGet.ts @@ -32,7 +32,7 @@ class VaultsPermissionGet extends ServerHandler< _meta: Record, ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const { db, vaultManager, @@ -66,7 +66,7 @@ class VaultsPermissionGet extends ServerHandler< const actions = Object.keys( permissionList[nodeIdString], ) as Array; - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); yield { vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId), nodeIdEncoded: nodesUtils.encodeNodeId(nodeId), diff --git a/src/client/handlers/VaultsPermissionSet.ts b/src/client/handlers/VaultsPermissionSet.ts index 3e7ee34ec..a816ab5e0 100644 --- a/src/client/handlers/VaultsPermissionSet.ts +++ b/src/client/handlers/VaultsPermissionSet.ts @@ -83,7 +83,7 @@ class VaultsPermissionSet extends UnaryHandler< }, }); }); - return { type: 'success', success: true }; + return { success: true }; }; } diff --git a/src/client/handlers/VaultsPermissionUnset.ts b/src/client/handlers/VaultsPermissionUnset.ts index 4e19a91d7..85f689918 100644 --- a/src/client/handlers/VaultsPermissionUnset.ts +++ b/src/client/handlers/VaultsPermissionUnset.ts @@ -93,7 +93,7 @@ class VaultsPermissionUnset extends UnaryHandler< } }); // Formatting response - return { type: 'success', success: true }; + return { success: true }; }; } diff --git a/src/client/handlers/VaultsPull.ts b/src/client/handlers/VaultsPull.ts index c7282e4b6..02dcc9659 100644 --- a/src/client/handlers/VaultsPull.ts +++ b/src/client/handlers/VaultsPull.ts @@ -7,7 +7,6 @@ import type { SuccessMessage, VaultsPullMessage, } from '../types'; -import type { VaultName } from '../../vaults/types'; import type VaultManager from '../../vaults/VaultManager'; import { UnaryHandler } from '@matrixai/rpc'; import * as ids from '../../ids'; @@ -34,7 +33,7 @@ class VaultsPull extends UnaryHandler< vaultsUtils.decodeVaultId(input.pullVault) ?? 
input.pullVault; await db.withTransactionF(async (tran) => { const vaultIdFromName = await vaultManager.getVaultId( - input.nameOrId as VaultName, + input.nameOrId!, tran, ); const vaultId = @@ -52,12 +51,12 @@ class VaultsPull extends UnaryHandler< vaultId: vaultId, pullNodeId: nodeId, pullVaultNameOrId: pullVaultId, - tran: tran, }, + tran, ctx, ); }); - return { type: 'success', success: true }; + return { success: true }; }; } diff --git a/src/client/handlers/VaultsRename.ts b/src/client/handlers/VaultsRename.ts index 555f7361e..9fde877b4 100644 --- a/src/client/handlers/VaultsRename.ts +++ b/src/client/handlers/VaultsRename.ts @@ -40,7 +40,7 @@ class VaultsRename extends UnaryHandler< `Vault "${input.nameOrId}" does not exist`, ); } - await vaultManager.renameVault(vaultId, input.newName, ctx, tran); + await vaultManager.renameVault(vaultId, input.newName, tran, ctx); return { vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId) }; }); }; diff --git a/src/client/handlers/VaultsScan.ts b/src/client/handlers/VaultsScan.ts index 87b432f51..8fe51dd0b 100644 --- a/src/client/handlers/VaultsScan.ts +++ b/src/client/handlers/VaultsScan.ts @@ -23,7 +23,7 @@ class VaultsScan extends ServerHandler< _meta: Record, ctx: ContextTimed, ): AsyncGenerator> { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); const { vaultManager }: { vaultManager: VaultManager } = this.container; const nodeId = ids.parseNodeId(input.nodeIdEncoded); for await (const { @@ -31,7 +31,7 @@ class VaultsScan extends ServerHandler< vaultName, vaultPermissions, } of vaultManager.scanVaults(nodeId, ctx)) { - if (ctx.signal.aborted) throw ctx.signal.reason; + ctx.signal.throwIfAborted(); yield { vaultName: vaultName, vaultIdEncoded: vaultIdEncoded, diff --git a/src/client/handlers/VaultsSecretsCat.ts b/src/client/handlers/VaultsSecretsCat.ts index 4544585f1..dfa3ec368 100644 --- a/src/client/handlers/VaultsSecretsCat.ts +++ b/src/client/handlers/VaultsSecretsCat.ts @@ -43,10 +43,11 @@ class VaultsSecretsCat extends DuplexHandler< const { nameOrId, secretName } = secretIdentifierMessage; const vaultIdFromName = await vaultManager.getVaultId(nameOrId, tran); const vaultId = vaultIdFromName ?? 
vaultsUtils.decodeVaultId(nameOrId); - if (vaultId == null) + if (vaultId == null) { throw new vaultsErrors.ErrorVaultsVaultUndefined( `Vault "${nameOrId}" does not exist`, ); + } yield await vaultManager.withVaults( [vaultId], async (vault) => { diff --git a/src/client/handlers/VaultsSecretsEnv.ts b/src/client/handlers/VaultsSecretsEnv.ts index 625eb25af..0c14e316d 100644 --- a/src/client/handlers/VaultsSecretsEnv.ts +++ b/src/client/handlers/VaultsSecretsEnv.ts @@ -74,8 +74,8 @@ class VaultsSecretsEnv extends DuplexHandler< return results; }); }, - ctx, tran, + ctx, ); for (const { filePath, value } of secrets) { yield { diff --git a/src/client/handlers/VaultsSecretsMkdir.ts b/src/client/handlers/VaultsSecretsMkdir.ts index 9f65821ce..fb6ef4957 100644 --- a/src/client/handlers/VaultsSecretsMkdir.ts +++ b/src/client/handlers/VaultsSecretsMkdir.ts @@ -5,7 +5,7 @@ import type { ClientRPCRequestParams, ClientRPCResponseResult, SecretDirMessage, - SuccessOrErrorMessage, + SuccessOrErrorMessageTagged, } from '../types'; import type VaultManager from '../../vaults/VaultManager'; import type { POJO } from '../../types'; @@ -20,19 +20,19 @@ class VaultsSecretsMkdir extends DuplexHandler< vaultManager: VaultManager; }, ClientRPCRequestParams, - ClientRPCResponseResult + ClientRPCResponseResult > { public handle = async function* ( input: AsyncIterableIterator>, _cancel: (reason?: any) => void, _meta: Record, ctx: ContextTimed, - ): AsyncGenerator> { + ): AsyncGenerator> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; let metadata: POJO; yield* db.withTransactionG( - async function* (tran): AsyncGenerator { + async function* (tran): AsyncGenerator { for await (const secretDirMessage of input) { // Unpack input if (metadata == null) metadata = secretDirMessage.metadata ?? 
{}; @@ -75,8 +75,8 @@ class VaultsSecretsMkdir extends DuplexHandler< } } }, - ctx, tran, + ctx, ); } }, diff --git a/src/client/handlers/VaultsSecretsNew.ts b/src/client/handlers/VaultsSecretsNew.ts index 72290e714..bad8f3b15 100644 --- a/src/client/handlers/VaultsSecretsNew.ts +++ b/src/client/handlers/VaultsSecretsNew.ts @@ -47,11 +47,11 @@ class VaultsSecretsNew extends UnaryHandler< async (vault) => { await vaultOps.addSecret(vault, input.secretName, content); }, - ctx, tran, + ctx, ); }); - return { type: 'success', success: true }; + return { success: true }; }; } diff --git a/src/client/handlers/VaultsSecretsNewDir.ts b/src/client/handlers/VaultsSecretsNewDir.ts index 274efe1b0..c3283db41 100644 --- a/src/client/handlers/VaultsSecretsNewDir.ts +++ b/src/client/handlers/VaultsSecretsNewDir.ts @@ -47,13 +47,19 @@ class VaultsSecretsNewDir extends UnaryHandler< await vaultManager.withVaults( [vaultId], async (vault) => { - await vaultOps.addSecretDirectory(vault, input.dirName, fs); + await vaultOps.addSecretDirectory( + vault, + input.dirName, + fs, + undefined, + ctx, + ); }, - ctx, tran, + ctx, ); }); - return { type: 'success', success: true }; + return { success: true }; }; } diff --git a/src/client/handlers/VaultsSecretsRemove.ts b/src/client/handlers/VaultsSecretsRemove.ts index 099f6c734..c44392ab0 100644 --- a/src/client/handlers/VaultsSecretsRemove.ts +++ b/src/client/handlers/VaultsSecretsRemove.ts @@ -7,7 +7,7 @@ import type { ClientRPCResponseResult, SecretsRemoveHeaderMessage, SecretIdentifierMessageTagged, - SuccessOrErrorMessage, + SuccessOrErrorMessageTagged, } from '../types'; import type VaultManager from '../../vaults/VaultManager'; import type { FileSystemWritable } from '../../vaults/types'; @@ -25,7 +25,7 @@ class VaultsSecretsRemove extends DuplexHandler< ClientRPCRequestParams< SecretsRemoveHeaderMessage | SecretIdentifierMessageTagged >, - ClientRPCResponseResult + ClientRPCResponseResult > { public handle = async function* ( input: AsyncIterableIterator< @@ -36,7 +36,7 @@ class VaultsSecretsRemove extends DuplexHandler< _cancel: (reason?: any) => void, _meta: Record, ctx: ContextTimed, - ): AsyncGenerator> { + ): AsyncGenerator> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; // Extract the header message from the iterator @@ -75,7 +75,7 @@ class VaultsSecretsRemove extends DuplexHandler< // Acquire all locks in parallel and perform all operations at once yield* withG( vaultAcquires, - async function* (efses): AsyncGenerator { + async function* (efses): AsyncGenerator { // Creating the vault name to efs map for easy access const vaultMap = new Map(); for (let i = 0; i < efses.length; i++) { diff --git a/src/client/handlers/VaultsSecretsRename.ts b/src/client/handlers/VaultsSecretsRename.ts index 407a76d7f..37431beae 100644 --- a/src/client/handlers/VaultsSecretsRename.ts +++ b/src/client/handlers/VaultsSecretsRename.ts @@ -48,13 +48,15 @@ class VaultsSecretsRename extends UnaryHandler< vault, input.secretName, input.newSecretName, + undefined, + ctx, ); }, - ctx, tran, + ctx, ); }); - return { type: 'success', success: true }; + return { success: true }; }; } diff --git a/src/client/handlers/VaultsSecretsStat.ts b/src/client/handlers/VaultsSecretsStat.ts index 8bbb8d6b0..3630ff59e 100644 --- a/src/client/handlers/VaultsSecretsStat.ts +++ b/src/client/handlers/VaultsSecretsStat.ts @@ -47,8 +47,8 @@ class VaultsSecretsStat extends UnaryHandler< async (vault) => { return await vaultOps.statSecret(vault, secretName); }, - 
ctx, tran, + ctx, ); return { stat: { diff --git a/src/client/handlers/VaultsSecretsWriteFile.ts b/src/client/handlers/VaultsSecretsWriteFile.ts index 62dc7764c..39f4ffcab 100644 --- a/src/client/handlers/VaultsSecretsWriteFile.ts +++ b/src/client/handlers/VaultsSecretsWriteFile.ts @@ -43,13 +43,19 @@ class VaultsSecretsWriteFile extends UnaryHandler< await vaultManager.withVaults( [vaultId], async (vault) => { - await vaultOps.writeSecret(vault, input.secretName, secretContent); + await vaultOps.writeSecret( + vault, + input.secretName, + secretContent, + undefined, + ctx, + ); }, - ctx, tran, + ctx, ); }); - return { type: 'success', success: true }; + return { success: true }; }; } diff --git a/src/client/handlers/VaultsVersion.ts b/src/client/handlers/VaultsVersion.ts index 48c8aa83b..bbb4b0dfb 100644 --- a/src/client/handlers/VaultsVersion.ts +++ b/src/client/handlers/VaultsVersion.ts @@ -42,18 +42,22 @@ class VaultsVersion extends UnaryHandler< const [latestOid, currentVersionId] = await vaultManager.withVaults( [vaultId], async (vault) => { - const latestOid = (await vault.log())[0].commitId; + // Use default values for the ref and limit. We only care about + // passing in the relevant context. + const latestOid = (await vault.log(undefined, undefined, ctx))[0] + .commitId; await vault.version(versionId); - const currentVersionId = (await vault.log(versionId, 0))[0]?.commitId; + const currentVersionId = ( + await vault.log(versionId, undefined, ctx) + )[0]?.commitId; return [latestOid, currentVersionId]; }, - ctx, tran, + ctx, ); // Checking if latest version ID - const latestVersion = latestOid === currentVersionId; return { - latestVersion, + latestVersion: latestOid === currentVersionId, }; }); }; diff --git a/src/client/types.ts b/src/client/types.ts index c8843e334..8c1032364 100644 --- a/src/client/types.ts +++ b/src/client/types.ts @@ -201,18 +201,22 @@ type SignatureMessage = { type VerifySignatureMessage = PublicKeyMessage & DataMessage & SignatureMessage; type SuccessMessage = { + success: boolean; +}; + +type SuccessMessageTagged = { type: 'success'; success: boolean; }; -type ErrorMessage = { +type ErrorMessageTagged = { type: 'error'; code?: string | number; reason?: string; data?: JSONObject; }; -type SuccessOrErrorMessage = SuccessMessage | ErrorMessage; +type SuccessOrErrorMessageTagged = SuccessMessageTagged | ErrorMessageTagged; // Notifications messages @@ -321,9 +325,9 @@ type ContentMessage = { secretContent: string; }; -type ContentSuccessMessage = ContentMessage & SuccessMessage; +type ContentSuccessMessage = ContentMessage & SuccessMessageTagged; -type ContentOrErrorMessage = ContentSuccessMessage | ErrorMessage; +type ContentOrErrorMessage = ContentSuccessMessage | ErrorMessageTagged; type SecretContentMessage = SecretIdentifierMessage & ContentMessage; @@ -416,8 +420,9 @@ export type { NodesGetMessage, NodesAddMessage, SuccessMessage, - ErrorMessage, - SuccessOrErrorMessage, + SuccessMessageTagged, + ErrorMessageTagged, + SuccessOrErrorMessageTagged, NotificationInboxMessage, NotificationOutboxMessage, NotificationReadMessage, diff --git a/src/git/http.ts b/src/git/http.ts index b062ec098..a4bd43f04 100644 --- a/src/git/http.ts +++ b/src/git/http.ts @@ -1,4 +1,4 @@ -import type { ContextCancellable } from '@matrixai/contexts'; +import type { ContextTimed } from '@matrixai/contexts'; import type { CapabilityList, Reference, @@ -121,7 +121,7 @@ async function* advertiseRefGenerator( dir: string; gitDir: string; }, - ctx: ContextCancellable, + ctx: 
ContextTimed, ): AsyncGenerator { // Providing side-band-64, symref for the HEAD and agent name capabilities const capabilityList = [ @@ -172,7 +172,7 @@ async function* advertiseRefGenerator( async function* referenceListGenerator( objectGenerator: AsyncGenerator<[Reference, ObjectId], void, void>, capabilities: CapabilityList, - ctx: ContextCancellable, + ctx: ContextTimed, ): AsyncGenerator { // Cap-list = capability *(SP capability) const capabilitiesListBuffer = Buffer.from( @@ -362,16 +362,16 @@ async function* generatePackRequest( gitDir: string; body: Array; }, - ctx: ContextCancellable, + ctx: ContextTimed, ): AsyncGenerator { const [wants, haves, _capabilities] = await parsePackRequest(body); const objectIds = await gitUtils.listObjects( { efs: efs, - dir, + dir: dir, gitDir: gitDir, - wants, - haves, + wants: wants, + haves: haves, }, ctx, ); @@ -381,9 +381,9 @@ async function* generatePackRequest( yield* generatePackData( { efs: efs, - dir, - gitDir, - objectIds, + dir: dir, + gitDir: gitDir, + objectIds: objectIds, }, ctx, ); @@ -416,7 +416,7 @@ async function* generatePackData( objectIds: Array; chunkSize?: number; }, - ctx: ContextCancellable, + ctx: ContextTimed, ): AsyncGenerator { let packFile: PackObjectsResult; // In case of errors we don't want to throw them. This will result in the error being thrown into `isometric-git` @@ -424,7 +424,7 @@ async function* generatePackData( try { packFile = await git.packObjects({ fs: efs, - dir, + dir: dir, gitdir: gitDir, oids: objectIds, }); diff --git a/src/nodes/NodeManager.ts b/src/nodes/NodeManager.ts index cb01f510e..18a526676 100644 --- a/src/nodes/NodeManager.ts +++ b/src/nodes/NodeManager.ts @@ -1632,8 +1632,8 @@ class NodeManager { block?: boolean, force?: boolean, connectionConnectTimeoutTime?: number, - ctx?: Partial, tran?: DBTransaction, + ctx?: Partial, ): PromiseCancellable; @ready(new nodesErrors.ErrorNodeManagerNotRunning(), true, ['stopping']) @timedCancellable(true) @@ -1644,8 +1644,8 @@ class NodeManager { block: boolean = false, force: boolean = false, connectionConnectTimeoutTime: number = this.connectionConnectTimeoutTime, + tran: DBTransaction, @context ctx: ContextTimed, - tran?: DBTransaction, ): Promise { // We don't want to add our own node if (nodeId.equals(this.keyRing.getNodeId())) { @@ -1662,22 +1662,23 @@ class NodeManager { block, force, connectionConnectTimeoutTime, - ctx, tran, + ctx, ), ); } - // Need to await node connection verification, if fail, need to reject connection. + // Need to await node connection verification. If failed, need to reject + // connection. // When adding a node we need to handle 3 cases // 1. The node already exists. We need to update it's last updated field - // 2. The node doesn't exist and bucket has room. - // We need to add the node to the bucket - // 3. The node doesn't exist and the bucket is full. - // We need to ping the oldest node. If the ping succeeds we need to update - // the lastUpdated of the oldest node and drop the new one. If the ping - // fails we delete the old node and add in the new one. + // 2. The node doesn't exist and bucket has room. We need to add the node + // to the bucket + // 3. The node doesn't exist and the bucket is full. We need to ping the + // oldest node. If the ping succeeds we need to update the lastUpdated of + // the oldest node and drop the new one. If the ping fails we delete the + // old node and add in the new one. 
const [bucketIndex] = this.nodeGraph.bucketIndex(nodeId); // To avoid conflict we want to lock on the bucket index await this.nodeGraph.lockBucket(bucketIndex, tran, ctx); @@ -1857,8 +1858,8 @@ class NodeManager { false, false, undefined, - ctx, tran, + ctx, ); } else { // We don't remove node the ping was aborted @@ -1891,8 +1892,8 @@ class NodeManager { false, false, undefined, - ctx, tran, + ctx, ); removedNodes -= 1; } @@ -1930,7 +1931,7 @@ class NodeManager { } protected async setupGCTask(bucketIndex: number) { - // Check and start a 'garbageCollect` bucket task + // Check and start a `garbageCollect` bucket task let scheduled: boolean = false; for await (const task of this.taskManager.getTasks('asc', true, [ this.tasksPath, diff --git a/src/nodes/agent/handlers/VaultsGitInfoGet.ts b/src/nodes/agent/handlers/VaultsGitInfoGet.ts index 65ae55ed2..b13fa48ac 100644 --- a/src/nodes/agent/handlers/VaultsGitInfoGet.ts +++ b/src/nodes/agent/handlers/VaultsGitInfoGet.ts @@ -26,7 +26,7 @@ class VaultsGitInfoGet extends RawHandler<{ public handle = async ( input: [JSONRPCRequest, ReadableStream], _cancel: (reason?: any) => void, - meta: Record | undefined, + meta: Record, ctx: ContextTimed, ): Promise<[JSONObject, ReadableStream]> => { const { db, vaultManager, acl } = this.container; @@ -91,8 +91,10 @@ class VaultsGitInfoGet extends RawHandler<{ let handleInfoRequestGen: AsyncGenerator; const stream = new ReadableStream({ start: async () => { + // Automatically handle the transaction lifetime handleInfoRequestGen = vaultManager.handleInfoRequest( data.vaultId, + undefined, ctx, ); }, diff --git a/src/nodes/agent/handlers/VaultsGitPackGet.ts b/src/nodes/agent/handlers/VaultsGitPackGet.ts index 3b658a67d..2f006b8cd 100644 --- a/src/nodes/agent/handlers/VaultsGitPackGet.ts +++ b/src/nodes/agent/handlers/VaultsGitPackGet.ts @@ -79,7 +79,13 @@ class VaultsGitPackGet extends RawHandler<{ for await (const message of inputStream) { body.push(Buffer.from(message)); } - packRequestGen = vaultManager.handlePackRequest(vaultId, body, ctx); + // Automatically handle the transaction lifetime + packRequestGen = vaultManager.handlePackRequest( + vaultId, + body, + undefined, + ctx, + ); }, pull: async (controller) => { const next = await packRequestGen.next(); diff --git a/src/nodes/agent/handlers/VaultsScan.ts b/src/nodes/agent/handlers/VaultsScan.ts index 77c39db59..9108a5ee2 100644 --- a/src/nodes/agent/handlers/VaultsScan.ts +++ b/src/nodes/agent/handlers/VaultsScan.ts @@ -40,8 +40,8 @@ class VaultsScan extends ServerHandler< > { const listResponse = vaultManager.handleScanVaults( requestingNodeId, - ctx, tran, + ctx, ); for await (const { vaultId, diff --git a/src/validation/index.ts b/src/validation/index.ts index 87164a2fa..2d29ef407 100644 --- a/src/validation/index.ts +++ b/src/validation/index.ts @@ -72,8 +72,7 @@ function validateSync( e.value = value; e.context = context; errors.push(e); - // If lazy mode, short circuit evaluation - // And throw the error up + // If lazy mode, short circuit evaluation and throw the error up if (options.mode === 'lazy') { throw e; } diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts index ddc1c4296..e73551cd1 100644 --- a/src/vaults/VaultInternal.ts +++ b/src/vaults/VaultInternal.ts @@ -29,7 +29,11 @@ import { } from '@matrixai/async-init/dist/CreateDestroyStartStop'; import { RWLockWriter } from '@matrixai/async-locks'; import { timedCancellable as timedCancellableF } from '@matrixai/contexts/dist/functions'; -import { context, timedCancellable 
} from '@matrixai/contexts/dist/decorators'; +import { + context, + timed, + timedCancellable, +} from '@matrixai/contexts/dist/decorators'; import { withF, withG } from '@matrixai/resources'; import { tagLast } from './types'; import * as vaultsErrors from './errors'; @@ -440,25 +444,15 @@ class VaultInternal { } public async log( - { - ref = 'HEAD', - limit, - }: { - ref: string | VaultRef; - limit?: number; - }, + ref?: string | VaultRef, + limit?: number, ctx?: Partial, ): Promise>; @ready(new vaultsErrors.ErrorVaultNotRunning()) @timedCancellable(true) public async log( - { - ref = 'HEAD', - limit, - }: { - ref: string | VaultRef; - limit?: number; - }, + ref: string | VaultRef = 'HEAD', + limit: number, @context ctx: ContextTimed, ): Promise> { vaultsUtils.assertRef(ref); @@ -607,28 +601,19 @@ class VaultInternal { public writeG( g: (fs: FileSystemWritable) => AsyncGenerator, tran?: DBTransaction, - ctx?: ContextTimed, + ctx?: Partial, ): AsyncGenerator; @ready(new vaultsErrors.ErrorVaultNotRunning()) + @timed() public writeG( g: (fs: FileSystemWritable) => AsyncGenerator, tran: DBTransaction, - ctx: ContextTimed, + @context ctx: ContextTimed, ): AsyncGenerator { if (tran == null) { return this.db.withTransactionG((tran) => this.writeG(g, tran, ctx)); } - // TODO: check if this works. it probably doesnt cuz of the generator - if (ctx == null) { - const parentThis = this; - const f = async function (ctx: ContextTimed) { - parentThis.writeG(g, tran, ctx); - }; - // Call the method with a created context - timedCancellableF(f, true)(); - } - const efsVault = this.efsVault; const vaultMetadataDbPath = this.vaultMetadataDbPath; // In AsyncGenerators, "this" refers to the generator itself, so we alias @@ -864,7 +849,7 @@ class VaultInternal { }); return remoteVaultId; }, - ctx, + // Ctx, ); } catch (e) { // If the error flag is set, and we have the generalised SmartHttpError from diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 86b35f343..1928c8322 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -30,7 +30,11 @@ import { import { IdInternal } from '@matrixai/id'; import { withF, withG } from '@matrixai/resources'; import { LockBox, RWLockWriter } from '@matrixai/async-locks'; -import { context, timedCancellable } from '@matrixai/contexts/dist/decorators'; +import { + context, + timedCancellable, + timed, +} from '@matrixai/contexts/dist/decorators'; import Logger from '@matrixai/logger'; import VaultInternal from './VaultInternal'; import * as vaultsEvents from './events'; @@ -479,7 +483,9 @@ class VaultManager { @context ctx: ContextTimed, ): Promise { if (tran == null) { - return this.db.withTransactionF((tran) => this.closeVault(vaultId, tran, ctx)); + return this.db.withTransactionF((tran) => + this.closeVault(vaultId, tran, ctx), + ); } if ((await this.getVaultName(vaultId, tran)) == null) { @@ -747,6 +753,7 @@ class VaultManager { ); return await this.vaultLocks.withF( [vaultId.toString(), RWLockWriter, 'write'], + ctx, async () => { const vault = await VaultInternal.cloneVaultInternal( { @@ -872,6 +879,7 @@ class VaultManager { ctx?: Partial, ): AsyncGenerator; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @timed() public async *handleInfoRequest( vaultId: VaultId, tran: DBTransaction, @@ -917,6 +925,7 @@ class VaultManager { ctx?: Partial, ): AsyncGenerator; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @timed() public async *handlePackRequest( vaultId: VaultId, body: Array, @@ -965,6 +974,7 @@ class 
VaultManager { vaultIdEncoded: VaultIdEncoded; vaultPermissions: VaultAction[]; }>; + @timed() public async *scanVaults( targetNodeId: NodeId, @context ctx: ContextTimed, @@ -1008,6 +1018,7 @@ class VaultManager { vaultName: VaultName; vaultPermissions: VaultAction[]; }>; + @timed() public async *handleScanVaults( nodeId: NodeId, tran: DBTransaction, @@ -1084,7 +1095,9 @@ class VaultManager { ctx: ContextTimed, ): Promise { if (tran == null) { - return this.db.withTransactionF((tran) => this.getVault(vaultId, tran, ctx)); + return this.db.withTransactionF((tran) => + this.getVault(vaultId, tran, ctx), + ); } const vaultIdString = vaultId.toString() as VaultIdString; @@ -1175,6 +1188,7 @@ class VaultManager { ctx?: Partial, ): AsyncGenerator; @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + @timed() public async *withVaultsG( vaultIds: Array, g: (...args: Array) => AsyncGenerator, diff --git a/src/vaults/VaultOps.ts b/src/vaults/VaultOps.ts index 06efbc98e..3328abd32 100644 --- a/src/vaults/VaultOps.ts +++ b/src/vaults/VaultOps.ts @@ -44,15 +44,20 @@ async function renameSecret( secretName: string, secretNameNew: string, logger?: Logger, + ctx?: ContextTimed, ): Promise { - await vault.writeF(async (efs) => { - if (!(await efs.exists(secretName))) { - throw new vaultsErrors.ErrorSecretsSecretUndefined( - 'Secret does not exist, can not rename', - ); - } - await efs.rename(secretName, secretNameNew); - }); + await vault.writeF( + async (efs) => { + if (!(await efs.exists(secretName))) { + throw new vaultsErrors.ErrorSecretsSecretUndefined( + 'Secret does not exist, can not rename', + ); + } + await efs.rename(secretName, secretNameNew); + }, + undefined, + ctx, + ); logger?.info( `Renamed secret ${secretName} to ${secretNameNew} in vault ${vault.vaultId}`, ); @@ -110,34 +115,38 @@ async function deleteSecret( secretName: string, fileOptions?: FileOptions, logger?: Logger, + ctx?: ContextTimed, ): Promise { - await vault.writeF(async (efs) => { - try { - const stat = await efs.stat(secretName); - if (stat.isDirectory()) { - await efs.rmdir(secretName, fileOptions); - logger?.info(`Deleted directory at '${secretName}'`); - } else { - // Remove the specified file - await efs.unlink(secretName); - logger?.info(`Deleted secret at '${secretName}'`); - } - } catch (e) { - if (e.code === 'ENOENT') { - throw new vaultsErrors.ErrorSecretsSecretUndefined( - `Secret with name: ${secretName} does not exist`, - { cause: e }, - ); - } - if (e.code === 'ENOTEMPTY') { - throw new vaultsErrors.ErrorVaultsRecursive( - `Could not delete directory '${secretName}' without recursive option`, - { cause: e }, - ); - } - throw e; + try { + await vault.writeF( + async (efs) => { + const stat = await efs.stat(secretName); + if (stat.isDirectory()) { + await efs.rmdir(secretName, fileOptions); + logger?.info(`Deleted directory at '${secretName}'`); + } else { + await efs.unlink(secretName); + logger?.info(`Deleted secret at '${secretName}'`); + } + }, + undefined, + ctx, + ); + } catch (e) { + if (e.code === 'ENOENT') { + throw new vaultsErrors.ErrorSecretsSecretUndefined( + `Secret with name: ${secretName} does not exist`, + { cause: e }, + ); } - }); + if (e.code === 'ENOTEMPTY') { + throw new vaultsErrors.ErrorVaultsRecursive( + `Could not delete directory '${secretName}' without recursive option`, + { cause: e }, + ); + } + throw e; + } } /** @@ -149,16 +158,21 @@ async function mkdir( dirPath: string, fileOptions?: FileOptions, logger?: Logger, + ctx?: ContextTimed, ): Promise { const recursive = 
fileOptions?.recursive ?? false; // Technically, writing an empty directory won't make a commit, and doesn't // need a write resource as git doesn't track empty directories. It is // still being used to allow concurrency. try { - await vault.writeF(async (efs) => { - await efs.mkdir(dirPath, fileOptions); - logger?.info(`Created secret directory at '${dirPath}'`); - }); + await vault.writeF( + async (efs) => { + await efs.mkdir(dirPath, fileOptions); + logger?.info(`Created secret directory at '${dirPath}'`); + }, + undefined, + ctx, + ); } catch (e) { logger?.error(`Failed to create directory '${dirPath}'. Reason: ${e.code}`); if (e.code === 'ENOENT' && !recursive) { @@ -189,44 +203,51 @@ async function addSecretDirectory( secretDirectory: string, fs = require('fs'), logger?: Logger, + ctx?: ContextTimed, ): Promise { const absoluteDirPath = path.resolve(secretDirectory); + await vault.writeF( + async (efs) => { + for await (const secretPath of vaultsUtils.readDirRecursively( + fs, + absoluteDirPath, + )) { + // Determine the path to the secret + const relPath = path.relative( + path.dirname(absoluteDirPath), + secretPath, + ); + // Obtain the content of the secret + const content = await fs.promises.readFile(secretPath); - await vault.writeF(async (efs) => { - for await (const secretPath of vaultsUtils.readDirRecursively( - fs, - absoluteDirPath, - )) { - // Determine the path to the secret - const relPath = path.relative(path.dirname(absoluteDirPath), secretPath); - // Obtain the content of the secret - const content = await fs.promises.readFile(secretPath); - - if (await efs.exists(relPath)) { - try { - // Write secret into vault - await efs.writeFile(relPath, content); - logger?.info(`Added secret at directory '${relPath}'`); - } catch (e) { - // Warn of a failed addition but continue operation - logger?.warn(`Adding secret ${relPath} failed`); - throw e; - } - } else { - try { - // Create directory if it doesn't exist - await vaultsUtils.mkdirExists(efs, path.dirname(relPath)); - // Write secret into vault - await efs.writeFile(relPath, content, {}); - logger?.info(`Added secret to directory at '${relPath}'`); - } catch (e) { - // Warn of a failed addition but continue operation - logger?.warn(`Adding secret ${relPath} failed`); - throw e; + if (await efs.exists(relPath)) { + try { + // Write secret into vault + await efs.writeFile(relPath, content); + logger?.info(`Added secret at directory '${relPath}'`); + } catch (e) { + // Warn of a failed addition but continue operation + logger?.warn(`Adding secret ${relPath} failed`); + throw e; + } + } else { + try { + // Create directory if it doesn't exist + await vaultsUtils.mkdirExists(efs, path.dirname(relPath)); + // Write secret into vault + await efs.writeFile(relPath, content, {}); + logger?.info(`Added secret to directory at '${relPath}'`); + } catch (e) { + // Warn of a failed addition but continue operation + logger?.warn(`Adding secret ${relPath} failed`); + throw e; + } } } - } - }); + }, + undefined, + ctx, + ); } /** @@ -250,27 +271,32 @@ async function writeSecret( secretName: string, content: Buffer | string, logger?: Logger, + ctx?: ContextTimed, ): Promise { - await vault.writeF(async (efs) => { - try { - await efs.writeFile(secretName, content); - logger?.info(`Wrote secret ${secretName} in vault ${vault.vaultId}`); - } catch (e) { - if (e.code === 'ENOENT') { - throw new vaultsErrors.ErrorSecretsSecretUndefined( - `One or more parent directories for '${secretName}' do not exist`, - { cause: e }, - ); - } - if (e.code 
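// A minimal sketch of the `vault.writeF(fn, tran, ctx)` calling convention
// the VaultOps helpers above now follow: the second parameter is the
// optional DB transaction, so `undefined` is passed explicitly to keep the
// forwarded context in the third position. The helper below and its import
// paths are illustrative assumptions; only the call shape comes from the patch.
import type { ContextTimed } from '@matrixai/contexts';
import type { Vault } from '../vaults/Vault';

async function touchSecret(
  vault: Vault,
  secretName: string,
  ctx?: ContextTimed,
): Promise<void> {
  await vault.writeF(
    async (efs) => {
      // Any mutation made here ends up in a single vault commit
      await efs.writeFile(secretName, '');
    },
    undefined, // No explicit transaction; writeF opens its own
    ctx, // Forwarded so the write can be timed out or cancelled
  );
}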
=== 'EISDIR') { - throw new vaultsErrors.ErrorSecretsIsDirectory( - `Secret path '${secretName}' is a directory`, - { cause: e }, - ); - } - throw e; + try { + await vault.writeF( + async (efs) => { + await efs.writeFile(secretName, content); + logger?.info(`Wrote secret ${secretName} in vault ${vault.vaultId}`); + }, + undefined, + ctx, + ); + } catch (e) { + if (e.code === 'ENOENT') { + throw new vaultsErrors.ErrorSecretsSecretUndefined( + `One or more parent directories for '${secretName}' do not exist`, + { cause: e }, + ); } - }); + if (e.code === 'EISDIR') { + throw new vaultsErrors.ErrorSecretsIsDirectory( + `Secret path '${secretName}' is a directory`, + { cause: e }, + ); + } + throw e; + } } export { diff --git a/tests/git/http.test.ts b/tests/git/http.test.ts index 228ab11cd..0341724fb 100644 --- a/tests/git/http.test.ts +++ b/tests/git/http.test.ts @@ -1,6 +1,8 @@ +import type { ContextTimed } from '@matrixai/contexts'; import fs from 'fs'; import path from 'path'; import os from 'os'; +import { timedCancellable as timedCancellableF } from '@matrixai/contexts/dist/functions'; import git from 'isomorphic-git'; import { test } from '@fast-check/jest'; import fc from 'fast-check'; @@ -82,63 +84,64 @@ describe('Git Http', () => { } }); test('advertiseRefGenerator', async () => { - await gitTestUtils.createGitRepo({ - ...gitDirs, - author: 'tester', - commits: [ - { - message: 'commit1', - files: [ - { - name: 'file1', - contents: 'this is a file', - }, - ], - }, - { - message: 'commit2', - files: [ - { - name: 'file2', - contents: 'this is another file', - }, - ], - }, - { - message: 'commit3', - files: [ - { - name: 'file1', - contents: 'this is a changed file', - }, - ], - }, - ], - }); - const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; - const gen = gitHttp.advertiseRefGenerator(gitDirs, ctx); - let response = ''; - for await (const result of gen) { - response += result.toString(); - } - // Header - expect(response).toInclude('001e# service=git-upload-pack\n'); - // Includes flush packets - expect(response).toInclude('0000'); - // Includes capabilities - expect(response).toIncludeMultiple([ - 'side-band-64k', - 'symref=HEAD:refs/heads/master', - 'agent=git/isomorphic-git@1.8.1', - ]); - // HEAD commit is listed twice as `HEAD` and `master` - const headCommit = (await git.log({ ...gitDirs, ref: 'HEAD' }))[0].oid; - expect(response).toIncludeRepeated(headCommit, 2); - // `HEAD` and `master` are both listed - expect(response).toIncludeMultiple(['HEAD', 'master']); - // A null byte is included to delimit first line and capabilities - expect(response).toInclude('\0'); + const f = async (ctx: ContextTimed) => { + await gitTestUtils.createGitRepo({ + ...gitDirs, + author: 'tester', + commits: [ + { + message: 'commit1', + files: [ + { + name: 'file1', + contents: 'this is a file', + }, + ], + }, + { + message: 'commit2', + files: [ + { + name: 'file2', + contents: 'this is another file', + }, + ], + }, + { + message: 'commit3', + files: [ + { + name: 'file1', + contents: 'this is a changed file', + }, + ], + }, + ], + }); + const gen = gitHttp.advertiseRefGenerator(gitDirs, ctx); + let response = ''; + for await (const result of gen) { + response += result.toString(); + } + // Header + expect(response).toInclude('001e# service=git-upload-pack\n'); + // Includes flush packets + expect(response).toInclude('0000'); + // Includes capabilities + expect(response).toIncludeMultiple([ + 'side-band-64k', + 'symref=HEAD:refs/heads/master', + 
'agent=git/isomorphic-git@1.8.1', + ]); + // HEAD commit is listed twice as `HEAD` and `master` + const headCommit = (await git.log({ ...gitDirs, ref: 'HEAD' }))[0].oid; + expect(response).toIncludeRepeated(headCommit, 2); + // `HEAD` and `master` are both listed + expect(response).toIncludeMultiple(['HEAD', 'master']); + // A null byte is included to delimit first line and capabilities + expect(response).toInclude('\0'); + }; + await timedCancellableF(f, true)(); }); test('parsePackRequest', async () => { const data = Buffer.from( @@ -163,105 +166,107 @@ describe('Git Http', () => { }, ); test('generatePackData', async () => { - await gitTestUtils.createGitRepo({ - ...gitDirs, - author: 'tester', - commits: [ - { - message: 'commit1', - files: [ - { - name: 'file1', - contents: 'this is a file', - }, - ], - }, - { - message: 'commit2', - files: [ - { - name: 'file2', - contents: 'this is another file', - }, - ], - }, - { - message: 'commit3', - files: [ - { - name: 'file1', - contents: 'this is a changed file', - }, - ], - }, - ], - }); - const objectIds = await gitUtils.listObjectsAll(gitDirs); - const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; - const gen = gitHttp.generatePackData({ ...gitDirs, objectIds }, ctx); - let acc = Buffer.alloc(0); - for await (const line of gen) { - acc = Buffer.concat([acc, line.subarray(5)]); - } - const packPath = path.join(gitDirs.dir, 'pack'); - await fs.promises.writeFile(packPath, acc); - // Checking that all objectIds are included and packFile is valid using isometric git - const result = await git.indexPack({ - ...gitDirs, - filepath: 'pack', - }); - expect(result.oids).toIncludeAllMembers(objectIds); + const f = async (ctx: ContextTimed) => { + await gitTestUtils.createGitRepo({ + ...gitDirs, + author: 'tester', + commits: [ + { + message: 'commit1', + files: [ + { + name: 'file1', + contents: 'this is a file', + }, + ], + }, + { + message: 'commit2', + files: [ + { + name: 'file2', + contents: 'this is another file', + }, + ], + }, + { + message: 'commit3', + files: [ + { + name: 'file1', + contents: 'this is a changed file', + }, + ], + }, + ], + }); + const objectIds = await gitUtils.listObjectsAll(gitDirs, ctx); + const gen = gitHttp.generatePackData({ ...gitDirs, objectIds }, ctx); + let acc = Buffer.alloc(0); + for await (const line of gen) { + acc = Buffer.concat([acc, line.subarray(5)]); + } + const packPath = path.join(gitDirs.dir, 'pack'); + await fs.promises.writeFile(packPath, acc); + // Checking that all objectIds are included and packFile is valid using isometric git + const result = await git.indexPack({ + ...gitDirs, + filepath: 'pack', + }); + expect(result.oids).toIncludeAllMembers(objectIds); + }; + await timedCancellableF(f, true)(); }); test('generatePackRequest', async () => { - await gitTestUtils.createGitRepo({ - ...gitDirs, - author: 'tester', - commits: [ - { - message: 'commit1', - files: [ - { - name: 'file1', - contents: 'this is a file', - }, - ], - }, - { - message: 'commit2', - files: [ - { - name: 'file2', - contents: 'this is another file', - }, - ], - }, - { - message: 'commit3', - files: [ - { - name: 'file1', - contents: 'this is a changed file', - }, - ], - }, - ], - }); - const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; - const gen = gitHttp.generatePackRequest({ ...gitDirs, body: [] }, ctx); - let response = ''; - for await (const line of gen) { - response += line.toString(); - } - // NAK response for no common 
objects - expect(response).toInclude('0008NAK\n'); - // Pack data included on chanel 1 - expect(response).toInclude('\x01PACK'); - // Progress data included on chanel 2 - expect(response).toInclude('0017\x02progress is at 50%'); - // Flush packet included - expect(response).toInclude('0000'); + const f = async (ctx: ContextTimed) => { + await gitTestUtils.createGitRepo({ + ...gitDirs, + author: 'tester', + commits: [ + { + message: 'commit1', + files: [ + { + name: 'file1', + contents: 'this is a file', + }, + ], + }, + { + message: 'commit2', + files: [ + { + name: 'file2', + contents: 'this is another file', + }, + ], + }, + { + message: 'commit3', + files: [ + { + name: 'file1', + contents: 'this is a changed file', + }, + ], + }, + ], + }); + const gen = gitHttp.generatePackRequest({ ...gitDirs, body: [] }, ctx); + let response = ''; + for await (const line of gen) { + response += line.toString(); + } + // NAK response for no common objects + expect(response).toInclude('0008NAK\n'); + // Pack data included on chanel 1 + expect(response).toInclude('\x01PACK'); + // Progress data included on chanel 2 + expect(response).toInclude('0017\x02progress is at 50%'); + // Flush packet included + expect(response).toInclude('0000'); + }; + await timedCancellableF(f, true)(); }); test('end to end clone', async () => { await gitTestUtils.createGitRepo({ @@ -301,14 +306,14 @@ describe('Git Http', () => { const request = gitTestUtils.request(gitDirs); const newDir = path.join(dataDir, 'newRepo'); const newDirs = { - fs, + fs: fs, dir: newDir, gitdir: path.join(newDir, '.git'), gitDir: path.join(newDir, '.git'), }; await git.clone({ - fs, + fs: fs, dir: newDir, http: { request }, url: 'http://', @@ -357,14 +362,14 @@ describe('Git Http', () => { }); const newDir = path.join(dataDir, 'newRepo'); const newDirs = { - fs, + fs: fs, dir: newDir, gitdir: path.join(newDir, '.git'), gitDir: path.join(newDir, '.git'), }; const request = gitTestUtils.request(gitDirs); await git.clone({ - fs, + fs: fs, dir: newDir, http: { request }, url: 'http://', diff --git a/tests/git/utils.test.ts b/tests/git/utils.test.ts index d1ceb6ede..fe7aaa90e 100644 --- a/tests/git/utils.test.ts +++ b/tests/git/utils.test.ts @@ -1,6 +1,8 @@ +import type { ContextTimed } from '@matrixai/contexts'; import fs from 'fs'; import os from 'os'; import path from 'path'; +import { timedCancellable as timedCancellableF } from '@matrixai/contexts/dist/functions'; import git from 'isomorphic-git'; import fc from 'fast-check'; import { test } from '@fast-check/jest'; @@ -39,57 +41,59 @@ describe('Git utils', () => { }); test('listReferencesGenerator', async () => { - // Start with creating a git repo with commits - await gitTestUtils.createGitRepo({ - ...gitDirs, - author: 'tester', - commits: [ - { - message: 'commit1', - files: [ - { - name: 'file1', - contents: 'this is a file', - }, - ], - }, - { - message: 'commit2', - files: [ - { - name: 'file2', - contents: 'this is another file', - }, - ], - }, - { - message: 'commit3', - files: [ - { - name: 'file1', - contents: 'this is a changed file', - }, - ], - }, - ], - }); - - const headObjectId = ( - await git.log({ + const f = async (ctx: ContextTimed) => { + // Start with creating a git repo with commits + await gitTestUtils.createGitRepo({ ...gitDirs, - depth: 1, - }) - )[0].oid; - const expectedReferences = ['HEAD', 'refs/heads/master']; - const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; - for await (const [reference, objectId] of 
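// A minimal sketch of the test refactor applied above: the test body is
// declared as a function receiving a `ContextTimed`, and the
// `timedCancellable` helper (aliased `timedCancellableF`) wraps it so a
// context is constructed and injected when the wrapped function is invoked.
// The assertion inside `f` is illustrative; the `true` argument is assumed
// to select lazy cancellation, matching how these tests call it.
import type { ContextTimed } from '@matrixai/contexts';
import { timedCancellable as timedCancellableF } from '@matrixai/contexts/dist/functions';

test('example of a cancellable test body', async () => {
  const f = async (ctx: ContextTimed) => {
    // The injected context carries an AbortSignal that the code under test
    // can poll, for example via `ctx.signal.throwIfAborted()`
    expect(ctx.signal.aborted).toBe(false);
  };
  await timedCancellableF(f, true)();
});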
gitUtils.listReferencesGenerator( - { ...gitDirs }, - ctx, - )) { - expect(reference).toBeOneOf(expectedReferences); - expect(objectId).toBe(headObjectId); - } + author: 'tester', + commits: [ + { + message: 'commit1', + files: [ + { + name: 'file1', + contents: 'this is a file', + }, + ], + }, + { + message: 'commit2', + files: [ + { + name: 'file2', + contents: 'this is another file', + }, + ], + }, + { + message: 'commit3', + files: [ + { + name: 'file1', + contents: 'this is a changed file', + }, + ], + }, + ], + }); + + const headObjectId = ( + await git.log({ + ...gitDirs, + depth: 1, + }) + )[0].oid; + const expectedReferences = ['HEAD', 'refs/heads/master']; + for await (const [ + reference, + objectId, + ] of gitUtils.listReferencesGenerator({ ...gitDirs }, ctx)) { + expect(reference).toBeOneOf(expectedReferences); + expect(objectId).toBe(headObjectId); + } + }; + // Generate a context for the test case + await timedCancellableF(f, true)(); }); test('refCapability', async () => { await gitTestUtils.createGitRepo({ @@ -140,62 +144,64 @@ describe('Git utils', () => { } }); test('listObjects', async () => { - await gitTestUtils.createGitRepo({ - ...gitDirs, - author: 'tester', - commits: [ - { - message: 'commit1', - files: [ - { - name: 'file1', - contents: 'this is a file', - }, - ], - }, - { - message: 'commit2', - files: [ - { - name: 'file2', - contents: 'this is another file', - }, - ], - }, - { - message: 'commit3', - files: [ - { - name: 'file1', - contents: 'this is a changed file', - }, - ], - }, - ], - }); - - const commitIds = ( - await git.log({ + const f = async (ctx: ContextTimed) => { + await gitTestUtils.createGitRepo({ ...gitDirs, - ref: 'HEAD', - }) - ).map((v) => v.oid); + author: 'tester', + commits: [ + { + message: 'commit1', + files: [ + { + name: 'file1', + contents: 'this is a file', + }, + ], + }, + { + message: 'commit2', + files: [ + { + name: 'file2', + contents: 'this is another file', + }, + ], + }, + { + message: 'commit3', + files: [ + { + name: 'file1', + contents: 'this is a changed file', + }, + ], + }, + ], + }); - const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; - const objectList = await gitUtils.listObjects( - { - ...gitDirs, - wants: commitIds, - haves: [], - }, - ctx, - ); - const expectedObjectIds = await gitUtils.listObjectsAll(gitDirs); - // Found objects should include all the commits - expect(objectList).toIncludeAllMembers(commitIds); - // Since it was an exhaustive walk of all commits, all objectIds should be included - expect(objectList).toIncludeAllMembers(expectedObjectIds); + const commitIds = ( + await git.log({ + ...gitDirs, + ref: 'HEAD', + }) + ).map((v) => v.oid); + + const objectList = await gitUtils.listObjects( + { + ...gitDirs, + wants: commitIds, + haves: [], + }, + ctx, + ); + const expectedObjectIds = await gitUtils.listObjectsAll(gitDirs, ctx); + // Found objects should include all the commits + expect(objectList).toIncludeAllMembers(commitIds); + // Since it was an exhaustive walk of all commits, all objectIds should be included + expect(objectList).toIncludeAllMembers(expectedObjectIds); + }; + // Generate a context for the test case + await timedCancellableF(f, true)(); }); test.prop([ gitTestUtils.gitRequestDataArb, diff --git a/tests/vaults/VaultInternal.test.ts b/tests/vaults/VaultInternal.test.ts index ed3f6a689..92d4beb47 100644 --- a/tests/vaults/VaultInternal.test.ts +++ b/tests/vaults/VaultInternal.test.ts @@ -1,4 +1,4 @@ -import type { ContextCancellable } 
from '@matrixai/contexts'; +import type { ContextTimed } from '@matrixai/contexts'; import type { VaultId } from '@/vaults/types'; import type { Vault } from '@/vaults/Vault'; import type { LevelPath } from '@matrixai/db'; @@ -9,6 +9,7 @@ import path from 'path'; import fs from 'fs'; import git from 'isomorphic-git'; import { EncryptedFS } from 'encryptedfs'; +import { timedCancellable as timedCancellableF } from '@matrixai/contexts/dist/functions'; import { DB } from '@matrixai/db'; import { withF } from '@matrixai/resources'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; @@ -640,44 +641,62 @@ describe('VaultInternal', () => { await expect(vault.version(newRef2)).rejects.toThrow(); }); test('commit added if mutation in writeG', async () => { - const commit = (await vault.log())[0].commitId; - const gen = vault.writeG(async function* (efs): AsyncGenerator { - yield await efs.writeFile('secret-1', 'secret-content'); - }, {} as ContextCancellable); - for await (const _ of gen) { - // Do nothing - } - const log = await vault.log(); - expect(log).toHaveLength(2); - expect(log[0].commitId).not.toStrictEqual(commit); + const f = async (ctx: ContextTimed) => { + const commit = (await vault.log())[0].commitId; + const gen = vault.writeG( + async function* (efs): AsyncGenerator { + yield await efs.writeFile('secret-1', 'secret-content'); + }, + undefined, + ctx, + ); + for await (const _ of gen) { + // Do nothing + } + const log = await vault.log(); + expect(log).toHaveLength(2); + expect(log[0].commitId).not.toStrictEqual(commit); + }; + await timedCancellableF(f, true)(); }); test('no commit added if no mutation in writeG', async () => { - const commit = (await vault.log())[0].commitId; - const gen = vault.writeG(async function* ( - _efs, - ): AsyncGenerator {}, {} as ContextCancellable); - for await (const _ of gen) { - // Do nothing - } - const log = await vault.log(); - expect(log).toHaveLength(1); - expect(log[0].message).not.toContain('secret-1'); - expect(log[0].commitId).toStrictEqual(commit); + const f = async (ctx: ContextTimed) => { + const commit = (await vault.log())[0].commitId; + const gen = vault.writeG( + async function* (_efs): AsyncGenerator {}, + undefined, + ctx, + ); + for await (const _ of gen) { + // Do nothing + } + const log = await vault.log(); + expect(log).toHaveLength(1); + expect(log[0].message).not.toContain('secret-1'); + expect(log[0].commitId).toStrictEqual(commit); + }; + await timedCancellableF(f, true)(); }); test('no mutation to vault when part of a commit operation fails in writeG', async () => { - const gen = vault.writeG(async function* (efs): AsyncGenerator { - yield await efs.writeFile(secret1.name, secret1.content); - yield await efs.rename('notValid', 'randomName'); // Throws - }, {} as ContextCancellable); - // Failing commit operation - await expect(() => consumeGenerator(gen)).rejects.toThrow(); - - // Make sure secret1 wasn't written when the above commit failed - await vault.readF(async (efs) => { - expect(await efs.readdir('.')).not.toContain(secret1.name); - }); - // No new commit - expect(await vault.log()).toHaveLength(1); + const f = async (ctx: ContextTimed) => { + const gen = vault.writeG( + async function* (efs): AsyncGenerator { + yield await efs.writeFile(secret1.name, secret1.content); + yield await efs.rename('notValid', 'randomName'); // Throws + }, + undefined, + ctx, + ); + // Failing commit operation + await expect(() => consumeGenerator(gen)).rejects.toThrow(); + // Make sure secret1 wasn't written when the 
above commit failed + await vault.readF(async (efs) => { + expect(await efs.readdir('.')).not.toContain(secret1.name); + }); + // No new commit + expect(await vault.log()).toHaveLength(1); + }; + await timedCancellableF(f, true)(); }); test('no commit after readG', async () => { await vault.writeF(async (efs) => { @@ -783,35 +802,46 @@ describe('VaultInternal', () => { expect(finished).toBe(true); }); test('writeG respects read and write locking', async () => { - const lock = vault.getLock(); - // Hold a write lock - const [releaseWrite] = await lock.write()(); + const f = async (ctx: ContextTimed) => { + // Hold a write lock + const lock = vault.getLock(); + const [releaseWrite] = await lock.write()(); - let finished = false; - const writeGen = vault.writeG(async function* () { - yield; - finished = true; - yield; - }, {} as ContextCancellable); - const runP = consumeGenerator(writeGen); - await sleep(waitDelay); - expect(finished).toBe(false); - await releaseWrite(); - await runP; - expect(finished).toBe(true); + let finished = false; + const writeGen = vault.writeG( + async function* () { + yield; + finished = true; + yield; + }, + undefined, + ctx, + ); + const runP = consumeGenerator(writeGen); + await sleep(waitDelay); + expect(finished).toBe(false); + await releaseWrite(); + await runP; + expect(finished).toBe(true); - const [releaseRead] = await lock.read()(); - finished = false; - const writeGen2 = vault.writeG(async function* () { - yield; - finished = true; - yield; - }, {} as ContextCancellable); - const runP2 = consumeGenerator(writeGen2); - await sleep(waitDelay); - await releaseRead(); - await runP2; - expect(finished).toBe(true); + const [releaseRead] = await lock.read()(); + finished = false; + const writeGen2 = vault.writeG( + async function* () { + yield; + finished = true; + yield; + }, + undefined, + ctx, + ); + const runP2 = consumeGenerator(writeGen2); + await sleep(waitDelay); + await releaseRead(); + await runP2; + expect(finished).toBe(true); + }; + await timedCancellableF(f, true)(); }); test('readF respects write locking', async () => { const lock = vault.getLock(); @@ -922,46 +952,49 @@ describe('VaultInternal', () => { await releaseRead(); }); test('can acquire a write resource', async () => { - const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; - const acquireWrite = vault.acquireWrite(ctx); - await withF([acquireWrite], async ([efs]) => { - await efs.writeFile(secret1.name, secret1.content); - }); - await vault.readF(async (efs) => { - const content = await efs.readFile(secret1.name); - expect(content.toString()).toEqual(secret1.content); - }); + const f = async (ctx: ContextTimed) => { + const acquireWrite = vault.acquireWrite(undefined, ctx); + await withF([acquireWrite], async ([efs]) => { + await efs.writeFile(secret1.name, secret1.content); + }); + await vault.readF(async (efs) => { + const content = await efs.readFile(secret1.name); + expect(content.toString()).toEqual(secret1.content); + }); + }; + await timedCancellableF(f, true)(); }); test('acquiring write resource respects write locking', async () => { - const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; - const lock = vault.getLock(); - const [releaseWrite] = await lock.write()(); - let finished = false; - const writeP = withF([vault.acquireWrite(ctx)], async () => { - finished = true; - }); - await sleep(waitDelay); - expect(finished).toBe(false); - await releaseWrite(); - await writeP; - 
expect(finished).toBe(true); + const f = async (ctx: ContextTimed) => { + const lock = vault.getLock(); + const [releaseWrite] = await lock.write()(); + let finished = false; + const writeP = withF([vault.acquireWrite(undefined, ctx)], async () => { + finished = true; + }); + await sleep(waitDelay); + expect(finished).toBe(false); + await releaseWrite(); + await writeP; + expect(finished).toBe(true); + }; + await timedCancellableF(f, true)(); }); test('acquiring write resource respects read locking', async () => { - const abortController = new AbortController(); - const ctx = { signal: abortController.signal }; - const lock = vault.getLock(); - const [releaseRead] = await lock.read()(); - let finished = false; - const writeP = withF([vault.acquireWrite(ctx)], async () => { - finished = true; - }); - await sleep(waitDelay); - expect(finished).toBe(false); - await releaseRead(); - await writeP; - expect(finished).toBe(true); + const f = async (ctx: ContextTimed) => { + const lock = vault.getLock(); + const [releaseRead] = await lock.read()(); + let finished = false; + const writeP = withF([vault.acquireWrite(undefined, ctx)], async () => { + finished = true; + }); + await sleep(waitDelay); + expect(finished).toBe(false); + await releaseRead(); + await writeP; + expect(finished).toBe(true); + }; + await timedCancellableF(f, true)(); }); // Life-cycle test('can create with CreateVaultInternal', async () => { diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index 92d4801ae..1385c14ea 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -355,7 +355,7 @@ describe('VaultManager', () => { // Scanning vaults const abortController = new AbortController(); const ctx = { signal: abortController.signal } as ContextTimed; - const gen = vaultManager.handleScanVaults(nodeId1, ctx); + const gen = vaultManager.handleScanVaults(nodeId1, undefined, ctx); const vaults: Record = {}; for await (const vault of gen) { vaults[vault.vaultId] = [vault.vaultName, vault.vaultPermissions]; @@ -368,7 +368,11 @@ describe('VaultManager', () => { await expect(async () => { const abortController = new AbortController(); const ctx = { signal: abortController.signal } as ContextTimed; - for await (const _ of vaultManager.handleScanVaults(nodeId2, ctx)) { + for await (const _ of vaultManager.handleScanVaults( + nodeId2, + undefined, + ctx, + )) { // Should throw } }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); @@ -377,7 +381,11 @@ describe('VaultManager', () => { await expect(async () => { const abortController = new AbortController(); const ctx = { signal: abortController.signal } as ContextTimed; - for await (const _ of vaultManager.handleScanVaults(nodeId2, ctx)) { + for await (const _ of vaultManager.handleScanVaults( + nodeId2, + undefined, + ctx, + )) { // Should throw } }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); @@ -1499,9 +1507,7 @@ describe('VaultManager', () => { // Should throw due to no permission const testFun = async () => { - const abortController = new AbortController(); - const ctx = { signal: abortController.signal } as ContextTimed; - for await (const _ of vaultManager.scanVaults(targetNodeId, ctx)) { + for await (const _ of vaultManager.scanVaults(targetNodeId)) { // Should throw } }; @@ -1529,9 +1535,7 @@ describe('VaultManager', () => { await remoteKeynode1.acl.setVaultAction(vault2, nodeId1, 'clone'); // No permissions for vault3 - const abortController = new AbortController(); - const ctx = { signal: 
abortController.signal } as ContextTimed; - const gen = vaultManager.scanVaults(targetNodeId, ctx); + const gen = vaultManager.scanVaults(targetNodeId); const vaults: Record = {}; for await (const vault of gen) { vaults[vault.vaultIdEncoded] = [ diff --git a/tests/vaults/VaultOps/updatesecret.test.ts b/tests/vaults/VaultOps/updatesecret.test.ts deleted file mode 100644 index 89befde9a..000000000 --- a/tests/vaults/VaultOps/updatesecret.test.ts +++ /dev/null @@ -1,141 +0,0 @@ -import type { VaultId } from '@/vaults/types'; -import type { Vault } from '@/vaults/Vault'; -import type KeyRing from '@/keys/KeyRing'; -import type { LevelPath } from '@matrixai/db'; -import fs from 'fs'; -import path from 'path'; -import os from 'os'; -import { EncryptedFS } from 'encryptedfs'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { DB } from '@matrixai/db'; -import VaultInternal from '@/vaults/VaultInternal'; -import * as vaultOps from '@/vaults/VaultOps'; -import * as vaultsErrors from '@/vaults/errors'; -import * as vaultsUtils from '@/vaults/utils'; -import * as keysUtils from '@/keys/utils'; -import * as testNodesUtils from '../../nodes/utils'; -import * as testVaultsUtils from '../utils'; - -describe('updateSecret', () => { - const logger = new Logger('VaultOps', LogLevel.WARN, [new StreamHandler()]); - - const secretName = 'secret'; - const secretNameHidden = '.secret'; - const secretContent = 'secret-content'; - const secretContentNew = 'secret-content-new'; - const dirName = 'dir'; - const dirNameHidden = '.dir'; - - let dataDir: string; - let baseEfs: EncryptedFS; - let vaultId: VaultId; - let vaultInternal: VaultInternal; - let vault: Vault; - let db: DB; - let vaultsDbPath: LevelPath; - const vaultIdGenerator = vaultsUtils.createVaultIdGenerator(); - const dummyKeyRing = { - getNodeId: () => { - return testNodesUtils.generateRandomNodeId(); - }, - } as KeyRing; - - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - const dbPath = path.join(dataDir, 'efsDb'); - const dbKey = keysUtils.generateKey(); - baseEfs = await EncryptedFS.createEncryptedFS({ - dbKey, - dbPath, - logger, - }); - await baseEfs.start(); - - vaultId = vaultIdGenerator(); - await baseEfs.mkdir( - path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), - { - recursive: true, - }, - ); - db = await DB.createDB({ - dbPath: path.join(dataDir, 'db'), - logger, - }); - vaultsDbPath = ['vaults']; - vaultInternal = await VaultInternal.createVaultInternal({ - keyRing: dummyKeyRing, - vaultId, - efs: baseEfs, - logger: logger.getChild(VaultInternal.name), - fresh: true, - db, - vaultsDbPath: vaultsDbPath, - vaultName: 'VaultName', - }); - vault = vaultInternal as Vault; - }); - afterEach(async () => { - await vaultInternal.stop(); - await vaultInternal.destroy(); - await db.stop(); - await db.destroy(); - await baseEfs.stop(); - await baseEfs.destroy(); - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - }); - - test('updating secret content', async () => { - await testVaultsUtils.writeSecret(vault, secretName, secretContent); - await vaultOps.updateSecret(vault, secretName, secretContentNew); - await testVaultsUtils.expectSecret(vault, secretName, secretContentNew); - }); - test('updating secret content within a directory', async () => { - const secretPath = path.join(dirName, secretName); - await testVaultsUtils.writeSecret(vault, secretPath, secretContent); - await vaultOps.updateSecret(vault, secretPath, 
secretContentNew); - await testVaultsUtils.expectSecret(vault, secretPath, secretContentNew); - }); - test( - 'updating a secret multiple times', - async () => { - await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); - await testVaultsUtils.writeSecret(vault, secretName, secretContent); - for (let i = 0; i < 5; i++) { - const contentNew = `${secretContentNew}${i}`; - await vaultOps.updateSecret(vault, secretName, contentNew); - await testVaultsUtils.expectSecret(vault, secretName, contentNew); - } - }, - globalThis.defaultTimeout * 2, - ); - test('updating a secret that does not exist should fail', async () => { - await expect( - vaultOps.updateSecret(vault, secretName, secretContentNew), - ).rejects.toThrow(vaultsErrors.ErrorSecretsSecretUndefined); - }); - test('updating hidden secret content', async () => { - await testVaultsUtils.writeSecret(vault, secretNameHidden, secretContent); - await vaultOps.updateSecret(vault, secretNameHidden, secretContentNew); - await testVaultsUtils.expectSecret( - vault, - secretNameHidden, - secretContentNew, - ); - }); - test('updating hidden secret content within a hidden directory', async () => { - const secretPathHidden = path.join(dirNameHidden, secretNameHidden); - await testVaultsUtils.writeSecret(vault, secretPathHidden, secretContent); - await vaultOps.updateSecret(vault, secretPathHidden, secretContentNew); - await testVaultsUtils.expectSecret( - vault, - secretPathHidden, - secretContentNew, - ); - }); -}); From e94aa084010b86b8a732a162242f8d41d78910cb Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Tue, 7 Jan 2025 15:33:39 +1100 Subject: [PATCH 09/14] chore: unwrapped redundant loop --- src/acl/ACL.ts | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/acl/ACL.ts b/src/acl/ACL.ts index 47ace1f3f..d602dfb38 100644 --- a/src/acl/ACL.ts +++ b/src/acl/ACL.ts @@ -144,11 +144,7 @@ class ACL { if (permId in permIds) { nodePerm = permIds[permId]; // Get the first existing perm object - let perm: Permission; - for (const nodeId_ in nodePerm) { - perm = nodePerm[nodeId_]; - break; - } + const perm = Object.values(nodePerm)[0] // All perm objects are shared nodePerm[nodeId] = perm!; } else { From 52057055261683f82489372a61f69b9b7a8e0ebc Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Tue, 7 Jan 2025 15:38:42 +1100 Subject: [PATCH 10/14] fix: lint --- src/acl/ACL.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/acl/ACL.ts b/src/acl/ACL.ts index d602dfb38..cf4d4d543 100644 --- a/src/acl/ACL.ts +++ b/src/acl/ACL.ts @@ -144,7 +144,7 @@ class ACL { if (permId in permIds) { nodePerm = permIds[permId]; // Get the first existing perm object - const perm = Object.values(nodePerm)[0] + const perm = Object.values(nodePerm)[0]; // All perm objects are shared nodePerm[nodeId] = perm!; } else { From 5c8633fe5e29dd0c94b3193cdebb6f2faddfc495 Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Fri, 10 Jan 2025 11:39:28 +1100 Subject: [PATCH 11/14] chore: cleaned up vaults handlers --- src/client/handlers/KeysEncrypt.ts | 4 ++-- src/client/handlers/VaultsPermissionSet.ts | 28 ++++++++++++++++++---- src/client/handlers/VaultsSecretsEnv.ts | 2 ++ src/client/handlers/VaultsSecretsList.ts | 6 +++++ src/client/handlers/VaultsSecretsMkdir.ts | 6 ++--- src/client/handlers/VaultsSecretsRemove.ts | 2 ++ src/client/handlers/VaultsVersion.ts | 4 +--- 7 files changed, 38 insertions(+), 14 deletions(-) diff --git a/src/client/handlers/KeysEncrypt.ts b/src/client/handlers/KeysEncrypt.ts index fc8618635..11598460c 100644 --- 
a/src/client/handlers/KeysEncrypt.ts +++ b/src/client/handlers/KeysEncrypt.ts @@ -7,7 +7,7 @@ import type { import type KeyRing from '../../keys/KeyRing'; import type { PublicKey } from '../../keys/types'; import { UnaryHandler } from '@matrixai/rpc'; -import * as utils from '../../utils'; +import { never } from '../../utils'; import * as keysUtils from '../../keys/utils'; import * as keysErrors from '../../keys/errors'; @@ -27,7 +27,7 @@ class KeysEncrypt extends UnaryHandler< try { const jwk = input.publicKeyJwk; publicKey = keysUtils.publicKeyFromJWK(jwk); - if (publicKey == null) utils.never('failed to get public key from JWK'); + if (publicKey == null) never('failed to get public key from JWK'); } catch (e) { throw new keysErrors.ErrorPublicKeyParse(undefined, { cause: e }); } diff --git a/src/client/handlers/VaultsPermissionSet.ts b/src/client/handlers/VaultsPermissionSet.ts index a816ab5e0..9614ccb3f 100644 --- a/src/client/handlers/VaultsPermissionSet.ts +++ b/src/client/handlers/VaultsPermissionSet.ts @@ -6,11 +6,14 @@ import type { SuccessMessage, } from '../types'; import type ACL from '../../acl/ACL'; -import type { VaultActions } from '../../vaults/types'; +import type { VaultAction, VaultActions } from '../../vaults/types'; import type VaultManager from '../../vaults/VaultManager'; import type NotificationsManager from '../../notifications/NotificationsManager'; import type GestaltGraph from '../../gestalts/GestaltGraph'; +import type { NodeId } from '../../ids'; import { UnaryHandler } from '@matrixai/rpc'; +import { validateSync } from '../../validation'; +import { matchSync } from '../../utils'; import * as ids from '../../ids'; import * as vaultsUtils from '../../vaults/utils'; import * as vaultsErrors from '../../vaults/errors'; @@ -54,9 +57,24 @@ class VaultsPermissionSet extends UnaryHandler< `Vault "${input.nameOrId}" does not exist`, ); } - const nodeId = ids.parseNodeId(input.nodeIdEncoded); - const actions = input.vaultPermissionList.map( - vaultsUtils.parseVaultAction, + const { + nodeId, + actions, + }: { + nodeId: NodeId; + actions: Array; + } = validateSync( + (keyPath, value) => { + return matchSync(keyPath)( + [['nodeId'], () => ids.parseNodeId(value)], + [['actions'], () => value.map(vaultsUtils.parseVaultAction)], + () => value, + ); + }, + { + nodeId: input.nodeIdEncoded, + actions: input.vaultPermissionList, + }, ); // Checking if vault exists const vaultMeta = await vaultManager.getVaultMeta(vaultId, tran); @@ -74,7 +92,7 @@ class VaultsPermissionSet extends UnaryHandler< } // Sending notification await notificationsManager.sendNotification({ - nodeId, + nodeId: nodeId, data: { type: 'VaultShare', vaultId: vaultsUtils.encodeVaultId(vaultId), diff --git a/src/client/handlers/VaultsSecretsEnv.ts b/src/client/handlers/VaultsSecretsEnv.ts index 0c14e316d..9573cbfb6 100644 --- a/src/client/handlers/VaultsSecretsEnv.ts +++ b/src/client/handlers/VaultsSecretsEnv.ts @@ -56,6 +56,7 @@ class VaultsSecretsEnv extends DuplexHandler< fs, secretName, )) { + ctx.signal.throwIfAborted(); const fileContents = await fs.readFile(filePath); results.push({ filePath, @@ -78,6 +79,7 @@ class VaultsSecretsEnv extends DuplexHandler< ctx, ); for (const { filePath, value } of secrets) { + ctx.signal.throwIfAborted(); yield { nameOrId: nameOrId, secretName: filePath, diff --git a/src/client/handlers/VaultsSecretsList.ts b/src/client/handlers/VaultsSecretsList.ts index 5e5735357..a8f8a2ecb 100644 --- a/src/client/handlers/VaultsSecretsList.ts +++ 
b/src/client/handlers/VaultsSecretsList.ts @@ -1,3 +1,5 @@ +import type { ContextTimed } from '@matrixai/contexts'; +import type { JSONValue } from '@matrixai/rpc'; import type { DB } from '@matrixai/db'; import type { ClientRPCRequestParams, @@ -21,6 +23,9 @@ class VaultsSecretsList extends ServerHandler< > { public handle = async function* ( input: ClientRPCRequestParams, + _cancel: (reason?: any) => void, + _meta: Record, + ctx: ContextTimed, ): AsyncGenerator, void, void> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = this.container; @@ -58,6 +63,7 @@ class VaultsSecretsList extends ServerHandler< throw e; } for await (const file of files) { + ctx.signal.throwIfAborted(); const filePath = path.join(input.secretName, file.toString()); const stat = await fs.promises.stat(filePath); const type = stat.isFile() ? 'FILE' : 'DIRECTORY'; diff --git a/src/client/handlers/VaultsSecretsMkdir.ts b/src/client/handlers/VaultsSecretsMkdir.ts index fb6ef4957..cdbb7cfb9 100644 --- a/src/client/handlers/VaultsSecretsMkdir.ts +++ b/src/client/handlers/VaultsSecretsMkdir.ts @@ -34,6 +34,7 @@ class VaultsSecretsMkdir extends DuplexHandler< yield* db.withTransactionG( async function* (tran): AsyncGenerator { for await (const secretDirMessage of input) { + ctx.signal.throwIfAborted(); // Unpack input if (metadata == null) metadata = secretDirMessage.metadata ?? {}; const nameOrId = secretDirMessage.nameOrId; @@ -48,10 +49,7 @@ class VaultsSecretsMkdir extends DuplexHandler< ); } // Write directories. This doesn't need to be grouped by vault names, - // as no commit is created for empty directories anyway. The - // vaultOps.mkdir() method also returns an object of type - // SuccessOrErrorMessage. As such, we can return the result without - // doing any type conversion or extra processing. + // as no commit is created for empty directories anyway. yield await vaultManager.withVaults( [vaultId], async (vault) => { diff --git a/src/client/handlers/VaultsSecretsRemove.ts b/src/client/handlers/VaultsSecretsRemove.ts index c44392ab0..cfeb5fc5f 100644 --- a/src/client/handlers/VaultsSecretsRemove.ts +++ b/src/client/handlers/VaultsSecretsRemove.ts @@ -55,6 +55,7 @@ class VaultsSecretsRemove extends DuplexHandler< const vaultAcquires = await db.withTransactionF(async (tran) => { const vaultAcquires: Array> = []; for (const vaultName of headerMessage.vaultNames) { + ctx.signal.throwIfAborted(); const vaultIdFromName = await vaultManager.getVaultId(vaultName, tran); const vaultId = vaultIdFromName ?? 
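// A minimal sketch of the cooperative-cancellation pattern these handler
// changes introduce: long-running loops poll the abort signal on the
// injected context so a cancelled RPC call stops streaming promptly. The
// generator below is a generic illustration; the real handlers iterate over
// vault names, secrets, and directory entries instead.
import type { ContextTimed } from '@matrixai/contexts';

async function* streamWithCancellation<T, R>(
  items: Iterable<T>,
  process: (item: T) => Promise<R>,
  ctx: ContextTimed,
): AsyncGenerator<R, void, void> {
  for (const item of items) {
    // Throws the abort reason as soon as the caller cancels the call
    ctx.signal.throwIfAborted();
    yield await process(item);
  }
}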
vaultsUtils.decodeVaultId(vaultName); if (vaultId == null) { @@ -83,6 +84,7 @@ class VaultsSecretsRemove extends DuplexHandler< } let loopRan = false; for await (const message of input) { + ctx.signal.throwIfAborted(); loopRan = true; // Header messages should not be seen anymore if (message.type === 'VaultNamesHeaderMessage') { diff --git a/src/client/handlers/VaultsVersion.ts b/src/client/handlers/VaultsVersion.ts index bbb4b0dfb..c4eecddbd 100644 --- a/src/client/handlers/VaultsVersion.ts +++ b/src/client/handlers/VaultsVersion.ts @@ -56,9 +56,7 @@ class VaultsVersion extends UnaryHandler< ctx, ); // Checking if latest version ID - return { - latestVersion: latestOid === currentVersionId, - }; + return { latestVersion: latestOid === currentVersionId }; }); }; } From cf4426f31338058e35c9b9a9df32657f49d00fb3 Mon Sep 17 00:00:00 2001 From: Aryan Jassal Date: Fri, 10 Jan 2025 14:02:27 +1100 Subject: [PATCH 12/14] chore: updated `meta` type to match signature --- src/client/handlers/VaultsClone.ts | 2 +- src/client/handlers/VaultsCreate.ts | 2 +- src/client/handlers/VaultsDelete.ts | 2 +- src/client/handlers/VaultsList.ts | 2 +- src/client/handlers/VaultsLog.ts | 2 +- src/client/handlers/VaultsPermissionGet.ts | 2 +- src/client/handlers/VaultsPermissionUnset.ts | 4 ++- src/client/handlers/VaultsPull.ts | 22 +++++++++--- src/client/handlers/VaultsRename.ts | 2 +- src/client/handlers/VaultsScan.ts | 17 ++++++++-- src/client/handlers/VaultsSecretsCat.ts | 2 +- src/client/handlers/VaultsSecretsEnv.ts | 4 +-- src/client/handlers/VaultsSecretsGet.ts | 6 +++- src/client/handlers/VaultsSecretsList.ts | 8 +++-- src/client/handlers/VaultsSecretsMkdir.ts | 3 +- src/client/handlers/VaultsSecretsNew.ts | 2 +- src/client/handlers/VaultsSecretsNewDir.ts | 6 ++-- src/client/handlers/VaultsSecretsRemove.ts | 2 +- src/client/handlers/VaultsSecretsRename.ts | 2 +- src/client/handlers/VaultsSecretsStat.ts | 2 +- src/client/handlers/VaultsSecretsWriteFile.ts | 6 ++-- src/client/handlers/VaultsVersion.ts | 6 ++-- .../agent/handlers/NodesClaimNetworkSign.ts | 4 +-- .../agent/handlers/NodesClaimNetworkVerify.ts | 8 ++--- src/nodes/agent/handlers/NodesClaimsGet.ts | 10 ++++-- .../NodesClosestActiveConnectionsGet.ts | 12 +++++-- .../handlers/NodesClosestLocalNodesGet.ts | 9 ++++- .../handlers/NodesConnectionSignalFinal.ts | 34 ++++++++++++++++--- .../handlers/NodesConnectionSignalInitial.ts | 25 +++++++++++--- .../agent/handlers/NodesCrossSignClaim.ts | 13 +++++-- src/nodes/agent/handlers/NotificationsSend.ts | 13 +++++-- src/nodes/agent/handlers/VaultsScan.ts | 4 +-- 32 files changed, 178 insertions(+), 60 deletions(-) diff --git a/src/client/handlers/VaultsClone.ts b/src/client/handlers/VaultsClone.ts index 30ed70788..069b2ddf4 100644 --- a/src/client/handlers/VaultsClone.ts +++ b/src/client/handlers/VaultsClone.ts @@ -25,7 +25,7 @@ class VaultsClone extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = diff --git a/src/client/handlers/VaultsCreate.ts b/src/client/handlers/VaultsCreate.ts index bc578b18f..fb4518fc7 100644 --- a/src/client/handlers/VaultsCreate.ts +++ b/src/client/handlers/VaultsCreate.ts @@ -22,7 +22,7 @@ class VaultsCreate extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, 
): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = diff --git a/src/client/handlers/VaultsDelete.ts b/src/client/handlers/VaultsDelete.ts index 98c1d511f..714410c6d 100644 --- a/src/client/handlers/VaultsDelete.ts +++ b/src/client/handlers/VaultsDelete.ts @@ -23,7 +23,7 @@ class VaultsDelete extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = diff --git a/src/client/handlers/VaultsList.ts b/src/client/handlers/VaultsList.ts index 967975124..9820a9ffb 100644 --- a/src/client/handlers/VaultsList.ts +++ b/src/client/handlers/VaultsList.ts @@ -21,7 +21,7 @@ class VaultsList extends ServerHandler< public handle = async function* ( _input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): AsyncGenerator> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = diff --git a/src/client/handlers/VaultsLog.ts b/src/client/handlers/VaultsLog.ts index 7218c929d..16fe0e573 100644 --- a/src/client/handlers/VaultsLog.ts +++ b/src/client/handlers/VaultsLog.ts @@ -23,7 +23,7 @@ class VaultsLog extends ServerHandler< public handle = async function* ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): AsyncGenerator> { ctx.signal.throwIfAborted(); diff --git a/src/client/handlers/VaultsPermissionGet.ts b/src/client/handlers/VaultsPermissionGet.ts index d7b8eee13..2d25c6d6b 100644 --- a/src/client/handlers/VaultsPermissionGet.ts +++ b/src/client/handlers/VaultsPermissionGet.ts @@ -29,7 +29,7 @@ class VaultsPermissionGet extends ServerHandler< public handle = async function* ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): AsyncGenerator> { ctx.signal.throwIfAborted(); diff --git a/src/client/handlers/VaultsPermissionUnset.ts b/src/client/handlers/VaultsPermissionUnset.ts index 85f689918..6f1668181 100644 --- a/src/client/handlers/VaultsPermissionUnset.ts +++ b/src/client/handlers/VaultsPermissionUnset.ts @@ -49,7 +49,9 @@ class VaultsPermissionUnset extends UnaryHandler< const vaultId = vaultIdFromName ?? 
vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } const { nodeId, diff --git a/src/client/handlers/VaultsPull.ts b/src/client/handlers/VaultsPull.ts index 02dcc9659..917a79bab 100644 --- a/src/client/handlers/VaultsPull.ts +++ b/src/client/handlers/VaultsPull.ts @@ -7,8 +7,11 @@ import type { SuccessMessage, VaultsPullMessage, } from '../types'; +import type { NodeId } from '../../ids'; import type VaultManager from '../../vaults/VaultManager'; import { UnaryHandler } from '@matrixai/rpc'; +import { validateSync } from '../../validation'; +import { matchSync } from '../../utils'; import * as ids from '../../ids'; import * as vaultsUtils from '../../vaults/utils'; import * as vaultsErrors from '../../vaults/errors'; @@ -24,7 +27,7 @@ class VaultsPull extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = @@ -43,9 +46,20 @@ class VaultsPull extends UnaryHandler< `Vault "${input.nameOrId}" does not exist`, ); } - const nodeId = input.nodeIdEncoded - ? ids.parseNodeId(input.nodeIdEncoded) - : undefined; + const { nodeId }: { nodeId: NodeId } = validateSync( + (keyPath, value) => { + return matchSync(keyPath)( + [ + ['nodeId'], + () => (value != null ? ids.parseNodeId(value) : undefined), + ], + () => value, + ); + }, + { + nodeId: input.nodeIdEncoded, + }, + ); await vaultManager.pullVault( { vaultId: vaultId, diff --git a/src/client/handlers/VaultsRename.ts b/src/client/handlers/VaultsRename.ts index 9fde877b4..74415a907 100644 --- a/src/client/handlers/VaultsRename.ts +++ b/src/client/handlers/VaultsRename.ts @@ -23,7 +23,7 @@ class VaultsRename extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = diff --git a/src/client/handlers/VaultsScan.ts b/src/client/handlers/VaultsScan.ts index 8fe51dd0b..2f7dace6d 100644 --- a/src/client/handlers/VaultsScan.ts +++ b/src/client/handlers/VaultsScan.ts @@ -6,8 +6,11 @@ import type { NodeIdMessage, VaultsScanMessage, } from '../types'; +import type { NodeId } from '../../ids'; import type VaultManager from '../../vaults/VaultManager'; import { ServerHandler } from '@matrixai/rpc'; +import { validateSync } from '../../validation'; +import { matchSync } from '../../utils'; import * as ids from '../../ids'; class VaultsScan extends ServerHandler< @@ -20,12 +23,22 @@ class VaultsScan extends ServerHandler< public handle = async function* ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): AsyncGenerator> { ctx.signal.throwIfAborted(); const { vaultManager }: { vaultManager: VaultManager } = this.container; - const nodeId = ids.parseNodeId(input.nodeIdEncoded); + const { nodeId }: { nodeId: NodeId } = validateSync( + (keyPath, value) => { + return matchSync(keyPath)( + [['nodeId'], () => ids.parseNodeId(value)], + () => value, + ); + }, + { + nodeId: input.nodeIdEncoded, + }, + ); for await (const { vaultIdEncoded, vaultName, diff --git a/src/client/handlers/VaultsSecretsCat.ts 
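// A condensed restatement of the `validateSync`/`matchSync` parsing pattern
// that VaultsPull and VaultsScan adopt above: the encoded node ID from the
// request is routed through a keyed matcher so a malformed ID surfaces as a
// structured validation error rather than an unchecked parse failure. The
// wrapper function and its import paths are illustrative assumptions; the
// call shape is taken from the handlers in this patch.
import type { NodeId } from '../../ids';
import * as ids from '../../ids';
import { validateSync } from '../../validation';
import { matchSync } from '../../utils';

function parseRequestNodeId(input: { nodeIdEncoded: string }): NodeId {
  const { nodeId }: { nodeId: NodeId } = validateSync(
    (keyPath, value) => {
      return matchSync(keyPath)(
        // Only the `nodeId` key is parsed; anything else passes through
        [['nodeId'], () => ids.parseNodeId(value)],
        () => value,
      );
    },
    {
      nodeId: input.nodeIdEncoded,
    },
  );
  return nodeId;
}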
b/src/client/handlers/VaultsSecretsCat.ts index dfa3ec368..d675d9b3d 100644 --- a/src/client/handlers/VaultsSecretsCat.ts +++ b/src/client/handlers/VaultsSecretsCat.ts @@ -29,7 +29,7 @@ class VaultsSecretsCat extends DuplexHandler< ClientRPCRequestParams >, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): AsyncGenerator> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = diff --git a/src/client/handlers/VaultsSecretsEnv.ts b/src/client/handlers/VaultsSecretsEnv.ts index 9573cbfb6..becc15c07 100644 --- a/src/client/handlers/VaultsSecretsEnv.ts +++ b/src/client/handlers/VaultsSecretsEnv.ts @@ -25,7 +25,7 @@ class VaultsSecretsEnv extends DuplexHandler< ClientRPCRequestParams >, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): AsyncGenerator> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = @@ -59,7 +59,7 @@ class VaultsSecretsEnv extends DuplexHandler< ctx.signal.throwIfAborted(); const fileContents = await fs.readFile(filePath); results.push({ - filePath, + filePath: filePath, value: fileContents.toString(), }); } diff --git a/src/client/handlers/VaultsSecretsGet.ts b/src/client/handlers/VaultsSecretsGet.ts index a0fa0f94d..21d4cf42e 100644 --- a/src/client/handlers/VaultsSecretsGet.ts +++ b/src/client/handlers/VaultsSecretsGet.ts @@ -34,7 +34,11 @@ class VaultsSecretsGet extends ServerHandler< ); const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId); - if (vaultId == null) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + if (vaultId == null) { + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); + } // Get the contents of the file return await vaultManager.withVaults([vaultId], async (vault) => { const content = await vaultOps.getSecret(vault, input.secretName); diff --git a/src/client/handlers/VaultsSecretsList.ts b/src/client/handlers/VaultsSecretsList.ts index a8f8a2ecb..c2e194f25 100644 --- a/src/client/handlers/VaultsSecretsList.ts +++ b/src/client/handlers/VaultsSecretsList.ts @@ -24,7 +24,7 @@ class VaultsSecretsList extends ServerHandler< public handle = async function* ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): AsyncGenerator, void, void> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = @@ -36,7 +36,11 @@ class VaultsSecretsList extends ServerHandler< ); const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId); - if (vaultId == null) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + if (vaultId == null) { + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); + } return vaultId; }); diff --git a/src/client/handlers/VaultsSecretsMkdir.ts b/src/client/handlers/VaultsSecretsMkdir.ts index cdbb7cfb9..00e0f32cb 100644 --- a/src/client/handlers/VaultsSecretsMkdir.ts +++ b/src/client/handlers/VaultsSecretsMkdir.ts @@ -25,7 +25,7 @@ class VaultsSecretsMkdir extends DuplexHandler< public handle = async function* ( input: AsyncIterableIterator>, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): AsyncGenerator> { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = @@ -48,6 +48,7 @@ class VaultsSecretsMkdir extends DuplexHandler< `Vault "${nameOrId}" does not exist`, ); } + // Write directories. 
This doesn't need to be grouped by vault names, // as no commit is created for empty directories anyway. yield await vaultManager.withVaults( diff --git a/src/client/handlers/VaultsSecretsNew.ts b/src/client/handlers/VaultsSecretsNew.ts index bad8f3b15..23ff7360e 100644 --- a/src/client/handlers/VaultsSecretsNew.ts +++ b/src/client/handlers/VaultsSecretsNew.ts @@ -24,7 +24,7 @@ class VaultsSecretsNew extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = diff --git a/src/client/handlers/VaultsSecretsNewDir.ts b/src/client/handlers/VaultsSecretsNewDir.ts index c3283db41..1287fa077 100644 --- a/src/client/handlers/VaultsSecretsNewDir.ts +++ b/src/client/handlers/VaultsSecretsNewDir.ts @@ -26,7 +26,7 @@ class VaultsSecretsNewDir extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): Promise> => { const { @@ -42,7 +42,9 @@ class VaultsSecretsNewDir extends UnaryHandler< const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } await vaultManager.withVaults( [vaultId], diff --git a/src/client/handlers/VaultsSecretsRemove.ts b/src/client/handlers/VaultsSecretsRemove.ts index cfeb5fc5f..ed7147948 100644 --- a/src/client/handlers/VaultsSecretsRemove.ts +++ b/src/client/handlers/VaultsSecretsRemove.ts @@ -60,7 +60,7 @@ class VaultsSecretsRemove extends DuplexHandler< const vaultId = vaultIdFromName ?? 
vaultsUtils.decodeVaultId(vaultName); if (vaultId == null) { throw new vaultsErrors.ErrorVaultsVaultUndefined( - `Vault ${vaultName} does not exist`, + `Vault "${vaultName}" does not exist`, ); } // The resource acquisition will automatically create a transaction and diff --git a/src/client/handlers/VaultsSecretsRename.ts b/src/client/handlers/VaultsSecretsRename.ts index 37431beae..dea09fbae 100644 --- a/src/client/handlers/VaultsSecretsRename.ts +++ b/src/client/handlers/VaultsSecretsRename.ts @@ -24,7 +24,7 @@ class VaultsSecretsRename extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = diff --git a/src/client/handlers/VaultsSecretsStat.ts b/src/client/handlers/VaultsSecretsStat.ts index 3630ff59e..8f66527cc 100644 --- a/src/client/handlers/VaultsSecretsStat.ts +++ b/src/client/handlers/VaultsSecretsStat.ts @@ -24,7 +24,7 @@ class VaultsSecretsStat extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = diff --git a/src/client/handlers/VaultsSecretsWriteFile.ts b/src/client/handlers/VaultsSecretsWriteFile.ts index 39f4ffcab..1d6a30863 100644 --- a/src/client/handlers/VaultsSecretsWriteFile.ts +++ b/src/client/handlers/VaultsSecretsWriteFile.ts @@ -24,7 +24,7 @@ class VaultsSecretsWriteFile extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = @@ -37,7 +37,9 @@ class VaultsSecretsWriteFile extends UnaryHandler< const vaultId = vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId); if (vaultId == null) { - throw new vaultsErrors.ErrorVaultsVaultUndefined(); + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault "${input.nameOrId}" does not exist`, + ); } const secretContent = Buffer.from(input.secretContent, 'binary'); await vaultManager.withVaults( diff --git a/src/client/handlers/VaultsVersion.ts b/src/client/handlers/VaultsVersion.ts index c4eecddbd..835530b3c 100644 --- a/src/client/handlers/VaultsVersion.ts +++ b/src/client/handlers/VaultsVersion.ts @@ -23,7 +23,7 @@ class VaultsVersion extends UnaryHandler< public handle = async ( input: ClientRPCRequestParams, _cancel: (reason?: any) => void, - _meta: Record, + _meta: Record | undefined, ctx: ContextTimed, ): Promise> => { const { db, vaultManager }: { db: DB; vaultManager: VaultManager } = @@ -36,7 +36,9 @@ class VaultsVersion extends UnaryHandler< const vaultId = vaultIdFromName ?? 
       vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId);
     if (vaultId == null) {
-      throw new vaultsErrors.ErrorVaultsVaultUndefined();
+      throw new vaultsErrors.ErrorVaultsVaultUndefined(
+        `Vault "${input.nameOrId}" does not exist`,
+      );
     }
     const versionId = input.versionId;
     const [latestOid, currentVersionId] = await vaultManager.withVaults(
diff --git a/src/nodes/agent/handlers/NodesClaimNetworkSign.ts b/src/nodes/agent/handlers/NodesClaimNetworkSign.ts
index aac7cfd30..470885c82 100644
--- a/src/nodes/agent/handlers/NodesClaimNetworkSign.ts
+++ b/src/nodes/agent/handlers/NodesClaimNetworkSign.ts
@@ -18,10 +18,10 @@ class NodesClaimNetworkSign extends UnaryHandler<
 > {
   public handle = async (
     input: AgentRPCRequestParams,
-    _cancel,
+    _cancel: (reason?: any) => void,
     meta: Record | undefined,
   ): Promise> => {
-    const { nodeManager } = this.container;
+    const { nodeManager }: { nodeManager: NodeManager } = this.container;
     // Connections should always be validated
     const requestingNodeId = agentUtils.nodeIdFromMeta(meta);
     if (requestingNodeId == null) {
diff --git a/src/nodes/agent/handlers/NodesClaimNetworkVerify.ts b/src/nodes/agent/handlers/NodesClaimNetworkVerify.ts
index f5eaa9886..fb786f5e7 100644
--- a/src/nodes/agent/handlers/NodesClaimNetworkVerify.ts
+++ b/src/nodes/agent/handlers/NodesClaimNetworkVerify.ts
@@ -18,17 +18,15 @@ class NodesClaimNetworkVerify extends UnaryHandler<
 > {
   public handle = async (
     input: AgentRPCRequestParams,
-    _cancel,
+    _cancel: (reason?: any) => void,
     meta: Record | undefined,
   ): Promise> => {
+    const { nodeManager }: { nodeManager: NodeManager } = this.container;
     const requestingNodeId = agentUtils.nodeIdFromMeta(meta);
     if (requestingNodeId == null) {
       throw new agentErrors.ErrorAgentNodeIdMissing();
     }
-    return this.container.nodeManager.handleVerifyClaimNetwork(
-      requestingNodeId,
-      input,
-    );
+    return nodeManager.handleVerifyClaimNetwork(requestingNodeId, input);
   };
 }
diff --git a/src/nodes/agent/handlers/NodesClaimsGet.ts b/src/nodes/agent/handlers/NodesClaimsGet.ts
index 6f1a9d618..287c76901 100644
--- a/src/nodes/agent/handlers/NodesClaimsGet.ts
+++ b/src/nodes/agent/handlers/NodesClaimsGet.ts
@@ -1,4 +1,6 @@
+import type { ContextTimed } from '@matrixai/contexts';
 import type { DB } from '@matrixai/db';
+import type { JSONValue } from '@matrixai/rpc';
 import type Sigchain from '../../../sigchain/Sigchain';
 import type {
   AgentRPCRequestParams,
@@ -22,15 +24,19 @@ class NodesClaimsGet extends ServerHandler<
 > {
   public handle = async function* (
     _input: ClaimIdMessage,
+    _cancel: (reason?: any) => void,
+    _meta: Record | undefined,
+    ctx: ContextTimed,
   ): AsyncGenerator> {
-    const { sigchain, db } = this.container;
+    const { sigchain, db }: { sigchain: Sigchain; db: DB } = this.container;
     yield* db.withTransactionG(async function* (tran): AsyncGenerator<
       AgentRPCResponseResult
     > {
       for await (const [claimId, signedClaim] of sigchain.getSignedClaims(
-        { /* seek: seekClaimId,*/ order: 'asc' },
+        { order: 'asc' },
         tran,
       )) {
+        ctx.signal.throwIfAborted();
         const encodedClaim = claimsUtils.generateSignedClaim(signedClaim);
         const response: AgentClaimMessage = {
           claimIdEncoded: claimsUtils.encodeClaimId(claimId),
diff --git a/src/nodes/agent/handlers/NodesClosestActiveConnectionsGet.ts b/src/nodes/agent/handlers/NodesClosestActiveConnectionsGet.ts
index cee6c9b1d..e6f699fd4 100644
--- a/src/nodes/agent/handlers/NodesClosestActiveConnectionsGet.ts
+++ b/src/nodes/agent/handlers/NodesClosestActiveConnectionsGet.ts
@@ -1,3 +1,5 @@
+import type { ContextTimed } from '@matrixai/contexts';
+import type { JSONValue } from '@matrixai/rpc';
 import type {
   AgentRPCRequestParams,
   AgentRPCResponseResult,
@@ -24,10 +26,15 @@ class NodesClosestActiveConnectionsGet extends ServerHandler<
 > {
   public handle = async function* (
     input: AgentRPCRequestParams,
+    _cancel: (reason?: any) => void,
+    _meta: Record,
+    ctx: ContextTimed,
   ): AsyncGenerator> {
-    const { nodeConnectionManager } = this.container as {
+    const {
+      nodeConnectionManager,
+    }: {
       nodeConnectionManager: NodeConnectionManager;
-    };
+    } = this.container;
     const {
       nodeId,
@@ -47,6 +54,7 @@ class NodesClosestActiveConnectionsGet extends ServerHandler<
     const nodes = nodeConnectionManager.getClosestConnections(nodeId);
     for (const nodeInfo of nodes) {
+      ctx.signal.throwIfAborted();
       yield {
         nodeId: nodesUtils.encodeNodeId(nodeInfo.nodeId),
         connections: nodeInfo.connections,
diff --git a/src/nodes/agent/handlers/NodesClosestLocalNodesGet.ts b/src/nodes/agent/handlers/NodesClosestLocalNodesGet.ts
index a3c49fec6..b6a5cd088 100644
--- a/src/nodes/agent/handlers/NodesClosestLocalNodesGet.ts
+++ b/src/nodes/agent/handlers/NodesClosestLocalNodesGet.ts
@@ -1,4 +1,6 @@
+import type { ContextTimed } from '@matrixai/contexts';
 import type { DB } from '@matrixai/db';
+import type { JSONValue } from '@matrixai/rpc';
 import type {
   AgentRPCRequestParams,
   AgentRPCResponseResult,
@@ -31,8 +33,11 @@ class NodesClosestLocalNodesGet extends ServerHandler<
 > {
   public handle = async function* (
     input: AgentRPCRequestParams,
+    _cancel: (reason?: any) => void,
+    _meta: Record | undefined,
+    ctx: ContextTimed,
   ): AsyncGenerator> {
-    const { nodeGraph, db } = this.container;
+    const { nodeGraph, db }: { nodeGraph: NodeGraph; db: DB } = this.container;
     const {
       nodeId,
@@ -49,6 +54,7 @@ class NodesClosestLocalNodesGet extends ServerHandler<
         nodeId: input.nodeIdEncoded,
       },
     );
+    // Get all local nodes that are closest to the target node from the request
     return yield* db.withTransactionG(async function* (tran): AsyncGenerator<
       AgentRPCResponseResult
     > {
@@ -59,6 +65,7 @@
         tran,
       );
       for (const [nodeId, nodeContact] of closestNodes) {
+        ctx.signal.throwIfAborted();
         // Filter out local scoped addresses
         const nodeContactOutput: NodeContact = {};
         for (const key of Object.keys(nodeContact)) {
diff --git a/src/nodes/agent/handlers/NodesConnectionSignalFinal.ts b/src/nodes/agent/handlers/NodesConnectionSignalFinal.ts
index 4e8b52457..1d9f18506 100644
--- a/src/nodes/agent/handlers/NodesConnectionSignalFinal.ts
+++ b/src/nodes/agent/handlers/NodesConnectionSignalFinal.ts
@@ -1,12 +1,16 @@
 import type Logger from '@matrixai/logger';
+import type { JSONValue } from '@matrixai/rpc';
 import type {
   AgentRPCRequestParams,
   AgentRPCResponseResult,
   HolePunchRequestMessage,
 } from '../types';
+import type { NodeId } from '../../../ids';
 import type NodeConnectionManager from '../../NodeConnectionManager';
 import type { Host, Port } from '../../../network/types';
 import { UnaryHandler } from '@matrixai/rpc';
+import { validateSync } from '../../../validation';
+import { matchSync } from '../../../utils';
 import * as keysUtils from '../../../keys/utils';
 import * as ids from '../../../ids';
 import * as agentErrors from '../errors';
@@ -22,13 +26,33 @@ class NodesConnectionSignalFinal extends UnaryHandler<
 > {
   public handle = async (
     input: AgentRPCRequestParams,
-    _cancel,
-    meta,
+    _cancel: (reason?: any) => void,
+    meta: Record | undefined,
   ): Promise => {
-    const { nodeConnectionManager, logger } = this.container;
+    const {
+      nodeConnectionManager,
+      logger,
+    }: {
+      nodeConnectionManager: NodeConnectionManager;
+      logger: Logger;
+    } = this.container;
     // Connections should always be validated
-    const sourceNodeId = ids.parseNodeId(input.sourceNodeIdEncoded);
-    const targetNodeId = ids.parseNodeId(input.targetNodeIdEncoded);
+    const {
+      sourceNodeId,
+      targetNodeId,
+    }: { sourceNodeId: NodeId; targetNodeId: NodeId } = validateSync(
+      (keyPath, value) => {
+        return matchSync(keyPath)(
+          [['sourceNodeId'], () => ids.parseNodeId(value)],
+          [['targetNodeId'], () => ids.parseNodeId(value)],
+          () => value,
+        );
+      },
+      {
+        sourceNodeId: input.sourceNodeIdEncoded,
+        targetNodeId: input.targetNodeIdEncoded,
+      },
+    );
     const relayingNodeId = agentUtils.nodeIdFromMeta(meta);
     if (relayingNodeId == null) {
       throw new agentErrors.ErrorAgentNodeIdMissing();
diff --git a/src/nodes/agent/handlers/NodesConnectionSignalInitial.ts b/src/nodes/agent/handlers/NodesConnectionSignalInitial.ts
index 3cbd7ed30..b586dc280 100644
--- a/src/nodes/agent/handlers/NodesConnectionSignalInitial.ts
+++ b/src/nodes/agent/handlers/NodesConnectionSignalInitial.ts
@@ -4,13 +4,16 @@ import type {
   HolePunchSignalMessage,
   AddressMessage,
 } from '../types';
+import type { NodeId } from '../../../ids';
 import type NodeConnectionManager from '../../../nodes/NodeConnectionManager';
 import type { Host, Port } from '../../../network/types';
 import type { JSONValue } from '../../../types';
 import { UnaryHandler } from '@matrixai/rpc';
+import { validateSync } from '../../../validation';
+import { matchSync } from '../../../utils';
+import { never } from '../../../utils';
 import * as agentErrors from '../errors';
 import * as agentUtils from '../utils';
-import { never } from '../../../utils';
 import * as keysUtils from '../../../keys/utils';
 import * as ids from '../../../ids';
@@ -23,16 +26,30 @@ class NodesConnectionSignalInitial extends UnaryHandler<
 > {
   public handle = async (
     input: AgentRPCRequestParams,
-    _cancel,
+    _cancel: (reason?: any) => void,
     meta: Record | undefined,
   ): Promise> => {
-    const { nodeConnectionManager } = this.container;
+    const {
+      nodeConnectionManager,
+    }: {
+      nodeConnectionManager: NodeConnectionManager;
+    } = this.container;
     // Connections should always be validated
     const requestingNodeId = agentUtils.nodeIdFromMeta(meta);
     if (requestingNodeId == null) {
       throw new agentErrors.ErrorAgentNodeIdMissing();
     }
-    const targetNodeId = ids.parseNodeId(input.targetNodeIdEncoded);
+    const { targetNodeId }: { targetNodeId: NodeId } = validateSync(
+      (keyPath, value) => {
+        return matchSync(keyPath)(
+          [['targetNodeId'], () => ids.parseNodeId(value)],
+          () => value,
+        );
+      },
+      {
+        targetNodeId: input.targetNodeIdEncoded,
+      },
+    );
     const signature = Buffer.from(input.signature, 'base64url');
     // Checking signature, data is just `` concatenated
     const data = Buffer.concat([requestingNodeId, targetNodeId]);
diff --git a/src/nodes/agent/handlers/NodesCrossSignClaim.ts b/src/nodes/agent/handlers/NodesCrossSignClaim.ts
index e617c39a8..6b669e526 100644
--- a/src/nodes/agent/handlers/NodesCrossSignClaim.ts
+++ b/src/nodes/agent/handlers/NodesCrossSignClaim.ts
@@ -1,3 +1,4 @@
+import type { JSONValue } from '@matrixai/rpc';
 import type {
   AgentRPCRequestParams,
   AgentRPCResponseResult,
@@ -23,10 +24,16 @@ class NodesCrossSignClaim extends DuplexHandler<
 > {
   public handle = async function* (
     input: AsyncIterableIterator>,
-    _cancel,
-    meta,
+    _cancel: (reason?: any) => void,
+    meta: Record,
   ): AsyncGenerator> {
-    const { acl, nodeManager } = this.container;
+    const {
+      acl,
+      nodeManager,
+    }: {
+      acl: ACL;
+      nodeManager: NodeManager;
+    } = this.container;
     const requestingNodeId = agentUtils.nodeIdFromMeta(meta);
     if (requestingNodeId == null) {
       throw new agentErrors.ErrorAgentNodeIdMissing();
diff --git a/src/nodes/agent/handlers/NotificationsSend.ts b/src/nodes/agent/handlers/NotificationsSend.ts
index cb13081f2..33c236302 100644
--- a/src/nodes/agent/handlers/NotificationsSend.ts
+++ b/src/nodes/agent/handlers/NotificationsSend.ts
@@ -6,7 +6,6 @@ import type {
 } from '../types';
 import type KeyRing from '../../../keys/KeyRing';
 import type NotificationsManager from '../../../notifications/NotificationsManager';
-import type { SignedNotification } from '../../../notifications/types';
 import { UnaryHandler } from '@matrixai/rpc';
 import * as notificationsUtils from '../../../notifications/utils';
@@ -25,9 +24,17 @@ class NotificationsSend extends UnaryHandler<
   public handle = async (
     input: AgentRPCRequestParams,
   ): Promise => {
-    const { db, keyRing, notificationsManager } = this.container;
+    const {
+      db,
+      keyRing,
+      notificationsManager,
+    }: {
+      db: DB;
+      keyRing: KeyRing;
+      notificationsManager: NotificationsManager;
+    } = this.container;
     const notification = await notificationsUtils.verifyAndDecodeNotif(
-      input.signedNotificationEncoded as SignedNotification,
+      input.signedNotificationEncoded,
       keyRing.getNodeId(),
     );
     await db.withTransactionF((tran) =>
diff --git a/src/nodes/agent/handlers/VaultsScan.ts b/src/nodes/agent/handlers/VaultsScan.ts
index 9108a5ee2..72a4705bc 100644
--- a/src/nodes/agent/handlers/VaultsScan.ts
+++ b/src/nodes/agent/handlers/VaultsScan.ts
@@ -51,8 +51,8 @@ class VaultsScan extends ServerHandler<
       ctx.signal.throwIfAborted();
       yield {
         vaultIdEncoded: vaultsUtils.encodeVaultId(vaultId),
-        vaultName,
-        vaultPermissions,
+        vaultName: vaultName,
+        vaultPermissions: vaultPermissions,
       };
     }
   });

From a02796614efc7c65f9047e9c60dcb53bb952253e Mon Sep 17 00:00:00 2001
From: Aryan Jassal
Date: Fri, 10 Jan 2025 16:27:03 +1100
Subject: [PATCH 13/14] chore: made `SuccessOrErrorMessage` consistently tagged

---
 src/client/handlers/VaultsSecretsCat.ts    |  4 +-
 src/client/handlers/VaultsSecretsMkdir.ts  |  4 +-
 src/client/handlers/VaultsSecretsRemove.ts |  4 +-
 src/client/types.ts                        | 10 +--
 tests/client/handlers/vaults.test.ts       | 78 ++++++++++++----------
 5 files changed, 53 insertions(+), 47 deletions(-)

diff --git a/src/client/handlers/VaultsSecretsCat.ts b/src/client/handlers/VaultsSecretsCat.ts
index d675d9b3d..dcd8e0234 100644
--- a/src/client/handlers/VaultsSecretsCat.ts
+++ b/src/client/handlers/VaultsSecretsCat.ts
@@ -54,7 +54,7 @@ class VaultsSecretsCat extends DuplexHandler<
       try {
         const content = await vaultOps.getSecret(vault, secretName);
         return {
-          type: 'success',
+          type: 'SuccessMessage',
           success: true,
           secretContent: content.toString('binary'),
         };
@@ -64,7 +64,7 @@ class VaultsSecretsCat extends DuplexHandler<
           e instanceof vaultsErrors.ErrorSecretsIsDirectory
         ) {
           return {
-            type: 'error',
+            type: 'ErrorMessage',
             code: e.cause.code,
             reason: secretName,
           };
diff --git a/src/client/handlers/VaultsSecretsMkdir.ts b/src/client/handlers/VaultsSecretsMkdir.ts
index 00e0f32cb..22dee5aed 100644
--- a/src/client/handlers/VaultsSecretsMkdir.ts
+++ b/src/client/handlers/VaultsSecretsMkdir.ts
@@ -58,14 +58,14 @@ class VaultsSecretsMkdir extends DuplexHandler<
         await vaultOps.mkdir(vault, dirName, {
           recursive: metadata?.options?.recursive,
         });
-        return { type: 'success', success: true };
+        return { type: 'SuccessMessage', success: true };
       } catch (e) {
         if (
           e instanceof vaultsErrors.ErrorVaultsRecursive ||
           e instanceof vaultsErrors.ErrorSecretsSecretDefined
         ) {
           return {
-            type: 'error',
+            type: 'ErrorMessage',
             code: e.cause.code,
             reason: dirName,
           };
diff --git a/src/client/handlers/VaultsSecretsRemove.ts b/src/client/handlers/VaultsSecretsRemove.ts
index ed7147948..9fd7ad787 100644
--- a/src/client/handlers/VaultsSecretsRemove.ts
+++ b/src/client/handlers/VaultsSecretsRemove.ts
@@ -108,7 +108,7 @@ class VaultsSecretsRemove extends DuplexHandler<
           await efs.unlink(message.secretName);
         }
         yield {
-          type: 'success',
+          type: 'SuccessMessage',
           success: true,
         };
       } catch (e) {
@@ -120,7 +120,7 @@
         // EINVAL can be triggered if removing the root of the vault is
         // attempted.
         yield {
-          type: 'error',
+          type: 'ErrorMessage',
           code: e.code,
           reason: message.secretName,
         };
diff --git a/src/client/types.ts b/src/client/types.ts
index 8c1032364..d5a180153 100644
--- a/src/client/types.ts
+++ b/src/client/types.ts
@@ -205,12 +205,12 @@ type SuccessMessage = {
 };
 
 type SuccessMessageTagged = {
-  type: 'success';
+  type: 'SuccessMessage';
   success: boolean;
 };
 
 type ErrorMessageTagged = {
-  type: 'error';
+  type: 'ErrorMessage';
   code?: string | number;
   reason?: string;
   data?: JSONObject;
@@ -368,12 +368,12 @@
 type SecretIdentifierMessageTagged = SecretIdentifierMessage & {
   type: 'SecretIdentifierMessage';
 };
 
-type VaultNamesHeaderMessage = {
+type VaultNamesHeaderMessageTagged = {
   type: 'VaultNamesHeaderMessage';
   vaultNames: Array;
 };
 
-type SecretsRemoveHeaderMessage = VaultNamesHeaderMessage & {
+type SecretsRemoveHeaderMessage = VaultNamesHeaderMessageTagged & {
   recursive?: boolean;
 };
@@ -454,7 +454,7 @@ export type {
   SecretFilesMessage,
   SecretStatMessage,
   SecretIdentifierMessageTagged,
-  VaultNamesHeaderMessage,
+  VaultNamesHeaderMessageTagged,
   SecretsRemoveHeaderMessage,
   SignatureMessage,
   OverrideRPClientType,
diff --git a/tests/client/handlers/vaults.test.ts b/tests/client/handlers/vaults.test.ts
index 326bea568..c9d8870df 100644
--- a/tests/client/handlers/vaults.test.ts
+++ b/tests/client/handlers/vaults.test.ts
@@ -72,7 +72,6 @@ import * as vaultsUtils from '@/vaults/utils';
 import * as vaultsErrors from '@/vaults/errors';
 import * as clientErrors from '@/client/errors';
 import * as networkUtils from '@/network/utils';
-import * as utils from '@/utils';
 import * as testsUtils from '../../utils';
 
 describe('vaultsClone', () => {
@@ -1441,7 +1440,9 @@ describe('vaultsSecretsMkdir', () => {
     await writer.close();
     const consumeP = async () => {
       try {
-        for await (const _ of response.readable);
+        for await (const _ of response.readable) {
+          // Consume
+        }
       } catch (e) {
         throw e.cause;
       }
@@ -1465,7 +1466,7 @@
     await writer.close();
     // Check if the operation concluded as expected
     for await (const data of response.readable) {
-      expect(data.type).toEqual('success');
+      expect(data.type).toEqual('SuccessMessage');
     }
     await vaultManager.withVaults([vaultId], async (vault) => {
       await vault.readF(async (efs) => {
@@ -1485,8 +1486,9 @@
     await writer.close();
     // Check if the operation concluded as expected
     for await (const data of response.readable) {
-      expect(data.type).toEqual('error');
-      if (data.type !== 'error') utils.never("Type is asserted to be 'error'");
+      if (data.type !== 'ErrorMessage') {
+        fail('Type should be "ErrorMessage"');
+      }
       expect(data.code).toEqual('ENOENT');
       expect(data.reason).toEqual(dirPath);
     }
@@ -1519,7 +1521,7 @@ describe('vaultsSecretsMkdir', () => {
     await writer.close();
     // Check if the operation concluded as expected
     for await (const data of response.readable) {
-      expect(data.type).toEqual('success');
+      expect(data.type).toEqual('SuccessMessage');
     }
     await vaultManager.withVaults(
       [vaultId1, vaultId2],
@@ -1554,7 +1556,7 @@
     // Check if the operation concluded as expected
     let successCount = 0;
     for await (const data of response.readable) {
-      if (data.type === 'error') {
+      if (data.type === 'ErrorMessage') {
        expect(data.code).toEqual('ENOENT');
        expect(data.reason).toEqual(dirPath3);
      } else {
@@ -1592,8 +1594,10 @@
     await writer.close();
     // Check if the operation concluded as expected
     for await (const data of response.readable) {
-      expect(data.type).toEqual('error');
-      if (data.type !== 'error') utils.never("Type is asserted to be 'error'");
+      expect(data.type).toEqual('ErrorMessage');
+      if (data.type !== 'ErrorMessage') {
+        fail('Type should be "ErrorMessage"');
+      }
       expect(data.code).toEqual('EEXIST');
       expect(data.reason).toEqual(dirPath);
     }
@@ -1707,7 +1711,9 @@ describe('vaultsSecretsCat', () => {
     await writer.close();
     // Read response
     const consumeP = async () => {
-      for await (const _ of response.readable);
+      for await (const _ of response.readable) {
+        // Consume
+      }
     };
     await testsUtils.expectRemoteError(
       consumeP(),
@@ -1735,9 +1741,8 @@
     await writer.close();
     // Read response
     for await (const data of response.readable) {
-      expect(data.type).toEqual('success');
-      if (data.type !== 'success') {
-        utils.never("Type is asserted to be 'success'");
+      if (data.type !== 'SuccessMessage') {
+        fail('Type should be "SuccessMessage"');
       }
       expect(data.secretContent).toEqual(secretContent);
     }
@@ -1756,8 +1761,9 @@
     await writer.close();
     // Read response
     for await (const data of response.readable) {
-      expect(data.type).toEqual('error');
-      if (data.type !== 'error') utils.never("Type is asserted to be 'error'");
+      if (data.type !== 'ErrorMessage') {
+        fail('Type should be "ErrorMessage"');
+      }
       expect(data.code).toEqual('ENOENT');
       expect(data.reason).toEqual(secretName);
     }
@@ -1782,8 +1788,9 @@
     await writer.close();
     // Read response
     for await (const data of response.readable) {
-      expect(data.type).toEqual('error');
-      if (data.type !== 'error') utils.never("Type is asserted to be 'error'");
+      if (data.type !== 'ErrorMessage') {
+        fail('Type should be "ErrorMessage"');
+      }
       expect(data.code).toEqual('EISDIR');
       expect(data.reason).toEqual(secretName);
     }
@@ -1812,9 +1819,8 @@
     // Read response
     let totalContent = '';
     for await (const data of response.readable) {
-      expect(data.type).toEqual('success');
-      if (data.type !== 'success') {
-        utils.never("Type is asserted to be 'success'");
+      if (data.type !== 'SuccessMessage') {
+        fail('Type should be "SuccessMessage"');
       }
       totalContent += data.secretContent;
     }
@@ -1856,9 +1862,8 @@
     // Read response
     let totalContent = '';
     for await (const data of response.readable) {
-      expect(data.type).toEqual('success');
-      if (data.type !== 'success') {
-        utils.never("Type is asserted to be 'success'");
+      if (data.type !== 'SuccessMessage') {
+        fail('Type should be "SuccessMessage"');
       }
       totalContent += data.secretContent;
     }
@@ -1905,7 +1910,7 @@ describe('vaultsSecretsCat', () => {
     // Read response
     let totalContent = '';
     for await (const data of response.readable) {
-      if (data.type === 'success') {
+      if (data.type === 'SuccessMessage') {
         totalContent += data.secretContent;
       } else {
         expect(data.code).toEqual('ENOENT');
@@ -2486,9 +2491,10 @@ describe('vaultsSecretsRemove', () => {
     let loopRun = false;
     for await (const data of response.readable) {
       loopRun = true;
-      expect(data.type).toStrictEqual('error');
-      if (data.type !== 'error') utils.never("Type is asserted to be 'error'");
-      expect(data.code).toStrictEqual('EINVAL');
+      if (data.type !== 'ErrorMessage') {
+        fail('Type should be "ErrorMessage"');
+      }
+      expect(data.code).toEqual('EINVAL');
     }
     // Check
     expect(loopRun).toBeTruthy();
@@ -2533,7 +2539,7 @@ describe('vaultsSecretsRemove', () => {
     let loopRun = false;
     for await (const data of response.readable) {
       loopRun = true;
-      expect(data.type).toStrictEqual('success');
+      expect(data.type).toEqual('SuccessMessage');
     }
     expect(loopRun).toBeTruthy();
     // Check each secret was deleted
@@ -2544,7 +2550,7 @@ describe('vaultsSecretsRemove', () => {
       });
     });
   });
-  test('continues on error', async () => {
+  test('should continue on error', async () => {
     // Create secrets
     const secretName1 = 'test-secret1';
     const secretName2 = 'test-secret2';
@@ -2584,13 +2590,13 @@ describe('vaultsSecretsRemove', () => {
     await writer.close();
     let errorCount = 0;
     for await (const data of response.readable) {
-      if (data.type === 'error') {
+      if (data.type === 'ErrorMessage') {
         // No other file name should raise this error
-        expect(data.reason).toStrictEqual(invalidName);
+        expect(data.reason).toEqual(invalidName);
         errorCount++;
         continue;
       }
-      expect(data.type).toStrictEqual('success');
+      expect(data.type).toEqual('SuccessMessage');
     }
     // Only one error should have happened
     expect(errorCount).toEqual(1);
@@ -2642,7 +2648,7 @@ describe('vaultsSecretsRemove', () => {
     let loopRun = false;
     for await (const data of response.readable) {
       loopRun = true;
-      expect(data.type).toStrictEqual('success');
+      expect(data.type).toEqual('SuccessMessage');
     }
     expect(loopRun).toBeTruthy();
     // Ensure single log message for deleting the secrets
@@ -2700,7 +2706,7 @@ describe('vaultsSecretsRemove', () => {
     let loopRun = false;
     for await (const data of response.readable) {
       loopRun = true;
-      expect(data.type).toStrictEqual('success');
+      expect(data.type).toEqual('SuccessMessage');
     }
     // Ensure single log message for deleting the secrets
     expect(loopRun).toBeTruthy();
@@ -2750,7 +2756,7 @@ describe('vaultsSecretsRemove', () => {
     });
     await writer.close();
     for await (const data of response.readable) {
-      expect(data.type).toStrictEqual('success');
+      expect(data.type).toEqual('SuccessMessage');
     }
     // Check each secret and the secret directory were deleted
     await vaultManager.withVaults([vaultId], async (vault) => {
@@ -2793,7 +2799,7 @@ describe('vaultsSecretsRemove', () => {
     });
     await writer.close();
     for await (const data of response.readable) {
-      expect(data.type).toStrictEqual('error');
+      expect(data.type).toEqual('ErrorMessage');
     }
     // Check each secret and the secret directory were deleted
     await vaultManager.withVaults([vaultId], async (vault) => {

From 4780cd803fb6d9e15281bc011312440413728530 Mon Sep 17 00:00:00 2001
From: Aryan Jassal
Date: Mon, 13 Jan 2025 15:37:22 +1100
Subject: [PATCH 14/14] chore: added test for cancellation to `VaultsSecretsRemove`

---
 tests/client/handlers/vaults.test.ts | 57 ++++++++++++++++++++++
 1 file changed, 57 insertions(+)
diff --git a/tests/client/handlers/vaults.test.ts b/tests/client/handlers/vaults.test.ts
index c9d8870df..5cb8771c4 100644
--- a/tests/client/handlers/vaults.test.ts
+++ b/tests/client/handlers/vaults.test.ts
@@ -14,6 +14,7 @@ import os from 'os';
 import Logger, { formatting, LogLevel, StreamHandler } from '@matrixai/logger';
 import { DB } from '@matrixai/db';
 import { RPCClient } from '@matrixai/rpc';
+import { ErrorRPCTimedOut } from '@matrixai/rpc/dist/errors';
 import { WebSocketClient } from '@matrixai/ws';
 import TaskManager from '@/tasks/TaskManager';
 import ACL from '@/acl/ACL';
@@ -2463,6 +2464,62 @@ describe('vaultsSecretsRemove', () => {
       vaultsErrors.ErrorVaultsVaultUndefined,
     );
   });
+  test('should fail when cancelled', async () => {
+    // Inducing a cancellation by a timeout
+    const response = await rpcClient.methods.vaultsSecretsRemove({
+      timer: 100,
+    });
+    // Read response
+    const consumeP = async () => {
+      for await (const _ of response.readable) {
+        // Consume values
+      }
+    };
+    await expect(consumeP()).rejects.toThrow(ErrorRPCTimedOut);
+  });
+  test('should cancel in the midst of an operation', async () => {
+    // Create secrets
+    const secretName = 'test-secret1';
+    const vaultId = await vaultManager.createVault('test-vault');
+    const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId);
+    await vaultManager.withVaults([vaultId], async (vault) => {
+      await vault.writeF(async (efs) => {
+        await efs.writeFile(secretName, secretName);
+      });
+    });
+    // Inducing a cancellation by a timeout
+    const response = await rpcClient.methods.vaultsSecretsRemove({
+      timer: 100,
+    });
+    // Header message
+    const writer = response.writable.getWriter();
+    await writer.write({
+      type: 'VaultNamesHeaderMessage',
+      vaultNames: [vaultIdEncoded],
+    });
+    // Set a timeout so that the method will execute after RPC timeout
+    setTimeout(async () => {
+      // Content messages
+      await writer.write({
+        type: 'SecretIdentifierMessage',
+        nameOrId: vaultIdEncoded,
+        secretName: secretName,
+      });
+      await writer.close();
+      // Read response
+      const consumeP = async () => {
+        for await (const _ of response.readable) {
+          // Consume values
+        }
+      };
+      await expect(consumeP()).rejects.toThrow(ErrorRPCTimedOut);
+      await vaultManager.withVaults([vaultId], async (vault) => {
+        await vault.readF(async (efs) => {
+          expect(await efs.exists(secretName)).toBeTruthy();
+        });
+      });
+    }, 150);
+  });
   test('fails deleting vault root', async () => {
     // Create secrets
     const secretName = 'test-secret1';