diff --git a/src/client/handlers/VaultsSecretsGet.ts b/src/client/handlers/VaultsSecretsGet.ts
index afcc393f5..491e6dcc5 100644
--- a/src/client/handlers/VaultsSecretsGet.ts
+++ b/src/client/handlers/VaultsSecretsGet.ts
@@ -1,7 +1,7 @@
 import type { DB } from '@matrixai/db';
 import type { JSONObject, JSONRPCRequest } from '@matrixai/rpc';
 import type VaultManager from '../../vaults/VaultManager';
-import { ReadableStream } from 'stream/web';
+import type { ReadableStream } from 'stream/web';
 import { RawHandler } from '@matrixai/rpc';
 import { validateSync } from '../../validation';
 import { matchSync } from '../../utils';
@@ -21,10 +21,11 @@ class VaultsSecretsGet extends RawHandler<{
     const { vaultManager, db } = this.container;
     const [headerMessage, inputStream] = input;
     const params = headerMessage.params;
-    inputStream.cancel(); // Close input stream as it's useless for this call
+    await inputStream.cancel();
-    if (params == undefined)
+    if (params == null) {
       throw new validationErrors.ErrorParse('Input params cannot be undefined');
+    }
     const {
       nameOrId,
@@ -34,9 +35,29 @@ class VaultsSecretsGet extends RawHandler<{
         return matchSync(keyPath)(
           [
             ['nameOrId'],
-            () => value as string,
+            () => {
+              if (typeof value != 'string') {
+                throw new validationErrors.ErrorParse(
+                  'Parameter must be of type string',
+                );
+              }
+              return value as string;
+            },
+          ],
+          [
             ['secretNames'],
-            () => value as Array<string>,
+            () => {
+              if (
+                !Array.isArray(value) ||
+                value.length === 0 ||
+                !value.every((v) => typeof v === 'string')
+              ) {
+                throw new validationErrors.ErrorParse(
+                  'Parameter must be a non-empty array of strings',
+                );
+              }
+              return value as Array<string>;
+            },
           ],
           () => value,
         );
@@ -53,7 +74,6 @@ class VaultsSecretsGet extends RawHandler<{
     if (vaultId == null) {
       throw new vaultsErrors.ErrorVaultsVaultUndefined();
     }
-    // Get secret contents
     yield* vaultManager.withVaultsG([vaultId], (vault) => {
       return vault.readG(async function* (fs): AsyncGenerator<
@@ -62,8 +82,15 @@ class VaultsSecretsGet extends RawHandler<{
         void
       > {
         const contents = fileTree.serializerStreamFactory(fs, secretNames);
-        for await (const chunk of contents) {
-          yield chunk;
+        try {
+          for await (const chunk of contents) yield chunk;
+        } catch (e) {
+          if (e.name === 'ErrorSecretsSecretUndefined') {
+            throw new vaultsErrors.ErrorSecretsSecretUndefined(e.message, {
+              cause: e.cause,
+            });
+          }
+          throw e;
         }
       });
     });
diff --git a/src/utils/utils.ts b/src/utils/utils.ts
index 338707ac8..3d3235048 100644
--- a/src/utils/utils.ts
+++ b/src/utils/utils.ts
@@ -10,8 +10,8 @@ import os from 'os';
 import process from 'process';
 import path from 'path';
 import nodesEvents from 'events';
+import { ReadableStream } from 'stream/web';
 import lexi from 'lexicographic-integer';
-import { ReadableStream } from 'stream/web'
 import { PromiseCancellable } from '@matrixai/async-cancellable';
 import { timedCancellable } from '@matrixai/contexts/dist/functions';
 import * as utilsErrors from './errors';
diff --git a/src/vaults/fileTree.ts b/src/vaults/fileTree.ts
index 5a791d8b0..49d31d454 100644
--- a/src/vaults/fileTree.ts
+++ b/src/vaults/fileTree.ts
@@ -1,5 +1,5 @@
 import type { Stat } from 'encryptedfs';
-import type { FileSystem } from '../types';
+import type { FileHandle, FileSystem } from '../types';
 import type {
   ContentNode,
   DoneMessage,
@@ -11,6 +11,7 @@ import type {
   HeaderGeneric,
   HeaderContent,
 } from './types';
+import type { FdIndex } from 'encryptedfs/dist/fd';
 import path from 'path';
 import { ReadableStream, TransformStream } from 'stream/web';
 import { minimatch } from 'minimatch';
@@ -18,6 +19,7 @@ import * as vaultsUtils from './utils';
 import { HeaderSize, HeaderType, HeaderMagic } from './types';
 import * as utils from '../utils';
 import * as validationErrors from '../validation/errors';
+import * as vaultsErrors from '../vaults/errors';
 
 /**
  * Generates a serializable format of file stats
@@ -275,7 +277,17 @@ async function* encodeContent(
   path: string,
   chunkSize: number = 1024 * 4,
 ): AsyncGenerator {
-  const fd = await fs.promises.open(path, 'r');
+  let fd: FileHandle | FdIndex;
+  try {
+    fd = await fs.promises.open(path, 'r');
+  } catch (e) {
+    if (e.code === 'ENOENT') {
+      throw new vaultsErrors.ErrorSecretsSecretUndefined(e.message, {
+        cause: e,
+      });
+    }
+    throw e;
+  }
   async function read(buffer: Uint8Array): Promise<{
     bytesRead: number;
     buffer: Uint8Array;
@@ -335,13 +347,14 @@ function serializerStreamFactory(
   fs: FileSystem | FileSystemReadable,
   filePaths: Array<string>,
 ): ReadableStream {
-  let contentsGen: AsyncGenerator | undefined;
+  const paths = filePaths.slice();
+  let contentsGen: AsyncGenerator | undefined = undefined;
   return new ReadableStream({
     pull: async (controller) => {
       try {
         while (true) {
           if (contentsGen == null) {
-            const path = filePaths.shift();
+            const path = paths.shift();
             if (path == null) return controller.close();
             contentsGen = encodeContent(fs, path);
           }
@@ -440,32 +453,31 @@ function parserTransformStreamFactory(): TransformStream<
     transform: (chunk, controller) => {
       if (chunk.byteLength > 0) processedChunks = true;
       workingBuffer = vaultsUtils.uint8ArrayConcat([workingBuffer, chunk]);
-      if (contentLength == null) {
-        const genericHeader = parseGenericHeader(workingBuffer);
-        if (genericHeader.data == null) return;
-        if (genericHeader.data.type !== HeaderType.CONTENT) {
-          controller.error(
-            new validationErrors.ErrorParse(
-              `expected CONTENT message, got "${genericHeader.data.type}"`,
-            ),
-          );
-          return;
+
+      while (true) {
+        // Header parsing until enough data is acquired
+        if (contentLength == null) {
+          const genericHeader = parseGenericHeader(workingBuffer);
+          if (genericHeader.data == null) return;
+          if (genericHeader.data.type !== HeaderType.CONTENT) {
+            controller.error(
+              new validationErrors.ErrorParse(
+                `expected CONTENT message, got "${genericHeader.data.type}"`,
+              ),
+            );
+            return;
+          }
+          const contentHeader = parseContentHeader(genericHeader.remainder);
+          if (contentHeader.data == null) return;
+
+          contentLength = contentHeader.data.dataSize;
+          controller.enqueue({ type: 'CONTENT', dataSize: contentLength });
+          workingBuffer = contentHeader.remainder;
         }
-        const contentHeader = parseContentHeader(genericHeader.remainder);
-        if (contentHeader.data == null) return;
+        // We yield the whole buffer, or split it for the next header
+        if (workingBuffer.byteLength < Number(contentLength)) break;
-        contentLength = contentHeader.data.dataSize;
-        controller.enqueue({ type: 'CONTENT', dataSize: contentLength });
-        workingBuffer = contentHeader.remainder;
-      }
-      // We yield the whole buffer, or split it for the next header
-      if (workingBuffer.byteLength === 0) return;
-      if (workingBuffer.byteLength <= contentLength) {
-        contentLength -= BigInt(workingBuffer.byteLength);
-        controller.enqueue(workingBuffer);
-        workingBuffer = new Uint8Array(0);
-        if (contentLength === 0n) contentLength = undefined;
-      } else {
+        // Process the contents after enough data has been accumulated
        controller.enqueue(workingBuffer.subarray(0, Number(contentLength)));
        workingBuffer = workingBuffer.subarray(Number(contentLength));
        contentLength = undefined;
diff --git a/tests/client/handlers/vaults.test.ts b/tests/client/handlers/vaults.test.ts
index 1263fc4bf..ffa0ad02a 100644
--- a/tests/client/handlers/vaults.test.ts
+++ b/tests/client/handlers/vaults.test.ts
@@ -1,6 +1,7 @@
 import type { TLSConfig } from '@/network/types';
 import type { FileSystem } from '@/types';
 import type { VaultId } from '@/ids';
+import type { ContentNode } from '@/vaults/types';
 import type NodeManager from '@/nodes/NodeManager';
 import type {
   LogEntryMessage,
@@ -15,6 +16,7 @@ import Logger, { formatting, LogLevel, StreamHandler } from '@matrixai/logger';
 import { DB } from '@matrixai/db';
 import { RPCClient } from '@matrixai/rpc';
 import { WebSocketClient } from '@matrixai/ws';
+import { fileTree } from '@/vaults';
 import TaskManager from '@/tasks/TaskManager';
 import ACL from '@/acl/ACL';
 import KeyRing from '@/keys/KeyRing';
@@ -1449,12 +1451,26 @@ describe('vaultsSecretsNew and vaultsSecretsDelete, vaultsSecretsGet', () => {
     });
     expect(createResponse.success).toBeTruthy();
     // Get secret
-    const getResponse1 = await rpcClient.methods.vaultsSecretsGet({
+    await testsUtils.expectRemoteError(
+      rpcClient.methods.vaultsSecretsGet({
+        nameOrId: vaultIdEncoded,
+        secretNames: ['doesnt-exist'],
+      }),
+      vaultsErrors.ErrorSecretsSecretUndefined,
+    );
+    const getResponse = await rpcClient.methods.vaultsSecretsGet({
       nameOrId: vaultIdEncoded,
-      secretName: secret,
+      secretNames: [secret],
     });
-    const secretContent = getResponse1.secretContent;
-    expect(secretContent).toStrictEqual(secret);
+    const data: Array<ContentNode | Uint8Array> = [];
+    const dataStream = getResponse.readable.pipeThrough(
+      fileTree.parserTransformStreamFactory(),
+    );
+    for await (const chunk of dataStream) data.push(chunk);
+    const secretContent = data
+      .filter((v) => v instanceof Uint8Array)
+      .map((v) => Buffer.from(v as Uint8Array).toString());
+    expect(secretContent).toStrictEqual([secret]);
     // Delete secret
     const deleteResponse = await rpcClient.methods.vaultsSecretsDelete({
       nameOrId: vaultIdEncoded,
@@ -1465,45 +1481,11 @@ describe('vaultsSecretsNew and vaultsSecretsDelete, vaultsSecretsGet', () => {
     await testsUtils.expectRemoteError(
       rpcClient.methods.vaultsSecretsGet({
         nameOrId: vaultIdEncoded,
-        secretName: secret,
+        secretNames: [secret],
       }),
       vaultsErrors.ErrorSecretsSecretUndefined,
     );
   });
-  // TODO: TEST
-  test('view output', async () => {
-    const secret = 'test-secret';
-    const vaultId = await vaultManager.createVault('test-vault');
-    const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId);
-    await rpcClient.methods.vaultsSecretsNew({
-      nameOrId: vaultIdEncoded,
-      secretName: secret,
-      secretContent: Buffer.from('test-secret-contents-1').toString('binary'),
-    });
-    await rpcClient.methods.vaultsSecretsNew({
-      nameOrId: vaultIdEncoded,
-      secretName: 's2',
-      secretContent: Buffer.from('test-secret-contents-abc').toString('binary'),
-    });
-    const response = await rpcClient.methods.vaultsSecretsGet({
-      nameOrId: vaultIdEncoded,
-      secretNames: ['test-secret','s2'],
-    });
-    // const secretContent = response.meta?.result;
-    const data: Array = [];
-    for await (const d of response.readable) data.push(d);
-    // console.log(new TextDecoder().decode(Buffer.concat(data)));
-    const output = Buffer.concat(data)
-      .toString('utf-8')
-      .split('')
-      .map(char => {
-        const code = char.charCodeAt(0);
-        return code >= 32 && code <= 126 ? char : `\\x${code.toString(16).padStart(2, '0')}`;
-      })
-      .join('');
-    console.log(output);
-
-  })
 });
 describe('vaultsSecretsNewDir and vaultsSecretsList', () => {
   const logger = new Logger('vaultsSecretsNewDirList test', LogLevel.WARN, [
diff --git a/tests/vaults/VaultOps.test.ts b/tests/vaults/VaultOps.test.ts
index 0d04713e8..df30fc4d1 100644
--- a/tests/vaults/VaultOps.test.ts
+++ b/tests/vaults/VaultOps.test.ts
@@ -579,21 +579,15 @@ describe('VaultOps', () => {
   });
   test('serializer with content works with efs', async () => {
     const data = await vault.readF(async (fs) => {
-      const fileTreeGen = fileTree.globWalk({
-        fs,
-        yieldStats: false,
-        yieldRoot: false,
-        yieldFiles: true,
-        yieldParents: true,
-        yieldDirectories: true,
-      });
       const data: Array = [];
       const parserTransform = fileTree.parserTransformStreamFactory();
-      const serializedStream = fileTree.serializerStreamFactory(
-        fs,
-        fileTreeGen,
-        true,
-      );
+      const serializedStream = fileTree.serializerStreamFactory(fs, [
+        file0b,
+        file1a,
+        file2b,
+        file3a,
+        file4b,
+      ]);
       const outputStream = serializedStream.pipeThrough(parserTransform);
       for await (const output of outputStream) {
         data.push(output);
       }
diff --git a/tests/vaults/fileTree.test.ts b/tests/vaults/fileTree.test.ts
index 083668135..a7d998852 100644
--- a/tests/vaults/fileTree.test.ts
+++ b/tests/vaults/fileTree.test.ts
@@ -6,6 +6,7 @@ import { ReadableStream } from 'stream/web';
 import { test } from '@fast-check/jest';
 import fc from 'fast-check';
 import * as fileTree from '@/vaults/fileTree';
+import * as vaultsErrors from '@/vaults/errors';
 import * as vaultsTestUtils from './utils';
 
 describe('fileTree', () => {
@@ -437,6 +438,40 @@
         expect(result.remainder.byteLength).toBe(0);
       },
     );
+    test.prop(
+      [
+        fc
+          .uint8Array({ size: 'large' })
+          .noShrink()
+          .map((v) => Buffer.from(v)),
+      ],
+      { numRuns: 20 },
+    )('handles invalid data', async (data) => {
+      let limit = 100;
+      const dataStream = new ReadableStream({
+        pull: (controller) =>
+          limit-- > 0 ? controller.enqueue(data) : controller.close(),
+      });
+      const parserTransform = fileTree.parserTransformStreamFactory();
+      const outputStream = dataStream.pipeThrough(parserTransform);
+      await expect(
+        (async () => {
+          for await (const _ of outputStream); // Consume values
+        })(),
+      ).toReject();
+    });
+    test('handles empty stream', async () => {
+      const emptyStream = new ReadableStream({
+        pull: (controller) => controller.close(),
+      });
+      const parserTransform = fileTree.parserTransformStreamFactory();
+      const outputStream = emptyStream.pipeThrough(parserTransform);
+      await expect(
+        (async () => {
+          for await (const _ of outputStream); // Consume values
+        })(),
+      ).toReject();
+    });
   });
   describe('serializer', () => {
     let cwd: string;
@@ -458,6 +493,32 @@
     const file8b: string = path.join(dir22, 'file8.b');
     const file9a: string = path.join(dir22, 'file9.a');
 
+    const files = [
+      file0b,
+      file1a,
+      file2b,
+      file3a,
+      file4b,
+      file5a,
+      file6b,
+      file7a,
+      file8b,
+      file9a,
+    ];
+
+    const contents = [
+      'content-file0',
+      'content-file1',
+      'content-file2',
+      'content-file3',
+      'content-file4',
+      'content-file5',
+      'content-file6',
+      'content-file7',
+      'content-file8',
+      '',
+    ];
+
     beforeEach(async () => {
       await fs.promises.mkdir(path.join(dataDir, dir1));
       await fs.promises.mkdir(path.join(dataDir, dir11));
@@ -465,16 +526,16 @@
       await fs.promises.mkdir(path.join(dataDir, dir12));
       await fs.promises.mkdir(path.join(dataDir, dir2));
       await fs.promises.mkdir(path.join(dataDir, dir21));
       await fs.promises.mkdir(path.join(dataDir, dir22));
-      await fs.promises.writeFile(path.join(dataDir, file0b), 'content-file0');
-      await fs.promises.writeFile(path.join(dataDir, file1a), 'content-file1');
-      await fs.promises.writeFile(path.join(dataDir, file2b), 'content-file2');
-      await fs.promises.writeFile(path.join(dataDir, file3a), 'content-file3');
-      await fs.promises.writeFile(path.join(dataDir, file4b), 'content-file4');
-      await fs.promises.writeFile(path.join(dataDir, file5a), 'content-file5');
-      await fs.promises.writeFile(path.join(dataDir, file6b), 'content-file6');
-      await fs.promises.writeFile(path.join(dataDir, file7a), 'content-file7');
-      await fs.promises.writeFile(path.join(dataDir, file8b), 'content-file8');
-      await fs.promises.writeFile(path.join(dataDir, file9a), 'content-file9');
+      await fs.promises.writeFile(path.join(dataDir, file0b), contents[0]);
+      await fs.promises.writeFile(path.join(dataDir, file1a), contents[1]);
+      await fs.promises.writeFile(path.join(dataDir, file2b), contents[2]);
+      await fs.promises.writeFile(path.join(dataDir, file3a), contents[3]);
+      await fs.promises.writeFile(path.join(dataDir, file4b), contents[4]);
+      await fs.promises.writeFile(path.join(dataDir, file5a), contents[5]);
+      await fs.promises.writeFile(path.join(dataDir, file6b), contents[6]);
+      await fs.promises.writeFile(path.join(dataDir, file7a), contents[7]);
+      await fs.promises.writeFile(path.join(dataDir, file8b), contents[8]);
+      await fs.promises.writeFile(path.join(dataDir, file9a), contents[9]);
       cwd = process.cwd();
       process.chdir(dataDir);
     });
@@ -482,256 +543,63 @@
       process.chdir(cwd);
     });
-    // TODO:
-    //  - Add test for testing serializer on vaults fs.
+    test.todo('testing serializer on vaults fs');
+    test.todo('files larger than content chunks');
-    test('sends single tree', async () => {
-      const fileTreeGen = fileTree.globWalk({
-        fs,
-        yieldStats: false,
-        yieldRoot: false,
-        yieldFiles: true,
-        yieldParents: true,
-        yieldDirectories: true,
-      });
+    test('file contents are being sent properly', async () => {
       const data: Array = [];
       const parserTransform = fileTree.parserTransformStreamFactory();
-      const serializedStream = fileTree.serializerStreamFactory(
-        fs,
-        fileTreeGen,
-      );
+      const serializedStream = fileTree.serializerStreamFactory(fs, files);
       const outputStream = serializedStream.pipeThrough(parserTransform);
       for await (const output of outputStream) {
         data.push(output);
       }
-      const paths = data.map((v) => {
-        fileTree.parseTreeNode(v);
-        return v.path;
-      });
-      expect(paths).toIncludeAllMembers([
-        dir1,
-        dir2,
-        dir11,
-        dir12,
-        dir21,
-        dir22,
-        file0b,
-        file1a,
-        file2b,
-        file3a,
-        file4b,
-        file5a,
-        file6b,
-        file7a,
-        file8b,
-        file9a,
-      ]);
+      const fileContents = data
+        .filter((v) => v instanceof Uint8Array)
+        .map((v) => Buffer.from(v as Uint8Array).toString());
+      const contentHeaders = data.filter(
+        (v) => !(v instanceof Uint8Array) && v.type === 'CONTENT',
+      ) as Array;
+      expect(fileContents).toIncludeAllMembers(contents);
+      for (let i = 0; i < contentHeaders.length; i++) {
+        const contentHeader = contentHeaders[i];
+        const contentSize = BigInt(contents[i].length);
+        expect(contentHeader.dataSize).toBe(contentSize);
+      }
     });
-    test('sends tree with randomly sized chunks', async () => {
-      const fileTreeGen = fileTree.globWalk({
-        fs,
-        yieldStats: false,
-        yieldRoot: false,
-        yieldFiles: true,
-        yieldParents: true,
-        yieldDirectories: true,
-      });
+    test('file contents with randomly sized chunks', async () => {
       const data: Array = [];
+      const parserTransform = fileTree.parserTransformStreamFactory();
+      const serializedStream = fileTree.serializerStreamFactory(fs, files);
       const snipperTransform = vaultsTestUtils.binaryStreamToSnippedStream([
         5, 7, 11, 13,
       ]);
-      const parserTransform = fileTree.parserTransformStreamFactory();
-      const serializedStream = fileTree.serializerStreamFactory(
-        fs,
-        fileTreeGen,
-      );
       const outputStream = serializedStream
         .pipeThrough(snipperTransform)
         .pipeThrough(parserTransform);
       for await (const output of outputStream) {
         data.push(output);
       }
-      const paths = data.map((v) => {
-        fileTree.parseTreeNode(v);
-        return v.path;
-      });
-      expect(paths).toIncludeAllMembers([
-        dir1,
-        dir2,
-        dir11,
-        dir12,
-        dir21,
-        dir22,
-        file0b,
-        file1a,
-        file2b,
-        file3a,
-        file4b,
-        file5a,
-        file6b,
-        file7a,
-        file8b,
-        file9a,
-      ]);
-    });
-    test('sends multiple trees', async () => {
-      function doubleWalkFactory() {
-        const stream1 = fileTree.serializerStreamFactory(
-          fs,
-          fileTree.globWalk({
-            fs,
-            yieldStats: false,
-            yieldRoot: false,
-            yieldFiles: true,
-            yieldParents: true,
-            yieldDirectories: true,
-          }),
-        );
-        const stream2 = fileTree.serializerStreamFactory(
-          fs,
-          fileTree.globWalk({
-            fs,
-            yieldStats: false,
-            yieldRoot: false,
-            yieldFiles: true,
-            yieldParents: true,
-            yieldDirectories: true,
-          }),
-        );
-        return new ReadableStream({
-          start: async (controller) => {
-            for await (const data of stream1) controller.enqueue(data);
-            for await (const data of stream2) controller.enqueue(data);
-            controller.close();
-          },
-        });
-      }
-      const data: Array = [];
-      const parserTransform = fileTree.parserTransformStreamFactory();
-      // Const serializedStream = fileTree.serializerStreamFactory(fileTreeGen);
-      const serializedStream = doubleWalkFactory();
-      const outputStream = serializedStream.pipeThrough(parserTransform);
-      for await (const output of outputStream) {
-        data.push(output);
-      }
-      const paths = data.map((v) => {
-        fileTree.parseTreeNode(v);
-        return v.path;
-      });
-      expect(paths).toIncludeAllMembers([
-        dir1,
-        dir2,
-        dir11,
-        dir12,
-        dir21,
-        dir22,
-        file0b,
-        file1a,
-        file2b,
-        file3a,
-        file4b,
-        file5a,
-        file6b,
-        file7a,
-        file8b,
-        file9a,
-      ]);
-      const dupes = paths.reduce((previous, value) => {
-        previous.set(value, (previous.get(value) ?? 0) + 1);
-        return previous;
-      }, new Map());
-      for (const dupe of dupes.values()) {
-        expect(dupe).toBe(2);
-      }
-    });
-    test('file contents are sent and are correct', async () => {
-      const fileTreeGen = fileTree.globWalk({
-        fs,
-        yieldStats: false,
-        yieldRoot: false,
-        yieldFiles: true,
-        yieldParents: false,
-        yieldDirectories: false,
-      });
-      const data: Array = [];
-      const parserTransform = fileTree.parserTransformStreamFactory();
-      const serializedStream = fileTree.serializerStreamFactory(
-        fs,
-        fileTreeGen,
-      );
-      const outputStream = serializedStream.pipeThrough(parserTransform);
-      for await (const output of outputStream) {
-        data.push(output);
-      }
-      const contents = data
+      const fileContents = data
         .filter((v) => v instanceof Uint8Array)
         .map((v) => Buffer.from(v as Uint8Array).toString());
       const contentHeaders = data.filter(
         (v) => !(v instanceof Uint8Array) && v.type === 'CONTENT',
       ) as Array;
-      expect(contents).toIncludeAllMembers([
-        'content-file0',
-        'content-file1',
-        'content-file2',
-        'content-file3',
-        'content-file4',
-        'content-file5',
-        'content-file6',
-        'content-file7',
-        'content-file8',
-        'content-file9',
-      ]);
-      for (const contentHeader of contentHeaders) {
-        expect(contentHeader.dataSize).toBe(13n);
+      expect(fileContents).toIncludeAllMembers(contents);
+      for (let i = 0; i < contentHeaders.length; i++) {
+        const contentHeader = contentHeaders[i];
+        const contentSize = BigInt(contents[i].length);
+        expect(contentHeader.dataSize).toBe(contentSize);
       }
     });
-    test.prop(
-      [
-        fc
-          .uint8Array({ size: 'large' })
-          .noShrink()
-          .map((v) => Buffer.from(v)),
-      ],
-      { numRuns: 20 },
-    )('handles invalid data', async (data) => {
-      let limit = 100;
-      const dataStream = new ReadableStream({
-        pull: (controller) =>
-          limit-- > 0 ? controller.enqueue(data) : controller.close(),
-      });
-      const parserTransform = fileTree.parserTransformStreamFactory();
-      const outputStream = dataStream.pipeThrough(parserTransform);
-      try {
-        for await (const _ of outputStream); // Consume values
-      } catch {
-        return;
-      }
-      throw Error('Should have thrown an error when parsing');
-    });
-    // TODO: tests for
-    //  - empty files
-    //  - files larger than content chunks
-
-    // TEST: DEBUGGGG
-    test('view serializer', async () => {
-      const fileTreeGen = fileTree.globWalk({
-        fs,
-        yieldStats: false,
-        yieldRoot: false,
-        yieldFiles: true,
-        yieldParents: false,
-        yieldDirectories: false,
-      });
-      const data: Array = [];
-      for await (const p of fileTreeGen) data.push(p.path);
-      const serializedStream = fileTree.serializerStreamFactory(fs, data);
-      const parserTransform = fileTree.parserTransformStreamFactory();
-      const outputStream = serializedStream.pipeThrough(parserTransform);
-      const output: Array = [];
-      for await (const d of outputStream) {
-        output.push(d);
-      }
-      console.log(output);
+    test('handles non-existent files', async () => {
+      const serializedStream = fileTree.serializerStreamFactory(fs, [
+        'wrong-file',
+      ]);
+      await expect(async () => {
+        for await (const _ of serializedStream); // Consume values
+      }).rejects.toThrow(vaultsErrors.ErrorSecretsSecretUndefined);
     });
   });
 });
diff --git a/tests/vaults/utils.ts b/tests/vaults/utils.ts
index 797374ae3..0d4733776 100644
--- a/tests/vaults/utils.ts
+++ b/tests/vaults/utils.ts
@@ -24,7 +24,6 @@ const headerGenericArb = fc.record({
 });
 const headerContentArb = fc.record({
   dataSize: fc.bigUint({ max: 2n ** 63n }),
-  iNode: fc.nat(),
 });
 
 /**
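
Reviewer note: the sketch below shows how a client would be expected to consume the reworked `vaultsSecretsGet` streaming response, pieced together from the updated `vaults.test.ts` hunk above. The `readSecrets` helper name and the structurally typed `rpcClient` parameter are illustrative assumptions, not part of this changeset.

```ts
import type { ReadableStream } from 'stream/web';
import type { ContentNode } from '@/vaults/types';
import { fileTree } from '@/vaults';

// Assumed shape: any RPC client exposing the vaultsSecretsGet raw method
type SecretsGetClient = {
  methods: {
    vaultsSecretsGet: (params: {
      nameOrId: string;
      secretNames: Array<string>;
    }) => Promise<{ readable: ReadableStream<Uint8Array> }>;
  };
};

async function readSecrets(
  rpcClient: SecretsGetClient,
  vaultIdEncoded: string,
  secretNames: Array<string>,
): Promise<Array<string>> {
  // The raw handler streams serialized secret contents back to the caller
  const response = await rpcClient.methods.vaultsSecretsGet({
    nameOrId: vaultIdEncoded,
    secretNames,
  });
  // Parse the serialized stream back into CONTENT headers and data chunks
  const parsed = response.readable.pipeThrough(
    fileTree.parserTransformStreamFactory(),
  );
  const chunks: Array<ContentNode | Uint8Array> = [];
  for await (const chunk of parsed) chunks.push(chunk);
  // Uint8Array chunks carry file data; ContentNode headers carry dataSize
  return chunks
    .filter((v): v is Uint8Array => v instanceof Uint8Array)
    .map((v) => Buffer.from(v).toString());
}
```

Each requested secret arrives as a CONTENT header followed by one or more data chunks, so a caller that needs strict per-secret grouping should split on the headers' `dataSize` rather than on chunk boundaries.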