From 7531ef047afcb31cef53e7e6c8fde915e2a45d53 Mon Sep 17 00:00:00 2001
From: Aryan Jassal
Date: Mon, 2 Sep 2024 17:35:17 +1000
Subject: [PATCH] chore: simplified code

---
 src/vaults/fileTree.ts        | 61 +++++++++--------------------
 tests/vaults/fileTree.test.ts | 35 +++++++-------------
 2 files changed, 28 insertions(+), 68 deletions(-)

diff --git a/src/vaults/fileTree.ts b/src/vaults/fileTree.ts
index e822e8057..b733f1839 100644
--- a/src/vaults/fileTree.ts
+++ b/src/vaults/fileTree.ts
@@ -14,11 +12,9 @@ import type {
 import path from 'path';
 import { ReadableStream, TransformStream } from 'stream/web';
 import { minimatch } from 'minimatch';
-import { JSONParser, TokenizerError } from '@streamparser/json';
 import * as vaultsUtils from './utils';
 import { HeaderSize, HeaderType, HeaderMagic } from './types';
 import * as utils from '../utils';
-import * as utilsErrors from '../utils/errors';
 import * as validationErrors from '../validation/errors';
 
 /**
@@ -337,7 +335,6 @@ async function* encodeContent(
  * Takes an AsyncGenerator and serializes it into a `ReadableStream`
  * @param fs
  * @param treeGen - An AsyncGenerator that yields the files and directories of a file tree.
- * @param yieldContents - Toggles sending the contents of files after the file tree.
  */
 function serializerStreamFactory(
   fs: FileSystem | FileSystemReadable,
 ): ReadableStream {
   let contentsGen: AsyncGenerator | undefined;
   let fileNode: TreeNode | undefined;
-
-  async function getNextFileNode(): Promise {
-    while (true) {
-      const result = await treeGen.next();
-      if (result.done) return undefined;
-      if (result.value.type === 'FILE') return result.value;
-      // If it's not a file, keep iterating
-    }
-  }
   async function getNextContentChunk(): Promise {
     while (true) {
       if (contentsGen == null) {
-        fileNode = await getNextFileNode();
-        if (fileNode == null) return undefined;
+        // Keep consuming values if the result is not a file
+        while (true) {
+          const result = await treeGen.next();
+          if (result.done) return undefined;
+          if (result.value.type === 'FILE') {
+            fileNode = result.value;
+            break;
+          }
+        }
         contentsGen = encodeContent(fs, fileNode.path, fileNode.iNode);
       }
       const contentChunk = await contentsGen.next();
@@ -374,12 +369,8 @@ function serializerStreamFactory(
     pull: async (controller) => {
       try {
         const contentChunk = await getNextContentChunk();
-        if (contentChunk == null) {
-          return controller.close();
-        }
-        else {
-          controller.enqueue(contentChunk);
-        }
+        if (contentChunk == null) return controller.close();
+        else controller.enqueue(contentChunk);
       } catch (e) {
         await cleanup(e);
         return controller.error(e);
@@ -447,13 +438,13 @@ function parseTreeNode(data: unknown): asserts data is TreeNode {
  */
 function parserTransformStreamFactory(): TransformStream<
   Uint8Array,
-  string | ContentNode | Uint8Array
+  ContentNode | Uint8Array
 > {
   let workingBuffer: Uint8Array = new Uint8Array(0);
   let contentLength: bigint | undefined = undefined;
   let processedChunks: boolean = false;
 
-  return new TransformStream({
+  return new TransformStream({
     /**
      * Check if any chunks have been processed. If the stream is being flushed
      * without processing any chunks, then something went wrong with the stream.
@@ -467,10 +458,7 @@ function parserTransformStreamFactory(): TransformStream<
     },
     transform: (chunk, controller) => {
       if (chunk.byteLength > 0) processedChunks = true;
-      workingBuffer = vaultsUtils.uint8ArrayConcat([
-        workingBuffer,
-        chunk,
-      ]);
+      workingBuffer = vaultsUtils.uint8ArrayConcat([workingBuffer, chunk]);
       if (contentLength == null) {
         const genericHeader = parseGenericHeader(workingBuffer);
         if (genericHeader.data == null) return;
@@ -494,31 +482,14 @@ function parserTransformStreamFactory(): TransformStream<
       if (workingBuffer.byteLength === 0) return;
       if (workingBuffer.byteLength <= contentLength) {
         contentLength -= BigInt(workingBuffer.byteLength);
-        const fileContents = new TextDecoder().decode(workingBuffer); // newcode
-        controller.enqueue(fileContents); // newcode
-        // controller.enqueue(workingBuffer);
+        controller.enqueue(workingBuffer);
         workingBuffer = new Uint8Array(0);
         if (contentLength === 0n) contentLength = undefined;
-        // return;
       } else {
-        // controller.enqueue(
-        //   workingBuffer.subarray(0, Number(contentLength)),
-        // );
-        const contentChunk = workingBuffer.subarray(0, Number(contentLength)); // new
-        const contentString = new TextDecoder().decode(contentChunk); // nwe
-        controller.enqueue(contentString); // nwe
+        controller.enqueue(workingBuffer.subarray(0, Number(contentLength)));
         workingBuffer = workingBuffer.subarray(Number(contentLength));
         contentLength = undefined;
       }
-      // return;
-      // default:
-      //   controller.error(
-      //     new utilsErrors.ErrorUtilsUndefinedBehaviour(
-      //       `invalid state "${phase}"`,
-      //     ),
-      //   );
-      //   return;
-      // }
     },
   });
 }
diff --git a/tests/vaults/fileTree.test.ts b/tests/vaults/fileTree.test.ts
index 6b0a9f192..d007f9302 100644
--- a/tests/vaults/fileTree.test.ts
+++ b/tests/vaults/fileTree.test.ts
@@ -1,10 +1,10 @@
-import type { ContentNode, FileTree, TreeNode } from '@/vaults/types';
+import type { ContentNode, FileTree } from '@/vaults/types';
 import fs from 'fs';
 import os from 'os';
 import path from 'path';
 import { ReadableStream } from 'stream/web';
 import { test } from '@fast-check/jest';
-import fc, { uint8Array } from 'fast-check';
+import fc from 'fast-check';
 import * as fileTree from '@/vaults/fileTree';
 import * as vaultsTestUtils from './utils';
@@ -494,12 +494,11 @@ describe('fileTree', () => {
         yieldParents: true,
         yieldDirectories: true,
       });
-      const data: Array = [];
+      const data: Array = [];
       const parserTransform = fileTree.parserTransformStreamFactory();
       const serializedStream = fileTree.serializerStreamFactory(
         fs,
         fileTreeGen,
-        false,
       );
       const outputStream = serializedStream.pipeThrough(parserTransform);
       for await (const output of outputStream) {
@@ -537,7 +536,7 @@ describe('fileTree', () => {
         yieldParents: true,
         yieldDirectories: true,
       });
-      const data: Array = [];
+      const data: Array = [];
       const snipperTransform = vaultsTestUtils.binaryStreamToSnippedStream([
         5, 7, 11, 13,
       ]);
       const parserTransform = fileTree.parserTransformStreamFactory();
       const serializedStream = fileTree.serializerStreamFactory(
         fs,
         fileTreeGen,
-        false,
       );
       const outputStream = serializedStream
         .pipeThrough(snipperTransform)
         .pipeThrough(parserTransform);
@@ -588,7 +586,6 @@ describe('fileTree', () => {
             yieldParents: true,
             yieldDirectories: true,
           }),
-          false,
         );
         const stream2 = fileTree.serializerStreamFactory(
           fs,
@@ -600,7 +597,6 @@ describe('fileTree', () => {
             yieldParents: true,
             yieldDirectories: true,
           }),
-          false,
         );
         return new ReadableStream({
           start: async (controller) => {
             for await (const chunk of stream1) controller.enqueue(chunk);
             for await (const chunk of stream2) controller.enqueue(chunk);
             controller.close();
           },
         });
       }
       const data: Array = [];
       const parserTransform = fileTree.parserTransformStreamFactory();
       // Const serializedStream = fileTree.serializerStreamFactory(fileTreeGen);
       const serializedStream = doubleWalkFactory();
@@ -657,12 +653,11 @@ describe('fileTree', () => {
         yieldParents: false,
         yieldDirectories: false,
       });
-      const data: Array = [];
+      const data: Array = [];
       const parserTransform = fileTree.parserTransformStreamFactory();
       const serializedStream = fileTree.serializerStreamFactory(
         fs,
         fileTreeGen,
-        true,
       );
       const outputStream = serializedStream.pipeThrough(parserTransform);
       for await (const output of outputStream) {
@@ -707,9 +702,7 @@ describe('fileTree', () => {
       const parserTransform = fileTree.parserTransformStreamFactory();
       const outputStream = dataStream.pipeThrough(parserTransform);
       try {
-        for await (const _ of outputStream) {
-          // Only consume
-        }
+        for await (const _ of outputStream); // Consume values
       } catch {
         return;
       }
@@ -729,17 +722,13 @@ describe('fileTree', () => {
         yieldParents: false,
         yieldDirectories: false,
       });
-      let serializedStream = fileTree.serializerStreamFactory(fs, fileTreeGen);
-      // const data: Array = [];
-      // for await (const d of serializedStream) {
-      //   data.push(d);
-      // }
-      // console.log(data.map((v) => Buffer.from(v as Uint8Array).toString()));
-
-      // serializedStream = fileTree.serializerStreamFactory(fs, fileTreeGen);
+      const serializedStream = fileTree.serializerStreamFactory(
+        fs,
+        fileTreeGen,
+      );
       const parserTransform = fileTree.parserTransformStreamFactory();
       const outputStream = serializedStream.pipeThrough(parserTransform);
       const output: Array = [];
       for await (const d of outputStream) {
         output.push(d);
       }
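
Reviewer note (not part of the patch): a minimal sketch of how the simplified API is
expected to compose after this change, based on the tests above. The serializer factory
now takes only (fs, treeGen) with no yieldContents flag, and the parser transform emits
ContentNode objects and raw Uint8Array content chunks rather than decoded strings. The
names `roundTrip`, `fsLike`, and `treeGen`, and the loose typing, are illustrative
assumptions, not the module's actual signatures.

import type { ContentNode } from '@/vaults/types';
import * as fileTree from '@/vaults/fileTree';

// Pipe a file-tree walk through the serializer and back through the parser,
// collecting whatever the parser emits (ContentNode headers and Uint8Array
// content chunks), mirroring the round-trip tests in fileTree.test.ts.
async function roundTrip(
  fsLike: any, // a FileSystem / FileSystemReadable, e.g. Node's `fs`
  treeGen: AsyncGenerator<any>, // e.g. the tests' fileTreeGen
): Promise<Array<ContentNode | Uint8Array>> {
  const serialized = fileTree.serializerStreamFactory(fsLike, treeGen);
  const parsed = serialized.pipeThrough(fileTree.parserTransformStreamFactory());
  const output: Array<ContentNode | Uint8Array> = [];
  for await (const chunk of parsed) {
    output.push(chunk);
  }
  return output;
}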