Skip to content

Commit

Permalink
chore: refactor & fixes, co-authored by @Julusian
Browse files · Browse the repository at this point in the history
  • Loading branch information
nytamin committed Feb 16, 2024
1 parent 797e958 commit 190a356
Show file tree
Hide file tree
Showing 2 changed files with 68 additions and 43 deletions.
Original file line number · Diff line number · Diff line change
@@ -1,11 +1,22 @@
import { getTmpPath, updateJSONFile, updateJSONFileBatch } from '../json-write-file'
import { promises as fs } from 'fs'

const logWarning = jest.fn((message: string) => console.log('WARNING', message))
const logError = jest.fn((message: any) => console.log('ERROR', message))

const FILE_A = 'file_a.json'
async function cleanup() {
logWarning.mockClear()
logError.mockClear()

await Promise.all([unlinkIfExists(FILE_A), unlinkIfExists(getLockPath(FILE_A)), unlinkIfExists(getTmpPath(FILE_A))])
}

const config = {
logError,
logWarning,
}

beforeEach(cleanup)
afterEach(cleanup)

Expand All @@ -15,14 +26,16 @@ test('updateJSONFile: single write', async () => {
a: 1,
}
})
await updateJSONFile(FILE_A, cbManipulate)
await updateJSONFile(FILE_A, cbManipulate, config)

expect(cbManipulate).toBeCalledTimes(1)
expect(await readIfExists(FILE_A)).toBe(
JSON.stringify({
a: 1,
})
)
expect(logWarning).toBeCalledTimes(0)
expect(logError).toBeCalledTimes(0)
})

test('updateJSONFile: 2 writes', async () => {
Expand All @@ -32,15 +45,17 @@ test('updateJSONFile: 2 writes', async () => {
return o
})

const p0 = updateJSONFile(FILE_A, cbManipulate)
const p0 = updateJSONFile(FILE_A, cbManipulate, config)
await sleep(5)

const p1 = updateJSONFile(FILE_A, cbManipulate)
const p1 = updateJSONFile(FILE_A, cbManipulate, config)

await Promise.all([p0, p1])

expect(cbManipulate).toBeCalledTimes(2)
expect(await readIfExists(FILE_A)).toBe(JSON.stringify(['a', 'a']))
expect(logWarning).toBeCalledTimes(0)
expect(logError).toBeCalledTimes(0)
})
test('updateJSONFile: 10 writes', async () => {
const cbManipulate = jest.fn((o) => {
Expand All @@ -50,6 +65,8 @@ test('updateJSONFile: 10 writes', async () => {
})

const config = {
logError,
logWarning,
retryTimeout: 30,
retryCount: 3,
}
Expand Down Expand Up @@ -77,6 +94,9 @@ test('updateJSONFile: 10 writes', async () => {

// Wait for the lock functions to finish retrying:
await sleep(config.retryTimeout * config.retryCount)

expect(logWarning).toBeCalledTimes(0)
expect(logError).toBeCalledTimes(0)
})

test('updateJSONFileBatch: single write', async () => {
Expand All @@ -85,14 +105,16 @@ test('updateJSONFileBatch: single write', async () => {
b: 1,
}
})
await updateJSONFileBatch(FILE_A, cbManipulate)
await updateJSONFileBatch(FILE_A, cbManipulate, config)

expect(cbManipulate).toBeCalledTimes(1)
expect(await readIfExists(FILE_A)).toBe(
JSON.stringify({
b: 1,
})
)
expect(logWarning).toBeCalledTimes(0)
expect(logError).toBeCalledTimes(0)
})

test('updateJSONFileBatch: 3 writes', async () => {
Expand All @@ -105,16 +127,19 @@ test('updateJSONFileBatch: 3 writes', async () => {
return o
})

const p0 = updateJSONFileBatch(FILE_A, cbManipulate)
const p0 = updateJSONFileBatch(FILE_A, cbManipulate, config)
await sleep(5)

const p1 = updateJSONFileBatch(FILE_A, cbManipulate)
const p2 = updateJSONFileBatch(FILE_A, cbManipulate)
const p1 = updateJSONFileBatch(FILE_A, cbManipulate, config)
const p2 = updateJSONFileBatch(FILE_A, cbManipulate, config)

await Promise.all([p0, p1, p2])

expect(cbManipulate).toBeCalledTimes(3)
expect(await readIfExists(FILE_A)).toBe(JSON.stringify(['a', 'a', 'a']))

expect(logWarning).toBeCalledTimes(0)
expect(logError).toBeCalledTimes(0)
})
test('updateJSONFileBatch: 20 writes', async () => {
const cbManipulate = jest.fn((o) => {
Expand All @@ -124,6 +149,8 @@ test('updateJSONFileBatch: 20 writes', async () => {
})

const config = {
logWarning,
logError,
retryTimeout: 30,
retryCount: 3,
}
Expand All @@ -139,6 +166,9 @@ test('updateJSONFileBatch: 20 writes', async () => {

expect(cbManipulate).toBeCalledTimes(20)
expect(await readIfExists(FILE_A)).toBe(JSON.stringify(expectResult))

expect(logWarning).toBeCalledTimes(0)
expect(logError).toBeCalledTimes(0)
})

async function readIfExists(filePath: string): Promise<string | undefined> {
Expand Down
Original file line number · Diff line number · Diff line change
Expand Up @@ -13,29 +13,22 @@ export async function updateJSONFileBatch<T>(
): Promise<void> {
// Add manipulator callback to queue:

const existingBatches = updateJSONFileBatches.get(filePath)
let batches: BatchOperation[]
if (existingBatches) {
batches = existingBatches
} else {
batches = []
updateJSONFileBatches.set(filePath, batches)
}

// Find a batch operation that is open to accept new callbacks:
const openBatch = batches.find((batch) => batch.open)
const openBatch = updateJSONFileBatches.get(filePath)

if (!openBatch) {
// Start a new batch:

const newBatch: BatchOperation = {
open: true,
callbacks: [cbManipulate],
promise: updateJSONFile(
filePath,
(oldValue: T | undefined) => {
// At this point, we close the batch, so no more callbacks can be added:
newBatch.open = false
// At this point, no more callbacks can be added, so we close the batch:

// Guard against this being called multiple times:
if (updateJSONFileBatches.get(filePath) === newBatch) {
updateJSONFileBatches.delete(filePath)
}

// Execute all callbacks in the batch:
let value = oldValue
Expand All @@ -47,31 +40,18 @@ export async function updateJSONFileBatch<T>(
config
),
}
batches.push(newBatch)
updateJSONFileBatches.set(filePath, newBatch)

let caughtError: any = undefined
try {
await newBatch.promise
} catch (e) {
caughtError = e
}
// After finished executing, remove the batch:
const i = batches.indexOf(newBatch)
if (i === -1) throw new Error('Internal Error: Batch not found')
batches.splice(i, 1)

if (caughtError) throw caughtError
await newBatch.promise
} else {
// There is a batch open for new callbacks. Add the callback to the batch:
openBatch.callbacks.push(cbManipulate)
await openBatch.promise
}
}

const updateJSONFileBatches = new Map<string, BatchOperation[]>()
const updateJSONFileBatches = new Map<string, BatchOperation>()
interface BatchOperation {
/** When true, new callbacks can be added */
open: boolean
/** Resolves when the batch operation has finished */
promise: Promise<void>
callbacks: ((oldValue: any | undefined) => any | undefined)[]
Expand All @@ -80,36 +60,49 @@ interface BatchOperation {
/**
* Read a JSON file, created by updateJSONFile()
*/
export async function readJSONFile(filePath: string): Promise<
export async function readJSONFile(
filePath: string,
logError?: (message: any) => void
): Promise<
| {
str: string
value: any
}
| undefined
> {
{
// eslint-disable-next-line no-console
logError = logError ?? console.error

try {
const str = await readIfExists(filePath)
if (str !== undefined) {
return { str, value: str ? JSON.parse(str) : undefined }
}
} catch (e) {
// file data is corrupt, log and continue
logError(e)
}

// Second try; Check if there is a temporary file, to use instead?
{
try {
const tmpPath = getTmpPath(filePath)

const str = await readIfExists(tmpPath)
if (str !== undefined) {
return { str, value: str ? JSON.parse(str) : undefined }
}
} catch (e) {
logError(e)
// file data is corrupt, return undefined then
return undefined
}

return undefined
}

/**
* A "safe" way to write JSON data to a file. Takes measures to avoid writing corrupt data to a file due to
* 1. Multiple process writing to the same file (uses a lock file)
* 1. Multiple processes writing to the same file (uses a lock file)
* 2. Writing corrupt files due to process exit (write to temporary file and rename)
*/
export async function updateJSONFile<T>(
Expand Down Expand Up @@ -166,7 +159,7 @@ export async function updateJSONFile<T>(
// At this point, we have acquired the lock.
try {
// Read and write to the file:
const oldValue = await readJSONFile(filePath)
const oldValue = await readJSONFile(filePath, logError)

const newValue = cbManipulate(oldValue?.value)
const newValueStr = newValue !== undefined ? JSON.stringify(newValue) : ''
Expand All @@ -179,11 +172,13 @@ export async function updateJSONFile<T>(
continue
}

// Note: We can't unlink the file anywhere in here, or other calls to Lockfile can break
// by overwriting the file with an empty one.

// Write to a temporary file first, to avoid corrupting the file in case of a process exit:
await fs.writeFile(tmpFilePath, newValueStr)

// Rename file:

await rename(tmpFilePath, filePath)
}

Expand Down

0 comments on commit 190a356

Please sign in to comment.