refactor: Upgrade worker dependencies and migrate to TypeScript (#15012)
atomrc authored Apr 17, 2023
1 parent 2216724 commit 4868f07
Showing 14 changed files with 920 additions and 276 deletions.
2 changes: 1 addition & 1 deletion babel.config.js
@@ -27,7 +27,7 @@ const presetEnvConfig = {
module.exports = {
env: {
test: {
plugins: ['@emotion'],
plugins: ['@emotion', 'babel-plugin-transform-import-meta'],
presets: [
['@babel/preset-react', {importSource: '@emotion/react', runtime: 'automatic'}],
'@babel/preset-typescript',
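The new Babel plugin matters for tests: the worker call sites introduced in this commit use `import.meta.url`, which Jest cannot parse in its CommonJS environment. A hedged illustration of what `babel-plugin-transform-import-meta` does (the exact rewrite below is an assumption about the plugin, not code from this commit):

```ts
// In application code, workers are now located relative to the importing module:
const workerUrl = new URL('./imageWorker', import.meta.url);

// `import.meta` is invalid syntax in the CommonJS modules Jest executes, so
// babel-plugin-transform-import-meta rewrites it to roughly the equivalent of:
//   new URL('./imageWorker', require('url').pathToFileURL(__filename).toString())
// which keeps the worker-creating modules importable under Jest.
```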
5 changes: 4 additions & 1 deletion package.json
@@ -21,6 +21,7 @@
"dexie-batch": "0.4.3",
"highlight.js": "11.7.0",
"http-status-codes": "2.2.0",
"jimp": "0.22.7",
"jquery": "3.6.4",
"js-cookie": "3.0.1",
"jszip": "3.10.1",
@@ -104,6 +105,7 @@
"archiver": "^5.3.1",
"autoprefixer": "^10.4.13",
"babel-loader": "9.1.2",
"babel-plugin-transform-import-meta": "^2.2.0",
"caniuse-lite": "^1.0.30001478",
"cross-env": "7.0.3",
"cspell": "6.31.1",
@@ -240,7 +242,8 @@
"translate:download": "ts-node ./bin/translations_download.ts"
},
"resolutions": {
"libsodium": "0.7.10"
"libsodium": "0.7.10",
"xml2js": "0.5.0"
},
"version": "0.27.0",
"packageManager": "[email protected]"
2 changes: 1 addition & 1 deletion src/script/assets/AssetRepository.ts
@@ -214,7 +214,7 @@ export class AssetRepository {
if (skipCompression === true) {
compressedBytes = new Uint8Array(buffer as ArrayBuffer);
} else {
const worker = new WebWorker('/worker/image-worker.js');
const worker = new WebWorker(() => new Worker(new URL('./imageWorker', import.meta.url)));
compressedBytes = await worker.post({buffer, useProfileImageSize});
}
const compressedImage = await loadImage(new Blob([compressedBytes], {type: image.type}));
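The asset worker is no longer loaded from a prebuilt `/worker/image-worker.js`; the call site now passes a factory that does `new Worker(new URL('./imageWorker', import.meta.url))`, a pattern the bundler can detect statically and emit as a separate chunk. The `Util/worker` implementation itself is not part of this excerpt; a minimal sketch of a wrapper compatible with this call site might look like the following (names and details are assumptions for illustration):

```ts
// Minimal sketch of a WebWorker wrapper compatible with the call site above.
// The real Util/worker implementation is not shown in this diff.
export class WebWorkerSketch {
  constructor(private readonly createWorker: () => Worker) {}

  /** Posts a single message and resolves with the first reply. */
  post<T>(payload: unknown): Promise<T> {
    return new Promise<T>((resolve, reject) => {
      const worker = this.createWorker();
      worker.onmessage = event => {
        resolve(event.data as T);
        worker.terminate();
      };
      worker.onerror = error => {
        reject(error);
        worker.terminate();
      };
      worker.postMessage(payload);
    });
  }
}
```

The real wrapper may reuse a single worker or handle multiple messages; this sketch only mirrors the post-and-await usage seen above.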
37 changes: 18 additions & 19 deletions src/worker/image-worker.js → src/script/assets/imageWorker.ts
@@ -17,13 +17,14 @@
*
*/

importScripts('jimp.min.js');
// For some reason, Jimp attaches to self, even in Node.
// https://github.com/jimp-dev/jimp/issues/466
import * as _Jimp from 'jimp';

/**
* @typedef {{buffer: ArrayBuffer, useProfileImageSize?: boolean}} Data
*/
// @ts-ignore
const Jimp: typeof _Jimp = typeof self !== 'undefined' ? self.Jimp || _Jimp : _Jimp;

self.addEventListener('message', (/** @type {MessageEvent<Data>} */ event) => {
self.addEventListener('message', async event => {
const COMPRESSION = 80;
let MAX_SIZE = 1448;
let MAX_FILE_SIZE = 310 * 1024;
@@ -34,20 +35,18 @@ self.addEventListener('message', (/** @type {MessageEvent<Data>} */ event) => {
}

// Unfortunately, Jimp doesn't support MIME type "image/webp": https://github.com/oliver-moran/jimp/issues/144
Jimp.read(event.data.buffer).then(image => {
if (event.data.useProfileImageSize) {
image.cover(MAX_SIZE, MAX_SIZE);
} else if (image.bitmap.width > MAX_SIZE || image.bitmap.height > MAX_SIZE) {
image.scaleToFit(MAX_SIZE, MAX_SIZE);
}
const image = await Jimp.read(event.data.buffer);
if (event.data.useProfileImageSize) {
image.cover(MAX_SIZE, MAX_SIZE);
} else if (image.bitmap.width > MAX_SIZE || image.bitmap.height > MAX_SIZE) {
image.scaleToFit(MAX_SIZE, MAX_SIZE);
}

if (image.bitmap.data.length > MAX_FILE_SIZE) {
image.quality(COMPRESSION);
}
if (image.bitmap.data.length > MAX_FILE_SIZE) {
image.quality(COMPRESSION);
}

return image.getBuffer(Jimp.AUTO, (_error, src) => {
self.postMessage(src);
return self.close();
});
});
const buffer = await image.getBufferAsync(image.getMIME());
self.postMessage(buffer);
return self.close();
});
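The worker's message contract is unchanged: it receives `{buffer, useProfileImageSize?}` and posts back the compressed bytes, closing itself afterwards. Swapping `getBuffer(Jimp.AUTO, cb)` for `getBufferAsync(image.getMIME())` should also be behavior-preserving, since `Jimp.AUTO` resolves to the image's original MIME type. A hedged usage sketch from the main thread (types are inferred from the worker body, not taken from Wire's actual typings):

```ts
// Usage sketch: drive the image worker directly from the main thread.
interface ImageWorkerPayload {
  buffer: ArrayBuffer;
  useProfileImageSize?: boolean;
}

async function compressImage(buffer: ArrayBuffer, useProfileImageSize = false): Promise<Uint8Array> {
  const worker = new Worker(new URL('./imageWorker', import.meta.url));
  const payload: ImageWorkerPayload = {buffer, useProfileImageSize};

  return new Promise<Uint8Array>(resolve => {
    // The worker posts the compressed image bytes back as its only message.
    worker.onmessage = event => resolve(new Uint8Array(event.data));
    worker.postMessage(payload);
  });
}
```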
119 changes: 50 additions & 69 deletions src/script/backup/BackupRepository.test.ts
@@ -17,17 +17,19 @@
*
*/

import JSZip from 'jszip';
import {container} from 'tsyringe';

import {createRandomUuid, noop} from 'Util/util';
import {WebWorker} from 'Util/worker';

import {BackupRepository} from './BackupRepository';
import {BackupService} from './BackupService';
import {CancelError, DifferentAccountError, IncompatibleBackupError, IncompatiblePlatformError} from './Error';
import {handleZipEvent} from './zipWorker';

import {ConversationRepository} from '../conversation/ConversationRepository';
import {User} from '../entity/User';
import {ClientEvent} from '../event/Client';
import {DatabaseTypes, createStorageEngine} from '../service/StoreEngineProvider';
import {StorageService} from '../storage';
import {StorageSchemata} from '../storage/StorageSchemata';
@@ -88,6 +90,10 @@ async function buildBackupRepository() {
}

describe('BackupRepository', () => {
beforeAll(async () => {
jest.spyOn(WebWorker.prototype, 'post').mockImplementation(handleZipEvent as any);
});

describe('createMetaData', () => {
it('creates backup metadata', async () => {
const [backupRepository, {backupService}] = await buildBackupRepository();
@@ -111,40 +117,28 @@ describe('BackupRepository', () => {
describe('generateHistory', () => {
const eventStoreName = StorageSchemata.OBJECT_STORE.EVENTS;

// TODO: [JEST] Shim WebWorkers
/*
it.skip('generates an archive of the database', async () => {
const blob = await backupRepository.generateHistory(noop);
const zip = await new JSZip().loadAsync(blob);
const zipFilenames = Object.keys(zip.files);
Object.values(BackupRepository.CONFIG.FILENAME).forEach(filename => expect(zipFilenames).toContain(filename));
const conversationsStr = await zip.files[BackupRepository.CONFIG.FILENAME.CONVERSATIONS].async('string');
const conversations = JSON.parse(conversationsStr);
expect(conversations).toEqual([conversation]);
const eventsStr = await zip.files[BackupRepository.CONFIG.FILENAME.EVENTS].async('string');
const events = JSON.parse(eventsStr);
expect(events).toEqual(messages);
});
// TODO: [JEST] Shim WebWorkers
it.skip('ignores verification events in the backup', async () => {
it('ignores verification events in the backup', async () => {
const user = new User('user1');
const [backupRepository, {storageService, backupService}] = await buildBackupRepository();
const verificationEvent = {
conversation: conversationId,
type: ClientEvent.CONVERSATION.VERIFICATION,
};
const textEvent = {
conversation: conversationId,
type: ClientEvent.CONVERSATION.MESSAGE_ADD,
};
const importSpy = jest.spyOn(backupService, 'importEntities');

await storageService.save(StorageSchemata.OBJECT_STORE.EVENTS, undefined, verificationEvent);
await storageService.save(StorageSchemata.OBJECT_STORE.EVENTS, undefined, textEvent);
const blob = await backupRepository.generateHistory(user, 'client1', noop);

await testFactory.storage_service.save(StorageSchemata.OBJECT_STORE.EVENTS, undefined, verificationEvent);
const blob = await backupRepository.generateHistory(noop);
const zip = await new JSZip().loadAsync(blob);
await backupRepository.importHistory(new User('user1'), blob, noop, noop);

const eventsStr = await zip.files[BackupRepository.CONFIG.FILENAME.EVENTS].async('string');
const events = JSON.parse(eventsStr);
expect(events).not.toContain(verificationEvent);
expect(events.length).toBe(messages.length);
expect(importSpy).toHaveBeenCalledWith(eventStoreName, [textEvent]);
expect(importSpy).not.toHaveBeenCalledWith(eventStoreName, [verificationEvent]);
});
*/

it('cancels export', async () => {
const [backupRepository, {storageService}] = await buildBackupRepository();
@@ -163,41 +157,36 @@
});

describe('importHistory', () => {
it(`fails if metadata doesn't match`, async () => {
const [backupRepository] = await buildBackupRepository();
const tests = [
it.each([
[
{
expectedError: DifferentAccountError,
metaChanges: {user_id: 'fail'},
},
],
[
{
expectedError: IncompatibleBackupError,
metaChanges: {version: 13}, // version 14 contains a migration script, thus will generate an error
},
],
[
{
expectedError: IncompatiblePlatformError,
metaChanges: {platform: 'random'},
},
];

for (const testDescription of tests) {
const archive = new JSZip();
const meta = {
...backupRepository.createMetaData(new User('user1'), 'client1'),
...testDescription.metaChanges,
};

archive.file(BackupRepository.CONFIG.FILENAME.METADATA, JSON.stringify(meta));

const files: Record<string, any> = {};
for (const fileName in archive.files) {
files[fileName] = await archive.files[fileName].async('uint8array');
}

await expect(backupRepository.importHistory(new User('user1'), files, noop, noop)).rejects.toThrow(
testDescription.expectedError,
);
}
],
])(`fails if metadata doesn't match`, async ({metaChanges, expectedError}) => {
const [backupRepository] = await buildBackupRepository();

const meta = {...backupRepository.createMetaData(new User('user1'), 'client1'), ...metaChanges};

const files = {
[BackupRepository.CONFIG.FILENAME.METADATA]: JSON.stringify(meta),
};
const zip = (await handleZipEvent({type: 'zip', files})) as Uint8Array;

await expect(backupRepository.importHistory(new User('user1'), zip, noop, noop)).rejects.toThrow(expectedError);
});

it('successfully imports a backup', async () => {
@@ -206,28 +195,20 @@
jest.spyOn(backupService, 'getDatabaseVersion').mockReturnValue(15);
jest.spyOn(backupService, 'importEntities').mockResolvedValue(undefined);

const metadataArray = [{...backupRepository.createMetaData(user, 'client1'), version: 15}];
const metadata = {...backupRepository.createMetaData(user, 'client1'), version: 15};

const archives = metadataArray.map(metadata => {
const archive = new JSZip();
archive.file(BackupRepository.CONFIG.FILENAME.METADATA, JSON.stringify(metadata));
archive.file(BackupRepository.CONFIG.FILENAME.CONVERSATIONS, JSON.stringify([conversation]));
archive.file(BackupRepository.CONFIG.FILENAME.EVENTS, JSON.stringify(messages));

return archive;
});
const files = {
[BackupRepository.CONFIG.FILENAME.METADATA]: JSON.stringify(metadata),
[BackupRepository.CONFIG.FILENAME.CONVERSATIONS]: JSON.stringify([conversation]),
[BackupRepository.CONFIG.FILENAME.EVENTS]: JSON.stringify(messages),
};

for (const archive of archives) {
const files: Record<string, any> = {};
for (const fileName in archive.files) {
files[fileName] = await archive.files[fileName].async('uint8array');
}
const zip = (await handleZipEvent({type: 'zip', files})) as Uint8Array;

await backupRepository.importHistory(user, files, noop, noop);
await backupRepository.importHistory(user, zip, noop, noop);

expect(conversationRepository.updateConversationStates).toHaveBeenCalledWith([conversation]);
expect(backupService.importEntities).toHaveBeenCalledWith(StorageSchemata.OBJECT_STORE.EVENTS, messages);
}
expect(conversationRepository.updateConversationStates).toHaveBeenCalledWith([conversation]);
expect(backupService.importEntities).toHaveBeenCalledWith(StorageSchemata.OBJECT_STORE.EVENTS, messages);
});
});
});
18 changes: 13 additions & 5 deletions src/script/backup/BackupRepository.ts
@@ -22,12 +22,14 @@ import {container} from 'tsyringe';

import {chunk} from 'Util/ArrayUtil';
import {Logger, getLogger} from 'Util/Logger';
import {WebWorker} from 'Util/worker';

import {BackupService} from './BackupService';
import {
CancelError,
DifferentAccountError,
ExportError,
ImportError,
IncompatibleBackupError,
IncompatiblePlatformError,
InvalidMetaDataError,
@@ -40,7 +42,6 @@ import type {Conversation} from '../entity/Conversation';
import {User} from '../entity/User';
import {ConversationRecord} from '../storage/record/ConversationRecord';
import {StorageSchemata} from '../storage/StorageSchemata';
import {WebWorker} from '../util/worker';

export interface Metadata {
client_id: string;
@@ -62,6 +63,7 @@ export class BackupRepository {
private readonly conversationRepository: ConversationRepository;
private readonly logger: Logger;
private canceled: boolean;
private worker: WebWorker;

static get CONFIG() {
return {
@@ -85,6 +87,7 @@
this.conversationRepository = conversationRepository;

this.canceled = false;
this.worker = new WebWorker(() => new Worker(new URL('./zipWorker.ts', import.meta.url)));
}

public cancelAction(): void {
@@ -184,9 +187,7 @@

files[BackupRepository.CONFIG.FILENAME.METADATA] = encodedMetadata;

const worker = new WebWorker('worker/jszip-pack-worker.js');

const array = await worker.post<Uint8Array>(files);
const array = await this.worker.post<Uint8Array>({type: 'zip', files});
return new Blob([array], {type: 'application/zip'});
}

Expand All @@ -196,11 +197,18 @@ export class BackupRepository {

public async importHistory(
user: User,
files: Record<string, Uint8Array>,
data: ArrayBuffer | Blob,
initCallback: (numberOfRecords: number) => void,
progressCallback: (numberProcessed: number) => void,
): Promise<void> {
this.isCanceled = false;

const files = await this.worker.post<Record<string, Uint8Array>>({type: 'unzip', bytes: data});

if (files.error) {
throw new ImportError(files.error as unknown as string);
}

if (!files[BackupRepository.CONFIG.FILENAME.METADATA]) {
throw new InvalidMetaDataError();
}
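Zipping and unzipping now go through a single long-lived worker speaking a small message protocol: `{type: 'zip', files}` resolves to the packed archive bytes, and `{type: 'unzip', bytes}` resolves to a map of filename to file contents (or an `error` entry, which `importHistory` turns into an `ImportError`). The `./zipWorker` module itself is not included in this excerpt; a sketch of a handler with that shape, built on JSZip, might look like this (the real implementation may differ, e.g. in how it reports errors):

```ts
import JSZip from 'jszip';

// Sketch of the zip worker's message handler, inferred from how it is called
// in BackupRepository above; not the actual ./zipWorker source.
type ZipEvent =
  | {type: 'zip'; files: Record<string, Uint8Array | string>}
  | {type: 'unzip'; bytes: ArrayBuffer | Blob};

export async function handleZipEventSketch(event: ZipEvent): Promise<Uint8Array | Record<string, Uint8Array>> {
  if (event.type === 'zip') {
    // Pack every provided file into a single archive and return its bytes.
    const zip = new JSZip();
    Object.entries(event.files).forEach(([filename, content]) => zip.file(filename, content));
    return zip.generateAsync({type: 'uint8array'});
  }

  // 'unzip': unpack every entry into a filename -> bytes map.
  const archive = await JSZip.loadAsync(event.bytes);
  const files: Record<string, Uint8Array> = {};
  for (const [filename, file] of Object.entries(archive.files)) {
    files[filename] = await file.async('uint8array');
  }
  return files;
}
```

Exposing the same handler to tests (as `handleZipEvent` in `BackupRepository.test.ts`) is what lets the suite mock `WebWorker.prototype.post` and re-enable the previously skipped backup tests without a WebWorker shim.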
(The remaining 8 changed files are not shown here.)
