feat: export & import users, conversations. Harden code (#18426)
aweiss-dev authored Dec 4, 2024
1 parent ee3fc70 commit ba37a97
Showing 7 changed files with 169 additions and 127 deletions.
2 changes: 1 addition & 1 deletion src/script/backup/CrossPlatformBackup/AssetMetadata.ts
@@ -55,7 +55,7 @@ const hasNameProperty = (infoObject: unknown): infoObject is {name: string} =>
isObject(infoObject) && 'name' in infoObject;
const isImageAsset = (contentType: string, infoObject: unknown): infoObject is ImageAsset =>
AssetContentType.isImage(contentType) &&
hasNameProperty(infoObject) &&
isObject(infoObject) &&
'height' in infoObject &&
'width' in infoObject &&
'tag' in infoObject;
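The hunk above loosens the isImageAsset type guard: the info payload no longer needs a name property (the old hasNameProperty check), it only needs to be an object exposing height, width and tag alongside an image content type. A minimal sketch of that structural-guard pattern; ImageAssetLike and the tiny isObject helper below are illustrative stand-ins, not the project's actual ImageAsset type or utility:

// Sketch only: a structural type guard in the spirit of the updated isImageAsset.
type ImageAssetLike = {height: number; width: number; tag: string};

const isObject = (value: unknown): value is Record<string, unknown> =>
  typeof value === 'object' && value !== null;

const looksLikeImageAsset = (infoObject: unknown): infoObject is ImageAssetLike =>
  isObject(infoObject) && 'height' in infoObject && 'width' in infoObject && 'tag' in infoObject;

// Usage: narrow an unknown payload before reading image dimensions.
const info: unknown = JSON.parse('{"height": 600, "width": 800, "tag": "medium"}');
if (looksLikeImageAsset(info)) {
  console.log(info.width, info.height, info.tag);
}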
143 changes: 73 additions & 70 deletions src/script/backup/CrossPlatformBackup/CPB.export.ts
@@ -23,14 +23,19 @@ import {buildMetaData} from './AssetMetadata';
import {
CPBackupExporter,
BackupQualifiedId,
BackUpConversation,
BackupUser,
BackupMessage,
BackUpConversation,
BackupDateTime,
BackupMessageContent,
BackupMessage,
} from './CPB.library';
import {ExportHistoryFromDatabaseParams} from './CPB.types';
import {ConversationTableSchema, UserTableSchema, EventTableSchema, AssetContentSchema} from './data.schema';
import {
AssetContentSchema,
ConversationTableEntrySchema,
EventTableEntrySchema,
UserTableEntrySchema,
} from './data.schema';

import {preprocessConversations, preprocessUsers, preprocessEvents} from '../recordPreprocessors';

@@ -63,62 +68,55 @@ export const exportCPBHistoryFromDatabase = async ({
}

// Taking care of conversations
const {
success: conversationsSuccess,
data: conversationsData,
error: conversationsError,
} = ConversationTableSchema.safeParse(
await exportTable<ConversationRecord>({
backupService,
table: conversationTable,
preprocessor: streamProgress(preprocessConversations),
}),
);
if (conversationsSuccess) {
conversationsData.forEach(conversationData =>
backupExporter.addConversation(
new BackUpConversation(
new BackupQualifiedId(conversationData.id, conversationData.domain),
conversationData.name ?? '',
),
),
);
} else {
CPBLogger.log('Conversation data schema validation failed', conversationsError);
}
const conversationRecords = await exportTable<ConversationRecord>({
backupService,
table: conversationTable,
preprocessor: streamProgress(preprocessConversations),
});
conversationRecords.forEach(record => {
const {success, data, error} = ConversationTableEntrySchema.safeParse(record);

if (success) {
backupExporter.addConversation(new BackUpConversation(new BackupQualifiedId(data.id, data.domain), data.name));
} else {
CPBLogger.error('Conversation data schema validation failed', error);
}
});
// ------------------------------

// Taking care of users
const {
success: usersSuccess,
data: usersData,
error: usersError,
} = UserTableSchema.safeParse(
await exportTable<UserRecord>({backupService, table: usersTable, preprocessor: streamProgress(preprocessUsers)}),
);
if (usersSuccess) {
usersData.forEach(userData =>
const userRecords = await exportTable<UserRecord>({
backupService,
table: usersTable,
preprocessor: streamProgress(preprocessUsers),
});
userRecords.forEach(record => {
const {success, data, error} = UserTableEntrySchema.safeParse(record);

if (success) {
backupExporter.addUser(
new BackupUser(
new BackupQualifiedId(userData?.qualified_id?.id ?? userData.id, userData?.qualified_id?.domain ?? ''),
userData.name,
userData.handle ?? '',
new BackupQualifiedId(data?.qualified_id?.id ?? data.id, data?.qualified_id?.domain ?? ''),
data.name,
data.handle ?? '',
),
),
);
} else {
CPBLogger.log('User data schema validation failed', usersError);
}
);
} else {
CPBLogger.error('User data schema validation failed', error);
}
});
// ------------------------------

// Taking care of events
const {
success: eventsSuccess,
data: eventsData,
error: eventsError,
} = EventTableSchema.safeParse(
await exportTable<EventRecord>({backupService, table: eventsTable, preprocessor: streamProgress(preprocessEvents)}),
);
if (eventsSuccess) {
eventsData.forEach(eventData => {
const eventRecords = await exportTable<EventRecord>({
backupService,
table: eventsTable,
preprocessor: streamProgress(preprocessEvents),
});

eventRecords.forEach(record => {
const {success, data: eventData, error} = EventTableEntrySchema.safeParse(record);
if (success) {
const {type} = eventData;
// ToDo: Add support for other types of messages and different types of content. Also figure out which fields are required.
if (!isSupportedEventType(type)) {
@@ -147,28 +145,31 @@ export const exportCPBHistoryFromDatabase = async ({
const webPrimaryKey = eventData.primary_key;

if (isAssetAddEvent(type)) {
const {success, error, data} = AssetContentSchema.safeParse(eventData.data);
if (!success) {
CPBLogger.log('Asset data schema validation failed', error);
const {
success: assetParseSuccess,
error: assetParseError,
data: assetParseData,
} = AssetContentSchema.safeParse(eventData.data);
if (!assetParseSuccess) {
CPBLogger.error('Asset data schema validation failed', assetParseError);
return;
}

const metaData = buildMetaData(data.content_type, data.info);

CPBLogger.log('metaData', metaData, data.content_type);
const metaData = buildMetaData(assetParseData.content_type, assetParseData.info);

const asset = new BackupMessageContent.Asset(
data.content_type,
data.content_length,
data.info.name,
transformObjectToArray(data.otr_key),
transformObjectToArray(data.sha256),
data.key,
data.token,
data.domain,
assetParseData.content_type,
Number.parseInt(assetParseData.content_length),
assetParseData.info.name,
transformObjectToArray(assetParseData.otr_key),
transformObjectToArray(assetParseData.sha256),
assetParseData.key,
assetParseData.token,
assetParseData.domain,
null,
metaData,
);

backupExporter.addMessage(
new BackupMessage(id, conversationId, senderUserId, senderClientId, creationDate, asset, webPrimaryKey),
);
@@ -180,10 +181,12 @@ export const exportCPBHistoryFromDatabase = async ({
new BackupMessage(id, conversationId, senderUserId, senderClientId, creationDate, text, webPrimaryKey),
);
}
});
} else {
CPBLogger.log('Event data schema validation failed', eventsError);
}
} else {
CPBLogger.error('Event data schema validation failed', error);
}
});

// ------------------------------

return backupExporter.serialize();
};
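The core hardening in this file: the exporter used to run a single safeParse over the entire exported table (the old ConversationTableSchema, UserTableSchema and EventTableSchema were arrays), so one malformed row invalidated the whole table. It now fetches the raw records first and validates each record individually with the new *TableEntrySchema objects, logging and skipping only the rows that fail. A minimal sketch of that per-record pattern, using a simplified stand-in schema and exporter callback rather than the real CPB types:

import zod from 'zod';

// Illustrative stand-in for ConversationTableEntrySchema / UserTableEntrySchema / EventTableEntrySchema.
const EntrySchema = zod.object({id: zod.string(), domain: zod.string(), name: zod.string()});
type Entry = zod.infer<typeof EntrySchema>;

const exportEntries = (records: unknown[], addEntry: (entry: Entry) => void) => {
  records.forEach(record => {
    const result = EntrySchema.safeParse(record);
    if (result.success) {
      // Only rows matching the schema reach the exporter.
      addEntry(result.data);
    } else {
      // A single malformed row is logged and skipped; the rest of the table is still exported.
      console.error('Entry schema validation failed', result.error);
    }
  });
};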
30 changes: 23 additions & 7 deletions src/script/backup/CrossPlatformBackup/CPB.import.ts
@@ -19,9 +19,10 @@

import {CPBackup, CPBackupImporter, BackupImportResult} from './CPB.library';
import {ImportHistoryToDatabaseParams} from './CPB.types';
import {mapConversationRecord, mapUserRecord} from './importMappers';
import {mapEventRecord} from './importMappers/mapEventRecord';

import {EventRecord} from '../../storage';
import {ConversationRecord, EventRecord, UserRecord} from '../../storage';
import {FileDescriptor, Filename} from '../Backup.types';
import {IncompatibleBackupError} from '../Error';

@@ -44,23 +45,38 @@ export const importCPBHistoryToDatabase = async ({
const result = backupImporter.importBackup(new Int8Array(backupRawData.buffer));

if (result instanceof BackupImportResult.Success) {
CPBLogger.log(`SUCCESSFUL BACKUP IMPORT: ${result.backupData}`);
// import events
const eventRecords: EventRecord[] = [];
result.backupData.messages.forEach(message => {
const eventRecord = mapEventRecord(message);
if (eventRecord) {
eventRecords.push(eventRecord);
}
});
FileDescriptor.push({entities: eventRecords, filename: Filename.EVENTS});
CPBLogger.log(`IMPORTED ${eventRecords.length} EVENTS`);

// import conversations
const conversationRecords: ConversationRecord[] = [];
result.backupData.conversations.forEach(conversation => {
// TODO: Import conversations
const conversationRecord = mapConversationRecord(conversation);
if (conversationRecord) {
conversationRecords.push(conversationRecord);
}
});
FileDescriptor.push({entities: conversationRecords, filename: Filename.CONVERSATIONS});
CPBLogger.log(`IMPORTED ${conversationRecords.length} CONVERSATIONS`);

// import users
const userRecords: UserRecord[] = [];
result.backupData.users.forEach(user => {
// TODO: Import users
const userRecord = mapUserRecord(user);
if (userRecord) {
userRecords.push(userRecord);
}
});

CPBLogger.log(`IMPORTED ${eventRecords.length} EVENTS`);
FileDescriptor.push({entities: eventRecords, filename: Filename.EVENTS});
FileDescriptor.push({entities: userRecords, filename: Filename.USERS});
CPBLogger.log(`IMPORTED ${userRecords.length} USERS`);
} else {
CPBLogger.log(`ERROR DURING BACKUP IMPORT: ${result}`);
throw new IncompatibleBackupError('Incompatible cross-platform backup');
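After decoding succeeds, the importer now walks every collection in the backup (messages, conversations, users), maps each entry to a web storage record via the importMappers, drops entries the mapper cannot handle (it returns null), and pushes each batch with its own filename. A condensed sketch of that collect-and-push step, with simplified stand-ins for the record and descriptor types used here:

// Simplified stand-ins; the real code uses EventRecord/ConversationRecord/UserRecord and the Filename enum.
type RecordLike = {id: string};
type Descriptor = {entities: RecordLike[]; filename: string};

const collectBatch = <T, R extends RecordLike>(
  entries: T[],
  mapEntry: (entry: T) => R | null,
  filename: string,
  descriptors: Descriptor[],
): void => {
  const records: R[] = [];
  entries.forEach(entry => {
    const record = mapEntry(entry);
    // Entries the mapper cannot translate are skipped instead of being imported half-empty.
    if (record) {
      records.push(record);
    }
  });
  descriptors.push({entities: records, filename});
  console.log(`IMPORTED ${records.length} records into ${filename}`);
};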
79 changes: 39 additions & 40 deletions src/script/backup/CrossPlatformBackup/data.schema.ts
@@ -19,55 +19,55 @@

import zod from 'zod';

const ConversationSchema = zod.object({
accessModes: zod.array(zod.string()).optional(),
accessRole: zod.array(zod.any()).optional(),
accessRoleV2: zod.string().optional(),
archived_state: zod.boolean(),
archived_timestamp: zod.number(),
cipher_suite: zod.number().optional(),
creator: zod.string(),
// For now we just check for the data we urgently need for the backup
export const ConversationTableEntrySchema = zod.object({
domain: zod.string(),
group_id: zod.string().optional(),
id: zod.string(),
last_event_timestamp: zod.number(),
last_server_timestamp: zod.number(),
message_timer: zod.string().nullable().optional(),
muted_state: zod.number().nullable(),
muted_timestamp: zod.number(),
name: zod.string().nullable(),
others: zod.array(zod.any()),
protocol: zod.string(),
receipt_mode: zod.number().nullable(),
roles: zod.object({}).passthrough(),
status: zod.number(),
team_id: zod.string().nullable().optional(),
type: zod.number(),
name: zod.string(),
// accessModes: zod.array(zod.string()).optional(),
// accessRole: zod.array(zod.any()).optional(),
// accessRoleV2: zod.string().optional(),
// archived_state: zod.boolean(),
// archived_timestamp: zod.number(),
// cipher_suite: zod.number().optional(),
// creator: zod.string(),
// group_id: zod.string().optional(),
// last_event_timestamp: zod.number(),
// last_server_timestamp: zod.number(),
// message_timer: zod.string().nullable().optional(),
// muted_state: zod.number().nullable(),
// muted_timestamp: zod.number(),
// others: zod.array(zod.any()),
// protocol: zod.string(),
// receipt_mode: zod.number().nullable(),
// roles: zod.object({}).passthrough(),
// status: zod.number(),
// team_id: zod.string().nullable().optional(),
// type: zod.number(),
});
export const ConversationTableSchema = zod.array(ConversationSchema);
export type ConversationTable = zod.infer<typeof ConversationTableSchema>;
export type ConversationTableEntry = zod.infer<typeof ConversationTableEntrySchema>;

const UserSchema = zod.object({
accent_id: zod.number().optional(),
assets: zod.array(zod.any()).optional(),
// For now we just check for the data we urgently need for the backup
export const UserTableEntrySchema = zod.object({
handle: zod.string().optional(),
id: zod.string(),
legalhold_status: zod.string().optional(),
name: zod.string(),
picture: zod.array(zod.any()).optional(),
qualified_id: zod
.object({
domain: zod.string(),
id: zod.string(),
})
.optional(),
supported_protocols: zod.array(zod.string()).optional(),
team: zod.string().optional(),
// accent_id: zod.number().optional(),
// assets: zod.array(zod.any()).optional(),
// legalhold_status: zod.string().optional(),
// picture: zod.array(zod.any()).optional(),
// supported_protocols: zod.array(zod.string()).optional(),
// team: zod.string().optional(),
});
export const UserTableSchema = zod.array(UserSchema);
export type UserTable = zod.infer<typeof UserTableSchema>;
export type UserTableEntry = zod.infer<typeof UserTableEntrySchema>;

const EventSchema = zod.object({
export const EventTableEntrySchema = zod.object({
category: zod.number().int().optional(),
conversation: zod.string().min(1, 'Conversation is required'),
data: zod.any(),
@@ -89,19 +89,18 @@ const EventSchema = zod.object({
time: zod.string(),
type: zod.string().min(1, 'Type is required'),
});
export const EventTableSchema = zod.array(EventSchema);
export type EventTable = zod.infer<typeof EventTableSchema>;
export type EventTableEntry = zod.infer<typeof EventTableEntrySchema>;

export const AssetContentSchema = zod.object({
content_length: zod.number(),
content_length: zod.string(),
content_type: zod.string(),
domain: zod.string().optional(),
expects_read_confirmation: zod.boolean(),
info: zod.any(),
key: zod.string(),
legal_hold_status: zod.number(),
otr_key: zod.record(zod.string(), zod.number()),
sha256: zod.record(zod.string(), zod.number()),
status: zod.string().optional(),
token: zod.string().optional(),
// expects_read_confirmation: zod.boolean(),
// status: zod.string().optional(),
// legal_hold_status: zod.number(),
});
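AssetContentSchema now mirrors what the web database actually stores: content_length arrives as a string (the exporter converts it with Number.parseInt), and otr_key/sha256 are index-keyed number records that the exporter flattens into arrays via transformObjectToArray. A small sketch of parsing such a payload; the sample object and the flatten helper below are illustrative assumptions, not the project's real data or helper:

import zod from 'zod';

// Trimmed copy of the shape above, kept self-contained for the example.
const AssetContentSketch = zod.object({
  content_length: zod.string(),
  content_type: zod.string(),
  key: zod.string(),
  otr_key: zod.record(zod.string(), zod.number()),
  sha256: zod.record(zod.string(), zod.number()),
});

// Hypothetical flatten helper in the spirit of transformObjectToArray (the real one may return a typed array).
const toNumberArray = (record: Record<string, number>): number[] =>
  Object.entries(record)
    .sort(([a], [b]) => Number(a) - Number(b))
    .map(([, value]) => value);

const sample: unknown = {
  content_length: '1024',
  content_type: 'image/png',
  key: '3-2-aaaa-bbbb',
  otr_key: {0: 12, 1: 250},
  sha256: {0: 7, 1: 99},
};

const parsed = AssetContentSketch.safeParse(sample);
if (parsed.success) {
  const contentLength = Number.parseInt(parsed.data.content_length); // stored as a string in the event table
  const otrKey = toNumberArray(parsed.data.otr_key);
  console.log(contentLength, otrKey);
}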
@@ -21,6 +21,17 @@ import {ConversationRecord} from 'src/script/storage';

import {BackUpConversation} from '../CPB.library';

export const mapConversationRecord = ({id, name}: BackUpConversation): ConversationRecord | null => {
return null;
export const mapConversationRecord = ({id: qualifiedId, name}: BackUpConversation): ConversationRecord | null => {
if (!qualifiedId || !name) {
return null;
}

// We dont get all the "required" fields from the backup, so we need to outsmart the type system.
// ToDO: Fix the backup to include all required fields or check if we can make them optional without breaking anything.
const conversationRecord: ConversationRecord = {
id: qualifiedId.id.toString(),
name: name.toString(),
domain: qualifiedId.domain.toString(),
} as ConversationRecord;
return conversationRecord;
};
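Because the cross-platform backup only carries an id, a domain and a name for each conversation, mapConversationRecord fills in just those fields and widens the result with an as ConversationRecord cast. A hypothetical mapUserRecord following the same pattern; its real implementation is not visible in this excerpt, so the BackupUser field names used below are assumptions:

import {UserRecord} from 'src/script/storage';

import {BackupUser} from '../CPB.library';

// Assumed sketch: only the fields the backup actually carries are filled in,
// relying on the same cast-based workaround as mapConversationRecord for the rest of UserRecord.
export const mapUserRecordSketch = ({id: qualifiedId, name, handle}: BackupUser): UserRecord | null => {
  if (!qualifiedId || !name) {
    return null;
  }

  return {
    id: qualifiedId.id.toString(),
    qualified_id: {id: qualifiedId.id.toString(), domain: qualifiedId.domain.toString()},
    name: name.toString(),
    handle: handle?.toString() ?? '',
  } as UserRecord;
};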