Merge pull request #80 from flatironinstitute/zip-import-export-js
Zip import/export (based on the zip-import-export branch)
Showing 15 changed files with 554 additions and 27 deletions.
package.json
@@ -25,6 +25,7 @@
    "plotly.js-cartesian-dist": "^2.33.0",
    "react": "^18.2.0",
    "react-dom": "^18.2.0",
    "react-dropzone": "^14.2.3",
    "react-plotly.js": "^2.6.0",
    "react-router-dom": "^6.17.0",
    "react-visibility-sensor": "^5.1.1",
@@ -50,6 +51,5 @@
    "typescript": "^5.0.2",
    "vite": "^5.2.12",
    "vitest": "^1.6.0"
  },
  "packageManager": "[email protected]+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610"
}
}
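The new react-dropzone dependency supplies the drag-and-drop surface for zip upload. The sketch below shows one typical way it could be wired to the deserializeZipToFiles function introduced later in this diff; it is not the PR's actual component, and the component name, props, and import path are hypothetical.

import { useCallback } from "react";
import { useDropzone } from "react-dropzone";
// Assumed module path; the PR's real file layout is not visible in this excerpt
import { deserializeZipToFiles } from "./SPAnalysisSerialization";

// Hypothetical upload surface: reads a dropped .zip and hands the decoded
// project files to the caller
const ProjectZipDropzone = ({ onProjectFiles }: {
    onProjectFiles: (files: Awaited<ReturnType<typeof deserializeZipToFiles>>) => void
}) => {
    const onDrop = useCallback((acceptedFiles: File[]) => {
        const zipFile = acceptedFiles[0];
        if (!zipFile) return;
        // File.arrayBuffer() yields the raw bytes that deserializeZipToFiles expects
        zipFile.arrayBuffer()
            .then(deserializeZipToFiles)
            .then(onProjectFiles);
    }, [onProjectFiles]);

    const { getRootProps, getInputProps, isDragActive } = useDropzone({ onDrop });

    return (
        <div {...getRootProps()}>
            <input {...getInputProps()} />
            {isDragActive ? "Drop the project .zip here" : "Drag a project .zip here, or click to browse"}
        </div>
    );
};

export default ProjectZipDropzone;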
FileMapping.ts
@@ -0,0 +1,105 @@
import { SPAnalysisDataModel, SPAnalysisPersistentDataModel, stringifyField } from "./SPAnalysisDataModel"

// This code exists to provide rigorous definitions for the mappings between
// the in-memory representation of a Stan Playground project (i.e. the
// SPAnalysisDataModel) and the on-disk representation of its parts, as (for example)
// when downloading or uploading a zip.
//
// Effectively, we need to map among three things:
//   1. the fields of the in-memory data model
//   2. the names of the on-disk/in-zip files
//   3. the actual contents of those on-disk files
// We need the link between 1-2 to serialize the data model fields to files, and
// between 2-3 for deserialization from files.

// Mechanically, we'll also want an exhaustive list of the filenames we will use
// (that's the FileNames enum).

export enum FileNames {
    META = 'meta.json',
    SAMPLING = 'sampling_opts.json',
    STANFILE = 'main.stan',
    DATAFILE = 'data.json',
}

// FileMapType enforces an exhaustive mapping from data-model fields to the
// known file names that store those fields. (This is the 1-2 leg of the
// triangle.)
type FileMapType = {
    [name in keyof SPAnalysisPersistentDataModel]: FileNames
}

// This dictionary stores the actual (global) fields-to-file-names map.
// Because it's of type FileMapType, it enforces that every key in the
// data model (except the "ephemera" key, which is not to be preserved)
// maps to some file name.
export const SPAnalysisFileMap: FileMapType = {
    meta: FileNames.META,
    samplingOpts: FileNames.SAMPLING,
    stanFileContent: FileNames.STANFILE,
    dataFileContent: FileNames.DATAFILE,
}

// The FileRegistry is the 2-3 leg of the triangle: it maps the known file names
// to their actual contents when read from disk.
// Since we don't *actually* want to mandate that all the known files
// are present, it'll almost always be used in a Partial<>.
// But this way, during deserialization, we can associate the (string) data with
// the file it came from, and the file with the field of the data model, so we
// know how to (re)populate the data model.
export type FileRegistry = {
    [name in FileNames]: string
}

// This is a serialization function that maps a data model to a FileRegistry,
// i.e. a dictionary mapping the intended file names to their intended contents.
export const mapModelToFileManifest = (data: SPAnalysisDataModel): Partial<FileRegistry> => {
    const fileManifest: Partial<FileRegistry> = {};
    const fields = Object.keys(SPAnalysisFileMap) as (keyof SPAnalysisDataModel)[]
    fields.forEach((k) => {
        if (k === "ephemera") return;
        const key = SPAnalysisFileMap[k]
        fileManifest[key] = stringifyField(data, k)
    })
    return fileManifest
}

// This is used during deserialization as an intermediate representation.
// It maps the (named) fields of the data model to the string representation of their
// contents as was written into the file representation.
// During actual deserialization, special-case files can be deserialized as needed,
// and the actual file list can just be mapped directly.
export type FieldsContentsMap = {
    [name in keyof SPAnalysisPersistentDataModel]: string
}

// This is the inverse of the SPAnalysisFileMap dictionary, with the bonus that it actually
// populates the fields.
export const mapFileContentsToModel = (files: Partial<FileRegistry>): Partial<FieldsContentsMap> => {
    const fields = Object.keys(files)
    const theMap: Partial<FieldsContentsMap> = {}
    fields.forEach(f => {
        switch (f) {
            case FileNames.META: {
                theMap.meta = files[f]
                break;
            }
            case FileNames.DATAFILE: {
                theMap.dataFileContent = files[f]
                break;
            }
            case FileNames.STANFILE: {
                theMap.stanFileContent = files[f]
                break;
            }
            case FileNames.SAMPLING: {
                theMap.samplingOpts = files[f]
                break;
            }
            default:
                // Don't do anything for unrecognized filenames
                break;
        }
    })
    return theMap
}
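As a minimal usage sketch (not part of the PR), the round trip below exercises both legs of the mapping. The import paths and the someModel argument, assumed to be an existing SPAnalysisDataModel such as the current project state, are assumptions.

import { FileNames, mapFileContentsToModel, mapModelToFileManifest } from "./FileMapping";
import { SPAnalysisDataModel } from "./SPAnalysisDataModel";

// Round trip: model fields -> file manifest -> model fields again
const describeRoundTrip = (someModel: SPAnalysisDataModel) => {
    // Legs 1-2 and 2-3: field names are mapped to file names, and the field
    // values are stringified to become the file contents
    const manifest = mapModelToFileManifest(someModel);
    console.log(Object.keys(manifest));
    // -> ["meta.json", "sampling_opts.json", "main.stan", "data.json"]

    // Going back: file names plus string contents are mapped to field names,
    // so the caller knows which slot of the data model each file repopulates
    const fields = mapFileContentsToModel(manifest);
    console.log(fields.stanFileContent === manifest[FileNames.STANFILE]); // true
};

export default describeRoundTrip;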
@@ -0,0 +1,72 @@
import JSZip from "jszip"
import { replaceSpacesWithUnderscores } from "../util/replaceSpaces"
import { FileNames, FileRegistry, mapFileContentsToModel, mapModelToFileManifest, SPAnalysisFileMap } from "./FileMapping"
import { getStringKnownFileKeys, SPAnalysisDataModel } from "./SPAnalysisDataModel"

export const serializeAnalysisToLocalStorage = (data: SPAnalysisDataModel): string => {
    const intermediary = { ...data, ephemera: undefined }
    return JSON.stringify(intermediary)
}

export const deserializeAnalysisFromLocalStorage = (serialized: string): SPAnalysisDataModel => {
    const intermediary = JSON.parse(serialized)
    // Not sure if this is strictly necessary
    intermediary.ephemera = {}
    const stringFileKeys = getStringKnownFileKeys()
    stringFileKeys.forEach((k) => intermediary.ephemera[k] = intermediary[k]);
    return intermediary as SPAnalysisDataModel
}

export const serializeAsZip = async (data: SPAnalysisDataModel): Promise<[Blob, string]> => {
    const fileManifest = mapModelToFileManifest(data)
    const folderName = replaceSpacesWithUnderscores(data.meta.title)
    const zip = new JSZip()
    const folder = zip.folder(folderName)
    if (!folder) {
        throw new Error('Error creating folder in zip file')
    }
    Object.entries(fileManifest).forEach(([name, content]) => {
        folder.file(name, content)
    })
    const zipBlob = await zip.generateAsync({ type: 'blob' })

    return [zipBlob, folderName]
}

export const parseFile = (fileBuffer: ArrayBuffer) => {
    const content = new TextDecoder().decode(fileBuffer)
    return content
}

export const deserializeZipToFiles = async (zipBuffer: ArrayBuffer) => {
    const zip = await JSZip.loadAsync(zipBuffer)
    const dirNames: string[] = []
    zip.forEach((relpath, file) => file.dir && dirNames.push(relpath))
    const folderName = dirNames[0] ?? ''
    if (!dirNames.every(n => n === folderName)) {
        throw new Error('Multiple directories in zip file')
    }
    zip.forEach((_, file) => {
        if (!file.name.startsWith(folderName)) {
            throw new Error('Files are not all in a single folder')
        }
    })
    const folderLength = folderName.length
    const files: { [name: string]: string } = {}
    // we want to use a traditional for loop here, since async doesn't play nicely with higher-order callbacks
    for (const name in zip.files) {
        const file = zip.files[name]
        if (file.dir) continue
        const basename = name.substring(folderLength)
        if (Object.values(SPAnalysisFileMap).includes(basename as FileNames)) {
            const content = await file.async('arraybuffer')
            const decoded = new TextDecoder().decode(content)
            files[basename] = decoded
        } else {
            throw new Error(`Unrecognized file in zip: ${file.name} (basename ${basename})`)
        }
    }
    return mapFileContentsToModel(files as Partial<FileRegistry>)
}
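The UI glue that calls these functions is not shown in this excerpt. The sketch below illustrates one plausible way to drive them from the browser; the helper names, the storage key, and the "./SPAnalysisSerialization" module path are assumptions, while serializeAsZip and the localStorage pair are the functions defined above.

import {
    serializeAsZip,
    serializeAnalysisToLocalStorage,
    deserializeAnalysisFromLocalStorage,
} from "./SPAnalysisSerialization"; // assumed module name
import { SPAnalysisDataModel } from "./SPAnalysisDataModel";

// Zip export: serializeAsZip yields a Blob plus the folder name derived from the project title
export const downloadProjectAsZip = async (data: SPAnalysisDataModel) => {
    const [zipBlob, folderName] = await serializeAsZip(data);
    // Trigger a client-side download of the generated blob
    const url = URL.createObjectURL(zipBlob);
    const a = document.createElement("a");
    a.href = url;
    a.download = `${folderName}.zip`;
    a.click();
    URL.revokeObjectURL(url);
};

// localStorage round trip: ephemera is stripped on save and rebuilt on load
const STORAGE_KEY = "stan-playground-project"; // hypothetical key

export const persistProject = (data: SPAnalysisDataModel) => {
    window.localStorage.setItem(STORAGE_KEY, serializeAnalysisToLocalStorage(data));
};

export const restoreProject = (): SPAnalysisDataModel | undefined => {
    const serialized = window.localStorage.getItem(STORAGE_KEY);
    return serialized ? deserializeAnalysisFromLocalStorage(serialized) : undefined;
};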