Skip to content

Commit

Permalink
Completed the uploader.
Browse files Browse the repository at this point in the history
  • Loading branch information
LTLA committed Dec 2, 2023
1 parent e4e29dd commit d8b961e
Show file tree
Hide file tree
Showing 2 changed files with 158 additions and 67 deletions.
15 changes: 8 additions & 7 deletions src/upload.js
Original file line number Diff line number Diff line change
Expand Up @@ -305,19 +305,19 @@ export async function uploadPresignedFileHandler(request, nonblockers) {

export async function completeUploadHandler(request, nonblockers) {
let project = decodeURIComponent(request.params.project);
let asset = decodeURIComponent(request.params.project);
let asset = decodeURIComponent(request.params.asset);
let version = decodeURIComponent(request.params.version);
await lock.checkLock(project, asset, version, auth.extractBearerToken(request));

let list_promise = new Promise(resolve => {
let all_files = new Set;
utils.listApply(project + "/" + asset + "/" + version + "/", f => {
if (!f.key.startsWith("..")){
all_files.add(f.key);
}
let prefix = project + "/" + asset + "/" + version + "/";
utils.listApply(prefix, f => {
all_files.add(f.key.slice(prefix.length));
}).then(x => resolve(all_files));
});

let bound_bucket = s3.getR2Binding();
let sumpath = pkeys.versionSummary(project, asset, version);
let assets = await utils.namedResolve({
manifest: bound_bucket.get(pkeys.versionManifest(project, asset, version)).then(x => x.json()),
Expand All @@ -336,13 +336,15 @@ export async function completeUploadHandler(request, nonblockers) {
}
let i = k.lastIndexOf("/");
let hostdir = "";
let fname = k;
if (i >= 0) {
hostdir = k.slice(0, i + 1); // include the trailing slash, see below.
fname = k.slice(i + 1);
}
if (!(hostdir in linkable)) {
linkable[hostdir] = {};
}
linkable[hostdir][k.slice(i)] = v.link;
linkable[hostdir][fname] = v.link;
} else {
if (!assets.listing.has(k)) {
throw new utils.HttpError("path '" + k + "' in manifest should have a file", 400);
Expand All @@ -351,7 +353,6 @@ export async function completeUploadHandler(request, nonblockers) {
}

let info = await assets.summary;
let bound_bucket = s3.getR2Binding();
let preparation = [];
try {
// Create link structures within each subdirectory for bulk consumers.
Expand Down
210 changes: 150 additions & 60 deletions tests/upload.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ test("initializeUploadHandler works correctly for simple uploads", async () => {
let sinfo = await BOUND_BUCKET.get("test-upload/blob/v0/..summary");
let sbody = await sinfo.json();
expect(sbody.upload_user_id).toEqual("ProjectOwner");
expect(Number.isNaN(sbody.upload_started)).toBe(false);
expect(Number.isNaN(Date.parse(sbody.upload_start))).toBe(false);
expect(sbody.on_probation).toEqual(false);

// Check that a version manifest file was posted to the bucket.
Expand Down Expand Up @@ -355,65 +355,155 @@ test("uploadPresignedFileHandler works as expected", async () => {
expect(pres.md5sum_base64.endsWith("==")).toBe(true);
})

///******* Complete uploads checks *******/
//
//test("completeUploadHandler works correctly", async () => {
// // Initializing the upload.
// {
// let req = new Request("http://localhost", {
// method: "POST",
// body: JSON.stringify({
// filenames: [
// { check: "simple", filename: "WHEE", value: { md5sum: "a4caf5afa851da451e2161a4c3ac46bb" } },
// { check: "simple", filename: "BAR", value: { md5sum: "4209df9c96263664123450aa48fd1bfa" } }
// ]
// })
// });
// req.params = { project: "test-complete-upload", version: "test" };
// req.headers.append("Authorization", "Bearer " + setup.mockToken);
//
// let nb = [];
// await upload.initializeUploadHandler(req, nb);
// await Promise.all(nb);
// }
//
// // Completing the upload.
// let req = new Request("http://localhost", { method: "POST", body: "{}" });
// req.params = { project: "test-complete-upload", version: "test" };
// req.query = {};
// req.headers.append("Authorization", "Bearer " + setup.mockToken);
//
// let nb = [];
// gh_test_rigging.postNewIssue = [];
// let res = await upload.completeUploadHandler(req, nb);
//
// let body = await res.json();
// expect(body.job_id).toBe(-1); // placeholder number, used for testing.
// let postinfo = gh_test_rigging.postNewIssue[0];
// expect(postinfo.title).toBe("upload complete");
// let postbody = JSON.parse(postinfo.body);
// expect(postbody.project).toBe("test-complete-upload");
// expect(postbody.permissions.read_access).toBe("public");
// expect(postbody.permissions.owners).toEqual(["ArtifactDB-bot"]);
//})
//
//test("completeUploadHandler throws the right errors", async () => {
// let req = new Request("http://localhost", { method: "POST", body: '{ "read_access": "FOOABLE" }' });
// req.params = { project: "test-complete-check", version: "WHEE" };
//
// // First attempt without identity.
// let nb = [];
// await setup.expectError(upload.completeUploadHandler(req, nb), "user identity");
//
// // Trying again after adding headers.
// req.headers.append("Authorization", "Bearer " + setup.mockToken);
// await setup.expectError(upload.completeUploadHandler(req, nb), "not been previously locked");
//
// // Forcing a lock file.
// await BOUND_BUCKET.put("test-complete-check/WHEE/..LOCK", '{ "user_name": "ArtifactDB-bot" }')
// await setup.expectError(upload.completeUploadHandler(req, nb), "invalid request body");
//})
//
/******* Complete uploads checks *******/

// End-to-end check of completeUploadHandler: initialize an upload session,
// perform the required simple-file uploads, complete the upload, and verify
// lock removal, the updated ..summary, and the per-directory ..links files.
test("completeUploadHandler works correctly", async () => {
    // The 'test' project mocked here is the target of the link entries below;
    // the permissions file lets ProjectOwner initialize the upload.
    let payload = await setup.mockProject();
    await BOUND_BUCKET.put(pkeys.permissions("test-upload"), JSON.stringify({ "owners": [ "ProjectOwner" ], uploaders: [] }));

    // Setting up the state.
    let params = { project: "test-upload", asset: "blob", version: "v0" };
    let key;
    {
        let req = new Request("http://localhost", {
            method: "POST",
            body: JSON.stringify({
                files: [
                    { type: "simple", path: "witch/makoto.csv", md5sum: "a4caf5afa851da451e2161a4c3ac46bb", size: 100 },
                    { type: "simple", path: "witch/akane.csv", md5sum: "3f8aaed3d149be552fc2ec47ae2d1e57", size: 218 },
                    { type: "link", path: "human/chinatsu.txt", link: { project: "test", asset: "blob", version: "v1", path: "foo/bar.txt" } },
                    { type: "link", path: "human/nao.txt", link: { project: "test", asset: "blob", version: "v1", path: "whee.txt" } },
                    { type: "link", path: "haru-no-hakobiya", link: { project: "test", asset: "blob", version: "v1", path: "blah.txt" } },
                    { type: "link", path: "animal/cat/kenny.txt", link: { project: "test", asset: "blob", version: "v1", path: "foo/bar.txt" } },
                    { type: "simple", path: "animal/cat/chito.txt", md5sum: "4ba0e96c086a229b4f39e544e2fa7873", size: 92 },
                ]
            })
        });
        req.params = params;
        req.headers.set("Authorization", "Bearer " + setup.mockTokenOwner);

        let nb = [];
        let init = await (await upload.initializeUploadHandler(req, nb)).json();
        // The session key authorizes the completion request later on.
        key = init.session_key;
    }

    // Now we do the three simple-file uploads that we're obliged to do.
    await BOUND_BUCKET.put("test-upload/blob/v0/witch/makoto.csv", "Minami Shinoda");
    await BOUND_BUCKET.put("test-upload/blob/v0/witch/akane.csv", "Kana Aoi");
    await BOUND_BUCKET.put("test-upload/blob/v0/animal/cat/chito.txt", "Ai Kayano");

    // Completing the upload.
    let req = new Request("http://localhost", { method: "POST", body: "{}" });
    req.params = params;
    req.query = {};

    // Without an Authorization header, completion must be rejected.
    let nb = [];
    await setup.expectError(upload.completeUploadHandler(req, nb), "no user identity");

    req.headers.append("Authorization", "Bearer " + key);
    await upload.completeUploadHandler(req, nb);

    // Checking that the lock on the folder has been removed.
    let lckinfo = await BOUND_BUCKET.head("test-upload/blob/..LOCK");
    expect(lckinfo).toBeNull();

    // Check that an updated summary file was posted to the bucket.
    let sinfo = await BOUND_BUCKET.get("test-upload/blob/v0/..summary");
    let sbody = await sinfo.json();
    expect(Number.isNaN(Date.parse(sbody.upload_finish))).toBe(false);
    expect("on_probation" in sbody).toEqual(false);

    // Check that we created the link files: each directory containing link-type
    // entries gets its own ..links file, keyed by bare filename.
    let link1 = await (await BOUND_BUCKET.get("test-upload/blob/v0/..links")).json();
    expect(link1).toEqual({
        "haru-no-hakobiya": { project: "test", asset: "blob", version: "v1", path: "blah.txt" }
    });

    let link2 = await (await BOUND_BUCKET.get("test-upload/blob/v0/human/..links")).json();
    expect(link2).toEqual({
        "chinatsu.txt": { project: "test", asset: "blob", version: "v1", path: "foo/bar.txt" },
        "nao.txt": { project: "test", asset: "blob", version: "v1", path: "whee.txt" },
    });

    let link3 = await (await BOUND_BUCKET.get("test-upload/blob/v0/animal/cat/..links")).json();
    expect(link3).toEqual({
        "kenny.txt": { project: "test", asset: "blob", version: "v1", path: "foo/bar.txt" },
    });
})

// Completion must fail when a declared simple file was never actually
// uploaded to the bucket before completeUploadHandler is called.
test("completeUploadHandler checks that all uploads are present", async () => {
    await BOUND_BUCKET.put(pkeys.permissions("test-upload"), JSON.stringify({ "owners": [ "ProjectOwner" ], uploaders: [] }));

    // Setting up the state.
    let params = { project: "test-upload", asset: "blob", version: "v0" };
    let key;
    {
        let req = new Request("http://localhost", {
            method: "POST",
            body: JSON.stringify({
                files: [
                    { type: "simple", path: "witch/makoto.csv", md5sum: "a4caf5afa851da451e2161a4c3ac46bb", size: 100 },
                    { type: "simple", path: "witch/akane.csv", md5sum: "3f8aaed3d149be552fc2ec47ae2d1e57", size: 218 },
                    { type: "simple", path: "animal/cat/chito.txt", md5sum: "4ba0e96c086a229b4f39e544e2fa7873", size: 92 },
                ]
            })
        });
        req.params = params;
        req.headers.set("Authorization", "Bearer " + setup.mockTokenOwner);

        let nb = [];
        let init = await (await upload.initializeUploadHandler(req, nb)).json();
        key = init.session_key;
    }

    // Upload fails due to missing files: none of the declared simple files
    // were PUT into the bucket before attempting completion.
    let req = new Request("http://localhost", { method: "POST", body: "{}" });
    req.params = params;
    req.query = {};
    req.headers.append("Authorization", "Bearer " + key);

    let nb = [];
    await setup.expectError(upload.completeUploadHandler(req, nb), "should have a file");
})

// Completion must fail when a physical file has been uploaded at a path that
// was declared as a link: link-type entries must NOT have files behind them.
// Note: the title previously duplicated the preceding test's title
// ("checks that all uploads are present"), which was a copy-paste error —
// duplicate test names break `jest -t` filtering and muddle reports.
test("completeUploadHandler checks that linked files are not present", async () => {
    // The 'test' project mocked here provides the targets for the links below.
    await setup.mockProject();
    await BOUND_BUCKET.put(pkeys.permissions("test-upload"), JSON.stringify({ "owners": [ "ProjectOwner" ], uploaders: [] }));

    // Setting up the state.
    let params = { project: "test-upload", asset: "blob", version: "v0" };
    let key;
    {
        let req = new Request("http://localhost", {
            method: "POST",
            body: JSON.stringify({
                files: [
                    { type: "link", path: "human/chinatsu.txt", link: { project: "test", asset: "blob", version: "v1", path: "foo/bar.txt" } },
                    { type: "link", path: "human/nao.txt", link: { project: "test", asset: "blob", version: "v1", path: "whee.txt" } },
                ]
            })
        });
        req.params = params;
        req.headers.set("Authorization", "Bearer " + setup.mockTokenOwner);

        let nb = [];
        let init = await (await upload.initializeUploadHandler(req, nb)).json();
        key = init.session_key;
    }

    // Adding files at the links, which is forbidden.
    await BOUND_BUCKET.put("test-upload/blob/v0/human/chinatsu.txt", "Eri Suzuki");
    await BOUND_BUCKET.put("test-upload/blob/v0/human/nao.txt", "Shiori Mikami");

    // Upload fails due to files present at the links.
    let req = new Request("http://localhost", { method: "POST", body: "{}" });
    req.params = params;
    req.query = {};
    req.headers.append("Authorization", "Bearer " + key);

    let nb = [];
    await setup.expectError(upload.completeUploadHandler(req, nb), "should not have a file");
})

/******* Abort upload checks *******/

Expand Down

0 comments on commit d8b961e

Please sign in to comment.