From 5934a425fc537724b36346de54a857abc51404c0 Mon Sep 17 00:00:00 2001
From: Itamar Yuran
Date: Thu, 21 Nov 2024 12:16:10 +0200
Subject: [PATCH] classy

---
 webui/src/lib/api/index.js                         |  9 ++--
 .../pages/repositories/repository/objects.jsx      | 50 ++++++++-----------
 2 files changed, 26 insertions(+), 33 deletions(-)

diff --git a/webui/src/lib/api/index.js b/webui/src/lib/api/index.js
index 8c975b04332..98d58d2c56a 100644
--- a/webui/src/lib/api/index.js
+++ b/webui/src/lib/api/index.js
@@ -671,8 +671,7 @@ export const uploadWithProgress = (url, file, method = 'POST', onProgress = null
             status: xhr.status,
             body: xhr.responseText,
             contentType: xhr.getResponseHeader('Content-Type'),
-            rawHeaders: xhr.getAllResponseHeaders(), // add raw headers here
-            xhr: xhr,
+            rawHeaders: xhr.getAllResponseHeaders(), // add raw headers
         });
     });
     xhr.addEventListener('error', () => reject(new Error('Upload Failed')));
@@ -1124,9 +1123,9 @@ class Statistics {
 }
 
 class Staging {
-    async get(repoId, branchId, path, presign = false,checksum = null) {
-        const query = qs({ path, presign,checksum });
-        const response = await apiRequest(`/repositories/${encodeURIComponent(repoId)}/branches/${encodeURIComponent(branchId)}/staging/backing?` + query, {
+    async get(repoId, branchId, path, presign = false) {
+        const query = qs({path, presign});
+        const response = await apiRequest(`/repositories/${encodeURIComponent(repoId)}/branches/${encodeURIComponent(branchId)}/staging/backing?` + query, {
             method: 'GET'
         });
         if (response.status !== 200) {
diff --git a/webui/src/pages/repositories/repository/objects.jsx b/webui/src/pages/repositories/repository/objects.jsx
index ccc7e3f84f6..77a4273a90c 100644
--- a/webui/src/pages/repositories/repository/objects.jsx
+++ b/webui/src/pages/repositories/repository/objects.jsx
@@ -226,13 +226,13 @@ const ImportModal = ({config, repoId, referenceId, referenceType, path = '', onD
     );
 };
 
-const extractChecksumFromRawHeaders = (rawHeaders) => {
-    const headersString = typeof rawHeaders === 'string' ? rawHeaders : rawHeaders.toString();
-    const cleanedHeadersString = headersString.trim();
-    const headerLines = cleanedHeadersString.split('\n');
+function extractChecksumFromResponse(rawHeaders) {
+    const headersString = typeof rawHeaders === 'string' ? rawHeaders : rawHeaders.toString();
+    const cleanedHeadersString = headersString.trim();
+    const headerLines = cleanedHeadersString.split('\n');
+    const parsedHeaders = {};
 
-    const parsedHeaders = {};
-    headerLines.forEach((line) => {
+    headerLines.forEach((line) => {
         const [key, value] = line.split(':', 2).map((part) => part.trim());
         if (key && value) {
             parsedHeaders[key.toLowerCase()] = value;
@@ -240,39 +240,33 @@ const extractChecksumFromRawHeaders = (rawHeaders) => {
     });
 
     if (parsedHeaders['content-md5']) {
-        console.log("Found content-md5:", parsedHeaders['content-md5']);
         return parsedHeaders['content-md5'];
     }
 
-    // fallback to ETag
    if (parsedHeaders['etag']) {
         const cleanedEtag = parsedHeaders['etag'].replace(/"/g, '');
         return cleanedEtag;
     }
     return null;
-};
+}
 
 const uploadFile = async (config, repo, reference, path, file, onProgress) => {
-    const fpath = destinationPath(path, file);
-    if (config.pre_sign_support_ui) {
-        let additionalHeaders = {};
-
-        if (config.blockstore_type === "azure") {
-            additionalHeaders["x-ms-blob-type"] = "BlockBlob";
-            console.log("Azure storage detected, setting BlockBlob header");
-        }
-
-        const getResp = await staging.get(repo.id, reference.id, fpath, config.pre_sign_support_ui);
-        const uploadResponse = await uploadWithProgress(getResp.presigned_url, file, 'PUT', onProgress);
-        const checksum = extractChecksumFromRawHeaders(uploadResponse.rawHeaders);
-
-        if (uploadResponse.status >= 400) {
-            throw new Error(`Error uploading file: HTTP ${uploadResponse.status}`);
+    const fpath = destinationPath(path, file);
+    if (config.pre_sign_support_ui) {
+        let additionalHeaders;
+        if (config.blockstore_type === "azure") {
+            additionalHeaders = { "x-ms-blob-type": "BlockBlob" }
+        }
+        const getResp = await staging.get(repo.id, reference.id, fpath, config.pre_sign_support_ui);
+        const uploadResponse = await uploadWithProgress(getResp.presigned_url, file, 'PUT', onProgress, additionalHeaders);
+        const checksum = extractChecksumFromResponse(uploadResponse.rawHeaders);
+        if (uploadResponse.status >= 400) {
+            throw new Error(`Error uploading file: HTTP ${uploadResponse.status}`);
+        }
+        await staging.link(repo.id, reference.id, fpath, getResp, checksum, file.size, file.type);
+    } else {
+        await objects.upload(repo.id, reference.id, fpath, file, onProgress);
     }
-    await staging.link(repo.id, reference.id, fpath, getResp, checksum, file.size, file.type);
-    } else {
-    await objects.upload(repo.id, reference.id, fpath, file, onProgress);
-    }
 };
 
 const destinationPath = (path, file) => {
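
A standalone sketch of the header-parsing approach used by extractChecksumFromResponse in the patch above, for quick local testing outside the webui; the sample header block and the helper name checksumFromRawHeaders are illustrative, and real input comes from xhr.getAllResponseHeaders():

    // Sketch only: mirrors the parsing logic added in objects.jsx above.
    // Prefers Content-MD5 and falls back to a de-quoted ETag.
    function checksumFromRawHeaders(rawHeaders) {
        const parsed = {};
        String(rawHeaders).trim().split('\n').forEach((line) => {
            const [key, value] = line.split(':', 2).map((part) => part.trim());
            if (key && value) parsed[key.toLowerCase()] = value;
        });
        if (parsed['content-md5']) return parsed['content-md5'];
        if (parsed['etag']) return parsed['etag'].replace(/"/g, '');
        return null;
    }

    // Hypothetical raw header block; prints "abc123".
    const sampleRawHeaders = 'Content-Type: application/octet-stream\r\nETag: "abc123"\r\n';
    console.log(checksumFromRawHeaders(sampleRawHeaders));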