From cc6b94ff295d048ead8554048d903952cfc358e8 Mon Sep 17 00:00:00 2001
From: Inian
Date: Thu, 15 Jul 2021 15:46:17 +0800
Subject: [PATCH 01/13] move s3 utils to a separate storage backend folder

---
 src/{utils => backend}/s3.ts         | 0
 src/routes/bucket/emptyBucket.ts     | 8 ++++++--
 src/routes/object/copyObject.ts      | 8 ++++++--
 src/routes/object/createObject.ts    | 2 +-
 src/routes/object/deleteObject.ts    | 2 +-
 src/routes/object/deleteObjects.ts   | 2 +-
 src/routes/object/getObject.ts       | 2 +-
 src/routes/object/getPublicObject.ts | 2 +-
 src/routes/object/getSignedObject.ts | 2 +-
 src/routes/object/moveObject.ts      | 2 +-
 src/routes/object/updateObject.ts    | 2 +-
 src/test/bucket.test.ts              | 2 +-
 src/test/object.test.ts              | 2 +-
 13 files changed, 22 insertions(+), 14 deletions(-)
 rename src/{utils => backend}/s3.ts (100%)

diff --git a/src/utils/s3.ts b/src/backend/s3.ts
similarity index 100%
rename from src/utils/s3.ts
rename to src/backend/s3.ts
diff --git a/src/routes/bucket/emptyBucket.ts b/src/routes/bucket/emptyBucket.ts
index f6c40ce7..34c3c3a5 100644
--- a/src/routes/bucket/emptyBucket.ts
+++ b/src/routes/bucket/emptyBucket.ts
@@ -4,7 +4,7 @@ import { AuthenticatedRequest, Bucket, Obj } from '../../types/types'
 import { getPostgrestClient, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { deleteObjects, initClient } from '../../utils/s3'
+import { deleteObjects, initClient } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
 const client = initClient(region, globalS3Endpoint)
@@ -60,7 +60,11 @@ export default async function routes(fastify: FastifyInstance) {
       let deleteError, deleteData, objectError, objects, objectStatus
 
       do {
-        ;({ data: objects, error: objectError, status: objectStatus } = await postgrest
+        ;({
+          data: objects,
+          error: objectError,
+          status: objectStatus,
+        } = await postgrest
           .from('objects')
           .select('name, id')
           .eq('bucket_id', bucketId)
diff --git a/src/routes/object/copyObject.ts b/src/routes/object/copyObject.ts
index 7c35f6d9..b326a1db 100644
--- a/src/routes/object/copyObject.ts
+++ b/src/routes/object/copyObject.ts
@@ -4,7 +4,7 @@ import { AuthenticatedRequest, Obj } from '../../types/types'
 import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { copyObject, initClient } from '../../utils/s3'
+import { copyObject, initClient } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig()
 const client = initClient(region, globalS3Endpoint)
@@ -99,7 +99,11 @@ export default async function routes(fastify: FastifyInstance) {
         owner,
       })
       request.log.info({ origObject }, 'newObject')
-      const { data: results, error, status } = await postgrest
+      const {
+        data: results,
+        error,
+        status,
+      } = await postgrest
         .from('objects')
         .insert([newObject], {
           returning: 'minimal',
diff --git a/src/routes/object/createObject.ts b/src/routes/object/createObject.ts
index 15a628c2..8aea87bb 100644
--- a/src/routes/object/createObject.ts
+++ b/src/routes/object/createObject.ts
@@ -6,7 +6,7 @@ import { Obj, ObjectMetadata } from '../../types/types'
 import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { deleteObject, headObject, initClient, uploadObject } from '../../utils/s3'
+import { deleteObject, headObject, initClient, uploadObject } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig()
 const client = initClient(region, globalS3Endpoint)
diff --git a/src/routes/object/deleteObject.ts b/src/routes/object/deleteObject.ts
index 3e7d1aa1..6d70441e 100644
--- a/src/routes/object/deleteObject.ts
+++ b/src/routes/object/deleteObject.ts
@@ -4,7 +4,7 @@ import { AuthenticatedRequest, Obj } from '../../types/types'
 import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { deleteObject, initClient } from '../../utils/s3'
+import { deleteObject, initClient } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
 const client = initClient(region, globalS3Endpoint)
diff --git a/src/routes/object/deleteObjects.ts b/src/routes/object/deleteObjects.ts
index 80b60334..569febf1 100644
--- a/src/routes/object/deleteObjects.ts
+++ b/src/routes/object/deleteObjects.ts
@@ -5,7 +5,7 @@ import { AuthenticatedRequest, Obj } from '../../types/types'
 import { getPostgrestClient, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema } from '../../utils/generic-routes'
-import { deleteObjects, initClient } from '../../utils/s3'
+import { deleteObjects, initClient } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
 const client = initClient(region, globalS3Endpoint)
diff --git a/src/routes/object/getObject.ts b/src/routes/object/getObject.ts
index d0848fe0..a394aac8 100644
--- a/src/routes/object/getObject.ts
+++ b/src/routes/object/getObject.ts
@@ -6,7 +6,7 @@ import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../u
 import { getConfig } from '../../utils/config'
 import { normalizeContentType } from '../../utils'
 import { createResponse } from '../../utils/generic-routes'
-import { getObject, initClient } from '../../utils/s3'
+import { getObject, initClient } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
 const client = initClient(region, globalS3Endpoint)
diff --git a/src/routes/object/getPublicObject.ts b/src/routes/object/getPublicObject.ts
index de41e671..ecfca2d0 100644
--- a/src/routes/object/getPublicObject.ts
+++ b/src/routes/object/getPublicObject.ts
@@ -4,7 +4,7 @@ import { Bucket } from '../../types/types'
 import { getPostgrestClient, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { normalizeContentType } from '../../utils'
-import { getObject, initClient } from '../../utils/s3'
+import { getObject, initClient } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig()
 const client = initClient(region, globalS3Endpoint)
diff --git a/src/routes/object/getSignedObject.ts b/src/routes/object/getSignedObject.ts
index 94057df7..230b24e7 100644
--- a/src/routes/object/getSignedObject.ts
+++ b/src/routes/object/getSignedObject.ts
@@ -5,7 +5,7 @@ import { verifyJWT } from '../../utils/'
 import { getConfig } from '../../utils/config'
 import { normalizeContentType } from '../../utils'
 import { createResponse } from '../../utils/generic-routes'
-import { getObject, initClient } from '../../utils/s3'
+import { getObject, initClient } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
 const client = initClient(region, globalS3Endpoint)
diff --git a/src/routes/object/moveObject.ts b/src/routes/object/moveObject.ts
index df0a6404..391b2f97 100644
--- a/src/routes/object/moveObject.ts
+++ b/src/routes/object/moveObject.ts
@@ -4,7 +4,7 @@ import { AuthenticatedRequest, Obj } from '../../types/types'
 import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { copyObject, deleteObject, initClient } from '../../utils/s3'
+import { copyObject, deleteObject, initClient } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
 const client = initClient(region, globalS3Endpoint)
diff --git a/src/routes/object/updateObject.ts b/src/routes/object/updateObject.ts
index de8571be..b63051f3 100644
--- a/src/routes/object/updateObject.ts
+++ b/src/routes/object/updateObject.ts
@@ -5,7 +5,7 @@ import { Obj, ObjectMetadata } from '../../types/types'
 import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { headObject, initClient, uploadObject } from '../../utils/s3'
+import { headObject, initClient, uploadObject } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
 const client = initClient(region, globalS3Endpoint)
diff --git a/src/test/bucket.test.ts b/src/test/bucket.test.ts
index bd64cddd..7edb19db 100644
--- a/src/test/bucket.test.ts
+++ b/src/test/bucket.test.ts
@@ -2,7 +2,7 @@ import dotenv from 'dotenv'
 
 import app from '../app'
 import { getConfig } from '../utils/config'
-import * as utils from '../utils/s3'
+import * as utils from '../backend/s3'
 
 dotenv.config({ path: '.env.test' })
 const { anonKey } = getConfig()
diff --git a/src/test/object.test.ts b/src/test/object.test.ts
index da5c9758..1285b656 100644
--- a/src/test/object.test.ts
+++ b/src/test/object.test.ts
@@ -5,7 +5,7 @@ import fs from 'fs'
 import app from '../app'
 import { getConfig } from '../utils/config'
 import { signJWT } from '../utils/index'
-import * as utils from '../utils/s3'
+import * as utils from '../backend/s3'
 
 dotenv.config({ path: '.env.test' })
 const { anonKey, serviceKey } = getConfig()

From 518f6112b39a17fc4ee161f366f17069c973676d Mon Sep 17 00:00:00 2001
From: Inian
Date: Thu, 15 Jul 2021 16:18:54 +0800
Subject: [PATCH 02/13] convert s3Backend to a class

---
 src/backend/s3.ts                    | 175 +++++++++++++--------------
 src/routes/bucket/emptyBucket.ts     |   6 +-
 src/routes/bucket/getAllBuckets.ts   |   6 +-
 src/routes/object/copyObject.ts      |  10 +-
 src/routes/object/createObject.ts    |  14 +--
 src/routes/object/deleteObject.ts    |   6 +-
 src/routes/object/deleteObjects.ts   |   6 +-
 src/routes/object/getObject.ts       |   6 +-
 src/routes/object/getPublicObject.ts |   6 +-
 src/routes/object/getSignedObject.ts |   6 +-
 src/routes/object/moveObject.ts      |   8 +-
 src/routes/object/updateObject.ts    |  12 +-
 12 files changed, 127 insertions(+), 134 deletions(-)

diff --git a/src/backend/s3.ts b/src/backend/s3.ts
index b6e9adb0..6bbbdcd1 100644
--- a/src/backend/s3.ts
+++ b/src/backend/s3.ts
@@ -17,106 +17,95 @@ import {
 import { Upload } from '@aws-sdk/lib-storage'
 import { NodeHttpHandler } from '@aws-sdk/node-http-handler'
 
-export function initClient(region: string, endpoint?: string | undefined): S3Client {
-  const params: S3ClientConfig = {
-    region,
-    runtime: 'node',
-    requestHandler: new NodeHttpHandler({
-      socketTimeout: 300000,
-    }),
-  }
-  if (endpoint) {
-    params.endpoint = endpoint
-  }
-  return new S3Client(params)
-}
+export class S3Backend {
+  client: S3Client
 
-export async function getObject(
-  client: S3Client,
-  bucketName: string,
-  key: string,
-  range?: string
-): Promise<GetObjectCommandOutput> {
-  const command = new GetObjectCommand({
-    Bucket: bucketName,
-    Key: key,
-    Range: range,
-  })
-  const data = await client.send(command)
-  return data
-}
+  constructor(region: string, endpoint?: string | undefined) {
+    const params: S3ClientConfig = {
+      region,
+      runtime: 'node',
+      requestHandler: new NodeHttpHandler({
+        socketTimeout: 300000,
+      }),
+    }
+    if (endpoint) {
+      params.endpoint = endpoint
+    }
+    this.client = new S3Client(params)
+  }
 
-export async function uploadObject(
-  client: S3Client,
-  bucketName: string,
-  key: string,
-  body: NodeJS.ReadableStream,
-  contentType: string,
-  cacheControl: string
-): Promise<ServiceOutputTypes> {
-  const paralellUploadS3 = new Upload({
-    client,
-    params: {
+  async getObject(
+    bucketName: string,
+    key: string,
+    range?: string
+  ): Promise<GetObjectCommandOutput> {
+    const command = new GetObjectCommand({
       Bucket: bucketName,
       Key: key,
-      /* @ts-expect-error: https://github.com/aws/aws-sdk-js-v3/issues/2085 */
-      Body: body,
-      ContentType: contentType,
-      CacheControl: cacheControl,
-    },
-  })
+      Range: range,
+    })
+    const data = await this.client.send(command)
+    return data
+  }
 
-  return await paralellUploadS3.done()
-}
+  async uploadObject(
+    bucketName: string,
+    key: string,
+    body: NodeJS.ReadableStream,
+    contentType: string,
+    cacheControl: string
+  ): Promise<ServiceOutputTypes> {
+    const paralellUploadS3 = new Upload({
+      client: this.client,
+      params: {
+        Bucket: bucketName,
+        Key: key,
+        /* @ts-expect-error: https://github.com/aws/aws-sdk-js-v3/issues/2085 */
+        Body: body,
+        ContentType: contentType,
+        CacheControl: cacheControl,
+      },
+    })
 
-export async function deleteObject(
-  client: S3Client,
-  bucket: string,
-  key: string
-): Promise<DeleteObjectCommandOutput> {
-  const command = new DeleteObjectCommand({
-    Bucket: bucket,
-    Key: key,
-  })
-  return await client.send(command)
-}
+    return await paralellUploadS3.done()
+  }
 
-export async function copyObject(
-  client: S3Client,
-  bucket: string,
-  source: string,
-  destination: string
-): Promise<CopyObjectCommandOutput> {
-  const command = new CopyObjectCommand({
-    Bucket: bucket,
-    CopySource: `/${bucket}/${source}`,
-    Key: destination,
-  })
-  return await client.send(command)
-}
+  async deleteObject(bucket: string, key: string): Promise<DeleteObjectCommandOutput> {
+    const command = new DeleteObjectCommand({
+      Bucket: bucket,
+      Key: key,
+    })
+    return await this.client.send(command)
+  }
 
-export async function deleteObjects(
-  client: S3Client,
-  bucket: string,
-  prefixes: ObjectIdentifier[]
-): Promise<DeleteObjectsOutput> {
-  const command = new DeleteObjectsCommand({
-    Bucket: bucket,
-    Delete: {
-      Objects: prefixes,
-    },
-  })
-  return await client.send(command)
-}
+  async copyObject(
+    bucket: string,
+    source: string,
+    destination: string
+  ): Promise<CopyObjectCommandOutput> {
+    const command = new CopyObjectCommand({
+      Bucket: bucket,
+      CopySource: `/${bucket}/${source}`,
+      Key: destination,
+    })
+    return await this.client.send(command)
+  }
 
-export async function headObject(
-  client: S3Client,
-  bucket: string,
-  key: string
-): Promise<HeadObjectOutput> {
-  const command = new HeadObjectCommand({
-    Bucket: bucket,
-    Key: key,
-  })
-  return await client.send(command)
+  async deleteObjects(bucket: string, prefixes: ObjectIdentifier[]): Promise<DeleteObjectsOutput> {
+    const command = new DeleteObjectsCommand({
+      Bucket: bucket,
+      Delete: {
+        Objects: prefixes,
+      },
+    })
+    return await this.client.send(command)
+  }
+
+  async headObject(bucket: string, key: string): Promise<HeadObjectOutput> {
+    const command = new HeadObjectCommand({
+      Bucket: bucket,
+      Key: key,
+    })
+    return await this.client.send(command)
+  }
 }
diff --git a/src/routes/bucket/emptyBucket.ts b/src/routes/bucket/emptyBucket.ts
index 34c3c3a5..fa8f7371 100644
--- a/src/routes/bucket/emptyBucket.ts
+++ b/src/routes/bucket/emptyBucket.ts
@@ -4,10 +4,10 @@ import { AuthenticatedRequest, Bucket, Obj } from '../../types/types'
 import { getPostgrestClient, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { deleteObjects, initClient } from '../../backend/s3'
+import { S3Backend } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
-const client = initClient(region, globalS3Endpoint)
+const storageBackend = new S3Backend(region, globalS3Endpoint)
 
 const emptyBucketParamsSchema = {
   type: 'object',
@@ -96,7 +96,7 @@ export default async function routes(fastify: FastifyInstance) {
           }
         })
         // delete files from s3 asynchronously
-        deleteObjects(client, globalS3Bucket, params)
+        storageBackend.deleteObjects(globalS3Bucket, params)
       }
     }
   } while (!deleteError && !objectError && objects && objects.length > 0)
diff --git a/src/routes/bucket/getAllBuckets.ts b/src/routes/bucket/getAllBuckets.ts
index 0099511b..308118f9 100644
--- a/src/routes/bucket/getAllBuckets.ts
+++ b/src/routes/bucket/getAllBuckets.ts
@@ -37,7 +37,11 @@ export default async function routes(fastify: FastifyInstance) {
       const jwt = authHeader.substring('Bearer '.length)
 
       const postgrest = getPostgrestClient(jwt)
-      const { data: results, error, status } = await postgrest
+      const {
+        data: results,
+        error,
+        status,
+      } = await postgrest
         .from('buckets')
         .select('id, name, public, owner, created_at, updated_at')
 
diff --git a/src/routes/object/copyObject.ts b/src/routes/object/copyObject.ts
index b326a1db..2720ceab 100644
--- a/src/routes/object/copyObject.ts
+++ b/src/routes/object/copyObject.ts
@@ -4,10 +4,10 @@ import { AuthenticatedRequest, Obj } from '../../types/types'
 import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { copyObject, initClient } from '../../backend/s3'
+import { S3Backend } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig()
-const client = initClient(region, globalS3Endpoint)
+const storageBackend = new S3Backend(region, globalS3Endpoint)
 
 const copyRequestBodySchema = {
   type: 'object',
@@ -118,7 +118,11 @@ export default async function routes(fastify: FastifyInstance) {
       const s3SourceKey = `${projectRef}/${bucketId}/${sourceKey}`
       const s3DestinationKey = `${projectRef}/${bucketId}/${destinationKey}`
 
-      const copyResult = await copyObject(client, globalS3Bucket, s3SourceKey, s3DestinationKey)
+      const copyResult = await storageBackend.copyObject(
+        globalS3Bucket,
+        s3SourceKey,
+        s3DestinationKey
+      )
       return response.status(copyResult.$metadata.httpStatusCode ?? 200).send({
         Key: `${bucketId}/${destinationKey}`,
       })
diff --git a/src/routes/object/createObject.ts b/src/routes/object/createObject.ts
index 8aea87bb..5d34e5b0 100644
--- a/src/routes/object/createObject.ts
+++ b/src/routes/object/createObject.ts
@@ -6,10 +6,10 @@ import { Obj, ObjectMetadata } from '../../types/types'
 import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { deleteObject, headObject, initClient, uploadObject } from '../../backend/s3'
+import { S3Backend } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig()
-const client = initClient(region, globalS3Endpoint)
+const storageBackend = new S3Backend(region, globalS3Endpoint)
 
 const createObjectParamsSchema = {
   type: 'object',
@@ -155,8 +155,7 @@ export default async function routes(fastify: FastifyInstance) {
       const cacheTime = data.fields.cacheControl?.value
       cacheControl = cacheTime ? `max-age=${cacheTime}` : 'no-cache'
       mimeType = data.mimetype
-      uploadResult = await uploadObject(
-        client,
+      uploadResult = await storageBackend.uploadObject(
         globalS3Bucket,
         s3Key,
         data.file,
@@ -173,8 +172,7 @@ export default async function routes(fastify: FastifyInstance) {
       mimeType = request.headers['content-type']
       cacheControl = request.headers['cache-control'] ?? 'no-cache'
 
-      uploadResult = await uploadObject(
-        client,
+      uploadResult = await storageBackend.uploadObject(
         globalS3Bucket,
         s3Key,
         request.raw,
@@ -195,7 +193,7 @@ export default async function routes(fastify: FastifyInstance) {
           bucket_id: bucketName,
         })
         .single()
-      await deleteObject(client, globalS3Bucket, s3Key)
+      await storageBackend.deleteObject(globalS3Bucket, s3Key)
 
       // return an error response
       return response
@@ -209,7 +207,7 @@ export default async function routes(fastify: FastifyInstance) {
       )
     }
 
-    const objectMetadata = await headObject(client, globalS3Bucket, s3Key)
+    const objectMetadata = await storageBackend.headObject(globalS3Bucket, s3Key)
     // update content-length as super user since user may not have update permissions
     const metadata: ObjectMetadata = {
       mimetype: mimeType,
diff --git a/src/routes/object/deleteObject.ts b/src/routes/object/deleteObject.ts
index 6d70441e..4f2da913 100644
--- a/src/routes/object/deleteObject.ts
+++ b/src/routes/object/deleteObject.ts
@@ -4,10 +4,10 @@ import { AuthenticatedRequest, Obj } from '../../types/types'
 import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { deleteObject, initClient } from '../../backend/s3'
+import { S3Backend } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
-const client = initClient(region, globalS3Endpoint)
+const storageBackend = new S3Backend(region, globalS3Endpoint)
 
 const deleteObjectParamsSchema = {
   type: 'object',
@@ -77,7 +77,7 @@ export default async function routes(fastify: FastifyInstance) {
 
       // if successfully deleted, delete from s3 too
       const s3Key = `${projectRef}/${bucketName}/${objectName}`
-      await deleteObject(client, globalS3Bucket, s3Key)
+      await storageBackend.deleteObject(globalS3Bucket, s3Key)
 
       return response.status(200).send(createResponse('Successfully deleted'))
     }
diff --git a/src/routes/object/deleteObjects.ts b/src/routes/object/deleteObjects.ts
index 569febf1..caf5748f 100644
--- a/src/routes/object/deleteObjects.ts
+++ b/src/routes/object/deleteObjects.ts
@@ -5,10 +5,10 @@ import { AuthenticatedRequest, Obj } from '../../types/types'
 import { getPostgrestClient, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema } from '../../utils/generic-routes'
-import { deleteObjects, initClient } from '../../backend/s3'
+import { S3Backend } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
-const client = initClient(region, globalS3Endpoint)
+const storageBackend = new S3Backend(region, globalS3Endpoint)
 
 const deleteObjectsParamsSchema = {
   type: 'object',
@@ -84,7 +84,7 @@ export default async function routes(fastify: FastifyInstance) {
           return { Key: `${projectRef}/${bucketName}/${ele.name}` }
         })
 
-        await deleteObjects(client, globalS3Bucket, prefixesToDelete)
+        await storageBackend.deleteObjects(globalS3Bucket, prefixesToDelete)
       }
 
       return response.status(200).send(results)
diff --git a/src/routes/object/getObject.ts b/src/routes/object/getObject.ts
index a394aac8..46a430f0 100644
--- a/src/routes/object/getObject.ts
+++ b/src/routes/object/getObject.ts
@@ -6,10 +6,10 @@ import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../u
 import { getConfig } from '../../utils/config'
 import { normalizeContentType } from '../../utils'
 import { createResponse } from '../../utils/generic-routes'
-import { getObject, initClient } from '../../backend/s3'
+import { S3Backend } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
-const client = initClient(region, globalS3Endpoint)
+const storageBackend = new S3Backend(region, globalS3Endpoint)
 
 const getObjectParamsSchema = {
   type: 'object',
@@ -66,7 +66,7 @@ async function requestHandler(
   // send the object from s3
   const s3Key = `${projectRef}/${bucketName}/${objectName}`
   request.log.info(s3Key)
-  const data = await getObject(client, globalS3Bucket, s3Key, range)
+  const data = await storageBackend.getObject(globalS3Bucket, s3Key, range)
 
   response
     .status(data.$metadata.httpStatusCode ?? 200)
diff --git a/src/routes/object/getPublicObject.ts b/src/routes/object/getPublicObject.ts
index ecfca2d0..c3935b5f 100644
--- a/src/routes/object/getPublicObject.ts
+++ b/src/routes/object/getPublicObject.ts
@@ -4,10 +4,10 @@ import { Bucket } from '../../types/types'
 import { getPostgrestClient, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { normalizeContentType } from '../../utils'
-import { getObject, initClient } from '../../backend/s3'
+import { S3Backend } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig()
-const client = initClient(region, globalS3Endpoint)
+const storageBackend = new S3Backend(region, globalS3Endpoint)
 
 const getPublicObjectParamsSchema = {
   type: 'object',
@@ -59,7 +59,7 @@ export default async function routes(fastify: FastifyInstance) {
       const s3Key = `${projectRef}/${bucketName}/${objectName}`
       request.log.info(s3Key)
       try {
-        const data = await getObject(client, globalS3Bucket, s3Key, range)
+        const data = await storageBackend.getObject(globalS3Bucket, s3Key, range)
         response
           .status(data.$metadata.httpStatusCode ?? 200)
           .header('Content-Type', normalizeContentType(data.ContentType))
diff --git a/src/routes/object/getSignedObject.ts b/src/routes/object/getSignedObject.ts
index 230b24e7..d2770ee3 100644
--- a/src/routes/object/getSignedObject.ts
+++ b/src/routes/object/getSignedObject.ts
@@ -5,10 +5,10 @@ import { verifyJWT } from '../../utils/'
 import { getConfig } from '../../utils/config'
 import { normalizeContentType } from '../../utils'
 import { createResponse } from '../../utils/generic-routes'
-import { getObject, initClient } from '../../backend/s3'
+import { S3Backend } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
-const client = initClient(region, globalS3Endpoint)
+const storageBackend = new S3Backend(region, globalS3Endpoint)
 
 const getSignedObjectParamsSchema = {
   type: 'object',
@@ -61,7 +61,7 @@ export default async function routes(fastify: FastifyInstance) {
       const { url } = payload as SignedToken
       const s3Key = `${projectRef}/${url}`
       request.log.info(s3Key)
-      const data = await getObject(client, globalS3Bucket, s3Key, range)
+      const data = await storageBackend.getObject(globalS3Bucket, s3Key, range)
 
       response
         .status(data.$metadata.httpStatusCode ?? 200)
diff --git a/src/routes/object/moveObject.ts b/src/routes/object/moveObject.ts
index 391b2f97..0b834dd5 100644
--- a/src/routes/object/moveObject.ts
+++ b/src/routes/object/moveObject.ts
@@ -4,10 +4,10 @@ import { AuthenticatedRequest, Obj } from '../../types/types'
 import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { copyObject, deleteObject, initClient } from '../../backend/s3'
+import { S3Backend } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
-const client = initClient(region, globalS3Endpoint)
+const storageBackend = new S3Backend(region, globalS3Endpoint)
 
 const moveObjectsBodySchema = {
   type: 'object',
@@ -81,8 +81,8 @@ export default async function routes(fastify: FastifyInstance) {
       const newS3Key = `${projectRef}/${bucketId}/${destinationKey}`
 
       // @todo what happens if one of these fail?
-      await copyObject(client, globalS3Bucket, oldS3Key, newS3Key)
-      await deleteObject(client, globalS3Bucket, oldS3Key)
+      await storageBackend.copyObject(globalS3Bucket, oldS3Key, newS3Key)
+      await storageBackend.deleteObject(globalS3Bucket, oldS3Key)
 
       return response.status(200).send(createResponse('Successfully moved'))
     }
diff --git a/src/routes/object/updateObject.ts b/src/routes/object/updateObject.ts
index b63051f3..2d7be4d1 100644
--- a/src/routes/object/updateObject.ts
+++ b/src/routes/object/updateObject.ts
@@ -5,10 +5,10 @@ import { Obj, ObjectMetadata } from '../../types/types'
 import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils'
 import { getConfig } from '../../utils/config'
 import { createDefaultSchema, createResponse } from '../../utils/generic-routes'
-import { headObject, initClient, uploadObject } from '../../backend/s3'
+import { S3Backend } from '../../backend/s3'
 
 const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig()
-const client = initClient(region, globalS3Endpoint)
+const storageBackend = new S3Backend(region, globalS3Endpoint)
 
 const updateObjectParamsSchema = {
   type: 'object',
@@ -109,8 +109,7 @@ export default async function routes(fastify: FastifyInstance) {
      const cacheTime = data.fields.cacheControl?.value
      cacheControl = cacheTime ? `max-age=${cacheTime}` : 'no-cache'
      mimeType = data.mimetype
-      uploadResult = await uploadObject(
-        client,
+      uploadResult = await storageBackend.uploadObject(
         globalS3Bucket,
         s3Key,
         data.file,
@@ -128,8 +127,7 @@ export default async function routes(fastify: FastifyInstance) {
       mimeType = request.headers['content-type']
       cacheControl = request.headers['cache-control'] ?? 'no-cache'
 
-      uploadResult = await uploadObject(
-        client,
+      uploadResult = await storageBackend.uploadObject(
         globalS3Bucket,
         s3Key,
         request.raw,
@@ -145,7 +143,7 @@ export default async function routes(fastify: FastifyInstance) {
       // @todo tricky to handle since we need to undo the s3 upload
     }
 
-    const objectMetadata = await headObject(client, globalS3Bucket, s3Key)
+    const objectMetadata = await storageBackend.headObject(globalS3Bucket, s3Key)
     // update content-length as super user since user may not have update permissions
     const metadata: ObjectMetadata = {
       mimetype: mimeType,

From 761945684c1f7f687911b236acda5428553d01c5 Mon Sep 17 00:00:00 2001
From: Inian
Date: Fri, 16 Jul 2021 16:49:06 +0800
Subject: [PATCH 03/13] fix: move s3 return types to generic types

---
 src/backend/s3.ts                    | 62 ++++++++++++++------------
 src/routes/object/copyObject.ts      |  2 +-
 src/routes/object/createObject.ts    |  7 ++--
 src/routes/object/getObject.ts       | 16 +++----
 src/routes/object/getPublicObject.ts | 16 +++----
 src/routes/object/getSignedObject.ts | 16 +++----
 src/routes/object/updateObject.ts    |  7 ++--
 7 files changed, 67 insertions(+), 59 deletions(-)

diff --git a/src/backend/s3.ts b/src/backend/s3.ts
index 6bbbdcd1..b042ba92 100644
--- a/src/backend/s3.ts
+++ b/src/backend/s3.ts
@@ -1,21 +1,16 @@
 import {
   CopyObjectCommand,
-  CopyObjectCommandOutput,
   DeleteObjectCommand,
-  DeleteObjectCommandOutput,
   DeleteObjectsCommand,
-  DeleteObjectsOutput,
   GetObjectCommand,
-  GetObjectCommandOutput,
   HeadObjectCommand,
-  HeadObjectOutput,
   ObjectIdentifier,
   S3Client,
   S3ClientConfig,
-  ServiceOutputTypes,
 } from '@aws-sdk/client-s3'
 import { Upload } from '@aws-sdk/lib-storage'
 import { NodeHttpHandler } from '@aws-sdk/node-http-handler'
+import { ObjectMetadata, ObjectResponse } from '../types/types'
 
 export class S3Backend {
   client: S3Client
@@ -34,78 +29,93 @@ export class S3Backend {
     this.client = new S3Client(params)
   }
 
-  async getObject(
-    bucketName: string,
-    key: string,
-    range?: string
-  ): Promise<GetObjectCommandOutput> {
+  async getObject(bucketName: string, key: string, range?: string): Promise<ObjectResponse> {
     const command = new GetObjectCommand({
       Bucket: bucketName,
       Key: key,
       Range: range,
     })
     const data = await this.client.send(command)
-    return data
+    data.Body
+    return {
+      metadata: {
+        cacheControl: data.CacheControl,
+        mimetype: data.ContentType,
+        eTag: data.ETag,
+        lastModified: data.LastModified,
+        contentRange: data.ContentRange,
+        httpStatusCode: data.$metadata.httpStatusCode,
+      },
+      body: data.Body,
+    }
   }
 
   async uploadObject(
     bucketName: string,
     key: string,
     body: NodeJS.ReadableStream,
     contentType: string,
     cacheControl: string
-  ): Promise<ServiceOutputTypes> {
+  ): Promise<ObjectMetadata> {
     const paralellUploadS3 = new Upload({
       client: this.client,
       params: {
         Bucket: bucketName,
         Key: key,
         /* @ts-expect-error: https://github.com/aws/aws-sdk-js-v3/issues/2085 */
         Body: body,
         ContentType: contentType,
         CacheControl: cacheControl,
       },
     })
 
-    return await paralellUploadS3.done()
+    const data = await paralellUploadS3.done()
+    return {
+      httpStatusCode: data.$metadata.httpStatusCode,
+    }
   }
 
-  async deleteObject(bucket: string, key: string): Promise<DeleteObjectCommandOutput> {
+  async deleteObject(bucket: string, key: string): Promise<ObjectMetadata> {
     const command = new DeleteObjectCommand({
       Bucket: bucket,
       Key: key,
     })
-    return await this.client.send(command)
+    await this.client.send(command)
+    return {}
   }
 
-  async copyObject(
-    bucket: string,
-    source: string,
-    destination: string
-  ): Promise<CopyObjectCommandOutput> {
+  async copyObject(bucket: string, source: string, destination: string): Promise<ObjectMetadata> {
     const command = new CopyObjectCommand({
       Bucket: bucket,
      CopySource: `/${bucket}/${source}`,
       Key: destination,
     })
-    return await this.client.send(command)
+    const data = await this.client.send(command)
+    return {
+      httpStatusCode: data.$metadata.httpStatusCode,
+    }
   }
 
-  async deleteObjects(bucket: string, prefixes: ObjectIdentifier[]): Promise<DeleteObjectsOutput> {
+  async deleteObjects(bucket: string, prefixes: ObjectIdentifier[]): Promise<ObjectMetadata> {
     const command = new DeleteObjectsCommand({
       Bucket: bucket,
       Delete: {
         Objects: prefixes,
       },
     })
-    return await this.client.send(command)
+    await this.client.send(command)
+    return {}
   }
 
-  async headObject(bucket: string, key: string): Promise<HeadObjectOutput> {
+  async headObject(bucket: string, key: string): Promise<ObjectMetadata> {
     const command = new HeadObjectCommand({
       Bucket: bucket,
       Key: key,
     })
-    return await this.client.send(command)
+    const data = await this.client.send(command)
+    return {
+      httpStatusCode: data.$metadata.httpStatusCode,
+      size: data.ContentLength,
+    }
   }
 }
diff --git a/src/routes/object/copyObject.ts b/src/routes/object/copyObject.ts
index 2720ceab..c3b978d2 100644
--- a/src/routes/object/copyObject.ts
+++ b/src/routes/object/copyObject.ts
@@ -123,7 +123,7 @@ export default async function routes(fastify: FastifyInstance) {
         s3SourceKey,
         s3DestinationKey
       )
-      return response.status(copyResult.$metadata.httpStatusCode ?? 200).send({
+      return response.status(copyResult.httpStatusCode ?? 200).send({
         Key: `${bucketId}/${destinationKey}`,
       })
     }
diff --git a/src/routes/object/createObject.ts b/src/routes/object/createObject.ts
index 5d34e5b0..ccdcc2bb 100644
--- a/src/routes/object/createObject.ts
+++ b/src/routes/object/createObject.ts
@@ -1,4 +1,3 @@
-import { ServiceOutputTypes } from '@aws-sdk/client-s3'
 import { PostgrestSingleResponse } from '@supabase/postgrest-js/dist/main/lib/types'
 import { FastifyInstance, RequestGenericInterface } from 'fastify'
 import { FromSchema } from 'json-schema-to-ts'
@@ -74,7 +73,7 @@ export default async function routes(fastify: FastifyInstance) {
     const path = `${bucketName}/${objectName}`
     const s3Key = `${projectRef}/${path}`
     let mimeType: string, cacheControl: string, isTruncated: boolean
-    let uploadResult: ServiceOutputTypes
+    let uploadResult: ObjectMetadata
 
     if (!isValidKey(objectName) || !isValidKey(bucketName)) {
       return response
@@ -212,7 +211,7 @@ export default async function routes(fastify: FastifyInstance) {
     const metadata: ObjectMetadata = {
       mimetype: mimeType,
       cacheControl,
-      size: objectMetadata.ContentLength,
+      size: objectMetadata.size,
     }
     const { error: updateError, status: updateStatus } = await superUserPostgrest
       .from('objects')
@@ -227,7 +226,7 @@ export default async function routes(fastify: FastifyInstance) {
       return response.status(400).send(transformPostgrestError(updateError, updateStatus))
     }
 
-    return response.status(uploadResult.$metadata.httpStatusCode ?? 200).send({
+    return response.status(uploadResult.httpStatusCode ?? 200).send({
       Key: path,
     })
   }
diff --git a/src/routes/object/getObject.ts b/src/routes/object/getObject.ts
index 46a430f0..390288c2 100644
--- a/src/routes/object/getObject.ts
+++ b/src/routes/object/getObject.ts
@@ -69,15 +69,15 @@ async function requestHandler(
   const data = await storageBackend.getObject(globalS3Bucket, s3Key, range)
 
   response
-    .status(data.$metadata.httpStatusCode ?? 200)
-    .header('Content-Type', normalizeContentType(data.ContentType))
-    .header('Cache-Control', data.CacheControl)
-    .header('ETag', data.ETag)
-    .header('Last-Modified', data.LastModified)
-  if (data.ContentRange) {
-    response.header('Content-Range', data.ContentRange)
+    .status(data.metadata.httpStatusCode ?? 200)
+    .header('Content-Type', normalizeContentType(data.metadata.mimetype))
+    .header('Cache-Control', data.metadata.cacheControl)
+    .header('ETag', data.metadata.eTag)
+    .header('Last-Modified', data.metadata.lastModified)
+  if (data.metadata.contentRange) {
+    response.header('Content-Range', data.metadata.contentRange)
   }
-  return response.send(data.Body)
+  return response.send(data.body)
 }
 
 // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
diff --git a/src/routes/object/getPublicObject.ts b/src/routes/object/getPublicObject.ts
index c3935b5f..2de1d7d2 100644
--- a/src/routes/object/getPublicObject.ts
+++ b/src/routes/object/getPublicObject.ts
@@ -61,15 +61,15 @@ export default async function routes(fastify: FastifyInstance) {
       try {
         const data = await storageBackend.getObject(globalS3Bucket, s3Key, range)
         response
-          .status(data.$metadata.httpStatusCode ?? 200)
-          .header('Content-Type', normalizeContentType(data.ContentType))
-          .header('Cache-Control', data.CacheControl)
-          .header('ETag', data.ETag)
-          .header('Last-Modified', data.LastModified)
-        if (data.ContentRange) {
-          response.header('Content-Range', data.ContentRange)
+          .status(data.metadata.httpStatusCode ?? 200)
+          .header('Content-Type', normalizeContentType(data.metadata.mimetype))
+          .header('Cache-Control', data.metadata.cacheControl)
+          .header('ETag', data.metadata.eTag)
+          .header('Last-Modified', data.metadata.lastModified)
+        if (data.metadata.contentRange) {
+          response.header('Content-Range', data.metadata.contentRange)
         }
-        return response.send(data.Body)
+        return response.send(data.body)
       } catch (err) {
         if (err.$metadata?.httpStatusCode === 404) {
           return response.status(404).send()
diff --git a/src/routes/object/getSignedObject.ts b/src/routes/object/getSignedObject.ts
index d2770ee3..bbee0a2e 100644
--- a/src/routes/object/getSignedObject.ts
+++ b/src/routes/object/getSignedObject.ts
@@ -64,15 +64,15 @@ export default async function routes(fastify: FastifyInstance) {
 
       response
-        .status(data.$metadata.httpStatusCode ?? 200)
-        .header('Content-Type', normalizeContentType(data.ContentType))
-        .header('Cache-Control', data.CacheControl ?? 'no-cache')
-        .header('ETag', data.ETag)
-        .header('Last-Modified', data.LastModified)
-      if (data.ContentRange) {
-        response.header('Content-Range', data.ContentRange)
+        .status(data.metadata.httpStatusCode ?? 200)
+        .header('Content-Type', normalizeContentType(data.metadata.mimetype))
+        .header('Cache-Control', data.metadata.cacheControl)
+        .header('ETag', data.metadata.eTag)
+        .header('Last-Modified', data.metadata.lastModified)
+      if (data.metadata.contentRange) {
+        response.header('Content-Range', data.metadata.contentRange)
       }
-      return response.send(data.Body)
+      return response.send(data.body)
     } catch (err) {
       request.log.error(err)
       return response.status(400).send(createResponse(err.message, '400', err.name))
diff --git a/src/routes/object/updateObject.ts b/src/routes/object/updateObject.ts
index 2d7be4d1..051a9e12 100644
--- a/src/routes/object/updateObject.ts
+++ b/src/routes/object/updateObject.ts
@@ -1,4 +1,3 @@
-import { ServiceOutputTypes } from '@aws-sdk/client-s3'
 import { FastifyInstance, RequestGenericInterface } from 'fastify'
 import { FromSchema } from 'json-schema-to-ts'
 import { Obj, ObjectMetadata } from '../../types/types'
@@ -70,7 +69,7 @@ export default async function routes(fastify: FastifyInstance) {
     const path = `${bucketName}/${objectName}`
     const s3Key = `${projectRef}/${path}`
     let mimeType: string, cacheControl: string, isTruncated: boolean
-    let uploadResult: ServiceOutputTypes
+    let uploadResult: ObjectMetadata
 
     if (!isValidKey(objectName) || !isValidKey(bucketName)) {
       return response
@@ -148,7 +147,7 @@ export default async function routes(fastify: FastifyInstance) {
     const metadata: ObjectMetadata = {
       mimetype: mimeType,
       cacheControl,
-      size: objectMetadata.ContentLength,
+      size: objectMetadata.size,
     }
     const { error: updateError, status: updateStatus } = await postgrest
       .from('objects')
@@ -163,7 +162,7 @@ export default async function routes(fastify: FastifyInstance) {
       return response.status(400).send(transformPostgrestError(updateError, updateStatus))
     }
 
-    return response.status(uploadResult.$metadata.httpStatusCode ?? 200).send({
+    return response.status(uploadResult.httpStatusCode ?? 200).send({
       Key: path,
     })
   }

From 29597fb065f0a2e143b6960a1ecec1e8827d5924 Mon Sep 17 00:00:00 2001
From: Inian
Date: Fri, 16 Jul 2021 16:49:36 +0800
Subject: [PATCH 04/13] fix: tests to work with new s3 class

---
 src/test/bucket.test.ts |  34 +++----
 src/test/object.test.ts | 202 +++++++++++++++++-----------------------
 src/types/types.d.ts    |   9 ++
 3 files changed, 108 insertions(+), 137 deletions(-)

diff --git a/src/test/bucket.test.ts b/src/test/bucket.test.ts
index 7edb19db..a9a62e7a 100644
--- a/src/test/bucket.test.ts
+++ b/src/test/bucket.test.ts
@@ -2,36 +2,26 @@ import dotenv from 'dotenv'
 
 import app from '../app'
 import { getConfig } from '../utils/config'
-import * as utils from '../backend/s3'
+import { S3Backend } from '../backend/s3'
 
 dotenv.config({ path: '.env.test' })
 const { anonKey } = getConfig()
 
-let mockDeleteObjects: any, mockGetObject: any
-
 beforeAll(() => {
-  mockDeleteObjects = jest.spyOn(utils, 'deleteObjects')
-  mockDeleteObjects.mockImplementation(() =>
-    Promise.resolve({
-      $metadata: {
-        httpStatusCode: 204,
-      },
-    })
-  )
-  mockGetObject = jest.spyOn(utils, 'getObject')
-  mockGetObject.mockImplementation(() =>
-    Promise.resolve({
-      $metadata: {
+  jest.spyOn(S3Backend.prototype, 'deleteObjects').mockImplementation(() => {
+    return Promise.resolve({})
+  })
+
+  jest.spyOn(S3Backend.prototype, 'getObject').mockImplementation(() => {
+    return Promise.resolve({
+      metadata: {
         httpStatusCode: 200,
+        size: 3746,
+        mimetype: 'image/png',
       },
-      CacheControl: undefined,
-      ContentDisposition: undefined,
-      ContentEncoding: undefined,
-      ContentLength: 3746,
-      ContentType: 'image/png',
-      Metadata: {},
+      body: '',
     })
-  )
+  })
 })
 
 beforeEach(() => {
diff --git a/src/test/object.test.ts b/src/test/object.test.ts
index 1285b656..1db32fc0 100644
--- a/src/test/object.test.ts
+++ b/src/test/object.test.ts
@@ -5,82 +5,54 @@ import fs from 'fs'
 import app from '../app'
 import { getConfig } from '../utils/config'
 import { signJWT } from '../utils/index'
-import * as utils from '../backend/s3'
+import { S3Backend } from '../backend/s3'
 
 dotenv.config({ path: '.env.test' })
 const { anonKey, serviceKey } = getConfig()
 
-let mockGetObject: any,
-  mockUploadObject: any,
-  mockCopyObject: any,
-  mockDeleteObject: any,
-  mockDeleteObjects: any,
-  mockHeadObject: any
-
 beforeAll(() => {
-  mockGetObject = jest.spyOn(utils, 'getObject')
-  mockGetObject.mockImplementation(() =>
-    Promise.resolve({
-      $metadata: {
+  jest.spyOn(S3Backend.prototype, 'getObject').mockImplementation(() => {
+    return Promise.resolve({
+      metadata: {
         httpStatusCode: 200,
+        size: 3746,
+        mimetype: 'image/png',
       },
-      CacheControl: undefined,
-      ContentDisposition: undefined,
-      ContentEncoding: undefined,
-      ContentLength: 3746,
-      ContentType: 'image/png',
-      Metadata: {},
+      body: '',
     })
-  )
+  })
 
-  mockUploadObject = jest.spyOn(utils, 'uploadObject')
-  mockUploadObject.mockImplementation(() =>
-    Promise.resolve({
-      $metadata: {
-        httpStatusCode: 200,
-      },
-      Bucket: 'xxx',
-      Key: 'bucket2/authenticated/sadcat-upload41.png',
+  jest.spyOn(S3Backend.prototype, 'uploadObject').mockImplementation(() => {
+    return Promise.resolve({
+      httpStatusCode: 200,
+      size: 3746,
+      mimetype: 'image/png',
     })
-  )
+  })
 
-  mockCopyObject = jest.spyOn(utils, 'copyObject')
-  mockCopyObject.mockImplementation(() =>
-    Promise.resolve({
-      $metadata: {
-        httpStatusCode: 200,
-      },
-      Bucket: 'xxx',
-      Key: 'authenticated/casestudy11.png',
+  jest.spyOn(S3Backend.prototype, 'copyObject').mockImplementation(() => {
+    return Promise.resolve({
+      httpStatusCode: 200,
size: 3746, + mimetype: 'image/png', }) - ) + }) - mockDeleteObject = jest.spyOn(utils, 'deleteObject') - mockDeleteObject.mockImplementation(() => - Promise.resolve({ - $metadata: { - httpStatusCode: 204, - }, - }) - ) + jest.spyOn(S3Backend.prototype, 'deleteObject').mockImplementation(() => { + return Promise.resolve({}) + }) - mockDeleteObjects = jest.spyOn(utils, 'deleteObjects') - mockDeleteObjects.mockImplementation(() => - Promise.resolve({ - $metadata: { - httpStatusCode: 204, - }, - }) - ) + jest.spyOn(S3Backend.prototype, 'deleteObjects').mockImplementation(() => { + return Promise.resolve({}) + }) - mockHeadObject = jest.spyOn(utils, 'headObject') - mockHeadObject.mockImplementation(() => - Promise.resolve({ - $metadata: { - ContentLength: 1000, - }, + jest.spyOn(S3Backend.prototype, 'headObject').mockImplementation(() => { + return Promise.resolve({ + httpStatusCode: 200, + size: 3746, + mimetype: 'image/png', }) - ) + }) }) beforeEach(() => { @@ -100,7 +72,7 @@ describe('testing GET object', () => { }, }) expect(response.statusCode).toBe(200) - expect(mockGetObject).toBeCalled() + expect(S3Backend.prototype.getObject).toBeCalled() }) test('check if RLS policies are respected: anon user is not able to read authenticated resource', async () => { @@ -112,7 +84,7 @@ describe('testing GET object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockGetObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.getObject).not.toHaveBeenCalled() }) test('user is not able to read a resource without Auth header', async () => { @@ -121,7 +93,7 @@ describe('testing GET object', () => { url: '/object/authenticated/bucket2/authenticated/casestudy.png', }) expect(response.statusCode).toBe(400) - expect(mockGetObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.getObject).not.toHaveBeenCalled() }) test('return 400 when reading a non existent object', async () => { @@ -133,7 +105,7 @@ describe('testing GET object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockGetObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.getObject).not.toHaveBeenCalled() }) test('return 400 when reading a non existent bucket', async () => { @@ -145,7 +117,7 @@ describe('testing GET object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockGetObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.getObject).not.toHaveBeenCalled() }) }) /* @@ -167,7 +139,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toBeCalled() + expect(S3Backend.prototype.uploadObject).toBeCalled() expect(response.body).toBe(`{"Key":"bucket2/authenticated/casestudy1.png"}`) }) @@ -185,7 +157,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() expect(response.body).toBe( JSON.stringify({ statusCode: '42501', @@ -206,7 +178,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when uploading to a non existent bucket', async () => { @@ -223,7 +195,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(400) - 
expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when uploading to duplicate object', async () => { @@ -240,7 +212,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 200 when upserting duplicate object', async () => { @@ -258,7 +230,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).toHaveBeenCalled() }) }) @@ -284,7 +256,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toBeCalled() + expect(S3Backend.prototype.uploadObject).toBeCalled() expect(response.body).toBe(`{"Key":"bucket2/authenticated/binary-casestudy1.png"}`) }) @@ -305,7 +277,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() expect(response.body).toBe( JSON.stringify({ statusCode: '42501', @@ -331,7 +303,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when uploading to a non existent bucket', async () => { @@ -351,7 +323,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when uploading to duplicate object', async () => { @@ -371,7 +343,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 200 when upserting duplicate object', async () => { @@ -392,7 +364,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).toHaveBeenCalled() }) }) @@ -414,7 +386,7 @@ describe('testing PUT object', () => { payload: form, }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toBeCalled() + expect(S3Backend.prototype.uploadObject).toBeCalled() expect(response.body).toBe(`{"Key":"bucket2/authenticated/cat.jpg"}`) }) @@ -432,7 +404,7 @@ describe('testing PUT object', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() // expect(response.body).toBe(`new row violates row-level security policy for table "objects"`) }) @@ -447,7 +419,7 @@ describe('testing PUT object', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + 
+    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
   })

   test('return 400 when update to a non existent bucket', async () => {
@@ -464,7 +436,7 @@ describe('testing PUT object', () => {
       payload: form,
     })
     expect(response.statusCode).toBe(400)
-    expect(mockUploadObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
   })

   test('return 400 when updating a non existent key', async () => {
@@ -481,7 +453,7 @@ describe('testing PUT object', () => {
       payload: form,
     })
     expect(response.statusCode).toBe(400)
-    expect(mockUploadObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
   })
 })

@@ -507,7 +479,7 @@ describe('testing PUT object via binary upload', () => {
       payload: fs.createReadStream(path),
     })
     expect(response.statusCode).toBe(200)
-    expect(mockUploadObject).toBeCalled()
+    expect(S3Backend.prototype.uploadObject).toBeCalled()
     expect(response.body).toBe(`{"Key":"bucket2/authenticated/cat.jpg"}`)
   })

@@ -528,7 +500,7 @@ describe('testing PUT object via binary upload', () => {
       payload: fs.createReadStream(path),
     })
     expect(response.statusCode).toBe(400)
-    expect(mockUploadObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
   })

   test('check if RLS policies are respected: user is not able to upload a resource without Auth header', async () => {
@@ -547,7 +519,7 @@ describe('testing PUT object via binary upload', () => {
       payload: fs.createReadStream(path),
     })
     expect(response.statusCode).toBe(400)
-    expect(mockUploadObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
   })

   test('return 400 when updating an object in a non existent bucket', async () => {
@@ -567,7 +539,7 @@ describe('testing PUT object via binary upload', () => {
       payload: fs.createReadStream(path),
     })
     expect(response.statusCode).toBe(400)
-    expect(mockUploadObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
   })

   test('return 400 when updating an object in a non existent key', async () => {
@@ -587,7 +559,7 @@ describe('testing PUT object via binary upload', () => {
       payload: fs.createReadStream(path),
     })
     expect(response.statusCode).toBe(400)
-    expect(mockUploadObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
   })
 })

@@ -609,7 +581,7 @@ describe('testing copy object', () => {
       },
     })
     expect(response.statusCode).toBe(200)
-    expect(mockCopyObject).toBeCalled()
+    expect(S3Backend.prototype.copyObject).toBeCalled()
     expect(response.body).toBe(`{"Key":"bucket2/authenticated/casestudy11.png"}`)
   })

@@ -627,7 +599,7 @@ describe('testing copy object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockCopyObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
   })

   test('user is not able to copy a resource without Auth header', async () => {
@@ -641,7 +613,7 @@ describe('testing copy object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockCopyObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
   })

   test('return 400 when copy from a non existent bucket', async () => {
@@ -658,7 +630,7 @@ describe('testing copy object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockCopyObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
   })

   test('return 400 when copying a non existent key', async () => {
@@ -675,7 +647,7 @@ describe('testing copy object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockCopyObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
   })
 })

@@ -692,7 +664,7 @@ describe('testing delete object', () => {
       },
     })
     expect(response.statusCode).toBe(200)
-    expect(mockDeleteObject).toBeCalled()
+    expect(S3Backend.prototype.deleteObject).toBeCalled()
   })

   test('check if RLS policies are respected: anon user is not able to delete authenticated resource', async () => {
@@ -704,7 +676,7 @@ describe('testing delete object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockDeleteObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
   })

   test('user is not able to delete a resource without Auth header', async () => {
@@ -713,7 +685,7 @@ describe('testing delete object', () => {
       url: '/object/bucket2/authenticated/delete1.png',
     })
     expect(response.statusCode).toBe(400)
-    expect(mockDeleteObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
   })

   test('return 400 when delete from a non existent bucket', async () => {
@@ -725,7 +697,7 @@ describe('testing delete object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockDeleteObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
   })

   test('return 400 when deleting a non existent key', async () => {
@@ -737,7 +709,7 @@ describe('testing delete object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockDeleteObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
   })
 })

@@ -757,7 +729,7 @@ describe('testing deleting multiple objects', () => {
       },
     })
     expect(response.statusCode).toBe(200)
-    expect(mockDeleteObjects).toBeCalled()
+    expect(S3Backend.prototype.deleteObjects).toBeCalled()
     const result = JSON.parse(response.body)

     expect(result[0].name).toBe('authenticated/delete-multiple1.png')
@@ -776,7 +748,7 @@ describe('testing deleting multiple objects', () => {
       },
     })
     expect(response.statusCode).toBe(200)
-    expect(mockDeleteObjects).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled()
     const results = JSON.parse(response.body)
     expect(results.length).toBe(0)
   })

@@ -790,7 +762,7 @@ describe('testing deleting multiple objects', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockDeleteObjects).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled()
   })

   test('deleting from a non existent bucket', async () => {
@@ -805,7 +777,7 @@ describe('testing deleting multiple objects', () => {
       },
     })
     expect(response.statusCode).toBe(200)
-    expect(mockDeleteObjects).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled()
   })

   test('deleting a non existent key', async () => {
@@ -820,7 +792,7 @@ describe('testing deleting multiple objects', () => {
       },
     })
     expect(response.statusCode).toBe(200)
-    expect(mockDeleteObjects).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled()
     const results = JSON.parse(response.body)
     expect(results.length).toBe(0)
   })

@@ -837,7 +809,7 @@ describe('testing deleting multiple objects', () => {
       },
     })
     expect(response.statusCode).toBe(200)
-    expect(mockDeleteObjects).toBeCalled()
+    expect(S3Backend.prototype.deleteObjects).toBeCalled()
     const results = JSON.parse(response.body)
     expect(results.length).toBe(1)
     expect(results[0].name).toBe('authenticated/delete-multiple7.png')
@@ -979,8 +951,8 @@ describe('testing move object', () => {
       },
     })
     expect(response.statusCode).toBe(200)
-    expect(mockCopyObject).toHaveBeenCalled()
-    expect(mockDeleteObject).toHaveBeenCalled()
+    expect(S3Backend.prototype.copyObject).toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObject).toHaveBeenCalled()
   })

   test('check if RLS policies are respected: anon user is not able to move an authenticated object', async () => {
@@ -997,8 +969,8 @@ describe('testing move object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockCopyObject).not.toHaveBeenCalled()
-    expect(mockDeleteObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
   })

   test('user is not able to move an object without auth header', async () => {
@@ -1012,8 +984,8 @@ describe('testing move object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockCopyObject).not.toHaveBeenCalled()
-    expect(mockDeleteObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
   })

   test('user is not able to move an object in a non existent bucket', async () => {
@@ -1030,8 +1002,8 @@ describe('testing move object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockCopyObject).not.toHaveBeenCalled()
-    expect(mockDeleteObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
   })

   test('user is not able to move an non existent object', async () => {
@@ -1048,8 +1020,8 @@ describe('testing move object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockCopyObject).not.toHaveBeenCalled()
-    expect(mockDeleteObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
   })

   test('user is not able to move to an existing key', async () => {
@@ -1066,8 +1038,8 @@ describe('testing move object', () => {
       },
     })
     expect(response.statusCode).toBe(400)
-    expect(mockCopyObject).not.toHaveBeenCalled()
-    expect(mockDeleteObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled()
+    expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled()
   })
 })
diff --git a/src/types/types.d.ts b/src/types/types.d.ts
index 008e67f8..c6ced473 100644
--- a/src/types/types.d.ts
+++ b/src/types/types.d.ts
@@ -34,8 +34,17 @@ type StorageError = {
   message: string
 }

+type ObjectResponse = {
+  metadata: ObjectMetadata
+  body?: ReadableStream | Readable | Blob
+}
+
 type ObjectMetadata = {
   cacheControl?: string
   size?: number
   mimetype?: string
+  lastModified?: Date
+  eTag?: string
+  contentRange?: string
+  httpStatusCode?: number
 }
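For readers following the test changes above: the assertions now observe methods on the real S3Backend class instead of free-standing jest.fn() mocks. A minimal sketch of the setup this implies is below; the spied method names come from the assertions, but the mocked return shapes and the exact beforeAll/beforeEach arrangement are assumptions, not the series' actual test bootstrap.

    // Sketch only: stubbing S3Backend.prototype so the route tests can assert
    // on call counts without touching a real S3 endpoint.
    import { S3Backend } from '../backend/s3'

    beforeAll(() => {
      jest.spyOn(S3Backend.prototype, 'uploadObject').mockResolvedValue({ httpStatusCode: 200 })
      jest.spyOn(S3Backend.prototype, 'copyObject').mockResolvedValue({ httpStatusCode: 200 })
      jest.spyOn(S3Backend.prototype, 'deleteObject').mockResolvedValue({})
      jest.spyOn(S3Backend.prototype, 'deleteObjects').mockResolvedValue({})
    })

    beforeEach(() => {
      // reset call counts so per-test not.toHaveBeenCalled() assertions stay meaningful
      jest.clearAllMocks()
    })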
From 0a1be94f015d5aae87255809cca8943eaa55d263 Mon Sep 17 00:00:00 2001
From: Inian
Date: Fri, 16 Jul 2021 18:08:43 +0800
Subject: [PATCH 05/13] fix: s3 remove dependence on ObjectIdentifier

---
 src/backend/s3.ts                  | 9 ++++++---
 src/routes/bucket/emptyBucket.ts   | 4 +---
 src/routes/object/deleteObjects.ts | 2 +-
 3 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/src/backend/s3.ts b/src/backend/s3.ts
index b042ba92..f4cecd1b 100644
--- a/src/backend/s3.ts
+++ b/src/backend/s3.ts
@@ -4,7 +4,6 @@ import {
   DeleteObjectsCommand,
   GetObjectCommand,
   HeadObjectCommand,
-  ObjectIdentifier,
   S3Client,
   S3ClientConfig,
 } from '@aws-sdk/client-s3'
@@ -96,11 +95,15 @@
     }
   }

-  async deleteObjects(bucket: string, prefixes: ObjectIdentifier[]): Promise {
+  async deleteObjects(bucket: string, prefixes: string[]): Promise {
+    const s3Prefixes = prefixes.map((ele) => {
+      return { Key: ele }
+    })
+
     const command = new DeleteObjectsCommand({
       Bucket: bucket,
       Delete: {
-        Objects: prefixes,
+        Objects: s3Prefixes,
       },
     })
     await this.client.send(command)
diff --git a/src/routes/bucket/emptyBucket.ts b/src/routes/bucket/emptyBucket.ts
index fa8f7371..ed9ac57b 100644
--- a/src/routes/bucket/emptyBucket.ts
+++ b/src/routes/bucket/emptyBucket.ts
@@ -91,9 +91,7 @@ export default async function routes(fastify: FastifyInstance) {

         if (deleteData && deleteData.length > 0) {
           const params = deleteData.map((ele) => {
-            return {
-              Key: `${projectRef}/${bucketName}/${ele.name}`,
-            }
+            return `${projectRef}/${bucketName}/${ele.name}`
           })
           // delete files from s3 asynchronously
           storageBackend.deleteObjects(globalS3Bucket, params)
diff --git a/src/routes/object/deleteObjects.ts b/src/routes/object/deleteObjects.ts
index caf5748f..baa86a02 100644
--- a/src/routes/object/deleteObjects.ts
+++ b/src/routes/object/deleteObjects.ts
@@ -81,7 +81,7 @@ export default async function routes(fastify: FastifyInstance) {
       if (results.length > 0) {
         // if successfully deleted, delete from s3 too
         const prefixesToDelete = results.map((ele) => {
-          return { Key: `${projectRef}/${bucketName}/${ele.name}` }
+          return `${projectRef}/${bucketName}/${ele.name}`
         })

         await storageBackend.deleteObjects(globalS3Bucket, prefixesToDelete)
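The net effect of this patch is that callers hand the backend plain string keys, and the S3-specific ObjectIdentifier shape stays private to S3Backend.deleteObjects. A rough usage sketch, with illustrative bucket and key values:

    // Illustrative values only; deleteObjects now wraps each key in { Key: ... }
    // itself before issuing a single DeleteObjectsCommand.
    await storageBackend.deleteObjects('global-s3-bucket', [
      `${projectRef}/avatars/cat.png`,
      `${projectRef}/avatars/dog.png`,
    ])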
"fs-extra": "^8.1.0", "jsonwebtoken": "^8.5.1", "pg": "^8.5.1", "pkg": "^4.4.9", @@ -25,6 +26,7 @@ }, "devDependencies": { "@types/busboy": "^0.2.3", + "@types/fs-extra": "^9.0.12", "@types/jest": "^26.0.20", "@types/jsonwebtoken": "^8.5.0", "@types/node": "^14.14.33", @@ -2289,6 +2291,15 @@ "@types/node": "*" } }, + "node_modules/@types/fs-extra": { + "version": "9.0.12", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.12.tgz", + "integrity": "sha512-I+bsBr67CurCGnSenZZ7v94gd3tc3+Aj2taxMT4yu4ABLuOgOjeFxX3dokG24ztSRg5tnT00sL8BszO7gSMoIw==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/graceful-fs": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.5.tgz", @@ -12532,6 +12543,15 @@ "@types/node": "*" } }, + "@types/fs-extra": { + "version": "9.0.12", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.12.tgz", + "integrity": "sha512-I+bsBr67CurCGnSenZZ7v94gd3tc3+Aj2taxMT4yu4ABLuOgOjeFxX3dokG24ztSRg5tnT00sL8BszO7gSMoIw==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, "@types/graceful-fs": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.5.tgz", diff --git a/package.json b/package.json index 11aa858f..3248d29a 100644 --- a/package.json +++ b/package.json @@ -33,6 +33,7 @@ "fastify-cors": "^5.2.0", "fastify-multipart": "^4.0.1", "fastify-swagger": "^4.4.1", + "fs-extra": "^8.1.0", "jsonwebtoken": "^8.5.1", "pg": "^8.5.1", "pkg": "^4.4.9", @@ -40,6 +41,7 @@ }, "devDependencies": { "@types/busboy": "^0.2.3", + "@types/fs-extra": "^9.0.12", "@types/jest": "^26.0.20", "@types/jsonwebtoken": "^8.5.0", "@types/node": "^14.14.33", diff --git a/src/backend/file.ts b/src/backend/file.ts new file mode 100644 index 00000000..b5fff67a --- /dev/null +++ b/src/backend/file.ts @@ -0,0 +1,88 @@ +import { ObjectMetadata, ObjectResponse } from '../types/types' +import fs from 'fs-extra' +import path from 'path' +import { promisify } from 'util' +import stream from 'stream' +import { getConfig } from '../utils/config' +import { GenericStorageBackend } from './generic' +const pipeline = promisify(stream.pipeline) + +export class FileBackend implements GenericStorageBackend { + client: null + filePath: string + + constructor() { + const { fileStoragePath } = getConfig() + if (!fileStoragePath) { + throw new Error('FILE_STORAGE_BACKEND_PATH env variable not set') + } + this.filePath = fileStoragePath + } + + async getObject(bucketName: string, key: string, range?: string): Promise { + const file = path.resolve(this.filePath, `${bucketName}/${key}`) + const body = await fs.readFile(file) + const data = await fs.stat(file) + const lastModified = new Date(0) + lastModified.setUTCMilliseconds(data.mtimeMs) + return { + metadata: { + // cacheControl: data.CacheControl, + // mimetype: data.ContentType, + // eTag: data.ETag, + lastModified: lastModified, + // contentRange: data.ContentRange, + httpStatusCode: 200, + }, + body, + } + } + + async uploadObject( + bucketName: string, + key: string, + body: NodeJS.ReadableStream, + contentType: string, + cacheControl: string + ): Promise { + const file = path.resolve(this.filePath, `${bucketName}/${key}`) + await fs.ensureFile(file) + const destFile = fs.createWriteStream(file) + await pipeline(body, destFile) + return { + httpStatusCode: 200, + } + } + + async deleteObject(bucket: string, key: string): Promise { + const file = path.resolve(this.filePath, `${bucket}/${key}`) + 
await fs.rm(file) + return {} + } + + async copyObject(bucket: string, source: string, destination: string): Promise { + const srcFile = path.resolve(this.filePath, `${bucket}/${source}`) + const destFile = path.resolve(this.filePath, `${bucket}/${destination}`) + await fs.copyFile(srcFile, destFile) + return { + httpStatusCode: 200, + } + } + + async deleteObjects(bucket: string, prefixes: string[]): Promise { + const promises = prefixes.map((prefix) => { + return fs.rm(path.resolve(this.filePath, prefix)) + }) + await Promise.all(promises) + return {} + } + + async headObject(bucket: string, key: string): Promise { + const file = path.resolve(this.filePath, `${bucket}/${key}`) + const data = await fs.stat(file) + return { + httpStatusCode: 200, + size: data.size, + } + } +} diff --git a/src/backend/generic.ts b/src/backend/generic.ts new file mode 100644 index 00000000..6cac64ff --- /dev/null +++ b/src/backend/generic.ts @@ -0,0 +1,32 @@ +import { ObjectMetadata, ObjectResponse } from '../types/types' + +export abstract class GenericStorageBackend { + client: any + constructor() { + this.client = null + } + async getObject(bucketName: string, key: string, range?: string): Promise { + throw new Error('getObject not implemented') + } + async uploadObject( + bucketName: string, + key: string, + body: NodeJS.ReadableStream, + contentType: string, + cacheControl: string + ): Promise { + throw new Error('uploadObject not implemented') + } + async deleteObject(bucket: string, key: string): Promise { + throw new Error('deleteObject not implemented') + } + async copyObject(bucket: string, source: string, destination: string): Promise { + throw new Error('copyObject not implemented') + } + async deleteObjects(bucket: string, prefixes: string[]): Promise { + throw new Error('deleteObjects not implemented') + } + async headObject(bucket: string, key: string): Promise { + throw new Error('headObject not implemented') + } +} diff --git a/src/backend/s3.ts b/src/backend/s3.ts index f4cecd1b..2db48fd0 100644 --- a/src/backend/s3.ts +++ b/src/backend/s3.ts @@ -10,8 +10,9 @@ import { import { Upload } from '@aws-sdk/lib-storage' import { NodeHttpHandler } from '@aws-sdk/node-http-handler' import { ObjectMetadata, ObjectResponse } from '../types/types' +import { GenericStorageBackend } from './generic' -export class S3Backend { +export class S3Backend implements GenericStorageBackend { client: S3Client constructor(region: string, endpoint?: string | undefined) { diff --git a/src/routes/bucket/emptyBucket.ts b/src/routes/bucket/emptyBucket.ts index ed9ac57b..7e42886c 100644 --- a/src/routes/bucket/emptyBucket.ts +++ b/src/routes/bucket/emptyBucket.ts @@ -5,10 +5,17 @@ import { getPostgrestClient, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const storageBackend = new S3Backend(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const emptyBucketParamsSchema = { type: 'object', properties: { 
@@ -60,11 +67,7 @@ export default async function routes(fastify: FastifyInstance) { let deleteError, deleteData, objectError, objects, objectStatus do { - ;({ - data: objects, - error: objectError, - status: objectStatus, - } = await postgrest + ;({ data: objects, error: objectError, status: objectStatus } = await postgrest .from('objects') .select('name, id') .eq('bucket_id', bucketId) diff --git a/src/routes/object/copyObject.ts b/src/routes/object/copyObject.ts index c3b978d2..f3219238 100644 --- a/src/routes/object/copyObject.ts +++ b/src/routes/object/copyObject.ts @@ -5,9 +5,24 @@ import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } fro import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig() -const storageBackend = new S3Backend(region, globalS3Endpoint) +const { + region, + projectRef, + globalS3Bucket, + globalS3Endpoint, + serviceKey, + storageBackendType, +} = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const copyRequestBodySchema = { type: 'object', @@ -99,11 +114,7 @@ export default async function routes(fastify: FastifyInstance) { owner, }) request.log.info({ origObject }, 'newObject') - const { - data: results, - error, - status, - } = await postgrest + const { data: results, error, status } = await postgrest .from('objects') .insert([newObject], { returning: 'minimal', diff --git a/src/routes/object/createObject.ts b/src/routes/object/createObject.ts index ccdcc2bb..5a684690 100644 --- a/src/routes/object/createObject.ts +++ b/src/routes/object/createObject.ts @@ -6,9 +6,24 @@ import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } fro import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' import { S3Backend } from '../../backend/s3' - -const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig() -const storageBackend = new S3Backend(region, globalS3Endpoint) +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' + +const { + region, + projectRef, + globalS3Bucket, + globalS3Endpoint, + serviceKey, + storageBackendType, +} = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const createObjectParamsSchema = { type: 'object', diff --git a/src/routes/object/deleteObject.ts b/src/routes/object/deleteObject.ts index 4f2da913..2da8c9f8 100644 --- a/src/routes/object/deleteObject.ts +++ b/src/routes/object/deleteObject.ts @@ -5,9 +5,17 @@ import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../u import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint 
} = getConfig() -const storageBackend = new S3Backend(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const deleteObjectParamsSchema = { type: 'object', diff --git a/src/routes/object/deleteObjects.ts b/src/routes/object/deleteObjects.ts index baa86a02..bb439cbe 100644 --- a/src/routes/object/deleteObjects.ts +++ b/src/routes/object/deleteObjects.ts @@ -6,9 +6,17 @@ import { getPostgrestClient, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { createDefaultSchema } from '../../utils/generic-routes' import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const storageBackend = new S3Backend(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const deleteObjectsParamsSchema = { type: 'object', diff --git a/src/routes/object/getObject.ts b/src/routes/object/getObject.ts index 390288c2..bbc7faef 100644 --- a/src/routes/object/getObject.ts +++ b/src/routes/object/getObject.ts @@ -7,9 +7,17 @@ import { getConfig } from '../../utils/config' import { normalizeContentType } from '../../utils' import { createResponse } from '../../utils/generic-routes' import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const storageBackend = new S3Backend(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const getObjectParamsSchema = { type: 'object', diff --git a/src/routes/object/getPublicObject.ts b/src/routes/object/getPublicObject.ts index 2de1d7d2..9f23d281 100644 --- a/src/routes/object/getPublicObject.ts +++ b/src/routes/object/getPublicObject.ts @@ -5,9 +5,24 @@ import { getPostgrestClient, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { normalizeContentType } from '../../utils' import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig() -const storageBackend = new S3Backend(region, globalS3Endpoint) +const { + region, + projectRef, + globalS3Bucket, + globalS3Endpoint, + serviceKey, + storageBackendType, +} = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const getPublicObjectParamsSchema = { type: 'object', diff --git 
a/src/routes/object/getSignedObject.ts b/src/routes/object/getSignedObject.ts index bbee0a2e..e7d2a7c3 100644 --- a/src/routes/object/getSignedObject.ts +++ b/src/routes/object/getSignedObject.ts @@ -6,9 +6,17 @@ import { getConfig } from '../../utils/config' import { normalizeContentType } from '../../utils' import { createResponse } from '../../utils/generic-routes' import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const storageBackend = new S3Backend(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const getSignedObjectParamsSchema = { type: 'object', diff --git a/src/routes/object/moveObject.ts b/src/routes/object/moveObject.ts index 0b834dd5..154f20aa 100644 --- a/src/routes/object/moveObject.ts +++ b/src/routes/object/moveObject.ts @@ -5,9 +5,17 @@ import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../u import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const storageBackend = new S3Backend(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const moveObjectsBodySchema = { type: 'object', diff --git a/src/routes/object/updateObject.ts b/src/routes/object/updateObject.ts index 051a9e12..a28b3f1d 100644 --- a/src/routes/object/updateObject.ts +++ b/src/routes/object/updateObject.ts @@ -5,9 +5,17 @@ import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } fro import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const storageBackend = new S3Backend(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const updateObjectParamsSchema = { type: 'object', diff --git a/src/test/bucket.test.ts b/src/test/bucket.test.ts index a9a62e7a..7bd86a36 100644 --- a/src/test/bucket.test.ts +++ b/src/test/bucket.test.ts @@ -19,7 +19,7 @@ beforeAll(() => { size: 3746, mimetype: 'image/png', }, - body: '', + body: Buffer.from(''), }) }) }) diff --git a/src/test/object.test.ts b/src/test/object.test.ts index 1db32fc0..50be9c77 100644 --- a/src/test/object.test.ts +++ 
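Worth noting about the patch above: every route module now repeats the same storageBackendType branch. A hypothetical helper (not part of this series; createStorageBackend is an invented name) that would centralize the selection could look like this:

    // Hypothetical consolidation of the backend selection repeated in each
    // route module above. Assumes the config and backend modules as introduced
    // in PATCH 06.
    import { getConfig } from '../utils/config'
    import { FileBackend } from './file'
    import { GenericStorageBackend } from './generic'
    import { S3Backend } from './s3'

    export function createStorageBackend(): GenericStorageBackend {
      const { region, globalS3Endpoint, storageBackendType } = getConfig()
      // 'file' selects the fs-extra backend; anything else falls back to S3,
      // mirroring the if/else block in each route file.
      return storageBackendType === 'file'
        ? new FileBackend()
        : new S3Backend(region, globalS3Endpoint)
    }

Each route could then do `const storageBackend = createStorageBackend()` once, keeping the file/S3 decision in a single place.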
From f1e263cde8fb292b97298a0a87d4529978af1981 Mon Sep 17 00:00:00 2001
From: Inian
Date: Tue, 20 Jul 2021 00:05:25 +0800
Subject: [PATCH 07/13] add storage_backend to env file for CI

---
 .github/workflows/ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b41da8a4..7e88112e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -45,6 +45,7 @@ jobs:
           echo DATABASE_URL=postgresql://postgres:postgres@127.0.0.1/postgres >> .env
           echo PGOPTIONS='-c search_path=storage' >> .env
           echo FILE_SIZE_LIMIT=52428800 >> .env
+          echo STORAGE_BACKEND=s3 >> .env

       - name: Install dependencies
         run: |
From d94da83b8f126cb506a7405b6230cfbf788e867c Mon Sep 17 00:00:00 2001
From: Inian
Date: Tue, 20 Jul 2021 00:05:45 +0800
Subject: [PATCH 08/13] fix: deleteObjects path resolution for file backend

---
 src/backend/file.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/backend/file.ts b/src/backend/file.ts
index b5fff67a..0a84ad1f 100644
--- a/src/backend/file.ts
+++ b/src/backend/file.ts
@@ -71,7 +71,7 @@ export class FileBackend implements GenericStorageBackend {

   async deleteObjects(bucket: string, prefixes: string[]): Promise {
     const promises = prefixes.map((prefix) => {
-      return fs.rm(path.resolve(this.filePath, prefix))
+      return fs.rm(path.resolve(this.filePath, bucket, prefix))
     })
     await Promise.all(promises)
     return {}
From 804fc4eef6f442b8cf877d83b9702caef95acab9 Mon Sep 17 00:00:00 2001
From: Inian
Date: Tue, 20 Jul 2021 00:11:45 +0800
Subject: [PATCH 09/13] prettier fix

---
 src/routes/bucket/getAllBuckets.ts | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/src/routes/bucket/getAllBuckets.ts b/src/routes/bucket/getAllBuckets.ts
index 308118f9..0099511b 100644
--- a/src/routes/bucket/getAllBuckets.ts
+++ b/src/routes/bucket/getAllBuckets.ts
@@ -37,11 +37,7 @@ export default async function routes(fastify: FastifyInstance) {
       const jwt = authHeader.substring('Bearer '.length)

       const postgrest = getPostgrestClient(jwt)
-      const {
-        data: results,
-        error,
-        status,
-      } = await postgrest
+      const { data: results, error, status } = await postgrest
         .from('buckets')
         .select('id, name, public, owner, created_at, updated_at')
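The path-resolution fix in PATCH 08 is easiest to see with concrete values (the paths below are illustrative, not from the series):

    // Without the bucket segment, batch deletes resolved directly under the
    // storage root instead of under the bucket's directory.
    import path from 'path'

    const filePath = '/srv/storage/data' // example FILE_STORAGE_BACKEND_PATH
    const bucket = 'global-bucket'
    const prefix = 'projectRef/avatars/cat.png'

    path.resolve(filePath, prefix)
    // before: /srv/storage/data/projectRef/avatars/cat.png  (wrong location)
    path.resolve(filePath, bucket, prefix)
    // after:  /srv/storage/data/global-bucket/projectRef/avatars/cat.png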
From 8694f66a888498ef189677ef0c317495164c1bf2 Mon Sep 17 00:00:00 2001
From: Inian
Date: Tue, 20 Jul 2021 00:15:42 +0800
Subject: [PATCH 10/13] docs needn't be updated on pull_request

---
 .github/workflows/docs.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index c1af13e3..019caa07 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -1,7 +1,6 @@
 name: Docs

 on:
-  pull_request:
   push:
     branches:
       - master
From b8e860577492bdcf736fa97254bd0691e0a0a432 Mon Sep 17 00:00:00 2001
From: Inian
Date: Tue, 20 Jul 2021 01:24:22 +0800
Subject: [PATCH 11/13] use fs.remove instead of fs.rm

fs.rm was failing with a missing-callback error

---
 src/backend/file.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/backend/file.ts b/src/backend/file.ts
index 0a84ad1f..d094a9f3 100644
--- a/src/backend/file.ts
+++ b/src/backend/file.ts
@@ -56,7 +56,7 @@ export class FileBackend implements GenericStorageBackend {

   async deleteObject(bucket: string, key: string): Promise {
     const file = path.resolve(this.filePath, `${bucket}/${key}`)
-    await fs.rm(file)
+    await fs.remove(file)
     return {}
   }
From 69881542d62287f0717a4e0bfd04e783fd5f719f Mon Sep 17 00:00:00 2001
From: Inian
Date: Tue, 20 Jul 2021 01:32:56 +0800
Subject: [PATCH 12/13] feat: fs-backend: store metadata in xattr

---
 package-lock.json   | 18 ++++++++++++++++++
 package.json        |  1 +
 src/backend/file.ts | 22 +++++++++++++++++++---
 3 files changed, 38 insertions(+), 3 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 9dbc94d6..d50aabcc 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -19,6 +19,7 @@
         "fastify-multipart": "^4.0.1",
         "fastify-swagger": "^4.4.1",
         "fs-extra": "^8.1.0",
+        "fs-xattr": "^0.3.1",
         "jsonwebtoken": "^8.5.1",
         "pg": "^8.5.1",
         "pkg": "^4.4.9",
@@ -4894,6 +4895,18 @@
         "node": ">=6 <7 || >=8"
       }
     },
+    "node_modules/fs-xattr": {
+      "version": "0.3.1",
+      "resolved": "https://registry.npmjs.org/fs-xattr/-/fs-xattr-0.3.1.tgz",
+      "integrity": "sha512-UVqkrEW0GfDabw4C3HOrFlxKfx0eeigfRne69FxSBdHIP8Qt5Sq6Pu3RM9KmMlkygtC4pPKkj5CiPO5USnj2GA==",
+      "hasInstallScript": true,
+      "os": [
+        "!win32"
+      ],
+      "engines": {
+        "node": ">=8.6.0"
+      }
+    },
     "node_modules/fs.realpath": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
@@ -14686,6 +14699,11 @@
         "universalify": "^0.1.0"
       }
     },
+    "fs-xattr": {
+      "version": "0.3.1",
+      "resolved": "https://registry.npmjs.org/fs-xattr/-/fs-xattr-0.3.1.tgz",
+      "integrity": "sha512-UVqkrEW0GfDabw4C3HOrFlxKfx0eeigfRne69FxSBdHIP8Qt5Sq6Pu3RM9KmMlkygtC4pPKkj5CiPO5USnj2GA=="
+    },
     "fs.realpath": {
       "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
diff --git a/package.json b/package.json
index 3248d29a..d8a3fb64 100644
--- a/package.json
+++ b/package.json
@@ -34,6 +34,7 @@
     "fastify-multipart": "^4.0.1",
     "fastify-swagger": "^4.4.1",
     "fs-extra": "^8.1.0",
+    "fs-xattr": "^0.3.1",
     "jsonwebtoken": "^8.5.1",
     "pg": "^8.5.1",
     "pkg": "^4.4.9",
diff --git a/src/backend/file.ts b/src/backend/file.ts
index d094a9f3..b9a4520d 100644
--- a/src/backend/file.ts
+++ b/src/backend/file.ts
@@ -1,4 +1,5 @@
 import { ObjectMetadata, ObjectResponse } from '../types/types'
+import xattr from 'fs-xattr'
 import fs from 'fs-extra'
 import path from 'path'
 import { promisify } from 'util'
@@ -19,17 +20,28 @@ export class FileBackend implements GenericStorageBackend {
     this.filePath = fileStoragePath
   }

+  getMetadata(file: string, attribute: string): Promise {
+    return xattr.get(file, attribute).then((value) => {
+      return value?.toString() ?? undefined
+    })
+  }
+
+  setMetadata(file: string, attribute: string, value: string): Promise {
+    return xattr.set(file, attribute, value)
+  }
+
   async getObject(bucketName: string, key: string, range?: string): Promise {
     const file = path.resolve(this.filePath, `${bucketName}/${key}`)
     const body = await fs.readFile(file)
     const data = await fs.stat(file)
+    const cacheControl = await this.getMetadata(file, 'user.supabase.cache-control')
+    const contentType = await this.getMetadata(file, 'user.supabase.content-type')
     const lastModified = new Date(0)
     lastModified.setUTCMilliseconds(data.mtimeMs)
     return {
       metadata: {
-        // cacheControl: data.CacheControl,
-        // mimetype: data.ContentType,
-        // eTag: data.ETag,
+        cacheControl,
+        mimetype: contentType,
         lastModified: lastModified,
         // contentRange: data.ContentRange,
         httpStatusCode: 200,
@@ -49,6 +61,10 @@ export class FileBackend implements GenericStorageBackend {
     await fs.ensureFile(file)
     const destFile = fs.createWriteStream(file)
     await pipeline(body, destFile)
+    await Promise.all([
+      this.setMetadata(file, 'user.supabase.content-type', contentType),
+      this.setMetadata(file, 'user.supabase.cache-control', cacheControl),
+    ])
     return {
       httpStatusCode: 200,
     }
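Since regular files carry no native content-type or cache-control, PATCH 12 keeps them in user-namespace extended attributes. A small round-trip sketch of the fs-xattr calls it relies on (the file path is illustrative):

    // Round trip of the metadata written by uploadObject. fs-xattr's promise
    // API resolves get() to a Buffer, hence the toString() in getMetadata.
    import xattr from 'fs-xattr'

    async function demo(file: string): Promise<void> {
      await xattr.set(file, 'user.supabase.content-type', 'image/png')
      const value = await xattr.get(file, 'user.supabase.content-type')
      console.log(value.toString()) // 'image/png'
    }

Note that fs-xattr only supports platforms with extended-attribute syscalls (its package metadata excludes win32), which is one reason the file backend stays opt-in behind STORAGE_BACKEND.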
From 35161c28f51bf06960b16cc721e22123c3242f95 Mon Sep 17 00:00:00 2001
From: Inian
Date: Tue, 20 Jul 2021 15:41:47 +0800
Subject: [PATCH 13/13] add todo for range requests in file backend

---
 src/backend/file.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/backend/file.ts b/src/backend/file.ts
index b9a4520d..b5d1a1d9 100644
--- a/src/backend/file.ts
+++ b/src/backend/file.ts
@@ -43,7 +43,7 @@ export class FileBackend implements GenericStorageBackend {
         cacheControl,
         mimetype: contentType,
         lastModified: lastModified,
-        // contentRange: data.ContentRange,
+        // contentRange: data.ContentRange, @todo: support range requests
         httpStatusCode: 200,
       },
       body,
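One way the @todo above could eventually be satisfied — a sketch under stated assumptions, not part of this series — is to translate the unused range parameter of getObject into fs.createReadStream options:

    // Sketch only: parse an HTTP "bytes=start-end" range and stream just that
    // slice of the file. createRangeStream is an invented helper name.
    import fs from 'fs-extra'

    function createRangeStream(file: string, size: number, range: string) {
      const match = /bytes=(\d*)-(\d*)/.exec(range)
      if (!match) throw new Error(`malformed range: ${range}`)
      const start = match[1] ? Number(match[1]) : 0
      const end = match[2] ? Math.min(Number(match[2]), size - 1) : size - 1
      return {
        contentRange: `bytes ${start}-${end}/${size}`, // would feed ObjectMetadata.contentRange
        body: fs.createReadStream(file, { start, end }), // start/end are inclusive byte offsets
      }
    }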