diff --git a/.env.sample b/.env.sample index b50cca90..259746b0 100644 --- a/.env.sample +++ b/.env.sample @@ -8,3 +8,5 @@ PGRST_JWT_SECRET=f023d3db-39dc-4ac9-87b2-b2be72e9162b DATABASE_URL=postgresql://postgres:postgres@127.0.0.1/postgres PGOPTIONS="-c search_path=storage" FILE_SIZE_LIMIT=52428800 +STORAGE_BACKEND=s3 +FILE_STORAGE_BACKEND_PATH=./data diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b41da8a4..7e88112e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -45,6 +45,7 @@ jobs: echo DATABASE_URL=postgresql://postgres:postgres@127.0.0.1/postgres >> .env echo PGOPTIONS='-c search_path=storage' >> .env echo FILE_SIZE_LIMIT=52428800 >> .env + echo STORAGE_BACKEND=s3 >> .env - name: Install dependencies run: | diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index c1af13e3..019caa07 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -1,7 +1,6 @@ name: Docs on: - pull_request: push: branches: - master diff --git a/.gitignore b/.gitignore index 97ff6dce..e7216b54 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,5 @@ dist/ .env .env.* !.*.sample -static/api.json \ No newline at end of file +static/api.json +data/ \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 9a037469..d50aabcc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -18,6 +18,8 @@ "fastify-cors": "^5.2.0", "fastify-multipart": "^4.0.1", "fastify-swagger": "^4.4.1", + "fs-extra": "^8.1.0", + "fs-xattr": "^0.3.1", "jsonwebtoken": "^8.5.1", "pg": "^8.5.1", "pkg": "^4.4.9", @@ -25,6 +27,7 @@ }, "devDependencies": { "@types/busboy": "^0.2.3", + "@types/fs-extra": "^9.0.12", "@types/jest": "^26.0.20", "@types/jsonwebtoken": "^8.5.0", "@types/node": "^14.14.33", @@ -2289,6 +2292,15 @@ "@types/node": "*" } }, + "node_modules/@types/fs-extra": { + "version": "9.0.12", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.12.tgz", + "integrity": "sha512-I+bsBr67CurCGnSenZZ7v94gd3tc3+Aj2taxMT4yu4ABLuOgOjeFxX3dokG24ztSRg5tnT00sL8BszO7gSMoIw==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/graceful-fs": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.5.tgz", @@ -4883,6 +4895,18 @@ "node": ">=6 <7 || >=8" } }, + "node_modules/fs-xattr": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/fs-xattr/-/fs-xattr-0.3.1.tgz", + "integrity": "sha512-UVqkrEW0GfDabw4C3HOrFlxKfx0eeigfRne69FxSBdHIP8Qt5Sq6Pu3RM9KmMlkygtC4pPKkj5CiPO5USnj2GA==", + "hasInstallScript": true, + "os": [ + "!win32" + ], + "engines": { + "node": ">=8.6.0" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -12532,6 +12556,15 @@ "@types/node": "*" } }, + "@types/fs-extra": { + "version": "9.0.12", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.12.tgz", + "integrity": "sha512-I+bsBr67CurCGnSenZZ7v94gd3tc3+Aj2taxMT4yu4ABLuOgOjeFxX3dokG24ztSRg5tnT00sL8BszO7gSMoIw==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, "@types/graceful-fs": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.5.tgz", @@ -14666,6 +14699,11 @@ "universalify": "^0.1.0" } }, + "fs-xattr": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/fs-xattr/-/fs-xattr-0.3.1.tgz", + "integrity": 
"sha512-UVqkrEW0GfDabw4C3HOrFlxKfx0eeigfRne69FxSBdHIP8Qt5Sq6Pu3RM9KmMlkygtC4pPKkj5CiPO5USnj2GA==" + }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", diff --git a/package.json b/package.json index 11aa858f..d8a3fb64 100644 --- a/package.json +++ b/package.json @@ -33,6 +33,8 @@ "fastify-cors": "^5.2.0", "fastify-multipart": "^4.0.1", "fastify-swagger": "^4.4.1", + "fs-extra": "^8.1.0", + "fs-xattr": "^0.3.1", "jsonwebtoken": "^8.5.1", "pg": "^8.5.1", "pkg": "^4.4.9", @@ -40,6 +42,7 @@ }, "devDependencies": { "@types/busboy": "^0.2.3", + "@types/fs-extra": "^9.0.12", "@types/jest": "^26.0.20", "@types/jsonwebtoken": "^8.5.0", "@types/node": "^14.14.33", diff --git a/src/backend/file.ts b/src/backend/file.ts new file mode 100644 index 00000000..b5d1a1d9 --- /dev/null +++ b/src/backend/file.ts @@ -0,0 +1,104 @@ +import { ObjectMetadata, ObjectResponse } from '../types/types' +import xattr from 'fs-xattr' +import fs from 'fs-extra' +import path from 'path' +import { promisify } from 'util' +import stream from 'stream' +import { getConfig } from '../utils/config' +import { GenericStorageBackend } from './generic' +const pipeline = promisify(stream.pipeline) + +export class FileBackend implements GenericStorageBackend { + client: null + filePath: string + + constructor() { + const { fileStoragePath } = getConfig() + if (!fileStoragePath) { + throw new Error('FILE_STORAGE_BACKEND_PATH env variable not set') + } + this.filePath = fileStoragePath + } + + getMetadata(file: string, attribute: string): Promise { + return xattr.get(file, attribute).then((value) => { + return value?.toString() ?? undefined + }) + } + + setMetadata(file: string, attribute: string, value: string): Promise { + return xattr.set(file, attribute, value) + } + + async getObject(bucketName: string, key: string, range?: string): Promise { + const file = path.resolve(this.filePath, `${bucketName}/${key}`) + const body = await fs.readFile(file) + const data = await fs.stat(file) + const cacheControl = await this.getMetadata(file, 'user.supabase.cache-control') + const contentType = await this.getMetadata(file, 'user.supabase.content-type') + const lastModified = new Date(0) + lastModified.setUTCMilliseconds(data.mtimeMs) + return { + metadata: { + cacheControl, + mimetype: contentType, + lastModified: lastModified, + // contentRange: data.ContentRange, @todo: support range requests + httpStatusCode: 200, + }, + body, + } + } + + async uploadObject( + bucketName: string, + key: string, + body: NodeJS.ReadableStream, + contentType: string, + cacheControl: string + ): Promise { + const file = path.resolve(this.filePath, `${bucketName}/${key}`) + await fs.ensureFile(file) + const destFile = fs.createWriteStream(file) + await pipeline(body, destFile) + await Promise.all([ + this.setMetadata(file, 'user.supabase.content-type', contentType), + this.setMetadata(file, 'user.supabase.cache-control', cacheControl), + ]) + return { + httpStatusCode: 200, + } + } + + async deleteObject(bucket: string, key: string): Promise { + const file = path.resolve(this.filePath, `${bucket}/${key}`) + await fs.remove(file) + return {} + } + + async copyObject(bucket: string, source: string, destination: string): Promise { + const srcFile = path.resolve(this.filePath, `${bucket}/${source}`) + const destFile = path.resolve(this.filePath, `${bucket}/${destination}`) + await fs.copyFile(srcFile, destFile) + return { + httpStatusCode: 200, + } + } + + async deleteObjects(bucket: string, 
prefixes: string[]): Promise<ObjectMetadata> { + const promises = prefixes.map((prefix) => { + return fs.rm(path.resolve(this.filePath, bucket, prefix)) + }) + await Promise.all(promises) + return {} + } + + async headObject(bucket: string, key: string): Promise<ObjectMetadata> { + const file = path.resolve(this.filePath, `${bucket}/${key}`) + const data = await fs.stat(file) + return { + httpStatusCode: 200, + size: data.size, + } + } +} diff --git a/src/backend/generic.ts b/src/backend/generic.ts new file mode 100644 index 00000000..6cac64ff --- /dev/null +++ b/src/backend/generic.ts @@ -0,0 +1,32 @@ +import { ObjectMetadata, ObjectResponse } from '../types/types' + +export abstract class GenericStorageBackend { + client: any + constructor() { + this.client = null + } + async getObject(bucketName: string, key: string, range?: string): Promise<ObjectResponse> { + throw new Error('getObject not implemented') + } + async uploadObject( + bucketName: string, + key: string, + body: NodeJS.ReadableStream, + contentType: string, + cacheControl: string + ): Promise<ObjectMetadata> { + throw new Error('uploadObject not implemented') + } + async deleteObject(bucket: string, key: string): Promise<ObjectMetadata> { + throw new Error('deleteObject not implemented') + } + async copyObject(bucket: string, source: string, destination: string): Promise<ObjectMetadata> { + throw new Error('copyObject not implemented') + } + async deleteObjects(bucket: string, prefixes: string[]): Promise<ObjectMetadata> { + throw new Error('deleteObjects not implemented') + } + async headObject(bucket: string, key: string): Promise<ObjectMetadata> { + throw new Error('headObject not implemented') + } +} diff --git a/src/backend/s3.ts b/src/backend/s3.ts new file mode 100644 index 00000000..2db48fd0 --- /dev/null +++ b/src/backend/s3.ts @@ -0,0 +1,125 @@ +import { + CopyObjectCommand, + DeleteObjectCommand, + DeleteObjectsCommand, + GetObjectCommand, + HeadObjectCommand, + S3Client, + S3ClientConfig, +} from '@aws-sdk/client-s3' +import { Upload } from '@aws-sdk/lib-storage' +import { NodeHttpHandler } from '@aws-sdk/node-http-handler' +import { ObjectMetadata, ObjectResponse } from '../types/types' +import { GenericStorageBackend } from './generic' + +export class S3Backend implements GenericStorageBackend { + client: S3Client + + constructor(region: string, endpoint?: string | undefined) { + const params: S3ClientConfig = { + region, + runtime: 'node', + requestHandler: new NodeHttpHandler({ + socketTimeout: 300000, + }), + } + if (endpoint) { + params.endpoint = endpoint + } + this.client = new S3Client(params) + } + + async getObject(bucketName: string, key: string, range?: string): Promise<ObjectResponse> { + const command = new GetObjectCommand({ + Bucket: bucketName, + Key: key, + Range: range, + }) + const data = await this.client.send(command) + return { + metadata: { + cacheControl: data.CacheControl, + mimetype: data.ContentType, + eTag: data.ETag, + lastModified: data.LastModified, + contentRange: data.ContentRange, + httpStatusCode: data.$metadata.httpStatusCode, + }, + body: data.Body, + } + } + + async uploadObject( + bucketName: string, + key: string, + body: NodeJS.ReadableStream, + contentType: string, + cacheControl: string + ): Promise<ObjectMetadata> { + const paralellUploadS3 = new Upload({ + client: this.client, + params: { + Bucket: bucketName, + Key: key, + /* @ts-expect-error: https://github.com/aws/aws-sdk-js-v3/issues/2085 */ + Body: body, + ContentType: contentType, + CacheControl: cacheControl, + }, + }) + + const data = await paralellUploadS3.done() + return { + httpStatusCode: data.$metadata.httpStatusCode, + } + } + + async 
deleteObject(bucket: string, key: string): Promise<ObjectMetadata> { + const command = new DeleteObjectCommand({ + Bucket: bucket, + Key: key, + }) + await this.client.send(command) + return {} + } + + async copyObject(bucket: string, source: string, destination: string): Promise<ObjectMetadata> { + const command = new CopyObjectCommand({ + Bucket: bucket, + CopySource: `/${bucket}/${source}`, + Key: destination, + }) + const data = await this.client.send(command) + return { + httpStatusCode: data.$metadata.httpStatusCode, + } + } + + async deleteObjects(bucket: string, prefixes: string[]): Promise<ObjectMetadata> { + const s3Prefixes = prefixes.map((ele) => { + return { Key: ele } + }) + + const command = new DeleteObjectsCommand({ + Bucket: bucket, + Delete: { + Objects: s3Prefixes, + }, + }) + await this.client.send(command) + return {} + } + + async headObject(bucket: string, key: string): Promise<ObjectMetadata> { + const command = new HeadObjectCommand({ + Bucket: bucket, + Key: key, + }) + const data = await this.client.send(command) + return { + httpStatusCode: data.$metadata.httpStatusCode, + size: data.ContentLength, + } + } +} diff --git a/src/routes/bucket/emptyBucket.ts b/src/routes/bucket/emptyBucket.ts index f6c40ce7..7e42886c 100644 --- a/src/routes/bucket/emptyBucket.ts +++ b/src/routes/bucket/emptyBucket.ts @@ -4,11 +4,18 @@ import { AuthenticatedRequest, Bucket, Obj } from '../../types/types' import { getPostgrestClient, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' -import { deleteObjects, initClient } from '../../utils/s3' +import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const client = initClient(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const emptyBucketParamsSchema = { type: 'object', properties: { @@ -87,12 +94,10 @@ export default async function routes(fastify: FastifyInstance) { if (deleteData && deleteData.length > 0) { const params = deleteData.map((ele) => { - return { - Key: `${projectRef}/${bucketName}/${ele.name}`, - } + return `${projectRef}/${bucketName}/${ele.name}` }) // delete files from s3 asynchronously - deleteObjects(client, globalS3Bucket, params) + storageBackend.deleteObjects(globalS3Bucket, params) } } } while (!deleteError && !objectError && objects && objects.length > 0) diff --git a/src/routes/object/copyObject.ts b/src/routes/object/copyObject.ts index 7c35f6d9..f3219238 100644 --- a/src/routes/object/copyObject.ts +++ b/src/routes/object/copyObject.ts @@ -4,10 +4,25 @@ import { AuthenticatedRequest, Obj } from '../../types/types' import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' -import { copyObject, initClient } from '../../utils/s3' +import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint, 
serviceKey } = getConfig() -const client = initClient(region, globalS3Endpoint) +const { + region, + projectRef, + globalS3Bucket, + globalS3Endpoint, + serviceKey, + storageBackendType, +} = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const copyRequestBodySchema = { type: 'object', @@ -114,8 +129,12 @@ export default async function routes(fastify: FastifyInstance) { const s3SourceKey = `${projectRef}/${bucketId}/${sourceKey}` const s3DestinationKey = `${projectRef}/${bucketId}/${destinationKey}` - const copyResult = await copyObject(client, globalS3Bucket, s3SourceKey, s3DestinationKey) - return response.status(copyResult.$metadata.httpStatusCode ?? 200).send({ + const copyResult = await storageBackend.copyObject( + globalS3Bucket, + s3SourceKey, + s3DestinationKey + ) + return response.status(copyResult.httpStatusCode ?? 200).send({ Key: `${bucketId}/${destinationKey}`, }) } diff --git a/src/routes/object/createObject.ts b/src/routes/object/createObject.ts index 15a628c2..5a684690 100644 --- a/src/routes/object/createObject.ts +++ b/src/routes/object/createObject.ts @@ -1,4 +1,3 @@ -import { ServiceOutputTypes } from '@aws-sdk/client-s3' import { PostgrestSingleResponse } from '@supabase/postgrest-js/dist/main/lib/types' import { FastifyInstance, RequestGenericInterface } from 'fastify' import { FromSchema } from 'json-schema-to-ts' @@ -6,10 +5,25 @@ import { Obj, ObjectMetadata } from '../../types/types' import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' -import { deleteObject, headObject, initClient, uploadObject } from '../../utils/s3' - -const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig() -const client = initClient(region, globalS3Endpoint) +import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' + +const { + region, + projectRef, + globalS3Bucket, + globalS3Endpoint, + serviceKey, + storageBackendType, +} = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const createObjectParamsSchema = { type: 'object', @@ -74,7 +88,7 @@ export default async function routes(fastify: FastifyInstance) { const path = `${bucketName}/${objectName}` const s3Key = `${projectRef}/${path}` let mimeType: string, cacheControl: string, isTruncated: boolean - let uploadResult: ServiceOutputTypes + let uploadResult: ObjectMetadata if (!isValidKey(objectName) || !isValidKey(bucketName)) { return response @@ -155,8 +169,7 @@ export default async function routes(fastify: FastifyInstance) { const cacheTime = data.fields.cacheControl?.value cacheControl = cacheTime ? `max-age=${cacheTime}` : 'no-cache' mimeType = data.mimetype - uploadResult = await uploadObject( - client, + uploadResult = await storageBackend.uploadObject( globalS3Bucket, s3Key, data.file, @@ -173,8 +186,7 @@ export default async function routes(fastify: FastifyInstance) { mimeType = request.headers['content-type'] cacheControl = request.headers['cache-control'] ?? 
'no-cache' - uploadResult = await uploadObject( - client, + uploadResult = await storageBackend.uploadObject( globalS3Bucket, s3Key, request.raw, @@ -195,7 +207,7 @@ export default async function routes(fastify: FastifyInstance) { bucket_id: bucketName, }) .single() - await deleteObject(client, globalS3Bucket, s3Key) + await storageBackend.deleteObject(globalS3Bucket, s3Key) // return an error response return response @@ -209,12 +221,12 @@ export default async function routes(fastify: FastifyInstance) { ) } - const objectMetadata = await headObject(client, globalS3Bucket, s3Key) + const objectMetadata = await storageBackend.headObject(globalS3Bucket, s3Key) // update content-length as super user since user may not have update permissions const metadata: ObjectMetadata = { mimetype: mimeType, cacheControl, - size: objectMetadata.ContentLength, + size: objectMetadata.size, } const { error: updateError, status: updateStatus } = await superUserPostgrest .from('objects') @@ -229,7 +241,7 @@ export default async function routes(fastify: FastifyInstance) { return response.status(400).send(transformPostgrestError(updateError, updateStatus)) } - return response.status(uploadResult.$metadata.httpStatusCode ?? 200).send({ + return response.status(uploadResult.httpStatusCode ?? 200).send({ Key: path, }) } diff --git a/src/routes/object/deleteObject.ts b/src/routes/object/deleteObject.ts index 3e7d1aa1..2da8c9f8 100644 --- a/src/routes/object/deleteObject.ts +++ b/src/routes/object/deleteObject.ts @@ -4,10 +4,18 @@ import { AuthenticatedRequest, Obj } from '../../types/types' import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' -import { deleteObject, initClient } from '../../utils/s3' +import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const client = initClient(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const deleteObjectParamsSchema = { type: 'object', @@ -77,7 +85,7 @@ export default async function routes(fastify: FastifyInstance) { // if successfully deleted, delete from s3 too const s3Key = `${projectRef}/${bucketName}/${objectName}` - await deleteObject(client, globalS3Bucket, s3Key) + await storageBackend.deleteObject(globalS3Bucket, s3Key) return response.status(200).send(createResponse('Successfully deleted')) } diff --git a/src/routes/object/deleteObjects.ts b/src/routes/object/deleteObjects.ts index 80b60334..bb439cbe 100644 --- a/src/routes/object/deleteObjects.ts +++ b/src/routes/object/deleteObjects.ts @@ -5,10 +5,18 @@ import { AuthenticatedRequest, Obj } from '../../types/types' import { getPostgrestClient, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { createDefaultSchema } from '../../utils/generic-routes' -import { deleteObjects, initClient } from '../../utils/s3' +import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' 
-const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const client = initClient(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const deleteObjectsParamsSchema = { type: 'object', @@ -81,10 +89,10 @@ export default async function routes(fastify: FastifyInstance) { if (results.length > 0) { // if successfully deleted, delete from s3 too const prefixesToDelete = results.map((ele) => { - return { Key: `${projectRef}/${bucketName}/${ele.name}` } + return `${projectRef}/${bucketName}/${ele.name}` }) - await deleteObjects(client, globalS3Bucket, prefixesToDelete) + await storageBackend.deleteObjects(globalS3Bucket, prefixesToDelete) } return response.status(200).send(results) diff --git a/src/routes/object/getObject.ts b/src/routes/object/getObject.ts index d0848fe0..bbc7faef 100644 --- a/src/routes/object/getObject.ts +++ b/src/routes/object/getObject.ts @@ -6,10 +6,18 @@ import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../u import { getConfig } from '../../utils/config' import { normalizeContentType } from '../../utils' import { createResponse } from '../../utils/generic-routes' -import { getObject, initClient } from '../../utils/s3' +import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const client = initClient(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const getObjectParamsSchema = { type: 'object', @@ -66,18 +74,18 @@ async function requestHandler( // send the object from s3 const s3Key = `${projectRef}/${bucketName}/${objectName}` request.log.info(s3Key) - const data = await getObject(client, globalS3Bucket, s3Key, range) + const data = await storageBackend.getObject(globalS3Bucket, s3Key, range) response - .status(data.$metadata.httpStatusCode ?? 200) - .header('Content-Type', normalizeContentType(data.ContentType)) - .header('Cache-Control', data.CacheControl) - .header('ETag', data.ETag) - .header('Last-Modified', data.LastModified) - if (data.ContentRange) { - response.header('Content-Range', data.ContentRange) + .status(data.metadata.httpStatusCode ?? 
200) + .header('Content-Type', normalizeContentType(data.metadata.mimetype)) + .header('Cache-Control', data.metadata.cacheControl) + .header('ETag', data.metadata.eTag) + .header('Last-Modified', data.metadata.lastModified) + if (data.metadata.contentRange) { + response.header('Content-Range', data.metadata.contentRange) } - return response.send(data.Body) + return response.send(data.body) } // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types diff --git a/src/routes/object/getPublicObject.ts b/src/routes/object/getPublicObject.ts index de41e671..9f23d281 100644 --- a/src/routes/object/getPublicObject.ts +++ b/src/routes/object/getPublicObject.ts @@ -4,10 +4,25 @@ import { Bucket } from '../../types/types' import { getPostgrestClient, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { normalizeContentType } from '../../utils' -import { getObject, initClient } from '../../utils/s3' +import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint, serviceKey } = getConfig() -const client = initClient(region, globalS3Endpoint) +const { + region, + projectRef, + globalS3Bucket, + globalS3Endpoint, + serviceKey, + storageBackendType, +} = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const getPublicObjectParamsSchema = { type: 'object', @@ -59,17 +74,17 @@ export default async function routes(fastify: FastifyInstance) { const s3Key = `${projectRef}/${bucketName}/${objectName}` request.log.info(s3Key) try { - const data = await getObject(client, globalS3Bucket, s3Key, range) + const data = await storageBackend.getObject(globalS3Bucket, s3Key, range) response - .status(data.$metadata.httpStatusCode ?? 200) - .header('Content-Type', normalizeContentType(data.ContentType)) - .header('Cache-Control', data.CacheControl) - .header('ETag', data.ETag) - .header('Last-Modified', data.LastModified) - if (data.ContentRange) { - response.header('Content-Range', data.ContentRange) + .status(data.metadata.httpStatusCode ?? 
200) + .header('Content-Type', normalizeContentType(data.metadata.mimetype)) + .header('Cache-Control', data.metadata.cacheControl) + .header('ETag', data.metadata.eTag) + .header('Last-Modified', data.metadata.lastModified) + if (data.metadata.contentRange) { + response.header('Content-Range', data.metadata.contentRange) } - return response.send(data.Body) + return response.send(data.body) } catch (err) { if (err.$metadata?.httpStatusCode === 404) { return response.status(404).send() diff --git a/src/routes/object/getSignedObject.ts b/src/routes/object/getSignedObject.ts index 94057df7..e7d2a7c3 100644 --- a/src/routes/object/getSignedObject.ts +++ b/src/routes/object/getSignedObject.ts @@ -5,10 +5,18 @@ import { verifyJWT } from '../../utils/' import { getConfig } from '../../utils/config' import { normalizeContentType } from '../../utils' import { createResponse } from '../../utils/generic-routes' -import { getObject, initClient } from '../../utils/s3' +import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const client = initClient(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const getSignedObjectParamsSchema = { type: 'object', @@ -61,18 +69,18 @@ export default async function routes(fastify: FastifyInstance) { const { url } = payload as SignedToken const s3Key = `${projectRef}/${url}` request.log.info(s3Key) - const data = await getObject(client, globalS3Bucket, s3Key, range) + const data = await storageBackend.getObject(globalS3Bucket, s3Key, range) response - .status(data.$metadata.httpStatusCode ?? 200) - .header('Content-Type', normalizeContentType(data.ContentType)) - .header('Cache-Control', data.CacheControl ?? 'no-cache') - .header('ETag', data.ETag) - .header('Last-Modified', data.LastModified) - if (data.ContentRange) { - response.header('Content-Range', data.ContentRange) + .status(data.metadata.httpStatusCode ?? 
200) + .header('Content-Type', normalizeContentType(data.metadata.mimetype)) + .header('Cache-Control', data.metadata.cacheControl) + .header('ETag', data.metadata.eTag) + .header('Last-Modified', data.metadata.lastModified) + if (data.metadata.contentRange) { + response.header('Content-Range', data.metadata.contentRange) } - return response.send(data.Body) + return response.send(data.body) } catch (err) { request.log.error(err) return response.status(400).send(createResponse(err.message, '400', err.name)) diff --git a/src/routes/object/moveObject.ts b/src/routes/object/moveObject.ts index df0a6404..154f20aa 100644 --- a/src/routes/object/moveObject.ts +++ b/src/routes/object/moveObject.ts @@ -4,10 +4,18 @@ import { AuthenticatedRequest, Obj } from '../../types/types' import { getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' -import { copyObject, deleteObject, initClient } from '../../utils/s3' +import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const client = initClient(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const moveObjectsBodySchema = { type: 'object', @@ -81,8 +89,8 @@ export default async function routes(fastify: FastifyInstance) { const newS3Key = `${projectRef}/${bucketId}/${destinationKey}` // @todo what happens if one of these fail? 
- await copyObject(client, globalS3Bucket, oldS3Key, newS3Key) - await deleteObject(client, globalS3Bucket, oldS3Key) + await storageBackend.copyObject(globalS3Bucket, oldS3Key, newS3Key) + await storageBackend.deleteObject(globalS3Bucket, oldS3Key) return response.status(200).send(createResponse('Successfully moved')) } diff --git a/src/routes/object/updateObject.ts b/src/routes/object/updateObject.ts index de8571be..a28b3f1d 100644 --- a/src/routes/object/updateObject.ts +++ b/src/routes/object/updateObject.ts @@ -1,14 +1,21 @@ -import { ServiceOutputTypes } from '@aws-sdk/client-s3' import { FastifyInstance, RequestGenericInterface } from 'fastify' import { FromSchema } from 'json-schema-to-ts' import { Obj, ObjectMetadata } from '../../types/types' import { getOwner, getPostgrestClient, isValidKey, transformPostgrestError } from '../../utils' import { getConfig } from '../../utils/config' import { createDefaultSchema, createResponse } from '../../utils/generic-routes' -import { headObject, initClient, uploadObject } from '../../utils/s3' +import { S3Backend } from '../../backend/s3' +import { FileBackend } from '../../backend/file' +import { GenericStorageBackend } from '../../backend/generic' -const { region, projectRef, globalS3Bucket, globalS3Endpoint } = getConfig() -const client = initClient(region, globalS3Endpoint) +const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig() +let storageBackend: GenericStorageBackend + +if (storageBackendType === 'file') { + storageBackend = new FileBackend() +} else { + storageBackend = new S3Backend(region, globalS3Endpoint) +} const updateObjectParamsSchema = { type: 'object', @@ -70,7 +77,7 @@ export default async function routes(fastify: FastifyInstance) { const path = `${bucketName}/${objectName}` const s3Key = `${projectRef}/${path}` let mimeType: string, cacheControl: string, isTruncated: boolean - let uploadResult: ServiceOutputTypes + let uploadResult: ObjectMetadata if (!isValidKey(objectName) || !isValidKey(bucketName)) { return response @@ -109,8 +116,7 @@ export default async function routes(fastify: FastifyInstance) { cacheControl = cacheTime ? `max-age=${cacheTime}` : 'no-cache' mimeType = data.mimetype - uploadResult = await uploadObject( - client, + uploadResult = await storageBackend.uploadObject( globalS3Bucket, s3Key, data.file, @@ -128,8 +134,7 @@ export default async function routes(fastify: FastifyInstance) { mimeType = request.headers['content-type'] cacheControl = request.headers['cache-control'] ?? 'no-cache' - uploadResult = await uploadObject( - client, + uploadResult = await storageBackend.uploadObject( globalS3Bucket, s3Key, request.raw, @@ -145,12 +150,12 @@ export default async function routes(fastify: FastifyInstance) { // @todo tricky to handle since we need to undo the s3 upload } - const objectMetadata = await headObject(client, globalS3Bucket, s3Key) + const objectMetadata = await storageBackend.headObject(globalS3Bucket, s3Key) // update content-length as super user since user may not have update permissions const metadata: ObjectMetadata = { mimetype: mimeType, cacheControl, - size: objectMetadata.ContentLength, + size: objectMetadata.size, } const { error: updateError, status: updateStatus } = await postgrest .from('objects') @@ -165,7 +170,7 @@ export default async function routes(fastify: FastifyInstance) { return response.status(400).send(transformPostgrestError(updateError, updateStatus)) } - return response.status(uploadResult.$metadata.httpStatusCode ?? 
200).send({ + return response.status(uploadResult.httpStatusCode ?? 200).send({ Key: path, }) } diff --git a/src/test/bucket.test.ts b/src/test/bucket.test.ts index bd64cddd..7bd86a36 100644 --- a/src/test/bucket.test.ts +++ b/src/test/bucket.test.ts @@ -2,36 +2,26 @@ import dotenv from 'dotenv' import app from '../app' import { getConfig } from '../utils/config' -import * as utils from '../utils/s3' +import { S3Backend } from '../backend/s3' dotenv.config({ path: '.env.test' }) const { anonKey } = getConfig() -let mockDeleteObjects: any, mockGetObject: any - beforeAll(() => { - mockDeleteObjects = jest.spyOn(utils, 'deleteObjects') - mockDeleteObjects.mockImplementation(() => - Promise.resolve({ - $metadata: { - httpStatusCode: 204, - }, - }) - ) - mockGetObject = jest.spyOn(utils, 'getObject') - mockGetObject.mockImplementation(() => - Promise.resolve({ - $metadata: { + jest.spyOn(S3Backend.prototype, 'deleteObjects').mockImplementation(() => { + return Promise.resolve({}) + }) + + jest.spyOn(S3Backend.prototype, 'getObject').mockImplementation(() => { + return Promise.resolve({ + metadata: { httpStatusCode: 200, + size: 3746, + mimetype: 'image/png', }, - CacheControl: undefined, - ContentDisposition: undefined, - ContentEncoding: undefined, - ContentLength: 3746, - ContentType: 'image/png', - Metadata: {}, + body: Buffer.from(''), }) - ) + }) }) beforeEach(() => { diff --git a/src/test/object.test.ts b/src/test/object.test.ts index da5c9758..50be9c77 100644 --- a/src/test/object.test.ts +++ b/src/test/object.test.ts @@ -5,82 +5,54 @@ import fs from 'fs' import app from '../app' import { getConfig } from '../utils/config' import { signJWT } from '../utils/index' -import * as utils from '../utils/s3' +import { S3Backend } from '../backend/s3' dotenv.config({ path: '.env.test' }) const { anonKey, serviceKey } = getConfig() -let mockGetObject: any, - mockUploadObject: any, - mockCopyObject: any, - mockDeleteObject: any, - mockDeleteObjects: any, - mockHeadObject: any - beforeAll(() => { - mockGetObject = jest.spyOn(utils, 'getObject') - mockGetObject.mockImplementation(() => - Promise.resolve({ - $metadata: { + jest.spyOn(S3Backend.prototype, 'getObject').mockImplementation(() => { + return Promise.resolve({ + metadata: { httpStatusCode: 200, + size: 3746, + mimetype: 'image/png', }, - CacheControl: undefined, - ContentDisposition: undefined, - ContentEncoding: undefined, - ContentLength: 3746, - ContentType: 'image/png', - Metadata: {}, + body: Buffer.from(''), }) - ) + }) - mockUploadObject = jest.spyOn(utils, 'uploadObject') - mockUploadObject.mockImplementation(() => - Promise.resolve({ - $metadata: { - httpStatusCode: 200, - }, - Bucket: 'xxx', - Key: 'bucket2/authenticated/sadcat-upload41.png', + jest.spyOn(S3Backend.prototype, 'uploadObject').mockImplementation(() => { + return Promise.resolve({ + httpStatusCode: 200, + size: 3746, + mimetype: 'image/png', }) - ) + }) - mockCopyObject = jest.spyOn(utils, 'copyObject') - mockCopyObject.mockImplementation(() => - Promise.resolve({ - $metadata: { - httpStatusCode: 200, - }, - Bucket: 'xxx', - Key: 'authenticated/casestudy11.png', + jest.spyOn(S3Backend.prototype, 'copyObject').mockImplementation(() => { + return Promise.resolve({ + httpStatusCode: 200, + size: 3746, + mimetype: 'image/png', }) - ) + }) - mockDeleteObject = jest.spyOn(utils, 'deleteObject') - mockDeleteObject.mockImplementation(() => - Promise.resolve({ - $metadata: { - httpStatusCode: 204, - }, - }) - ) + jest.spyOn(S3Backend.prototype, 
'deleteObject').mockImplementation(() => { + return Promise.resolve({}) + }) - mockDeleteObjects = jest.spyOn(utils, 'deleteObjects') - mockDeleteObjects.mockImplementation(() => - Promise.resolve({ - $metadata: { - httpStatusCode: 204, - }, - }) - ) + jest.spyOn(S3Backend.prototype, 'deleteObjects').mockImplementation(() => { + return Promise.resolve({}) + }) - mockHeadObject = jest.spyOn(utils, 'headObject') - mockHeadObject.mockImplementation(() => - Promise.resolve({ - $metadata: { - ContentLength: 1000, - }, + jest.spyOn(S3Backend.prototype, 'headObject').mockImplementation(() => { + return Promise.resolve({ + httpStatusCode: 200, + size: 3746, + mimetype: 'image/png', }) - ) + }) }) beforeEach(() => { @@ -100,7 +72,7 @@ describe('testing GET object', () => { }, }) expect(response.statusCode).toBe(200) - expect(mockGetObject).toBeCalled() + expect(S3Backend.prototype.getObject).toBeCalled() }) test('check if RLS policies are respected: anon user is not able to read authenticated resource', async () => { @@ -112,7 +84,7 @@ describe('testing GET object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockGetObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.getObject).not.toHaveBeenCalled() }) test('user is not able to read a resource without Auth header', async () => { @@ -121,7 +93,7 @@ describe('testing GET object', () => { url: '/object/authenticated/bucket2/authenticated/casestudy.png', }) expect(response.statusCode).toBe(400) - expect(mockGetObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.getObject).not.toHaveBeenCalled() }) test('return 400 when reading a non existent object', async () => { @@ -133,7 +105,7 @@ describe('testing GET object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockGetObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.getObject).not.toHaveBeenCalled() }) test('return 400 when reading a non existent bucket', async () => { @@ -145,7 +117,7 @@ describe('testing GET object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockGetObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.getObject).not.toHaveBeenCalled() }) }) /* @@ -167,7 +139,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toBeCalled() + expect(S3Backend.prototype.uploadObject).toBeCalled() expect(response.body).toBe(`{"Key":"bucket2/authenticated/casestudy1.png"}`) }) @@ -185,7 +157,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() expect(response.body).toBe( JSON.stringify({ statusCode: '42501', @@ -206,7 +178,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when uploading to a non existent bucket', async () => { @@ -223,7 +195,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when uploading to duplicate object', async () => { @@ -240,7 +212,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) 
expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 200 when upserting duplicate object', async () => { @@ -258,7 +230,7 @@ describe('testing POST object via multipart upload', () => { payload: form, }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).toHaveBeenCalled() }) }) @@ -284,7 +256,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toBeCalled() + expect(S3Backend.prototype.uploadObject).toBeCalled() expect(response.body).toBe(`{"Key":"bucket2/authenticated/binary-casestudy1.png"}`) }) @@ -305,7 +277,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() expect(response.body).toBe( JSON.stringify({ statusCode: '42501', @@ -331,7 +303,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when uploading to a non existent bucket', async () => { @@ -351,7 +323,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when uploading to duplicate object', async () => { @@ -371,7 +343,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 200 when upserting duplicate object', async () => { @@ -392,7 +364,7 @@ describe('testing POST object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).toHaveBeenCalled() }) }) @@ -414,7 +386,7 @@ describe('testing PUT object', () => { payload: form, }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toBeCalled() + expect(S3Backend.prototype.uploadObject).toBeCalled() expect(response.body).toBe(`{"Key":"bucket2/authenticated/cat.jpg"}`) }) @@ -432,7 +404,7 @@ describe('testing PUT object', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() // expect(response.body).toBe(`new row violates row-level security policy for table "objects"`) }) @@ -447,7 +419,7 @@ describe('testing PUT object', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when update to a non existent bucket', async () => { @@ -464,7 +436,7 @@ describe('testing PUT object', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + 
expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when updating a non existent key', async () => { @@ -481,7 +453,7 @@ describe('testing PUT object', () => { payload: form, }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) }) @@ -507,7 +479,7 @@ describe('testing PUT object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(200) - expect(mockUploadObject).toBeCalled() + expect(S3Backend.prototype.uploadObject).toBeCalled() expect(response.body).toBe(`{"Key":"bucket2/authenticated/cat.jpg"}`) }) @@ -528,7 +500,7 @@ describe('testing PUT object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('check if RLS policies are respected: user is not able to upload a resource without Auth header', async () => { @@ -547,7 +519,7 @@ describe('testing PUT object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when updating an object in a non existent bucket', async () => { @@ -567,7 +539,7 @@ describe('testing PUT object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) test('return 400 when updating an object in a non existent key', async () => { @@ -587,7 +559,7 @@ describe('testing PUT object via binary upload', () => { payload: fs.createReadStream(path), }) expect(response.statusCode).toBe(400) - expect(mockUploadObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled() }) }) @@ -609,7 +581,7 @@ describe('testing copy object', () => { }, }) expect(response.statusCode).toBe(200) - expect(mockCopyObject).toBeCalled() + expect(S3Backend.prototype.copyObject).toBeCalled() expect(response.body).toBe(`{"Key":"bucket2/authenticated/casestudy11.png"}`) }) @@ -627,7 +599,7 @@ describe('testing copy object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockCopyObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled() }) test('user is not able to copy a resource without Auth header', async () => { @@ -641,7 +613,7 @@ describe('testing copy object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockCopyObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled() }) test('return 400 when copy from a non existent bucket', async () => { @@ -658,7 +630,7 @@ describe('testing copy object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockCopyObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled() }) test('return 400 when copying a non existent key', async () => { @@ -675,7 +647,7 @@ describe('testing copy object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockCopyObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled() }) }) @@ -692,7 +664,7 @@ describe('testing delete object', () => { }, }) expect(response.statusCode).toBe(200) - 
expect(mockDeleteObject).toBeCalled() + expect(S3Backend.prototype.deleteObject).toBeCalled() }) test('check if RLS policies are respected: anon user is not able to delete authenticated resource', async () => { @@ -704,7 +676,7 @@ describe('testing delete object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockDeleteObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled() }) test('user is not able to delete a resource without Auth header', async () => { @@ -713,7 +685,7 @@ describe('testing delete object', () => { url: '/object/bucket2/authenticated/delete1.png', }) expect(response.statusCode).toBe(400) - expect(mockDeleteObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled() }) test('return 400 when delete from a non existent bucket', async () => { @@ -725,7 +697,7 @@ describe('testing delete object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockDeleteObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled() }) test('return 400 when deleting a non existent key', async () => { @@ -737,7 +709,7 @@ describe('testing delete object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockDeleteObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled() }) }) @@ -757,7 +729,7 @@ describe('testing deleting multiple objects', () => { }, }) expect(response.statusCode).toBe(200) - expect(mockDeleteObjects).toBeCalled() + expect(S3Backend.prototype.deleteObjects).toBeCalled() const result = JSON.parse(response.body) expect(result[0].name).toBe('authenticated/delete-multiple1.png') @@ -776,7 +748,7 @@ describe('testing deleting multiple objects', () => { }, }) expect(response.statusCode).toBe(200) - expect(mockDeleteObjects).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled() const results = JSON.parse(response.body) expect(results.length).toBe(0) }) @@ -790,7 +762,7 @@ describe('testing deleting multiple objects', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockDeleteObjects).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled() }) test('deleting from a non existent bucket', async () => { @@ -805,7 +777,7 @@ describe('testing deleting multiple objects', () => { }, }) expect(response.statusCode).toBe(200) - expect(mockDeleteObjects).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled() }) test('deleting a non existent key', async () => { @@ -820,7 +792,7 @@ describe('testing deleting multiple objects', () => { }, }) expect(response.statusCode).toBe(200) - expect(mockDeleteObjects).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObjects).not.toHaveBeenCalled() const results = JSON.parse(response.body) expect(results.length).toBe(0) }) @@ -837,7 +809,7 @@ describe('testing deleting multiple objects', () => { }, }) expect(response.statusCode).toBe(200) - expect(mockDeleteObjects).toBeCalled() + expect(S3Backend.prototype.deleteObjects).toBeCalled() const results = JSON.parse(response.body) expect(results.length).toBe(1) expect(results[0].name).toBe('authenticated/delete-multiple7.png') @@ -979,8 +951,8 @@ describe('testing move object', () => { }, }) expect(response.statusCode).toBe(200) - expect(mockCopyObject).toHaveBeenCalled() - expect(mockDeleteObject).toHaveBeenCalled() + expect(S3Backend.prototype.copyObject).toHaveBeenCalled() + 
expect(S3Backend.prototype.deleteObject).toHaveBeenCalled() }) test('check if RLS policies are respected: anon user is not able to move an authenticated object', async () => { @@ -997,8 +969,8 @@ describe('testing move object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockCopyObject).not.toHaveBeenCalled() - expect(mockDeleteObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled() }) test('user is not able to move an object without auth header', async () => { @@ -1012,8 +984,8 @@ describe('testing move object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockCopyObject).not.toHaveBeenCalled() - expect(mockDeleteObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled() }) test('user is not able to move an object in a non existent bucket', async () => { @@ -1030,8 +1002,8 @@ describe('testing move object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockCopyObject).not.toHaveBeenCalled() - expect(mockDeleteObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled() }) test('user is not able to move an non existent object', async () => { @@ -1048,8 +1020,8 @@ describe('testing move object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockCopyObject).not.toHaveBeenCalled() - expect(mockDeleteObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled() }) test('user is not able to move to an existing key', async () => { @@ -1066,8 +1038,8 @@ describe('testing move object', () => { }, }) expect(response.statusCode).toBe(400) - expect(mockCopyObject).not.toHaveBeenCalled() - expect(mockDeleteObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.copyObject).not.toHaveBeenCalled() + expect(S3Backend.prototype.deleteObject).not.toHaveBeenCalled() }) }) diff --git a/src/types/types.d.ts b/src/types/types.d.ts index 008e67f8..506cce08 100644 --- a/src/types/types.d.ts +++ b/src/types/types.d.ts @@ -1,5 +1,6 @@ import { RequestGenericInterface } from 'fastify' import { FromSchema } from 'json-schema-to-ts' +import { Readable } from 'stream' import { bucketSchema } from '../schemas/bucket' import { objectSchema } from '../schemas/object' @@ -34,8 +35,17 @@ type StorageError = { message: string } +type ObjectResponse = { + metadata: ObjectMetadata + body?: ReadableStream | Readable | Blob | Buffer +} + type ObjectMetadata = { cacheControl?: string size?: number mimetype?: string + lastModified?: Date + eTag?: string + contentRange?: string + httpStatusCode?: number } diff --git a/src/utils/config.ts b/src/utils/config.ts index cfc57775..1bd5fc91 100644 --- a/src/utils/config.ts +++ b/src/utils/config.ts @@ -1,5 +1,6 @@ import dotenv from 'dotenv' +type StorageBackendType = 'file' | 's3' type StorageConfigType = { anonKey: string serviceKey: string @@ -10,6 +11,8 @@ type StorageConfigType = { globalS3Endpoint?: string jwtSecret: string fileSizeLimit: number + storageBackendType: StorageBackendType + fileStoragePath?: string } function getOptionalConfigFromEnv(key: string): string | undefined { @@ -37,5 +40,7 @@ export function getConfig(): StorageConfigType { globalS3Endpoint: getOptionalConfigFromEnv('GLOBAL_S3_ENDPOINT'), 
jwtSecret: getConfigFromEnv('PGRST_JWT_SECRET'), fileSizeLimit: Number(getConfigFromEnv('FILE_SIZE_LIMIT')), + storageBackendType: getConfigFromEnv('STORAGE_BACKEND') as StorageBackendType, + fileStoragePath: getOptionalConfigFromEnv('FILE_STORAGE_BACKEND_PATH'), } } diff --git a/src/utils/s3.ts b/src/utils/s3.ts deleted file mode 100644 index b6e9adb0..00000000 --- a/src/utils/s3.ts +++ /dev/null @@ -1,122 +0,0 @@ -import { - CopyObjectCommand, - CopyObjectCommandOutput, - DeleteObjectCommand, - DeleteObjectCommandOutput, - DeleteObjectsCommand, - DeleteObjectsOutput, - GetObjectCommand, - GetObjectCommandOutput, - HeadObjectCommand, - HeadObjectOutput, - ObjectIdentifier, - S3Client, - S3ClientConfig, - ServiceOutputTypes, -} from '@aws-sdk/client-s3' -import { Upload } from '@aws-sdk/lib-storage' -import { NodeHttpHandler } from '@aws-sdk/node-http-handler' - -export function initClient(region: string, endpoint?: string | undefined): S3Client { - const params: S3ClientConfig = { - region, - runtime: 'node', - requestHandler: new NodeHttpHandler({ - socketTimeout: 300000, - }), - } - if (endpoint) { - params.endpoint = endpoint - } - return new S3Client(params) -} - -export async function getObject( - client: S3Client, - bucketName: string, - key: string, - range?: string -): Promise<GetObjectCommandOutput> { - const command = new GetObjectCommand({ - Bucket: bucketName, - Key: key, - Range: range, - }) - const data = await client.send(command) - return data -} - -export async function uploadObject( - client: S3Client, - bucketName: string, - key: string, - body: NodeJS.ReadableStream, - contentType: string, - cacheControl: string -): Promise<ServiceOutputTypes> { - const paralellUploadS3 = new Upload({ - client, - params: { - Bucket: bucketName, - Key: key, - /* @ts-expect-error: https://github.com/aws/aws-sdk-js-v3/issues/2085 */ - Body: body, - ContentType: contentType, - CacheControl: cacheControl, - }, - }) - - return await paralellUploadS3.done() -} - -export async function deleteObject( - client: S3Client, - bucket: string, - key: string -): Promise<DeleteObjectCommandOutput> { - const command = new DeleteObjectCommand({ - Bucket: bucket, - Key: key, - }) - return await client.send(command) -} - -export async function copyObject( - client: S3Client, - bucket: string, - source: string, - destination: string -): Promise<CopyObjectCommandOutput> { - const command = new CopyObjectCommand({ - Bucket: bucket, - CopySource: `/${bucket}/${source}`, - Key: destination, - }) - return await client.send(command) -} - -export async function deleteObjects( - client: S3Client, - bucket: string, - prefixes: ObjectIdentifier[] -): Promise<DeleteObjectsOutput> { - const command = new DeleteObjectsCommand({ - Bucket: bucket, - Delete: { - Objects: prefixes, - }, - }) - return await client.send(command) -} - -export async function headObject( - client: S3Client, - bucket: string, - key: string -): Promise<HeadObjectOutput> { - const command = new HeadObjectCommand({ - Bucket: bucket, - Key: key, - }) - return await client.send(command) -}
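
The patch above replaces the s3.ts helper module with pluggable backends selected via STORAGE_BACKEND. For orientation only, here is a minimal sketch (not part of the patch) of how the new GenericStorageBackend interface might be exercised from a standalone script. It assumes the environment from .env.sample is loaded (STORAGE_BACKEND, FILE_STORAGE_BACKEND_PATH, GLOBAL_S3_BUCKET, PROJECT_REF, REGION, and the other variables getConfig() reads); the script name, bucket, and object key below are hypothetical. With the file backend it also needs a filesystem with user extended-attribute support, since content-type and cache-control are persisted via fs-xattr.

// backend-demo.ts — hypothetical usage sketch, not shipped with this change.
// Picks a backend the same way the route handlers do, uploads a small object,
// then reads its metadata back through the shared interface.
import { Readable } from 'stream'
import { FileBackend } from './src/backend/file'
import { S3Backend } from './src/backend/s3'
import { GenericStorageBackend } from './src/backend/generic'
import { getConfig } from './src/utils/config'

async function main(): Promise<void> {
  const { region, projectRef, globalS3Bucket, globalS3Endpoint, storageBackendType } = getConfig()
  const backend: GenericStorageBackend =
    storageBackendType === 'file' ? new FileBackend() : new S3Backend(region, globalS3Endpoint)

  // Keys follow the `${projectRef}/${bucketName}/${objectName}` convention used by the routes.
  const key = `${projectRef}/bucket2/hello.txt`
  await backend.uploadObject(
    globalS3Bucket,
    key,
    Readable.from(['hello world']),
    'text/plain',
    'max-age=3600'
  )

  const head = await backend.headObject(globalS3Bucket, key)
  const { metadata } = await backend.getObject(globalS3Bucket, key)
  console.log(head.size, metadata.mimetype, metadata.cacheControl)
}

main().catch(console.error)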