Skip to content

Commit

Permalink
Merge pull request #48 from supabase/feat/add-fs-storage-backend
Browse files Browse the repository at this point in the history
Feat: add file storage backend
  • Loading branch information
inian authored Jul 20, 2021
2 parents 980e5c6 + 35161c2 commit abecbda
Show file tree
Hide file tree
Showing 24 changed files with 605 additions and 349 deletions.
2 changes: 2 additions & 0 deletions .env.sample
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,5 @@ PGRST_JWT_SECRET=f023d3db-39dc-4ac9-87b2-b2be72e9162b
DATABASE_URL=postgresql://postgres:[email protected]/postgres
PGOPTIONS="-c search_path=storage"
FILE_SIZE_LIMIT=52428800
STORAGE_BACKEND=s3
FILE_STORAGE_BACKEND_PATH=./data
1 change: 1 addition & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ jobs:
echo DATABASE_URL=postgresql://postgres:[email protected]/postgres >> .env
echo PGOPTIONS='-c search_path=storage' >> .env
echo FILE_SIZE_LIMIT=52428800 >> .env
echo STORAGE_BACKEND=s3 >> .env
- name: Install dependencies
run: |
Expand Down
1 change: 0 additions & 1 deletion .github/workflows/docs.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
name: Docs

on:
pull_request:
push:
branches:
- master
Expand Down
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,5 @@ dist/
.env
.env.*
!.*.sample
static/api.json
static/api.json
data/
38 changes: 38 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,16 @@
"fastify-cors": "^5.2.0",
"fastify-multipart": "^4.0.1",
"fastify-swagger": "^4.4.1",
"fs-extra": "^8.1.0",
"fs-xattr": "^0.3.1",
"jsonwebtoken": "^8.5.1",
"pg": "^8.5.1",
"pkg": "^4.4.9",
"postgres-migrations": "^5.1.1"
},
"devDependencies": {
"@types/busboy": "^0.2.3",
"@types/fs-extra": "^9.0.12",
"@types/jest": "^26.0.20",
"@types/jsonwebtoken": "^8.5.0",
"@types/node": "^14.14.33",
Expand Down
104 changes: 104 additions & 0 deletions src/backend/file.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
import { ObjectMetadata, ObjectResponse } from '../types/types'
import xattr from 'fs-xattr'
import fs from 'fs-extra'
import path from 'path'
import { promisify } from 'util'
import stream from 'stream'
import { getConfig } from '../utils/config'
import { GenericStorageBackend } from './generic'
const pipeline = promisify(stream.pipeline)

export class FileBackend implements GenericStorageBackend {
  // No remote client for the file backend; initialized so the field is
  // definitely assigned under strict mode (satisfies GenericStorageBackend).
  client: null = null
  /** Root directory under which every `<bucket>/<key>` object is stored. */
  filePath: string

  constructor() {
    const { fileStoragePath } = getConfig()
    if (!fileStoragePath) {
      throw new Error('FILE_STORAGE_BACKEND_PATH env variable not set')
    }
    this.filePath = fileStoragePath
  }

  /**
   * Reads a single extended attribute from a file.
   * Resolves to undefined when the attribute is missing or unreadable
   * (e.g. the file was created outside uploadObject) instead of rejecting,
   * so metadata lookups never take down a whole getObject call.
   */
  getMetadata(file: string, attribute: string): Promise<string | undefined> {
    return xattr
      .get(file, attribute)
      .then((value) => value?.toString() ?? undefined)
      .catch(() => undefined)
  }

  /** Writes a single extended attribute on a file. */
  setMetadata(file: string, attribute: string, value: string): Promise<void> {
    return xattr.set(file, attribute, value)
  }

  /**
   * Returns the object's contents plus the metadata persisted as xattrs.
   * NOTE(review): `key` is interpolated into the path unchecked — a key
   * containing `..` could escape `filePath`; confirm keys are validated
   * upstream before this backend is reachable with untrusted input.
   * @todo support range requests (`range` is currently ignored)
   */
  async getObject(bucketName: string, key: string, range?: string): Promise<ObjectResponse> {
    const file = path.resolve(this.filePath, `${bucketName}/${key}`)
    const body = await fs.readFile(file)
    const data = await fs.stat(file)
    const cacheControl = await this.getMetadata(file, 'user.supabase.cache-control')
    const contentType = await this.getMetadata(file, 'user.supabase.content-type')
    return {
      metadata: {
        cacheControl,
        mimetype: contentType,
        // mtimeMs is a unix-epoch offset in milliseconds, so it maps
        // directly onto a Date (equivalent to the old setUTCMilliseconds dance).
        lastModified: new Date(data.mtimeMs),
        // contentRange: data.ContentRange, @todo: support range requests
        httpStatusCode: 200,
      },
      body,
    }
  }

  /**
   * Streams `body` to disk (creating parent directories as needed) and
   * records content-type / cache-control as extended attributes.
   */
  async uploadObject(
    bucketName: string,
    key: string,
    body: NodeJS.ReadableStream,
    contentType: string,
    cacheControl: string
  ): Promise<ObjectMetadata> {
    const file = path.resolve(this.filePath, `${bucketName}/${key}`)
    await fs.ensureFile(file)
    const destFile = fs.createWriteStream(file)
    await pipeline(body, destFile)
    await Promise.all([
      this.setMetadata(file, 'user.supabase.content-type', contentType),
      this.setMetadata(file, 'user.supabase.cache-control', cacheControl),
    ])
    return {
      httpStatusCode: 200,
    }
  }

  /** Deletes one object; resolves even when the file does not exist. */
  async deleteObject(bucket: string, key: string): Promise<ObjectMetadata> {
    const file = path.resolve(this.filePath, `${bucket}/${key}`)
    await fs.remove(file)
    return {}
  }

  /**
   * Copies an object within a bucket, carrying its xattr metadata over to
   * match S3 CopyObject semantics (which copies metadata by default).
   */
  async copyObject(bucket: string, source: string, destination: string): Promise<ObjectMetadata> {
    const srcFile = path.resolve(this.filePath, `${bucket}/${source}`)
    const destFile = path.resolve(this.filePath, `${bucket}/${destination}`)
    // fs-extra's copy creates missing destination directories,
    // which plain fs.copyFile does not.
    await fs.copy(srcFile, destFile)
    // Best-effort metadata copy: attributes absent on the source stay absent.
    const [contentType, cacheControl] = await Promise.all([
      this.getMetadata(srcFile, 'user.supabase.content-type'),
      this.getMetadata(srcFile, 'user.supabase.cache-control'),
    ])
    await Promise.all([
      contentType ? this.setMetadata(destFile, 'user.supabase.content-type', contentType) : undefined,
      cacheControl ? this.setMetadata(destFile, 'user.supabase.cache-control', cacheControl) : undefined,
    ])
    return {
      httpStatusCode: 200,
    }
  }

  /**
   * Deletes a batch of objects. Uses fs.remove so already-missing keys do
   * not reject the whole batch — consistent with deleteObject above and
   * with S3 DeleteObjects, which does not fail on absent keys.
   */
  async deleteObjects(bucket: string, prefixes: string[]): Promise<ObjectMetadata> {
    const promises = prefixes.map((prefix) => {
      return fs.remove(path.resolve(this.filePath, bucket, prefix))
    })
    await Promise.all(promises)
    return {}
  }

  /** Returns size and a 200 status for an existing object; rejects otherwise. */
  async headObject(bucket: string, key: string): Promise<ObjectMetadata> {
    const file = path.resolve(this.filePath, `${bucket}/${key}`)
    const data = await fs.stat(file)
    return {
      httpStatusCode: 200,
      size: data.size,
    }
  }
}
32 changes: 32 additions & 0 deletions src/backend/generic.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import { ObjectMetadata, ObjectResponse } from '../types/types'

/**
 * Rejects with a uniform "not implemented" error for operations a backend
 * has not overridden. Typed `never` so it can be returned from any
 * method signature.
 */
function notImplemented(method: string): never {
  throw new Error(`${method} not implemented`)
}

/**
 * Contract shared by every storage backend (S3, local file system, ...).
 * Each operation rejects with "<name> not implemented" until a concrete
 * backend overrides it.
 */
export abstract class GenericStorageBackend {
  /** Backend-specific client handle; null until a subclass assigns one. */
  client: any

  constructor() {
    this.client = null
  }

  /** Fetches an object's body and metadata. */
  async getObject(bucketName: string, key: string, range?: string): Promise<ObjectResponse> {
    return notImplemented('getObject')
  }

  /** Persists a readable stream as an object with the given metadata. */
  async uploadObject(
    bucketName: string,
    key: string,
    body: NodeJS.ReadableStream,
    contentType: string,
    cacheControl: string
  ): Promise<ObjectMetadata> {
    return notImplemented('uploadObject')
  }

  /** Removes a single object. */
  async deleteObject(bucket: string, key: string): Promise<ObjectMetadata> {
    return notImplemented('deleteObject')
  }

  /** Duplicates an object within the same bucket. */
  async copyObject(bucket: string, source: string, destination: string): Promise<ObjectMetadata> {
    return notImplemented('copyObject')
  }

  /** Removes a batch of objects identified by their keys. */
  async deleteObjects(bucket: string, prefixes: string[]): Promise<ObjectMetadata> {
    return notImplemented('deleteObjects')
  }

  /** Fetches an object's metadata without its body. */
  async headObject(bucket: string, key: string): Promise<ObjectMetadata> {
    return notImplemented('headObject')
  }
}
125 changes: 125 additions & 0 deletions src/backend/s3.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
import {
CopyObjectCommand,
DeleteObjectCommand,
DeleteObjectsCommand,
GetObjectCommand,
HeadObjectCommand,
S3Client,
S3ClientConfig,
} from '@aws-sdk/client-s3'
import { Upload } from '@aws-sdk/lib-storage'
import { NodeHttpHandler } from '@aws-sdk/node-http-handler'
import { ObjectMetadata, ObjectResponse } from '../types/types'
import { GenericStorageBackend } from './generic'

/**
 * Storage backend that delegates every operation to AWS S3
 * (or any S3-compatible service when a custom endpoint is supplied).
 */
export class S3Backend implements GenericStorageBackend {
  client: S3Client

  /**
   * @param region AWS region the client targets.
   * @param endpoint optional custom endpoint (e.g. a local S3-compatible server).
   */
  constructor(region: string, endpoint?: string | undefined) {
    const params: S3ClientConfig = {
      region,
      runtime: 'node',
      // 5-minute socket timeout so large transfers are not cut off early.
      requestHandler: new NodeHttpHandler({
        socketTimeout: 300000,
      }),
    }
    // Only set when provided so the SDK's default endpoint resolution applies.
    if (endpoint) {
      params.endpoint = endpoint
    }
    this.client = new S3Client(params)
  }

  /** Fetches an object (optionally a byte range) together with its metadata. */
  async getObject(bucketName: string, key: string, range?: string): Promise<ObjectResponse> {
    const command = new GetObjectCommand({
      Bucket: bucketName,
      Key: key,
      Range: range,
    })
    const data = await this.client.send(command)
    return {
      metadata: {
        cacheControl: data.CacheControl,
        mimetype: data.ContentType,
        eTag: data.ETag,
        lastModified: data.LastModified,
        contentRange: data.ContentRange,
        httpStatusCode: data.$metadata.httpStatusCode,
      },
      body: data.Body,
    }
  }

  /**
   * Streams `body` to S3 via lib-storage's Upload, which handles multipart
   * chunking for streams of unknown length.
   */
  async uploadObject(
    bucketName: string,
    key: string,
    body: NodeJS.ReadableStream,
    contentType: string,
    cacheControl: string
  ): Promise<ObjectMetadata> {
    const parallelUpload = new Upload({
      client: this.client,
      params: {
        Bucket: bucketName,
        Key: key,
        /* @ts-expect-error: https://github.com/aws/aws-sdk-js-v3/issues/2085 */
        Body: body,
        ContentType: contentType,
        CacheControl: cacheControl,
      },
    })

    const data = await parallelUpload.done()
    return {
      httpStatusCode: data.$metadata.httpStatusCode,
    }
  }

  /** Deletes one object; S3 responds successfully even for absent keys. */
  async deleteObject(bucket: string, key: string): Promise<ObjectMetadata> {
    const command = new DeleteObjectCommand({
      Bucket: bucket,
      Key: key,
    })
    await this.client.send(command)
    return {}
  }

  /** Server-side copy of an object within the same bucket. */
  async copyObject(bucket: string, source: string, destination: string): Promise<ObjectMetadata> {
    const command = new CopyObjectCommand({
      Bucket: bucket,
      // CopySource must include the source bucket, not just the key.
      CopySource: `/${bucket}/${source}`,
      Key: destination,
    })
    const data = await this.client.send(command)
    return {
      httpStatusCode: data.$metadata.httpStatusCode,
    }
  }

  /** Deletes a batch of objects in a single DeleteObjects request. */
  async deleteObjects(bucket: string, prefixes: string[]): Promise<ObjectMetadata> {
    const s3Prefixes = prefixes.map((ele) => {
      return { Key: ele }
    })

    const command = new DeleteObjectsCommand({
      Bucket: bucket,
      Delete: {
        Objects: s3Prefixes,
      },
    })
    await this.client.send(command)
    return {}
  }

  /** Fetches an object's size and status without downloading its body. */
  async headObject(bucket: string, key: string): Promise<ObjectMetadata> {
    const command = new HeadObjectCommand({
      Bucket: bucket,
      Key: key,
    })
    const data = await this.client.send(command)
    return {
      httpStatusCode: data.$metadata.httpStatusCode,
      size: data.ContentLength,
    }
  }
}
Loading

0 comments on commit abecbda

Please sign in to comment.