feat: custom metadata on upload (#518)
fenos authored Jul 9, 2024
1 parent 8347d13 commit d0f0348
Showing 31 changed files with 555 additions and 150 deletions.
2 changes: 2 additions & 0 deletions migrations/tenant/0025-custom-metadata.sql
@@ -0,0 +1,2 @@
ALTER TABLE storage.objects ADD COLUMN user_metadata jsonb NULL;
ALTER TABLE storage.s3_multipart_uploads ADD COLUMN user_metadata jsonb NULL;
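The migration adds a nullable `user_metadata` jsonb column to both `storage.objects` and `storage.s3_multipart_uploads`. Below is a minimal sketch of reading the new column back with knex (the query builder used elsewhere in this repo); the connection string and the helper name are illustrative assumptions, not part of this commit:

```ts
import knex from 'knex'

const db = knex({ client: 'pg', connection: process.env.DATABASE_URL ?? '' })

// user_metadata is a nullable jsonb column, so callers should handle null.
async function getUserMetadata(bucketId: string, objectName: string) {
  const row = await db('objects')
    .withSchema('storage')
    .select('user_metadata')
    .where({ bucket_id: bucketId, name: objectName })
    .first()
  return row?.user_metadata ?? null
}
```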
78 changes: 39 additions & 39 deletions package-lock.json

Some generated files are not rendered by default.

6 changes: 3 additions & 3 deletions package.json
@@ -44,9 +44,9 @@
"@opentelemetry/instrumentation-pino": "^0.39.0",
"@shopify/semaphore": "^3.0.2",
"@smithy/node-http-handler": "^2.3.1",
"@tus/file-store": "1.3.1",
"@tus/s3-store": "1.4.1",
"@tus/server": "1.4.1",
"@tus/file-store": "1.4.0",
"@tus/s3-store": "1.5.0",
"@tus/server": "1.7.0",
"agentkeepalive": "^4.5.0",
"ajv": "^8.12.0",
"async-retry": "^1.3.3",
11 changes: 8 additions & 3 deletions src/http/routes/object/copyObject.ts
@@ -11,6 +11,7 @@ const copyRequestBodySchema = {
sourceKey: { type: 'string', examples: ['folder/source.png'] },
destinationBucket: { type: 'string', examples: ['users'] },
destinationKey: { type: 'string', examples: ['folder/destination.png'] },
copyMetadata: { type: 'boolean', examples: [true] },
},
required: ['sourceKey', 'bucketId', 'destinationKey'],
} as const
@@ -51,9 +52,13 @@ export default async function routes(fastify: FastifyInstance) {

const destinationBucketId = destinationBucket || bucketId

const result = await request.storage
.from(bucketId)
.copyObject(sourceKey, destinationBucketId, destinationKey, request.owner)
const result = await request.storage.from(bucketId).copyObject({
sourceKey,
destinationBucket: destinationBucketId,
destinationKey,
owner: request.owner,
copyMetadata: request.body.copyMetadata ?? true,
})

return response.status(result.httpStatusCode ?? 200).send({
Id: result.destObject.id,
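A minimal sketch of exercising the new `copyMetadata` flag over HTTP. The base URL and the assumption that this route is mounted at `POST /object/copy` are illustrative; the body fields mirror `copyRequestBodySchema` above:

```ts
const STORAGE_URL = 'https://example.com/storage/v1' // assumed base URL
const accessToken = process.env.ACCESS_TOKEN ?? ''

async function copyWithMetadata() {
  const res = await fetch(`${STORAGE_URL}/object/copy`, {
    method: 'POST',
    headers: {
      authorization: `Bearer ${accessToken}`,
      'content-type': 'application/json',
    },
    body: JSON.stringify({
      bucketId: 'avatars',
      sourceKey: 'folder/source.png',
      destinationBucket: 'users',
      destinationKey: 'folder/destination.png',
      copyMetadata: true, // the handler above defaults to true when omitted
    }),
  })
  console.log(res.status, await res.json())
}
```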
21 changes: 14 additions & 7 deletions src/http/routes/object/getObjectInfo.ts
@@ -30,7 +30,8 @@ async function requestHandler(
getObjectRequestInterface,
unknown
>,
publicRoute = false
publicRoute = false,
method: 'head' | 'info' = 'head'
) {
const { bucketName } = request.params
const objectName = request.params['*']
@@ -42,15 +43,21 @@
await request.storage.asSuperUser().findBucket(bucketName, 'id', {
isPublic: true,
})
obj = await request.storage.asSuperUser().from(bucketName).findObject(objectName, 'id,version')
obj = await request.storage
.asSuperUser()
.from(bucketName)
.findObject(objectName, 'id,version,metadata,user_metadata,created_at')
} else {
obj = await request.storage.from(bucketName).findObject(objectName, 'id,version')
obj = await request.storage
.from(bucketName)
.findObject(objectName, 'id,version,metadata,user_metadata,created_at')
}

return request.storage.renderer('head').render(request, response, {
return request.storage.renderer(method).render(request, response, {
bucket: storageS3Bucket,
key: s3Key,
version: obj.version,
object: obj,
})
}

@@ -90,7 +97,7 @@ export async function publicRoutes(fastify: FastifyInstance) {
},
},
async (request, response) => {
return requestHandler(request, response, true)
return requestHandler(request, response, true, 'info')
}
)
}
@@ -131,7 +138,7 @@ export async function authenticatedRoutes(fastify: FastifyInstance) {
},
},
async (request, response) => {
return requestHandler(request, response)
return requestHandler(request, response, false, 'info')
}
)

@@ -151,7 +158,7 @@
},
},
async (request, response) => {
return requestHandler(request, response)
return requestHandler(request, response, false, 'info')
}
)

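A hedged sketch of fetching object info once the `info` renderer is wired in. The exact mount point of the info route and the response field names are assumptions inferred from the columns selected above (`metadata`, `user_metadata`, `created_at`):

```ts
const STORAGE_URL = 'https://example.com/storage/v1' // assumed base URL
const accessToken = process.env.ACCESS_TOKEN ?? ''

async function getObjectInfo() {
  // The route path below is an assumption for illustration.
  const res = await fetch(`${STORAGE_URL}/object/info/avatars/folder/avatar.png`, {
    headers: { authorization: `Bearer ${accessToken}` },
  })
  const info = await res.json()
  // Expected to include the object's metadata and user_metadata alongside created_at.
  console.log(info)
}
```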
4 changes: 4 additions & 0 deletions src/http/routes/s3/commands/create-multipart-upload.ts
@@ -21,6 +21,7 @@ const CreateMultiPartUploadInput = {
},
Headers: {
type: 'object',
additionalProperties: true,
properties: {
authorization: { type: 'string' },
'content-type': { type: 'string' },
@@ -39,13 +40,16 @@ export default function CreateMultipartUpload(s3Router: S3Router) {
(req, ctx) => {
const s3Protocol = new S3ProtocolHandler(ctx.storage, ctx.tenantId, ctx.owner)

const metadata = s3Protocol.parseMetadataHeaders(req.Headers)

return s3Protocol.createMultiPartUpload({
Bucket: req.Params.Bucket,
Key: req.Params['*'],
ContentType: req.Headers?.['content-type'],
CacheControl: req.Headers?.['cache-control'],
ContentDisposition: req.Headers?.['content-disposition'],
ContentEncoding: req.Headers?.['content-encoding'],
Metadata: metadata,
})
}
)
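A sketch of how custom metadata would reach this handler from an S3 client: with the AWS SDK v3, `Metadata` entries are sent as `x-amz-meta-*` headers, which `parseMetadataHeaders` presumably collects into the upload's user metadata. The endpoint, region, and credentials below are placeholders:

```ts
import { S3Client, CreateMultipartUploadCommand } from '@aws-sdk/client-s3'

const client = new S3Client({
  endpoint: 'https://example.com/storage/v1/s3', // assumed S3-compatible endpoint
  region: 'us-east-1',
  forcePathStyle: true,
  credentials: {
    accessKeyId: process.env.S3_ACCESS_KEY ?? '',
    secretAccessKey: process.env.S3_SECRET_KEY ?? '',
  },
})

async function startMultipartUpload() {
  const { UploadId } = await client.send(
    new CreateMultipartUploadCommand({
      Bucket: 'avatars',
      Key: 'folder/video.mp4',
      ContentType: 'video/mp4',
      // Sent as x-amz-meta-* headers on the wire.
      Metadata: { reviewed: 'false', team: 'design' },
    })
  )
  console.log('upload id', UploadId)
}
```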
4 changes: 4 additions & 0 deletions src/http/routes/s3/commands/upload-part.ts
@@ -94,6 +94,9 @@ export default function UploadPart(s3Router: S3Router) {
},
(req, ctx) => {
const s3Protocol = new S3ProtocolHandler(ctx.storage, ctx.tenantId, ctx.owner)

const metadata = s3Protocol.parseMetadataHeaders(req.Headers)

return s3Protocol.putObject({
Body: ctx.req as any,
Bucket: req.Params.Bucket,
@@ -102,6 +105,7 @@
ContentType: req.Headers?.['content-type'],
Expires: req.Headers?.['expires'] ? new Date(req.Headers?.['expires']) : undefined,
ContentEncoding: req.Headers?.['content-encoding'],
Metadata: metadata,
})
}
)
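The same applies to single-request uploads handled by `putObject` above. A short sketch with `PutObjectCommand`, reusing a client configured as in the previous example:

```ts
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'

async function putObjectWithMetadata(client: S3Client) {
  await client.send(
    new PutObjectCommand({
      Bucket: 'avatars',
      Key: 'notes/readme.txt',
      Body: Buffer.from('hello world'),
      ContentType: 'text/plain',
      CacheControl: 'max-age=3600',
      Metadata: { reviewed: 'false' }, // transmitted as x-amz-meta-reviewed
    })
  )
}
```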
29 changes: 21 additions & 8 deletions src/http/routes/tus/lifecycle.ts
@@ -154,7 +154,7 @@ export async function onCreate(
rawReq: http.IncomingMessage,
res: http.ServerResponse,
upload: Upload
): Promise<http.ServerResponse> {
): Promise<{ res: http.ServerResponse; metadata?: Upload['metadata'] }> {
const uploadID = UploadId.fromString(upload.id)

const req = rawReq as MultiPartRequest
@@ -166,17 +166,21 @@

const uploader = new Uploader(storage.backend, storage.db)

if (upload.metadata && /^-?\d+$/.test(upload.metadata.cacheControl || '')) {
upload.metadata.cacheControl = `max-age=${upload.metadata.cacheControl}`
} else if (upload.metadata) {
upload.metadata.cacheControl = 'no-cache'
const metadata = {
...(upload.metadata ? upload.metadata : {}),
}

if (upload.metadata?.contentType && bucket.allowed_mime_types) {
uploader.validateMimeType(upload.metadata.contentType, bucket.allowed_mime_types)
if (/^-?\d+$/.test(metadata.cacheControl || '')) {
metadata.cacheControl = `max-age=${metadata.cacheControl}`
} else if (metadata) {
metadata.cacheControl = 'no-cache'
}

return res
if (metadata?.contentType && bucket.allowed_mime_types) {
uploader.validateMimeType(metadata.contentType, bucket.allowed_mime_types)
}

return { res, metadata }
}

/**
@@ -199,6 +203,14 @@ export async function onUploadFinish(
)

const uploader = new Uploader(req.upload.storage.backend, req.upload.storage.db)
let customMd: undefined | Record<string, string> = undefined
if (upload.metadata?.userMetadata) {
try {
customMd = JSON.parse(upload.metadata.userMetadata)
} catch (e) {
// no-op
}
}

await uploader.completeUpload({
version: resourceId.version,
Expand All @@ -208,6 +220,7 @@ export async function onUploadFinish(
isUpsert: req.upload.isUpsert,
uploadType: 'resumable',
owner: req.upload.owner,
userMetadata: customMd,
})

res.setHeader('Tus-Complete', '1')
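For resumable uploads, `onUploadFinish` JSON-parses a `userMetadata` entry from the TUS upload metadata. A minimal client-side sketch with tus-js-client; the endpoint URL is a placeholder and the metadata keys are assumptions matching the lifecycle code above:

```ts
import * as tus from 'tus-js-client'

const file = new Blob(['hello world'], { type: 'text/plain' })
const accessToken = process.env.ACCESS_TOKEN ?? ''

const upload = new tus.Upload(file, {
  endpoint: 'https://example.com/storage/v1/upload/resumable', // assumed endpoint
  headers: { authorization: `Bearer ${accessToken}` },
  metadata: {
    bucketName: 'avatars',
    objectName: 'folder/notes.txt',
    contentType: 'text/plain',
    cacheControl: '3600', // numeric string becomes `max-age=3600` in onCreate
    // onUploadFinish JSON.parses this key into the object's user metadata
    userMetadata: JSON.stringify({ reviewed: 'false', team: 'design' }),
  },
  onSuccess: () => console.log('upload complete'),
})

upload.start()
```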
10 changes: 7 additions & 3 deletions src/internal/database/connection.ts
@@ -53,7 +53,6 @@ export const connections = new TTLCache<string, Knex>({
if (!pool) return
try {
await pool.destroy()
pool.client.removeAllListeners()
} catch (e) {
logSchema.error(logger, 'pool was not able to be destroyed', {
type: 'db',
@@ -155,7 +154,6 @@ export class TenantConnection {
async dispose() {
if (this.options.isExternalPool) {
await this.pool.destroy()
this.pool.client.removeAllListeners()
}
}

@@ -201,7 +199,13 @@ export class TenantConnection {
// This should never be reached, since the above promise is always rejected in this edge case.
throw ERRORS.DatabaseError('Transaction already completed')
}
await tnx.raw(`SELECT set_config('search_path', ?, true)`, [searchPath.join(', ')])

try {
await tnx.raw(`SELECT set_config('search_path', ?, true)`, [searchPath.join(', ')])
} catch (e) {
await tnx.rollback()
throw e
}
}

return tnx
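The `search_path` change wraps per-transaction setup in a try/rollback so a failed `set_config` does not leak a half-configured transaction back to the pool. A generic sketch of the same pattern with knex:

```ts
import { Knex } from 'knex'

// Open a transaction and configure its search_path; roll back and rethrow
// if the setup statement fails, mirroring the change above.
async function beginWithSearchPath(db: Knex, searchPath: string[]): Promise<Knex.Transaction> {
  const tnx = await db.transaction()
  try {
    await tnx.raw(`SELECT set_config('search_path', ?, true)`, [searchPath.join(', ')])
  } catch (e) {
    await tnx.rollback()
    throw e
  }
  return tnx
}
```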
(The remaining changed files are not rendered here.)
