CloudTAK Local Support - Minio Store #448

Merged · 2 commits · Dec 13, 2024
8 changes: 7 additions & 1 deletion api/index.ts

@@ -235,7 +235,13 @@ export default async function server(config: Config) {
 
     return new Promise((resolve) => {
        const srv = app.listen(5001, () => {
-            if (!config.silent) console.log('ok - http://localhost:5001');
+            if (!config.silent) {
+                if (process.env.CLOUDTAK_Mode === 'docker-compose') {
+                    console.log('ok - http://localhost:5000');
+                } else {
+                    console.log('ok - http://localhost:5001');
+                }
+            }
            return resolve(srv);
        });
 
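The runtime behaviour is unchanged here: the API process still listens on port 5001, and only the startup banner differs, printing port 5000 when CLOUDTAK_Mode is 'docker-compose'. That presumably reflects a compose port mapping of host 5000 to container 5001; the mapping itself is not part of this diff, so treat it as an assumption. A minimal sketch of the same selection factored into a helper, purely illustrative and not code from the PR:

// Illustrative sketch only, not part of the PR. Assumes the docker-compose stack
// maps host port 5000 to the container's 5001, while bare-metal runs use 5001 directly.
function bannerUrl(): string {
    return process.env.CLOUDTAK_Mode === 'docker-compose'
        ? 'http://localhost:5000'
        : 'http://localhost:5001';
}

// Usage matching the merged change:
// if (!config.silent) console.log(`ok - ${bannerUrl()}`);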
60 changes: 37 additions & 23 deletions api/lib/aws/s3.ts

@@ -1,3 +1,4 @@
+import type { S3ClientConfig } from '@aws-sdk/client-s3'
 import * as S3AWS from '@aws-sdk/client-s3';
 import { Upload } from '@aws-sdk/lib-storage';
 import Err from '@openaddresses/batch-error';
@@ -8,11 +9,35 @@ import process from 'node:process';
  * @class
  */
 export default class S3 {
+    static #client() {
+        if (!process.env.ASSET_BUCKET) throw new Err(400, null, 'ASSET_BUCKET not set');
+
+        const config: S3ClientConfig = {
+            region: process.env.AWS_REGION
+        };
+
+        if (process.env.AWS_S3_Endpoint) {
+            config.endpoint = process.env.AWS_S3_Endpoint;
+            config.forcePathStyle = true;
+            config.sslEnabled = false;
+
+            if (!process.env.AWS_S3_AccessKeyId || !process.env.AWS_S3_SecretAccessKey) {
+                throw new Error('Cannot use custom S3 Endpoint without providing AWS_S3_AccessKeyId & AWS_S3_SecretAccessKey');
+            }
+
+            config.credentials = {
+                accessKeyId: process.env.AWS_S3_AccessKeyId,
+                secretAccessKey: process.env.AWS_S3_SecretAccessKey
+            }
+        }
+
+        return new S3AWS.S3Client(config);
+    }
+
     static async head(key: string): Promise<S3AWS.HeadObjectCommandOutput> {
         try {
-            if (!process.env.ASSET_BUCKET) throw new Err(400, null, 'ASSET_BUCKET not set');
+            const s3 = this.#client();
 
-            const s3 = new S3AWS.S3Client({ region: process.env.AWS_REGION });
             const head = await s3.send(new S3AWS.HeadObjectCommand({
                 Bucket: process.env.ASSET_BUCKET,
                 Key: key
@@ -26,9 +51,7 @@
 
     static async put(key: string, body: Readable | string): Promise<void> {
         try {
-            if (!process.env.ASSET_BUCKET) throw new Err(400, null, 'ASSET_BUCKET not set');
-
-            const s3 = new S3AWS.S3Client({ region: process.env.AWS_REGION });
+            const s3 = this.#client();
 
             const upload = new Upload({
                 client: s3,
@@ -47,9 +70,7 @@
 
     static async get(key: string): Promise<Readable> {
         try {
-            if (!process.env.ASSET_BUCKET) throw new Err(400, null, 'ASSET_BUCKET not set');
-
-            const s3 = new S3AWS.S3Client({ region: process.env.AWS_REGION });
+            const s3 = this.#client();
 
             const res = await s3.send(new S3AWS.GetObjectCommand({
                 Bucket: process.env.ASSET_BUCKET,
@@ -65,9 +86,8 @@
 
     static async exists(key: string): Promise<boolean> {
         try {
-            if (!process.env.ASSET_BUCKET) throw new Err(400, null, 'ASSET_BUCKET not set');
+            const s3 = this.#client();
 
-            const s3 = new S3AWS.S3Client({ region: process.env.AWS_REGION });
             await s3.send(new S3AWS.HeadObjectCommand({
                 Bucket: process.env.ASSET_BUCKET,
                 Key: key
@@ -86,9 +106,8 @@
      */
     static async list(fragment: string): Promise<Array<S3AWS._Object>> {
         try {
-            if (!process.env.ASSET_BUCKET) throw new Err(400, null, 'ASSET_BUCKET not set');
+            const s3 = this.#client();
 
-            const s3 = new S3AWS.S3Client({ region: process.env.AWS_REGION });
             const list = await s3.send(new S3AWS.ListObjectsV2Command({
                 Bucket: process.env.ASSET_BUCKET,
                 Prefix: fragment
@@ -110,20 +129,15 @@
     static async del(key: string, opts: {
         recurse: boolean
     } = { recurse: false }): Promise<void> {
-        if (!process.env.ASSET_BUCKET) return;
-        const s3 = new S3AWS.S3Client({ region: process.env.AWS_REGION });
+        try {
+            const s3 = this.#client();
 
-        if (!opts.recurse) {
-            try {
+            if (!opts.recurse) {
                 await s3.send(new S3AWS.DeleteObjectCommand({
                     Bucket: process.env.ASSET_BUCKET,
                     Key: key
                 }));
-            } catch (err) {
-                throw new Err(500, new Error(err instanceof Error ? err.message : String(err)), 'Failed to delete file');
-            }
-        } else {
-            try {
+            } else {
                 const list = await this.list(key);
 
                 if (!list.length) return;
@@ -138,9 +152,9 @@
                         })
                     }
                 }));
-            } catch (err) {
-                throw new Err(500, new Error(err instanceof Error ? err.message : String(err)), 'Failed to delete files');
             }
+        } catch (err) {
+            throw new Err(500, new Error(err instanceof Error ? err.message : String(err)), 'Failed to delete files');
         }
     }
 }
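Together these changes let the same S3 class target either AWS S3 (the region-based client, unchanged) or a local MinIO store: when AWS_S3_Endpoint is set, #client() switches to a path-style endpoint with explicit credentials, and every method now builds its client through that one helper. A rough usage sketch under assumed local settings; the endpoint, credentials, bucket name, and import path below are illustrative, not values taken from this PR:

import S3 from './lib/aws/s3.js'; // path assumed relative to the api/ root

// Hypothetical local environment; with AWS_S3_Endpoint unset, #client() falls back
// to the plain region-based S3Client used against AWS.
process.env.ASSET_BUCKET = 'cloudtak';              // required by every method
process.env.AWS_REGION = 'us-east-1';
process.env.AWS_S3_Endpoint = 'http://minio:9000';  // MinIO service name and port are assumptions
process.env.AWS_S3_AccessKeyId = 'minioadmin';      // example credentials only
process.env.AWS_S3_SecretAccessKey = 'minioadmin';

// The public API is untouched; every call routes through the new #client() helper.
await S3.put('imports/example.geojson', JSON.stringify({ type: 'FeatureCollection', features: [] }));
console.log(await S3.exists('imports/example.geojson')); // true once the upload completes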