Commit 10686c3

cirospaciari committed Dec 27, 2024
1 parent a45df8f commit 10686c3
Showing 5 changed files with 127 additions and 42 deletions.
4 changes: 2 additions & 2 deletions packages/bun-types/bun.d.ts
@@ -529,7 +529,7 @@ declare module "bun" {
*/
// tslint:disable-next-line:unified-signatures
function write(
destination: BunFile | Bun.PathLike,
destination: BunFile | S3File | Bun.PathLike,
input: Blob | NodeJS.TypedArray | ArrayBufferLike | string | Bun.BlobPart[],
options?: {
/** If writing to a PathLike, set the permissions of the file. */
@@ -1217,7 +1217,7 @@ declare module "bun" {
* @param options - The options to use for the write.
*/
write(
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response,
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile,
options?: { highWaterMark?: number },
): Promise<number>;

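The two hunks above widen Bun.write so an S3File can be passed as the destination, and add BunFile to the inputs accepted by the incremental write(data, { highWaterMark? }) method. A minimal sketch of the first change from the TypeScript side, assuming an s3:// path resolves to an S3-backed file as the blob.zig changes below suggest (the bucket and key names are placeholders):

```ts
// Sketch only: "my-bucket" and "report.json" are placeholders, and the s3://
// routing is inferred from the blob.zig changes in this commit.
const s3file = Bun.file("s3://my-bucket/report.json");

// Bun.write() now also accepts an S3File (or the s3:// path itself) as the
// destination, not just a local BunFile or path.
await Bun.write(s3file, JSON.stringify({ ok: true }));
await Bun.write("s3://my-bucket/report-copy.json", s3file);
```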
6 changes: 6 additions & 0 deletions packages/bun-types/globals.d.ts
@@ -135,6 +135,7 @@ type _Body = typeof globalThis extends { onerror: any }
readonly text: () => Promise<string>;
};

import { S3FileOptions } from "bun";
import type { TextDecoder as NodeTextDecoder, TextEncoder as NodeTextEncoder } from "util";
import type { MessagePort } from "worker_threads";
import type { WebSocket as _WebSocket } from "ws";
@@ -815,6 +816,11 @@ declare global {
rejectUnauthorized?: boolean | undefined; // Defaults to true
checkServerIdentity?: any; // TODO: change `any` to `checkServerIdentity`
};

/**
* Override the default S3 options
*/
s3?: S3FileOptions;
}

/**
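The globals.d.ts change adds an s3 field next to the existing tls option in the fetch init object, so the default S3 settings can be overridden per request. A hedged sketch of what a call might look like; the exact shape of S3FileOptions is not visible in this diff, so the empty options object below is a placeholder:

```ts
import type { S3FileOptions } from "bun";

// Placeholder: the fields of S3FileOptions (credentials, multipart settings,
// and so on) are not shown in this diff, so none are filled in here.
const s3Options = {} as S3FileOptions;

// fetch() routes s3:// URLs through the S3 path (see response.zig below), and
// the new `s3` option overrides the default S3 configuration for this request.
const res = await fetch("s3://my-bucket/data.json", { s3: s3Options });
console.log(await res.json());
```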
118 changes: 92 additions & 26 deletions src/bun.js/webcore/blob.zig
@@ -498,6 +498,7 @@ pub const Blob = struct {
const blob = Blob.new(Blob.findOrCreateFileFromPath(
&path_or_fd,
globalThis,
true,
));

break :file blob;
@@ -514,6 +515,7 @@ pub const Blob = struct {
const blob = Blob.new(Blob.findOrCreateFileFromPath(
&dest,
globalThis,
true,
));

break :file blob;
@@ -1246,7 +1248,7 @@ pub const Blob = struct {

// if path_or_blob is a path, convert it into a file blob
var destination_blob: Blob = if (path_or_blob == .path) brk: {
break :brk Blob.findOrCreateFileFromPath(&path_or_blob.path, globalThis);
break :brk Blob.findOrCreateFileFromPath(&path_or_blob.path, globalThis, true);
} else path_or_blob.blob.dupe();

if (destination_blob.store == null) {
@@ -1284,6 +1286,7 @@ pub const Blob = struct {
}
const proxy = globalThis.bunVM().bundler.env.getHttpProxy(true, null);
const proxy_url = if (proxy) |p| p.href else null;

return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), readable, globalThis, aws_options.options, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined);
}
destination_blob.detach();
@@ -1833,18 +1836,18 @@ pub const Blob = struct {
var args = JSC.Node.ArgumentsSlice.init(vm, arguments);
defer args.deinit();

const path = (JSC.Node.PathLike.fromJS(globalThis, &args)) catch |err| switch (err) {
const path_or_fd = (JSC.Node.PathLike.fromJS(globalThis, &args)) catch |err| switch (err) {
error.JSError => null,
error.OutOfMemory => {
globalThis.throwOutOfMemory() catch {};
return null;
},
};
if (path == null) {
if (path_or_fd == null) {
globalThis.throwInvalidArguments("Expected file path string", .{}) catch return null;
return null;
}
return constructS3FileInternal(globalThis, path.?, args.nextEat()) catch |err| switch (err) {
return constructS3FileInternal(globalThis, path_or_fd.?, args.nextEat()) catch |err| switch (err) {
error.JSError => null,
error.OutOfMemory => {
globalThis.throwOutOfMemory() catch {};
@@ -1992,10 +1995,12 @@ pub const Blob = struct {
path: JSC.Node.PathLike,
options: ?JSC.JSValue,
) bun.JSError!Blob {

// get ENV config
var aws_options = try AWS.getCredentialsWithOptions(globalObject.bunVM().bundler.env.getAWSCredentials(), options, globalObject);
defer aws_options.deinit();
const store = Blob.Store.initS3(path, null, aws_options.credentials, bun.default_allocator) catch bun.outOfMemory();
errdefer store.deinit();
store.data.s3.options = aws_options.options;

var blob = Blob.initWithStore(store, globalObject);
@@ -2054,14 +2059,16 @@ pub const Blob = struct {
var path = (try JSC.Node.PathOrFileDescriptor.fromJS(globalObject, &args, bun.default_allocator)) orelse {
return globalObject.throwInvalidArguments("Expected file path string or file descriptor", .{});
};
defer path.deinitAndUnprotect();
const options = if (arguments.len >= 2) arguments[1] else null;

if (path == .path) {
if (strings.startsWith(path.path.slice(), "s3://")) {
return constructS3FileInternalJS(globalObject, path.path, options);
return try constructS3FileInternalJS(globalObject, path.path, options);
}
}
var blob = Blob.findOrCreateFileFromPath(&path, globalObject);
defer path.deinitAndUnprotect();

var blob = Blob.findOrCreateFileFromPath(&path, globalObject, false);

if (options) |opts| {
if (opts.isObject()) {
@@ -2112,10 +2119,19 @@ pub const Blob = struct {
return constructS3FileInternalJS(globalObject, path, args.nextEat());
}

pub fn findOrCreateFileFromPath(path_or_fd: *JSC.Node.PathOrFileDescriptor, globalThis: *JSGlobalObject) Blob {
pub fn findOrCreateFileFromPath(path_or_fd: *JSC.Node.PathOrFileDescriptor, globalThis: *JSGlobalObject, comptime check_s3: bool) Blob {
var vm = globalThis.bunVM();
const allocator = bun.default_allocator;

if (check_s3) {
if (path_or_fd.* == .path) {
if (strings.startsWith(path_or_fd.path.slice(), "s3://")) {
const credentials = globalThis.bunVM().bundler.env.getAWSCredentials();
const path = path_or_fd.*.path;
path_or_fd.* = .{ .path = .{ .string = bun.PathString.empty } };
return Blob.initWithStore(Blob.Store.initS3(path, null, credentials, allocator) catch bun.outOfMemory(), globalThis);
}
}
}
const path: JSC.Node.PathOrFileDescriptor = brk: {
switch (path_or_fd.*) {
.path => {
@@ -2170,13 +2186,6 @@ pub const Blob = struct {
}
};

if (path == .path) {
if (strings.startsWith(path.path.slice(), "s3://")) {
const credentials = globalThis.bunVM().bundler.env.getAWSCredentials();
return Blob.initWithStore(Blob.Store.initS3(path.path, null, credentials, allocator) catch bun.outOfMemory(), globalThis);
}
}

return Blob.initWithStore(Blob.Store.initFile(path, null, allocator) catch bun.outOfMemory(), globalThis);
}

@@ -2240,12 +2249,16 @@ pub const Blob = struct {
}

pub fn initS3(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: AWSCredentials, allocator: std.mem.Allocator) !*Store {
var path = pathlike;
// this actually protects/refs the pathlike
path.toThreadSafe();

const store = Blob.Store.new(.{
.data = .{
.s3 = S3Store.init(
pathlike,
path,
mime_type orelse brk: {
const sliced = pathlike.slice();
const sliced = path.slice();
if (sliced.len > 0) {
var extname = std.fs.path.extension(sliced);
extname = std.mem.trim(u8, extname, ".");
@@ -2332,7 +2345,7 @@ pub const Blob = struct {
}
}
},
.s3 => |s3| {
.s3 => |*s3| {
s3.deinit(allocator);
},
}
@@ -3756,14 +3769,15 @@ pub const Blob = struct {
pub const S3Store = struct {
pathlike: JSC.Node.PathLike,
mime_type: http.MimeType = http.MimeType.other,
credentials: *AWSCredentials,
credentials: ?*AWSCredentials,
options: S3MultiPartUpload.MultiPartUploadOptions = .{},
pub fn isSeekable(_: *const @This()) ?bool {
return true;
}

pub fn getCredentials(this: *const @This()) *AWSCredentials {
return this.credentials;
bun.assert(this.credentials != null);
return this.credentials.?;
}

pub fn getCredentialsWithOptions(this: *const @This(), options: ?JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!AWS.AWSCredentialsWithOptions {
@@ -3832,16 +3846,22 @@ pub const Blob = struct {
};
}
pub fn estimatedSize(this: *const @This()) usize {
return this.pathlike.estimatedSize() + this.credentials.estimatedSize();
return this.pathlike.estimatedSize() + if (this.credentials) |credentials| credentials.estimatedSize() else 0;
}

pub fn deinit(this: *const @This(), allocator: std.mem.Allocator) void {
pub fn deinit(this: *@This(), allocator: std.mem.Allocator) void {
if (this.pathlike == .string) {
allocator.free(@constCast(this.pathlike.slice()));
} else {
this.pathlike.deinit();
}
this.credentials.deref();
this.pathlike = .{
.string = bun.PathString.empty,
};
if (this.credentials) |credentials| {
credentials.deref();
this.credentials = null;
}
}
};

@@ -4280,6 +4300,29 @@ pub const Blob = struct {
}
mkdirp_if_not_exists = create_directory.toBoolean();
}
if (try options_object.getTruthy(globalThis, "type")) |content_type| {
//override the content type
if (!content_type.isString()) {
return globalThis.throwInvalidArgumentType("write", "options.type", "string");
}
var content_type_str = content_type.toSlice(globalThis, bun.default_allocator);
defer content_type_str.deinit();
const slice = content_type_str.slice();
if (strings.isAllASCII(slice)) {
if (this.content_type_allocated) {
bun.default_allocator.free(this.content_type);
}
this.content_type_was_set = true;

if (globalThis.bunVM().mimeType(slice)) |mime| {
this.content_type = mime.value;
} else {
const content_type_buf = bun.default_allocator.alloc(u8, slice.len) catch bun.outOfMemory();
this.content_type = strings.copyLowercase(slice, content_type_buf);
this.content_type_allocated = true;
}
}
}
} else if (!options_object.isEmptyOrUndefinedOrNull()) {
return globalThis.throwInvalidArgumentType("write", "options", "object");
}
@@ -4631,9 +4674,32 @@ pub const Blob = struct {
const path = s3.path();
const proxy = globalThis.bunVM().bundler.env.getHttpProxy(true, null);
const proxy_url = if (proxy) |p| p.href else null;
if (arguments.len > 1) {
const options = arguments.ptr[1];
if (arguments.len > 0) {
const options = arguments.ptr[0];
if (options.isObject()) {
if (try options.getTruthy(globalThis, "type")) |content_type| {
//override the content type
if (!content_type.isString()) {
return globalThis.throwInvalidArgumentType("write", "options.type", "string");
}
var content_type_str = content_type.toSlice(globalThis, bun.default_allocator);
defer content_type_str.deinit();
const slice = content_type_str.slice();
if (strings.isAllASCII(slice)) {
if (this.content_type_allocated) {
bun.default_allocator.free(this.content_type);
}
this.content_type_was_set = true;

if (globalThis.bunVM().mimeType(slice)) |mime| {
this.content_type = mime.value;
} else {
const content_type_buf = bun.default_allocator.alloc(u8, slice.len) catch bun.outOfMemory();
this.content_type = strings.copyLowercase(slice, content_type_buf);
this.content_type_allocated = true;
}
}
}
const credentialsWithOptions = try s3.getCredentialsWithOptions(options, globalThis);
return try credentialsWithOptions.credentials.dupe().s3WritableStream(path, globalThis, credentialsWithOptions.options, this.contentTypeOrMimeType(), proxy_url);
}
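Taken together, the blob.zig changes make Bun.file() and the write path S3-aware: findOrCreateFileFromPath gains a comptime check_s3 flag and builds an S3 store for s3:// paths using credentials from the environment, initS3 protects the pathlike and guesses a MIME type from the key's extension, and the write options now accept a type field that overrides that content type. A small sketch of the user-facing effect, with placeholder bucket and key names and the assumption that the type option is the one parsed in the write path above:

```ts
// Sketch; bucket/key are placeholders and the behavior is inferred from the
// blob.zig hunks above (s3:// detection plus the new `type` option).
const s3file = Bun.file("s3://my-bucket/report"); // no extension, so no MIME guess

// The `type` option overrides the content type used for the upload; without
// it, blob.zig falls back to guessing from the file extension.
await Bun.write(s3file, JSON.stringify({ ok: true }), {
  type: "application/json",
});
```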
39 changes: 26 additions & 13 deletions src/bun.js/webcore/response.zig
@@ -734,7 +734,14 @@ pub const Response = struct {
};

const null_fd = bun.invalid_fd;
fn setHeaders(headers: *?Headers, new_headers: []const picohttp.Header, allocator: std.mem.Allocator) void {
var old = headers.*;
headers.* = Headers.fromPicoHttpHeaders(new_headers, allocator) catch bun.outOfMemory();

if (old) |*headers_| {
headers_.deinit();
}
}
pub const Fetch = struct {
const headers_string = "headers";
const method_string = "method";
@@ -3097,6 +3104,7 @@ pub const Fetch = struct {
break :blob Blob.findOrCreateFileFromPath(
&pathlike,
globalThis,
true,
);
};

@@ -3378,9 +3386,7 @@ pub const Fetch = struct {
url = ZigURL.parse(result.url);
result.url = ""; // fetch now owns this
}
if (headers) |*headers_| {
headers_.deinit();
}

const content_type = if (headers) |h| h.getContentType() else null;

if (range) |range_| {
@@ -3392,7 +3398,8 @@ pub const Fetch = struct {
_headers[3],
.{ .name = "range", .value = range_ },
};
headers = Headers.fromPicoHttpHeaders(&headersWithRange, allocator) catch bun.outOfMemory();

setHeaders(&headers, &headersWithRange, allocator);
} else if (content_type) |ct| {
if (ct.len > 0) {
const _headers = result.headers();
@@ -3405,7 +3412,7 @@ pub const Fetch = struct {
_headers[4],
.{ .name = "Content-Type", .value = ct },
};
headers = Headers.fromPicoHttpHeaders(&headersWithContentType, allocator) catch bun.outOfMemory();
setHeaders(&headers, &headersWithContentType, allocator);
} else {
var headersWithContentType: [5]picohttp.Header = .{
_headers[0],
@@ -3414,13 +3421,14 @@ pub const Fetch = struct {
_headers[3],
.{ .name = "Content-Type", .value = ct },
};
headers = Headers.fromPicoHttpHeaders(&headersWithContentType, allocator) catch bun.outOfMemory();

setHeaders(&headers, &headersWithContentType, allocator);
}
} else {
headers = Headers.fromPicoHttpHeaders(result.headers(), allocator) catch bun.outOfMemory();
setHeaders(&headers, result.headers(), allocator);
}
} else {
headers = Headers.fromPicoHttpHeaders(result.headers(), allocator) catch bun.outOfMemory();
setHeaders(&headers, result.headers(), allocator);
}
}

@@ -3513,11 +3521,16 @@ pub const Headers = struct {
this.buf.clearAndFree(this.allocator);
}
pub fn getContentType(this: *const Headers) ?[]const u8 {
const slice = this.entries.slice();
for (0..slice.len) |i| {
const entry = slice.get(i);
if (std.mem.eql(u8, this.asStr(entry.name), "Content-Type")) {
return this.asStr(entry.value);
if (this.entries.len == 0 or this.buf.items.len == 0) {
return null;
}
const header_entries = this.entries.slice();
const header_names = header_entries.items(.name);
const header_values = header_entries.items(.value);

for (header_names, 0..header_names.len) |name, i| {
if (bun.strings.eqlCaseInsensitiveASCII(this.asStr(name), "content-type", true)) {
return this.asStr(header_values[i]);
}
}
return null;
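On the fetch side, s3:// and file paths now go through findOrCreateFileFromPath with S3 detection enabled, the new setHeaders helper frees the previous header set when replacing it (instead of reassigning and leaking), and getContentType matches the header name case-insensitively instead of requiring the exact "Content-Type" spelling. The observable effect, sketched with placeholder names and with the range forwarding inferred from the hunks above:

```ts
// Sketch; bucket/key are placeholders, and the range/content-type behavior is
// inferred from the response.zig hunks above.
const res = await fetch("s3://my-bucket/video.mp4", {
  headers: { range: "bytes=0-1023" },
});

// getContentType() now matches "content-type" case-insensitively, so the
// response exposes the stored content type even if its casing differs.
console.log(res.headers.get("content-type"));
// The requested range is appended to the response headers per the range
// branch above.
console.log(res.headers.get("range"));
```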