diff --git a/packages/bun-types/bun.d.ts b/packages/bun-types/bun.d.ts index c9690785a73b46..d4a8d30c26c4dd 100644 --- a/packages/bun-types/bun.d.ts +++ b/packages/bun-types/bun.d.ts @@ -1226,8 +1226,56 @@ declare module "bun" { */ unlink(): Promise<void>; } + interface NetworkSink extends FileSink { + /** + * Write a chunk of data to the network. + * + * If the network is not writable yet, the data is buffered. + */ + write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number; + /** + * Flush the internal buffer, committing the data to the network. + */ + flush(): number | Promise<number>; + /** + * Finish the upload. This also flushes the internal buffer. + */ + end(error?: Error): number | Promise<number>; + } - interface S3FileOptions extends BlobPropertyBag { + interface S3Options extends BlobPropertyBag { + /** + * The ACL to use when writing the file to S3. By default, the ACL header/parameter is omitted. + */ + acl?: /** + * Owner gets FULL_CONTROL. No one else has access rights (default). + */ + | "private" + /** + * Owner gets FULL_CONTROL. The AllUsers group (see Who is a grantee?) gets READ access. + */ + | "public-read" + /** + * Owner gets FULL_CONTROL. The AllUsers group gets READ and WRITE access. Granting this on a bucket is generally not recommended. + */ + | "public-read-write" + /** + * Owner gets FULL_CONTROL. Amazon EC2 gets READ access to GET an Amazon Machine Image (AMI) bundle from Amazon S3. + */ + | "aws-exec-read" + /** + * Owner gets FULL_CONTROL. The AuthenticatedUsers group gets READ access. + */ + | "authenticated-read" + /** + * Object owner gets FULL_CONTROL. Bucket owner gets READ access. If you specify this canned ACL when creating a bucket, Amazon S3 ignores it. + */ + | "bucket-owner-read" + /** + * Both the object owner and the bucket owner get FULL_CONTROL over the object. If you specify this canned ACL when creating a bucket, Amazon S3 ignores it. + */ + | "bucket-owner-full-control" + | "log-delivery-write"; /** * The bucket to use for the S3 client. By default, it will use the `S3_BUCKET` and `AWS_BUCKET` environment variables, or deduce the bucket from the first part of the path. */ @@ -1244,6 +1292,10 @@ declare module "bun" { * The secret access key to use for the S3 client. By default, it will use the `S3_SECRET_ACCESS_KEY` and `AWS_SECRET_ACCESS_KEY` environment variables. */ secretAccessKey?: string; + /** + * The session token to use for the S3 client. By default, it will use the `S3_SESSION_TOKEN` and `AWS_SESSION_TOKEN` environment variables. + */ + sessionToken?: string; /** * The endpoint to use for the S3 client. Defaults to `https://s3.{region}.amazonaws.com`; it will also use the `S3_ENDPOINT` and `AWS_ENDPOINT` environment variables. @@ -1274,7 +1326,7 @@ declare module "bun" { highWaterMark?: number; } - interface S3FilePresignOptions extends S3FileOptions { + interface S3FilePresignOptions extends S3Options { /** * The number of seconds the presigned URL will be valid for. Defaults to 86400 (1 day). */ @@ -1290,7 +1342,7 @@ declare module "bun" { * @param path - The path to the file. If the bucket option is not provided or set in the path, it will be deduced from the path. * @param options - The options to use for the S3 client. */ - new (path: string | URL, options?: S3FileOptions): S3File; + new (path: string | URL, options?: S3Options): S3File; /** * The size of the file in bytes.
*/ @@ -1327,9 +1379,9 @@ declare module "bun" { slice(contentType?: string): S3File; /** - * Incremental writer to stream writes to S3, this is equivalent of using MultipartUpload and is suitable for large files. + * Incremental writer to stream writes to the network. This is the equivalent of using a MultipartUpload and is suitable for large files. */ - writer(options?: S3FileOptions): FileSink; + writer(options?: S3Options): NetworkSink; /** * The readable stream of the file. @@ -1364,7 +1416,7 @@ declare module "bun" { */ write( data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File | Blob, - options?: S3FileOptions, + options?: S3Options, ): Promise<number>; /** @@ -1379,38 +1431,43 @@ declare module "bun" { unlink(): Promise<void>; } - namespace S3File { + interface S3Bucket { + /** + * Get a file from the bucket. + * @param path - The path to the file. + */ + (path: string, options?: S3Options): S3File; /** - * Uploads the data to S3. + * Uploads the data to S3. This will overwrite the file if it already exists. * @param data - The data to write. * @param options - The options to use for the S3 client. */ - function upload( - path: string | S3File, + write( + path: string, data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File, - options?: S3FileOptions, + options?: S3Options, ): Promise<number>; /** * Returns a presigned URL for the file. * @param options - The options to use for the presigned URL. */ - function presign(path: string | S3File, options?: S3FilePresignOptions): string; + presign(path: string, options?: S3FilePresignOptions): string; /** * Deletes the file from S3. */ - function unlink(path: string | S3File, options?: S3FileOptions): Promise<void>; + unlink(path: string, options?: S3Options): Promise<void>; /** * The size of the file in bytes. */ - function size(path: string | S3File, options?: S3FileOptions): Promise<number>; + size(path: string, options?: S3Options): Promise<number>; /** - * The size of the file in bytes. + * Does the file exist? */ - function exists(path: string | S3File, options?: S3FileOptions): Promise<boolean>; + exists(path: string, options?: S3Options): Promise<boolean>; } /** @@ -3268,11 +3325,12 @@ declare module "bun" { * @param path - The path to the file. If the bucket option is not provided or set in the path, it will be deduced from the path. * @param options - The options to use for the S3 client. */ - function s3(path: string | URL, options?: S3FileOptions): S3File; + function s3(path: string | URL, options?: S3Options): S3File; /** - * The S3 file class. + * Create a configured S3 bucket reference. + * @param options - The options to use for the S3 client. */ - const S3: typeof S3File; + function S3(options?: S3Options): S3Bucket; /** * Allocate a new [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) without zeroing the bytes.
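// A minimal usage sketch of the S3Bucket API declared in the bun.d.ts changes
// above, assuming the patched `Bun.S3` export. The bucket name, region, file key,
// and the `expiresIn` presign option name are illustrative assumptions, not
// values taken from this patch.
import { S3 } from "bun";

// Bun.S3(options) now returns a configured S3Bucket reference instead of a class.
const bucket = S3({ bucket: "my-bucket", region: "us-east-1", acl: "private" });

// Instance methods replace the old static S3File.* helpers (upload -> write, etc.).
await bucket.write("folder/hello.txt", "Hello, S3!"); // Promise<number>: bytes written

// The bucket is callable: bucket(path) resolves an S3File scoped to that bucket.
const file = bucket("folder/hello.txt");
console.log(await file.text()); // "Hello, S3!"

console.log(await bucket.exists("folder/hello.txt")); // Promise<boolean>
console.log(await bucket.size("folder/hello.txt")); // Promise<number>: size in bytes
const url = bucket.presign("folder/hello.txt", { expiresIn: 3600 }); // option name assumed
await bucket.unlink("folder/hello.txt"); // Promise<void>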
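// A sketch of the incremental NetworkSink writer surfaced by S3File.writer()
// above (the MultipartUpload path, suitable for large files). The s3:// path and
// the payload are assumptions; only the write/flush/end signatures come from
// this diff.
import { s3 } from "bun";

const big = s3("s3://my-bucket/logs/big.ndjson");
const writer = big.writer({ type: "application/x-ndjson" });

// write() returns a byte count; if the network is not writable yet, the chunk is buffered.
for (let i = 0; i < 1000; i++) {
  writer.write(JSON.stringify({ i }) + "\n");
}

await writer.flush(); // commit the buffered data to the network
await writer.end(); // finish the upload (end() also flushes)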
diff --git a/src/bun.js/ConsoleObject.zig b/src/bun.js/ConsoleObject.zig index 8996a196da75fa..06b282dde16ebb 100644 --- a/src/bun.js/ConsoleObject.zig +++ b/src/bun.js/ConsoleObject.zig @@ -21,12 +21,13 @@ const default_allocator = bun.default_allocator; const JestPrettyFormat = @import("./test/pretty_format.zig").JestPrettyFormat; const JSPromise = JSC.JSPromise; const EventType = JSC.EventType; - +const S3Bucket = @import("./webcore/S3Bucket.zig"); pub const shim = Shimmer("Bun", "ConsoleObject", @This()); pub const Type = *anyopaque; pub const name = "Bun::ConsoleObject"; pub const include = "\"ConsoleObject.h\""; pub const namespace = shim.namespace; + const Counter = std.AutoHashMapUnmanaged(u64, u32); const BufferedWriter = std.io.BufferedWriter(4096, Output.WriterType); @@ -2216,6 +2217,10 @@ pub const Formatter = struct { ); }, .Class => { + if (S3Bucket.fromJS(value)) |s3bucket| { + S3Bucket.writeFormat(s3bucket, ConsoleObject.Formatter, this, writer_, enable_ansi_colors) catch {}; + return; + } var printable = ZigString.init(&name_buf); value.getClassName(this.globalThis, &printable); this.addForNewLine(printable.len); diff --git a/src/bun.js/api/BunObject.zig b/src/bun.js/api/BunObject.zig index de110e5bdae102..8736bd7809d4ac 100644 --- a/src/bun.js/api/BunObject.zig +++ b/src/bun.js/api/BunObject.zig @@ -1,5 +1,6 @@ const conv = std.builtin.CallingConvention.Unspecified; - +const S3File = @import("../webcore/S3File.zig"); +const S3Bucket = @import("../webcore/S3Bucket.zig"); /// How to add a new function or property to the Bun global /// /// - Add a callback or property to the below struct @@ -30,7 +31,7 @@ pub const BunObject = struct { pub const registerMacro = toJSCallback(Bun.registerMacro); pub const resolve = toJSCallback(Bun.resolve); pub const resolveSync = toJSCallback(Bun.resolveSync); - pub const s3 = toJSCallback(WebCore.Blob.constructS3File); + pub const s3 = S3File.createJSS3File; pub const serve = toJSCallback(Bun.serve); pub const sha = toJSCallback(JSC.wrapStaticMethod(Crypto.SHA512_256, "hash_", true)); pub const shellEscape = toJSCallback(Bun.shellEscape); @@ -56,7 +57,6 @@ pub const BunObject = struct { pub const SHA384 = toJSGetter(Crypto.SHA384.getter); pub const SHA512 = toJSGetter(Crypto.SHA512.getter); pub const SHA512_256 = toJSGetter(Crypto.SHA512_256.getter); - pub const S3 = toJSGetter(JSC.WebCore.Blob.getJSS3FileConstructor); pub const TOML = toJSGetter(Bun.getTOMLObject); pub const Transpiler = toJSGetter(Bun.getTranspilerConstructor); pub const argv = toJSGetter(Bun.getArgv); @@ -109,7 +109,6 @@ pub const BunObject = struct { @export(BunObject.FileSystemRouter, .{ .name = getterName("FileSystemRouter") }); @export(BunObject.MD4, .{ .name = getterName("MD4") }); @export(BunObject.MD5, .{ .name = getterName("MD5") }); - @export(BunObject.S3, .{ .name = getterName("S3") }); @export(BunObject.SHA1, .{ .name = getterName("SHA1") }); @export(BunObject.SHA224, .{ .name = getterName("SHA224") }); @export(BunObject.SHA256, .{ .name = getterName("SHA256") }); diff --git a/src/bun.js/bindings/BunClientData.cpp b/src/bun.js/bindings/BunClientData.cpp index b5c037f0c2aae3..ed746e0e156213 100644 --- a/src/bun.js/bindings/BunClientData.cpp +++ b/src/bun.js/bindings/BunClientData.cpp @@ -23,7 +23,9 @@ #include "JSDOMWrapper.h" #include #include "NodeVM.h" +#include "JSS3Bucket.h" #include "../../bake/BakeGlobalObject.h" + namespace WebCore { using namespace JSC; @@ -32,6 +34,7 @@ RefPtr createBuiltinsSourceProvider(); JSHeapData::JSHeapData(Heap& heap) : 
m_heapCellTypeForJSWorkerGlobalScope(JSC::IsoHeapCellType::Args()) , m_heapCellTypeForNodeVMGlobalObject(JSC::IsoHeapCellType::Args()) + , m_heapCellTypeForJSS3Bucket(JSC::IsoHeapCellType::Args()) , m_heapCellTypeForBakeGlobalObject(JSC::IsoHeapCellType::Args()) , m_domBuiltinConstructorSpace ISO_SUBSPACE_INIT(heap, heap.cellHeapCellType, JSDOMBuiltinConstructorBase) , m_domConstructorSpace ISO_SUBSPACE_INIT(heap, heap.cellHeapCellType, JSDOMConstructorBase) diff --git a/src/bun.js/bindings/BunClientData.h b/src/bun.js/bindings/BunClientData.h index ef210b02ada063..953918f0eb81d5 100644 --- a/src/bun.js/bindings/BunClientData.h +++ b/src/bun.js/bindings/BunClientData.h @@ -59,6 +59,7 @@ class JSHeapData { JSC::IsoHeapCellType m_heapCellTypeForJSWorkerGlobalScope; JSC::IsoHeapCellType m_heapCellTypeForNodeVMGlobalObject; + JSC::IsoHeapCellType m_heapCellTypeForJSS3Bucket; JSC::IsoHeapCellType m_heapCellTypeForBakeGlobalObject; private: diff --git a/src/bun.js/bindings/BunCommonStrings.h b/src/bun.js/bindings/BunCommonStrings.h index 0abd69c1dbb35a..b74b2e7be8d40a 100644 --- a/src/bun.js/bindings/BunCommonStrings.h +++ b/src/bun.js/bindings/BunCommonStrings.h @@ -11,7 +11,8 @@ // These ones don't need to be in BunBuiltinNames.h // If we don't use it as an identifier name, but we want to avoid allocating the string frequently, put it in this list. #define BUN_COMMON_STRINGS_EACH_NAME_NOT_BUILTIN_NAMES(macro) \ - macro(SystemError) + macro(SystemError) \ + macro(S3Error) // clang-format on #define BUN_COMMON_STRINGS_ACCESSOR_DEFINITION(name) \ diff --git a/src/bun.js/bindings/BunObject+exports.h b/src/bun.js/bindings/BunObject+exports.h index d4f267b8227dcb..b638d6eb26d846 100644 --- a/src/bun.js/bindings/BunObject+exports.h +++ b/src/bun.js/bindings/BunObject+exports.h @@ -17,7 +17,6 @@ macro(SHA512_256) \ macro(TOML) \ macro(Transpiler) \ - macro(S3) \ macro(argv) \ macro(assetPrefix) \ macro(cwd) \ @@ -59,6 +58,7 @@ macro(resolve) \ macro(resolveSync) \ macro(s3) \ + macro(S3) \ macro(serve) \ macro(sha) \ macro(shrink) \ diff --git a/src/bun.js/bindings/BunObject.cpp b/src/bun.js/bindings/BunObject.cpp index 576e9a9baa2344..9693629256455a 100644 --- a/src/bun.js/bindings/BunObject.cpp +++ b/src/bun.js/bindings/BunObject.cpp @@ -58,6 +58,7 @@ BUN_DECLARE_HOST_FUNCTION(Bun__DNSResolver__getCacheStats); BUN_DECLARE_HOST_FUNCTION(Bun__fetch); BUN_DECLARE_HOST_FUNCTION(Bun__fetchPreconnect); BUN_DECLARE_HOST_FUNCTION(Bun__randomUUIDv7); +BUN_DECLARE_HOST_FUNCTION(Bun__S3Constructor); namespace Bun { using namespace JSC; @@ -620,7 +621,6 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj Glob BunObject_getter_wrap_Glob DontDelete|PropertyCallback MD4 BunObject_getter_wrap_MD4 DontDelete|PropertyCallback MD5 BunObject_getter_wrap_MD5 DontDelete|PropertyCallback - S3 BunObject_getter_wrap_S3 DontDelete|PropertyCallback SHA1 BunObject_getter_wrap_SHA1 DontDelete|PropertyCallback SHA224 BunObject_getter_wrap_SHA224 DontDelete|PropertyCallback SHA256 BunObject_getter_wrap_SHA256 DontDelete|PropertyCallback @@ -683,6 +683,7 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj revision constructBunRevision ReadOnly|DontDelete|PropertyCallback semver BunObject_getter_wrap_semver ReadOnly|DontDelete|PropertyCallback s3 BunObject_callback_s3 DontDelete|Function 1 + S3 Bun__S3Constructor DontDelete|Constructable|Function 1 sql constructBunSQLObject DontDelete|PropertyCallback serve BunObject_callback_serve DontDelete|Function 1 sha 
BunObject_callback_sha DontDelete|Function 1 diff --git a/src/bun.js/bindings/ErrorCode.ts b/src/bun.js/bindings/ErrorCode.ts index 14e93b2c85e073..bc9b2bfe289311 100644 --- a/src/bun.js/bindings/ErrorCode.ts +++ b/src/bun.js/bindings/ErrorCode.ts @@ -130,10 +130,11 @@ export default [ ["ERR_POSTGRES_CONNECTION_TIMEOUT", Error, "PostgresError"], ["ERR_POSTGRES_LIFETIME_TIMEOUT", Error, "PostgresError"], - // AWS - ["ERR_AWS_MISSING_CREDENTIALS", Error], - ["ERR_AWS_INVALID_METHOD", Error], - ["ERR_AWS_INVALID_PATH", Error], - ["ERR_AWS_INVALID_ENDPOINT", Error], - ["ERR_AWS_INVALID_SIGNATURE", Error], + // S3 + ["ERR_S3_MISSING_CREDENTIALS", Error], + ["ERR_S3_INVALID_METHOD", Error], + ["ERR_S3_INVALID_PATH", Error], + ["ERR_S3_INVALID_ENDPOINT", Error], + ["ERR_S3_INVALID_SIGNATURE", Error], + ["ERR_S3_INVALID_SESSION_TOKEN", Error], ] as ErrorCodeMapping; diff --git a/src/bun.js/bindings/JSDOMFile.cpp b/src/bun.js/bindings/JSDOMFile.cpp index 6b6f980062a2fa..c67cf8f62f6bcb 100644 --- a/src/bun.js/bindings/JSDOMFile.cpp +++ b/src/bun.js/bindings/JSDOMFile.cpp @@ -42,7 +42,7 @@ class JSDOMFile : public JSC::InternalFunction { static JSDOMFile* create(JSC::VM& vm, JSGlobalObject* globalObject) { - auto* zigGlobal = reinterpret_cast<Zig::GlobalObject*>(globalObject); + auto* zigGlobal = defaultGlobalObject(globalObject); auto structure = createStructure(vm, globalObject, zigGlobal->functionPrototype()); auto* object = new (NotNull, JSC::allocateCell<JSDOMFile>(vm)) JSDOMFile(vm, structure); object->finishCreation(vm); @@ -65,7 +65,7 @@ class JSDOMFile : public JSC::InternalFunction { static JSC_HOST_CALL_ATTRIBUTES JSC::EncodedJSValue construct(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame) { - Zig::GlobalObject* globalObject = reinterpret_cast<Zig::GlobalObject*>(lexicalGlobalObject); + auto* globalObject = defaultGlobalObject(lexicalGlobalObject); JSC::VM& vm = globalObject->vm(); JSObject* newTarget = asObject(callFrame->newTarget()); auto* constructor = globalObject->JSDOMFileConstructor(); @@ -75,15 +75,15 @@ class JSDOMFile : public JSC::InternalFunction { auto* functionGlobalObject = reinterpret_cast<Zig::GlobalObject*>( // ShadowRealm functions belong to a different global object.
- getFunctionRealm(globalObject, newTarget)); + getFunctionRealm(lexicalGlobalObject, newTarget)); RETURN_IF_EXCEPTION(scope, {}); structure = InternalFunction::createSubclassStructure( - globalObject, + lexicalGlobalObject, newTarget, functionGlobalObject->JSBlobStructure()); } - void* ptr = JSDOMFile__construct(globalObject, callFrame); + void* ptr = JSDOMFile__construct(lexicalGlobalObject, callFrame); if (UNLIKELY(!ptr)) { return JSValue::encode(JSC::jsUndefined()); diff --git a/src/bun.js/bindings/JSS3Bucket.cpp b/src/bun.js/bindings/JSS3Bucket.cpp new file mode 100644 index 00000000000000..f9880a3415a527 --- /dev/null +++ b/src/bun.js/bindings/JSS3Bucket.cpp @@ -0,0 +1,253 @@ + +#include "root.h" + +#include "JavaScriptCore/JSType.h" +#include "JavaScriptCore/JSObject.h" +#include "JavaScriptCore/JSGlobalObject.h" +#include +#include "ZigGeneratedClasses.h" + +#include "JSS3Bucket.h" +#include +#include +#include "JavaScriptCore/JSCJSValue.h" +#include "ErrorCode.h" + +namespace Bun { +using namespace JSC; + +// External C functions declarations +extern "C" { +SYSV_ABI void* JSS3Bucket__construct(JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3Bucket__call(void* ptr, JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3Bucket__unlink(void* ptr, JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3Bucket__write(void* ptr, JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3Bucket__presign(void* ptr, JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3Bucket__exists(void* ptr, JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3Bucket__size(void* ptr, JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI void* JSS3Bucket__deinit(void* ptr); +} + +// Forward declarations +JSC_DECLARE_HOST_FUNCTION(functionS3Bucket_unlink); +JSC_DECLARE_HOST_FUNCTION(functionS3Bucket_write); +JSC_DECLARE_HOST_FUNCTION(functionS3Bucket_presign); +JSC_DECLARE_HOST_FUNCTION(functionS3Bucket_exists); +JSC_DECLARE_HOST_FUNCTION(functionS3Bucket_size); + +static const HashTableValue JSS3BucketPrototypeTableValues[] = { + { "unlink"_s, static_cast(PropertyAttribute::Function | PropertyAttribute::ReadOnly), NoIntrinsic, { HashTableValue::NativeFunctionType, functionS3Bucket_unlink, 0 } }, + { "write"_s, static_cast(PropertyAttribute::Function | PropertyAttribute::ReadOnly), NoIntrinsic, { HashTableValue::NativeFunctionType, functionS3Bucket_write, 1 } }, + { "presign"_s, static_cast(PropertyAttribute::Function | PropertyAttribute::ReadOnly), NoIntrinsic, { HashTableValue::NativeFunctionType, functionS3Bucket_presign, 1 } }, + { "exists"_s, static_cast(PropertyAttribute::Function | PropertyAttribute::ReadOnly), NoIntrinsic, { HashTableValue::NativeFunctionType, functionS3Bucket_exists, 1 } }, + { "size"_s, static_cast(PropertyAttribute::Function | PropertyAttribute::ReadOnly), NoIntrinsic, { HashTableValue::NativeFunctionType, functionS3Bucket_size, 1 } }, +}; + +class JSS3BucketPrototype final : public JSC::JSNonFinalObject { +public: + using Base = JSC::JSNonFinalObject; + static constexpr unsigned StructureFlags = Base::StructureFlags; + + static JSS3BucketPrototype* create( + JSC::VM& vm, + JSC::JSGlobalObject* globalObject, + JSC::Structure* structure) + { + JSS3BucketPrototype* prototype = new (NotNull, JSC::allocateCell(vm)) JSS3BucketPrototype(vm, structure); + prototype->finishCreation(vm, globalObject); + return prototype; + } + + 
static JSC::Structure* createStructure( + JSC::VM& vm, + JSC::JSGlobalObject* globalObject, + JSC::JSValue prototype) + { + auto* structure = JSC::Structure::create(vm, globalObject, prototype, TypeInfo(JSC::ObjectType, StructureFlags), info()); + structure->setMayBePrototype(true); + return structure; + } + + DECLARE_INFO; + + template<typename, JSC::SubspaceAccess> + static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) + { + STATIC_ASSERT_ISO_SUBSPACE_SHARABLE(JSS3BucketPrototype, Base); + return &vm.plainObjectSpace(); + } + +protected: + JSS3BucketPrototype(JSC::VM& vm, JSC::Structure* structure) + : Base(vm, structure) + { + } + + void finishCreation(JSC::VM& vm, JSC::JSGlobalObject* globalObject) + { + Base::finishCreation(vm); + ASSERT(inherits(info())); + reifyStaticProperties(vm, info(), JSS3BucketPrototypeTableValues, *this); + } +}; + +// Implementation of JSS3Bucket methods +void JSS3Bucket::destroy(JSCell* cell) +{ + static_cast<JSS3Bucket*>(cell)->JSS3Bucket::~JSS3Bucket(); +} + +JSS3Bucket::~JSS3Bucket() +{ + if (ptr) { + JSS3Bucket__deinit(ptr); + } +} + +JSC::GCClient::IsoSubspace* JSS3Bucket::subspaceForImpl(JSC::VM& vm) +{ + // This needs its own heap cell because of the destructor. + return WebCore::subspaceForImpl<JSS3Bucket, WebCore::UseCustomHeapCellType::Yes>( + vm, + [](auto& spaces) { return spaces.m_clientSubspaceForJSS3Bucket.get(); }, + [](auto& spaces, auto&& space) { spaces.m_clientSubspaceForJSS3Bucket = std::forward<decltype(space)>(space); }, + [](auto& spaces) { return spaces.m_subspaceForJSS3Bucket.get(); }, + [](auto& spaces, auto&& space) { spaces.m_subspaceForJSS3Bucket = std::forward<decltype(space)>(space); }, + [](auto& server) -> JSC::HeapCellType& { return server.m_heapCellTypeForJSS3Bucket; }); +} + +JSC_HOST_CALL_ATTRIBUTES EncodedJSValue JSS3Bucket::call(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame) +{ + auto& vm = lexicalGlobalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + JSValue thisValue = callFrame->jsCallee(); + auto* thisObject = jsDynamicCast<JSS3Bucket*>(thisValue); + if (UNLIKELY(!thisObject)) { + Bun::throwError(lexicalGlobalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3Bucket instance"_s); + return {}; + } + + ASSERT(thisObject->ptr); + + return JSS3Bucket__call(thisObject->ptr, lexicalGlobalObject, callFrame); +} + +JSC_HOST_CALL_ATTRIBUTES EncodedJSValue JSS3Bucket::construct(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame) +{ + auto& vm = lexicalGlobalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + Bun::throwError(lexicalGlobalObject, scope, Bun::ErrorCode::ERR_ILLEGAL_CONSTRUCTOR, "S3Bucket is not constructable.
To instantiate a bucket, do Bun.S3()"_s); + return {}; +} + +JSS3Bucket* JSS3Bucket::create(JSC::VM& vm, Zig::GlobalObject* globalObject, void* ptr) +{ + auto* structure = globalObject->m_JSS3BucketStructure.getInitializedOnMainThread(globalObject); + NativeExecutable* executable = vm.getHostFunction(&JSS3Bucket::call, ImplementationVisibility::Public, &JSS3Bucket::construct, String("S3Bucket"_s)); + JSS3Bucket* functionObject = new (NotNull, JSC::allocateCell(vm)) JSS3Bucket(vm, executable, globalObject, structure, ptr); + functionObject->finishCreation(vm, executable, 1, "S3Bucket"_s); + return functionObject; +} + +JSC::Structure* JSS3Bucket::createStructure(JSC::JSGlobalObject* globalObject) +{ + auto& vm = globalObject->vm(); + auto* prototype = JSS3BucketPrototype::create(vm, globalObject, JSS3BucketPrototype::createStructure(vm, globalObject, globalObject->functionPrototype())); + return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::JSFunctionType, StructureFlags), info(), NonArray); +} + +JSC_DEFINE_HOST_FUNCTION(functionS3Bucket_unlink, (JSGlobalObject * globalObject, CallFrame* callframe)) +{ + auto* thisObject = jsDynamicCast(callframe->thisValue()); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + if (!thisObject) { + Bun::throwError(globalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3Bucket instance"_s); + return {}; + } + + return JSS3Bucket__unlink(thisObject->ptr, globalObject, callframe); +} + +JSC_DEFINE_HOST_FUNCTION(functionS3Bucket_write, (JSGlobalObject * globalObject, CallFrame* callframe)) +{ + auto* thisObject = jsDynamicCast(callframe->thisValue()); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + if (!thisObject) { + Bun::throwError(globalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3Bucket instance"_s); + return {}; + } + + return JSS3Bucket__write(thisObject->ptr, globalObject, callframe); +} + +JSC_DEFINE_HOST_FUNCTION(functionS3Bucket_presign, (JSGlobalObject * globalObject, CallFrame* callframe)) +{ + auto* thisObject = jsDynamicCast(callframe->thisValue()); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + if (!thisObject) { + Bun::throwError(globalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3Bucket instance"_s); + return {}; + } + + return JSS3Bucket__presign(thisObject->ptr, globalObject, callframe); +} + +JSC_DEFINE_HOST_FUNCTION(functionS3Bucket_exists, (JSGlobalObject * globalObject, CallFrame* callframe)) +{ + auto* thisObject = jsDynamicCast(callframe->thisValue()); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + if (!thisObject) { + Bun::throwError(globalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3Bucket instance"_s); + return {}; + } + + return JSS3Bucket__exists(thisObject->ptr, globalObject, callframe); +} + +JSC_DEFINE_HOST_FUNCTION(functionS3Bucket_size, (JSGlobalObject * globalObject, CallFrame* callframe)) +{ + auto* thisObject = jsDynamicCast(callframe->thisValue()); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + if (!thisObject) { + Bun::throwError(globalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3Bucket instance"_s); + return {}; + } + + return JSS3Bucket__size(thisObject->ptr, globalObject, callframe); +} + +extern "C" { +SYSV_ABI void* BUN__getJSS3Bucket(JSC::EncodedJSValue value) +{ + JSValue thisValue = JSC::JSValue::decode(value); + auto* thisObject = jsDynamicCast(thisValue); + return 
thisObject ? thisObject->ptr : nullptr; +}; + +BUN_DEFINE_HOST_FUNCTION(Bun__S3Constructor, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callframe)) +{ + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + void* ptr = JSS3Bucket__construct(globalObject, callframe); + RETURN_IF_EXCEPTION(scope, {}); + ASSERT(ptr); + + return JSValue::encode(JSS3Bucket::create(vm, defaultGlobalObject(globalObject), ptr)); +} +} + +Structure* createJSS3BucketStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject) +{ + return JSS3Bucket::createStructure(globalObject); +} + +const JSC::ClassInfo JSS3BucketPrototype::s_info = { "S3Bucket"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSS3BucketPrototype) }; +const JSC::ClassInfo JSS3Bucket::s_info = { "S3Bucket"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSS3Bucket) }; + +} // namespace Bun diff --git a/src/bun.js/bindings/JSS3Bucket.h b/src/bun.js/bindings/JSS3Bucket.h new file mode 100644 index 00000000000000..84d0868e23c850 --- /dev/null +++ b/src/bun.js/bindings/JSS3Bucket.h @@ -0,0 +1,50 @@ +#pragma once + +namespace Zig { +class GlobalObject; +} + +namespace Bun { +using namespace JSC; + +class JSS3Bucket : public JSC::JSFunction { + using Base = JSC::JSFunction; + static constexpr unsigned StructureFlags = Base::StructureFlags; + +public: + static constexpr bool needsDestruction = true; + + JSS3Bucket(JSC::VM& vm, NativeExecutable* executable, JSGlobalObject* globalObject, Structure* structure, void* ptr) + : Base(vm, executable, globalObject, structure) + { + this->ptr = ptr; + } + DECLARE_INFO; + + static void destroy(JSCell* cell); + ~JSS3Bucket(); + + template + static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) + { + if constexpr (mode == JSC::SubspaceAccess::Concurrently) + return nullptr; + return subspaceForImpl(vm); + } + + static JSC::GCClient::IsoSubspace* subspaceForImpl(JSC::VM& vm); + + static JSC_HOST_CALL_ATTRIBUTES EncodedJSValue call(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame); + static JSC_HOST_CALL_ATTRIBUTES EncodedJSValue construct(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame); + + static JSS3Bucket* create(JSC::VM& vm, Zig::GlobalObject* globalObject, void* ptr); + static JSC::Structure* createStructure(JSC::JSGlobalObject* globalObject); + + void* ptr; +}; + +// Constructor helper +JSValue constructS3Bucket(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callframe); +Structure* createJSS3BucketStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject); + +} // namespace Bun diff --git a/src/bun.js/bindings/JSS3File.cpp b/src/bun.js/bindings/JSS3File.cpp index 418c449f57a2eb..614d6a983d2aad 100644 --- a/src/bun.js/bindings/JSS3File.cpp +++ b/src/bun.js/bindings/JSS3File.cpp @@ -1,125 +1,189 @@ + #include "root.h" + +#include "ZigGlobalObject.h" #include "ZigGeneratedClasses.h" -#include + +#include "JavaScriptCore/JSType.h" +#include "JavaScriptCore/JSObject.h" +#include "JavaScriptCore/JSGlobalObject.h" #include +#include #include -#include "JSS3File.h" +#include #include "JavaScriptCore/JSCJSValue.h" +#include "ErrorCode.h" -using namespace JSC; +#include "JSS3File.h" -extern "C" SYSV_ABI void* JSS3File__construct(JSC::JSGlobalObject*, JSC::CallFrame* callframe); -extern "C" SYSV_ABI bool JSS3File__hasInstance(EncodedJSValue, JSC::JSGlobalObject*, EncodedJSValue); +namespace Bun { +using namespace JSC; +using namespace WebCore; +// External C functions declarations extern "C" { +SYSV_ABI void* JSS3File__construct(JSC::JSGlobalObject*, 
JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3File__presign(void* ptr, JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3File__bucket(void* ptr, JSC::JSGlobalObject*); +SYSV_ABI bool JSS3File__hasInstance(EncodedJSValue, JSC::JSGlobalObject*, EncodedJSValue); +} -JSC::EncodedJSValue BUN__createJSS3FileConstructor(JSGlobalObject* lexicalGlobalObject) +// Forward declarations +JSC_DECLARE_HOST_FUNCTION(functionS3File_presign); +static JSC_DECLARE_CUSTOM_GETTER(getterS3File_bucket); +static JSC_DEFINE_CUSTOM_GETTER(getterS3File_bucket, (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, JSC::PropertyName)) { - Zig::GlobalObject* globalObject = reinterpret_cast(lexicalGlobalObject); + JSC::VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); - return JSValue::encode(globalObject->JSS3FileConstructor()); -} + auto* thisObject = jsDynamicCast(JSValue::decode(thisValue)); + if (!thisObject) { + Bun::throwError(globalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3File instance"_s); + return {}; + } + + return JSS3File__bucket(thisObject->wrapped(), globalObject); } +static const HashTableValue JSS3FilePrototypeTableValues[] = { + { "presign"_s, static_cast(PropertyAttribute::Function | PropertyAttribute::ReadOnly), NoIntrinsic, { HashTableValue::NativeFunctionType, functionS3File_presign, 1 } }, + { "bucket"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor | PropertyAttribute::DOMAttribute), NoIntrinsic, { HashTableValue::GetterSetterType, getterS3File_bucket, 0 } }, +}; +class JSS3FilePrototype final : public WebCore::JSBlobPrototype { +public: + using Base = WebCore::JSBlobPrototype; + static constexpr unsigned StructureFlags = Base::StructureFlags; -// TODO: make this inehrit from JSBlob instead of InternalFunction -// That will let us remove this hack for [Symbol.hasInstance] and fix the prototype chain. 
-class JSS3File : public JSC::InternalFunction { - using Base = JSC::InternalFunction; + static JSS3FilePrototype* create( + JSC::VM& vm, + JSC::JSGlobalObject* globalObject, + JSC::Structure* structure) + { + JSS3FilePrototype* prototype = new (NotNull, JSC::allocateCell(vm)) JSS3FilePrototype(vm, globalObject, structure); + prototype->finishCreation(vm, globalObject); + return prototype; + } -public: - JSS3File(JSC::VM& vm, JSC::Structure* structure) - : Base(vm, structure, call, construct) + static JSC::Structure* createStructure( + JSC::VM& vm, + JSC::JSGlobalObject* globalObject, + JSC::JSValue prototype) { + auto* structure = JSC::Structure::create(vm, globalObject, prototype, TypeInfo(JSC::ObjectType, StructureFlags), info()); + structure->setMayBePrototype(true); + return structure; } DECLARE_INFO; - static constexpr unsigned StructureFlags = (Base::StructureFlags & ~ImplementsDefaultHasInstance) | ImplementsHasInstance; - template static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { - return &vm.internalFunctionSpace(); + STATIC_ASSERT_ISO_SUBSPACE_SHARABLE(JSS3FilePrototype, Base); + return &vm.plainObjectSpace(); } - static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) + +protected: + JSS3FilePrototype(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure) + : Base(vm, globalObject, structure) { - return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(InternalFunctionType, StructureFlags), info()); } - void finishCreation(JSC::VM& vm) + void finishCreation(JSC::VM& vm, JSC::JSGlobalObject* globalObject) { - Base::finishCreation(vm, 2, "S3"_s); + Base::finishCreation(vm, globalObject); + ASSERT(inherits(info())); + reifyStaticProperties(vm, JSS3File::info(), JSS3FilePrototypeTableValues, *this); } +}; - static JSS3File* create(JSC::VM& vm, JSGlobalObject* globalObject) - { - auto* zigGlobal = reinterpret_cast(globalObject); - auto structure = createStructure(vm, globalObject, zigGlobal->functionPrototype()); - auto* object = new (NotNull, JSC::allocateCell(vm)) JSS3File(vm, structure); - object->finishCreation(vm); +// Implementation of JSS3File methods +void JSS3File::destroy(JSCell* cell) +{ + static_cast(cell)->JSS3File::~JSS3File(); +} - // This is not quite right. But we'll fix it if someone files an issue about it. 
- object->putDirect(vm, vm.propertyNames->prototype, zigGlobal->JSBlobPrototype(), JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly | 0); +JSS3File::~JSS3File() +{ + // Base class destructor will be called automatically +} - return object; - } +JSS3File* JSS3File::create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, void* ptr) +{ + JSS3File* thisObject = new (NotNull, JSC::allocateCell(vm)) JSS3File(vm, structure, ptr); + thisObject->finishCreation(vm); + return thisObject; +} - static bool customHasInstance(JSObject* object, JSGlobalObject* globalObject, JSValue value) - { - if (!value.isObject()) - return false; +JSValue constructS3FileInternal(JSC::JSGlobalObject* lexicalGlobalObject, void* ptr) +{ + ASSERT(ptr); + JSC::VM& vm = lexicalGlobalObject->vm(); - // Note: this breaks [Symbol.hasInstance] - // We must do this for now until we update the code generator to export classes - return JSS3File__hasInstance(JSValue::encode(object), globalObject, JSValue::encode(value)); - } + auto* globalObject = defaultGlobalObject(lexicalGlobalObject); + auto* structure = globalObject->m_JSS3FileStructure.getInitializedOnMainThread(lexicalGlobalObject); + return JSS3File::create(vm, globalObject, structure, ptr); +} - static JSC_HOST_CALL_ATTRIBUTES JSC::EncodedJSValue construct(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame) - { - Zig::GlobalObject* globalObject = reinterpret_cast(lexicalGlobalObject); - JSC::VM& vm = globalObject->vm(); - JSObject* newTarget = asObject(callFrame->newTarget()); - auto* constructor = globalObject->JSS3FileConstructor(); - - Structure* structure = globalObject->JSBlobStructure(); - if (constructor != newTarget) { - auto scope = DECLARE_THROW_SCOPE(vm); - - auto* functionGlobalObject = reinterpret_cast( - // ShadowRealm functions belong to a different global object. 
- getFunctionRealm(globalObject, newTarget)); - RETURN_IF_EXCEPTION(scope, {}); - structure = InternalFunction::createSubclassStructure( - globalObject, - newTarget, - functionGlobalObject->JSBlobStructure()); - } - - void* ptr = JSS3File__construct(globalObject, callFrame); - - if (UNLIKELY(!ptr)) { - return JSValue::encode(JSC::jsUndefined()); - } - - return JSValue::encode( - WebCore::JSBlob::create(vm, globalObject, structure, ptr)); - } +JSValue constructS3File(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callframe) +{ + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + void* ptr = JSS3File__construct(globalObject, callframe); + RETURN_IF_EXCEPTION(scope, {}); + ASSERT(ptr); - static JSC_HOST_CALL_ATTRIBUTES EncodedJSValue call(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame) - { - auto scope = DECLARE_THROW_SCOPE(lexicalGlobalObject->vm()); - throwTypeError(lexicalGlobalObject, scope, "Class constructor S3 cannot be invoked without 'new'"_s); + return constructS3FileInternal(globalObject, ptr); +} + +JSC::Structure* JSS3File::createStructure(JSC::JSGlobalObject* globalObject) +{ + auto& vm = globalObject->vm(); + + JSC::JSObject* superPrototype = defaultGlobalObject(globalObject)->JSBlobPrototype(); + auto* protoStructure = JSS3FilePrototype::createStructure(vm, globalObject, superPrototype); + auto* prototype = JSS3FilePrototype::create(vm, globalObject, protoStructure); + return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(static_cast(0b11101110), StructureFlags), info(), NonArray); +} + +static bool customHasInstance(JSObject* object, JSGlobalObject* globalObject, JSValue value) +{ + if (!value.isObject()) + return false; + + return JSS3File__hasInstance(JSValue::encode(object), globalObject, JSValue::encode(value)); +} + +Structure* createJSS3FileStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject) +{ + return JSS3File::createStructure(globalObject); +} + +JSC_DEFINE_HOST_FUNCTION(functionS3File_presign, (JSGlobalObject * globalObject, CallFrame* callframe)) +{ + auto* thisObject = jsDynamicCast(callframe->thisValue()); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + if (!thisObject) { + Bun::throwError(globalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3File instance"_s); return {}; } -}; -const JSC::ClassInfo JSS3File::s_info = { "S3"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSS3File) }; + return JSS3File__presign(thisObject->wrapped(), globalObject, callframe); +} -namespace Bun { +const JSC::ClassInfo JSS3FilePrototype::s_info = { "S3File"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSS3FilePrototype) }; +const JSC::ClassInfo JSS3File::s_info = { "S3File"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSS3File) }; -JSC::JSObject* createJSS3FileConstructor(JSC::VM& vm, JSC::JSGlobalObject* globalObject) +extern "C" { +SYSV_ABI EncodedJSValue BUN__createJSS3File(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callframe) { - return JSS3File::create(vm, globalObject); + return JSValue::encode(constructS3File(globalObject, callframe)); +}; + +SYSV_ABI EncodedJSValue BUN__createJSS3FileUnsafely(JSC::JSGlobalObject* globalObject, void* ptr) +{ + return JSValue::encode(constructS3FileInternal(globalObject, ptr)); +}; } } diff --git a/src/bun.js/bindings/JSS3File.h b/src/bun.js/bindings/JSS3File.h index 63b8170b060dea..fab0927efb0172 100644 --- a/src/bun.js/bindings/JSS3File.h +++ b/src/bun.js/bindings/JSS3File.h @@ -1,7 +1,41 @@ #pragma 
once -#include "root.h" +namespace Zig { +class GlobalObject; +} namespace Bun { -JSC::JSObject* createJSS3FileConstructor(JSC::VM&, JSC::JSGlobalObject*); -} +using namespace JSC; + +class JSS3File : public WebCore::JSBlob { + using Base = WebCore::JSBlob; + +public: + static constexpr bool needsDestruction = true; + static constexpr unsigned StructureFlags = Base::StructureFlags; + + JSS3File(JSC::VM& vm, Structure* structure, void* ptr) + : Base(vm, structure, ptr) + { + } + DECLARE_INFO; + + template static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) + { + if constexpr (mode == JSC::SubspaceAccess::Concurrently) + return nullptr; + return WebCore::JSBlob::subspaceFor(vm); + } + + static void destroy(JSCell* cell); + ~JSS3File(); + + static JSS3File* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, void* ptr); + static JSC::Structure* createStructure(JSC::JSGlobalObject* globalObject); +}; + +// Constructor helper +JSValue constructS3File(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callframe); +Structure* createJSS3FileStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject); + +} // namespace Bun diff --git a/src/bun.js/bindings/S3Error.cpp b/src/bun.js/bindings/S3Error.cpp new file mode 100644 index 00000000000000..a3ae91651c64b8 --- /dev/null +++ b/src/bun.js/bindings/S3Error.cpp @@ -0,0 +1,63 @@ + +#include "root.h" + +#include +#include +#include "ZigGeneratedClasses.h" +#include "S3Error.h" + +namespace Bun { + +typedef struct S3Error { + BunString code; + BunString message; + BunString path; +} S3Error; + +Structure* createS3ErrorStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject) +{ + return JSC::ErrorInstance::createStructure(vm, globalObject, JSC::constructEmptyObject(globalObject, globalObject->errorPrototype())); +} + +extern "C" { +SYSV_ABI JSC::EncodedJSValue S3Error__toErrorInstance(const S3Error* arg0, + JSC::JSGlobalObject* globalObject) +{ + S3Error err = *arg0; + + JSC::VM& vm = globalObject->vm(); + + auto scope = DECLARE_THROW_SCOPE(vm); + JSC::JSValue message = JSC::jsUndefined(); + if (err.message.tag != BunStringTag::Empty) { + message = Bun::toJS(globalObject, err.message); + } + + auto& names = WebCore::builtinNames(vm); + + JSC::JSValue options = JSC::jsUndefined(); + auto prototype = defaultGlobalObject(globalObject)->m_S3ErrorStructure.getInitializedOnMainThread(globalObject); + JSC::JSObject* result = JSC::ErrorInstance::create(globalObject, prototype, message, options); + result->putDirect( + vm, vm.propertyNames->name, + JSC::JSValue(defaultGlobalObject(globalObject)->commonStrings().S3ErrorString(globalObject)), + JSC::PropertyAttribute::DontEnum | 0); + if (err.code.tag != BunStringTag::Empty) { + JSC::JSValue code = Bun::toJS(globalObject, err.code); + result->putDirect(vm, names.codePublicName(), code, + JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::DontEnum | 0); + } + + if (err.path.tag != BunStringTag::Empty) { + JSC::JSValue path = Bun::toJS(globalObject, err.path); + result->putDirect(vm, names.pathPublicName(), path, + JSC::PropertyAttribute::DontDelete | 0); + } + + RETURN_IF_EXCEPTION(scope, {}); + scope.release(); + + return JSC::JSValue::encode(JSC::JSValue(result)); +} +} +} diff --git a/src/bun.js/bindings/S3Error.h b/src/bun.js/bindings/S3Error.h new file mode 100644 index 00000000000000..516a9e907bb4a2 --- /dev/null +++ b/src/bun.js/bindings/S3Error.h @@ -0,0 +1,7 @@ +#pragma once + +namespace Bun { +using namespace JSC; + +Structure* createS3ErrorStructure(VM& vm, 
JSGlobalObject* globalObject); +} diff --git a/src/bun.js/bindings/Sink.h b/src/bun.js/bindings/Sink.h index 0b07ad0f5c6798..60ded138337273 100644 --- a/src/bun.js/bindings/Sink.h +++ b/src/bun.js/bindings/Sink.h @@ -9,7 +9,7 @@ enum SinkID : uint8_t { HTMLRewriterSink = 3, HTTPResponseSink = 4, HTTPSResponseSink = 5, - FetchTaskletChunkedRequestSink = 6, + NetworkSink = 6, }; static constexpr unsigned numberOfSinkIDs diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index 1889b0c9be950a..ce2cf49ca575c5 100644 --- a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -1,4 +1,5 @@ #include "root.h" + #include "JavaScriptCore/PropertySlot.h" #include "ZigGlobalObject.h" #include "helpers.h" @@ -33,6 +34,7 @@ #include "JavaScriptCore/JSLock.h" #include "JavaScriptCore/JSMap.h" #include "JavaScriptCore/JSMicrotask.h" + #include "JavaScriptCore/JSModuleLoader.h" #include "JavaScriptCore/JSModuleNamespaceObject.h" #include "JavaScriptCore/JSModuleNamespaceObjectInlines.h" @@ -84,7 +86,6 @@ #include "JSDOMConvertUnion.h" #include "JSDOMException.h" #include "JSDOMFile.h" -#include "JSS3File.h" #include "JSDOMFormData.h" #include "JSDOMURL.h" #include "JSEnvironmentVariableMap.h" @@ -158,6 +159,9 @@ #include "JSPerformanceResourceTiming.h" #include "JSPerformanceTiming.h" +#include "JSS3Bucket.h" +#include "JSS3File.h" +#include "S3Error.h" #if ENABLE(REMOTE_INSPECTOR) #include "JavaScriptCore/RemoteInspectorServer.h" #endif @@ -2793,37 +2797,6 @@ JSC_DEFINE_CUSTOM_SETTER(moduleNamespacePrototypeSetESModuleMarker, (JSGlobalObj return true; } -extern "C" JSC::EncodedJSValue JSS3File__upload(JSGlobalObject*, JSC::CallFrame*); -extern "C" JSC::EncodedJSValue JSS3File__presign(JSGlobalObject*, JSC::CallFrame*); -extern "C" JSC::EncodedJSValue JSS3File__unlink(JSGlobalObject*, JSC::CallFrame*); -extern "C" JSC::EncodedJSValue JSS3File__exists(JSGlobalObject*, JSC::CallFrame*); -extern "C" JSC::EncodedJSValue JSS3File__size(JSGlobalObject*, JSC::CallFrame*); - -JSC_DEFINE_HOST_FUNCTION(jsS3Upload, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) -{ - return JSS3File__upload(lexicalGlobalObject, callFrame); -} - -JSC_DEFINE_HOST_FUNCTION(jsS3Presign, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) -{ - return JSS3File__presign(lexicalGlobalObject, callFrame); -} - -JSC_DEFINE_HOST_FUNCTION(jsS3Unlink, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) -{ - return JSS3File__unlink(lexicalGlobalObject, callFrame); -} - -JSC_DEFINE_HOST_FUNCTION(jsS3Exists, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) -{ - return JSS3File__exists(lexicalGlobalObject, callFrame); -} - -JSC_DEFINE_HOST_FUNCTION(jsS3Size, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) -{ - return JSS3File__size(lexicalGlobalObject, callFrame); -} - void GlobalObject::finishCreation(VM& vm) { Base::finishCreation(vm); @@ -2845,18 +2818,6 @@ void GlobalObject::finishCreation(VM& vm) init.set(fileConstructor); }); - m_JSS3FileConstructor.initLater( - [](const Initializer& init) { - JSObject* s3Constructor = Bun::createJSS3FileConstructor(init.vm, init.owner); - s3Constructor->putDirectNativeFunction(init.vm, init.owner, JSC::Identifier::fromString(init.vm, "upload"_s), 3, jsS3Upload, ImplementationVisibility::Public, JSC::NoIntrinsic, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0); - s3Constructor->putDirectNativeFunction(init.vm, 
init.owner, JSC::Identifier::fromString(init.vm, "unlink"_s), 3, jsS3Unlink, ImplementationVisibility::Public, JSC::NoIntrinsic, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0); - s3Constructor->putDirectNativeFunction(init.vm, init.owner, JSC::Identifier::fromString(init.vm, "presign"_s), 3, jsS3Presign, ImplementationVisibility::Public, JSC::NoIntrinsic, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0); - s3Constructor->putDirectNativeFunction(init.vm, init.owner, JSC::Identifier::fromString(init.vm, "exists"_s), 3, jsS3Exists, ImplementationVisibility::Public, JSC::NoIntrinsic, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0); - s3Constructor->putDirectNativeFunction(init.vm, init.owner, JSC::Identifier::fromString(init.vm, "size"_s), 3, jsS3Size, ImplementationVisibility::Public, JSC::NoIntrinsic, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0); - - init.set(s3Constructor); - }); - m_cryptoObject.initLater( [](const Initializer& init) { JSC::JSGlobalObject* globalObject = init.owner; @@ -2904,6 +2865,20 @@ void GlobalObject::finishCreation(VM& vm) init.set(result.toObject(init.owner)); }); + m_JSS3BucketStructure.initLater( + [](const Initializer& init) { + init.set(Bun::createJSS3BucketStructure(init.vm, init.owner)); + }); + m_JSS3FileStructure.initLater( + [](const Initializer& init) { + init.set(Bun::createJSS3FileStructure(init.vm, init.owner)); + }); + + m_S3ErrorStructure.initLater( + [](const Initializer& init) { + init.set(Bun::createS3ErrorStructure(init.vm, init.owner)); + }); + m_commonJSModuleObjectStructure.initLater( [](const Initializer& init) { init.set(Bun::createCommonJSModuleStructure(reinterpret_cast(init.owner))); @@ -3152,7 +3127,7 @@ void GlobalObject::finishCreation(VM& vm) m_JSFetchTaskletChunkedRequestControllerPrototype.initLater( [](const JSC::LazyProperty::Initializer& init) { - auto* prototype = createJSSinkControllerPrototype(init.vm, init.owner, WebCore::SinkID::FetchTaskletChunkedRequestSink); + auto* prototype = createJSSinkControllerPrototype(init.vm, init.owner, WebCore::SinkID::NetworkSink); init.set(prototype); }); @@ -3284,11 +3259,11 @@ void GlobalObject::finishCreation(VM& vm) init.setConstructor(constructor); }); - m_JSFetchTaskletChunkedRequestSinkClassStructure.initLater( + m_JSNetworkSinkClassStructure.initLater( [](LazyClassStructure::Initializer& init) { - auto* prototype = createJSSinkPrototype(init.vm, init.global, WebCore::SinkID::FetchTaskletChunkedRequestSink); - auto* structure = JSFetchTaskletChunkedRequestSink::createStructure(init.vm, init.global, prototype); - auto* constructor = JSFetchTaskletChunkedRequestSinkConstructor::create(init.vm, init.global, JSFetchTaskletChunkedRequestSinkConstructor::createStructure(init.vm, init.global, init.global->functionPrototype()), jsCast(prototype)); + auto* prototype = createJSSinkPrototype(init.vm, init.global, WebCore::SinkID::NetworkSink); + auto* structure = JSNetworkSink::createStructure(init.vm, init.global, prototype); + auto* constructor = JSNetworkSinkConstructor::create(init.vm, init.global, JSNetworkSinkConstructor::createStructure(init.vm, init.global, init.global->functionPrototype()), jsCast(prototype)); init.setPrototype(prototype); init.setStructure(structure); init.setConstructor(constructor); @@ -3831,7 +3806,9 @@ void GlobalObject::visitChildrenImpl(JSCell* cell, Visitor& visitor) thisObject->m_JSCryptoKey.visit(visitor); thisObject->m_lazyStackCustomGetterSetter.visit(visitor); 
thisObject->m_JSDOMFileConstructor.visit(visitor); - thisObject->m_JSS3FileConstructor.visit(visitor); + thisObject->m_JSS3BucketStructure.visit(visitor); + thisObject->m_JSS3FileStructure.visit(visitor); + thisObject->m_S3ErrorStructure.visit(visitor); thisObject->m_JSFFIFunctionStructure.visit(visitor); thisObject->m_JSFileSinkClassStructure.visit(visitor); thisObject->m_JSFileSinkControllerPrototype.visit(visitor); @@ -3839,7 +3816,7 @@ void GlobalObject::visitChildrenImpl(JSCell* cell, Visitor& visitor) thisObject->m_JSHTTPResponseSinkClassStructure.visit(visitor); thisObject->m_JSHTTPSResponseControllerPrototype.visit(visitor); thisObject->m_JSHTTPSResponseSinkClassStructure.visit(visitor); - thisObject->m_JSFetchTaskletChunkedRequestSinkClassStructure.visit(visitor); + thisObject->m_JSNetworkSinkClassStructure.visit(visitor); thisObject->m_JSFetchTaskletChunkedRequestControllerPrototype.visit(visitor); thisObject->m_JSSocketAddressStructure.visit(visitor); thisObject->m_JSSQLStatementStructure.visit(visitor); diff --git a/src/bun.js/bindings/ZigGlobalObject.h b/src/bun.js/bindings/ZigGlobalObject.h index 33beb34c7e66a4..fb6d919ba593f0 100644 --- a/src/bun.js/bindings/ZigGlobalObject.h +++ b/src/bun.js/bindings/ZigGlobalObject.h @@ -210,10 +210,10 @@ class GlobalObject : public Bun::GlobalScope { JSC::JSValue HTTPSResponseSinkPrototype() const { return m_JSHTTPSResponseSinkClassStructure.prototypeInitializedOnMainThread(this); } JSC::JSValue JSReadableHTTPSResponseSinkControllerPrototype() const { return m_JSHTTPSResponseControllerPrototype.getInitializedOnMainThread(this); } - JSC::Structure* FetchTaskletChunkedRequestSinkStructure() const { return m_JSFetchTaskletChunkedRequestSinkClassStructure.getInitializedOnMainThread(this); } - JSC::JSObject* FetchTaskletChunkedRequestSink() { return m_JSFetchTaskletChunkedRequestSinkClassStructure.constructorInitializedOnMainThread(this); } - JSC::JSValue FetchTaskletChunkedRequestSinkPrototype() const { return m_JSFetchTaskletChunkedRequestSinkClassStructure.prototypeInitializedOnMainThread(this); } - JSC::JSValue JSReadableFetchTaskletChunkedRequestSinkControllerPrototype() const { return m_JSFetchTaskletChunkedRequestControllerPrototype.getInitializedOnMainThread(this); } + JSC::Structure* NetworkSinkStructure() const { return m_JSNetworkSinkClassStructure.getInitializedOnMainThread(this); } + JSC::JSObject* NetworkSink() { return m_JSNetworkSinkClassStructure.constructorInitializedOnMainThread(this); } + JSC::JSValue NetworkSinkPrototype() const { return m_JSNetworkSinkClassStructure.prototypeInitializedOnMainThread(this); } + JSC::JSValue JSReadableNetworkSinkControllerPrototype() const { return m_JSFetchTaskletChunkedRequestControllerPrototype.getInitializedOnMainThread(this); } JSC::Structure* JSBufferListStructure() const { return m_JSBufferListClassStructure.getInitializedOnMainThread(this); } JSC::JSObject* JSBufferList() { return m_JSBufferListClassStructure.constructorInitializedOnMainThread(this); } @@ -478,9 +478,12 @@ class GlobalObject : public Bun::GlobalScope { LazyProperty m_processEnvObject; + LazyProperty m_JSS3BucketStructure; + LazyProperty m_JSS3FileStructure; + LazyProperty m_S3ErrorStructure; + JSObject* cryptoObject() const { return m_cryptoObject.getInitializedOnMainThread(this); } JSObject* JSDOMFileConstructor() const { return m_JSDOMFileConstructor.getInitializedOnMainThread(this); } - JSObject* JSS3FileConstructor() const { return m_JSS3FileConstructor.getInitializedOnMainThread(this); } Bun::CommonStrings& 
commonStrings() { return m_commonStrings; } Bun::Http2CommonStrings& http2CommonStrings() { return m_http2_commongStrings; } @@ -521,7 +524,7 @@ class GlobalObject : public Bun::GlobalScope { LazyClassStructure m_JSFileSinkClassStructure; LazyClassStructure m_JSHTTPResponseSinkClassStructure; LazyClassStructure m_JSHTTPSResponseSinkClassStructure; - LazyClassStructure m_JSFetchTaskletChunkedRequestSinkClassStructure; + LazyClassStructure m_JSNetworkSinkClassStructure; LazyClassStructure m_JSStringDecoderClassStructure; LazyClassStructure m_NapiClassStructure; @@ -574,7 +577,6 @@ class GlobalObject : public Bun::GlobalScope { LazyProperty m_importMetaObjectStructure; LazyProperty m_asyncBoundFunctionStructure; LazyProperty m_JSDOMFileConstructor; - LazyProperty m_JSS3FileConstructor; LazyProperty m_JSCryptoKey; LazyProperty m_NapiExternalStructure; diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index 19ba3bb1dfeb29..4bb4689510b4c5 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -1952,7 +1952,7 @@ JSC__JSValue SystemError__toErrorInstance(const SystemError* arg0, if (err.code.tag != BunStringTag::Empty) { JSC::JSValue code = Bun::toJS(globalObject, err.code); result->putDirect(vm, names.codePublicName(), code, - JSC::PropertyAttribute::DontDelete | 0); + JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::DontEnum | 0); result->putDirect(vm, vm.propertyNames->name, code, JSC::PropertyAttribute::DontEnum | 0); } else { diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index 4fc4b0ddeef221..964eab3dee3ff5 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -6789,6 +6789,12 @@ pub const JSHostFunctionType = fn (*JSGlobalObject, *CallFrame) callconv(JSC.con pub const JSHostFunctionTypeWithCCallConvForAssertions = fn (*JSGlobalObject, *CallFrame) callconv(.C) JSValue; pub const JSHostFunctionPtr = *const JSHostFunctionType; pub const JSHostZigFunction = fn (*JSGlobalObject, *CallFrame) bun.JSError!JSValue; +pub fn JSHostZigFunctionWithContext(comptime ContextType: type) type { + return fn (*ContextType, *JSGlobalObject, *CallFrame) bun.JSError!JSValue; +} +pub fn JSHostFunctionTypeWithContext(comptime ContextType: type) type { + return fn (*ContextType, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue; +} pub fn toJSHostFunction(comptime Function: JSHostZigFunction) JSC.JSHostFunctionType { return struct { @@ -6826,6 +6832,42 @@ pub fn toJSHostFunction(comptime Function: JSHostZigFunction) JSC.JSHostFunction } }.function; } +pub fn toJSHostFunctionWithContext(comptime ContextType: type, comptime Function: JSHostZigFunctionWithContext(ContextType)) JSHostFunctionTypeWithContext(ContextType) { + return struct { + pub fn function(ctx: *ContextType, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { + if (bun.Environment.allow_assert and bun.Environment.is_canary) { + const value = Function(ctx, globalThis, callframe) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalThis.throwOutOfMemoryValue(), + }; + if (comptime bun.Environment.isDebug) { + if (value != .zero) { + if (globalThis.hasException()) { + var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalThis }; + bun.Output.prettyErrorln( + \\Assertion failed: Native function returned a non-zero JSValue while an exception is pending + \\ + \\ fn: {s} + \\ value: {} + \\ + , .{ + &Function, // use 
`(lldb) image lookup --address 0x1ec4` to discover what function failed + value.toFmt(&formatter), + }); + Output.flush(); + } + } + } + bun.assert((value == .zero) == globalThis.hasException()); + return value; + } + return @call(.always_inline, Function, .{ ctx, globalThis, callframe }) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalThis.throwOutOfMemoryValue(), + }; + } + }.function; +} pub fn toJSHostValue(globalThis: *JSGlobalObject, value: error{ OutOfMemory, JSError }!JSValue) JSValue { if (bun.Environment.allow_assert and bun.Environment.is_canary) { diff --git a/src/bun.js/bindings/exports.zig b/src/bun.js/bindings/exports.zig index b7374f27051857..a581958296af62 100644 --- a/src/bun.js/bindings/exports.zig +++ b/src/bun.js/bindings/exports.zig @@ -144,7 +144,7 @@ pub const JSArrayBufferSink = JSC.WebCore.ArrayBufferSink.JSSink; pub const JSHTTPSResponseSink = JSC.WebCore.HTTPSResponseSink.JSSink; pub const JSHTTPResponseSink = JSC.WebCore.HTTPResponseSink.JSSink; pub const JSFileSink = JSC.WebCore.FileSink.JSSink; -pub const JSFetchTaskletChunkedRequestSink = JSC.WebCore.FetchTaskletChunkedRequestSink.JSSink; +pub const JSNetworkSink = JSC.WebCore.NetworkSink.JSSink; // WebSocket pub const WebSocketHTTPClient = @import("../../http/websocket_http_client.zig").WebSocketHTTPClient; @@ -967,7 +967,7 @@ comptime { JSArrayBufferSink.shim.ref(); JSHTTPResponseSink.shim.ref(); JSHTTPSResponseSink.shim.ref(); - JSFetchTaskletChunkedRequestSink.shim.ref(); + JSNetworkSink.shim.ref(); JSFileSink.shim.ref(); JSFileSink.shim.ref(); _ = ZigString__free; diff --git a/src/bun.js/bindings/headers.h b/src/bun.js/bindings/headers.h index 9bdf332b16f7bf..ab9f3ca4370449 100644 --- a/src/bun.js/bindings/headers.h +++ b/src/bun.js/bindings/headers.h @@ -686,24 +686,24 @@ ZIG_DECL void FileSink__updateRef(void* arg0, bool arg1); BUN_DECLARE_HOST_FUNCTION(FileSink__write); #endif -CPP_DECL JSC__JSValue FetchTaskletChunkedRequestSink__assignToStream(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1, void* arg2, void** arg3); -CPP_DECL JSC__JSValue FetchTaskletChunkedRequestSink__createObject(JSC__JSGlobalObject* arg0, void* arg1, uintptr_t destructor); -CPP_DECL void FetchTaskletChunkedRequestSink__detachPtr(JSC__JSValue JSValue0); -CPP_DECL void* FetchTaskletChunkedRequestSink__fromJS(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1); -CPP_DECL void FetchTaskletChunkedRequestSink__onClose(JSC__JSValue JSValue0, JSC__JSValue JSValue1); -CPP_DECL void FetchTaskletChunkedRequestSink__onReady(JSC__JSValue JSValue0, JSC__JSValue JSValue1, JSC__JSValue JSValue2); - -#ifdef __cplusplus - -ZIG_DECL JSC__JSValue FetchTaskletChunkedRequestSink__close(JSC__JSGlobalObject* arg0, void* arg1); -BUN_DECLARE_HOST_FUNCTION(FetchTaskletChunkedRequestSink__construct); -BUN_DECLARE_HOST_FUNCTION(FetchTaskletChunkedRequestSink__end); -ZIG_DECL JSC__JSValue SYSV_ABI SYSV_ABI FetchTaskletChunkedRequestSink__endWithSink(void* arg0, JSC__JSGlobalObject* arg1); -ZIG_DECL void FetchTaskletChunkedRequestSink__finalize(void* arg0); -BUN_DECLARE_HOST_FUNCTION(FetchTaskletChunkedRequestSink__flush); -BUN_DECLARE_HOST_FUNCTION(FetchTaskletChunkedRequestSink__start); -ZIG_DECL void FetchTaskletChunkedRequestSink__updateRef(void* arg0, bool arg1); -BUN_DECLARE_HOST_FUNCTION(FetchTaskletChunkedRequestSink__write); +CPP_DECL JSC__JSValue NetworkSink__assignToStream(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1, void* arg2, void** arg3); +CPP_DECL JSC__JSValue 
NetworkSink__createObject(JSC__JSGlobalObject* arg0, void* arg1, uintptr_t destructor); +CPP_DECL void NetworkSink__detachPtr(JSC__JSValue JSValue0); +CPP_DECL void* NetworkSink__fromJS(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1); +CPP_DECL void NetworkSink__onClose(JSC__JSValue JSValue0, JSC__JSValue JSValue1); +CPP_DECL void NetworkSink__onReady(JSC__JSValue JSValue0, JSC__JSValue JSValue1, JSC__JSValue JSValue2); + +#ifdef __cplusplus + +ZIG_DECL JSC__JSValue NetworkSink__close(JSC__JSGlobalObject* arg0, void* arg1); +BUN_DECLARE_HOST_FUNCTION(NetworkSink__construct); +BUN_DECLARE_HOST_FUNCTION(NetworkSink__end); +ZIG_DECL JSC__JSValue SYSV_ABI SYSV_ABI NetworkSink__endWithSink(void* arg0, JSC__JSGlobalObject* arg1); +ZIG_DECL void NetworkSink__finalize(void* arg0); +BUN_DECLARE_HOST_FUNCTION(NetworkSink__flush); +BUN_DECLARE_HOST_FUNCTION(NetworkSink__start); +ZIG_DECL void NetworkSink__updateRef(void* arg0, bool arg1); +BUN_DECLARE_HOST_FUNCTION(NetworkSink__write); #endif #ifdef __cplusplus diff --git a/src/bun.js/bindings/headers.zig b/src/bun.js/bindings/headers.zig index c4f37d3490cc6c..91264230dcce74 100644 --- a/src/bun.js/bindings/headers.zig +++ b/src/bun.js/bindings/headers.zig @@ -377,13 +377,13 @@ pub extern fn FileSink__setDestroyCallback(JSValue0: JSC__JSValue, callback: usi pub extern fn FileSink__fromJS(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue) ?*anyopaque; pub extern fn FileSink__onClose(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue) void; pub extern fn FileSink__onReady(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue, JSValue2: JSC__JSValue) void; -pub extern fn FetchTaskletChunkedRequestSink__assignToStream(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue, arg2: ?*anyopaque, arg3: [*c]*anyopaque) JSC__JSValue; -pub extern fn FetchTaskletChunkedRequestSink__createObject(arg0: *bindings.JSGlobalObject, arg1: ?*anyopaque, onDestroyPtrTag: usize) JSC__JSValue; -pub extern fn FetchTaskletChunkedRequestSink__detachPtr(JSValue0: JSC__JSValue) void; -pub extern fn FetchTaskletChunkedRequestSink__setDestroyCallback(JSValue0: JSC__JSValue, callback: usize) void; -pub extern fn FetchTaskletChunkedRequestSink__fromJS(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue) ?*anyopaque; -pub extern fn FetchTaskletChunkedRequestSink__onClose(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue) void; -pub extern fn FetchTaskletChunkedRequestSink__onReady(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue, JSValue2: JSC__JSValue) void; +pub extern fn NetworkSink__assignToStream(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue, arg2: ?*anyopaque, arg3: [*c]*anyopaque) JSC__JSValue; +pub extern fn NetworkSink__createObject(arg0: *bindings.JSGlobalObject, arg1: ?*anyopaque, onDestroyPtrTag: usize) JSC__JSValue; +pub extern fn NetworkSink__detachPtr(JSValue0: JSC__JSValue) void; +pub extern fn NetworkSink__setDestroyCallback(JSValue0: JSC__JSValue, callback: usize) void; +pub extern fn NetworkSink__fromJS(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue) ?*anyopaque; +pub extern fn NetworkSink__onClose(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue) void; +pub extern fn NetworkSink__onReady(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue, JSValue2: JSC__JSValue) void; pub extern fn ZigException__fromException(arg0: [*c]bindings.Exception) ZigException; pub const JSC__GetterSetter = bindings.GetterSetter; diff --git a/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h b/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h index dc805895d2ec81..2dffbe84658348 
100644 --- a/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h +++ b/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h @@ -58,6 +58,8 @@ class DOMClientIsoSubspaces { std::unique_ptr m_clientSubspaceForFunctionTemplate; std::unique_ptr m_clientSubspaceForV8Function; std::unique_ptr m_clientSubspaceForNodeVMGlobalObject; + std::unique_ptr m_clientSubspaceForJSS3Bucket; + std::unique_ptr m_clientSubspaceForJSS3File; #include "ZigGeneratedClasses+DOMClientIsoSubspaces.h" /* --- bun --- */ diff --git a/src/bun.js/bindings/webcore/DOMIsoSubspaces.h b/src/bun.js/bindings/webcore/DOMIsoSubspaces.h index 2d4eb091c5129f..5af65d80acb605 100644 --- a/src/bun.js/bindings/webcore/DOMIsoSubspaces.h +++ b/src/bun.js/bindings/webcore/DOMIsoSubspaces.h @@ -58,6 +58,8 @@ class DOMIsoSubspaces { std::unique_ptr m_subspaceForFunctionTemplate; std::unique_ptr m_subspaceForV8Function; std::unique_ptr m_subspaceForNodeVMGlobalObject; + std::unique_ptr m_subspaceForJSS3Bucket; + std::unique_ptr m_subspaceForJSS3File; #include "ZigGeneratedClasses+DOMIsoSubspaces.h" /*-- BUN --*/ diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 72232be19642b0..1a7440ebe89c07 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -2193,6 +2193,31 @@ pub const Process = struct { pub export const Bun__versions_zstd: [*:0]const u8 = bun.Global.versions.zstd; }; +pub const PathOrBlob = union(enum) { + path: JSC.Node.PathOrFileDescriptor, + blob: Blob, + + const Blob = JSC.WebCore.Blob; + + pub fn fromJSNoCopy(ctx: *JSC.JSGlobalObject, args: *JSC.Node.ArgumentsSlice) bun.JSError!PathOrBlob { + if (try JSC.Node.PathOrFileDescriptor.fromJS(ctx, args, bun.default_allocator)) |path| { + return PathOrBlob{ + .path = path, + }; + } + + const arg = args.nextEat() orelse { + return ctx.throwInvalidArgumentTypeValue("destination", "path, file descriptor, or Blob", .undefined); + }; + if (arg.as(Blob)) |blob| { + return PathOrBlob{ + .blob = blob.*, + }; + } + return ctx.throwInvalidArgumentTypeValue("destination", "path, file descriptor, or Blob", arg); + } +}; + comptime { std.testing.refAllDecls(Process); } diff --git a/src/bun.js/webcore/S3Bucket.zig b/src/bun.js/webcore/S3Bucket.zig new file mode 100644 index 00000000000000..7c8eed53927537 --- /dev/null +++ b/src/bun.js/webcore/S3Bucket.zig @@ -0,0 +1,267 @@ +const bun = @import("root").bun; +const JSC = bun.JSC; +const JSValue = JSC.JSValue; +const Blob = JSC.WebCore.Blob; +const PathOrBlob = JSC.Node.PathOrBlob; +const ZigString = JSC.ZigString; +const Method = bun.http.Method; +const S3File = @import("./S3File.zig"); +const AWSCredentials = bun.AWSCredentials; + +const S3BucketOptions = struct { + credentials: *AWSCredentials, + options: bun.S3.MultiPartUpload.MultiPartUploadOptions = .{}, + acl: ?bun.S3.ACL = null, + pub usingnamespace bun.New(@This()); + + pub fn deinit(this: *@This()) void { + this.credentials.deref(); + this.destroy(); + } +}; + +pub fn writeFormatCredentials(credentials: *AWSCredentials, options: bun.S3.MultiPartUpload.MultiPartUploadOptions, acl: ?bun.S3.ACL, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { + try writer.writeAll("\n"); + + { + const Writer = @TypeOf(writer); + + formatter.indent += 1; + defer formatter.indent -|= 1; + + const endpoint = if (credentials.endpoint.len > 0) credentials.endpoint else "https://s3..amazonaws.com"; + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("endpoint: \"", 
enable_ansi_colors)); + try writer.print(comptime bun.Output.prettyFmt("{s}\"", enable_ansi_colors), .{endpoint}); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + const region = if (credentials.region.len > 0) credentials.region else AWSCredentials.guessRegion(credentials.endpoint); + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("region: \"", enable_ansi_colors)); + try writer.print(comptime bun.Output.prettyFmt("{s}\"", enable_ansi_colors), .{region}); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + // PS: We don't want to print the credentials if they are empty just signal that they are there without revealing them + if (credentials.accessKeyId.len > 0) { + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("accessKeyId: \"[REDACTED]\"", enable_ansi_colors)); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + + try writer.writeAll("\n"); + } + + if (credentials.secretAccessKey.len > 0) { + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("secretAccessKey: \"[REDACTED]\"", enable_ansi_colors)); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + + try writer.writeAll("\n"); + } + + if (credentials.sessionToken.len > 0) { + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("sessionToken: \"[REDACTED]\"", enable_ansi_colors)); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + + try writer.writeAll("\n"); + } + + if (acl) |acl_value| { + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("acl: ", enable_ansi_colors)); + try writer.print(comptime bun.Output.prettyFmt("{s}\"", enable_ansi_colors), .{acl_value.toString()}); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + + try writer.writeAll("\n"); + } + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("partSize: ", enable_ansi_colors)); + try formatter.printAs(.Double, Writer, writer, JSC.JSValue.jsNumber(options.partSize), .NumberObject, enable_ansi_colors); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + + try writer.writeAll("\n"); + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("queueSize: ", enable_ansi_colors)); + try formatter.printAs(.Double, Writer, writer, JSC.JSValue.jsNumber(options.queueSize), .NumberObject, enable_ansi_colors); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("retry: ", enable_ansi_colors)); + try formatter.printAs(.Double, Writer, writer, JSC.JSValue.jsNumber(options.retry), .NumberObject, enable_ansi_colors); + try writer.writeAll("\n"); + } +} +pub fn writeFormat(this: *S3BucketOptions, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { + try writer.writeAll(comptime bun.Output.prettyFmt("S3Bucket", enable_ansi_colors)); + if (this.credentials.bucket.len > 0) { + try writer.print( + comptime bun.Output.prettyFmt(" (\"{s}\") {{", enable_ansi_colors), + .{ + 
this.credentials.bucket, + }, + ); + } else { + try writer.writeAll(comptime bun.Output.prettyFmt(" {{", enable_ansi_colors)); + } + + try writeFormatCredentials(this.credentials, this.options, this.acl, Formatter, formatter, writer, enable_ansi_colors); + try formatter.writeIndent(@TypeOf(writer), writer); + try writer.writeAll("}"); + formatter.resetLine(); +} +extern fn BUN__getJSS3Bucket(value: JSValue) callconv(JSC.conv) ?*S3BucketOptions; + +pub fn fromJS(value: JSValue) ?*S3BucketOptions { + return BUN__getJSS3Bucket(value); +} + +pub fn call(ptr: *S3BucketOptions, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + if (args.len() == 0) { + return globalThis.ERR_MISSING_ARGS("Expected a path ", .{}).throw(); + } + return globalThis.throwInvalidArguments("Expected a path", .{}); + }; + errdefer path.deinit(); + const options = args.nextEat(); + var blob = Blob.new(try S3File.constructS3FileWithAWSCredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl)); + blob.allocator = bun.default_allocator; + return blob.toJS(globalThis); +} + +pub fn presign(ptr: *S3BucketOptions, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + if (args.len() == 0) { + return globalThis.ERR_MISSING_ARGS("Expected a path to presign", .{}).throw(); + } + return globalThis.throwInvalidArguments("Expected a path to presign", .{}); + }; + errdefer path.deinit(); + + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithAWSCredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + return S3File.getPresignUrlFrom(&blob, globalThis, options); +} + +pub fn exists(ptr: *S3BucketOptions, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + if (args.len() == 0) { + return globalThis.ERR_MISSING_ARGS("Expected a path to check if it exists", .{}).throw(); + } + return globalThis.throwInvalidArguments("Expected a path to check if it exists", .{}); + }; + errdefer path.deinit(); + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithAWSCredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + return S3File.S3BlobStatTask.exists(globalThis, &blob); +} + +pub fn size(ptr: *S3BucketOptions, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + if (args.len() == 0) { + return globalThis.ERR_MISSING_ARGS("Expected a path to check the size of", .{}).throw(); + } + 
return globalThis.throwInvalidArguments("Expected a path to check the size of", .{}); + }; + errdefer path.deinit(); + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithAWSCredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + return S3File.S3BlobStatTask.size(globalThis, &blob); +} + +pub fn write(ptr: *S3BucketOptions, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + return globalThis.ERR_MISSING_ARGS("Expected a path to write to", .{}).throw(); + }; + errdefer path.deinit(); + const data = args.nextEat() orelse { + return globalThis.ERR_MISSING_ARGS("Expected a Blob-y thing to write", .{}).throw(); + }; + + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithAWSCredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + var blob_internal: PathOrBlob = .{ .blob = blob }; + return Blob.writeFileInternal(globalThis, &blob_internal, data, .{ + .mkdirp_if_not_exists = false, + .extra_options = options, + }); +} + +pub fn unlink(ptr: *S3BucketOptions, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + return globalThis.ERR_MISSING_ARGS("Expected a path to unlink", .{}).throw(); + }; + errdefer path.deinit(); + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithAWSCredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + return blob.store.?.data.s3.unlink(blob.store.?, globalThis, options); +} +pub fn construct(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) ?*S3BucketOptions { + const arguments = callframe.arguments_old(1).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const options = args.nextEat() orelse { + globalThis.ERR_MISSING_ARGS("Expected S3 options to be passed", .{}).throw() catch return null; + }; + if (options.isEmptyOrUndefinedOrNull() or !options.isObject()) { + globalThis.throwInvalidArguments("Expected S3 options to be passed", .{}) catch return null; + } + var aws_options = AWSCredentials.getCredentialsWithOptions(globalThis.bunVM().transpiler.env.getAWSCredentials(), .{}, options, null, globalThis) catch return null; + defer aws_options.deinit(); + return S3BucketOptions.new(.{ + .credentials = aws_options.credentials.dupe(), + .options = aws_options.options, + .acl = aws_options.acl, + }); +} +pub fn finalize(ptr: *S3BucketOptions) callconv(JSC.conv) void { + ptr.deinit(); +} +pub const exports = struct { + pub const JSS3Bucket__exists = JSC.toJSHostFunctionWithContext(S3BucketOptions, exists); + pub const JSS3Bucket__size = JSC.toJSHostFunctionWithContext(S3BucketOptions, size); + pub const JSS3Bucket__write = JSC.toJSHostFunctionWithContext(S3BucketOptions, write); + pub const JSS3Bucket__unlink = JSC.toJSHostFunctionWithContext(S3BucketOptions, unlink); + pub const JSS3Bucket__presign = 
JSC.toJSHostFunctionWithContext(S3BucketOptions, presign); + pub const JSS3Bucket__call = JSC.toJSHostFunctionWithContext(S3BucketOptions, call); +}; + +comptime { + @export(exports.JSS3Bucket__exists, .{ .name = "JSS3Bucket__exists" }); + @export(exports.JSS3Bucket__size, .{ .name = "JSS3Bucket__size" }); + @export(exports.JSS3Bucket__write, .{ .name = "JSS3Bucket__write" }); + @export(exports.JSS3Bucket__unlink, .{ .name = "JSS3Bucket__unlink" }); + @export(exports.JSS3Bucket__presign, .{ .name = "JSS3Bucket__presign" }); + @export(exports.JSS3Bucket__call, .{ .name = "JSS3Bucket__call" }); + @export(finalize, .{ .name = "JSS3Bucket__deinit" }); + @export(construct, .{ .name = "JSS3Bucket__construct" }); +} diff --git a/src/bun.js/webcore/S3File.zig b/src/bun.js/webcore/S3File.zig new file mode 100644 index 00000000000000..a7a4a272a518b4 --- /dev/null +++ b/src/bun.js/webcore/S3File.zig @@ -0,0 +1,545 @@ +const bun = @import("root").bun; +const JSC = bun.JSC; +const JSValue = JSC.JSValue; +const Blob = JSC.WebCore.Blob; +const PathOrBlob = JSC.Node.PathOrBlob; +const ZigString = JSC.ZigString; +const Method = bun.http.Method; +const strings = bun.strings; +const Output = bun.Output; +const S3Bucket = @import("./S3Bucket.zig"); + +pub fn writeFormat(s3: *Blob.S3Store, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { + try writer.writeAll(comptime Output.prettyFmt("S3Ref", enable_ansi_colors)); + const credentials = s3.getCredentials(); + + if (credentials.bucket.len > 0) { + try writer.print( + comptime Output.prettyFmt(" (\"{s}/{s}\") {{", enable_ansi_colors), + .{ + credentials.bucket, + s3.path(), + }, + ); + } else { + try writer.print( + comptime Output.prettyFmt(" (\"{s}\") {{", enable_ansi_colors), + .{ + s3.path(), + }, + ); + } + + try S3Bucket.writeFormatCredentials(credentials, s3.options, s3.acl, Formatter, formatter, writer, enable_ansi_colors); + try formatter.writeIndent(@TypeOf(writer), writer); + try writer.writeAll("}"); + formatter.resetLine(); +} +pub fn presign(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + errdefer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to presign", .{}); + } + + switch (path_or_blob) { + .path => |path| { + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to presign", .{}); + } + const options = args.nextEat(); + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + return try getPresignUrlFrom(&blob, globalThis, options); + }, + .blob => return try getPresignUrlFrom(&path_or_blob.blob, globalThis, args.nextEat()), + } +} + +pub fn unlink(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + errdefer { + if (path_or_blob == .path) { + 
path_or_blob.path.deinit(); + } + } + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to delete", .{}); + } + + switch (path_or_blob) { + .path => |path| { + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to delete", .{}); + } + const options = args.nextEat(); + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + return try blob.store.?.data.s3.unlink(blob.store.?, globalThis, options); + }, + .blob => |blob| { + return try blob.store.?.data.s3.unlink(blob.store.?, globalThis, args.nextEat()); + }, + } +} + +pub fn upload(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + errdefer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to upload", .{}); + } + + const data = args.nextEat() orelse { + return globalThis.ERR_MISSING_ARGS("Expected a Blob-y thing to upload", .{}).throw(); + }; + + switch (path_or_blob) { + .path => |path| { + const options = args.nextEat(); + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to upload", .{}); + } + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + + var blob_internal: PathOrBlob = .{ .blob = blob }; + return try Blob.writeFileInternal(globalThis, &blob_internal, data, .{ + .mkdirp_if_not_exists = false, + .extra_options = options, + }); + }, + .blob => return try Blob.writeFileInternal(globalThis, &path_or_blob, data, .{ + .mkdirp_if_not_exists = false, + .extra_options = args.nextEat(), + }), + } +} + +pub fn size(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + errdefer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to get size", .{}); + } + + switch (path_or_blob) { + .path => |path| { + const options = args.nextEat(); + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to get size", .{}); + } + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + + return S3BlobStatTask.size(globalThis, &blob); + }, + .blob => |*blob| { + return Blob.getSize(blob, globalThis); + }, + } +} +pub fn exists(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + 
errdefer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to check if it exists", .{}); + } + + switch (path_or_blob) { + .path => |path| { + const options = args.nextEat(); + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to check if it exists", .{}); + } + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + + return S3BlobStatTask.exists(globalThis, &blob); + }, + .blob => |*blob| { + return Blob.getExists(blob, globalThis, callframe); + }, + } +} + +fn constructS3FileInternalStore( + globalObject: *JSC.JSGlobalObject, + path: JSC.Node.PathLike, + options: ?JSC.JSValue, +) bun.JSError!Blob { + // get credentials from env + const existing_credentials = globalObject.bunVM().transpiler.env.getAWSCredentials(); + return constructS3FileWithAWSCredentials(globalObject, path, options, existing_credentials); +} +/// if the credentials have changed, we need to clone it, if not we can just ref/deref it +pub fn constructS3FileWithAWSCredentialsAndOptions( + globalObject: *JSC.JSGlobalObject, + path: JSC.Node.PathLike, + options: ?JSC.JSValue, + default_credentials: *AWS, + default_options: bun.S3.MultiPartUpload.MultiPartUploadOptions, + default_acl: ?bun.S3.ACL, +) bun.JSError!Blob { + var aws_options = try AWS.getCredentialsWithOptions(default_credentials.*, default_options, options, default_acl, globalObject); + defer aws_options.deinit(); + + const store = brk: { + if (aws_options.changed_credentials) { + break :brk Blob.Store.initS3(path, null, aws_options.credentials, bun.default_allocator) catch bun.outOfMemory(); + } else { + break :brk Blob.Store.initS3WithReferencedCredentials(path, null, default_credentials, bun.default_allocator) catch bun.outOfMemory(); + } + }; + errdefer store.deinit(); + store.data.s3.options = aws_options.options; + store.data.s3.acl = aws_options.acl; + var blob = Blob.initWithStore(store, globalObject); + if (options) |opts| { + if (opts.isObject()) { + if (try opts.getTruthyComptime(globalObject, "type")) |file_type| { + inner: { + if (file_type.isString()) { + var allocator = bun.default_allocator; + var str = file_type.toSlice(globalObject, bun.default_allocator); + defer str.deinit(); + const slice = str.slice(); + if (!strings.isAllASCII(slice)) { + break :inner; + } + blob.content_type_was_set = true; + if (globalObject.bunVM().mimeType(str.slice())) |entry| { + blob.content_type = entry.value; + break :inner; + } + const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); + blob.content_type = strings.copyLowercase(slice, content_type_buf); + blob.content_type_allocated = true; + } + } + } + } + } + return blob; +} + +pub fn constructS3FileWithAWSCredentials( + globalObject: *JSC.JSGlobalObject, + path: JSC.Node.PathLike, + options: ?JSC.JSValue, + existing_credentials: AWS, +) bun.JSError!Blob { + var aws_options = try AWS.getCredentialsWithOptions(existing_credentials, .{}, options, null, globalObject); + defer aws_options.deinit(); + const store = Blob.Store.initS3(path, null, aws_options.credentials, bun.default_allocator) catch bun.outOfMemory(); + errdefer store.deinit(); + store.data.s3.options = aws_options.options; + store.data.s3.acl = aws_options.acl; + var blob = Blob.initWithStore(store, globalObject); + if (options) |opts| { + if 
(opts.isObject()) { + if (try opts.getTruthyComptime(globalObject, "type")) |file_type| { + inner: { + if (file_type.isString()) { + var allocator = bun.default_allocator; + var str = file_type.toSlice(globalObject, bun.default_allocator); + defer str.deinit(); + const slice = str.slice(); + if (!strings.isAllASCII(slice)) { + break :inner; + } + blob.content_type_was_set = true; + if (globalObject.bunVM().mimeType(str.slice())) |entry| { + blob.content_type = entry.value; + break :inner; + } + const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); + blob.content_type = strings.copyLowercase(slice, content_type_buf); + blob.content_type_allocated = true; + } + } + } + } + } + return blob; +} +fn constructS3FileInternal( + globalObject: *JSC.JSGlobalObject, + path: JSC.Node.PathLike, + options: ?JSC.JSValue, +) bun.JSError!*Blob { + var ptr = Blob.new(try constructS3FileInternalStore(globalObject, path, options)); + ptr.allocator = bun.default_allocator; + return ptr; +} + +const AWS = bun.S3.AWSCredentials; + +pub const S3BlobStatTask = struct { + promise: JSC.JSPromise.Strong, + store: *Blob.Store, + usingnamespace bun.New(S3BlobStatTask); + + pub fn onS3ExistsResolved(result: AWS.S3StatResult, this: *S3BlobStatTask) void { + defer this.deinit(); + const globalThis = this.promise.globalObject().?; + switch (result) { + .not_found => { + this.promise.resolve(globalThis, .false); + }, + .success => |_| { + // calling .exists() should not prevent it to download a bigger file + // this would make it download a slice of the actual value, if the file changes before we download it + // if (this.blob.size == Blob.max_size) { + // this.blob.size = @truncate(stat.size); + // } + this.promise.resolve(globalThis, .true); + }, + .failure => |err| { + this.promise.rejectOnNextTick(globalThis, err.toJS(globalThis, this.store.data.s3.path())); + }, + } + } + + pub fn onS3SizeResolved(result: AWS.S3StatResult, this: *S3BlobStatTask) void { + defer this.deinit(); + const globalThis = this.promise.globalObject().?; + + switch (result) { + .success => |stat| { + this.promise.resolve(globalThis, JSValue.jsNumber(stat.size)); + }, + inline .not_found, .failure => |err| { + this.promise.rejectOnNextTick(globalThis, err.toJS(globalThis, this.store.data.s3.path())); + }, + } + } + + pub fn exists(globalThis: *JSC.JSGlobalObject, blob: *Blob) JSValue { + const this = S3BlobStatTask.new(.{ + .promise = JSC.JSPromise.Strong.init(globalThis), + .store = blob.store.?, + }); + this.store.ref(); + const promise = this.promise.value(); + const credentials = blob.store.?.data.s3.getCredentials(); + const path = blob.store.?.data.s3.path(); + const env = globalThis.bunVM().transpiler.env; + + credentials.s3Stat(path, @ptrCast(&S3BlobStatTask.onS3ExistsResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); + return promise; + } + + pub fn size(globalThis: *JSC.JSGlobalObject, blob: *Blob) JSValue { + const this = S3BlobStatTask.new(.{ + .promise = JSC.JSPromise.Strong.init(globalThis), + .store = blob.store.?, + }); + this.store.ref(); + const promise = this.promise.value(); + const credentials = blob.store.?.data.s3.getCredentials(); + const path = blob.store.?.data.s3.path(); + const env = globalThis.bunVM().transpiler.env; + + credentials.s3Stat(path, @ptrCast(&S3BlobStatTask.onS3SizeResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); + return promise; + } + + pub fn deinit(this: *S3BlobStatTask) void { + this.store.deref(); + 
this.promise.deinit(); + this.destroy(); + } +}; + +pub fn getPresignUrlFrom(this: *Blob, globalThis: *JSC.JSGlobalObject, extra_options: ?JSValue) bun.JSError!JSValue { + if (!this.isS3()) { + return globalThis.ERR_INVALID_THIS("presign is only possible for s3:// files", .{}).throw(); + } + + var method: bun.http.Method = .GET; + var expires: usize = 86400; // 1 day default + + var credentialsWithOptions: AWS.AWSCredentialsWithOptions = .{ + .credentials = this.store.?.data.s3.getCredentials().*, + }; + defer { + credentialsWithOptions.deinit(); + } + const s3 = &this.store.?.data.s3; + + if (extra_options) |options| { + if (options.isObject()) { + if (try options.getTruthyComptime(globalThis, "method")) |method_| { + method = Method.fromJS(globalThis, method_) orelse { + return globalThis.throwInvalidArguments("method must be GET, PUT, DELETE or HEAD when using s3 protocol", .{}); + }; + } + if (try options.getOptional(globalThis, "expiresIn", i32)) |expires_| { + if (expires_ <= 0) return globalThis.throwInvalidArguments("expiresIn must be greather than 0", .{}); + expires = @intCast(expires_); + } + } + credentialsWithOptions = try s3.getCredentialsWithOptions(options, globalThis); + } + const path = s3.path(); + + const result = credentialsWithOptions.credentials.signRequest(.{ + .path = path, + .method = method, + .acl = credentialsWithOptions.acl, + }, .{ .expires = expires }) catch |sign_err| { + return AWS.throwSignError(sign_err, globalThis); + }; + defer result.deinit(); + var str = bun.String.fromUTF8(result.url); + return str.transferToJS(this.globalThis); +} +pub fn getBucketName( + this: *const Blob, +) ?[]const u8 { + const store = this.store orelse return null; + if (store.data != .s3) return null; + const credentials = store.data.s3.getCredentials(); + var full_path = store.data.s3.path(); + if (strings.startsWith(full_path, "/")) { + full_path = full_path[1..]; + } + var bucket: []const u8 = credentials.bucket; + + if (bucket.len == 0) { + if (strings.indexOf(full_path, "/")) |end| { + bucket = full_path[0..end]; + if (bucket.len > 0) { + return bucket; + } + } + return null; + } + return bucket; +} + +pub fn getBucket( + this: *Blob, + globalThis: *JSC.JSGlobalObject, +) callconv(JSC.conv) JSValue { + if (getBucketName(this)) |name| { + var str = bun.String.createUTF8(name); + return str.transferToJS(globalThis); + } + return .undefined; +} +pub fn getPresignUrl(this: *Blob, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const args = callframe.arguments_old(1); + return getPresignUrlFrom(this, globalThis, if (args.len > 0) args.ptr[0] else null); +} + +pub fn constructInternalJS( + globalObject: *JSC.JSGlobalObject, + path: JSC.Node.PathLike, + options: ?JSC.JSValue, +) bun.JSError!JSValue { + const blob = try constructS3FileInternal(globalObject, path, options); + return blob.toJS(globalObject); +} + +pub fn toJSUnchecked( + globalObject: *JSC.JSGlobalObject, + this: *Blob, +) JSValue { + return BUN__createJSS3FileUnsafely(globalObject, this); +} + +pub fn constructInternal( + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!*Blob { + const vm = globalObject.bunVM(); + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(vm, arguments); + defer args.deinit(); + + const path = (try JSC.Node.PathLike.fromJS(globalObject, &args)) orelse { + return globalObject.throwInvalidArguments("Expected file path string", .{}); + }; + return 
constructS3FileInternal(globalObject, path, args.nextEat()); +} + +pub fn construct( + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) callconv(JSC.conv) ?*Blob { + return constructInternal(globalObject, callframe) catch |err| switch (err) { + error.JSError => null, + error.OutOfMemory => { + _ = globalObject.throwOutOfMemoryValue(); + return null; + }, + }; +} +pub fn hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(JSC.conv) bool { + JSC.markBinding(@src()); + const blob = value.as(Blob) orelse return false; + return blob.isS3(); +} + +comptime { + @export(exports.JSS3File__presign, .{ .name = "JSS3File__presign" }); + @export(construct, .{ .name = "JSS3File__construct" }); + @export(hasInstance, .{ .name = "JSS3File__hasInstance" }); + @export(getBucket, .{ .name = "JSS3File__bucket" }); +} + +pub const exports = struct { + pub const JSS3File__presign = JSC.toJSHostFunctionWithContext(Blob, getPresignUrl); +}; +extern fn BUN__createJSS3File(*JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSValue; +extern fn BUN__createJSS3FileUnsafely(*JSC.JSGlobalObject, *Blob) callconv(JSC.conv) JSValue; +pub fn createJSS3File(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue { + return BUN__createJSS3File(globalObject, callframe); +} diff --git a/src/bun.js/webcore/blob.zig b/src/bun.js/webcore/blob.zig index a154a8cb75bf47..23e9f3b03be58b 100644 --- a/src/bun.js/webcore/blob.zig +++ b/src/bun.js/webcore/blob.zig @@ -43,33 +43,11 @@ const Request = JSC.WebCore.Request; const libuv = bun.windows.libuv; -const AWSCredentials = @import("../../s3.zig").AWSCredentials; -const S3MultiPartUpload = @import("../../s3.zig").MultiPartUpload; +const S3 = @import("../../s3.zig"); +const AWSCredentials = S3.AWSCredentials; +const S3MultiPartUpload = S3.MultiPartUpload; const AWS = AWSCredentials; - -const PathOrBlob = union(enum) { - path: JSC.Node.PathOrFileDescriptor, - blob: Blob, - - pub fn fromJSNoCopy(ctx: js.JSContextRef, args: *JSC.Node.ArgumentsSlice) bun.JSError!PathOrBlob { - if (try JSC.Node.PathOrFileDescriptor.fromJS(ctx, args, bun.default_allocator)) |path| { - return PathOrBlob{ - .path = path, - }; - } - - const arg = args.nextEat() orelse { - return ctx.throwInvalidArgumentTypeValue("destination", "path, file descriptor, or Blob", .undefined); - }; - if (arg.as(Blob)) |blob| { - return PathOrBlob{ - .blob = blob.*, - }; - } - return ctx.throwInvalidArgumentTypeValue("destination", "path, file descriptor, or Blob", arg); - } -}; - +const PathOrBlob = JSC.Node.PathOrBlob; const WriteFilePromise = @import("./blob/WriteFile.zig").WriteFilePromise; const WriteFileWaitFromLockedValueTask = @import("./blob/WriteFile.zig").WriteFileWaitFromLockedValueTask; const NewReadFileHandler = @import("./blob/ReadFile.zig").NewReadFileHandler; @@ -77,6 +55,8 @@ const WriteFile = @import("./blob/WriteFile.zig").WriteFile; const ReadFile = @import("./blob/ReadFile.zig").ReadFile; const WriteFileWindows = @import("./blob/WriteFile.zig").WriteFileWindows; +const S3File = @import("./S3File.zig"); + pub const Blob = struct { const bloblog = Output.scoped(.Blob, false); @@ -718,14 +698,8 @@ pub const Blob = struct { { const store = this.store.?; switch (store.data) { - .s3 => |s3| { - try writer.writeAll(comptime Output.prettyFmt("S3Ref", enable_ansi_colors)); - try writer.print( - comptime Output.prettyFmt(" (\"{s}\")", enable_ansi_colors), - .{ - s3.pathlike.slice(), - }, - ); + .s3 => |*s3| { + try S3File.writeFormat(s3, 
Formatter, formatter, writer, enable_ansi_colors); }, .file => |file| { try writer.writeAll(comptime Output.prettyFmt("FileRef", enable_ansi_colors)); @@ -923,6 +897,7 @@ pub const Blob = struct { const Wrapper = struct { promise: JSC.JSPromise.Strong, + store: *Store, pub usingnamespace bun.New(@This()); pub fn resolve(result: AWS.S3UploadResult, this: *@This()) void { @@ -930,7 +905,7 @@ pub const Blob = struct { switch (result) { .success => this.promise.resolve(globalObject, JSC.jsNumber(0)), .failure => |err| { - this.promise.rejectOnNextTick(globalObject, err.toJS(globalObject)); + this.promise.rejectOnNextTick(globalObject, err.toJS(globalObject, this.store.getPath())); }, } } @@ -939,6 +914,8 @@ pub const Blob = struct { fn deinit(this: *@This()) void { this.promise.deinit(); + this.store.deref(); + this.destroy(); } }; @@ -946,8 +923,10 @@ pub const Blob = struct { const promise_value = promise.value(); const proxy = ctx.bunVM().transpiler.env.getHttpProxy(true, null); const proxy_url = if (proxy) |p| p.href else null; - aws_options.credentials.s3Upload(s3.path(), "", destination_blob.contentTypeOrMimeType(), proxy_url, @ptrCast(&Wrapper.resolve), Wrapper.new(.{ + destination_blob.store.?.ref(); + aws_options.credentials.s3Upload(s3.path(), "", destination_blob.contentTypeOrMimeType(), aws_options.acl, proxy_url, @ptrCast(&Wrapper.resolve), Wrapper.new(.{ .promise = promise, + .store = destination_blob.store.?, })); return promise_value; } @@ -1064,7 +1043,7 @@ pub const Blob = struct { source_blob, @truncate(s3.options.partSize * S3MultiPartUpload.OneMiB), ), ctx)) |stream| { - return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), stream, ctx, aws_options.options, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); + return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), stream, ctx, aws_options.options, aws_options.acl, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); } else { return JSC.JSPromise.rejectedPromiseValue(ctx, ctx.createErrorInstance("Failed to stream bytes to s3 bucket", .{})); } @@ -1079,7 +1058,7 @@ pub const Blob = struct { switch (result) { .success => this.promise.resolve(globalObject, JSC.jsNumber(this.store.data.bytes.len)), .failure => |err| { - this.promise.rejectOnNextTick(globalObject, err.toJS(globalObject)); + this.promise.rejectOnNextTick(globalObject, err.toJS(globalObject, this.store.getPath())); }, } } @@ -1095,7 +1074,7 @@ pub const Blob = struct { const promise = JSC.JSPromise.Strong.init(ctx); const promise_value = promise.value(); - aws_options.credentials.s3Upload(s3.path(), bytes.slice(), destination_blob.contentTypeOrMimeType(), proxy_url, @ptrCast(&Wrapper.resolve), Wrapper.new(.{ + aws_options.credentials.s3Upload(s3.path(), bytes.slice(), destination_blob.contentTypeOrMimeType(), aws_options.acl, proxy_url, @ptrCast(&Wrapper.resolve), Wrapper.new(.{ .store = store, .promise = promise, })); @@ -1109,7 +1088,7 @@ pub const Blob = struct { source_blob, @truncate(s3.options.partSize * S3MultiPartUpload.OneMiB), ), ctx)) |stream| { - return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), stream, ctx, s3.options, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); + return (if (options.extra_options != null) aws_options.credentials.dupe() else 
s3.getCredentials()).s3UploadStream(s3.path(), stream, ctx, s3.options, aws_options.acl, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); } else { return JSC.JSPromise.rejectedPromiseValue(ctx, ctx.createErrorInstance("Failed to stream bytes to s3 bucket", .{})); } @@ -1287,7 +1266,7 @@ pub const Blob = struct { const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); const proxy_url = if (proxy) |p| p.href else null; - return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), readable, globalThis, aws_options.options, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); + return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), readable, globalThis, aws_options.options, aws_options.acl, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); } destination_blob.detach(); return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); @@ -1335,7 +1314,7 @@ pub const Blob = struct { } const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); const proxy_url = if (proxy) |p| p.href else null; - return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), readable, globalThis, aws_options.options, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); + return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), readable, globalThis, aws_options.options, aws_options.acl, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); } destination_blob.detach(); return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); @@ -1593,269 +1572,6 @@ pub const Blob = struct { return JSC.JSPromise.resolvedPromiseValue(globalThis, JSC.JSValue.jsNumber(written)); } - - pub fn JSS3File_upload_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); - defer args.deinit(); - - // accept a path or a blob - var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); - errdefer { - if (path_or_blob == .path) { - path_or_blob.path.deinit(); - } - } - - if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { - return globalThis.throwInvalidArguments("S3.upload(pathOrS3, blob) expects a S3 or path to upload", .{}); - } - - const data = args.nextEat() orelse { - return globalThis.throwInvalidArguments("S3.upload(pathOrS3, blob) expects a Blob-y thing to upload", .{}); - }; - - switch (path_or_blob) { - .path => |path| { - const options = args.nextEat(); - if (path == .fd) { - return globalThis.throwInvalidArguments("S3.upload(pathOrS3, blob) expects a S3 or path to upload", .{}); - } - var blob = try constructS3FileInternalStore(globalThis, path.path, options); - defer blob.deinit(); - - var blob_internal: PathOrBlob = .{ .blob = blob }; - return try writeFileInternal(globalThis, &blob_internal, data, .{ - .mkdirp_if_not_exists = false, - .extra_options = options, - }); - }, - .blob => return try writeFileInternal(globalThis, &path_or_blob, data, .{ - .mkdirp_if_not_exists = false, - .extra_options = args.nextEat(), - }), - } - } - - pub fn JSS3File_size_(globalThis: 
*JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); - defer args.deinit(); - - // accept a path or a blob - var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); - errdefer { - if (path_or_blob == .path) { - path_or_blob.path.deinit(); - } - } - - if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { - return globalThis.throwInvalidArguments("S3.size(pathOrS3) expects a S3 or path to get size", .{}); - } - - switch (path_or_blob) { - .path => |path| { - const options = args.nextEat(); - if (path == .fd) { - return globalThis.throwInvalidArguments("S3.size(pathOrS3) expects a S3 or path to get size", .{}); - } - var blob = try constructS3FileInternalStore(globalThis, path.path, options); - defer blob.deinit(); - - return S3BlobStatTask.size(globalThis, &blob); - }, - .blob => |*blob| { - return getSize(blob, globalThis); - }, - } - } - pub fn JSS3File_exists_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); - defer args.deinit(); - - // accept a path or a blob - var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); - errdefer { - if (path_or_blob == .path) { - path_or_blob.path.deinit(); - } - } - - if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { - return globalThis.throwInvalidArguments("S3.exists(pathOrS3) expects a S3 or path to check if it exists", .{}); - } - - switch (path_or_blob) { - .path => |path| { - const options = args.nextEat(); - if (path == .fd) { - return globalThis.throwInvalidArguments("S3.exists(pathOrS3) expects a S3 or path to check if it exists", .{}); - } - var blob = try constructS3FileInternalStore(globalThis, path.path, options); - defer blob.deinit(); - - return S3BlobStatTask.exists(globalThis, &blob); - }, - .blob => |*blob| { - return getExists(blob, globalThis, callframe); - }, - } - } - - pub export fn JSS3File__exists(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue { - return JSS3File_exists_(globalThis, callframe) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => { - globalThis.throwOutOfMemory() catch {}; - return .zero; - }, - }; - } - pub export fn JSS3File__size(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue { - return JSS3File_size_(globalThis, callframe) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => { - globalThis.throwOutOfMemory() catch {}; - return .zero; - }, - }; - } - pub export fn JSS3File__upload(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue { - return JSS3File_upload_(globalThis, callframe) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => { - globalThis.throwOutOfMemory() catch {}; - return .zero; - }, - }; - } - pub fn JSS3File_presign_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); - defer args.deinit(); - - // accept a path or a blob - var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); - errdefer { - if (path_or_blob == 
.path) { - path_or_blob.path.deinit(); - } - } - - if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { - return globalThis.throwInvalidArguments("S3.presign(pathOrS3, options) expects a S3 or path to presign", .{}); - } - - switch (path_or_blob) { - .path => |path| { - if (path == .fd) { - return globalThis.throwInvalidArguments("S3.presign(pathOrS3, options) expects a S3 or path to presign", .{}); - } - const options = args.nextEat(); - var blob = try constructS3FileInternalStore(globalThis, path.path, options); - defer blob.deinit(); - return try getPresignUrlFrom(&blob, globalThis, options); - }, - .blob => return try getPresignUrlFrom(&path_or_blob.blob, globalThis, args.nextEat()), - } - } - - pub export fn JSS3File__presign(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue { - return JSS3File_presign_(globalThis, callframe) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => { - globalThis.throwOutOfMemory() catch {}; - return .zero; - }, - }; - } - pub fn JSS3File_unlink_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); - defer args.deinit(); - - // accept a path or a blob - var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); - errdefer { - if (path_or_blob == .path) { - path_or_blob.path.deinit(); - } - } - if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { - return globalThis.throwInvalidArguments("S3.unlink(pathOrS3) expects a S3 or path to delete", .{}); - } - - switch (path_or_blob) { - .path => |path| { - if (path == .fd) { - return globalThis.throwInvalidArguments("S3.unlink(pathOrS3) expects a S3 or path to delete", .{}); - } - const options = args.nextEat(); - var blob = try constructS3FileInternalStore(globalThis, path.path, options); - defer blob.deinit(); - return try blob.store.?.data.s3.unlink(globalThis, options); - }, - .blob => |blob| { - return try blob.store.?.data.s3.unlink(globalThis, args.nextEat()); - }, - } - } - - pub export fn JSS3File__unlink(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue { - return JSS3File_unlink_(globalThis, callframe) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => { - globalThis.throwOutOfMemory() catch {}; - return .zero; - }, - }; - } - pub export fn JSS3File__hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(JSC.conv) bool { - JSC.markBinding(@src()); - const blob = value.as(Blob) orelse return false; - return blob.isS3(); - } - - pub export fn JSDOMFile__hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(JSC.conv) bool { - JSC.markBinding(@src()); - const blob = value.as(Blob) orelse return false; - return blob.is_jsdom_file; - } - extern fn BUN__createJSS3FileConstructor(*JSC.JSGlobalObject) JSValue; - - pub fn getJSS3FileConstructor( - globalObject: *JSC.JSGlobalObject, - _: *JSC.JSObject, - ) callconv(JSC.conv) JSValue { - return BUN__createJSS3FileConstructor(globalObject); - } - export fn JSS3File__construct(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) ?*Blob { - const vm = globalThis.bunVM(); - const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(vm, arguments); - defer 
args.deinit(); - - const path_or_fd = (JSC.Node.PathLike.fromJS(globalThis, &args)) catch |err| switch (err) { - error.JSError => null, - error.OutOfMemory => { - globalThis.throwOutOfMemory() catch {}; - return null; - }, - }; - if (path_or_fd == null) { - globalThis.throwInvalidArguments("Expected file path string", .{}) catch return null; - return null; - } - return constructS3FileInternal(globalThis, path_or_fd.?, args.nextEat()) catch |err| switch (err) { - error.JSError => null, - error.OutOfMemory => { - globalThis.throwOutOfMemory() catch {}; - return null; - }, - }; - } export fn JSDOMFile__construct(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) ?*Blob { return JSDOMFile__construct_(globalThis, callframe) catch |err| switch (err) { error.JSError => null, @@ -1986,66 +1702,7 @@ pub const Blob = struct { } comptime { - if (!JSC.is_bindgen) { - _ = JSDOMFile__hasInstance; - } - } - - fn constructS3FileInternalStore( - globalObject: *JSC.JSGlobalObject, - path: JSC.Node.PathLike, - options: ?JSC.JSValue, - ) bun.JSError!Blob { - - // get ENV config - var aws_options = try AWS.getCredentialsWithOptions(globalObject.bunVM().transpiler.env.getAWSCredentials(), options, globalObject); - defer aws_options.deinit(); - const store = Blob.Store.initS3(path, null, aws_options.credentials, bun.default_allocator) catch bun.outOfMemory(); - errdefer store.deinit(); - store.data.s3.options = aws_options.options; - - var blob = Blob.initWithStore(store, globalObject); - if (options) |opts| { - if (try opts.getTruthy(globalObject, "type")) |file_type| { - inner: { - if (file_type.isString()) { - var allocator = bun.default_allocator; - var str = file_type.toSlice(globalObject, bun.default_allocator); - defer str.deinit(); - const slice = str.slice(); - if (!strings.isAllASCII(slice)) { - break :inner; - } - blob.content_type_was_set = true; - if (globalObject.bunVM().mimeType(str.slice())) |entry| { - blob.content_type = entry.value; - break :inner; - } - const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); - blob.content_type = strings.copyLowercase(slice, content_type_buf); - blob.content_type_allocated = true; - } - } - } - } - return blob; - } - fn constructS3FileInternal( - globalObject: *JSC.JSGlobalObject, - path: JSC.Node.PathLike, - options: ?JSC.JSValue, - ) bun.JSError!*Blob { - var ptr = Blob.new(try constructS3FileInternalStore(globalObject, path, options)); - ptr.allocator = bun.default_allocator; - return ptr; - } - fn constructS3FileInternalJS( - globalObject: *JSC.JSGlobalObject, - path: JSC.Node.PathLike, - options: ?JSC.JSValue, - ) bun.JSError!JSC.JSValue { - var ptr = try constructS3FileInternal(globalObject, path, options); - return ptr.toJS(globalObject); + _ = JSDOMFile__hasInstance; } pub fn constructBunFile( @@ -2063,8 +1720,8 @@ pub const Blob = struct { const options = if (arguments.len >= 2) arguments[1] else null; if (path == .path) { - if (strings.startsWith(path.path.slice(), "s3://")) { - return try constructS3FileInternalJS(globalObject, path.path, options); + if (strings.hasPrefixComptime(path.path.slice(), "s3://")) { + return try S3File.constructInternalJS(globalObject, path.path, options); } } defer path.deinitAndUnprotect(); @@ -2105,21 +1762,6 @@ pub const Blob = struct { return ptr.toJS(globalObject); } - pub fn constructS3File( - globalObject: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - const vm = globalObject.bunVM(); - const arguments = 
callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(vm, arguments); - defer args.deinit(); - - const path = (try JSC.Node.PathLike.fromJS(globalObject, &args)) orelse { - return globalObject.throwInvalidArguments("Expected file path string", .{}); - }; - return constructS3FileInternalJS(globalObject, path, args.nextEat()); - } - pub fn findOrCreateFileFromPath(path_or_fd: *JSC.Node.PathOrFileDescriptor, globalThis: *JSGlobalObject, comptime check_s3: bool) Blob { var vm = globalThis.bunVM(); const allocator = bun.default_allocator; @@ -2208,6 +1850,14 @@ pub const Blob = struct { } else 0; } + pub fn getPath(this: *const Store) ?[]const u8 { + return switch (this.data) { + .bytes => |*bytes| if (bytes.stored_name.len > 0) bytes.stored_name.slice() else null, + .file => |*file| if (file.pathlike == .path) file.pathlike.path.slice() else null, + .s3 => |*s3| s3.pathlike.slice(), + }; + } + pub fn size(this: *const Store) SizeType { return switch (this.data) { .bytes => this.data.bytes.len, @@ -2248,7 +1898,34 @@ pub const Blob = struct { var this = bun.cast(*Store, ptr); this.deref(); } + pub fn initS3WithReferencedCredentials(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: *AWS, allocator: std.mem.Allocator) !*Store { + var path = pathlike; + // this actually protects/refs the pathlike + path.toThreadSafe(); + const store = Blob.Store.new(.{ + .data = .{ + .s3 = S3Store.initWithReferencedCredentials( + path, + mime_type orelse brk: { + const sliced = path.slice(); + if (sliced.len > 0) { + var extname = std.fs.path.extension(sliced); + extname = std.mem.trim(u8, extname, "."); + if (http.MimeType.byExtensionNoDefault(extname)) |mime| { + break :brk mime; + } + } + break :brk null; + }, + credentials, + ), + }, + .allocator = allocator, + .ref_count = std.atomic.Value(u32).init(1), + }); + return store; + } pub fn initS3(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: AWSCredentials, allocator: std.mem.Allocator) !*Store { var path = pathlike; // this actually protects/refs the pathlike @@ -3772,6 +3449,7 @@ pub const Blob = struct { mime_type: http.MimeType = http.MimeType.other, credentials: ?*AWSCredentials, options: S3MultiPartUpload.MultiPartUploadOptions = .{}, + acl: ?S3.ACL = null, pub fn isSeekable(_: *const @This()) ?bool { return true; } @@ -3782,7 +3460,7 @@ pub const Blob = struct { } pub fn getCredentialsWithOptions(this: *const @This(), options: ?JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!AWS.AWSCredentialsWithOptions { - return AWS.getCredentialsWithOptions(this.getCredentials().*, options, globalObject); + return AWS.getCredentialsWithOptions(this.getCredentials().*, this.options, options, this.acl, globalObject); } pub fn path(this: *@This()) []const u8 { @@ -3790,16 +3468,21 @@ pub const Blob = struct { // normalize start and ending if (strings.endsWith(path_name, "/")) { path_name = path_name[0..path_name.len]; + } else if (strings.endsWith(path_name, "\\")) { + path_name = path_name[0 .. 
path_name.len - 1]; } if (strings.startsWith(path_name, "/")) { path_name = path_name[1..]; + } else if (strings.startsWith(path_name, "\\")) { + path_name = path_name[1..]; } return path_name; } - pub fn unlink(this: *@This(), globalThis: *JSC.JSGlobalObject, extra_options: ?JSValue) bun.JSError!JSValue { + pub fn unlink(this: *@This(), store: *Store, globalThis: *JSC.JSGlobalObject, extra_options: ?JSValue) bun.JSError!JSValue { const Wrapper = struct { promise: JSC.JSPromise.Strong, + store: *Store, pub usingnamespace bun.New(@This()); @@ -3810,18 +3493,14 @@ pub const Blob = struct { .success => { self.promise.resolve(globalObject, .true); }, - .not_found => { - const js_err = globalObject.createErrorInstance("File not found", .{}); - js_err.put(globalObject, ZigString.static("code"), ZigString.init("FileNotFound").toJS(globalObject)); - self.promise.reject(globalObject, js_err); - }, - .failure => |err| { - self.promise.rejectOnNextTick(globalObject, err.toJS(globalObject)); + inline .not_found, .failure => |err| { + self.promise.rejectOnNextTick(globalObject, err.toJS(globalObject, self.store.getPath())); }, } } fn deinit(self: *@This()) void { + self.store.deref(); self.promise.deinit(); self.destroy(); } @@ -3834,11 +3513,20 @@ pub const Blob = struct { defer aws_options.deinit(); aws_options.credentials.s3Delete(this.path(), @ptrCast(&Wrapper.resolve), Wrapper.new(.{ .promise = promise, + .store = store, // store is needed in case of not found error }), proxy); + store.ref(); return value; } - + pub fn initWithReferencedCredentials(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: *AWS) S3Store { + credentials.ref(); + return .{ + .credentials = credentials, + .pathlike = pathlike, + .mime_type = mime_type orelse http.MimeType.other, + }; + } pub fn init(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: AWSCredentials) S3Store { return .{ .credentials = credentials.dupe(), @@ -4156,13 +3844,8 @@ pub const Blob = struct { } JSC.AnyPromise.wrap(.{ .normal = this.promise.get() }, this.globalThis, S3BlobDownloadTask.callHandler, .{ this, bytes }); }, - .not_found => { - const js_err = this.globalThis.createErrorInstance("File not found", .{}); - js_err.put(this.globalThis, ZigString.static("code"), ZigString.init("FileNotFound").toJS(this.globalThis)); - this.promise.reject(this.globalThis, js_err); - }, - .failure => |err| { - this.promise.rejectOnNextTick(this.globalThis, err.toJS(this.globalThis)); + inline .not_found, .failure => |err| { + this.promise.rejectOnNextTick(this.globalThis, err.toJS(this.globalThis, this.blob.store.?.getPath())); }, } } @@ -4198,83 +3881,7 @@ pub const Blob = struct { pub fn deinit(this: *S3BlobDownloadTask) void { this.blob.store.?.deref(); - this.poll_ref.unrefOnNextTick(this.globalThis.bunVM()); - this.promise.deinit(); - this.destroy(); - } - }; - - const S3BlobStatTask = struct { - promise: JSC.JSPromise.Strong, - usingnamespace bun.New(S3BlobStatTask); - - pub fn onS3ExistsResolved(result: AWS.S3StatResult, this: *S3BlobStatTask) void { - defer this.deinit(); - const globalThis = this.promise.globalObject().?; - switch (result) { - .not_found => { - this.promise.resolve(globalThis, .false); - }, - .success => |_| { - // calling .exists() should not prevent it to download a bigger file - // this would make it download a slice of the actual value, if the file changes before we download it - // if (this.blob.size == Blob.max_size) { - // this.blob.size = @truncate(stat.size); - // } - 
this.promise.resolve(globalThis, .true); - }, - .failure => |err| { - this.promise.rejectOnNextTick(globalThis, err.toJS(globalThis)); - }, - } - } - - pub fn onS3SizeResolved(result: AWS.S3StatResult, this: *S3BlobStatTask) void { - defer this.deinit(); - const globalThis = this.promise.globalObject().?; - - switch (result) { - .not_found => { - const js_err = globalThis.createErrorInstance("File not Found", .{}); - js_err.put(globalThis, ZigString.static("code"), ZigString.static("FileNotFound").toJS(globalThis)); - this.promise.rejectOnNextTick(globalThis, js_err); - }, - .success => |stat| { - this.promise.resolve(globalThis, JSValue.jsNumber(stat.size)); - }, - .failure => |err| { - this.promise.rejectOnNextTick(globalThis, err.toJS(globalThis)); - }, - } - } - - pub fn exists(globalThis: *JSC.JSGlobalObject, blob: *Blob) JSValue { - const this = S3BlobStatTask.new(.{ - .promise = JSC.JSPromise.Strong.init(globalThis), - }); - const promise = this.promise.value(); - const credentials = blob.store.?.data.s3.getCredentials(); - const path = blob.store.?.data.s3.path(); - const env = globalThis.bunVM().transpiler.env; - - credentials.s3Stat(path, @ptrCast(&S3BlobStatTask.onS3ExistsResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); - return promise; - } - - pub fn size(globalThis: *JSC.JSGlobalObject, blob: *Blob) JSValue { - const this = S3BlobStatTask.new(.{ - .promise = JSC.JSPromise.Strong.init(globalThis), - }); - const promise = this.promise.value(); - const credentials = blob.store.?.data.s3.getCredentials(); - const path = blob.store.?.data.s3.path(); - const env = globalThis.bunVM().transpiler.env; - - credentials.s3Stat(path, @ptrCast(&S3BlobStatTask.onS3SizeResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); - return promise; - } - - pub fn deinit(this: *S3BlobStatTask) void { + this.poll_ref.unref(this.globalThis.bunVM()); this.promise.deinit(); this.destroy(); } @@ -4340,7 +3947,7 @@ pub const Blob = struct { return JSC.JSPromise.resolvedPromiseValue(globalThis, globalThis.createInvalidArgs("Blob is detached", .{})); }; return switch (store.data) { - .s3 => |*s3| try s3.unlink(globalThis, args.nextEat()), + .s3 => |*s3| try s3.unlink(store, globalThis, args.nextEat()), .file => |file| file.unlink(globalThis), else => JSC.JSPromise.resolvedPromiseValue(globalThis, globalThis.createInvalidArgs("Blob is read-only", .{})), }; @@ -4353,57 +3960,11 @@ pub const Blob = struct { _: *JSC.CallFrame, ) bun.JSError!JSValue { if (this.isS3()) { - return S3BlobStatTask.exists(globalThis, this); + return S3File.S3BlobStatTask.exists(globalThis, this); } return JSC.JSPromise.resolvedPromiseValue(globalThis, this.getExistsSync()); } - pub fn getPresignUrlFrom(this: *Blob, globalThis: *JSC.JSGlobalObject, extra_options: ?JSValue) bun.JSError!JSValue { - if (this.isS3()) { - var method: bun.http.Method = .GET; - var expires: usize = 86400; // 1 day default - - var credentialsWithOptions: AWS.AWSCredentialsWithOptions = .{ - .credentials = this.store.?.data.s3.getCredentials().*, - }; - defer { - credentialsWithOptions.deinit(); - } - if (extra_options) |options| { - if (options.isObject()) { - if (try options.getTruthyComptime(globalThis, "method")) |method_| { - method = Method.fromJS(globalThis, method_) orelse { - return globalThis.throwInvalidArguments("method must be GET, PUT, DELETE or HEAD when using s3 protocol", .{}); - }; - } - if (try options.getOptional(globalThis, "expiresIn", i32)) |expires_| { - if (expires_ <= 0) 
return globalThis.throwInvalidArguments("expiresIn must be greather than 0", .{}); - expires = @intCast(expires_); - } - } - credentialsWithOptions = try this.store.?.data.s3.getCredentialsWithOptions(options, globalThis); - } - const path = this.store.?.data.s3.path(); - - const result = credentialsWithOptions.credentials.signRequest(.{ - .path = path, - .method = method, - }, .{ .expires = expires }) catch |sign_err| { - return AWS.throwSignError(sign_err, globalThis); - }; - defer result.deinit(); - var str = bun.String.fromUTF8(result.url); - return str.transferToJS(this.globalThis); - } - - return globalThis.throwError(error.NotSupported, "is only possible to presign s3:// files"); - } - - pub fn getPresignUrl(this: *Blob, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const args = callframe.arguments_old(1); - return getPresignUrlFrom(this, globalThis, if (args.len > 0) args.ptr[0] else null); - } - pub const FileStreamWrapper = struct { promise: JSC.JSPromise.Strong, readable_stream_ref: JSC.WebCore.ReadableStream.Strong, @@ -4472,7 +4033,7 @@ pub const Blob = struct { const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); const proxy_url = if (proxy) |p| p.href else null; - return (if (extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(path, readable_stream, globalThis, aws_options.options, this.contentTypeOrMimeType(), proxy_url, null, undefined); + return (if (extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(path, readable_stream, globalThis, aws_options.options, aws_options.acl, this.contentTypeOrMimeType(), proxy_url, null, undefined); } if (store.data != .file) { @@ -4987,17 +4548,6 @@ pub const Blob = struct { return if (this.getNameString()) |name| name.toJS(globalThis) else .undefined; } - pub fn getBucket( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - ) JSValue { - if (this.getBucketName()) |name| { - var str = bun.String.createUTF8(name); - return str.transferToJS(globalThis); - } - return .undefined; - } - pub fn setName( this: *Blob, jsThis: JSC.JSValue, @@ -5048,30 +4598,6 @@ pub const Blob = struct { return null; } - pub fn getBucketName( - this: *const Blob, - ) ?[]const u8 { - const store = this.store orelse return null; - if (store.data != .s3) return null; - const credentials = store.data.s3.getCredentials(); - var full_path = store.data.s3.path(); - if (strings.startsWith(full_path, "/")) { - full_path = full_path[1..]; - } - var bucket: []const u8 = credentials.bucket; - - if (bucket.len == 0) { - if (strings.indexOf(full_path, "/")) |end| { - bucket = full_path[0..end]; - if (bucket.len > 0) { - return bucket; - } - } - return null; - } - return bucket; - } - // TODO: Move this to a separate `File` object or BunFile pub fn getLastModified( this: *Blob, @@ -5126,7 +4652,7 @@ pub const Blob = struct { pub fn getSize(this: *Blob, globalThis: *JSC.JSGlobalObject) JSValue { if (this.size == Blob.max_size) { if (this.isS3()) { - return S3BlobStatTask.size(globalThis, this); + return S3File.S3BlobStatTask.size(globalThis, this); } this.resolveSize(); if (this.size == Blob.max_size and this.store != null) { @@ -5441,8 +4967,12 @@ pub const Blob = struct { // if (comptime Environment.allow_assert) { // assert(this.allocator != null); // } - this.calculateEstimatedByteSize(); + + if (this.isS3()) { + return S3File.toJSUnchecked(globalObject, this); + } + return Blob.toJSUnchecked(globalObject, this); } @@ -6606,3 +6136,9 
@@ pub const InlineBlob = extern struct { }; const assert = bun.assert; + +pub export fn JSDOMFile__hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(JSC.conv) bool { + JSC.markBinding(@src()); + const blob = value.as(Blob) orelse return false; + return blob.is_jsdom_file; +} diff --git a/src/bun.js/webcore/response.classes.ts b/src/bun.js/webcore/response.classes.ts index ba7e022fa3f78e..d09c7a0c1d866b 100644 --- a/src/bun.js/webcore/response.classes.ts +++ b/src/bun.js/webcore/response.classes.ts @@ -125,6 +125,7 @@ export default [ }), define({ name: "Blob", + final: false, construct: true, finalize: true, JSType: "0b11101110", @@ -168,13 +169,6 @@ export default [ // Non-standard, s3 + BunFile support unlink: { fn: "doUnlink", length: 0 }, write: { fn: "doWrite", length: 2 }, - // Non-standard, s3 support - bucket: { - cache: true, - getter: "getBucket", - }, - presign: { fn: "getPresignUrl", length: 1 }, - size: { getter: "getSize", }, diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig index 38e737ea52382b..b0e7ff60567d30 100644 --- a/src/bun.js/webcore/response.zig +++ b/src/bun.js/webcore/response.zig @@ -804,7 +804,7 @@ pub const Fetch = struct { }; pub const FetchTasklet = struct { - pub const FetchTaskletStream = JSC.WebCore.FetchTaskletChunkedRequestSink; + pub const FetchTaskletStream = JSC.WebCore.NetworkSink; const log = Output.scoped(.FetchTasklet, false); sink: ?*FetchTaskletStream.JSSink = null, @@ -3255,6 +3255,7 @@ pub const Fetch = struct { var credentialsWithOptions: s3.AWSCredentials.AWSCredentialsWithOptions = .{ .credentials = globalThis.bunVM().transpiler.env.getAWSCredentials(), .options = .{}, + .acl = null, }; defer { credentialsWithOptions.deinit(); @@ -3264,7 +3265,7 @@ pub const Fetch = struct { if (try options.getTruthyComptime(globalThis, "s3")) |s3_options| { if (s3_options.isObject()) { s3_options.ensureStillAlive(); - credentialsWithOptions = try s3.AWSCredentials.getCredentialsWithOptions(credentialsWithOptions.credentials, s3_options, globalThis); + credentialsWithOptions = try s3.AWSCredentials.getCredentialsWithOptions(credentialsWithOptions.credentials, .{}, s3_options, null, globalThis); } } } @@ -3338,6 +3339,7 @@ pub const Fetch = struct { body.ReadableStream.get().?, globalThis, credentialsWithOptions.options, + credentialsWithOptions.acl, if (headers) |h| h.getContentType() else null, proxy_url, @ptrCast(&Wrapper.resolve), @@ -3379,42 +3381,15 @@ pub const Fetch = struct { } const content_type = if (headers) |h| h.getContentType() else null; + var header_buffer: [10]picohttp.Header = undefined; if (range) |range_| { - const _headers = result.headers(); - var headersWithRange: [5]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - .{ .name = "range", .value = range_ }, - }; - - setHeaders(&headers, &headersWithRange, allocator); + const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ }); + setHeaders(&headers, _headers, allocator); } else if (content_type) |ct| { if (ct.len > 0) { - const _headers = result.headers(); - if (_headers.len > 4) { - var headersWithContentType: [6]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - _headers[4], - .{ .name = "Content-Type", .value = ct }, - }; - setHeaders(&headers, &headersWithContentType, allocator); - } else { - var headersWithContentType: [5]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - .{ .name = 
"Content-Type", .value = ct }, - }; - - setHeaders(&headers, &headersWithContentType, allocator); - } + const _headers = result.mixWithHeader(&header_buffer, .{ .name = "Content-Type", .value = ct }); + setHeaders(&headers, _headers, allocator); } else { setHeaders(&headers, result.headers(), allocator); } diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 0d437358eab230..8516f1215868c5 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -482,7 +482,7 @@ pub const StreamStart = union(Tag) { FileSink: FileSinkOptions, HTTPSResponseSink: void, HTTPResponseSink: void, - FetchTaskletChunkedRequestSink: void, + NetworkSink: void, ready: void, owned_and_done: bun.ByteList, done: bun.ByteList, @@ -509,7 +509,7 @@ pub const StreamStart = union(Tag) { FileSink, HTTPSResponseSink, HTTPResponseSink, - FetchTaskletChunkedRequestSink, + NetworkSink, ready, owned_and_done, done, @@ -660,7 +660,7 @@ pub const StreamStart = union(Tag) { }, }; }, - .FetchTaskletChunkedRequestSink, .HTTPSResponseSink, .HTTPResponseSink => { + .NetworkSink, .HTTPSResponseSink, .HTTPResponseSink => { var empty = true; var chunk_size: JSC.WebCore.Blob.SizeType = 2048; @@ -2650,7 +2650,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type { } pub const HTTPSResponseSink = HTTPServerWritable(true); pub const HTTPResponseSink = HTTPServerWritable(false); -pub const FetchTaskletChunkedRequestSink = struct { +pub const NetworkSink = struct { task: ?HTTPWritableStream = null, signal: Signal = .{}, globalThis: *JSGlobalObject = undefined, @@ -2658,13 +2658,14 @@ pub const FetchTaskletChunkedRequestSink = struct { buffer: bun.io.StreamBuffer, ended: bool = false, done: bool = false, + cancel: bool = false, encoded: bool = true, endPromise: JSC.JSPromise.Strong = .{}, auto_flusher: AutoFlusher = AutoFlusher{}, - pub usingnamespace bun.New(FetchTaskletChunkedRequestSink); + pub usingnamespace bun.New(NetworkSink); const HTTPWritableStream = union(enum) { fetch: *JSC.WebCore.Fetch.FetchTasklet, s3_upload: *S3MultiPartUpload, @@ -2689,6 +2690,16 @@ pub const FetchTaskletChunkedRequestSink = struct { AutoFlusher.registerDeferredMicrotaskWithTypeUnchecked(@This(), this, this.globalThis.bunVM()); } + pub fn path(this: *@This()) ?[]const u8 { + if (this.task) |task| { + return switch (task) { + .s3_upload => |s3| s3.path, + else => null, + }; + } + return null; + } + pub fn onAutoFlush(this: *@This()) bool { if (this.done) { this.auto_flusher.registered = false; @@ -2819,6 +2830,7 @@ pub const FetchTaskletChunkedRequestSink = struct { this.ended = true; this.done = true; this.signal.close(null); + this.cancel = true; this.finalize(); } @@ -2963,7 +2975,7 @@ pub const FetchTaskletChunkedRequestSink = struct { return this.buffer.memoryCost(); } - const name = "FetchTaskletChunkedRequestSink"; + const name = "NetworkSink"; pub const JSSink = NewJSSink(@This(), name); }; pub const BufferedReadableStreamAction = enum { diff --git a/src/bun.zig b/src/bun.zig index 77d75c1f0deb4b..db49641f88946f 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -4221,3 +4221,6 @@ pub const WPathBufferPool = if (Environment.isWindows) PathBufferPoolT(bun.WPath pub fn deleteAll() void {} }; pub const OSPathBufferPool = if (Environment.isWindows) WPathBufferPool else PathBufferPool; + +pub const S3 = @import("./s3.zig"); +pub const AWSCredentials = S3.AWSCredentials; diff --git a/src/codegen/class-definitions.ts b/src/codegen/class-definitions.ts index daf15ed5b5e7a1..64d5272f8c8716 100644 --- 
a/src/codegen/class-definitions.ts +++ b/src/codegen/class-definitions.ts @@ -59,6 +59,8 @@ export interface ClassDefinition { JSType?: string; noConstructor?: boolean; + final?: boolean; + // Do not try to track the `this` value in the constructor automatically. // That is a memory leak. wantsThis?: never; diff --git a/src/codegen/generate-classes.ts b/src/codegen/generate-classes.ts index c3333635a4275f..1875972f615b76 100644 --- a/src/codegen/generate-classes.ts +++ b/src/codegen/generate-classes.ts @@ -456,11 +456,11 @@ void ${proto}::finishCreation(JSC::VM& vm, JSC::JSGlobalObject* globalObject) `; } -function generatePrototypeHeader(typename) { +function generatePrototypeHeader(typename, final = true) { const proto = prototypeName(typename); return ` -class ${proto} final : public JSC::JSNonFinalObject { +class ${proto} ${final ? "final" : ""} : public JSC::JSNonFinalObject { public: using Base = JSC::JSNonFinalObject; @@ -483,7 +483,7 @@ class ${proto} final : public JSC::JSNonFinalObject { return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info()); } - private: + protected: ${proto}(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure) : Base(vm, structure) { @@ -537,7 +537,7 @@ class ${name} final : public JSC::InternalFunction { static JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES call(JSC::JSGlobalObject*, JSC::CallFrame*); DECLARE_EXPORT_INFO; - private: + protected: ${name}(JSC::VM& vm, JSC::Structure* structure); void finishCreation(JSC::VM&, JSC::JSGlobalObject* globalObject, ${prototypeName(typeName)}* prototype); }; @@ -1250,8 +1250,10 @@ function generateClassHeader(typeName, obj: ClassDefinition) { suffix += `JSC::JSValue getInternalProperties(JSC::VM &vm, JSC::JSGlobalObject *globalObject, ${name}*);`; } + const final = obj.final ?? true; + return ` - class ${name} final : public JSC::JSDestructibleObject { + class ${name}${final ? " final" : ""} : public JSC::JSDestructibleObject { public: using Base = JSC::JSDestructibleObject; static ${name}* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, void* ctx); @@ -1652,7 +1654,12 @@ ${DEFINE_VISIT_CHILDREN} } function generateHeader(typeName, obj) { - return generateClassHeader(typeName, obj).trim() + "\n\n"; + const fields = [ + generateClassHeader(typeName, obj).trim() + "\n\n", + !(obj.final ?? true) ? generatePrototypeHeader(typeName, false) : null, + ].filter(Boolean); + + return "\n" + fields.join("\n").trim(); } function generateImpl(typeName, obj) { @@ -1660,7 +1667,7 @@ function generateImpl(typeName, obj) { const proto = obj.proto; return [ - generatePrototypeHeader(typeName), + (obj.final ?? true) ? generatePrototypeHeader(typeName, true) : null, !obj.noConstructor ? generateConstructorHeader(typeName).trim() + "\n" : null, generatePrototype(typeName, obj).trim(), !obj.noConstructor ? 
generateConstructorImpl(typeName, obj).trim() : null, @@ -2059,7 +2066,7 @@ function generateLazyClassStructureHeader(typeName, { klass = {}, proto = {}, zi return ` JSC::Structure* ${className(typeName)}Structure() const { return m_${className(typeName)}.getInitializedOnMainThread(this); } JSC::JSObject* ${className(typeName)}Constructor() const { return m_${className(typeName)}.constructorInitializedOnMainThread(this); } - JSC::JSValue ${className(typeName)}Prototype() const { return m_${className(typeName)}.prototypeInitializedOnMainThread(this); } + JSC::JSObject* ${className(typeName)}Prototype() const { return m_${className(typeName)}.prototypeInitializedOnMainThread(this); } JSC::LazyClassStructure m_${className(typeName)}; `.trim(); } diff --git a/src/codegen/generate-jssink.ts b/src/codegen/generate-jssink.ts index afd9b36bdc28ea..7ec71fa427f187 100644 --- a/src/codegen/generate-jssink.ts +++ b/src/codegen/generate-jssink.ts @@ -1,12 +1,6 @@ import { join, resolve } from "path"; -const classes = [ - "ArrayBufferSink", - "FileSink", - "HTTPResponseSink", - "HTTPSResponseSink", - "FetchTaskletChunkedRequestSink", -]; +const classes = ["ArrayBufferSink", "FileSink", "HTTPResponseSink", "HTTPSResponseSink", "NetworkSink"]; function names(name) { return { diff --git a/src/env_loader.zig b/src/env_loader.zig index 8ea780553fd5cf..29cfcb7c08d5cc 100644 --- a/src/env_loader.zig +++ b/src/env_loader.zig @@ -125,6 +125,7 @@ pub const Loader = struct { var region: []const u8 = ""; var endpoint: []const u8 = ""; var bucket: []const u8 = ""; + var session_token: []const u8 = ""; if (this.get("S3_ACCESS_KEY_ID")) |access_key| { accessKeyId = access_key; @@ -152,12 +153,18 @@ pub const Loader = struct { } else if (this.get("AWS_BUCKET")) |bucket_| { bucket = bucket_; } + if (this.get("S3_SESSION_TOKEN")) |token| { + session_token = token; + } else if (this.get("AWS_SESSION_TOKEN")) |token| { + session_token = token; + } this.aws_credentials = .{ .accessKeyId = accessKeyId, .secretAccessKey = secretAccessKey, .region = region, .endpoint = endpoint, .bucket = bucket, + .sessionToken = session_token, }; return this.aws_credentials.?; diff --git a/src/s3.zig b/src/s3.zig index aa12dadd49629e..396ee291c90196 100644 --- a/src/s3.zig +++ b/src/s3.zig @@ -7,12 +7,55 @@ pub const RareData = @import("./bun.js/rare_data.zig"); const JSC = bun.JSC; const strings = bun.strings; +pub const ACL = enum { + /// Owner gets FULL_CONTROL. No one else has access rights (default). + private, + /// Owner gets FULL_CONTROL. The AllUsers group (see Who is a grantee?) gets READ access. + public_read, + /// Owner gets FULL_CONTROL. The AllUsers group gets READ and WRITE access. Granting this on a bucket is generally not recommended. + public_read_write, + /// Owner gets FULL_CONTROL. Amazon EC2 gets READ access to GET an Amazon Machine Image (AMI) bundle from Amazon S3. + aws_exec_read, + /// Owner gets FULL_CONTROL. The AuthenticatedUsers group gets READ access. + authenticated_read, + /// Object owner gets FULL_CONTROL. Bucket owner gets READ access. If you specify this canned ACL when creating a bucket, Amazon S3 ignores it. + bucket_owner_read, + /// Both the object owner and the bucket owner get FULL_CONTROL over the object. If you specify this canned ACL when creating a bucket, Amazon S3 ignores it. 
+ bucket_owner_full_control, + log_delivery_write, + + pub fn toString(this: @This()) []const u8 { + return switch (this) { + .private => "private", + .public_read => "public-read", + .public_read_write => "public-read-write", + .aws_exec_read => "aws-exec-read", + .authenticated_read => "authenticated-read", + .bucket_owner_read => "bucket-owner-read", + .bucket_owner_full_control => "bucket-owner-full-control", + .log_delivery_write => "log-delivery-write", + }; + } + + pub const Map = bun.ComptimeStringMap(ACL, .{ + .{ "private", .private }, + .{ "public-read", .public_read }, + .{ "public-read-write", .public_read_write }, + .{ "aws-exec-read", .aws_exec_read }, + .{ "authenticated-read", .authenticated_read }, + .{ "bucket-owner-read", .bucket_owner_read }, + .{ "bucket-owner-full-control", .bucket_owner_full_control }, + .{ "log-delivery-write", .log_delivery_write }, + }); +}; + pub const AWSCredentials = struct { accessKeyId: []const u8, secretAccessKey: []const u8, region: []const u8, endpoint: []const u8, bucket: []const u8, + sessionToken: []const u8, ref_count: u32 = 1, pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); @@ -24,12 +67,16 @@ pub const AWSCredentials = struct { pub const AWSCredentialsWithOptions = struct { credentials: AWSCredentials, options: MultiPartUpload.MultiPartUploadOptions = .{}, + acl: ?ACL = null, + /// indicates if the credentials have changed + changed_credentials: bool = false, _accessKeyIdSlice: ?JSC.ZigString.Slice = null, _secretAccessKeySlice: ?JSC.ZigString.Slice = null, _regionSlice: ?JSC.ZigString.Slice = null, _endpointSlice: ?JSC.ZigString.Slice = null, _bucketSlice: ?JSC.ZigString.Slice = null, + _sessionTokenSlice: ?JSC.ZigString.Slice = null, pub fn deinit(this: *@This()) void { if (this._accessKeyIdSlice) |slice| slice.deinit(); @@ -37,13 +84,31 @@ pub const AWSCredentials = struct { if (this._regionSlice) |slice| slice.deinit(); if (this._endpointSlice) |slice| slice.deinit(); if (this._bucketSlice) |slice| slice.deinit(); + if (this._sessionTokenSlice) |slice| slice.deinit(); } }; - pub fn getCredentialsWithOptions(this: AWSCredentials, options: ?JSC.JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!AWSCredentialsWithOptions { + + fn hashConst(acl: []const u8) u64 { + var hasher = std.hash.Wyhash.init(0); + var remain = acl; + + var buf: [@sizeOf(@TypeOf(hasher.buf))]u8 = undefined; + + while (remain.len > 0) { + const end = @min(hasher.buf.len, remain.len); + + hasher.update(strings.copyLowercaseIfNeeded(remain[0..end], &buf)); + remain = remain[end..]; + } + + return hasher.final(); + } + pub fn getCredentialsWithOptions(this: AWSCredentials, default_options: MultiPartUpload.MultiPartUploadOptions, options: ?JSC.JSValue, default_acl: ?ACL, globalObject: *JSC.JSGlobalObject) bun.JSError!AWSCredentialsWithOptions { // get ENV config var new_credentials = AWSCredentialsWithOptions{ .credentials = this, - .options = .{}, + .options = default_options, + .acl = default_acl, }; errdefer { new_credentials.deinit(); @@ -59,6 +124,7 @@ pub const AWSCredentials = struct { if (str.tag != .Empty and str.tag != .Dead) { new_credentials._accessKeyIdSlice = str.toUTF8(bun.default_allocator); new_credentials.credentials.accessKeyId = new_credentials._accessKeyIdSlice.?.slice(); + new_credentials.changed_credentials = true; } } else { return globalObject.throwInvalidArgumentTypeValue("accessKeyId", "string", js_value); @@ -73,6 +139,7 @@ pub const AWSCredentials = struct { if (str.tag != .Empty and str.tag != .Dead) { 
new_credentials._secretAccessKeySlice = str.toUTF8(bun.default_allocator); new_credentials.credentials.secretAccessKey = new_credentials._secretAccessKeySlice.?.slice(); + new_credentials.changed_credentials = true; } } else { return globalObject.throwInvalidArgumentTypeValue("secretAccessKey", "string", js_value); @@ -87,6 +154,7 @@ pub const AWSCredentials = struct { if (str.tag != .Empty and str.tag != .Dead) { new_credentials._regionSlice = str.toUTF8(bun.default_allocator); new_credentials.credentials.region = new_credentials._regionSlice.?.slice(); + new_credentials.changed_credentials = true; } } else { return globalObject.throwInvalidArgumentTypeValue("region", "string", js_value); @@ -103,6 +171,7 @@ pub const AWSCredentials = struct { const normalized_endpoint = bun.URL.parse(new_credentials._endpointSlice.?.slice()).host; if (normalized_endpoint.len > 0) { new_credentials.credentials.endpoint = normalized_endpoint; + new_credentials.changed_credentials = true; } } } else { @@ -118,6 +187,23 @@ pub const AWSCredentials = struct { if (str.tag != .Empty and str.tag != .Dead) { new_credentials._bucketSlice = str.toUTF8(bun.default_allocator); new_credentials.credentials.bucket = new_credentials._bucketSlice.?.slice(); + new_credentials.changed_credentials = true; + } + } else { + return globalObject.throwInvalidArgumentTypeValue("bucket", "string", js_value); + } + } + } + + if (try opts.getTruthyComptime(globalObject, "sessionToken")) |js_value| { + if (!js_value.isEmptyOrUndefinedOrNull()) { + if (js_value.isString()) { + const str = bun.String.fromJS(js_value, globalObject); + defer str.deref(); + if (str.tag != .Empty and str.tag != .Dead) { + new_credentials._sessionTokenSlice = str.toUTF8(bun.default_allocator); + new_credentials.credentials.sessionToken = new_credentials._sessionTokenSlice.?.slice(); + new_credentials.changed_credentials = true; } } else { return globalObject.throwInvalidArgumentTypeValue("bucket", "string", js_value); @@ -147,6 +233,21 @@ pub const AWSCredentials = struct { new_credentials.options.queueSize = @intCast(@max(queueSize, std.math.maxInt(u8))); } } + + if (try opts.getOptional(globalObject, "retry", i32)) |retry| { + if (retry < 0 and retry > 255) { + return globalObject.throwRangeError(retry, .{ + .min = 0, + .max = 255, + .field_name = "retry", + }); + } else { + new_credentials.options.retry = @intCast(retry); + } + } + if (try opts.getOptionalEnum(globalObject, "acl", ACL)) |acl| { + new_credentials.acl = acl; + } } } return new_credentials; @@ -177,6 +278,11 @@ pub const AWSCredentials = struct { bun.default_allocator.dupe(u8, this.bucket) catch bun.outOfMemory() else "", + + .sessionToken = if (this.sessionToken.len > 0) + bun.default_allocator.dupe(u8, this.sessionToken) catch bun.outOfMemory() + else + "", }); } pub fn deinit(this: *@This()) void { @@ -195,6 +301,9 @@ pub const AWSCredentials = struct { if (this.bucket.len > 0) { bun.default_allocator.free(this.bucket); } + if (this.sessionToken.len > 0) { + bun.default_allocator.free(this.sessionToken); + } this.destroy(); } @@ -250,19 +359,43 @@ pub const AWSCredentials = struct { authorization: []const u8, url: []const u8, - content_disposition: []const u8, - _headers: [5]picohttp.Header, - _headers_len: u8 = 4, + content_disposition: []const u8 = "", + session_token: []const u8 = "", + acl: ?ACL = null, + _headers: [7]picohttp.Header = .{ + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", 
.value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + }, + _headers_len: u8 = 0, pub fn headers(this: *const @This()) []const picohttp.Header { return this._headers[0..this._headers_len]; } + pub fn mixWithHeader(this: *const @This(), headers_buffer: []picohttp.Header, header: picohttp.Header) []const picohttp.Header { + // copy the headers to buffer + const len = this._headers_len; + for (this._headers[0..len], 0..len) |existing_header, i| { + headers_buffer[i] = existing_header; + } + headers_buffer[len] = header; + return headers_buffer[0 .. len + 1]; + } + pub fn deinit(this: *const @This()) void { if (this.amz_date.len > 0) { bun.default_allocator.free(this.amz_date); } + if (this.session_token.len > 0) { + bun.default_allocator.free(this.session_token); + } + if (this.content_disposition.len > 0) { bun.default_allocator.free(this.content_disposition); } @@ -284,15 +417,16 @@ pub const AWSCredentials = struct { pub const SignQueryOptions = struct { expires: usize = 86400, }; - pub const SignOptions = struct { path: []const u8, method: bun.http.Method, content_hash: ?[]const u8 = null, search_params: ?[]const u8 = null, content_disposition: ?[]const u8 = null, + acl: ?ACL = null, }; - fn guessRegion(endpoint: []const u8) []const u8 { + + pub fn guessRegion(endpoint: []const u8) []const u8 { if (endpoint.len > 0) { if (strings.endsWith(endpoint, ".r2.cloudflarestorage.com")) return "auto"; if (strings.indexOf(endpoint, ".amazonaws.com")) |end| { @@ -310,7 +444,7 @@ pub const AWSCredentials = struct { else => error.InvalidHexChar, }; } - fn encodeURIComponent(input: []const u8, buffer: []u8) ![]const u8 { + fn encodeURIComponent(input: []const u8, buffer: []u8, comptime encode_slash: bool) ![]const u8 { var written: usize = 0; for (input) |c| { @@ -323,6 +457,12 @@ pub const AWSCredentials = struct { }, // All other characters need to be percent-encoded else => { + if (!encode_slash and (c == '/' or c == '\\')) { + if (written >= buffer.len) return error.BufferTooSmall; + buffer[written] = if (c == '\\') '/' else c; + written += 1; + continue; + } if (written + 3 > buffer.len) return error.BufferTooSmall; buffer[written] = '%'; // Convert byte to hex @@ -344,40 +484,46 @@ pub const AWSCredentials = struct { }; fn getSignErrorMessage(comptime err: anyerror) [:0]const u8 { return switch (err) { - error.MissingCredentials => return "missing s3 credentials", - error.InvalidMethod => return "method must be GET, PUT, DELETE or HEAD when using s3 protocol", - error.InvalidPath => return "invalid s3 bucket, key combination", - error.InvalidEndpoint => return "invalid s3 endpoint", - else => return "failed to retrieve s3 content check your credentials", + error.MissingCredentials => return "Missing S3 credentials. 'accessKeyId', 'secretAccessKey', 'bucket', and 'endpoint' are required", + error.InvalidMethod => return "Method must be GET, PUT, DELETE or HEAD when using s3:// protocol", + error.InvalidPath => return "Invalid S3 bucket, key combination", + error.InvalidEndpoint => return "Invalid S3 endpoint", + error.InvalidSessionToken => return "Invalid session token", + else => return "Failed to retrieve S3 content. 
Are the credentials correct?", }; } pub fn getJSSignError(err: anyerror, globalThis: *JSC.JSGlobalObject) JSC.JSValue { return switch (err) { - error.MissingCredentials => return globalThis.ERR_AWS_MISSING_CREDENTIALS(getSignErrorMessage(error.MissingCredentials), .{}).toJS(), - error.InvalidMethod => return globalThis.ERR_AWS_INVALID_METHOD(getSignErrorMessage(error.InvalidMethod), .{}).toJS(), - error.InvalidPath => return globalThis.ERR_AWS_INVALID_PATH(getSignErrorMessage(error.InvalidPath), .{}).toJS(), - error.InvalidEndpoint => return globalThis.ERR_AWS_INVALID_ENDPOINT(getSignErrorMessage(error.InvalidEndpoint), .{}).toJS(), - else => return globalThis.ERR_AWS_INVALID_SIGNATURE(getSignErrorMessage(error.SignError), .{}).toJS(), + error.MissingCredentials => return globalThis.ERR_S3_MISSING_CREDENTIALS(getSignErrorMessage(error.MissingCredentials), .{}).toJS(), + error.InvalidMethod => return globalThis.ERR_S3_INVALID_METHOD(getSignErrorMessage(error.InvalidMethod), .{}).toJS(), + error.InvalidPath => return globalThis.ERR_S3_INVALID_PATH(getSignErrorMessage(error.InvalidPath), .{}).toJS(), + error.InvalidEndpoint => return globalThis.ERR_S3_INVALID_ENDPOINT(getSignErrorMessage(error.InvalidEndpoint), .{}).toJS(), + error.InvalidSessionToken => return globalThis.ERR_S3_INVALID_SESSION_TOKEN(getSignErrorMessage(error.InvalidSessionToken), .{}).toJS(), + else => return globalThis.ERR_S3_INVALID_SIGNATURE(getSignErrorMessage(error.SignError), .{}).toJS(), }; } pub fn throwSignError(err: anyerror, globalThis: *JSC.JSGlobalObject) bun.JSError { return switch (err) { - error.MissingCredentials => globalThis.ERR_AWS_MISSING_CREDENTIALS(getSignErrorMessage(error.MissingCredentials), .{}).throw(), - error.InvalidMethod => globalThis.ERR_AWS_INVALID_METHOD(getSignErrorMessage(error.InvalidMethod), .{}).throw(), - error.InvalidPath => globalThis.ERR_AWS_INVALID_PATH(getSignErrorMessage(error.InvalidPath), .{}).throw(), - error.InvalidEndpoint => globalThis.ERR_AWS_INVALID_ENDPOINT(getSignErrorMessage(error.InvalidEndpoint), .{}).throw(), - else => globalThis.ERR_AWS_INVALID_SIGNATURE(getSignErrorMessage(error.SignError), .{}).throw(), + error.MissingCredentials => globalThis.ERR_S3_MISSING_CREDENTIALS(getSignErrorMessage(error.MissingCredentials), .{}).throw(), + error.InvalidMethod => globalThis.ERR_S3_INVALID_METHOD(getSignErrorMessage(error.InvalidMethod), .{}).throw(), + error.InvalidPath => globalThis.ERR_S3_INVALID_PATH(getSignErrorMessage(error.InvalidPath), .{}).throw(), + error.InvalidEndpoint => globalThis.ERR_S3_INVALID_ENDPOINT(getSignErrorMessage(error.InvalidEndpoint), .{}).throw(), + error.InvalidSessionToken => globalThis.ERR_S3_INVALID_SESSION_TOKEN(getSignErrorMessage(error.InvalidSessionToken), .{}).throw(), + else => globalThis.ERR_S3_INVALID_SIGNATURE(getSignErrorMessage(error.SignError), .{}).throw(), }; } pub fn getSignErrorCodeAndMessage(err: anyerror) ErrorCodeAndMessage { + // keep error codes consistent for internal errors return switch (err) { - error.MissingCredentials => .{ .code = "MissingCredentials", .message = getSignErrorMessage(error.MissingCredentials) }, - error.InvalidMethod => .{ .code = "InvalidMethod", .message = getSignErrorMessage(error.InvalidMethod) }, - error.InvalidPath => .{ .code = "InvalidPath", .message = getSignErrorMessage(error.InvalidPath) }, - error.InvalidEndpoint => .{ .code = "InvalidEndpoint", .message = getSignErrorMessage(error.InvalidEndpoint) }, - else => .{ .code = "SignError", .message = getSignErrorMessage(error.SignError) }, 
+ error.MissingCredentials => .{ .code = "ERR_S3_MISSING_CREDENTIALS", .message = getSignErrorMessage(error.MissingCredentials) }, + error.InvalidMethod => .{ .code = "ERR_S3_INVALID_METHOD", .message = getSignErrorMessage(error.InvalidMethod) }, + error.InvalidPath => .{ .code = "ERR_S3_INVALID_PATH", .message = getSignErrorMessage(error.InvalidPath) }, + error.InvalidEndpoint => .{ .code = "ERR_S3_INVALID_ENDPOINT", .message = getSignErrorMessage(error.InvalidEndpoint) }, + error.InvalidSessionToken => .{ .code = "ERR_S3_INVALID_SESSION_TOKEN", .message = getSignErrorMessage(error.InvalidSessionToken) }, + else => .{ .code = "ERR_S3_INVALID_SIGNATURE", .message = getSignErrorMessage(error.SignError) }, }; } + pub fn signRequest(this: *const @This(), signOptions: SignOptions, signQueryOption: ?SignQueryOptions) !SignResult { const method = signOptions.method; const request_path = signOptions.path; @@ -388,6 +534,9 @@ pub const AWSCredentials = struct { if (content_disposition != null and content_disposition.?.len == 0) { content_disposition = null; } + const session_token: ?[]const u8 = if (this.sessionToken.len == 0) null else this.sessionToken; + + const acl: ?[]const u8 = if (signOptions.acl) |acl_value| acl_value.toString() else null; if (this.accessKeyId.len == 0 or this.secretAccessKey.len == 0) return error.MissingCredentials; const signQuery = signQueryOption != null; @@ -403,9 +552,13 @@ pub const AWSCredentials = struct { const region = if (this.region.len > 0) this.region else guessRegion(this.endpoint); var full_path = request_path; + // handle \\ on bucket name if (strings.startsWith(full_path, "/")) { full_path = full_path[1..]; + } else if (strings.startsWith(full_path, "\\")) { + full_path = full_path[1..]; } + var path: []const u8 = full_path; var bucket: []const u8 = this.bucket; @@ -414,25 +567,41 @@ pub const AWSCredentials = struct { // guess bucket using path if (strings.indexOf(full_path, "/")) |end| { + if (strings.indexOf(full_path, "\\")) |backslash_index| { + if (backslash_index < end) { + bucket = full_path[0..backslash_index]; + path = full_path[backslash_index + 1 ..]; + } + } bucket = full_path[0..end]; path = full_path[end + 1 ..]; + } else if (strings.indexOf(full_path, "\\")) |backslash_index| { + bucket = full_path[0..backslash_index]; + path = full_path[backslash_index + 1 ..]; } else { return error.InvalidPath; } } if (strings.endsWith(path, "/")) { path = path[0..path.len]; + } else if (strings.endsWith(path, "\\")) { + path = path[0 .. 
path.len - 1]; } if (strings.startsWith(path, "/")) { path = path[1..]; + } else if (strings.startsWith(path, "\\")) { + path = path[1..]; } // if we allow path.len == 0 it will list the bucket for now we disallow if (path.len == 0) return error.InvalidPath; - var path_buffer: [1024 + 63 + 2]u8 = undefined; // 1024 max key size and 63 max bucket name - - const normalizedPath = std.fmt.bufPrint(&path_buffer, "/{s}/{s}", .{ bucket, path }) catch return error.InvalidPath; + var normalized_path_buffer: [1024 + 63 + 2]u8 = undefined; // 1024 max key size and 63 max bucket name + var path_buffer: [1024]u8 = undefined; + var bucket_buffer: [63]u8 = undefined; + bucket = encodeURIComponent(bucket, &bucket_buffer, false) catch return error.InvalidPath; + path = encodeURIComponent(path, &path_buffer, false) catch return error.InvalidPath; + const normalizedPath = std.fmt.bufPrint(&normalized_path_buffer, "/{s}/{s}", .{ bucket, path }) catch return error.InvalidPath; const date_result = getAMZDate(bun.default_allocator); const amz_date = date_result.date; @@ -440,10 +609,34 @@ pub const AWSCredentials = struct { const amz_day = amz_date[0..8]; const signed_headers = if (signQuery) "host" else brk: { - if (content_disposition != null) { - break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date"; + if (acl != null) { + if (content_disposition != null) { + if (session_token != null) { + break :brk "content-disposition;host;x-amz-acl;x-amz-content-sha256;x-amz-date;x-amz-security-token"; + } else { + break :brk "content-disposition;host;x-amz-acl;x-amz-content-sha256;x-amz-date"; + } + } else { + if (session_token != null) { + break :brk "host;x-amz-content-sha256;x-amz-date;x-amz-security-token"; + } else { + break :brk "host;x-amz-content-sha256;x-amz-date"; + } + } } else { - break :brk "host;x-amz-content-sha256;x-amz-date"; + if (content_disposition != null) { + if (session_token != null) { + break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date;x-amz-security-token"; + } else { + break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date"; + } + } else { + if (session_token != null) { + break :brk "host;x-amz-content-sha256;x-amz-date;x-amz-security-token"; + } else { + break :brk "host;x-amz-content-sha256;x-amz-date"; + } + } } }; // detect service name and host from region or endpoint @@ -451,7 +644,7 @@ pub const AWSCredentials = struct { var encoded_host: []const u8 = ""; const host = brk_host: { if (this.endpoint.len > 0) { - encoded_host = encodeURIComponent(this.endpoint, &encoded_host_buffer) catch return error.InvalidEndpoint; + encoded_host = encodeURIComponent(this.endpoint, &encoded_host_buffer, true) catch return error.InvalidEndpoint; break :brk_host try bun.default_allocator.dupe(u8, this.endpoint); } else { break :brk_host try std.fmt.allocPrint(bun.default_allocator, "s3.{s}.amazonaws.com", .{region}); @@ -462,7 +655,7 @@ pub const AWSCredentials = struct { errdefer bun.default_allocator.free(host); const aws_content_hash = if (content_hash) |hash| hash else ("UNSIGNED-PAYLOAD"); - var tmp_buffer: [2048]u8 = undefined; + var tmp_buffer: [4096]u8 = undefined; const authorization = brk: { // we hash the hash so we need 2 buffers @@ -485,26 +678,93 @@ pub const AWSCredentials = struct { break :brk_sign result; }; if (signQuery) { - const canonical = try std.fmt.bufPrint(&tmp_buffer, 
"{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, if (encoded_host.len > 0) encoded_host else host, signed_headers, aws_content_hash }); + var token_encoded_buffer: [2048]u8 = undefined; // token is normaly like 600-700 but can be up to 2k + var encoded_session_token: ?[]const u8 = null; + if (session_token) |token| { + encoded_session_token = encodeURIComponent(token, &token_encoded_buffer, true) catch return error.InvalidSessionToken; + } + const canonical = brk_canonical: { + if (acl) |acl_value| { + if (encoded_session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, if (encoded_host.len > 0) encoded_host else host, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, if (encoded_host.len > 0) encoded_host else host, signed_headers, aws_content_hash }); + } + } else { + if (encoded_session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, if (encoded_host.len > 0) encoded_host else host, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, if (encoded_host.len > 0) encoded_host else host, signed_headers, aws_content_hash }); + } + } + }; var sha_digest = std.mem.zeroes(bun.sha.SHA256.Digest); bun.sha.SHA256.hash(canonical, &sha_digest, JSC.VirtualMachine.get().rareData().boringEngine()); const signValue = try std.fmt.bufPrint(&tmp_buffer, "AWS4-HMAC-SHA256\n{s}\n{s}/{s}/{s}/aws4_request\n{s}", .{ amz_date, amz_day, region, service_name, bun.fmt.bytesToHex(sha_digest[0..bun.sha.SHA256.digest], .lower) }); const signature = bun.hmac.generate(sigDateRegionServiceReq, signValue, .sha256, &hmac_sig_service) orelse return error.FailedToGenerateSignature; - break :brk try std.fmt.allocPrint( - bun.default_allocator, - "https://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}", - .{ host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, 
bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, - ); + if (acl) |acl_value| { + if (encoded_session_token) |token| { + break :brk try std.fmt.allocPrint( + bun.default_allocator, + "https://{s}{s}?X-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}", + .{ host, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, + ); + } else { + break :brk try std.fmt.allocPrint( + bun.default_allocator, + "https://{s}{s}?X-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}", + .{ host, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, + ); + } + } else { + if (encoded_session_token) |token| { + break :brk try std.fmt.allocPrint( + bun.default_allocator, + "https://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}", + .{ host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, + ); + } else { + break :brk try std.fmt.allocPrint( + bun.default_allocator, + "https://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}", + .{ host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, + ); + } + } } else { var encoded_content_disposition_buffer: [255]u8 = undefined; - const encoded_content_disposition: []const u8 = if (content_disposition) |cd| encodeURIComponent(cd, &encoded_content_disposition_buffer) catch return error.ContentTypeIsTooLong else ""; + const encoded_content_disposition: []const u8 = if (content_disposition) |cd| encodeURIComponent(cd, &encoded_content_disposition_buffer, true) catch return error.ContentTypeIsTooLong else ""; const canonical = brk_canonical: { - if (content_disposition != null) { - break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, if (encoded_host.len > 0) encoded_host else host, aws_content_hash, amz_date, signed_headers, aws_content_hash }); + if (acl) |acl_value| { + if (content_disposition != null) { + if (session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] 
else "", encoded_content_disposition, if (encoded_host.len > 0) encoded_host else host, acl_value, aws_content_hash, amz_date, token, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, if (encoded_host.len > 0) encoded_host else host, acl_value, aws_content_hash, amz_date, signed_headers, aws_content_hash }); + } + } else { + if (session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", if (encoded_host.len > 0) encoded_host else host, acl_value, aws_content_hash, amz_date, token, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", if (encoded_host.len > 0) encoded_host else host, acl_value, aws_content_hash, amz_date, signed_headers, aws_content_hash }); + } + } } else { - break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", if (encoded_host.len > 0) encoded_host else host, aws_content_hash, amz_date, signed_headers, aws_content_hash }); + if (content_disposition != null) { + if (session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, if (encoded_host.len > 0) encoded_host else host, aws_content_hash, amz_date, token, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, if (encoded_host.len > 0) encoded_host else host, aws_content_hash, amz_date, signed_headers, aws_content_hash }); + } + } else { + if (session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", if (encoded_host.len > 0) encoded_host else host, aws_content_hash, amz_date, token, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] 
else "", if (encoded_host.len > 0) encoded_host else host, aws_content_hash, amz_date, signed_headers, aws_content_hash }); + } + } } }; var sha_digest = std.mem.zeroes(bun.sha.SHA256.Digest); @@ -531,61 +791,86 @@ pub const AWSCredentials = struct { .amz_date = "", .host = "", .authorization = "", + .acl = signOptions.acl, .url = authorization, - .content_disposition = "", - ._headers = .{ - .{ .name = "", .value = "" }, - .{ .name = "", .value = "" }, - .{ .name = "", .value = "" }, - .{ .name = "", .value = "" }, - .{ .name = "", .value = "" }, - }, - ._headers_len = 0, }; } - if (content_disposition) |cd| { - const content_disposition_value = bun.default_allocator.dupe(u8, cd) catch bun.outOfMemory(); - return SignResult{ - .amz_date = amz_date, - .host = host, - .authorization = authorization, - .url = try std.fmt.allocPrint(bun.default_allocator, "https://{s}{s}{s}", .{ host, normalizedPath, if (search_params) |s| s else "" }), - .content_disposition = content_disposition_value, - ._headers = .{ - .{ .name = "x-amz-content-sha256", .value = aws_content_hash }, - .{ .name = "x-amz-date", .value = amz_date }, - .{ .name = "Authorization", .value = authorization[0..] }, - .{ .name = "Host", .value = host }, - .{ .name = "Content-Disposition", .value = content_disposition_value }, - }, - ._headers_len = 5, - }; - } - return SignResult{ + var result = SignResult{ .amz_date = amz_date, .host = host, .authorization = authorization, + .acl = signOptions.acl, .url = try std.fmt.allocPrint(bun.default_allocator, "https://{s}{s}{s}", .{ host, normalizedPath, if (search_params) |s| s else "" }), - .content_disposition = "", - ._headers = .{ + ._headers = [_]picohttp.Header{ .{ .name = "x-amz-content-sha256", .value = aws_content_hash }, .{ .name = "x-amz-date", .value = amz_date }, .{ .name = "Authorization", .value = authorization[0..] 
}, .{ .name = "Host", .value = host }, .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, }, ._headers_len = 4, }; + + if (acl) |acl_value| { + result._headers[result._headers_len] = .{ .name = "x-amz-acl", .value = acl_value }; + result._headers_len += 1; + } + + if (session_token) |token| { + const session_token_value = bun.default_allocator.dupe(u8, token) catch bun.outOfMemory(); + result.session_token = session_token_value; + result._headers[result._headers_len] = .{ .name = "x-amz-security-token", .value = session_token_value }; + result._headers_len += 1; + } + + if (content_disposition) |cd| { + const content_disposition_value = bun.default_allocator.dupe(u8, cd) catch bun.outOfMemory(); + result.content_disposition = content_disposition_value; + result._headers[result._headers_len] = .{ .name = "Content-Disposition", .value = content_disposition_value }; + result._headers_len += 1; + } + + return result; } + const JSS3Error = extern struct { + code: bun.String = bun.String.empty, + message: bun.String = bun.String.empty, + path: bun.String = bun.String.empty, + + pub fn init(code: []const u8, message: []const u8, path: ?[]const u8) @This() { + return .{ + // lets make sure we can reuse code and message and keep it service independent + .code = bun.String.createAtomIfPossible(code), + .message = bun.String.createAtomIfPossible(message), + .path = if (path) |p| bun.String.init(p) else bun.String.empty, + }; + } + + pub fn deinit(this: *const @This()) void { + this.path.deref(); + this.code.deref(); + this.message.deref(); + } + + pub fn toErrorInstance(this: *const @This(), global: *JSC.JSGlobalObject) JSC.JSValue { + defer this.deinit(); + + return S3Error__toErrorInstance(this, global); + } + extern fn S3Error__toErrorInstance(this: *const @This(), global: *JSC.JSGlobalObject) callconv(JSC.conv) JSC.JSValue; + }; + pub const S3Error = struct { code: []const u8, message: []const u8, - pub fn toJS(err: *const @This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { - const js_err = globalObject.createErrorInstance("{s}", .{err.message}); - js_err.put(globalObject, JSC.ZigString.static("code"), JSC.ZigString.init(err.code).toJS(globalObject)); - return js_err; + pub fn toJS(err: *const @This(), globalObject: *JSC.JSGlobalObject, path: ?[]const u8) JSC.JSValue { + const value = JSS3Error.init(err.code, err.message, path).toErrorInstance(globalObject); + bun.assert(!globalObject.hasException()); + return value; } }; pub const S3StatResult = union(enum) { @@ -594,7 +879,7 @@ pub const AWSCredentials = struct { /// etag is not owned and need to be copied if used after this callback etag: []const u8 = "", }, - not_found: void, + not_found: S3Error, /// failure error is not owned and need to be copied if used after this callback failure: S3Error, @@ -606,7 +891,7 @@ pub const AWSCredentials = struct { /// body is owned and dont need to be copied, but dont forget to free it body: bun.MutableString, }, - not_found: void, + not_found: S3Error, /// failure error is not owned and need to be copied if used after this callback failure: S3Error, }; @@ -617,7 +902,7 @@ pub const AWSCredentials = struct { }; pub const S3DeleteResult = union(enum) { success: void, - not_found: void, + not_found: S3Error, /// failure error is not owned and need to be copied if used after this callback failure: S3Error, @@ -678,6 +963,20 @@ pub const AWSCredentials = struct { }, context), } } + pub fn notFound(this: @This(), code: []const u8, message: []const u8, context: 
*anyopaque) void { + switch (this) { + inline .download, + .stat, + .delete, + => |callback| callback(.{ + .not_found = .{ + .code = code, + .message = message, + }, + }, context), + else => this.fail(code, message, context), + } + } }; pub fn deinit(this: *@This()) void { if (this.result.certificate_info) |*certificate| { @@ -697,11 +996,17 @@ pub const AWSCredentials = struct { this.destroy(); } - fn fail(this: *@This()) void { + const ErrorType = enum { + not_found, + failure, + }; + fn errorWithBody(this: @This(), comptime error_type: ErrorType) void { var code: []const u8 = "UnknownError"; var message: []const u8 = "an unexpected error has occurred"; + var has_error_code = false; if (this.result.fail) |err| { code = @errorName(err); + has_error_code = true; } else if (this.result.body) |body| { const bytes = body.list.items; if (bytes.len > 0) { @@ -709,6 +1014,7 @@ pub const AWSCredentials = struct { if (strings.indexOf(bytes, "")) |start| { if (strings.indexOf(bytes, "")) |end| { code = bytes[start + "".len .. end]; + has_error_code = true; } } if (strings.indexOf(bytes, "")) |start| { @@ -718,7 +1024,16 @@ pub const AWSCredentials = struct { } } } - this.callback.fail(code, message, this.callback_context); + + if (error_type == .not_found) { + if (!has_error_code) { + code = "NoSuchKey"; + message = "The specified key does not exist."; + } + this.callback.notFound(code, message, this.callback_context); + } else { + this.callback.fail(code, message, this.callback_context); + } } fn failIfContainsError(this: *@This(), status: u32) bool { @@ -759,7 +1074,7 @@ pub const AWSCredentials = struct { pub fn onResponse(this: *@This()) void { defer this.deinit(); if (!this.result.isSuccess()) { - this.fail(); + this.errorWithBody(.failure); return; } bun.assert(this.result.metadata != null); @@ -767,9 +1082,6 @@ pub const AWSCredentials = struct { switch (this.callback) { .stat => |callback| { switch (response.status_code) { - 404 => { - callback(.{ .not_found = {} }, this.callback_context); - }, 200 => { callback(.{ .success = .{ @@ -778,21 +1090,24 @@ pub const AWSCredentials = struct { }, }, this.callback_context); }, + 404 => { + this.errorWithBody(.not_found); + }, else => { - this.fail(); + this.errorWithBody(.failure); }, } }, .delete => |callback| { switch (response.status_code) { - 404 => { - callback(.{ .not_found = {} }, this.callback_context); - }, 200, 204 => { callback(.{ .success = {} }, this.callback_context); }, + 404 => { + this.errorWithBody(.not_found); + }, else => { - this.fail(); + this.errorWithBody(.failure); }, } }, @@ -802,15 +1117,12 @@ pub const AWSCredentials = struct { callback(.{ .success = {} }, this.callback_context); }, else => { - this.fail(); + this.errorWithBody(.failure); }, } }, .download => |callback| { switch (response.status_code) { - 404 => { - callback(.{ .not_found = {} }, this.callback_context); - }, 200, 204, 206 => { const body = this.response_buffer; this.response_buffer = .{ @@ -827,9 +1139,12 @@ pub const AWSCredentials = struct { }, }, this.callback_context); }, + 404 => { + this.errorWithBody(.not_found); + }, else => { //error - this.fail(); + this.errorWithBody(.failure); }, } }, @@ -844,7 +1159,7 @@ pub const AWSCredentials = struct { if (response.headers.get("etag")) |etag| { callback(.{ .etag = etag }, this.callback_context); } else { - this.fail(); + this.errorWithBody(.failure); } } }, @@ -978,14 +1293,7 @@ pub const AWSCredentials = struct { } } } - if (state.status_code == 404) { - if (!has_body_code) { - code = "FileNotFound"; - 
} - if (!has_body_message) { - message = "File not found"; - } - } + err = .{ .code = code, .message = message, @@ -1085,6 +1393,7 @@ pub const AWSCredentials = struct { body: []const u8, proxy_url: ?[]const u8 = null, range: ?[]const u8 = null, + acl: ?ACL = null, }; pub fn executeSimpleS3Request( @@ -1098,6 +1407,7 @@ pub const AWSCredentials = struct { .method = options.method, .search_params = options.search_params, .content_disposition = options.content_disposition, + .acl = options.acl, }, null) catch |sign_err| { if (options.range) |range_| bun.default_allocator.free(range_); const error_code_and_message = getSignErrorCodeAndMessage(sign_err); @@ -1106,40 +1416,15 @@ pub const AWSCredentials = struct { }; const headers = brk: { + var header_buffer: [10]picohttp.Header = undefined; if (options.range) |range_| { - const _headers = result.headers(); - var headersWithRange: [5]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - .{ .name = "range", .value = range_ }, - }; - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(&headersWithRange, bun.default_allocator) catch bun.outOfMemory(); + const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ }); + break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); } else { if (options.content_type) |content_type| { if (content_type.len > 0) { - const _headers = result.headers(); - if (_headers.len > 4) { - var headersWithContentType: [6]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - _headers[4], - .{ .name = "Content-Type", .value = content_type }, - }; - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(&headersWithContentType, bun.default_allocator) catch bun.outOfMemory(); - } - - var headersWithContentType: [5]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - .{ .name = "Content-Type", .value = content_type }, - }; - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(&headersWithContentType, bun.default_allocator) catch bun.outOfMemory(); + const _headers = result.mixWithHeader(&header_buffer, .{ .name = "Content-Type", .value = content_type }); + break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); } } @@ -1252,17 +1537,11 @@ pub const AWSCredentials = struct { return; }; + var header_buffer: [10]picohttp.Header = undefined; const headers = brk: { if (range) |range_| { - const _headers = result.headers(); - var headersWithRange: [5]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - .{ .name = "range", .value = range_ }, - }; - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(&headersWithRange, bun.default_allocator) catch bun.outOfMemory(); + const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ }); + break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); } else { break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); } @@ -1328,12 +1607,14 @@ pub const AWSCredentials = struct { .ptr = .{ .Bytes = &reader.context }, .value = readable_value, }, globalThis), + .path = bun.default_allocator.dupe(u8, path) catch bun.outOfMemory(), })); return readable_value; } const S3DownloadStreamWrapper = struct { readable_stream_ref: JSC.WebCore.ReadableStream.Strong, + path: []const u8, pub usingnamespace bun.New(@This()); pub fn 
callback(chunk: bun.MutableString, has_more: bool, request_err: ?S3Error, this: *@This()) void { @@ -1348,7 +1629,7 @@ pub const AWSCredentials = struct { readable.ptr.Bytes.onData( .{ - .err = .{ .JSValue = err.toJS(globalThis) }, + .err = .{ .JSValue = err.toJS(globalThis, this.path) }, }, bun.default_allocator, ); @@ -1381,6 +1662,7 @@ pub const AWSCredentials = struct { pub fn deinit(this: *@This()) void { this.readable_stream_ref.deinit(); + bun.default_allocator.free(this.path); this.destroy(); } }; @@ -1394,37 +1676,41 @@ pub const AWSCredentials = struct { }, .{ .delete = callback }, callback_context); } - pub fn s3Upload(this: *const @This(), path: []const u8, content: []const u8, content_type: ?[]const u8, proxy_url: ?[]const u8, callback: *const fn (S3UploadResult, *anyopaque) void, callback_context: *anyopaque) void { + pub fn s3Upload(this: *const @This(), path: []const u8, content: []const u8, content_type: ?[]const u8, acl: ?ACL, proxy_url: ?[]const u8, callback: *const fn (S3UploadResult, *anyopaque) void, callback_context: *anyopaque) void { this.executeSimpleS3Request(.{ .path = path, .method = .PUT, .proxy_url = proxy_url, .body = content, .content_type = content_type, + .acl = acl, }, .{ .upload = callback }, callback_context); } const S3UploadStreamWrapper = struct { readable_stream_ref: JSC.WebCore.ReadableStream.Strong, - sink: *JSC.WebCore.FetchTaskletChunkedRequestSink, + sink: *JSC.WebCore.NetworkSink, + task: *MultiPartUpload, callback: ?*const fn (S3UploadResult, *anyopaque) void, callback_context: *anyopaque, ref_count: u32 = 1, + path: []const u8, // this is owned by the task not by the wrapper pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); pub fn resolve(result: S3UploadResult, self: *@This()) void { const sink = self.sink; defer self.deref(); - - if (sink.endPromise.globalObject()) |globalObject| { - switch (result) { - .success => sink.endPromise.resolve(globalObject, JSC.jsNumber(0)), - .failure => |err| { - if (!sink.done) { - sink.abort(); - return; - } - sink.endPromise.rejectOnNextTick(globalObject, err.toJS(globalObject)); - }, + if (sink.endPromise.hasValue()) { + if (sink.endPromise.globalObject()) |globalObject| { + switch (result) { + .success => sink.endPromise.resolve(globalObject, JSC.jsNumber(0)), + .failure => |err| { + if (!sink.done) { + sink.abort(); + return; + } + sink.endPromise.rejectOnNextTick(globalObject, err.toJS(globalObject, self.path)); + }, + } } } if (self.callback) |callback| { @@ -1436,6 +1722,7 @@ pub const AWSCredentials = struct { self.readable_stream_ref.deinit(); self.sink.finalize(); self.sink.destroy(); + self.task.deref(); self.destroy(); } }; @@ -1443,13 +1730,12 @@ pub const AWSCredentials = struct { var args = callframe.arguments_old(2); var this = args.ptr[args.len - 1].asPromisePtr(S3UploadStreamWrapper); defer this.deref(); - if (this.sink.endPromise.hasValue()) { - this.sink.endPromise.resolve(globalThis, JSC.jsNumber(0)); - } + if (this.readable_stream_ref.get()) |stream| { stream.done(globalThis); } this.readable_stream_ref.deinit(); + this.task.continueStream(); return .undefined; } @@ -1458,6 +1744,7 @@ pub const AWSCredentials = struct { const args = callframe.arguments_old(2); var this = args.ptr[args.len - 1].asPromisePtr(S3UploadStreamWrapper); defer this.deref(); + const err = args.ptr[0]; if (this.sink.endPromise.hasValue()) { this.sink.endPromise.rejectOnNextTick(globalThis, err); @@ -1475,6 +1762,8 @@ pub const AWSCredentials = struct { }); } } + this.task.continueStream(); + return 
.undefined; } pub const shim = JSC.Shimmer("Bun", "S3UploadStream", @This()); @@ -1491,10 +1780,34 @@ pub const AWSCredentials = struct { } /// consumes the readable stream and upload to s3 - pub fn s3UploadStream(this: *@This(), path: []const u8, readable_stream: JSC.WebCore.ReadableStream, globalThis: *JSC.JSGlobalObject, options: MultiPartUpload.MultiPartUploadOptions, content_type: ?[]const u8, proxy: ?[]const u8, callback: ?*const fn (S3UploadResult, *anyopaque) void, callback_context: *anyopaque) JSC.JSValue { + pub fn s3UploadStream(this: *@This(), path: []const u8, readable_stream: JSC.WebCore.ReadableStream, globalThis: *JSC.JSGlobalObject, options: MultiPartUpload.MultiPartUploadOptions, acl: ?ACL, content_type: ?[]const u8, proxy: ?[]const u8, callback: ?*const fn (S3UploadResult, *anyopaque) void, callback_context: *anyopaque) JSC.JSValue { this.ref(); // ref the credentials const proxy_url = (proxy orelse ""); + if (readable_stream.isDisturbed(globalThis)) { + return JSC.JSPromise.rejectedPromiseValue(globalThis, bun.String.static("ReadableStream is already disturbed").toErrorInstance(globalThis)); + } + + switch (readable_stream.ptr) { + .Invalid => { + return JSC.JSPromise.rejectedPromiseValue(globalThis, bun.String.static("ReadableStream is invalid").toErrorInstance(globalThis)); + }, + inline .File, .Bytes => |stream| { + if (stream.pending.result == .err) { + // we got an error, fail early + const err = stream.pending.result.err; + stream.pending = .{ .result = .{ .done = {} } }; + const js_err, const was_strong = err.toJSWeak(globalThis); + if (was_strong == .Strong) { + js_err.unprotect(); + } + js_err.ensureStillAlive(); + return JSC.JSPromise.rejectedPromise(globalThis, js_err).asValue(globalThis); + } + }, + else => {}, + } + const task = MultiPartUpload.new(.{ .credentials = this, .path = bun.default_allocator.dupe(u8, path) catch bun.outOfMemory(), @@ -1503,32 +1816,42 @@ pub const AWSCredentials = struct { .callback = @ptrCast(&S3UploadStreamWrapper.resolve), .callback_context = undefined, .globalThis = globalThis, + .state = .wait_stream_check, .options = options, + .acl = acl, .vm = JSC.VirtualMachine.get(), }); task.poll_ref.ref(task.vm); - task.ref(); // + 1 for the stream + task.ref(); // + 1 for the stream sink - var response_stream = JSC.WebCore.FetchTaskletChunkedRequestSink.new(.{ + var response_stream = JSC.WebCore.NetworkSink.new(.{ .task = .{ .s3_upload = task }, .buffer = .{}, .globalThis = globalThis, .encoded = false, .endPromise = JSC.JSPromise.Strong.init(globalThis), }).toSink(); + task.ref(); // + 1 for the stream wrapper + const endPromise = response_stream.sink.endPromise.value(); const ctx = S3UploadStreamWrapper.new(.{ .readable_stream_ref = JSC.WebCore.ReadableStream.Strong.init(readable_stream, globalThis), .sink = &response_stream.sink, .callback = callback, .callback_context = callback_context, + .path = task.path, + .task = task, }); task.callback_context = @ptrCast(ctx); + // keep the task alive until we are done configuring the signal + task.ref(); + defer task.deref(); + var signal = &response_stream.sink.signal; - signal.* = JSC.WebCore.FetchTaskletChunkedRequestSink.JSSink.SinkSignal.init(.zero); + signal.* = JSC.WebCore.NetworkSink.JSSink.SinkSignal.init(.zero); // explicitly set it to a dead pointer // we use this memory address to disable signals being sent @@ -1536,7 +1859,7 @@ pub const AWSCredentials = struct { bun.assert(signal.isDead()); // We are already corked! 
- const assignment_result: JSC.JSValue = JSC.WebCore.FetchTaskletChunkedRequestSink.JSSink.assignToStream( + const assignment_result: JSC.JSValue = JSC.WebCore.NetworkSink.JSSink.assignToStream( globalThis, readable_stream.value, response_stream, @@ -1549,14 +1872,15 @@ pub const AWSCredentials = struct { bun.assert(!signal.isDead()); if (assignment_result.toError()) |err| { - readable_stream.cancel(globalThis); if (response_stream.sink.endPromise.hasValue()) { response_stream.sink.endPromise.rejectOnNextTick(globalThis, err); } + task.fail(.{ .code = "UnknownError", .message = "ReadableStream ended with an error", }); + readable_stream.cancel(globalThis); return endPromise; } @@ -1568,40 +1892,54 @@ pub const AWSCredentials = struct { if (assignment_result.asAnyPromise()) |promise| { switch (promise.status(globalThis.vm())) { .pending => { + // if we eended and its not canceled the promise is the endPromise + // because assignToStream can return the sink.end() promise + // we set the endPromise in the NetworkSink so we need to resolve it + if (response_stream.sink.ended and !response_stream.sink.cancel) { + task.continueStream(); + + readable_stream.done(globalThis); + return endPromise; + } ctx.ref(); + assignment_result.then( globalThis, task.callback_context, onUploadStreamResolveRequestStream, onUploadStreamRejectRequestStream, ); + // we need to wait the promise to resolve because can be an error/cancel here + if (!task.ended) + task.continueStream(); }, .fulfilled => { + task.continueStream(); + readable_stream.done(globalThis); - if (response_stream.sink.endPromise.hasValue()) { - response_stream.sink.endPromise.resolve(globalThis, JSC.jsNumber(0)); - } }, .rejected => { - readable_stream.cancel(globalThis); if (response_stream.sink.endPromise.hasValue()) { response_stream.sink.endPromise.rejectOnNextTick(globalThis, promise.result(globalThis.vm())); } + task.fail(.{ .code = "UnknownError", .message = "ReadableStream ended with an error", }); + readable_stream.cancel(globalThis); }, } } else { - readable_stream.cancel(globalThis); if (response_stream.sink.endPromise.hasValue()) { response_stream.sink.endPromise.rejectOnNextTick(globalThis, assignment_result); } + task.fail(.{ .code = "UnknownError", .message = "ReadableStream ended with an error", }); + readable_stream.cancel(globalThis); } } return endPromise; @@ -1609,23 +1947,25 @@ pub const AWSCredentials = struct { /// returns a writable stream that writes to the s3 path pub fn s3WritableStream(this: *@This(), path: []const u8, globalThis: *JSC.JSGlobalObject, options: MultiPartUpload.MultiPartUploadOptions, content_type: ?[]const u8, proxy: ?[]const u8) bun.JSError!JSC.JSValue { const Wrapper = struct { - pub fn callback(result: S3UploadResult, sink: *JSC.WebCore.FetchTaskletChunkedRequestSink) void { - if (sink.endPromise.globalObject()) |globalObject| { - const event_loop = globalObject.bunVM().eventLoop(); - event_loop.enter(); - defer event_loop.exit(); - switch (result) { - .success => { - sink.endPromise.resolve(globalObject, JSC.jsNumber(0)); - }, - .failure => |err| { - if (!sink.done) { - sink.abort(); - return; - } + pub fn callback(result: S3UploadResult, sink: *JSC.WebCore.NetworkSink) void { + if (sink.endPromise.hasValue()) { + if (sink.endPromise.globalObject()) |globalObject| { + const event_loop = globalObject.bunVM().eventLoop(); + event_loop.enter(); + defer event_loop.exit(); + switch (result) { + .success => { + sink.endPromise.resolve(globalObject, JSC.jsNumber(0)); + }, + .failure => |err| { + if 
(!sink.done) { + sink.abort(); + return; + } - sink.endPromise.rejectOnNextTick(globalObject, err.toJS(globalObject)); - }, + sink.endPromise.rejectOnNextTick(globalObject, err.toJS(globalObject, sink.path())); + }, + } } } sink.finalize(); @@ -1649,7 +1989,7 @@ pub const AWSCredentials = struct { task.poll_ref.ref(task.vm); task.ref(); // + 1 for the stream - var response_stream = JSC.WebCore.FetchTaskletChunkedRequestSink.new(.{ + var response_stream = JSC.WebCore.NetworkSink.new(.{ .task = .{ .s3_upload = task }, .buffer = .{}, .globalThis = globalThis, @@ -1660,7 +2000,7 @@ pub const AWSCredentials = struct { task.callback_context = @ptrCast(response_stream); var signal = &response_stream.sink.signal; - signal.* = JSC.WebCore.FetchTaskletChunkedRequestSink.JSSink.SinkSignal.init(.zero); + signal.* = JSC.WebCore.NetworkSink.JSSink.SinkSignal.init(.zero); // explicitly set it to a dead pointer // we use this memory address to disable signals being sent @@ -1686,6 +2026,7 @@ pub const MultiPartUpload = struct { ended: bool = false, options: MultiPartUploadOptions = .{}, + acl: ?ACL = null, credentials: *AWSCredentials, poll_ref: bun.Async.KeepAlive = bun.Async.KeepAlive.init(), vm: *JSC.VirtualMachine, @@ -1704,6 +2045,7 @@ pub const MultiPartUpload = struct { multipart_upload_list: bun.ByteList = .{}, state: enum { + wait_stream_check, not_started, multipart_started, multipart_completed, @@ -1756,7 +2098,7 @@ pub const MultiPartUpload = struct { } pub fn onPartResponse(result: AWS.S3PartResult, this: *@This()) void { - if (this.state == .canceled) { + if (this.state == .canceled or this.ctx.state == .finished) { log("onPartResponse {} canceled", .{this.partNumber}); if (this.owns_data) bun.default_allocator.free(this.data); this.ctx.deref(); @@ -1814,7 +2156,7 @@ pub const MultiPartUpload = struct { }, .{ .part = @ptrCast(&onPartResponse) }, this); } pub fn start(this: *@This()) void { - if (this.state != .pending or this.ctx.state != .multipart_completed) return; + if (this.state != .pending or this.ctx.state != .multipart_completed or this.ctx.state == .finished) return; this.ctx.ref(); this.state = .started; this.perform(); @@ -1860,11 +2202,14 @@ pub const MultiPartUpload = struct { } pub fn singleSendUploadResponse(result: AWS.S3UploadResult, this: *@This()) void { + defer this.deref(); + if (this.state == .finished) return; switch (result) { .failure => |err| { if (this.options.retry > 0) { log("singleSendUploadResponse {} retry", .{this.options.retry}); this.options.retry -= 1; + this.ref(); // retry failed this.credentials.executeSimpleS3Request(.{ .path = this.path, @@ -1872,6 +2217,7 @@ pub const MultiPartUpload = struct { .proxy_url = this.proxyUrl(), .body = this.buffered.items, .content_type = this.content_type, + .acl = this.acl, }, .{ .upload = @ptrCast(&singleSendUploadResponse) }, this); return; @@ -1925,6 +2271,9 @@ pub const MultiPartUpload = struct { } fn drainEnqueuedParts(this: *@This()) void { + if (this.state == .finished) { + return; + } // check pending to start or transformed buffered ones into tasks if (this.state == .multipart_completed) { for (this.queue.items) |*part| { @@ -1946,13 +2295,16 @@ pub const MultiPartUpload = struct { } pub fn fail(this: *@This(), _err: AWS.S3Error) void { log("fail {s}:{s}", .{ _err.code, _err.message }); + this.ended = true; for (this.queue.items) |*task| { task.cancel(); } if (this.state != .finished) { - this.callback(.{ .failure = _err }, this.callback_context); + const old_state = this.state; this.state = .finished; - if 
(this.state == .multipart_completed) { + this.callback(.{ .failure = _err }, this.callback_context); + + if (old_state == .multipart_completed) { // will deref after rollback this.rollbackMultiPartRequest(); } else { @@ -1984,6 +2336,8 @@ pub const MultiPartUpload = struct { } } pub fn startMultiPartRequestResult(result: AWS.S3DownloadResult, this: *@This()) void { + defer this.deref(); + if (this.state == .finished) return; switch (result) { .failure => |err| { log("startMultiPartRequestResult {s} failed {s}: {s}", .{ this.path, err.message, err.message }); @@ -2021,6 +2375,7 @@ pub const MultiPartUpload = struct { pub fn onCommitMultiPartRequest(result: AWS.S3CommitResult, this: *@This()) void { log("onCommitMultiPartRequest {s}", .{this.upload_id}); + switch (result) { .failure => |err| { if (this.options.retry > 0) { @@ -2094,6 +2449,7 @@ pub const MultiPartUpload = struct { if (this.state == .not_started) { // will auto start later this.state = .multipart_started; + this.ref(); this.credentials.executeSimpleS3Request(.{ .path = this.path, .method = .POST, @@ -2101,6 +2457,7 @@ pub const MultiPartUpload = struct { .body = "", .search_params = "?uploads=", .content_type = this.content_type, + .acl = this.acl, }, .{ .download = @ptrCast(&startMultiPartRequestResult) }, this); } else if (this.state == .multipart_completed) { part.start(); @@ -2138,6 +2495,7 @@ pub const MultiPartUpload = struct { if (this.ended and this.buffered.items.len < this.partSizeInBytes() and this.state == .not_started) { log("processBuffered {s} singlefile_started", .{this.path}); this.state = .singlefile_started; + this.ref(); // we can do only 1 request this.credentials.executeSimpleS3Request(.{ .path = this.path, @@ -2145,6 +2503,7 @@ pub const MultiPartUpload = struct { .proxy_url = this.proxyUrl(), .body = this.buffered.items, .content_type = this.content_type, + .acl = this.acl, }, .{ .upload = @ptrCast(&singleSendUploadResponse) }, this); } else { // we need to split @@ -2156,9 +2515,22 @@ pub const MultiPartUpload = struct { return this.options.partSize * OneMiB; } + pub fn continueStream(this: *@This()) void { + if (this.state == .wait_stream_check) { + this.state = .not_started; + if (this.ended) { + this.processBuffered(this.partSizeInBytes()); + } + } + } + pub fn sendRequestData(this: *@This(), chunk: []const u8, is_last: bool) void { if (this.ended) return; - + if (this.state == .wait_stream_check and chunk.len == 0 and is_last) { + // we do this because stream will close if the file dont exists and we dont wanna to send an empty part in this case + this.ended = true; + return; + } if (is_last) { this.ended = true; if (chunk.len > 0) { diff --git a/test/js/bun/s3/s3.test.ts b/test/js/bun/s3/s3.test.ts index 7ac336fc6c96c6..f265c4c95e3b82 100644 --- a/test/js/bun/s3/s3.test.ts +++ b/test/js/bun/s3/s3.test.ts @@ -124,38 +124,40 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { expect(result.status).toBe(200); expect(result.headers.get("content-length")).toBe((buffer.byteLength * 10).toString()); } - }, 10_000); + }, 20_000); }); }); describe("Bun.S3", () => { describe(bucketInName ? "bucket in path" : "bucket in options", () => { const tmp_filename = bucketInName ? `${S3Bucket}/${randomUUID()}` : `${randomUUID()}`; - const options = bucketInName ? s3Options : { ...s3Options, bucket: S3Bucket }; + const options = bucketInName ? 
null : { bucket: S3Bucket }; + + var bucket = S3(s3Options); beforeAll(async () => { - const file = new S3(tmp_filename, options); + const file = bucket(tmp_filename, options); await file.write("Hello Bun!"); }); afterAll(async () => { - const file = new S3(tmp_filename, options); + const file = bucket(tmp_filename, options); await file.unlink(); }); it("should download file via Bun.s3().text()", async () => { - const file = new S3(tmp_filename, options); + const file = bucket(tmp_filename, options); const text = await file.text(); expect(text).toBe("Hello Bun!"); }); it("should download range", async () => { - const file = new S3(tmp_filename, options); + const file = bucket(tmp_filename, options); const text = await file.slice(6, 10).text(); expect(text).toBe("Bun!"); }); it("should check if a key exists or content-length", async () => { - const file = new S3(tmp_filename, options); + const file = bucket(tmp_filename, options); const exists = await file.exists(); expect(exists).toBe(true); const contentLength = await file.size; @@ -163,27 +165,27 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { }); it("should check if a key does not exist", async () => { - const file = new S3(tmp_filename + "-does-not-exist", options); + const file = bucket(tmp_filename + "-does-not-exist", options); const exists = await file.exists(); expect(exists).toBe(false); }); it("should be able to set content-type", async () => { { - const s3file = new S3(tmp_filename, { ...options, type: "text/css" }); - await s3file.write("Hello Bun!"); + const s3file = bucket(tmp_filename, options); + await s3file.write("Hello Bun!", { type: "text/css" }); const response = await fetch(s3file.presign()); expect(response.headers.get("content-type")).toStartWith("text/css"); } { - const s3file = new S3(tmp_filename, options); + const s3file = bucket(tmp_filename, options); await s3file.write("Hello Bun!", { type: "text/plain" }); const response = await fetch(s3file.presign()); expect(response.headers.get("content-type")).toStartWith("text/plain"); } { - const s3file = new S3(tmp_filename, options); + const s3file = bucket(tmp_filename, options); const writer = s3file.writer({ type: "application/json" }); writer.write("Hello Bun!"); await writer.end(); @@ -192,15 +194,15 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { } { - await S3.upload(tmp_filename, "Hello Bun!", { ...options, type: "application/xml" }); - const response = await fetch(s3(tmp_filename, options).presign()); + await bucket.write(tmp_filename, "Hello Bun!", { ...options, type: "application/xml" }); + const response = await fetch(bucket(tmp_filename, options).presign()); expect(response.headers.get("content-type")).toStartWith("application/xml"); } }); - it("should be able to upload large files using S3.upload + readable Request", async () => { + it("should be able to upload large files using bucket.write + readable Request", async () => { { - await S3.upload( + await bucket.write( tmp_filename, new Request("https://example.com", { method: "PUT", @@ -215,21 +217,21 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { }), options, ); - expect(await S3.size(tmp_filename, options)).toBe(Buffer.byteLength(bigishPayload) * 10); + expect(await bucket.size(tmp_filename, options)).toBe(Buffer.byteLength(bigishPayload) * 10); } }, 10_000); - it("should be able to upload large files in one go using S3.upload", async () => { + it("should be able to upload large files in one go using bucket.write", async () => { { - await S3.upload(tmp_filename, 
bigPayload, options); - expect(await S3.size(tmp_filename, options)).toBe(Buffer.byteLength(bigPayload)); - expect(await new S3(tmp_filename, options).text()).toBe(bigPayload); + await bucket.write(tmp_filename, bigPayload, options); + expect(await bucket.size(tmp_filename, options)).toBe(Buffer.byteLength(bigPayload)); + expect(await bucket(tmp_filename, options).text()).toBe(bigPayload); } }, 10_000); it("should be able to upload large files in one go using S3File.write", async () => { { - const s3File = new S3(tmp_filename, options); + const s3File = bucket(tmp_filename, options); await s3File.write(bigPayload); expect(await s3File.size).toBe(Buffer.byteLength(bigPayload)); expect(await s3File.text()).toBe(bigPayload); @@ -305,7 +307,7 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { it("should be able to upload large files in one go using Bun.write", async () => { { await Bun.write(file(tmp_filename, options), bigPayload); - expect(await S3.size(tmp_filename, options)).toBe(Buffer.byteLength(bigPayload)); + expect(await s3(tmp_filename, options).size).toBe(Buffer.byteLength(bigPayload)); expect(await file(tmp_filename, options).text()).toEqual(bigPayload); } }, 15_000); @@ -392,18 +394,12 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { } }); - it("should be able to upload large files in one go using S3.upload", async () => { - { - await S3.upload(s3(tmp_filename, options), bigPayload); - expect(await S3.size(tmp_filename, options)).toBe(Buffer.byteLength(bigPayload)); - } - }, 10_000); - it("should be able to upload large files in one go using Bun.write", async () => { { - await Bun.write(s3(tmp_filename, options), bigPayload); - expect(await S3.size(tmp_filename, options)).toBe(Buffer.byteLength(bigPayload)); - expect(await s3(tmp_filename, options).text()).toBe(bigPayload); + const s3file = s3(tmp_filename, options); + await Bun.write(s3file, bigPayload); + expect(await s3file.size).toBe(Buffer.byteLength(bigPayload)); + expect(await s3file.text()).toBe(bigPayload); } }, 10_000); @@ -461,55 +457,315 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { }); }); } + describe("special characters", () => { + it("should allow special characters in the path", async () => { + const options = { ...s3Options, bucket: S3Bucket }; + const s3file = s3(`🌈🦄${randomUUID()}.txt`, options); + await s3file.write("Hello Bun!"); + await s3file.exists(); + await s3file.unlink(); + expect().pass(); + }); + it("should allow forward slashes in the path", async () => { + const options = { ...s3Options, bucket: S3Bucket }; + const s3file = s3(`${randomUUID()}/test.txt`, options); + await s3file.write("Hello Bun!"); + await s3file.exists(); + await s3file.unlink(); + expect().pass(); + }); + it("should allow backslashes in the path", async () => { + const options = { ...s3Options, bucket: S3Bucket }; + const s3file = s3(`${randomUUID()}\\test.txt`, options); + await s3file.write("Hello Bun!"); + await s3file.exists(); + await s3file.unlink(); + expect().pass(); + }); + it("should allow starting with slashs and backslashes", async () => { + const options = { ...s3Options, bucket: S3Bucket }; + { + const s3file = s3(`/${randomUUID()}test.txt`, options); + await s3file.write("Hello Bun!"); + await s3file.unlink(); + } + { + const s3file = s3(`\\${randomUUID()}test.txt`, options); + await s3file.write("Hello Bun!"); + await s3file.unlink(); + } + expect().pass(); + }); + it("should allow ending with slashs and backslashes", async () => { + const options = { ...s3Options, bucket: S3Bucket }; + { 
+ const s3file = s3(`${randomUUID()}/`, options); + await s3file.write("Hello Bun!"); + await s3file.unlink(); + } + { + const s3file = s3(`${randomUUID()}\\`, options); + await s3file.write("Hello Bun!"); + await s3file.unlink(); + } + expect().pass(); + }); + }); + describe("errors", () => { + it("Bun.write(s3file, file) should throw if the file does not exist", async () => { + try { + await Bun.write(s3("test.txt", { ...s3Options, bucket: S3Bucket }), file("./do-not-exist.txt")); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ENOENT"); + expect(e?.path).toBe("./do-not-exist.txt"); + expect(e?.syscall).toBe("open"); + } + }); + + it("Bun.write(s3file, file) should work with empty file", async () => { + const dir = tempDirWithFiles("fsr", { + "hello.txt": "", + }); + await Bun.write(s3("test.txt", { ...s3Options, bucket: S3Bucket }), file(path.join(dir, "hello.txt"))); + }); + it("Bun.write(s3file, file) should throw if the file does not exist", async () => { + try { + await Bun.write( + s3("test.txt", { ...s3Options, bucket: S3Bucket }), + s3("do-not-exist.txt", { ...s3Options, bucket: S3Bucket }), + ); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("NoSuchKey"); + expect(e?.path).toBe("do-not-exist.txt"); + expect(e?.name).toBe("S3Error"); + } + }); + it("Bun.write(s3file, file) should throw if the file does not exist", async () => { + try { + await Bun.write( + s3("test.txt", { ...s3Options, bucket: S3Bucket }), + s3("do-not-exist.txt", { ...s3Options, bucket: "does-not-exists" }), + ); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("AccessDenied"); + expect(e?.path).toBe("do-not-exist.txt"); + expect(e?.name).toBe("S3Error"); + } + }); + it("should error if bucket is missing", async () => { + try { + await Bun.write(s3("test.txt", s3Options), "Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_INVALID_PATH"); + expect(e?.name).toBe("S3Error"); + } + }); + + it("should error if bucket is missing on payload", async () => { + try { + await Bun.write(s3("test.txt", { ...s3Options, bucket: S3Bucket }), s3("test2.txt", s3Options)); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_INVALID_PATH"); + expect(e?.path).toBe("test2.txt"); + expect(e?.name).toBe("S3Error"); + } + }); + + it("should error when invalid method", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args)(path)].map(async fn => { + const s3file = fn("method-test", { + ...s3Options, + bucket: S3Bucket, + }); + + try { + await s3file.presign({ method: "OPTIONS" }); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_INVALID_METHOD"); + } + }), + ); + }); + + it("should error when path is too long", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args)(path)].map(async fn => { + try { + const s3file = fn("test" + "a".repeat(4096), { + ...s3Options, + bucket: S3Bucket, + }); + + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ENAMETOOLONG"); + } + }), + ); + }); + }); describe("credentials", () => { it("should error with invalid access key id", async () => { - [s3, (...args) => new S3(...args), file].forEach(fn => { - const s3file = fn("s3://bucket/credentials-test", { - ...s3Options, - accessKeyId: "invalid", - }); - expect(s3file.write("Hello Bun!")).rejects.toThrow(); - }); + await Promise.all( + [s3, (path, ...args) => S3(...args)(path), file].map(async fn => { + const s3file = 
fn("s3://bucket/credentials-test", { + ...s3Options, + accessKeyId: "invalid", + }); + + try { + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("InvalidArgument"); + } + }), + ); }); it("should error with invalid secret key id", async () => { - [s3, (...args) => new S3(...args), file].forEach(fn => { - const s3file = fn("s3://bucket/credentials-test", { - ...s3Options, - secretAccessKey: "invalid", - }); - expect(s3file.write("Hello Bun!")).rejects.toThrow(); - }); + await Promise.all( + [s3, (path, ...args) => S3(...args)(path), file].map(async fn => { + const s3file = fn("s3://bucket/credentials-test", { + ...s3Options, + secretAccessKey: "invalid", + }); + try { + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("AccessDenied"); + } + }), + ); }); it("should error with invalid endpoint", async () => { - [s3, (...args) => new S3(...args), file].forEach(fn => { - const s3file = fn("s3://bucket/credentials-test", { - ...s3Options, - endpoint: "🙂.🥯", - }); - expect(s3file.write("Hello Bun!")).rejects.toThrow(); - }); + await Promise.all( + [s3, (path, ...args) => S3(...args)(path), file].map(async fn => { + const s3file = fn("s3://bucket/credentials-test", { + ...s3Options, + endpoint: "🙂.🥯", + }); + try { + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("InvalidAccessKeyId"); + } + }), + ); }); it("should error with invalid endpoint", async () => { - [s3, (...args) => new S3(...args), file].forEach(fn => { - const s3file = fn("s3://bucket/credentials-test", { - ...s3Options, - endpoint: "..asd.@%&&&%%", - }); - expect(s3file.write("Hello Bun!")).rejects.toThrow(); - }); + await Promise.all( + [s3, (path, ...args) => S3(...args)(path), file].map(async fn => { + const s3file = fn("s3://bucket/credentials-test", { + ...s3Options, + endpoint: "..asd.@%&&&%%", + }); + try { + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("InvalidAccessKeyId"); + } + }), + ); }); it("should error with invalid bucket", async () => { - [s3, (...args) => new S3(...args), file].forEach(fn => { - const s3file = fn("s3://credentials-test", { - ...s3Options, - bucket: "invalid", - }); - expect(s3file.write("Hello Bun!")).rejects.toThrow(); - }); + await Promise.all( + [s3, (path, ...args) => S3(...args)(path), file].map(async fn => { + const s3file = fn("s3://credentials-test", { + ...s3Options, + bucket: "invalid", + }); + + try { + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("AccessDenied"); + expect(e?.name).toBe("S3Error"); + } + }), + ); + }); + + it("should error when missing credentials", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args)(path), file].map(async fn => { + const s3file = fn("s3://credentials-test", { + bucket: "invalid", + }); + + try { + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_MISSING_CREDENTIALS"); + } + }), + ); + }); + it("should error when presign missing credentials", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args)(path)].map(async fn => { + const s3file = fn("method-test", { + bucket: S3Bucket, + }); + + try { + await s3file.presign(); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_MISSING_CREDENTIALS"); + } + }), + ); + }); + + it("should error when presign with invalid 
endpoint", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args)(path)].map(async fn => { + let options = { ...s3Options, bucket: S3Bucket }; + options.endpoint = Buffer.alloc(1024, "a").toString(); + + try { + const s3file = fn(randomUUID(), options); + + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_INVALID_ENDPOINT"); + } + }), + ); + }); + it("should error when presign with invalid token", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args)(path)].map(async fn => { + let options = { ...s3Options, bucket: S3Bucket }; + options.sessionToken = Buffer.alloc(4096, "a").toString(); + + try { + const s3file = fn(randomUUID(), options); + await s3file.presign(); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_INVALID_SESSION_TOKEN"); + } + }), + ); }); }); @@ -539,10 +795,24 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { expect(url.includes("X-Amz-Algorithm")).toBe(true); expect(url.includes("X-Amz-SignedHeaders")).toBe(true); }); + it("should work with acl", async () => { + const s3file = s3("s3://bucket/credentials-test", s3Options); + const url = s3file.presign({ + expiresIn: 10, + acl: "public-read", + }); + expect(url).toBeDefined(); + expect(url.includes("X-Amz-Expires=10")).toBe(true); + expect(url.includes("X-Amz-Acl=public-read")).toBe(true); + expect(url.includes("X-Amz-Date")).toBe(true); + expect(url.includes("X-Amz-Signature")).toBe(true); + expect(url.includes("X-Amz-Credential")).toBe(true); + expect(url.includes("X-Amz-Algorithm")).toBe(true); + expect(url.includes("X-Amz-SignedHeaders")).toBe(true); + }); - it("S3.presign should work", async () => { - const url = S3.presign("s3://bucket/credentials-test", { - ...s3Options, + it("s3().presign() should work", async () => { + const url = s3("s3://bucket/credentials-test", s3Options).presign({ expiresIn: 10, }); expect(url).toBeDefined(); @@ -554,9 +824,8 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { expect(url.includes("X-Amz-SignedHeaders")).toBe(true); }); - it("S3.presign endpoint should work", async () => { - const url = S3.presign("s3://bucket/credentials-test", { - ...s3Options, + it("s3().presign() endpoint should work", async () => { + const url = s3("s3://bucket/credentials-test", s3Options).presign({ expiresIn: 10, endpoint: "https://s3.bun.sh", }); @@ -570,9 +839,8 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { expect(url.includes("X-Amz-SignedHeaders")).toBe(true); }); - it("S3.presign endpoint should work", async () => { - const url = S3.presign("s3://folder/credentials-test", { - ...s3Options, + it("s3().presign() endpoint should work", async () => { + const url = s3("s3://folder/credentials-test", s3Options).presign({ expiresIn: 10, bucket: "my-bucket", }); @@ -587,16 +855,19 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { }); }); - it("exists, upload, size, unlink should work", async () => { - const filename = randomUUID(); - const fullPath = `s3://${S3Bucket}/${filename}`; - expect(await S3.exists(fullPath, s3Options)).toBe(false); + it("exists, write, size, unlink should work", async () => { + const fullPath = randomUUID(); + const bucket = S3({ + ...s3Options, + bucket: S3Bucket, + }); + expect(await bucket.exists(fullPath)).toBe(false); - await S3.upload(fullPath, "bun", s3Options); - expect(await S3.exists(fullPath, s3Options)).toBe(true); - expect(await S3.size(fullPath, s3Options)).toBe(3); - await S3.unlink(fullPath, s3Options); - 
expect(await S3.exists(fullPath, s3Options)).toBe(false); + await bucket.write(fullPath, "bun"); + expect(await bucket.exists(fullPath)).toBe(true); + expect(await bucket.size(fullPath)).toBe(3); + await bucket.unlink(fullPath); + expect(await bucket.exists(fullPath)).toBe(false); }); it("should be able to upload a slice", async () => { @@ -608,7 +879,7 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => { expect(await slice.text()).toBe("Bun!"); expect(await s3file.text()).toBe("Hello Bun!"); - await S3.upload(fullPath, slice, s3Options); + await s3file.write(slice); const text = await s3file.text(); expect(text).toBe("Bun!"); await s3file.unlink();