Mirror of https://github.com/oven-sh/bun, synced 2026-02-17 06:12:08 +00:00

Compare commits: claude/fix...cursor/imp (2 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 3ba0caa398 |  |
|  | 2162b2ead5 |  |
@@ -59,6 +59,23 @@ pub const JSBundler = struct {
        env_behavior: Api.DotEnvBehavior = .disable,
        env_prefix: OwnedString = OwnedString.initEmpty(bun.default_allocator),

        // S3 credentials for uploading outputs
        s3: ?S3Config = null,

        pub const S3Config = struct {
            url: []const u8 = "",
            credentials: ?*bun.S3.S3Credentials = null,

            pub fn deinit(self: *S3Config, allocator: std.mem.Allocator) void {
                if (self.url.len > 0) {
                    allocator.free(self.url);
                }
                if (self.credentials) |creds| {
                    creds.deref();
                }
            }
        };

        pub const List = bun.StringArrayHashMapUnmanaged(Config);

        pub fn fromJS(globalThis: *JSC.JSGlobalObject, config: JSC.JSValue, plugins: *?*Plugin, allocator: std.mem.Allocator) JSError!Config {
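Note that S3Config.deinit derefs the credentials rather than freeing them: bun.S3.S3Credentials is reference-counted and may be shared. A minimal standalone sketch of that ownership contract, with Creds as a stand-in for the real type (which is not shown in this diff):

const std = @import("std");

// Stand-in for bun.S3.S3Credentials: shared ownership via a reference count.
const Creds = struct {
    ref_count: usize = 1,

    fn ref(self: *Creds) *Creds {
        self.ref_count += 1;
        return self;
    }

    fn deref(self: *Creds, allocator: std.mem.Allocator) void {
        self.ref_count -= 1;
        // Only the last owner actually frees the object.
        if (self.ref_count == 0) allocator.destroy(self);
    }
};

test "two configs share one credential object" {
    const allocator = std.testing.allocator;
    const creds = try allocator.create(Creds);
    creds.* = .{};
    const dupe = creds.ref(); // a second S3Config takes a reference
    creds.deref(allocator); // first config's deinit releases, does not free
    dupe.deref(allocator); // last deref performs the real free
}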
@@ -471,6 +488,42 @@ pub const JSBundler = struct {
                this.throw_on_error = flag;
            }

            // Parse S3 configuration
            if (try config.getTruthy(globalThis, "s3")) |s3_value| {
                // Handle string case: s3: "s3://bucket/prefix"
                if (s3_value.isString()) {
                    const slice = try s3_value.toSlice(globalThis, bun.default_allocator);
                    defer slice.deinit();
                    this.s3 = S3Config{
                        .url = allocator.dupe(u8, slice.slice()) catch bun.outOfMemory(),
                    };
                } else if (s3_value.isObject()) {
                    // Handle object case with credentials
                    var credentials = try bun.S3.S3Credentials.getCredentialsWithOptions(
                        globalThis.bunVM().transpiler.env.getS3Credentials(),
                        .{},
                        s3_value,
                        null,
                        null,
                        globalThis,
                    );

                    // Check if URL is provided in the object
                    var url: []const u8 = "";
                    if (try s3_value.getOptional(globalThis, "url", ZigString.Slice)) |url_slice| {
                        defer url_slice.deinit();
                        url = allocator.dupe(u8, url_slice.slice()) catch bun.outOfMemory();
                    }

                    this.s3 = S3Config{
                        .url = url,
                        .credentials = if (credentials.changed_credentials) credentials.credentials.dupe() else null,
                    };
                } else {
                    return globalThis.throwInvalidArguments("Expected s3 to be a string URL or object with credentials", .{});
                }
            }

            return this;
        }
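fromJS accepts either a bare URL string or an object with a url field, and both collapse into the same S3Config. A standalone sketch of that normalization, with JsValue standing in for JSC.JSValue (the names here are illustrative, not Bun internals):

const std = @import("std");

// Stand-in for the two JS shapes the parser accepts.
const JsValue = union(enum) {
    string: []const u8,
    object: struct { url: ?[]const u8 = null },
};

const S3Config = struct { url: []const u8 = "" };

fn normalize(value: JsValue) S3Config {
    return switch (value) {
        // s3: "s3://bucket/prefix"
        .string => |s| .{ .url = s },
        // s3: { url: "...", ...credentials }
        .object => |o| .{ .url = o.url orelse "" },
    };
}

test "string and object shapes normalize identically" {
    try std.testing.expectEqualStrings(
        "s3://bucket/prefix",
        normalize(.{ .string = "s3://bucket/prefix" }).url,
    );
    try std.testing.expectEqualStrings(
        "s3://bucket/prefix",
        normalize(.{ .object = .{ .url = "s3://bucket/prefix" } }).url,
    );
}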
@@ -529,6 +582,9 @@ pub const JSBundler = struct {
            self.banner.deinit();
            self.env_prefix.deinit();
            self.footer.deinit();
            if (self.s3) |*s3| {
                s3.deinit(allocator);
            }
        }
    };
@@ -1,62 +1,4 @@
// Generated with scripts/generate-perf-trace-events.sh
// clang-format off
#define FOR_EACH_TRACE_EVENT(macro) \
  macro(Bundler.BindImportsToExports, 0) \
  macro(Bundler.CloneLinkerGraph, 1) \
  macro(Bundler.CreateNamespaceExports, 2) \
  macro(Bundler.FigureOutCommonJS, 3) \
  macro(Bundler.MatchImportsWithExports, 4) \
  macro(Bundler.ParseJS, 5) \
  macro(Bundler.ParseJSON, 6) \
  macro(Bundler.ParseTOML, 7) \
  macro(Bundler.ResolveExportStarStatements, 8) \
  macro(Bundler.Worker.create, 9) \
  macro(Bundler.WrapDependencies, 10) \
  macro(Bundler.breakOutputIntoPieces, 11) \
  macro(Bundler.cloneAST, 12) \
  macro(Bundler.computeChunks, 13) \
  macro(Bundler.findAllImportedPartsInJSOrder, 14) \
  macro(Bundler.findReachableFiles, 15) \
  macro(Bundler.generateChunksInParallel, 16) \
  macro(Bundler.generateCodeForFileInChunkCss, 17) \
  macro(Bundler.generateCodeForFileInChunkJS, 18) \
  macro(Bundler.generateIsolatedHash, 19) \
  macro(Bundler.generateSourceMapForChunk, 20) \
  macro(Bundler.markFileLiveForTreeShaking, 21) \
  macro(Bundler.markFileReachableForCodeSplitting, 22) \
  macro(Bundler.onParseTaskComplete, 23) \
  macro(Bundler.postProcessJSChunk, 24) \
  macro(Bundler.readFile, 25) \
  macro(Bundler.renameSymbolsInChunk, 26) \
  macro(Bundler.scanImportsAndExports, 27) \
  macro(Bundler.treeShakingAndCodeSplitting, 28) \
  macro(Bundler.writeChunkToDisk, 29) \
  macro(Bundler.writeOutputFilesToDisk, 30) \
  macro(ExtractTarball.extract, 31) \
  macro(FolderResolver.readPackageJSONFromDisk.folder, 32) \
  macro(FolderResolver.readPackageJSONFromDisk.workspace, 33) \
  macro(JSBundler.addPlugin, 34) \
  macro(JSBundler.hasAnyMatches, 35) \
  macro(JSBundler.matchOnLoad, 36) \
  macro(JSBundler.matchOnResolve, 37) \
  macro(JSGlobalObject.create, 38) \
  macro(JSParser.analyze, 39) \
  macro(JSParser.parse, 40) \
  macro(JSParser.postvisit, 41) \
  macro(JSParser.visit, 42) \
  macro(JSPrinter.print, 43) \
  macro(JSPrinter.printWithSourceMap, 44) \
  macro(ModuleResolver.resolve, 45) \
  macro(PackageInstaller.install, 46) \
  macro(PackageInstaller.installPatch, 47) \
  macro(PackageManifest.Serializer.loadByFile, 48) \
  macro(PackageManifest.Serializer.save, 49) \
  macro(RuntimeTranspilerCache.fromFile, 50) \
  macro(RuntimeTranspilerCache.save, 51) \
  macro(RuntimeTranspilerCache.toFile, 52) \
  macro(StandaloneModuleGraph.serialize, 53) \
  macro(Symbols.followAll, 54) \
  macro(TestCommand.printCodeCoverageLCov, 55) \
  macro(TestCommand.printCodeCoverageLCovAndText, 56) \
  macro(TestCommand.printCodeCoverageText, 57) \
// end
@@ -1523,6 +1523,7 @@ pub const BundleV2 = struct {
            .plugins = plugins,
            .log = Logger.Log.init(bun.default_allocator),
            .task = undefined,
            .s3_config = config.s3,
        });
        completion.task = JSBundleCompletionTask.TaskCompletion.init(completion);
@@ -1604,6 +1605,7 @@ pub const BundleV2 = struct {
        transpiler: *BundleV2 = undefined,
        plugins: ?*bun.JSC.API.JSBundler.Plugin = null,
        started_at_ns: u64 = 0,
        s3_config: ?bun.JSC.API.JSBundler.Config.S3Config = null,

        pub fn configureBundler(
            completion: *JSBundleCompletionTask,
@@ -1668,6 +1670,11 @@ pub const BundleV2 = struct {
            transpiler.options.banner = config.banner.slice();
            transpiler.options.footer = config.footer.slice();

            // Pass S3 configuration
            if (config.s3) |s3_config| {
                transpiler.options.s3_url = s3_config.url;
            }

            transpiler.configureLinker();
            try transpiler.configureDefines();
@@ -1740,6 +1747,29 @@ pub const BundleV2 = struct {
            .value => |*build| {
                const root_obj = JSC.JSValue.createEmptyObject(globalThis, 3);
                const output_files: []options.OutputFile = build.output_files.items;

                // Check if S3 upload is configured
                if (this.s3_config) |s3_config| {
                    if (s3_config.url.len > 0) {
                        @import("./s3_upload.zig").uploadOutputFilesToS3(
                            output_files,
                            s3_config.url,
                            s3_config.credentials,
                            globalThis,
                        ) catch |err| {
                            // Add error to log
                            this.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "S3 upload failed: {s}", .{@errorName(err)}) catch {};

                            // Still return the build result, but with the error logged
                            root_obj.put(
                                globalThis,
                                JSC.ZigString.static("s3_error"),
                                JSC.ZigString.init(@errorName(err)).toJS(globalThis),
                            );
                        };
                    }
                }

                const output_files_js = JSC.JSValue.createEmptyArray(globalThis, output_files.len) catch return promise.reject(globalThis, error.JSError);
                if (output_files_js == .zero) {
                    @panic("Unexpected pending JavaScript exception in JSBundleCompletionTask.onComplete. This is a bug in Bun.");
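Upload failures here are deliberately non-fatal: the error is logged and attached to the result object instead of rejecting the whole build. A standalone sketch of that "log, don't fail" pattern (BuildResult and its field names are illustrative, not Bun's actual result type):

const std = @import("std");

const BuildResult = struct {
    success: bool = true,
    s3_error: ?[]const u8 = null,
};

fn upload(fail: bool) error{S3UploadFailed}!void {
    if (fail) return error.S3UploadFailed;
}

fn finish(fail: bool) BuildResult {
    var result: BuildResult = .{};
    upload(fail) catch |err| {
        // The build itself still succeeds; only a side channel records the error.
        result.s3_error = @errorName(err);
    };
    return result;
}

test "upload failure does not fail the build" {
    const r = finish(true);
    try std.testing.expect(r.success);
    try std.testing.expectEqualStrings("S3UploadFailed", r.s3_error.?);
}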
@@ -340,7 +340,9 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
        return error.MultipleOutputFilesWithoutOutputDir;
    }

-   if (root_path.len > 0) {
+   // Check if S3 upload is requested - in that case, we skip writing to disk.
+   // The actual S3 upload is handled in the completion task, where globalThis is available.
+   if (c.resolver.opts.s3_url == null and root_path.len > 0) {
        try c.writeOutputFilesToDisk(root_path, chunks, &output_files);
    } else {
        // In-memory build
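The routing decision above, extracted in isolation (a sketch; the real check lives inside generateChunksInParallel): any configured S3 URL forces the in-memory path so the completion task can upload the buffers later.

const std = @import("std");

fn writesToDisk(s3_url: ?[]const u8, root_path: []const u8) bool {
    // Disk writes happen only when no S3 target is set and an outdir exists.
    return s3_url == null and root_path.len > 0;
}

test "s3 target suppresses disk writes" {
    try std.testing.expect(writesToDisk(null, "./out"));
    try std.testing.expect(!writesToDisk("s3://bucket/prefix", "./out"));
    try std.testing.expect(!writesToDisk(null, ""));
}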
src/bundler/s3_upload.zig (new file, 168 lines)
@@ -0,0 +1,168 @@
const std = @import("std");
const bun = @import("bun");
const strings = bun.strings;
const Output = bun.Output;
const options = @import("../options.zig");
const JSC = bun.JSC;

pub fn uploadOutputFilesToS3(
    output_files: []const options.OutputFile,
    s3_url: []const u8,
    s3_credentials: ?*bun.S3.S3Credentials,
    globalThis: *JSC.JSGlobalObject,
) !void {
    // Parse S3 URL to extract bucket and prefix
    var bucket: []const u8 = "";
    var prefix: []const u8 = "";
    if (strings.hasPrefixComptime(s3_url, "s3://")) {
        const url_without_protocol = s3_url[5..];
        if (strings.indexOfChar(url_without_protocol, '/')) |slash_index| {
            bucket = url_without_protocol[0..slash_index];
            prefix = url_without_protocol[slash_index + 1 ..];
        } else {
            bucket = url_without_protocol;
        }
    } else {
        return error.InvalidS3URL;
    }

    // Get or create S3 credentials
    const credentials = s3_credentials orelse brk: {
        const env = globalThis.bunVM().transpiler.env;
        const access_key_id = env.map.get("AWS_ACCESS_KEY_ID") orelse "";
        const secret_access_key = env.map.get("AWS_SECRET_ACCESS_KEY") orelse "";

        if (access_key_id.len == 0 or secret_access_key.len == 0) {
            return error.MissingS3Credentials;
        }

        const creds = bun.new(bun.S3.S3Credentials, .{
            .ref_count = .init(),
            .accessKeyId = bun.default_allocator.dupe(u8, access_key_id) catch unreachable,
            .secretAccessKey = bun.default_allocator.dupe(u8, secret_access_key) catch unreachable,
            .bucket = bun.default_allocator.dupe(u8, bucket) catch unreachable,
            .region = if (env.map.get("AWS_REGION")) |region|
                bun.default_allocator.dupe(u8, region) catch unreachable
            else
                "us-east-1",
            .endpoint = if (env.map.get("AWS_ENDPOINT_URL_S3")) |endpoint|
                bun.default_allocator.dupe(u8, endpoint) catch unreachable
            else
                "",
            .sessionToken = if (env.map.get("AWS_SESSION_TOKEN")) |token|
                bun.default_allocator.dupe(u8, token) catch unreachable
            else
                "",
            .insecure_http = false,
            .virtual_hosted_style = false,
        });
        creds.ref();
        break :brk creds;
    };
    defer if (s3_credentials == null) credentials.deref();

    const total_files = output_files.len;
    Output.prettyln("<r><d>Uploading {d} files to S3...<r>", .{total_files});

    var upload_state = S3UploadState{
        .total_count = total_files,
        .completed_count = 0,
        .error_count = 0,
        .globalThis = globalThis,
        .credentials = credentials,
        .prefix = prefix,
    };

    // Create upload tasks for all files
    const tasks = bun.default_allocator.alloc(S3UploadTask, output_files.len) catch unreachable;
    defer bun.default_allocator.free(tasks);

    for (output_files, 0..) |*output_file, i| {
        // Skip files without buffer data
        const content = switch (output_file.value) {
            .buffer => |buf| buf.bytes,
            else => {
                // A skipped file never fires onComplete; shrink the expected
                // total so the wait loop below can still terminate.
                upload_state.total_count -= 1;
                continue;
            },
        };

        // Prepare S3 path
        const s3_path = if (prefix.len > 0)
            std.fmt.allocPrint(bun.default_allocator, "{s}/{s}", .{ prefix, output_file.dest_path }) catch unreachable
        else
            bun.default_allocator.dupe(u8, output_file.dest_path) catch unreachable;

        const content_type = output_file.loader.toMimeType(&.{});

        tasks[i] = .{
            .state = &upload_state,
            .path = s3_path,
            .content = content,
            .content_type = content_type.value,
            .index = i,
        };

        // Start the upload
        credentials.ref();
        bun.S3.upload(
            credentials,
            s3_path,
            content,
            content_type.value,
            null, // acl
            null, // proxy_url
            null, // storage_class
            S3UploadTask.onComplete,
            &tasks[i],
        );
    }

    // Wait for all uploads to complete using the event loop
    while (upload_state.completed_count < upload_state.total_count) {
        _ = globalThis.bunVM().tick();

        // Check if we should time out
        // TODO: Add proper timeout handling
    }

    if (upload_state.error_count > 0) {
        return error.S3UploadFailed;
    }

    Output.prettyln("<r><green>✓<r> Successfully uploaded {d} files to S3", .{total_files});
}
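The wait loop above is a counter-gated fan-in: every completion callback, success or failure, bumps completed_count, and the loop ticks the VM until the counter matches the expected total. A standalone sketch of that accounting, with std.Thread standing in for the S3 client's async callbacks (an assumption for the sake of a runnable test; Bun drives them through its event loop, not raw threads):

const std = @import("std");

const State = struct {
    completed: std.atomic.Value(usize) = std.atomic.Value(usize).init(0),
    errors: std.atomic.Value(usize) = std.atomic.Value(usize).init(0),
};

// Plays the role of S3UploadTask.onComplete: failures count as "completed"
// too, otherwise the fan-in below could never finish.
fn completeOne(state: *State, ok: bool) void {
    if (!ok) _ = state.errors.fetchAdd(1, .monotonic);
    _ = state.completed.fetchAdd(1, .monotonic);
}

test "fan-in terminates even when some uploads fail" {
    var state: State = .{};
    var threads: [4]std.Thread = undefined;
    for (&threads, 0..) |*t, i| {
        t.* = try std.Thread.spawn(.{}, completeOne, .{ &state, i != 3 });
    }
    for (&threads) |*t| t.join();
    try std.testing.expectEqual(@as(usize, 4), state.completed.load(.monotonic));
    try std.testing.expectEqual(@as(usize, 1), state.errors.load(.monotonic));
}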
const S3UploadState = struct {
    total_count: usize,
    completed_count: usize,
    error_count: usize,
    globalThis: *JSC.JSGlobalObject,
    credentials: *bun.S3.S3Credentials,
    prefix: []const u8,
};

const S3UploadTask = struct {
    state: *S3UploadState,
    path: []const u8,
    content: []const u8,
    content_type: []const u8,
    index: usize,

    pub fn onComplete(result: bun.S3.S3UploadResult, ctx: *anyopaque) void {
        const task: *S3UploadTask = @ptrCast(@alignCast(ctx));
        defer {
            task.state.credentials.deref();
            bun.default_allocator.free(task.path);
        }

        switch (result) {
            .success => {
                task.state.completed_count += 1;
                Output.prettyln("<r><d>  Uploaded: {s}<r>", .{task.path});
            },
            .failure => |err| {
                task.state.error_count += 1;
                task.state.completed_count += 1;
                Output.prettyErrorln("<r><red>Failed to upload {s}: {s}<r>", .{ task.path, err.message });
            },
        }
    }
};
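The bucket/prefix split at the top of uploadOutputFilesToS3, extracted as a standalone, testable sketch (std.mem replaces the bun.strings helpers used in the file):

const std = @import("std");

const S3Target = struct { bucket: []const u8, prefix: []const u8 };

// Mirrors the parsing above: "s3://bucket/prefix" -> { bucket, prefix },
// "s3://bucket" -> { bucket, "" }, anything else is an error.
fn parseS3Url(url: []const u8) error{InvalidS3URL}!S3Target {
    if (!std.mem.startsWith(u8, url, "s3://")) return error.InvalidS3URL;
    const rest = url["s3://".len..];
    if (std.mem.indexOfScalar(u8, rest, '/')) |slash| {
        return .{ .bucket = rest[0..slash], .prefix = rest[slash + 1 ..] };
    }
    return .{ .bucket = rest, .prefix = "" };
}

test "s3 url parsing" {
    const target = try parseS3Url("s3://my-bucket/builds/test");
    try std.testing.expectEqualStrings("my-bucket", target.bucket);
    try std.testing.expectEqualStrings("builds/test", target.prefix);
    try std.testing.expectError(error.InvalidS3URL, parseS3Url("https://example.com"));
}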
src/cli.zig (18 lines)
@@ -300,6 +300,7 @@ pub const Arguments = struct {
        clap.parseParam("--env <inline|prefix*|disable>    Inline environment variables into the bundle as process.env.${name}. Defaults to 'disable'. To inline environment variables matching a prefix, use a prefix pattern like 'FOO_PUBLIC_*'.") catch unreachable,
        clap.parseParam("--windows-hide-console            When using --compile targeting Windows, prevent a Command prompt from opening alongside the executable") catch unreachable,
        clap.parseParam("--windows-icon <STR>              When using --compile targeting Windows, assign an executable icon") catch unreachable,
        clap.parseParam("--s3 <STR>                        Upload output files to S3. Example: s3://bucket/prefix") catch unreachable,
    } ++ if (FeatureFlags.bake_debugging_features) [_]ParamType{
        clap.parseParam("--debug-dump-server-files         When --app is set, dump all server files to disk even when building statically") catch unreachable,
        clap.parseParam("--debug-no-minify                 When --app is set, do not minify anything") catch unreachable,
@@ -907,7 +908,7 @@ pub const Arguments = struct {
        ctx.bundler_options.css_chunking = args.flag("--css-chunking");

        ctx.bundler_options.emit_dce_annotations = args.flag("--emit-dce-annotations") or
-           !ctx.bundler_options.minify_whitespace;
+           (args.flag("--minify-whitespace") == false);

        if (args.options("--external").len > 0) {
            var externals = try allocator.alloc([]const u8, args.options("--external").len);
@@ -1121,6 +1122,18 @@ pub const Arguments = struct {
                opts.source_map = .external;
            }
        }

        const conditions = args.options("--conditions");
        for (conditions) |condition| {
            try ctx.bundler_options.conditions.append(condition);
        }

        // Handle --s3 flag
        if (args.option("--s3")) |s3_url| {
            ctx.bundler_options.s3_url = s3_url;
        }

        ctx.bundler_options.dump_environment_variables = args.flag("--dump-environment-variables");
    }

    if (opts.entry_points.len == 0) {
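With the flag parsed into Command.bundler_options (see the struct below), a hypothetical invocation mirroring the flag's own help text would be `bun build ./index.ts --outdir ./out --s3 s3://my-bucket/builds`, which carries the URL into BundleOptions.s3_url and flips the linker into the in-memory path shown earlier.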
@@ -1611,6 +1624,9 @@ pub const Command = struct {
        compile_target: Cli.CompileTarget = .{},
        windows_hide_console: bool = false,
        windows_icon: ?[]const u8 = null,
        conditions: std.ArrayList(string) = std.ArrayList(string).init(bun.default_allocator),
        s3_url: ?string = null,
        dump_environment_variables: bool = false,
    };

    pub fn create(allocator: std.mem.Allocator, log: *logger.Log, comptime command: Command.Tag) anyerror!Context {
@@ -1,61 +1,61 @@
// Generated with scripts/generate-perf-trace-events.sh
pub const PerfEvent = enum(i32) {
    @"Bundler.BindImportsToExports",
    @"Bundler.CloneLinkerGraph",
    @"Bundler.CreateNamespaceExports",
    @"Bundler.FigureOutCommonJS",
    @"Bundler.MatchImportsWithExports",
    @"Bundler.ParseJS",
    @"Bundler.ParseJSON",
    @"Bundler.ParseTOML",
    @"Bundler.ResolveExportStarStatements",
    @"Bundler.Worker.create",
    @"Bundler.WrapDependencies",
    @"Bundler.breakOutputIntoPieces",
    @"Bundler.cloneAST",
    @"Bundler.computeChunks",
    @"Bundler.findAllImportedPartsInJSOrder",
    @"Bundler.findReachableFiles",
    @"Bundler.generateChunksInParallel",
    @"Bundler.generateCodeForFileInChunkCss",
    @"Bundler.generateCodeForFileInChunkJS",
    @"Bundler.generateIsolatedHash",
    @"Bundler.generateSourceMapForChunk",
    @"Bundler.markFileLiveForTreeShaking",
    @"Bundler.markFileReachableForCodeSplitting",
    @"Bundler.onParseTaskComplete",
    @"Bundler.postProcessJSChunk",
    @"Bundler.readFile",
    @"Bundler.renameSymbolsInChunk",
    @"Bundler.scanImportsAndExports",
    @"Bundler.treeShakingAndCodeSplitting",
    @"Bundler.writeChunkToDisk",
    @"Bundler.writeOutputFilesToDisk",
    @"ExtractTarball.extract",
    @"FolderResolver.readPackageJSONFromDisk.folder",
    @"FolderResolver.readPackageJSONFromDisk.workspace",
    @"JSBundler.addPlugin",
    @"JSBundler.hasAnyMatches",
    @"JSBundler.matchOnLoad",
    @"JSBundler.matchOnResolve",
    @"JSGlobalObject.create",
    @"JSParser.analyze",
    @"JSParser.parse",
    @"JSParser.postvisit",
    @"JSParser.visit",
    @"StandaloneModuleGraph.serialize",
    @"JSPrinter.print",
    @"JSPrinter.printWithSourceMap",
    @"ModuleResolver.resolve",
    @"PackageInstaller.install",
    @"PackageInstaller.installPatch",
    @"PackageManifest.Serializer.loadByFile",
    @"PackageManifest.Serializer.save",
    @"RuntimeTranspilerCache.fromFile",
    @"Bundler.Worker.create",
    @"Bundler.CloneLinkerGraph",
    @"Bundler.treeShakingAndCodeSplitting",
    @"Bundler.markFileLiveForTreeShaking",
    @"Bundler.markFileReachableForCodeSplitting",
    @"Bundler.generateSourceMapForChunk",
    @"Bundler.generateIsolatedHash",
    @"Bundler.breakOutputIntoPieces",
    @"Bundler.findReachableFiles",
    @"Bundler.cloneAST",
    @"Bundler.onParseTaskComplete",
    @"JSGlobalObject.create",
    @"RuntimeTranspilerCache.save",
    @"RuntimeTranspilerCache.fromFile",
    @"RuntimeTranspilerCache.toFile",
    @"StandaloneModuleGraph.serialize",
    @"Symbols.followAll",
    @"TestCommand.printCodeCoverageLCov",
    @"Bundler.ParseJS",
    @"Bundler.ParseJSON",
    @"Bundler.ParseTOML",
    @"Bundler.readFile",
    @"JSBundler.hasAnyMatches",
    @"JSBundler.matchOnLoad",
    @"JSBundler.matchOnResolve",
    @"JSBundler.addPlugin",
    @"Bundler.computeChunks",
    @"Bundler.postProcessJSChunk",
    @"Bundler.CreateNamespaceExports",
    @"Bundler.writeOutputFilesToDisk",
    @"Bundler.writeChunkToDisk",
    @"Bundler.generateCodeForFileInChunkCss",
    @"Bundler.renameSymbolsInChunk",
    @"TestCommand.printCodeCoverageLCovAndText",
    @"TestCommand.printCodeCoverageText",
    @"TestCommand.printCodeCoverageLCov",
    @"Bundler.scanImportsAndExports",
    @"Bundler.FigureOutCommonJS",
    @"Bundler.WrapDependencies",
    @"Bundler.ResolveExportStarStatements",
    @"Bundler.MatchImportsWithExports",
    @"Bundler.BindImportsToExports",
    @"Bundler.generateCodeForFileInChunkJS",
    @"Bundler.generateChunksInParallel",
    @"Bundler.findAllImportedPartsInJSOrder",
    @"ExtractTarball.extract",
    @"FolderResolver.readPackageJSONFromDisk.workspace",
    @"FolderResolver.readPackageJSONFromDisk.folder",
    @"PackageManifest.Serializer.save",
    @"PackageManifest.Serializer.loadByFile",
    @"Symbols.followAll",
    @"PackageInstaller.install",
    @"PackageInstaller.installPatch",
    @"JSParser.parse",
    @"JSParser.visit",
    @"JSParser.analyze",
    @"JSParser.postvisit",
};
@@ -1797,6 +1797,9 @@ pub const BundleOptions = struct {

    ignore_module_resolution_errors: bool = false,

    /// S3 URL for uploading output files (e.g., s3://bucket/prefix)
    s3_url: ?string = null,

    pub const ForceNodeEnv = enum {
        unspecified,
        development,
test_s3_build.js (new file, 52 lines)
@@ -0,0 +1,52 @@
// Test script for S3 build functionality
import { $ } from "bun";

// Create a simple test file
await Bun.write(
  "test_app.js",
  `
console.log("Hello from S3 build test!");
export default { message: "This is a test build" };
`,
);

// Test 1: Basic S3 URL support
console.log("Test 1: Building with S3 URL...");
try {
  const result = await Bun.build({
    entrypoints: ["./test_app.js"],
    outdir: "./out",
    s3: "s3://my-bucket/builds/test",
  });

  console.log("Build result:", result);
  console.log("Success:", result.success);
  console.log("Outputs:", result.outputs?.length || 0);
} catch (error) {
  console.error("Error:", error.message);
}

// Test 2: S3 with credentials object
console.log("\nTest 2: Building with S3 credentials object...");
try {
  const result = await Bun.build({
    entrypoints: ["./test_app.js"],
    outdir: "./out",
    s3: {
      url: "s3://my-bucket/builds/test2",
      accessKeyId: "test-key",
      secretAccessKey: "test-secret",
      region: "us-west-2",
    },
  });

  console.log("Build result:", result);
  console.log("Success:", result.success);
  console.log("Outputs:", result.outputs?.length || 0);
} catch (error) {
  console.error("Error:", error.message);
}

// Clean up
await $`rm -f test_app.js`;
await $`rm -rf out`;