mirror of https://github.com/oven-sh/bun
synced 2026-02-04 07:58:54 +00:00

Compare commits (1 commit): dylan/pyth... → claude/aut...
Commit ea79a84b9a
@@ -164,6 +164,11 @@ pub const New = struct {
     can_be_unwrapped_if_unused: CallUnwrap = .never,
     close_parens_loc: logger.Loc,
+
+    /// If this is a `new Worker(path)` call and the path is a string literal,
+    /// this contains the import record index for the worker file.
+    /// Used to bundle workers into standalone executables.
+    worker_import_record_index: ?u32 = null,
 };

 pub const NewTarget = struct {
     range: logger.Range,
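The new field only applies to the literal form of the constructor call. A hypothetical sketch of what the parser can and cannot capture:

// Detected: the path is a string literal, so an import record index can be stored.
const w1 = new Worker("./worker.ts");

// Not detected: the path is computed at runtime, so worker_import_record_index stays null.
const dynamicPath = "./worker" + ".ts";
const w2 = new Worker(dynamicPath);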
@@ -311,6 +311,9 @@ pub fn NewParser_(
     enclosing_class_keyword: logger.Range = logger.Range.None,
     import_items_for_namespace: std.AutoHashMapUnmanaged(Ref, ImportItemForNamespaceMap) = .{},
     is_import_item: RefMap = .{},
+    /// Tracks refs that refer to `Worker` imported from 'node:worker_threads' or 'worker_threads'.
+    /// Used by visitExpr to detect `new Worker(string_literal)` patterns for standalone binary bundling.
+    worker_threads_worker_refs: RefMap = .{},
     named_imports: NamedImportsType,
     named_exports: js_ast.Ast.NamedExports,
     import_namespace_cc_map: Map(ImportNamespaceCallOrConstruct, bool) = .{},
@@ -2795,6 +2798,16 @@ pub fn NewParser_(
     try p.is_import_item.put(p.allocator, ref, {});
     p.checkForNonBMPCodePoint(item.alias_loc, item.alias);
+
+    // Track Worker imports from node:worker_threads or worker_threads
+    // This is used by visitExpr to detect new Worker(string_literal) patterns
+    if (p.options.bundle and strings.eqlComptime(item.alias, "Worker")) {
+        if (strings.eqlComptime(path.text, "node:worker_threads") or
+            strings.eqlComptime(path.text, "worker_threads"))
+        {
+            try p.worker_threads_worker_refs.put(p.allocator, ref, {});
+        }
+    }

     // ensure every e_import_identifier holds the namespace
     if (p.options.features.hot_module_reloading) {
         const symbol = &p.symbols.items[ref.inner_index];
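Concretely, the parser records the ref only for a named `Worker` import from either specifier spelling. A minimal sketch of the shapes this covers:

// Tracked: a named Worker import, with or without the node: prefix.
import { Worker } from "node:worker_threads";
// import { Worker } from "worker_threads"; // equally tracked

// Not tracked: some other name from the same module.
// import { MessageChannel } from "node:worker_threads";

new Worker("./task.ts"); // this ref is now in worker_threads_worker_refs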
@@ -1519,6 +1519,48 @@ pub fn VisitExpr(
     arg.* = p.visitExpr(arg.*);
 }
+
+// Detect the `new Worker(string_literal)` pattern and create an import record.
+// This allows the bundler to automatically include worker files in standalone binaries.
+// Only detect Worker when:
+// 1. it's imported from the 'node:worker_threads' or 'worker_threads' module, OR
+// 2. it's the global unbound Worker (Bun's Web Worker API).
+// We do NOT detect user-defined classes named Worker.
+if (p.options.bundle) {
+    // After visiting, import identifiers are converted to e_import_identifier.
+    const target_ref: ?Ref = switch (e_.target.data) {
+        .e_identifier => |ident| ident.ref,
+        .e_import_identifier => |ident| ident.ref,
+        else => null,
+    };
+    if (target_ref) |ref| {
+        // Check if this identifier is a Worker that should be bundled:
+        // - imported from the worker_threads module (set populated in processImportStatement),
+        // - OR the global unbound Worker (Bun's Web Worker API).
+        const is_worker_threads_import = p.worker_threads_worker_refs.contains(ref);
+        const is_global_worker = blk: {
+            const symbol = p.symbols.items[ref.innerIndex()];
+            break :blk symbol.kind == .unbound and strings.eqlComptime(symbol.original_name, "Worker");
+        };
+
+        if (is_worker_threads_import or is_global_worker) {
+            // Check if the first argument is a string literal.
+            const args = e_.args.slice();
+            if (args.len >= 1) {
+                if (args[0].data.as(.e_string)) |str| {
+                    // Ignore calls if control flow is provably dead.
+                    if (!p.is_control_flow_dead) {
+                        const path_str = str.slice(p.allocator);
+                        const import_record_index = p.addImportRecord(.new_worker, args[0].loc, path_str);
+                        p.import_records.items[import_record_index].flags.handles_import_errors = p.fn_or_arrow_data_visit.try_body_count != 0;
+                        p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable;
+                        e_.worker_import_record_index = import_record_index;
+                    }
+                }
+            }
+        }
+    }
+}

 if (p.options.features.minify_syntax) {
     if (KnownGlobal.minifyGlobalConstructor(p.allocator, e_, p.symbols.items, expr.loc, p.options.features.minify_whitespace)) |minified| {
         return minified;
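Putting the gates together — a recognized Worker ref, a string-literal first argument, and live control flow — a sketch of how source code maps onto the detection rules above:

import { Worker } from "node:worker_threads";

// Detected: tracked Worker ref + string-literal path.
new Worker("./a.ts");

// Not detected: the first argument is not a string literal.
const p = "./b.ts";
new Worker(p);

// Not detected: calls in provably dead control flow are ignored.
if (false) new Worker("./c.ts");

// Not detected (in another module): a user-defined class that happens to be
// named Worker binds the identifier, so its symbol kind is not .unbound.
// class Worker { constructor(path: string) {} }
// new Worker("./d.ts");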
@@ -41,6 +41,7 @@ pub const ResolveMessage = struct {
     .url,
     .internal,
     .composes,
+    .new_worker,
     => "RESOLVE_ERROR",
     };
 };
@@ -213,8 +213,9 @@ pub const LinkerContext = struct {
     const sources: []const Logger.Source = this.parse_graph.input_files.items(.source);

-    try this.graph.load(entry_points, sources, server_component_boundaries, bundle.dynamic_import_entry_points.keys(), &this.parse_graph.entry_point_original_names);
+    try this.graph.load(entry_points, sources, server_component_boundaries, bundle.dynamic_import_entry_points.keys(), bundle.new_worker_entry_points.keys(), &this.parse_graph.entry_point_original_names);
     bundle.dynamic_import_entry_points.deinit();
+    bundle.new_worker_entry_points.deinit();

     var runtime_named_exports = &this.graph.ast.items(.named_exports)[Index.runtime.get()];
@@ -226,6 +226,7 @@ pub fn load(
     sources: []const Logger.Source,
     server_component_boundaries: ServerComponentBoundary.List,
     dynamic_import_entry_points: []const Index.Int,
+    new_worker_entry_points: []const Index.Int,
     entry_point_original_names: *const IndexStringMap,
 ) !void {
     const scb = server_component_boundaries.slice();
@@ -246,7 +247,7 @@ pub fn load(
     // Setup entry points
     {
-        try this.entry_points.setCapacity(this.allocator, entry_points.len + server_component_boundaries.list.len + dynamic_import_entry_points.len);
+        try this.entry_points.setCapacity(this.allocator, entry_points.len + server_component_boundaries.list.len + dynamic_import_entry_points.len + new_worker_entry_points.len);
         this.entry_points.len = entry_points.len;
         const source_indices = this.entry_points.items(.source_index);
@@ -291,6 +292,23 @@ pub fn load(
     });
 }

+// Worker files are always separate entry points (bundled separately)
+for (new_worker_entry_points) |id| {
+    if (entry_point_kinds[id] != .none) {
+        // Worker could reference a file that is already an entry point
+        continue;
+    }
+
+    const source = &sources[id];
+    entry_point_kinds[id] = EntryPoint.Kind.new_worker_import;
+
+    this.entry_points.appendAssumeCapacity(.{
+        .source_index = id,
+        .output_path = bun.PathString.init(source.path.text),
+        .output_path_was_auto_generated = true,
+    });
+}

 var import_records_list: []ImportRecord.List = this.ast.items(.import_records);
 try this.meta.setCapacity(this.allocator, import_records_list.len);
 this.meta.len = this.ast.len;
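The `continue` above prevents double-registration when a worker file was already listed as an entry point. A hypothetical build configuration where that branch would be taken (assuming the JS API follows the same path as the CLI):

// worker.ts is both user-specified and referenced via new Worker("./worker.ts");
// it keeps its original entry-point kind instead of becoming .new_worker_import.
await Bun.build({
  entrypoints: ["./entry.ts", "./worker.ts"],
  outdir: "./dist",
});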
@@ -509,6 +527,8 @@ pub fn propagateAsyncDependencies(this: *LinkerGraph) !void {
     .at, .at_conditional, .url, .composes => continue,
     // Other non-JS imports
     .html_manifest, .internal => continue,
+    // Worker files are bundled separately
+    .new_worker => continue,
 }

 const import_index: usize = import_record.source_index.get();
@@ -129,6 +129,7 @@ pub const BundleV2 = struct {
     /// See the comment in `Chunk.OutputPiece`
     unique_key: u64 = 0,
     dynamic_import_entry_points: std.AutoArrayHashMap(Index.Int, void) = undefined,
+    new_worker_entry_points: std.AutoArrayHashMap(Index.Int, void) = undefined,
     has_on_parse_plugins: bool = false,

     finalizers: std.ArrayListUnmanaged(CacheEntry.ExternalFreeFunction) = .{},
@@ -272,6 +273,8 @@ pub const BundleV2 = struct {
     redirects: []u32,
     redirect_map: PathToSourceIndexMap,
     dynamic_import_entry_points: *std.AutoArrayHashMap(Index.Int, void),
+    /// Files which are imported via `new Worker()`
+    new_worker_entry_points: *std.AutoArrayHashMap(Index.Int, void),
     /// Files which are Server Component Boundaries
     scb_bitset: ?bun.bit_set.DynamicBitSetUnmanaged,
     scb_list: ServerComponentBoundary.List.Slice,
@@ -346,6 +349,11 @@ pub const BundleV2 = struct {
         v.additional_files_imported_by_css_and_inlined.set(import_record.source_index.get());
     }

+    // Worker files are treated as separate entry points (like dynamic imports with code splitting)
+    if (import_record.kind == .new_worker and import_record.source_index.isValid()) {
+        v.new_worker_entry_points.put(import_record.source_index.get(), {}) catch unreachable;
+    }
+
     v.visit(import_record.source_index, check_dynamic_imports and import_record.kind == .dynamic, check_dynamic_imports);
     }
 }
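The comment's analogy to dynamic imports is the key design point: both constructs name a file that must become its own output. A minimal sketch of the parallel:

// Both of these cause the referenced file to be registered as a separate entry point:
const lazy = await import("./lazy.ts");   // dynamic import (with code splitting)
const worker = new Worker("./worker.ts"); // new_worker import record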
@@ -390,6 +398,7 @@ pub const BundleV2 = struct {
 }

 this.dynamic_import_entry_points = std.AutoArrayHashMap(Index.Int, void).init(this.allocator());
+this.new_worker_entry_points = std.AutoArrayHashMap(Index.Int, void).init(this.allocator());

 const all_urls_for_css = this.graph.ast.items(.url_for_css);
@@ -402,6 +411,7 @@ pub const BundleV2 = struct {
     .all_urls_for_css = all_urls_for_css,
     .redirect_map = this.pathToSourceIndexMap(this.transpiler.options.target).*,
     .dynamic_import_entry_points = &this.dynamic_import_entry_points,
+    .new_worker_entry_points = &this.new_worker_entry_points,
     .scb_bitset = scb_bitset,
     .scb_list = if (scb_bitset != null)
         this.graph.server_component_boundaries.slice()
@@ -4102,10 +4112,13 @@ pub const EntryPoint = struct {
     user_specified,
     dynamic_import,
     html,
+    /// A file imported via `new Worker(path)`
+    new_worker_import,

     pub fn outputKind(this: Kind) jsc.API.BuildArtifact.OutputKind {
         return switch (this) {
             .user_specified => .@"entry-point",
+            .new_worker_import => .@"entry-point",
             else => .chunk,
         };
     }
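Since `outputKind` maps `.new_worker_import` to `@"entry-point"`, an auto-detected worker should surface as an entry-point artifact rather than a chunk in the JS API. A hedged sketch, assuming worker detection applies to this build:

const result = await Bun.build({ entrypoints: ["./entry.ts"], outdir: "./dist" });
for (const artifact of result.outputs) {
  // A bundled worker file would be reported as "entry-point", not "chunk".
  console.log(artifact.path, artifact.kind);
}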
@@ -1,6 +1,8 @@
 pub fn computeCrossChunkDependencies(c: *LinkerContext, chunks: []Chunk) bun.OOM!void {
+    // Even without code_splitting, we need to rewrite worker import paths
+    // because workers are always separate entry points
     if (!c.graph.code_splitting) {
         // No need to compute cross-chunk dependencies if there can't be any
+        try rewriteWorkerImportPaths(c, chunks);
         return;
     }
@@ -92,10 +94,13 @@ const CrossChunkDependencies = struct {
     if (!part.is_live)
         continue;

-    // Rewrite external dynamic imports to point to the chunk for that entry point
+    // Rewrite external dynamic imports and worker imports to point to the chunk for that entry point
     for (part.import_record_indices.slice()) |import_record_id| {
         var import_record = &import_records[import_record_id];
-        if (import_record.source_index.isValid() and deps.ctx.isExternalDynamicImport(import_record, source_index)) {
+        const is_external_dynamic = import_record.source_index.isValid() and deps.ctx.isExternalDynamicImport(import_record, source_index);
+        const is_worker_import = import_record.source_index.isValid() and import_record.kind == .new_worker and
+            deps.ctx.graph.files.items(.entry_point_kind)[import_record.source_index.get()].isEntryPoint();
+        if (is_external_dynamic or is_worker_import) {
             const other_chunk_index = entry_point_chunk_indices[import_record.source_index.get()];
             import_record.path.text = _chunks[other_chunk_index].unique_key;
             import_record.source_index = Index.invalid;
@@ -452,3 +457,42 @@ const debug = LinkerContext.debug;

 const Logger = bun.logger;
 const Loc = Logger.Loc;
+
+/// Rewrite worker import record paths to point to the correct chunk.
+/// This is used when code_splitting is false, but we still have worker entry points.
+fn rewriteWorkerImportPaths(c: *LinkerContext, chunks: []Chunk) bun.OOM!void {
+    const entry_point_chunk_indices = c.graph.files.items(.entry_point_chunk_index);
+    const entry_point_kinds = c.graph.files.items(.entry_point_kind);
+    const parts_list = c.graph.ast.items(.parts);
+    const import_records_list = c.graph.ast.items(.import_records);
+
+    // Iterate through all chunks and their files to find worker imports
+    for (chunks) |*chunk| {
+        if (chunk.content != .javascript) continue;
+
+        for (chunk.files_with_parts_in_chunk.keys()) |source_index| {
+            const parts = parts_list[source_index].slice();
+            var import_records = import_records_list[source_index].slice();
+
+            for (parts) |*part| {
+                if (!part.is_live) continue;
+
+                for (part.import_record_indices.slice()) |import_record_id| {
+                    var import_record = &import_records[import_record_id];
+
+                    // Check if this is a worker import with a valid entry point
+                    if (import_record.kind == .new_worker and
+                        import_record.source_index.isValid() and
+                        entry_point_kinds[import_record.source_index.get()].isEntryPoint())
+                    {
+                        const other_chunk_index = entry_point_chunk_indices[import_record.source_index.get()];
+                        if (other_chunk_index < chunks.len) {
+                            import_record.path.text = chunks[other_chunk_index].unique_key;
+                            import_record.source_index = Index.invalid;
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
@@ -24,6 +24,9 @@ pub const ImportKind = enum(u8) {

     internal = 11,

+    /// A `new Worker(...)` expression with a string argument
+    new_worker = 12,
+
     pub const Label = std.EnumArray(ImportKind, []const u8);
     pub const all_labels: Label = brk: {
         // If these are changed, make sure to update
@@ -41,6 +44,7 @@ pub const ImportKind = enum(u8) {
     labels.set(ImportKind.composes, "composes");
     labels.set(ImportKind.internal, "internal");
     labels.set(ImportKind.html_manifest, "html_manifest");
+    labels.set(ImportKind.new_worker, "new-worker");
     break :brk labels;
 };
@@ -57,6 +61,7 @@ pub const ImportKind = enum(u8) {
     labels.set(ImportKind.internal, "<bun internal>");
     labels.set(ImportKind.composes, "composes");
     labels.set(ImportKind.html_manifest, "HTML import");
+    labels.set(ImportKind.new_worker, "new Worker()");
     break :brk labels;
 };
@@ -2150,7 +2150,13 @@ fn NewPrinter(
     p.print("(");

     if (args.len > 0) {
-        p.printExpr(args[0], .comma, ExprFlag.None());
+        // For new Worker() calls, use the bundled path from the import record
+        if (e.worker_import_record_index) |import_record_index| {
+            const record = p.importRecord(import_record_index);
+            p.printImportRecordPath(record);
+        } else {
+            p.printExpr(args[0], .comma, ExprFlag.None());
+        }

         for (args[1..]) |arg| {
             p.print(",");
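The printer swaps only the first argument; any remaining arguments are printed unchanged. A hypothetical before/after (the generated chunk name is illustrative):

// Source:
const w = new Worker("./worker.ts", { type: "module" });

// Printed output after bundling, with the path rewritten to the worker's chunk:
// const w = new Worker("./worker-JW3KXKQT.js", { type: "module" });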
@@ -105,6 +105,34 @@ describe("bundler", () => {
     outfile: "dist/out",
     run: { stdout: "Hello, world!" },
   });
+  // Test that workers are automatically bundled without explicit entryPointsRaw
+  itBundled("compile/WorkerAutoBundle", {
+    backend: "cli",
+    compile: true,
+    files: {
+      "/entry.ts": /* js */ `
+        import {rmSync} from 'fs';
+        // Verify we're not just importing from the filesystem
+        rmSync("./worker.ts", {force: true});
+
+        const worker = new Worker("./worker.ts");
+        // Wait for worker to signal completion
+        await new Promise((resolve) => {
+          worker.onmessage = (event) => {
+            console.log("Main: " + event.data);
+            resolve();
+          };
+        });
+      `,
+      "/worker.ts": /* js */ `
+        console.log("Worker loaded!");
+        postMessage("done");
+      `.trim(),
+    },
+    // No entryPointsRaw - worker should be auto-detected
+    outfile: "dist/out",
+    run: { stdout: "Worker loaded!\nMain: done\n", file: "dist/out", setCwd: true },
+  });
   itBundled("compile/WorkerRelativePathNoExtension", {
     backend: "cli",
     compile: true,