Compare commits

..

2 Commits

Author SHA1 Message Date
Jarred-Sumner
69579d7fcf deps: update elysia to 1.4.25 (1.4.25) 2026-02-15 04:38:59 +00:00
Jarred Sumner
337a9f7f2b fix(module): prevent crash when resolving bun:main before entry_po… (#27027)
…int.generate()

`ServerEntryPoint.source` defaults to `undefined`, and accessing its
`.contents` or `.path.text` fields before `generate()` has been called
causes a segfault. This happens when `bun:main` is resolved in contexts
where `entry_point.generate()` is skipped (HTML entry points) or never
called (test runner).

Add a `generated` flag to `ServerEntryPoint` and guard both access
sites:
- `getHardcodedModule()` in ModuleLoader.zig (returns null instead of
crashing)
- `_resolve()` in VirtualMachine.zig (falls through to normal
resolution)

### What does this PR do?

### How did you verify your code works?

Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude <noreply@anthropic.com>
2026-02-14 12:11:41 -08:00
7 changed files with 51 additions and 77 deletions

View File

@@ -1140,14 +1140,14 @@ export fn Bun__runVirtualModule(globalObject: *JSGlobalObject, specifier_ptr: *c
fn getHardcodedModule(jsc_vm: *VirtualMachine, specifier: bun.String, hardcoded: HardcodedModule) ?ResolvedSource {
analytics.Features.builtin_modules.insert(hardcoded);
return switch (hardcoded) {
.@"bun:main" => .{
.@"bun:main" => if (jsc_vm.entry_point.generated) .{
.allocator = null,
.source_code = bun.String.cloneUTF8(jsc_vm.entry_point.source.contents),
.specifier = specifier,
.source_url = specifier,
.tag = .esm,
.source_code_needs_deref = true,
},
} else null,
.@"bun:internal-for-testing" => {
if (!Environment.isDebug) {
if (!is_allowed_to_use_internal_testing_apis)

View File

@@ -1616,7 +1616,7 @@ fn _resolve(
if (strings.eqlComptime(std.fs.path.basename(specifier), Runtime.Runtime.Imports.alt_name)) {
ret.path = Runtime.Runtime.Imports.Name;
return;
} else if (strings.eqlComptime(specifier, main_file_name)) {
} else if (strings.eqlComptime(specifier, main_file_name) and jsc_vm.entry_point.generated) {
ret.result = null;
ret.path = jsc_vm.entry_point.source.path.text;
return;

View File

@@ -150,6 +150,7 @@ pub const ClientEntryPoint = struct {
pub const ServerEntryPoint = struct {
source: logger.Source = undefined,
generated: bool = false,
pub fn generate(
entry: *ServerEntryPoint,
@@ -230,6 +231,7 @@ pub const ServerEntryPoint = struct {
entry.source = logger.Source.initPathString(name, code);
entry.source.path.text = name;
entry.source.path.namespace = "server-entry";
entry.generated = true;
}
};

View File

@@ -277,7 +277,6 @@ pub fn Builder(comptime method: BuilderMethod) type {
pub const Entry = struct {
tree: Tree,
dependencies: Lockfile.DependencyIDList,
name_hash_map: NameHashMap,
};
pub const CleanResult = struct {
@@ -300,11 +299,8 @@ pub fn Builder(comptime method: BuilderMethod) type {
var dep_ids = try DependencyIDList.initCapacity(this.allocator, total);
const name_hash_maps = slice.items(.name_hash_map);
for (trees, dependencies, name_hash_maps) |*tree, *child, *name_map| {
for (trees, dependencies) |*tree, *child| {
defer child.deinit(this.allocator);
defer name_map.deinit(this.allocator);
const off: u32 = @intCast(dep_ids.items.len);
for (child.items) |dep_id| {
@@ -475,13 +471,11 @@ pub fn processSubtree(
.dependency_id = dependency_id,
},
.dependencies = .{},
.name_hash_map = .{},
});
const list_slice = builder.list.slice();
const trees = list_slice.items(.tree);
const dependency_lists = list_slice.items(.dependencies);
const name_hash_maps = list_slice.items(.name_hash_map);
const next: *Tree = &trees[builder.list.len - 1];
const pkgs = builder.lockfile.packages.slice();
@@ -559,7 +553,6 @@ pub fn processSubtree(
pkg_id,
&dependency,
dependency_lists,
name_hash_maps,
trees,
method,
builder,
@@ -580,7 +573,6 @@ pub fn processSubtree(
pkg_id,
&dependency,
dependency_lists,
name_hash_maps,
trees,
method,
builder,
@@ -625,8 +617,6 @@ pub fn processSubtree(
placed_dep_id.* = dep_id;
}
}
// Update the name_hash_map to point to the new dep_id
name_hash_maps[replace.id].put(builder.allocator, dependency.name_hash, dep_id) catch bun.outOfMemory();
if (pkg_id != invalid_package_id and builder.resolution_lists[pkg_id].len > 0) {
try builder.queue.writeItem(.{
.tree_id = replace.id,
@@ -648,7 +638,6 @@ pub fn processSubtree(
},
.placement => |dest| {
bun.handleOom(dependency_lists[dest.id].append(builder.allocator, dep_id));
name_hash_maps[dest.id].put(builder.allocator, dependency.name_hash, dep_id) catch bun.outOfMemory();
trees[dest.id].dependencies.len += 1;
if (pkg_id != invalid_package_id and builder.resolution_lists[pkg_id].len > 0) {
try builder.queue.writeItem(.{
@@ -680,13 +669,16 @@ fn hoistDependency(
package_id: PackageID,
dependency: *const Dependency,
dependency_lists: []Lockfile.DependencyIDList,
name_hash_maps: []NameHashMap,
trees: []Tree,
comptime method: BuilderMethod,
builder: *Builder(method),
) !HoistDependencyResult {
if (name_hash_maps[this.id].get(dependency.name_hash)) |dep_id| {
const this_dependencies = this.dependencies.get(dependency_lists[this.id].items);
for (0..this_dependencies.len) |i| {
const dep_id = this_dependencies[i];
const dep = builder.dependencies[dep_id];
if (dep.name_hash != dependency.name_hash) continue;
const res_id = builder.resolutions[dep_id];
if (res_id == invalid_package_id and package_id == invalid_package_id) {
@@ -766,7 +758,6 @@ fn hoistDependency(
package_id,
dependency,
dependency_lists,
name_hash_maps,
trees,
method,
builder,
@@ -778,8 +769,6 @@ fn hoistDependency(
return .{ .placement = .{ .id = this.id } }; // 2
}
pub const NameHashMap = std.AutoHashMapUnmanaged(PackageNameHash, DependencyID);
pub const FillItem = struct {
tree_id: Tree.Id,
dependency_id: DependencyID,

View File

@@ -634,3 +634,42 @@ test.concurrent("bun serve files with correct Content-Type headers", async () =>
// The process will be automatically cleaned up by 'await using'
}
});
// Regression test: resolving "bun:main" from a --preload script while serving
// an HTML entry point used to segfault, because ServerEntryPoint.generate() is
// skipped for HTML entries and its `source` field is left undefined.
test("importing bun:main from HTML entry preload does not crash", async () => {
const dir = tempDirWithFiles("html-entry-bun-main", {
"index.html": /*html*/ `
<!DOCTYPE html>
<html>
<head><title>Test</title></head>
<body><h1>Hello</h1></body>
</html>
`,
"preload.mjs": /*js*/ `
try {
await import("bun:main");
} catch {}
// Signal that preload ran successfully without crashing
console.log("PRELOAD_OK");
`,
});
// Serve the HTML entry with the preload module; --port=0 asks for any free port.
await using proc = Bun.spawn({
cmd: [bunExe(), "--preload", "./preload.mjs", "index.html", "--port=0"],
env: bunEnv,
cwd: dir,
stdout: "pipe",
stderr: "pipe",
});
// Accumulate stdout until the server prints its URL. NOTE(review): this
// assumes the preload output ("PRELOAD_OK") is flushed before the server's
// "http://..." line — presumably preloads run before the server starts; a
// crash in the preload would end the loop when stdout closes instead.
const decoder = new TextDecoder();
let text = "";
for await (const chunk of proc.stdout) {
text += decoder.decode(chunk, { stream: true });
if (text.includes("http://")) break;
}
expect(text).toContain("PRELOAD_OK");
// Tear the server down explicitly so the test does not wait on it.
proc.kill();
await proc.exited;
});

View File

@@ -1,56 +0,0 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
// Regression test for #27033: `bun install` had exponential slowdown with workspace
// configurations containing many packages with overlapping transitive dependencies.
// The root cause was O(n) linear scans in hoistDependency() for each dependency being
// hoisted, resulting in O(n^2) behavior when many deps are hoisted to the root tree.
// (From the deleted regression test for #27033 — see the file-header comment.)
test("workspace install with many overlapping workspace dependencies does not hang", async () => {
// Create a workspace with many workspace packages that cross-reference each other.
// This exercises the hoisting code path that was O(n^2) before the fix.
// With 30 workspace packages each depending on all others, this creates ~900
// dependency edges that all need to be hoisted.
const numPackages = 30;
const files: Record<string, string> = {};
for (let i = 0; i < numPackages; i++) {
const deps: Record<string, string> = {};
// Each package depends on all other packages
for (let j = 0; j < numPackages; j++) {
if (i !== j) {
deps[`pkg-${j}`] = "workspace:*";
}
}
files[`packages/pkg-${i}/package.json`] = JSON.stringify({
name: `pkg-${i}`,
version: "1.0.0",
dependencies: deps,
});
}
// Root package.json declares packages/* as workspaces; `using` cleans up the dir.
using dir = tempDir("issue-27033", {
"package.json": JSON.stringify({
name: "workspace-root",
private: true,
workspaces: ["packages/*"],
}),
...files,
});
// Before the fix, this would hang or take >60s with many overlapping deps.
// After the fix, it should complete in a few seconds.
await using proc = Bun.spawn({
cmd: [bunExe(), "install"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// Install must finish cleanly: no panic/error on stderr, zero exit code,
// and the expected package count — presumably 30 workspace packages plus
// the root ("31 packages"); confirm against bun install's summary format.
expect(stderr).not.toContain("panic:");
expect(stderr).not.toContain("error:");
expect(stdout).toContain("31 packages");
expect(exitCode).toBe(0);
}, 30_000);

View File

@@ -2,6 +2,6 @@
{
"package": "elysia",
"repository": "https://github.com/elysiajs/elysia",
"tag": "1.4.12"
"tag": "1.4.25"
}
]