mirror of https://github.com/oven-sh/bun, synced 2026-02-17 14:22:01 +00:00

Merge branch 'main' into claude/fix-html-bundle-missing-assets
@@ -198,13 +198,16 @@ const myPlugin: BunPlugin = {
};
```

-The builder object provides some methods for hooking into parts of the bundling process. Bun implements `onResolve` and `onLoad`; it does not yet implement the esbuild hooks `onStart`, `onEnd`, and `onDispose`, or the `resolve` utilities. `initialOptions` is partially implemented, being read-only and only having a subset of esbuild's options; use `config` (same thing but with Bun's `BuildConfig` format) instead.
+The builder object provides some methods for hooking into parts of the bundling process. Bun implements `onStart`, `onEnd`, `onResolve`, and `onLoad`. It does not yet implement the esbuild hooks `onDispose` and `resolve`. `initialOptions` is partially implemented, being read-only and only having a subset of esbuild's options; use `config` (same thing but with Bun's `BuildConfig` format) instead.

```ts title="myPlugin.ts" icon="/icons/typescript.svg"
import type { BunPlugin } from "bun";
const myPlugin: BunPlugin = {
  name: "my-plugin",
  setup(builder) {
    builder.onStart(() => {
      /* called when the bundle starts */
    });
    builder.onResolve(
      {
        /* onResolve.options */
@@ -225,6 +228,9 @@ const myPlugin: BunPlugin = {
        };
      },
    );
    builder.onEnd(result => {
      /* called when the bundle is complete */
    });
  },
};
```

@@ -15,6 +15,7 @@ Plugins can register callbacks to be run at various points in the lifecycle of a bundle:

- `onResolve()`: Run before a module is resolved
- `onLoad()`: Run before a module is loaded
- `onBeforeParse()`: Run zero-copy native addons in the parser thread before a file is parsed
- `onEnd()`: Run after the bundle is complete

## Reference

@@ -39,6 +40,7 @@ type PluginBuilder = {
      exports?: Record<string, any>;
    },
  ) => void;
  onEnd(callback: (result: BuildOutput) => void | Promise<void>): void;
  config: BuildConfig;
};
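
The new `onEnd` entry in `PluginBuilder` composes with the hooks listed above. As a minimal sketch (not part of this diff; it assumes only the documented `onStart`, `onEnd`, and `config` members):

```ts
import type { BunPlugin } from "bun";

// A small timing plugin built only on the documented hooks.
const timingPlugin: BunPlugin = {
  name: "timing",
  setup(builder) {
    let started = 0;
    builder.onStart(() => {
      started = Date.now();
      // `config` mirrors the BuildConfig passed to Bun.build()
      console.log(`bundling to ${builder.config.outdir}`);
    });
    builder.onEnd(result => {
      console.log(`bundled ${result.outputs.length} files in ${Date.now() - started}ms`);
    });
  },
};
```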

@@ -423,3 +425,53 @@ This lifecycle callback is run immediately before a file is parsed by Bun's bundler.
As input, it receives the file's contents and can optionally return new source code.

<Info>This callback can be called from any thread, so the napi module implementation must be thread-safe.</Info>

### onEnd

```ts
onEnd(callback: (result: BuildOutput) => void | Promise<void>): void;
```

Registers a callback to be run after the bundle is complete. The callback receives the [`BuildOutput`](/docs/bundler#outputs) object containing the build results, including output files and any build messages.

```ts title="index.ts" icon="/icons/typescript.svg"
const result = await Bun.build({
  entrypoints: ["./app.ts"],
  outdir: "./dist",
  plugins: [
    {
      name: "onEnd example",
      setup(build) {
        build.onEnd(result => {
          console.log(`Build completed with ${result.outputs.length} files`);
          for (const log of result.logs) {
            console.log(log);
          }
        });
      },
    },
  ],
});
```

The callback can return a `Promise`. The build output promise from `Bun.build()` will not resolve until all `onEnd()` callbacks have completed.

```ts title="index.ts" icon="/icons/typescript.svg"
const result = await Bun.build({
  entrypoints: ["./app.ts"],
  outdir: "./dist",
  plugins: [
    {
      name: "Upload to S3",
      setup(build) {
        build.onEnd(async result => {
          if (!result.success) return;
          for (const output of result.outputs) {
            await uploadToS3(output);
          }
        });
      },
    },
  ],
});
```

@@ -1154,6 +1154,14 @@ pub const FetchTasklet = struct {
        }
    }

    /// Whether the request body should skip chunked transfer encoding framing.
    /// True for upgraded connections (e.g. WebSocket) or when the user explicitly
    /// set Content-Length without setting Transfer-Encoding.
    fn skipChunkedFraming(this: *const FetchTasklet) bool {
        return this.upgraded_connection or
            (this.request_headers.get("content-length") != null and this.request_headers.get("transfer-encoding") == null);
    }

    pub fn writeRequestData(this: *FetchTasklet, data: []const u8) ResumableSinkBackpressure {
        log("writeRequestData {}", .{data.len});
        if (this.signal) |signal| {
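
For readers who don't speak Zig, the predicate can be mirrored in TypeScript; this is illustrative only, and the names are not Bun internals:

```ts
// Framing is skipped for upgraded connections, or when the caller pinned
// Content-Length without also setting Transfer-Encoding.
function skipChunkedFraming(upgraded: boolean, headers: Headers): boolean {
  return upgraded || (headers.has("content-length") && !headers.has("transfer-encoding"));
}
```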

@@ -1175,7 +1183,7 @@ pub const FetchTasklet = struct {
            // don't have backpressure, so we will schedule the data to be written;
            // if we have backpressure, the onWritable will drain the buffer
            needs_schedule = stream_buffer.isEmpty();
-           if (this.upgraded_connection) {
+           if (this.skipChunkedFraming()) {
                bun.handleOom(stream_buffer.write(data));
            } else {
                // 16 is the max size of a hex number representing 64 bits, + 2 for the \r\n

@@ -1209,15 +1217,14 @@ pub const FetchTasklet = struct {
            }
            this.abortTask();
        } else {
-           if (!this.upgraded_connection) {
-               // If it is not upgraded, we need to send the terminating chunk
+           if (!this.skipChunkedFraming()) {
+               // Using chunked transfer encoding, send the terminating chunk
                const thread_safe_stream_buffer = this.request_body_streaming_buffer orelse return;
                const stream_buffer = thread_safe_stream_buffer.acquire();
                defer thread_safe_stream_buffer.release();
                bun.handleOom(stream_buffer.write(http.end_of_chunked_http1_1_encoding_response_body));
            }
            if (this.http) |http_| {
                // just tell it to write the end of the chunked encoding, aka 0\r\n\r\n
                http.http_thread.scheduleRequestWrite(http_, .end);
            }
        }

@@ -3683,7 +3683,20 @@ pub const BundleV2 = struct {
            }
        }

-       const import_record_loader = import_record.loader orelse path.loader(&transpiler.options.loaders) orelse .file;
+       const import_record_loader = brk: {
+           const resolved_loader = import_record.loader orelse path.loader(&transpiler.options.loaders) orelse .file;
+           // When an HTML file references a URL asset (e.g. <link rel="manifest" href="./manifest.json" />),
+           // the file must be copied to the output directory as-is. If the resolved loader would
+           // parse/transform the file (e.g. .json, .toml) rather than copy it, force the .file loader
+           // so that `shouldCopyForBundling()` returns true and the asset is emitted.
+           // Only do this for HTML sources; CSS url() imports should retain their original behavior.
+           if (loader == .html and import_record.kind == .url and !resolved_loader.shouldCopyForBundling() and
+               !resolved_loader.isJavaScriptLike() and !resolved_loader.isCSS() and resolved_loader != .html)
+           {
+               break :brk Loader.file;
+           }
+           break :brk resolved_loader;
+       };
        import_record.loader = import_record_loader;

        const is_html_entrypoint = import_record_loader == .html and target.isServerSide() and this.transpiler.options.dev_server == null;

16 src/http.zig

@@ -719,7 +719,21 @@ pub fn buildRequest(this: *HTTPClient, body_len: usize) picohttp.Request {

    if (body_len > 0 or this.method.hasRequestBody()) {
        if (this.flags.is_streaming_request_body) {
-           if (add_transfer_encoding and this.flags.upgrade_state == .none) {
+           if (original_content_length) |content_length| {
+               if (add_transfer_encoding) {
+                   // User explicitly set Content-Length and did not set Transfer-Encoding;
+                   // preserve Content-Length instead of using chunked encoding.
+                   // This matches Node.js behavior, where an explicit Content-Length is always honored.
+                   request_headers_buf[header_count] = .{
+                       .name = content_length_header_name,
+                       .value = content_length,
+                   };
+                   header_count += 1;
+               }
+               // If !add_transfer_encoding, the user explicitly set Transfer-Encoding,
+               // which was already added to request_headers_buf. We respect that and
+               // do not add Content-Length (they are mutually exclusive per HTTP/1.1).
+           } else if (add_transfer_encoding and this.flags.upgrade_state == .none) {
                request_headers_buf[header_count] = chunked_encoded_header;
                header_count += 1;
            }
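
In practice this means an explicit Content-Length on a streaming request now survives multiple writes. The regression tests later in this commit exercise exactly this; a quick usage sketch (assuming a server listening on 127.0.0.1:3000):

```ts
import http from "node:http";

// With this change, the explicit Content-Length below is preserved across
// multiple write() calls instead of being replaced by Transfer-Encoding: chunked.
const req = http.request({
  hostname: "127.0.0.1",
  port: 3000,
  method: "POST",
  headers: { "Content-Length": "10" },
});
req.write("hello");
req.end("world");
```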

@@ -623,6 +623,17 @@ pub const PackageInstaller = struct {
        // else => unreachable,
        // };

        // If a newly computed integrity hash is available (e.g. for a GitHub
        // tarball) and the lockfile doesn't already have one, persist it so
        // the lockfile gets re-saved with the hash.
        if (data.integrity.tag.isSupported()) {
            var pkg_metas = this.lockfile.packages.items(.meta);
            if (!pkg_metas[package_id].integrity.tag.isSupported()) {
                pkg_metas[package_id].integrity = data.integrity;
                this.manager.options.enable.force_save_lockfile = true;
            }
        }

        if (this.manager.task_queue.fetchRemove(task_id)) |removed| {
            var callbacks = removed.value;
            defer callbacks.deinit(this.manager.allocator);

@@ -133,6 +133,12 @@ pub fn processExtractedTarballPackage(
        break :package pkg;
    };

    // Store the tarball integrity hash so the lockfile can pin the
    // exact content downloaded from the remote (GitHub) server.
    if (data.integrity.tag.isSupported()) {
        package.meta.integrity = data.integrity;
    }

    package = manager.lockfile.appendPackage(package) catch unreachable;
    package_id.* = package.meta.id;

@@ -23,7 +23,26 @@ pub inline fn run(this: *const ExtractTarball, log: *logger.Log, bytes: []const u8
            return error.IntegrityCheckFailed;
        }
    }
-   return this.extract(log, bytes);
+   var result = try this.extract(log, bytes);

    // Compute and store the SHA-512 integrity hash for GitHub tarballs so the
    // lockfile can pin the exact tarball content. On subsequent installs the
    // hash stored in the lockfile is forwarded via this.integrity and verified
    // above, preventing a compromised server from silently swapping the tarball.
    if (this.resolution.tag == .github) {
        if (this.integrity.tag.isSupported()) {
            // Re-installing with an existing lockfile: integrity was already
            // verified above; propagate the known value to ExtractData so that
            // the lockfile keeps it on re-serialisation.
            result.integrity = this.integrity;
        } else {
            // First install (no integrity in the lockfile yet): compute it.
            result.integrity = .{ .tag = .sha512 };
            Crypto.SHA512.hash(bytes, result.integrity.value[0..Crypto.SHA512.digest]);
        }
    }

    return result;
}

pub fn buildURL(
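
The integrity recorded here is the same `sha512-<base64>` string that later appears in `bun.lock`. A hedged sketch of that encoding in TypeScript, with `Bun.CryptoHasher` standing in for the internal Zig hasher used above:

```ts
// Derive a lockfile-style integrity string for a tarball buffer.
function integrityFor(tarball: Uint8Array): string {
  const hasher = new Bun.CryptoHasher("sha512");
  hasher.update(tarball);
  return `sha512-${hasher.digest("base64")}`;
}
```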

@@ -547,6 +566,7 @@ const string = []const u8;

const Npm = @import("./npm.zig");
const std = @import("std");
const Crypto = @import("../sha.zig").Hashers;
const FileSystem = @import("../fs.zig").FileSystem;
const Integrity = @import("./integrity.zig").Integrity;
const Resolution = @import("./resolution.zig").Resolution;

@@ -209,6 +209,7 @@ pub const ExtractData = struct {
        path: string = "",
        buf: []u8 = "",
    } = null,
    integrity: Integrity = .{},
};

pub const DependencyInstallContext = struct {

@@ -271,6 +272,7 @@ pub const VersionSlice = external.VersionSlice;

pub const Dependency = @import("./dependency.zig");
pub const Behavior = @import("./dependency.zig").Behavior;
pub const Integrity = @import("./integrity.zig").Integrity;

pub const Lockfile = @import("./lockfile.zig");
pub const PatchedDep = Lockfile.PatchedDep;

@@ -644,9 +644,16 @@ pub const Stringifier = struct {
                        &path_buf,
                    );

-                   try writer.print(", {f}]", .{
-                       repo.resolved.fmtJson(buf, .{}),
-                   });
+                   if (pkg_meta.integrity.tag.isSupported()) {
+                       try writer.print(", {f}, \"{f}\"]", .{
+                           repo.resolved.fmtJson(buf, .{}),
+                           pkg_meta.integrity,
+                       });
+                   } else {
+                       try writer.print(", {f}]", .{
+                           repo.resolved.fmtJson(buf, .{}),
+                       });
+                   }
                },
                else => unreachable,
            }

@@ -1885,6 +1892,15 @@ pub fn parseIntoBinaryLockfile(
                };

                @field(res.value, @tagName(tag)).resolved = try string_buf.append(bun_tag_str);

                // Optional integrity hash (added to pin tarball content)
                if (i < pkg_info.len) {
                    const integrity_expr = pkg_info.at(i);
                    if (integrity_expr.asString(allocator)) |integrity_str| {
                        pkg.meta.integrity = Integrity.parse(integrity_str);
                        i += 1;
                    }
                }
            },
            else => {},
        }

@@ -271,7 +271,7 @@ const OutgoingMessagePrototype = {
    return this;
  },
  setHeaders(headers) {
-   if (this._header || this[headerStateSymbol] !== NodeHTTPHeaderState.none) {
+   if ((this._header != null) || this[headerStateSymbol] === NodeHTTPHeaderState.sent) {
      throw $ERR_HTTP_HEADERS_SENT("set");
    }
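
The corrected condition only rejects once headers have actually been sent. A sketch of the call pattern this unblocks (mirrored by the regression test added below):

```ts
import http from "node:http";

const req = http.request("http://127.0.0.1:3000/");
// Previously this threw ERR_HTTP_HEADERS_SENT even though nothing had been
// sent yet; with the fix it succeeds until the headers actually go out.
req.setHeaders(new Headers({ "x-test": "value" }));
req.end();
```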

@@ -899,4 +899,75 @@ body {
    expect(entry2Html).toMatch(/src=".*\.js"/);
  },
});

// Test manifest.json is copied as an asset and link href is rewritten
itBundled("html/manifest-json", {
  outdir: "out/",
  files: {
    "/index.html": `
<!DOCTYPE html>
<html>
  <head>
    <link rel="manifest" href="./manifest.json" />
  </head>
  <body>
    <h1>App</h1>
    <script src="./app.js"></script>
  </body>
</html>`,
    "/manifest.json": JSON.stringify({
      name: "My App",
      short_name: "App",
      start_url: "/",
      display: "standalone",
      background_color: "#ffffff",
      theme_color: "#000000",
    }),
    "/app.js": "console.log('hello')",
  },
  entryPoints: ["/index.html"],
  onAfterBundle(api) {
    const htmlContent = api.readFile("out/index.html");

    // The original manifest.json reference should be rewritten to a hashed filename
    expect(htmlContent).not.toContain('manifest.json"');
    expect(htmlContent).toMatch(/href="(?:\.\/|\/)?manifest-[a-zA-Z0-9]+\.json"/);

    // Extract the hashed manifest filename and verify its content
    const manifestMatch = htmlContent.match(/href="(?:\.\/|\/)?(manifest-[a-zA-Z0-9]+\.json)"/);
    expect(manifestMatch).not.toBeNull();
    const manifestContent = api.readFile("out/" + manifestMatch![1]);
    expect(manifestContent).toContain('"name"');
    expect(manifestContent).toContain('"My App"');
  },
});

// Test that other non-JS/CSS file types referenced via URL imports are copied as assets
itBundled("html/xml-asset", {
  outdir: "out/",
  files: {
    "/index.html": `
<!DOCTYPE html>
<html>
  <head>
    <link rel="manifest" href="./site.webmanifest" />
  </head>
  <body>
    <h1>App</h1>
  </body>
</html>`,
    "/site.webmanifest": JSON.stringify({
      name: "My App",
      icons: [{ src: "/icon.png", sizes: "192x192" }],
    }),
  },
  entryPoints: ["/index.html"],
  onAfterBundle(api) {
    const htmlContent = api.readFile("out/index.html");

    // The webmanifest reference should be rewritten to a hashed filename
    expect(htmlContent).not.toContain("site.webmanifest");
    expect(htmlContent).toMatch(/href=".*\.webmanifest"/);
  },
});
});

255 test/cli/install/GHSA-pfwx-36v6-832x.test.ts (new file)
@@ -0,0 +1,255 @@

import { file } from "bun";
import { describe, expect, test } from "bun:test";
import { rm } from "fs/promises";
import { bunEnv, bunExe, tempDir } from "harness";
import { join } from "path";

// Each test uses its own BUN_INSTALL_CACHE_DIR inside the temp dir for full
// isolation. This avoids interfering with the global cache or other tests.
function envWithCache(dir: string) {
  return { ...bunEnv, BUN_INSTALL_CACHE_DIR: join(String(dir), ".bun-cache") };
}

describe.concurrent("GitHub tarball integrity", () => {
  test("should store integrity hash in lockfile for GitHub dependencies", async () => {
    using dir = tempDir("github-integrity", {
      "package.json": JSON.stringify({
        name: "test-github-integrity",
        dependencies: {
          "is-number": "jonschlinkert/is-number#98e8ff1",
        },
      }),
    });

    const env = envWithCache(dir);

    await using proc = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    expect(stderr).toContain("Saved lockfile");
    expect(exitCode).toBe(0);

    const lockfileContent = await file(join(String(dir), "bun.lock")).text();

    // The lockfile should contain a sha512 integrity hash for the GitHub dependency
    expect(lockfileContent).toContain("sha512-");
    // The resolved commit hash should be present
    expect(lockfileContent).toContain("jonschlinkert-is-number-98e8ff1");
    // Verify the format: the integrity appears after the resolved commit hash
    expect(lockfileContent).toMatch(/"jonschlinkert-is-number-98e8ff1",\s*"sha512-/);
  });

  test("should verify integrity passes on re-install with matching hash", async () => {
    using dir = tempDir("github-integrity-match", {
      "package.json": JSON.stringify({
        name: "test-github-integrity-match",
        dependencies: {
          "is-number": "jonschlinkert/is-number#98e8ff1",
        },
      }),
    });

    const env = envWithCache(dir);

    // First install to generate lockfile with correct integrity
    await using proc1 = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout1, stderr1, exitCode1] = await Promise.all([proc1.stdout.text(), proc1.stderr.text(), proc1.exited]);
    expect(stderr1).not.toContain("error:");
    expect(exitCode1).toBe(0);

    // Read the generated lockfile and extract the integrity hash adjacent to
    // the GitHub resolved entry to avoid accidentally matching an npm hash.
    const lockfileContent = await file(join(String(dir), "bun.lock")).text();
    const integrityMatch = lockfileContent.match(/"jonschlinkert-is-number-98e8ff1",\s*"(sha512-[A-Za-z0-9+/]+=*)"/);
    expect(integrityMatch).not.toBeNull();
    const integrityHash = integrityMatch![1];

    // Clear cache and node_modules, then re-install with the same lockfile
    await rm(join(String(dir), ".bun-cache"), { recursive: true, force: true });
    await rm(join(String(dir), "node_modules"), { recursive: true, force: true });

    await using proc2 = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout2, stderr2, exitCode2] = await Promise.all([proc2.stdout.text(), proc2.stderr.text(), proc2.exited]);

    // Should succeed because the integrity matches
    expect(stderr2).not.toContain("Integrity check failed");
    expect(exitCode2).toBe(0);

    // Lockfile should still contain the same integrity hash
    const lockfileContent2 = await file(join(String(dir), "bun.lock")).text();
    expect(lockfileContent2).toContain(integrityHash);
  });

  test("should reject GitHub tarball when integrity check fails", async () => {
    using dir = tempDir("github-integrity-reject", {
      "package.json": JSON.stringify({
        name: "test-github-integrity-reject",
        dependencies: {
          "is-number": "jonschlinkert/is-number#98e8ff1",
        },
      }),
      // Pre-create a lockfile with an invalid integrity hash (valid base64, 64 zero bytes)
      "bun.lock": JSON.stringify({
        lockfileVersion: 1,
        configVersion: 1,
        workspaces: {
          "": {
            name: "test-github-integrity-reject",
            dependencies: {
              "is-number": "jonschlinkert/is-number#98e8ff1",
            },
          },
        },
        packages: {
          "is-number": [
            "is-number@github:jonschlinkert/is-number#98e8ff1",
            {},
            "jonschlinkert-is-number-98e8ff1",
            "sha512-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==",
          ],
        },
      }),
    });

    // Fresh per-test cache ensures the tarball must be downloaded from the network
    const env = envWithCache(dir);

    await using proc = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    expect(stderr).toContain("Integrity check failed");
    expect(exitCode).not.toBe(0);
  });

  test("should update lockfile with integrity when old format has none", async () => {
    using dir = tempDir("github-integrity-upgrade", {
      "package.json": JSON.stringify({
        name: "test-github-integrity-upgrade",
        dependencies: {
          "is-number": "jonschlinkert/is-number#98e8ff1",
        },
      }),
      // Pre-create a lockfile in the old format (no integrity hash)
      "bun.lock": JSON.stringify({
        lockfileVersion: 1,
        configVersion: 1,
        workspaces: {
          "": {
            name: "test-github-integrity-upgrade",
            dependencies: {
              "is-number": "jonschlinkert/is-number#98e8ff1",
            },
          },
        },
        packages: {
          "is-number": ["is-number@github:jonschlinkert/is-number#98e8ff1", {}, "jonschlinkert-is-number-98e8ff1"],
        },
      }),
    });

    // Fresh per-test cache ensures the tarball must be downloaded
    const env = envWithCache(dir);

    await using proc = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    // Should succeed without errors
    expect(stderr).not.toContain("Integrity check failed");
    expect(stderr).not.toContain("error:");
    // The lockfile should be re-saved with the new integrity hash
    expect(stderr).toContain("Saved lockfile");
    expect(exitCode).toBe(0);

    // Verify the lockfile now contains the integrity hash
    const lockfileContent = await file(join(String(dir), "bun.lock")).text();
    expect(lockfileContent).toContain("sha512-");
    expect(lockfileContent).toMatch(/"jonschlinkert-is-number-98e8ff1",\s*"sha512-/);
  });

  test("should accept GitHub dependency from cache without re-downloading", async () => {
    // Use a shared cache dir for both installs so the second is a true cache hit
    using dir = tempDir("github-integrity-cached", {
      "package.json": JSON.stringify({
        name: "test-github-integrity-cached",
        dependencies: {
          "is-number": "jonschlinkert/is-number#98e8ff1",
        },
      }),
    });

    const env = envWithCache(dir);

    // First install warms the per-test cache
    await using proc1 = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout1, stderr1, exitCode1] = await Promise.all([proc1.stdout.text(), proc1.stderr.text(), proc1.exited]);
    expect(stderr1).not.toContain("error:");
    expect(exitCode1).toBe(0);

    // Remove node_modules but keep the cache
    await rm(join(String(dir), "node_modules"), { recursive: true, force: true });

    // Strip the integrity from the lockfile to simulate an old-format lockfile
    // that should still work when the cache already has the package
    const lockfileContent = await file(join(String(dir), "bun.lock")).text();
    const stripped = lockfileContent.replace(/,\s*"sha512-[^"]*"/, "");
    await Bun.write(join(String(dir), "bun.lock"), stripped);

    // Second install should hit the cache and succeed without re-downloading
    await using proc2 = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout2, stderr2, exitCode2] = await Promise.all([proc2.stdout.text(), proc2.stderr.text(), proc2.exited]);

    // Should succeed without integrity errors (package served from cache)
    expect(stderr2).not.toContain("Integrity check failed");
    expect(stderr2).not.toContain("error:");
    expect(exitCode2).toBe(0);
  });
});

89 test/regression/issue/27049.test.ts (new file)
@@ -0,0 +1,89 @@

import { expect, test } from "bun:test";
import http from "node:http";

test("ClientRequest.setHeaders should not throw ERR_HTTP_HEADERS_SENT on new request", async () => {
  await using server = Bun.serve({
    port: 0,
    fetch(req) {
      return new Response(req.headers.get("x-test") ?? "missing");
    },
  });

  const { resolve, reject, promise } = Promise.withResolvers<string>();

  const req = http.request(`http://localhost:${server.port}/test`, { method: "GET" }, res => {
    let data = "";
    res.on("data", (chunk: Buffer) => {
      data += chunk.toString();
    });
    res.on("end", () => resolve(data));
  });

  req.on("error", reject);

  // This should not throw - headers haven't been sent yet
  req.setHeaders(new Headers({ "x-test": "value" }));

  req.end();

  const body = await promise;
  expect(body).toBe("value");
});

test("ClientRequest.setHeaders works with Map", async () => {
  await using server = Bun.serve({
    port: 0,
    fetch(req) {
      return new Response(req.headers.get("x-map-test") ?? "missing");
    },
  });

  const { resolve, reject, promise } = Promise.withResolvers<string>();

  const req = http.request(`http://localhost:${server.port}/test`, { method: "GET" }, res => {
    let data = "";
    res.on("data", (chunk: Buffer) => {
      data += chunk.toString();
    });
    res.on("end", () => resolve(data));
  });

  req.on("error", reject);

  req.setHeaders(new Map([["x-map-test", "map-value"]]));

  req.end();

  const body = await promise;
  expect(body).toBe("map-value");
});

test("ServerResponse.setHeaders should not throw before headers are sent", async () => {
  const { resolve, reject, promise } = Promise.withResolvers<string>();

  const server = http.createServer((req, res) => {
    // This should not throw - headers haven't been sent yet
    res.setHeaders(new Headers({ "x-custom": "server-value" }));
    res.writeHead(200);
    res.end("ok");
  });

  try {
    server.listen(0, () => {
      const port = (server.address() as any).port;
      try {
        const req = http.request(`http://localhost:${port}/test`, res => {
          resolve(res.headers["x-custom"] as string);
        });
        req.on("error", reject);
        req.end();
      } catch (e) {
        reject(e);
      }
    });

    expect(await promise).toBe("server-value");
  } finally {
    server.close();
  }
});

336 test/regression/issue/27061.test.ts (new file)
@@ -0,0 +1,336 @@

import { describe, expect, test } from "bun:test";
import http from "node:http";

// Regression test for https://github.com/oven-sh/bun/issues/27061
// When http.ClientRequest.write() is called more than once (streaming data in chunks),
// Bun was stripping the explicitly-set Content-Length header and switching to
// Transfer-Encoding: chunked. Node.js preserves Content-Length in all cases.

describe("node:http ClientRequest preserves explicit Content-Length", () => {
  test("with multiple req.write() calls", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const chunk1 = Buffer.alloc(100, "a");
      const chunk2 = Buffer.alloc(100, "b");
      const totalLength = chunk1.length + chunk2.length;

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        headers: {
          "Content-Length": totalLength.toString(),
        },
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(chunk1);
        req.write(chunk2);
        req.end();
      });

      const result = await promise;
      expect(result.contentLength).toBe("200");
      expect(result.transferEncoding).toBeUndefined();
      expect(result.bodyLength).toBe(200);
    } finally {
      server.close();
    }
  });

  test("with req.write() + req.end(data)", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const chunk1 = Buffer.alloc(100, "a");
      const chunk2 = Buffer.alloc(100, "b");
      const totalLength = chunk1.length + chunk2.length;

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        headers: {
          "Content-Length": totalLength.toString(),
        },
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(chunk1);
        req.end(chunk2);
      });

      const result = await promise;
      expect(result.contentLength).toBe("200");
      expect(result.transferEncoding).toBeUndefined();
      expect(result.bodyLength).toBe(200);
    } finally {
      server.close();
    }
  });

  test("with three req.write() calls", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const chunk1 = Buffer.alloc(100, "a");
      const chunk2 = Buffer.alloc(100, "b");
      const chunk3 = Buffer.alloc(100, "c");
      const totalLength = chunk1.length + chunk2.length + chunk3.length;

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        headers: {
          "Content-Length": totalLength.toString(),
        },
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(chunk1);
        req.write(chunk2);
        req.write(chunk3);
        req.end();
      });

      const result = await promise;
      expect(result.contentLength).toBe("300");
      expect(result.transferEncoding).toBeUndefined();
      expect(result.bodyLength).toBe(300);
    } finally {
      server.close();
    }
  });

  test("single req.write() still works", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const data = Buffer.alloc(200, "x");

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        headers: {
          "Content-Length": data.length.toString(),
        },
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(data);
        req.end();
      });

      const result = await promise;
      expect(result.contentLength).toBe("200");
      expect(result.transferEncoding).toBeUndefined();
      expect(result.bodyLength).toBe(200);
    } finally {
      server.close();
    }
  });

  test("without explicit Content-Length still uses chunked encoding", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const chunk1 = Buffer.alloc(100, "a");
      const chunk2 = Buffer.alloc(100, "b");

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        // No Content-Length header
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(chunk1);
        req.write(chunk2);
        req.end();
      });

      const result = await promise;
      // Without explicit Content-Length, chunked encoding should be used
      expect(result.transferEncoding).toBe("chunked");
      expect(result.bodyLength).toBe(200);
    } finally {
      server.close();
    }
  });

  test("explicit Transfer-Encoding takes precedence over Content-Length", async () => {
    const { promise, resolve } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const chunk1 = Buffer.alloc(100, "a");
      const chunk2 = Buffer.alloc(100, "b");

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        headers: {
          "Content-Length": "200",
          "Transfer-Encoding": "chunked",
        },
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(chunk1);
        req.write(chunk2);
        req.end();
      });

      const result = await promise;
      // When user explicitly sets Transfer-Encoding, it should be used
      // and Content-Length should not be added
      expect(result.transferEncoding).toBe("chunked");
      expect(result.contentLength).toBeUndefined();
      expect(result.bodyLength).toBe(200);
    } finally {
      server.close();
    }
  });
});