mirror of https://github.com/oven-sh/bun
synced 2026-02-22 08:41:46 +00:00

Compare commits
1 Commits
claude/fix ... claude/fix

| Author | SHA1 | Date |
|---|---|---|
| | a3df1a8cfd | |
@@ -198,16 +198,13 @@ const myPlugin: BunPlugin = {
};
```

The builder object provides some methods for hooking into parts of the bundling process. Bun implements `onStart`, `onEnd`, `onResolve`, and `onLoad`. It does not yet implement the esbuild hooks `onDispose` and `resolve`. `initialOptions` is partially implemented, being read-only and only having a subset of esbuild's options; use `config` (same thing but with Bun's `BuildConfig` format) instead.

The builder object provides some methods for hooking into parts of the bundling process. Bun implements `onResolve` and `onLoad`; it does not yet implement the esbuild hooks `onStart`, `onEnd`, and `onDispose`, nor the `resolve` utility. `initialOptions` is partially implemented, being read-only and only having a subset of esbuild's options; use `config` (same thing but with Bun's `BuildConfig` format) instead.

```ts title="myPlugin.ts" icon="/icons/typescript.svg"
import type { BunPlugin } from "bun";

const myPlugin: BunPlugin = {
  name: "my-plugin",
  setup(builder) {
    builder.onStart(() => {
      /* called when the bundle starts */
    });
    builder.onResolve(
      {
        /* onResolve.options */
@@ -228,9 +225,6 @@ const myPlugin: BunPlugin = {
        };
      },
    );
    builder.onEnd(result => {
      /* called when the bundle is complete */
    });
  },
};
```

@@ -15,7 +15,6 @@ Plugins can register callbacks to be run at various points in the lifecycle of a

- `onResolve()`: Run before a module is resolved
- `onLoad()`: Run before a module is loaded
- `onBeforeParse()`: Run zero-copy native addons in the parser thread before a file is parsed
- `onEnd()`: Run after the bundle is complete
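Taken together, a minimal sketch of a plugin wiring up the JS lifecycle hooks listed above (`onBeforeParse` requires a native napi module and is shown separately below; names here are illustrative):

```ts
import type { BunPlugin } from "bun";

// Sketch only: a virtual-module plugin exercising onResolve, onLoad, and onEnd.
const lifecyclePlugin: BunPlugin = {
  name: "lifecycle-example",
  setup(build) {
    // Resolve the virtual specifier into a dedicated namespace.
    build.onResolve({ filter: /^virtual:answer$/ }, args => ({
      path: args.path,
      namespace: "virtual",
    }));
    // Supply source code for anything in that namespace.
    build.onLoad({ filter: /.*/, namespace: "virtual" }, () => ({
      contents: "export default 42;",
      loader: "js",
    }));
    // Observe the finished build.
    build.onEnd(result => {
      console.log(`bundle finished, success=${result.success}`);
    });
  },
};
```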

## Reference

@@ -40,7 +39,6 @@ type PluginBuilder = {
      exports?: Record<string, any>;
    },
  ) => void;
  onEnd(callback: (result: BuildOutput) => void | Promise<void>): void;
  config: BuildConfig;
};

@@ -425,53 +423,3 @@ This lifecycle callback is run immediately before a file is parsed by Bun's bund

As input, it receives the file's contents and can optionally return new source code.

<Info>This callback can be called from any thread and so the napi module implementation must be thread-safe.</Info>
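A minimal sketch of registering an `onBeforeParse` hook, assuming a prebuilt napi addon that exports a symbol named `onBeforeParse` (the module path and symbol name here are illustrative):

```ts
import type { BunPlugin } from "bun";
// Hypothetical native addon; onBeforeParse callbacks must be implemented
// in a napi module and, per the note above, must be thread-safe.
import myNativeAddon from "./my-native-addon";

const nativePlugin: BunPlugin = {
  name: "native-parse-hook",
  setup(builder) {
    // Runs in the parser thread before each matching file is parsed.
    builder.onBeforeParse(
      { filter: /\.ts$/ },
      { napiModule: myNativeAddon, symbol: "onBeforeParse" },
    );
  },
};
```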

### onEnd

```ts
onEnd(callback: (result: BuildOutput) => void | Promise<void>): void;
```

Registers a callback to be run after the bundle is complete. The callback receives the [`BuildOutput`](/docs/bundler#outputs) object containing the build results, including output files and any build messages.

```ts title="index.ts" icon="/icons/typescript.svg"
const result = await Bun.build({
  entrypoints: ["./app.ts"],
  outdir: "./dist",
  plugins: [
    {
      name: "onEnd example",
      setup(build) {
        build.onEnd(result => {
          console.log(`Build completed with ${result.outputs.length} files`);
          for (const log of result.logs) {
            console.log(log);
          }
        });
      },
    },
  ],
});
```

The callback can return a `Promise`. The build output promise from `Bun.build()` will not resolve until all `onEnd()` callbacks have completed.

```ts title="index.ts" icon="/icons/typescript.svg"
const result = await Bun.build({
  entrypoints: ["./app.ts"],
  outdir: "./dist",
  plugins: [
    {
      name: "Upload to S3",
      setup(build) {
        build.onEnd(async result => {
          if (!result.success) return;
          for (const output of result.outputs) {
            await uploadToS3(output);
          }
        });
      },
    },
  ],
});
```
@@ -1707,15 +1707,6 @@ pub fn NewWrappedHandler(comptime tls: bool) type {

        pub fn onClose(this: WrappedSocket, socket: Socket, err: c_int, data: ?*anyopaque) bun.JSError!void {
            if (comptime tls) {
                // Clean up the raw TCP socket from upgradeTLS() — its onClose
                // never fires because uws closes through the TLS context only.
                defer {
                    if (!this.tcp.socket.isDetached()) {
                        this.tcp.socket.detach();
                        this.tcp.has_pending_activity.store(false, .release);
                        this.tcp.deref();
                    }
                }
                try TLSSocket.onClose(this.tls, socket, err, data);
            } else {
                try TLSSocket.onClose(this.tcp, socket, err, data);
@@ -42,9 +42,6 @@

#include <JavaScriptCore/ExceptionScope.h>
#include <JavaScriptCore/FunctionConstructor.h>
#include <JavaScriptCore/Heap.h>
#include <JavaScriptCore/Integrity.h>
#include <JavaScriptCore/MarkedBlock.h>
#include <JavaScriptCore/PreciseAllocation.h>
#include <JavaScriptCore/Identifier.h>
#include <JavaScriptCore/InitializeThreading.h>
#include <JavaScriptCore/IteratorOperations.h>

@@ -2418,18 +2415,6 @@ extern "C" napi_status napi_typeof(napi_env env, napi_value val,

    if (value.isCell()) {
        JSCell* cell = value.asCell();

        // Validate that the cell pointer is a real GC-managed object.
        // Native modules may accidentally pass garbage (e.g. a C string pointer)
        // as napi_value, which would crash when we dereference the cell.
        // isSanePointer rejects obviously invalid addresses (null-near, non-canonical).
        // The bloom filter provides fast rejection of pointers not in any known
        // MarkedBlock, using only pointer arithmetic (no dereference).
        if (!JSC::Integrity::isSanePointer(cell)
            || (!JSC::PreciseAllocation::isPreciseAllocation(cell)
                && toJS(env)->vm().heap.objectSpace().blocks().filter().ruleOut(
                    std::bit_cast<uintptr_t>(JSC::MarkedBlock::blockFor(cell))))) [[unlikely]]
            return napi_set_last_error(env, napi_invalid_arg);

        switch (cell->type()) {
        case JSC::JSFunctionType:
        case JSC::InternalFunctionType:
@@ -1154,14 +1154,6 @@ pub const FetchTasklet = struct {
        }
    }

    /// Whether the request body should skip chunked transfer encoding framing.
    /// True for upgraded connections (e.g. WebSocket) or when the user explicitly
    /// set Content-Length without setting Transfer-Encoding.
    fn skipChunkedFraming(this: *const FetchTasklet) bool {
        return this.upgraded_connection or
            (this.request_headers.get("content-length") != null and this.request_headers.get("transfer-encoding") == null);
    }

    pub fn writeRequestData(this: *FetchTasklet, data: []const u8) ResumableSinkBackpressure {
        log("writeRequestData {}", .{data.len});
        if (this.signal) |signal| {
@@ -1183,7 +1175,7 @@
            // we don't have backpressure, so schedule the data to be written;
            // if we have backpressure, onWritable will drain the buffer
            needs_schedule = stream_buffer.isEmpty();
            if (this.skipChunkedFraming()) {
                if (this.upgraded_connection) {
                    bun.handleOom(stream_buffer.write(data));
                } else {
                    // 16 is the max length of a hex number representing a 64-bit size, + 2 for the \r\n
@@ -1217,14 +1209,15 @@
            }
            this.abortTask();
        } else {
            if (!this.skipChunkedFraming()) {
                // Using chunked transfer encoding, send the terminating chunk
                if (!this.upgraded_connection) {
                    // If not upgraded, we need to send the terminating chunk
                    const thread_safe_stream_buffer = this.request_body_streaming_buffer orelse return;
                    const stream_buffer = thread_safe_stream_buffer.acquire();
                    defer thread_safe_stream_buffer.release();
                    bun.handleOom(stream_buffer.write(http.end_of_chunked_http1_1_encoding_response_body));
                }
                if (this.http) |http_| {
                    // just tell it to write the end of the chunked encoding, aka 0\r\n\r\n
                    http.http_thread.scheduleRequestWrite(http_, .end);
                }
            }
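For reference, the framing this code produces is standard HTTP/1.1 chunked encoding: each chunk is the hex byte count, CRLF, the data, CRLF, and the body terminates with `0\r\n\r\n`. A small illustrative TypeScript sketch (not Bun's implementation):

```ts
// Illustrative sketch of HTTP/1.1 chunked framing (not Bun's implementation).
// Each chunk is "<hex length>\r\n<data>\r\n"; the body terminates with "0\r\n\r\n".
function frameChunk(data: Uint8Array): Uint8Array {
  const encoder = new TextEncoder();
  const head = encoder.encode(data.length.toString(16) + "\r\n");
  const tail = encoder.encode("\r\n");
  const out = new Uint8Array(head.length + data.length + tail.length);
  out.set(head, 0);
  out.set(data, head.length);
  out.set(tail, head.length + data.length);
  return out;
}

const endOfChunkedBody = "0\r\n\r\n"; // the terminating chunk scheduled by `.end`
```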

@@ -3683,20 +3683,7 @@ pub const BundleV2 = struct {
            }
        }

        const import_record_loader = brk: {
            const resolved_loader = import_record.loader orelse path.loader(&transpiler.options.loaders) orelse .file;
            // When an HTML file references a URL asset (e.g. <link rel="manifest" href="./manifest.json" />),
            // the file must be copied to the output directory as-is. If the resolved loader would
            // parse/transform the file (e.g. .json, .toml) rather than copy it, force the .file loader
            // so that `shouldCopyForBundling()` returns true and the asset is emitted.
            // Only do this for HTML sources — CSS url() imports should retain their original behavior.
            if (loader == .html and import_record.kind == .url and !resolved_loader.shouldCopyForBundling() and
                !resolved_loader.isJavaScriptLike() and !resolved_loader.isCSS() and resolved_loader != .html)
            {
                break :brk Loader.file;
            }
            break :brk resolved_loader;
        };
        const import_record_loader = import_record.loader orelse path.loader(&transpiler.options.loaders) orelse .file;
        import_record.loader = import_record_loader;

        const is_html_entrypoint = import_record_loader == .html and target.isServerSide() and this.transpiler.options.dev_server == null;
@@ -1637,6 +1637,21 @@ pub const RunCommand = struct {
            return;
        }

        // Support `node --run <script>` (Node.js v22+ feature).
        // The --run flag is silently discarded by the arg parser since it's
        // unrecognized, but the script name ends up as ctx.positionals[0].
        // Scan the raw argv to detect if --run was present.
        if (ctx.positionals.len > 0) {
            for (bun.argv) |arg| {
                if (strings.eqlComptime(arg, "--run")) {
                    if (exec(ctx, .{ .bin_dirs_only = false, .log_errors = true, .allow_fast_run_for_extensions = false })) |ok| {
                        if (ok) return;
                    } else |_| {}
                    Global.exit(1);
                }
            }
        }

        if (ctx.positionals.len == 0) {
            Output.errGeneric("Missing script to execute. Bun's provided 'node' cli wrapper does not support a repl.", .{});
            Global.exit(1);
src/http.zig

@@ -719,21 +719,7 @@ pub fn buildRequest(this: *HTTPClient, body_len: usize) picohttp.Request {

    if (body_len > 0 or this.method.hasRequestBody()) {
        if (this.flags.is_streaming_request_body) {
            if (original_content_length) |content_length| {
                if (add_transfer_encoding) {
                    // User explicitly set Content-Length and did not set Transfer-Encoding;
                    // preserve Content-Length instead of using chunked encoding.
                    // This matches Node.js behavior where an explicit Content-Length is always honored.
                    request_headers_buf[header_count] = .{
                        .name = content_length_header_name,
                        .value = content_length,
                    };
                    header_count += 1;
                }
                // If !add_transfer_encoding, the user explicitly set Transfer-Encoding,
                // which was already added to request_headers_buf. We respect that and
                // do not add Content-Length (they are mutually exclusive per HTTP/1.1).
            } else if (add_transfer_encoding and this.flags.upgrade_state == .none) {
            if (add_transfer_encoding and this.flags.upgrade_state == .none) {
                request_headers_buf[header_count] = chunked_encoded_header;
                header_count += 1;
            }
@@ -623,17 +623,6 @@ pub const PackageInstaller = struct {
        // else => unreachable,
        // };

        // If a newly computed integrity hash is available (e.g. for a GitHub
        // tarball) and the lockfile doesn't already have one, persist it so
        // the lockfile gets re-saved with the hash.
        if (data.integrity.tag.isSupported()) {
            var pkg_metas = this.lockfile.packages.items(.meta);
            if (!pkg_metas[package_id].integrity.tag.isSupported()) {
                pkg_metas[package_id].integrity = data.integrity;
                this.manager.options.enable.force_save_lockfile = true;
            }
        }

        if (this.manager.task_queue.fetchRemove(task_id)) |removed| {
            var callbacks = removed.value;
            defer callbacks.deinit(this.manager.allocator);

@@ -133,12 +133,6 @@ pub fn processExtractedTarballPackage(
        break :package pkg;
    };

    // Store the tarball integrity hash so the lockfile can pin the
    // exact content downloaded from the remote (GitHub) server.
    if (data.integrity.tag.isSupported()) {
        package.meta.integrity = data.integrity;
    }

    package = manager.lockfile.appendPackage(package) catch unreachable;
    package_id.* = package.meta.id;
@@ -23,26 +23,7 @@ pub inline fn run(this: *const ExtractTarball, log: *logger.Log, bytes: []const
            return error.IntegrityCheckFailed;
        }
    }
    var result = try this.extract(log, bytes);

    // Compute and store SHA-512 integrity hash for GitHub tarballs so the
    // lockfile can pin the exact tarball content. On subsequent installs the
    // hash stored in the lockfile is forwarded via this.integrity and verified
    // above, preventing a compromised server from silently swapping the tarball.
    if (this.resolution.tag == .github) {
        if (this.integrity.tag.isSupported()) {
            // Re-installing with an existing lockfile: integrity was already
            // verified above, propagate the known value to ExtractData so that
            // the lockfile keeps it on re-serialisation.
            result.integrity = this.integrity;
        } else {
            // First install (no integrity in the lockfile yet): compute it.
            result.integrity = .{ .tag = .sha512 };
            Crypto.SHA512.hash(bytes, result.integrity.value[0..Crypto.SHA512.digest]);
        }
    }

    return result;
    return this.extract(log, bytes);
}

pub fn buildURL(
@@ -566,7 +547,6 @@ const string = []const u8;

const Npm = @import("./npm.zig");
const std = @import("std");
const Crypto = @import("../sha.zig").Hashers;
const FileSystem = @import("../fs.zig").FileSystem;
const Integrity = @import("./integrity.zig").Integrity;
const Resolution = @import("./resolution.zig").Resolution;

@@ -209,7 +209,6 @@ pub const ExtractData = struct {
        path: string = "",
        buf: []u8 = "",
    } = null,
    integrity: Integrity = .{},
};

pub const DependencyInstallContext = struct {

@@ -272,7 +271,6 @@ pub const VersionSlice = external.VersionSlice;

pub const Dependency = @import("./dependency.zig");
pub const Behavior = @import("./dependency.zig").Behavior;
pub const Integrity = @import("./integrity.zig").Integrity;

pub const Lockfile = @import("./lockfile.zig");
pub const PatchedDep = Lockfile.PatchedDep;
@@ -644,16 +644,9 @@ pub const Stringifier = struct {
                &path_buf,
            );

            if (pkg_meta.integrity.tag.isSupported()) {
                try writer.print(", {f}, \"{f}\"]", .{
                    repo.resolved.fmtJson(buf, .{}),
                    pkg_meta.integrity,
                });
            } else {
                try writer.print(", {f}]", .{
                    repo.resolved.fmtJson(buf, .{}),
                });
            }
            try writer.print(", {f}]", .{
                repo.resolved.fmtJson(buf, .{}),
            });
        },
        else => unreachable,
    }

@@ -1892,15 +1885,6 @@ pub fn parseIntoBinaryLockfile(
        };

        @field(res.value, @tagName(tag)).resolved = try string_buf.append(bun_tag_str);

        // Optional integrity hash (added to pin tarball content)
        if (i < pkg_info.len) {
            const integrity_expr = pkg_info.at(i);
            if (integrity_expr.asString(allocator)) |integrity_str| {
                pkg.meta.integrity = Integrity.parse(integrity_str);
                i += 1;
            }
        }
    },
    else => {},
}
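For reference, the lockfile shape being serialized and parsed here: a `bun.lock` package entry is a tuple whose optional last element is the integrity string. A sketch mirroring the test fixtures later in this diff (the hash value is illustrative):

```ts
// Shape of a bun.lock "packages" entry for a GitHub dependency:
// [specifier, metadata, resolved tarball name, optional integrity hash]
const entry = [
  "is-number@github:jonschlinkert/is-number#98e8ff1",
  {},
  "jonschlinkert-is-number-98e8ff1",
  "sha512-<base64 digest>", // appended when pkg_meta.integrity is supported
] as const;
```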
@@ -51,15 +51,6 @@ function onError(msg, err, callback) {
  process.nextTick(emitErrorNt, msg, err, callback);
}

function isHTTPHeaderStateSentOrAssigned(state) {
  return state === NodeHTTPHeaderState.sent || state === NodeHTTPHeaderState.assigned;
}
function throwHeadersSentIfNecessary(self, action) {
  if (self._header != null || isHTTPHeaderStateSentOrAssigned(self[headerStateSymbol])) {
    throw $ERR_HTTP_HEADERS_SENT(action);
  }
}

function write_(msg, chunk, encoding, callback, fromEnd) {
  if (typeof callback !== "function") callback = nop;

@@ -261,14 +252,18 @@ const OutgoingMessagePrototype = {

  removeHeader(name) {
    validateString(name, "name");
    throwHeadersSentIfNecessary(this, "remove");
    if ((this._header !== undefined && this._header !== null) || this[headerStateSymbol] === NodeHTTPHeaderState.sent) {
      throw $ERR_HTTP_HEADERS_SENT("remove");
    }
    const headers = this[headersSymbol];
    if (!headers) return;
    headers.delete(name);
  },

  setHeader(name, value) {
    throwHeadersSentIfNecessary(this, "set");
    if ((this._header !== undefined && this._header !== null) || this[headerStateSymbol] == NodeHTTPHeaderState.sent) {
      throw $ERR_HTTP_HEADERS_SENT("set");
    }
    validateHeaderName(name);
    validateHeaderValue(name, value);
    const headers = (this[headersSymbol] ??= new Headers());
@@ -276,7 +271,9 @@ const OutgoingMessagePrototype = {
    return this;
  },
  setHeaders(headers) {
    throwHeadersSentIfNecessary(this, "set");
    if (this._header || this[headerStateSymbol] !== NodeHTTPHeaderState.none) {
      throw $ERR_HTTP_HEADERS_SENT("set");
    }

    if (!headers || $isArray(headers) || typeof headers.keys !== "function" || typeof headers.get !== "function") {
      throw $ERR_INVALID_ARG_TYPE("headers", ["Headers", "Map"], headers);
@@ -899,75 +899,4 @@ body {
      expect(entry2Html).toMatch(/src=".*\.js"/);
    },
  });

  // Test manifest.json is copied as an asset and link href is rewritten
  itBundled("html/manifest-json", {
    outdir: "out/",
    files: {
      "/index.html": `
<!DOCTYPE html>
<html>
  <head>
    <link rel="manifest" href="./manifest.json" />
  </head>
  <body>
    <h1>App</h1>
    <script src="./app.js"></script>
  </body>
</html>`,
      "/manifest.json": JSON.stringify({
        name: "My App",
        short_name: "App",
        start_url: "/",
        display: "standalone",
        background_color: "#ffffff",
        theme_color: "#000000",
      }),
      "/app.js": "console.log('hello')",
    },
    entryPoints: ["/index.html"],
    onAfterBundle(api) {
      const htmlContent = api.readFile("out/index.html");

      // The original manifest.json reference should be rewritten to a hashed filename
      expect(htmlContent).not.toContain('manifest.json"');
      expect(htmlContent).toMatch(/href="(?:\.\/|\/)?manifest-[a-zA-Z0-9]+\.json"/);

      // Extract the hashed manifest filename and verify its content
      const manifestMatch = htmlContent.match(/href="(?:\.\/|\/)?(manifest-[a-zA-Z0-9]+\.json)"/);
      expect(manifestMatch).not.toBeNull();
      const manifestContent = api.readFile("out/" + manifestMatch![1]);
      expect(manifestContent).toContain('"name"');
      expect(manifestContent).toContain('"My App"');
    },
  });

  // Test that other non-JS/CSS file types referenced via URL imports are copied as assets
  itBundled("html/xml-asset", {
    outdir: "out/",
    files: {
      "/index.html": `
<!DOCTYPE html>
<html>
  <head>
    <link rel="manifest" href="./site.webmanifest" />
  </head>
  <body>
    <h1>App</h1>
  </body>
</html>`,
      "/site.webmanifest": JSON.stringify({
        name: "My App",
        icons: [{ src: "/icon.png", sizes: "192x192" }],
      }),
    },
    entryPoints: ["/index.html"],
    onAfterBundle(api) {
      const htmlContent = api.readFile("out/index.html");

      // The webmanifest reference should be rewritten to a hashed filename
      expect(htmlContent).not.toContain("site.webmanifest");
      expect(htmlContent).toMatch(/href=".*\.webmanifest"/);
    },
  });
});
@@ -1,255 +0,0 @@
import { file } from "bun";
import { describe, expect, test } from "bun:test";
import { rm } from "fs/promises";
import { bunEnv, bunExe, tempDir } from "harness";
import { join } from "path";

// Each test uses its own BUN_INSTALL_CACHE_DIR inside the temp dir for full
// isolation. This avoids interfering with the global cache or other tests.
function envWithCache(dir: string) {
  return { ...bunEnv, BUN_INSTALL_CACHE_DIR: join(String(dir), ".bun-cache") };
}

describe.concurrent("GitHub tarball integrity", () => {
  test("should store integrity hash in lockfile for GitHub dependencies", async () => {
    using dir = tempDir("github-integrity", {
      "package.json": JSON.stringify({
        name: "test-github-integrity",
        dependencies: {
          "is-number": "jonschlinkert/is-number#98e8ff1",
        },
      }),
    });

    const env = envWithCache(dir);

    await using proc = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    expect(stderr).toContain("Saved lockfile");
    expect(exitCode).toBe(0);

    const lockfileContent = await file(join(String(dir), "bun.lock")).text();

    // The lockfile should contain a sha512 integrity hash for the GitHub dependency
    expect(lockfileContent).toContain("sha512-");
    // The resolved commit hash should be present
    expect(lockfileContent).toContain("jonschlinkert-is-number-98e8ff1");
    // Verify the format: the integrity appears after the resolved commit hash
    expect(lockfileContent).toMatch(/"jonschlinkert-is-number-98e8ff1",\s*"sha512-/);
  });

  test("should verify integrity passes on re-install with matching hash", async () => {
    using dir = tempDir("github-integrity-match", {
      "package.json": JSON.stringify({
        name: "test-github-integrity-match",
        dependencies: {
          "is-number": "jonschlinkert/is-number#98e8ff1",
        },
      }),
    });

    const env = envWithCache(dir);

    // First install to generate lockfile with correct integrity
    await using proc1 = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout1, stderr1, exitCode1] = await Promise.all([proc1.stdout.text(), proc1.stderr.text(), proc1.exited]);
    expect(stderr1).not.toContain("error:");
    expect(exitCode1).toBe(0);

    // Read the generated lockfile and extract the integrity hash adjacent to
    // the GitHub resolved entry to avoid accidentally matching an npm hash.
    const lockfileContent = await file(join(String(dir), "bun.lock")).text();
    const integrityMatch = lockfileContent.match(/"jonschlinkert-is-number-98e8ff1",\s*"(sha512-[A-Za-z0-9+/]+=*)"/);
    expect(integrityMatch).not.toBeNull();
    const integrityHash = integrityMatch![1];

    // Clear cache and node_modules, then re-install with the same lockfile
    await rm(join(String(dir), ".bun-cache"), { recursive: true, force: true });
    await rm(join(String(dir), "node_modules"), { recursive: true, force: true });

    await using proc2 = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout2, stderr2, exitCode2] = await Promise.all([proc2.stdout.text(), proc2.stderr.text(), proc2.exited]);

    // Should succeed because the integrity matches
    expect(stderr2).not.toContain("Integrity check failed");
    expect(exitCode2).toBe(0);

    // Lockfile should still contain the same integrity hash
    const lockfileContent2 = await file(join(String(dir), "bun.lock")).text();
    expect(lockfileContent2).toContain(integrityHash);
  });

  test("should reject GitHub tarball when integrity check fails", async () => {
    using dir = tempDir("github-integrity-reject", {
      "package.json": JSON.stringify({
        name: "test-github-integrity-reject",
        dependencies: {
          "is-number": "jonschlinkert/is-number#98e8ff1",
        },
      }),
      // Pre-create a lockfile with an invalid integrity hash (valid base64, 64 zero bytes)
      "bun.lock": JSON.stringify({
        lockfileVersion: 1,
        configVersion: 1,
        workspaces: {
          "": {
            name: "test-github-integrity-reject",
            dependencies: {
              "is-number": "jonschlinkert/is-number#98e8ff1",
            },
          },
        },
        packages: {
          "is-number": [
            "is-number@github:jonschlinkert/is-number#98e8ff1",
            {},
            "jonschlinkert-is-number-98e8ff1",
            "sha512-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==",
          ],
        },
      }),
    });

    // Fresh per-test cache ensures the tarball must be downloaded from the network
    const env = envWithCache(dir);

    await using proc = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    expect(stderr).toContain("Integrity check failed");
    expect(exitCode).not.toBe(0);
  });

  test("should update lockfile with integrity when old format has none", async () => {
    using dir = tempDir("github-integrity-upgrade", {
      "package.json": JSON.stringify({
        name: "test-github-integrity-upgrade",
        dependencies: {
          "is-number": "jonschlinkert/is-number#98e8ff1",
        },
      }),
      // Pre-create a lockfile in the old format (no integrity hash)
      "bun.lock": JSON.stringify({
        lockfileVersion: 1,
        configVersion: 1,
        workspaces: {
          "": {
            name: "test-github-integrity-upgrade",
            dependencies: {
              "is-number": "jonschlinkert/is-number#98e8ff1",
            },
          },
        },
        packages: {
          "is-number": ["is-number@github:jonschlinkert/is-number#98e8ff1", {}, "jonschlinkert-is-number-98e8ff1"],
        },
      }),
    });

    // Fresh per-test cache ensures the tarball must be downloaded
    const env = envWithCache(dir);

    await using proc = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    // Should succeed without errors
    expect(stderr).not.toContain("Integrity check failed");
    expect(stderr).not.toContain("error:");
    // The lockfile should be re-saved with the new integrity hash
    expect(stderr).toContain("Saved lockfile");
    expect(exitCode).toBe(0);

    // Verify the lockfile now contains the integrity hash
    const lockfileContent = await file(join(String(dir), "bun.lock")).text();
    expect(lockfileContent).toContain("sha512-");
    expect(lockfileContent).toMatch(/"jonschlinkert-is-number-98e8ff1",\s*"sha512-/);
  });

  test("should accept GitHub dependency from cache without re-downloading", async () => {
    // Use a shared cache dir for both installs so the second is a true cache hit
    using dir = tempDir("github-integrity-cached", {
      "package.json": JSON.stringify({
        name: "test-github-integrity-cached",
        dependencies: {
          "is-number": "jonschlinkert/is-number#98e8ff1",
        },
      }),
    });

    const env = envWithCache(dir);

    // First install warms the per-test cache
    await using proc1 = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout1, stderr1, exitCode1] = await Promise.all([proc1.stdout.text(), proc1.stderr.text(), proc1.exited]);
    expect(stderr1).not.toContain("error:");
    expect(exitCode1).toBe(0);

    // Remove node_modules but keep the cache
    await rm(join(String(dir), "node_modules"), { recursive: true, force: true });

    // Strip the integrity from the lockfile to simulate an old-format lockfile
    // that should still work when the cache already has the package
    const lockfileContent = await file(join(String(dir), "bun.lock")).text();
    const stripped = lockfileContent.replace(/,\s*"sha512-[^"]*"/, "");
    await Bun.write(join(String(dir), "bun.lock"), stripped);

    // Second install should hit the cache and succeed without re-downloading
    await using proc2 = Bun.spawn({
      cmd: [bunExe(), "install"],
      cwd: String(dir),
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout2, stderr2, exitCode2] = await Promise.all([proc2.stdout.text(), proc2.stderr.text(), proc2.exited]);

    // Should succeed without integrity errors (package served from cache)
    expect(stderr2).not.toContain("Integrity check failed");
    expect(stderr2).not.toContain("error:");
    expect(exitCode2).toBe(0);
  });
});
@@ -101,4 +101,39 @@ describe("fake node cli", () => {
    const temp = tempDirWithFiles("fake-node", {});
    expect(() => fakeNodeRun(temp, [])).toThrow();
  });

  describe("node --run", () => {
    test("runs a package.json script", () => {
      const temp = tempDirWithFiles("fake-node", {
        "package.json": JSON.stringify({
          scripts: {
            echo_test: "echo pass",
          },
        }),
      });
      expect(fakeNodeRun(temp, ["--run", "echo_test"]).stdout).toBe("pass");
    });

    test("runs pre/post scripts", () => {
      const temp = tempDirWithFiles("fake-node", {
        "package.json": JSON.stringify({
          scripts: {
            premyscript: "echo pre",
            myscript: "echo main",
            postmyscript: "echo post",
          },
        }),
      });
      expect(fakeNodeRun(temp, ["--run", "myscript"]).stdout).toBe("pre\nmain\npost");
    });

    test("errors on missing script", () => {
      const temp = tempDirWithFiles("fake-node", {
        "package.json": JSON.stringify({
          scripts: {},
        }),
      });
      expect(() => fakeNodeRun(temp, ["--run", "nonexistent"])).toThrow();
    });
  });
});
@@ -2119,35 +2119,6 @@ static napi_value test_napi_create_tsfn_async_context_frame(const Napi::Callback
  return env.Undefined();
}

// Test for BUN-1PYR: napi_typeof should not crash when given an invalid
// napi_value that is actually a raw C string pointer. This simulates the
// scenario where a native module passes garbage data (e.g., a string pointer
// like "Tensor ...") as a napi_value to napi_typeof.
static napi_value test_napi_typeof_invalid_pointer(const Napi::CallbackInfo &info) {
  Napi::Env env = info.Env();

  // Simulate the exact crash scenario: a C string pointer reinterpreted as napi_value.
  // The crash address 0x6F20726F736E6554 decoded to ASCII is "Tensor o",
  // meaning a string pointer was being used as a JSValue.
  // Use aligned_alloc to ensure 16-byte alignment (bit 3 = 0), so the pointer
  // goes through the MarkedBlock validation path (not the PreciseAllocation path).
  char *fake_string = static_cast<char *>(aligned_alloc(16, 64));
  memcpy(fake_string, "Tensor operation test string", 29);
  napi_value bad_value = reinterpret_cast<napi_value>(fake_string);

  napi_valuetype type;
  napi_status status = napi_typeof(env, bad_value, &type);

  if (status != napi_ok) {
    printf("PASS: napi_typeof returned error status %d for invalid pointer\n", status);
  } else {
    printf("PASS: napi_typeof did not crash for invalid pointer (returned type %d)\n", type);
  }

  free(fake_string);
  return ok(env);
}

void register_standalone_tests(Napi::Env env, Napi::Object exports) {
  REGISTER_FUNCTION(env, exports, test_issue_7685);
  REGISTER_FUNCTION(env, exports, test_issue_11949);
@@ -2186,7 +2157,6 @@ void register_standalone_tests(Napi::Env env, Napi::Object exports) {
  REGISTER_FUNCTION(env, exports, test_issue_25933);
  REGISTER_FUNCTION(env, exports, test_napi_make_callback_async_context_frame);
  REGISTER_FUNCTION(env, exports, test_napi_create_tsfn_async_context_frame);
  REGISTER_FUNCTION(env, exports, test_napi_typeof_invalid_pointer);
}

} // namespace napitests
@@ -822,24 +822,6 @@ describe("cleanup hooks", () => {
    expect(output).toContain("PASS: napi_create_threadsafe_function accepted AsyncContextFrame");
  });

  it("should not crash when given an invalid pointer as napi_value", async () => {
    // Regression test for BUN-1PYR: napi_typeof segfaults when a native
    // module passes a raw C string pointer as napi_value. The crash address
    // 0x6F20726F736E6554 decoded to "Tensor o", indicating string data was
    // being dereferenced as a JSValue.
    const { BUN_INSPECT_CONNECT_TO: _, ...rest } = bunEnv;
    await using exec = spawn({
      cmd: [bunExe(), "--expose-gc", join(__dirname, "napi-app/main.js"), "test_napi_typeof_invalid_pointer", "[]"],
      env: rest,
      stdout: "pipe",
      stderr: "pipe",
    });
    const [stdout, exitCode] = await Promise.all([new Response(exec.stdout).text(), exec.exited]);
    // napi_typeof should return an error status instead of crashing
    expect(stdout).toContain("PASS");
    expect(exitCode).toBe(0);
  });

  it("should return napi_object for boxed primitives (String, Number, Boolean)", async () => {
    // Regression test for https://github.com/oven-sh/bun/issues/25351
    // napi_typeof was incorrectly returning napi_string for String objects (new String("hello"))
@@ -1,63 +0,0 @@
// Regression test for TLS upgrade raw socket leak (#12117, #24118, #25948)
// When a TCP socket is upgraded to TLS via tls.connect({ socket }),
// both a TLS wrapper and a raw TCP wrapper are created in Zig.
// Previously, the raw socket's has_pending_activity was never set to
// false on close, causing it (and all its retained objects) to leak.

import { describe, expect, it } from "bun:test";
import { tls as COMMON_CERT, expectMaxObjectTypeCount } from "harness";
import { once } from "node:events";
import net from "node:net";
import tls from "node:tls";

describe("TLS upgrade", () => {
  it("should not leak TLSSocket objects after close", async () => {
    // Create a TLS server that echoes data and closes
    const server = tls.createServer(
      {
        key: COMMON_CERT.key,
        cert: COMMON_CERT.cert,
      },
      socket => {
        socket.end("hello");
      },
    );

    await once(server.listen(0, "127.0.0.1"), "listening");
    const port = (server.address() as net.AddressInfo).port;

    // Simulate the MongoDB driver pattern: create a plain TCP socket,
    // then upgrade it to TLS via tls.connect({ socket }).
    // Do this multiple times to accumulate leaked objects.
    const iterations = 50;

    try {
      for (let i = 0; i < iterations; i++) {
        const tcpSocket = net.createConnection({ host: "127.0.0.1", port });
        await once(tcpSocket, "connect");

        const tlsSocket = tls.connect({
          socket: tcpSocket,
          ca: COMMON_CERT.cert,
          rejectUnauthorized: false,
        });
        await once(tlsSocket, "secureConnect");

        // Read any data and destroy the TLS socket (simulates SDAM close)
        tlsSocket.on("data", () => {});
        tlsSocket.destroy();

        await once(tlsSocket, "close");
      }
    } finally {
      server.close();
      await once(server, "close");
    }

    // After all connections are closed and GC runs, the TLSSocket count
    // should be low. Before the fix, each iteration would leak 1 raw
    // TLSSocket (the TCP wrapper from upgradeTLS), accumulating over time.
    // Allow some slack for prototypes/structures (typically 2-3 baseline).
    await expectMaxObjectTypeCount(expect, "TLSSocket", 10, 1000);
  });
});
@@ -1,89 +0,0 @@
import { expect, test } from "bun:test";
import http from "node:http";

test("ClientRequest.setHeaders should not throw ERR_HTTP_HEADERS_SENT on new request", async () => {
  await using server = Bun.serve({
    port: 0,
    fetch(req) {
      return new Response(req.headers.get("x-test") ?? "missing");
    },
  });

  const { resolve, reject, promise } = Promise.withResolvers<string>();

  const req = http.request(`http://localhost:${server.port}/test`, { method: "GET" }, res => {
    let data = "";
    res.on("data", (chunk: Buffer) => {
      data += chunk.toString();
    });
    res.on("end", () => resolve(data));
  });

  req.on("error", reject);

  // This should not throw - headers haven't been sent yet
  req.setHeaders(new Headers({ "x-test": "value" }));

  req.end();

  const body = await promise;
  expect(body).toBe("value");
});

test("ClientRequest.setHeaders works with Map", async () => {
  await using server = Bun.serve({
    port: 0,
    fetch(req) {
      return new Response(req.headers.get("x-map-test") ?? "missing");
    },
  });

  const { resolve, reject, promise } = Promise.withResolvers<string>();

  const req = http.request(`http://localhost:${server.port}/test`, { method: "GET" }, res => {
    let data = "";
    res.on("data", (chunk: Buffer) => {
      data += chunk.toString();
    });
    res.on("end", () => resolve(data));
  });

  req.on("error", reject);

  req.setHeaders(new Map([["x-map-test", "map-value"]]));

  req.end();

  const body = await promise;
  expect(body).toBe("map-value");
});

test("ServerResponse.setHeaders should not throw before headers are sent", async () => {
  const { resolve, reject, promise } = Promise.withResolvers<string>();

  const server = http.createServer((req, res) => {
    // This should not throw - headers haven't been sent yet
    res.setHeaders(new Headers({ "x-custom": "server-value" }));
    res.writeHead(200);
    res.end("ok");
  });

  try {
    server.listen(0, () => {
      const port = (server.address() as any).port;
      try {
        const req = http.request(`http://localhost:${port}/test`, res => {
          resolve(res.headers["x-custom"] as string);
        });
        req.on("error", reject);
        req.end();
      } catch (e) {
        reject(e);
      }
    });

    expect(await promise).toBe("server-value");
  } finally {
    server.close();
  }
});
@@ -1,336 +0,0 @@
import { describe, expect, test } from "bun:test";
import http from "node:http";

// Regression test for https://github.com/oven-sh/bun/issues/27061
// When http.ClientRequest.write() is called more than once (streaming data in chunks),
// Bun was stripping the explicitly-set Content-Length header and switching to
// Transfer-Encoding: chunked. Node.js preserves Content-Length in all cases.

describe("node:http ClientRequest preserves explicit Content-Length", () => {
  test("with multiple req.write() calls", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const chunk1 = Buffer.alloc(100, "a");
      const chunk2 = Buffer.alloc(100, "b");
      const totalLength = chunk1.length + chunk2.length;

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        headers: {
          "Content-Length": totalLength.toString(),
        },
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(chunk1);
        req.write(chunk2);
        req.end();
      });

      const result = await promise;
      expect(result.contentLength).toBe("200");
      expect(result.transferEncoding).toBeUndefined();
      expect(result.bodyLength).toBe(200);
    } finally {
      server.close();
    }
  });

  test("with req.write() + req.end(data)", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const chunk1 = Buffer.alloc(100, "a");
      const chunk2 = Buffer.alloc(100, "b");
      const totalLength = chunk1.length + chunk2.length;

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        headers: {
          "Content-Length": totalLength.toString(),
        },
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(chunk1);
        req.end(chunk2);
      });

      const result = await promise;
      expect(result.contentLength).toBe("200");
      expect(result.transferEncoding).toBeUndefined();
      expect(result.bodyLength).toBe(200);
    } finally {
      server.close();
    }
  });

  test("with three req.write() calls", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const chunk1 = Buffer.alloc(100, "a");
      const chunk2 = Buffer.alloc(100, "b");
      const chunk3 = Buffer.alloc(100, "c");
      const totalLength = chunk1.length + chunk2.length + chunk3.length;

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        headers: {
          "Content-Length": totalLength.toString(),
        },
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(chunk1);
        req.write(chunk2);
        req.write(chunk3);
        req.end();
      });

      const result = await promise;
      expect(result.contentLength).toBe("300");
      expect(result.transferEncoding).toBeUndefined();
      expect(result.bodyLength).toBe(300);
    } finally {
      server.close();
    }
  });

  test("single req.write() still works", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const data = Buffer.alloc(200, "x");

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        headers: {
          "Content-Length": data.length.toString(),
        },
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(data);
        req.end();
      });

      const result = await promise;
      expect(result.contentLength).toBe("200");
      expect(result.transferEncoding).toBeUndefined();
      expect(result.bodyLength).toBe(200);
    } finally {
      server.close();
    }
  });

  test("without explicit Content-Length still uses chunked encoding", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const chunk1 = Buffer.alloc(100, "a");
      const chunk2 = Buffer.alloc(100, "b");

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        // No Content-Length header
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(chunk1);
        req.write(chunk2);
        req.end();
      });

      const result = await promise;
      // Without explicit Content-Length, chunked encoding should be used
      expect(result.transferEncoding).toBe("chunked");
      expect(result.bodyLength).toBe(200);
    } finally {
      server.close();
    }
  });

  test("explicit Transfer-Encoding takes precedence over Content-Length", async () => {
    const { promise, resolve } = Promise.withResolvers<{
      contentLength: string | undefined;
      transferEncoding: string | undefined;
      bodyLength: number;
    }>();

    const server = http.createServer((req, res) => {
      const chunks: Buffer[] = [];
      req.on("data", (chunk: Buffer) => chunks.push(chunk));
      req.on("end", () => {
        resolve({
          contentLength: req.headers["content-length"],
          transferEncoding: req.headers["transfer-encoding"],
          bodyLength: Buffer.concat(chunks).length,
        });
        res.writeHead(200);
        res.end("ok");
      });
    });

    await new Promise<void>(res => server.listen(0, "127.0.0.1", res));
    const port = (server.address() as any).port;

    try {
      const chunk1 = Buffer.alloc(100, "a");
      const chunk2 = Buffer.alloc(100, "b");

      const req = http.request({
        hostname: "127.0.0.1",
        port,
        method: "POST",
        headers: {
          "Content-Length": "200",
          "Transfer-Encoding": "chunked",
        },
      });

      await new Promise<void>((res, rej) => {
        req.on("error", rej);
        req.on("response", () => res());
        req.write(chunk1);
        req.write(chunk2);
        req.end();
      });

      const result = await promise;
      // When user explicitly sets Transfer-Encoding, it should be used
      // and Content-Length should not be added
      expect(result.transferEncoding).toBe("chunked");
      expect(result.contentLength).toBeUndefined();
      expect(result.bodyLength).toBe(200);
    } finally {
      server.close();
    }
  });
});
test/regression/issue/27074.test.ts (new file, 29 lines)

@@ -0,0 +1,29 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "../../harness";

// https://github.com/oven-sh/bun/issues/27074
// `bun run --bun build` fails when a pre-script uses `node --run`
test("bun run --bun works with node --run in lifecycle scripts", () => {
  const temp = tempDirWithFiles("issue-27074", {
    "package.json": JSON.stringify({
      scripts: {
        echo_test: "echo echo_test_ran",
        prebuild: "node --run echo_test",
        build: "echo build_ran",
      },
    }),
  });

  const result = Bun.spawnSync({
    cmd: [bunExe(), "run", "--bun", "build"],
    cwd: temp,
    env: bunEnv,
  });

  const stdout = result.stdout.toString("utf8").trim();
  const stderr = result.stderr.toString("utf8").trim();

  expect(stdout).toContain("echo_test_ran");
  expect(stdout).toContain("build_ran");
  expect(result.exitCode).toBe(0);
});