Compare commits

..

1 Commits

Author SHA1 Message Date
Dylan Conway
c1a9098b94 fix(shell): remove double-free in createShellInterpreter error path
When `globalThis.hasException()` is true after `Interpreter.init` succeeds,
the code was calling both `shargs.deinit()` and `interpreter.finalize()`.
Since `interpreter.args == shargs`, and `finalize()` calls `this.args.deinit()`,
this caused a double-free of the ShellArgs object, corrupting the heap.

The fix removes the redundant `shargs.deinit()` call since `finalize()`
already handles cleanup.

Fixes BUN-ZFQ

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2026-01-07 11:00:30 -08:00
58 changed files with 10075 additions and 12933 deletions

View File

@@ -419,9 +419,12 @@ execute_process(
--command=list-outputs
--sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
--codegen-path=${CODEGEN_PATH}
RESULT_VARIABLE bindgen_result
OUTPUT_VARIABLE bindgen_outputs
COMMAND_ERROR_IS_FATAL ANY
)
if(${bindgen_result})
message(FATAL_ERROR "bindgenv2/script.ts exited with non-zero status")
endif()
foreach(output IN LISTS bindgen_outputs)
if(output MATCHES "\.cpp$")
list(APPEND BUN_BINDGENV2_CPP_OUTPUTS ${output})

View File

@@ -28,7 +28,6 @@ if(WEBKIT_LOCAL)
# make jsc-compile-debug jsc-copy-headers
include_directories(
${WEBKIT_PATH}
${WEBKIT_PATH}/JavaScriptCore/Headers
${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
${WEBKIT_PATH}/bmalloc/Headers
@@ -91,14 +90,7 @@ if(EXISTS ${WEBKIT_PATH}/package.json)
endif()
endif()
file(
DOWNLOAD ${WEBKIT_DOWNLOAD_URL} ${CACHE_PATH}/${WEBKIT_FILENAME} SHOW_PROGRESS
STATUS WEBKIT_DOWNLOAD_STATUS
)
if(NOT "${WEBKIT_DOWNLOAD_STATUS}" MATCHES "^0;")
message(FATAL_ERROR "Failed to download WebKit: ${WEBKIT_DOWNLOAD_STATUS}")
endif()
file(DOWNLOAD ${WEBKIT_DOWNLOAD_URL} ${CACHE_PATH}/${WEBKIT_FILENAME} SHOW_PROGRESS)
file(ARCHIVE_EXTRACT INPUT ${CACHE_PATH}/${WEBKIT_FILENAME} DESTINATION ${CACHE_PATH} TOUCH)
file(REMOVE ${CACHE_PATH}/${WEBKIT_FILENAME})
file(REMOVE_RECURSE ${WEBKIT_PATH})

View File

@@ -41,10 +41,9 @@ endif()
# Since Bun 1.1, Windows has been built using ReleaseSafe.
# This is because it caught more crashes, but we can reconsider this in the future
# Switched back to ReleaseFast for Windows builds for performance
# if(WIN32 AND DEFAULT_ZIG_OPTIMIZE STREQUAL "ReleaseFast")
# set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
# endif()
if(WIN32 AND DEFAULT_ZIG_OPTIMIZE STREQUAL "ReleaseFast")
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
endif()
optionx(ZIG_OPTIMIZE "ReleaseFast|ReleaseSafe|ReleaseSmall|Debug" "The Zig optimize level to use" DEFAULT ${DEFAULT_ZIG_OPTIMIZE})

View File

@@ -1219,79 +1219,6 @@ declare module "bun:bundle" {
Ensure the file is included in your `tsconfig.json` (e.g., `"include": ["src", "env.d.ts"]`). Now `feature()` only accepts those flags, and invalid strings like `feature("TYPO")` become type errors.
### metafile
Generate metadata about the build in a structured format. The metafile contains information about all input files, output files, their sizes, imports, and exports. This is useful for:
- **Bundle analysis**: Understand what's contributing to bundle size
- **Visualization**: Feed into tools like [esbuild's bundle analyzer](https://esbuild.github.io/analyze/) or other visualization tools
- **Dependency tracking**: See the full import graph of your application
- **CI integration**: Track bundle size changes over time
<Tabs>
<Tab title="JavaScript">
```ts title="build.ts" icon="/icons/typescript.svg"
const result = await Bun.build({
entrypoints: ['./src/index.ts'],
outdir: './dist',
metafile: true,
});
if (result.metafile) {
// Analyze inputs
for (const [path, meta] of Object.entries(result.metafile.inputs)) {
console.log(`${path}: ${meta.bytes} bytes`);
}
// Analyze outputs
for (const [path, meta] of Object.entries(result.metafile.outputs)) {
console.log(`${path}: ${meta.bytes} bytes`);
}
// Save for external analysis tools
await Bun.write('./dist/meta.json', JSON.stringify(result.metafile));
}
```
</Tab>
<Tab title="CLI">
```bash terminal icon="terminal"
bun build ./src/index.ts --outdir ./dist --metafile ./dist/meta.json
```
</Tab>
</Tabs>
The metafile structure contains:
```ts
interface BuildMetafile {
inputs: {
[path: string]: {
bytes: number;
imports: Array<{
path: string;
kind: ImportKind;
original?: string; // Original specifier before resolution
external?: boolean;
}>;
format?: "esm" | "cjs" | "json" | "css";
};
};
outputs: {
[path: string]: {
bytes: number;
inputs: {
[path: string]: { bytesInOutput: number };
};
imports: Array<{ path: string; kind: ImportKind }>;
exports: string[];
entryPoint?: string;
cssBundle?: string; // Associated CSS file for JS entry points
};
};
}
```
## Outputs
The `Bun.build` function returns a `Promise<BuildOutput>`, defined as:
@@ -1301,7 +1228,6 @@ interface BuildOutput {
outputs: BuildArtifact[];
success: boolean;
logs: Array<object>; // see docs for details
metafile?: BuildMetafile; // only when metafile: true
}
interface BuildArtifact extends Blob {

View File

@@ -127,54 +127,3 @@ const socket = await Bun.udpSocket({
},
});
```
### Socket options
UDP sockets support setting various socket options:
```ts
const socket = await Bun.udpSocket({});
// Enable broadcasting to send packets to a broadcast address
socket.setBroadcast(true);
// Set the IP TTL (time to live) for outgoing packets
socket.setTTL(64);
```
### Multicast
Bun supports multicast operations for UDP sockets. Use `addMembership` and `dropMembership` to join and leave multicast groups:
```ts
const socket = await Bun.udpSocket({});
// Join a multicast group
socket.addMembership("224.0.0.1");
// Join with a specific interface
socket.addMembership("224.0.0.1", "192.168.1.100");
// Leave a multicast group
socket.dropMembership("224.0.0.1");
```
Additional multicast options:
```ts
// Set TTL for multicast packets (number of network hops)
socket.setMulticastTTL(2);
// Control whether multicast packets loop back to the local socket
socket.setMulticastLoopback(true);
// Specify which interface to use for outgoing multicast packets
socket.setMulticastInterface("192.168.1.100");
```
For source-specific multicast (SSM), use `addSourceSpecificMembership` and `dropSourceSpecificMembership`:
```ts
socket.addSourceSpecificMembership("10.0.0.1", "232.0.0.1");
socket.dropSourceSpecificMembership("10.0.0.1", "232.0.0.1");
```

View File

@@ -1952,38 +1952,6 @@ declare module "bun" {
*/
reactFastRefresh?: boolean;
/**
* Generate a JSON file containing metadata about the build.
*
* The metafile contains information about inputs, outputs, imports, and exports
* which can be used for bundle analysis, visualization, or integration with
* other tools.
*
* When `true`, the metafile JSON string is included in the {@link BuildOutput.metafile} property.
*
* @default false
*
* @example
* ```ts
* const result = await Bun.build({
* entrypoints: ['./src/index.ts'],
* outdir: './dist',
* metafile: true,
* });
*
* // Write metafile to disk for analysis
* if (result.metafile) {
* await Bun.write('./dist/meta.json', result.metafile);
* }
*
* // Parse and analyze the metafile
* const meta = JSON.parse(result.metafile!);
* console.log('Input files:', Object.keys(meta.inputs));
* console.log('Output files:', Object.keys(meta.outputs));
* ```
*/
metafile?: boolean;
outdir?: string;
}
@@ -2635,106 +2603,6 @@ declare module "bun" {
outputs: BuildArtifact[];
success: boolean;
logs: Array<BuildMessage | ResolveMessage>;
/**
* Metadata about the build including inputs, outputs, and their relationships.
*
* Only present when {@link BuildConfig.metafile} is `true`.
*
* The metafile contains detailed information about:
* - **inputs**: All source files that were bundled, their byte sizes, imports, and format
* - **outputs**: All generated output files, their byte sizes, which inputs contributed to each output, imports between chunks, and exports
*
* This can be used for:
* - Bundle size analysis and visualization
* - Detecting unused code or dependencies
* - Understanding the dependency graph
* - Integration with bundle analyzer tools
*
* @example
* ```ts
* const result = await Bun.build({
* entrypoints: ['./src/index.ts'],
* outdir: './dist',
* metafile: true,
* });
*
* if (result.metafile) {
* // Analyze input files
* for (const [path, input] of Object.entries(result.metafile.inputs)) {
* console.log(`${path}: ${input.bytes} bytes, ${input.imports.length} imports`);
* }
*
* // Analyze output files
* for (const [path, output] of Object.entries(result.metafile.outputs)) {
* console.log(`${path}: ${output.bytes} bytes`);
* for (const [inputPath, info] of Object.entries(output.inputs)) {
* console.log(` - ${inputPath}: ${info.bytesInOutput} bytes`);
* }
* }
*
* // Write to disk for external analysis tools
* await Bun.write('./dist/meta.json', JSON.stringify(result.metafile));
* }
* ```
*/
metafile?: BuildMetafile;
}
/**
* Metafile structure containing build metadata for analysis.
*
* @category Bundler
*/
interface BuildMetafile {
/** Information about all input source files */
inputs: {
[path: string]: {
/** Size of the input file in bytes */
bytes: number;
/** List of imports from this file */
imports: Array<{
/** Resolved path of the imported file */
path: string;
/** Type of import statement */
kind: ImportKind;
/** Original import specifier before resolution (if different from path) */
original?: string;
/** Whether this import is external to the bundle */
external?: boolean;
/** Import attributes (e.g., `{ type: "json" }`) */
with?: Record<string, string>;
}>;
/** Module format of the input file */
format?: "esm" | "cjs" | "json" | "css";
};
};
/** Information about all output files */
outputs: {
[path: string]: {
/** Size of the output file in bytes */
bytes: number;
/** Map of input files to their contribution in this output */
inputs: {
[path: string]: {
/** Number of bytes this input contributed to the output */
bytesInOutput: number;
};
};
/** List of imports to other chunks */
imports: Array<{
/** Path to the imported chunk */
path: string;
/** Type of import */
kind: ImportKind;
}>;
/** List of exported names from this output */
exports: string[];
/** Entry point path if this output is an entry point */
entryPoint?: string;
/** Path to the associated CSS bundle (for JS entry points with CSS) */
cssBundle?: string;
};
};
}
/**
@@ -5470,67 +5338,6 @@ declare module "bun" {
ref(): void;
unref(): void;
close(): void;
/**
* Enable or disable SO_BROADCAST socket option.
* @param enabled Whether to enable broadcast
* @returns The enabled value
*/
setBroadcast(enabled: boolean): boolean;
/**
* Set the IP_TTL socket option.
* @param ttl Time to live value
* @returns The TTL value
*/
setTTL(ttl: number): number;
/**
* Set the IP_MULTICAST_TTL socket option.
* @param ttl Time to live value for multicast packets
* @returns The TTL value
*/
setMulticastTTL(ttl: number): number;
/**
* Enable or disable IP_MULTICAST_LOOP socket option.
* @param enabled Whether to enable multicast loopback
* @returns The enabled value
*/
setMulticastLoopback(enabled: boolean): boolean;
/**
* Set the IP_MULTICAST_IF socket option to specify the outgoing interface
* for multicast packets.
* @param interfaceAddress The address of the interface to use
* @returns true on success
*/
setMulticastInterface(interfaceAddress: string): boolean;
/**
* Join a multicast group.
* @param multicastAddress The multicast group address
* @param interfaceAddress Optional interface address to use
* @returns true on success
*/
addMembership(multicastAddress: string, interfaceAddress?: string): boolean;
/**
* Leave a multicast group.
* @param multicastAddress The multicast group address
* @param interfaceAddress Optional interface address to use
* @returns true on success
*/
dropMembership(multicastAddress: string, interfaceAddress?: string): boolean;
/**
* Join a source-specific multicast group.
* @param sourceAddress The source address
* @param groupAddress The multicast group address
* @param interfaceAddress Optional interface address to use
* @returns true on success
*/
addSourceSpecificMembership(sourceAddress: string, groupAddress: string, interfaceAddress?: string): boolean;
/**
* Leave a source-specific multicast group.
* @param sourceAddress The source address
* @param groupAddress The multicast group address
* @param interfaceAddress Optional interface address to use
* @returns true on success
*/
dropSourceSpecificMembership(sourceAddress: string, groupAddress: string, interfaceAddress?: string): boolean;
}
export interface ConnectedSocket<DataBinaryType extends BinaryType> extends BaseUDPSocket {

View File

@@ -1128,7 +1128,6 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
...process.env,
PATH: path,
TMPDIR: tmpdirPath,
BUN_TMPDIR: tmpdirPath,
USER: username,
HOME: homedir,
SHELL: shellPath,

View File

@@ -8,5 +8,4 @@ Syntax reminders:
Conventions:
- Prefer `@import` at the **bottom** of the file, but the auto formatter will move them so you don't need to worry about it.
- **Never** use `@import()` inline inside of functions. **Always** put them at the bottom of the file or containing struct. Imports in Zig are free of side-effects, so there's no such thing as a "dynamic" import.
- You must be patient with the build.

View File

@@ -74,7 +74,7 @@ pub const FileOperation = struct {
pub fn getPathname(file: *const FileOperation) string {
if (file.is_tmpdir) {
return resolve_path.joinAbs(Fs.FileSystem.RealFS.tmpdirPath(), .auto, file.pathname);
return resolve_path.joinAbs(@TypeOf(Fs.FileSystem.instance.fs).tmpdir_path, .auto, file.pathname);
} else {
return file.pathname;
}

View File

@@ -670,7 +670,7 @@ pub const StandaloneModuleGraph = struct {
if (!tried_changing_abs_dir) {
tried_changing_abs_dir = true;
const zname_z = bun.strings.concat(bun.default_allocator, &.{
bun.fs.FileSystem.RealFS.tmpdirPath(),
bun.fs.FileSystem.instance.fs.tmpdirPath(),
std.fs.path.sep_str,
zname,
&.{0},

View File

@@ -1494,7 +1494,7 @@ pub const Parser = struct {
var state: PragmaState = .{};
while (cursor < end) : (cursor += 1) {
while (cursor < self.lexer.end) : (cursor += 1) {
switch (contents[cursor]) {
'\n' => break,
'@' => {

View File

@@ -94,7 +94,8 @@ static JSC::JSInternalPromise* rejectedInternalPromise(JSC::JSGlobalObject* glob
{
auto& vm = JSC::getVM(globalObject);
JSC::JSInternalPromise* promise = JSC::JSInternalPromise::create(vm, globalObject->internalPromiseStructure());
promise->rejectAsHandled(vm, globalObject, value);
promise->internalField(JSC::JSPromise::Field::ReactionsOrResult).set(vm, promise, value);
promise->internalField(JSC::JSPromise::Field::Flags).set(vm, promise, JSC::jsNumber(promise->internalField(JSC::JSPromise::Field::Flags).get().asUInt32AsAnyInt() | JSC::JSPromise::isFirstResolvingFunctionCalledFlag | static_cast<unsigned>(JSC::JSPromise::Status::Rejected)));
return promise;
}
@@ -102,7 +103,8 @@ static JSC::JSInternalPromise* resolvedInternalPromise(JSC::JSGlobalObject* glob
{
auto& vm = JSC::getVM(globalObject);
JSC::JSInternalPromise* promise = JSC::JSInternalPromise::create(vm, globalObject->internalPromiseStructure());
promise->fulfill(vm, globalObject, value);
promise->internalField(JSC::JSPromise::Field::ReactionsOrResult).set(vm, promise, value);
promise->internalField(JSC::JSPromise::Field::Flags).set(vm, promise, JSC::jsNumber(promise->internalField(JSC::JSPromise::Field::Flags).get().asUInt32AsAnyInt() | JSC::JSPromise::isFirstResolvingFunctionCalledFlag | static_cast<unsigned>(JSC::JSPromise::Status::Fulfilled)));
return promise;
}

View File

@@ -92,7 +92,7 @@ pub fn resolveEmbeddedFile(vm: *VirtualMachine, path_buf: *bun.PathBuffer, linux
},
else => {},
}
return bun.path.joinAbsStringBuf(bun.fs.FileSystem.RealFS.tmpdirPath(), path_buf, &[_]string{tmpfilename}, .auto);
return bun.path.joinAbsStringBuf(bun.fs.FileSystem.instance.fs.tmpdirPath(), path_buf, &[_]string{tmpfilename}, .auto);
}
pub export fn Bun__getDefaultLoader(global: *JSGlobalObject, str: *const bun.String) api.Loader {

View File

@@ -423,7 +423,7 @@ pub const RuntimeTranspilerCache = struct {
}
{
const parts = &[_][]const u8{ bun.fs.FileSystem.RealFS.tmpdirPath(), "bun", "@t@" };
const parts = &[_][]const u8{ bun.fs.FileSystem.instance.fs.tmpdirPath(), "bun", "@t@" };
return bun.fs.FileSystem.instance.absBufZ(parts, buf);
}
}

View File

@@ -46,7 +46,6 @@ pub const JSBundler = struct {
env_prefix: OwnedString = OwnedString.initEmpty(bun.default_allocator),
tsconfig_override: OwnedString = OwnedString.initEmpty(bun.default_allocator),
compile: ?CompileOptions = null,
metafile: bool = false,
pub const CompileOptions = struct {
compile_target: CompileTarget = .{},
@@ -709,10 +708,6 @@ pub const JSBundler = struct {
this.throw_on_error = flag;
}
if (try config.getBooleanLoose(globalThis, "metafile")) |flag| {
this.metafile = flag;
}
if (try CompileOptions.fromJS(
globalThis,
config,

View File

@@ -1,70 +0,0 @@
/**
* Lazy getter for BuildOutput.metafile that parses JSON on first access.
* Uses CustomValue so the parsed result replaces the getter.
*/
#include "root.h"
#include "BunBuiltinNames.h"
#include "ZigGlobalObject.h"
#include <JavaScriptCore/CustomGetterSetter.h>
#include <JavaScriptCore/JSCJSValueInlines.h>
#include <JavaScriptCore/JSONObject.h>
namespace Bun {
using namespace JSC;
// Custom getter installed on BuildOutput.metafile. On first access it parses
// the raw metafile JSON string (stashed in a private slot by
// Bun__setupLazyMetafile below), memoizes the parsed object in place of the
// getter, and releases the raw string. Subsequent reads hit the plain
// property and never re-enter this function.
// Returns: the parsed metafile object, or undefined for a non-object receiver.
// Throws: propagates any JSON parse / string-materialization exception.
JSC_DEFINE_CUSTOM_GETTER(bundlerMetafileLazyGetter, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName property))
{
auto& vm = JSC::getVM(globalObject);
auto scope = DECLARE_THROW_SCOPE(vm);
JSObject* thisObject = JSValue::decode(thisValue).getObject();
// Receiver may not be an object (e.g. getter extracted and re-applied);
// bail out quietly rather than crash.
if (!thisObject) {
return JSValue::encode(jsUndefined());
}
// Get the raw JSON string from private property
const auto& privateName = Bun::builtinNames(vm).dataPrivateName();
JSValue metafileStringValue = thisObject->getDirect(vm, privateName);
ASSERT(metafileStringValue.isString());
auto str = metafileStringValue.toString(globalObject);
RETURN_IF_EXCEPTION(scope, {});
auto view = str->view(globalObject);
RETURN_IF_EXCEPTION(scope, {});
// Parse the stored JSON; JSONParseWithException raises on malformed input
// instead of returning an empty JSValue.
JSValue parsedValue = JSC::JSONParseWithException(globalObject, view);
RETURN_IF_EXCEPTION(scope, {});
// Replace the lazy getter with the parsed value (memoize for subsequent accesses)
thisObject->putDirect(vm, property, parsedValue, 0);
// Clear the raw JSON string so it can be GC'd
thisObject->putDirect(vm, privateName, jsUndefined(), 0);
return JSValue::encode(parsedValue);
}
// Helper to set up the lazy metafile on a BuildOutput object
extern "C" SYSV_ABI void Bun__setupLazyMetafile(JSC::JSGlobalObject* globalObject, JSC::EncodedJSValue buildOutputEncoded, JSC::EncodedJSValue metafileStringEncoded)
{
auto& vm = JSC::getVM(globalObject);
JSObject* buildOutput = JSValue::decode(buildOutputEncoded).getObject();
ASSERT(buildOutput);
// Store the raw JSON string in a private property
const auto& privateName = Bun::builtinNames(vm).dataPrivateName();
buildOutput->putDirect(vm, privateName, JSValue::decode(metafileStringEncoded), 0);
// Set up the lazy getter
buildOutput->putDirectCustomAccessor(
vm,
Identifier::fromString(vm, "metafile"_s),
CustomGetterSetter::create(vm, bundlerMetafileLazyGetter, nullptr),
PropertyAttribute::CustomValue | 0);
}
} // namespace Bun

View File

@@ -79,7 +79,8 @@ static JSC::JSInternalPromise* resolvedInternalPromise(JSC::JSGlobalObject* glob
{
auto& vm = JSC::getVM(globalObject);
JSInternalPromise* promise = JSInternalPromise::create(vm, globalObject->internalPromiseStructure());
promise->fulfill(vm, globalObject, value);
promise->internalField(JSC::JSPromise::Field::ReactionsOrResult).set(vm, promise, value);
promise->internalField(JSC::JSPromise::Field::Flags).set(vm, promise, jsNumber(promise->internalField(JSC::JSPromise::Field::Flags).get().asUInt32AsAnyInt() | JSC::JSPromise::isFirstResolvingFunctionCalledFlag | static_cast<unsigned>(JSC::JSPromise::Status::Fulfilled)));
return promise;
}
@@ -677,7 +678,8 @@ JSValue fetchCommonJSModule(
JSPromise* promise = jsCast<JSPromise*>(promiseOrCommonJSModule);
switch (promise->status()) {
case JSPromise::Status::Rejected: {
promise->markAsHandled();
uint32_t promiseFlags = promise->internalField(JSPromise::Field::Flags).get().asUInt32AsAnyInt();
promise->internalField(JSPromise::Field::Flags).set(vm, promise, jsNumber(promiseFlags | JSPromise::isHandledFlag));
JSC::throwException(globalObject, scope, promise->result());
RELEASE_AND_RETURN(scope, JSValue {});
}
@@ -727,7 +729,8 @@ JSValue fetchCommonJSModule(
JSPromise* promise = jsCast<JSPromise*>(promiseOrCommonJSModule);
switch (promise->status()) {
case JSPromise::Status::Rejected: {
promise->markAsHandled();
uint32_t promiseFlags = promise->internalField(JSPromise::Field::Flags).get().asUInt32AsAnyInt();
promise->internalField(JSPromise::Field::Flags).set(vm, promise, jsNumber(promiseFlags | JSPromise::isHandledFlag));
JSC::throwException(globalObject, scope, promise->result());
RELEASE_AND_RETURN(scope, JSValue {});
}

View File

@@ -188,7 +188,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel
case DataCellTag::Json: {
if (cell.value.json) {
auto str = WTF::String(cell.value.json);
JSC::JSValue json = JSC::JSONParseWithException(globalObject, str);
JSC::JSValue json = JSC::JSONParse(globalObject, str);
RETURN_IF_EXCEPTION(scope, {});
return json;
}

View File

@@ -3293,7 +3293,8 @@ static JSC::JSInternalPromise* rejectedInternalPromise(JSC::JSGlobalObject* glob
{
auto& vm = JSC::getVM(globalObject);
JSInternalPromise* promise = JSInternalPromise::create(vm, globalObject->internalPromiseStructure());
promise->rejectAsHandled(vm, globalObject, value);
promise->internalField(JSC::JSPromise::Field::ReactionsOrResult).set(vm, promise, value);
promise->internalField(JSC::JSPromise::Field::Flags).set(vm, promise, jsNumber(promise->internalField(JSC::JSPromise::Field::Flags).get().asUInt32AsAnyInt() | JSC::JSPromise::isFirstResolvingFunctionCalledFlag | static_cast<unsigned>(JSC::JSPromise::Status::Rejected)));
return promise;
}

View File

@@ -149,7 +149,9 @@ pub fn BundleThread(CompletionStruct: type) type {
completion.log = out_log;
}
completion.result = .{ .value = try this.runFromJSInNewThread(transpiler.options.entry_points) };
completion.result = .{ .value = .{
.output_files = try this.runFromJSInNewThread(transpiler.options.entry_points),
} };
var out_log = Logger.Log.init(bun.default_allocator);
bun.handleOom(this.transpiler.log.appendToWithRecycled(&out_log, true));

View File

@@ -9,9 +9,7 @@ pub const Chunk = struct {
/// for more info on this technique.
unique_key: string = "",
/// Maps source index to bytes contributed to this chunk's output (for metafile).
/// The value is updated during chunk generation to track bytesInOutput.
files_with_parts_in_chunk: std.AutoArrayHashMapUnmanaged(Index.Int, usize) = .{},
files_with_parts_in_chunk: std.AutoArrayHashMapUnmanaged(Index.Int, void) = .{},
/// We must not keep pointers to this type until all chunks have been allocated.
entry_bits: AutoBitSet = undefined,
@@ -36,10 +34,6 @@ pub const Chunk = struct {
compile_results_for_chunk: []CompileResult = &.{},
/// Pre-built JSON fragment for this chunk's metafile output entry.
/// Generated during parallel chunk generation, joined at the end.
metafile_chunk_json: []const u8 = "",
/// Pack boolean flags to reduce padding overhead.
/// Previously 3 separate bool fields caused ~21 bytes of padding waste.
flags: Flags = .{},

View File

@@ -4,7 +4,6 @@ pub const LinkerContext = struct {
pub const OutputFileListBuilder = @import("./linker_context/OutputFileListBuilder.zig");
pub const StaticRouteVisitor = @import("./linker_context/StaticRouteVisitor.zig");
pub const MetafileBuilder = @import("./linker_context/MetafileBuilder.zig");
parse_graph: *Graph = undefined,
graph: LinkerGraph = undefined,
@@ -70,7 +69,6 @@ pub const LinkerContext = struct {
css_chunking: bool = false,
source_maps: options.SourceMapOption = .none,
target: options.Target = .browser,
metafile: bool = false,
mode: Mode = .bundle,

View File

@@ -929,7 +929,6 @@ pub const BundleV2 = struct {
this.linker.options.target = transpiler.options.target;
this.linker.options.output_format = transpiler.options.output_format;
this.linker.options.generate_bytecode_cache = transpiler.options.bytecode;
this.linker.options.metafile = transpiler.options.metafile;
this.linker.dev_server = transpiler.options.dev_server;
@@ -1482,7 +1481,7 @@ pub const BundleV2 = struct {
minify_duration: *u64,
source_code_size: *u64,
fetcher: ?*DependenciesScanner,
) !BuildResult {
) !std.array_list.Managed(options.OutputFile) {
var this = try BundleV2.init(
transpiler,
null,
@@ -1536,27 +1535,10 @@ pub const BundleV2 = struct {
// Do this at the very end, after processing all the imports/exports so that we can follow exports as needed.
if (fetcher) |fetch| {
try this.getAllDependencies(reachable_files, fetch);
return .{
.output_files = std.array_list.Managed(options.OutputFile).init(alloc),
.metafile = null,
};
return std.array_list.Managed(options.OutputFile).init(alloc);
}
const output_files = try this.linker.generateChunksInParallel(chunks, false);
// Generate metafile if requested
const metafile: ?[]const u8 = if (this.linker.options.metafile)
LinkerContext.MetafileBuilder.generate(bun.default_allocator, &this.linker, chunks) catch |err| blk: {
bun.Output.warn("Failed to generate metafile: {s}", .{@errorName(err)});
break :blk null;
}
else
null;
return .{
.output_files = output_files,
.metafile = metafile,
};
return try this.linker.generateChunksInParallel(chunks, false);
}
pub fn generateFromBakeProductionCLI(
@@ -1768,7 +1750,6 @@ pub const BundleV2 = struct {
pub const BuildResult = struct {
output_files: std.array_list.Managed(options.OutputFile),
metafile: ?[]const u8 = null,
pub fn deinit(this: *BuildResult) void {
for (this.output_files.items) |*output_file| {
@@ -1776,11 +1757,6 @@ pub const BundleV2 = struct {
}
this.output_files.clearAndFree();
if (this.metafile) |mf| {
bun.default_allocator.free(mf);
this.metafile = null;
}
}
};
@@ -1929,7 +1905,6 @@ pub const BundleV2 = struct {
transpiler.options.banner = config.banner.slice();
transpiler.options.footer = config.footer.slice();
transpiler.options.react_fast_refresh = config.react_fast_refresh;
transpiler.options.metafile = config.metafile;
if (transpiler.options.compile) {
// Emitting DCE annotations is nonsensical in --compile.
@@ -2230,7 +2205,7 @@ pub const BundleV2 = struct {
return promise.reject(globalThis, err);
};
}
const build_output = jsc.JSValue.createEmptyObject(globalThis, 4);
const build_output = jsc.JSValue.createEmptyObject(globalThis, 3);
build_output.put(globalThis, jsc.ZigString.static("outputs"), output_files_js);
build_output.put(globalThis, jsc.ZigString.static("success"), .true);
build_output.put(
@@ -2241,15 +2216,6 @@ pub const BundleV2 = struct {
},
);
// Add metafile if it was generated (lazy parsing via getter)
if (build.metafile) |metafile| {
const metafile_js_str = bun.String.createUTF8ForJS(globalThis, metafile) catch |err| {
return promise.reject(globalThis, err);
};
// Set up lazy getter that parses JSON on first access and memoizes
Bun__setupLazyMetafile(globalThis, build_output, metafile_js_str);
}
const didHandleCallbacks = if (this.plugins) |plugin| runOnEndCallbacks(globalThis, plugin, promise, build_output, .js_undefined) catch |err| {
return promise.reject(globalThis, err);
} else false;
@@ -2637,7 +2603,7 @@ pub const BundleV2 = struct {
pub fn runFromJSInNewThread(
this: *BundleV2,
entry_points: []const []const u8,
) !BuildResult {
) !std.array_list.Managed(options.OutputFile) {
this.unique_key = generateUniqueKey();
if (this.transpiler.log.errors > 0) {
@@ -2684,21 +2650,7 @@ pub const BundleV2 = struct {
return error.BuildFailed;
}
const output_files = try this.linker.generateChunksInParallel(chunks, false);
// Generate metafile if requested
const metafile: ?[]const u8 = if (this.linker.options.metafile)
LinkerContext.MetafileBuilder.generate(bun.default_allocator, &this.linker, chunks) catch |err| blk: {
bun.Output.warn("Failed to generate metafile: {s}", .{@errorName(err)});
break :blk null;
}
else
null;
return .{
.output_files = output_files,
.metafile = metafile,
};
return try this.linker.generateChunksInParallel(chunks, false);
}
fn shouldAddWatcherPlugin(bv2: *BundleV2, namespace: []const u8, path: []const u8) bool {
@@ -3149,11 +3101,6 @@ pub const BundleV2 = struct {
var last_error: ?anyerror = null;
outer: for (ast.import_records.slice(), 0..) |*import_record, i| {
// Preserve original import specifier before resolution modifies path
if (import_record.original_path.len == 0) {
import_record.original_path = import_record.path.text;
}
if (
// Don't resolve TypeScript types
import_record.flags.is_unused or
@@ -4622,10 +4569,6 @@ pub const Graph = @import("./Graph.zig");
const string = []const u8;
// C++ binding for lazy metafile getter (defined in BundlerMetafile.cpp)
// Uses jsc.conv (SYSV_ABI on Windows x64) for proper calling convention
extern "C" fn Bun__setupLazyMetafile(globalThis: *jsc.JSGlobalObject, buildOutput: jsc.JSValue, metafileString: jsc.JSValue) callconv(jsc.conv) void;
const options = @import("../options.zig");
const bun = @import("bun");

View File

@@ -1,354 +0,0 @@
//! MetafileBuilder generates metafile JSON output compatible with esbuild's format.
//!
//! The metafile format is:
//! ```json
//! {
//! "inputs": {
//! "path/to/file.js": {
//! "bytes": 1234,
//! "imports": [
//! { "path": "dependency.js", "kind": "import-statement" },
//! { "path": "external", "kind": "require-call", "external": true }
//! ],
//! "format": "esm"
//! }
//! },
//! "outputs": {
//! "path/to/output.js": {
//! "bytes": 5678,
//! "inputs": {
//! "path/to/file.js": { "bytesInOutput": 1200 }
//! },
//! "imports": [
//! { "path": "chunk.js", "kind": "import-statement" }
//! ],
//! "exports": ["default", "foo"],
//! "entryPoint": "path/to/file.js"
//! }
//! }
//! }
//! ```
const MetafileBuilder = @This();
/// Generates the JSON fragment for a single output chunk.
/// Called during parallel chunk generation in postProcessJSChunk/postProcessCSSChunk.
/// The result is stored in chunk.metafile_chunk_json and assembled later.
///
/// The fragment is one member of the metafile's "outputs" object:
///   "<final_rel_path>": { "bytes": N, "inputs": {...}, "imports": [...],
///                         "exports": [...], "entryPoint"?, "cssBundle"? }
/// Returns an owned slice allocated from `allocator`; the caller frees it.
pub fn generateChunkJson(
allocator: std.mem.Allocator,
c: *const LinkerContext,
chunk: *const Chunk,
chunks: []const Chunk,
) ![]const u8 {
var json = std.array_list.Managed(u8).init(allocator);
// Release the partially-built buffer if any write below fails.
errdefer json.deinit();
const writer = json.writer();
const sources = c.parse_graph.input_files.items(.source);
// Start chunk entry: "path/to/output.js": {
try writeJSONString(writer, chunk.final_rel_path);
try writer.writeAll(": {");
// Write bytes
const chunk_bytes = chunk.intermediate_output.getSize();
try writer.print("\n \"bytes\": {d}", .{chunk_bytes});
// Write inputs for this output (bytesInOutput is pre-computed during chunk generation)
try writer.writeAll(",\n \"inputs\": {");
var first_chunk_input = true;
var chunk_iter = chunk.files_with_parts_in_chunk.iterator();
while (chunk_iter.next()) |entry| {
const file_source_index = entry.key_ptr.*;
const bytes_in_output = entry.value_ptr.*;
// Defensive bounds check: skip indices that don't map to a known source.
if (file_source_index >= sources.len) continue;
// The injected runtime module is never reported as a user input.
if (file_source_index == Index.runtime.get()) continue;
const file_source = &sources[file_source_index];
// Skip synthetic sources with no printable path.
if (file_source.path.text.len == 0) continue;
const file_path = file_source.path.pretty;
if (file_path.len == 0) continue;
// Comma-separate entries after the first one.
if (!first_chunk_input) {
try writer.writeAll(",");
}
first_chunk_input = false;
try writer.writeAll("\n ");
try writeJSONString(writer, file_path);
try writer.print(": {{\n \"bytesInOutput\": {d}\n }}", .{bytes_in_output});
}
try writer.writeAll("\n }");
// Write cross-chunk imports
try writer.writeAll(",\n \"imports\": [");
var first_chunk_import = true;
for (chunk.cross_chunk_imports.slice()) |cross_import| {
// Bounds check to prevent OOB access from corrupted data
if (cross_import.chunk_index >= chunks.len) continue;
if (!first_chunk_import) {
try writer.writeAll(",");
}
first_chunk_import = false;
const imported_chunk = &chunks[cross_import.chunk_index];
try writer.writeAll("\n {\n \"path\": ");
try writeJSONString(writer, imported_chunk.final_rel_path);
try writer.writeAll(",\n \"kind\": ");
try writeJSONString(writer, cross_import.import_kind.label());
try writer.writeAll("\n }");
}
try writer.writeAll("\n ]");
// Write exports and entry point if applicable
// Use sorted_and_filtered_export_aliases for deterministic output and to exclude internal exports
try writer.writeAll(",\n \"exports\": [");
if (chunk.entry_point.is_entry_point) {
const entry_source_index = chunk.entry_point.source_index;
// Use sources.len as the authoritative bounds check
// NOTE(review): assumes c.graph.meta has at least sources.len entries -- confirm.
if (entry_source_index < sources.len) {
const sorted_exports = c.graph.meta.items(.sorted_and_filtered_export_aliases)[entry_source_index];
var first_export = true;
for (sorted_exports) |alias| {
if (!first_export) {
try writer.writeAll(",");
}
first_export = false;
try writer.writeAll("\n ");
try writeJSONString(writer, alias);
}
// Put the closing bracket on its own line only when the array is non-empty.
if (!first_export) {
try writer.writeAll("\n ");
}
}
}
try writer.writeAll("]");
// Write entry point path
if (chunk.entry_point.is_entry_point) {
const entry_source_index = chunk.entry_point.source_index;
if (entry_source_index < sources.len) {
const entry_source = &sources[entry_source_index];
// Only emit "entryPoint" when the source has a printable path.
if (entry_source.path.text.len > 0 and entry_source.path.pretty.len > 0) {
try writer.writeAll(",\n \"entryPoint\": ");
try writeJSONString(writer, entry_source.path.pretty);
}
}
}
// Write cssBundle if this JS chunk has associated CSS
if (chunk.content == .javascript) {
const css_chunks = chunk.content.javascript.css_chunks;
if (css_chunks.len > 0) {
// Get the first CSS chunk path.
// NOTE(review): only the first associated CSS chunk is reported; any
// additional CSS chunks are omitted from "cssBundle" -- confirm intended.
const css_chunk_index = css_chunks[0];
if (css_chunk_index < chunks.len) {
const css_chunk = &chunks[css_chunk_index];
if (css_chunk.final_rel_path.len > 0) {
try writer.writeAll(",\n \"cssBundle\": ");
try writeJSONString(writer, css_chunk.final_rel_path);
}
}
}
}
try writer.writeAll("\n }");
// Transfer ownership of the buffer to the caller.
return json.toOwnedSlice();
}
/// Assembles the final metafile JSON from pre-built chunk fragments.
/// Called after all chunks have been generated in parallel.
/// Chunk references (unique_keys) are resolved to their final output paths.
/// The caller is responsible for freeing the returned slice.
///
/// Output shape: { "inputs": { path: { bytes, imports[], format? } },
///                 "outputs": { ...per-chunk fragments from generateChunkJson... } }
pub fn generate(
allocator: std.mem.Allocator,
c: *LinkerContext,
chunks: []Chunk,
) ![]const u8 {
// Use StringJoiner so we can use breakOutputIntoPieces to resolve chunk references
var j = StringJoiner{
.allocator = allocator,
};
errdefer j.deinit();
j.pushStatic("{\n \"inputs\": {");
// Collect all input files that are reachable
var first_input = true;
const sources = c.parse_graph.input_files.items(.source);
const loaders = c.parse_graph.input_files.items(.loader);
const import_records_list = c.parse_graph.ast.items(.import_records);
// Iterate through all files in chunks to collect unique source indices
var seen_sources = try std.DynamicBitSet.initEmpty(allocator, sources.len);
defer seen_sources.deinit();
// Mark all files that appear in chunks
for (chunks) |*chunk| {
var iter = chunk.files_with_parts_in_chunk.iterator();
while (iter.next()) |entry| {
const source_index = entry.key_ptr.*;
if (source_index < sources.len) {
seen_sources.set(source_index);
}
}
}
// Write inputs in ascending source-index order for deterministic output.
var source_index: u32 = 0;
while (source_index < sources.len) : (source_index += 1) {
if (!seen_sources.isSet(source_index)) continue;
// Skip runtime and other special files
if (source_index == Index.runtime.get()) continue;
const source = &sources[source_index];
if (source.path.text.len == 0) continue;
const path = source.path.pretty;
if (path.len == 0) continue;
if (!first_input) {
j.pushStatic(",");
}
first_input = false;
j.pushStatic("\n ");
j.push(try std.fmt.allocPrint(allocator, "{f}", .{bun.fmt.formatJSONStringUTF8(path, .{})}), allocator);
j.push(try std.fmt.allocPrint(allocator, ": {{\n \"bytes\": {d}", .{source.contents.len}), allocator);
// Write imports
j.pushStatic(",\n \"imports\": [");
if (source_index < import_records_list.len) {
const import_records = import_records_list[source_index];
var first_import = true;
for (import_records.slice()) |record| {
// Internal (bundler-synthesized) records are not user-visible imports.
if (record.kind == .internal) continue;
if (!first_import) {
j.pushStatic(",");
}
first_import = false;
j.pushStatic("\n {\n \"path\": ");
// Write path with JSON escaping - chunk references (unique_keys) will be resolved
// by breakOutputIntoPieces and code() below
j.push(try std.fmt.allocPrint(allocator, "{f}", .{bun.fmt.formatJSONStringUTF8(record.path.text, .{})}), allocator);
j.pushStatic(",\n \"kind\": \"");
j.pushStatic(record.kind.label());
j.pushStatic("\"");
// Add "original" field if different from path
if (record.original_path.len > 0 and !std.mem.eql(u8, record.original_path, record.path.text)) {
j.pushStatic(",\n \"original\": ");
j.push(try std.fmt.allocPrint(allocator, "{f}", .{bun.fmt.formatJSONStringUTF8(record.original_path, .{})}), allocator);
}
// Add "external": true for external imports
// NOTE(review): an invalid source_index is treated as external here --
// confirm unresolved-but-not-external records cannot reach this point.
if (record.flags.is_external_without_side_effects or !record.source_index.isValid()) {
j.pushStatic(",\n \"external\": true");
}
// Add "with" for import attributes (json, toml, text loaders)
if (record.source_index.isValid() and record.source_index.get() < loaders.len) {
const loader = loaders[record.source_index.get()];
const with_type: ?[]const u8 = switch (loader) {
.json => "json",
.toml => "toml",
.text => "text",
else => null,
};
if (with_type) |wt| {
j.pushStatic(",\n \"with\": { \"type\": \"");
j.pushStatic(wt);
j.pushStatic("\" }");
}
}
j.pushStatic("\n }");
}
}
j.pushStatic("\n ]");
// Write format based on exports_kind (esm vs cjs detection)
const loader = loaders[source_index];
const format: ?[]const u8 = switch (loader) {
.js, .jsx, .ts, .tsx => blk: {
const exports_kind = c.graph.ast.items(.exports_kind);
if (source_index < exports_kind.len) {
break :blk switch (exports_kind[source_index]) {
.cjs, .esm_with_dynamic_fallback_from_cjs => "cjs",
.esm, .esm_with_dynamic_fallback => "esm",
.none => null, // Unknown format, don't emit
};
}
break :blk null;
},
.json => "json",
.css => "css",
else => null,
};
if (format) |fmt| {
j.pushStatic(",\n \"format\": \"");
j.pushStatic(fmt);
j.pushStatic("\"");
}
j.pushStatic("\n }");
}
j.pushStatic("\n },\n \"outputs\": {");
// Write outputs by joining pre-built chunk JSON fragments
var first_output = true;
for (chunks) |*chunk| {
// Chunks without a resolved output path are not written to disk; skip them.
if (chunk.final_rel_path.len == 0) continue;
if (!first_output) {
j.pushStatic(",");
}
first_output = false;
j.pushStatic("\n ");
j.pushStatic(chunk.metafile_chunk_json);
}
j.pushStatic("\n }\n}\n");
// If no chunks, there are no chunk references to resolve, so just return the joined string
if (chunks.len == 0) {
return j.done(allocator);
}
// Break output into pieces and resolve chunk references to final paths
var intermediate = try c.breakOutputIntoPieces(allocator, &j, @intCast(chunks.len));
// Get final output with all chunk references resolved
const code_result = try intermediate.code(
allocator,
c.parse_graph,
&c.graph,
"", // no import prefix for metafile
&chunks[0], // dummy chunk, not used for metafile
chunks,
null, // no display size
false, // not force absolute path
false, // no source map shifts
);
return code_result.buffer;
}
/// Emits `str` to `writer` as a JSON string literal (quoted and escaped).
fn writeJSONString(writer: anytype, str: []const u8) !void {
    const escaped = bun.fmt.formatJSONStringUTF8(str, .{});
    try writer.print("{f}", .{escaped});
}
const std = @import("std");
const bun = @import("bun");
const StringJoiner = bun.StringJoiner;
const Chunk = bun.bundle_v2.Chunk;
const Index = bun.bundle_v2.Index;
const LinkerContext = bun.bundle_v2.LinkerContext;

View File

@@ -157,10 +157,10 @@ pub noinline fn computeChunks(
js_chunks_with_css += 1;
if (!css_chunk_entry.found_existing) {
var css_files_with_parts_in_chunk = std.AutoArrayHashMapUnmanaged(Index.Int, usize){};
var css_files_with_parts_in_chunk = std.AutoArrayHashMapUnmanaged(Index.Int, void){};
for (order.slice()) |entry| {
if (entry.kind == .source_index) {
bun.handleOom(css_files_with_parts_in_chunk.put(this.allocator(), entry.kind.source_index.get(), 0));
bun.handleOom(css_files_with_parts_in_chunk.put(this.allocator(), entry.kind.source_index.get(), {}));
}
}
css_chunk_entry.value_ptr.* = .{
@@ -195,10 +195,7 @@ pub noinline fn computeChunks(
source_id: u32,
pub fn next(c: *@This(), chunk_id: usize) void {
const entry = c.chunks[chunk_id].files_with_parts_in_chunk.getOrPut(c.allocator, @as(u32, @truncate(c.source_id))) catch unreachable;
if (!entry.found_existing) {
entry.value_ptr.* = 0; // Initialize byte count to 0
}
_ = c.chunks[chunk_id].files_with_parts_in_chunk.getOrPut(c.allocator, @as(u32, @truncate(c.source_id))) catch unreachable;
}
};
@@ -231,10 +228,7 @@ pub noinline fn computeChunks(
};
}
const entry = js_chunk_entry.value_ptr.files_with_parts_in_chunk.getOrPut(this.allocator(), @as(u32, @truncate(source_index.get()))) catch unreachable;
if (!entry.found_existing) {
entry.value_ptr.* = 0; // Initialize byte count to 0
}
_ = js_chunk_entry.value_ptr.files_with_parts_in_chunk.getOrPut(this.allocator(), @as(u32, @truncate(source_index.get()))) catch unreachable;
} else {
var handler = Handler{
.chunks = js_chunks.values(),

View File

@@ -304,18 +304,6 @@ pub fn generateChunksInParallel(
}
}
// Generate metafile JSON fragments for each chunk (after paths are resolved)
if (c.options.metafile) {
for (chunks) |*chunk| {
chunk.metafile_chunk_json = LinkerContext.MetafileBuilder.generateChunkJson(
bun.default_allocator,
c,
chunk,
chunks,
) catch "";
}
}
var output_files = try OutputFileListBuilder.init(bun.default_allocator, c, chunks, c.parse_graph.additional_output_files.items.len);
const root_path = c.resolver.opts.output_dir;

View File

@@ -135,17 +135,9 @@ fn generateCompileResultForCssChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCo
};
},
};
const output = allocating_writer.written();
// Update bytesInOutput for this source in the chunk (for metafile)
// Use atomic operation since multiple threads may update the same counter
if (output.len > 0) {
if (chunk.files_with_parts_in_chunk.getPtr(idx.get())) |bytes_ptr| {
_ = @atomicRmw(usize, bytes_ptr, .Add, output.len, .monotonic);
}
}
return CompileResult{
.css = .{
.result = .{ .result = output },
.result = .{ .result = allocating_writer.written() },
.source_index = idx.get(),
},
};

View File

@@ -59,18 +59,6 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon
arena.allocator(),
);
// Update bytesInOutput for this source in the chunk (for metafile)
// Use atomic operation since multiple threads may update the same counter
const code_len = switch (result) {
.result => |r| r.code.len,
else => 0,
};
if (code_len > 0 and !part_range.source_index.isRuntime()) {
if (chunk.files_with_parts_in_chunk.getPtr(part_range.source_index.get())) |bytes_ptr| {
_ = @atomicRmw(usize, bytes_ptr, .Add, code_len, .monotonic);
}
}
return .{
.javascript = .{
.source_index = part_range.source_index.get(),

View File

@@ -424,7 +424,6 @@ pub const Command = struct {
pub const BundlerOptions = struct {
outdir: []const u8 = "",
outfile: []const u8 = "",
metafile: [:0]const u8 = "",
root_dir: []const u8 = "",
public_path: []const u8 = "",
entry_naming: []const u8 = "[dir]/[name].[ext]",

View File

@@ -164,7 +164,6 @@ pub const build_only_params = [_]ParamType{
clap.parseParam("--target <STR> The intended execution environment for the bundle. \"browser\", \"bun\" or \"node\"") catch unreachable,
clap.parseParam("--outdir <STR> Default to \"dist\" if multiple files") catch unreachable,
clap.parseParam("--outfile <STR> Write to a file") catch unreachable,
clap.parseParam("--metafile <STR>? Write a JSON file with metadata about the build") catch unreachable,
clap.parseParam("--sourcemap <STR>? Build with sourcemaps - 'linked', 'inline', 'external', or 'none'") catch unreachable,
clap.parseParam("--banner <STR> Add a banner to the bundled output such as \"use client\"; for a bundle being used with RSCs") catch unreachable,
clap.parseParam("--footer <STR> Add a footer to the bundled output such as // built with bun!") catch unreachable,
@@ -1196,14 +1195,6 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
}
}
if (args.option("--metafile")) |metafile| {
// If --metafile is passed without a value, default to "meta.json"
ctx.bundler_options.metafile = if (metafile.len > 0)
bun.handleOom(allocator.dupeZ(u8, metafile))
else
"meta.json";
}
if (args.option("--root")) |root_dir| {
if (root_dir.len > 0) {
ctx.bundler_options.root_dir = root_dir;

View File

@@ -85,7 +85,6 @@ pub const BuildCommand = struct {
this_transpiler.options.bundler_feature_flags = Runtime.Features.initBundlerFeatureFlags(allocator, ctx.args.feature_flags);
this_transpiler.options.css_chunking = ctx.bundler_options.css_chunking;
this_transpiler.options.metafile = ctx.bundler_options.metafile.len > 0;
this_transpiler.options.output_dir = ctx.bundler_options.outdir;
this_transpiler.options.output_format = ctx.bundler_options.output_format;
@@ -309,7 +308,7 @@ pub const BuildCommand = struct {
this_transpiler.resolver.opts.entry_naming = this_transpiler.options.entry_naming;
}
const build_result = BundleV2.generateFromCLI(
break :brk (BundleV2.generateFromCLI(
&this_transpiler,
allocator,
bun.jsc.AnyEventLoop.init(ctx.allocator),
@@ -327,34 +326,7 @@ pub const BuildCommand = struct {
Output.flush();
exitOrWatch(1, ctx.debug.hot_reload == .watch);
};
// Write metafile if requested
if (build_result.metafile) |metafile_json| {
if (ctx.bundler_options.metafile.len > 0) {
// Use makeOpen which auto-creates parent directories on failure
const file = switch (bun.sys.File.makeOpen(ctx.bundler_options.metafile, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664)) {
.result => |f| f,
.err => |err| {
Output.err(err, "could not open metafile {f}", .{bun.fmt.quote(ctx.bundler_options.metafile)});
exitOrWatch(1, ctx.debug.hot_reload == .watch);
unreachable;
},
};
defer file.close();
switch (file.writeAll(metafile_json)) {
.result => {},
.err => |err| {
Output.err(err, "could not write metafile {f}", .{bun.fmt.quote(ctx.bundler_options.metafile)});
exitOrWatch(1, ctx.debug.hot_reload == .watch);
unreachable;
},
}
}
}
break :brk build_result.output_files.items;
}).items;
};
const bundled_end = std.time.nanoTimestamp();

View File

@@ -75,9 +75,6 @@ fn onPipeClose(this: *WindowsNamedPipe) void {
log("onPipeClose", .{});
this.flags.disconnected = true;
this.pipe = null;
// Clear the writer's source to prevent access to dangling pipe pointer
// This fixes a race condition where the writer tries to access the pipe after it's been freed
this.writer.source = null;
this.onClose();
}

View File

@@ -109,11 +109,11 @@ pub const SHELL = PlatformSpecificNew(kind.string, "SHELL", null, .{});
/// C:\Windows, for example.
/// Note: Do not use this variable directly -- use os.zig's implementation instead.
pub const SYSTEMROOT = PlatformSpecificNew(kind.string, null, "SYSTEMROOT", .{});
pub const TEMP = PlatformSpecificNew(kind.string, "TEMP", "TEMP", .{});
pub const TEMP = PlatformSpecificNew(kind.string, null, "TEMP", .{});
pub const TERM = New(kind.string, "TERM", .{});
pub const TERM_PROGRAM = New(kind.string, "TERM_PROGRAM", .{});
pub const TMP = PlatformSpecificNew(kind.string, "TMP", "TMP", .{});
pub const TMPDIR = PlatformSpecificNew(kind.string, "TMPDIR", "TMPDIR", .{});
pub const TMP = PlatformSpecificNew(kind.string, null, "TMP", .{});
pub const TMPDIR = PlatformSpecificNew(kind.string, "TMPDIR", null, .{});
pub const TMUX = New(kind.string, "TMUX", .{});
pub const TODIUM = New(kind.string, "TODIUM", .{});
pub const USER = PlatformSpecificNew(kind.string, "USER", "USERNAME", .{});
@@ -601,16 +601,6 @@ fn PlatformSpecificNew(
return null;
}
pub fn getNotEmpty() ReturnType {
if (Self.get()) |v| {
if (v.len == 0) {
return null;
}
return v;
}
return null;
}
/// Retrieve the value of the environment variable, loading it if necessary.
/// Fails if the current platform is unsupported.
pub fn get() ReturnType {

View File

@@ -530,69 +530,69 @@ pub const FileSystem = struct {
file_limit: usize = 32,
file_quota: usize = 32,
fn #platformTempDir() []const u8 {
// Try TMPDIR, TMP, and TEMP in that order, matching Node.js.
// https://github.com/nodejs/node/blob/e172be269890702bf2ad06252f2f152e7604d76c/src/node_credentials.cc#L132
if (bun.env_var.TMPDIR.getNotEmpty() orelse
bun.env_var.TMP.getNotEmpty() orelse
bun.env_var.TEMP.getNotEmpty()) |dir|
{
if (dir.len > 1 and dir[dir.len - 1] == std.fs.path.sep) {
return dir[0 .. dir.len - 1];
}
return dir;
}
pub var win_tempdir_cache: ?[]const u8 = undefined;
pub fn platformTempDir() []const u8 {
return switch (Environment.os) {
// https://learn.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-gettemppathw#remarks
.windows => {
if (bun.env_var.SYSTEMROOT.get() orelse bun.env_var.WINDIR.get()) |windir| {
return std.fmt.allocPrint(
.windows => win_tempdir_cache orelse {
const value = bun.env_var.TEMP.get() orelse bun.env_var.TMP.get() orelse brk: {
if (bun.env_var.SYSTEMROOT.get() orelse bun.env_var.WINDIR.get()) |windir| {
break :brk std.fmt.allocPrint(
bun.default_allocator,
"{s}\\Temp",
.{strings.withoutTrailingSlash(windir)},
) catch |err| bun.handleOom(err);
}
if (bun.env_var.HOME.get()) |profile| {
var buf: bun.PathBuffer = undefined;
var parts = [_]string{"AppData\\Local\\Temp"};
const out = bun.path.joinAbsStringBuf(profile, &buf, &parts, .loose);
break :brk bun.handleOom(bun.default_allocator.dupe(u8, out));
}
var tmp_buf: bun.PathBuffer = undefined;
const cwd = std.posix.getcwd(&tmp_buf) catch @panic("Failed to get cwd for platformTempDir");
const root = bun.path.windowsFilesystemRoot(cwd);
break :brk std.fmt.allocPrint(
bun.default_allocator,
"{s}\\Temp",
.{strings.withoutTrailingSlash(windir)},
"{s}\\Windows\\Temp",
.{strings.withoutTrailingSlash(root)},
) catch |err| bun.handleOom(err);
}
if (bun.env_var.HOME.get()) |profile| {
var buf: bun.PathBuffer = undefined;
var parts = [_]string{"AppData\\Local\\Temp"};
const out = bun.path.joinAbsStringBuf(profile, &buf, &parts, .loose);
return bun.handleOom(bun.default_allocator.dupe(u8, out));
}
var tmp_buf: bun.PathBuffer = undefined;
const cwd = std.posix.getcwd(&tmp_buf) catch @panic("Failed to get cwd for platformTempDir");
const root = bun.path.windowsFilesystemRoot(cwd);
return std.fmt.allocPrint(
bun.default_allocator,
"{s}\\Windows\\Temp",
.{strings.withoutTrailingSlash(root)},
) catch |err| bun.handleOom(err);
};
win_tempdir_cache = value;
return value;
},
.mac => "/private/tmp",
else => "/tmp",
};
}
var get_platform_tempdir = bun.once(#platformTempDir);
pub fn platformTempDir() []const u8 {
return get_platform_tempdir.call(.{});
}
pub const Tmpfile = switch (Environment.os) {
.windows => TmpfileWindows,
else => TmpfilePosix,
};
pub fn tmpdirPath() []const u8 {
return bun.env_var.BUN_TMPDIR.getNotEmpty() orelse platformTempDir();
pub var tmpdir_path: []const u8 = undefined;
pub var tmpdir_path_set = false;
pub fn tmpdirPath(_: *const @This()) []const u8 {
if (!tmpdir_path_set) {
tmpdir_path = bun.env_var.BUN_TMPDIR.get() orelse platformTempDir();
tmpdir_path_set = true;
}
return tmpdir_path;
}
pub fn openTmpDir(_: *const RealFS) !std.fs.Dir {
if (!tmpdir_path_set) {
tmpdir_path = bun.env_var.BUN_TMPDIR.get() orelse platformTempDir();
tmpdir_path_set = true;
}
if (comptime Environment.isWindows) {
return (try bun.sys.openDirAtWindowsA(bun.invalid_fd, tmpdirPath(), .{
return (try bun.sys.openDirAtWindowsA(bun.invalid_fd, tmpdir_path, .{
.iterable = true,
// we will not delete the temp directory
.can_rename_or_delete = false,
@@ -600,7 +600,7 @@ pub const FileSystem = struct {
}).unwrap()).stdDir();
}
return try bun.openDirAbsolute(tmpdirPath());
return try bun.openDirAbsolute(tmpdir_path);
}
pub fn entriesAt(this: *RealFS, index: allocators.IndexType, generation: bun.Generation) ?*EntriesOption {
@@ -639,6 +639,11 @@ pub const FileSystem = struct {
return bun.env_var.BUN_TMPDIR.get() orelse platformTempDir();
}
pub fn setTempdir(path: ?string) void {
tmpdir_path = path orelse getDefaultTempDir();
tmpdir_path_set = true;
}
pub const TmpfilePosix = struct {
fd: bun.FileDescriptor = bun.invalid_fd,
dir_fd: bun.FileDescriptor = bun.invalid_fd,

View File

@@ -522,14 +522,13 @@ pub fn isKeepAlivePossible(this: *HTTPClient) bool {
if (comptime FeatureFlags.enable_keepalive) {
// TODO keepalive for unix sockets
if (this.unix_socket_path.length() > 0) return false;
// is not possible to reuse Proxy with TLS, so disable keepalive if url is tunneling HTTPS
// is not possible to reuse Proxy with TSL, so disable keepalive if url is tunneling HTTPS
if (this.proxy_tunnel != null or (this.http_proxy != null and this.url.isHTTPS())) {
log("Keep-Alive release (proxy tunneling https)", .{});
return false;
}
// check state
//check state
if (this.state.flags.allow_keepalive and !this.flags.disable_keepalive) return true;
}
return false;
@@ -2339,18 +2338,12 @@ pub fn handleResponseMetadata(
return ShouldContinue.continue_streaming;
}
// proxy denied connection so return proxy result (407, 403 etc)
//proxy denied connection so return proxy result (407, 403 etc)
this.flags.proxy_tunneling = false;
this.flags.disable_keepalive = true;
}
const status_code = response.status_code;
if (status_code == 407) {
// If the request is being proxied and passes through the 407 status code, then let's also not do HTTP Keep-Alive.
this.flags.disable_keepalive = true;
}
// if is no redirect or if is redirect == "manual" just proceed
const is_redirect = status_code >= 300 and status_code <= 399;
if (is_redirect) {

View File

@@ -108,11 +108,6 @@ pub const ImportRecord = struct {
source_index: bun.ast.Index = .invalid,
/// The original import specifier as written in source code (e.g., "./foo.js").
/// This is preserved before resolution overwrites `path` with the resolved path.
/// Used for metafile generation.
original_path: []const u8 = "",
/// Pack all boolean flags into 2 bytes to reduce padding overhead.
/// Previously 15 separate bool fields caused ~14-16 bytes of padding waste.
flags: Flags = .{},

View File

@@ -1812,7 +1812,6 @@ pub const BundleOptions = struct {
debugger: bool = false,
compile: bool = false,
metafile: bool = false,
/// Set when bake.DevServer is bundling.
dev_server: ?*bun.bake.DevServer = null,

View File

@@ -748,7 +748,6 @@ pub const Interpreter = struct {
jsobjs.deinit();
if (export_env) |*ee| ee.deinit();
if (cwd) |*cc| cc.deref();
shargs.deinit();
interpreter.finalize();
return error.JSError;
}

View File

@@ -203,33 +203,4 @@ module.exports = 1;`,
`);
},
});
itBundled("banner/SourceHashbangWithBytecodeAndCJSTargetBun", {
banner: "// Copyright 2024 Example Corp",
format: "cjs",
target: "bun",
bytecode: true,
outdir: "/out",
minifyWhitespace: true,
backend: "api",
files: {
"/a.js": `#!/usr/bin/env bun
module.exports = 1;
console.log("bun!");`,
},
onAfterBundle(api) {
const content = api.readFile("/out/a.js");
// Shebang from source should come first, then @bun pragma
expect(content).toMatchInlineSnapshot(`
"#!/usr/bin/env bun
// @bun @bytecode @bun-cjs
(function(exports, require, module, __filename, __dirname) {// Copyright 2024 Example Corp
module.exports=1;console.log("bun!");})
"
`);
},
run: {
stdout: "bun!\n",
},
});
});

View File

@@ -2150,10 +2150,7 @@ c {
toplevel-tilde.css: WARNING: CSS nesting syntax is not supported in the configured target environment (chrome10)
`, */
});
// TODO: Bun's bundler doesn't support multiple entry points generating CSS outputs
// with identical content hashes to the same output path. This test exposes that
// limitation. Skip until the bundler can deduplicate or handle this case.
itBundled.skip("css/MetafileCSSBundleTwoToOne", {
itBundled("css/MetafileCSSBundleTwoToOne", {
files: {
"/foo/entry.js": /* js */ `
import '../common.css'

View File

@@ -1,345 +0,0 @@
import { describe, expect } from "bun:test";
import { existsSync, readFileSync } from "fs";
import { itBundled } from "../expectBundled";
// Tests ported from:
// https://github.com/evanw/esbuild/blob/main/internal/bundler_tests/bundler_default_test.go
describe("bundler", () => {
itBundled("metafile/ImportWithTypeJSON", {
files: {
"/project/entry.js": /* js */ `
import a from './data.json'
import b from './data.json' assert { type: 'json' }
import c from './data.json' with { type: 'json' }
x = [a, b, c]
`,
"/project/data.json": `{"some": "data"}`,
},
outdir: "/out",
metafile: "/metafile.json",
onAfterBundle(api) {
const metafilePath = api.join("metafile.json");
expect(existsSync(metafilePath)).toBe(true);
const metafile = JSON.parse(readFileSync(metafilePath, "utf-8"));
expect(metafile.inputs).toBeDefined();
expect(metafile.outputs).toBeDefined();
// Should have imports with 'with' clause for JSON
const entryInputKey = Object.keys(metafile.inputs).find(k => k.includes("entry.js"));
expect(entryInputKey).toBeDefined();
const entryInput = metafile.inputs[entryInputKey!];
expect(entryInput.imports.length).toBeGreaterThan(0);
// At least one import should have a 'with' clause
const hasWithClause = entryInput.imports.some((imp: any) => imp.with?.type === "json");
expect(hasWithClause).toBe(true);
},
});
itBundled("metafile/BasicStructure", {
files: {
"/entry.js": /* js */ `
import { foo } from './foo.js';
console.log(foo);
`,
"/foo.js": /* js */ `
export const foo = 42;
`,
},
outdir: "/out",
metafile: "/metafile.json",
onAfterBundle(api) {
const metafilePath = api.join("metafile.json");
expect(existsSync(metafilePath)).toBe(true);
const metafile = JSON.parse(readFileSync(metafilePath, "utf-8"));
// Check basic structure
expect(metafile.inputs).toBeDefined();
expect(metafile.outputs).toBeDefined();
expect(Object.keys(metafile.inputs).length).toBeGreaterThanOrEqual(2);
expect(Object.keys(metafile.outputs).length).toBeGreaterThanOrEqual(1);
// Check input has bytes and imports
for (const input of Object.values(metafile.inputs) as any[]) {
expect(typeof input.bytes).toBe("number");
expect(Array.isArray(input.imports)).toBe(true);
}
// Check output has bytes, inputs, imports, exports
for (const output of Object.values(metafile.outputs) as any[]) {
expect(typeof output.bytes).toBe("number");
expect(typeof output.inputs).toBe("object");
expect(Array.isArray(output.imports)).toBe(true);
expect(Array.isArray(output.exports)).toBe(true);
}
},
});
itBundled("metafile/MultipleEntryPoints", {
files: {
"/a.js": /* js */ `
import { shared } from './shared.js';
console.log('a', shared);
`,
"/b.js": /* js */ `
import { shared } from './shared.js';
console.log('b', shared);
`,
"/shared.js": /* js */ `
export const shared = 'shared value';
`,
},
entryPoints: ["/a.js", "/b.js"],
outdir: "/out",
metafile: "/metafile.json",
splitting: true,
onAfterBundle(api) {
const metafilePath = api.join("metafile.json");
expect(existsSync(metafilePath)).toBe(true);
const metafile = JSON.parse(readFileSync(metafilePath, "utf-8"));
expect(metafile.inputs).toBeDefined();
expect(metafile.outputs).toBeDefined();
// With splitting, we should have multiple outputs
expect(Object.keys(metafile.outputs).length).toBeGreaterThanOrEqual(2);
},
});
itBundled("metafile/ExternalImports", {
files: {
"/entry.js": /* js */ `
import ext1 from 'external-pkg-1';
import ext2 from 'external-pkg-2';
console.log(ext1, ext2);
`,
},
outdir: "/out",
metafile: "/metafile.json",
external: ["external-pkg-1", "external-pkg-2"],
onAfterBundle(api) {
const metafilePath = api.join("metafile.json");
expect(existsSync(metafilePath)).toBe(true);
const metafile = JSON.parse(readFileSync(metafilePath, "utf-8"));
// Find the entry file
const entryKey = Object.keys(metafile.inputs).find(k => k.includes("entry.js"));
expect(entryKey).toBeDefined();
const entry = metafile.inputs[entryKey!];
// Check that external imports are marked
const externalImports = entry.imports.filter((imp: any) => imp.external === true);
expect(externalImports.length).toBe(2);
},
});
itBundled("metafile/DynamicImport", {
files: {
"/entry.js": /* js */ `
import('./dynamic.js').then(m => console.log(m));
`,
"/dynamic.js": /* js */ `
export const value = 123;
`,
},
outdir: "/out",
metafile: "/metafile.json",
splitting: true,
onAfterBundle(api) {
const metafilePath = api.join("metafile.json");
expect(existsSync(metafilePath)).toBe(true);
const metafile = JSON.parse(readFileSync(metafilePath, "utf-8"));
expect(metafile.inputs).toBeDefined();
expect(metafile.outputs).toBeDefined();
// Find the entry file
const entryKey = Object.keys(metafile.inputs).find(k => k.includes("entry.js"));
expect(entryKey).toBeDefined();
const entry = metafile.inputs[entryKey!];
// Should have a dynamic import
const dynamicImports = entry.imports.filter((imp: any) => imp.kind === "dynamic-import");
expect(dynamicImports.length).toBe(1);
},
});
itBundled("metafile/RequireCall", {
files: {
"/entry.js": /* js */ `
const foo = require('./foo.js');
console.log(foo);
`,
"/foo.js": /* js */ `
module.exports = { value: 42 };
`,
},
outdir: "/out",
metafile: "/metafile.json",
onAfterBundle(api) {
const metafilePath = api.join("metafile.json");
expect(existsSync(metafilePath)).toBe(true);
const metafile = JSON.parse(readFileSync(metafilePath, "utf-8"));
expect(metafile.inputs).toBeDefined();
// Find the entry file
const entryKey = Object.keys(metafile.inputs).find(k => k.includes("entry.js"));
expect(entryKey).toBeDefined();
const entry = metafile.inputs[entryKey!];
// Should have a require call
const requireImports = entry.imports.filter((imp: any) => imp.kind === "require-call");
expect(requireImports.length).toBe(1);
},
});
itBundled("metafile/ReExports", {
files: {
"/entry.js": /* js */ `
export { foo } from './foo.js';
export * from './bar.js';
`,
"/foo.js": /* js */ `
export const foo = 1;
`,
"/bar.js": /* js */ `
export const bar = 2;
export const baz = 3;
`,
},
outdir: "/out",
metafile: "/metafile.json",
onAfterBundle(api) {
const metafilePath = api.join("metafile.json");
expect(existsSync(metafilePath)).toBe(true);
const metafile = JSON.parse(readFileSync(metafilePath, "utf-8"));
expect(metafile.outputs).toBeDefined();
// Find the output
const outputKey = Object.keys(metafile.outputs)[0];
const output = metafile.outputs[outputKey];
// Should have exports
expect(output.exports.length).toBeGreaterThanOrEqual(3); // foo, bar, baz
},
});
itBundled("metafile/NestedImports", {
files: {
"/entry.js": /* js */ `
import { a } from './a.js';
console.log(a);
`,
"/a.js": /* js */ `
import { b } from './b.js';
export const a = b + 1;
`,
"/b.js": /* js */ `
import { c } from './c.js';
export const b = c + 1;
`,
"/c.js": /* js */ `
export const c = 1;
`,
},
outdir: "/out",
metafile: "/metafile.json",
onAfterBundle(api) {
const metafilePath = api.join("metafile.json");
expect(existsSync(metafilePath)).toBe(true);
const metafile = JSON.parse(readFileSync(metafilePath, "utf-8"));
expect(metafile.inputs).toBeDefined();
// Should have 4 input files
expect(Object.keys(metafile.inputs).length).toBe(4);
// Each file should have proper imports
for (const [path, input] of Object.entries(metafile.inputs) as any) {
expect(typeof input.bytes).toBe("number");
expect(Array.isArray(input.imports)).toBe(true);
}
},
});
itBundled("metafile/JSONImport", {
files: {
"/entry.js": /* js */ `
import data from './data.json';
console.log(data);
`,
"/data.json": `{"key": "value", "number": 42}`,
},
outdir: "/out",
metafile: "/metafile.json",
onAfterBundle(api) {
const metafilePath = api.join("metafile.json");
expect(existsSync(metafilePath)).toBe(true);
const metafile = JSON.parse(readFileSync(metafilePath, "utf-8"));
// Find the entry file
const entryKey = Object.keys(metafile.inputs).find(k => k.includes("entry.js"));
expect(entryKey).toBeDefined();
const entry = metafile.inputs[entryKey!];
// Should have an import to the JSON file with 'with' clause
const jsonImport = entry.imports.find((imp: any) => imp.path.includes("data.json"));
expect(jsonImport).toBeDefined();
expect(jsonImport.with?.type).toBe("json");
},
});
itBundled("metafile/TextImport", {
  files: {
    "/entry.js": /* js */ `
      import text from './file.txt';
      console.log(text);
    `,
    "/file.txt": `Hello, World!`,
  },
  outdir: "/out",
  metafile: "/metafile.json",
  loader: {
    ".txt": "text",
  },
  onAfterBundle(api) {
    const metaPath = api.join("metafile.json");
    expect(existsSync(metaPath)).toBe(true);
    const meta = JSON.parse(readFileSync(metaPath, "utf-8"));
    // Locate the entry module among the metafile inputs.
    const entryPath = Object.keys(meta.inputs).find(k => k.includes("entry.js"));
    expect(entryPath).toBeDefined();
    const entryInput = meta.inputs[entryPath!];
    // The text-loader import must be recorded with a "with" clause of type "text".
    const imp = entryInput.imports.find((i: any) => i.path.includes("file.txt"));
    expect(imp).toBeDefined();
    expect(imp.with?.type).toBe("text");
  },
});
itBundled("metafile/EntryPoint", {
  files: {
    "/entry.js": /* js */ `
      console.log('entry');
    `,
  },
  outdir: "/out",
  metafile: "/metafile.json",
  onAfterBundle(api) {
    const metaPath = api.join("metafile.json");
    expect(existsSync(metaPath)).toBe(true);
    const meta = JSON.parse(readFileSync(metaPath, "utf-8"));
    expect(meta.outputs).toBeDefined();
    // The entry chunk's output record must carry a string `entryPoint` field.
    const entryOutput = Object.values(meta.outputs).find((o: any) => o.entryPoint);
    expect(entryOutput).toBeDefined();
    expect(typeof (entryOutput as any).entryPoint).toBe("string");
  },
});
itBundled("metafile/OriginalPath", {
  files: {
    "/entry.js": /* js */ `
      import { helper } from './lib/helper.js';
      console.log(helper);
    `,
    "/lib/helper.js": /* js */ `
      export const helper = 'helper';
    `,
  },
  outdir: "/out",
  metafile: "/metafile.json",
  onAfterBundle(api) {
    const metaPath = api.join("metafile.json");
    expect(existsSync(metaPath)).toBe(true);
    const meta = JSON.parse(readFileSync(metaPath, "utf-8"));
    // Locate the entry module among the metafile inputs.
    const entryPath = Object.keys(meta.inputs).find(k => k.includes("entry.js"));
    expect(entryPath).toBeDefined();
    const entryInput = meta.inputs[entryPath!];
    // Exactly one import, and its `original` field preserves the raw specifier.
    expect(entryInput.imports.length).toBe(1);
    const [firstImport] = entryInput.imports;
    expect(firstImport.original).toBe("./lib/helper.js");
  },
});
});

View File

@@ -541,6 +541,9 @@ function expectBundled(
throw new Error("bundling:false only supports a single entry point");
}
if (!ESBUILD && metafile) {
throw new Error("metafile not implemented in bun build");
}
if (!ESBUILD && legalComments) {
throw new Error("legalComments not implemented in bun build");
}
@@ -1121,7 +1124,6 @@ function expectBundled(
define: define ?? {},
throw: _throw ?? false,
compile,
metafile: !!metafile,
jsx: jsx
? {
runtime: jsx.runtime,
@@ -1198,11 +1200,6 @@ for (const [key, blob] of build.outputs) {
configRef = null!;
Bun.gc(true);
// Write metafile if requested
if (metafile && build.success && (build as any).metafile) {
writeFileSync(metafile, JSON.stringify((build as any).metafile, null, 2));
}
const buildLogs = build.logs.filter(x => x.level === "error");
if (buildLogs.length) {
const allErrors: ErrorMeta[] = [];

View File

@@ -1,527 +0,0 @@
import { describe, expect, test } from "bun:test";
import { tempDir } from "harness";
// Type definitions for metafile structure
interface MetafileImport {
path: string;
kind: string;
original?: string;
external?: boolean;
with?: { type: string };
}
interface MetafileInput {
bytes: number;
imports: MetafileImport[];
format?: "esm" | "cjs";
}
interface MetafileOutput {
bytes: number;
inputs: Record<string, { bytesInOutput: number }>;
imports: Array<{ path: string; kind: string; external?: boolean }>;
exports: string[];
entryPoint?: string;
cssBundle?: string;
}
interface Metafile {
inputs: Record<string, MetafileInput>;
outputs: Record<string, MetafileOutput>;
}
describe("bundler metafile", () => {
test("metafile option returns metafile object", async () => {
using dir = tempDir("metafile-test", {
"index.js": `import { foo } from "./foo.js"; console.log(foo);`,
"foo.js": `export const foo = "hello";`,
});
const result = await Bun.build({
entrypoints: [`${dir}/index.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
expect(typeof result.metafile).toBe("object");
// Check inputs structure
expect(result.metafile.inputs).toBeDefined();
expect(typeof result.metafile.inputs).toBe("object");
// Check outputs structure
expect(result.metafile.outputs).toBeDefined();
expect(typeof result.metafile.outputs).toBe("object");
});
test("metafile inputs contain file metadata", async () => {
using dir = tempDir("metafile-inputs-test", {
"entry.js": `import { helper } from "./helper.js"; helper();`,
"helper.js": `export function helper() { return 42; }`,
});
const result = await Bun.build({
entrypoints: [`${dir}/entry.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
const inputs = result.metafile.inputs as Record<string, MetafileInput>;
const inputKeys = Object.keys(inputs);
// Should have at least 2 input files
expect(inputKeys.length).toBeGreaterThanOrEqual(2);
// Each input should have bytes and imports
for (const key of inputKeys) {
const input = inputs[key];
expect(typeof input.bytes).toBe("number");
expect(input.bytes).toBeGreaterThan(0);
expect(Array.isArray(input.imports)).toBe(true);
}
});
test("metafile outputs contain chunk metadata", async () => {
using dir = tempDir("metafile-outputs-test", {
"main.js": `export const value = 123;`,
});
const result = await Bun.build({
entrypoints: [`${dir}/main.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
const outputKeys = Object.keys(outputs);
// Should have at least 1 output
expect(outputKeys.length).toBeGreaterThanOrEqual(1);
// Each output should have bytes, inputs, imports, exports
for (const key of outputKeys) {
const output = outputs[key];
expect(typeof output.bytes).toBe("number");
expect(typeof output.inputs).toBe("object");
expect(Array.isArray(output.imports)).toBe(true);
expect(Array.isArray(output.exports)).toBe(true);
}
});
test("metafile tracks import relationships", async () => {
using dir = tempDir("metafile-imports-test", {
"index.js": `import { a } from "./a.js"; console.log(a);`,
"a.js": `import { b } from "./b.js"; export const a = b + 1;`,
"b.js": `export const b = 10;`,
});
const result = await Bun.build({
entrypoints: [`${dir}/index.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
// Find the entry file in inputs
const inputs = result.metafile.inputs as Record<string, MetafileInput>;
let entryInput: MetafileInput | null = null;
for (const [path, input] of Object.entries(inputs)) {
if (path.includes("index.js")) {
entryInput = input;
break;
}
}
expect(entryInput).not.toBeNull();
// Entry should have an import to a.js
expect(entryInput!.imports.length).toBeGreaterThan(0);
});
test("metafile imports have resolved path and original specifier", async () => {
using dir = tempDir("metafile-resolved-path-test", {
"entry.js": `import { foo } from "./lib/helper.js"; console.log(foo);`,
"lib/helper.js": `export const foo = 42;`,
});
const result = await Bun.build({
entrypoints: [`${dir}/entry.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
// Find the entry file in inputs
const inputs = result.metafile.inputs as Record<string, MetafileInput>;
let entryImports: MetafileImport[] | null = null;
for (const [path, input] of Object.entries(inputs)) {
if (path.includes("entry.js")) {
entryImports = input.imports;
break;
}
}
expect(entryImports).not.toBeNull();
expect(entryImports!.length).toBe(1);
const imp = entryImports![0];
// path should be the resolved path (contains lib/helper.js or lib\helper.js on Windows)
expect(imp.path.includes("lib/helper.js") || imp.path.includes("lib\\helper.js")).toBe(true);
expect(imp.kind).toBe("import-statement");
// original should be the original import specifier
expect(imp.original).toBe("./lib/helper.js");
});
test("metafile without option returns undefined", async () => {
using dir = tempDir("metafile-disabled-test", {
"test.js": `console.log("test");`,
});
const result = await Bun.build({
entrypoints: [`${dir}/test.js`],
// metafile is not set (defaults to false)
});
expect(result.success).toBe(true);
expect(result.metafile).toBeUndefined();
});
test("metafile tracks exports", async () => {
using dir = tempDir("metafile-exports-test", {
"lib.js": `export const foo = 1; export const bar = 2; export default function() {}`,
});
const result = await Bun.build({
entrypoints: [`${dir}/lib.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
const outputKeys = Object.keys(outputs);
expect(outputKeys.length).toBeGreaterThanOrEqual(1);
// Find the main output
const mainOutput = outputs[outputKeys[0]];
expect(mainOutput.exports).toBeDefined();
expect(Array.isArray(mainOutput.exports)).toBe(true);
});
test("metafile includes entryPoint for entry chunks", async () => {
using dir = tempDir("metafile-entrypoint-test", {
"entry.js": `console.log("entry");`,
});
const result = await Bun.build({
entrypoints: [`${dir}/entry.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
const outputKeys = Object.keys(outputs);
// At least one output should have entryPoint
let hasEntryPoint = false;
for (const key of outputKeys) {
if (outputs[key].entryPoint) {
hasEntryPoint = true;
expect(typeof outputs[key].entryPoint).toBe("string");
break;
}
}
expect(hasEntryPoint).toBe(true);
});
test("metafile includes format for JS inputs", async () => {
using dir = tempDir("metafile-format-test", {
"esm.js": `export const x = 1;`,
});
const result = await Bun.build({
entrypoints: [`${dir}/esm.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
const inputs = result.metafile.inputs as Record<string, MetafileInput>;
// At least one input should have format
let hasFormat = false;
for (const key of Object.keys(inputs)) {
if (inputs[key].format) {
hasFormat = true;
expect(["esm", "cjs"]).toContain(inputs[key].format);
break;
}
}
expect(hasFormat).toBe(true);
});
test("metafile detects cjs format for CommonJS files", async () => {
using dir = tempDir("metafile-cjs-format-test", {
"entry.js": `const foo = require("./foo.js"); console.log(foo);`,
"foo.js": `module.exports = { value: 42 };`,
});
const result = await Bun.build({
entrypoints: [`${dir}/entry.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
const inputs = result.metafile.inputs as Record<string, MetafileInput>;
// Find the foo.js file which uses CommonJS exports
let fooInput: MetafileInput | null = null;
for (const [path, input] of Object.entries(inputs)) {
if (path.includes("foo.js")) {
fooInput = input;
break;
}
}
expect(fooInput).not.toBeNull();
expect(fooInput!.format).toBe("cjs");
});
test("metafile marks external imports", async () => {
using dir = tempDir("metafile-external-test", {
"index.js": `import fs from "fs"; console.log(fs);`,
});
const result = await Bun.build({
entrypoints: [`${dir}/index.js`],
metafile: true,
external: ["fs"],
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
const inputs = result.metafile.inputs as Record<string, MetafileInput>;
let foundExternal = false;
for (const key of Object.keys(inputs)) {
const input = inputs[key];
for (const imp of input.imports) {
if (imp.path === "fs" && imp.external === true) {
foundExternal = true;
break;
}
}
}
expect(foundExternal).toBe(true);
});
test("metafile with code splitting", async () => {
using dir = tempDir("metafile-splitting-test", {
"a.js": `import { shared } from "./shared.js"; console.log("a", shared);`,
"b.js": `import { shared } from "./shared.js"; console.log("b", shared);`,
"shared.js": `export const shared = "shared value";`,
});
const result = await Bun.build({
entrypoints: [`${dir}/a.js`, `${dir}/b.js`],
metafile: true,
splitting: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
const outputKeys = Object.keys(outputs);
// With splitting, we should have more outputs (shared chunk)
expect(outputKeys.length).toBeGreaterThanOrEqual(2);
});
test("metafile includes with clause for JSON imports", async () => {
using dir = tempDir("metafile-with-json-test", {
"entry.js": `import data from "./data.json"; console.log(data);`,
"data.json": `{"key": "value"}`,
});
const result = await Bun.build({
entrypoints: [`${dir}/entry.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
// Find the entry file in inputs
const inputs = result.metafile.inputs as Record<string, MetafileInput>;
let jsonImport: MetafileImport | null = null;
for (const [path, input] of Object.entries(inputs)) {
if (path.includes("entry.js")) {
for (const imp of input.imports) {
if (imp.path.includes("data.json")) {
jsonImport = imp;
break;
}
}
break;
}
}
expect(jsonImport).not.toBeNull();
expect(jsonImport!.with).toBeDefined();
expect(jsonImport!.with!.type).toBe("json");
});
test("metafile tracks require-call imports", async () => {
using dir = tempDir("metafile-require-test", {
"entry.js": `const foo = require("./foo.js"); console.log(foo);`,
"foo.js": `module.exports = { value: 42 };`,
});
const result = await Bun.build({
entrypoints: [`${dir}/entry.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
// Find the entry file in inputs
const inputs = result.metafile.inputs as Record<string, MetafileInput>;
let requireImport: MetafileImport | null = null;
for (const [path, input] of Object.entries(inputs)) {
if (path.includes("entry.js")) {
for (const imp of input.imports) {
if (imp.path.includes("foo.js")) {
requireImport = imp;
break;
}
}
break;
}
}
expect(requireImport).not.toBeNull();
expect(requireImport!.kind).toBe("require-call");
});
test("metafile tracks dynamic-import imports", async () => {
using dir = tempDir("metafile-dynamic-import-test", {
"entry.js": `import("./dynamic.js").then(m => console.log(m));`,
"dynamic.js": `export const value = 123;`,
});
const result = await Bun.build({
entrypoints: [`${dir}/entry.js`],
metafile: true,
splitting: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
// Find the entry file in inputs
const inputs = result.metafile.inputs as Record<string, MetafileInput>;
let dynamicImport: MetafileImport | null = null;
for (const [path, input] of Object.entries(inputs)) {
if (path.includes("entry.js")) {
for (const imp of input.imports) {
if (imp.kind === "dynamic-import" && imp.original === "./dynamic.js") {
dynamicImport = imp;
break;
}
}
break;
}
}
expect(dynamicImport).not.toBeNull();
expect(dynamicImport!.kind).toBe("dynamic-import");
expect(dynamicImport!.original).toBe("./dynamic.js");
// The path should be the final chunk path (e.g., "./chunk-xxx.js"), not the internal unique_key
expect(dynamicImport!.path).toMatch(/^\.\/chunk-[a-z0-9]+\.js$/);
// Verify the path corresponds to an actual output chunk
const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
const outputPaths = Object.keys(outputs);
expect(outputPaths).toContain(dynamicImport!.path);
});
test("metafile includes cssBundle for CSS outputs", async () => {
using dir = tempDir("metafile-css-bundle-test", {
"entry.js": `import "./styles.css"; console.log("styled");`,
"styles.css": `.foo { color: red; }`,
});
const result = await Bun.build({
entrypoints: [`${dir}/entry.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
// Find the JS output that should reference the CSS bundle
let foundCssBundle = false;
for (const [outputPath, output] of Object.entries(outputs)) {
if (outputPath.endsWith(".js") && output.cssBundle) {
foundCssBundle = true;
expect(typeof output.cssBundle).toBe("string");
expect(output.cssBundle.endsWith(".css")).toBe(true);
break;
}
}
expect(foundCssBundle).toBe(true);
});
test("metafile handles circular imports", async () => {
using dir = tempDir("metafile-circular-test", {
"a.js": `import { b } from "./b.js"; export const a = 1; console.log(b);`,
"b.js": `import { a } from "./a.js"; export const b = 2; console.log(a);`,
});
const result = await Bun.build({
entrypoints: [`${dir}/a.js`],
metafile: true,
});
expect(result.success).toBe(true);
expect(result.metafile).toBeDefined();
const inputs = result.metafile.inputs as Record<string, MetafileInput>;
const inputKeys = Object.keys(inputs);
// Should have both files
expect(inputKeys.length).toBe(2);
// Both files should have imports to each other
let aImportsB = false;
let bImportsA = false;
for (const [path, input] of Object.entries(inputs)) {
if (path.includes("a.js")) {
aImportsB = input.imports.some(imp => imp.path.includes("b.js"));
}
if (path.includes("b.js")) {
bImportsA = input.imports.some(imp => imp.path.includes("a.js"));
}
}
expect(aImportsB).toBe(true);
expect(bImportsA).toBe(true);
});
});

View File

@@ -34,38 +34,3 @@ error: "workspaces.packages" expects an array of strings, e.g.
`;
exports[`should handle modified git resolutions in bun.lock 1`] = `"{"lockfileVersion":0,"configVersion":1,"workspaces":{"":{"dependencies":{"jquery":"3.7.1"}}},"packages":{"jquery":["jquery@git+ssh://git@github.com/dylan-conway/install-test-8.git#3a1288830817d13da39e9231302261896f8721ea",{},"3a1288830817d13da39e9231302261896f8721ea"]}}"`;
exports[`bun-install should report error on invalid format for package.json 1`] = `
"1 | foo
^
error: Unexpected foo
at [dir]/package.json:1:1
ParserError: failed to parse '[dir]/package.json'
"
`;
exports[`bun-install should report error on invalid format for dependencies 1`] = `
"1 | {"name":"foo","version":"0.0.1","dependencies":[]}
^
error: dependencies expects a map of specifiers, e.g.
"dependencies": {
<green>"bun"<r>: <green>"latest"<r>
}
at [dir]/package.json:1:33
"
`;
exports[`bun-install should report error on invalid format for workspaces 1`] = `
"1 | {"name":"foo","version":"0.0.1","workspaces":{"packages":{"bar":true}}}
^
error: "workspaces.packages" expects an array of strings, e.g.
"workspaces": {
"packages": [
"path/to/package"
]
}
at [dir]/package.json:1:58
"
`;
exports[`bun-install should handle modified git resolutions in bun.lock 1`] = `"{"lockfileVersion":0,"configVersion":1,"workspaces":{"":{"dependencies":{"jquery":"3.7.1"}}},"packages":{"jquery":["jquery@git+ssh://git@github.com/dylan-conway/install-test-8.git#3a1288830817d13da39e9231302261896f8721ea",{},"3a1288830817d13da39e9231302261896f8721ea"]}}"`;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,193 +1,178 @@
import { spawn } from "bun";
import { spawn, spawnSync } from "bun";
import { upgrade_test_helpers } from "bun:internal-for-testing";
import { beforeAll, describe, expect, it, setDefaultTimeout } from "bun:test";
import { beforeAll, beforeEach, expect, it, setDefaultTimeout } from "bun:test";
import { bunExe, bunEnv as env, tls, tmpdirSync } from "harness";
import { copyFile } from "node:fs/promises";
import { copyFileSync } from "node:fs";
import { basename, join } from "path";
const { openTempDirWithoutSharingDelete, closeTempDirHandle } = upgrade_test_helpers;
let cwd: string;
let execPath: string;
beforeAll(() => {
setDefaultTimeout(1000 * 60 * 5);
});
describe.concurrent(() => {
it("two invalid arguments, should display error message and suggest command", async () => {
const cwd = tmpdirSync();
const execPath = join(cwd, basename(bunExe()));
await copyFile(bunExe(), execPath);
const { stderr } = spawn({
cmd: [execPath, "upgrade", "bun-types", "--dev"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
const err = await stderr.text();
expect(err.split(/\r?\n/)).toContain("error: This command updates Bun itself, and does not take package names.");
expect(err.split(/\r?\n/)).toContain("note: Use `bun update bun-types --dev` instead.");
});
it("two invalid arguments flipped, should display error message and suggest command", async () => {
const cwd = tmpdirSync();
const execPath = join(cwd, basename(bunExe()));
await copyFile(bunExe(), execPath);
const { stderr } = spawn({
cmd: [execPath, "upgrade", "--dev", "bun-types"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
const err = await stderr.text();
expect(err.split(/\r?\n/)).toContain("error: This command updates Bun itself, and does not take package names.");
expect(err.split(/\r?\n/)).toContain("note: Use `bun update --dev bun-types` instead.");
});
it("one invalid argument, should display error message and suggest command", async () => {
const cwd = tmpdirSync();
const execPath = join(cwd, basename(bunExe()));
await copyFile(bunExe(), execPath);
const { stderr } = spawn({
cmd: [execPath, "upgrade", "bun-types"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
const err = await stderr.text();
expect(err.split(/\r?\n/)).toContain("error: This command updates Bun itself, and does not take package names.");
expect(err.split(/\r?\n/)).toContain("note: Use `bun update bun-types` instead.");
});
it("one valid argument, should succeed", async () => {
const cwd = tmpdirSync();
const execPath = join(cwd, basename(bunExe()));
await copyFile(bunExe(), execPath);
const { stderr } = spawn({
cmd: [execPath, "upgrade", "--help"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
const err = await stderr.text();
// Should not contain error message
expect(err.split(/\r?\n/)).not.toContain(
"error: This command updates bun itself, and does not take package names.",
);
expect(err.split(/\r?\n/)).not.toContain("note: Use `bun update --help` instead.");
});
it("two valid argument, should succeed", async () => {
const cwd = tmpdirSync();
const execPath = join(cwd, basename(bunExe()));
await copyFile(bunExe(), execPath);
const { stderr } = spawn({
cmd: [execPath, "upgrade", "--stable", "--profile"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
const err = await stderr.text();
// Should not contain error message
expect(err.split(/\r?\n/)).not.toContain(
"error: This command updates Bun itself, and does not take package names.",
);
expect(err.split(/\r?\n/)).not.toContain("note: Use `bun update --stable --profile` instead.");
});
it("zero arguments, should succeed", async () => {
const tagName = bunExe().includes("-debug") ? "canary" : `bun-v${Bun.version}`;
using server = Bun.serve({
tls: tls,
port: 0,
async fetch() {
return new Response(
JSON.stringify({
"tag_name": tagName,
"assets": [
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-windows-x64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-windows-x64.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-windows-x64-baseline.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-windows-x64-baseline.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-linux-x64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-linux-x64.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-linux-x64-baseline.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-linux-x64-baseline.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-darwin-x64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-darwin-x64.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-darwin-x64-baseline.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-darwin-x64-baseline.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-darwin-aarch64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-darwin-aarch64.zip`,
},
],
}),
);
},
});
// On windows, open the temporary directory without FILE_SHARE_DELETE before spawning
// the upgrade process. This is to test for EBUSY errors
openTempDirWithoutSharingDelete();
const cwd = tmpdirSync();
const execPath = join(cwd, basename(bunExe()));
await copyFile(bunExe(), execPath);
const { stderr } = Bun.spawn({
cmd: [execPath, "upgrade"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env: {
...env,
NODE_TLS_REJECT_UNAUTHORIZED: "0",
GITHUB_API_DOMAIN: `${server.hostname}:${server.port}`,
},
});
closeTempDirHandle();
// Should not contain error message
expect(await stderr.text()).not.toContain("error:");
});
beforeEach(async () => {
cwd = tmpdirSync();
execPath = join(cwd, basename(bunExe()));
copyFileSync(bunExe(), execPath);
});
it("two invalid arguments, should display error message and suggest command", async () => {
const { stderr } = spawn({
cmd: [execPath, "upgrade", "bun-types", "--dev"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
const err = await stderr.text();
expect(err.split(/\r?\n/)).toContain("error: This command updates Bun itself, and does not take package names.");
expect(err.split(/\r?\n/)).toContain("note: Use `bun update bun-types --dev` instead.");
});
it("two invalid arguments flipped, should display error message and suggest command", async () => {
const { stderr } = spawn({
cmd: [execPath, "upgrade", "--dev", "bun-types"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
const err = await stderr.text();
expect(err.split(/\r?\n/)).toContain("error: This command updates Bun itself, and does not take package names.");
expect(err.split(/\r?\n/)).toContain("note: Use `bun update --dev bun-types` instead.");
});
it("one invalid argument, should display error message and suggest command", async () => {
const { stderr } = spawn({
cmd: [execPath, "upgrade", "bun-types"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
const err = await stderr.text();
expect(err.split(/\r?\n/)).toContain("error: This command updates Bun itself, and does not take package names.");
expect(err.split(/\r?\n/)).toContain("note: Use `bun update bun-types` instead.");
});
it("one valid argument, should succeed", async () => {
const { stderr } = spawn({
cmd: [execPath, "upgrade", "--help"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
const err = await stderr.text();
// Should not contain error message
expect(err.split(/\r?\n/)).not.toContain("error: This command updates bun itself, and does not take package names.");
expect(err.split(/\r?\n/)).not.toContain("note: Use `bun update --help` instead.");
});
it("two valid argument, should succeed", async () => {
const { stderr } = spawn({
cmd: [execPath, "upgrade", "--stable", "--profile"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env,
});
const err = await stderr.text();
// Should not contain error message
expect(err.split(/\r?\n/)).not.toContain("error: This command updates Bun itself, and does not take package names.");
expect(err.split(/\r?\n/)).not.toContain("note: Use `bun update --stable --profile` instead.");
});
it("zero arguments, should succeed", async () => {
const tagName = bunExe().includes("-debug") ? "canary" : `bun-v${Bun.version}`;
using server = Bun.serve({
tls: tls,
port: 0,
async fetch() {
return new Response(
JSON.stringify({
"tag_name": tagName,
"assets": [
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-windows-x64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-windows-x64.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-windows-x64-baseline.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-windows-x64-baseline.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-linux-x64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-linux-x64.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-linux-x64-baseline.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-linux-x64-baseline.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-darwin-x64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-darwin-x64.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-darwin-x64-baseline.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-darwin-x64-baseline.zip`,
},
{
"url": "foo",
"content_type": "application/zip",
"name": "bun-darwin-aarch64.zip",
"browser_download_url": `https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${tagName}/bun-darwin-aarch64.zip`,
},
],
}),
);
},
});
// On windows, open the temporary directory without FILE_SHARE_DELETE before spawning
// the upgrade process. This is to test for EBUSY errors
openTempDirWithoutSharingDelete();
const { stderr } = spawnSync({
cmd: [execPath, "upgrade"],
cwd,
stdout: null,
stdin: "pipe",
stderr: "pipe",
env: {
...env,
NODE_TLS_REJECT_UNAUTHORIZED: "0",
GITHUB_API_DOMAIN: `${server.hostname}:${server.port}`,
},
});
closeTempDirHandle();
// Should not contain error message
expect(stderr.toString()).not.toContain("error:");
});

View File

@@ -2,30 +2,6 @@
* This file can be directly run
*
* PACKAGE_DIR_TO_USE=(realpath .) bun test/cli/install/dummy.registry.ts
*
* ## Concurrent Test Support
*
* This module supports running tests concurrently by using URL prefixes to isolate
* each test's registry requests. Each test gets a unique context with:
* - Its own handler for registry requests
* - Its own package_dir (temp directory)
* - Its own request counter
* - A unique registry URL with a prefix (e.g., http://localhost:PORT/test-123/)
*
* ### Usage for concurrent tests:
* ```typescript
* it("my test", async () => {
* const ctx = await createTestContext({ linker: "hoisted" });
* try {
* const urls: string[] = [];
* setContextHandler(ctx, dummyRegistry(urls, ctx));
* // Use ctx.package_dir, ctx.registry_url, ctx.requested
* await writeFile(join(ctx.package_dir, "package.json"), ...);
* } finally {
* destroyTestContext(ctx);
* }
* });
* ```
*/
import { file, Server } from "bun";
import { tmpdirSync } from "harness";
@@ -43,202 +19,24 @@ type Pkg = {
tarball: string;
};
};
let handler: Handler;
let server: Server;
export let package_dir: string;
export let requested: number;
export let root_url: string;
export let check_npm_auth_type = { check: true };
// ============================================================================
// Concurrent Test Context Support
// ============================================================================
export async function write(path: string, content: string | object) {
if (!package_dir) throw new Error("writeToPackageDir() must be called in a test");
/** Global counter for generating unique test IDs */
let testIdCounter = 0;
/**
* Context for a single test, containing all per-test state.
* Use this for concurrent test execution.
*/
export interface TestContext {
/** Unique identifier for this test context (e.g., "test-1") */
id: string;
/** The package directory for this test (a unique temp directory) */
package_dir: string;
/** Number of requests made to this test's handler */
requested: number;
/** The handler for this test's registry requests */
handler: Handler;
/** The registry URL for this test (includes prefix, e.g., http://localhost:PORT/test-1/) */
registry_url: string;
await Bun.write(join(package_dir, path), typeof content === "string" ? content : JSON.stringify(content));
}
/** Map of test ID prefix -> test context */
const testContexts = new Map<string, TestContext>();
/** Default handler for unmatched requests */
function defaultHandler(): Response {
return new Response("Tea Break~", { status: 418 });
export function read(path: string) {
return Bun.file(join(package_dir, path));
}
/**
* Extract the test ID prefix from a URL path.
* URL format: /test-123/package-name or /test-123/@scope%2fpackage-name
*/
function extractTestPrefix(url: string): { prefix: string; remainingPath: string } | null {
const urlObj = new URL(url);
const path = urlObj.pathname;
// Match /test-N/ followed by anything
const match = path.match(/^\/(test-\d+)(\/.*)?$/);
if (match) {
return {
prefix: match[1],
remainingPath: match[2] || "/",
};
}
return null;
}
/**
* Creates a new isolated test context for concurrent test execution.
* Each context has its own handler, package_dir, and request counter.
*
* The bunfig.toml is automatically created with the prefixed registry URL.
*
* @param opts - Optional configuration for the test context
* @returns A new TestContext that should be used for all test operations
*/
export async function createTestContext(opts?: { linker: "hoisted" | "isolated" }): Promise<TestContext> {
const id = `test-${++testIdCounter}`;
const pkg_dir = tmpdirSync();
const ctx: TestContext = {
id,
package_dir: pkg_dir,
requested: 0,
handler: defaultHandler,
registry_url: `${root_url}/${id}/`,
};
testContexts.set(id, ctx);
// Create bunfig.toml with the prefixed registry URL
await writeFile(
join(pkg_dir, "bunfig.toml"),
`
[install]
cache = false
registry = "${ctx.registry_url}"
saveTextLockfile = false
${opts ? `linker = "${opts.linker}"` : ""}
`,
);
return ctx;
}
/**
* Cleans up a test context after the test is done.
* This removes the context from the registry so requests won't be routed to it.
*/
export function destroyTestContext(ctx: TestContext): void {
testContexts.delete(ctx.id);
}
/**
* Sets the handler for a specific test context.
* The handler will receive all requests that have this context's URL prefix.
*/
export function setContextHandler(ctx: TestContext, newHandler: Handler): void {
ctx.handler = newHandler;
}
/**
* Creates a dummy registry handler for a specific test context.
* This is the concurrent-safe version that uses the context's registry_url for tarballs.
*
* @param ctx - The test context (provides registry_url for tarball URLs)
* @param urls - Array to collect requested URLs (passed by reference)
* @param info - Package version info (default: { "0.0.2": {} })
* @param numberOfTimesTo500PerURL - Number of times to return 500 before success (for retry testing)
*/
export function dummyRegistryForContext(
ctx: TestContext,
urls: string[],
info: any = { "0.0.2": {} },
numberOfTimesTo500PerURL = 0,
): Handler {
let retryCountsByURL = new Map<string, number>();
const _handler: Handler = async request => {
urls.push(request.url);
const url = request.url.replaceAll("%2f", "/");
let status = 200;
if (numberOfTimesTo500PerURL > 0) {
let currentCount = retryCountsByURL.get(request.url);
if (currentCount === undefined) {
retryCountsByURL.set(request.url, numberOfTimesTo500PerURL);
status = 500;
} else {
retryCountsByURL.set(request.url, currentCount - 1);
status = currentCount > 0 ? 500 : 200;
}
}
expect(request.method).toBe("GET");
if (url.endsWith(".tgz")) {
return new Response(file(join(import.meta.dir, basename(url).toLowerCase())), { status });
}
expect(request.headers.get("accept")).toBe(
"application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*",
);
if (check_npm_auth_type.check) {
expect(request.headers.get("npm-auth-type")).toBe(null);
}
expect(await request.text()).toBe("");
// For context-based requests, strip the test prefix
const urlObj = new URL(url);
const pathAfterPrefix = urlObj.pathname.replace(`/${ctx.id}/`, "/");
const name = pathAfterPrefix.slice(1); // Remove leading slash
const versions: Record<string, Pkg> = {};
let version;
for (version in info) {
if (!/^[0-9]/.test(version)) continue;
versions[version] = {
name,
version,
dist: {
tarball: `${ctx.registry_url}${name}-${info[version].as ?? version}.tgz`,
},
...info[version],
};
}
return new Response(
JSON.stringify({
name,
versions,
"dist-tags": {
latest: info.latest ?? version,
},
}),
{ status },
);
};
return _handler;
}
/**
* Creates a dummy registry handler (legacy version for backward compatibility).
*
* @param urls - Array to collect requested URLs (passed by reference)
* @param info - Package version info (default: { "0.0.2": {} })
* @param numberOfTimesTo500PerURL - Number of times to return 500 before success (for retry testing)
*/
export function dummyRegistry(urls: string[], info: any = { "0.0.2": {} }, numberOfTimesTo500PerURL = 0): Handler {
export function dummyRegistry(urls: string[], info: any = { "0.0.2": {} }, numberOfTimesTo500PerURL = 0) {
let retryCountsByURL = new Map<string, number>();
const _handler: Handler = async request => {
urls.push(request.url);
@@ -298,58 +96,19 @@ export function dummyRegistry(urls: string[], info: any = { "0.0.2": {} }, numbe
return _handler;
}
// ============================================================================
// Legacy API (for backward compatibility with non-concurrent tests)
// ============================================================================
/** @deprecated Use createTestContext() for concurrent tests */
export let package_dir: string;
/** @deprecated Use ctx.requested for concurrent tests */
export let requested: number;
/** Legacy handler for non-prefixed requests */
let legacyHandler: Handler = defaultHandler;
export async function write(path: string, content: string | object) {
if (!package_dir) throw new Error("writeToPackageDir() must be called in a test");
await Bun.write(join(package_dir, path), typeof content === "string" ? content : JSON.stringify(content));
}
export function read(path: string) {
return Bun.file(join(package_dir, path));
}
/** @deprecated Use setContextHandler() for concurrent tests */
export function setHandler(newHandler: Handler) {
legacyHandler = newHandler;
handler = newHandler;
}
function resetHandler() {
setHandler(defaultHandler);
setHandler(() => new Response("Tea Break~", { status: 418 }));
}
export function dummyBeforeAll() {
server = Bun.serve({
async fetch(request) {
const url = request.url;
// Check if this is a prefixed request (for concurrent tests)
const prefixInfo = extractTestPrefix(url);
if (prefixInfo) {
const ctx = testContexts.get(prefixInfo.prefix);
if (ctx) {
ctx.requested++;
return await ctx.handler(request);
}
// Unknown test prefix - return 404
return new Response(`Unknown test prefix: ${prefixInfo.prefix}`, { status: 404 });
}
// Legacy non-prefixed request
requested++;
return await legacyHandler(request);
return await handler(request);
},
port: 0,
});
@@ -358,7 +117,6 @@ export function dummyBeforeAll() {
export function dummyAfterAll() {
server.stop();
testContexts.clear();
}
export function getPort() {
@@ -368,8 +126,6 @@ export function getPort() {
let packageDirGetter: () => string = () => {
return tmpdirSync();
};
/** @deprecated Use createTestContext() for concurrent tests */
export async function dummyBeforeEach(opts?: { linker: "hoisted" | "isolated" }) {
resetHandler();
requested = 0;
@@ -386,7 +142,6 @@ ${opts ? `linker = "${opts.linker}"` : ""}
);
}
/** @deprecated Use destroyTestContext() for concurrent tests */
export async function dummyAfterEach() {
resetHandler();
}

View File

@@ -2,100 +2,205 @@ import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, isBroken, isCI, isIntelMacOS, isMacOS, isWindows, tempDirWithFiles } from "harness";
import { join } from "path";
describe.concurrent("require.cache", () => {
test("require.cache is not an empty object literal when inspected", () => {
const inspected = Bun.inspect(require.cache);
expect(inspected).not.toBe("{}");
expect(inspected).toContain("Module {");
test("require.cache is not an empty object literal when inspected", () => {
const inspected = Bun.inspect(require.cache);
expect(inspected).not.toBe("{}");
expect(inspected).toContain("Module {");
});
// This also tests __dirname and __filename
test("require.cache", () => {
const { stdout, exitCode } = Bun.spawnSync({
cmd: [bunExe(), "run", join(import.meta.dir, "require-cache-fixture.cjs")],
env: bunEnv,
stderr: "inherit",
});
// This also tests __dirname and __filename
test("require.cache", async () => {
await using proc = Bun.spawn({
cmd: [bunExe(), "run", join(import.meta.dir, "require-cache-fixture.cjs")],
expect(stdout.toString().trim()).toEndWith("--pass--");
expect(exitCode).toBe(0);
});
// https://github.com/oven-sh/bun/issues/5188
test("require.cache does not include unevaluated modules", () => {
const { stdout, exitCode } = Bun.spawnSync({
cmd: [bunExe(), "run", join(import.meta.dir, "require-cache-bug-5188.js")],
env: bunEnv,
stderr: "inherit",
});
expect(stdout.toString().trim()).toEndWith("--pass--");
expect(exitCode).toBe(0);
});
describe.skipIf(isBroken && isIntelMacOS)("files transpiled and loaded don't leak the output source code", () => {
test("via require() with a lot of long export names", () => {
let text = "";
for (let i = 0; i < 10000; i++) {
text += `exports.superDuperExtraCrazyLongNameWowSuchNameLongYouveNeverSeenANameThisLongForACommonJSModuleExport${i} = 1;\n`;
}
console.log("Text length:", text.length);
const dir = tempDirWithFiles("require-cache-bug-leak-1", {
"index.js": text,
"require-cache-bug-leak-fixture.js": `
const path = require.resolve("./index.js");
const gc = global.gc || globalThis?.Bun?.gc || (() => {});
const noChildren = module.children = { indexOf() { return 0; } }; // disable children tracking
function bust() {
const mod = require.cache[path];
if (mod) {
mod.parent = null;
mod.children = noChildren;
delete require.cache[path];
}
}
for (let i = 0; i < 50; i++) {
require(path);
bust();
}
gc(true);
const baseline = process.memoryUsage.rss();
for (let i = 0; i < 500; i++) {
require(path);
bust(path);
console.log("RSS", (process.memoryUsage.rss() / 1024 / 1024) | 0, "MB");
}
gc(true);
const rss = process.memoryUsage.rss();
const diff = rss - baseline;
console.log("RSS diff", (diff / 1024 / 1024) | 0, "MB");
console.log("RSS", (diff / 1024 / 1024) | 0, "MB");
if (diff > 100 * 1024 * 1024) {
// Bun v1.1.21 reported 844 MB here on macOS arm64.
throw new Error("Memory leak detected");
}
exports.abc = 123;
`,
});
console.log({ dir });
const { exitCode, resourceUsage } = Bun.spawnSync({
cmd: [bunExe(), "run", "--smol", join(dir, "require-cache-bug-leak-fixture.js")],
env: bunEnv,
stderr: "inherit",
stdio: ["inherit", "inherit", "inherit"],
});
const [stdout, exitCode] = await Promise.all([proc.stdout.text(), proc.exited]);
expect(stdout.trim()).toEndWith("--pass--");
console.log(resourceUsage);
expect(exitCode).toBe(0);
});
}, 60000);
// https://github.com/oven-sh/bun/issues/5188
test("require.cache does not include unevaluated modules", async () => {
await using proc = Bun.spawn({
cmd: [bunExe(), "run", join(import.meta.dir, "require-cache-bug-5188.js")],
test("via await import() with a lot of function calls", () => {
let text = "function i() { return 1; }\n";
for (let i = 0; i < 20000; i++) {
text += `i();\n`;
}
text += "exports.forceCommonJS = true;\n";
console.log("Text length:", text.length);
const dir = tempDirWithFiles("require-cache-bug-leak-3", {
"index.js": text,
"require-cache-bug-leak-fixture.js": `
const path = require.resolve("./index.js");
const gc = global.gc || globalThis?.Bun?.gc || (() => {});
function bust() {
delete require.cache[path];
}
for (let i = 0; i < 100; i++) {
await import(path);
bust();
}
gc(true);
const baseline = process.memoryUsage.rss();
for (let i = 0; i < 400; i++) {
await import(path);
bust(path);
console.log("RSS", (process.memoryUsage.rss() / 1024 / 1024) | 0, "MB");
}
gc(true);
const rss = process.memoryUsage.rss();
const diff = rss - baseline;
console.log("RSS diff", (diff / 1024 / 1024) | 0, "MB");
console.log("RSS", (diff / 1024 / 1024) | 0, "MB");
if (diff > 64 * 1024 * 1024) {
// Bun v1.1.22 reported 1 MB here on macoS arm64.
// Bun v1.1.21 reported 257 MB here on macoS arm64.
throw new Error("Memory leak detected");
}
export default 123;
`,
});
const { exitCode, resourceUsage } = Bun.spawnSync({
cmd: [bunExe(), "run", "--smol", join(dir, "require-cache-bug-leak-fixture.js")],
env: bunEnv,
stderr: "inherit",
stdio: ["inherit", "inherit", "inherit"],
});
const [stdout, exitCode] = await Promise.all([proc.stdout.text(), proc.exited]);
expect(stdout.trim()).toEndWith("--pass--");
console.log(resourceUsage);
expect(exitCode).toBe(0);
});
}, 60000); // takes 4s on an M1 in release build
describe.skipIf(isBroken && isIntelMacOS)("files transpiled and loaded don't leak the output source code", () => {
test("via require() with a lot of long export names", async () => {
let text = "";
for (let i = 0; i < 10000; i++) {
text += `exports.superDuperExtraCrazyLongNameWowSuchNameLongYouveNeverSeenANameThisLongForACommonJSModuleExport${i} = 1;\n`;
}
test("via import() with a lot of long export names", () => {
let text = "";
for (let i = 0; i < 10000; i++) {
text += `export const superDuperExtraCrazyLongNameWowSuchNameLongYouveNeverSeenANameThisLongForACommonJSModuleExport${i} = 1;\n`;
}
console.log("Text length:", text.length);
const dir = tempDirWithFiles("require-cache-bug-leak-4", {
"index.js": text,
"require-cache-bug-leak-fixture.js": `
const path = require.resolve("./index.js");
const gc = global.gc || globalThis?.Bun?.gc || (() => {});
function bust() {
delete require.cache[path];
}
const dir = tempDirWithFiles("require-cache-bug-leak-1", {
"index.js": text,
"require-cache-bug-leak-fixture.js": `
const path = require.resolve("./index.js");
const gc = global.gc || globalThis?.Bun?.gc || (() => {});
const noChildren = module.children = { indexOf() { return 0; } }; // disable children tracking
function bust() {
const mod = require.cache[path];
if (mod) {
mod.parent = null;
mod.children = noChildren;
delete require.cache[path];
}
}
for (let i = 0; i < 50; i++) {
await import(path);
bust();
}
gc(true);
const baseline = process.memoryUsage.rss();
for (let i = 0; i < 250; i++) {
await import(path);
bust(path);
console.log("RSS", (process.memoryUsage.rss() / 1024 / 1024) | 0, "MB");
}
gc(true);
const rss = process.memoryUsage.rss();
const diff = rss - baseline;
console.log("RSS diff", (diff / 1024 / 1024) | 0, "MB");
console.log("RSS", (diff / 1024 / 1024) | 0, "MB");
if (diff > 64 * 1024 * 1024) {
// Bun v1.1.21 reported 423 MB here on macoS arm64.
// Bun v1.1.22 reported 4 MB here on macoS arm64.
throw new Error("Memory leak detected");
}
for (let i = 0; i < 50; i++) {
require(path);
bust();
}
gc(true);
const baseline = process.memoryUsage.rss();
for (let i = 0; i < 500; i++) {
require(path);
bust(path);
}
gc(true);
const rss = process.memoryUsage.rss();
const diff = rss - baseline;
console.log("RSS diff", (diff / 1024 / 1024) | 0, "MB");
console.log("RSS", (diff / 1024 / 1024) | 0, "MB");
if (diff > 100 * 1024 * 1024) {
// Bun v1.1.21 reported 844 MB here on macOS arm64.
throw new Error("Memory leak detected");
}
export default 124;
`,
});
console.log({ dir });
const { exitCode, resourceUsage } = Bun.spawnSync({
cmd: [bunExe(), "run", "--smol", join(dir, "require-cache-bug-leak-fixture.js")],
env: bunEnv,
stdio: ["inherit", "inherit", "inherit"],
});
exports.abc = 123;
`,
});
console.log({ dir });
await using proc = Bun.spawn({
cmd: [bunExe(), "run", "--smol", join(dir, "require-cache-bug-leak-fixture.js")],
env: bunEnv,
stdio: ["inherit", "inherit", "inherit"],
});
console.log(resourceUsage);
expect(exitCode).toBe(0);
}, 60000);
const exitCode = await proc.exited;
expect(exitCode).toBe(0);
}, 60000);
test("via await import() with a lot of function calls", async () => {
test.todoIf(
// Flaky specifically on macOS CI.
isBroken && isMacOS && isCI,
)(
"via require() with a lot of function calls",
() => {
let text = "function i() { return 1; }\n";
for (let i = 0; i < 20000; i++) {
text += `i();\n`;
@@ -104,223 +209,108 @@ describe.concurrent("require.cache", () => {
console.log("Text length:", text.length);
const dir = tempDirWithFiles("require-cache-bug-leak-3", {
const dir = tempDirWithFiles("require-cache-bug-leak-2", {
"index.js": text,
"require-cache-bug-leak-fixture.js": `
const path = require.resolve("./index.js");
const gc = global.gc || globalThis?.Bun?.gc || (() => {});
function bust() {
const path = require.resolve("./index.js");
const gc = global.gc || globalThis?.Bun?.gc || (() => {});
function bust() {
const mod = require.cache[path];
if (mod) {
mod.parent = null;
mod.children = [];
delete require.cache[path];
}
for (let i = 0; i < 100; i++) {
await import(path);
bust();
}
gc(true);
const baseline = process.memoryUsage.rss();
for (let i = 0; i < 400; i++) {
await import(path);
bust(path);
}
gc(true);
const rss = process.memoryUsage.rss();
const diff = rss - baseline;
console.log("RSS diff", (diff / 1024 / 1024) | 0, "MB");
console.log("RSS", (diff / 1024 / 1024) | 0, "MB");
if (diff > 64 * 1024 * 1024) {
// Bun v1.1.22 reported 1 MB here on macoS arm64.
// Bun v1.1.21 reported 257 MB here on macoS arm64.
throw new Error("Memory leak detected");
}
export default 123;
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "run", "--smol", join(dir, "require-cache-bug-leak-fixture.js")],
env: bunEnv,
stdio: ["inherit", "inherit", "inherit"],
});
const exitCode = await proc.exited;
expect(exitCode).toBe(0);
}, 60000); // takes 4s on an M1 in release build
test("via import() with a lot of long export names", async () => {
let text = "";
for (let i = 0; i < 10000; i++) {
text += `export const superDuperExtraCrazyLongNameWowSuchNameLongYouveNeverSeenANameThisLongForACommonJSModuleExport${i} = 1;\n`;
}
const dir = tempDirWithFiles("require-cache-bug-leak-4", {
"index.js": text,
"require-cache-bug-leak-fixture.js": `
const path = require.resolve("./index.js");
const gc = global.gc || globalThis?.Bun?.gc || (() => {});
function bust() {
delete require.cache[path];
}
for (let i = 0; i < 50; i++) {
await import(path);
bust();
}
gc(true);
const baseline = process.memoryUsage.rss();
for (let i = 0; i < 250; i++) {
await import(path);
bust(path);
}
gc(true);
const rss = process.memoryUsage.rss();
const diff = rss - baseline;
console.log("RSS diff", (diff / 1024 / 1024) | 0, "MB");
console.log("RSS", (diff / 1024 / 1024) | 0, "MB");
if (diff > 64 * 1024 * 1024) {
// Bun v1.1.21 reported 423 MB here on macoS arm64.
// Bun v1.1.22 reported 4 MB here on macoS arm64.
throw new Error("Memory leak detected");
}
export default 124;
`,
});
console.log({ dir });
await using proc = Bun.spawn({
cmd: [bunExe(), "run", "--smol", join(dir, "require-cache-bug-leak-fixture.js")],
env: bunEnv,
stdio: ["inherit", "inherit", "inherit"],
});
const exitCode = await proc.exited;
expect(exitCode).toBe(0);
}, 60000);
test.todoIf(
// Flaky specifically on macOS CI.
isBroken && isMacOS && isCI,
)(
"via require() with a lot of function calls",
async () => {
let text = "function i() { return 1; }\n";
for (let i = 0; i < 20000; i++) {
text += `i();\n`;
}
text += "exports.forceCommonJS = true;\n";
console.log("Text length:", text.length);
for (let i = 0; i < 100; i++) {
require(path);
bust();
}
gc(true);
const baseline = process.memoryUsage.rss();
for (let i = 0; i < 400; i++) {
require(path);
bust(path);
console.log("RSS", (process.memoryUsage.rss() / 1024 / 1024) | 0, "MB");
}
gc(true);
const rss = process.memoryUsage.rss();
const diff = rss - baseline;
console.log("RSS diff", (diff / 1024 / 1024) | 0, "MB");
console.log("RSS", (diff / 1024 / 1024) | 0, "MB");
if (diff > 64 * 1024 * 1024) {
// Bun v1.1.22 reported 4 MB here on macoS arm64.
// Bun v1.1.21 reported 248 MB here on macoS arm64.
throw new Error("Memory leak detected");
}
const dir = tempDirWithFiles("require-cache-bug-leak-2", {
"index.js": text,
"require-cache-bug-leak-fixture.js": `
const path = require.resolve("./index.js");
const gc = global.gc || globalThis?.Bun?.gc || (() => {});
function bust() {
const mod = require.cache[path];
if (mod) {
mod.parent = null;
mod.children = [];
delete require.cache[path];
}
}
for (let i = 0; i < 100; i++) {
require(path);
bust();
}
gc(true);
const baseline = process.memoryUsage.rss();
for (let i = 0; i < 400; i++) {
require(path);
bust(path);
}
gc(true);
const rss = process.memoryUsage.rss();
const diff = rss - baseline;
console.log("RSS diff", (diff / 1024 / 1024) | 0, "MB");
console.log("RSS", (diff / 1024 / 1024) | 0, "MB");
if (diff > 64 * 1024 * 1024) {
// Bun v1.1.22 reported 4 MB here on macoS arm64.
// Bun v1.1.21 reported 248 MB here on macoS arm64.
throw new Error("Memory leak detected");
}
exports.abc = 123;
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "run", "--smol", join(dir, "require-cache-bug-leak-fixture.js")],
env: bunEnv,
stdio: ["inherit", "inherit", "inherit"],
});
const exitCode = await proc.exited;
expect(exitCode).toBe(0);
},
60000,
); // takes 4s on an M1 in release build
});
describe("files transpiled and loaded don't leak the AST", () => {
test("via require()", async () => {
await using proc = Bun.spawn({
cmd: [bunExe(), "run", join(import.meta.dir, "require-cache-bug-leak-fixture.js")],
exports.abc = 123;
`,
});
const { exitCode, resourceUsage } = Bun.spawnSync({
cmd: [bunExe(), "run", "--smol", join(dir, "require-cache-bug-leak-fixture.js")],
env: bunEnv,
stderr: "inherit",
stdio: ["inherit", "inherit", "inherit"],
});
const [stdout, exitCode] = await Promise.all([proc.stdout.text(), proc.exited]);
expect(stdout.trim()).toEndWith("--pass--");
console.log(resourceUsage);
expect(exitCode).toBe(0);
}, 20000);
test("via import()", async () => {
await using proc = Bun.spawn({
cmd: [bunExe(), "run", join(import.meta.dir, "esm-bug-leak-fixture.mjs")],
env: bunEnv,
stderr: "inherit",
});
const [stdout, exitCode] = await Promise.all([proc.stdout.text(), proc.exited]);
expect(stdout.trim()).toEndWith("--pass--");
expect(exitCode).toBe(0);
}, 20000);
});
// These tests are extra slow in debug builds
describe("files transpiled and loaded don't leak file paths", () => {
test("via require()", async () => {
await using proc = Bun.spawn({
cmd: [bunExe(), "--smol", "run", join(import.meta.dir, "cjs-fixture-leak-small.js")],
env: bunEnv,
stderr: "inherit",
});
const [stdout, exitCode] = await Promise.all([proc.stdout.text(), proc.exited]);
expect(stdout.trim()).toEndWith("--pass--");
expect(exitCode).toBe(0);
}, 30000);
test(
"via import()",
async () => {
await using proc = Bun.spawn({
cmd: [bunExe(), "--smol", "run", join(import.meta.dir, "esm-fixture-leak-small.mjs")],
env: bunEnv,
stderr: "inherit",
});
const [stdout, exitCode] = await Promise.all([proc.stdout.text(), proc.exited]);
expect(stdout.trim()).toEndWith("--pass--");
expect(exitCode).toBe(0);
},
// TODO: Investigate why this is so slow on Windows
isWindows ? 60000 : 30000,
);
});
},
60000,
); // takes 4s on an M1 in release build
});
describe("files transpiled and loaded don't leak the AST", () => {
test("via require()", () => {
const { stdout, exitCode } = Bun.spawnSync({
cmd: [bunExe(), "run", join(import.meta.dir, "require-cache-bug-leak-fixture.js")],
env: bunEnv,
stderr: "inherit",
});
expect(stdout.toString().trim()).toEndWith("--pass--");
expect(exitCode).toBe(0);
}, 20000);
test("via import()", () => {
const { stdout, exitCode } = Bun.spawnSync({
cmd: [bunExe(), "run", join(import.meta.dir, "esm-bug-leak-fixture.mjs")],
env: bunEnv,
stderr: "inherit",
});
expect(stdout.toString().trim()).toEndWith("--pass--");
expect(exitCode).toBe(0);
}, 20000);
});
// These tests are extra slow in debug builds
describe("files transpiled and loaded don't leak file paths", () => {
test("via require()", () => {
const { stdout, exitCode } = Bun.spawnSync({
cmd: [bunExe(), "--smol", "run", join(import.meta.dir, "cjs-fixture-leak-small.js")],
env: bunEnv,
stderr: "inherit",
});
expect(stdout.toString().trim()).toEndWith("--pass--");
expect(exitCode).toBe(0);
}, 30000);
test(
"via import()",
() => {
const { stdout, exitCode } = Bun.spawnSync({
cmd: [bunExe(), "--smol", "run", join(import.meta.dir, "esm-fixture-leak-small.mjs")],
env: bunEnv,
stderr: "inherit",
});
expect(stdout.toString().trim()).toEndWith("--pass--");
expect(exitCode).toBe(0);
},
// TODO: Investigate why this is so slow on Windows
isWindows ? 60000 : 30000,
);
});

View File

@@ -811,16 +811,6 @@ export async function toBeWorkspaceLink(actual: string, expectedLinkPath: string
return { pass, message };
}
export function getFDCount(): number {
if (isMacOS || isLinux) {
return fs.readdirSync(isMacOS ? "/dev/fd" : "/proc/self/fd").length;
}
const maxFD = openSync("/dev/null", "r");
closeSync(maxFD);
return maxFD;
}
export function getMaxFD(): number {
if (isMacOS || isLinux) {
let max = -1;

View File

@@ -1,58 +0,0 @@
import * as Bun from "bun";
import { expectType } from "./utilities";
const socket = await Bun.udpSocket({
port: 0,
});
expectType(socket.hostname).is<string>();
expectType(socket.port).is<number>();
expectType(socket.address).is<Bun.SocketAddress>();
expectType(socket.binaryType).is<Bun.BinaryType>();
expectType(socket.closed).is<boolean>();
expectType(socket.send("Hello", 41234, "127.0.0.1")).is<boolean>();
expectType(socket.send(new Uint8Array([1, 2, 3]), 41234, "127.0.0.1")).is<boolean>();
expectType(socket.sendMany(["Hello", 41234, "127.0.0.1", "World", 41235, "127.0.0.2"])).is<number>();
expectType(socket.setBroadcast(true)).is<boolean>();
expectType(socket.setTTL(64)).is<number>();
expectType(socket.setMulticastTTL(2)).is<number>();
expectType(socket.setMulticastLoopback(true)).is<boolean>();
expectType(socket.setMulticastInterface("192.168.1.100")).is<boolean>();
expectType(socket.addMembership("224.0.0.1")).is<boolean>();
expectType(socket.addMembership("224.0.0.1", "192.168.1.100")).is<boolean>();
expectType(socket.dropMembership("224.0.0.1")).is<boolean>();
expectType(socket.dropMembership("224.0.0.1", "192.168.1.100")).is<boolean>();
expectType(socket.addSourceSpecificMembership("10.0.0.1", "232.0.0.1")).is<boolean>();
expectType(socket.addSourceSpecificMembership("10.0.0.1", "232.0.0.1", "192.168.1.100")).is<boolean>();
expectType(socket.dropSourceSpecificMembership("10.0.0.1", "232.0.0.1")).is<boolean>();
expectType(socket.dropSourceSpecificMembership("10.0.0.1", "232.0.0.1", "192.168.1.100")).is<boolean>();
expectType(socket.ref()).is<void>();
expectType(socket.unref()).is<void>();
expectType(socket.close()).is<void>();
const connectedSocket = await Bun.udpSocket({
port: 0,
connect: {
hostname: "127.0.0.1",
port: 41234,
},
});
expectType(connectedSocket.remoteAddress).is<Bun.SocketAddress>();
expectType(connectedSocket.send("Hello")).is<boolean>();
expectType(connectedSocket.send(new Uint8Array([1, 2, 3]))).is<boolean>();
expectType(connectedSocket.sendMany(["Hello", "World"])).is<number>();
expectType(connectedSocket.setBroadcast(false)).is<boolean>();
expectType(connectedSocket.setTTL(128)).is<number>();
expectType(connectedSocket.setMulticastTTL(1)).is<number>();
expectType(connectedSocket.setMulticastLoopback(false)).is<boolean>();
connectedSocket.close();

View File

@@ -1,6 +1,7 @@
import { afterAll, beforeAll, describe, expect, it } from "bun:test";
import { afterAll, beforeAll, expect, it } from "bun:test";
import fs from "fs";
import { bunEnv, bunExe, gc } from "harness";
import { tmpdir } from "os";
import path from "path";
let proxy, auth_proxy, server;
@@ -76,185 +77,145 @@ afterAll(() => {
});
const test = process.env.PROXY_URL ? it : it.skip;
describe.concurrent(() => {
test("should be able to post on TLS", async () => {
const data = JSON.stringify({
"name": "bun",
});
const result = await fetch("https://httpbin.org/post", {
method: "POST",
proxy: process.env.PROXY_URL,
verbose: true,
headers: {
"Content-Type": "application/json",
},
body: data,
}).then(res => res.json());
expect(result.data).toBe(data);
test("should be able to post on TLS", async () => {
const data = JSON.stringify({
"name": "bun",
});
test("should be able to post bigger on TLS", async () => {
const data = fs.readFileSync(path.join(import.meta.dir, "fetch.json")).toString("utf8");
const result = await fetch("https://httpbin.org/post", {
method: "POST",
proxy: process.env.PROXY_URL,
verbose: true,
headers: {
"Content-Type": "application/json",
},
body: data,
}).then(res => res.json());
expect(result.data).toBe(data);
});
const result = await fetch("https://httpbin.org/post", {
method: "POST",
proxy: process.env.PROXY_URL,
verbose: true,
headers: {
"Content-Type": "application/json",
},
body: data,
}).then(res => res.json());
describe("proxy non-TLS", async () => {
let url;
let auth_proxy_url;
let proxy_url;
const requests = [
() => [new Request(url), auth_proxy_url],
() => [
new Request(url, {
method: "POST",
body: "Hello, World",
}),
auth_proxy_url,
],
() => [url, auth_proxy_url],
() => [new Request(url), proxy_url],
() => [
new Request(url, {
method: "POST",
body: "Hello, World",
}),
proxy_url,
],
() => [url, proxy_url],
];
beforeAll(() => {
url = `http://localhost:${server.port}`;
auth_proxy_url = `http://squid_user:ASD123%40123asd@localhost:${auth_proxy.port}`;
proxy_url = `localhost:${proxy.port}`;
});
expect(result.data).toBe(data);
});
for (let callback of requests) {
test(async () => {
const [request, proxy] = callback();
gc();
const response = await fetch(request, { verbose: true, proxy });
gc();
const text = await response.text();
gc();
expect(text).toBe("Hello, World");
test("should be able to post bigger on TLS", async () => {
const data = fs.readFileSync(path.join(import.meta.dir, "fetch.json")).toString("utf8");
const result = await fetch("https://httpbin.org/post", {
method: "POST",
proxy: process.env.PROXY_URL,
verbose: true,
headers: {
"Content-Type": "application/json",
},
body: data,
}).then(res => res.json());
expect(result.data).toBe(data);
});
it("proxy non-TLS", async () => {
const url = `http://localhost:${server.port}`;
const auth_proxy_url = `http://squid_user:ASD123%40123asd@localhost:${auth_proxy.port}`;
const proxy_url = `localhost:${proxy.port}`;
const requests = [
[new Request(url), auth_proxy_url],
[
new Request(url, {
method: "POST",
body: "Hello, World",
}),
auth_proxy_url,
],
[url, auth_proxy_url],
[new Request(url), proxy_url],
[
new Request(url, {
method: "POST",
body: "Hello, World",
}),
proxy_url,
],
[url, proxy_url],
];
for (let [request, proxy] of requests) {
gc();
const response = await fetch(request, { verbose: true, proxy });
gc();
const text = await response.text();
gc();
expect(text).toBe("Hello, World");
}
});
it("proxy non-TLS auth can fail", async () => {
const url = `http://localhost:${server.port}`;
{
try {
const response = await fetch(url, { verbose: true, proxy: `http://localhost:${auth_proxy.port}` });
expect(response.status).toBe(407);
} catch (err) {
expect(true).toBeFalsy();
}
}
{
try {
const response = await fetch(url, {
verbose: true,
proxy: `http://squid_user:asdf123@localhost:${auth_proxy.port}`,
});
expect(response.status).toBe(403);
} catch (err) {
expect(true).toBeFalsy();
}
}
});
it.each([
[undefined, undefined],
["", ""],
["''", "''"],
['""', '""'],
])("test proxy env, http_proxy=%s https_proxy=%s", async (http_proxy, https_proxy) => {
const path = `${tmpdir()}/bun-test-http-proxy-env-${Date.now()}.ts`;
fs.writeFileSync(path, 'await fetch("https://example.com");');
const { stderr, exitCode } = Bun.spawnSync({
cmd: [bunExe(), "run", path],
env: {
...bunEnv,
http_proxy: http_proxy,
https_proxy: https_proxy,
},
stdout: "inherit",
stderr: "pipe",
});
it("proxy non-TLS auth can fail", async () => {
const url = `http://localhost:${server.port}`;
{
try {
const response = await fetch(url, {
verbose: true,
proxy: `http://localhost:${auth_proxy.port}`,
});
expect(response.status).toBe(407);
} catch (err) {
expect(true).toBeFalsy();
}
}
{
try {
const response = await fetch(url, {
verbose: true,
proxy: `http://squid_user:asdf123@localhost:${auth_proxy.port}`,
});
expect(response.status).toBe(403);
} catch (err) {
expect(true).toBeFalsy();
}
}
});
it("simultaneous proxy auth failures should not hang", async () => {
const url = `http://localhost:${server.port}`;
const invalidProxy = `http://localhost:${auth_proxy.port}`;
// First batch: 5 simultaneous fetches with invalid credentials
const firstBatch = await Promise.all([
fetch(url, { proxy: invalidProxy }),
fetch(url, { proxy: invalidProxy }),
fetch(url, { proxy: invalidProxy }),
fetch(url, { proxy: invalidProxy }),
fetch(url, { proxy: invalidProxy }),
]);
expect(firstBatch.map(r => r.status)).toEqual([407, 407, 407, 407, 407]);
await Promise.all(firstBatch.map(r => r.text())).catch(() => {});
// Second batch: immediately send another 5
// Before the fix, these would hang due to keep-alive on failed proxy connections
const secondBatch = await Promise.all([
fetch(url, { proxy: invalidProxy }),
fetch(url, { proxy: invalidProxy }),
fetch(url, { proxy: invalidProxy }),
fetch(url, { proxy: invalidProxy }),
fetch(url, { proxy: invalidProxy }),
]);
expect(secondBatch.map(r => r.status)).toEqual([407, 407, 407, 407, 407]);
await Promise.all(secondBatch.map(r => r.text())).catch(() => {});
});
it.each([
[undefined, undefined],
["", ""],
["''", "''"],
['""', '""'],
])("test proxy env, http_proxy=%s https_proxy=%s", async (http_proxy, https_proxy) => {
const { exited, stderr: stream } = Bun.spawn({
cmd: [bunExe(), "-e", 'await fetch("https://example.com")'],
env: {
...bunEnv,
http_proxy: http_proxy,
https_proxy: https_proxy,
},
stdout: "inherit",
stderr: "pipe",
});
const [exitCode, stderr] = await Promise.all([exited, stream.text()]);
try {
expect(stderr.includes("FailedToOpenSocket: Was there a typo in the url or port?")).toBe(false);
expect(exitCode).toBe(0);
} finally {
fs.unlinkSync(path);
}
});
it.each([
// Empty entries in NO_PROXY should not cause out-of-bounds access
["localhost, , example.com"],
[",localhost,example.com"],
["localhost,example.com,"],
[" , , "],
[",,,"],
[". , .. , ..."],
])("NO_PROXY with empty entries does not crash: %s", async no_proxy => {
// We just need to verify parsing NO_PROXY doesn't crash.
// The fetch target doesn't matter - NO_PROXY parsing happens before the connection.
const { exitCode } = Bun.spawnSync({
cmd: [bunExe(), "-e", `fetch("http://localhost:1").catch(() => {})`],
env: {
...bunEnv,
http_proxy: "http://127.0.0.1:1",
NO_PROXY: no_proxy,
},
});
it.each([
// Empty entries in NO_PROXY should not cause out-of-bounds access
["localhost, , example.com"],
[",localhost,example.com"],
["localhost,example.com,"],
[" , , "],
[",,,"],
[". , .. , ..."],
])("NO_PROXY with empty entries does not crash: %s", async no_proxy => {
// We just need to verify parsing NO_PROXY doesn't crash.
// The fetch target doesn't matter - NO_PROXY parsing happens before the connection.
const { exited, stderr: stream } = Bun.spawn({
cmd: [bunExe(), "-e", `fetch("http://localhost:1").catch(() => {})`],
env: {
...bunEnv,
http_proxy: "http://127.0.0.1:1",
NO_PROXY: no_proxy,
},
stderr: "pipe",
});
const [exitCode, stderr] = await Promise.all([exited, stream.text()]);
if (exitCode !== 0) {
console.error("stderr:", stderr);
}
expect(exitCode).toBe(0);
});
expect(exitCode).toBe(0);
});

View File

@@ -1,7 +1,9 @@
import { $ } from "bun";
import { heapStats } from "bun:jsc";
import { describe, expect, test } from "bun:test";
import { bunEnv, isPosix, tempDir, tempDirWithFiles } from "harness";
import { bunEnv, isPosix, tempDirWithFiles } from "harness";
import { appendFileSync, closeSync, openSync, writeFileSync } from "node:fs";
import { devNull, tmpdir } from "os";
import { join } from "path";
import { bunExe } from "./test_builder";
import { createTestBuilder } from "./util";
@@ -51,50 +53,22 @@ const TESTS: [name: string, builder: () => TestBuilder, runs?: number][] = [
],
];
describe.concurrent("fd leak", () => {
describe("fd leak", () => {
function fdLeakTest(name: string, builder: () => TestBuilder, runs: number = 1000, threshold: number = 5) {
test(`fdleak_${name}`, async () => {
const testcode = await Bun.file(join(import.meta.dirname, "./test_builder.ts")).text();
Bun.gc(true);
const baseline = openSync(devNull, "r");
closeSync(baseline);
const impl = /* ts */ `
import { openSync, closeSync } from "node:fs";
import { devNull } from "os";
const TestBuilder = createTestBuilder(import.meta.path);
const runs = ${runs};
const threshold = ${threshold};
Bun.gc(true);
const baseline = openSync(devNull, "r");
closeSync(baseline);
for (let i = 0; i < runs; i++) {
await ${builder.toString().slice("() =>".length)}.quiet().run();
}
// Run the GC, because the interpreter closes file descriptors when it
// deinitializes when its finalizer is called
Bun.gc(true);
const fd = openSync(devNull, "r");
closeSync(fd);
if (fd - baseline > threshold) {
console.error('FD leak detected:', fd - baseline, 'leaked (threshold:', threshold, ')');
process.exit(1);
}
`;
using dir = tempDir("fdleak", {
"script.ts": testcode + impl,
});
const { exited, stderr: stream } = Bun.spawn([process.argv0, "--smol", "test", join(dir, "script.ts")], {
env: bunEnv,
stderr: "pipe",
});
const [exitCode, stderr] = await Promise.all([exited, stream.text()]);
if (exitCode != 0) {
console.log("\n\nSTDERR:", stderr);
for (let i = 0; i < runs; i++) {
await builder().quiet().run();
}
expect(exitCode).toBe(0);
// Run the GC, because the interpreter closes file descriptors when it
// deinitializes when its finalizer is called
Bun.gc(true);
const fd = openSync(devNull, "r");
closeSync(fd);
expect(fd - baseline).toBeLessThanOrEqual(threshold);
}, 100_000);
}
@@ -105,7 +79,12 @@ describe.concurrent("fd leak", () => {
threshold: number = DEFAULT_THRESHOLD,
) {
test(`memleak_${name}`, async () => {
const testcode = await Bun.file(join(import.meta.dirname, "./test_builder.ts")).text();
const tempfile = join(tmpdir(), "script.ts");
const filepath = import.meta.dirname;
const testcode = await Bun.file(join(filepath, "./test_builder.ts")).text();
writeFileSync(tempfile, testcode);
const impl = /* ts */ `
import { heapStats } from "bun:jsc";
@@ -140,17 +119,16 @@ describe.concurrent("fd leak", () => {
}
`;
using dir = tempDir("memleak", {
"script.ts": testcode + impl,
});
appendFileSync(tempfile, impl);
const { exited, stderr: stream } = Bun.spawn([process.argv0, "--smol", "test", join(dir, "script.ts")], {
// console.log("THE CODE", readFileSync(tempfile, "utf-8"));
const { stdout, stderr, exitCode } = Bun.spawnSync([process.argv0, "--smol", "test", tempfile], {
env: bunEnv,
stderr: "pipe",
});
const [exitCode, stderr] = await Promise.all([exited, stream.text()]);
// console.log("STDOUT:", stdout.toString(), "\n\nSTDERR:", stderr.toString());
if (exitCode != 0) {
console.log("\n\nSTDERR:", stderr);
console.log("\n\nSTDERR:", stderr.toString());
}
expect(exitCode).toBe(0);
}, 100_000);
@@ -204,7 +182,12 @@ describe.concurrent("fd leak", () => {
test.if(runTheTest)(
`memleak_protect_${name}`,
async () => {
const testcode = await Bun.file(join(import.meta.dirname, "./test_builder.ts")).text();
const tempfile = join(tmpdir(), "script.ts");
const filepath = import.meta.dirname;
const testcode = await Bun.file(join(filepath, "./test_builder.ts")).text();
writeFileSync(tempfile, testcode);
const impl = /* ts */ `
import { heapStats } from "bun:jsc";
@@ -228,18 +211,16 @@ describe.concurrent("fd leak", () => {
}
`;
using dir = tempDir("memleak_protect", {
"script.ts": testcode + impl,
});
appendFileSync(tempfile, impl);
const { stderr: stream, exited } = Bun.spawn([process.argv0, "--smol", "test", join(dir, "script.ts")], {
// console.log("THE CODE", readFileSync(tempfile, "utf-8"));
const { stdout, stderr, exitCode } = Bun.spawnSync([process.argv0, "--smol", "test", tempfile], {
env: bunEnv,
stdout: "ignore",
stderr: "pipe",
});
const [exitCode, stderr] = await Promise.all([exited, stream.text()]);
// console.log("STDOUT:", stdout.toString(), "\n\nSTDERR:", stderr.toString());
if (exitCode != 0) {
console.log("\n\nSTDERR:", stderr);
console.log("\n\nSTDERR:", stderr.toString());
}
expect(exitCode).toBe(0);
},
@@ -392,7 +373,7 @@ describe.concurrent("fd leak", () => {
true,
);
describe.serial("#11816", async () => {
describe("#11816", async () => {
function doit(builtin: boolean) {
test(builtin ? "builtin" : "external", async () => {
const files = tempDirWithFiles("hi", {
@@ -425,7 +406,7 @@ describe.concurrent("fd leak", () => {
doit(true);
});
describe.serial("not leaking ParsedShellScript when ShellInterpreter never runs", () => {
describe("not leaking ParsedShellScript when ShellInterpreter never runs", async () => {
function doit(builtin: boolean) {
test(builtin ? "builtin" : "external", async () => {
const files = tempDirWithFiles("hi", {

View File

@@ -8,6 +8,14 @@ import { join } from "node:path";
export function createTestBuilder(path: string) {
var { describe, test, afterAll, beforeAll, expect, beforeEach, afterEach } = Bun.jest(path);
var insideTestScope = false;
beforeEach(() => {
insideTestScope = true;
});
afterEach(() => {
insideTestScope = false;
});
class TestBuilder {
_testName: string | undefined = undefined;
@@ -239,6 +247,14 @@ export function createTestBuilder(path: string) {
}
async run(): Promise<undefined> {
if (!insideTestScope) {
const err = new Error("TestBuilder.run() must be called inside a test scope");
test("TestBuilder.run() must be called inside a test scope", () => {
throw err;
});
return Promise.resolve(undefined);
}
try {
let finalPromise = Bun.$(this._scriptStr, ...this._expresssions);
if (this.tempdir) finalPromise = finalPromise.cwd(this.tempdir);

View File

@@ -3,7 +3,7 @@ import { describe, test } from "bun:test";
import { bunEnv, bunExe, isASAN, isBroken, isLinux, nodeExe } from "harness";
import { basename, join } from "path";
describe.concurrent("AsyncLocalStorage passes context to callbacks", () => {
describe("AsyncLocalStorage passes context to callbacks", () => {
let files = [...new Glob(join(import.meta.dir, "async-context", "async-context-*.js")).scanSync()];
let todos = ["async-context-worker_threads-message.js"];

View File

@@ -29,20 +29,9 @@ exports.nodeEchoServer = async function nodeEchoServer(paddingStrategy = 0) {
subprocess.stdout.off("data", readData);
resolve({ address, url, subprocess });
} catch (e) {
// JSON parse failed, need more data - don't log, just wait for more chunks
console.error(e);
}
}
subprocess.on("error", reject);
subprocess.on("exit", code => {
if (code !== 0 && code !== null) {
reject(new Error(`node-echo-server exited with code ${code}`));
}
});
subprocess.stdout.on("data", readData);
const result = await promise;
// Add Symbol.asyncDispose for use with `await using`
result[Symbol.asyncDispose] = async () => {
result.subprocess?.kill?.(9);
};
return result;
return await promise;
};

View File

@@ -69,12 +69,12 @@ server.on("stream", (stream, headers, flags) => {
});
}
});
let baseurl = "https://127.0.0.1:";
let baseurl = "https://localhost:";
server.listen(0, "127.0.0.1");
server.listen(0, "localhost");
server.on("listening", () => {
const { port, address, family } = server.address();
baseurl = `https://127.0.0.1:${port}`;
process.stdout.write(JSON.stringify({ port, address: "127.0.0.1", family: "IPv4" }));
baseurl = `https://localhost:${port}`;
process.stdout.write(JSON.stringify({ port, address: "localhost", family: "IPv4" }));
});

View File

@@ -9,7 +9,7 @@ import tls from "node:tls";
import { Duplex } from "stream";
import http2utils from "./helpers";
import { nodeEchoServer, TLS_CERT, TLS_OPTIONS } from "./http2-helpers";
const { describe, expect, it, createCallCheckCtx } = createTest(import.meta.path);
const { afterEach, beforeEach, describe, expect, it, createCallCheckCtx } = createTest(import.meta.path);
const ASAN_MULTIPLIER = isASAN ? 3 : 1;
function invalidArgTypeHelper(input) {
@@ -39,7 +39,18 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
http2.constants.PADDING_STRATEGY_MAX,
http2.constants.PADDING_STRATEGY_ALIGNED,
]) {
describe.concurrent(`${path.basename(nodeExecutable)} ${paddingStrategyName(paddingStrategy)}`, () => {
describe(`${path.basename(nodeExecutable)} ${paddingStrategyName(paddingStrategy)}`, () => {
let nodeEchoServer_;
let HTTPS_SERVER;
beforeEach(async () => {
nodeEchoServer_ = await nodeEchoServer(paddingStrategy);
HTTPS_SERVER = nodeEchoServer_.url;
});
afterEach(async () => {
nodeEchoServer_.subprocess?.kill?.(9);
});
async function nodeDynamicServer(test_name, code) {
if (!nodeExecutable) throw new Error("node executable not found");
@@ -74,7 +85,7 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
return { address, url, subprocess };
}
function doHttp2Request(HTTPS_SERVER, url, headers, payload, options, request_options) {
function doHttp2Request(url, headers, payload, options, request_options) {
const { promise, resolve, reject: promiseReject } = Promise.withResolvers();
if (url.startsWith(HTTPS_SERVER)) {
options = { ...(options || {}), rejectUnauthorized: true, ...TLS_OPTIONS };
@@ -161,23 +172,15 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
describe("Client Basics", () => {
// we dont support server yet but we support client
it("should be able to send a GET request", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const result = await doHttp2Request(HTTPS_SERVER, HTTPS_SERVER, {
":path": "/get",
"test-header": "test-value",
});
const result = await doHttp2Request(HTTPS_SERVER, { ":path": "/get", "test-header": "test-value" });
let parsed;
expect(() => (parsed = JSON.parse(result.data))).not.toThrow();
expect(parsed.url).toBe(`${HTTPS_SERVER}/get`);
expect(parsed.headers["test-header"]).toBe("test-value");
});
it("should be able to send a POST request", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const payload = JSON.stringify({ "hello": "bun" });
const result = await doHttp2Request(
HTTPS_SERVER,
HTTPS_SERVER,
{ ":path": "/post", "test-header": "test-value", ":method": "POST" },
payload,
@@ -190,8 +193,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(parsed.data).toEqual(payload);
});
it("should be able to send data using end", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const payload = JSON.stringify({ "hello": "bun" });
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
@@ -220,8 +221,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(parsed.data).toEqual(payload);
});
it("should be able to mutiplex GET requests", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const results = await doMultiplexHttp2Request(HTTPS_SERVER, [
{ headers: { ":path": "/get" } },
{ headers: { ":path": "/get" } },
@@ -237,8 +236,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
}
});
it("http2 should receive remoteSettings when receiving default settings frame", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
const session = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
@@ -264,8 +261,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
}
});
it("should be able to mutiplex POST requests", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const results = await doMultiplexHttp2Request(HTTPS_SERVER, [
{ headers: { ":path": "/post", ":method": "POST" }, payload: JSON.stringify({ "request": 1 }) },
{ headers: { ":path": "/post", ":method": "POST" }, payload: JSON.stringify({ "request": 2 }) },
@@ -570,10 +565,8 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(() => http2.getUnpackedSettings(buffer)).toThrow(/Expected buf to be a Buffer/);
});
it("headers cannot be bigger than 65536 bytes", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
try {
await doHttp2Request(HTTPS_SERVER, HTTPS_SERVER, { ":path": "/", "test-header": "A".repeat(90000) });
await doHttp2Request(HTTPS_SERVER, { ":path": "/", "test-header": "A".repeat(90000) });
expect("unreachable").toBe(true);
} catch (err) {
expect(err.code).toBe("ERR_HTTP2_STREAM_ERROR");
@@ -581,8 +574,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
}
});
it("should be destroyed after close", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject: promiseReject } = Promise.withResolvers();
const client = http2.connect(`${HTTPS_SERVER}/get`, TLS_OPTIONS);
client.on("error", promiseReject);
@@ -604,8 +595,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(client.destroyed).toBe(true);
});
it("should be destroyed after destroy", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject: promiseReject } = Promise.withResolvers();
const client = http2.connect(`${HTTPS_SERVER}/get`, TLS_OPTIONS);
client.on("error", promiseReject);
@@ -627,14 +616,12 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(client.destroyed).toBe(true);
});
it("should fail to connect over HTTP/1.1", async () => {
await using server_ = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server_.url;
const tlsCert = TLS_CERT;
const tls = TLS_CERT;
using server = Bun.serve({
port: 0,
hostname: "127.0.0.1",
tls: {
...tlsCert,
...tls,
ca: TLS_CERT.ca,
},
fetch() {
@@ -643,15 +630,13 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
});
const url = `https://127.0.0.1:${server.port}`;
try {
await doHttp2Request(HTTPS_SERVER, url, { ":path": "/" }, null, TLS_OPTIONS);
await doHttp2Request(url, { ":path": "/" }, null, TLS_OPTIONS);
expect("unreachable").toBe(true);
} catch (err) {
expect(err.code).toBe("ERR_HTTP2_ERROR");
}
});
it("works with Duplex", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
class JSSocket extends Duplex {
constructor(socket) {
super({ emitClose: true });
@@ -678,7 +663,7 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
...TLS_OPTIONS,
},
() => {
doHttp2Request(HTTPS_SERVER, `${HTTPS_SERVER}/get`, { ":path": "/get" }, null, {
doHttp2Request(`${HTTPS_SERVER}/get`, { ":path": "/get" }, null, {
createConnection: () => {
return new JSSocket(socket);
},
@@ -693,8 +678,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
socket.destroy();
});
it("close callback", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(`${HTTPS_SERVER}/get`, TLS_OPTIONS);
client.on("error", reject);
@@ -703,10 +686,8 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(client.destroyed).toBe(true);
});
it("is possible to abort request", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const abortController = new AbortController();
const promise = doHttp2Request(HTTPS_SERVER, `${HTTPS_SERVER}/get`, { ":path": "/get" }, null, null, {
const promise = doHttp2Request(`${HTTPS_SERVER}/get`, { ":path": "/get" }, null, null, {
signal: abortController.signal,
});
abortController.abort();
@@ -718,8 +699,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
}
});
it("aborted event should work with abortController", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const abortController = new AbortController();
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
@@ -743,8 +722,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
});
it("aborted event should work with aborted signal", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
client.on("error", reject);
@@ -767,8 +744,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
});
it("state should work", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
client.on("error", reject);
@@ -877,8 +852,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(req.rstCode).toBe(http2.constants.NGHTTP2_NO_ERROR);
});
it("ping events should work", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
client.on("error", reject);
@@ -906,8 +879,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(received_ping).toEqual(Buffer.from("12345678"));
});
it("ping without events should work", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
client.on("error", reject);
@@ -934,8 +905,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(received_ping).toEqual(result.payload);
});
it("ping with wrong payload length events should error", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
client.on("error", reject);
@@ -954,8 +923,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(result.code).toBe("ERR_HTTP2_PING_LENGTH");
});
it("ping with wrong payload type events should throw", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
client.on("error", reject);
@@ -975,8 +942,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
expect(result.code).toBe("ERR_INVALID_ARG_TYPE");
});
it("stream event should work", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
client.on("error", reject);
@@ -991,8 +956,6 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
});
it("wantTrailers should work", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
client.on("error", reject);
@@ -1037,21 +1000,19 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
it.skipIf(!isCI)(
"should not leak memory",
async () => {
await using server = await nodeEchoServer(paddingStrategy);
await using proc = Bun.spawn({
() => {
const { stdout, exitCode } = Bun.spawnSync({
cmd: [bunExe(), "--smol", "run", path.join(import.meta.dir, "node-http2-memory-leak.js")],
env: {
...bunEnv,
BUN_JSC_forceRAMSize: (1024 * 1024 * 64).toString("10"),
HTTP2_SERVER_INFO: JSON.stringify(server),
HTTP2_SERVER_INFO: JSON.stringify(nodeEchoServer_),
HTTP2_SERVER_TLS: JSON.stringify(TLS_OPTIONS),
},
stderr: "inherit",
stdin: "inherit",
stdout: "inherit",
});
const exitCode = await proc.exited;
expect(exitCode || 0).toBe(0);
},
100000,
@@ -1123,57 +1084,53 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
server.subprocess.kill();
}
});
it("should not be able to write on socket", async () => {
await using server = await nodeEchoServer(paddingStrategy);
const HTTPS_SERVER = server.url;
const { promise, resolve, reject } = Promise.withResolvers();
it("should not be able to write on socket", done => {
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS, (session, socket) => {
try {
client.socket.write("hello");
client.socket.end();
reject(new Error("unreachable"));
expect().fail("unreachable");
} catch (err) {
try {
expect(err.code).toBe("ERR_HTTP2_NO_SOCKET_MANIPULATION");
resolve();
} catch (err2) {
reject(err2);
} catch (err) {
done(err);
}
done();
}
});
await promise;
});
it("should handle bad GOAWAY server frame size", async () => {
const { promise: serverListening, resolve: serverResolve } = Promise.withResolvers();
it("should handle bad GOAWAY server frame size", done => {
const server = net.createServer(socket => {
const settings = new http2utils.SettingsFrame(true);
socket.write(settings.data);
const frame = new http2utils.Frame(7, 7, 0, 0).data;
socket.write(Buffer.concat([frame, Buffer.alloc(7)]));
});
server.listen(0, "127.0.0.1", () => serverResolve());
await serverListening;
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_FRAME_SIZE_ERROR");
} finally {
server.close();
}
server.listen(0, "127.0.0.1", async () => {
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_FRAME_SIZE_ERROR");
done();
} catch (err) {
done(err);
} finally {
server.close();
}
});
});
it("should handle bad DATA_FRAME server frame size", async () => {
it("should handle bad DATA_FRAME server frame size", done => {
const { promise: waitToWrite, resolve: allowWrite } = Promise.withResolvers();
const { promise: serverListening, resolve: serverResolve } = Promise.withResolvers();
const server = net.createServer(async socket => {
const settings = new http2utils.SettingsFrame(true);
socket.write(settings.data);
@@ -1181,30 +1138,31 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
const frame = new http2utils.DataFrame(1, Buffer.alloc(16384 * 2), 0, 1).data;
socket.write(frame);
});
server.listen(0, "127.0.0.1", () => serverResolve());
await serverListening;
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_FRAME_SIZE_ERROR");
} finally {
server.close();
}
server.listen(0, "127.0.0.1", async () => {
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_FRAME_SIZE_ERROR");
done();
} catch (err) {
done(err);
} finally {
server.close();
}
});
});
it("should handle bad RST_FRAME server frame size (no stream)", async () => {
it("should handle bad RST_FRAME server frame size (no stream)", done => {
const { promise: waitToWrite, resolve: allowWrite } = Promise.withResolvers();
const { promise: serverListening, resolve: serverResolve } = Promise.withResolvers();
const server = net.createServer(async socket => {
const settings = new http2utils.SettingsFrame(true);
socket.write(settings.data);
@@ -1212,30 +1170,31 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
const frame = new http2utils.Frame(4, 3, 0, 0).data;
socket.write(Buffer.concat([frame, Buffer.alloc(4)]));
});
server.listen(0, "127.0.0.1", () => serverResolve());
await serverListening;
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_PROTOCOL_ERROR");
} finally {
server.close();
}
server.listen(0, "127.0.0.1", async () => {
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_PROTOCOL_ERROR");
done();
} catch (err) {
done(err);
} finally {
server.close();
}
});
});
it("should handle bad RST_FRAME server frame size (less than allowed)", async () => {
it("should handle bad RST_FRAME server frame size (less than allowed)", done => {
const { promise: waitToWrite, resolve: allowWrite } = Promise.withResolvers();
const { promise: serverListening, resolve: serverResolve } = Promise.withResolvers();
const server = net.createServer(async socket => {
const settings = new http2utils.SettingsFrame(true);
socket.write(settings.data);
@@ -1243,30 +1202,31 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
const frame = new http2utils.Frame(3, 3, 0, 1).data;
socket.write(Buffer.concat([frame, Buffer.alloc(3)]));
});
server.listen(0, "127.0.0.1", () => serverResolve());
await serverListening;
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_FRAME_SIZE_ERROR");
} finally {
server.close();
}
server.listen(0, "127.0.0.1", async () => {
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_FRAME_SIZE_ERROR");
done();
} catch (err) {
done(err);
} finally {
server.close();
}
});
});
it("should handle bad RST_FRAME server frame size (more than allowed)", async () => {
it("should handle bad RST_FRAME server frame size (more than allowed)", done => {
const { promise: waitToWrite, resolve: allowWrite } = Promise.withResolvers();
const { promise: serverListening, resolve: serverResolve } = Promise.withResolvers();
const server = net.createServer(async socket => {
const settings = new http2utils.SettingsFrame(true);
socket.write(settings.data);
@@ -1275,31 +1235,32 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
const frame = new http2utils.Frame(buffer.byteLength, 3, 0, 1).data;
socket.write(Buffer.concat([frame, buffer]));
});
server.listen(0, "127.0.0.1", () => serverResolve());
await serverListening;
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_FRAME_SIZE_ERROR");
} finally {
server.close();
}
server.listen(0, "127.0.0.1", async () => {
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_FRAME_SIZE_ERROR");
done();
} catch (err) {
done(err);
} finally {
server.close();
}
});
});
it("should handle bad CONTINUATION_FRAME server frame size", async () => {
it("should handle bad CONTINUATION_FRAME server frame size", done => {
const { promise: waitToWrite, resolve: allowWrite } = Promise.withResolvers();
const { promise: serverListening, resolve: serverResolve } = Promise.withResolvers();
const server = net.createServer(async socket => {
const settings = new http2utils.SettingsFrame(true);
socket.write(settings.data);
@@ -1316,31 +1277,32 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
);
socket.write(continuationFrame.data);
});
server.listen(0, "127.0.0.1", () => serverResolve());
await serverListening;
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_PROTOCOL_ERROR");
} finally {
server.close();
}
server.listen(0, "127.0.0.1", async () => {
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_PROTOCOL_ERROR");
done();
} catch (err) {
done(err);
} finally {
server.close();
}
});
});
it("should handle bad PRIOTITY_FRAME server frame size", async () => {
it("should handle bad PRIOTITY_FRAME server frame size", done => {
const { promise: waitToWrite, resolve: allowWrite } = Promise.withResolvers();
const { promise: serverListening, resolve: serverResolve } = Promise.withResolvers();
const server = net.createServer(async socket => {
const settings = new http2utils.SettingsFrame(true);
socket.write(settings.data);
@@ -1349,26 +1311,28 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
const frame = new http2utils.Frame(4, 2, 0, 1).data;
socket.write(Buffer.concat([frame, Buffer.alloc(4)]));
});
server.listen(0, "127.0.0.1", () => serverResolve());
await serverListening;
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_FRAME_SIZE_ERROR");
} finally {
server.close();
}
server.listen(0, "127.0.0.1", async () => {
const url = `http://127.0.0.1:${server.address().port}`;
try {
const { promise, resolve } = Promise.withResolvers();
const client = http2.connect(url);
client.on("error", resolve);
client.on("connect", () => {
const req = client.request({ ":path": "/" });
req.end();
allowWrite();
});
const result = await promise;
expect(result).toBeDefined();
expect(result.code).toBe("ERR_HTTP2_SESSION_ERROR");
expect(result.message).toBe("Session closed with error code NGHTTP2_FRAME_SIZE_ERROR");
done();
} catch (err) {
done(err);
} finally {
server.close();
}
});
});
});
});