Compare commits

..

2 Commits

Author SHA1 Message Date
Claude Bot
e1f3200fc4 fix: validate u32 range before casting RtlNtStatusToDosError result to u16
Replace @truncate with an explicit range check (raw > maxInt(u16)) before
@intCast to avoid silently wrapping out-of-range values from the u32
return of RtlNtStatusToDosError.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-01 05:26:48 +00:00
Claude Bot
e7e719cfcf fix(windows): prevent panic from unmapped Win32 error codes in enum conversion
RtlNtStatusToDosError returns a u32 Win32 error code, but was declared
as returning Win32Error enum(u16) directly. When the API returns an
error code not in Bun's Win32Error subset, Zig panics with "invalid
enum value". Use std.meta.intToEnum for safe conversion, falling back
to MR_MID_NOT_FOUND for unmapped codes. Also hardens Win32Error.get()
and SystemErrno.init() against the same class of invalid enum panics.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-01 05:17:21 +00:00
70 changed files with 133 additions and 2083 deletions

View File

@@ -46,7 +46,7 @@ macro(optionx variable type description)
set(${variable}_PREVIEW -D${variable})
if(DEFINED ENV{${variable}})
set(${variable} "$ENV{${variable}}" CACHE ${${variable}_TYPE} ${description} FORCE)
set(${variable} $ENV{${variable}} CACHE ${${variable}_TYPE} ${description} FORCE)
set(${variable}_SOURCE "environment variable")
set(${variable}_PREVIEW ${variable})
endif()

View File

@@ -127,7 +127,7 @@ if(ENABLE_ASAN AND ENABLE_LTO)
endif()
if(BUILDKITE_COMMIT)
set(DEFAULT_REVISION "${BUILDKITE_COMMIT}")
set(DEFAULT_REVISION ${BUILDKITE_COMMIT})
else()
execute_process(
COMMAND git rev-parse HEAD
@@ -141,7 +141,7 @@ else()
endif()
endif()
optionx(REVISION STRING "The git revision of the build" DEFAULT "${DEFAULT_REVISION}")
optionx(REVISION STRING "The git revision of the build" DEFAULT ${DEFAULT_REVISION})
# Used in process.version, process.versions.node, napi, and elsewhere
setx(NODEJS_VERSION "24.3.0")

View File

@@ -702,7 +702,7 @@ endif()
set(ZIG_FLAGS_BUN)
if(NOT "${REVISION}" STREQUAL "")
set(ZIG_FLAGS_BUN ${ZIG_FLAGS_BUN} "-Dsha=${REVISION}")
set(ZIG_FLAGS_BUN ${ZIG_FLAGS_BUN} -Dsha=${REVISION})
endif()
register_command(
@@ -728,10 +728,10 @@ register_command(
-Denable_tinycc=$<IF:$<BOOL:${ENABLE_TINYCC}>,true,false>
-Duse_mimalloc=$<IF:$<BOOL:${USE_MIMALLOC_AS_DEFAULT_ALLOCATOR}>,true,false>
-Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS}
"-Dversion=${VERSION}"
"-Dreported_nodejs_version=${NODEJS_VERSION}"
"-Dcanary=${CANARY_REVISION}"
"-Dcodegen_path=${CODEGEN_PATH}"
-Dversion=${VERSION}
-Dreported_nodejs_version=${NODEJS_VERSION}
-Dcanary=${CANARY_REVISION}
-Dcodegen_path=${CODEGEN_PATH}
-Dcodegen_embed=$<IF:$<BOOL:${CODEGEN_EMBED}>,true,false>
--prominent-compile-errors
--summary all

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
e3aa54798602dd27250fafde1b5a66f080046252
e9e16dca48dd4a8ffbc77642bc4be60407585f11
)
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)

View File

@@ -65,14 +65,10 @@ string(REPLACE "\\n" "\\\\n" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\r" "\\\\r" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\t" "\\\\t" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "${BKSLASH_PLACEHOLDER}" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
# CMake treats semicolons as list separators in unquoted variable expansions.
# Commit messages and other JSON string fields can contain semicolons, which would
# cause string(JSON) to receive garbled arguments. Escape them before parsing.
string(REPLACE ";" "\\;" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(JSON BUILDKITE_BUILD_UUID GET "${BUILDKITE_BUILD}" id)
string(JSON BUILDKITE_JOBS GET "${BUILDKITE_BUILD}" jobs)
string(JSON BUILDKITE_JOBS_COUNT LENGTH "${BUILDKITE_JOBS}")
string(JSON BUILDKITE_BUILD_UUID GET ${BUILDKITE_BUILD} id)
string(JSON BUILDKITE_JOBS GET ${BUILDKITE_BUILD} jobs)
string(JSON BUILDKITE_JOBS_COUNT LENGTH ${BUILDKITE_JOBS})
if(NOT BUILDKITE_JOBS_COUNT GREATER 0)
message(FATAL_ERROR "No jobs found: ${BUILDKITE_BUILD_URL}")
@@ -87,14 +83,14 @@ set(BUILDKITE_JOBS_MATCH)
math(EXPR BUILDKITE_JOBS_MAX_INDEX "${BUILDKITE_JOBS_COUNT} - 1")
foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
string(JSON BUILDKITE_JOB GET "${BUILDKITE_JOBS}" ${i})
string(JSON BUILDKITE_JOB_ID GET "${BUILDKITE_JOB}" id)
string(JSON BUILDKITE_JOB_PASSED GET "${BUILDKITE_JOB}" passed)
string(JSON BUILDKITE_JOB_GROUP_ID GET "${BUILDKITE_JOB}" group_uuid)
string(JSON BUILDKITE_JOB_GROUP_KEY GET "${BUILDKITE_JOB}" group_identifier)
string(JSON BUILDKITE_JOB_NAME GET "${BUILDKITE_JOB}" step_key)
string(JSON BUILDKITE_JOB GET ${BUILDKITE_JOBS} ${i})
string(JSON BUILDKITE_JOB_ID GET ${BUILDKITE_JOB} id)
string(JSON BUILDKITE_JOB_PASSED GET ${BUILDKITE_JOB} passed)
string(JSON BUILDKITE_JOB_GROUP_ID GET ${BUILDKITE_JOB} group_uuid)
string(JSON BUILDKITE_JOB_GROUP_KEY GET ${BUILDKITE_JOB} group_identifier)
string(JSON BUILDKITE_JOB_NAME GET ${BUILDKITE_JOB} step_key)
if(NOT BUILDKITE_JOB_NAME)
string(JSON BUILDKITE_JOB_NAME GET "${BUILDKITE_JOB}" name)
string(JSON BUILDKITE_JOB_NAME GET ${BUILDKITE_JOB} name)
endif()
if(NOT BUILDKITE_JOB_PASSED)
@@ -125,8 +121,7 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
endif()
file(READ ${BUILDKITE_ARTIFACTS_PATH} BUILDKITE_ARTIFACTS)
string(REPLACE ";" "\\;" BUILDKITE_ARTIFACTS "${BUILDKITE_ARTIFACTS}")
string(JSON BUILDKITE_ARTIFACTS_LENGTH LENGTH "${BUILDKITE_ARTIFACTS}")
string(JSON BUILDKITE_ARTIFACTS_LENGTH LENGTH ${BUILDKITE_ARTIFACTS})
if(NOT BUILDKITE_ARTIFACTS_LENGTH GREATER 0)
list(APPEND BUILDKITE_JOBS_NO_ARTIFACTS ${BUILDKITE_JOB_NAME})
continue()
@@ -134,9 +129,9 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
math(EXPR BUILDKITE_ARTIFACTS_MAX_INDEX "${BUILDKITE_ARTIFACTS_LENGTH} - 1")
foreach(i RANGE 0 ${BUILDKITE_ARTIFACTS_MAX_INDEX})
string(JSON BUILDKITE_ARTIFACT GET "${BUILDKITE_ARTIFACTS}" ${i})
string(JSON BUILDKITE_ARTIFACT_ID GET "${BUILDKITE_ARTIFACT}" id)
string(JSON BUILDKITE_ARTIFACT_PATH GET "${BUILDKITE_ARTIFACT}" path)
string(JSON BUILDKITE_ARTIFACT GET ${BUILDKITE_ARTIFACTS} ${i})
string(JSON BUILDKITE_ARTIFACT_ID GET ${BUILDKITE_ARTIFACT} id)
string(JSON BUILDKITE_ARTIFACT_PATH GET ${BUILDKITE_ARTIFACT} path)
if(NOT BUILDKITE_ARTIFACT_PATH MATCHES "\\.(o|a|lib|zip|tar|gz)")
continue()

View File

@@ -96,7 +96,7 @@ $2b$10$Lyj9kHYZtiyfxh2G60TEfeqs7xkkGiEFFDi3iJGc50ZG/XJ1sxIFi;
The format is composed of:
- `bcrypt`: `$2b`
- `rounds`: `$10` - rounds (log2 of the actual number of rounds)
- `rounds`: `$10` - rounds (log10 of the actual number of rounds)
- `salt`: `$Lyj9kHYZtiyfxh2G60TEfeqs7xkkGiEFFDi3iJGc50ZG/XJ1sxIFi`
- `hash`: `$GzJ8PuBi+K+BVojzPfS5mjnC8OpLGtv8KJqF99eP6a4`

View File

@@ -299,25 +299,6 @@ declare module "bun" {
*/
contentDisposition?: string | undefined;
/**
* The Content-Encoding header value.
* Specifies what content encodings have been applied to the object,
* for example to indicate that it has been compressed.
*
* @example
* // Setting gzip encoding
* const file = s3.file("data.json.gz", {
* contentEncoding: "gzip"
* });
*
* @example
* // Setting encoding when writing
* await s3.write("data.json.gz", compressedData, {
* contentEncoding: "gzip"
* });
*/
contentEncoding?: string | undefined;
/**
* By default, Amazon S3 uses the STANDARD Storage Class to store newly created objects.
*

View File

@@ -1231,6 +1231,12 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_udp_socket(const char *host, int port, int op
return LIBUS_SOCKET_ERROR;
}
if (port != 0) {
/* Should this also go for UDP? */
int enabled = 1;
setsockopt(listenFd, SOL_SOCKET, SO_REUSEADDR, &enabled, sizeof(enabled));
}
if (bsd_set_reuse(listenFd, options) != 0) {
freeaddrinfo(result);
return LIBUS_SOCKET_ERROR;

View File

@@ -504,11 +504,6 @@ namespace uWS
return ((c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z')) || c == '-';
}
/* RFC 9110 Section 5.5: optional whitespace (OWS) is SP or HTAB */
static inline bool isHTTPHeaderValueWhitespace(unsigned char c) {
return c == ' ' || c == '\t';
}
static inline int isHTTPorHTTPSPrefixForProxies(char *data, char *end) {
// We can check 8 because:
// 1. If it's "http://" that's 7 bytes, and it's supposed to at least have a trailing slash.
@@ -780,13 +775,13 @@ namespace uWS
/* Store this header, it is valid */
headers->value = std::string_view(preliminaryValue, (size_t) (postPaddedBuffer - preliminaryValue));
postPaddedBuffer += 2;
/* Trim trailing whitespace (SP, HTAB) per RFC 9110 Section 5.5 */
while (headers->value.length() && isHTTPHeaderValueWhitespace(headers->value.back())) {
/* Trim trailing whitespace (SP, HTAB) */
while (headers->value.length() && headers->value.back() < 33) {
headers->value.remove_suffix(1);
}
/* Trim initial whitespace (SP, HTAB) per RFC 9110 Section 5.5 */
while (headers->value.length() && isHTTPHeaderValueWhitespace(headers->value.front())) {
/* Trim initial whitespace (SP, HTAB) */
while (headers->value.length() && headers->value.front() < 33) {
headers->value.remove_prefix(1);
}

View File

@@ -1314,7 +1314,7 @@ async function spawnBunTest(execPath, testPath, opts = { cwd }) {
const isReallyTest = isTestStrict(testPath) || absPath.includes("vendor");
const args = opts["args"] ?? [];
const testArgs = ["test", ...args, `--timeout=${perTestTimeout}`, "--reporter=dots"];
const testArgs = ["test", ...args, `--timeout=${perTestTimeout}`];
// This will be set if a JUnit file is generated
let junitFilePath = null;

View File

@@ -2811,24 +2811,10 @@ export function toYaml(obj, indent = 0) {
value.includes("#") ||
value.includes("'") ||
value.includes('"') ||
value.includes("\\") ||
value.includes("\n") ||
value.includes("*") ||
value.includes("&") ||
value.includes("!") ||
value.includes("|") ||
value.includes(">") ||
value.includes("%") ||
value.includes("@") ||
value.includes("`") ||
value.includes("{") ||
value.includes("}") ||
value.includes("[") ||
value.includes("]") ||
value.includes(",") ||
value.includes(";"))
value.includes("*"))
) {
result += `${spaces}${key}: "${value.replace(/\\/g, "\\\\").replace(/"/g, '\\"')}"\n`;
result += `${spaces}${key}: "${value.replace(/"/g, '\\"')}"\n`;
continue;
}
result += `${spaces}${key}: ${value}\n`;

View File

@@ -515,8 +515,6 @@ pub const TransformTask = struct {
.path = source.path,
.virtual_source = &source,
.replace_exports = this.replace_exports,
.experimental_decorators = if (this.tsconfig) |ts| ts.experimental_decorators else false,
.emit_decorator_metadata = if (this.tsconfig) |ts| ts.emit_decorator_metadata else false,
};
const parse_result = this.transpiler.parse(parse_options, null) orelse {
@@ -586,8 +584,9 @@ pub const TransformTask = struct {
this.log.deinit();
this.input_code.deinitAndUnprotect();
this.output_code.deref();
// tsconfig is owned by JSTranspiler, not by TransformTask.
// Do not free it here — JSTranspiler.deinit handles it.
if (this.tsconfig) |tsconfig| {
tsconfig.deinit();
}
this.js_instance.deref();
bun.destroy(this);
}
@@ -661,9 +660,6 @@ pub fn constructor(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) b
});
errdefer {
this.config.log.deinit();
if (this.config.tsconfig) |tsconfig| {
tsconfig.deinit();
}
this.arena.deinit();
this.ref_count.clearWithoutDestructor();
bun.destroy(this);
@@ -748,9 +744,6 @@ pub fn deinit(this: *JSTranspiler) void {
this.buffer_writer.?.buffer.deinit();
}
if (this.config.tsconfig) |tsconfig| {
tsconfig.deinit();
}
this.arena.deinit();
bun.destroy(this);
}
@@ -813,8 +806,7 @@ fn getParseResult(this: *JSTranspiler, allocator: std.mem.Allocator, code: []con
.virtual_source = source,
.replace_exports = this.config.runtime.replace_exports,
.macro_js_ctx = macro_js_ctx,
.experimental_decorators = if (this.config.tsconfig) |ts| ts.experimental_decorators else false,
.emit_decorator_metadata = if (this.config.tsconfig) |ts| ts.emit_decorator_metadata else false,
// .allocator = this.
};
return this.transpiler.parse(parse_options, null);
@@ -882,7 +874,6 @@ pub fn scan(this: *JSTranspiler, globalThis: *jsc.JSGlobalObject, callframe: *js
const named_imports_value = try namedImportsToJS(
globalThis,
parse_result.ast.import_records.slice(),
this.config.trim_unused_imports orelse false,
);
const named_exports_value = try namedExportsToJS(
@@ -1069,30 +1060,20 @@ fn namedExportsToJS(global: *JSGlobalObject, named_exports: *JSAst.Ast.NamedExpo
return bun.String.toJSArray(global, names);
}
fn namedImportsToJS(global: *JSGlobalObject, import_records: []const ImportRecord, trim_unused_imports: bool) bun.JSError!jsc.JSValue {
fn namedImportsToJS(global: *JSGlobalObject, import_records: []const ImportRecord) bun.JSError!jsc.JSValue {
const path_label = jsc.ZigString.static("path");
const kind_label = jsc.ZigString.static("kind");
var count: u32 = 0;
for (import_records) |record| {
if (record.flags.is_internal) continue;
if (trim_unused_imports and record.flags.is_unused) continue;
count += 1;
}
const array = try jsc.JSValue.createEmptyArray(global, count);
const array = try jsc.JSValue.createEmptyArray(global, import_records.len);
array.ensureStillAlive();
var i: u32 = 0;
for (import_records) |record| {
for (import_records, 0..) |record, i| {
if (record.flags.is_internal) continue;
if (trim_unused_imports and record.flags.is_unused) continue;
array.ensureStillAlive();
const path = jsc.ZigString.init(record.path.text).toJS(global);
const kind = jsc.ZigString.init(record.kind.label()).toJS(global);
try array.putIndex(global, i, try jsc.JSValue.createObject2(global, path_label, kind_label, path, kind));
i += 1;
try array.putIndex(global, @as(u32, @truncate(i)), try jsc.JSValue.createObject2(global, path_label, kind_label, path, kind));
}
return array;
@@ -1184,7 +1165,6 @@ pub fn scanImports(this: *JSTranspiler, globalThis: *jsc.JSGlobalObject, callfra
const named_imports_value = try namedImportsToJS(
globalThis,
this.scan_pass_result.import_records.items,
this.config.trim_unused_imports orelse false,
);
return named_imports_value;
}

View File

@@ -2864,19 +2864,10 @@ pub const H2FrameParser = struct {
if (this.usedWindowSize > windowSizeValue) {
return globalObject.throwInvalidArguments("Expected windowSize to be greater than usedWindowSize", .{});
}
const oldWindowSize = this.windowSize;
this.windowSize = windowSizeValue;
if (this.localSettings.initialWindowSize < windowSizeValue) {
this.localSettings.initialWindowSize = windowSizeValue;
}
// Send a connection-level WINDOW_UPDATE frame to the peer so it knows
// about the increased window. Per RFC 9113 Section 6.9, the
// INITIAL_WINDOW_SIZE setting only applies to stream-level windows;
// the connection-level window must be updated explicitly.
if (windowSizeValue > oldWindowSize) {
const increment: u31 = @truncate(windowSizeValue - oldWindowSize);
this.sendWindowUpdate(0, UInt31WithReserved.init(increment, false));
}
var it = this.streams.valueIterator();
while (it.next()) |stream| {
if (stream.usedWindowSize > windowSizeValue) {

View File

@@ -10,56 +10,8 @@ using namespace JSC;
extern "C" SYSV_ABI void* JSDOMFile__construct(JSC::JSGlobalObject*, JSC::CallFrame* callframe);
extern "C" SYSV_ABI bool JSDOMFile__hasInstance(EncodedJSValue, JSC::JSGlobalObject*, EncodedJSValue);
// File.prototype inherits from Blob.prototype per the spec.
// This gives File instances all Blob methods while having a distinct prototype
// with constructor === File and [Symbol.toStringTag] === "File".
class JSDOMFilePrototype final : public JSC::JSNonFinalObject {
using Base = JSC::JSNonFinalObject;
public:
static constexpr unsigned StructureFlags = Base::StructureFlags;
static JSDOMFilePrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure)
{
JSDOMFilePrototype* prototype = new (NotNull, JSC::allocateCell<JSDOMFilePrototype>(vm)) JSDOMFilePrototype(vm, structure);
prototype->finishCreation(vm, globalObject);
return prototype;
}
DECLARE_INFO;
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
{
auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
structure->setMayBePrototype(true);
return structure;
}
template<typename CellType, JSC::SubspaceAccess>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
STATIC_ASSERT_ISO_SUBSPACE_SHARABLE(JSDOMFilePrototype, Base);
return &vm.plainObjectSpace();
}
protected:
JSDOMFilePrototype(JSC::VM& vm, JSC::Structure* structure)
: Base(vm, structure)
{
}
void finishCreation(JSC::VM& vm, JSC::JSGlobalObject* globalObject)
{
Base::finishCreation(vm);
// Set [Symbol.toStringTag] = "File" so Object.prototype.toString.call(file) === "[object File]"
this->putDirectWithoutTransition(vm, vm.propertyNames->toStringTagSymbol,
jsNontrivialString(vm, "File"_s),
JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::ReadOnly);
}
};
const JSC::ClassInfo JSDOMFilePrototype::s_info = { "File"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSDOMFilePrototype) };
// TODO: make this inehrit from JSBlob instead of InternalFunction
// That will let us remove this hack for [Symbol.hasInstance] and fix the prototype chain.
class JSDOMFile : public JSC::InternalFunction {
using Base = JSC::InternalFunction;
@@ -88,20 +40,15 @@ public:
Base::finishCreation(vm, 2, "File"_s);
}
static JSDOMFile* create(JSC::VM& vm, JSGlobalObject* globalObject, JSC::JSObject* filePrototype)
static JSDOMFile* create(JSC::VM& vm, JSGlobalObject* globalObject)
{
auto* zigGlobal = defaultGlobalObject(globalObject);
auto structure = createStructure(vm, globalObject, zigGlobal->functionPrototype());
auto* object = new (NotNull, JSC::allocateCell<JSDOMFile>(vm)) JSDOMFile(vm, structure);
object->finishCreation(vm);
// Set File.prototype to the distinct FilePrototype object (which inherits from Blob.prototype).
object->putDirect(vm, vm.propertyNames->prototype, filePrototype,
JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
// Set FilePrototype.constructor = File
filePrototype->putDirect(vm, vm.propertyNames->constructor, object,
static_cast<unsigned>(JSC::PropertyAttribute::DontEnum));
// This is not quite right. But we'll fix it if someone files an issue about it.
object->putDirect(vm, vm.propertyNames->prototype, zigGlobal->JSBlobPrototype(), JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly | 0);
return object;
}
@@ -122,7 +69,7 @@ public:
auto& vm = JSC::getVM(globalObject);
JSObject* newTarget = asObject(callFrame->newTarget());
auto* constructor = globalObject->JSDOMFileConstructor();
Structure* structure = globalObject->JSFileStructure();
Structure* structure = globalObject->JSBlobStructure();
if (constructor != newTarget) {
auto scope = DECLARE_THROW_SCOPE(vm);
@@ -130,7 +77,7 @@ public:
// ShadowRealm functions belong to a different global object.
getFunctionRealm(lexicalGlobalObject, newTarget));
RETURN_IF_EXCEPTION(scope, {});
structure = InternalFunction::createSubclassStructure(lexicalGlobalObject, newTarget, functionGlobalObject->JSFileStructure());
structure = InternalFunction::createSubclassStructure(lexicalGlobalObject, newTarget, functionGlobalObject->JSBlobStructure());
RETURN_IF_EXCEPTION(scope, {});
}
@@ -156,30 +103,9 @@ const JSC::ClassInfo JSDOMFile::s_info = { "File"_s, &Base::s_info, nullptr, nul
namespace Bun {
JSC::Structure* createJSFileStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject)
{
auto* zigGlobal = defaultGlobalObject(globalObject);
JSC::JSObject* blobPrototype = zigGlobal->JSBlobPrototype();
// Create FilePrototype with [[Prototype]] = Blob.prototype
auto* protoStructure = JSDOMFilePrototype::createStructure(vm, globalObject, blobPrototype);
auto* filePrototype = JSDOMFilePrototype::create(vm, globalObject, protoStructure);
// Create the structure for File instances: [[Prototype]] = FilePrototype
return JSC::Structure::create(vm, globalObject, filePrototype,
JSC::TypeInfo(static_cast<JSC::JSType>(0b11101110), WebCore::JSBlob::StructureFlags),
WebCore::JSBlob::info(), NonArray);
}
JSC::JSObject* createJSDOMFileConstructor(JSC::VM& vm, JSC::JSGlobalObject* globalObject)
{
auto* zigGlobal = defaultGlobalObject(globalObject);
// Get the File instance structure - its prototype is the FilePrototype we need
auto* fileStructure = zigGlobal->JSFileStructure();
auto* filePrototype = fileStructure->storedPrototypeObject();
return JSDOMFile::create(vm, globalObject, filePrototype);
return JSDOMFile::create(vm, globalObject);
}
}

View File

@@ -4,5 +4,4 @@
namespace Bun {
JSC::JSObject* createJSDOMFileConstructor(JSC::VM&, JSC::JSGlobalObject*);
JSC::Structure* createJSFileStructure(JSC::VM&, JSC::JSGlobalObject*);
}

View File

@@ -1744,11 +1744,6 @@ void GlobalObject::finishCreation(VM& vm)
init.set(CustomGetterSetter::create(init.vm, errorInstanceLazyStackCustomGetter, errorInstanceLazyStackCustomSetter));
});
m_JSFileStructure.initLater(
[](const Initializer<Structure>& init) {
init.set(Bun::createJSFileStructure(init.vm, init.owner));
});
m_JSDOMFileConstructor.initLater(
[](const Initializer<JSObject>& init) {
JSObject* fileConstructor = Bun::createJSDOMFileConstructor(init.vm, init.owner);

View File

@@ -609,7 +609,6 @@ public:
V(private, LazyPropertyOfGlobalObject<Structure>, m_importMetaBakeObjectStructure) \
V(private, LazyPropertyOfGlobalObject<Structure>, m_asyncBoundFunctionStructure) \
V(public, LazyPropertyOfGlobalObject<JSC::JSObject>, m_JSDOMFileConstructor) \
V(public, LazyPropertyOfGlobalObject<Structure>, m_JSFileStructure) \
V(public, LazyPropertyOfGlobalObject<JSC::JSObject>, m_JSMIMEParamsConstructor) \
V(public, LazyPropertyOfGlobalObject<JSC::JSObject>, m_JSMIMETypeConstructor) \
\
@@ -713,7 +712,6 @@ public:
JSObject* cryptoObject() const { return m_cryptoObject.getInitializedOnMainThread(this); }
JSObject* JSDOMFileConstructor() const { return m_JSDOMFileConstructor.getInitializedOnMainThread(this); }
JSC::Structure* JSFileStructure() const { return m_JSFileStructure.getInitializedOnMainThread(this); }
JSMap* nodeWorkerEnvironmentData() { return m_nodeWorkerEnvironmentData.get(); }
void setNodeWorkerEnvironmentData(JSMap* data);

View File

@@ -5149,7 +5149,7 @@ restart:
RETURN_IF_EXCEPTION(scope, void());
for (auto& property : properties) {
if (property.isNull()) [[unlikely]]
if (property.isEmpty() || property.isNull()) [[unlikely]]
continue;
// ignore constructor
@@ -5180,6 +5180,9 @@ restart:
ZigString key = toZigString(property.isSymbol() && !property.isPrivateName() ? property.impl() : property.string());
if (key.len == 0)
continue;
JSC::JSValue propertyValue = jsUndefined();
if ((slot.attributes() & PropertyAttribute::DontEnum) != 0) {
@@ -5320,7 +5323,7 @@ extern "C" [[ZIG_EXPORT(nothrow)]] bool JSC__isBigIntInInt64Range(JSC::EncodedJS
auto clientData = WebCore::clientData(vm);
for (auto property : vector) {
if (property.isNull()) [[unlikely]]
if (property.isEmpty() || property.isNull()) [[unlikely]]
continue;
// ignore constructor

View File

@@ -2507,7 +2507,6 @@ EVPKeyPointer::ParseKeyResult EVPKeyPointer::TryParsePrivateKey(
const PrivateKeyEncodingConfig& config,
const Buffer<const unsigned char>& buffer)
{
ClearErrorOnReturn clear_error_on_return;
static constexpr auto keyOrError = [](EVPKeyPointer pkey,
bool had_passphrase = false) {
if (int err = ERR_peek_error()) {

View File

@@ -117,7 +117,6 @@
#include "JSPrivateKeyObject.h"
#include "CryptoKeyType.h"
#include "JSNodePerformanceHooksHistogram.h"
#include "../napi.h"
#include <limits>
#include <algorithm>
@@ -2666,9 +2665,7 @@ SerializationReturnCode CloneSerializer::serialize(JSValue in)
// objects have been handled. If we reach this point and
// the input is not an Object object then we should throw
// a DataCloneError.
// NapiPrototype is allowed because napi_create_object should behave
// like a plain object from JS's perspective (matches Node.js).
if (inObject->classInfo() != JSFinalObject::info() && inObject->classInfo() != Zig::NapiPrototype::info())
if (inObject->classInfo() != JSFinalObject::info())
return SerializationReturnCode::DataCloneError;
inputObjectStack.append(inObject);
indexStack.append(0);

View File

@@ -66,13 +66,9 @@ pub fn CompressionStream(comptime T: type) type {
const this_value = callframe.this();
if (arguments[0].isUndefined()) {
return globalThis.ERR(.INVALID_ARG_VALUE, "flush value is required", .{}).throw();
}
bun.assert(!arguments[0].isUndefined()); // must provide flush value
flush = arguments[0].toU32();
_ = std.meta.intToEnum(bun.zlib.FlushValue, flush) catch {
return globalThis.ERR(.INVALID_ARG_VALUE, "Invalid flush value", .{}).throw();
};
_ = std.meta.intToEnum(bun.zlib.FlushValue, flush) catch bun.assert(false); // Invalid flush value
if (arguments[1].isNull()) {
// just a flush
@@ -80,33 +76,21 @@ pub fn CompressionStream(comptime T: type) type {
in_len = 0;
in_off = 0;
} else {
const in_buf = arguments[1].asArrayBuffer(globalThis) orelse {
return globalThis.ERR(.INVALID_ARG_TYPE, "The \"in\" argument must be a TypedArray or DataView", .{}).throw();
};
const in_buf = arguments[1].asArrayBuffer(globalThis).?;
in_off = arguments[2].toU32();
in_len = arguments[3].toU32();
if (in_buf.byte_len < @as(usize, in_off) + @as(usize, in_len)) {
return globalThis.ERR(.OUT_OF_RANGE, "in_off + in_len ({d}) exceeds input buffer length ({d})", .{ @as(usize, in_off) + @as(usize, in_len), in_buf.byte_len }).throw();
}
bun.assert(in_buf.byte_len >= in_off + in_len);
in = in_buf.byteSlice()[in_off..][0..in_len];
}
const out_buf = arguments[4].asArrayBuffer(globalThis) orelse {
return globalThis.ERR(.INVALID_ARG_TYPE, "The \"out\" argument must be a TypedArray or DataView", .{}).throw();
};
const out_buf = arguments[4].asArrayBuffer(globalThis).?;
out_off = arguments[5].toU32();
out_len = arguments[6].toU32();
if (out_buf.byte_len < @as(usize, out_off) + @as(usize, out_len)) {
return globalThis.ERR(.OUT_OF_RANGE, "out_off + out_len ({d}) exceeds output buffer length ({d})", .{ @as(usize, out_off) + @as(usize, out_len), out_buf.byte_len }).throw();
}
bun.assert(out_buf.byte_len >= out_off + out_len);
out = out_buf.byteSlice()[out_off..][0..out_len];
if (this.write_in_progress) {
return globalThis.ERR(.INVALID_STATE, "Write already in progress", .{}).throw();
}
if (this.pending_close) {
return globalThis.ERR(.INVALID_STATE, "Pending close", .{}).throw();
}
bun.assert(!this.write_in_progress);
bun.assert(!this.pending_close);
this.write_in_progress = true;
this.ref();
@@ -186,13 +170,9 @@ pub fn CompressionStream(comptime T: type) type {
var in: ?[]const u8 = null;
var out: ?[]u8 = null;
if (arguments[0].isUndefined()) {
return globalThis.ERR(.INVALID_ARG_VALUE, "flush value is required", .{}).throw();
}
bun.assert(!arguments[0].isUndefined()); // must provide flush value
flush = arguments[0].toU32();
_ = std.meta.intToEnum(bun.zlib.FlushValue, flush) catch {
return globalThis.ERR(.INVALID_ARG_VALUE, "Invalid flush value", .{}).throw();
};
_ = std.meta.intToEnum(bun.zlib.FlushValue, flush) catch bun.assert(false); // Invalid flush value
if (arguments[1].isNull()) {
// just a flush
@@ -200,33 +180,21 @@ pub fn CompressionStream(comptime T: type) type {
in_len = 0;
in_off = 0;
} else {
const in_buf = arguments[1].asArrayBuffer(globalThis) orelse {
return globalThis.ERR(.INVALID_ARG_TYPE, "The \"in\" argument must be a TypedArray or DataView", .{}).throw();
};
const in_buf = arguments[1].asArrayBuffer(globalThis).?;
in_off = arguments[2].toU32();
in_len = arguments[3].toU32();
if (in_buf.byte_len < @as(usize, in_off) + @as(usize, in_len)) {
return globalThis.ERR(.OUT_OF_RANGE, "in_off + in_len ({d}) exceeds input buffer length ({d})", .{ @as(usize, in_off) + @as(usize, in_len), in_buf.byte_len }).throw();
}
bun.assert(in_buf.byte_len >= in_off + in_len);
in = in_buf.byteSlice()[in_off..][0..in_len];
}
const out_buf = arguments[4].asArrayBuffer(globalThis) orelse {
return globalThis.ERR(.INVALID_ARG_TYPE, "The \"out\" argument must be a TypedArray or DataView", .{}).throw();
};
const out_buf = arguments[4].asArrayBuffer(globalThis).?;
out_off = arguments[5].toU32();
out_len = arguments[6].toU32();
if (out_buf.byte_len < @as(usize, out_off) + @as(usize, out_len)) {
return globalThis.ERR(.OUT_OF_RANGE, "out_off + out_len ({d}) exceeds output buffer length ({d})", .{ @as(usize, out_off) + @as(usize, out_len), out_buf.byte_len }).throw();
}
bun.assert(out_buf.byte_len >= out_off + out_len);
out = out_buf.byteSlice()[out_off..][0..out_len];
if (this.write_in_progress) {
return globalThis.ERR(.INVALID_STATE, "Write already in progress", .{}).throw();
}
if (this.pending_close) {
return globalThis.ERR(.INVALID_STATE, "Pending close", .{}).throw();
}
bun.assert(!this.write_in_progress);
bun.assert(!this.pending_close);
this.write_in_progress = true;
this.ref();

View File

@@ -613,14 +613,6 @@ pub const PathLike = union(enum) {
}
}
if (sliced.len >= buf.len) {
bun.Output.debugWarn("path too long: {d} bytes exceeds PathBuffer capacity of {d}\n", .{ sliced.len, buf.len });
if (comptime !force) return "";
buf[0] = 0;
return buf[0..0 :0];
}
@memcpy(buf[0..sliced.len], sliced);
buf[sliced.len] = 0;
return buf[0..sliced.len :0];
@@ -734,13 +726,12 @@ pub const PathLike = union(enum) {
}
pub fn fromBunString(global: *jsc.JSGlobalObject, str: *bun.String, will_be_async: bool, allocator: std.mem.Allocator) !PathLike {
try Valid.pathStringLength(str.length(), global);
if (will_be_async) {
var sliced = try str.toThreadSafeSlice(allocator);
errdefer sliced.deinit();
// Validate the UTF-8 byte length after conversion, since the path
// will be stored in a fixed-size PathBuffer.
try Valid.pathStringLength(sliced.slice().len, global);
try Valid.pathNullBytes(sliced.slice(), global);
sliced.reportExtraMemory(global.vm());
@@ -753,9 +744,6 @@ pub const PathLike = union(enum) {
var sliced = str.toSlice(allocator);
errdefer sliced.deinit();
// Validate the UTF-8 byte length after conversion, since the path
// will be stored in a fixed-size PathBuffer.
try Valid.pathStringLength(sliced.slice().len, global);
try Valid.pathNullBytes(sliced.slice(), global);
// Costs nothing to keep both around.

View File

@@ -219,10 +219,9 @@ pub const BundleV2 = struct {
client_transpiler.options.chunk_naming = bun.options.PathTemplate.chunk.data;
client_transpiler.options.entry_naming = "./[name]-[hash].[ext]";
// Use "/" so that asset URLs in HTML are absolute (e.g. "/chunk-abc.js"
// instead of "./chunk-abc.js"). Relative paths break when the HTML is
// served from a nested route like "/foo/".
client_transpiler.options.public_path = "/";
// Avoid setting a public path for --compile since all the assets
// will be served relative to the server root.
client_transpiler.options.public_path = "";
}
client_transpiler.setLog(this_transpiler.log);

View File

@@ -236,8 +236,8 @@ pub const BorderRadiusHandler = struct {
.unparsed => |unparsed| {
ctx.addLogicalRule(
ctx.allocator,
Property{ .unparsed = unparsed.withPropertyId(ctx.allocator, @unionInit(css.PropertyId, ltr, prefix)) },
Property{ .unparsed = unparsed.withPropertyId(ctx.allocator, @unionInit(css.PropertyId, rtl, prefix)) },
Property{ .unparsed = unparsed.withPropertyId(ctx.allocator, .{ .@"border-top-left-radius" = prefix }) },
Property{ .unparsed = unparsed.withPropertyId(ctx.allocator, .{ .@"border-top-right-radius" = prefix }) },
);
},
else => {},

View File

@@ -287,7 +287,7 @@ pub const UnicodeRange = struct {
if (digit < 10) return digit;
// Force the 6th bit to be set to ensure ascii is lower case.
// digit = (@as(u32, b) | 0b10_0000).wrapping_sub('a' as u32).saturating_add(10);
digit = ((@as(u32, b) | 0b10_0000) -% @as(u32, 'a')) +| 10;
digit = (@as(u32, b) | 0b10_0000) -% (@as(u32, 'a') +% 10);
return if (digit < 16) digit else null;
}
};
@@ -696,7 +696,7 @@ pub const FontFaceDeclarationParser = struct {
return .{ .result = .{ .font_stretch = c } };
}
}
} else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "unicode-range")) {
} else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "unicode-renage")) {
if (input.parseList(UnicodeRange, UnicodeRange.parse).asValue()) |c| {
if (input.expectExhausted().isOk()) {
return .{ .result = .{ .unicode_range = c } };

View File

@@ -49,15 +49,10 @@ pub const Loader = struct {
}
pub fn getNodePath(this: *Loader, fs: *Fs.FileSystem, buf: *bun.PathBuffer) ?[:0]const u8 {
// Check NODE or npm_node_execpath env var, but only use it if the file actually exists
if (this.get("NODE") orelse this.get("npm_node_execpath")) |node| {
if (node.len > 0 and node.len < bun.MAX_PATH_BYTES) {
@memcpy(buf[0..node.len], node);
buf[node.len] = 0;
if (bun.sys.isExecutableFilePath(buf[0..node.len :0])) {
return buf[0..node.len :0];
}
}
@memcpy(buf[0..node.len], node);
buf[node.len] = 0;
return buf[0..node.len :0];
}
if (which(buf, this.get("PATH") orelse return null, fs.top_level_dir, "node")) |node| {

View File

@@ -968,7 +968,7 @@ pub const SystemErrno = enum(u16) {
if (@TypeOf(code) == u16 or (@TypeOf(code) == c_int and code > 0)) {
// Win32Error and WSA Error codes
if (code <= @intFromEnum(Win32Error.IO_REISSUE_AS_CACHED) or (code >= @intFromEnum(Win32Error.WSAEINTR) and code <= @intFromEnum(Win32Error.WSA_QOS_RESERVED_PETYPE))) {
return init(@as(Win32Error, @enumFromInt(code)));
return init(std.meta.intToEnum(Win32Error, code) catch return null);
} else {
// uv error codes
inline for (@typeInfo(SystemErrno).@"enum".fields) |field| {
@@ -988,7 +988,7 @@ pub const SystemErrno = enum(u16) {
}
if (comptime @TypeOf(code) == Win32Error or @TypeOf(code) == std.os.windows.Win32Error) {
return switch (@as(Win32Error, @enumFromInt(@intFromEnum(code)))) {
return switch (std.meta.intToEnum(Win32Error, @intFromEnum(code)) catch return null) {
Win32Error.NOACCESS => SystemErrno.EACCES,
Win32Error.WSAEACCES => SystemErrno.EACCES,
Win32Error.ELEVATION_REQUIRED => SystemErrno.EACCES,

View File

@@ -31,6 +31,9 @@ pub const enable_keepalive = true;
pub const atomic_file_watcher = env.isLinux;
// This change didn't seem to make a meaningful difference in microbenchmarks
pub const latin1_is_now_ascii = false;
pub const http_buffer_pooling = true;
pub const disable_lolhtml = false;

View File

@@ -584,13 +584,7 @@ pub fn installWithManager(
try waitForEverythingExceptPeers(manager);
}
// Resolving a peer dep can create a NEW package whose own peer deps
// get re-queued to `peer_dependencies` during `drainDependencyList`.
// When all manifests are cached (synchronous resolution), no I/O tasks
// are spawned, so `pendingTaskCount() == 0`. We must drain the peer
// queue iteratively here — entering the event loop (`waitForPeers`)
// with zero pending I/O would block forever.
while (manager.peer_dependencies.readableLength() > 0) {
if (manager.peer_dependencies.readableLength() > 0) {
try manager.processPeerDependencyList();
manager.drainDependencyList();
}

View File

@@ -1,11 +1,7 @@
// Hardcoded module "node:_http_server"
const EventEmitter: typeof import("node:events").EventEmitter = require("node:events");
const { Duplex, Stream } = require("node:stream");
const {
_checkInvalidHeaderChar: checkInvalidHeaderChar,
validateHeaderName,
validateHeaderValue,
} = require("node:_http_common");
const { _checkInvalidHeaderChar: checkInvalidHeaderChar } = require("node:_http_common");
const { validateObject, validateLinkHeaderValue, validateBoolean, validateInteger } = require("internal/validators");
const { ConnResetException } = require("internal/shared");
@@ -1288,10 +1284,7 @@ ServerResponse.prototype.writeEarlyHints = function (hints, cb) {
for (const key of ObjectKeys(hints)) {
if (key !== "link") {
const value = hints[key];
validateHeaderName(key);
validateHeaderValue(key, value);
head += key + ": " + value + "\r\n";
head += key + ": " + hints[key] + "\r\n";
}
}

View File

@@ -1016,9 +1016,6 @@ function checkExecSyncError(ret, args, cmd?) {
if (ret.error) {
err = ret.error;
ObjectAssign(err, ret);
// ObjectAssign copies ret.error onto err, but err IS ret.error,
// creating a self-referencing cycle (err.error === err). Remove it.
delete err.error;
} else if (ret.status !== 0) {
let msg = "Command failed: ";
msg += cmd || ArrayPrototypeJoin.$call(args, " ");

View File

@@ -539,6 +539,8 @@ pub fn downloadStream(
) void {
const range = brk: {
if (size) |size_| {
if (offset == 0) break :brk null;
var end = (offset + size_);
if (size_ > 0) {
end -= 1;

View File

@@ -510,12 +510,6 @@ fn initRedirections(
},
.jsbuf => |val| {
const globalObject = interpreter.event_loop.js.global;
if (file.jsbuf.idx >= interpreter.jsobjs.len) {
globalObject.throw("Invalid JS object reference in shell", .{}) catch {};
return .failed;
}
if (interpreter.jsobjs[file.jsbuf.idx].asArrayBuffer(globalObject)) |buf| {
const arraybuf: BuiltinIO.ArrayBuf = .{ .buf = jsc.ArrayBuffer.Strong{
.array_buffer = buf,

View File

@@ -289,7 +289,7 @@ pub noinline fn next(this: *Rm) Yield {
}
switch (this.state) {
.done => return this.bltn().done(this.state.done.exit_code),
.done => return this.bltn().done(0),
.err => return this.bltn().done(this.state.err),
else => unreachable,
}
@@ -430,7 +430,7 @@ pub fn onShellRmTaskDone(this: *Rm, task: *ShellRmTask) void {
if (tasks_done >= this.state.exec.total_tasks and
exec.getOutputCount(.output_done) >= exec.getOutputCount(.output_count))
{
this.state = .{ .done = .{ .exit_code = if (exec.err != null) 1 else 0 } };
this.state = .{ .done = .{ .exit_code = if (exec.err) |theerr| theerr.errno else 0 } };
this.next().run();
}
}

View File

@@ -792,14 +792,13 @@ pub const Interpreter = struct {
out_parser: *?bun.shell.Parser,
out_lex_result: *?shell.LexResult,
) !ast.Script {
const jsobjs_len: u32 = @intCast(jsobjs.len);
const lex_result = brk: {
if (bun.strings.isAllASCII(script)) {
var lexer = bun.shell.LexerAscii.new(arena_allocator, script, jsstrings_to_escape, jsobjs_len);
var lexer = bun.shell.LexerAscii.new(arena_allocator, script, jsstrings_to_escape);
try lexer.lex();
break :brk lexer.get_result();
}
var lexer = bun.shell.LexerUnicode.new(arena_allocator, script, jsstrings_to_escape, jsobjs_len);
var lexer = bun.shell.LexerUnicode.new(arena_allocator, script, jsstrings_to_escape);
try lexer.lex();
break :brk lexer.get_result();
};

View File

@@ -2334,9 +2334,6 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
/// Not owned by this struct
string_refs: []bun.String,
/// Number of JS object references expected (for bounds validation)
jsobjs_len: u32 = 0,
const SubShellKind = enum {
/// (echo hi; echo hello)
normal,
@@ -2366,14 +2363,13 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
delimit_quote: bool,
};
pub fn new(alloc: Allocator, src: []const u8, strings_to_escape: []bun.String, jsobjs_len: u32) @This() {
pub fn new(alloc: Allocator, src: []const u8, strings_to_escape: []bun.String) @This() {
return .{
.chars = Chars.init(src),
.tokens = ArrayList(Token).init(alloc),
.strpool = ArrayList(u8).init(alloc),
.errors = ArrayList(LexError).init(alloc),
.string_refs = strings_to_escape,
.jsobjs_len = jsobjs_len,
};
}
@@ -2404,7 +2400,6 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
.word_start = self.word_start,
.j = self.j,
.string_refs = self.string_refs,
.jsobjs_len = self.jsobjs_len,
};
sublexer.chars.state = .Normal;
return sublexer;
@@ -3363,7 +3358,7 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
}
fn validateJSObjRefIdx(self: *@This(), idx: usize) bool {
if (idx >= self.jsobjs_len) {
if (idx >= std.math.maxInt(u32)) {
self.add_error("Invalid JS object ref (out of bounds)");
return false;
}
@@ -4134,7 +4129,7 @@ pub const ShellSrcBuilder = struct {
};
/// Characters that need to escaped
const SPECIAL_CHARS = [_]u8{ '~', '[', ']', '#', ';', '\n', '*', '{', ',', '}', '`', '$', '=', '(', ')', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '|', '>', '<', '&', '\'', '"', ' ', '\\', SPECIAL_JS_CHAR };
const SPECIAL_CHARS = [_]u8{ '~', '[', ']', '#', ';', '\n', '*', '{', ',', '}', '`', '$', '=', '(', ')', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '|', '>', '<', '&', '\'', '"', ' ', '\\' };
const SPECIAL_CHARS_TABLE: bun.bit_set.IntegerBitSet(256) = brk: {
var table = bun.bit_set.IntegerBitSet(256).initEmpty();
for (SPECIAL_CHARS) |c| {
@@ -4559,16 +4554,15 @@ pub const TestingAPIs = struct {
var script = std.array_list.Managed(u8).init(arena.allocator());
try shellCmdFromJS(globalThis, string_args, &template_args, &jsobjs, &jsstrings, &script, marked_argument_buffer);
const jsobjs_len: u32 = @intCast(jsobjs.items.len);
const lex_result = brk: {
if (bun.strings.isAllASCII(script.items[0..])) {
var lexer = LexerAscii.new(arena.allocator(), script.items[0..], jsstrings.items[0..], jsobjs_len);
var lexer = LexerAscii.new(arena.allocator(), script.items[0..], jsstrings.items[0..]);
lexer.lex() catch |err| {
return globalThis.throwError(err, "failed to lex shell");
};
break :brk lexer.get_result();
}
var lexer = LexerUnicode.new(arena.allocator(), script.items[0..], jsstrings.items[0..], jsobjs_len);
var lexer = LexerUnicode.new(arena.allocator(), script.items[0..], jsstrings.items[0..]);
lexer.lex() catch |err| {
return globalThis.throwError(err, "failed to lex shell");
};

View File

@@ -556,10 +556,6 @@ fn initRedirections(this: *Cmd, spawn_args: *Subprocess.SpawnArgs) bun.JSError!?
if (this.base.eventLoop() != .js) @panic("JS values not allowed in this context");
const global = this.base.eventLoop().js.global;
if (val.idx >= this.base.interpreter.jsobjs.len) {
return global.throw("Invalid JS object reference in shell", .{});
}
if (this.base.interpreter.jsobjs[val.idx].asArrayBuffer(global)) |buf| {
const stdio: bun.shell.subproc.Stdio = .{ .array_buffer = jsc.ArrayBuffer.Strong{
.array_buffer = buf,
@@ -572,9 +568,9 @@ fn initRedirections(this: *Cmd, spawn_args: *Subprocess.SpawnArgs) bun.JSError!?
if (this.node.redirect.stdin) {
try spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stdin_no);
} else if (this.node.redirect.stdout) {
try spawn_args.stdio[stdout_no].extractBlob(global, .{ .Blob = blob }, stdout_no);
try spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stdout_no);
} else if (this.node.redirect.stderr) {
try spawn_args.stdio[stderr_no].extractBlob(global, .{ .Blob = blob }, stderr_no);
try spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stderr_no);
}
} else if (try jsc.WebCore.ReadableStream.fromJS(this.base.interpreter.jsobjs[val.idx], global)) |rstream| {
_ = rstream;

View File

@@ -426,6 +426,12 @@ pub const EncodeIntoResult = struct {
written: u32 = 0,
};
pub fn allocateLatin1IntoUTF8(allocator: std.mem.Allocator, latin1_: []const u8) ![]u8 {
if (comptime bun.FeatureFlags.latin1_is_now_ascii) {
var out = try allocator.alloc(u8, latin1_.len);
@memcpy(out[0..latin1_.len], latin1_);
return out;
}
const list = try std.array_list.Managed(u8).initCapacity(allocator, latin1_.len);
var foo = try allocateLatin1IntoUTF8WithList(list, 0, latin1_);
return try foo.toOwnedSlice();
@@ -679,6 +685,13 @@ pub fn copyLatin1IntoUTF8(buf_: []u8, latin1_: []const u8) EncodeIntoResult {
}
pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, latin1_: []const u8, comptime stop: bool) EncodeIntoResult {
if (comptime bun.FeatureFlags.latin1_is_now_ascii) {
const to_copy = @as(u32, @truncate(@min(buf_.len, latin1_.len)));
@memcpy(buf_[0..to_copy], latin1_[0..to_copy]);
return .{ .written = to_copy, .read = to_copy };
}
var buf = buf_;
var latin1 = latin1_;

View File

@@ -149,7 +149,7 @@ pub extern "kernel32" fn SetCurrentDirectoryW(
lpPathName: win32.LPCWSTR,
) callconv(.winapi) win32.BOOL;
pub const SetCurrentDirectory = SetCurrentDirectoryW;
pub extern "ntdll" fn RtlNtStatusToDosError(win32.NTSTATUS) callconv(.winapi) Win32Error;
pub extern "ntdll" fn RtlNtStatusToDosError(win32.NTSTATUS) callconv(.winapi) u32;
pub extern "advapi32" fn SaferiIsExecutableFileType(szFullPathname: win32.LPCWSTR, bFromShellExecute: win32.BOOLEAN) callconv(.winapi) win32.BOOL;
// This was originally copied from Zig's standard library
/// Codes are from https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-erref/18d8fbe8-a967-4f1c-ae50-99ca8e491d2d
@@ -2952,7 +2952,8 @@ pub const Win32Error = enum(u16) {
pub const WSA_QOS_RESERVED_PETYPE: Win32Error = @enumFromInt(11031);
pub fn get() Win32Error {
return @enumFromInt(@intFromEnum(bun.windows.kernel32.GetLastError()));
const raw: u16 = @intFromEnum(bun.windows.kernel32.GetLastError());
return std.meta.intToEnum(Win32Error, raw) catch .MR_MID_NOT_FOUND;
}
pub fn int(this: Win32Error) u16 {
@@ -2971,7 +2972,12 @@ pub const Win32Error = enum(u16) {
}
pub fn fromNTStatus(status: win32.NTSTATUS) Win32Error {
return RtlNtStatusToDosError(status);
// RtlNtStatusToDosError returns a u32 Win32 error code that may not be
// in our Win32Error enum subset. Safely convert to avoid panic on
// invalid enum values.
const raw = RtlNtStatusToDosError(status);
if (raw > std.math.maxInt(u16)) return .MR_MID_NOT_FOUND;
return std.meta.intToEnum(Win32Error, @as(u16, @intCast(raw))) catch .MR_MID_NOT_FOUND;
}
};

View File

@@ -589,6 +589,7 @@ function expectBundled(
dotenv ||
typeof production !== "undefined" ||
bundling === false ||
(run && target === "node") ||
emitDCEAnnotations ||
bundleWarnings ||
env ||

View File

@@ -2,7 +2,7 @@ import { file, spawn, write } from "bun";
import { afterAll, beforeAll, describe, expect, test } from "bun:test";
import { existsSync, lstatSync, readlinkSync } from "fs";
import { mkdir, readlink, rm, symlink } from "fs/promises";
import { VerdaccioRegistry, bunEnv, bunExe, readdirSorted, runBunInstall, tempDir } from "harness";
import { VerdaccioRegistry, bunEnv, bunExe, readdirSorted, runBunInstall } from "harness";
import { join } from "path";
const registry = new VerdaccioRegistry();
@@ -1234,112 +1234,3 @@ test("runs lifecycle scripts correctly", async () => {
expect(lifecyclePostinstallDir).toEqual(["lifecycle-postinstall"]);
expect(allLifecycleScriptsDir).toEqual(["all-lifecycle-scripts"]);
});
// When an auto-installed peer dependency has its OWN peer deps, those
// transitive peers get re-queued during peer processing. If all manifest
// loads are synchronous (cached with valid max-age) AND the transitive peer's
// version constraint doesn't match what's already in the lockfile,
// pendingTaskCount() stays at 0 and waitForPeers was skipped — leaving
// the transitive peer's resolution unset (= invalid_package_id → filtered
// from the install).
test("transitive peer deps are resolved when resolution is fully synchronous", async () => {
const packagesDir = join(import.meta.dir, "registry", "packages");
// Self-contained HTTP server that serves package manifests & tarballs
// directly from the Verdaccio fixtures, with Cache-Control: max-age=300
// to replicate npmjs.org behavior (fully synchronous on warm cache).
using server = Bun.serve({
port: 0,
async fetch(req) {
const url = new URL(req.url);
const pathname = url.pathname;
// Tarball: /<name>/-/<name>-<version>.tgz
if (pathname.endsWith(".tgz")) {
const match = pathname.match(/\/([^/]+)\/-\/(.+\.tgz)$/);
if (match) {
const tarball = file(join(packagesDir, match[1], match[2]));
if (await tarball.exists()) {
return new Response(tarball, {
headers: { "Content-Type": "application/octet-stream" },
});
}
}
return new Response("Not found", { status: 404 });
}
// Manifest: /<name>
const packageName = decodeURIComponent(pathname.slice(1));
const metaFile = file(join(packagesDir, packageName, "package.json"));
if (!(await metaFile.exists())) {
return new Response("Not found", { status: 404 });
}
// Rewrite tarball URLs to point at this server
const meta = await metaFile.json();
const port = server.port;
for (const [ver, info] of Object.entries(meta.versions ?? {}) as [string, any][]) {
if (info?.dist?.tarball) {
info.dist.tarball = `http://localhost:${port}/${packageName}/-/${packageName}-${ver}.tgz`;
}
}
return new Response(JSON.stringify(meta), {
headers: {
"Content-Type": "application/json",
"Cache-Control": "public, max-age=300",
},
});
},
});
using packageDir = tempDir("transitive-peer-test-", {});
const packageJson = join(String(packageDir), "package.json");
const cacheDir = join(String(packageDir), ".bun-cache");
const bunfig = `[install]\ncache = "${cacheDir.replaceAll("\\", "\\\\")}"\nregistry = "http://localhost:${server.port}/"\nlinker = "isolated"\n`;
await write(join(String(packageDir), "bunfig.toml"), bunfig);
await write(
packageJson,
JSON.stringify({
name: "test-transitive-peer",
dependencies: {
// Chain: uses-strict-peer → (peer) strict-peer-dep → (peer) no-deps@^2.0.0
// Root has no-deps@1.0.0, which does NOT satisfy ^2.0.0. This forces
// strict-peer-dep's peer `no-deps` through the full resolution pass
// (can't reuse root's no-deps via getPackageID).
"no-deps": "1.0.0",
"uses-strict-peer": "1.0.0",
},
}),
);
// First install: populates manifest cache (with max-age=300 from server)
await runBunInstall(bunEnv, String(packageDir), { allowWarnings: true });
// Second install with NO lockfile and WARM cache. Manifests are fresh
// (within max-age) so all loads are synchronous — this is the bug trigger.
await rm(join(String(packageDir), "node_modules"), { recursive: true, force: true });
await rm(join(String(packageDir), "bun.lock"), { force: true });
await runBunInstall(bunEnv, String(packageDir), { allowWarnings: true });
// Entry names have peer hashes; find them dynamically
const bunDir = join(String(packageDir), "node_modules", ".bun");
const entries = await readdirSorted(bunDir);
const strictPeerEntry = entries.find(e => e.startsWith("strict-peer-dep@1.0.0"));
const usesStrictEntry = entries.find(e => e.startsWith("uses-strict-peer@1.0.0"));
// strict-peer-dep must exist (auto-installed via uses-strict-peer's peer)
expect(strictPeerEntry).toBeDefined();
expect(usesStrictEntry).toBeDefined();
// strict-peer-dep's own peer `no-deps` must be resolved and symlinked.
// Without the fix: this symlink is missing because the transitive peer
// queue was never drained after drainDependencyList re-queued it.
expect(existsSync(join(bunDir, strictPeerEntry!, "node_modules", "no-deps"))).toBe(true);
// Verify the chain is intact
expect(readlinkSync(join(bunDir, usesStrictEntry!, "node_modules", "strict-peer-dep"))).toBe(
join("..", "..", strictPeerEntry!, "node_modules", "strict-peer-dep"),
);
});

View File

@@ -1,41 +0,0 @@
{
"name": "strict-peer-dep",
"versions": {
"1.0.0": {
"name": "strict-peer-dep",
"version": "1.0.0",
"peerDependencies": {
"no-deps": "^2.0.0"
},
"_id": "strict-peer-dep@1.0.0",
"_nodeVersion": "22.2.0",
"_npmVersion": "10.8.1",
"dist": {
"integrity": "sha512-bz2RC/Fp4Nvc9aIiHB6Szko9m6sxNy/clIHnTAGeD9VSpQJTvlPAJqJ09lWo7N3q4JNLEqDTf3Mn+zNUsYOKWQ==",
"shasum": "1548927b5ca502c008c3ab091fb707f96181ecaf",
"tarball": "http://localhost:4873/strict-peer-dep/-/strict-peer-dep-1.0.0.tgz"
},
"contributors": []
}
},
"time": {
"modified": "2026-02-28T00:00:00.000Z",
"created": "2026-02-28T00:00:00.000Z",
"1.0.0": "2026-02-28T00:00:00.000Z"
},
"users": {},
"dist-tags": {
"latest": "1.0.0"
},
"_uplinks": {},
"_distfiles": {},
"_attachments": {
"strict-peer-dep-1.0.0.tgz": {
"shasum": "1548927b5ca502c008c3ab091fb707f96181ecaf",
"version": "1.0.0"
}
},
"_rev": "",
"_id": "strict-peer-dep",
"readme": "ERROR: No README data found!"
}

View File

@@ -1,41 +0,0 @@
{
"name": "uses-strict-peer",
"versions": {
"1.0.0": {
"name": "uses-strict-peer",
"version": "1.0.0",
"peerDependencies": {
"strict-peer-dep": "1.0.0"
},
"_id": "uses-strict-peer@1.0.0",
"_nodeVersion": "22.2.0",
"_npmVersion": "10.8.1",
"dist": {
"integrity": "sha512-RbQ5blabFjzZxf/5rXghqXxa2+Dmv/owDb1YzHwNOOBmxGJZTqPt3OIYHlsGX/wnPVjAP6gBwJl3nxLxU0pzlw==",
"shasum": "7cff9823abdca5ab698f2c6b73410b87004960e9",
"tarball": "http://localhost:4873/uses-strict-peer/-/uses-strict-peer-1.0.0.tgz"
},
"contributors": []
}
},
"time": {
"modified": "2026-02-28T00:00:00.000Z",
"created": "2026-02-28T00:00:00.000Z",
"1.0.0": "2026-02-28T00:00:00.000Z"
},
"users": {},
"dist-tags": {
"latest": "1.0.0"
},
"_uplinks": {},
"_distfiles": {},
"_attachments": {
"uses-strict-peer-1.0.0.tgz": {
"shasum": "7cff9823abdca5ab698f2c6b73410b87004960e9",
"version": "1.0.0"
}
},
"_rev": "",
"_id": "uses-strict-peer",
"readme": "ERROR: No README data found!"
}

View File

@@ -880,44 +880,6 @@ for (let credentials of allCredentials) {
expect(SHA1).toBe(SHA1_2);
}
}, 30_000);
it("should work with sliced files (offset 0)", async () => {
await using tmpfile = await tmp();
const s3file = s3(tmpfile.name + "-readable-stream-slice", options);
await s3file.write("Hello Bun!");
const sliced = s3file.slice(0, 5);
const stream = sliced.stream();
const reader = stream.getReader();
let bytes = 0;
let chunks: Array<Buffer> = [];
while (true) {
const { done, value } = await reader.read();
if (done) break;
bytes += value?.length ?? 0;
if (value) chunks.push(value as Buffer);
}
expect(bytes).toBe(5);
expect(Buffer.concat(chunks)).toEqual(Buffer.from("Hello"));
});
it("should work with sliced files (non-zero offset)", async () => {
await using tmpfile = await tmp();
const s3file = s3(tmpfile.name + "-readable-stream-slice-offset", options);
await s3file.write("Hello Bun!");
const sliced = s3file.slice(6, 10);
const stream = sliced.stream();
const reader = stream.getReader();
let bytes = 0;
let chunks: Array<Buffer> = [];
while (true) {
const { done, value } = await reader.read();
if (done) break;
bytes += value?.length ?? 0;
if (value) chunks.push(value as Buffer);
}
expect(bytes).toBe(4);
expect(Buffer.concat(chunks)).toEqual(Buffer.from("Bun!"));
});
});
});
});

View File

@@ -1,58 +0,0 @@
import { $ } from "bun";
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
describe("shell sentinel character hardening", () => {
test("string matching internal obj-ref prefix round-trips through interpolation", async () => {
// \x08 is the shell's internal sentinel byte. When followed by "__bun_"
// and then non-digit characters, the old code didn't escape \x08 (it wasn't
// in SPECIAL_CHARS), so the raw bytes were injected into the script buffer.
// The lexer then misinterpreted them as a malformed internal object
// reference pattern and produced a lex error.
// The suffix must contain non-digit, non-special chars so that:
// 1. needsEscape() returns false without the \x08 fix
// 2. looksLikeJSObjRef() matches the __bun_ prefix
// 3. eatJSObjRef() fails because it finds no digit index
const str = "\x08__bun_abc";
const result = await $`echo ${str}`.text();
expect(result).toBe(str + "\n");
});
test("string matching internal str-ref prefix round-trips through interpolation", async () => {
// Same issue but for the __bunstr_ prefix pattern.
const str = "\x08__bunstr_abc";
const result = await $`echo ${str}`.text();
expect(result).toBe(str + "\n");
});
test("raw sentinel injection with out-of-bounds index does not crash", async () => {
// { raw: ... } bypasses string escaping, allowing injection of a sentinel
// pattern with a digit suffix into the script buffer. The old
// validateJSObjRefIdx only rejected indices >= maxInt(u32), so index 9999
// was accepted. At execution time, accessing jsobjs[9999] on an empty
// array caused a segfault. The fix checks against actual jsobjs.len.
// Run in a subprocess so a crash on old bun doesn't kill the test runner.
const testScript = [
'import { $ } from "bun";',
"const sentinel = String.fromCharCode(8) + '__bun_9999';",
"try { await $`echo hello > ${{ raw: sentinel }}`; } catch {}",
'console.log("OK");',
].join("\n");
using dir = tempDir("sentinel-test", {
"test.js": testScript,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "test.js"],
cwd: String(dir),
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, exitCode] = await Promise.all([proc.stdout.text(), proc.exited]);
expect(stdout.trim()).toBe("OK");
expect(exitCode).toBe(0);
});
});

View File

@@ -1,76 +0,0 @@
import { describe, expect, test } from "bun:test";
describe("Transpiler tsconfig lifetime", () => {
test("multiple async transform() calls with tsconfig do not crash", async () => {
const transpiler = new Bun.Transpiler({
loader: "tsx",
tsconfig: JSON.stringify({
compilerOptions: {
experimentalDecorators: true,
jsx: "react",
jsxFactory: "React.createElement",
},
}),
});
// First async transform
const result1 = await transpiler.transform("const x: number = 1;");
expect(result1).toContain("const x = 1");
// Second async transform — would crash before the fix due to use-after-free
// on the tsconfig pointer that was freed by the first TransformTask.deinit
const result2 = await transpiler.transform("const y: number = 2;");
expect(result2).toContain("const y = 2");
// Third call to be safe
const result3 = await transpiler.transform("const z: number = 3;");
expect(result3).toContain("const z = 3");
});
test("async transform() followed by transformSync() with tsconfig does not crash", async () => {
const transpiler = new Bun.Transpiler({
loader: "tsx",
tsconfig: JSON.stringify({
compilerOptions: {
experimentalDecorators: true,
jsx: "react",
jsxFactory: "React.createElement",
},
}),
});
// Before this fix, async transform freed tsconfig in TransformTask.deinit
const result1 = await transpiler.transform("const a: string = 'hello';");
expect(result1).toContain('const a = "hello"');
// Sync transform would read freed memory without the fix
const result2 = transpiler.transformSync("const b: string = 'world';");
expect(result2).toContain('const b = "world"');
});
test("tsconfig jsx settings are preserved across multiple async transforms", async () => {
const transpiler = new Bun.Transpiler({
loader: "tsx",
tsconfig: JSON.stringify({
compilerOptions: {
jsx: "react",
jsxFactory: "h",
jsxFragmentFactory: "Fragment",
},
}),
});
const code = "export default <div>hello</div>;";
const result1 = await transpiler.transform(code);
expect(result1).toContain("h(");
// After the first async transform, tsconfig should still be valid
const result2 = await transpiler.transform(code);
expect(result2).toContain("h(");
// Sync should also work
const result3 = transpiler.transformSync(code);
expect(result3).toContain("h(");
});
});

View File

@@ -1,54 +0,0 @@
import { describe, expect, it } from "bun:test";
import { isPosix } from "harness";
import fs from "node:fs";
// On POSIX systems, MAX_PATH_BYTES is 4096.
// Path validation must account for the actual UTF-8 byte length of strings,
// not just the number of characters (UTF-16 code units), since multi-byte
// characters expand when encoded as UTF-8.
describe.if(isPosix)("path length validation with multi-byte characters", () => {
// U+4E00 (一) is a CJK character that is 3 bytes in UTF-8 (0xE4 0xB8 0x80).
// 2000 such characters = 2000 UTF-16 code units but 6000 UTF-8 bytes,
// which exceeds the 4096-byte PathBuffer.
const cjkPath = "\u4e00".repeat(2000);
it("rejects overly long multi-byte paths in openSync", () => {
expect(() => fs.openSync(cjkPath, "r")).toThrow("ENAMETOOLONG");
});
it("rejects overly long multi-byte paths in readFileSync", () => {
expect(() => fs.readFileSync(cjkPath)).toThrow("ENAMETOOLONG");
});
it("rejects overly long multi-byte paths in statSync", () => {
expect(() => fs.statSync(cjkPath)).toThrow("ENAMETOOLONG");
});
it("rejects overly long multi-byte paths in realpathSync", () => {
expect(() => fs.realpathSync(cjkPath)).toThrow("ENAMETOOLONG");
});
it("rejects overly long multi-byte paths in async readFile", async () => {
expect(async () => await fs.promises.readFile(cjkPath)).toThrow("ENAMETOOLONG");
});
it("rejects overly long multi-byte paths in async stat", async () => {
expect(async () => await fs.promises.stat(cjkPath)).toThrow("ENAMETOOLONG");
});
// 2-byte UTF-8 characters (U+0080 to U+07FF range)
it("rejects overly long 2-byte UTF-8 paths", () => {
// U+00E9 (é) is 2 bytes in UTF-8. 3000 chars = 6000 bytes > 4096
const accentPath = "\u00e9".repeat(3000);
expect(() => fs.statSync(accentPath)).toThrow("ENAMETOOLONG");
});
// Verify that the process does not crash - the key property is that these
// throw a proper JS error rather than segfaulting.
it("does not crash with 4-byte UTF-8 characters exceeding buffer", () => {
// U+1F600 (😀) is 4 bytes in UTF-8, 2 UTF-16 code units (surrogate pair).
// 1500 emoji = 3000 UTF-16 code units but 6000 UTF-8 bytes > 4096
const emojiPath = "\u{1F600}".repeat(1500);
expect(() => fs.statSync(emojiPath)).toThrow("ENAMETOOLONG");
});
});

View File

@@ -1,137 +0,0 @@
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe } from "harness";
describe("writeEarlyHints", () => {
test("rejects CRLF injection in header name", async () => {
await using proc = Bun.spawn({
cmd: [
bunExe(),
"-e",
`
const http = require("node:http");
const server = http.createServer((req, res) => {
try {
res.writeEarlyHints({
link: "</style.css>; rel=preload",
"x-custom\\r\\nSet-Cookie: session=evil\\r\\nX-Injected": "val",
});
console.log("FAIL: no error thrown");
process.exit(1);
} catch (e) {
console.log("error_code:" + e.code);
res.writeHead(200);
res.end("ok");
}
});
server.listen(0, () => {
http.get({ port: server.address().port }, (res) => {
let data = "";
res.on("data", (c) => data += c);
res.on("end", () => {
console.log("body:" + data);
server.close();
});
});
});
`,
],
env: bunEnv,
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
expect(stdout).toContain("error_code:ERR_INVALID_HTTP_TOKEN");
expect(stdout).toContain("body:ok");
expect(exitCode).toBe(0);
});
test("rejects CRLF injection in header value", async () => {
await using proc = Bun.spawn({
cmd: [
bunExe(),
"-e",
`
const http = require("node:http");
const server = http.createServer((req, res) => {
try {
res.writeEarlyHints({
link: "</style.css>; rel=preload",
"x-custom": "legitimate\\r\\nSet-Cookie: session=evil",
});
console.log("FAIL: no error thrown");
process.exit(1);
} catch (e) {
console.log("error_code:" + e.code);
res.writeHead(200);
res.end("ok");
}
});
server.listen(0, () => {
http.get({ port: server.address().port }, (res) => {
let data = "";
res.on("data", (c) => data += c);
res.on("end", () => {
console.log("body:" + data);
server.close();
});
});
});
`,
],
env: bunEnv,
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
expect(stdout).toContain("error_code:ERR_INVALID_CHAR");
expect(stdout).toContain("body:ok");
expect(exitCode).toBe(0);
});
test("allows valid non-link headers in early hints", async () => {
await using proc = Bun.spawn({
cmd: [
bunExe(),
"-e",
`
const http = require("node:http");
const server = http.createServer((req, res) => {
try {
res.writeEarlyHints({
link: "</style.css>; rel=preload",
"x-custom": "valid-value",
"x-another": "also-valid",
});
console.log("OK: no error");
res.writeHead(200);
res.end("ok");
} catch (e) {
console.log("FAIL: " + e.message);
process.exit(1);
}
});
server.listen(0, () => {
http.get({ port: server.address().port }, (res) => {
let data = "";
res.on("data", (c) => data += c);
res.on("end", () => {
console.log("body:" + data);
server.close();
});
});
});
`,
],
env: bunEnv,
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
expect(stdout).toContain("OK: no error");
expect(stdout).toContain("body:ok");
expect(exitCode).toBe(0);
});
});

View File

@@ -1,89 +0,0 @@
import { describe, expect, test } from "bun:test";

// Tests for bounds checking on native zlib handle write/writeSync methods.
// These verify that user-controlled offset/length parameters are validated
// against actual buffer bounds, preventing out-of-bounds memory access.
describe("zlib native handle bounds checking", () => {
  // The bounds checks under test live on the private native handle of a
  // raw-deflate stream; each test gets a fresh one.
  const newNativeHandle = () => {
    const zlib = require("zlib");
    return zlib.createDeflateRaw()._handle;
  };

  test("writeSync rejects in_len exceeding input buffer", () => {
    const handle = newNativeHandle();
    const input = Buffer.alloc(16);
    const output = Buffer.alloc(1024);
    // in_len=65536 far exceeds the 16-byte input buffer
    expect(() => handle.writeSync(0, input, 0, 65536, output, 0, 1024)).toThrow(/exceeds input buffer length/);
  });

  test("writeSync rejects out_len exceeding output buffer", () => {
    const handle = newNativeHandle();
    const input = Buffer.alloc(16);
    const output = Buffer.alloc(16);
    // out_len=65536 far exceeds the 16-byte output buffer
    expect(() => handle.writeSync(0, input, 0, 16, output, 0, 65536)).toThrow(/exceeds output buffer length/);
  });

  test("writeSync rejects in_off + in_len exceeding input buffer", () => {
    const handle = newNativeHandle();
    const input = Buffer.alloc(16);
    const output = Buffer.alloc(1024);
    // in_off=10 + in_len=16 = 26 > 16
    expect(() => handle.writeSync(0, input, 10, 16, output, 0, 1024)).toThrow(/exceeds input buffer length/);
  });

  test("writeSync rejects out_off + out_len exceeding output buffer", () => {
    const handle = newNativeHandle();
    const input = Buffer.alloc(16);
    const output = Buffer.alloc(16);
    // out_off=10 + out_len=16 = 26 > 16
    expect(() => handle.writeSync(0, input, 0, 16, output, 10, 16)).toThrow(/exceeds output buffer length/);
  });

  test("writeSync allows valid bounds", () => {
    const handle = newNativeHandle();
    const input = Buffer.alloc(16);
    const output = Buffer.alloc(1024);
    // Everything in range — must not throw.
    expect(() => handle.writeSync(0, input, 0, 16, output, 0, 1024)).not.toThrow();
  });

  test("writeSync allows valid offset + length within bounds", () => {
    const handle = newNativeHandle();
    const input = Buffer.alloc(32);
    const output = Buffer.alloc(1024);
    // in_off=8 + in_len=16 = 24 <= 32, valid
    expect(() => handle.writeSync(0, input, 8, 16, output, 0, 1024)).not.toThrow();
  });

  test("writeSync allows null input (flush only)", () => {
    const handle = newNativeHandle();
    const output = Buffer.alloc(1024);
    // null input is valid (flush only)
    expect(() => handle.writeSync(0, null, 0, 0, output, 0, 1024)).not.toThrow();
  });
});

View File

@@ -236,13 +236,6 @@ static napi_value make_empty_array(const Napi::CallbackInfo &info) {
return array;
}
// JS-callable N-API test helper: returns a brand-new empty object allocated
// via the raw napi_create_object C API (not the Napi::Object C++ wrapper),
// so tests can exercise objects that originate from the N-API layer directly.
static napi_value make_empty_object(const Napi::CallbackInfo &info) {
napi_env env = info.Env();
napi_value object;
// NODE_API_CALL checks the returned napi_status and bails out on failure.
NODE_API_CALL(env, napi_create_object(env, &object));
return object;
}
// add_tag(object, lower, upper)
static napi_value add_tag(const Napi::CallbackInfo &info) {
Napi::Env env = info.Env();
@@ -431,7 +424,6 @@ void register_js_test_helpers(Napi::Env env, Napi::Object exports) {
REGISTER_FUNCTION(env, exports, throw_error);
REGISTER_FUNCTION(env, exports, create_and_throw_error);
REGISTER_FUNCTION(env, exports, make_empty_array);
REGISTER_FUNCTION(env, exports, make_empty_object);
REGISTER_FUNCTION(env, exports, add_tag);
REGISTER_FUNCTION(env, exports, try_add_tag);
REGISTER_FUNCTION(env, exports, check_tag);

View File

@@ -446,22 +446,6 @@ nativeTests.test_reflect_construct_no_prototype_crash = () => {
console.log("✓ Success - no crash!");
};
// Regression: https://github.com/oven-sh/bun/issues/25658
// Objects produced by napi_create_object must survive structuredClone(),
// both while still empty and after properties are added from JS.
nativeTests.test_napi_create_object_structured_clone = () => {
  const native = nativeTests.make_empty_object();
  assert.deepStrictEqual(native, {});

  // Clone while empty.
  const emptyClone = structuredClone(native);
  assert.deepStrictEqual(emptyClone, {});

  // Mutate from JS, then clone again; the clone must be a deep copy.
  native.foo = "bar";
  native.nested = { x: 1 };
  const populatedClone = structuredClone(native);
  assert.deepStrictEqual(populatedClone, { foo: "bar", nested: { x: 1 } });
  assert(populatedClone !== native);
  assert(populatedClone.nested !== native.nested);
  console.log("pass");
};
nativeTests.test_napi_wrap = () => {
const values = [
{},

View File

@@ -426,13 +426,6 @@ describe.concurrent("napi", () => {
});
});
describe("napi_create_object", () => {
// https://github.com/oven-sh/bun/issues/25658
it("result is clonable with structuredClone", async () => {
await checkSameOutput("test_napi_create_object_structured_clone", []);
});
});
// TODO(@190n) test allocating in a finalizer from a napi module with the right version
describe("napi_wrap", () => {

View File

@@ -195,7 +195,7 @@ test/js/node/test/parallel/test-http-server-stale-close.js
test/js/third_party/comlink/comlink.test.ts
test/regression/issue/22635/22635.test.ts
test/js/node/test/parallel/test-http-url.parse-https.request.js
test/bundler/bundler_compile_autoload.test.ts
test/bundler/bundler_compile_autoload.test.ts
# Bun::JSNodeHTTPServerSocket::clearSocketData
test/js/node/test/parallel/test-http-server-keep-alive-max-requests-null.js

View File

@@ -1,77 +0,0 @@
import { expect, test } from "bun:test";
import net from "net";

// Regression test for https://github.com/oven-sh/bun/issues/8893
// Bytes >= 0x80 in HTTP header values were incorrectly stripped because
// the whitespace trimming in HttpParser.h compared signed chars against 33.
// On platforms where char is signed (x86_64), bytes 0x80-0xFF are negative
// and thus < 33, causing them to be trimmed as if they were whitespace.
test("header values preserve bytes >= 0x80", async () => {
  let seenHeader: string | null = null;
  await using server = Bun.serve({
    port: 0,
    fetch(req) {
      seenHeader = req.headers.get("x-test");
      return new Response("OK");
    },
  });

  const socket = net.connect(server.port, "127.0.0.1");
  // Raw request bytes: the X-Test value is "value" wrapped in 0xFF on both sides.
  const rawRequest = Buffer.concat([
    Buffer.from("GET / HTTP/1.1\r\nHost: localhost\r\nX-Test: "),
    Buffer.from([0xff]),
    Buffer.from("value"),
    Buffer.from([0xff]),
    Buffer.from("\r\n\r\n"),
  ]);

  await new Promise<void>((resolve, reject) => {
    socket.on("error", reject);
    socket.on("data", chunk => {
      expect(chunk.toString()).toContain("HTTP/1.1 200");
      // The 0xFF bytes must survive, not be stripped as whitespace.
      // 0xFF as a Latin-1 byte becomes U+00FF (ÿ) in the JS string.
      expect(seenHeader).not.toBeNull();
      expect(seenHeader!.length).toBe(7);
      expect(seenHeader!.charCodeAt(0)).toBe(0xff);
      expect(seenHeader!.charCodeAt(6)).toBe(0xff);
      socket.end();
      resolve();
    });
    socket.write(rawRequest);
  });
});

test("header values still trim actual whitespace (SP, HTAB)", async () => {
  let seenHeader: string | null = null;
  await using server = Bun.serve({
    port: 0,
    fetch(req) {
      seenHeader = req.headers.get("x-test");
      return new Response("OK");
    },
  });

  const socket = net.connect(server.port, "127.0.0.1");
  // Spaces and tabs around the value are genuine OWS and must still be trimmed.
  const rawRequest = Buffer.from("GET / HTTP/1.1\r\nHost: localhost\r\nX-Test: \t value \t \r\n\r\n");

  await new Promise<void>((resolve, reject) => {
    socket.on("error", reject);
    socket.on("data", chunk => {
      expect(chunk.toString()).toContain("HTTP/1.1 200");
      expect(seenHeader).toBe("value");
      socket.end();
      resolve();
    });
    socket.write(rawRequest);
  });
});

View File

@@ -1,71 +0,0 @@
import { expect, test } from "bun:test";

// Shared factory: a tsx transpiler with the given trimUnusedImports setting.
const makeTranspiler = (trimUnusedImports: boolean) =>
  new Bun.Transpiler({ trimUnusedImports, loader: "tsx" });

test("scanImports respects trimUnusedImports", () => {
  const transpiler = makeTranspiler(true);
  // Unused named import should be trimmed
  expect(transpiler.scanImports(`import { Component } from "./Component";`)).toEqual([]);
  // Unused default import should be trimmed
  expect(transpiler.scanImports(`import Foo from "./Foo";`)).toEqual([]);
  // Unused namespace import should be trimmed
  expect(transpiler.scanImports(`import * as Utils from "./Utils";`)).toEqual([]);
  // Used named import should be kept
  expect(transpiler.scanImports(`import { Component } from "./Component"; console.log(Component);`)).toEqual([
    { path: "./Component", kind: "import-statement" },
  ]);
  // Bare/side-effect import should always be kept
  expect(transpiler.scanImports(`import "./side-effect";`)).toEqual([
    { path: "./side-effect", kind: "import-statement" },
  ]);
  // Type-only import should always be trimmed
  expect(transpiler.scanImports(`import type { Foo } from "./Foo";`)).toEqual([]);
});

test("scan respects trimUnusedImports", () => {
  const transpiler = makeTranspiler(true);
  // Unused named import should be trimmed from scan result
  const trimmed = transpiler.scan(`import { Component } from "./Component";`);
  expect(trimmed.imports).toEqual([]);
  // Used named import should be kept in scan result
  const kept = transpiler.scan(`import { Component } from "./Component"; console.log(Component);`);
  expect(kept.imports).toEqual([{ path: "./Component", kind: "import-statement" }]);
  // Bare/side-effect import should always be kept
  const bare = transpiler.scan(`import "./side-effect";`);
  expect(bare.imports).toEqual([{ path: "./side-effect", kind: "import-statement" }]);
});

test("scanImports returns all imports when trimUnusedImports is false", () => {
  const transpiler = makeTranspiler(false);
  // Unused named import should NOT be trimmed when option is false
  expect(transpiler.scanImports(`import { Component } from "./Component";`)).toEqual([
    { path: "./Component", kind: "import-statement" },
  ]);
});

test("scan returns all imports when trimUnusedImports is false", () => {
  const transpiler = makeTranspiler(false);
  const scanned = transpiler.scan(`import { Component } from "./Component";`);
  expect(scanned.imports).toEqual([{ path: "./Component", kind: "import-statement" }]);
});

View File

@@ -1,66 +0,0 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
// https://github.com/oven-sh/bun/issues/18028
// Object diff in bun test should display empty string keys correctly.
test("toStrictEqual diff shows empty string keys", () => {
expect({
val: { "": "value" },
}).toStrictEqual({
val: { "": "value" },
});
});
test("toEqual diff shows empty string keys", () => {
expect({ "": "hello" }).toEqual({ "": "hello" });
});
// Empty-string keys must compare correctly regardless of the value type.
test("empty string key with various value types", () => {
expect({ "": 0 }).toEqual({ "": 0 });
expect({ "": null }).toEqual({ "": null });
expect({ "": "" }).toEqual({ "": "" });
expect({ "": false }).toEqual({ "": false });
expect({ "": undefined }).toEqual({ "": undefined });
});
test("empty string key mixed with other keys", () => {
expect({ foo: "bar", "": "value" }).toEqual({ foo: "bar", "": "value" });
});
// Run a deliberately failing test in a subprocess and inspect the diff that
// `bun test` prints to stderr (FORCE_COLOR=0 keeps it free of ANSI codes).
test("toStrictEqual fails and shows diff with empty string key", async () => {
using dir = tempDir("issue-18028", {
"test.test.ts": `
import { test, expect } from "bun:test";
test("diff", () => {
expect({ val: {} }).toStrictEqual({ val: { "": "value" } });
});
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "test", "test.test.ts"],
cwd: String(dir),
env: { ...bunEnv, FORCE_COLOR: "0" },
stdout: "pipe",
stderr: "pipe",
});
const [stderr, exitCode] = await Promise.all([proc.stderr.text(), proc.exited]);
// The diff section should show non-zero changed lines
expect(stderr).toContain("- Expected - 3");
expect(stderr).toContain("+ Received + 1");
// The diff should include the empty string key
expect(stderr).toContain('"": "value"');
expect(exitCode).toBe(1);
});
// console.log's object printer must also render the empty-string key.
test("console.log shows empty string keys", () => {
const result = Bun.spawnSync({
cmd: [bunExe(), "-e", 'console.log({ "": "value", foo: "bar" })'],
env: { ...bunEnv, NO_COLOR: "1" },
});
const stdout = result.stdout.toString();
expect(stdout).toContain('"": "value"');
});

View File

@@ -1,46 +0,0 @@
import { $ } from "bun";
import { describe, expect, test } from "bun:test";
import { tempDir } from "harness";

// Builtin `rm` must propagate its real exit code through .quiet(), .text(),
// and .nothrow() — quiet mode must not mask failures as success.
describe("shell .quiet() should preserve exit codes", () => {
  test("builtin rm with .quiet() throws on failure", async () => {
    using tmp = tempDir("issue-18161", {});
    try {
      // Removing a file that does not exist must fail even when quiet.
      await $`rm ${tmp}/nonexistent-file.txt`.quiet();
      expect.unreachable();
    } catch (err: any) {
      expect(err.exitCode).not.toBe(0);
    }
  });

  test("builtin rm with .nothrow().quiet() returns non-zero exit code", async () => {
    using tmp = tempDir("issue-18161", {});
    const { exitCode } = await $`rm ${tmp}/nonexistent-file.txt`.nothrow().quiet();
    expect(exitCode).not.toBe(0);
  });

  test("builtin rm with .text() throws on failure", async () => {
    using tmp = tempDir("issue-18161", {});
    try {
      await $`rm ${tmp}/nonexistent-file.txt`.text();
      expect.unreachable();
    } catch (err: any) {
      expect(err.exitCode).not.toBe(0);
    }
  });

  test("builtin rm with .quiet() returns 0 on success", async () => {
    using tmp = tempDir("issue-18161", {
      "existing-file.txt": "hello",
    });
    const { exitCode } = await $`rm ${tmp}/existing-file.txt`.nothrow().quiet();
    expect(exitCode).toBe(0);
  });

  test("builtin rm exit code matches between quiet and non-quiet", async () => {
    using tmp = tempDir("issue-18161", {});
    const loud = await $`rm ${tmp}/nonexistent-file.txt`.nothrow();
    const quiet = await $`rm ${tmp}/nonexistent-file.txt`.nothrow().quiet();
    expect(quiet.exitCode).toBe(loud.exitCode);
  });
});

View File

@@ -1,133 +0,0 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
// https://github.com/oven-sh/bun/issues/24157
// Without reuseAddr, a second process should not be able to bind to the same
// UDP port. Previously, Bun unconditionally set SO_REUSEADDR on all UDP sockets
// when port != 0, allowing duplicate binds and masking EADDRINUSE errors from
// addMembership.
test("UDP bind throws EADDRINUSE without reuseAddr when port is in use", async () => {
// First, find a free port by briefly binding to port 0 and closing.
// main.ts binds the port, then spawns child.ts which tries the same port;
// the child prints "child-error:<code>" on bind failure.
using dir = tempDir("dgram-24157-a", {
"main.ts": `
import dgram from 'node:dgram';
import { spawn } from 'node:child_process';
// Find a free port
const tmp = dgram.createSocket('udp4');
tmp.bind(0, () => {
const port = tmp.address().port;
tmp.close();
// Now both parent and child bind to that specific non-zero port
const s = dgram.createSocket({ type: 'udp4', reuseAddr: false });
s.bind(port, () => {
const child = spawn(process.execPath, [__dirname + '/child.ts', String(port)], {
stdio: 'inherit'
});
child.on('close', () => {
s.close();
});
});
s.on('error', (err) => {
console.log('parent-error:' + err.code);
});
});
`,
"child.ts": `
import dgram from 'node:dgram';
const port = parseInt(process.argv[2]);
const s = dgram.createSocket({ type: 'udp4', reuseAddr: false });
s.bind(port, () => {
console.log('child-bound:' + s.address().port);
s.close();
});
s.on('error', (err) => {
console.log('child-error:' + err.code);
});
`,
});
// Run the parent script; child output is inherited into the same stdout.
await using proc = Bun.spawn({
cmd: [bunExe(), "main.ts"],
cwd: String(dir),
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// The child should fail to bind with EADDRINUSE since reuseAddr is false
expect(stdout).toContain("child-error:EADDRINUSE");
expect(exitCode).toBe(0);
});
// Companion positive test: with reuseAddr: true both processes may bind the
// same port and join the same multicast group.
test("addMembership succeeds with reuseAddr: true", async () => {
using dir = tempDir("dgram-24157-b", {
"main.ts": `
import dgram from 'node:dgram';
import { spawn } from 'node:child_process';
// Find a free port
const tmp = dgram.createSocket('udp4');
tmp.bind(0, () => {
const port = tmp.address().port;
tmp.close();
const s = dgram.createSocket({ type: 'udp4', reuseAddr: true });
s.bind(port, () => {
try {
s.addMembership('239.255.0.2');
} catch (e) {
console.log('parent-error:' + e.message);
s.close();
return;
}
const child = spawn(process.execPath, [__dirname + '/child.ts', String(port)], {
stdio: 'inherit'
});
child.on('close', () => {
s.close();
});
});
s.on('error', (err) => {
console.log('parent-error:' + err.message);
});
});
`,
"child.ts": `
import dgram from 'node:dgram';
const port = parseInt(process.argv[2]);
const s = dgram.createSocket({ type: 'udp4', reuseAddr: true });
s.bind(port, () => {
try {
s.addMembership('239.255.0.2');
console.log('child-joined:' + s.address().port);
} catch (e) {
console.log('child-error:' + e.message);
}
s.close();
});
s.on('error', (err) => {
console.log('child-error:' + err.message);
});
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "main.ts"],
cwd: String(dir),
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// With reuseAddr: true, both should succeed in joining the multicast group
expect(stdout).toContain("child-joined:");
expect(exitCode).toBe(0);
});

View File

@@ -1,123 +0,0 @@
// https://github.com/oven-sh/bun/issues/26207
// bun run --filter and --workspaces should fall back to bun's node symlink
// when NODE env var points to a non-existent path
import { expect, test } from "bun:test";
import { chmodSync } from "fs";
import { bunEnv, bunExe, isWindows, tempDirWithFiles } from "harness";
test("bun run --workspaces creates node symlink when NODE env points to non-existent path", async () => {
// Workspace root plus one package whose "test" script shells out to `node`.
const dir = tempDirWithFiles("workspaces-node-fallback", {
"package.json": JSON.stringify({
name: "root",
workspaces: ["packages/*"],
}),
"packages/a/package.json": JSON.stringify({
name: "a",
scripts: {
test: "node -e \"console.log('node works')\"",
},
}),
});
// Set NODE to a non-existent path and remove system node from PATH
const env = {
...bunEnv,
NODE: "/nonexistent/path/to/node",
PATH: "/usr/bin", // PATH without node
};
const proc = Bun.spawn({
cmd: [bunExe(), "run", "--workspaces", "test"],
env,
cwd: dir,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// Should succeed because bun creates a symlink to its own node
expect(stdout).toContain("node works");
expect(exitCode).toBe(0);
});
// Same scenario, but driven through `--filter` instead of `--workspaces`.
test("bun run --filter creates node symlink when NODE env points to non-existent path", async () => {
const dir = tempDirWithFiles("filter-node-fallback", {
"package.json": JSON.stringify({
name: "root",
workspaces: ["packages/*"],
}),
"packages/a/package.json": JSON.stringify({
name: "a",
scripts: {
test: "node -e \"console.log('node works from filter')\"",
},
}),
});
// Set NODE to a non-existent path and remove system node from PATH
const env = {
...bunEnv,
NODE: "/nonexistent/path/to/node",
PATH: "/usr/bin", // PATH without node
};
const proc = Bun.spawn({
cmd: [bunExe(), "run", "--filter", "*", "test"],
env,
cwd: dir,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// Should succeed because bun creates a symlink to its own node
expect(stdout).toContain("node works from filter");
expect(exitCode).toBe(0);
});
// Skip on Windows: shebang scripts (#!/usr/bin/env node) are Unix-specific
test.skipIf(isWindows)("bun run --workspaces runs scripts that have #!/usr/bin/env node shebang", async () => {
const dir = tempDirWithFiles("workspaces-shebang", {
"package.json": JSON.stringify({
name: "root",
workspaces: ["packages/*"],
}),
"packages/a/package.json": JSON.stringify({
name: "a",
scripts: {
build: "./build.js",
},
}),
// Create an executable script with node shebang
"packages/a/build.js": "#!/usr/bin/env node\nconsole.log('build script ran');",
});
// Make the script executable
chmodSync(`${dir}/packages/a/build.js`, 0o755);
// Remove system node from PATH, and clear NODE/npm_node_execpath to avoid
// interfering with bun's node symlink creation
const env = {
...bunEnv,
NODE: undefined,
npm_node_execpath: undefined,
PATH: "/usr/bin", // PATH without node
};
const proc = Bun.spawn({
cmd: [bunExe(), "run", "--workspaces", "build"],
env,
cwd: dir,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// Should succeed because bun creates a symlink to its own node
expect(stdout).toContain("build script ran");
expect(exitCode).toBe(0);
});

View File

@@ -1,24 +0,0 @@
import { expect, test } from "bun:test";
import { execFileSync, execSync } from "child_process";

// When spawning fails, the thrown error's `error` property must not point
// back at the error itself; a self-referencing cycle makes JSON.stringify throw.
function expectNoSelfCycle(spawnFailure: () => void) {
  try {
    spawnFailure();
    expect.unreachable();
  } catch (err: any) {
    // err.error should not be the same object as err (self-referencing cycle)
    expect(err.error).not.toBe(err);
    // JSON.stringify should not throw due to cyclic structure
    expect(() => JSON.stringify(err)).not.toThrow();
  }
}

test("execFileSync error should not have self-referencing cycle", () => {
  expectNoSelfCycle(() => execFileSync("nonexistent_binary_xyz_123"));
});

test("execSync error should not have self-referencing cycle", () => {
  expectNoSelfCycle(() => execSync("nonexistent_binary_xyz_123"));
});

View File

@@ -1,67 +0,0 @@
import { expect, test } from "bun:test";

// https://github.com/oven-sh/bun/issues/26899
// File.prototype should be distinct from Blob.prototype.
// Shared fixture factory used by the instance-level assertions below.
const sampleFile = () => new File(["hello"], "hello.txt");

test("File.prototype !== Blob.prototype", () => {
  expect(File.prototype).not.toBe(Blob.prototype);
});

test("File.prototype inherits from Blob.prototype", () => {
  expect(Object.getPrototypeOf(File.prototype)).toBe(Blob.prototype);
});

test("new File(...).constructor.name === 'File'", () => {
  expect(sampleFile().constructor.name).toBe("File");
});

test("new File(...).constructor === File", () => {
  expect(sampleFile().constructor).toBe(File);
});

test("new File(...).constructor !== Blob", () => {
  expect(sampleFile().constructor).not.toBe(Blob);
});

test("Object.prototype.toString.call(file) === '[object File]'", () => {
  expect(Object.prototype.toString.call(sampleFile())).toBe("[object File]");
});

test("file instanceof File", () => {
  expect(sampleFile() instanceof File).toBe(true);
});

test("file instanceof Blob", () => {
  expect(sampleFile() instanceof Blob).toBe(true);
});

test("blob is not instanceof File", () => {
  expect(new Blob(["hello"]) instanceof File).toBe(false);
});

test("File instances have Blob methods", () => {
  const file = sampleFile();
  expect(typeof file.text).toBe("function");
  expect(typeof file.arrayBuffer).toBe("function");
  expect(typeof file.slice).toBe("function");
  expect(typeof file.stream).toBe("function");
});

test("File name and lastModified work", () => {
  const file = new File(["hello"], "hello.txt", { lastModified: 12345 });
  expect(file.name).toBe("hello.txt");
  expect(file.lastModified).toBe(12345);
});

test("File.prototype has correct Symbol.toStringTag", () => {
  const desc = Object.getOwnPropertyDescriptor(File.prototype, Symbol.toStringTag);
  expect(desc).toBeDefined();
  expect(desc!.value).toBe("File");
});

View File

@@ -1,85 +0,0 @@
import assert from "node:assert";
import { once } from "node:events";
import http2 from "node:http2";
import { test } from "node:test";
// Regression test for https://github.com/oven-sh/bun/issues/26915
// setLocalWindowSize() must send a connection-level WINDOW_UPDATE frame.
// Without this, the peer's connection-level window stays at the default
// 65,535 bytes and streams stall when receiving larger payloads.
// Start an HTTP/2 server that answers every stream with a 200 and a fixed
// payload of "x" bytes; resolves with its ephemeral port.
function startServer(payloadSize: number): Promise<{ port: number; server: http2.Http2Server }> {
const payload = Buffer.alloc(payloadSize, "x");
return new Promise(resolve => {
const server = http2.createServer();
server.on("stream", stream => {
stream.respond({ ":status": 200 });
stream.end(payload);
});
server.listen(0, () => {
const addr = server.address();
if (addr && typeof addr === "object") {
resolve({ port: addr.port, server });
}
});
});
}
// Issue a GET / on the session and buffer the entire response body.
function doRequest(client: http2.ClientHttp2Session): Promise<Buffer> {
return new Promise((resolve, reject) => {
const req = client.request({ ":path": "/" });
const chunks: Buffer[] = [];
req.on("data", (chunk: Buffer) => chunks.push(chunk));
req.on("end", () => resolve(Buffer.concat(chunks)));
req.on("error", reject);
req.end();
});
}
// Promisified shutdown helper for the client session.
function closeClient(client: http2.ClientHttp2Session): Promise<void> {
return new Promise(resolve => client.close(resolve));
}
// Promisified shutdown helper for the server.
function closeServer(server: http2.Http2Server): Promise<void> {
return new Promise(resolve => server.close(() => resolve()));
}
test("http2 client setLocalWindowSize sends connection-level WINDOW_UPDATE", async () => {
const payloadSize = 256 * 1024; // 256 KB - well above the 65535 default
const { port, server } = await startServer(payloadSize);
try {
const client = http2.connect(`http://127.0.0.1:${port}`, {
settings: { initialWindowSize: 10 * 1024 * 1024 },
});
// setLocalWindowSize requires the session handle to be ready
await once(client, "connect");
client.setLocalWindowSize(10 * 1024 * 1024);
// Without the connection-level WINDOW_UPDATE this read would stall.
const result = await doRequest(client);
assert.strictEqual(result.length, payloadSize);
await closeClient(client);
} finally {
await closeServer(server);
}
});
test("http2 client initialWindowSize setting allows large stream payloads", async () => {
const payloadSize = 1024 * 1024; // 1 MB
const { port, server } = await startServer(payloadSize);
try {
const client = http2.connect(`http://127.0.0.1:${port}`, {
settings: { initialWindowSize: 10 * 1024 * 1024 },
});
const result = await doRequest(client);
assert.strictEqual(result.length, payloadSize);
await closeClient(client);
} finally {
await closeServer(server);
}
});

View File

@@ -1,65 +0,0 @@
import { S3Client } from "bun";
import { describe, expect, it } from "bun:test";
import { getSecret } from "harness";
// R2 credentials are pulled from the test-secret store; the whole suite is
// skipped below when they are absent (skipIf on accessKeyId).
const s3Options = {
accessKeyId: getSecret("S3_R2_ACCESS_KEY"),
secretAccessKey: getSecret("S3_R2_SECRET_KEY"),
endpoint: getSecret("S3_R2_ENDPOINT"),
bucket: getSecret("S3_R2_BUCKET"),
};
describe.skipIf(!s3Options.accessKeyId)("issue#27272 - S3 .slice().stream() ignores slice range", () => {
const client = new S3Client(s3Options);
it("slice(0, N).stream() should only return N bytes", async () => {
// Unique object key per run so concurrent test runs cannot collide.
const filename = `test-issue-27272-${crypto.randomUUID()}`;
const s3file = client.file(filename);
try {
await s3file.write("Hello Bun! This is a longer string for testing.");
const sliced = s3file.slice(0, 5);
const stream = sliced.stream();
const reader = stream.getReader();
let bytes = 0;
const chunks: Array<Buffer> = [];
// Drain the stream, counting bytes; the slice must cap this at 5.
while (true) {
const { done, value } = await reader.read();
if (done) break;
bytes += value?.length ?? 0;
if (value) chunks.push(value as Buffer);
}
expect(bytes).toBe(5);
expect(Buffer.concat(chunks).toString()).toBe("Hello");
} finally {
// Always clean up the remote object, even on assertion failure.
await s3file.unlink();
}
});
it("slice(0, N).text() and slice(0, N).stream() should return the same data", async () => {
const filename = `test-issue-27272-consistency-${crypto.randomUUID()}`;
const s3file = client.file(filename);
try {
await s3file.write("Hello Bun! This is a longer string for testing.");
const textResult = await s3file.slice(0, 10).text();
const stream = s3file.slice(0, 10).stream();
const reader = stream.getReader();
const chunks: Array<Buffer> = [];
while (true) {
const { done, value } = await reader.read();
if (done) break;
if (value) chunks.push(value as Buffer);
}
const streamResult = Buffer.concat(chunks).toString();
// Both read paths must agree on the sliced contents.
expect(streamResult).toBe(textResult);
expect(streamResult).toBe("Hello Bun!");
} finally {
await s3file.unlink();
}
});
});

View File

@@ -1,41 +0,0 @@
import { expect, test } from "bun:test";
import crypto from "node:crypto";

// https://github.com/oven-sh/bun/issues/27445
// A failed crypto.createPrivateKey() call on an encrypted legacy RSA PEM
// should not poison subsequent unrelated crypto.createPrivateKey() calls.
test("crypto.createPrivateKey error does not poison subsequent calls", () => {
  // A known-good PKCS#8 DER key that we can parse repeatedly.
  const { privateKey: edKey } = crypto.generateKeyPairSync("ed25519");
  const edDer = edKey.export({ format: "der", type: "pkcs8" });

  // An encrypted legacy (PKCS#1) RSA PEM whose import must fail without a passphrase.
  const { privateKey: rsaKey } = crypto.generateKeyPairSync("rsa", {
    modulusLength: 2048,
  });
  const lockedPem = rsaKey.export({
    format: "pem",
    type: "pkcs1",
    cipher: "aes-256-cbc",
    passphrase: "test-passphrase",
  });

  const parseEdKey = () =>
    crypto.createPrivateKey({
      key: edDer,
      format: "der",
      type: "pkcs8",
    });

  // Baseline: the Ed25519 DER parses fine.
  expect(parseEdKey().asymmetricKeyType).toBe("ed25519");

  // Importing the encrypted PEM without a passphrase must throw...
  expect(() => crypto.createPrivateKey(lockedPem)).toThrow("Passphrase required for encrypted key");

  // ...and must not corrupt state for the next, unrelated parse.
  expect(parseEdKey().asymmetricKeyType).toBe("ed25519");
});

View File

@@ -1,45 +0,0 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
// Bundle a stylesheet that uses the four logical border-radius properties and
// verify each is lowered to its own distinct physical property.
test("CSS bundler maps logical border-radius properties to correct physical properties", async () => {
using dir = tempDir("css-logical-border-radius", {
"input.css": `.box {
border-start-start-radius: var(--r, 20px);
border-start-end-radius: var(--r, 20px);
border-end-start-radius: var(--r, 20px);
border-end-end-radius: var(--r, 20px);
}
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "build", "input.css", "--target=browser", "--outdir", "out"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
const output = await Bun.file(`${dir}/out/input.css`).text();
// Each logical property must map to its own distinct physical property.
// The output contains LTR and RTL variants (with :lang() selectors), so
// each physical property appears multiple times. The key check is that all
// four distinct physical properties are present (not all mapped to one).
expect(output).toContain("border-top-left-radius:");
expect(output).toContain("border-top-right-radius:");
expect(output).toContain("border-bottom-left-radius:");
expect(output).toContain("border-bottom-right-radius:");
// In the LTR block, verify each physical property appears exactly once.
// Extract the first rule block (LTR) to check the mapping is correct.
const firstBlock = output.split("}")[0];
expect((firstBlock.match(/border-top-left-radius/g) || []).length).toBe(1);
expect((firstBlock.match(/border-top-right-radius/g) || []).length).toBe(1);
expect((firstBlock.match(/border-bottom-right-radius/g) || []).length).toBe(1);
expect((firstBlock.match(/border-bottom-left-radius/g) || []).length).toBe(1);
expect(exitCode).toBe(0);
});

View File

@@ -1,71 +0,0 @@
import { describe } from "bun:test";
import { itBundled } from "../../bundler/expectBundled";
describe("bundler", () => {
// Test that `bun build --compile` produces absolute asset URLs in HTML,
// so that assets load correctly when served from nested routes like "/foo/".
// Regression test for https://github.com/oven-sh/bun/issues/27465
// Exercised through both the JS API and the CLI bundler backends.
for (const backend of ["api", "cli"] as const) {
itBundled(`compile/${backend}/HTMLNestedRouteAssetURLs`, {
compile: true,
backend: backend,
files: {
"/entry.ts": /* js */ `
import { serve } from "bun";
import index from "./index.html";
const server = serve({
port: 0,
routes: {
"/foo/": index,
"/foo/*": index,
},
});
const res = await fetch(server.url + "foo/");
const html = await res.text();
const srcMatch = html.match(/src="([^"]+)"/);
if (!srcMatch) {
console.log("ERROR: no src attribute found in HTML");
server.stop(true);
process.exit(1);
}
const src = srcMatch[1];
if (src.startsWith("./")) {
console.log("FAIL: relative URL " + src);
server.stop(true);
process.exit(1);
}
// Asset URLs should be absolute (start with "/")
const assetRes = await fetch(server.url + src.slice(1));
if (!assetRes.ok) {
console.log("FAIL: asset not accessible at " + src);
server.stop(true);
process.exit(1);
}
console.log("Asset URL is absolute: " + src);
server.stop(true);
`,
"/index.html": /* html */ `
<!DOCTYPE html>
<html>
<head><title>Test</title></head>
<body>
<h1>Hello</h1>
<script src="./app.ts"></script>
</body>
</html>
`,
"/app.ts": /* js */ `
console.log("client loaded");
`,
},
// The compiled binary's stdout must report an absolute asset URL.
run: {
stdout: /Asset URL is absolute: \/.+/,
},
});
}
});

View File

@@ -1,115 +0,0 @@
import { expect, test } from "bun:test";
// https://github.com/oven-sh/bun/issues/27575
// Bun.Transpiler ignored experimentalDecorators: true from tsconfig,
// always emitting TC39-style decorators instead of legacy TypeScript decorators.
test("Bun.Transpiler respects experimentalDecorators: true from tsconfig", () => {
  // Opt into legacy (experimental) TypeScript decorators via the embedded tsconfig.
  const tsconfig = JSON.stringify({
    compilerOptions: {
      experimentalDecorators: true,
    },
  });
  const legacy = new Bun.Transpiler({ loader: "ts", target: "browser", tsconfig });
  const source = `
function Prop() { return function(target: any, key: string) {}; }
class Foo {
@Prop() bar: number = 0;
}
`;
  const output = legacy.transformSync(source);
  // Legacy decorators lower through __legacyDecorateClassTS; none of the
  // TC39 decorator helpers may appear in the emitted code.
  expect(output).not.toContain("__decorateElement");
  expect(output).not.toContain("__decoratorStart");
  expect(output).not.toContain("__runInitializers");
  expect(output).toContain("__legacyDecorateClassTS");
});
test("Bun.Transpiler respects emitDecoratorMetadata: true from tsconfig", () => {
  // Both flags enabled: legacy decorator lowering plus type-metadata emission.
  const compilerOptions = {
    experimentalDecorators: true,
    emitDecoratorMetadata: true,
  };
  const transpiler = new Bun.Transpiler({
    loader: "ts",
    target: "browser",
    tsconfig: JSON.stringify({ compilerOptions }),
  });
  const decorated = `
function Dec() { return function(target: any, key: string) {}; }
class Foo {
@Dec() bar: string = "";
}
`;
  const emitted = transpiler.transformSync(decorated);
  // Expect the legacy lowering and metadata helper, with no TC39 helper.
  expect(emitted).not.toContain("__decorateElement");
  expect(emitted).toContain("__legacyDecorateClassTS");
  expect(emitted).toContain("__legacyMetadataTS");
});
test("Bun.Transpiler emits TC39 decorators when experimentalDecorators is not set", () => {
  // With an empty compilerOptions block, Bun defaults to standard TC39 decorators.
  const standard = new Bun.Transpiler({
    loader: "ts",
    target: "browser",
    tsconfig: JSON.stringify({ compilerOptions: {} }),
  });
  const snippet = `
function Prop() { return function(target: any, key: string) {}; }
class Foo {
@Prop() bar: number = 0;
}
`;
  const transformed = standard.transformSync(snippet);
  // TC39 lowering uses __decorateElement; the legacy helper must be absent.
  expect(transformed).toContain("__decorateElement");
  expect(transformed).not.toContain("__legacyDecorateClassTS");
});
test("Bun.Transpiler.transform (async) respects experimentalDecorators: true", async () => {
  // Same expectation as the sync variant, exercised through the async transform().
  const tsconfig = JSON.stringify({
    compilerOptions: {
      experimentalDecorators: true,
    },
  });
  const transpiler = new Bun.Transpiler({ loader: "ts", target: "browser", tsconfig });
  const input = `
function Prop() { return function(target: any, key: string) {}; }
class Foo {
@Prop() bar: number = 0;
}
`;
  const produced = await transpiler.transform(input);
  // The async path must also honor legacy decorators: no TC39 helpers emitted.
  expect(produced).not.toContain("__decorateElement");
  expect(produced).not.toContain("__decoratorStart");
  expect(produced).not.toContain("__runInitializers");
  expect(produced).toContain("__legacyDecorateClassTS");
});

View File

@@ -1,40 +0,0 @@
import { cssInternals } from "bun:internal-for-testing";
import { expect, test } from "bun:test";
const { minifyTest, testWithOptions } = cssInternals;
test("unicode-range in @font-face is preserved", () => {
const source = `@font-face {
font-family: "Roboto Variable";
unicode-range: U+0000-00FF, U+0131, U+0152-0153;
}`;
const expected = `@font-face {
font-family: Roboto Variable;
unicode-range: U+??, U+131, U+152-153;
}`;
expect(testWithOptions(source, expected)).toEqualIgnoringWhitespace(expected);
});
// Minification must keep unicode-range descriptors intact while compacting the
// hex values (leading zeros stripped, full-byte ranges collapsed to wildcards,
// hex digits lowercased). Each tuple is [test name, input CSS, minified output].
const minifyCases = [
  [
    "unicode-range in @font-face is preserved when minified",
    `@font-face { font-family: "Roboto Variable"; unicode-range: U+0000-00FF, U+0131, U+0152-0153; }`,
    `@font-face{font-family:Roboto Variable;unicode-range:U+??,U+131,U+152-153}`,
  ],
  [
    "unicode-range wildcard in @font-face is preserved",
    `@font-face { font-family: "Test"; unicode-range: U+4??; }`,
    `@font-face{font-family:Test;unicode-range:U+4??}`,
  ],
  [
    "unicode-range with hex letters in @font-face is preserved",
    `@font-face { font-family: "Test"; unicode-range: U+A640-A69F; }`,
    `@font-face{font-family:Test;unicode-range:U+a640-a69f}`,
  ],
  [
    "unicode-range single hex value in @font-face is preserved",
    `@font-face { font-family: "Test"; unicode-range: U+00FF; }`,
    `@font-face{font-family:Test;unicode-range:U+ff}`,
  ],
] as const;
for (const [name, input, output] of minifyCases) {
  test(name, () => {
    expect(minifyTest(input, output)).toEqual(output);
  });
}