mirror of
https://github.com/oven-sh/bun
synced 2026-03-01 13:01:06 +01:00
Compare commits: claude/fix...claude/fix (27 commits)

Commit SHA1s:
26b3d8a1e9
694adfd898
b41e5f650b
cd3f036814
1a86bd85ff
61f3bcb4c6
915749f87e
a3930399b4
9f88b3ffe4
488ef6d7be
668d960f1a
c5f0e4adf7
106a953e3b
0e98e44714
bbeab8726f
dc31aa1c8a
68d8bb5ce5
977506c329
f06119ad0c
7ef4b1be38
f931515906
fabc0523c8
ea212ca159
1034fc922c
2f03dab658
0ee3b7215e
333970dc2f
@@ -46,7 +46,7 @@ macro(optionx variable type description)
set(${variable}_PREVIEW -D${variable})

if(DEFINED ENV{${variable}})
set(${variable} $ENV{${variable}} CACHE ${${variable}_TYPE} ${description} FORCE)
set(${variable} "$ENV{${variable}}" CACHE ${${variable}_TYPE} ${description} FORCE)
set(${variable}_SOURCE "environment variable")
set(${variable}_PREVIEW ${variable})
endif()

@@ -127,7 +127,7 @@ if(ENABLE_ASAN AND ENABLE_LTO)
endif()

if(BUILDKITE_COMMIT)
set(DEFAULT_REVISION ${BUILDKITE_COMMIT})
set(DEFAULT_REVISION "${BUILDKITE_COMMIT}")
else()
execute_process(
COMMAND git rev-parse HEAD
@@ -141,7 +141,7 @@ else()
endif()
endif()

optionx(REVISION STRING "The git revision of the build" DEFAULT ${DEFAULT_REVISION})
optionx(REVISION STRING "The git revision of the build" DEFAULT "${DEFAULT_REVISION}")

# Used in process.version, process.versions.node, napi, and elsewhere
setx(NODEJS_VERSION "24.3.0")

@@ -702,7 +702,7 @@ endif()

set(ZIG_FLAGS_BUN)
if(NOT "${REVISION}" STREQUAL "")
set(ZIG_FLAGS_BUN ${ZIG_FLAGS_BUN} -Dsha=${REVISION})
set(ZIG_FLAGS_BUN ${ZIG_FLAGS_BUN} "-Dsha=${REVISION}")
endif()

register_command(
@@ -728,10 +728,10 @@ register_command(
-Denable_tinycc=$<IF:$<BOOL:${ENABLE_TINYCC}>,true,false>
-Duse_mimalloc=$<IF:$<BOOL:${USE_MIMALLOC_AS_DEFAULT_ALLOCATOR}>,true,false>
-Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS}
-Dversion=${VERSION}
-Dreported_nodejs_version=${NODEJS_VERSION}
-Dcanary=${CANARY_REVISION}
-Dcodegen_path=${CODEGEN_PATH}
"-Dversion=${VERSION}"
"-Dreported_nodejs_version=${NODEJS_VERSION}"
"-Dcanary=${CANARY_REVISION}"
"-Dcodegen_path=${CODEGEN_PATH}"
-Dcodegen_embed=$<IF:$<BOOL:${CODEGEN_EMBED}>,true,false>
--prominent-compile-errors
--summary all
@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
e9e16dca48dd4a8ffbc77642bc4be60407585f11
e3aa54798602dd27250fafde1b5a66f080046252
)

set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
@@ -65,10 +65,14 @@ string(REPLACE "\\n" "\\\\n" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\r" "\\\\r" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\t" "\\\\t" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "${BKSLASH_PLACEHOLDER}" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
# CMake treats semicolons as list separators in unquoted variable expansions.
# Commit messages and other JSON string fields can contain semicolons, which would
# cause string(JSON) to receive garbled arguments. Escape them before parsing.
string(REPLACE ";" "\\;" BUILDKITE_BUILD "${BUILDKITE_BUILD}")

string(JSON BUILDKITE_BUILD_UUID GET ${BUILDKITE_BUILD} id)
string(JSON BUILDKITE_JOBS GET ${BUILDKITE_BUILD} jobs)
string(JSON BUILDKITE_JOBS_COUNT LENGTH ${BUILDKITE_JOBS})
string(JSON BUILDKITE_BUILD_UUID GET "${BUILDKITE_BUILD}" id)
string(JSON BUILDKITE_JOBS GET "${BUILDKITE_BUILD}" jobs)
string(JSON BUILDKITE_JOBS_COUNT LENGTH "${BUILDKITE_JOBS}")

if(NOT BUILDKITE_JOBS_COUNT GREATER 0)
message(FATAL_ERROR "No jobs found: ${BUILDKITE_BUILD_URL}")
@@ -83,14 +87,14 @@ set(BUILDKITE_JOBS_MATCH)

math(EXPR BUILDKITE_JOBS_MAX_INDEX "${BUILDKITE_JOBS_COUNT} - 1")
foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
string(JSON BUILDKITE_JOB GET ${BUILDKITE_JOBS} ${i})
string(JSON BUILDKITE_JOB_ID GET ${BUILDKITE_JOB} id)
string(JSON BUILDKITE_JOB_PASSED GET ${BUILDKITE_JOB} passed)
string(JSON BUILDKITE_JOB_GROUP_ID GET ${BUILDKITE_JOB} group_uuid)
string(JSON BUILDKITE_JOB_GROUP_KEY GET ${BUILDKITE_JOB} group_identifier)
string(JSON BUILDKITE_JOB_NAME GET ${BUILDKITE_JOB} step_key)
string(JSON BUILDKITE_JOB GET "${BUILDKITE_JOBS}" ${i})
string(JSON BUILDKITE_JOB_ID GET "${BUILDKITE_JOB}" id)
string(JSON BUILDKITE_JOB_PASSED GET "${BUILDKITE_JOB}" passed)
string(JSON BUILDKITE_JOB_GROUP_ID GET "${BUILDKITE_JOB}" group_uuid)
string(JSON BUILDKITE_JOB_GROUP_KEY GET "${BUILDKITE_JOB}" group_identifier)
string(JSON BUILDKITE_JOB_NAME GET "${BUILDKITE_JOB}" step_key)
if(NOT BUILDKITE_JOB_NAME)
string(JSON BUILDKITE_JOB_NAME GET ${BUILDKITE_JOB} name)
string(JSON BUILDKITE_JOB_NAME GET "${BUILDKITE_JOB}" name)
endif()

if(NOT BUILDKITE_JOB_PASSED)
@@ -121,7 +125,8 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
endif()

file(READ ${BUILDKITE_ARTIFACTS_PATH} BUILDKITE_ARTIFACTS)
string(JSON BUILDKITE_ARTIFACTS_LENGTH LENGTH ${BUILDKITE_ARTIFACTS})
string(REPLACE ";" "\\;" BUILDKITE_ARTIFACTS "${BUILDKITE_ARTIFACTS}")
string(JSON BUILDKITE_ARTIFACTS_LENGTH LENGTH "${BUILDKITE_ARTIFACTS}")
if(NOT BUILDKITE_ARTIFACTS_LENGTH GREATER 0)
list(APPEND BUILDKITE_JOBS_NO_ARTIFACTS ${BUILDKITE_JOB_NAME})
continue()
@@ -129,9 +134,9 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})

math(EXPR BUILDKITE_ARTIFACTS_MAX_INDEX "${BUILDKITE_ARTIFACTS_LENGTH} - 1")
foreach(i RANGE 0 ${BUILDKITE_ARTIFACTS_MAX_INDEX})
string(JSON BUILDKITE_ARTIFACT GET ${BUILDKITE_ARTIFACTS} ${i})
string(JSON BUILDKITE_ARTIFACT_ID GET ${BUILDKITE_ARTIFACT} id)
string(JSON BUILDKITE_ARTIFACT_PATH GET ${BUILDKITE_ARTIFACT} path)
string(JSON BUILDKITE_ARTIFACT GET "${BUILDKITE_ARTIFACTS}" ${i})
string(JSON BUILDKITE_ARTIFACT_ID GET "${BUILDKITE_ARTIFACT}" id)
string(JSON BUILDKITE_ARTIFACT_PATH GET "${BUILDKITE_ARTIFACT}" path)

if(NOT BUILDKITE_ARTIFACT_PATH MATCHES "\\.(o|a|lib|zip|tar|gz)")
continue()
@@ -96,7 +96,7 @@ $2b$10$Lyj9kHYZtiyfxh2G60TEfeqs7xkkGiEFFDi3iJGc50ZG/XJ1sxIFi;
The format is composed of:

- `bcrypt`: `$2b`
- `rounds`: `$10` - rounds (log10 of the actual number of rounds)
- `rounds`: `$10` - rounds (log2 of the actual number of rounds)
- `salt`: `$Lyj9kHYZtiyfxh2G60TEfeqs7xkkGiEFFDi3iJGc50ZG/XJ1sxIFi`
- `hash`: `$GzJ8PuBi+K+BVojzPfS5mjnC8OpLGtv8KJqF99eP6a4`
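The corrected wording matters: the `$10` cost field is the base-2 logarithm of the round count, so cost 10 means 2^10 = 1024 rounds. A minimal sketch of producing and checking such a hash with Bun's password API (assumes `Bun.password`, which these docs describe; the literal salt and hash differ on every run):

```ts
// Hedged example: bcrypt via Bun.password; cost 10 => 2**10 = 1024 rounds.
const hash = await Bun.password.hash("hunter2", { algorithm: "bcrypt", cost: 10 });
console.log(hash); // e.g. "$2b$10$<22-char salt><31-char hash>"

// The algorithm and cost are encoded in the hash itself, so verify needs no options.
console.log(await Bun.password.verify("hunter2", hash)); // true
```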
packages/bun-types/s3.d.ts (vendored, 19 changed lines)
@@ -299,6 +299,25 @@ declare module "bun" {
*/
contentDisposition?: string | undefined;

/**
* The Content-Encoding header value.
* Specifies what content encodings have been applied to the object,
* for example to indicate that it has been compressed.
*
* @example
* // Setting gzip encoding
* const file = s3.file("data.json.gz", {
* contentEncoding: "gzip"
* });
*
* @example
* // Setting encoding when writing
* await s3.write("data.json.gz", compressedData, {
* contentEncoding: "gzip"
* });
*/
contentEncoding?: string | undefined;

/**
* By default, Amazon S3 uses the STANDARD Storage Class to store newly created objects.
*
@@ -1231,12 +1231,6 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_udp_socket(const char *host, int port, int op
return LIBUS_SOCKET_ERROR;
}

if (port != 0) {
/* Should this also go for UDP? */
int enabled = 1;
setsockopt(listenFd, SOL_SOCKET, SO_REUSEADDR, &enabled, sizeof(enabled));
}

if (bsd_set_reuse(listenFd, options) != 0) {
freeaddrinfo(result);
return LIBUS_SOCKET_ERROR;
@@ -504,6 +504,11 @@ namespace uWS
return ((c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z')) || c == '-';
}

/* RFC 9110 Section 5.5: optional whitespace (OWS) is SP or HTAB */
static inline bool isHTTPHeaderValueWhitespace(unsigned char c) {
return c == ' ' || c == '\t';
}

static inline int isHTTPorHTTPSPrefixForProxies(char *data, char *end) {
// We can check 8 because:
// 1. If it's "http://" that's 7 bytes, and it's supposed to at least have a trailing slash.
@@ -775,13 +780,13 @@ namespace uWS
/* Store this header, it is valid */
headers->value = std::string_view(preliminaryValue, (size_t) (postPaddedBuffer - preliminaryValue));
postPaddedBuffer += 2;
/* Trim trailing whitespace (SP, HTAB) */
while (headers->value.length() && headers->value.back() < 33) {
/* Trim trailing whitespace (SP, HTAB) per RFC 9110 Section 5.5 */
while (headers->value.length() && isHTTPHeaderValueWhitespace(headers->value.back())) {
headers->value.remove_suffix(1);
}

/* Trim initial whitespace (SP, HTAB) */
while (headers->value.length() && headers->value.front() < 33) {
/* Trim initial whitespace (SP, HTAB) per RFC 9110 Section 5.5 */
while (headers->value.length() && isHTTPHeaderValueWhitespace(headers->value.front())) {
headers->value.remove_prefix(1);
}
@@ -2811,10 +2811,24 @@ export function toYaml(obj, indent = 0) {
value.includes("#") ||
value.includes("'") ||
value.includes('"') ||
value.includes("\\") ||
value.includes("\n") ||
value.includes("*"))
value.includes("*") ||
value.includes("&") ||
value.includes("!") ||
value.includes("|") ||
value.includes(">") ||
value.includes("%") ||
value.includes("@") ||
value.includes("`") ||
value.includes("{") ||
value.includes("}") ||
value.includes("[") ||
value.includes("]") ||
value.includes(",") ||
value.includes(";"))
) {
result += `${spaces}${key}: "${value.replace(/"/g, '\\"')}"\n`;
result += `${spaces}${key}: "${value.replace(/\\/g, "\\\\").replace(/"/g, '\\"')}"\n`;
continue;
}
result += `${spaces}${key}: ${value}\n`;
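The escaping-order change above matters because a YAML double-quoted scalar treats `\` as an escape introducer: raw backslashes must be doubled before quotes are escaped, otherwise the output contains bare backslashes that do not form valid escapes. A small hedged illustration of the same idea in plain TypeScript, using a made-up value:

```ts
// Hypothetical value containing both a quote and a backslash.
const value = 'say "hi" \\ bye';

// Previous behavior: only quotes escaped; the lone backslash survives unescaped.
const before = value.replace(/"/g, '\\"');

// Fixed order: escape backslashes first, then quotes.
const after = value.replace(/\\/g, "\\\\").replace(/"/g, '\\"');

console.log(`k: "${before}"`); // k: "say \"hi\" \ bye"   (invalid YAML escape sequence)
console.log(`k: "${after}"`);  // k: "say \"hi\" \\ bye"  (valid)
```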
@@ -515,6 +515,8 @@ pub const TransformTask = struct {
.path = source.path,
.virtual_source = &source,
.replace_exports = this.replace_exports,
.experimental_decorators = if (this.tsconfig) |ts| ts.experimental_decorators else false,
.emit_decorator_metadata = if (this.tsconfig) |ts| ts.emit_decorator_metadata else false,
};

const parse_result = this.transpiler.parse(parse_options, null) orelse {
@@ -584,9 +586,8 @@ pub const TransformTask = struct {
this.log.deinit();
this.input_code.deinitAndUnprotect();
this.output_code.deref();
if (this.tsconfig) |tsconfig| {
tsconfig.deinit();
}
// tsconfig is owned by JSTranspiler, not by TransformTask.
// Do not free it here — JSTranspiler.deinit handles it.
this.js_instance.deref();
bun.destroy(this);
}
@@ -660,6 +661,9 @@ pub fn constructor(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) b
});
errdefer {
this.config.log.deinit();
if (this.config.tsconfig) |tsconfig| {
tsconfig.deinit();
}
this.arena.deinit();
this.ref_count.clearWithoutDestructor();
bun.destroy(this);
@@ -744,6 +748,9 @@ pub fn deinit(this: *JSTranspiler) void {
this.buffer_writer.?.buffer.deinit();
}

if (this.config.tsconfig) |tsconfig| {
tsconfig.deinit();
}
this.arena.deinit();
bun.destroy(this);
}
@@ -806,7 +813,8 @@ fn getParseResult(this: *JSTranspiler, allocator: std.mem.Allocator, code: []con
.virtual_source = source,
.replace_exports = this.config.runtime.replace_exports,
.macro_js_ctx = macro_js_ctx,
// .allocator = this.
.experimental_decorators = if (this.config.tsconfig) |ts| ts.experimental_decorators else false,
.emit_decorator_metadata = if (this.config.tsconfig) |ts| ts.emit_decorator_metadata else false,
};

return this.transpiler.parse(parse_options, null);
@@ -2507,6 +2507,7 @@ EVPKeyPointer::ParseKeyResult EVPKeyPointer::TryParsePrivateKey(
const PrivateKeyEncodingConfig& config,
const Buffer<const unsigned char>& buffer)
{
ClearErrorOnReturn clear_error_on_return;
static constexpr auto keyOrError = [](EVPKeyPointer pkey,
bool had_passphrase = false) {
if (int err = ERR_peek_error()) {
@@ -613,6 +613,14 @@ pub const PathLike = union(enum) {
}
}

if (sliced.len >= buf.len) {
bun.Output.debugWarn("path too long: {d} bytes exceeds PathBuffer capacity of {d}\n", .{ sliced.len, buf.len });
if (comptime !force) return "";

buf[0] = 0;
return buf[0..0 :0];
}

@memcpy(buf[0..sliced.len], sliced);
buf[sliced.len] = 0;
return buf[0..sliced.len :0];
@@ -726,12 +734,13 @@ pub const PathLike = union(enum) {
}

pub fn fromBunString(global: *jsc.JSGlobalObject, str: *bun.String, will_be_async: bool, allocator: std.mem.Allocator) !PathLike {
try Valid.pathStringLength(str.length(), global);

if (will_be_async) {
var sliced = try str.toThreadSafeSlice(allocator);
errdefer sliced.deinit();

// Validate the UTF-8 byte length after conversion, since the path
// will be stored in a fixed-size PathBuffer.
try Valid.pathStringLength(sliced.slice().len, global);
try Valid.pathNullBytes(sliced.slice(), global);

sliced.reportExtraMemory(global.vm());
@@ -744,6 +753,9 @@ pub const PathLike = union(enum) {
var sliced = str.toSlice(allocator);
errdefer sliced.deinit();

// Validate the UTF-8 byte length after conversion, since the path
// will be stored in a fixed-size PathBuffer.
try Valid.pathStringLength(sliced.slice().len, global);
try Valid.pathNullBytes(sliced.slice(), global);

// Costs nothing to keep both around.
@@ -219,9 +219,10 @@ pub const BundleV2 = struct {
client_transpiler.options.chunk_naming = bun.options.PathTemplate.chunk.data;
client_transpiler.options.entry_naming = "./[name]-[hash].[ext]";

// Avoid setting a public path for --compile since all the assets
// will be served relative to the server root.
client_transpiler.options.public_path = "";
// Use "/" so that asset URLs in HTML are absolute (e.g. "/chunk-abc.js"
// instead of "./chunk-abc.js"). Relative paths break when the HTML is
// served from a nested route like "/foo/".
client_transpiler.options.public_path = "/";
}

client_transpiler.setLog(this_transpiler.log);
@@ -236,8 +236,8 @@ pub const BorderRadiusHandler = struct {
.unparsed => |unparsed| {
ctx.addLogicalRule(
ctx.allocator,
Property{ .unparsed = unparsed.withPropertyId(ctx.allocator, .{ .@"border-top-left-radius" = prefix }) },
Property{ .unparsed = unparsed.withPropertyId(ctx.allocator, .{ .@"border-top-right-radius" = prefix }) },
Property{ .unparsed = unparsed.withPropertyId(ctx.allocator, @unionInit(css.PropertyId, ltr, prefix)) },
Property{ .unparsed = unparsed.withPropertyId(ctx.allocator, @unionInit(css.PropertyId, rtl, prefix)) },
);
},
else => {},
@@ -287,7 +287,7 @@ pub const UnicodeRange = struct {
if (digit < 10) return digit;
// Force the 6th bit to be set to ensure ascii is lower case.
// digit = (@as(u32, b) | 0b10_0000).wrapping_sub('a' as u32).saturating_add(10);
digit = (@as(u32, b) | 0b10_0000) -% (@as(u32, 'a') +% 10);
digit = ((@as(u32, b) | 0b10_0000) -% @as(u32, 'a')) +| 10;
return if (digit < 16) digit else null;
}
};
@@ -696,7 +696,7 @@ pub const FontFaceDeclarationParser = struct {
return .{ .result = .{ .font_stretch = c } };
}
}
} else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "unicode-renage")) {
} else if (bun.strings.eqlCaseInsensitiveASCIIICheckLength(name, "unicode-range")) {
if (input.parseList(UnicodeRange, UnicodeRange.parse).asValue()) |c| {
if (input.expectExhausted().isOk()) {
return .{ .result = .{ .unicode_range = c } };
@@ -31,9 +31,6 @@ pub const enable_keepalive = true;

pub const atomic_file_watcher = env.isLinux;

// This change didn't seem to make a meaningful difference in microbenchmarks
pub const latin1_is_now_ascii = false;

pub const http_buffer_pooling = true;

pub const disable_lolhtml = false;
@@ -584,7 +584,13 @@ pub fn installWithManager(
try waitForEverythingExceptPeers(manager);
}

if (manager.peer_dependencies.readableLength() > 0) {
// Resolving a peer dep can create a NEW package whose own peer deps
// get re-queued to `peer_dependencies` during `drainDependencyList`.
// When all manifests are cached (synchronous resolution), no I/O tasks
// are spawned, so `pendingTaskCount() == 0`. We must drain the peer
// queue iteratively here — entering the event loop (`waitForPeers`)
// with zero pending I/O would block forever.
while (manager.peer_dependencies.readableLength() > 0) {
try manager.processPeerDependencyList();
manager.drainDependencyList();
}
@@ -1,7 +1,11 @@
// Hardcoded module "node:_http_server"
const EventEmitter: typeof import("node:events").EventEmitter = require("node:events");
const { Duplex, Stream } = require("node:stream");
const { _checkInvalidHeaderChar: checkInvalidHeaderChar } = require("node:_http_common");
const {
_checkInvalidHeaderChar: checkInvalidHeaderChar,
validateHeaderName,
validateHeaderValue,
} = require("node:_http_common");
const { validateObject, validateLinkHeaderValue, validateBoolean, validateInteger } = require("internal/validators");
const { ConnResetException } = require("internal/shared");

@@ -1284,7 +1288,10 @@ ServerResponse.prototype.writeEarlyHints = function (hints, cb) {

for (const key of ObjectKeys(hints)) {
if (key !== "link") {
head += key + ": " + hints[key] + "\r\n";
const value = hints[key];
validateHeaderName(key);
validateHeaderValue(key, value);
head += key + ": " + value + "\r\n";
}
}
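With the validation added above, a sketch of how the hardened path should behave from user code (hedged; this mirrors the Node `writeEarlyHints` API that this module implements, it is not part of the diff):

```ts
import http from "node:http";

const server = http.createServer((req, res) => {
  // Valid hint headers pass through validateHeaderName/validateHeaderValue.
  res.writeEarlyHints({ link: "</style.css>; rel=preload", "x-trace-id": "abc123" });

  // A value smuggling CRLF should now throw (ERR_INVALID_CHAR) instead of being
  // written into the 103 response verbatim:
  // res.writeEarlyHints({ "x-bad": "v\r\nSet-Cookie: session=evil" });

  res.writeHead(200);
  res.end("ok");
});
server.listen(0);
```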
@@ -539,8 +539,6 @@ pub fn downloadStream(
) void {
const range = brk: {
if (size) |size_| {
if (offset == 0) break :brk null;

var end = (offset + size_);
if (size_ > 0) {
end -= 1;
@@ -510,6 +510,12 @@ fn initRedirections(
},
.jsbuf => |val| {
const globalObject = interpreter.event_loop.js.global;

if (file.jsbuf.idx >= interpreter.jsobjs.len) {
globalObject.throw("Invalid JS object reference in shell", .{}) catch {};
return .failed;
}

if (interpreter.jsobjs[file.jsbuf.idx].asArrayBuffer(globalObject)) |buf| {
const arraybuf: BuiltinIO.ArrayBuf = .{ .buf = jsc.ArrayBuffer.Strong{
.array_buffer = buf,
@@ -289,7 +289,7 @@ pub noinline fn next(this: *Rm) Yield {
}

switch (this.state) {
.done => return this.bltn().done(0),
.done => return this.bltn().done(this.state.done.exit_code),
.err => return this.bltn().done(this.state.err),
else => unreachable,
}
@@ -430,7 +430,7 @@ pub fn onShellRmTaskDone(this: *Rm, task: *ShellRmTask) void {
if (tasks_done >= this.state.exec.total_tasks and
exec.getOutputCount(.output_done) >= exec.getOutputCount(.output_count))
{
this.state = .{ .done = .{ .exit_code = if (exec.err) |theerr| theerr.errno else 0 } };
this.state = .{ .done = .{ .exit_code = if (exec.err != null) 1 else 0 } };
this.next().run();
}
}
@@ -792,13 +792,14 @@ pub const Interpreter = struct {
out_parser: *?bun.shell.Parser,
out_lex_result: *?shell.LexResult,
) !ast.Script {
const jsobjs_len: u32 = @intCast(jsobjs.len);
const lex_result = brk: {
if (bun.strings.isAllASCII(script)) {
var lexer = bun.shell.LexerAscii.new(arena_allocator, script, jsstrings_to_escape);
var lexer = bun.shell.LexerAscii.new(arena_allocator, script, jsstrings_to_escape, jsobjs_len);
try lexer.lex();
break :brk lexer.get_result();
}
var lexer = bun.shell.LexerUnicode.new(arena_allocator, script, jsstrings_to_escape);
var lexer = bun.shell.LexerUnicode.new(arena_allocator, script, jsstrings_to_escape, jsobjs_len);
try lexer.lex();
break :brk lexer.get_result();
};
@@ -1020,6 +1020,9 @@ pub const AST = struct {
Var: []const u8,
VarArgv: u8,
Text: []const u8,
/// An empty string from a quoted context (e.g. "", '', or ${''}). Preserved as an
/// explicit empty argument during expansion, unlike unquoted empty text which is dropped.
quoted_empty,
asterisk,
double_asterisk,
brace_begin,
@@ -1042,6 +1045,7 @@ pub const AST = struct {
.Var => false,
.VarArgv => false,
.Text => false,
.quoted_empty => false,
.asterisk => true,
.double_asterisk => true,
.brace_begin => false,
@@ -1845,6 +1849,9 @@ pub const Parser = struct {
if (txt.len > 0) {
try atoms.append(.{ .Text = txt });
}
} else if (txt.len == 0 and (peeked == .SingleQuotedText or peeked == .DoubleQuotedText)) {
// Preserve empty quoted strings ("", '') as explicit empty arguments
try atoms.append(.quoted_empty);
} else {
try atoms.append(.{ .Text = txt });
}
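The user-visible effect of the new `quoted_empty` atom, sketched from the Bun Shell side (hedged; the exact bytes depend on the command, here plain `echo`):

```ts
import { $ } from "bun";

// An interpolated empty string should survive as an explicit empty argument.
const empty = "";
const out = await $`echo a ${empty} b`.text();

// echo joins its arguments with single spaces, so three arguments ("a", "", "b")
// should print with two spaces between "a" and "b"; previously the empty
// argument was dropped and only one space appeared.
console.log(JSON.stringify(out)); // expected: "a  b\n"
```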
@@ -2334,6 +2341,9 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
/// Not owned by this struct
string_refs: []bun.String,

/// Number of JS object references expected (for bounds validation)
jsobjs_len: u32 = 0,

const SubShellKind = enum {
/// (echo hi; echo hello)
normal,
@@ -2363,13 +2373,14 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
delimit_quote: bool,
};

pub fn new(alloc: Allocator, src: []const u8, strings_to_escape: []bun.String) @This() {
pub fn new(alloc: Allocator, src: []const u8, strings_to_escape: []bun.String, jsobjs_len: u32) @This() {
return .{
.chars = Chars.init(src),
.tokens = ArrayList(Token).init(alloc),
.strpool = ArrayList(u8).init(alloc),
.errors = ArrayList(LexError).init(alloc),
.string_refs = strings_to_escape,
.jsobjs_len = jsobjs_len,
};
}

@@ -2400,6 +2411,7 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
.word_start = self.word_start,
.j = self.j,
.string_refs = self.string_refs,
.jsobjs_len = self.jsobjs_len,
};
sublexer.chars.state = .Normal;
return sublexer;
@@ -2789,10 +2801,12 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
comptime assertSpecialChar('\'');

if (self.chars.state == .Single) {
try self.break_word(false);
self.chars.state = .Normal;
continue;
}
if (self.chars.state == .Normal) {
try self.break_word(false);
self.chars.state = .Single;
continue;
}
@@ -2888,9 +2902,12 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
}

inline fn isImmediatelyEscapedQuote(self: *@This()) bool {
return (self.chars.state == .Double and
return ((self.chars.state == .Double and
(self.chars.current != null and !self.chars.current.?.escaped and self.chars.current.?.char == '"') and
(self.chars.prev != null and !self.chars.prev.?.escaped and self.chars.prev.?.char == '"'));
(self.chars.prev != null and !self.chars.prev.?.escaped and self.chars.prev.?.char == '"')) or
(self.chars.state == .Single and
(self.chars.current != null and !self.chars.current.?.escaped and self.chars.current.?.char == '\'') and
(self.chars.prev != null and !self.chars.prev.?.escaped and self.chars.prev.?.char == '\'')));
}

fn break_word_impl(self: *@This(), add_delimiter: bool, in_normal_space: bool, in_operator: bool) !void {
@@ -3229,6 +3246,15 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
}

fn handleJSStringRef(self: *@This(), bunstr: bun.String) !void {
if (bunstr.length() == 0) {
// Empty JS string ref: emit a zero-length DoubleQuotedText token directly.
// The parser converts this to a quoted_empty atom, preserving the empty arg.
// This works regardless of the lexer's current quote state (Normal/Single/Double)
// because the \x08 marker is processed before quote-state handling.
const pos = self.j;
try self.tokens.append(@unionInit(Token, "DoubleQuotedText", .{ .start = pos, .end = pos }));
return;
}
try self.appendStringToStrPool(bunstr);
}
@@ -3358,7 +3384,7 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
}

fn validateJSObjRefIdx(self: *@This(), idx: usize) bool {
if (idx >= std.math.maxInt(u32)) {
if (idx >= self.jsobjs_len) {
self.add_error("Invalid JS object ref (out of bounds)");
return false;
}
@@ -4063,6 +4089,13 @@ pub const ShellSrcBuilder = struct {
) bun.OOM!bool {
const invalid = (bunstr.isUTF16() and !bun.simdutf.validate.utf16le(bunstr.utf16())) or (bunstr.isUTF8() and !bun.simdutf.validate.utf8(bunstr.byteSlice()));
if (invalid) return false;
// Empty interpolated values must still produce an argument (e.g. `${''}` should
// pass "" as an arg). Route through appendJSStrRef so the \x08 marker is recognized
// by the lexer regardless of quote context (e.g. inside single quotes).
if (allow_escape and bunstr.length() == 0) {
try this.appendJSStrRef(bunstr);
return true;
}
if (allow_escape) {
if (needsEscapeBunstr(bunstr)) {
try this.appendJSStrRef(bunstr);
@@ -4129,7 +4162,7 @@ pub const ShellSrcBuilder = struct {
};

/// Characters that need to escaped
const SPECIAL_CHARS = [_]u8{ '~', '[', ']', '#', ';', '\n', '*', '{', ',', '}', '`', '$', '=', '(', ')', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '|', '>', '<', '&', '\'', '"', ' ', '\\' };
const SPECIAL_CHARS = [_]u8{ '~', '[', ']', '#', ';', '\n', '*', '{', ',', '}', '`', '$', '=', '(', ')', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '|', '>', '<', '&', '\'', '"', ' ', '\\', SPECIAL_JS_CHAR };
const SPECIAL_CHARS_TABLE: bun.bit_set.IntegerBitSet(256) = brk: {
var table = bun.bit_set.IntegerBitSet(256).initEmpty();
for (SPECIAL_CHARS) |c| {
@@ -4554,15 +4587,16 @@ pub const TestingAPIs = struct {
var script = std.array_list.Managed(u8).init(arena.allocator());
try shellCmdFromJS(globalThis, string_args, &template_args, &jsobjs, &jsstrings, &script, marked_argument_buffer);

const jsobjs_len: u32 = @intCast(jsobjs.items.len);
const lex_result = brk: {
if (bun.strings.isAllASCII(script.items[0..])) {
var lexer = LexerAscii.new(arena.allocator(), script.items[0..], jsstrings.items[0..]);
var lexer = LexerAscii.new(arena.allocator(), script.items[0..], jsstrings.items[0..], jsobjs_len);
lexer.lex() catch |err| {
return globalThis.throwError(err, "failed to lex shell");
};
break :brk lexer.get_result();
}
var lexer = LexerUnicode.new(arena.allocator(), script.items[0..], jsstrings.items[0..]);
var lexer = LexerUnicode.new(arena.allocator(), script.items[0..], jsstrings.items[0..], jsobjs_len);
lexer.lex() catch |err| {
return globalThis.throwError(err, "failed to lex shell");
};
@@ -556,6 +556,10 @@ fn initRedirections(this: *Cmd, spawn_args: *Subprocess.SpawnArgs) bun.JSError!?
if (this.base.eventLoop() != .js) @panic("JS values not allowed in this context");
const global = this.base.eventLoop().js.global;

if (val.idx >= this.base.interpreter.jsobjs.len) {
return global.throw("Invalid JS object reference in shell", .{});
}

if (this.base.interpreter.jsobjs[val.idx].asArrayBuffer(global)) |buf| {
const stdio: bun.shell.subproc.Stdio = .{ .array_buffer = jsc.ArrayBuffer.Strong{
.array_buffer = buf,
@@ -568,9 +572,9 @@ fn initRedirections(this: *Cmd, spawn_args: *Subprocess.SpawnArgs) bun.JSError!?
if (this.node.redirect.stdin) {
try spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stdin_no);
} else if (this.node.redirect.stdout) {
try spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stdout_no);
try spawn_args.stdio[stdout_no].extractBlob(global, .{ .Blob = blob }, stdout_no);
} else if (this.node.redirect.stderr) {
try spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stderr_no);
try spawn_args.stdio[stderr_no].extractBlob(global, .{ .Blob = blob }, stderr_no);
}
} else if (try jsc.WebCore.ReadableStream.fromJS(this.base.interpreter.jsobjs[val.idx], global)) |rstream| {
_ = rstream;
@@ -168,8 +168,10 @@ fn commandImplStart(this: *CondExpr) Yield {
.@"-d",
.@"-f",
=> {
// Empty string expansion produces no args; the path doesn't exist.
if (this.args.items.len == 0) return this.parent.childDone(this, 1);
// Empty string expansion produces no args, or the path is an empty string;
// the path doesn't exist. On Windows, stat("") can succeed and return the
// cwd's stat, so we must check for empty paths explicitly.
if (this.args.items.len == 0 or this.args.items[0].len == 0) return this.parent.childDone(this, 1);
this.state = .waiting_stat;
return this.doStat();
},
@@ -36,6 +36,9 @@ child_state: union(enum) {
out_exit_code: ExitCode = 0,
out: Result,
out_idx: u32,
/// Set when the word contains a quoted_empty atom, indicating that an empty
/// result should still be preserved as an argument (POSIX: `""` produces an empty arg).
has_quoted_empty: bool = false,

pub const ParentPtr = StatePtrUnion(.{
Cmd,
@@ -193,6 +196,9 @@ pub fn next(this: *Expansion) Yield {
bun.handleOom(this.current_out.insert(0, '~'));
},
}
} else if (this.has_quoted_empty) {
// ~"" or ~'' should expand to the home directory
bun.handleOom(this.current_out.appendSlice(homedir.slice()));
}
}

@@ -586,6 +592,11 @@ pub fn expandSimpleNoIO(this: *Expansion, atom: *const ast.SimpleAtom, str_list:
.Text => |txt| {
bun.handleOom(str_list.appendSlice(txt));
},
.quoted_empty => {
// A quoted empty string ("", '', or ${''}). We must ensure the word
// is not dropped by pushCurrentOut, so mark it with a flag.
this.has_quoted_empty = true;
},
.Var => |label| {
bun.handleOom(str_list.appendSlice(this.expandVar(label)));
},
@@ -630,8 +641,8 @@ pub fn appendSlice(this: *Expansion, buf: *std.array_list.Managed(u8), slice: []
}

pub fn pushCurrentOut(this: *Expansion) void {
if (this.current_out.items.len == 0) return;
if (this.current_out.items[this.current_out.items.len - 1] != 0) bun.handleOom(this.current_out.append(0));
if (this.current_out.items.len == 0 and !this.has_quoted_empty) return;
if (this.current_out.items.len == 0 or this.current_out.items[this.current_out.items.len - 1] != 0) bun.handleOom(this.current_out.append(0));
switch (this.out.pushResult(&this.current_out)) {
.copied => {
this.current_out.clearRetainingCapacity();
@@ -709,6 +720,7 @@ fn expansionSizeHint(this: *const Expansion, atom: *const ast.Atom, has_unknown:
fn expansionSizeHintSimple(this: *const Expansion, simple: *const ast.SimpleAtom, has_unknown: *bool) usize {
return switch (simple.*) {
.Text => |txt| txt.len,
.quoted_empty => 0,
.Var => |label| this.expandVar(label).len,
.VarArgv => |int| this.expandVarArgv(int).len,
.brace_begin, .brace_end, .comma, .asterisk => 1,
@@ -426,12 +426,6 @@ pub const EncodeIntoResult = struct {
written: u32 = 0,
};
pub fn allocateLatin1IntoUTF8(allocator: std.mem.Allocator, latin1_: []const u8) ![]u8 {
if (comptime bun.FeatureFlags.latin1_is_now_ascii) {
var out = try allocator.alloc(u8, latin1_.len);
@memcpy(out[0..latin1_.len], latin1_);
return out;
}

const list = try std.array_list.Managed(u8).initCapacity(allocator, latin1_.len);
var foo = try allocateLatin1IntoUTF8WithList(list, 0, latin1_);
return try foo.toOwnedSlice();
@@ -685,13 +679,6 @@ pub fn copyLatin1IntoUTF8(buf_: []u8, latin1_: []const u8) EncodeIntoResult {
}

pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, latin1_: []const u8, comptime stop: bool) EncodeIntoResult {
if (comptime bun.FeatureFlags.latin1_is_now_ascii) {
const to_copy = @as(u32, @truncate(@min(buf_.len, latin1_.len)));
@memcpy(buf_[0..to_copy], latin1_[0..to_copy]);

return .{ .written = to_copy, .read = to_copy };
}

var buf = buf_;
var latin1 = latin1_;
@@ -589,7 +589,6 @@ function expectBundled(
dotenv ||
typeof production !== "undefined" ||
bundling === false ||
(run && target === "node") ||
emitDCEAnnotations ||
bundleWarnings ||
env ||
@@ -2,7 +2,7 @@ import { file, spawn, write } from "bun";
import { afterAll, beforeAll, describe, expect, test } from "bun:test";
import { existsSync, lstatSync, readlinkSync } from "fs";
import { mkdir, readlink, rm, symlink } from "fs/promises";
import { VerdaccioRegistry, bunEnv, bunExe, readdirSorted, runBunInstall } from "harness";
import { VerdaccioRegistry, bunEnv, bunExe, readdirSorted, runBunInstall, tempDir } from "harness";
import { join } from "path";

const registry = new VerdaccioRegistry();
@@ -1234,3 +1234,112 @@ test("runs lifecycle scripts correctly", async () => {
expect(lifecyclePostinstallDir).toEqual(["lifecycle-postinstall"]);
expect(allLifecycleScriptsDir).toEqual(["all-lifecycle-scripts"]);
});

// When an auto-installed peer dependency has its OWN peer deps, those
// transitive peers get re-queued during peer processing. If all manifest
// loads are synchronous (cached with valid max-age) AND the transitive peer's
// version constraint doesn't match what's already in the lockfile,
// pendingTaskCount() stays at 0 and waitForPeers was skipped — leaving
// the transitive peer's resolution unset (= invalid_package_id → filtered
// from the install).
test("transitive peer deps are resolved when resolution is fully synchronous", async () => {
const packagesDir = join(import.meta.dir, "registry", "packages");

// Self-contained HTTP server that serves package manifests & tarballs
// directly from the Verdaccio fixtures, with Cache-Control: max-age=300
// to replicate npmjs.org behavior (fully synchronous on warm cache).
using server = Bun.serve({
port: 0,
async fetch(req) {
const url = new URL(req.url);
const pathname = url.pathname;

// Tarball: /<name>/-/<name>-<version>.tgz
if (pathname.endsWith(".tgz")) {
const match = pathname.match(/\/([^/]+)\/-\/(.+\.tgz)$/);
if (match) {
const tarball = file(join(packagesDir, match[1], match[2]));
if (await tarball.exists()) {
return new Response(tarball, {
headers: { "Content-Type": "application/octet-stream" },
});
}
}
return new Response("Not found", { status: 404 });
}

// Manifest: /<name>
const packageName = decodeURIComponent(pathname.slice(1));
const metaFile = file(join(packagesDir, packageName, "package.json"));
if (!(await metaFile.exists())) {
return new Response("Not found", { status: 404 });
}

// Rewrite tarball URLs to point at this server
const meta = await metaFile.json();
const port = server.port;
for (const [ver, info] of Object.entries(meta.versions ?? {}) as [string, any][]) {
if (info?.dist?.tarball) {
info.dist.tarball = `http://localhost:${port}/${packageName}/-/${packageName}-${ver}.tgz`;
}
}

return new Response(JSON.stringify(meta), {
headers: {
"Content-Type": "application/json",
"Cache-Control": "public, max-age=300",
},
});
},
});

using packageDir = tempDir("transitive-peer-test-", {});
const packageJson = join(String(packageDir), "package.json");
const cacheDir = join(String(packageDir), ".bun-cache");
const bunfig = `[install]\ncache = "${cacheDir.replaceAll("\\", "\\\\")}"\nregistry = "http://localhost:${server.port}/"\nlinker = "isolated"\n`;
await write(join(String(packageDir), "bunfig.toml"), bunfig);

await write(
packageJson,
JSON.stringify({
name: "test-transitive-peer",
dependencies: {
// Chain: uses-strict-peer → (peer) strict-peer-dep → (peer) no-deps@^2.0.0
// Root has no-deps@1.0.0, which does NOT satisfy ^2.0.0. This forces
// strict-peer-dep's peer `no-deps` through the full resolution pass
// (can't reuse root's no-deps via getPackageID).
"no-deps": "1.0.0",
"uses-strict-peer": "1.0.0",
},
}),
);

// First install: populates manifest cache (with max-age=300 from server)
await runBunInstall(bunEnv, String(packageDir), { allowWarnings: true });

// Second install with NO lockfile and WARM cache. Manifests are fresh
// (within max-age) so all loads are synchronous — this is the bug trigger.
await rm(join(String(packageDir), "node_modules"), { recursive: true, force: true });
await rm(join(String(packageDir), "bun.lock"), { force: true });
await runBunInstall(bunEnv, String(packageDir), { allowWarnings: true });

// Entry names have peer hashes; find them dynamically
const bunDir = join(String(packageDir), "node_modules", ".bun");
const entries = await readdirSorted(bunDir);
const strictPeerEntry = entries.find(e => e.startsWith("strict-peer-dep@1.0.0"));
const usesStrictEntry = entries.find(e => e.startsWith("uses-strict-peer@1.0.0"));

// strict-peer-dep must exist (auto-installed via uses-strict-peer's peer)
expect(strictPeerEntry).toBeDefined();
expect(usesStrictEntry).toBeDefined();

// strict-peer-dep's own peer `no-deps` must be resolved and symlinked.
// Without the fix: this symlink is missing because the transitive peer
// queue was never drained after drainDependencyList re-queued it.
expect(existsSync(join(bunDir, strictPeerEntry!, "node_modules", "no-deps"))).toBe(true);

// Verify the chain is intact
expect(readlinkSync(join(bunDir, usesStrictEntry!, "node_modules", "strict-peer-dep"))).toBe(
join("..", "..", strictPeerEntry!, "node_modules", "strict-peer-dep"),
);
});
@@ -0,0 +1,41 @@
{
"name": "strict-peer-dep",
"versions": {
"1.0.0": {
"name": "strict-peer-dep",
"version": "1.0.0",
"peerDependencies": {
"no-deps": "^2.0.0"
},
"_id": "strict-peer-dep@1.0.0",
"_nodeVersion": "22.2.0",
"_npmVersion": "10.8.1",
"dist": {
"integrity": "sha512-bz2RC/Fp4Nvc9aIiHB6Szko9m6sxNy/clIHnTAGeD9VSpQJTvlPAJqJ09lWo7N3q4JNLEqDTf3Mn+zNUsYOKWQ==",
"shasum": "1548927b5ca502c008c3ab091fb707f96181ecaf",
"tarball": "http://localhost:4873/strict-peer-dep/-/strict-peer-dep-1.0.0.tgz"
},
"contributors": []
}
},
"time": {
"modified": "2026-02-28T00:00:00.000Z",
"created": "2026-02-28T00:00:00.000Z",
"1.0.0": "2026-02-28T00:00:00.000Z"
},
"users": {},
"dist-tags": {
"latest": "1.0.0"
},
"_uplinks": {},
"_distfiles": {},
"_attachments": {
"strict-peer-dep-1.0.0.tgz": {
"shasum": "1548927b5ca502c008c3ab091fb707f96181ecaf",
"version": "1.0.0"
}
},
"_rev": "",
"_id": "strict-peer-dep",
"readme": "ERROR: No README data found!"
}
Binary file not shown.
@@ -0,0 +1,41 @@
{
"name": "uses-strict-peer",
"versions": {
"1.0.0": {
"name": "uses-strict-peer",
"version": "1.0.0",
"peerDependencies": {
"strict-peer-dep": "1.0.0"
},
"_id": "uses-strict-peer@1.0.0",
"_nodeVersion": "22.2.0",
"_npmVersion": "10.8.1",
"dist": {
"integrity": "sha512-RbQ5blabFjzZxf/5rXghqXxa2+Dmv/owDb1YzHwNOOBmxGJZTqPt3OIYHlsGX/wnPVjAP6gBwJl3nxLxU0pzlw==",
"shasum": "7cff9823abdca5ab698f2c6b73410b87004960e9",
"tarball": "http://localhost:4873/uses-strict-peer/-/uses-strict-peer-1.0.0.tgz"
},
"contributors": []
}
},
"time": {
"modified": "2026-02-28T00:00:00.000Z",
"created": "2026-02-28T00:00:00.000Z",
"1.0.0": "2026-02-28T00:00:00.000Z"
},
"users": {},
"dist-tags": {
"latest": "1.0.0"
},
"_uplinks": {},
"_distfiles": {},
"_attachments": {
"uses-strict-peer-1.0.0.tgz": {
"shasum": "7cff9823abdca5ab698f2c6b73410b87004960e9",
"version": "1.0.0"
}
},
"_rev": "",
"_id": "uses-strict-peer",
"readme": "ERROR: No README data found!"
}
Binary file not shown.
@@ -880,6 +880,44 @@ for (let credentials of allCredentials) {
expect(SHA1).toBe(SHA1_2);
}
}, 30_000);
it("should work with sliced files (offset 0)", async () => {
await using tmpfile = await tmp();
const s3file = s3(tmpfile.name + "-readable-stream-slice", options);
await s3file.write("Hello Bun!");
const sliced = s3file.slice(0, 5);
const stream = sliced.stream();
const reader = stream.getReader();
let bytes = 0;
let chunks: Array<Buffer> = [];

while (true) {
const { done, value } = await reader.read();
if (done) break;
bytes += value?.length ?? 0;
if (value) chunks.push(value as Buffer);
}
expect(bytes).toBe(5);
expect(Buffer.concat(chunks)).toEqual(Buffer.from("Hello"));
});
it("should work with sliced files (non-zero offset)", async () => {
await using tmpfile = await tmp();
const s3file = s3(tmpfile.name + "-readable-stream-slice-offset", options);
await s3file.write("Hello Bun!");
const sliced = s3file.slice(6, 10);
const stream = sliced.stream();
const reader = stream.getReader();
let bytes = 0;
let chunks: Array<Buffer> = [];

while (true) {
const { done, value } = await reader.read();
if (done) break;
bytes += value?.length ?? 0;
if (value) chunks.push(value as Buffer);
}
expect(bytes).toBe(4);
expect(Buffer.concat(chunks)).toEqual(Buffer.from("Bun!"));
});
});
});
});
@@ -105,8 +105,7 @@ describe("lex shell", () => {
{ "Delimit": {} },
{ "Text": "dev" },
{ "Delimit": {} },
{ "Text": "hello how is it going" },
{ "Delimit": {} },
{ "SingleQuotedText": "hello how is it going" },
{ "Eof": {} },
];
const result = JSON.parse(lex`next dev 'hello how is it going'`);
test/js/bun/shell/shell-sentinel-hardening.test.ts (new file, 58 lines)
@@ -0,0 +1,58 @@
import { $ } from "bun";
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";

describe("shell sentinel character hardening", () => {
test("string matching internal obj-ref prefix round-trips through interpolation", async () => {
// \x08 is the shell's internal sentinel byte. When followed by "__bun_"
// and then non-digit characters, the old code didn't escape \x08 (it wasn't
// in SPECIAL_CHARS), so the raw bytes were injected into the script buffer.
// The lexer then misinterpreted them as a malformed internal object
// reference pattern and produced a lex error.
// The suffix must contain non-digit, non-special chars so that:
// 1. needsEscape() returns false without the \x08 fix
// 2. looksLikeJSObjRef() matches the __bun_ prefix
// 3. eatJSObjRef() fails because it finds no digit index
const str = "\x08__bun_abc";
const result = await $`echo ${str}`.text();
expect(result).toBe(str + "\n");
});

test("string matching internal str-ref prefix round-trips through interpolation", async () => {
// Same issue but for the __bunstr_ prefix pattern.
const str = "\x08__bunstr_abc";
const result = await $`echo ${str}`.text();
expect(result).toBe(str + "\n");
});

test("raw sentinel injection with out-of-bounds index does not crash", async () => {
// { raw: ... } bypasses string escaping, allowing injection of a sentinel
// pattern with a digit suffix into the script buffer. The old
// validateJSObjRefIdx only rejected indices >= maxInt(u32), so index 9999
// was accepted. At execution time, accessing jsobjs[9999] on an empty
// array caused a segfault. The fix checks against actual jsobjs.len.
// Run in a subprocess so a crash on old bun doesn't kill the test runner.
const testScript = [
'import { $ } from "bun";',
"const sentinel = String.fromCharCode(8) + '__bun_9999';",
"try { await $`echo hello > ${{ raw: sentinel }}`; } catch {}",
'console.log("OK");',
].join("\n");

using dir = tempDir("sentinel-test", {
"test.js": testScript,
});

await using proc = Bun.spawn({
cmd: [bunExe(), "test.js"],
cwd: String(dir),
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});

const [stdout, exitCode] = await Promise.all([proc.stdout.text(), proc.exited]);
expect(stdout.trim()).toBe("OK");
expect(exitCode).toBe(0);
});
});
test/js/bun/transpiler/transpiler-tsconfig-uaf.test.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
import { describe, expect, test } from "bun:test";

describe("Transpiler tsconfig lifetime", () => {
test("multiple async transform() calls with tsconfig do not crash", async () => {
const transpiler = new Bun.Transpiler({
loader: "tsx",
tsconfig: JSON.stringify({
compilerOptions: {
experimentalDecorators: true,
jsx: "react",
jsxFactory: "React.createElement",
},
}),
});

// First async transform
const result1 = await transpiler.transform("const x: number = 1;");
expect(result1).toContain("const x = 1");

// Second async transform — would crash before the fix due to use-after-free
// on the tsconfig pointer that was freed by the first TransformTask.deinit
const result2 = await transpiler.transform("const y: number = 2;");
expect(result2).toContain("const y = 2");

// Third call to be safe
const result3 = await transpiler.transform("const z: number = 3;");
expect(result3).toContain("const z = 3");
});

test("async transform() followed by transformSync() with tsconfig does not crash", async () => {
const transpiler = new Bun.Transpiler({
loader: "tsx",
tsconfig: JSON.stringify({
compilerOptions: {
experimentalDecorators: true,
jsx: "react",
jsxFactory: "React.createElement",
},
}),
});

// Before this fix, async transform freed tsconfig in TransformTask.deinit
const result1 = await transpiler.transform("const a: string = 'hello';");
expect(result1).toContain('const a = "hello"');

// Sync transform would read freed memory without the fix
const result2 = transpiler.transformSync("const b: string = 'world';");
expect(result2).toContain('const b = "world"');
});

test("tsconfig jsx settings are preserved across multiple async transforms", async () => {
const transpiler = new Bun.Transpiler({
loader: "tsx",
tsconfig: JSON.stringify({
compilerOptions: {
jsx: "react",
jsxFactory: "h",
jsxFragmentFactory: "Fragment",
},
}),
});

const code = "export default <div>hello</div>;";

const result1 = await transpiler.transform(code);
expect(result1).toContain("h(");

// After the first async transform, tsconfig should still be valid
const result2 = await transpiler.transform(code);
expect(result2).toContain("h(");

// Sync should also work
const result3 = transpiler.transformSync(code);
expect(result3).toContain("h(");
});
});
test/js/node/fs/fs-path-length.test.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
import { describe, expect, it } from "bun:test";
import { isPosix } from "harness";
import fs from "node:fs";

// On POSIX systems, MAX_PATH_BYTES is 4096.
// Path validation must account for the actual UTF-8 byte length of strings,
// not just the number of characters (UTF-16 code units), since multi-byte
// characters expand when encoded as UTF-8.
describe.if(isPosix)("path length validation with multi-byte characters", () => {
// U+4E00 (一) is a CJK character that is 3 bytes in UTF-8 (0xE4 0xB8 0x80).
// 2000 such characters = 2000 UTF-16 code units but 6000 UTF-8 bytes,
// which exceeds the 4096-byte PathBuffer.
const cjkPath = "\u4e00".repeat(2000);

it("rejects overly long multi-byte paths in openSync", () => {
expect(() => fs.openSync(cjkPath, "r")).toThrow("ENAMETOOLONG");
});

it("rejects overly long multi-byte paths in readFileSync", () => {
expect(() => fs.readFileSync(cjkPath)).toThrow("ENAMETOOLONG");
});

it("rejects overly long multi-byte paths in statSync", () => {
expect(() => fs.statSync(cjkPath)).toThrow("ENAMETOOLONG");
});

it("rejects overly long multi-byte paths in realpathSync", () => {
expect(() => fs.realpathSync(cjkPath)).toThrow("ENAMETOOLONG");
});

it("rejects overly long multi-byte paths in async readFile", async () => {
expect(async () => await fs.promises.readFile(cjkPath)).toThrow("ENAMETOOLONG");
});

it("rejects overly long multi-byte paths in async stat", async () => {
expect(async () => await fs.promises.stat(cjkPath)).toThrow("ENAMETOOLONG");
});

// 2-byte UTF-8 characters (U+0080 to U+07FF range)
it("rejects overly long 2-byte UTF-8 paths", () => {
// U+00E9 (é) is 2 bytes in UTF-8. 3000 chars = 6000 bytes > 4096
const accentPath = "\u00e9".repeat(3000);
expect(() => fs.statSync(accentPath)).toThrow("ENAMETOOLONG");
});

// Verify that the process does not crash - the key property is that these
// throw a proper JS error rather than segfaulting.
it("does not crash with 4-byte UTF-8 characters exceeding buffer", () => {
// U+1F600 (😀) is 4 bytes in UTF-8, 2 UTF-16 code units (surrogate pair).
// 1500 emoji = 3000 UTF-16 code units but 6000 UTF-8 bytes > 4096
const emojiPath = "\u{1F600}".repeat(1500);
expect(() => fs.statSync(emojiPath)).toThrow("ENAMETOOLONG");
});
});
test/js/node/http/early-hints-crlf-injection.test.ts (new file, 137 lines)
@@ -0,0 +1,137 @@
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe } from "harness";

describe("writeEarlyHints", () => {
  test("rejects CRLF injection in header name", async () => {
    await using proc = Bun.spawn({
      cmd: [
        bunExe(),
        "-e",
        `
        const http = require("node:http");
        const server = http.createServer((req, res) => {
          try {
            res.writeEarlyHints({
              link: "</style.css>; rel=preload",
              "x-custom\\r\\nSet-Cookie: session=evil\\r\\nX-Injected": "val",
            });
            console.log("FAIL: no error thrown");
            process.exit(1);
          } catch (e) {
            console.log("error_code:" + e.code);
            res.writeHead(200);
            res.end("ok");
          }
        });
        server.listen(0, () => {
          http.get({ port: server.address().port }, (res) => {
            let data = "";
            res.on("data", (c) => data += c);
            res.on("end", () => {
              console.log("body:" + data);
              server.close();
            });
          });
        });
        `,
      ],
      env: bunEnv,
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    expect(stdout).toContain("error_code:ERR_INVALID_HTTP_TOKEN");
    expect(stdout).toContain("body:ok");
    expect(exitCode).toBe(0);
  });

  test("rejects CRLF injection in header value", async () => {
    await using proc = Bun.spawn({
      cmd: [
        bunExe(),
        "-e",
        `
        const http = require("node:http");
        const server = http.createServer((req, res) => {
          try {
            res.writeEarlyHints({
              link: "</style.css>; rel=preload",
              "x-custom": "legitimate\\r\\nSet-Cookie: session=evil",
            });
            console.log("FAIL: no error thrown");
            process.exit(1);
          } catch (e) {
            console.log("error_code:" + e.code);
            res.writeHead(200);
            res.end("ok");
          }
        });
        server.listen(0, () => {
          http.get({ port: server.address().port }, (res) => {
            let data = "";
            res.on("data", (c) => data += c);
            res.on("end", () => {
              console.log("body:" + data);
              server.close();
            });
          });
        });
        `,
      ],
      env: bunEnv,
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    expect(stdout).toContain("error_code:ERR_INVALID_CHAR");
    expect(stdout).toContain("body:ok");
    expect(exitCode).toBe(0);
  });

  test("allows valid non-link headers in early hints", async () => {
    await using proc = Bun.spawn({
      cmd: [
        bunExe(),
        "-e",
        `
        const http = require("node:http");
        const server = http.createServer((req, res) => {
          try {
            res.writeEarlyHints({
              link: "</style.css>; rel=preload",
              "x-custom": "valid-value",
              "x-another": "also-valid",
            });
            console.log("OK: no error");
            res.writeHead(200);
            res.end("ok");
          } catch (e) {
            console.log("FAIL: " + e.message);
            process.exit(1);
          }
        });
        server.listen(0, () => {
          http.get({ port: server.address().port }, (res) => {
            let data = "";
            res.on("data", (c) => data += c);
            res.on("end", () => {
              console.log("body:" + data);
              server.close();
            });
          });
        });
        `,
      ],
      env: bunEnv,
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    expect(stdout).toContain("OK: no error");
    expect(stdout).toContain("body:ok");
    expect(exitCode).toBe(0);
  });
});
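Note: the behavior these subprocess tests pin down can be summarized as follows (a minimal sketch, not part of the test file; `res` is assumed to be an http.ServerResponse, and the error codes come from the assertions above):

res.writeEarlyHints({ link: "</style.css>; rel=preload" }); // ok: sends a "103 Early Hints" response
res.writeEarlyHints({ "bad\r\nname": "x" });                // throws ERR_INVALID_HTTP_TOKEN
res.writeEarlyHints({ "x-custom": "bad\r\nvalue" });        // throws ERR_INVALID_CHAR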
@@ -195,7 +195,7 @@ test/js/node/test/parallel/test-http-server-stale-close.js
test/js/third_party/comlink/comlink.test.ts
test/regression/issue/22635/22635.test.ts
test/js/node/test/parallel/test-http-url.parse-https.request.js
test/bundler/bundler_compile_autoload.test.ts
test/bundler/bundler_compile_autoload.test.ts

# Bun::JSNodeHTTPServerSocket::clearSocketData
test/js/node/test/parallel/test-http-server-keep-alive-max-requests-null.js

test/regression/issue/08893.test.ts (new file, 77 lines)
@@ -0,0 +1,77 @@
import { expect, test } from "bun:test";
import net from "net";

// Regression test for https://github.com/oven-sh/bun/issues/8893
// Bytes >= 0x80 in HTTP header values were incorrectly stripped because
// the whitespace trimming in HttpParser.h compared signed chars against 33.
// On platforms where char is signed (x86_64), bytes 0x80-0xFF are negative
// and thus < 33, causing them to be trimmed as if they were whitespace.

test("header values preserve bytes >= 0x80", async () => {
  let receivedValue: string | null = null;

  await using server = Bun.serve({
    port: 0,
    fetch(req) {
      receivedValue = req.headers.get("x-test");
      return new Response("OK");
    },
  });

  const client = net.connect(server.port, "127.0.0.1");

  // Send a raw HTTP request with 0xFF bytes surrounding the header value
  const request = Buffer.concat([
    Buffer.from("GET / HTTP/1.1\r\nHost: localhost\r\nX-Test: "),
    Buffer.from([0xff]),
    Buffer.from("value"),
    Buffer.from([0xff]),
    Buffer.from("\r\n\r\n"),
  ]);

  await new Promise<void>((resolve, reject) => {
    client.on("error", reject);
    client.on("data", data => {
      const response = data.toString();
      expect(response).toContain("HTTP/1.1 200");
      // The header value should preserve the 0xFF bytes, not strip them.
      // 0xFF as a Latin-1 byte becomes U+00FF (ÿ) in the JS string.
      expect(receivedValue).not.toBeNull();
      expect(receivedValue!.length).toBe(7);
      expect(receivedValue!.charCodeAt(0)).toBe(0xff);
      expect(receivedValue!.charCodeAt(6)).toBe(0xff);
      client.end();
      resolve();
    });
    client.write(request);
  });
});

test("header values still trim actual whitespace (SP, HTAB)", async () => {
  let receivedValue: string | null = null;

  await using server = Bun.serve({
    port: 0,
    fetch(req) {
      receivedValue = req.headers.get("x-test");
      return new Response("OK");
    },
  });

  const client = net.connect(server.port, "127.0.0.1");

  // Send a raw HTTP request with spaces and tabs surrounding the header value
  const request = Buffer.from("GET / HTTP/1.1\r\nHost: localhost\r\nX-Test: \t value \t \r\n\r\n");

  await new Promise<void>((resolve, reject) => {
    client.on("error", reject);
    client.on("data", data => {
      const response = data.toString();
      expect(response).toContain("HTTP/1.1 200");
      expect(receivedValue).toBe("value");
      client.end();
      resolve();
    });
    client.write(request);
  });
});
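Note: a minimal sketch of the trimming rule the test relies on (illustrative only, not Bun's HttpParser.h code): only SP and HTAB count as optional whitespace around a header value, and the byte must be treated as unsigned so 0x80-0xFF are never trimmed.

const isOWS = (byte: number) => byte === 0x20 /* SP */ || byte === 0x09 /* HTAB */;
// With the buggy signed-char comparison (c < 33), 0xFF reads as -1 and gets trimmed;
// treated as an unsigned byte (255), it is correctly preserved.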
test/regression/issue/17294.test.ts (new file, 64 lines)
@@ -0,0 +1,64 @@
import { $ } from "bun";
import { expect, test } from "bun:test";
import { bunEnv, bunExe } from "harness";

// https://github.com/oven-sh/bun/issues/17294
// Empty string arguments should be passed through, not silently dropped.

test("empty string interpolation passes empty arg", async () => {
  const result = await $`${bunExe()} -e "console.log(JSON.stringify(process.argv.slice(1)))" -- ${""}`
    .env(bunEnv)
    .text();
  expect(JSON.parse(result.trim())).toEqual([""]);
});

test("double-quoted empty string passes empty arg", async () => {
  const result = await $`${bunExe()} -e "console.log(JSON.stringify(process.argv.slice(1)))" -- ""`.env(bunEnv).text();
  expect(JSON.parse(result.trim())).toEqual([""]);
});

test("single-quoted empty string passes empty arg", async () => {
  const result = await $`${bunExe()} -e "console.log(JSON.stringify(process.argv.slice(1)))" -- ''`.env(bunEnv).text();
  expect(JSON.parse(result.trim())).toEqual([""]);
});

test("non-empty string still works (control)", async () => {
  const result = await $`${bunExe()} -e "console.log(JSON.stringify(process.argv.slice(1)))" -- ${"hello"}`
    .env(bunEnv)
    .text();
  expect(JSON.parse(result.trim())).toEqual(["hello"]);
});

test("multiple empty strings", async () => {
  const result = await $`${bunExe()} -e "console.log(JSON.stringify(process.argv.slice(1)))" -- ${""} ${""}`
    .env(bunEnv)
    .text();
  expect(JSON.parse(result.trim())).toEqual(["", ""]);
});

test("empty string between non-empty strings", async () => {
  const result = await $`${bunExe()} -e "console.log(JSON.stringify(process.argv.slice(1)))" -- ${"a"} ${""} ${"b"}`
    .env(bunEnv)
    .text();
  expect(JSON.parse(result.trim())).toEqual(["a", "", "b"]);
});

test("empty interpolation inside single quotes passes empty arg", async () => {
  const empty = "";
  const result = await $`${bunExe()} -e "console.log(JSON.stringify(process.argv.slice(1)))" -- ${empty}`
    .env(bunEnv)
    .text();
  expect(JSON.parse(result.trim())).toEqual([""]);
});

test("tilde with empty double quotes expands to homedir", async () => {
  const result = await $`echo ~""`.env(bunEnv).text();
  const homedir = require("os").homedir();
  expect(result.trim()).toBe(homedir);
});

test("tilde with empty single quotes expands to homedir", async () => {
  const result = await $`echo ~''`.env(bunEnv).text();
  const homedir = require("os").homedir();
  expect(result.trim()).toBe(homedir);
});
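Note: the property under test is simply that an empty word still becomes an argv entry. A one-line illustration (assumes the Bun shell echo builtin, which joins its arguments with single spaces):

const out = await $`echo a ${""} b`.text(); // "a  b\n" - two spaces because the empty argument sits between "a" and "b"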
test/regression/issue/18161.test.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
import { $ } from "bun";
import { describe, expect, test } from "bun:test";
import { tempDir } from "harness";

describe("shell .quiet() should preserve exit codes", () => {
  test("builtin rm with .quiet() throws on failure", async () => {
    using dir = tempDir("issue-18161", {});
    try {
      await $`rm ${dir}/nonexistent-file.txt`.quiet();
      expect.unreachable();
    } catch (e: any) {
      expect(e.exitCode).not.toBe(0);
    }
  });

  test("builtin rm with .nothrow().quiet() returns non-zero exit code", async () => {
    using dir = tempDir("issue-18161", {});
    const result = await $`rm ${dir}/nonexistent-file.txt`.nothrow().quiet();
    expect(result.exitCode).not.toBe(0);
  });

  test("builtin rm with .text() throws on failure", async () => {
    using dir = tempDir("issue-18161", {});
    try {
      await $`rm ${dir}/nonexistent-file.txt`.text();
      expect.unreachable();
    } catch (e: any) {
      expect(e.exitCode).not.toBe(0);
    }
  });

  test("builtin rm with .quiet() returns 0 on success", async () => {
    using dir = tempDir("issue-18161", {
      "existing-file.txt": "hello",
    });
    const result = await $`rm ${dir}/existing-file.txt`.nothrow().quiet();
    expect(result.exitCode).toBe(0);
  });

  test("builtin rm exit code matches between quiet and non-quiet", async () => {
    using dir = tempDir("issue-18161", {});
    const nonQuiet = await $`rm ${dir}/nonexistent-file.txt`.nothrow();
    const quiet = await $`rm ${dir}/nonexistent-file.txt`.nothrow().quiet();
    expect(quiet.exitCode).toBe(nonQuiet.exitCode);
  });
});
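Note: the contract these tests pin down is that .quiet() only suppresses output mirroring; it must not change how failures propagate. A typical usage sketch under that assumption:

const result = await $`rm missing.txt`.nothrow().quiet();
if (result.exitCode !== 0) {
  console.error(result.stderr.toString()); // stderr is still captured, just not mirrored to the terminal
}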
test/regression/issue/24157.test.ts (new file, 133 lines)
@@ -0,0 +1,133 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";

// https://github.com/oven-sh/bun/issues/24157
// Without reuseAddr, a second process should not be able to bind to the same
// UDP port. Previously, Bun unconditionally set SO_REUSEADDR on all UDP sockets
// when port != 0, allowing duplicate binds and masking EADDRINUSE errors from
// addMembership.
test("UDP bind throws EADDRINUSE without reuseAddr when port is in use", async () => {
  // First, find a free port by briefly binding to port 0 and closing.
  using dir = tempDir("dgram-24157-a", {
    "main.ts": `
      import dgram from 'node:dgram';
      import { spawn } from 'node:child_process';

      // Find a free port
      const tmp = dgram.createSocket('udp4');
      tmp.bind(0, () => {
        const port = tmp.address().port;
        tmp.close();

        // Now both parent and child bind to that specific non-zero port
        const s = dgram.createSocket({ type: 'udp4', reuseAddr: false });
        s.bind(port, () => {
          const child = spawn(process.execPath, [__dirname + '/child.ts', String(port)], {
            stdio: 'inherit'
          });
          child.on('close', () => {
            s.close();
          });
        });
        s.on('error', (err) => {
          console.log('parent-error:' + err.code);
        });
      });
    `,
    "child.ts": `
      import dgram from 'node:dgram';

      const port = parseInt(process.argv[2]);
      const s = dgram.createSocket({ type: 'udp4', reuseAddr: false });
      s.bind(port, () => {
        console.log('child-bound:' + s.address().port);
        s.close();
      });
      s.on('error', (err) => {
        console.log('child-error:' + err.code);
      });
    `,
  });

  await using proc = Bun.spawn({
    cmd: [bunExe(), "main.ts"],
    cwd: String(dir),
    env: bunEnv,
    stdout: "pipe",
    stderr: "pipe",
  });

  const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

  // The child should fail to bind with EADDRINUSE since reuseAddr is false
  expect(stdout).toContain("child-error:EADDRINUSE");
  expect(exitCode).toBe(0);
});

test("addMembership succeeds with reuseAddr: true", async () => {
  using dir = tempDir("dgram-24157-b", {
    "main.ts": `
      import dgram from 'node:dgram';
      import { spawn } from 'node:child_process';

      // Find a free port
      const tmp = dgram.createSocket('udp4');
      tmp.bind(0, () => {
        const port = tmp.address().port;
        tmp.close();

        const s = dgram.createSocket({ type: 'udp4', reuseAddr: true });
        s.bind(port, () => {
          try {
            s.addMembership('239.255.0.2');
          } catch (e) {
            console.log('parent-error:' + e.message);
            s.close();
            return;
          }
          const child = spawn(process.execPath, [__dirname + '/child.ts', String(port)], {
            stdio: 'inherit'
          });
          child.on('close', () => {
            s.close();
          });
        });
        s.on('error', (err) => {
          console.log('parent-error:' + err.message);
        });
      });
    `,
    "child.ts": `
      import dgram from 'node:dgram';

      const port = parseInt(process.argv[2]);
      const s = dgram.createSocket({ type: 'udp4', reuseAddr: true });
      s.bind(port, () => {
        try {
          s.addMembership('239.255.0.2');
          console.log('child-joined:' + s.address().port);
        } catch (e) {
          console.log('child-error:' + e.message);
        }
        s.close();
      });
      s.on('error', (err) => {
        console.log('child-error:' + err.message);
      });
    `,
  });

  await using proc = Bun.spawn({
    cmd: [bunExe(), "main.ts"],
    cwd: String(dir),
    env: bunEnv,
    stdout: "pipe",
    stderr: "pipe",
  });

  const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

  // With reuseAddr: true, both should succeed in joining the multicast group
  expect(stdout).toContain("child-joined:");
  expect(exitCode).toBe(0);
});
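Note: the single-process version of the contract being tested looks like this (illustrative sketch): without reuseAddr, a second bind to an occupied port must surface EADDRINUSE on the socket's 'error' event rather than silently succeeding.

import dgram from "node:dgram";

const first = dgram.createSocket("udp4");
first.bind(0, () => {
  const port = first.address().port;
  const second = dgram.createSocket({ type: "udp4", reuseAddr: false });
  second.on("error", err => {
    console.log(err.code); // expected: EADDRINUSE
    first.close();
  });
  second.bind(port);
});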
test/regression/issue/27272.test.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
import { S3Client } from "bun";
import { describe, expect, it } from "bun:test";
import { getSecret } from "harness";

const s3Options = {
  accessKeyId: getSecret("S3_R2_ACCESS_KEY"),
  secretAccessKey: getSecret("S3_R2_SECRET_KEY"),
  endpoint: getSecret("S3_R2_ENDPOINT"),
  bucket: getSecret("S3_R2_BUCKET"),
};

describe.skipIf(!s3Options.accessKeyId)("issue#27272 - S3 .slice().stream() ignores slice range", () => {
  const client = new S3Client(s3Options);

  it("slice(0, N).stream() should only return N bytes", async () => {
    const filename = `test-issue-27272-${crypto.randomUUID()}`;
    const s3file = client.file(filename);
    try {
      await s3file.write("Hello Bun! This is a longer string for testing.");

      const sliced = s3file.slice(0, 5);
      const stream = sliced.stream();
      const reader = stream.getReader();
      let bytes = 0;
      const chunks: Array<Buffer> = [];

      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        bytes += value?.length ?? 0;
        if (value) chunks.push(value as Buffer);
      }

      expect(bytes).toBe(5);
      expect(Buffer.concat(chunks).toString()).toBe("Hello");
    } finally {
      await s3file.unlink();
    }
  });

  it("slice(0, N).text() and slice(0, N).stream() should return the same data", async () => {
    const filename = `test-issue-27272-consistency-${crypto.randomUUID()}`;
    const s3file = client.file(filename);
    try {
      await s3file.write("Hello Bun! This is a longer string for testing.");

      const textResult = await s3file.slice(0, 10).text();

      const stream = s3file.slice(0, 10).stream();
      const reader = stream.getReader();
      const chunks: Array<Buffer> = [];
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        if (value) chunks.push(value as Buffer);
      }
      const streamResult = Buffer.concat(chunks).toString();

      expect(streamResult).toBe(textResult);
      expect(streamResult).toBe("Hello Bun!");
    } finally {
      await s3file.unlink();
    }
  });
});
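Note: in other words, slice(start, end).stream() must behave like a ranged read and match slice(start, end).text(). A tiny usage sketch under the same credentials assumption ("some-key.txt" is a hypothetical object key):

const file = client.file("some-key.txt");
await file.write("Hello Bun!");
await file.slice(6, 10).text(); // "Bun!" - and .slice(6, 10).stream() must yield exactly those same 4 bytes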
test/regression/issue/27445.test.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import { expect, test } from "bun:test";
import crypto from "node:crypto";

// https://github.com/oven-sh/bun/issues/27445
// A failed crypto.createPrivateKey() call on an encrypted legacy RSA PEM
// should not poison subsequent unrelated crypto.createPrivateKey() calls.
test("crypto.createPrivateKey error does not poison subsequent calls", () => {
  // Generate an Ed25519 key pair
  const { privateKey: ed25519Key } = crypto.generateKeyPairSync("ed25519");
  const ed25519Der = ed25519Key.export({ format: "der", type: "pkcs8" });

  // Generate an encrypted RSA PEM key
  const { privateKey: rsaKey } = crypto.generateKeyPairSync("rsa", {
    modulusLength: 2048,
  });
  const encryptedRsaPem = rsaKey.export({
    format: "pem",
    type: "pkcs1",
    cipher: "aes-256-cbc",
    passphrase: "test-passphrase",
  });

  // First parse: Ed25519 DER should succeed
  const key1 = crypto.createPrivateKey({
    key: ed25519Der,
    format: "der",
    type: "pkcs8",
  });
  expect(key1.asymmetricKeyType).toBe("ed25519");

  // Try to import encrypted RSA PEM without passphrase -- should throw
  expect(() => crypto.createPrivateKey(encryptedRsaPem)).toThrow("Passphrase required for encrypted key");

  // Second parse: the same Ed25519 DER should still succeed
  const key2 = crypto.createPrivateKey({
    key: ed25519Der,
    format: "der",
    type: "pkcs8",
  });
  expect(key2.asymmetricKeyType).toBe("ed25519");
});
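Note: for completeness, the import that deliberately fails in the middle of this test succeeds once a passphrase is supplied (standard node:crypto API, illustrative):

const rsa = crypto.createPrivateKey({ key: encryptedRsaPem, passphrase: "test-passphrase" });
// rsa.asymmetricKeyType === "rsa"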
test/regression/issue/27458.test.ts (new file, 45 lines)
@@ -0,0 +1,45 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";

test("CSS bundler maps logical border-radius properties to correct physical properties", async () => {
  using dir = tempDir("css-logical-border-radius", {
    "input.css": `.box {
      border-start-start-radius: var(--r, 20px);
      border-start-end-radius: var(--r, 20px);
      border-end-start-radius: var(--r, 20px);
      border-end-end-radius: var(--r, 20px);
    }
    `,
  });

  await using proc = Bun.spawn({
    cmd: [bunExe(), "build", "input.css", "--target=browser", "--outdir", "out"],
    env: bunEnv,
    cwd: String(dir),
    stdout: "pipe",
    stderr: "pipe",
  });

  const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

  const output = await Bun.file(`${dir}/out/input.css`).text();

  // Each logical property must map to its own distinct physical property.
  // The output contains LTR and RTL variants (with :lang() selectors), so
  // each physical property appears multiple times. The key check is that all
  // four distinct physical properties are present (not all mapped to one).
  expect(output).toContain("border-top-left-radius:");
  expect(output).toContain("border-top-right-radius:");
  expect(output).toContain("border-bottom-left-radius:");
  expect(output).toContain("border-bottom-right-radius:");

  // In the LTR block, verify each physical property appears exactly once.
  // Extract the first rule block (LTR) to check the mapping is correct.
  const firstBlock = output.split("}")[0];
  expect((firstBlock.match(/border-top-left-radius/g) || []).length).toBe(1);
  expect((firstBlock.match(/border-top-right-radius/g) || []).length).toBe(1);
  expect((firstBlock.match(/border-bottom-right-radius/g) || []).length).toBe(1);
  expect((firstBlock.match(/border-bottom-left-radius/g) || []).length).toBe(1);

  expect(exitCode).toBe(0);
});
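Note: the expected LTR mapping behind those assertions, written out as a reference table (illustrative only, per the CSS Logical Properties spec for horizontal-tb, left-to-right writing modes):

const logicalToPhysicalLTR = {
  "border-start-start-radius": "border-top-left-radius",
  "border-start-end-radius": "border-top-right-radius",
  "border-end-start-radius": "border-bottom-left-radius",
  "border-end-end-radius": "border-bottom-right-radius",
};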
test/regression/issue/27465.test.ts (new file, 71 lines)
@@ -0,0 +1,71 @@
import { describe } from "bun:test";
import { itBundled } from "../../bundler/expectBundled";

describe("bundler", () => {
  // Test that `bun build --compile` produces absolute asset URLs in HTML,
  // so that assets load correctly when served from nested routes like "/foo/".
  // Regression test for https://github.com/oven-sh/bun/issues/27465
  for (const backend of ["api", "cli"] as const) {
    itBundled(`compile/${backend}/HTMLNestedRouteAssetURLs`, {
      compile: true,
      backend: backend,
      files: {
        "/entry.ts": /* js */ `
          import { serve } from "bun";
          import index from "./index.html";

          const server = serve({
            port: 0,
            routes: {
              "/foo/": index,
              "/foo/*": index,
            },
          });

          const res = await fetch(server.url + "foo/");
          const html = await res.text();

          const srcMatch = html.match(/src="([^"]+)"/);
          if (!srcMatch) {
            console.log("ERROR: no src attribute found in HTML");
            server.stop(true);
            process.exit(1);
          }
          const src = srcMatch[1];
          if (src.startsWith("./")) {
            console.log("FAIL: relative URL " + src);
            server.stop(true);
            process.exit(1);
          }

          // Asset URLs should be absolute (start with "/")
          const assetRes = await fetch(server.url + src.slice(1));
          if (!assetRes.ok) {
            console.log("FAIL: asset not accessible at " + src);
            server.stop(true);
            process.exit(1);
          }

          console.log("Asset URL is absolute: " + src);
          server.stop(true);
        `,
        "/index.html": /* html */ `
          <!DOCTYPE html>
          <html>
            <head><title>Test</title></head>
            <body>
              <h1>Hello</h1>
              <script src="./app.ts"></script>
            </body>
          </html>
        `,
        "/app.ts": /* js */ `
          console.log("client loaded");
        `,
      },
      run: {
        stdout: /Asset URL is absolute: \/.+/,
      },
    });
  }
});
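Note: why the absolute URL matters here: a relative src resolves against the nested route's path, while an absolute one resolves against the origin. A quick sketch of the difference using the standard URL API ("chunk-abc123.js" is a hypothetical asset name):

new URL("./chunk-abc123.js", "http://localhost:3000/foo/").pathname; // "/foo/chunk-abc123.js" (404 in this setup)
new URL("/chunk-abc123.js", "http://localhost:3000/foo/").pathname;  // "/chunk-abc123.js" (served by the bundle)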
test/regression/issue/27575.test.ts (new file, 115 lines)
@@ -0,0 +1,115 @@
import { expect, test } from "bun:test";

// https://github.com/oven-sh/bun/issues/27575
// Bun.Transpiler ignored experimentalDecorators: true from tsconfig,
// always emitting TC39-style decorators instead of legacy TypeScript decorators.

test("Bun.Transpiler respects experimentalDecorators: true from tsconfig", () => {
  const transpiler = new Bun.Transpiler({
    loader: "ts",
    target: "browser",
    tsconfig: JSON.stringify({
      compilerOptions: {
        experimentalDecorators: true,
      },
    }),
  });

  const code = `
    function Prop() { return function(target: any, key: string) {}; }

    class Foo {
      @Prop() bar: number = 0;
    }
  `;

  const result = transpiler.transformSync(code);

  // Legacy decorators use __legacyDecorateClassTS, NOT TC39 helpers
  expect(result).not.toContain("__decorateElement");
  expect(result).not.toContain("__decoratorStart");
  expect(result).not.toContain("__runInitializers");

  // Legacy decorators produce __legacyDecorateClassTS calls
  expect(result).toContain("__legacyDecorateClassTS");
});

test("Bun.Transpiler respects emitDecoratorMetadata: true from tsconfig", () => {
  const transpiler = new Bun.Transpiler({
    loader: "ts",
    target: "browser",
    tsconfig: JSON.stringify({
      compilerOptions: {
        experimentalDecorators: true,
        emitDecoratorMetadata: true,
      },
    }),
  });

  const code = `
    function Dec() { return function(target: any, key: string) {}; }

    class Foo {
      @Dec() bar: string = "";
    }
  `;

  const result = transpiler.transformSync(code);

  // Should emit legacy decorators with metadata
  expect(result).not.toContain("__decorateElement");
  expect(result).toContain("__legacyDecorateClassTS");
  expect(result).toContain("__legacyMetadataTS");
});

test("Bun.Transpiler emits TC39 decorators when experimentalDecorators is not set", () => {
  const transpiler = new Bun.Transpiler({
    loader: "ts",
    target: "browser",
    tsconfig: JSON.stringify({
      compilerOptions: {},
    }),
  });

  const code = `
    function Prop() { return function(target: any, key: string) {}; }

    class Foo {
      @Prop() bar: number = 0;
    }
  `;

  const result = transpiler.transformSync(code);

  // TC39 decorators use __decorateElement / __decoratorStart / __runInitializers
  expect(result).toContain("__decorateElement");
  expect(result).not.toContain("__legacyDecorateClassTS");
});

test("Bun.Transpiler.transform (async) respects experimentalDecorators: true", async () => {
  const transpiler = new Bun.Transpiler({
    loader: "ts",
    target: "browser",
    tsconfig: JSON.stringify({
      compilerOptions: {
        experimentalDecorators: true,
      },
    }),
  });

  const code = `
    function Prop() { return function(target: any, key: string) {}; }

    class Foo {
      @Prop() bar: number = 0;
    }
  `;

  const result = await transpiler.transform(code);

  // Legacy decorators use __legacyDecorateClassTS, NOT TC39 helpers
  expect(result).not.toContain("__decorateElement");
  expect(result).not.toContain("__decoratorStart");
  expect(result).not.toContain("__runInitializers");
  expect(result).toContain("__legacyDecorateClassTS");
});
test/regression/issue/27598.test.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
import { cssInternals } from "bun:internal-for-testing";
import { expect, test } from "bun:test";

const { minifyTest, testWithOptions } = cssInternals;

test("unicode-range in @font-face is preserved", () => {
  const source = `@font-face {
    font-family: "Roboto Variable";
    unicode-range: U+0000-00FF, U+0131, U+0152-0153;
  }`;
  const expected = `@font-face {
    font-family: Roboto Variable;
    unicode-range: U+??, U+131, U+152-153;
  }`;
  expect(testWithOptions(source, expected)).toEqualIgnoringWhitespace(expected);
});

test("unicode-range in @font-face is preserved when minified", () => {
  const source = `@font-face { font-family: "Roboto Variable"; unicode-range: U+0000-00FF, U+0131, U+0152-0153; }`;
  const expected = `@font-face{font-family:Roboto Variable;unicode-range:U+??,U+131,U+152-153}`;
  expect(minifyTest(source, expected)).toEqual(expected);
});

test("unicode-range wildcard in @font-face is preserved", () => {
  const source = `@font-face { font-family: "Test"; unicode-range: U+4??; }`;
  const expected = `@font-face{font-family:Test;unicode-range:U+4??}`;
  expect(minifyTest(source, expected)).toEqual(expected);
});

test("unicode-range with hex letters in @font-face is preserved", () => {
  const source = `@font-face { font-family: "Test"; unicode-range: U+A640-A69F; }`;
  const expected = `@font-face{font-family:Test;unicode-range:U+a640-a69f}`;
  expect(minifyTest(source, expected)).toEqual(expected);
});

test("unicode-range single hex value in @font-face is preserved", () => {
  const source = `@font-face { font-family: "Test"; unicode-range: U+00FF; }`;
  const expected = `@font-face{font-family:Test;unicode-range:U+ff}`;
  expect(minifyTest(source, expected)).toEqual(expected);
});