Mirror of https://github.com/oven-sh/bun, synced 2026-02-23 09:11:51 +00:00

Compare commits: claude/fix ... claude/fix (15 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 62d99ce112 | |
| | c01a5e08be | |
| | e9db16c257 | |
| | 21c3439bb4 | |
| | 7baf50f379 | |
| | 76754a8ead | |
| | ecd4e680eb | |
| | 044bb00382 | |
| | 655aab845d | |
| | 4141ef1edf | |
| | e57593759f | |
| | e7cf4b77ba | |
| | 2e5e21015f | |
| | b04303cb23 | |
| | b6eaa96e56 | |
.github/workflows/on-slop.yml (vendored, new file, 33 lines)
@@ -0,0 +1,33 @@
name: Close AI Slop PRs

on:
  pull_request_target:
    types: [labeled]

jobs:
  on-slop:
    runs-on: ubuntu-latest
    if: github.event.label.name == 'slop' && github.repository == 'oven-sh/bun'
    permissions:
      issues: write
      pull-requests: write
    steps:
      - name: Comment and close PR
        uses: actions/github-script@v7
        with:
          script: |
            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.payload.pull_request.number,
              body: 'This PR has been closed because it was flagged as AI slop.\n\nMany AI-generated PRs are fine, but this one was identified as having one or more of the following issues:\n- Fails to verify the problem actually exists\n- Fails to test that the fix works\n- Makes incorrect assumptions about the codebase\n- Submits changes that are incomplete or misleading\n\nIf you believe this was done in error, please leave a comment explaining why.'
            });

            await github.rest.pulls.update({
              owner: context.repo.owner,
              repo: context.repo.repo,
              pull_number: context.payload.pull_request.number,
              title: 'ai slop',
              body: 'This PR has been marked as AI slop and the description has been updated to avoid confusion or misleading reviewers.\n\nMany AI PRs are fine, but sometimes they submit a PR too early, fail to test if the problem is real, fail to reproduce the problem, or fail to test that the problem is fixed. If you think this PR is not AI slop, please leave a comment.',
              state: 'closed'
            });
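For reference, this workflow only runs when a maintainer applies the `slop` label to a pull request; a minimal sketch of doing that through the same Octokit surface that actions/github-script exposes (the PR number below is a placeholder, not taken from this diff):

// Hypothetical trigger: adding the "slop" label fires the pull_request_target "labeled" event above.
await github.rest.issues.addLabels({
  owner: "oven-sh",
  repo: "bun",
  issue_number: 12345, // placeholder PR number
  labels: ["slop"],
});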
@@ -1164,6 +1164,7 @@ void JSCommonJSModule::visitChildrenImpl(JSCell* cell, Visitor& visitor)
    visitor.appendHidden(thisObject->m_dirname);
    visitor.appendHidden(thisObject->m_paths);
    visitor.appendHidden(thisObject->m_overriddenParent);
    visitor.appendHidden(thisObject->m_overriddenCompile);
    visitor.appendHidden(thisObject->m_childrenValue);
    visitor.appendValues(thisObject->m_children.begin(), thisObject->m_children.size());
}

@@ -703,6 +703,17 @@ void NodeVMSpecialSandbox::finishCreation(VM& vm)
|
||||
|
||||
const JSC::ClassInfo NodeVMSpecialSandbox::s_info = { "NodeVMSpecialSandbox"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(NodeVMSpecialSandbox) };
|
||||
|
||||
template<typename Visitor>
|
||||
void NodeVMSpecialSandbox::visitChildrenImpl(JSCell* cell, Visitor& visitor)
|
||||
{
|
||||
auto* thisObject = jsCast<NodeVMSpecialSandbox*>(cell);
|
||||
ASSERT_GC_OBJECT_INHERITS(thisObject, info());
|
||||
Base::visitChildren(thisObject, visitor);
|
||||
visitor.append(thisObject->m_parentGlobal);
|
||||
}
|
||||
|
||||
DEFINE_VISIT_CHILDREN(NodeVMSpecialSandbox);
|
||||
|
||||
NodeVMGlobalObject::NodeVMGlobalObject(JSC::VM& vm, JSC::Structure* structure, NodeVMContextOptions contextOptions, JSValue importer)
|
||||
: Base(vm, structure, &globalObjectMethodTable())
|
||||
, m_dynamicImportCallback(vm, this, importer)
|
||||
|
||||
@@ -85,6 +85,7 @@ public:
|
||||
static NodeVMSpecialSandbox* create(VM& vm, Structure* structure, NodeVMGlobalObject* globalObject);
|
||||
|
||||
DECLARE_INFO;
|
||||
DECLARE_VISIT_CHILDREN;
|
||||
template<typename, JSC::SubspaceAccess mode> static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm);
|
||||
static Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype);
|
||||
|
||||
|
||||
@@ -2020,7 +2020,7 @@ JSC_DEFINE_HOST_FUNCTION(jsSQLStatementSetPrototypeFunction, (JSGlobalObject * l
        return {};
    }

    castedThis->userPrototype.set(vm, classObject, prototype.getObject());
    castedThis->userPrototype.set(vm, castedThis, prototype.getObject());

    // Force the prototypes to be re-created
    if (castedThis->version_db) {

@@ -694,7 +694,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type {
|
||||
this.result.err.deinit();
|
||||
}
|
||||
if (comptime !is_shell) this.ref.unref(this.evtloop);
|
||||
this.args.deinit();
|
||||
this.args.deinitAndUnprotect();
|
||||
this.promise.deinit();
|
||||
this.arena.deinit();
|
||||
bun.destroy(this);
|
||||
@@ -1249,7 +1249,7 @@ pub const AsyncReaddirRecursiveTask = struct {
|
||||
}
|
||||
|
||||
this.ref.unref(this.globalObject.bunVM());
|
||||
this.args.deinit();
|
||||
this.args.deinitAndUnprotect();
|
||||
bun.default_allocator.free(this.root_path.slice());
|
||||
this.clearResultList();
|
||||
this.promise.deinit();
|
||||
@@ -3050,6 +3050,13 @@ pub const Arguments = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deinitAndUnprotect(this: *const Cp) void {
|
||||
if (this.flags.deinit_paths) {
|
||||
this.src.deinitAndUnprotect();
|
||||
this.dest.deinitAndUnprotect();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fromJS(ctx: *jsc.JSGlobalObject, arguments: *ArgumentsSlice) bun.JSError!Cp {
|
||||
const src = try PathLike.fromJS(ctx, arguments) orelse {
|
||||
return ctx.throwInvalidArguments("src must be a string or TypedArray", .{});
|
||||
|
||||
@@ -674,7 +674,7 @@ pub const PathLike = union(enum) {
|
||||
try Valid.pathBuffer(buffer, ctx);
|
||||
try Valid.pathNullBytes(buffer.slice(), ctx);
|
||||
|
||||
arguments.protectEat();
|
||||
arguments.eat();
|
||||
return .{ .buffer = buffer };
|
||||
},
|
||||
|
||||
@@ -683,7 +683,7 @@ pub const PathLike = union(enum) {
|
||||
try Valid.pathBuffer(buffer, ctx);
|
||||
try Valid.pathNullBytes(buffer.slice(), ctx);
|
||||
|
||||
arguments.protectEat();
|
||||
arguments.eat();
|
||||
return .{ .buffer = buffer };
|
||||
},
|
||||
|
||||
|
||||
@@ -75,6 +75,7 @@ pub const PendingValue = struct {
|
||||
onStartBuffering: ?*const fn (ctx: *anyopaque) void = null,
|
||||
onStartStreaming: ?*const fn (ctx: *anyopaque) jsc.WebCore.DrainResult = null,
|
||||
onReadableStreamAvailable: ?*const fn (ctx: *anyopaque, globalThis: *jsc.JSGlobalObject, readable: jsc.WebCore.ReadableStream) void = null,
|
||||
onStreamCancelled: ?*const fn (ctx: ?*anyopaque) void = null,
|
||||
size_hint: Blob.SizeType = 0,
|
||||
|
||||
deinit: bool = false,
|
||||
@@ -495,6 +496,13 @@ pub const Value = union(Tag) {
|
||||
.globalThis = globalThis,
|
||||
});
|
||||
|
||||
if (locked.onStreamCancelled) |onCancelled| {
|
||||
if (locked.task) |task| {
|
||||
reader.cancel_handler = onCancelled;
|
||||
reader.cancel_ctx = task;
|
||||
}
|
||||
}
|
||||
|
||||
reader.context.setup();
|
||||
|
||||
if (drain_result == .estimated_size) {
|
||||
|
||||
@@ -442,6 +442,8 @@ pub fn NewSource(
|
||||
close_handler: ?*const fn (?*anyopaque) void = null,
|
||||
close_ctx: ?*anyopaque = null,
|
||||
close_jsvalue: jsc.Strong.Optional = .empty,
|
||||
cancel_handler: ?*const fn (?*anyopaque) void = null,
|
||||
cancel_ctx: ?*anyopaque = null,
|
||||
globalThis: *JSGlobalObject = undefined,
|
||||
this_jsvalue: jsc.JSValue = .zero,
|
||||
is_closed: bool = false,
|
||||
@@ -493,6 +495,10 @@ pub fn NewSource(
|
||||
|
||||
this.cancelled = true;
|
||||
onCancel(&this.context);
|
||||
if (this.cancel_handler) |handler| {
|
||||
this.cancel_handler = null;
|
||||
handler(this.cancel_ctx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn onClose(this: *This) void {
|
||||
|
||||
@@ -231,6 +231,7 @@ pub const FetchTasklet = struct {
|
||||
response.unref();
|
||||
}
|
||||
|
||||
this.clearStreamCancelHandler();
|
||||
this.readable_stream_ref.deinit();
|
||||
|
||||
this.scheduled_response_buffer.deinit();
|
||||
@@ -363,6 +364,7 @@ pub const FetchTasklet = struct {
|
||||
bun.default_allocator,
|
||||
);
|
||||
} else {
|
||||
this.clearStreamCancelHandler();
|
||||
var prev = this.readable_stream_ref;
|
||||
this.readable_stream_ref = .{};
|
||||
defer prev.deinit();
|
||||
@@ -865,6 +867,25 @@ pub const FetchTasklet = struct {
|
||||
};
|
||||
}
|
||||
|
||||
/// Clear the cancel_handler on the ByteStream.Source to prevent use-after-free.
|
||||
/// Must be called before releasing readable_stream_ref, while the Strong ref
|
||||
/// still keeps the ReadableStream (and thus the ByteStream.Source) alive.
|
||||
fn clearStreamCancelHandler(this: *FetchTasklet) void {
|
||||
if (this.readable_stream_ref.get(this.global_this)) |readable| {
|
||||
if (readable.ptr == .Bytes) {
|
||||
const source = readable.ptr.Bytes.parent();
|
||||
source.cancel_handler = null;
|
||||
source.cancel_ctx = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn onStreamCancelledCallback(ctx: ?*anyopaque) void {
|
||||
const this = bun.cast(*FetchTasklet, ctx.?);
|
||||
if (this.ignore_data) return;
|
||||
this.ignoreRemainingResponseBody();
|
||||
}
|
||||
|
||||
fn toBodyValue(this: *FetchTasklet) Body.Value {
|
||||
if (this.getAbortError()) |err| {
|
||||
return .{ .Error = err };
|
||||
@@ -877,6 +898,7 @@ pub const FetchTasklet = struct {
|
||||
.global = this.global_this,
|
||||
.onStartStreaming = FetchTasklet.onStartStreamingHTTPResponseBodyCallback,
|
||||
.onReadableStreamAvailable = FetchTasklet.onReadableStreamAvailable,
|
||||
.onStreamCancelled = FetchTasklet.onStreamCancelledCallback,
|
||||
},
|
||||
};
|
||||
return response;
|
||||
@@ -930,7 +952,8 @@ pub const FetchTasklet = struct {
|
||||
// we should not keep the process alive if we are ignoring the body
|
||||
const vm = this.javascript_vm;
|
||||
this.poll_ref.unref(vm);
|
||||
// clean any remaining refereces
|
||||
// clean any remaining references
|
||||
this.clearStreamCancelHandler();
|
||||
this.readable_stream_ref.deinit();
|
||||
this.response.deinit();
|
||||
|
||||
|
||||
@@ -154,10 +154,8 @@ fn prepareCssAstsForChunkImpl(c: *LinkerContext, chunk: *Chunk, allocator: std.m
|
||||
|
||||
filter: {
|
||||
// Filter out "@charset", "@import", and leading "@layer" rules
|
||||
// TODO: we are doing simple version rn, only @import
|
||||
for (ast.rules.v.items, 0..) |*rule, ruleidx| {
|
||||
// if ((rule.* == .import and import_records[source_index.get()].at(rule.import.import_record_idx).flags.is_internal) or rule.* == .ignored) {} else {
|
||||
if (rule.* == .import or rule.* == .ignored) {} else {
|
||||
if (rule.* == .import or rule.* == .ignored or rule.* == .layer_statement) {} else {
|
||||
// It's okay to do this because AST is allocated into arena
|
||||
const reslice = ast.rules.v.items[ruleidx..];
|
||||
ast.rules.v = .{
|
||||
|
||||
@@ -452,6 +452,14 @@ pub fn CssRuleList(comptime AtRule: type) type {
|
||||
}
|
||||
|
||||
bun.handleOom(rules.append(context.allocator, rule.*));
|
||||
moved_rule = true;
|
||||
|
||||
// Non-style rules (e.g. @property, @keyframes) act as a barrier for
|
||||
// style rule deduplication. We cannot safely merge identical style rules
|
||||
// across such boundaries because the intervening at-rule may affect how
|
||||
// the declarations are interpreted (e.g. @property defines a custom
|
||||
// property that a :root rule above may set differently than one below).
|
||||
style_rules.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
// MISSING SHIT HERE
|
||||
|
||||
@@ -480,7 +480,9 @@ pub fn NewHTTPContext(comptime ssl: bool) type {

        const DeadSocket = struct {
            garbage: u8 = 0,
            pub var dead_socket: DeadSocket = .{};
            /// Must be aligned to `@alignOf(usize)` so that tagged pointer values
            /// embedding this address pass the `@alignCast` in `bun.cast`.
            pub var dead_socket: DeadSocket align(@alignOf(usize)) = .{};
        };

        var dead_socket = &DeadSocket.dead_socket;

@@ -108,6 +108,11 @@ export function overridableRequire(this: JSCommonJSModule, originalId: string, o
|
||||
} catch (exception) {
|
||||
// Since the ESM code is mostly JS, we need to handle exceptions here.
|
||||
$requireMap.$delete(id);
|
||||
// Also remove the failed module from the ESM registry so that
|
||||
// a subsequent import() can re-evaluate it from scratch instead
|
||||
// of finding the partially-initialized module entry.
|
||||
// https://github.com/oven-sh/bun/issues/27287
|
||||
Loader.registry.$delete(id);
|
||||
throw exception;
|
||||
}
|
||||
|
||||
@@ -321,6 +326,11 @@ export function requireESMFromHijackedExtension(this: JSCommonJSModule, id: stri
|
||||
} catch (exception) {
|
||||
// Since the ESM code is mostly JS, we need to handle exceptions here.
|
||||
$requireMap.$delete(id);
|
||||
// Also remove the failed module from the ESM registry so that
|
||||
// a subsequent import() can re-evaluate it from scratch instead
|
||||
// of finding the partially-initialized module entry.
|
||||
// https://github.com/oven-sh/bun/issues/27287
|
||||
Loader.registry.$delete(id);
|
||||
throw exception;
|
||||
}
|
||||
|
||||
|
||||
@@ -499,6 +499,23 @@ class Database implements SqliteTypes.Database {
|
||||
|
||||
close(throwOnError = false) {
|
||||
this.clearQueryCache();
|
||||
// Finalize any prepared statements created by db.transaction()
|
||||
if (controllers) {
|
||||
const controller = controllers.get(this);
|
||||
if (controller) {
|
||||
controllers.delete(this);
|
||||
const seen = new Set();
|
||||
for (const ctrl of [controller.default, controller.deferred, controller.immediate, controller.exclusive]) {
|
||||
if (!ctrl) continue;
|
||||
for (const stmt of [ctrl.begin, ctrl.commit, ctrl.rollback, ctrl.savepoint, ctrl.release, ctrl.rollbackTo]) {
|
||||
if (stmt && !seen.has(stmt)) {
|
||||
seen.add(stmt);
|
||||
stmt.finalize?.();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
this.#hasClosed = true;
|
||||
return SQL.close(this.#handle, throwOnError);
|
||||
}
|
||||
|
||||
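The hunk above makes close() finalize the prepared statements cached by db.transaction(); a minimal sketch of the failure it addresses, assuming bun:sqlite semantics (the same scenario the issue 14709 regression tests further down exercise):

import { Database } from "bun:sqlite";

const db = new Database(":memory:");
db.transaction(() => {})(); // caches begin/commit/rollback prepared statements on the controller
db.close(true); // previously failed because those cached statements were never finalized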
@@ -1708,13 +1708,19 @@ fn NewPrinter(
|
||||
}
|
||||
|
||||
// Internal "require()" or "import()"
|
||||
const has_side_effects = meta.wrapper_ref.isValid() or
|
||||
meta.exports_ref.isValid() or
|
||||
meta.was_unwrapped_require or
|
||||
p.options.input_files_for_dev_server != null;
|
||||
if (record.kind == .dynamic) {
|
||||
p.printSpaceBeforeIdentifier();
|
||||
p.print("Promise.resolve()");
|
||||
|
||||
level = p.printDotThenPrefix();
|
||||
if (has_side_effects) {
|
||||
level = p.printDotThenPrefix();
|
||||
}
|
||||
}
|
||||
defer if (record.kind == .dynamic) p.printDotThenSuffix();
|
||||
defer if (record.kind == .dynamic and has_side_effects) p.printDotThenSuffix();
|
||||
|
||||
// Make sure the comma operator is properly wrapped
|
||||
const wrap_comma_operator = meta.exports_ref.isValid() and
|
||||
|
||||
@@ -731,23 +731,22 @@ pub const Route = struct {
|
||||
|
||||
if (abs_path_str.len == 0) {
|
||||
var file: std.fs.File = undefined;
|
||||
var needs_close = false;
|
||||
var needs_close = true;
|
||||
defer if (needs_close) file.close();
|
||||
if (entry.cache.fd.unwrapValid()) |valid| {
|
||||
file = valid.stdFile();
|
||||
needs_close = false;
|
||||
} else {
|
||||
var parts = [_]string{ entry.dir, entry.base() };
|
||||
abs_path_str = FileSystem.instance.absBuf(&parts, &route_file_buf);
|
||||
route_file_buf[abs_path_str.len] = 0;
|
||||
const buf = route_file_buf[0..abs_path_str.len :0];
|
||||
file = std.fs.openFileAbsoluteZ(buf, .{ .mode = .read_only }) catch |err| {
|
||||
needs_close = false;
|
||||
log.addErrorFmt(null, Logger.Loc.Empty, allocator, "{s} opening route: {s}", .{ @errorName(err), abs_path_str }) catch unreachable;
|
||||
return null;
|
||||
};
|
||||
FileSystem.setMaxFd(file.handle);
|
||||
|
||||
needs_close = FileSystem.instance.fs.needToCloseFiles();
|
||||
if (!needs_close) entry.cache.fd = .fromStdFile(file);
|
||||
}
|
||||
|
||||
const _abs = bun.getFdPath(.fromStdFile(file), &route_file_buf) catch |err| {
|
||||
|
||||
@@ -675,18 +675,53 @@ pub fn readableStream(
|
||||
}
|
||||
}
|
||||
|
||||
/// Clear the cancel_handler on the ByteStream.Source to prevent use-after-free.
|
||||
/// Must be called before releasing readable_stream_ref.
|
||||
fn clearStreamCancelHandler(self: *@This()) void {
|
||||
if (self.readable_stream_ref.get(self.global)) |readable| {
|
||||
if (readable.ptr == .Bytes) {
|
||||
const source = readable.ptr.Bytes.parent();
|
||||
source.cancel_handler = null;
|
||||
source.cancel_ctx = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deinit(self: *@This()) void {
|
||||
self.clearStreamCancelHandler();
|
||||
self.readable_stream_ref.deinit();
|
||||
bun.default_allocator.free(self.path);
|
||||
bun.destroy(self);
|
||||
}
|
||||
|
||||
fn onStreamCancelled(ctx: ?*anyopaque) void {
|
||||
const self: *@This() = @ptrCast(@alignCast(ctx.?));
|
||||
// Release the Strong ref so the ReadableStream can be GC'd.
|
||||
// The download may still be in progress, but the callback will
|
||||
// see readable_stream_ref.get() return null and skip data delivery.
|
||||
// When the download finishes (has_more == false), deinit() will
|
||||
// clean up the remaining resources.
|
||||
self.readable_stream_ref.deinit();
|
||||
}
|
||||
|
||||
pub fn opaqueCallback(chunk: bun.MutableString, has_more: bool, err: ?Error.S3Error, opaque_self: *anyopaque) void {
|
||||
const self: *@This() = @ptrCast(@alignCast(opaque_self));
|
||||
callback(chunk, has_more, err, self) catch {}; // TODO: properly propagate exception upwards
|
||||
}
|
||||
};
|
||||
|
||||
const wrapper = S3DownloadStreamWrapper.new(.{
|
||||
.readable_stream_ref = jsc.WebCore.ReadableStream.Strong.init(.{
|
||||
.ptr = .{ .Bytes = &reader.context },
|
||||
.value = readable_value,
|
||||
}, globalThis),
|
||||
.path = bun.handleOom(bun.default_allocator.dupe(u8, path)),
|
||||
.global = globalThis,
|
||||
});
|
||||
|
||||
reader.cancel_handler = S3DownloadStreamWrapper.onStreamCancelled;
|
||||
reader.cancel_ctx = wrapper;
|
||||
|
||||
downloadStream(
|
||||
this,
|
||||
path,
|
||||
@@ -695,14 +730,7 @@ pub fn readableStream(
|
||||
proxy_url,
|
||||
request_payer,
|
||||
S3DownloadStreamWrapper.opaqueCallback,
|
||||
S3DownloadStreamWrapper.new(.{
|
||||
.readable_stream_ref = jsc.WebCore.ReadableStream.Strong.init(.{
|
||||
.ptr = .{ .Bytes = &reader.context },
|
||||
.value = readable_value,
|
||||
}, globalThis),
|
||||
.path = bun.handleOom(bun.default_allocator.dupe(u8, path)),
|
||||
.global = globalThis,
|
||||
}),
|
||||
wrapper,
|
||||
);
|
||||
return readable_value;
|
||||
}
|
||||
|
||||
@@ -12,27 +12,79 @@ state: union(enum) {
|
||||
|
||||
pub fn start(this: *Echo) Yield {
|
||||
var args = this.bltn().argsSlice();
|
||||
const no_newline = args.len >= 1 and std.mem.eql(u8, bun.sliceTo(args[0], 0), "-n");
|
||||
|
||||
args = args[if (no_newline) 1 else 0..];
|
||||
// Parse flags: echo accepts -n, -e, -E in any combination.
|
||||
// Flag parsing stops at the first arg that doesn't start with '-'
|
||||
// or contains an invalid flag character.
|
||||
var no_newline = false;
|
||||
var escape_sequences = false;
|
||||
var flags_done = false;
|
||||
var args_start: usize = 0;
|
||||
|
||||
for (args) |arg| {
|
||||
if (flags_done) break;
|
||||
const flag = std.mem.span(arg);
|
||||
if (flag.len < 2 or flag[0] != '-') {
|
||||
flags_done = true;
|
||||
break;
|
||||
}
|
||||
// Validate all characters are valid echo flags
|
||||
var valid = true;
|
||||
for (flag[1..]) |c| {
|
||||
switch (c) {
|
||||
'n', 'e', 'E' => {},
|
||||
else => {
|
||||
valid = false;
|
||||
break;
|
||||
},
|
||||
}
|
||||
}
|
||||
if (!valid) {
|
||||
flags_done = true;
|
||||
break;
|
||||
}
|
||||
// Apply flags (last -e/-E wins)
|
||||
for (flag[1..]) |c| {
|
||||
switch (c) {
|
||||
'n' => no_newline = true,
|
||||
'e' => escape_sequences = true,
|
||||
'E' => escape_sequences = false,
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
args_start += 1;
|
||||
}
|
||||
|
||||
args = args[args_start..];
|
||||
const args_len = args.len;
|
||||
var has_leading_newline: bool = false;
|
||||
var stop_output = false;
|
||||
|
||||
// TODO: Should flush buffer after it gets to a certain size
|
||||
for (args, 0..) |arg, i| {
|
||||
if (stop_output) break;
|
||||
const thearg = std.mem.span(arg);
|
||||
if (i < args_len - 1) {
|
||||
bun.handleOom(this.output.appendSlice(thearg));
|
||||
bun.handleOom(this.output.append(' '));
|
||||
const is_last = i == args_len - 1;
|
||||
|
||||
if (escape_sequences) {
|
||||
stop_output = appendWithEscapes(&this.output, thearg);
|
||||
} else {
|
||||
if (thearg.len > 0 and thearg[thearg.len - 1] == '\n') {
|
||||
has_leading_newline = true;
|
||||
if (is_last) {
|
||||
if (thearg.len > 0 and thearg[thearg.len - 1] == '\n') {
|
||||
has_leading_newline = true;
|
||||
}
|
||||
bun.handleOom(this.output.appendSlice(bun.strings.trimSubsequentLeadingChars(thearg, '\n')));
|
||||
} else {
|
||||
bun.handleOom(this.output.appendSlice(thearg));
|
||||
}
|
||||
bun.handleOom(this.output.appendSlice(bun.strings.trimSubsequentLeadingChars(thearg, '\n')));
|
||||
}
|
||||
|
||||
if (!stop_output and !is_last) {
|
||||
bun.handleOom(this.output.append(' '));
|
||||
}
|
||||
}
|
||||
|
||||
if (!has_leading_newline and !no_newline) bun.handleOom(this.output.append('\n'));
|
||||
if (!stop_output and !has_leading_newline and !no_newline) bun.handleOom(this.output.append('\n'));
|
||||
|
||||
if (this.bltn().stdout.needsIO()) |safeguard| {
|
||||
this.state = .waiting;
|
||||
@@ -43,6 +95,109 @@ pub fn start(this: *Echo) Yield {
|
||||
return this.bltn().done(0);
|
||||
}
|
||||
|
||||
/// Appends `input` to `output`, interpreting backslash escape sequences.
|
||||
/// Returns true if a \c escape was encountered (meaning stop all output).
|
||||
fn appendWithEscapes(output: *std.array_list.Managed(u8), input: []const u8) bool {
|
||||
var i: usize = 0;
|
||||
while (i < input.len) {
|
||||
if (input[i] == '\\' and i + 1 < input.len) {
|
||||
switch (input[i + 1]) {
|
||||
'\\' => {
|
||||
bun.handleOom(output.append('\\'));
|
||||
i += 2;
|
||||
},
|
||||
'a' => {
|
||||
bun.handleOom(output.append('\x07'));
|
||||
i += 2;
|
||||
},
|
||||
'b' => {
|
||||
bun.handleOom(output.append('\x08'));
|
||||
i += 2;
|
||||
},
|
||||
'c' => {
|
||||
// \c: produce no further output
|
||||
return true;
|
||||
},
|
||||
'e', 'E' => {
|
||||
bun.handleOom(output.append('\x1b'));
|
||||
i += 2;
|
||||
},
|
||||
'f' => {
|
||||
bun.handleOom(output.append('\x0c'));
|
||||
i += 2;
|
||||
},
|
||||
'n' => {
|
||||
bun.handleOom(output.append('\n'));
|
||||
i += 2;
|
||||
},
|
||||
'r' => {
|
||||
bun.handleOom(output.append('\r'));
|
||||
i += 2;
|
||||
},
|
||||
't' => {
|
||||
bun.handleOom(output.append('\t'));
|
||||
i += 2;
|
||||
},
|
||||
'v' => {
|
||||
bun.handleOom(output.append('\x0b'));
|
||||
i += 2;
|
||||
},
|
||||
'0' => {
|
||||
// \0nnn: octal value (up to 3 octal digits)
|
||||
i += 2; // skip \0
|
||||
var val: u8 = 0;
|
||||
var digits: usize = 0;
|
||||
while (digits < 3 and i < input.len and input[i] >= '0' and input[i] <= '7') {
|
||||
val = val *% 8 +% (input[i] - '0');
|
||||
i += 1;
|
||||
digits += 1;
|
||||
}
|
||||
bun.handleOom(output.append(val));
|
||||
},
|
||||
'x' => {
|
||||
// \xHH: hex value (up to 2 hex digits)
|
||||
i += 2; // skip \x
|
||||
var val: u8 = 0;
|
||||
var digits: usize = 0;
|
||||
while (digits < 2 and i < input.len) {
|
||||
const hex_val = hexDigitValue(input[i]);
|
||||
if (hex_val) |hv| {
|
||||
val = val *% 16 +% hv;
|
||||
i += 1;
|
||||
digits += 1;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (digits > 0) {
|
||||
bun.handleOom(output.append(val));
|
||||
} else {
|
||||
// No valid hex digits: output \x literally
|
||||
bun.handleOom(output.appendSlice("\\x"));
|
||||
}
|
||||
},
|
||||
else => {
|
||||
// Unknown escape: output backslash and the character as-is
|
||||
bun.handleOom(output.append('\\'));
|
||||
bun.handleOom(output.append(input[i + 1]));
|
||||
i += 2;
|
||||
},
|
||||
}
|
||||
} else {
|
||||
bun.handleOom(output.append(input[i]));
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
fn hexDigitValue(c: u8) ?u8 {
|
||||
if (c >= '0' and c <= '9') return c - '0';
|
||||
if (c >= 'a' and c <= 'f') return c - 'a' + 10;
|
||||
if (c >= 'A' and c <= 'F') return c - 'A' + 10;
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn onIOWriterChunk(this: *Echo, _: usize, e: ?jsc.SystemError) Yield {
|
||||
if (comptime bun.Environment.allow_assert) {
|
||||
assert(this.state == .waiting or this.state == .waiting_write_err);
|
||||
|
||||
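Combined with the flag parsing earlier in this hunk, the escape handling is observable from Bun Shell; a small sketch that mirrors the issue 17405 regression tests later in this diff (the expected outputs in the comments come from those tests):

import { $ } from "bun";

// -e enables escape interpretation: \t becomes a tab, and the trailing newline is still appended
console.log(JSON.stringify(await $`echo -e ${"hello\\tworld"}`.text())); // "hello\tworld\n"

// \c stops all further output, including the trailing newline
console.log(JSON.stringify(await $`echo -e ${"hello\\cworld"}`.text())); // "hello"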
@@ -3251,11 +3251,15 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
|
||||
self.chars.current = .{ .char = cur_ascii_char };
|
||||
return;
|
||||
}
|
||||
// Set the cursor to decode the codepoint at new_idx.
|
||||
// Use width=0 so that nextCursor (which computes pos = width + i)
|
||||
// starts reading from exactly new_idx.
|
||||
self.chars.src.cursor = CodepointIterator.Cursor{
|
||||
.i = @intCast(new_idx),
|
||||
.c = cur_ascii_char,
|
||||
.width = 1,
|
||||
.c = 0,
|
||||
.width = 0,
|
||||
};
|
||||
SrcUnicode.nextCursor(&self.chars.src.iter, &self.chars.src.cursor);
|
||||
self.chars.src.next_cursor = self.chars.src.cursor;
|
||||
SrcUnicode.nextCursor(&self.chars.src.iter, &self.chars.src.next_cursor);
|
||||
if (prev_ascii_char) |pc| self.chars.prev = .{ .char = pc };
|
||||
@@ -3602,13 +3606,13 @@ pub fn ShellCharIter(comptime encoding: StringEncoding) type {
|
||||
return bytes[self.src.i..];
|
||||
}
|
||||
|
||||
if (self.src.iter.i >= bytes.len) return "";
|
||||
return bytes[self.src.iter.i..];
|
||||
if (self.src.cursor.i >= bytes.len) return "";
|
||||
return bytes[self.src.cursor.i..];
|
||||
}
|
||||
|
||||
pub fn cursorPos(self: *@This()) usize {
|
||||
if (comptime encoding == .ascii) return self.src.i;
|
||||
return self.src.iter.i;
|
||||
return self.src.cursor.i;
|
||||
}
|
||||
|
||||
pub fn eat(self: *@This()) ?InputChar {
|
||||
|
||||
test/js/bun/s3/s3-stream-cancel-leak.test.ts (new file, 89 lines)
@@ -0,0 +1,89 @@
|
||||
import { S3Client } from "bun";
|
||||
import { heapStats } from "bun:jsc";
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
// Test that ReadableStream objects from cancelled S3 download streams are properly GC'd.
|
||||
//
|
||||
// When a streaming S3 download body is cancelled mid-stream, S3DownloadStreamWrapper's
|
||||
// readable_stream_ref (a Strong GC root) is not released because:
|
||||
// 1. ByteStream.onCancel() doesn't notify the S3DownloadStreamWrapper
|
||||
// 2. The S3 download continues in the background, so has_more never becomes false
|
||||
// 3. The Strong ref prevents GC of the ReadableStream
|
||||
//
|
||||
// This is the same pattern as the FetchTasklet stream cancel leak.
|
||||
|
||||
test("ReadableStream from S3 stream() should be GC'd after reader.cancel()", async () => {
|
||||
// Use a raw TCP server to mock an S3 GET response.
|
||||
// The server sends one HTTP chunk immediately, then keeps the connection open
|
||||
// to simulate a large file download in progress.
|
||||
using server = Bun.listen({
|
||||
port: 0,
|
||||
hostname: "127.0.0.1",
|
||||
socket: {
|
||||
data(socket) {
|
||||
// Respond to any incoming request with a chunked 200 OK
|
||||
socket.write(
|
||||
"HTTP/1.1 200 OK\r\n" +
|
||||
"Transfer-Encoding: chunked\r\n" +
|
||||
"Connection: keep-alive\r\n" +
|
||||
"Content-Type: application/octet-stream\r\n" +
|
||||
"\r\n" +
|
||||
"400\r\n" +
|
||||
Buffer.alloc(0x400, "x").toString() +
|
||||
"\r\n",
|
||||
);
|
||||
// Don't send terminal chunk "0\r\n\r\n" — keep connection open
|
||||
},
|
||||
open() {},
|
||||
close() {},
|
||||
error() {},
|
||||
},
|
||||
});
|
||||
|
||||
const s3 = new S3Client({
|
||||
accessKeyId: "test",
|
||||
secretAccessKey: "test",
|
||||
endpoint: `http://127.0.0.1:${server.port}`,
|
||||
bucket: "test",
|
||||
});
|
||||
|
||||
const N = 30;
|
||||
|
||||
// Warmup: ensure JIT, lazy init, and connection pool are warmed up
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const file = s3.file(`warmup-${i}.bin`);
|
||||
const stream = file.stream();
|
||||
const reader = stream.getReader();
|
||||
await reader.read();
|
||||
await reader.cancel();
|
||||
}
|
||||
|
||||
Bun.gc(true);
|
||||
await Bun.sleep(10);
|
||||
Bun.gc(true);
|
||||
|
||||
const baseline = heapStats().objectTypeCounts.ReadableStream ?? 0;
|
||||
|
||||
// Main test: stream, read one chunk, cancel, repeat N times
|
||||
for (let i = 0; i < N; i++) {
|
||||
const file = s3.file(`test-${i}.bin`);
|
||||
const stream = file.stream();
|
||||
const reader = stream.getReader();
|
||||
await reader.read();
|
||||
await reader.cancel();
|
||||
}
|
||||
|
||||
// Allow finalizers to run, then GC aggressively
|
||||
Bun.gc(true);
|
||||
await Bun.sleep(50);
|
||||
Bun.gc(true);
|
||||
await Bun.sleep(50);
|
||||
Bun.gc(true);
|
||||
|
||||
const after = heapStats().objectTypeCounts.ReadableStream ?? 0;
|
||||
const leaked = after - baseline;
|
||||
|
||||
// With the bug: leaked ≈ N (each cancelled stream's Strong ref prevents GC)
|
||||
// When fixed: leaked should be near 0 (Strong ref released on cancel)
|
||||
expect(leaked).toBeLessThanOrEqual(5);
|
||||
});
|
||||
@@ -61,11 +61,7 @@ describe("echo error handling", async () => {
});

describe("echo special cases", async () => {
  TestBuilder.command`echo -n -n hello`
    .exitCode(0)
    .stdout("-n hello")
    .stderr("")
    .runAsTest("-n flag with -n as argument");
  TestBuilder.command`echo -n -n hello`.exitCode(0).stdout("hello").stderr("").runAsTest("-n flag with -n as argument");

  TestBuilder.command`echo -- -n hello`
    .exitCode(0)

test/js/node/fs/readdir-buffer-leak.test.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
import { heapStats } from "bun:jsc";
|
||||
import { expect, test } from "bun:test";
|
||||
import { mkdirSync, writeFileSync } from "fs";
|
||||
import { readdir } from "fs/promises";
|
||||
import { tempDir } from "harness";
|
||||
import { join } from "path";
|
||||
|
||||
test("fs.promises.readdir with Buffer path does not leak GC protection", async () => {
|
||||
using dir = tempDir("readdir-leak", {});
|
||||
const base = join(String(dir), "a".repeat(200), "b".repeat(200));
|
||||
mkdirSync(base, { recursive: true });
|
||||
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const sub = join(base, `sub${i}`);
|
||||
mkdirSync(sub);
|
||||
for (let j = 0; j < 3; j++) {
|
||||
writeFileSync(join(sub, `f${j}`), "x");
|
||||
}
|
||||
}
|
||||
|
||||
// Warm up
|
||||
for (let i = 0; i < 100; i++) {
|
||||
await readdir(Buffer.from(base), { recursive: true });
|
||||
}
|
||||
Bun.gc(true);
|
||||
const before = heapStats().protectedObjectCount;
|
||||
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
await readdir(Buffer.from(base), { recursive: true });
|
||||
}
|
||||
Bun.gc(true);
|
||||
const after = heapStats().protectedObjectCount;
|
||||
|
||||
// Should not accumulate protected objects — allow a small margin for noise
|
||||
expect(after - before).toBeLessThan(10);
|
||||
});
|
||||
test/js/web/fetch/fetch-stream-cancel-leak.test.ts (new file, 140 lines)
@@ -0,0 +1,140 @@
|
||||
import { heapStats } from "bun:jsc";
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
// Test that ReadableStream objects from cancelled fetch responses are properly GC'd.
|
||||
//
|
||||
// When a streaming HTTP response body is cancelled mid-stream, FetchTasklet's
|
||||
// readable_stream_ref (a Strong GC root) is not released because:
|
||||
// 1. ByteStream.onCancel() doesn't notify the FetchTasklet
|
||||
// 2. The HTTP connection stays open, so has_more never becomes false
|
||||
// 3. Bun__FetchResponse_finalize sees the Strong ref and skips cleanup
|
||||
//
|
||||
// This creates a circular dependency where the Strong ref prevents GC,
|
||||
// and the GC finalizer skips cleanup because the Strong ref exists.
|
||||
|
||||
test("ReadableStream from fetch should be GC'd after reader.cancel()", async () => {
|
||||
// Use a raw TCP server to avoid server-side JS ReadableStream objects
|
||||
// that would add noise to objectTypeCounts.
|
||||
// The server sends one HTTP chunk immediately, then keeps the connection open.
|
||||
using server = Bun.listen({
|
||||
port: 0,
|
||||
hostname: "127.0.0.1",
|
||||
socket: {
|
||||
data(socket) {
|
||||
socket.write(
|
||||
"HTTP/1.1 200 OK\r\n" +
|
||||
"Transfer-Encoding: chunked\r\n" +
|
||||
"Connection: keep-alive\r\n" +
|
||||
"\r\n" +
|
||||
"400\r\n" +
|
||||
Buffer.alloc(0x400, "x").toString() +
|
||||
"\r\n",
|
||||
);
|
||||
// Don't send terminal chunk "0\r\n\r\n" — keep connection open
|
||||
},
|
||||
open() {},
|
||||
close() {},
|
||||
error() {},
|
||||
},
|
||||
});
|
||||
|
||||
const url = `http://127.0.0.1:${server.port}/`;
|
||||
const N = 30;
|
||||
|
||||
// Warmup: ensure JIT, lazy init, and connection pool are warmed up
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const response = await fetch(url);
|
||||
const reader = response.body!.getReader();
|
||||
await reader.read();
|
||||
await reader.cancel();
|
||||
}
|
||||
|
||||
Bun.gc(true);
|
||||
await Bun.sleep(10);
|
||||
Bun.gc(true);
|
||||
|
||||
const baseline = heapStats().objectTypeCounts.ReadableStream ?? 0;
|
||||
|
||||
// Main test: fetch, read one chunk, cancel, repeat N times
|
||||
for (let i = 0; i < N; i++) {
|
||||
const response = await fetch(url);
|
||||
const reader = response.body!.getReader();
|
||||
await reader.read();
|
||||
await reader.cancel();
|
||||
}
|
||||
|
||||
// Allow finalizers to run, then GC aggressively
|
||||
Bun.gc(true);
|
||||
await Bun.sleep(50);
|
||||
Bun.gc(true);
|
||||
await Bun.sleep(50);
|
||||
Bun.gc(true);
|
||||
|
||||
const after = heapStats().objectTypeCounts.ReadableStream ?? 0;
|
||||
const leaked = after - baseline;
|
||||
|
||||
// With the bug: leaked ≈ N (each cancelled stream's Strong ref prevents GC)
|
||||
// When fixed: leaked should be near 0 (Strong ref released on cancel)
|
||||
expect(leaked).toBeLessThanOrEqual(5);
|
||||
});
|
||||
|
||||
test("ReadableStream from fetch should be GC'd after body.cancel()", async () => {
|
||||
using server = Bun.listen({
|
||||
port: 0,
|
||||
hostname: "127.0.0.1",
|
||||
socket: {
|
||||
data(socket) {
|
||||
socket.write(
|
||||
"HTTP/1.1 200 OK\r\n" +
|
||||
"Transfer-Encoding: chunked\r\n" +
|
||||
"Connection: keep-alive\r\n" +
|
||||
"\r\n" +
|
||||
"400\r\n" +
|
||||
Buffer.alloc(0x400, "x").toString() +
|
||||
"\r\n",
|
||||
);
|
||||
},
|
||||
open() {},
|
||||
close() {},
|
||||
error() {},
|
||||
},
|
||||
});
|
||||
|
||||
const url = `http://127.0.0.1:${server.port}/`;
|
||||
const N = 30;
|
||||
|
||||
// Warmup
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const response = await fetch(url);
|
||||
const reader = response.body!.getReader();
|
||||
await reader.read();
|
||||
reader.releaseLock();
|
||||
await response.body!.cancel();
|
||||
}
|
||||
|
||||
Bun.gc(true);
|
||||
await Bun.sleep(10);
|
||||
Bun.gc(true);
|
||||
|
||||
const baseline = heapStats().objectTypeCounts.ReadableStream ?? 0;
|
||||
|
||||
// Main test: fetch, read, releaseLock, cancel body directly
|
||||
for (let i = 0; i < N; i++) {
|
||||
const response = await fetch(url);
|
||||
const reader = response.body!.getReader();
|
||||
await reader.read();
|
||||
reader.releaseLock();
|
||||
await response.body!.cancel();
|
||||
}
|
||||
|
||||
Bun.gc(true);
|
||||
await Bun.sleep(50);
|
||||
Bun.gc(true);
|
||||
await Bun.sleep(50);
|
||||
Bun.gc(true);
|
||||
|
||||
const after = heapStats().objectTypeCounts.ReadableStream ?? 0;
|
||||
const leaked = after - baseline;
|
||||
|
||||
expect(leaked).toBeLessThanOrEqual(5);
|
||||
});
|
||||
@@ -430,3 +430,5 @@ test/js/bun/test/parallel/test-http-should-not-accept-untrusted-certificates.ts
# Need to run the event loop once more to ensure sockets close
test/js/node/test/parallel/test-https-localaddress-bind-error.js
test/js/node/test/parallel/test-crypto-op-during-process-exit.js

test/js/third_party/prisma/prisma.test.ts

test/regression/issue/14709.test.ts (new file, 51 lines)
@@ -0,0 +1,51 @@
|
||||
import { Database } from "bun:sqlite";
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
test("db.close(true) works after db.transaction()", () => {
|
||||
const db = new Database(":memory:");
|
||||
db.transaction(() => {})();
|
||||
expect(() => db.close(true)).not.toThrow();
|
||||
});
|
||||
|
||||
test("db.close(true) works after db.transaction() with actual work", () => {
|
||||
const db = new Database(":memory:");
|
||||
db.run("CREATE TABLE test (id INTEGER PRIMARY KEY, value TEXT)");
|
||||
const insert = db.transaction((items: string[]) => {
|
||||
const stmt = db.query("INSERT INTO test (value) VALUES (?)");
|
||||
for (const item of items) {
|
||||
stmt.run(item);
|
||||
}
|
||||
});
|
||||
insert(["a", "b", "c"]);
|
||||
expect(db.query("SELECT COUNT(*) as count FROM test").get()).toEqual({ count: 3 });
|
||||
expect(() => db.close(true)).not.toThrow();
|
||||
});
|
||||
|
||||
test("using declaration works with db.transaction()", () => {
|
||||
using db = new Database(":memory:");
|
||||
db.transaction(() => {})();
|
||||
// Symbol.dispose calls close(true), should not throw
|
||||
});
|
||||
|
||||
test("db.close(true) works after multiple transaction types", () => {
|
||||
const db = new Database(":memory:");
|
||||
db.transaction(() => {})();
|
||||
db.transaction(() => {}).deferred();
|
||||
db.transaction(() => {}).immediate();
|
||||
db.transaction(() => {}).exclusive();
|
||||
expect(() => db.close(true)).not.toThrow();
|
||||
});
|
||||
|
||||
test("db.close(true) works after nested transactions", () => {
|
||||
const db = new Database(":memory:");
|
||||
db.run("CREATE TABLE test (id INTEGER PRIMARY KEY)");
|
||||
const outer = db.transaction(() => {
|
||||
db.run("INSERT INTO test (id) VALUES (1)");
|
||||
const inner = db.transaction(() => {
|
||||
db.run("INSERT INTO test (id) VALUES (2)");
|
||||
});
|
||||
inner();
|
||||
});
|
||||
outer();
|
||||
expect(() => db.close(true)).not.toThrow();
|
||||
});
|
||||
test/regression/issue/17244.test.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
import { $ } from "bun";
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
// https://github.com/oven-sh/bun/issues/17244
|
||||
// Shell template literals leaked __bunstr_N when the first interpolated value
|
||||
// contained a space and a subsequent value contained a multi-byte UTF-8 character.
|
||||
|
||||
test("shell interpolation with space and multi-byte UTF-8", async () => {
|
||||
const a = " ";
|
||||
const b = "Í";
|
||||
|
||||
const result = await $`echo ${a} ${b}`.text();
|
||||
expect(result.trim()).toBe("Í");
|
||||
expect(result).not.toContain("__bunstr");
|
||||
});
|
||||
|
||||
test("shell interpolation with trailing-space string and 2-byte UTF-8", async () => {
|
||||
const a = "a ";
|
||||
const b = "Í";
|
||||
|
||||
const result = await $`echo ${a} ${b}`.text();
|
||||
// "a " (with trailing space preserved) + " " (template separator) + "Í"
|
||||
expect(result.trim()).toBe("a Í");
|
||||
expect(result).not.toContain("__bunstr");
|
||||
});
|
||||
|
||||
test("shell interpolation with space and 3-byte UTF-8", async () => {
|
||||
const a = " ";
|
||||
const b = "€";
|
||||
|
||||
const result = await $`echo ${a} ${b}`.text();
|
||||
expect(result.trim()).toBe("€");
|
||||
expect(result).not.toContain("__bunstr");
|
||||
});
|
||||
|
||||
test("shell interpolation with embedded space and multi-byte UTF-8", async () => {
|
||||
const a = "a b";
|
||||
const b = "Í";
|
||||
|
||||
const result = await $`echo ${a} ${b}`.text();
|
||||
expect(result.trim()).toBe("a b Í");
|
||||
expect(result).not.toContain("__bunstr");
|
||||
});
|
||||
test/regression/issue/17405.test.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
import { $ } from "bun";
|
||||
import { describe, expect, test } from "bun:test";
|
||||
|
||||
describe("echo -e flag support", () => {
|
||||
test("echo -e does not output -e as literal text", async () => {
|
||||
const result = await $`echo -e hello`.text();
|
||||
expect(result).toBe("hello\n");
|
||||
});
|
||||
|
||||
test("echo -e interprets backslash-n", async () => {
|
||||
const result = await $`echo -e ${"hello\\nworld"}`.text();
|
||||
expect(result).toBe("hello\nworld\n");
|
||||
});
|
||||
|
||||
test("echo -e interprets backslash-t", async () => {
|
||||
const result = await $`echo -e ${"hello\\tworld"}`.text();
|
||||
expect(result).toBe("hello\tworld\n");
|
||||
});
|
||||
|
||||
test("echo -e interprets backslash-backslash", async () => {
|
||||
const result = await $`echo -e ${"hello\\\\world"}`.text();
|
||||
expect(result).toBe("hello\\world\n");
|
||||
});
|
||||
|
||||
test("echo -e interprets \\a (bell)", async () => {
|
||||
const result = await $`echo -e ${"\\a"}`.text();
|
||||
expect(result).toBe("\x07\n");
|
||||
});
|
||||
|
||||
test("echo -e interprets \\b (backspace)", async () => {
|
||||
const result = await $`echo -e ${"a\\bb"}`.text();
|
||||
expect(result).toBe("a\bb\n");
|
||||
});
|
||||
|
||||
test("echo -e interprets \\r (carriage return)", async () => {
|
||||
const result = await $`echo -e ${"hello\\rworld"}`.text();
|
||||
expect(result).toBe("hello\rworld\n");
|
||||
});
|
||||
|
||||
test("echo -e interprets \\f (form feed)", async () => {
|
||||
const result = await $`echo -e ${"\\f"}`.text();
|
||||
expect(result).toBe("\f\n");
|
||||
});
|
||||
|
||||
test("echo -e interprets \\v (vertical tab)", async () => {
|
||||
const result = await $`echo -e ${"\\v"}`.text();
|
||||
expect(result).toBe("\v\n");
|
||||
});
|
||||
|
||||
test("echo -e interprets \\0nnn (octal)", async () => {
|
||||
// \0101 = 'A' (65 decimal)
|
||||
const result = await $`echo -e ${"\\0101"}`.text();
|
||||
expect(result).toBe("A\n");
|
||||
});
|
||||
|
||||
test("echo -e interprets \\xHH (hex)", async () => {
|
||||
// \x41 = 'A'
|
||||
const result = await $`echo -e ${"\\x41\\x42\\x43"}`.text();
|
||||
expect(result).toBe("ABC\n");
|
||||
});
|
||||
|
||||
test("echo -e \\c stops output", async () => {
|
||||
const result = await $`echo -e ${"hello\\cworld"}`.text();
|
||||
expect(result).toBe("hello");
|
||||
});
|
||||
|
||||
test("echo -e with \\e (escape character)", async () => {
|
||||
const result = await $`echo -e ${"\\e"}`.text();
|
||||
expect(result).toBe("\x1b\n");
|
||||
});
|
||||
|
||||
test("echo -E disables escape interpretation", async () => {
|
||||
const result = await $`echo -E ${"hello\\nworld"}`.text();
|
||||
expect(result).toBe("hello\\nworld\n");
|
||||
});
|
||||
|
||||
test("echo -eE (last wins: -E disables)", async () => {
|
||||
const result = await $`echo -eE ${"hello\\tworld"}`.text();
|
||||
expect(result).toBe("hello\\tworld\n");
|
||||
});
|
||||
|
||||
test("echo -Ee (last wins: -e enables)", async () => {
|
||||
const result = await $`echo -Ee ${"hello\\tworld"}`.text();
|
||||
expect(result).toBe("hello\tworld\n");
|
||||
});
|
||||
|
||||
test("echo -ne (no newline + escapes)", async () => {
|
||||
const result = await $`echo -ne ${"hello\\tworld"}`.text();
|
||||
expect(result).toBe("hello\tworld");
|
||||
});
|
||||
|
||||
test("echo -en (same as -ne)", async () => {
|
||||
const result = await $`echo -en ${"hello\\tworld"}`.text();
|
||||
expect(result).toBe("hello\tworld");
|
||||
});
|
||||
|
||||
test("echo -n still works (no newline)", async () => {
|
||||
const result = await $`echo -n hello`.text();
|
||||
expect(result).toBe("hello");
|
||||
});
|
||||
|
||||
test("echo with invalid flag outputs literally", async () => {
|
||||
const result = await $`echo -x hello`.text();
|
||||
expect(result).toBe("-x hello\n");
|
||||
});
|
||||
|
||||
test("echo -e piped to cat (original issue scenario)", async () => {
|
||||
const pw = "mypassword";
|
||||
const result = await $`echo -e ${pw} | cat`.text();
|
||||
expect(result).toBe("mypassword\n");
|
||||
});
|
||||
|
||||
test("echo without -e still works normally", async () => {
|
||||
const result = await $`echo hello world`.text();
|
||||
expect(result).toBe("hello world\n");
|
||||
});
|
||||
});
|
||||
test/regression/issue/18242.test.ts (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
import { expect, test } from "bun:test";
|
||||
import { bunEnv, bunExe, tempDir } from "harness";
|
||||
|
||||
test("Bun.build works multiple times after FileSystemRouter is created", async () => {
|
||||
using dir = tempDir("issue-18242", {
|
||||
"pages/index.ts": `console.log("Hello via Bun!");`,
|
||||
"build.ts": `
|
||||
import path from "path";
|
||||
|
||||
const PAGES_DIR = path.resolve(process.cwd(), "pages");
|
||||
|
||||
const srcRouter = new Bun.FileSystemRouter({
|
||||
dir: PAGES_DIR,
|
||||
style: "nextjs",
|
||||
});
|
||||
|
||||
const entrypoints = Object.values(srcRouter.routes);
|
||||
|
||||
const result1 = await Bun.build({
|
||||
entrypoints,
|
||||
outdir: "dist/browser",
|
||||
});
|
||||
|
||||
const result2 = await Bun.build({
|
||||
entrypoints,
|
||||
outdir: "dist/bun",
|
||||
target: "bun",
|
||||
});
|
||||
|
||||
const result3 = await Bun.build({
|
||||
entrypoints,
|
||||
outdir: "dist/third",
|
||||
});
|
||||
|
||||
console.log(JSON.stringify({
|
||||
build1: result1.success,
|
||||
build2: result2.success,
|
||||
build3: result3.success,
|
||||
build2Logs: result2.logs.map(String),
|
||||
build3Logs: result3.logs.map(String),
|
||||
}));
|
||||
`,
|
||||
});
|
||||
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "build.ts"],
|
||||
env: bunEnv,
|
||||
cwd: String(dir),
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
const result = JSON.parse(stdout.trim());
|
||||
|
||||
expect(result.build1).toBe(true);
|
||||
expect(result.build2).toBe(true);
|
||||
expect(result.build3).toBe(true);
|
||||
expect(result.build2Logs).toEqual([]);
|
||||
expect(result.build3Logs).toEqual([]);
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
test/regression/issue/20546.test.ts (new file, 97 lines)
@@ -0,0 +1,97 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { bunEnv, bunExe, tempDir } from "harness";
|
||||
|
||||
describe("issue #20546 - CSS @layer declarations should be stripped from source files", () => {
|
||||
test("separate @layer statements with @import layer()", async () => {
|
||||
using dir = tempDir("css-layer-20546", {
|
||||
"main.css": /* css */ `
|
||||
@layer one;
|
||||
@layer two;
|
||||
@layer three;
|
||||
|
||||
@import url('./a.css') layer(one);
|
||||
@import url('./b.css') layer(two);
|
||||
@import url('./c.css') layer(three);
|
||||
`,
|
||||
"a.css": /* css */ `body { margin: 0; }`,
|
||||
"b.css": /* css */ `h1 { font-family: sans-serif; }`,
|
||||
"c.css": /* css */ `.text-centered { text-align: center; }`,
|
||||
});
|
||||
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "build", "./main.css", "--outdir=out"],
|
||||
env: bunEnv,
|
||||
cwd: String(dir),
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
expect(stderr).toBe("");
|
||||
|
||||
const outCss = await Bun.file(`${dir}/out/main.css`).text();
|
||||
|
||||
// @layer declarations should appear at the top (hoisted or as part of the layer blocks)
|
||||
// @import statements should NOT appear in the output (they've been inlined)
|
||||
expect(outCss).not.toContain("@import");
|
||||
|
||||
// The bare @layer declarations should not be duplicated at the bottom
|
||||
// They should either be hoisted to the top or removed entirely since
|
||||
// the layer blocks establish the same ordering
|
||||
const layerOneStatements = outCss.match(/@layer one;/g);
|
||||
const layerTwoStatements = outCss.match(/@layer two;/g);
|
||||
const layerThreeStatements = outCss.match(/@layer three;/g);
|
||||
|
||||
// Each @layer declaration should appear at most once (hoisted)
|
||||
expect((layerOneStatements ?? []).length).toBeLessThanOrEqual(1);
|
||||
expect((layerTwoStatements ?? []).length).toBeLessThanOrEqual(1);
|
||||
expect((layerThreeStatements ?? []).length).toBeLessThanOrEqual(1);
|
||||
|
||||
// The actual layer block content should be present
|
||||
expect(outCss).toContain("margin: 0");
|
||||
expect(outCss).toContain("font-family: sans-serif");
|
||||
expect(outCss).toContain("text-align: center");
|
||||
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
test("comma syntax @layer statement with @import layer()", async () => {
|
||||
using dir = tempDir("css-layer-20546-comma", {
|
||||
"main.css": /* css */ `
|
||||
@layer one, two, three;
|
||||
|
||||
@import url('./a.css') layer(one);
|
||||
@import url('./b.css') layer(two);
|
||||
@import url('./c.css') layer(three);
|
||||
`,
|
||||
"a.css": /* css */ `body { margin: 0; }`,
|
||||
"b.css": /* css */ `h1 { font-family: sans-serif; }`,
|
||||
"c.css": /* css */ `.text-centered { text-align: center; }`,
|
||||
});
|
||||
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "build", "./main.css", "--outdir=out"],
|
||||
env: bunEnv,
|
||||
cwd: String(dir),
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
expect(stderr).toBe("");
|
||||
|
||||
const outCss = await Bun.file(`${dir}/out/main.css`).text();
|
||||
|
||||
// @import statements should NOT appear in the output
|
||||
expect(outCss).not.toContain("@import");
|
||||
|
||||
// The actual layer block content should be present
|
||||
expect(outCss).toContain("margin: 0");
|
||||
expect(outCss).toContain("font-family: sans-serif");
|
||||
expect(outCss).toContain("text-align: center");
|
||||
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
});
|
||||
test/regression/issue/24709.test.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
import { expect, test } from "bun:test";
|
||||
import { tempDir } from "harness";
|
||||
|
||||
test("bun build produces valid JS for unused dynamic imports", async () => {
|
||||
using dir = tempDir("issue-24709", {
|
||||
"void-import.ts": `
|
||||
export function main() {
|
||||
void import("./dep.ts");
|
||||
}
|
||||
`,
|
||||
"bare-import.ts": `
|
||||
export function main() {
|
||||
import("./dep.ts");
|
||||
}
|
||||
`,
|
||||
"dep.ts": `export const x = 1;`,
|
||||
});
|
||||
|
||||
const transpiler = new Bun.Transpiler();
|
||||
|
||||
// Test void import("...")
|
||||
{
|
||||
const result = await Bun.build({
|
||||
entrypoints: [`${dir}/void-import.ts`],
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const output = await result.outputs[0].text();
|
||||
|
||||
// The output must not contain an empty arrow function body like "() => )"
|
||||
expect(output).not.toContain("() => )");
|
||||
|
||||
// Validate the output is syntactically valid JS by scanning it
|
||||
expect(() => transpiler.scanImports(output)).not.toThrow();
|
||||
}
|
||||
|
||||
// Test bare import("...")
|
||||
{
|
||||
const result = await Bun.build({
|
||||
entrypoints: [`${dir}/bare-import.ts`],
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const output = await result.outputs[0].text();
|
||||
|
||||
expect(output).not.toContain("() => )");
|
||||
expect(() => transpiler.scanImports(output)).not.toThrow();
|
||||
}
|
||||
});
|
||||
test/regression/issue/27117.test.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
import { expect, test } from "bun:test";
|
||||
import { bunEnv, bunExe, tempDir } from "harness";
|
||||
|
||||
test("CSS bundler should not drop :root rule before @property", async () => {
|
||||
using dir = tempDir("css-property-root-dedup", {
|
||||
"input.css": `:root {
|
||||
--bar: 1;
|
||||
}
|
||||
|
||||
@property --foo {
|
||||
syntax: "<number>";
|
||||
inherits: true;
|
||||
initial-value: 0;
|
||||
}
|
||||
|
||||
:root {
|
||||
--baz: 2;
|
||||
}
|
||||
`,
|
||||
});
|
||||
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "build", "input.css", "--outdir", "out"],
|
||||
env: bunEnv,
|
||||
cwd: String(dir),
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
const output = await Bun.file(`${dir}/out/input.css`).text();
|
||||
|
||||
// Both :root blocks must be preserved — they cannot be merged across the @property boundary
|
||||
expect(output).toContain("--bar: 1");
|
||||
expect(output).toContain("--baz: 2");
|
||||
expect(output).toContain("@property --foo");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
test/regression/issue/27287.test.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
import { expect, test } from "bun:test";
|
||||
import { bunEnv, bunExe, tempDir } from "harness";
|
||||
|
||||
// https://github.com/oven-sh/bun/issues/27287
|
||||
test("CJS require() of failing ESM does not corrupt module for subsequent import()", async () => {
|
||||
using dir = tempDir("issue-27287", {
|
||||
"bad-esm.mjs": `throw globalThis.err;\nexport const foo = 2;\n`,
|
||||
"entry.cjs": `
|
||||
'use strict';
|
||||
globalThis.err = new Error('intentional error');
|
||||
|
||||
// First: require() the failing ESM module
|
||||
try {
|
||||
require('./bad-esm.mjs');
|
||||
} catch (e) {
|
||||
console.log('require_error:', e.message);
|
||||
}
|
||||
|
||||
// Second: import() the same module - should re-throw the original error, not ReferenceError
|
||||
import('./bad-esm.mjs')
|
||||
.then(() => {
|
||||
console.log('import_result: resolved');
|
||||
})
|
||||
.catch((e) => {
|
||||
console.log('import_error_type:', e.constructor.name);
|
||||
console.log('import_error_msg:', e.message);
|
||||
});
|
||||
`,
|
||||
});
|
||||
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "run", "entry.cjs"],
|
||||
env: bunEnv,
|
||||
cwd: String(dir),
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
expect(stdout).toContain("require_error: intentional error");
|
||||
// The import() should re-throw the original evaluation error, NOT a ReferenceError
|
||||
// about uninitialized exports. The module threw during evaluation, so import() should
|
||||
// reject with the same error.
|
||||
expect(stdout).not.toContain("ReferenceError");
|
||||
expect(stdout).toContain("import_error_type: Error");
|
||||
expect(stdout).toContain("import_error_msg: intentional error");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||