Mirror of https://github.com/oven-sh/bun (synced 2026-02-10 02:48:50 +00:00)
Merge branch 'main' into claude/update-react-next-versions
@@ -16,7 +16,6 @@ pub const BunObject = struct {
    pub const connect = toJSCallback(host_fn.wrapStaticMethod(api.Listener, "connect", false));
    pub const createParsedShellScript = toJSCallback(bun.shell.ParsedShellScript.createParsedShellScript);
    pub const createShellInterpreter = toJSCallback(bun.shell.Interpreter.createShellInterpreter);
    pub const traceShellScript = toJSCallback(bun.shell.TraceInterpreter.traceShellScript);
    pub const deflateSync = toJSCallback(JSZlib.deflateSync);
    pub const file = toJSCallback(WebCore.Blob.constructBunFile);
    pub const gunzipSync = toJSCallback(JSZlib.gunzipSync);

@@ -158,7 +157,6 @@ pub const BunObject = struct {
    @export(&BunObject.connect, .{ .name = callbackName("connect") });
    @export(&BunObject.createParsedShellScript, .{ .name = callbackName("createParsedShellScript") });
    @export(&BunObject.createShellInterpreter, .{ .name = callbackName("createShellInterpreter") });
    @export(&BunObject.traceShellScript, .{ .name = callbackName("traceShellScript") });
    @export(&BunObject.deflateSync, .{ .name = callbackName("deflateSync") });
    @export(&BunObject.file, .{ .name = callbackName("file") });
    @export(&BunObject.gunzipSync, .{ .name = callbackName("gunzipSync") });
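For context, the callbacks registered above are what back the corresponding methods on the `Bun` global. A minimal sketch of the JS-visible side (values are illustrative; Bun.deflateSync, Bun.inflateSync, and Bun.file are existing public APIs):

// Round-trip through the zlib callbacks wired up above.
const payload = new TextEncoder().encode("hello");
const compressed = Bun.deflateSync(payload);      // zlib-compressed Uint8Array
const restored = Bun.inflateSync(compressed);     // back to the original bytes
console.log(new TextDecoder().decode(restored));  // "hello"

// Blob.constructBunFile backs Bun.file():
const pkg = Bun.file("package.json");             // lazy file reference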
@@ -71,7 +71,6 @@
    macro(spawn) \
    macro(spawnSync) \
    macro(stringWidth) \
    macro(traceShellScript) \
    macro(udpSocket) \
    macro(which) \
    macro(write) \
@@ -354,14 +354,12 @@ static JSValue constructBunShell(VM& vm, JSObject* bunObject)
    auto* globalObject = jsCast<Zig::GlobalObject*>(bunObject->globalObject());
    JSFunction* createParsedShellScript = JSFunction::create(vm, bunObject->globalObject(), 2, "createParsedShellScript"_s, BunObject_callback_createParsedShellScript, ImplementationVisibility::Private, NoIntrinsic);
    JSFunction* createShellInterpreterFunction = JSFunction::create(vm, bunObject->globalObject(), 1, "createShellInterpreter"_s, BunObject_callback_createShellInterpreter, ImplementationVisibility::Private, NoIntrinsic);
    JSFunction* traceShellScriptFunction = JSFunction::create(vm, bunObject->globalObject(), 1, "traceShellScript"_s, BunObject_callback_traceShellScript, ImplementationVisibility::Private, NoIntrinsic);
    JSC::JSFunction* createShellFn = JSC::JSFunction::create(vm, globalObject, shellCreateBunShellTemplateFunctionCodeGenerator(vm), globalObject);

    auto scope = DECLARE_THROW_SCOPE(vm);
    auto args = JSC::MarkedArgumentBuffer();
    args.append(createShellInterpreterFunction);
    args.append(createParsedShellScript);
    args.append(traceShellScriptFunction);
    JSC::JSValue shell = JSC::call(globalObject, createShellFn, args, "BunShell"_s);
    RETURN_IF_EXCEPTION(scope, {});
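Note that the order of the arguments appended here (createShellInterpreter, createParsedShellScript, traceShellScript) lines up with the three-parameter form of the generated createBunShellTemplateFunction(createShellInterpreter_, createParsedShellScript_, traceShellScript_) entry point shown in the shell.ts hunk further down.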
@@ -309,124 +309,6 @@ static inline JSC::EncodedJSValue constructJSWebSocket3(JSGlobalObject* lexicalG
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse agent option - extract proxy from agent.proxy if no explicit proxy
|
||||
// This supports HttpsProxyAgent and similar agent libraries
|
||||
if (proxyUrl.isNull() || proxyUrl.isEmpty()) {
|
||||
auto agentValue = Bun::getOwnPropertyIfExists(globalObject, options, PropertyName(Identifier::fromString(vm, "agent"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (agentValue && !agentValue.isUndefinedOrNull() && agentValue.isObject()) {
|
||||
if (JSC::JSObject* agentObj = agentValue.getObject()) {
|
||||
// Get agent.proxy (can be URL object or string)
|
||||
auto agentProxyValue = Bun::getOwnPropertyIfExists(globalObject, agentObj, PropertyName(Identifier::fromString(vm, "proxy"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (agentProxyValue && !agentProxyValue.isUndefinedOrNull()) {
|
||||
if (agentProxyValue.isString()) {
|
||||
proxyUrl = convert<IDLUSVString>(*lexicalGlobalObject, agentProxyValue);
|
||||
} else if (agentProxyValue.isObject()) {
|
||||
// URL object - get .href property
|
||||
if (JSC::JSObject* urlObj = agentProxyValue.getObject()) {
|
||||
auto hrefValue = Bun::getOwnPropertyIfExists(globalObject, urlObj, PropertyName(Identifier::fromString(vm, "href"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (hrefValue && hrefValue.isString()) {
|
||||
proxyUrl = convert<IDLUSVString>(*lexicalGlobalObject, hrefValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
}
|
||||
|
||||
// Get agent.proxyHeaders
|
||||
auto proxyHeadersValue = Bun::getOwnPropertyIfExists(globalObject, agentObj, PropertyName(Identifier::fromString(vm, "proxyHeaders"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (proxyHeadersValue && !proxyHeadersValue.isUndefinedOrNull()) {
|
||||
// If it's a function, call it
|
||||
if (proxyHeadersValue.isCallable()) {
|
||||
auto callData = JSC::getCallData(proxyHeadersValue);
|
||||
proxyHeadersValue = JSC::call(lexicalGlobalObject, proxyHeadersValue, callData, agentObj, JSC::MarkedArgumentBuffer());
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
}
|
||||
if (!proxyHeadersValue.isUndefinedOrNull()) {
|
||||
// Check if it's already a Headers instance (like fetch does)
|
||||
if (auto* jsHeaders = jsDynamicCast<JSFetchHeaders*>(proxyHeadersValue)) {
|
||||
// Convert FetchHeaders to the Init variant
|
||||
auto& headers = jsHeaders->wrapped();
|
||||
Vector<KeyValuePair<String, String>> pairs;
|
||||
auto iterator = headers.createIterator(false);
|
||||
while (auto value = iterator.next()) {
|
||||
pairs.append({ value->key, value->value });
|
||||
}
|
||||
proxyHeadersInit = WTF::move(pairs);
|
||||
} else {
|
||||
// Fall back to IDL conversion for plain objects/arrays
|
||||
proxyHeadersInit = convert<IDLUnion<IDLSequence<IDLSequence<IDLByteString>>, IDLRecord<IDLByteString, IDLByteString>>>(*lexicalGlobalObject, proxyHeadersValue);
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get TLS options from agent.connectOpts or agent.options
|
||||
// We build a filtered object with only supported TLS options (ca, cert, key, passphrase, rejectUnauthorized)
|
||||
// to avoid passing invalid properties like ALPNProtocols to the SSL parser
|
||||
if (rejectUnauthorized == -1 && !sslConfig) {
|
||||
auto connectOptsValue = Bun::getOwnPropertyIfExists(globalObject, agentObj, PropertyName(Identifier::fromString(vm, "connectOpts"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (!connectOptsValue || connectOptsValue.isUndefinedOrNull()) {
|
||||
connectOptsValue = Bun::getOwnPropertyIfExists(globalObject, agentObj, PropertyName(Identifier::fromString(vm, "options"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
}
|
||||
if (connectOptsValue && !connectOptsValue.isUndefinedOrNull() && connectOptsValue.isObject()) {
|
||||
if (JSC::JSObject* connectOptsObj = connectOptsValue.getObject()) {
|
||||
// Extract rejectUnauthorized
|
||||
auto rejectValue = Bun::getOwnPropertyIfExists(globalObject, connectOptsObj, PropertyName(Identifier::fromString(vm, "rejectUnauthorized"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (rejectValue && rejectValue.isBoolean()) {
|
||||
rejectUnauthorized = rejectValue.asBoolean() ? 1 : 0;
|
||||
}
|
||||
|
||||
// Build filtered TLS options object with only supported properties
|
||||
JSC::JSObject* filteredTlsOpts = JSC::constructEmptyObject(globalObject);
|
||||
bool hasTlsOpts = false;
|
||||
|
||||
auto caValue = Bun::getOwnPropertyIfExists(globalObject, connectOptsObj, PropertyName(Identifier::fromString(vm, "ca"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (caValue && !caValue.isUndefinedOrNull()) {
|
||||
filteredTlsOpts->putDirect(vm, Identifier::fromString(vm, "ca"_s), caValue);
|
||||
hasTlsOpts = true;
|
||||
}
|
||||
|
||||
auto certValue = Bun::getOwnPropertyIfExists(globalObject, connectOptsObj, PropertyName(Identifier::fromString(vm, "cert"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (certValue && !certValue.isUndefinedOrNull()) {
|
||||
filteredTlsOpts->putDirect(vm, Identifier::fromString(vm, "cert"_s), certValue);
|
||||
hasTlsOpts = true;
|
||||
}
|
||||
|
||||
auto keyValue = Bun::getOwnPropertyIfExists(globalObject, connectOptsObj, PropertyName(Identifier::fromString(vm, "key"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (keyValue && !keyValue.isUndefinedOrNull()) {
|
||||
filteredTlsOpts->putDirect(vm, Identifier::fromString(vm, "key"_s), keyValue);
|
||||
hasTlsOpts = true;
|
||||
}
|
||||
|
||||
auto passphraseValue = Bun::getOwnPropertyIfExists(globalObject, connectOptsObj, PropertyName(Identifier::fromString(vm, "passphrase"_s)));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (passphraseValue && !passphraseValue.isUndefinedOrNull()) {
|
||||
filteredTlsOpts->putDirect(vm, Identifier::fromString(vm, "passphrase"_s), passphraseValue);
|
||||
hasTlsOpts = true;
|
||||
}
|
||||
|
||||
// Parse the filtered TLS options
|
||||
if (hasTlsOpts) {
|
||||
sslConfig = Bun__WebSocket__parseSSLConfig(globalObject, JSValue::encode(filteredTlsOpts));
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
auto object = (rejectUnauthorized == -1)
|
||||
|
||||
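The agent proxy/TLS extraction removed here appears to be superseded by the JS-side extractAgentOptions helper added to src/js/thirdparty/ws.js later in this diff.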
@@ -464,6 +464,7 @@ pub const Command = struct {
        compile_autoload_bunfig: bool = true,
        compile_autoload_tsconfig: bool = false,
        compile_autoload_package_json: bool = false,
        compile_executable_path: ?[]const u8 = null,
        windows: options.WindowsOptions = .{},
    };
@@ -158,6 +158,7 @@ pub const build_only_params = [_]ParamType{
    clap.parseParam("--no-compile-autoload-tsconfig Disable autoloading of tsconfig.json at runtime in standalone executable") catch unreachable,
    clap.parseParam("--compile-autoload-package-json Enable autoloading of package.json at runtime in standalone executable (default: false)") catch unreachable,
    clap.parseParam("--no-compile-autoload-package-json Disable autoloading of package.json at runtime in standalone executable") catch unreachable,
    clap.parseParam("--compile-executable-path <STR> Path to a Bun executable to use for cross-compilation instead of downloading") catch unreachable,
    clap.parseParam("--bytecode Use a bytecode cache") catch unreachable,
    clap.parseParam("--watch Automatically restart the process on file change") catch unreachable,
    clap.parseParam("--no-clear-screen Disable clearing the terminal screen on reload when --watch is enabled") catch unreachable,
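For example, a cross-compile that already has a matching binary on disk could pass --compile --compile-executable-path ./vendor/bun-linux-x64 (hypothetical path) instead of letting the build download one.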
@@ -1106,6 +1107,14 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
        }
    }

    if (args.option("--compile-executable-path")) |path| {
        if (!ctx.bundler_options.compile) {
            Output.errGeneric("--compile-executable-path requires --compile", .{});
            Global.crash();
        }
        ctx.bundler_options.compile_executable_path = path;
    }

    if (args.flag("--windows-hide-console")) {
        // --windows-hide-console technically doesn't depend on WinAPI, but since --windows-icon
        // does, all of these customization options have been gated to windows-only
@@ -468,7 +468,7 @@ pub const BuildCommand = struct {
        this_transpiler.options.output_format,
        ctx.bundler_options.windows,
        ctx.bundler_options.compile_exec_argv orelse "",
        null,
        ctx.bundler_options.compile_executable_path,
        .{
            .disable_default_env_files = !ctx.bundler_options.compile_autoload_dotenv,
            .disable_autoload_bunfig = !ctx.bundler_options.compile_autoload_bunfig,
@@ -1,35 +1,4 @@
// Note: ShellTraceFlags interface documents the permission flag values returned
// by $.trace operations. These are intentionally not exported as runtime values
// to keep the trace API simple - users compare against numeric constants directly.
// The values mirror standard Unix open(2) and access(2) flags.

interface ShellTraceOperation {
  /** Permission flags (octal integer, can be combined with |) */
  flags: number;
  /** Working directory at time of operation */
  cwd: string;
  /** Absolute path that would be accessed (for file/execute operations) */
  path?: string;
  /** Command name (for execute operations) */
  command?: string;
  /** Accumulated environment variables at this point in execution */
  env?: Record<string, string>;
  /** Which standard stream is being redirected: "stdin", "stdout", or "stderr" */
  stream?: "stdin" | "stdout" | "stderr";
  /** Command arguments for external commands (excluding command name) */
  args?: string[];
  /** True if operation contains non-statically-analyzable values (command substitution, $1, etc.) */
  dynamic?: true;
}

interface ShellTraceResult {
  operations: ShellTraceOperation[];
  cwd: string;
  success: boolean;
  error: string | null;
}
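Because the flag values are intentionally not exported at runtime, a caller inspecting a ShellTraceResult compares op.flags against its own octal constants. A minimal TypeScript sketch of such a consumer (the constants mirror the open(2)/access(2) values used by the trace tests further down):

// Hypothetical consumer of a ShellTraceResult returned by $.trace.
const O_WRONLY = 0o1, O_CREAT = 0o100, X_OK = 0o100000;

function summarize(result: ShellTraceResult): string[] {
  return result.operations.map(op => {
    if (op.flags & X_OK) return `exec ${op.command} ${op.args?.join(" ") ?? ""}`;
    if (op.flags & (O_WRONLY | O_CREAT)) return `write ${op.path}`;
    return `read ${op.path ?? op.cwd}`;
  });
}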
export function createBunShellTemplateFunction(createShellInterpreter_, createParsedShellScript_, traceShellScript_) {
export function createBunShellTemplateFunction(createShellInterpreter_, createParsedShellScript_) {
  const createShellInterpreter = createShellInterpreter_ as (
    resolve: (code: number, stdout: Buffer, stderr: Buffer) => void,
    reject: (code: number, stdout: Buffer, stderr: Buffer) => void,
@@ -39,7 +8,6 @@ export function createBunShellTemplateFunction(createShellInterpreter_, createPa
    raw: string,
    args: string[],
  ) => $ZigGeneratedClasses.ParsedShellScript;
  const traceShellScript = traceShellScript_ as (args: $ZigGeneratedClasses.ParsedShellScript) => ShellTraceResult;

  function lazyBufferToHumanReadableString(this: Buffer) {
    return this.toString();
@@ -380,22 +348,6 @@ export function createBunShellTemplateFunction(createShellInterpreter_, createPa
  BunShell[envSymbol] = defaultEnv;
  BunShell[throwsSymbol] = true;

  // Trace function - analyzes shell script without running it
  function trace(first, ...rest): ShellTraceResult {
    if (first?.raw === undefined)
      throw new Error("Please use '$.trace' as a tagged template function: $.trace`cmd arg1 arg2`");
    const parsed_shell_script = createParsedShellScript(first.raw, rest);

    const cwd = BunShell[cwdSymbol];
    const env = BunShell[envSymbol];

    // cwd must be set before env or else it will be injected into env as "PWD=/"
    if (cwd) parsed_shell_script.setCwd(cwd);
    if (env) parsed_shell_script.setEnv(env);

    return traceShellScript(parsed_shell_script);
  }

  Object.defineProperties(BunShell, {
    Shell: {
      value: Shell,
@@ -409,10 +361,6 @@ export function createBunShellTemplateFunction(createShellInterpreter_, createPa
      value: ShellError,
      enumerable: true,
    },
    trace: {
      value: trace,
      enumerable: true,
    },
  });

  return BunShell;
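Putting the pieces together, trace is used as a tagged template, mirroring $ itself. A short sketch of the call pattern the code above supports (only meaningful on builds that ship the trace feature):

import { $ } from "bun";

// Statically analyze a script without running it.
const result = $.trace`cat ${"/tmp/input.txt"} > /tmp/out.txt`;
if (!result.success) console.error(result.error);
for (const op of result.operations) {
  console.log(op.cwd, op.path ?? op.command, op.flags.toString(8));
}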
src/js/thirdparty/ws.js (vendored, 115 lines changed)
@@ -15,6 +15,64 @@ const kBunInternals = Symbol.for("::bunternal::");
|
||||
const readyStates = ["CONNECTING", "OPEN", "CLOSING", "CLOSED"];
|
||||
|
||||
const encoder = new TextEncoder();
|
||||
|
||||
/**
|
||||
* Extracts TLS and proxy options from an agent object.
|
||||
* @param {Object} agent The agent object to extract options from
|
||||
* @returns {{ tls: Object|null, proxy: string|Object|null }}
|
||||
*/
|
||||
function extractAgentOptions(agent) {
|
||||
const connectOpts = agent?.connectOpts || agent?.options;
|
||||
let tls = null;
|
||||
let proxy = null;
|
||||
|
||||
if ($isObject(connectOpts)) {
|
||||
// Build TLS options
|
||||
const newTlsOptions = {};
|
||||
let hasTlsOptions = false;
|
||||
|
||||
if (connectOpts.rejectUnauthorized !== undefined) {
|
||||
newTlsOptions.rejectUnauthorized = connectOpts.rejectUnauthorized;
|
||||
hasTlsOptions = true;
|
||||
}
|
||||
if (connectOpts.ca) {
|
||||
newTlsOptions.ca = connectOpts.ca;
|
||||
hasTlsOptions = true;
|
||||
}
|
||||
if (connectOpts.cert) {
|
||||
newTlsOptions.cert = connectOpts.cert;
|
||||
hasTlsOptions = true;
|
||||
}
|
||||
if (connectOpts.key) {
|
||||
newTlsOptions.key = connectOpts.key;
|
||||
hasTlsOptions = true;
|
||||
}
|
||||
if (connectOpts.passphrase) {
|
||||
newTlsOptions.passphrase = connectOpts.passphrase;
|
||||
hasTlsOptions = true;
|
||||
}
|
||||
|
||||
if (hasTlsOptions) {
|
||||
tls = newTlsOptions;
|
||||
}
|
||||
}
|
||||
|
||||
// Build proxy - check connectOpts.proxy first, then agent.proxy
|
||||
const agentProxy = connectOpts?.proxy || agent?.proxy;
|
||||
if (agentProxy) {
|
||||
const proxyUrl = agentProxy?.href || agentProxy;
|
||||
// Get proxy headers from agent.proxyHeaders
|
||||
if (agent?.proxyHeaders) {
|
||||
const proxyHeaders = $isCallable(agent.proxyHeaders) ? agent.proxyHeaders.$call(agent) : agent.proxyHeaders;
|
||||
proxy = { url: proxyUrl, headers: proxyHeaders };
|
||||
} else {
|
||||
proxy = proxyUrl;
|
||||
}
|
||||
}
|
||||
|
||||
return { tls, proxy };
|
||||
}
|
||||
|
||||
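To make the extraction above concrete, here is a sketch of the kind of agent object extractAgentOptions expects and what it yields. Field names come from the code above; the HttpsProxyAgent-style values are assumptions for illustration:

// Shape consumed by extractAgentOptions (illustrative values).
const caPem = "<PEM-encoded CA certificate>";
const agent = {
  proxy: new URL("http://127.0.0.1:8080"),                       // may also be a plain string
  proxyHeaders: { "Proxy-Authorization": "Basic dXNlcjpwYXNz" }, // or a function returning headers
  options: { rejectUnauthorized: false, ca: caPem },             // `connectOpts` is checked first
};

// Expected result, per the return value above:
// { tls:   { rejectUnauthorized: false, ca: caPem },
//   proxy: { url: "http://127.0.0.1:8080/", headers: { "Proxy-Authorization": "Basic dXNlcjpwYXNz" } } }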
const eventIds = {
|
||||
open: 1,
|
||||
close: 2,
|
||||
@@ -101,48 +159,12 @@ class BunWebSocket extends EventEmitter {
|
||||
// Extract from agent if provided (like HttpsProxyAgent)
|
||||
agent = options?.agent;
|
||||
if ($isObject(agent)) {
|
||||
// Get proxy from agent.proxy (can be URL object or string)
|
||||
if (!proxy && agent.proxy) {
|
||||
const agentProxy = agent.proxy?.href || agent.proxy;
|
||||
// Get proxy headers from agent.proxyHeaders
|
||||
if (agent.proxyHeaders) {
|
||||
const proxyHeaders = $isCallable(agent.proxyHeaders) ? agent.proxyHeaders.$call(agent) : agent.proxyHeaders;
|
||||
proxy = { url: agentProxy, headers: proxyHeaders };
|
||||
} else {
|
||||
proxy = agentProxy;
|
||||
}
|
||||
const agentOpts = extractAgentOptions(agent);
|
||||
if (!proxy && agentOpts.proxy) {
|
||||
proxy = agentOpts.proxy;
|
||||
}
|
||||
// Get TLS options from agent.connectOpts or agent.options
|
||||
// Only extract specific TLS options we support (not ALPNProtocols, etc.)
|
||||
if (!tlsOptions) {
|
||||
const agentOpts = agent.connectOpts || agent.options;
|
||||
if ($isObject(agentOpts)) {
|
||||
const newTlsOptions = {};
|
||||
let hasTlsOptions = false;
|
||||
if (agentOpts.rejectUnauthorized !== undefined) {
|
||||
newTlsOptions.rejectUnauthorized = agentOpts.rejectUnauthorized;
|
||||
hasTlsOptions = true;
|
||||
}
|
||||
if (agentOpts.ca) {
|
||||
newTlsOptions.ca = agentOpts.ca;
|
||||
hasTlsOptions = true;
|
||||
}
|
||||
if (agentOpts.cert) {
|
||||
newTlsOptions.cert = agentOpts.cert;
|
||||
hasTlsOptions = true;
|
||||
}
|
||||
if (agentOpts.key) {
|
||||
newTlsOptions.key = agentOpts.key;
|
||||
hasTlsOptions = true;
|
||||
}
|
||||
if (agentOpts.passphrase) {
|
||||
newTlsOptions.passphrase = agentOpts.passphrase;
|
||||
hasTlsOptions = true;
|
||||
}
|
||||
if (hasTlsOptions) {
|
||||
tlsOptions = newTlsOptions;
|
||||
}
|
||||
}
|
||||
if (!tlsOptions && agentOpts.tls) {
|
||||
tlsOptions = agentOpts.tls;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -184,7 +206,7 @@ class BunWebSocket extends EventEmitter {
|
||||
end: () => {
|
||||
if (!didCallEnd) {
|
||||
didCallEnd = true;
|
||||
this.#createWebSocket(url, protocols, headers, method, proxy, tlsOptions, agent);
|
||||
this.#createWebSocket(url, protocols, headers, method, proxy, tlsOptions);
|
||||
}
|
||||
},
|
||||
write() {},
|
||||
@@ -213,23 +235,22 @@ class BunWebSocket extends EventEmitter {
|
||||
EventEmitter.$call(nodeHttpClientRequestSimulated);
|
||||
finishRequest(nodeHttpClientRequestSimulated);
|
||||
if (!didCallEnd) {
|
||||
this.#createWebSocket(url, protocols, headers, method, proxy, tlsOptions, agent);
|
||||
this.#createWebSocket(url, protocols, headers, method, proxy, tlsOptions);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
this.#createWebSocket(url, protocols, headers, method, proxy, tlsOptions, agent);
|
||||
this.#createWebSocket(url, protocols, headers, method, proxy, tlsOptions);
|
||||
}
|
||||
|
||||
#createWebSocket(url, protocols, headers, method, proxy, tls, agent) {
|
||||
#createWebSocket(url, protocols, headers, method, proxy, tls) {
|
||||
let wsOptions;
|
||||
if (headers || proxy || tls || agent) {
|
||||
if (headers || proxy || tls) {
|
||||
wsOptions = { protocols };
|
||||
if (headers) wsOptions.headers = headers;
|
||||
if (method) wsOptions.method = method;
|
||||
if (proxy) wsOptions.proxy = proxy;
|
||||
if (tls) wsOptions.tls = tls;
|
||||
if (agent) wsOptions.agent = agent;
|
||||
} else {
|
||||
wsOptions = protocols;
|
||||
}
|
||||
|
||||
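From the consumer's side, the point of reading these agent fields is to make the common ws-plus-proxy-agent pattern work. A hedged sketch of that usage (package APIs may differ between versions; nothing here is installed by this diff):

import WebSocket from "ws";
import { HttpsProxyAgent } from "https-proxy-agent";

// The shim above pulls proxy/TLS settings out of `agent` and forwards them
// to Bun's native WebSocket options.
const agent = new HttpsProxyAgent("http://127.0.0.1:8080");
const socket = new WebSocket("wss://example.com/feed", { agent });
socket.on("open", () => socket.send("hello"));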
@@ -170,6 +170,41 @@ pub const BufferReadStream = struct {
    // }
};

/// Validates that a symlink target doesn't escape the extraction directory.
/// Returns true if the symlink is safe (target stays within extraction dir),
/// false if it would escape (e.g., via ../ traversal or absolute path).
///
/// The check works by resolving the symlink target relative to the symlink's
/// directory location using a fake root, then checking if the result stays
/// within that fake root.
fn isSymlinkTargetSafe(symlink_path: []const u8, link_target: [:0]const u8, symlink_join_buf: *?*bun.PathBuffer) bool {
    // Absolute symlink targets are never safe - they could point anywhere
    if (link_target.len > 0 and link_target[0] == '/') {
        return false;
    }

    // Get the directory containing the symlink
    const symlink_dir = std.fs.path.dirname(symlink_path) orelse "";

    // Use a fake root to resolve the path and check if it escapes
    const fake_root = "/packages/";

    const join_buf = symlink_join_buf.* orelse join_buf: {
        symlink_join_buf.* = bun.path_buffer_pool.get();
        break :join_buf symlink_join_buf.*.?;
    };

    const resolved = bun.path.joinAbsStringBuf(
        fake_root,
        join_buf,
        &.{ symlink_dir, link_target },
        .posix,
    );

    // If the resolved path doesn't start with our fake root, it escaped
    return strings.hasPrefix(resolved, fake_root);
}
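The same fake-root idea expressed as a standalone TypeScript sketch, for readers who want to see the check in isolation (this is not the Zig implementation above, just its logic):

import path from "node:path";

// Resolve the link target against the symlink's directory under a fake root,
// then verify the result never leaves that root.
function isSymlinkTargetSafe(symlinkPath: string, linkTarget: string): boolean {
  if (linkTarget.startsWith("/")) return false; // absolute targets are never safe
  const fakeRoot = "/packages/";
  const symlinkDir = path.posix.dirname(symlinkPath);
  const resolved = path.posix.resolve(fakeRoot, symlinkDir, linkTarget);
  return (resolved + "/").startsWith(fakeRoot);
}

// isSymlinkTargetSafe("pkg/link", "src")              -> true  (stays inside)
// isSymlinkTargetSafe("pkg/link", "../../../../tmp")  -> false (escapes)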
pub const Archiver = struct {
    // impl: *lib.archive = undefined,
    // buf: []const u8 = undefined,
@@ -315,6 +350,9 @@ pub const Archiver = struct {
    var count: u32 = 0;
    const dir_fd = dir.fd;

    var symlink_join_buf: ?*bun.PathBuffer = null;
    defer if (symlink_join_buf) |join_buf| bun.path_buffer_pool.put(join_buf);

    var normalized_buf: bun.OSPathBuffer = undefined;
    var use_pwrite = Environment.isPosix;
    var use_lseek = true;
@@ -435,6 +473,19 @@ pub const Archiver = struct {
    .sym_link => {
        const link_target = entry.symlink();
        if (Environment.isPosix) {
            // Validate that the symlink target doesn't escape the extraction directory.
            // This prevents path traversal attacks where a malicious tarball creates a symlink
            // pointing outside (e.g., to /tmp), then writes files through that symlink.
            if (!isSymlinkTargetSafe(path_slice, link_target, &symlink_join_buf)) {
                // Skip symlinks that would escape the extraction directory
                if (options.log) {
                    Output.warn("Skipping symlink with unsafe target: {f} -> {s}\n", .{
                        bun.fmt.fmtOSPath(path_slice, .{}),
                        link_target,
                    });
                }
                continue;
            }
            bun.sys.symlinkat(link_target, .fromNative(dir_fd), path).unwrap() catch |err| brk: {
                switch (err) {
                    error.EPERM, error.ENOENT => {
@@ -77,8 +77,8 @@ pub const MachoFile = struct {
    found_bun = true;
    original_fileoff = sect.offset;
    original_vmaddr = sect.addr;
    original_data_end = original_fileoff + blob_alignment;
    original_segsize = sect.size;
    original_data_end = command.fileoff + command.filesize;
    original_segsize = command.filesize;
    self.segment = command;
    self.section = sect.*;
[File diff suppressed because it is too large]
@@ -723,68 +723,6 @@ test Lexer {
|
||||
}
|
||||
}
|
||||
|
||||
/// High-level helper that expands brace patterns in a string.
|
||||
/// Returns a list of expanded strings. Caller owns the returned memory.
|
||||
/// On error or if no expansion is needed, returns the input as a single-element list.
|
||||
pub fn expandBracesAlloc(input: []const u8, allocator: Allocator) std.ArrayListUnmanaged([]const u8) {
|
||||
var out: std.ArrayListUnmanaged([]const u8) = .{};
|
||||
|
||||
// Use arena for temporary tokenization
|
||||
var arena = std.heap.ArenaAllocator.init(allocator);
|
||||
defer arena.deinit();
|
||||
const arena_alloc = arena.allocator();
|
||||
|
||||
// Tokenize - use appropriate lexer based on content
|
||||
const lexer_output = if (bun.strings.isAllASCII(input))
|
||||
Lexer.tokenize(arena_alloc, input) catch {
|
||||
out.append(allocator, allocator.dupe(u8, input) catch return out) catch {};
|
||||
return out;
|
||||
}
|
||||
else
|
||||
NewLexer(.wtf8).tokenize(arena_alloc, input) catch {
|
||||
out.append(allocator, allocator.dupe(u8, input) catch return out) catch {};
|
||||
return out;
|
||||
};
|
||||
|
||||
const expansion_count = calculateExpandedAmount(lexer_output.tokens.items[0..]);
|
||||
if (expansion_count == 0) {
|
||||
out.append(allocator, allocator.dupe(u8, input) catch return out) catch {};
|
||||
return out;
|
||||
}
|
||||
|
||||
// Allocate expanded strings
|
||||
const expanded_strings = arena_alloc.alloc(std.array_list.Managed(u8), expansion_count) catch {
|
||||
out.append(allocator, allocator.dupe(u8, input) catch return out) catch {};
|
||||
return out;
|
||||
};
|
||||
|
||||
for (0..expansion_count) |i| {
|
||||
expanded_strings[i] = std.array_list.Managed(u8).init(allocator);
|
||||
}
|
||||
|
||||
// Perform brace expansion
|
||||
expand(
|
||||
arena_alloc,
|
||||
lexer_output.tokens.items[0..],
|
||||
expanded_strings,
|
||||
lexer_output.contains_nested,
|
||||
) catch {
|
||||
for (expanded_strings) |*s| s.deinit();
|
||||
out.append(allocator, allocator.dupe(u8, input) catch return out) catch {};
|
||||
return out;
|
||||
};
|
||||
|
||||
// Collect results
|
||||
for (expanded_strings) |*s| {
|
||||
const slice = s.toOwnedSlice() catch "";
|
||||
if (slice.len > 0) {
|
||||
out.append(allocator, slice) catch {};
|
||||
}
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
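For reference, the expansion semantics exercised by this helper correspond to ordinary shell brace expansion. A small illustration of the expected input/output relationship (not the Zig code path itself):

// "file.{ts,tsx}"  -> ["file.ts", "file.tsx"]
// "a{b,c{d,e}}f"   -> ["abf", "acdf", "acef"]   (nested expansion)
// "no-braces-here" -> ["no-braces-here"]        (returned as a single element)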
const SmolStr = @import("../string.zig").SmolStr;
|
||||
|
||||
const Encoding = @import("./shell.zig").StringEncoding;
|
||||
|
||||
@@ -444,10 +444,10 @@ pub const Interpreter = struct {
|
||||
|
||||
if (comptime free_buffered_io) {
|
||||
if (this._buffered_stdout == .owned) {
|
||||
this._buffered_stdout.owned.deinit(bun.default_allocator);
|
||||
this._buffered_stdout.owned.clearAndFree(bun.default_allocator);
|
||||
}
|
||||
if (this._buffered_stderr == .owned) {
|
||||
this._buffered_stderr.owned.deinit(bun.default_allocator);
|
||||
this._buffered_stderr.owned.clearAndFree(bun.default_allocator);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -994,7 +994,7 @@ pub const Interpreter = struct {
|
||||
interp.exit_code = exit_code;
|
||||
switch (try interp.run()) {
|
||||
.err => |e| {
|
||||
interp.deinitEverything();
|
||||
interp.#deinitFromExec();
|
||||
bun.Output.err(e, "Failed to run script <b>{s}<r>", .{std.fs.path.basename(path)});
|
||||
bun.Global.exit(1);
|
||||
return 1;
|
||||
@@ -1003,7 +1003,7 @@ pub const Interpreter = struct {
|
||||
}
|
||||
mini.tick(&is_done, @as(fn (*anyopaque) bool, IsDone.isDone));
|
||||
const code = interp.exit_code.?;
|
||||
interp.deinitEverything();
|
||||
interp.#deinitFromExec();
|
||||
return code;
|
||||
}
|
||||
|
||||
@@ -1061,7 +1061,7 @@ pub const Interpreter = struct {
|
||||
interp.exit_code = exit_code;
|
||||
switch (try interp.run()) {
|
||||
.err => |e| {
|
||||
interp.deinitEverything();
|
||||
interp.#deinitFromExec();
|
||||
bun.Output.err(e, "Failed to run script <b>{s}<r>", .{path_for_errors});
|
||||
bun.Global.exit(1);
|
||||
return 1;
|
||||
@@ -1070,7 +1070,7 @@ pub const Interpreter = struct {
|
||||
}
|
||||
mini.tick(&is_done, @as(fn (*anyopaque) bool, IsDone.isDone));
|
||||
const code = interp.exit_code.?;
|
||||
interp.deinitEverything();
|
||||
interp.#deinitFromExec();
|
||||
return code;
|
||||
}
|
||||
|
||||
@@ -1142,7 +1142,7 @@ pub const Interpreter = struct {
|
||||
_ = callframe; // autofix
|
||||
|
||||
if (this.setupIOBeforeRun().asErr()) |e| {
|
||||
defer this.deinitEverything();
|
||||
defer this.#deinitFromExec();
|
||||
const shellerr = bun.shell.ShellErr.newSys(e);
|
||||
return try throwShellErr(&shellerr, .{ .js = globalThis.bunVM().event_loop });
|
||||
}
|
||||
@@ -1191,20 +1191,21 @@ pub const Interpreter = struct {
|
||||
defer decrPendingActivityFlag(&this.has_pending_activity);
|
||||
|
||||
if (this.event_loop == .js) {
|
||||
defer this.deinitAfterJSRun();
|
||||
this.exit_code = exit_code;
|
||||
const this_jsvalue = this.this_jsvalue;
|
||||
if (this_jsvalue != .zero) {
|
||||
if (jsc.Codegen.JSShellInterpreter.resolveGetCached(this_jsvalue)) |resolve| {
|
||||
const loop = this.event_loop.js;
|
||||
const globalThis = this.globalThis;
|
||||
this.this_jsvalue = .zero;
|
||||
const buffered_stdout = this.getBufferedStdout(globalThis);
|
||||
const buffered_stderr = this.getBufferedStderr(globalThis);
|
||||
this.keep_alive.disable();
|
||||
this.#derefRootShellAndIOIfNeeded(true);
|
||||
loop.enter();
|
||||
_ = resolve.call(globalThis, .js_undefined, &.{
|
||||
JSValue.jsNumberFromU16(exit_code),
|
||||
this.getBufferedStdout(globalThis),
|
||||
this.getBufferedStderr(globalThis),
|
||||
buffered_stdout,
|
||||
buffered_stderr,
|
||||
}) catch |err| globalThis.reportActiveExceptionAsUnhandled(err);
|
||||
jsc.Codegen.JSShellInterpreter.resolveSetCached(this_jsvalue, globalThis, .js_undefined);
|
||||
jsc.Codegen.JSShellInterpreter.rejectSetCached(this_jsvalue, globalThis, .js_undefined);
|
||||
@@ -1219,35 +1220,45 @@ pub const Interpreter = struct {
|
||||
return .done;
|
||||
}
|
||||
|
||||
fn deinitAfterJSRun(this: *ThisInterpreter) void {
|
||||
log("Interpreter(0x{x}) deinitAfterJSRun", .{@intFromPtr(this)});
|
||||
this.root_io.deref();
|
||||
this.keep_alive.disable();
|
||||
this.root_shell.deinitImpl(false, false);
|
||||
fn #derefRootShellAndIOIfNeeded(this: *ThisInterpreter, free_buffered_io: bool) void {
|
||||
if (free_buffered_io) {
|
||||
// Can safely be called multiple times.
|
||||
if (this.root_shell._buffered_stderr == .owned) {
|
||||
this.root_shell._buffered_stderr.owned.clearAndFree(bun.default_allocator);
|
||||
}
|
||||
if (this.root_shell._buffered_stdout == .owned) {
|
||||
this.root_shell._buffered_stdout.owned.clearAndFree(bun.default_allocator);
|
||||
}
|
||||
}
|
||||
|
||||
// Has this already been finalized?
|
||||
if (this.this_jsvalue != .zero) {
|
||||
// Cannot be safely called multiple times.
|
||||
this.root_io.deref();
|
||||
this.root_shell.deinitImpl(false, false);
|
||||
}
|
||||
|
||||
this.this_jsvalue = .zero;
|
||||
}
|
||||
|
||||
fn deinitFromFinalizer(this: *ThisInterpreter) void {
|
||||
if (this.root_shell._buffered_stderr == .owned) {
|
||||
this.root_shell._buffered_stderr.owned.deinit(bun.default_allocator);
|
||||
}
|
||||
if (this.root_shell._buffered_stdout == .owned) {
|
||||
this.root_shell._buffered_stdout.owned.deinit(bun.default_allocator);
|
||||
}
|
||||
this.this_jsvalue = .zero;
|
||||
this.#derefRootShellAndIOIfNeeded(true);
|
||||
this.keep_alive.disable();
|
||||
this.args.deinit();
|
||||
this.allocator.destroy(this);
|
||||
}
|
||||
|
||||
fn deinitEverything(this: *ThisInterpreter) void {
|
||||
fn #deinitFromExec(this: *ThisInterpreter) void {
|
||||
log("deinit interpreter", .{});
|
||||
|
||||
this.this_jsvalue = .zero;
|
||||
this.root_io.deref();
|
||||
this.root_shell.deinitImpl(false, true);
|
||||
|
||||
for (this.vm_args_utf8.items[0..]) |str| {
|
||||
str.deinit();
|
||||
}
|
||||
this.vm_args_utf8.deinit();
|
||||
this.this_jsvalue = .zero;
|
||||
this.allocator.destroy(this);
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@ pub const interpret = @import("./interpreter.zig");
pub const subproc = @import("./subproc.zig");

pub const AllocScope = @import("./AllocScope.zig");
pub const TraceInterpreter = @import("./TraceInterpreter.zig");

pub const EnvMap = interpret.EnvMap;
pub const EnvStr = interpret.EnvStr;
test/cli/install/symlink-path-traversal.test.ts (new file, 391 lines)
@@ -0,0 +1,391 @@
|
||||
import { spawn } from "bun";
|
||||
import { describe, expect, it, setDefaultTimeout } from "bun:test";
|
||||
import { access, lstat, readlink, rm, writeFile } from "fs/promises";
|
||||
import { bunExe, bunEnv as env, tempDir } from "harness";
|
||||
import { tmpdir } from "os";
|
||||
import { join } from "path";
|
||||
|
||||
// This test validates the fix for a symlink path traversal vulnerability in tarball extraction.
|
||||
// CVE: Path traversal via symlink when installing packages
|
||||
//
|
||||
// The attack works as follows:
|
||||
// 1. Create a tarball with a symlink entry pointing outside (e.g., symlink -> ../../../tmp)
|
||||
// 2. Include a file entry through that symlink path (e.g., symlink/pwned.txt)
|
||||
// 3. On extraction, the symlink is created first
|
||||
// 4. Then when the file is written through the symlink path, it escapes the extraction directory
|
||||
//
|
||||
// The fix validates symlink targets before creating them, blocking those that would escape.
|
||||
//
|
||||
// Note: These tests only run on POSIX systems as the symlink extraction code is POSIX-only.
|
||||
|
||||
// Platform-agnostic temp directory for testing path traversal
|
||||
const systemTmpDir = tmpdir();
|
||||
const pwnedFilePath = join(systemTmpDir, "pwned.txt");
|
||||
|
||||
// Helper to create tar files programmatically
|
||||
function createTarHeader(
|
||||
name: string,
|
||||
size: number,
|
||||
type: "0" | "2" | "5", // 0=file, 2=symlink, 5=directory
|
||||
linkname: string = "",
|
||||
): Uint8Array {
|
||||
const header = new Uint8Array(512);
|
||||
const encoder = new TextEncoder();
|
||||
|
||||
// Name (100 bytes)
|
||||
const nameBytes = encoder.encode(name);
|
||||
header.set(nameBytes.slice(0, 100), 0);
|
||||
|
||||
// Mode (8 bytes) - octal
|
||||
const modeStr = type === "5" ? "0000755" : "0000644";
|
||||
header.set(encoder.encode(modeStr.padStart(7, "0") + " "), 100);
|
||||
|
||||
// UID (8 bytes)
|
||||
header.set(encoder.encode("0000000 "), 108);
|
||||
|
||||
// GID (8 bytes)
|
||||
header.set(encoder.encode("0000000 "), 116);
|
||||
|
||||
// Size (12 bytes) - octal
|
||||
const sizeStr = size.toString(8).padStart(11, "0") + " ";
|
||||
header.set(encoder.encode(sizeStr), 124);
|
||||
|
||||
// Mtime (12 bytes)
|
||||
const mtime = Math.floor(Date.now() / 1000)
|
||||
.toString(8)
|
||||
.padStart(11, "0");
|
||||
header.set(encoder.encode(mtime + " "), 136);
|
||||
|
||||
// Checksum placeholder (8 spaces)
|
||||
header.set(encoder.encode(" "), 148);
|
||||
|
||||
// Type flag (1 byte)
|
||||
header[156] = type.charCodeAt(0);
|
||||
|
||||
// Link name (100 bytes) - for symlinks
|
||||
if (linkname) {
|
||||
const linkBytes = encoder.encode(linkname);
|
||||
header.set(linkBytes.slice(0, 100), 157);
|
||||
}
|
||||
|
||||
// USTAR magic
|
||||
header.set(encoder.encode("ustar"), 257);
|
||||
header[262] = 0; // null terminator
|
||||
header.set(encoder.encode("00"), 263);
|
||||
|
||||
// Calculate and set checksum
|
||||
let checksum = 0;
|
||||
for (let i = 0; i < 512; i++) {
|
||||
checksum += header[i];
|
||||
}
|
||||
const checksumStr = checksum.toString(8).padStart(6, "0") + "\0 ";
|
||||
header.set(encoder.encode(checksumStr), 148);
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
function padToBlock(data: Uint8Array): Uint8Array[] {
|
||||
const result = [data];
|
||||
const remainder = data.length % 512;
|
||||
if (remainder > 0) {
|
||||
result.push(new Uint8Array(512 - remainder));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function createTarball(
|
||||
entries: Array<{ name: string; type: "file" | "symlink" | "dir"; content?: string; linkname?: string }>,
|
||||
): Uint8Array {
|
||||
const blocks: Uint8Array[] = [];
|
||||
const encoder = new TextEncoder();
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.type === "dir") {
|
||||
blocks.push(createTarHeader(entry.name, 0, "5"));
|
||||
} else if (entry.type === "symlink") {
|
||||
blocks.push(createTarHeader(entry.name, 0, "2", entry.linkname || ""));
|
||||
} else {
|
||||
const content = encoder.encode(entry.content || "");
|
||||
blocks.push(createTarHeader(entry.name, content.length, "0"));
|
||||
blocks.push(...padToBlock(content));
|
||||
}
|
||||
}
|
||||
|
||||
// End of archive (two empty blocks)
|
||||
blocks.push(new Uint8Array(512));
|
||||
blocks.push(new Uint8Array(512));
|
||||
|
||||
// Combine all blocks
|
||||
const totalLength = blocks.reduce((sum, b) => sum + b.length, 0);
|
||||
const tarball = new Uint8Array(totalLength);
|
||||
let offset = 0;
|
||||
for (const block of blocks) {
|
||||
tarball.set(block, offset);
|
||||
offset += block.length;
|
||||
}
|
||||
|
||||
return Bun.gzipSync(tarball);
|
||||
}
|
||||
|
||||
// Skip on Windows - symlink extraction is POSIX-only
|
||||
const isWindows = process.platform === "win32";
|
||||
|
||||
describe.concurrent.skipIf(isWindows)("symlink path traversal protection", () => {
|
||||
setDefaultTimeout(60000);
|
||||
|
||||
it("should skip symlinks with relative path traversal targets", async () => {
|
||||
// This reproduces the exact attack from the security report:
|
||||
// 1. Symlink test-package/symlink-to-tmp -> ../../../../../../../<tmpdir>
|
||||
// 2. File test-package/symlink-to-tmp/pwned.txt
|
||||
|
||||
// Calculate relative path to system temp directory (enough ../ to escape)
|
||||
const symlinkTarget = "../../../../../../../" + systemTmpDir.replace(/^\//, "");
|
||||
|
||||
const tarball = createTarball([
|
||||
{ name: "test-package/", type: "dir" },
|
||||
{
|
||||
name: "test-package/package.json",
|
||||
type: "file",
|
||||
content: JSON.stringify({ name: "test-package", version: "1.0.0" }),
|
||||
},
|
||||
// Malicious symlink pointing way outside
|
||||
{ name: "test-package/symlink-to-tmp", type: "symlink", linkname: symlinkTarget },
|
||||
// File that would be written through the symlink
|
||||
{ name: "test-package/symlink-to-tmp/pwned.txt", type: "file", content: "Arbitrary file write" },
|
||||
]);
|
||||
|
||||
const server = Bun.serve({
|
||||
port: 0,
|
||||
fetch(req) {
|
||||
const url = new URL(req.url);
|
||||
if (url.pathname.includes("/tarball/") || url.pathname.endsWith(".tar.gz")) {
|
||||
return new Response(tarball, { headers: { "Content-Type": "application/gzip" } });
|
||||
}
|
||||
if (url.pathname.includes("/repos/")) {
|
||||
return Response.json({ default_branch: "main" });
|
||||
}
|
||||
return new Response("Not Found", { status: 404 });
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
using dir = tempDir("symlink-traversal-test", {});
|
||||
const installDir = String(dir);
|
||||
|
||||
await writeFile(
|
||||
join(installDir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "test-app",
|
||||
version: "1.0.0",
|
||||
dependencies: { "test-package": "github:user/repo#main" },
|
||||
}),
|
||||
);
|
||||
|
||||
await writeFile(join(installDir, "bunfig.toml"), `[install]\ncache = false\n`);
|
||||
|
||||
const proc = spawn({
|
||||
cmd: [bunExe(), "install"],
|
||||
cwd: installDir,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
env: { ...env, GITHUB_API_URL: `http://localhost:${server.port}` },
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
// The install should complete successfully (exit code 0)
|
||||
// If it fails, show diagnostics
|
||||
if (exitCode !== 0) {
|
||||
console.error("Install failed with exit code:", exitCode);
|
||||
console.error("stdout:", stdout);
|
||||
console.error("stderr:", stderr);
|
||||
}
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Verify stderr doesn't leak absolute paths like the system temp directory
|
||||
expect(stderr).not.toContain(systemTmpDir);
|
||||
|
||||
// CRITICAL CHECK: Verify no file was written to system temp directory
|
||||
let fileInTmp = false;
|
||||
try {
|
||||
await access(pwnedFilePath);
|
||||
fileInTmp = true;
|
||||
} catch {
|
||||
fileInTmp = false;
|
||||
}
|
||||
expect(fileInTmp).toBe(false);
|
||||
|
||||
// Verify the malicious symlink was NOT created as a symlink
|
||||
// (It may exist as a directory since the tarball has a file entry through it)
|
||||
const pkgDir = join(installDir, "node_modules", "test-package");
|
||||
const symlinkPath = join(pkgDir, "symlink-to-tmp");
|
||||
try {
|
||||
const stats = await lstat(symlinkPath);
|
||||
// If it exists, it must NOT be a symlink (directory is OK - that's what happens
|
||||
// when the symlink is blocked but a file tries to write through it)
|
||||
expect(stats.isSymbolicLink()).toBe(false);
|
||||
} catch {
|
||||
// Path doesn't exist at all - also acceptable
|
||||
}
|
||||
} finally {
|
||||
server.stop();
|
||||
// Clean up pwned file in case the test failed
|
||||
try {
|
||||
await rm(pwnedFilePath, { force: true });
|
||||
} catch {}
|
||||
}
|
||||
});
|
||||
|
||||
it("should skip symlinks with absolute path targets", async () => {
|
||||
const tarball = createTarball([
|
||||
{ name: "test-package/", type: "dir" },
|
||||
{
|
||||
name: "test-package/package.json",
|
||||
type: "file",
|
||||
content: JSON.stringify({ name: "test-package", version: "1.0.0" }),
|
||||
},
|
||||
// Absolute symlink - directly points to system temp directory
|
||||
{ name: "test-package/abs-symlink", type: "symlink", linkname: systemTmpDir },
|
||||
]);
|
||||
|
||||
const server = Bun.serve({
|
||||
port: 0,
|
||||
fetch(req) {
|
||||
const url = new URL(req.url);
|
||||
if (url.pathname.includes("/tarball/") || url.pathname.endsWith(".tar.gz")) {
|
||||
return new Response(tarball, { headers: { "Content-Type": "application/gzip" } });
|
||||
}
|
||||
if (url.pathname.includes("/repos/")) {
|
||||
return Response.json({ default_branch: "main" });
|
||||
}
|
||||
return new Response("Not Found", { status: 404 });
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
using dir = tempDir("absolute-symlink-test", {});
|
||||
const installDir = String(dir);
|
||||
|
||||
await writeFile(
|
||||
join(installDir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "test-app",
|
||||
version: "1.0.0",
|
||||
dependencies: { "test-package": "github:user/repo#main" },
|
||||
}),
|
||||
);
|
||||
|
||||
await writeFile(join(installDir, "bunfig.toml"), `[install]\ncache = false\n`);
|
||||
|
||||
const proc = spawn({
|
||||
cmd: [bunExe(), "install"],
|
||||
cwd: installDir,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
env: { ...env, GITHUB_API_URL: `http://localhost:${server.port}` },
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
// The install should complete successfully
|
||||
if (exitCode !== 0) {
|
||||
console.error("Install failed with exit code:", exitCode);
|
||||
console.error("stdout:", stdout);
|
||||
console.error("stderr:", stderr);
|
||||
}
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Check that no absolute symlink was created
|
||||
const pkgDir = join(installDir, "node_modules", "test-package");
|
||||
try {
|
||||
const symlinkPath = join(pkgDir, "abs-symlink");
|
||||
const stats = await lstat(symlinkPath);
|
||||
if (stats.isSymbolicLink()) {
|
||||
const target = await readlink(symlinkPath);
|
||||
// Absolute symlinks should be blocked
|
||||
expect(target.startsWith("/")).toBe(false);
|
||||
}
|
||||
} catch {
|
||||
// Symlink doesn't exist - expected behavior
|
||||
}
|
||||
} finally {
|
||||
server.stop();
|
||||
}
|
||||
});
|
||||
|
||||
it("should allow safe relative symlinks within the package (install succeeds)", async () => {
|
||||
// This test verifies that safe symlinks don't cause extraction to fail.
|
||||
// Note: Safe symlinks ARE created in the cache during extraction, but bun's
|
||||
// install process doesn't preserve them in the final node_modules.
|
||||
// We verify the install succeeds, which proves safe symlinks are allowed.
|
||||
const tarball = createTarball([
|
||||
{ name: "test-package/", type: "dir" },
|
||||
{
|
||||
name: "test-package/package.json",
|
||||
type: "file",
|
||||
content: JSON.stringify({ name: "test-package", version: "1.0.0" }),
|
||||
},
|
||||
{ name: "test-package/src/", type: "dir" },
|
||||
{ name: "test-package/src/index.js", type: "file", content: "module.exports = 'hello';" },
|
||||
// Safe symlink - points to sibling directory (stays within package)
|
||||
{ name: "test-package/link-to-src", type: "symlink", linkname: "src" },
|
||||
// Safe symlink - relative path within same directory
|
||||
{ name: "test-package/src/link-to-index", type: "symlink", linkname: "./index.js" },
|
||||
]);
|
||||
|
||||
const server = Bun.serve({
|
||||
port: 0,
|
||||
fetch(req) {
|
||||
const url = new URL(req.url);
|
||||
if (url.pathname.includes("/tarball/") || url.pathname.endsWith(".tar.gz")) {
|
||||
return new Response(tarball, { headers: { "Content-Type": "application/gzip" } });
|
||||
}
|
||||
if (url.pathname.includes("/repos/")) {
|
||||
return Response.json({ default_branch: "main" });
|
||||
}
|
||||
return new Response("Not Found", { status: 404 });
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
using dir = tempDir("safe-symlink-test", {});
|
||||
const installDir = String(dir);
|
||||
|
||||
await writeFile(
|
||||
join(installDir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "test-app",
|
||||
version: "1.0.0",
|
||||
dependencies: { "test-package": "github:user/repo#main" },
|
||||
}),
|
||||
);
|
||||
|
||||
await writeFile(join(installDir, "bunfig.toml"), `[install]\ncache = false\n`);
|
||||
|
||||
const proc = spawn({
|
||||
cmd: [bunExe(), "install"],
|
||||
cwd: installDir,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
env: { ...env, GITHUB_API_URL: `http://localhost:${server.port}` },
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
// Install should succeed - safe symlinks should not cause errors
|
||||
if (exitCode !== 0) {
|
||||
console.error("Install failed with exit code:", exitCode);
|
||||
console.error("stdout:", stdout);
|
||||
console.error("stderr:", stderr);
|
||||
}
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Verify package was installed (package.json should exist)
|
||||
const pkgDir = join(installDir, "node_modules", "test-package");
|
||||
const pkgJsonPath = join(pkgDir, "package.json");
|
||||
await access(pkgJsonPath); // Throws if doesn't exist
|
||||
} finally {
|
||||
server.stop();
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -1,417 +0,0 @@
|
||||
import { $ } from "bun";
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { tempDir } from "harness";
|
||||
|
||||
// Normalize path separators for cross-platform tests
|
||||
const normalizePath = (p: string) => p.replaceAll("\\", "/");
|
||||
|
||||
// Permission flags (octal) - mirrors the Zig constants
|
||||
const Permission = {
|
||||
O_RDONLY: 0o0,
|
||||
O_WRONLY: 0o1,
|
||||
O_RDWR: 0o2,
|
||||
O_CREAT: 0o100,
|
||||
O_EXCL: 0o200,
|
||||
O_TRUNC: 0o1000,
|
||||
O_APPEND: 0o2000,
|
||||
X_OK: 0o100000,
|
||||
DELETE: 0o200000,
|
||||
MKDIR: 0o400000,
|
||||
CHDIR: 0o1000000,
|
||||
ENV: 0o2000000,
|
||||
} as const;
|
||||
|
||||
// Convenience combinations
|
||||
const READ = Permission.O_RDONLY;
|
||||
const WRITE = Permission.O_WRONLY;
|
||||
const CREATE = Permission.O_CREAT | Permission.O_WRONLY;
|
||||
const CREATE_TRUNC = Permission.O_CREAT | Permission.O_TRUNC | Permission.O_WRONLY;
|
||||
const APPEND = Permission.O_APPEND | Permission.O_WRONLY;
|
||||
const EXECUTE = Permission.X_OK;
|
||||
|
||||
describe("Bun.$.trace", () => {
|
||||
test("returns trace result object", () => {
|
||||
const result = $.trace`echo hello`;
|
||||
expect(result).toHaveProperty("operations");
|
||||
expect(result).toHaveProperty("cwd");
|
||||
expect(result).toHaveProperty("success");
|
||||
expect(result).toHaveProperty("error");
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.error).toBeNull();
|
||||
expect(Array.isArray(result.operations)).toBe(true);
|
||||
});
|
||||
|
||||
test("traces echo command (builtin, no file access)", () => {
|
||||
const result = $.trace`echo hello world`;
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// echo is a builtin that runs in-process - no file access, no operations
|
||||
// It just writes to stdout (terminal) which doesn't require any permissions
|
||||
expect(result.operations.length).toBe(0);
|
||||
});
|
||||
|
||||
test("traces cat command with file read", () => {
|
||||
const result = $.trace`cat /tmp/test.txt`;
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// cat is a builtin - it reads files but runs in-process (no EXECUTE)
|
||||
const readOps = result.operations.filter(op => op.flags === READ && op.path?.endsWith("test.txt"));
|
||||
expect(readOps.length).toBe(1);
|
||||
expect(normalizePath(readOps[0].path!)).toBe("/tmp/test.txt");
|
||||
});
|
||||
|
||||
test("traces rm command with delete permission", () => {
|
||||
const result = $.trace`rm /tmp/to-delete.txt`;
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// Should have delete for the file
|
||||
const deleteOps = result.operations.filter(op => op.flags === Permission.DELETE);
|
||||
expect(deleteOps.length).toBe(1);
|
||||
expect(normalizePath(deleteOps[0].path!)).toBe("/tmp/to-delete.txt");
|
||||
});
|
||||
|
||||
test("traces mkdir command", () => {
|
||||
const result = $.trace`mkdir /tmp/newdir`;
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// Should have mkdir permission
|
||||
const mkdirOps = result.operations.filter(op => op.flags === Permission.MKDIR);
|
||||
expect(mkdirOps.length).toBe(1);
|
||||
expect(normalizePath(mkdirOps[0].path!)).toBe("/tmp/newdir");
|
||||
});
|
||||
|
||||
test("traces touch command with create permission", () => {
|
||||
const result = $.trace`touch /tmp/newfile.txt`;
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// Should have create permission
|
||||
const createOps = result.operations.filter(op => op.flags === CREATE);
|
||||
expect(createOps.length).toBe(1);
|
||||
expect(normalizePath(createOps[0].path!)).toBe("/tmp/newfile.txt");
|
||||
});
|
||||
|
||||
test("traces cp command with read and write", () => {
|
||||
const result = $.trace`cp /tmp/src.txt /tmp/dst.txt`;
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// Should have read for source
|
||||
const readOps = result.operations.filter(op => op.flags === READ && op.path?.endsWith("src.txt"));
|
||||
expect(readOps.length).toBe(1);
|
||||
|
||||
// Should have create for destination
|
||||
const writeOps = result.operations.filter(op => op.flags === CREATE && op.path?.endsWith("dst.txt"));
|
||||
expect(writeOps.length).toBe(1);
|
||||
});
|
||||
|
||||
test("traces mv command with read, delete, and write", () => {
|
||||
const result = $.trace`mv /tmp/old.txt /tmp/new.txt`;
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// Should have read+delete for source (combined in one operation)
|
||||
const srcOps = result.operations.filter(
|
||||
op => op.flags === (READ | Permission.DELETE) && op.path?.endsWith("old.txt"),
|
||||
);
|
||||
expect(srcOps.length).toBe(1);
|
||||
|
||||
// Should have create for destination
|
||||
const dstOps = result.operations.filter(op => op.flags === CREATE && op.path?.endsWith("new.txt"));
|
||||
expect(dstOps.length).toBe(1);
|
||||
});
|
||||
|
||||
test("traces cd command with chdir permission", () => {
|
||||
const result = $.trace`cd /tmp`;
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
const chdirOps = result.operations.filter(op => op.flags === Permission.CHDIR);
|
||||
expect(chdirOps.length).toBe(1);
|
||||
expect(normalizePath(chdirOps[0].path!)).toBe("/tmp");
|
||||
});
|
||||
|
||||
test("traces environment variable assignments with accumulated env", () => {
|
||||
const result = $.trace`FOO=1 BAR=2 echo test`;
|
||||
expect(result.success).toBe(true);
|
||||
|
    const envOps = result.operations.filter(op => op.flags === Permission.ENV);
    expect(envOps.length).toBe(2);
    // First op has FOO
    expect(envOps[0].env).toEqual({ FOO: "1" });
    // Second op has both FOO and BAR
    expect(envOps[1].env?.FOO).toBe("1");
    expect(envOps[1].env?.BAR).toBe("2");
  });

  test("traces export with env values", () => {
    const result = $.trace`export FOO=hello BAR=world`;
    expect(result.success).toBe(true);

    const envOps = result.operations.filter(op => op.flags === Permission.ENV);
    expect(envOps.length).toBe(1);
    expect(envOps[0].env?.FOO).toBe("hello");
    expect(envOps[0].env?.BAR).toBe("world");
  });

  test("traces output redirection combined with command", () => {
    const result = $.trace`echo hello > /tmp/output.txt`;
    expect(result.success).toBe(true);

    // echo is a builtin - redirect creates the output file (CREATE_TRUNC, no EXECUTE)
    const redirectOps = result.operations.filter(op => op.flags === CREATE_TRUNC && op.path?.endsWith("output.txt"));
    expect(redirectOps.length).toBe(1);
  });

  test("traces append redirection combined with command", () => {
    const result = $.trace`echo hello >> /tmp/append.txt`;
    expect(result.success).toBe(true);

    // echo is a builtin - append redirect opens file for appending (no EXECUTE)
    const appendOps = result.operations.filter(op => op.flags === APPEND && op.path?.endsWith("append.txt"));
    expect(appendOps.length).toBe(1);
  });

  test("traces input redirection with read and stdin stream", () => {
    const result = $.trace`cat < /tmp/input.txt`;
    expect(result.success).toBe(true);

    // Should have read for input file with stdin stream marker
    const stdinOps = result.operations.filter(
      op => op.flags === READ && op.path?.endsWith("input.txt") && op.stream === "stdin",
    );
    expect(stdinOps.length).toBe(1);
  });

  test("traces stderr redirection with stream marker", () => {
    const result = $.trace`cat /nonexistent 2> /tmp/err.txt`;
    expect(result.success).toBe(true);

    // Should have stderr stream for error redirect
    const stderrOps = result.operations.filter(op => op.stream === "stderr" && op.path?.endsWith("err.txt"));
    expect(stderrOps.length).toBe(1);
    expect(stderrOps[0].flags).toBe(CREATE_TRUNC);
  });

  test("stdout redirect has stream marker", () => {
    const result = $.trace`echo hello > /tmp/out.txt`;
    expect(result.success).toBe(true);

    const stdoutOps = result.operations.filter(op => op.stream === "stdout");
    expect(stdoutOps.length).toBe(1);
    expect(normalizePath(stdoutOps[0].path!)).toBe("/tmp/out.txt");
  });

  test("traces export command with env permission", () => {
    const result = $.trace`export FOO=bar`;
    expect(result.success).toBe(true);

    const envOps = result.operations.filter(op => op.flags === Permission.ENV);
    expect(envOps.length).toBeGreaterThan(0);
  });

  test("traces variable assignment with env permission", () => {
    const result = $.trace`FOO=bar echo $FOO`;
    expect(result.success).toBe(true);

    const envOps = result.operations.filter(op => op.flags === Permission.ENV);
    expect(envOps.length).toBeGreaterThan(0);
  });

  test("traces pipeline", () => {
    const result = $.trace`cat /tmp/file.txt | grep pattern`;
    expect(result.success).toBe(true);

    // cat is a builtin - reads file (no EXECUTE, no command field)
    const readOps = result.operations.filter(op => op.flags === READ && op.path?.endsWith("file.txt"));
    expect(readOps.length).toBe(1);

    // grep is external, should have execute permission and command field
    const grepOps = result.operations.filter(op => op.command === "grep" && (op.flags & EXECUTE) !== 0);
    expect(grepOps.length).toBe(1);
  });

  test("traces ls with directory read", () => {
    const result = $.trace`ls /tmp`;
    expect(result.success).toBe(true);

    const readOps = result.operations.filter(op => op.flags === READ && normalizePath(op.path || "") === "/tmp");
    expect(readOps.length).toBe(1);
  });

  test("traces ls without args (current dir)", () => {
    const result = $.trace`ls`;
    expect(result.success).toBe(true);

    // Should read current directory (.)
    const readOps = result.operations.filter(op => op.flags === READ);
    expect(readOps.length).toBe(1);
  });

  test("includes cwd in result", () => {
    const result = $.trace`echo test`;
    expect(result.cwd).toBeTruthy();
    expect(typeof result.cwd).toBe("string");
  });

  test("includes cwd in each operation", () => {
    const result = $.trace`cat /tmp/test.txt`;
    for (const op of result.operations) {
      expect(op.cwd).toBeTruthy();
      expect(typeof op.cwd).toBe("string");
    }
  });

  test("handles template literal interpolation", () => {
    const filename = "test.txt";
    const result = $.trace`cat /tmp/${filename}`;
    expect(result.success).toBe(true);

    const readOps = result.operations.filter(op => op.flags === READ && op.path?.endsWith("test.txt"));
    expect(readOps.length).toBe(1);
  });

  test("does not actually execute commands", () => {
    // This would fail if it actually ran, since the file doesn't exist
    const result = $.trace`cat /nonexistent/path/that/does/not/exist.txt`;
    expect(result.success).toBe(true);
    expect(result.operations.length).toBeGreaterThan(0);
  });

  test("external command resolves path when available", () => {
    // Use a cross-platform external command
    const cmd = process.platform === "win32" ? "cmd" : "/bin/ls";
    const result = $.trace`${cmd} --version`;
    expect(result.success).toBe(true);

    const execOps = result.operations.filter(op => op.flags === EXECUTE);
    expect(execOps.length).toBeGreaterThan(0);
    // Command name should be captured
    expect(execOps[0].command).toBe(cmd);
  });

  test("external commands include args array", () => {
    const result = $.trace`grep -r 'pattern' src/`;
    expect(result.success).toBe(true);

    const execOps = result.operations.filter(op => op.flags === EXECUTE);
    expect(execOps.length).toBe(1);
    expect(execOps[0].command).toBe("grep");
    expect(execOps[0].args).toEqual(["-r", "pattern", "src/"]);
  });

  test("pipeline commands each have their own args", () => {
    const result = $.trace`git diff HEAD^ -- src/ | head -100`;
    expect(result.success).toBe(true);

    const execOps = result.operations.filter(op => op.flags === EXECUTE);
    expect(execOps.length).toBe(2);

    expect(execOps[0].command).toBe("git");
    expect(execOps[0].args).toEqual(["diff", "HEAD^", "--", "src/"]);

    expect(execOps[1].command).toBe("head");
    expect(execOps[1].args).toEqual(["-100"]);
  });

  test("builtins do not have args (tracked as file operations)", () => {
    const result = $.trace`cat file1.txt file2.txt`;
    expect(result.success).toBe(true);

    // Builtins track files, not args
    const readOps = result.operations.filter(op => op.flags === READ);
    expect(readOps.length).toBe(2);
    expect(readOps[0].args).toBeUndefined();
    expect(readOps[1].args).toBeUndefined();
  });

  test("traces && (and) operator", () => {
    const result = $.trace`cat /tmp/a.txt && cat /tmp/b.txt`;
    expect(result.success).toBe(true);

    // Both commands should be traced
    const readOps = result.operations.filter(op => op.flags === READ);
    expect(readOps.length).toBe(2);
    expect(normalizePath(readOps[0].path!)).toBe("/tmp/a.txt");
    expect(normalizePath(readOps[1].path!)).toBe("/tmp/b.txt");
  });

  test("traces || (or) operator", () => {
    const result = $.trace`cat /tmp/a.txt || cat /tmp/b.txt`;
    expect(result.success).toBe(true);

    // Both commands should be traced
    const readOps = result.operations.filter(op => op.flags === READ);
    expect(readOps.length).toBe(2);
  });

  test("traces subshell with cwd isolation", () => {
    const result = $.trace`(cd /tmp && ls) && ls`;
    expect(result.success).toBe(true);

    // Should have: CHDIR /tmp, READ /tmp (inside subshell), READ . (outside subshell)
    const chdirOps = result.operations.filter(op => op.flags === Permission.CHDIR);
    expect(chdirOps.length).toBe(1);
    expect(normalizePath(chdirOps[0].path!)).toBe("/tmp");

    const readOps = result.operations.filter(op => op.flags === READ);
    expect(readOps.length).toBe(2);
    // First ls inside subshell should see /tmp
    expect(normalizePath(readOps[0].cwd!)).toBe("/tmp");
    // Second ls outside subshell should see original cwd (subshell cwd is restored)
    expect(normalizePath(readOps[1].cwd!)).not.toBe("/tmp");
  });

  test("cd updates cwd for subsequent commands", () => {
    const result = $.trace`cd /tmp && ls`;
    expect(result.success).toBe(true);

    const readOps = result.operations.filter(op => op.flags === READ);
    expect(readOps.length).toBe(1);
    expect(normalizePath(readOps[0].cwd!)).toBe("/tmp");
    expect(normalizePath(readOps[0].path!)).toBe("/tmp"); // ls reads cwd
  });

  test("expands brace patterns", () => {
    const result = $.trace`cat /tmp/{a,b,c}.txt`;
    expect(result.success).toBe(true);

    const readOps = result.operations.filter(op => op.flags === READ);
    expect(readOps.length).toBe(3);
    expect(normalizePath(readOps[0].path!)).toBe("/tmp/a.txt");
    expect(normalizePath(readOps[1].path!)).toBe("/tmp/b.txt");
    expect(normalizePath(readOps[2].path!)).toBe("/tmp/c.txt");
  });

  test("expands tilde to home directory", () => {
    const result = $.trace`cat ~/.config/test.txt`;
    expect(result.success).toBe(true);

    const readOps = result.operations.filter(op => op.flags === READ);
    expect(readOps.length).toBe(1);
    expect(readOps[0].path).not.toContain("~");
    // Home directory path varies by platform
    if (process.platform === "win32") {
      // Windows uses USERPROFILE which expands to something like C:\Users\username
      expect(readOps[0].path).toMatch(/\.config[/\\]test\.txt$/);
    } else {
      expect(readOps[0].path).toContain(".config/test.txt");
    }
  });

  test("expands glob patterns to matching files", () => {
    // Create test files for glob expansion using tempDir helper
    const { join } = require("path");
    using dir = tempDir("trace-glob-test", {
      "a.txt": "",
      "b.txt": "",
      "c.txt": "",
    });
    const testDir = String(dir);

    const result = $.trace`cat ${testDir}/*.txt`;
    expect(result.success).toBe(true);

    const readOps = result.operations.filter(op => op.flags === READ);
    expect(readOps.length).toBe(3);
    const paths = readOps.map(op => normalizePath(op.path!)).sort();
    const expected = [join(testDir, "a.txt"), join(testDir, "b.txt"), join(testDir, "c.txt")].map(normalizePath);
    expect(paths).toEqual(expected);
  });
});
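// For orientation only: a minimal sketch of the result shape the assertions above
// appear to assume. The names below are hypothetical; the real declarations (and the
// Permission / READ / EXECUTE / CREATE_TRUNC / APPEND constants) come from the test's
// setup, which is not shown here and may differ.
interface TraceOperation {
  flags: number; // bitmask: READ, EXECUTE, CREATE_TRUNC, APPEND, Permission.ENV, Permission.CHDIR, ...
  path?: string; // file or directory the operation touches
  cwd?: string; // working directory in effect for this operation
  stream?: "stdin" | "stdout" | "stderr"; // set when the operation comes from a redirect
  command?: string; // external command name (absent for builtins)
  args?: string[]; // external command arguments (absent for builtins)
  env?: Record<string, string>; // environment assignments for ENV operations
}

interface TraceResult {
  success: boolean;
  cwd: string;
  operations: TraceOperation[];
}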
@@ -655,257 +655,4 @@ describe("ws module with HttpsProxyAgent", () => {
    expect(messages).toContain("hello from ws module via agent");
    gc();
  });

  test("ws module passes agent with TLS options", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<string[]>();

    const agent = new HttpsProxyAgent(`http://127.0.0.1:${proxyPort}`, {
      rejectUnauthorized: false,
    });
    const ws = new WS(`wss://127.0.0.1:${wssPort}`, { agent });

    const receivedMessages: string[] = [];

    ws.on("open", () => {
      ws.send("hello from ws module via agent to wss");
    });

    ws.on("message", (data: Buffer) => {
      receivedMessages.push(data.toString());
      if (receivedMessages.length === 2) {
        ws.close();
      }
    });

    ws.on("close", () => {
      resolve(receivedMessages);
    });

    ws.on("error", (err: Error) => {
      reject(err);
    });

    const messages = await promise;
    expect(messages).toContain("connected");
    expect(messages).toContain("hello from ws module via agent to wss");
    gc();
  });

  test("ws module explicit proxy takes precedence over agent", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<string[]>();

    // Create agent pointing to wrong port
    const agent = new HttpsProxyAgent(`http://127.0.0.1:1`);
    // But use explicit proxy option with correct port
    const ws = new WS(`ws://127.0.0.1:${wsPort}`, {
      agent,
      proxy: `http://127.0.0.1:${proxyPort}`, // This should take precedence
    });

    const receivedMessages: string[] = [];

    ws.on("open", () => {
      ws.send("ws module explicit proxy wins");
    });

    ws.on("message", (data: Buffer) => {
      receivedMessages.push(data.toString());
      if (receivedMessages.length === 2) {
        ws.close();
      }
    });

    ws.on("close", () => {
      resolve(receivedMessages);
    });

    ws.on("error", (err: Error) => {
      reject(err);
    });

    const messages = await promise;
    expect(messages).toContain("connected");
    expect(messages).toContain("ws module explicit proxy wins");
    gc();
  });
});
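// Illustrative only: a hypothetical helper showing the precedence the tests above
// assert. `proxy` and `agent` mirror the constructor options used in these tests;
// resolveProxyUrl is not a real Bun or ws API, just a sketch of the expected behavior.
type ProxyAgentLike = { proxy?: URL | string };

function resolveProxyUrl(options: { proxy?: string; agent?: ProxyAgentLike }): string | undefined {
  // An explicit `proxy` option always wins over whatever the agent carries.
  if (options.proxy) return options.proxy;
  // Otherwise fall back to the agent's proxy, if it exposes one
  // (HttpsProxyAgent stores it as a URL object, so take its href).
  const agentProxy = options.agent?.proxy;
  if (agentProxy == null) return undefined;
  return typeof agentProxy === "string" ? agentProxy : agentProxy.href;
}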
describe("WebSocket with HttpsProxyAgent", () => {
  test("ws:// through HttpsProxyAgent", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<string[]>();

    const agent = new HttpsProxyAgent(`http://127.0.0.1:${proxyPort}`);
    const ws = new WebSocket(`ws://127.0.0.1:${wsPort}`, { agent });

    const receivedMessages: string[] = [];

    ws.onopen = () => {
      ws.send("hello from WebSocket via HttpsProxyAgent");
    };

    ws.onmessage = event => {
      receivedMessages.push(String(event.data));
      if (receivedMessages.length === 2) {
        ws.close();
      }
    };

    ws.onclose = () => {
      resolve(receivedMessages);
    };

    ws.onerror = event => {
      reject(event);
    };

    const messages = await promise;
    expect(messages).toContain("connected");
    expect(messages).toContain("hello from WebSocket via HttpsProxyAgent");
    gc();
  });

  test("wss:// through HttpsProxyAgent with rejectUnauthorized", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<string[]>();

    const agent = new HttpsProxyAgent(`http://127.0.0.1:${proxyPort}`, {
      rejectUnauthorized: false,
    });
    const ws = new WebSocket(`wss://127.0.0.1:${wssPort}`, { agent });

    const receivedMessages: string[] = [];

    ws.onopen = () => {
      ws.send("hello from wss via HttpsProxyAgent");
    };

    ws.onmessage = event => {
      receivedMessages.push(String(event.data));
      if (receivedMessages.length === 2) {
        ws.close();
      }
    };

    ws.onclose = () => {
      resolve(receivedMessages);
    };

    ws.onerror = event => {
      reject(event);
    };

    const messages = await promise;
    expect(messages).toContain("connected");
    expect(messages).toContain("hello from wss via HttpsProxyAgent");
    gc();
  });

  test("HttpsProxyAgent with authentication", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<string[]>();

    const agent = new HttpsProxyAgent(`http://proxy_user:proxy_pass@127.0.0.1:${authProxyPort}`);
    const ws = new WebSocket(`ws://127.0.0.1:${wsPort}`, { agent });

    const receivedMessages: string[] = [];

    ws.onopen = () => {
      ws.send("hello from WebSocket with auth via HttpsProxyAgent");
    };

    ws.onmessage = event => {
      receivedMessages.push(String(event.data));
      if (receivedMessages.length === 2) {
        ws.close();
      }
    };

    ws.onclose = () => {
      resolve(receivedMessages);
    };

    ws.onerror = event => {
      reject(event);
    };

    const messages = await promise;
    expect(messages).toContain("connected");
    expect(messages).toContain("hello from WebSocket with auth via HttpsProxyAgent");
    gc();
  });

  test("HttpsProxyAgent with agent.proxy as URL object", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<string[]>();

    // HttpsProxyAgent stores the proxy URL as a URL object in agent.proxy
    const agent = new HttpsProxyAgent(`http://127.0.0.1:${proxyPort}`);
    // Verify the agent has the proxy property as a URL object
    expect(agent.proxy).toBeDefined();
    expect(typeof agent.proxy).toBe("object");
    expect(agent.proxy.href).toContain(`127.0.0.1:${proxyPort}`);

    const ws = new WebSocket(`ws://127.0.0.1:${wsPort}`, { agent });

    const receivedMessages: string[] = [];

    ws.onopen = () => {
      ws.send("hello via agent with URL object");
    };

    ws.onmessage = event => {
      receivedMessages.push(String(event.data));
      if (receivedMessages.length === 2) {
        ws.close();
      }
    };

    ws.onclose = () => {
      resolve(receivedMessages);
    };

    ws.onerror = event => {
      reject(event);
    };

    const messages = await promise;
    expect(messages).toContain("connected");
    expect(messages).toContain("hello via agent with URL object");
    gc();
  });

  test("explicit proxy option takes precedence over agent", async () => {
    const { promise, resolve, reject } = Promise.withResolvers<string[]>();

    // Create agent pointing to wrong port (that doesn't exist)
    const agent = new HttpsProxyAgent(`http://127.0.0.1:1`);
    // But use explicit proxy option with correct port
    const ws = new WebSocket(`ws://127.0.0.1:${wsPort}`, {
      agent,
      proxy: `http://127.0.0.1:${proxyPort}`, // This should take precedence
    });

    const receivedMessages: string[] = [];

    ws.onopen = () => {
      ws.send("explicit proxy wins");
    };

    ws.onmessage = event => {
      receivedMessages.push(String(event.data));
      if (receivedMessages.length === 2) {
        ws.close();
      }
    };

    ws.onclose = () => {
      resolve(receivedMessages);
    };

    ws.onerror = event => {
      reject(event);
    };

    const messages = await promise;
    expect(messages).toContain("connected");
    expect(messages).toContain("explicit proxy wins");
    gc();
  });
});
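// A minimal sketch of the kind of echo server these WebSocket tests assume: it greets
// each client with "connected", echoes every message back, and the client closes after
// receiving two messages. The actual harness (started earlier in the test file, along
// with the proxy servers behind proxyPort/authProxyPort/wssPort) is not shown here and
// may differ.
import { serve } from "bun";

const server = serve({
  port: 0, // let the OS pick a free port; tests would read it from server.port
  fetch(req, srv) {
    if (srv.upgrade(req)) return; // hand the connection over to the websocket handlers
    return new Response("expected a WebSocket upgrade", { status: 400 });
  },
  websocket: {
    open(ws) {
      ws.send("connected"); // first message every test expects
    },
    message(ws, message) {
      ws.send(String(message)); // echo, so the client sees two messages total
    },
  },
});

console.log(`ws echo server listening on ws://127.0.0.1:${server.port}`);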