Mirror of https://github.com/oven-sh/bun (synced 2026-02-21 00:02:19 +00:00)

Compare commits: claude/fix...claude/env, 1 commit (3b345c831a)
@@ -9,42 +9,18 @@ In Bun, `fetch` supports sending requests through an HTTP or HTTPS proxy. This i

```ts proxy.ts icon="/icons/typescript.svg"
await fetch("https://example.com", {
  // The URL of the proxy server
  proxy: "https://username:password@proxy.example.com:8080",
});
```

---

The `proxy` option can be a URL string or an object with `url` and optional `headers`. The URL can include the username and password if the proxy requires authentication. It can be `http://` or `https://`.
The `proxy` option is a URL string that specifies the proxy server. It can include the username and password if the proxy requires authentication. It can be `http://` or `https://`.

---

## Custom proxy headers

To send custom headers to the proxy server (useful for proxy authentication tokens, custom routing, etc.), use the object format:

```ts proxy-headers.ts icon="/icons/typescript.svg"
await fetch("https://example.com", {
  proxy: {
    url: "https://proxy.example.com:8080",
    headers: {
      "Proxy-Authorization": "Bearer my-token",
      "X-Proxy-Region": "us-east-1",
    },
  },
});
```

The `headers` property accepts a plain object or a `Headers` instance. These headers are sent directly to the proxy server in `CONNECT` requests (for HTTPS targets) or in the proxy request (for HTTP targets).

If you provide a `Proxy-Authorization` header, it will override any credentials specified in the proxy URL.
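
For example, a minimal sketch of both behaviors together (the proxy host, credentials, and token below are placeholders, not values from this commit): the headers are passed as a `Headers` instance, and the explicit `Proxy-Authorization` header wins over the credentials embedded in the proxy URL.

```ts proxy-auth-override.ts icon="/icons/typescript.svg"
// Placeholder proxy host, credentials, and token, for illustration only.
const proxyHeaders = new Headers({
  // This explicit header takes precedence over the user:pass credentials in the URL below.
  "Proxy-Authorization": "Bearer my-token",
});

await fetch("https://example.com", {
  proxy: {
    url: "https://user:pass@proxy.example.com:8080",
    headers: proxyHeaders,
  },
});
```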

---

## Environment variables

You can also set the `$HTTP_PROXY` or `$HTTPS_PROXY` environment variable to the proxy URL. This is useful when you want to use the same proxy for all requests.

```sh terminal icon="terminal"
HTTPS_PROXY=https://username:password@proxy.example.com:8080 bun run index.ts
```

@@ -51,7 +51,7 @@ const response = await fetch("http://example.com", {

### Proxying requests

To proxy a request, pass an object with the `proxy` property set to a URL string:
To proxy a request, pass an object with the `proxy` property set to a URL.

```ts
const response = await fetch("http://example.com", {
@@ -59,22 +59,6 @@ const response = await fetch("http://example.com", {
});
```

You can also use an object format to send custom headers to the proxy server:

```ts
const response = await fetch("http://example.com", {
  proxy: {
    url: "http://proxy.com",
    headers: {
      "Proxy-Authorization": "Bearer my-token",
      "X-Custom-Proxy-Header": "value",
    },
  },
});
```

The `headers` are sent directly to the proxy in `CONNECT` requests (for HTTPS targets) or in the proxy request (for HTTP targets). If you provide a `Proxy-Authorization` header, it overrides any credentials in the proxy URL.

### Custom headers

To set custom headers, pass an object with the `headers` property set to an object.
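
A minimal sketch of that, using standard `fetch` request headers (the header name and value are placeholders):

```ts
const response = await fetch("http://example.com", {
  headers: {
    "X-Custom-Header": "value",
  },
});
```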

32 packages/bun-types/globals.d.ts (vendored)
@@ -1920,44 +1920,14 @@ interface BunFetchRequestInit extends RequestInit {
   * Override http_proxy or HTTPS_PROXY
   * This is a custom property that is not part of the Fetch API specification.
   *
   * Can be a string URL or an object with `url` and optional `headers`.
   *
   * @example
   * ```js
   * // String format
   * const response = await fetch("http://example.com", {
   *   proxy: "https://username:password@127.0.0.1:8080"
   * });
   *
   * // Object format with custom headers sent to the proxy
   * const response = await fetch("http://example.com", {
   *   proxy: {
   *     url: "https://127.0.0.1:8080",
   *     headers: {
   *       "Proxy-Authorization": "Bearer token",
   *       "X-Custom-Proxy-Header": "value"
   *     }
   *   }
   * });
   * ```
   *
   * If a `Proxy-Authorization` header is provided in `proxy.headers`, it takes
   * precedence over credentials parsed from the proxy URL.
   */
  proxy?:
    | string
    | {
        /**
         * The proxy URL
         */
        url: string;
        /**
         * Custom headers to send to the proxy server.
         * These headers are sent in the CONNECT request (for HTTPS targets)
         * or in the proxy request (for HTTP targets).
         */
        headers?: Bun.HeadersInit;
      };
  proxy?: string;

  /**
   * Override the default S3 options

4 packages/bun-types/wasm.d.ts (vendored)
@@ -100,8 +100,8 @@ declare module "bun" {
|
||||
|
||||
declare namespace WebAssembly {
|
||||
interface ValueTypeMap extends Bun.WebAssembly.ValueTypeMap {}
|
||||
interface GlobalDescriptor<T extends keyof ValueTypeMap = keyof ValueTypeMap> extends Bun.WebAssembly
|
||||
.GlobalDescriptor<T> {}
|
||||
interface GlobalDescriptor<T extends keyof ValueTypeMap = keyof ValueTypeMap>
|
||||
extends Bun.WebAssembly.GlobalDescriptor<T> {}
|
||||
interface MemoryDescriptor extends Bun.WebAssembly.MemoryDescriptor {}
|
||||
interface ModuleExportDescriptor extends Bun.WebAssembly.ModuleExportDescriptor {}
|
||||
interface ModuleImportDescriptor extends Bun.WebAssembly.ModuleImportDescriptor {}
|
||||
|
||||
@@ -5398,18 +5398,21 @@ pub fn NewParser_(
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isDotDefineMatch(noalias p: *P, expr: Expr, parts: []const string) bool {
|
||||
/// Check if an expression matches a dot define pattern like "process.env.NODE_ENV".
|
||||
/// When `allow_optional_chain` is true, expressions like `process?.env?.NODE_ENV` will also match.
|
||||
/// This should only be true when we're substituting a value (the optional chain becomes irrelevant).
|
||||
/// When just setting flags like `can_be_removed_if_unused`, optional chains should NOT match
|
||||
/// because the chain itself has observable behavior (checking if the object exists).
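/// Example: with a define for `process.env.NODE_ENV`, both `process.env.NODE_ENV` and
/// `process?.env?.NODE_ENV` are matched when substituting the defined value, but only the
/// non-optional form is matched when merely marking the expression as removable-if-unused.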
|
||||
pub fn isDotDefineMatch(noalias p: *P, expr: Expr, parts: []const string, allow_optional_chain: bool) bool {
|
||||
switch (expr.data) {
|
||||
.e_dot => |ex| {
|
||||
if (parts.len > 1) {
|
||||
if (ex.optional_chain != null) {
|
||||
if (!allow_optional_chain and ex.optional_chain != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Intermediates must be dot expressions
|
||||
const last = parts.len - 1;
|
||||
const is_tail_match = strings.eql(parts[last], ex.name);
|
||||
return is_tail_match and p.isDotDefineMatch(ex.target, parts[0..last]);
|
||||
return is_tail_match and p.isDotDefineMatch(ex.target, parts[0..last], allow_optional_chain);
|
||||
}
|
||||
},
|
||||
.e_import_meta => {
|
||||
@@ -5421,13 +5424,12 @@ pub fn NewParser_(
|
||||
// the intent is to handle people using this form instead of E.Dot. So we really only want to do this if the accessor can also be an identifier
|
||||
.e_index => |index| {
|
||||
if (parts.len > 1 and index.index.data == .e_string and index.index.data.e_string.isUTF8()) {
|
||||
if (index.optional_chain != null) {
|
||||
if (!allow_optional_chain and index.optional_chain != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const last = parts.len - 1;
|
||||
const is_tail_match = strings.eql(parts[last], index.index.data.e_string.slice(p.allocator));
|
||||
return is_tail_match and p.isDotDefineMatch(index.target, parts[0..last]);
|
||||
return is_tail_match and p.isDotDefineMatch(index.target, parts[0..last], allow_optional_chain);
|
||||
}
|
||||
},
|
||||
.e_identifier => |ex| {
|
||||
|
||||
@@ -69,7 +69,8 @@ pub fn VisitExpr(
|
||||
if (p.define.dots.get("meta")) |meta| {
|
||||
for (meta) |define| {
|
||||
// TODO: clean up how we do define matches
|
||||
if (p.isDotDefineMatch(expr, define.parts)) {
|
||||
// Allow optional chains since we're substituting a value
|
||||
if (p.isDotDefineMatch(expr, define.parts, true)) {
|
||||
// Substitute user-specified defines
|
||||
return p.valueForDefine(expr.loc, in.assign_target, is_delete_target, &define.data);
|
||||
}
|
||||
@@ -518,6 +519,26 @@ pub fn VisitExpr(
|
||||
const is_call_target = p.call_target == .e_index and expr.data.e_index == p.call_target.e_index;
|
||||
const is_delete_target = p.delete_target == .e_index and expr.data.e_index == p.delete_target.e_index;
|
||||
|
||||
// Check for defines with bracket notation (e.g., process.env["VAR"])
|
||||
// This is checked first before any transformations, similar to e_dot handling
|
||||
if (e_.index.data == .e_string and e_.index.data.e_string.isUTF8()) {
|
||||
const index_str = e_.index.data.e_string.slice(p.allocator);
|
||||
if (p.define.dots.get(index_str)) |parts| {
|
||||
for (parts) |*define| {
|
||||
// Allow optional chains since we're substituting a value
|
||||
if (p.isDotDefineMatch(expr, define.parts, true)) {
|
||||
if (in.assign_target == .none) {
|
||||
// Substitute user-specified defines
|
||||
if (!define.data.valueless()) {
|
||||
return p.valueForDefine(expr.loc, in.assign_target, is_delete_target, &define.data);
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// "a['b']" => "a.b"
|
||||
if (p.options.features.minify_syntax and
|
||||
e_.index.data == .e_string and
|
||||
@@ -832,10 +853,15 @@ pub fn VisitExpr(
|
||||
|
||||
if (p.define.dots.get(e_.name)) |parts| {
|
||||
for (parts) |*define| {
|
||||
if (p.isDotDefineMatch(expr, define.parts)) {
|
||||
// When substituting a value, allow optional chains (e.g. process?.env?.NODE_ENV)
|
||||
// because the substitution makes the chain irrelevant.
|
||||
// When just setting flags, don't allow optional chains because the chain
|
||||
// itself has observable behavior (checking if the object exists).
|
||||
const has_value = !define.data.valueless();
|
||||
if (p.isDotDefineMatch(expr, define.parts, has_value)) {
|
||||
if (in.assign_target == .none) {
|
||||
// Substitute user-specified defines
|
||||
if (!define.data.valueless()) {
|
||||
if (has_value) {
|
||||
return p.valueForDefine(expr.loc, in.assign_target, is_delete_target, &define.data);
|
||||
}
|
||||
|
||||
|
||||
@@ -1212,19 +1212,11 @@ pub fn mmapFile(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.
|
||||
}
|
||||
|
||||
if (try opts.get(globalThis, "size")) |value| {
|
||||
const size_value = try value.coerceToInt64(globalThis);
|
||||
if (size_value < 0) {
|
||||
return globalThis.throwInvalidArguments("size must be a non-negative integer", .{});
|
||||
}
|
||||
map_size = @intCast(size_value);
|
||||
map_size = @as(usize, @intCast(value.toInt64()));
|
||||
}
|
||||
|
||||
if (try opts.get(globalThis, "offset")) |value| {
|
||||
const offset_value = try value.coerceToInt64(globalThis);
|
||||
if (offset_value < 0) {
|
||||
return globalThis.throwInvalidArguments("offset must be a non-negative integer", .{});
|
||||
}
|
||||
offset = @intCast(offset_value);
|
||||
offset = @as(usize, @intCast(value.toInt64()));
|
||||
offset = std.mem.alignBackwardAnyAlign(usize, offset, std.heap.pageSize());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ JS_EXPORT_PRIVATE JSWrappingFunction* JSWrappingFunction::create(
|
||||
Zig::NativeFunctionPtr functionPointer,
|
||||
JSC::JSValue wrappedFnValue)
|
||||
{
|
||||
JSC::JSObject* wrappedFn = wrappedFnValue.getObject();
|
||||
JSC::JSFunction* wrappedFn = jsCast<JSC::JSFunction*>(wrappedFnValue.asCell());
|
||||
ASSERT(wrappedFn != nullptr);
|
||||
|
||||
auto nameStr = symbolName->tag == BunStringTag::Empty ? WTF::emptyString() : symbolName->toWTFString();
|
||||
@@ -75,9 +75,9 @@ extern "C" JSC::EncodedJSValue Bun__JSWrappingFunction__getWrappedFunction(
|
||||
Zig::GlobalObject* globalObject)
|
||||
{
|
||||
JSC::JSValue thisValue = JSC::JSValue::decode(thisValueEncoded);
|
||||
JSWrappingFunction* thisObject = jsDynamicCast<JSWrappingFunction*>(thisValue.asCell());
|
||||
JSWrappingFunction* thisObject = jsCast<JSWrappingFunction*>(thisValue.asCell());
|
||||
if (thisObject != nullptr) {
|
||||
JSC::JSObject* wrappedFn = thisObject->m_wrappedFn.get();
|
||||
JSC::JSFunction* wrappedFn = thisObject->m_wrappedFn.get();
|
||||
return JSC::JSValue::encode(wrappedFn);
|
||||
}
|
||||
return {};
|
||||
|
||||
@@ -59,7 +59,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
JSWrappingFunction(JSC::VM& vm, JSC::NativeExecutable* native, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, JSC::JSObject* wrappedFn)
|
||||
JSWrappingFunction(JSC::VM& vm, JSC::NativeExecutable* native, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, JSC::JSFunction* wrappedFn)
|
||||
: Base(vm, native, globalObject, structure)
|
||||
, m_wrappedFn(wrappedFn, JSC::WriteBarrierEarlyInit)
|
||||
{
|
||||
@@ -69,7 +69,7 @@ private:
|
||||
|
||||
DECLARE_VISIT_CHILDREN;
|
||||
|
||||
JSC::WriteBarrier<JSC::JSObject> m_wrappedFn;
|
||||
JSC::WriteBarrier<JSC::JSFunction> m_wrappedFn;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
@@ -5677,15 +5677,7 @@ CPP_DECL JSC::EncodedJSValue WebCore__DOMFormData__createFromURLQuery(JSC::JSGlo
|
||||
{
|
||||
Zig::GlobalObject* globalObject = static_cast<Zig::GlobalObject*>(arg0);
|
||||
// don't need to copy the string because it internally does.
|
||||
auto str = toString(*arg1);
|
||||
// toString() in helpers.h returns an empty string when the input exceeds
|
||||
// String::MaxLength or Bun's synthetic allocation limit. This is the only
|
||||
// condition under which toString() returns empty for non-empty input.
|
||||
if (str.isEmpty() && arg1->len > 0) {
|
||||
auto scope = DECLARE_THROW_SCOPE(globalObject->vm());
|
||||
return Bun::ERR::STRING_TOO_LONG(scope, globalObject);
|
||||
}
|
||||
auto formData = DOMFormData::create(globalObject->scriptExecutionContext(), WTFMove(str));
|
||||
auto formData = DOMFormData::create(globalObject->scriptExecutionContext(), toString(*arg1));
|
||||
return JSValue::encode(toJSNewlyCreated(arg0, globalObject, WTFMove(formData)));
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
#include "root.h"
|
||||
#include "wtf/text/ASCIILiteral.h"
|
||||
#include "wtf/SIMDUTF.h"
|
||||
|
||||
#include <JavaScriptCore/Error.h>
|
||||
#include <JavaScriptCore/Exception.h>
|
||||
@@ -80,24 +79,12 @@ static const WTF::String toString(ZigString str)
|
||||
}
|
||||
if (isTaggedUTF8Ptr(str.ptr)) [[unlikely]] {
|
||||
ASSERT_WITH_MESSAGE(!isTaggedExternalPtr(str.ptr), "UTF8 and external ptr are mutually exclusive. The external will never be freed.");
|
||||
// Check if the resulting UTF-16 string could possibly exceed the maximum length.
|
||||
// For valid UTF-8, the number of UTF-16 code units is <= the number of UTF-8 bytes
|
||||
// (ASCII is 1:1; other code points use multiple UTF-8 bytes per UTF-16 code unit).
|
||||
// We only need to compute the actual UTF-16 length when the byte length exceeds the limit.
|
||||
size_t maxLength = std::min(Bun__stringSyntheticAllocationLimit, static_cast<size_t>(WTF::String::MaxLength));
|
||||
if (str.len > maxLength) [[unlikely]] {
|
||||
// UTF-8 byte length != UTF-16 length, so use simdutf to calculate the actual UTF-16 length.
|
||||
size_t utf16Length = simdutf::utf16_length_from_utf8(reinterpret_cast<const char*>(untag(str.ptr)), str.len);
|
||||
if (utf16Length > maxLength) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
return WTF::String::fromUTF8ReplacingInvalidSequences(std::span { untag(str.ptr), str.len });
|
||||
}
|
||||
|
||||
if (isTaggedExternalPtr(str.ptr)) [[unlikely]] {
|
||||
// This will fail if the string is too long. Let's make it explicit instead of an ASSERT.
|
||||
if (str.len > Bun__stringSyntheticAllocationLimit || str.len > WTF::String::MaxLength) [[unlikely]] {
|
||||
if (str.len > Bun__stringSyntheticAllocationLimit) [[unlikely]] {
|
||||
free_global_string(nullptr, reinterpret_cast<void*>(const_cast<unsigned char*>(untag(str.ptr))), static_cast<unsigned>(str.len));
|
||||
return {};
|
||||
}
|
||||
@@ -108,7 +95,7 @@ static const WTF::String toString(ZigString str)
|
||||
}
|
||||
|
||||
// This will fail if the string is too long. Let's make it explicit instead of an ASSERT.
|
||||
if (str.len > Bun__stringSyntheticAllocationLimit || str.len > WTF::String::MaxLength) [[unlikely]] {
|
||||
if (str.len > Bun__stringSyntheticAllocationLimit) [[unlikely]] {
|
||||
return {};
|
||||
}
|
||||
|
||||
@@ -134,19 +121,11 @@ static const WTF::String toString(ZigString str, StringPointer ptr)
|
||||
return WTF::String();
|
||||
}
|
||||
if (isTaggedUTF8Ptr(str.ptr)) [[unlikely]] {
|
||||
// Check if the resulting UTF-16 string could possibly exceed the maximum length.
|
||||
size_t maxLength = std::min(Bun__stringSyntheticAllocationLimit, static_cast<size_t>(WTF::String::MaxLength));
|
||||
if (ptr.len > maxLength) [[unlikely]] {
|
||||
size_t utf16Length = simdutf::utf16_length_from_utf8(reinterpret_cast<const char*>(&untag(str.ptr)[ptr.off]), ptr.len);
|
||||
if (utf16Length > maxLength) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
return WTF::String::fromUTF8ReplacingInvalidSequences(std::span { &untag(str.ptr)[ptr.off], ptr.len });
|
||||
}
|
||||
|
||||
// This will fail if the string is too long. Let's make it explicit instead of an ASSERT.
|
||||
if (ptr.len > Bun__stringSyntheticAllocationLimit || ptr.len > WTF::String::MaxLength) [[unlikely]] {
|
||||
if (str.len > Bun__stringSyntheticAllocationLimit) [[unlikely]] {
|
||||
return {};
|
||||
}
|
||||
|
||||
@@ -162,19 +141,11 @@ static const WTF::String toStringCopy(ZigString str, StringPointer ptr)
|
||||
return WTF::String();
|
||||
}
|
||||
if (isTaggedUTF8Ptr(str.ptr)) [[unlikely]] {
|
||||
// Check if the resulting UTF-16 string could possibly exceed the maximum length.
|
||||
size_t maxLength = std::min(Bun__stringSyntheticAllocationLimit, static_cast<size_t>(WTF::String::MaxLength));
|
||||
if (ptr.len > maxLength) [[unlikely]] {
|
||||
size_t utf16Length = simdutf::utf16_length_from_utf8(reinterpret_cast<const char*>(&untag(str.ptr)[ptr.off]), ptr.len);
|
||||
if (utf16Length > maxLength) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
return WTF::String::fromUTF8ReplacingInvalidSequences(std::span { &untag(str.ptr)[ptr.off], ptr.len });
|
||||
}
|
||||
|
||||
// This will fail if the string is too long. Let's make it explicit instead of an ASSERT.
|
||||
if (ptr.len > Bun__stringSyntheticAllocationLimit || ptr.len > WTF::String::MaxLength) [[unlikely]] {
|
||||
if (str.len > Bun__stringSyntheticAllocationLimit) [[unlikely]] {
|
||||
return {};
|
||||
}
|
||||
|
||||
@@ -190,14 +161,6 @@ static const WTF::String toStringCopy(ZigString str)
|
||||
return WTF::String();
|
||||
}
|
||||
if (isTaggedUTF8Ptr(str.ptr)) [[unlikely]] {
|
||||
// Check if the resulting UTF-16 string could possibly exceed the maximum length.
|
||||
size_t maxLength = std::min(Bun__stringSyntheticAllocationLimit, static_cast<size_t>(WTF::String::MaxLength));
|
||||
if (str.len > maxLength) [[unlikely]] {
|
||||
size_t utf16Length = simdutf::utf16_length_from_utf8(reinterpret_cast<const char*>(untag(str.ptr)), str.len);
|
||||
if (utf16Length > maxLength) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
return WTF::String::fromUTF8ReplacingInvalidSequences(std::span { untag(str.ptr), str.len });
|
||||
}
|
||||
|
||||
@@ -225,14 +188,6 @@ static void appendToBuilder(ZigString str, WTF::StringBuilder& builder)
|
||||
return;
|
||||
}
|
||||
if (isTaggedUTF8Ptr(str.ptr)) [[unlikely]] {
|
||||
// Check if the resulting UTF-16 string could possibly exceed the maximum length.
|
||||
size_t maxLength = std::min(Bun__stringSyntheticAllocationLimit, static_cast<size_t>(WTF::String::MaxLength));
|
||||
if (str.len > maxLength) [[unlikely]] {
|
||||
size_t utf16Length = simdutf::utf16_length_from_utf8(reinterpret_cast<const char*>(untag(str.ptr)), str.len);
|
||||
if (utf16Length > maxLength) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
WTF::String converted = WTF::String::fromUTF8ReplacingInvalidSequences(std::span { untag(str.ptr), str.len });
|
||||
builder.append(converted);
|
||||
return;
|
||||
|
||||
@@ -632,11 +632,7 @@ pub fn Bun__fetch_(
|
||||
break :extract_verbose verbose;
|
||||
};
|
||||
|
||||
// proxy: string | { url: string, headers?: Headers } | undefined;
|
||||
var proxy_headers: ?Headers = null;
|
||||
defer if (proxy_headers) |*hdrs| {
|
||||
hdrs.deinit();
|
||||
};
|
||||
// proxy: string | undefined;
|
||||
url_proxy_buffer = extract_proxy: {
|
||||
const objects_to_try = [_]jsc.JSValue{
|
||||
options_object orelse .zero,
|
||||
@@ -645,7 +641,6 @@ pub fn Bun__fetch_(
|
||||
inline for (0..2) |i| {
|
||||
if (objects_to_try[i] != .zero) {
|
||||
if (try objects_to_try[i].get(globalThis, "proxy")) |proxy_arg| {
|
||||
// Handle string format: proxy: "http://proxy.example.com:8080"
|
||||
if (proxy_arg.isString() and try proxy_arg.getLength(ctx) > 0) {
|
||||
var href = try jsc.URL.hrefFromJS(proxy_arg, globalThis);
|
||||
if (href.tag == .Dead) {
|
||||
@@ -666,54 +661,6 @@ pub fn Bun__fetch_(
|
||||
allocator.free(url_proxy_buffer);
|
||||
break :extract_proxy buffer;
|
||||
}
|
||||
// Handle object format: proxy: { url: "http://proxy.example.com:8080", headers?: Headers }
|
||||
if (proxy_arg.isObject()) {
|
||||
// Get the URL from the proxy object
|
||||
const proxy_url_arg = try proxy_arg.get(globalThis, "url");
|
||||
if (proxy_url_arg == null or proxy_url_arg.?.isUndefinedOrNull()) {
|
||||
const err = ctx.toTypeError(.INVALID_ARG_VALUE, "fetch() proxy object requires a 'url' property", .{});
|
||||
is_error = true;
|
||||
return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err);
|
||||
}
|
||||
if (proxy_url_arg.?.isString() and try proxy_url_arg.?.getLength(ctx) > 0) {
|
||||
var href = try jsc.URL.hrefFromJS(proxy_url_arg.?, globalThis);
|
||||
if (href.tag == .Dead) {
|
||||
const err = ctx.toTypeError(.INVALID_ARG_VALUE, "fetch() proxy URL is invalid", .{});
|
||||
is_error = true;
|
||||
return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err);
|
||||
}
|
||||
defer href.deref();
|
||||
const buffer = try std.fmt.allocPrint(allocator, "{s}{f}", .{ url_proxy_buffer, href });
|
||||
url = ZigURL.parse(buffer[0..url.href.len]);
|
||||
if (url.isFile()) {
|
||||
url_type = URLType.file;
|
||||
} else if (url.isBlob()) {
|
||||
url_type = URLType.blob;
|
||||
}
|
||||
|
||||
proxy = ZigURL.parse(buffer[url.href.len..]);
|
||||
allocator.free(url_proxy_buffer);
|
||||
url_proxy_buffer = buffer;
|
||||
|
||||
// Get the headers from the proxy object (optional)
|
||||
if (try proxy_arg.get(globalThis, "headers")) |headers_value| {
|
||||
if (!headers_value.isUndefinedOrNull()) {
|
||||
if (headers_value.as(FetchHeaders)) |fetch_hdrs| {
|
||||
proxy_headers = Headers.from(fetch_hdrs, allocator, .{}) catch |err| bun.handleOom(err);
|
||||
} else if (try FetchHeaders.createFromJS(ctx, headers_value)) |fetch_hdrs| {
|
||||
defer fetch_hdrs.deref();
|
||||
proxy_headers = Headers.from(fetch_hdrs, allocator, .{}) catch |err| bun.handleOom(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
break :extract_proxy url_proxy_buffer;
|
||||
} else {
|
||||
const err = ctx.toTypeError(.INVALID_ARG_VALUE, "fetch() proxy.url must be a non-empty string", .{});
|
||||
is_error = true;
|
||||
return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (globalThis.hasException()) {
|
||||
@@ -1391,7 +1338,6 @@ pub fn Bun__fetch_(
|
||||
.redirect_type = redirect_type,
|
||||
.verbose = verbose,
|
||||
.proxy = proxy,
|
||||
.proxy_headers = proxy_headers,
|
||||
.url_proxy_buffer = url_proxy_buffer,
|
||||
.signal = signal,
|
||||
.globalThis = globalThis,
|
||||
@@ -1426,7 +1372,6 @@ pub fn Bun__fetch_(
|
||||
body = FetchTasklet.HTTPRequestBody.Empty;
|
||||
}
|
||||
proxy = null;
|
||||
proxy_headers = null;
|
||||
url_proxy_buffer = "";
|
||||
signal = null;
|
||||
ssl_config = null;
|
||||
|
||||
@@ -1049,7 +1049,6 @@ pub const FetchTasklet = struct {
|
||||
fetch_options.redirect_type,
|
||||
.{
|
||||
.http_proxy = proxy,
|
||||
.proxy_headers = fetch_options.proxy_headers,
|
||||
.hostname = fetch_options.hostname,
|
||||
.signals = fetch_tasklet.signals,
|
||||
.unix_socket_path = fetch_options.unix_socket_path,
|
||||
@@ -1223,7 +1222,6 @@ pub const FetchTasklet = struct {
|
||||
verbose: http.HTTPVerboseLevel = .none,
|
||||
redirect_type: FetchRedirect = FetchRedirect.follow,
|
||||
proxy: ?ZigURL = null,
|
||||
proxy_headers: ?Headers = null,
|
||||
url_proxy_buffer: []const u8 = "",
|
||||
signal: ?*jsc.WebCore.AbortSignal = null,
|
||||
globalThis: ?*JSGlobalObject,
|
||||
|
||||
@@ -349,7 +349,7 @@ pub const Loader = struct {
|
||||
}
|
||||
}
|
||||
|
||||
// We have to copy all the keys to prepend "process.env" :/
|
||||
// We have to copy all the keys to prepend "process.env" and "import.meta.env" :/
|
||||
var key_buf_len: usize = 0;
|
||||
var e_strings_to_allocate: usize = 0;
|
||||
|
||||
@@ -379,11 +379,15 @@ pub const Loader = struct {
|
||||
|
||||
if (key_buf_len > 0) {
|
||||
iter.reset();
|
||||
key_buf = try allocator.alloc(u8, key_buf_len + key_count * "process.env.".len);
|
||||
// Allocate space for both "process.env." and "import.meta.env." prefixes
|
||||
// We double key_buf_len for the env var names, and add space for both prefixes per key
|
||||
key_buf = try allocator.alloc(u8, key_buf_len * 2 + key_count * ("process.env.".len + "import.meta.env.".len));
|
||||
var key_writer = std.Io.Writer.fixed(key_buf);
|
||||
const js_ast = bun.ast;
|
||||
|
||||
var e_strings = try allocator.alloc(js_ast.E.String, e_strings_to_allocate * 2);
|
||||
// Allocate e_strings for both process.env and import.meta.env defines
|
||||
// Each env var needs 2 e_strings (one for process.env, one for import.meta.env)
|
||||
var e_strings = try allocator.alloc(js_ast.E.String, e_strings_to_allocate * 4);
|
||||
errdefer allocator.free(e_strings);
|
||||
errdefer allocator.free(key_buf);
|
||||
|
||||
@@ -392,29 +396,32 @@ pub const Loader = struct {
|
||||
const value: string = entry.value_ptr.value;
|
||||
|
||||
if (strings.startsWith(entry.key_ptr.*, prefix)) {
|
||||
key_writer.print("process.env.{s}", .{entry.key_ptr.*}) catch |err| switch (err) {
|
||||
error.WriteFailed => unreachable, // miscalculated length of key_buf above
|
||||
};
|
||||
const key_str = key_writer.buffered();
|
||||
key_writer = std.Io.Writer.fixed(key_writer.unusedCapacitySlice());
|
||||
// Add defines for both process.env.* and import.meta.env.* (Vite compat)
|
||||
inline for (.{ "process.env.", "import.meta.env." }) |env_prefix| {
|
||||
key_writer.print(env_prefix ++ "{s}", .{entry.key_ptr.*}) catch |err| switch (err) {
|
||||
error.WriteFailed => unreachable, // miscalculated length of key_buf above
|
||||
};
|
||||
const key_str = key_writer.buffered();
|
||||
key_writer = std.Io.Writer.fixed(key_writer.unusedCapacitySlice());
|
||||
|
||||
e_strings[0] = js_ast.E.String{
|
||||
.data = if (value.len > 0)
|
||||
@as([*]u8, @ptrFromInt(@intFromPtr(value.ptr)))[0..value.len]
|
||||
else
|
||||
&[_]u8{},
|
||||
};
|
||||
const expr_data = js_ast.Expr.Data{ .e_string = &e_strings[0] };
|
||||
e_strings[0] = js_ast.E.String{
|
||||
.data = if (value.len > 0)
|
||||
@as([*]u8, @ptrFromInt(@intFromPtr(value.ptr)))[0..value.len]
|
||||
else
|
||||
&[_]u8{},
|
||||
};
|
||||
const expr_data = js_ast.Expr.Data{ .e_string = &e_strings[0] };
|
||||
|
||||
_ = try to_string.getOrPutValue(
|
||||
key_str,
|
||||
.init(.{
|
||||
.can_be_removed_if_unused = true,
|
||||
.call_can_be_unwrapped_if_unused = .if_unused,
|
||||
.value = expr_data,
|
||||
}),
|
||||
);
|
||||
e_strings = e_strings[1..];
|
||||
_ = try to_string.getOrPutValue(
|
||||
key_str,
|
||||
.init(.{
|
||||
.can_be_removed_if_unused = true,
|
||||
.call_can_be_unwrapped_if_unused = .if_unused,
|
||||
.value = expr_data,
|
||||
}),
|
||||
);
|
||||
e_strings = e_strings[1..];
|
||||
}
|
||||
} else {
|
||||
const hash = bun.hash(entry.key_ptr.*);
|
||||
|
||||
@@ -446,30 +453,33 @@ pub const Loader = struct {
|
||||
while (iter.next()) |entry| {
|
||||
const value: string = entry.value_ptr.value;
|
||||
|
||||
key_writer.print("process.env.{s}", .{entry.key_ptr.*}) catch |err| switch (err) {
|
||||
error.WriteFailed => unreachable, // miscalculated length of key_buf above
|
||||
};
|
||||
const key_str = key_writer.buffered();
|
||||
key_writer = std.Io.Writer.fixed(key_writer.unusedCapacitySlice());
|
||||
// Add defines for both process.env.* and import.meta.env.* (Vite compat)
|
||||
inline for (.{ "process.env.", "import.meta.env." }) |env_prefix| {
|
||||
key_writer.print(env_prefix ++ "{s}", .{entry.key_ptr.*}) catch |err| switch (err) {
|
||||
error.WriteFailed => unreachable, // miscalculated length of key_buf above
|
||||
};
|
||||
const key_str = key_writer.buffered();
|
||||
key_writer = std.Io.Writer.fixed(key_writer.unusedCapacitySlice());
|
||||
|
||||
e_strings[0] = js_ast.E.String{
|
||||
.data = if (entry.value_ptr.value.len > 0)
|
||||
@as([*]u8, @ptrFromInt(@intFromPtr(entry.value_ptr.value.ptr)))[0..value.len]
|
||||
else
|
||||
&[_]u8{},
|
||||
};
|
||||
e_strings[0] = js_ast.E.String{
|
||||
.data = if (value.len > 0)
|
||||
@as([*]u8, @ptrFromInt(@intFromPtr(value.ptr)))[0..value.len]
|
||||
else
|
||||
&[_]u8{},
|
||||
};
|
||||
|
||||
const expr_data = js_ast.Expr.Data{ .e_string = &e_strings[0] };
|
||||
const expr_data = js_ast.Expr.Data{ .e_string = &e_strings[0] };
|
||||
|
||||
_ = try to_string.getOrPutValue(
|
||||
key_str,
|
||||
.init(.{
|
||||
.can_be_removed_if_unused = true,
|
||||
.call_can_be_unwrapped_if_unused = .if_unused,
|
||||
.value = expr_data,
|
||||
}),
|
||||
);
|
||||
e_strings = e_strings[1..];
|
||||
_ = try to_string.getOrPutValue(
|
||||
key_str,
|
||||
.init(.{
|
||||
.can_be_removed_if_unused = true,
|
||||
.call_can_be_unwrapped_if_unused = .if_unused,
|
||||
.value = expr_data,
|
||||
}),
|
||||
);
|
||||
e_strings = e_strings[1..];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||

56 src/http.zig
@@ -328,29 +328,10 @@ fn writeProxyConnect(
|
||||
|
||||
_ = writer.write("\r\nProxy-Connection: Keep-Alive\r\n") catch 0;
|
||||
|
||||
// Check if user provided Proxy-Authorization in custom headers
|
||||
const user_provided_proxy_auth = if (client.proxy_headers) |hdrs| hdrs.get("proxy-authorization") != null else false;
|
||||
|
||||
// Only write auto-generated proxy_authorization if user didn't provide one
|
||||
if (client.proxy_authorization) |auth| {
|
||||
if (!user_provided_proxy_auth) {
|
||||
_ = writer.write("Proxy-Authorization: ") catch 0;
|
||||
_ = writer.write(auth) catch 0;
|
||||
_ = writer.write("\r\n") catch 0;
|
||||
}
|
||||
}
|
||||
|
||||
// Write custom proxy headers
|
||||
if (client.proxy_headers) |hdrs| {
|
||||
const slice = hdrs.entries.slice();
|
||||
const names = slice.items(.name);
|
||||
const values = slice.items(.value);
|
||||
for (names, 0..) |name_ptr, idx| {
|
||||
_ = writer.write(hdrs.asStr(name_ptr)) catch 0;
|
||||
_ = writer.write(": ") catch 0;
|
||||
_ = writer.write(hdrs.asStr(values[idx])) catch 0;
|
||||
_ = writer.write("\r\n") catch 0;
|
||||
}
|
||||
_ = writer.write("Proxy-Authorization: ") catch 0;
|
||||
_ = writer.write(auth) catch 0;
|
||||
_ = writer.write("\r\n") catch 0;
|
||||
}
|
||||
|
||||
_ = writer.write("\r\n") catch 0;
|
||||
@@ -378,31 +359,11 @@ fn writeProxyRequest(
|
||||
_ = writer.write(request.path) catch 0;
|
||||
_ = writer.write(" HTTP/1.1\r\nProxy-Connection: Keep-Alive\r\n") catch 0;
|
||||
|
||||
// Check if user provided Proxy-Authorization in custom headers
|
||||
const user_provided_proxy_auth = if (client.proxy_headers) |hdrs| hdrs.get("proxy-authorization") != null else false;
|
||||
|
||||
// Only write auto-generated proxy_authorization if user didn't provide one
|
||||
if (client.proxy_authorization) |auth| {
|
||||
if (!user_provided_proxy_auth) {
|
||||
_ = writer.write("Proxy-Authorization: ") catch 0;
|
||||
_ = writer.write(auth) catch 0;
|
||||
_ = writer.write("\r\n") catch 0;
|
||||
}
|
||||
_ = writer.write("Proxy-Authorization: ") catch 0;
|
||||
_ = writer.write(auth) catch 0;
|
||||
_ = writer.write("\r\n") catch 0;
|
||||
}
|
||||
|
||||
// Write custom proxy headers
|
||||
if (client.proxy_headers) |hdrs| {
|
||||
const slice = hdrs.entries.slice();
|
||||
const names = slice.items(.name);
|
||||
const values = slice.items(.value);
|
||||
for (names, 0..) |name_ptr, idx| {
|
||||
_ = writer.write(hdrs.asStr(name_ptr)) catch 0;
|
||||
_ = writer.write(": ") catch 0;
|
||||
_ = writer.write(hdrs.asStr(values[idx])) catch 0;
|
||||
_ = writer.write("\r\n") catch 0;
|
||||
}
|
||||
}
|
||||
|
||||
for (request.headers) |header| {
|
||||
_ = writer.write(header.name) catch 0;
|
||||
_ = writer.write(": ") catch 0;
|
||||
@@ -489,7 +450,6 @@ if_modified_since: string = "",
|
||||
request_content_len_buf: ["-4294967295".len]u8 = undefined,
|
||||
|
||||
http_proxy: ?URL = null,
|
||||
proxy_headers: ?Headers = null,
|
||||
proxy_authorization: ?[]u8 = null,
|
||||
proxy_tunnel: ?*ProxyTunnel = null,
|
||||
signals: Signals = .{},
|
||||
@@ -506,10 +466,6 @@ pub fn deinit(this: *HTTPClient) void {
|
||||
this.allocator.free(auth);
|
||||
this.proxy_authorization = null;
|
||||
}
|
||||
if (this.proxy_headers) |*hdrs| {
|
||||
hdrs.deinit();
|
||||
this.proxy_headers = null;
|
||||
}
|
||||
if (this.proxy_tunnel) |tunnel| {
|
||||
this.proxy_tunnel = null;
|
||||
tunnel.detachAndDeref();
|
||||
|
||||
@@ -93,7 +93,6 @@ const AtomicState = std.atomic.Value(State);
|
||||
|
||||
pub const Options = struct {
|
||||
http_proxy: ?URL = null,
|
||||
proxy_headers: ?Headers = null,
|
||||
hostname: ?[]u8 = null,
|
||||
signals: ?Signals = null,
|
||||
unix_socket_path: ?jsc.ZigString.Slice = null,
|
||||
@@ -186,7 +185,6 @@ pub fn init(
|
||||
.signals = options.signals orelse this.signals,
|
||||
.async_http_id = this.async_http_id,
|
||||
.http_proxy = this.http_proxy,
|
||||
.proxy_headers = options.proxy_headers,
|
||||
.redirect_type = redirect_type,
|
||||
};
|
||||
if (options.unix_socket_path) |val| {
|
||||
|
||||
@@ -484,19 +484,7 @@ function urlFormat(urlObject: unknown) {
|
||||
|
||||
Url.prototype.format = function format() {
|
||||
var auth: string = this.auth || "";
|
||||
|
||||
// Handle WHATWG URL objects which have username/password instead of auth
|
||||
if (!auth && (this.username || this.password)) {
|
||||
if (this.username) {
|
||||
auth = this.username;
|
||||
}
|
||||
if (this.password) {
|
||||
auth += ":" + this.password;
|
||||
}
|
||||
if (auth) {
|
||||
auth += "@";
|
||||
}
|
||||
} else if (auth) {
|
||||
if (auth) {
|
||||
auth = encodeURIComponent(auth);
|
||||
auth = auth.replace(/%3A/i, ":");
|
||||
auth += "@";
|
||||
|
||||

13 src/url.zig
@@ -984,12 +984,7 @@ pub const FormData = struct {
|
||||
switch (encoding) {
|
||||
.URLEncoded => {
|
||||
var str = jsc.ZigString.fromUTF8(strings.withoutUTF8BOM(input));
|
||||
const result = jsc.DOMFormData.createFromURLQuery(globalThis, &str);
|
||||
// Check if an exception was thrown (e.g., string too long)
|
||||
if (result == .zero) {
|
||||
return error.JSError;
|
||||
}
|
||||
return result;
|
||||
return jsc.DOMFormData.createFromURLQuery(globalThis, &str);
|
||||
},
|
||||
.Multipart => |boundary| return toJSFromMultipartData(globalThis, input, boundary),
|
||||
}
|
||||
@@ -1046,11 +1041,7 @@ pub const FormData = struct {
|
||||
return globalThis.throwInvalidArguments("input must be a string or ArrayBufferView", .{});
|
||||
}
|
||||
|
||||
return FormData.toJS(globalThis, input, encoding) catch |err| {
|
||||
if (err == error.JSError) return error.JSError;
|
||||
if (err == error.JSTerminated) return error.JSTerminated;
|
||||
return globalThis.throwError(err, "while parsing FormData");
|
||||
};
|
||||
return FormData.toJS(globalThis, input, encoding) catch |err| return globalThis.throwError(err, "while parsing FormData");
|
||||
}
|
||||
|
||||
comptime {
|
||||
|
||||
@@ -116,5 +116,109 @@ for (let backend of ["api", "cli"] as const) {
|
||||
stdout: "process.env.BASE_URL\n$BASE_URL",
|
||||
},
|
||||
});
|
||||
|
||||
// Test optional chaining with process?.env?.VAR
|
||||
if (backend === "cli")
|
||||
itBundled("env/optional-chaining", {
|
||||
env: {
|
||||
MY_VAR: "my_value",
|
||||
ANOTHER: "another_value",
|
||||
},
|
||||
backend: backend,
|
||||
dotenv: "inline",
|
||||
files: {
|
||||
"/a.js": `
|
||||
// Test optional chaining patterns
|
||||
console.log(process?.env?.MY_VAR);
|
||||
console.log(process?.env?.ANOTHER);
|
||||
// Mixed optional chaining
|
||||
console.log(process?.env.MY_VAR);
|
||||
console.log(process.env?.MY_VAR);
|
||||
`,
|
||||
},
|
||||
run: {
|
||||
env: {
|
||||
MY_VAR: "wrong",
|
||||
ANOTHER: "wrong",
|
||||
},
|
||||
stdout: "my_value\nanother_value\nmy_value\nmy_value\n",
|
||||
},
|
||||
});
|
||||
|
||||
// Test optional chaining with bracket notation
|
||||
if (backend === "cli")
|
||||
itBundled("env/optional-chaining-bracket", {
|
||||
env: {
|
||||
BRACKET_VAR: "bracket_value",
|
||||
},
|
||||
backend: backend,
|
||||
dotenv: "inline",
|
||||
files: {
|
||||
"/a.js": `
|
||||
// Test optional chaining with bracket notation
|
||||
console.log(process?.env?.["BRACKET_VAR"]);
|
||||
console.log(process?.env["BRACKET_VAR"]);
|
||||
console.log(process.env?.["BRACKET_VAR"]);
|
||||
`,
|
||||
},
|
||||
run: {
|
||||
env: {
|
||||
BRACKET_VAR: "wrong",
|
||||
},
|
||||
stdout: "bracket_value\nbracket_value\nbracket_value\n",
|
||||
},
|
||||
});
|
||||
|
||||
// Test import.meta.env.* inlining
|
||||
if (backend === "cli")
|
||||
itBundled("env/import-meta-env", {
|
||||
env: {
|
||||
VITE_API_URL: "https://api.example.com",
|
||||
MY_SECRET: "secret123",
|
||||
},
|
||||
backend: backend,
|
||||
dotenv: "inline",
|
||||
files: {
|
||||
"/a.js": `
|
||||
// Test import.meta.env.* inlining (Vite compatibility)
|
||||
console.log(import.meta.env.VITE_API_URL);
|
||||
console.log(import.meta.env.MY_SECRET);
|
||||
`,
|
||||
},
|
||||
run: {
|
||||
env: {
|
||||
VITE_API_URL: "wrong",
|
||||
MY_SECRET: "wrong",
|
||||
},
|
||||
stdout: "https://api.example.com\nsecret123\n",
|
||||
},
|
||||
});
|
||||
|
||||
// Test import.meta.env with prefix matching
|
||||
if (backend === "cli")
|
||||
itBundled("env/import-meta-env-prefix", {
|
||||
env: {
|
||||
VITE_PUBLIC: "public_value",
|
||||
VITE_PRIVATE: "private_value",
|
||||
OTHER_VAR: "other_value",
|
||||
},
|
||||
backend: backend,
|
||||
dotenv: "VITE_*",
|
||||
files: {
|
||||
"/a.js": `
|
||||
// Test import.meta.env with prefix matching
|
||||
console.log(import.meta.env.VITE_PUBLIC);
|
||||
console.log(import.meta.env.VITE_PRIVATE);
|
||||
console.log(import.meta.env.OTHER_VAR);
|
||||
`,
|
||||
},
|
||||
run: {
|
||||
env: {
|
||||
VITE_PUBLIC: "wrong",
|
||||
VITE_PRIVATE: "wrong",
|
||||
},
|
||||
stdout: "public_value\nprivate_value\nundefined\n",
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -246,80 +246,3 @@ if (typeof process !== "undefined") {
|
||||
// @ts-expect-error - -Infinity
|
||||
fetch("https://example.com", { body: -Infinity });
|
||||
}
|
||||
|
||||
// Proxy option types
|
||||
{
|
||||
// String proxy URL is valid
|
||||
fetch("https://example.com", { proxy: "http://proxy.example.com:8080" });
|
||||
fetch("https://example.com", { proxy: "https://user:pass@proxy.example.com:8080" });
|
||||
}
|
||||
|
||||
{
|
||||
// Object proxy with url is valid
|
||||
fetch("https://example.com", {
|
||||
proxy: {
|
||||
url: "http://proxy.example.com:8080",
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
// Object proxy with url and headers (plain object) is valid
|
||||
fetch("https://example.com", {
|
||||
proxy: {
|
||||
url: "http://proxy.example.com:8080",
|
||||
headers: {
|
||||
"Proxy-Authorization": "Bearer token",
|
||||
"X-Custom-Header": "value",
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
// Object proxy with url and headers (Headers instance) is valid
|
||||
fetch("https://example.com", {
|
||||
proxy: {
|
||||
url: "http://proxy.example.com:8080",
|
||||
headers: new Headers({ "Proxy-Authorization": "Bearer token" }),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
// Object proxy with url and headers (array of tuples) is valid
|
||||
fetch("https://example.com", {
|
||||
proxy: {
|
||||
url: "http://proxy.example.com:8080",
|
||||
headers: [
|
||||
["Proxy-Authorization", "Bearer token"],
|
||||
["X-Custom", "value"],
|
||||
],
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
// @ts-expect-error - Proxy object without url is invalid
|
||||
fetch("https://example.com", { proxy: { headers: { "X-Custom": "value" } } });
|
||||
}
|
||||
|
||||
{
|
||||
// @ts-expect-error - Proxy url must be string, not number
|
||||
fetch("https://example.com", { proxy: { url: 8080 } });
|
||||
}
|
||||
|
||||
{
|
||||
// @ts-expect-error - Proxy must be string or object, not number
|
||||
fetch("https://example.com", { proxy: 8080 });
|
||||
}
|
||||
|
||||
{
|
||||
// @ts-expect-error - Proxy must be string or object, not boolean
|
||||
fetch("https://example.com", { proxy: true });
|
||||
}
|
||||
|
||||
{
|
||||
// @ts-expect-error - Proxy must be string or object, not array
|
||||
fetch("https://example.com", { proxy: ["http://proxy.example.com"] });
|
||||
}
|
||||
|
||||
@@ -337,366 +337,3 @@ test("HTTPS origin close-delimited body via HTTP proxy does not ECONNRESET", asy
|
||||
await once(originServer, "close");
|
||||
}
|
||||
});
|
||||
|
||||
describe("proxy object format with headers", () => {
|
||||
test("proxy object with url string works same as string proxy", async () => {
|
||||
const response = await fetch(httpServer.url, {
|
||||
method: "GET",
|
||||
proxy: {
|
||||
url: httpProxyServer.url,
|
||||
},
|
||||
keepalive: false,
|
||||
});
|
||||
expect(response.ok).toBe(true);
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
test("proxy object with url and headers sends headers to proxy (HTTP proxy)", async () => {
|
||||
// Create a proxy server that captures headers
|
||||
const capturedHeaders: string[] = [];
|
||||
const proxyServerWithCapture = net.createServer((clientSocket: net.Socket) => {
|
||||
clientSocket.once("data", data => {
|
||||
const request = data.toString();
|
||||
// Capture headers
|
||||
const lines = request.split("\r\n");
|
||||
for (const line of lines) {
|
||||
if (line.toLowerCase().startsWith("x-proxy-")) {
|
||||
capturedHeaders.push(line.toLowerCase());
|
||||
}
|
||||
}
|
||||
|
||||
const [method, path] = request.split(" ");
|
||||
let host: string;
|
||||
let port: number | string = 0;
|
||||
let request_path = "";
|
||||
if (path.indexOf("http") !== -1) {
|
||||
const url = new URL(path);
|
||||
host = url.hostname;
|
||||
port = url.port;
|
||||
request_path = url.pathname + (url.search || "");
|
||||
} else {
|
||||
[host, port] = path.split(":");
|
||||
}
|
||||
const destinationPort = Number.parseInt((port || (method === "CONNECT" ? "443" : "80")).toString(), 10);
|
||||
const destinationHost = host || "";
|
||||
|
||||
const serverSocket = net.connect(destinationPort, destinationHost, () => {
|
||||
if (method === "CONNECT") {
|
||||
clientSocket.write("HTTP/1.1 200 OK\r\nHost: localhost\r\n\r\n");
|
||||
clientSocket.pipe(serverSocket);
|
||||
serverSocket.pipe(clientSocket);
|
||||
} else {
|
||||
serverSocket.write(`${method} ${request_path} HTTP/1.1\r\n`);
|
||||
serverSocket.write(data.slice(request.indexOf("\r\n") + 2));
|
||||
serverSocket.pipe(clientSocket);
|
||||
}
|
||||
});
|
||||
clientSocket.on("error", () => {});
|
||||
serverSocket.on("error", () => {
|
||||
clientSocket.end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
proxyServerWithCapture.listen(0);
|
||||
await once(proxyServerWithCapture, "listening");
|
||||
const proxyPort = (proxyServerWithCapture.address() as net.AddressInfo).port;
|
||||
const proxyUrl = `http://localhost:${proxyPort}`;
|
||||
|
||||
try {
|
||||
const response = await fetch(httpServer.url, {
|
||||
method: "GET",
|
||||
proxy: {
|
||||
url: proxyUrl,
|
||||
headers: {
|
||||
"X-Proxy-Custom-Header": "custom-value",
|
||||
"X-Proxy-Another": "another-value",
|
||||
},
|
||||
},
|
||||
keepalive: false,
|
||||
});
|
||||
expect(response.ok).toBe(true);
|
||||
expect(response.status).toBe(200);
|
||||
// Verify the custom headers were sent to the proxy (case-insensitive check)
|
||||
expect(capturedHeaders).toContainEqual(expect.stringContaining("x-proxy-custom-header: custom-value"));
|
||||
expect(capturedHeaders).toContainEqual(expect.stringContaining("x-proxy-another: another-value"));
|
||||
} finally {
|
||||
proxyServerWithCapture.close();
|
||||
await once(proxyServerWithCapture, "close");
|
||||
}
|
||||
});
|
||||
|
||||
test("proxy object with url and headers sends headers in CONNECT request (HTTPS target)", async () => {
|
||||
// Create a proxy server that captures headers
|
||||
const capturedHeaders: string[] = [];
|
||||
const proxyServerWithCapture = net.createServer((clientSocket: net.Socket) => {
|
||||
clientSocket.once("data", data => {
|
||||
const request = data.toString();
|
||||
// Capture headers
|
||||
const lines = request.split("\r\n");
|
||||
for (const line of lines) {
|
||||
if (line.toLowerCase().startsWith("x-proxy-")) {
|
||||
capturedHeaders.push(line.toLowerCase());
|
||||
}
|
||||
}
|
||||
|
||||
const [method, path] = request.split(" ");
|
||||
let host: string;
|
||||
let port: number | string = 0;
|
||||
if (path.indexOf("http") !== -1) {
|
||||
const url = new URL(path);
|
||||
host = url.hostname;
|
||||
port = url.port;
|
||||
} else {
|
||||
[host, port] = path.split(":");
|
||||
}
|
||||
const destinationPort = Number.parseInt((port || (method === "CONNECT" ? "443" : "80")).toString(), 10);
|
||||
const destinationHost = host || "";
|
||||
|
||||
const serverSocket = net.connect(destinationPort, destinationHost, () => {
|
||||
if (method === "CONNECT") {
|
||||
clientSocket.write("HTTP/1.1 200 OK\r\nHost: localhost\r\n\r\n");
|
||||
clientSocket.pipe(serverSocket);
|
||||
serverSocket.pipe(clientSocket);
|
||||
} else {
|
||||
clientSocket.write("HTTP/1.1 502 Bad Gateway\r\n\r\n");
|
||||
clientSocket.end();
|
||||
}
|
||||
});
|
||||
clientSocket.on("error", () => {});
|
||||
serverSocket.on("error", () => {
|
||||
clientSocket.end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
proxyServerWithCapture.listen(0);
|
||||
await once(proxyServerWithCapture, "listening");
|
||||
const proxyPort = (proxyServerWithCapture.address() as net.AddressInfo).port;
|
||||
const proxyUrl = `http://localhost:${proxyPort}`;
|
||||
|
||||
try {
|
||||
const response = await fetch(httpsServer.url, {
|
||||
method: "GET",
|
||||
proxy: {
|
||||
url: proxyUrl,
|
||||
headers: new Headers({
|
||||
"X-Proxy-Auth-Token": "secret-token-123",
|
||||
}),
|
||||
},
|
||||
keepalive: false,
|
||||
tls: {
|
||||
ca: tlsCert.cert,
|
||||
rejectUnauthorized: false,
|
||||
},
|
||||
});
|
||||
expect(response.ok).toBe(true);
|
||||
expect(response.status).toBe(200);
|
||||
// Verify the custom headers were sent in the CONNECT request (case-insensitive check)
|
||||
expect(capturedHeaders).toContainEqual(expect.stringContaining("x-proxy-auth-token: secret-token-123"));
|
||||
} finally {
|
||||
proxyServerWithCapture.close();
|
||||
await once(proxyServerWithCapture, "close");
|
||||
}
|
||||
});
|
||||
|
||||
test("proxy object without url throws error", async () => {
|
||||
await expect(
|
||||
fetch(httpServer.url, {
|
||||
method: "GET",
|
||||
proxy: {
|
||||
headers: { "X-Test": "value" },
|
||||
} as any,
|
||||
keepalive: false,
|
||||
}),
|
||||
).rejects.toThrow("fetch() proxy object requires a 'url' property");
|
||||
});
|
||||
|
||||
test("proxy object with null url throws error", async () => {
|
||||
await expect(
|
||||
fetch(httpServer.url, {
|
||||
method: "GET",
|
||||
proxy: {
|
||||
url: null,
|
||||
headers: { "X-Test": "value" },
|
||||
} as any,
|
||||
keepalive: false,
|
||||
}),
|
||||
).rejects.toThrow("fetch() proxy object requires a 'url' property");
|
||||
});
|
||||
|
||||
test("proxy object with empty string url throws error", async () => {
|
||||
await expect(
|
||||
fetch(httpServer.url, {
|
||||
method: "GET",
|
||||
proxy: {
|
||||
url: "",
|
||||
headers: { "X-Test": "value" },
|
||||
} as any,
|
||||
keepalive: false,
|
||||
}),
|
||||
).rejects.toThrow("fetch() proxy.url must be a non-empty string");
|
||||
});
|
||||
|
||||
test("proxy object with empty headers object works", async () => {
|
||||
const response = await fetch(httpServer.url, {
|
||||
method: "GET",
|
||||
proxy: {
|
||||
url: httpProxyServer.url,
|
||||
headers: {},
|
||||
},
|
||||
keepalive: false,
|
||||
});
|
||||
expect(response.ok).toBe(true);
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
test("proxy object with undefined headers works", async () => {
|
||||
const response = await fetch(httpServer.url, {
|
||||
method: "GET",
|
||||
proxy: {
|
||||
url: httpProxyServer.url,
|
||||
headers: undefined,
|
||||
},
|
||||
keepalive: false,
|
||||
});
|
||||
expect(response.ok).toBe(true);
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
test("proxy object with headers as Headers instance", async () => {
|
||||
const capturedHeaders: string[] = [];
|
||||
const proxyServerWithCapture = net.createServer((clientSocket: net.Socket) => {
|
||||
clientSocket.once("data", data => {
|
||||
const request = data.toString();
|
||||
const lines = request.split("\r\n");
|
||||
for (const line of lines) {
|
||||
if (line.toLowerCase().startsWith("x-custom-")) {
|
||||
capturedHeaders.push(line.toLowerCase());
|
||||
}
|
||||
}
|
||||
|
||||
const [method, path] = request.split(" ");
|
||||
let host: string;
|
||||
let port: number | string = 0;
|
||||
let request_path = "";
|
||||
if (path.indexOf("http") !== -1) {
|
||||
const url = new URL(path);
|
||||
host = url.hostname;
|
||||
port = url.port;
|
||||
request_path = url.pathname + (url.search || "");
|
||||
} else {
|
||||
[host, port] = path.split(":");
|
||||
}
|
||||
const destinationPort = Number.parseInt((port || "80").toString(), 10);
|
||||
const destinationHost = host || "";
|
||||
|
||||
const serverSocket = net.connect(destinationPort, destinationHost, () => {
|
||||
serverSocket.write(`${method} ${request_path} HTTP/1.1\r\n`);
|
||||
serverSocket.write(data.slice(request.indexOf("\r\n") + 2));
|
||||
serverSocket.pipe(clientSocket);
|
||||
});
|
||||
clientSocket.on("error", () => {});
|
||||
serverSocket.on("error", () => {
|
||||
clientSocket.end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
proxyServerWithCapture.listen(0);
|
||||
await once(proxyServerWithCapture, "listening");
|
||||
const proxyPort = (proxyServerWithCapture.address() as net.AddressInfo).port;
|
||||
const proxyUrl = `http://localhost:${proxyPort}`;
|
||||
|
||||
try {
|
||||
const headers = new Headers();
|
||||
headers.set("X-Custom-Header-1", "value1");
|
||||
headers.set("X-Custom-Header-2", "value2");
|
||||
|
||||
const response = await fetch(httpServer.url, {
|
||||
method: "GET",
|
||||
proxy: {
|
||||
url: proxyUrl,
|
||||
headers: headers,
|
||||
},
|
||||
keepalive: false,
|
||||
});
|
||||
expect(response.ok).toBe(true);
|
||||
expect(response.status).toBe(200);
|
||||
// Case-insensitive check
|
||||
expect(capturedHeaders).toContainEqual(expect.stringContaining("x-custom-header-1: value1"));
|
||||
expect(capturedHeaders).toContainEqual(expect.stringContaining("x-custom-header-2: value2"));
|
||||
} finally {
|
||||
proxyServerWithCapture.close();
|
||||
await once(proxyServerWithCapture, "close");
|
||||
}
|
||||
});
|
||||
|
||||
test("user-provided Proxy-Authorization header overrides URL credentials", async () => {
|
||||
const capturedHeaders: string[] = [];
|
||||
const proxyServerWithCapture = net.createServer((clientSocket: net.Socket) => {
|
||||
clientSocket.once("data", data => {
|
||||
const request = data.toString();
|
||||
const lines = request.split("\r\n");
|
||||
for (const line of lines) {
|
||||
if (line.toLowerCase().startsWith("proxy-authorization:")) {
|
||||
capturedHeaders.push(line.toLowerCase());
|
||||
}
|
||||
}
|
||||
|
||||
const [method, path] = request.split(" ");
|
||||
let host: string;
|
||||
let port: number | string = 0;
|
||||
let request_path = "";
|
||||
if (path.indexOf("http") !== -1) {
|
||||
const url = new URL(path);
|
||||
host = url.hostname;
|
||||
port = url.port;
|
||||
request_path = url.pathname + (url.search || "");
|
||||
} else {
|
||||
[host, port] = path.split(":");
|
||||
}
|
||||
const destinationPort = Number.parseInt((port || "80").toString(), 10);
|
||||
const destinationHost = host || "";
|
||||
|
||||
const serverSocket = net.connect(destinationPort, destinationHost, () => {
|
||||
serverSocket.write(`${method} ${request_path} HTTP/1.1\r\n`);
|
||||
serverSocket.write(data.slice(request.indexOf("\r\n") + 2));
|
||||
serverSocket.pipe(clientSocket);
|
||||
});
|
||||
clientSocket.on("error", () => {});
|
||||
serverSocket.on("error", () => {
|
||||
clientSocket.end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
proxyServerWithCapture.listen(0);
|
||||
await once(proxyServerWithCapture, "listening");
|
||||
const proxyPort = (proxyServerWithCapture.address() as net.AddressInfo).port;
|
||||
// Proxy URL with credentials that would generate Basic auth
|
||||
const proxyUrl = `http://urluser:urlpass@localhost:${proxyPort}`;
|
||||
|
||||
try {
|
||||
const response = await fetch(httpServer.url, {
|
||||
method: "GET",
|
||||
proxy: {
|
||||
url: proxyUrl,
|
||||
headers: {
|
||||
// User-provided Proxy-Authorization should override the URL-based one
|
||||
"Proxy-Authorization": "Bearer custom-token-12345",
|
||||
},
|
||||
},
|
||||
keepalive: false,
|
||||
});
|
||||
expect(response.ok).toBe(true);
|
||||
expect(response.status).toBe(200);
|
||||
// Should only have one Proxy-Authorization header (the user-provided one)
|
||||
expect(capturedHeaders.length).toBe(1);
|
||||
expect(capturedHeaders[0]).toBe("proxy-authorization: bearer custom-token-12345");
|
||||
} finally {
|
||||
proxyServerWithCapture.close();
|
||||
await once(proxyServerWithCapture, "close");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -68,32 +68,4 @@ describe.skipIf(isWindows)("Bun.mmap", async () => {
|
||||
expect(map[0]).toBe(old);
|
||||
await gcTick();
|
||||
});
|
||||
|
||||
it("mmap rejects negative offset", () => {
|
||||
expect(() => Bun.mmap(path, { offset: -1 })).toThrow("offset must be a non-negative integer");
|
||||
});
|
||||
|
||||
it("mmap rejects negative size", () => {
|
||||
expect(() => Bun.mmap(path, { size: -1 })).toThrow("size must be a non-negative integer");
|
||||
});
|
||||
|
||||
it("mmap handles non-number offset/size without crashing", () => {
|
||||
// These should not crash - non-number values coerce to 0 per JavaScript semantics
|
||||
// Previously these caused assertion failures (issue ENG-22413)
|
||||
|
||||
// null coerces to 0, which is valid for offset
|
||||
expect(() => {
|
||||
Bun.mmap(path, { offset: null });
|
||||
}).not.toThrow();
|
||||
|
||||
// size: null coerces to 0, which is invalid (EINVAL), but shouldn't crash
|
||||
expect(() => {
|
||||
Bun.mmap(path, { size: null });
|
||||
}).toThrow("EINVAL");
|
||||
|
||||
// undefined is ignored (property not set)
|
||||
expect(() => {
|
||||
Bun.mmap(path, { offset: undefined });
|
||||
}).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1941,7 +1941,7 @@ test("no assertion failures 3", () => {
|
||||
],
|
||||
[
|
||||
class // Random { // comments /* */ are part of the toString() result
|
||||
äß /**/
|
||||
äß /**/
|
||||
extends /*{*/ TypeError {},
|
||||
"[class äß extends TypeError]",
|
||||
],
|
||||
|
||||
@@ -277,23 +277,6 @@ describe("FormData", () => {
|
||||
expect(fd.toJSON()).toEqual({ "1": "1" });
|
||||
});
|
||||
|
||||
test("FormData.from throws on very large input instead of crashing", () => {
|
||||
// This test verifies that FormData.from throws an exception instead of crashing
|
||||
// when given input larger than WebKit's String::MaxLength (INT32_MAX ~= 2GB).
|
||||
// We use a smaller test case with the synthetic limit to avoid actually allocating 2GB+.
|
||||
const { setSyntheticAllocationLimitForTesting } = require("bun:internal-for-testing");
|
||||
// Set a small limit so we can test the boundary without allocating gigabytes
|
||||
const originalLimit = setSyntheticAllocationLimitForTesting(1024 * 1024); // 1MB limit
|
||||
try {
|
||||
// Create a buffer larger than the limit
|
||||
const largeBuffer = new Uint8Array(2 * 1024 * 1024); // 2MB
|
||||
// @ts-expect-error - FormData.from is a Bun extension
|
||||
expect(() => FormData.from(largeBuffer)).toThrow("Cannot create a string longer than");
|
||||
} finally {
|
||||
setSyntheticAllocationLimitForTesting(originalLimit);
|
||||
}
|
||||
});
|
||||
|
||||
it("should throw on bad boundary", async () => {
|
||||
const response = new Response('foo\r\nContent-Disposition: form-data; name="foo"\r\n\r\nbar\r\n', {
|
||||
headers: {
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
import { expect, test } from "bun:test";
|
||||
import url from "node:url";
|
||||
|
||||
test("url.format with WHATWG URL preserves username and password", () => {
|
||||
const result = url.format(new URL("https://a:b@example.org/"));
|
||||
expect(result).toBe("https://a:b@example.org/");
|
||||
});
|
||||
|
||||
test("url.format with WHATWG URL preserves username only", () => {
|
||||
const result = url.format(new URL("https://user@example.org/"));
|
||||
expect(result).toBe("https://user@example.org/");
|
||||
});
|
||||
|
||||
test("url.format with WHATWG URL without auth", () => {
|
||||
const result = url.format(new URL("https://example.org/"));
|
||||
expect(result).toBe("https://example.org/");
|
||||
});
|
||||
|
||||
test("url.format with WHATWG URL preserves username, password, and path", () => {
|
||||
const result = url.format(new URL("https://user:pass@example.org/path?query=1#hash"));
|
||||
expect(result).toBe("https://user:pass@example.org/path?query=1#hash");
|
||||
});
|
||||
|
||||
test("url.format with WHATWG URL with special characters in credentials", () => {
|
||||
// When creating a URL, special characters in credentials are already percent-encoded
|
||||
// url.format should preserve the encoding from the URL object
|
||||
const result = url.format(new URL("https://us%40er:p%40ss@example.org/"));
|
||||
// The username and password are already encoded by the URL object, so we should preserve them as-is
|
||||
expect(result).toBe("https://us%40er:p%40ss@example.org/");
|
||||
});
|
||||
|
||||
test("url.format with legacy Url object still works", () => {
|
||||
const parsed = url.parse("https://a:b@example.org/path");
|
||||
const result = url.format(parsed);
|
||||
expect(result).toBe("https://a:b@example.org/path");
|
||||
});
|
||||
@@ -1,40 +0,0 @@
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
// Regression test for ENG-22942: Crash when calling expect.extend with non-function values
|
||||
// The crash occurred because JSWrappingFunction assumed all callable objects are JSFunction,
|
||||
// but class constructors like Expect are callable but not JSFunction instances.
|
||||
|
||||
test("expect.extend with jest object should throw TypeError, not crash", () => {
|
||||
const jest = Bun.jest(import.meta.path);
|
||||
|
||||
expect(() => {
|
||||
jest.expect.extend(jest);
|
||||
}).toThrow(TypeError);
|
||||
});
|
||||
|
||||
test("expect.extend with object containing non-function values should throw", () => {
|
||||
const jest = Bun.jest(import.meta.path);
|
||||
|
||||
expect(() => {
|
||||
jest.expect.extend({
|
||||
notAFunction: "string value",
|
||||
});
|
||||
}).toThrow("expect.extend: `notAFunction` is not a valid matcher");
|
||||
});
|
||||
|
||||
test("expect.extend with valid matchers still works", () => {
|
||||
const jest = Bun.jest(import.meta.path);
|
||||
|
||||
jest.expect.extend({
|
||||
toBeEven(received: number) {
|
||||
const pass = received % 2 === 0;
|
||||
return {
|
||||
message: () => `expected ${received} ${pass ? "not " : ""}to be even`,
|
||||
pass,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
jest.expect(4).toBeEven();
|
||||
jest.expect(3).not.toBeEven();
|
||||
});
|
||||