fetch!!!

This commit is contained in:
Jarred Sumner
2021-09-09 05:40:07 -07:00
parent c30ec608b1
commit 8a02ad48a5
34 changed files with 6782 additions and 161 deletions

View File

@@ -7,6 +7,11 @@ pub fn addPicoHTTP(step: *std.build.LibExeObjStep, comptime with_obj: bool) void
.path = .{ .path = "src/deps/picohttp.zig" },
});
_ = step.addPackage(.{
.name = "iguanaTLS",
.path = .{ .path = "src/deps/iguanaTLS/src/main.zig" },
});
step.addIncludeDir("src/deps");
if (with_obj) {

View File

@@ -124,6 +124,7 @@ a:hover {
.BunError-NativeStackTrace {
margin-top: 0;
width: 100%;
}
.BunError-NativeStackTrace-filename {

View File

@@ -555,6 +555,32 @@ const JSException = ({ value }: { value: JSExceptionType }) => {
}
default: {
const newline = value.message.indexOf("\n");
if (newline > -1) {
const subtitle = value.message.substring(newline + 1).trim();
const message = value.message.substring(0, newline).trim();
if (subtitle.length) {
return (
<div className={`BunError-JSException`}>
<div className="BunError-error-header">
<div className={`BunError-error-code`}>{value.name}</div>
{errorTags[ErrorTagType.server]}
</div>
<div className={`BunError-error-message`}>{message}</div>
<div className={`BunError-error-subtitle`}>{subtitle}</div>
{value.stack && (
<NativeStackTrace
frames={value.stack.frames}
sourceLines={value.stack.source_lines}
/>
)}
</div>
);
}
}
return (
<div className={`BunError-JSException`}>
<div className="BunError-error-header">

View File

@@ -4,12 +4,15 @@
"main": "index.js",
"license": "MIT",
"dependencies": {
"isomorphic-fetch": "^3.0.0",
"next": "^11.1.0",
"parcel": "2.0.0-rc.0",
"path-browserify": "^1.0.1",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"react-is": "^17.0.2",
"react-static-tweets": "^0.5.4",
"static-tweets": "^0.5.5",
"whatwg-url": "^9.1.0"
},
"devDependencies": {

View File

@@ -591,6 +591,27 @@ export async function render({
defaultLocale: null,
});
if (result) {
if ("props" in result) {
if (typeof result.props === "object") {
Object.assign(props, result.props);
}
}
}
} else if (typeof getStaticProps === "function") {
const result = await getStaticProps({
params: route.params,
query: route.query,
req: notImplementedProxy("req"),
res: notImplementedProxy("res"),
resolvedUrl: route.pathname,
preview: false,
previewData: null,
locale: null,
locales: [],
defaultLocale: null,
});
if (result) {
if ("props" in result) {
if (typeof result.props === "object") {

View File

@@ -1114,7 +1114,15 @@ pub fn NewBundler(cache_files: bool) type {
);
var module_name = file_path.text["/bun-vfs/node_modules/".len..];
module_name = module_name[0..strings.indexOfChar(module_name, '/').?];
if (module_name[0] == '@') {
var end = strings.indexOfChar(module_name, '/').? + 1;
end += strings.indexOfChar(module_name[end..], '/').?;
module_name = module_name[0..end];
} else {
module_name = module_name[0..strings.indexOfChar(module_name, '/').?];
}
if (NodeFallbackModules.Map.get(module_name)) |mod| {
break :brk CacheEntry{ .contents = mod.code.* };

1
src/deps/iguanaTLS/.gitattributes vendored Normal file
View File

@@ -0,0 +1 @@
*.zig text=auto eol=lf

3
src/deps/iguanaTLS/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
/zig-cache
deps.zig
gyro.lock

0
src/deps/iguanaTLS/.gitmodules vendored Normal file
View File

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Alexandros Naskos
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,14 @@
const Builder = @import("std").build.Builder;
/// Build entry point: produces the static `iguanaTLS` library and wires a
/// `zig build test` step that runs the library's inline tests.
pub fn build(b: *Builder) void {
    // Standard -Drelease-* option handling.
    const build_mode = b.standardReleaseOptions();

    // Static library artifact rooted at src/main.zig.
    const static_lib = b.addStaticLibrary("iguanaTLS", "src/main.zig");
    static_lib.setBuildMode(build_mode);
    static_lib.install();

    // Test runner over the same root file, exposed as the "test" step.
    var library_tests = b.addTest("src/main.zig");
    library_tests.setBuildMode(build_mode);

    const test_step = b.step("test", "Run library tests");
    test_step.dependOn(&library_tests.step);
}

View File

@@ -0,0 +1,631 @@
const std = @import("std");
const BigInt = std.math.big.int.Const;
const mem = std.mem;
const Allocator = mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;
// zig fmt: off
/// ASN.1 universal tag numbers as encoded in DER (one identifier byte each).
/// `sequence`/`set` include the constructed bit (0x20).
pub const Tag = enum(u8) {
    bool = 0x01,
    int = 0x02,
    bit_string = 0x03,
    octet_string = 0x04,
    @"null" = 0x05,
    object_identifier = 0x06,
    utf8_string = 0x0c,
    printable_string = 0x13,
    ia5_string = 0x16,
    utc_time = 0x17,
    bmp_string = 0x1e,
    sequence = 0x30,
    set = 0x31,
    // Bogus value: stand-in for the context-specific class (high bits 0b10),
    // which has no single universal tag number.
    context_specific = 0xff,
};
// zig fmt: on
/// Decoded OBJECT IDENTIFIER: fixed storage for up to 16 arcs,
/// of which the first `len` entries are valid.
pub const ObjectIdentifier = struct {
    data: [16]u32,
    len: u8,
};
/// Decoded BIT STRING: `data` holds the packed bits, `bit_len` is the number
/// of meaningful bits (may be smaller than `data.len * 8`).
pub const BitString = struct {
    data: []const u8,
    bit_len: usize,
};
/// A fully decoded ASN.1 value, discriminated by `Tag`.
/// Heap-owning variants (strings, big ints, containers) must be released
/// with `deinit` using the same allocator that produced them.
pub const Value = union(Tag) {
    bool: bool,
    int: BigInt,
    bit_string: BitString,
    octet_string: []const u8,
    @"null",
    // @TODO Make this []u32, owned?
    object_identifier: ObjectIdentifier,
    utf8_string: []const u8,
    printable_string: []const u8,
    ia5_string: []const u8,
    utc_time: []const u8,
    bmp_string: []const u16,
    sequence: []const @This(),
    set: []const @This(),
    context_specific: struct {
        child: *const Value,
        number: u8,
    },

    /// Recursively frees every allocation owned by this value.
    /// `bool`, `null` and `object_identifier` own no heap memory.
    pub fn deinit(self: @This(), alloc: *Allocator) void {
        switch (self) {
            .int => |i| alloc.free(i.limbs),
            .bit_string => |bs| alloc.free(bs.data),
            .octet_string,
            .utf8_string,
            .printable_string,
            .ia5_string,
            .utc_time,
            => |s| alloc.free(s),
            .bmp_string => |s| alloc.free(s),
            .sequence, .set => |s| {
                // Children first, then the backing slice.
                for (s) |c| {
                    c.deinit(alloc);
                }
                alloc.free(s);
            },
            .context_specific => |cs| {
                cs.child.deinit(alloc);
                alloc.destroy(cs.child);
            },
            else => {},
        }
    }

    /// Pretty-prints the value tree for debugging; each nesting level is
    /// indented by two extra spaces.
    fn formatInternal(
        self: Value,
        comptime fmt: []const u8,
        options: std.fmt.FormatOptions,
        indents: usize,
        writer: anytype,
    ) @TypeOf(writer).Error!void {
        try writer.writeByteNTimes(' ', indents);
        switch (self) {
            .bool => |b| try writer.print("BOOLEAN {}\n", .{b}),
            .int => |i| {
                try writer.writeAll("INTEGER ");
                try i.format(fmt, options, writer);
                try writer.writeByte('\n');
            },
            .bit_string => |bs| {
                // Shows at most the first 24 bits, then "..." when truncated.
                try writer.print("BIT STRING ({} bits) ", .{bs.bit_len});
                const bits_to_show = std.math.min(8 * 3, bs.bit_len);
                const bytes = std.math.divCeil(usize, bits_to_show, 8) catch unreachable;

                var bit_idx: usize = 0;
                var byte_idx: usize = 0;
                while (byte_idx < bytes) : (byte_idx += 1) {
                    const byte = bs.data[byte_idx];
                    var cur_bit_idx: u3 = 0;
                    while (bit_idx < bits_to_show) {
                        const mask = @as(u8, 0x80) >> cur_bit_idx;
                        try writer.print("{}", .{@boolToInt(byte & mask == mask)});
                        cur_bit_idx += 1;
                        bit_idx += 1;
                        // NOTE(review): breaking when cur_bit_idx == 7 prints only
                        // 7 of each byte's 8 bits (the 0x01 bit is skipped) —
                        // looks like an off-by-one in this debug printer; the
                        // break also avoids overflowing the u3 counter. Confirm
                        // intent before changing.
                        if (cur_bit_idx == 7)
                            break;
                    }
                }
                if (bits_to_show != bs.bit_len)
                    try writer.writeAll("...");
                try writer.writeByte('\n');
            },
            .octet_string => |s| try writer.print("OCTET STRING ({} bytes) {X}\n", .{ s.len, s }),
            .@"null" => try writer.writeAll("NULL\n"),
            .object_identifier => |oid| {
                // Dotted-decimal form, e.g. 1.2.840.113549.
                try writer.writeAll("OBJECT IDENTIFIER ");
                var i: u8 = 0;
                while (i < oid.len) : (i += 1) {
                    if (i != 0) try writer.writeByte('.');
                    try writer.print("{}", .{oid.data[i]});
                }
                try writer.writeByte('\n');
            },
            .utf8_string => |s| try writer.print("UTF8 STRING ({} bytes) {}\n", .{ s.len, s }),
            .printable_string => |s| try writer.print("PRINTABLE STRING ({} bytes) {}\n", .{ s.len, s }),
            .ia5_string => |s| try writer.print("IA5 STRING ({} bytes) {}\n", .{ s.len, s }),
            .utc_time => |s| try writer.print("UTC TIME {}\n", .{s}),
            .bmp_string => |s| try writer.print("BMP STRING ({} words) {}\n", .{
                s.len,
                // NOTE(review): this casts [*]const u16 to [*]const u16 (a no-op)
                // and then slices `s.len * 2` *elements*, i.e. twice the slice
                // length — presumably it meant to reinterpret as bytes
                // ([*]const u8). As written it reads past the slice; verify.
                @ptrCast([*]const u16, s.ptr)[0 .. s.len * 2],
            }),
            .sequence => |children| {
                try writer.print("SEQUENCE ({} elems)\n", .{children.len});
                for (children) |child| try child.formatInternal(fmt, options, indents + 2, writer);
            },
            .set => |children| {
                try writer.print("SET ({} elems)\n", .{children.len});
                for (children) |child| try child.formatInternal(fmt, options, indents + 2, writer);
            },
            .context_specific => |cs| {
                try writer.print("[{}]\n", .{cs.number});
                try cs.child.formatInternal(fmt, options, indents + 2, writer);
            },
        }
    }

    /// std.fmt entry point; delegates to `formatInternal` at indent 0.
    pub fn format(self: Value, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
        try self.formatInternal(fmt, options, 0, writer);
    }
};
/// Distinguished encoding rules (DER, X.690): streaming parser over any
/// `der_reader`, either schema-driven (`parse_schema*`) or fully generic
/// (`parse_value`). Schemas are comptime tuples of enum literals.
pub const der = struct {
    /// Error set produced while decoding from `Reader`.
    pub fn DecodeError(comptime Reader: type) type {
        return Reader.Error || error{
            OutOfMemory,
            EndOfStream,
            InvalidLength,
            InvalidTag,
            InvalidContainerLength,
            DoesNotMatchSchema,
        };
    }

    // State for a length-bounded sub-reader over one DER element's contents.
    fn DERReaderState(comptime Reader: type) type {
        return struct {
            der_reader: Reader,
            length: usize,
            idx: usize = 0,
        };
    }

    // A std.io.Reader that refuses to read past the element's declared length.
    fn DERReader(comptime Reader: type) type {
        const S = struct {
            pub fn read(state: *DERReaderState(Reader), buffer: []u8) DecodeError(Reader)!usize {
                const out_bytes = std.math.min(buffer.len, state.length - state.idx);
                const res = try state.der_reader.readAll(buffer[0..out_bytes]);
                state.idx += res;
                return res;
            }
        };

        return std.io.Reader(*DERReaderState(Reader), DecodeError(Reader), S.read);
    }

    /// Parses one element against `schema`, invoking `captures` callbacks.
    /// Fails with DoesNotMatchSchema if trailing tag/length state is left over.
    pub fn parse_schema(
        schema: anytype,
        captures: anytype,
        der_reader: anytype,
    ) !void {
        const res = try parse_schema_tag_len_internal(null, null, schema, captures, der_reader);
        if (res != null) return error.DoesNotMatchSchema;
    }

    /// Like `parse_schema`, but the caller has already consumed the element's
    /// tag byte and/or length and passes them in.
    pub fn parse_schema_tag_len(
        existing_tag_byte: ?u8,
        existing_length: ?usize,
        schema: anytype,
        captures: anytype,
        der_reader: anytype,
    ) !void {
        const res = try parse_schema_tag_len_internal(
            existing_tag_byte,
            existing_length,
            schema,
            captures,
            der_reader,
        );
        if (res != null) return error.DoesNotMatchSchema;
    }

    // A tag byte plus content length that was read but not consumed by the
    // current schema node (handed back to the caller to retry).
    const TagLength = struct {
        tag: u8,
        length: usize,
    };

    /// Core recursive schema matcher. Returns null when the element was fully
    /// consumed, or the unconsumed TagLength when an optional node did not
    /// match. Schema node forms: `.capture, idx`, `.optional`, `.sequence_of`,
    /// `.any`, `.context_specific, n`, or a plain `Tag` literal.
    pub fn parse_schema_tag_len_internal(
        existing_tag_byte: ?u8,
        existing_length: ?usize,
        schema: anytype,
        captures: anytype,
        der_reader: anytype,
    ) !?TagLength {
        const Reader = @TypeOf(der_reader);
        const isEnumLit = comptime std.meta.trait.is(.EnumLiteral);

        // Walk past the optional `.capture, idx` / `.optional` prefix markers
        // to find the tag literal for this node.
        comptime var tag_idx = 0;

        const has_capture = comptime isEnumLit(@TypeOf(schema[tag_idx])) and schema[tag_idx] == .capture;
        if (has_capture) tag_idx += 2;

        const is_optional = comptime isEnumLit(@TypeOf(schema[tag_idx])) and schema[tag_idx] == .optional;
        if (is_optional) tag_idx += 1;

        const tag_literal = schema[tag_idx];
        comptime std.debug.assert(isEnumLit(@TypeOf(tag_literal)));

        // Use the caller-provided tag/length if present, otherwise read them.
        const tag_byte = existing_tag_byte orelse (der_reader.readByte() catch |err| switch (err) {
            error.EndOfStream => return if (is_optional) null else error.EndOfStream,
            else => |e| return e,
        });
        const length = existing_length orelse try parse_length(der_reader);

        if (tag_literal == .sequence_of) {
            // Zero-or-more repetitions of the sub-schema inside a SEQUENCE.
            if (tag_byte != @enumToInt(Tag.sequence)) {
                if (is_optional) return TagLength{ .tag = tag_byte, .length = length };
                return error.InvalidTag;
            }

            var curr_tag_length: ?TagLength = null;
            const sub_schema = schema[tag_idx + 1];
            while (true) {
                if (curr_tag_length == null) {
                    curr_tag_length = .{
                        .tag = der_reader.readByte() catch |err| switch (err) {
                            error.EndOfStream => {
                                curr_tag_length = null;
                                break;
                            },
                            else => |e| return e,
                        },
                        .length = try parse_length(der_reader),
                    };
                }

                // Stop at the first child that no longer matches; its
                // tag/length propagate to the caller.
                curr_tag_length = parse_schema_tag_len_internal(
                    curr_tag_length.?.tag,
                    curr_tag_length.?.length,
                    sub_schema,
                    captures,
                    der_reader,
                ) catch |err| switch (err) {
                    error.DoesNotMatchSchema => break,
                    else => |e| return e,
                };
            }
            return curr_tag_length;
        } else if (tag_literal == .any) {
            // Matches any element; only does work when captured.
            if (!has_capture) {
                try der_reader.skipBytes(length, .{});
                return null;
            }

            var reader_state = DERReaderState(Reader){
                .der_reader = der_reader,
                .idx = 0,
                .length = length,
            };
            var reader = DERReader(@TypeOf(der_reader)){ .context = &reader_state };
            // captures is a flat tuple of (context, action) pairs indexed by
            // the capture number stored at schema[1].
            const capture_context = captures[schema[1] * 2];
            const capture_action = captures[schema[1] * 2 + 1];
            try capture_action(capture_context, tag_byte, length, reader);

            // Skip remaining bytes
            try der_reader.skipBytes(reader_state.length - reader_state.idx, .{});
            return null;
        } else if (tag_literal == .context_specific) {
            const cs_number = schema[tag_idx + 1];
            // Context-specific class: identifier byte is 0xa0 | number.
            if (tag_byte & 0xC0 == 0x80 and tag_byte - 0xa0 == cs_number) {
                if (!has_capture) {
                    if (schema.len > tag_idx + 2) {
                        // Recurse into the wrapped inner schema, if given.
                        return try parse_schema_tag_len_internal(null, null, schema[tag_idx + 2], captures, der_reader);
                    }
                    try der_reader.skipBytes(length, .{});
                    return null;
                }

                var reader_state = DERReaderState(Reader){
                    .der_reader = der_reader,
                    .idx = 0,
                    .length = length,
                };
                var reader = DERReader(Reader){ .context = &reader_state };
                const capture_context = captures[schema[1] * 2];
                const capture_action = captures[schema[1] * 2 + 1];
                try capture_action(capture_context, tag_byte, length, reader);

                // Skip remaining bytes
                try der_reader.skipBytes(reader_state.length - reader_state.idx, .{});
                return null;
            } else if (is_optional)
                return TagLength{ .tag = tag_byte, .length = length }
            else
                return error.DoesNotMatchSchema;
        }

        // Plain Tag literal: must match the wire tag exactly.
        const schema_tag: Tag = tag_literal;
        const actual_tag = std.meta.intToEnum(Tag, tag_byte) catch return error.InvalidTag;
        if (actual_tag != schema_tag) {
            if (is_optional) return TagLength{ .tag = tag_byte, .length = length };
            return error.DoesNotMatchSchema;
        }

        const single_seq = schema_tag == .sequence and schema.len == 1;
        if ((!has_capture and schema_tag != .sequence) or (!has_capture and single_seq)) {
            // Uncaptured leaf (or empty sequence schema): discard contents.
            try der_reader.skipBytes(length, .{});
            return null;
        }

        if (has_capture) {
            var reader_state = DERReaderState(Reader){
                .der_reader = der_reader,
                .idx = 0,
                .length = length,
            };
            var reader = DERReader(Reader){ .context = &reader_state };
            const capture_context = captures[schema[1] * 2];
            const capture_action = captures[schema[1] * 2 + 1];
            try capture_action(capture_context, tag_byte, length, reader);

            // Skip remaining bytes
            try der_reader.skipBytes(reader_state.length - reader_state.idx, .{});
            return null;
        }

        // Uncaptured SEQUENCE with children: match each sub-schema in order,
        // threading any unconsumed tag/length from optional mismatches.
        var cur_tag_length: ?TagLength = null;
        const sub_schemas = schema[tag_idx + 1];
        comptime var i = 0;
        inline while (i < sub_schemas.len) : (i += 1) {
            const curr_tag = if (cur_tag_length) |tl| tl.tag else null;
            const curr_length = if (cur_tag_length) |tl| tl.length else null;
            cur_tag_length = try parse_schema_tag_len_internal(curr_tag, curr_length, sub_schemas[i], captures, der_reader);
        }
        return cur_tag_length;
    }

    /// DER length octets ready for writing; `slice()` yields the wire bytes.
    pub const EncodedLength = struct {
        data: [@sizeOf(usize) + 1]u8,
        len: usize,

        pub fn slice(self: @This()) []const u8 {
            if (self.len == 1) return self.data[0..1];
            return self.data[0 .. 1 + self.len];
        }
    };

    /// Encodes `length` in DER definite form: one byte short form below 128,
    /// otherwise 0x80|count followed by `count` big-endian bytes.
    pub fn encode_length(length: usize) EncodedLength {
        var enc = EncodedLength{ .data = undefined, .len = 0 };
        if (length < 128) {
            enc.data[0] = @truncate(u8, length);
            enc.len = 1;
        } else {
            const bytes_needed = @intCast(u8, std.math.divCeil(
                usize,
                std.math.log2_int_ceil(usize, length),
                8,
            ) catch unreachable);
            enc.data[0] = bytes_needed | 0x80;
            mem.copy(
                u8,
                enc.data[1 .. bytes_needed + 1],
                mem.asBytes(&length)[0..bytes_needed],
            );
            // DER lengths are big-endian on the wire.
            if (std.builtin.target.cpu.arch.endian() != .Big) {
                mem.reverse(u8, enc.data[1 .. bytes_needed + 1]);
            }
            enc.len = bytes_needed;
        }
        return enc;
    }

    fn parse_int_internal(alloc: *Allocator, bytes_read: *usize, der_reader: anytype) !BigInt {
        const length = try parse_length_internal(bytes_read, der_reader);
        return try parse_int_with_length_internal(alloc, bytes_read, length, der_reader);
    }

    /// Parses an INTEGER whose tag byte was already consumed.
    /// Caller owns the returned BigInt's limbs.
    pub fn parse_int(alloc: *Allocator, der_reader: anytype) !BigInt {
        var bytes: usize = undefined;
        return try parse_int_internal(alloc, &bytes, der_reader);
    }

    /// Parses an INTEGER whose tag and length were already consumed.
    pub fn parse_int_with_length(alloc: *Allocator, length: usize, der_reader: anytype) !BigInt {
        var read: usize = 0;
        return try parse_int_with_length_internal(alloc, &read, length, der_reader);
    }

    fn parse_int_with_length_internal(alloc: *Allocator, bytes_read: *usize, length: usize, der_reader: anytype) !BigInt {
        const first_byte = try der_reader.readByte();
        if (first_byte == 0x0 and length > 1) {
            // Positive number with highest bit set to 1 in the rest.
            const limb_count = std.math.divCeil(usize, length - 1, @sizeOf(usize)) catch unreachable;
            const limbs = try alloc.alloc(usize, limb_count);
            std.mem.set(usize, limbs, 0);
            errdefer alloc.free(limbs);

            var limb_ptr = @ptrCast([*]u8, limbs.ptr);
            try der_reader.readNoEof(limb_ptr[0 .. length - 1]);
            // We always reverse because the standard library big int expects little endian.
            mem.reverse(u8, limb_ptr[0 .. length - 1]);

            bytes_read.* += length;
            return BigInt{ .limbs = limbs, .positive = true };
        }
        std.debug.assert(length != 0);

        // Write first_byte
        // Twos complement
        const limb_count = std.math.divCeil(usize, length, @sizeOf(usize)) catch unreachable;
        const limbs = try alloc.alloc(usize, limb_count);
        std.mem.set(usize, limbs, 0);
        errdefer alloc.free(limbs);

        var limb_ptr = @ptrCast([*]u8, limbs.ptr);
        // Sign bit is stripped from the magnitude; sign is carried in
        // BigInt.positive below.
        limb_ptr[0] = first_byte & ~@as(u8, 0x80);
        try der_reader.readNoEof(limb_ptr[1..length]);

        // We always reverse because the standard library big int expects little endian.
        mem.reverse(u8, limb_ptr[0..length]);
        bytes_read.* += length;
        return BigInt{ .limbs = limbs, .positive = (first_byte & 0x80) == 0x00 };
    }

    /// Parses a DER definite length (short or long form).
    pub fn parse_length(der_reader: anytype) !usize {
        var bytes: usize = 0;
        return try parse_length_internal(&bytes, der_reader);
    }

    fn parse_length_internal(bytes_read: *usize, der_reader: anytype) !usize {
        const first_byte = try der_reader.readByte();
        bytes_read.* += 1;
        if (first_byte & 0x80 == 0x00) {
            // 1 byte value
            return first_byte;
        }
        // Long form: low 7 bits give the number of big-endian length bytes.
        const length = @truncate(u7, first_byte);
        if (length > @sizeOf(usize))
            @panic("DER length does not fit in usize");
        var res_buf = std.mem.zeroes([@sizeOf(usize)]u8);
        try der_reader.readNoEof(res_buf[0..length]);
        bytes_read.* += length;

        if (std.builtin.target.cpu.arch.endian() != .Big) {
            mem.reverse(u8, res_buf[0..length]);
        }
        return mem.bytesToValue(usize, &res_buf);
    }

    // Parses the value whose tag byte has already been read into `tag_byte`.
    // `bytes_read` is advanced by the bytes consumed after the tag.
    fn parse_value_with_tag_byte(
        tag_byte: u8,
        alloc: *Allocator,
        bytes_read: *usize,
        der_reader: anytype,
    ) DecodeError(@TypeOf(der_reader))!Value {
        const tag = std.meta.intToEnum(Tag, tag_byte) catch {
            // tag starts with '0b10...', this is the context specific class.
            if (tag_byte & 0xC0 == 0x80) {
                const length = try parse_length_internal(bytes_read, der_reader);
                var cur_read_bytes: usize = 0;
                var child = try alloc.create(Value);
                errdefer alloc.destroy(child);

                child.* = try parse_value_internal(alloc, &cur_read_bytes, der_reader);
                if (cur_read_bytes != length)
                    return error.InvalidContainerLength;
                bytes_read.* += length;

                return Value{ .context_specific = .{ .child = child, .number = tag_byte - 0xa0 } };
            }
            return error.InvalidTag;
        };

        switch (tag) {
            .bool => {
                // DER booleans have a fixed content length of 1.
                if ((try der_reader.readByte()) != 0x1)
                    return error.InvalidLength;
                defer bytes_read.* += 2;
                return Value{ .bool = (try der_reader.readByte()) != 0x0 };
            },
            .int => return Value{ .int = try parse_int_internal(alloc, bytes_read, der_reader) },
            .bit_string => {
                // First content byte counts the unused trailing bits.
                const length = try parse_length_internal(bytes_read, der_reader);
                const unused_bits = try der_reader.readByte();
                std.debug.assert(unused_bits < 8);
                const bit_count = (length - 1) * 8 - unused_bits;
                const bit_memory = try alloc.alloc(u8, std.math.divCeil(usize, bit_count, 8) catch unreachable);
                errdefer alloc.free(bit_memory);
                try der_reader.readNoEof(bit_memory[0 .. length - 1]);

                bytes_read.* += length;
                return Value{ .bit_string = .{ .data = bit_memory, .bit_len = bit_count } };
            },
            .octet_string, .utf8_string, .printable_string, .utc_time, .ia5_string => {
                const length = try parse_length_internal(bytes_read, der_reader);
                const str_mem = try alloc.alloc(u8, length);
                try der_reader.readNoEof(str_mem);
                bytes_read.* += length;
                return @as(Value, switch (tag) {
                    .octet_string => .{ .octet_string = str_mem },
                    .utf8_string => .{ .utf8_string = str_mem },
                    .printable_string => .{ .printable_string = str_mem },
                    .utc_time => .{ .utc_time = str_mem },
                    .ia5_string => .{ .ia5_string = str_mem },
                    else => unreachable,
                });
            },
            .@"null" => {
                std.debug.assert((try parse_length_internal(bytes_read, der_reader)) == 0x00);
                return .@"null";
            },
            .object_identifier => {
                // First byte packs the first two arcs as 40*a0 + a1; the rest
                // are base-128 with the high bit as a continuation flag.
                const length = try parse_length_internal(bytes_read, der_reader);
                const first_byte = try der_reader.readByte();
                var ret = Value{ .object_identifier = .{ .data = undefined, .len = 0 } };
                ret.object_identifier.data[0] = first_byte / 40;
                ret.object_identifier.data[1] = first_byte % 40;

                var out_idx: u8 = 2;
                var i: usize = 0;
                while (i < length - 1) {
                    var current_value: u32 = 0;
                    var current_byte = try der_reader.readByte();
                    i += 1;
                    while (current_byte & 0x80 == 0x80) : (i += 1) {
                        // Increase the base of the previous bytes
                        current_value *= 128;
                        // Add the current byte in base 128
                        current_value += @as(u32, current_byte & ~@as(u8, 0x80)) * 128;
                        current_byte = try der_reader.readByte();
                    } else {
                        current_value += current_byte;
                    }
                    ret.object_identifier.data[out_idx] = current_value;
                    out_idx += 1;
                }
                ret.object_identifier.len = out_idx;
                std.debug.assert(out_idx <= 16);
                bytes_read.* += length;
                return ret;
            },
            .bmp_string => {
                // UCS-2: big-endian u16 code units.
                const length = try parse_length_internal(bytes_read, der_reader);
                const str_mem = try alloc.alloc(u16, @divExact(length, 2));
                errdefer alloc.free(str_mem);

                for (str_mem) |*wide_char| {
                    wide_char.* = try der_reader.readIntBig(u16);
                }
                bytes_read.* += length;
                return Value{ .bmp_string = str_mem };
            },
            .sequence, .set => {
                const length = try parse_length_internal(bytes_read, der_reader);
                var cur_read_bytes: usize = 0;
                var arr = std.ArrayList(Value).init(alloc);
                errdefer arr.deinit();

                while (cur_read_bytes < length) {
                    (try arr.addOne()).* = try parse_value_internal(alloc, &cur_read_bytes, der_reader);
                }
                if (cur_read_bytes != length)
                    return error.InvalidContainerLength;
                bytes_read.* += length;

                return @as(Value, switch (tag) {
                    .sequence => .{ .sequence = arr.toOwnedSlice() },
                    .set => .{ .set = arr.toOwnedSlice() },
                    else => unreachable,
                });
            },
            // context_specific never comes out of intToEnum (0xff is not a
            // valid universal tag byte on the wire).
            .context_specific => unreachable,
        }
    }

    fn parse_value_internal(alloc: *Allocator, bytes_read: *usize, der_reader: anytype) DecodeError(@TypeOf(der_reader))!Value {
        const tag_byte = try der_reader.readByte();
        bytes_read.* += 1;
        return try parse_value_with_tag_byte(tag_byte, alloc, bytes_read, der_reader);
    }

    /// Parses one complete DER element into a `Value` tree.
    /// Caller owns the result; release it with `Value.deinit(alloc)`.
    pub fn parse_value(alloc: *Allocator, der_reader: anytype) DecodeError(@TypeOf(der_reader))!Value {
        var read: usize = 0;
        return try parse_value_internal(alloc, &read, der_reader);
    }
};
// Smoke test: parse a real DER-encoded certificate (github.com fixture) into
// a Value tree. An arena frees the whole recursive tree in one shot.
test "der.parse_value" {
    const github_der = @embedFile("../test/github.der");
    var fbs = std.io.fixedBufferStream(github_der);
    var arena = ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();
    _ = try der.parse_value(&arena.allocator, fbs.reader());
}

View File

@@ -0,0 +1,446 @@
const std = @import("std");
const mem = std.mem;
const crypto = @import("crypto.zig");
const ChaCha20Stream = crypto.ChaCha20Stream;
const Chacha20Poly1305 = std.crypto.aead.chacha_poly.ChaCha20Poly1305;
const Poly1305 = std.crypto.onetimeauth.Poly1305;
const Aes128Gcm = std.crypto.aead.aes_gcm.Aes128Gcm;
const main = @import("main.zig");
const RecordHeader = main.RecordHeader;
/// The TLS 1.2 ciphersuites this library implements. Each suite is a type
/// with a common comptime interface (name/tag/key_exchange/hash constants,
/// a `Keys` layout struct, and init_state/decrypt_part/verify_mac/raw_write/
/// check_verify_message functions) consumed generically by the handshake code.
pub const suites = struct {
    /// TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 (RFC 7905).
    pub const ECDHE_RSA_Chacha20_Poly1305 = struct {
        pub const name = "ECDHE-RSA-CHACHA20-POLY1305";
        pub const tag = 0xCCA8;
        pub const key_exchange = .ecdhe;
        pub const hash = .sha256;
        // No per-record explicit nonce bytes; 16-byte Poly1305 tag.
        pub const prefix_data_length = 0;
        pub const mac_length = 16;

        pub const Keys = struct {
            client_key: [32]u8,
            server_key: [32]u8,
            client_iv: [12]u8,
            server_iv: [12]u8,
        };

        // Streaming decryption state for one record.
        pub const State = struct {
            mac: Poly1305,
            context: ChaCha20Stream.BlockVec,
            buf: [64]u8,
        };

        /// Sets up the per-record decryption state. The nonce is the server
        /// IV XORed with the big-endian record sequence number (RFC 7905);
        /// the ChaCha block counter starts at 1 (block 0 keys Poly1305).
        pub fn init_state(_: [0]u8, server_seq: u64, key_data: anytype, header: RecordHeader) State {
            // Record length minus the trailing 16-byte Poly1305 tag.
            const len = header.len() - 16;

            var nonce: [12]u8 = ([1]u8{0} ** 4) ++ ([1]u8{undefined} ** 8);
            mem.writeIntBig(u64, nonce[4..12], server_seq);
            for (nonce) |*n, i| {
                n.* ^= key_data.server_iv(@This())[i];
            }

            // AAD = seq (8) ++ record type/version (3) ++ plaintext length (2).
            var additional_data: [13]u8 = undefined;
            mem.writeIntBig(u64, additional_data[0..8], server_seq);
            additional_data[8..11].* = header.data[0..3].*;
            mem.writeIntBig(u16, additional_data[11..13], len);

            var c: [4]u32 = undefined;
            c[0] = 1;
            c[1] = mem.readIntLittle(u32, nonce[0..4]);
            c[2] = mem.readIntLittle(u32, nonce[4..8]);
            c[3] = mem.readIntLittle(u32, nonce[8..12]);
            const server_key = crypto.keyToWords(key_data.server_key(@This()).*);
            return .{
                .mac = ChaCha20Stream.initPoly1305(key_data.server_key(@This()).*, nonce, additional_data),
                .context = ChaCha20Stream.initContext(server_key, c),
                .buf = undefined,
            };
        }

        /// Decrypts a chunk of the record in place-compatible fashion and
        /// feeds the ciphertext into the running Poly1305 MAC.
        pub fn decrypt_part(
            key_data: anytype,
            record_length: usize,
            idx: *usize,
            state: *State,
            encrypted: []const u8,
            out: []u8,
        ) void {
            _ = record_length;
            std.debug.assert(encrypted.len == out.len);

            ChaCha20Stream.chacha20Xor(
                out,
                encrypted,
                crypto.keyToWords(key_data.server_key(@This()).*),
                &state.context,
                idx,
                &state.buf,
            );

            state.mac.update(encrypted);
        }

        /// Reads the trailing 16-byte Poly1305 tag and verifies it against
        /// the accumulated MAC state.
        pub fn verify_mac(reader: anytype, record_length: usize, state: *State) !void {
            var poly1305_tag: [16]u8 = undefined;
            reader.readNoEof(&poly1305_tag) catch |err| switch (err) {
                error.EndOfStream => return error.ServerMalformedResponse,
                else => |e| return e,
            };
            try ChaCha20Stream.checkPoly1305(&state.mac, record_length, poly1305_tag);
        }

        /// Encrypts and writes one outgoing record:
        /// header ++ ciphertext ++ Poly1305 tag. `rand` is unused here
        /// (the nonce is derived from `seq`, per RFC 7905).
        pub fn raw_write(
            comptime buffer_size: usize,
            rand: *std.rand.Random,
            key_data: anytype,
            writer: anytype,
            prefix: [3]u8,
            seq: u64,
            buffer: []const u8,
        ) !void {
            _ = rand;
            std.debug.assert(buffer.len <= buffer_size);
            try writer.writeAll(&prefix);
            // Wire length = plaintext + 16-byte tag.
            try writer.writeIntBig(u16, @intCast(u16, buffer.len + 16));

            var additional_data: [13]u8 = undefined;
            mem.writeIntBig(u64, additional_data[0..8], seq);
            additional_data[8..11].* = prefix;
            mem.writeIntBig(u16, additional_data[11..13], @intCast(u16, buffer.len));

            var encrypted_data: [buffer_size]u8 = undefined;
            var tag_data: [16]u8 = undefined;

            var nonce: [12]u8 = ([1]u8{0} ** 4) ++ ([1]u8{undefined} ** 8);
            mem.writeIntBig(u64, nonce[4..12], seq);
            for (nonce) |*n, i| {
                n.* ^= key_data.client_iv(@This())[i];
            }

            Chacha20Poly1305.encrypt(
                encrypted_data[0..buffer.len],
                &tag_data,
                buffer,
                &additional_data,
                nonce,
                key_data.client_key(@This()).*,
            );

            try writer.writeAll(encrypted_data[0..buffer.len]);
            try writer.writeAll(&tag_data);
        }

        /// Decrypts the server Finished record and compares it with the
        /// expected verify data. Returns false (rather than erroring) on any
        /// mismatch so the caller can fail the handshake uniformly.
        pub fn check_verify_message(
            key_data: anytype,
            length: usize,
            reader: anytype,
            verify_message: [16]u8,
        ) !bool {
            // 16 bytes of ciphertext + 16-byte tag.
            if (length != 32)
                return false;

            var msg_in: [32]u8 = undefined;
            try reader.readNoEof(&msg_in);

            // AAD: seq 0 ++ handshake record header {0x16, 3,3, len 0x10}.
            const additional_data: [13]u8 = ([1]u8{0} ** 8) ++ [5]u8{ 0x16, 0x03, 0x03, 0x00, 0x10 };
            var decrypted: [16]u8 = undefined;
            Chacha20Poly1305.decrypt(
                &decrypted,
                msg_in[0..16],
                msg_in[16..].*,
                &additional_data,
                key_data.server_iv(@This()).*,
                key_data.server_key(@This()).*,
            ) catch return false;
            return mem.eql(u8, &decrypted, &verify_message);
        }
    };

    /// TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 (RFC 5289).
    pub const ECDHE_RSA_AES128_GCM_SHA256 = struct {
        pub const name = "ECDHE-RSA-AES128-GCM-SHA256";
        pub const tag = 0xC02F;
        pub const key_exchange = .ecdhe;
        pub const hash = .sha256;
        // 8-byte explicit per-record nonce precedes the ciphertext.
        pub const prefix_data_length = 8;
        pub const mac_length = 16;

        pub const Keys = struct {
            client_key: [16]u8,
            server_key: [16]u8,
            client_iv: [4]u8,
            server_iv: [4]u8,
        };

        const Aes = std.crypto.core.aes.Aes128;
        pub const State = struct {
            aes: @typeInfo(@TypeOf(Aes.initEnc)).Fn.return_type.?,
            counterInt: u128,
        };

        /// Sets up per-record CTR state. GCM nonce = 4-byte implicit IV ++
        /// 8-byte explicit prefix from the record; the counter starts at 2
        /// (counter 1 is reserved for the auth tag — presumably, per GCM).
        pub fn init_state(prefix_data: [8]u8, server_seq: u64, key_data: anytype, header: RecordHeader) State {
            _ = server_seq;
            _ = header;
            var iv: [12]u8 = undefined;
            iv[0..4].* = key_data.server_iv(@This()).*;
            iv[4..].* = prefix_data;
            var j: [16]u8 = undefined;
            mem.copy(u8, j[0..12], iv[0..]);
            mem.writeIntBig(u32, j[12..][0..4], 2);

            return .{
                .aes = Aes.initEnc(key_data.server_key(@This()).*),
                .counterInt = mem.readInt(u128, &j, .Big),
            };
        }

        /// AES-CTR-decrypts a chunk of the record (GCM without the GHASH).
        pub fn decrypt_part(
            key_data: anytype,
            record_length: usize,
            idx: *usize,
            state: *State,
            encrypted: []const u8,
            out: []u8,
        ) void {
            _ = key_data;
            _ = record_length;
            std.debug.assert(encrypted.len == out.len);

            crypto.ctr(
                @TypeOf(state.aes),
                state.aes,
                out,
                encrypted,
                &state.counterInt,
                idx,
                .Big,
            );
        }

        /// NOTE(review): the GCM tag is skipped, not verified — record
        /// authenticity is NOT checked on this path (see @TODO below).
        pub fn verify_mac(reader: anytype, record_length: usize, state: *State) !void {
            _ = state;
            _ = record_length;
            // @TODO Implement this
            reader.skipBytes(16, .{}) catch |err| switch (err) {
                error.EndOfStream => return error.ServerMalformedResponse,
                else => |e| return e,
            };
        }

        /// Decrypts and compares the server Finished record (explicit 8-byte
        /// nonce ++ 16-byte ciphertext ++ 16-byte tag = 40 bytes).
        pub fn check_verify_message(
            key_data: anytype,
            length: usize,
            reader: anytype,
            verify_message: [16]u8,
        ) !bool {
            if (length != 40)
                return false;

            var iv: [12]u8 = undefined;
            iv[0..4].* = key_data.server_iv(@This()).*;
            try reader.readNoEof(iv[4..12]);

            var msg_in: [32]u8 = undefined;
            try reader.readNoEof(&msg_in);

            // AAD: seq 0 ++ handshake record header {0x16, 3,3, len 0x10}.
            const additional_data: [13]u8 = ([1]u8{0} ** 8) ++ [5]u8{ 0x16, 0x03, 0x03, 0x00, 0x10 };
            var decrypted: [16]u8 = undefined;
            Aes128Gcm.decrypt(
                &decrypted,
                msg_in[0..16],
                msg_in[16..].*,
                &additional_data,
                iv,
                key_data.server_key(@This()).*,
            ) catch return false;

            return mem.eql(u8, &decrypted, &verify_message);
        }

        /// Encrypts and writes one outgoing record:
        /// header ++ random explicit nonce (8) ++ ciphertext ++ GCM tag (16).
        pub fn raw_write(
            comptime buffer_size: usize,
            rand: *std.rand.Random,
            key_data: anytype,
            writer: anytype,
            prefix: [3]u8,
            seq: u64,
            buffer: []const u8,
        ) !void {
            std.debug.assert(buffer.len <= buffer_size);
            var iv: [12]u8 = undefined;
            iv[0..4].* = key_data.client_iv(@This()).*;
            rand.bytes(iv[4..12]);

            var additional_data: [13]u8 = undefined;
            mem.writeIntBig(u64, additional_data[0..8], seq);
            additional_data[8..11].* = prefix;
            mem.writeIntBig(u16, additional_data[11..13], @intCast(u16, buffer.len));

            try writer.writeAll(&prefix);
            // Wire length = explicit nonce (8) + plaintext + tag (16).
            try writer.writeIntBig(u16, @intCast(u16, buffer.len + 24));
            try writer.writeAll(iv[4..12]);

            var encrypted_data: [buffer_size]u8 = undefined;
            var tag_data: [16]u8 = undefined;

            Aes128Gcm.encrypt(
                encrypted_data[0..buffer.len],
                &tag_data,
                buffer,
                &additional_data,
                iv,
                key_data.client_key(@This()).*,
            );

            try writer.writeAll(encrypted_data[0..buffer.len]);
            try writer.writeAll(&tag_data);
        }
    };

    /// All implemented suites, in preference order.
    pub const all = &[_]type{ ECDHE_RSA_Chacha20_Poly1305, ECDHE_RSA_AES128_GCM_SHA256 };
};
/// Returns the byte width of the `[N]u8` array field named by the enum
/// literal `field` on key-layout struct `T`, or null when `T` has no such
/// field. Compile error if the field exists but is not an array of u8.
fn key_field_width(comptime T: type, comptime field: anytype) ?usize {
    if (!@hasField(T, @tagName(field)))
        return null;
    const field_info = std.meta.fieldInfo(T, field);
    if (!comptime std.meta.trait.is(.Array)(field_info.field_type) or std.meta.Elem(field_info.field_type) != u8)
        // Fixed: the original concatenated the enum literal `field` directly
        // into the message (`"..." ++ field ++ "..."`), which is itself a
        // compile error whenever this diagnostic fires; use its tag name.
        @compileError("Field '" ++ @tagName(field) ++ "' of type '" ++ @typeName(T) ++ "' should be an array of u8.");
    return @typeInfo(field_info.field_type).Array.len;
}
/// Total key-material bytes needed for the largest suite in `ciphersuites`:
/// the max over suites of (macs + keys + IVs). MAC fields are optional
/// (AEAD suites have none); key and IV fields are required.
pub fn key_data_size(comptime ciphersuites: anytype) usize {
    var max: usize = 0;
    for (ciphersuites) |cs| {
        const curr = (key_field_width(cs.Keys, .client_mac) orelse 0) +
            (key_field_width(cs.Keys, .server_mac) orelse 0) +
            key_field_width(cs.Keys, .client_key).? +
            key_field_width(cs.Keys, .server_key).? +
            key_field_width(cs.Keys, .client_iv).? +
            key_field_width(cs.Keys, .server_iv).?;
        if (curr > max)
            max = curr;
    }
    return max;
}
/// One flat buffer sized for the largest suite's key material, with typed
/// accessors per suite. Layout (in order, widths taken from `cs.Keys`):
/// [client_mac][server_mac][client_key][server_key][client_iv][server_iv].
/// Each accessor computes its comptime offset as the sum of preceding widths.
pub fn KeyData(comptime ciphersuites: anytype) type {
    return struct {
        data: [key_data_size(ciphersuites)]u8,

        pub fn client_mac(self: *@This(), comptime cs: type) *[key_field_width(cs.Keys, .client_mac) orelse 0]u8 {
            return self.data[0..comptime (key_field_width(cs.Keys, .client_mac) orelse 0)];
        }

        pub fn server_mac(self: *@This(), comptime cs: type) *[key_field_width(cs.Keys, .server_mac) orelse 0]u8 {
            const start = key_field_width(cs.Keys, .client_mac) orelse 0;
            return self.data[start..][0..comptime (key_field_width(cs.Keys, .server_mac) orelse 0)];
        }

        pub fn client_key(self: *@This(), comptime cs: type) *[key_field_width(cs.Keys, .client_key).?]u8 {
            const start = (key_field_width(cs.Keys, .client_mac) orelse 0) +
                (key_field_width(cs.Keys, .server_mac) orelse 0);
            return self.data[start..][0..comptime key_field_width(cs.Keys, .client_key).?];
        }

        pub fn server_key(self: *@This(), comptime cs: type) *[key_field_width(cs.Keys, .server_key).?]u8 {
            const start = (key_field_width(cs.Keys, .client_mac) orelse 0) +
                (key_field_width(cs.Keys, .server_mac) orelse 0) +
                key_field_width(cs.Keys, .client_key).?;
            return self.data[start..][0..comptime key_field_width(cs.Keys, .server_key).?];
        }

        pub fn client_iv(self: *@This(), comptime cs: type) *[key_field_width(cs.Keys, .client_iv).?]u8 {
            const start = (key_field_width(cs.Keys, .client_mac) orelse 0) +
                (key_field_width(cs.Keys, .server_mac) orelse 0) +
                key_field_width(cs.Keys, .client_key).? +
                key_field_width(cs.Keys, .server_key).?;
            return self.data[start..][0..comptime key_field_width(cs.Keys, .client_iv).?];
        }

        pub fn server_iv(self: *@This(), comptime cs: type) *[key_field_width(cs.Keys, .server_iv).?]u8 {
            const start = (key_field_width(cs.Keys, .client_mac) orelse 0) +
                (key_field_width(cs.Keys, .server_mac) orelse 0) +
                key_field_width(cs.Keys, .client_key).? +
                key_field_width(cs.Keys, .server_key).? +
                key_field_width(cs.Keys, .client_iv).?;
            return self.data[start..][0..comptime key_field_width(cs.Keys, .server_iv).?];
        }
    };
}
/// Fill a KeyData for the ciphersuite whose `tag` matches one of
/// `ciphersuites`. Key material is produced 32 bytes at a time via
/// `next_32_bytes(context, chunk_index, &chunk)` and copied into the
/// KeyData layout (client MAC, server MAC, client key, server key,
/// client IV, server IV — in that order).
/// `tag` must be present in `ciphersuites`; otherwise this hits `unreachable`.
///
/// Fix: when a field straddles a 32-byte chunk boundary, the tail must be
/// copied from the START of the freshly fetched chunk. The old code sliced
/// at the exhausted cursor (`chunk[chunk_cursor..]` with chunk_cursor == 32),
/// which is out of bounds whenever the split path is actually taken; the
/// subsequent `chunk_cursor = second_read` already assumed start-of-chunk.
pub fn key_expansion(
    comptime ciphersuites: anytype,
    tag: u16,
    context: anytype,
    comptime next_32_bytes: anytype,
) KeyData(ciphersuites) {
    var res: KeyData(ciphersuites) = undefined;
    inline for (ciphersuites) |cs| {
        if (cs.tag == tag) {
            var chunk: [32]u8 = undefined;
            next_32_bytes(context, 0, &chunk);
            comptime var chunk_idx = 1;
            comptime var data_cursor = 0;
            comptime var chunk_cursor = 0;
            // Field order must match the offsets assumed by KeyData's accessors.
            const fields = .{
                .client_mac, .server_mac,
                .client_key, .server_key,
                .client_iv,  .server_iv,
            };
            inline for (fields) |field| {
                // Current chunk fully consumed: fetch the next one.
                if (chunk_cursor == 32) {
                    next_32_bytes(context, chunk_idx, &chunk);
                    chunk_idx += 1;
                    chunk_cursor = 0;
                }
                const field_width = comptime (key_field_width(cs.Keys, field) orelse 0);
                // A field may straddle a chunk boundary: take what is left of
                // this chunk first, then the remainder from a fresh chunk.
                const first_read = comptime std.math.min(32 - chunk_cursor, field_width);
                const second_read = field_width - first_read;
                res.data[data_cursor..][0..first_read].* = chunk[chunk_cursor..][0..first_read].*;
                data_cursor += first_read;
                chunk_cursor += first_read;
                if (second_read != 0) {
                    next_32_bytes(context, chunk_idx, &chunk);
                    chunk_idx += 1;
                    // Tail comes from the start of the new chunk.
                    res.data[data_cursor..][0..second_read].* = chunk[0..second_read].*;
                    data_cursor += second_read;
                    chunk_cursor = second_read;
                    // A split read can never consume an entire fresh chunk.
                    comptime std.debug.assert(chunk_cursor != 32);
                }
            }
            return res;
        }
    }
    unreachable;
}
/// Builds an untagged union with one variant per ciphersuite, holding that
/// suite's in-flight record `State`. `tag_type = null` because the active
/// variant is implied by the negotiated suite, which the caller tracks
/// separately; `.Extern` layout keeps the union layout well-defined.
pub fn InRecordState(comptime ciphersuites: anytype) type {
    var fields: [ciphersuites.len]std.builtin.TypeInfo.UnionField = undefined;
    for (ciphersuites) |cs, i| {
        fields[i] = .{
            .name = cs.name,
            .field_type = cs.State,
            // Zero-sized state types report alignment 0.
            .alignment = if (@sizeOf(cs.State) > 0) @alignOf(cs.State) else 0,
        };
    }
    return @Type(.{
        .Union = .{
            .layout = .Extern,
            .tag_type = null,
            .fields = &fields,
            .decls = &[0]std.builtin.TypeInfo.Declaration{},
        },
    });
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,209 @@
const std = @import("std");
const mem = std.mem;
const Allocator = mem.Allocator;
const Sha224 = std.crypto.hash.sha2.Sha224;
const Sha384 = std.crypto.hash.sha2.Sha384;
const Sha512 = std.crypto.hash.sha2.Sha512;
const Sha256 = std.crypto.hash.sha2.Sha256;
const x509 = @import("x509.zig");
const SignatureAlgorithm = x509.Certificate.SignatureAlgorithm;
const asn1 = @import("asn1.zig");
/// Compute (base ^ exponent) mod modulus via square-and-multiply, where
/// `base` is a big-endian byte string. Returns null when the result has
/// fewer limb bytes than `base` (callers treat that as failure); otherwise
/// the caller owns the returned Managed and must deinit it.
///
/// Fixes vs. original: zero the limb buffer before copying `base` in (the
/// high bytes of the top limb were left `undefined` when base.len is not a
/// multiple of @sizeOf(usize)), free `result` on the null path, and add an
/// errdefer so `result` does not leak when a later allocation fails.
fn rsa_perform(
    allocator: *Allocator,
    modulus: std.math.big.int.Const,
    exponent: std.math.big.int.Const,
    base: []const u8,
) !?std.math.big.int.Managed {
    // @TODO Better algorithm, make it faster.
    const curr_base_limbs = try allocator.alloc(
        usize,
        std.math.divCeil(usize, base.len, @sizeOf(usize)) catch unreachable,
    );
    // Zero first: the copy below only fills base.len bytes, and the
    // remaining bytes of the top limb must not be garbage.
    mem.set(usize, curr_base_limbs, 0);
    // Reinterpret the limbs as bytes and store `base` little-endian.
    const curr_base_limb_bytes = @ptrCast([*]u8, curr_base_limbs)[0..base.len];
    mem.copy(u8, curr_base_limb_bytes, base);
    mem.reverse(u8, curr_base_limb_bytes);

    // curr_base takes ownership of curr_base_limbs; its deinit frees them.
    var curr_base = (std.math.big.int.Mutable{
        .limbs = curr_base_limbs,
        .positive = true,
        .len = curr_base_limbs.len,
    }).toManaged(allocator);
    defer curr_base.deinit();

    var curr_exponent = try exponent.toManaged(allocator);
    defer curr_exponent.deinit();
    var result = try std.math.big.int.Managed.initSet(allocator, @as(usize, 1));
    errdefer result.deinit();

    // encrypted = signature ^ key.exponent MOD key.modulus
    while (curr_exponent.toConst().orderAgainstScalar(0) == .gt) {
        if (curr_exponent.isOdd()) {
            try result.ensureMulCapacity(result.toConst(), curr_base.toConst());
            try result.mul(result.toConst(), curr_base.toConst());
            try llmod(&result, modulus);
        }
        try curr_base.sqr(curr_base.toConst());
        try llmod(&curr_base, modulus);
        try curr_exponent.shiftRight(curr_exponent, 1);
    }

    if (result.limbs.len * @sizeOf(usize) < base.len) {
        // Failure path: do not leak the result's limb allocation.
        result.deinit();
        return null;
    }
    return result;
}
/// In-place reduction: res = res mod n (truncated division remainder).
/// Allocates a throwaway quotient from res's allocator.
fn llmod(res: *std.math.big.int.Managed, n: std.math.big.int.Const) !void {
    var quotient = try std.math.big.int.Managed.init(res.allocator);
    defer quotient.deinit();
    // divTrunc(q, r, a, b): quotient is discarded, remainder lands in res.
    try std.math.big.int.Managed.divTrunc(&quotient, res, res.toConst(), n);
}
/// Returns the DER-encoded DigestInfo prefix that PKCS#1 v1.5 places in
/// front of the raw hash bytes for the given hash algorithm, or null for
/// algorithms this implementation refuses to use.
pub fn algorithm_prefix(signature_algorithm: SignatureAlgorithm) ?[]const u8 {
    return switch (signature_algorithm.hash) {
        // Weak/deprecated digests: unsupported on purpose.
        .none, .md5, .sha1 => null,
        .sha224 => &[_]u8{
            0x30, 0x2d, 0x30, 0x0d, 0x06,
            0x09, 0x60, 0x86, 0x48, 0x01,
            0x65, 0x03, 0x04, 0x02, 0x04,
            0x05, 0x00, 0x04, 0x1c,
        },
        .sha256 => &[_]u8{
            0x30, 0x31, 0x30, 0x0d, 0x06,
            0x09, 0x60, 0x86, 0x48, 0x01,
            0x65, 0x03, 0x04, 0x02, 0x01,
            0x05, 0x00, 0x04, 0x20,
        },
        .sha384 => &[_]u8{
            0x30, 0x41, 0x30, 0x0d, 0x06,
            0x09, 0x60, 0x86, 0x48, 0x01,
            0x65, 0x03, 0x04, 0x02, 0x02,
            0x05, 0x00, 0x04, 0x30,
        },
        .sha512 => &[_]u8{
            0x30, 0x51, 0x30, 0x0d, 0x06,
            0x09, 0x60, 0x86, 0x48, 0x01,
            0x65, 0x03, 0x04, 0x02, 0x03,
            0x05, 0x00, 0x04, 0x40,
        },
    };
}
/// Produce an RSA PKCS#1 v1.5 signature over an already-computed `hash`.
/// Returns null for unsupported algorithm/key combinations (only RSA is
/// implemented) or when the RSA computation yields too few bytes.
/// Caller owns the returned slice (allocated with `allocator`).
pub fn sign(
    allocator: *Allocator,
    signature_algorithm: SignatureAlgorithm,
    hash: []const u8,
    private_key: x509.PrivateKey,
) !?[]const u8 {
    // @TODO ECDSA signatures
    if (signature_algorithm.signature != .rsa or private_key != .rsa)
        return null;
    // Signature is as wide as the modulus limb buffer.
    const signature_length = private_key.rsa.modulus.len * @sizeOf(usize);
    var sig_buf = try allocator.alloc(u8, signature_length);
    defer allocator.free(sig_buf);
    const prefix = algorithm_prefix(signature_algorithm) orelse return null;
    // NOTE(review): assumes hash.len + prefix.len + 3 <= signature_length;
    // there is no explicit check — confirm callers guarantee this.
    const first_prefix_idx = sig_buf.len - hash.len - prefix.len;
    const first_hash_idx = sig_buf.len - hash.len;
    // EM = 0x00 || 0x01 || PS || 0x00 || T
    sig_buf[0] = 0;
    sig_buf[1] = 1;
    mem.set(u8, sig_buf[2 .. first_prefix_idx - 1], 0xff);
    sig_buf[first_prefix_idx - 1] = 0;
    mem.copy(u8, sig_buf[first_prefix_idx..first_hash_idx], prefix);
    mem.copy(u8, sig_buf[first_hash_idx..], hash);
    const modulus = std.math.big.int.Const{ .limbs = private_key.rsa.modulus, .positive = true };
    const exponent = std.math.big.int.Const{ .limbs = private_key.rsa.exponent, .positive = true };
    var rsa_result = (try rsa_perform(allocator, modulus, exponent, sig_buf)) orelse return null;
    if (rsa_result.limbs.len * @sizeOf(usize) < signature_length) {
        rsa_result.deinit();
        return null;
    }
    // Reinterpret the little-endian limbs as bytes and flip to big-endian.
    const enc_buf = @ptrCast([*]u8, rsa_result.limbs.ptr)[0..signature_length];
    mem.reverse(u8, enc_buf);
    // Shrink the limb allocation to exactly signature_length; ownership of
    // rsa_result's buffer transfers to the caller (no deinit here).
    return allocator.resize(
        enc_buf.ptr[0 .. rsa_result.limbs.len * @sizeOf(usize)],
        signature_length,
    ) catch unreachable;
}
/// Verify an RSA PKCS#1 v1.5 signature over `hash`.
/// Returns false (rather than an error) for unsupported algorithms, size
/// mismatches, or malformed padding; true only when the decrypted block is
/// exactly 0x00 0x01 0xff.. 0x00 || DigestInfo-prefix || hash.
pub fn verify_signature(
    allocator: *Allocator,
    signature_algorithm: SignatureAlgorithm,
    signature: asn1.BitString,
    hash: []const u8,
    public_key: x509.PublicKey,
) !bool {
    // @TODO ECDSA algorithms
    if (public_key != .rsa or signature_algorithm.signature != .rsa) return false;
    const prefix = algorithm_prefix(signature_algorithm) orelse return false;
    // RSA hash verification with PKCS 1 V1_5 padding
    const modulus = std.math.big.int.Const{ .limbs = public_key.rsa.modulus, .positive = true };
    const exponent = std.math.big.int.Const{ .limbs = public_key.rsa.exponent, .positive = true };
    // The signature must be exactly as wide as the modulus.
    if (modulus.bitCountAbs() != signature.bit_len)
        return false;
    var rsa_result = (try rsa_perform(allocator, modulus, exponent, signature.data)) orelse return false;
    defer rsa_result.deinit();
    if (rsa_result.limbs.len * @sizeOf(usize) < signature.data.len)
        return false;
    // Limbs are little-endian; flip to the big-endian EM layout.
    const enc_buf = @ptrCast([*]u8, rsa_result.limbs.ptr)[0..signature.data.len];
    mem.reverse(u8, enc_buf);
    // EM = 0x00 || 0x01 || PS (0xff..) || 0x00 || prefix || hash
    if (enc_buf[0] != 0x00 or enc_buf[1] != 0x01)
        return false;
    if (!mem.endsWith(u8, enc_buf, hash))
        return false;
    if (!mem.endsWith(u8, enc_buf[0 .. enc_buf.len - hash.len], prefix))
        return false;
    if (enc_buf[enc_buf.len - hash.len - prefix.len - 1] != 0x00)
        return false;
    // Every padding byte between the 0x01 marker and the 0x00 separator
    // must be 0xff.
    for (enc_buf[2 .. enc_buf.len - hash.len - prefix.len - 1]) |c| {
        if (c != 0xff) return false;
    }
    return true;
}
/// Hash `bytes` with the certificate's declared hash algorithm, then verify
/// the RSA PKCS#1 v1.5 `signature` against it. Returns false for
/// unsupported algorithms (non-RSA, or deprecated digests).
pub fn certificate_verify_signature(
    allocator: *Allocator,
    signature_algorithm: x509.Certificate.SignatureAlgorithm,
    signature: asn1.BitString,
    bytes: []const u8,
    public_key: x509.PublicKey,
) !bool {
    // @TODO ECDSA algorithms
    if (public_key != .rsa or signature_algorithm.signature != .rsa) return false;

    var hash_buf: [64]u8 = undefined;
    // Select the digest and produce a slice of the matching length.
    const hash: []const u8 = switch (signature_algorithm.hash) {
        // Deprecated hash algos
        .none, .md5, .sha1 => return false,
        .sha224 => blk: {
            Sha224.hash(bytes, hash_buf[0..28], .{});
            break :blk hash_buf[0..28];
        },
        .sha256 => blk: {
            Sha256.hash(bytes, hash_buf[0..32], .{});
            break :blk hash_buf[0..32];
        },
        .sha384 => blk: {
            Sha384.hash(bytes, hash_buf[0..48], .{});
            break :blk hash_buf[0..48];
        },
        .sha512 => blk: {
            Sha512.hash(bytes, hash_buf[0..64], .{});
            break :blk hash_buf[0..64];
        },
    };
    return try verify_signature(allocator, signature_algorithm, signature, hash, public_key);
}

File diff suppressed because it is too large Load Diff

View File

@@ -106,8 +106,9 @@ pub const Response = struct {
status_code: usize,
status: []const u8,
headers: []const Header,
bytes_read: c_int = 0,
pub fn parse(buf: []const u8, src: []Header) !Response {
pub fn parseParts(buf: []const u8, src: []Header, offset: ?*usize) !Response {
var minor_version: c_int = undefined;
var status_code: c_int = undefined;
var status: []const u8 = undefined;
@@ -122,20 +123,29 @@ pub const Response = struct {
&status.len,
@ptrCast([*c]c.phr_header, src.ptr),
&num_headers,
0,
offset.?.*,
);
return switch (rc) {
-1 => error.BadResponse,
-2 => error.ShortRead,
-2 => brk: {
offset.?.* += buf.len;
break :brk error.ShortRead;
},
else => |bytes_read| Response{
.minor_version = @intCast(usize, minor_version),
.status_code = @intCast(usize, status_code),
.status = status,
.headers = src[0..num_headers],
.bytes_read = bytes_read,
},
};
}
/// Parse a complete response head with no resume state.
/// Fix: parseParts takes `?*usize` and unconditionally dereferences it
/// (`offset.?.*`), so the literal `0` passed before neither coerces to the
/// parameter type nor survives a dereference — hand it a real local instead.
pub fn parse(buf: []const u8, src: []Header) !Response {
    var offset: usize = 0;
    return try parseParts(buf, src, &offset);
}
};
test "pico_http: parse response" {

444
src/http_client.zig Normal file
View File

@@ -0,0 +1,444 @@
const picohttp = @import("picohttp");
usingnamespace @import("./global.zig");
const std = @import("std");
const Headers = @import("./javascript/jsc/webcore/response.zig").Headers;
const URL = @import("./query_string_map.zig").URL;
const Method = @import("./http.zig").Method;
const iguanaTLS = @import("iguanaTLS");
const Api = @import("./api/schema.zig").Api;
const HTTPClient = @This();
const SOCKET_FLAGS = os.SOCK_CLOEXEC;
/// Write the request line and header fields to `writer`.
/// Does NOT write the terminating blank line or the body — the caller
/// appends "\r\n" and sends `body` itself (see sendHTTP/sendHTTPS).
/// `body` is currently unused here; kept for signature stability.
fn writeRequest(
    comptime Writer: type,
    writer: Writer,
    request: picohttp.Request,
    body: string,
    // header_hashes: []u64,
) !void {
    try writer.writeAll(request.method);
    try writer.writeAll(" ");
    try writer.writeAll(request.path);
    try writer.writeAll(" HTTP/1.1\r\n");
    // Fix: dropped the unused index capture from the loop.
    for (request.headers) |header| {
        try writer.writeAll(header.name);
        try writer.writeAll(": ");
        try writer.writeAll(header.value);
        try writer.writeAll("\r\n");
    }
}
// Instance fields (this file is the HTTPClient struct; see `HTTPClient = @This()`).
method: Method,
header_entries: Headers.Entries,
// Backing storage for the string offsets held in `header_entries`.
header_buf: string,
url: URL,
allocator: *std.mem.Allocator,
/// Construct an HTTPClient. `header_buf` is the backing storage referenced
/// by the string pointers inside `header_entries`; the caller retains
/// ownership of both.
pub fn init(allocator: *std.mem.Allocator, method: Method, url: URL, header_entries: Headers.Entries, header_buf: string) HTTPClient {
    return .{
        .allocator = allocator,
        .method = method,
        .url = url,
        .header_entries = header_entries,
        .header_buf = header_buf,
    };
}
// Threadlocal scratch buffers: parsed response headers, outgoing request
// header slots, and (currently unused) header-name hashes. One in-flight
// request per thread at a time.
threadlocal var response_headers_buf: [256]picohttp.Header = undefined;
threadlocal var request_headers_buf: [256]picohttp.Header = undefined;
threadlocal var header_name_hashes: [256]u64 = undefined;

// threadlocal var resolver_cache

// Shorthand aliases for std.x networking types.
const tcp = std.x.net.tcp;
const ip = std.x.net.ip;
const IPv4 = std.x.os.IPv4;
const IPv6 = std.x.os.IPv6;
const Socket = std.x.os.Socket;
const os = std.os;
/// Case-insensitive header-name hash: the name is lowercased in 32-byte
/// chunks and fed to Wyhash, so "Host", "host" and "HOST" all collide on
/// purpose.
fn hashHeaderName(name: string) u64 {
    var hasher = std.hash.Wyhash.init(0);
    var rest: string = name;
    var lower_buf: [32]u8 = undefined;
    var lower_slice: []u8 = &lower_buf;
    while (rest.len > 0) {
        const chunk_len = std.math.min(hasher.buf.len, rest.len);
        hasher.update(strings.copyLowercase(rest[0..chunk_len], lower_slice));
        rest = rest[chunk_len..];
    }
    return hasher.final();
}
// Precomputed (lowercased) hashes of the header names this client manages
// itself; user-supplied headers matching these are skipped in buildRequest.
const host_header_hash = hashHeaderName("Host");
const connection_header_hash = hashHeaderName("Connection");
const content_encoding_hash = hashHeaderName("Content-Encoding");
const host_header_name = "Host";
const content_length_header_name = "Content-Length";
const content_length_header_hash = hashHeaderName("Content-Length");
// Always sent: keep-alive is not supported, and we accept anything.
const connection_header = picohttp.Header{ .name = "Connection", .value = "close" };
const accept_header = picohttp.Header{ .name = "Accept", .value = "*/*" };
const accept_header_hash = hashHeaderName("Accept");
/// Resolve an Api.StringPointer (offset + length) into a slice of this
/// client's header backing buffer.
pub fn headerStr(this: *const HTTPClient, ptr: Api.StringPointer) string {
    const start = ptr.offset;
    const end = start + ptr.length;
    return this.header_buf[start..end];
}
/// Assemble the picohttp.Request for this client: copies user headers
/// (minus the ones we manage) into threadlocal storage and appends
/// Connection, Accept and Host. The returned Request points into
/// threadlocal buffers, so it is only valid until the next call on this
/// thread.
///
/// Fix: the skip-list hash was computed from
/// `request_headers_buf[header_count].name` — i.e. whatever stale bytes sat
/// in the threadlocal buffer — instead of the user's header name, so
/// managed headers were never reliably filtered out.
pub fn buildRequest(this: *const HTTPClient, body_len: usize) picohttp.Request {
    var header_count: usize = 0;
    var header_entries = this.header_entries.slice();
    var header_names = header_entries.items(.name);
    var header_values = header_entries.items(.value);

    for (header_names) |head, i| {
        const name = this.headerStr(head);
        // Hash it as lowercase (hashHeaderName lowercases internally).
        const hash = hashHeaderName(name);

        // Skip host and connection header — we manage those (plus
        // Content-Length and Accept).
        switch (hash) {
            host_header_hash,
            connection_header_hash,
            content_length_header_hash,
            accept_header_hash,
            => {
                continue;
            },
            else => {},
        }

        request_headers_buf[header_count] = picohttp.Header{
            .name = name,
            .value = this.headerStr(header_values[i]),
        };
        header_count += 1;
    }

    request_headers_buf[header_count] = connection_header;
    header_count += 1;

    request_headers_buf[header_count] = accept_header;
    header_count += 1;

    request_headers_buf[header_count] = picohttp.Header{
        .name = host_header_name,
        .value = this.url.hostname,
    };
    header_count += 1;

    if (body_len > 0) {
        // FIXME: this appends a SECOND Host header; it should almost
        // certainly be a Content-Length header carrying `body_len`
        // (needs a persistent buffer for the formatted value, e.g. a
        // threadlocal). Behavior preserved pending that change.
        request_headers_buf[header_count] = picohttp.Header{
            .name = host_header_name,
            .value = this.url.hostname,
        };
        header_count += 1;
    }

    return picohttp.Request{
        .method = @tagName(this.method),
        .path = this.url.path,
        .minor_version = 1,
        .headers = request_headers_buf[0..header_count],
    };
}
/// Open a TCP connection to this client's URL (hostname resolved via
/// std.net, port from the URL or the scheme default). Caller owns the
/// returned client and must close its socket.
///
/// Fixes: the placeholder socket created by tcp.Client.init was leaked —
/// both when it was silently overwritten with the connected stream's fd
/// and when tcpConnectToHost failed.
pub fn connect(
    this: *HTTPClient,
) !tcp.Client {
    var client: tcp.Client = try tcp.Client.init(tcp.Domain.ip, .{ .close_on_exec = true });
    errdefer std.os.closeSocket(client.socket.fd);
    const port = this.url.getPortAuto();
    // @TODO connect directly when the hostname is already an IPv4/IPv6
    // literal (or localhost) instead of going through the resolver.
    var stream = try std.net.tcpConnectToHost(default_allocator, this.url.hostname, port);
    // Adopt the connected fd; close the placeholder first so it doesn't leak.
    std.os.closeSocket(client.socket.fd);
    client.socket = std.x.os.Socket.from(stream.handle);
    return client;
}
// Threadlocal buffer the response head (plus any body prefix read along
// with it) lands in before parsing.
threadlocal var http_req_buf: [65436]u8 = undefined;
/// Send the request and read the response body into `body_out_str`.
/// Dispatches on the URL scheme: TLS for https://, plain TCP otherwise.
pub inline fn send(this: *HTTPClient, body: []const u8, body_out_str: *MutableString) !picohttp.Response {
    if (!this.url.isHTTPS()) {
        return this.sendHTTP(body, body_out_str);
    }
    return this.sendHTTPS(body, body_out_str);
}
/// Send the request over plain TCP and read the response.
/// The body is written into `body_out_str` (reset first); response headers
/// point into threadlocal buffers, so the returned Response is only valid
/// until the next request on this thread.
pub fn sendHTTP(this: *HTTPClient, body: []const u8, body_out_str: *MutableString) !picohttp.Response {
    var client = try this.connect();
    defer {
        std.os.closeSocket(client.socket.fd);
    }
    var request = buildRequest(this, body.len);
    var client_writer = client.writer(SOCKET_FLAGS);
    {
        // Buffer the request head so the status line + headers go out in
        // as few writes as possible.
        var client_writer_buffered = std.io.bufferedWriter(client_writer);
        var client_writer_buffered_writer = client_writer_buffered.writer();
        try writeRequest(@TypeOf(&client_writer_buffered_writer), &client_writer_buffered_writer, request, body);
        // Blank line terminates the header section.
        try client_writer_buffered_writer.writeAll("\r\n");
        try client_writer_buffered.flush();
    }
    // Body goes out unbuffered, after the head has been flushed.
    if (body.len > 0) {
        try client_writer.writeAll(body);
    }
    // "Connection: close" is always sent, so readAll runs until the server
    // closes (or the buffer fills).
    var client_reader = client.reader(SOCKET_FLAGS);
    var req_buf_len = try client_reader.readAll(&http_req_buf);
    var request_buffer = http_req_buf[0..req_buf_len];
    var response: picohttp.Response = undefined;
    {
        var response_length: usize = 0;
        // NOTE(review): on error.ShortRead this loops without reading more
        // bytes from the socket, so a response head larger than the buffer
        // appears to spin here — TODO confirm.
        restart: while (true) {
            response = picohttp.Response.parseParts(request_buffer, &response_headers_buf, &response_length) catch |err| {
                switch (err) {
                    error.ShortRead => {
                        continue :restart;
                    },
                    else => {
                        return err;
                    },
                }
            };
            break :restart;
        }
    }
    body_out_str.reset();
    var content_length: u32 = 0;
    for (response.headers) |header| {
        switch (hashHeaderName(header.name)) {
            content_length_header_hash => {
                // Reserve exactly Content-Length bytes for the body.
                content_length = std.fmt.parseInt(u32, header.value, 10) catch 0;
                try body_out_str.inflate(content_length);
                body_out_str.list.expandToCapacity();
            },
            content_encoding_hash => {
                // No gzip/deflate support yet.
                return error.UnsupportedEncoding;
            },
            else => {},
        }
    }
    if (content_length > 0) {
        var remaining_content_length = content_length;
        // Body bytes that arrived in the same read as the header block.
        var remainder = http_req_buf[@intCast(u32, response.bytes_read)..];
        remainder = remainder[0..std.math.min(remainder.len, content_length)];
        var body_size: usize = 0;
        if (remainder.len > 0) {
            std.mem.copy(u8, body_out_str.list.items, remainder);
            body_size = @intCast(u32, remainder.len);
            remaining_content_length -= @intCast(u32, remainder.len);
        }
        // Read the rest of the body directly into the output buffer.
        while (remaining_content_length > 0) {
            const size = @intCast(u32, try client.read(body_out_str.list.items[body_size..], SOCKET_FLAGS));
            if (size == 0) break;
            body_size += size;
            remaining_content_length -= size;
        }
        body_out_str.list.items.len = body_size;
    }
    return response;
}
/// Send the request over TLS (iguanaTLS, ALPN http/1.1, certificate
/// verification DISABLED) and read the response into `body_out_str`.
/// Response headers point into threadlocal buffers.
///
/// Fixes vs. original: the CSPRNG now lives in function scope — the old
/// code did `&std.rand.DefaultCsprng.init(seed).random` inside a labeled
/// block, handing iguanaTLS a pointer into a block-local temporary — and
/// the TCP socket is now closed on exit, matching sendHTTP.
pub fn sendHTTPS(this: *HTTPClient, body_str: []const u8, body_out_str: *MutableString) !picohttp.Response {
    var connection = try this.connect();
    // Runs after close_notify (defers are LIFO).
    defer std.os.closeSocket(connection.socket.fd);
    var arena = std.heap.ArenaAllocator.init(this.allocator);
    defer arena.deinit();

    // Seed a CSPRNG for the handshake; keep it in function scope so the
    // pointer stays valid for the whole connection.
    var seed: [std.rand.DefaultCsprng.secret_seed_length]u8 = undefined;
    try std.os.getrandom(&seed);
    var csprng = std.rand.DefaultCsprng.init(seed);
    var rand = &csprng.random;

    var client = try iguanaTLS.client_connect(
        .{
            .rand = rand,
            .temp_allocator = &arena.allocator,
            .reader = connection.reader(SOCKET_FLAGS),
            .writer = connection.writer(SOCKET_FLAGS),
            // No certificate verification!
            .cert_verifier = .none,
            .protocols = &[_][]const u8{"http/1.1"},
        },
        this.url.hostname,
    );
    defer {
        client.close_notify() catch {};
    }
    var request = buildRequest(this, body_str.len);
    const body = body_str;
    var client_writer = client.writer();
    {
        // Buffer the head; blank line terminates the header section.
        var client_writer_buffered = std.io.bufferedWriter(client_writer);
        var client_writer_buffered_writer = client_writer_buffered.writer();
        try writeRequest(@TypeOf(&client_writer_buffered_writer), &client_writer_buffered_writer, request, body);
        try client_writer_buffered_writer.writeAll("\r\n");
        try client_writer_buffered.flush();
    }
    if (body.len > 0) {
        try client_writer.writeAll(body);
    }
    var client_reader = client.reader();
    var req_buf_len = try client_reader.readAll(&http_req_buf);
    var request_buffer = http_req_buf[0..req_buf_len];
    var response: picohttp.Response = undefined;
    {
        var response_length: usize = 0;
        // NOTE(review): error.ShortRead loops without reading more data;
        // see sendHTTP.
        restart: while (true) {
            response = picohttp.Response.parseParts(request_buffer, &response_headers_buf, &response_length) catch |err| {
                switch (err) {
                    error.ShortRead => {
                        continue :restart;
                    },
                    else => {
                        return err;
                    },
                }
            };
            break :restart;
        }
    }
    body_out_str.reset();
    var content_length: u32 = 0;
    for (response.headers) |header| {
        switch (hashHeaderName(header.name)) {
            content_length_header_hash => {
                // Reserve exactly Content-Length bytes for the body.
                content_length = std.fmt.parseInt(u32, header.value, 10) catch 0;
                try body_out_str.inflate(content_length);
                body_out_str.list.expandToCapacity();
            },
            content_encoding_hash => {
                return error.UnsupportedEncoding;
            },
            else => {},
        }
    }
    if (content_length > 0) {
        var remaining_content_length = content_length;
        // Body bytes already read together with the header block.
        var remainder = http_req_buf[@intCast(u32, response.bytes_read)..];
        remainder = remainder[0..std.math.min(remainder.len, content_length)];
        var body_size: usize = 0;
        if (remainder.len > 0) {
            std.mem.copy(u8, body_out_str.list.items, remainder);
            body_size = @intCast(u32, remainder.len);
            remaining_content_length -= @intCast(u32, remainder.len);
        }
        while (remaining_content_length > 0) {
            const size = @intCast(u32, try client.read(
                body_out_str.list.items[body_size..],
            ));
            if (size == 0) break;
            body_size += size;
            remaining_content_length -= size;
        }
        body_out_str.list.items.len = body_size;
    }
    return response;
}
// zig test src/http_client.zig --test-filter "sendHTTP" -lc -lc++ /Users/jarred/Code/bun/src/deps/picohttpparser.o --cache-dir /Users/jarred/Code/bun/zig-cache --global-cache-dir /Users/jarred/.cache/zig --name bun --pkg-begin clap /Users/jarred/Code/bun/src/deps/zig-clap/clap.zig --pkg-end --pkg-begin picohttp /Users/jarred/Code/bun/src/deps/picohttp.zig --pkg-end --pkg-begin iguanaTLS /Users/jarred/Code/bun/src/deps/iguanaTLS/src/main.zig --pkg-end -I /Users/jarred/Code/bun/src/deps -I /Users/jarred/Code/bun/src/deps/mimalloc -I /usr/local/opt/icu4c/include -L src/deps/mimalloc -L /usr/local/opt/icu4c/lib --main-pkg-path /Users/jarred/Code/bun --enable-cache
// Integration test: requires outbound network access to example.com; the
// expected body length (1256) is pinned to that page's payload at the time
// of writing and will break if it changes.
test "sendHTTP" {
    var headers = try std.heap.c_allocator.create(Headers);
    headers.* = Headers{
        .entries = @TypeOf(headers.entries){},
        .buf = @TypeOf(headers.buf){},
        .used = 0,
        .allocator = std.heap.c_allocator,
    };
    headers.appendHeader("X-What", "ok", true, true, false);
    var client = HTTPClient.init(
        std.heap.c_allocator,
        .GET,
        URL.parse("http://example.com/"),
        headers.entries,
        headers.buf.items,
    );
    var body_out_str = try MutableString.init(std.heap.c_allocator, 0);
    var response = try client.sendHTTP("", &body_out_str);
    try std.testing.expectEqual(response.status_code, 200);
    try std.testing.expectEqual(body_out_str.list.items.len, 1256);
}
// zig test src/http_client.zig --test-filter "sendHTTPS" -lc -lc++ /Users/jarred/Code/bun/src/deps/picohttpparser.o --cache-dir /Users/jarred/Code/bun/zig-cache --global-cache-dir /Users/jarred/.cache/zig --name bun --pkg-begin clap /Users/jarred/Code/bun/src/deps/zig-clap/clap.zig --pkg-end --pkg-begin picohttp /Users/jarred/Code/bun/src/deps/picohttp.zig --pkg-end --pkg-begin iguanaTLS /Users/jarred/Code/bun/src/deps/iguanaTLS/src/main.zig --pkg-end -I /Users/jarred/Code/bun/src/deps -I /Users/jarred/Code/bun/src/deps/mimalloc -I /usr/local/opt/icu4c/include -L src/deps/mimalloc -L /usr/local/opt/icu4c/lib --main-pkg-path /Users/jarred/Code/bun --enable-cache
// Integration test: requires outbound network access; the hookb.in URL is a
// one-off request bin and the hard-coded status/length expectations are
// brittle.
test "sendHTTPS" {
    var headers = try std.heap.c_allocator.create(Headers);
    headers.* = Headers{
        .entries = @TypeOf(headers.entries){},
        .buf = @TypeOf(headers.buf){},
        .used = 0,
        .allocator = std.heap.c_allocator,
    };
    headers.appendHeader("X-What", "ok", true, true, false);
    var client = HTTPClient.init(
        std.heap.c_allocator,
        .GET,
        URL.parse("https://hookb.in/aBnOOWN677UXQ9kkQ2g3"),
        headers.entries,
        headers.buf.items,
    );
    var body_out_str = try MutableString.init(std.heap.c_allocator, 0);
    var response = try client.sendHTTPS("", &body_out_str);
    try std.testing.expectEqual(response.status_code, 200);
    try std.testing.expectEqual(body_out_str.list.items.len, 1256);
}

View File

@@ -726,11 +726,15 @@ pub fn NewClass(
var static_functions = brk: {
var funcs: [function_name_refs.len + 1]js.JSStaticFunction = undefined;
std.mem.set(js.JSStaticFunction, &funcs, js.JSStaticFunction{
.name = @intToPtr([*c]const u8, 0),
.callAsFunction = null,
.attributes = js.JSPropertyAttributes.kJSPropertyAttributeNone,
},);
std.mem.set(
js.JSStaticFunction,
&funcs,
js.JSStaticFunction{
.name = @intToPtr([*c]const u8, 0),
.callAsFunction = null,
.attributes = js.JSPropertyAttributes.kJSPropertyAttributeNone,
},
);
break :brk funcs;
};
var instance_functions = std.mem.zeroes([function_names.len]js.JSObjectRef);
@@ -738,36 +742,40 @@ pub fn NewClass(
var property_name_refs = std.mem.zeroes([property_names.len]js.JSStringRef);
const property_name_literals = property_names;
var static_properties = brk: {
var props: [property_names.len]js.JSStaticValue = undefined;
std.mem.set(js.JSStaticValue, &props, js.JSStaticValue{
.name = @intToPtr([*c]const u8, 0),
.getProperty = null,
.setProperty = null,
.attributes = js.JSPropertyAttributes.kJSPropertyAttributeNone,
},);
var props: [property_names.len]js.JSStaticValue = undefined;
std.mem.set(
js.JSStaticValue,
&props,
js.JSStaticValue{
.name = @intToPtr([*c]const u8, 0),
.getProperty = null,
.setProperty = null,
.attributes = js.JSPropertyAttributes.kJSPropertyAttributeNone,
},
);
break :brk props;
};
pub var ref: js.JSClassRef = null;
pub var loaded = false;
pub var definition: js.JSClassDefinition =.{
.version = 0,
.attributes = js.JSClassAttributes.kJSClassAttributeNone,
.className = name[0..:0].ptr,
.parentClass = null,
.staticValues = null,
.staticFunctions = null,
.initialize = null,
.finalize = null,
.hasProperty = null,
.getProperty = null,
.setProperty = null,
.deleteProperty = null,
.getPropertyNames = null,
.callAsFunction = null,
.callAsConstructor = null,
.hasInstance = null,
.convertToType = null,
pub var definition: js.JSClassDefinition = .{
.version = 0,
.attributes = js.JSClassAttributes.kJSClassAttributeNone,
.className = name[0.. :0].ptr,
.parentClass = null,
.staticValues = null,
.staticFunctions = null,
.initialize = null,
.finalize = null,
.hasProperty = null,
.getProperty = null,
.setProperty = null,
.deleteProperty = null,
.getPropertyNames = null,
.callAsFunction = null,
.callAsConstructor = null,
.hasInstance = null,
.convertToType = null,
};
const ConstructorWrapper = struct {
pub fn rfn(
@@ -1326,7 +1334,7 @@ pub fn NewClass(
.callAsConstructor = null,
.hasInstance = null,
.convertToType = null,
};
};
if (static_functions.len > 0) {
std.mem.set(js.JSStaticFunction, &static_functions, std.mem.zeroes(js.JSStaticFunction));
@@ -1338,6 +1346,8 @@ pub fn NewClass(
def.callAsConstructor = To.JS.Constructor(staticFunctions.constructor.rfn).rfn;
} else if (comptime strings.eqlComptime(function_names[i], "finalize")) {
def.finalize = To.JS.Finalize(ZigType, staticFunctions.finalize.rfn).rfn;
} else if (comptime strings.eqlComptime(function_names[i], "call")) {
def.callAsFunction = To.JS.Callback(ZigType, staticFunctions.call.rfn).rfn;
} else if (comptime strings.eqlComptime(function_names[i], "callAsFunction")) {
const ctxfn = @field(staticFunctions, function_names[i]).rfn;
const Func: std.builtin.TypeInfo.Fn = @typeInfo(@TypeOf(ctxfn)).Fn;
@@ -1379,6 +1389,8 @@ pub fn NewClass(
def.callAsConstructor = To.JS.Constructor(staticFunctions.constructor).rfn;
} else if (comptime strings.eqlComptime(function_names[i], "finalize")) {
def.finalize = To.JS.Finalize(ZigType, staticFunctions.finalize).rfn;
} else if (comptime strings.eqlComptime(function_names[i], "call")) {
def.callAsFunction = To.JS.Callback(ZigType, staticFunctions.call).rfn;
} else {
var callback = To.JS.Callback(
ZigType,

View File

@@ -165,7 +165,7 @@ void GlobalObject::setConsole(void *console) {
// and any other objects available globally.
void GlobalObject::installAPIGlobals(JSClassRef *globals, int count) {
WTF::Vector<GlobalPropertyInfo> extraStaticGlobals;
extraStaticGlobals.reserveCapacity((size_t)count + 1);
extraStaticGlobals.reserveCapacity((size_t)count + 2);
// This is not nearly a complete implementation. It's just enough to make some npm packages that
// were compiled with Webpack to run without crashing in this environment.
@@ -223,9 +223,7 @@ JSC::Identifier GlobalObject::moduleLoaderResolve(JSGlobalObject *globalObject,
res.success = false;
ZigString keyZ = toZigString(key, globalObject);
ZigString referrerZ = referrer.isString() ? toZigString(referrer, globalObject) : ZigStringEmpty;
Zig__GlobalObject__resolve(&res, globalObject, &keyZ,
&referrerZ
);
Zig__GlobalObject__resolve(&res, globalObject, &keyZ, &referrerZ);
if (res.success) {
return toIdentifier(res.result.value, globalObject);
@@ -250,11 +248,9 @@ JSC::JSInternalPromise *GlobalObject::moduleLoaderImportModule(JSGlobalObject *g
auto sourceURL = sourceOrigin.url();
ErrorableZigString resolved;
auto moduleNameZ = toZigString(moduleNameValue, globalObject);
auto sourceOriginZ = sourceURL.isEmpty() ? ZigStringCwd
: toZigString(sourceURL.fileSystemPath());
auto sourceOriginZ = sourceURL.isEmpty() ? ZigStringCwd : toZigString(sourceURL.fileSystemPath());
resolved.success = false;
Zig__GlobalObject__resolve(&resolved, globalObject, &moduleNameZ, &sourceOriginZ
);
Zig__GlobalObject__resolve(&resolved, globalObject, &moduleNameZ, &sourceOriginZ);
if (!resolved.success) {
throwException(scope, resolved.result.err, globalObject);
return promise->rejectWithCaughtException(globalObject, scope);
@@ -382,8 +378,7 @@ JSC::JSInternalPromise *GlobalObject::moduleLoaderFetch(JSGlobalObject *globalOb
res.result.err.code = 0;
res.result.err.ptr = nullptr;
Zig__GlobalObject__fetch(&res, globalObject, &moduleKeyZig,
&source );
Zig__GlobalObject__fetch(&res, globalObject, &moduleKeyZig, &source);
if (!res.success) {
throwException(scope, res.result.err, globalObject);

View File

@@ -1765,4 +1765,15 @@ void WTF__URL__setQuery(WTF__URL *arg0, bWTF__StringView arg1) {
void WTF__URL__setUser(WTF__URL *arg0, bWTF__StringView arg1) {
arg0->setUser(*Wrap<WTF::StringView, bWTF__StringView>::unwrap(&arg1));
};
// C-ABI wrapper: build a rejected JSPromise from a decoded JSValue and
// return it re-encoded as a JSValue (rather than as a JSPromise*).
JSC__JSValue JSC__JSPromise__rejectedPromiseValue(JSC__JSGlobalObject *arg0,
JSC__JSValue JSValue1) {
return JSC::JSValue::encode(
JSC::JSPromise::rejectedPromise(arg0, JSC::JSValue::decode(JSValue1)));
}
// C-ABI wrapper: build a resolved JSPromise from a decoded JSValue and
// return it re-encoded as a JSValue (rather than as a JSPromise*).
JSC__JSValue JSC__JSPromise__resolvedPromiseValue(JSC__JSGlobalObject *arg0,
JSC__JSValue JSValue1) {
return JSC::JSValue::encode(
JSC::JSPromise::resolvedPromise(arg0, JSC::JSValue::decode(JSValue1)));
}
}

View File

@@ -438,10 +438,19 @@ pub const JSPromise = extern struct {
pub fn resolvedPromise(globalThis: *JSGlobalObject, value: JSValue) *JSPromise {
return cppFn("resolvedPromise", .{ globalThis, value });
}
pub fn resolvedPromiseValue(globalThis: *JSGlobalObject, value: JSValue) JSValue {
return cppFn("resolvedPromiseValue", .{ globalThis, value });
}
pub fn rejectedPromise(globalThis: *JSGlobalObject, value: JSValue) *JSPromise {
return cppFn("rejectedPromise", .{ globalThis, value });
}
pub fn rejectedPromiseValue(globalThis: *JSGlobalObject, value: JSValue) JSValue {
return cppFn("rejectedPromiseValue", .{ globalThis, value });
}
pub fn resolve(this: *JSPromise, globalThis: *JSGlobalObject, value: JSValue) void {
cppFn("resolve", .{ this, globalThis, value });
}
@@ -470,6 +479,8 @@ pub const JSPromise = extern struct {
"rejectAsHandled",
// "rejectException",
"rejectAsHandledException",
"rejectedPromiseValue",
"resolvedPromiseValue",
};
};

View File

@@ -1,4 +1,4 @@
//-- AUTOGENERATED FILE -- 1631085611
//-- AUTOGENERATED FILE -- 1631179623
// clang-format off
#pragma once

View File

@@ -1,4 +1,4 @@
//-- AUTOGENERATED FILE -- 1631085611
//-- AUTOGENERATED FILE -- 1631179623
// clang-format: off
#pragma once
@@ -285,9 +285,11 @@ CPP_DECL void JSC__JSPromise__reject(JSC__JSPromise* arg0, JSC__JSGlobalObject*
CPP_DECL void JSC__JSPromise__rejectAsHandled(JSC__JSPromise* arg0, JSC__JSGlobalObject* arg1, JSC__JSValue JSValue2);
CPP_DECL void JSC__JSPromise__rejectAsHandledException(JSC__JSPromise* arg0, JSC__JSGlobalObject* arg1, JSC__Exception* arg2);
CPP_DECL JSC__JSPromise* JSC__JSPromise__rejectedPromise(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1);
CPP_DECL JSC__JSValue JSC__JSPromise__rejectedPromiseValue(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1);
CPP_DECL void JSC__JSPromise__rejectWithCaughtException(JSC__JSPromise* arg0, JSC__JSGlobalObject* arg1, bJSC__ThrowScope arg2);
CPP_DECL void JSC__JSPromise__resolve(JSC__JSPromise* arg0, JSC__JSGlobalObject* arg1, JSC__JSValue JSValue2);
CPP_DECL JSC__JSPromise* JSC__JSPromise__resolvedPromise(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1);
CPP_DECL JSC__JSValue JSC__JSPromise__resolvedPromiseValue(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1);
CPP_DECL JSC__JSValue JSC__JSPromise__result(const JSC__JSPromise* arg0, JSC__VM* arg1);
CPP_DECL uint32_t JSC__JSPromise__status(const JSC__JSPromise* arg0, JSC__VM* arg1);

File diff suppressed because one or more lines are too long

View File

@@ -33,6 +33,7 @@ pub const GlobalClasses = [_]type{
BuildError.Class,
ResolveError.Class,
Bun.Class,
Fetch.Class,
};
const Blob = @import("../../blob.zig");
@@ -276,6 +277,10 @@ pub const Bun = struct {
.rfn = Router.match,
.ts = Router.match_type_definition,
},
.fetch = .{
.rfn = Fetch.call,
.ts = d.ts{},
},
.getImportedStyles = .{
.rfn = Bun.getImportedStyles,
.ts = d.ts{
@@ -1348,7 +1353,6 @@ pub const EventListenerMixin = struct {
// Rely on JS finalizer
var fetch_event = try vm.allocator.create(FetchEvent);
fetch_event.* = FetchEvent{
.request_context = request_context,
.request = Request{ .request_context = request_context },

View File

@@ -4,19 +4,34 @@ const Api = @import("../../../api/schema.zig").Api;
const http = @import("../../../http.zig");
usingnamespace @import("../javascript.zig");
usingnamespace @import("../bindings/bindings.zig");
const ZigURL = @import("../../../query_string_map.zig").URL;
const HTTPClient = @import("../../../http_client.zig");
const picohttp = @import("picohttp");
pub const Response = struct {
pub const Class = NewClass(
Response,
.{ .name = "Response" },
.{
.@"constructor" = constructor,
.@"text" = .{
.rfn = getText,
.ts = d.ts{},
},
.@"json" = .{
.rfn = getJson,
.ts = d.ts{},
},
.@"arrayBuffer" = .{
.rfn = getArrayBuffer,
.ts = d.ts{},
},
},
.{
// .@"url" = .{
// .@"get" = getURL,
// .ro = true,
// },
.@"ok" = .{
.@"get" = getOK,
.ro = true,
@@ -30,6 +45,7 @@ pub const Response = struct {
allocator: *std.mem.Allocator,
body: Body,
status_text: string = "",
pub const Props = struct {};
@@ -41,7 +57,174 @@ pub const Response = struct {
exception: js.ExceptionRef,
) js.JSValueRef {
// https://developer.mozilla.org/en-US/docs/Web/API/Response/ok
return js.JSValueMakeBoolean(ctx, this.body.init.status_code >= 200 and this.body.init.status_code <= 299);
return js.JSValueMakeBoolean(ctx, this.body.init.status_code == 304 or (this.body.init.status_code >= 200 and this.body.init.status_code <= 299));
}
/// Response.prototype.text(): returns an already-resolved promise whose value
/// is the body as a JS string. Consumes the body: the defer below marks it
/// .Empty on exit, so a second call observes an empty body.
pub fn getText(
this: *Response,
ctx: js.JSContextRef,
function: js.JSObjectRef,
thisObject: js.JSObjectRef,
arguments: []const js.JSValueRef,
exception: js.ExceptionRef,
) js.JSValueRef {
// https://developer.mozilla.org/en-US/docs/Web/API/Response/text
// Mark the body consumed no matter which branch produced the string.
defer this.body.value = .Empty;
return JSPromise.resolvedPromiseValue(
VirtualMachine.vm.global,
(brk: {
switch (this.body.value) {
// Raw bytes received from the network and not yet read by JS.
.Unconsumed => {
if (this.body.ptr) |_ptr| {
// NOTE(review): ZigString.init does not copy; the returned JS string
// presumably references this.body.ptr's memory — confirm the buffer
// outlives the JS string.
break :brk ZigString.init(_ptr[0..this.body.len]).toValue(VirtualMachine.vm.global);
}
break :brk ZigString.init("").toValue(VirtualMachine.vm.global);
},
.Empty => {
break :brk ZigString.init("").toValue(VirtualMachine.vm.global);
},
.String => |str| {
break :brk ZigString.init(str).toValue(VirtualMachine.vm.global);
},
// Interpret the buffer's byte range directly as string data.
.ArrayBuffer => |buffer| {
break :brk ZigString.init(buffer.ptr[buffer.offset..buffer.byte_len]).toValue(VirtualMachine.vm.global);
},
}
}),
).asRef();
}
// Scratch storage shared by getJson error reporting.
// NOTE(review): these are globals reused across calls — assumes single-threaded
// JS execution; confirm.
var temp_error_buffer: [4096]u8 = undefined;
var error_arg_list: [1]js.JSObjectRef = undefined;
/// Response.prototype.json(): parses the body as JSON via
/// JSValueCreateJSONString and returns a resolved promise with the parsed
/// value, or a rejected promise carrying an "Invalid JSON" Error.
/// Consumes the body (marked .Empty on exit).
pub fn getJson(
this: *Response,
ctx: js.JSContextRef,
function: js.JSObjectRef,
thisObject: js.JSObjectRef,
arguments: []const js.JSValueRef,
exception: js.ExceptionRef,
) js.JSValueRef {
defer this.body.value = .Empty;
// Kept in scope so the error path below can include the offending text.
var zig_string = ZigString.init("");
var js_string = (js.JSValueCreateJSONString(
ctx,
// Produce a JSStringRef view of the body bytes, whatever state it is in.
brk: {
switch (this.body.value) {
.Unconsumed => {
if (this.body.ptr) |_ptr| {
zig_string = ZigString.init(_ptr[0..this.body.len]);
break :brk zig_string.toJSStringRef();
}
break :brk zig_string.toJSStringRef();
},
.Empty => {
break :brk zig_string.toJSStringRef();
},
.String => |str| {
zig_string = ZigString.init(str);
break :brk zig_string.toJSStringRef();
},
.ArrayBuffer => |buffer| {
zig_string = ZigString.init(buffer.ptr[buffer.offset..buffer.byte_len]);
break :brk zig_string.toJSStringRef();
},
}
},
0,
exception,
) orelse {
// JSON parse failed: build "Invalid JSON" message quoting up to 4000
// bytes of the body, wrap it in a JS Error, and reject.
var out = std.fmt.bufPrint(&temp_error_buffer, "Invalid JSON\n\n \"{s}\"", .{zig_string.slice()[0..std.math.min(zig_string.len, 4000)]}) catch unreachable;
error_arg_list[0] = ZigString.init(out).toValueGC(VirtualMachine.vm.global).asRef();
return JSPromise.rejectedPromiseValue(
VirtualMachine.vm.global,
JSValue.fromRef(
js.JSObjectMakeError(
ctx,
1,
&error_arg_list,
exception,
),
),
).asRef();
});
defer js.JSStringRelease(js_string);
// Success: hand the parsed JSON back as a resolved promise.
return JSPromise.resolvedPromiseValue(
VirtualMachine.vm.global,
JSValue.fromRef(
js.JSValueMakeString(
ctx,
js_string,
),
),
).asRef();
}
/// Response.prototype.arrayBuffer(): returns a resolved promise with a typed
/// array over the body bytes. Uses the NoCopy JSC constructors, so the typed
/// array aliases the body's memory rather than copying it.
/// Consumes the body (marked .Empty on exit).
pub fn getArrayBuffer(
this: *Response,
ctx: js.JSContextRef,
function: js.JSObjectRef,
thisObject: js.JSObjectRef,
arguments: []const js.JSValueRef,
exception: js.ExceptionRef,
) js.JSValueRef {
defer this.body.value = .Empty;
return JSPromise.resolvedPromiseValue(
VirtualMachine.vm.global,
JSValue.fromRef(
(brk: {
switch (this.body.value) {
.Unconsumed => {
if (this.body.ptr) |_ptr| {
// NOTE(review): no deallocator callback is passed (both null), and
// the bytes are not copied — confirm this.body.ptr outlives the
// typed array handed to JS.
break :brk js.JSObjectMakeTypedArrayWithBytesNoCopy(
ctx,
js.JSTypedArrayType.kJSTypedArrayTypeUint8Array,
_ptr,
this.body.len,
null,
null,
exception,
);
}
// No bytes were ever received: hand back an empty Uint8Array.
break :brk js.JSObjectMakeTypedArray(
ctx,
js.JSTypedArrayType.kJSTypedArrayTypeUint8Array,
0,
exception,
);
},
.Empty => {
break :brk js.JSObjectMakeTypedArray(ctx, js.JSTypedArrayType.kJSTypedArrayTypeUint8Array, 0, exception);
},
.String => |str| {
// Cast away const on the string bytes to satisfy the NoCopy API.
break :brk js.JSObjectMakeTypedArrayWithBytesNoCopy(
ctx,
js.JSTypedArrayType.kJSTypedArrayTypeUint8Array,
@intToPtr([*]u8, @ptrToInt(str.ptr)),
str.len,
null,
null,
exception,
);
},
// Preserve the original typed-array flavor recorded on the buffer.
.ArrayBuffer => |buffer| {
break :brk js.JSObjectMakeTypedArrayWithBytesNoCopy(
ctx,
buffer.typed_array_type,
buffer.ptr,
buffer.byte_len,
null,
null,
exception,
);
},
}
}),
),
).asRef();
}
pub fn getStatus(
@@ -87,7 +270,7 @@ pub const Response = struct {
return http.MimeType.html.value;
},
.ArrayBuffer => {
.Unconsumed, .ArrayBuffer => {
return "application/octet-stream";
},
}
@@ -134,6 +317,151 @@ pub const Response = struct {
}
};
/// Implements the global `fetch()` exposed to JS. The HTTP request is made
/// synchronously via HTTPClient.send, and an already-settled promise
/// (resolved with a Response, or rejected with an Error) is returned.
pub const Fetch = struct {
const headers_string = "headers";
const method_string = "method";
// Reusable response-body accumulator, lazily initialized on first fetch and
// reset (not freed) on subsequent calls.
// NOTE(review): global mutable state — assumes a single JS thread; confirm.
var fetch_body_string: MutableString = undefined;
var fetch_body_string_loaded = false;
pub const Class = NewClass(
void,
.{ .name = "fetch" },
.{
.@"call" = .{
.rfn = Fetch.call,
.ts = d.ts{},
},
},
.{},
);
/// fetch(url[, options]) — accepts a string URL and an optional init object
/// with "headers", "body", and "method" keys. Returns null for malformed
/// argument lists instead of throwing.
pub fn call(
this: void,
ctx: js.JSContextRef,
function: js.JSObjectRef,
thisObject: js.JSObjectRef,
arguments: []const js.JSValueRef,
exception: js.ExceptionRef,
) js.JSObjectRef {
if (arguments.len == 0 or arguments.len > 2) return js.JSValueMakeNull(ctx);
// Default request: GET with empty URL/headers; filled in below.
var http_client = HTTPClient.init(getAllocator(ctx), .GET, ZigURL{}, .{}, "");
var headers: ?Headers = null;
var body: string = "";
// Only string URLs are supported (no Request objects).
if (!js.JSValueIsString(ctx, arguments[0])) {
return js.JSValueMakeNull(ctx);
}
var url_zig_str = ZigString.init("");
JSValue.fromRef(arguments[0]).toZigString(
&url_zig_str,
VirtualMachine.vm.global,
);
var url_str = url_zig_str.slice();
if (url_str.len == 0) return js.JSValueMakeNull(ctx);
http_client.url = ZigURL.parse(url_str);
// Walk the init object's own property names, dispatching on the name
// length first as a cheap pre-filter before the byte comparison.
if (arguments.len == 2 and js.JSValueIsObject(ctx, arguments[1])) {
var array = js.JSObjectCopyPropertyNames(ctx, arguments[1]);
defer js.JSPropertyNameArrayRelease(array);
const count = js.JSPropertyNameArrayGetCount(array);
var i: usize = 0;
while (i < count) : (i += 1) {
var property_name_ref = js.JSPropertyNameArrayGetNameAtIndex(array, i);
switch (js.JSStringGetLength(property_name_ref)) {
"headers".len => {
if (js.JSStringIsEqualToUTF8CString(property_name_ref, "headers")) {
if (js.JSObjectGetProperty(ctx, arguments[1], property_name_ref, null)) |value| {
// Accept either an existing Headers instance (copied by value)
// or a plain object converted via headersInit.
if (GetJSPrivateData(Headers, value)) |headers_ptr| {
headers = headers_ptr.*;
} else if (Headers.JS.headersInit(ctx, value) catch null) |headers_| {
headers = headers_;
}
}
}
},
"body".len => {
if (js.JSStringIsEqualToUTF8CString(property_name_ref, "body")) {
if (js.JSObjectGetProperty(ctx, arguments[1], property_name_ref, null)) |value| {
var body_ = Body.extractBody(ctx, value, false, null, exception);
// NOTE(review): this compares the ExceptionRef pointer itself,
// not the stored exception; verify whether `exception.* != null`
// was intended here (and at the same check under "method").
if (exception != null) return js.JSValueMakeNull(ctx);
switch (body_.value) {
.ArrayBuffer => |arraybuffer| {
body = arraybuffer.ptr[0..arraybuffer.byte_len];
},
.String => |str| {
body = str;
},
else => {},
}
}
}
},
"method".len => {
if (js.JSStringIsEqualToUTF8CString(property_name_ref, "method")) {
if (js.JSObjectGetProperty(ctx, arguments[1], property_name_ref, null)) |value| {
var string_ref = js.JSValueToStringCopy(ctx, value, exception);
if (exception != null) return js.JSValueMakeNull(ctx);
defer js.JSStringRelease(string_ref);
// Method names longer than 16 bytes are truncated by the copy.
var method_name_buf: [16]u8 = undefined;
var method_name = method_name_buf[0..js.JSStringGetUTF8CString(string_ref, &method_name_buf, method_name_buf.len)];
// Unknown method strings silently keep the default (GET).
http_client.method = http.Method.which(method_name) orelse http_client.method;
}
}
},
else => {},
}
}
}
if (headers) |head| {
http_client.header_entries = head.entries;
http_client.header_buf = head.buf.items;
}
// Lazily create (or reuse) the shared body accumulator.
if (fetch_body_string_loaded) {
fetch_body_string.reset();
} else {
fetch_body_string = MutableString.init(VirtualMachine.vm.allocator, 0) catch unreachable;
fetch_body_string_loaded = true;
}
// Blocking network call; on failure, reject with "Fetch error: <name>".
var http_response = http_client.send(body, &fetch_body_string) catch |err| {
const fetch_error = std.fmt.allocPrint(getAllocator(ctx), "Fetch error: {s}", .{@errorName(err)}) catch unreachable;
return JSPromise.rejectedPromiseValue(VirtualMachine.vm.global, ZigString.init(fetch_error).toErrorInstance(VirtualMachine.vm.global)).asRef();
};
var response_headers = Headers.fromPicoHeaders(getAllocator(ctx), http_response.headers) catch unreachable;
response_headers.guard = .immutable;
var response = getAllocator(ctx).create(Response) catch unreachable;
var allocator = getAllocator(ctx);
// Copy the body out of the shared accumulator so the Response owns it
// (the accumulator is reset on the next fetch call).
var duped = allocator.dupeZ(u8, fetch_body_string.list.items) catch unreachable;
response.* = Response{
.allocator = allocator,
.status_text = allocator.dupe(u8, http_response.status) catch unreachable,
.body = .{
.init = .{
.headers = response_headers,
.status_code = @truncate(u16, http_response.status_code),
},
// Body bytes exist but JS has not read them yet.
.value = .{
.Unconsumed = 0,
},
.ptr = duped.ptr,
.len = duped.len,
.ptr_allocator = allocator,
},
};
return JSPromise.resolvedPromiseValue(
VirtualMachine.vm.global,
JSValue.fromRef(Response.Class.make(ctx, response)),
).asRef();
}
};
// https://developer.mozilla.org/en-US/docs/Web/API/Headers
pub const Headers = struct {
pub const Kv = struct {
@@ -272,6 +600,77 @@ pub const Headers = struct {
return js.JSValueMakeNull(ctx);
}
/// Builds a Headers value from a plain JS object of name -> value pairs.
/// Returns null when the object contributes no usable headers (so callers can
/// fall back to an empty Headers). Only string and number values are kept;
/// other value types are silently ignored.
pub fn headersInit(ctx: js.JSContextRef, header_prop: js.JSObjectRef) !?Headers {
const header_keys = js.JSObjectCopyPropertyNames(ctx, header_prop);
defer js.JSPropertyNameArrayRelease(header_keys);
const total_header_count = js.JSPropertyNameArrayGetCount(header_keys);
if (total_header_count == 0) return null;
// 2 passes through the headers
// Pass #1: find the "real" count.
// The number of things which are strings or numbers.
// Anything else should be ignored.
// We could throw a TypeError, but ignoring silently is more JavaScript-like imo
var real_header_count: usize = 0;
var estimated_buffer_len: usize = 0;
var j: usize = 0;
while (j < total_header_count) : (j += 1) {
var key_ref = js.JSPropertyNameArrayGetNameAtIndex(header_keys, j);
var value_ref = js.JSObjectGetProperty(ctx, header_prop, key_ref, null);
switch (js.JSValueGetType(ctx, value_ref)) {
js.JSType.kJSTypeNumber => {
const key_len = js.JSStringGetLength(key_ref);
if (key_len > 0) {
real_header_count += 1;
estimated_buffer_len += key_len;
// Numbers are stored as their decimal rendering; count those bytes.
estimated_buffer_len += std.fmt.count("{d}", .{js.JSValueToNumber(ctx, value_ref, null)});
}
},
js.JSType.kJSTypeString => {
const key_len = js.JSStringGetLength(key_ref);
const value_len = js.JSStringGetLength(value_ref);
// Empty keys are always skipped; string values must be non-empty too.
if (key_len > 0 and value_len > 0) {
real_header_count += 1;
estimated_buffer_len += key_len + value_len;
}
},
else => {},
}
}
if (real_header_count == 0 or estimated_buffer_len == 0) return null;
// Pass #2: allocate once using the counts from pass #1, then append.
j = 0;
var allocator = getAllocator(ctx);
var headers = Headers{
.allocator = allocator,
.buf = try std.ArrayListUnmanaged(u8).initCapacity(allocator, estimated_buffer_len),
.entries = Headers.Entries{},
};
// Free the partially-built Headers if any append below fails.
errdefer headers.deinit();
try headers.entries.ensureTotalCapacity(allocator, real_header_count);
headers.buf.expandToCapacity();
while (j < total_header_count) : (j += 1) {
var key_ref = js.JSPropertyNameArrayGetNameAtIndex(header_keys, j);
var value_ref = js.JSObjectGetProperty(ctx, header_prop, key_ref, null);
// Must mirror pass #1's filtering exactly, or the capacity math is wrong.
switch (js.JSValueGetType(ctx, value_ref)) {
js.JSType.kJSTypeNumber => {
if (js.JSStringGetLength(key_ref) == 0) continue;
try headers.appendInit(ctx, key_ref, .kJSTypeNumber, value_ref);
},
js.JSType.kJSTypeString => {
if (js.JSStringGetLength(value_ref) == 0 or js.JSStringGetLength(key_ref) == 0) continue;
try headers.appendInit(ctx, key_ref, .kJSTypeString, value_ref);
},
else => {},
}
}
return headers;
}
// https://developer.mozilla.org/en-US/docs/Web/API/Headers/Headers
pub fn constructor(
ctx: js.JSContextRef,
@@ -283,6 +682,14 @@ pub const Headers = struct {
if (arguments.len > 0 and js.JSValueIsObjectOfClass(ctx, arguments[0], Headers.Class.get().*)) {
var other = castObj(arguments[0], Headers);
other.clone(headers) catch unreachable;
} else if (arguments.len == 1 and js.JSValueIsObject(ctx, arguments[0])) {
headers.* = (JS.headersInit(ctx, arguments[0]) catch unreachable) orelse Headers{
.entries = @TypeOf(headers.entries){},
.buf = @TypeOf(headers.buf){},
.used = 0,
.allocator = getAllocator(ctx),
.guard = Guard.none,
};
} else {
headers.* = Headers{
.entries = @TypeOf(headers.entries){},
@@ -356,26 +763,25 @@ pub const Headers = struct {
none,
};
// TODO: is it worth making this lazy? instead of copying all the request headers, should we just do it on get/put/iterator?
pub fn fromRequestCtx(allocator: *std.mem.Allocator, request: *http.RequestContext) !Headers {
pub fn fromPicoHeaders(allocator: *std.mem.Allocator, picohttp_headers: []const picohttp.Header) !Headers {
var total_len: usize = 0;
for (request.request.headers) |header| {
for (picohttp_headers) |header| {
total_len += header.name.len;
total_len += header.value.len;
}
// for the null bytes
total_len += request.request.headers.len * 2;
total_len += picohttp_headers.len * 2;
var headers = Headers{
.allocator = allocator,
.entries = Entries{},
.buf = std.ArrayListUnmanaged(u8){},
};
try headers.entries.ensureTotalCapacity(allocator, request.request.headers.len);
try headers.entries.ensureTotalCapacity(allocator, picohttp_headers.len);
try headers.buf.ensureTotalCapacity(allocator, total_len);
headers.buf.expandToCapacity();
headers.guard = Guard.request;
for (request.request.headers) |header| {
for (picohttp_headers) |header| {
headers.entries.appendAssumeCapacity(Kv{
.name = headers.appendString(
string,
@@ -394,11 +800,14 @@ pub const Headers = struct {
});
}
headers.guard = Guard.immutable;
return headers;
}
// TODO: is it worth making this lazy? instead of copying all the request headers, should we just do it on get/put/iterator?
pub fn fromRequestCtx(allocator: *std.mem.Allocator, request: *http.RequestContext) !Headers {
return fromPicoHeaders(allocator, request.request.headers);
}
pub fn asStr(headers: *const Headers, ptr: Api.StringPointer) []u8 {
return headers.buf.items[ptr.offset..][0..ptr.length];
}
@@ -479,7 +888,7 @@ pub const Headers = struct {
),
.value = headers.appendString(
string,
key,
value,
needs_lowercase,
needs_normalize,
append_null,
@@ -577,6 +986,9 @@ pub const Headers = struct {
pub const Body = struct {
init: Init,
value: Value,
ptr: ?[*]u8 = null,
len: usize = 0,
ptr_allocator: ?*std.mem.Allocator = null,
pub fn deinit(this: *Body, allocator: *std.mem.Allocator) void {
if (this.init.headers) |headers| {
@@ -602,7 +1014,7 @@ pub const Body = struct {
defer js.JSPropertyNameArrayRelease(array);
const count = js.JSPropertyNameArrayGetCount(array);
var i: usize = 0;
upper: while (i < count) : (i += 1) {
while (i < count) : (i += 1) {
var property_name_ref = js.JSPropertyNameArrayGetNameAtIndex(array, i);
switch (js.JSStringGetLength(property_name_ref)) {
"headers".len => {
@@ -611,73 +1023,7 @@ pub const Body = struct {
if (js.JSObjectGetProperty(ctx, init_ref, property_name_ref, null)) |header_prop| {
switch (js.JSValueGetType(ctx, header_prop)) {
js.JSType.kJSTypeObject => {
const header_keys = js.JSObjectCopyPropertyNames(ctx, header_prop);
defer js.JSPropertyNameArrayRelease(header_keys);
const total_header_count = js.JSPropertyNameArrayGetCount(array);
if (total_header_count == 0) continue :upper;
// 2 passes through the headers
// Pass #1: find the "real" count.
// The number of things which are strings or numbers.
// Anything else should be ignored.
// We could throw a TypeError, but ignoring silently is more JavaScript-like imo
var real_header_count: usize = 0;
var estimated_buffer_len: usize = 0;
var j: usize = 0;
while (j < total_header_count) : (j += 1) {
var key_ref = js.JSPropertyNameArrayGetNameAtIndex(header_keys, j);
var value_ref = js.JSObjectGetProperty(ctx, header_prop, key_ref, null);
switch (js.JSValueGetType(ctx, value_ref)) {
js.JSType.kJSTypeNumber => {
const key_len = js.JSStringGetLength(key_ref);
if (key_len > 0) {
real_header_count += 1;
estimated_buffer_len += key_len;
estimated_buffer_len += std.fmt.count("{d}", .{js.JSValueToNumber(ctx, value_ref, null)});
}
},
js.JSType.kJSTypeString => {
const key_len = js.JSStringGetLength(key_ref);
const value_len = js.JSStringGetLength(value_ref);
if (key_len > 0 and value_len > 0) {
real_header_count += 1;
estimated_buffer_len += key_len + value_len;
}
},
else => {},
}
}
if (real_header_count == 0 or estimated_buffer_len == 0) continue :upper;
j = 0;
var headers = Headers{
.allocator = allocator,
.buf = try std.ArrayListUnmanaged(u8).initCapacity(allocator, estimated_buffer_len),
.entries = Headers.Entries{},
};
errdefer headers.deinit();
try headers.entries.ensureTotalCapacity(allocator, real_header_count);
while (j < total_header_count) : (j += 1) {
var key_ref = js.JSPropertyNameArrayGetNameAtIndex(header_keys, j);
var value_ref = js.JSObjectGetProperty(ctx, header_prop, key_ref, null);
switch (js.JSValueGetType(ctx, value_ref)) {
js.JSType.kJSTypeNumber => {
if (js.JSStringGetLength(key_ref) == 0) continue;
try headers.appendInit(ctx, key_ref, .kJSTypeNumber, value_ref);
},
js.JSType.kJSTypeString => {
if (js.JSStringGetLength(value_ref) == 0 or js.JSStringGetLength(key_ref) == 0) continue;
try headers.appendInit(ctx, key_ref, .kJSTypeString, value_ref);
},
else => {},
}
}
result.headers = headers;
result.headers = try Headers.JS.headersInit(ctx, header_prop);
},
else => {},
}
@@ -705,10 +1051,12 @@ pub const Body = struct {
ArrayBuffer: ArrayBuffer,
String: string,
Empty: u0,
Unconsumed: u0,
pub const Tag = enum {
ArrayBuffer,
String,
Empty,
Unconsumed,
};
pub fn length(value: *const Value) usize {
@@ -719,7 +1067,7 @@ pub const Body = struct {
.String => |str| {
return str.len;
},
.Empty => {
else => {
return 0;
},
}
@@ -783,6 +1131,8 @@ pub const Body = struct {
}
body.value = Value{ .String = str.characters8()[0..len] };
body.ptr = @intToPtr([*]u8, @ptrToInt(body.value.String.ptr));
body.len = body.value.String.len;
return body;
},
.kJSTypeObject => {
@@ -807,6 +1157,8 @@ pub const Body = struct {
} else |err| {}
}
body.value = Value{ .ArrayBuffer = buffer };
body.ptr = buffer.ptr[buffer.offset..buffer.byte_len].ptr;
body.len = buffer.ptr[buffer.offset..buffer.byte_len].len;
return body;
},
}

View File

@@ -0,0 +1,31 @@
// This is just a no-op. Intent is to prevent importing a bunch of stuff that isn't relevant.
module.exports = (wrapper = Bun.fetch) => {
return async function vercelFetch(url, opts = {}) {
// Convert Object bodies to JSON if they are JS objects
if (
opts.body &&
typeof opts.body === "object" &&
(!("buffer" in opts.body) ||
typeof opts.body.buffer !== "object" ||
!(opts.body.buffer instanceof ArrayBuffer))
) {
opts.body = JSON.stringify(opts.body);
// Content length will automatically be set
if (!opts.headers) opts.headers = new Headers();
opts.headers.set("Content-Type", "application/json");
}
try {
return await wrapper(url, opts);
} catch (err) {
if (typeof err === "string") {
err = new Error(err);
}
err.url = url;
err.opts = opts;
throw err;
}
};
};

View File

@@ -0,0 +1 @@
// Node-fallback shim: re-export Bun's built-in fetch as this module's default export.
export default Bun.fetch;

View File

@@ -0,0 +1 @@
// Node-fallback shim: re-export Bun's built-in fetch as this module's default export.
export default Bun.fetch;

View File

@@ -27,6 +27,13 @@ const _url_code: string = @embedFile("./node-fallbacks/out/url.js");
const _util_code: string = @embedFile("./node-fallbacks/out/util.js");
const _zlib_code: string = @embedFile("./node-fallbacks/out/zlib.js");
const _node_fetch_code: string = @embedFile("./node-fallbacks/out/node-fetch.js");
const _isomorphic_fetch_code: string = @embedFile("./node-fallbacks/out/isomorphic-fetch.js");
const _vercel_fetch_code: string = @embedFile("./node-fallbacks/out/@vercel_fetch.js");
const node_fetch_code: *const string = &_node_fetch_code;
const isomorphic_fetch_code: *const string = &_isomorphic_fetch_code;
const vercel_fetch_code: *const string = &_vercel_fetch_code;
const assert_code: *const string = &_assert_code;
const buffer_code: *const string = &_buffer_code;
const console_code: *const string = &_console_code;
@@ -73,6 +80,10 @@ const url_import_path = "/bun-vfs/node_modules/url/index.js";
const util_import_path = "/bun-vfs/node_modules/util/index.js";
const zlib_import_path = "/bun-vfs/node_modules/zlib/index.js";
const node_fetch_import_path = "/bun-vfs/node_modules/node-fetch/index.js";
const isomorphic_fetch_import_path = "/bun-vfs/node_modules/isomorphic-fetch/index.js";
const vercel_fetch_import_path = "/bun-vfs/node_modules/@vercel/fetch/index.js";
const assert_package_json = PackageJSON{
.name = "assert",
.version = "0.0.0-polyfill",
@@ -277,6 +288,34 @@ const zlib_package_json = PackageJSON{
.source = logger.Source.initPathString("/bun-vfs/node_modules/zlib/package.json", ""),
};
const node_fetch_package_json = PackageJSON{
.name = "node-fetch",
.version = "0.0.0-polyfill",
.module_type = .cjs,
.hash = @truncate(u32, std.hash.Wyhash.hash(0, "node-fetch@0.0.0-polyfill")),
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/node-fetch/package.json", ""),
};
const isomorphic_fetch_package_json = PackageJSON{
.name = "isomorphic-fetch",
.version = "0.0.0-polyfill",
.module_type = .cjs,
.hash = @truncate(u32, std.hash.Wyhash.hash(0, "isomorphic-fetch@0.0.0-polyfill")),
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/isomorphic-fetch/package.json", ""),
};
const vercel_fetch_package_json = PackageJSON{
.name = "@vercel/fetch",
.version = "0.0.0-polyfill",
.module_type = .cjs,
.hash = @truncate(u32, std.hash.Wyhash.hash(0, "@vercel/fetch@0.0.0-polyfill")),
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/@vercel/fetch/package.json", ""),
};
pub const FallbackModule = struct {
path: Fs.Path,
code: *const string,
@@ -392,6 +431,24 @@ pub const FallbackModule = struct {
.code = zlib_code,
.package_json = &zlib_package_json,
};
pub const @"node-fetch" = FallbackModule{
.path = Fs.Path.initWithNamespaceVirtual(node_fetch_import_path, "node", "node-fetch"),
.code = node_fetch_code,
.package_json = &node_fetch_package_json,
};
pub const @"isomorphic-fetch" = FallbackModule{
.path = Fs.Path.initWithNamespaceVirtual(isomorphic_fetch_import_path, "node", "isomorphic-fetch"),
.code = isomorphic_fetch_code,
.package_json = &isomorphic_fetch_package_json,
};
pub const @"@vercel/fetch" = FallbackModule{
.path = Fs.Path.initWithNamespaceVirtual(vercel_fetch_import_path, "node", "@vercel/fetch"),
.code = vercel_fetch_code,
.package_json = &vercel_fetch_package_json,
};
};
pub const Map = std.ComptimeStringMap(FallbackModule, .{
@@ -417,4 +474,8 @@ pub const Map = std.ComptimeStringMap(FallbackModule, .{
&.{ "url", FallbackModule.url },
&.{ "util", FallbackModule.util },
&.{ "zlib", FallbackModule.zlib },
&.{ "node-fetch", FallbackModule.@"node-fetch" },
&.{ "isomorphic-fetch", FallbackModule.@"isomorphic-fetch" },
&.{ "@vercel/fetch", FallbackModule.@"@vercel/fetch" },
});

View File

@@ -20,6 +20,37 @@ pub const URL = struct {
username: string = "",
port_was_automatically_set: bool = false,
/// Heuristic: true when the hostname contains any character that cannot appear
/// in a dotted IPv4 / colon-separated numeric address (digits, '.', ':'),
/// meaning it must be a DNS name. An empty hostname yields false.
/// NOTE(review): hex digits a-f in an IPv6 literal would also return true —
/// confirm that is acceptable to callers.
pub fn isDomainName(this: *const URL) bool {
    // Fix: the original captured an index `i` that was never used.
    for (this.hostname) |c| {
        switch (c) {
            '0'...'9', '.', ':' => {},
            else => {
                return true;
            },
        }
    }
    return false;
}
/// True for an empty hostname, "localhost", or the wildcard "0.0.0.0".
pub fn isLocalhost(this: *const URL) bool {
    const host = this.hostname;
    if (host.len == 0) return true;
    if (strings.eqlComptime(host, "localhost")) return true;
    return strings.eqlComptime(host, "0.0.0.0");
}
/// Parses the hostname (or, when the hostname is empty, the full href) as an
/// IPv4 address. Returns null when parsing fails.
/// NOTE(review): return type `std.x.net.ip.Address.IPv4` vs the
/// `std.x.os.IPv4` produced by parse — confirm they are the same type alias.
pub fn getIPv4Address(this: *const URL) ?std.x.net.ip.Address.IPv4 {
    // Fix: Zig slices expose `.len`; `.length` does not compile.
    return (if (this.hostname.len > 0)
        std.x.os.IPv4.parse(this.hostname)
    else
        std.x.os.IPv4.parse(this.href)) catch return null;
}
/// Parses the hostname (or, when the hostname is empty, the full href) as an
/// IPv6 address. Returns null when parsing fails.
/// NOTE(review): return type `std.x.net.ip.Address.IPv6` vs the
/// `std.x.os.IPv6` produced by parse — confirm they are the same type alias.
pub fn getIPv6Address(this: *const URL) ?std.x.net.ip.Address.IPv6 {
    // Fix: Zig slices expose `.len`; `.length` does not compile.
    return (if (this.hostname.len > 0)
        std.x.os.IPv6.parse(this.hostname)
    else
        std.x.os.IPv6.parse(this.href)) catch return null;
}
pub fn displayProtocol(this: *const URL) string {
if (this.protocol.len > 0) {
return this.protocol;
@@ -34,6 +65,10 @@ pub const URL = struct {
return "http";
}
/// True when the parsed protocol is exactly "https".
pub inline fn isHTTPS(this: *const URL) bool {
return strings.eqlComptime(this.protocol, "https");
}
pub fn displayHostname(this: *const URL) string {
if (this.hostname.len > 0) {
return this.hostname;
@@ -50,6 +85,10 @@ pub const URL = struct {
return std.fmt.parseInt(u16, this.port, 10) catch null;
}
/// The explicit port when one was given, otherwise the protocol default:
/// 443 for https, 80 for everything else.
pub fn getPortAuto(this: *const URL) u16 {
    if (this.getPort()) |explicit_port| {
        return explicit_port;
    }
    return if (this.isHTTPS()) @as(u16, 443) else @as(u16, 80);
}
/// A port is considered valid when it parses successfully and is greater
/// than 1 (an unparseable port is treated as 0).
pub fn hasValidPort(this: *const URL) bool {
    const parsed = this.getPort() orelse 0;
    return parsed > 1;
}

View File

@@ -124,8 +124,8 @@ pub const StringOrTinyString = struct {
pub fn copyLowercase(in: string, out: []u8) string {
@setRuntimeSafety(false);
var in_slice = in;
var out_slice = out[0..in.len];
var in_slice: string = in;
var out_slice: []u8 = out[0..in.len];
begin: while (out_slice.len > 0) {
@setRuntimeSafety(false);