Commit in oven-sh/bun (https://github.com/oven-sh/bun):
HTTP fixes + buffer stdout/in + a little HTTP caching
@@ -106,6 +106,12 @@ pub fn build(b: *std.build.Builder) void {
             }
         }
     }
+
+    const runtime_hash = std.hash.Wyhash.hash(0, @embedFile("./src/runtime.js"));
+    const runtime_version_file = std.fs.cwd().openFile("src/runtime.version", .{ .write = true }) catch unreachable;
+    runtime_version_file.writer().print("{x}", .{runtime_hash}) catch unreachable;
+    defer runtime_version_file.close();
+
     exe.setTarget(target);
     exe.setBuildMode(mode);
     b.install_path = output_dir;

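For context, a minimal standalone sketch (not part of the commit) of what this build step does: hash the embedded runtime with Wyhash, seed 0, and format the digest as lowercase hex, which is what lands in src/runtime.version and is later used as a cache-busting version string. The file contents below are a stand-in for the real runtime.js.

    const std = @import("std");

    pub fn main() !void {
        // Stand-in for @embedFile("./src/runtime.js").
        const runtime_source = "export var __hmr = {};";
        const runtime_hash = std.hash.Wyhash.hash(0, runtime_source);

        // A u64 is at most 16 hex digits.
        var buf: [16]u8 = undefined;
        const version = try std.fmt.bufPrint(&buf, "{x}", .{runtime_hash});

        // In the commit this is written to src/runtime.version, e.g. "6c20b700cd52b930".
        std.debug.print("runtime version: {s}\n", .{version});
    }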
@@ -25,10 +25,11 @@ const MimeType = @import("./http/mime_type.zig");
 const resolve_path = @import("./resolver/resolve_path.zig");
 const runtime = @import("./runtime.zig");
 const Linker = linker.Linker;
+const Timer = @import("./timer.zig");

 pub const ServeResult = struct {
     value: Value,

+    free: bool = true,
     mime_type: MimeType,

     // Either we:
@@ -69,6 +70,7 @@ pub const Bundler = struct {
     elapsed: i128 = 0,
     needs_runtime: bool = false,
     linker: Linker,
+    timer: Timer = Timer{},

     pub const RuntimeCode = @embedFile("./runtime.js");

@@ -137,9 +139,10 @@ pub const Bundler = struct {

         try bundler.linker.link(file_path, &result);

-        const output_file = try bundler.print(
+        var output_file = try bundler.print(
             result,
         );
+        // output_file.version = if (resolve_result.is_from_node_modules) resolve_result.package_json_version else null;

         return output_file;
     }
@@ -196,14 +199,15 @@ pub const Bundler = struct {

         ast: js_ast.Ast,
     };
     pub var tracing_start: i128 = if (enableTracing) 0 else undefined;

     pub fn parse(bundler: *Bundler, path: Fs.Path, loader: options.Loader, dirname_fd: StoredFileDescriptorType) ?ParseResult {
         if (enableTracing) {
-            tracing_start = std.time.nanoTimestamp();
+            bundler.timer.start();
         }
         defer {
             if (enableTracing) {
-                bundler.elapsed += std.time.nanoTimestamp() - tracing_start;
+                bundler.timer.stop();
+                bundler.elapsed += bundler.timer.elapsed;
             }
         }
         var result: ParseResult = undefined;
@@ -269,8 +273,9 @@ pub const Bundler = struct {
         log: *logger.Log,
         allocator: *std.mem.Allocator,
         relative_path: string,
-        extension: string,
+        _extension: string,
     ) !ServeResult {
+        var extension = _extension;
         var original_resolver_logger = bundler.resolver.log;
         var original_bundler_logger = bundler.log;

@@ -294,14 +299,15 @@ pub const Bundler = struct {
         var _file: ?std.fs.File = null;

         // Is it the index file?
-        if (relative_unrooted_path.len == 1 and relative_unrooted_path[0] == '.') {
+        if (relative_unrooted_path.len == 0) {
             // std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path);
             // std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/"
             // Search for /index.html
             if (public_dir.openFile("index.html", .{})) |file| {
-                std.mem.copy(u8, relative_unrooted_path, "index.html");
-                relative_unrooted_path = relative_unrooted_path[0.."index.html".len];
+                var index_path = "index.html".*;
+                relative_unrooted_path = &(index_path);
                 _file = file;
                 extension = "html";
             } else |err| {}
             // Okay is it actually a full path?
         } else {
@@ -319,6 +325,7 @@ pub const Bundler = struct {

             if (public_dir.openFile(tmp_buildfile_buf[0 .. relative_unrooted_path.len + ".html".len], .{})) |file| {
                 _file = file;
+                extension = "html";
                 break;
             } else |err| {}

@@ -337,6 +344,7 @@ pub const Bundler = struct {
             if (public_dir.openFile(_path, .{})) |file| {
                 const __path = _path;
                 relative_unrooted_path = __path;
+                extension = "html";
                 _file = file;
                 break;
             } else |err| {}
@@ -357,14 +365,32 @@ pub const Bundler = struct {
             }
         }

+        if (strings.eqlComptime(relative_path, "__runtime.js")) {
+            return ServeResult{
+                .free = false,
+                .value = .{ .build = .{ .path = "__runtime.js", .contents = runtime.SourceContent } },
+                .mime_type = MimeType.javascript,
+            };
+        }
+
         // We make some things faster in theory by using absolute paths instead of relative paths
-        const absolute_path = resolve_path.joinAbsStringBuf(
+        var absolute_path = resolve_path.joinAbsStringBuf(
             bundler.fs.top_level_dir,
             &tmp_buildfile_buf,
             &([_][]const u8{relative_path}),
             .auto,
         );

         defer {
             js_ast.Expr.Data.Store.reset();
             js_ast.Stmt.Data.Store.reset();
         }

+        // If the extension is .js, omit it.
+        // if (absolute_path.len > ".js".len and strings.eqlComptime(absolute_path[absolute_path.len - ".js".len ..], ".js")) {
+        //     absolute_path = absolute_path[0 .. absolute_path.len - ".js".len];
+        // }
+
         const resolved = (try bundler.resolver.resolve(bundler.fs.top_level_dir, absolute_path, .entry_point));

         const loader = bundler.options.loaders.get(resolved.path_pair.primary.name.ext) orelse .file;

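The new free flag exists because the __runtime.js response points at embedded, static bytes that must never be passed to allocator.free, while built output is heap-allocated and owned by the response. A reduced sketch of that ownership rule, with hypothetical names, using the 0.8-era allocator API the diff itself uses (&gpa.allocator, *std.mem.Allocator):

    const std = @import("std");

    // Hypothetical, simplified stand-in for ServeResult.
    const Served = struct {
        contents: []const u8,
        free: bool = true,
    };

    fn respond(allocator: *std.mem.Allocator, served: Served) void {
        // Mirrors the http.zig change: only free when the result owns its bytes.
        defer {
            if (served.free) {
                allocator.free(served.contents);
            }
        }
        std.debug.print("{s}\n", .{served.contents});
    }

    test "static contents are not freed" {
        var gpa = std.heap.GeneralPurposeAllocator(.{}){};
        const allocator = &gpa.allocator;

        const built = allocator.dupe(u8, "console.log(1);") catch unreachable;
        respond(allocator, .{ .contents = built }); // freed inside respond
        respond(allocator, .{ .contents = "embedded runtime", .free = false });

        std.debug.assert(!gpa.deinit()); // no leaks
    }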
@@ -418,7 +418,7 @@ pub const Cli = struct {

         if (!did_write) {
             for (result.output_files) |file, i| {
-                try writer.writeAll(file.contents);
+                try stdout.unbuffered_writer.writeAll(file.contents);
                 if (i > 0) {
                     _ = try writer.write("\n\n");
                 }

@@ -63,7 +63,8 @@ pub const Output = struct {
         if (isWasm) {
             return std.io.FixedBufferStream([]u8);
         } else {
-            return std.fs.File;
+            var stdin = std.io.getStdIn();
+            return @TypeOf(std.io.bufferedWriter(stdin.writer()));
         }
     };
     stream: StreamType,

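The point of returning @TypeOf(std.io.bufferedWriter(...)) here is that the buffered writer's otherwise anonymous type gets a name, so it can be stored in the struct's stream field. A reduced sketch of the pattern (names are illustrative; @TypeOf does not evaluate its operand, so this is comptime-safe):

    const std = @import("std");

    // Name the buffered-writer type so it can be a struct field,
    // mirroring Output.StreamType above.
    const BufferedStdout = @TypeOf(std.io.bufferedWriter(std.io.getStdOut().writer()));

    const Out = struct {
        stream: BufferedStdout,
    };

    pub fn main() !void {
        var out = Out{ .stream = std.io.bufferedWriter(std.io.getStdOut().writer()) };
        try out.stream.writer().print("buffered hello\n", .{});
        try out.stream.flush();
    }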
src/http.zig (19 lines changed)
@@ -491,7 +491,23 @@ pub const RequestContext = struct {
             }
         },
         .build => |output| {
-            defer ctx.bundler.allocator.free(output.contents);
+            defer {
+                if (result.free) {
+                    ctx.bundler.allocator.free(output.contents);
+                }
+            }

+            // The version query string is only included for:
+            // - The runtime
+            // - node_modules
+            // For the runtime, it's a hash of the file contents
+            // For node modules, it's just the package version from the package.json
+            // It's safe to assume node_modules are immutable. In practice, they aren't.
+            // However, a lot of other stuff breaks when node_modules change so it's fine
+            if (strings.contains(ctx.url.query_string, "v=")) {
+                ctx.appendHeader("Cache-Control", "public, immutable, max-age=31556952");
+            }
+
             if (FeatureFlags.strong_etags_for_built_files) {
                 const strong_etag = std.hash.Wyhash.hash(1, output.contents);
                 const etag_content_slice = std.fmt.bufPrintIntToSlice(strong_etag_buffer[0..49], strong_etag, 16, true, .{});
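A minimal standalone sketch of the caching scheme above, assuming the same era of Zig std APIs: the strong ETag is a Wyhash (seed 1, as in the hunk) of the exact bytes served, and any URL carrying a "v=" version query string gets a long-lived immutable Cache-Control header. The request values here are stand-ins for the real request context.

    const std = @import("std");

    pub fn main() void {
        const output_contents = "export default 42;";
        const query_string = "v=6c20b700cd52b930";

        // Strong ETag: hash of the exact bytes served, rendered in hex.
        const strong_etag = std.hash.Wyhash.hash(1, output_contents);
        var strong_etag_buffer: [49]u8 = undefined;
        const etag_content_slice = std.fmt.bufPrintIntToSlice(&strong_etag_buffer, strong_etag, 16, true, .{});
        std.debug.print("ETag: {s}\n", .{etag_content_slice});

        // Versioned assets never change for a given URL, so cache them "forever"
        // (31556952 seconds is one Gregorian year).
        if (std.mem.indexOf(u8, query_string, "v=") != null) {
            std.debug.print("Cache-Control: public, immutable, max-age=31556952\n", .{});
        }
    }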
@@ -680,6 +696,7 @@ pub const Server = struct {
             .bundler = undefined,
         };
         server.bundler = try Bundler.init(allocator, &server.log, options);
+        server.bundler.configureLinker();

         try server.run();
     }

src/js_lexer.zig (114 lines changed)
@@ -20,6 +20,17 @@ pub const PropertyModifierKeyword = tables.PropertyModifierKeyword;
 pub const TypescriptStmtKeyword = tables.TypescriptStmtKeyword;
 pub const TypeScriptAccessibilityModifier = tables.TypeScriptAccessibilityModifier;

+fn utf8ByteSequenceLength(first_byte: u8) u3 {
+    // The switch is optimized much better than a "smart" approach using @clz
+    return switch (first_byte) {
+        0b0000_0000...0b0111_1111 => 1,
+        0b1100_0000...0b1101_1111 => 2,
+        0b1110_0000...0b1110_1111 => 3,
+        0b1111_0000...0b1111_0111 => 4,
+        else => 0,
+    };
+}
+
 fn notimpl() noreturn {
     Global.panic("not implemented yet!", .{});
 }
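A small check, as a sketch (not from the commit), that the switch-based utf8ByteSequenceLength agrees with std.unicode for valid leading bytes; the difference is that continuation bytes and invalid leaders yield 0 instead of an error, which is what lets the lexer drop the error union below.

    const std = @import("std");

    fn utf8ByteSequenceLength(first_byte: u8) u3 {
        return switch (first_byte) {
            0b0000_0000...0b0111_1111 => 1,
            0b1100_0000...0b1101_1111 => 2,
            0b1110_0000...0b1110_1111 => 3,
            0b1111_0000...0b1111_0111 => 4,
            else => 0,
        };
    }

    test "matches std.unicode for all leading bytes" {
        var byte: usize = 0;
        while (byte <= 255) : (byte += 1) {
            const b = @intCast(u8, byte);
            // Where std.unicode errors (0b10xx_xxxx and invalid leaders), we expect 0.
            const expected: u3 = std.unicode.utf8ByteSequenceLength(b) catch 0;
            std.debug.assert(expected == utf8ByteSequenceLength(b));
        }
    }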
@@ -123,22 +134,22 @@ pub const Lexer = struct {
         return logger.usize2Loc(self.start);
     }

-    inline fn nextCodepointSlice(it: *LexerType) !?[]const u8 {
+    inline fn nextCodepointSlice(it: *LexerType) []const u8 {
         @setRuntimeSafety(false);

-        if (it.current >= it.source.contents.len) {
-            // without this line, strings cut off one before the last characte
-            it.end = it.current;
-            @setRuntimeSafety(false);
+        // if (it.current >= it.source.contents.len) {
+        //     // without this line, strings cut off one before the last characte
+        //     it.end = it.current;
+        //     @setRuntimeSafety(false);

-            return null;
-        }
+        //     return null;
+        // }

-        const cp_len = unicode.utf8ByteSequenceLength(it.source.contents[it.current]) catch return Error.UTF8Fail;
+        const cp_len = utf8ByteSequenceLength(it.source.contents[it.current]);
         it.end = it.current;
         it.current += cp_len;

-        return it.source.contents[it.current - cp_len .. it.current];
+        return if (!(it.current > it.source.contents.len)) it.source.contents[it.current - cp_len .. it.current] else "";
     }

     pub fn syntaxError(self: *LexerType) !void {
@@ -192,27 +203,29 @@ pub const Lexer = struct {
     }

     inline fn nextCodepoint(it: *LexerType) !CodePoint {
-        const slice = (try it.nextCodepointSlice()) orelse return @as(CodePoint, -1);
+        const slice = it.nextCodepointSlice();

-        switch (slice.len) {
-            1 => return @as(CodePoint, slice[0]),
-            2 => return @as(CodePoint, unicode.utf8Decode2(slice) catch unreachable),
-            3 => return @as(CodePoint, unicode.utf8Decode3(slice) catch unreachable),
-            4 => return @as(CodePoint, unicode.utf8Decode4(slice) catch unreachable),
+        return switch (slice.len) {
+            0 => -1,
+            1 => @as(CodePoint, slice[0]),
+            2 => @as(CodePoint, unicode.utf8Decode2(slice) catch unreachable),
+            3 => @as(CodePoint, unicode.utf8Decode3(slice) catch unreachable),
+            4 => @as(CodePoint, unicode.utf8Decode4(slice) catch unreachable),
             else => unreachable,
-        }
+        };
     }

     /// Look ahead at the next n codepoints without advancing the iterator.
     /// If fewer than n codepoints are available, then return the remainder of the string.
-    fn peek(it: *LexerType, n: usize) !string {
+    fn peek(it: *LexerType, n: usize) string {
         const original_i = it.current;
         defer it.current = original_i;

         var end_ix = original_i;
         var found: usize = 0;
         while (found < n) : (found += 1) {
-            const next_codepoint = (try it.nextCodepointSlice()) orelse return it.source.contents[original_i..];
+            const next_codepoint = it.nextCodepointSlice();
+            if (next_codepoint.len == 0) break;
             end_ix += next_codepoint.len;
         }

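With nextCodepointSlice now returning a plain slice, an empty slice (len == 0) is the end-of-input sentinel and no error union is threaded through the hot path. A reduced sketch of the same sentinel pattern over a standalone buffer:

    const std = @import("std");

    // Simplified stand-in for the lexer's iterator state.
    const Iter = struct {
        contents: []const u8,
        current: usize = 0,

        fn nextCodepointSlice(it: *Iter) []const u8 {
            if (it.current >= it.contents.len) return "";
            const cp_len = std.unicode.utf8ByteSequenceLength(it.contents[it.current]) catch 1;
            const start = it.current;
            it.current += cp_len;
            return it.contents[start..it.current];
        }
    };

    pub fn main() void {
        var it = Iter{ .contents = "héllo" };
        while (true) {
            const slice = it.nextCodepointSlice();
            if (slice.len == 0) break; // sentinel instead of error/null
            std.debug.print("{s}\n", .{slice});
        }
    }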
@@ -963,8 +976,7 @@ pub const Lexer = struct {
             if (lexer.code_point == '\\') {
                 @setRuntimeSafety(false);

-                const scan_result = try lexer.scanIdentifierWithEscapes(.private);
-                lexer.identifier = scan_result.contents;
+                lexer.identifier = (try lexer.scanIdentifierWithEscapes(.private)).contents;
                 lexer.token = T.t_private_identifier;
             } else {
                 @setRuntimeSafety(false);
@@ -978,8 +990,7 @@ pub const Lexer = struct {
                 try lexer.step();
             }
             if (lexer.code_point == '\\') {
-                const scan_result = try lexer.scanIdentifierWithEscapes(.private);
-                lexer.identifier = scan_result.contents;
+                lexer.identifier = (try lexer.scanIdentifierWithEscapes(.private)).contents;
                 lexer.token = T.t_private_identifier;
             } else {
                 lexer.token = T.t_private_identifier;
@@ -1379,7 +1390,7 @@ pub const Lexer = struct {
             },
             // Handle legacy HTML-style comments
             '!' => {
-                if (strings.eqlComptime(try lexer.peek("--".len), "--")) {
+                if (strings.eqlComptime(lexer.peek("--".len), "--")) {
                     try lexer.addUnsupportedSyntaxError("Legacy HTML comments not implemented yet!");
                     return;
                 }
@@ -1470,9 +1481,10 @@ pub const Lexer = struct {
                 lexer.identifier = scan_result.contents;
                 lexer.token = scan_result.token;
             } else {
-                const contents = lexer.raw();
-                lexer.identifier = contents;
-                lexer.token = Keywords.get(contents) orelse T.t_identifier;
+                // this code is so hot that if you save lexer.raw() into a temporary variable
+                // it shows up in profiling
+                lexer.identifier = lexer.raw();
+                lexer.token = Keywords.get(lexer.identifier) orelse T.t_identifier;
             }
         },

@@ -2534,43 +2546,23 @@ pub const Lexer = struct {
 };

 pub fn isIdentifierStart(codepoint: CodePoint) bool {
-    switch (codepoint) {
-        'a'...'z', 'A'...'Z', '_', '$' => {
-            return true;
-        },
-        else => {
-            return false;
-        },
-    }
+    @setRuntimeSafety(false);
+    return switch (codepoint) {
+        'a'...'z', 'A'...'Z', '_', '$' => true,
+        else => false,
+    };
 }
 pub fn isIdentifierContinue(codepoint: CodePoint) bool {
     @setRuntimeSafety(false);

-    switch (codepoint) {
-        '_', '$', '0'...'9', 'a'...'z', 'A'...'Z' => {
-            return true;
-        },
-        -1 => {
-            return false;
-        },
-        else => {},
-    }
-
-    // All ASCII identifier start code points are listed above
-    if (codepoint < 0x7F) {
-        return false;
-    }
-
-    // ZWNJ and ZWJ are allowed in identifiers
-    if (codepoint == 0x200C or codepoint == 0x200D) {
-        return true;
-    }
-
-    return false;
+    return switch (codepoint) {
+        'a'...'z', 'A'...'Z', '_', '$', '0'...'9', 0x200C, 0x200D => true,
+        else => false,
+    };
 }

 pub fn isWhitespace(codepoint: CodePoint) bool {
-    switch (codepoint) {
+    return switch (codepoint) {
         0x000B, // line tabulation
         0x0009, // character tabulation
         0x000C, // form feed
@@ -2593,13 +2585,9 @@ pub fn isWhitespace(codepoint: CodePoint) bool {
         0x205F, // medium mathematical space
         0x3000, // ideographic space
         0xFEFF, // zero width non-breaking space
-        => {
-            return true;
-        },
-        else => {
-            return false;
-        },
-    }
+        => true,
+        else => false,
+    };
 }

 pub fn isIdentifier(text: string) bool {

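A quick check of what the rewritten predicate accepts, as a sketch: the old explicit `-1` arm (the lexer's EOF codepoint) now simply falls into `else => false`, and ZWNJ/ZWJ remain the only non-ASCII codepoints allowed.

    const std = @import("std");

    const CodePoint = i32; // matches the lexer's signed codepoint; -1 is EOF

    fn isIdentifierContinue(codepoint: CodePoint) bool {
        return switch (codepoint) {
            'a'...'z', 'A'...'Z', '_', '$', '0'...'9', 0x200C, 0x200D => true,
            else => false,
        };
    }

    pub fn main() void {
        std.debug.assert(isIdentifierContinue('a'));
        std.debug.assert(isIdentifierContinue('9'));
        std.debug.assert(isIdentifierContinue(0x200D)); // ZWJ is valid in identifiers
        std.debug.assert(!isIdentifierContinue(-1)); // EOF falls into else
        std.debug.assert(!isIdentifierContinue(' '));
    }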
@@ -9838,10 +9838,12 @@ pub const P = struct {
             }

             if (p.options.jsx.development) {
-                // If we leave it as false, we get a warning about not providing a key
-                // It calls Object.freeze on the array though
-                // Which is wasteful! Object.freeze is slow.
-                args[3] = Expr{ .loc = expr.loc, .data = .{ .e_boolean = .{ .value = e_.children.len == 0 } } };
+                // is the return type of the first child an array?
+                // It's dynamic
+                // Else, it's static
+                args[3] = Expr{ .loc = expr.loc, .data = .{ .e_boolean = .{
+                    .value = e_.children.len == 0 or e_.children.len > 1 or std.meta.activeTag(e_.children[0].data) != .e_array,
+                } } };

                 var source = p.allocator.alloc(G.Property, 2) catch unreachable;
                 p.recordUsage(p.jsx_filename_ref);
@@ -90,6 +90,7 @@ pub const Linker = struct {
             import_record.path = try linker.generateImportPath(
                 source_dir,
                 linker.runtime_source_path,
+                Runtime.version(),
             );
             result.ast.runtime_import_record_id = @truncate(u32, record_index);
             result.ast.needs_runtime = true;
@@ -124,8 +125,6 @@ pub const Linker = struct {
                 import_record.wrap_with_to_module = true;
                 result.ast.needs_runtime = true;
             }
-
-            Output.println("{s} ({s}): CommonJS? {d}", .{ import_record.path.text, @tagName(resolved_import.module_type), @boolToInt(resolved_import.shouldAssumeCommonJS(import_record)) });
         } else |err| {
             switch (err) {
                 error.ModuleNotFound => {
@@ -179,6 +178,7 @@ pub const Linker = struct {
                 .path = try linker.generateImportPath(
                     source_dir,
                     linker.runtime_source_path,
+                    Runtime.version(),
                 ),
                 .range = logger.Range{ .loc = logger.Loc{ .start = 0 }, .len = 0 },
             };
@@ -191,7 +191,7 @@ pub const Linker = struct {
     threadlocal var relative_path_allocator_buf: [4096]u8 = undefined;
     threadlocal var relative_path_allocator_buf_loaded: bool = false;

-    pub fn generateImportPath(linker: *Linker, source_dir: string, source_path: string) !Fs.Path {
+    pub fn generateImportPath(linker: *Linker, source_dir: string, source_path: string, package_version: ?string) !Fs.Path {
         if (!relative_path_allocator_buf_loaded) {
             relative_path_allocator_buf_loaded = true;
             relative_path_allocator = std.heap.FixedBufferAllocator.init(&relative_path_allocator_buf);
@@ -202,8 +202,10 @@ pub const Linker = struct {
         var pathname = Fs.PathName.init(pretty);
         var absolute_pathname = Fs.PathName.init(source_path);

-        if (linker.options.out_extensions.get(absolute_pathname.ext)) |ext| {
-            absolute_pathname.ext = ext;
+        if (!linker.options.preserve_extensions) {
+            if (linker.options.out_extensions.get(absolute_pathname.ext)) |ext| {
+                absolute_pathname.ext = ext;
+            }
         }

         switch (linker.options.import_path_format) {
@@ -222,19 +224,36 @@ pub const Linker = struct {
                 base = base[0..dot];
             }

-            const absolute_url = try relative_paths_list.append(
-                try std.fmt.allocPrint(
-                    &relative_path_allocator.allocator,
-                    "{s}{s}{s}",
-                    .{
-                        linker.options.public_url,
-                        base,
-                        absolute_pathname.ext,
-                    },
-                ),
-            );
+            if (linker.options.append_package_version_in_query_string and package_version != null) {
+                const absolute_url = try relative_paths_list.append(
+                    try std.fmt.allocPrint(
+                        &relative_path_allocator.allocator,
+                        "{s}{s}{s}?v={s}",
+                        .{
+                            linker.options.public_url,
+                            base,
+                            absolute_pathname.ext,
+                            package_version.?,
+                        },
+                    ),
+                );

-            return Fs.Path.initWithPretty(absolute_url, absolute_url);
+                return Fs.Path.initWithPretty(absolute_url, absolute_url);
+            } else {
+                const absolute_url = try relative_paths_list.append(
+                    try std.fmt.allocPrint(
+                        &relative_path_allocator.allocator,
+                        "{s}{s}{s}",
+                        .{
+                            linker.options.public_url,
+                            base,
+                            absolute_pathname.ext,
+                        },
+                    ),
+                );

+                return Fs.Path.initWithPretty(absolute_url, absolute_url);
+            }
         },

         else => unreachable,
@@ -253,7 +272,11 @@ pub const Linker = struct {
             try linker.enqueueResolveResult(resolve_result);
         }

-        import_record.path = try linker.generateImportPath(source_dir, resolve_result.path_pair.primary.text);
+        import_record.path = try linker.generateImportPath(
+            source_dir,
+            resolve_result.path_pair.primary.text,
+            resolve_result.package_json_version,
+        );
     }

     pub fn resolveResultHashKey(linker: *Linker, resolve_result: *const Resolver.Resolver.Result) string {

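The effect of the new package_version parameter: when serving, an import resolved from node_modules picks up its package.json version as a "?v=" query string, which is exactly what the HTTP layer keys its immutable Cache-Control on. A sketch of the two URL shapes, with illustrative values:

    const std = @import("std");

    pub fn main() !void {
        const public_url = "http://localhost:9000/";
        const base = "node_modules/react/index";
        const ext = ".js";

        // With a package version: stable URL per version, safe to cache "forever".
        var buf: [256]u8 = undefined;
        const versioned = try std.fmt.bufPrint(&buf, "{s}{s}{s}?v={s}", .{ public_url, base, ext, "17.0.2" });
        std.debug.print("{s}\n", .{versioned});

        // Without one: plain path, no long-lived caching.
        var buf2: [256]u8 = undefined;
        const plain = try std.fmt.bufPrint(&buf2, "{s}{s}{s}", .{ public_url, base, ext });
        std.debug.print("{s}\n", .{plain});
    }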
src/main.zig (10 lines changed)
@@ -30,10 +30,14 @@ pub fn main() anyerror!void {
     // var root_alloc = std.heap.ArenaAllocator.init(std.heap.raw_c_allocator);
     // var root_alloc_ = &root_alloc.allocator;
     try alloc.setup(std.heap.c_allocator);
-    var stdout: std.fs.File = std.io.getStdOut();
-    var stderr: std.fs.File = std.io.getStdErr();
+    var stdout_file = std.io.getStdIn();
+    var stdout = std.io.bufferedWriter(stdout_file.writer());
+    var stderr_file = std.io.getStdErr();
+    var stderr = std.io.bufferedWriter(stderr_file.writer());
     var output_source = Output.Source.init(stdout, stderr);
+    defer stdout.flush() catch {};
+    defer stderr.flush() catch {};
     Output.Source.set(&output_source);

-    try cli.Cli.start(std.heap.c_allocator, stdout, stderr, MainPanicHandler);
+    try cli.Cli.start(std.heap.c_allocator, &stdout, &stderr, MainPanicHandler);
 }

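Note that the hunk above takes the stdout stream from std.io.getStdIn(); std.io.getStdOut() appears to be intended. The buffering pattern itself is: wrap the file writer once, write through the buffer everywhere, and let defer-ed flushes run at the end of main, after all writes. A sketch with the presumably intended handle:

    const std = @import("std");

    pub fn main() !void {
        var stdout_file = std.io.getStdOut();
        var stdout = std.io.bufferedWriter(stdout_file.writer());
        // Runs at scope exit, after every write below has been buffered.
        defer stdout.flush() catch {};

        // Callees can take *@TypeOf(stdout) and avoid a syscall per print.
        try stdout.writer().print("built {d} files\n", .{3});
    }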
@@ -453,6 +453,14 @@ const TypeScript = struct {
     parse: bool = false,
 };

+pub const Timings = struct {
+    resolver: i128 = 0,
+    parse: i128 = 0,
+    print: i128 = 0,
+    http: i128 = 0,
+    read_file: i128 = 0,
+};
+
 pub const BundleOptions = struct {
     footer: string = "",
     banner: string = "",
@@ -469,6 +477,11 @@ pub const BundleOptions = struct {
     public_dir_handle: ?std.fs.Dir = null,
     write: bool = false,
     preserve_symlinks: bool = false,
+    preserve_extensions: bool = false,
+    timings: Timings = Timings{},
+
+    append_package_version_in_query_string: bool = false,
+
     resolve_mode: api.Api.ResolveMode,
     tsconfig_override: ?string = null,
     platform: Platform = Platform.browser,
@@ -575,6 +588,8 @@ pub const BundleOptions = struct {
         opts.out_extensions = opts.platform.outExtensions(allocator);

         if (transform.serve orelse false) {
+            opts.preserve_extensions = true;
+            opts.append_package_version_in_query_string = true;
             opts.resolve_mode = .lazy;
             var _dirs = [_]string{transform.public_dir orelse opts.public_dir};
             opts.public_dir = try fs.absAlloc(allocator, &_dirs);
@@ -687,6 +702,7 @@ pub const TransformOptions = struct {

 pub const OutputFile = struct {
     path: string,
+    version: ?string = null,
     contents: string,
 };

@@ -16,6 +16,7 @@ pub const PackageJSON = struct {
     source: logger.Source,
     main_fields: MainFieldMap,
     module_type: options.ModuleType,
+    version: string = "",

     // Present if the "browser" field is present. This field is intended to be
     // used by bundlers and lets you redirect the paths of certain 3rd-party
@@ -81,6 +82,12 @@ pub const PackageJSON = struct {
         .main_fields = MainFieldMap.init(r.allocator),
     };

+    if (json.asProperty("version")) |version_json| {
+        if (version_json.expr.asString(r.allocator)) |version_str| {
+            package_json.version = r.allocator.dupe(u8, version_str) catch unreachable;
+        }
+    }
+
     if (json.asProperty("type")) |type_json| {
         if (type_json.expr.asString(r.allocator)) |type_str| {
             switch (options.ModuleType.List.get(type_str) orelse options.ModuleType.unknown) {

@@ -239,6 +239,8 @@ pub const Resolver = struct {

     jsx: options.JSX.Pragma = options.JSX.Pragma{},

+    package_json_version: ?string = null,
+
     is_external: bool = false,

     // This is true when the package was loaded from within the node_modules directory.
@@ -539,6 +541,7 @@ pub const Resolver = struct {
                 .is_from_node_modules = _result.is_node_module,
                 .module_type = pkg.module_type,
                 .dirname_fd = _result.dirname_fd,
+                .package_json_version = pkg.version,
             };
             check_relative = false;
             check_package = false;
@@ -556,6 +559,7 @@ pub const Resolver = struct {
                 .diff_case = res.diff_case,
                 .is_from_node_modules = res.is_node_module,
                 .dirname_fd = res.dirname_fd,
+                .package_json_version = res.package_json_version,
             };
         } else if (!check_package) {
             return null;
@@ -604,6 +608,7 @@ pub const Resolver = struct {
                 .dirname_fd = node_module.dirname_fd,
                 .diff_case = node_module.diff_case,
                 .is_from_node_modules = true,
+                .package_json_version = package_json.version,
             };
         }
     } else {
@@ -625,6 +630,7 @@ pub const Resolver = struct {
                 .diff_case = res.diff_case,
                 .is_from_node_modules = res.is_node_module,
                 .dirname_fd = res.dirname_fd,
+                .package_json_version = res.package_json_version,
             };
         } else {
             // Note: node's "self references" are not currently supported
@@ -640,6 +646,7 @@ pub const Resolver = struct {
             const pkg_json = dir_info.package_json orelse continue;
             const rel_path = r.fs.relative(pkg_json.source.key_path.text, path.text);
             result.module_type = pkg_json.module_type;
+            result.package_json_version = if (result.package_json_version == null) pkg_json.version else result.package_json_version;
             if (r.checkBrowserMap(pkg_json, rel_path)) |remapped| {
                 if (remapped.len == 0) {
                     path.is_disabled = true;
@@ -1048,6 +1055,7 @@ pub const Resolver = struct {
         dirname_fd: StoredFileDescriptorType = 0,
         file_fd: StoredFileDescriptorType = 0,
         is_node_module: bool = false,
+        package_json_version: ?string = null,
         diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase = null,
     };

@@ -1271,6 +1279,7 @@ pub const Resolver = struct {
             .path_pair = PathPair{
                 .primary = _path,
             },
+            .package_json_version = browser_json.version,
         };
     }

@@ -1342,6 +1351,7 @@ pub const Resolver = struct {
             .path_pair = PathPair{
                 .primary = _path,
             },
+            .package_json_version = browser_json.version,
         };
     }

@@ -1392,9 +1402,11 @@ pub const Resolver = struct {
     }

     const dir_info = (r.dirInfoCached(path) catch null) orelse return null;
+    var package_json_version: ?string = null;

     // Try using the main field(s) from "package.json"
     if (dir_info.package_json) |pkg_json| {
+        package_json_version = pkg_json.version;
         if (pkg_json.main_fields.count() > 0) {
             const main_field_values = pkg_json.main_fields;
             const main_field_keys = r.opts.main_fields;
@@ -1455,6 +1467,7 @@ pub const Resolver = struct {
                     },
                     .diff_case = auto_main_result.diff_case,
                     .dirname_fd = auto_main_result.dirname_fd,
+                    .package_json_version = pkg_json.version,
                 };
             } else {
                 if (r.debug_logs) |*debug| {
@@ -1464,8 +1477,9 @@ pub const Resolver = struct {
                         pkg_json.source.key_path.text,
                     }) catch {};
                 }

-                return auto_main_result;
+                var _auto_main_result = auto_main_result;
+                _auto_main_result.package_json_version = pkg_json.version;
+                return _auto_main_result;
             }
         }
     }
@@ -1474,7 +1488,14 @@ pub const Resolver = struct {
     }

     // Look for an "index" file with known extensions
-    return r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order);
+    if (r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order)) |*res| {
+        if (res.package_json_version == null and package_json_version != null) {
+            res.package_json_version = package_json_version;
+        }
+        return res.*;
+    }
+
+    return null;
 }

 pub fn loadAsFile(r: *Resolver, path: string, extension_order: []const string) ?LoadResult {

src/runtime.version (new file, 1 line)
@@ -0,0 +1 @@
+6c20b700cd52b930
@@ -1,10 +1,13 @@ src/runtime.zig
 const options = @import("./options.zig");
 usingnamespace @import("ast/base.zig");
 usingnamespace @import("global.zig");

 const std = @import("std");
 pub const SourceContent = @embedFile("./runtime.js");

 pub const Runtime = struct {
     pub var version_hash = @embedFile("./runtime.version");
     pub fn version() string {
         return version_hash;
     }
     pub const Features = packed struct {
         react_fast_refresh: bool = false,
         hot_module_reloading: bool = false,

src/timer.zig (new file, 18 lines)
@@ -0,0 +1,18 @@
+const std = @import("std");
+
+const Timer = @This();
+
+begin: i128 = 0,
+elapsed: i128 = 0,
+
+pub fn start(timer: *Timer) void {
+    timer.begin = std.time.nanoTimestamp();
+}
+
+pub fn stop(timer: *Timer) void {
+    timer.elapsed = std.time.nanoTimestamp() - timer.begin;
+}
+
+pub fn seconds(timer: *const Timer) f64 {
+    return @intToFloat(f64, timer.elapsed) / std.time.ns_per_s;
+}
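Usage sketch for the new Timer, matching how Bundler.parse drives it above (start before the work, stop after, accumulate elapsed); assumes the file sits next to timer.zig so the relative import resolves.

    const std = @import("std");
    const Timer = @import("./timer.zig");

    pub fn main() void {
        var timer = Timer{};
        var elapsed: i128 = 0;

        timer.start();
        std.time.sleep(10 * std.time.ns_per_ms); // stand-in for parsing work
        timer.stop();
        elapsed += timer.elapsed;

        std.debug.print("took {d} ns ({d} s)\n", .{ elapsed, timer.seconds() });
    }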