src/bundler.zig
@@ -19,7 +19,7 @@ const Resolver = @import("./resolver/resolver.zig");
const sync = @import("sync.zig");
const ThreadPool = sync.ThreadPool;
const ThreadSafeHashMap = @import("./thread_safe_hash_map.zig");

const ImportRecord = @import("./import_record.zig").ImportRecord;
// pub const
// const BundleMap =
const ResolveResults = ThreadSafeHashMap.ThreadSafeStringHashMap(Resolver.Resolver.Result);
@@ -31,8 +31,9 @@ pub const Bundler = struct {
    resolver: Resolver.Resolver,
    fs: *Fs.FileSystem,
    // thread_pool: *ThreadPool,

    output_files: std.ArrayList(options.OutputFile),
    resolve_results: *ResolveResults,
    resolve_queue: std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.LinearFifoBufferType.Dynamic),

    // to_bundle:
@@ -58,10 +59,127 @@
            // .thread_pool = pool,
            .result = options.TransformResult{},
            .resolve_results = try ResolveResults.init(allocator),
            .resolve_queue = std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.LinearFifoBufferType.Dynamic).init(allocator),
            .output_files = std.ArrayList(options.OutputFile).init(allocator),
        };
    }

    pub fn scan(bundler: *Bundler) !void {}
    pub fn processImportRecord(bundler: *Bundler, source_dir: string, import_record: *ImportRecord) !void {
        var resolve_result = (bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind) catch null) orelse return;

        if (!bundler.resolve_results.contains(resolve_result.path_pair.primary.text)) {
            try bundler.resolve_results.put(resolve_result.path_pair.primary.text, resolve_result);
            try bundler.resolve_queue.writeItem(resolve_result);
        }

        if (!strings.eql(import_record.path.text, resolve_result.path_pair.primary.text)) {
            import_record.path = Fs.Path.init(resolve_result.path_pair.primary.text);
        }
    }
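Note: processImportRecord pairs a visited set (resolve_results, the thread-safe map) with a work queue (resolve_queue). A resolved path is recorded and enqueued only the first time it is seen, so shared dependencies are built once and import cycles cannot loop. A minimal standalone sketch of that pattern, using hypothetical names and plain std containers rather than the Bundler's own types:

    const std = @import("std");

    // Visited-set + work-queue sketch (hypothetical standalone names, not
    // the Bundler API): a path is enqueued at most once, so cycles cannot
    // re-enqueue work forever.
    fn enqueueOnce(
        visited: *std.StringHashMap(void),
        queue: *std.fifo.LinearFifo([]const u8, .Dynamic),
        path: []const u8,
    ) !void {
        if (visited.contains(path)) return; // already queued or built
        try visited.put(path, {});
        try queue.writeItem(path);
    }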
    pub fn buildWithResolveResult(bundler: *Bundler, resolve_result: Resolver.Resolver.Result) !void {
        if (resolve_result.is_external) {
            return;
        }

        // Step 1. Parse & scan
        const result = bundler.parse(resolve_result.path_pair.primary) orelse return;

        switch (result.loader) {
            .jsx, .js, .ts, .tsx => {
                const ast = result.ast;

                for (ast.import_records) |*import_record| {
                    bundler.processImportRecord(
                        std.fs.path.dirname(resolve_result.path_pair.primary.text) orelse resolve_result.path_pair.primary.text,
                        import_record,
                    ) catch continue;
                }
            },
            else => {},
        }

        try bundler.print(
            result,
        );
    }
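Note the resolution base here: each import is resolved from the directory of the file that contains it, via std.fs.path.dirname on the primary path, falling back to the path itself when there is no parent. For example (hypothetical paths), an import record "./util" inside /app/src/index.ts is resolved from /app/src.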
    pub fn print(
        bundler: *Bundler,
        result: ParseResult,
    ) !void {
        var allocator = bundler.allocator;
        const relative_path = try std.fs.path.relative(bundler.allocator, bundler.fs.top_level_dir, result.source.path.text);
        var out_parts = [_]string{ bundler.options.output_dir, relative_path };
        const out_path = try std.fs.path.join(bundler.allocator, &out_parts);

        const ast = result.ast;

        var _linker = linker.Linker{};
        var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});

        const print_result = try js_printer.printAst(
            allocator,
            ast,
            js_ast.Symbol.Map.initList(symbols),
            &result.source,
            false,
            js_printer.Options{ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 } },
            &_linker,
        );
        try bundler.output_files.append(options.OutputFile{
            .path = out_path,
            .contents = print_result.js,
        });
    }
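print mirrors the source tree beneath output_dir: it takes the source path relative to top_level_dir and joins it onto output_dir. A worked example with hypothetical paths: if top_level_dir is /app, the source is /app/src/index.ts, and output_dir is out, then relative_path is src/index.ts and out_path is out/src/index.ts, so each output file lands in the same place in the output tree as its input did in the source tree.

    // Hypothetical inputs:
    // std.fs.path.relative(a, "/app", "/app/src/index.ts")      -> "src/index.ts"
    // std.fs.path.join(a, &[_]string{ "out", "src/index.ts" })  -> "out/src/index.ts"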
    pub const ParseResult = struct {
        source: logger.Source,
        loader: options.Loader,

        ast: js_ast.Ast,
    };

    pub fn parse(bundler: *Bundler, path: Fs.Path) ?ParseResult {
        var result: ParseResult = undefined;
        const loader: options.Loader = bundler.options.loaders.get(path.name.ext) orelse .file;
        const entry = bundler.resolver.caches.fs.readFile(bundler.fs, path.text) catch return null;
        const source = logger.Source.initFile(Fs.File{ .path = path, .contents = entry.contents }, bundler.allocator) catch return null;

        switch (loader) {
            .js, .jsx, .ts, .tsx => {
                var jsx = bundler.options.jsx;
                jsx.parse = loader.isJSX();
                var opts = js_parser.Parser.Options.init(jsx, loader);
                const value = (bundler.resolver.caches.js.parse(bundler.allocator, opts, bundler.options.define, bundler.log, &source) catch null) orelse return null;
                return ParseResult{
                    .ast = value,
                    .source = source,
                    .loader = loader,
                };
            },
            .json => {
                var expr = json_parser.ParseJSON(&source, bundler.log, bundler.allocator) catch return null;
                var stmt = js_ast.Stmt.alloc(bundler.allocator, js_ast.S.ExportDefault{
                    .value = js_ast.StmtOrExpr{ .expr = expr },
                    .default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
                }, logger.Loc{ .start = 0 });

                var part = js_ast.Part{
                    .stmts = &([_]js_ast.Stmt{stmt}),
                };

                return ParseResult{
                    .ast = js_ast.Ast.initTest(&([_]js_ast.Part{part})),
                    .source = source,
                    .loader = loader,
                };
            },
            else => Global.panic("Unsupported loader {s}", .{loader}),
        }

        return null;
    }
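The .json branch synthesizes a module from data: the parsed JSON expression becomes the value of a single export default statement wrapped in one Part, so a JSON import behaves like any other module downstream. Roughly, a file containing {"name": "bun"} comes out the other side as export default { "name": "bun" };. The trailing return null appears unreachable, since every loader arm either returns a ParseResult or panics.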
    pub fn bundle(
        allocator: *std.mem.Allocator,
@@ -77,8 +195,54 @@
            bundler.resolver.debug_logs = try Resolver.Resolver.DebugLogs.init(allocator);
        }

        var rfs: *Fs.FileSystem.RealFS = &bundler.fs.fs;

        var entry_point_i: usize = 0;
        for (bundler.options.entry_points) |entry| {
        for (bundler.options.entry_points) |_entry| {
            var entry: string = _entry;
            // if (!std.fs.path.isAbsolute(_entry)) {
            //     const _paths = [_]string{ bundler.fs.top_level_dir, _entry };
            //     entry = std.fs.path.join(allocator, &_paths) catch unreachable;
            // } else {
            //     entry = allocator.dupe(u8, _entry) catch unreachable;
            // }

            // const dir = std.fs.path.dirname(entry) orelse continue;
            // const base = std.fs.path.basename(entry);

            // var dir_entry = try rfs.readDirectory(dir);
            // if (std.meta.activeTag(dir_entry) == .err) {
            //     log.addErrorFmt(null, logger.Loc.Empty, allocator, "Failed to read directory: {s} - {s}", .{ dir, @errorName(dir_entry.err.original_err) }) catch unreachable;
            //     continue;
            // }

            // const file_entry = dir_entry.entries.get(base) orelse continue;
            // if (file_entry.entry.kind(rfs) != .file) {
            //     continue;
            // }

            if (!strings.startsWith(entry, "./")) {
                // allocator.free(entry);

                // Entry point paths without a leading "./" are interpreted as package
                // paths. This happens because they go through general path resolution
                // like all other import paths so that plugins can run on them. Requiring
                // a leading "./" for a relative path simplifies writing plugins because
                // entry points aren't a special case.
                //
                // However, requiring a leading "./" also breaks backward compatibility
                // and makes working with the CLI more difficult. So attempt to insert
                // "./" automatically when needed. We don't want to unconditionally insert
                // a leading "./" because the path may not be a file system path. For
                // example, it may be a URL. So only insert a leading "./" when the path
                // is an exact match for an existing file.
                var __entry = allocator.alloc(u8, "./".len + entry.len) catch unreachable;
                __entry[0] = '.';
                __entry[1] = '/';
                std.mem.copy(u8, __entry[2..__entry.len], entry);
                entry = __entry;
            }

            const result = bundler.resolver.resolve(bundler.fs.top_level_dir, entry, .entry_point) catch {
                continue;
            } orelse continue;
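A worked example of the byte surgery above, with a hypothetical entry: "src/index.ts" is 12 bytes, so __entry gets "./".len + 12 = 14 bytes, '.' and '/' fill the first two, and std.mem.copy writes the original into __entry[2..], yielding "./src/index.ts". Note that as written the branch prefixes any entry lacking a leading "./", while the inherited comment describes a stricter intent (only prefixing when the path is an exact match for an existing file).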
@@ -90,6 +254,7 @@
            entry_points[entry_point_i] = result;
            Output.print("Resolved {s} => {s}", .{ entry, result.path_pair.primary.text });
            entry_point_i += 1;
            bundler.resolve_queue.writeItem(result) catch unreachable;
        }

        if (isDebug) {
@@ -99,11 +264,15 @@
        }

        switch (bundler.options.resolve_mode) {
            .lazy, .dev, .bundle => {},
            .lazy, .dev, .bundle => {
                while (bundler.resolve_queue.readItem()) |item| {
                    bundler.buildWithResolveResult(item) catch continue;
                }
            },
            else => Global.panic("Unsupported resolve mode: {s}", .{@tagName(bundler.options.resolve_mode)}),
        }

        return bundler.result;
        return try options.TransformResult.init(bundler.output_files.toOwnedSlice(), log, allocator);
    }
};
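This switch body is where the bundle loop actually runs: the entry points seeded into resolve_queue are drained one at a time, and each buildWithResolveResult call can enqueue newly discovered imports through processImportRecord, so the loop terminates only once the transitive import graph is exhausted. The FIFO plays the role of a recursion stack, giving a breadth-first traversal. The return statement changes accordingly: instead of the placeholder bundler.result, the function now builds a TransformResult from the accumulated output_files.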
@@ -5,7 +5,7 @@ const logger = @import("./logger.zig");
const js_parser = @import("./js_parser/js_parser.zig");
const json_parser = @import("./json_parser.zig");
const options = @import("./options.zig");
const Defines = @import("./defines.zig").Defines;
const Define = @import("./defines.zig").Define;
const std = @import("std");
const fs = @import("./fs.zig");
const sync = @import("sync.zig");
@@ -139,11 +139,18 @@ pub const Cache = struct {
    pub const Result = js_ast.Result;
    // For now, we're not going to cache JavaScript ASTs.
    // It's probably only relevant when bundling for production.
    pub fn parse(cache: *@This(), allocator: *std.mem.Allocator, opts: options.TransformOptions, defines: Defines, log: *logger.Log, source: logger.Source) anyerror!?js_ast.Ast {
    pub fn parse(
        cache: *@This(),
        allocator: *std.mem.Allocator,
        opts: js_parser.Parser.Options,
        defines: *Define,
        log: *logger.Log,
        source: *const logger.Source,
    ) anyerror!?js_ast.Ast {
        var temp_log = logger.Log.init(allocator);
        defer temp_log.deinit();

        var parser = js_parser.Parser.init(opts, temp_log, &source, defines, allocator) catch |err| {
        var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| {
            temp_log.appendTo(log) catch {};
            return null;
        };
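Cache.parse buffers diagnostics in a temporary log and copies them into the caller's log only when parser initialization fails, so a successful parse adds no stale messages; the reworked call passes &temp_log and forwards source, which is now already a pointer. A compact restatement of the buffered-log pattern, with hypothetical doWork and real_log names:

    var temp_log = logger.Log.init(allocator);
    defer temp_log.deinit();
    const value = doWork(&temp_log) catch {
        temp_log.appendTo(real_log) catch {}; // surface buffered diagnostics only on failure
        return null;
    };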
src/cli.zig
@@ -305,52 +305,52 @@ pub const Cli = struct {

        var writer = stdout.writer();

        if (args.write) |write| {
            if (write) {
                did_write = true;
                var root_dir = try std.fs.openDirAbsolute(args.absolute_working_dir.?, std.fs.Dir.OpenDirOptions{});
                defer root_dir.close();
                for (result.output_files) |f| {
                    try root_dir.makePath(std.fs.path.dirname(f.path) orelse unreachable);
        // if (args.write) |write| {
        //     if (write) {
        //         did_write = true;
        //         var root_dir = try std.fs.openDirAbsolute(args.absolute_working_dir.?, std.fs.Dir.OpenDirOptions{});
        //         defer root_dir.close();
        //         for (result.output_files) |f| {
        //             try root_dir.makePath(std.fs.path.dirname(f.path) orelse unreachable);

                    var _handle = try std.fs.createFileAbsolute(f.path, std.fs.File.CreateFlags{
                        .truncate = true,
                    });
                    try _handle.seekTo(0);
        //             var _handle = try std.fs.createFileAbsolute(f.path, std.fs.File.CreateFlags{
        //                 .truncate = true,
        //             });
        //             try _handle.seekTo(0);

                    defer _handle.close();
        //             defer _handle.close();

                    try _handle.writeAll(f.contents);
                }
        //             try _handle.writeAll(f.contents);
        //         }

                var max_path_len: usize = 0;
                var max_padded_size: usize = 0;
                for (result.output_files) |file| {
                    max_path_len = std.math.max(file.path.len, max_path_len);
                }

                _ = try writer.write("\n");
                for (result.output_files) |file| {
                    const padding_count = 2 + (max_path_len - file.path.len);

                    try writer.writeByteNTimes(' ', 2);
                    try writer.writeAll(file.path);
                    try writer.writeByteNTimes(' ', padding_count);
                    const size = @intToFloat(f64, file.contents.len) / 1000.0;
                    try std.fmt.formatFloatDecimal(size, .{ .precision = 2 }, writer);
                    try writer.writeAll(" KB\n");
                }
            }
        var max_path_len: usize = 0;
        var max_padded_size: usize = 0;
        for (result.output_files) |file| {
            max_path_len = std.math.max(file.path.len, max_path_len);
        }

        if (!did_write) {
            for (result.output_files) |file, i| {
                try writer.writeAll(file.contents);
                if (i > 0) {
                    _ = try writer.write("\n\n");
                }
            }
        _ = try writer.write("\n");
        for (result.output_files) |file| {
            const padding_count = 2 + (max_path_len - file.path.len);

            try writer.writeByteNTimes(' ', 2);
            try writer.writeAll(file.path);
            try writer.writeByteNTimes(' ', padding_count);
            const size = @intToFloat(f64, file.contents.len) / 1000.0;
            try std.fmt.formatFloatDecimal(size, .{ .precision = 2 }, writer);
            try writer.writeAll(" KB\n");
        }
        // }
        // }

        // if (!did_write) {
        //     for (result.output_files) |file, i| {
        //         try writer.writeAll(file.contents);
        //         if (i > 0) {
        //             _ = try writer.write("\n\n");
        //         }
        //     }
        // }

        var err_writer = stderr.writer();
        for (result.errors) |err| {
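The summary table is aligned by padding each row out to the longest path: padding_count = 2 + (max_path_len - file.path.len) leaves every size column starting two spaces past the end of the longest name. Sizes are decimal kilobytes with two digits: a 13,370-byte file prints as 13.37 KB. With hypothetical outputs out/a.js (8 characters) and out/index.js (12 characters), max_path_len is 12 and out/a.js gets 2 + (12 - 8) = 6 spaces of padding. max_padded_size is declared but never used in the lines shown.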
@@ -1156,7 +1156,7 @@ pub const Lexer = struct {
        return lex;
    }

    pub fn init(log: *logger.Log, source: *logger.Source, allocator: *std.mem.Allocator) !LexerType {
    pub fn init(log: *logger.Log, source: *const logger.Source, allocator: *std.mem.Allocator) !LexerType {
        try tables.initJSXEntityMap();
        var empty_string_literal: JavascriptString = &emptyJavaScriptString;
        var lex = LexerType{
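This hunk and the ones that follow are a single mechanical change threaded through the lexer, parser, printer, renamer, and resolver call chain: logger.Source is now passed as *const logger.Source. In Zig, a *const T parameter grants read-only access and the compiler rejects writes through it, which documents that none of these stages mutate the source. A minimal standalone illustration (hypothetical Source struct, not the logger's):

    const std = @import("std");

    const Source = struct { contents: []const u8 };

    // A *const pointer allows reads but not writes through it.
    fn firstByte(source: *const Source) u8 {
        // source.contents = ""; // compile error: cannot assign to constant
        return source.contents[0];
    }

    pub fn main() void {
        var src = Source{ .contents = "hello" };
        std.debug.print("{d}\n", .{firstByte(&src)}); // a *Source coerces to *const Source
    }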
@@ -2142,7 +2142,7 @@ pub fn isIdentifierUTF16(text: JavascriptString) bool {

// TODO: implement this to actually work right
// this fn is a stub!
pub fn rangeOfIdentifier(source: *Source, loc: logger.Loc) logger.Range {
pub fn rangeOfIdentifier(source: *const Source, loc: logger.Loc) logger.Range {
    var r = logger.Range{ .loc = loc, .len = 0 };
    const offset = @intCast(usize, loc.start);
    var i: usize = 0;
@@ -1239,7 +1239,7 @@ pub const Parser = struct {
    options: Options,
    lexer: js_lexer.Lexer,
    log: *logger.Log,
    source: *logger.Source,
    source: *const logger.Source,
    define: *Define,
    allocator: *std.mem.Allocator,
    p: ?*P,
@@ -1437,7 +1437,7 @@
        return result;
    }

    pub fn init(_options: Options, log: *logger.Log, source: *logger.Source, define: *Define, allocator: *std.mem.Allocator) !Parser {
    pub fn init(_options: Options, log: *logger.Log, source: *const logger.Source, define: *Define, allocator: *std.mem.Allocator) !Parser {
        const lexer = try js_lexer.Lexer.init(log, source, allocator);
        return Parser{
            .options = _options,
@@ -1549,7 +1549,7 @@ pub const P = struct {
    options: Parser.Options,
    log: *logger.Log,
    define: *Define,
    source: *logger.Source,
    source: *const logger.Source,
    lexer: js_lexer.Lexer,
    allow_in: bool = false,
    allow_private_identifiers: bool = false,
@@ -10918,7 +10918,7 @@ pub const P = struct {
        };
    }

    pub fn init(allocator: *std.mem.Allocator, log: *logger.Log, source: *logger.Source, define: *Define, lexer: js_lexer.Lexer, opts: Parser.Options) !*P {
    pub fn init(allocator: *std.mem.Allocator, log: *logger.Log, source: *const logger.Source, define: *Define, lexer: js_lexer.Lexer, opts: Parser.Options) !*P {
        var _parser = try allocator.create(P);
        var parser = P{
            .symbol_uses = SymbolUseMap.init(allocator),
@@ -2739,7 +2739,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
        }
    }

    pub fn init(allocator: *std.mem.Allocator, tree: Ast, source: *logger.Source, symbols: Symbol.Map, opts: Options, linker: *Linker) !Printer {
    pub fn init(allocator: *std.mem.Allocator, tree: Ast, source: *const logger.Source, symbols: Symbol.Map, opts: Options, linker: *Linker) !Printer {
        // Heuristic: most lines of JavaScript are short.
        var js = try MutableString.init(allocator, 0);
        return Printer{
@@ -2779,7 +2779,7 @@ pub fn quoteIdentifier(js: *MutableString, identifier: string) !void {
const UnicodePrinter = NewPrinter(false);
const AsciiPrinter = NewPrinter(true);

pub fn printAst(allocator: *std.mem.Allocator, tree: Ast, symbols: js_ast.Symbol.Map, source: *logger.Source, ascii_only: bool, opts: Options, linker: *Linker) !PrintResult {
pub fn printAst(allocator: *std.mem.Allocator, tree: Ast, symbols: js_ast.Symbol.Map, source: *const logger.Source, ascii_only: bool, opts: Options, linker: *Linker) !PrintResult {
    if (ascii_only) {
        var printer = try AsciiPrinter.init(
            allocator,
@@ -5,9 +5,9 @@ const logger = @import("logger.zig");

pub const Renamer = struct {
    symbols: js_ast.Symbol.Map,
    source: *logger.Source,
    source: *const logger.Source,

    pub fn init(symbols: js_ast.Symbol.Map, source: *logger.Source) Renamer {
    pub fn init(symbols: js_ast.Symbol.Map, source: *const logger.Source) Renamer {
        return Renamer{ .symbols = symbols, .source = source };
    }
@@ -1202,16 +1202,18 @@ pub const Resolver = struct {
    }

    // Try the path with extensions

    std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, path);
    for (r.opts.extension_order) |ext| {
        var buffer = TemporaryBuffer.ExtensionPathBuf[0 .. path.len + ext.len];
        std.mem.copy(u8, buffer[path.len..buffer.len], ext);
        const file_name = buffer[path.len - base.len .. buffer.len];

        if (r.debug_logs) |*debug| {
            debug.addNoteFmt("Checking for file \"{s}{s}\" ", .{ base, ext }) catch {};
        }

        if (entries.get(buffer[path.len - base.len .. buffer.len])) |query| {
        if (entries.get(file_name)) |query| {
            if (query.entry.kind(rfs) == .file) {
                if (r.debug_logs) |*debug| {
                    debug.addNoteFmt("Found file \"{s}\" ", .{buffer}) catch {};
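The refactor hoists the repeated slice buffer[path.len - base.len .. buffer.len] into file_name, the candidate name looked up in the directory's entries. A worked example with hypothetical values: path is /app/src/util, base is util, and ext is .ts; the buffer then holds /app/src/util.ts and file_name is the trailing util.ts, the key checked against the directory listing.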