New subcommand: bun run - run scripts in package.json really fast

Jarred Sumner
2021-10-26 05:18:21 -07:00
parent e69f8c36d2
commit 0541cff2ad
7 changed files with 761 additions and 7 deletions

View File

@@ -147,6 +147,7 @@ pub const Arguments = struct {
clap.parseParam("-l, --loader <STR>... Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: jsx, js, json, tsx, ts, css") catch unreachable,
clap.parseParam("--origin <STR> Rewrite import paths to start with --origin. Default: \"\"") catch unreachable,
clap.parseParam("--port <STR> Port to serve Bun's dev server on. Default: \"/3000\"") catch unreachable,
clap.parseParam("--silent Don't repeat the command for bun run") catch unreachable,
// clap.parseParam("-o, --outdir <STR> Save output to directory (default: \"out\" if none provided and multiple entry points passed)") catch unreachable,
// clap.parseParam("-r, --resolve <STR> Determine import/require behavior. \"disable\" ignores. \"dev\" bundles node_modules and builds everything else as independent entry points") catch unreachable,
@@ -223,6 +224,8 @@ pub const Arguments = struct {
.disable_hmr = args.flag("--disable-hmr"),
};
ctx.positionals = args.positionals();
ctx.debug.silent = args.flag("--silent");
if (opts.port != null and opts.origin == null) {
opts.origin = try std.fmt.allocPrint(allocator, "http://localhost:{d}/", .{opts.port.?});
}
@@ -522,6 +525,7 @@ pub const Command = struct {
pub const DebugOptions = struct {
dump_environment_variables: bool = false,
fallback_only: bool = false,
silent: bool = false,
};
pub const Context = struct {
@@ -529,6 +533,7 @@ pub const Command = struct {
args: Api.TransformOptions = std.mem.zeroes(Api.TransformOptions),
log: *logger.Log,
allocator: *std.mem.Allocator,
positionals: []const string = &[_]string{},
debug: DebugOptions = DebugOptions{},
@@ -644,8 +649,9 @@ pub const Command = struct {
},
.RunCommand => {
const ctx = try Command.Context.create(allocator, log, .RunCommand);
if (ctx.positionals.len > 0) {
_ = try RunCommand.exec(ctx, false, true);
}
},
.AutoCommand => {
var ctx = Command.Context.create(allocator, log, .AutoCommand) catch |e| {
@@ -667,6 +673,12 @@ pub const Command = struct {
return;
}
if (ctx.positionals.len > 0 and (std.fs.path.extension(ctx.positionals[0]).len == 0)) {
if (try RunCommand.exec(ctx, true, false)) {
return;
}
}
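// In other words (inferred from this hunk): when the first positional has no file
// extension, `bun <name>` first tries RunCommand (package.json scripts plus
// node_modules/.bin binaries) and only falls through to the dev server below
// when nothing matched.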
if (FeatureFlags.dev_only) {
try DevCommand.exec(ctx);
} else {

View File

@@ -0,0 +1,107 @@
const std = @import("std");
// yarn v2.3 commands
const yarn_v2 = [_][]const u8{
"add",
"bin",
"cache",
"config",
"dedupe",
"dlx",
"exec",
"explain",
"info",
"init",
"install",
"link",
"node",
"npm",
"pack",
"patch",
"plugin",
"rebuild",
"remove",
"run",
"set",
"unplug",
"up",
"why",
"workspace",
"workspaces",
};
// yarn v1 commands
const yarn_v1 = [_][]const u8{
"access",
"add",
"audit",
"autoclean",
"bin",
"cache",
"check",
"config",
"create",
"exec",
"generate-lock-entry",
"generateLockEntry",
"global",
"help",
"import",
"info",
"init",
"install",
"licenses",
"link",
"list",
"login",
"logout",
"node",
"outdated",
"owner",
"pack",
"policies",
"publish",
"remove",
"run",
"tag",
"team",
"unlink",
"unplug",
"upgrade",
"upgrade-interactive",
"upgradeInteractive",
"version",
"versions",
"why",
"workspace",
"workspaces",
};
pub const all_yarn_commands = brk: {
@setEvalBranchQuota(9999);
var array: [yarn_v2.len + yarn_v1.len]u64 = undefined;
var array_i: usize = 0;
for (yarn_v2) |yarn| {
const hash = std.hash.Wyhash.hash(0, yarn);
@setEvalBranchQuota(9999);
if (std.mem.indexOfScalar(u64, array[0..array_i], hash) == null) {
@setEvalBranchQuota(9999);
array[array_i] = hash;
array_i += 1;
}
}
for (yarn_v1) |yarn| {
@setEvalBranchQuota(9999);
const hash = std.hash.Wyhash.hash(0, yarn);
if (std.mem.indexOfScalar(u64, array[0..array_i], hash) == null) {
@setEvalBranchQuota(9999);
array[array_i] = hash;
array_i += 1;
}
}
break :brk array[0..array_i];
};
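The block above hashes every yarn v1/v2 subcommand with Wyhash (seed 0) and deduplicates the hashes at compile time. A minimal sketch of the intended lookup on the consuming side; isYarnCommand is a hypothetical helper for illustration, the real call site is in RunCommand.runPackageScript later in this commit:

const std = @import("std");
const all_yarn_commands = @import("./list-of-yarn-commands.zig").all_yarn_commands;

// True when `word` is any yarn v1/v2 subcommand, using the same Wyhash seed (0)
// as the comptime table above.
pub fn isYarnCommand(word: []const u8) bool {
    const hash = std.hash.Wyhash.hash(0, word);
    return std.mem.indexOfScalar(u64, all_yarn_commands, hash) != null;
}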

View File

@@ -20,9 +20,511 @@ const configureTransformOptionsForBun = @import(".././javascript/jsc/config.zig"
const Command = @import("../cli.zig").Command;
const bundler = @import("../bundler.zig");
const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
const DotEnv = @import("../env_loader.zig");
const which = @import("../which.zig").which;
var path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
var path_buf2: [std.fs.MAX_PATH_BYTES]u8 = undefined;
const NpmArgs = struct {
// https://github.com/npm/rfcs/blob/main/implemented/0021-reduce-lifecycle-script-environment.md#detailed-explanation
pub const package_name: string = "npm_package_name";
pub const package_version: string = "npm_package_version";
};
const yarn_commands: []u64 = @import("./list-of-yarn-commands.zig").all_yarn_commands;
pub const RunCommand = struct {
const shells_to_search = &[_]string{
"bash",
"sh",
"zsh",
};
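// findShell (below) returns the first of these shells found via which() on the
// given PATH; runPackageScript then executes each package.json script as
// `<shell> -c "<script>"`.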
pub fn findShell(PATH: string, cwd: string) ?string {
inline for (shells_to_search) |shell| {
if (which(&path_buf, PATH, cwd, shell)) |shell_| {
return shell_;
}
}
return null;
}
const BUN_BIN_NAME = if (isDebug) "bun-debug" else "bun";
const BUN_RUN = std.fmt.comptimePrint("{s} run", .{BUN_BIN_NAME});
pub fn runPackageScript(
ctx: Command.Context,
original_script: string,
name: string,
cwd: string,
env: *DotEnv.Loader,
passthrough: []const string,
silent: bool,
) !bool {
const shell_bin = findShell(env.map.get("PATH") orelse "", cwd) orelse return error.MissingShell;
var script = original_script;
var copy_script = try std.ArrayList(u8).initCapacity(ctx.allocator, script.len);
// Look for invocations of any:
// - yarn run
// - pnpm run
// - npm run
// Replace them with "bun run"
// If "yarn" exists and
var splitter = std.mem.split(u8, script, " ");
var is_first = true;
var skip_next = false;
while (splitter.next()) |entry_| {
const skip = skip_next;
skip_next = false;
var entry = entry_;
if (strings.startsWith(entry, "\\\"") and strings.endsWith(entry, "\\\"") and entry.len > 4) {
entry = entry[2 .. entry.len - 2];
}
if (strings.startsWith(entry, "'") and strings.endsWith(entry, "'") and entry.len > 2) {
entry = entry[1 .. entry.len - 1];
}
var replace = false;
defer is_first = false;
if (!skip) {
replacer: {
if (strings.eqlComptime(entry, "yarn")) {
var _split = splitter;
if (_split.next()) |entry2| {
if (strings.eqlComptime(entry2, "run")) {
replace = true;
_ = splitter.next();
break :replacer;
}
// "yarn npm" is a valid command
// this will confuse us
// so when we have a valid yarn command, rather than try to carefully parse & handle each version's arguments
// we just skip the command that says "yarn npm"
// this works because yarn is the only package manager that lets you omit "run"
// (bun is not a package manager)
const hash = std.hash.Wyhash.hash(0, entry2);
if (std.mem.indexOfScalar(u64, yarn_commands, hash) != null) {
skip_next = true;
break :replacer;
}
replace = true;
break :replacer;
}
}
if (strings.eqlComptime(entry, "pnpm")) {
var _split = splitter;
if (_split.next()) |entry2| {
if (strings.eqlComptime(entry2, "run")) {
replace = true;
_ = splitter.next();
break :replacer;
}
}
}
if (strings.eqlComptime(entry, "npm")) {
var _split = splitter;
if (_split.next()) |entry2| {
if (strings.eqlComptime(entry2, "run")) {
replace = true;
_ = splitter.next();
break :replacer;
}
}
}
}
}
if (replace) {
if (!is_first) {
copy_script.append(' ') catch unreachable;
}
try copy_script.appendSlice(BUN_RUN);
} else {
if (!is_first) {
copy_script.append(' ') catch unreachable;
}
try copy_script.appendSlice(entry);
}
}
var combined_script: string = copy_script.items;
if (passthrough.len > 0) {
// Append the passthrough arguments to the rewritten script (not the original
// one) so the "bun run" substitutions above are preserved.
var combined_script_len: usize = combined_script.len;
for (passthrough) |p| {
combined_script_len += p.len + 1;
}
var combined_script_buf = try ctx.allocator.alloc(u8, combined_script_len);
std.mem.copy(u8, combined_script_buf, combined_script);
var remaining_script_buf = combined_script_buf[combined_script.len..];
for (passthrough) |p| {
remaining_script_buf[0] = ' ';
std.mem.copy(u8, remaining_script_buf[1..], p);
remaining_script_buf = remaining_script_buf[p.len + 1 ..];
}
combined_script = combined_script_buf;
}
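// Illustrative (hypothetical "test" script): with "test": "jest" in package.json,
// `bun run test unit/` ends up executing `jest unit/` through the shell.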
var argv = [_]string{ shell_bin, "-c", combined_script };
var child_process = try std.ChildProcess.init(&argv, ctx.allocator);
if (!silent) {
Output.prettyErrorln("<r><d><magenta>$<r> <d><b>{s}<r>", .{combined_script});
Output.flush();
}
var buf_map = try env.map.cloneToBufMap(ctx.allocator);
child_process.env_map = &buf_map;
child_process.cwd = cwd;
child_process.stderr_behavior = .Inherit;
child_process.stdin_behavior = .Inherit;
child_process.stdout_behavior = .Inherit;
_ = child_process.spawnAndWait() catch |err| {
Output.prettyErrorln("<r><red>error<r>: Failed to run script <b>{s}<r> due to error <b>{s}<r>", .{ name, @errorName(err) });
Output.flush();
return false;
};
return true;
}
pub fn runBinary(
ctx: Command.Context,
executable: []const u8,
cwd: string,
env: *DotEnv.Loader,
passthrough: []const string,
) !bool {
var argv_ = [_]string{executable};
var argv: []const string = &argv_;
if (passthrough.len > 0) {
var array_list = std.ArrayList(string).init(ctx.allocator);
try array_list.append(executable);
try array_list.appendSlice(passthrough);
argv = array_list.toOwnedSlice();
}
var child_process = try std.ChildProcess.init(argv, ctx.allocator);
var buf_map = try env.map.cloneToBufMap(ctx.allocator);
child_process.cwd = cwd;
child_process.env_map = &buf_map;
child_process.stderr_behavior = .Inherit;
child_process.stdin_behavior = .Inherit;
child_process.stdout_behavior = .Inherit;
_ = child_process.spawnAndWait() catch |err| {
Output.prettyErrorln("<r><red>error<r>: Failed to run <b>{s}<r> due to error <b>{s}<r>", .{ std.fs.path.basename(executable), @errorName(err) });
Output.flush();
return false;
};
return true;
}
pub fn ls(ctx: Command.Context) !void {
var args = ctx.args;
args.node_modules_bundle_path = null;
args.node_modules_bundle_path_server = null;
args.generate_node_module_bundle = false;
var this_bundler = try bundler.Bundler.init(ctx.allocator, ctx.log, args, null, null);
this_bundler.options.env.behavior = Api.DotEnvBehavior.load_all;
this_bundler.options.env.prefix = "";
this_bundler.resolver.care_about_bin_folder = true;
this_bundler.resolver.care_about_scripts = true;
this_bundler.configureLinker();
}
pub fn exec(ctx: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !bool {
var args = ctx.args;
args.node_modules_bundle_path = null;
args.node_modules_bundle_path_server = null;
args.generate_node_module_bundle = false;
var this_bundler = try bundler.Bundler.init(ctx.allocator, ctx.log, args, null, null);
this_bundler.options.env.behavior = Api.DotEnvBehavior.load_all;
this_bundler.options.env.prefix = "";
this_bundler.env.quiet = true;
this_bundler.resolver.care_about_bin_folder = true;
this_bundler.resolver.care_about_scripts = true;
defer {
this_bundler.resolver.care_about_bin_folder = false;
this_bundler.resolver.care_about_scripts = false;
}
this_bundler.configureLinker();
var positionals = ctx.positionals;
if (positionals.len > 0 and (strings.eqlComptime(positionals[0], "run") or strings.eqlComptime(positionals[0], "r"))) {
positionals = positionals[1..];
}
var root_dir_info = this_bundler.resolver.readDirInfo(this_bundler.fs.top_level_dir) catch |err| {
if (!log_errors) return false;
if (Output.enable_ansi_colors) {
ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
} else {
ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
}
Output.prettyErrorln("Error loading directory: \"{s}\"", .{@errorName(err)});
Output.flush();
return err;
} orelse {
if (Output.enable_ansi_colors) {
ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
} else {
ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
}
Output.prettyErrorln("Error loading current directory", .{});
Output.flush();
return error.CouldntReadCurrentDirectory;
};
var package_json_dir: string = "";
{
this_bundler.env.loadProcess();
if (this_bundler.env.map.get("NODE_ENV")) |node_env| {
if (strings.eqlComptime(node_env, "production")) {
this_bundler.options.production = true;
}
}
// Run .env in the root dir
this_bundler.runEnvLoader() catch {};
if (root_dir_info.getEntries()) |dir| {
// Run .env again if it exists in a parent dir
if (this_bundler.options.production) {
this_bundler.env.load(&this_bundler.fs.fs, dir, false) catch {};
} else {
this_bundler.env.load(&this_bundler.fs.fs, dir, true) catch {};
}
}
}
var bin_dirs = this_bundler.resolver.binDirs();
if (root_dir_info.enclosing_package_json) |package_json| {
if (root_dir_info.package_json == null) {
// no trailing slash
package_json_dir = std.mem.trimRight(u8, package_json.source.path.name.dir, "/");
}
}
var PATH = this_bundler.env.map.get("PATH") orelse "";
var ORIGINAL_PATH = PATH;
if (bin_dirs.len > 0 or package_json_dir.len > 0) {
var new_path_len: usize = PATH.len + 2;
for (bin_dirs) |bin| {
new_path_len += bin.len + 1;
}
if (package_json_dir.len > 0) {
new_path_len += package_json_dir.len + 1;
}
var new_path = try std.ArrayList(u8).initCapacity(ctx.allocator, new_path_len);
{
var needs_colon = false;
if (package_json_dir.len > 0) {
defer needs_colon = true;
if (needs_colon) {
try new_path.append(':');
}
try new_path.appendSlice(package_json_dir);
}
for (bin_dirs) |bin, i| {
defer needs_colon = true;
if (needs_colon) {
try new_path.append(':');
}
try new_path.appendSlice(bin);
}
if (needs_colon) {
try new_path.append(':');
}
try new_path.appendSlice(PATH);
}
this_bundler.env.map.put("PATH", new_path.items) catch unreachable;
PATH = new_path.items;
}
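// Resulting search path (illustrative, for a hypothetical project at /app):
//   /app:/app/node_modules/.bin:<original PATH>
// i.e. the enclosing package.json directory (when the cwd itself has no
// package.json), then every discovered node_modules/.bin directory, then the
// inherited PATH.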
var script_name_to_search: string = "";
if (positionals.len > 0) {
script_name_to_search = positionals[0];
}
var passthrough: []const string = &[_]string{};
if (positionals.len > 1) {
passthrough = positionals[1..];
}
var did_print = false;
if (root_dir_info.enclosing_package_json) |package_json| {
if (package_json.name.len > 0) {
if (this_bundler.env.map.get(NpmArgs.package_name) == null) {
this_bundler.env.map.put(NpmArgs.package_name, package_json.name) catch unreachable;
}
}
if (package_json.version.len > 0) {
if (this_bundler.env.map.get(NpmArgs.package_version) == null) {
this_bundler.env.map.put(NpmArgs.package_version, package_json.version) catch unreachable;
}
}
if (package_json.scripts) |scripts| {
switch (script_name_to_search.len) {
0 => {
var display_name = package_json.name;
if (display_name.len == 0) {
display_name = std.fs.path.basename(root_dir_info.getEntries().?.dir);
}
var iterator = scripts.iterator();
if (scripts.count() > 0) {
did_print = true;
Output.prettyln("<r><blue><b>{s}<r> scripts:<r>\n\n", .{display_name});
while (iterator.next()) |entry| {
Output.prettyln("\n", .{});
Output.prettyln(" bun run <blue>{s}<r>\n", .{entry.key_ptr.*});
Output.prettyln(" <d> {s}<r>\n", .{entry.value_ptr.*});
}
Output.prettyln("\n<d>{d} scripts<r>", .{scripts.count()});
Output.flush();
return true;
} else {
Output.prettyln("<r><blue><b>{s}<r> has no \"scripts\" in package.json.", .{display_name});
Output.flush();
return true;
}
},
else => {
if (scripts.get(script_name_to_search)) |script_content| {
// allocate enough to hold "post${scriptname}"
var temp_script_buffer = try std.fmt.allocPrint(ctx.allocator, "ppre{s}", .{script_name_to_search});
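// The spare leading byte lets one buffer serve both lookups:
// temp_script_buffer[1..] spells "pre<name>", and after std.mem.copy writes
// "post" over the first four bytes below, the full buffer spells "post<name>".
// Execution order mirrors npm lifecycle scripts: pre<name>, then <name>, then
// post<name>, stopping at the first failure.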
if (scripts.get(temp_script_buffer[1..])) |prescript| {
if (!try runPackageScript(
ctx,
prescript,
temp_script_buffer[1..],
this_bundler.fs.top_level_dir,
this_bundler.env,
passthrough,
ctx.debug.silent,
)) {
return false;
}
}
if (!try runPackageScript(
ctx,
script_content,
script_name_to_search,
this_bundler.fs.top_level_dir,
this_bundler.env,
passthrough,
ctx.debug.silent,
)) return false;
std.mem.copy(u8, temp_script_buffer, "post");
if (scripts.get(temp_script_buffer)) |postscript| {
if (!try runPackageScript(
ctx,
postscript,
temp_script_buffer,
this_bundler.fs.top_level_dir,
this_bundler.env,
passthrough,
ctx.debug.silent,
)) {
return false;
}
}
return true;
}
},
}
}
}
var path_for_which = PATH;
if (comptime bin_dirs_only) {
path_for_which = PATH[0 .. PATH.len - (ORIGINAL_PATH.len + 1)];
}
if (which(&path_buf, path_for_which, this_bundler.fs.top_level_dir, script_name_to_search)) |destination| {
var file = std.fs.openFileAbsoluteZ(destination, .{ .read = true }) catch |err| {
if (!log_errors) return false;
Output.prettyErrorln("<r>error: <red>{s}<r> opening file: \"{s}\"", .{ err, std.mem.span(destination) });
Output.flush();
return err;
};
var outbuf = std.os.getFdPath(file.handle, &path_buf2) catch |err| {
if (!log_errors) return false;
Output.prettyErrorln("<r>error: <red>{s}<r> resolving file: \"{s}\"", .{ err, std.mem.span(destination) });
Output.flush();
return err;
};
file.close();
return try runBinary(
ctx,
try this_bundler.fs.dirname_store.append([]u8, outbuf),
this_bundler.fs.top_level_dir,
this_bundler.env,
passthrough,
);
}
if (comptime log_errors) {
Output.prettyError("<r><red>error:<r> Missing script: <b>{s}<r>\n", .{script_name_to_search});
Output.flush();
std.os.exit(1);
}
return false;
}
};
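For reference, the two comptime flags on exec correspond to the two call sites added in the cli.zig hunk earlier in this commit (summarized from those call sites, not from the commit message):

// bun run <script>              -> RunCommand.exec(ctx, false, true)
//   searches package.json scripts plus the full augmented PATH and reports
//   "Missing script" itself.
// bun <name-without-extension>  -> RunCommand.exec(ctx, true, false)
//   only consults package.json scripts and the prepended bin directories, and
//   returns false so AutoCommand can fall back to the dev server.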

View File

@@ -364,6 +364,8 @@ pub const Loader = struct {
@".env.production": ?logger.Source = null,
@".env": ?logger.Source = null,
quiet: bool = false,
did_load_process: bool = false,
const empty_string_value: string = "\"\"";
@@ -610,7 +612,7 @@ pub const Loader = struct {
Analytics.Features.dotenv = true;
}
if (!this.quiet) this.printLoaded(start);
}
pub fn printLoaded(this: *Loader, start: i128) void {
@@ -748,6 +750,23 @@ pub const Map = struct {
map: HashTable,
pub fn cloneToBufMap(this: *Map, allocator: *std.mem.Allocator) !std.BufMap {
var buf_map = std.BufMap.init(allocator);
const Convert = struct {
pub fn constStrToU8(s: string) []u8 {
return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
}
};
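// constStrToU8 only reinterprets the const pointers; the existing strings are
// handed to putMove below without another copy.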
var iter_ = this.map.iterator();
while (iter_.next()) |entry| {
try buf_map.putMove(Convert.constStrToU8(entry.key_ptr.*), Convert.constStrToU8(entry.value_ptr.*));
}
return buf_map;
}
pub inline fn init(allocator: *std.mem.Allocator) Map {
return Map{ .map = HashTable.init(allocator) };
}

View File

@@ -1 +1 @@
7aa588534c09f455
2bbe5942da63d2ba

View File

@@ -18,6 +18,8 @@ threadlocal var hashy: [2048]u8 = undefined;
pub const MacroImportReplacementMap = std.StringArrayHashMap(string);
pub const MacroMap = std.StringArrayHashMapUnmanaged(MacroImportReplacementMap);
const ScriptsMap = std.StringArrayHashMap(string);
pub const PackageJSON = struct {
pub const LoadFramework = enum {
none,
@@ -57,6 +59,8 @@ pub const PackageJSON = struct {
version: string = "",
hash: u32 = 0xDEADBEEF,
scripts: ?*ScriptsMap = null,
always_bundle: []string = &.{},
macros: MacroMap = MacroMap{},
@@ -440,6 +444,7 @@ pub const PackageJSON = struct {
input_path: string,
dirname_fd: StoredFileDescriptorType,
comptime generate_hash: bool,
comptime include_scripts: bool,
) ?PackageJSON {
// TODO: remove this extra copy
@@ -690,6 +695,40 @@ pub const PackageJSON = struct {
}
}
if (include_scripts) {
read_scripts: {
if (json.asProperty("scripts")) |scripts_prop| {
if (scripts_prop.expr.data == .e_object) {
const scripts_obj = scripts_prop.expr.data.e_object;
var count: usize = 0;
for (scripts_obj.properties) |prop| {
const key = prop.key.?.asString(r.allocator) orelse continue;
const value = prop.value.?.asString(r.allocator) orelse continue;
count += @as(usize, @boolToInt(key.len > 0 and value.len > 0));
}
if (count == 0) break :read_scripts;
var scripts = ScriptsMap.init(r.allocator);
scripts.ensureUnusedCapacity(count) catch break :read_scripts;
for (scripts_obj.properties) |prop| {
const key = prop.key.?.asString(r.allocator) orelse continue;
const value = prop.value.?.asString(r.allocator) orelse continue;
if (!(key.len > 0 and value.len > 0)) continue;
scripts.putAssumeCapacity(key, value);
}
package_json.scripts = r.allocator.create(ScriptsMap) catch unreachable;
package_json.scripts.?.* = scripts;
}
}
}
}
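// Illustrative input for the block above:
//   "scripts": { "clean": "rm -rf dist", "build": "tsc -p ." }
// yields a ScriptsMap with two entries; properties with an empty key or value
// are skipped both when counting and when inserting.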
// TODO: side effects
// TODO: exports map

View File

@@ -207,6 +207,7 @@ threadlocal var _open_dirs: [256]std.fs.Dir = undefined;
threadlocal var resolve_without_remapping_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var index_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var dir_info_uncached_filename_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var node_bin_path: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var dir_info_uncached_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var tsconfig_base_url_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var relative_abs_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
@@ -324,6 +325,12 @@ pub const LoadResult = struct {
// This is a global so even if multiple resolvers are created, the mutex will still work
var resolver_Mutex: Mutex = undefined;
var resolver_Mutex_loaded: bool = false;
const BinFolderArray = std.BoundedArray(string, 128);
var bin_folders: BinFolderArray = undefined;
var bin_folders_lock: Mutex = Mutex.init();
var bin_folders_loaded: bool = false;
// TODO:
// - Fix "browser" field mapping
// - Consider removing the string list abstraction?
@@ -336,6 +343,9 @@ pub const Resolver = struct {
node_module_bundle: ?*NodeModuleBundle,
extension_order: []const string = undefined,
care_about_bin_folder: bool = false,
care_about_scripts: bool = false,
debug_logs: ?DebugLogs = null,
elapsed: i128 = 0, // tracing
@@ -1361,8 +1371,19 @@ pub const Resolver = struct {
return path.text;
}
pub fn binDirs(r: *const ThisResolver) []const string {
if (!bin_folders_loaded) return &[_]string{};
return bin_folders.constSlice();
}
pub fn parsePackageJSON(r: *ThisResolver, file: string, dirname_fd: StoredFileDescriptorType) !?*PackageJSON {
var pkg: PackageJSON = undefined;
if (!r.care_about_scripts) {
pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, true, false) orelse return null;
} else {
pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, true, true) orelse return null;
}
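// include_scripts is a comptime parameter on PackageJSON.parse, so the runtime
// flag r.care_about_scripts has to be branched on rather than passed through.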
var _pkg = try r.allocator.create(PackageJSON);
_pkg.* = pkg;
return _pkg;
@@ -2477,6 +2498,60 @@ pub const Resolver = struct {
info.has_node_modules = (entry.entry.kind(rfs)) == .dir;
}
}
if (r.care_about_bin_folder) {
append_bin_dir: {
if (info.has_node_modules) {
if (entries.getComptimeQuery("node_modules")) |q| {
if (!bin_folders_loaded) {
bin_folders_loaded = true;
bin_folders = BinFolderArray.init(0) catch unreachable;
}
const this_dir = std.fs.Dir{ .fd = fd };
var file = this_dir.openDirZ("node_modules/.bin", .{}) catch break :append_bin_dir;
defer file.close();
var bin_path = std.os.getFdPath(file.fd, &node_bin_path) catch break :append_bin_dir;
bin_folders_lock.lock();
defer bin_folders_lock.unlock();
for (bin_folders.constSlice()) |existing_folder| {
if (strings.eql(existing_folder, bin_path)) {
break :append_bin_dir;
}
}
bin_folders.append(r.fs.dirname_store.append([]u8, bin_path) catch break :append_bin_dir) catch {};
}
}
if (info.is_node_modules) {
if (entries.getComptimeQuery(".bin")) |q| {
if (q.entry.kind(rfs) == .dir) {
if (!bin_folders_loaded) {
bin_folders_loaded = true;
bin_folders = BinFolderArray.init(0) catch unreachable;
}
const this_dir = std.fs.Dir{ .fd = fd };
var file = this_dir.openDirZ(".bin", .{}) catch break :append_bin_dir;
defer file.close();
var bin_path = std.os.getFdPath(file.fd, &node_bin_path) catch break :append_bin_dir;
bin_folders_lock.lock();
defer bin_folders_lock.unlock();
for (bin_folders.constSlice()) |existing_folder| {
if (strings.eql(existing_folder, bin_path)) {
break :append_bin_dir;
}
}
bin_folders.append(r.fs.dirname_store.append([]u8, bin_path) catch break :append_bin_dir) catch {};
}
}
}
}
}
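// Net effect: while resolving, every node_modules/.bin directory encountered is
// canonicalized with getFdPath and appended at most once to the global
// bin_folders list (bounded to 128 entries, guarded by bin_folders_lock);
// RunCommand later prepends these directories to PATH via binDirs().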
// }
if (parent != null) {