Compare commits

...

7 Commits

Author SHA1 Message Date
Jarred Sumner
8504cff6fc Add workflow to cancel Buildkite build on PR close 2025-05-28 20:04:55 -07:00
Meghan Denny
c85cf136a5 test-http-get-pipeline-problem.js passes on windows (#19980) 2025-05-28 19:28:02 -07:00
Meghan Denny
4da85ac9c1 test-http2-compat-serverrequest-pipe.js passes on windows (#19981) 2025-05-28 19:27:41 -07:00
Meghan Denny
9248d81871 test-http2-trailers-after-session-close.js passes on windows (#19983) 2025-05-28 19:27:12 -07:00
Meghan Denny
ba21d6d54b test-require-long-path.js passes on windows (#19984) 2025-05-28 19:26:44 -07:00
Meghan Denny
32985591eb test-http2-pipe-named-pipe.js passes on windows (#19982) 2025-05-28 19:26:20 -07:00
Jarred Sumner
544d399980 Start splitting install.zig into a few more files (#19959)
Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
2025-05-28 19:25:59 -07:00
11 changed files with 2320 additions and 2216 deletions

31
.github/workflows/cancel-buildkite.yml vendored Normal file
View File

@@ -0,0 +1,31 @@
# Cancels the in-flight Buildkite build for a pull request that was closed
# without being merged, freeing CI capacity.
name: Cancel Buildkite Build

on:
  pull_request:
    types: [closed]

jobs:
  cancel-build:
    # Only fire when the PR was closed *without* merging; merged PRs should
    # let their builds run to completion.
    if: ${{ github.event.pull_request.merged == false }}
    runs-on: ubuntu-latest
    env:
      ORG_SLUG: bun
      PIPELINE_SLUG: bun
    steps:
      - name: Get Buildkite build number
        id: build
        run: |
          set -euo pipefail
          # Read the commit statuses on the PR head SHA and take the first one
          # whose target URL points at buildkite.com.
          STATUS_URL="https://api.github.com/repos/${{ github.repository }}/statuses/${{ github.event.pull_request.head.sha }}"
          BUILD_URL=$(curl -fsSL "$STATUS_URL" | jq -r '[.[].target_url] | map(select(test("buildkite.com"))) | first // ""')
          if [ -z "$BUILD_URL" ]; then
            # No Buildkite status on this commit — nothing to cancel.
            exit 0
          fi
          # The build number is the numeric path segment after "builds/".
          BUILD_NUMBER=$(echo "$BUILD_URL" | grep -oE 'builds/[0-9]+' | cut -d/ -f2)
          echo "build_number=$BUILD_NUMBER" >> "$GITHUB_OUTPUT"
      - name: Cancel Buildkite build
        # Skipped when the lookup step exited early without emitting an output.
        if: ${{ steps.build.outputs.build_number }}
        run: |
          curl -fsS -X PUT "https://api.buildkite.com/v2/organizations/${ORG_SLUG}/pipelines/${PIPELINE_SLUG}/builds/${{ steps.build.outputs.build_number }}/cancel" \
            -H "Authorization: Bearer ${{ secrets.BUILDKITE_API_TOKEN }}" \
            -H "Content-Type: application/json"

View File

@@ -454,6 +454,9 @@ src/install/lockfile/printer/Yarn.zig
src/install/lockfile/Tree.zig
src/install/migration.zig
src/install/npm.zig
src/install/PackageManager/CommandLineArguments.zig
src/install/PackageManager/PackageJSONEditor.zig
src/install/PackageManager/PackageManagerOptions.zig
src/install/padding_checker.zig
src/install/patch_install.zig
src/install/repository.zig

View File

@@ -0,0 +1,800 @@
/// CLI Arguments for:
///
/// - bun install
/// - bun update
/// - bun patch
/// - bun patch-commit
/// - bun pm
/// - bun add
/// - bun remove
/// - bun link
/// - bun audit
///
const CommandLineArguments = @This();

/// clap parameter type used by every flag table in this file.
const ParamType = clap.Param(clap.Help);

// Help text for `--backend` differs per platform: clonefile is macOS-only.
const platform_specific_backend_label = if (Environment.isMac)
    "Possible values: \"clonefile\" (default), \"hardlink\", \"symlink\", \"copyfile\""
else
    "Possible values: \"hardlink\" (default), \"symlink\", \"copyfile\"";
/// Flags accepted by every install-family subcommand; the per-subcommand
/// tables below concatenate this array with their own extras.
const shared_params = [_]ParamType{
    clap.parseParam("-c, --config <STR>?                   Specify path to config file (bunfig.toml)") catch unreachable,
    clap.parseParam("-y, --yarn                            Write a yarn.lock file (yarn v1)") catch unreachable,
    clap.parseParam("-p, --production                      Don't install devDependencies") catch unreachable,
    clap.parseParam("--no-save                             Don't update package.json or save a lockfile") catch unreachable,
    clap.parseParam("--save                                Save to package.json (true by default)") catch unreachable,
    clap.parseParam("--ca <STR>...                         Provide a Certificate Authority signing certificate") catch unreachable,
    clap.parseParam("--cafile <STR>                        The same as `--ca`, but is a file path to the certificate") catch unreachable,
    clap.parseParam("--dry-run                             Don't install anything") catch unreachable,
    clap.parseParam("--frozen-lockfile                     Disallow changes to lockfile") catch unreachable,
    clap.parseParam("-f, --force                           Always request the latest versions from the registry & reinstall all dependencies") catch unreachable,
    clap.parseParam("--cache-dir <PATH>                    Store & load cached data from a specific directory path") catch unreachable,
    clap.parseParam("--no-cache                            Ignore manifest cache entirely") catch unreachable,
    clap.parseParam("--silent                              Don't log anything") catch unreachable,
    clap.parseParam("--verbose                             Excessively verbose logging") catch unreachable,
    clap.parseParam("--no-progress                         Disable the progress bar") catch unreachable,
    clap.parseParam("--no-summary                          Don't print a summary") catch unreachable,
    clap.parseParam("--no-verify                           Skip verifying integrity of newly downloaded packages") catch unreachable,
    clap.parseParam("--ignore-scripts                      Skip lifecycle scripts in the project's package.json (dependency scripts are never run)") catch unreachable,
    clap.parseParam("--trust                               Add to trustedDependencies in the project's package.json and install the package(s)") catch unreachable,
    clap.parseParam("-g, --global                          Install globally") catch unreachable,
    clap.parseParam("--cwd <STR>                           Set a specific cwd") catch unreachable,
    clap.parseParam("--backend <STR>                       Platform-specific optimizations for installing dependencies. " ++ platform_specific_backend_label) catch unreachable,
    clap.parseParam("--registry <STR>                      Use a specific registry by default, overriding .npmrc, bunfig.toml and environment variables") catch unreachable,
    clap.parseParam("--concurrent-scripts <NUM>            Maximum number of concurrent jobs for lifecycle scripts (default 5)") catch unreachable,
    clap.parseParam("--network-concurrency <NUM>           Maximum number of concurrent network requests (default 48)") catch unreachable,
    clap.parseParam("--save-text-lockfile                  Save a text-based lockfile") catch unreachable,
    clap.parseParam("--omit <dev|optional|peer>...         Exclude 'dev', 'optional', or 'peer' dependencies from install") catch unreachable,
    clap.parseParam("--lockfile-only                       Generate a lockfile without installing dependencies") catch unreachable,
    clap.parseParam("-h, --help                            Print this help menu") catch unreachable,
};
/// `bun install` — shared flags plus dependency-group selectors.
pub const install_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("-d, --dev                 Add dependency to \"devDependencies\"") catch unreachable,
    clap.parseParam("-D, --development") catch unreachable,
    clap.parseParam("--optional                        Add dependency to \"optionalDependencies\"") catch unreachable,
    clap.parseParam("--peer                        Add dependency to \"peerDependencies\"") catch unreachable,
    clap.parseParam("-E, --exact                  Add the exact version instead of the ^range") catch unreachable,
    clap.parseParam("--filter <STR>...                 Install packages for the matching workspaces") catch unreachable,
    clap.parseParam("-a, --analyze                  Analyze & install all dependencies of files passed as arguments recursively (using Bun's bundler)") catch unreachable,
    clap.parseParam("--only-missing                  Only add dependencies to package.json if they are not already present") catch unreachable,
    clap.parseParam("<POS> ...                         ") catch unreachable,
});

/// `bun update` — shared flags plus `--latest`.
pub const update_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("--latest                              Update packages to their latest versions") catch unreachable,
    clap.parseParam("<POS> ...                         \"name\" of packages to update") catch unreachable,
});

/// `bun pm` — includes the pack-related options (destination/filename/gzip).
pub const pm_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("-a, --all") catch unreachable,
    clap.parseParam("--json            Output in JSON format") catch unreachable,
    // clap.parseParam("--filter <STR>...                 Pack each matching workspace") catch unreachable,
    clap.parseParam("--destination <STR>               The directory the tarball will be saved in") catch unreachable,
    clap.parseParam("--filename <STR>               The filename of the tarball") catch unreachable,
    clap.parseParam("--gzip-level <STR>                Specify a custom compression level for gzip. Default is 9.") catch unreachable,
    clap.parseParam("<POS> ...                         ") catch unreachable,
});

/// `bun add` — same dependency-group selectors as install, plus positionals.
pub const add_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("-d, --dev                 Add dependency to \"devDependencies\"") catch unreachable,
    clap.parseParam("-D, --development") catch unreachable,
    clap.parseParam("--optional                        Add dependency to \"optionalDependencies\"") catch unreachable,
    clap.parseParam("--peer                        Add dependency to \"peerDependencies\"") catch unreachable,
    clap.parseParam("-E, --exact                  Add the exact version instead of the ^range") catch unreachable,
    clap.parseParam("-a, --analyze                  Recursively analyze & install dependencies of files passed as arguments (using Bun's bundler)") catch unreachable,
    clap.parseParam("--only-missing                  Only add dependencies to package.json if they are not already present") catch unreachable,
    clap.parseParam("<POS> ...                         \"name\" or \"name@version\" of package(s) to install") catch unreachable,
});

/// `bun remove`.
pub const remove_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("<POS> ...                         \"name\" of package(s) to remove from package.json") catch unreachable,
});

/// `bun link`.
pub const link_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("<POS> ...                         \"name\" install package as a link") catch unreachable,
});

/// `bun unlink`.
pub const unlink_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("<POS> ...                         \"name\" uninstall package as a link") catch unreachable,
});

/// `bun patch`.
const patch_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("<POS> ...                         \"name\" of the package to patch") catch unreachable,
    clap.parseParam("--commit                         Install a package containing modifications in `dir`") catch unreachable,
    clap.parseParam("--patches-dir <dir>                    The directory to put the patch file in (only if --commit is used)") catch unreachable,
});

/// `bun patch-commit`.
const patch_commit_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("<POS> ...                         \"dir\" containing changes to a package") catch unreachable,
    clap.parseParam("--patches-dir <dir>                    The directory to put the patch file") catch unreachable,
});

/// `bun outdated`.
const outdated_params: []const ParamType = &(shared_params ++ [_]ParamType{
    // clap.parseParam("--json                                Output outdated information in JSON format") catch unreachable,
    clap.parseParam("-F, --filter <STR>...                 Display outdated dependencies for each matching workspace") catch unreachable,
    clap.parseParam("<POS> ...                              Package patterns to filter by") catch unreachable,
});

/// `bun audit` — NOTE: does not include shared_params here; `parse` adds
/// them when selecting this table.
const audit_params: []const ParamType = &([_]ParamType{
    clap.parseParam("<POS> ...                         Check installed packages for vulnerabilities") catch unreachable,
    clap.parseParam("--json                            Output in JSON format") catch unreachable,
});

/// `bun pm pack`.
const pack_params: []const ParamType = &(shared_params ++ [_]ParamType{
    // clap.parseParam("--filter <STR>...                 Pack each matching workspace") catch unreachable,
    clap.parseParam("--destination <STR>               The directory the tarball will be saved in") catch unreachable,
    clap.parseParam("--filename <STR>               The filename of the tarball") catch unreachable,
    clap.parseParam("--gzip-level <STR>                Specify a custom compression level for gzip. Default is 9.") catch unreachable,
    clap.parseParam("<POS> ...                         ") catch unreachable,
});

/// `bun publish`.
const publish_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("<POS> ...                         Package tarball to publish") catch unreachable,
    clap.parseParam("--access <STR>                    Set access level for scoped packages") catch unreachable,
    clap.parseParam("--tag <STR>                       Tag the release. Default is \"latest\"") catch unreachable,
    clap.parseParam("--otp <STR>                       Provide a one-time password for authentication") catch unreachable,
    clap.parseParam("--auth-type <STR>                 Specify the type of one-time password authentication (default is 'web')") catch unreachable,
    clap.parseParam("--gzip-level <STR>                Specify a custom compression level for gzip. Default is 9.") catch unreachable,
});
// ---- Parsed values; defaults represent "flag not given" ----
cache_dir: ?string = null,
lockfile: string = "",
token: string = "",
global: bool = false,
config: ?string = null,
// Parsed from `--network-concurrency`; null = use the default.
network_concurrency: ?u16 = null,
// Install backend from `--backend`; only set when supported on this platform.
backend: ?PackageInstall.Method = null,
analyze: bool = false,
only_missing: bool = false,
positionals: []const string = &[_]string{},
yarn: bool = false,
production: bool = false,
frozen_lockfile: bool = false,
no_save: bool = false,
dry_run: bool = false,
force: bool = false,
no_cache: bool = false,
silent: bool = false,
verbose: bool = false,
no_progress: bool = false,
no_verify: bool = false,
ignore_scripts: bool = false,
// Set by `--trust`.
trusted: bool = false,
no_summary: bool = false,
latest: bool = false,
json_output: bool = false,
// Workspace filters from `--filter`, for subcommands that support them.
filters: []const string = &.{},
pack_destination: string = "",
pack_filename: string = "",
pack_gzip_level: ?string = null,
development: bool = false,
optional: bool = false,
peer: bool = false,
// Dependency groups excluded via `--omit`; null = nothing omitted.
omit: ?Omit = null,
exact: bool = false,
concurrent_scripts: ?usize = null,
// State for `bun patch` / `bun patch-commit`; `.nothing` = neither command.
patch: PatchOpts = .{ .nothing = .{} },
registry: string = "",
publish_config: Options.PublishConfig = .{},
ca: []const string = &.{},
ca_file_name: string = "",
// Tri-state: null = unspecified, true = `--save-text-lockfile` given.
save_text_lockfile: ?bool = null,
lockfile_only: bool = false,

/// Which patch-related mode the CLI is in.
const PatchOpts = union(enum) {
    nothing: struct {},
    patch: struct {},
    commit: struct {
        // Where to write the generated patch file.
        patches_dir: []const u8 = "patches",
    },
};

/// Dependency groups excluded from install via `--omit`.
const Omit = struct {
    dev: bool = false,
    optional: bool = false,
    peer: bool = false,
};
/// Prints the `--help` text for the given install-family subcommand.
/// Each branch prints an intro, the subcommand's flag table, and (usually)
/// example usage; it does not exit — callers decide what to do next.
pub fn printHelp(subcommand: Subcommand) void {
    // the output of --help uses the following syntax highlighting
    // template: <b>Usage<r>: <b><green>bun <command><r> <cyan>[flags]<r> <blue>[arguments]<r>
    // use [foo] for multiple arguments or flags for foo.
    // use <bar> to emphasize 'bar'
    switch (subcommand) {
        // fall back to HelpCommand.printWithReason
        Subcommand.install => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun install<r> <cyan>[flags]<r> <blue>\<name\><r><d>@\<version\><r>
                \\<b>Alias: <b><green>bun i<r>
                \\ Install the dependencies listed in package.json
            ;
            const outro_text =
                \\<b>Examples:<r>
                \\ <d>Install the dependencies for the current project<r>
                \\ <b><green>bun install<r>
                \\
                \\ <d>Skip devDependencies<r>
                \\ <b><green>bun install<r> <cyan>--production<r>
                \\
                \\Full documentation is available at <magenta>https://bun.sh/docs/cli/install<r>
            ;
            Output.pretty("\n" ++ intro_text, .{});
            Output.flush();
            Output.pretty("\n\n<b>Flags:<r>", .{});
            Output.flush();
            clap.simpleHelp(install_params);
            Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.flush();
        },
        Subcommand.update => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun update<r> <cyan>[flags]<r> <blue>\<name\><r><d>@\<version\><r>
                \\ Update all dependencies to most recent versions within the version range in package.json
                \\
            ;
            const outro_text =
                \\<b>Examples:<r>
                \\ <d>Update all dependencies:<r>
                \\ <b><green>bun update<r>
                \\
                \\ <d>Update all dependencies to latest:<r>
                \\ <b><green>bun update<r> <cyan>--latest<r>
                \\
                \\ <d>Update specific packages:<r>
                \\ <b><green>bun update<r> <blue>zod jquery@3<r>
                \\
                \\Full documentation is available at <magenta>https://bun.sh/docs/cli/update<r>
            ;
            Output.pretty("\n" ++ intro_text, .{});
            Output.flush();
            Output.pretty("\n<b>Flags:<r>", .{});
            Output.flush();
            clap.simpleHelp(update_params);
            Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.flush();
        },
        Subcommand.patch => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun patch<r> <cyan>[flags or options]<r> <blue>\<package\><r><d>@\<version\><r>
                \\
                \\Prepare a package for patching.
                \\
            ;
            Output.pretty("\n" ++ intro_text, .{});
            Output.flush();
            Output.pretty("\n<b>Flags:<r>", .{});
            Output.flush();
            clap.simpleHelp(patch_params);
            // Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.pretty("\n", .{});
            Output.flush();
        },
        Subcommand.@"patch-commit" => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun patch-commit<r> <cyan>[flags or options]<r> <blue>\<directory\><r>
                \\
                \\Generate a patch out of a directory and save it.
                \\
                \\<b>Options:<r>
                \\ <cyan>--patches-dir<r> <d>The directory to save the patch file<r>
                \\
            ;
            // const outro_text =
            //     \\<b>Options:<r>
            //     \\ <d>--edit-dir<r>
            //     \\ <b><green>bun update<r>
            //     \\
            //     \\Full documentation is available at <magenta>https://bun.sh/docs/cli/update<r>
            // ;
            Output.pretty("\n" ++ intro_text, .{});
            Output.flush();
            Output.pretty("\n<b>Flags:<r>", .{});
            Output.flush();
            // Print patch-commit's own flag table (previously printed
            // `patch_params`, which advertised `--commit` — a flag this
            // subcommand does not take).
            clap.simpleHelp(patch_commit_params);
            // Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.pretty("\n", .{});
            Output.flush();
        },
        Subcommand.pm => {
            // `bun pm` has its own multi-command help.
            PackageManagerCommand.printHelp();
        },
        Subcommand.add => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun add<r> <cyan>[flags]<r> <blue>\<package\><r><d>\<@version\><r>
                \\<b>Alias: <b><green>bun a<r>
            ;
            const outro_text =
                \\<b>Examples:<r>
                \\ <d>Add a dependency from the npm registry<r>
                \\ <b><green>bun add<r> <blue>zod<r>
                \\ <b><green>bun add<r> <blue>zod@next<r>
                \\ <b><green>bun add<r> <blue>zod@3.0.0<r>
                \\
                \\ <d>Add a dev, optional, or peer dependency <r>
                \\ <b><green>bun add<r> <cyan>-d<r> <blue>typescript<r>
                \\ <b><green>bun add<r> <cyan>--optional<r> <blue>lodash<r>
                \\ <b><green>bun add<r> <cyan>--peer<r> <blue>esbuild<r>
                \\
                \\Full documentation is available at <magenta>https://bun.sh/docs/cli/add<r>
            ;
            Output.pretty("\n" ++ intro_text, .{});
            Output.flush();
            Output.pretty("\n\n<b>Flags:<r>", .{});
            Output.flush();
            clap.simpleHelp(add_params);
            Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.flush();
        },
        Subcommand.remove => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun remove<r> <cyan>[flags]<r> <blue>[\<packages\>]<r>
                \\<b>Alias: <b>bun r<r>
                \\ Remove a package from package.json and uninstall from node_modules
                \\
            ;
            const outro_text =
                \\<b>Examples:<r>
                \\ <d>Remove a dependency<r>
                \\ <b><green>bun remove<r> <blue>ts-node<r>
                \\
                \\Full documentation is available at <magenta>https://bun.sh/docs/cli/remove<r>
            ;
            Output.pretty("\n" ++ intro_text, .{});
            Output.flush();
            Output.pretty("\n<b>Flags:<r>", .{});
            Output.flush();
            clap.simpleHelp(remove_params);
            Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.flush();
        },
        Subcommand.link => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun link<r> <cyan>[flags]<r> <blue>[\<packages\>]<r>
                \\
            ;
            const outro_text =
                \\<b>Examples:<r>
                \\ <d>Register the current directory as a linkable package.<r>
                \\ <d>Directory should contain a package.json.<r>
                \\ <b><green>bun link<r>
                \\
                \\ <d>Add a previously-registered linkable package as a dependency of the current project.<r>
                \\ <b><green>bun link<r> <blue>\<package\><r>
                \\
                \\Full documentation is available at <magenta>https://bun.sh/docs/cli/link<r>
            ;
            Output.pretty("\n" ++ intro_text, .{});
            Output.flush();
            Output.pretty("\n<b>Flags:<r>", .{});
            Output.flush();
            clap.simpleHelp(link_params);
            Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.flush();
        },
        Subcommand.unlink => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun unlink<r> <cyan>[flags]<r>
            ;
            const outro_text =
                \\<b>Examples:<r>
                \\ <d>Unregister the current directory as a linkable package.<r>
                \\ <b><green>bun unlink<r>
                \\
                \\Full documentation is available at <magenta>https://bun.sh/docs/cli/unlink<r>
            ;
            Output.pretty("\n" ++ intro_text ++ "\n", .{});
            Output.flush();
            Output.pretty("\n<b>Flags:<r>", .{});
            Output.flush();
            clap.simpleHelp(unlink_params);
            Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.flush();
        },
        .outdated => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun outdated<r> <cyan>[flags]<r> <blue>[filter]<r>
            ;
            const outro_text =
                \\<b>Examples:<r>
                \\ <d>Display outdated dependencies in the current workspace.<r>
                \\ <b><green>bun outdated<r>
                \\
                \\ <d>Use --filter to include more than one workspace.<r>
                \\ <b><green>bun outdated<r> <cyan>--filter="*"<r>
                \\ <b><green>bun outdated<r> <cyan>--filter="./app/*"<r>
                \\ <b><green>bun outdated<r> <cyan>--filter="!frontend"<r>
                \\
                \\ <d>Filter dependencies with name patterns.<r>
                \\ <b><green>bun outdated<r> <blue>jquery<r>
                \\ <b><green>bun outdated<r> <blue>"is-*"<r>
                \\ <b><green>bun outdated<r> <blue>"!is-even"<r>
                \\
            ;
            Output.pretty("\n" ++ intro_text ++ "\n", .{});
            Output.flush();
            Output.pretty("\n<b>Flags:<r>", .{});
            clap.simpleHelp(outdated_params);
            Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.flush();
        },
        .pack => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun pack<r> <cyan>[flags]<r>
            ;
            const outro_text =
                \\<b>Examples:<r>
                \\ <b><green>bun pack<r>
                \\
            ;
            Output.pretty("\n" ++ intro_text ++ "\n", .{});
            Output.flush();
            Output.pretty("\n<b>Flags:<r>", .{});
            clap.simpleHelp(pack_params);
            Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.flush();
        },
        .publish => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun publish<r> <cyan>[flags]<r> <blue>[dist]<r>
            ;
            const outro_text =
                \\<b>Examples:<r>
                \\ <d>Display files that would be published, without publishing to the registry.<r>
                \\ <b><green>bun publish<r> <cyan>--dry-run<r>
                \\
                \\ <d>Publish the current package with public access.<r>
                \\ <b><green>bun publish<r> <cyan>--access public<r>
                \\
                \\ <d>Publish a pre-existing package tarball with tag 'next'.<r>
                \\ <b><green>bun publish<r> <cyan>--tag next<r> <blue>./path/to/tarball.tgz<r>
                \\
            ;
            Output.pretty("\n" ++ intro_text ++ "\n", .{});
            Output.flush();
            Output.pretty("\n<b>Flags:<r>", .{});
            clap.simpleHelp(publish_params);
            Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.flush();
        },
        .audit => {
            const intro_text =
                \\<b>Usage<r>: <b><green>bun audit<r> <cyan>[flags]<r>
            ;
            const outro_text =
                \\<b>Examples:<r>
                \\ <d>Check installed packages for vulnerabilities.<r>
                \\ <b><green>bun audit<r>
                \\
                \\ <d>Output package vulnerabilities in JSON format.<r>
                \\ <b><green>bun audit --json<r>
                \\
            ;
            Output.pretty("\n" ++ intro_text ++ "\n", .{});
            Output.pretty("\n<b>Flags:<r>", .{});
            Output.flush();
            clap.simpleHelp(audit_params);
            Output.pretty("\n\n" ++ outro_text ++ "\n", .{});
            Output.flush();
        },
    }
}
/// Parses process arguments into a `CommandLineArguments` for the given
/// (comptime-known) subcommand, choosing the matching clap parameter table.
/// On a parse error it prints help plus the diagnostic and exits with 1;
/// on `--help` it prints help and exits with 0. May `Global.crash()` on
/// invalid option values. May also chdir when `--cwd` is given.
pub fn parse(allocator: std.mem.Allocator, comptime subcommand: Subcommand) !CommandLineArguments {
    Output.is_verbose = Output.isVerbose();

    // Select the flag table for this subcommand at comptime.
    const params: []const ParamType = switch (subcommand) {
        .install => install_params,
        .update => update_params,
        .pm => pm_params,
        .add => add_params,
        .remove => remove_params,
        .link => link_params,
        .unlink => unlink_params,
        .patch => patch_params,
        .@"patch-commit" => patch_commit_params,
        .outdated => outdated_params,
        .pack => pack_params,
        .publish => publish_params,
        // TODO: we will probably want to do this for other *_params. this way extra params
        // are not included in the help text
        .audit => shared_params ++ audit_params,
    };

    var diag = clap.Diagnostic{};

    var args = clap.parse(clap.Help, params, .{
        .diagnostic = &diag,
        .allocator = allocator,
    }) catch |err| {
        // Show the help text first, then the specific parse error.
        printHelp(subcommand);
        diag.report(Output.errorWriter(), err) catch {};
        Global.exit(1);
    };

    if (args.flag("--help")) {
        printHelp(subcommand);
        Global.exit(0);
    }

    // Start from defaults and copy each parsed flag/option over.
    var cli = CommandLineArguments{};
    cli.yarn = args.flag("--yarn");
    cli.production = args.flag("--production");
    cli.frozen_lockfile = args.flag("--frozen-lockfile");
    cli.no_progress = args.flag("--no-progress");
    cli.dry_run = args.flag("--dry-run");
    cli.global = args.flag("--global");
    cli.force = args.flag("--force");
    cli.no_verify = args.flag("--no-verify");
    cli.no_cache = args.flag("--no-cache");
    cli.silent = args.flag("--silent");
    cli.verbose = args.flag("--verbose") or Output.is_verbose;
    cli.ignore_scripts = args.flag("--ignore-scripts");
    cli.trusted = args.flag("--trust");
    cli.no_summary = args.flag("--no-summary");
    cli.ca = args.options("--ca");
    cli.lockfile_only = args.flag("--lockfile-only");

    if (args.option("--cache-dir")) |cache_dir| {
        cli.cache_dir = cache_dir;
    }

    if (args.option("--cafile")) |ca_file_name| {
        cli.ca_file_name = ca_file_name;
    }

    if (args.option("--network-concurrency")) |network_concurrency| {
        cli.network_concurrency = std.fmt.parseInt(u16, network_concurrency, 10) catch {
            Output.errGeneric("Expected --network-concurrency to be a number between 0 and 65535: {s}", .{network_concurrency});
            Global.crash();
        };
    }

    // Only set when the flag is present; null keeps "unspecified" distinct.
    if (args.flag("--save-text-lockfile")) {
        cli.save_text_lockfile = true;
    }

    // `--omit` accepts repeated values; anything other than dev/optional/peer
    // is a hard error.
    const omit_values = args.options("--omit");

    if (omit_values.len > 0) {
        var omit: Omit = .{};
        for (omit_values) |omit_value| {
            if (strings.eqlComptime(omit_value, "dev")) {
                omit.dev = true;
            } else if (strings.eqlComptime(omit_value, "optional")) {
                omit.optional = true;
            } else if (strings.eqlComptime(omit_value, "peer")) {
                omit.peer = true;
            } else {
                Output.errGeneric("invalid `omit` value: '{s}'", .{omit_value});
                Global.crash();
            }
        }
        cli.omit = omit;
    }

    // commands that support --filter
    if (comptime subcommand.supportsWorkspaceFiltering()) {
        cli.filters = args.options("--filter");
    }

    if (comptime subcommand.supportsJsonOutput()) {
        cli.json_output = args.flag("--json");
    }

    if (comptime subcommand == .outdated) {
        // fake --dry-run, we don't actually resolve+clean the lockfile
        cli.dry_run = true;
        // cli.json_output = args.flag("--json");
    }

    // Pack-related options are shared by pack/pm/publish; publish only
    // supports --gzip-level.
    if (comptime subcommand == .pack or subcommand == .pm or subcommand == .publish) {
        if (comptime subcommand != .publish) {
            if (args.option("--destination")) |dest| {
                cli.pack_destination = dest;
            }
            if (args.option("--filename")) |file| {
                cli.pack_filename = file;
            }
        }

        if (args.option("--gzip-level")) |level| {
            cli.pack_gzip_level = level;
        }
    }

    if (comptime subcommand == .publish) {
        if (args.option("--tag")) |tag| {
            cli.publish_config.tag = tag;
        }

        if (args.option("--access")) |access| {
            cli.publish_config.access = Options.Access.fromStr(access) orelse {
                Output.errGeneric("invalid `access` value: '{s}'", .{access});
                Global.crash();
            };
        }

        if (args.option("--otp")) |otp| {
            cli.publish_config.otp = otp;
        }

        if (args.option("--auth-type")) |auth_type| {
            cli.publish_config.auth_type = Options.AuthType.fromStr(auth_type) orelse {
                Output.errGeneric("invalid `auth-type` value: '{s}'", .{auth_type});
                Global.crash();
            };
        }
    }

    // link and unlink default to not saving, all others default to
    // saving.
    if (comptime subcommand == .link or subcommand == .unlink) {
        cli.no_save = !args.flag("--save");
    } else {
        cli.no_save = args.flag("--no-save");
    }

    if (subcommand == .patch) {
        const patch_commit = args.flag("--commit");
        if (patch_commit) {
            cli.patch = .{
                .commit = .{
                    .patches_dir = args.option("--patches-dir") orelse "patches",
                },
            };
        } else {
            cli.patch = .{
                .patch = .{},
            };
        }
    }
    if (subcommand == .@"patch-commit") {
        cli.patch = .{
            .commit = .{
                .patches_dir = args.option("--patches-dir") orelse "patches",
            },
        };
    }

    if (args.option("--config")) |opt| {
        cli.config = opt;
    }

    if (comptime subcommand == .add or subcommand == .install) {
        cli.development = args.flag("--development") or args.flag("--dev");
        cli.optional = args.flag("--optional");
        cli.peer = args.flag("--peer");
        cli.exact = args.flag("--exact");
        cli.analyze = args.flag("--analyze");
        cli.only_missing = args.flag("--only-missing");
    }

    if (args.option("--concurrent-scripts")) |concurrency| {
        // NOTE(review): parse failures fall back to null (default) silently,
        // unlike --network-concurrency which crashes — presumably intentional.
        cli.concurrent_scripts = std.fmt.parseInt(usize, concurrency, 10) catch null;
    }

    if (args.option("--cwd")) |cwd_| {
        var buf: bun.PathBuffer = undefined;
        var buf2: bun.PathBuffer = undefined;
        var final_path: [:0]u8 = undefined;
        // Relative paths (starting with '.') are resolved against the current
        // cwd; everything else is used as-is, NUL-terminated for chdir.
        if (cwd_.len > 0 and cwd_[0] == '.') {
            const cwd = try bun.getcwd(&buf);
            var parts = [_]string{cwd_};
            const path_ = Path.joinAbsStringBuf(cwd, &buf2, &parts, .auto);
            buf2[path_.len] = 0;
            final_path = buf2[0..path_.len :0];
        } else {
            bun.copy(u8, &buf, cwd_);
            buf[cwd_.len] = 0;
            final_path = buf[0..cwd_.len :0];
        }
        bun.sys.chdir("", final_path).unwrap() catch |err| {
            Output.errGeneric("failed to change directory to \"{s}\": {s}\n", .{ final_path, @errorName(err) });
            Global.crash();
        };
    }

    if (comptime subcommand == .update) {
        cli.latest = args.flag("--latest");
    }

    // Map `--backend` to an install method, keeping it only when the method
    // is supported on this platform.
    const specified_backend: ?PackageInstall.Method = brk: {
        if (args.option("--backend")) |backend_| {
            break :brk PackageInstall.Method.map.get(backend_);
        }
        break :brk null;
    };

    if (specified_backend) |backend| {
        if (backend.isSupported()) {
            cli.backend = backend;
        }
    }

    if (args.option("--registry")) |registry| {
        if (!strings.hasPrefixComptime(registry, "https://") and !strings.hasPrefixComptime(registry, "http://")) {
            Output.errGeneric("Registry URL must start with 'https://' or 'http://': {}\n", .{bun.fmt.quote(registry)});
            Global.crash();
        }
        cli.registry = registry;
    }

    cli.positionals = args.positionals();

    // patch/patch-commit require a positional argument after the subcommand.
    if (subcommand == .patch and cli.positionals.len < 2) {
        Output.errGeneric("Missing pkg to patch\n", .{});
        Global.crash();
    }

    if (subcommand == .@"patch-commit" and cli.positionals.len < 2) {
        Output.errGeneric("Missing pkg folder to patch\n", .{});
        Global.crash();
    }

    // Mutually-exclusive flag combinations.
    if (cli.production and cli.trusted) {
        Output.errGeneric("The '--production' and '--trust' flags together are not supported because the --trust flag potentially modifies the lockfile after installing packages\n", .{});
        Global.crash();
    }

    if (cli.frozen_lockfile and cli.trusted) {
        Output.errGeneric("The '--frozen-lockfile' and '--trust' flags together are not supported because the --trust flag potentially modifies the lockfile after installing packages\n", .{});
        Global.crash();
    }

    if (cli.analyze and cli.positionals.len == 0) {
        Output.errGeneric("Missing script(s) to analyze. Pass paths to scripts to analyze their dependencies and add any missing ones to the lockfile.\n", .{});
        Global.crash();
    }

    return cli;
}
const PackageInstall = bun.install.PackageInstall;
const Options = @import("./PackageManagerOptions.zig");
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const std = @import("std");
const JSON = bun.JSON;
const Path = bun.path;
const URL = bun.URL;
const clap = bun.clap;
const PackageManagerCommand = @import("../../cli/package_manager_command.zig").PackageManagerCommand;
const Subcommand = bun.install.PackageManager.Subcommand;

View File

@@ -0,0 +1,823 @@
// Maps package.json dependency-group keys to the behavior tag used when
// scanning each group. Order matters to consumers iterating this tuple —
// TODO confirm with callers before reordering.
const dependency_groups = &.{
    .{ "optionalDependencies", .{ .optional = true } },
    .{ "devDependencies", .{ .dev = true } },
    .{ "dependencies", .{ .prod = true } },
    .{ "peerDependencies", .{ .peer = true } },
};

/// Options controlling how package.json is edited.
pub const EditOptions = struct {
    // Write exact versions instead of ^ranges.
    exact_versions: bool = false,
    // Also add the edited package(s) to trustedDependencies.
    add_trusted_dependencies: bool = false,
    // Edit is happening before the install step has run.
    before_install: bool = false,
};
/// Records `patch_key` -> `patchfile_path` in the "patchedDependencies"
/// object of `package_json`, creating the object when it does not exist.
/// NOTE(review): assumes `package_json.data` is an e_object — confirm callers
/// guarantee this before the final `put`.
pub fn editPatchedDependencies(
    manager: *PackageManager,
    package_json: *Expr,
    patch_key: []const u8,
    patchfile_path: []const u8,
) !void {
    // const pkg_to_patch = manager.
    // Reuse the existing "patchedDependencies" object when present; otherwise
    // start from an empty object.
    var patched_dependencies = brk: {
        if (package_json.asProperty("patchedDependencies")) |query| {
            if (query.expr.data == .e_object)
                break :brk query.expr.data.e_object.*;
        }
        break :brk E.Object{};
    };

    // Clone the path string into the manager's allocator so it outlives the
    // caller's buffer.
    const patchfile_expr = try Expr.init(
        E.String,
        E.String{
            .data = patchfile_path,
        },
        logger.Loc.Empty,
    ).clone(manager.allocator);

    try patched_dependencies.put(
        manager.allocator,
        patch_key,
        patchfile_expr,
    );

    // Write the updated object back onto the package.json root.
    try package_json.data.e_object.put(
        manager.allocator,
        "patchedDependencies",
        try Expr.init(E.Object, patched_dependencies, logger.Loc.Empty).clone(manager.allocator),
    );
}
/// Adds `names_to_add` to the "trustedDependencies" array of `package_json`,
/// skipping names that are already present and alphabetizing the result.
/// NOTE(review): `names_to_add` is reordered in place (duplicates are swapped
/// to the tail) — callers must not rely on its order afterwards.
pub fn editTrustedDependencies(allocator: std.mem.Allocator, package_json: *Expr, names_to_add: []string) !void {
    // `len` is the count of names that still need to be added; it shrinks as
    // we find names that are already trusted.
    var len = names_to_add.len;

    var original_trusted_dependencies = brk: {
        if (package_json.asProperty(trusted_dependencies_string)) |query| {
            if (query.expr.data == .e_array) {
                break :brk query.expr.data.e_array.*;
            }
        }
        break :brk E.Array{};
    };

    // Swap-remove names that already appear in the existing array: move each
    // duplicate to the tail and shrink `len`, so names_to_add[0..len] is the
    // set of genuinely new names.
    for (names_to_add, 0..) |name, i| {
        for (original_trusted_dependencies.items.slice()) |item| {
            if (item.data == .e_string) {
                if (item.data.e_string.eql(string, name)) {
                    const temp = names_to_add[i];
                    names_to_add[i] = names_to_add[len - 1];
                    names_to_add[len - 1] = temp;
                    len -= 1;
                    break;
                }
            }
        }
    }

    // Existing trustedDependencies entries, if the property is an array.
    var trusted_dependencies: []Expr = &[_]Expr{};
    if (package_json.asProperty(trusted_dependencies_string)) |query| {
        if (query.expr.data == .e_array) {
            trusted_dependencies = query.expr.data.e_array.items.slice();
        }
    }

    const trusted_dependencies_to_add = len;
    // Build a new array: existing entries first, then one cloned string per
    // new name, filling the e_missing slots from the end.
    const new_trusted_deps = brk: {
        var deps = try allocator.alloc(Expr, trusted_dependencies.len + trusted_dependencies_to_add);
        @memcpy(deps[0..trusted_dependencies.len], trusted_dependencies);
        @memset(deps[trusted_dependencies.len..], Expr.empty);

        for (names_to_add[0..len]) |name| {
            if (comptime Environment.allow_assert) {
                // There must still be an unfilled (e_missing) slot for this name.
                var has_missing = false;
                for (deps) |dep| {
                    if (dep.data == .e_missing) has_missing = true;
                }
                bun.assert(has_missing);
            }

            // Fill the last remaining e_missing slot with this name.
            var i = deps.len;
            while (i > 0) {
                i -= 1;
                if (deps[i].data == .e_missing) {
                    deps[i] = try Expr.init(
                        E.String,
                        E.String{
                            .data = name,
                        },
                        logger.Loc.Empty,
                    ).clone(allocator);
                    break;
                }
            }
        }

        if (comptime Environment.allow_assert) {
            // Every slot must be filled by now.
            for (deps) |dep| bun.assert(dep.data != .e_missing);
        }

        break :brk deps;
    };

    // Locate (or create) the trustedDependencies array expression.
    var needs_new_trusted_dependencies_list = true;
    const trusted_dependencies_array: Expr = brk: {
        if (package_json.asProperty(trusted_dependencies_string)) |query| {
            if (query.expr.data == .e_array) {
                needs_new_trusted_dependencies_list = false;
                break :brk query.expr;
            }
        }

        break :brk Expr.init(
            E.Array,
            E.Array{
                .items = JSAst.ExprNodeList.init(new_trusted_deps),
            },
            logger.Loc.Empty,
        );
    };

    if (trusted_dependencies_to_add > 0 and new_trusted_deps.len > 0) {
        trusted_dependencies_array.data.e_array.items = JSAst.ExprNodeList.init(new_trusted_deps);
        trusted_dependencies_array.data.e_array.alphabetizeStrings();
    }

    if (package_json.data != .e_object or package_json.data.e_object.properties.len == 0) {
        // package.json root is empty or not an object: replace it with a
        // one-property object holding the array.
        var root_properties = try allocator.alloc(JSAst.G.Property, 1);
        root_properties[0] = JSAst.G.Property{
            .key = Expr.init(
                E.String,
                E.String{
                    .data = trusted_dependencies_string,
                },
                logger.Loc.Empty,
            ),
            .value = trusted_dependencies_array,
        };

        package_json.* = Expr.init(
            E.Object,
            E.Object{
                .properties = JSAst.G.Property.List.init(root_properties),
            },
            logger.Loc.Empty,
        );
    } else if (needs_new_trusted_dependencies_list) {
        // Root object exists but has no trustedDependencies array: append the
        // new property after copying the existing ones.
        var root_properties = try allocator.alloc(G.Property, package_json.data.e_object.properties.len + 1);
        @memcpy(root_properties[0..package_json.data.e_object.properties.len], package_json.data.e_object.properties.slice());
        root_properties[root_properties.len - 1] = .{
            .key = Expr.init(
                E.String,
                E.String{
                    .data = trusted_dependencies_string,
                },
                logger.Loc.Empty,
            ),
            .value = trusted_dependencies_array,
        };
        package_json.* = Expr.init(
            E.Object,
            E.Object{
                .properties = JSAst.G.Property.List.init(root_properties),
            },
            logger.Loc.Empty,
        );
    }
}
/// When `bun update` is called without package names, all dependencies are updated.
/// This function will identify the current workspace and update all changed package
/// versions.
///
/// Two phases, selected by `options.before_install`:
/// - before install: collect every npm/dist-tag dependency into
///   `manager.updating_packages` (and with `--latest`, rewrite its version
///   literal to "latest" so resolution fetches the newest version);
/// - after install: write the resolved versions from the lockfile back into
///   the package.json AST, preserving the original pin style (`^`/`~`/exact)
///   and `npm:` aliases.
pub fn editUpdateNoArgs(
    manager: *PackageManager,
    current_package_json: *Expr,
    options: EditOptions,
) !void {
    // using data store is going to result in undefined memory issues as
    // the store is cleared in some workspace situations. the solution
    // is to always avoid the store
    Expr.Disabler.disable();
    defer Expr.Disabler.enable();

    const allocator = manager.allocator;

    // Visit every dependency group present on the package.json root object.
    inline for (dependency_groups) |group| {
        const group_str = group[0];

        if (current_package_json.asProperty(group_str)) |root| {
            if (root.expr.data == .e_object) {
                if (options.before_install) {
                    // set each npm dependency to latest
                    for (root.expr.data.e_object.properties.slice()) |*dep| {
                        const key = dep.key orelse continue;
                        if (key.data != .e_string) continue;
                        const value = dep.value orelse continue;
                        if (value.data != .e_string) continue;

                        const version_literal = try value.asStringCloned(allocator) orelse bun.outOfMemory();
                        var tag = Dependency.Version.Tag.infer(version_literal);

                        // only updating dependencies with npm versions, and dist-tags if `--latest`.
                        if (tag != .npm and (tag != .dist_tag or !manager.options.do.update_to_latest)) continue;

                        var alias_at_index: ?usize = null;
                        if (strings.hasPrefixComptime(strings.trim(version_literal, &strings.whitespace_chars), "npm:")) {
                            // negative because the real package might have a scope
                            // e.g. "dep": "npm:@foo/bar@1.2.3"
                            if (strings.lastIndexOfChar(version_literal, '@')) |at_index| {
                                // Re-infer the tag from the part after the alias's '@'.
                                tag = Dependency.Version.Tag.infer(version_literal[at_index + 1 ..]);
                                if (tag != .npm and (tag != .dist_tag or !manager.options.do.update_to_latest)) continue;
                                alias_at_index = at_index;
                            }
                        }

                        const key_str = try key.asStringCloned(allocator) orelse unreachable;
                        const entry = manager.updating_packages.getOrPut(allocator, key_str) catch bun.outOfMemory();

                        // If a dependency is present in more than one dependency group, only one of it's versions
                        // will be updated. The group is determined by the order of `dependency_groups`, the same
                        // order used to choose which version to install.
                        if (entry.found_existing) continue;

                        entry.value_ptr.* = .{
                            .original_version_literal = version_literal,
                            .is_alias = alias_at_index != null,
                            .original_version = null,
                        };

                        if (manager.options.do.update_to_latest) {
                            // is it an aliased package
                            const temp_version = if (alias_at_index) |at_index|
                                std.fmt.allocPrint(allocator, "{s}@latest", .{version_literal[0..at_index]}) catch bun.outOfMemory()
                            else
                                allocator.dupe(u8, "latest") catch bun.outOfMemory();

                            dep.value = Expr.allocate(allocator, E.String, .{
                                .data = temp_version,
                            }, logger.Loc.Empty);
                        }
                    }
                } else {
                    // Post-install pass: look up the resolved version of each
                    // collected dependency in the current workspace's lockfile
                    // entries and write it back into the AST.
                    const lockfile = manager.lockfile;
                    const string_buf = lockfile.buffers.string_bytes.items;
                    const workspace_package_id = lockfile.getWorkspacePackageID(manager.workspace_name_hash);
                    const packages = lockfile.packages.slice();
                    const resolutions = packages.items(.resolution);
                    const deps = packages.items(.dependencies)[workspace_package_id];
                    const resolution_ids = packages.items(.resolutions)[workspace_package_id];
                    const workspace_deps: []const Dependency = deps.get(lockfile.buffers.dependencies.items);
                    const workspace_resolution_ids = resolution_ids.get(lockfile.buffers.resolutions.items);

                    for (root.expr.data.e_object.properties.slice()) |*dep| {
                        const key = dep.key orelse continue;
                        if (key.data != .e_string) continue;
                        const value = dep.value orelse continue;
                        if (value.data != .e_string) continue;

                        const key_str = key.asString(allocator) orelse bun.outOfMemory();

                        updated: {
                            // fetchSwapRemove because we want to update the first dependency with a matching
                            // name, or none at all
                            if (manager.updating_packages.fetchSwapRemove(key_str)) |entry| {
                                const is_alias = entry.value.is_alias;
                                const dep_name = entry.key;

                                for (workspace_deps, workspace_resolution_ids) |workspace_dep, package_id| {
                                    if (package_id == invalid_package_id) continue;

                                    const resolution = resolutions[package_id];
                                    if (resolution.tag != .npm) continue;

                                    const workspace_dep_name = workspace_dep.name.slice(string_buf);
                                    if (!strings.eqlLong(workspace_dep_name, dep_name, true)) continue;

                                    if (workspace_dep.version.npm()) |npm_version| {
                                        // It's possible we inserted a dependency that won't update (version is an exact version).
                                        // If we find one, skip to keep the original version literal.
                                        if (!manager.options.do.update_to_latest and npm_version.version.isExact()) break :updated;
                                    }

                                    const new_version = new_version: {
                                        const version_fmt = resolution.value.npm.version.fmt(string_buf);
                                        if (options.exact_versions) {
                                            break :new_version try std.fmt.allocPrint(allocator, "{}", .{version_fmt});
                                        }

                                        // Strip a leading "npm:pkg@" alias prefix
                                        // before inspecting the pin style.
                                        const version_literal = version_literal: {
                                            if (!is_alias) break :version_literal entry.value.original_version_literal;
                                            if (strings.lastIndexOfChar(entry.value.original_version_literal, '@')) |at_index| {
                                                break :version_literal entry.value.original_version_literal[at_index + 1 ..];
                                            }
                                            break :version_literal entry.value.original_version_literal;
                                        };

                                        // Keep the same pin style (exact / ~ / ^)
                                        // the user had before the update.
                                        const pinned_version = Semver.Version.whichVersionIsPinned(version_literal);
                                        break :new_version try switch (pinned_version) {
                                            .patch => std.fmt.allocPrint(allocator, "{}", .{version_fmt}),
                                            .minor => std.fmt.allocPrint(allocator, "~{}", .{version_fmt}),
                                            .major => std.fmt.allocPrint(allocator, "^{}", .{version_fmt}),
                                        };
                                    };

                                    if (is_alias) {
                                        const dep_literal = workspace_dep.version.literal.slice(string_buf);

                                        // negative because the real package might have a scope
                                        // e.g. "dep": "npm:@foo/bar@1.2.3"
                                        if (strings.lastIndexOfChar(dep_literal, '@')) |at_index| {
                                            dep.value = Expr.allocate(allocator, E.String, .{
                                                .data = try std.fmt.allocPrint(allocator, "{s}@{s}", .{
                                                    dep_literal[0..at_index],
                                                    new_version,
                                                }),
                                            }, logger.Loc.Empty);
                                            break :updated;
                                        }

                                        // fallthrough and replace entire version.
                                    }

                                    dep.value = Expr.allocate(allocator, E.String, .{
                                        .data = new_version,
                                    }, logger.Loc.Empty);
                                    break :updated;
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/// edits dependencies and trusted dependencies
/// if options.add_trusted_dependencies is true, gets list from PackageManager.trusted_deps_to_add_to_package_json
///
/// Phase 1 reuses existing slots in the dependency lists for requested
/// updates; phase 2 allocates new property slots for the `remaining` requests
/// that had no existing slot; phase 3 writes the final version string into
/// every request's `e_string`.
pub fn edit(
    manager: *PackageManager,
    updates: *[]UpdateRequest,
    current_package_json: *Expr,
    dependency_list: string,
    options: EditOptions,
) !void {
    // using data store is going to result in undefined memory issues as
    // the store is cleared in some workspace situations. the solution
    // is to always avoid the store
    Expr.Disabler.disable();
    defer Expr.Disabler.enable();

    const allocator = manager.allocator;
    // Number of update requests that still need a slot in some dependency list.
    var remaining = updates.len;
    // Number of requests that replace an existing entry in `dependency_list`
    // (these do not grow the property array in phase 2).
    var replacing: usize = 0;
    const only_add_missing = manager.options.enable.only_missing;

    // There are three possible scenarios here
    // 1. There is no "dependencies" (or equivalent list) or it is empty
    // 2. There is a "dependencies" (or equivalent list), but the package name already exists in a separate list
    // 3. There is a "dependencies" (or equivalent list), and the package name exists in multiple lists
    // Try to use the existing spot in the dependencies list if possible
    {
        var original_trusted_dependencies = brk: {
            if (!options.add_trusted_dependencies) break :brk E.Array{};
            if (current_package_json.asProperty(trusted_dependencies_string)) |query| {
                if (query.expr.data == .e_array) {
                    // not modifying
                    break :brk query.expr.data.e_array.*;
                }
            }
            break :brk E.Array{};
        };

        if (options.add_trusted_dependencies) {
            // Drop names that are already in the existing trustedDependencies
            // array.
            // NOTE(review): this swap-removes from the list while iterating a
            // slice of it captured at loop start; entries swapped into slot `i`
            // appear to be skipped — confirm whether duplicates can slip through.
            for (manager.trusted_deps_to_add_to_package_json.items, 0..) |trusted_package_name, i| {
                for (original_trusted_dependencies.items.slice()) |item| {
                    if (item.data == .e_string) {
                        if (item.data.e_string.eql(string, trusted_package_name)) {
                            allocator.free(manager.trusted_deps_to_add_to_package_json.swapRemove(i));
                            break;
                        }
                    }
                }
            }
        }

        {
            // Phase 1: for each request, search all four dependency lists for an
            // existing entry to reuse.
            var i: usize = 0;
            loop: while (i < updates.len) {
                var request = &updates.*[i];

                inline for ([_]string{ "dependencies", "devDependencies", "optionalDependencies", "peerDependencies" }) |list| {
                    if (current_package_json.asProperty(list)) |query| {
                        if (query.expr.data == .e_object) {
                            const name = if (request.is_aliased)
                                request.name
                            else
                                request.version.literal.slice(request.version_buf);

                            if (query.expr.asProperty(name)) |value| {
                                if (value.expr.data == .e_string) {
                                    if (request.package_id != invalid_package_id and strings.eqlLong(list, dependency_list, true)) {
                                        // Found in the target list: this is a replacement.
                                        replacing += 1;
                                    } else {
                                        if (manager.subcommand == .update and options.before_install) add_packages_to_update: {
                                            // Record the original version literal so the
                                            // post-install pass can preserve its pin style.
                                            const version_literal = try value.expr.asStringCloned(allocator) orelse break :add_packages_to_update;
                                            var tag = Dependency.Version.Tag.infer(version_literal);

                                            if (tag != .npm and tag != .dist_tag) break :add_packages_to_update;

                                            const entry = manager.updating_packages.getOrPut(allocator, name) catch bun.outOfMemory();

                                            // first come, first serve
                                            if (entry.found_existing) break :add_packages_to_update;

                                            var is_alias = false;
                                            if (strings.hasPrefixComptime(strings.trim(version_literal, &strings.whitespace_chars), "npm:")) {
                                                if (strings.lastIndexOfChar(version_literal, '@')) |at_index| {
                                                    tag = Dependency.Version.Tag.infer(version_literal[at_index + 1 ..]);
                                                    if (tag != .npm and tag != .dist_tag) break :add_packages_to_update;
                                                    is_alias = true;
                                                }
                                            }

                                            entry.value_ptr.* = .{
                                                .original_version_literal = version_literal,
                                                .is_alias = is_alias,
                                                .original_version = null,
                                            };
                                        }
                                        if (!only_add_missing) {
                                            // Reuse this slot's string node.
                                            request.e_string = value.expr.data.e_string;
                                            remaining -= 1;
                                        } else {
                                            // --only-missing: the package already exists,
                                            // drop the request via swap-remove.
                                            if (i < updates.*.len - 1) {
                                                updates.*[i] = updates.*[updates.*.len - 1];
                                            }
                                            updates.*.len -= 1;
                                            remaining -= 1;
                                            continue :loop;
                                        }
                                    }
                                }
                                break;
                            } else {
                                // Not found by name; git/github deps may be keyed by a
                                // different name, so match on the URL value instead.
                                if (request.version.tag == .github or request.version.tag == .git) {
                                    for (query.expr.data.e_object.properties.slice()) |item| {
                                        if (item.value) |v| {
                                            const url = request.version.literal.slice(request.version_buf);
                                            if (v.data == .e_string and v.data.e_string.eql(string, url)) {
                                                request.e_string = v.data.e_string;
                                                remaining -= 1;
                                                break;
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                i += 1;
            }
        }
    }

    if (remaining != 0) {
        // Phase 2: grow the target dependency list by the requests that found
        // no existing slot.
        var dependencies: []G.Property = &[_]G.Property{};
        if (current_package_json.asProperty(dependency_list)) |query| {
            if (query.expr.data == .e_object) {
                dependencies = query.expr.data.e_object.properties.slice();
            }
        }

        // Existing entries + new slots (replacements reuse existing slots).
        var new_dependencies = try allocator.alloc(G.Property, dependencies.len + remaining - replacing);
        bun.copy(G.Property, new_dependencies, dependencies);
        @memset(new_dependencies[dependencies.len..], G.Property{});

        var trusted_dependencies: []Expr = &[_]Expr{};
        if (options.add_trusted_dependencies) {
            if (current_package_json.asProperty(trusted_dependencies_string)) |query| {
                if (query.expr.data == .e_array) {
                    trusted_dependencies = query.expr.data.e_array.items.slice();
                }
            }
        }

        const trusted_dependencies_to_add = manager.trusted_deps_to_add_to_package_json.items.len;

        // Existing trustedDependencies entries plus one slot per name to add;
        // the `.e_missing` tail slots are filled from the end.
        const new_trusted_deps = brk: {
            if (!options.add_trusted_dependencies or trusted_dependencies_to_add == 0) break :brk &[_]Expr{};

            var deps = try allocator.alloc(Expr, trusted_dependencies.len + trusted_dependencies_to_add);
            @memcpy(deps[0..trusted_dependencies.len], trusted_dependencies);
            @memset(deps[trusted_dependencies.len..], Expr.empty);

            for (manager.trusted_deps_to_add_to_package_json.items) |package_name| {
                if (comptime Environment.allow_assert) {
                    var has_missing = false;
                    for (deps) |dep| {
                        if (dep.data == .e_missing) has_missing = true;
                    }
                    bun.assert(has_missing);
                }

                var i = deps.len;
                while (i > 0) {
                    i -= 1;
                    if (deps[i].data == .e_missing) {
                        deps[i] = Expr.allocate(allocator, E.String, .{
                            .data = package_name,
                        }, logger.Loc.Empty);
                        break;
                    }
                }
            }

            if (comptime Environment.allow_assert) {
                for (deps) |dep| bun.assert(dep.data != .e_missing);
            }

            break :brk deps;
        };

        // Assign each unplaced request a slot in `new_dependencies`, removing
        // duplicates discovered along the way.
        outer: for (updates.*) |*request| {
            if (request.e_string != null) continue;
            defer if (comptime Environment.allow_assert) bun.assert(request.e_string != null);

            var k: usize = 0;
            while (k < new_dependencies.len) : (k += 1) {
                if (new_dependencies[k].key) |key| {
                    // NOTE(review): `key.data.e_string` is accessed without first
                    // checking `key.data == .e_string` — presumably all keys here
                    // are strings after JSON parsing; confirm.
                    if (!request.is_aliased and request.package_id != invalid_package_id and key.data.e_string.eql(
                        string,
                        manager.lockfile.packages.items(.name)[request.package_id].slice(request.version_buf),
                    )) {
                        // This actually is a duplicate which we did not
                        // pick up before dependency resolution.
                        // For this case, we'll just swap remove it.
                        if (new_dependencies.len > 1) {
                            new_dependencies[k] = new_dependencies[new_dependencies.len - 1];
                            new_dependencies = new_dependencies[0 .. new_dependencies.len - 1];
                        } else {
                            new_dependencies = &[_]G.Property{};
                        }
                        continue;
                    }

                    if (key.data.e_string.eql(
                        string,
                        if (request.is_aliased)
                            request.name
                        else
                            request.version.literal.slice(request.version_buf),
                    )) {
                        if (request.package_id == invalid_package_id) {
                            // This actually is a duplicate like "react"
                            // appearing in both "dependencies" and "optionalDependencies".
                            // For this case, we'll just swap remove it
                            if (new_dependencies.len > 1) {
                                new_dependencies[k] = new_dependencies[new_dependencies.len - 1];
                                new_dependencies = new_dependencies[0 .. new_dependencies.len - 1];
                            } else {
                                new_dependencies = &[_]G.Property{};
                            }
                            continue;
                        }

                        // Same name: clear the key so the slot is re-filled below.
                        new_dependencies[k].key = null;
                    }
                }

                if (new_dependencies[k].key == null) {
                    new_dependencies[k].key = JSAst.Expr.allocate(
                        allocator,
                        JSAst.E.String,
                        .{ .data = try allocator.dupe(u8, request.getResolvedName(manager.lockfile)) },
                        logger.Loc.Empty,
                    );
                    new_dependencies[k].value = JSAst.Expr.allocate(allocator, JSAst.E.String, .{
                        // we set it later
                        .data = "",
                    }, logger.Loc.Empty);

                    request.e_string = new_dependencies[k].value.?.data.e_string;

                    if (request.is_aliased) continue :outer;
                }
            }
        }

        var needs_new_dependency_list = true;
        const dependencies_object: JSAst.Expr = brk: {
            if (current_package_json.asProperty(dependency_list)) |query| {
                if (query.expr.data == .e_object) {
                    needs_new_dependency_list = false;
                    break :brk query.expr;
                }
            }

            break :brk JSAst.Expr.allocate(allocator, JSAst.E.Object, .{
                .properties = JSAst.G.Property.List.init(new_dependencies),
            }, logger.Loc.Empty);
        };

        dependencies_object.data.e_object.properties = JSAst.G.Property.List.init(new_dependencies);
        if (new_dependencies.len > 1)
            dependencies_object.data.e_object.alphabetizeProperties();

        var needs_new_trusted_dependencies_list = true;
        const trusted_dependencies_array: Expr = brk: {
            if (!options.add_trusted_dependencies or trusted_dependencies_to_add == 0) {
                needs_new_trusted_dependencies_list = false;
                break :brk Expr.empty;
            }
            if (current_package_json.asProperty(trusted_dependencies_string)) |query| {
                if (query.expr.data == .e_array) {
                    needs_new_trusted_dependencies_list = false;
                    break :brk query.expr;
                }
            }

            break :brk Expr.allocate(allocator, E.Array, .{
                .items = JSAst.ExprNodeList.init(new_trusted_deps),
            }, logger.Loc.Empty);
        };

        if (options.add_trusted_dependencies and trusted_dependencies_to_add > 0) {
            trusted_dependencies_array.data.e_array.items = JSAst.ExprNodeList.init(new_trusted_deps);
            if (new_trusted_deps.len > 1) {
                trusted_dependencies_array.data.e_array.alphabetizeStrings();
            }
        }

        if (current_package_json.data != .e_object or current_package_json.data.e_object.properties.len == 0) {
            // Root is not an object (or is empty): build a fresh root holding
            // just the dependency list (and trustedDependencies if requested).
            var root_properties = try allocator.alloc(JSAst.G.Property, if (options.add_trusted_dependencies) 2 else 1);
            root_properties[0] = JSAst.G.Property{
                .key = JSAst.Expr.allocate(allocator, JSAst.E.String, .{
                    .data = dependency_list,
                }, logger.Loc.Empty),
                .value = dependencies_object,
            };

            if (options.add_trusted_dependencies) {
                root_properties[1] = JSAst.G.Property{
                    .key = Expr.allocate(allocator, E.String, .{
                        .data = trusted_dependencies_string,
                    }, logger.Loc.Empty),
                    .value = trusted_dependencies_array,
                };
            }

            current_package_json.* = JSAst.Expr.allocate(allocator, JSAst.E.Object, .{
                .properties = JSAst.G.Property.List.init(root_properties),
            }, logger.Loc.Empty);
        } else {
            if (needs_new_dependency_list and needs_new_trusted_dependencies_list) {
                // Append both new root properties.
                var root_properties = try allocator.alloc(G.Property, current_package_json.data.e_object.properties.len + 2);
                @memcpy(root_properties[0..current_package_json.data.e_object.properties.len], current_package_json.data.e_object.properties.slice());
                root_properties[root_properties.len - 2] = .{
                    .key = Expr.allocate(allocator, E.String, E.String{
                        .data = dependency_list,
                    }, logger.Loc.Empty),
                    .value = dependencies_object,
                };
                root_properties[root_properties.len - 1] = .{
                    .key = Expr.allocate(allocator, E.String, .{
                        .data = trusted_dependencies_string,
                    }, logger.Loc.Empty),
                    .value = trusted_dependencies_array,
                };

                current_package_json.* = Expr.allocate(allocator, E.Object, .{
                    .properties = G.Property.List.init(root_properties),
                }, logger.Loc.Empty);
            } else if (needs_new_dependency_list or needs_new_trusted_dependencies_list) {
                // Append exactly one new root property (whichever was missing).
                var root_properties = try allocator.alloc(JSAst.G.Property, current_package_json.data.e_object.properties.len + 1);
                @memcpy(root_properties[0..current_package_json.data.e_object.properties.len], current_package_json.data.e_object.properties.slice());
                root_properties[root_properties.len - 1] = .{
                    .key = JSAst.Expr.allocate(allocator, JSAst.E.String, .{
                        .data = if (needs_new_dependency_list) dependency_list else trusted_dependencies_string,
                    }, logger.Loc.Empty),
                    .value = if (needs_new_dependency_list) dependencies_object else trusted_dependencies_array,
                };

                current_package_json.* = JSAst.Expr.allocate(allocator, JSAst.E.Object, .{
                    .properties = JSAst.G.Property.List.init(root_properties),
                }, logger.Loc.Empty);
            }
        }
    }

    // Phase 3: fill in the version string for every request's slot.
    const resolutions = if (!options.before_install) manager.lockfile.packages.items(.resolution) else &.{};

    for (updates.*) |*request| {
        if (request.e_string) |e_string| {
            if (request.package_id >= resolutions.len or resolutions[request.package_id].tag == .uninitialized) {
                // No resolution available yet (or at all): fall back to the
                // requested literal, "latest", or the existing value.
                e_string.data = uninitialized: {
                    if (manager.subcommand == .update and manager.options.do.update_to_latest) {
                        break :uninitialized try allocator.dupe(u8, "latest");
                    }

                    if (manager.subcommand != .update or !options.before_install or e_string.isBlank() or request.version.tag == .npm) {
                        break :uninitialized switch (request.version.tag) {
                            .uninitialized => try allocator.dupe(u8, "latest"),
                            else => try allocator.dupe(u8, request.version.literal.slice(request.version_buf)),
                        };
                    } else {
                        break :uninitialized e_string.data;
                    }
                };
                continue;
            }

            e_string.data = switch (resolutions[request.package_id].tag) {
                .npm => npm: {
                    if (manager.subcommand == .update and (request.version.tag == .dist_tag or request.version.tag == .npm)) {
                        if (manager.updating_packages.fetchSwapRemove(request.name)) |entry| {
                            var alias_at_index: ?usize = null;

                            // Preserve the original pin style (exact / ~ / ^)
                            // unless --save-exact was requested.
                            const new_version = new_version: {
                                const version_fmt = resolutions[request.package_id].value.npm.version.fmt(manager.lockfile.buffers.string_bytes.items);
                                if (options.exact_versions) {
                                    break :new_version try std.fmt.allocPrint(allocator, "{}", .{version_fmt});
                                }

                                const version_literal = version_literal: {
                                    if (!entry.value.is_alias) break :version_literal entry.value.original_version_literal;
                                    if (strings.lastIndexOfChar(entry.value.original_version_literal, '@')) |at_index| {
                                        alias_at_index = at_index;
                                        break :version_literal entry.value.original_version_literal[at_index + 1 ..];
                                    }

                                    break :version_literal entry.value.original_version_literal;
                                };

                                const pinned_version = Semver.Version.whichVersionIsPinned(version_literal);
                                break :new_version try switch (pinned_version) {
                                    .patch => std.fmt.allocPrint(allocator, "{}", .{version_fmt}),
                                    .minor => std.fmt.allocPrint(allocator, "~{}", .{version_fmt}),
                                    .major => std.fmt.allocPrint(allocator, "^{}", .{version_fmt}),
                                };
                            };

                            if (entry.value.is_alias) {
                                // Keep the "npm:pkg@" alias prefix.
                                const dep_literal = entry.value.original_version_literal;
                                if (strings.lastIndexOfChar(dep_literal, '@')) |at_index| {
                                    break :npm try std.fmt.allocPrint(allocator, "{s}@{s}", .{
                                        dep_literal[0..at_index],
                                        new_version,
                                    });
                                }
                            }

                            break :npm new_version;
                        }
                    }

                    if (request.version.tag == .dist_tag or
                        (manager.subcommand == .update and request.version.tag == .npm and !request.version.value.npm.version.isExact()))
                    {
                        const new_version = try switch (options.exact_versions) {
                            inline else => |exact_versions| std.fmt.allocPrint(
                                allocator,
                                if (comptime exact_versions) "{}" else "^{}",
                                .{
                                    resolutions[request.package_id].value.npm.version.fmt(request.version_buf),
                                },
                            ),
                        };

                        if (request.version.tag == .npm and request.version.value.npm.is_alias) {
                            const dep_literal = request.version.literal.slice(request.version_buf);
                            if (strings.indexOfChar(dep_literal, '@')) |at_index| {
                                break :npm try std.fmt.allocPrint(allocator, "{s}@{s}", .{
                                    dep_literal[0..at_index],
                                    new_version,
                                });
                            }
                        }

                        break :npm new_version;
                    }

                    break :npm try allocator.dupe(u8, request.version.literal.slice(request.version_buf));
                },
                .workspace => try allocator.dupe(u8, "workspace:*"),
                else => try allocator.dupe(u8, request.version.literal.slice(request.version_buf)),
            };
        }
    }
}
const trusted_dependencies_string = "trustedDependencies";
const std = @import("std");
const bun = @import("bun");
const JSAst = bun.JSAst;
const Expr = JSAst.Expr;
const G = JSAst.G;
const E = JSAst.E;
const PackageManager = bun.install.PackageManager;
const string = []const u8;
const UpdateRequest = bun.install.PackageManager.UpdateRequest;
const Environment = bun.Environment;
const Semver = bun.Semver;
const Dependency = bun.install.Dependency;
const invalid_package_id = bun.install.invalid_package_id;
const logger = bun.logger;
const strings = bun.strings;

View File

@@ -0,0 +1,657 @@
// Package-manager options for a single invocation. This file is the struct
// itself (`@This()` idiom); fields are populated by `load` from bunfig,
// environment variables, and CLI arguments.
const Options = @This();

log_level: LogLevel = .default,
// True for global installs (`-g`).
global: bool = false,

// Directory global bins are linked into; starts invalid until resolved.
global_bin_dir: std.fs.Dir = bun.FD.invalid.stdDir(),
// Global install directory from config (`global_dir` in bunfig); empty means unset.
explicit_global_directory: string = "",
/// destination directory to link bins into
// must be a variable due to global installs and bunx
bin_path: stringZ = bun.pathLiteral("node_modules/.bin"),

// Set by `load` when the default registry URL was overridden.
did_override_default_scope: bool = false,
// Default registry scope; initialized in `load`.
scope: Npm.Registry.Scope = undefined,

// Per-scope registry overrides, keyed by scope-name hash.
registries: Npm.Registry.Map = .{},
cache_directory: string = "",
enable: Enable = .{},
do: Do = .{},
positionals: []const string = &[_]string{},
update: Update = .{},
// `--dry-run`: resolve but do not install or write files.
dry_run: bool = false,
remote_package_features: Features = .{
    .optional_dependencies = true,
},
local_package_features: Features = .{
    .optional_dependencies = true,
    .dev_dependencies = true,
    .workspaces = true,
},
// Which patch sub-operation is active (`bun patch` / `bun patch --commit`).
patch_features: union(enum) {
    nothing: struct {},
    patch: struct {},
    commit: struct {
        patches_dir: string,
    },
} = .{ .nothing = .{} },

// `--filter` patterns for workspace selection.
filter_patterns: []const string = &.{},
// `bun pm pack` options.
pack_destination: string = "",
pack_filename: string = "",
pack_gzip_level: ?string = null,
json_output: bool = false,

// HTTP retry count; overridable via BUN_CONFIG_HTTP_RETRY_COUNT.
max_retry_count: u16 = 5,
min_simultaneous_requests: usize = 4,

// No default: set from config/CLI (`concurrent_scripts`).
max_concurrent_lifecycle_scripts: usize,

publish_config: PublishConfig = .{},

// Extra CA certificates (from bunfig `ca`) and CA file path (from `cafile`).
ca: []const string = &.{},
ca_file_name: string = &.{},

// if set to `false` in bunfig, save a binary lockfile
save_text_lockfile: ?bool = null,

// Only write the lockfile; skip installing packages.
lockfile_only: bool = false,
/// Publish-time configuration (npm `publishConfig`-style settings).
pub const PublishConfig = struct {
    // Package access level; null means registry default.
    access: ?Access = null,
    // dist-tag to publish under; empty presumably means "latest" — confirm at use site.
    tag: string = "",
    // One-time password for 2FA-protected publishes.
    otp: string = "",
    auth_type: ?AuthType = null,
};
/// npm package access level for publishing.
pub const Access = enum {
    public,
    restricted,

    const map = bun.ComptimeEnumMap(Access);

    /// Parses "public" / "restricted"; returns null for any other string.
    pub fn fromStr(str: string) ?Access {
        return map.get(str);
    }
};
/// Authentication flow used when publishing.
pub const AuthType = enum {
    legacy,
    web,

    const map = bun.ComptimeEnumMap(AuthType);

    /// Parses "legacy" / "web"; returns null for any other string.
    pub fn fromStr(str: string) ?AuthType {
        return map.get(str);
    }
};
/// Whether the command name banner should be printed: suppressed when the
/// log level is silent or the summary is disabled.
pub fn shouldPrintCommandName(this: *const Options) bool {
    if (this.log_level == .silent) return false;
    return this.do.summary;
}
/// Output verbosity for the package manager. The `_no_progress` variants
/// behave like their counterparts but never render a progress bar.
pub const LogLevel = enum {
    default,
    verbose,
    silent,
    default_no_progress,
    verbose_no_progress,

    /// True for the verbose levels (with or without progress bar).
    pub inline fn isVerbose(this: LogLevel) bool {
        return this == .verbose or this == .verbose_no_progress;
    }

    /// True only for the levels that render a progress bar.
    pub inline fn showProgress(this: LogLevel) bool {
        return this == .default or this == .verbose;
    }
};
/// Flags for `bun update` — presumably mirror the --dev/--optional/--peer
/// CLI switches; confirm against the CLI argument parser.
pub const Update = struct {
    development: bool = false,
    optional: bool = false,
    peer: bool = false,
};
/// Resolves and opens (creating if needed) the global install directory.
/// Precedence: $BUN_INSTALL_GLOBAL_DIR, then the explicitly configured
/// directory, then $BUN_INSTALL/install/global, then a platform home
/// ($XDG_CACHE_HOME or $HOME on POSIX, %USERPROFILE% on Windows) joined
/// with .bun/install/global. Errors if no candidate can be determined.
pub fn openGlobalDir(explicit_global_dir: string) !std.fs.Dir {
    if (bun.getenvZ("BUN_INSTALL_GLOBAL_DIR")) |home_dir| {
        return try std.fs.cwd().makeOpenPath(home_dir, .{});
    }

    if (explicit_global_dir.len > 0) {
        return try std.fs.cwd().makeOpenPath(explicit_global_dir, .{});
    }

    if (bun.getenvZ("BUN_INSTALL")) |home_dir| {
        var buf: bun.PathBuffer = undefined;
        var parts = [_]string{ "install", "global" };
        const path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto);
        return try std.fs.cwd().makeOpenPath(path, .{});
    }

    if (!Environment.isWindows) {
        if (bun.getenvZ("XDG_CACHE_HOME") orelse bun.getenvZ("HOME")) |home_dir| {
            var buf: bun.PathBuffer = undefined;
            var parts = [_]string{ ".bun", "install", "global" };
            const path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto);
            return try std.fs.cwd().makeOpenPath(path, .{});
        }
    } else {
        if (bun.getenvZ("USERPROFILE")) |home_dir| {
            var buf: bun.PathBuffer = undefined;
            var parts = [_]string{ ".bun", "install", "global" };
            const path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto);
            return try std.fs.cwd().makeOpenPath(path, .{});
        }
    }

    return error.@"No global directory found";
}
/// Resolves and opens (creating if needed) the directory global bins are
/// linked into. Precedence: $BUN_INSTALL_BIN, then the bunfig
/// `global_bin_dir`, then $BUN_INSTALL/bin, then
/// ($XDG_CACHE_HOME or the platform home env var)/.bun/bin.
/// Errors if no candidate can be determined.
pub fn openGlobalBinDir(opts_: ?*const Api.BunInstall) !std.fs.Dir {
    if (bun.getenvZ("BUN_INSTALL_BIN")) |home_dir| {
        return try std.fs.cwd().makeOpenPath(home_dir, .{});
    }

    if (opts_) |opts| {
        if (opts.global_bin_dir) |home_dir| {
            if (home_dir.len > 0) {
                return try std.fs.cwd().makeOpenPath(home_dir, .{});
            }
        }
    }

    if (bun.getenvZ("BUN_INSTALL")) |home_dir| {
        var buf: bun.PathBuffer = undefined;
        var parts = [_]string{
            "bin",
        };
        const path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto);
        return try std.fs.cwd().makeOpenPath(path, .{});
    }

    if (bun.getenvZ("XDG_CACHE_HOME") orelse bun.getenvZ(bun.DotEnv.home_env)) |home_dir| {
        var buf: bun.PathBuffer = undefined;
        var parts = [_]string{
            ".bun",
            "bin",
        };
        const path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto);
        return try std.fs.cwd().makeOpenPath(path, .{});
    }

    return error.@"Missing global bin directory: try setting $BUN_INSTALL";
}
pub fn load(
this: *Options,
allocator: std.mem.Allocator,
log: *logger.Log,
env: *DotEnv.Loader,
maybe_cli: ?CommandLineArguments,
bun_install_: ?*Api.BunInstall,
subcommand: Subcommand,
) bun.OOM!void {
var base = Api.NpmRegistry{
.url = "",
.username = "",
.password = "",
.token = "",
};
if (bun_install_) |config| {
if (config.default_registry) |registry| {
base = registry;
}
}
if (base.url.len == 0) base.url = Npm.Registry.default_url;
this.scope = try Npm.Registry.Scope.fromAPI("", base, allocator, env);
defer {
this.did_override_default_scope = this.scope.url_hash != Npm.Registry.default_url_hash;
}
if (bun_install_) |config| {
if (config.cache_directory) |cache_directory| {
this.cache_directory = cache_directory;
}
if (config.scoped) |scoped| {
for (scoped.scopes.keys(), scoped.scopes.values()) |name, *registry_| {
var registry = registry_.*;
if (registry.url.len == 0) registry.url = base.url;
try this.registries.put(allocator, Npm.Registry.Scope.hash(name), try Npm.Registry.Scope.fromAPI(name, registry, allocator, env));
}
}
if (config.ca) |ca| {
switch (ca) {
.list => |ca_list| {
this.ca = ca_list;
},
.str => |ca_str| {
this.ca = &.{ca_str};
},
}
}
if (config.cafile) |cafile| {
this.ca_file_name = cafile;
}
if (config.disable_cache orelse false) {
this.enable.cache = false;
}
if (config.disable_manifest_cache orelse false) {
this.enable.manifest_cache = false;
}
if (config.force orelse false) {
this.enable.manifest_cache_control = false;
this.enable.force_install = true;
}
if (config.save_yarn_lockfile orelse false) {
this.do.save_yarn_lock = true;
}
if (config.save_lockfile) |save_lockfile| {
this.do.save_lockfile = save_lockfile;
this.enable.force_save_lockfile = true;
}
if (config.save_dev) |save| {
this.local_package_features.dev_dependencies = save;
// remote packages should never install dev dependencies
// (TODO: unless git dependency with postinstalls)
}
if (config.save_optional) |save| {
this.remote_package_features.optional_dependencies = save;
this.local_package_features.optional_dependencies = save;
}
if (config.save_peer) |save| {
this.remote_package_features.peer_dependencies = save;
this.local_package_features.peer_dependencies = save;
}
if (config.exact) |exact| {
this.enable.exact_versions = exact;
}
if (config.production) |production| {
if (production) {
this.local_package_features.dev_dependencies = false;
this.enable.fail_early = true;
this.enable.frozen_lockfile = true;
this.enable.force_save_lockfile = false;
}
}
if (config.frozen_lockfile) |frozen_lockfile| {
if (frozen_lockfile) {
this.enable.frozen_lockfile = true;
}
}
if (config.save_text_lockfile) |save_text_lockfile| {
this.save_text_lockfile = save_text_lockfile;
}
if (config.concurrent_scripts) |jobs| {
this.max_concurrent_lifecycle_scripts = jobs;
}
if (config.cache_directory) |cache_dir| {
this.cache_directory = cache_dir;
}
if (config.ignore_scripts) |ignore_scripts| {
if (ignore_scripts) {
this.do.run_scripts = false;
}
}
this.explicit_global_directory = config.global_dir orelse this.explicit_global_directory;
}
// Progress bar default: BUN_INSTALL_PROGRESS=0 disables it explicitly,
// CI always disables it, otherwise it is disabled when stderr is not a TTY.
const default_disable_progress_bar: bool = brk: {
if (env.get("BUN_INSTALL_PROGRESS")) |prog| {
break :brk strings.eqlComptime(prog, "0");
}
if (env.isCI()) {
break :brk true;
}
break :brk Output.stderr_descriptor_type != .terminal;
};
// technically, npm_config is case in-sensitive
// load_registry:
{
// Registry URL override: the first matching env var wins, and only
// values starting with http:// or https:// are accepted.
const registry_keys = [_]string{
"BUN_CONFIG_REGISTRY",
"NPM_CONFIG_REGISTRY",
"npm_config_registry",
};
var did_set = false;
// `inline for` over a comptime-known array; `did_set` emulates an early
// break (see the stage1 note in the token block below).
inline for (registry_keys) |registry_key| {
if (!did_set) {
if (env.get(registry_key)) |registry_| {
if (registry_.len > 0 and
(strings.startsWith(registry_, "https://") or
strings.startsWith(registry_, "http://")))
{
// Preserve any previously-configured token while swapping the URL.
const prev_scope = this.scope;
var api_registry = std.mem.zeroes(Api.NpmRegistry);
api_registry.url = registry_;
api_registry.token = prev_scope.token;
this.scope = try Npm.Registry.Scope.fromAPI("", api_registry, allocator, env);
did_set = true;
}
}
}
}
}
{
// Auth token override: first non-empty matching env var wins.
const token_keys = [_]string{
"BUN_CONFIG_TOKEN",
"NPM_CONFIG_TOKEN",
"npm_config_token",
};
var did_set = false;
inline for (token_keys) |registry_key| {
if (!did_set) {
if (env.get(registry_)) |registry_| {
if (registry_.len > 0) {
this.scope.token = registry_;
did_set = true;
// stage1 bug: break inside inline is broken
// break :load_registry;
}
}
}
}
}
// Presence of the variable (any value) enables yarn.lock emission.
if (env.get("BUN_CONFIG_YARN_LOCKFILE") != null) {
this.do.save_yarn_lock = true;
}
// Invalid (non-integer) values are silently ignored and keep the default.
if (env.get("BUN_CONFIG_HTTP_RETRY_COUNT")) |retry_count| {
if (std.fmt.parseInt(u16, retry_count, 10)) |int| this.max_retry_count = int else |_| {}
}
AsyncHTTP.loadEnv(allocator, log, env);
// For the BUN_CONFIG_SKIP_* variables: setting the variable to any value
// other than "0" skips the step; "0" keeps the step enabled.
if (env.get("BUN_CONFIG_SKIP_SAVE_LOCKFILE")) |check_bool| {
this.do.save_lockfile = strings.eqlComptime(check_bool, "0");
}
if (env.get("BUN_CONFIG_SKIP_LOAD_LOCKFILE")) |check_bool| {
this.do.load_lockfile = strings.eqlComptime(check_bool, "0");
}
if (env.get("BUN_CONFIG_SKIP_INSTALL_PACKAGES")) |check_bool| {
this.do.install_packages = strings.eqlComptime(check_bool, "0");
}
// NOTE(review): here only BUN_CONFIG_NO_VERIFY=0 disables integrity
// verification; any other value keeps it enabled. That polarity looks
// surprising for a NO_* variable — confirm intent before changing.
if (env.get("BUN_CONFIG_NO_VERIFY")) |check_bool| {
this.do.verify_integrity = !strings.eqlComptime(check_bool, "0");
}
// Update should never read from manifest cache
if (subcommand == .update) {
this.enable.manifest_cache = false;
this.enable.manifest_cache_control = false;
}
// CLI flags override everything loaded from the environment above.
if (maybe_cli) |cli| {
this.do.analyze = cli.analyze;
this.enable.only_missing = cli.only_missing or cli.analyze;
if (cli.registry.len > 0) {
this.scope.url = URL.parse(cli.registry);
}
if (cli.cache_dir) |cache_dir| {
this.cache_directory = cache_dir;
}
if (cli.exact) {
this.enable.exact_versions = true;
}
if (cli.token.len > 0) {
this.scope.token = cli.token;
}
// --no-save: keep lockfile and package.json untouched.
if (cli.no_save) {
this.do.save_lockfile = false;
this.do.write_package_json = false;
}
// --dry-run: resolve only; write nothing and install nothing.
if (cli.dry_run) {
this.do.install_packages = false;
this.dry_run = true;
this.do.write_package_json = false;
this.do.save_lockfile = false;
}
if (cli.no_summary or cli.silent) {
this.do.summary = false;
}
this.filter_patterns = cli.filters;
this.pack_destination = cli.pack_destination;
this.pack_filename = cli.pack_filename;
this.pack_gzip_level = cli.pack_gzip_level;
this.json_output = cli.json_output;
if (cli.no_cache) {
this.enable.manifest_cache = false;
this.enable.manifest_cache_control = false;
}
// --omit=dev/optional/peer narrows which dependency groups are installed.
if (cli.omit) |omit| {
if (omit.dev) {
this.local_package_features.dev_dependencies = false;
// remote packages should never install dev dependencies
// (TODO: unless git dependency with postinstalls)
}
if (omit.optional) {
this.local_package_features.optional_dependencies = false;
this.remote_package_features.optional_dependencies = false;
}
if (omit.peer) {
this.local_package_features.peer_dependencies = false;
this.remote_package_features.peer_dependencies = false;
}
}
if (cli.ignore_scripts) {
this.do.run_scripts = false;
}
if (cli.trusted) {
this.do.trust_dependencies_from_args = true;
}
if (cli.save_text_lockfile) |save_text_lockfile| {
this.save_text_lockfile = save_text_lockfile;
}
this.lockfile_only = cli.lockfile_only;
// Log level: verbose/silent from flags; progress bar suppressed when
// either the environment default or --no-progress says so.
const disable_progress_bar = default_disable_progress_bar or cli.no_progress;
if (cli.verbose) {
this.log_level = if (disable_progress_bar) LogLevel.verbose_no_progress else LogLevel.verbose;
PackageManager.verbose_install = true;
} else if (cli.silent) {
this.log_level = .silent;
PackageManager.verbose_install = false;
} else {
this.log_level = if (disable_progress_bar) LogLevel.default_no_progress else LogLevel.default;
PackageManager.verbose_install = false;
}
if (cli.no_verify) {
this.do.verify_integrity = false;
}
if (cli.yarn) {
this.do.save_yarn_lock = true;
}
if (cli.backend) |backend| {
PackageInstall.supported_method = backend;
}
this.do.update_to_latest = cli.latest;
if (cli.positionals.len > 0) {
this.positionals = cli.positionals;
}
// --production implies a frozen lockfile and no dev dependencies.
if (cli.production) {
this.local_package_features.dev_dependencies = false;
this.enable.fail_early = true;
this.enable.frozen_lockfile = true;
}
if (cli.frozen_lockfile) {
this.enable.frozen_lockfile = true;
}
// --force bypasses the manifest cache and re-installs/re-saves everything.
if (cli.force) {
this.enable.manifest_cache_control = false;
this.enable.force_install = true;
this.enable.force_save_lockfile = true;
}
// Mutually exclusive dependency-group targets for `bun add`/`bun update`;
// precedence: development > optional > peer.
if (cli.development) {
this.update.development = cli.development;
} else if (cli.optional) {
this.update.optional = cli.optional;
} else if (cli.peer) {
this.update.peer = cli.peer;
}
switch (cli.patch) {
.nothing => {},
.patch => {
this.patch_features = .{ .patch = .{} };
},
.commit => {
this.patch_features = .{
.commit = .{
.patches_dir = cli.patch.commit.patches_dir,
},
};
},
}
// `bun publish` configuration (access level, dist-tag, OTP, auth type).
if (cli.publish_config.access) |cli_access| {
this.publish_config.access = cli_access;
}
if (cli.publish_config.tag.len > 0) {
this.publish_config.tag = cli.publish_config.tag;
}
if (cli.publish_config.otp.len > 0) {
this.publish_config.otp = cli.publish_config.otp;
}
if (cli.publish_config.auth_type) |auth_type| {
this.publish_config.auth_type = auth_type;
}
// TLS certificate authority overrides (--ca / --cafile).
if (cli.ca.len > 0) {
this.ca = cli.ca;
}
if (cli.ca_file_name.len > 0) {
this.ca_file_name = cli.ca_file_name;
}
} else {
// No CLI flags supplied: fall back to the environment-derived defaults.
this.log_level = if (default_disable_progress_bar) LogLevel.default_no_progress else LogLevel.default;
PackageManager.verbose_install = false;
}
// If the lockfile is frozen, don't save it to disk.
if (this.enable.frozen_lockfile) {
this.do.save_lockfile = false;
this.enable.force_save_lockfile = false;
}
}
/// Action toggles for a package-manager run, packed into 16 bits.
/// Defaults describe a plain `bun install`; the loader above flips them
/// based on environment variables and CLI flags.
pub const Do = packed struct(u16) {
save_lockfile: bool = true,
load_lockfile: bool = true,
install_packages: bool = true,
write_package_json: bool = true,
run_scripts: bool = true,
/// Also emit a yarn.lock (BUN_CONFIG_YARN_LOCKFILE or --yarn).
save_yarn_lock: bool = false,
print_meta_hash_string: bool = false,
/// Verify package integrity hashes; cleared by --no-verify.
verify_integrity: bool = true,
/// Print the post-install summary; cleared by --no-summary/--silent.
summary: bool = true,
/// Trust postinstall scripts for packages named on the command line (--trust).
trust_dependencies_from_args: bool = false,
/// Update dependencies to their latest versions (--latest).
update_to_latest: bool = false,
analyze: bool = false,
/// Padding: 12 flags + 4 unused bits = u16 backing integer.
_: u4 = 0,
};
/// Feature/behavior switches for a package-manager run, packed into 16 bits.
/// Mostly mirrors cache- and lockfile-related CLI flags (--no-cache,
/// --frozen-lockfile, --force, --exact, --production).
pub const Enable = packed struct(u16) {
manifest_cache: bool = true,
manifest_cache_control: bool = true,
cache: bool = true,
/// Abort on the first error instead of continuing (--production sets this).
fail_early: bool = false,
/// Refuse to change the lockfile; also suppresses saving it (see loader).
frozen_lockfile: bool = false,
// Don't save the lockfile unless there were actual changes
// unless...
force_save_lockfile: bool = false,
force_install: bool = false,
/// Pin exact versions instead of ^ranges (--exact).
exact_versions: bool = false,
only_missing: bool = false,
/// Padding: 9 flags + 7 unused bits = u16 backing integer.
_: u7 = 0,
};
const bun = @import("bun");
const string = bun.string;
const Output = bun.Output;
const Environment = bun.Environment;
const strings = bun.strings;
const stringZ = bun.stringZ;
const std = @import("std");
const logger = bun.logger;
const OOM = bun.OOM;
const FD = bun.FD;
const Api = bun.Schema.Api;
const Path = bun.path;
const DotEnv = bun.DotEnv;
const URL = bun.URL;
const HTTP = bun.http;
const AsyncHTTP = HTTP.AsyncHTTP;
const Npm = bun.install.Npm;
const patch = bun.install.patch;
const Features = bun.install.Features;
const CommandLineArguments = @import("./CommandLineArguments.zig");
const Subcommand = bun.install.PackageManager.Subcommand;
const PackageManager = bun.install.PackageManager;
const PackageInstall = bun.install.PackageInstall;

File diff suppressed because it is too large Load Diff

View File

@@ -24,7 +24,6 @@
// after http.globalAgent.maxSockets number of files.
// See https://groups.google.com/forum/#!topic/nodejs-dev/V5fB69hFa9o
const common = require('../common');
if (common.isWindows) return; // TODO: BUN
const fixtures = require('../common/fixtures');
const assert = require('assert');
const http = require('http');

View File

@@ -3,7 +3,6 @@
const common = require('../common');
if (!common.hasCrypto)
common.skip('missing crypto');
if (common.isWindows) return; // TODO: BUN
const fixtures = require('../common/fixtures');
const assert = require('assert');
const http2 = require('http2');

View File

@@ -3,7 +3,6 @@
const common = require('../common');
if (!common.hasCrypto)
common.skip('missing crypto');
if (common.isWindows) return; // TODO: BUN
const fixtures = require('../common/fixtures');
const assert = require('assert');
const http2 = require('http2');

View File

@@ -2,7 +2,6 @@
// Fixes: https://github.com/nodejs/node/issues/42713
const common = require('../common');
if (common.isWindows) return; // TODO: BUN
if (!common.hasCrypto) {
common.skip('missing crypto');
}

View File

@@ -1,6 +1,5 @@
'use strict';
const common = require('../common');
if (common.isWindows) return; // TODO: BUN
if (!common.isWindows)
common.skip('this test is Windows-specific.');