Compare commits

...

3 Commits

Author SHA1 Message Date
autofix-ci[bot]
dc77d9d12e [autofix.ci] apply automated fixes 2025-10-23 20:25:59 +00:00
Marko Vejnovic
a394e208bb test: Add npm_package_arg.zig tests 2025-10-23 13:22:19 -07:00
Marko Vejnovic
eca85aa15d feat(install): Match resolution with npm-package-arg 2025-10-23 13:21:50 -07:00
37 changed files with 8322 additions and 1311 deletions

View File

@@ -624,6 +624,22 @@ extern "C" BunString URL__getHrefJoin(BunString* baseStr, BunString* relativeStr
return Bun::toStringRef(url.string());
}
// Returns the URL's fragment including the leading '#', or the empty
// string when the URL has no fragment.
extern "C" BunString URL__hash(WTF::URL* url)
{
    if (url->fragmentIdentifier().isEmpty())
        return Bun::toStringRef(emptyString());
    return Bun::toStringRef(url->fragmentIdentifierWithLeadingNumberSign().toString());
}
// Returns the URL's fragment WITHOUT the leading '#', or the empty
// string when the URL has no fragment.
extern "C" BunString URL__fragmentIdentifier(WTF::URL* url)
{
    if (url->fragmentIdentifier().isEmpty())
        return Bun::toStringRef(emptyString());
    return Bun::toStringRef(url->fragmentIdentifier().toString());
}
extern "C" WTF::URL* URL__fromString(BunString* input)
{
auto&& str = input->toWTFString();

View File

@@ -16,6 +16,20 @@ pub const URL = opaque {
extern fn URL__getFileURLString(*String) String;
extern fn URL__getHrefJoin(*String, *String) String;
extern fn URL__pathFromFileURL(*String) String;
extern fn URL__hash(*URL) String;
extern fn URL__fragmentIdentifier(*URL) String;

/// Returns the URL's fragment, including the leading '#'.
/// The C++ side returns an empty string when there is no fragment.
pub fn hash(url: *URL) String {
    jsc.markBinding(@src());
    return URL__hash(url);
}

/// Exactly the same as `hash`, excluding the leading '#'.
pub fn fragmentIdentifier(url: *URL) String {
    jsc.markBinding(@src());
    return URL__fragmentIdentifier(url);
}
pub fn hrefFromString(str: bun.String) String {
jsc.markBinding(@src());

View File

@@ -1210,7 +1210,7 @@ pub const PackCommand = struct {
const package_name_expr: Expr = json.root.get("name") orelse return error.MissingPackageName;
const package_name = try package_name_expr.asStringCloned(ctx.allocator) orelse return error.InvalidPackageName;
if (comptime for_publish) {
const is_scoped = try Dependency.isScopedPackageName(package_name);
const is_scoped = try Dependency.NpaBridge.isScopedPackageName(package_name);
if (manager.options.publish_config.access) |access| {
if (access == .restricted and !is_scoped) {
return error.RestrictedUnscopedPackage;

View File

@@ -167,7 +167,7 @@ pub const PublishCommand = struct {
}
const name = try json.getStringCloned(ctx.allocator, "name") orelse return error.MissingPackageName;
const is_scoped = try Dependency.isScopedPackageName(name);
const is_scoped = try Dependency.NpaBridge.isScopedPackageName(name);
if (manager.options.publish_config.access) |access| {
if (access == .restricted and !is_scoped) {

View File

@@ -299,6 +299,17 @@ pub fn ComptimeStringMapWithKeyType(comptime KeyType: type, comptime V: type, co
return null;
}
/// Reverse lookup: returns the first key (in definition order) that maps to
/// `value`, or null when no entry has that value.
///
/// O(n) over the comptime key/value list.
pub fn getKey(value: V) ?[]const KeyType {
    inline for (kvs) |entry| {
        if (entry.value == value) {
            return entry.key;
        }
    }
    return null;
}
};
}

View File

@@ -213,7 +213,7 @@ pub fn editUpdateNoArgs(
if (value.data != .e_string) continue;
const version_literal = try value.asStringCloned(allocator) orelse bun.outOfMemory();
var tag = Dependency.Version.Tag.infer(version_literal);
var tag = Dependency.NpaBridge.inferTag(version_literal);
// only updating dependencies with npm versions, dist-tags if `--latest`, and catalog versions.
if (tag != .npm and (tag != .dist_tag or !manager.options.do.update_to_latest) and tag != .catalog) continue;
@@ -223,7 +223,7 @@ pub fn editUpdateNoArgs(
// negative because the real package might have a scope
// e.g. "dep": "npm:@foo/bar@1.2.3"
if (strings.lastIndexOfChar(version_literal, '@')) |at_index| {
tag = Dependency.Version.Tag.infer(version_literal[at_index + 1 ..]);
tag = Dependency.NpaBridge.inferTag(version_literal[at_index + 1 ..]);
if (tag != .npm and (tag != .dist_tag or !manager.options.do.update_to_latest) and tag != .catalog) continue;
alias_at_index = at_index;
}
@@ -419,7 +419,7 @@ pub fn edit(
} else {
if (manager.subcommand == .update and options.before_install) add_packages_to_update: {
const version_literal = try value.expr.asStringCloned(allocator) orelse break :add_packages_to_update;
var tag = Dependency.Version.Tag.infer(version_literal);
var tag = Dependency.NpaBridge.inferTag(version_literal);
if (tag != .npm and tag != .dist_tag) break :add_packages_to_update;
@@ -431,7 +431,7 @@ pub fn edit(
var is_alias = false;
if (strings.hasPrefixComptime(strings.trim(version_literal, &strings.whitespace_chars), "npm:")) {
if (strings.lastIndexOfChar(version_literal, '@')) |at_index| {
tag = Dependency.Version.Tag.infer(version_literal[at_index + 1 ..]);
tag = Dependency.NpaBridge.inferTag(version_literal[at_index + 1 ..]);
if (tag != .npm and tag != .dist_tag) break :add_packages_to_update;
is_alias = true;
}

View File

@@ -143,7 +143,7 @@ fn parseWithError(
var value = input;
var alias: ?string = null;
if (!Dependency.isTarball(input) and strings.isNPMPackageName(input)) {
if (!Dependency.NpaBridge.isTarballPath(input) and strings.isNPMPackageName(input)) {
alias = input;
value = input[input.len..];
} else if (input.len > 1) {
@@ -157,7 +157,7 @@ fn parseWithError(
}
const placeholder = String.from("@@@");
var version = Dependency.parseWithOptionalTag(
var version = Dependency.NpaBridge.parseWithOptionalTag(
allocator,
if (alias) |name| String.init(input, name) else placeholder,
if (alias) |name| String.Builder.stringHash(name) else null,
@@ -180,7 +180,7 @@ fn parseWithError(
return error.UnrecognizedDependencyFormat;
};
if (alias != null and version.tag == .git) {
if (Dependency.parseWithOptionalTag(
if (Dependency.NpaBridge.parseWithOptionalTag(
allocator,
placeholder,
null,

View File

@@ -893,7 +893,7 @@ pub const Bin = extern struct {
// for normalizing `target`
const abs_target = path.joinAbsStringZ(package_dir, &.{target}, .auto);
const unscoped_package_name = Dependency.unscopedPackageName(this.package_name.slice());
const unscoped_package_name = Dependency.NpaBridge.unscopedPackageName(this.package_name.slice());
@memcpy(abs_dest_buf_remain[0..unscoped_package_name.len], unscoped_package_name);
abs_dest_buf_remain = abs_dest_buf_remain[unscoped_package_name.len..];
abs_dest_buf_remain[0] = 0;
@@ -995,7 +995,7 @@ pub const Bin = extern struct {
switch (this.bin.tag) {
.none => {},
.file => {
const unscoped_package_name = Dependency.unscopedPackageName(this.package_name.slice());
const unscoped_package_name = Dependency.NpaBridge.unscopedPackageName(this.package_name.slice());
@memcpy(abs_dest_buf_remain[0..unscoped_package_name.len], unscoped_package_name);
abs_dest_buf_remain = abs_dest_buf_remain[unscoped_package_name.len..];
abs_dest_buf_remain[0] = 0;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1000,7 +1000,7 @@ pub fn Package(comptime SemverIntType: type) type {
) !?Dependency {
const external_version = brk: {
if (comptime Environment.isWindows) {
switch (tag orelse Dependency.Version.Tag.infer(version)) {
switch (tag orelse Dependency.NpaBridge.inferTag(version)) {
.workspace, .folder, .symlink, .tarball => {
if (String.canInline(version)) {
var copy = string_builder.append(String, version);
@@ -1023,7 +1023,7 @@ pub fn Package(comptime SemverIntType: type) type {
const buf = lockfile.buffers.string_bytes.items;
const sliced = external_version.sliced(buf);
var dependency_version = Dependency.parseWithOptionalTag(
var dependency_version = Dependency.NpaBridge.parseWithOptionalTag(
allocator,
external_alias.value,
external_alias.hash,
@@ -1092,7 +1092,7 @@ pub fn Package(comptime SemverIntType: type) type {
if (workspace_version != null) {
if (pm.options.link_workspace_packages and npm.version.satisfies(workspace_version.?, buf, buf)) {
const path = workspace_path.?.sliced(buf);
if (Dependency.parseWithTag(
if (Dependency.NpaBridge.parseWithKnownTag(
allocator,
external_alias.value,
external_alias.hash,
@@ -1511,7 +1511,7 @@ pub fn Package(comptime SemverIntType: type) type {
string_builder.count(value);
// If it's a folder or workspace, pessimistically assume we will need a maximum path
switch (Dependency.Version.Tag.infer(value)) {
switch (Dependency.NpaBridge.inferTag(value)) {
.folder, .workspace => string_builder.cap += bun.MAX_PATH_BYTES,
else => {},
}

View File

@@ -838,11 +838,11 @@ pub fn migrateNPMLockfile(
const dep_resolved: string = dep_resolved: {
if (dep_pkg.get("resolved")) |resolved| {
const dep_resolved = resolved.asString(this.allocator) orelse return error.InvalidNPMLockfile;
switch (Dependency.Version.Tag.infer(dep_resolved)) {
switch (Dependency.NpaBridge.inferTag(dep_resolved)) {
.git, .github => |tag| {
const dep_resolved_str = try string_buf.append(dep_resolved);
const dep_resolved_sliced = dep_resolved_str.sliced(string_buf.bytes.items);
res_version = Dependency.parseWithTag(
res_version = Dependency.NpaBridge.parseWithKnownTag(
this.allocator,
dep_name,
name_hash,

File diff suppressed because it is too large Load Diff

View File

@@ -299,7 +299,7 @@ pub const Repository = extern struct {
try writer.print("{}", .{this.repo.owner.fmtStorePath(this.string_buf)});
// try writer.writeByte(if (this.opts.replace_slashes) '+' else '/');
try writer.writeByte('+');
} else if (Dependency.isSCPLikePath(this.repo.repo.slice(this.string_buf))) {
} else if (Dependency.NpaBridge.isGitSCPPath(this.repo.repo.slice(this.string_buf))) {
// try writer.print("ssh:{s}", .{if (this.opts.replace_slashes) "++" else "//"});
try writer.writeAll("ssh++");
}
@@ -340,7 +340,7 @@ pub const Repository = extern struct {
if (!formatter.repository.owner.isEmpty()) {
try writer.writeAll(formatter.repository.owner.slice(formatter.buf));
try writer.writeAll("/");
} else if (Dependency.isSCPLikePath(repo)) {
} else if (Dependency.NpaBridge.isGitSCPPath(repo)) {
try writer.writeAll("ssh://");
}
try writer.writeAll(repo);
@@ -410,7 +410,7 @@ pub const Repository = extern struct {
return url;
}
if (Dependency.isSCPLikePath(url)) {
if (Dependency.NpaBridge.isGitSCPPath(url)) {
ssh_path_buf[0.."ssh://git@".len].* = "ssh://git@".*;
var rest = ssh_path_buf["ssh://git@".len..];
@@ -449,7 +449,7 @@ pub const Repository = extern struct {
return out;
}
if (Dependency.isSCPLikePath(url)) {
if (Dependency.NpaBridge.isGitSCPPath(url)) {
final_path_buf[0.."https://".len].* = "https://".*;
var rest = final_path_buf["https://".len..];

View File

@@ -47,11 +47,11 @@ pub fn ResolutionType(comptime SemverIntType: type) type {
return This.init(.{ .folder = try string_buf.append(folder) });
}
return switch (Dependency.Version.Tag.infer(res_str)) {
return switch (Dependency.NpaBridge.inferTag(res_str)) {
.git => This.init(.{ .git = try Repository.parseAppendGit(res_str, string_buf) }),
.github => This.init(.{ .github = try Repository.parseAppendGithub(res_str, string_buf) }),
.tarball => {
if (Dependency.isRemoteTarball(res_str)) {
if (Dependency.NpaBridge.isRemoteUrl(res_str)) {
return This.init(.{ .remote_tarball = try string_buf.append(res_str) });
}
@@ -137,11 +137,11 @@ pub fn ResolutionType(comptime SemverIntType: type) type {
return This.init(.{ .folder = try string_buf.append(path) });
}
return switch (Dependency.Version.Tag.infer(res_str)) {
return switch (Dependency.NpaBridge.inferTag(res_str)) {
.git => This.init(.{ .git = try Repository.parseAppendGit(res_str, string_buf) }),
.github => This.init(.{ .github = try Repository.parseAppendGithub(res_str, string_buf) }),
.tarball => {
if (Dependency.isRemoteTarball(res_str)) {
if (Dependency.NpaBridge.isRemoteUrl(res_str)) {
return This.init(.{ .remote_tarball = try string_buf.append(res_str) });
}
return This.init(.{ .local_tarball = try string_buf.append(res_str) });

View File

@@ -0,0 +1,237 @@
//! This module provides the functionality offered by
//! https://github.com/npm/validate-npm-package-name.
//!
//! It deviates from the original implementation in its string error messages. The difference is
//! that the original implementation is able to preserve the original name of the package in its
//! error messages while ours is not. This is a conscious decision to avoid allocations and it is
//! documented in the JS tests.
extern fn ModuleLoader__isBuiltin(data: [*]const u8, len: usize) bool;
/// The maximum length of a package name, according the upstream npm implementation.
pub const max_package_name_length = 214;
/// Hard validation failures: a name carrying any of these is never a valid
/// npm package name (old or new).
pub const Error = enum {
    empty_name,
    name_starts_with_dot,
    name_starts_with_underscore,
    name_surrounded_with_spaces,
    illegal_name,
    name_contains_non_url_safe_characters,

    /// Human-readable description of this error.
    ///
    /// TODO(markovejnovic): These errors are not as rich as npm's. Namely,
    /// npm-validate-package-name will preserve the original name and show it
    /// in the error -- "<name> is not allowed", etc.
    pub fn message(self: Error) []const u8 {
        switch (self) {
            .empty_name => return "name length must be greater than zero",
            .name_starts_with_dot => return "name cannot start with a period",
            .name_starts_with_underscore => return "name cannot start with an underscore",
            .name_surrounded_with_spaces => return "name cannot contain leading or trailing spaces",
            .illegal_name => return "name is not allowed",
            .name_contains_non_url_safe_characters => return "name can only contain URL-friendly characters",
        }
    }
};
/// Soft failures: these do not invalidate an already-published package name,
/// but newly-published packages must not carry any of them.
pub const Warning = enum {
    core_module_name_conflict,
    name_too_long,
    name_contains_uppercase_letters,
    name_contains_special_characters,

    /// Human-readable description of this warning.
    ///
    /// NOTE: unlike npm's implementation, the offending name itself is not
    /// embedded in the message (see the module-level doc comment).
    pub fn message(self: Warning) []const u8 {
        return switch (self) {
            // Grammar fix: "conflicts a" -> "conflicts with a".
            .core_module_name_conflict => "name conflicts with a core module name",
            .name_too_long => std.fmt.comptimePrint(
                "name can no longer contain more than {} characters",
                .{max_package_name_length},
            ),
            .name_contains_uppercase_letters => "name can no longer contain capital letters",
            .name_contains_special_characters => "name can no longer contain special characters (\"~'!()*\")",
        };
    }
};
/// Aggregated outcome of `validate`.
pub const ValidationResult = struct {
    errors: std.EnumSet(Error),
    warnings: std.EnumSet(Warning),

    /// New packages must be completely clean: no errors and no warnings.
    pub fn validForNewPackages(self: *const ValidationResult) bool {
        if (self.errors.count() != 0) return false;
        return self.warnings.count() == 0;
    }

    /// Already-published packages are grandfathered past warnings; only hard
    /// errors disqualify them.
    pub fn validForOldPackages(self: *const ValidationResult) bool {
        return self.errors.count() == 0;
    }
};
const exclusion_list = [_][]const u8{
"node_modules",
"favicon.ico",
};
/// Validates `name` against npm's package-name rules, mirroring
/// https://github.com/npm/validate-npm-package-name.
///
/// Hard failures land in `.errors`; legacy-only problems (allowed for
/// already-published packages) land in `.warnings`.
pub fn validate(name: []const u8) ValidationResult {
    var result: ValidationResult = .{
        .errors = std.EnumSet(Error).initEmpty(),
        .warnings = std.EnumSet(Warning).initEmpty(),
    };
    if (name.len == 0) {
        result.errors.insert(Error.empty_name);
    } else {
        // Leading '.' and '_' are forbidden by npm.
        switch (name[0]) {
            '.' => {
                result.errors.insert(Error.name_starts_with_dot);
            },
            '_' => {
                result.errors.insert(Error.name_starts_with_underscore);
            },
            else => {},
        }
    }
    // Surrounding whitespace yields two errors: the whitespace itself, and
    // the fact that whitespace is not URL-safe.
    if (!bun.strings.eql(bun.strings.trimSpaces(name), name)) {
        result.errors.insert(Error.name_surrounded_with_spaces);
        result.errors.insert(Error.name_contains_non_url_safe_characters);
    }
    // Names colliding with well-known filesystem entries are banned outright.
    for (exclusion_list) |excluded| {
        if (bun.strings.eqlCaseInsensitiveASCII(name, excluded, true)) {
            result.errors.insert(Error.illegal_name);
            break;
        }
    }
    // Shadowing a Node.js core module is legal but warned about.
    if (ModuleLoader__isBuiltin(name.ptr, name.len)) {
        result.warnings.insert(Warning.core_module_name_conflict);
    }
    if (name.len > max_package_name_length) {
        result.warnings.insert(Warning.name_too_long);
    }
    if (bun.strings.containsUppercaseAscii(name)) {
        result.warnings.insert(Warning.name_contains_uppercase_letters);
    }
    // Only the final path segment (the package part of a scoped name) is
    // checked for npm's "special characters" warning.
    const last_segment = if (bun.strings.lastIndexOfChar(name, '/')) |idx|
        name[idx + 1 ..]
    else
        name;
    if (bun.strings.indexOfAny(last_segment, "~'!()*") != null) {
        result.warnings.insert(Warning.name_contains_special_characters);
    }
    if (ScopedPackage.init(name)) |scoped| {
        // "@scope/.pkg": the leading-dot rule also applies to the package part.
        if (scoped.pkg.len > 0 and scoped.pkg[0] == '.') {
            result.errors.insert(Error.name_starts_with_dot);
        }
        // Both halves of a scoped name must be URL-safe; ' ' and ':' are
        // checked explicitly in addition to the general URL-encode probe.
        if (bun.strings.indexOfNeedsURLEncode(scoped.user) != null or
            bun.strings.indexOfNeedsURLEncode(scoped.pkg) != null or
            bun.strings.indexOfChar(scoped.user, ' ') != null or
            bun.strings.indexOfChar(scoped.pkg, ' ') != null or
            bun.strings.indexOfChar(scoped.user, ':') != null or
            bun.strings.indexOfChar(scoped.pkg, ':') != null)
        {
            result.errors.insert(Error.name_contains_non_url_safe_characters);
        }
    } else {
        // Unscoped names may not contain '/' at all.
        if (bun.strings.indexOfChar(name, '/') != null) {
            result.errors.insert(Error.name_contains_non_url_safe_characters);
        }
        if (bun.strings.indexOfNeedsURLEncode(name) != null or
            bun.strings.indexOfChar(name, ' ') != null or
            bun.strings.indexOfChar(name, ':') != null)
        {
            result.errors.insert(Error.name_contains_non_url_safe_characters);
        }
    }
    return result;
}
/// A scoped package name split into its "@user/pkg" halves.
const ScopedPackage = struct {
    user: []const u8,
    pkg: []const u8,

    /// Splits `name` into scope and package parts, or returns null when
    /// `name` is not a well-formed scoped name: it must start with '@',
    /// contain exactly one '/', and have non-empty user and pkg parts.
    fn init(name: []const u8) ?ScopedPackage {
        // Shortest possible scoped name is "@a/b" (4 bytes).
        if (name.len < 4) return null;
        if (name[0] != '@') return null;
        const sep = bun.strings.indexOfChar(name, '/') orelse return null;
        // "@/x" (empty user) and "@x/" (empty pkg) are invalid.
        if (sep == 1) return null;
        if (sep >= name.len - 1) return null;
        // A second '/' means this is not a plain "@user/pkg" name.
        if (bun.strings.indexOfCharPos(name, '/', sep + 1) != null) return null;
        return .{
            .user = name[1..sep],
            .pkg = name[sep + 1 ..],
        };
    }
};
/// JS bindings exposed only to Bun's test suite.
pub const TestingAPIs = struct {
    /// JS: validate(name: string) -> result object (see validationResultToJS).
    pub fn jsValidate(go: *jsc.JSGlobalObject, cf: *jsc.CallFrame) bun.JSError!jsc.JSValue {
        if (cf.argumentsCount() < 1) {
            return go.throw("validate() requires 1 argument", .{});
        }
        const name_arg = cf.argument(0);
        const name_str = try name_arg.toBunString(go);
        defer name_str.deref();
        // Validation operates on UTF-8 bytes; convert once and free on exit.
        const name_utf8 = name_str.toUTF8(bun.default_allocator);
        defer name_utf8.deinit();
        const result = validate(name_utf8.slice());
        return try validationResultToJS(go, result);
    }

    /// Builds a validate-npm-package-name-shaped result object: the two
    /// booleans are always present; `errors`/`warnings` message arrays are
    /// attached only when non-empty.
    fn validationResultToJS(
        go: *jsc.JSGlobalObject,
        result: ValidationResult,
    ) bun.JSError!jsc.JSValue {
        const obj = jsc.JSValue.createEmptyObject(go, 4);
        obj.put(
            go,
            bun.String.static("validForNewPackages"),
            jsc.JSValue.jsBoolean(result.validForNewPackages()),
        );
        obj.put(
            go,
            bun.String.static("validForOldPackages"),
            jsc.JSValue.jsBoolean(result.validForOldPackages()),
        );
        if (result.errors.count() != 0) {
            const errors_array = try jsc.JSValue.createEmptyArray(go, 0);
            var it = result.errors.iterator();
            while (it.next()) |err| {
                try errors_array.push(go, bun.String.fromBytes(err.message()).toJS(go));
            }
            obj.put(go, bun.String.static("errors"), errors_array);
        }
        if (result.warnings.count() != 0) {
            const warnings_array = try jsc.JSValue.createEmptyArray(go, 0);
            var it = result.warnings.iterator();
            while (it.next()) |warn| {
                try warnings_array.push(go, bun.String.fromBytes(warn.message()).toJS(go));
            }
            obj.put(go, bun.String.static("warnings"), warnings_array);
        }
        return obj;
    }
};
const std = @import("std");
const bun = @import("bun");
const jsc = bun.jsc;

View File

@@ -138,10 +138,6 @@ pub const YarnLock = struct {
return strings.hasPrefixComptime(version, "npm:");
}
pub fn isRemoteTarball(version: []const u8) bool {
return strings.hasPrefixComptime(version, "https://") and strings.endsWithComptime(version, ".tgz");
}
pub fn isWorkspaceDependency(version: []const u8) bool {
return strings.hasPrefixComptime(version, "workspace:") or
strings.eqlComptime(version, "*");
@@ -385,7 +381,7 @@ pub const YarnLock = struct {
} else if (Entry.isNpmAlias(value)) {
const alias_info = Entry.parseNpmAlias(value);
current_entry.?.version = alias_info.version;
} else if (Entry.isRemoteTarball(value)) {
} else if (Dependency.NpaBridge.isRemoteUrl(value) and strings.endsWithComptime(value, ".tgz")) {
current_entry.?.resolved = value;
}
} else if (strings.eqlComptime(key, "resolved")) {
@@ -861,7 +857,7 @@ pub fn migrateYarnLockfile(
if (entry.commit != null and entry.git_repo_name != null) {
break :blk entry.git_repo_name.?;
} else if (entry.resolved) |resolved| {
if (is_direct_url_dep or YarnLock.Entry.isRemoteTarball(resolved) or strings.endsWithComptime(resolved, ".tgz")) {
if (is_direct_url_dep or Dependency.NpaBridge.isRemoteUrl(resolved) or strings.endsWithComptime(resolved, ".tgz")) {
// https://registry.npmjs.org/package/-/package-version.tgz
if (strings.contains(resolved, "registry.npmjs.org/") or strings.contains(resolved, "registry.yarnpkg.com/")) {
if (strings.indexOf(resolved, "/-/")) |separator_idx| {
@@ -945,7 +941,7 @@ pub fn migrateYarnLockfile(
});
}
if (YarnLock.Entry.isRemoteTarball(resolved)) {
if (Dependency.NpaBridge.isRemoteUrl(resolved)) {
break :blk Resolution.init(.{
.remote_tarball = try string_buf.append(resolved),
});

View File

@@ -120,10 +120,6 @@ export const npm_manifest_test_helpers = $zig("npm.zig", "PackageManifest.bindin
parseManifest: (manifestFileName: string, registryUrl: string) => any;
};
// Like npm-package-arg, sort of https://www.npmjs.com/package/npm-package-arg
export type Dependency = any;
export const npa: (name: string) => Dependency = $newZigFunction("dependency.zig", "fromJS", 1);
export const npmTag: (
name: string,
) => undefined | "npm" | "dist_tag" | "tarball" | "folder" | "symlink" | "workspace" | "git" | "github" =
@@ -210,3 +206,17 @@ export const structuredCloneAdvanced: (
) => any = $newCppFunction("StructuredClone.cpp", "jsFunctionStructuredCloneAdvanced", 5);
export const lsanDoLeakCheck = $newCppFunction("InternalForTesting.cpp", "jsFunction_lsanDoLeakCheck", 1);
// Test-only Zig bindings for hosted-git-info URL parsing (hosted_git_info.zig).
export const hostedGitInfo = {
  parseUrl: $newZigFunction("hosted_git_info.zig", "TestingAPIs.jsParseUrl", 1),
  fromUrl: $newZigFunction("hosted_git_info.zig", "TestingAPIs.jsFromUrl", 1),
};
// Test-only Zig bindings for npm-package-arg-style spec parsing/resolution.
export const Npa = {
  npa: $newZigFunction("npm_package_arg.zig", "TestingAPIs.jsNpa", 3),
  resolve: $newZigFunction("npm_package_arg.zig", "TestingAPIs.jsResolve", 4),
};
// Test-only Zig binding for validate-npm-package-name-compatible validation.
export const ValidateNpmPackageName = {
  validate: $newZigFunction("validate_npm_package_name.zig", "TestingAPIs.jsValidate", 1),
};

View File

@@ -963,6 +963,15 @@ pub fn Path(comptime opts: Options) type {
};
}
/// Whether `input_path` contains the platform's preferred path separator
/// ('\' on Windows, '/' elsewhere).
///
/// NOTE(review): on Windows only '\' is treated as a separator here even
/// though '/' is also valid in Windows paths -- confirm callers expect that.
pub fn hasPlatformPathSeparators(input_path: []const u8) bool {
    const platform_sep: u8 = if (Environment.isWindows) '\\' else '/';
    return bun.strings.containsChar(input_path, platform_sep);
}
const std = @import("std");
const bun = @import("bun");

View File

@@ -909,11 +909,11 @@ pub const PackageJSON = struct {
// // if there is a name & version, check if the lockfile has the package
if (package_json.name.len > 0 and package_json.version.len > 0) {
if (r.package_manager) |pm| {
const tag = Dependency.Version.Tag.infer(package_json.version);
const tag = Dependency.NpaBridge.inferTag(package_json.version);
if (tag == .npm) {
const sliced = Semver.SlicedString.init(package_json.version, package_json.version);
if (Dependency.parseWithTag(
if (Dependency.NpaBridge.parseWithKnownTag(
allocator,
String.init(package_json.name, package_json.name),
String.Builder.stringHash(package_json.name),
@@ -2166,4 +2166,3 @@ const api = bun.schema.api;
const Semver = bun.Semver;
const String = Semver.String;
const Version = Semver.Version;

View File

@@ -37,6 +37,15 @@ inline fn nqlAtIndexCaseInsensitive(comptime string_count: comptime_int, index:
return false;
}
/// Whether `input_path` contains the platform's preferred path separator
/// ('\' on Windows, '/' elsewhere).
///
/// NOTE(review): on Windows only '\' is treated as a separator here even
/// though '/' is also valid in Windows paths -- confirm callers expect that.
pub fn hasPlatformPathSeparators(input_path: []const u8) bool {
    const platform_sep: u8 = if (bun.Environment.isWindows) '\\' else '/';
    return bun.strings.containsChar(input_path, platform_sep);
}
const IsSeparatorFunc = fn (char: u8) bool;
const IsSeparatorFuncT = fn (comptime T: type, char: anytype) bool;
const LastSeparatorFunction = fn (slice: []const u8) ?usize;

View File

@@ -229,7 +229,17 @@ pub const Group = struct {
pub const FlagsBitSet = bun.bit_set.IntegerBitSet(3);
/// True when this group pins exactly one version: a single comparator whose
/// left side is `==` and whose right side is unset, with no further list
/// entries chained on either the inner list or the group.
///
/// Fix: the pre-refactor one-line `return` had been left in place above the
/// new implementation, making everything after it unreachable; removed it.
pub fn isExact(this: *const Group) bool {
    const range = this.head.head.range;
    return range.left.op == .eql and
        range.right.op == .unset and
        this.head.head.next == null and
        this.head.tail == null and
        this.head.next == null and
        this.tail == null;
}
pub fn isEmpty(this: *const Group) bool {
return !this.head.head.range.hasLeft() and !this.head.head.range.hasRight();
}
pub fn @"is *"(this: *const Group) bool {

View File

@@ -414,6 +414,12 @@ pub fn indexOfSigned(self: string, str: string) i32 {
return @as(i32, @intCast(i));
}
/// Last index of `char` within the prefix of `in` that precedes the FIRST
/// occurrence of `before` (the whole string when `before` is absent).
pub fn lastIndexBeforeChar(in: []const u8, char: u8, before: u8) ?usize {
    const limit = if (indexOfChar(in, before)) |pos| pos else in.len;
    return lastIndexOfChar(in[0..limit], char);
}
pub fn lastIndexOfChar(self: []const u8, char: u8) callconv(bun.callconv_inline) ?usize {
if (comptime Environment.isLinux) {
if (@inComptime()) {
@@ -467,6 +473,12 @@ pub fn indexOfT(comptime T: type, haystack: []const T, needle: []const T) ?usize
return std.mem.indexOf(T, haystack, needle);
}
/// Bounds-checked indexing: returns `haystack[idx]`, or null when `idx` is
/// out of range.
pub fn charAtT(comptime T: type, haystack: []const T, idx: usize) ?T {
    return if (idx < haystack.len) haystack[idx] else null;
}
pub fn split(self: string, delimiter: string) SplitIterator {
return SplitIterator{
.buffer = self,
@@ -1013,6 +1025,15 @@ pub fn hasPrefixCaseInsensitive(str: []const u8, prefix: []const u8) bool {
return hasPrefixCaseInsensitiveT(u8, str, prefix);
}
/// Whether `str` ends with `suffix`, compared case-insensitively.
pub fn endsWithCaseInsensitiveT(comptime T: type, str: []const T, suffix: []const u8) bool {
    if (suffix.len > str.len) return false;
    const tail = str[str.len - suffix.len ..];
    return eqlCaseInsensitiveT(T, tail, suffix);
}

/// u8 convenience wrapper over `endsWithCaseInsensitiveT`.
pub fn endsWithCaseInsensitive(str: []const u8, suffix: []const u8) bool {
    return endsWithCaseInsensitiveT(u8, str, suffix);
}
pub fn eqlLongT(comptime T: type, a_str: []const T, b_str: []const T, comptime check_len: bool) bool {
if (comptime check_len) {
const len = b_str.len;
@@ -1132,6 +1153,15 @@ pub fn index(self: string, str: string) i32 {
}
}
/// Returns the sub-slice of `self` from `start` (default 0) up to but not
/// including `stop` (default `self.len`).
/// Both bounds are clamped to `self.len`, so out-of-range values shorten the
/// result instead of panicking.
/// NOTE(review): a `start` greater than `stop` (after clamping) still trips
/// the slice bounds safety check -- confirm callers guarantee start <= stop.
pub fn substring(self: anytype, start: ?usize, stop: ?usize) @TypeOf(self) {
    const sta = start orelse 0;
    const sto = stop orelse self.len;
    return self[@min(sta, self.len)..@min(sto, self.len)];
}
pub const ascii_vector_size = if (Environment.isWasm) 8 else 16;
pub const ascii_u16_vector_size = if (Environment.isWasm) 4 else 8;
pub const AsciiVectorInt = std.meta.Int(.unsigned, ascii_vector_size);
@@ -1528,6 +1558,21 @@ pub fn trimLeadingChar(slice: []const u8, char: u8) []const u8 {
return "";
}
/// Count leading consecutive occurrences of `char` at the start of `slice`.
/// ```zig
/// countLeadingChar("///foo", '/') -> 3
/// countLeadingChar("foo", '/') -> 0
/// ```
pub fn countLeadingChar(slice: []const u8, char: u8) usize {
    var i: usize = 0;
    while (i < slice.len and slice[i] == char) : (i += 1) {}
    return i;
}
/// Trim leading pattern of 2 bytes
///
/// e.g.
@@ -1553,6 +1598,14 @@ pub fn trimPrefixComptime(comptime T: type, buffer: []const T, comptime prefix:
buffer;
}
/// Strips a single trailing `suffix` when present; otherwise returns the
/// buffer unchanged. `suffix` is of type []const u8 or []const u16.
pub fn trimSuffixComptime(buffer: []const u8, comptime suffix: anytype) []const u8 {
    if (!hasSuffixComptime(buffer, suffix)) return buffer;
    return buffer[0 .. buffer.len - suffix.len];
}
/// Get the line number and the byte offsets of `line_range_count` above the desired line number
/// The final element is the end index of the desired line
const LineRange = struct {
@@ -1759,12 +1812,24 @@ pub fn trim(slice: anytype, comptime values_to_strip: []const u8) @TypeOf(slice)
return slice[begin..end];
}
/// Trims ASCII whitespace (space, \t, \n, \r, VT, FF -- see
/// `whitespace_chars`) from both ends of `slice`.
pub fn trimSpaces(slice: anytype) @TypeOf(slice) {
    return trim(slice, &whitespace_chars);
}
/// True when every byte of `slice` is in `whitespace_chars`.
/// Vacuously true for the empty slice.
pub fn isAllWhitespace(slice: []const u8) bool {
    for (slice) |c| {
        if (std.mem.indexOfScalar(u8, &whitespace_chars, c) == null) return false;
    }
    return true;
}
// TODO(markovejnovic): Could be SIMD
/// Whether `slice` contains at least one ASCII uppercase letter ('A'..'Z').
pub fn containsUppercaseAscii(slice: []const u8) bool {
    for (slice) |c| {
        if (std.ascii.isUpper(c)) return true;
    }
    return false;
}
pub const whitespace_chars = [_]u8{ ' ', '\t', '\n', '\r', std.ascii.control_code.vt, std.ascii.control_code.ff };
pub fn lengthOfLeadingWhitespaceASCII(slice: string) usize {
@@ -2020,7 +2085,7 @@ pub fn concatWithLength(
allocator: std.mem.Allocator,
args: []const string,
length: usize,
) ![]u8 {
) bun.OOM![]u8 {
const out = try allocator.alloc(u8, length);
var remain = out;
for (args) |arg| {
@@ -2034,7 +2099,7 @@ pub fn concatWithLength(
pub fn concat(
allocator: std.mem.Allocator,
args: []const string,
) ![]u8 {
) bun.OOM![]u8 {
var length: usize = 0;
for (args) |arg| {
length += arg.len;
@@ -2135,6 +2200,48 @@ fn QuoteEscapeFormat(comptime flags: QuoteEscapeFormatFlags) type {
};
}
/// Manages a slice of an owned buffer, useful for avoiding re-allocations when only a portion of
/// an allocated buffer is needed.
///
/// Example: Parsing "123 Main St" where only "Main St" is needed but the entire
/// string was allocated. SlicedBuffer owns the full buffer while exposing only
/// the relevant slice.
///
/// Fixes vs. original: `init` asserted against a nonexistent `buf` local
/// instead of the `buffer` parameter; `buf()` returned the method itself
/// (`self.buf`) instead of the `#buf` field; `deinit` referenced
/// `self.allocator`/`self.buf` instead of the `#`-private fields; and the
/// `[]const u8` parameter could not be assigned to a `[]u8` field.
pub const SlicedBuffer = struct {
    /// The full allocated buffer, owned by this struct.
    #buf: []const u8,
    /// The slice of interest within `#buf`.
    #slice: []const u8,
    /// Allocator used to free `#buf` in `deinit`.
    #allocator: std.mem.Allocator,

    /// `slice` must point into `buffer`; `buffer` must have been allocated
    /// with `allocator` (it is freed in `deinit`).
    pub fn init(allocator: std.mem.Allocator, buffer: []const u8, slice: []const u8) SlicedBuffer {
        // Guard against a slice that escapes the owned buffer.
        bun.assert(@intFromPtr(slice.ptr) >= @intFromPtr(buffer.ptr) and
            @intFromPtr(slice.ptr) + slice.len <= @intFromPtr(buffer.ptr) + buffer.len);
        return .{
            .#buf = buffer,
            .#slice = slice,
            .#allocator = allocator,
        };
    }

    /// The full owned buffer (not just the slice of interest).
    pub fn buf(self: *const SlicedBuffer) []const u8 {
        return self.#buf;
    }

    /// Creates a SlicedBuffer where the slice is the entire buffer (no slicing).
    pub fn initUnsliced(allocator: std.mem.Allocator, buffer: []const u8) SlicedBuffer {
        return .{
            .#buf = buffer,
            .#slice = buffer,
            .#allocator = allocator,
        };
    }

    /// Frees the owned buffer. `Allocator.free` accepts const slices.
    pub fn deinit(self: *const SlicedBuffer) void {
        self.#allocator.free(self.#buf);
    }
};
/// Generic. Works on []const u8, []const u16, etc
pub fn indexOfScalar(input: anytype, scalar: std.meta.Child(@TypeOf(input))) callconv(bun.callconv_inline) ?usize {
if (comptime std.meta.Child(@TypeOf(input)) == u8) {
@@ -2342,7 +2449,6 @@ pub const toNTPath16 = paths_.toNTPath16;
pub const toPath = paths_.toPath;
pub const toPathMaybeDir = paths_.toPathMaybeDir;
pub const toPathNormalized = paths_.toPathNormalized;
pub const toWDirNormalized = paths_.toWDirNormalized;
pub const toWDirPath = paths_.toWDirPath;
pub const toWPath = paths_.toWPath;
pub const toWPathMaybeDir = paths_.toWPathMaybeDir;

View File

@@ -233,26 +233,6 @@ pub fn normalizeSlashesOnly(buf: []u8, utf8: []const u8, comptime desired_slash:
return normalizeSlashesOnlyT(u8, buf, utf8, desired_slash, false);
}
pub fn toWDirNormalized(wbuf: []u16, utf8: []const u8) [:0]const u16 {
var renormalized: ?*bun.PathBuffer = null;
defer if (renormalized) |r| bun.path_buffer_pool.put(r);
var path_to_use = utf8;
if (bun.strings.containsChar(utf8, '/')) {
renormalized = bun.path_buffer_pool.get();
@memcpy(renormalized.?[0..utf8.len], utf8);
for (renormalized.?[0..utf8.len]) |*c| {
if (c.* == '/') {
c.* = '\\';
}
}
path_to_use = renormalized.?[0..utf8.len];
}
return toWDirPath(wbuf, path_to_use);
}
pub fn toWPath(wbuf: []u16, utf8: []const u8) [:0]u16 {
return toWPathMaybeDir(wbuf, utf8, false);
}

View File

@@ -1,4 +1,8 @@
// This is close to WHATWG URL, but we don't want the validation errors
/// Unsafe parser. Assumes the given string is already a valid URL.
///
/// Use bun.jsc.URL for a safe, compliant URL parser.
///
/// This is close to WHATWG URL, but we don't want the validation errors.
pub const URL = struct {
const log = Output.scoped(.URL, .visible);
@@ -870,6 +874,30 @@ pub const PercentEncoding = struct {
return written;
}
/// Extracts and percent-decodes the pathname from a URL string.
/// Returns an owned slice that must be freed by the caller.
/// Returns error.InvalidURL if the URL cannot be parsed.
/// Returns error.InvalidPath if percent-decoding fails.
pub fn extractDecodedPathname(url_string: bun.String, allocator: std.mem.Allocator) ![]u8 {
    // Parse with the safe, spec-compliant parser (bun.jsc.URL).
    const parsed = bun.jsc.URL.fromString(url_string) orelse return error.InvalidURL;
    defer parsed.deinit();

    const pathname_string = parsed.pathname();
    defer pathname_string.deref();

    const pathname_utf8 = pathname_string.toUTF8(allocator);
    defer pathname_utf8.deinit();

    // Percent-decode into a growable buffer, then hand ownership to the caller.
    var decoded = std.ArrayList(u8).init(allocator);
    defer decoded.deinit();
    _ = decode(
        @TypeOf(decoded.writer()),
        decoded.writer(),
        pathname_utf8.slice(),
    ) catch return error.InvalidPath;
    return try decoded.toOwnedSlice();
}
};
pub const FormData = struct {

View File

@@ -1,397 +0,0 @@
// Bun Snapshot v1, https://bun.sh/docs/test/snapshots
exports[`npa @scoped/package 1`] = `
{
"name": "@scoped/package",
"version": {
"name": "@scoped/package",
"tag": "latest",
"type": "dist_tag",
},
}
`;
exports[`npa @scoped/package@1.0.0 1`] = `
{
"name": "@scoped/package",
"version": {
"alias": false,
"name": "@scoped/package",
"type": "npm",
"version": "==1.0.0",
},
}
`;
exports[`npa @scoped/package@1.0.0-beta.1 1`] = `
{
"name": "@scoped/package",
"version": {
"alias": false,
"name": "@scoped/package",
"type": "npm",
"version": "==1.0.0-beta.1",
},
}
`;
exports[`npa @scoped/package@1.0.0-beta.1+build.123 1`] = `
{
"name": "@scoped/package",
"version": {
"alias": false,
"name": "@scoped/package",
"type": "npm",
"version": "==1.0.0-beta.1+build.123",
},
}
`;
exports[`npa package 1`] = `
{
"name": "package",
"version": {
"name": "package",
"tag": "latest",
"type": "dist_tag",
},
}
`;
exports[`npa package@1.0.0 1`] = `
{
"name": "package",
"version": {
"alias": false,
"name": "package",
"type": "npm",
"version": "==1.0.0",
},
}
`;
exports[`npa package@1.0.0-beta.1 1`] = `
{
"name": "package",
"version": {
"alias": false,
"name": "package",
"type": "npm",
"version": "==1.0.0-beta.1",
},
}
`;
exports[`npa package@1.0.0-beta.1+build.123 1`] = `
{
"name": "package",
"version": {
"alias": false,
"name": "package",
"type": "npm",
"version": "==1.0.0-beta.1+build.123",
},
}
`;
exports[`npa bitbucket:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "bitbucket:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa bitbucket.org:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "bitbucket.org:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa bitbucket.com:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "bitbucket.com:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa git@bitbucket.org:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "git@bitbucket.org:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa foo/bar 1`] = `
{
"name": "",
"version": {
"owner": "foo",
"ref": "",
"repo": "bar",
"type": "github",
},
}
`;
exports[`npa gitlab:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "gitlab:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa gitlab.com:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "gitlab.com:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa http://localhost:5000/no-deps/-/no-deps-2.0.0.tgz 1`] = `
{
"name": "",
"version": {
"name": "",
"type": "tarball",
"url": "http://localhost:5000/no-deps/-/no-deps-2.0.0.tgz",
},
}
`;
exports[`npa https://registry.npmjs.org/no-deps/-/no-deps-2.0.0.tgz 1`] = `
{
"name": "",
"version": {
"name": "",
"type": "tarball",
"url": "https://registry.npmjs.org/no-deps/-/no-deps-2.0.0.tgz",
},
}
`;
exports[`npa file:./path/to/tarball.tgz 1`] = `
{
"name": "",
"version": {
"name": "",
"path": "./path/to/tarball.tgz",
"type": "tarball",
},
}
`;
exports[`npa ./path/to/tarball.tgz 1`] = `
{
"name": "",
"version": {
"name": "",
"path": "./path/to/tarball.tgz",
"type": "tarball",
},
}
`;
exports[`npa foo/bar 2`] = `
{
"name": "",
"version": {
"owner": "foo",
"ref": "",
"repo": "bar",
"type": "github",
},
}
`;
exports[`npa github:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "dylan-conway",
"ref": "",
"repo": "public-install-test",
"type": "github",
},
}
`;
exports[`npa git@github.com:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "git@github.com:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa https://github.com/dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "dylan-conway",
"ref": "",
"repo": "public-install-test",
"type": "github",
},
}
`;
exports[`npa https://github.com/dylan-conway/public-install-test.git 1`] = `
{
"name": "",
"version": {
"owner": "dylan-conway",
"ref": "",
"repo": "public-install-test",
"type": "github",
},
}
`;
exports[`npa https://github.com/dylan-conway/public-install-test.git#semver:^1.0.0 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "semver:^1.0.0",
"repo": "https://github.com/dylan-conway/public-install-test.git",
"type": "git",
},
}
`;
exports[`dependencies: {"foo": "1.2.3"} 1`] = `
{
"alias": false,
"name": "foo",
"type": "npm",
"version": "==1.2.3-foo",
}
`;
exports[`dependencies: {"foo": "latest"} 1`] = `
{
"name": "foo",
"tag": "latest",
"type": "dist_tag",
}
`;
exports[`dependencies: {"foo": "workspace:*"} 1`] = `
{
"name": "*foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:^1.0.0"} 1`] = `
{
"name": "^1.0.0foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:1.0.0"} 1`] = `
{
"name": "1.0.0foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:1.0.0-beta.1"} 1`] = `
{
"name": "1.0.0-beta.1foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:1.0.0-beta.1+build.123"} 1`] = `
{
"name": "1.0.0-beta.1+build.123foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:1.0.0-beta.1+build.123"} 2`] = `
{
"name": "1.0.0-beta.1+build.123foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:1.0.0-beta.1+build.123"} 3`] = `
{
"name": "1.0.0-beta.1+build.123foo",
"type": "workspace",
}
`;
exports[`dependencies: {"bar": "^1.0.0"} 1`] = `
{
"alias": false,
"name": "bar",
"type": "npm",
"version": ">=1.0.0-bar <2.0.0",
}
`;
exports[`dependencies: {"bar": "~1.0.0"} 1`] = `
{
"alias": false,
"name": "bar",
"type": "npm",
"version": ">=1.0.0-bar <1.1.0",
}
`;
exports[`dependencies: {"bar": "> 1.0.0 < 2.0.0"} 1`] = `
{
"alias": false,
"name": "bar",
"type": "npm",
"version": ">1.0.0 && <2.0.0-bar",
}
`;
exports[`dependencies: {"bar": "1.0.0 - 2.0.0"} 1`] = `
{
"alias": false,
"name": "bar",
"type": "npm",
"version": ">=1.0.0 <=2.0.0-bar",
}
`;

View File

@@ -1,70 +0,0 @@
import { npa } from "bun:internal-for-testing";
import { expect, test } from "bun:test";
const bitbucket = [
"bitbucket:dylan-conway/public-install-test",
"bitbucket.org:dylan-conway/public-install-test",
"bitbucket.com:dylan-conway/public-install-test",
"git@bitbucket.org:dylan-conway/public-install-test",
];
const tarball_remote = [
"http://localhost:5000/no-deps/-/no-deps-2.0.0.tgz",
"https://registry.npmjs.org/no-deps/-/no-deps-2.0.0.tgz",
];
const local_tarball = ["file:./path/to/tarball.tgz", "./path/to/tarball.tgz"];
const github = ["foo/bar"];
const folder = ["file:./path/to/folder"];
const gitlab = ["gitlab:dylan-conway/public-install-test", "gitlab.com:dylan-conway/public-install-test"];
const all = [
"@scoped/package",
"@scoped/package@1.0.0",
"@scoped/package@1.0.0-beta.1",
"@scoped/package@1.0.0-beta.1+build.123",
"package",
"package@1.0.0",
"package@1.0.0-beta.1",
"package@1.0.0-beta.1+build.123",
...bitbucket,
...github,
...gitlab,
...tarball_remote,
...local_tarball,
...github,
"github:dylan-conway/public-install-test",
"git@github.com:dylan-conway/public-install-test",
"https://github.com/dylan-conway/public-install-test",
"https://github.com/dylan-conway/public-install-test.git",
"https://github.com/dylan-conway/public-install-test.git#semver:^1.0.0",
];
test.each(all)("npa %s", dep => {
expect(npa(dep)).toMatchSnapshot();
});
const pkgJsonLike = [
["foo", "1.2.3"],
["foo", "latest"],
["foo", "workspace:*"],
["foo", "workspace:^1.0.0"],
["foo", "workspace:1.0.0"],
["foo", "workspace:1.0.0-beta.1"],
["foo", "workspace:1.0.0-beta.1+build.123"],
["foo", "workspace:1.0.0-beta.1+build.123"],
["foo", "workspace:1.0.0-beta.1+build.123"],
["bar", "^1.0.0"],
["bar", "~1.0.0"],
["bar", "> 1.0.0 < 2.0.0"],
["bar", "1.0.0 - 2.0.0"],
];
test.each(pkgJsonLike)('dependencies: {"%s": "%s"}', (name, version) => {
expect(npa(name, version)).toMatchSnapshot();
});
test("bad", () => {
expect(() => npa("-123!}{P}{!P#$s")).toThrow();
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,31 @@
import { hostedGitInfo } from "bun:internal-for-testing";
import { describe, expect, it } from "bun:test";
import { invalidGitUrls, validGitUrls } from "./cases";
// Table-driven port of npm's hosted-git-info fromUrl() tests: for every
// known-valid URL in the fixture sets, assert that the fields the fixture
// specifies (type, domain, user, project, committish, default) come back
// as expected. Falsy/absent fixture fields are deliberately excluded from
// the matcher via the `&&` spreads so they are not asserted on.
describe("fromUrl", () => {
describe("valid urls", () => {
describe.each(Object.entries(validGitUrls))("%s", (_, urlset: object) => {
it.each(Object.entries(urlset))("parses %s", (url, expected) => {
expect(hostedGitInfo.fromUrl(url)).toMatchObject({
...(expected.type && { type: expected.type }),
...(expected.domain && { domain: expected.domain }),
...(expected.user && { user: expected.user }),
...(expected.project && { project: expected.project }),
...(expected.committish && { committish: expected.committish }),
...(expected.default && { default: expected.default }),
});
});
});
});
// TODO(markovejnovic): Unskip these tests.
// NOTE(review): rejection of invalid URLs is not implemented/enforced yet,
// so the whole suite is skipped rather than individual cases.
describe.skip("invalid urls", () => {
describe.each(Object.entries(invalidGitUrls))("%s", (_, urls: (string | null | undefined)[]) => {
it.each(urls)("does not permit %s", url => {
expect(() => {
hostedGitInfo.fromUrl(url);
}).toThrow();
});
});
});
});

View File

@@ -0,0 +1,21 @@
/**
* Mimics https://github.com/npm/hosted-git-info/blob/main/test/parse-url.js
*/
import { hostedGitInfo } from "bun:internal-for-testing";
import { describe, expect, it } from "bun:test";
// URLs that parseUrl must accept without returning null.
const okCases = [
// These come straight out of the hosted-git-info tests
"git+ssh://git@abc:frontend/utils.git#6d45447e0c5eb6cd2e3edf05a8c5a9bb81950c79",
// These are custom cases added for Bun
"ssh://:password@bitbucket.org:foo/bar.git",
"git@bitbucket.org:foo/bar",
"gist:user:password@/feedbeef#branch",
"github:foo/bar#branch with space",
];
// Smoke tests: each URL above should produce a non-null parse result.
// Field-level correctness is covered by the fromUrl tests.
describe("parseUrl", () => {
it.each(okCases)("parses %s", url => {
expect(hostedGitInfo.parseUrl(url)).not.toBeNull();
});
});

View File

@@ -0,0 +1,942 @@
import path from "path";
import os from "os";
export default {
basic: {
'foo@1.2': {
name: 'foo',
escapedName: 'foo',
type: 'range',
saveSpec: null,
fetchSpec: '1.2',
raw: 'foo@1.2',
rawSpec: '1.2',
},
'foo@~1.2': {
name: 'foo',
escapedName: 'foo',
type: 'range',
saveSpec: null,
fetchSpec: '~1.2',
raw: 'foo@~1.2',
rawSpec: '~1.2',
},
'@foo/bar': {
raw: '@foo/bar',
name: '@foo/bar',
escapedName: '@foo%2fbar',
scope: '@foo',
rawSpec: '*',
saveSpec: null,
fetchSpec: '*',
type: 'range',
},
'@foo/bar@': {
raw: '@foo/bar@',
name: '@foo/bar',
escapedName: '@foo%2fbar',
scope: '@foo',
rawSpec: '*',
saveSpec: null,
fetchSpec: '*',
type: 'range',
},
'@foo/bar@baz': {
raw: '@foo/bar@baz',
name: '@foo/bar',
escapedName: '@foo%2fbar',
scope: '@foo',
rawSpec: 'baz',
saveSpec: null,
fetchSpec: 'baz',
type: 'tag',
},
'@f fo o al/ a d s ;f': {
raw: '@f fo o al/ a d s ;f',
name: null,
escapedName: null,
rawSpec: '@f fo o al/ a d s ;f',
saveSpec: 'file:@f fo o al/ a d s ;f',
fetchSpec: '/test/a/b/@f fo o al/ a d s ;f',
type: 'directory',
},
'foo@1.2.3': {
name: 'foo',
escapedName: 'foo',
type: 'version',
saveSpec: null,
fetchSpec: '1.2.3',
raw: 'foo@1.2.3',
},
'foo@=v1.2.3': {
name: 'foo',
escapedName: 'foo',
type: 'version',
saveSpec: null,
fetchSpec: '=v1.2.3',
raw: 'foo@=v1.2.3',
rawSpec: '=v1.2.3',
},
'foo@npm:bar': {
name: 'foo',
escapedName: 'foo',
type: 'alias',
saveSpec: null,
fetchSpec: null,
raw: 'foo@npm:bar',
rawSpec: 'npm:bar',
subSpec: {
registry: true,
name: 'bar',
escapedName: 'bar',
type: 'range',
raw: 'bar',
rawSpec: '*',
saveSpec: null,
fetchSpec: '*',
},
},
'git+ssh://git@notgithub.com/user/foo#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@notgithub.com/user/foo#1.2.3',
fetchSpec: 'ssh://git@notgithub.com/user/foo',
gitCommittish: '1.2.3',
raw: 'git+ssh://git@notgithub.com/user/foo#1.2.3',
},
'git+ssh://git@notgithub.com/user/foo': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@notgithub.com/user/foo',
fetchSpec: 'ssh://git@notgithub.com/user/foo',
gitCommittish: null,
raw: 'git+ssh://git@notgithub.com/user/foo',
},
'git+ssh://git@notgithub.com:user/foo': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@notgithub.com:user/foo',
fetchSpec: 'git@notgithub.com:user/foo',
gitCommittish: null,
raw: 'git+ssh://git@notgithub.com:user/foo',
},
'git+ssh://mydomain.com:foo': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:foo',
fetchSpec: 'mydomain.com:foo',
gitCommittish: null,
raw: 'git+ssh://mydomain.com:foo',
},
'git+ssh://git@notgithub.com:user/foo#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@notgithub.com:user/foo#1.2.3',
fetchSpec: 'git@notgithub.com:user/foo',
gitCommittish: '1.2.3',
raw: 'git+ssh://git@notgithub.com:user/foo#1.2.3',
},
'git+ssh://mydomain.com:foo#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:foo#1.2.3',
fetchSpec: 'mydomain.com:foo',
gitCommittish: '1.2.3',
raw: 'git+ssh://mydomain.com:foo#1.2.3',
},
'git+ssh://mydomain.com:foo/bar#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:foo/bar#1.2.3',
fetchSpec: 'mydomain.com:foo/bar',
gitCommittish: '1.2.3',
raw: 'git+ssh://mydomain.com:foo/bar#1.2.3',
},
'git+ssh://mydomain.com:1234#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:1234#1.2.3',
fetchSpec: 'ssh://mydomain.com:1234',
gitCommittish: '1.2.3',
raw: 'git+ssh://mydomain.com:1234#1.2.3',
},
'git+ssh://mydomain.com:1234/hey#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:1234/hey#1.2.3',
fetchSpec: 'ssh://mydomain.com:1234/hey',
gitCommittish: '1.2.3',
raw: 'git+ssh://mydomain.com:1234/hey#1.2.3',
},
'git+ssh://mydomain.com:1234/hey': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:1234/hey',
fetchSpec: 'ssh://mydomain.com:1234/hey',
gitCommittish: null,
raw: 'git+ssh://mydomain.com:1234/hey',
},
'git+ssh://username:password@mydomain.com:1234/hey#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://username:password@mydomain.com:1234/hey#1.2.3',
fetchSpec: 'ssh://username:password@mydomain.com:1234/hey',
gitCommittish: '1.2.3',
raw: 'git+ssh://username:password@mydomain.com:1234/hey#1.2.3',
},
'git+ssh://git@github.com/user/foo#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git#1.2.3',
fetchSpec: 'ssh://git@github.com/user/foo.git',
gitCommittish: '1.2.3',
raw: 'git+ssh://git@github.com/user/foo#1.2.3',
},
'git+ssh://git@notgithub.com/user/foo#semver:^1.2.3': {
name: null,
escapedName: null,
type: 'git',
hosted: null,
saveSpec: 'git+ssh://git@notgithub.com/user/foo#semver:^1.2.3',
fetchSpec: 'ssh://git@notgithub.com/user/foo',
gitCommittish: null,
gitRange: '^1.2.3',
raw: 'git+ssh://git@notgithub.com/user/foo#semver:^1.2.3',
},
'git+ssh://git@notgithub.com:user/foo#semver:^1.2.3': {
name: null,
escapedName: null,
type: 'git',
hosted: null,
saveSpec: 'git+ssh://git@notgithub.com:user/foo#semver:^1.2.3',
fetchSpec: 'git@notgithub.com:user/foo',
gitCommittish: null,
gitRange: '^1.2.3',
raw: 'git+ssh://git@notgithub.com:user/foo#semver:^1.2.3',
},
'git+ssh://git@github.com/user/foo#semver:^1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git#semver:^1.2.3',
fetchSpec: 'ssh://git@github.com/user/foo.git',
gitCommittish: null,
gitRange: '^1.2.3',
raw: 'git+ssh://git@github.com/user/foo#semver:^1.2.3',
},
'git+ssh://git@github.com:user/foo#semver:^1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git#semver:^1.2.3',
fetchSpec: 'ssh://git@github.com/user/foo.git',
gitCommittish: null,
gitRange: '^1.2.3',
raw: 'git+ssh://git@github.com:user/foo#semver:^1.2.3',
},
'user/foo#semver:^1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'github:user/foo#semver:^1.2.3',
fetchSpec: null,
gitCommittish: null,
gitRange: '^1.2.3',
raw: 'user/foo#semver:^1.2.3',
},
'user/foo#path:dist': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'github:user/foo#path:dist',
fetchSpec: null,
gitCommittish: null,
gitSubdir: '/dist',
raw: 'user/foo#path:dist',
},
'user/foo#1234::path:dist': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'github:user/foo#1234::path:dist',
fetchSpec: null,
gitCommittish: '1234',
gitRange: null,
gitSubdir: '/dist',
raw: 'user/foo#1234::path:dist',
},
'user/foo#notimplemented:value': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'github:user/foo#notimplemented:value',
fetchSpec: null,
gitCommittish: null,
gitRange: null,
gitSubdir: null,
raw: 'user/foo#notimplemented:value',
},
'git+file://path/to/repo#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+file://path/to/repo#1.2.3',
fetchSpec: 'file://path/to/repo',
gitCommittish: '1.2.3',
raw: 'git+file://path/to/repo#1.2.3',
},
'git://notgithub.com/user/foo': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git://notgithub.com/user/foo',
fetchSpec: 'git://notgithub.com/user/foo',
raw: 'git://notgithub.com/user/foo',
},
'@foo/bar@git+ssh://notgithub.com/user/foo': {
name: '@foo/bar',
escapedName: '@foo%2fbar',
scope: '@foo',
saveSpec: 'git+ssh://notgithub.com/user/foo',
fetchSpec: 'ssh://notgithub.com/user/foo',
rawSpec: 'git+ssh://notgithub.com/user/foo',
raw: '@foo/bar@git+ssh://notgithub.com/user/foo',
type: 'git',
},
'git@npm:not-git': {
name: 'git',
type: 'alias',
subSpec: {
type: 'range',
registry: true,
name: 'not-git',
fetchSpec: '*',
},
raw: 'git@npm:not-git',
},
'not-git@hostname.com:some/repo': {
name: null,
type: 'git',
saveSpec: 'git+ssh://not-git@hostname.com:some/repo',
fetchSpec: 'not-git@hostname.com:some/repo',
raw: 'not-git@hostname.com:some/repo',
},
'/path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/path/to/foo',
fetchSpec: '/path/to/foo',
raw: '/path/to/foo',
},
'/path/to/foo.tar': {
name: null,
escapedName: null,
type: 'file',
saveSpec: 'file:/path/to/foo.tar',
fetchSpec: '/path/to/foo.tar',
raw: '/path/to/foo.tar',
},
'/path/to/foo.tgz': {
name: null,
escapedName: null,
type: 'file',
saveSpec: 'file:/path/to/foo.tgz',
fetchSpec: '/path/to/foo.tgz',
raw: '/path/to/foo.tgz',
},
'file:path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:path/to/foo',
fetchSpec: '/test/a/b/path/to/foo',
raw: 'file:path/to/foo',
},
'file:path/to/foo.tar.gz': {
name: null,
escapedName: null,
type: 'file',
saveSpec: 'file:path/to/foo.tar.gz',
fetchSpec: '/test/a/b/path/to/foo.tar.gz',
raw: 'file:path/to/foo.tar.gz',
},
'file:~/path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:~/path/to/foo',
fetchSpec: path.normalize(path.join(os.homedir(), '/path/to/foo')),
raw: 'file:~/path/to/foo',
},
'file:/~/path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:~/path/to/foo',
fetchSpec: path.normalize(path.join(os.homedir(), '/path/to/foo')),
raw: 'file:/~/path/to/foo',
},
'file:/~path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/~path/to/foo',
fetchSpec: '/~path/to/foo',
raw: 'file:/~path/to/foo',
},
'file:/.path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/.path/to/foo',
fetchSpec: '/.path/to/foo',
raw: 'file:/.path/to/foo',
},
'file:./path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:path/to/foo',
fetchSpec: '/test/a/b/path/to/foo',
raw: 'file:./path/to/foo',
},
'file:/./path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:path/to/foo',
fetchSpec: '/test/a/b/path/to/foo',
raw: 'file:/./path/to/foo',
},
'file://./path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:path/to/foo',
fetchSpec: '/test/a/b/path/to/foo',
raw: 'file://./path/to/foo',
},
'file:../path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:../path/to/foo',
fetchSpec: '/test/a/path/to/foo',
raw: 'file:../path/to/foo',
},
'file:/../path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:../path/to/foo',
fetchSpec: '/test/a/path/to/foo',
raw: 'file:/../path/to/foo',
},
'file://../path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:../path/to/foo',
fetchSpec: '/test/a/path/to/foo',
raw: 'file://../path/to/foo',
},
'file:///path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/path/to/foo',
fetchSpec: '/path/to/foo',
raw: 'file:///path/to/foo',
},
'file:/path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/path/to/foo',
fetchSpec: '/path/to/foo',
raw: 'file:/path/to/foo',
},
'file://path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/path/to/foo',
fetchSpec: '/path/to/foo',
raw: 'file://path/to/foo',
},
'file:////path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/path/to/foo',
fetchSpec: '/path/to/foo',
raw: 'file:////path/to/foo',
},
'file://.': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:',
fetchSpec: '/test/a/b',
raw: 'file://.',
},
'http://insecure.com/foo.tgz': {
name: null,
escapedName: null,
type: 'remote',
saveSpec: 'http://insecure.com/foo.tgz',
fetchSpec: 'http://insecure.com/foo.tgz',
raw: 'http://insecure.com/foo.tgz',
},
'https://server.com/foo.tgz': {
name: null,
escapedName: null,
type: 'remote',
saveSpec: 'https://server.com/foo.tgz',
fetchSpec: 'https://server.com/foo.tgz',
raw: 'https://server.com/foo.tgz',
},
'foo@latest': {
name: 'foo',
escapedName: 'foo',
type: 'tag',
saveSpec: null,
fetchSpec: 'latest',
raw: 'foo@latest',
},
foo: {
name: 'foo',
escapedName: 'foo',
type: 'range',
saveSpec: null,
fetchSpec: '*',
raw: 'foo',
},
'foo@ 1.2 ': {
name: 'foo',
escapedName: 'foo',
type: 'range',
saveSpec: null,
fetchSpec: '1.2',
raw: 'foo@ 1.2 ',
rawSpec: ' 1.2 ',
},
'foo@ 1.2.3 ': {
name: 'foo',
escapedName: 'foo',
type: 'version',
saveSpec: null,
fetchSpec: '1.2.3',
raw: 'foo@ 1.2.3 ',
rawSpec: ' 1.2.3 ',
},
'foo@1.2.3 ': {
name: 'foo',
escapedName: 'foo',
type: 'version',
saveSpec: null,
fetchSpec: '1.2.3',
raw: 'foo@1.2.3 ',
rawSpec: '1.2.3 ',
},
'foo@ 1.2.3': {
name: 'foo',
escapedName: 'foo',
type: 'version',
saveSpec: null,
fetchSpec: '1.2.3',
raw: 'foo@ 1.2.3',
rawSpec: ' 1.2.3',
},
},
bitbucket: {
'bitbucket:user/foo-js': {
name: null,
type: 'git',
saveSpec: 'bitbucket:user/foo-js',
raw: 'bitbucket:user/foo-js',
},
'bitbucket:user/foo-js#bar/baz': {
name: null,
type: 'git',
saveSpec: 'bitbucket:user/foo-js#bar/baz',
raw: 'bitbucket:user/foo-js#bar/baz',
},
'bitbucket:user..blerg--/..foo-js# . . . . . some . tags / / /': {
name: null,
type: 'git',
saveSpec: 'bitbucket:user..blerg--/..foo-js# . . . . . some . tags / / /',
raw: 'bitbucket:user..blerg--/..foo-js# . . . . . some . tags / / /',
},
'bitbucket:user/foo-js#bar/baz/bin': {
name: null,
type: 'git',
saveSpec: 'bitbucket:user/foo-js#bar/baz/bin',
raw: 'bitbucket:user/foo-js#bar/baz/bin',
},
'foo@bitbucket:user/foo-js': {
name: 'foo',
type: 'git',
saveSpec: 'bitbucket:user/foo-js',
raw: 'foo@bitbucket:user/foo-js',
},
'git+ssh://git@bitbucket.org/user/foo#1.2.3': {
name: null,
type: 'git',
saveSpec: 'git+ssh://git@bitbucket.org/user/foo.git#1.2.3',
raw: 'git+ssh://git@bitbucket.org/user/foo#1.2.3',
},
'https://bitbucket.org/user/foo.git': {
name: null,
type: 'git',
saveSpec: 'git+https://bitbucket.org/user/foo.git',
raw: 'https://bitbucket.org/user/foo.git',
},
'@foo/bar@git+ssh://bitbucket.org/user/foo': {
name: '@foo/bar',
scope: '@foo',
type: 'git',
saveSpec: 'git+ssh://git@bitbucket.org/user/foo.git',
rawSpec: 'git+ssh://bitbucket.org/user/foo',
raw: '@foo/bar@git+ssh://bitbucket.org/user/foo',
},
},
github: {
'user/foo-js': {
name: null,
type: 'git',
saveSpec: 'github:user/foo-js',
raw: 'user/foo-js',
},
'user/foo-js#bar/baz': {
name: null,
type: 'git',
saveSpec: 'github:user/foo-js#bar/baz',
raw: 'user/foo-js#bar/baz',
},
'user..blerg--/..foo-js# . . . . . some . tags / / /': {
name: null,
type: 'git',
saveSpec: 'github:user..blerg--/..foo-js# . . . . . some . tags / / /',
raw: 'user..blerg--/..foo-js# . . . . . some . tags / / /',
},
'user/foo-js#bar/baz/bin': {
name: null,
type: 'git',
raw: 'user/foo-js#bar/baz/bin',
},
'foo@user/foo-js': {
name: 'foo',
type: 'git',
saveSpec: 'github:user/foo-js',
raw: 'foo@user/foo-js',
},
'github:user/foo-js': {
name: null,
type: 'git',
saveSpec: 'github:user/foo-js',
raw: 'github:user/foo-js',
},
'git+ssh://git@github.com/user/foo#1.2.3': {
name: null,
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git#1.2.3',
raw: 'git+ssh://git@github.com/user/foo#1.2.3',
},
'git+ssh://git@github.com:user/foo#1.2.3': {
name: null,
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git#1.2.3',
raw: 'git+ssh://git@github.com:user/foo#1.2.3',
},
'git://github.com/user/foo': {
name: null,
type: 'git',
saveSpec: 'git://github.com/user/foo.git',
raw: 'git://github.com/user/foo',
},
'https://github.com/user/foo.git': {
name: null,
type: 'git',
saveSpec: 'git+https://github.com/user/foo.git',
raw: 'https://github.com/user/foo.git',
},
'@foo/bar@git+ssh://github.com/user/foo': {
name: '@foo/bar',
scope: '@foo',
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git',
rawSpec: 'git+ssh://github.com/user/foo',
raw: '@foo/bar@git+ssh://github.com/user/foo',
},
'foo@bar/foo': {
name: 'foo',
type: 'git',
saveSpec: 'github:bar/foo',
raw: 'foo@bar/foo',
},
'git@github.com:12345': {
name: undefined,
type: 'git',
saveSpec: 'git+ssh://git@github.com:12345',
fetchSpec: 'ssh://git@github.com:12345',
raw: 'git@github.com:12345',
},
'git@github.com:12345/': {
name: undefined,
type: 'git',
saveSpec: 'git+ssh://git@github.com:12345/',
fetchSpec: 'ssh://git@github.com:12345/',
raw: 'git@github.com:12345/',
},
'git@github.com:12345/foo': {
name: undefined,
type: 'git',
saveSpec: 'git+ssh://git@github.com:12345/foo',
fetchSpec: 'ssh://git@github.com:12345/foo',
raw: 'git@github.com:12345/foo',
},
'git@github.com:12345foo': {
name: undefined,
type: 'git',
saveSpec: 'git+ssh://git@github.com:12345foo',
fetchSpec: 'git@github.com:12345foo',
raw: 'git@github.com:12345foo',
},
},
gitlab: {
'gitlab:user/foo-js': {
name: null,
type: 'git',
raw: 'gitlab:user/foo-js',
},
'gitlab:user/foo-js#bar/baz': {
name: null,
type: 'git',
raw: 'gitlab:user/foo-js#bar/baz',
},
'gitlab:user..blerg--/..foo-js# . . . . . some . tags / / /': {
name: null,
type: 'git',
saveSpec: 'gitlab:user..blerg--/..foo-js# . . . . . some . tags / / /',
raw: 'gitlab:user..blerg--/..foo-js# . . . . . some . tags / / /',
},
'gitlab:user/foo-js#bar/baz/bin': {
name: null,
type: 'git',
saveSpec: 'gitlab:user/foo-js#bar/baz/bin',
raw: 'gitlab:user/foo-js#bar/baz/bin',
},
'foo@gitlab:user/foo-js': {
name: 'foo',
type: 'git',
saveSpec: 'gitlab:user/foo-js',
raw: 'foo@gitlab:user/foo-js',
},
'git+ssh://git@gitlab.com/user/foo#1.2.3': {
name: null,
type: 'git',
saveSpec: 'git+ssh://git@gitlab.com/user/foo.git#1.2.3',
raw: 'git+ssh://git@gitlab.com/user/foo#1.2.3',
},
'https://gitlab.com/user/foo.git': {
name: null,
type: 'git',
saveSpec: 'git+https://gitlab.com/user/foo.git',
raw: 'https://gitlab.com/user/foo.git',
},
'@foo/bar@git+ssh://gitlab.com/user/foo': {
name: '@foo/bar',
scope: '@foo',
type: 'git',
saveSpec: 'git+ssh://git@gitlab.com/user/foo.git',
rawSpec: 'git+ssh://gitlab.com/user/foo',
raw: '@foo/bar@git+ssh://gitlab.com/user/foo',
},
},
windows: {
'C:\\x\\y\\z': {
raw: 'C:\\x\\y\\z',
scope: null,
name: null,
escapedName: null,
rawSpec: 'C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'foo@C:\\x\\y\\z': {
raw: 'foo@C:\\x\\y\\z',
scope: null,
name: 'foo',
escapedName: 'foo',
rawSpec: 'C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'foo@file:///C:\\x\\y\\z': {
raw: 'foo@file:///C:\\x\\y\\z',
scope: null,
name: 'foo',
escapedName: 'foo',
rawSpec: 'file:///C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'foo@file://C:\\x\\y\\z': {
raw: 'foo@file://C:\\x\\y\\z',
scope: null,
name: 'foo',
escapedName: 'foo',
rawSpec: 'file://C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'file:///C:\\x\\y\\z': {
raw: 'file:///C:\\x\\y\\z',
scope: null,
name: null,
escapedName: null,
rawSpec: 'file:///C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'file://C:\\x\\y\\z': {
raw: 'file://C:\\x\\y\\z',
scope: null,
name: null,
escapedName: null,
rawSpec: 'file://C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'foo@/foo/bar/baz': {
raw: 'foo@/foo/bar/baz',
scope: null,
name: 'foo',
escapedName: 'foo',
rawSpec: '/foo/bar/baz',
fetchSpec: 'C:\\foo\\bar\\baz',
type: 'directory',
},
'foo@git+file://C:\\x\\y\\z': {
type: 'git',
registry: null,
where: null,
raw: 'foo@git+file://C:\\x\\y\\z',
name: 'foo',
escapedName: 'foo',
scope: null,
rawSpec: 'git+file://C:\\x\\y\\z',
saveSpec: 'git+file://C:\\x\\y\\z',
fetchSpec: 'file://c:/x/y/z',
gitRange: null,
gitCommittish: null,
hosted: null,
},
},
};

View File

@@ -0,0 +1,109 @@
import { Npa } from "bun:internal-for-testing";
import cases from "./cases";
/** Strip a leading Windows drive letter and convert backslashes to forward slashes. */
const normalizePath = (p: string) => {
  if (!p) {
    return p;
  }
  return p.replace(/^[a-zA-Z]:/, "").replace(/\\/g, "/");
};

/**
 * Normalize the `fetchSpec` of a parsed spec in place so that comparisons
 * against the fixtures are platform-agnostic. Returns the same object.
 */
const normalizePaths = (spec: any) => {
  spec.fetchSpec = normalizePath(spec.fetchSpec);
  return spec;
};
/**
 * Clone `expected`, replacing every own `undefined` value with `null`.
 * The parser does not distinguish between the two, and neither do these
 * tests, so normalizing avoids noisy null-vs-undefined mismatches.
 */
const expectedPatch = (expected: any) => {
  const patched: any = { ...expected };
  for (const key of Object.keys(patched)) {
    if (patched[key] === undefined) {
      patched[key] = null;
    }
  }
  return patched;
};
// All fixture sets except the windows-only one; these run on every platform.
const platformAgnosticTests = Object.entries(cases).filter(([name]) => name !== "windows");
// BUG FIX: this previously filtered with `name !== "windows"` — identical to
// platformAgnosticTests — so the "windows specific cases" suite re-ran every
// non-windows fixture set and never exercised the windows fixtures at all.
// Select only the "windows" case set here.
const windowsTests = Object.entries(cases).filter(([name]) => name === "windows");
// Runs every fixture through Npa.npa with a fixed virtual cwd ("/test/a/b",
// matching the paths baked into the fixtures) and compares the parsed result
// against the expected object, with undefined fields normalized to null first.
describe("npa", () => {
describe("valid cases", () => {
describe.each(platformAgnosticTests)("%s", (_, caseSet: object) => {
it.each(Object.entries(caseSet))("parses %s", (input, expected) => {
const result = Npa.npa(input as string, "/test/a/b");
expect(result).toMatchObject(expectedPatch(expected));
});
});
});
if (process.platform === "win32") {
// Windows fixtures use drive letters and backslashes, so the parsed
// result's fetchSpec is normalized to forward slashes before comparing.
describe("windows specific cases", () => {
describe.each(windowsTests)("%s", (_, caseSet: object) => {
it.each(Object.entries(caseSet))("parses %s", (input, expected) => {
const result = Npa.npa(input as string);
expect(normalizePaths(result)).toMatchObject(expected);
});
});
});
}
});
// Targeted Npa.resolve()/Npa.npa() behaviors not covered by the fixture
// table above: range vs. file resolution, relative paths with and without
// the file: protocol, tarball detection, a missing name, raw construction,
// and '@' characters appearing inside directory paths.
describe("resolve", () => {
test("npa.resolve", () => {
expect(Npa.resolve("foo", "^1.2.3", "/test/a/b")).toMatchObject({
type: "range",
});
});
test("Npa.resolve file:", () => {
// fetchSpec is resolved relative to the third ("where") argument.
expect(normalizePaths(Npa.resolve("foo", "file:foo", "/test/a/b"))).toMatchObject({
type: "directory",
fetchSpec: "/test/a/b/foo",
});
});
test("Npa.resolve no protocol", () => {
// A bare relative path is treated as a directory even without "file:".
expect(Npa.resolve("foo", "../foo/bar", "/test/a/b")).toMatchObject({
type: "directory",
});
});
test("Npa.resolve file protocol", () => {
expect(Npa.resolve("foo", "file:../foo/bar", "/test/a/b")).toMatchObject({
type: "directory",
});
});
test("Npa.resolve file protocol w/ tgz", () => {
// A .tgz extension flips the classification from directory to file.
expect(Npa.resolve("foo", "file:../foo/bar.tgz", "/test/a/b")).toMatchObject({
type: "file",
});
});
test("Npa.resolve with no name", () => {
expect(Npa.resolve(null, "4.0.0", "/test/a/b")).toMatchObject({
type: "version",
name: null,
});
});
test("Npa.resolve sets raw right", () => {
// raw must reconstruct "<name>@<spec>" from the two arguments.
expect(Npa.resolve("foo", "file:abc")).toMatchObject({
type: "directory",
raw: "foo@file:abc",
});
});
test("npa with path in @ in it", () => {
// An '@' inside a path segment must not be treated as a name/spec separator.
expect(Npa.npa("./path/to/thing/package@1.2.3/")).toMatchObject({
name: null,
type: "directory",
});
});
test("npa w/o leading or trailing slash", () => {
expect(Npa.npa("path/to/thing/package@1.2.3")).toMatchObject({
name: null,
type: "directory",
});
});
});

View File

@@ -0,0 +1,6 @@
import { Npa } from "bun:internal-for-testing";
import { expect, test } from "bun:test";

// `gopher:` is not a spec protocol npa recognizes, so parsing must throw.
test("invalid url", () => {
  const parseBadProtocol = () => Npa.npa("foo@gopher://goodluckwiththat");
  expect(parseBadProtocol).toThrow();
});

View File

@@ -0,0 +1,94 @@
import { Npa } from "bun:internal-for-testing";
import { describe, expect, test } from "bun:test";

// Regression tests for allocator hygiene in the npm-package-arg port.
// Each case below previously leaked or double-freed an allocation, usually on
// an error path; the suite exercises both the failing and the succeeding sides.
describe("npm-package-arg memory safety", () => {
  describe("fromAlias error paths", () => {
    test("handles nested alias error without leaking", () => {
      // Triggers error.NestedAlias; before the fix the sub_spec allocation leaked.
      expect(() => Npa.npa("foo@npm:bar@npm:baz", "/test/path")).toThrow();
    });

    test("handles non-registry alias without leaking", () => {
      // Triggers error.NotAliasingRegistry; before the fix the sub_spec allocation leaked.
      expect(() => Npa.npa("foo@npm:github:user/repo", "/test/path")).toThrow();
    });
  });

  describe("fromGitSpec error paths", () => {
    test("handles valid git spec without crashing", () => {
      // Success path: must parse and clean up properly.
      const parsed = Npa.npa("github:user/repo", "/test/path");
      expect(parsed.type).toBe("git");
      expect(parsed.hosted).toBeDefined();
    });

    test("handles git spec with committish", () => {
      // Exercises git_attrs ownership; before the fix `hosted` leaked when an
      // internal allocation failed.
      const parsed = Npa.npa("github:user/repo#v1.0.0", "/test/path");
      expect(parsed.type).toBe("git");
      expect(parsed.gitCommittish).toBe("v1.0.0");
    });

    test("handles git spec with semver range", () => {
      // GitAttrs.fromCommittish semver parsing; also covers the double-free
      // fix (Bug #3).
      const parsed = Npa.npa("github:user/repo#semver:^1.0.0", "/test/path");
      expect(parsed.type).toBe("git");
      expect(parsed.gitRange).toBe("^1.0.0");
    });

    test("handles git spec with path", () => {
      // GitAttrs.fromCommittish with a subdirectory attribute.
      const parsed = Npa.npa("github:user/repo#path:packages/foo", "/test/path");
      expect(parsed.type).toBe("git");
      expect(parsed.gitSubdir).toBe("/packages/foo");
    });

    test("handles git spec with multiple attributes", () => {
      // GitAttrs.fromCommittish with several `::`-separated attributes.
      const parsed = Npa.npa("github:user/repo#v1.0.0::path:packages/foo", "/test/path");
      expect(parsed.type).toBe("git");
      expect(parsed.gitCommittish).toBe("v1.0.0");
      expect(parsed.gitSubdir).toBe("/packages/foo");
    });
  });

  describe("GitAttrs.fromCommittish edge cases", () => {
    test("handles invalid percent encoding in semver range", () => {
      // Malformed percent-encoding drives PercentEncoding.decode down its
      // error path, which previously double-freed.
      expect(() => Npa.npa("github:user/repo#semver:%XX", "/test/path")).toThrow();
    });

    test("handles duplicate committish attributes", () => {
      // error.InvalidCommittish: only one committish is allowed.
      expect(() => Npa.npa("github:user/repo#v1.0.0::v2.0.0", "/test/path")).toThrow();
    });

    test("handles committish and semver conflict", () => {
      // error.InvalidCommittish: committish and semver are mutually exclusive.
      expect(() => Npa.npa("github:user/repo#v1.0.0::semver:^1.0.0", "/test/path")).toThrow();
    });

    test("handles duplicate subdir", () => {
      // error.InvalidCommittish: only one path attribute is allowed.
      expect(() => Npa.npa("github:user/repo#path:foo::path:bar", "/test/path")).toThrow();
    });
  });
});

View File

@@ -0,0 +1,58 @@
import { Npa } from "bun:internal-for-testing";
import { expect, test } from "bun:test";

const npa = Npa.npa;

// Port of npm's realize-package-specifier tests: a bare spec is classified by
// what exists (or could exist) on disk relative to `where`.
test("realize-package-specifier", () => {
  const kindOf = (spec: string) => npa(spec, "/test/a/b").type;

  expect(kindOf("a.tar.gz")).toBe("file"); // local tarball
  expect(kindOf("d")).toBe("range"); // remote package
  expect(kindOf("file:./a.tar.gz")).toBe("file"); // local tarball
  expect(kindOf("file:./b")).toBe("directory"); // local package directory
  expect(kindOf("file:./c")).toBe("directory"); // non-package local directory, specified with a file URL
  expect(kindOf("file:./d")).toBe("directory"); // no local directory, specified with a file URL
});

// Same classification when a name prefix ("name@spec") is present.
test("named realize-package-specifier", () => {
  const kindOf = (spec: string) => npa(spec, "/test/a/b").type;

  expect(kindOf("a@a.tar.gz")).toBe("file"); // named local tarball
  expect(kindOf("d@d")).toBe("tag"); // remote package
  expect(kindOf("a@file:./a.tar.gz")).toBe("file"); // local tarball
  expect(kindOf("b@file:./b")).toBe("directory"); // local package directory
  expect(kindOf("c@file:./c")).toBe("directory"); // non-package local directory, specified with a file URL
  expect(kindOf("d@file:./d")).toBe("directory"); // no local directory, specified with a file URL

  // NOTE(review): the relative `where` ("test/a/b", no leading slash) is kept
  // as in the upstream fixture — confirm it is intentional.
  const hosted = npa("e@e/2", "test/a/b");
  expect(hosted.type).toBe("git"); // hosted package dependency is git
  expect(hosted.hosted.type).toBe("github"); // github package dependency

  expect(kindOf("e@1")).toBe("range"); // range like specifier is never a local file
  expect(kindOf("e@1.0.0")).toBe("version"); // version like specifier is never a local file
});

View File

@@ -0,0 +1,102 @@
// Validation fixture mirrored from the upstream `validate-npm-package-name`
// test corpus. Each key is a candidate package name; each value is the
// expected verdict:
//   - validForNewPackages: acceptable for newly published packages
//   - validForOldPackages: acceptable under the legacy rules
//   - errors:   messages expected for hard failures
//   - warnings: messages expected for legacy-only names
export default {
// Fully valid under both new and legacy rules.
'some-package': { validForNewPackages: true, validForOldPackages: true },
'example.com': { validForNewPackages: true, validForOldPackages: true },
'under_score': { validForNewPackages: true, validForOldPackages: true },
'period.js': { validForNewPackages: true, validForOldPackages: true },
'123numeric': { validForNewPackages: true, validForOldPackages: true },
// Special characters are tolerated for old packages only.
'crazy!': { validForNewPackages: false, validForOldPackages: true },
'@npm/thingy': { validForNewPackages: true, validForOldPackages: true },
'@npm-zors/money!time.js': {
validForNewPackages: false,
validForOldPackages: true,
warnings: ['name can no longer contain special characters ("~\'!()*")'],
},
// Names rejected when bare (node_modules, leading underscore, http) are fine
// inside a scope.
'@user/node_modules': { validForNewPackages: true, validForOldPackages: true },
'@user/_package': { validForNewPackages: true, validForOldPackages: true },
'@user/http': { validForNewPackages: true, validForOldPackages: true },
// Hard failures: invalid under both rule sets, with the expected error text.
'': {
validForNewPackages: false,
validForOldPackages: false,
errors: ['name length must be greater than zero'],
},
'.start-with-period': {
validForNewPackages: false,
validForOldPackages: false,
errors: ['name cannot start with a period'],
},
'@npm/.': {
validForNewPackages: false,
validForOldPackages: false,
errors: ['name cannot start with a period'],
},
'@npm/..': {
validForNewPackages: false,
validForOldPackages: false,
errors: ['name cannot start with a period'],
},
'@npm/.package': {
validForNewPackages: false,
validForOldPackages: false,
errors: ['name cannot start with a period'],
},
'_start-with-underscore': {
validForNewPackages: false,
validForOldPackages: false,
errors: ['name cannot start with an underscore'],
},
'contain:colons': {
validForNewPackages: false,
validForOldPackages: false,
errors: ['name can only contain URL-friendly characters'],
},
' leading-space': {
validForNewPackages: false,
validForOldPackages: false,
errors: [
'name cannot contain leading or trailing spaces',
'name can only contain URL-friendly characters',
],
},
'trailing-space ': {
validForNewPackages: false,
validForOldPackages: false,
errors: [
'name cannot contain leading or trailing spaces',
'name can only contain URL-friendly characters',
],
},
's/l/a/s/h/e/s': {
validForNewPackages: false,
validForOldPackages: false,
errors: ['name can only contain URL-friendly characters'],
},
'node_modules': {
validForNewPackages: false,
validForOldPackages: false,
errors: ['node_modules is not a valid package name'],
},
'favicon.ico': {
validForNewPackages: false,
validForOldPackages: false,
errors: ['favicon.ico is not a valid package name'],
},
// Core module names: warn for new packages, allowed for old ones.
'http': {
validForNewPackages: false,
validForOldPackages: true,
warnings: ['http is a core module name'],
},
'process': {
validForNewPackages: false,
validForOldPackages: true,
warnings: ['process is a core module name'],
},
// 215 characters: over the 214-character limit for new packages.
'ifyouwanttogetthesumoftwonumberswherethosetwonumbersarechosenbyfindingthelargestoftwooutofthreenumbersandsquaringthemwhichismultiplyingthembyitselfthenyoushouldinputthreenumbersintothisfunctionanditwilldothatforyou-': {
validForNewPackages: false,
validForOldPackages: true,
warnings: ['name can no longer contain more than 214 characters'],
},
// Exactly at the limit: valid.
'ifyouwanttogetthesumoftwonumberswherethosetwonumbersarechosenbyfindingthelargestoftwooutofthreenumbersandsquaringthemwhichismultiplyingthembyitselfthenyoushouldinputthreenumbersintothisfunctionanditwilldothatforyou': {
validForNewPackages: true,
validForOldPackages: true,
},
};

View File

@@ -0,0 +1,36 @@
import { expect, describe, it } from "bun:test";
import cases from "./cases";
import { ValidateNpmPackageName } from "bun:internal-for-testing";
/**
 * Convert the expected object from the upstream `validate-npm-package-name`
 * fixtures into the shape our implementation reports: core-module warnings and
 * blacklist errors use a single generic message instead of embedding the name.
 *
 * In some ways, this is debt that needs to be addressed should we choose to
 * expose this API publicly.
 */
function remapExpectedObject(expectedObject: any): object {
  const remapped = { ...expectedObject };

  // Upstream messages end with a name-specific suffix; ours are generic.
  const coreModuleSuffix = "is a core module name";
  const invalidNameSuffix = "is not a valid package name";

  if (remapped.warnings) {
    remapped.warnings = remapped.warnings.map((warning: string) =>
      warning.endsWith(coreModuleSuffix) ? "name conflicts a core module name" : warning,
    );
  }

  if (remapped.errors) {
    remapped.errors = remapped.errors.map((error: string) =>
      error.endsWith(invalidNameSuffix) ? "name is not allowed" : error,
    );
  }

  return remapped;
}
// Run every upstream case through our validator, remapping the expected
// messages to our generic wording first.
describe("validate-npm-package-name", () => {
  it.each(Object.entries(cases))("parses %s", (pkgName: string, expected: object) => {
    const actual = ValidateNpmPackageName.validate(pkgName);
    expect(actual).toMatchObject(remapExpectedObject(expected));
  });
});