Compare commits

...

5 Commits

Author SHA1 Message Date
autofix-ci[bot]
e2b697c8b5 [autofix.ci] apply automated fixes 2025-10-28 00:37:31 +00:00
Marko Vejnovic
cfef3337a2 try fix windows 2025-10-27 17:35:19 -07:00
Marko Vejnovic
69b87ee089 fix: Panicking tests 2025-10-27 14:50:19 -07:00
Marko Vejnovic
6e48e49547 test: Add npm_package_arg.zig tests 2025-10-27 12:41:45 -07:00
Marko Vejnovic
cca0b03361 feat(install): Match resolution with npm-package-arg 2025-10-27 12:41:45 -07:00
26 changed files with 8008 additions and 602 deletions

View File

@@ -624,6 +624,22 @@ extern "C" BunString URL__getHrefJoin(BunString* baseStr, BunString* relativeStr
return Bun::toStringRef(url.string());
}
// Returns the URL's fragment ("hash") including the leading '#',
// or the empty string when the URL has no fragment.
extern "C" BunString URL__hash(WTF::URL* url)
{
    const auto& fragment = url->fragmentIdentifier().isEmpty()
        ? emptyString()
        : url->fragmentIdentifierWithLeadingNumberSign().toString();
    return Bun::toStringRef(fragment);
}
// Returns the URL's fragment identifier without the leading '#',
// or the empty string when the URL has no fragment.
extern "C" BunString URL__fragmentIdentifier(WTF::URL* url)
{
    const auto& fragment = url->fragmentIdentifier().isEmpty()
        ? emptyString()
        : url->fragmentIdentifier().toString();
    return Bun::toStringRef(fragment);
}
extern "C" WTF::URL* URL__fromString(BunString* input)
{
auto&& str = input->toWTFString();

View File

@@ -16,6 +16,20 @@ pub const URL = opaque {
extern fn URL__getFileURLString(*String) String;
extern fn URL__getHrefJoin(*String, *String) String;
extern fn URL__pathFromFileURL(*String) String;
extern fn URL__hash(*URL) String;
extern fn URL__fragmentIdentifier(*URL) String;
/// Includes the leading '#'.
/// Returns an empty string when the URL has no fragment (see URL__hash).
pub fn hash(url: *URL) String {
    jsc.markBinding(@src());
    return URL__hash(url);
}
/// Exactly the same as hash, excluding the leading '#'.
/// Returns an empty string when the URL has no fragment.
pub fn fragmentIdentifier(url: *URL) String {
    jsc.markBinding(@src());
    return URL__fragmentIdentifier(url);
}
pub fn hrefFromString(str: bun.String) String {
jsc.markBinding(@src());

View File

@@ -2952,6 +2952,98 @@ comptime {
@export(&bun.jsc.host_fn.wrap4v(Path.toNamespacedPath), .{ .name = "Bun__Path__toNamespacedPath" });
}
/// Utility for managing path buffers from the pool. Many functions use two buffers.
/// Call `deinit` to return both buffers to the pool.
pub const BufferPair = struct {
    buf1: *bun.PathBuffer,
    buf2: *bun.PathBuffer,

    /// Checks out two buffers from `bun.path_buffer_pool`.
    pub fn init() BufferPair {
        return .{
            .buf1 = bun.path_buffer_pool.get(),
            .buf2 = bun.path_buffer_pool.get(),
        };
    }

    /// Returns both buffers to the pool. The pair must not be used afterwards.
    pub fn deinit(self: *BufferPair) void {
        bun.path_buffer_pool.put(self.buf1);
        bun.path_buffer_pool.put(self.buf2);
    }
};
/// Utility for managing path buffers from the pool. Many functions use three buffers.
/// Composes a `BufferPair` with a third pooled buffer; `deinit` returns all three.
pub const BufferTriplet = struct {
    pair: BufferPair,
    buf3: *bun.PathBuffer,

    /// Checks out three buffers from `bun.path_buffer_pool`.
    pub fn init() BufferTriplet {
        return .{
            .pair = BufferPair.init(),
            .buf3 = bun.path_buffer_pool.get(),
        };
    }

    /// Returns all three buffers to the pool. The triplet must not be used afterwards.
    pub fn deinit(self: *BufferTriplet) void {
        self.pair.deinit();
        bun.path_buffer_pool.put(self.buf3);
    }
};
/// JS path.resolve equivalent.
/// Dispatches to the Windows or POSIX implementation at comptime.
/// NOTE(review): the result appears to point into the pooled buffers —
/// keep `buffers` alive while the returned slice is in use; confirm with callers.
pub fn resolvePath(
    segments: []const []const u8,
    buffers: *BufferPair,
) error{ OutOfMemory, InvalidPath }![]const u8 {
    const result = if (bun.Environment.isWindows)
        resolveWindowsT(u8, segments, buffers.buf1, buffers.buf2)
    else
        resolvePosixT(u8, segments, buffers.buf1, buffers.buf2);

    // Collapse the platform-specific error payload into a single error.
    return switch (result) {
        .result => |r| r,
        .err => error.InvalidPath,
    };
}
/// Resolves `segments` (see `resolvePath`) and returns a newly-allocated
/// string consisting of the comptime `prefix` followed by the resolved path.
/// Caller owns the returned slice.
pub fn resolveWithPrefix(
    allocator: std.mem.Allocator,
    comptime prefix: []const u8,
    segments: []const []const u8,
    buffers: *BufferPair,
) ![]u8 {
    const resolved = try resolvePath(segments, buffers);
    return std.fmt.allocPrint(allocator, prefix ++ "{s}", .{resolved});
}
/// Computes the relative path from `from` to `to`, dispatching to the
/// Windows or POSIX implementation at comptime.
/// NOTE(review): the result appears to point into the pooled buffers —
/// keep `buffers` alive while the returned slice is in use; confirm with callers.
pub fn computeRelative(
    from: []const u8,
    to: []const u8,
    buffers: *BufferTriplet,
) ![]const u8 {
    const result = if (bun.Environment.isWindows)
        relativeWindowsT(
            u8,
            from,
            to,
            buffers.pair.buf1,
            buffers.pair.buf2,
            buffers.buf3,
        )
    else
        relativePosixT(
            u8,
            from,
            to,
            buffers.pair.buf1,
            buffers.pair.buf2,
            buffers.buf3,
        );

    // Collapse the platform-specific error payload into a single error.
    return switch (result) {
        .result => |r| r,
        .err => error.InvalidPath,
    };
}
const string = []const u8;
const std = @import("std");

View File

@@ -299,6 +299,17 @@ pub fn ComptimeStringMapWithKeyType(comptime KeyType: type, comptime V: type, co
return null;
}
/// Lookup the first-defined string key for a given value.
/// Returns null when no entry maps to `value`.
///
/// Linear search.
pub fn getKey(value: V) ?[]const KeyType {
    // `inline for` — `kvs` is a comptime-known table.
    inline for (kvs) |kv| {
        if (kv.value == value) return kv.key;
    }
    return null;
}
};
}

View File

@@ -173,45 +173,6 @@ pub inline fn isSCPLikePath(dependency: string) bool {
return false;
}
/// `isGitHubShorthand` from npm
/// https://github.com/npm/cli/blob/22731831e22011e32fa0ca12178e242c2ee2b33d/node_modules/hosted-git-info/lib/from-url.js#L6
///
/// Accepts "owner/repo" shorthand, optionally with a "#committish" suffix.
pub inline fn isGitHubRepoPath(dependency: string) bool {
    // Shortest valid expression: u/r
    if (dependency.len < 3) return false;

    var hash_index: usize = 0;

    // the branch could have slashes
    // - oven-sh/bun#branch/name
    var first_slash_index: usize = 0;

    for (dependency, 0..) |c, i| {
        switch (c) {
            '/' => {
                // Owner segment must be non-empty; only the first slash is recorded.
                if (i == 0) return false;
                if (first_slash_index == 0) {
                    first_slash_index = i;
                }
            },
            '#' => {
                // At most one '#', never first, and only after the owner/repo slash.
                if (i == 0) return false;
                if (hash_index > 0) return false;
                if (first_slash_index == 0) return false;
                hash_index = i;
            },
            // Not allowed in username
            '.', '_' => {
                if (first_slash_index == 0) return false;
            },
            // Must be alphanumeric
            '-', 'a'...'z', 'A'...'Z', '0'...'9' => {},
            else => return false,
        }
    }

    // Valid only when an owner/repo slash exists and neither the slash nor
    // the '#' is the final character (repo and committish must be non-empty).
    return hash_index != dependency.len - 1 and first_slash_index > 0 and first_slash_index != dependency.len - 1;
}
/// Github allows for the following format of URL:
/// https://github.com/<org>/<repo>/tarball/<ref>
/// This is a legacy (but still supported) method of retrieving a tarball of an
@@ -533,6 +494,10 @@ pub const Version = struct {
return .folder;
}
// Allocator necessary for slow paths.
var stackFallback = std.heap.stackFallback(1024, bun.default_allocator);
var allocator = stackFallback.get();
switch (dependency[0]) {
// =1
// >1.2
@@ -602,8 +567,24 @@ pub const Version = struct {
if (strings.hasPrefixComptime(url, "://")) {
url = url["://".len..];
if (strings.hasPrefixComptime(url, "github.com/")) {
if (isGitHubRepoPath(url["github.com/".len..])) return .github;
if (hosted_git_info.isGitHubShorthand(url["github.com/".len..])) return .github;
}
const copy = bun.handleOom(allocator.dupe(u8, dependency));
defer allocator.free(copy);
if (hosted_git_info.HostedGitInfo.fromUrl(allocator, copy) catch null) |info| {
defer info.deinit();
// npa.js correctly identifies github urls as github
// even when using a git+https:// protocol and so on.
// Legacy bun implementation used to only treat
// "shortcut" (eg. "foo/bar") git repos as .github. We
// now add this bridge to help match the old behavior.
return switch (info.host_provider) {
.github => if (info.default_representation == .shortcut) .github else .git,
else => .git,
};
}
return .git;
}
},
@@ -633,15 +614,35 @@ pub const Version = struct {
else => false,
}) {
if (strings.hasPrefixComptime(url, "github.com/")) {
if (isGitHubRepoPath(url["github.com/".len..])) return .github;
if (hosted_git_info.isGitHubShorthand(url["github.com/".len..])) return .github;
}
const dep_copy = bun.handleOom(allocator.dupe(u8, dependency));
defer allocator.free(dep_copy);
if (hosted_git_info.HostedGitInfo.fromUrl(allocator, dep_copy) catch null) |info| {
defer info.deinit();
// npa.js correctly identifies github urls as
// github even when using a git+https:// protocol
// and so on. Legacy bun implementation used to
// only treat "shortcut" (eg. "foo/bar") git repos
// as .github. We now add this bridge to help match
// the old behavior.
return switch (info.host_provider) {
.github => if (info.default_representation == .shortcut)
.github
else
.git,
.bitbucket, .gitlab, .gist, .sourcehut => .git,
};
}
return .git;
}
}
},
'h' => {
if (strings.hasPrefixComptime(url, "hub:")) {
if (isGitHubRepoPath(url["hub:".len..])) return .github;
if (hosted_git_info.isGitHubShorthand(url["hub:".len..])) return .github;
}
},
else => {},
@@ -673,11 +674,17 @@ pub const Version = struct {
if (strings.hasPrefixComptime(url, "github.com/")) {
const path = url["github.com/".len..];
if (isGitHubTarballPath(path)) return .tarball;
if (isGitHubRepoPath(path)) return .github;
if (hosted_git_info.isGitHubShorthand(path)) return .github;
}
if (strings.indexOfChar(url, '.')) |dot| {
if (Repository.Hosts.has(url[0..dot])) return .git;
const dep_copy = bun.handleOom(allocator.dupe(u8, dependency));
defer allocator.free(dep_copy);
if (hosted_git_info.HostedGitInfo.fromUrl(allocator, dep_copy) catch null) |info| {
defer info.deinit();
return switch (info.host_provider) {
.github => .github,
.bitbucket, .gitlab, .gist, .sourcehut => .git,
};
}
return .tarball;
@@ -698,9 +705,16 @@ pub const Version = struct {
url = url["git@".len..];
}
if (strings.indexOfChar(url, '.')) |dot| {
if (Repository.Hosts.has(url[0..dot])) return .git;
const dep_copy = bun.handleOom(allocator.dupe(u8, dependency));
defer allocator.free(dep_copy);
if (hosted_git_info.HostedGitInfo.fromUrl(allocator, dep_copy) catch null) |info| {
defer info.deinit();
return switch (info.host_provider) {
.github => .github,
.bitbucket, .gitlab, .gist, .sourcehut => .git,
};
}
return .git;
}
}
},
@@ -732,7 +746,7 @@ pub const Version = struct {
// virt@example.com:repo.git
'v' => {
if (isTarball(dependency)) return .tarball;
if (isGitHubRepoPath(dependency)) return .github;
if (hosted_git_info.isGitHubShorthand(dependency)) return .github;
if (isSCPLikePath(dependency)) return .git;
if (dependency.len == 1) return .dist_tag;
return switch (dependency[1]) {
@@ -765,11 +779,29 @@ pub const Version = struct {
// foo.tgz
// bar.tar.gz
if (isTarball(dependency)) return .tarball;
// user/repo
// user/repo#main
if (isGitHubRepoPath(dependency)) return .github;
if (hosted_git_info.isGitHubShorthand(dependency)) return .github;
// git@example.com:path/to/repo.git
if (isSCPLikePath(dependency)) return .git;
if (isSCPLikePath(dependency)) {
const dep_copy = bun.handleOom(allocator.dupe(u8, dependency));
defer allocator.free(dep_copy);
if (hosted_git_info.HostedGitInfo.fromUrl(allocator, dep_copy) catch null) |info| {
defer info.deinit();
// npa.js correctly identifies github urls as github even when using a
// git+https:// protocol and so on. Legacy bun implementation used to only
// treat "shortcut" (eg. "foo/bar") git repos as .github. We now add this
// bridge to help match the old behavior.
return switch (info.host_provider) {
.github => if (info.default_representation == .shortcut) .github else .git,
else => .git,
};
}
return .git;
}
// beta
if (!strings.containsChar(dependency, '|')) {
@@ -785,7 +817,14 @@ pub const Version = struct {
return .js_undefined;
}
const tag = try Tag.fromJS(globalObject, arguments[0]) orelse return .js_undefined;
// Convert JSValue to string slice
const dependency_str = try arguments[0].toBunString(globalObject);
defer dependency_str.deref();
var as_utf8 = dependency_str.toUTF8(bun.default_allocator);
defer as_utf8.deinit();
// Infer the tag from the dependency string
const tag = Tag.infer(as_utf8.slice());
var str = bun.String.init(@tagName(tag));
return str.transferToJS(globalObject);
}
@@ -1041,70 +1080,76 @@ pub fn parseWithTag(
};
},
.github => {
var from_url = false;
var input = dependency;
if (strings.hasPrefixComptime(input, "github:")) {
input = input["github:".len..];
} else if (strings.hasPrefixComptime(input, "git://github.com/")) {
input = input["git://github.com/".len..];
from_url = true;
} else {
if (strings.hasPrefixComptime(input, "git+")) {
input = input["git+".len..];
}
if (strings.hasPrefixComptime(input, "http")) {
var url = input["http".len..];
if (url.len > 2) {
switch (url[0]) {
':' => {
if (strings.hasPrefixComptime(url, "://")) {
url = url["://".len..];
}
},
's' => {
if (strings.hasPrefixComptime(url, "s://")) {
url = url["s://".len..];
}
},
else => {},
}
if (strings.hasPrefixComptime(url, "github.com/")) {
input = url["github.com/".len..];
from_url = true;
}
}
}
}
// Unfortunately, HostedGitInfo.fromUrl may need to mutate the input string so we copy
// it here.
const dep_copy = bun.handleOom(allocator.dupe(u8, dependency));
defer allocator.free(dep_copy);
if (comptime Environment.allow_assert) bun.assert(isGitHubRepoPath(input));
var hash_index: usize = 0;
var slash_index: usize = 0;
for (input, 0..) |c, i| {
switch (c) {
'/' => {
slash_index = i;
const info = hosted_git_info.HostedGitInfo.fromUrl(allocator, dep_copy) catch {
// Fallback to empty if parsing fails
return .{
.literal = sliced.value(),
.value = .{
.github = .{
.owner = String.from(""),
.repo = String.from(""),
.committish = String.from(""),
},
},
'#' => {
hash_index = i;
break;
.tag = .github,
};
} orelse {
// Fallback to empty if parsing returns null
return .{
.literal = sliced.value(),
.value = .{
.github = .{
.owner = String.from(""),
.repo = String.from(""),
.committish = String.from(""),
},
},
else => {},
}
}
.tag = .github,
};
};
defer info.deinit();
var repo = if (hash_index == 0) input[slash_index + 1 ..] else input[slash_index + 1 .. hash_index];
if (from_url and strings.endsWithComptime(repo, ".git")) {
repo = repo[0 .. repo.len - ".git".len];
}
// Now we have parsed info, we need to find these substrings in the original dependency
// to create String objects that point to the original buffer
const owner_str = info.user orelse "";
const repo_str = info.project;
const committish_str = info.committish orelse "";
// Find owner in dependency string
const owner_idx = strings.indexOf(dependency, owner_str);
const owner = if (owner_idx) |idx|
sliced.sub(dependency[idx .. idx + owner_str.len]).value()
else
String.from("");
// Find repo in dependency string
const repo_idx = strings.indexOf(dependency, repo_str);
const repo = if (repo_idx) |idx|
sliced.sub(dependency[idx .. idx + repo_str.len]).value()
else
String.from("");
// Find committish in dependency string
const committish = if (committish_str.len > 0) blk: {
const committish_idx = strings.indexOf(dependency, committish_str);
break :blk if (committish_idx) |idx|
sliced.sub(dependency[idx .. idx + committish_str.len]).value()
else
String.from("");
} else String.from("");
return .{
.literal = sliced.value(),
.value = .{
.github = .{
.owner = sliced.sub(input[0..slash_index]).value(),
.repo = sliced.sub(repo).value(),
.committish = if (hash_index == 0) String.from("") else sliced.sub(input[hash_index + 1 ..]).value(),
.owner = owner,
.repo = repo,
.committish = committish,
},
},
.tag = .github,
@@ -1457,6 +1502,7 @@ pub const Behavior = packed struct(u8) {
const string = []const u8;
const Environment = @import("../env.zig");
const hosted_git_info = @import("./hosted_git_info.zig");
const std = @import("std");
const Repository = @import("./repository.zig").Repository;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,237 @@
//! This module provides the functionality offered by
//! https://github.com/npm/validate-npm-package-name.
//!
//! It deviates from the original implementation in its string error messages. The difference is
//! that the original implementation is able to preserve the original name of the package in its
//! error messages while ours is not. This is a conscious decision to avoid allocations and it is
//! documented in the JS tests.
extern fn ModuleLoader__isBuiltin(data: [*]const u8, len: usize) bool;
/// The maximum length of a package name, according to the upstream npm implementation.
pub const max_package_name_length = 214;
/// Hard validation failures: a name with any of these set is invalid for
/// both new and pre-existing packages (see ValidationResult).
pub const Error = enum {
    empty_name,
    name_starts_with_dot,
    name_starts_with_underscore,
    name_surrounded_with_spaces,
    illegal_name,
    name_contains_non_url_safe_characters,

    /// Static human-readable message for the error (no allocation).
    pub fn message(self: Error) []const u8 {
        // TODO(markovejnovic): These errors are not as rich as npm's. Namely,
        // npm-validate-package-name will preserve the original name and show it in the error --
        // "<name> is not allowed", etc.
        return switch (self) {
            .empty_name => "name length must be greater than zero",
            .name_starts_with_dot => "name cannot start with a period",
            .name_starts_with_underscore => "name cannot start with an underscore",
            .name_surrounded_with_spaces => "name cannot contain leading or trailing spaces",
            .illegal_name => "name is not allowed",
            .name_contains_non_url_safe_characters => "name can only contain URL-friendly characters",
        };
    }
};
/// Soft validation failures: tolerated for pre-existing packages but
/// disqualifying for newly published ones (see ValidationResult).
pub const Warning = enum {
    core_module_name_conflict,
    name_too_long,
    name_contains_uppercase_letters,
    name_contains_special_characters,

    /// Static human-readable message for the warning (no allocation;
    /// the length message is rendered at comptime).
    pub fn message(self: Warning) []const u8 {
        return switch (self) {
            .core_module_name_conflict => "name conflicts a core module name",
            .name_too_long => std.fmt.comptimePrint(
                "name can no longer contain more than {} characters",
                .{max_package_name_length},
            ),
            .name_contains_uppercase_letters => "name can no longer contain capital letters",
            .name_contains_special_characters => "name can no longer contain special characters (\"~'!()*\")",
        };
    }
};
/// The outcome of `validate`: the sets of errors and warnings triggered by a
/// package name.
pub const ValidationResult = struct {
    errors: std.EnumSet(Error),
    warnings: std.EnumSet(Warning),

    /// A name is acceptable for a newly published package only when it
    /// triggered neither errors nor warnings.
    pub fn validForNewPackages(self: *const ValidationResult) bool {
        return self.validForOldPackages() and self.warnings.count() == 0;
    }

    /// Pre-existing packages are held to a looser standard: warnings are
    /// tolerated, hard errors are not.
    pub fn validForOldPackages(self: *const ValidationResult) bool {
        return self.errors.count() == 0;
    }
};
const exclusion_list = [_][]const u8{
"node_modules",
"favicon.ico",
};
/// Validates a package name following the rules of npm's
/// validate-npm-package-name (see the module doc). Hard violations are
/// collected as errors, legacy-only violations as warnings; a name may
/// accumulate several of each. Never allocates.
pub fn validate(name: []const u8) ValidationResult {
    var result: ValidationResult = .{
        .errors = .initEmpty(),
        .warnings = .initEmpty(),
    };

    // Empty name, or a disallowed leading character.
    if (name.len == 0) {
        result.errors.insert(Error.empty_name);
    } else {
        switch (name[0]) {
            '.' => {
                result.errors.insert(Error.name_starts_with_dot);
            },
            '_' => {
                result.errors.insert(Error.name_starts_with_underscore);
            },
            else => {},
        }
    }

    // Leading/trailing whitespace triggers both the spacing error and the
    // URL-safety error.
    if (!bun.strings.eql(bun.strings.trimSpaces(name), name)) {
        result.errors.insert(Error.name_surrounded_with_spaces);
        result.errors.insert(Error.name_contains_non_url_safe_characters);
    }

    // Names that are rejected outright (case-insensitive match).
    for (exclusion_list) |excluded| {
        if (bun.strings.eqlCaseInsensitiveASCII(name, excluded, true)) {
            result.errors.insert(Error.illegal_name);
            break;
        }
    }

    // Colliding with a builtin module name is a warning, not an error.
    if (ModuleLoader__isBuiltin(name.ptr, name.len)) {
        result.warnings.insert(Warning.core_module_name_conflict);
    }

    if (name.len > max_package_name_length) {
        result.warnings.insert(Warning.name_too_long);
    }

    if (bun.strings.containsUppercaseAscii(name)) {
        result.warnings.insert(Warning.name_contains_uppercase_letters);
    }

    // Special characters are only checked on the final path segment
    // (i.e. the package part of a scoped name).
    const last_segment = if (bun.strings.lastIndexOfChar(name, '/')) |idx|
        name[idx + 1 ..]
    else
        name;
    if (bun.strings.indexOfAny(last_segment, "~'!()*") != null) {
        result.warnings.insert(Warning.name_contains_special_characters);
    }

    if (ScopedPackage.init(name)) |scoped| {
        // "@scope/.pkg" hides the leading dot behind the scope; re-check it here.
        if (scoped.pkg.len > 0 and scoped.pkg[0] == '.') {
            result.errors.insert(Error.name_starts_with_dot);
        }
        // Both the scope and the package part must be URL-safe.
        if (bun.strings.indexOfNeedsURLEncode(scoped.user) != null or
            bun.strings.indexOfNeedsURLEncode(scoped.pkg) != null or
            bun.strings.indexOfChar(scoped.user, ' ') != null or
            bun.strings.indexOfChar(scoped.pkg, ' ') != null or
            bun.strings.indexOfChar(scoped.user, ':') != null or
            bun.strings.indexOfChar(scoped.pkg, ':') != null)
        {
            result.errors.insert(Error.name_contains_non_url_safe_characters);
        }
    } else {
        // Unscoped names may not contain '/' at all.
        if (bun.strings.indexOfChar(name, '/') != null) {
            result.errors.insert(Error.name_contains_non_url_safe_characters);
        }
        if (bun.strings.indexOfNeedsURLEncode(name) != null or
            bun.strings.indexOfChar(name, ' ') != null or
            bun.strings.indexOfChar(name, ':') != null)
        {
            result.errors.insert(Error.name_contains_non_url_safe_characters);
        }
    }

    return result;
}
/// A scoped package name ("@user/pkg") split into its two parts.
const ScopedPackage = struct {
    user: []const u8,
    pkg: []const u8,

    /// Splits `name` into scope and package parts.
    /// Returns null unless the name looks like "@user/pkg" with a non-empty
    /// user, a non-empty pkg, and exactly one '/'.
    fn init(name: []const u8) ?ScopedPackage {
        // "@a/b" is the shortest possible scoped name.
        if (name.len < 4 or name[0] != '@') return null;

        const slash_idx = bun.strings.indexOfChar(name, '/') orelse return null;
        // Neither side of the slash may be empty.
        if (slash_idx == 1 or slash_idx >= name.len - 1) return null;

        // Ensure there's only one slash
        if (bun.strings.indexOfCharPos(name, '/', slash_idx + 1) != null) return null;

        return .{
            .user = name[1..slash_idx],
            .pkg = name[slash_idx + 1 ..],
        };
    }
};
/// JS-facing test bindings for this module.
pub const TestingAPIs = struct {
    /// JS binding: validate(name) — validates a package name and returns
    /// an object with validForNewPackages/validForOldPackages plus optional
    /// errors/warnings string arrays.
    pub fn jsValidate(go: *jsc.JSGlobalObject, cf: *jsc.CallFrame) bun.JSError!jsc.JSValue {
        if (cf.argumentsCount() < 1) {
            return go.throw("validate() requires 1 argument", .{});
        }

        const name_arg = cf.argument(0);
        const name_str = try name_arg.toBunString(go);
        defer name_str.deref();

        // `validate` operates on UTF-8 bytes.
        const name_utf8 = name_str.toUTF8(bun.default_allocator);
        defer name_utf8.deinit();

        const result = validate(name_utf8.slice());
        return try validationResultToJS(go, result);
    }

    /// Converts a ValidationResult into a JS object. The errors/warnings
    /// arrays are only attached when non-empty.
    fn validationResultToJS(
        go: *jsc.JSGlobalObject,
        result: ValidationResult,
    ) bun.JSError!jsc.JSValue {
        const obj = jsc.JSValue.createEmptyObject(go, 4);
        obj.put(
            go,
            bun.String.static("validForNewPackages"),
            jsc.JSValue.jsBoolean(result.validForNewPackages()),
        );
        obj.put(
            go,
            bun.String.static("validForOldPackages"),
            jsc.JSValue.jsBoolean(result.validForOldPackages()),
        );

        if (result.errors.count() != 0) {
            const errors_array = try jsc.JSValue.createEmptyArray(go, 0);
            var it = result.errors.iterator();
            while (it.next()) |err| {
                try errors_array.push(go, bun.String.fromBytes(err.message()).toJS(go));
            }
            obj.put(go, bun.String.static("errors"), errors_array);
        }

        if (result.warnings.count() != 0) {
            const warnings_array = try jsc.JSValue.createEmptyArray(go, 0);
            var it = result.warnings.iterator();
            while (it.next()) |warn| {
                try warnings_array.push(go, bun.String.fromBytes(warn.message()).toJS(go));
            }
            obj.put(go, bun.String.static("warnings"), warnings_array);
        }

        return obj;
    }
};
const std = @import("std");
const bun = @import("bun");
const jsc = bun.jsc;

View File

@@ -120,10 +120,6 @@ export const npm_manifest_test_helpers = $zig("npm.zig", "PackageManifest.bindin
parseManifest: (manifestFileName: string, registryUrl: string) => any;
};
// Like npm-package-arg, sort of https://www.npmjs.com/package/npm-package-arg
export type Dependency = any;
export const npa: (name: string) => Dependency = $newZigFunction("dependency.zig", "fromJS", 1);
export const npmTag: (
name: string,
) => undefined | "npm" | "dist_tag" | "tarball" | "folder" | "symlink" | "workspace" | "git" | "github" =
@@ -210,3 +206,17 @@ export const structuredCloneAdvanced: (
) => any = $newCppFunction("StructuredClone.cpp", "jsFunctionStructuredCloneAdvanced", 5);
export const lsanDoLeakCheck = $newCppFunction("InternalForTesting.cpp", "jsFunction_lsanDoLeakCheck", 1);
export const hostedGitInfo = {
parseUrl: $newZigFunction("hosted_git_info.zig", "TestingAPIs.jsParseUrl", 1),
fromUrl: $newZigFunction("hosted_git_info.zig", "TestingAPIs.jsFromUrl", 1),
};
export const Npa = {
npa: $newZigFunction("npm_package_arg.zig", "TestingAPIs.jsNpa", 3),
resolve: $newZigFunction("npm_package_arg.zig", "TestingAPIs.jsResolve", 4),
};
export const ValidateNpmPackageName = {
validate: $newZigFunction("validate_npm_package_name.zig", "TestingAPIs.jsValidate", 1),
};

View File

@@ -37,6 +37,16 @@ inline fn nqlAtIndexCaseInsensitive(comptime string_count: comptime_int, index:
return false;
}
/// The given string contains separators that match the platform's path separator style.
pub fn hasPlatformPathSeparators(input_path: []const u8) bool {
    // Windows accepts both forward and backward slashes; POSIX only forward.
    if (bun.Environment.isWindows) {
        return bun.strings.containsChar(input_path, '/') or
            bun.strings.containsChar(input_path, '\\');
    }
    return bun.strings.containsChar(input_path, '/');
}
const IsSeparatorFunc = fn (char: u8) bool;
const IsSeparatorFuncT = fn (comptime T: type, char: anytype) bool;
const LastSeparatorFunction = fn (slice: []const u8) ?usize;

View File

@@ -229,7 +229,17 @@ pub const Group = struct {
pub const FlagsBitSet = bun.bit_set.IntegerBitSet(3);
pub fn isExact(this: *const Group) bool {
return this.head.next == null and this.head.head.next == null and !this.head.head.range.hasRight() and this.head.head.range.left.op == .eql;
const range = this.head.head.range;
return range.left.op == .eql and
range.right.op == .unset and
this.head.head.next == null and
this.head.tail == null and
this.head.next == null and
this.tail == null;
}
pub fn isEmpty(this: *const Group) bool {
return !this.head.head.range.hasLeft() and !this.head.head.range.hasRight();
}
pub fn @"is *"(this: *const Group) bool {

View File

@@ -414,6 +414,12 @@ pub fn indexOfSigned(self: string, str: string) i32 {
return @as(i32, @intCast(i));
}
/// Returns last index of `char` occurring before the first `before`
/// character (or anywhere in the string if `before` is absent).
pub fn lastIndexBeforeChar(in: []const u8, char: u8, before: u8) ?usize {
    // Limit the search window to everything preceding the first `before`.
    const limit = std.mem.indexOfScalar(u8, in, before) orelse in.len;
    return std.mem.lastIndexOfScalar(u8, in[0..limit], char);
}
pub fn lastIndexOfChar(self: []const u8, char: u8) callconv(bun.callconv_inline) ?usize {
if (comptime Environment.isLinux) {
if (@inComptime()) {
@@ -467,6 +473,12 @@ pub fn indexOfT(comptime T: type, haystack: []const T, needle: []const T) ?usize
return std.mem.indexOf(T, haystack, needle);
}
/// Bounds-checked access to a character in a string.
/// Returns null when `idx` is past the end of `haystack`.
pub fn charAtT(comptime T: type, haystack: []const T, idx: usize) ?T {
    return if (idx < haystack.len) haystack[idx] else null;
}
pub fn split(self: string, delimiter: string) SplitIterator {
return SplitIterator{
.buffer = self,
@@ -1013,6 +1025,15 @@ pub fn hasPrefixCaseInsensitive(str: []const u8, prefix: []const u8) bool {
return hasPrefixCaseInsensitiveT(u8, str, prefix);
}
/// Case-insensitive suffix check over elements of type `T`.
/// The suffix is always given as bytes (`[]const u8`).
pub fn endsWithCaseInsensitiveT(comptime T: type, str: []const T, suffix: []const u8) bool {
    if (str.len < suffix.len) return false;
    return eqlCaseInsensitiveT(T, str[str.len - suffix.len ..], suffix);
}

/// Case-insensitive suffix check for byte strings.
pub fn endsWithCaseInsensitive(str: []const u8, suffix: []const u8) bool {
    return endsWithCaseInsensitiveT(u8, str, suffix);
}
pub fn eqlLongT(comptime T: type, a_str: []const T, b_str: []const T, comptime check_len: bool) bool {
if (comptime check_len) {
const len = b_str.len;
@@ -1132,6 +1153,15 @@ pub fn index(self: string, str: string) i32 {
}
}
/// Returns the substring `self[start..stop]`, with both bounds clamped to the
/// string's length. `start` defaults to 0 and `stop` defaults to `self.len`
/// when null. Out-of-range or inverted bounds yield an empty string rather
/// than panicking on a bad slice.
pub fn substring(self: anytype, start: ?usize, stop: ?usize) @TypeOf(self) {
    const lo = @min(start orelse 0, self.len);
    const hi = @min(stop orelse self.len, self.len);
    // Guard against `start > stop`, which would otherwise trip the
    // slice-bounds safety check.
    return self[lo..@max(hi, lo)];
}
pub const ascii_vector_size = if (Environment.isWasm) 8 else 16;
pub const ascii_u16_vector_size = if (Environment.isWasm) 4 else 8;
pub const AsciiVectorInt = std.meta.Int(.unsigned, ascii_vector_size);
@@ -1528,6 +1558,21 @@ pub fn trimLeadingChar(slice: []const u8, char: u8) []const u8 {
return "";
}
/// Count leading consecutive occurrences of a character.
/// Returns the count of consecutive characters from the start of the slice.
/// ```zig
/// countLeadingChar("///foo", '/') -> 3
/// countLeadingChar("foo", '/') -> 0
/// ```
pub fn countLeadingChar(slice: []const u8, char: u8) usize {
    var i: usize = 0;
    while (i < slice.len and slice[i] == char) : (i += 1) {}
    return i;
}
/// Trim leading pattern of 2 bytes
///
/// e.g.
@@ -1553,6 +1598,14 @@ pub fn trimPrefixComptime(comptime T: type, buffer: []const T, comptime prefix:
buffer;
}
/// Removes a comptime-known `suffix` from the end of `buffer` if present;
/// otherwise returns `buffer` unchanged.
pub fn trimSuffixComptime(buffer: []const u8, comptime suffix: anytype) []const u8 {
    return if (hasSuffixComptime(buffer, suffix))
        buffer[0 .. buffer.len - suffix.len]
    else
        buffer;
}
/// Get the line number and the byte offsets of `line_range_count` above the desired line number
/// The final element is the end index of the desired line
const LineRange = struct {
@@ -1759,12 +1812,24 @@ pub fn trim(slice: anytype, comptime values_to_strip: []const u8) @TypeOf(slice)
return slice[begin..end];
}
/// Trims ASCII whitespace (see `whitespace_chars`) from both ends of `slice`.
pub fn trimSpaces(slice: anytype) @TypeOf(slice) {
    return trim(slice, &whitespace_chars);
}
/// True when every byte of `slice` is an ASCII whitespace character
/// (per `whitespace_chars`); vacuously true for the empty slice.
pub fn isAllWhitespace(slice: []const u8) bool {
    for (slice) |c| {
        if (std.mem.indexOfScalar(u8, &whitespace_chars, c) == null) return false;
    }
    return true;
}
// TODO(markovejnovic): Could be SIMD
/// True if `slice` contains at least one ASCII uppercase letter ('A'..'Z').
pub fn containsUppercaseAscii(slice: []const u8) bool {
    for (slice) |byte| {
        if (std.ascii.isUpper(byte)) return true;
    }
    return false;
}
pub const whitespace_chars = [_]u8{ ' ', '\t', '\n', '\r', std.ascii.control_code.vt, std.ascii.control_code.ff };
pub fn lengthOfLeadingWhitespaceASCII(slice: string) usize {
@@ -2020,7 +2085,7 @@ pub fn concatWithLength(
allocator: std.mem.Allocator,
args: []const string,
length: usize,
) ![]u8 {
) bun.OOM![]u8 {
const out = try allocator.alloc(u8, length);
var remain = out;
for (args) |arg| {
@@ -2034,7 +2099,7 @@ pub fn concatWithLength(
pub fn concat(
allocator: std.mem.Allocator,
args: []const string,
) ![]u8 {
) bun.OOM![]u8 {
var length: usize = 0;
for (args) |arg| {
length += arg.len;
@@ -2135,6 +2200,48 @@ fn QuoteEscapeFormat(comptime flags: QuoteEscapeFormatFlags) type {
};
}
/// Manages a slice of an owned buffer, useful for avoiding re-allocations when only a portion of
/// an allocated buffer is needed.
///
/// Example: Parsing "123 Main St" where only "Main St" is needed but the entire
/// string was allocated. SlicedBuffer owns the full buffer while exposing only
/// the relevant slice.
pub const SlicedBuffer = struct {
    /// The full allocated buffer
    #buf: []const u8,
    /// The slice of interest within buf
    #slice: []const u8,
    /// Allocator used to free buf
    #allocator: std.mem.Allocator,

    /// Takes ownership of `buffer`. `slice` must point inside `buffer`.
    pub fn init(allocator: std.mem.Allocator, buffer: []const u8, slice: []const u8) SlicedBuffer {
        // The slice must lie entirely within the owned buffer.
        // (Fixed: the original asserted against an undeclared `buf`.)
        bun.assert(@intFromPtr(slice.ptr) >= @intFromPtr(buffer.ptr) and
            @intFromPtr(slice.ptr) + slice.len <= @intFromPtr(buffer.ptr) + buffer.len);
        return .{
            .#buf = buffer,
            .#slice = slice,
            .#allocator = allocator,
        };
    }

    /// Creates a SlicedBuffer where the slice is the entire buffer (no slicing).
    pub fn initUnsliced(allocator: std.mem.Allocator, buffer: []const u8) SlicedBuffer {
        return .{
            .#buf = buffer,
            .#slice = buffer,
            .#allocator = allocator,
        };
    }

    /// Returns the full owned buffer.
    pub fn buf(self: *const SlicedBuffer) []const u8 {
        // Fixed: the original returned `self.buf` (the method itself) rather
        // than the private `#buf` field.
        return self.#buf;
    }

    /// Returns the slice of interest within the owned buffer.
    pub fn slice(self: *const SlicedBuffer) []const u8 {
        return self.#slice;
    }

    /// Frees the full buffer. The SlicedBuffer (and any slices derived from
    /// it) must not be used afterwards.
    pub fn deinit(self: *const SlicedBuffer) void {
        self.#allocator.free(self.#buf);
    }
};
/// Generic. Works on []const u8, []const u16, etc
pub fn indexOfScalar(input: anytype, scalar: std.meta.Child(@TypeOf(input))) callconv(bun.callconv_inline) ?usize {
if (comptime std.meta.Child(@TypeOf(input)) == u8) {
@@ -2342,7 +2449,6 @@ pub const toNTPath16 = paths_.toNTPath16;
pub const toPath = paths_.toPath;
pub const toPathMaybeDir = paths_.toPathMaybeDir;
pub const toPathNormalized = paths_.toPathNormalized;
pub const toWDirNormalized = paths_.toWDirNormalized;
pub const toWDirPath = paths_.toWDirPath;
pub const toWPath = paths_.toWPath;
pub const toWPathMaybeDir = paths_.toWPathMaybeDir;

View File

@@ -233,26 +233,6 @@ pub fn normalizeSlashesOnly(buf: []u8, utf8: []const u8, comptime desired_slash:
return normalizeSlashesOnlyT(u8, buf, utf8, desired_slash, false);
}
/// Converts a UTF-8 path to a null-terminated UTF-16 directory path,
/// normalizing '/' separators to '\\' first.
pub fn toWDirNormalized(wbuf: []u16, utf8: []const u8) [:0]const u16 {
    var renormalized: ?*bun.PathBuffer = null;
    defer if (renormalized) |r| bun.path_buffer_pool.put(r);

    var path_to_use = utf8;

    // Rewrite forward slashes into a pooled scratch buffer so the caller's
    // input is never mutated.
    if (bun.strings.containsChar(utf8, '/')) {
        renormalized = bun.path_buffer_pool.get();
        @memcpy(renormalized.?[0..utf8.len], utf8);
        for (renormalized.?[0..utf8.len]) |*c| {
            if (c.* == '/') {
                c.* = '\\';
            }
        }
        path_to_use = renormalized.?[0..utf8.len];
    }

    return toWDirPath(wbuf, path_to_use);
}
/// Converts a UTF-8 path to a null-terminated UTF-16 path (non-directory variant).
pub fn toWPath(wbuf: []u16, utf8: []const u8) [:0]u16 {
    return toWPathMaybeDir(wbuf, utf8, false);
}

View File

@@ -1,4 +1,8 @@
// This is close to WHATWG URL, but we don't want the validation errors
/// Unsafe parser. Assumes the given string is already a valid URL.
///
/// Use bun.jsc.URL for a safe, compliant URL parser.
///
/// This is close to WHATWG URL, but we don't want the validation errors.
pub const URL = struct {
const log = Output.scoped(.URL, .visible);
@@ -870,6 +874,33 @@ pub const PercentEncoding = struct {
return written;
}
/// Extracts and percent-decodes the pathname from a URL string.
/// Returns an owned slice that must be freed by the caller.
/// Returns error.InvalidURL if the URL cannot be parsed.
/// Returns error.InvalidPath if percent-decoding fails.
pub fn extractDecodedPathname(
    url_string: bun.String,
    allocator: std.mem.Allocator,
) error{ InvalidURL, InvalidPath, OutOfMemory }![]u8 {
    const url = bun.jsc.URL.fromString(url_string) orelse return error.InvalidURL;
    defer url.deinit();

    const pathname_str = url.pathname();
    defer pathname_str.deref();

    const pathname = pathname_str.toUTF8(allocator);
    defer pathname.deinit();

    // Percent-decode into a growable buffer, then hand ownership to the caller.
    // The deferred deinit is a no-op after toOwnedSlice succeeds.
    var path_list = std.ArrayList(u8).init(allocator);
    defer path_list.deinit();
    _ = decode(
        @TypeOf(path_list.writer()),
        path_list.writer(),
        pathname.slice(),
    ) catch return error.InvalidPath;

    return try path_list.toOwnedSlice();
}
};
pub const FormData = struct {

View File

@@ -1,397 +0,0 @@
// Bun Snapshot v1, https://bun.sh/docs/test/snapshots
exports[`npa @scoped/package 1`] = `
{
"name": "@scoped/package",
"version": {
"name": "@scoped/package",
"tag": "latest",
"type": "dist_tag",
},
}
`;
exports[`npa @scoped/package@1.0.0 1`] = `
{
"name": "@scoped/package",
"version": {
"alias": false,
"name": "@scoped/package",
"type": "npm",
"version": "==1.0.0",
},
}
`;
exports[`npa @scoped/package@1.0.0-beta.1 1`] = `
{
"name": "@scoped/package",
"version": {
"alias": false,
"name": "@scoped/package",
"type": "npm",
"version": "==1.0.0-beta.1",
},
}
`;
exports[`npa @scoped/package@1.0.0-beta.1+build.123 1`] = `
{
"name": "@scoped/package",
"version": {
"alias": false,
"name": "@scoped/package",
"type": "npm",
"version": "==1.0.0-beta.1+build.123",
},
}
`;
exports[`npa package 1`] = `
{
"name": "package",
"version": {
"name": "package",
"tag": "latest",
"type": "dist_tag",
},
}
`;
exports[`npa package@1.0.0 1`] = `
{
"name": "package",
"version": {
"alias": false,
"name": "package",
"type": "npm",
"version": "==1.0.0",
},
}
`;
exports[`npa package@1.0.0-beta.1 1`] = `
{
"name": "package",
"version": {
"alias": false,
"name": "package",
"type": "npm",
"version": "==1.0.0-beta.1",
},
}
`;
exports[`npa package@1.0.0-beta.1+build.123 1`] = `
{
"name": "package",
"version": {
"alias": false,
"name": "package",
"type": "npm",
"version": "==1.0.0-beta.1+build.123",
},
}
`;
exports[`npa bitbucket:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "bitbucket:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa bitbucket.org:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "bitbucket.org:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa bitbucket.com:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "bitbucket.com:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa git@bitbucket.org:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "git@bitbucket.org:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa foo/bar 1`] = `
{
"name": "",
"version": {
"owner": "foo",
"ref": "",
"repo": "bar",
"type": "github",
},
}
`;
exports[`npa gitlab:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "gitlab:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa gitlab.com:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "gitlab.com:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa http://localhost:5000/no-deps/-/no-deps-2.0.0.tgz 1`] = `
{
"name": "",
"version": {
"name": "",
"type": "tarball",
"url": "http://localhost:5000/no-deps/-/no-deps-2.0.0.tgz",
},
}
`;
exports[`npa https://registry.npmjs.org/no-deps/-/no-deps-2.0.0.tgz 1`] = `
{
"name": "",
"version": {
"name": "",
"type": "tarball",
"url": "https://registry.npmjs.org/no-deps/-/no-deps-2.0.0.tgz",
},
}
`;
exports[`npa file:./path/to/tarball.tgz 1`] = `
{
"name": "",
"version": {
"name": "",
"path": "./path/to/tarball.tgz",
"type": "tarball",
},
}
`;
exports[`npa ./path/to/tarball.tgz 1`] = `
{
"name": "",
"version": {
"name": "",
"path": "./path/to/tarball.tgz",
"type": "tarball",
},
}
`;
exports[`npa foo/bar 2`] = `
{
"name": "",
"version": {
"owner": "foo",
"ref": "",
"repo": "bar",
"type": "github",
},
}
`;
exports[`npa github:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "dylan-conway",
"ref": "",
"repo": "public-install-test",
"type": "github",
},
}
`;
exports[`npa git@github.com:dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "",
"repo": "git@github.com:dylan-conway/public-install-test",
"type": "git",
},
}
`;
exports[`npa https://github.com/dylan-conway/public-install-test 1`] = `
{
"name": "",
"version": {
"owner": "dylan-conway",
"ref": "",
"repo": "public-install-test",
"type": "github",
},
}
`;
exports[`npa https://github.com/dylan-conway/public-install-test.git 1`] = `
{
"name": "",
"version": {
"owner": "dylan-conway",
"ref": "",
"repo": "public-install-test",
"type": "github",
},
}
`;
exports[`npa https://github.com/dylan-conway/public-install-test.git#semver:^1.0.0 1`] = `
{
"name": "",
"version": {
"owner": "",
"ref": "semver:^1.0.0",
"repo": "https://github.com/dylan-conway/public-install-test.git",
"type": "git",
},
}
`;
exports[`dependencies: {"foo": "1.2.3"} 1`] = `
{
"alias": false,
"name": "foo",
"type": "npm",
"version": "==1.2.3-foo",
}
`;
exports[`dependencies: {"foo": "latest"} 1`] = `
{
"name": "foo",
"tag": "latest",
"type": "dist_tag",
}
`;
exports[`dependencies: {"foo": "workspace:*"} 1`] = `
{
"name": "*foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:^1.0.0"} 1`] = `
{
"name": "^1.0.0foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:1.0.0"} 1`] = `
{
"name": "1.0.0foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:1.0.0-beta.1"} 1`] = `
{
"name": "1.0.0-beta.1foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:1.0.0-beta.1+build.123"} 1`] = `
{
"name": "1.0.0-beta.1+build.123foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:1.0.0-beta.1+build.123"} 2`] = `
{
"name": "1.0.0-beta.1+build.123foo",
"type": "workspace",
}
`;
exports[`dependencies: {"foo": "workspace:1.0.0-beta.1+build.123"} 3`] = `
{
"name": "1.0.0-beta.1+build.123foo",
"type": "workspace",
}
`;
exports[`dependencies: {"bar": "^1.0.0"} 1`] = `
{
"alias": false,
"name": "bar",
"type": "npm",
"version": ">=1.0.0-bar <2.0.0",
}
`;
exports[`dependencies: {"bar": "~1.0.0"} 1`] = `
{
"alias": false,
"name": "bar",
"type": "npm",
"version": ">=1.0.0-bar <1.1.0",
}
`;
exports[`dependencies: {"bar": "> 1.0.0 < 2.0.0"} 1`] = `
{
"alias": false,
"name": "bar",
"type": "npm",
"version": ">1.0.0 && <2.0.0-bar",
}
`;
exports[`dependencies: {"bar": "1.0.0 - 2.0.0"} 1`] = `
{
"alias": false,
"name": "bar",
"type": "npm",
"version": ">=1.0.0 <=2.0.0-bar",
}
`;

View File

@@ -1,70 +0,0 @@
import { npa } from "bun:internal-for-testing";
import { expect, test } from "bun:test";
// Specifier fixtures, grouped by the kind of resolution npa should produce.
// These tests are snapshot-based: the test title (derived from the input)
// is the snapshot key, so renaming inputs invalidates the snapshot file.
const bitbucket = [
"bitbucket:dylan-conway/public-install-test",
"bitbucket.org:dylan-conway/public-install-test",
"bitbucket.com:dylan-conway/public-install-test",
"git@bitbucket.org:dylan-conway/public-install-test",
];
const tarball_remote = [
"http://localhost:5000/no-deps/-/no-deps-2.0.0.tgz",
"https://registry.npmjs.org/no-deps/-/no-deps-2.0.0.tgz",
];
const local_tarball = ["file:./path/to/tarball.tgz", "./path/to/tarball.tgz"];
const github = ["foo/bar"];
// NOTE(review): `folder` is declared but never spread into `all` — confirm
// whether folder specs were meant to be snapshot-tested as well.
const folder = ["file:./path/to/folder"];
const gitlab = ["gitlab:dylan-conway/public-install-test", "gitlab.com:dylan-conway/public-install-test"];
const all = [
"@scoped/package",
"@scoped/package@1.0.0",
"@scoped/package@1.0.0-beta.1",
"@scoped/package@1.0.0-beta.1+build.123",
"package",
"package@1.0.0",
"package@1.0.0-beta.1",
"package@1.0.0-beta.1+build.123",
...bitbucket,
...github,
...gitlab,
...tarball_remote,
...local_tarball,
// NOTE(review): `github` is spread a second time here; the duplicate is what
// produces the "npa foo/bar 2" snapshot entry. Confirm it is intentional.
...github,
"github:dylan-conway/public-install-test",
"git@github.com:dylan-conway/public-install-test",
"https://github.com/dylan-conway/public-install-test",
"https://github.com/dylan-conway/public-install-test.git",
"https://github.com/dylan-conway/public-install-test.git#semver:^1.0.0",
];
// Each specifier string is parsed standalone, as `bun add <spec>` would see it.
test.each(all)("npa %s", dep => {
expect(npa(dep)).toMatchSnapshot();
});
// (name, version) pairs as they appear in a package.json "dependencies" map.
// NOTE(review): the "workspace:1.0.0-beta.1+build.123" entry appears three
// times, yielding snapshot entries 1, 2, and 3 — confirm the repetition is
// deliberate.
const pkgJsonLike = [
["foo", "1.2.3"],
["foo", "latest"],
["foo", "workspace:*"],
["foo", "workspace:^1.0.0"],
["foo", "workspace:1.0.0"],
["foo", "workspace:1.0.0-beta.1"],
["foo", "workspace:1.0.0-beta.1+build.123"],
["foo", "workspace:1.0.0-beta.1+build.123"],
["foo", "workspace:1.0.0-beta.1+build.123"],
["bar", "^1.0.0"],
["bar", "~1.0.0"],
["bar", "> 1.0.0 < 2.0.0"],
["bar", "1.0.0 - 2.0.0"],
];
test.each(pkgJsonLike)('dependencies: {"%s": "%s"}', (name, version) => {
expect(npa(name, version)).toMatchSnapshot();
});
// Garbage input must be rejected rather than silently parsed.
test("bad", () => {
expect(() => npa("-123!}{P}{!P#$s")).toThrow();
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,31 @@
import { hostedGitInfo } from "bun:internal-for-testing";
import { describe, expect, it } from "bun:test";
import { invalidGitUrls, validGitUrls } from "./cases";
// Fixture-driven port of npm/hosted-git-info's fromUrl() tests.
// `validGitUrls` maps each input URL to the fields fromUrl() should extract.
describe("fromUrl", () => {
describe("valid urls", () => {
describe.each(Object.entries(validGitUrls))("%s", (_, urlset: object) => {
it.each(Object.entries(urlset))("parses %s", (url, expected) => {
// Only fields present (and truthy) on the fixture are asserted;
// NOTE(review): a falsy expected value (e.g. "") is silently skipped
// by the `&&` guards — confirm no fixture relies on matching "".
expect(hostedGitInfo.fromUrl(url)).toMatchObject({
...(expected.type && { type: expected.type }),
...(expected.domain && { domain: expected.domain }),
...(expected.user && { user: expected.user }),
...(expected.project && { project: expected.project }),
...(expected.committish && { committish: expected.committish }),
...(expected.default && { default: expected.default }),
});
});
});
});
// TODO(markovejnovic): Unskip these tests.
describe.skip("invalid urls", () => {
describe.each(Object.entries(invalidGitUrls))("%s", (_, urls: (string | null | undefined)[]) => {
it.each(urls)("does not permit %s", url => {
// Each invalid input must make fromUrl() throw, not return a value.
expect(() => {
hostedGitInfo.fromUrl(url);
}).toThrow();
});
});
});
});

View File

@@ -0,0 +1,21 @@
/**
* Mimics https://github.com/npm/hosted-git-info/blob/main/test/parse-url.js
*/
import { hostedGitInfo } from "bun:internal-for-testing";
import { describe, expect, it } from "bun:test";
// Inputs that hosted-git-info's parseUrl() must accept without returning null.
const parseableUrls = [
  // Taken directly from the upstream hosted-git-info test suite.
  "git+ssh://git@abc:frontend/utils.git#6d45447e0c5eb6cd2e3edf05a8c5a9bb81950c79",
  // Bun-specific additions covering unusual auth/host/committish shapes.
  "ssh://:password@bitbucket.org:foo/bar.git",
  "git@bitbucket.org:foo/bar",
  "gist:user:password@/feedbeef#branch",
  "github:foo/bar#branch with space",
];

describe("parseUrl", () => {
  it.each(parseableUrls)("parses %s", url => {
    expect(hostedGitInfo.parseUrl(url)).not.toBeNull();
  });
});

View File

@@ -0,0 +1,942 @@
import path from "path";
import os from "os";
export default {
basic: {
'foo@1.2': {
name: 'foo',
escapedName: 'foo',
type: 'range',
saveSpec: null,
fetchSpec: '1.2',
raw: 'foo@1.2',
rawSpec: '1.2',
},
'foo@~1.2': {
name: 'foo',
escapedName: 'foo',
type: 'range',
saveSpec: null,
fetchSpec: '~1.2',
raw: 'foo@~1.2',
rawSpec: '~1.2',
},
'@foo/bar': {
raw: '@foo/bar',
name: '@foo/bar',
escapedName: '@foo%2fbar',
scope: '@foo',
rawSpec: '*',
saveSpec: null,
fetchSpec: '*',
type: 'range',
},
'@foo/bar@': {
raw: '@foo/bar@',
name: '@foo/bar',
escapedName: '@foo%2fbar',
scope: '@foo',
rawSpec: '*',
saveSpec: null,
fetchSpec: '*',
type: 'range',
},
'@foo/bar@baz': {
raw: '@foo/bar@baz',
name: '@foo/bar',
escapedName: '@foo%2fbar',
scope: '@foo',
rawSpec: 'baz',
saveSpec: null,
fetchSpec: 'baz',
type: 'tag',
},
'@f fo o al/ a d s ;f': {
raw: '@f fo o al/ a d s ;f',
name: null,
escapedName: null,
rawSpec: '@f fo o al/ a d s ;f',
saveSpec: 'file:@f fo o al/ a d s ;f',
fetchSpec: '/test/a/b/@f fo o al/ a d s ;f',
type: 'directory',
},
'foo@1.2.3': {
name: 'foo',
escapedName: 'foo',
type: 'version',
saveSpec: null,
fetchSpec: '1.2.3',
raw: 'foo@1.2.3',
},
'foo@=v1.2.3': {
name: 'foo',
escapedName: 'foo',
type: 'version',
saveSpec: null,
fetchSpec: '=v1.2.3',
raw: 'foo@=v1.2.3',
rawSpec: '=v1.2.3',
},
'foo@npm:bar': {
name: 'foo',
escapedName: 'foo',
type: 'alias',
saveSpec: null,
fetchSpec: null,
raw: 'foo@npm:bar',
rawSpec: 'npm:bar',
subSpec: {
registry: true,
name: 'bar',
escapedName: 'bar',
type: 'range',
raw: 'bar',
rawSpec: '*',
saveSpec: null,
fetchSpec: '*',
},
},
'git+ssh://git@notgithub.com/user/foo#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@notgithub.com/user/foo#1.2.3',
fetchSpec: 'ssh://git@notgithub.com/user/foo',
gitCommittish: '1.2.3',
raw: 'git+ssh://git@notgithub.com/user/foo#1.2.3',
},
'git+ssh://git@notgithub.com/user/foo': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@notgithub.com/user/foo',
fetchSpec: 'ssh://git@notgithub.com/user/foo',
gitCommittish: null,
raw: 'git+ssh://git@notgithub.com/user/foo',
},
'git+ssh://git@notgithub.com:user/foo': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@notgithub.com:user/foo',
fetchSpec: 'git@notgithub.com:user/foo',
gitCommittish: null,
raw: 'git+ssh://git@notgithub.com:user/foo',
},
'git+ssh://mydomain.com:foo': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:foo',
fetchSpec: 'mydomain.com:foo',
gitCommittish: null,
raw: 'git+ssh://mydomain.com:foo',
},
'git+ssh://git@notgithub.com:user/foo#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@notgithub.com:user/foo#1.2.3',
fetchSpec: 'git@notgithub.com:user/foo',
gitCommittish: '1.2.3',
raw: 'git+ssh://git@notgithub.com:user/foo#1.2.3',
},
'git+ssh://mydomain.com:foo#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:foo#1.2.3',
fetchSpec: 'mydomain.com:foo',
gitCommittish: '1.2.3',
raw: 'git+ssh://mydomain.com:foo#1.2.3',
},
'git+ssh://mydomain.com:foo/bar#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:foo/bar#1.2.3',
fetchSpec: 'mydomain.com:foo/bar',
gitCommittish: '1.2.3',
raw: 'git+ssh://mydomain.com:foo/bar#1.2.3',
},
'git+ssh://mydomain.com:1234#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:1234#1.2.3',
fetchSpec: 'ssh://mydomain.com:1234',
gitCommittish: '1.2.3',
raw: 'git+ssh://mydomain.com:1234#1.2.3',
},
'git+ssh://mydomain.com:1234/hey#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:1234/hey#1.2.3',
fetchSpec: 'ssh://mydomain.com:1234/hey',
gitCommittish: '1.2.3',
raw: 'git+ssh://mydomain.com:1234/hey#1.2.3',
},
'git+ssh://mydomain.com:1234/hey': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://mydomain.com:1234/hey',
fetchSpec: 'ssh://mydomain.com:1234/hey',
gitCommittish: null,
raw: 'git+ssh://mydomain.com:1234/hey',
},
'git+ssh://username:password@mydomain.com:1234/hey#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://username:password@mydomain.com:1234/hey#1.2.3',
fetchSpec: 'ssh://username:password@mydomain.com:1234/hey',
gitCommittish: '1.2.3',
raw: 'git+ssh://username:password@mydomain.com:1234/hey#1.2.3',
},
'git+ssh://git@github.com/user/foo#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git#1.2.3',
fetchSpec: 'ssh://git@github.com/user/foo.git',
gitCommittish: '1.2.3',
raw: 'git+ssh://git@github.com/user/foo#1.2.3',
},
'git+ssh://git@notgithub.com/user/foo#semver:^1.2.3': {
name: null,
escapedName: null,
type: 'git',
hosted: null,
saveSpec: 'git+ssh://git@notgithub.com/user/foo#semver:^1.2.3',
fetchSpec: 'ssh://git@notgithub.com/user/foo',
gitCommittish: null,
gitRange: '^1.2.3',
raw: 'git+ssh://git@notgithub.com/user/foo#semver:^1.2.3',
},
'git+ssh://git@notgithub.com:user/foo#semver:^1.2.3': {
name: null,
escapedName: null,
type: 'git',
hosted: null,
saveSpec: 'git+ssh://git@notgithub.com:user/foo#semver:^1.2.3',
fetchSpec: 'git@notgithub.com:user/foo',
gitCommittish: null,
gitRange: '^1.2.3',
raw: 'git+ssh://git@notgithub.com:user/foo#semver:^1.2.3',
},
'git+ssh://git@github.com/user/foo#semver:^1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git#semver:^1.2.3',
fetchSpec: 'ssh://git@github.com/user/foo.git',
gitCommittish: null,
gitRange: '^1.2.3',
raw: 'git+ssh://git@github.com/user/foo#semver:^1.2.3',
},
'git+ssh://git@github.com:user/foo#semver:^1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git#semver:^1.2.3',
fetchSpec: 'ssh://git@github.com/user/foo.git',
gitCommittish: null,
gitRange: '^1.2.3',
raw: 'git+ssh://git@github.com:user/foo#semver:^1.2.3',
},
'user/foo#semver:^1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'github:user/foo#semver:^1.2.3',
fetchSpec: null,
gitCommittish: null,
gitRange: '^1.2.3',
raw: 'user/foo#semver:^1.2.3',
},
'user/foo#path:dist': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'github:user/foo#path:dist',
fetchSpec: null,
gitCommittish: null,
gitSubdir: '/dist',
raw: 'user/foo#path:dist',
},
'user/foo#1234::path:dist': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'github:user/foo#1234::path:dist',
fetchSpec: null,
gitCommittish: '1234',
gitRange: null,
gitSubdir: '/dist',
raw: 'user/foo#1234::path:dist',
},
'user/foo#notimplemented:value': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'github:user/foo#notimplemented:value',
fetchSpec: null,
gitCommittish: null,
gitRange: null,
gitSubdir: null,
raw: 'user/foo#notimplemented:value',
},
'git+file://path/to/repo#1.2.3': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git+file://path/to/repo#1.2.3',
fetchSpec: 'file://path/to/repo',
gitCommittish: '1.2.3',
raw: 'git+file://path/to/repo#1.2.3',
},
'git://notgithub.com/user/foo': {
name: null,
escapedName: null,
type: 'git',
saveSpec: 'git://notgithub.com/user/foo',
fetchSpec: 'git://notgithub.com/user/foo',
raw: 'git://notgithub.com/user/foo',
},
'@foo/bar@git+ssh://notgithub.com/user/foo': {
name: '@foo/bar',
escapedName: '@foo%2fbar',
scope: '@foo',
saveSpec: 'git+ssh://notgithub.com/user/foo',
fetchSpec: 'ssh://notgithub.com/user/foo',
rawSpec: 'git+ssh://notgithub.com/user/foo',
raw: '@foo/bar@git+ssh://notgithub.com/user/foo',
type: 'git',
},
'git@npm:not-git': {
name: 'git',
type: 'alias',
subSpec: {
type: 'range',
registry: true,
name: 'not-git',
fetchSpec: '*',
},
raw: 'git@npm:not-git',
},
'not-git@hostname.com:some/repo': {
name: null,
type: 'git',
saveSpec: 'git+ssh://not-git@hostname.com:some/repo',
fetchSpec: 'not-git@hostname.com:some/repo',
raw: 'not-git@hostname.com:some/repo',
},
'/path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/path/to/foo',
fetchSpec: '/path/to/foo',
raw: '/path/to/foo',
},
'/path/to/foo.tar': {
name: null,
escapedName: null,
type: 'file',
saveSpec: 'file:/path/to/foo.tar',
fetchSpec: '/path/to/foo.tar',
raw: '/path/to/foo.tar',
},
'/path/to/foo.tgz': {
name: null,
escapedName: null,
type: 'file',
saveSpec: 'file:/path/to/foo.tgz',
fetchSpec: '/path/to/foo.tgz',
raw: '/path/to/foo.tgz',
},
'file:path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:path/to/foo',
fetchSpec: '/test/a/b/path/to/foo',
raw: 'file:path/to/foo',
},
'file:path/to/foo.tar.gz': {
name: null,
escapedName: null,
type: 'file',
saveSpec: 'file:path/to/foo.tar.gz',
fetchSpec: '/test/a/b/path/to/foo.tar.gz',
raw: 'file:path/to/foo.tar.gz',
},
'file:~/path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:~/path/to/foo',
fetchSpec: path.normalize(path.join(os.homedir(), '/path/to/foo')),
raw: 'file:~/path/to/foo',
},
'file:/~/path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:~/path/to/foo',
fetchSpec: path.normalize(path.join(os.homedir(), '/path/to/foo')),
raw: 'file:/~/path/to/foo',
},
'file:/~path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/~path/to/foo',
fetchSpec: '/~path/to/foo',
raw: 'file:/~path/to/foo',
},
'file:/.path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/.path/to/foo',
fetchSpec: '/.path/to/foo',
raw: 'file:/.path/to/foo',
},
'file:./path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:path/to/foo',
fetchSpec: '/test/a/b/path/to/foo',
raw: 'file:./path/to/foo',
},
'file:/./path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:path/to/foo',
fetchSpec: '/test/a/b/path/to/foo',
raw: 'file:/./path/to/foo',
},
'file://./path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:path/to/foo',
fetchSpec: '/test/a/b/path/to/foo',
raw: 'file://./path/to/foo',
},
'file:../path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:../path/to/foo',
fetchSpec: '/test/a/path/to/foo',
raw: 'file:../path/to/foo',
},
'file:/../path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:../path/to/foo',
fetchSpec: '/test/a/path/to/foo',
raw: 'file:/../path/to/foo',
},
'file://../path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:../path/to/foo',
fetchSpec: '/test/a/path/to/foo',
raw: 'file://../path/to/foo',
},
'file:///path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/path/to/foo',
fetchSpec: '/path/to/foo',
raw: 'file:///path/to/foo',
},
'file:/path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/path/to/foo',
fetchSpec: '/path/to/foo',
raw: 'file:/path/to/foo',
},
'file://path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/path/to/foo',
fetchSpec: '/path/to/foo',
raw: 'file://path/to/foo',
},
'file:////path/to/foo': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:/path/to/foo',
fetchSpec: '/path/to/foo',
raw: 'file:////path/to/foo',
},
'file://.': {
name: null,
escapedName: null,
type: 'directory',
saveSpec: 'file:',
fetchSpec: '/test/a/b',
raw: 'file://.',
},
'http://insecure.com/foo.tgz': {
name: null,
escapedName: null,
type: 'remote',
saveSpec: 'http://insecure.com/foo.tgz',
fetchSpec: 'http://insecure.com/foo.tgz',
raw: 'http://insecure.com/foo.tgz',
},
'https://server.com/foo.tgz': {
name: null,
escapedName: null,
type: 'remote',
saveSpec: 'https://server.com/foo.tgz',
fetchSpec: 'https://server.com/foo.tgz',
raw: 'https://server.com/foo.tgz',
},
'foo@latest': {
name: 'foo',
escapedName: 'foo',
type: 'tag',
saveSpec: null,
fetchSpec: 'latest',
raw: 'foo@latest',
},
foo: {
name: 'foo',
escapedName: 'foo',
type: 'range',
saveSpec: null,
fetchSpec: '*',
raw: 'foo',
},
'foo@ 1.2 ': {
name: 'foo',
escapedName: 'foo',
type: 'range',
saveSpec: null,
fetchSpec: '1.2',
raw: 'foo@ 1.2 ',
rawSpec: ' 1.2 ',
},
'foo@ 1.2.3 ': {
name: 'foo',
escapedName: 'foo',
type: 'version',
saveSpec: null,
fetchSpec: '1.2.3',
raw: 'foo@ 1.2.3 ',
rawSpec: ' 1.2.3 ',
},
'foo@1.2.3 ': {
name: 'foo',
escapedName: 'foo',
type: 'version',
saveSpec: null,
fetchSpec: '1.2.3',
raw: 'foo@1.2.3 ',
rawSpec: '1.2.3 ',
},
'foo@ 1.2.3': {
name: 'foo',
escapedName: 'foo',
type: 'version',
saveSpec: null,
fetchSpec: '1.2.3',
raw: 'foo@ 1.2.3',
rawSpec: ' 1.2.3',
},
},
bitbucket: {
'bitbucket:user/foo-js': {
name: null,
type: 'git',
saveSpec: 'bitbucket:user/foo-js',
raw: 'bitbucket:user/foo-js',
},
'bitbucket:user/foo-js#bar/baz': {
name: null,
type: 'git',
saveSpec: 'bitbucket:user/foo-js#bar/baz',
raw: 'bitbucket:user/foo-js#bar/baz',
},
'bitbucket:user..blerg--/..foo-js# . . . . . some . tags / / /': {
name: null,
type: 'git',
saveSpec: 'bitbucket:user..blerg--/..foo-js# . . . . . some . tags / / /',
raw: 'bitbucket:user..blerg--/..foo-js# . . . . . some . tags / / /',
},
'bitbucket:user/foo-js#bar/baz/bin': {
name: null,
type: 'git',
saveSpec: 'bitbucket:user/foo-js#bar/baz/bin',
raw: 'bitbucket:user/foo-js#bar/baz/bin',
},
'foo@bitbucket:user/foo-js': {
name: 'foo',
type: 'git',
saveSpec: 'bitbucket:user/foo-js',
raw: 'foo@bitbucket:user/foo-js',
},
'git+ssh://git@bitbucket.org/user/foo#1.2.3': {
name: null,
type: 'git',
saveSpec: 'git+ssh://git@bitbucket.org/user/foo.git#1.2.3',
raw: 'git+ssh://git@bitbucket.org/user/foo#1.2.3',
},
'https://bitbucket.org/user/foo.git': {
name: null,
type: 'git',
saveSpec: 'git+https://bitbucket.org/user/foo.git',
raw: 'https://bitbucket.org/user/foo.git',
},
'@foo/bar@git+ssh://bitbucket.org/user/foo': {
name: '@foo/bar',
scope: '@foo',
type: 'git',
saveSpec: 'git+ssh://git@bitbucket.org/user/foo.git',
rawSpec: 'git+ssh://bitbucket.org/user/foo',
raw: '@foo/bar@git+ssh://bitbucket.org/user/foo',
},
},
github: {
'user/foo-js': {
name: null,
type: 'git',
saveSpec: 'github:user/foo-js',
raw: 'user/foo-js',
},
'user/foo-js#bar/baz': {
name: null,
type: 'git',
saveSpec: 'github:user/foo-js#bar/baz',
raw: 'user/foo-js#bar/baz',
},
'user..blerg--/..foo-js# . . . . . some . tags / / /': {
name: null,
type: 'git',
saveSpec: 'github:user..blerg--/..foo-js# . . . . . some . tags / / /',
raw: 'user..blerg--/..foo-js# . . . . . some . tags / / /',
},
'user/foo-js#bar/baz/bin': {
name: null,
type: 'git',
raw: 'user/foo-js#bar/baz/bin',
},
'foo@user/foo-js': {
name: 'foo',
type: 'git',
saveSpec: 'github:user/foo-js',
raw: 'foo@user/foo-js',
},
'github:user/foo-js': {
name: null,
type: 'git',
saveSpec: 'github:user/foo-js',
raw: 'github:user/foo-js',
},
'git+ssh://git@github.com/user/foo#1.2.3': {
name: null,
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git#1.2.3',
raw: 'git+ssh://git@github.com/user/foo#1.2.3',
},
'git+ssh://git@github.com:user/foo#1.2.3': {
name: null,
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git#1.2.3',
raw: 'git+ssh://git@github.com:user/foo#1.2.3',
},
'git://github.com/user/foo': {
name: null,
type: 'git',
saveSpec: 'git://github.com/user/foo.git',
raw: 'git://github.com/user/foo',
},
'https://github.com/user/foo.git': {
name: null,
type: 'git',
saveSpec: 'git+https://github.com/user/foo.git',
raw: 'https://github.com/user/foo.git',
},
'@foo/bar@git+ssh://github.com/user/foo': {
name: '@foo/bar',
scope: '@foo',
type: 'git',
saveSpec: 'git+ssh://git@github.com/user/foo.git',
rawSpec: 'git+ssh://github.com/user/foo',
raw: '@foo/bar@git+ssh://github.com/user/foo',
},
'foo@bar/foo': {
name: 'foo',
type: 'git',
saveSpec: 'github:bar/foo',
raw: 'foo@bar/foo',
},
'git@github.com:12345': {
name: undefined,
type: 'git',
saveSpec: 'git+ssh://git@github.com:12345',
fetchSpec: 'ssh://git@github.com:12345',
raw: 'git@github.com:12345',
},
'git@github.com:12345/': {
name: undefined,
type: 'git',
saveSpec: 'git+ssh://git@github.com:12345/',
fetchSpec: 'ssh://git@github.com:12345/',
raw: 'git@github.com:12345/',
},
'git@github.com:12345/foo': {
name: undefined,
type: 'git',
saveSpec: 'git+ssh://git@github.com:12345/foo',
fetchSpec: 'ssh://git@github.com:12345/foo',
raw: 'git@github.com:12345/foo',
},
'git@github.com:12345foo': {
name: undefined,
type: 'git',
saveSpec: 'git+ssh://git@github.com:12345foo',
fetchSpec: 'git@github.com:12345foo',
raw: 'git@github.com:12345foo',
},
},
gitlab: {
'gitlab:user/foo-js': {
name: null,
type: 'git',
raw: 'gitlab:user/foo-js',
},
'gitlab:user/foo-js#bar/baz': {
name: null,
type: 'git',
raw: 'gitlab:user/foo-js#bar/baz',
},
'gitlab:user..blerg--/..foo-js# . . . . . some . tags / / /': {
name: null,
type: 'git',
saveSpec: 'gitlab:user..blerg--/..foo-js# . . . . . some . tags / / /',
raw: 'gitlab:user..blerg--/..foo-js# . . . . . some . tags / / /',
},
'gitlab:user/foo-js#bar/baz/bin': {
name: null,
type: 'git',
saveSpec: 'gitlab:user/foo-js#bar/baz/bin',
raw: 'gitlab:user/foo-js#bar/baz/bin',
},
'foo@gitlab:user/foo-js': {
name: 'foo',
type: 'git',
saveSpec: 'gitlab:user/foo-js',
raw: 'foo@gitlab:user/foo-js',
},
'git+ssh://git@gitlab.com/user/foo#1.2.3': {
name: null,
type: 'git',
saveSpec: 'git+ssh://git@gitlab.com/user/foo.git#1.2.3',
raw: 'git+ssh://git@gitlab.com/user/foo#1.2.3',
},
'https://gitlab.com/user/foo.git': {
name: null,
type: 'git',
saveSpec: 'git+https://gitlab.com/user/foo.git',
raw: 'https://gitlab.com/user/foo.git',
},
'@foo/bar@git+ssh://gitlab.com/user/foo': {
name: '@foo/bar',
scope: '@foo',
type: 'git',
saveSpec: 'git+ssh://git@gitlab.com/user/foo.git',
rawSpec: 'git+ssh://gitlab.com/user/foo',
raw: '@foo/bar@git+ssh://gitlab.com/user/foo',
},
},
windows: {
'C:\\x\\y\\z': {
raw: 'C:\\x\\y\\z',
scope: null,
name: null,
escapedName: null,
rawSpec: 'C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'foo@C:\\x\\y\\z': {
raw: 'foo@C:\\x\\y\\z',
scope: null,
name: 'foo',
escapedName: 'foo',
rawSpec: 'C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'foo@file:///C:\\x\\y\\z': {
raw: 'foo@file:///C:\\x\\y\\z',
scope: null,
name: 'foo',
escapedName: 'foo',
rawSpec: 'file:///C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'foo@file://C:\\x\\y\\z': {
raw: 'foo@file://C:\\x\\y\\z',
scope: null,
name: 'foo',
escapedName: 'foo',
rawSpec: 'file://C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'file:///C:\\x\\y\\z': {
raw: 'file:///C:\\x\\y\\z',
scope: null,
name: null,
escapedName: null,
rawSpec: 'file:///C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'file://C:\\x\\y\\z': {
raw: 'file://C:\\x\\y\\z',
scope: null,
name: null,
escapedName: null,
rawSpec: 'file://C:\\x\\y\\z',
fetchSpec: 'C:\\x\\y\\z',
type: 'directory',
},
'foo@/foo/bar/baz': {
raw: 'foo@/foo/bar/baz',
scope: null,
name: 'foo',
escapedName: 'foo',
rawSpec: '/foo/bar/baz',
fetchSpec: 'C:\\foo\\bar\\baz',
type: 'directory',
},
'foo@git+file://C:\\x\\y\\z': {
type: 'git',
registry: null,
where: null,
raw: 'foo@git+file://C:\\x\\y\\z',
name: 'foo',
escapedName: 'foo',
scope: null,
rawSpec: 'git+file://C:\\x\\y\\z',
saveSpec: 'git+file://C:\\x\\y\\z',
fetchSpec: 'file://c:/x/y/z',
gitRange: null,
gitCommittish: null,
hosted: null,
},
},
};

View File

@@ -0,0 +1,109 @@
import { Npa } from "bun:internal-for-testing";
import cases from "./cases";
// Strip a leading Windows drive letter and flip backslashes to forward
// slashes so paths compare platform-agnostically. Falsy input (e.g. "",
// undefined fetchSpec) is returned unchanged.
const normalizePath = (p: string) => {
  if (!p) return p;
  return p.replace(/^[a-zA-Z]:/, "").replace(/\\/g, "/");
};
// Normalize the fetchSpec field in place so Windows and POSIX results match
// the same fixture expectations. Returns the (mutated) spec for chaining.
const normalizePaths = (spec: any) => {
  const platformAgnostic = normalizePath(spec.fetchSpec);
  spec.fetchSpec = platformAgnostic;
  return spec;
};
// Copy a fixture's expected object, coercing every `undefined` value to
// `null`. The tests do not care about the null/undefined distinction, and
// normalizing here keeps toMatchObject comparisons simple.
const expectedPatch = (expected: any) => {
  const patched = { ...expected };
  for (const key of Object.keys(patched)) {
    if (patched[key] === undefined) {
      patched[key] = null;
    }
  }
  return patched;
};
// Partition the fixture groups: the "windows" group only makes sense on
// win32 hosts and is gated separately below.
const windowsTests = Object.entries(cases).filter(([group]) => group === "windows");
const platformAgnosticTests = Object.entries(cases).filter(([group]) => group !== "windows");
// Fixture-driven parity tests against npm-package-arg: every (input, expected)
// pair from cases.ts must produce a matching parse result.
describe("npa", () => {
describe("valid cases", () => {
describe.each(platformAgnosticTests)("%s", (_, caseSet: object) => {
it.each(Object.entries(caseSet))("parses %s", (input, expected) => {
// "/test/a/b" is the base directory relative file:/directory specs
// resolve against (fixtures expect e.g. "/test/a/b/path/to/foo").
const result = Npa.npa(input as string, "/test/a/b");
expect(result).toMatchObject(expectedPatch(expected));
});
});
});
if (process.platform === "win32") {
describe("windows specific cases", () => {
describe.each(windowsTests)("%s", (_, caseSet: object) => {
it.each(Object.entries(caseSet))("parses %s", (input, expected) => {
// No base dir here — Windows fixtures use absolute drive paths;
// fetchSpec is normalized so slashes/drive letters match fixtures.
const result = Npa.npa(input as string);
expect(normalizePaths(result)).toMatchObject(expectedPatch(expected));
});
});
});
}
});
// Spot-checks for Npa.resolve (name + spec + optional base dir), mirroring
// npm-package-arg's resolve() behavior for ranges, files, and directories.
describe("resolve", () => {
test("npa.resolve", () => {
expect(Npa.resolve("foo", "^1.2.3", "/test/a/b")).toMatchObject({
type: "range",
});
});
test("Npa.resolve file:", () => {
// file: specs resolve relative to the provided base dir; fetchSpec is
// path-normalized so the assertion holds on Windows too.
expect(normalizePaths(Npa.resolve("foo", "file:foo", "/test/a/b"))).toMatchObject({
type: "directory",
fetchSpec: "/test/a/b/foo",
});
});
test("Npa.resolve no protocol", () => {
// A bare relative path is treated as a directory spec even without file:.
expect(Npa.resolve("foo", "../foo/bar", "/test/a/b")).toMatchObject({
type: "directory",
});
});
test("Npa.resolve file protocol", () => {
expect(Npa.resolve("foo", "file:../foo/bar", "/test/a/b")).toMatchObject({
type: "directory",
});
});
test("Npa.resolve file protocol w/ tgz", () => {
// A .tgz suffix flips the classification from directory to file.
expect(Npa.resolve("foo", "file:../foo/bar.tgz", "/test/a/b")).toMatchObject({
type: "file",
});
});
test("Npa.resolve with no name", () => {
expect(Npa.resolve(null, "4.0.0", "/test/a/b")).toMatchObject({
type: "version",
name: null,
});
});
test("Npa.resolve sets raw right", () => {
// raw must reassemble to "name@spec".
expect(Npa.resolve("foo", "file:abc")).toMatchObject({
type: "directory",
raw: "foo@file:abc",
});
});
test("npa with path in @ in it", () => {
// An @ inside a path component must not be parsed as a version separator.
expect(Npa.npa("./path/to/thing/package@1.2.3/")).toMatchObject({
name: null,
type: "directory",
});
});
test("npa w/o leading or trailing slash", () => {
expect(Npa.npa("path/to/thing/package@1.2.3")).toMatchObject({
name: null,
type: "directory",
});
});
});

View File

@@ -0,0 +1,6 @@
import { Npa } from "bun:internal-for-testing";
import { expect, test } from "bun:test";
test("invalid url", () => {
  // gopher: is not a protocol npa understands, so parsing must throw.
  const parse = () => Npa.npa("foo@gopher://goodluckwiththat");
  expect(parse).toThrow();
});

View File

@@ -0,0 +1,94 @@
import { Npa } from "bun:internal-for-testing";
import { describe, expect, test } from "bun:test";
describe("npm-package-arg memory safety", () => {
  describe("fromAlias error paths", () => {
    test("handles nested alias error without leaking", () => {
      // Should trigger error.NestedAlias.
      // Before fix: leaked the sub_spec allocation.
      expect(() => Npa.npa("foo@npm:bar@npm:baz", "/test/path")).toThrow();
    });

    test("handles non-registry alias without leaking", () => {
      // Should trigger error.NotAliasingRegistry.
      // Before fix: leaked the sub_spec allocation.
      expect(() => Npa.npa("foo@npm:github:user/repo", "/test/path")).toThrow();
    });
  });

  describe("fromGitSpec error paths", () => {
    test("handles valid git spec without crashing", () => {
      // Happy path: must succeed and properly clean up.
      const parsed = Npa.npa("github:user/repo", "/test/path");
      expect(parsed.type).toBe("git");
      expect(parsed.hosted).toBeDefined();
    });

    test("handles git spec with committish", () => {
      // Verifies git_attrs is properly managed.
      // Before fix: if an internal allocation failed, `hosted` would leak.
      const parsed = Npa.npa("github:user/repo#v1.0.0", "/test/path");
      expect(parsed.type).toBe("git");
      expect(parsed.gitCommittish).toBe("v1.0.0");
    });

    test("handles git spec with semver range", () => {
      // Exercises GitAttrs.fromCommittish with semver parsing,
      // plus the double-free fix (Bug #3).
      const parsed = Npa.npa("github:user/repo#semver:^1.0.0", "/test/path");
      expect(parsed.type).toBe("git");
      expect(parsed.gitRange).toBe("^1.0.0");
    });

    test("handles git spec with path", () => {
      // Exercises GitAttrs.fromCommittish with a subdir attribute.
      const parsed = Npa.npa("github:user/repo#path:packages/foo", "/test/path");
      expect(parsed.type).toBe("git");
      expect(parsed.gitSubdir).toBe("/packages/foo");
    });

    test("handles git spec with multiple attributes", () => {
      // Exercises GitAttrs.fromCommittish with several '::'-separated parts.
      const parsed = Npa.npa("github:user/repo#v1.0.0::path:packages/foo", "/test/path");
      expect(parsed.type).toBe("git");
      expect(parsed.gitCommittish).toBe("v1.0.0");
      expect(parsed.gitSubdir).toBe("/packages/foo");
    });
  });

  describe("GitAttrs.fromCommittish edge cases", () => {
    test("handles invalid percent encoding in semver range", () => {
      // Malformed percent encoding triggers the error path in
      // PercentEncoding.decode. Before fix: double-free on that error return.
      expect(() => Npa.npa("github:user/repo#semver:%XX", "/test/path")).toThrow();
    });

    test("handles duplicate committish attributes", () => {
      // Should trigger error.InvalidCommittish.
      expect(() => Npa.npa("github:user/repo#v1.0.0::v2.0.0", "/test/path")).toThrow();
    });

    test("handles committish and semver conflict", () => {
      // Should trigger error.InvalidCommittish (can't have both).
      expect(() => Npa.npa("github:user/repo#v1.0.0::semver:^1.0.0", "/test/path")).toThrow();
    });

    test("handles duplicate subdir", () => {
      // Should trigger error.InvalidCommittish.
      expect(() => Npa.npa("github:user/repo#path:foo::path:bar", "/test/path")).toThrow();
    });
  });
});

View File

@@ -0,0 +1,58 @@
import { Npa } from "bun:internal-for-testing";
import { expect, test } from "bun:test";
const npa = Npa.npa;

test("realize-package-specifier", () => {
  // Unnamed specifiers, all resolved relative to /test/a/b.
  expect(npa("a.tar.gz", "/test/a/b").type).toBe("file"); // local tarball
  expect(npa("d", "/test/a/b").type).toBe("range"); // remote package
  expect(npa("file:./a.tar.gz", "/test/a/b").type).toBe("file"); // local tarball
  expect(npa("file:./b", "/test/a/b").type).toBe("directory"); // local package directory
  expect(npa("file:./c", "/test/a/b").type).toBe("directory"); // non-package local directory, specified with a file URL
  expect(npa("file:./d", "/test/a/b").type).toBe("directory"); // no local directory, specified with a file URL
});
test("named realize-package-specifier", () => {
  // name@spec forms, resolved relative to /test/a/b.
  expect(npa("a@a.tar.gz", "/test/a/b").type).toBe("file"); // named local tarball
  expect(npa("d@d", "/test/a/b").type).toBe("tag"); // remote package
  expect(npa("a@file:./a.tar.gz", "/test/a/b").type).toBe("file"); // local tarball
  expect(npa("b@file:./b", "/test/a/b").type).toBe("directory"); // local package directory
  expect(npa("c@file:./c", "/test/a/b").type).toBe("directory"); // non-package local directory, specified with a file URL
  expect(npa("d@file:./d", "/test/a/b").type).toBe("directory"); // no local directory, specified with a file URL

  // NOTE(review): `where` here is relative ("test/a/b") unlike every other
  // case — presumably fine because hosted git specs don't use it; confirm.
  const hostedResult = npa("e@e/2", "test/a/b");
  expect(hostedResult.type).toBe("git"); // hosted package dependency is git
  expect(hostedResult.hosted.type).toBe("github"); // github package dependency

  expect(npa("e@1", "/test/a/b").type).toBe("range"); // range like specifier is never a local file
  expect(npa("e@1.0.0", "/test/a/b").type).toBe("version"); // version like specifier is never a local file
});

View File

@@ -0,0 +1,102 @@
// Expected validation results keyed by package name, mirroring the upstream
// validate-npm-package-name test fixtures. Quote style normalized to the
// double quotes used by the sibling test files; all strings are unchanged.
export default {
  "some-package": { validForNewPackages: true, validForOldPackages: true },
  "example.com": { validForNewPackages: true, validForOldPackages: true },
  "under_score": { validForNewPackages: true, validForOldPackages: true },
  "period.js": { validForNewPackages: true, validForOldPackages: true },
  "123numeric": { validForNewPackages: true, validForOldPackages: true },
  "crazy!": { validForNewPackages: false, validForOldPackages: true },
  "@npm/thingy": { validForNewPackages: true, validForOldPackages: true },
  "@npm-zors/money!time.js": {
    validForNewPackages: false,
    validForOldPackages: true,
    warnings: ["name can no longer contain special characters (\"~'!()*\")"],
  },
  "@user/node_modules": { validForNewPackages: true, validForOldPackages: true },
  "@user/_package": { validForNewPackages: true, validForOldPackages: true },
  "@user/http": { validForNewPackages: true, validForOldPackages: true },
  "": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["name length must be greater than zero"],
  },
  ".start-with-period": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["name cannot start with a period"],
  },
  "@npm/.": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["name cannot start with a period"],
  },
  "@npm/..": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["name cannot start with a period"],
  },
  "@npm/.package": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["name cannot start with a period"],
  },
  "_start-with-underscore": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["name cannot start with an underscore"],
  },
  "contain:colons": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["name can only contain URL-friendly characters"],
  },
  " leading-space": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["name cannot contain leading or trailing spaces", "name can only contain URL-friendly characters"],
  },
  "trailing-space ": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["name cannot contain leading or trailing spaces", "name can only contain URL-friendly characters"],
  },
  "s/l/a/s/h/e/s": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["name can only contain URL-friendly characters"],
  },
  "node_modules": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["node_modules is not a valid package name"],
  },
  "favicon.ico": {
    validForNewPackages: false,
    validForOldPackages: false,
    errors: ["favicon.ico is not a valid package name"],
  },
  "http": {
    validForNewPackages: false,
    validForOldPackages: true,
    warnings: ["http is a core module name"],
  },
  "process": {
    validForNewPackages: false,
    validForOldPackages: true,
    warnings: ["process is a core module name"],
  },
  // 215 characters: one past the 214-character limit.
  "ifyouwanttogetthesumoftwonumberswherethosetwonumbersarechosenbyfindingthelargestoftwooutofthreenumbersandsquaringthemwhichismultiplyingthembyitselfthenyoushouldinputthreenumbersintothisfunctionanditwilldothatforyou-": {
    validForNewPackages: false,
    validForOldPackages: true,
    warnings: ["name can no longer contain more than 214 characters"],
  },
  // Exactly 214 characters: still valid.
  "ifyouwanttogetthesumoftwonumberswherethosetwonumbersarechosenbyfindingthelargestoftwooutofthreenumbersandsquaringthemwhichismultiplyingthembyitselfthenyoushouldinputthreenumbersintothisfunctionanditwilldothatforyou": {
    validForNewPackages: true,
    validForOldPackages: true,
  },
};

View File

@@ -0,0 +1,36 @@
import { expect, describe, it } from "bun:test";
import cases from "./cases";
import { ValidateNpmPackageName } from "bun:internal-for-testing";
/**
 * Convert the expected object from the upstream validate-npm-package-name
 * fixtures to match our implementation's output messages.
 *
 * In some ways, this is debt that needs to be addressed should we choose to
 * expose this API publicly.
 */
function remapExpectedObject(expectedObject: any): object {
  const remapped = { ...expectedObject };

  // Our implementation reports one generic message per category instead of
  // echoing the offending name, so rewrite those fixture messages.
  const remapWarning = (warning: string) =>
    warning.endsWith("is a core module name") ? "name conflicts a core module name" : warning;
  const remapError = (error: string) =>
    error.endsWith("is not a valid package name") ? "name is not allowed" : error;

  if (remapped.warnings) {
    remapped.warnings = remapped.warnings.map(remapWarning);
  }
  if (remapped.errors) {
    remapped.errors = remapped.errors.map(remapError);
  }
  return remapped;
}
describe("validate-npm-package-name", () => {
  // One test per fixture entry; expectations are remapped to our messages.
  const fixtureEntries = Object.entries(cases);
  it.each(fixtureEntries)("parses %s", (pkgName: string, expected: object) => {
    const actual = ValidateNpmPackageName.validate(pkgName);
    expect(actual).toMatchObject(remapExpectedObject(expected));
  });
})