Mirror of https://github.com/oven-sh/bun, synced 2026-02-02 15:08:46 +00:00

Commit: bun create react app is almost done
Makefile (1 change)

@@ -159,6 +159,7 @@ BUN_LLD_FLAGS = $(OBJ_FILES) \
 src/deps/libcrypto.a \
 src/deps/picohttpparser.o \
 $(CLANG_FLAGS) \
 -liconv \

 ifeq ($(OS_NAME), linux)
 BUN_LLD_FLAGS += -lstdc++fs \
examples/.gitignore (vendored, new file)

@@ -0,0 +1 @@
bun-examples-all
examples/.is-examples-folder (new file, empty)
examples/README.md (new file, empty)
examples/bun-create.md (new file, 4 lines)

@@ -0,0 +1,4 @@
# `bun create`

This folder
examples/next/.gitignore (vendored, 8 changes)

@@ -32,3 +32,11 @@ yarn-error.log*

 # vercel
 .vercel
+
+**/*.trace
+**/*.zip
+**/*.tar.gz
+**/*.tgz
+**/*.log
+package-lock.json
+**/*.bun
examples/next/.npmignore (new file, 42 lines)

@@ -0,0 +1,42 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env.local
.env.development.local
.env.test.local
.env.production.local

# vercel
.vercel

**/*.trace
**/*.zip
**/*.tar.gz
**/*.tgz
**/*.log
package-lock.json
**/*.bun
examples/next/gitignore (new file, 42 lines)

@@ -0,0 +1,42 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env.local
.env.development.local
.env.test.local
.env.production.local

# vercel
.vercel

**/*.trace
**/*.zip
**/*.tar.gz
**/*.tgz
**/*.log
package-lock.json
**/*.bun
examples/next/package-lock.json (generated, 4 changes)

@@ -1,12 +1,12 @@
 {
   "name": "hello-next",
-  "version": "1.0.0",
+  "version": "0.0.31",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "hello-next",
-      "version": "1.0.0",
+      "version": "0.0.31",
       "license": "MIT",
       "dependencies": {
         "@jarred/react-static-tweets": "0.5.8",
examples/next/package.json

@@ -1,8 +1,7 @@
 {
-  "name": "hello-next",
-  "version": "1.0.0",
+  "name": "@bun-examples/next",
+  "version": "0.0.31",
   "main": "index.js",
   "license": "MIT",
   "dependencies": {
     "next": "11.1.2",
     "react": "^17.0.2",

@@ -14,7 +13,7 @@
     "bun-framework-next": "^0.0.0-21",
     "typescript": "^4.3.5"
   },
-  "bun-template": {
+  "bun-create": {
     "postinstall": [
       "bun bun --use next"
     ]
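The `bun-create` key shown above is what ties a template to the new CreateCommand: while rewriting the extracted package.json, `bun create` strips this key and queues its `preinstall`/`postinstall` entries (a single string or an array of strings) as tasks to run through the detected npm client. A minimal sketch of a template manifest — the `preinstall` value here is hypothetical, the rest mirrors this diff:

{
  "name": "@bun-examples/next",
  "version": "0.0.31",
  "bun-create": {
    "preinstall": "echo hypothetical-setup-step",
    "postinstall": ["bun bun --use next"]
  }
}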
examples/react/.gitignore (vendored, 9 changes)

@@ -1,3 +1,12 @@
+**/*.trace
+**/*.zip
+**/*.tar.gz
+**/*.tgz
+**/*.log
+
+package-lock.json
+**/*.bun
+
 # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

 # dependencies
examples/react/.npmignore (new file, 31 lines)

@@ -0,0 +1,31 @@
**/*.trace
**/*.zip
**/*.tar.gz
**/*.tgz
**/*.log
package-lock.json
**/*.bun

# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# production
/build

# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local

npm-debug.log*
yarn-debug.log*
yarn-error.log*
examples/react/gitignore (new file, 32 lines)

@@ -0,0 +1,32 @@
**/*.trace
**/*.zip
**/*.tar.gz
**/*.tgz
**/*.log

package-lock.json
**/*.bun

# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# production
/build

# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local

npm-debug.log*
yarn-debug.log*
yarn-error.log*
examples/react/package.json

@@ -1,14 +1,12 @@
 {
-  "name": "hello-create-react-app",
-  "version": "0.1.0",
-  "private": true,
+  "name": "@bun-examples/react",
+  "version": "0.0.27",
   "dependencies": {
     "@testing-library/jest-dom": "^5.11.4",
     "@testing-library/react": "^11.1.0",
     "@testing-library/user-event": "^12.1.10",
     "react": "^17.0.2",
     "react-dom": "^17.0.2",
     "react-scripts": "4.0.3",
     "web-vitals": "^1.0.1"
   },
   "scripts": {
misctools/publish-examples.js (new file, 137 lines)

@@ -0,0 +1,137 @@
const fs = require("fs");
const path = require("path");
const { execSync } = require("child_process");

const exec = (cmd, opts = {}) => {
  console.log("$", cmd);
  return execSync(cmd, {
    ...opts,
    env: { CI: "true", ...process.env, ...(opts.env || {}) },
  });
};

const DRY_RUN = !!process.env.DRY_RUN;

var count = 0;

const examplesFolderEntries = fs.readdirSync(
  path.join(process.cwd(), "examples"),
  { withFileTypes: true }
);

const packageNames = [];

for (let folder of examplesFolderEntries) {
  if (!folder.isDirectory()) continue;
  const absolute = path.resolve(process.cwd(), "examples", folder.name);

  let packageJSONText;

  try {
    packageJSONText = fs.readFileSync(
      path.join(absolute, "package.json"),
      "utf8"
    );
  } catch {
    continue;
  }

  let packageJSON = JSON.parse(packageJSONText);

  if (!packageJSON.name) continue;
  if (!packageJSON.name.startsWith("@bun-examples")) continue;

  var version = "0.0.1";
  try {
    const _versions = exec(`npm view ${packageJSON.name} versions --json`)
      .toString()
      .trim();

    if (_versions.length > 0) {
      const versionsArray = JSON.parse(_versions);
      version = versionsArray[versionsArray.length - 1];
    }
  } catch (exception) {
    console.error(exception);
  }
  var retryCount = 5;

  restart: while (retryCount-- > 0) {
    packageJSON.version = version;
    if ("private" in packageJSON) delete packageJSON.private;
    if ("license" in packageJSON) delete packageJSON.license;

    try {
      fs.copyFileSync(
        path.join(absolute, ".gitignore"),
        path.join(absolute, "gitignore")
      );
    } catch (exception) {}

    fs.writeFileSync(
      path.join(absolute, "package.json"),
      JSON.stringify(packageJSON, null, 2)
    );
    exec(`npm version patch --force`, { cwd: absolute });

    packageJSON = JSON.parse(
      fs.readFileSync(path.join(absolute, "package.json"), "utf8")
    );
    version = packageJSON.version;

    try {
      exec(
        `npm publish ${
          DRY_RUN ? "--dry-run" : ""
        } --access public --registry https://registry.npmjs.org/`,
        { cwd: absolute }
      );
      packageNames.push([
        packageJSON.name,
        {
          version: packageJSON.version,
          description: packageJSON.description || "",
        },
      ]);
      count++;
      break;
    } catch (exception) {
      continue restart;
    }
  }
}

if (packageNames.length > 0) {
  const packageJSON = {
    name: "bun-examples-all",
    private: false,
    version: `0.0.${Date.now()}`,
    description: "All bun-examples",
    examples: Object.fromEntries(packageNames),
  };
  const dir = path.join(process.cwd(), "examples/bun-examples-all");
  try {
    fs.rmSync(dir, {
      recursive: true,
      force: true,
    });
  } catch (exception) {}

  try {
    fs.mkdirSync(dir, {
      recursive: true,
    });
  } catch (exception) {}
  fs.writeFileSync(
    path.join(dir, "package.json"),
    JSON.stringify(packageJSON, null, 2)
  );
  exec(
    `npm publish ${
      DRY_RUN ? "--dry-run" : ""
    } --access public --registry https://registry.npmjs.org/`,
    { cwd: dir }
  );
}

console.log(`Published ${count} packages`);
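When at least one package publishes successfully, the script also publishes an aggregate `bun-examples-all` package whose `examples` field maps each template to its version and description; `Example.fetchAll` in the new create_command.zig downloads exactly this manifest from the registry to print the template list. Roughly, the generated package.json looks like the sketch below (the `Date.now()`-based version is illustrative; the template versions match the ones bumped in this commit):

{
  "name": "bun-examples-all",
  "private": false,
  "version": "0.0.1630000000000",
  "description": "All bun-examples",
  "examples": {
    "@bun-examples/next": { "version": "0.0.31", "description": "" },
    "@bun-examples/react": { "version": "0.0.27", "description": "" }
  }
}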
@@ -61,15 +61,15 @@ pub fn main() anyerror!void {
         );
     }

-    // if (std.mem.eql(u8, std.fs.path.extension(tarball_path), ".gz") or std.mem.eql(u8, std.fs.path.extension(tarball_path), ".tgz")) {
-    //     tarball_buf_list = std.ArrayListUnmanaged(u8){ .capacity = file_buf.len, .items = file_buf };
-    //     var gunzip = try Zlib.ZlibReaderArrayList.init(file_buf, &tarball_buf_list, std.heap.c_allocator);
-    //     try gunzip.readAll();
-    //     gunzip.deinit();
-    //     Output.prettyErrorln("Decompressed {d} -> {d}\n", .{ file_buf.len, tarball_buf_list.items.len });
-    // } else {
-    //     tarball_buf_list = std.ArrayListUnmanaged(u8){ .capacity = file_buf.len, .items = file_buf };
-    // }
+    if (std.mem.eql(u8, std.fs.path.extension(tarball_path), ".gz") or std.mem.eql(u8, std.fs.path.extension(tarball_path), ".tgz")) {
+        tarball_buf_list = std.ArrayListUnmanaged(u8){ .capacity = file_buf.len, .items = file_buf };
+        var gunzip = try Zlib.ZlibReaderArrayList.init(file_buf, &tarball_buf_list, std.heap.c_allocator);
+        try gunzip.readAll();
+        gunzip.deinit();
+        Output.prettyErrorln("Decompressed {d} -> {d}\n", .{ file_buf.len, tarball_buf_list.items.len });
+    } else {
+        tarball_buf_list = std.ArrayListUnmanaged(u8){ .capacity = file_buf.len, .items = file_buf };
+    }

     try Archive.extractToDisk(file_buf, folder, 1, false);
 }
src/c.zig (12 changes)

@@ -127,7 +127,7 @@ pub fn moveFileZSlow(from_dir: std.os.fd_t, filename: [*:0]const u8, to_dir: std
 }

 pub fn moveFileZSlowWithHandle(in_handle: std.os.fd_t, to_dir: std.os.fd_t, destination: [*:0]const u8) !void {
-    const stat = try std.os.fstat(in_handle);
+    const stat_ = try std.os.fstat(in_handle);
     // delete if exists, don't care if it fails. it may fail due to the file not existing
     // delete here because we run into weird truncation issues if we do not
     // ftruncate() instead didn't work.

@@ -136,8 +136,8 @@ pub fn moveFileZSlowWithHandle(in_handle: std.os.fd_t, to_dir: std.os.fd_t, dest
     const out_handle = try std.os.openatZ(to_dir, destination, std.os.O_WRONLY | std.os.O_CREAT | std.os.O_CLOEXEC, 022);
     defer std.os.close(out_handle);
     if (comptime Enviroment.isLinux) {
-        _ = std.os.system.fallocate(out_handle, 0, 0, @intCast(i64, stat.size));
-        _ = try std.os.sendfile(out_handle, in_handle, 0, @intCast(usize, stat.size), &[_]std.c.iovec_const{}, &[_]std.c.iovec_const{}, 0);
+        _ = std.os.system.fallocate(out_handle, 0, 0, @intCast(i64, stat_.size));
+        _ = try std.os.sendfile(out_handle, in_handle, 0, @intCast(usize, stat_.size), &[_]std.c.iovec_const{}, &[_]std.c.iovec_const{}, 0);
     } else {
         if (comptime Enviroment.isMac) {
             // if this fails, it doesn't matter

@@ -145,7 +145,7 @@ pub fn moveFileZSlowWithHandle(in_handle: std.os.fd_t, to_dir: std.os.fd_t, dest
             preallocate_file(
                 out_handle,
                 @intCast(std.os.off_t, 0),
-                @intCast(std.os.off_t, stat.size),
+                @intCast(std.os.off_t, stat_.size),
             ) catch {};
         }

@@ -161,6 +161,6 @@ pub fn moveFileZSlowWithHandle(in_handle: std.os.fd_t, to_dir: std.os.fd_t, dest
         }
     }

-    _ = fchmod(out_handle, stat.mode);
-    _ = fchown(out_handle, stat.uid, stat.gid);
+    _ = fchmod(out_handle, stat_.mode);
+    _ = fchown(out_handle, stat_.uid, stat_.gid);
 }
src/cli.zig (34 changes)

@@ -33,6 +33,7 @@ const DevCommand = @import("./cli/dev_command.zig").DevCommand;
 const DiscordCommand = @import("./cli/discord_command.zig").DiscordCommand;
 const BuildCommand = @import("./cli/build_command.zig").BuildCommand;
 const CreateCommand = @import("./cli/create_command.zig").CreateCommand;
+const CreateListExamplesCommand = @import("./cli/create_command.zig").CreateListExamplesCommand;
 const RunCommand = @import("./cli/run_command.zig").RunCommand;

 var start_time: i128 = undefined;

@@ -518,7 +519,10 @@ pub const Command = struct {

     pub fn create(allocator: *std.mem.Allocator, log: *logger.Log, comptime command: Command.Tag) anyerror!Context {
         return Command.Context{
-            .args = try Arguments.parse(allocator, command),
+            .args = if (comptime command != Command.Tag.CreateCommand)
+                try Arguments.parse(allocator, command)
+            else
+                std.mem.zeroes(Api.TransformOptions),
             .log = log,
             .start_time = start_time,
             .allocator = allocator,

@@ -535,7 +539,10 @@ pub const Command = struct {
             return .AutoCommand;
         }

-        const next_arg = (args_iter.next(allocator) orelse return .AutoCommand) catch unreachable;
+        var next_arg = (args_iter.next(allocator) orelse return .AutoCommand) catch unreachable;
+        while (next_arg[0] == '-') {
+            next_arg = (args_iter.next(allocator) orelse return .AutoCommand) catch unreachable;
+        }

         const first_arg_name = std.mem.span(next_arg);
         const RootCommandMatcher = strings.ExactSizeMatcher(8);

@@ -595,8 +602,29 @@ pub const Command = struct {
             },
             .CreateCommand => {
                 const ctx = try Command.Context.create(allocator, log, .CreateCommand);
+                var positionals: [2]string = undefined;
+                var positional_i: usize = 0;

-                try CreateCommand.exec(ctx);
+                var args = try std.process.argsAlloc(allocator);
+
+                if (args.len > 2) {
+                    var remainder = args[2..];
+                    var remainder_i: usize = 0;
+                    var i: usize = 0;
+                    while (remainder_i < remainder.len and positional_i < positionals.len) : (remainder_i += 1) {
+                        var slice = std.mem.trim(u8, std.mem.span(remainder[remainder_i]), " \t\n;");
+                        if (slice.len > 0) {
+                            positionals[positional_i] = slice;
+                            positional_i += 1;
+                        }
+                    }
+                }
+                var positionals_ = positionals[0..positional_i];
+
+                switch (positionals_.len) {
+                    0...1 => try CreateListExamplesCommand.exec(ctx),
+                    else => try CreateCommand.exec(ctx, positionals_),
+                }
             },
             .RunCommand => {
                 const ctx = try Command.Context.create(allocator, log, .RunCommand);
src/cli/create.sh (new file)

@@ -0,0 +1 @@
git add -A . > /dev/null 2>&1; git commit -am "Initial Commit" > /dev/null 2>&1;
src/cli/create_command.zig

@@ -20,12 +20,835 @@ const Command = @import("../cli.zig").Command;
const bundler = @import("../bundler.zig");
const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
const fs = @import("../fs.zig");
const URL = @import("../query_string_map.zig").URL;
const HTTPClient = @import("../http_client.zig");
const ParseJSON = @import("../json_parser.zig").ParseJSON;
const Archive = @import("../libarchive/libarchive.zig").Archive;
const Zlib = @import("../zlib.zig");
const JSPrinter = @import("../js_printer.zig");
const DotEnv = @import("../env_loader.zig");
const NPMClient = @import("../which_npm_client.zig").NPMClient;
const which = @import("../which.zig").which;
const clap = @import("clap");

var bun_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
var bun_path: ?[:0]const u8 = null;
fn execTask(allocator: *std.mem.Allocator, task_: string, cwd: string, PATH: string, npm_client: NPMClient) void {
    const task = std.mem.trim(u8, task_, " \n\r\t");
    if (task.len == 0) return;

    var splitter = std.mem.split(u8, task, " ");
    var count: usize = 0;
    while (splitter.next() != null) {
        count += 1;
    }

    var argv = allocator.alloc(string, count + 2) catch return;
    defer allocator.free(argv);

    argv[0] = npm_client.bin;
    argv[1] = "exec";
    {
        var i: usize = 2;
        splitter = std.mem.split(u8, task, " ");
        while (splitter.next()) |split| {
            argv[i] = split;
            i += 1;
        }
    }

    if (strings.startsWith(task, "bun ")) {
        if (bun_path orelse which(&bun_path_buf, PATH, cwd, "bun")) |bun_path_| {
            bun_path = bun_path_;
            argv = argv[2..];
            argv[0] = std.mem.span(bun_path_);
        }
    }

    Output.pretty("\n<r><d>$<b>", .{});
    for (argv) |arg, i| {
        if (i > argv.len - 1) {
            Output.print(" {s} ", .{arg});
        } else {
            Output.print(" {s}", .{arg});
        }
    }
    Output.pretty("<r>", .{});
    Output.print("\n", .{});
    Output.flush();

    Output.disableBuffering();
    defer Output.enableBuffering();

    var proc = std.ChildProcess.init(argv, allocator) catch return;
    defer proc.deinit();
    proc.stdin_behavior = .Inherit;
    proc.stdout_behavior = .Inherit;
    proc.stderr_behavior = .Inherit;
    proc.cwd = cwd;
    _ = proc.spawnAndWait() catch undefined;
}

const CreateOptions = struct {
    npm_client: ?NPMClient.Tag = null,
    skip_install: bool = false,
    overwrite: bool = false,
    skip_git: bool = false,

    pub fn parse(allocator: *std.mem.Allocator) !CreateOptions {
        const params = comptime [_]clap.Param(clap.Help){
            clap.parseParam("--help Print this menu") catch unreachable,
            clap.parseParam("--npm Use npm for tasks & install") catch unreachable,
            clap.parseParam("--yarn Use yarn for tasks & install") catch unreachable,
            clap.parseParam("--pnpm Use pnpm for tasks & install") catch unreachable,
            clap.parseParam("--force Overwrite existing files") catch unreachable,
            clap.parseParam("--no-install Don't install node_modules") catch unreachable,
            clap.parseParam("--no-git Don't create a git repository") catch unreachable,
            clap.parseParam("<POS>... ") catch unreachable,
        };

        var diag = clap.Diagnostic{};

        var args = clap.parse(clap.Help, &params, .{ .diagnostic = &diag, .allocator = allocator }) catch |err| {
            // Report useful error and exit
            diag.report(Output.errorWriter(), err) catch {};
            return err;
        };

        if (args.flag("--help")) {
            clap.help(Output.writer(), &params) catch {};
            std.os.exit(0);
        }

        var opts = CreateOptions{};
        if (args.flag("--npm")) {
            opts.npm_client = NPMClient.Tag.npm;
        }

        if (args.flag("--yarn")) {
            opts.npm_client = NPMClient.Tag.yarn;
        }

        if (args.flag("--pnpm")) {
            opts.npm_client = NPMClient.Tag.pnpm;
        }

        if (args.flag("--no-install")) {
            opts.skip_install = true;
        }

        if (args.flag("--no-git")) {
            opts.skip_git = true;
        }

        if (args.flag("--force")) {
            opts.overwrite = true;
        }

        return opts;
    }
};

pub const CreateCommand = struct {
    pub const Args = struct {
        template_name: string,
        directory_name: string,
    };
    var client: HTTPClient = undefined;
    var extracting_name_buf: [1024]u8 = undefined;
-   pub fn exec(ctx: Command.Context) !void {}
    pub fn exec(ctx: Command.Context, positionals: []const []const u8) !void {
        var create_options = try CreateOptions.parse(ctx.allocator);
        const template = positionals[0];
        const dirname = positionals[1];
        var progress = std.Progress{};

        var node_ = try progress.start(try std.fmt.bufPrint(&extracting_name_buf, "Loading {s}", .{template}), 0);
        progress.supports_ansi_escape_codes = Output.enable_ansi_colors;
        var node = node_.start("Downloading", 0);

        // alacritty is fast
        if (std.os.getenvZ("ALACRITTY_LOG") != null) {
            progress.refresh_rate_ns = std.time.ns_per_ms * 8;
        }

        defer {
            progress.root.end();
            progress.refresh();
        }

        var filesystem = try fs.FileSystem.init1(ctx.allocator, null);

        var tarball_bytes: MutableString = if (!(strings.eqlComptime(std.fs.path.extension(template), ".tgz") or strings.eqlComptime(std.fs.path.extension(template), ".tar.gz")))
            try Example.fetch(ctx, template, &progress, &node)
        else
            Example.fetchFromDisk(ctx, template, &progress, &node) catch |err| {
                node.end();
                progress.refresh();
                Output.prettyErrorln("Error loading package from disk {s}", .{@errorName(err)});
                Output.flush();
                std.os.exit(1);
            };

        node.end();

        node = progress.root.start(try std.fmt.bufPrint(&extracting_name_buf, "Decompressing {s}", .{template}), 0);
        node.setCompletedItems(0);
        node.setEstimatedTotalItems(0);
        node.activate();
        progress.refresh();

        var file_buf = try ctx.allocator.alloc(u8, 16384);

        var tarball_buf_list = std.ArrayListUnmanaged(u8){ .capacity = file_buf.len, .items = file_buf };
        var gunzip = try Zlib.ZlibReaderArrayList.init(tarball_bytes.list.items, &tarball_buf_list, ctx.allocator);
        try gunzip.readAll();
        gunzip.deinit();

        node.end();

        node = progress.root.start(try std.fmt.bufPrint(&extracting_name_buf, "Extracting {s}", .{template}), 0);
        node.setCompletedItems(0);
        node.setEstimatedTotalItems(0);
        node.activate();
        progress.refresh();

        var pluckers = [_]Archive.Plucker{
            try Archive.Plucker.init("package.json", 2048, ctx.allocator),
            try Archive.Plucker.init("GETTING_STARTED", 512, ctx.allocator),
        };

        var archive_context = Archive.Context{
            .pluckers = &pluckers,
            .overwrite_list = std.StringArrayHashMap(void).init(ctx.allocator),
        };

        var filename_writer = filesystem.dirname_store;

        const destination = try filesystem.dirname_store.append([]const u8, resolve_path.joinAbs(filesystem.top_level_dir, .auto, dirname));

        if (!create_options.overwrite) {
            try Archive.getOverwritingFileList(
                tarball_buf_list.items,
                destination,
                &archive_context,
                @TypeOf(filesystem.dirname_store),
                filesystem.dirname_store,
                1,
            );

            if (archive_context.overwrite_list.count() > 0) {
                node.end();
                progress.root.end();
                progress.refresh();

                // Thank you create-react-app for this copy (and idea)
                Output.prettyErrorln(
                    "<r><red>error<r><d>: <r>The directory <b><green>{s}<r> contains files that could conflict:",
                    .{
                        std.fs.path.basename(destination),
                    },
                );
                for (archive_context.overwrite_list.keys()) |path| {
                    if (strings.endsWith(path, std.fs.path.sep_str)) {
                        Output.prettyErrorln("<r> <cyan>{s}<r>", .{path});
                    } else {
                        Output.prettyErrorln("<r> {s}", .{path});
                    }
                }
                Output.flush();
                std.os.exit(1);
            }
        }

        const extracted_file_count = try Archive.extractToDisk(
            tarball_buf_list.items,
            destination,
            &archive_context,
            1,
            false,
        );

        var plucker = pluckers[0];

        if (!plucker.found or plucker.fd == 0) {
            node.end();
            progress.root.end();
            Output.prettyErrorln("package.json not found. This package is corrupt. Please try again or file an issue if it keeps happening.", .{});
            Output.flush();
            std.os.exit(1);
        }

        node.end();
        node = progress.root.start(try std.fmt.bufPrint(&extracting_name_buf, "Updating package.json", .{}), 0);

        node.activate();
        progress.refresh();

        var source = logger.Source.initPathString("package.json", plucker.contents.toOwnedSliceLeaky());
        var package_json_expr = ParseJSON(&source, ctx.log, ctx.allocator) catch |err| {
            node.end();
            progress.root.end();
            progress.refresh();

            Output.prettyErrorln("package.json failed to parse with error: {s}", .{@errorName(err)});
            Output.flush();
            std.os.exit(1);
        };

        if (ctx.log.errors > 0) {
            node.end();

            progress.refresh();

            if (Output.enable_ansi_colors) {
                try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
            } else {
                try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
            }

            Output.flush();
            std.os.exit(1);
        }

        if (package_json_expr.asProperty("name")) |name_expr| {
            if (name_expr.expr.data != .e_string) {
                node.end();
                progress.root.end();

                progress.refresh();

                Output.prettyErrorln("package.json failed to parse correctly. its missing a name. it shouldnt be missing a name.", .{});
                Output.flush();
                std.os.exit(1);
            }

            var basename = std.fs.path.basename(destination);
            name_expr.expr.data.e_string.utf8 = @intToPtr([*]u8, @ptrToInt(basename.ptr))[0..basename.len];
        } else {
            node.end();
            progress.root.end();

            progress.refresh();

            Output.prettyErrorln("package.json failed to parse correctly. its missing a name. it shouldnt be missing a name.", .{});
            Output.flush();
            std.os.exit(1);
        }

        package_json_expr.data.e_object.is_single_line = false;

        var preinstall_tasks = std.mem.zeroes(std.ArrayListUnmanaged([]const u8));
        var postinstall_tasks = std.mem.zeroes(std.ArrayListUnmanaged([]const u8));

        {
            var i: usize = 0;
            var property_i: usize = 0;
            while (i < package_json_expr.data.e_object.properties.len) : (i += 1) {
                const property = package_json_expr.data.e_object.properties[i];
                const key = property.key.?.asString(ctx.allocator).?;

                if (key.len == 0 or !strings.eqlComptime(key, "bun-create")) {
                    package_json_expr.data.e_object.properties[property_i] = property;
                    property_i += 1;
                    continue;
                }

                var value = property.value.?;
                if (value.asProperty("postinstall")) |postinstall| {
                    switch (postinstall.expr.data) {
                        .e_string => |single_task| {
                            try postinstall_tasks.append(
                                ctx.allocator,
                                try single_task.string(ctx.allocator),
                            );
                        },
                        .e_array => |tasks| {
                            for (tasks.items) |task| {
                                if (task.asString(ctx.allocator)) |task_entry| {
                                    try postinstall_tasks.append(
                                        ctx.allocator,
                                        task_entry,
                                    );
                                }
                            }
                        },
                        else => {},
                    }
                }

                if (value.asProperty("preinstall")) |preinstall| {
                    switch (preinstall.expr.data) {
                        .e_string => |single_task| {
                            try preinstall_tasks.append(
                                ctx.allocator,
                                try single_task.string(ctx.allocator),
                            );
                        },
                        .e_array => |tasks| {
                            for (tasks.items) |task| {
                                if (task.asString(ctx.allocator)) |task_entry| {
                                    try preinstall_tasks.append(
                                        ctx.allocator,
                                        task_entry,
                                    );
                                }
                            }
                        },
                        else => {},
                    }
                }
            }
        }

        node.name = "Saving package.json";
        progress.maybeRefresh();

        const package_json_file = std.fs.File{ .handle = plucker.fd };
        var package_json_writer = JSPrinter.NewFileWriter(package_json_file);

        _ = JSPrinter.printJSON(@TypeOf(package_json_writer), package_json_writer, package_json_expr, &source) catch |err| {
            Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)});
            Output.flush();
            std.os.exit(1);
        };

        var env_loader: DotEnv.Loader = brk: {
            var map = try ctx.allocator.create(DotEnv.Map);
            map.* = DotEnv.Map.init(ctx.allocator);

            break :brk DotEnv.Loader.init(map, ctx.allocator);
        };

        env_loader.loadProcess();

        const PATH = env_loader.map.get("PATH") orelse "";

        var npm_client_: ?NPMClient = null;

        if (!create_options.skip_install) {
            if (env_loader.map.get("NPM_CLIENT")) |npm_client_bin| {
                npm_client_ = NPMClient{ .tag = .npm, .bin = npm_client_bin };
            } else if (PATH.len > 0) {
                var realpath_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;

                if (create_options.npm_client) |tag| {
                    if (which(&realpath_buf, PATH, filesystem.top_level_dir, @tagName(tag))) |bin| {
                        npm_client_ = NPMClient{ .tag = tag, .bin = try ctx.allocator.dupe(u8, bin) };
                    }
                } else if (try NPMClient.detect(ctx.allocator, &realpath_buf, PATH, filesystem.top_level_dir, true)) |npmclient| {
                    npm_client_ = NPMClient{
                        .bin = try ctx.allocator.dupe(u8, npmclient.bin),
                        .tag = npmclient.tag,
                    };
                }
            }
        }

        if (npm_client_ != null and preinstall_tasks.items.len > 0) {
            node.end();
            node = progress.root.start("Running pre-install tasks", preinstall_tasks.items.len);
            node.setCompletedItems(0);
            progress.refresh();

            for (preinstall_tasks.items) |task, i| {
                execTask(ctx.allocator, task, destination, PATH, npm_client_.?);

                node.setCompletedItems(i);
                progress.refresh();
            }
        }

        node.end();

        if (npm_client_) |npm_client| {
            var install_args = [_]string{ npm_client.bin, "install" };
            Output.printError("\n", .{});
            Output.flush();

            Output.prettyln("\n<r><d>$ <b><cyan>{s}<r><d> install<r>", .{@tagName(npm_client.tag)});
            Output.flush();

            var process = try std.ChildProcess.init(&install_args, ctx.allocator);
            process.cwd = destination;

            defer {
                Output.print("\n", .{});
                Output.flush();
            }
            defer process.deinit();

            var term = try process.spawnAndWait();
            _ = process.kill() catch undefined;
        } else if (!create_options.skip_install) {
            progress.log("Failed to detect npm client. Tried pnpm, yarn, and npm.\n", .{});
        }

        progress.refresh();

        if (npm_client_ != null and !create_options.skip_install and postinstall_tasks.items.len > 0) {
            node.end();
            node = progress.root.start("Running post-install tasks", postinstall_tasks.items.len);
            node.setCompletedItems(0);
            progress.refresh();

            for (postinstall_tasks.items) |task, i| {
                execTask(ctx.allocator, task, destination, PATH, npm_client_.?);

                node.setCompletedItems(i);
                progress.refresh();
            }
        }

        var parent_dir = try std.fs.openDirAbsolute(destination, .{});
        std.os.linkat(parent_dir.fd, "gitignore", parent_dir.fd, ".gitignore", 0) catch {};
        std.os.unlinkat(
            parent_dir.fd,
            "gitignore",
            0,
        ) catch {};
        parent_dir.close();

        if (!create_options.skip_git) {
            if (which(&bun_path_buf, PATH, destination, "git")) |git| {
                const git_commands = .{
                    &[_]string{ std.mem.span(git), "init", "--quiet" },
                    &[_]string{ std.mem.span(git), "add", "-A", destination, "--ignore-errors" },
                    &[_]string{ std.mem.span(git), "commit", "-am", "\"Initial Commit\"", "--quiet" },
                };
                // same names, just comptime known values

                inline for (comptime std.meta.fieldNames(@TypeOf(Commands))) |command_field| {
                    const command: []const string = @field(git_commands, command_field);
                    var process = try std.ChildProcess.init(command, ctx.allocator);
                    process.cwd = destination;
                    process.stdin_behavior = .Inherit;
                    process.stdout_behavior = .Inherit;
                    process.stderr_behavior = .Inherit;
                    defer process.deinit();

                    var term = try process.spawnAndWait();
                    _ = process.kill() catch undefined;
                }
            }
        }

        Output.printError("\n", .{});
        Output.printStartEnd(ctx.start_time, std.time.nanoTimestamp());
        Output.prettyErrorln(" <r><d>bun create {s} <r><d><b>({d} files)<r>", .{ template, extracted_file_count });
        Output.flush();
    }
};
const Commands = .{
    &[_]string{""},
    &[_]string{""},
    &[_]string{""},
};
const picohttp = @import("picohttp");

const PackageDownloadThread = struct {
    thread: std.Thread,
    client: HTTPClient,
    tarball_url: string,
    allocator: *std.mem.Allocator,
    buffer: MutableString,
    done: std.atomic.Atomic(u32),
    response: picohttp.Response = undefined,

    pub fn threadHandler(this: *PackageDownloadThread) !void {
        this.done.store(0, .Release);
        this.response = try this.client.send("", &this.buffer);
        this.done.store(1, .Release);
        std.Thread.Futex.wake(&this.done, 1);
    }

    pub fn spawn(allocator: *std.mem.Allocator, tarball_url: string) !*PackageDownloadThread {
        var download = try allocator.create(PackageDownloadThread);
        download.* = PackageDownloadThread{
            .allocator = allocator,
            .client = HTTPClient.init(allocator, .GET, URL.parse(tarball_url), .{}, ""),
            .tarball_url = tarball_url,
            .buffer = try MutableString.init(allocator, 1024),
            .done = std.atomic.Atomic(u32).init(0),
            .thread = undefined,
        };

        download.thread = try std.Thread.spawn(.{}, threadHandler, .{download});

        return download;
    }
};

pub const DownloadedExample = struct {
    tarball_bytes: MutableString,
    example: Example,
};

pub const Example = struct {
    name: string,
    version: string,
    description: string,

    var client: HTTPClient = undefined;
    const examples_url: string = "https://registry.npmjs.org/bun-examples-all/latest";
    var url: URL = undefined;
    pub const timeout: u32 = 6000;

    pub fn print(examples: []const Example) void {
        for (examples) |example, i| {
            var app_name = example.name;

            if (example.description.len > 0) {
                Output.pretty(" <r># {s}<r>\n <b>bun create <cyan>{s}<r><b> ./{s}-app<r>\n<d> \n\n", .{
                    example.description,
                    example.name,
                    app_name,
                });
            } else {
                Output.pretty(" <r><b>bun create <cyan>{s}<r><b> ./{s}-app<r>\n\n", .{
                    example.name,
                    app_name,
                });
            }
        }
    }

    pub fn fetchFromDisk(ctx: Command.Context, absolute_path: string, refresher: *std.Progress, progress: *std.Progress.Node) !MutableString {
        progress.name = "Reading local package";
        refresher.refresh();

        var package = try std.fs.openFileAbsolute(absolute_path, .{ .read = true });
        var stat = try package.stat();
        if (stat.kind != .File) {
            progress.end();
            Output.prettyErrorln("<r>{s} is not a file", .{absolute_path});
            Output.flush();
            std.os.exit(1);
        }

        if (stat.size == 0) {
            progress.end();
            Output.prettyErrorln("<r>{s} is an empty file", .{absolute_path});
            Output.flush();
            std.os.exit(1);
        }

        var mutable_string = try MutableString.init(ctx.allocator, stat.size);
        mutable_string.list.expandToCapacity();
        var bytes = try package.readAll(mutable_string.list.items);
        try mutable_string.inflate(bytes);
        return mutable_string;
    }

    pub fn fetch(ctx: Command.Context, name: string, refresher: *std.Progress, progress: *std.Progress.Node) !MutableString {
        progress.name = "Fetching package.json";
        refresher.refresh();

        const example_start = std.time.nanoTimestamp();
        var url_buf: [1024]u8 = undefined;
        var mutable = try MutableString.init(ctx.allocator, 2048);

        url = URL.parse(try std.fmt.bufPrint(&url_buf, "https://registry.npmjs.org/@bun-examples/{s}/latest", .{name}));
        client = HTTPClient.init(ctx.allocator, .GET, url, .{}, "");
        client.timeout = timeout;
        var response = try client.send("", &mutable);

        switch (response.status_code) {
            404 => return error.ExampleNotFound,
            403 => return error.HTTPForbidden,
            429 => return error.HTTPTooManyRequests,
            499...599 => return error.NPMIsDown,
            200 => {},
            else => return error.HTTPError,
        }

        progress.name = "Parsing package.json";
        refresher.refresh();
        js_ast.Expr.Data.Store.create(default_allocator);
        js_ast.Stmt.Data.Store.create(default_allocator);

        var source = logger.Source.initPathString("package.json", mutable.list.items);
        var expr = ParseJSON(&source, ctx.log, ctx.allocator) catch |err| {
            progress.end();
            refresher.refresh();

            if (ctx.log.errors > 0) {
                if (Output.enable_ansi_colors) {
                    try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
                } else {
                    try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
                }
                Output.flush();
                std.os.exit(1);
            } else {
                Output.prettyErrorln("Error parsing package: <r><red>{s}<r>", .{@errorName(err)});
                Output.flush();
                std.os.exit(1);
            }
        };

        if (ctx.log.errors > 0) {
            progress.end();
            refresher.refresh();

            if (Output.enable_ansi_colors) {
                try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
            } else {
                try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
            }
            Output.flush();
            std.os.exit(1);
        }

        const tarball_url: string = brk: {
            if (expr.asProperty("dist")) |q| {
                if (q.expr.asProperty("tarball")) |p| {
                    if (p.expr.asString(ctx.allocator)) |s| {
                        if (s.len > 0 and (strings.startsWith(s, "https://") or strings.startsWith(s, "http://"))) {
                            break :brk s;
                        }
                    }
                }
            }

            progress.end();
            refresher.refresh();

            Output.prettyErrorln("package.json is missing tarball url. This is an internal error!", .{});
            Output.flush();
            std.os.exit(1);
        };

        progress.name = "Downloading tarball";
        refresher.refresh();

        var thread: *PackageDownloadThread = try PackageDownloadThread.spawn(ctx.allocator, tarball_url);

        std.Thread.Futex.wait(&thread.done, 1, std.time.ns_per_ms * 100) catch {};

        progress.setEstimatedTotalItems(thread.client.body_size);
        progress.setCompletedItems(thread.client.read_count);
        refresher.maybeRefresh();
        if (thread.done.load(.Acquire) == 0) {
            while (true) {
                std.Thread.Futex.wait(&thread.done, 1, std.time.ns_per_ms * 100) catch {};
                progress.setEstimatedTotalItems(thread.client.body_size);
                progress.setCompletedItems(thread.client.read_count);
                refresher.maybeRefresh();
                if (thread.done.load(.Acquire) == 1) {
                    break;
                }
            }
        }

        refresher.maybeRefresh();

        if (thread.response.status_code != 200) {
            progress.end();
            refresher.refresh();
            Output.prettyErrorln("Error fetching tarball: <r><red>{d}<r>", .{thread.response.status_code});
            Output.flush();
            std.os.exit(1);
        }

        refresher.refresh();
        thread.thread.join();

        return thread.buffer;
    }

    pub fn fetchAll(ctx: Command.Context) ![]const Example {
        url = URL.parse(examples_url);
        client = HTTPClient.init(ctx.allocator, .GET, url, .{}, "");
        client.timeout = timeout;
        var mutable: MutableString = try MutableString.init(ctx.allocator, 1024);
        var response = client.send("", &mutable) catch |err| {
            switch (err) {
                error.WouldBlock => {
                    Output.prettyErrorln("Request timed out while trying to fetch examples list. Please try again", .{});
                    Output.flush();
                    std.os.exit(1);
                },
                else => {
                    Output.prettyErrorln("<r><red>{s}<r> while trying to fetch examples list. Please try again", .{@errorName(err)});
                    Output.flush();
                    std.os.exit(1);
                },
            }
        };

        if (response.status_code != 200) {
            Output.prettyErrorln("<r><red>{d}<r> fetching examples :( {s}", .{ response.status_code, mutable.list.items });
            Output.flush();
            std.os.exit(1);
        }

        js_ast.Expr.Data.Store.create(default_allocator);
        js_ast.Stmt.Data.Store.create(default_allocator);
        var source = logger.Source.initPathString("examples.json", mutable.list.items);
        const examples_object = ParseJSON(&source, ctx.log, ctx.allocator) catch |err| {
            if (ctx.log.errors > 0) {
                if (Output.enable_ansi_colors) {
                    try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
                } else {
                    try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
                }
                std.os.exit(1);
                Output.flush();
            } else {
                Output.prettyErrorln("Error parsing examples: <r><red>{s}<r>", .{@errorName(err)});
                Output.flush();
                std.os.exit(1);
            }
        };

        if (ctx.log.errors > 0) {
            if (Output.enable_ansi_colors) {
                try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
            } else {
                try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
            }
            Output.flush();
            std.os.exit(1);
        }

        if (examples_object.asProperty("examples")) |q| {
            if (q.expr.data == .e_object) {
                var count: usize = 0;
                for (q.expr.data.e_object.properties) |property| {
                    count += 1;
                }

                var list = try ctx.allocator.alloc(Example, count);
                for (q.expr.data.e_object.properties) |property, i| {
                    const name = property.key.?.data.e_string.utf8;
                    list[i] = Example{
                        .name = if (std.mem.indexOfScalar(u8, name, '/')) |slash|
                            name[slash + 1 ..]
                        else
                            name,
                        .version = property.value.?.asProperty("version").?.expr.data.e_string.utf8,
                        .description = property.value.?.asProperty("description").?.expr.data.e_string.utf8,
                    };
                }
                return list;
            }
        }

        Output.prettyErrorln("Corrupt examples data: expected object but received {s}", .{@tagName(examples_object.data)});
        Output.flush();
        std.os.exit(1);
    }
};

pub const CreateListExamplesCommand = struct {
    pub fn exec(ctx: Command.Context) !void {
        const time = std.time.nanoTimestamp();
        const examples = try Example.fetchAll(ctx);
        Output.printStartEnd(time, std.time.nanoTimestamp());
        Output.prettyln(" <d>Fetched examples<r>", .{});

        Output.prettyln("Welcome to Bun! Create a new project by pasting any of the following:\n\n", .{});
        Output.flush();

        Example.print(examples);

        Output.pretty("<d>To add a new template, git clone https://github.com/jarred-sumner/bun, add a new folder to the \"examples\" folder, and submit a PR.<r>", .{});
        Output.flush();
    }
};
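`Example.fetch` above resolves a template by requesting `https://registry.npmjs.org/@bun-examples/{s}/latest` and plucking the `dist.tarball` URL out of the response before handing it to PackageDownloadThread. Only a few fields of the registry document matter to it, sketched here (the tarball URL follows npm's usual layout and is illustrative):

{
  "name": "@bun-examples/react",
  "version": "0.0.27",
  "dist": {
    "tarball": "https://registry.npmjs.org/@bun-examples/react/-/react-0.0.27.tgz"
  }
}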
@@ -128,7 +128,14 @@ pub const Output = struct {
     }

     pub fn printElapsed(elapsed: f64) void {
-        Output.prettyError("<r><d>[<b>{d:>.2}ms<r><d>]<r>", .{elapsed});
+        switch (elapsed) {
+            0...1500 => {
+                Output.prettyError("<r><d>[<b>{d:>.2}ms<r><d>]<r>", .{elapsed});
+            },
+            else => {
+                Output.prettyError("<r><d>[<b>{d:>.2}s<r><d>]<r>", .{elapsed / 1000.0});
+            },
+        }
     }

     pub fn printStartEnd(start: i128, end: i128) void {
src/http_client.zig

@@ -46,6 +46,7 @@ read_count: u32 = 0,
 remaining_redirect_count: i8 = 127,
 redirect_buf: [2048]u8 = undefined,
 disable_shutdown: bool = false,
+timeout: u32 = 0,

 pub fn init(allocator: *std.mem.Allocator, method: Method, url: URL, header_entries: Headers.Entries, header_buf: string) HTTPClient {
     return HTTPClient{

@@ -225,6 +226,11 @@ pub fn connect(
     client.setReadBufferSize(http_req_buf.len) catch {};
     client.setQuickACK(true) catch {};

+    if (this.timeout > 0) {
+        client.setReadTimeout(this.timeout) catch {};
+        client.setWriteTimeout(this.timeout) catch {};
+    }
+
     // if (this.url.isLocalhost()) {
     //     try client.connect(
     //         try std.x.os.Socket.Address.initIPv4(try std.net.Address.resolveIp("localhost", port), port),

@@ -394,6 +400,7 @@ pub fn processResponse(this: *HTTPClient, comptime is_https: bool, comptime Clie
     content_length = std.fmt.parseInt(u32, header.value, 10) catch 0;
     try body_out_str.inflate(content_length);
     body_out_str.list.expandToCapacity();
+    this.body_size = content_length;
 },
 content_encoding_hash => {
     if (strings.eqlComptime(header.value, "gzip")) {

@@ -486,6 +493,10 @@ pub fn processResponse(this: *HTTPClient, comptime is_https: bool, comptime Clie
     // set consume_trailer to 1 to discard the trailing header
     // using content-encoding per chunk is not supported
     decoder.consume_trailer = 1;

+    // these variable names are terrible
+    // it's copypasta from https://github.com/h2o/picohttpparser#phr_decode_chunked
+    // (but ported from C -> zig)
     var rret: usize = 0;
     var rsize: usize = last_read;
     var pret: isize = picohttp.phr_decode_chunked(&decoder, buffer.list.items.ptr, &rsize);

@@ -530,6 +541,7 @@ pub fn processResponse(this: *HTTPClient, comptime is_https: bool, comptime Clie
     else => {},
 }

+this.body_size = @intCast(u32, body_out_str.list.items.len);
 return response;
 }
src/js_printer.zig

@@ -81,11 +81,11 @@ pub const SourceMapChunk = struct {

 pub const Options = struct {
     transform_imports: bool = true,
-    to_module_ref: js_ast.Ref,
+    to_module_ref: js_ast.Ref = js_ast.Ref.None,
     require_ref: ?js_ast.Ref = null,
     indent: usize = 0,
     externals: []u32 = &[_]u32{},
-    runtime_imports: runtime.Runtime.Imports,
+    runtime_imports: runtime.Runtime.Imports = runtime.Runtime.Imports{},
     module_hash: u32 = 0,
     source_path: ?fs.Path = null,
     bundle_export_ref: ?js_ast.Ref = null,

@@ -219,6 +219,7 @@ pub fn NewPrinter(
     comptime rewrite_esm_to_cjs: bool,
     comptime bun: bool,
     comptime is_inside_bundle: bool,
+    comptime is_json: bool,
 ) type {
     return struct {
         symbols: Symbol.Map,

@@ -506,7 +507,11 @@ pub fn NewPrinter(
             }
         }

-        pub fn bestQuoteCharForString(p: *Printer, str: anytype, allow_backtick: bool) u8 {
+        pub fn bestQuoteCharForString(p: *Printer, str: anytype, allow_backtick_: bool) u8 {
+            if (comptime is_json) return '"';
+
+            const allow_backtick = allow_backtick_;

             var single_cost: usize = 0;
             var double_cost: usize = 0;
             var backtick_cost: usize = 0;

@@ -867,6 +872,8 @@ pub fn NewPrinter(
         }

         pub inline fn canPrintIdentifier(p: *Printer, name: string) bool {
+            if (comptime is_json) return false;
+
             if (comptime ascii_only) {
                 return js_lexer.isIdentifier(name) and !strings.containsNonBmpCodePoint(name);
             } else {

@@ -889,8 +896,6 @@ pub fn NewPrinter(
         pub fn printExpr(p: *Printer, expr: Expr, level: Level, _flags: ExprFlag) void {
             p.addSourceMapping(expr.loc);
             var flags = _flags;
-            debugl("<printExpr>");
-            defer debugl("</printExpr>");

             switch (expr.data) {
                 .e_missing => {},

@@ -1321,7 +1326,10 @@ pub fn NewPrinter(
                 },
                 .e_object => |e| {
                     const n = p.writer.written;
-                    const wrap = p.stmt_start == n or p.arrow_expr_start == n;
+                    const wrap = if (comptime is_json)
+                        false
+                    else
+                        p.stmt_start == n or p.arrow_expr_start == n;

                     if (wrap) {
                         p.print("(");

@@ -1879,7 +1887,7 @@ pub fn NewPrinter(
                     // While each of those property keys are ASCII, a subset of ASCII is valid as the start of an identifier
                     // "=" and ":" are not valid
                     // So we need to check
-                    if (js_lexer.isIdentifier(key.utf8)) {
+                    if ((comptime !is_json) and js_lexer.isIdentifier(key.utf8)) {
                         p.print(key.utf8);
                     } else {
                         allow_shorthand = false;

@@ -4087,7 +4095,7 @@ pub fn printAst(
     comptime LinkerType: type,
     linker: ?*LinkerType,
 ) !usize {
-    const PrinterType = NewPrinter(false, Writer, LinkerType, false, false, false);
+    const PrinterType = NewPrinter(false, Writer, LinkerType, false, false, false, false);
     var writer = _writer;

     var printer = try PrinterType.init(

@@ -4122,6 +4130,39 @@ pub fn printAst(
     return @intCast(usize, std.math.max(printer.writer.written, 0));
 }

+pub fn printJSON(
+    comptime Writer: type,
+    _writer: Writer,
+    expr: Expr,
+    source: *const logger.Source,
+) !usize {
+    const PrinterType = NewPrinter(false, Writer, void, false, false, false, true);
+    var writer = _writer;
+    var s_expr = S.SExpr{ .value = expr };
+    var stmt = Stmt{ .loc = logger.Loc.Empty, .data = .{
+        .s_expr = &s_expr,
+    } };
+    var stmts = &[_]js_ast.Stmt{stmt};
+    var parts = &[_]js_ast.Part{.{ .stmts = stmts }};
+    const ast = Ast.initTest(parts);
+    var printer = try PrinterType.init(
+        writer,
+        &ast,
+        source,
+        std.mem.zeroes(Symbol.Map),
+        .{},
+        null,
+    );
+
+    printer.printExpr(expr, Level.lowest, ExprFlag{});
+    if (printer.writer.getError()) {} else |err| {
+        return err;
+    }
+    try printer.writer.done();
+
+    return @intCast(usize, std.math.max(printer.writer.written, 0));
+}
+
 pub fn printCommonJS(
     comptime Writer: type,
     _writer: Writer,

@@ -4133,7 +4174,7 @@ pub fn printCommonJS(
     comptime LinkerType: type,
     linker: ?*LinkerType,
 ) !usize {
-    const PrinterType = NewPrinter(false, Writer, LinkerType, true, false, false);
+    const PrinterType = NewPrinter(false, Writer, LinkerType, true, false, false, false);
     var writer = _writer;
     var printer = try PrinterType.init(
         writer,

@@ -4191,7 +4232,7 @@ pub fn printCommonJSThreaded(
     comptime getPos: fn (ctx: GetPosType) anyerror!u64,
     end_off_ptr: *u32,
 ) !WriteResult {
-    const PrinterType = NewPrinter(false, Writer, LinkerType, true, false, true);
+    const PrinterType = NewPrinter(false, Writer, LinkerType, true, false, true, false);
     var writer = _writer;
     var printer = try PrinterType.init(
         writer,
@@ -405,7 +405,34 @@ pub const Archive = struct {
|
||||
// buf: []const u8 = undefined,
|
||||
// dir: FileDescriptorType = 0,
|
||||
|
||||
pub fn extractToDisk(file_buffer: []const u8, root: []const u8, comptime depth_to_skip: usize, comptime close_handles: bool) !void {
|
||||
pub const Context = struct {
|
||||
pluckers: []Plucker = &[_]Plucker{},
|
||||
overwrite_list: std.StringArrayHashMap(void),
|
||||
};
|
||||
|
||||
pub const Plucker = struct {
|
||||
contents: MutableString,
|
||||
filename_hash: u64 = 0,
|
||||
found: bool = false,
|
||||
fd: FileDescriptorType = 0,
|
||||
pub fn init(filepath: string, estimated_size: usize, allocator: *std.mem.Allocator) !Plucker {
|
||||
return Plucker{
|
||||
.contents = try MutableString.init(allocator, estimated_size),
|
||||
.filename_hash = std.hash.Wyhash.hash(0, filepath),
|
||||
.fd = 0,
|
||||
.found = false,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub fn getOverwritingFileList(
|
||||
file_buffer: []const u8,
|
||||
root: []const u8,
|
||||
ctx: *Archive.Context,
|
||||
comptime FilePathAppender: type,
|
||||
appender: FilePathAppender,
|
||||
comptime depth_to_skip: usize,
|
||||
) !void {
|
||||
var entry: *lib.archive_entry = undefined;
|
||||
var ext: *lib.archive = undefined;
|
||||
|
||||
@@ -415,9 +442,100 @@ pub const Archive = struct {
    defer stream.deinit();
    _ = stream.openRead();
    var archive = stream.archive;
    const dir: std.fs.Dir = brk: {
        const cwd = std.fs.cwd();

        // if the destination doesn't exist, we skip the whole thing since nothing can overwrite it.
        if (std.fs.path.isAbsolute(root)) {
            break :brk std.fs.openDirAbsolute(root, .{ .iterate = true }) catch return;
        } else {
            break :brk cwd.openDir(root, .{ .iterate = true }) catch return;
        }
    };

    loop: while (true) {
        const r = @intToEnum(Status, lib.archive_read_next_header(archive, &entry));

        switch (r) {
            Status.eof => break :loop,
            Status.failed, Status.fatal, Status.retry => return error.Fail,
            else => {
                var pathname: [:0]const u8 = std.mem.sliceTo(lib.archive_entry_pathname(entry).?, 0);
                var tokenizer = std.mem.tokenize(u8, std.mem.span(pathname), std.fs.path.sep_str);
                comptime var depth_i: usize = 0;
                inline while (depth_i < depth_to_skip) : (depth_i += 1) {
                    if (tokenizer.next() == null) continue :loop;
                }

                var pathname_ = tokenizer.rest();
                pathname = std.mem.sliceTo(pathname_.ptr[0..pathname_.len :0], 0);
                const dirname = std.mem.trim(u8, std.fs.path.dirname(std.mem.span(pathname)) orelse "", std.fs.path.sep_str);

                const size = @intCast(usize, std.math.max(lib.archive_entry_size(entry), 0));
                if (size > 0) {
                    var opened = dir.openFileZ(pathname, .{ .write = true }) catch continue :loop;
                    var stat = try opened.stat();

                    if (stat.size > 0) {
                        const is_already_top_level = dirname.len == 0;
                        const path_to_use_: string = brk: {
                            const __pathname: string = std.mem.span(pathname);

                            if (is_already_top_level) break :brk __pathname;

                            const index = std.mem.indexOfScalar(u8, __pathname, std.fs.path.sep).?;
                            break :brk __pathname[0..index];
                        };
                        var temp_buf: [1024]u8 = undefined;
                        std.mem.copy(u8, &temp_buf, path_to_use_);
                        var path_to_use: string = temp_buf[0..path_to_use_.len];
                        if (!is_already_top_level) {
                            temp_buf[path_to_use_.len] = std.fs.path.sep;
                            path_to_use = temp_buf[0 .. path_to_use_.len + 1];
                        }

                        var overwrite_entry = try ctx.overwrite_list.getOrPut(path_to_use);
                        if (!overwrite_entry.found_existing) {
                            overwrite_entry.key_ptr.* = try appender.append(@TypeOf(path_to_use), path_to_use);
                        }
                    }
                }
            },
        }
    }
}
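
// A hedged sketch of calling getOverwritingFileList (the appender here is an
// assumption, not from this diff): FilePathAppender just needs an
// append(comptime T, value) method that returns an owned copy of the path.
//
//   var ctx = Archive.Context{
//       .overwrite_list = std.StringArrayHashMap(void).init(allocator),
//   };
//   try Archive.getOverwritingFileList(
//       tarball_bytes,   // the whole archive, already in memory
//       destination_dir, // where extraction would land
//       &ctx,
//       @TypeOf(&arena), // hypothetical string-arena appender
//       &arena,
//       1,               // depth_to_skip: drop the tarball's top-level folder
//   );
//   // ctx.overwrite_list now names the existing, non-empty files (or their
//   // top-level folders) that extracting into destination_dir would overwrite.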

pub fn extractToDisk(
    file_buffer: []const u8,
    root: []const u8,
    ctx: ?*Archive.Context,
    comptime depth_to_skip: usize,
    comptime close_handles: bool,
) !u32 {
    var entry: *lib.archive_entry = undefined;
    var ext: *lib.archive = undefined;

    const flags = @enumToInt(Flags.Extract.time) | @enumToInt(Flags.Extract.perm) | @enumToInt(Flags.Extract.acl) | @enumToInt(Flags.Extract.fflags);
    var stream: BufferReadStream = undefined;
    stream.init(file_buffer);
    defer stream.deinit();
    _ = stream.openRead();
    var archive = stream.archive;
    var count: u32 = 0;

    const dir: std.fs.Dir = brk: {
        const cwd = std.fs.cwd();
        cwd.makePath(
            root,
        ) catch {};

        if (std.fs.path.isAbsolute(root)) {
            break :brk try std.fs.openDirAbsolute(root, .{ .iterate = true });
        } else {
            break :brk try cwd.openDir(root, .{ .iterate = true });
        }
    };

    const cwd = std.fs.cwd();
    const dir = try cwd.makeOpenPath(root, .{ .iterate = true });
    defer if (comptime close_handles) dir.close();

    loop: while (true) {
@@ -427,7 +545,7 @@ pub const Archive = struct {
            Status.eof => break :loop,
            Status.failed, Status.fatal, Status.retry => return error.Fail,
            else => {
                var pathname: [:0]const u8 = std.mem.sliceTo(lib.archive_entry_pathname_utf8(entry).?, 0);
                var pathname: [:0]const u8 = std.mem.sliceTo(lib.archive_entry_pathname(entry).?, 0);
                var tokenizer = std.mem.tokenize(u8, std.mem.span(pathname), std.fs.path.sep_str);
                comptime var depth_i: usize = 0;
                inline while (depth_i < depth_to_skip) : (depth_i += 1) {
@@ -439,15 +557,14 @@ pub const Archive = struct {
                const dirname = std.fs.path.dirname(std.mem.span(pathname)) orelse "";

                const mask = lib.archive_entry_filetype(entry);

                if (lib.archive_entry_size(entry) > 0) {
                    Output.prettyln("<r><d>{s}/<r>{s}", .{ root, pathname });
                const size = @intCast(usize, std.math.max(lib.archive_entry_size(entry), 0));
                if (size > 0) {
                    Output.prettyln(" {s}", .{pathname});

                    const file = dir.createFileZ(pathname, .{ .truncate = true }) catch |err| brk: {
                        switch (err) {
                            error.FileNotFound => {
                                const subdir = try dir.makeOpenPath(dirname, .{ .iterate = true });
                                defer if (comptime close_handles) subdir.close();
                                dir.makePath(dirname) catch {};
                                break :brk try dir.createFileZ(pathname, .{ .truncate = true });
                            },
                            else => {
@@ -455,12 +572,35 @@ pub const Archive = struct {
                            },
                        }
                    };
                    defer if (comptime close_handles) file.close();
                    _ = lib.archive_read_data_into_fd(archive, file.handle);
                    count += 1;

                    _ = C.fchmod(file.handle, lib.archive_entry_perm(entry));

                    if (ctx) |ctx_| {
                        const hash: u64 = if (ctx_.pluckers.len > 0)
                            std.hash.Wyhash.hash(0, std.mem.span(pathname))
                        else
                            @as(u64, 0);

                        for (ctx_.pluckers) |*plucker_| {
                            if (plucker_.filename_hash == hash) {
                                try plucker_.contents.inflate(size);
                                plucker_.contents.list.expandToCapacity();
                                var read = lib.archive_read_data(archive, plucker_.contents.list.items.ptr, size);
                                try plucker_.contents.inflate(@intCast(usize, read));
                                plucker_.found = read > 0;
                                plucker_.fd = file.handle;
                                continue :loop;
                            }
                        }
                    }

                    _ = lib.archive_read_data_into_fd(archive, file.handle);
                }
            },
        }
    }

    return count;
}
};
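
// End-to-end sketch (variable names are illustrative, not from this diff):
//
//   const extracted = try Archive.extractToDisk(
//       tarball_bytes, // []const u8 holding the whole archive
//       "./my-app",    // destination root; created if missing
//       &ctx,          // or null when no Pluckers are needed
//       1,             // skip the tarball's top-level directory
//       true,          // close file handles as extraction proceeds
//   );
//   // `extracted` is the number of regular files written to disk.
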
@@ -549,14 +549,14 @@ pub const s2n_offered_early_data = struct_s2n_offered_early_data;
var booted = false;
pub var global_s2n_config: *s2n_config = undefined;
const unexpectedErrno = std.os.unexpectedErrno;
const S2NError = error{ Closed, Blocked, Alert, Protocol, Internal, Usage };
const S2NError = error{ Closed, WouldBlock, Alert, Protocol, Internal, Usage };
pub inline fn s2nErrorNo(rc: c_int) S2NError!std.os.system.E {
    switch (s2n_error_get_type(rc)) {
        -1 => return error.Internal,
        S2N_ERR_T_OK => return .SUCCESS,
        S2N_ERR_T_IO => return std.os.errno(rc),
        S2N_ERR_T_CLOSED => return error.Closed,
        S2N_ERR_T_BLOCKED => return error.Blocked,
        S2N_ERR_T_BLOCKED => return error.WouldBlock,
        S2N_ERR_T_ALERT => return error.Alert,
        S2N_ERR_T_PROTO => return error.Protocol,
        S2N_ERR_T_INTERNAL => return error.Internal,

40
src/which.zig
Normal file
@@ -0,0 +1,40 @@
const std = @import("std");

fn isValid(buf: *[std.fs.MAX_PATH_BYTES]u8, segment: []const u8, bin: []const u8) ?u16 {
    std.mem.copy(u8, buf, segment);
    buf[segment.len] = std.fs.path.sep;
    std.mem.copy(u8, buf[segment.len + 1 ..], bin);
    buf[segment.len + 1 + bin.len ..][0] = 0;
    var filepath = buf[0 .. segment.len + 1 + bin.len :0];

    std.os.accessZ(filepath, std.os.X_OK) catch return null;
    return @intCast(u16, filepath.len);
}
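
// (Explanatory note, not in the original:) isValid above lays out
// segment ++ sep ++ bin ++ "\x00" in buf, so for segment "/usr/bin" and
// bin "which" the buffer holds "/usr/bin/which\x00"; accessZ with X_OK
// then checks that this candidate exists and is executable.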

// Like /usr/bin/which but without needing to exec a child process
// Remember to resolve the symlink if necessary
pub fn which(buf: *[std.fs.MAX_PATH_BYTES]u8, path: []const u8, cwd: []const u8, bin: []const u8) ?[:0]const u8 {
    if (isValid(buf, std.mem.trimRight(u8, cwd, std.fs.path.sep_str), bin)) |len| {
        return buf[0..len :0];
    }

    var path_iter = std.mem.tokenize(u8, path, ":");
    while (path_iter.next()) |segment| {
        if (isValid(buf, segment, bin)) |len| {
            return buf[0..len :0];
        }
    }

    return null;
}

test "which" {
    var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
    var realpath = std.os.getenv("PATH") orelse unreachable;
    var whichbin = which(&buf, realpath, try std.process.getCwdAlloc(std.heap.c_allocator), "which");
    try std.testing.expectEqualStrings(whichbin orelse return std.debug.assert(false), "/usr/bin/which");
    try std.testing.expect(null == which(&buf, realpath, try std.process.getCwdAlloc(std.heap.c_allocator), "baconnnnnn"));
    try std.testing.expect(null != which(&buf, realpath, try std.process.getCwdAlloc(std.heap.c_allocator), "zig"));
    try std.testing.expect(null == which(&buf, realpath, try std.process.getCwdAlloc(std.heap.c_allocator), "bin"));
    try std.testing.expect(null == which(&buf, realpath, try std.process.getCwdAlloc(std.heap.c_allocator), "usr"));
}
108
src/which_npm_client.zig
Normal file
@@ -0,0 +1,108 @@
usingnamespace @import("./global.zig");

const which = @import("./which.zig").which;
const std = @import("std");

pub const NPMClient = struct {
    bin: string,
    tag: Tag,

    pub const Tag = enum {
        npm,
        yarn,
        pnpm,
    };

    pub fn isYarnBerry(allocator: *std.mem.Allocator, yarn_path: string) bool {
        var args = [_]string{ yarn_path, "--version" };
        var child_process = std.ChildProcess.init(&args, allocator) catch return true;
        defer child_process.deinit();
        child_process.cwd_dir = std.fs.cwd();
        child_process.expand_arg0 = .no_expand;
        child_process.stdout_behavior = .Pipe;
        child_process.stderr_behavior = .Pipe;
        child_process.spawn() catch return true;
        defer _ = child_process.kill() catch undefined;

        var path_buf: [512]u8 = undefined;
        var path_len = child_process.stdout.?.read(&path_buf) catch return true;

        if (path_len == 0) {
            return true;
        }

        return path_buf[0] != '1';
    }
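
    // Design note: every failure path above returns true ("assume Yarn Berry"),
    // the safe default; detect() below then re-runs with allow_yarn = false and
    // falls back to pnpm/npm instead of driving an unsupported yarn 2+.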

    pub fn detect(allocator: *std.mem.Allocator, realpath_buf: *[std.fs.MAX_PATH_BYTES]u8, PATH: string, cwd: string, comptime allow_yarn: bool) !?NPMClient {

        // Preference order:
        // - pnpm, if it exists, is the default: it's the most esoteric, so if you have it installed, you probably want it.
        // - yarn, if it exists and is yarn 1, is next (yarn 2 or later is not supported).
        // - otherwise npm.
        var path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;

        const path: [:0]const u8 = brk: {
            if (comptime allow_yarn) {
                break :brk which(
                    &path_buf,
                    PATH,
                    cwd,
                    "pnpm",
                ) orelse which(
                    &path_buf,
                    PATH,
                    cwd,
                    "yarn",
                ) orelse which(
                    &path_buf,
                    PATH,
                    cwd,
                    "npm",
                ) orelse "";
            } else {
                break :brk which(
                    &path_buf,
                    PATH,
                    cwd,
                    "pnpm",
                ) orelse which(
                    &path_buf,
                    PATH,
                    cwd,
                    "npm",
                ) orelse "";
            }
            unreachable;
        };

        var basename = std.fs.path.basename(path);
        if (basename.len == 0) return null;

        if (comptime allow_yarn) {
            if (std.mem.indexOf(u8, basename, "yarn") != null) {
                if (isYarnBerry(allocator, path)) {
                    return try detect(allocator, realpath_buf, PATH, cwd, false);
                }
            }
        }

        var file = std.fs.openFileAbsoluteZ(path, .{ .read = true }) catch return null;
        defer file.close();
        const out_path = std.os.getFdPath(file.handle, realpath_buf) catch return null;

        if (strings.contains(basename, "pnpm")) {
            return NPMClient{ .bin = out_path, .tag = .pnpm };
        }

        if (strings.contains(basename, "yarn")) {
            return NPMClient{ .bin = out_path, .tag = .yarn };
        }

        if (strings.contains(basename, "npm")) {
            return NPMClient{ .bin = out_path, .tag = .npm };
        }

        return null;
    }
};
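
// A minimal usage sketch (assuming the caller owns the buffer; `runInstall`
// is a hypothetical helper, not part of this file):
//
//   var realpath_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
//   const PATH = std.os.getenv("PATH") orelse "";
//   if (try NPMClient.detect(allocator, &realpath_buf, PATH, cwd, true)) |client| {
//       runInstall(client.tag, client.bin);
//   }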