Former-commit-id: a5f1670e92
This commit is contained in:
Jarred Sumner
2021-05-11 18:39:00 -07:00
parent 0c951bd012
commit 324784cd6e
28 changed files with 3414 additions and 528 deletions

7
.gitignore vendored
View File

@@ -17,4 +17,9 @@ dist
build
*.wat
zig-out
pnpm-lock.yaml
pnpm-lock.yaml
README.md.template
src/deps/zig-clap/example
src/deps/zig-clap/README.md
src/deps/zig-clap/.github
src/deps/zig-clap/.gitattributes

26
Makefile Normal file
View File

@@ -0,0 +1,26 @@
# Build entry points for Speedy. Every target here names a build
# configuration rather than an output file, so declare them all phony to
# keep make from skipping work when a same-named file exists.
.PHONY: speedy api speedy-prod-native speedy-prod-wasm speedy-prod-wasi \
	speedy-dev speedy-dev-native speedy-dev-wasm speedy-dev-wasi

# Production builds for every target (native, WASI, freestanding WASM).
speedy: speedy-prod-native speedy-prod-wasi speedy-prod-wasm

# Regenerate the API bindings (ESM, TypeScript, Zig) from the peechy schema.
api:
	peechy --schema src/api/schema.peechy --esm src/api/schema.js --ts src/api/schema.d.ts --zig src/api/schema.zig

speedy-prod-native:
	zig build -Drelease-fast

speedy-prod-wasm:
	zig build -Drelease-fast -Dtarget=wasm32-freestanding

speedy-prod-wasi:
	zig build -Drelease-fast -Dtarget=wasm32-wasi

# Debug builds for every target.
speedy-dev: speedy-dev-native speedy-dev-wasi speedy-dev-wasm

speedy-dev-native:
	zig build

speedy-dev-wasm:
	zig build -Dtarget=wasm32-freestanding

speedy-dev-wasi:
	zig build -Dtarget=wasm32-wasi

View File

@@ -1,10 +1,10 @@
# Speedy
Incredibly fast ECMAScript & TypeScript bundler designed for development.
Incredibly fast ECMAScript & TypeScript toolchain optimized for development.
## Motivation
JavaScript bundlers run very slowly in web browsers.
Nobody should have to wait for build tools to be productive.
## Purpose
@@ -12,7 +12,7 @@ The purpose of Speedy is to very quickly convert ECMAScript/TypeScript into some
Goals:
- Transpile fast inside a web browser. "Fast" is defined as "<= 3ms per un-minified file up to 1000 LOC" without build caching (FS cache yes).
- Transpile fast. "Fast" is defined as "<= 3ms per un-minified file up to 1000 LOC" without a build cache
- Transpile JSX to ECMAScript
- Remove TypeScript annotations
- Conditionally support React Fast Refresh
@@ -35,33 +35,7 @@ Non-goals:
## How it works
Much of the code is a line-for-line port of esbuild to Zig, with a few important differences.
### Implementation differences
#### Moar lookup tables
### Why not just use esbuild?
#### Missing features
- Hot Module Reloading
- Rewrite CommonJS/SystemJS/UMD imports and exports to ESM
- React Fast Refresh
#### Go WASM performance isn't great.
There are a number of reasons for this:
- Unlike native targets, Go's WASM target runs the garbage collector on the same thread as the application. Since this use case is very constrained (no need for shared memory, or long-term objects), rewriting in Zig lets us get away with a bump allocator -- skipping garbage collection entirely. This is faster than what Go does and possibly Rust, since this zeroes out the heap in one call at the end, rather than progressively zeroing memory.
- Goroutines cross the JS<>WASM binding, which is very slow. The more goroutines you use, the slower your code runs. When building a Zig project in single-threaded mode, Zig's `comptime` feature compiles away most of the difference.
- Slow startup time: unless you use TinyGo, Go WASM binaries are > 2 MB. In esbuild's case, at the time of writing it's 6 MB. That's a lot of code for the web browser to download & compile.
#### Different constraints enable performance improvements
If bundler means "merge N source files into 1 or few source file(s)", Speedy is most definitely not a bundler. Unlike most bundlers today, Speedy deliberately outputs
If bundler means "turn my development code into something a browser can run",
Much of the code is a line-for-line port of esbuild to Zig. Thank you @evanw for building esbuild - a fantastic ECMAScript & CSS Bundler, and for inspiring this project.
### Compatibility Table

View File

@@ -23,9 +23,9 @@ pub fn build(b: *std.build.Builder) void {
exe = b.addExecutable("esdev", "src/main_wasi.zig");
exe.is_dynamic = true;
if (mode == std.builtin.Mode.Debug) {
exe.setOutputDir("build/bin/debug");
exe.setOutputDir("build/wasi/debug");
} else {
exe.setOutputDir("build/bin");
exe.setOutputDir("build/wasi");
}
} else if (target.getCpuArch().isWasm()) {
std.debug.print("Build OS: WASM\n", .{});
@@ -88,6 +88,11 @@ pub fn build(b: *std.build.Builder) void {
}
}
exe.addPackage(.{
.name = "clap",
.path = "src/deps/zig-clap/clap.zig",
});
std.debug.print("Build Destination: {s}\n", .{exe.getOutputPath()});
var walker = std.fs.walkPath(std.heap.page_allocator, cwd) catch unreachable;
if (std.builtin.is_test) {

344
src/api/schema.d.ts vendored
View File

@@ -1,4 +1,4 @@
import type { ByteBuffer } from "peechy";
import type {ByteBuffer} from "peechy";
type byte = number;
type float = number;
@@ -12,177 +12,191 @@ type int32 = number;
type float32 = number;
type uint16 = number;
type uint32 = number;
export enum Loader {
jsx = 1,
js = 2,
ts = 3,
tsx = 4,
css = 5,
file = 6,
json = 7,
}
export const LoaderKeys = {
1: "jsx",
jsx: "jsx",
2: "js",
js: "js",
3: "ts",
ts: "ts",
4: "tsx",
tsx: "tsx",
5: "css",
css: "css",
6: "file",
file: "file",
7: "json",
json: "json",
};
export enum JSXRuntime {
automatic = 1,
classic = 2,
}
export const JSXRuntimeKeys = {
1: "automatic",
automatic: "automatic",
2: "classic",
classic: "classic",
};
export enum TransformResponseStatus {
success = 1,
fail = 2,
}
export const TransformResponseStatusKeys = {
1: "success",
success: "success",
2: "fail",
fail: "fail",
};
export enum MessageKind {
err = 1,
warn = 2,
note = 3,
debug = 4,
}
export const MessageKindKeys = {
1: "err",
err: "err",
2: "warn",
warn: "warn",
3: "note",
note: "note",
4: "debug",
debug: "debug",
};
export interface JSX {
factory: string;
runtime: JSXRuntime;
fragment: string;
production: boolean;
import_source: string;
react_fast_refresh: boolean;
loader_keys: string[];
loader_values: Loader[];
}
export enum Loader {
jsx = 1,
js = 2,
ts = 3,
tsx = 4,
css = 5,
file = 6,
json = 7
}
export const LoaderKeys = {
1: "jsx",
jsx: "jsx",
2: "js",
js: "js",
3: "ts",
ts: "ts",
4: "tsx",
tsx: "tsx",
5: "css",
css: "css",
6: "file",
file: "file",
7: "json",
json: "json"
}
export enum ResolveMode {
disable = 1,
lazy = 2,
dev = 3,
bundle = 4
}
export const ResolveModeKeys = {
1: "disable",
disable: "disable",
2: "lazy",
lazy: "lazy",
3: "dev",
dev: "dev",
4: "bundle",
bundle: "bundle"
}
export enum Platform {
browser = 1,
node = 2
}
export const PlatformKeys = {
1: "browser",
browser: "browser",
2: "node",
node: "node"
}
export enum JSXRuntime {
automatic = 1,
classic = 2
}
export const JSXRuntimeKeys = {
1: "automatic",
automatic: "automatic",
2: "classic",
classic: "classic"
}
export enum TransformResponseStatus {
success = 1,
fail = 2
}
export const TransformResponseStatusKeys = {
1: "success",
success: "success",
2: "fail",
fail: "fail"
}
export enum MessageKind {
err = 1,
warn = 2,
note = 3,
debug = 4
}
export const MessageKindKeys = {
1: "err",
err: "err",
2: "warn",
warn: "warn",
3: "note",
note: "note",
4: "debug",
debug: "debug"
}
export interface JSX {
factory: string;
runtime: JSXRuntime;
fragment: string;
development: boolean;
import_source: string;
react_fast_refresh: boolean;
}
export interface TransformOptions {
jsx: JSX;
ts: boolean;
base_path: string;
define_keys: string[];
define_values: string[];
}
export interface TransformOptions {
jsx?: JSX;
tsconfig_override?: string;
resolve?: ResolveMode;
public_url?: string;
absolute_working_dir?: string;
define_keys?: string[];
define_values?: string[];
preserve_symlinks?: boolean;
entry_points?: string[];
write?: boolean;
inject?: string[];
output_dir?: string;
external?: string[];
loader_keys?: string[];
loader_values?: Loader[];
main_fields?: string[];
platform?: Platform;
}
export interface FileHandle {
path: string;
size: uint;
fd: uint;
}
export interface FileHandle {
path: string;
size: uint;
fd: uint;
}
export interface Transform {
handle?: FileHandle;
path?: string;
contents?: string;
loader?: Loader;
options?: TransformOptions;
}
export interface Transform {
handle?: FileHandle;
path?: string;
contents?: Uint8Array;
loader?: Loader;
options?: TransformOptions;
}
export interface OutputFile {
data: Uint8Array;
path: string;
}
export interface OutputFile {
data: Uint8Array;
path: string;
}
export interface TransformResponse {
status: TransformResponseStatus;
files: OutputFile[];
errors: Message[];
}
export interface TransformResponse {
status: TransformResponseStatus;
files: OutputFile[];
errors: Message[];
}
export interface Location {
file: string;
namespace: string;
line: int32;
column: int32;
line_text: string;
suggestion: string;
offset: uint;
}
export interface Location {
file: string;
namespace: string;
line: int32;
column: int32;
line_text: string;
suggestion: string;
offset: uint;
}
export interface MessageData {
text?: string;
location?: Location;
}
export interface MessageData {
text?: string;
location?: Location;
}
export interface Message {
kind: MessageKind;
data: MessageData;
notes: MessageData[];
}
export interface Message {
kind: MessageKind;
data: MessageData;
notes: MessageData[];
}
export interface Log {
warnings: uint32;
errors: uint32;
msgs: Message[];
}
export interface Log {
warnings: uint32;
errors: uint32;
msgs: Message[];
}
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
export declare function decodeJSX(buffer: ByteBuffer): JSX;
export declare function encodeTransformOptions(
message: TransformOptions,
bb: ByteBuffer
): void;
export declare function decodeTransformOptions(
buffer: ByteBuffer
): TransformOptions;
export declare function encodeFileHandle(
message: FileHandle,
bb: ByteBuffer
): void;
export declare function decodeFileHandle(buffer: ByteBuffer): FileHandle;
export declare function encodeTransform(
message: Transform,
bb: ByteBuffer
): void;
export declare function decodeTransform(buffer: ByteBuffer): Transform;
export declare function encodeOutputFile(
message: OutputFile,
bb: ByteBuffer
): void;
export declare function decodeOutputFile(buffer: ByteBuffer): OutputFile;
export declare function encodeTransformResponse(
message: TransformResponse,
bb: ByteBuffer
): void;
export declare function decodeTransformResponse(
buffer: ByteBuffer
): TransformResponse;
export declare function encodeLocation(message: Location, bb: ByteBuffer): void;
export declare function decodeLocation(buffer: ByteBuffer): Location;
export declare function encodeMessageData(
message: MessageData,
bb: ByteBuffer
): void;
export declare function decodeMessageData(buffer: ByteBuffer): MessageData;
export declare function encodeMessage(message: Message, bb: ByteBuffer): void;
export declare function decodeMessage(buffer: ByteBuffer): Message;
export declare function encodeLog(message: Log, bb: ByteBuffer): void;
export declare function decodeLog(buffer: ByteBuffer): Log;
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
export declare function decodeJSX(buffer: ByteBuffer): JSX;
export declare function encodeTransformOptions(message: TransformOptions, bb: ByteBuffer): void;
export declare function decodeTransformOptions(buffer: ByteBuffer): TransformOptions;
export declare function encodeFileHandle(message: FileHandle, bb: ByteBuffer): void;
export declare function decodeFileHandle(buffer: ByteBuffer): FileHandle;
export declare function encodeTransform(message: Transform, bb: ByteBuffer): void;
export declare function decodeTransform(buffer: ByteBuffer): Transform;
export declare function encodeOutputFile(message: OutputFile, bb: ByteBuffer): void;
export declare function decodeOutputFile(buffer: ByteBuffer): OutputFile;
export declare function encodeTransformResponse(message: TransformResponse, bb: ByteBuffer): void;
export declare function decodeTransformResponse(buffer: ByteBuffer): TransformResponse;
export declare function encodeLocation(message: Location, bb: ByteBuffer): void;
export declare function decodeLocation(buffer: ByteBuffer): Location;
export declare function encodeMessageData(message: MessageData, bb: ByteBuffer): void;
export declare function decodeMessageData(buffer: ByteBuffer): MessageData;
export declare function encodeMessage(message: Message, bb: ByteBuffer): void;
export declare function decodeMessage(buffer: ByteBuffer): Message;
export declare function encodeLog(message: Log, bb: ByteBuffer): void;
export declare function decodeLog(buffer: ByteBuffer): Log;

View File

@@ -30,6 +30,38 @@ const LoaderKeys = {
"file": "file",
"json": "json"
};
// Bidirectional lookup table for the ResolveMode enum: maps both the wire
// value ("1".."4") and the symbolic name to the numeric wire value, so
// encoders accept either form.
const ResolveMode = {
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"disable": 1,
"lazy": 2,
"dev": 3,
"bundle": 4
};
// Reverse table for ResolveMode: wire value or name -> canonical name.
const ResolveModeKeys = {
"1": "disable",
"2": "lazy",
"3": "dev",
"4": "bundle",
"disable": "disable",
"lazy": "lazy",
"dev": "dev",
"bundle": "bundle"
};
// Bidirectional lookup table for the Platform enum (wire value or name ->
// numeric wire value).
const Platform = {
"1": 1,
"2": 2,
"browser": 1,
"node": 2
};
// Reverse table for Platform: wire value or name -> canonical name.
const PlatformKeys = {
"1": "browser",
"2": "node",
"browser": "browser",
"node": "node"
};
const JSXRuntime = {
"1": 1,
"2": 2,
@@ -49,15 +81,9 @@ function decodeJSX(bb) {
result["factory"] = bb.readString();
result["runtime"] = JSXRuntime[bb.readByte()];
result["fragment"] = bb.readString();
result["production"] = !!bb.readByte();
result["development"] = !!bb.readByte();
result["import_source"] = bb.readString();
result["react_fast_refresh"] = !!bb.readByte();
var length = bb.readVarUint();
var values = result["loader_keys"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
var length = bb.readVarUint();
var values = result["loader_values"] = Array(length);
for (var i = 0; i < length; i++) values[i] = Loader[bb.readByte()];
return result;
}
@@ -86,11 +112,11 @@ bb.writeByte(encoded);
throw new Error("Missing required field \"fragment\"");
}
var value = message["production"];
var value = message["development"];
if (value != null) {
bb.writeByte(value);
} else {
throw new Error("Missing required field \"production\"");
throw new Error("Missing required field \"development\"");
}
var value = message["import_source"];
@@ -107,20 +133,227 @@ bb.writeByte(encoded);
throw new Error("Missing required field \"react_fast_refresh\"");
}
var value = message["loader_keys"];
}
// Decodes a TransformOptions message from `bb` (a peechy ByteBuffer).
// The message is encoded as a sequence of (field-id byte, value) pairs
// terminated by a 0 byte, so every field is optional; an unknown field id
// aborts the parse with an error.
function decodeTransformOptions(bb) {
var result = {};
while (true) {
switch (bb.readByte()) {
case 0:
// End-of-message marker.
return result;
case 1:
result["jsx"] = decodeJSX(bb);
break;
case 2:
result["tsconfig_override"] = bb.readString();
break;
case 3:
// Enum fields are a single byte mapped through the bidirectional
// lookup table.
result["resolve"] = ResolveMode[bb.readByte()];
break;
case 4:
result["public_url"] = bb.readString();
break;
case 5:
result["absolute_working_dir"] = bb.readString();
break;
case 6:
// String arrays: varuint element count, then that many strings.
var length = bb.readVarUint();
var values = result["define_keys"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 7:
var length = bb.readVarUint();
var values = result["define_values"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 8:
result["preserve_symlinks"] = !!bb.readByte();
break;
case 9:
var length = bb.readVarUint();
var values = result["entry_points"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 10:
result["write"] = !!bb.readByte();
break;
case 11:
var length = bb.readVarUint();
var values = result["inject"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 12:
result["output_dir"] = bb.readString();
break;
case 13:
var length = bb.readVarUint();
var values = result["external"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 14:
var length = bb.readVarUint();
var values = result["loader_keys"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 15:
// Enum array: one byte per element, mapped through Loader.
var length = bb.readVarUint();
var values = result["loader_values"] = Array(length);
for (var i = 0; i < length; i++) values[i] = Loader[bb.readByte()];
break;
case 16:
var length = bb.readVarUint();
var values = result["main_fields"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 17:
result["platform"] = Platform[bb.readByte()];
break;
default:
throw new Error("Attempted to parse invalid message");
}
}
}
function encodeTransformOptions(message, bb) {
var value = message["jsx"];
if (value != null) {
bb.writeByte(1);
encodeJSX(value, bb);
}
var value = message["tsconfig_override"];
if (value != null) {
bb.writeByte(2);
bb.writeString(value);
}
var value = message["resolve"];
if (value != null) {
bb.writeByte(3);
var encoded = ResolveMode[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"ResolveMode\"");
bb.writeByte(encoded);
}
var value = message["public_url"];
if (value != null) {
bb.writeByte(4);
bb.writeString(value);
}
var value = message["absolute_working_dir"];
if (value != null) {
bb.writeByte(5);
bb.writeString(value);
}
var value = message["define_keys"];
if (value != null) {
bb.writeByte(6);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["define_values"];
if (value != null) {
bb.writeByte(7);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["preserve_symlinks"];
if (value != null) {
bb.writeByte(8);
bb.writeByte(value);
}
var value = message["entry_points"];
if (value != null) {
bb.writeByte(9);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["write"];
if (value != null) {
bb.writeByte(10);
bb.writeByte(value);
}
var value = message["inject"];
if (value != null) {
bb.writeByte(11);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["output_dir"];
if (value != null) {
bb.writeByte(12);
bb.writeString(value);
}
var value = message["external"];
if (value != null) {
bb.writeByte(13);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["loader_keys"];
if (value != null) {
bb.writeByte(14);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
} else {
throw new Error("Missing required field \"loader_keys\"");
}
var value = message["loader_values"];
if (value != null) {
bb.writeByte(15);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
@@ -129,73 +362,27 @@ bb.writeByte(encoded);
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
bb.writeByte(encoded);
}
} else {
throw new Error("Missing required field \"loader_values\"");
}
}
function decodeTransformOptions(bb) {
var result = {};
result["jsx"] = decodeJSX(bb);
result["ts"] = !!bb.readByte();
result["base_path"] = bb.readString();
var length = bb.readVarUint();
var values = result["define_keys"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
var length = bb.readVarUint();
var values = result["define_values"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
return result;
}
function encodeTransformOptions(message, bb) {
var value = message["jsx"];
if (value != null) {
encodeJSX(value, bb);
} else {
throw new Error("Missing required field \"jsx\"");
}
var value = message["ts"];
if (value != null) {
bb.writeByte(value);
} else {
throw new Error("Missing required field \"ts\"");
}
var value = message["base_path"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"base_path\"");
}
var value = message["define_keys"];
var value = message["main_fields"];
if (value != null) {
bb.writeByte(16);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
} else {
throw new Error("Missing required field \"define_keys\"");
}
var value = message["define_values"];
var value = message["platform"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
} else {
throw new Error("Missing required field \"define_values\"");
bb.writeByte(17);
var encoded = Platform[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Platform\"");
bb.writeByte(encoded);
}
bb.writeByte(0);
}
@@ -603,6 +790,10 @@ function encodeLog(message, bb) {
export { Loader }
export { LoaderKeys }
export { ResolveMode }
export { ResolveModeKeys }
export { Platform }
export { PlatformKeys }
export { JSXRuntime }
export { JSXRuntimeKeys }
export { decodeJSX }

View File

@@ -10,6 +10,17 @@ smol Loader {
json = 7;
}
// How imports should be resolved; wire values match the generated
// ResolveMode bindings.
smol ResolveMode {
disable = 1;
lazy = 2;
dev = 3;
bundle = 4;
}
// Target platform for build output; wire values match the generated
// Platform bindings.
smol Platform {
browser = 1;
node = 2;
}
smol JSXRuntime {
automatic = 1;
@@ -20,25 +31,42 @@ struct JSX {
string factory;
JSXRuntime runtime;
string fragment;
bool production;
bool development;
// Probably react
string import_source;
bool react_fast_refresh;
string[] loader_keys;
Loader[] loader_values;
}
struct TransformOptions {
JSX jsx;
bool ts;
message TransformOptions {
JSX jsx = 1;
string tsconfig_override = 2;
ResolveMode resolve = 3;
string base_path;
string[] define_keys;
string[] define_values;
string public_url = 4;
string absolute_working_dir = 5;
string[] define_keys = 6;
string[] define_values = 7;
bool preserve_symlinks = 8;
string[] entry_points = 9;
bool write = 10;
string[] inject = 11;
string output_dir = 12;
string[] external = 13;
string[] loader_keys = 14;
Loader[] loader_values = 15;
string[] main_fields = 16;
Platform platform = 17;
}
struct FileHandle {

184
src/api/schema.ts Normal file
View File

@@ -0,0 +1,184 @@
import type {ByteBuffer} from "peechy";
type byte = number;
type float = number;
type int = number;
type alphanumeric = string;
type uint = number;
type int8 = number;
type lowp = number;
type int16 = number;
type int32 = number;
type float32 = number;
type uint16 = number;
type uint32 = number;
export enum Loader {
jsx = 1,
js = 2,
ts = 3,
tsx = 4,
css = 5,
file = 6,
json = 7
}
export const LoaderKeys = {
1: "jsx",
jsx: "jsx",
2: "js",
js: "js",
3: "ts",
ts: "ts",
4: "tsx",
tsx: "tsx",
5: "css",
css: "css",
6: "file",
file: "file",
7: "json",
json: "json"
}
export enum ResolveMode {
disable = 1,
lazy = 2
}
export const ResolveModeKeys = {
1: "disable",
disable: "disable",
2: "lazy",
lazy: "lazy"
}
export enum JSXRuntime {
automatic = 1,
classic = 2
}
export const JSXRuntimeKeys = {
1: "automatic",
automatic: "automatic",
2: "classic",
classic: "classic"
}
export enum TransformResponseStatus {
success = 1,
fail = 2
}
export const TransformResponseStatusKeys = {
1: "success",
success: "success",
2: "fail",
fail: "fail"
}
export enum MessageKind {
err = 1,
warn = 2,
note = 3,
debug = 4
}
export const MessageKindKeys = {
1: "err",
err: "err",
2: "warn",
warn: "warn",
3: "note",
note: "note",
4: "debug",
debug: "debug"
}
export interface JSX {
factory: string;
runtime: JSXRuntime;
fragment: string;
development: boolean;
import_source: string;
react_fast_refresh: boolean;
loader_keys: string[];
loader_values: Loader[];
}
export interface TransformOptions {
jsx?: JSX;
tsconfig_override?: string;
resolve?: ResolveMode;
public_url?: string;
absolute_working_dir?: string;
define_keys?: string[];
define_values?: string[];
preserve_symlinks?: boolean;
entry_points?: string[];
write?: boolean;
inject?: string[];
output_dir?: string;
externals?: string[];
}
export interface FileHandle {
path: string;
size: uint;
fd: uint;
}
export interface Transform {
handle?: FileHandle;
path?: string;
contents?: Uint8Array;
loader?: Loader;
options?: TransformOptions;
}
export interface OutputFile {
data: Uint8Array;
path: string;
}
export interface TransformResponse {
status: TransformResponseStatus;
files: OutputFile[];
errors: Message[];
}
export interface Location {
file: string;
namespace: string;
line: int32;
column: int32;
line_text: string;
suggestion: string;
offset: uint;
}
export interface MessageData {
text?: string;
location?: Location;
}
export interface Message {
kind: MessageKind;
data: MessageData;
notes: MessageData[];
}
export interface Log {
warnings: uint32;
errors: uint32;
msgs: Message[];
}
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
export declare function decodeJSX(buffer: ByteBuffer): JSX;
export declare function encodeTransformOptions(message: TransformOptions, bb: ByteBuffer): void;
export declare function decodeTransformOptions(buffer: ByteBuffer): TransformOptions;
export declare function encodeFileHandle(message: FileHandle, bb: ByteBuffer): void;
export declare function decodeFileHandle(buffer: ByteBuffer): FileHandle;
export declare function encodeTransform(message: Transform, bb: ByteBuffer): void;
export declare function decodeTransform(buffer: ByteBuffer): Transform;
export declare function encodeOutputFile(message: OutputFile, bb: ByteBuffer): void;
export declare function decodeOutputFile(buffer: ByteBuffer): OutputFile;
export declare function encodeTransformResponse(message: TransformResponse, bb: ByteBuffer): void;
export declare function decodeTransformResponse(buffer: ByteBuffer): TransformResponse;
export declare function encodeLocation(message: Location, bb: ByteBuffer): void;
export declare function decodeLocation(buffer: ByteBuffer): Location;
export declare function encodeMessageData(message: MessageData, bb: ByteBuffer): void;
export declare function decodeMessageData(buffer: ByteBuffer): MessageData;
export declare function encodeMessage(message: Message, bb: ByteBuffer): void;
export declare function decodeMessage(buffer: ByteBuffer): Message;
export declare function encodeLog(message: Log, bb: ByteBuffer): void;
export declare function decodeLog(buffer: ByteBuffer): Log;

View File

@@ -31,6 +31,42 @@ pub const Api = struct {
}
};
/// Resolution strategy for imports, mirroring the peechy schema's
/// `smol ResolveMode`. Schema wire values start at 1, so `_none` pads tag 0
/// to keep the named tags aligned with the wire numbering; `_` makes the
/// enum non-exhaustive so unknown wire bytes still decode.
pub const ResolveMode = enum(u8) {
_none,
/// disable
disable,
/// lazy
lazy,
/// dev
dev,
/// bundle
bundle,
_,
// Serializes the enum as its tag name when JSON-stringified.
// NOTE(review): `self` is `*const @This()` and `@tagName(self)` is applied
// to the pointer rather than `self.*` — confirm this is accepted on the
// targeted Zig version; `@tagName` of an unnamed value of this
// non-exhaustive enum would also be a problem at runtime — verify.
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
return try std.json.stringify(@tagName(self), opts, o);
}
};
/// Target platform, mirroring the peechy schema's `smol Platform`.
/// `_none` pads tag 0 so named tags match the schema's 1-based wire values;
/// `_` makes the enum non-exhaustive so unknown wire bytes still decode.
pub const Platform = enum(u8) {
_none,
/// browser
browser,
/// node
node,
_,
// Serializes the enum as its tag name when JSON-stringified.
// NOTE(review): `@tagName(self)` is applied to the pointer (`*const
// @This()`) rather than `self.*` — confirm against the targeted Zig
// version, as in ResolveMode above... same concern applies here.
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
return try std.json.stringify(@tagName(self), opts, o);
}
};
pub const JsxRuntime = enum(u8) {
_none,
/// automatic
@@ -48,29 +84,23 @@ pub const Api = struct {
pub const Jsx = struct {
/// factory
factory: []u8,
factory: []const u8,
/// runtime
runtime: JsxRuntime,
/// fragment
fragment: []u8,
fragment: []const u8,
/// production
production: bool = false,
/// development
development: bool = false,
/// import_source
import_source: []u8,
import_source: []const u8,
/// react_fast_refresh
react_fast_refresh: bool = false,
/// loader_keys
loader_keys: [][]u8,
/// loader_values
loader_values: []Loader,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!Jsx {
var obj = std.mem.zeroes(Jsx);
try update(&obj, allocator, reader);
@@ -89,39 +119,17 @@ pub const Api = struct {
result.fragment = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.fragment);
result.production = (try reader.readByte()) == @as(u8, 1);
result.development = (try reader.readByte()) == @as(u8, 1);
length = try reader.readIntNative(u32);
if (result.import_source.len != length) {
result.import_source = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.import_source);
result.react_fast_refresh = (try reader.readByte()) == @as(u8, 1);
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.loader_keys.len) {
result.loader_keys = try allocator.alloc([]u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.loader_keys) |content, j| {
if (result.loader_keys[j].len != length and length > 0) {
result.loader_keys[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.loader_keys[j]);
}
}
length = try reader.readIntNative(u32);
result.loader_values = try allocator.alloc(Loader, length);
{
var j: usize = 0;
while (j < length) : (j += 1) {
result.loader_values[j] = try reader.readEnum(Loader, .Little);
}
}
return;
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
var n: usize = 0;
try writer.writeIntNative(u32, @intCast(u32, result.factory.len));
try writer.writeAll(std.mem.sliceAsBytes(result.factory));
@@ -130,50 +138,67 @@ pub const Api = struct {
try writer.writeIntNative(u32, @intCast(u32, result.fragment.len));
try writer.writeAll(std.mem.sliceAsBytes(result.fragment));
try writer.writeByte(@boolToInt(result.production));
try writer.writeByte(@boolToInt(result.development));
try writer.writeIntNative(u32, @intCast(u32, result.import_source.len));
try writer.writeAll(std.mem.sliceAsBytes(result.import_source));
try writer.writeByte(@boolToInt(result.react_fast_refresh));
n = result.loader_keys.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.loader_keys[j].len));
try writer.writeAll(std.mem.sliceAsBytes(result.loader_keys[j]));
}
}
n = result.loader_values.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
try writer.writeByte(@enumToInt(result.loader_values[j]));
}
}
return;
}
};
pub const TransformOptions = struct {
/// jsx
jsx: Jsx,
jsx: ?Jsx = null,
/// ts
ts: bool = false,
/// tsconfig_override
tsconfig_override: ?[]const u8 = null,
/// base_path
base_path: []u8,
/// resolve
resolve: ?ResolveMode = null,
/// public_url
public_url: ?[]const u8 = null,
/// absolute_working_dir
absolute_working_dir: ?[]const u8 = null,
/// define_keys
define_keys: [][]u8,
define_keys: []const []const u8,
/// define_values
define_values: [][]u8,
define_values: []const []const u8,
/// preserve_symlinks
preserve_symlinks: ?bool = null,
/// entry_points
entry_points: []const []const u8,
/// write
write: ?bool = null,
/// inject
inject: []const []const u8,
/// output_dir
output_dir: ?[]const u8 = null,
/// external
external: []const []const u8,
/// loader_keys
loader_keys: []const []const u8,
/// loader_values
loader_values: []const Loader,
/// main_fields
main_fields: []const []const u8,
/// platform
platform: ?Platform = null,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!TransformOptions {
var obj = std.mem.zeroes(TransformOptions);
@@ -182,77 +207,341 @@ pub const Api = struct {
}
pub fn update(result: *TransformOptions, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
result.jsx = try Jsx.decode(allocator, reader);
result.ts = (try reader.readByte()) == @as(u8, 1);
length = try reader.readIntNative(u32);
if (result.base_path.len != length) {
result.base_path = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.base_path);
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.define_keys.len) {
result.define_keys = try allocator.alloc([]u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.define_keys) |content, j| {
if (result.define_keys[j].len != length and length > 0) {
result.define_keys[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.define_keys[j]);
while (true) {
const field_type: u8 = try reader.readByte();
switch (field_type) {
0 => {
return;
},
1 => {
result.jsx = try Jsx.decode(allocator, reader);
},
2 => {
length = try reader.readIntNative(u32);
if ((result.tsconfig_override orelse &([_]u8{})).len != length) {
result.tsconfig_override = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.tsconfig_override.?);
},
3 => {
result.resolve = try reader.readEnum(ResolveMode, .Little);
},
4 => {
length = try reader.readIntNative(u32);
if ((result.public_url orelse &([_]u8{})).len != length) {
result.public_url = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.public_url.?);
},
5 => {
length = try reader.readIntNative(u32);
if ((result.absolute_working_dir orelse &([_]u8{})).len != length) {
result.absolute_working_dir = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.absolute_working_dir.?);
},
6 => {
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.define_keys.len) {
result.define_keys = try allocator.alloc([]const u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.define_keys) |content, j| {
if (result.define_keys[j].len != length and length > 0) {
result.define_keys[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.define_keys[j].?);
}
}
},
7 => {
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.define_values.len) {
result.define_values = try allocator.alloc([]const u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.define_values) |content, j| {
if (result.define_values[j].len != length and length > 0) {
result.define_values[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.define_values[j].?);
}
}
},
8 => {
result.preserve_symlinks = (try reader.readByte()) == @as(u8, 1);
},
9 => {
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.entry_points.len) {
result.entry_points = try allocator.alloc([]const u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.entry_points) |content, j| {
if (result.entry_points[j].len != length and length > 0) {
result.entry_points[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.entry_points[j].?);
}
}
},
10 => {
result.write = (try reader.readByte()) == @as(u8, 1);
},
11 => {
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.inject.len) {
result.inject = try allocator.alloc([]const u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.inject) |content, j| {
if (result.inject[j].len != length and length > 0) {
result.inject[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.inject[j].?);
}
}
},
12 => {
length = try reader.readIntNative(u32);
if ((result.output_dir orelse &([_]u8{})).len != length) {
result.output_dir = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.output_dir.?);
},
13 => {
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.external.len) {
result.external = try allocator.alloc([]const u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.external) |content, j| {
if (result.external[j].len != length and length > 0) {
result.external[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.external[j].?);
}
}
},
14 => {
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.loader_keys.len) {
result.loader_keys = try allocator.alloc([]const u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.loader_keys) |content, j| {
if (result.loader_keys[j].len != length and length > 0) {
result.loader_keys[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.loader_keys[j].?);
}
}
},
15 => {
length = try reader.readIntNative(u32);
if (result.loader_values != length) {
result.loader_values = try allocator.alloc(Loader, length);
}
{
var j: usize = 0;
while (j < length) : (j += 1) {
result.loader_values[j] = try reader.readEnum(Loader, .Little);
}
}
},
16 => {
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.main_fields.len) {
result.main_fields = try allocator.alloc([]const u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.main_fields) |content, j| {
if (result.main_fields[j].len != length and length > 0) {
result.main_fields[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.main_fields[j].?);
}
}
},
17 => {
result.platform = try reader.readEnum(Platform, .Little);
},
else => {
return error.InvalidMessage;
},
}
}
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.define_values.len) {
result.define_values = try allocator.alloc([]u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.define_values) |content, j| {
if (result.define_values[j].len != length and length > 0) {
result.define_values[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.define_values[j]);
}
}
return;
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
var n: usize = 0;
try result.jsx.encode(writer);
if (result.jsx) |jsx| {
try writer.writeByte(1);
try jsx.encode(writer);
}
try writer.writeByte(@boolToInt(result.ts));
if (result.tsconfig_override) |tsconfig_override| {
try writer.writeByte(2);
try writer.writeIntNative(u32, @intCast(u32, tsconfig_override.len));
try writer.writeAll(std.mem.sliceAsBytes(tsconfig_override));
}
try writer.writeIntNative(u32, @intCast(u32, result.base_path.len));
try writer.writeAll(std.mem.sliceAsBytes(result.base_path));
if (result.resolve) |resolve| {
try writer.writeByte(3);
try writer.writeIntNative(@TypeOf(@enumToInt(result.resolve orelse unreachable)), @enumToInt(result.resolve orelse unreachable));
}
n = result.define_keys.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.define_keys[j].len));
try writer.writeAll(std.mem.sliceAsBytes(result.define_keys[j]));
if (result.public_url) |public_url| {
try writer.writeByte(4);
try writer.writeIntNative(u32, @intCast(u32, public_url.len));
try writer.writeAll(std.mem.sliceAsBytes(public_url));
}
if (result.absolute_working_dir) |absolute_working_dir| {
try writer.writeByte(5);
try writer.writeIntNative(u32, @intCast(u32, absolute_working_dir.len));
try writer.writeAll(std.mem.sliceAsBytes(absolute_working_dir));
}
if (result.define_keys) |define_keys| {
try writer.writeByte(6);
n = result.define_keys.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.define_keys[j].len));
try writer.writeAll(std.mem.sliceAsBytes(define_keys[j]));
}
}
}
n = result.define_values.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.define_values[j].len));
try writer.writeAll(std.mem.sliceAsBytes(result.define_values[j]));
if (result.define_values) |define_values| {
try writer.writeByte(7);
n = result.define_values.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.define_values[j].len));
try writer.writeAll(std.mem.sliceAsBytes(define_values[j]));
}
}
}
if (result.preserve_symlinks) |preserve_symlinks| {
try writer.writeByte(8);
try writer.writeByte(@boolToInt(preserve_symlinks));
}
if (result.entry_points) |entry_points| {
try writer.writeByte(9);
n = result.entry_points.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.entry_points[j].len));
try writer.writeAll(std.mem.sliceAsBytes(entry_points[j]));
}
}
}
if (result.write) |write| {
try writer.writeByte(10);
try writer.writeByte(@boolToInt(write));
}
if (result.inject) |inject| {
try writer.writeByte(11);
n = result.inject.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.inject[j].len));
try writer.writeAll(std.mem.sliceAsBytes(inject[j]));
}
}
}
if (result.output_dir) |output_dir| {
try writer.writeByte(12);
try writer.writeIntNative(u32, @intCast(u32, output_dir.len));
try writer.writeAll(std.mem.sliceAsBytes(output_dir));
}
if (result.external) |external| {
try writer.writeByte(13);
n = result.external.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.external[j].len));
try writer.writeAll(std.mem.sliceAsBytes(external[j]));
}
}
}
if (result.loader_keys) |loader_keys| {
try writer.writeByte(14);
n = result.loader_keys.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.loader_keys[j].len));
try writer.writeAll(std.mem.sliceAsBytes(loader_keys[j]));
}
}
}
if (result.loader_values) |loader_values| {
try writer.writeByte(15);
n = result.loader_values.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
try writer.writeByte(@enumToInt(result.loader_values[j] orelse unreachable));
}
}
}
if (result.main_fields) |main_fields| {
try writer.writeByte(16);
n = result.main_fields.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.main_fields[j].len));
try writer.writeAll(std.mem.sliceAsBytes(main_fields[j]));
}
}
}
if (result.platform) |platform| {
try writer.writeByte(17);
try writer.writeIntNative(@TypeOf(@enumToInt(result.platform orelse unreachable)), @enumToInt(result.platform orelse unreachable));
}
try writer.writeByte(0);
return;
}
};
pub const FileHandle = struct {
/// path
path: []u8,
path: []const u8,
/// size
size: u32 = 0,
@@ -293,10 +582,10 @@ pub const Api = struct {
handle: ?FileHandle = null,
/// path
path: ?[]u8 = null,
path: ?[]const u8 = null,
/// contents
contents: []u8,
contents: []const u8,
/// loader
loader: ?Loader = null,
@@ -330,7 +619,7 @@ pub const Api = struct {
},
3 => {
length = @intCast(usize, try reader.readIntNative(u32));
if (result.contents.len != length) {
if (result.contents != length) {
result.contents = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.contents);
@@ -397,10 +686,10 @@ pub const Api = struct {
pub const OutputFile = struct {
/// data
data: []u8,
data: []const u8,
/// path
path: []u8,
path: []const u8,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!OutputFile {
var obj = std.mem.zeroes(OutputFile);
@@ -517,10 +806,10 @@ pub const Api = struct {
pub const Location = struct {
/// file
file: []u8,
file: []const u8,
/// namespace
namespace: []u8,
namespace: []const u8,
/// line
line: i32 = 0,
@@ -529,10 +818,10 @@ pub const Api = struct {
column: i32 = 0,
/// line_text
line_text: []u8,
line_text: []const u8,
/// suggestion
suggestion: []u8,
suggestion: []const u8,
/// offset
offset: u32 = 0,
@@ -594,7 +883,7 @@ pub const Api = struct {
pub const MessageData = struct {
/// text
text: ?[]u8 = null,
text: ?[]const u8 = null,
/// location
location: ?Location = null,

View File

@@ -1,28 +1,159 @@
usingnamespace @import("global.zig");
const std = @import("std");
const options = @import("options.zig");
const lex = @import("js_lexer.zig");
const logger = @import("logger.zig");
const alloc = @import("alloc.zig");
const options = @import("options.zig");
const js_parser = @import("js_parser.zig");
const json_parser = @import("json_parser.zig");
const js_printer = @import("js_printer.zig");
const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
const panicky = @import("panic_handler.zig");
const fs = @import("fs.zig");
const Api = @import("api/schema.zig").Api;
pub const Bundler = struct {
options: options.TransformOptions,
log: logger.Log,
options: options.BundleOptions,
log: *logger.Log,
allocator: *std.mem.Allocator,
result: ?options.TransformResult = null,
pub fn init(options: options.TransformOptions, allocator: *std.mem.Allocator) Bundler {
var log = logger.Log.init(allocator);
return Bundler{
.options = options,
.allocator = allocator,
.log = log,
};
}
pub fn scan(self: *Bundler) void {}
pub fn bundle(self: *Bundler) options.TransformResult {
var result = self.result;
var source = logger.Source.initFile(self.options.entry_point, self.allocator);
pub fn bundle(
allocator: *std.mem.Allocator,
log: *logger.Log,
opts: Api.TransformOptions,
) !options.TransformResult {
Global.notimpl();
}
};
/// Single-file transpiler: converts each entry point independently, with no
/// import resolution or bundling. Used for `--resolve disable`.
pub const Transformer = struct {
    options: options.TransformOptions,
    log: *logger.Log,
    allocator: *std.mem.Allocator,
    result: ?options.TransformResult = null,

    /// Transpiles every entry point in `opts` and returns the printed output
    /// files. Parse failures and unknown-loader files are skipped, with errors
    /// recorded in `log`. Caller owns the returned result.
    pub fn transform(
        allocator: *std.mem.Allocator,
        log: *logger.Log,
        opts: Api.TransformOptions,
    ) !options.TransformResult {
        // Build the define table; when the caller supplied no defines at all,
        // default process.env.NODE_ENV to the string literal "development".
        var raw_defines = try options.stringHashMapFromArrays(RawDefines, allocator, opts.define_keys, opts.define_values);
        if (opts.define_keys.len == 0) {
            try raw_defines.put("process.env.NODE_ENV", "\"development\"");
        }

        var user_defines = try DefineData.from_input(raw_defines, log, alloc.static);
        var define = try Define.init(
            alloc.static,
            user_defines,
        );

        const cwd = opts.absolute_working_dir orelse try std.process.getCwdAlloc(allocator);
        var output_files = try std.ArrayList(options.OutputFile).initCapacity(allocator, opts.entry_points.len);

        // Convert API-level loader enum values into the internal loader enum.
        var loader_values = try allocator.alloc(options.Loader, opts.loader_values.len);
        for (loader_values) |_, i| {
            const loader = switch (opts.loader_values[i]) {
                .jsx => options.Loader.jsx,
                .js => options.Loader.js,
                .ts => options.Loader.ts,
                .css => options.Loader.css,
                .tsx => options.Loader.tsx,
                .json => options.Loader.json,
                else => unreachable,
            };
            loader_values[i] = loader;
        }
        var loader_map = try options.stringHashMapFromArrays(
            std.StringHashMap(options.Loader),
            allocator,
            opts.loader_keys,
            loader_values,
        );
        // No explicit --loader pairs: fall back to extension-based defaults.
        var use_default_loaders = loader_map.count() == 0;

        var jsx = if (opts.jsx) |_jsx| options.JSX.Pragma.fromApi(_jsx) else options.JSX.Pragma{};

        // NOTE(review): `output_i` and the loop index `i` are unused.
        var output_i: usize = 0;
        for (opts.entry_points) |entry_point, i| {
            // Read the whole entry point into memory.
            var paths = [_]string{ cwd, entry_point };
            const absolutePath = try std.fs.path.resolve(alloc.dynamic, &paths);

            const file = try std.fs.openFileAbsolute(absolutePath, std.fs.File.OpenFlags{ .read = true });
            defer file.close();
            const stat = try file.stat();

            const code = try file.readToEndAlloc(alloc.dynamic, stat.size);
            const _file = fs.File{ .path = fs.Path.init(entry_point), .contents = code };
            var source = try logger.Source.initFile(_file, alloc.dynamic);

            // Files whose loader cannot be determined are silently skipped.
            var loader: options.Loader = undefined;
            if (use_default_loaders) {
                loader = options.defaultLoaders.get(std.fs.path.extension(absolutePath)) orelse continue;
            } else {
                loader = options.Loader.forFileName(
                    entry_point,
                    loader_map,
                ) orelse continue;
            }

            const parser_opts = js_parser.Parser.Options.init(jsx, loader);
            var _source = &source;
            // Parse/print errors have already been appended to `log`; skip the file.
            const res = _transform(allocator, log, parser_opts, loader, define, _source) catch continue;

            try output_files.append(options.OutputFile{ .path = absolutePath, .contents = res.js });
        }

        return try options.TransformResult.init(output_files.toOwnedSlice(), log, allocator);
    }

    /// Parses one source with the given loader and prints it back out as JS.
    pub fn _transform(
        allocator: *std.mem.Allocator,
        log: *logger.Log,
        opts: js_parser.Parser.Options,
        loader: options.Loader,
        define: *Define,
        source: *logger.Source,
    ) !js_printer.PrintResult {
        var ast: js_ast.Ast = undefined;

        switch (loader) {
            .json => {
                // JSON is wrapped as `export default <value>;`.
                var expr = try json_parser.ParseJSON(source, log, alloc.dynamic);
                var stmt = js_ast.Stmt.alloc(alloc.dynamic, js_ast.S.ExportDefault{
                    .value = js_ast.StmtOrExpr{ .expr = expr },
                    .default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
                }, logger.Loc{ .start = 0 });
                var part = js_ast.Part{
                    .stmts = &([_]js_ast.Stmt{stmt}),
                };
                ast = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
            },
            .jsx, .tsx, .ts, .js => {
                var parser = try js_parser.Parser.init(opts, log, source, define, alloc.dynamic);
                var res = try parser.parse();
                ast = res.ast;
            },
            else => {
                Global.panic("Unsupported loader: {s}", .{loader});
            },
        }

        var _linker = linker.Linker{};
        var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});

        return try js_printer.printAst(
            alloc.dynamic,
            ast,
            js_ast.Symbol.Map.initList(symbols),
            source,
            false,
            js_printer.Options{ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 } },
            &_linker,
        );
    }
};

306
src/cli.zig Normal file
View File

@@ -0,0 +1,306 @@
usingnamespace @import("global.zig");
const std = @import("std");
const lex = @import("js_lexer.zig");
const logger = @import("logger.zig");
const alloc = @import("alloc.zig");
const options = @import("options.zig");
const js_parser = @import("js_parser.zig");
const json_parser = @import("json_parser.zig");
const js_printer = @import("js_printer.zig");
const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
const panicky = @import("panic_handler.zig");
const Api = @import("api/schema.zig").Api;
const clap = @import("clap");
const bundler = @import("bundler.zig");
/// Strips `const` from a string slice by rebuilding the pointer from its raw
/// address (pre-@constCast Zig). This only changes the type — the caller is
/// responsible for never mutating memory that is actually read-only.
pub fn constStrToU8(s: string) []u8 {
    const mutable_ptr = @intToPtr([*]u8, @ptrToInt(s.ptr));
    return mutable_ptr[0..s.len];
}
/// Command-line front end: parses argv into an `Api.TransformOptions` and
/// dispatches to the transformer or bundler.
pub const Cli = struct {
    const LoaderMatcher = strings.ExactSizeMatcher(4);

    /// Builds a pair-of-slices type for repeated "key:value" (or "key=value")
    /// CLI flags such as `--define` and `--loader`. `value_resolver` converts
    /// each raw value string into a `t`.
    pub fn ColonListType(comptime t: type, value_resolver: anytype) type {
        return struct {
            keys: []string,
            values: []t,

            pub fn init(allocator: *std.mem.Allocator, count: usize) !@This() {
                var keys = try allocator.alloc(string, count);
                var values = try allocator.alloc(t, count);
                return @This(){ .keys = keys, .values = values };
            }

            /// Splits each entry of `input` at the first ":" or "=" into
            /// `keys[i]` / `values[i]`. Errors if no separator is present.
            pub fn load(self: *@This(), input: []const string) !void {
                for (input) |str, i| {
                    // Support either ":" or "=" as the separator, preferring whichever is first.
                    // ":" is less confusing IMO because that syntax is used with flags
                    // but "=" is what esbuild uses and I want this to be somewhat familiar for people using esbuild
                    const midpoint = std.math.min(
                        strings.indexOfChar(str, ':') orelse std.math.maxInt(usize),
                        strings.indexOfChar(str, '=') orelse std.math.maxInt(usize),
                    );
                    if (midpoint == std.math.maxInt(usize)) {
                        return error.InvalidSeparator;
                    }

                    self.keys[i] = str[0..midpoint];
                    self.values[i] = try value_resolver(str[midpoint + 1 .. str.len]);
                }
            }

            pub fn resolve(allocator: *std.mem.Allocator, input: []const string) !@This() {
                var list = try init(allocator, input.len);
                try list.load(input);
                return list;
            }
        };
    }

    pub const LoaderColonList = ColonListType(Api.Loader, Arguments.loader_resolver);
    pub const DefineColonList = ColonListType(string, Arguments.noop_resolver);

    pub const Arguments = struct {
        /// Maps a loader name from the command line ("jsx", "ts", ...) to its
        /// API enum value. Errors on unknown names.
        pub fn loader_resolver(in: string) !Api.Loader {
            const Matcher = strings.ExactSizeMatcher(4);
            switch (Matcher.match(in)) {
                Matcher.case("jsx") => return Api.Loader.jsx,
                Matcher.case("js") => return Api.Loader.js,
                Matcher.case("ts") => return Api.Loader.ts,
                Matcher.case("tsx") => return Api.Loader.tsx,
                Matcher.case("css") => return Api.Loader.css,
                Matcher.case("file") => return Api.Loader.file,
                Matcher.case("json") => return Api.Loader.json,
                else => {
                    return error.InvalidLoader;
                },
            }
        }

        /// Identity resolver: `--define` values are kept as raw strings.
        pub fn noop_resolver(in: string) !string {
            return in;
        }

        /// Reports a file-read failure to stderr and terminates the process.
        pub fn fileReadError(err: anyerror, stderr: anytype, filename: string, kind: string) noreturn {
            stderr.writer().print("Error reading file \"{s}\" for {s}: {s}", .{ filename, kind, @errorName(err) }) catch {};
            std.process.exit(1);
        }

        /// Reads `filename` (resolved relative to `cwd`) fully into memory.
        /// Caller owns the returned bytes.
        pub fn readFile(
            allocator: *std.mem.Allocator,
            cwd: string,
            filename: string,
        ) ![]u8 {
            var paths = [_]string{ cwd, filename };
            const outpath = try std.fs.path.resolve(allocator, &paths);
            defer allocator.free(outpath);
            var file = try std.fs.openFileAbsolute(outpath, std.fs.File.OpenFlags{ .read = true, .write = false });
            defer file.close();
            const stats = try file.stat();
            return try file.readToEndAlloc(allocator, stats.size);
        }

        /// Parses argv into an `Api.TransformOptions`. Prints help or a
        /// diagnostic to `stderr` and exits the process on user error.
        pub fn parse(allocator: *std.mem.Allocator, stdout: anytype, stderr: anytype) !Api.TransformOptions {
            @setEvalBranchQuota(9999);
            const params = comptime [_]clap.Param(clap.Help){
                clap.parseParam("-h, --help                       Display this help and exit.              ") catch unreachable,
                clap.parseParam("-r, --resolve <STR>              Determine import/require behavior. \"disable\" ignores. \"dev\" bundles node_modules and builds everything else as independent entry points") catch unreachable,
                clap.parseParam("-d, --define <STR>...            Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:development") catch unreachable,
                clap.parseParam("-l, --loader <STR>...            Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: jsx, js, json, tsx (not implemented yet), ts (not implemented yet), css (not implemented yet)") catch unreachable,
                clap.parseParam("-o, --outdir <STR>               Save output to directory (default: \"out\" if none provided and multiple entry points passed)") catch unreachable,
                clap.parseParam("-e, --external <STR>...          Exclude module from transpilation (can use * wildcards). ex: -e react") catch unreachable,
                clap.parseParam("-i, --inject <STR>...            Inject module at the top of every file") catch unreachable,
                clap.parseParam("--cwd <STR>                      Absolute path to resolve entry points from. Defaults to cwd") catch unreachable,
                clap.parseParam("--public-url <STR>               Rewrite import paths to start with --public-url. Useful for web browsers.") catch unreachable,
                clap.parseParam("--jsx-factory <STR>              Changes the function called when compiling JSX elements using the classic JSX runtime") catch unreachable,
                clap.parseParam("--jsx-fragment <STR>             Changes the function called when compiling JSX fragments using the classic JSX runtime") catch unreachable,
                clap.parseParam("--jsx-import-source <STR>        Declares the module specifier to be used for importing the jsx and jsxs factory functions. Default: \"react\"") catch unreachable,
                clap.parseParam("--jsx-runtime <STR>              \"automatic\" (default) or \"classic\"") catch unreachable,
                clap.parseParam("--jsx-production                 Use jsx instead of jsxDEV (default) for the automatic runtime") catch unreachable,
                clap.parseParam("--react-fast-refresh             Enable React Fast Refresh (not implemented yet)") catch unreachable,
                clap.parseParam("--tsconfig-override <STR>        Load tsconfig from path instead of cwd/tsconfig.json") catch unreachable,
                clap.parseParam("--platform <STR>                 \"browser\" or \"node\". Defaults to \"browser\"") catch unreachable,
                clap.parseParam("--main-fields <STR>...           Main fields to lookup in package.json. Defaults to --platform dependent") catch unreachable,
                clap.parseParam("<POS>...                         Entry points to use") catch unreachable,
            };

            var diag = clap.Diagnostic{};
            var args = clap.parse(clap.Help, &params, .{ .diagnostic = &diag }) catch |err| {
                // Report useful error and exit
                diag.report(stderr.writer(), err) catch {};
                return err;
            };

            if (args.flag("--help")) {
                try clap.help(stderr.writer(), &params);
                std.process.exit(1);
            }

            var cwd_paths = [_]string{args.option("--cwd") orelse try std.process.getCwdAlloc(allocator)};
            var cwd = try std.fs.path.resolve(allocator, &cwd_paths);
            var tsconfig_override = if (args.option("--tsconfig-override")) |ts| (Arguments.readFile(allocator, cwd, ts) catch |err| fileReadError(err, stderr, ts, "tsconfig.json")) else null;
            var public_url = args.option("--public-url");

            var defines_tuple = try DefineColonList.resolve(allocator, args.options("--define"));
            // BUG FIX: this previously read args.options("--define") again, so
            // --loader pairs were silently ignored and every --define value was
            // run through the loader resolver.
            var loader_tuple = try LoaderColonList.resolve(allocator, args.options("--loader"));

            var define_keys = defines_tuple.keys;
            var define_values = defines_tuple.values;
            var loader_keys = loader_tuple.keys;
            var loader_values = loader_tuple.values;
            var entry_points = args.positionals();
            // Writing to disk is implied when more than one entry point is given.
            var write = entry_points.len > 1;
            var inject = args.options("--inject");
            var output_dir = args.option("--outdir");
            if (write and output_dir == null) {
                var _paths = [_]string{ cwd, "out" };
                output_dir = try std.fs.path.resolve(allocator, &_paths);
            }

            var externals = std.mem.zeroes([][]u8);
            if (args.options("--external").len > 0) {
                externals = try allocator.alloc([]u8, args.options("--external").len);
                for (args.options("--external")) |external, i| {
                    externals[i] = constStrToU8(external);
                }
            }

            var jsx_factory = args.option("--jsx-factory");
            var jsx_fragment = args.option("--jsx-fragment");
            var jsx_import_source = args.option("--jsx-import-source");
            var jsx_runtime = args.option("--jsx-runtime");
            var jsx_production = args.flag("--jsx-production");
            var react_fast_refresh = args.flag("--react-fast-refresh");
            const main_fields = args.options("--main-fields");

            const PlatformMatcher = strings.ExactSizeMatcher(8);
            // FIX: was a typo'd, unused "ResoveMatcher"; the --resolve switch
            // below misused PlatformMatcher and reported error.InvalidPlatform.
            const ResolveMatcher = strings.ExactSizeMatcher(8);

            var resolve = Api.ResolveMode.lazy;
            if (args.option("--resolve")) |_resolve| {
                switch (ResolveMatcher.match(_resolve)) {
                    ResolveMatcher.case("disable") => {
                        resolve = Api.ResolveMode.disable;
                    },
                    ResolveMatcher.case("bundle") => {
                        resolve = Api.ResolveMode.bundle;
                    },
                    ResolveMatcher.case("dev") => {
                        resolve = Api.ResolveMode.dev;
                    },
                    ResolveMatcher.case("lazy") => {
                        resolve = Api.ResolveMode.lazy;
                    },
                    else => {
                        diag.name.long = "--resolve";
                        diag.arg = _resolve;
                        try diag.report(stderr.writer(), error.InvalidResolveBehavior);
                        std.process.exit(1);
                    },
                }
            }

            var platform: ?Api.Platform = null;
            if (args.option("--platform")) |_platform| {
                switch (PlatformMatcher.match(_platform)) {
                    PlatformMatcher.case("browser") => {
                        platform = Api.Platform.browser;
                    },
                    PlatformMatcher.case("node") => {
                        platform = Api.Platform.node;
                    },
                    else => {
                        diag.name.long = "--platform";
                        diag.arg = _platform;
                        try diag.report(stderr.writer(), error.InvalidPlatform);
                        std.process.exit(1);
                    },
                }
            }

            // Only materialize a JSX config when some JSX-related flag was passed;
            // empty factory/fragment/import-source mean "use the defaults".
            var jsx: ?Api.Jsx = null;
            if (jsx_factory != null or
                jsx_fragment != null or
                jsx_import_source != null or
                jsx_runtime != null or
                jsx_production or react_fast_refresh)
            {
                var default_factory = "".*;
                var default_fragment = "".*;
                var default_import_source = "".*;
                jsx = Api.Jsx{
                    .factory = constStrToU8(jsx_factory orelse &default_factory),
                    .fragment = constStrToU8(jsx_fragment orelse &default_fragment),
                    .import_source = constStrToU8(jsx_import_source orelse &default_import_source),
                    .runtime = if (jsx_runtime != null) try resolve_jsx_runtime(jsx_runtime.?) else Api.JsxRuntime.automatic,
                    .development = !jsx_production,
                    .react_fast_refresh = react_fast_refresh,
                };
            }

            if (entry_points.len == 0) {
                try clap.help(stderr.writer(), &params);
                try diag.report(stderr.writer(), error.MissingEntryPoint);
                std.process.exit(1);
            }

            return Api.TransformOptions{
                .jsx = jsx,
                .output_dir = output_dir,
                .resolve = resolve,
                .external = externals,
                .absolute_working_dir = cwd,
                .tsconfig_override = tsconfig_override,
                .public_url = public_url,
                .define_keys = define_keys,
                .define_values = define_values,
                .loader_keys = loader_keys,
                .loader_values = loader_values,
                .write = write,
                .inject = inject,
                .entry_points = entry_points,
                .main_fields = main_fields,
                .platform = platform,
            };
        }
    };

    /// Maps the --jsx-runtime string to the API enum. "fallback" selects the
    /// classic runtime.
    pub fn resolve_jsx_runtime(str: string) !Api.JsxRuntime {
        if (strings.eql(str, "automatic")) {
            return Api.JsxRuntime.automatic;
        } else if (strings.eql(str, "fallback")) {
            return Api.JsxRuntime.classic;
        } else {
            return error.InvalidJSXRuntime;
        }
    }

    // Not implemented yet; kept so callers compile.
    pub fn startTransform(allocator: *std.mem.Allocator, args: Api.TransformOptions, log: *logger.Log) anyerror!void {}

    /// CLI entry point: installs the panic handler, parses arguments, runs the
    /// transformer (resolve disabled) or bundler, and streams the resulting
    /// file contents to stdout.
    pub fn start(allocator: *std.mem.Allocator, stdout: anytype, stderr: anytype, comptime MainPanicHandler: type) anyerror!void {
        var log = logger.Log.init(alloc.dynamic);
        var panicker = MainPanicHandler.init(&log);
        MainPanicHandler.Singleton = &panicker;

        const args = try Arguments.parse(alloc.static, stdout, stderr);
        var result: options.TransformResult = undefined;
        switch (args.resolve orelse Api.ResolveMode.dev) {
            Api.ResolveMode.disable => {
                result = try bundler.Transformer.transform(
                    allocator,
                    &log,
                    args,
                );
            },
            else => {
                result = try bundler.Bundler.bundle(
                    allocator,
                    &log,
                    args,
                );
            },
        }

        for (result.output_files) |file| {
            try stdout.writer().writeAll(file.contents);
        }
    }
};

1
src/deps/zig-clap/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
zig-cache

24
src/deps/zig-clap/LICENSE Normal file
View File

@@ -0,0 +1,24 @@
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <http://unlicense.org>

View File

@@ -0,0 +1,55 @@
const builtin = @import("builtin");
const std = @import("std");
const Builder = std.build.Builder;
const Mode = std.builtin.Mode;
/// zig-clap build script: wires up test steps for every optimization mode and
/// an "examples" step that builds each example against the local clap package.
pub fn build(b: *Builder) void {
    const mode = b.standardReleaseOptions();
    const target = b.standardTargetOptions(.{});

    // "test" runs the clap test suite once per optimization mode.
    const test_all_step = b.step("test", "Run all tests in all modes.");
    inline for ([_]Mode{ Mode.Debug, Mode.ReleaseFast, Mode.ReleaseSafe, Mode.ReleaseSmall }) |test_mode| {
        const mode_str = comptime modeToString(test_mode);
        const tests = b.addTest("clap.zig");
        tests.setBuildMode(test_mode);
        tests.setTarget(target);
        // Prefix test names with the mode so output is distinguishable.
        tests.setNamePrefix(mode_str ++ " ");

        // Also expose a per-mode step, e.g. "test-release-fast".
        const test_step = b.step("test-" ++ mode_str, "Run all tests in " ++ mode_str ++ ".");
        test_step.dependOn(&tests.step);
        test_all_step.dependOn(test_step);
    }

    const example_step = b.step("examples", "Build examples");
    inline for ([_][]const u8{
        "simple",
        "simple-ex",
        //"simple-error",
        "streaming-clap",
        "help",
        "usage",
    }) |example_name| {
        const example = b.addExecutable(example_name, "example/" ++ example_name ++ ".zig");
        // Each example imports clap via @import("clap") from the repo root.
        example.addPackagePath("clap", "clap.zig");
        example.setBuildMode(mode);
        example.setTarget(target);
        example.install();
        example_step.dependOn(&example.step);
    }

    // Default step: build everything and run all tests.
    const all_step = b.step("all", "Build everything and runs all tests");
    all_step.dependOn(test_all_step);
    b.default_step.dependOn(all_step);
}
/// Returns the kebab-case label used in build-step names for `mode`.
fn modeToString(mode: Mode) []const u8 {
    return switch (mode) {
        .Debug => "debug",
        .ReleaseFast => "release-fast",
        .ReleaseSafe => "release-safe",
        .ReleaseSmall => "release-small",
    };
}

608
src/deps/zig-clap/clap.zig Normal file
View File

@@ -0,0 +1,608 @@
const std = @import("std");
const debug = std.debug;
const heap = std.heap;
const io = std.io;
const mem = std.mem;
const testing = std.testing;
pub const args = @import("clap/args.zig");
test "clap" {
testing.refAllDecls(@This());
}
pub const ComptimeClap = @import("clap/comptime.zig").ComptimeClap;
pub const StreamingClap = @import("clap/streaming.zig").StreamingClap;
/// The names a ::Param can have. A parameter with both fields null is treated
/// as positional.
pub const Names = struct {
    /// '-' prefix
    short: ?u8 = null,
    /// '--' prefix
    long: ?[]const u8 = null,
};
/// Whether a param takes no value (a flag), one value, or can be specified multiple times.
pub const Values = enum {
    none,
    one,
    many,
};
/// Represents a parameter for the command line.
/// Parameters come in three kinds:
///   * Short ("-a"): Should be used for the most commonly used parameters in your program.
///     * They can take a value three different ways.
///       * "-a value"
///       * "-a=value"
///       * "-avalue"
///     * They chain if they don't take values: "-abc".
///       * The last given parameter can take a value in the same way that a single parameter can:
///         * "-abc value"
///         * "-abc=value"
///         * "-abcvalue"
///   * Long ("--long-param"): Should be used for less common parameters, or when no single character
///                            can describe the paramter.
///     * They can take a value two different ways.
///       * "--long-param value"
///       * "--long-param=value"
///   * Positional: Should be used as the primary parameter of the program, like a filename or
///                 an expression to parse.
///     * Positional parameters have both names.long and names.short == null.
///     * Positional parameters must take a value.
/// Generic over `Id`, the caller-defined payload attached to each parameter
/// (e.g. the help text type).
pub fn Param(comptime Id: type) type {
    return struct {
        id: Id = Id{},
        names: Names = Names{},
        takes_value: Values = .none,
    };
}
/// Takes a string and parses it to a Param(Help).
/// This is the reverse of 'help' but for at single parameter only.
/// Accepted shapes include:
///   "-s, --long <VAL>...  Help text"
///   "-s <VAL>  Help text"
///   "--long  Help text"
///   "<VAL>...  Help text"   (positional)
pub fn parseParam(line: []const u8) !Param(Help) {
    var found_comma = false;
    var it = mem.tokenize(line, " \t");
    var param_str = it.next() orelse return error.NoParamFound;

    // First token: a short name is "-x" (exactly two chars), optionally
    // followed by "," when a long name comes next.
    const short_name = if (!mem.startsWith(u8, param_str, "--") and
        mem.startsWith(u8, param_str, "-"))
    blk: {
        found_comma = param_str[param_str.len - 1] == ',';
        if (found_comma)
            param_str = param_str[0 .. param_str.len - 1];

        if (param_str.len != 2)
            return error.InvalidShortParam;

        const short_name = param_str[1];
        if (!found_comma) {
            // Short-only parameter: the rest of the line is value/help text.
            var res = parseParamRest(it.rest());
            res.names.short = short_name;
            return res;
        }

        // "-s," form: the next token must be the long name.
        param_str = it.next() orelse return error.NoParamFound;
        break :blk short_name;
    } else null;

    // NOTE(review): `long_name` is computed but the assignment at the bottom
    // re-slices `param_str` directly; on every path that reaches the end of
    // the function the two appear equivalent — confirm before relying on it.
    const long_name = if (mem.startsWith(u8, param_str, "--")) blk: {
        if (param_str[param_str.len - 1] == ',')
            return error.TrailingComma;

        break :blk param_str[2..];
    } else if (found_comma) {
        return error.TrailingComma;
    } else if (short_name == null) {
        // Neither "-s" nor "--long": treat the whole line as a positional.
        return parseParamRest(mem.trimLeft(u8, line, " \t"));
    } else null;

    var res = parseParamRest(it.rest());
    res.names.long = param_str[2..];
    res.names.short = short_name;
    return res;
}
// Parses the "<value>... help text" tail of a parameter line into a
// Param(Help). A leading "<...>" names the value placeholder; "..." right
// after it marks a repeatable option. Everything left is the help message.
fn parseParamRest(line: []const u8) Param(Help) {
    if (mem.startsWith(u8, line, "<")) blk: {
        // No closing '>' means the '<' is just part of the help text.
        const len = mem.indexOfScalar(u8, line, '>') orelse break :blk;
        const takes_many = mem.startsWith(u8, line[len + 1 ..], "...");
        // Help text starts past '>' and, for repeatable options, past "...".
        const help_start = len + 1 + @as(usize, 3) * @boolToInt(takes_many);
        return .{
            .takes_value = if (takes_many) .many else .one,
            .id = .{
                .msg = mem.trim(u8, line[help_start..], " \t"),
                .value = line[1..len],
            },
        };
    }
    // No value placeholder: the whole line is the help message (a flag).
    return .{ .id = .{ .msg = mem.trim(u8, line, " \t") } };
}
// Test helper: asserts that `found` matches `expected` field by field,
// comparing slices by content rather than by pointer.
fn expectParam(expected: Param(Help), found: Param(Help)) void {
    testing.expectEqualStrings(expected.id.msg, found.id.msg);
    testing.expectEqualStrings(expected.id.value, found.id.value);
    testing.expectEqual(expected.names.short, found.names.short);
    testing.expectEqual(expected.takes_value, found.takes_value);
    const expected_long = expected.names.long orelse {
        testing.expectEqual(@as(?[]const u8, null), found.names.long);
        return;
    };
    testing.expectEqualStrings(expected_long, found.names.long.?);
}
// Covers every accepted spelling of a parameter spec (short/long/both,
// one/many values, positionals) plus the rejected malformed forms.
test "parseParam" {
    expectParam(Param(Help){
        .id = .{ .msg = "Help text", .value = "value" },
        .names = .{ .short = 's', .long = "long" },
        .takes_value = .one,
    }, try parseParam("-s, --long <value> Help text"));
    expectParam(Param(Help){
        .id = .{ .msg = "Help text", .value = "value" },
        .names = .{ .short = 's', .long = "long" },
        .takes_value = .many,
    }, try parseParam("-s, --long <value>... Help text"));
    expectParam(Param(Help){
        .id = .{ .msg = "Help text", .value = "value" },
        .names = .{ .long = "long" },
        .takes_value = .one,
    }, try parseParam("--long <value> Help text"));
    expectParam(Param(Help){
        .id = .{ .msg = "Help text", .value = "value" },
        .names = .{ .short = 's' },
        .takes_value = .one,
    }, try parseParam("-s <value> Help text"));
    expectParam(Param(Help){
        .id = .{ .msg = "Help text" },
        .names = .{ .short = 's', .long = "long" },
    }, try parseParam("-s, --long Help text"));
    expectParam(Param(Help){
        .id = .{ .msg = "Help text" },
        .names = .{ .short = 's' },
    }, try parseParam("-s Help text"));
    expectParam(Param(Help){
        .id = .{ .msg = "Help text" },
        .names = .{ .long = "long" },
    }, try parseParam("--long Help text"));
    expectParam(Param(Help){
        .id = .{ .msg = "Help text", .value = "A | B" },
        .names = .{ .long = "long" },
        .takes_value = .one,
    }, try parseParam("--long <A | B> Help text"));
    expectParam(Param(Help){
        .id = .{ .msg = "Help text", .value = "A" },
        .names = .{},
        .takes_value = .one,
    }, try parseParam("<A> Help text"));
    expectParam(Param(Help){
        .id = .{ .msg = "Help text", .value = "A" },
        .names = .{},
        .takes_value = .many,
    }, try parseParam("<A>... Help text"));
    // Malformed specs: dangling comma and over-long short names are rejected.
    testing.expectError(error.TrailingComma, parseParam("--long, Help"));
    testing.expectError(error.TrailingComma, parseParam("-s, Help"));
    testing.expectError(error.InvalidShortParam, parseParam("-ss Help"));
    testing.expectError(error.InvalidShortParam, parseParam("-ss <value> Help"));
    testing.expectError(error.InvalidShortParam, parseParam("- Help"));
}
/// Optional diagnostics used for reporting useful errors
pub const Diagnostic = struct {
    /// The raw argument that caused the error (used when no name matched).
    arg: []const u8 = "",
    /// The short/long name of the parameter involved, if one was matched.
    name: Names = Names{},

    /// Default diagnostics reporter when all you want is English with no colors.
    /// Use this as a reference for implementing your own if needed.
    pub fn report(diag: Diagnostic, stream: anytype, err: anyerror) !void {
        const Arg = struct {
            prefix: []const u8,
            name: []const u8,
        };
        // Prefer the short name, then the long name, then the raw argument.
        // `@as(*const [1]u8, c)[0..]` turns the captured `u8` into a
        // one-character slice without allocating.
        const a = if (diag.name.short) |*c|
            Arg{ .prefix = "-", .name = @as(*const [1]u8, c)[0..] }
        else if (diag.name.long) |l|
            Arg{ .prefix = "--", .name = l }
        else
            Arg{ .prefix = "", .name = diag.arg };
        switch (err) {
            error.DoesntTakeValue => try stream.print("The argument '{s}{s}' does not take a value\n", .{ a.prefix, a.name }),
            error.MissingValue => try stream.print("The argument '{s}{s}' requires a value but none was supplied\n", .{ a.prefix, a.name }),
            error.InvalidArgument => try stream.print("Invalid argument '{s}{s}'\n", .{ a.prefix, a.name }),
            else => try stream.print("Error while parsing arguments: {s}\n", .{@errorName(err)}),
        }
    }
};
// Test helper: renders `err` through `diag.report` into a fixed buffer and
// checks the produced message text.
fn testDiag(diag: Diagnostic, err: anyerror, expected: []const u8) void {
    var buffer: [1024]u8 = undefined;
    var stream = io.fixedBufferStream(&buffer);
    diag.report(stream.writer(), err) catch unreachable;
    testing.expectEqualStrings(expected, stream.getWritten());
}
// Exercises every message shape of Diagnostic.report: raw arg, short name,
// long name, all three known errors, plus the fallback for unknown errors.
test "Diagnostic.report" {
    testDiag(.{ .arg = "c" }, error.InvalidArgument, "Invalid argument 'c'\n");
    testDiag(.{ .name = .{ .long = "cc" } }, error.InvalidArgument, "Invalid argument '--cc'\n");
    testDiag(.{ .name = .{ .short = 'c' } }, error.DoesntTakeValue, "The argument '-c' does not take a value\n");
    testDiag(.{ .name = .{ .long = "cc" } }, error.DoesntTakeValue, "The argument '--cc' does not take a value\n");
    testDiag(.{ .name = .{ .short = 'c' } }, error.MissingValue, "The argument '-c' requires a value but none was supplied\n");
    testDiag(.{ .name = .{ .long = "cc" } }, error.MissingValue, "The argument '--cc' requires a value but none was supplied\n");
    testDiag(.{ .name = .{ .short = 'c' } }, error.InvalidArgument, "Invalid argument '-c'\n");
    testDiag(.{ .name = .{ .long = "cc" } }, error.InvalidArgument, "Invalid argument '--cc'\n");
    testDiag(.{ .name = .{ .short = 'c' } }, error.SomethingElse, "Error while parsing arguments: SomethingElse\n");
    testDiag(.{ .name = .{ .long = "cc" } }, error.SomethingElse, "Error while parsing arguments: SomethingElse\n");
}
/// The result of a successful `parse`: a `ComptimeClap` plus the arena that
/// owns all memory produced while parsing. Accessors forward to the clap.
pub fn Args(comptime Id: type, comptime params: []const Param(Id)) type {
    return struct {
        arena: std.heap.ArenaAllocator,
        clap: ComptimeClap(Id, params),
        exe_arg: ?[]const u8,

        /// Frees every allocation made while parsing.
        pub fn deinit(self: *@This()) void {
            self.arena.deinit();
        }

        /// Returns true if the flag `name` (e.g. "-a" or "--all") was passed.
        pub fn flag(self: @This(), comptime name: []const u8) bool {
            return self.clap.flag(name);
        }

        /// Returns the value of the single-value option `name`, if passed.
        pub fn option(self: @This(), comptime name: []const u8) ?[]const u8 {
            return self.clap.option(name);
        }

        /// Returns every value passed for the multi-value option `name`.
        pub fn options(self: @This(), comptime name: []const u8) []const []const u8 {
            return self.clap.options(name);
        }

        /// Returns all positional arguments in the order they were given.
        pub fn positionals(self: @This()) []const []const u8 {
            return self.clap.positionals();
        }
    };
}
/// Options that can be set to customize the behavior of parsing.
pub const ParseOptions = struct {
    /// The allocator used for all memory allocations. Defaults to the `heap.page_allocator`.
    /// Note: You should probably override this allocator if you are calling `parseEx`. Unlike
    ///       `parse`, `parseEx` does not wrap the allocator so the heap allocator can be
    ///       quite expensive. (TODO: Can we pick a better default? For `parse`, this allocator
    ///       is fine, as it wraps it in an arena)
    allocator: *mem.Allocator = heap.page_allocator,
    /// When set, parse errors populate this with the offending argument/name.
    diagnostic: ?*Diagnostic = null,
};
/// Same as `parseEx` but uses the `args.OsIterator` by default.
pub fn parse(
    comptime Id: type,
    comptime params: []const Param(Id),
    opt: ParseOptions,
) !Args(Id, params) {
    var iter = try args.OsIterator.init(opt.allocator);
    // NOTE(review): `res.arena` is a by-value copy of `iter.arena`; ownership
    // effectively moves to `res`, so `iter.deinit()` must NOT be called here.
    var res = Args(Id, params){
        .arena = iter.arena,
        .exe_arg = iter.exe_arg,
        .clap = undefined,
    };
    // Let's reuse the arena from the `OSIterator` since we already have
    // it.
    res.clap = try parseEx(Id, params, &iter, .{
        .allocator = &res.arena.allocator,
        .diagnostic = opt.diagnostic,
    });
    return res;
}
/// Parses the command line arguments passed into the program based on an
/// array of `Param`s. The caller supplies the argument iterator, so any of
/// the iterators in `args` (or a custom one) can be used.
pub fn parseEx(
    comptime Id: type,
    comptime params: []const Param(Id),
    iter: anytype,
    opt: ParseOptions,
) !ComptimeClap(Id, params) {
    return ComptimeClap(Id, params).parse(iter, opt);
}
/// Will print a help message in the following format:
///     -s, --long <valueText> helpText
///     -s,                    helpText
///     -s <valueText>         helpText
///         --long             helpText
///         --long <valueText> helpText
pub fn helpFull(
    stream: anytype,
    comptime Id: type,
    params: []const Param(Id),
    comptime Error: type,
    context: anytype,
    helpText: fn (@TypeOf(context), Param(Id)) Error![]const u8,
    valueText: fn (@TypeOf(context), Param(Id)) Error![]const u8,
) !void {
    // First pass: measure the widest "-s, --long <value>" column so help text
    // can be aligned. Writing through a counting writer over `null_writer`
    // produces no output, only a byte count.
    const max_spacing = blk: {
        var res: usize = 0;
        for (params) |param| {
            var cs = io.countingWriter(io.null_writer);
            try printParam(cs.writer(), Id, param, Error, context, valueText);
            if (res < cs.bytes_written)
                res = @intCast(usize, cs.bytes_written);
        }
        break :blk res;
    };
    // Second pass: print each named parameter padded to `max_spacing`.
    // Positionals (no short and no long name) are skipped here, though they
    // still participate in the width measurement above — presumably benign
    // since positionals are short; TODO confirm intended.
    for (params) |param| {
        if (param.names.short == null and param.names.long == null)
            continue;
        var cs = io.countingWriter(stream);
        try stream.print("\t", .{});
        try printParam(cs.writer(), Id, param, Error, context, valueText);
        try stream.writeByteNTimes(' ', max_spacing - @intCast(usize, cs.bytes_written));
        try stream.print("\t{s}\n", .{try helpText(context, param)});
    }
}
// Prints the name column of one parameter: "-s", "    ", ", --long" /
// "  --long", then the value placeholder. Shared by the measuring and the
// printing pass of `helpFull`.
fn printParam(
    stream: anytype,
    comptime Id: type,
    param: Param(Id),
    comptime Error: type,
    context: anytype,
    valueText: fn (@TypeOf(context), Param(Id)) Error![]const u8,
) !void {
    if (param.names.short) |s| {
        try stream.print("-{c}", .{s});
    } else {
        // Keep columns aligned when there is no short name.
        try stream.print("  ", .{});
    }
    if (param.names.long) |l| {
        if (param.names.short) |_| {
            try stream.print(", ", .{});
        } else {
            try stream.print("  ", .{});
        }
        try stream.print("--{s}", .{l});
    }
    // NOTE(review): unlike `usageFull`, `valueText`'s error-union result is
    // passed to `print` without `try`; std.fmt formats error unions by
    // unwrapping payload-or-error, so an error would be printed rather than
    // propagated — confirm this is intended.
    switch (param.takes_value) {
        .none => {},
        .one => try stream.print(" <{s}>", .{valueText(context, param)}),
        .many => try stream.print(" <{s}>...", .{valueText(context, param)}),
    }
}
/// A wrapper around helpFull for simple helpText and valueText functions that
/// cannot fail and take no context.
pub fn helpEx(
    stream: anytype,
    comptime Id: type,
    params: []const Param(Id),
    helpText: fn (Param(Id)) []const u8,
    valueText: fn (Param(Id)) []const u8,
) !void {
    // Adapt the context-free callbacks to the context-taking, error-returning
    // shape that helpFull expects (with an empty error set).
    const Adapter = struct {
        helpText: fn (Param(Id)) []const u8,
        valueText: fn (Param(Id)) []const u8,
        pub fn help(self: @This(), p: Param(Id)) error{}![]const u8 {
            return self.helpText(p);
        }
        pub fn value(self: @This(), p: Param(Id)) error{}![]const u8 {
            return self.valueText(p);
        }
    };
    const adapter = Adapter{
        .helpText = helpText,
        .valueText = valueText,
    };
    return helpFull(stream, Id, params, error{}, adapter, Adapter.help, Adapter.value);
}
/// Parameter id carrying the textual pieces used by `help`/`usage`:
/// the help message and the value placeholder.
pub const Help = struct {
    /// Help text printed next to the parameter.
    msg: []const u8 = "",
    /// Placeholder shown for the parameter's value (e.g. "FILE").
    value: []const u8 = "",
};
/// A wrapper around helpEx that takes a Param(Help), pulling the help and
/// value strings straight out of each parameter's id.
pub fn help(stream: anytype, params: []const Param(Help)) !void {
    return helpEx(stream, Help, params, getHelpSimple, getValueSimple);
}
// `helpText` callback for `help`: the message stored in the param's id.
fn getHelpSimple(param: Param(Help)) []const u8 {
    return param.id.msg;
}
// `valueText` callback for `help`/`usage`: the placeholder in the param's id.
fn getValueSimple(param: Param(Help)) []const u8 {
    return param.id.value;
}
// Pins the exact column-aligned output of `help`, including that positional
// parameters are omitted from the listing.
test "clap.help" {
    var buf: [1024]u8 = undefined;
    var slice_stream = io.fixedBufferStream(&buf);
    @setEvalBranchQuota(10000);
    try help(
        slice_stream.writer(),
        comptime &[_]Param(Help){
            parseParam("-a Short flag.") catch unreachable,
            parseParam("-b <V1> Short option.") catch unreachable,
            parseParam("--aa Long flag.") catch unreachable,
            parseParam("--bb <V2> Long option.") catch unreachable,
            parseParam("-c, --cc Both flag.") catch unreachable,
            parseParam("-d, --dd <V3> Both option.") catch unreachable,
            parseParam("-d, --dd <V3>... Both repeated option.") catch unreachable,
            parseParam("<P> Positional. This should not appear in the help message.") catch unreachable,
        },
    );
    const expected = "" ++
        "\t-a              \tShort flag.\n" ++
        "\t-b <V1>         \tShort option.\n" ++
        "\t    --aa        \tLong flag.\n" ++
        "\t    --bb <V2>   \tLong option.\n" ++
        "\t-c, --cc        \tBoth flag.\n" ++
        "\t-d, --dd <V3>   \tBoth option.\n" ++
        "\t-d, --dd <V3>...\tBoth repeated option.\n";
    testing.expectEqualStrings(expected, slice_stream.getWritten());
}
/// Will print a usage message in the following format:
/// [-abc] [--longa] [-d <valueText>] [--longb <valueText>] <valueText>
///
/// First all non-value-taking parameters which have a short name are
/// printed, then non-positional parameters and finally the positional.
pub fn usageFull(
    stream: anytype,
    comptime Id: type,
    params: []const Param(Id),
    comptime Error: type,
    context: anytype,
    valueText: fn (@TypeOf(context), Param(Id)) Error![]const u8,
) !void {
    // The counting writer's byte count doubles as an "anything printed yet?"
    // flag, used to decide when to open "[-" and when to insert spaces.
    var cos = io.countingWriter(stream);
    const cs = cos.writer();
    // First: collapse all value-less short flags into one "[-abc]" group.
    for (params) |param| {
        const name = param.names.short orelse continue;
        if (param.takes_value != .none)
            continue;
        if (cos.bytes_written == 0)
            try stream.writeAll("[-");
        try cs.writeByte(name);
    }
    if (cos.bytes_written != 0)
        try cs.writeByte(']');
    // Then: every remaining named parameter as its own "[--name <v>]" group,
    // remembering the (last) positional for the end.
    var positional: ?Param(Id) = null;
    for (params) |param| {
        if (param.takes_value == .none and param.names.short != null)
            continue;
        const prefix = if (param.names.short) |_| "-" else "--";
        // Seems the zig compiler is being a little weird. It doesn't allow me to write
        // @as(*const [1]u8, s)       VVVVVVVVVVVVVVVVVVVVVVVVVVVVVV
        const name = if (param.names.short) |*s| @ptrCast([*]const u8, s)[0..1] else param.names.long orelse {
            positional = param;
            continue;
        };
        if (cos.bytes_written != 0)
            try cs.writeByte(' ');
        try cs.print("[{s}{s}", .{ prefix, name });
        switch (param.takes_value) {
            .none => {},
            .one => try cs.print(" <{s}>", .{try valueText(context, param)}),
            .many => try cs.print(" <{s}>...", .{try valueText(context, param)}),
        }
        try cs.writeByte(']');
    }
    // Finally: the positional's value placeholder, if there was one.
    if (positional) |p| {
        if (cos.bytes_written != 0)
            try cs.writeByte(' ');
        try cs.print("<{s}>", .{try valueText(context, p)});
    }
}
/// A wrapper around usageFull for a simple valueText function that
/// cannot fail and takes no context.
pub fn usageEx(
    stream: anytype,
    comptime Id: type,
    params: []const Param(Id),
    valueText: fn (Param(Id)) []const u8,
) !void {
    // Adapt the context-free callback to the context-taking, error-returning
    // shape that usageFull expects (with an empty error set).
    const Adapter = struct {
        valueText: fn (Param(Id)) []const u8,
        pub fn value(self: @This(), p: Param(Id)) error{}![]const u8 {
            return self.valueText(p);
        }
    };
    const adapter = Adapter{ .valueText = valueText };
    return usageFull(stream, Id, params, error{}, adapter, Adapter.value);
}
/// A wrapper around usageEx that takes a Param(Help), pulling the value
/// placeholder straight out of each parameter's id.
pub fn usage(stream: anytype, params: []const Param(Help)) !void {
    return usageEx(stream, Help, params, getValueSimple);
}
// Test helper: renders `params` through `usage` into a fixed buffer and
// compares the result against `expected`.
fn testUsage(expected: []const u8, params: []const Param(Help)) !void {
    var buffer: [1024]u8 = undefined;
    var stream = io.fixedBufferStream(&buffer);
    try usage(stream.writer(), params);
    testing.expectEqualStrings(expected, stream.getWritten());
}
// Pins the grouping rules of `usage`: chained short flags, bracketed options,
// and the trailing positional.
test "usage" {
    @setEvalBranchQuota(100000);
    try testUsage("[-ab]", comptime &[_]Param(Help){
        parseParam("-a") catch unreachable,
        parseParam("-b") catch unreachable,
    });
    try testUsage("[-a <value>] [-b <v>]", comptime &[_]Param(Help){
        parseParam("-a <value>") catch unreachable,
        parseParam("-b <v>") catch unreachable,
    });
    try testUsage("[--a] [--b]", comptime &[_]Param(Help){
        parseParam("--a") catch unreachable,
        parseParam("--b") catch unreachable,
    });
    try testUsage("[--a <value>] [--b <v>]", comptime &[_]Param(Help){
        parseParam("--a <value>") catch unreachable,
        parseParam("--b <v>") catch unreachable,
    });
    try testUsage("<file>", comptime &[_]Param(Help){
        parseParam("<file>") catch unreachable,
    });
    try testUsage("[-ab] [-c <value>] [-d <v>] [--e] [--f] [--g <value>] [--h <v>] [-i <v>...] <file>", comptime &[_]Param(Help){
        parseParam("-a") catch unreachable,
        parseParam("-b") catch unreachable,
        parseParam("-c <value>") catch unreachable,
        parseParam("-d <v>") catch unreachable,
        parseParam("--e") catch unreachable,
        parseParam("--f") catch unreachable,
        parseParam("--g <value>") catch unreachable,
        parseParam("--h <v>") catch unreachable,
        parseParam("-i <v>...") catch unreachable,
        parseParam("<file>") catch unreachable,
    });
}

View File

@@ -0,0 +1,341 @@
const std = @import("std");
const builtin = std.builtin;
const debug = std.debug;
const heap = std.heap;
const mem = std.mem;
const process = std.process;
const testing = std.testing;
/// An example of what methods should be implemented on an arg iterator.
pub const ExampleArgIterator = struct {
    const Error = error{};

    /// Always yields "2"; a real iterator returns null once exhausted.
    pub fn next(iter: *ExampleArgIterator) Error!?[]const u8 {
        return "2";
    }
};
/// An argument iterator which iterates over a slice of arguments.
/// This implementation does not allocate.
pub const SliceIterator = struct {
    const Error = error{};
    args: []const []const u8,
    index: usize = 0,

    /// Returns the next argument, or null once the slice is exhausted.
    pub fn next(iter: *SliceIterator) Error!?[]const u8 {
        if (iter.index >= iter.args.len)
            return null;
        const current = iter.args[iter.index];
        iter.index += 1;
        return current;
    }
};
// Checks that SliceIterator yields the backing slice's elements in order.
test "SliceIterator" {
    const args = &[_][]const u8{ "A", "BB", "CCC" };
    var iter = SliceIterator{ .args = args };
    for (args) |a| {
        const b = try iter.next();
        debug.assert(mem.eql(u8, a, b.?));
    }
}
/// An argument iterator which wraps the ArgIterator in ::std.
/// On windows, this iterator allocates.
pub const OsIterator = struct {
    const Error = process.ArgIterator.NextError;
    /// Arena backing any allocation made while iterating (Windows only).
    arena: heap.ArenaAllocator,
    args: process.ArgIterator,
    /// The executable path (this is the first argument passed to the program)
    /// TODO: Is it the right choice for this to be null? Maybe `init` should
    ///       return an error when we have no exe.
    exe_arg: ?[:0]const u8,

    pub fn init(allocator: *mem.Allocator) Error!OsIterator {
        var res = OsIterator{
            .arena = heap.ArenaAllocator.init(allocator),
            .args = process.args(),
            .exe_arg = undefined,
        };
        // The first OS argument is the executable path; consume it up front
        // so `next` only yields real arguments.
        res.exe_arg = try res.next();
        return res;
    }

    pub fn deinit(iter: *OsIterator) void {
        iter.arena.deinit();
    }

    pub fn next(iter: *OsIterator) Error!?[:0]const u8 {
        if (builtin.os.tag == .windows) {
            // Windows hands the process a single command-line string, so
            // splitting it into arguments requires allocation.
            return try iter.args.next(&iter.arena.allocator) orelse return null;
        } else {
            return iter.args.nextPosix();
        }
    }
};
/// An argument iterator that takes a string and parses it into arguments, simulating
/// how shells split arguments.
pub const ShellIterator = struct {
    const Error = error{
        DanglingEscape,
        QuoteNotClosed,
    } || mem.Allocator.Error;

    /// Arena used when an argument cannot be returned as a slice into `str`
    /// (escapes and adjacent quoted pieces force a copy).
    arena: heap.ArenaAllocator,
    /// The remaining, unparsed input; shrinks as `next` consumes arguments.
    str: []const u8,

    pub fn init(allocator: *mem.Allocator, str: []const u8) ShellIterator {
        return .{
            .arena = heap.ArenaAllocator.init(allocator),
            .str = str,
        };
    }

    pub fn deinit(iter: *ShellIterator) void {
        iter.arena.deinit();
    }

    /// Returns the next shell-split argument, or null when the input is
    /// exhausted. Errors on a trailing escape or an unclosed quote.
    pub fn next(iter: *ShellIterator) Error!?[]const u8 {
        // Whenever possible, this iterator will return slices into `str` instead of
        // allocating. Sometimes this is not possible, for example, escaped characters
        // have to be unescaped, so we need to allocate in this case.
        var list = std.ArrayList(u8).init(&iter.arena.allocator);
        var start: usize = 0;
        var state: enum {
            skip_whitespace,
            no_quote,
            no_quote_escape,
            single_quote,
            double_quote,
            double_quote_escape,
            after_quote,
        } = .skip_whitespace;
        for (iter.str) |c, i| {
            switch (state) {
                // The state that skips the initial whitespace.
                .skip_whitespace => switch (c) {
                    ' ', '\t', '\n' => {},
                    '\'' => {
                        start = i + 1;
                        state = .single_quote;
                    },
                    '"' => {
                        start = i + 1;
                        state = .double_quote;
                    },
                    '\\' => {
                        start = i + 1;
                        state = .no_quote_escape;
                    },
                    else => {
                        start = i;
                        state = .no_quote;
                    },
                },
                // The state that parses the non-quoted part of an argument.
                .no_quote => switch (c) {
                    // We're done parsing a non-quoted argument when we hit
                    // whitespace.
                    ' ', '\t', '\n' => {
                        defer iter.str = iter.str[i..];
                        return iter.result(start, i, &list);
                    },
                    // Slicing is not possible if a quote starts while parsing
                    // non-quoted args.
                    // Example:
                    //   ab'cd' -> abcd
                    '\'' => {
                        try list.appendSlice(iter.str[start..i]);
                        start = i + 1;
                        state = .single_quote;
                    },
                    '"' => {
                        try list.appendSlice(iter.str[start..i]);
                        start = i + 1;
                        state = .double_quote;
                    },
                    // Slicing is not possible if we need to escape a character.
                    // Example:
                    //   ab\"d -> ab"d
                    '\\' => {
                        try list.appendSlice(iter.str[start..i]);
                        start = i + 1;
                        state = .no_quote_escape;
                    },
                    else => {},
                },
                // We're in this state after having parsed the quoted part of an
                // argument. This state works mostly the same as .no_quote, but
                // is aware that the last character seen was a quote, which should
                // not be part of the argument. This is why you will see `i - 1` here
                // instead of just `i` when `iter.str` is sliced.
                .after_quote => switch (c) {
                    ' ', '\t', '\n' => {
                        defer iter.str = iter.str[i..];
                        return iter.result(start, i - 1, &list);
                    },
                    '\'' => {
                        try list.appendSlice(iter.str[start .. i - 1]);
                        start = i + 1;
                        state = .single_quote;
                    },
                    '"' => {
                        try list.appendSlice(iter.str[start .. i - 1]);
                        start = i + 1;
                        state = .double_quote;
                    },
                    '\\' => {
                        try list.appendSlice(iter.str[start .. i - 1]);
                        start = i + 1;
                        state = .no_quote_escape;
                    },
                    else => {
                        try list.appendSlice(iter.str[start .. i - 1]);
                        start = i;
                        state = .no_quote;
                    },
                },
                // The states that parse the quoted part of arguments. The only difference
                // between single and double quoted arguments is that single quoted
                // arguments ignore escape sequences, while double quoted arguments
                // do escaping.
                .single_quote => switch (c) {
                    '\'' => state = .after_quote,
                    else => {},
                },
                .double_quote => switch (c) {
                    '"' => state = .after_quote,
                    '\\' => {
                        try list.appendSlice(iter.str[start..i]);
                        start = i + 1;
                        state = .double_quote_escape;
                    },
                    else => {},
                },
                // The states we end up in after the escape character (`\`). All these
                // states do is transition back into the previous state.
                // TODO: Are there any escape sequences that do transform the second
                //       character into something else? For example, in Zig, `\n` is
                //       transformed into the line feed ascii character.
                .no_quote_escape => switch (c) {
                    else => state = .no_quote,
                },
                .double_quote_escape => switch (c) {
                    else => state = .double_quote,
                },
            }
        }
        // End of input: flush whatever argument (if any) is still in flight.
        defer iter.str = iter.str[iter.str.len..];
        switch (state) {
            .skip_whitespace => return null,
            .no_quote => return iter.result(start, iter.str.len, &list),
            .after_quote => return iter.result(start, iter.str.len - 1, &list),
            .no_quote_escape => return Error.DanglingEscape,
            .single_quote,
            .double_quote,
            .double_quote_escape,
            => return Error.QuoteNotClosed,
        }
    }

    // Produces the argument spanning str[start..end]: a zero-copy slice when
    // nothing was buffered, otherwise the buffered bytes plus the tail.
    fn result(iter: *ShellIterator, start: usize, end: usize, list: *std.ArrayList(u8)) Error!?[]const u8 {
        const res = iter.str[start..end];
        // If we already have something in `list` that means that we could not
        // parse the argument without allocation. We therefore need to just append
        // the rest we have to the list and return that.
        if (list.items.len != 0) {
            try list.appendSlice(res);
            return list.toOwnedSlice();
        }
        return res;
    }
};
// Runs ShellIterator over `str` and checks that it yields exactly `expect`,
// using a FailingAllocator to also assert the precise number of allocations
// (0 means the zero-copy path was taken throughout).
fn testShellIteratorOk(str: []const u8, allocations: usize, expect: []const []const u8) void {
    var allocator = testing.FailingAllocator.init(testing.allocator, allocations);
    var it = ShellIterator.init(&allocator.allocator, str);
    defer it.deinit();
    for (expect) |e| {
        if (it.next()) |actual| {
            testing.expect(actual != null);
            testing.expectEqualStrings(e, actual.?);
        } else |err| testing.expectEqual(@as(anyerror![]const u8, e), err);
    }
    // After all expected arguments the iterator must be exhausted, and must
    // have performed exactly `allocations` allocations.
    if (it.next()) |actual| {
        testing.expectEqual(@as(?[]const u8, null), actual);
        testing.expectEqual(allocations, allocator.allocations);
    } else |err| testing.expectEqual(@as(anyerror!void, {}), err);
}
// Drains ShellIterator over `str` and asserts that iteration eventually
// fails with `expect`; reaching the end without an error fails the test.
fn testShellIteratorErr(str: []const u8, expect: anyerror) void {
    var it = ShellIterator.init(testing.allocator, str);
    defer it.deinit();
    while (it.next() catch |err| {
        testing.expectError(expect, @as(anyerror!void, err));
        return;
    }) |_| {}
    // No error surfaced: force a failure by expecting `expect` of a success.
    testing.expectError(expect, @as(anyerror!void, {}));
}
// Covers quoting, escaping, zero-copy vs. allocating paths (the integer is
// the expected allocation count), and the two error conditions.
test "ShellIterator" {
    testShellIteratorOk("a", 0, &[_][]const u8{"a"});
    testShellIteratorOk("'a'", 0, &[_][]const u8{"a"});
    testShellIteratorOk("\"a\"", 0, &[_][]const u8{"a"});
    testShellIteratorOk("a b", 0, &[_][]const u8{ "a", "b" });
    testShellIteratorOk("'a' b", 0, &[_][]const u8{ "a", "b" });
    testShellIteratorOk("\"a\" b", 0, &[_][]const u8{ "a", "b" });
    testShellIteratorOk("a 'b'", 0, &[_][]const u8{ "a", "b" });
    testShellIteratorOk("a \"b\"", 0, &[_][]const u8{ "a", "b" });
    testShellIteratorOk("'a b'", 0, &[_][]const u8{"a b"});
    testShellIteratorOk("\"a b\"", 0, &[_][]const u8{"a b"});
    testShellIteratorOk("\"a\"\"b\"", 1, &[_][]const u8{"ab"});
    testShellIteratorOk("'a''b'", 1, &[_][]const u8{"ab"});
    testShellIteratorOk("'a'b", 1, &[_][]const u8{"ab"});
    testShellIteratorOk("a'b'", 1, &[_][]const u8{"ab"});
    testShellIteratorOk("a\\ b", 1, &[_][]const u8{"a b"});
    testShellIteratorOk("\"a\\ b\"", 1, &[_][]const u8{"a b"});
    testShellIteratorOk("'a\\ b'", 0, &[_][]const u8{"a\\ b"});
    testShellIteratorOk(" a b ", 0, &[_][]const u8{ "a", "b" });
    testShellIteratorOk("\\ \\ ", 0, &[_][]const u8{ " ", " " });
    testShellIteratorOk(
        \\printf 'run\nuninstall\n'
    , 0, &[_][]const u8{ "printf", "run\\nuninstall\\n" });
    testShellIteratorOk(
        \\setsid -f steam "steam://$action/$id"
    , 0, &[_][]const u8{ "setsid", "-f", "steam", "steam://$action/$id" });
    testShellIteratorOk(
        \\xargs -I% rg --no-heading --no-line-number --only-matching
        \\ --case-sensitive --multiline --text --byte-offset '(?-u)%' $@
        \\
    , 0, &[_][]const u8{
        "xargs",          "-I%",             "rg",               "--no-heading",
        "--no-line-number", "--only-matching", "--case-sensitive", "--multiline",
        "--text",         "--byte-offset",   "(?-u)%",           "$@",
    });
    testShellIteratorErr("'a", error.QuoteNotClosed);
    testShellIteratorErr("'a\\", error.QuoteNotClosed);
    testShellIteratorErr("\"a", error.QuoteNotClosed);
    testShellIteratorErr("\"a\\", error.QuoteNotClosed);
    testShellIteratorErr("a\\", error.DanglingEscape);
}

View File

@@ -0,0 +1,175 @@
const clap = @import("../clap.zig");
const std = @import("std");
const debug = std.debug;
const heap = std.heap;
const mem = std.mem;
const testing = std.testing;
/// Deprecated: Use `parseEx` instead
pub fn ComptimeClap(
    comptime Id: type,
    comptime params: []const clap.Param(Id),
) type {
    // Comptime pre-pass: count flags / single-value options / multi-value
    // options, and rewrite each named param's id into a dense index of the
    // matching storage array below. Positionals keep index 0 and are
    // collected in `pos` instead.
    var flags: usize = 0;
    var single_options: usize = 0;
    var multi_options: usize = 0;
    var converted_params: []const clap.Param(usize) = &[_]clap.Param(usize){};
    for (params) |param| {
        var index: usize = 0;
        if (param.names.long != null or param.names.short != null) {
            const ptr = switch (param.takes_value) {
                .none => &flags,
                .one => &single_options,
                .many => &multi_options,
            };
            index = ptr.*;
            ptr.* += 1;
        }
        const converted = clap.Param(usize){
            .id = index,
            .names = param.names,
            .takes_value = param.takes_value,
        };
        converted_params = converted_params ++ [_]clap.Param(usize){converted};
    }
    return struct {
        single_options: [single_options]?[]const u8,
        multi_options: [multi_options][]const []const u8,
        flags: [flags]bool,
        pos: []const []const u8,
        allocator: *mem.Allocator,

        /// Drives a StreamingClap over `iter` and collects the results into
        /// the fixed-size arrays sized at comptime above.
        pub fn parse(iter: anytype, opt: clap.ParseOptions) !@This() {
            const allocator = opt.allocator;
            var multis = [_]std.ArrayList([]const u8){undefined} ** multi_options;
            for (multis) |*multi| {
                multi.* = std.ArrayList([]const u8).init(allocator);
            }
            var pos = std.ArrayList([]const u8).init(allocator);
            var res = @This(){
                .single_options = [_]?[]const u8{null} ** single_options,
                .multi_options = [_][]const []const u8{undefined} ** multi_options,
                .flags = [_]bool{false} ** flags,
                .pos = undefined,
                .allocator = allocator,
            };
            var stream = clap.StreamingClap(usize, @typeInfo(@TypeOf(iter)).Pointer.child){
                .params = converted_params,
                .iter = iter,
            };
            while (try stream.next()) |arg| {
                const param = arg.param;
                // Positionals have neither a short nor a long name.
                if (param.names.long == null and param.names.short == null) {
                    try pos.append(arg.value.?);
                } else if (param.takes_value == .one) {
                    // NOTE(review): the assert and the length guard look
                    // redundant — presumably the comptime-known guard keeps
                    // the indexing from being analyzed when the array is
                    // empty; confirm.
                    debug.assert(res.single_options.len != 0);
                    if (res.single_options.len != 0)
                        res.single_options[param.id] = arg.value.?;
                } else if (param.takes_value == .many) {
                    debug.assert(multis.len != 0);
                    if (multis.len != 0)
                        try multis[param.id].append(arg.value.?);
                } else {
                    debug.assert(res.flags.len != 0);
                    if (res.flags.len != 0)
                        res.flags[param.id] = true;
                }
            }
            for (multis) |*multi, i|
                res.multi_options[i] = multi.toOwnedSlice();
            res.pos = pos.toOwnedSlice();
            return res;
        }

        /// Frees the collected multi-option and positional slices.
        pub fn deinit(parser: @This()) void {
            for (parser.multi_options) |o|
                parser.allocator.free(o);
            parser.allocator.free(parser.pos);
        }

        /// Returns true if the flag `name` ("-a" or "--all") was passed.
        pub fn flag(parser: @This(), comptime name: []const u8) bool {
            const param = comptime findParam(name);
            if (param.takes_value != .none)
                @compileError(name ++ " is an option and not a flag.");
            return parser.flags[param.id];
        }

        /// Returns the value of the single-value option `name`, if passed.
        pub fn option(parser: @This(), comptime name: []const u8) ?[]const u8 {
            const param = comptime findParam(name);
            if (param.takes_value == .none)
                @compileError(name ++ " is a flag and not an option.");
            if (param.takes_value == .many)
                @compileError(name ++ " takes many options, not one.");
            return parser.single_options[param.id];
        }

        /// Returns every value passed for the multi-value option `name`.
        pub fn options(parser: @This(), comptime name: []const u8) []const []const u8 {
            const param = comptime findParam(name);
            if (param.takes_value == .none)
                @compileError(name ++ " is a flag and not an option.");
            if (param.takes_value == .one)
                @compileError(name ++ " takes one option, not multiple.");
            return parser.multi_options[param.id];
        }

        /// Returns all positional arguments in the order they were given.
        pub fn positionals(parser: @This()) []const []const u8 {
            return parser.pos;
        }

        // Resolves "-s" / "--long" to its converted param at comptime, so a
        // typo in `name` is a compile error rather than a runtime failure.
        fn findParam(comptime name: []const u8) clap.Param(usize) {
            comptime {
                for (converted_params) |param| {
                    if (param.names.short) |s| {
                        if (mem.eql(u8, name, "-" ++ [_]u8{s}))
                            return param;
                    }
                    if (param.names.long) |l| {
                        if (mem.eql(u8, name, "--" ++ l))
                            return param;
                    }
                }
                @compileError(name ++ " is not a parameter.");
            }
        }
    };
}
// End-to-end check of ComptimeClap: flags, a single-value option, a repeated
// option, and a positional — each queried via both its short and long name.
test "" {
    const Clap = ComptimeClap(clap.Help, comptime &[_]clap.Param(clap.Help){
        clap.parseParam("-a, --aa ") catch unreachable,
        clap.parseParam("-b, --bb ") catch unreachable,
        clap.parseParam("-c, --cc <V>") catch unreachable,
        clap.parseParam("-d, --dd <V>...") catch unreachable,
        clap.parseParam("<P>") catch unreachable,
    });
    var iter = clap.args.SliceIterator{
        .args = &[_][]const u8{
            "-a", "-c", "0", "something", "-d", "a", "--dd", "b",
        },
    };
    var args = try Clap.parse(&iter, .{ .allocator = testing.allocator });
    defer args.deinit();
    testing.expect(args.flag("-a"));
    testing.expect(args.flag("--aa"));
    testing.expect(!args.flag("-b"));
    testing.expect(!args.flag("--bb"));
    testing.expectEqualStrings("0", args.option("-c").?);
    testing.expectEqualStrings("0", args.option("--cc").?);
    testing.expectEqual(@as(usize, 1), args.positionals().len);
    testing.expectEqualStrings("something", args.positionals()[0]);
    testing.expectEqualSlices([]const u8, &[_][]const u8{ "a", "b" }, args.options("-d"));
    testing.expectEqualSlices([]const u8, &[_][]const u8{ "a", "b" }, args.options("--dd"));
}

View File

@@ -0,0 +1,424 @@
const builtin = @import("builtin");
const clap = @import("../clap.zig");
const std = @import("std");
const args = clap.args;
const debug = std.debug;
const heap = std.heap;
const io = std.io;
const mem = std.mem;
const os = std.os;
const testing = std.testing;
/// The result returned from StreamingClap.next
pub fn Arg(comptime Id: type) type {
    return struct {
        const Self = @This();
        /// The parameter that was matched.
        param: *const clap.Param(Id),
        /// The value passed for the parameter, if any.
        value: ?[]const u8 = null,
    };
}
/// A command line argument parser which, given an ArgIterator, will parse arguments according
/// to the params. StreamingClap parses in an iterating manner, so you have to use a loop together with
/// StreamingClap.next to parse all the arguments of your program.
pub fn StreamingClap(comptime Id: type, comptime ArgIterator: type) type {
    return struct {
        // Parser state carried between calls to `next`.
        const State = union(enum) {
            normal,
            chaining: Chaining,
            rest_are_positional,

            const Chaining = struct {
                arg: []const u8,
                index: usize,
            };
        };

        params: []const clap.Param(Id),
        iter: *ArgIterator,
        state: State = .normal,
        // Cached pointer to the single positional param; filled lazily by positionalParam.
        positional: ?*const clap.Param(Id) = null,
        // When set, receives the offending argument and name on parse errors.
        diagnostic: ?*clap.Diagnostic = null,

        /// Get the next Arg that matches a Param.
        pub fn next(parser: *@This()) !?Arg(Id) {
            switch (parser.state) {
                .normal => return try parser.normal(),
                .chaining => |state| return try parser.chaining(state),
                .rest_are_positional => {
                    const param = parser.positionalParam() orelse unreachable;
                    const value = (try parser.iter.next()) orelse return null;
                    return Arg(Id){ .param = param, .value = value };
                },
            }
        }

        // Handles the start of a fresh argument: classifies it and dispatches
        // to long-flag, short-flag-chain, or positional handling.
        fn normal(parser: *@This()) !?Arg(Id) {
            const arg_info = (try parser.parseNextArg()) orelse return null;
            const arg = arg_info.arg;

            switch (arg_info.kind) {
                .long => {
                    const eql_index = mem.indexOfScalar(u8, arg, '=');
                    const name = if (eql_index) |i| arg[0..i] else arg;
                    const maybe_value = if (eql_index) |i| arg[i + 1 ..] else null;

                    for (parser.params) |*param| {
                        const match = param.names.long orelse continue;
                        if (!mem.eql(u8, name, match))
                            continue;

                        if (param.takes_value == .none) {
                            if (maybe_value != null)
                                return parser.err(arg, .{ .long = name }, error.DoesntTakeValue);
                            return Arg(Id){ .param = param };
                        }

                        const value = blk: {
                            if (maybe_value) |v|
                                break :blk v;

                            break :blk (try parser.iter.next()) orelse
                                return parser.err(arg, .{ .long = name }, error.MissingValue);
                        };

                        return Arg(Id){ .param = param, .value = value };
                    }

                    return parser.err(arg, .{ .long = name }, error.InvalidArgument);
                },
                .short => return try parser.chaining(.{
                    .arg = arg,
                    .index = 0,
                }),
                .positional => if (parser.positionalParam()) |param| {
                    // If we find a positional with the value `--` then we
                    // interpret the rest of the arguments as positional
                    // arguments.
                    if (mem.eql(u8, arg, "--")) {
                        parser.state = .rest_are_positional;
                        const value = (try parser.iter.next()) orelse return null;
                        return Arg(Id){ .param = param, .value = value };
                    }

                    return Arg(Id){ .param = param, .value = arg };
                } else {
                    return parser.err(arg, .{}, error.InvalidArgument);
                },
            }
        }

        // Handles a run of chained short flags like "-abc", including inline
        // values ("-c=0") and values taken from the next argument.
        // Renamed from the original misspelling `chainging`.
        fn chaining(parser: *@This(), state: State.Chaining) !?Arg(Id) {
            const arg = state.arg;
            const index = state.index;
            const next_index = index + 1;

            for (parser.params) |*param| {
                const short = param.names.short orelse continue;
                if (short != arg[index])
                    continue;

                // Before we return, we have to set the new state of the clap
                defer {
                    if (arg.len <= next_index or param.takes_value != .none) {
                        parser.state = .normal;
                    } else {
                        parser.state = .{
                            .chaining = .{
                                .arg = arg,
                                .index = next_index,
                            },
                        };
                    }
                }

                const next_is_eql = if (next_index < arg.len) arg[next_index] == '=' else false;
                if (param.takes_value == .none) {
                    if (next_is_eql)
                        return parser.err(arg, .{ .short = short }, error.DoesntTakeValue);
                    return Arg(Id){ .param = param };
                }

                if (arg.len <= next_index) {
                    const value = (try parser.iter.next()) orelse
                        return parser.err(arg, .{ .short = short }, error.MissingValue);

                    return Arg(Id){ .param = param, .value = value };
                }

                if (next_is_eql)
                    return Arg(Id){ .param = param, .value = arg[next_index + 1 ..] };

                return Arg(Id){ .param = param, .value = arg[next_index..] };
            }

            return parser.err(arg, .{ .short = arg[index] }, error.InvalidArgument);
        }

        // Returns the positional param (the one declared with neither a long
        // nor a short name), caching it on first lookup.
        fn positionalParam(parser: *@This()) ?*const clap.Param(Id) {
            if (parser.positional) |p|
                return p;

            for (parser.params) |*param| {
                if (param.names.long) |_|
                    continue;
                if (param.names.short) |_|
                    continue;

                parser.positional = param;
                return param;
            }

            return null;
        }

        const ArgInfo = struct {
            arg: []const u8,
            kind: enum {
                long,
                short,
                positional,
            },
        };

        // Pulls the next raw argument from the iterator and classifies it,
        // stripping leading dashes. Bare "--" and "-" are left intact and
        // classified as positionals.
        fn parseNextArg(parser: *@This()) !?ArgInfo {
            const full_arg = (try parser.iter.next()) orelse return null;
            if (mem.eql(u8, full_arg, "--") or mem.eql(u8, full_arg, "-"))
                return ArgInfo{ .arg = full_arg, .kind = .positional };
            if (mem.startsWith(u8, full_arg, "--"))
                return ArgInfo{ .arg = full_arg[2..], .kind = .long };
            if (mem.startsWith(u8, full_arg, "-"))
                return ArgInfo{ .arg = full_arg[1..], .kind = .short };

            return ArgInfo{ .arg = full_arg, .kind = .positional };
        }

        // Records the failure into the diagnostic (if attached) and forwards
        // the error unchanged.
        fn err(parser: @This(), arg: []const u8, names: clap.Names, _err: anytype) @TypeOf(_err) {
            if (parser.diagnostic) |d|
                d.* = .{ .arg = arg, .name = names };
            return _err;
        }
    };
}
// Parses `args_strings` with a fresh StreamingClap and checks that the stream
// of parsed arguments matches `results` exactly, ending in null.
fn testNoErr(params: []const clap.Param(u8), args_strings: []const []const u8, results: []const Arg(u8)) void {
    var arg_iter = args.SliceIterator{ .args = args_strings };
    var parser = StreamingClap(u8, args.SliceIterator){
        .params = params,
        .iter = &arg_iter,
    };

    for (results) |expected| {
        const actual = (parser.next() catch unreachable) orelse unreachable;
        testing.expectEqual(expected.param, actual.param);

        if (expected.value) |expected_value| {
            const actual_value = actual.value orelse unreachable;
            testing.expectEqualSlices(u8, expected_value, actual_value);
        } else {
            testing.expectEqual(@as(@TypeOf(actual.value), null), actual.value);
        }
    }

    // After all expected results, the parser must be exhausted.
    if (parser.next() catch unreachable) |_|
        unreachable;
}
// Parses `args_strings` expecting a failure, and checks that the rendered
// diagnostic message equals `expected`. Fails the test if parsing succeeds.
fn testErr(params: []const clap.Param(u8), args_strings: []const []const u8, expected: []const u8) void {
    var diag = clap.Diagnostic{};
    var arg_iter = args.SliceIterator{ .args = args_strings };
    var parser = StreamingClap(u8, args.SliceIterator){
        .params = params,
        .iter = &arg_iter,
        .diagnostic = &diag,
    };

    while (true) {
        const maybe_arg = parser.next() catch |e| {
            // Render the diagnostic into a fixed buffer and compare.
            var buf: [1024]u8 = undefined;
            var fbs = io.fixedBufferStream(&buf);
            diag.report(fbs.writer(), e) catch unreachable;
            testing.expectEqualStrings(expected, fbs.getWritten());
            return;
        };
        if (maybe_arg == null) break;
    }

    // Reaching the end of the arguments without an error is a test failure.
    testing.expect(false);
}
// Short flags: lone ("-a"), chained ("-ab" parses as -a then -b), and
// value-taking params given their value as the next argument ("-c 0"),
// inline with '=' ("-c=0"), or at the end of a chain ("-ac 0", "-ac=0").
test "short params" {
    const params = [_]clap.Param(u8){
        .{ .id = 0, .names = .{ .short = 'a' } },
        .{ .id = 1, .names = .{ .short = 'b' } },
        .{
            .id = 2,
            .names = .{ .short = 'c' },
            .takes_value = .one,
        },
        .{
            .id = 3,
            .names = .{ .short = 'd' },
            .takes_value = .many,
        },
    };
    const a = &params[0];
    const b = &params[1];
    const c = &params[2];
    const d = &params[3];
    // Each input token below maps to one or more entries in the expected
    // results, in order (chained flags expand to multiple results).
    testNoErr(
        &params,
        &[_][]const u8{
            "-a", "-b", "-ab", "-ba",
            "-c", "0", "-c=0", "-ac",
            "0", "-ac=0", "-d=0",
        },
        &[_]Arg(u8){
            .{ .param = a },
            .{ .param = b },
            .{ .param = a },
            .{ .param = b },
            .{ .param = b },
            .{ .param = a },
            .{ .param = c, .value = "0" },
            .{ .param = c, .value = "0" },
            .{ .param = a },
            .{ .param = c, .value = "0" },
            .{ .param = a },
            .{ .param = c, .value = "0" },
            .{ .param = d, .value = "0" },
        },
    );
}
// Long flags: plain ("--aa"), and value-taking params with the value as the
// next argument ("--cc 0") or inline with '=' ("--cc=0", "--dd=0").
test "long params" {
    const params = [_]clap.Param(u8){
        .{ .id = 0, .names = .{ .long = "aa" } },
        .{ .id = 1, .names = .{ .long = "bb" } },
        .{
            .id = 2,
            .names = .{ .long = "cc" },
            .takes_value = .one,
        },
        .{
            .id = 3,
            .names = .{ .long = "dd" },
            .takes_value = .many,
        },
    };
    const aa = &params[0];
    const bb = &params[1];
    const cc = &params[2];
    const dd = &params[3];
    testNoErr(
        &params,
        &[_][]const u8{
            "--aa", "--bb",
            "--cc", "0",
            "--cc=0", "--dd=0",
        },
        &[_]Arg(u8){
            .{ .param = aa },
            .{ .param = bb },
            .{ .param = cc, .value = "0" },
            .{ .param = cc, .value = "0" },
            .{ .param = dd, .value = "0" },
        },
    );
}
// A param with neither a short nor a long name is positional: every bare
// argument maps to it, carrying the argument text as its value.
test "positional params" {
    const params = [_]clap.Param(u8){.{
        .id = 0,
        .takes_value = .one,
    }};
    testNoErr(
        &params,
        &[_][]const u8{ "aa", "bb" },
        &[_]Arg(u8){
            .{ .param = &params[0], .value = "aa" },
            .{ .param = &params[0], .value = "bb" },
        },
    );
}
// Mixes short, long, and positional params. Also covers the two special
// tokens: a bare "-" is an ordinary positional, while "--" switches the
// parser into rest-are-positional mode, so the later "--cc=0" and "-a"
// are delivered as positional values rather than flags.
test "all params" {
    const params = [_]clap.Param(u8){
        .{
            .id = 0,
            .names = .{ .short = 'a', .long = "aa" },
        },
        .{
            .id = 1,
            .names = .{ .short = 'b', .long = "bb" },
        },
        .{
            .id = 2,
            .names = .{ .short = 'c', .long = "cc" },
            .takes_value = .one,
        },
        .{ .id = 3, .takes_value = .one },
    };
    const aa = &params[0];
    const bb = &params[1];
    const cc = &params[2];
    const positional = &params[3];
    testNoErr(
        &params,
        &[_][]const u8{
            "-a", "-b", "-ab", "-ba",
            "-c", "0", "-c=0", "-ac",
            "0", "-ac=0", "--aa", "--bb",
            "--cc", "0", "--cc=0", "something",
            "-", "--", "--cc=0", "-a",
        },
        &[_]Arg(u8){
            .{ .param = aa },
            .{ .param = bb },
            .{ .param = aa },
            .{ .param = bb },
            .{ .param = bb },
            .{ .param = aa },
            .{ .param = cc, .value = "0" },
            .{ .param = cc, .value = "0" },
            .{ .param = aa },
            .{ .param = cc, .value = "0" },
            .{ .param = aa },
            .{ .param = cc, .value = "0" },
            .{ .param = aa },
            .{ .param = bb },
            .{ .param = cc, .value = "0" },
            .{ .param = cc, .value = "0" },
            .{ .param = positional, .value = "something" },
            .{ .param = positional, .value = "-" },
            .{ .param = positional, .value = "--cc=0" },
            .{ .param = positional, .value = "-a" },
        },
    );
}
// Error reporting: unknown flags, '=' values given to flags that take none,
// and value-taking flags with no value. Each case checks the exact message
// rendered by Diagnostic.report.
test "errors" {
    const params = [_]clap.Param(u8){
        .{
            .id = 0,
            .names = .{ .short = 'a', .long = "aa" },
        },
        .{
            .id = 1,
            .names = .{ .short = 'c', .long = "cc" },
            .takes_value = .one,
        },
    };
    testErr(&params, &[_][]const u8{"q"}, "Invalid argument 'q'\n");
    testErr(&params, &[_][]const u8{"-q"}, "Invalid argument '-q'\n");
    testErr(&params, &[_][]const u8{"--q"}, "Invalid argument '--q'\n");
    testErr(&params, &[_][]const u8{"--q=1"}, "Invalid argument '--q'\n");
    testErr(&params, &[_][]const u8{"-a=1"}, "The argument '-a' does not take a value\n");
    testErr(&params, &[_][]const u8{"--aa=1"}, "The argument '--aa' does not take a value\n");
    testErr(&params, &[_][]const u8{"-c"}, "The argument '-c' requires a value but none was supplied\n");
    testErr(&params, &[_][]const u8{"--cc"}, "The argument '--cc' requires a value but none was supplied\n");
}

View File

@@ -0,0 +1,14 @@
pkgs:
clap:
version: 0.3.0
license: Unlicense
description: Simple command line argument parsing library
source_url: "https://github.com/Hejsil/zig-clap"
root: clap.zig
files:
README.md
LICENSE
build.zig
clap/*.zig
example/*.zig

View File

@@ -0,0 +1,5 @@
id: aoe2l16htluewam6bfwvv0khsbbno8g8jd7suonifg74u7kd
name: clap
main: clap.zig
license: Unlicense
dependencies:

View File

@@ -11,21 +11,12 @@ const resolvePath = @import("./resolver/resolve_path.zig").resolvePath;
// pub const FilesystemImplementation = @import("fs_impl.zig");
//
pub const Stat = packed struct {
// milliseconds
mtime: i64 = 0,
// last queried timestamp
qtime: i64 = 0,
kind: FileSystemEntry.Kind,
};
threadlocal var scratch_lookup_buffer = [_]u8{0} ** 255;
pub const FileSystem = struct {
// This maps paths relative to absolute_working_dir to the structure of arrays of paths
stats: std.StringHashMap(Stat) = undefined,
allocator: *std.mem.Allocator,
top_level_dir = "/",
top_level_dir: string = "/",
fs: Implementation,
pub const Error = error{
@@ -35,16 +26,28 @@ pub const FileSystem = struct {
ENOTDIR,
};
pub fn init1(allocator: *std.mem.Allocator, top_level_dir: ?string, enable_watcher: bool) !*FileSystem {
var files = try allocator.create(FileSystem);
files.* = FileSystem{
.allocator = allocator,
.top_level_dir = top_level_dir orelse (if (isBrowser) "/project" else try std.process.getCwdAlloc(allocator)),
.fs = Implementation.init(allocator, enable_watcher),
// .stats = std.StringHashMap(Stat).init(allocator),
};
return files;
}
pub const DirEntry = struct {
pub const EntryMap = std.StringArrayHashMap(*Entry);
dir: string,
data: EntryMap,
pub fn empty(dir: string, allocator: std.mem.Allocator) DirEntry {
pub fn empty(dir: string, allocator: *std.mem.Allocator) DirEntry {
return DirEntry{ .dir = dir, .data = EntryMap.init(allocator) };
}
pub fn init(dir: string, allocator: std.mem.Allocator) DirEntry {
pub fn init(dir: string, allocator: *std.mem.Allocator) DirEntry {
return DirEntry{ .dir = dir, .data = EntryMap.init(allocator) };
}
@@ -53,13 +56,6 @@ pub const FileSystem = struct {
canonical_error: anyerror,
};
pub fn init(dir: string, allocator: *std.mem.Allocator) DirEntry {
return DirEntry{
.dir = dir,
.data = std.StringArrayHashMap(*Entry).init(allocator),
};
}
pub fn deinit(d: *DirEntry) void {
d.data.allocator.free(d.dir);
@@ -170,6 +166,15 @@ pub const FileSystem = struct {
watcher: ?std.StringHashMap(WatchData) = null,
watcher_mutex: Mutex = Mutex{},
pub fn init(allocator: *std.mem.Allocator, enable_watcher: bool) RealFS {
return RealFS{
.entries = std.StringHashMap(EntriesOption).init(allocator),
.allocator = allocator,
.limiter = Limiter.init(allocator),
.watcher = if (enable_watcher) std.StringHashMap(WatchData).init(allocator) else null,
};
}
pub const ModKey = struct {
inode: std.fs.File.INode = 0,
size: u64 = 0,
@@ -274,7 +279,9 @@ pub const FileSystem = struct {
// Limit the number of files open simultaneously to avoid ulimit issues
pub const Limiter = struct {
chan: std.event.Channel(bool),
chan: ChannelVoid,
pub const ChannelVoid = std.event.Channel(void);
pub fn init(allocator: *std.mem.Allocator) !Limiter {
var limiter = Limiter{ .chan = std.event.Channel(bool) };
@@ -286,7 +293,7 @@ pub const FileSystem = struct {
// This will block if the number of open files is already at the limit
pub fn before(limiter: *Limiter) void {
limiter.chan.put(false);
limiter.chan.put(void);
}
pub fn after(limiter: *Limiter) void {
@@ -514,8 +521,8 @@ pub const FileSystem = struct {
pub const Implementation = comptime {
switch (build_target) {
.wasi, .native => RealFS,
.wasm => WasmFS,
.wasi, .native => return RealFS,
.wasm => return WasmFS,
}
};
};

View File

@@ -15,6 +15,7 @@ pub const build_target: BuildTarget = comptime {
pub const isWasm = build_target == .wasm;
pub const isNative = build_target == .native;
pub const isWasi = build_target == .wasi;
pub const isBrowser = !isWasi and isWasm;
pub const isDebug = std.builtin.Mode.Debug == std.builtin.mode;

View File

@@ -1259,6 +1259,14 @@ pub const Parser = struct {
moduleType: ModuleType = ModuleType.esm,
trim_unused_imports: bool = true,
pub fn init(jsx: options.JSX.Pragma, loader: options.Loader) Options {
return Options{
.ts = loader.isTypeScript(),
.jsx = jsx,
};
}
};
pub fn parse(self: *Parser) !js_ast.Result {
@@ -1429,11 +1437,10 @@ pub const Parser = struct {
return result;
}
pub fn init(transform: options.TransformOptions, log: *logger.Log, source: *logger.Source, define: *Define, allocator: *std.mem.Allocator) !Parser {
pub fn init(_options: Options, log: *logger.Log, source: *logger.Source, define: *Define, allocator: *std.mem.Allocator) !Parser {
const lexer = try js_lexer.Lexer.init(log, source, allocator);
const jsx = if (transform.jsx != null) transform.jsx.? else options.JSX.Pragma{ .parse = false };
return Parser{
.options = Options{ .ts = transform.loader == .tsx or transform.loader == .ts, .jsx = jsx },
.options = _options,
.allocator = allocator,
.lexer = lexer,
.define = define,
@@ -8152,6 +8159,7 @@ pub const P = struct {
.was_jsx_element = true,
}, expr.loc);
},
else => unreachable,
}
},

View File

@@ -83,7 +83,7 @@ pub const Options = struct {
}
};
pub const PrintResult = struct { js: []u8, source_map: ?SourceMapChunk = null };
pub const PrintResult = struct { js: string, source_map: ?SourceMapChunk = null };
// Zig represents booleans in packed structs as 1 bit, with no padding
// This is effectively a bit field

View File

@@ -12,8 +12,8 @@ usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
usingnamespace @import("global.zig");
const panicky = @import("panic_handler.zig");
const MainPanicHandler = panicky.NewPanicHandler(panicky.default_panic);
const cli = @import("cli.zig");
pub const MainPanicHandler = panicky.NewPanicHandler(panicky.default_panic);
pub fn panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace) noreturn {
if (MainPanicHandler.Singleton) |singleton| {
@@ -35,79 +35,5 @@ pub fn main() anyerror!void {
var output_source = Output.Source.init(stdout, stderr);
Output.Source.set(&output_source);
var log = logger.Log.init(alloc.dynamic);
var panicker = MainPanicHandler.init(&log);
MainPanicHandler.Singleton = &panicker;
const args = try std.process.argsAlloc(alloc.dynamic);
if (args.len < 1) {
const len = stderr.write("Pass a file");
return;
}
const absolutePath = try std.fs.path.resolve(alloc.dynamic, args);
const entryPointName = std.fs.path.basename(absolutePath);
const file = try std.fs.openFileAbsolute(absolutePath, std.fs.File.OpenFlags{ .read = true });
const stat = try file.stat();
const code = try file.readToEndAlloc(alloc.dynamic, stat.size);
const opts = try options.TransformOptions.initUncached(alloc.dynamic, entryPointName, code);
var source = try logger.Source.initFile(opts.entry_point, alloc.dynamic);
var ast: js_ast.Ast = undefined;
var raw_defines = RawDefines.init(alloc.static);
try raw_defines.put("process.env.NODE_ENV", "\"development\"");
var user_defines = try DefineData.from_input(raw_defines, &log, alloc.static);
var define = try Define.init(
alloc.static,
user_defines,
);
switch (opts.loader) {
.json => {
var expr = try json_parser.ParseJSON(&source, &log, alloc.dynamic);
var stmt = js_ast.Stmt.alloc(alloc.dynamic, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });
var part = js_ast.Part{
.stmts = &([_]js_ast.Stmt{stmt}),
};
ast = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
},
.jsx, .tsx, .ts, .js => {
var parser = try js_parser.Parser.init(opts, &log, &source, define, alloc.dynamic);
var res = try parser.parse();
ast = res.ast;
},
else => {
Global.panic("Unsupported loader: {s}", .{opts.loader});
},
}
var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
const printed = try js_printer.printAst(
alloc.dynamic,
ast,
js_ast.Symbol.Map.initList(symbols),
&source,
false,
js_printer.Options{ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 } },
&_linker,
);
// if (std.builtin.mode == std.builtin.Mode.Debug) {
// var fixed_buffer = [_]u8{0} ** 512000;
// var buf_stream = std.io.fixedBufferStream(&fixed_buffer);
// try ast.toJSON(alloc.dynamic, stderr.writer());
// }
_ = try stdout.write(printed.js);
try cli.Cli.start(std.heap.c_allocator, stdout, stderr, MainPanicHandler);
}

View File

@@ -24,8 +24,8 @@ pub fn validatePath(log: *logger.Log, fs: *fs.FileSystem.Implementation, cwd: st
}
pub fn stringHashMapFromArrays(comptime t: type, allocator: *std.mem.Allocator, keys: anytype, values: anytype) !t {
const hash_map = t.init(allocator);
hash_map.ensureCapacity(keys.len);
var hash_map = t.init(allocator);
try hash_map.ensureCapacity(@intCast(u32, keys.len));
for (keys) |key, i| {
try hash_map.put(key, values[i]);
}
@@ -153,6 +153,16 @@ pub const Loader = enum {
pub fn isJSX(loader: Loader) bool {
return loader == .jsx or loader == .tsx;
}
pub fn isTypeScript(loader: Loader) bool {
return loader == .tsx or loader == .ts;
}
// Looks up the Loader for a filename by its extension via `obj` (anything
// with a `get([]const u8) ?Loader` method). Returns null when the name has
// no extension (or only a trailing dot).
// NOTE(review): std.fs.path.extension includes the leading '.', so `obj`'s
// keys must include the dot too (e.g. ".ts") — confirm against the map used.
pub fn forFileName(filename: string, obj: anytype) ?Loader {
    const ext = std.fs.path.extension(filename);
    if (ext.len == 0 or (ext.len == 1 and ext[0] == '.')) return null;
    return obj.get(ext);
}
};
pub const defaultLoaders = std.ComptimeStringMap(Loader, .{
@@ -180,6 +190,27 @@ pub const JSX = struct {
development: bool = true,
parse: bool = true,
/// Builds a JSX Pragma from the public API's Jsx options, keeping the
/// Pragma defaults for any field the caller left empty.
pub fn fromApi(jsx: api.Api.Jsx) Pragma {
    var pragma = JSX.Pragma{};

    // BUG(fixed): all three branches previously assigned `pragma.jsx`,
    // clobbering one field three times and never setting the fragment,
    // factory, or import source.
    // NOTE(review): assumes Pragma declares `fragment` and `factory` fields
    // alongside the visible `import_source` — confirm against the struct.
    if (jsx.fragment.len > 0) {
        pragma.fragment = jsx.fragment;
    }

    if (jsx.factory.len > 0) {
        pragma.factory = jsx.factory;
    }

    if (jsx.import_source.len > 0) {
        pragma.import_source = jsx.import_source;
    }

    pragma.development = jsx.development;
    pragma.runtime = jsx.runtime;
    pragma.parse = true;
    return pragma;
}
};
parse: bool = true,
@@ -193,13 +224,76 @@ pub const JSX = struct {
/// /** @jsxImportSource @emotion/core */
import_source: string = "react",
pub const Runtime = enum { classic, automatic };
pub const Runtime = api.Api.JsxRuntime;
};
const TypeScript = struct {
parse: bool = false,
};
pub const BundleOptions = struct {
    footer: string = "",
    banner: string = "",
    define: defines.Define,
    loaders: std.StringHashMap(Loader),
    resolve_dir: string = "/",
    jsx: ?JSX.Pragma,
    react_fast_refresh: bool = false,
    inject: ?[]string = null,
    public_url: string = "/",
    output_dir: string = "",
    write: bool = false,
    preserve_symlinks: bool = false,
    resolve_mode: api.Api.ResolveMode,
    tsconfig_override: ?string = null,
    fs: *fs.FileSystem,
    platform: Platform = Platform.browser,
    main_fields: []string = Platform.DefaultMainFields.get(Platform.browser),
    log: *logger.Log,
    external: ExternalModules,
    entry_points: []string,

    /// Builds BundleOptions from the public API transform options: sets up
    /// the filesystem, loaders, defines, externals, and platform defaults.
    pub fn fromApi(
        allocator: *std.mem.Allocator,
        transform: Api.TransformOptions,
    ) !BundleOptions {
        var log = logger.Log.init(allocator);
        var opts: BundleOptions = std.mem.zeroes(BundleOptions);
        opts.fs = try fs.FileSystem.init1(allocator, transform.absolute_working_dir, false);
        opts.write = transform.write;

        if (transform.jsx) |jsx| {
            opts.jsx = JSX.Pragma.fromApi(jsx);
        }

        // BUG(fixed): was `options.loaders = ...` — `options` is not a name in
        // scope here; the result struct `opts` is the intended receiver.
        opts.loaders = try stringHashMapFromArrays(std.StringHashMap(Loader), allocator, transform.loader_keys, transform.loader_values);

        var user_defines = try stringHashMapFromArrays(defines.RawDefines, allocator, transform.define_keys, transform.define_values);
        if (transform.define_keys.len == 0) {
            try user_defines.put("process.env.NODE_ENV", "development");
        }

        var resolved_defines = try defines.DefineData.from_input(user_defines, log, allocator);
        // BUG(fixed): was `options.defines = try defines.Define.init(allocator,)` —
        // wrong receiver, no such field (the field is `define`), and the freshly
        // resolved defines were never passed in.
        opts.define = try defines.Define.init(
            allocator,
            resolved_defines,
        );

        if (transform.external.len > 0) {
            opts.external = try ExternalModules.init(allocator, opts.fs, opts.fs.top_level_dir, transform.external, &log);
        }

        if (transform.platform) |plat| {
            opts.platform = plat;
            opts.main_fields = Platform.DefaultMainFields.get(plat);
        }

        // BUG(fixed): was `options.main_fields = ...` — same out-of-scope receiver.
        if (transform.main_fields.len > 0) {
            opts.main_fields = transform.main_fields;
        }

        // NOTE(review): `opts.log` is never assigned, and `log` is a stack local
        // whose address is handed to ExternalModules.init — confirm the intended
        // ownership/lifetime of the log before shipping.
        return opts;
    }
};
pub const TransformOptions = struct {
footer: string = "",
banner: string = "",
@@ -255,11 +349,46 @@ pub const TransformOptions = struct {
};
pub const OutputFile = struct {
path: []u8,
contents: []u8,
path: string,
contents: string,
};
pub const TransformResult = struct { errors: []logger.Msg, warnings: []logger.Msg, output_files: []OutputFile };
pub const TransformResult = struct {
    errors: []logger.Msg,
    warnings: []logger.Msg,
    output_files: []OutputFile,

    /// Partitions the log's messages into error and warning slices (allocated
    /// from `allocator`, sized by the log's error/warning counters) and bundles
    /// them with the produced output files. Messages of other kinds are skipped.
    pub fn init(
        output_files: []OutputFile,
        log: *logger.Log,
        allocator: *std.mem.Allocator,
    ) !TransformResult {
        var errors = try allocator.alloc(logger.Msg, log.errors);
        var warnings = try allocator.alloc(logger.Msg, log.warnings);
        var error_i: usize = 0;
        var warning_i: usize = 0;
        for (log.msgs.items) |msg| {
            switch (msg.kind) {
                logger.Kind.err => {
                    // BUG(fixed): previously asserted `warnings.len > warning_i`,
                    // which never guards the write into `errors` below.
                    std.debug.assert(errors.len > error_i);
                    errors[error_i] = msg;
                    error_i += 1;
                },
                logger.Kind.warn => {
                    std.debug.assert(warnings.len > warning_i);
                    warnings[warning_i] = msg;
                    warning_i += 1;
                },
                else => {},
            }
        }

        return TransformResult{
            .output_files = output_files,
            .errors = errors,
            .warnings = warnings,
        };
    }
};
test "TransformOptions.initUncached" {
try alloc.setup(std.heap.page_allocator);

View File

@@ -307,9 +307,14 @@ pub const Resolver = struct {
if (try r.dirInfoCached(source_dir)) |_dir_info| {
const dir_info: *DirInfo = _dir_info;
if (dir_info.ts_config_json) |tsconfig| {
if (tsconfig.paths.size() > 0) {}
if (tsconfig.paths.size() > 0) {
const res = r.matchTSConfigPaths(tsconfig, import_path, kind);
return Result{ .path_pair = res.path_pair, .diff_case = res.diff_case };
}
}
}
}
}
@@ -400,6 +405,16 @@ pub const Resolver = struct {
try r.dir_cache.put(path, info);
}
pub const MatchResult = struct {
path_pair: PathPair,
ok: bool = false,
diff_case: ?fs.FileSystem.Entry.Lookup.DifferentCase = null,
};
pub fn matchTSConfigPaths(r: *Resolver, tsconfig: *TSConfigJSON, path: string, kind: ast.ImportKind) MatchResult {
Global.notimpl();
}
fn dirInfoUncached(r: *Resolver, path: string) !?*DirInfo {
const rfs: r.fs.RealFS = r.fs.fs;
var parent: ?*DirInfo = null;
@@ -417,7 +432,7 @@ pub const Resolver = struct {
// set. It means we will just pass through the empty directory and
// continue to check the directories above it, which is now node behaves.
switch (_entries.err) {
fs.FileSystem.Error.EACCESS => {
error.EACCESS => {
entries = fs.FileSystem.DirEntry.empty(path, r.allocator);
},
@@ -428,8 +443,8 @@ pub const Resolver = struct {
// directory. The "pnpm" package manager generates a faulty "NODE_PATH"
// list which contains such paths and treating them as missing means we just
// ignore them during path resolution.
fs.FileSystem.Error.ENOENT,
fs.FileSystem.Error.ENOTDIR,
error.ENOENT,
error.ENOTDIR,
=> {},
else => {
const pretty = r.prettyPath(fs.Path{ .text = path, .namespace = "file" });

View File

@@ -128,7 +128,7 @@ pub const MutableString = struct {
return self.list.toOwnedSlice(self.allocator);
}
pub fn toOwnedSliceLeaky(self: *MutableString) []u8 {
pub fn toOwnedSliceLeaky(self: *MutableString) string {
return self.list.items;
}