Compare commits

...

122 Commits

Author SHA1 Message Date
Jarred Sumner
474df61a86 maekfile 2021-05-28 22:48:37 -07:00
Jarred Sumner
7c576d7b62 commit 2021-05-28 22:47:30 -07:00
Jarred Sumner
4e1619c17a typo 2021-05-28 13:34:02 -07:00
Jarred Sumner
54d9969b4c Fix integer overflow 2021-05-28 13:33:02 -07:00
Jarred Sumner
91d6bf26b9 Remove legacy_octal_loc 2021-05-28 13:32:55 -07:00
Jarred Sumner
2172f3c5e3 keep lexer/loc 2021-05-28 13:27:05 -07:00
Jarred Sumner
d44fa1ca92 launch.json 2021-05-28 13:23:29 -07:00
Jarred Sumner
e72ad4777c fixtures 2021-05-28 13:22:31 -07:00
Jarred Sumner
f1bcf07e2b gitignore 2021-05-27 21:35:41 -07:00
Jarred Sumner
cbf0b77e52 lists 2021-05-27 21:35:28 -07:00
Jarred Sumner
b6e7f01e6a stmt experiment 2021-05-27 18:50:20 -07:00
Jarred Sumner
d1b3bce067 lots 2021-05-27 16:38:53 -07:00
Jarred Sumner
05b9e89417 Fix blah = value inside function args 2021-05-27 15:27:58 -07:00
Jarred Sumner
ebefe97073 Fix yield* 2021-05-27 14:56:53 -07:00
Jarred Sumner
84ea80b813 Fix parsing await inside scopes that contain functions, return the backtracking error in TypeScript 2021-05-27 14:42:33 -07:00
Jarred Sumner
3bcce51fa4 Error message for using node builtins outside of platform == .node 2021-05-27 14:38:28 -07:00
Jarred Sumner
2c212929f8 faster writes performance 2021-05-27 00:42:02 -07:00
Jarred Sumner
9b47a8791e trying to fix outbase 2021-05-27 00:41:33 -07:00
Jarred Sumner
ca2897b466 node builtins 2021-05-27 00:40:56 -07:00
Jarred Sumner
453cfa5689 fuckin with absolute paths 2021-05-27 00:40:47 -07:00
Jarred Sumner
7337f27a7e Use a normal string to represent template literal content for easier UTF8/UTF16 mixing 2021-05-26 18:15:49 -07:00
Jarred Sumner
96a33924fb Skip slow path 2021-05-26 18:15:21 -07:00
Jarred Sumner
bc794e89ed FIx parsing 2 digit hex 2021-05-26 18:14:49 -07:00
Jarred Sumner
bb7404d6bc Fix returning parse errors and template tags 2021-05-26 18:14:36 -07:00
Jarred Sumner
f47d7b3d2d More reliable path storage 2021-05-26 18:13:54 -07:00
Jarred Sumner
831a461916 Fix base_url always null 2021-05-26 18:13:39 -07:00
Jarred Sumner
0e6c46819a Fix tempalte tags 2021-05-26 18:11:55 -07:00
Jarred Sumner
9b5f317c5b detect JSON errors 2021-05-26 18:11:28 -07:00
Jarred Sumner
63b8182b7c I love enums 2021-05-26 13:15:52 -07:00
Jarred Sumner
1e0eb4012a namespace/enum? is that it? 2021-05-26 12:23:09 -07:00
Jarred Sumner
bc4e76c2a5 print_ast feature flag 2021-05-26 12:21:02 -07:00
Jarred Sumner
9c2c005b58 import== 2021-05-26 10:08:14 -07:00
Jarred Sumner
84472ed57f lexer bug! 2021-05-26 10:07:56 -07:00
Jarred Sumner
d04ef8c53f cloner 2021-05-25 23:34:14 -07:00
Jarred Sumner
f2a9dc9eea like all of typescript lol 2021-05-25 20:01:33 -07:00
Jarred Sumner
1cd6b587a2 newline 2021-05-25 20:01:21 -07:00
Jarred Sumner
28534b2e34 add cat, microoptimize the microptimize 2021-05-25 20:01:16 -07:00
Jarred Sumner
6c7eeb2030 mostly just zig fmt 2021-05-25 20:01:06 -07:00
Jarred Sumner
9b206cca2b Malformed headers breaks request parsing 2021-05-25 11:12:22 -07:00
Jarred Sumner
06fbc24b11 relative path 2021-05-25 01:34:44 -07:00
Jarred Sumner
a2637e9016 w 2021-05-24 12:44:49 -07:00
Jarred Sumner
244ae8c593 try 2021-05-24 12:44:39 -07:00
Jarred Sumner
f7ed006a08 ok 2021-05-24 12:44:23 -07:00
Jarred Sumner
5f72442386 ok 2021-05-24 12:44:13 -07:00
Jarred Sumner
1957f0fc23 little separation 2021-05-23 11:10:23 -07:00
Jarred Sumner
37e17be7aa The little things 2021-05-23 10:20:48 -07:00
Jarred Sumner
ac4ac8f5a8 muck 2021-05-23 10:05:21 -07:00
Jarred Sumner
45b55a8970 http server can load static files...slowly. 2021-05-22 23:25:25 -07:00
Jarred Sumner
63e622f2f3 wip 2021-05-21 17:55:42 -07:00
Jarred Sumner
cee857ac4e pico 2021-05-20 02:34:42 -07:00
Jarred Sumner
6475442469 cool 2021-05-19 23:12:23 -07:00
Jarred Sumner
23220fd348 Starting to work on rutnime 2021-05-19 19:30:24 -07:00
Jarred Sumner
4f1d32be16 tread 2021-05-18 20:33:45 -07:00
Jarred Sumner
f502d0f1a4 decodeEscapeSequences...kiond of? 2021-05-18 20:32:55 -07:00
Jarred Sumner
4d6a8f598a hm 2021-05-18 20:06:08 -07:00
Jarred Sumner
78fa4c4f87 Fix DotDefine 2021-05-18 14:40:37 -07:00
Jarred Sumner
1c80859431 Fix label parsing 2021-05-18 14:07:51 -07:00
Jarred Sumner
957e871f4a Fix duplicate exports error 2021-05-18 13:49:23 -07:00
Jarred Sumner
0840845d68 Fix "in" keyword 2021-05-18 13:13:04 -07:00
Jarred Sumner
2ef6397ab9 Resolver is fast now! 2021-05-18 02:24:40 -07:00
Jarred Sumner
9ccb4dd082 lots 2021-05-16 23:25:12 -07:00
Jarred Sumner
d8b1d29656 lots 2021-05-15 17:23:55 -07:00
Jarred Sumner
778c24f176 keep 2021-05-13 23:22:08 -07:00
Jarred Sumner
248d1a7a93 w 2021-05-13 21:12:41 -07:00
Jarred Sumner
e1f996e1b8 more utf8 2021-05-13 21:10:02 -07:00
Jarred Sumner
dbcddc79fc bugfix 2021-05-13 21:09:41 -07:00
Jarred Sumner
7243945291 bug fixes galore 2021-05-13 17:44:50 -07:00
Jarred Sumner
b42b239344 okay 2021-05-13 13:51:40 -07:00
Jarred Sumner
87771ba895 various bug fixes 2021-05-13 01:24:10 -07:00
Jarred Sumner
28fce4aac1 hm 2021-05-13 00:46:22 -07:00
Jarred Sumner
d8828b69d8 hm 2021-05-12 20:40:38 -07:00
Jarred Sumner
80037859ec okay I think that's most of resolving packages/imports algorithm!!! 2021-05-12 20:33:58 -07:00
Jarred Sumner
51df94e599 cool 2021-05-12 13:17:26 -07:00
Jarred Sumner
f9a74df73d That's all the errors?? 2021-05-12 13:00:25 -07:00
Jarred Sumner
2c20d88e8d okay 2021-05-12 01:46:58 -07:00
Jarred Sumner
8df97221a4 now we do resolver?? 2021-05-11 20:49:11 -07:00
Jarred Sumner
cf4d0fe3b6 cool 2021-05-11 20:26:13 -07:00
Jarred Sumner
a5f1670e92 update 2021-05-11 18:39:00 -07:00
Jarred Sumner
d75a1deb4a opts 2021-05-11 17:19:08 -07:00
Jarred Sumner
033b74cc2a submodule 2021-05-11 11:55:38 -07:00
Jarred Sumner
2b3c0584c6 asdasdasdasd 2021-05-10 20:05:53 -07:00
Jarred Sumner
b7d8fe2f35 1day 2021-05-09 18:57:48 -07:00
Jarred Sumner
7d3b0e7daa Use try for errors during parsing so that backtracking can happen 2021-05-08 20:48:20 -07:00
Jarred Sumner
32cdc13f63 Okay this hunks solution seems to work for now. It's not _great_ though. 2021-05-08 19:41:52 -07:00
Jarred Sumner
2f4cd402e4 Fix exporting default twice 2021-05-08 18:12:54 -07:00
Jarred Sumner
6b863d5d51 Fix for loop initializer 2021-05-08 14:23:52 -07:00
Jarred Sumner
79223472f7 wip 2021-05-07 23:34:16 -07:00
Jarred Sumner
8c4917fe60 This _sort of_ works 2021-05-07 20:19:32 -07:00
Jarred Sumner
f4267e2d1f wip 2021-05-07 14:12:56 -07:00
Jarred Sumner
96ff169e46 cool 2021-05-07 01:26:26 -07:00
Jarred Sumner
741e1513b7 123 2021-05-05 19:02:36 -07:00
Jarred Sumner
3708dd4484 cool 2021-05-05 19:02:30 -07:00
Jarred Sumner
2cbd4c9d80 I think that fixes the scopes bug 2021-05-05 19:02:14 -07:00
Jarred Sumner
e0d01a9a91 alright 2021-05-05 13:12:19 -07:00
Jarred Sumner
e1df98878d damn tho 2021-05-05 03:09:59 -07:00
Jarred Sumner
596f3c064a Revert "the fast way"
This reverts commit 808e5cfac3.
2021-05-04 16:05:15 -07:00
Jarred Sumner
29fe5b730f hbm 2021-05-04 16:03:00 -07:00
Jarred Sumner
082d184848 w 2021-05-04 16:02:22 -07:00
Jarred Sumner
2e8d6d549d re 2021-05-04 16:02:09 -07:00
Jarred Sumner
6431b90b9e *src 2021-05-04 16:01:43 -07:00
Jarred Sumner
4c60accdc1 * 2021-05-04 16:01:21 -07:00
Jarred Sumner
808e5cfac3 the fast way 2021-05-04 15:58:18 -07:00
Jarred Sumner
0bfd74af55 slice 2021-05-04 15:56:55 -07:00
Jarred Sumner
83ff3453dc keeper 2021-05-04 15:54:17 -07:00
Jarred Sumner
e034383833 it works??? 2021-05-03 22:37:28 -07:00
Jarred Sumner
1d44b63675 hm 2021-05-03 20:29:38 -07:00
Jarred Sumner
468927c14b maybePrintSpace 2021-05-02 23:45:41 -07:00
Jarred Sumner
c8a8da370c wip 2021-05-02 18:24:46 -07:00
Jarred Sumner
195c69606b shorthand 2021-05-02 16:42:15 -07:00
Jarred Sumner
8db9c7650c various 2021-05-02 16:25:14 -07:00
Jarred Sumner
818d014931 classes work, excluding name and constructor/super 2021-05-02 13:04:55 -07:00
Jarred Sumner
f59ec8d6c0 Assorted bugfixes but the next step really is porting tests and fixing 2021-05-01 01:28:40 -07:00
Jarred Sumner
006ca4f13c it prints end to end though doesn't work yet 2021-04-30 17:26:17 -07:00
Jarred Sumner
107310d785 inching closure 2021-04-30 15:34:31 -07:00
Jarred Sumner
fd56d41c8e all in a days work 2021-04-30 00:55:15 -07:00
Jarred Sumner
daf9ea419b ao[slk 2021-04-29 22:12:22 -07:00
Jarred Sumner
ac83057d08 aoskdp 2021-04-29 21:46:07 -07:00
Jarred Sumner
2567243c8d hm 2021-04-29 20:22:25 -07:00
Jarred Sumner
38c7eb73c1 okay 2021-04-29 14:43:30 -07:00
Jarred Sumner
a32116476a wap 2021-04-29 14:03:01 -07:00
Jarred Sumner
4e3f680ac4 asdasd 2021-04-29 10:29:25 -07:00
Jarred Sumner
b37acf309c wip 2021-04-28 21:58:02 -07:00
122 changed files with 48159 additions and 9534 deletions

30
.gitignore vendored
View File

@@ -3,4 +3,32 @@ zig-cache
*.wasm
*.o
*.a
*.a
profile.json
/package.json
node_modules
.swcrc
yarn.lock
dist
*.log
*.out.js
/package-lock.json
build
*.wat
zig-out
pnpm-lock.yaml
README.md.template
src/deps/zig-clap/example
src/deps/zig-clap/README.md
src/deps/zig-clap/.github
src/deps/zig-clap/.gitattributes
out
.trace
cover
coverage
coverv
*.trace
bench
github

6
.gitmodules vendored Normal file
View File

@@ -0,0 +1,6 @@
# [submodule "src/deps/zig-clap"]
# path = src/deps/zig-clap
# url = https://github.com/Hejsil/zig-clap
[submodule "src/deps/picohttpparser"]
path = src/deps/picohttpparser
url = https://github.com/h2o/picohttpparser/

203
.vscode/launch.json vendored
View File

@@ -1,40 +1,187 @@
{
"version": "0.2.0",
"configurations": [
// {
// "type": "lldb",
// "request": "launch",
// "name": "Test",
// "program": "${workspaceFolder}/zig-out/bin/test",
// "preLaunchTask": "test",
// "args": ["/usr/local/bin/zig"],
// "cwd": "${workspaceFolder}",
// "console": "internalConsole"
// },
{
"name": "Test",
"type": "lldb",
"name": "esbuild",
"type": "go",
"request": "launch",
"stdio": null,
"stopOnEntry": false,
"program": "/usr/local/bin/zig",
"cwd": "${workspaceFolder}",
"args": ["test", "${file}"],
"presentation": {
"hidden": false,
"group": "",
"order": 1
},
"env": {
"TERM": "xterm"
}
"mode": "debug",
"program": "/Users/jarred/Code/esbuild/cmd/esbuild",
"cwd": "/Users/jarred/Code/esdev/src/test/fixtures",
"args": ["--bundle", "--outfile=out.esbuild.js", "await.ts"]
},
{
"name": "Launch",
"type": "cppdbg",
"type": "lldb",
"request": "launch",
"program": "${workspaceFolder}/zig-cache/bin/esdev",
"args": [],
"stopAtEntry": false,
"cwd": "${workspaceFolder}",
"environment": [],
"externalConsole": false,
"MIMode": "lldb",
"internalConsoleOptions": "openOnSessionStart",
"logging": {
"moduleLoad": false
}
"name": "Dev Launch",
"program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
"args": ["./await.ts", "--resolve=disable"],
"cwd": "${workspaceFolder}/src/test/fixtures",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "DAev Launch",
"program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
"args": ["./simple.jsx", "--resolve=disable"],
"cwd": "${workspaceFolder}/src/test/fixtures",
"console": "internalConsole"
},
// {
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch (other)",
// "program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
// "args": ["./simple.jsx", "--resolve=disable"],
// "cwd": "${workspaceFolder}/src/test/fixtures",
// "console": "internalConsole"
// },
// {
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch",
// "program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
// "preLaunchTask": "build",
// "args": [
// "--resolve=disable",
// "--cwd",
// "/Users/jarredsumner/Code/esdev/src/test/fixtures",
// "escape-chars.js"
// ],
// "cwd": "${workspaceFolder}",
// "console": "internalConsole"
// }
// {
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch",
// "program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
// "preLaunchTask": "build",
// "args": [
// "--resolve=dev",
// "--cwd",
// "/Users/jarredsumner/Builds/esbuild/bench/three/src/",
// "./entry.js",
// "-o",
// "out"
// ],
// "cwd": "/Users/jarredsumner/Builds/esbuild/bench/three/src",
// "console": "internalConsole"
// }
// {
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch",
// "program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
// "preLaunchTask": "build",
// "args": [
// "--resolve=dev",
// "--cwd",
// "/Users/jarredsumner/Builds/esbuild/bench/three/src/",
// "./entry.js",
// "-o",
// "out"
// ],
// "cwd": "${workspaceFolder}",
// "console": "internalConsole"
// }
// {
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch",
// "program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
// // "preLaunchTask": "build",
// "args": [
// "--resolve=dev",
// "--cwd",
// "./src/api/demo",
// "pages/index.jsx",
// "-o",
// "out",
// "--public-url=https://hello.com/",
// "--serve"
// ],
// "cwd": "${workspaceFolder}",
// "console": "internalConsole"
// }
{
"type": "lldb",
"request": "launch",
"name": "Rome",
// "program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
"program": "${workspaceFolder}/build/macos-x86_64/esdev",
// "preLaunchTask": "build",
"args": [
"--resolve=dev",
// "--resolve=lazy",
"--cwd",
"${workspaceFolder}/bench/rome/src",
"entry",
"--platform=node",
// "@romejs/js-analysis/evaluators/modules/ImportCall.ts",
"--outdir=${workspaceFolder}/bench/rome/src/out",
// "@romejs/cli-diagnostics/banners/success.json",
"--public-url=https://hello.com/"
],
"cwd": "${workspaceFolder}/bench/rome/src",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "Rome Dev",
// "program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
"program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
// "preLaunchTask": "build",
"args": [
"--resolve=dev",
// "--resolve=lazy",
"--cwd",
"${workspaceFolder}/bench/rome/src",
"entry",
"--platform=node",
// "@romejs/js-analysis/evaluators/modules/ImportCall.ts",
"--outdir=${workspaceFolder}/bench/rome/src/out",
// "@romejs/cli-diagnostics/banners/success.json",
"--public-url=https://hello.com/"
],
"cwd": "${workspaceFolder}/bench/rome/src",
"console": "internalConsole"
}
// {
// "type": "lldb",
// "request": "launch",
// "name": "Dev Launch",
// "program": "${workspaceFolder}/build/bin/debug/esdev",
// "preLaunchTask": "build",
// "args": [
// "--resolve=dev",
// "--cwd",
// "/",
// "/Users/jarredsumner/Code/esdev/src/test/fixtures/img-bug.js",
// "-o",
// "out"
// ],
// "cwd": "${workspaceFolder}",
// "console": "internalConsole",
// "presentation": {
// "hidden": false,
// "group": "",
// "order": 1
// }
// }
]
}

28
.vscode/tasks.json vendored
View File

@@ -3,9 +3,17 @@
"tasks": [
{
"label": "build",
"type": "shell",
"command": "zig build",
"type": "process",
"command": "zig",
"args": ["build"],
"presentation": {
"echo": true,
"reveal": "silent",
"focus": false,
"panel": "shared",
"showReuseMessage": false,
"clear": false
},
"group": {
"kind": "build",
"isDefault": true
@@ -26,7 +34,15 @@
"label": "test",
"type": "shell",
"command": "zig",
"args": ["test", "${file}", "-femit-bin=zig-cache/bin/test"],
"args": [
"test",
"${file}",
"--main-pkg-path",
"${workspaceFolder}",
"-femit-bin=${workspaceFolder}/zig-out/bin/test",
";",
"true"
],
"group": {
"kind": "test",
@@ -34,7 +50,9 @@
},
"presentation": {
"showReuseMessage": false,
"clear": true
"clear": true,
"panel": "new",
"reveal": "always"
}
}
]

92
Makefile Normal file
View File

@@ -0,0 +1,92 @@
speedy: speedy-prod-native speedy-prod-wasi speedy-prod-wasm
api:
peechy --schema src/api/schema.peechy --esm src/api/schema.js --ts src/api/schema.d.ts --zig src/api/schema.zig
speedy-prod-native-macos:
cd src/deps; clang -c picohttpparser.c; cd ../../
zig build -Drelease-fast -Dtarget=x86_64-macos-gnu
speedy-prod-native-macos-lib:
zig build lib -Drelease-fast -Dtarget=x86_64-macos-gnu
speedy-m1:
zig build -Drelease-fast -Dtarget=aarch64-macos-gnu
speedy-prod-wasm:
zig build -Drelease-fast -Dtarget=wasm32-freestanding
speedy-prod-wasi:
zig build -Drelease-fast -Dtarget=wasm32-wasi
speedy-dev: speedy-dev-native speedy-dev-wasi speedy-dev-wasm
speedy-dev-native:
zig build
speedy-dev-wasm:
zig build -Dtarget=wasm32-freestanding
speedy-dev-wasi:
zig build -Dtarget=wasm32-wasi
ROME_TSCONFIG += {
ROME_TSCONFIG += \"compilerOptions\": {
ROME_TSCONFIG += \"sourceMap\": true,
ROME_TSCONFIG += \"esModuleInterop\": true,
ROME_TSCONFIG += \"resolveJsonModule\": true,
ROME_TSCONFIG += \"moduleResolution\": \"node\",
ROME_TSCONFIG += \"target\": \"es2019\",
ROME_TSCONFIG += \"module\": \"commonjs\",
ROME_TSCONFIG += \"baseUrl\": \".\"
ROME_TSCONFIG += }
ROME_TSCONFIG += }
github/rome:
mkdir -p github/rome
cd github/rome && git init && git remote add origin https://github.com/romejs/rome.git
cd github/rome && git fetch --depth 1 origin d95a3a7aab90773c9b36d9c82a08c8c4c6b68aa5 && git checkout FETCH_HEAD
# This target provides an easy way to verify that the build is correct. Since
# Rome is self-hosted, we can just run the bundle to build Rome. This makes sure
# the bundle doesn't crash when run and is a good test of a non-trivial workload.
bench-rome-verify: | github/rome
mkdir -p bench/rome-verify
cp -r github/rome/packages bench/rome-verify/packages
cp github/rome/package.json bench/rome-verify/package.json
bench-rome:
rm -rf bench/rome
mkdir -p bench/rome
cp -r github/rome/packages bench/rome/src/
echo "$(ROME_TSCONFIG)" > bench/rome/src/tsconfig.json
echo 'import "rome/bin/rome"' > bench/rome/src/entry.ts
# Patch a cyclic import ordering issue that affects commonjs-style bundlers (webpack and parcel)
echo "export { default as createHook } from './api/createHook';" > .temp
sed "/createHook/d" bench/rome/src/@romejs/js-compiler/index.ts >> .temp
mv .temp bench/rome/src/@romejs/js-compiler/index.ts
# Replace "import fs = require('fs')" with "const fs = require('fs')" because
# the TypeScript compiler strips these statements when targeting "esnext",
# which breaks Parcel 2 when scope hoisting is enabled.
find bench/rome/src -name '*.ts' -type f -print0 | xargs -L1 -0 sed -i '' 's/import \([A-Za-z0-9_]*\) =/const \1 =/g'
find bench/rome/src -name '*.tsx' -type f -print0 | xargs -L1 -0 sed -i '' 's/import \([A-Za-z0-9_]*\) =/const \1 =/g'
# Get an approximate line count
rm -r bench/rome/src/@romejs/js-parser/test-fixtures
echo 'Line count:' && (find bench/rome/src -name '*.ts' && find bench/rome/src -name '*.js') | xargs wc -l | tail -n 1
bench-rome-speedy: | bench/rome-verify
cd bench/rome/src
/Users/jarred/Code/esdev/build/macos-x86_64/esdev --outdir=dist ./entry.ts
github-rome:
mkdir -p github/rome
cd github/rome && git init && git remote add origin https://github.com/romejs/rome.git
cd github/rome && git fetch --depth 1 origin d95a3a7aab90773c9b36d9c82a08c8c4c6b68aa5 && git checkout FETCH_HEAD

158
README.md
View File

@@ -1,18 +1,18 @@
# esdev
# Speedy
Incredibly fast ECMAScript & TypeScript bundler designed for development.
Incredibly fast ECMAScript & TypeScript toolchain optimized for development.
## Motivation
JavaScript bundlers run very slow in web browsers.
Nobody should have to wait for build tools to be productive.
## Purpose
The purpose of esdev is to very quickly convert ECMAScript/TypeScript into something a web browser can execute.
The purpose of Speedy is to very quickly convert ECMAScript/TypeScript into something a web browser can execute.
Goals:
- Transpile fast inside a web browser. "Fast" is defined as "<= 3ms per un-minified file up to 1000 LOC" without build caching (FS cache yes).
- Transpile fast. "Fast" is defined as "<= 3ms per un-minified file up to 1000 LOC" without a build cache
- Transpile JSX to ECMAScript
- Remove TypeScript annotations
- Conditionally support React Fast Refresh
@@ -35,57 +35,31 @@ Non-goals:
## How it works
Much of the code is a line-for-line port of esbuild to Zig, with a few important differences.
### Implementation differences
#### Moar lookup tables
### Why not just use esbuild?
#### Missing features
- Hot Module Reloading
- Rewrite CommonJS/SystemJS/UMD imports and exports to ESM
- React Fast Refresh
#### Go WASM performance isn't great.
There's a number of reasons for this:
- Unlike native targets, Go's WASM target runs the garbage collector on the same thread as the application. Since this usecase is very constrained (no need for shared memory, or long-term objects), rewriting in Zig lets us get away with a bump allocator -- skipping garbage collection entirely. This is faster than what Go does and possibly Rust, since this zeroes out the heap in one call at the end, rather than progressively zeroing memory.
- Goroutines cross the JS<>WASM binding, which is very slow. The more goroutines you use, the slower your code runs. When building a Zig project in single-threaded mode, Zig's `comptime` feature compiles away most of the difference.
- Slow startup time: unless you use TinyGo, Go WASM binaries are > 2 MB. In esbuild's case, at the time of writing its 6 MB. That's a lot of code for the web browser to download & compile.
#### Different constraints enable performance improvements
If bundler means "merge N source files into 1 or few source file(s)", esdev is most definitely not a bundler. Unlike most bundlers today, esdev deliberately outputs
If bundler means "turn my development code into something a browser can run",
Much of the code is a line-for-line port of esbuild to Zig. Thank you @evanw for building esbuild - a fantastic ECMAScript & CSS Bundler, and for inspiring this project.
### Compatibility Table
| Feature | esbuild | esdev |
| ------------------------------------ | ------- | ----- |
| JSX (transform) | ✅ | ⌛ |
| TypeScript (transform) | ✅ | ⌛ |
| React Fast Refresh | ❌ | ⌛ |
| Hot Module Reloading | ❌ | ⌛ |
| Minification | ✅ | ❌ |
| Tree Shaking | ✅ | ⌛ |
| Incremental builds | ✅ | ⌛ |
| CSS | ✅ | 🗓️ |
| Expose CSS dependencies per file | ✅ | 🗓️ |
| CommonJS, IIFE, UMD outputs | ✅ | ❌ |
| Node.js build target | ✅ | ❌ |
| Code Splitting | ✅ | ⌛ |
| Browser build target | ✅ | ⌛ |
| Bundling for production | ✅ | ❌ |
| Support older browsers | ✅ | ❌ |
| Plugins | ✅ | 🗓️ |
| AST Plugins | ❌ | ❌ |
| Filesystem Cache API (for plugins) | ❓ | 🗓️ |
| Transform to ESM with `bundle` false | ❓ | ⌛ |
| Feature | Speedy |
| ------------------------------------ | ------ |
| JSX (transform) | ✅ |
| TypeScript (transform) | ⌛ |
| React Fast Refresh | ⌛ |
| Hot Module Reloading | ⌛ |
| Minification | ❌ |
| Tree Shaking | ⌛ |
| Incremental builds | ⌛ |
| CSS | 🗓️ |
| Expose CSS dependencies per file | 🗓️ |
| CommonJS, IIFE, UMD outputs | ❌ |
| Node.js build target | ❌ |
| Code Splitting | ⌛ |
| Browser build target | ⌛ |
| Bundling for production | ❌ |
| Support older browsers | ❌ |
| Plugins | 🗓️ |
| AST Plugins | ❌ |
| Filesystem Cache API (for plugins) | 🗓️ |
| Transform to ESM with `bundle` false | ⌛ |
Key:
@@ -97,22 +71,54 @@ Key:
| 🗓️ | Planned but work has not started |
| ❓ | Unknown |
### Compatibility Table (more info)
| Feature | Speedy |
| -------------------------------- | ------ |
| `browser` in `package.json` | ⌛ |
| main fields in `package.json` | ⌛ |
| `exports` map in `package.json` | 🗓️ |
| `side_effects` in `package.json` | 🗓️ |
| `extends` in `tsconfig.json` | 🗓️ |
#### Notes
##### Hot Module Reloading & React Fast Refresh
esdev exposes a runtime API to support Hot Module Reloading and React Fast Refresh. React Fast Refresh depends on Hot Module Reloading to work, but you can turn either of them off. esdev itself doesn't serve bundled files, it's up to the development server to provide that.
Speedy exposes a runtime API to support Hot Module Reloading and React Fast Refresh. React Fast Refresh depends on Hot Module Reloading to work, but you can turn either of them off. Speedy itself doesn't serve bundled files, it's up to the development server to provide that.
##### Code Splitting
esdev supports code splitting the way browsers do natively: through ES Modules. This works great for local development files. It doesn't work great for node_modules or for production due to the sheer number of network requests. There are plans to make this better, stay tuned.
Speedy supports code splitting the way browsers do natively: through ES Modules. This works great for local development files. It doesn't work great for node_modules or for production due to the sheer number of network requests. There are plans to make this better, stay tuned.
##### Support older browsers
To simplify the parser, esdev doesn't support lowering features to non-current browsers. This means if you run a development build with esdev with, for example, optional chaining, it won't work in Internet Explorer 11. If you want to support older browsers, use a different tool.
To simplify the parser, Speedy doesn't support lowering features to non-current browsers. This means if you run a development build with Speedy with, for example, optional chaining, it won't work in Internet Explorer 11. If you want to support older browsers, use a different tool.
#### Implementation Notes
##### Deviations from other bundlers
Unused imports are removed by default, unless they're an import without an identifier. This is similar to what the TypeScript compiler does, but TypeScript only does it for TypeScript. This is on by default, but you can turn it off.
For example in this code snippet, `forEach` in unused:
```ts
import { forEach, map } from "lodash-es";
const foo = map(["bar", "baz"], (item) => {});
```
So it's never included.
```ts
import { map } from "lodash-es";
const foo = map(["bar", "baz"], (item) => {});
```
If
##### HMR & Fast Refresh implementation
This section only applies when Hot Module Reloading is enabled. When it's off, none of this part runs. React Fast Refresh depends on Hot Module Reloading.
@@ -140,42 +146,6 @@ There are two ways to update references:
Either approach works.
###### How it's implemented in esdev
###### How it's implemented in Speedy
At build time, esdev replaces all import URLs with import manifests that wrap the real module.
In the simple case, that looks like this:
```ts
import { Button as _Button } from "http://localhost:3000/src/components/button.KXk23UX3.js";
export let Button = _Button;
import.meta.onUpdate(import.meta.url, (exports) => {
if ("Button" in exports) {
Button = exports["Button"];
}
});
```
Then, lets say you updated `button.tsx` from this:
```tsx
export const Button = ({ children }) => (
<div className="Button">{children}</div>
);
```
To this:
```tsx
export const Button = ({ children }) => (
<div className="Button">
<div className="Button-label">{children}</div>
</div>
);
```
This triggers the HMR client in esdev to:
1. import `/src/components/button.js` once again
TODO: doc

109
build.zig
View File

@@ -1,4 +1,17 @@
const std = @import("std");
const resolve_path = @import("./src/resolver/resolve_path.zig");
pub fn addPicoHTTP(step: *std.build.LibExeObjStep, dir: []const u8) void {
const picohttp = step.addPackage(.{
.name = "picohttp",
.path = "src/deps/picohttp.zig",
});
step.addObjectFile(
"src/deps/picohttpparser.o",
);
step.addIncludeDir("src/deps");
}
pub fn build(b: *std.build.Builder) void {
// Standard target options allows the person running `zig build` to choose
@@ -11,17 +24,107 @@ pub fn build(b: *std.build.Builder) void {
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
const mode = b.standardReleaseOptions();
var cwd_buf = [_]u8{0} ** 4096;
var cwd = std.os.getcwd(&cwd_buf) catch unreachable;
var exe: *std.build.LibExeObjStep = undefined;
if (target.getCpuArch().isWasm()) {
exe = b.addExecutable("esdev", "src/main_wasm.zig");
var output_dir_buf = std.mem.zeroes([4096]u8);
var bin_label = if (mode == std.builtin.Mode.Debug) "/debug/" else "/";
const output_dir = std.fmt.bufPrint(&output_dir_buf, "build{s}{s}-{s}", .{ bin_label, @tagName(target.getOs().tag), @tagName(target.getCpuArch()) }) catch unreachable;
if (target.getOsTag() == .wasi) {
exe.enable_wasmtime = true;
exe = b.addExecutable("esdev", "src/main_wasi.zig");
exe.is_dynamic = true;
exe.setOutputDir(output_dir);
} else if (target.getCpuArch().isWasm()) {
// exe = b.addExecutable(
// "esdev",
// "src/main_wasm.zig",
// );
// exe.is_linking_libc = false;
// exe.is_dynamic = true;
var lib = b.addExecutable("esdev", "src/main_wasm.zig");
lib.single_threaded = true;
// exe.want_lto = true;
// exe.linkLibrary(lib);
if (mode == std.builtin.Mode.Debug) {
// exception_handling
var features = target.getCpuFeatures();
features.addFeature(2);
target.updateCpuFeatures(&features);
} else {
// lib.strip = true;
}
lib.setOutputDir(output_dir);
lib.want_lto = true;
b.install_path = lib.getOutputPath();
std.debug.print("Build: ./{s}\n", .{lib.getOutputPath()});
b.default_step.dependOn(&lib.step);
b.verbose_link = true;
lib.setTarget(target);
lib.setBuildMode(mode);
std.fs.deleteTreeAbsolute(std.fs.path.join(std.heap.page_allocator, &.{ cwd, lib.getOutputPath() }) catch unreachable) catch {};
var install = b.getInstallStep();
lib.strip = false;
lib.install();
const run_cmd = lib.run();
run_cmd.step.dependOn(b.getInstallStep());
if (b.args) |args| {
run_cmd.addArgs(args);
}
const run_step = b.step("run", "Run the app");
run_step.dependOn(&run_cmd.step);
return;
} else {
exe = b.addExecutable("esdev", "src/main.zig");
exe.linkLibC();
}
// exe.setLibCFile("libc.txt");
exe.linkLibC();
exe.addPackage(.{
.name = "clap",
.path = "src/deps/zig-clap/clap.zig",
});
exe.setOutputDir(output_dir);
std.debug.print("Build: ./{s}\n", .{exe.getOutputPath()});
var walker = std.fs.walkPath(std.heap.page_allocator, cwd) catch unreachable;
if (std.builtin.is_test) {
while (walker.next() catch unreachable) |entry| {
if (std.mem.endsWith(u8, entry.basename, "_test.zig")) {
std.debug.print("[test] Added {s}", .{entry.basename});
_ = b.addTest(entry.path);
}
}
}
exe.setTarget(target);
exe.setBuildMode(mode);
b.install_path = output_dir;
// exe.want_lto = true;
if (!target.getCpuArch().isWasm()) {
// exe.addLibPath("/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib");
// exe.addIncludeDir("/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/");
const env = std.process.getEnvMap(std.heap.c_allocator) catch unreachable;
// if (env.get("SDKROOT")) |sdkroot| {
// const joined = resolve_path.joinAbs2(cwd, .auto, sdkroot, "usr/include");
// const sys = std.heap.c_allocator.dupe(u8, joined) catch unreachable;
// exe.addSystemIncludeDir(sys);
// }
addPicoHTTP(exe, cwd);
}
exe.addLibPath("/usr/local/lib");
exe.install();
const run_cmd = exe.run();

318
out.txt Normal file
View File

@@ -0,0 +1,318 @@
/Users/jarredsumner/Code/esdev: readFile error -- IsDir/Users/jarredsumner/Code/esdev/src/api/demo: readFile error -- IsDir/Users/jarredsumner/Code/esdev/src/api/demo/.: readFile error -- IsDir/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next: readFile error -- IsDir/Users/jarredsumner/Code/esdev/src/api/demo/./pages/..: readFile error -- IsDir/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react: readFile error -- IsDir/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/.: readFile error -- IsDir/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/.: readFile error -- IsDir
error: Cannot read file "/Users/jarredsumner/Code/esdev": IsDir
error: Cannot read file "/Users/jarredsumner/Code/esdev/src/api/demo": IsDir
error: Cannot read file "/Users/jarredsumner/Code/esdev/src/api/demo/.": IsDir
error: Cannot read file "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next": IsDir
error: Cannot read file "/Users/jarredsumner/Code/esdev/src/api/demo/./pages/..": IsDir
error: Cannot read file "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react": IsDir
error: Cannot read file "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/.": IsDir
error: Cannot read file "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/.": IsDir
error: Cannot assign to property on import "newObj"
"use strict";exports.__esModule=true;exports.defaultHead=defaultHead;exports.default=void 0;var _react=_interopRequireWildcard(require("react"));var _sideEffect=_interopRequireDefault(require("./side-effect"));var _ampContext=require("./amp-context");var _headManagerContext=require("./head-manager-context");var _amp=require("./amp");function _interopRequireDefault(obj){return obj&&obj.__esModule?obj:{default:obj};}function _getRequireWildcardCache(){if(typeof WeakMap!=="function")return null;var cache=new WeakMap();_getRequireWildcardCache=function(){return cache;};return cache;}function _interopRequireWildcard(obj){if(obj&&obj.__esModule){return obj;}if(obj===null||typeof obj!=="object"&&typeof obj!=="function"){return{default:obj};}var cache=_getRequireWildcardCache();if(cache&&cache.has(obj)){return cache.get(obj);}var newObj={};var hasPropertyDescriptor=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var key in obj){if(Object.prototype.hasOwnProperty.call(obj,key)){var desc=hasPropertyDescriptor?Object.getOwnPropertyDescriptor(obj,key):null;if(desc&&(desc.get||desc.set)){Object.defineProperty(newObj,key,desc);}else{newObj[key]=obj[key];}}}newObj.default=obj;if(cache){cache.set(obj,newObj);}return newObj;}function defaultHead(inAmpMode=false){const head=[/*#__PURE__*/_react.default.createElement("meta",{charSet:"utf-8"})];if(!inAmpMode){head.push(/*#__PURE__*/_react.default.createElement("meta",{name:"viewport",content:"width=device-width"}));}return head;}function onlyReactElement(list,child){// React children can be "string" or "number" in this case we ignore them for backwards compat
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./dist/next-server/lib/head.js:1:1149 1148
error: Cannot assign to property on import "metaCategories"
switch(h.type){case'title':case'base':if(tags.has(h.type)){isUnique=false;}else{tags.add(h.type);}break;case'meta':for(let i=0,len=METATYPES.length;i<len;i++){const metatype=METATYPES[i];if(!h.props.hasOwnProperty(metatype))continue;if(metatype==='charSet'){if(metaTypes.has(metatype)){isUnique=false;}else{metaTypes.add(metatype);}}else{const category=h.props[metatype];const categories=metaCategories[metatype]||new Set();if((metatype!=='name'||!hasKey)&&categories.has(category)){isUnique=false;}else{categories.add(category);metaCategories[metatype]=categories;}}}break;}return isUnique;};}/**
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./dist/next-server/lib/head.js:8:530 3158
error: Cannot assign to property on import "newProps"
['https://fonts.googleapis.com/css'].some(url=>c.props['href'].startsWith(url))){const newProps={...(c.props||{})};newProps['data-href']=newProps['href'];newProps['href']=undefined;return/*#__PURE__*/_react.default.cloneElement(c,newProps);}}return/*#__PURE__*/_react.default.cloneElement(c,{key});});}/**
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./dist/next-server/lib/head.js:12:116 3986
error: Cannot assign to property on import "newProps"
['https://fonts.googleapis.com/css'].some(url=>c.props['href'].startsWith(url))){const newProps={...(c.props||{})};newProps['data-href']=newProps['href'];newProps['href']=undefined;return/*#__PURE__*/_react.default.cloneElement(c,newProps);}}return/*#__PURE__*/_react.default.cloneElement(c,{key});});}/**
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./dist/next-server/lib/head.js:12:155 4025
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./node_modules/@babel/runtime/helpers": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./node_modules/@babel": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/node_modules/@babel/runtime/helpers": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/node_modules/@babel": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/@babel/runtime/helpers": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/@babel/runtime": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/node_modules/@babel/runtime/helpers": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/node_modules/@babel/runtime": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/node_modules/@babel/runtime/helpers": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/node_modules/@babel/runtime": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./node_modules/@babel/runtime/helpers": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/node_modules/@babel/runtime/helpers": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/@babel/runtime/helpers": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/node_modules/@babel/runtime/helpers": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/node_modules/@babel/runtime/helpers": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./node_modules/react": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/node_modules/react": FileNotFound
error: Expected ")" but found ,
function J(a,b,c){var e,d={},k=null,h=null;if(null!=b)for(e in void 0!==b.ref&&(h=b.ref),void 0!==b.key&&(k=""+b.key),b)H.call(b,e)&&!I.hasOwnProperty(e)&&(d[e]=b[e]);var g=arguments.length-2;if(1===g)d.children=c;else if(1<g){for(var f=Array(g),m=0;m<g;m++)f[m]=arguments[m+2];d.children=f}if(a&&a.defaultProps)for(e in g=a.defaultProps,g)void 0===d[e]&&(d[e]=g[e]);return{$$typeof:n,type:a,key:k,ref:h,props:d,_owner:G.current}}
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.production.min.js:14:89 2171
error: Expected ";" but found )
function J(a,b,c){var e,d={},k=null,h=null;if(null!=b)for(e in void 0!==b.ref&&(h=b.ref),void 0!==b.key&&(k=""+b.key),b)H.call(b,e)&&!I.hasOwnProperty(e)&&(d[e]=b[e]);var g=arguments.length-2;if(1===g)d.children=c;else if(1<g){for(var f=Array(g),m=0;m<g;m++)f[m]=arguments[m+2];d.children=f}if(a&&a.defaultProps)for(e in g=a.defaultProps,g)void 0===d[e]&&(d[e]=g[e]);return{$$typeof:n,type:a,key:k,ref:h,props:d,_owner:G.current}}
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.production.min.js:14:120 2202
error: Expected ")" but found ,
function J(a,b,c){var e,d={},k=null,h=null;if(null!=b)for(e in void 0!==b.ref&&(h=b.ref),void 0!==b.key&&(k=""+b.key),b)H.call(b,e)&&!I.hasOwnProperty(e)&&(d[e]=b[e]);var g=arguments.length-2;if(1===g)d.children=c;else if(1<g){for(var f=Array(g),m=0;m<g;m++)f[m]=arguments[m+2];d.children=f}if(a&&a.defaultProps)for(e in g=a.defaultProps,g)void 0===d[e]&&(d[e]=g[e]);return{$$typeof:n,type:a,key:k,ref:h,props:d,_owner:G.current}}
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.production.min.js:14:338 2420
error: Expected ";" but found )
function J(a,b,c){var e,d={},k=null,h=null;if(null!=b)for(e in void 0!==b.ref&&(h=b.ref),void 0!==b.key&&(k=""+b.key),b)H.call(b,e)&&!I.hasOwnProperty(e)&&(d[e]=b[e]);var g=arguments.length-2;if(1===g)d.children=c;else if(1<g){for(var f=Array(g),m=0;m<g;m++)f[m]=arguments[m+2];d.children=f}if(a&&a.defaultProps)for(e in g=a.defaultProps,g)void 0===d[e]&&(d[e]=g[e]);return{$$typeof:n,type:a,key:k,ref:h,props:d,_owner:G.current}}
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.production.min.js:14:340 2422
error: Cannot assign to property on import "d"
function J(a,b,c){var e,d={},k=null,h=null;if(null!=b)for(e in void 0!==b.ref&&(h=b.ref),void 0!==b.key&&(k=""+b.key),b)H.call(b,e)&&!I.hasOwnProperty(e)&&(d[e]=b[e]);var g=arguments.length-2;if(1===g)d.children=c;else if(1<g){for(var f=Array(g),m=0;m<g;m++)f[m]=arguments[m+2];d.children=f}if(a&&a.defaultProps)for(e in g=a.defaultProps,g)void 0===d[e]&&(d[e]=g[e]);return{$$typeof:n,type:a,key:k,ref:h,props:d,_owner:G.current}}
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.production.min.js:14:157 2239
error: Cannot assign to property on import "f"
function J(a,b,c){var e,d={},k=null,h=null;if(null!=b)for(e in void 0!==b.ref&&(h=b.ref),void 0!==b.key&&(k=""+b.key),b)H.call(b,e)&&!I.hasOwnProperty(e)&&(d[e]=b[e]);var g=arguments.length-2;if(1===g)d.children=c;else if(1<g){for(var f=Array(g),m=0;m<g;m++)f[m]=arguments[m+2];d.children=f}if(a&&a.defaultProps)for(e in g=a.defaultProps,g)void 0===d[e]&&(d[e]=g[e]);return{$$typeof:n,type:a,key:k,ref:h,props:d,_owner:G.current}}
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.production.min.js:14:259 2341
error: Cannot assign to property on import "d"
function J(a,b,c){var e,d={},k=null,h=null;if(null!=b)for(e in void 0!==b.ref&&(h=b.ref),void 0!==b.key&&(k=""+b.key),b)H.call(b,e)&&!I.hasOwnProperty(e)&&(d[e]=b[e]);var g=arguments.length-2;if(1===g)d.children=c;else if(1<g){for(var f=Array(g),m=0;m<g;m++)f[m]=arguments[m+2];d.children=f}if(a&&a.defaultProps)for(e in g=a.defaultProps,g)void 0===d[e]&&(d[e]=g[e]);return{$$typeof:n,type:a,key:k,ref:h,props:d,_owner:G.current}}
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.production.min.js:14:357 2439
error: Cannot assign to property on import "e"
exports.cloneElement=function(a,b,c){if(null===a||void 0===a)throw Error(z(267,a));var e=l({},a.props),d=a.key,k=a.ref,h=a._owner;if(null!=b){void 0!==b.ref&&(k=b.ref,h=G.current);void 0!==b.key&&(d=""+b.key);if(a.type&&a.type.defaultProps)var g=a.type.defaultProps;for(f in b)H.call(b,f)&&!I.hasOwnProperty(f)&&(e[f]=void 0===b[f]&&void 0!==g?g[f]:b[f])}var f=arguments.length-2;if(1===f)e.children=c;else if(1<f){g=Array(f);for(var m=0;m<f;m++)g[m]=arguments[m+2];e.children=g}return{$$typeof:n,type:a.type,
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.production.min.js:20:314 4973
error: Cannot assign to property on import "g"
exports.cloneElement=function(a,b,c){if(null===a||void 0===a)throw Error(z(267,a));var e=l({},a.props),d=a.key,k=a.ref,h=a._owner;if(null!=b){void 0!==b.ref&&(k=b.ref,h=G.current);void 0!==b.key&&(d=""+b.key);if(a.type&&a.type.defaultProps)var g=a.type.defaultProps;for(f in b)H.call(b,f)&&!I.hasOwnProperty(f)&&(e[f]=void 0===b[f]&&void 0!==g?g[f]:b[f])}var f=arguments.length-2;if(1===f)e.children=c;else if(1<f){g=Array(f);for(var m=0;m<f;m++)g[m]=arguments[m+2];e.children=g}return{$$typeof:n,type:a.type,
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.production.min.js:20:447 5106
error: Cannot assign to property on import "args"
args[_key - 1] = arguments[_key];
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:184:7 5256
error: Cannot assign to property on import "args"
args[_key2 - 1] = arguments[_key2];
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:193:7 5499
error: Cannot assign to property on import "didWarnStateUpdateForUnmountedComponent"
didWarnStateUpdateForUnmountedComponent[warningKey] = true;
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:238:5 7144
error: Cannot assign to property on import "didWarnAboutStringRefs"
didWarnAboutStringRefs[componentName] = true;
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:622:9 19312
error: Cannot assign to property on import "props"
props[propName] = config[propName];
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:734:9 22825
error: Cannot assign to property on import "childArray"
childArray[i] = arguments[i + 2];
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:749:7 23234
error: Cannot assign to property on import "props"
props[propName] = defaultProps[propName];
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:767:9 23588
error: Cannot assign to property on import "props"
props[propName] = defaultProps[propName];
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:842:11 25862
error: Cannot assign to property on import "props"
props[propName] = config[propName];
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:844:11 25931
error: Cannot assign to property on import "childArray"
childArray[i] = arguments[i + 2];
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:860:7 26350
error: Cannot use "break" here
break;
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:1790:11 55454
error: Cannot assign to property on import "loggedTypeFailures"
loggedTypeFailures[error$1.message] = true;
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:1932:11 60269
error: Cannot assign to property on import "ownerHasKeyUseWarning"
ownerHasKeyUseWarning[currentComponentErrorInfo] = true; // Usually the current owner is the offender, but if it accepts children as a
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:2038:3 62986
error: Cannot use "break" here
break;
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./cjs/react.development.js:2169:9 67164
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/./node_modules/object-assign": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/react/node_modules/object-assign": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/object-assign": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/src/api/demo/node_modules/object-assign": FileNotFound
error: Cannot read directory "/Users/jarredsumner/Code/esdev/node_modules/object-assign": FileNotFound
error: Unexpected "super"
"use strict";exports.__esModule=true;exports.default=void 0;var _react=require("react");const isServer=typeof window==='undefined';class _default extends _react.Component{constructor(props){super(props);this._hasHeadManager=void 0;this.emitChange=()=>{if(this._hasHeadManager){this.props.headManager.updateHead(this.props.reduceComponentsToState([...this.props.headManager.mountedInstances],this.props));}};this._hasHeadManager=this.props.headManager&&this.props.headManager.mountedInstances;if(isServer&&this._hasHeadManager){this.props.headManager.mountedInstances.add(this);this.emitChange();}}componentDidMount(){if(this._hasHeadManager){this.props.headManager.mountedInstances.add(this);}this.emitChange();}componentDidUpdate(){this.emitChange();}componentWillUnmount(){if(this._hasHeadManager){this.props.headManager.mountedInstances.delete(this);}this.emitChange();}render(){return null;}}exports.default=_default;
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./dist/next-server/lib/./side-effect.js:1:191 190
error: Cannot assign to property on import "newObj"
"use strict";exports.__esModule=true;exports.defaultHead=defaultHead;exports.default=void 0;var _react=_interopRequireWildcard(require("react"));var _sideEffect=_interopRequireDefault(require("./side-effect"));var _ampContext=require("./amp-context");var _headManagerContext=require("./head-manager-context");var _amp=require("./amp");function _interopRequireDefault(obj){return obj&&obj.__esModule?obj:{default:obj};}function _getRequireWildcardCache(){if(typeof WeakMap!=="function")return null;var cache=new WeakMap();_getRequireWildcardCache=function(){return cache;};return cache;}function _interopRequireWildcard(obj){if(obj&&obj.__esModule){return obj;}if(obj===null||typeof obj!=="object"&&typeof obj!=="function"){return{default:obj};}var cache=_getRequireWildcardCache();if(cache&&cache.has(obj)){return cache.get(obj);}var newObj={};var hasPropertyDescriptor=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var key in obj){if(Object.prototype.hasOwnProperty.call(obj,key)){var desc=hasPropertyDescriptor?Object.getOwnPropertyDescriptor(obj,key):null;if(desc&&(desc.get||desc.set)){Object.defineProperty(newObj,key,desc);}else{newObj[key]=obj[key];}}}newObj.default=obj;if(cache){cache.set(obj,newObj);}return newObj;}function defaultHead(inAmpMode=false){const head=[/*#__PURE__*/_react.default.createElement("meta",{charSet:"utf-8"})];if(!inAmpMode){head.push(/*#__PURE__*/_react.default.createElement("meta",{name:"viewport",content:"width=device-width"}));}return head;}function onlyReactElement(list,child){// React children can be "string" or "number" in this case we ignore them for backwards compat
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./dist/client/../next-server/lib/head.js:1:1149 1148
error: Cannot assign to property on import "metaCategories"
switch(h.type){case'title':case'base':if(tags.has(h.type)){isUnique=false;}else{tags.add(h.type);}break;case'meta':for(let i=0,len=METATYPES.length;i<len;i++){const metatype=METATYPES[i];if(!h.props.hasOwnProperty(metatype))continue;if(metatype==='charSet'){if(metaTypes.has(metatype)){isUnique=false;}else{metaTypes.add(metatype);}}else{const category=h.props[metatype];const categories=metaCategories[metatype]||new Set();if((metatype!=='name'||!hasKey)&&categories.has(category)){isUnique=false;}else{categories.add(category);metaCategories[metatype]=categories;}}}break;}return isUnique;};}/**
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./dist/client/../next-server/lib/head.js:8:530 3158
error: Cannot assign to property on import "newProps"
['https://fonts.googleapis.com/css'].some(url=>c.props['href'].startsWith(url))){const newProps={...(c.props||{})};newProps['data-href']=newProps['href'];newProps['href']=undefined;return/*#__PURE__*/_react.default.cloneElement(c,newProps);}}return/*#__PURE__*/_react.default.cloneElement(c,{key});});}/**
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./dist/client/../next-server/lib/head.js:12:116 3986
error: Cannot assign to property on import "newProps"
['https://fonts.googleapis.com/css'].some(url=>c.props['href'].startsWith(url))){const newProps={...(c.props||{})};newProps['data-href']=newProps['href'];newProps['href']=undefined;return/*#__PURE__*/_react.default.cloneElement(c,newProps);}}return/*#__PURE__*/_react.default.cloneElement(c,{key});});}/**
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./dist/client/../next-server/lib/head.js:12:155 4025
error: Unexpected "super"
"use strict";exports.__esModule=true;exports.default=void 0;var _react=require("react");const isServer=typeof window==='undefined';class _default extends _react.Component{constructor(props){super(props);this._hasHeadManager=void 0;this.emitChange=()=>{if(this._hasHeadManager){this.props.headManager.updateHead(this.props.reduceComponentsToState([...this.props.headManager.mountedInstances],this.props));}};this._hasHeadManager=this.props.headManager&&this.props.headManager.mountedInstances;if(isServer&&this._hasHeadManager){this.props.headManager.mountedInstances.add(this);this.emitChange();}}componentDidMount(){if(this._hasHeadManager){this.props.headManager.mountedInstances.add(this);}this.emitChange();}componentDidUpdate(){this.emitChange();}componentWillUnmount(){if(this._hasHeadManager){this.props.headManager.mountedInstances.delete(this);}this.emitChange();}render(){return null;}}exports.default=_default;
/Users/jarredsumner/Code/esdev/src/api/demo/./node_modules/next/./dist/client/../next-server/lib/./side-effect.js:1:191 190

113
outdir/index.css Normal file
View File

@@ -0,0 +1,113 @@
/* src/api/demo/styles/Home.module.css */
.container {
min-height: 100vh;
padding: 0 0.5rem;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 100vh;
}
.main {
padding: 5rem 0;
flex: 1;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
}
.footer {
width: 100%;
height: 100px;
border-top: 1px solid #eaeaea;
display: flex;
justify-content: center;
align-items: center;
}
.footer a {
display: flex;
justify-content: center;
align-items: center;
flex-grow: 1;
}
.title a {
color: #0070f3;
text-decoration: none;
}
.title a:hover,
.title a:focus,
.title a:active {
text-decoration: underline;
}
.title {
margin: 0;
line-height: 1.15;
font-size: 4rem;
}
.title,
.description {
text-align: center;
}
.description {
line-height: 1.5;
font-size: 1.5rem;
}
.code {
background: #fafafa;
border-radius: 5px;
padding: 0.75rem;
font-size: 1.1rem;
font-family:
Menlo,
Monaco,
Lucida Console,
Liberation Mono,
DejaVu Sans Mono,
Bitstream Vera Sans Mono,
Courier New,
monospace;
}
.grid {
display: flex;
align-items: center;
justify-content: center;
flex-wrap: wrap;
max-width: 800px;
margin-top: 3rem;
}
.card {
margin: 1rem;
padding: 1.5rem;
text-align: left;
color: inherit;
text-decoration: none;
border: 1px solid #eaeaea;
border-radius: 10px;
transition: color 0.15s ease, border-color 0.15s ease;
width: 45%;
}
.card:hover,
.card:focus,
.card:active {
color: #0070f3;
border-color: #0070f3;
}
.card h2 {
margin: 0 0 1rem 0;
font-size: 1.5rem;
}
.card p {
margin: 0;
font-size: 1.25rem;
line-height: 1.5;
}
.logo {
height: 1em;
margin-left: 0.5rem;
}
@media (max-width: 600px) {
.grid {
width: 100%;
flex-direction: column;
}
}

2338
outdir/index.js Normal file

File diff suppressed because it is too large Load Diff

336
pnpm-lock.yaml generated Normal file
View File

@@ -0,0 +1,336 @@
lockfileVersion: 5.3
specifiers:
'@babel/preset-react': ^7.13.13
'@swc/cli': ^0.1.39
'@swc/core': ^1.2.55
'@swc/wasm': ^1.2.54
esbuild-wasm: ^0.11.19
dependencies:
'@babel/preset-react': 7.13.13
'@swc/cli': 0.1.39_@swc+core@1.2.55
'@swc/core': 1.2.55
'@swc/wasm': 1.2.55
esbuild-wasm: 0.11.19
packages:
/@babel/helper-annotate-as-pure/7.12.13:
resolution: {integrity: sha512-7YXfX5wQ5aYM/BOlbSccHDbuXXFPxeoUmfWtz8le2yTkTZc+BxsiEnENFoi2SlmA8ewDkG2LgIMIVzzn2h8kfw==}
dependencies:
'@babel/types': 7.14.1
dev: false
/@babel/helper-module-imports/7.13.12:
resolution: {integrity: sha512-4cVvR2/1B693IuOvSI20xqqa/+bl7lqAMR59R4iu39R9aOX8/JoYY1sFaNvUMyMBGnHdwvJgUrzNLoUZxXypxA==}
dependencies:
'@babel/types': 7.14.1
dev: false
/@babel/helper-plugin-utils/7.13.0:
resolution: {integrity: sha512-ZPafIPSwzUlAoWT8DKs1W2VyF2gOWthGd5NGFMsBcMMol+ZhK+EQY/e6V96poa6PA/Bh+C9plWN0hXO1uB8AfQ==}
dev: false
/@babel/helper-validator-identifier/7.14.0:
resolution: {integrity: sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A==}
dev: false
/@babel/helper-validator-option/7.12.17:
resolution: {integrity: sha512-TopkMDmLzq8ngChwRlyjR6raKD6gMSae4JdYDB8bByKreQgG0RBTuKe9LRxW3wFtUnjxOPRKBDwEH6Mg5KeDfw==}
dev: false
/@babel/plugin-syntax-jsx/7.12.13:
resolution: {integrity: sha512-d4HM23Q1K7oq/SLNmG6mRt85l2csmQ0cHRaxRXjKW0YFdEXqlZ5kzFQKH5Uc3rDJECgu+yCRgPkG04Mm98R/1g==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/helper-plugin-utils': 7.13.0
dev: false
/@babel/plugin-transform-react-display-name/7.12.13:
resolution: {integrity: sha512-MprESJzI9O5VnJZrL7gg1MpdqmiFcUv41Jc7SahxYsNP2kDkFqClxxTZq+1Qv4AFCamm+GXMRDQINNn+qrxmiA==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/helper-plugin-utils': 7.13.0
dev: false
/@babel/plugin-transform-react-jsx-development/7.12.17:
resolution: {integrity: sha512-BPjYV86SVuOaudFhsJR1zjgxxOhJDt6JHNoD48DxWEIxUCAMjV1ys6DYw4SDYZh0b1QsS2vfIA9t/ZsQGsDOUQ==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/plugin-transform-react-jsx': 7.13.12
dev: false
/@babel/plugin-transform-react-jsx/7.13.12:
resolution: {integrity: sha512-jcEI2UqIcpCqB5U5DRxIl0tQEProI2gcu+g8VTIqxLO5Iidojb4d77q+fwGseCvd8af/lJ9masp4QWzBXFE2xA==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/helper-annotate-as-pure': 7.12.13
'@babel/helper-module-imports': 7.13.12
'@babel/helper-plugin-utils': 7.13.0
'@babel/plugin-syntax-jsx': 7.12.13
'@babel/types': 7.14.1
dev: false
/@babel/plugin-transform-react-pure-annotations/7.12.1:
resolution: {integrity: sha512-RqeaHiwZtphSIUZ5I85PEH19LOSzxfuEazoY7/pWASCAIBuATQzpSVD+eT6MebeeZT2F4eSL0u4vw6n4Nm0Mjg==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/helper-annotate-as-pure': 7.12.13
'@babel/helper-plugin-utils': 7.13.0
dev: false
/@babel/preset-react/7.13.13:
resolution: {integrity: sha512-gx+tDLIE06sRjKJkVtpZ/t3mzCDOnPG+ggHZG9lffUbX8+wC739x20YQc9V35Do6ZAxaUc/HhVHIiOzz5MvDmA==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/helper-plugin-utils': 7.13.0
'@babel/helper-validator-option': 7.12.17
'@babel/plugin-transform-react-display-name': 7.12.13
'@babel/plugin-transform-react-jsx': 7.13.12
'@babel/plugin-transform-react-jsx-development': 7.12.17
'@babel/plugin-transform-react-pure-annotations': 7.12.1
dev: false
/@babel/types/7.14.1:
resolution: {integrity: sha512-S13Qe85fzLs3gYRUnrpyeIrBJIMYv33qSTg1qoBwiG6nPKwUWAD9odSzWhEedpwOIzSEI6gbdQIWEMiCI42iBA==}
dependencies:
'@babel/helper-validator-identifier': 7.14.0
to-fast-properties: 2.0.0
dev: false
/@napi-rs/triples/1.0.2:
resolution: {integrity: sha512-EL3SiX43m9poFSnhDx4d4fn9SSaqyO2rHsCNhETi9bWPmjXK3uPJ0QpPFtx39FEdHcz1vJmsiW41kqc0AgvtzQ==}
dev: false
/@node-rs/helper/1.1.0:
resolution: {integrity: sha512-r43YnnrY5JNzDuXJdW3sBJrKzvejvFmFWbiItUEoBJsaPzOIWFMhXB7i5j4c9EMXcFfxveF4l7hT+rLmwtjrVQ==}
dependencies:
'@napi-rs/triples': 1.0.2
tslib: 2.2.0
dev: false
/@swc/cli/0.1.39_@swc+core@1.2.55:
resolution: {integrity: sha512-qTI+HIjSgKUJUKZ3xGA6zAEkHryirmKrzj4zWrCg4FQnAEFGPOIx58/qRs3aURSOS3BnbVE33sqAxEN+v8qZpw==}
engines: {node: '>= 12.13'}
hasBin: true
peerDependencies:
'@swc/core': ^1.2.4
chokidar: ^3.0.0
peerDependenciesMeta:
chokidar:
optional: true
dependencies:
'@swc/core': 1.2.55
commander: 7.2.0
convert-source-map: 1.7.0
glob: 7.1.7
lodash: 4.17.21
slash: 3.0.0
source-map: 0.7.3
dev: false
/@swc/core-android-arm64/1.2.56:
resolution: {integrity: sha512-yXiqbuEnpotpYdGL8rFvRQzkK7JQ1rhZAdGTcCvwUF7L8Ujm1NxJlrNaiMiK7uKvCYOynwe32Ddykaew8ggEFQ==}
engines: {node: '>=10'}
cpu: [arm64]
os: [android]
dev: false
optional: true
/@swc/core-darwin-arm64/1.2.56:
resolution: {integrity: sha512-Ub74q6rKxJy909mXoBJQ7dF5dUJnqrq3XpGHWexv3WUr7C/sTbcwZDwgFMqgDHOf0TSPTge+qwPNOIxcSYv/Kg==}
engines: {node: '>=10'}
cpu: [arm64]
os: [darwin]
dev: false
optional: true
/@swc/core-darwin-x64/1.2.56:
resolution: {integrity: sha512-vxHo9eAyEVykTXM9tJGOYdlsxWq43po5mDeB1dEEjdwefpRCeV+xv3xL6GfVxoVn26w+LZgT4R+BpP0Hx7kATQ==}
engines: {node: '>=10'}
cpu: [x64]
os: [darwin]
dev: false
optional: true
/@swc/core-linux-arm-gnueabihf/1.2.56:
resolution: {integrity: sha512-Chmj/OQB1ie/UY5Cdt9e8VkUTE5lDAPGg4eN2O71j0UlZux3TwR+L/tiGuS9S87lqF9qtZAmZ+WTldeiVFdVqQ==}
engines: {node: '>=10'}
cpu: [arm]
os: [linux]
dev: false
optional: true
/@swc/core-linux-arm64-gnu/1.2.56:
resolution: {integrity: sha512-WCze10brrFmWrJUKmmZVQPfgVnfkvfXbKbs24cgjFSzsV2iBZ4/NVqe+5covYTOkaFvnrqERHqq+ntm1wjDT1A==}
engines: {node: '>=10'}
cpu: [arm64]
os: [linux]
dev: false
optional: true
/@swc/core-linux-x64-gnu/1.2.56:
resolution: {integrity: sha512-B+Rr6NXUNe8RmgBNEh3ATZt77muFssaXbzIYTn+Yovw/s+xh27TFHaoZkfKJFNY/uWxL3S22ZVAxv5ugwS4++g==}
engines: {node: '>=10'}
cpu: [x64]
os: [linux]
dev: false
optional: true
/@swc/core-linux-x64-musl/1.2.56:
resolution: {integrity: sha512-W1BA8Zjz4pkFmAg3PqKsdTyySkJcUiPWi18Ok0qBx2xemgkEKpERpwI51NwWm3YQUSJKTH2MFiwfDLtCE+Ieng==}
engines: {node: '>=10'}
cpu: [x64]
os: [linux]
dev: false
optional: true
/@swc/core-win32-ia32-msvc/1.2.56:
resolution: {integrity: sha512-sSpruAaA3y0CXO1yMPfDxo4p9wtrS7cVOM7P9IryKIUGZBtoM3U0W2NAUE3h5GNrx7xv2GBxqtzfoYW6I8T9bw==}
engines: {node: '>=10'}
cpu: [ia32]
os: [win32]
dev: false
optional: true
/@swc/core-win32-x64-msvc/1.2.56:
resolution: {integrity: sha512-eSqajMZ6fAfHAy1h9Bh8oN90faCy3zsj3VcgjhEbJQnjUIN32eOLlWb70pAb58ckP+c2pBejaRuRElVjaViVjw==}
engines: {node: '>=10'}
cpu: [x64]
os: [win32]
dev: false
optional: true
/@swc/core/1.2.55:
resolution: {integrity: sha512-ZtyxJ0IT0dv4jq0oPrlQytRN9HoSocT5Xig6y/Yx28uFRGJOlqaP1NrkNyZhB65c29gwXoedxN54uVqmXe+aFQ==}
engines: {node: '>=10'}
dependencies:
'@node-rs/helper': 1.1.0
optionalDependencies:
'@swc/core-android-arm64': 1.2.56
'@swc/core-darwin-arm64': 1.2.56
'@swc/core-darwin-x64': 1.2.56
'@swc/core-linux-arm-gnueabihf': 1.2.56
'@swc/core-linux-arm64-gnu': 1.2.56
'@swc/core-linux-x64-gnu': 1.2.56
'@swc/core-linux-x64-musl': 1.2.56
'@swc/core-win32-ia32-msvc': 1.2.56
'@swc/core-win32-x64-msvc': 1.2.56
dev: false
/@swc/wasm/1.2.55:
resolution: {integrity: sha512-otrxYNDmKSKVK8QVsGynACyvSL8XOYYXsh7cyaXPSKGnTTPjeWhYvI1d5uFnZyASfFXUpk1eFEE6AMJWIwKJhA==}
dev: false
/balanced-match/1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
dev: false
/brace-expansion/1.1.11:
resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==}
dependencies:
balanced-match: 1.0.2
concat-map: 0.0.1
dev: false
/commander/7.2.0:
resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==}
engines: {node: '>= 10'}
dev: false
/concat-map/0.0.1:
resolution: {integrity: sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=}
dev: false
/convert-source-map/1.7.0:
resolution: {integrity: sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==}
dependencies:
safe-buffer: 5.1.2
dev: false
/esbuild-wasm/0.11.19:
resolution: {integrity: sha512-d4s3fcIBG9CL/h5kKfXHpkztyMhs71anqdszND1Zfr4na1bhMGAb+VyEMBbt2/0ft5HtcsOYBqXsjNPNWTC29w==}
engines: {node: '>=8'}
hasBin: true
dev: false
/fs.realpath/1.0.0:
resolution: {integrity: sha1-FQStJSMVjKpA20onh8sBQRmU6k8=}
dev: false
/glob/7.1.7:
resolution: {integrity: sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==}
dependencies:
fs.realpath: 1.0.0
inflight: 1.0.6
inherits: 2.0.4
minimatch: 3.0.4
once: 1.4.0
path-is-absolute: 1.0.1
dev: false
/inflight/1.0.6:
resolution: {integrity: sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=}
dependencies:
once: 1.4.0
wrappy: 1.0.2
dev: false
/inherits/2.0.4:
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
dev: false
/lodash/4.17.21:
resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
dev: false
/minimatch/3.0.4:
resolution: {integrity: sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==}
dependencies:
brace-expansion: 1.1.11
dev: false
/once/1.4.0:
resolution: {integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E=}
dependencies:
wrappy: 1.0.2
dev: false
/path-is-absolute/1.0.1:
resolution: {integrity: sha1-F0uSaHNVNP+8es5r9TpanhtcX18=}
engines: {node: '>=0.10.0'}
dev: false
/safe-buffer/5.1.2:
resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
dev: false
/slash/3.0.0:
resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
engines: {node: '>=8'}
dev: false
/source-map/0.7.3:
resolution: {integrity: sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==}
engines: {node: '>= 8'}
dev: false
/to-fast-properties/2.0.0:
resolution: {integrity: sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=}
engines: {node: '>=4'}
dev: false
/tslib/2.2.0:
resolution: {integrity: sha512-gS9GVHRU+RGn5KQM2rllAlR3dU6m7AcpJKdtH8gFvQiC4Otgk98XnmMU+nZenHt/+VhnBPWwgrJsyrdcw6i23w==}
dev: false
/wrappy/1.0.2:
resolution: {integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=}
dev: false

1
profile.json Normal file

File diff suppressed because one or more lines are too long

View File

@@ -1,22 +1,17 @@
const std = @import("std");
const builtin = @import("builtin");
const STATIC_MEMORY_SIZE = 256000;
pub var static_manager: ?std.heap.FixedBufferAllocator = null;
pub var dynamic_manager: ?std.heap.ArenaAllocator = null;
pub var root_manager: ?std.heap.ArenaAllocator = null;
pub var static_manager: ?std.heap.ArenaAllocator = null;
pub var root_manager: ?RootAlloc = null;
pub var needs_setup: bool = true;
pub var static: *std.mem.Allocator = undefined;
pub var dynamic: *std.mem.Allocator = undefined;
pub fn setup(root: *std.mem.Allocator) !void {
root_manager = std.heap.ArenaAllocator.init(root);
var buf = try root_manager.?.child_allocator.alloc(u8, STATIC_MEMORY_SIZE);
dynamic_manager = std.heap.ArenaAllocator.init(root_manager.?.child_allocator);
static_manager = std.heap.FixedBufferAllocator.init(buf);
static = root_manager.?.child_allocator;
dynamic_manager = std.heap.ArenaAllocator.init(root);
dynamic = dynamic_manager.?.child_allocator;
needs_setup = false;
static = root;
dynamic = root;
// static = @ptrCast(*std.mem.Allocator, &stat.allocator);
}
@@ -25,3 +20,177 @@ test "GlobalAllocator" {
var testType = try static.alloc(u8, 10);
testType[1] = 1;
}
pub const HunkSide = struct {
pub const VTable = struct {
alloc: fn (self: *Hunk, n: usize, alignment: u29) std.mem.Allocator.Error![]u8,
getMark: fn (self: *Hunk) usize,
freeToMark: fn (self: *Hunk, pos: usize) void,
};
hunk: *Hunk,
vtable: *const VTable,
allocator: std.mem.Allocator,
pub fn init(hunk: *Hunk, vtable: *const VTable) HunkSide {
return .{
.hunk = hunk,
.vtable = vtable,
.allocator = .{
.allocFn = allocFn,
.resizeFn = resizeFn,
},
};
}
pub fn getMark(self: HunkSide) usize {
return self.vtable.getMark(self.hunk);
}
pub fn freeToMark(self: HunkSide, pos: usize) void {
self.vtable.freeToMark(self.hunk, pos);
}
fn allocFn(allocator: *std.mem.Allocator, len: usize, ptr_align: u29, len_align: u29, ret_addr: usize) std.mem.Allocator.Error![]u8 {
const self = @fieldParentPtr(HunkSide, "allocator", allocator);
return try self.vtable.alloc(self.hunk, len, ptr_align);
}
fn resizeFn(allocator: *std.mem.Allocator, old_mem: []u8, old_align: u29, new_size: usize, len_align: u29, ret_addr: usize) std.mem.Allocator.Error!usize {
if (new_size > old_mem.len) {
return error.OutOfMemory;
}
if (new_size == 0) {
return 0;
}
return std.mem.alignAllocLen(old_mem.len, new_size, len_align);
}
};
pub const Hunk = struct {
low_used: usize,
high_used: usize,
buffer: []u8,
pub fn init(buffer: []u8) Hunk {
return .{
.low_used = 0,
.high_used = 0,
.buffer = buffer,
};
}
pub fn low(self: *Hunk) HunkSide {
const GlobalStorage = struct {
const vtable: HunkSide.VTable = .{
.alloc = allocLow,
.getMark = getLowMark,
.freeToMark = freeToLowMark,
};
};
return HunkSide.init(self, &GlobalStorage.vtable);
}
pub fn high(self: *Hunk) HunkSide {
const GlobalStorage = struct {
const vtable: HunkSide.VTable = .{
.alloc = allocHigh,
.getMark = getHighMark,
.freeToMark = freeToHighMark,
};
};
return HunkSide.init(self, &GlobalStorage.vtable);
}
pub fn allocLow(self: *Hunk, n: usize, alignment: u29) ![]u8 {
const start = @ptrToInt(self.buffer.ptr);
const adjusted_index = std.mem.alignForward(start + self.low_used, alignment) - start;
const new_low_used = adjusted_index + n;
if (new_low_used > self.buffer.len - self.high_used) {
return error.OutOfMemory;
}
const result = self.buffer[adjusted_index..new_low_used];
self.low_used = new_low_used;
return result;
}
pub fn allocHigh(self: *Hunk, n: usize, alignment: u29) ![]u8 {
const addr = @ptrToInt(self.buffer.ptr) + self.buffer.len - self.high_used;
const rem = @rem(addr, alignment);
const march_backward_bytes = rem;
const adjusted_index = self.high_used + march_backward_bytes;
const new_high_used = adjusted_index + n;
if (new_high_used > self.buffer.len - self.low_used) {
return error.OutOfMemory;
}
const start = self.buffer.len - adjusted_index - n;
const result = self.buffer[start .. start + n];
self.high_used = new_high_used;
return result;
}
pub fn getLowMark(self: *Hunk) usize {
return self.low_used;
}
pub fn getHighMark(self: *Hunk) usize {
return self.high_used;
}
pub fn freeToLowMark(self: *Hunk, pos: usize) void {
std.debug.assert(pos <= self.low_used);
if (pos < self.low_used) {
if (std.builtin.mode == std.builtin.Mode.Debug) {
std.mem.set(u8, self.buffer[pos..self.low_used], 0xcc);
}
self.low_used = pos;
}
}
pub fn freeToHighMark(self: *Hunk, pos: usize) void {
std.debug.assert(pos <= self.high_used);
if (pos < self.high_used) {
if (std.builtin.mode == std.builtin.Mode.Debug) {
const i = self.buffer.len - self.high_used;
const n = self.high_used - pos;
std.mem.set(u8, self.buffer[i .. i + n], 0xcc);
}
self.high_used = pos;
}
}
};
test "Hunk" {
// test a few random operations. very low coverage. write more later
var buf: [100]u8 = undefined;
var hunk = Hunk.init(buf[0..]);
const high_mark = hunk.getHighMark();
_ = try hunk.low().allocator.alloc(u8, 7);
_ = try hunk.high().allocator.alloc(u8, 8);
std.testing.expectEqual(@as(usize, 7), hunk.low_used);
std.testing.expectEqual(@as(usize, 8), hunk.high_used);
_ = try hunk.high().allocator.alloc(u8, 8);
std.testing.expectEqual(@as(usize, 16), hunk.high_used);
const low_mark = hunk.getLowMark();
_ = try hunk.low().allocator.alloc(u8, 100 - 7 - 16);
std.testing.expectEqual(@as(usize, 100 - 16), hunk.low_used);
std.testing.expectError(error.OutOfMemory, hunk.high().allocator.alloc(u8, 1));
hunk.freeToLowMark(low_mark);
_ = try hunk.high().allocator.alloc(u8, 1);
hunk.freeToHighMark(high_mark);
std.testing.expectEqual(@as(usize, 0), hunk.high_used);
}

713
src/allocators.zig Normal file
View File

@@ -0,0 +1,713 @@
const std = @import("std");
const Wyhash = std.hash.Wyhash;
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
// https://en.wikipedia.org/wiki/.bss#BSS_in_C
pub fn BSSSectionAllocator(comptime size: usize) type {
return struct {
var backing_buf: [size]u8 = undefined;
var fixed_buffer_allocator = FixedBufferAllocator.init(&backing_buf);
var buf_allocator = &fixed_buffer_allocator.allocator;
const Allocator = std.mem.Allocator;
const Self = @This();
allocator: Allocator,
fallback_allocator: *Allocator,
is_overflowed: bool = false,
pub fn get(self: *Self) *Allocator {
return &self.allocator;
}
pub fn init(fallback_allocator: *Allocator) Self {
return Self{ .fallback_allocator = fallback_allocator, .allocator = Allocator{
.allocFn = BSSSectionAllocator(size).alloc,
.resizeFn = BSSSectionAllocator(size).resize,
} };
}
pub fn alloc(
allocator: *Allocator,
len: usize,
ptr_align: u29,
len_align: u29,
return_address: usize,
) error{OutOfMemory}![]u8 {
const self = @fieldParentPtr(Self, "allocator", allocator);
return buf_allocator.allocFn(buf_allocator, len, ptr_align, len_align, return_address) catch |err| {
self.is_overflowed = true;
return self.fallback_allocator.allocFn(self.fallback_allocator, len, ptr_align, len_align, return_address);
};
}
pub fn resize(
allocator: *Allocator,
buf: []u8,
buf_align: u29,
new_len: usize,
len_align: u29,
return_address: usize,
) error{OutOfMemory}!usize {
const self = @fieldParentPtr(Self, "allocator", allocator);
if (fixed_buffer_allocator.ownsPtr(buf.ptr)) {
return fixed_buffer_allocator.allocator.resizeFn(&fixed_buffer_allocator.allocator, buf, buf_align, new_len, len_align, return_address);
} else {
return self.fallback_allocator.resizeFn(self.fallback_allocator, buf, buf_align, new_len, len_align, return_address);
}
}
};
}
pub fn isSliceInBuffer(slice: anytype, buffer: anytype) bool {
return (@ptrToInt(buffer) <= @ptrToInt(slice.ptr) and (@ptrToInt(slice.ptr) + slice.len) <= (@ptrToInt(buffer) + buffer.len));
}
pub const IndexType = packed struct {
index: u31,
is_overflow: bool = false,
};
const HashKeyType = u64;
const IndexMap = std.HashMapUnmanaged(HashKeyType, IndexType, hash_hashFn, hash_eqlFn, 80);
pub const Result = struct {
hash: HashKeyType,
index: IndexType,
status: ItemStatus,
pub fn hasCheckedIfExists(r: *const Result) bool {
return r.index.index != Unassigned.index;
}
pub fn isOverflowing(r: *const Result, comptime count: usize) bool {
return r.index >= count;
}
pub fn realIndex(r: *const Result, comptime count: anytype) IndexType {
return if (r.isOverflowing(count)) @intCast(IndexType, r.index - max_index) else r.index;
}
};
const Seed = 999;
pub const NotFound = IndexType{
.index = std.math.maxInt(u31),
};
pub const Unassigned = IndexType{
.index = std.math.maxInt(u31) - 1,
};
pub fn hash_hashFn(key: HashKeyType) HashKeyType {
return key;
}
pub fn hash_eqlFn(a: HashKeyType, b: HashKeyType) bool {
return a == b;
}
pub const ItemStatus = enum(u3) {
unknown,
exists,
not_found,
};
const hasDeinit = std.meta.trait.hasFn("deinit")(ValueType);
pub fn BSSList(comptime ValueType: type, comptime count: anytype) type {
const max_index = count - 1;
var list_type: type = undefined;
var list_count = count;
return struct {
pub var backing_buf: [count]ValueType = undefined;
pub var backing_buf_used: u16 = 0;
const Allocator = std.mem.Allocator;
const Self = @This();
pub const ListIndex = packed struct {
index: u31,
is_overflowing: bool = false,
};
overflow_list: std.ArrayListUnmanaged(ValueType),
allocator: *Allocator,
pub var instance: Self = undefined;
pub fn init(allocator: *std.mem.Allocator) *Self {
instance = Self{
.allocator = allocator,
.overflow_list = std.ArrayListUnmanaged(ValueType){},
};
return &instance;
}
pub fn isOverflowing() bool {
return backing_buf_used >= @as(u16, count);
}
pub fn at(self: *const Self, index: ListIndex) ?*ValueType {
if (index.index == NotFound.index or index.index == Unassigned.index) return null;
if (index.is_overflowing) {
return &self.overflow_list.items[index.index];
} else {
return &backing_buf[index.index];
}
}
pub fn exists(self: *Self, value: ValueType) bool {
return isSliceInBuffer(value, backing_buf);
}
pub fn append(self: *Self, value: ValueType) !ListIndex {
var result = ListIndex{ .index = std.math.maxInt(u31), .is_overflowing = backing_buf_used > max_index };
if (result.is_overflowing) {
result.index = @intCast(u31, self.overflow_list.items.len);
try self.overflow_list.append(self.allocator, value);
} else {
result.index = backing_buf_used;
backing_buf[result.index] = value;
backing_buf_used += 1;
if (backing_buf_used >= max_index) {
self.overflow_list = try @TypeOf(self.overflow_list).initCapacity(self.allocator, count);
}
}
return result;
}
pub fn update(self: *Self, result: *ListIndex, value: ValueType) !*ValueType {
if (result.index.index == NotFound.index or result.index.index == Unassigned.index) {
result.index.is_overflowing = backing_buf_used > max_index;
if (result.index.is_overflowing) {
result.index.index = @intCast(u31, self.overflow_list.items.len);
} else {
result.index.index = backing_buf_used;
backing_buf_used += 1;
if (backing_buf_used >= max_index) {
self.overflow_list = try @TypeOf(self.overflow_list).initCapacity(self.allocator, count);
}
}
}
if (result.index.is_overflowing) {
if (self.overflow_list.items.len == result.index.index) {
const real_index = self.overflow_list.items.len;
try self.overflow_list.append(self.allocator, value);
} else {
self.overflow_list.items[result.index.index] = value;
}
return &self.overflow_list.items[result.index.index];
} else {
backing_buf[result.index.index] = value;
return &backing_buf[result.index.index];
}
}
pub fn remove(self: *Self, index: ListIndex) void {
@compileError("Not implemented yet.");
// switch (index) {
// Unassigned.index => {
// self.index.remove(_key);
// },
// NotFound.index => {
// self.index.remove(_key);
// },
// 0...max_index => {
// if (hasDeinit(ValueType)) {
// backing_buf[index].deinit();
// }
// backing_buf[index] = undefined;
// },
// else => {
// const i = index - count;
// if (hasDeinit(ValueType)) {
// self.overflow_list.items[i].deinit();
// }
// self.overflow_list.items[index - count] = undefined;
// },
// }
// return index;
}
};
}
pub fn BSSStringList(comptime count: usize, comptime item_length: usize) type {
const max_index = count - 1;
const ValueType = []const u8;
return struct {
pub var slice_buf: [count][]const u8 = undefined;
pub var slice_buf_used: u16 = 0;
pub var backing_buf: [count * item_length]u8 = undefined;
pub var backing_buf_used: u64 = undefined;
const Allocator = std.mem.Allocator;
const Self = @This();
pub const ListIndex = packed struct {
index: u31,
is_overflowing: bool = false,
};
overflow_list: std.ArrayListUnmanaged(ValueType),
allocator: *Allocator,
pub var instance: Self = undefined;
pub fn init(allocator: *std.mem.Allocator) *Self {
instance = Self{
.allocator = allocator,
.overflow_list = std.ArrayListUnmanaged(ValueType){},
};
return &instance;
}
pub fn isOverflowing() bool {
return slice_buf_used >= @as(u16, count);
}
pub fn at(self: *const Self, index: IndexType) ?ValueType {
if (index.index == NotFound.index or index.index == Unassigned.index) return null;
if (index.is_overflowing) {
return &self.overflow_list.items[index.index];
} else {
return &slice_buf[index.index];
}
}
pub fn exists(self: *Self, value: ValueType) bool {
return isSliceInBuffer(value, slice_buf);
}
pub fn editableSlice(slice: []const u8) []u8 {
return constStrToU8(slice);
}
pub fn append(self: *Self, _value: anytype) ![]const u8 {
var value = _value;
if (value.len + backing_buf_used < backing_buf.len - 1) {
const start = backing_buf_used;
backing_buf_used += value.len;
std.mem.copy(u8, backing_buf[start..backing_buf_used], _value);
value = backing_buf[start..backing_buf_used];
} else {
value = try self.allocator.dupe(u8, _value);
}
var result = ListIndex{ .index = std.math.maxInt(u31), .is_overflowing = slice_buf_used > max_index };
if (result.is_overflowing) {
result.index = @intCast(u31, self.overflow_list.items.len);
} else {
result.index = slice_buf_used;
slice_buf_used += 1;
if (slice_buf_used >= max_index) {
self.overflow_list = try @TypeOf(self.overflow_list).initCapacity(self.allocator, count);
}
}
if (result.is_overflowing) {
if (self.overflow_list.items.len == result.index) {
const real_index = self.overflow_list.items.len;
try self.overflow_list.append(self.allocator, value);
} else {
self.overflow_list.items[result.index] = value;
}
return self.overflow_list.items[result.index];
} else {
slice_buf[result.index] = value;
return slice_buf[result.index];
}
}
pub fn remove(self: *Self, index: ListIndex) void {
@compileError("Not implemented yet.");
// switch (index) {
// Unassigned.index => {
// self.index.remove(_key);
// },
// NotFound.index => {
// self.index.remove(_key);
// },
// 0...max_index => {
// if (hasDeinit(ValueType)) {
// slice_buf[index].deinit();
// }
// slice_buf[index] = undefined;
// },
// else => {
// const i = index - count;
// if (hasDeinit(ValueType)) {
// self.overflow_list.items[i].deinit();
// }
// self.overflow_list.items[index - count] = undefined;
// },
// }
// return index;
}
};
}
pub fn TBSSStringList(comptime count: usize, comptime item_length: usize) type {
const max_index = count - 1;
const ValueType = []const u8;
return struct {
const Allocator = std.mem.Allocator;
const Self = @This();
pub threadlocal var slice_buf: [count][]const u8 = undefined;
pub threadlocal var slice_buf_used: u16 = 0;
pub threadlocal var backing_buf: [count * item_length]u8 = undefined;
pub threadlocal var backing_buf_used: u64 = undefined;
pub threadlocal var instance: Self = undefined;
pub const ListIndex = packed struct {
index: u31,
is_overflowing: bool = false,
};
overflow_list: std.ArrayListUnmanaged(ValueType),
allocator: *Allocator,
pub fn init(allocator: *std.mem.Allocator) *Self {
instance = Self{
.allocator = allocator,
.overflow_list = std.ArrayListUnmanaged(ValueType){},
};
return &instance;
}
pub fn isOverflowing() bool {
return slice_buf_used >= @as(u16, count);
}
pub fn at(self: *const Self, index: IndexType) ?ValueType {
if (index.index == NotFound.index or index.index == Unassigned.index) return null;
if (index.is_overflowing) {
return &self.overflow_list.items[index.index];
} else {
return &slice_buf[index.index];
}
}
pub fn exists(self: *Self, value: ValueType) bool {
return isSliceInBuffer(value, slice_buf);
}
pub fn editableSlice(slice: []const u8) []u8 {
return constStrToU8(slice);
}
pub fn append(self: *Self, _value: anytype) ![]const u8 {
var value = _value;
if (value.len + backing_buf_used < backing_buf.len - 1) {
const start = backing_buf_used;
backing_buf_used += value.len;
std.mem.copy(u8, backing_buf[start..backing_buf_used], _value);
value = backing_buf[start..backing_buf_used];
} else {
value = try self.allocator.dupe(u8, _value);
}
var result = ListIndex{ .index = std.math.maxInt(u31), .is_overflowing = slice_buf_used > max_index };
if (result.is_overflowing) {
result.index = @intCast(u31, self.overflow_list.items.len);
} else {
result.index = slice_buf_used;
slice_buf_used += 1;
if (slice_buf_used >= max_index) {
self.overflow_list = try @TypeOf(self.overflow_list).initCapacity(self.allocator, count);
}
}
if (result.is_overflowing) {
if (self.overflow_list.items.len == result.index) {
const real_index = self.overflow_list.items.len;
try self.overflow_list.append(self.allocator, value);
} else {
self.overflow_list.items[result.index] = value;
}
return self.overflow_list.items[result.index];
} else {
slice_buf[result.index] = value;
return slice_buf[result.index];
}
}
pub fn remove(self: *Self, index: ListIndex) void {
@compileError("Not implemented yet.");
// switch (index) {
// Unassigned.index => {
// self.index.remove(_key);
// },
// NotFound.index => {
// self.index.remove(_key);
// },
// 0...max_index => {
// if (hasDeinit(ValueType)) {
// slice_buf[index].deinit();
// }
// slice_buf[index] = undefined;
// },
// else => {
// const i = index - count;
// if (hasDeinit(ValueType)) {
// self.overflow_list.items[i].deinit();
// }
// self.overflow_list.items[index - count] = undefined;
// },
// }
// return index;
}
};
}
pub fn BSSMap(comptime ValueType: type, comptime count: anytype, store_keys: bool, estimated_key_length: usize) type {
const max_index = count - 1;
const BSSMapType = struct {
pub var backing_buf: [count]ValueType = undefined;
pub var backing_buf_used: u16 = 0;
const Allocator = std.mem.Allocator;
const Self = @This();
index: IndexMap,
overflow_list: std.ArrayListUnmanaged(ValueType),
allocator: *Allocator,
pub var instance: Self = undefined;
pub fn init(allocator: *std.mem.Allocator) *Self {
instance = Self{
.index = IndexMap{},
.allocator = allocator,
.overflow_list = std.ArrayListUnmanaged(ValueType){},
};
return &instance;
}
pub fn isOverflowing() bool {
return backing_buf_used >= @as(u16, count);
}
pub fn getOrPut(self: *Self, key: []const u8) !Result {
const _key = Wyhash.hash(Seed, key);
var index = try self.index.getOrPut(self.allocator, _key);
if (index.found_existing) {
return Result{
.hash = _key,
.index = index.entry.value,
.status = switch (index.entry.value.index) {
NotFound.index => .not_found,
Unassigned.index => .unknown,
else => .exists,
},
};
}
index.entry.value = Unassigned;
return Result{
.hash = _key,
.index = Unassigned,
.status = .unknown,
};
}
pub fn get(self: *const Self, key: []const u8) ?*ValueType {
const _key = Wyhash.hash(Seed, key);
const index = self.index.get(_key) orelse return null;
return self.atIndex(index);
}
pub fn markNotFound(self: *Self, result: Result) void {
self.index.put(self.allocator, result.hash, NotFound) catch unreachable;
}
pub fn atIndex(self: *const Self, index: IndexType) ?*ValueType {
if (index.index == NotFound.index or index.index == Unassigned.index) return null;
if (index.is_overflow) {
return &self.overflow_list.items[index.index];
} else {
return &backing_buf[index.index];
}
}
pub fn put(self: *Self, result: *Result, value: ValueType) !*ValueType {
if (result.index.index == NotFound.index or result.index.index == Unassigned.index) {
result.index.is_overflow = backing_buf_used > max_index;
if (result.index.is_overflow) {
result.index.index = @intCast(u31, self.overflow_list.items.len);
} else {
result.index.index = backing_buf_used;
backing_buf_used += 1;
if (backing_buf_used >= max_index) {
self.overflow_list = try @TypeOf(self.overflow_list).initCapacity(self.allocator, count);
}
}
}
try self.index.put(self.allocator, result.hash, result.index);
if (result.index.is_overflow) {
if (self.overflow_list.items.len == result.index.index) {
const real_index = self.overflow_list.items.len;
try self.overflow_list.append(self.allocator, value);
} else {
self.overflow_list.items[result.index.index] = value;
}
return &self.overflow_list.items[result.index.index];
} else {
backing_buf[result.index.index] = value;
return &backing_buf[result.index.index];
}
}
pub fn remove(self: *Self, key: string) IndexType {
const _key = Wyhash.hash(Seed, key);
const index = self.index.get(_key) orelse return;
switch (index) {
Unassigned.index => {
self.index.remove(_key);
},
NotFound.index => {
self.index.remove(_key);
},
0...max_index => {
if (hasDeinit(ValueType)) {
backing_buf[index].deinit();
}
backing_buf[index] = undefined;
},
else => {
const i = index - count;
if (hasDeinit(ValueType)) {
self.overflow_list.items[i].deinit();
}
self.overflow_list.items[index - count] = undefined;
},
}
return index;
}
};
if (!store_keys) {
return BSSMapType;
}
return struct {
map: *BSSMapType,
const Self = @This();
pub var instance: Self = undefined;
var key_list_buffer: [count * estimated_key_length]u8 = undefined;
var key_list_buffer_used: usize = 0;
var key_list_slices: [count][]u8 = undefined;
var key_list_overflow: std.ArrayListUnmanaged([]u8) = undefined;
pub fn init(allocator: *std.mem.Allocator) *Self {
instance = Self{
.map = BSSMapType.init(allocator),
};
return &instance;
}
pub fn isOverflowing() bool {
return instance.map.backing_buf_used >= count;
}
pub fn getOrPut(self: *Self, key: []const u8) !Result {
return try self.map.getOrPut(key);
}
pub fn get(self: *Self, key: []const u8) ?*ValueType {
return @call(.{ .modifier = .always_inline }, BSSMapType.get, .{ self.map, key });
}
pub fn atIndex(self: *Self, index: IndexType) ?*ValueType {
return @call(.{ .modifier = .always_inline }, BSSMapType.atIndex, .{ self.map, index });
}
pub fn keyAtIndex(self: *Self, index: IndexType) ?[]const u8 {
return switch (index.index) {
Unassigned.index, NotFound.index => null,
else => {
if (!index.is_overflow) {
return key_list_slices[index.index];
} else {
return key_list_overflow.items[index.index];
}
},
};
}
pub fn put(self: *Self, key: anytype, comptime store_key: bool, result: *Result, value: ValueType) !*ValueType {
var ptr = try self.map.put(result, value);
if (store_key) {
try self.putKey(key, result);
}
return ptr;
}
pub fn isKeyStaticallyAllocated(key: anytype) bool {
return isSliceInBuffer(key, &key_list_buffer);
}
// There's two parts to this.
// 1. Storing the underyling string.
// 2. Making the key accessible at the index.
pub fn putKey(self: *Self, key: anytype, result: *Result) !void {
var slice: []u8 = undefined;
// Is this actually a slice into the map? Don't free it.
if (isKeyStaticallyAllocated(key)) {
slice = constStrToU8(key);
} else if (key_list_buffer_used + key.len < key_list_buffer.len) {
const start = key_list_buffer_used;
key_list_buffer_used += key.len;
slice = key_list_buffer[start..key_list_buffer_used];
std.mem.copy(u8, slice, key);
} else {
slice = try self.map.allocator.dupe(u8, key);
}
if (!result.index.is_overflow) {
key_list_slices[result.index.index] = slice;
} else {
if (@intCast(u31, key_list_overflow.items.len) > result.index.index) {
const existing_slice = key_list_overflow.items[result.index.index];
if (!isKeyStaticallyAllocated(existing_slice)) {
self.map.allocator.free(existing_slice);
}
key_list_overflow.items[result.index.index] = slice;
} else {
try key_list_overflow.append(self.map.allocator, slice);
}
}
}
pub fn markNotFound(self: *Self, result: Result) void {
self.map.markNotFound(result);
}
// For now, don't free the keys.
pub fn remove(self: *Self, key: string) IndexType {
return self.map.remove(key);
}
};
}
pub fn constStrToU8(s: []const u8) []u8 {
return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
}

34
src/api/demo/.gitignore vendored Normal file
View File

@@ -0,0 +1,34 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# local env files
.env.local
.env.development.local
.env.test.local
.env.production.local
# vercel
.vercel

34
src/api/demo/README.md Normal file
View File

@@ -0,0 +1,34 @@
This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app).
## Getting Started
First, run the development server:
```bash
npm run dev
# or
yarn dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
You can start editing the page by modifying `pages/index.js`. The page auto-updates as you edit the file.
[API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.js`.
The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages.
## Learn More
To learn more about Next.js, take a look at the following resources:
- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome!
## Deploy on Vercel
The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.
Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details.

184
src/api/demo/lib/api.ts Normal file
View File

@@ -0,0 +1,184 @@
import * as Schema from "../../schema";
import { ByteBuffer } from "peechy";
import { transform as sucraseTransform } from "sucrase";
export interface WebAssemblyModule {
init(): number;
transform(a: number): number;
malloc(a: number): number;
calloc(a: number): number;
realloc(a: number): number;
free(a: number): number;
cycle(): void;
}
const wasm_imports_sym: symbol | string =
process.env.NODE_ENV === "development"
? "wasm_imports"
: Symbol("wasm_imports");
const ptr_converter = new ArrayBuffer(8);
const ptr_float = new Float64Array(ptr_converter);
const slice = new Uint32Array(ptr_converter);
var scratch: Uint8Array;
export class ESDev {
static has_initialized = false;
static wasm_source: WebAssembly.WebAssemblyInstantiatedSource = null;
static get wasm_exports(): WebAssemblyModule {
return ESDev.wasm_source.instance.exports as any;
}
static get memory() {
return ESDev[wasm_imports_sym].memory as WebAssembly.Memory;
}
static memory_array: Uint8Array;
static _decoder: TextDecoder;
static _wasmPtrToSlice(offset: number) {
if (ESDev.memory_array.buffer !== ESDev.memory.buffer) {
ESDev.memory_array = new Uint8Array(ESDev.memory.buffer);
}
ptr_float[0] = offset;
return ESDev.memory_array.subarray(slice[0], slice[0] + slice[1]);
}
static _wasmPtrLenToString(slice: number) {
if (!ESDev._decoder) {
ESDev._decoder = new TextDecoder("utf8");
}
const region = this._wasmPtrToSlice(slice);
return ESDev._decoder.decode(region);
}
// We don't want people to be calling these manually
static [wasm_imports_sym] = {
console_log(slice: number) {
console.log(ESDev._wasmPtrLenToString(slice));
},
console_error(slice: number) {
console.error(ESDev._wasmPtrLenToString(slice));
},
console_warn(slice: number) {
console.warn(ESDev._wasmPtrLenToString(slice));
},
console_info(slice: number) {
console.info(ESDev._wasmPtrLenToString(slice));
},
memory: null,
// __indirect_function_table: new WebAssembly.Table({
// initial: 0,
// element: "anyfunc",
// }),
// __stack_pointer: new WebAssembly.Global({
// mutable: true,
// value: "i32",
// }),
// __multi3(one: number, two: number) {
// return Math.imul(one | 0, two | 0);
// },
// fmod(one: number, two: number) {
// return one % two;
// },
// memset(ptr: number, value: number, len: number) {
// ESDev.memory_array.fill(value, ptr, ptr + len);
// },
// memcpy(ptr: number, value: number, len: number) {
// ESDev.memory_array.copyWithin(ptr, value, value + len);
// },
// // These functions convert a to an unsigned long long, rounding toward zero. Negative values all become zero.
// __fixunsdfti(a: number) {
// return Math.floor(a);
// },
// // These functions return the remainder of the unsigned division of a and b.
// __umodti3(a: number, b: number) {
// return (a | 0) % (b | 0);
// },
// // These functions return the quotient of the unsigned division of a and b.
// __udivti3(a: number, b: number) {
// return (a | 0) / (b | 0);
// },
// // These functions return the result of shifting a left by b bits.
// __ashlti3(a: number, b: number) {
// return (a | 0) >> (b | 0);
// },
// /* Returns: convert a to a double, rounding toward even. */
// __floatuntidf(a: number) {
// const mod = a % 2;
// if (mod === 0) {
// return Math.ceil(a);
// } else if (mod === 1) {
// return Math.floor(a);
// }
// },
};
static async init(url) {
globalThis.sucraseTransform = sucraseTransform;
scratch = new Uint8Array(8096);
if (ESDev.has_initialized) {
return;
}
ESDev[wasm_imports_sym].memory = new WebAssembly.Memory({
initial: 20,
// shared: typeof SharedArrayBuffer !== "undefined",
maximum: typeof SharedArrayBuffer !== "undefined" ? 5000 : undefined,
});
ESDev.wasm_source = await globalThis.WebAssembly.instantiateStreaming(
fetch(url),
{ env: ESDev[wasm_imports_sym] }
);
ESDev.memory_array = new Uint8Array(ESDev.memory.buffer);
const res = ESDev.wasm_exports.init();
if (res < 0) {
throw `[ESDev] Failed to initialize WASM module: code ${res}`;
} else {
console.log("WASM loaded.");
}
ESDev.has_initialized = true;
}
static transform(content: Uint8Array, file_name: string) {
if (!ESDev.has_initialized) {
throw "Please run await ESDev.init(wasm_url) before using this.";
}
// if (process.env.NODE_ENV === "development") {
// console.time("[ESDev] Transform " + file_name);
// }
const bb = new ByteBuffer(scratch);
bb.length = 0;
Schema.encodeTransform(
{
contents: content,
path: file_name,
},
bb
);
const data = bb.toUint8Array();
if (bb._data.buffer !== scratch.buffer) {
scratch = bb._data;
}
ESDev.wasm_exports.cycleStart();
const ptr = ESDev.wasm_exports.malloc(data.byteLength);
this._wasmPtrToSlice(ptr).set(data);
const resp_ptr = ESDev.wasm_exports.transform(ptr);
var _bb = new ByteBuffer(this._wasmPtrToSlice(resp_ptr));
const response = Schema.decodeTransformResponse(_bb);
ESDev.wasm_exports.cycleEnd();
return response;
}
}
globalThis.ESDev = ESDev;

17
src/api/demo/package.json Normal file
View File

@@ -0,0 +1,17 @@
{
"name": "demo",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start"
},
"dependencies": {
"next": "10.2.0",
"peechy": "0.4.3",
"react": "17.0.2",
"react-dom": "17.0.2",
"sucrase": "^3.18.1"
}
}

View File

@@ -0,0 +1,7 @@
import '../styles/globals.css'
function MyApp({ Component, pageProps }) {
return <Component {...pageProps} />
}
export default MyApp

View File

@@ -0,0 +1,5 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
export default (req, res) => {
res.status(200).json({ name: 'John Doe' })
}

View File

@@ -0,0 +1,69 @@
import Head from "next/head";
import Image from "next/image";
import styles from "../styles/Home.module.css";
// import "../lib/api.ts";
export default function Home() {
return (
<div className={styles.container}>
<Head>
<title>Create Next App</title>
<meta name="description" content="Generated by create next app" />
<link rel="icon" href="/favicon.ico" />
</Head>
<main className={styles.main}>
<h1 className={styles.title}>
Welcome to <a href="https://nextjs.org">Next.js!</a>
</h1>
<p className={styles.description}>
Get started by editing{" "}
<code className={styles.code}>pages/index.js</code>
</p>
<div className={styles.grid}>
<a href="https://nextjs.org/docs" className={styles.card}>
<h2>Documentation &rarr;</h2>
<p>Find in-depth information about Next.js features and API.</p>
</a>
<a href="https://nextjs.org/learn" className={styles.card}>
<h2>Learn &rarr;</h2>
<p>Learn about Next.js in an interactive course with quizzes!</p>
</a>
<a
href="https://github.com/vercel/next.js/tree/master/examples"
className={styles.card}
>
<h2>Examples &rarr;</h2>
<p>Discover and deploy boilerplate example Next.js projects.</p>
</a>
<a
href="https://vercel.com/new?utm_source=create-next-app&utm_medium=default-template&utm_campaign=create-next-app"
className={styles.card}
>
<h2>Deploy &rarr;</h2>
<p>
Instantly deploy your Next.js site to a public URL with Vercel.
</p>
</a>
</div>
</main>
<footer className={styles.footer}>
<a
href="https://vercel.com?utm_source=create-next-app&utm_medium=default-template&utm_campaign=create-next-app"
target="_blank"
rel="noopener noreferrer"
>
Powered by{" "}
<span className={styles.logo}>
<Image src="/vercel.svg" alt="Vercel Logo" width={72} height={16} />
</span>
</a>
</footer>
</div>
);
}

2038
src/api/demo/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

View File

@@ -0,0 +1,4 @@
<svg width="283" height="64" viewBox="0 0 283 64" fill="none"
xmlns="http://www.w3.org/2000/svg">
<path d="M141.04 16c-11.04 0-19 7.2-19 18s8.96 18 20 18c6.67 0 12.55-2.64 16.19-7.09l-7.65-4.42c-2.02 2.21-5.09 3.5-8.54 3.5-4.79 0-8.86-2.5-10.37-6.5h28.02c.22-1.12.35-2.28.35-3.5 0-10.79-7.96-17.99-19-17.99zm-9.46 14.5c1.25-3.99 4.67-6.5 9.45-6.5 4.79 0 8.21 2.51 9.45 6.5h-18.9zM248.72 16c-11.04 0-19 7.2-19 18s8.96 18 20 18c6.67 0 12.55-2.64 16.19-7.09l-7.65-4.42c-2.02 2.21-5.09 3.5-8.54 3.5-4.79 0-8.86-2.5-10.37-6.5h28.02c.22-1.12.35-2.28.35-3.5 0-10.79-7.96-17.99-19-17.99zm-9.45 14.5c1.25-3.99 4.67-6.5 9.45-6.5 4.79 0 8.21 2.51 9.45 6.5h-18.9zM200.24 34c0 6 3.92 10 10 10 4.12 0 7.21-1.87 8.8-4.92l7.68 4.43c-3.18 5.3-9.14 8.49-16.48 8.49-11.05 0-19-7.2-19-18s7.96-18 19-18c7.34 0 13.29 3.19 16.48 8.49l-7.68 4.43c-1.59-3.05-4.68-4.92-8.8-4.92-6.07 0-10 4-10 10zm82.48-29v46h-9V5h9zM36.95 0L73.9 64H0L36.95 0zm92.38 5l-27.71 48L73.91 5H84.3l17.32 30 17.32-30h10.39zm58.91 12v9.69c-1-.29-2.06-.49-3.2-.49-5.81 0-10 4-10 10V51h-9V17h9v9.2c0-5.08 5.91-9.2 13.2-9.2z" fill="#000"/>
</svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

View File

@@ -0,0 +1,121 @@
.container {
min-height: 100vh;
padding: 0 0.5rem;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 100vh;
}
.main {
padding: 5rem 0;
flex: 1;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
}
.footer {
width: 100%;
height: 100px;
border-top: 1px solid #eaeaea;
display: flex;
justify-content: center;
align-items: center;
}
.footer a {
display: flex;
justify-content: center;
align-items: center;
flex-grow: 1;
}
.title a {
color: #0070f3;
text-decoration: none;
}
.title a:hover,
.title a:focus,
.title a:active {
text-decoration: underline;
}
.title {
margin: 0;
line-height: 1.15;
font-size: 4rem;
}
.title,
.description {
text-align: center;
}
.description {
line-height: 1.5;
font-size: 1.5rem;
}
.code {
background: #fafafa;
border-radius: 5px;
padding: 0.75rem;
font-size: 1.1rem;
font-family: Menlo, Monaco, Lucida Console, Liberation Mono, DejaVu Sans Mono,
Bitstream Vera Sans Mono, Courier New, monospace;
}
.grid {
display: flex;
align-items: center;
justify-content: center;
flex-wrap: wrap;
max-width: 800px;
margin-top: 3rem;
}
.card {
margin: 1rem;
padding: 1.5rem;
text-align: left;
color: inherit;
text-decoration: none;
border: 1px solid #eaeaea;
border-radius: 10px;
transition: color 0.15s ease, border-color 0.15s ease;
width: 45%;
}
.card:hover,
.card:focus,
.card:active {
color: #0070f3;
border-color: #0070f3;
}
.card h2 {
margin: 0 0 1rem 0;
font-size: 1.5rem;
}
.card p {
margin: 0;
font-size: 1.25rem;
line-height: 1.5;
}
.logo {
height: 1em;
margin-left: 0.5rem;
}
@media (max-width: 600px) {
.grid {
width: 100%;
flex-direction: column;
}
}

View File

@@ -0,0 +1,16 @@
html,
body {
padding: 0;
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Oxygen,
Ubuntu, Cantarell, Fira Sans, Droid Sans, Helvetica Neue, sans-serif;
}
a {
color: inherit;
text-decoration: none;
}
* {
box-sizing: border-box;
}

205
src/api/schema.d.ts vendored Normal file
View File

@@ -0,0 +1,205 @@
import type {ByteBuffer} from "peechy";
type byte = number;
type float = number;
type int = number;
type alphanumeric = string;
type uint = number;
type int8 = number;
type lowp = number;
type int16 = number;
type int32 = number;
type float32 = number;
type uint16 = number;
type uint32 = number;
export enum Loader {
jsx = 1,
js = 2,
ts = 3,
tsx = 4,
css = 5,
file = 6,
json = 7
}
export const LoaderKeys = {
1: "jsx",
jsx: "jsx",
2: "js",
js: "js",
3: "ts",
ts: "ts",
4: "tsx",
tsx: "tsx",
5: "css",
css: "css",
6: "file",
file: "file",
7: "json",
json: "json"
}
export enum ResolveMode {
disable = 1,
lazy = 2,
dev = 3,
bundle = 4
}
export const ResolveModeKeys = {
1: "disable",
disable: "disable",
2: "lazy",
lazy: "lazy",
3: "dev",
dev: "dev",
4: "bundle",
bundle: "bundle"
}
export enum Platform {
browser = 1,
node = 2
}
export const PlatformKeys = {
1: "browser",
browser: "browser",
2: "node",
node: "node"
}
export enum JSXRuntime {
automatic = 1,
classic = 2
}
export const JSXRuntimeKeys = {
1: "automatic",
automatic: "automatic",
2: "classic",
classic: "classic"
}
export enum TransformResponseStatus {
success = 1,
fail = 2
}
export const TransformResponseStatusKeys = {
1: "success",
success: "success",
2: "fail",
fail: "fail"
}
export enum MessageKind {
err = 1,
warn = 2,
note = 3,
debug = 4
}
export const MessageKindKeys = {
1: "err",
err: "err",
2: "warn",
warn: "warn",
3: "note",
note: "note",
4: "debug",
debug: "debug"
}
export interface JSX {
factory: string;
runtime: JSXRuntime;
fragment: string;
development: boolean;
import_source: string;
react_fast_refresh: boolean;
}
export interface TransformOptions {
jsx?: JSX;
tsconfig_override?: string;
resolve?: ResolveMode;
public_url?: string;
absolute_working_dir?: string;
define_keys?: string[];
define_values?: string[];
preserve_symlinks?: boolean;
entry_points?: string[];
write?: boolean;
inject?: string[];
output_dir?: string;
external?: string[];
loader_keys?: string[];
loader_values?: Loader[];
main_fields?: string[];
platform?: Platform;
serve?: boolean;
extension_order?: string[];
public_dir?: string;
}
export interface FileHandle {
path: string;
size: uint;
fd: uint;
}
export interface Transform {
handle?: FileHandle;
path?: string;
contents?: Uint8Array;
loader?: Loader;
options?: TransformOptions;
}
export interface OutputFile {
data: Uint8Array;
path: string;
}
export interface TransformResponse {
status: TransformResponseStatus;
files: OutputFile[];
errors: Message[];
}
export interface Location {
file: string;
namespace: string;
line: int32;
column: int32;
line_text: string;
suggestion: string;
offset: uint;
}
export interface MessageData {
text?: string;
location?: Location;
}
export interface Message {
kind: MessageKind;
data: MessageData;
notes: MessageData[];
}
export interface Log {
warnings: uint32;
errors: uint32;
msgs: Message[];
}
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
export declare function decodeJSX(buffer: ByteBuffer): JSX;
export declare function encodeTransformOptions(message: TransformOptions, bb: ByteBuffer): void;
export declare function decodeTransformOptions(buffer: ByteBuffer): TransformOptions;
export declare function encodeFileHandle(message: FileHandle, bb: ByteBuffer): void;
export declare function decodeFileHandle(buffer: ByteBuffer): FileHandle;
export declare function encodeTransform(message: Transform, bb: ByteBuffer): void;
export declare function decodeTransform(buffer: ByteBuffer): Transform;
export declare function encodeOutputFile(message: OutputFile, bb: ByteBuffer): void;
export declare function decodeOutputFile(buffer: ByteBuffer): OutputFile;
export declare function encodeTransformResponse(message: TransformResponse, bb: ByteBuffer): void;
export declare function decodeTransformResponse(buffer: ByteBuffer): TransformResponse;
export declare function encodeLocation(message: Location, bb: ByteBuffer): void;
export declare function decodeLocation(buffer: ByteBuffer): Location;
export declare function encodeMessageData(message: MessageData, bb: ByteBuffer): void;
export declare function decodeMessageData(buffer: ByteBuffer): MessageData;
export declare function encodeMessage(message: Message, bb: ByteBuffer): void;
export declare function decodeMessage(buffer: ByteBuffer): Message;
export declare function encodeLog(message: Log, bb: ByteBuffer): void;
export declare function decodeLog(buffer: ByteBuffer): Log;

859
src/api/schema.js Normal file
View File

@@ -0,0 +1,859 @@
const Loader = {
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"5": 5,
"6": 6,
"7": 7,
"jsx": 1,
"js": 2,
"ts": 3,
"tsx": 4,
"css": 5,
"file": 6,
"json": 7
};
const LoaderKeys = {
"1": "jsx",
"2": "js",
"3": "ts",
"4": "tsx",
"5": "css",
"6": "file",
"7": "json",
"jsx": "jsx",
"js": "js",
"ts": "ts",
"tsx": "tsx",
"css": "css",
"file": "file",
"json": "json"
};
const ResolveMode = {
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"disable": 1,
"lazy": 2,
"dev": 3,
"bundle": 4
};
const ResolveModeKeys = {
"1": "disable",
"2": "lazy",
"3": "dev",
"4": "bundle",
"disable": "disable",
"lazy": "lazy",
"dev": "dev",
"bundle": "bundle"
};
const Platform = {
"1": 1,
"2": 2,
"browser": 1,
"node": 2
};
const PlatformKeys = {
"1": "browser",
"2": "node",
"browser": "browser",
"node": "node"
};
const JSXRuntime = {
"1": 1,
"2": 2,
"automatic": 1,
"classic": 2
};
const JSXRuntimeKeys = {
"1": "automatic",
"2": "classic",
"automatic": "automatic",
"classic": "classic"
};
function decodeJSX(bb) {
var result = {};
result["factory"] = bb.readString();
result["runtime"] = JSXRuntime[bb.readByte()];
result["fragment"] = bb.readString();
result["development"] = !!bb.readByte();
result["import_source"] = bb.readString();
result["react_fast_refresh"] = !!bb.readByte();
return result;
}
function encodeJSX(message, bb) {
var value = message["factory"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"factory\"");
}
var value = message["runtime"];
if (value != null) {
var encoded = JSXRuntime[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"JSXRuntime\"");
bb.writeByte(encoded);
} else {
throw new Error("Missing required field \"runtime\"");
}
var value = message["fragment"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"fragment\"");
}
var value = message["development"];
if (value != null) {
bb.writeByte(value);
} else {
throw new Error("Missing required field \"development\"");
}
var value = message["import_source"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"import_source\"");
}
var value = message["react_fast_refresh"];
if (value != null) {
bb.writeByte(value);
} else {
throw new Error("Missing required field \"react_fast_refresh\"");
}
}
function decodeTransformOptions(bb) {
var result = {};
while (true) {
switch (bb.readByte()) {
case 0:
return result;
case 1:
result["jsx"] = decodeJSX(bb);
break;
case 2:
result["tsconfig_override"] = bb.readString();
break;
case 3:
result["resolve"] = ResolveMode[bb.readByte()];
break;
case 4:
result["public_url"] = bb.readString();
break;
case 5:
result["absolute_working_dir"] = bb.readString();
break;
case 6:
var length = bb.readVarUint();
var values = result["define_keys"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 7:
var length = bb.readVarUint();
var values = result["define_values"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 8:
result["preserve_symlinks"] = !!bb.readByte();
break;
case 9:
var length = bb.readVarUint();
var values = result["entry_points"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 10:
result["write"] = !!bb.readByte();
break;
case 11:
var length = bb.readVarUint();
var values = result["inject"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 12:
result["output_dir"] = bb.readString();
break;
case 13:
var length = bb.readVarUint();
var values = result["external"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 14:
var length = bb.readVarUint();
var values = result["loader_keys"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 15:
var length = bb.readVarUint();
var values = result["loader_values"] = Array(length);
for (var i = 0; i < length; i++) values[i] = Loader[bb.readByte()];
break;
case 16:
var length = bb.readVarUint();
var values = result["main_fields"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 17:
result["platform"] = Platform[bb.readByte()];
break;
case 18:
result["serve"] = !!bb.readByte();
break;
case 19:
var length = bb.readVarUint();
var values = result["extension_order"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
case 20:
result["public_dir"] = bb.readString();
break;
default:
throw new Error("Attempted to parse invalid message");
}
}
}
function encodeTransformOptions(message, bb) {
var value = message["jsx"];
if (value != null) {
bb.writeByte(1);
encodeJSX(value, bb);
}
var value = message["tsconfig_override"];
if (value != null) {
bb.writeByte(2);
bb.writeString(value);
}
var value = message["resolve"];
if (value != null) {
bb.writeByte(3);
var encoded = ResolveMode[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"ResolveMode\"");
bb.writeByte(encoded);
}
var value = message["public_url"];
if (value != null) {
bb.writeByte(4);
bb.writeString(value);
}
var value = message["absolute_working_dir"];
if (value != null) {
bb.writeByte(5);
bb.writeString(value);
}
var value = message["define_keys"];
if (value != null) {
bb.writeByte(6);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["define_values"];
if (value != null) {
bb.writeByte(7);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["preserve_symlinks"];
if (value != null) {
bb.writeByte(8);
bb.writeByte(value);
}
var value = message["entry_points"];
if (value != null) {
bb.writeByte(9);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["write"];
if (value != null) {
bb.writeByte(10);
bb.writeByte(value);
}
var value = message["inject"];
if (value != null) {
bb.writeByte(11);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["output_dir"];
if (value != null) {
bb.writeByte(12);
bb.writeString(value);
}
var value = message["external"];
if (value != null) {
bb.writeByte(13);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["loader_keys"];
if (value != null) {
bb.writeByte(14);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["loader_values"];
if (value != null) {
bb.writeByte(15);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
bb.writeByte(encoded);
}
}
var value = message["main_fields"];
if (value != null) {
bb.writeByte(16);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["platform"];
if (value != null) {
bb.writeByte(17);
var encoded = Platform[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Platform\"");
bb.writeByte(encoded);
}
var value = message["serve"];
if (value != null) {
bb.writeByte(18);
bb.writeByte(value);
}
var value = message["extension_order"];
if (value != null) {
bb.writeByte(19);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
}
var value = message["public_dir"];
if (value != null) {
bb.writeByte(20);
bb.writeString(value);
}
bb.writeByte(0);
}
function decodeFileHandle(bb) {
var result = {};
result["path"] = bb.readString();
result["size"] = bb.readVarUint();
result["fd"] = bb.readVarUint();
return result;
}
function encodeFileHandle(message, bb) {
var value = message["path"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"path\"");
}
var value = message["size"];
if (value != null) {
bb.writeVarUint(value);
} else {
throw new Error("Missing required field \"size\"");
}
var value = message["fd"];
if (value != null) {
bb.writeVarUint(value);
} else {
throw new Error("Missing required field \"fd\"");
}
}
function decodeTransform(bb) {
var result = {};
while (true) {
switch (bb.readByte()) {
case 0:
return result;
case 1:
result["handle"] = decodeFileHandle(bb);
break;
case 2:
result["path"] = bb.readString();
break;
case 3:
result["contents"] = bb.readByteArray();
break;
case 4:
result["loader"] = Loader[bb.readByte()];
break;
case 5:
result["options"] = decodeTransformOptions(bb);
break;
default:
throw new Error("Attempted to parse invalid message");
}
}
}
function encodeTransform(message, bb) {
var value = message["handle"];
if (value != null) {
bb.writeByte(1);
encodeFileHandle(value, bb);
}
var value = message["path"];
if (value != null) {
bb.writeByte(2);
bb.writeString(value);
}
var value = message["contents"];
if (value != null) {
bb.writeByte(3);
bb.writeByteArray(value);
}
var value = message["loader"];
if (value != null) {
bb.writeByte(4);
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
bb.writeByte(encoded);
}
var value = message["options"];
if (value != null) {
bb.writeByte(5);
encodeTransformOptions(value, bb);
}
bb.writeByte(0);
}
const TransformResponseStatus = {
"1": 1,
"2": 2,
"success": 1,
"fail": 2
};
const TransformResponseStatusKeys = {
"1": "success",
"2": "fail",
"success": "success",
"fail": "fail"
};
function decodeOutputFile(bb) {
var result = {};
result["data"] = bb.readByteArray();
result["path"] = bb.readString();
return result;
}
function encodeOutputFile(message, bb) {
var value = message["data"];
if (value != null) {
bb.writeByteArray(value);
} else {
throw new Error("Missing required field \"data\"");
}
var value = message["path"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"path\"");
}
}
function decodeTransformResponse(bb) {
var result = {};
result["status"] = TransformResponseStatus[bb.readVarUint()];
var length = bb.readVarUint();
var values = result["files"] = Array(length);
for (var i = 0; i < length; i++) values[i] = decodeOutputFile(bb);
var length = bb.readVarUint();
var values = result["errors"] = Array(length);
for (var i = 0; i < length; i++) values[i] = decodeMessage(bb);
return result;
}
function encodeTransformResponse(message, bb) {
var value = message["status"];
if (value != null) {
var encoded = TransformResponseStatus[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"TransformResponseStatus\"");
bb.writeVarUint(encoded);
} else {
throw new Error("Missing required field \"status\"");
}
var value = message["files"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
encodeOutputFile(value, bb);
}
} else {
throw new Error("Missing required field \"files\"");
}
var value = message["errors"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
encodeMessage(value, bb);
}
} else {
throw new Error("Missing required field \"errors\"");
}
}
const MessageKind = {
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"err": 1,
"warn": 2,
"note": 3,
"debug": 4
};
const MessageKindKeys = {
"1": "err",
"2": "warn",
"3": "note",
"4": "debug",
"err": "err",
"warn": "warn",
"note": "note",
"debug": "debug"
};
function decodeLocation(bb) {
var result = {};
result["file"] = bb.readString();
result["namespace"] = bb.readString();
result["line"] = bb.readInt32();
result["column"] = bb.readInt32();
result["line_text"] = bb.readString();
result["suggestion"] = bb.readString();
result["offset"] = bb.readVarUint();
return result;
}
function encodeLocation(message, bb) {
var value = message["file"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"file\"");
}
var value = message["namespace"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"namespace\"");
}
var value = message["line"];
if (value != null) {
bb.writeInt32(value);
} else {
throw new Error("Missing required field \"line\"");
}
var value = message["column"];
if (value != null) {
bb.writeInt32(value);
} else {
throw new Error("Missing required field \"column\"");
}
var value = message["line_text"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"line_text\"");
}
var value = message["suggestion"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"suggestion\"");
}
var value = message["offset"];
if (value != null) {
bb.writeVarUint(value);
} else {
throw new Error("Missing required field \"offset\"");
}
}
function decodeMessageData(bb) {
var result = {};
while (true) {
switch (bb.readByte()) {
case 0:
return result;
case 1:
result["text"] = bb.readString();
break;
case 2:
result["location"] = decodeLocation(bb);
break;
default:
throw new Error("Attempted to parse invalid message");
}
}
}
function encodeMessageData(message, bb) {
var value = message["text"];
if (value != null) {
bb.writeByte(1);
bb.writeString(value);
}
var value = message["location"];
if (value != null) {
bb.writeByte(2);
encodeLocation(value, bb);
}
bb.writeByte(0);
}
function decodeMessage(bb) {
var result = {};
result["kind"] = MessageKind[bb.readVarUint()];
result["data"] = decodeMessageData(bb);
var length = bb.readVarUint();
var values = result["notes"] = Array(length);
for (var i = 0; i < length; i++) values[i] = decodeMessageData(bb);
return result;
}
function encodeMessage(message, bb) {
var value = message["kind"];
if (value != null) {
var encoded = MessageKind[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"MessageKind\"");
bb.writeVarUint(encoded);
} else {
throw new Error("Missing required field \"kind\"");
}
var value = message["data"];
if (value != null) {
encodeMessageData(value, bb);
} else {
throw new Error("Missing required field \"data\"");
}
var value = message["notes"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
encodeMessageData(value, bb);
}
} else {
throw new Error("Missing required field \"notes\"");
}
}
function decodeLog(bb) {
var result = {};
result["warnings"] = bb.readUint32();
result["errors"] = bb.readUint32();
var length = bb.readVarUint();
var values = result["msgs"] = Array(length);
for (var i = 0; i < length; i++) values[i] = decodeMessage(bb);
return result;
}
function encodeLog(message, bb) {
var value = message["warnings"];
if (value != null) {
bb.writeUint32(value);
} else {
throw new Error("Missing required field \"warnings\"");
}
var value = message["errors"];
if (value != null) {
bb.writeUint32(value);
} else {
throw new Error("Missing required field \"errors\"");
}
var value = message["msgs"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
encodeMessage(value, bb);
}
} else {
throw new Error("Missing required field \"msgs\"");
}
}
export { Loader }
export { LoaderKeys }
export { ResolveMode }
export { ResolveModeKeys }
export { Platform }
export { PlatformKeys }
export { JSXRuntime }
export { JSXRuntimeKeys }
export { decodeJSX }
export { encodeJSX }
export { decodeTransformOptions }
export { encodeTransformOptions }
export { decodeFileHandle }
export { encodeFileHandle }
export { decodeTransform }
export { encodeTransform }
export { TransformResponseStatus }
export { TransformResponseStatusKeys }
export { decodeOutputFile }
export { encodeOutputFile }
export { decodeTransformResponse }
export { encodeTransformResponse }
export { MessageKind }
export { MessageKindKeys }
export { decodeLocation }
export { encodeLocation }
export { decodeMessageData }
export { encodeMessageData }
export { decodeMessage }
export { encodeMessage }
export { decodeLog }
export { encodeLog }

142
src/api/schema.peechy Normal file
View File

@@ -0,0 +1,142 @@
package Api;
smol Loader {
jsx = 1;
js = 2;
ts = 3;
tsx = 4;
css = 5;
file = 6;
json = 7;
}
smol ResolveMode {
disable = 1;
lazy = 2;
dev = 3;
bundle = 4;
}
smol Platform {
browser = 1;
node = 2;
}
smol JSXRuntime {
automatic = 1;
classic = 2;
}
struct JSX {
string factory;
JSXRuntime runtime;
string fragment;
bool development;
// Probably react
string import_source;
bool react_fast_refresh;
}
message TransformOptions {
JSX jsx = 1;
string tsconfig_override = 2;
ResolveMode resolve = 3;
string public_url = 4;
string absolute_working_dir = 5;
string[] define_keys = 6;
string[] define_values = 7;
bool preserve_symlinks = 8;
string[] entry_points = 9;
bool write = 10;
string[] inject = 11;
string output_dir = 12;
string[] external = 13;
string[] loader_keys = 14;
Loader[] loader_values = 15;
string[] main_fields = 16;
Platform platform = 17;
bool serve = 18;
string[] extension_order = 19;
string public_dir = 20;
}
struct FileHandle {
string path;
uint size;
uint fd;
}
message Transform {
FileHandle handle = 1;
string path = 2;
byte[] contents = 3;
Loader loader = 4;
TransformOptions options = 5;
}
enum TransformResponseStatus {
success = 1;
fail = 2;
}
struct OutputFile {
byte[] data;
string path;
}
struct TransformResponse {
TransformResponseStatus status;
OutputFile[] files;
Message[] errors;
}
enum MessageKind {
err = 1;
warn =2;
note = 3;
debug = 4;
}
struct Location {
string file;
string namespace;
int32 line;
int32 column;
string line_text;
string suggestion;
uint offset;
}
message MessageData {
string text = 1;
Location location = 2;
}
struct Message {
MessageKind kind;
MessageData data;
MessageData[] notes;
}
struct Log {
uint32 warnings;
uint32 errors;
Message[] msgs;
}

184
src/api/schema.ts Normal file
View File

@@ -0,0 +1,184 @@
import type {ByteBuffer} from "peechy";
type byte = number;
type float = number;
type int = number;
type alphanumeric = string;
type uint = number;
type int8 = number;
type lowp = number;
type int16 = number;
type int32 = number;
type float32 = number;
type uint16 = number;
type uint32 = number;
export enum Loader {
jsx = 1,
js = 2,
ts = 3,
tsx = 4,
css = 5,
file = 6,
json = 7
}
export const LoaderKeys = {
1: "jsx",
jsx: "jsx",
2: "js",
js: "js",
3: "ts",
ts: "ts",
4: "tsx",
tsx: "tsx",
5: "css",
css: "css",
6: "file",
file: "file",
7: "json",
json: "json"
}
export enum ResolveMode {
disable = 1,
lazy = 2
}
export const ResolveModeKeys = {
1: "disable",
disable: "disable",
2: "lazy",
lazy: "lazy"
}
export enum JSXRuntime {
automatic = 1,
classic = 2
}
export const JSXRuntimeKeys = {
1: "automatic",
automatic: "automatic",
2: "classic",
classic: "classic"
}
export enum TransformResponseStatus {
success = 1,
fail = 2
}
export const TransformResponseStatusKeys = {
1: "success",
success: "success",
2: "fail",
fail: "fail"
}
export enum MessageKind {
err = 1,
warn = 2,
note = 3,
debug = 4
}
export const MessageKindKeys = {
1: "err",
err: "err",
2: "warn",
warn: "warn",
3: "note",
note: "note",
4: "debug",
debug: "debug"
}
export interface JSX {
factory: string;
runtime: JSXRuntime;
fragment: string;
development: boolean;
import_source: string;
react_fast_refresh: boolean;
loader_keys: string[];
loader_values: Loader[];
}
export interface TransformOptions {
jsx?: JSX;
tsconfig_override?: string;
resolve?: ResolveMode;
public_url?: string;
absolute_working_dir?: string;
define_keys?: string[];
define_values?: string[];
preserve_symlinks?: boolean;
entry_points?: string[];
write?: boolean;
inject?: string[];
output_dir?: string;
externals?: string[];
}
export interface FileHandle {
path: string;
size: uint;
fd: uint;
}
export interface Transform {
handle?: FileHandle;
path?: string;
contents?: Uint8Array;
loader?: Loader;
options?: TransformOptions;
}
export interface OutputFile {
data: Uint8Array;
path: string;
}
export interface TransformResponse {
status: TransformResponseStatus;
files: OutputFile[];
errors: Message[];
}
export interface Location {
file: string;
namespace: string;
line: int32;
column: int32;
line_text: string;
suggestion: string;
offset: uint;
}
export interface MessageData {
text?: string;
location?: Location;
}
export interface Message {
kind: MessageKind;
data: MessageData;
notes: MessageData[];
}
export interface Log {
warnings: uint32;
errors: uint32;
msgs: Message[];
}
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
export declare function decodeJSX(buffer: ByteBuffer): JSX;
export declare function encodeTransformOptions(message: TransformOptions, bb: ByteBuffer): void;
export declare function decodeTransformOptions(buffer: ByteBuffer): TransformOptions;
export declare function encodeFileHandle(message: FileHandle, bb: ByteBuffer): void;
export declare function decodeFileHandle(buffer: ByteBuffer): FileHandle;
export declare function encodeTransform(message: Transform, bb: ByteBuffer): void;
export declare function decodeTransform(buffer: ByteBuffer): Transform;
export declare function encodeOutputFile(message: OutputFile, bb: ByteBuffer): void;
export declare function decodeOutputFile(buffer: ByteBuffer): OutputFile;
export declare function encodeTransformResponse(message: TransformResponse, bb: ByteBuffer): void;
export declare function decodeTransformResponse(buffer: ByteBuffer): TransformResponse;
export declare function encodeLocation(message: Location, bb: ByteBuffer): void;
export declare function decodeLocation(buffer: ByteBuffer): Location;
export declare function encodeMessageData(message: MessageData, bb: ByteBuffer): void;
export declare function decodeMessageData(buffer: ByteBuffer): MessageData;
export declare function encodeMessage(message: Message, bb: ByteBuffer): void;
export declare function decodeMessage(buffer: ByteBuffer): Message;
export declare function encodeLog(message: Log, bb: ByteBuffer): void;
export declare function decodeLog(buffer: ByteBuffer): Log;

1091
src/api/schema.zig Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -25,10 +25,21 @@ pub const NodeIndexNone = 4294967293;
pub const Ref = packed struct {
source_index: Int = std.math.maxInt(Ref.Int),
inner_index: Int = 0,
is_source_contents_slice: bool = false,
// 2 bits of padding for whatever is the parent
pub const Int = u31;
pub const None = Ref{ .inner_index = std.math.maxInt(Ref.Int) };
pub const Int = u30;
pub const None = Ref{
.inner_index = std.math.maxInt(Ref.Int),
.source_index = std.math.maxInt(Ref.Int),
};
pub const RuntimeRef = Ref{
.inner_index = std.math.maxInt(Ref.Int),
.source_index = std.math.maxInt(Ref.Int) - 1,
};
pub fn toInt(int: anytype) Int {
return @intCast(Int, int);
}
pub fn isNull(self: *const Ref) bool {
return self.source_index == std.math.maxInt(Ref.Int) and self.inner_index == std.math.maxInt(Ref.Int);
}
@@ -37,13 +48,17 @@ pub const Ref = packed struct {
return self.source_index == std.math.maxInt(Ref.Int);
}
pub fn isSourceIndexNull(int: Ref.Int) bool {
pub fn isSourceIndexNull(int: anytype) bool {
return int == std.math.maxInt(Ref.Int);
}
pub fn eql(ref: Ref, b: Ref) bool {
return ref.inner_index == b.inner_index and ref.source_index == b.source_index;
}
pub fn jsonStringify(self: *const Ref, options: anytype, writer: anytype) !void {
return try std.json.stringify([2]u32{ self.source_index, self.inner_index }, options, writer);
}
};
// This is kind of the wrong place, but it's shared between files
@@ -55,3 +70,11 @@ pub const RequireOrImportMeta = struct {
exports_ref: Ref = Ref.None,
is_wrapper_async: bool = false,
};
pub fn debug(comptime fmt: []const u8, args: anytype) callconv(.Inline) void {
// Output.print(fmt, args);
}
pub fn debugl(
comptime fmt: []const u8,
) callconv(.Inline) void {
// Output.print("{s}\n", .{fmt});
}

View File

@@ -1,29 +1,770 @@
usingnamespace @import("global.zig");
const std = @import("std");
const options = @import("options.zig");
const lex = @import("js_lexer.zig");
const logger = @import("logger.zig");
const alloc = @import("alloc.zig");
const options = @import("options.zig");
const js_parser = @import("js_parser.zig");
const json_parser = @import("json_parser.zig");
const js_printer = @import("js_printer.zig");
const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
const panicky = @import("panic_handler.zig");
const Fs = @import("fs.zig");
const Api = @import("api/schema.zig").Api;
const Resolver = @import("./resolver/resolver.zig");
const sync = @import("sync.zig");
const ThreadPool = sync.ThreadPool;
const ThreadSafeHashMap = @import("./thread_safe_hash_map.zig");
const ImportRecord = @import("./import_record.zig").ImportRecord;
const allocators = @import("./allocators.zig");
const MimeType = @import("./http/mime_type.zig");
const resolve_path = @import("./resolver/resolve_path.zig");
pub const ServeResult = struct {
value: Value,
mime_type: MimeType,
// Either we:
// - send pre-buffered asset body
// - stream a file from the file system
pub const Value = union(Tag) {
file: File,
build: options.OutputFile,
none: u0,
pub const Tag = enum {
file,
build,
none,
};
pub const File = struct {
absolute_path: string,
handle: std.fs.File,
};
};
};
// const BundleMap =
const ResolveResults = ThreadSafeHashMap.ThreadSafeStringHashMap(Resolver.Resolver.Result);
pub const Bundler = struct {
options: options.TransformOptions,
log: logger.Log,
options: options.BundleOptions,
log: *logger.Log,
allocator: *std.mem.Allocator,
result: ?options.TransformResult = null,
result: options.TransformResult = undefined,
resolver: Resolver.Resolver,
fs: *Fs.FileSystem,
// thread_pool: *ThreadPool,
output_files: std.ArrayList(options.OutputFile),
resolve_results: *ResolveResults,
resolve_queue: std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.LinearFifoBufferType.Dynamic),
elapsed: i128 = 0,
needs_runtime: bool = false,
pub fn init(options: options.TransformOptions, allocator: *std.mem.Allocator) Bundler {
var log = logger.Log.init(allocator);
runtime_output_path: Fs.Path = undefined,
pub const RuntimeCode = @embedFile("./runtime.js");
// to_bundle:
// thread_pool: *ThreadPool,
pub fn init(
allocator: *std.mem.Allocator,
log: *logger.Log,
opts: Api.TransformOptions,
) !Bundler {
var fs = try Fs.FileSystem.init1(allocator, opts.absolute_working_dir, opts.serve orelse false);
const bundle_options = try options.BundleOptions.fromApi(allocator, fs, log, opts);
relative_paths_list = ImportPathsList.init(allocator);
// var pool = try allocator.create(ThreadPool);
// try pool.init(ThreadPool.InitConfig{
// .allocator = allocator,
// });
return Bundler{
.options = options,
.options = bundle_options,
.fs = fs,
.allocator = allocator,
.resolver = Resolver.Resolver.init1(allocator, log, fs, bundle_options),
.log = log,
// .thread_pool = pool,
.result = options.TransformResult{ .outbase = bundle_options.output_dir },
.resolve_results = try ResolveResults.init(allocator),
.resolve_queue = std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.LinearFifoBufferType.Dynamic).init(allocator),
.output_files = std.ArrayList(options.OutputFile).init(allocator),
};
}
pub fn scan(self: *Bundler) void {}
const ImportPathsList = allocators.BSSStringList(2048, 256);
var relative_paths_list: *ImportPathsList = undefined;
threadlocal var relative_path_allocator: std.heap.FixedBufferAllocator = undefined;
threadlocal var relative_path_allocator_buf: [4096]u8 = undefined;
threadlocal var relative_path_allocator_buf_loaded: bool = false;
pub fn bundle(self: *Bundler) options.TransformResult {
var result = self.result;
pub fn generateImportPath(bundler: *Bundler, source_dir: string, source_path: string) !Fs.Path {
if (!relative_path_allocator_buf_loaded) {
relative_path_allocator_buf_loaded = true;
relative_path_allocator = std.heap.FixedBufferAllocator.init(&relative_path_allocator_buf);
}
defer relative_path_allocator.reset();
var source = logger.Source.initFile(self.options.entry_point, self.allocator);
var pretty = try relative_paths_list.append(bundler.fs.relativeTo(source_path));
var pathname = Fs.PathName.init(pretty);
var absolute_pathname = Fs.PathName.init(source_path);
if (bundler.options.out_extensions.get(absolute_pathname.ext)) |ext| {
absolute_pathname.ext = ext;
}
switch (bundler.options.import_path_format) {
.relative => {
return Fs.Path.initWithPretty(pretty, pretty);
},
.relative_nodejs => {
var path = Fs.Path.initWithPretty(pretty, pretty);
path.text = path.text[0 .. path.text.len - path.name.ext.len];
return path;
},
.absolute_url => {
const absolute_url = try relative_paths_list.append(
try std.fmt.allocPrint(
&relative_path_allocator.allocator,
"{s}{s}{s}{s}",
.{
bundler.options.public_url,
pathname.dir,
pathname.base,
absolute_pathname.ext,
},
),
);
return Fs.Path.initWithPretty(absolute_url, pretty);
},
else => unreachable,
}
}
pub fn processImportRecord(bundler: *Bundler, source_dir: string, import_record: *ImportRecord) !void {
var resolve_result = try bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind);
// extremely naive.
resolve_result.is_from_node_modules = strings.contains(resolve_result.path_pair.primary.text, "/node_modules");
if (resolve_result.shouldAssumeCommonJS()) {
import_record.wrap_with_to_module = true;
if (!bundler.needs_runtime) {
bundler.runtime_output_path = Fs.Path.init(try std.fmt.allocPrint(bundler.allocator, "{s}/__runtime.js", .{bundler.fs.top_level_dir}));
}
bundler.needs_runtime = true;
}
// lazy means:
// Run the resolver
// Don't parse/print automatically.
if (bundler.options.resolve_mode != .lazy) {
if (!bundler.resolve_results.contains(resolve_result.path_pair.primary.text)) {
try bundler.resolve_results.put(resolve_result.path_pair.primary.text, resolve_result);
try bundler.resolve_queue.writeItem(resolve_result);
}
}
if (!strings.eql(import_record.path.text, resolve_result.path_pair.primary.text)) {
import_record.path = try bundler.generateImportPath(source_dir, resolve_result.path_pair.primary.text);
}
}
pub fn buildWithResolveResult(bundler: *Bundler, resolve_result: Resolver.Resolver.Result) !?options.OutputFile {
if (resolve_result.is_external) {
return null;
}
// Step 1. Parse & scan
const loader = bundler.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file;
var file_path = resolve_result.path_pair.primary;
file_path.pretty = relative_paths_list.append(bundler.fs.relativeTo(file_path.text)) catch unreachable;
var result = bundler.parse(file_path, loader) orelse return null;
switch (result.loader) {
.jsx, .js, .ts, .tsx => {
const ast = result.ast;
for (ast.import_records) |*import_record| {
bundler.processImportRecord(
std.fs.path.dirname(file_path.text) orelse file_path.text,
import_record,
) catch |err| {
switch (err) {
error.ModuleNotFound => {
if (Resolver.Resolver.isPackagePath(import_record.path.text)) {
if (bundler.options.platform != .node and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
try bundler.log.addRangeErrorFmt(
&result.source,
import_record.range,
bundler.allocator,
"Could not resolve: \"{s}\". Try setting --platform=\"node\"",
.{import_record.path.text},
);
} else {
try bundler.log.addRangeErrorFmt(
&result.source,
import_record.range,
bundler.allocator,
"Could not resolve: \"{s}\". Maybe you need to \"npm install\" (or yarn/pnpm)?",
.{import_record.path.text},
);
}
} else {
try bundler.log.addRangeErrorFmt(
&result.source,
import_record.range,
bundler.allocator,
"Could not resolve: \"{s}\"",
.{
import_record.path.text,
},
);
}
},
else => {
continue;
},
}
};
}
},
else => {},
}
const output_file = try bundler.print(
result,
);
js_ast.Stmt.Data.Store.reset();
js_ast.Expr.Data.Store.reset();
return output_file;
}
pub fn print(
bundler: *Bundler,
result: ParseResult,
) !options.OutputFile {
var allocator = bundler.allocator;
var parts = &([_]string{result.source.path.text});
var abs_path = bundler.fs.abs(parts);
var rel_path = bundler.fs.relativeTo(abs_path);
var pathname = Fs.PathName.init(rel_path);
if (bundler.options.out_extensions.get(pathname.ext)) |ext| {
pathname.ext = ext;
}
var stack_fallback = std.heap.stackFallback(1024, bundler.allocator);
var stack = stack_fallback.get();
var _out_path = std.fmt.allocPrint(stack, "{s}{s}{s}{s}", .{ pathname.dir, std.fs.path.sep_str, pathname.base, pathname.ext }) catch unreachable;
defer stack.free(_out_path);
var out_path = bundler.fs.filename_store.append(_out_path) catch unreachable;
const ast = result.ast;
var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
const print_result = try js_printer.printAst(
allocator,
ast,
js_ast.Symbol.Map.initList(symbols),
&result.source,
false,
js_printer.Options{ .to_module_ref = Ref.RuntimeRef },
&_linker,
);
// allocator.free(result.source.contents);
return options.OutputFile{
.path = out_path,
.contents = print_result.js,
};
}
pub const ParseResult = struct {
source: logger.Source,
loader: options.Loader,
ast: js_ast.Ast,
};
pub var tracing_start: i128 = if (enableTracing) 0 else undefined;
pub fn parse(bundler: *Bundler, path: Fs.Path, loader: options.Loader) ?ParseResult {
if (enableTracing) {
tracing_start = std.time.nanoTimestamp();
}
defer {
if (enableTracing) {
bundler.elapsed += std.time.nanoTimestamp() - tracing_start;
}
}
var result: ParseResult = undefined;
const entry = bundler.resolver.caches.fs.readFile(bundler.fs, path.text) catch return null;
const source = logger.Source.initFile(Fs.File{ .path = path, .contents = entry.contents }, bundler.allocator) catch return null;
switch (loader) {
.js, .jsx, .ts, .tsx => {
var jsx = bundler.options.jsx;
jsx.parse = loader.isJSX();
var opts = js_parser.Parser.Options.init(jsx, loader);
const value = (bundler.resolver.caches.js.parse(bundler.allocator, opts, bundler.options.define, bundler.log, &source) catch null) orelse return null;
return ParseResult{
.ast = value,
.source = source,
.loader = loader,
};
},
.json => {
var expr = json_parser.ParseJSON(&source, bundler.log, bundler.allocator) catch return null;
var stmt = js_ast.Stmt.alloc(bundler.allocator, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });
var stmts = bundler.allocator.alloc(js_ast.Stmt, 1) catch unreachable;
stmts[0] = stmt;
var parts = bundler.allocator.alloc(js_ast.Part, 1) catch unreachable;
parts[0] = js_ast.Part{ .stmts = stmts };
return ParseResult{
.ast = js_ast.Ast.initTest(parts),
.source = source,
.loader = loader,
};
},
.css => {
return null;
},
else => Global.panic("Unsupported loader {s} for path: {s}", .{ loader, source.path.text }),
}
return null;
}
pub fn buildServeResultOutput(bundler: *Bundler, resolve: Resolver.Resolver.Result, loader: options.Loader) !ServeResult.Output {
switch (loader) {
.js, .jsx, .ts, .tsx, .json => {
return ServeResult.Output{ .built = bundler.buildWithResolveResult(resolve) orelse error.BuildFailed };
},
else => {
return ServeResult.Output{ .file = ServeResult.Output.File{ .absolute_path = resolve.path_pair.primary.text } };
},
}
}
threadlocal var tmp_buildfile_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
// We try to be mostly stateless when serving
// This means we need a slightly different resolver setup
// Essentially:
pub fn buildFile(
bundler: *Bundler,
log: *logger.Log,
allocator: *std.mem.Allocator,
relative_path: string,
extension: string,
) !ServeResult {
var original_resolver_logger = bundler.resolver.log;
var original_bundler_logger = bundler.log;
defer bundler.log = original_bundler_logger;
defer bundler.resolver.log = original_resolver_logger;
bundler.log = log;
bundler.resolver.log = log;
// Resolving a public file has special behavior
if (bundler.options.public_dir_enabled) {
// On Windows, we don't keep the directory handle open forever because Windows doesn't like that.
const public_dir: std.fs.Dir = bundler.options.public_dir_handle orelse std.fs.openDirAbsolute(bundler.options.public_dir, .{}) catch |err| {
log.addErrorFmt(null, logger.Loc.Empty, allocator, "Opening public directory failed: {s}", .{@errorName(err)}) catch unreachable;
Output.printErrorln("Opening public directory failed: {s}", .{@errorName(err)});
bundler.options.public_dir_enabled = false;
return error.PublicDirError;
};
var relative_unrooted_path: []u8 = resolve_path.normalizeString(relative_path, false, .auto);
var _file: ?std.fs.File = null;
// Is it the index file?
if (relative_unrooted_path.len == 1 and relative_unrooted_path[0] == '.') {
// std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path);
// std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/"
// Search for /index.html
if (public_dir.openFile("index.html", .{})) |file| {
std.mem.copy(u8, relative_unrooted_path, "index.html");
relative_unrooted_path = relative_unrooted_path[0.."index.html".len];
_file = file;
} else |err| {}
// Okay is it actually a full path?
} else {
if (public_dir.openFile(relative_unrooted_path, .{})) |file| {
_file = file;
} else |err| {}
}
// Try some weird stuff.
while (_file == null and relative_unrooted_path.len > 1) {
// When no extension is provided, it might be html
if (extension.len == 0) {
std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]);
std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], ".html");
if (public_dir.openFile(tmp_buildfile_buf[0 .. relative_unrooted_path.len + ".html".len], .{})) |file| {
_file = file;
break;
} else |err| {}
var _path: []u8 = undefined;
if (relative_unrooted_path[relative_unrooted_path.len - 1] == '/') {
std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path[0 .. relative_unrooted_path.len - 1]);
std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len - 1 ..], "/index.html");
_path = tmp_buildfile_buf[0 .. relative_unrooted_path.len - 1 + "/index.html".len];
} else {
std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]);
std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/index.html");
_path = tmp_buildfile_buf[0 .. relative_unrooted_path.len + "/index.html".len];
}
if (public_dir.openFile(_path, .{})) |file| {
const __path = _path;
relative_unrooted_path = __path;
_file = file;
break;
} else |err| {}
}
break;
}
if (_file) |file| {
const _parts = [_]string{ bundler.options.public_dir, relative_unrooted_path };
return ServeResult{
.value = ServeResult.Value{ .file = .{
.absolute_path = try bundler.fs.joinAlloc(allocator, &_parts),
.handle = file,
} },
.mime_type = MimeType.byExtension(extension),
};
}
}
// We make some things faster in theory by using absolute paths instead of relative paths
const absolute_path = resolve_path.joinAbsStringBuf(
bundler.fs.top_level_dir,
&tmp_buildfile_buf,
&([_][]const u8{relative_path}),
.auto,
);
const resolved = (try bundler.resolver.resolve(bundler.fs.top_level_dir, absolute_path, .entry_point));
const loader = bundler.options.loaders.get(resolved.path_pair.primary.name.ext) orelse .file;
const output = switch (loader) {
.js, .jsx, .ts, .tsx, .json => ServeResult.Value{
.build = (try bundler.buildWithResolveResult(resolved)) orelse return error.BuildFailed,
},
else => ServeResult.Value{ .file = ServeResult.Value.File{
.absolute_path = resolved.path_pair.primary.text,
.handle = try std.fs.openFileAbsolute(resolved.path_pair.primary.text, .{ .read = true, .write = false }),
} },
};
return ServeResult{
.value = output,
.mime_type = MimeType.byLoader(loader, resolved.path_pair.primary.name.ext),
};
}
pub fn bundle(
allocator: *std.mem.Allocator,
log: *logger.Log,
opts: Api.TransformOptions,
) !options.TransformResult {
var bundler = try Bundler.init(allocator, log, opts);
var entry_points = try allocator.alloc(Resolver.Resolver.Result, bundler.options.entry_points.len);
if (isDebug) {
log.level = .verbose;
bundler.resolver.debug_logs = try Resolver.Resolver.DebugLogs.init(allocator);
}
var rfs: *Fs.FileSystem.RealFS = &bundler.fs.fs;
var entry_point_i: usize = 0;
for (bundler.options.entry_points) |_entry| {
var entry: string = _entry;
// if (!std.fs.path.isAbsolute(_entry)) {
// const _paths = [_]string{ bundler.fs.top_level_dir, _entry };
// entry = std.fs.path.join(allocator, &_paths) catch unreachable;
// } else {
// entry = allocator.dupe(u8, _entry) catch unreachable;
// }
// const dir = std.fs.path.dirname(entry) orelse continue;
// const base = std.fs.path.basename(entry);
// var dir_entry = try rfs.readDirectory(dir);
// if (std.meta.activeTag(dir_entry) == .err) {
// log.addErrorFmt(null, logger.Loc.Empty, allocator, "Failed to read directory: {s} - {s}", .{ dir, @errorName(dir_entry.err.original_err) }) catch unreachable;
// continue;
// }
// const file_entry = dir_entry.entries.get(base) orelse continue;
// if (file_entry.entry.kind(rfs) != .file) {
// continue;
// }
if (!strings.startsWith(entry, "./")) {
// allocator.free(entry);
// Entry point paths without a leading "./" are interpreted as package
// paths. This happens because they go through general path resolution
// like all other import paths so that plugins can run on them. Requiring
// a leading "./" for a relative path simplifies writing plugins because
// entry points aren't a special case.
//
// However, requiring a leading "./" also breaks backward compatibility
// and makes working with the CLI more difficult. So attempt to insert
// "./" automatically when needed. We don't want to unconditionally insert
// a leading "./" because the path may not be a file system path. For
// example, it may be a URL. So only insert a leading "./" when the path
// is an exact match for an existing file.
var __entry = allocator.alloc(u8, "./".len + entry.len) catch unreachable;
__entry[0] = '.';
__entry[1] = '/';
std.mem.copy(u8, __entry[2..__entry.len], entry);
entry = __entry;
}
const result = bundler.resolver.resolve(bundler.fs.top_level_dir, entry, .entry_point) catch |err| {
Output.printError("Error resolving \"{s}\": {s}\n", .{ entry, @errorName(err) });
continue;
};
const key = result.path_pair.primary.text;
if (bundler.resolve_results.contains(key)) {
continue;
}
try bundler.resolve_results.put(key, result);
entry_points[entry_point_i] = result;
if (isDebug) {
Output.print("Resolved {s} => {s}", .{ entry, result.path_pair.primary.text });
}
entry_point_i += 1;
bundler.resolve_queue.writeItem(result) catch unreachable;
}
switch (bundler.options.resolve_mode) {
.lazy, .dev, .bundle => {
while (bundler.resolve_queue.readItem()) |item| {
const output_file = bundler.buildWithResolveResult(item) catch continue orelse continue;
bundler.output_files.append(output_file) catch unreachable;
}
},
else => Global.panic("Unsupported resolve mode: {s}", .{@tagName(bundler.options.resolve_mode)}),
}
// if (log.level == .verbose) {
// for (log.msgs.items) |msg| {
// try msg.writeFormat(std.io.getStdOut().writer());
// }
// }
// if (bundler.needs_runtime) {
// try bundler.output_files.append(options.OutputFile{
// });
// }
if (enableTracing) {
Output.print(
"\n---Tracing---\nResolve time: {d}\nParsing time: {d}\n---Tracing--\n\n",
.{ bundler.resolver.elapsed, bundler.elapsed },
);
}
return try options.TransformResult.init(try allocator.dupe(u8, bundler.result.outbase), bundler.output_files.toOwnedSlice(), log, allocator);
}
};
pub const Transformer = struct {
options: options.TransformOptions,
log: *logger.Log,
allocator: *std.mem.Allocator,
result: ?options.TransformResult = null,
pub fn transform(
allocator: *std.mem.Allocator,
log: *logger.Log,
opts: Api.TransformOptions,
) !options.TransformResult {
var raw_defines = try options.stringHashMapFromArrays(RawDefines, allocator, opts.define_keys, opts.define_values);
if (opts.define_keys.len == 0) {
try raw_defines.put("process.env.NODE_ENV", "\"development\"");
}
var user_defines = try DefineData.from_input(raw_defines, log, alloc.static);
var define = try Define.init(
alloc.static,
user_defines,
);
const cwd = opts.absolute_working_dir orelse try std.process.getCwdAlloc(allocator);
const output_dir_parts = [_]string{ try std.process.getCwdAlloc(allocator), opts.output_dir orelse "out" };
const output_dir = try std.fs.path.join(allocator, &output_dir_parts);
var output_files = try std.ArrayList(options.OutputFile).initCapacity(allocator, opts.entry_points.len);
var loader_values = try allocator.alloc(options.Loader, opts.loader_values.len);
for (loader_values) |_, i| {
const loader = switch (opts.loader_values[i]) {
.jsx => options.Loader.jsx,
.js => options.Loader.js,
.ts => options.Loader.ts,
.css => options.Loader.css,
.tsx => options.Loader.tsx,
.json => options.Loader.json,
else => unreachable,
};
loader_values[i] = loader;
}
var loader_map = try options.stringHashMapFromArrays(
std.StringHashMap(options.Loader),
allocator,
opts.loader_keys,
loader_values,
);
var use_default_loaders = loader_map.count() == 0;
var jsx = if (opts.jsx) |_jsx| try options.JSX.Pragma.fromApi(_jsx, allocator) else options.JSX.Pragma{};
var output_i: usize = 0;
var chosen_alloc: *std.mem.Allocator = allocator;
var arena: std.heap.ArenaAllocator = undefined;
const use_arenas = opts.entry_points.len > 8;
for (opts.entry_points) |entry_point, i| {
if (use_arenas) {
arena = std.heap.ArenaAllocator.init(allocator);
chosen_alloc = &arena.allocator;
}
defer {
if (use_arenas) {
arena.deinit();
}
}
var _log = logger.Log.init(allocator);
var __log = &_log;
var paths = [_]string{ cwd, entry_point };
const absolutePath = try std.fs.path.resolve(chosen_alloc, &paths);
const file = try std.fs.openFileAbsolute(absolutePath, std.fs.File.OpenFlags{ .read = true });
defer file.close();
const stat = try file.stat();
const code = try file.readToEndAlloc(allocator, stat.size);
defer {
if (_log.msgs.items.len == 0) {
allocator.free(code);
}
chosen_alloc.free(absolutePath);
_log.appendTo(log) catch {};
}
const _file = Fs.File{ .path = Fs.Path.init(entry_point), .contents = code };
var source = try logger.Source.initFile(_file, chosen_alloc);
var loader: options.Loader = undefined;
if (use_default_loaders) {
loader = options.defaultLoaders.get(std.fs.path.extension(absolutePath)) orelse continue;
} else {
loader = options.Loader.forFileName(
entry_point,
loader_map,
) orelse continue;
}
jsx.parse = loader.isJSX();
const parser_opts = js_parser.Parser.Options.init(jsx, loader);
var _source = &source;
const res = _transform(chosen_alloc, allocator, __log, parser_opts, loader, define, _source) catch continue;
const relative_path = resolve_path.relative(cwd, absolutePath);
const out_path = resolve_path.joinAbs2(cwd, .auto, absolutePath, relative_path);
try output_files.append(options.OutputFile{ .path = allocator.dupe(u8, out_path) catch continue, .contents = res.js });
}
return try options.TransformResult.init(output_dir, output_files.toOwnedSlice(), log, allocator);
}
pub fn _transform(
allocator: *std.mem.Allocator,
result_allocator: *std.mem.Allocator,
log: *logger.Log,
opts: js_parser.Parser.Options,
loader: options.Loader,
define: *Define,
source: *logger.Source,
) !js_printer.PrintResult {
var ast: js_ast.Ast = undefined;
switch (loader) {
.json => {
var expr = try json_parser.ParseJSON(source, log, allocator);
var stmt = js_ast.Stmt.alloc(allocator, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });
var stmts = try allocator.alloc(js_ast.Stmt, 1);
stmts[0] = stmt;
var parts = try allocator.alloc(js_ast.Part, 1);
parts[0] = js_ast.Part{ .stmts = stmts };
ast = js_ast.Ast.initTest(parts);
},
.jsx, .tsx, .ts, .js => {
var parser = try js_parser.Parser.init(opts, log, source, define, allocator);
var res = try parser.parse();
ast = res.ast;
if (FeatureFlags.print_ast) {
try ast.toJSON(allocator, std.io.getStdErr().writer());
}
},
else => {
Global.panic("Unsupported loader: {s} for path: {s}", .{ loader, source.path.text });
},
}
var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
return try js_printer.printAst(
result_allocator,
ast,
js_ast.Symbol.Map.initList(symbols),
source,
false,
js_printer.Options{ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 } },
&_linker,
);
}
};

233
src/cache.zig Normal file
View File

@@ -0,0 +1,233 @@
usingnamespace @import("global.zig");
const js_ast = @import("./js_ast.zig");
const logger = @import("./logger.zig");
const js_parser = @import("./js_parser/js_parser.zig");
const json_parser = @import("./json_parser.zig");
const options = @import("./options.zig");
const Define = @import("./defines.zig").Define;
const std = @import("std");
const fs = @import("./fs.zig");
const sync = @import("sync.zig");
const Mutex = sync.Mutex;
pub const Cache = struct {
pub const Set = struct {
js: JavaScript,
fs: Fs,
json: Json,
pub fn init(allocator: *std.mem.Allocator) Set {
return Set{
.js = JavaScript.init(allocator),
.fs = Fs{
.mutex = Mutex.init(),
.entries = std.StringHashMap(Fs.Entry).init(allocator),
},
.json = Json{
.mutex = Mutex.init(),
.entries = std.StringHashMap(*Json.Entry).init(allocator),
},
};
}
};
pub const Fs = struct {
mutex: Mutex,
entries: std.StringHashMap(Entry),
pub const Entry = struct {
contents: string,
// Null means its not usable
mod_key: ?fs.FileSystem.Implementation.ModKey = null,
pub fn deinit(entry: *Entry, allocator: *std.mem.Allocator) void {
if (entry.contents.len > 0) {
allocator.free(entry.contents);
entry.contents = "";
}
}
};
pub fn deinit(c: *Fs) void {
var iter = c.entries.iterator();
while (iter.next()) |entry| {
entry.value.deinit(c.entries.allocator);
}
c.entries.deinit();
}
pub fn readFile(c: *Fs, _fs: *fs.FileSystem, path: string) !Entry {
var rfs = _fs.fs;
{
c.mutex.lock();
defer c.mutex.unlock();
if (c.entries.get(path)) |entry| {
return entry;
}
}
// If the file's modification key hasn't changed since it was cached, assume
// the contents of the file are also the same and skip reading the file.
var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKey(path) catch |err| handler: {
switch (err) {
error.FileNotFound, error.AccessDenied => {
return err;
},
else => {
if (isDebug) {
Output.printError("modkey error: {s}", .{@errorName(err)});
}
break :handler null;
},
}
};
var file: fs.File = undefined;
if (mod_key) |modk| {
file = rfs.readFile(path, modk.size) catch |err| {
if (isDebug) {
Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
}
return err;
};
} else {
file = rfs.readFile(path, null) catch |err| {
if (isDebug) {
Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
}
return err;
};
}
const entry = Entry{
.contents = file.contents,
.mod_key = mod_key,
};
c.mutex.lock();
defer c.mutex.unlock();
var res = c.entries.getOrPut(path) catch unreachable;
if (res.found_existing) {
res.entry.value.deinit(c.entries.allocator);
}
res.entry.value = entry;
return res.entry.value;
}
};
pub const Css = struct {
pub const Entry = struct {};
pub const Result = struct {
ok: bool,
value: void,
};
pub fn parse(cache: *@This(), log: *logger.Log, source: logger.Source) !Result {
Global.notimpl();
}
};
pub const JavaScript = struct {
mutex: Mutex,
entries: std.StringHashMap(Result),
pub const Result = js_ast.Result;
pub fn init(allocator: *std.mem.Allocator) JavaScript {
return JavaScript{ .mutex = Mutex.init(), .entries = std.StringHashMap(Result).init(allocator) };
}
// For now, we're not going to cache JavaScript ASTs.
// It's probably only relevant when bundling for production.
pub fn parse(
cache: *@This(),
allocator: *std.mem.Allocator,
opts: js_parser.Parser.Options,
defines: *Define,
log: *logger.Log,
source: *const logger.Source,
) anyerror!?js_ast.Ast {
cache.mutex.lock();
defer cache.mutex.unlock();
var get_or_put_result = try cache.entries.getOrPut(source.key_path.text);
if (get_or_put_result.found_existing) {
return if (get_or_put_result.entry.value.ok) get_or_put_result.entry.value.ast else null;
}
var temp_log = logger.Log.init(allocator);
var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| {
temp_log.appendTo(log) catch {};
get_or_put_result.entry.value = Result{ .ast = undefined, .ok = false };
return null;
};
get_or_put_result.entry.value = parser.parse() catch |err| {
get_or_put_result.entry.value = Result{ .ast = undefined, .ok = false };
temp_log.appendTo(log) catch {};
return null;
};
temp_log.appendTo(log) catch {};
return if (get_or_put_result.entry.value.ok) get_or_put_result.entry.value.ast else null;
}
};
pub const Json = struct {
pub const Entry = struct {
is_tsconfig: bool = false,
source: logger.Source,
expr: ?js_ast.Expr = null,
ok: bool = false,
// msgs: []logger.Msg,
};
mutex: Mutex,
entries: std.StringHashMap(*Entry),
pub fn init(allocator: *std.mem.Allocator) Json {
return Json{
.mutex = Mutex.init(),
.entries = std.StringHashMap(Entry).init(allocator),
};
}
fn parse(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator, is_tsconfig: bool, func: anytype) anyerror!?js_ast.Expr {
{
cache.mutex.lock();
defer cache.mutex.unlock();
if (cache.entries.get(source.key_path.text)) |entry| {
return entry.expr;
}
}
var temp_log = logger.Log.init(allocator);
defer {
temp_log.appendTo(log) catch {};
}
const expr = func(&source, &temp_log, allocator) catch handler: {
break :handler null;
};
const entry = try allocator.create(Entry);
entry.* = Entry{
.is_tsconfig = is_tsconfig,
.source = source,
.expr = expr,
.ok = expr != null,
};
cache.mutex.lock();
defer cache.mutex.unlock();
std.debug.assert(source.key_path.text.len > 0); // missing key_path in source
try cache.entries.put(source.key_path.text, entry);
return entry.expr;
}
pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr {
return try parse(cache, log, source, allocator, false, json_parser.ParseJSON);
}
pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr {
return try parse(cache, log, source, allocator, true, json_parser.ParseTSConfig);
}
};
};

422
src/cli.zig Normal file
View File

@@ -0,0 +1,422 @@
usingnamespace @import("global.zig");
usingnamespace @import("./http.zig");
const std = @import("std");
const lex = @import("js_lexer.zig");
const logger = @import("logger.zig");
const alloc = @import("alloc.zig");
const options = @import("options.zig");
const js_parser = @import("js_parser.zig");
const json_parser = @import("json_parser.zig");
const js_printer = @import("js_printer.zig");
const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
const panicky = @import("panic_handler.zig");
const Api = @import("api/schema.zig").Api;
const resolve_path = @import("./resolver/resolve_path.zig");
const clap = @import("clap");
const bundler = @import("bundler.zig");
pub fn constStrToU8(s: string) []u8 {
return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
}
pub const Cli = struct {
const LoaderMatcher = strings.ExactSizeMatcher(4);
pub fn ColonListType(comptime t: type, value_resolver: anytype) type {
return struct {
pub fn init(allocator: *std.mem.Allocator, count: usize) !@This() {
var keys = try allocator.alloc(string, count);
var values = try allocator.alloc(t, count);
return @This(){ .keys = keys, .values = values };
}
keys: []string,
values: []t,
pub fn load(self: *@This(), input: []const string) !void {
for (input) |str, i| {
// Support either ":" or "=" as the separator, preferring whichever is first.
// ":" is less confusing IMO because that syntax is used with flags
// but "=" is what esbuild uses and I want this to be somewhat familiar for people using esbuild
const midpoint = std.math.min(strings.indexOfChar(str, ':') orelse std.math.maxInt(usize), strings.indexOfChar(str, '=') orelse std.math.maxInt(usize));
if (midpoint == std.math.maxInt(usize)) {
return error.InvalidSeparator;
}
self.keys[i] = str[0..midpoint];
self.values[i] = try value_resolver(str[midpoint + 1 .. str.len]);
}
}
pub fn resolve(allocator: *std.mem.Allocator, input: []const string) !@This() {
var list = try init(allocator, input.len);
try list.load(input);
return list;
}
};
}
pub const LoaderColonList = ColonListType(Api.Loader, Arguments.loader_resolver);
pub const DefineColonList = ColonListType(string, Arguments.noop_resolver);
pub const Arguments = struct {
pub fn loader_resolver(in: string) !Api.Loader {
const Matcher = strings.ExactSizeMatcher(4);
switch (Matcher.match(in)) {
Matcher.case("jsx") => return Api.Loader.jsx,
Matcher.case("js") => return Api.Loader.js,
Matcher.case("ts") => return Api.Loader.ts,
Matcher.case("tsx") => return Api.Loader.tsx,
Matcher.case("css") => return Api.Loader.css,
Matcher.case("file") => return Api.Loader.file,
Matcher.case("json") => return Api.Loader.json,
else => {
return error.InvalidLoader;
},
}
}
pub fn noop_resolver(in: string) !string {
return in;
}
pub fn fileReadError(err: anyerror, stderr: anytype, filename: string, kind: string) noreturn {
stderr.writer().print("Error reading file \"{s}\" for {s}: {s}", .{ filename, kind, @errorName(err) }) catch {};
std.process.exit(1);
}
pub fn readFile(
allocator: *std.mem.Allocator,
cwd: string,
filename: string,
) ![]u8 {
var paths = [_]string{ cwd, filename };
const outpath = try std.fs.path.resolve(allocator, &paths);
defer allocator.free(outpath);
var file = try std.fs.openFileAbsolute(outpath, std.fs.File.OpenFlags{ .read = true, .write = false });
defer file.close();
const stats = try file.stat();
return try file.readToEndAlloc(allocator, stats.size);
}
pub fn parse(allocator: *std.mem.Allocator, stdout: anytype, stderr: anytype) !Api.TransformOptions {
@setEvalBranchQuota(9999);
const params = comptime [_]clap.Param(clap.Help){
clap.parseParam("-h, --help Display this help and exit. ") catch unreachable,
clap.parseParam("-r, --resolve <STR> Determine import/require behavior. \"disable\" ignores. \"dev\" bundles node_modules and builds everything else as independent entry points") catch unreachable,
clap.parseParam("-d, --define <STR>... Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:development") catch unreachable,
clap.parseParam("-l, --loader <STR>... Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: jsx, js, json, tsx (not implemented yet), ts (not implemented yet), css (not implemented yet)") catch unreachable,
clap.parseParam("-o, --outdir <STR> Save output to directory (default: \"out\" if none provided and multiple entry points passed)") catch unreachable,
clap.parseParam("-e, --external <STR>... Exclude module from transpilation (can use * wildcards). ex: -e react") catch unreachable,
clap.parseParam("-i, --inject <STR>... Inject module at the top of every file") catch unreachable,
clap.parseParam("--cwd <STR> Absolute path to resolve entry points from. Defaults to cwd") catch unreachable,
clap.parseParam("--public-url <STR> Rewrite import paths to start with --public-url. Useful for web browsers.") catch unreachable,
clap.parseParam("--serve Start a local dev server. This also sets resolve to \"lazy\".") catch unreachable,
clap.parseParam("--public-dir <STR> Top-level directory for .html files, fonts, images, or anything external. Only relevant with --serve. Defaults to \"<cwd>/public\", to match create-react-app and Next.js") catch unreachable,
clap.parseParam("--jsx-factory <STR> Changes the function called when compiling JSX elements using the classic JSX runtime") catch unreachable,
clap.parseParam("--jsx-fragment <STR> Changes the function called when compiling JSX fragments using the classic JSX runtime") catch unreachable,
clap.parseParam("--jsx-import-source <STR> Declares the module specifier to be used for importing the jsx and jsxs factory functions. Default: \"react\"") catch unreachable,
clap.parseParam("--jsx-runtime <STR> \"automatic\" (default) or \"classic\"") catch unreachable,
clap.parseParam("--jsx-production Use jsx instead of jsxDEV (default) for the automatic runtime") catch unreachable,
clap.parseParam("--extension-order <STR>... defaults to: .tsx,.ts,.jsx,.js,.json ") catch unreachable,
clap.parseParam("--react-fast-refresh Enable React Fast Refresh (not implemented yet)") catch unreachable,
clap.parseParam("--tsconfig-override <STR> Load tsconfig from path instead of cwd/tsconfig.json") catch unreachable,
clap.parseParam("--platform <STR> \"browser\" or \"node\". Defaults to \"browser\"") catch unreachable,
clap.parseParam("--main-fields <STR>... Main fields to lookup in package.json. Defaults to --platform dependent") catch unreachable,
clap.parseParam("<POS>... Entry points to use") catch unreachable,
};
var diag = clap.Diagnostic{};
var args = clap.parse(clap.Help, &params, .{ .diagnostic = &diag }) catch |err| {
// Report useful error and exit
diag.report(stderr.writer(), err) catch {};
return err;
};
if (args.flag("--help")) {
try clap.help(stderr.writer(), &params);
std.process.exit(1);
}
var cwd_paths = [_]string{args.option("--cwd") orelse try std.process.getCwdAlloc(allocator)};
var cwd = try std.fs.path.resolve(allocator, &cwd_paths);
var tsconfig_override = if (args.option("--tsconfig-override")) |ts| (Arguments.readFile(allocator, cwd, ts) catch |err| fileReadError(err, stderr, ts, "tsconfig.json")) else null;
var public_url = args.option("--public-url");
var defines_tuple = try DefineColonList.resolve(allocator, args.options("--define"));
var loader_tuple = try LoaderColonList.resolve(allocator, args.options("--define"));
var define_keys = defines_tuple.keys;
var define_values = defines_tuple.values;
var loader_keys = loader_tuple.keys;
var loader_values = loader_tuple.values;
var entry_points = args.positionals();
var inject = args.options("--inject");
var output_dir = args.option("--outdir");
const serve = args.flag("--serve");
var write = entry_points.len > 1 or output_dir != null;
if (write and output_dir == null) {
var _paths = [_]string{ cwd, "out" };
output_dir = try std.fs.path.resolve(allocator, &_paths);
}
var externals = std.mem.zeroes([][]u8);
if (args.options("--external").len > 0) {
externals = try allocator.alloc([]u8, args.options("--external").len);
for (args.options("--external")) |external, i| {
externals[i] = constStrToU8(external);
}
}
var jsx_factory = args.option("--jsx-factory");
var jsx_fragment = args.option("--jsx-fragment");
var jsx_import_source = args.option("--jsx-import-source");
var jsx_runtime = args.option("--jsx-runtime");
var jsx_production = args.flag("--jsx-production");
var react_fast_refresh = args.flag("--react-fast-refresh");
var main_fields = args.options("--main-fields");
const PlatformMatcher = strings.ExactSizeMatcher(8);
const ResoveMatcher = strings.ExactSizeMatcher(8);
var resolve = Api.ResolveMode.lazy;
if (args.option("--resolve")) |_resolve| {
switch (PlatformMatcher.match(_resolve)) {
PlatformMatcher.case("disable") => {
resolve = Api.ResolveMode.disable;
},
PlatformMatcher.case("bundle") => {
resolve = Api.ResolveMode.bundle;
},
PlatformMatcher.case("dev") => {
resolve = Api.ResolveMode.dev;
},
PlatformMatcher.case("lazy") => {
resolve = Api.ResolveMode.lazy;
},
else => {
diag.name.long = "--resolve";
diag.arg = _resolve;
try diag.report(stderr.writer(), error.InvalidResolveOption);
std.process.exit(1);
},
}
}
var platform: ?Api.Platform = null;
if (args.option("--platform")) |_platform| {
switch (PlatformMatcher.match(_platform)) {
PlatformMatcher.case("browser") => {
platform = Api.Platform.browser;
},
PlatformMatcher.case("node") => {
platform = Api.Platform.node;
},
else => {
diag.name.long = "--platform";
diag.arg = _platform;
try diag.report(stderr.writer(), error.InvalidPlatform);
std.process.exit(1);
},
}
}
var jsx: ?Api.Jsx = null;
if (jsx_factory != null or
jsx_fragment != null or
jsx_import_source != null or
jsx_runtime != null or
jsx_production or react_fast_refresh)
{
var default_factory = "".*;
var default_fragment = "".*;
var default_import_source = "".*;
jsx = Api.Jsx{
.factory = constStrToU8(jsx_factory orelse &default_factory),
.fragment = constStrToU8(jsx_fragment orelse &default_fragment),
.import_source = constStrToU8(jsx_import_source orelse &default_import_source),
.runtime = if (jsx_runtime != null) try resolve_jsx_runtime(jsx_runtime.?) else Api.JsxRuntime.automatic,
.development = !jsx_production,
.react_fast_refresh = react_fast_refresh,
};
}
if (entry_points.len == 0) {
try clap.help(stderr.writer(), &params);
try diag.report(stderr.writer(), error.MissingEntryPoint);
std.process.exit(1);
}
return Api.TransformOptions{
.jsx = jsx,
.output_dir = output_dir,
.resolve = resolve,
.external = externals,
.absolute_working_dir = cwd,
.tsconfig_override = tsconfig_override,
.public_url = public_url,
.define_keys = define_keys,
.define_values = define_values,
.loader_keys = loader_keys,
.loader_values = loader_values,
.public_dir = if (args.option("--public-dir")) |public_dir| allocator.dupe(u8, public_dir) catch unreachable else null,
.write = write,
.serve = serve,
.inject = inject,
.entry_points = entry_points,
.extension_order = args.options("--extension-order"),
.main_fields = args.options("--main-fields"),
.platform = platform,
};
}
};
pub fn resolve_jsx_runtime(str: string) !Api.JsxRuntime {
if (strings.eql(str, "automatic")) {
return Api.JsxRuntime.automatic;
} else if (strings.eql(str, "fallback")) {
return Api.JsxRuntime.classic;
} else {
return error.InvalidJSXRuntime;
}
}
pub fn startTransform(allocator: *std.mem.Allocator, args: Api.TransformOptions, log: *logger.Log) anyerror!void {}
pub fn start(allocator: *std.mem.Allocator, stdout: anytype, stderr: anytype, comptime MainPanicHandler: type) anyerror!void {
const start_time = std.time.nanoTimestamp();
var log = logger.Log.init(allocator);
var panicker = MainPanicHandler.init(&log);
MainPanicHandler.Singleton = &panicker;
var args = try Arguments.parse(alloc.static, stdout, stderr);
if (args.serve orelse false) {
try Server.start(allocator, args);
return;
}
var result: options.TransformResult = undefined;
switch (args.resolve orelse Api.ResolveMode.dev) {
Api.ResolveMode.disable => {
result = try bundler.Transformer.transform(
allocator,
&log,
args,
);
},
else => {
result = try bundler.Bundler.bundle(
allocator,
&log,
args,
);
},
}
var did_write = false;
var writer = stdout.writer();
if (args.write) |write| {
if (write) {
var open_file_limit: usize = 32;
if (std.os.getrlimit(.NOFILE)) |limit| {
open_file_limit = limit.cur;
} else |err| {}
const do_we_need_to_close = open_file_limit > result.output_files.len * 2;
did_write = true;
var root_dir = try std.fs.openDirAbsolute(result.outbase, std.fs.Dir.OpenDirOptions{});
defer {
if (do_we_need_to_close) {
root_dir.close();
}
}
for (result.output_files) |f| {
var fp = f.path;
if (fp[0] == std.fs.path.sep) {
fp = fp[1..];
}
var _handle = root_dir.createFile(fp, std.fs.File.CreateFlags{
.truncate = true,
}) catch |err| brk: {
// Only bother to create the directory if there's an error because that's probably why it errored
if (std.fs.path.dirname(fp)) |dirname| {
root_dir.makePath(dirname) catch {};
}
// Then, retry!
break :brk (root_dir.createFile(fp, std.fs.File.CreateFlags{
.truncate = true,
}) catch |err2| return err2);
};
try _handle.seekTo(0);
defer {
if (do_we_need_to_close) {
_handle.close();
}
}
try _handle.writeAll(f.contents);
}
var max_path_len: usize = 0;
var max_padded_size: usize = 0;
for (result.output_files) |file| {
max_path_len = std.math.max(file.path.len, max_path_len);
}
_ = try writer.write("\n");
for (result.output_files) |file| {
const padding_count = 2 + (max_path_len - file.path.len);
try writer.writeByteNTimes(' ', 2);
try writer.writeAll(file.path);
try writer.writeByteNTimes(' ', padding_count);
const size = @intToFloat(f64, file.contents.len) / 1000.0;
try std.fmt.formatFloatDecimal(size, .{ .precision = 2 }, writer);
try writer.writeAll(" KB\n");
}
}
}
if (isDebug) {
Output.println("Expr count: {d}", .{js_ast.Expr.icount});
Output.println("Stmt count: {d}", .{js_ast.Stmt.icount});
}
if (!did_write) {
for (result.output_files) |file, i| {
try writer.writeAll(file.contents);
if (i > 0) {
_ = try writer.write("\n\n");
}
}
}
var err_writer = stderr.writer();
for (result.errors) |err| {
try err.writeFormat(err_writer);
_ = try err_writer.write("\n");
}
for (result.warnings) |err| {
try err.writeFormat(err_writer);
_ = try err_writer.write("\n");
}
const duration = std.time.nanoTimestamp() - start_time;
if (did_write and duration < @as(i128, @as(i128, std.time.ns_per_s) * @as(i128, 2))) {
var elapsed = @divFloor(duration, @as(i128, std.time.ns_per_ms));
try writer.print("\nCompleted in {d}ms", .{elapsed});
}
}
};

File diff suppressed because it is too large Load Diff

View File

@@ -1,12 +1,313 @@
const std = @import("std");
const js_ast = @import("./js_ast.zig");
const alloc = @import("alloc.zig");
const logger = @import("logger.zig");
const js_lexer = @import("js_lexer.zig");
const json_parser = @import("json_parser.zig");
const fs = @import("fs.zig");
usingnamespace @import("global.zig");
usingnamespace @import("ast/base.zig");
const GlobalDefinesKey = @import("./defines-table.zig").GlobalDefinesKey;
pub const defaultIdentifierDefines = comptime {};
const Globals = struct {
pub const Undefined = js_ast.E.Undefined{};
pub const UndefinedPtr = &Globals.Undefined;
pub const IdentifierDefine = struct {};
pub const NaN = js_ast.E.Number{ .value = std.math.nan(f64) };
pub const NanPtr = &Globals.NaN;
pub const DotDefine = struct {};
pub const Infinity = js_ast.E.Number{ .value = std.math.inf(f64) };
pub const InfinityPtr = &Globals.Infinity;
pub const UndefinedData = js_ast.Expr.Data{ .e_undefined = Globals.UndefinedPtr };
pub const NaNData = js_ast.Expr.Data{ .e_number = Globals.NanPtr };
pub const InfinityData = js_ast.Expr.Data{ .e_number = Globals.InfinityPtr };
};
pub const Defines = struct {};
const defines_path = fs.Path.initWithNamespace("defines.json", "internal");
pub const RawDefines = std.StringHashMap(string);
pub const UserDefines = std.StringHashMap(DefineData);
pub const DefineData = struct {
value: js_ast.Expr.Data,
valueless: bool = false,
original_name: ?string = null,
// True if accessing this value is known to not have any side effects. For
// example, a bare reference to "Object.create" can be removed because it
// does not have any observable side effects.
can_be_removed_if_unused: bool = false,
// True if a call to this value is known to not have any side effects. For
// example, a bare call to "Object()" can be removed because it does not
// have any observable side effects.
call_can_be_unwrapped_if_unused: bool = false,
// All the globals have the same behavior.
// So we can create just one struct for it.
pub const GlobalDefineData = DefineData{};
pub fn isUndefined(self: *const DefineData) bool {
return self.valueless;
}
pub fn merge(a: DefineData, b: DefineData) DefineData {
return DefineData{
.value = b.value,
.can_be_removed_if_unused = a.can_be_removed_if_unused,
.call_can_be_unwrapped_if_unused = a.call_can_be_unwrapped_if_unused,
};
}
pub fn from_input(defines: RawDefines, log: *logger.Log, allocator: *std.mem.Allocator) !UserDefines {
var user_defines = UserDefines.init(allocator);
try user_defines.ensureCapacity(defines.count());
// var iter = defines.iterator();
// while (iter.next()) |entry| {
// var splitter = std.mem.split(entry.key, ".");
// while (splitter.next()) |part| {
// if (!js_lexer.isIdentifier(part)) {
// if (strings.eql(part, entry.key)) {
// try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" must be a valid identifier", .{entry.key});
// } else {
// try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.key });
// }
// break;
// }
// }
// if (js_lexer.isIdentifier(entry.value) and !js_lexer.Keywords.has(entry.value)) {
// var ident: *js_ast.E.Identifier = try allocator.create(js_ast.E.Identifier);
// ident.ref = Ref.None;
// ident.can_be_removed_if_unused = true;
// user_defines.putAssumeCapacity(
// entry.key,
// DefineData{
// .value = js_ast.Expr.Data{ .e_identifier = ident },
// .original_name = entry.value,
// .can_be_removed_if_unused = true,
// },
// );
// // user_defines.putAssumeCapacity(
// // entry.key,
// // DefineData{ .value = js_ast.Expr.Data{.e_identifier = } },
// // );
// continue;
// }
// var _log = log;
// var source = logger.Source{
// .contents = entry.value,
// .path = defines_path,
// .identifier_name = "defines",
// .key_path = fs.Path.initWithNamespace("defines", "internal"),
// };
// var expr = try json_parser.ParseJSON(&source, _log, allocator);
// var data: js_ast.Expr.Data = undefined;
// switch (expr.data) {
// .e_missing => {
// continue;
// },
// .e_null, .e_boolean, .e_string, .e_number, .e_object, .e_array => {
// data = expr.data;
// },
// else => {
// continue;
// },
// }
// user_defines.putAssumeCapacity(entry.key, DefineData{
// .value = data,
// });
// }
return user_defines;
}
};
fn arePartsEqual(a: []const string, b: []const string) bool {
if (a.len != b.len) {
return false;
}
var i: usize = 0;
while (i < a.len) : (i += 1) {
if (!strings.eql(a[i], b[i])) {
return false;
}
}
return true;
}
pub const IdentifierDefine = DefineData;
pub const DotDefine = struct {
parts: []const string,
data: DefineData,
};
pub const Define = struct {
identifiers: std.StringHashMap(IdentifierDefine),
dots: std.StringHashMap([]DotDefine),
allocator: *std.mem.Allocator,
pub fn init(allocator: *std.mem.Allocator, _user_defines: ?UserDefines) !*@This() {
var define = try allocator.create(Define);
define.allocator = allocator;
define.identifiers = std.StringHashMap(IdentifierDefine).init(allocator);
define.dots = std.StringHashMap([]DotDefine).init(allocator);
return define;
// try define.identifiers.ensureCapacity(641);
// try define.dots.ensureCapacity(64);
// var undefined_val = try allocator.create(js_ast.E.Undefined);
// var val = js_ast.Expr.Data{ .e_undefined = undefined_val };
// var ident_define = IdentifierDefine{
// .value = val,
// };
// var value_define = DefineData{ .value = val, .valueless = true };
// // Step 1. Load the globals into the hash tables
// for (GlobalDefinesKey) |global| {
// if (global.len == 1) {
// // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
// define.identifiers.putAssumeCapacity(global[0], value_define);
// } else {
// const key = global[global.len - 1];
// // TODO: move this to comptime
// // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
// if (define.dots.getEntry(key)) |entry| {
// var list = try std.ArrayList(DotDefine).initCapacity(allocator, entry.value.len + 1);
// list.appendSliceAssumeCapacity(entry.value);
// list.appendAssumeCapacity(DotDefine{
// .parts = global[0..global.len],
// .data = value_define,
// });
// define.dots.putAssumeCapacity(key, list.toOwnedSlice());
// } else {
// var list = try std.ArrayList(DotDefine).initCapacity(allocator, 1);
// list.appendAssumeCapacity(DotDefine{
// .parts = global[0..global.len],
// .data = value_define,
// });
// define.dots.putAssumeCapacity(key, list.toOwnedSlice());
// }
// }
// }
// var nan_val = try allocator.create(js_ast.E.Number);
// nan_val.value = std.math.nan_f64;
// var inf_val = try allocator.create(js_ast.E.Number);
// inf_val.value = std.math.inf_f64;
// // Step 2. Swap in certain literal values because those can be constant folded
// define.identifiers.putAssumeCapacity("undefined", value_define);
// define.identifiers.putAssumeCapacity("NaN", DefineData{
// .value = js_ast.Expr.Data{ .e_number = nan_val },
// });
// define.identifiers.putAssumeCapacity("Infinity", DefineData{
// .value = js_ast.Expr.Data{ .e_number = inf_val },
// });
// // Step 3. Load user data into hash tables
// // At this stage, user data has already been validated.
// if (_user_defines) |user_defines| {
// var iter = user_defines.iterator();
// while (iter.next()) |user_define| {
// // If it has a dot, then it's a DotDefine.
// // e.g. process.env.NODE_ENV
// if (strings.lastIndexOfChar(user_define.key, '.')) |last_dot| {
// const tail = user_define.key[last_dot + 1 .. user_define.key.len];
// const remainder = user_define.key[0..last_dot];
// const count = std.mem.count(u8, remainder, ".") + 1;
// var parts = try allocator.alloc(string, count + 1);
// var splitter = std.mem.split(remainder, ".");
// var i: usize = 0;
// while (splitter.next()) |split| : (i += 1) {
// parts[i] = split;
// }
// parts[i] = tail;
// var didFind = false;
// var initial_values: []DotDefine = &([_]DotDefine{});
// // "NODE_ENV"
// if (define.dots.getEntry(tail)) |entry| {
// for (entry.value) |*part| {
// // ["process", "env"] === ["process", "env"] (if that actually worked)
// if (arePartsEqual(part.parts, parts)) {
// part.data = part.data.merge(user_define.value);
// didFind = true;
// break;
// }
// }
// initial_values = entry.value;
// }
// if (!didFind) {
// var list = try std.ArrayList(DotDefine).initCapacity(allocator, initial_values.len + 1);
// if (initial_values.len > 0) {
// list.appendSliceAssumeCapacity(initial_values);
// }
// list.appendAssumeCapacity(DotDefine{
// .data = user_define.value,
// // TODO: do we need to allocate this?
// .parts = parts,
// });
// try define.dots.put(tail, list.toOwnedSlice());
// }
// } else {
// // e.g. IS_BROWSER
// try define.identifiers.put(user_define.key, user_define.value);
// }
// }
// }
// return define;
}
};
const expect = std.testing.expect;
test "UserDefines" {
try alloc.setup(std.heap.page_allocator);
var orig = RawDefines.init(alloc.dynamic);
try orig.put("process.env.NODE_ENV", "\"development\"");
try orig.put("globalThis", "window");
var log = logger.Log.init(alloc.dynamic);
var data = try DefineData.from_input(orig, &log, alloc.dynamic);
expect(data.contains("process.env.NODE_ENV"));
expect(data.contains("globalThis"));
const globalThis = data.get("globalThis");
const val = data.get("process.env.NODE_ENV");
expect(val != null);
expect(strings.utf16EqlString(val.?.value.e_string.value, "development"));
std.testing.expectEqualStrings(globalThis.?.original_name.?, "window");
}
// 396,000ns was upper end of last time this was checked how long it took
// => 0.396ms
test "Defines" {
try alloc.setup(std.heap.page_allocator);
const start = std.time.nanoTimestamp();
var orig = RawDefines.init(alloc.dynamic);
try orig.put("process.env.NODE_ENV", "\"development\"");
var log = logger.Log.init(alloc.dynamic);
var data = try DefineData.from_input(orig, &log, alloc.dynamic);
var defines = try Define.init(alloc.dynamic, data);
Output.print("Time: {d}", .{std.time.nanoTimestamp() - start});
const node_env_dots = defines.dots.get("NODE_ENV");
expect(node_env_dots != null);
expect(node_env_dots.?.len > 0);
const node_env = node_env_dots.?[0];
std.testing.expectEqual(node_env.parts.len, 2);
std.testing.expectEqualStrings("process", node_env.parts[0]);
std.testing.expectEqualStrings("env", node_env.parts[1]);
expect(node_env.data.original_name == null);
expect(strings.utf16EqlString(node_env.data.value.e_string.value, "development"));
}

202
src/deps/picohttp.zig Normal file
View File

@@ -0,0 +1,202 @@
const std = @import("std");
const c = @cImport(@cInclude("picohttpparser.h"));
const ExactSizeMatcher = @import("../exact_size_matcher.zig").ExactSizeMatcher;
const Match = ExactSizeMatcher(2);
const fmt = std.fmt;
const assert = std.debug.assert;
pub const Header = struct {
name: []const u8,
value: []const u8,
pub fn isMultiline(self: Header) bool {
return @ptrToInt(self.name.ptr) == 0;
}
pub fn format(self: Header, comptime layout: []const u8, opts: fmt.FormatOptions, writer: anytype) !void {
if (self.isMultiline()) {
try fmt.format(writer, "{s}", .{self.value});
} else {
try fmt.format(writer, "{s}: {s}", .{ self.name, self.value });
}
}
comptime {
assert(@sizeOf(Header) == @sizeOf(c.phr_header));
assert(@alignOf(Header) == @alignOf(c.phr_header));
}
};
pub const Request = struct {
method: []const u8,
path: []const u8,
minor_version: usize,
headers: []const Header,
pub fn parse(buf: []const u8, src: []Header) !Request {
var method: []const u8 = undefined;
var path: []const u8 = undefined;
var minor_version: c_int = undefined;
var num_headers: usize = src.len;
const rc = c.phr_parse_request(
buf.ptr,
buf.len,
@ptrCast([*c][*c]const u8, &method.ptr),
&method.len,
@ptrCast([*c][*c]const u8, &path.ptr),
&path.len,
&minor_version,
@ptrCast([*c]c.phr_header, src.ptr),
&num_headers,
0,
);
return switch (rc) {
-1 => error.BadRequest,
-2 => error.ShortRead,
else => |bytes_read| Request{
.method = method,
.path = path,
.minor_version = @intCast(usize, minor_version),
.headers = src[0..num_headers],
},
};
}
};
test "pico_http: parse request" {
const REQ = "GET /wp-content/uploads/2010/03/hello-kitty-darth-vader-pink.jpg HTTP/1.1\r\n" ++
"Host: www.kittyhell.com\r\n" ++
"User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; ja-JP-mac; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 " ++
"Pathtraq/0.9\r\n" ++
"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n" ++
"Accept-Language: ja,en-us;q=0.7,en;q=0.3\r\n" ++
"Accept-Encoding: gzip,deflate\r\n" ++
"Accept-Charset: Shift_JIS,utf-8;q=0.7,*;q=0.7\r\n" ++
"Keep-Alive: 115\r\n" ++
"Connection: keep-alive\r\n" ++
"TestMultiline: Hello world\r\n" ++
" This is a second line in the header!\r\n" ++
"Cookie: wp_ozh_wsa_visits=2; wp_ozh_wsa_visit_lasttime=xxxxxxxxxx; " ++
"__utma=xxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.x; " ++
"__utmz=xxxxxxxxx.xxxxxxxxxx.x.x.utmccn=(referral)|utmcsr=reader.livedoor.com|utmcct=/reader/|utmcmd=referral\r\n" ++
"\r\n";
var headers: [32]Header = undefined;
const req = try Request.parse(REQ, &headers);
std.debug.print("Method: {s}\n", .{req.method});
std.debug.print("Path: {s}\n", .{req.path});
std.debug.print("Minor Version: {}\n", .{req.minor_version});
for (req.headers) |header| {
std.debug.print("{}\n", .{header});
}
}
pub const Response = struct {
minor_version: usize,
status_code: usize,
status: []const u8,
headers: []const Header,
pub fn parse(buf: []const u8, src: []Header) !Response {
var minor_version: c_int = undefined;
var status_code: c_int = undefined;
var status: []const u8 = undefined;
var num_headers: usize = src.len;
const rc = c.phr_parse_response(
buf.ptr,
buf.len,
&minor_version,
&status_code,
@ptrCast([*c][*c]const u8, &status.ptr),
&status.len,
@ptrCast([*c]c.phr_header, src.ptr),
&num_headers,
0,
);
return switch (rc) {
-1 => error.BadResponse,
-2 => error.ShortRead,
else => |bytes_read| Response{
.minor_version = @intCast(usize, minor_version),
.status_code = @intCast(usize, status_code),
.status = status,
.headers = src[0..num_headers],
},
};
}
};
test "pico_http: parse response" {
const RES = "HTTP/1.1 200 OK\r\n" ++
"Date: Mon, 22 Mar 2021 08:15:54 GMT\r\n" ++
"Content-Type: text/html; charset=utf-8\r\n" ++
"Content-Length: 9593\r\n" ++
"Connection: keep-alive\r\n" ++
"Server: gunicorn/19.9.0\r\n" ++
"Access-Control-Allow-Origin: *\r\n" ++
"Access-Control-Allow-Credentials: true\r\n" ++
"\r\n";
var headers: [32]Header = undefined;
const res = try Response.parse(RES, &headers);
std.debug.print("Minor Version: {}\n", .{res.minor_version});
std.debug.print("Status Code: {}\n", .{res.status_code});
std.debug.print("Status: {s}\n", .{res.status});
for (res.headers) |header| {
std.debug.print("{}\n", .{header});
}
}
pub const Headers = struct {
headers: []const Header,
pub fn parse(buf: []const u8, src: []Header) !Headers {
var num_headers: usize = src.len;
const rc = c.phr_parse_headers(
buf.ptr,
buf.len,
@ptrCast([*c]c.phr_header, src.ptr),
@ptrCast([*c]usize, &num_headers),
0,
);
return switch (rc) {
-1 => error.BadHeaders,
-2 => error.ShortRead,
else => |bytes_read| Headers{
.headers = src[0..num_headers],
},
};
}
};
test "pico_http: parse headers" {
const HEADERS = "Date: Mon, 22 Mar 2021 08:15:54 GMT\r\n" ++
"Content-Type: text/html; charset=utf-8\r\n" ++
"Content-Length: 9593\r\n" ++
"Connection: keep-alive\r\n" ++
"Server: gunicorn/19.9.0\r\n" ++
"Access-Control-Allow-Origin: *\r\n" ++
"Access-Control-Allow-Credentials: true\r\n" ++
"\r\n";
var headers: [32]Header = undefined;
const result = try Headers.parse(HEADERS, &headers);
for (result.headers) |header| {
std.debug.print("{}\n", .{header});
}
}

665
src/deps/picohttpparser.c Normal file
View File

@@ -0,0 +1,665 @@
/*
* Copyright (c) 2009-2014 Kazuho Oku, Tokuhiro Matsuno, Daisuke Murase,
* Shigeo Mitsunari
*
* The software is licensed under either the MIT License (below) or the Perl
* license.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#include <assert.h>
#include <stddef.h>
#include <string.h>
#ifdef __SSE4_2__
#ifdef _MSC_VER
#include <nmmintrin.h>
#else
#include <x86intrin.h>
#endif
#endif
#include "picohttpparser.h"
#if __GNUC__ >= 3
#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
#else
#define likely(x) (x)
#define unlikely(x) (x)
#endif
#ifdef _MSC_VER
#define ALIGNED(n) _declspec(align(n))
#else
#define ALIGNED(n) __attribute__((aligned(n)))
#endif
#define IS_PRINTABLE_ASCII(c) ((unsigned char)(c)-040u < 0137u)
#define CHECK_EOF() \
if (buf == buf_end) { \
*ret = -2; \
return NULL; \
}
#define EXPECT_CHAR_NO_CHECK(ch) \
if (*buf++ != ch) { \
*ret = -1; \
return NULL; \
}
#define EXPECT_CHAR(ch) \
CHECK_EOF(); \
EXPECT_CHAR_NO_CHECK(ch);
#define ADVANCE_TOKEN(tok, toklen) \
do { \
const char *tok_start = buf; \
static const char ALIGNED(16) ranges2[16] = "\000\040\177\177"; \
int found2; \
buf = findchar_fast(buf, buf_end, ranges2, 4, &found2); \
if (!found2) { \
CHECK_EOF(); \
} \
while (1) { \
if (*buf == ' ') { \
break; \
} else if (unlikely(!IS_PRINTABLE_ASCII(*buf))) { \
if ((unsigned char)*buf < '\040' || *buf == '\177') { \
*ret = -1; \
return NULL; \
} \
} \
++buf; \
CHECK_EOF(); \
} \
tok = tok_start; \
toklen = buf - tok_start; \
} while (0)
static const char *token_char_map = "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\1\0\1\1\1\1\1\0\0\1\1\0\1\1\0\1\1\1\1\1\1\1\1\1\1\0\0\0\0\0\0"
"\0\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\0\0\0\1\1"
"\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\0\1\0\1\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0";
static const char *findchar_fast(const char *buf, const char *buf_end, const char *ranges, size_t ranges_size, int *found)
{
*found = 0;
#if __SSE4_2__
if (likely(buf_end - buf >= 16)) {
__m128i ranges16 = _mm_loadu_si128((const __m128i *)ranges);
size_t left = (buf_end - buf) & ~15;
do {
__m128i b16 = _mm_loadu_si128((const __m128i *)buf);
int r = _mm_cmpestri(ranges16, ranges_size, b16, 16, _SIDD_LEAST_SIGNIFICANT | _SIDD_CMP_RANGES | _SIDD_UBYTE_OPS);
if (unlikely(r != 16)) {
buf += r;
*found = 1;
break;
}
buf += 16;
left -= 16;
} while (likely(left != 0));
}
#else
/* suppress unused parameter warning */
(void)buf_end;
(void)ranges;
(void)ranges_size;
#endif
return buf;
}
static const char *get_token_to_eol(const char *buf, const char *buf_end, const char **token, size_t *token_len, int *ret)
{
const char *token_start = buf;
#ifdef __SSE4_2__
static const char ALIGNED(16) ranges1[16] = "\0\010" /* allow HT */
"\012\037" /* allow SP and up to but not including DEL */
"\177\177"; /* allow chars w. MSB set */
int found;
buf = findchar_fast(buf, buf_end, ranges1, 6, &found);
if (found)
goto FOUND_CTL;
#else
/* find non-printable char within the next 8 bytes, this is the hottest code; manually inlined */
while (likely(buf_end - buf >= 8)) {
#define DOIT() \
do { \
if (unlikely(!IS_PRINTABLE_ASCII(*buf))) \
goto NonPrintable; \
++buf; \
} while (0)
DOIT();
DOIT();
DOIT();
DOIT();
DOIT();
DOIT();
DOIT();
DOIT();
#undef DOIT
continue;
NonPrintable:
if ((likely((unsigned char)*buf < '\040') && likely(*buf != '\011')) || unlikely(*buf == '\177')) {
goto FOUND_CTL;
}
++buf;
}
#endif
for (;; ++buf) {
CHECK_EOF();
if (unlikely(!IS_PRINTABLE_ASCII(*buf))) {
if ((likely((unsigned char)*buf < '\040') && likely(*buf != '\011')) || unlikely(*buf == '\177')) {
goto FOUND_CTL;
}
}
}
FOUND_CTL:
if (likely(*buf == '\015')) {
++buf;
EXPECT_CHAR('\012');
*token_len = buf - 2 - token_start;
} else if (*buf == '\012') {
*token_len = buf - token_start;
++buf;
} else {
*ret = -1;
return NULL;
}
*token = token_start;
return buf;
}
static const char *is_complete(const char *buf, const char *buf_end, size_t last_len, int *ret)
{
int ret_cnt = 0;
buf = last_len < 3 ? buf : buf + last_len - 3;
while (1) {
CHECK_EOF();
if (*buf == '\015') {
++buf;
CHECK_EOF();
EXPECT_CHAR('\012');
++ret_cnt;
} else if (*buf == '\012') {
++buf;
++ret_cnt;
} else {
++buf;
ret_cnt = 0;
}
if (ret_cnt == 2) {
return buf;
}
}
*ret = -2;
return NULL;
}
#define PARSE_INT(valp_, mul_) \
if (*buf < '0' || '9' < *buf) { \
buf++; \
*ret = -1; \
return NULL; \
} \
*(valp_) = (mul_) * (*buf++ - '0');
#define PARSE_INT_3(valp_) \
do { \
int res_ = 0; \
PARSE_INT(&res_, 100) \
*valp_ = res_; \
PARSE_INT(&res_, 10) \
*valp_ += res_; \
PARSE_INT(&res_, 1) \
*valp_ += res_; \
} while (0)
/* returned pointer is always within [buf, buf_end), or null */
static const char *parse_token(const char *buf, const char *buf_end, const char **token, size_t *token_len, char next_char,
int *ret)
{
/* We use pcmpestri to detect non-token characters. This instruction can take no more than eight character ranges (8*2*8=128
* bits that is the size of a SSE register). Due to this restriction, characters `|` and `~` are handled in the slow loop. */
static const char ALIGNED(16) ranges[] = "\x00 " /* control chars and up to SP */
"\"\"" /* 0x22 */
"()" /* 0x28,0x29 */
",," /* 0x2c */
"//" /* 0x2f */
":@" /* 0x3a-0x40 */
"[]" /* 0x5b-0x5d */
"{\xff"; /* 0x7b-0xff */
const char *buf_start = buf;
int found;
buf = findchar_fast(buf, buf_end, ranges, sizeof(ranges) - 1, &found);
if (!found) {
CHECK_EOF();
}
while (1) {
if (*buf == next_char) {
break;
} else if (!token_char_map[(unsigned char)*buf]) {
*ret = -1;
return NULL;
}
++buf;
CHECK_EOF();
}
*token = buf_start;
*token_len = buf - buf_start;
return buf;
}
/* returned pointer is always within [buf, buf_end), or null */
static const char *parse_http_version(const char *buf, const char *buf_end, int *minor_version, int *ret)
{
/* we want at least [HTTP/1.<two chars>] to try to parse */
if (buf_end - buf < 9) {
*ret = -2;
return NULL;
}
EXPECT_CHAR_NO_CHECK('H');
EXPECT_CHAR_NO_CHECK('T');
EXPECT_CHAR_NO_CHECK('T');
EXPECT_CHAR_NO_CHECK('P');
EXPECT_CHAR_NO_CHECK('/');
EXPECT_CHAR_NO_CHECK('1');
EXPECT_CHAR_NO_CHECK('.');
PARSE_INT(minor_version, 1);
return buf;
}
static const char *parse_headers(const char *buf, const char *buf_end, struct phr_header *headers, size_t *num_headers,
size_t max_headers, int *ret)
{
for (;; ++*num_headers) {
CHECK_EOF();
if (*buf == '\015') {
++buf;
EXPECT_CHAR('\012');
break;
} else if (*buf == '\012') {
++buf;
break;
}
if (*num_headers == max_headers) {
*ret = -1;
return NULL;
}
if (!(*num_headers != 0 && (*buf == ' ' || *buf == '\t'))) {
/* parsing name, but do not discard SP before colon, see
* http://www.mozilla.org/security/announce/2006/mfsa2006-33.html */
if ((buf = parse_token(buf, buf_end, &headers[*num_headers].name, &headers[*num_headers].name_len, ':', ret)) == NULL) {
return NULL;
}
if (headers[*num_headers].name_len == 0) {
*ret = -1;
return NULL;
}
++buf;
for (;; ++buf) {
CHECK_EOF();
if (!(*buf == ' ' || *buf == '\t')) {
break;
}
}
} else {
headers[*num_headers].name = NULL;
headers[*num_headers].name_len = 0;
}
const char *value;
size_t value_len;
if ((buf = get_token_to_eol(buf, buf_end, &value, &value_len, ret)) == NULL) {
return NULL;
}
/* remove trailing SPs and HTABs */
const char *value_end = value + value_len;
for (; value_end != value; --value_end) {
const char c = *(value_end - 1);
if (!(c == ' ' || c == '\t')) {
break;
}
}
headers[*num_headers].value = value;
headers[*num_headers].value_len = value_end - value;
}
return buf;
}
static const char *parse_request(const char *buf, const char *buf_end, const char **method, size_t *method_len, const char **path,
size_t *path_len, int *minor_version, struct phr_header *headers, size_t *num_headers,
size_t max_headers, int *ret)
{
/* skip first empty line (some clients add CRLF after POST content) */
CHECK_EOF();
if (*buf == '\015') {
++buf;
EXPECT_CHAR('\012');
} else if (*buf == '\012') {
++buf;
}
/* parse request line */
if ((buf = parse_token(buf, buf_end, method, method_len, ' ', ret)) == NULL) {
return NULL;
}
do {
++buf;
CHECK_EOF();
} while (*buf == ' ');
ADVANCE_TOKEN(*path, *path_len);
do {
++buf;
CHECK_EOF();
} while (*buf == ' ');
if (*method_len == 0 || *path_len == 0) {
*ret = -1;
return NULL;
}
if ((buf = parse_http_version(buf, buf_end, minor_version, ret)) == NULL) {
return NULL;
}
if (*buf == '\015') {
++buf;
EXPECT_CHAR('\012');
} else if (*buf == '\012') {
++buf;
} else {
*ret = -1;
return NULL;
}
return parse_headers(buf, buf_end, headers, num_headers, max_headers, ret);
}
int phr_parse_request(const char *buf_start, size_t len, const char **method, size_t *method_len, const char **path,
size_t *path_len, int *minor_version, struct phr_header *headers, size_t *num_headers, size_t last_len)
{
const char *buf = buf_start, *buf_end = buf_start + len;
size_t max_headers = *num_headers;
int r;
*method = NULL;
*method_len = 0;
*path = NULL;
*path_len = 0;
*minor_version = -1;
*num_headers = 0;
/* if last_len != 0, check if the request is complete (a fast countermeasure
againt slowloris */
if (last_len != 0 && is_complete(buf, buf_end, last_len, &r) == NULL) {
return r;
}
if ((buf = parse_request(buf, buf_end, method, method_len, path, path_len, minor_version, headers, num_headers, max_headers,
&r)) == NULL) {
return r;
}
return (int)(buf - buf_start);
}
static const char *parse_response(const char *buf, const char *buf_end, int *minor_version, int *status, const char **msg,
size_t *msg_len, struct phr_header *headers, size_t *num_headers, size_t max_headers, int *ret)
{
/* parse "HTTP/1.x" */
if ((buf = parse_http_version(buf, buf_end, minor_version, ret)) == NULL) {
return NULL;
}
/* skip space */
if (*buf != ' ') {
*ret = -1;
return NULL;
}
do {
++buf;
CHECK_EOF();
} while (*buf == ' ');
/* parse status code, we want at least [:digit:][:digit:][:digit:]<other char> to try to parse */
if (buf_end - buf < 4) {
*ret = -2;
return NULL;
}
PARSE_INT_3(status);
/* get message including preceding space */
if ((buf = get_token_to_eol(buf, buf_end, msg, msg_len, ret)) == NULL) {
return NULL;
}
if (*msg_len == 0) {
/* ok */
} else if (**msg == ' ') {
/* Remove preceding space. Successful return from `get_token_to_eol` guarantees that we would hit something other than SP
* before running past the end of the given buffer. */
do {
++*msg;
--*msg_len;
} while (**msg == ' ');
} else {
/* garbage found after status code */
*ret = -1;
return NULL;
}
return parse_headers(buf, buf_end, headers, num_headers, max_headers, ret);
}
int phr_parse_response(const char *buf_start, size_t len, int *minor_version, int *status, const char **msg, size_t *msg_len,
struct phr_header *headers, size_t *num_headers, size_t last_len)
{
const char *buf = buf_start, *buf_end = buf + len;
size_t max_headers = *num_headers;
int r;
*minor_version = -1;
*status = 0;
*msg = NULL;
*msg_len = 0;
*num_headers = 0;
/* if last_len != 0, check if the response is complete (a fast countermeasure
against slowloris */
if (last_len != 0 && is_complete(buf, buf_end, last_len, &r) == NULL) {
return r;
}
if ((buf = parse_response(buf, buf_end, minor_version, status, msg, msg_len, headers, num_headers, max_headers, &r)) == NULL) {
return r;
}
return (int)(buf - buf_start);
}
int phr_parse_headers(const char *buf_start, size_t len, struct phr_header *headers, size_t *num_headers, size_t last_len)
{
const char *buf = buf_start, *buf_end = buf + len;
size_t max_headers = *num_headers;
int r;
*num_headers = 0;
/* if last_len != 0, check if the response is complete (a fast countermeasure
against slowloris */
if (last_len != 0 && is_complete(buf, buf_end, last_len, &r) == NULL) {
return r;
}
if ((buf = parse_headers(buf, buf_end, headers, num_headers, max_headers, &r)) == NULL) {
return r;
}
return (int)(buf - buf_start);
}
enum {
CHUNKED_IN_CHUNK_SIZE,
CHUNKED_IN_CHUNK_EXT,
CHUNKED_IN_CHUNK_DATA,
CHUNKED_IN_CHUNK_CRLF,
CHUNKED_IN_TRAILERS_LINE_HEAD,
CHUNKED_IN_TRAILERS_LINE_MIDDLE
};
static int decode_hex(int ch)
{
if ('0' <= ch && ch <= '9') {
return ch - '0';
} else if ('A' <= ch && ch <= 'F') {
return ch - 'A' + 0xa;
} else if ('a' <= ch && ch <= 'f') {
return ch - 'a' + 0xa;
} else {
return -1;
}
}
ssize_t phr_decode_chunked(struct phr_chunked_decoder *decoder, char *buf, size_t *_bufsz)
{
size_t dst = 0, src = 0, bufsz = *_bufsz;
ssize_t ret = -2; /* incomplete */
while (1) {
switch (decoder->_state) {
case CHUNKED_IN_CHUNK_SIZE:
for (;; ++src) {
int v;
if (src == bufsz)
goto Exit;
if ((v = decode_hex(buf[src])) == -1) {
if (decoder->_hex_count == 0) {
ret = -1;
goto Exit;
}
break;
}
if (decoder->_hex_count == sizeof(size_t) * 2) {
ret = -1;
goto Exit;
}
decoder->bytes_left_in_chunk = decoder->bytes_left_in_chunk * 16 + v;
++decoder->_hex_count;
}
decoder->_hex_count = 0;
decoder->_state = CHUNKED_IN_CHUNK_EXT;
/* fallthru */
case CHUNKED_IN_CHUNK_EXT:
/* RFC 7230 A.2 "Line folding in chunk extensions is disallowed" */
for (;; ++src) {
if (src == bufsz)
goto Exit;
if (buf[src] == '\012')
break;
}
++src;
if (decoder->bytes_left_in_chunk == 0) {
if (decoder->consume_trailer) {
decoder->_state = CHUNKED_IN_TRAILERS_LINE_HEAD;
break;
} else {
goto Complete;
}
}
decoder->_state = CHUNKED_IN_CHUNK_DATA;
/* fallthru */
case CHUNKED_IN_CHUNK_DATA: {
size_t avail = bufsz - src;
if (avail < decoder->bytes_left_in_chunk) {
if (dst != src)
memmove(buf + dst, buf + src, avail);
src += avail;
dst += avail;
decoder->bytes_left_in_chunk -= avail;
goto Exit;
}
if (dst != src)
memmove(buf + dst, buf + src, decoder->bytes_left_in_chunk);
src += decoder->bytes_left_in_chunk;
dst += decoder->bytes_left_in_chunk;
decoder->bytes_left_in_chunk = 0;
decoder->_state = CHUNKED_IN_CHUNK_CRLF;
}
/* fallthru */
case CHUNKED_IN_CHUNK_CRLF:
for (;; ++src) {
if (src == bufsz)
goto Exit;
if (buf[src] != '\015')
break;
}
if (buf[src] != '\012') {
ret = -1;
goto Exit;
}
++src;
decoder->_state = CHUNKED_IN_CHUNK_SIZE;
break;
case CHUNKED_IN_TRAILERS_LINE_HEAD:
for (;; ++src) {
if (src == bufsz)
goto Exit;
if (buf[src] != '\015')
break;
}
if (buf[src++] == '\012')
goto Complete;
decoder->_state = CHUNKED_IN_TRAILERS_LINE_MIDDLE;
/* fallthru */
case CHUNKED_IN_TRAILERS_LINE_MIDDLE:
for (;; ++src) {
if (src == bufsz)
goto Exit;
if (buf[src] == '\012')
break;
}
++src;
decoder->_state = CHUNKED_IN_TRAILERS_LINE_HEAD;
break;
default:
assert(!"decoder is corrupt");
}
}
Complete:
ret = bufsz - src;
Exit:
if (dst != src)
memmove(buf + dst, buf + src, bufsz - src);
*_bufsz = dst;
return ret;
}
int phr_decode_chunked_is_in_data(struct phr_chunked_decoder *decoder)
{
return decoder->_state == CHUNKED_IN_CHUNK_DATA;
}
#undef CHECK_EOF
#undef EXPECT_CHAR
#undef ADVANCE_TOKEN

87
src/deps/picohttpparser.h Normal file
View File

@@ -0,0 +1,87 @@
/*
* Copyright (c) 2009-2014 Kazuho Oku, Tokuhiro Matsuno, Daisuke Murase,
* Shigeo Mitsunari
*
* The software is licensed under either the MIT License (below) or the Perl
* license.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef picohttpparser_h
#define picohttpparser_h
#include <sys/types.h>
#ifdef _MSC_VER
#define ssize_t intptr_t
#endif
#ifdef __cplusplus
extern "C" {
#endif
/* contains name and value of a header (name == NULL if is a continuing line
* of a multiline header */
struct phr_header {
const char *name;
size_t name_len;
const char *value;
size_t value_len;
};
/* returns number of bytes consumed if successful, -2 if request is partial,
* -1 if failed */
int phr_parse_request(const char *buf, size_t len, const char **method, size_t *method_len, const char **path, size_t *path_len,
int *minor_version, struct phr_header *headers, size_t *num_headers, size_t last_len);
/* ditto */
int phr_parse_response(const char *_buf, size_t len, int *minor_version, int *status, const char **msg, size_t *msg_len,
struct phr_header *headers, size_t *num_headers, size_t last_len);
/* ditto */
int phr_parse_headers(const char *buf, size_t len, struct phr_header *headers, size_t *num_headers, size_t last_len);
/* should be zero-filled before start */
struct phr_chunked_decoder {
size_t bytes_left_in_chunk; /* number of bytes left in current chunk */
char consume_trailer; /* if trailing headers should be consumed */
char _hex_count;
char _state;
};
/* the function rewrites the buffer given as (buf, bufsz) removing the chunked-
* encoding headers. When the function returns without an error, bufsz is
* updated to the length of the decoded data available. Applications should
* repeatedly call the function while it returns -2 (incomplete) every time
* supplying newly arrived data. If the end of the chunked-encoded data is
* found, the function returns a non-negative number indicating the number of
* octets left undecoded, that starts from the offset returned by `*bufsz`.
* Returns -1 on error.
*/
ssize_t phr_decode_chunked(struct phr_chunked_decoder *decoder, char *buf, size_t *bufsz);
/* returns if the chunked decoder is in middle of chunked data */
int phr_decode_chunked_is_in_data(struct phr_chunked_decoder *decoder);
#ifdef __cplusplus
}
#endif
#endif

1
src/deps/zig-clap/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
zig-cache

24
src/deps/zig-clap/LICENSE Normal file
View File

@@ -0,0 +1,24 @@
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <http://unlicense.org>

View File

@@ -0,0 +1,55 @@
const builtin = @import("builtin");
const std = @import("std");
const Builder = std.build.Builder;
const Mode = std.builtin.Mode;
pub fn build(b: *Builder) void {
const mode = b.standardReleaseOptions();
const target = b.standardTargetOptions(.{});
const test_all_step = b.step("test", "Run all tests in all modes.");
inline for ([_]Mode{ Mode.Debug, Mode.ReleaseFast, Mode.ReleaseSafe, Mode.ReleaseSmall }) |test_mode| {
const mode_str = comptime modeToString(test_mode);
const tests = b.addTest("clap.zig");
tests.setBuildMode(test_mode);
tests.setTarget(target);
tests.setNamePrefix(mode_str ++ " ");
const test_step = b.step("test-" ++ mode_str, "Run all tests in " ++ mode_str ++ ".");
test_step.dependOn(&tests.step);
test_all_step.dependOn(test_step);
}
const example_step = b.step("examples", "Build examples");
inline for ([_][]const u8{
"simple",
"simple-ex",
//"simple-error",
"streaming-clap",
"help",
"usage",
}) |example_name| {
const example = b.addExecutable(example_name, "example/" ++ example_name ++ ".zig");
example.addPackagePath("clap", "clap.zig");
example.setBuildMode(mode);
example.setTarget(target);
example.install();
example_step.dependOn(&example.step);
}
const all_step = b.step("all", "Build everything and runs all tests");
all_step.dependOn(test_all_step);
b.default_step.dependOn(all_step);
}
fn modeToString(mode: Mode) []const u8 {
return switch (mode) {
Mode.Debug => "debug",
Mode.ReleaseFast => "release-fast",
Mode.ReleaseSafe => "release-safe",
Mode.ReleaseSmall => "release-small",
};
}

608
src/deps/zig-clap/clap.zig Normal file
View File

@@ -0,0 +1,608 @@
const std = @import("std");
const debug = std.debug;
const heap = std.heap;
const io = std.io;
const mem = std.mem;
const testing = std.testing;
pub const args = @import("clap/args.zig");
test "clap" {
testing.refAllDecls(@This());
}
pub const ComptimeClap = @import("clap/comptime.zig").ComptimeClap;
pub const StreamingClap = @import("clap/streaming.zig").StreamingClap;
/// The names a ::Param can have.
pub const Names = struct {
/// '-' prefix
short: ?u8 = null,
/// '--' prefix
long: ?[]const u8 = null,
};
/// Whether a param takes no value (a flag), one value, or can be specified multiple times.
pub const Values = enum {
none,
one,
many,
};
/// Represents a parameter for the command line.
/// Parameters come in three kinds:
/// * Short ("-a"): Should be used for the most commonly used parameters in your program.
/// * They can take a value three different ways.
/// * "-a value"
/// * "-a=value"
/// * "-avalue"
/// * They chain if they don't take values: "-abc".
/// * The last given parameter can take a value in the same way that a single parameter can:
/// * "-abc value"
/// * "-abc=value"
/// * "-abcvalue"
/// * Long ("--long-param"): Should be used for less common parameters, or when no single character
/// can describe the paramter.
/// * They can take a value two different ways.
/// * "--long-param value"
/// * "--long-param=value"
/// * Positional: Should be used as the primary parameter of the program, like a filename or
/// an expression to parse.
/// * Positional parameters have both names.long and names.short == null.
/// * Positional parameters must take a value.
pub fn Param(comptime Id: type) type {
return struct {
id: Id = Id{},
names: Names = Names{},
takes_value: Values = .none,
};
}
/// Takes a string and parses it to a Param(Help).
/// This is the reverse of 'help' but for at single parameter only.
pub fn parseParam(line: []const u8) !Param(Help) {
var found_comma = false;
var it = mem.tokenize(line, " \t");
var param_str = it.next() orelse return error.NoParamFound;
const short_name = if (!mem.startsWith(u8, param_str, "--") and
mem.startsWith(u8, param_str, "-"))
blk: {
found_comma = param_str[param_str.len - 1] == ',';
if (found_comma)
param_str = param_str[0 .. param_str.len - 1];
if (param_str.len != 2)
return error.InvalidShortParam;
const short_name = param_str[1];
if (!found_comma) {
var res = parseParamRest(it.rest());
res.names.short = short_name;
return res;
}
param_str = it.next() orelse return error.NoParamFound;
break :blk short_name;
} else null;
const long_name = if (mem.startsWith(u8, param_str, "--")) blk: {
if (param_str[param_str.len - 1] == ',')
return error.TrailingComma;
break :blk param_str[2..];
} else if (found_comma) {
return error.TrailingComma;
} else if (short_name == null) {
return parseParamRest(mem.trimLeft(u8, line, " \t"));
} else null;
var res = parseParamRest(it.rest());
res.names.long = param_str[2..];
res.names.short = short_name;
return res;
}
fn parseParamRest(line: []const u8) Param(Help) {
if (mem.startsWith(u8, line, "<")) blk: {
const len = mem.indexOfScalar(u8, line, '>') orelse break :blk;
const takes_many = mem.startsWith(u8, line[len + 1 ..], "...");
const help_start = len + 1 + @as(usize, 3) * @boolToInt(takes_many);
return .{
.takes_value = if (takes_many) .many else .one,
.id = .{
.msg = mem.trim(u8, line[help_start..], " \t"),
.value = line[1..len],
},
};
}
return .{ .id = .{ .msg = mem.trim(u8, line, " \t") } };
}
fn expectParam(expect: Param(Help), actual: Param(Help)) void {
testing.expectEqualStrings(expect.id.msg, actual.id.msg);
testing.expectEqualStrings(expect.id.value, actual.id.value);
testing.expectEqual(expect.names.short, actual.names.short);
testing.expectEqual(expect.takes_value, actual.takes_value);
if (expect.names.long) |long| {
testing.expectEqualStrings(long, actual.names.long.?);
} else {
testing.expectEqual(@as(?[]const u8, null), actual.names.long);
}
}
test "parseParam" {
expectParam(Param(Help){
.id = .{ .msg = "Help text", .value = "value" },
.names = .{ .short = 's', .long = "long" },
.takes_value = .one,
}, try parseParam("-s, --long <value> Help text"));
expectParam(Param(Help){
.id = .{ .msg = "Help text", .value = "value" },
.names = .{ .short = 's', .long = "long" },
.takes_value = .many,
}, try parseParam("-s, --long <value>... Help text"));
expectParam(Param(Help){
.id = .{ .msg = "Help text", .value = "value" },
.names = .{ .long = "long" },
.takes_value = .one,
}, try parseParam("--long <value> Help text"));
expectParam(Param(Help){
.id = .{ .msg = "Help text", .value = "value" },
.names = .{ .short = 's' },
.takes_value = .one,
}, try parseParam("-s <value> Help text"));
expectParam(Param(Help){
.id = .{ .msg = "Help text" },
.names = .{ .short = 's', .long = "long" },
}, try parseParam("-s, --long Help text"));
expectParam(Param(Help){
.id = .{ .msg = "Help text" },
.names = .{ .short = 's' },
}, try parseParam("-s Help text"));
expectParam(Param(Help){
.id = .{ .msg = "Help text" },
.names = .{ .long = "long" },
}, try parseParam("--long Help text"));
expectParam(Param(Help){
.id = .{ .msg = "Help text", .value = "A | B" },
.names = .{ .long = "long" },
.takes_value = .one,
}, try parseParam("--long <A | B> Help text"));
expectParam(Param(Help){
.id = .{ .msg = "Help text", .value = "A" },
.names = .{},
.takes_value = .one,
}, try parseParam("<A> Help text"));
expectParam(Param(Help){
.id = .{ .msg = "Help text", .value = "A" },
.names = .{},
.takes_value = .many,
}, try parseParam("<A>... Help text"));
testing.expectError(error.TrailingComma, parseParam("--long, Help"));
testing.expectError(error.TrailingComma, parseParam("-s, Help"));
testing.expectError(error.InvalidShortParam, parseParam("-ss Help"));
testing.expectError(error.InvalidShortParam, parseParam("-ss <value> Help"));
testing.expectError(error.InvalidShortParam, parseParam("- Help"));
}
/// Optional diagnostics used for reporting useful errors
pub const Diagnostic = struct {
arg: []const u8 = "",
name: Names = Names{},
/// Default diagnostics reporter when all you want is English with no colors.
/// Use this as a reference for implementing your own if needed.
pub fn report(diag: Diagnostic, stream: anytype, err: anyerror) !void {
const Arg = struct {
prefix: []const u8,
name: []const u8,
};
const a = if (diag.name.short) |*c|
Arg{ .prefix = "-", .name = @as(*const [1]u8, c)[0..] }
else if (diag.name.long) |l|
Arg{ .prefix = "--", .name = l }
else
Arg{ .prefix = "", .name = diag.arg };
switch (err) {
error.DoesntTakeValue => try stream.print("The argument '{s}{s}' does not take a value\n", .{ a.prefix, a.name }),
error.MissingValue => try stream.print("The argument '{s}{s}' requires a value but none was supplied\n", .{ a.prefix, a.name }),
error.InvalidArgument => try stream.print("Invalid argument '{s}{s}'\n", .{ a.prefix, a.name }),
else => try stream.print("Error while parsing arguments: {s}\n", .{@errorName(err)}),
}
}
};
fn testDiag(diag: Diagnostic, err: anyerror, expected: []const u8) void {
var buf: [1024]u8 = undefined;
var slice_stream = io.fixedBufferStream(&buf);
diag.report(slice_stream.writer(), err) catch unreachable;
testing.expectEqualStrings(expected, slice_stream.getWritten());
}
test "Diagnostic.report" {
testDiag(.{ .arg = "c" }, error.InvalidArgument, "Invalid argument 'c'\n");
testDiag(.{ .name = .{ .long = "cc" } }, error.InvalidArgument, "Invalid argument '--cc'\n");
testDiag(.{ .name = .{ .short = 'c' } }, error.DoesntTakeValue, "The argument '-c' does not take a value\n");
testDiag(.{ .name = .{ .long = "cc" } }, error.DoesntTakeValue, "The argument '--cc' does not take a value\n");
testDiag(.{ .name = .{ .short = 'c' } }, error.MissingValue, "The argument '-c' requires a value but none was supplied\n");
testDiag(.{ .name = .{ .long = "cc" } }, error.MissingValue, "The argument '--cc' requires a value but none was supplied\n");
testDiag(.{ .name = .{ .short = 'c' } }, error.InvalidArgument, "Invalid argument '-c'\n");
testDiag(.{ .name = .{ .long = "cc" } }, error.InvalidArgument, "Invalid argument '--cc'\n");
testDiag(.{ .name = .{ .short = 'c' } }, error.SomethingElse, "Error while parsing arguments: SomethingElse\n");
testDiag(.{ .name = .{ .long = "cc" } }, error.SomethingElse, "Error while parsing arguments: SomethingElse\n");
}
pub fn Args(comptime Id: type, comptime params: []const Param(Id)) type {
return struct {
arena: std.heap.ArenaAllocator,
clap: ComptimeClap(Id, params),
exe_arg: ?[]const u8,
pub fn deinit(a: *@This()) void {
a.arena.deinit();
}
pub fn flag(a: @This(), comptime name: []const u8) bool {
return a.clap.flag(name);
}
pub fn option(a: @This(), comptime name: []const u8) ?[]const u8 {
return a.clap.option(name);
}
pub fn options(a: @This(), comptime name: []const u8) []const []const u8 {
return a.clap.options(name);
}
pub fn positionals(a: @This()) []const []const u8 {
return a.clap.positionals();
}
};
}
/// Options that can be set to customize the behavior of parsing.
pub const ParseOptions = struct {
/// The allocator used for all memory allocations. Defaults to the `heap.page_allocator`.
/// Note: You should probably override this allocator if you are calling `parseEx`. Unlike
/// `parse`, `parseEx` does not wrap the allocator so the heap allocator can be
/// quite expensive. (TODO: Can we pick a better default? For `parse`, this allocator
/// is fine, as it wraps it in an arena)
allocator: *mem.Allocator = heap.page_allocator,
diagnostic: ?*Diagnostic = null,
};
/// Same as `parseEx` but uses the `args.OsIterator` by default.
pub fn parse(
comptime Id: type,
comptime params: []const Param(Id),
opt: ParseOptions,
) !Args(Id, params) {
var iter = try args.OsIterator.init(opt.allocator);
var res = Args(Id, params){
.arena = iter.arena,
.exe_arg = iter.exe_arg,
.clap = undefined,
};
// Let's reuse the arena from the `OSIterator` since we already have
// it.
res.clap = try parseEx(Id, params, &iter, .{
.allocator = &res.arena.allocator,
.diagnostic = opt.diagnostic,
});
return res;
}
/// Parses the command line arguments passed into the program based on an
/// array of `Param`s.
pub fn parseEx(
comptime Id: type,
comptime params: []const Param(Id),
iter: anytype,
opt: ParseOptions,
) !ComptimeClap(Id, params) {
const Clap = ComptimeClap(Id, params);
return try Clap.parse(iter, opt);
}
/// Will print a help message in the following format:
/// -s, --long <valueText> helpText
/// -s, helpText
/// -s <valueText> helpText
/// --long helpText
/// --long <valueText> helpText
pub fn helpFull(
stream: anytype,
comptime Id: type,
params: []const Param(Id),
comptime Error: type,
context: anytype,
helpText: fn (@TypeOf(context), Param(Id)) Error![]const u8,
valueText: fn (@TypeOf(context), Param(Id)) Error![]const u8,
) !void {
const max_spacing = blk: {
var res: usize = 0;
for (params) |param| {
var cs = io.countingWriter(io.null_writer);
try printParam(cs.writer(), Id, param, Error, context, valueText);
if (res < cs.bytes_written)
res = @intCast(usize, cs.bytes_written);
}
break :blk res;
};
for (params) |param| {
if (param.names.short == null and param.names.long == null)
continue;
var cs = io.countingWriter(stream);
try stream.print("\t", .{});
try printParam(cs.writer(), Id, param, Error, context, valueText);
try stream.writeByteNTimes(' ', max_spacing - @intCast(usize, cs.bytes_written));
try stream.print("\t{s}\n", .{try helpText(context, param)});
}
}
fn printParam(
stream: anytype,
comptime Id: type,
param: Param(Id),
comptime Error: type,
context: anytype,
valueText: fn (@TypeOf(context), Param(Id)) Error![]const u8,
) !void {
if (param.names.short) |s| {
try stream.print("-{c}", .{s});
} else {
try stream.print(" ", .{});
}
if (param.names.long) |l| {
if (param.names.short) |_| {
try stream.print(", ", .{});
} else {
try stream.print(" ", .{});
}
try stream.print("--{s}", .{l});
}
switch (param.takes_value) {
.none => {},
.one => try stream.print(" <{s}>", .{valueText(context, param)}),
.many => try stream.print(" <{s}>...", .{valueText(context, param)}),
}
}
/// A wrapper around helpFull for simple helpText and valueText functions that
/// cant return an error or take a context.
pub fn helpEx(
stream: anytype,
comptime Id: type,
params: []const Param(Id),
helpText: fn (Param(Id)) []const u8,
valueText: fn (Param(Id)) []const u8,
) !void {
const Context = struct {
helpText: fn (Param(Id)) []const u8,
valueText: fn (Param(Id)) []const u8,
pub fn help(c: @This(), p: Param(Id)) error{}![]const u8 {
return c.helpText(p);
}
pub fn value(c: @This(), p: Param(Id)) error{}![]const u8 {
return c.valueText(p);
}
};
return helpFull(
stream,
Id,
params,
error{},
Context{
.helpText = helpText,
.valueText = valueText,
},
Context.help,
Context.value,
);
}
pub const Help = struct {
msg: []const u8 = "",
value: []const u8 = "",
};
/// A wrapper around helpEx that takes a Param(Help).
pub fn help(stream: anytype, params: []const Param(Help)) !void {
try helpEx(stream, Help, params, getHelpSimple, getValueSimple);
}
fn getHelpSimple(param: Param(Help)) []const u8 {
return param.id.msg;
}
fn getValueSimple(param: Param(Help)) []const u8 {
return param.id.value;
}
test "clap.help" {
var buf: [1024]u8 = undefined;
var slice_stream = io.fixedBufferStream(&buf);
@setEvalBranchQuota(10000);
try help(
slice_stream.writer(),
comptime &[_]Param(Help){
parseParam("-a Short flag.") catch unreachable,
parseParam("-b <V1> Short option.") catch unreachable,
parseParam("--aa Long flag.") catch unreachable,
parseParam("--bb <V2> Long option.") catch unreachable,
parseParam("-c, --cc Both flag.") catch unreachable,
parseParam("-d, --dd <V3> Both option.") catch unreachable,
parseParam("-d, --dd <V3>... Both repeated option.") catch unreachable,
parseParam("<P> Positional. This should not appear in the help message.") catch unreachable,
},
);
const expected = "" ++
"\t-a \tShort flag.\n" ++
"\t-b <V1> \tShort option.\n" ++
"\t --aa \tLong flag.\n" ++
"\t --bb <V2> \tLong option.\n" ++
"\t-c, --cc \tBoth flag.\n" ++
"\t-d, --dd <V3> \tBoth option.\n" ++
"\t-d, --dd <V3>...\tBoth repeated option.\n";
testing.expectEqualStrings(expected, slice_stream.getWritten());
}
/// Will print a usage message in the following format:
/// [-abc] [--longa] [-d <valueText>] [--longb <valueText>] <valueText>
///
/// First all none value taking parameters, which have a short name are
/// printed, then non positional parameters and finally the positinal.
pub fn usageFull(
stream: anytype,
comptime Id: type,
params: []const Param(Id),
comptime Error: type,
context: anytype,
valueText: fn (@TypeOf(context), Param(Id)) Error![]const u8,
) !void {
var cos = io.countingWriter(stream);
const cs = cos.writer();
for (params) |param| {
const name = param.names.short orelse continue;
if (param.takes_value != .none)
continue;
if (cos.bytes_written == 0)
try stream.writeAll("[-");
try cs.writeByte(name);
}
if (cos.bytes_written != 0)
try cs.writeByte(']');
var positional: ?Param(Id) = null;
for (params) |param| {
if (param.takes_value == .none and param.names.short != null)
continue;
const prefix = if (param.names.short) |_| "-" else "--";
// Seems the zig compiler is being a little wierd. I doesn't allow me to write
// @as(*const [1]u8, s) VVVVVVVVVVVVVVVVVVVVVVVVVVVVVV
const name = if (param.names.short) |*s| @ptrCast([*]const u8, s)[0..1] else param.names.long orelse {
positional = param;
continue;
};
if (cos.bytes_written != 0)
try cs.writeByte(' ');
try cs.print("[{s}{s}", .{ prefix, name });
switch (param.takes_value) {
.none => {},
.one => try cs.print(" <{s}>", .{try valueText(context, param)}),
.many => try cs.print(" <{s}>...", .{try valueText(context, param)}),
}
try cs.writeByte(']');
}
if (positional) |p| {
if (cos.bytes_written != 0)
try cs.writeByte(' ');
try cs.print("<{s}>", .{try valueText(context, p)});
}
}
/// A wrapper around usageFull for a simple valueText functions that
/// cant return an error or take a context.
pub fn usageEx(
stream: anytype,
comptime Id: type,
params: []const Param(Id),
valueText: fn (Param(Id)) []const u8,
) !void {
const Context = struct {
valueText: fn (Param(Id)) []const u8,
pub fn value(c: @This(), p: Param(Id)) error{}![]const u8 {
return c.valueText(p);
}
};
return usageFull(
stream,
Id,
params,
error{},
Context{ .valueText = valueText },
Context.value,
);
}
/// A wrapper around usageEx that takes a Param(Help).
pub fn usage(stream: anytype, params: []const Param(Help)) !void {
try usageEx(stream, Help, params, getValueSimple);
}
fn testUsage(expected: []const u8, params: []const Param(Help)) !void {
var buf: [1024]u8 = undefined;
var fbs = io.fixedBufferStream(&buf);
try usage(fbs.writer(), params);
testing.expectEqualStrings(expected, fbs.getWritten());
}
test "usage" {
@setEvalBranchQuota(100000);
try testUsage("[-ab]", comptime &[_]Param(Help){
parseParam("-a") catch unreachable,
parseParam("-b") catch unreachable,
});
try testUsage("[-a <value>] [-b <v>]", comptime &[_]Param(Help){
parseParam("-a <value>") catch unreachable,
parseParam("-b <v>") catch unreachable,
});
try testUsage("[--a] [--b]", comptime &[_]Param(Help){
parseParam("--a") catch unreachable,
parseParam("--b") catch unreachable,
});
try testUsage("[--a <value>] [--b <v>]", comptime &[_]Param(Help){
parseParam("--a <value>") catch unreachable,
parseParam("--b <v>") catch unreachable,
});
try testUsage("<file>", comptime &[_]Param(Help){
parseParam("<file>") catch unreachable,
});
try testUsage("[-ab] [-c <value>] [-d <v>] [--e] [--f] [--g <value>] [--h <v>] [-i <v>...] <file>", comptime &[_]Param(Help){
parseParam("-a") catch unreachable,
parseParam("-b") catch unreachable,
parseParam("-c <value>") catch unreachable,
parseParam("-d <v>") catch unreachable,
parseParam("--e") catch unreachable,
parseParam("--f") catch unreachable,
parseParam("--g <value>") catch unreachable,
parseParam("--h <v>") catch unreachable,
parseParam("-i <v>...") catch unreachable,
parseParam("<file>") catch unreachable,
});
}

View File

@@ -0,0 +1,341 @@
const std = @import("std");
const builtin = std.builtin;
const debug = std.debug;
const heap = std.heap;
const mem = std.mem;
const process = std.process;
const testing = std.testing;
/// An example of what methods should be implemented on an arg iterator.
pub const ExampleArgIterator = struct {
const Error = error{};
pub fn next(iter: *ExampleArgIterator) Error!?[]const u8 {
return "2";
}
};
/// An argument iterator which iterates over a slice of arguments.
/// This implementation does not allocate.
pub const SliceIterator = struct {
const Error = error{};
args: []const []const u8,
index: usize = 0,
pub fn next(iter: *SliceIterator) Error!?[]const u8 {
if (iter.args.len <= iter.index)
return null;
defer iter.index += 1;
return iter.args[iter.index];
}
};
test "SliceIterator" {
const args = &[_][]const u8{ "A", "BB", "CCC" };
var iter = SliceIterator{ .args = args };
for (args) |a| {
const b = try iter.next();
debug.assert(mem.eql(u8, a, b.?));
}
}
/// An argument iterator which wraps the ArgIterator in ::std.
/// On windows, this iterator allocates.
pub const OsIterator = struct {
const Error = process.ArgIterator.NextError;
arena: heap.ArenaAllocator,
args: process.ArgIterator,
/// The executable path (this is the first argument passed to the program)
/// TODO: Is it the right choice for this to be null? Maybe `init` should
/// return an error when we have no exe.
exe_arg: ?[:0]const u8,
pub fn init(allocator: *mem.Allocator) Error!OsIterator {
var res = OsIterator{
.arena = heap.ArenaAllocator.init(allocator),
.args = process.args(),
.exe_arg = undefined,
};
res.exe_arg = try res.next();
return res;
}
pub fn deinit(iter: *OsIterator) void {
iter.arena.deinit();
}
pub fn next(iter: *OsIterator) Error!?[:0]const u8 {
if (builtin.os.tag == .windows) {
return try iter.args.next(&iter.arena.allocator) orelse return null;
} else {
return iter.args.nextPosix();
}
}
};
/// An argument iterator that takes a string and parses it into arguments, simulating
/// how shells split arguments.
pub const ShellIterator = struct {
const Error = error{
DanglingEscape,
QuoteNotClosed,
} || mem.Allocator.Error;
arena: heap.ArenaAllocator,
str: []const u8,
pub fn init(allocator: *mem.Allocator, str: []const u8) ShellIterator {
return .{
.arena = heap.ArenaAllocator.init(allocator),
.str = str,
};
}
pub fn deinit(iter: *ShellIterator) void {
iter.arena.deinit();
}
pub fn next(iter: *ShellIterator) Error!?[]const u8 {
// Whenever possible, this iterator will return slices into `str` instead of
// allocating. Sometimes this is not possible, for example, escaped characters
// have be be unescape, so we need to allocate in this case.
var list = std.ArrayList(u8).init(&iter.arena.allocator);
var start: usize = 0;
var state: enum {
skip_whitespace,
no_quote,
no_quote_escape,
single_quote,
double_quote,
double_quote_escape,
after_quote,
} = .skip_whitespace;
for (iter.str) |c, i| {
switch (state) {
// The state that skips the initial whitespace.
.skip_whitespace => switch (c) {
' ', '\t', '\n' => {},
'\'' => {
start = i + 1;
state = .single_quote;
},
'"' => {
start = i + 1;
state = .double_quote;
},
'\\' => {
start = i + 1;
state = .no_quote_escape;
},
else => {
start = i;
state = .no_quote;
},
},
// The state that parses the none quoted part of a argument.
.no_quote => switch (c) {
// We're done parsing a none quoted argument when we hit a
// whitespace.
' ', '\t', '\n' => {
defer iter.str = iter.str[i..];
return iter.result(start, i, &list);
},
// Slicing is not possible if a quote starts while parsing none
// quoted args.
// Example:
// ab'cd' -> abcd
'\'' => {
try list.appendSlice(iter.str[start..i]);
start = i + 1;
state = .single_quote;
},
'"' => {
try list.appendSlice(iter.str[start..i]);
start = i + 1;
state = .double_quote;
},
// Slicing is not possible if we need to escape a character.
// Example:
// ab\"d -> ab"d
'\\' => {
try list.appendSlice(iter.str[start..i]);
start = i + 1;
state = .no_quote_escape;
},
else => {},
},
// We're in this state after having parsed the quoted part of an
// argument. This state works mostly the same as .no_quote, but
// is aware, that the last character seen was a quote, which should
// not be part of the argument. This is why you will see `i - 1` here
// instead of just `i` when `iter.str` is sliced.
.after_quote => switch (c) {
' ', '\t', '\n' => {
defer iter.str = iter.str[i..];
return iter.result(start, i - 1, &list);
},
'\'' => {
try list.appendSlice(iter.str[start .. i - 1]);
start = i + 1;
state = .single_quote;
},
'"' => {
try list.appendSlice(iter.str[start .. i - 1]);
start = i + 1;
state = .double_quote;
},
'\\' => {
try list.appendSlice(iter.str[start .. i - 1]);
start = i + 1;
state = .no_quote_escape;
},
else => {
try list.appendSlice(iter.str[start .. i - 1]);
start = i;
state = .no_quote;
},
},
// The states that parse the quoted part of arguments. The only differnece
// between single and double quoted arguments is that single quoted
// arguments ignore escape sequences, while double quoted arguments
// does escaping.
.single_quote => switch (c) {
'\'' => state = .after_quote,
else => {},
},
.double_quote => switch (c) {
'"' => state = .after_quote,
'\\' => {
try list.appendSlice(iter.str[start..i]);
start = i + 1;
state = .double_quote_escape;
},
else => {},
},
// The state we end up when after the escape character (`\`). All these
// states do is transition back into the previous state.
// TODO: Are there any escape sequences that does transform the second
// character into something else? For example, in Zig, `\n` is
// transformed into the line feed ascii character.
.no_quote_escape => switch (c) {
else => state = .no_quote,
},
.double_quote_escape => switch (c) {
else => state = .double_quote,
},
}
}
defer iter.str = iter.str[iter.str.len..];
switch (state) {
.skip_whitespace => return null,
.no_quote => return iter.result(start, iter.str.len, &list),
.after_quote => return iter.result(start, iter.str.len - 1, &list),
.no_quote_escape => return Error.DanglingEscape,
.single_quote,
.double_quote,
.double_quote_escape,
=> return Error.QuoteNotClosed,
}
}
fn result(iter: *ShellIterator, start: usize, end: usize, list: *std.ArrayList(u8)) Error!?[]const u8 {
const res = iter.str[start..end];
// If we already have something in `list` that means that we could not
// parse the argument without allocation. We therefor need to just append
// the rest we have to the list and return that.
if (list.items.len != 0) {
try list.appendSlice(res);
return list.toOwnedSlice();
}
return res;
}
};
fn testShellIteratorOk(str: []const u8, allocations: usize, expect: []const []const u8) void {
var allocator = testing.FailingAllocator.init(testing.allocator, allocations);
var it = ShellIterator.init(&allocator.allocator, str);
defer it.deinit();
for (expect) |e| {
if (it.next()) |actual| {
testing.expect(actual != null);
testing.expectEqualStrings(e, actual.?);
} else |err| testing.expectEqual(@as(anyerror![]const u8, e), err);
}
if (it.next()) |actual| {
testing.expectEqual(@as(?[]const u8, null), actual);
testing.expectEqual(allocations, allocator.allocations);
} else |err| testing.expectEqual(@as(anyerror!void, {}), err);
}
fn testShellIteratorErr(str: []const u8, expect: anyerror) void {
var it = ShellIterator.init(testing.allocator, str);
defer it.deinit();
while (it.next() catch |err| {
testing.expectError(expect, @as(anyerror!void, err));
return;
}) |_| {}
testing.expectError(expect, @as(anyerror!void, {}));
}
test "ShellIterator" {
testShellIteratorOk("a", 0, &[_][]const u8{"a"});
testShellIteratorOk("'a'", 0, &[_][]const u8{"a"});
testShellIteratorOk("\"a\"", 0, &[_][]const u8{"a"});
testShellIteratorOk("a b", 0, &[_][]const u8{ "a", "b" });
testShellIteratorOk("'a' b", 0, &[_][]const u8{ "a", "b" });
testShellIteratorOk("\"a\" b", 0, &[_][]const u8{ "a", "b" });
testShellIteratorOk("a 'b'", 0, &[_][]const u8{ "a", "b" });
testShellIteratorOk("a \"b\"", 0, &[_][]const u8{ "a", "b" });
testShellIteratorOk("'a b'", 0, &[_][]const u8{"a b"});
testShellIteratorOk("\"a b\"", 0, &[_][]const u8{"a b"});
testShellIteratorOk("\"a\"\"b\"", 1, &[_][]const u8{"ab"});
testShellIteratorOk("'a''b'", 1, &[_][]const u8{"ab"});
testShellIteratorOk("'a'b", 1, &[_][]const u8{"ab"});
testShellIteratorOk("a'b'", 1, &[_][]const u8{"ab"});
testShellIteratorOk("a\\ b", 1, &[_][]const u8{"a b"});
testShellIteratorOk("\"a\\ b\"", 1, &[_][]const u8{"a b"});
testShellIteratorOk("'a\\ b'", 0, &[_][]const u8{"a\\ b"});
testShellIteratorOk(" a b ", 0, &[_][]const u8{ "a", "b" });
testShellIteratorOk("\\ \\ ", 0, &[_][]const u8{ " ", " " });
testShellIteratorOk(
\\printf 'run\nuninstall\n'
, 0, &[_][]const u8{ "printf", "run\\nuninstall\\n" });
testShellIteratorOk(
\\setsid -f steam "steam://$action/$id"
, 0, &[_][]const u8{ "setsid", "-f", "steam", "steam://$action/$id" });
testShellIteratorOk(
\\xargs -I% rg --no-heading --no-line-number --only-matching
\\ --case-sensitive --multiline --text --byte-offset '(?-u)%' $@
\\
, 0, &[_][]const u8{
"xargs", "-I%", "rg", "--no-heading",
"--no-line-number", "--only-matching", "--case-sensitive", "--multiline",
"--text", "--byte-offset", "(?-u)%", "$@",
});
testShellIteratorErr("'a", error.QuoteNotClosed);
testShellIteratorErr("'a\\", error.QuoteNotClosed);
testShellIteratorErr("\"a", error.QuoteNotClosed);
testShellIteratorErr("\"a\\", error.QuoteNotClosed);
testShellIteratorErr("a\\", error.DanglingEscape);
}

View File

@@ -0,0 +1,175 @@
const clap = @import("../clap.zig");
const std = @import("std");
const debug = std.debug;
const heap = std.heap;
const mem = std.mem;
const testing = std.testing;
/// Deprecated: Use `parseEx` instead
pub fn ComptimeClap(
comptime Id: type,
comptime params: []const clap.Param(Id),
) type {
var flags: usize = 0;
var single_options: usize = 0;
var multi_options: usize = 0;
var converted_params: []const clap.Param(usize) = &[_]clap.Param(usize){};
for (params) |param| {
var index: usize = 0;
if (param.names.long != null or param.names.short != null) {
const ptr = switch (param.takes_value) {
.none => &flags,
.one => &single_options,
.many => &multi_options,
};
index = ptr.*;
ptr.* += 1;
}
const converted = clap.Param(usize){
.id = index,
.names = param.names,
.takes_value = param.takes_value,
};
converted_params = converted_params ++ [_]clap.Param(usize){converted};
}
return struct {
single_options: [single_options]?[]const u8,
multi_options: [multi_options][]const []const u8,
flags: [flags]bool,
pos: []const []const u8,
allocator: *mem.Allocator,
pub fn parse(iter: anytype, opt: clap.ParseOptions) !@This() {
const allocator = opt.allocator;
var multis = [_]std.ArrayList([]const u8){undefined} ** multi_options;
for (multis) |*multi| {
multi.* = std.ArrayList([]const u8).init(allocator);
}
var pos = std.ArrayList([]const u8).init(allocator);
var res = @This(){
.single_options = [_]?[]const u8{null} ** single_options,
.multi_options = [_][]const []const u8{undefined} ** multi_options,
.flags = [_]bool{false} ** flags,
.pos = undefined,
.allocator = allocator,
};
var stream = clap.StreamingClap(usize, @typeInfo(@TypeOf(iter)).Pointer.child){
.params = converted_params,
.iter = iter,
};
while (try stream.next()) |arg| {
const param = arg.param;
if (param.names.long == null and param.names.short == null) {
try pos.append(arg.value.?);
} else if (param.takes_value == .one) {
debug.assert(res.single_options.len != 0);
if (res.single_options.len != 0)
res.single_options[param.id] = arg.value.?;
} else if (param.takes_value == .many) {
debug.assert(multis.len != 0);
if (multis.len != 0)
try multis[param.id].append(arg.value.?);
} else {
debug.assert(res.flags.len != 0);
if (res.flags.len != 0)
res.flags[param.id] = true;
}
}
for (multis) |*multi, i|
res.multi_options[i] = multi.toOwnedSlice();
res.pos = pos.toOwnedSlice();
return res;
}
pub fn deinit(parser: @This()) void {
for (parser.multi_options) |o|
parser.allocator.free(o);
parser.allocator.free(parser.pos);
}
pub fn flag(parser: @This(), comptime name: []const u8) bool {
const param = comptime findParam(name);
if (param.takes_value != .none)
@compileError(name ++ " is an option and not a flag.");
return parser.flags[param.id];
}
pub fn option(parser: @This(), comptime name: []const u8) ?[]const u8 {
const param = comptime findParam(name);
if (param.takes_value == .none)
@compileError(name ++ " is a flag and not an option.");
if (param.takes_value == .many)
@compileError(name ++ " takes many options, not one.");
return parser.single_options[param.id];
}
pub fn options(parser: @This(), comptime name: []const u8) []const []const u8 {
const param = comptime findParam(name);
if (param.takes_value == .none)
@compileError(name ++ " is a flag and not an option.");
if (param.takes_value == .one)
@compileError(name ++ " takes one option, not multiple.");
return parser.multi_options[param.id];
}
pub fn positionals(parser: @This()) []const []const u8 {
return parser.pos;
}
fn findParam(comptime name: []const u8) clap.Param(usize) {
comptime {
for (converted_params) |param| {
if (param.names.short) |s| {
if (mem.eql(u8, name, "-" ++ [_]u8{s}))
return param;
}
if (param.names.long) |l| {
if (mem.eql(u8, name, "--" ++ l))
return param;
}
}
@compileError(name ++ " is not a parameter.");
}
}
};
}
test "" {
const Clap = ComptimeClap(clap.Help, comptime &[_]clap.Param(clap.Help){
clap.parseParam("-a, --aa ") catch unreachable,
clap.parseParam("-b, --bb ") catch unreachable,
clap.parseParam("-c, --cc <V>") catch unreachable,
clap.parseParam("-d, --dd <V>...") catch unreachable,
clap.parseParam("<P>") catch unreachable,
});
var iter = clap.args.SliceIterator{
.args = &[_][]const u8{
"-a", "-c", "0", "something", "-d", "a", "--dd", "b",
},
};
var args = try Clap.parse(&iter, .{ .allocator = testing.allocator });
defer args.deinit();
testing.expect(args.flag("-a"));
testing.expect(args.flag("--aa"));
testing.expect(!args.flag("-b"));
testing.expect(!args.flag("--bb"));
testing.expectEqualStrings("0", args.option("-c").?);
testing.expectEqualStrings("0", args.option("--cc").?);
testing.expectEqual(@as(usize, 1), args.positionals().len);
testing.expectEqualStrings("something", args.positionals()[0]);
testing.expectEqualSlices([]const u8, &[_][]const u8{ "a", "b" }, args.options("-d"));
testing.expectEqualSlices([]const u8, &[_][]const u8{ "a", "b" }, args.options("--dd"));
}

View File

@@ -0,0 +1,424 @@
const builtin = @import("builtin");
const clap = @import("../clap.zig");
const std = @import("std");
const args = clap.args;
const debug = std.debug;
const heap = std.heap;
const io = std.io;
const mem = std.mem;
const os = std.os;
const testing = std.testing;
/// The result returned from StreamingClap.next
pub fn Arg(comptime Id: type) type {
return struct {
const Self = @This();
param: *const clap.Param(Id),
value: ?[]const u8 = null,
};
}
/// A command line argument parser which, given an ArgIterator, will parse arguments according
/// to the params. StreamingClap parses in an iterating manner, so you have to use a loop together with
/// StreamingClap.next to parse all the arguments of your program.
pub fn StreamingClap(comptime Id: type, comptime ArgIterator: type) type {
return struct {
const State = union(enum) {
normal,
chaining: Chaining,
rest_are_positional,
const Chaining = struct {
arg: []const u8,
index: usize,
};
};
params: []const clap.Param(Id),
iter: *ArgIterator,
state: State = .normal,
positional: ?*const clap.Param(Id) = null,
diagnostic: ?*clap.Diagnostic = null,
/// Get the next Arg that matches a Param.
pub fn next(parser: *@This()) !?Arg(Id) {
switch (parser.state) {
.normal => return try parser.normal(),
.chaining => |state| return try parser.chainging(state),
.rest_are_positional => {
const param = parser.positionalParam() orelse unreachable;
const value = (try parser.iter.next()) orelse return null;
return Arg(Id){ .param = param, .value = value };
},
}
}
fn normal(parser: *@This()) !?Arg(Id) {
const arg_info = (try parser.parseNextArg()) orelse return null;
const arg = arg_info.arg;
switch (arg_info.kind) {
.long => {
const eql_index = mem.indexOfScalar(u8, arg, '=');
const name = if (eql_index) |i| arg[0..i] else arg;
const maybe_value = if (eql_index) |i| arg[i + 1 ..] else null;
for (parser.params) |*param| {
const match = param.names.long orelse continue;
if (!mem.eql(u8, name, match))
continue;
if (param.takes_value == .none) {
if (maybe_value != null)
return parser.err(arg, .{ .long = name }, error.DoesntTakeValue);
return Arg(Id){ .param = param };
}
const value = blk: {
if (maybe_value) |v|
break :blk v;
break :blk (try parser.iter.next()) orelse
return parser.err(arg, .{ .long = name }, error.MissingValue);
};
return Arg(Id){ .param = param, .value = value };
}
return parser.err(arg, .{ .long = name }, error.InvalidArgument);
},
.short => return try parser.chainging(.{
.arg = arg,
.index = 0,
}),
.positional => if (parser.positionalParam()) |param| {
// If we find a positional with the value `--` then we
// interpret the rest of the arguments as positional
// arguments.
if (mem.eql(u8, arg, "--")) {
parser.state = .rest_are_positional;
const value = (try parser.iter.next()) orelse return null;
return Arg(Id){ .param = param, .value = value };
}
return Arg(Id){ .param = param, .value = arg };
} else {
return parser.err(arg, .{}, error.InvalidArgument);
},
}
}
fn chainging(parser: *@This(), state: State.Chaining) !?Arg(Id) {
const arg = state.arg;
const index = state.index;
const next_index = index + 1;
for (parser.params) |*param| {
const short = param.names.short orelse continue;
if (short != arg[index])
continue;
// Before we return, we have to set the new state of the clap
defer {
if (arg.len <= next_index or param.takes_value != .none) {
parser.state = .normal;
} else {
parser.state = .{
.chaining = .{
.arg = arg,
.index = next_index,
},
};
}
}
const next_is_eql = if (next_index < arg.len) arg[next_index] == '=' else false;
if (param.takes_value == .none) {
if (next_is_eql)
return parser.err(arg, .{ .short = short }, error.DoesntTakeValue);
return Arg(Id){ .param = param };
}
if (arg.len <= next_index) {
const value = (try parser.iter.next()) orelse
return parser.err(arg, .{ .short = short }, error.MissingValue);
return Arg(Id){ .param = param, .value = value };
}
if (next_is_eql)
return Arg(Id){ .param = param, .value = arg[next_index + 1 ..] };
return Arg(Id){ .param = param, .value = arg[next_index..] };
}
return parser.err(arg, .{ .short = arg[index] }, error.InvalidArgument);
}
fn positionalParam(parser: *@This()) ?*const clap.Param(Id) {
if (parser.positional) |p|
return p;
for (parser.params) |*param| {
if (param.names.long) |_|
continue;
if (param.names.short) |_|
continue;
parser.positional = param;
return param;
}
return null;
}
const ArgInfo = struct {
arg: []const u8,
kind: enum {
long,
short,
positional,
},
};
fn parseNextArg(parser: *@This()) !?ArgInfo {
const full_arg = (try parser.iter.next()) orelse return null;
if (mem.eql(u8, full_arg, "--") or mem.eql(u8, full_arg, "-"))
return ArgInfo{ .arg = full_arg, .kind = .positional };
if (mem.startsWith(u8, full_arg, "--"))
return ArgInfo{ .arg = full_arg[2..], .kind = .long };
if (mem.startsWith(u8, full_arg, "-"))
return ArgInfo{ .arg = full_arg[1..], .kind = .short };
return ArgInfo{ .arg = full_arg, .kind = .positional };
}
fn err(parser: @This(), arg: []const u8, names: clap.Names, _err: anytype) @TypeOf(_err) {
if (parser.diagnostic) |d|
d.* = .{ .arg = arg, .name = names };
return _err;
}
};
}
fn testNoErr(params: []const clap.Param(u8), args_strings: []const []const u8, results: []const Arg(u8)) void {
var iter = args.SliceIterator{ .args = args_strings };
var c = StreamingClap(u8, args.SliceIterator){
.params = params,
.iter = &iter,
};
for (results) |res| {
const arg = (c.next() catch unreachable) orelse unreachable;
testing.expectEqual(res.param, arg.param);
const expected_value = res.value orelse {
testing.expectEqual(@as(@TypeOf(arg.value), null), arg.value);
continue;
};
const actual_value = arg.value orelse unreachable;
testing.expectEqualSlices(u8, expected_value, actual_value);
}
if (c.next() catch unreachable) |_|
unreachable;
}
fn testErr(params: []const clap.Param(u8), args_strings: []const []const u8, expected: []const u8) void {
var diag = clap.Diagnostic{};
var iter = args.SliceIterator{ .args = args_strings };
var c = StreamingClap(u8, args.SliceIterator){
.params = params,
.iter = &iter,
.diagnostic = &diag,
};
while (c.next() catch |err| {
var buf: [1024]u8 = undefined;
var fbs = io.fixedBufferStream(&buf);
diag.report(fbs.writer(), err) catch unreachable;
testing.expectEqualStrings(expected, fbs.getWritten());
return;
}) |_| {}
testing.expect(false);
}
test "short params" {
const params = [_]clap.Param(u8){
.{ .id = 0, .names = .{ .short = 'a' } },
.{ .id = 1, .names = .{ .short = 'b' } },
.{
.id = 2,
.names = .{ .short = 'c' },
.takes_value = .one,
},
.{
.id = 3,
.names = .{ .short = 'd' },
.takes_value = .many,
},
};
const a = &params[0];
const b = &params[1];
const c = &params[2];
const d = &params[3];
testNoErr(
&params,
&[_][]const u8{
"-a", "-b", "-ab", "-ba",
"-c", "0", "-c=0", "-ac",
"0", "-ac=0", "-d=0",
},
&[_]Arg(u8){
.{ .param = a },
.{ .param = b },
.{ .param = a },
.{ .param = b },
.{ .param = b },
.{ .param = a },
.{ .param = c, .value = "0" },
.{ .param = c, .value = "0" },
.{ .param = a },
.{ .param = c, .value = "0" },
.{ .param = a },
.{ .param = c, .value = "0" },
.{ .param = d, .value = "0" },
},
);
}
test "long params" {
const params = [_]clap.Param(u8){
.{ .id = 0, .names = .{ .long = "aa" } },
.{ .id = 1, .names = .{ .long = "bb" } },
.{
.id = 2,
.names = .{ .long = "cc" },
.takes_value = .one,
},
.{
.id = 3,
.names = .{ .long = "dd" },
.takes_value = .many,
},
};
const aa = &params[0];
const bb = &params[1];
const cc = &params[2];
const dd = &params[3];
testNoErr(
&params,
&[_][]const u8{
"--aa", "--bb",
"--cc", "0",
"--cc=0", "--dd=0",
},
&[_]Arg(u8){
.{ .param = aa },
.{ .param = bb },
.{ .param = cc, .value = "0" },
.{ .param = cc, .value = "0" },
.{ .param = dd, .value = "0" },
},
);
}
test "positional params" {
const params = [_]clap.Param(u8){.{
.id = 0,
.takes_value = .one,
}};
testNoErr(
&params,
&[_][]const u8{ "aa", "bb" },
&[_]Arg(u8){
.{ .param = &params[0], .value = "aa" },
.{ .param = &params[0], .value = "bb" },
},
);
}
test "all params" {
const params = [_]clap.Param(u8){
.{
.id = 0,
.names = .{ .short = 'a', .long = "aa" },
},
.{
.id = 1,
.names = .{ .short = 'b', .long = "bb" },
},
.{
.id = 2,
.names = .{ .short = 'c', .long = "cc" },
.takes_value = .one,
},
.{ .id = 3, .takes_value = .one },
};
const aa = &params[0];
const bb = &params[1];
const cc = &params[2];
const positional = &params[3];
testNoErr(
&params,
&[_][]const u8{
"-a", "-b", "-ab", "-ba",
"-c", "0", "-c=0", "-ac",
"0", "-ac=0", "--aa", "--bb",
"--cc", "0", "--cc=0", "something",
"-", "--", "--cc=0", "-a",
},
&[_]Arg(u8){
.{ .param = aa },
.{ .param = bb },
.{ .param = aa },
.{ .param = bb },
.{ .param = bb },
.{ .param = aa },
.{ .param = cc, .value = "0" },
.{ .param = cc, .value = "0" },
.{ .param = aa },
.{ .param = cc, .value = "0" },
.{ .param = aa },
.{ .param = cc, .value = "0" },
.{ .param = aa },
.{ .param = bb },
.{ .param = cc, .value = "0" },
.{ .param = cc, .value = "0" },
.{ .param = positional, .value = "something" },
.{ .param = positional, .value = "-" },
.{ .param = positional, .value = "--cc=0" },
.{ .param = positional, .value = "-a" },
},
);
}
test "errors" {
const params = [_]clap.Param(u8){
.{
.id = 0,
.names = .{ .short = 'a', .long = "aa" },
},
.{
.id = 1,
.names = .{ .short = 'c', .long = "cc" },
.takes_value = .one,
},
};
testErr(&params, &[_][]const u8{"q"}, "Invalid argument 'q'\n");
testErr(&params, &[_][]const u8{"-q"}, "Invalid argument '-q'\n");
testErr(&params, &[_][]const u8{"--q"}, "Invalid argument '--q'\n");
testErr(&params, &[_][]const u8{"--q=1"}, "Invalid argument '--q'\n");
testErr(&params, &[_][]const u8{"-a=1"}, "The argument '-a' does not take a value\n");
testErr(&params, &[_][]const u8{"--aa=1"}, "The argument '--aa' does not take a value\n");
testErr(&params, &[_][]const u8{"-c"}, "The argument '-c' requires a value but none was supplied\n");
testErr(&params, &[_][]const u8{"--cc"}, "The argument '--cc' requires a value but none was supplied\n");
}

View File

@@ -0,0 +1,14 @@
pkgs:
clap:
version: 0.3.0
license: Unlicense
description: Simple command line argument parsing library
source_url: "https://github.com/Hejsil/zig-clap"
root: clap.zig
files:
README.md
LICENSE
build.zig
clap/*.zig
example/*.zig

View File

@@ -0,0 +1,5 @@
id: aoe2l16htluewam6bfwvv0khsbbno8g8jd7suonifg74u7kd
name: clap
main: clap.zig
license: Unlicense
dependencies:

View File

@@ -0,0 +1,73 @@
const std = @import("std");
pub fn ExactSizeMatcher(comptime max_bytes: usize) type {
const a: u32 = 1000;
switch (max_bytes) {
1, 2, 4, 8, 12 => {},
else => {
@compileError("max_bytes must be 1, 2, 4, 8, or 12.");
},
}
const T = std.meta.Int(
.unsigned,
max_bytes * 8,
);
return struct {
pub fn match(str: anytype) T {
switch (str.len) {
1...max_bytes - 1 => {
var tmp = std.mem.zeroes([max_bytes]u8);
std.mem.copy(u8, &tmp, str[0..str.len]);
return std.mem.readIntNative(T, &tmp);
},
max_bytes => {
return std.mem.readIntSliceNative(T, str);
},
0 => {
return 0;
},
else => {
return std.math.maxInt(T);
},
}
}
pub fn case(comptime str: []const u8) T {
if (str.len < max_bytes) {
var bytes = std.mem.zeroes([max_bytes]u8);
const slice_bytes = std.mem.sliceAsBytes(str);
std.mem.copy(u8, &bytes, slice_bytes);
return std.mem.readIntNative(T, &bytes);
} else if (str.len == max_bytes) {
return std.mem.readIntNative(T, str[0..str.len]);
} else {
@compileError("str: \"" ++ str ++ "\" too long");
}
}
fn hash(comptime str: anytype) ?T {
if (str.len > max_bytes) return null;
var tmp = [_]u8{0} ** max_bytes;
std.mem.copy(u8, &tmp, str[0..str.len]);
return std.mem.readIntNative(T, &tmp);
}
};
}
const eight = ExactSizeMatcher(8);
const expect = std.testing.expect;
test "ExactSizeMatcher 5 letter" {
const word = "yield";
try expect(eight.match(word) == eight.case("yield"));
try expect(eight.match(word) != eight.case("yields"));
}
test "ExactSizeMatcher 4 letter" {
const Four = ExactSizeMatcher(4);
const word = "from";
try expect(Four.match(word) == Four.case("from"));
try expect(Four.match(word) != Four.case("fro"));
}

44
src/exports.zig Normal file
View File

@@ -0,0 +1,44 @@
const std = @import("std");
const alloc = @import("alloc.zig");
usingnamespace @import("global.zig");
const Root = @import("main_wasm.zig").Root;
pub extern fn init() void {
alloc.dynamic = std.heap.c_allocator;
alloc.static = std.heap.c_allocator;
}
/// Convert a slice into known memory representation -- enables C ABI
pub const U8Chunk = packed struct {
const Float = @Type(builtin.TypeInfo{ .Float = .{ .bits = 2 * @bitSizeOf(usize) } });
const Abi = if (builtin.arch.isWasm()) Float else U8Chunk;
ptr: [*]u8,
len: usize,
pub fn toSlice(raw: Abi) []u8 {
const self = @bitCast(U8Chunk, raw);
return self.ptr[0..self.len];
}
pub fn fromSlice(slice: []u8) Abi {
const self = U8Chunk{ .ptr = slice.ptr, .len = slice.len };
return @bitCast(Abi, self);
}
pub fn empty() Abi {
return U8Chunk.fromSlice(&[0]u8{});
}
};
export fn fd_create() ?*Root {
const fd = allocator.create(Root) catch return null;
fd.* = .{};
return fd;
}
export fn fd_destroy(fd: *Root) void {
fd.deinit(allocator);
allocator.destroy(fd);
}

View File

@@ -1,28 +1,237 @@
const std = @import("std");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
const sync = @import("sync.zig");
const alloc = @import("alloc.zig");
const expect = std.testing.expect;
const Mutex = sync.Mutex;
const Semaphore = sync.Semaphore;
const path_handler = @import("./resolver/resolve_path.zig");
const allocators = @import("./allocators.zig");
// pub const FilesystemImplementation = @import("fs_impl.zig");
//
pub const Stat = packed struct {
// milliseconds
mtime: i64 = 0,
// last queried timestamp
qtime: i64 = 0,
kind: FileSystemEntry.Kind,
threadlocal var scratch_lookup_buffer: [256]u8 = undefined;
pub const Preallocate = struct {
pub const Counts = struct {
pub const dir_entry: usize = 1024;
pub const files: usize = 2048;
};
};
pub const FileSystem = struct {
// This maps paths relative to absolute_working_dir to the structure of arrays of paths
stats: std.StringHashMap(Stat) = undefined,
entries: std.ArrayList(FileSystemEntry),
allocator: *std.mem.Allocator,
top_level_dir: string = "/",
fs: Implementation,
absolute_working_dir = "/",
implementation: anytype = undefined,
dirname_store: *DirnameStore,
filename_store: *FilenameStore,
pub var instance: FileSystem = undefined;
pub const DirnameStore = allocators.BSSStringList(Preallocate.Counts.dir_entry, 256);
pub const FilenameStore = allocators.BSSStringList(Preallocate.Counts.files, 64);
pub const Error = error{
ENOENT,
EACCESS,
INVALID_NAME,
ENOTDIR,
};
pub fn init1(allocator: *std.mem.Allocator, top_level_dir: ?string, enable_watcher: bool) !*FileSystem {
var _top_level_dir = top_level_dir orelse (if (isBrowser) "/project/" else try std.process.getCwdAlloc(allocator));
// Ensure there's a trailing separator in the top level directory
// This makes path resolution more reliable
if (!std.fs.path.isSep(_top_level_dir[_top_level_dir.len - 1])) {
const tld = try allocator.alloc(u8, _top_level_dir.len + 1);
std.mem.copy(u8, tld, _top_level_dir);
tld[tld.len - 1] = std.fs.path.sep;
if (!isBrowser) {
allocator.free(_top_level_dir);
}
_top_level_dir = tld;
}
instance = FileSystem{
.allocator = allocator,
.top_level_dir = _top_level_dir,
.fs = Implementation.init(allocator, _top_level_dir, enable_watcher),
// .stats = std.StringHashMap(Stat).init(allocator),
.dirname_store = DirnameStore.init(allocator),
.filename_store = FilenameStore.init(allocator),
};
instance.fs.parent_fs = &instance;
_ = DirEntry.EntryStore.init(allocator);
return &instance;
}
pub const DirEntry = struct {
pub const EntryMap = std.StringHashMap(EntryStore.ListIndex);
pub const EntryStore = allocators.BSSList(Entry, Preallocate.Counts.files);
dir: string,
data: EntryMap,
pub fn addEntry(dir: *DirEntry, entry: std.fs.Dir.Entry) !void {
var _kind: Entry.Kind = undefined;
switch (entry.kind) {
.Directory => {
_kind = Entry.Kind.dir;
},
.SymLink => {
// This might be wrong!
_kind = Entry.Kind.file;
},
.File => {
_kind = Entry.Kind.file;
},
else => {
return;
},
}
// entry.name only lives for the duration of the iteration
var name = FileSystem.FilenameStore.editableSlice(try FileSystem.FilenameStore.instance.append(entry.name));
for (entry.name) |c, i| {
name[i] = std.ascii.toLower(c);
}
var symlink: []u8 = "";
if (entry.kind == std.fs.Dir.Entry.Kind.SymLink) {
symlink = name;
}
const index = try EntryStore.instance.append(Entry{
.base = name,
.dir = dir.dir,
.mutex = Mutex.init(),
// Call "stat" lazily for performance. The "@material-ui/icons" package
// contains a directory with over 11,000 entries in it and running "stat"
// for each entry was a big performance issue for that package.
.need_stat = entry.kind == .SymLink,
.cache = Entry.Cache{
.symlink = symlink,
.kind = _kind,
},
});
try dir.data.put(name, index);
}
pub fn updateDir(i: *DirEntry, dir: string) void {
var iter = i.data.iterator();
i.dir = dir;
while (iter.next()) |entry| {
entry.value.dir = dir;
}
}
pub fn empty(dir: string, allocator: *std.mem.Allocator) DirEntry {
return DirEntry{ .dir = dir, .data = EntryMap.init(allocator) };
}
pub fn init(dir: string, allocator: *std.mem.Allocator) DirEntry {
return DirEntry{ .dir = dir, .data = EntryMap.init(allocator) };
}
pub const Err = struct {
original_err: anyerror,
canonical_error: anyerror,
};
pub fn deinit(d: *DirEntry) void {
d.data.allocator.free(d.dir);
var iter = d.data.iterator();
while (iter.next()) |file_entry| {
EntryStore.instance.at(file_entry.value).?.deinit(d.data.allocator);
}
d.data.deinit();
}
pub fn get(entry: *DirEntry, _query: string) ?Entry.Lookup {
if (_query.len == 0) return null;
var end: usize = 0;
std.debug.assert(scratch_lookup_buffer.len >= _query.len);
for (_query) |c, i| {
scratch_lookup_buffer[i] = std.ascii.toLower(c);
end = i;
}
const query = scratch_lookup_buffer[0 .. end + 1];
const result_index = entry.data.get(query) orelse return null;
const result = EntryStore.instance.at(result_index) orelse return null;
if (!strings.eql(result.base, query)) {
return Entry.Lookup{ .entry = result, .diff_case = Entry.Lookup.DifferentCase{
.dir = entry.dir,
.query = _query,
.actual = result.base,
} };
}
return Entry.Lookup{ .entry = result, .diff_case = null };
}
};
pub const Entry = struct {
cache: Cache = Cache{},
dir: string,
base: string,
mutex: Mutex,
need_stat: bool = true,
pub const Lookup = struct {
entry: *Entry,
diff_case: ?DifferentCase,
pub const DifferentCase = struct {
dir: string,
query: string,
actual: string,
};
};
pub fn deinit(e: *Entry, allocator: *std.mem.Allocator) void {
allocator.free(e.base);
allocator.free(e.dir);
allocator.free(e.cache.symlink);
allocator.destroy(e);
}
pub const Cache = struct {
symlink: string = "",
kind: Kind = Kind.file,
};
pub const Kind = enum {
dir,
file,
};
pub fn kind(entry: *Entry, fs: *Implementation) Kind {
// entry.mutex.lock();
// defer entry.mutex.unlock();
if (entry.need_stat) {
entry.need_stat = false;
entry.cache = fs.kind(entry.dir, entry.base) catch unreachable;
}
return entry.cache.kind;
}
pub fn symlink(entry: *Entry, fs: *Implementation) string {
// entry.mutex.lock();
// defer entry.mutex.unlock();
if (entry.need_stat) {
entry.need_stat = false;
entry.cache = fs.kind(entry.dir, entry.base) catch unreachable;
}
return entry.cache.symlink;
}
};
// pub fn statBatch(fs: *FileSystemEntry, paths: []string) ![]?Stat {
@@ -36,31 +245,516 @@ pub const FileSystem = struct {
// pub fn readDir(fs: *FileSystemEntry, path: string) ?[]string {
// }
pub fn normalize(f: *@This(), str: string) string {
return @call(.{ .modifier = .always_inline }, path_handler.normalizeString, .{ str, true, .auto });
}
pub fn Implementation(comptime Context: type) type {
return struct {
context: *Context,
pub fn join(f: *@This(), parts: anytype) string {
return @call(.{ .modifier = .always_inline }, path_handler.joinStringBuf, .{
&join_buf,
parts,
.auto,
});
}
pub fn statBatch(context: *Context, path: string) ![]?Stat {
return try context.statBatch(path);
pub fn joinBuf(f: *@This(), parts: anytype, buf: []u8) string {
return @call(.{ .modifier = .always_inline }, path_handler.joinStringBuf, .{
buf,
parts,
.auto,
});
}
pub fn relative(f: *@This(), from: string, to: string) string {
return @call(.{ .modifier = .always_inline }, path_handler.relative, .{
from,
to,
});
}
pub fn relativeAlloc(f: *@This(), allocator: *std.mem.Allocator, from: string, to: string) string {
return @call(.{ .modifier = .always_inline }, path_handler.relativeAlloc, .{
alloc,
from,
to,
});
}
pub fn relativeTo(f: *@This(), to: string) string {
return @call(.{ .modifier = .always_inline }, path_handler.relative, .{
f.top_level_dir,
to,
});
}
pub fn relativeFrom(f: *@This(), from: string) string {
return @call(.{ .modifier = .always_inline }, path_handler.relative, .{
from,
f.top_level_dir,
});
}
pub fn relativeToAlloc(f: *@This(), allocator: *std.mem.Allocator, to: string) string {
return @call(.{ .modifier = .always_inline }, path_handler.relativeAlloc, .{
allocator,
f.top_level_dir,
to,
});
}
pub fn absAlloc(f: *@This(), allocator: *std.mem.Allocator, parts: anytype) !string {
const joined = path_handler.joinAbsString(
f.top_level_dir,
parts,
.auto,
);
return try allocator.dupe(u8, joined);
}
pub fn abs(f: *@This(), parts: anytype) string {
return path_handler.joinAbsString(
f.top_level_dir,
parts,
.auto,
);
}
pub fn absBuf(f: *@This(), parts: anytype, buf: []u8) string {
return path_handler.joinAbsStringBuf(f.top_level_dir, buf, parts, .auto);
}
pub fn joinAlloc(f: *@This(), allocator: *std.mem.Allocator, parts: anytype) !string {
const joined = f.join(parts);
return try allocator.dupe(u8, joined);
}
threadlocal var realpath_buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
pub fn resolveAlloc(f: *@This(), allocator: *std.mem.Allocator, parts: anytype) !string {
const joined = f.join(parts);
const realpath = try std.fs.realpath(joined, (&realpath_buffer));
return try allocator.dupe(u8, realpath);
}
pub fn resolvePath(f: *@This(), part: string) ![]u8 {
return try std.fs.realpath(part, (&realpath_buffer).ptr);
}
pub const RealFS = struct {
entries_mutex: Mutex = Mutex.init(),
entries: *EntriesOption.Map,
allocator: *std.mem.Allocator,
do_not_cache_entries: bool = false,
limiter: Limiter,
watcher: ?std.StringHashMap(WatchData) = null,
watcher_mutex: Mutex = Mutex.init(),
cwd: string,
parent_fs: *FileSystem = undefined,
file_limit: usize = 32,
file_quota: usize = 32,
// Always try to max out how many files we can keep open
pub fn adjustUlimit() usize {
var limit = std.os.getrlimit(.NOFILE) catch return 32;
if (limit.cur < limit.max) {
var new_limit = std.mem.zeroes(std.os.rlimit);
new_limit.cur = limit.max;
new_limit.max = limit.max;
std.os.setrlimit(.NOFILE, new_limit) catch return limit.cur;
return new_limit.cur;
}
return limit.cur;
}
pub fn init(allocator: *std.mem.Allocator, cwd: string, enable_watcher: bool) RealFS {
const file_limit = adjustUlimit();
return RealFS{
.entries = EntriesOption.Map.init(allocator),
.allocator = allocator,
.cwd = cwd,
.file_limit = file_limit,
.file_quota = file_limit,
.limiter = Limiter.init(allocator),
.watcher = if (enable_watcher) std.StringHashMap(WatchData).init(allocator) else null,
};
}
pub const ModKeyError = error{
Unusable,
};
pub const ModKey = struct {
inode: std.fs.File.INode = 0,
size: u64 = 0,
mtime: i128 = 0,
mode: std.fs.File.Mode = 0,
pub fn generate(fs: *RealFS, path: string) anyerror!ModKey {
var file = try std.fs.openFileAbsolute(path, std.fs.File.OpenFlags{ .read = true });
defer file.close();
const stat = try file.stat();
const seconds = @divTrunc(stat.mtime, @as(@TypeOf(stat.mtime), std.time.ns_per_s));
// We can't detect changes if the file system zeros out the modification time
if (seconds == 0 and std.time.ns_per_s == 0) {
return error.Unusable;
}
// Don't generate a modification key if the file is too new
const now = std.time.nanoTimestamp();
const now_seconds = @divTrunc(now, std.time.ns_per_s);
if (seconds > seconds or (seconds == now_seconds and stat.mtime > now)) {
return error.Unusable;
}
return ModKey{
.inode = stat.inode,
.size = stat.size,
.mtime = stat.mtime,
.mode = stat.mode,
// .uid = stat.
};
}
pub const SafetyGap = 3;
};
fn modKeyError(fs: *RealFS, path: string, err: anyerror) void {
if (fs.watcher) |*watcher| {
fs.watcher_mutex.lock();
defer fs.watcher_mutex.unlock();
var state = WatchData.State.file_missing;
switch (err) {
error.Unusable => {
state = WatchData.State.file_unusable_mod_key;
},
else => {},
}
var entry = watcher.getOrPutValue(path, WatchData{ .state = state }) catch unreachable;
entry.value.state = state;
}
}
pub fn modKey(fs: *RealFS, path: string) anyerror!ModKey {
fs.limiter.before();
defer fs.limiter.after();
const key = ModKey.generate(fs, path) catch |err| {
fs.modKeyError(path, err);
return err;
};
if (fs.watcher) |*watcher| {
fs.watcher_mutex.lock();
defer fs.watcher_mutex.unlock();
var entry = watcher.getOrPutValue(path, WatchData{ .state = .file_has_mod_key, .mod_key = key }) catch unreachable;
entry.value.mod_key = key;
}
pub fn stat(context: *Context, path: string) !?Stat {
return try context.stat(path);
return key;
}
pub const WatchData = struct {
dir_entries: []string = &([_]string{}),
file_contents: string = "",
mod_key: ModKey = ModKey{},
watch_mutex: Mutex = Mutex.init(),
state: State = State.none,
pub const State = enum {
none,
dir_has_entries,
dir_missing,
file_has_mod_key,
file_need_mod_key,
file_missing,
file_unusable_mod_key,
};
};
pub const EntriesOption = union(Tag) {
entries: DirEntry,
err: DirEntry.Err,
pub const Tag = enum {
entries,
err,
};
// This custom map implementation:
// - Preallocates a fixed amount of directory name space
// - Doesn't store directory names which don't exist.
pub const Map = allocators.BSSMap(EntriesOption, Preallocate.Counts.dir_entry, false, 128);
};
// Limit the number of files open simultaneously to avoid ulimit issues
pub const Limiter = struct {
semaphore: Semaphore,
pub fn init(allocator: *std.mem.Allocator) Limiter {
return Limiter{
.semaphore = Semaphore.init(32),
// .counter = std.atomic.Int(u8).init(0),
// .lock = std.Thread.Mutex.init(),
};
}
pub fn readFile(context: *Context, path: string) !?File {
return try context.readFile(path);
// This will block if the number of open files is already at the limit
pub fn before(limiter: *Limiter) void {
limiter.semaphore.wait();
// var added = limiter.counter.fetchAdd(1);
}
pub fn readDir(context: *Context, path: string) []string {
return context.readdir(path);
pub fn after(limiter: *Limiter) void {
limiter.semaphore.post();
// limiter.counter.decr();
// if (limiter.held) |hold| {
// hold.release();
// limiter.held = null;
// }
}
};
}
};
pub const FileNotFound = struct {};
pub fn openDir(fs: *RealFS, unsafe_dir_string: string) std.fs.File.OpenError!std.fs.Dir {
return try std.fs.openDirAbsolute(unsafe_dir_string, std.fs.Dir.OpenDirOptions{ .iterate = true, .access_sub_paths = true, .no_follow = true });
}
fn readdir(
fs: *RealFS,
_dir: string,
handle: std.fs.Dir,
) !DirEntry {
fs.limiter.before();
defer fs.limiter.after();
var iter: std.fs.Dir.Iterator = handle.iterate();
var dir = DirEntry.init(_dir, fs.allocator);
errdefer dir.deinit();
while (try iter.next()) |_entry| {
try dir.addEntry(_entry);
}
return dir;
}
fn readDirectoryError(fs: *RealFS, dir: string, err: anyerror) !*EntriesOption {
if (fs.watcher) |*watcher| {
fs.watcher_mutex.lock();
defer fs.watcher_mutex.unlock();
try watcher.put(dir, WatchData{ .state = .dir_missing });
}
if (!fs.do_not_cache_entries) {
fs.entries_mutex.lock();
defer fs.entries_mutex.unlock();
var get_or_put_result = try fs.entries.getOrPut(dir);
var opt = try fs.entries.put(&get_or_put_result, EntriesOption{
.err = DirEntry.Err{ .original_err = err, .canonical_error = err },
});
return opt;
}
temp_entries_option = EntriesOption{
.err = DirEntry.Err{ .original_err = err, .canonical_error = err },
};
return &temp_entries_option;
}
threadlocal var temp_entries_option: EntriesOption = undefined;
pub fn readDirectory(fs: *RealFS, _dir: string, _handle: ?std.fs.Dir, recursive: bool) !*EntriesOption {
var dir = _dir;
var cache_result: ?allocators.Result = null;
if (!fs.do_not_cache_entries) {
fs.entries_mutex.lock();
defer fs.entries_mutex.unlock();
cache_result = try fs.entries.getOrPut(dir);
if (cache_result.?.hasCheckedIfExists()) {
if (fs.entries.atIndex(cache_result.?.index)) |cached_result| {
return cached_result;
}
}
}
var handle = _handle orelse try fs.openDir(dir);
defer {
if (_handle == null) {
handle.close();
}
}
// if we get this far, it's a real directory, so we can just store the dir name.
if (_handle == null) {
dir = try FilenameStore.instance.append(_dir);
}
// Cache miss: read the directory entries
const entries = fs.readdir(
dir,
handle,
) catch |err| {
return fs.readDirectoryError(dir, err) catch unreachable;
};
// if (fs.watcher) |*watcher| {
// fs.watcher_mutex.lock();
// defer fs.watcher_mutex.unlock();
// var _entries = watcher.iterator();
// const names = try fs.allocator.alloc([]const u8, _entries.len);
// for (_entries) |entry, i| {
// names[i] = try fs.allocator.dupe(u8, entry.key);
// }
// strings.sortAsc(names);
// try watcher.put(
// try fs.allocator.dupe(u8, dir),
// WatchData{ .dir_entries = names, .state = .dir_has_entries },
// );
// }
if (!fs.do_not_cache_entries) {
fs.entries_mutex.lock();
defer fs.entries_mutex.unlock();
const result = EntriesOption{
.entries = entries,
};
return try fs.entries.put(&cache_result.?, result);
}
temp_entries_option = EntriesOption{ .entries = entries };
return &temp_entries_option;
}
fn readFileError(fs: *RealFS, path: string, err: anyerror) void {
if (fs.watcher) |*watcher| {
fs.watcher_mutex.lock();
defer fs.watcher_mutex.unlock();
var res = watcher.getOrPutValue(path, WatchData{ .state = .file_missing }) catch unreachable;
res.value.state = .file_missing;
}
}
pub fn readFile(fs: *RealFS, path: string, _size: ?usize) !File {
fs.limiter.before();
defer fs.limiter.after();
const file: std.fs.File = std.fs.openFileAbsolute(path, std.fs.File.OpenFlags{ .read = true, .write = false }) catch |err| {
fs.readFileError(path, err);
return err;
};
defer file.close();
// Skip the extra file.stat() call when possible
var size = _size orelse (file.getEndPos() catch |err| {
fs.readFileError(path, err);
return err;
});
const file_contents: []u8 = file.readToEndAllocOptions(fs.allocator, size, size, @alignOf(u8), null) catch |err| {
fs.readFileError(path, err);
return err;
};
if (fs.watcher) |*watcher| {
fs.watcher_mutex.lock();
defer fs.watcher_mutex.unlock();
var res = watcher.getOrPutValue(path, WatchData{}) catch unreachable;
res.value.state = .file_need_mod_key;
res.value.file_contents = file_contents;
}
return File{ .path = Path.init(path), .contents = file_contents };
}
pub fn kind(fs: *RealFS, _dir: string, base: string) !Entry.Cache {
var dir = _dir;
var combo = [2]string{ dir, base };
var entry_path = path_handler.joinAbsString(fs.cwd, &combo, .auto);
fs.limiter.before();
defer fs.limiter.after();
const file = try std.fs.openFileAbsolute(entry_path, .{ .read = true, .write = false });
defer file.close();
var stat = try file.stat();
var _kind = stat.kind;
var cache = Entry.Cache{ .kind = Entry.Kind.file, .symlink = "" };
var symlink: []const u8 = "";
if (_kind == .SymLink) {
// windows has a max filepath of 255 chars
// we give it a little longer for other platforms
var out_buffer = std.mem.zeroes([512]u8);
var out_slice = &out_buffer;
symlink = entry_path;
var links_walked: u8 = 0;
while (links_walked < 255) : (links_walked += 1) {
var link: string = try std.os.readlink(symlink, out_slice);
if (!std.fs.path.isAbsolute(link)) {
combo[0] = dir;
combo[1] = link;
link = path_handler.joinAbsStringBuf(fs.cwd, out_slice, &combo, .auto);
}
// TODO: do we need to clean the path?
symlink = link;
const file2 = std.fs.openFileAbsolute(symlink, std.fs.File.OpenFlags{ .read = true, .write = false }) catch return cache;
defer file2.close();
const stat2 = file2.stat() catch return cache;
// Re-run "lstat" on the symlink target
_kind = stat2.kind;
if (_kind != .SymLink) {
break;
}
dir = std.fs.path.dirname(link) orelse return cache;
}
if (links_walked > 255) {
return cache;
}
}
if (_kind == .Directory) {
cache.kind = .dir;
} else {
cache.kind = .file;
}
if (symlink.len > 0) {
cache.symlink = try fs.allocator.dupe(u8, symlink);
}
return cache;
}
// // Stores the file entries for directories we've listed before
// entries_mutex: std.Mutex
// entries map[string]entriesOrErr
// // If true, do not use the "entries" cache
// doNotCacheEntries bool
};
pub const Implementation = {
switch (build_target) {
.wasi, .native => return RealFS,
.wasm => return WasmFS,
}
};
};
pub const FileSystemEntry = union(FileSystemEntry.Kind) {
file: File,
@@ -94,7 +788,7 @@ pub const PathName = struct {
// through the renaming logic that all other symbols go through to avoid name
// collisions.
pub fn nonUniqueNameString(self: *PathName, allocator: *std.mem.Allocator) !string {
if (strings.eql("index", self.base)) {
if (strings.eqlComptime(self.base, "index")) {
if (self.dir.len > 0) {
return MutableString.ensureValidIdentifier(PathName.init(self.dir).dir, allocator);
}
@@ -108,6 +802,7 @@ pub const PathName = struct {
var base = path;
var ext = path;
var dir = path;
var is_absolute = true;
var _i = strings.lastIndexOfChar(path, '/');
while (_i) |i| {
@@ -115,6 +810,7 @@ pub const PathName = struct {
if (i + 1 != path.len) {
base = path[i + 1 ..];
dir = path[0..i];
is_absolute = false;
break;
}
@@ -131,6 +827,10 @@ pub const PathName = struct {
base = base[0..dot];
}
if (is_absolute) {
dir = &([_]u8{});
}
return PathName{
.dir = dir,
.base = base,
@@ -139,16 +839,32 @@ pub const PathName = struct {
}
};
threadlocal var normalize_buf: [1024]u8 = undefined;
threadlocal var join_buf: [1024]u8 = undefined;
pub const Path = struct {
pretty: string,
text: string,
namespace: string,
namespace: string = "unspecified",
name: PathName,
is_disabled: bool = false,
pub fn generateKey(p: *Path, allocator: *std.mem.Allocator) !string {
return try std.fmt.allocPrint(allocator, "{s}://{s}", .{ p.namespace, p.text });
}
pub fn init(text: string) Path {
return Path{ .pretty = text, .text = text, .namespace = "file", .name = PathName.init(text) };
}
pub fn initWithPretty(text: string, pretty: string) Path {
return Path{ .pretty = pretty, .text = text, .namespace = "file", .name = PathName.init(text) };
}
pub fn initWithNamespace(text: string, namespace: string) Path {
return Path{ .pretty = text, .text = text, .namespace = namespace, .name = PathName.init(text) };
}
pub fn isBefore(a: *Path, b: Path) bool {
return a.namespace > b.namespace ||
(a.namespace == b.namespace and (a.text < b.text ||
@@ -163,7 +879,9 @@ test "PathName.init" {
&file,
);
std.testing.expectEqualStrings(res.dir, "/root/directory");
std.testing.expectEqualStrings(res.base, "file");
std.testing.expectEqualStrings(res.ext, ".ext");
try std.testing.expectEqualStrings(res.dir, "/root/directory");
try std.testing.expectEqualStrings(res.base, "file");
try std.testing.expectEqualStrings(res.ext, ".ext");
}
test {}

137
src/global.zig Normal file
View File

@@ -0,0 +1,137 @@
const std = @import("std");
pub usingnamespace @import("strings.zig");
pub const C = @import("c.zig");
pub const BuildTarget = enum { native, wasm, wasi };
pub const build_target: BuildTarget = comptime {
if (std.Target.current.isWasm() and std.Target.current.getOsTag() == .wasi) {
return BuildTarget.wasi;
} else if (std.Target.current.isWasm()) {
return BuildTarget.wasm;
} else {
return BuildTarget.native;
}
};
pub const isWasm = build_target == .wasm;
pub const isNative = build_target == .native;
pub const isWasi = build_target == .wasi;
pub const isBrowser = !isWasi and isWasm;
pub const isWindows = std.Target.current.os.tag == .windows;
pub const FeatureFlags = struct {
pub const strong_etags_for_built_files = true;
pub const keep_alive = true;
// it just doesn't work well.
pub const use_std_path_relative = false;
pub const use_std_path_join = false;
pub const print_ast = false;
pub const disable_printing_null = false;
};
pub const enableTracing = true;
pub const isDebug = std.builtin.Mode.Debug == std.builtin.mode;
pub const isTest = std.builtin.is_test;
pub const Output = struct {
var source: *Source = undefined;
pub const Source = struct {
const StreamType = {
if (isWasm) {
return std.io.FixedBufferStream([]u8);
} else {
return std.fs.File;
}
};
stream: StreamType,
error_stream: StreamType,
out_buffer: []u8 = &([_]u8{}),
err_buffer: []u8 = &([_]u8{}),
pub fn init(
stream: StreamType,
err: StreamType,
) Source {
return Source{ .stream = stream, .error_stream = err };
}
pub fn set(_source: *Source) void {
source = _source;
}
};
pub fn errorWriter() @typeInfo(@TypeOf(Source.StreamType.writer)).Fn.return_type.? {
return source.error_stream.writer();
}
pub fn writer() @typeInfo(@TypeOf(Source.StreamType.writer)).Fn.return_type.? {
return source.stream.writer();
}
pub fn printErrorable(comptime fmt: string, args: anytype) !void {
if (isWasm) {
try source.stream.seekTo(0);
try source.stream.writer().print(fmt, args);
const root = @import("root");
root.console_log(root.Uint8Array.fromSlice(source.out_buffer[0..source.stream.pos]));
} else {
std.fmt.format(source.stream.writer(), fmt, args) catch unreachable;
}
}
pub fn println(comptime fmt: string, args: anytype) void {
if (fmt[fmt.len - 1] != '\n') {
return print(fmt ++ "\n", args);
}
return print(fmt, args);
}
pub fn print(comptime fmt: string, args: anytype) void {
if (isWasm) {
source.stream.seekTo(0) catch return;
source.stream.writer().print(fmt, args) catch return;
const root = @import("root");
root.console_log(root.Uint8Array.fromSlice(source.out_buffer[0..source.stream.pos]));
} else {
std.fmt.format(source.stream.writer(), fmt, args) catch unreachable;
}
}
pub fn printErrorln(comptime fmt: string, args: anytype) void {
if (fmt[fmt.len - 1] != '\n') {
return printError(fmt ++ "\n", args);
}
return printError(fmt, args);
}
pub fn printError(comptime fmt: string, args: anytype) void {
if (isWasm) {
source.error_stream.seekTo(0) catch return;
source.error_stream.writer().print(fmt, args) catch unreachable;
const root = @import("root");
root.console_error(root.Uint8Array.fromSlice(source.err_buffer[0..source.error_stream.pos]));
} else {
std.fmt.format(source.error_stream.writer(), fmt, args) catch unreachable;
}
}
};
pub const Global = struct {
pub fn panic(comptime fmt: string, args: anytype) noreturn {
if (isWasm) {
Output.print(fmt, args);
@panic(fmt);
} else {
std.debug.panic(fmt, args);
}
}
pub fn notimpl() noreturn {
Global.panic("Not implemented yet!!!!!", .{});
}
};

View File

1818
src/hash_map_v2.zig Normal file

File diff suppressed because it is too large Load Diff

686
src/http.zig Normal file
View File

@@ -0,0 +1,686 @@
// const c = @import("./c.zig");
const std = @import("std");
usingnamespace @import("global.zig");
const Api = @import("./api/schema.zig").Api;
const bundler = @import("bundler.zig");
const logger = @import("logger.zig");
const tcp = std.x.net.tcp;
const ip = std.x.net.ip;
const IPv4 = std.x.os.IPv4;
const IPv6 = std.x.os.IPv6;
const Socket = std.x.os.Socket;
const os = std.os;
const picohttp = @import("picohttp");
const Header = picohttp.Header;
const Request = picohttp.Request;
const Response = picohttp.Response;
const Headers = picohttp.Headers;
const MimeType = @import("http/mime_type.zig");
const Bundler = bundler.Bundler;
const SOCKET_FLAGS = os.SOCK_CLOEXEC;
threadlocal var req_headers_buf: [100]picohttp.Header = undefined;
threadlocal var res_headers_buf: [100]picohttp.Header = undefined;
const ENABLE_LOGGER = false;
pub fn println(comptime fmt: string, args: anytype) void {
// if (ENABLE_LOGGER) {
Output.println(fmt, args);
// }
}
const HTTPStatusCode = u9;
pub const URLPath = struct {
extname: string = "",
path: string = "",
first_segment: string = "",
query_string: string = "",
// This does one pass over the URL path instead of like 4
pub fn parse(raw_path: string) URLPath {
var question_mark_i: i16 = -1;
var period_i: i16 = -1;
var first_segment_end: i16 = std.math.maxInt(i16);
var last_slash: i16 = -1;
var i: i16 = @intCast(i16, raw_path.len) - 1;
while (i >= 0) : (i -= 1) {
const c = raw_path[@intCast(usize, i)];
switch (c) {
'?' => {
question_mark_i = std.math.max(question_mark_i, i);
if (question_mark_i < period_i) {
period_i = -1;
}
if (last_slash > question_mark_i) {
last_slash = -1;
}
},
'.' => {
period_i = std.math.max(period_i, i);
},
'/' => {
last_slash = std.math.max(last_slash, i);
if (i > 0) {
first_segment_end = std.math.min(first_segment_end, i);
}
},
else => {},
}
}
if (last_slash > period_i) {
period_i = -1;
}
const extname = brk: {
if (question_mark_i > -1 and period_i > -1) {
period_i += 1;
break :brk raw_path[@intCast(usize, period_i)..@intCast(usize, question_mark_i)];
} else if (period_i > -1) {
period_i += 1;
break :brk raw_path[@intCast(usize, period_i)..];
} else {
break :brk &([_]u8{});
}
};
const path = if (question_mark_i < 0) raw_path[1..] else raw_path[1..@intCast(usize, question_mark_i)];
const first_segment = raw_path[1..std.math.min(@intCast(usize, first_segment_end), raw_path.len)];
return URLPath{
.extname = extname,
.first_segment = first_segment,
.path = if (raw_path.len == 1) "." else path,
.query_string = if (question_mark_i > -1) raw_path[@intCast(usize, question_mark_i)..@intCast(usize, raw_path.len)] else "",
};
}
};
pub const Method = enum {
GET,
HEAD,
PATCH,
PUT,
POST,
OPTIONS,
CONNECT,
TRACE,
pub fn which(str: []const u8) ?Method {
if (str.len < 3) {
return null;
}
const Match = strings.ExactSizeMatcher(2);
// we already did the length check
switch (Match.match(str[0..2])) {
Match.case("GE"), Match.case("ge") => {
return .GET;
},
Match.case("HE"), Match.case("he") => {
return .HEAD;
},
Match.case("PA"), Match.case("pa") => {
return .PATCH;
},
Match.case("PO"), Match.case("po") => {
return .POST;
},
Match.case("PU"), Match.case("pu") => {
return .PUT;
},
Match.case("OP"), Match.case("op") => {
return .OPTIONS;
},
Match.case("CO"), Match.case("co") => {
return .CONNECT;
},
Match.case("TR"), Match.case("tr") => {
return .TRACE;
},
else => {
return null;
},
}
}
};
pub const RequestContext = struct {
request: Request,
method: Method,
url: URLPath,
conn: *tcp.Connection,
allocator: *std.mem.Allocator,
log: logger.Log,
bundler: *Bundler,
keep_alive: bool = true,
status: ?HTTPStatusCode = null,
has_written_last_header: bool = false,
has_called_done: bool = false,
mime_type: MimeType = MimeType.other,
res_headers_count: usize = 0,
pub const bundle_prefix = "__speedy";
pub fn header(ctx: *RequestContext, comptime name: anytype) ?Header {
for (ctx.request.headers) |head| {
if (strings.eqlComptime(head.name, name)) {
return head;
}
}
return null;
}
pub fn printStatusLine(comptime code: HTTPStatusCode) []const u8 {
const status_text = switch (code) {
200...299 => "OK",
300...399 => "=>",
400...499 => "UH",
500...599 => "ERR",
else => @compileError("Invalid code passed to printStatusLine"),
};
return std.fmt.comptimePrint("HTTP/1.1 {d} {s}\r\n", .{ code, status_text });
}
pub fn prepareToSendBody(
ctx: *RequestContext,
length: usize,
comptime chunked: bool,
) !void {
defer {
if (isDebug or isTest) {
std.debug.assert(!ctx.has_written_last_header);
ctx.has_written_last_header = true;
}
}
if (chunked) {
ctx.appendHeader("Transfer-Encoding", "Chunked");
} else {
const length_str = try ctx.allocator.alloc(u8, 64);
ctx.appendHeader("Content-Length", length_str[0..std.fmt.formatIntBuf(length_str, length, 10, true, .{})]);
}
try ctx.flushHeaders();
}
threadlocal var resp_header_out_buf: [4096]u8 = undefined;
pub fn flushHeaders(ctx: *RequestContext) !void {
if (ctx.res_headers_count == 0) return;
const headers: []picohttp.Header = res_headers_buf[0..ctx.res_headers_count];
defer ctx.res_headers_count = 0;
var writer = std.io.fixedBufferStream(&resp_header_out_buf);
for (headers) |head| {
_ = writer.write(head.name) catch 0;
_ = writer.write(": ") catch 0;
_ = writer.write(head.value) catch 0;
_ = writer.write("\r\n") catch 0;
}
_ = writer.write("\r\n") catch 0;
_ = try ctx.writeSocket(writer.getWritten(), SOCKET_FLAGS);
}
pub fn writeSocket(ctx: *RequestContext, buf: anytype, flags: anytype) !usize {
// ctx.conn.client.setWriteBufferSize(@intCast(u32, buf.len)) catch {};
const written = ctx.conn.client.write(buf, SOCKET_FLAGS) catch |err| {
Output.printError("Write error: {s}", .{@errorName(err)});
return err;
};
if (written == 0) {
return error.SocketClosed;
}
return written;
}
pub fn writeBodyBuf(ctx: *RequestContext, body: []const u8) !void {
_ = try ctx.writeSocket(body, SOCKET_FLAGS);
}
pub fn writeStatus(ctx: *RequestContext, comptime code: HTTPStatusCode) !void {
_ = try ctx.writeSocket(comptime printStatusLine(code), SOCKET_FLAGS);
ctx.status = code;
}
pub fn init(req: Request, allocator: *std.mem.Allocator, conn: *tcp.Connection, bundler_: *Bundler) !RequestContext {
return RequestContext{
.request = req,
.allocator = allocator,
.bundler = bundler_,
.url = URLPath.parse(req.path),
.log = logger.Log.init(allocator),
.conn = conn,
.method = Method.which(req.method) orelse return error.InvalidMethod,
};
}
pub fn sendNotFound(req: *RequestContext) !void {
return req.writeStatus(404);
}
pub fn sendInternalError(ctx: *RequestContext, err: anytype) !void {
try ctx.writeStatus(500);
const printed = std.fmt.bufPrint(&error_buf, "Error: {s}", .{@errorName(err)}) catch |err2| brk: {
if (isDebug or isTest) {
Global.panic("error while printing error: {s}", .{@errorName(err2)});
}
break :brk "Internal error";
};
try ctx.prepareToSendBody(printed.len, false);
try ctx.writeBodyBuf(printed);
}
threadlocal var error_buf: [4096]u8 = undefined;
pub fn sendNotModified(ctx: *RequestContext) !void {
try ctx.writeStatus(304);
try ctx.flushHeaders();
ctx.done();
}
pub fn sendNoContent(ctx: *RequestContext) !void {
try ctx.writeStatus(204);
try ctx.flushHeaders();
ctx.done();
}
pub fn appendHeader(ctx: *RequestContext, comptime key: string, value: string) void {
if (isDebug or isTest) std.debug.assert(!ctx.has_written_last_header);
if (isDebug or isTest) std.debug.assert(ctx.res_headers_count < res_headers_buf.len);
res_headers_buf[ctx.res_headers_count] = Header{ .name = key, .value = value };
ctx.res_headers_count += 1;
}
const file_chunk_size = 16384;
const chunk_preamble_len: usize = brk: {
var buf: [64]u8 = undefined;
break :brk std.fmt.bufPrintIntToSlice(&buf, file_chunk_size, 16, true, .{}).len;
};
threadlocal var file_chunk_buf: [chunk_preamble_len + 2 + file_chunk_size]u8 = undefined;
threadlocal var symlink_buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var weak_etag_buffer: [100]u8 = undefined;
threadlocal var strong_etag_buffer: [100]u8 = undefined;
threadlocal var weak_etag_tmp_buffer: [100]u8 = undefined;
pub fn done(ctx: *RequestContext) void {
std.debug.assert(!ctx.has_called_done);
ctx.conn.deinit();
ctx.has_called_done = true;
}
pub fn sendBadRequest(ctx: *RequestContext) !void {
try ctx.writeStatus(400);
ctx.done();
}
pub fn handleGet(ctx: *RequestContext) !void {
const result = try ctx.bundler.buildFile(&ctx.log, ctx.allocator, ctx.url.path, ctx.url.extname);
ctx.mime_type = result.mime_type;
ctx.appendHeader("Content-Type", result.mime_type.value);
if (ctx.keep_alive) {
ctx.appendHeader("Connection", "keep-alive");
}
const send_body = ctx.method == .GET;
switch (result.value) {
.none => {
unreachable;
},
.file => |file| {
defer file.handle.close();
var do_extra_close = false;
var handle = file.handle;
var real_path = file.absolute_path;
// Assume "stat" is lying to us.
// Don't write a 2xx status until we've successfully read at least 1 byte
var stat = try handle.stat();
switch (stat.kind) {
.Directory,
.NamedPipe,
.UnixDomainSocket,
.Whiteout,
.BlockDevice,
.CharacterDevice,
=> {
ctx.log.addErrorFmt(null, logger.Loc.Empty, ctx.allocator, "Bad file type: {s}", .{@tagName(stat.kind)}) catch {};
try ctx.sendBadRequest();
return;
},
.SymLink => {
const real_file_path = try std.fs.realpath(file.absolute_path, &symlink_buffer);
real_path = real_file_path;
handle = try std.fs.openFileAbsolute(real_file_path, .{});
stat = try handle.stat();
do_extra_close = true;
},
else => {},
}
defer {
if (do_extra_close) {
handle.close();
}
}
var file_chunk_slice = file_chunk_buf[chunk_preamble_len .. file_chunk_buf.len - 3];
if (result.mime_type.category != .html) {
// hash(absolute_file_path, size, mtime)
var weak_etag = std.hash.Wyhash.init(1);
weak_etag_buffer[0] = 'W';
weak_etag_buffer[1] = '/';
weak_etag.update(real_path);
std.mem.writeIntNative(u64, weak_etag_tmp_buffer[0..8], stat.size);
weak_etag.update(weak_etag_tmp_buffer[0..8]);
std.mem.writeIntNative(i128, weak_etag_tmp_buffer[0..16], stat.mtime);
weak_etag.update(weak_etag_tmp_buffer[0..16]);
const etag_content_slice = std.fmt.bufPrintIntToSlice(weak_etag_buffer[2..], weak_etag.final(), 16, true, .{});
const complete_weak_etag = weak_etag_buffer[0 .. etag_content_slice.len + 2];
ctx.appendHeader("ETag", complete_weak_etag);
if (ctx.header("If-None-Match")) |etag_header| {
if (strings.eql(complete_weak_etag, etag_header.value)) {
try ctx.sendNotModified();
return;
}
}
} else {
ctx.appendHeader("Cache-Control", "no-cache");
}
switch (stat.size) {
0 => {
try ctx.sendNoContent();
return;
},
1...file_chunk_size - 1 => {
defer ctx.done();
// always report by amount we actually read instead of stat-reported read
const file_read = try handle.read(file_chunk_slice);
if (file_read == 0) {
return ctx.sendNoContent();
}
const file_slice = file_chunk_slice[0..file_read];
try ctx.writeStatus(200);
try ctx.prepareToSendBody(file_read, false);
if (!send_body) return;
_ = try ctx.writeSocket(file_slice, SOCKET_FLAGS);
},
else => {
var chunk_written: usize = 0;
var size_slice = file_chunk_buf[0..chunk_preamble_len];
var trailing_newline_slice = file_chunk_buf[file_chunk_buf.len - 3 ..];
trailing_newline_slice[0] = '\r';
trailing_newline_slice[1] = '\n';
var pushed_chunk_count: usize = 0;
while (true) : (pushed_chunk_count += 1) {
defer chunk_written = 0;
// Read from the file until we reach either end of file or the max chunk size
chunk_written = handle.read(file_chunk_slice) catch |err| {
if (pushed_chunk_count > 0) {
_ = try ctx.writeSocket("0\r\n\r\n", SOCKET_FLAGS);
}
return ctx.sendInternalError(err);
};
// empty chunk
if (chunk_written == 0) {
defer ctx.done();
if (pushed_chunk_count == 0) {
return ctx.sendNoContent();
}
_ = try ctx.writeSocket("0\r\n\r\n", SOCKET_FLAGS);
break;
// final chunk
} else if (chunk_written < file_chunk_size - 1) {
defer ctx.done();
var hex_size_slice = std.fmt.bufPrintIntToSlice(size_slice, chunk_written, 16, true, .{});
var remainder_slice = file_chunk_buf[hex_size_slice.len..size_slice.len];
remainder_slice[0] = '\r';
remainder_slice[1] = '\n';
if (pushed_chunk_count == 0) {
ctx.writeStatus(200) catch {};
ctx.prepareToSendBody(0, true) catch {};
if (!send_body) return;
}
_ = try ctx.writeSocket(size_slice, SOCKET_FLAGS);
_ = try ctx.writeSocket(file_chunk_slice[0..chunk_written], SOCKET_FLAGS);
_ = try ctx.writeSocket(trailing_newline_slice, SOCKET_FLAGS);
break;
// full chunk
} else {
if (pushed_chunk_count == 0) {
try ctx.writeStatus(200);
try ctx.prepareToSendBody(0, true);
if (!send_body) return;
}
var hex_size_slice = std.fmt.bufPrintIntToSlice(size_slice, chunk_written, 16, true, .{});
var remainder_slice = file_chunk_buf[hex_size_slice.len..size_slice.len];
remainder_slice[0] = '\r';
remainder_slice[1] = '\n';
_ = try ctx.writeSocket(&file_chunk_buf, SOCKET_FLAGS);
}
}
},
}
},
.build => |output| {
defer ctx.bundler.allocator.free(output.contents);
if (FeatureFlags.strong_etags_for_built_files) {
const strong_etag = std.hash.Wyhash.hash(1, output.contents);
const etag_content_slice = std.fmt.bufPrintIntToSlice(strong_etag_buffer[0..49], strong_etag, 16, true, .{});
ctx.appendHeader("ETag", etag_content_slice);
if (ctx.header("If-None-Match")) |etag_header| {
if (std.mem.eql(u8, etag_content_slice, etag_header.value)) {
try ctx.sendNotModified();
return;
}
}
}
if (output.contents.len == 0) {
return try ctx.sendNoContent();
}
defer ctx.done();
try ctx.writeStatus(200);
try ctx.prepareToSendBody(output.contents.len, false);
if (!send_body) return;
_ = try ctx.writeSocket(output.contents, SOCKET_FLAGS);
},
}
// If we get this far, it means
}
pub fn handleRequest(ctx: *RequestContext) !void {
switch (ctx.method) {
.GET, .HEAD, .OPTIONS => {
return ctx.handleGet();
},
else => {
return ctx.sendNotFound();
},
}
}
};
// This is a tiny HTTP server.
// It needs to support:
// - Static files
// - ETags, If-Not-Modified-Since
// - Bundling
// - Content-Type header
// - Content-Range header
// Fancy things to support:
// - Server-Timings for:
// - Resolver time
// - Parsing time
// - IO read time
pub const Server = struct {
log: logger.Log,
allocator: *std.mem.Allocator,
bundler: Bundler,
pub fn adjustUlimit() !void {
var limit = try std.os.getrlimit(.NOFILE);
if (limit.cur < limit.max) {
var new_limit = std.mem.zeroes(std.os.rlimit);
new_limit.cur = limit.max;
new_limit.max = limit.max;
try std.os.setrlimit(.NOFILE, new_limit);
}
}
pub fn onTCPConnection(server: *Server, conn: tcp.Connection) void {
conn.client.setNoDelay(true) catch {};
conn.client.setQuickACK(true) catch {};
conn.client.setLinger(1) catch {};
server.handleConnection(&conn);
}
fn run(server: *Server) !void {
adjustUlimit() catch {};
const listener = try tcp.Listener.init(.ip, SOCKET_FLAGS);
defer listener.deinit();
listener.setReuseAddress(true) catch {};
listener.setReusePort(true) catch {};
listener.setFastOpen(true) catch {};
// listener.setNoDelay(true) catch {};
// listener.setQuickACK(true) catch {};
// try listener.ack(true);
try listener.bind(ip.Address.initIPv4(IPv4.unspecified, 9000));
try listener.listen(1280);
const addr = try listener.getLocalAddress();
Output.println("Started Speedy at http://{s}", .{addr});
// var listener_handle = try std.os.kqueue();
// var change_list = std.mem.zeroes([2]os.Kevent);
// change_list[0].ident = @intCast(usize, listener.socket.fd);
// change_list[1].ident = @intCast(usize, listener.socket.fd);
// var eventlist: [128]os.Kevent = undefined;
while (true) {
var conn = listener.accept(SOCKET_FLAGS) catch |err| {
continue;
};
server.handleConnection(&conn);
}
}
pub fn sendError(server: *Server, request: *Request, conn: *tcp.Connection, code: HTTPStatusCode, msg: string) !void {
try server.writeStatus(code, connection);
}
threadlocal var req_buf: [32_000]u8 = undefined;
pub fn handleConnection(server: *Server, conn: *tcp.Connection) void {
// https://stackoverflow.com/questions/686217/maximum-on-http-header-values
var read_size = conn.client.read(&req_buf, SOCKET_FLAGS) catch |err| {
_ = conn.client.write(RequestContext.printStatusLine(400) ++ "\r\n\r\n", SOCKET_FLAGS) catch {};
return;
};
if (read_size == 0) {
// Actually, this was not a request.
return;
}
var req = picohttp.Request.parse(req_buf[0..read_size], &req_headers_buf) catch |err| {
_ = conn.client.write(RequestContext.printStatusLine(400) ++ "\r\n\r\n", SOCKET_FLAGS) catch {};
conn.client.deinit();
Output.printErrorln("ERR: {s}", .{@errorName(err)});
return;
};
var request_arena = std.heap.ArenaAllocator.init(server.allocator);
defer request_arena.deinit();
var req_ctx = RequestContext.init(req, &request_arena.allocator, conn, &server.bundler) catch |err| {
Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
conn.client.deinit();
return;
};
if (FeatureFlags.keep_alive) {
if (req_ctx.header("Connection")) |connection| {
req_ctx.keep_alive = strings.eqlInsensitive(connection.value, "keep-alive");
}
conn.client.setKeepAlive(req_ctx.keep_alive) catch {};
} else {
req_ctx.keep_alive = false;
}
req_ctx.handleRequest() catch |err| {
switch (err) {
error.ModuleNotFound => {
req_ctx.sendNotFound() catch {};
},
else => {
Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
return;
},
}
};
const status = req_ctx.status orelse @intCast(HTTPStatusCode, 500);
if (req_ctx.log.msgs.items.len == 0) {
println("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
} else {
println("{s} {s}", .{ @tagName(req_ctx.method), req.path });
for (req_ctx.log.msgs.items) |msg| {
msg.writeFormat(Output.errorWriter()) catch continue;
}
req_ctx.log.deinit();
}
}
pub fn start(allocator: *std.mem.Allocator, options: Api.TransformOptions) !void {
var log = logger.Log.init(allocator);
var server = Server{
.allocator = allocator,
.log = log,
.bundler = undefined,
};
server.bundler = try Bundler.init(allocator, &server.log, options);
try server.run();
}
};

118
src/http/mime_type.zig Normal file
View File

@@ -0,0 +1,118 @@
const std = @import("std");
usingnamespace @import("../global.zig");
const Loader = @import("../options.zig").Loader;
const Two = strings.ExactSizeMatcher(2);
const Four = strings.ExactSizeMatcher(4);
const Eight = strings.ExactSizeMatcher(8);
const MimeType = @This();
value: string,
category: Category,
pub const Category = enum {
image,
text,
html,
font,
other,
css,
json,
audio,
video,
javascript,
wasm,
};
pub const other = MimeType.init("application/octet-stream", .other);
pub const css = MimeType.init("application/octet-stream", .other);
pub const javascript = MimeType.init("text/javascript;charset=utf-8", .javascript);
pub const ico = MimeType.init("image/vnd.microsoft.icon", .image);
fn init(comptime str: string, t: Category) MimeType {
return MimeType{
.value = str,
.category = t,
};
}
// TODO: improve this
pub fn byLoader(loader: Loader, ext: string) MimeType {
switch (loader) {
.tsx, .ts, .js, .jsx => {
return javascript;
},
.css => {
return css;
},
else => {
return byExtension(ext);
},
}
}
// TODO: improve this
pub fn byExtension(ext: string) MimeType {
return switch (ext.len) {
2 => {
return switch (std.mem.readIntNative(u16, ext[0..2])) {
Two.case("js") => javascript,
else => MimeType.other,
};
},
3 => {
const four = [4]u8{ ext[0], ext[1], ext[2], 0 };
return switch (std.mem.readIntNative(u32, &four)) {
Four.case("css") => css,
Four.case("jpg") => MimeType.init("image/jpeg", .image),
Four.case("gif") => MimeType.init("image/gif", .image),
Four.case("png") => MimeType.init("image/png", .image),
Four.case("bmp") => MimeType.init("image/bmp", .image),
Four.case("jsx"), Four.case("mjs") => MimeType.javascript,
Four.case("wav") => MimeType.init("audio/wave", .audio),
Four.case("aac") => MimeType.init("audio/aic", .audio),
Four.case("mp4") => MimeType.init("video/mp4", .video),
Four.case("htm") => MimeType.init("text/html;charset=utf-8", .html),
Four.case("xml") => MimeType.init("text/xml", .other),
Four.case("zip") => MimeType.init("application/zip", .other),
Four.case("txt") => MimeType.init("text/plain", .other),
Four.case("ttf") => MimeType.init("font/ttf", .font),
Four.case("otf") => MimeType.init("font/otf", .font),
Four.case("ico") => ico,
Four.case("mp3") => MimeType.init("audio/mpeg", .video),
Four.case("svg") => MimeType.init("image/svg+xml", .image),
Four.case("csv") => MimeType.init("text/csv", .other),
Four.case("mid") => MimeType.init("audio/mid", .audio),
else => MimeType.other,
};
},
4 => {
return switch (Four.match(ext)) {
Four.case("json") => MimeType.init("application/json;charset=utf-8", .json),
Four.case("jpeg") => MimeType.init("image/jpeg", .image),
Four.case("aiff") => MimeType.init("image/png", .image),
Four.case("tiff") => MimeType.init("image/tiff", .image),
Four.case("html") => MimeType.init("text/html;charset=utf-8", .html),
Four.case("wasm") => MimeType.init(
"application/wasm",
.wasm,
),
Four.case("woff") => MimeType.init("font/woff", .font),
Four.case("webm") => MimeType.init("video/webm", .video),
Four.case("webp") => MimeType.init("image/webp", .image),
Four.case("midi") => MimeType.init("audio/midi", .audio),
else => MimeType.other,
};
},
5 => {
const eight = [8]u8{ ext[0], ext[1], ext[2], ext[3], ext[4], 0, 0, 0 };
return switch (std.mem.readIntNative(u64, &eight)) {
Eight.case("woff2") => MimeType.init("font/woff2", .font),
Eight.case("xhtml") => MimeType.init("application/xhtml+xml;charset=utf-8", .html),
else => MimeType.other,
};
},
else => MimeType.other,
};
}

View File

@@ -28,6 +28,10 @@ pub const ImportKind = enum(u8) {
// A CSS "url(...)" token
url,
pub fn isFromCSS(k: ImportKind) bool {
return k == .at_conditional or k == .at or k == .url;
}
};
pub const ImportRecord = struct {
@@ -36,6 +40,18 @@ pub const ImportRecord = struct {
source_index: Ref.Int = std.math.maxInt(Ref.Int),
// True for the following cases:
//
// try { require('x') } catch { handle }
// try { await import('x') } catch { handle }
// try { require.resolve('x') } catch { handle }
// import('x').catch(handle)
// import('x').then(_, handle)
//
// In these cases we shouldn't generate an error if the path could not be
// resolved.
handles_import_errors: bool = false,
// Sometimes the parser creates an import record and decides it isn't needed.
// For example, TypeScript code may have import statements that later turn
// out to be type-only imports after analyzing the whole file.

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -201,7 +201,7 @@ pub const StrictModeReservedWords = std.ComptimeStringMap(bool, .{
.{ "yield", true },
});
pub const CodePoint = i22;
pub const CodePoint = i32;
pub const PropertyModifierKeyword = enum {
p_abstract,
@@ -231,6 +231,13 @@ pub const PropertyModifierKeyword = enum {
});
};
pub const TypeScriptAccessibilityModifier = std.ComptimeStringMap(u1, .{
.{ "public", 1 },
.{ "private", 1 },
.{ "protected", 1 },
.{ "readonly", 1 },
});
pub const TokenEnumType = std.EnumArray(T, []u8);
pub const tokenToString = comptime {
@@ -529,9 +536,17 @@ pub const TypescriptStmtKeyword = enum {
pub const JSXEntityMap = std.StringHashMap(CodePoint);
pub var jsxEntity: JSXEntityMap = undefined;
var has_loaded_jsx_map = false;
// There's probably a way to move this to comptime
pub fn initJSXEntityMap() !void {
jsxEntity = JSXEntityMap.init(alloc.dynamic);
if (has_loaded_jsx_map) {
return;
}
has_loaded_jsx_map = true;
jsxEntity = JSXEntityMap.init(alloc.static);
// return jsxEntity;
jsxEntity.ensureCapacity(255) catch unreachable;
jsxEntity.putAssumeCapacity("quot", @as(CodePoint, 0x0022));
@@ -793,14 +808,14 @@ test "tokenToString" {
expectString(tokenToString.get(T.t_end_of_file), "end of file");
}
test "jsxEntity" {
try alloc.setup(std.heap.page_allocator);
// test "jsxEntity" {
// try alloc.setup(std.heap.page_allocator);
initJSXEntityMap() catch |err| {
@panic(@errorName(err));
};
// initJSXEntityMap() catch |err| {
// @panic(@errorName(err));
// };
if (jsxEntity.get("sim")) |v| {
expect(v == 0x223C);
}
}
// if (jsxEntity.get("sim")) |v| {
// expect(v == 0x223C);
// }
// }

File diff suppressed because it is too large Load Diff

40
src/js_parser/imports.zig Normal file
View File

@@ -0,0 +1,40 @@
pub const std = @import("std");
pub const logger = @import("../logger.zig");
pub const js_lexer = @import("../js_lexer.zig");
pub const importRecord = @import("../import_record.zig");
pub const js_ast = @import("../js_ast.zig");
pub const options = @import("../options.zig");
pub const alloc = @import("../alloc.zig");
pub const js_printer = @import("../js_printer.zig");
pub const renamer = @import("../renamer.zig");
pub const fs = @import("../fs.zig");
pub usingnamespace @import("../global.zig");
pub usingnamespace @import("../ast/base.zig");
pub usingnamespace js_ast.G;
pub usingnamespace @import("../defines.zig");
pub const ImportKind = importRecord.ImportKind;
pub const BindingNodeIndex = js_ast.BindingNodeIndex;
pub const StmtNodeIndex = js_ast.StmtNodeIndex;
pub const ExprNodeIndex = js_ast.ExprNodeIndex;
pub const ExprNodeList = js_ast.ExprNodeList;
pub const StmtNodeList = js_ast.StmtNodeList;
pub const BindingNodeList = js_ast.BindingNodeList;
pub const assert = std.debug.assert;
pub const LocRef = js_ast.LocRef;
pub const S = js_ast.S;
pub const B = js_ast.B;
pub const G = js_ast.G;
pub const T = js_lexer.T;
pub const E = js_ast.E;
pub const Stmt = js_ast.Stmt;
pub const Expr = js_ast.Expr;
pub const Binding = js_ast.Binding;
pub const Symbol = js_ast.Symbol;
pub const Level = js_ast.Op.Level;
pub const Op = js_ast.Op;
pub const Scope = js_ast.Scope;
pub const locModuleScope = logger.Loc{ .start = -100 };

12849
src/js_parser/js_parser.zig Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,525 @@
usingnamespace @import("./imports.zig");
usingnamespace @import("./js_parser.zig");
usingnamespace @import("../test/tester.zig");
usingnamespace @import("../linker.zig");
const SymbolList = [][]Symbol;
// const Tester = struct {
// allocator: *std.mem.Allocator,
// pub const Expectation = struct {
// target: anytype,
// pub fn report(writer: anytype) void {}
// pub const Outcome = enum {
// pending,
// pass,
// fail,
// };
// const Normalized = struct {
// value: NormalizedValue,
// optional: bool = false,
// pointer: bool = false,
// array_like: bool = false,
// const NormalizedValue = union(enum) {
// Struct: anytype,
// Number: f64,
// String: anytype,
// };
// pub fn parse_valuetype(value: anytype, was_optional: bool, was_pointer: bool, was_arraylike: bool, original_value: anytype) Normalized {
// switch (@typeInfo(@TypeOf(value))) {
// .Pointer => |info| {
// return switch (info.size) {
// .One => {
// return parse_valuetype(value.*, was_optional, true, was_arraylike, original_value);
// },
// .Many, .C => @compileError("Not supported."),
// .Slice => |slice| {
// return parse_valuetype(value.ptr.*, was_optional, true, true, original_value);
// },
// };
// },
// .Enum => |info| {
// return parse_valuetype(@enumToInt(value), was_optional, was_pointer, was_arraylike, original_value);
// },
// .Struct => |info| {
// return Normalized{
// .value = NormalizedValue{
// .Struct = original_value,
// },
// .optional = was_optional,
// .pointer = was_pointer,
// .array_like = was_arraylike,
// };
// },
// .Int => |info| {
// if (std.meta.bitCount(@TypeOf(value)) == 8)
// return Normalized{
// .value = NormalizedValue{
// .Number = @intToFloat(f64, value),
// },
// .optional = was_optional,
// .pointer = was_pointer,
// .array_like = was_arraylike,
// };
// },
// .ComptimeInt => {
// return Normalized{
// .value = NormalizedValue{
// .Number = @intToFloat(f64, value),
// },
// .optional = was_optional,
// .pointer = was_pointer,
// .array_like = was_arraylike,
// };
// },
// .Float => |info| {
// return Normalized{
// .value = NormalizedValue{
// .Number = @floatCast(f64, value),
// },
// .optional = was_optional,
// .pointer = was_pointer,
// .array_like = was_arraylike,
// };
// },
// }
// }
// pub fn init(value: anytype) Normalized {
// return Normalized.parse_valuetype(value, false, false, false, value);
// }
// };
// fn equals(a: anytype) bool {
// const T = @TypeOf(a);
// const a_info = @typeInfo(T);
// const Tb = @TypeOf(b);
// const b_info = @typeInfo(Tb);
// const a_final = a_getter: {};
// switch (@typeInfo(T)) {
// .Struct => |info| {
// inline for (info.fields) |field_info| {
// if (!eql(@field(a, field_info.name), @field(b, field_info.name))) return false;
// }
// return true;
// },
// .ErrorUnion => {
// if (a) |a_p| {
// if (b) |b_p| return eql(a_p, b_p) else |_| return false;
// } else |a_e| {
// if (b) |_| return false else |b_e| return a_e == b_e;
// }
// },
// .Union => |info| {
// if (info.tag_type) |UnionTag| {
// const tag_a = activeTag(a);
// const tag_b = activeTag(b);
// if (tag_a != tag_b) return false;
// inline for (info.fields) |field_info| {
// if (@field(UnionTag, field_info.name) == tag_a) {
// return eql(@field(a, field_info.name), @field(b, field_info.name));
// }
// }
// return false;
// }
// @compileError("cannot compare untagged union type " ++ @typeName(T));
// },
// .Array => {
// if (a.len != b.len) return false;
// for (a) |e, i|
// if (!eql(e, b[i])) return false;
// return true;
// },
// .Vector => |info| {
// var i: usize = 0;
// while (i < info.len) : (i += 1) {
// if (!eql(a[i], b[i])) return false;
// }
// return true;
// },
// .Pointer => |info| {
// return switch (info.size) {
// .One, .Many, .C => a == b,
// .Slice => |slice| {
// if (a.len != b.len) {
// return false;
// }
// for (a) |e, i|
// if (!eql(e, b[i])) return false;
// },
// };
// },
// .Optional => {
// return eql(a.?, b.?);
// },
// else => return a == b,
// }
// }
// pub fn toBe(value: anytype) *Expectation {}
// };
// pub fn expect(outcome: anytype) Expectation {}
// pub fn init(allocator: *std.mem.Allocator) Tester {
// return Tester{ .allocator = allocator };
// }
// };
fn expectPrinted(t: *Tester, contents: string, expected: string, src: anytype) !void {
if (alloc.needs_setup) {
try alloc.setup(std.heap.c_allocator);
var __source = Output.Source.init(std.io.getStdOut(), std.io.getStdErr());
Output.Source.set(&__source);
}
debugl("INIT TEST");
var log = logger.Log.init(alloc.dynamic);
var source = logger.Source.initPathString("file.jsx", contents);
var ast: js_ast.Ast = undefined;
var define = try Define.init(alloc.dynamic, null);
debugl("INIT PARSER");
var parser = try Parser.init(Parser.Options{ .jsx = .{} }, &log, &source, define, alloc.dynamic);
debugl("RUN PARSER");
var res = try parser.parse();
ast = res.ast;
var symbols: SymbolList = &([_][]Symbol{ast.symbols});
var symbol_map = js_ast.Symbol.Map.initList(symbols);
if (log.msgs.items.len > 0) {
debugl("PRINT LOG ERRORS");
var fixedBuffer = [_]u8{0} ** 4096;
var stream = std.io.fixedBufferStream(&fixedBuffer);
try log.print(stream.writer());
Output.print("{s}", .{fixedBuffer});
}
var linker = Linker{};
debugl("START AST PRINT");
if (PRINT_AST) {
var fixed_buffer = [_]u8{0} ** 512000;
var buf_stream = std.io.fixedBufferStream(&fixed_buffer);
try ast.toJSON(alloc.dynamic, std.io.getStdErr().writer());
}
const result = js_printer.printAst(alloc.dynamic, ast, symbol_map, &source, true, js_printer.Options{ .to_module_ref = res.ast.module_ref orelse Ref{ .inner_index = 0 } }, &linker) catch unreachable;
var copied = try std.mem.dupe(alloc.dynamic, u8, result.js);
_ = t.expect(contents, copied, src);
// std.testing.expectEqualStrings(contents, copied);
}
const PRINT_AST = false;
test "expectPrint" {
var t_ = Tester.t(std.heap.c_allocator);
var t = &t_;
// try expectPrinted(t, @embedFile("../test/fixtures/function-scope-bug.jsx"), @embedFile("../test/fixtures/function-scope-bug.jsx"), @src());
try expectPrinted(t, @embedFile("../test/fixtures/cannot-assign-to-import-bug.js"), @embedFile("../test/fixtures/cannot-assign-to-import-bug.js"), @src());
// try expectPrinted(t, "if (true) { console.log(\"hi\"); }", "if (true) { console.log(\"hi\"); }", @src());
// try expectPrinted(t, "try { console.log(\"hi\"); }\ncatch(er) { console.log('noooo'); }", "class Foo {\n foo() {\n }\n}\n", @src());
// try expectPrinted(t, "try { console.log(\"hi\"); }\ncatch(er) { console.log('noooo'); }", "class Foo {\n foo() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { foo() {} }", "class Foo {\n foo() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { *foo() {} }", "class Foo {\n *foo() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { get foo() {} }", "class Foo {\n get foo() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { set foo(x) {} }", "class Foo {\n set foo(x) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static foo() {} }", "class Foo {\n static foo() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static *foo() {} }", "class Foo {\n static *foo() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static get foo() {} }", "class Foo {\n static get foo() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static set foo(x) {} }", "class Foo {\n static set foo(x) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { async foo() {} }", "class Foo {\n async foo() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static async foo() {} }", "class Foo {\n static async foo() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static async *foo() {} }", "class Foo {\n static async *foo() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static async *foo() {}\n hey = true; }", "class Foo {\n static async *foo() {\n }\n hey = true;\n}\n", @src());
// try expectPrinted(t, "class Foo { if() {} }", "class Foo {\n if() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { *if() {} }", "class Foo {\n *if() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { get if() {} }", "class Foo {\n get if() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { set if(x) {} }", "class Foo {\n set if(x) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static if() {} }", "class Foo {\n static if() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static *if() {} }", "class Foo {\n static *if() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static get if() {} }", "class Foo {\n static get if() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static set if(x) {} }", "class Foo {\n static set if(x) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { async if() {} }", "class Foo {\n async if() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static async if() {} }", "class Foo {\n static async if() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static async *if() {} }", "class Foo {\n static async *if() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a() {} b() {} }", "class Foo {\n a() {\n }\n b() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a() {} get b() {} }", "class Foo {\n a() {\n }\n get b() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a() {} set b(x) {} }", "class Foo {\n a() {\n }\n set b(x) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a() {} static b() {} }", "class Foo {\n a() {\n }\n static b() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a() {} static *b() {} }", "class Foo {\n a() {\n }\n static *b() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a() {} static get b() {} }", "class Foo {\n a() {\n }\n static get b() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a() {} static set b(x) {} }", "class Foo {\n a() {\n }\n static set b(x) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a() {} async b() {} }", "class Foo {\n a() {\n }\n async b() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a() {} static async b() {} }", "class Foo {\n a() {\n }\n static async b() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a() {} static async *b() {} }", "class Foo {\n a() {\n }\n static async *b() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { [arguments] }", "class Foo {\n [arguments];\n}\n", @src());
// try expectPrinted(t, "class Foo { [arguments] = 1 }", "class Foo {\n [arguments] = 1;\n}\n", @src());
// try expectPrinted(t, "class Foo { arguments = 1 }", "class Foo {\n arguments = 1;\n}\n", @src());
// try expectPrinted(t, "class Foo { x = class { arguments = 1 } }", "class Foo {\n x = class {\n arguments = 1;\n };\n}\n", @src());
// try expectPrinted(t, "class Foo { x = function() { arguments } }", "class Foo {\n x = function() {\n arguments;\n };\n}\n", @src());
// try expectPrinted(t, "class Foo { get ['constructor']() {} }", "class Foo {\n get [\"constructor\"]() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { set ['constructor'](x) {} }", "class Foo {\n set [\"constructor\"](x) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { *['constructor']() {} }", "class Foo {\n *[\"constructor\"]() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { async ['constructor']() {} }", "class Foo {\n async [\"constructor\"]() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { async *['constructor']() {} }", "class Foo {\n async *[\"constructor\"]() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { get prototype() {} }", "class Foo {\n get prototype() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { get 'prototype'() {} }", "class Foo {\n get prototype() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { set prototype(x) {} }", "class Foo {\n set prototype(x) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { set 'prototype'(x) {} }", "class Foo {\n set prototype(x) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { *prototype() {} }", "class Foo {\n *prototype() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { *'prototype'() {} }", "class Foo {\n *prototype() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { async prototype() {} }", "class Foo {\n async prototype() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { async 'prototype'() {} }", "class Foo {\n async prototype() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { async *prototype() {} }", "class Foo {\n async *prototype() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { async *'prototype'() {} }", "class Foo {\n async *prototype() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static get ['prototype']() {} }", "class Foo {\n static get [\"prototype\"]() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static set ['prototype'](x) {} }", "class Foo {\n static set [\"prototype\"](x) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static *['prototype']() {} }", "class Foo {\n static *[\"prototype\"]() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static async ['prototype']() {} }", "class Foo {\n static async [\"prototype\"]() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { static async *['prototype']() {} }", "class Foo {\n static async *[\"prototype\"]() {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo extends Bar { constructor() { super() } }", "class Foo extends Bar {\n constructor() {\n super();\n }\n}\n", @src());
// try expectPrinted(t, "class Foo extends Bar { constructor() { () => super() } }", "class Foo extends Bar {\n constructor() {\n () => super();\n }\n}\n", @src());
// try expectPrinted(t, "class Foo extends Bar { constructor() { () => { super() } } }", "class Foo extends Bar {\n constructor() {\n () => {\n super();\n };\n }\n}\n", @src());
// try expectPrinted(t, "class Foo extends Bar { constructor(x = super()) {} }", "class Foo extends Bar {\n constructor(x = super()) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo extends Bar { constructor(x = () => super()) {} }", "class Foo extends Bar {\n constructor(x = () => super()) {\n }\n}\n", @src());
// try expectPrinted(t, "class Foo { a }", "class Foo {\n a;\n}\n", @src());
// try expectPrinted(t, "class Foo { a = 1 }", "class Foo {\n a = 1;\n}\n", @src());
// try expectPrinted(t, "class Foo { static a }", "class Foo {\n static a;\n}\n", @src());
// try expectPrinted(t, "class Foo { static a = 1 }", "class Foo {\n static a = 1;\n}\n", @src());
// try expectPrinted(t, "class Foo { static a = 1; b }", "class Foo {\n static a = 1;\n b;\n}\n", @src());
// try expectPrinted(t, "class Foo { static [a] }", "class Foo {\n static [a];\n}\n", @src());
// try expectPrinted(t, "class Foo { static [a] = 1 }", "class Foo {\n static [a] = 1;\n}\n", @src());
// try expectPrinted(t, "class Foo { static [a] = 1; [b] }", "class Foo {\n static [a] = 1;\n [b];\n}\n", @src());
// try expectPrinted(t, "class Foo { prototype }", "class Foo {\n prototype;\n}\n", @src());
// try expectPrinted(t, "class Foo { 'prototype' }", "class Foo {\n prototype;\n}\n", @src());
// try expectPrinted(t, "class Foo { prototype = 1 }", "class Foo {\n prototype = 1;\n}\n", @src());
// try expectPrinted(t, "class Foo { 'prototype' = 1 }", "class Foo {\n prototype = 1;\n}\n", @src());
// try expectPrinted(t, "function* foo() { -(yield 100) }", "function* foo() {\n -(yield 100);\n}\n", @src());
// try expectPrinted(t, "function *foo() { (x = yield y) }", "function* foo() {\n x = yield y;\n}\n", @src());
// try expectPrinted(t, "yield\n100", "yield;\n100;\n", @src());
// try expectPrinted(t, "let x = {yield}", "let x = {yield};\n", @src());
// try expectPrinted(t, "function foo() { ({yield} = x) }", "function foo() {\n ({yield} = x);\n}\n", @src());
// try expectPrinted(t, "({ *yield() {} })", "({*yield() {\n}});\n", @src());
// try expectPrinted(t, "(class { *yield() {} })", "(class {\n *yield() {\n }\n});\n", @src());
// try expectPrinted(t, "class Foo { *yield() {} }", "class Foo {\n *yield() {\n }\n}\n", @src());
// try expectPrinted(t, "function* yield() {}", "function* yield() {\n}\n", @src());
// try expectPrinted(t, "({ async *yield() {} })", "({async *yield() {\n}});\n", @src());
// try expectPrinted(t, "(class { async *yield() {} })", "(class {\n async *yield() {\n }\n});\n", @src());
// try expectPrinted(t, "class Foo { async *yield() {} }", "class Foo {\n async *yield() {\n }\n}\n", @src());
// try expectPrinted(t, "async function* yield() {}", "async function* yield() {\n}\n", @src());
// try expectPrinted(t, "-async function foo() { await 0 }", "-async function foo() {\n await 0;\n};\n", @src());
// try expectPrinted(t, "-async function() { await 0 }", "-async function() {\n await 0;\n};\n", @src());
// try expectPrinted(t, "1 - async function foo() { await 0 }", "1 - async function foo() {\n await 0;\n};\n", @src());
// try expectPrinted(t, "1 - async function() { await 0 }", "1 - async function() {\n await 0;\n};\n", @src());
// try expectPrinted(t, "(async function foo() { await 0 })", "(async function foo() {\n await 0;\n});\n", @src());
// try expectPrinted(t, "(async function() { await 0 })", "(async function() {\n await 0;\n});\n", @src());
// try expectPrinted(t, "(x, async function foo() { await 0 })", "x, async function foo() {\n await 0;\n};\n", @src());
// try expectPrinted(t, "(x, async function() { await 0 })", "x, async function() {\n await 0;\n};\n", @src());
// try expectPrinted(t, "new async function() { await 0 }", "new async function() {\n await 0;\n}();\n", @src());
// try expectPrinted(t, "new async function() { await 0 }.x", "new async function() {\n await 0;\n}.x();\n", @src());
// try expectPrinted(t, "function foo() { await }", "function foo() {\n await;\n}\n", @src());
// try expectPrinted(t, "async function foo() { await 0 }", "async function foo() {\n await 0;\n}\n", @src());
// try expectPrinted(t, "(async x => y), z", "async (x) => y, z;\n", @src());
// try expectPrinted(t, "(async x => y, z)", "async (x) => y, z;\n", @src());
// try expectPrinted(t, "(async x => (y, z))", "async (x) => (y, z);\n", @src());
// try expectPrinted(t, "(async (x) => y), z", "async (x) => y, z;\n", @src());
// try expectPrinted(t, "(async (x) => y, z)", "async (x) => y, z;\n", @src());
// try expectPrinted(t, "(async (x) => (y, z))", "async (x) => (y, z);\n", @src());
// try expectPrinted(t, "async x => y, z", "async (x) => y, z;\n", @src());
// try expectPrinted(t, "async x => (y, z)", "async (x) => (y, z);\n", @src());
// try expectPrinted(t, "async (x) => y, z", "async (x) => y, z;\n", @src());
// try expectPrinted(t, "async (x) => (y, z)", "async (x) => (y, z);\n", @src());
// try expectPrinted(t, "export default async x => (y, z)", "export default async (x) => (y, z);\n", @src());
// try expectPrinted(t, "export default async (x) => (y, z)", "export default async (x) => (y, z);\n", @src());
// try expectPrinted(t, "(class { async \n foo() {} })", "(class {\n async;\n foo() {\n }\n});\n", @src());
// try expectPrinted(t, "(class { async \n *foo() {} })", "(class {\n async;\n *foo() {\n }\n});\n", @src());
// try expectPrinted(t, "async function foo(){for await(x of y);}", "async function foo() {\n for await (x of y)\n ;\n}\n", @src());
// try expectPrinted(t, "async function foo(){for await(let x of y);}", "async function foo() {\n for await (let x of y)\n ;\n}\n", @src());
// // Await as a declaration
// try expectPrinted(t, "({ async await() {} })", "({async await() {\n}});\n", @src());
// try expectPrinted(t, "(class { async await() {} })", "(class {\n async await() {\n }\n});\n", @src());
// try expectPrinted(t, "class Foo { async await() {} }", "class Foo {\n async await() {\n }\n}\n", @src());
// try expectPrinted(t, "({ async *await() {} })", "({async *await() {\n}});\n", @src());
// try expectPrinted(t, "(class { async *await() {} })", "(class {\n async *await() {\n }\n});\n", @src());
// try expectPrinted(t, "class Foo { async *await() {} }", "class Foo {\n async *await() {\n }\n}\n", @src());
// try expectPrinted(t, "(x) => function() {}", "(x) => function() {\n};\n", @src());
// try expectPrinted(t, "x => function() {}", "(x) => function() {\n};\n", @src());
// try expectPrinted(t, "(x => function() {})", "(x) => function() {\n};\n", @src());
// try expectPrinted(t, "(x = () => {}) => {}", "(x = () => {\n}) => {\n};\n", @src());
// try expectPrinted(t, "async (x = () => {}) => {}", "async (x = () => {\n}) => {\n};\n", @src());
// try expectPrinted(t, "(() => {}) ? a : b", "(() => {\n}) ? a : b;\n", @src());
// try expectPrinted(t, "1 < (() => {})", "1 < (() => {\n});\n", @src());
// try expectPrinted(t, "y = x => {}", "y = (x) => {\n};\n", @src());
// try expectPrinted(t, "y = () => {}", "y = () => {\n};\n", @src());
// try expectPrinted(t, "y = (x) => {}", "y = (x) => {\n};\n", @src());
// try expectPrinted(t, "y = async x => {}", "y = async (x) => {\n};\n", @src());
// try expectPrinted(t, "y = async () => {}", "y = async () => {\n};\n", @src());
// try expectPrinted(t, "y = async (x) => {}", "y = async (x) => {\n};\n", @src());
// try expectPrinted(t, "1 + function () {}", "1 + function() {\n};\n", @src());
// try expectPrinted(t, "1 + async function () {}", "1 + async function() {\n};\n", @src());
// try expectPrinted(t, "class Foo extends function () {} {}", "class Foo extends function() {\n} {\n}\n", @src());
// try expectPrinted(t, "class Foo extends async function () {} {}", "class Foo extends async function() {\n} {\n}\n", @src());
// try expectPrinted(t, "() => {}\n(0)", "() => {\n};\n0;\n", @src());
// try expectPrinted(t, "x => {}\n(0)", "(x) => {\n};\n0;\n", @src());
// try expectPrinted(t, "async () => {}\n(0)", "async () => {\n};\n0;\n", @src());
// try expectPrinted(t, "async x => {}\n(0)", "async (x) => {\n};\n0;\n", @src());
// try expectPrinted(t, "async (x) => {}\n(0)", "async (x) => {\n};\n0;\n", @src());
// try expectPrinted(t, "() => {}\n,0", "() => {\n}, 0;\n", @src());
// try expectPrinted(t, "x => {}\n,0", "(x) => {\n}, 0;\n", @src());
// try expectPrinted(t, "async () => {}\n,0", "async () => {\n}, 0;\n", @src());
// try expectPrinted(t, "async x => {}\n,0", "async (x) => {\n}, 0;\n", @src());
// try expectPrinted(t, "async (x) => {}\n,0", "async (x) => {\n}, 0;\n", @src());
// try expectPrinted(t, "(() => {})\n(0)", "(() => {\n})(0);\n", @src());
// try expectPrinted(t, "(x => {})\n(0)", "((x) => {\n})(0);\n", @src());
// try expectPrinted(t, "(async () => {})\n(0)", "(async () => {\n})(0);\n", @src());
// try expectPrinted(t, "(async x => {})\n(0)", "(async (x) => {\n})(0);\n", @src());
// try expectPrinted(t, "(async (x) => {})\n(0)", "(async (x) => {\n})(0);\n", @src());
// try expectPrinted(t, "y = () => {}\n(0)", "y = () => {\n};\n0;\n", @src());
// try expectPrinted(t, "y = x => {}\n(0)", "y = (x) => {\n};\n0;\n", @src());
// try expectPrinted(t, "y = async () => {}\n(0)", "y = async () => {\n};\n0;\n", @src());
// try expectPrinted(t, "y = async x => {}\n(0)", "y = async (x) => {\n};\n0;\n", @src());
// try expectPrinted(t, "y = async (x) => {}\n(0)", "y = async (x) => {\n};\n0;\n", @src());
// try expectPrinted(t, "y = () => {}\n,0", "y = () => {\n}, 0;\n", @src());
// try expectPrinted(t, "y = x => {}\n,0", "y = (x) => {\n}, 0;\n", @src());
// try expectPrinted(t, "y = async () => {}\n,0", "y = async () => {\n}, 0;\n", @src());
// try expectPrinted(t, "y = async x => {}\n,0", "y = async (x) => {\n}, 0;\n", @src());
// try expectPrinted(t, "y = async (x) => {}\n,0", "y = async (x) => {\n}, 0;\n", @src());
// try expectPrinted(t, "y = (() => {})\n(0)", "y = (() => {\n})(0);\n", @src());
// try expectPrinted(t, "y = (x => {})\n(0)", "y = ((x) => {\n})(0);\n", @src());
// try expectPrinted(t, "y = (async () => {})\n(0)", "y = (async () => {\n})(0);\n", @src());
// try expectPrinted(t, "y = (async x => {})\n(0)", "y = (async (x) => {\n})(0);\n", @src());
// try expectPrinted(t, "y = (async (x) => {})\n(0)", "y = (async (x) => {\n})(0);\n", @src());
// try expectPrinted(t, "(() => {}\n,0)", "() => {\n}, 0;\n", @src());
// try expectPrinted(t, "(x => {}\n,0)", "(x) => {\n}, 0;\n", @src());
// try expectPrinted(t, "(async () => {}\n,0)", "async () => {\n}, 0;\n", @src());
// try expectPrinted(t, "(async x => {}\n,0)", "async (x) => {\n}, 0;\n", @src());
// try expectPrinted(t, "(async (x) => {}\n,0)", "async (x) => {\n}, 0;\n", @src());
// try expectPrinted(t, "((() => {})\n(0))", "(() => {\n})(0);\n", @src());
// try expectPrinted(t, "((x => {})\n(0))", "((x) => {\n})(0);\n", @src());
// try expectPrinted(t, "((async () => {})\n(0))", "(async () => {\n})(0);\n", @src());
// try expectPrinted(t, "((async x => {})\n(0))", "(async (x) => {\n})(0);\n", @src());
// try expectPrinted(t, "((async (x) => {})\n(0))", "(async (x) => {\n})(0);\n", @src());
// try expectPrinted(t, "y = (() => {}\n,0)", "y = (() => {\n}, 0);\n", @src());
// try expectPrinted(t, "y = (x => {}\n,0)", "y = ((x) => {\n}, 0);\n", @src());
// try expectPrinted(t, "y = (async () => {}\n,0)", "y = (async () => {\n}, 0);\n", @src());
// try expectPrinted(t, "y = (async x => {}\n,0)", "y = (async (x) => {\n}, 0);\n", @src());
// try expectPrinted(t, "y = (async (x) => {}\n,0)", "y = (async (x) => {\n}, 0);\n", @src());
// try expectPrinted(t, "y = ((() => {})\n(0))", "y = (() => {\n})(0);\n", @src());
// try expectPrinted(t, "y = ((x => {})\n(0))", "y = ((x) => {\n})(0);\n", @src());
// try expectPrinted(t, "y = ((async () => {})\n(0))", "y = (async () => {\n})(0);\n", @src());
// try expectPrinted(t, "y = ((async x => {})\n(0))", "y = (async (x) => {\n})(0);\n", @src());
// try expectPrinted(t, "y = ((async (x) => {})\n(0))", "y = (async (x) => {\n})(0);\n", @src());
// try expectPrinted(t, "(-x) ** 2", "(-x) ** 2;\n", @src());
// try expectPrinted(t, "(+x) ** 2", "(+x) ** 2;\n", @src());
// try expectPrinted(t, "(~x) ** 2", "(~x) ** 2;\n", @src());
// try expectPrinted(t, "(!x) ** 2", "(!x) ** 2;\n", @src());
// try expectPrinted(t, "(-1) ** 2", "(-1) ** 2;\n", @src());
// try expectPrinted(t, "(+1) ** 2", "1 ** 2;\n", @src());
// try expectPrinted(t, "(~1) ** 2", "(~1) ** 2;\n", @src());
// try expectPrinted(t, "(!1) ** 2", "false ** 2;\n", @src());
// try expectPrinted(t, "(void x) ** 2", "(void x) ** 2;\n", @src());
// try expectPrinted(t, "(delete x) ** 2", "(delete x) ** 2;\n", @src());
// try expectPrinted(t, "(typeof x) ** 2", "(typeof x) ** 2;\n", @src());
// try expectPrinted(t, "undefined ** 2", "(void 0) ** 2;\n", @src());
// try expectPrinted(t, "({ prototype: 1 })", "({prototype: 1});\n", @src());
// try expectPrinted(t, "({ get prototype() {} })", "({get prototype() {\n}});\n", @src());
// try expectPrinted(t, "({ set prototype(x) {} })", "({set prototype(x) {\n}});\n", @src());
// try expectPrinted(t, "({ *prototype() {} })", "({*prototype() {\n}});\n", @src());
// try expectPrinted(t, "({ async prototype() {} })", "({async prototype() {\n}});\n", @src());
// try expectPrinted(t, "({ async* prototype() {} })", "({async *prototype() {\n}});\n", @src());
// try expectPrinted(t, "({foo})", "({foo});\n", @src());
// try expectPrinted(t, "({foo:0})", "({foo: 0});\n", @src());
// try expectPrinted(t, "({1e9:0})", "({1e9: 0});\n", @src());
// try expectPrinted(t, "({1_2_3n:0})", "({123n: 0});\n", @src());
// try expectPrinted(t, "({0x1_2_3n:0})", "({0x123n: 0});\n", @src());
// try expectPrinted(t, "({foo() {}})", "({foo() {\n}});\n", @src());
// try expectPrinted(t, "({*foo() {}})", "({*foo() {\n}});\n", @src());
// try expectPrinted(t, "({get foo() {}})", "({get foo() {\n}});\n", @src());
// try expectPrinted(t, "({set foo(x) {}})", "({set foo(x) {\n}});\n", @src());
// try expectPrinted(t, "({if:0})", "({if: 0});\n", @src());
// try expectPrinted(t, "({if() {}})", "({if() {\n}});\n", @src());
// try expectPrinted(t, "({*if() {}})", "({*if() {\n}});\n", @src());
// try expectPrinted(t, "({get if() {}})", "({get if() {\n}});\n", @src());
// try expectPrinted(t, "({set if(x) {}})", "({set if(x) {\n}});\n", @src());
// try expectPrinted(t, "async function foo() { await x; }", "await x;\n", @src());
// try expectPrinted(t, "async function foo() { await +x; }", "await +x;\n", @src());
// try expectPrinted(t, "async function foo() { await -x; }", "await -x;\n", @src());
// try expectPrinted(t, "async function foo() { await ~x; }", "await ~x;\n", @src());
// try expectPrinted(t, "async function foo() { await !x; }", "await !x;\n", @src());
// try expectPrinted(t, "async function foo() { await --x; }", "await --x;\n", @src());
// try expectPrinted(t, "async function foo() { await ++x; }", "await ++x;\n", @src());
// try expectPrinted(t, "async function foo() { await x--; }", "await x--;\n", @src());
// try expectPrinted(t, "async function foo() { await x++; }", "await x++;\n", @src());
// try expectPrinted(t, "async function foo() { await void x; }", "await void x;\n", @src());
// try expectPrinted(t, "async function foo() { await typeof x; }", "await typeof x;\n", @src());
// try expectPrinted(t, "async function foo() { await (x * y); }", "await (x * y);\n", @src());
// try expectPrinted(t, "async function foo() { await (x ** y); }", "await (x ** y);\n", @src());
// try expectPrinted(t, "export default (1, 2)", "export default (1, 2);\n", @src());
// try expectPrinted(t, "export default async", "export default async;\n", @src());
// try expectPrinted(t, "export default async()", "export default async();\n", @src());
// try expectPrinted(t, "export default async + 1", "export default async + 1;\n", @src());
// try expectPrinted(t, "export default async => {}", "export default (async) => {\n};\n", @src());
// try expectPrinted(t, "export default async x => {}", "export default async (x) => {\n};\n", @src());
// try expectPrinted(t, "export default async () => {}", "export default async () => {\n};\n", @src());
// // This is a corner case in the ES6 grammar. The "export default" statement
// // normally takes an expression except for the function and class keywords
// // which behave sort of like their respective declarations instead.
// try expectPrinted(t, "export default function() {} - after", "export default function() {\n}\n-after;\n", @src());
// try expectPrinted(t, "export default function*() {} - after", "export default function* () {\n}\n-after;\n", @src());
// try expectPrinted(t, "export default function foo() {} - after", "export default function foo() {\n}\n-after;\n", @src());
// try expectPrinted(t, "export default function* foo() {} - after", "export default function* foo() {\n}\n-after;\n", @src());
// try expectPrinted(t, "export default async function() {} - after", "export default async function() {\n}\n-after;\n", @src());
// try expectPrinted(t, "export default async function*() {} - after", "export default async function* () {\n}\n-after;\n", @src());
// try expectPrinted(t, "export default async function foo() {} - after", "export default async function foo() {\n}\n-after;\n", @src());
// try expectPrinted(t, "export default async function* foo() {} - after", "export default async function* foo() {\n}\n-after;\n", @src());
// try expectPrinted(t, "export default class {} - after", "export default class {\n}\n-after;\n", @src());
// try expectPrinted(t, "export default class Foo {} - after", "export default class Foo {\n}\n-after;\n", @src());
t.report(@src());
}

File diff suppressed because it is too large Load Diff

View File

@@ -7,7 +7,7 @@ const options = @import("options.zig");
const alloc = @import("alloc.zig");
const fs = @import("fs.zig");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
usingnamespace @import("ast/base.zig");
usingnamespace js_ast.G;
@@ -36,22 +36,31 @@ const Level = js_ast.Op.Level;
const Op = js_ast.Op;
const Scope = js_ast.Scope;
const locModuleScope = logger.Loc.Empty;
const Lexer = js_lexer.Lexer;
fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
const Lexer = if (opts.allow_comments) js_lexer.TSConfigJSONLexer else js_lexer.JSONLexer;
return struct {
lexer: Lexer,
source: *logger.Source,
source: *const logger.Source,
log: *logger.Log,
allocator: *std.mem.Allocator,
pub fn init(allocator: *std.mem.Allocator, source: *logger.Source, log: *logger.Log) !Parser {
return Parser{
.lexer = try Lexer.init(log, source, allocator),
.allocator = allocator,
.log = log,
.source = source,
};
pub fn init(allocator: *std.mem.Allocator, source: *const logger.Source, log: *logger.Log) !Parser {
if (opts.allow_comments) {
return Parser{
.lexer = try Lexer.initTSConfig(log, source, allocator),
.allocator = allocator,
.log = log,
.source = source,
};
} else {
return Parser{
.lexer = try Lexer.initJSON(log, source, allocator),
.allocator = allocator,
.log = log,
.source = source,
};
}
}
const Parser = @This();
@@ -63,46 +72,45 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
return Expr.alloc(p.allocator, t, loc);
}
}
pub fn parseExpr(p: *Parser) Expr {
pub fn parseExpr(p: *Parser) anyerror!Expr {
const loc = p.lexer.loc();
switch (p.lexer.token) {
.t_false => {
p.lexer.next();
try p.lexer.next();
return p.e(E.Boolean{
.value = false,
}, loc);
},
.t_true => {
p.lexer.next();
try p.lexer.next();
return p.e(E.Boolean{
.value = true,
}, loc);
},
.t_null => {
p.lexer.next();
try p.lexer.next();
return p.e(E.Null{}, loc);
},
.t_string_literal => {
const value = p.lexer.string_literal;
p.lexer.next();
return p.e(E.String{
.value = value,
}, loc);
var str: E.String = p.lexer.toEString();
try p.lexer.next();
return p.e(str, loc);
},
.t_numeric_literal => {
const value = p.lexer.number;
p.lexer.next();
try p.lexer.next();
return p.e(E.Number{ .value = value }, loc);
},
.t_minus => {
p.lexer.next();
try p.lexer.next();
const value = p.lexer.number;
p.lexer.expect(.t_numeric_literal);
try p.lexer.expect(.t_numeric_literal);
return p.e(E.Number{ .value = -value }, loc);
},
.t_open_bracket => {
p.lexer.next();
try p.lexer.next();
var is_single_line = !p.lexer.has_newline_before;
var exprs = std.ArrayList(Expr).init(p.allocator);
@@ -112,7 +120,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
is_single_line = false;
}
if (!p.parseMaybeTrailingComma(.t_close_bracket)) {
if (!try p.parseMaybeTrailingComma(.t_close_bracket)) {
break;
}
@@ -121,27 +129,28 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
}
}
exprs.append(p.parseExpr()) catch unreachable;
exprs.append(try p.parseExpr()) catch unreachable;
}
if (p.lexer.has_newline_before) {
is_single_line = false;
}
p.lexer.expect(.t_close_bracket);
try p.lexer.expect(.t_close_bracket);
return p.e(E.Array{ .items = exprs.toOwnedSlice() }, loc);
},
.t_open_brace => {
p.lexer.next();
try p.lexer.next();
var is_single_line = !p.lexer.has_newline_before;
var properties = std.ArrayList(G.Property).init(p.allocator);
var duplicates = std.StringHashMap(u1).init(p.allocator);
var duplicates = std.BufSet.init(p.allocator);
defer duplicates.deinit();
while (p.lexer.token != .t_close_brace) {
if (properties.items.len > 0) {
if (p.lexer.has_newline_before) {
is_single_line = false;
}
if (!p.parseMaybeTrailingComma(.t_close_brace)) {
if (!try p.parseMaybeTrailingComma(.t_close_brace)) {
break;
}
if (p.lexer.has_newline_before) {
@@ -149,46 +158,58 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
}
}
var key_string = p.lexer.string_literal;
var str: E.String = undefined;
if (p.lexer.string_literal_is_ascii) {
str = E.String{
.utf8 = p.lexer.string_literal_slice,
};
} else {
const value = p.lexer.stringLiteralUTF16();
str = E.String{
.value = value,
};
}
const is_duplicate = duplicates.exists(p.lexer.string_literal_slice);
if (!is_duplicate) {
duplicates.put(p.lexer.string_literal_slice) catch unreachable;
}
var key_range = p.lexer.range();
var key = p.e(E.String{ .value = key_string }, key_range.loc);
p.lexer.expect(.t_string_literal);
var key_text = p.lexer.utf16ToString(key_string);
// Warn about duplicate keys
const entry = duplicates.getOrPut(key_text) catch unreachable;
if (entry.found_existing) {
p.log.addRangeWarningFmt(p.source.*, key_range, p.allocator, "Duplicate key \"{s}\" in object literal", .{key_text}) catch unreachable;
if (is_duplicate) {
p.log.addRangeWarningFmt(p.source, key_range, p.allocator, "Duplicate key \"{s}\" in object literal", .{p.lexer.string_literal_slice}) catch unreachable;
}
p.lexer.expect(.t_colon);
var value = p.parseExpr();
var key = p.e(str, key_range.loc);
try p.lexer.expect(.t_string_literal);
try p.lexer.expect(.t_colon);
var value = try p.parseExpr();
properties.append(G.Property{ .key = key, .value = value }) catch unreachable;
}
if (p.lexer.has_newline_before) {
is_single_line = false;
}
p.lexer.expect(.t_close_brace);
try p.lexer.expect(.t_close_brace);
return p.e(E.Object{
.properties = properties.toOwnedSlice(),
.is_single_line = is_single_line,
}, loc);
},
else => {
p.lexer.unexpected();
return p.e(E.Missing{}, loc);
try p.lexer.unexpected();
return error.ParserError;
},
}
}
pub fn parseMaybeTrailingComma(p: *Parser, closer: T) bool {
pub fn parseMaybeTrailingComma(p: *Parser, closer: T) !bool {
const comma_range = p.lexer.range();
p.lexer.expect(.t_comma);
try p.lexer.expect(.t_comma);
if (p.lexer.token == closer) {
if (!opts.allow_trailing_commas) {
p.log.addRangeError(p.source.*, comma_range, "JSON does not support trailing commas") catch unreachable;
p.log.addRangeError(p.source, comma_range, "JSON does not support trailing commas") catch unreachable;
}
return false;
}
@@ -201,14 +222,14 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
const JSONParser = JSONLikeParser(js_lexer.JSONOptions{});
const TSConfigParser = JSONLikeParser(js_lexer.JSONOptions{ .allow_comments = true, .allow_trailing_commas = true });
pub fn ParseJSON(source: *logger.Source, log: *logger.Log, allocator: *std.mem.Allocator) !Expr {
pub fn ParseJSON(source: *const logger.Source, log: *logger.Log, allocator: *std.mem.Allocator) !Expr {
var parser = try JSONParser.init(allocator, source, log);
return parser.parseExpr();
}
pub fn ParseTSConfig(log: logger.Loc, source: logger.Source, allocator: *std.mem.Allocator) !Expr {
var parser = try TSConfigParser.init(allocator, log, source);
pub fn ParseTSConfig(source: *const logger.Source, log: *logger.Log, allocator: *std.mem.Allocator) !Expr {
var parser = try TSConfigParser.init(allocator, source, log);
return parser.parseExpr();
}
@@ -217,12 +238,9 @@ const duplicateKeyJson = "{ \"name\": \"valid\", \"name\": \"invalid\" }";
const js_printer = @import("js_printer.zig");
const renamer = @import("renamer.zig");
const SymbolList = [][]Symbol;
fn expectPrintedJSON(_contents: string, expected: string) void {
if (alloc.dynamic_manager == null) {
alloc.setup(std.heap.page_allocator) catch unreachable;
}
fn expectPrintedJSON(_contents: string, expected: string) !void {
var contents = alloc.dynamic.alloc(u8, _contents.len + 1) catch unreachable;
std.mem.copy(u8, contents, _contents);
contents[contents.len - 1] = ';';
@@ -240,13 +258,14 @@ fn expectPrintedJSON(_contents: string, expected: string) void {
.stmts = &([_]Stmt{stmt}),
};
const tree = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
var symbol_map = Symbol.Map{};
var symbols: SymbolList = &([_][]Symbol{tree.symbols});
var symbol_map = js_ast.Symbol.Map.initList(symbols);
if (log.msgs.items.len > 0) {
std.debug.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text });
Global.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text });
}
var linker = @import("linker.zig").Linker{};
const result = js_printer.printAst(alloc.dynamic, tree, symbol_map, true, js_printer.Options{ .to_module_ref = Ref{ .inner_index = 0 } }, &linker) catch unreachable;
const result = js_printer.printAst(alloc.dynamic, tree, symbol_map, &source, true, js_printer.Options{ .to_module_ref = Ref{ .inner_index = 0 } }, &linker) catch unreachable;
var js = result.js;
@@ -264,20 +283,21 @@ fn expectPrintedJSON(_contents: string, expected: string) void {
}
test "ParseJSON" {
expectPrintedJSON("true", "true");
expectPrintedJSON("false", "false");
expectPrintedJSON("1", "1");
expectPrintedJSON("10", "10");
expectPrintedJSON("100", "100");
expectPrintedJSON("100.1", "100.1");
expectPrintedJSON("19.1", "19.1");
expectPrintedJSON("19.12", "19.12");
expectPrintedJSON("3.4159820837456", "3.4159820837456");
expectPrintedJSON("-10000.25", "-10000.25");
expectPrintedJSON("\"hi\"", "\"hi\"");
expectPrintedJSON("{\"hi\": 1, \"hey\": \"200\", \"boom\": {\"yo\": true}}", "({\"hi\": 1, \"hey\": \"200\", \"boom\": {\"yo\": true}})");
expectPrintedJSON("{\"hi\": \"hey\"}", "({hi: \"hey\"})");
expectPrintedJSON("{\"hi\": [\"hey\", \"yo\"]}", "({hi:[\"hey\",\"yo\"]})");
try alloc.setup(std.heap.c_allocator);
try expectPrintedJSON("true", "true");
try expectPrintedJSON("false", "false");
try expectPrintedJSON("1", "1");
try expectPrintedJSON("10", "10");
try expectPrintedJSON("100", "100");
try expectPrintedJSON("100.1", "100.1");
try expectPrintedJSON("19.1", "19.1");
try expectPrintedJSON("19.12", "19.12");
try expectPrintedJSON("3.4159820837456", "3.4159820837456");
try expectPrintedJSON("-10000.25", "-10000.25");
try expectPrintedJSON("\"hi\"", "\"hi\"");
try expectPrintedJSON("{\"hi\": 1, \"hey\": \"200\", \"boom\": {\"yo\": true}}", "({\"hi\": 1, \"hey\": \"200\", \"boom\": {\"yo\": true}})");
try expectPrintedJSON("{\"hi\": \"hey\"}", "({hi: \"hey\"})");
try expectPrintedJSON("{\"hi\": [\"hey\", \"yo\"]}", "({hi:[\"hey\",\"yo\"]})");
// TODO: emoji?
}

20
src/lib.zig Normal file
View File

@@ -0,0 +1,20 @@
const std = @import("std");
const lex = @import("js_lexer.zig");
const logger = @import("logger.zig");
const alloc = @import("alloc.zig");
const options = @import("options.zig");
const js_parser = @import("js_parser.zig");
const json_parser = @import("json_parser.zig");
const js_printer = @import("js_printer.zig");
const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
usingnamespace @import("global.zig");
const panicky = @import("panic_handler.zig");
const cli = @import("cli.zig");
const api = @import("./api/schema.zig");
extern const Configuration = struct {};
export fn configure(configuration: Configuration) void {}

View File

@@ -1,6 +1,6 @@
const std = @import("std");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
const fs = @import("fs.zig");
const unicode = std.unicode;
@@ -14,6 +14,7 @@ pub const Kind = enum {
warn,
note,
debug,
verbose,
pub fn string(self: Kind) string {
return switch (self) {
@@ -21,6 +22,7 @@ pub const Kind = enum {
.warn => "warn",
.note => "note",
.debug => "debug",
.verbose => "verbose",
};
}
};
@@ -28,8 +30,12 @@ pub const Kind = enum {
pub const Loc = packed struct {
start: i32 = -1,
pub fn toNullable(loc: *Loc) ?Loc {
return if (loc.start == -1) null else loc.*;
}
// TODO: remove this stupidity
pub fn toUsize(self: *Loc) usize {
pub fn toUsize(self: *const Loc) usize {
return @intCast(usize, self.start);
}
@@ -43,6 +49,10 @@ pub const Loc = packed struct {
pub fn eql(loc: *Loc, other: Loc) bool {
return loc.start == other.start;
}
pub fn jsonStringify(self: *const Loc, options: anytype, writer: anytype) !void {
return try std.json.stringify(self.start, options, writer);
}
};
pub const Location = struct {
@@ -53,6 +63,10 @@ pub const Location = struct {
length: usize = 0, // in bytes
line_text: ?string = null,
suggestion: ?string = null,
offset: usize = 0,
// don't really know what's safe to deinit here!
pub fn deinit(l: *Location, allocator: *std.mem.Allocator) void {}
pub fn init(file: []u8, namespace: []u8, line: i32, column: i32, length: u32, line_text: ?[]u8, suggestion: ?[]u8) Location {
return Location{
@@ -63,10 +77,11 @@ pub const Location = struct {
.length = length,
.line_text = line_text,
.suggestion = suggestion,
.offset = length,
};
}
pub fn init_or_nil(_source: ?Source, r: Range) ?Location {
pub fn init_or_nil(_source: ?*const Source, r: Range) ?Location {
if (_source) |source| {
var data = source.initErrorPosition(r.loc);
return Location{
@@ -76,6 +91,7 @@ pub const Location = struct {
.column = usize2Loc(data.column_count).start,
.length = source.contents.len,
.line_text = source.contents[data.line_start..data.line_end],
.offset = @intCast(usize, std.math.max(r.loc.start, 0)),
};
} else {
return null;
@@ -97,18 +113,77 @@ pub const Location = struct {
}
};
pub const Data = struct { text: string, location: ?Location = null };
pub const Data = struct {
text: string,
location: ?Location = null,
pub fn deinit(d: *Data, allocator: *std.mem.Allocator) void {
if (d.location) |loc| {
loc.deinit(allocator);
}
allocator.free(text);
}
};
pub const Msg = struct {
kind: Kind = Kind.err,
data: Data,
notes: ?[]Data = null,
pub fn doFormat(msg: *const Msg, to: anytype, formatterFunc: @TypeOf(std.fmt.format)) !void {
try formatterFunc(to, "\n\n{s}: {s}\n{s}\n{s}:{}:{}", .{ msg.kind.string(), msg.data.text, msg.data.location.?.line_text, msg.data.location.?.file, msg.data.location.?.line, msg.data.location.?.column });
pub fn deinit(msg: *Msg, allocator: *std.mem.Allocator) void {
msg.data.deinit(allocator);
if (msg.notes) |notes| {
for (notes) |note| {
note.deinit(allocator);
}
}
msg.notes = null;
}
pub fn formatNoWriter(msg: *const Msg, comptime formatterFunc: @TypeOf(std.debug.panic)) void {
formatterFunc("\n\n{s}: {s}\n{s}\n{s}:{}:{}", .{ msg.kind.string(), msg.data.text, msg.data.location.?.line_text, msg.data.location.?.file, msg.data.location.?.line, msg.data.location.?.column });
pub fn writeFormat(
msg: *const Msg,
to: anytype,
) !void {
if (msg.data.location) |location| {
try std.fmt.format(to, "\n\n{s}: {s}\n{s}\n{s}:{}:{} {d}", .{
msg.kind.string(),
msg.data.text,
location.line_text,
location.file,
location.line,
location.column,
location.offset,
});
} else {
try std.fmt.format(to, "\n\n{s}: {s}\n", .{
msg.kind.string(),
msg.data.text,
});
}
}
pub fn doFormat(msg: *const Msg, to: anytype, formatterFunc: anytype) !void {
try formatterFunc(to, "\n\n{s}: {s}\n{s}\n{s}:{}:{} {d}", .{
msg.kind.string(),
msg.data.text,
msg.data.location.?.line_text,
msg.data.location.?.file,
msg.data.location.?.line,
msg.data.location.?.column,
msg.data.location.?.offset,
});
}
pub fn formatNoWriter(msg: *const Msg, comptime formatterFunc: @TypeOf(Global.panic)) void {
formatterFunc("\n\n{s}: {s}\n{s}\n{s}:{}:{} ({d})", .{
msg.kind.string(),
msg.data.text,
msg.data.location.?.line_text,
msg.data.location.?.file,
msg.data.location.?.line,
msg.data.location.?.column,
msg.data.location.?.offset,
});
}
};
@@ -117,12 +192,20 @@ pub const Range = packed struct {
len: i32 = 0,
pub const None = Range{ .loc = Loc.Empty, .len = 0 };
pub fn isEmpty(r: *const Range) bool {
return r.len == 0 and r.loc.start == Loc.Empty.start;
}
pub fn end(self: *const Range) Loc {
return Loc{ .start = self.loc.start + self.len };
}
pub fn endI(self: *const Range) usize {
return std.math.lossyCast(usize, self.loc.start + self.len);
}
pub fn jsonStringify(self: *const Range, options: anytype, writer: anytype) !void {
return try std.json.stringify([2]i32{ self.loc.start, self.len + self.loc.start }, options, writer);
}
};
pub const Log = struct {
@@ -130,6 +213,15 @@ pub const Log = struct {
warnings: usize = 0,
errors: usize = 0,
msgs: ArrayList(Msg),
level: Level = Level.debug,
pub const Level = enum {
verbose,
debug,
info,
warn,
err,
};
pub fn init(allocator: *std.mem.Allocator) Log {
return Log{
@@ -137,14 +229,25 @@ pub const Log = struct {
};
}
pub fn addVerbose(log: *Log, source: ?Source, loc: Loc, text: string) !void {
pub fn addVerbose(log: *Log, source: ?*const Source, loc: Loc, text: string) !void {
try log.addMsg(Msg{
.kind = .verbose,
.data = rangeData(source, Range{ .loc = loc }, text),
});
}
pub fn addVerboseWithNotes(source: ?Source, loc: Loc, text: string, notes: []Data) !void {
pub fn appendTo(self: *Log, other: *Log) !void {
try other.msgs.appendSlice(self.msgs.items);
other.warnings += self.warnings;
other.errors += self.errors;
self.msgs.deinit();
}
pub fn deinit(self: *Log) void {
self.msgs.deinit();
}
pub fn addVerboseWithNotes(log: *Log, source: ?*const Source, loc: Loc, text: string, notes: []Data) !void {
try log.addMsg(Msg{
.kind = .verbose,
.data = rangeData(source, Range{ .loc = loc }, text),
@@ -152,7 +255,7 @@ pub const Log = struct {
});
}
pub fn addRangeError(log: *Log, source: ?Source, r: Range, text: string) !void {
pub fn addRangeError(log: *Log, source: ?*const Source, r: Range, text: string) !void {
log.errors += 1;
try log.addMsg(Msg{
.kind = .err,
@@ -160,7 +263,7 @@ pub const Log = struct {
});
}
pub fn addRangeErrorFmt(log: *Log, source: ?Source, r: Range, allocator: *std.mem.Allocator, comptime text: string, args: anytype) !void {
pub fn addRangeErrorFmt(log: *Log, source: ?*const Source, r: Range, allocator: *std.mem.Allocator, comptime text: string, args: anytype) !void {
log.errors += 1;
try log.addMsg(Msg{
.kind = .err,
@@ -168,7 +271,16 @@ pub const Log = struct {
});
}
pub fn addErrorFmt(log: *Log, source: ?Source, l: Loc, allocator: *std.mem.Allocator, comptime text: string, args: anytype) !void {
pub fn addRangeErrorFmtWithNotes(log: *Log, source: ?*const Source, r: Range, allocator: *std.mem.Allocator, notes: []Data, comptime text: string, args: anytype) !void {
log.errors += 1;
try log.addMsg(Msg{
.kind = .err,
.data = rangeData(source, r, std.fmt.allocPrint(allocator, text, args) catch unreachable),
.notes = notes,
});
}
pub fn addErrorFmt(log: *Log, source: ?*const Source, l: Loc, allocator: *std.mem.Allocator, comptime text: string, args: anytype) !void {
log.errors += 1;
try log.addMsg(Msg{
.kind = .err,
@@ -176,7 +288,7 @@ pub const Log = struct {
});
}
pub fn addRangeWarning(log: *Log, source: ?Source, r: Range, text: string) !void {
pub fn addRangeWarning(log: *Log, source: ?*const Source, r: Range, text: string) !void {
log.warnings += 1;
try log.addMsg(Msg{
.kind = .warn,
@@ -184,7 +296,7 @@ pub const Log = struct {
});
}
pub fn addWarningFmt(log: *Log, source: ?Source, l: Loc, allocator: *std.mem.Allocator, comptime text: string, args: anytype) !void {
pub fn addWarningFmt(log: *Log, source: ?*const Source, l: Loc, allocator: *std.mem.Allocator, comptime text: string, args: anytype) !void {
log.warnings += 1;
try log.addMsg(Msg{
.kind = .err,
@@ -192,7 +304,7 @@ pub const Log = struct {
});
}
pub fn addRangeWarningFmt(log: *Log, source: ?Source, r: Range, allocator: *std.mem.Allocator, comptime text: string, args: anytype) !void {
pub fn addRangeWarningFmt(log: *Log, source: ?*const Source, r: Range, allocator: *std.mem.Allocator, comptime text: string, args: anytype) !void {
log.warnings += 1;
try log.addMsg(Msg{
.kind = .warn,
@@ -200,7 +312,7 @@ pub const Log = struct {
});
}
pub fn addWarning(log: *Log, source: ?Source, l: Loc, text: string) !void {
pub fn addWarning(log: *Log, source: ?*const Source, l: Loc, text: string) !void {
log.warnings += 1;
try log.addMsg(Msg{
.kind = .warn,
@@ -208,14 +320,23 @@ pub const Log = struct {
});
}
pub fn addRangeDebug(log: *Log, source: ?Source, r: Range, text: string) !void {
pub fn addRangeDebug(log: *Log, source: ?*const Source, r: Range, text: string) !void {
try log.addMsg(Msg{
.kind = .debug,
.data = rangeData(source, r, text),
});
}
pub fn addRangeErrorWithNotes(log: *Log, source: ?Source, r: Range, text: string, notes: []Data) !void {
pub fn addRangeDebugWithNotes(log: *Log, source: ?*const Source, r: Range, text: string, notes: []Data) !void {
// log.de += 1;
try log.addMsg(Msg{
.kind = Kind.debug,
.data = rangeData(source, r, text),
.notes = notes,
});
}
pub fn addRangeErrorWithNotes(log: *Log, source: ?*const Source, r: Range, text: string, notes: []Data) !void {
log.errors += 1;
try log.addMsg(Msg{
.kind = Kind.err,
@@ -224,7 +345,7 @@ pub const Log = struct {
});
}
pub fn addRangeWarningWithNotes(log: *Log, source: ?Source, r: Range, text: string, notes: []Data) !void {
pub fn addRangeWarningWithNotes(log: *Log, source: ?*const Source, r: Range, text: string, notes: []Data) !void {
log.warnings += 1;
try log.addMsg(Msg{
.kind = .warning,
@@ -239,7 +360,7 @@ pub const Log = struct {
}
// TODO:
pub fn addError(self: *Log, _source: ?Source, loc: Loc, text: string) !void {
pub fn addError(self: *Log, _source: ?*const Source, loc: Loc, text: string) !void {
self.errors += 1;
try self.addMsg(Msg{ .kind = .err, .data = rangeData(_source, Range{ .loc = loc }, text) });
}
@@ -252,16 +373,13 @@ pub const Log = struct {
}
};
pub fn usize2Loc(loc: usize) Loc {
if (loc > std.math.maxInt(i32)) {
return Loc.Empty;
} else {
return Loc{ .start = @intCast(i32, loc) };
}
pub inline fn usize2Loc(loc: usize) Loc {
return Loc{ .start = @intCast(i32, loc) };
}
pub const Source = struct {
path: fs.Path,
key_path: fs.Path,
index: u32 = 0,
contents: string,
@@ -270,25 +388,37 @@ pub const Source = struct {
// symbol for an "export default" statement will be called "util_default".
identifier_name: string,
pub const ErrorPosition = struct { line_start: usize, line_end: usize, column_count: usize, line_count: usize };
pub const ErrorPosition = struct {
line_start: usize,
line_end: usize,
column_count: usize,
line_count: usize,
};
pub fn initFile(file: fs.File, allocator: *std.mem.Allocator) Source {
pub fn initFile(file: fs.File, allocator: *std.mem.Allocator) !Source {
var name = file.path.name;
var identifier_name = name.nonUniqueNameString(allocator) catch unreachable;
return Source{ .path = file.path, .identifier_name = identifier_name, .contents = file.contents };
var source = Source{
.path = file.path,
.key_path = fs.Path.init(file.path.text),
.identifier_name = identifier_name,
.contents = file.contents,
};
source.path.namespace = "file";
return source;
}
pub fn initPathString(pathString: string, contents: string) Source {
var path = fs.Path.init(pathString);
return Source{ .path = path, .identifier_name = path.name.base, .contents = contents };
return Source{ .key_path = path, .path = path, .identifier_name = path.name.base, .contents = contents };
}
pub fn textForRange(self: *Source, r: Range) string {
pub fn textForRange(self: *const Source, r: Range) string {
return self.contents[r.loc.i()..r.endI()];
}
pub fn rangeOfOperatorBefore(self: *Source, loc: Loc, op: string) Range {
pub fn rangeOfOperatorBefore(self: *const Source, loc: Loc, op: string) Range {
const text = self.contents[0..loc.i()];
const index = strings.index(text, op);
if (index >= 0) {
@@ -300,7 +430,7 @@ pub const Source = struct {
return Range{ .loc = loc };
}
pub fn rangeOfString(self: *Source, loc: Loc) Range {
pub fn rangeOfString(self: *const Source, loc: Loc) Range {
const text = self.contents[loc.i()..];
if (text.len == 0) {
@@ -327,7 +457,7 @@ pub const Source = struct {
return Range{ .loc = loc, .len = 0 };
}
pub fn rangeOfOperatorAfter(self: *Source, loc: Loc, op: string) Range {
pub fn rangeOfOperatorAfter(self: *const Source, loc: Loc, op: string) Range {
const text = self.contents[loc.i()..];
const index = strings.index(text, op);
if (index >= 0) {
@@ -341,21 +471,23 @@ pub const Source = struct {
pub fn initErrorPosition(self: *const Source, _offset: Loc) ErrorPosition {
var prev_code_point: u21 = 0;
var offset: usize = if (_offset.start < 0) 0 else @intCast(usize, _offset.start);
var offset: usize = std.math.min(if (_offset.start < 0) 0 else @intCast(usize, _offset.start), self.contents.len - 1);
const contents = self.contents;
var iter = unicode.Utf8Iterator{
.bytes = self.contents[0..offset],
.i = std.math.min(offset, self.contents.len),
.i = 0,
};
var line_start: usize = 0;
var line_count: usize = 0;
var line_count: usize = 1;
var column_number: usize = 1;
while (iter.nextCodepoint()) |code_point| {
switch (code_point) {
'\n' => {
column_number = 1;
line_start = iter.i + 1;
if (prev_code_point != '\r') {
line_count += 1;
@@ -363,6 +495,7 @@ pub const Source = struct {
},
'\r' => {
column_number = 0;
line_start = iter.i + 1;
line_count += 1;
},
@@ -370,8 +503,11 @@ pub const Source = struct {
0x2028, 0x2029 => {
line_start = iter.i + 3; // These take three bytes to encode in UTF-8
line_count += 1;
column_number = 1;
},
else => {
column_number += 1;
},
else => {},
}
prev_code_point = code_point;
@@ -379,7 +515,7 @@ pub const Source = struct {
iter = unicode.Utf8Iterator{
.bytes = self.contents[offset..],
.i = std.math.min(offset, self.contents.len),
.i = 0,
};
// Scan to the end of the line (or end of file if this is the last line)
@@ -395,15 +531,15 @@ pub const Source = struct {
}
}
return ErrorPosition{
.line_start = line_start,
.line_start = if (line_start > 0) line_start - 1 else line_start,
.line_end = line_end,
.line_count = line_count,
.column_count = offset - line_start,
.column_count = column_number,
};
}
};
pub fn rangeData(source: ?Source, r: Range, text: string) Data {
pub fn rangeData(source: ?*const Source, r: Range, text: string) Data {
return Data{ .text = text, .location = Location.init_or_nil(source, r) };
}
@@ -412,16 +548,41 @@ test "print msg" {
var log = Log{ .msgs = msgs };
defer log.msgs.deinit();
var filename = "test.js".*;
var syntax = "for(i = 0;)".*;
var syntax = "for (i".*;
var err = "invalid syntax".*;
var namespace = "file".*;
try log.addMsg(Msg{
.kind = .err,
.data = Data{ .location = Location.init_file(&filename, 1, 3, 0, &syntax, ""), .text = &err },
.data = Data{
.location = Location.init_file(&filename, 1, 3, 0, &syntax, ""),
.text = &err,
},
});
const stdout = std.io.getStdOut().writer();
// try log.print(stdout);
}
test "ErrorPosition" {
const source = Source.initPathString("/src/test/fixtures/simple.jsx", @embedFile("./test/fixtures/simple.jsx"));
const error_position = source.initErrorPosition(Loc{ .start = 979 });
std.testing.expectEqual(@as(usize, 973), @as(usize, error_position.line_start));
std.testing.expectEqual(@as(usize, 1016), @as(usize, error_position.line_end));
var msgs = ArrayList(Msg).init(std.testing.allocator);
var log = Log{ .msgs = msgs };
defer log.msgs.deinit();
try log.addMsg(Msg{
.kind = .err,
.data = rangeData(&source, Range{ .loc = Loc{
.start = 979,
}, .len = 15 }, "Oh no"),
});
const stdout = std.io.getStdOut().writer();
try log.print(stdout);
}

View File

@@ -9,61 +9,31 @@ const js_printer = @import("js_printer.zig");
const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
usingnamespace @import("global.zig");
const panicky = @import("panic_handler.zig");
const cli = @import("cli.zig");
pub const MainPanicHandler = panicky.NewPanicHandler(panicky.default_panic);
pub fn panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace) noreturn {
if (MainPanicHandler.Singleton) |singleton| {
MainPanicHandler.handle_panic(msg, error_return_trace);
} else {
panicky.default_panic(msg, error_return_trace);
}
}
pub fn main() anyerror!void {
try alloc.setup(std.heap.page_allocator);
const args = try std.process.argsAlloc(alloc.dynamic);
const stdout = std.io.getStdOut();
const stderr = std.io.getStdErr();
// The memory allocator makes a massive difference.
// std.heap.raw_c_allocator and std.heap.c_allocator perform similarly.
// std.heap.GeneralPurposeAllocator makes this about 3x _slower_ than esbuild.
// var root_alloc = std.heap.ArenaAllocator.init(std.heap.raw_c_allocator);
// var root_alloc_ = &root_alloc.allocator;
try alloc.setup(std.heap.c_allocator);
var stdout: std.fs.File = std.io.getStdOut();
var stderr: std.fs.File = std.io.getStdErr();
var output_source = Output.Source.init(stdout, stderr);
Output.Source.set(&output_source);
if (args.len < 1) {
const len = stderr.write("Pass a file");
return;
}
const absolutePath = try std.fs.path.resolve(alloc.dynamic, args);
const entryPointName = std.fs.path.basename(absolutePath);
const file = try std.fs.openFileAbsolute(absolutePath, std.fs.File.OpenFlags{ .read = true });
const stat = try file.stat();
const code = try file.readToEndAlloc(alloc.dynamic, stat.size);
const opts = try options.TransformOptions.initUncached(alloc.dynamic, entryPointName, code);
var log = logger.Log.init(alloc.dynamic);
var source = logger.Source.initFile(opts.entry_point, alloc.dynamic);
var ast: js_ast.Ast = undefined;
switch (opts.loader) {
.json => {
var expr = try json_parser.ParseJSON(&source, &log, alloc.dynamic);
var stmt = js_ast.Stmt.alloc(alloc.dynamic, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });
var part = js_ast.Part{
.stmts = &([_]js_ast.Stmt{stmt}),
};
ast = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
},
.jsx, .tsx, .ts, .js => {
var parser = try js_parser.Parser.init(opts, &log, &source, alloc.dynamic);
var res = try parser.parse();
ast = res.ast;
},
else => {
std.debug.panic("Unsupported loader: {s}", .{opts.loader});
},
}
var _linker = linker.Linker{};
const printed = try js_printer.printAst(
alloc.dynamic,
ast,
js_ast.Symbol.Map{},
false,
js_printer.Options{ .to_module_ref = js_ast.Ref{ .inner_index = 0 } },
&_linker,
);
_ = try stdout.write(printed.js);
try cli.Cli.start(std.heap.c_allocator, stdout, stderr, MainPanicHandler);
}

108
src/main_wasi.zig Normal file
View File

@@ -0,0 +1,108 @@
const std = @import("std");
const lex = @import("js_lexer.zig");
const logger = @import("logger.zig");
const alloc = @import("alloc.zig");
const options = @import("options.zig");
const js_parser = @import("js_parser.zig");
const json_parser = @import("json_parser.zig");
const js_printer = @import("js_printer.zig");
const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
const panicky = @import("panic_handler.zig");
const fs = @import("fs.zig");
const MainPanicHandler = panicky.NewPanicHandler(panicky.default_panic);
pub fn panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace) noreturn {
if (MainPanicHandler.Singleton) |singleton| {
MainPanicHandler.handle_panic(msg, error_return_trace);
} else {
panicky.default_panic(msg, error_return_trace);
}
}
// const Alloc = zee.ZeeAllocDefaults.wasm_allocator
pub fn main() anyerror!void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
var allocator = &arena.allocator;
try alloc.setup(allocator);
var log = logger.Log.init(alloc.dynamic);
var panicker = MainPanicHandler.init(&log);
MainPanicHandler.Singleton = &panicker;
const args = try std.process.argsAlloc(alloc.dynamic);
const stdout = std.io.getStdOut();
const stderr = std.io.getStdErr();
if (args.len < 1) {
const len = stderr.write("Pass a file");
return;
}
const absolutePath = args[args.len - 1];
const pathname = fs.PathName.init(absolutePath);
const entryPointName = try alloc.dynamic.alloc(u8, pathname.base.len + pathname.ext.len);
std.mem.copy(u8, entryPointName, pathname.base);
std.mem.copy(u8, entryPointName[pathname.base.len..entryPointName.len], pathname.ext);
const code = try std.io.getStdIn().readToEndAlloc(alloc.dynamic, std.math.maxInt(usize));
const opts = try options.TransformOptions.initUncached(alloc.dynamic, entryPointName, code);
var source = logger.Source.initFile(opts.entry_point, alloc.dynamic);
var ast: js_ast.Ast = undefined;
var raw_defines = RawDefines.init(alloc.static);
try raw_defines.put("process.env.NODE_ENV", "\"development\"");
var user_defines = try DefineData.from_input(raw_defines, &log, alloc.static);
var define = try Define.init(
alloc.static,
user_defines,
);
switch (opts.loader) {
.json => {
var expr = try json_parser.ParseJSON(&source, &log, alloc.dynamic);
var stmt = js_ast.Stmt.alloc(alloc.dynamic, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });
var part = js_ast.Part{
.stmts = &([_]js_ast.Stmt{stmt}),
};
ast = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
},
.jsx, .tsx, .ts, .js => {
var parser = try js_parser.Parser.init(opts, &log, &source, define, alloc.dynamic);
var res = try parser.parse();
ast = res.ast;
},
else => {
Global.panic("Unsupported loader: {s}", .{opts.loader});
},
}
var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
const printed = try js_printer.printAst(
alloc.dynamic,
ast,
js_ast.Symbol.Map.initList(symbols),
&source,
false,
js_printer.Options{ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 } },
&_linker,
);
// if (std.builtin.mode == std.builtin.Mode.Debug) {
// var fixed_buffer = [_]u8{0} ** 512000;
// var buf_stream = std.io.fixedBufferStream(&fixed_buffer);
// try ast.toJSON(alloc.dynamic, stderr.writer());
// }
_ = try stdout.write(printed.js);
}

View File

@@ -8,65 +8,300 @@ const json_parser = @import("json_parser.zig");
const js_printer = @import("js_printer.zig");
const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
const fs = @import("fs.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
const panicky = @import("panic_handler.zig");
usingnamespace @import("global.zig");
const fs = @import("fs.zig");
const Schema = @import("api/schema.zig").Api;
const builtin = std.builtin;
const MainPanicHandler = panicky.NewPanicHandler(panicky.default_panic);
// const zee = @import("zee_alloc.zig");
pub fn panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace) noreturn {
if (MainPanicHandler.Singleton) |singleton| {
MainPanicHandler.handle_panic(msg, error_return_trace);
} else {
panicky.default_panic(msg, error_return_trace);
}
}
var default_options = std.mem.zeroes(Schema.TransformOptions);
pub const Uint8Array = packed struct {
pub const Float = @Type(builtin.TypeInfo{ .Float = .{ .bits = 2 * @bitSizeOf(usize) } });
pub const Abi = if (builtin.target.isWasm()) Float else Uint8Array;
ptr: [*]u8,
len: usize,
pub fn toSlice(raw: Abi) []u8 {
const self = @bitCast(Uint8Array, raw);
return self.ptr[0..self.len];
}
pub fn fromSlice(slice: []u8) Abi {
const self = Uint8Array{ .ptr = slice.ptr, .len = slice.len };
return @bitCast(Abi, self);
}
pub fn empty() Abi {
return Uint8Array.fromSlice(&[0]u8{});
}
pub fn encode(comptime SchemaType: type, obj: SchemaType) !Abi {
var list = std.ArrayList(u8).init(alloc.dynamic);
var writer = list.writer();
try obj.encode(writer);
return Uint8Array.fromSlice(list.toOwnedSlice());
}
pub fn decode(self: Abi, comptime SchemaType: type) !SchemaType {
var buf = Uint8Array.toSlice(self);
var stream = std.io.fixedBufferStream(buf);
const res = try SchemaType.decode(alloc.dynamic, stream.reader());
return res;
}
};
pub fn constStrToU8(s: string) []u8 {
return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
}
pub const Api = struct {
options: *Schema.TransformOptions = &default_options,
files: std.ArrayList(string),
log: logger.Log,
defines: ?*Define = null,
pub fn transform(self: *Api, request: Schema.Transform) !Schema.TransformResponse {
const opts = try options.TransformOptions.initUncached(alloc.dynamic, request.path.?, request.contents);
var source = logger.Source.initFile(opts.entry_point, alloc.dynamic);
var ast: js_ast.Ast = undefined;
if (self.defines == null) {
var raw_defines = RawDefines.init(alloc.static);
raw_defines.put("process.env.NODE_ENV", "\"development\"") catch unreachable;
var user_defines = try DefineData.from_input(raw_defines, &self.log, alloc.static);
self.defines = try Define.init(
alloc.static,
user_defines,
);
}
switch (opts.loader) {
.json => {
var expr = try json_parser.ParseJSON(&source, &self.log, alloc.dynamic);
var stmt = js_ast.Stmt.alloc(alloc.dynamic, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });
var part = js_ast.Part{
.stmts = &([_]js_ast.Stmt{stmt}),
};
ast = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
},
.jsx, .tsx, .ts, .js => {
var parser = try js_parser.Parser.init(opts, &self.log, &source, self.defines.?, alloc.dynamic);
var res = try parser.parse();
ast = res.ast;
},
else => {
Global.panic("Unsupported loader: {s}", .{opts.loader});
},
}
var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
const printed = try js_printer.printAst(
alloc.dynamic,
ast,
js_ast.Symbol.Map.initList(symbols),
&source,
false,
js_printer.Options{ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 } },
&_linker,
);
// Output.print("Parts count: {d}", .{ast.parts.len});
var output_files = try alloc.dynamic.alloc(Schema.OutputFile, 1);
var _data = printed.js[0..printed.js.len];
var _path = constStrToU8(source.path.text);
output_files[0] = Schema.OutputFile{ .data = _data, .path = _path };
var resp = std.mem.zeroes(Schema.TransformResponse);
resp.status = .success;
resp.files = output_files;
return resp;
// var source = logger.Source.initFile(file: fs.File, allocator: *std.mem.Allocator)
}
};
pub extern fn console_log(abi: Uint8Array.Abi) void;
pub extern fn console_error(abi: Uint8Array.Abi) void;
pub extern fn console_warn(abi: Uint8Array.Abi) void;
pub extern fn console_info(abi: Uint8Array.Abi) void;
// const ZeeAlloc = zee.ZeeAlloc(.{});
// var zee_instance: ZeeAlloc = undefined;
// const Gpa = std.heap.GeneralPurposeAllocator(.{});
// var arena: std.heap.ArenaAllocator = undefined;
// var gpa: Gpa = undefined;
var hunk: alloc.Hunk = undefined;
var hunk_high: alloc.HunkSide = undefined;
var hunk_low: alloc.HunkSide = undefined;
var perma_hunk: alloc.Hunk = undefined;
var perma_hunk_high_alloc: *std.mem.Allocator = undefined;
var perma_hunk_high: alloc.HunkSide = undefined;
var perma_hunk_low_alloc: *std.mem.Allocator = undefined;
var perma_hunk_low: alloc.HunkSide = undefined;
var last_start_high: usize = 0;
var last_start_low: usize = 0;
pub const Exports = struct {
fn init() callconv(.C) i32 {
var perma_hunk_buf = std.heap.page_allocator.alloc(u8, 128000) catch return -1;
perma_hunk = alloc.Hunk.init(perma_hunk_buf);
perma_hunk_high = perma_hunk.high();
perma_hunk_low = perma_hunk.low();
perma_hunk_high_alloc = &perma_hunk_low.allocator;
// var gpa = Gpa{};
// var allocator = &gpa.allocator;
// alloc.setup(allocator) catch return -1;
var out_buffer = perma_hunk_low.allocator.alloc(u8, 4096) catch return -1;
var err_buffer = perma_hunk_low.allocator.alloc(u8, 4096) catch return -1;
var output = perma_hunk_low.allocator.create(Output.Source) catch return -1;
var stream = std.io.fixedBufferStream(out_buffer);
var err_stream = std.io.fixedBufferStream(err_buffer);
output.* = Output.Source.init(
stream,
err_stream,
);
output.out_buffer = out_buffer;
output.err_buffer = err_buffer;
Output.Source.set(output);
var _api = std.heap.page_allocator.create(Api) catch return -1;
_api.* = Api{ .files = std.ArrayList(string).init(std.heap.page_allocator), .log = logger.Log.init(std.heap.page_allocator) };
api = _api;
_ = MainPanicHandler.init(&api.?.log);
// This will need more thought.
var raw_defines = RawDefines.init(std.heap.page_allocator);
raw_defines.put("process.env.NODE_ENV", "\"development\"") catch return -1;
var user_defines = DefineData.from_input(raw_defines, &_api.log, std.heap.page_allocator) catch return -1;
_api.defines = Define.init(
std.heap.page_allocator,
user_defines,
) catch return -1;
if (alloc.needs_setup) {
var buf = std.heap.page_allocator.alloc(u8, 26843545) catch return -1;
hunk = alloc.Hunk.init(buf);
hunk_high = hunk.high();
hunk_low = hunk.low();
alloc.dynamic = &hunk_high.allocator;
alloc.static = &hunk_low.allocator;
alloc.needs_setup = false;
}
Output.printErrorable("Initialized.", .{}) catch |err| {
var name = alloc.static.alloc(u8, @errorName(err).len) catch unreachable;
std.mem.copy(u8, name, @errorName(err));
console_error(Uint8Array.fromSlice(name));
};
return 1;
}
fn transform(abi: Uint8Array.Abi) callconv(.C) Uint8Array.Abi {
// Output.print("Received {d}", .{abi});
const req: Schema.Transform = Uint8Array.decode(abi, Schema.Transform) catch return Uint8Array.empty();
// Output.print("Req {s}", .{req});
// alloc.dynamic.free(Uint8Array.toSlice(abi));
const resp = api.?.transform(req) catch return Uint8Array.empty();
var res = Uint8Array.encode(Schema.TransformResponse, resp) catch return Uint8Array.empty();
return res;
}
// Reset
fn cycleStart() callconv(.C) void {
last_start_high = hunk.getHighMark();
last_start_low = hunk.getLowMark();
}
fn cycleEnd() callconv(.C) void {
if (last_start_high > 0) {
hunk.freeToHighMark(last_start_high);
last_start_high = 0;
}
if (last_start_low > 0) {
hunk.freeToLowMark(last_start_low);
last_start_low = 0;
}
}
fn malloc(size: usize) callconv(.C) Uint8Array.Abi {
if (size == 0) {
return 0;
}
const result = alloc.dynamic.alloc(u8, size) catch unreachable;
return Uint8Array.fromSlice(result);
}
// fn calloc(num_elements: usize, element_size: usize) callconv(.C) ?*c_void {
// const size = num_elements *% element_size;
// const c_ptr = @call(.{ .modifier = .never_inline }, malloc, .{size});
// if (c_ptr) |ptr| {
// const p = @ptrCast([*]u8, ptr);
// @memset(p, 0, size);
// }
// return c_ptr;
// }
// fn realloc(c_ptr: ?*c_void, new_size: usize) callconv(.C) ?*c_void {
// if (new_size == 0) {
// // @call(.{ .modifier = .never_inline }, free, .{@intCast(Uint8Array.Abi, c_ptr.?)});
// return null;
// } else if (c_ptr) |ptr| {
// // Use a synthetic slice
// const p = @ptrCast([*]u8, ptr);
// const result = alloc.dynamic.realloc(p[0..1], new_size) catch return null;
// return @ptrCast(*c_void, result.ptr);
// } else {
// return @call(.{ .modifier = .never_inline }, malloc, .{new_size});
// }
// }
fn free(abi: Uint8Array.Abi) callconv(.C) void {
alloc.dynamic.free(Uint8Array.toSlice(abi));
}
};
var api: ?*Api = null;
comptime {
@export(Exports.init, .{ .name = "init", .linkage = .Strong });
@export(Exports.transform, .{ .name = "transform", .linkage = .Strong });
@export(Exports.malloc, .{ .name = "malloc", .linkage = .Strong });
// @export(Exports.calloc, .{ .name = "calloc", .linkage = .Strong });
// @export(Exports.realloc, .{ .name = "realloc", .linkage = .Strong });
@export(Exports.cycleStart, .{ .name = "cycleStart", .linkage = .Strong });
@export(Exports.cycleEnd, .{ .name = "cycleEnd", .linkage = .Strong });
@export(Exports.free, .{ .name = "free", .linkage = .Strong });
}
pub fn main() anyerror!void {
try alloc.setup(std.heap.page_allocator);
const args = try std.process.argsAlloc(alloc.dynamic);
const stdout = std.io.getStdOut();
const stderr = std.io.getStdErr();
if (args.len < 1) {
const len = stderr.write("Pass a file");
return;
}
const absolutePath = args[args.len - 2];
const pathname = fs.PathName.init(absolutePath);
const entryPointName = try alloc.dynamic.alloc(u8, pathname.base.len + pathname.ext.len);
std.mem.copy(u8, entryPointName, pathname.base);
std.mem.copy(u8, entryPointName[pathname.base.len..entryPointName.len], pathname.ext);
const code = try std.io.getStdIn().readToEndAlloc(alloc.dynamic, 99999999);
const opts = try options.TransformOptions.initUncached(alloc.dynamic, entryPointName, code);
var log = logger.Log.init(alloc.dynamic);
var source = logger.Source.initFile(opts.entry_point, alloc.dynamic);
var ast: js_ast.Ast = undefined;
switch (opts.loader) {
.json => {
var expr = try json_parser.ParseJSON(&source, &log, alloc.dynamic);
var stmt = js_ast.Stmt.alloc(alloc.dynamic, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });
var part = js_ast.Part{
.stmts = &([_]js_ast.Stmt{stmt}),
};
ast = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
},
.jsx, .tsx, .ts, .js => {
var parser = try js_parser.Parser.init(opts, &log, &source, alloc.dynamic);
var res = try parser.parse();
ast = res.ast;
},
else => {
std.debug.panic("Unsupported loader: {s}", .{opts.loader});
},
}
var _linker = linker.Linker{};
const printed = try js_printer.printAst(
alloc.dynamic,
ast,
js_ast.Symbol.Map{},
false,
js_printer.Options{ .to_module_ref = js_ast.Ref{ .inner_index = 0 } },
&_linker,
);
_ = try stdout.write(printed.js);
std.mem.doNotOptimizeAway(Exports.init);
std.mem.doNotOptimizeAway(Exports.transform);
std.mem.doNotOptimizeAway(Exports.malloc);
// std.mem.doNotOptimizeAway(Exports.calloc);
// std.mem.doNotOptimizeAway(Exports.realloc);
std.mem.doNotOptimizeAway(Exports.free);
}

View File

@@ -1,12 +1,325 @@
const std = @import("std");
const log = @import("logger.zig");
const fs = @import("fs.zig");
const logger = @import("logger.zig");
const Fs = @import("fs.zig");
const alloc = @import("alloc.zig");
const resolver = @import("./resolver/resolver.zig");
const api = @import("./api/schema.zig");
const Api = api.Api;
const defines = @import("./defines.zig");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
const assert = std.debug.assert;
pub fn validatePath(log: *logger.Log, fs: *Fs.FileSystem.Implementation, cwd: string, rel_path: string, allocator: *std.mem.Allocator, path_kind: string) string {
if (rel_path.len == 0) {
return "";
}
const paths = [_]string{ cwd, rel_path };
const out = std.fs.path.resolve(allocator, &paths) catch |err| {
log.addErrorFmt(null, logger.Loc{}, allocator, "Invalid {s}: {s}", .{ path_kind, rel_path }) catch unreachable;
Global.panic("", .{});
};
return out;
}
pub fn stringHashMapFromArrays(comptime t: type, allocator: *std.mem.Allocator, keys: anytype, values: anytype) !t {
var hash_map = t.init(allocator);
try hash_map.ensureCapacity(@intCast(u32, keys.len));
for (keys) |key, i| {
try hash_map.put(key, values[i]);
}
return hash_map;
}
pub const ExternalModules = struct {
node_modules: std.BufSet,
abs_paths: std.BufSet,
patterns: []WildcardPattern,
pub const WildcardPattern = struct {
prefix: string,
suffix: string,
};
pub fn isNodeBuiltin(str: string) bool {
return NodeBuiltinsMap.has(str);
}
pub fn init(
allocator: *std.mem.Allocator,
fs: *Fs.FileSystem.Implementation,
cwd: string,
externals: []const string,
log: *logger.Log,
platform: Platform,
) ExternalModules {
var result = ExternalModules{
.node_modules = std.BufSet.init(allocator),
.abs_paths = std.BufSet.init(allocator),
.patterns = &([_]WildcardPattern{}),
};
if (platform == .node) {
// TODO: fix this stupid copy
for (NodeBuiltinPatterns) |pattern| {
result.node_modules.put(pattern) catch unreachable;
}
}
if (externals.len == 0) {
return result;
}
var patterns = std.ArrayList(WildcardPattern).init(allocator);
for (externals) |external| {
const path = external;
if (strings.indexOfChar(path, '*')) |i| {
if (strings.indexOfChar(path[i + 1 .. path.len], '*') != null) {
log.addErrorFmt(null, logger.Loc.Empty, allocator, "External path \"{s}\" cannot have more than one \"*\" wildcard", .{external}) catch unreachable;
return result;
}
patterns.append(WildcardPattern{
.prefix = external[0..i],
.suffix = external[i + 1 .. external.len],
}) catch unreachable;
} else if (resolver.Resolver.isPackagePath(external)) {
result.node_modules.put(external) catch unreachable;
} else {
const normalized = validatePath(log, fs, cwd, external, allocator, "external path");
if (normalized.len > 0) {
result.abs_paths.put(normalized) catch unreachable;
}
}
}
result.patterns = patterns.toOwnedSlice();
return result;
}
pub const NodeBuiltinPatterns = [_]string{
"_http_agent",
"_http_client",
"_http_common",
"_http_incoming",
"_http_outgoing",
"_http_server",
"_stream_duplex",
"_stream_passthrough",
"_stream_readable",
"_stream_transform",
"_stream_wrap",
"_stream_writable",
"_tls_common",
"_tls_wrap",
"assert",
"async_hooks",
"buffer",
"child_process",
"cluster",
"console",
"constants",
"crypto",
"dgram",
"diagnostics_channel",
"dns",
"domain",
"events",
"fs",
"http",
"http2",
"https",
"inspector",
"module",
"net",
"os",
"path",
"perf_hooks",
"process",
"punycode",
"querystring",
"readline",
"repl",
"stream",
"string_decoder",
"sys",
"timers",
"tls",
"trace_events",
"tty",
"url",
"util",
"v8",
"vm",
"wasi",
"worker_threads",
"zlib",
};
pub const NodeBuiltinsMap = std.ComptimeStringMap(bool, .{
.{ "_http_agent", true },
.{ "_http_client", true },
.{ "_http_common", true },
.{ "_http_incoming", true },
.{ "_http_outgoing", true },
.{ "_http_server", true },
.{ "_stream_duplex", true },
.{ "_stream_passthrough", true },
.{ "_stream_readable", true },
.{ "_stream_transform", true },
.{ "_stream_wrap", true },
.{ "_stream_writable", true },
.{ "_tls_common", true },
.{ "_tls_wrap", true },
.{ "assert", true },
.{ "async_hooks", true },
.{ "buffer", true },
.{ "child_process", true },
.{ "cluster", true },
.{ "console", true },
.{ "constants", true },
.{ "crypto", true },
.{ "dgram", true },
.{ "diagnostics_channel", true },
.{ "dns", true },
.{ "domain", true },
.{ "events", true },
.{ "fs", true },
.{ "http", true },
.{ "http2", true },
.{ "https", true },
.{ "inspector", true },
.{ "module", true },
.{ "net", true },
.{ "os", true },
.{ "path", true },
.{ "perf_hooks", true },
.{ "process", true },
.{ "punycode", true },
.{ "querystring", true },
.{ "readline", true },
.{ "repl", true },
.{ "stream", true },
.{ "string_decoder", true },
.{ "sys", true },
.{ "timers", true },
.{ "tls", true },
.{ "trace_events", true },
.{ "tty", true },
.{ "url", true },
.{ "util", true },
.{ "v8", true },
.{ "vm", true },
.{ "wasi", true },
.{ "worker_threads", true },
.{ "zlib", true },
});
};
pub const ModuleType = enum {
unknown,
cjs,
esm,
pub const List = std.ComptimeStringMap(ModuleType, .{
.{ "commonjs", ModuleType.cjs },
.{ "module", ModuleType.esm },
});
};
pub const Platform = enum {
node,
browser,
neutral,
pub const Extensions = struct {
pub const In = struct {
pub const JavaScript = [_]string{ ".js", ".ts", ".tsx", ".jsx", ".json" };
};
pub const Out = struct {
pub const JavaScript = [_]string{
".js",
".mjs",
};
};
};
pub fn outExtensions(platform: Platform, allocator: *std.mem.Allocator) std.StringHashMap(string) {
var exts = std.StringHashMap(string).init(allocator);
const js = Extensions.Out.JavaScript[0];
const mjs = Extensions.Out.JavaScript[1];
if (platform == .node) {
for (Extensions.In.JavaScript) |ext| {
exts.put(ext, mjs) catch unreachable;
}
} else {
exts.put(mjs, js) catch unreachable;
}
for (Extensions.In.JavaScript) |ext| {
exts.put(ext, js) catch unreachable;
}
return exts;
}
pub fn from(plat: ?api.Api.Platform) Platform {
return switch (plat orelse api.Api.Platform._none) {
.node => .node,
.browser => .browser,
else => .browser,
};
}
const MAIN_FIELD_NAMES = [_]string{ "browser", "module", "main" };
pub const DefaultMainFields: std.EnumArray(Platform, []const string) = {
var array = std.EnumArray(Platform, []const string).initUndefined();
// Note that this means if a package specifies "module" and "main", the ES6
// module will not be selected. This means tree shaking will not work when
// targeting node environments.
//
// This is unfortunately necessary for compatibility. Some packages
// incorrectly treat the "module" field as "code for the browser". It
// actually means "code for ES6 environments" which includes both node
// and the browser.
//
// For example, the package "@firebase/app" prints a warning on startup about
// the bundler incorrectly using code meant for the browser if the bundler
// selects the "module" field instead of the "main" field.
//
// If you want to enable tree shaking when targeting node, you will have to
// configure the main fields to be "module" and then "main". Keep in mind
// that some packages may break if you do this.
var list = [_]string{ MAIN_FIELD_NAMES[1], MAIN_FIELD_NAMES[2] };
array.set(Platform.node, &list);
// Note that this means if a package specifies "main", "module", and
// "browser" then "browser" will win out over "module". This is the
// same behavior as webpack: https://github.com/webpack/webpack/issues/4674.
//
// This is deliberate because the presence of the "browser" field is a
// good signal that the "module" field may have non-browser stuff in it,
// which will crash or fail to be bundled when targeting the browser.
var listc = [_]string{ MAIN_FIELD_NAMES[0], MAIN_FIELD_NAMES[1], MAIN_FIELD_NAMES[2] };
array.set(Platform.browser, &listc);
// The neutral platform is for people that don't want esbuild to try to
// pick good defaults for their platform. In that case, the list of main
// fields is empty by default. You must explicitly configure it yourself.
array.set(Platform.neutral, &([_]string{}));
return array;
};
};
pub const Loader = enum {
jsx,
js,
@@ -15,12 +328,26 @@ pub const Loader = enum {
css,
file,
json,
pub fn isJSX(loader: Loader) bool {
return loader == .jsx or loader == .tsx;
}
pub fn isTypeScript(loader: Loader) bool {
return loader == .tsx or loader == .ts;
}
pub fn forFileName(filename: string, obj: anytype) ?Loader {
const ext = std.fs.path.extension(filename);
if (ext.len == 0 or (ext.len == 1 and ext[0] == '.')) return null;
return obj.get(ext);
}
};
pub const defaultLoaders = std.ComptimeStringMap(Loader, .{
.{ ".jsx", Loader.jsx },
.{ ".json", Loader.json },
.{ ".js", Loader.js },
.{ ".js", Loader.jsx },
.{ ".mjs", Loader.js },
.{ ".css", Loader.css },
.{ ".ts", Loader.ts },
@@ -28,41 +355,311 @@ pub const defaultLoaders = std.ComptimeStringMap(Loader, .{
});
pub const JSX = struct {
pub const Pragma = struct {
// these need to be arrays
factory: []const string = &(Defaults.Factory),
fragment: []const string = &(Defaults.Fragment),
runtime: JSX.Runtime = JSX.Runtime.automatic,
/// Facilitates automatic JSX importing
/// Set on a per file basis like this:
/// /** @jsxImportSource @emotion/core */
import_source: string = "react",
jsx: string = "jsxDEV",
development: bool = true,
parse: bool = true,
pub const Defaults = struct {
pub var Factory = [_]string{ "React", "createElement" };
pub var Fragment = [_]string{ "React", "Fragment" };
};
// "React.createElement" => ["React", "createElement"]
// ...unless new is "React.createElement" and original is ["React", "createElement"]
// saves an allocation for the majority case
pub fn memberListToComponentsIfDifferent(allocator: *std.mem.Allocator, original: []const string, new: string) ![]const string {
var splitter = std.mem.split(new, ".");
var needs_alloc = false;
var count: usize = 0;
while (splitter.next()) |str| {
const i = (splitter.index orelse break);
count = i;
if (i > original.len) {
needs_alloc = true;
break;
}
if (!strings.eql(original[i], str)) {
needs_alloc = true;
break;
}
}
if (!needs_alloc) {
return original;
}
var out = try allocator.alloc(string, count + 1);
splitter = std.mem.split(new, ".");
var i: usize = 0;
while (splitter.next()) |str| {
out[i] = str;
i += 1;
}
return out;
}
pub fn fromApi(jsx: api.Api.Jsx, allocator: *std.mem.Allocator) !Pragma {
var pragma = JSX.Pragma{};
if (jsx.fragment.len > 0) {
pragma.fragment = try memberListToComponentsIfDifferent(allocator, pragma.fragment, jsx.fragment);
}
if (jsx.factory.len > 0) {
pragma.factory = try memberListToComponentsIfDifferent(allocator, pragma.factory, jsx.factory);
}
if (jsx.import_source.len > 0) {
pragma.jsx = jsx.import_source;
}
pragma.development = jsx.development;
pragma.runtime = jsx.runtime;
pragma.parse = true;
return pragma;
}
};
parse: bool = true,
factory: string = "React.createElement",
fragment: string = "jsx",
factory: string = "createElement",
fragment: string = "Fragment",
jsx: string = "jsxDEV",
runtime: Runtime = Runtime.automatic,
development: bool = true,
/// Set on a per file basis like this:
/// /** @jsxImportSource @emotion/core */
import_source: string = "react",
pub const Runtime = api.Api.JsxRuntime;
};
const TypeScript = struct {
parse: bool = false,
};
pub const BundleOptions = struct {
footer: string = "",
banner: string = "",
define: *defines.Define,
loaders: std.StringHashMap(Loader),
resolve_dir: string = "/",
jsx: JSX.Pragma = JSX.Pragma{},
react_fast_refresh: bool = false,
inject: ?[]string = null,
public_url: string = "",
public_dir: string = "public",
public_dir_enabled: bool = true,
output_dir: string = "",
public_dir_handle: ?std.fs.Dir = null,
write: bool = false,
preserve_symlinks: bool = false,
resolve_mode: api.Api.ResolveMode,
tsconfig_override: ?string = null,
platform: Platform = Platform.browser,
main_fields: []const string = Platform.DefaultMainFields.get(Platform.browser),
log: *logger.Log,
external: ExternalModules = ExternalModules{},
entry_points: []const string,
extension_order: []const string = &Defaults.ExtensionOrder,
out_extensions: std.StringHashMap(string),
import_path_format: ImportPathFormat = ImportPathFormat.relative,
pub const ImportPathFormat = enum {
relative,
// omit file extension for Node.js packages
relative_nodejs,
absolute_url,
// omit file extension
absolute_path,
};
pub const Defaults = struct {
pub var ExtensionOrder = [_]string{ ".tsx", ".ts", ".jsx", ".js", ".json" };
};
pub fn fromApi(
allocator: *std.mem.Allocator,
fs: *Fs.FileSystem,
log: *logger.Log,
transform: Api.TransformOptions,
) !BundleOptions {
var loader_values = try allocator.alloc(Loader, transform.loader_values.len);
for (loader_values) |_, i| {
const loader = switch (transform.loader_values[i]) {
.jsx => Loader.jsx,
.js => Loader.js,
.ts => Loader.ts,
.css => Loader.css,
.tsx => Loader.tsx,
.json => Loader.json,
else => unreachable,
};
loader_values[i] = loader;
}
var loaders = try stringHashMapFromArrays(std.StringHashMap(Loader), allocator, transform.loader_keys, loader_values);
const default_loader_ext = [_]string{ ".jsx", ".json", ".js", ".mjs", ".css", ".ts", ".tsx" };
inline for (default_loader_ext) |ext| {
if (!loaders.contains(ext)) {
try loaders.put(ext, defaultLoaders.get(ext).?);
}
}
var user_defines = try stringHashMapFromArrays(defines.RawDefines, allocator, transform.define_keys, transform.define_values);
if (transform.define_keys.len == 0) {
try user_defines.put("process.env.NODE_ENV", "development");
}
var resolved_defines = try defines.DefineData.from_input(user_defines, log, allocator);
const output_dir_parts = [_]string{ try std.process.getCwdAlloc(allocator), transform.output_dir orelse "out" };
var opts: BundleOptions = BundleOptions{
.log = log,
.resolve_mode = transform.resolve orelse .dev,
.define = try defines.Define.init(
allocator,
resolved_defines,
),
.loaders = loaders,
.output_dir = try fs.absAlloc(allocator, &output_dir_parts),
.platform = Platform.from(transform.platform),
.write = transform.write orelse false,
.external = undefined,
.entry_points = transform.entry_points,
.out_extensions = undefined,
};
if (transform.public_url) |public_url| {
opts.import_path_format = ImportPathFormat.absolute_url;
opts.public_url = public_url;
}
if (transform.jsx) |jsx| {
opts.jsx = try JSX.Pragma.fromApi(jsx, allocator);
}
if (transform.extension_order.len > 0) {
opts.extension_order = transform.extension_order;
}
if (transform.platform) |plat| {
opts.platform = if (plat == .browser) .browser else .node;
opts.main_fields = Platform.DefaultMainFields.get(opts.platform);
}
if (opts.platform == .node) {
opts.import_path_format = .relative_nodejs;
}
if (transform.main_fields.len > 0) {
opts.main_fields = transform.main_fields;
}
opts.external = ExternalModules.init(allocator, &fs.fs, fs.top_level_dir, transform.external, log, opts.platform);
opts.out_extensions = opts.platform.outExtensions(allocator);
if (transform.serve orelse false) {
opts.resolve_mode = .lazy;
var _dirs = [_]string{transform.public_dir orelse opts.public_dir};
opts.public_dir = try fs.absAlloc(allocator, &_dirs);
opts.public_dir_handle = std.fs.openDirAbsolute(opts.public_dir, .{ .iterate = true }) catch |err| brk: {
var did_warn = false;
switch (err) {
error.FileNotFound => {
// Be nice.
// Check "static" since sometimes people use that instead.
// Don't switch to it, but just tell "hey try --public-dir=static" next time
if (transform.public_dir == null or transform.public_dir.?.len == 0) {
_dirs[0] = "static";
const check_static = try fs.joinAlloc(allocator, &_dirs);
defer allocator.free(check_static);
std.fs.accessAbsolute(check_static, .{}) catch {
Output.printError("warn: \"public\" folder missing. If there are external assets used in your project, pass --public-dir=\"public-folder-name\"", .{});
did_warn = true;
};
}
if (!did_warn) {
Output.printError("warn: \"public\" folder missing. If you want to use \"static\" as the public folder, pass --public-dir=\"static\".", .{});
}
opts.public_dir_enabled = false;
},
error.AccessDenied => {
Output.printError(
"error: access denied when trying to open public_dir: \"{s}\".\nPlease re-open Speedy with access to this folder or pass a different folder via \"--public-dir\". Note: --public-dir is relative to --cwd (or the process' current working directory).\n\nThe public folder is where static assets such as images, fonts, and .html files go.",
.{opts.public_dir},
);
std.process.exit(1);
},
else => {
Output.printError(
"error: \"{s}\" when accessing public folder: \"{s}\"",
.{ @errorName(err), opts.public_dir },
);
std.process.exit(1);
},
}
break :brk null;
};
// Windows has weird locking rules for files
// so it's a bad idea to keep a file handle open for a long time on Windows.
if (isWindows and opts.public_dir_handle != null) {
opts.public_dir_handle.?.close();
}
}
return opts;
}
};
pub const TransformOptions = struct {
footer: string = "",
banner: string = "",
define: std.StringHashMap(string),
loader: Loader = Loader.tsx,
loader: Loader = Loader.js,
resolve_dir: string = "/",
jsx_factory: string = "React.createElement",
jsx_fragment: string = "jsx",
ts: bool = true,
jsx: ?JSX.Pragma,
react_fast_refresh: bool = false,
inject: ?[]string = null,
public_url: string = "/",
filesystem_cache: std.StringHashMap(fs.File),
entry_point: fs.File,
public_url: string = "",
preserve_symlinks: bool = false,
entry_point: Fs.File,
resolve_paths: bool = false,
tsconfig_override: ?string = null,
platform: Platform = Platform.browser,
main_fields: []string = Platform.DefaultMainFields.get(Platform.browser),
pub fn initUncached(allocator: *std.mem.Allocator, entryPointName: string, code: string) !TransformOptions {
assert(entryPointName.len > 0);
var filesystemCache = std.StringHashMap(fs.File).init(allocator);
var entryPoint = fs.File{
.path = fs.Path.init(entryPointName),
var entryPoint = Fs.File{
.path = Fs.Path.init(entryPointName),
.contents = code,
};
var cwd: string = "/";
if (isWasi or isNative) {
cwd = try std.process.getCwdAlloc(allocator);
}
var define = std.StringHashMap(string).init(allocator);
try define.ensureCapacity(1);
@@ -73,32 +670,54 @@ pub const TransformOptions = struct {
loader = defaultLoader;
}
assert(loader != .file);
assert(code.len > 0);
try filesystemCache.put(entryPointName, entryPoint);
return TransformOptions{
.entry_point = entryPoint,
.define = define,
.loader = loader,
.filesystem_cache = filesystemCache,
.resolve_dir = entryPoint.path.name.dir,
.main_fields = Platform.DefaultMainFields.get(Platform.browser),
.jsx = if (Loader.isJSX(loader)) JSX.Pragma{} else null,
};
}
};
pub const OutputFile = struct {
path: []u8,
contents: []u8,
path: string,
contents: string,
};
pub const TransformResult = struct { errors: []log.Msg, warnings: []log.Msg, output_files: []OutputFile };
pub const TransformResult = struct {
errors: []logger.Msg = &([_]logger.Msg{}),
warnings: []logger.Msg = &([_]logger.Msg{}),
output_files: []OutputFile = &([_]OutputFile{}),
outbase: string,
pub fn init(
outbase: string,
output_files: []OutputFile,
log: *logger.Log,
allocator: *std.mem.Allocator,
) !TransformResult {
var errors = try std.ArrayList(logger.Msg).initCapacity(allocator, log.errors);
var warnings = try std.ArrayList(logger.Msg).initCapacity(allocator, log.warnings);
for (log.msgs.items) |msg| {
switch (msg.kind) {
logger.Kind.err => {
errors.append(msg) catch unreachable;
},
logger.Kind.warn => {
warnings.append(msg) catch unreachable;
},
else => {},
}
}
test "TransformOptions.initUncached" {
try alloc.setup(std.heap.page_allocator);
const opts = try TransformOptions.initUncached(alloc.dynamic, "lol.jsx", "<Hi />");
std.testing.expectEqualStrings("lol", opts.entry_point.path.name.base);
std.testing.expectEqualStrings(".jsx", opts.entry_point.path.name.ext);
std.testing.expect(Loader.jsx == opts.loader);
}
return TransformResult{
.outbase = outbase,
.output_files = output_files,
.errors = errors.toOwnedSlice(),
.warnings = warnings.toOwnedSlice(),
};
}
};

5
src/output_native.zig Normal file
View File

@@ -0,0 +1,5 @@
const std = @import("std");
pub var Stream: ?std.fs.File = null;
pub var writer: ?std.fs.File.Writer = null;
pub var errorWriter: ?std.fs.File.Writer = null;

5
src/output_wasi.zig Normal file
View File

@@ -0,0 +1,5 @@
const std = @import("std");
pub var Stream: ?std.fs.File = null;
pub var writer: ?std.fs.File.Writer = null;
pub var errorWriter: ?std.fs.File.Writer = null;

5
src/output_wasm.zig Normal file
View File

@@ -0,0 +1,5 @@
const std = @import("std");
pub var out_buffer: []u8 = &([_]u8{});
pub var Stream: ?std.io.FixedBufferStream([]u8) = null;
pub var writer = if (Stream) |stream| stream.writer() else null;

58
src/panic_handler.zig Normal file
View File

@@ -0,0 +1,58 @@
const std = @import("std");
const logger = @import("logger.zig");
const root = @import("root");
const USERLAND_PANIC_MESSAGE = "iNtErNaL sErVeR eRrOr";
/// This function is used by the Zig language code generation and
/// therefore must be kept in sync with the compiler implementation.
pub fn default_panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace) noreturn {
@setCold(true);
if (@hasDecl(root, "os") and @hasDecl(root.os, "panic")) {
root.os.panic(msg, error_return_trace);
unreachable;
}
switch (std.builtin.os.tag) {
.freestanding => {
while (true) {
@breakpoint();
}
},
.wasi => {
std.debug.warn("{s}", .{msg});
std.os.abort();
},
.uefi => {
// TODO look into using the debug info and logging helpful messages
std.os.abort();
},
else => {
const first_trace_addr = @returnAddress();
std.debug.panicExtra(error_return_trace, first_trace_addr, "{s}", .{msg});
},
}
}
pub fn NewPanicHandler(panic_func: fn handle_panic(msg: []const u8, error_return_type: ?*std.builtin.StackTrace) noreturn) type {
return struct {
panic_count: usize = 0,
skip_next_panic: bool = false,
log: *logger.Log,
pub var Singleton: ?*Handler = null;
const Handler = @This();
pub fn init(log: *logger.Log) Handler {
return Handler{
.log = log,
};
}
pub fn handle_panic(msg: []const u8, error_return_type: ?*std.builtin.StackTrace) callconv(.Inline) noreturn {
if (@This().Singleton) |singleton| {
singleton.panic_count += 1;
}
panic_func(msg, error_return_type);
}
};
}

View File

@@ -1,18 +1,30 @@
const js_ast = @import("js_ast.zig");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
const std = @import("std");
const logger = @import("logger.zig");
// This is...poorly named
// It does not rename
// It merely names
pub const Renamer = struct {
symbols: js_ast.Symbol.Map,
pub fn init(symbols: js_ast.Symbol.Map) Renamer {
return Renamer{ .symbols = symbols };
source: *const logger.Source,
pub fn init(symbols: js_ast.Symbol.Map, source: *const logger.Source) Renamer {
return Renamer{ .symbols = symbols, .source = source };
}
pub fn nameForSymbol(renamer: *Renamer, ref: js_ast.Ref) string {
const resolved = renamer.symbols.follow(ref);
const symbol = renamer.symbols.get(resolved) orelse std.debug.panic("Internal error: symbol not found for ref: {s}", .{resolved});
if (ref.is_source_contents_slice) {
return renamer.source.contents[ref.source_index .. ref.source_index + ref.inner_index];
}
return symbol.original_name;
const resolved = renamer.symbols.follow(ref);
if (renamer.symbols.get(resolved)) |symbol| {
return symbol.original_name;
} else {
Global.panic("Invalid symbol {s}", .{ref});
}
}
};

160
src/resolver/data_url.zig Normal file
View File

@@ -0,0 +1,160 @@
usingnamespace @import("../global.zig");
const std = @import("std");
const assert = std.debug.assert;
const mem = std.mem;
// https://github.com/Vexu/zuri/blob/master/src/zuri.zig#L61-L127
pub const PercentEncoding = struct {
/// possible errors for decode and encode
pub const EncodeError = error{
InvalidCharacter,
OutOfMemory,
};
/// returns true if c is a hexadecimal digit
pub fn isHex(c: u8) bool {
return switch (c) {
'0'...'9', 'a'...'f', 'A'...'F' => true,
else => false,
};
}
/// returns true if str starts with a valid path character or a percent encoded octet
pub fn isPchar(str: []const u8) bool {
assert(str.len > 0);
return switch (str[0]) {
'a'...'z', 'A'...'Z', '0'...'9', '-', '.', '_', '~', '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', ':', '@' => true,
'%' => str.len > 3 and isHex(str[1]) and isHex(str[2]),
else => false,
};
}
/// decode path if it is percent encoded
pub fn decode(allocator: *Allocator, path: []const u8) EncodeError!?[]u8 {
var ret: ?[]u8 = null;
errdefer if (ret) |some| allocator.free(some);
var ret_index: usize = 0;
var i: usize = 0;
while (i < path.len) : (i += 1) {
if (path[i] == '%') {
if (!isPchar(path[i..])) {
return error.InvalidCharacter;
}
if (ret == null) {
ret = try allocator.alloc(u8, path.len);
mem.copy(u8, ret.?, path[0..i]);
ret_index = i;
}
// charToDigit can't fail because the chars are validated earlier
var new = (std.fmt.charToDigit(path[i + 1], 16) catch unreachable) << 4;
new |= std.fmt.charToDigit(path[i + 2], 16) catch unreachable;
ret.?[ret_index] = new;
ret_index += 1;
i += 2;
} else if (path[i] != '/' and !isPchar(path[i..])) {
return error.InvalidCharacter;
} else if (ret != null) {
ret.?[ret_index] = path[i];
ret_index += 1;
}
}
if (ret) |some| return allocator.shrink(some, ret_index);
return null;
}
/// percent encode if path contains characters not allowed in paths
pub fn encode(allocator: *Allocator, path: []const u8) EncodeError!?[]u8 {
var ret: ?[]u8 = null;
var ret_index: usize = 0;
for (path) |c, i| {
if (c != '/' and !isPchar(path[i..])) {
if (ret == null) {
ret = try allocator.alloc(u8, path.len * 3);
mem.copy(u8, ret.?, path[0..i]);
ret_index = i;
}
const hex_digits = "0123456789ABCDEF";
ret.?[ret_index] = '%';
ret.?[ret_index + 1] = hex_digits[(c & 0xF0) >> 4];
ret.?[ret_index + 2] = hex_digits[c & 0x0F];
ret_index += 3;
} else if (ret != null) {
ret.?[ret_index] = c;
ret_index += 1;
}
}
if (ret) |some| return allocator.shrink(some, ret_index);
return null;
}
};
pub const MimeType = enum {
Unsupported,
TextCSS,
TextJavaScript,
ApplicationJSON,
pub const Map = std.ComptimeStringMap(MimeType, .{
.{ "text/css", MimeType.TextCSS },
.{ "text/javascript", MimeType.TextJavaScript },
.{ "application/json", MimeType.ApplicationJSON },
});
pub fn decode(str: string) MimeType {
// Remove things like ";charset=utf-8"
var mime_type = str;
if (strings.indexOfChar(mime_type, ';')) |semicolon| {
mime_type = mime_type[0..semicolon];
}
return Map.get(mime_type) orelse MimeType.Unsupported;
}
};
pub const DataURL = struct {
mime_type: string,
data: string,
is_base64: bool = false,
pub fn parse(url: string) ?DataURL {
if (!strings.startsWith(url, "data:")) {
return null;
}
const comma = strings.indexOfChar(url, ',') orelse return null;
var parsed = DataURL{
.mime_type = url["data:".len..comma],
.data = url[comma + 1 .. url.len],
};
if (strings.endsWith(parsed.mime_type, ";base64")) {
parsed.mime_type = parsed.mime_type[0..(parsed.mime_type.len - ";base64".len)];
parsed.is_base64 = true;
}
return parsed;
}
pub fn decode_mime_type(d: DataURL) MimeType {
return MimeType.decode(d.mime_type);
}
pub fn decode_data(d: *DataURL, allocator: *std.mem.Allocator, url: string) !string {
// Try to read base64 data
if (d.is_base64) {
const size = try std.base64.standard.Decoder.calcSizeForSlice(d.data);
var buf = try allocator.alloc(u8, size);
try std.base64.standard.Decoder.decode(buf, d.data);
return buf;
}
// Try to read percent-escaped data
return try PercentEncoding.decode(allocator, url);
}
};

View File

@@ -0,0 +1,182 @@
usingnamespace @import("../global.zig");
const std = @import("std");
const options = @import("../options.zig");
const log = @import("../logger.zig");
const cache = @import("../cache.zig");
const logger = @import("../logger.zig");
const js_ast = @import("../js_ast.zig");
const alloc = @import("../alloc.zig");
const fs = @import("../fs.zig");
const resolver = @import("./resolver.zig");
const MainFieldMap = std.StringHashMap(string);
const BrowserMap = std.StringHashMap(string);
pub const PackageJSON = struct {
source: logger.Source,
main_fields: MainFieldMap,
module_type: options.ModuleType,
// Present if the "browser" field is present. This field is intended to be
// used by bundlers and lets you redirect the paths of certain 3rd-party
// modules that don't work in the browser to other modules that shim that
// functionality. That way you don't have to rewrite the code for those 3rd-
// party modules. For example, you might remap the native "util" node module
// to something like https://www.npmjs.com/package/util so it works in the
// browser.
//
// This field contains a mapping of absolute paths to absolute paths. Mapping
// to an empty path indicates that the module is disabled. As far as I can
// tell, the official spec is an abandoned GitHub repo hosted by a user account:
// https://github.com/defunctzombie/package-browser-field-spec. The npm docs
// say almost nothing: https://docs.npmjs.com/files/package.json.
//
// Note that the non-package "browser" map has to be checked twice to match
// Webpack's behavior: once before resolution and once after resolution. It
// leads to some unintuitive failure cases that we must emulate around missing
// file extensions:
//
// * Given the mapping "./no-ext": "./no-ext-browser.js" the query "./no-ext"
// should match but the query "./no-ext.js" should NOT match.
//
// * Given the mapping "./ext.js": "./ext-browser.js" the query "./ext.js"
// should match and the query "./ext" should ALSO match.
//
browser_map: BrowserMap,
pub fn parse(r: *resolver.Resolver, input_path: string) ?PackageJSON {
const parts = [_]string{ input_path, "package.json" };
const package_json_path = r.fs.join(&parts);
const entry = r.caches.fs.readFile(r.fs, input_path) catch |err| {
if (err != error.IsDir) {
r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Cannot read file \"{s}\": {s}", .{ r.prettyPath(fs.Path.init(input_path)), @errorName(err) }) catch unreachable;
}
return null;
};
if (r.debug_logs) |*debug| {
debug.addNoteFmt("The file \"{s}\" exists", .{package_json_path}) catch unreachable;
}
const key_path = fs.Path.init(r.allocator.dupe(u8, package_json_path) catch unreachable);
var json_source = logger.Source.initPathString(key_path.text, entry.contents);
json_source.path.pretty = r.prettyPath(json_source.path);
const json: js_ast.Expr = (r.caches.json.parseJSON(r.log, json_source, r.allocator) catch |err| {
if (isDebug) {
Output.printError("{s}: JSON parse error: {s}", .{ package_json_path, @errorName(err) });
}
return null;
} orelse return null);
var package_json = PackageJSON{
.source = json_source,
.module_type = .unknown,
.browser_map = BrowserMap.init(r.allocator),
.main_fields = MainFieldMap.init(r.allocator),
};
if (json.asProperty("type")) |type_json| {
if (type_json.expr.asString(r.allocator)) |type_str| {
switch (options.ModuleType.List.get(type_str) orelse options.ModuleType.unknown) {
.cjs => {
package_json.module_type = .cjs;
},
.esm => {
package_json.module_type = .esm;
},
.unknown => {
r.log.addRangeWarningFmt(
&json_source,
json_source.rangeOfString(type_json.loc),
r.allocator,
"\"{s}\" is not a valid value for \"type\" field (must be either \"commonjs\" or \"module\")",
.{type_str},
) catch unreachable;
},
}
} else {
r.log.addWarning(&json_source, type_json.loc, "The value for \"type\" must be a string") catch unreachable;
}
}
// Read the "main" fields
for (r.opts.main_fields) |main| {
if (json.asProperty(main)) |main_json| {
const expr: js_ast.Expr = main_json.expr;
if ((expr.asString(r.allocator))) |str| {
if (str.len > 0) {
package_json.main_fields.put(main, str) catch unreachable;
}
}
}
}
// Read the "browser" property, but only when targeting the browser
if (r.opts.platform == .browser) {
// We both want the ability to have the option of CJS vs. ESM and the
// option of having node vs. browser. The way to do this is to use the
// object literal form of the "browser" field like this:
//
// "main": "dist/index.node.cjs.js",
// "module": "dist/index.node.esm.js",
// "browser": {
// "./dist/index.node.cjs.js": "./dist/index.browser.cjs.js",
// "./dist/index.node.esm.js": "./dist/index.browser.esm.js"
// },
//
if (json.asProperty("browser")) |browser_prop| {
switch (browser_prop.expr.data) {
.e_object => {
const obj = browser_prop.expr.getObject();
// The value is an object
// Remap all files in the browser field
for (obj.properties) |prop| {
var _key_str = (prop.key orelse continue).asString(r.allocator) orelse continue;
const value: js_ast.Expr = prop.value orelse continue;
// Normalize the path so we can compare against it without getting
// confused by "./". There is no distinction between package paths and
// relative paths for these values because some tools (i.e. Browserify)
// don't make such a distinction.
//
// This leads to weird things like a mapping for "./foo" matching an
// import of "foo", but that's actually not a bug. Or arguably it's a
// bug in Browserify but we have to replicate this bug because packages
// do this in the wild.
const key = r.allocator.dupe(u8, r.fs.normalize(_key_str)) catch unreachable;
switch (value.data) {
.e_string => {
const str = value.getString();
// If this is a string, it's a replacement package
package_json.browser_map.put(key, str.string(r.allocator) catch unreachable) catch unreachable;
},
.e_boolean => {
const boolean = value.getBoolean();
if (!boolean.value) {
package_json.browser_map.put(key, "") catch unreachable;
}
},
else => {
r.log.addWarning(&json_source, value.loc, "Each \"browser\" mapping must be a string or boolean") catch unreachable;
},
}
}
},
else => {},
}
}
}
// TODO: side effects
// TODO: exports map
return package_json;
}
};

File diff suppressed because it is too large Load Diff

1687
src/resolver/resolver.zig Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,343 @@
usingnamespace @import("../global.zig");
const std = @import("std");
const options = @import("../options.zig");
const logger = @import("../logger.zig");
const cache = @import("../cache.zig");
const js_ast = @import("../js_ast.zig");
const js_lexer = @import("../js_lexer.zig");
const alloc = @import("../alloc.zig");
// Heuristic: you probably don't have 100 of these
// Probably like 5-10
// Array iteration is faster and deterministically ordered in that case.
const PathsMap = std.StringArrayHashMap([]string);
pub const TSConfigJSON = struct {
abs_path: string,
// The absolute path of "compilerOptions.baseUrl"
base_url: string = "",
// This is used if "Paths" is non-nil. It's equal to "BaseURL" except if
// "BaseURL" is missing, in which case it is as if "BaseURL" was ".". This
// is to implement the "paths without baseUrl" feature from TypeScript 4.1.
// More info: https://github.com/microsoft/TypeScript/issues/31869
base_url_for_paths: string = "",
// The verbatim values of "compilerOptions.paths". The keys are patterns to
// match and the values are arrays of fallback paths to search. Each key and
// each fallback path can optionally have a single "*" wildcard character.
// If both the key and the value have a wildcard, the substring matched by
// the wildcard is substituted into the fallback path. The keys represent
// module-style path names and the fallback paths are relative to the
// "baseUrl" value in the "tsconfig.json" file.
paths: PathsMap,
jsx: options.JSX.Pragma = options.JSX.Pragma{},
use_define_for_class_fields: ?bool = null,
preserve_imports_not_used_as_values: bool = false,
pub fn hasBaseURL(tsconfig: *TSConfigJSON) bool {
return tsconfig.base_url.len > 0;
}
pub const ImportsNotUsedAsValue = enum {
preserve,
err,
remove,
invalid,
pub const List = std.ComptimeStringMap(ImportsNotUsedAsValue, .{
.{ "preserve", ImportsNotUsedAsValue.preserve },
.{ "error", ImportsNotUsedAsValue.err },
.{ "remove", ImportsNotUsedAsValue.remove },
});
};
pub fn parse(
allocator: *std.mem.Allocator,
log: *logger.Log,
source: logger.Source,
json_cache: *cache.Cache.Json,
) anyerror!?*TSConfigJSON {
// Unfortunately "tsconfig.json" isn't actually JSON. It's some other
// format that appears to be defined by the implementation details of the
// TypeScript compiler.
//
// Attempt to parse it anyway by modifying the JSON parser, but just for
// these particular files. This is likely not a completely accurate
// emulation of what the TypeScript compiler does (e.g. string escape
// behavior may also be different).
const json: js_ast.Expr = (json_cache.parseTSConfig(log, source, allocator) catch null) orelse return null;
var result: TSConfigJSON = TSConfigJSON{ .abs_path = source.key_path.text, .paths = PathsMap.init(allocator) };
errdefer allocator.free(result.paths);
if (json.asProperty("extends")) |extends_value| {
log.addWarning(&source, extends_value.loc, "\"extends\" is not implemented yet") catch unreachable;
// if ((extends_value.expr.asString(allocator) catch null)) |str| {
// if (extends(str, source.rangeOfString(extends_value.loc))) |base| {
// result.jsx = base.jsx;
// result.base_url_for_paths = base.base_url_for_paths;
// result.use_define_for_class_fields = base.use_define_for_class_fields;
// result.preserve_imports_not_used_as_values = base.preserve_imports_not_used_as_values;
// // https://github.com/microsoft/TypeScript/issues/14527#issuecomment-284948808
// result.paths = base.paths;
// }
// }
}
var has_base_url = false;
// Parse "compilerOptions"
if (json.asProperty("compilerOptions")) |compiler_opts| {
// Parse "baseUrl"
if (compiler_opts.expr.asProperty("baseUrl")) |base_url_prop| {
if ((base_url_prop.expr.asString(allocator))) |base_url| {
result.base_url = base_url;
has_base_url = true;
}
}
// Parse "jsxFactory"
if (compiler_opts.expr.asProperty("jsxFactory")) |jsx_prop| {
if (jsx_prop.expr.asString(allocator)) |str| {
result.jsx.factory = try parseMemberExpressionForJSX(log, &source, jsx_prop.loc, str, allocator);
}
}
// Parse "jsxFragmentFactory"
if (compiler_opts.expr.asProperty("jsxFactory")) |jsx_prop| {
if (jsx_prop.expr.asString(allocator)) |str| {
result.jsx.fragment = try parseMemberExpressionForJSX(log, &source, jsx_prop.loc, str, allocator);
}
}
// Parse "jsxImportSource"
if (compiler_opts.expr.asProperty("jsxImportSource")) |jsx_prop| {
if (jsx_prop.expr.asString(allocator)) |str| {
result.jsx.import_source = str;
}
}
// Parse "useDefineForClassFields"
if (compiler_opts.expr.asProperty("useDefineForClassFields")) |use_define_value_prop| {
if (use_define_value_prop.expr.asBool()) |val| {
result.use_define_for_class_fields = val;
}
}
// Parse "importsNotUsedAsValues"
if (compiler_opts.expr.asProperty("importsNotUsedAsValues")) |jsx_prop| {
// This should never allocate since it will be utf8
if ((jsx_prop.expr.asString(allocator))) |str| {
switch (ImportsNotUsedAsValue.List.get(str) orelse ImportsNotUsedAsValue.invalid) {
.preserve, .err => {
result.preserve_imports_not_used_as_values = true;
},
.remove => {},
else => {
log.addRangeWarningFmt(&source, source.rangeOfString(jsx_prop.loc), allocator, "Invalid value \"{s}\" for \"importsNotUsedAsValues\"", .{str}) catch {};
},
}
}
}
// Parse "paths"
if (compiler_opts.expr.asProperty("paths")) |paths_prop| {
switch (paths_prop.expr.data) {
.e_object => {
var paths = paths_prop.expr.getObject();
result.base_url_for_paths = result.base_url;
result.paths = PathsMap.init(allocator);
for (paths.properties) |property| {
const key_prop = property.key orelse continue;
const key = (key_prop.asString(allocator)) orelse continue;
if (!TSConfigJSON.isValidTSConfigPathNoBaseURLPattern(key, log, &source, allocator, key_prop.loc)) {
continue;
}
const value_prop = property.value orelse continue;
// The "paths" field is an object which maps a pattern to an
// array of remapping patterns to try, in priority order. See
// the documentation for examples of how this is used:
// https://www.typescriptlang.org/docs/handbook/module-resolution.html#path-mapping.
//
// One particular example:
//
// {
// "compilerOptions": {
// "baseUrl": "projectRoot",
// "paths": {
// "*": [
// "*",
// "generated/*"
// ]
// }
// }
// }
//
// Matching "folder1/file2" should first check "projectRoot/folder1/file2"
// and then, if that didn't work, also check "projectRoot/generated/folder1/file2".
switch (value_prop.data) {
.e_array => {
const array = value_prop.getArray();
if (array.items.len > 0) {
var values = allocator.alloc(string, array.items.len) catch unreachable;
errdefer allocator.free(values);
var count: usize = 0;
for (array.items) |expr| {
if ((expr.asString(allocator))) |str| {
if (TSConfigJSON.isValidTSConfigPathPattern(
str,
log,
&source,
expr.loc,
allocator,
) and
(has_base_url or
TSConfigJSON.isValidTSConfigPathNoBaseURLPattern(
str,
log,
&source,
allocator,
expr.loc,
))) {
values[count] = str;
count += 1;
}
}
}
if (count > 0) {
result.paths.put(
key,
values[0..count],
) catch unreachable;
}
}
},
else => {
log.addRangeWarningFmt(
&source,
source.rangeOfString(key_prop.loc),
allocator,
"Substitutions for pattern \"{s}\" should be an array",
.{key},
) catch {};
},
}
}
},
else => {},
}
}
}
if (isDebug and has_base_url) {
std.debug.assert(result.base_url.len > 0);
}
var _result = allocator.create(TSConfigJSON) catch unreachable;
_result.* = result;
if (isDebug and has_base_url) {
std.debug.assert(_result.base_url.len > 0);
}
return _result;
}
pub fn isValidTSConfigPathPattern(text: string, log: *logger.Log, source: *const logger.Source, loc: logger.Loc, allocator: *std.mem.Allocator) bool {
var found_asterisk = false;
for (text) |c, i| {
if (c == '*') {
if (found_asterisk) {
const r = source.rangeOfString(loc);
log.addRangeWarningFmt(source, r, allocator, "Invalid pattern \"{s}\", must have at most one \"*\" character", .{text}) catch {};
return false;
}
found_asterisk = true;
}
}
return true;
}
pub fn parseMemberExpressionForJSX(log: *logger.Log, source: *const logger.Source, loc: logger.Loc, text: string, allocator: *std.mem.Allocator) ![]string {
if (text.len == 0) {
return &([_]string{});
}
const parts_count = std.mem.count(u8, text, ".");
const parts = allocator.alloc(string, parts_count) catch unreachable;
var iter = std.mem.tokenize(text, ".");
var i: usize = 0;
while (iter.next()) |part| {
if (!js_lexer.isIdentifier(part)) {
const warn = source.rangeOfString(loc);
log.addRangeWarningFmt(source, warn, allocator, "Invalid JSX member expression: \"{s}\"", .{part}) catch {};
return &([_]string{});
}
parts[i] = part;
i += 1;
}
return parts;
}
pub fn isSlash(c: u8) bool {
return c == '/' or c == '\\';
}
pub fn isValidTSConfigPathNoBaseURLPattern(text: string, log: *logger.Log, source: *const logger.Source, allocator: *std.mem.Allocator, loc: logger.Loc) bool {
var c0: u8 = 0;
var c1: u8 = 0;
var c2: u8 = 0;
const n = text.len;
switch (n) {
0 => {
return false;
},
// Relative "." or ".."
1 => {
return text[0] == '.';
},
// "..", ".\", "./"
2 => {
return text[0] == '.' and (text[1] == '.' or text[1] == '\\' or text[1] == '/');
},
else => {
c0 = text[0];
c1 = text[1];
c2 = text[2];
},
}
// Relative "./" or "../" or ".\\" or "..\\"
if (c0 == '.' and (TSConfigJSON.isSlash(c1) or (c1 == '.' and TSConfigJSON.isSlash(c2)))) {
return true;
}
// Absolute DOS "c:/" or "c:\\"
if (c1 == ':' and TSConfigJSON.isSlash(c2)) {
switch (c0) {
'a'...'z', 'A'...'Z' => {
return true;
},
else => {},
}
}
const r = source.rangeOfString(loc);
log.addRangeWarningFmt(source, r, allocator, "Non-relative path \"{s}\" is not allowed when \"baseUrl\" is not set (did you forget a leading \"./\"?)", .{text}) catch {};
return false;
}
};
test "tsconfig.json" {
try alloc.setup(std.heap.c_allocator);
}

40
src/runtime.js Normal file
View File

@@ -0,0 +1,40 @@
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
export var __markAsModule = (target) =>
__defProp(target, "__esModule", { value: true });
export var __commonJS = (cb, mod) => () => {
return mod || cb((mod = { exports: {} }).exports, mod), mod.exports;
};
export var __reExport = (target, module, desc) => {
if ((module && typeof module === "object") || typeof module === "function") {
for (let key of __getOwnPropNames(module))
if (!__hasOwnProp.call(target, key) && key !== "default")
__defProp(target, key, {
get: () => module[key],
enumerable:
!(desc = __getOwnPropDesc(module, key)) || desc.enumerable,
});
}
return target;
};
export var __toModule = (module) => {
return __reExport(
__markAsModule(
__defProp(
module != null ? __create(__getProtoOf(module)) : {},
"default",
module && module.__esModule && "default" in module
? { get: () => module.default, enumerable: true }
: { value: module, enumerable: true }
)
),
module
);
};

14
src/runtime.zig Normal file
View File

@@ -0,0 +1,14 @@
pub const Runtime = struct {
pub const Features = packed struct {
react_fast_refresh: bool = false,
hot_module_reloading: bool = false,
keep_names_for_arrow_functions: bool = true,
};
pub const Functions = enum {
KeepNames,
CommonJSToESModule,
TypeScriptDecorateClass,
TypeScriptDecorateParam,
};
};

View File

@@ -6,7 +6,11 @@ const JavascriptString = @import("ast/base.zig").JavascriptString;
usingnamespace @import("string_types.zig");
pub fn containsChar(self: string, char: u8) bool {
return std.mem(char) != null;
return indexOfChar(self, char) != null;
}
pub fn contains(self: string, str: string) bool {
return std.mem.indexOf(u8, self, str) != null;
}
pub fn indexOfChar(self: string, char: u8) ?usize {
@@ -25,6 +29,13 @@ pub fn indexOf(self: string, str: u8) ?usize {
return std.mem.indexOf(u8, self, str);
}
pub fn cat(allocator: *std.mem.Allocator, first: string, second: string) !string {
var out = try allocator.alloc(u8, first.len + second.len);
std.mem.copy(u8, out, first);
std.mem.copy(u8, out[first.len..], second);
return out;
}
pub fn startsWith(self: string, str: string) bool {
if (str.len > self.len) {
return false;
@@ -41,6 +52,21 @@ pub fn startsWith(self: string, str: string) bool {
return true;
}
pub fn endsWith(self: string, str: string) bool {
if (str.len > self.len) {
return false;
}
var i: usize = str.len - 1;
while (i > 0) : (i -= 1) {
if (str[i] != self[i]) {
return false;
}
}
return true;
}
pub fn endsWithAny(self: string, str: string) bool {
const end = self[self.len - 1];
for (str) |char| {
@@ -70,7 +96,102 @@ pub fn endsWithAnyComptime(self: string, comptime str: string) bool {
}
pub fn eql(self: string, other: anytype) bool {
return std.mem.eql(u8, self, other);
if (self.len != other.len) return false;
for (self) |c, i| {
if (other[i] != c) return false;
}
return true;
}
pub fn eqlInsensitive(self: string, other: anytype) bool {
return std.ascii.eqlIgnoreCase(self, other);
}
pub fn eqlComptime(self: string, comptime alt: anytype) bool {
switch (comptime alt.len) {
0 => {
@compileError("Invalid size passed to eqlComptime");
},
2 => {
const check = std.mem.readIntNative(u16, alt[0..alt.len]);
return self.len == alt.len and std.mem.readIntNative(u16, self[0..2]) == check;
},
1, 3 => {
if (alt.len != self.len) {
return false;
}
inline for (alt) |c, i| {
if (self[i] != c) return false;
}
return true;
},
4 => {
const check = std.mem.readIntNative(u32, alt[0..alt.len]);
return self.len == alt.len and std.mem.readIntNative(u32, self[0..4]) == check;
},
6 => {
const first = std.mem.readIntNative(u32, alt[0..4]);
const second = std.mem.readIntNative(u16, alt[4..6]);
return self.len == alt.len and first == std.mem.readIntNative(u32, self[0..4]) and
second == std.mem.readIntNative(u16, self[4..6]);
},
5, 7 => {
const check = std.mem.readIntNative(u32, alt[0..4]);
if (self.len != alt.len or std.mem.readIntNative(u32, self[0..4]) != check) {
return false;
}
const remainder = self[4..];
inline for (alt[4..]) |c, i| {
if (remainder[i] != c) return false;
}
return true;
},
8 => {
const check = std.mem.readIntNative(u64, alt[0..alt.len]);
return self.len == alt.len and std.mem.readIntNative(u64, self[0..8]) == check;
},
9...11 => {
const first = std.mem.readIntNative(u64, alt[0..8]);
if (self.len != alt.len or first != std.mem.readIntNative(u64, self[0..8])) {
return false;
}
inline for (alt[8..]) |c, i| {
if (self[i + 8] != c) return false;
}
return true;
},
12 => {
const first = std.mem.readIntNative(u64, alt[0..8]);
const second = std.mem.readIntNative(u32, alt[8..12]);
return (self.len == alt.len) and first == std.mem.readIntNative(u64, self[0..8]) and second == std.mem.readIntNative(u32, self[8..12]);
},
13...15 => {
const first = std.mem.readIntNative(u64, alt[0..8]);
const second = std.mem.readIntNative(u32, alt[8..12]);
if (self.len != alt.len or first != std.mem.readIntNative(u64, self[0..8]) or second != std.mem.readIntNative(u32, self[8..12])) {
return false;
}
inline for (alt[13..]) |c, i| {
if (self[i + 13] != c) return false;
}
return true;
},
16 => {
const first = std.mem.readIntNative(u64, alt[0..8]);
const second = std.mem.readIntNative(u64, alt[8..15]);
return (self.len == alt.len) and first == std.mem.readIntNative(u64, self[0..8]) and second == std.mem.readIntNative(u64, self[8..16]);
},
else => {
@compileError(alt ++ " is too long.");
},
}
}
pub fn append(allocator: *std.mem.Allocator, self: string, other: string) !string {
@@ -89,6 +210,26 @@ pub fn eqlUtf16(comptime self: string, other: JavascriptString) bool {
return std.mem.eql(u16, std.unicode.utf8ToUtf16LeStringLiteral(self), other);
}
pub fn toUTF8Alloc(allocator: *std.mem.Allocator, js: JavascriptString) !string {
var temp = std.mem.zeroes([4]u8);
var list = std.ArrayList(u8).initCapacity(allocator, js.len) catch unreachable;
var i: usize = 0;
while (i < js.len) : (i += 1) {
var r1 = @intCast(i32, js[i]);
if (r1 >= 0xD800 and r1 <= 0xDBFF and i + 1 < js.len) {
const r2 = @intCast(i32, js[i] + 1);
if (r2 >= 0xDC00 and r2 <= 0xDFFF) {
r1 = (r1 - 0xD800) << 10 | (r2 - 0xDC00) + 0x10000;
i += 1;
}
}
const width = encodeWTF8Rune(&temp, r1);
list.appendSlice(temp[0..width]) catch unreachable;
}
return list.toOwnedSlice();
}
// Check utf16 string equals utf8 string without allocating extra memory
pub fn utf16EqlString(text: []u16, str: string) bool {
if (text.len > str.len) {
// Strings can't be equal if UTF-16 encoding is longer than UTF-8 encoding
@@ -180,7 +321,7 @@ pub fn toUTF16Buf(in: string, out: []u16) usize {
}
}
return utf8Iterator.i;
return i;
}
pub fn toUTF16Alloc(in: string, allocator: *std.mem.Allocator) !JavascriptString {
@@ -237,3 +378,54 @@ pub fn containsNonBmpCodePointUTF16(_text: JavascriptString) bool {
return false;
}
pub fn join(slices: []const string, delimiter: string, allocator: *std.mem.Allocator) !string {
return try std.mem.join(allocator, delimiter, slices);
}
pub fn cmpStringsAsc(ctx: void, a: string, b: string) bool {
return std.mem.order(u8, a, b) == .lt;
}
pub fn cmpStringsDesc(ctx: void, a: string, b: string) bool {
return std.mem.order(u8, a, b) == .gt;
}
const sort_asc = std.sort.asc(u8);
const sort_desc = std.sort.desc(u8);
pub fn sortAsc(in: []string) void {
std.sort.sort([]const u8, in, {}, cmpStringsAsc);
}
pub fn sortDesc(in: []string) void {
std.sort.sort([]const u8, in, {}, cmpStringsDesc);
}
test "join" {
var string_list = &[_]string{ "abc", "def", "123", "hello" };
const list = try join(string_list, "-", std.heap.page_allocator);
try std.testing.expectEqualStrings("abc-def-123-hello", list);
}
test "sortAsc" {
var string_list = [_]string{ "abc", "def", "123", "hello" };
var sorted_string_list = [_]string{ "123", "abc", "def", "hello" };
var sorted_join = try join(&sorted_string_list, "-", std.heap.page_allocator);
sortAsc(&string_list);
var string_join = try join(&string_list, "-", std.heap.page_allocator);
try std.testing.expectEqualStrings(sorted_join, string_join);
}
test "sortDesc" {
var string_list = [_]string{ "abc", "def", "123", "hello" };
var sorted_string_list = [_]string{ "hello", "def", "abc", "123" };
var sorted_join = try join(&sorted_string_list, "-", std.heap.page_allocator);
sortDesc(&string_list);
var string_join = try join(&string_list, "-", std.heap.page_allocator);
try std.testing.expectEqualStrings(sorted_join, string_join);
}
pub usingnamespace @import("exact_size_matcher.zig");

View File

@@ -14,17 +14,16 @@ pub const MutableString = struct {
};
}
pub fn deinit(str: *MutableString) void {
str.list.deinit(str.allocator);
}
pub fn growIfNeeded(self: *MutableString, amount: usize) !void {
const new_capacity = self.list.items.len + amount;
if (self.list.capacity < new_capacity) {
try self.list.ensureCapacity(self.allocator, new_capacity);
}
try self.list.ensureUnusedCapacity(self.allocator, amount);
}
pub fn writeAll(self: *MutableString, bytes: string) !usize {
const new_capacity = self.list.items.len + bytes.len;
try self.list.ensureCapacity(self.allocator, new_capacity);
self.list.appendSliceAssumeCapacity(bytes);
try self.list.appendSlice(self.allocator, bytes);
return self.list.items.len;
}
@@ -47,29 +46,49 @@ pub const MutableString = struct {
return "_";
}
var mutable = try MutableString.init(allocator, 0);
var has_needed_gap = false;
var needs_gap = false;
var start_i: usize = 0;
var needsGap = false;
for (str) |c| {
if (std.ascii.isLower(c) or std.ascii.isUpper(c) or (mutable.len() > 0 and std.ascii.isAlNum(c))) {
if (needsGap) {
try mutable.appendChar('_');
needsGap = false;
// Common case: no gap necessary. No allocation necessary.
needs_gap = std.ascii.isAlNum(str[0]);
if (!needs_gap) {
// Are there any non-alphanumeric chars at all?
for (str[1..str.len]) |c, i| {
switch (c) {
'a'...'z', 'A'...'Z', '0'...'9' => {},
else => {
needs_gap = true;
start_i = i;
break;
},
}
try mutable.appendChar(c);
} else if (!needsGap) {
needsGap = true;
}
}
if (mutable.len() > 0) {
if (needs_gap) {
var mutable = try MutableString.initCopy(allocator, str[0..start_i]);
for (str[start_i..str.len]) |c, i| {
if (std.ascii.isLower(c) or std.ascii.isUpper(c) or (mutable.len() > 0 and std.ascii.isAlNum(c))) {
if (needs_gap) {
try mutable.appendChar('_');
needs_gap = false;
has_needed_gap = true;
}
try mutable.appendChar(c);
} else if (!needs_gap) {
needs_gap = true;
}
}
return mutable.list.toOwnedSlice(allocator);
} else {
return str;
}
return str;
}
pub fn len(self: *MutableString) usize {
pub fn len(self: *const MutableString) usize {
return self.list.items.len;
}
@@ -82,17 +101,11 @@ pub const MutableString = struct {
try self.list.replaceRange(self.allocator, 0, std.mem.len(str[0..]), str[0..]);
}
}
pub fn growBy(self: *MutableString, amount: usize) callconv(.Inline) !void {
try self.ensureCapacity(self.list.capacity + amount);
try self.list.ensureUnusedCapacity(self.allocator, amount);
}
pub fn ensureCapacity(self: *MutableString, amount: usize) callconv(.Inline) !void {
try self.list.ensureCapacity(self.allocator, amount);
}
pub fn deinit(self: *MutableString) !void {
self.list.deinit(self.allocator);
}
pub fn appendChar(self: *MutableString, char: u8) callconv(.Inline) !void {
try self.list.append(self.allocator, char);
}
@@ -115,6 +128,10 @@ pub const MutableString = struct {
return self.list.toOwnedSlice(self.allocator);
}
pub fn toOwnedSliceLeaky(self: *MutableString) string {
return self.list.items;
}
pub fn toOwnedSliceLength(self: *MutableString, length: usize) string {
self.list.shrinkAndFree(self.allocator, length);
return self.list.toOwnedSlice(self.allocator);

1220
src/sync.zig Normal file

File diff suppressed because it is too large Load Diff

1
src/test/fixtures/await.ts vendored Normal file
View File

@@ -0,0 +1 @@
const init: (VariableDeclaration | AnyExpression) = true;

View File

@@ -0,0 +1,369 @@
/** @license React v17.0.2
* react.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
"use strict";
var l = require("object-assign"),
n = 60103,
p = 60106;
exports.Fragment = 60107;
exports.StrictMode = 60108;
exports.Profiler = 60114;
var q = 60109,
r = 60110,
t = 60112;
exports.Suspense = 60113;
var u = 60115,
v = 60116;
if ("function" === typeof Symbol && Symbol.for) {
var w = Symbol.for;
n = w("react.element");
p = w("react.portal");
exports.Fragment = w("react.fragment");
exports.StrictMode = w("react.strict_mode");
exports.Profiler = w("react.profiler");
q = w("react.provider");
r = w("react.context");
t = w("react.forward_ref");
exports.Suspense = w("react.suspense");
u = w("react.memo");
v = w("react.lazy");
}
var x = "function" === typeof Symbol && Symbol.iterator;
function y(a) {
if (null === a || "object" !== typeof a) return null;
a = (x && a[x]) || a["@@iterator"];
return "function" === typeof a ? a : null;
}
function z(a) {
for (
var b = "https://reactjs.org/docs/error-decoder.html?invariant=" + a, c = 1;
c < arguments.length;
c++
)
b += "&args[]=" + encodeURIComponent(arguments[c]);
return (
"Minified React error #" +
a +
"; visit " +
b +
" for the full message or use the non-minified dev environment for full errors and additional helpful warnings."
);
}
var A = {
isMounted: function () {
return !1;
},
enqueueForceUpdate: function () {},
enqueueReplaceState: function () {},
enqueueSetState: function () {},
},
B = {};
function C(a, b, c) {
this.props = a;
this.context = b;
this.refs = B;
this.updater = c || A;
}
C.prototype.isReactComponent = {};
C.prototype.setState = function (a, b) {
if ("object" !== typeof a && "function" !== typeof a && null != a)
throw Error(z(85));
this.updater.enqueueSetState(this, a, b, "setState");
};
C.prototype.forceUpdate = function (a) {
this.updater.enqueueForceUpdate(this, a, "forceUpdate");
};
function D() {}
D.prototype = C.prototype;
function E(a, b, c) {
this.props = a;
this.context = b;
this.refs = B;
this.updater = c || A;
}
var F = (E.prototype = new D());
F.constructor = E;
l(F, C.prototype);
F.isPureReactComponent = !0;
var G = { current: null },
H = Object.prototype.hasOwnProperty,
I = { key: !0, ref: !0, __self: !0, __source: !0 };
function J(a, b, c) {
var e,
d = {},
k = null,
h = null;
if (null != b)
for (e in (void 0 !== b.ref && (h = b.ref),
void 0 !== b.key && (k = "" + b.key),
b))
H.call(b, e) && !I.hasOwnProperty(e) && (d[e] = b[e]);
var g = arguments.length - 2;
if (1 === g) d.children = c;
else if (1 < g) {
for (var f = Array(g), m = 0; m < g; m++) f[m] = arguments[m + 2];
d.children = f;
}
if (a && a.defaultProps)
for (e in ((g = a.defaultProps), g)) void 0 === d[e] && (d[e] = g[e]);
return { $$typeof: n, type: a, key: k, ref: h, props: d, _owner: G.current };
}
function K(a, b) {
return {
$$typeof: n,
type: a.type,
key: b,
ref: a.ref,
props: a.props,
_owner: a._owner,
};
}
function L(a) {
return "object" === typeof a && null !== a && a.$$typeof === n;
}
function escape(a) {
var b = { "=": "=0", ":": "=2" };
return (
"$" +
a.replace(/[=:]/g, function (a) {
return b[a];
})
);
}
var M = /\/+/g;
function N(a, b) {
return "object" === typeof a && null !== a && null != a.key
? escape("" + a.key)
: b.toString(36);
}
function O(a, b, c, e, d) {
var k = typeof a;
if ("undefined" === k || "boolean" === k) a = null;
var h = !1;
if (null === a) h = !0;
else
switch (k) {
case "string":
case "number":
h = !0;
break;
case "object":
switch (a.$$typeof) {
case n:
case p:
h = !0;
}
}
if (h)
return (
(h = a),
(d = d(h)),
(a = "" === e ? "." + N(h, 0) : e),
Array.isArray(d)
? ((c = ""),
null != a && (c = a.replace(M, "$&/") + "/"),
O(d, b, c, "", function (a) {
return a;
}))
: null != d &&
(L(d) &&
(d = K(
d,
c +
(!d.key || (h && h.key === d.key)
? ""
: ("" + d.key).replace(M, "$&/") + "/") +
a
)),
b.push(d)),
1
);
h = 0;
e = "" === e ? "." : e + ":";
if (Array.isArray(a))
for (var g = 0; g < a.length; g++) {
k = a[g];
var f = e + N(k, g);
h += O(k, b, c, f, d);
}
else if (((f = y(a)), "function" === typeof f))
for (a = f.call(a), g = 0; !(k = a.next()).done; )
(k = k.value), (f = e + N(k, g++)), (h += O(k, b, c, f, d));
else if ("object" === k)
throw (
((b = "" + a),
Error(
z(
31,
"[object Object]" === b
? "object with keys {" + Object.keys(a).join(", ") + "}"
: b
)
))
);
return h;
}
function P(a, b, c) {
if (null == a) return a;
var e = [],
d = 0;
O(a, e, "", "", function (a) {
return b.call(c, a, d++);
});
return e;
}
function Q(a) {
if (-1 === a._status) {
var b = a._result;
b = b();
a._status = 0;
a._result = b;
b.then(
function (b) {
0 === a._status && ((b = b.default), (a._status = 1), (a._result = b));
},
function (b) {
0 === a._status && ((a._status = 2), (a._result = b));
}
);
}
if (1 === a._status) return a._result;
throw a._result;
}
var R = { current: null };
function S() {
var a = R.current;
if (null === a) throw Error(z(321));
return a;
}
var T = {
ReactCurrentDispatcher: R,
ReactCurrentBatchConfig: { transition: 0 },
ReactCurrentOwner: G,
IsSomeRendererActing: { current: !1 },
assign: l,
};
exports.Children = {
map: P,
forEach: function (a, b, c) {
P(
a,
function () {
b.apply(this, arguments);
},
c
);
},
count: function (a) {
var b = 0;
P(a, function () {
b++;
});
return b;
},
toArray: function (a) {
return (
P(a, function (a) {
return a;
}) || []
);
},
only: function (a) {
if (!L(a)) throw Error(z(143));
return a;
},
};
exports.Component = C;
exports.PureComponent = E;
exports.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED = T;
exports.cloneElement = function (a, b, c) {
if (null === a || void 0 === a) throw Error(z(267, a));
var e = l({}, a.props),
d = a.key,
k = a.ref,
h = a._owner;
if (null != b) {
void 0 !== b.ref && ((k = b.ref), (h = G.current));
void 0 !== b.key && (d = "" + b.key);
if (a.type && a.type.defaultProps) var g = a.type.defaultProps;
for (f in b)
H.call(b, f) &&
!I.hasOwnProperty(f) &&
(e[f] = void 0 === b[f] && void 0 !== g ? g[f] : b[f]);
}
var f = arguments.length - 2;
if (1 === f) e.children = c;
else if (1 < f) {
g = Array(f);
for (var m = 0; m < f; m++) g[m] = arguments[m + 2];
e.children = g;
}
return { $$typeof: n, type: a.type, key: d, ref: k, props: e, _owner: h };
};
exports.createContext = function (a, b) {
void 0 === b && (b = null);
a = {
$$typeof: r,
_calculateChangedBits: b,
_currentValue: a,
_currentValue2: a,
_threadCount: 0,
Provider: null,
Consumer: null,
};
a.Provider = { $$typeof: q, _context: a };
return (a.Consumer = a);
};
exports.createElement = J;
exports.createFactory = function (a) {
var b = J.bind(null, a);
b.type = a;
return b;
};
exports.createRef = function () {
return { current: null };
};
exports.forwardRef = function (a) {
return { $$typeof: t, render: a };
};
exports.isValidElement = L;
exports.lazy = function (a) {
return { $$typeof: v, _payload: { _status: -1, _result: a }, _init: Q };
};
exports.memo = function (a, b) {
return { $$typeof: u, type: a, compare: void 0 === b ? null : b };
};
exports.useCallback = function (a, b) {
return S().useCallback(a, b);
};
exports.useContext = function (a, b) {
return S().useContext(a, b);
};
exports.useDebugValue = function () {};
exports.useEffect = function (a, b) {
return S().useEffect(a, b);
};
exports.useImperativeHandle = function (a, b, c) {
return S().useImperativeHandle(a, b, c);
};
exports.useLayoutEffect = function (a, b) {
return S().useLayoutEffect(a, b);
};
exports.useMemo = function (a, b) {
return S().useMemo(a, b);
};
exports.useReducer = function (a, b, c) {
return S().useReducer(a, b, c);
};
exports.useRef = function (a) {
return S().useRef(a);
};
exports.useState = function (a) {
return S().useState(a);
};
exports.version = "17.0.2";

View File

@@ -0,0 +1,3 @@
class Foo {
prop: string;
}

4
src/test/fixtures/defines.js vendored Normal file
View File

@@ -0,0 +1,4 @@
if (process.env.NODE_ENV === "development") {
console.log("hi");
} else {
}

View File

@@ -0,0 +1,72 @@
import Head from "next/head";
import Image from "next/image";
import styles from "../styles/Home.module.css";
import "../lib/api.ts";
// The bug:
// This function appears twice in the output.
export default function Home() {
return (
<div className={styles.container}>
<Head>
<title>Create Next App</title>
<meta name="description" content="Generated by create next app" />
<link rel="icon" href="/favicon.ico" />
</Head>
<main className={styles.main}>
<h1 className={styles.title}>
Welcome to <a href="https://nextjs.org">Next.js!</a>
</h1>
<p className={styles.description}>
Get started by editing{" "}
<code className={styles.code}>pages/index.js</code>
</p>
<div className={styles.grid}>
<a href="https://nextjs.org/docs" className={styles.card}>
<h2>Documentation &rarr;</h2>
<p>Find in-depth information about Next.js features and API.</p>
</a>
<a href="https://nextjs.org/learn" className={styles.card}>
<h2>Learn &rarr;</h2>
<p>Learn about Next.js in an interactive course with quizzes!</p>
</a>
<a
href="https://github.com/vercel/next.js/tree/master/examples"
className={styles.card}
>
<h2>Examples &rarr;</h2>
<p>Discover and deploy boilerplate example Next.js projects.</p>
</a>
<a
href="https://vercel.com/new?utm_source=create-next-app&utm_medium=default-template&utm_campaign=create-next-app"
className={styles.card}
>
<h2>Deploy &rarr;</h2>
<p>
Instantly deploy your Next.js site to a public URL with Vercel.
</p>
</a>
</div>
</main>
<footer className={styles.footer}>
<a
href="https://vercel.com?utm_source=create-next-app&utm_medium=default-template&utm_campaign=create-next-app"
target="_blank"
rel="noopener noreferrer"
>
Powered by{" "}
<span className={styles.logo}>
<Image src="/vercel.svg" alt="Vercel Logo" width={72} height={16} />
</span>
</a>
</footer>
</div>
);
}

View File

@@ -0,0 +1,5 @@
export function boop() {}
var UniformsUtils;
export { UniformsUtils };

1
src/test/fixtures/escape-chars.js vendored Normal file
View File

@@ -0,0 +1 @@
console.log("\u2028");

2342
src/test/fixtures/exports-bug.js vendored Normal file

File diff suppressed because it is too large Load Diff

5
src/test/fixtures/for-loop-bug.js vendored Normal file
View File

@@ -0,0 +1,5 @@
// For loop was missing initializer
for (let i = 0; i < 100; i++) {
console.log("hi");
console.log("hey");
}

View File

@@ -0,0 +1,20 @@
"use strict";
exports.__esModule = true;
exports.defaultHead = defaultHead;
exports.default = void 0;
var _react = _interopRequireWildcard(require("react"));
var _sideEffect = _interopRequireDefault(require("./side-effect"));
var _ampContext = require("./amp-context");
var _headManagerContext = require("./head-manager-context");
var _amp = require("./amp");
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : { default: obj };
}
function _getRequireWildcardCache() {
if (typeof WeakMap !== "function") return null;
var cache = new WeakMap();
_getRequireWildcardCache = function () {
return cache;
};
return cache;
}

View File

@@ -0,0 +1,31 @@
var Button = () => {
return <div className="button">Button!</div>;
};
var Bar = () => {
return (
<div prop={1}>
Plain text
<div>
&larr; A child div
<Button>Red</Button>
</div>
</div>
);
};
// It failed while parsing this function.
// The bug happened due to incorrectly modifying scopes_in_order
// The fix was using tombstoning instead of deleting
// The fix also resolved some performance issues.
var Baz = () => {
return (
<div prop={1}>
Plain text
<div>
&larr; A child div
<Button>Red</Button>
</div>
</div>
);
};

Some files were not shown because too many files have changed in this diff Show More