Former-commit-id: 96ff169e46
Jarred Sumner
2021-05-07 01:26:26 -07:00
parent dd9e7de689
commit fad34bb4ab
49 changed files with 5815 additions and 148 deletions

3
.gitmodules vendored Normal file

@@ -0,0 +1,3 @@
[submodule "deps/wasi-libc"]
path = deps/wasi-libc
url = https://github.com/WebAssembly/wasi-libc


@@ -67,7 +67,7 @@ If bundler means "turn my development code into something a browser can run",
| Feature | esbuild | esdev |
| ------------------------------------ | ------- | ----- |
| JSX (transform) | ✅ | |
| JSX (transform) | ✅ | |
| TypeScript (transform) | ✅ | ⌛ |
| React Fast Refresh | ❌ | ⌛ |
| Hot Module Reloading | ❌ | ⌛ |
@@ -133,7 +133,7 @@ import { map } from "lodash-es";
const foo = map(["bar", "baz"], (item) => {});
```
If
If
##### HMR & Fast Refresh implementation


@@ -12,10 +12,17 @@ pub fn build(b: *std.build.Builder) void {
const mode = b.standardReleaseOptions();
var exe: *std.build.LibExeObjStep = undefined;
if (target.getCpuArch().isWasm()) {
exe = b.addExecutable("esdev", "src/main_wasm.zig");
if (target.getOsTag() == .wasi) {
exe = b.addExecutable("esdev", "src/main_wasi.zig");
} else if (target.getCpuArch().isWasm()) {
var lib = b.addSharedLibrary("esdev", "src/main_wasm.zig", b.version(1, 0, 0));
lib.setTarget(target);
lib.setBuildMode(mode);
lib.install();
return;
} else {
exe = b.addExecutable("esdev", "src/main.zig");
exe.linkLibC();
}
var cwd_buf = [_]u8{0} ** 4096;
var cwd = std.os.getcwd(&cwd_buf) catch unreachable;
@@ -24,14 +31,14 @@ pub fn build(b: *std.build.Builder) void {
if (std.builtin.is_test) {
while (walker.next() catch unreachable) |entry| {
if (std.mem.endsWith(u8, entry.basename, "_test.zig")) {
std.debug.print("[test] Added {s}", .{entry.basename});
Output.print("[test] Added {s}", .{entry.basename});
_ = b.addTest(entry.path);
}
}
}
exe.setTarget(target);
exe.setBuildMode(mode);
exe.linkLibC();
exe.addLibPath("/usr/local/lib");
exe.install();

111
pnpm-lock.yaml generated Normal file

@@ -0,0 +1,111 @@
lockfileVersion: 5.3
specifiers:
'@babel/preset-react': ^7.13.13
esbuild-wasm: ^0.11.19
dependencies:
'@babel/preset-react': 7.13.13
esbuild-wasm: 0.11.19
packages:
/@babel/helper-annotate-as-pure/7.12.13:
resolution: {integrity: sha512-7YXfX5wQ5aYM/BOlbSccHDbuXXFPxeoUmfWtz8le2yTkTZc+BxsiEnENFoi2SlmA8ewDkG2LgIMIVzzn2h8kfw==}
dependencies:
'@babel/types': 7.14.1
dev: false
/@babel/helper-module-imports/7.13.12:
resolution: {integrity: sha512-4cVvR2/1B693IuOvSI20xqqa/+bl7lqAMR59R4iu39R9aOX8/JoYY1sFaNvUMyMBGnHdwvJgUrzNLoUZxXypxA==}
dependencies:
'@babel/types': 7.14.1
dev: false
/@babel/helper-plugin-utils/7.13.0:
resolution: {integrity: sha512-ZPafIPSwzUlAoWT8DKs1W2VyF2gOWthGd5NGFMsBcMMol+ZhK+EQY/e6V96poa6PA/Bh+C9plWN0hXO1uB8AfQ==}
dev: false
/@babel/helper-validator-identifier/7.14.0:
resolution: {integrity: sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A==}
dev: false
/@babel/helper-validator-option/7.12.17:
resolution: {integrity: sha512-TopkMDmLzq8ngChwRlyjR6raKD6gMSae4JdYDB8bByKreQgG0RBTuKe9LRxW3wFtUnjxOPRKBDwEH6Mg5KeDfw==}
dev: false
/@babel/plugin-syntax-jsx/7.12.13:
resolution: {integrity: sha512-d4HM23Q1K7oq/SLNmG6mRt85l2csmQ0cHRaxRXjKW0YFdEXqlZ5kzFQKH5Uc3rDJECgu+yCRgPkG04Mm98R/1g==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/helper-plugin-utils': 7.13.0
dev: false
/@babel/plugin-transform-react-display-name/7.12.13:
resolution: {integrity: sha512-MprESJzI9O5VnJZrL7gg1MpdqmiFcUv41Jc7SahxYsNP2kDkFqClxxTZq+1Qv4AFCamm+GXMRDQINNn+qrxmiA==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/helper-plugin-utils': 7.13.0
dev: false
/@babel/plugin-transform-react-jsx-development/7.12.17:
resolution: {integrity: sha512-BPjYV86SVuOaudFhsJR1zjgxxOhJDt6JHNoD48DxWEIxUCAMjV1ys6DYw4SDYZh0b1QsS2vfIA9t/ZsQGsDOUQ==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/plugin-transform-react-jsx': 7.13.12
dev: false
/@babel/plugin-transform-react-jsx/7.13.12:
resolution: {integrity: sha512-jcEI2UqIcpCqB5U5DRxIl0tQEProI2gcu+g8VTIqxLO5Iidojb4d77q+fwGseCvd8af/lJ9masp4QWzBXFE2xA==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/helper-annotate-as-pure': 7.12.13
'@babel/helper-module-imports': 7.13.12
'@babel/helper-plugin-utils': 7.13.0
'@babel/plugin-syntax-jsx': 7.12.13
'@babel/types': 7.14.1
dev: false
/@babel/plugin-transform-react-pure-annotations/7.12.1:
resolution: {integrity: sha512-RqeaHiwZtphSIUZ5I85PEH19LOSzxfuEazoY7/pWASCAIBuATQzpSVD+eT6MebeeZT2F4eSL0u4vw6n4Nm0Mjg==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/helper-annotate-as-pure': 7.12.13
'@babel/helper-plugin-utils': 7.13.0
dev: false
/@babel/preset-react/7.13.13:
resolution: {integrity: sha512-gx+tDLIE06sRjKJkVtpZ/t3mzCDOnPG+ggHZG9lffUbX8+wC739x20YQc9V35Do6ZAxaUc/HhVHIiOzz5MvDmA==}
peerDependencies:
'@babel/core': ^7.0.0-0
dependencies:
'@babel/helper-plugin-utils': 7.13.0
'@babel/helper-validator-option': 7.12.17
'@babel/plugin-transform-react-display-name': 7.12.13
'@babel/plugin-transform-react-jsx': 7.13.12
'@babel/plugin-transform-react-jsx-development': 7.12.17
'@babel/plugin-transform-react-pure-annotations': 7.12.1
dev: false
/@babel/types/7.14.1:
resolution: {integrity: sha512-S13Qe85fzLs3gYRUnrpyeIrBJIMYv33qSTg1qoBwiG6nPKwUWAD9odSzWhEedpwOIzSEI6gbdQIWEMiCI42iBA==}
dependencies:
'@babel/helper-validator-identifier': 7.14.0
to-fast-properties: 2.0.0
dev: false
/esbuild-wasm/0.11.19:
resolution: {integrity: sha512-d4s3fcIBG9CL/h5kKfXHpkztyMhs71anqdszND1Zfr4na1bhMGAb+VyEMBbt2/0ft5HtcsOYBqXsjNPNWTC29w==}
engines: {node: '>=8'}
hasBin: true
dev: false
/to-fast-properties/2.0.0:
resolution: {integrity: sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=}
engines: {node: '>=4'}
dev: false


@@ -9,8 +9,8 @@ pub var dynamic: *std.mem.Allocator = undefined;
pub fn setup(root: *std.mem.Allocator) !void {
needs_setup = false;
static = std.heap.c_allocator;
dynamic = std.heap.c_allocator;
static = root;
dynamic = root;
// static = @ptrCast(*std.mem.Allocator, &stat.allocator);
}

34
src/api/demo/.gitignore vendored Normal file

@@ -0,0 +1,34 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# local env files
.env.local
.env.development.local
.env.test.local
.env.production.local
# vercel
.vercel

34
src/api/demo/README.md Normal file

@@ -0,0 +1,34 @@
This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app).
## Getting Started
First, run the development server:
```bash
npm run dev
# or
yarn dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
You can start editing the page by modifying `pages/index.js`. The page auto-updates as you edit the file.
[API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.js`.
The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages.
## Learn More
To learn more about Next.js, take a look at the following resources:
- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome!
## Deploy on Vercel
The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.
Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details.

77
src/api/demo/lib/api.ts Normal file

@@ -0,0 +1,77 @@
import * as Schema from "../../schema";
import { ByteBuffer } from "peechy";
export interface WebAssemblyModule {
init(): number;
transform(a: number): number;
malloc(a: number): number;
calloc(a: number): number;
realloc(a: number): number;
free(a: number): number;
}
export class ESDev {
static has_initialized = false;
static wasm_source: WebAssembly.WebAssemblyInstantiatedSource = null;
static wasm_exports: WebAssemblyModule;
static memory: WebAssembly.Memory;
static memory_array: Uint8Array;
static async init(url) {
if (typeof SharedArrayBuffer !== "undefined") {
ESDev.memory = new WebAssembly.Memory({
initial: 1500,
maximum: 3000,
shared: true,
});
} else {
ESDev.memory = new WebAssembly.Memory({
initial: 1500,
maximum: 3000,
});
}
ESDev.memory_array = new Uint8Array(ESDev.memory.buffer);
ESDev.wasm_source = await globalThis.WebAssembly.instantiateStreaming(
fetch(url),
{
js: {
mem: ESDev.memory,
},
}
);
ESDev.wasm_exports = ESDev.wasm_source.instance.exports as any;
ESDev.wasm_exports.init();
console.log("WASM loaded.");
ESDev.has_initialized = true;
}
static transform(content: string, file_name: string) {
if (!ESDev.has_initialized) {
throw "Please run await ESDev.init(wasm_url) before using this.";
}
const bb = new ByteBuffer(
new Uint8Array(content.length + file_name.length)
);
bb.length = 0;
Schema.encodeTransform(
{
contents: content,
path: file_name,
},
bb
);
const data = bb.toUint8Array();
const ptr = ESDev.wasm_exports.malloc(data.byteLength);
ESDev.memory_array.set(data, ptr);
debugger;
const resp_ptr = ESDev.wasm_exports.transform(ptr);
var _bb = new ByteBuffer(ESDev.memory_array.subarray(resp_ptr));
const response = Schema.decodeTransformResponse(_bb);
ESDev.wasm_exports.free(resp_ptr);
return response;
}
}
globalThis.ESDev = ESDev;
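(Aside, not part of the commit: a rough usage sketch for the wrapper above. The `.wasm` URL is a placeholder for wherever the compiled esdev binary is served; everything else follows the class as written.)

```ts
import { ESDev } from "./lib/api"; // import path depends on where this runs

async function run() {
  // init() must complete before transform(), otherwise the class throws.
  await ESDev.init("/esdev.wasm"); // hypothetical URL to the compiled WASM binary
  const response = ESDev.transform(
    "export const App = () => <div>hello</div>;",
    "App.tsx"
  );
  // response is a Schema.TransformResponse: { status, files, errors }
  console.log(response.files.map((f) => f.path));
}
```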

16
src/api/demo/package.json Normal file

@@ -0,0 +1,16 @@
{
"name": "demo",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start"
},
"dependencies": {
"next": "10.2.0",
"peechy": "0.3.6",
"react": "17.0.2",
"react-dom": "17.0.2"
}
}


@@ -0,0 +1,7 @@
import '../styles/globals.css'
function MyApp({ Component, pageProps }) {
return <Component {...pageProps} />
}
export default MyApp


@@ -0,0 +1,5 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
export default (req, res) => {
res.status(200).json({ name: 'John Doe' })
}
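(Aside, not part of the commit: a minimal sketch of calling this route from client code once `npm run dev` is serving the demo; the port and URL are the create-next-app defaults.)

```ts
// Call the generated API route; the handler above responds with { name: "John Doe" }.
async function callHello() {
  const res = await fetch("http://localhost:3000/api/hello");
  const body: { name: string } = await res.json();
  console.log(body.name); // "John Doe"
}
```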


@@ -0,0 +1,69 @@
import Head from "next/head";
import Image from "next/image";
import styles from "../styles/Home.module.css";
import "../lib/api.ts";
export default function Home() {
return (
<div className={styles.container}>
<Head>
<title>Create Next App</title>
<meta name="description" content="Generated by create next app" />
<link rel="icon" href="/favicon.ico" />
</Head>
<main className={styles.main}>
<h1 className={styles.title}>
Welcome to <a href="https://nextjs.org">Next.js!</a>
</h1>
<p className={styles.description}>
Get started by editing{" "}
<code className={styles.code}>pages/index.js</code>
</p>
<div className={styles.grid}>
<a href="https://nextjs.org/docs" className={styles.card}>
<h2>Documentation &rarr;</h2>
<p>Find in-depth information about Next.js features and API.</p>
</a>
<a href="https://nextjs.org/learn" className={styles.card}>
<h2>Learn &rarr;</h2>
<p>Learn about Next.js in an interactive course with quizzes!</p>
</a>
<a
href="https://github.com/vercel/next.js/tree/master/examples"
className={styles.card}
>
<h2>Examples &rarr;</h2>
<p>Discover and deploy boilerplate example Next.js projects.</p>
</a>
<a
href="https://vercel.com/new?utm_source=create-next-app&utm_medium=default-template&utm_campaign=create-next-app"
className={styles.card}
>
<h2>Deploy &rarr;</h2>
<p>
Instantly deploy your Next.js site to a public URL with Vercel.
</p>
</a>
</div>
</main>
<footer className={styles.footer}>
<a
href="https://vercel.com?utm_source=create-next-app&utm_medium=default-template&utm_campaign=create-next-app"
target="_blank"
rel="noopener noreferrer"
>
Powered by{" "}
<span className={styles.logo}>
<Image src="/vercel.svg" alt="Vercel Logo" width={72} height={16} />
</span>
</a>
</footer>
</div>
);
}

1915
src/api/demo/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large.

Binary file not shown (new file, 15 KiB).


@@ -0,0 +1,4 @@
<svg width="283" height="64" viewBox="0 0 283 64" fill="none"
xmlns="http://www.w3.org/2000/svg">
<path d="M141.04 16c-11.04 0-19 7.2-19 18s8.96 18 20 18c6.67 0 12.55-2.64 16.19-7.09l-7.65-4.42c-2.02 2.21-5.09 3.5-8.54 3.5-4.79 0-8.86-2.5-10.37-6.5h28.02c.22-1.12.35-2.28.35-3.5 0-10.79-7.96-17.99-19-17.99zm-9.46 14.5c1.25-3.99 4.67-6.5 9.45-6.5 4.79 0 8.21 2.51 9.45 6.5h-18.9zM248.72 16c-11.04 0-19 7.2-19 18s8.96 18 20 18c6.67 0 12.55-2.64 16.19-7.09l-7.65-4.42c-2.02 2.21-5.09 3.5-8.54 3.5-4.79 0-8.86-2.5-10.37-6.5h28.02c.22-1.12.35-2.28.35-3.5 0-10.79-7.96-17.99-19-17.99zm-9.45 14.5c1.25-3.99 4.67-6.5 9.45-6.5 4.79 0 8.21 2.51 9.45 6.5h-18.9zM200.24 34c0 6 3.92 10 10 10 4.12 0 7.21-1.87 8.8-4.92l7.68 4.43c-3.18 5.3-9.14 8.49-16.48 8.49-11.05 0-19-7.2-19-18s7.96-18 19-18c7.34 0 13.29 3.19 16.48 8.49l-7.68 4.43c-1.59-3.05-4.68-4.92-8.8-4.92-6.07 0-10 4-10 10zm82.48-29v46h-9V5h9zM36.95 0L73.9 64H0L36.95 0zm92.38 5l-27.71 48L73.91 5H84.3l17.32 30 17.32-30h10.39zm58.91 12v9.69c-1-.29-2.06-.49-3.2-.49-5.81 0-10 4-10 10V51h-9V17h9v9.2c0-5.08 5.91-9.2 13.2-9.2z" fill="#000"/>
</svg>

New file, 1.1 KiB.


@@ -0,0 +1,121 @@
.container {
min-height: 100vh;
padding: 0 0.5rem;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 100vh;
}
.main {
padding: 5rem 0;
flex: 1;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
}
.footer {
width: 100%;
height: 100px;
border-top: 1px solid #eaeaea;
display: flex;
justify-content: center;
align-items: center;
}
.footer a {
display: flex;
justify-content: center;
align-items: center;
flex-grow: 1;
}
.title a {
color: #0070f3;
text-decoration: none;
}
.title a:hover,
.title a:focus,
.title a:active {
text-decoration: underline;
}
.title {
margin: 0;
line-height: 1.15;
font-size: 4rem;
}
.title,
.description {
text-align: center;
}
.description {
line-height: 1.5;
font-size: 1.5rem;
}
.code {
background: #fafafa;
border-radius: 5px;
padding: 0.75rem;
font-size: 1.1rem;
font-family: Menlo, Monaco, Lucida Console, Liberation Mono, DejaVu Sans Mono,
Bitstream Vera Sans Mono, Courier New, monospace;
}
.grid {
display: flex;
align-items: center;
justify-content: center;
flex-wrap: wrap;
max-width: 800px;
margin-top: 3rem;
}
.card {
margin: 1rem;
padding: 1.5rem;
text-align: left;
color: inherit;
text-decoration: none;
border: 1px solid #eaeaea;
border-radius: 10px;
transition: color 0.15s ease, border-color 0.15s ease;
width: 45%;
}
.card:hover,
.card:focus,
.card:active {
color: #0070f3;
border-color: #0070f3;
}
.card h2 {
margin: 0 0 1rem 0;
font-size: 1.5rem;
}
.card p {
margin: 0;
font-size: 1.25rem;
line-height: 1.5;
}
.logo {
height: 1em;
margin-left: 0.5rem;
}
@media (max-width: 600px) {
.grid {
width: 100%;
flex-direction: column;
}
}


@@ -0,0 +1,16 @@
html,
body {
padding: 0;
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Oxygen,
Ubuntu, Cantarell, Fira Sans, Droid Sans, Helvetica Neue, sans-serif;
}
a {
color: inherit;
text-decoration: none;
}
* {
box-sizing: border-box;
}

188
src/api/schema.d.ts vendored Normal file

@@ -0,0 +1,188 @@
import type { ByteBuffer } from "peechy";
type byte = number;
type float = number;
type int = number;
type alphanumeric = string;
type uint = number;
type int8 = number;
type lowp = number;
type int16 = number;
type int32 = number;
type float32 = number;
type uint16 = number;
type uint32 = number;
export enum Loader {
jsx = 1,
js = 2,
ts = 3,
tsx = 4,
css = 5,
file = 6,
json = 7,
}
export const LoaderKeys = {
1: "jsx",
jsx: "jsx",
2: "js",
js: "js",
3: "ts",
ts: "ts",
4: "tsx",
tsx: "tsx",
5: "css",
css: "css",
6: "file",
file: "file",
7: "json",
json: "json",
};
export enum JSXRuntime {
automatic = 1,
classic = 2,
}
export const JSXRuntimeKeys = {
1: "automatic",
automatic: "automatic",
2: "classic",
classic: "classic",
};
export enum TransformResponseStatus {
success = 1,
fail = 2,
}
export const TransformResponseStatusKeys = {
1: "success",
success: "success",
2: "fail",
fail: "fail",
};
export enum MessageKind {
err = 1,
warn = 2,
note = 3,
debug = 4,
}
export const MessageKindKeys = {
1: "err",
err: "err",
2: "warn",
warn: "warn",
3: "note",
note: "note",
4: "debug",
debug: "debug",
};
export interface JSX {
factory: string;
runtime: JSXRuntime;
fragment: string;
production: boolean;
import_source: string;
react_fast_refresh: boolean;
loader_keys: string[];
loader_values: Loader[];
}
export interface TransformOptions {
jsx: JSX;
ts: boolean;
base_path: string;
define_keys: string[];
define_values: string[];
}
export interface FileHandle {
path: string;
size: uint;
fd: uint;
}
export interface Transform {
handle?: FileHandle;
path?: string;
contents?: string;
loader?: Loader;
options?: TransformOptions;
}
export interface OutputFile {
data: Uint8Array;
path: string;
}
export interface TransformResponse {
status: TransformResponseStatus;
files: OutputFile[];
errors: Message[];
}
export interface Location {
file: string;
namespace: string;
line: int32;
column: int32;
line_text: string;
suggestion: string;
offset: uint;
}
export interface MessageData {
text?: string;
location?: Location;
}
export interface Message {
kind: MessageKind;
data: MessageData;
notes: MessageData[];
}
export interface Log {
warnings: uint32;
errors: uint32;
msgs: Message[];
}
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
export declare function decodeJSX(buffer: ByteBuffer): JSX;
export declare function encodeTransformOptions(
message: TransformOptions,
bb: ByteBuffer
): void;
export declare function decodeTransformOptions(
buffer: ByteBuffer
): TransformOptions;
export declare function encodeFileHandle(
message: FileHandle,
bb: ByteBuffer
): void;
export declare function decodeFileHandle(buffer: ByteBuffer): FileHandle;
export declare function encodeTransform(
message: Transform,
bb: ByteBuffer
): void;
export declare function decodeTransform(buffer: ByteBuffer): Transform;
export declare function encodeOutputFile(
message: OutputFile,
bb: ByteBuffer
): void;
export declare function decodeOutputFile(buffer: ByteBuffer): OutputFile;
export declare function encodeTransformResponse(
message: TransformResponse,
bb: ByteBuffer
): void;
export declare function decodeTransformResponse(
buffer: ByteBuffer
): TransformResponse;
export declare function encodeLocation(message: Location, bb: ByteBuffer): void;
export declare function decodeLocation(buffer: ByteBuffer): Location;
export declare function encodeMessageData(
message: MessageData,
bb: ByteBuffer
): void;
export declare function decodeMessageData(buffer: ByteBuffer): MessageData;
export declare function encodeMessage(message: Message, bb: ByteBuffer): void;
export declare function decodeMessage(buffer: ByteBuffer): Message;
export declare function encodeLog(message: Log, bb: ByteBuffer): void;
export declare function decodeLog(buffer: ByteBuffer): Log;

631
src/api/schema.js Normal file

@@ -0,0 +1,631 @@
const Loader = {
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"5": 5,
"6": 6,
"7": 7,
"jsx": 1,
"js": 2,
"ts": 3,
"tsx": 4,
"css": 5,
"file": 6,
"json": 7
};
const LoaderKeys = {
"1": "jsx",
"2": "js",
"3": "ts",
"4": "tsx",
"5": "css",
"6": "file",
"7": "json",
"jsx": "jsx",
"js": "js",
"ts": "ts",
"tsx": "tsx",
"css": "css",
"file": "file",
"json": "json"
};
const JSXRuntime = {
"1": 1,
"2": 2,
"automatic": 1,
"classic": 2
};
const JSXRuntimeKeys = {
"1": "automatic",
"2": "classic",
"automatic": "automatic",
"classic": "classic"
};
function decodeJSX(bb) {
var result = {};
result["factory"] = bb.readString();
result["runtime"] = JSXRuntime[bb.readByte()];
result["fragment"] = bb.readString();
result["production"] = !!bb.readByte();
result["import_source"] = bb.readString();
result["react_fast_refresh"] = !!bb.readByte();
var length = bb.readVarUint();
var values = result["loader_keys"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
var length = bb.readVarUint();
var values = result["loader_values"] = Array(length);
for (var i = 0; i < length; i++) values[i] = Loader[bb.readByte()];
return result;
}
function encodeJSX(message, bb) {
var value = message["factory"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"factory\"");
}
var value = message["runtime"];
if (value != null) {
var encoded = JSXRuntime[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"JSXRuntime\"");
bb.writeByte(encoded);
} else {
throw new Error("Missing required field \"runtime\"");
}
var value = message["fragment"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"fragment\"");
}
var value = message["production"];
if (value != null) {
bb.writeByte(value);
} else {
throw new Error("Missing required field \"production\"");
}
var value = message["import_source"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"import_source\"");
}
var value = message["react_fast_refresh"];
if (value != null) {
bb.writeByte(value);
} else {
throw new Error("Missing required field \"react_fast_refresh\"");
}
var value = message["loader_keys"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
} else {
throw new Error("Missing required field \"loader_keys\"");
}
var value = message["loader_values"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
bb.writeByte(encoded);
}
} else {
throw new Error("Missing required field \"loader_values\"");
}
}
function decodeTransformOptions(bb) {
var result = {};
result["jsx"] = decodeJSX(bb);
result["ts"] = !!bb.readByte();
result["base_path"] = bb.readString();
var length = bb.readVarUint();
var values = result["define_keys"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
var length = bb.readVarUint();
var values = result["define_values"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
return result;
}
function encodeTransformOptions(message, bb) {
var value = message["jsx"];
if (value != null) {
encodeJSX(value, bb);
} else {
throw new Error("Missing required field \"jsx\"");
}
var value = message["ts"];
if (value != null) {
bb.writeByte(value);
} else {
throw new Error("Missing required field \"ts\"");
}
var value = message["base_path"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"base_path\"");
}
var value = message["define_keys"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
} else {
throw new Error("Missing required field \"define_keys\"");
}
var value = message["define_values"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
bb.writeString(value);
}
} else {
throw new Error("Missing required field \"define_values\"");
}
}
function decodeFileHandle(bb) {
var result = {};
result["path"] = bb.readString();
result["size"] = bb.readVarUint();
result["fd"] = bb.readVarUint();
return result;
}
function encodeFileHandle(message, bb) {
var value = message["path"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"path\"");
}
var value = message["size"];
if (value != null) {
bb.writeVarUint(value);
} else {
throw new Error("Missing required field \"size\"");
}
var value = message["fd"];
if (value != null) {
bb.writeVarUint(value);
} else {
throw new Error("Missing required field \"fd\"");
}
}
function decodeTransform(bb) {
var result = {};
while (true) {
switch (bb.readVarUint()) {
case 0:
return result;
case 1:
result["handle"] = decodeFileHandle(bb);
break;
case 2:
result["path"] = bb.readString();
break;
case 3:
result["contents"] = bb.readString();
break;
case 4:
result["loader"] = Loader[bb.readByte()];
break;
case 5:
result["options"] = decodeTransformOptions(bb);
break;
default:
throw new Error("Attempted to parse invalid message");
}
}
}
function encodeTransform(message, bb) {
var value = message["handle"];
if (value != null) {
bb.writeVarUint(1);
encodeFileHandle(value, bb);
}
var value = message["path"];
if (value != null) {
bb.writeVarUint(2);
bb.writeString(value);
}
var value = message["contents"];
if (value != null) {
bb.writeVarUint(3);
bb.writeString(value);
}
var value = message["loader"];
if (value != null) {
bb.writeVarUint(4);
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
bb.writeByte(encoded);
}
var value = message["options"];
if (value != null) {
bb.writeVarUint(5);
encodeTransformOptions(value, bb);
}
bb.writeVarUint(0);
}
const TransformResponseStatus = {
"1": 1,
"2": 2,
"success": 1,
"fail": 2
};
const TransformResponseStatusKeys = {
"1": "success",
"2": "fail",
"success": "success",
"fail": "fail"
};
function decodeOutputFile(bb) {
var result = {};
result["data"] = bb.readByteArray();
result["path"] = bb.readString();
return result;
}
function encodeOutputFile(message, bb) {
var value = message["data"];
if (value != null) {
bb.writeByteArray(value);
} else {
throw new Error("Missing required field \"data\"");
}
var value = message["path"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"path\"");
}
}
function decodeTransformResponse(bb) {
var result = {};
result["status"] = TransformResponseStatus[bb.readVarUint()];
var length = bb.readVarUint();
var values = result["files"] = Array(length);
for (var i = 0; i < length; i++) values[i] = decodeOutputFile(bb);
var length = bb.readVarUint();
var values = result["errors"] = Array(length);
for (var i = 0; i < length; i++) values[i] = decodeMessage(bb);
return result;
}
function encodeTransformResponse(message, bb) {
var value = message["status"];
if (value != null) {
var encoded = TransformResponseStatus[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"TransformResponseStatus\"");
bb.writeVarUint(encoded);
} else {
throw new Error("Missing required field \"status\"");
}
var value = message["files"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
encodeOutputFile(value, bb);
}
} else {
throw new Error("Missing required field \"files\"");
}
var value = message["errors"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
encodeMessage(value, bb);
}
} else {
throw new Error("Missing required field \"errors\"");
}
}
const MessageKind = {
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"err": 1,
"warn": 2,
"note": 3,
"debug": 4
};
const MessageKindKeys = {
"1": "err",
"2": "warn",
"3": "note",
"4": "debug",
"err": "err",
"warn": "warn",
"note": "note",
"debug": "debug"
};
function decodeLocation(bb) {
var result = {};
result["file"] = bb.readString();
result["namespace"] = bb.readString();
result["line"] = bb.readInt32();
result["column"] = bb.readInt32();
result["line_text"] = bb.readString();
result["suggestion"] = bb.readString();
result["offset"] = bb.readVarUint();
return result;
}
function encodeLocation(message, bb) {
var value = message["file"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"file\"");
}
var value = message["namespace"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"namespace\"");
}
var value = message["line"];
if (value != null) {
bb.writeInt32(value);
} else {
throw new Error("Missing required field \"line\"");
}
var value = message["column"];
if (value != null) {
bb.writeInt32(value);
} else {
throw new Error("Missing required field \"column\"");
}
var value = message["line_text"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"line_text\"");
}
var value = message["suggestion"];
if (value != null) {
bb.writeString(value);
} else {
throw new Error("Missing required field \"suggestion\"");
}
var value = message["offset"];
if (value != null) {
bb.writeVarUint(value);
} else {
throw new Error("Missing required field \"offset\"");
}
}
function decodeMessageData(bb) {
var result = {};
while (true) {
switch (bb.readVarUint()) {
case 0:
return result;
case 1:
result["text"] = bb.readString();
break;
case 2:
result["location"] = decodeLocation(bb);
break;
default:
throw new Error("Attempted to parse invalid message");
}
}
}
function encodeMessageData(message, bb) {
var value = message["text"];
if (value != null) {
bb.writeVarUint(1);
bb.writeString(value);
}
var value = message["location"];
if (value != null) {
bb.writeVarUint(2);
encodeLocation(value, bb);
}
bb.writeVarUint(0);
}
function decodeMessage(bb) {
var result = {};
result["kind"] = MessageKind[bb.readVarUint()];
result["data"] = decodeMessageData(bb);
var length = bb.readVarUint();
var values = result["notes"] = Array(length);
for (var i = 0; i < length; i++) values[i] = decodeMessageData(bb);
return result;
}
function encodeMessage(message, bb) {
var value = message["kind"];
if (value != null) {
var encoded = MessageKind[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"MessageKind\"");
bb.writeVarUint(encoded);
} else {
throw new Error("Missing required field \"kind\"");
}
var value = message["data"];
if (value != null) {
encodeMessageData(value, bb);
} else {
throw new Error("Missing required field \"data\"");
}
var value = message["notes"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
encodeMessageData(value, bb);
}
} else {
throw new Error("Missing required field \"notes\"");
}
}
function decodeLog(bb) {
var result = {};
result["warnings"] = bb.readUint32();
result["errors"] = bb.readUint32();
var length = bb.readVarUint();
var values = result["msgs"] = Array(length);
for (var i = 0; i < length; i++) values[i] = decodeMessage(bb);
return result;
}
function encodeLog(message, bb) {
var value = message["warnings"];
if (value != null) {
bb.writeUint32(value);
} else {
throw new Error("Missing required field \"warnings\"");
}
var value = message["errors"];
if (value != null) {
bb.writeUint32(value);
} else {
throw new Error("Missing required field \"errors\"");
}
var value = message["msgs"];
if (value != null) {
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
value = values[i];
encodeMessage(value, bb);
}
} else {
throw new Error("Missing required field \"msgs\"");
}
}
export { Loader }
export { LoaderKeys }
export { JSXRuntime }
export { JSXRuntimeKeys }
export { decodeJSX }
export { encodeJSX }
export { decodeTransformOptions }
export { encodeTransformOptions }
export { decodeFileHandle }
export { encodeFileHandle }
export { decodeTransform }
export { encodeTransform }
export { TransformResponseStatus }
export { TransformResponseStatusKeys }
export { decodeOutputFile }
export { encodeOutputFile }
export { decodeTransformResponse }
export { encodeTransformResponse }
export { MessageKind }
export { MessageKindKeys }
export { decodeLocation }
export { encodeLocation }
export { decodeMessageData }
export { encodeMessageData }
export { decodeMessage }
export { encodeMessage }
export { decodeLog }
export { encodeLog }

107
src/api/schema.peechy Normal file

@@ -0,0 +1,107 @@
package Api;
smol Loader {
jsx = 1;
js = 2;
ts = 3;
tsx = 4;
css = 5;
file = 6;
json = 7;
}
smol JSXRuntime {
automatic = 1;
classic = 2;
}
struct JSX {
string factory;
JSXRuntime runtime;
string fragment;
bool production;
// Probably react
string import_source;
bool react_fast_refresh;
string[] loader_keys;
Loader[] loader_values;
}
struct TransformOptions {
JSX jsx;
bool ts;
string base_path;
string[] define_keys;
string[] define_values;
}
struct FileHandle {
string path;
uint size;
uint fd;
}
message Transform {
FileHandle handle = 1;
string path = 2;
string contents = 3;
Loader loader = 4;
TransformOptions options = 5;
}
enum TransformResponseStatus {
success = 1;
fail = 2;
}
struct OutputFile {
byte[] data;
string path;
}
struct TransformResponse {
TransformResponseStatus status;
OutputFile[] files;
Message[] errors;
}
enum MessageKind {
err = 1;
warn = 2;
note = 3;
debug = 4;
}
struct Location {
string file;
string namespace;
int32 line;
int32 column;
string line_text;
string suggestion;
uint offset;
}
message MessageData {
string text = 1;
Location location = 2;
}
struct Message {
MessageKind kind;
MessageData data;
MessageData[] notes;
}
struct Log {
uint32 warnings;
uint32 errors;
Message[] msgs;
}
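(Aside, not part of the commit: a small TypeScript sketch of how the generated encoder/decoder pair round-trips the `Transform` message defined above, using peechy's `ByteBuffer` the same way `lib/api.ts` does. The buffer size and sample values are arbitrary.)

```ts
import { ByteBuffer } from "peechy";
import * as Schema from "./schema"; // the generated schema.js from this commit

// Encode: message fields are optional, so only the fields that are set get
// written (a field tag per present field, then a trailing 0 terminator).
const bb = new ByteBuffer(new Uint8Array(1024));
bb.length = 0;
Schema.encodeTransform({ path: "index.tsx", contents: "const x = <a />;" }, bb);

// Decode: wrap the written bytes in a fresh ByteBuffer and read them back.
const decoded = Schema.decodeTransform(new ByteBuffer(bb.toUint8Array()));
console.log(decoded.path, decoded.contents); // "index.tsx", "const x = <a />;"
```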

166
src/api/schema.ts Normal file

@@ -0,0 +1,166 @@
import type {ByteBuffer} from "peechy";
type byte = number;
type float = number;
type int = number;
type alphanumeric = string;
type uint = number;
type int8 = number;
type lowp = number;
type int16 = number;
type int32 = number;
type float32 = number;
type uint16 = number;
type uint32 = number;
export enum Loader {
jsx = 1,
js = 2,
ts = 3,
tsx = 4,
css = 5,
file = 6,
json = 7
}
export const LoaderKeys = {
1: "jsx",
jsx: "jsx",
2: "js",
js: "js",
3: "ts",
ts: "ts",
4: "tsx",
tsx: "tsx",
5: "css",
css: "css",
6: "file",
file: "file",
7: "json",
json: "json"
}
export enum JSXRuntime {
automatic = 1,
classic = 2
}
export const JSXRuntimeKeys = {
1: "automatic",
automatic: "automatic",
2: "classic",
classic: "classic"
}
export enum TransformResponseStatus {
success = 1,
fail = 2
}
export const TransformResponseStatusKeys = {
1: "success",
success: "success",
2: "fail",
fail: "fail"
}
export enum MessageKind {
err = 1,
warn = 2,
note = 3,
debug = 4
}
export const MessageKindKeys = {
1: "err",
err: "err",
2: "warn",
warn: "warn",
3: "note",
note: "note",
4: "debug",
debug: "debug"
}
export interface JSX {
factory: string;
runtime: JSXRuntime;
fragment: string;
production: boolean;
import_source: string;
react_fast_refresh: boolean;
loader_keys: string[];
loader_values: Loader[];
}
export interface TransformOptions {
jsx: JSX;
ts: boolean;
base_path: string;
define_keys: string[];
define_values: string[];
}
export interface FileHandle {
path: string;
size: uint;
fd: uint;
}
export interface Transform {
handle?: FileHandle;
path?: string;
contents?: string;
loader?: Loader;
options?: TransformOptions;
}
export interface OutputFile {
data: Uint8Array;
path: string;
}
export interface TransformResponse {
status: TransformResponseStatus;
files: OutputFile[];
errors: Message[];
}
export interface Location {
file: string;
namespace: string;
line: int32;
column: int32;
line_text: string;
suggestion: string;
offset: uint;
}
export interface MessageData {
text?: string;
location?: Location;
}
export interface Message {
kind: MessageKind;
data: MessageData;
notes: MessageData[];
}
export interface Log {
warnings: uint32;
errors: uint32;
msgs: Message[];
}
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
export declare function decodeJSX(buffer: ByteBuffer): JSX;
export declare function encodeTransformOptions(message: TransformOptions, bb: ByteBuffer): void;
export declare function decodeTransformOptions(buffer: ByteBuffer): TransformOptions;
export declare function encodeFileHandle(message: FileHandle, bb: ByteBuffer): void;
export declare function decodeFileHandle(buffer: ByteBuffer): FileHandle;
export declare function encodeTransform(message: Transform, bb: ByteBuffer): void;
export declare function decodeTransform(buffer: ByteBuffer): Transform;
export declare function encodeOutputFile(message: OutputFile, bb: ByteBuffer): void;
export declare function decodeOutputFile(buffer: ByteBuffer): OutputFile;
export declare function encodeTransformResponse(message: TransformResponse, bb: ByteBuffer): void;
export declare function decodeTransformResponse(buffer: ByteBuffer): TransformResponse;
export declare function encodeLocation(message: Location, bb: ByteBuffer): void;
export declare function decodeLocation(buffer: ByteBuffer): Location;
export declare function encodeMessageData(message: MessageData, bb: ByteBuffer): void;
export declare function decodeMessageData(buffer: ByteBuffer): MessageData;
export declare function encodeMessage(message: Message, bb: ByteBuffer): void;
export declare function decodeMessage(buffer: ByteBuffer): Message;
export declare function encodeLog(message: Log, bb: ByteBuffer): void;
export declare function decodeLog(buffer: ByteBuffer): Log;

739
src/api/schema.zig Normal file

@@ -0,0 +1,739 @@
const std = @import("std");
pub const Api = struct {
pub const Loader = enum(u8) {
_none,
/// jsx
jsx,
/// js
js,
/// ts
ts,
/// tsx
tsx,
/// css
css,
/// file
file,
/// json
json,
_,
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
return try std.json.stringify(@tagName(self), opts, o);
}
};
pub const JsxRuntime = enum(u8) {
_none,
/// automatic
automatic,
/// classic
classic,
_,
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
return try std.json.stringify(@tagName(self), opts, o);
}
};
pub const Jsx = struct {
/// factory
factory: []u8,
/// runtime
runtime: JsxRuntime,
/// fragment
fragment: []u8,
/// production
production: bool = false,
/// import_source
import_source: []u8,
/// react_fast_refresh
react_fast_refresh: bool = false,
/// loader_keys
loader_keys: [][]u8,
/// loader_values
loader_values: []Loader,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!Jsx {
var obj = std.mem.zeroes(Jsx);
try update(&obj, allocator, reader);
return obj;
}
pub fn update(result: *Jsx, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
length = try reader.readIntNative(u32);
if (result.factory.len != length) {
result.factory = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.factory);
result.runtime = try reader.readEnum(JsxRuntime, .Little);
length = try reader.readIntNative(u32);
if (result.fragment.len != length) {
result.fragment = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.fragment);
result.production = (try reader.readByte()) == @as(u8, 1);
length = try reader.readIntNative(u32);
if (result.import_source.len != length) {
result.import_source = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.import_source);
result.react_fast_refresh = (try reader.readByte()) == @as(u8, 1);
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.loader_keys.len) {
result.loader_keys = try allocator.alloc([]u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.loader_keys) |content, j| {
if (result.loader_keys[j].len != length) {
result.loader_keys[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.loader_keys[j]);
}
}
length = try reader.readIntNative(u32);
result.loader_values = try allocator.alloc(Loader, length);
{
var j: usize = 0;
while (j < length) : (j += 1) {
result.loader_values[j] = try reader.readEnum(Loader, .Little);
}
}
return;
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
var n: usize = 0;
try writer.writeIntNative(u32, @intCast(u32, result.factory.len));
try writer.writeAll(std.mem.sliceAsBytes(result.factory));
try writer.writeIntNative(@TypeOf(@enumToInt(result.runtime)), @enumToInt(result.runtime));
try writer.writeIntNative(u32, @intCast(u32, result.fragment.len));
try writer.writeAll(std.mem.sliceAsBytes(result.fragment));
try writer.writeByte(@boolToInt(result.production));
try writer.writeIntNative(u32, @intCast(u32, result.import_source.len));
try writer.writeAll(std.mem.sliceAsBytes(result.import_source));
try writer.writeByte(@boolToInt(result.react_fast_refresh));
n = result.loader_keys.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.loader_keys[j].len));
try writer.writeAll(std.mem.sliceAsBytes(result.loader_keys[j]));
}
}
n = result.loader_values.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
try writer.writeByte(@enumToInt(result.loader_values[j]));
}
}
return;
}
};
pub const TransformOptions = struct {
/// jsx
jsx: Jsx,
/// ts
ts: bool = false,
/// base_path
base_path: []u8,
/// define_keys
define_keys: [][]u8,
/// define_values
define_values: [][]u8,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!TransformOptions {
var obj = std.mem.zeroes(TransformOptions);
try update(&obj, allocator, reader);
return obj;
}
pub fn update(result: *TransformOptions, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
result.jsx = try Jsx.decode(allocator, reader);
result.ts = (try reader.readByte()) == @as(u8, 1);
length = try reader.readIntNative(u32);
if (result.base_path.len != length) {
result.base_path = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.base_path);
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.define_keys.len) {
result.define_keys = try allocator.alloc([]u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.define_keys) |content, j| {
if (result.define_keys[j].len != length) {
result.define_keys[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.define_keys[j]);
}
}
{
var array_count = try reader.readIntNative(u32);
if (array_count != result.define_values.len) {
result.define_values = try allocator.alloc([]u8, array_count);
}
length = try reader.readIntNative(u32);
for (result.define_values) |content, j| {
if (result.define_values[j].len != length) {
result.define_values[j] = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.define_values[j]);
}
}
return;
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
var n: usize = 0;
try result.jsx.encode(writer);
try writer.writeByte(@boolToInt(result.ts));
try writer.writeIntNative(u32, @intCast(u32, result.base_path.len));
try writer.writeAll(std.mem.sliceAsBytes(result.base_path));
n = result.define_keys.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.define_keys[j].len));
try writer.writeAll(std.mem.sliceAsBytes(result.define_keys[j]));
}
}
n = result.define_values.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
_ = try writer.writeIntNative(u32, @intCast(u32, result.define_values[j].len));
try writer.writeAll(std.mem.sliceAsBytes(result.define_values[j]));
}
}
return;
}
};
pub const FileHandle = struct {
/// path
path: []u8,
/// size
size: u32 = 0,
/// fd
fd: u32 = 0,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!FileHandle {
var obj = std.mem.zeroes(FileHandle);
try update(&obj, allocator, reader);
return obj;
}
pub fn update(result: *FileHandle, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
length = try reader.readIntNative(u32);
if (result.path.len != length) {
result.path = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.path);
_ = try reader.readAll(std.mem.asBytes(&result.size));
_ = try reader.readAll(std.mem.asBytes(&result.fd));
return;
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
try writer.writeIntNative(u32, @intCast(u32, result.path.len));
try writer.writeAll(std.mem.sliceAsBytes(result.path));
try writer.writeIntNative(u32, result.size);
try writer.writeIntNative(u32, result.fd);
return;
}
};
pub const Transform = struct {
/// handle
handle: ?FileHandle = null,
/// path
path: ?[]u8 = null,
/// contents
contents: ?[]u8 = null,
/// loader
loader: ?Loader = null,
/// options
options: ?TransformOptions = null,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!Transform {
var obj = std.mem.zeroes(Transform);
try update(&obj, allocator, reader);
return obj;
}
pub fn update(result: *Transform, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
while (true) {
const field_type: u8 = try reader.readByte();
switch (field_type) {
0 => {
return;
},
1 => {
result.handle = try FileHandle.decode(allocator, reader);
},
2 => {
length = try reader.readIntNative(u32);
if ((result.path orelse &([_]u8{})).len != length) {
result.path = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.path.?);
},
3 => {
length = try reader.readIntNative(u32);
if ((result.contents orelse &([_]u8{})).len != length) {
result.contents = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.contents.?);
},
4 => {
result.loader = try reader.readEnum(Loader, .Little);
},
5 => {
result.options = try TransformOptions.decode(allocator, reader);
},
else => {
return error.InvalidMessage;
},
}
}
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
if (result.handle) |handle| {
try writer.writeByte(1);
try handle.encode(writer);
}
if (result.path) |path| {
try writer.writeByte(2);
try writer.writeIntNative(u32, @intCast(u32, path.len));
try writer.writeAll(std.mem.sliceAsBytes(path));
}
if (result.contents) |contents| {
try writer.writeByte(3);
try writer.writeIntNative(u32, @intCast(u32, contents.len));
try writer.writeAll(std.mem.sliceAsBytes(contents));
}
if (result.loader) |loader| {
try writer.writeByte(4);
try writer.writeIntNative(@TypeOf(@enumToInt(result.loader orelse unreachable)), @enumToInt(result.loader orelse unreachable));
}
if (result.options) |options| {
try writer.writeByte(5);
try options.encode(writer);
}
try writer.writeByte(0);
return;
}
};
pub const TransformResponseStatus = enum(u32) {
_none,
/// success
success,
/// fail
fail,
_,
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
return try std.json.stringify(@tagName(self), opts, o);
}
};
pub const OutputFile = struct {
/// data
data: []u8,
/// path
path: []u8,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!OutputFile {
var obj = std.mem.zeroes(OutputFile);
try update(&obj, allocator, reader);
return obj;
}
pub fn update(result: *OutputFile, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
_ = try reader.readAll(result.data);
length = try reader.readIntNative(u32);
if (result.path.len != length) {
result.path = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.path);
return;
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
try writer.writeAll(result.data);
try writer.writeIntNative(u32, @intCast(u32, result.path.len));
try writer.writeAll(std.mem.sliceAsBytes(result.path));
return;
}
};
pub const TransformResponse = struct {
/// status
status: TransformResponseStatus,
/// files
files: []OutputFile,
/// errors
errors: []Message,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!TransformResponse {
var obj = std.mem.zeroes(TransformResponse);
try update(&obj, allocator, reader);
return obj;
}
pub fn update(result: *TransformResponse, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
result.status = try reader.readEnum(TransformResponseStatus, .Little);
length = try reader.readIntNative(u32);
result.files = try allocator.alloc(OutputFile, length);
{
var j: usize = 0;
while (j < length) : (j += 1) {
result.files[j] = try OutputFile.decode(allocator, reader);
}
}
length = try reader.readIntNative(u32);
result.errors = try allocator.alloc(Message, length);
{
var j: usize = 0;
while (j < length) : (j += 1) {
result.errors[j] = try Message.decode(allocator, reader);
}
}
return;
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
var n: usize = 0;
try writer.writeIntNative(@TypeOf(@enumToInt(result.status)), @enumToInt(result.status));
n = result.files.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
try result.files[j].encode(writer);
}
}
n = result.errors.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
try result.errors[j].encode(writer);
}
}
return;
}
};
pub const MessageKind = enum(u32) {
_none,
/// err
err,
/// warn
warn,
/// note
note,
/// debug
debug,
_,
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
return try std.json.stringify(@tagName(self), opts, o);
}
};
pub const Location = struct {
/// file
file: []u8,
/// namespace
namespace: []u8,
/// line
line: i32 = 0,
/// column
column: i32 = 0,
/// line_text
line_text: []u8,
/// suggestion
suggestion: []u8,
/// offset
offset: u32 = 0,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!Location {
var obj = std.mem.zeroes(Location);
try update(&obj, allocator, reader);
return obj;
}
pub fn update(result: *Location, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
length = try reader.readIntNative(u32);
if (result.file.len != length) {
result.file = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.file);
length = try reader.readIntNative(u32);
if (result.namespace.len != length) {
result.namespace = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.namespace);
_ = try reader.readAll(std.mem.asBytes(&result.line));
_ = try reader.readAll(std.mem.asBytes(&result.column));
length = try reader.readIntNative(u32);
if (result.line_text.len != length) {
result.line_text = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.line_text);
length = try reader.readIntNative(u32);
if (result.suggestion.len != length) {
result.suggestion = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.suggestion);
_ = try reader.readAll(std.mem.asBytes(&result.offset));
return;
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
try writer.writeIntNative(u32, @intCast(u32, result.file.len));
try writer.writeAll(std.mem.sliceAsBytes(result.file));
try writer.writeIntNative(u32, @intCast(u32, result.namespace.len));
try writer.writeAll(std.mem.sliceAsBytes(result.namespace));
try writer.writeIntNative(i32, result.line);
try writer.writeIntNative(i32, result.column);
try writer.writeIntNative(u32, @intCast(u32, result.line_text.len));
try writer.writeAll(std.mem.sliceAsBytes(result.line_text));
try writer.writeIntNative(u32, @intCast(u32, result.suggestion.len));
try writer.writeAll(std.mem.sliceAsBytes(result.suggestion));
try writer.writeIntNative(u32, result.offset);
return;
}
};
pub const MessageData = struct {
/// text
text: ?[]u8 = null,
/// location
location: ?Location = null,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!MessageData {
var obj = std.mem.zeroes(MessageData);
try update(&obj, allocator, reader);
return obj;
}
pub fn update(result: *MessageData, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
while (true) {
const field_type: u8 = try reader.readByte();
switch (field_type) {
0 => {
return;
},
1 => {
length = try reader.readIntNative(u32);
if ((result.text orelse &([_]u8{})).len != length) {
result.text = try allocator.alloc(u8, length);
}
_ = try reader.readAll(result.text.?);
},
2 => {
result.location = try Location.decode(allocator, reader);
},
else => {
return error.InvalidMessage;
},
}
}
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
if (result.text) |text| {
try writer.writeByte(1);
try writer.writeIntNative(u32, @intCast(u32, text.len));
try writer.writeAll(std.mem.sliceAsBytes(text));
}
if (result.location) |location| {
try writer.writeByte(2);
try location.encode(writer);
}
try writer.writeByte(0);
return;
}
};
pub const Message = struct {
/// kind
kind: MessageKind,
/// data
data: MessageData,
/// notes
notes: []MessageData,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!Message {
var obj = std.mem.zeroes(Message);
try update(&obj, allocator, reader);
return obj;
}
pub fn update(result: *Message, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
result.kind = try reader.readEnum(MessageKind, .Little);
result.data = try MessageData.decode(allocator, reader);
length = try reader.readIntNative(u32);
result.notes = try allocator.alloc(MessageData, length);
{
var j: usize = 0;
while (j < length) : (j += 1) {
result.notes[j] = try MessageData.decode(allocator, reader);
}
}
return;
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
var n: usize = 0;
try writer.writeIntNative(@TypeOf(@enumToInt(result.kind)), @enumToInt(result.kind));
try result.data.encode(writer);
n = result.notes.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
try result.notes[j].encode(writer);
}
}
return;
}
};
pub const Log = struct {
/// warnings
warnings: u32 = 0,
/// errors
errors: u32 = 0,
/// msgs
msgs: []Message,
pub fn decode(allocator: *std.mem.Allocator, reader: anytype) anyerror!Log {
var obj = std.mem.zeroes(Log);
try update(&obj, allocator, reader);
return obj;
}
pub fn update(result: *Log, allocator: *std.mem.Allocator, reader: anytype) anyerror!void {
var length: usize = 0;
_ = try reader.readAll(std.mem.asBytes(&result.warnings));
_ = try reader.readAll(std.mem.asBytes(&result.errors));
length = try reader.readIntNative(u32);
result.msgs = try allocator.alloc(Message, length);
{
var j: usize = 0;
while (j < length) : (j += 1) {
result.msgs[j] = try Message.decode(allocator, reader);
}
}
return;
}
pub fn encode(result: *const @This(), writer: anytype) anyerror!void {
var n: usize = 0;
try writer.writeIntNative(u32, result.warnings);
try writer.writeIntNative(u32, result.errors);
n = result.msgs.len;
_ = try writer.writeIntNative(u32, @intCast(u32, n));
{
var j: usize = 0;
while (j < n) : (j += 1) {
try result.msgs[j].encode(writer);
}
}
return;
}
};
};


@@ -64,10 +64,10 @@ pub const RequireOrImportMeta = struct {
is_wrapper_async: bool = false,
};
pub fn debug(comptime fmt: []const u8, args: anytype) callconv(.Inline) void {
// std.debug.print(fmt, args);
// Output.print(fmt, args);
}
pub fn debugl(
comptime fmt: []const u8,
) callconv(.Inline) void {
// std.debug.print("{s}\n", .{fmt});
// Output.print("{s}\n", .{fmt});
}


@@ -1,4 +1,4 @@
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
// If something is in this list, then a direct identifier expression or property
// access chain matching this will be assumed to have no side effects and will


@@ -5,7 +5,7 @@ const logger = @import("logger.zig");
const js_lexer = @import("js_lexer.zig");
const json_parser = @import("json_parser.zig");
const fs = @import("fs.zig");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
usingnamespace @import("ast/base.zig");
const GlobalDefinesKey = @import("./defines-table.zig").GlobalDefinesKey;
@@ -294,7 +294,7 @@ test "Defines" {
var log = logger.Log.init(alloc.dynamic);
var data = try DefineData.from_input(orig, &log, alloc.dynamic);
var defines = try Define.init(alloc.dynamic, data);
std.debug.print("Time: {d}", .{std.time.nanoTimestamp() - start});
Output.print("Time: {d}", .{std.time.nanoTimestamp() - start});
const node_env_dots = defines.dots.get("NODE_ENV");
expect(node_env_dots != null);
expect(node_env_dots.?.len > 0);

44
src/exports.zig Normal file

@@ -0,0 +1,44 @@
const std = @import("std");
const alloc = @import("alloc.zig");
usingnamespace @import("global.zig");
const Root = @import("main_wasm.zig").Root;
pub extern fn init() void {
alloc.dynamic = std.heap.c_allocator;
alloc.static = std.heap.c_allocator;
}
/// Convert a slice into known memory representation -- enables C ABI
pub const U8Chunk = packed struct {
const Float = @Type(builtin.TypeInfo{ .Float = .{ .bits = 2 * @bitSizeOf(usize) } });
const Abi = if (builtin.arch.isWasm()) Float else U8Chunk;
ptr: [*]u8,
len: usize,
pub fn toSlice(raw: Abi) []u8 {
const self = @bitCast(U8Chunk, raw);
return self.ptr[0..self.len];
}
pub fn fromSlice(slice: []u8) Abi {
const self = U8Chunk{ .ptr = slice.ptr, .len = slice.len };
return @bitCast(Abi, self);
}
pub fn empty() Abi {
return U8Chunk.fromSlice(&[0]u8{});
}
};
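// Usage sketch (illustrative): pack a slice for the C ABI and unpack it again on
// the other side of the boundary, mirroring fromSlice/toSlice above.
//   const abi = U8Chunk.fromSlice(some_bytes); // some_bytes: []u8, hypothetical
//   const roundtripped = U8Chunk.toSlice(abi);
//   std.debug.assert(roundtripped.ptr == some_bytes.ptr and roundtripped.len == some_bytes.len);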
export fn fd_create() ?*Root {
const fd = allocator.create(Root) catch return null;
fd.* = .{};
return fd;
}
export fn fd_destroy(fd: *Root) void {
fd.deinit(allocator);
allocator.destroy(fd);
}

View File

@@ -1,6 +1,6 @@
const std = @import("std");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
const alloc = @import("alloc.zig");
const expect = std.testing.expect;

39
src/global.zig Normal file
View File

@@ -0,0 +1,39 @@
const std = @import("std");
pub usingnamespace @import("strings.zig");
pub const Output = struct {
pub const source = comptime {
if (std.builtin.os.tag == .wasi) {
return @import("./output_wasi.zig");
} else if (std.builtin.target.isWasm()) {
return @import("./output_wasm.zig");
} else {
return @import("./output_native.zig");
}
};
pub fn print(comptime fmt: string, args: anytype) void {
if (comptime std.builtin.target.isWasm()) {
std.fmt.format(source.writer, fmt, args) catch unreachable;
} else {
std.fmt.format(source.writer orelse unreachable, fmt, args) catch unreachable;
}
}
pub fn printError(comptime fmt: string, args: anytype) void {
if (comptime std.builtin.target.isWasm()) {
std.fmt.format(source.writer, fmt, args) catch unreachable;
} else {
std.fmt.format(source.writer orelse unreachable, fmt, args) catch unreachable;
}
}
};
pub const Global = struct {
pub fn panic(comptime fmt: string, args: anytype) noreturn {
if (comptime std.builtin.target.isWasm()) {
@panic(fmt);
} else {
std.debug.panic(fmt, args);
}
}
};
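// Call-site sketch (illustrative; the identifiers in the arguments are hypothetical):
// both helpers route through the per-target "source" selected above, so callers
// never branch on the platform themselves.
//   Output.print("built {s} in {d}ms\n", .{ entry_point, elapsed_ms });
//   Global.panic("unsupported loader: {s}", .{loader});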

View File

@@ -2,7 +2,7 @@ const std = @import("std");
const logger = @import("logger.zig");
const JSXRuntime = @import("options.zig").JSX.Runtime;
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
usingnamespace @import("ast/base.zig");
const ImportRecord = @import("import_record.zig").ImportRecord;
@@ -170,7 +170,7 @@ pub const Binding = struct {
return Expr.alloc(wrapper.allocator, E.Object{ .properties = properties, .is_single_line = b.is_single_line }, loc);
},
else => {
std.debug.panic("Interanl error", .{});
Global.panic("Interanl error", .{});
},
}
}
@@ -3213,7 +3213,7 @@ pub const Scope = struct {
};
pub fn printmem(comptime format: string, args: anytype) void {
// std.debug.print(format, args);
// Output.print(format, args);
}
test "Binding.init" {

View File

@@ -6,7 +6,7 @@ const build_options = @import("build_options");
const js_ast = @import("js_ast.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
const unicode = std.unicode;
@@ -21,7 +21,7 @@ pub const TypescriptStmtKeyword = tables.TypescriptStmtKeyword;
pub const TypeScriptAccessibilityModifier = tables.TypeScriptAccessibilityModifier;
fn notimpl() noreturn {
std.debug.panic("not implemented yet!", .{});
Global.panic("not implemented yet!", .{});
}
pub var emptyJavaScriptString = ([_]u16{0});
@@ -111,7 +111,7 @@ pub const Lexer = struct {
self.log.addErrorFmt(self.source, __loc, self.allocator, format, args) catch unreachable;
self.prev_error_loc = __loc;
var msg = self.log.msgs.items[self.log.msgs.items.len - 1];
msg.formatNoWriter(std.debug.panic);
msg.formatNoWriter(Global.panic);
}
pub fn addRangeError(self: *LexerType, r: logger.Range, comptime format: []const u8, args: anytype, panic: bool) void {
@@ -129,7 +129,7 @@ pub const Lexer = struct {
const writer = stream.writer();
self.log.print(writer) catch unreachable;
std.debug.panic("{s}", .{fixedBuffer[0..stream.pos]});
Global.panic("{s}", .{fixedBuffer[0..stream.pos]});
}
}
@@ -137,7 +137,7 @@ pub const Lexer = struct {
if (@import("builtin").is_test) {
self.did_panic = true;
} else {
std.debug.panic("{s}", .{content});
Global.panic("{s}", .{content});
}
}
@@ -341,9 +341,9 @@ pub const Lexer = struct {
self.log.print(stderr) catch unreachable;
} else {
if (self.token == T.t_identifier or self.token == T.t_string_literal) {
std.debug.print(" {s} ", .{self.raw()});
Output.print(" {s} ", .{self.raw()});
} else {
std.debug.print(" <{s}> ", .{tokenToString.get(self.token)});
Output.print(" <{s}> ", .{tokenToString.get(self.token)});
}
}
}
@@ -2168,9 +2168,9 @@ fn test_lexer(contents: []const u8) Lexer {
fn expectStr(lexer: *Lexer, expected: string, actual: string) void {
if (lexer.log.errors > 0 or lexer.log.warnings > 0) {
std.debug.panic("{s}", .{lexer.log.msgs.items});
Global.panic("{s}", .{lexer.log.msgs.items});
// const msg: logger.Msg = lexer.log.msgs.items[0];
// msg.formatNoWriter(std.debug.panic);
// msg.formatNoWriter(Global.panic);
}
std.testing.expectEqual(lexer.log.errors, 0);
std.testing.expectEqual(lexer.log.warnings, 0);

View File

@@ -9,7 +9,7 @@ pub const js_printer = @import("../js_printer.zig");
pub const renamer = @import("../renamer.zig");
pub const fs = @import("../fs.zig");
pub usingnamespace @import("../strings.zig");
pub usingnamespace @import("../global.zig");
pub usingnamespace @import("../ast/base.zig");
pub usingnamespace js_ast.G;
pub usingnamespace @import("../defines.zig");

View File

@@ -1010,15 +1010,15 @@ pub const StmtsKind = enum {
};
fn notimpl() noreturn {
std.debug.panic("Not implemented yet!!", .{});
Global.panic("Not implemented yet!!", .{});
}
fn lexerpanic() noreturn {
std.debug.panic("LexerPanic", .{});
Global.panic("LexerPanic", .{});
}
fn fail() noreturn {
std.debug.panic("Something went wrong :cry;", .{});
Global.panic("Something went wrong :cry;", .{});
}
const ExprBindingTuple = struct { expr: ?ExprNodeIndex = null, binding: ?Binding = null, override_expr: ?ExprNodeIndex = null };
@@ -2254,7 +2254,7 @@ pub const P = struct {
// Sanity-check that the scopes generated by the first and second passes match
if (order.loc.start != loc.start or order.scope.kind != kind) {
std.debug.print("Expected scope ({s}, {d}) in {s}, found scope ({s}, {d})", .{ kind, loc.start, p.source.path.pretty, order.scope.kind, order.loc.start });
Output.print("Expected scope ({s}, {d}) in {s}, found scope ({s}, {d})", .{ kind, loc.start, p.source.path.pretty, order.scope.kind, order.loc.start });
p.panic("", .{});
}
@@ -4046,7 +4046,7 @@ pub const P = struct {
}
}
}
// std.debug.print("\n\nmVALUE {s}:{s}\n", .{ expr, name });
// Output.print("\n\nmVALUE {s}:{s}\n", .{ expr, name });
p.lexer.expectOrInsertSemicolon();
return p.s(S.SExpr{ .value = expr }, loc);
},
@@ -4204,7 +4204,7 @@ pub const P = struct {
var let_range = p.lexer.range();
var raw = p.lexer.raw();
if (p.lexer.token != .t_identifier or !strings.eql(raw, "let")) {
// std.debug.print("HI", .{});
// Output.print("HI", .{});
return ExprOrLetStmt{ .stmt_or_expr = js_ast.StmtOrExpr{ .expr = p.parseExpr(.lowest) } };
}
@@ -6653,13 +6653,13 @@ pub const P = struct {
var stream = std.io.fixedBufferStream(&fixedBuffer);
p.log.print(stream.writer()) catch unreachable;
std.debug.panic("{s}", .{fixedBuffer});
Global.panic("{s}", .{fixedBuffer});
}
pub fn _parsePrefix(p: *P, level: Level, errors: *DeferredErrors, flags: Expr.EFlags) Expr {
const loc = p.lexer.loc();
const l = @enumToInt(level);
// std.debug.print("Parse Prefix {s}:{s} @{s} ", .{ p.lexer.token, p.lexer.raw(), @tagName(level) });
// Output.print("Parse Prefix {s}:{s} @{s} ", .{ p.lexer.token, p.lexer.raw(), @tagName(level) });
switch (p.lexer.token) {
.t_super => {
@@ -6812,7 +6812,7 @@ pub const P = struct {
_ = p.pushScopeForParsePass(.function_args, loc) catch unreachable;
defer p.popScope();
// std.debug.print("HANDLE START ", .{});
// Output.print("HANDLE START ", .{});
return p.e(p.parseArrowBody(args, p.m(FnOrArrowDataParse{})) catch unreachable, loc);
}
@@ -7763,7 +7763,7 @@ pub const P = struct {
}
},
else => {
std.debug.panic("Unexpected type in export default: {s}", .{s2});
Global.panic("Unexpected type in export default: {s}", .{s2});
},
}
},
@@ -8706,7 +8706,7 @@ pub const P = struct {
var property = e_.properties[i];
if (property.kind != .spread) {
const key = p.visitExpr(property.key orelse std.debug.panic("Expected property key", .{}));
const key = p.visitExpr(property.key orelse Global.panic("Expected property key", .{}));
e_.properties[i].key = key;
// Forbid duplicate "__proto__" properties according to the specification

View File

@@ -203,7 +203,7 @@ fn expectPrinted(t: *Tester, contents: string, expected: string, src: anytype) !
var stream = std.io.fixedBufferStream(&fixedBuffer);
try log.print(stream.writer());
std.debug.print("{s}", .{fixedBuffer});
Output.print("{s}", .{fixedBuffer});
}
var linker = Linker{};
debugl("START AST PRINT");

View File

@@ -8,7 +8,7 @@ const alloc = @import("alloc.zig");
const rename = @import("renamer.zig");
const fs = @import("fs.zig");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
usingnamespace @import("ast/base.zig");
usingnamespace js_ast.G;
@@ -43,7 +43,7 @@ const assert = std.debug.assert;
const Linker = @import("linker.zig").Linker;
fn notimpl() void {
std.debug.panic("Not implemented yet!", .{});
Global.panic("Not implemented yet!", .{});
}
pub const SourceMapChunk = struct {
@@ -83,7 +83,7 @@ pub const Options = struct {
}
};
pub const PrintResult = struct { js: string, source_map: ?SourceMapChunk = null };
pub const PrintResult = struct { js: []u8, source_map: ?SourceMapChunk = null };
// Zig represents booleans in packed structs as 1 bit, with no padding
// This is effectively a bit field
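// For example (illustrative, not taken from this file): four bools in a packed
// struct occupy four bits and can be reinterpreted as an integer of that width.
//   const Flags = packed struct { a: bool, b: bool, c: bool, d: bool };
//   comptime std.debug.assert(@bitSizeOf(Flags) == 4);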
@@ -677,7 +677,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
// First, we will assert to make detecting this case a little clearer for us in development.
if (std.builtin.mode == std.builtin.Mode.Debug) {
std.debug.panic("Internal error: {s} is an external require, which should never happen.", .{record});
Global.panic("Internal error: {s} is an external require, which should never happen.", .{record});
}
p.printSpaceBeforeIdentifier();
@@ -1150,7 +1150,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
if (e.func.name) |sym| {
p.printSymbol(sym.ref orelse std.debug.panic("internal error: expected E.Function's name symbol to have a ref\n{s}", .{e.func}));
p.printSymbol(sym.ref orelse Global.panic("internal error: expected E.Function's name symbol to have a ref\n{s}", .{e.func}));
}
p.printFunc(e.func);
@@ -1168,7 +1168,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printSpaceBeforeIdentifier();
p.print("class");
if (e.class_name) |name| {
p.printSymbol(name.ref orelse std.debug.panic("internal error: expected E.Class's name symbol to have a ref\n{s}", .{e}));
p.printSymbol(name.ref orelse Global.panic("internal error: expected E.Class's name symbol to have a ref\n{s}", .{e}));
p.maybePrintSpace();
}
p.printClass(e.*);
@@ -1596,7 +1596,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
},
else => {
// std.debug.panic("Unexpected expression of type {s}", .{std.meta.activeTag(expr.data});
// Global.panic("Unexpected expression of type {s}", .{std.meta.activeTag(expr.data});
},
}
}
@@ -1946,7 +1946,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print("}");
},
else => {
std.debug.panic("Unexpected binding of type {s}", .{binding});
Global.panic("Unexpected binding of type {s}", .{binding});
},
}
}
@@ -1985,8 +1985,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print("*");
p.printSpace();
}
const name = s.func.name orelse std.debug.panic("Internal error: expected func to have a name ref\n{s}", .{s});
const nameRef = name.ref orelse std.debug.panic("Internal error: expected func to have a name\n{s}", .{s});
const name = s.func.name orelse Global.panic("Internal error: expected func to have a name ref\n{s}", .{s});
const nameRef = name.ref orelse Global.panic("Internal error: expected func to have a name\n{s}", .{s});
p.printSpace();
p.printSymbol(nameRef);
p.printFunc(s.func);
@@ -2035,7 +2035,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printSpace();
}
if (func.func.name) |name| {
p.printSymbol(name.ref orelse std.debug.panic("Internal error: Expected func to have a name ref\n{s}", .{func}));
p.printSymbol(name.ref orelse Global.panic("Internal error: Expected func to have a name ref\n{s}", .{func}));
}
p.printFunc(func.func);
p.printNewline();
@@ -2044,7 +2044,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printSpaceBeforeIdentifier();
if (class.class.class_name) |name| {
p.print("class ");
p.printSymbol(name.ref orelse std.debug.panic("Internal error: Expected class to have a name ref\n{s}", .{class}));
p.printSymbol(name.ref orelse Global.panic("Internal error: Expected class to have a name ref\n{s}", .{class}));
} else {
p.print("class");
}
@@ -2052,7 +2052,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printNewline();
},
else => {
std.debug.panic("Internal error: unexpected export default stmt data {s}", .{s});
Global.panic("Internal error: unexpected export default stmt data {s}", .{s});
},
}
},
@@ -2268,7 +2268,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
},
.s_label => |s| {
p.printIndent();
p.printSymbol(s.name.ref orelse std.debug.panic("Internal error: expected label to have a name {s}", .{s}));
p.printSymbol(s.name.ref orelse Global.panic("Internal error: expected label to have a name {s}", .{s}));
p.print(":");
p.printBody(s.stmt);
},
@@ -2526,7 +2526,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printSemicolonAfterStatement();
},
else => {
std.debug.panic("Unexpected statement of type {s}", .{@TypeOf(stmt)});
Global.panic("Unexpected statement of type {s}", .{@TypeOf(stmt)});
},
}
}
@@ -2554,7 +2554,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
},
else => {
std.debug.panic("Internal error: Unexpected stmt in for loop {s}", .{initSt});
Global.panic("Internal error: Unexpected stmt in for loop {s}", .{initSt});
},
}
}
@@ -2705,7 +2705,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
temp = [_]u8{ '\\', 'u', hex_chars[c >> 12], hex_chars[(c >> 8) & 15], hex_chars[(c >> 4) & 15], hex_chars[c & 15] };
p.print(&temp);
} else {
std.debug.panic("Not implemented yet: unicode escapes in ascii only", .{});
Global.panic("Not implemented yet: unicode escapes in ascii only", .{});
}
continue;
}

View File

@@ -7,7 +7,7 @@ const options = @import("options.zig");
const alloc = @import("alloc.zig");
const fs = @import("fs.zig");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
usingnamespace @import("ast/base.zig");
usingnamespace js_ast.G;
@@ -249,7 +249,7 @@ fn expectPrintedJSON(_contents: string, expected: string) void {
var symbols: SymbolList = &([_][]Symbol{tree.symbols});
var symbol_map = js_ast.Symbol.Map.initList(symbols);
if (log.msgs.items.len > 0) {
std.debug.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text });
Global.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text });
}
var linker = @import("linker.zig").Linker{};

View File

@@ -1,6 +1,6 @@
const std = @import("std");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
const fs = @import("fs.zig");
const unicode = std.unicode;
@@ -122,7 +122,7 @@ pub const Msg = struct {
});
}
pub fn formatNoWriter(msg: *const Msg, comptime formatterFunc: @TypeOf(std.debug.panic)) void {
pub fn formatNoWriter(msg: *const Msg, comptime formatterFunc: @TypeOf(Global.panic)) void {
formatterFunc("\n\n{s}: {s}\n{s}\n{s}:{}:{} ({d})", .{
msg.kind.string(),
msg.data.text,
@@ -486,10 +486,6 @@ test "ErrorPosition" {
var msgs = ArrayList(Msg).init(std.testing.allocator);
var log = Log{ .msgs = msgs };
defer log.msgs.deinit();
var filename = "test.js".*;
var syntax = "for (i".*;
var err = "invalid syntax".*;
var namespace = "file".*;
try log.addMsg(Msg{
.kind = .err,

View File

@@ -10,6 +10,7 @@ const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
usingnamespace @import("global.zig");
const panicky = @import("panic_handler.zig");
const MainPanicHandler = panicky.NewPanicHandler(panicky.default_panic);
@@ -23,7 +24,15 @@ pub fn panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace) nore
}
pub fn main() anyerror!void {
try alloc.setup(std.heap.page_allocator);
// The memory allocator makes a massive difference.
// std.heap.raw_c_allocator and std.heap.c_allocator perform similarly.
// std.heap.GeneralPurposeAllocator makes this about 3x _slower_ than esbuild.
// var root_alloc = std.heap.ArenaAllocator.init(std.heap.raw_c_allocator);
// var root_alloc_ = &root_alloc.allocator;
try alloc.setup(std.heap.c_allocator);
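// For comparison (illustrative): the checked-but-slower setup mentioned above
// would look roughly like this.
//   var gpa = std.heap.GeneralPurposeAllocator(.{}){};
//   try alloc.setup(&gpa.allocator);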
Output.source.Stream = std.io.getStdOut();
Output.source.writer = Output.source.Stream.?.writer();
Output.source.errorWriter = std.io.getStdErr().writer();
var log = logger.Log.init(alloc.dynamic);
var panicker = MainPanicHandler.init(&log);
MainPanicHandler.Singleton = &panicker;
@@ -77,7 +86,7 @@ pub fn main() anyerror!void {
ast = res.ast;
},
else => {
std.debug.panic("Unsupported loader: {s}", .{opts.loader});
Global.panic("Unsupported loader: {s}", .{opts.loader});
},
}

108
src/main_wasi.zig Normal file
View File

@@ -0,0 +1,108 @@
const std = @import("std");
const lex = @import("js_lexer.zig");
const logger = @import("logger.zig");
const alloc = @import("alloc.zig");
const options = @import("options.zig");
const js_parser = @import("js_parser.zig");
const json_parser = @import("json_parser.zig");
const js_printer = @import("js_printer.zig");
const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
const panicky = @import("panic_handler.zig");
const fs = @import("fs.zig");
const MainPanicHandler = panicky.NewPanicHandler(panicky.default_panic);
pub fn panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace) noreturn {
if (MainPanicHandler.Singleton) |singleton| {
MainPanicHandler.handle_panic(msg, error_return_trace);
} else {
panicky.default_panic(msg, error_return_trace);
}
}
// const Alloc = zee.ZeeAllocDefaults.wasm_allocator
pub fn main() anyerror!void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
var allocator = &arena.allocator;
try alloc.setup(allocator);
var log = logger.Log.init(alloc.dynamic);
var panicker = MainPanicHandler.init(&log);
MainPanicHandler.Singleton = &panicker;
const args = try std.process.argsAlloc(alloc.dynamic);
const stdout = std.io.getStdOut();
const stderr = std.io.getStdErr();
if (args.len < 1) {
const len = stderr.write("Pass a file");
return;
}
const absolutePath = args[args.len - 1];
const pathname = fs.PathName.init(absolutePath);
const entryPointName = try alloc.dynamic.alloc(u8, pathname.base.len + pathname.ext.len);
std.mem.copy(u8, entryPointName, pathname.base);
std.mem.copy(u8, entryPointName[pathname.base.len..entryPointName.len], pathname.ext);
const code = try std.io.getStdIn().readToEndAlloc(alloc.dynamic, std.math.maxInt(usize));
const opts = try options.TransformOptions.initUncached(alloc.dynamic, entryPointName, code);
var source = logger.Source.initFile(opts.entry_point, alloc.dynamic);
var ast: js_ast.Ast = undefined;
var raw_defines = RawDefines.init(alloc.static);
try raw_defines.put("process.env.NODE_ENV", "\"development\"");
var user_defines = try DefineData.from_input(raw_defines, &log, alloc.static);
var define = try Define.init(
alloc.static,
user_defines,
);
switch (opts.loader) {
.json => {
var expr = try json_parser.ParseJSON(&source, &log, alloc.dynamic);
var stmt = js_ast.Stmt.alloc(alloc.dynamic, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });
var part = js_ast.Part{
.stmts = &([_]js_ast.Stmt{stmt}),
};
ast = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
},
.jsx, .tsx, .ts, .js => {
var parser = try js_parser.Parser.init(opts, &log, &source, define, alloc.dynamic);
var res = try parser.parse();
ast = res.ast;
},
else => {
Global.panic("Unsupported loader: {s}", .{opts.loader});
},
}
var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
const printed = try js_printer.printAst(
alloc.dynamic,
ast,
js_ast.Symbol.Map.initList(symbols),
&source,
false,
js_printer.Options{ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 } },
&_linker,
);
// if (std.builtin.mode == std.builtin.Mode.Debug) {
// var fixed_buffer = [_]u8{0} ** 512000;
// var buf_stream = std.io.fixedBufferStream(&fixed_buffer);
// try ast.toJSON(alloc.dynamic, stderr.writer());
// }
_ = try stdout.write(printed.js);
}

View File

@@ -11,8 +11,10 @@ const linker = @import("linker.zig");
usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
const panicky = @import("panic_handler.zig");
usingnamespace @import("global.zig");
const fs = @import("fs.zig");
const Schema = @import("api/schema.zig").Api;
const builtin = std.builtin;
const MainPanicHandler = panicky.NewPanicHandler(panicky.default_panic);
pub fn panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace) noreturn {
@@ -23,84 +25,186 @@ pub fn panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace) nore
}
}
pub fn main() anyerror!void {
try alloc.setup(std.heap.page_allocator);
var log = logger.Log.init(alloc.dynamic);
var panicker = MainPanicHandler.init(&log);
MainPanicHandler.Singleton = &panicker;
var default_options = std.mem.zeroes(Schema.TransformOptions);
const args = try std.process.argsAlloc(alloc.dynamic);
const stdout = std.io.getStdOut();
const stderr = std.io.getStdErr();
pub const Uint8Array = packed struct {
pub const Float = @Type(builtin.TypeInfo{ .Float = .{ .bits = 2 * @bitSizeOf(usize) } });
pub const Abi = if (builtin.target.isWasm()) Float else Uint8Array;
if (args.len < 1) {
const len = stderr.write("Pass a file");
return;
ptr: [*]u8,
len: usize,
pub fn toSlice(raw: Abi) []u8 {
const self = @bitCast(Uint8Array, raw);
return self.ptr[0..self.len];
}
const absolutePath = args[args.len - 2];
const pathname = fs.PathName.init(absolutePath);
const entryPointName = try alloc.dynamic.alloc(u8, pathname.base.len + pathname.ext.len);
std.mem.copy(u8, entryPointName, pathname.base);
std.mem.copy(u8, entryPointName[pathname.base.len..entryPointName.len], pathname.ext);
const code = try std.io.getStdIn().readToEndAlloc(alloc.dynamic, 99999999);
const opts = try options.TransformOptions.initUncached(alloc.dynamic, entryPointName, code);
var source = logger.Source.initFile(opts.entry_point, alloc.dynamic);
var ast: js_ast.Ast = undefined;
var raw_defines = RawDefines.init(alloc.static);
try raw_defines.put("process.env.NODE_ENV", "\"development\"");
var user_defines = try DefineData.from_input(raw_defines, &log, alloc.static);
var define = try Define.init(
alloc.static,
user_defines,
);
switch (opts.loader) {
.json => {
var expr = try json_parser.ParseJSON(&source, &log, alloc.dynamic);
var stmt = js_ast.Stmt.alloc(alloc.dynamic, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });
var part = js_ast.Part{
.stmts = &([_]js_ast.Stmt{stmt}),
};
ast = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
},
.jsx, .tsx, .ts, .js => {
var parser = try js_parser.Parser.init(opts, &log, &source, define, alloc.dynamic);
var res = try parser.parse();
ast = res.ast;
},
else => {
std.debug.panic("Unsupported loader: {s}", .{opts.loader});
},
pub fn fromSlice(slice: []u8) Abi {
const self = Uint8Array{ .ptr = slice.ptr, .len = slice.len };
return @bitCast(Abi, self);
}
var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
const printed = try js_printer.printAst(
alloc.dynamic,
ast,
js_ast.Symbol.Map.initList(symbols),
false,
js_printer.Options{ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 } },
&_linker,
);
pub fn empty() Abi {
return Uint8Array.fromSlice(&[0]u8{});
}
// if (std.builtin.mode == std.builtin.Mode.Debug) {
// var fixed_buffer = [_]u8{0} ** 512000;
// var buf_stream = std.io.fixedBufferStream(&fixed_buffer);
pub fn encode(comptime SchemaType: type, obj: SchemaType) !Abi {
var list = std.ArrayList(u8).init(alloc.dynamic);
var writer = list.writer();
try obj.encode(writer);
return Uint8Array.fromSlice(list.toOwnedSlice());
}
// try ast.toJSON(alloc.dynamic, stderr.writer());
// }
pub fn decode(self: Abi, comptime SchemaType: type) !SchemaType {
var buf = Uint8Array.toSlice(self);
var stream = std.io.fixedBufferStream(buf);
return try SchemaType.decode(alloc.dynamic, stream.reader());
}
};
_ = try stdout.write(printed.js);
pub fn constStrToU8(s: string) []u8 {
return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
}
pub const Api = struct {
options: *Schema.TransformOptions = &default_options,
files: std.ArrayList(string),
log: logger.Log,
pub fn transform(self: *Api, request: Schema.Transform) !Schema.TransformResponse {
const opts = try options.TransformOptions.initUncached(alloc.dynamic, request.path.?, request.contents.?);
var source = logger.Source.initFile(opts.entry_point, alloc.dynamic);
var ast: js_ast.Ast = undefined;
var raw_defines = RawDefines.init(alloc.static);
try raw_defines.put("process.env.NODE_ENV", "\"development\"");
var user_defines = try DefineData.from_input(raw_defines, &self.log, alloc.static);
var define = try Define.init(
alloc.static,
user_defines,
);
switch (opts.loader) {
.json => {
var expr = try json_parser.ParseJSON(&source, &self.log, alloc.dynamic);
var stmt = js_ast.Stmt.alloc(alloc.dynamic, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });
var part = js_ast.Part{
.stmts = &([_]js_ast.Stmt{stmt}),
};
ast = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
},
.jsx, .tsx, .ts, .js => {
var parser = try js_parser.Parser.init(opts, &self.log, &source, define, alloc.dynamic);
var res = try parser.parse();
ast = res.ast;
},
else => {
Global.panic("Unsupported loader: {s}", .{opts.loader});
},
}
var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
const printed = try js_printer.printAst(
alloc.dynamic,
ast,
js_ast.Symbol.Map.initList(symbols),
&source,
false,
js_printer.Options{ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 } },
&_linker,
);
var output_files = try alloc.dynamic.alloc(Schema.OutputFile, 1);
var _data = printed.js[0..printed.js.len];
var _path = constStrToU8(source.path.text);
output_files[0] = Schema.OutputFile{ .data = _data, .path = _path };
var resp = std.mem.zeroes(Schema.TransformResponse);
resp.status = .success;
resp.files = output_files;
return resp;
// var source = logger.Source.initFile(file: fs.File, allocator: *std.mem.Allocator)
}
};
pub const Exports = struct {
fn init() callconv(.C) u8 {
if (alloc.needs_setup) {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
var allocator = &arena.allocator;
alloc.setup(allocator) catch return 0;
}
var _api = alloc.static.create(Api) catch return 0;
_api.* = Api{ .files = std.ArrayList(string).init(alloc.dynamic), .log = logger.Log.init(alloc.dynamic) };
api = _api;
return 1;
}
fn transform(abi: Uint8Array.Abi) callconv(.C) Uint8Array.Abi {
const req: Schema.Transform = Uint8Array.decode(abi, Schema.Transform) catch return Uint8Array.empty();
alloc.dynamic.free(Uint8Array.toSlice(abi));
const resp = api.?.transform(req) catch return Uint8Array.empty();
return Uint8Array.encode(Schema.TransformResponse, resp) catch return Uint8Array.empty();
}
fn malloc(size: usize) callconv(.C) ?*c_void {
if (size == 0) {
return null;
}
//const result = alloc.dynamic.alloc(u8, size) catch return null;
const result = alloc.dynamic.allocFn(alloc.dynamic, size, 1, 1, 0) catch return null;
return result.ptr;
}
fn calloc(num_elements: usize, element_size: usize) callconv(.C) ?*c_void {
const size = num_elements *% element_size;
const c_ptr = @call(.{ .modifier = .never_inline }, malloc, .{size});
if (c_ptr) |ptr| {
const p = @ptrCast([*]u8, ptr);
@memset(p, 0, size);
}
return c_ptr;
}
fn realloc(c_ptr: ?*c_void, new_size: usize) callconv(.C) ?*c_void {
if (new_size == 0) {
@call(.{ .modifier = .never_inline }, free, .{c_ptr});
return null;
} else if (c_ptr) |ptr| {
// Use a synthetic slice
const p = @ptrCast([*]u8, ptr);
const result = alloc.dynamic.realloc(p[0..1], new_size) catch return null;
return @ptrCast(*c_void, result.ptr);
} else {
return @call(.{ .modifier = .never_inline }, malloc, .{new_size});
}
}
fn free(c_ptr: ?*c_void) callconv(.C) void {
if (c_ptr) |ptr| {
// Use a synthetic slice. zee_alloc will free via corresponding metadata.
const p = @ptrCast([*]u8, ptr);
//alloc.dynamic.free(p[0..1]);
_ = alloc.dynamic.resizeFn(alloc.dynamic, p[0..1], 0, 0, 0, 0) catch unreachable;
}
}
};
comptime {
@export(Exports.init, .{ .name = "init", .linkage = .Strong });
@export(Exports.transform, .{ .name = "transform", .linkage = .Strong });
@export(Exports.malloc, .{ .name = "malloc", .linkage = .Strong });
@export(Exports.calloc, .{ .name = "calloc", .linkage = .Strong });
@export(Exports.realloc, .{ .name = "realloc", .linkage = .Strong });
@export(Exports.free, .{ .name = "free", .linkage = .Strong });
}
var api: ?*Api = null;
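// Expected host-side call sequence for these exports (illustrative):
//   1. init() once, to set up the allocator and the Api singleton
//   2. malloc() a buffer and copy an encoded Schema.Transform into it
//   3. transform() with the pointer/length packed as Uint8Array.Abi
//   4. decode the returned bytes as Schema.TransformResponse, then free() them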

View File

@@ -3,7 +3,7 @@ const log = @import("logger.zig");
const fs = @import("fs.zig");
const alloc = @import("alloc.zig");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
const assert = std.debug.assert;

5
src/output_native.zig Normal file
View File

@@ -0,0 +1,5 @@
const std = @import("std");
pub var Stream: ?std.fs.File = null;
pub var writer: ?std.fs.File.Writer = null;
pub var errorWriter: ?std.fs.File.Writer = null;

5
src/output_wasi.zig Normal file
View File

@@ -0,0 +1,5 @@
const std = @import("std");
pub var Stream: ?std.fs.File = null;
pub var writer: ?std.fs.File.Writer = null;
pub var errorWriter: ?std.fs.File.Writer = null;

5
src/output_wasm.zig Normal file
View File

@@ -0,0 +1,5 @@
const std = @import("std");
pub var out_buffer = [_]u8{0} ** 1024;
pub var Stream = std.io.fixedBufferStream(&out_buffer);
pub var writer = Stream.writer();

View File

@@ -1,5 +1,5 @@
const js_ast = @import("js_ast.zig");
usingnamespace @import("strings.zig");
usingnamespace @import("global.zig");
const std = @import("std");
const logger = @import("logger.zig");
@@ -20,7 +20,7 @@ pub const Renamer = struct {
if (renamer.symbols.get(resolved)) |symbol| {
return symbol.original_name;
} else {
std.debug.panic("Invalid symbol {s}", .{ref});
Global.panic("Invalid symbol {s}", .{ref});
}
}
};

View File

@@ -80,6 +80,8 @@ pub fn eql(self: string, other: anytype) bool {
}
return true;
}
// I have not actually verified that this makes it faster
// It's probably like 0.0001ms faster
pub fn eqlComptime(self: string, comptime alt: string) bool {
comptime var matcher_size: usize = 0;
@@ -346,11 +348,6 @@ pub fn ExactSizeMatcher(comptime max_bytes: usize) type {
return std.mem.readIntNative(T, &tmp);
}
fn hashNoCheck(str: anytype) T {
var tmp = [_]u8{0} ** max_bytes;
std.mem.copy(u8, &tmp, str[0..str.len]);
return std.mem.readIntNative(T, &tmp);
}
fn hashUnsafe(str: anytype) T {
var tmp = [_]u8{0} ** max_bytes;
std.mem.copy(u8, &tmp, str[0..str.len]);

View File

@@ -112,7 +112,7 @@ pub const MutableString = struct {
return self.list.toOwnedSlice(self.allocator);
}
pub fn toOwnedSliceLeaky(self: *MutableString) string {
pub fn toOwnedSliceLeaky(self: *MutableString) []u8 {
return self.list.items;
}

View File

@@ -1 +0,0 @@
function hi() {}

443
src/test/fixtures/simple-150x.jsx vendored Normal file
View File

@@ -0,0 +1,443 @@
import { Link } from "../routes";
import Head from "../components/head";
import Nav from "../components/nav";
import withRedux from "next-redux-wrapper";
import Header from "../components/Header";
import Button from "../components/Button";
import cookies from "next-cookies";
import Text from "../components/Text";
import _ from "lodash";
import { updateEntities, setCurrentUser, initStore } from "../redux/store";
import { getFeaturedProfiles, getCurrentUser } from "../api";
import { bindActionCreators } from "redux";
import { Router } from "../routes";
import PageFooter from "../components/PageFooter";
import withLogin from "../lib/withLogin";
import qs from "qs";
import LazyLoad from "react-lazyload";
import { buildImgSrcSet } from "../lib/imgUri";
import { buildProfileURL } from "../lib/routeHelpers";
import LoginGate, { LOGIN_STATUSES } from "../components/LoginGate";
import Divider from "../components/Divider";
import { SPACING } from "../helpers/styles";
(function () {
const FeaturedProfile = ({ profile }) => {
return (
<Link route={buildProfileURL(profile.id)}>
<a className="Profile">
<img
src={_.first(profile.photos)}
srcSet={buildImgSrcSet(_.first(profile.photos), 250)}
/>
<div className="Text">
<div className="Title">
<Text
font="sans-serif"
lineHeight="20px"
weight="semiBold"
size="18px"
color="#000"
>
{profile.name}
</Text>
</div>
<div className="Tagline">
<Text size="14px">{(profile.tagline || "").substr(0, 100)}</Text>
</div>
</div>
<style jsx>{`
.Profile {
background-color: #ffffff;
cursor: pointer;
text-decoration: none;
text-align: left;
width: 100%;
height: 100%;
border-radius: 6px;
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-direction: column;
}
.Text {
flex: 1;
}
.Profile:hover img {
opacity: 0.85;
}
.Title {
margin-top: 1rem;
margin-bottom: 0.5rem;
}
.Tagline {
margin-bottom: 1.5rem;
}
img {
object-fit: cover;
flex: 0 0 250px;
flex-shrink: 0;
display: flex;
width: 250px;
height: 250px;
opacity: 1;
transition: opacity 0.1s linear;
}
@media (max-width: 500px) {
.Profile {
margin-bottom: 2em;
}
}
`}</style>
</a>
</Link>
);
};
class SignupForm extends React.Component {
constructor(props) {
super(props);
this.state = {
email: this.props.email || "",
};
}
setEmail = (evt) => this.setState({ email: evt.target.value });
componentDidMount() {
Router.prefetchRoute(`/sign-up/verify`);
}
handleSubmit = (evt) => {
evt.preventDefault();
Router.pushRoute(
`/sign-up/verify?${qs.stringify({ email: this.state.email })}`
);
};
render() {
return (
<form onSubmit={this.handleSubmit}>
<input
type="email"
name="email"
autoComplete="email"
onChange={this.setEmail}
placeholder="Your email"
value={this.state.email}
/>
<Button componentType="button" inline>
CREATE MY PAGE
</Button>
<style jsx>{`
form {
display: flex;
}
input {
font-size: 14px;
padding: 14px 22px;
border-radius: 33px;
border-top-right-radius: 0;
border-bottom-right-radius: 0;
border: 1px solid #bababa;
border-right: 0px;
line-height: 18px;
color: #000;
outline: none;
width: auto;
display: flex;
flex: 1;
}
input::-webkit-input-placeholder {
color: #c5cbd4;
}
input:focus {
border-color: #b0b0b0;
}
`}</style>
</form>
);
}
}
class Homepage extends React.Component {
constructor(props) {
super(props);
this.state = {
isLoadingProfiles: true,
profiles: [],
};
}
static async getInitialProps(ctx) {
if (ctx.isServer && ctx.req.path === "/") {
const { currentUserId } = cookies(ctx);
if (currentUserId) {
ctx.res.writeHead(302, {
Location: `${process.env.DOMAIN}/welcome`,
});
ctx.res.end();
ctx.res.finished = true;
}
}
}
async componentDidMount() {
const profileResponse = await getFeaturedProfiles();
this.props.updateEntities(profileResponse.body);
this.setState({
isLoadingProfiles: false,
profiles: profileResponse.body.data,
});
Router.prefetchRoute(`/lucy`);
}
render() {
return (
<div>
<Head
title="Apply to Date your own game of The Bachelor(ette)"
url={`${process.env.DOMAIN}/`}
disableGoogle={false}
/>
<Header />
<article>
<main>
<div className="Copy">
<img
className="Logo Logo-Home"
src="/static/animatedlogo.gif"
/>
<div className="Copy-title">
<Text
font="serif"
size="36px"
lineHeight="44px"
weight="bold"
>
Your own game of The Bachelor(ette)
</Text>
</div>
<div className="Copy-body">
<Text size="16px" lineHeight="24px" font="sans-serif">
Create a page where people apply to go on a date with you.
You pick the winners.
</Text>
</div>
{!this.props.currentUserId && <SignupForm />}
<div className="AppStoreContainer">
<Divider height={`${SPACING.normal}px`} color="transparent" />
<a
className="AppStore AppStore--ios"
target="_blank"
href="https://itunes.apple.com/us/app/apply-to-date/id1357419725?mt=8"
>
<img src="https://devimages-cdn.apple.com/app-store/marketing/guidelines/images/badge-download-on-the-app-store.svg" />
</a>
<a
target="_blank"
className="AppStore AppStore--android"
href="https://play.google.com/store/apps/details?id=com.shipfirstlabs.applytodate&utm_source=homepage&pcampaignid=MKT-Other-global-all-co-prtnr-py-PartBadge-Mar2515-1"
>
<img
alt="Get it on Google Play"
src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
/>
</a>
</div>
</div>
</main>
</article>
<footer>
<div className="divider" />
<Text size="36px" font="sans-serif" color="#000">
Featured pages
</Text>
<div className="FeaturedProfiles-wrapper">
{this.state.isLoadingProfiles && <div className="Spinner" />}
<div className="FeaturedProfiles">
{!_.isEmpty(this.state.profiles) &&
this.state.profiles.map((profile) => (
<FeaturedProfile key={profile.id} profile={profile} />
))}
</div>
</div>
</footer>
<article>
<PageFooter center />
</article>
<style jsx>{`
article {
max-width: 710px;
margin-left: auto;
margin-right: auto;
padding-left: 14px;
padding-right: 14px;
overflow-x: hidden;
}
main {
display: flex;
margin-top: 6rem;
margin-bottom: 6rem;
justify-content: center;
}
footer {
display: flex;
flex-direction: column;
text-align: center;
overflow-x: hidden;
}
.divider {
height: 2px;
width: 269px;
margin-bottom: 6rem;
margin-left: auto;
margin-right: auto;
background-color: #0aca9b;
}
.Logo-Home {
margin-left: auto;
margin-right: auto;
width: 97px;
height: 152.02px;
margin-bottom: 28px;
}
.Copy {
max-width: 710px;
margin: 0 auto;
text-align: center;
}
.Copy-body {
margin-top: 1rem;
margin-bottom: 2rem;
font-weight: 200;
}
.FeaturedProfiles-wrapper {
padding-top: 4rem;
padding-bottom: 6rem;
padding-left: 28px;
padding-right: 28px;
overflow-x: auto;
width: 100vw;
}
.Spinner {
display: flex;
content: "";
margin: 84px auto;
height: 28px;
width: 28px;
animation: rotate 0.8s infinite linear;
border: 4px solid #4be1ab;
border-right-color: transparent;
border-radius: 50%;
}
.AppStoreContainer {
display: flex;
justify-content: center;
align-items: center;
}
.AppStore--ios img {
width: 180px;
}
.AppStore--android img {
width: 230px;
}
@keyframes rotate {
0% {
transform: rotate(0deg);
}
100% {
transform: rotate(360deg);
}
}
.FeaturedProfiles {
display: grid;
grid-column-gap: 2rem;
grid-row-gap: 2rem;
text-align: center;
justify-content: center;
margin-left: auto;
margin-right: auto;
grid-template-columns: 250px 250px 250px 250px;
}
@media (max-width: 1100px) {
.FeaturedProfiles {
grid-template-columns: 250px 250px 250px;
}
}
@media (max-width: 900px) {
.FeaturedProfiles {
grid-template-columns: 250px 250px;
}
}
@media (max-width: 554px) {
.FeaturedProfiles-wrapper {
padding-left: 14px;
padding-right: 14px;
}
.AppStoreContainer {
flex-direction: column;
}
.FeaturedProfiles {
grid-auto-flow: row dense;
grid-auto-rows: auto;
grid-template-columns: 250px;
grid-template-rows: 1fr;
justify-content: center;
}
}
`}</style>
</div>
);
}
}
const HomepageWithStore = withRedux(initStore, null, (dispatch) =>
bindActionCreators({ updateEntities, setCurrentUser }, dispatch)
)(LoginGate(Homepage));
})();

663
src/zee_alloc.zig Normal file
View File

@@ -0,0 +1,663 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
pub const Config = struct {
/// ZeeAlloc will request a multiple of `slab_size` from the backing allocator.
/// **Must** be a power of two.
slab_size: usize = std.math.max(std.mem.page_size, 65536), // 64K ought to be enough for everybody
/// **Must** be a power of two.
min_element_size: usize = 4,
fn maxElementSize(conf: Config) usize {
// Scientifically derived value
return conf.slab_size / 4;
}
};
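// Instantiation sketch (illustrative): the defaults below are usually what you
// want, but a custom configuration is just another comptime parameter. Both
// fields must stay powers of two.
//   const BigSlabAlloc = ZeeAlloc(Config{ .slab_size = 131072, .min_element_size = 8 });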
pub const ZeeAllocDefaults = ZeeAlloc(Config{});
pub fn ZeeAlloc(comptime conf: Config) type {
return struct {
const Self = @This();
const min_shift_size = unsafeLog2(usize, conf.min_element_size);
const max_shift_size = unsafeLog2(usize, conf.maxElementSize());
const total_slabs = max_shift_size - min_shift_size + 1;
/// The definitive™ way of using `ZeeAlloc`
pub const wasm_allocator = &_wasm.allocator;
var _wasm = init(&wasm_page_allocator);
jumbo: ?*Slab = null,
slabs: [total_slabs]?*Slab = [_]?*Slab{null} ** total_slabs,
backing_allocator: *std.mem.Allocator,
allocator: Allocator = Allocator{
.allocFn = alloc,
.resizeFn = resize,
},
const Slab = extern struct {
const header_size = 2 * @sizeOf(usize);
const payload_alignment = header_size;
next: ?*Slab align(conf.slab_size),
element_size: usize,
pad: [conf.slab_size - header_size]u8 align(payload_alignment),
fn init(element_size: usize) Slab {
var result: Slab = undefined;
result.reset(element_size);
return result;
}
fn reset(self: *Slab, element_size: usize) void {
self.next = null;
self.element_size = element_size;
const blocks = self.freeBlocks();
for (blocks) |*block| {
block.* = std.math.maxInt(u64);
}
const remaining_bits = @truncate(u6, (self.elementCount() - self.dataOffset()) % 64);
// TODO: detect overflow
blocks[blocks.len - 1] = (@as(u64, 1) << remaining_bits) - 1;
}
fn fromMemPtr(ptr: [*]u8) *Slab {
const addr = std.mem.alignBackward(@ptrToInt(ptr), conf.slab_size);
return @intToPtr(*Slab, addr);
}
const detached_signal = @intToPtr(*align(1) Slab, 0xaaaa);
fn markDetached(self: *Slab) void {
// Salt the earth
const raw_next = @ptrCast(*usize, &self.next);
raw_next.* = @ptrToInt(detached_signal);
}
fn isDetached(self: Slab) bool {
return self.next == detached_signal;
}
fn freeBlocks(self: *Slab) []u64 {
const count = divCeil(usize, self.elementCount(), 64);
const ptr = @ptrCast([*]u64, &self.pad);
return ptr[0..count];
}
fn totalFree(self: *Slab) usize {
var i: usize = 0;
for (self.freeBlocks()) |block| {
i += @popCount(u64, block);
}
return i;
}
const UsizeShift = std.meta.Int(.unsigned, @bitSizeOf(std.math.Log2Int(usize)) - 1);
fn elementSizeShift(self: Slab) UsizeShift {
return @truncate(UsizeShift, @ctz(usize, self.element_size));
}
fn elementCount(self: Slab) usize {
return conf.slab_size >> self.elementSizeShift();
}
fn dataOffset(self: Slab) usize {
const BITS_PER_BYTE = 8;
return 1 + ((conf.slab_size / BITS_PER_BYTE) >> self.elementSizeShift() >> self.elementSizeShift());
}
fn elementAt(self: *Slab, idx: usize) []u8 {
std.debug.assert(idx >= self.dataOffset());
std.debug.assert(idx < self.elementCount());
const bytes = std.mem.asBytes(self);
return bytes[idx << self.elementSizeShift() ..][0..self.element_size];
}
fn elementIdx(self: *Slab, element: []u8) usize {
std.debug.assert(element.len <= self.element_size);
const diff = @ptrToInt(element.ptr) - @ptrToInt(self);
std.debug.assert(diff % self.element_size == 0);
return diff >> self.elementSizeShift();
}
fn alloc(self: *Slab) ![]u8 {
for (self.freeBlocks()) |*block, i| {
const bit = @ctz(u64, block.*);
if (bit != 64) {
const index = 64 * i + bit;
const mask = @as(u64, 1) << @intCast(u6, bit);
block.* &= ~mask;
return self.elementAt(index + self.dataOffset());
}
}
return error.OutOfMemory;
}
fn free(self: *Slab, element: []u8) void {
const index = self.elementIdx(element) - self.dataOffset();
const block = &self.freeBlocks()[index / 64];
const mask = @as(u64, 1) << @truncate(u6, index);
std.debug.assert(mask & block.* == 0);
block.* |= mask;
}
};
pub fn init(allocator: *std.mem.Allocator) Self {
return .{ .backing_allocator = allocator };
}
pub fn deinit(self: *Self) void {
{
var iter = self.jumbo;
while (iter) |node| {
iter = node.next;
const bytes = @ptrCast([*]u8, node);
self.backing_allocator.free(bytes[0..node.element_size]);
}
}
for (self.slabs) |root| {
var iter = root;
while (iter) |node| {
iter = node.next;
self.backing_allocator.destroy(node);
}
}
self.* = undefined;
}
fn isJumbo(value: usize) bool {
return value > conf.slab_size / 4;
}
fn padToSize(memsize: usize) usize {
if (isJumbo(memsize)) {
return std.mem.alignForward(memsize + Slab.header_size, conf.slab_size);
} else {
return std.math.max(conf.min_element_size, ceilPowerOfTwo(usize, memsize));
}
}
fn unsafeLog2(comptime T: type, val: T) T {
std.debug.assert(ceilPowerOfTwo(T, val) == val);
return @ctz(T, val);
}
fn findSlabIndex(padded_size: usize) usize {
return unsafeLog2(usize, padded_size) - min_shift_size;
}
fn allocJumbo(self: *Self, padded_size: usize, ptr_align: usize) ![*]u8 {
if (ptr_align > Slab.payload_alignment) {
return error.OutOfMemory;
}
const slab: *Slab = blk: {
var prev = @ptrCast(*align(@alignOf(Self)) Slab, self);
while (prev.next) |curr| : (prev = curr) {
if (curr.element_size == padded_size) {
prev.next = curr.next;
break :blk curr;
}
}
const new_frame = try self.backing_allocator.allocAdvanced(u8, conf.slab_size, padded_size, .exact);
const synth_slab = @ptrCast(*Slab, new_frame.ptr);
synth_slab.element_size = padded_size;
break :blk synth_slab;
};
slab.markDetached();
return @ptrCast([*]u8, &slab.pad);
}
fn allocSlab(self: *Self, element_size: usize, ptr_align: usize) ![*]u8 {
if (ptr_align > element_size) {
return error.OutOfMemory;
}
const idx = findSlabIndex(element_size);
const slab = self.slabs[idx] orelse blk: {
const new_slab = try self.backing_allocator.create(Slab);
new_slab.reset(element_size);
self.slabs[idx] = new_slab;
break :blk new_slab;
};
const result = slab.alloc() catch unreachable;
if (slab.totalFree() == 0) {
self.slabs[idx] = slab.next;
slab.markDetached();
}
return result.ptr;
}
fn alloc(allocator: *Allocator, n: usize, ptr_align: u29, len_align: u29, ret_addr: usize) Allocator.Error![]u8 {
const self = @fieldParentPtr(Self, "allocator", allocator);
const padded_size = padToSize(n);
const ptr: [*]u8 = if (isJumbo(n))
try self.allocJumbo(padded_size, ptr_align)
else
try self.allocSlab(padded_size, ptr_align);
return ptr[0..std.mem.alignAllocLen(padded_size, n, len_align)];
}
fn resize(allocator: *Allocator, buf: []u8, buf_align: u29, new_size: usize, len_align: u29, ret_addr: usize) Allocator.Error!usize {
const self = @fieldParentPtr(Self, "allocator", allocator);
const slab = Slab.fromMemPtr(buf.ptr);
if (new_size == 0) {
if (isJumbo(slab.element_size)) {
std.debug.assert(slab.isDetached());
slab.next = self.jumbo;
self.jumbo = slab;
} else {
slab.free(buf);
if (slab.isDetached()) {
const idx = findSlabIndex(slab.element_size);
slab.next = self.slabs[idx];
self.slabs[idx] = slab;
}
}
return 0;
}
const padded_new_size = padToSize(new_size);
if (padded_new_size > slab.element_size) {
return error.OutOfMemory;
}
return std.mem.alignAllocLen(padded_new_size, new_size, len_align);
}
};
}
var wasm_page_allocator = init: {
if (!std.builtin.target.isWasm()) {
@compileError("wasm allocator is only available for wasm32 arch");
}
// std.heap.WasmPageAllocator is designed for reusing pages
// We never free, so this lets us stay super small
const WasmPageAllocator = struct {
fn alloc(allocator: *Allocator, n: usize, alignment: u29, len_align: u29, ret_addr: usize) Allocator.Error![]u8 {
const is_debug = std.builtin.mode == .Debug;
@setRuntimeSafety(is_debug);
std.debug.assert(n % std.mem.page_size == 0); // Should only be allocating page size chunks
std.debug.assert(alignment % std.mem.page_size == 0); // Should only align to page_size increments
const requested_page_count = @intCast(u32, n / std.mem.page_size);
const prev_page_count = @wasmMemoryGrow(0, requested_page_count);
if (prev_page_count < 0) {
return error.OutOfMemory;
}
const start_ptr = @intToPtr([*]u8, @intCast(usize, prev_page_count) * std.mem.page_size);
return start_ptr[0..n];
}
};
break :init Allocator{
.allocFn = WasmPageAllocator.alloc,
.resizeFn = undefined, // Shouldn't be shrinking / freeing
};
};
pub const ExportC = struct {
allocator: *std.mem.Allocator,
malloc: bool = true,
free: bool = true,
calloc: bool = false,
realloc: bool = false,
pub fn run(comptime conf: ExportC) void {
const Funcs = struct {
fn malloc(size: usize) callconv(.C) ?*c_void {
if (size == 0) {
return null;
}
//const result = conf.allocator.alloc(u8, size) catch return null;
const result = conf.allocator.allocFn(conf.allocator, size, 1, 1, 0) catch return null;
return result.ptr;
}
fn calloc(num_elements: usize, element_size: usize) callconv(.C) ?*c_void {
const size = num_elements *% element_size;
const c_ptr = @call(.{ .modifier = .never_inline }, malloc, .{size});
if (c_ptr) |ptr| {
const p = @ptrCast([*]u8, ptr);
@memset(p, 0, size);
}
return c_ptr;
}
fn realloc(c_ptr: ?*c_void, new_size: usize) callconv(.C) ?*c_void {
if (new_size == 0) {
@call(.{ .modifier = .never_inline }, free, .{c_ptr});
return null;
} else if (c_ptr) |ptr| {
// Use a synthetic slice
const p = @ptrCast([*]u8, ptr);
const result = conf.allocator.realloc(p[0..1], new_size) catch return null;
return @ptrCast(*c_void, result.ptr);
} else {
return @call(.{ .modifier = .never_inline }, malloc, .{new_size});
}
}
fn free(c_ptr: ?*c_void) callconv(.C) void {
if (c_ptr) |ptr| {
// Use a synthetic slice. zee_alloc will free via corresponding metadata.
const p = @ptrCast([*]u8, ptr);
//conf.allocator.free(p[0..1]);
_ = conf.allocator.resizeFn(conf.allocator, p[0..1], 0, 0, 0, 0) catch unreachable;
}
}
};
if (conf.malloc) {
@export(Funcs.malloc, .{ .name = "malloc" });
}
if (conf.calloc) {
@export(Funcs.calloc, .{ .name = "calloc" });
}
if (conf.realloc) {
@export(Funcs.realloc, .{ .name = "realloc" });
}
if (conf.free) {
@export(Funcs.free, .{ .name = "free" });
}
}
};
fn divCeil(comptime T: type, numerator: T, denominator: T) T {
return (numerator + denominator - 1) / denominator;
}
// https://github.com/ziglang/zig/issues/2426
fn ceilPowerOfTwo(comptime T: type, value: T) T {
std.debug.assert(value != 0);
const Shift = comptime std.math.Log2Int(T);
return @as(T, 1) << @intCast(Shift, @bitSizeOf(T) - @clz(T, value - 1));
}
test "divCeil" {
std.testing.expectEqual(@as(u32, 0), divCeil(u32, 0, 64));
std.testing.expectEqual(@as(u32, 1), divCeil(u32, 1, 64));
std.testing.expectEqual(@as(u32, 1), divCeil(u32, 64, 64));
std.testing.expectEqual(@as(u32, 2), divCeil(u32, 65, 64));
}
test "Slab.init" {
{
const slab = ZeeAllocDefaults.Slab.init(16384);
std.testing.expectEqual(@as(usize, 16384), slab.element_size);
std.testing.expectEqual(@as(?*ZeeAllocDefaults.Slab, null), slab.next);
const raw_ptr = @ptrCast(*const u64, &slab.pad);
std.testing.expectEqual((@as(u64, 1) << 3) - 1, raw_ptr.*);
}
{
const slab = ZeeAllocDefaults.Slab.init(2048);
std.testing.expectEqual(@as(usize, 2048), slab.element_size);
std.testing.expectEqual(@as(?*ZeeAllocDefaults.Slab, null), slab.next);
const raw_ptr = @ptrCast(*const u64, &slab.pad);
std.testing.expectEqual((@as(u64, 1) << 31) - 1, raw_ptr.*);
}
const u64_max: u64 = std.math.maxInt(u64);
{
const slab = ZeeAllocDefaults.Slab.init(256);
std.testing.expectEqual(@as(usize, 256), slab.element_size);
std.testing.expectEqual(@as(?*ZeeAllocDefaults.Slab, null), slab.next);
const raw_ptr = @ptrCast([*]const u64, &slab.pad);
std.testing.expectEqual(u64_max, raw_ptr[0]);
std.testing.expectEqual(u64_max, raw_ptr[1]);
std.testing.expectEqual(u64_max, raw_ptr[2]);
std.testing.expectEqual((@as(u64, 1) << 63) - 1, raw_ptr[3]);
}
}
test "Slab.elementAt" {
{
var slab = ZeeAllocDefaults.Slab.init(16384);
var element = slab.elementAt(1);
std.testing.expectEqual(slab.element_size, element.len);
std.testing.expectEqual(1 * slab.element_size, @ptrToInt(element.ptr) - @ptrToInt(&slab));
element = slab.elementAt(2);
std.testing.expectEqual(slab.element_size, element.len);
std.testing.expectEqual(2 * slab.element_size, @ptrToInt(element.ptr) - @ptrToInt(&slab));
element = slab.elementAt(3);
std.testing.expectEqual(slab.element_size, element.len);
std.testing.expectEqual(3 * slab.element_size, @ptrToInt(element.ptr) - @ptrToInt(&slab));
}
{
var slab = ZeeAllocDefaults.Slab.init(128);
var element = slab.elementAt(1);
std.testing.expectEqual(slab.element_size, element.len);
std.testing.expectEqual(1 * slab.element_size, @ptrToInt(element.ptr) - @ptrToInt(&slab));
element = slab.elementAt(2);
std.testing.expectEqual(slab.element_size, element.len);
std.testing.expectEqual(2 * slab.element_size, @ptrToInt(element.ptr) - @ptrToInt(&slab));
element = slab.elementAt(3);
std.testing.expectEqual(slab.element_size, element.len);
std.testing.expectEqual(3 * slab.element_size, @ptrToInt(element.ptr) - @ptrToInt(&slab));
}
{
var slab = ZeeAllocDefaults.Slab.init(64);
std.testing.expectEqual(@as(usize, 3), slab.dataOffset());
var element = slab.elementAt(3);
std.testing.expectEqual(slab.element_size, element.len);
std.testing.expectEqual(3 * slab.element_size, @ptrToInt(element.ptr) - @ptrToInt(&slab));
element = slab.elementAt(5);
std.testing.expectEqual(slab.element_size, element.len);
std.testing.expectEqual(5 * slab.element_size, @ptrToInt(element.ptr) - @ptrToInt(&slab));
}
{
var slab = ZeeAllocDefaults.Slab.init(4);
std.testing.expectEqual(@as(usize, 513), slab.dataOffset());
var element = slab.elementAt(513);
std.testing.expectEqual(slab.element_size, element.len);
std.testing.expectEqual(513 * slab.element_size, @ptrToInt(element.ptr) - @ptrToInt(&slab));
element = slab.elementAt(1023);
std.testing.expectEqual(slab.element_size, element.len);
std.testing.expectEqual(1023 * slab.element_size, @ptrToInt(element.ptr) - @ptrToInt(&slab));
}
}
test "Slab.elementIdx" {
var slab = ZeeAllocDefaults.Slab.init(128);
var element = slab.elementAt(1);
std.testing.expectEqual(@as(usize, 1), slab.elementIdx(element));
}
test "Slab.freeBlocks" {
{
var slab = ZeeAllocDefaults.Slab.init(16384);
const blocks = slab.freeBlocks();
std.testing.expectEqual(@as(usize, 1), blocks.len);
std.testing.expectEqual(@ptrToInt(&slab.pad), @ptrToInt(blocks.ptr));
}
{
var slab = ZeeAllocDefaults.Slab.init(128);
const blocks = slab.freeBlocks();
std.testing.expectEqual(@as(usize, 8), blocks.len);
std.testing.expectEqual(@ptrToInt(&slab.pad), @ptrToInt(blocks.ptr));
}
}
test "Slab.alloc + free" {
var slab = ZeeAllocDefaults.Slab.init(16384);
std.testing.expectEqual(@as(usize, 3), slab.totalFree());
const data0 = try slab.alloc();
std.testing.expectEqual(@as(usize, 2), slab.totalFree());
std.testing.expectEqual(@as(usize, 16384), data0.len);
const data1 = try slab.alloc();
std.testing.expectEqual(@as(usize, 1), slab.totalFree());
std.testing.expectEqual(@as(usize, 16384), data1.len);
std.testing.expectEqual(@as(usize, 16384), @ptrToInt(data1.ptr) - @ptrToInt(data0.ptr));
const data2 = try slab.alloc();
std.testing.expectEqual(@as(usize, 0), slab.totalFree());
std.testing.expectEqual(@as(usize, 16384), data2.len);
std.testing.expectEqual(@as(usize, 16384), @ptrToInt(data2.ptr) - @ptrToInt(data1.ptr));
std.testing.expectError(error.OutOfMemory, slab.alloc());
{
slab.free(data2);
std.testing.expectEqual(@as(usize, 1), slab.totalFree());
slab.free(data1);
std.testing.expectEqual(@as(usize, 2), slab.totalFree());
slab.free(data0);
std.testing.expectEqual(@as(usize, 3), slab.totalFree());
}
}
test "padToSize" {
const page_size = 65536;
const header_size = 2 * @sizeOf(usize);
std.testing.expectEqual(@as(usize, 4), ZeeAllocDefaults.padToSize(1));
std.testing.expectEqual(@as(usize, 4), ZeeAllocDefaults.padToSize(4));
std.testing.expectEqual(@as(usize, 8), ZeeAllocDefaults.padToSize(8));
std.testing.expectEqual(@as(usize, 16), ZeeAllocDefaults.padToSize(9));
std.testing.expectEqual(@as(usize, 16384), ZeeAllocDefaults.padToSize(16384));
}
test "alloc slabs" {
var zee_alloc = ZeeAllocDefaults.init(std.testing.allocator);
defer zee_alloc.deinit();
for (zee_alloc.slabs) |root| {
std.testing.expect(root == null);
}
std.testing.expect(zee_alloc.slabs[0] == null);
const small = try zee_alloc.allocator.alloc(u8, 4);
std.testing.expect(zee_alloc.slabs[0] != null);
const smalls_before_free = zee_alloc.slabs[0].?.totalFree();
zee_alloc.allocator.free(small);
std.testing.expectEqual(smalls_before_free + 1, zee_alloc.slabs[0].?.totalFree());
std.testing.expect(zee_alloc.slabs[12] == null);
const large = try zee_alloc.allocator.alloc(u8, 16384);
std.testing.expect(zee_alloc.slabs[12] != null);
const larges_before_free = zee_alloc.slabs[12].?.totalFree();
zee_alloc.allocator.free(large);
std.testing.expectEqual(larges_before_free + 1, zee_alloc.slabs[12].?.totalFree());
}
test "alloc jumbo" {
var zee_alloc = ZeeAllocDefaults.init(std.testing.allocator);
defer zee_alloc.deinit();
std.testing.expect(zee_alloc.jumbo == null);
const first = try zee_alloc.allocator.alloc(u8, 32000);
std.testing.expect(zee_alloc.jumbo == null);
std.testing.expectEqual(@as(usize, ZeeAllocDefaults.Slab.header_size), @ptrToInt(first.ptr) % 65536);
zee_alloc.allocator.free(first);
std.testing.expect(zee_alloc.jumbo != null);
const reuse = try zee_alloc.allocator.alloc(u8, 32000);
std.testing.expect(zee_alloc.jumbo == null);
std.testing.expectEqual(first.ptr, reuse.ptr);
zee_alloc.allocator.free(first);
std.testing.expect(zee_alloc.jumbo != null);
}
test "functional tests" {
var zee_alloc = ZeeAllocDefaults.init(std.testing.allocator);
defer zee_alloc.deinit();
try std.heap.testAllocator(&zee_alloc.allocator);
try std.heap.testAllocatorAligned(&zee_alloc.allocator, 16);
}
fn expectIllegalBehavior(context: anytype, comptime func: anytype) !void {
if (!@hasDecl(std.os.system, "fork") or !std.debug.runtime_safety) return;
const child_pid = try std.os.fork();
if (child_pid == 0) {
const null_fd = std.os.openZ("/dev/null", std.os.O_RDWR, 0) catch {
std.debug.print("Cannot open /dev/null\n", .{});
std.os.exit(0);
};
std.os.dup2(null_fd, std.io.getStdErr().handle) catch {
std.debug.print("Cannot close child process stderr\n", .{});
std.os.exit(0);
};
func(context); // this should crash
std.os.exit(0);
} else {
const status = std.os.waitpid(child_pid, 0);
// Maybe we should use a fixed error code instead of checking status != 0
if (status == 0) @panic("Expected illegal behavior but succeeded instead");
}
}
const AllocContext = struct {
allocator: *Allocator,
mem: []u8,
fn init(allocator: *Allocator, mem: []u8) AllocContext {
return .{ .allocator = allocator, .mem = mem };
}
fn free(self: AllocContext) void {
self.allocator.free(self.mem);
}
};
test "double free" {
var zee_alloc = ZeeAllocDefaults.init(std.testing.allocator);
defer zee_alloc.deinit();
const mem = try zee_alloc.allocator.alloc(u8, 16);
zee_alloc.allocator.free(mem);
const context = AllocContext.init(&zee_alloc.allocator, mem);
try expectIllegalBehavior(context, AllocContext.free);
}
test "freeing non-owned memory" {
var zee_alloc = ZeeAllocDefaults.init(std.testing.allocator);
defer zee_alloc.deinit();
const mem = try std.testing.allocator.alloc(u8, 16);
defer std.testing.allocator.free(mem);
const context = AllocContext.init(&zee_alloc.allocator, mem);
try expectIllegalBehavior(context, AllocContext.free);
}