Mirror of https://github.com/oven-sh/bun, synced 2026-02-07 09:28:51 +00:00

Compare commits (2 commits)
dylan/ref-... jarred/ast

| Author | SHA1 | Date |
|---|---|---|
| | 7e2539ed70 | |
| | 60b5fb95b1 | |
.vscode/launch.json (vendored): 9 changed lines

@@ -246,6 +246,15 @@
      "cwd": "${workspaceFolder}/packages/bun-hello",
      "console": "internalConsole"
    },
+   {
+     "type": "lldb",
+     "request": "launch",
+     "name": "Macro",
+     "program": "${workspaceFolder}/packages/debug-bun-cli-darwin-x64/bin/bun-debug",
+     "args": ["dev"],
+     "cwd": "${workspaceFolder}/examples/macros",
+     "console": "internalConsole"
+   },
    {
      "type": "lldb",
      "request": "launch",
Makefile: 1 changed line

@@ -190,7 +190,6 @@ CLANG_FLAGS := $(INCLUDE_DIRS) \

jsc-bindings-mac: $(OBJ_FILES)


MACOS_ICU_FILES := $(HOMEBREW_PREFIX)opt/icu4c/lib/libicudata.a \
	$(HOMEBREW_PREFIX)opt/icu4c/lib/libicui18n.a \
	$(HOMEBREW_PREFIX)opt/icu4c/lib/libicuuc.a
examples/macros/dogeify.tsx (new file): 3 lines

@@ -0,0 +1,3 @@
+export function dogeify(astNode: any) {
+  return <void />;
+}
examples/macros/hello.js (new file): 4 lines

@@ -0,0 +1,4 @@
+import { dogeify } from "macro:./dogeify";
+
+const wow = dogeify`Call #1!`;
+const suchDoge = dogeify`Call #2!`;
examples/macros/package.json (new file): 6 lines

@@ -0,0 +1,6 @@
+{
+  "name": "macros",
+  "version": "1.0.0",
+  "main": "index.js",
+  "license": "MIT"
+}
examples/macros/tsconfig.json (new file): 5 lines

@@ -0,0 +1,5 @@
+{
+  "compilerOptions": {
+    "jsx": "react-jsx"
+  }
+}
src/bundler.zig: 4709 changed lines
File diff suppressed because it is too large
src/cache.zig: 613 changed lines

@@ -14,6 +14,8 @@ const Mutex = @import("./lock.zig").Lock;
const import_record = @import("./import_record.zig");
const ImportRecord = import_record.ImportRecord;

+const cache_files = false;

pub const FsCacheEntry = struct {
    contents: string,
    fd: StoredFileDescriptorType = 0,
@@ -28,328 +30,321 @@ pub const FsCacheEntry = struct {
}
|
||||
};
|
||||
|
||||
pub fn NewCache(comptime cache_files: bool) type {
|
||||
return struct {
|
||||
pub const Set = struct {
|
||||
js: JavaScript,
|
||||
fs: Fs,
|
||||
json: Json,
|
||||
pub const Set = struct {
|
||||
js: JavaScript,
|
||||
fs: Fs,
|
||||
json: Json,
|
||||
|
||||
pub fn init(allocator: *std.mem.Allocator) Set {
|
||||
return Set{
|
||||
.js = JavaScript.init(allocator),
|
||||
.fs = Fs{
|
||||
.mutex = Mutex.init(),
|
||||
.entries = std.StringHashMap(Fs.Entry).init(allocator),
|
||||
.shared_buffer = MutableString.init(allocator, 0) catch unreachable,
|
||||
},
|
||||
.json = Json{
|
||||
.mutex = Mutex.init(),
|
||||
.entries = std.StringHashMap(*Json.Entry).init(allocator),
|
||||
},
|
||||
};
|
||||
}
|
||||
pub fn init(allocator: *std.mem.Allocator) Set {
|
||||
return Set{
|
||||
.js = JavaScript.init(allocator),
|
||||
.fs = Fs{
|
||||
.mutex = Mutex.init(),
|
||||
.entries = std.StringHashMap(Fs.Entry).init(allocator),
|
||||
.shared_buffer = MutableString.init(allocator, 0) catch unreachable,
|
||||
},
|
||||
.json = Json{
|
||||
.mutex = Mutex.init(),
|
||||
.entries = std.StringHashMap(*Json.Entry).init(allocator),
|
||||
},
|
||||
};
|
||||
pub const Fs = struct {
|
||||
const Entry = FsCacheEntry;
|
||||
}
|
||||
};
|
||||
pub const Fs = struct {
|
||||
const Entry = FsCacheEntry;
|
||||
|
||||
mutex: Mutex,
|
||||
entries: std.StringHashMap(Entry),
|
||||
shared_buffer: MutableString,
|
||||
mutex: Mutex,
|
||||
entries: std.StringHashMap(Entry),
|
||||
shared_buffer: MutableString,
|
||||
|
||||
pub fn deinit(c: *Fs) void {
|
||||
var iter = c.entries.iterator();
|
||||
while (iter.next()) |entry| {
|
||||
entry.value.deinit(c.entries.allocator);
|
||||
}
|
||||
c.entries.deinit();
|
||||
}
|
||||
pub fn deinit(c: *Fs) void {
|
||||
var iter = c.entries.iterator();
|
||||
while (iter.next()) |entry| {
|
||||
entry.value.deinit(c.entries.allocator);
|
||||
}
|
||||
c.entries.deinit();
|
||||
}
|
||||
|
||||
pub fn readFileShared(
|
||||
c: *Fs,
|
||||
_fs: *fs.FileSystem,
|
||||
path: [:0]const u8,
|
||||
dirname_fd: StoredFileDescriptorType,
|
||||
_file_handle: ?StoredFileDescriptorType,
|
||||
shared: *MutableString,
|
||||
) !Entry {
|
||||
var rfs = _fs.fs;
|
||||
pub fn readFileShared(
|
||||
c: *Fs,
|
||||
_fs: *fs.FileSystem,
|
||||
path: [:0]const u8,
|
||||
dirname_fd: StoredFileDescriptorType,
|
||||
_file_handle: ?StoredFileDescriptorType,
|
||||
shared: *MutableString,
|
||||
) !Entry {
|
||||
var rfs = _fs.fs;
|
||||
|
||||
if (comptime cache_files) {
|
||||
{
|
||||
c.mutex.lock();
|
||||
defer c.mutex.unlock();
|
||||
if (c.entries.get(path)) |entry| {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var file_handle: std.fs.File = if (_file_handle) |__file| std.fs.File{ .handle = __file } else undefined;
|
||||
|
||||
if (_file_handle == null) {
|
||||
file_handle = try std.fs.openFileAbsoluteZ(path, .{ .read = true });
|
||||
}
|
||||
|
||||
defer {
|
||||
if (rfs.needToCloseFiles() and _file_handle == null) {
|
||||
file_handle.close();
|
||||
}
|
||||
}
|
||||
|
||||
// If the file's modification key hasn't changed since it was cached, assume
|
||||
// the contents of the file are also the same and skip reading the file.
|
||||
var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKeyWithFile(path, file_handle) catch |err| handler: {
|
||||
switch (err) {
|
||||
error.FileNotFound, error.AccessDenied => {
|
||||
return err;
|
||||
},
|
||||
else => {
|
||||
if (isDebug) {
|
||||
Output.printError("modkey error: {s}", .{@errorName(err)});
|
||||
}
|
||||
break :handler null;
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
var file: fs.File = undefined;
|
||||
if (mod_key) |modk| {
|
||||
file = rfs.readFileWithHandle(path, modk.size, file_handle, true, shared) catch |err| {
|
||||
if (isDebug) {
|
||||
Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
|
||||
}
|
||||
return err;
|
||||
};
|
||||
} else {
|
||||
file = rfs.readFileWithHandle(path, null, file_handle, true, shared) catch |err| {
|
||||
if (isDebug) {
|
||||
Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
|
||||
}
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
const entry = Entry{
|
||||
.contents = file.contents,
|
||||
.mod_key = mod_key,
|
||||
.fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0,
|
||||
};
|
||||
|
||||
if (comptime cache_files) {
|
||||
c.mutex.lock();
|
||||
defer c.mutex.unlock();
|
||||
var res = c.entries.getOrPut(path) catch unreachable;
|
||||
|
||||
if (res.found_existing) {
|
||||
res.value_ptr.*.deinit(c.entries.allocator);
|
||||
}
|
||||
res.value_ptr.* = entry;
|
||||
return res.value_ptr.*;
|
||||
} else {
|
||||
if (comptime cache_files) {
|
||||
{
|
||||
c.mutex.lock();
|
||||
defer c.mutex.unlock();
|
||||
if (c.entries.get(path)) |entry| {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn readFile(
|
||||
c: *Fs,
|
||||
_fs: *fs.FileSystem,
|
||||
path: string,
|
||||
dirname_fd: StoredFileDescriptorType,
|
||||
comptime use_shared_buffer: bool,
|
||||
_file_handle: ?StoredFileDescriptorType,
|
||||
) !Entry {
|
||||
var rfs = _fs.fs;
|
||||
var file_handle: std.fs.File = if (_file_handle) |__file| std.fs.File{ .handle = __file } else undefined;
|
||||
|
||||
if (comptime cache_files) {
|
||||
{
|
||||
c.mutex.lock();
|
||||
defer c.mutex.unlock();
|
||||
if (c.entries.get(path)) |entry| {
|
||||
return entry;
|
||||
}
|
||||
if (_file_handle == null) {
|
||||
file_handle = try std.fs.openFileAbsoluteZ(path, .{ .read = true });
|
||||
}
|
||||
|
||||
defer {
|
||||
if (rfs.needToCloseFiles() and _file_handle == null) {
|
||||
file_handle.close();
|
||||
}
|
||||
}
|
||||
|
||||
// If the file's modification key hasn't changed since it was cached, assume
|
||||
// the contents of the file are also the same and skip reading the file.
|
||||
var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKeyWithFile(path, file_handle) catch |err| handler: {
|
||||
switch (err) {
|
||||
error.FileNotFound, error.AccessDenied => {
|
||||
return err;
|
||||
},
|
||||
else => {
|
||||
if (isDebug) {
|
||||
Output.printError("modkey error: {s}", .{@errorName(err)});
|
||||
}
|
||||
}
|
||||
|
||||
var file_handle: std.fs.File = if (_file_handle) |__file| std.fs.File{ .handle = __file } else undefined;
|
||||
|
||||
if (_file_handle == null) {
|
||||
if (FeatureFlags.store_file_descriptors and dirname_fd > 0) {
|
||||
file_handle = std.fs.Dir.openFile(std.fs.Dir{ .fd = dirname_fd }, std.fs.path.basename(path), .{ .read = true }) catch |err| brk: {
|
||||
switch (err) {
|
||||
error.FileNotFound => {
|
||||
const handle = try std.fs.openFileAbsolute(path, .{ .read = true });
|
||||
Output.prettyErrorln(
|
||||
"<r><d>Internal error: directory mismatch for directory \"{s}\", fd {d}<r>. You don't need to do anything, but this indicates a bug.",
|
||||
.{ path, dirname_fd },
|
||||
);
|
||||
break :brk handle;
|
||||
},
|
||||
else => return err,
|
||||
}
|
||||
};
|
||||
} else {
|
||||
file_handle = try std.fs.openFileAbsolute(path, .{ .read = true });
|
||||
}
|
||||
}
|
||||
|
||||
defer {
|
||||
if (rfs.needToCloseFiles() and _file_handle == null) {
|
||||
file_handle.close();
|
||||
}
|
||||
}
|
||||
|
||||
// If the file's modification key hasn't changed since it was cached, assume
|
||||
// the contents of the file are also the same and skip reading the file.
|
||||
var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKeyWithFile(path, file_handle) catch |err| handler: {
|
||||
switch (err) {
|
||||
error.FileNotFound, error.AccessDenied => {
|
||||
return err;
|
||||
},
|
||||
else => {
|
||||
if (isDebug) {
|
||||
Output.printError("modkey error: {s}", .{@errorName(err)});
|
||||
}
|
||||
break :handler null;
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
var file: fs.File = undefined;
|
||||
if (mod_key) |modk| {
|
||||
file = rfs.readFileWithHandle(path, modk.size, file_handle, use_shared_buffer, &c.shared_buffer) catch |err| {
|
||||
if (isDebug) {
|
||||
Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
|
||||
}
|
||||
return err;
|
||||
};
|
||||
} else {
|
||||
file = rfs.readFileWithHandle(path, null, file_handle, use_shared_buffer, &c.shared_buffer) catch |err| {
|
||||
if (isDebug) {
|
||||
Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
|
||||
}
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
const entry = Entry{
|
||||
.contents = file.contents,
|
||||
.mod_key = mod_key,
|
||||
.fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0,
|
||||
};
|
||||
|
||||
if (comptime cache_files) {
|
||||
c.mutex.lock();
|
||||
defer c.mutex.unlock();
|
||||
var res = c.entries.getOrPut(path) catch unreachable;
|
||||
|
||||
if (res.found_existing) {
|
||||
res.value_ptr.*.deinit(c.entries.allocator);
|
||||
}
|
||||
res.value_ptr.* = entry;
|
||||
return res.value_ptr.*;
|
||||
} else {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub const Css = struct {
|
||||
pub const Entry = struct {};
|
||||
pub const Result = struct {
|
||||
ok: bool,
|
||||
value: void,
|
||||
};
|
||||
pub fn parse(cache: *@This(), log: *logger.Log, source: logger.Source) !Result {
|
||||
Global.notimpl();
|
||||
}
|
||||
};
|
||||
|
||||
pub const JavaScript = struct {
|
||||
mutex: Mutex,
|
||||
entries: std.StringHashMap(Result),
|
||||
|
||||
pub const Result = js_ast.Result;
|
||||
|
||||
pub fn init(allocator: *std.mem.Allocator) JavaScript {
|
||||
return JavaScript{ .mutex = Mutex.init(), .entries = std.StringHashMap(Result).init(allocator) };
|
||||
}
|
||||
// For now, we're not going to cache JavaScript ASTs.
|
||||
// It's probably only relevant when bundling for production.
|
||||
pub fn parse(
|
||||
cache: *@This(),
|
||||
allocator: *std.mem.Allocator,
|
||||
opts: js_parser.Parser.Options,
|
||||
defines: *Define,
|
||||
log: *logger.Log,
|
||||
source: *const logger.Source,
|
||||
) anyerror!?js_ast.Ast {
|
||||
var temp_log = logger.Log.init(allocator);
|
||||
defer temp_log.appendToMaybeRecycled(log, source) catch {};
|
||||
var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| {
|
||||
return null;
|
||||
};
|
||||
|
||||
const result = try parser.parse();
|
||||
|
||||
return if (result.ok) result.ast else null;
|
||||
}
|
||||
|
||||
pub fn scan(
|
||||
cache: *@This(),
|
||||
allocator: *std.mem.Allocator,
|
||||
scan_pass_result: *js_parser.ScanPassResult,
|
||||
opts: js_parser.Parser.Options,
|
||||
defines: *Define,
|
||||
log: *logger.Log,
|
||||
source: *const logger.Source,
|
||||
) anyerror!void {
|
||||
var temp_log = logger.Log.init(allocator);
|
||||
defer temp_log.appendToMaybeRecycled(log, source) catch {};
|
||||
|
||||
var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| {
|
||||
return;
|
||||
};
|
||||
|
||||
return try parser.scanImports(scan_pass_result);
|
||||
}
|
||||
};
|
||||
|
||||
pub const Json = struct {
|
||||
pub const Entry = struct {
|
||||
is_tsconfig: bool = false,
|
||||
source: logger.Source,
|
||||
expr: ?js_ast.Expr = null,
|
||||
ok: bool = false,
|
||||
// msgs: []logger.Msg,
|
||||
};
|
||||
mutex: Mutex,
|
||||
entries: std.StringHashMap(*Entry),
|
||||
pub fn init(allocator: *std.mem.Allocator) Json {
|
||||
return Json{
|
||||
.mutex = Mutex.init(),
|
||||
.entries = std.StringHashMap(Entry).init(allocator),
|
||||
};
|
||||
}
|
||||
fn parse(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator, is_tsconfig: bool, func: anytype) anyerror!?js_ast.Expr {
|
||||
var temp_log = logger.Log.init(allocator);
|
||||
defer {
|
||||
temp_log.appendTo(log) catch {};
|
||||
}
|
||||
return func(&source, &temp_log, allocator) catch handler: {
|
||||
break :handler null;
|
||||
};
|
||||
}
|
||||
pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr {
|
||||
return try parse(cache, log, source, allocator, false, json_parser.ParseJSON);
|
||||
}
|
||||
|
||||
pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr {
|
||||
return try parse(cache, log, source, allocator, true, json_parser.ParseTSConfig);
|
||||
},
|
||||
}
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
pub const Cache = NewCache(true);
|
||||
pub const ServeCache = NewCache(false);
|
||||
var file: fs.File = undefined;
|
||||
if (mod_key) |modk| {
|
||||
file = rfs.readFileWithHandle(path, modk.size, file_handle, true, shared) catch |err| {
|
||||
if (isDebug) {
|
||||
Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
|
||||
}
|
||||
return err;
|
||||
};
|
||||
} else {
|
||||
file = rfs.readFileWithHandle(path, null, file_handle, true, shared) catch |err| {
|
||||
if (isDebug) {
|
||||
Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
|
||||
}
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
const entry = Entry{
|
||||
.contents = file.contents,
|
||||
.mod_key = mod_key,
|
||||
.fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0,
|
||||
};
|
||||
|
||||
if (comptime cache_files) {
|
||||
c.mutex.lock();
|
||||
defer c.mutex.unlock();
|
||||
var res = c.entries.getOrPut(path) catch unreachable;
|
||||
|
||||
if (res.found_existing) {
|
||||
res.value_ptr.*.deinit(c.entries.allocator);
|
||||
}
|
||||
res.value_ptr.* = entry;
|
||||
return res.value_ptr.*;
|
||||
} else {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn readFile(
|
||||
c: *Fs,
|
||||
_fs: *fs.FileSystem,
|
||||
path: string,
|
||||
dirname_fd: StoredFileDescriptorType,
|
||||
comptime use_shared_buffer: bool,
|
||||
_file_handle: ?StoredFileDescriptorType,
|
||||
) !Entry {
|
||||
var rfs = _fs.fs;
|
||||
|
||||
if (comptime cache_files) {
|
||||
{
|
||||
c.mutex.lock();
|
||||
defer c.mutex.unlock();
|
||||
if (c.entries.get(path)) |entry| {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var file_handle: std.fs.File = if (_file_handle) |__file| std.fs.File{ .handle = __file } else undefined;
|
||||
|
||||
if (_file_handle == null) {
|
||||
if (FeatureFlags.store_file_descriptors and dirname_fd > 0) {
|
||||
file_handle = std.fs.Dir.openFile(std.fs.Dir{ .fd = dirname_fd }, std.fs.path.basename(path), .{ .read = true }) catch |err| brk: {
|
||||
switch (err) {
|
||||
error.FileNotFound => {
|
||||
const handle = try std.fs.openFileAbsolute(path, .{ .read = true });
|
||||
Output.prettyErrorln(
|
||||
"<r><d>Internal error: directory mismatch for directory \"{s}\", fd {d}<r>. You don't need to do anything, but this indicates a bug.",
|
||||
.{ path, dirname_fd },
|
||||
);
|
||||
break :brk handle;
|
||||
},
|
||||
else => return err,
|
||||
}
|
||||
};
|
||||
} else {
|
||||
file_handle = try std.fs.openFileAbsolute(path, .{ .read = true });
|
||||
}
|
||||
}
|
||||
|
||||
defer {
|
||||
if (rfs.needToCloseFiles() and _file_handle == null) {
|
||||
file_handle.close();
|
||||
}
|
||||
}
|
||||
|
||||
// If the file's modification key hasn't changed since it was cached, assume
|
||||
// the contents of the file are also the same and skip reading the file.
|
||||
var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKeyWithFile(path, file_handle) catch |err| handler: {
|
||||
switch (err) {
|
||||
error.FileNotFound, error.AccessDenied => {
|
||||
return err;
|
||||
},
|
||||
else => {
|
||||
if (isDebug) {
|
||||
Output.printError("modkey error: {s}", .{@errorName(err)});
|
||||
}
|
||||
break :handler null;
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
var file: fs.File = undefined;
|
||||
if (mod_key) |modk| {
|
||||
file = rfs.readFileWithHandle(path, modk.size, file_handle, use_shared_buffer, &c.shared_buffer) catch |err| {
|
||||
if (isDebug) {
|
||||
Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
|
||||
}
|
||||
return err;
|
||||
};
|
||||
} else {
|
||||
file = rfs.readFileWithHandle(path, null, file_handle, use_shared_buffer, &c.shared_buffer) catch |err| {
|
||||
if (isDebug) {
|
||||
Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
|
||||
}
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
const entry = Entry{
|
||||
.contents = file.contents,
|
||||
.mod_key = mod_key,
|
||||
.fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0,
|
||||
};
|
||||
|
||||
if (comptime cache_files) {
|
||||
c.mutex.lock();
|
||||
defer c.mutex.unlock();
|
||||
var res = c.entries.getOrPut(path) catch unreachable;
|
||||
|
||||
if (res.found_existing) {
|
||||
res.value_ptr.*.deinit(c.entries.allocator);
|
||||
}
|
||||
res.value_ptr.* = entry;
|
||||
return res.value_ptr.*;
|
||||
} else {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub const Css = struct {
|
||||
pub const Entry = struct {};
|
||||
pub const Result = struct {
|
||||
ok: bool,
|
||||
value: void,
|
||||
};
|
||||
pub fn parse(cache: *@This(), log: *logger.Log, source: logger.Source) !Result {
|
||||
Global.notimpl();
|
||||
}
|
||||
};
|
||||
|
||||
pub const JavaScript = struct {
|
||||
mutex: Mutex,
|
||||
entries: std.StringHashMap(Result),
|
||||
|
||||
pub const Result = js_ast.Result;
|
||||
|
||||
pub fn init(allocator: *std.mem.Allocator) JavaScript {
|
||||
return JavaScript{ .mutex = Mutex.init(), .entries = std.StringHashMap(Result).init(allocator) };
|
||||
}
|
||||
// For now, we're not going to cache JavaScript ASTs.
|
||||
// It's probably only relevant when bundling for production.
|
||||
pub fn parse(
|
||||
cache: *@This(),
|
||||
allocator: *std.mem.Allocator,
|
||||
opts: js_parser.Parser.Options,
|
||||
defines: *Define,
|
||||
log: *logger.Log,
|
||||
source: *const logger.Source,
|
||||
) anyerror!?js_ast.Ast {
|
||||
var temp_log = logger.Log.init(allocator);
|
||||
defer temp_log.appendToMaybeRecycled(log, source) catch {};
|
||||
var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| {
|
||||
return null;
|
||||
};
|
||||
|
||||
const result = try parser.parse();
|
||||
|
||||
return if (result.ok) result.ast else null;
|
||||
}
|
||||
|
||||
pub fn scan(
|
||||
cache: *@This(),
|
||||
allocator: *std.mem.Allocator,
|
||||
scan_pass_result: *js_parser.ScanPassResult,
|
||||
opts: js_parser.Parser.Options,
|
||||
defines: *Define,
|
||||
log: *logger.Log,
|
||||
source: *const logger.Source,
|
||||
) anyerror!void {
|
||||
var temp_log = logger.Log.init(allocator);
|
||||
defer temp_log.appendToMaybeRecycled(log, source) catch {};
|
||||
|
||||
var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| {
|
||||
return;
|
||||
};
|
||||
|
||||
return try parser.scanImports(scan_pass_result);
|
||||
}
|
||||
};
|
||||
|
||||
pub const Json = struct {
|
||||
pub const Entry = struct {
|
||||
is_tsconfig: bool = false,
|
||||
source: logger.Source,
|
||||
expr: ?js_ast.Expr = null,
|
||||
ok: bool = false,
|
||||
// msgs: []logger.Msg,
|
||||
};
|
||||
mutex: Mutex,
|
||||
entries: std.StringHashMap(*Entry),
|
||||
pub fn init(allocator: *std.mem.Allocator) Json {
|
||||
return Json{
|
||||
.mutex = Mutex.init(),
|
||||
.entries = std.StringHashMap(Entry).init(allocator),
|
||||
};
|
||||
}
|
||||
fn parse(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator, is_tsconfig: bool, func: anytype) anyerror!?js_ast.Expr {
|
||||
var temp_log = logger.Log.init(allocator);
|
||||
defer {
|
||||
temp_log.appendTo(log) catch {};
|
||||
}
|
||||
return func(&source, &temp_log, allocator) catch handler: {
|
||||
break :handler null;
|
||||
};
|
||||
}
|
||||
pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr {
|
||||
return try parse(cache, log, source, allocator, false, json_parser.ParseJSON);
|
||||
}
|
||||
|
||||
pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr {
|
||||
return try parse(cache, log, source, allocator, true, json_parser.ParseTSConfig);
|
||||
}
|
||||
};
|
||||
|
||||
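The cache.zig hunk above wraps the caches in a comptime-parameterized factory: `pub fn NewCache(comptime cache_files: bool) type` returns a distinct struct type per flag, and the diff defines `pub const Cache = NewCache(true);` and `pub const ServeCache = NewCache(false);`. For readers unfamiliar with Zig type factories, here is a minimal sketch of the same pattern with invented names (not part of this diff):

// Hypothetical example, not from the diff: the comptime-factory pattern NewCache uses.
fn NewCounter(comptime enabled: bool) type {
    return struct {
        count: usize = 0,
        pub fn bump(self: *@This()) void {
            // Resolved at compile time, so NewCounter(false) carries no runtime check.
            if (comptime enabled) self.count += 1;
        }
    };
}
const Counting = NewCounter(true);
const NonCounting = NewCounter(false);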
@@ -32,13 +32,6 @@ pub const BuildCommand = struct {
                ctx.args,
            );
        },
-       .lazy => {
-           result = try bundler.ServeBundler.bundle(
-               ctx.allocator,
-               ctx.log,
-               ctx.args,
-           );
-       },
        else => {
            result = try bundler.Bundler.bundle(
                ctx.allocator,
@@ -37,7 +37,7 @@ const ServerBundleGeneratorThread = struct {
        route_conf_: ?Api.LoadedRouteConfig,
        router: ?Router,
    ) !void {
-       var server_bundler = try bundler.ServeBundler.init(
+       var server_bundler = try bundler.Bundler.init(
            allocator_,
            logs,
            try configureTransformOptionsForBun(allocator_, transform_args),
@@ -48,7 +48,7 @@ const ServerBundleGeneratorThread = struct {
        server_bundler.configureLinker();
        server_bundler.router = router;
        try server_bundler.configureDefines();
-       _ = try bundler.ServeBundler.GenerateNodeModuleBundle.generate(
+       _ = try bundler.Bundler.GenerateNodeModuleBundle.generate(
            &server_bundler,
            allocator_,
            server_conf,
@@ -92,7 +92,7 @@ pub const BunCommand = struct {
        var allocator = ctx.allocator;
        var log = ctx.log;

-       var this_bundler = try bundler.ServeBundler.init(allocator, log, ctx.args, null, null);
+       var this_bundler = try bundler.Bundler.init(allocator, log, ctx.args, null, null);
        this_bundler.configureLinker();
        var filepath: [*:0]const u8 = "node_modules.bun";
        var server_bundle_filepath: [*:0]const u8 = "node_modules.server.bun";
@@ -156,7 +156,7 @@ pub const BunCommand = struct {
        {
            // Always generate the client-only bundle
            // we can revisit this decision if people ask
-           var node_modules_ = try bundler.ServeBundler.GenerateNodeModuleBundle.generate(
+           var node_modules_ = try bundler.Bundler.GenerateNodeModuleBundle.generate(
                &this_bundler,
                allocator,
                loaded_framework,
@@ -269,10 +269,10 @@ pub const Define = struct {
        // Step 2. Swap in certain literal values because those can be constant folded
        define.identifiers.putAssumeCapacity("undefined", value_define);
        define.identifiers.putAssumeCapacity("NaN", DefineData{
-           .value = js_ast.Expr.Data{ .e_number = &nan_val },
+           .value = js_ast.Expr.Data{ .e_number = nan_val },
        });
        define.identifiers.putAssumeCapacity("Infinity", DefineData{
-           .value = js_ast.Expr.Data{ .e_number = &inf_val },
+           .value = js_ast.Expr.Data{ .e_number = inf_val },
        });

        // Step 3. Load user data into hash tables
@@ -40,7 +40,7 @@ const Request = picohttp.Request;
const Response = picohttp.Response;
pub const Headers = picohttp.Headers;
pub const MimeType = @import("./http/mime_type.zig");
-const Bundler = bundler.ServeBundler;
+const Bundler = bundler.Bundler;
const Websocket = @import("./http/websocket.zig");
const js_printer = @import("./js_printer.zig");
const SOCKET_FLAGS = os.SOCK_CLOEXEC;
@@ -4,6 +4,7 @@ pub usingnamespace @import("../../global.zig");
usingnamespace @import("./javascript.zig");
usingnamespace @import("./webcore/response.zig");
const Router = @import("./api/router.zig");
+const JSExpr = @import("../../js_ast.zig").Macro.JSExpr;

const TaggedPointerTypes = @import("../../tagged_pointer.zig");
const TaggedPointerUnion = TaggedPointerTypes.TaggedPointerUnion;
@@ -839,6 +840,10 @@ pub fn NewClass(
        }

        pub fn customHasInstance(ctx: js.JSContextRef, obj: js.JSObjectRef, value: js.JSValueRef, exception: js.ExceptionRef) callconv(.C) bool {
+           if (comptime @typeInfo(ZigType) == .Struct and @hasDecl(ZigType, "isInstanceOf")) {
+               return ZigType.isInstanceOf(ctx, obj, value, exception);
+           }
+
            return js.JSValueIsObjectOfClass(ctx, value, get().*);
        }

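For context on the customHasInstance hook added above: any type handed to NewClass that declares an `isInstanceOf` function with the signature used in the `@hasDecl` check gets to answer `instanceof` itself; otherwise the default `js.JSValueIsObjectOfClass` path is used. A hypothetical opt-in (names invented, not from this diff, and assuming the `js` C-API namespace exposes `JSValueIsObject` the same way it exposes `JSValueIsObjectOfClass`) could look like:

pub const SomeWrapper = struct {
    // Hypothetical: consulted by customHasInstance instead of the class-equality check.
    pub fn isInstanceOf(ctx: js.JSContextRef, obj: js.JSObjectRef, value: js.JSValueRef, exception: js.ExceptionRef) bool {
        _ = obj;
        _ = exception;
        // Treat any object value as an instance; a real implementation would inspect `value`.
        return js.JSValueIsObject(ctx, value);
    }
};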
@@ -1449,6 +1454,7 @@ pub fn NewClass(

        if (!singleton)
            def.hasInstance = customHasInstance;

        return def;
    }
};
@@ -1509,6 +1515,7 @@ pub const JSPrivateDataPtr = TaggedPointerUnion(.{
    Headers,
    Body,
    Router,
+   JSExpr,
});

pub inline fn GetJSPrivateData(comptime Type: type, ref: js.JSObjectRef) ?*Type {
@@ -7,6 +7,7 @@
#include <JavaScriptCore/ErrorInstance.h>
#include <JavaScriptCore/ExceptionScope.h>
#include <JavaScriptCore/FunctionConstructor.h>
#include <JavaScriptCore/HashMapImplInlines.h>
#include <JavaScriptCore/Identifier.h>
#include <JavaScriptCore/IteratorOperations.h>
#include <JavaScriptCore/JSArray.h>

@@ -15,7 +16,9 @@
#include <JavaScriptCore/JSClassRef.h>
#include <JavaScriptCore/JSInternalPromise.h>
#include <JavaScriptCore/JSMap.h>
#include <JavaScriptCore/JSModuleEnvironment.h>
#include <JavaScriptCore/JSModuleLoader.h>
#include <JavaScriptCore/JSModuleNamespaceObject.h>
#include <JavaScriptCore/JSModuleRecord.h>
#include <JavaScriptCore/JSNativeStdFunction.h>
#include <JavaScriptCore/JSObject.h>

@@ -27,6 +30,7 @@
#include <JavaScriptCore/StackFrame.h>
#include <JavaScriptCore/StackVisitor.h>
#include <JavaScriptCore/VM.h>
#include <JavaScriptCore/VMEntryScope.h>
#include <JavaScriptCore/WasmFaultSignalHandler.h>
#include <wtf/text/ExternalStringImpl.h>
#include <wtf/text/StringCommon.h>
@@ -92,6 +96,243 @@ void JSC__JSObject__putRecord(JSC__JSObject *object, JSC__JSGlobalObject *global
|
||||
object->putDirect(global->vm(), ident, descriptor.value());
|
||||
scope.release();
|
||||
}
|
||||
|
||||
static void populateStackFrameMetadata(const JSC::StackFrame *stackFrame, ZigStackFrame *frame) {
|
||||
frame->source_url = Zig::toZigString(stackFrame->sourceURL());
|
||||
|
||||
if (stackFrame->isWasmFrame()) {
|
||||
frame->code_type = ZigStackFrameCodeWasm;
|
||||
return;
|
||||
}
|
||||
|
||||
auto m_codeBlock = stackFrame->codeBlock();
|
||||
if (m_codeBlock) {
|
||||
switch (m_codeBlock->codeType()) {
|
||||
case JSC::EvalCode: {
|
||||
frame->code_type = ZigStackFrameCodeEval;
|
||||
return;
|
||||
}
|
||||
case JSC::ModuleCode: {
|
||||
frame->code_type = ZigStackFrameCodeModule;
|
||||
return;
|
||||
}
|
||||
case JSC::GlobalCode: {
|
||||
frame->code_type = ZigStackFrameCodeGlobal;
|
||||
return;
|
||||
}
|
||||
case JSC::FunctionCode: {
|
||||
frame->code_type =
|
||||
!m_codeBlock->isConstructor() ? ZigStackFrameCodeFunction : ZigStackFrameCodeConstructor;
|
||||
break;
|
||||
}
|
||||
default: ASSERT_NOT_REACHED();
|
||||
}
|
||||
}
|
||||
|
||||
auto calleeCell = stackFrame->callee();
|
||||
if (!calleeCell || !calleeCell->isObject()) return;
|
||||
|
||||
JSC::JSObject *callee = JSC::jsCast<JSC::JSObject *>(calleeCell);
|
||||
// Does the code block have a user-defined name property?
|
||||
JSC::JSValue name = callee->getDirect(m_codeBlock->vm(), m_codeBlock->vm().propertyNames->name);
|
||||
if (name && name.isString()) {
|
||||
auto str = name.toWTFString(m_codeBlock->globalObject());
|
||||
frame->function_name = Zig::toZigString(str);
|
||||
return;
|
||||
}
|
||||
|
||||
/* For functions (either JSFunction or InternalFunction), fallback to their "native" name
|
||||
* property. Based on JSC::getCalculatedDisplayName, "inlining" the
|
||||
* JSFunction::calculatedDisplayName\InternalFunction::calculatedDisplayName calls */
|
||||
if (JSC::JSFunction *function =
|
||||
JSC::jsDynamicCast<JSC::JSFunction *>(m_codeBlock->vm(), callee)) {
|
||||
|
||||
WTF::String actualName = function->name(m_codeBlock->vm());
|
||||
if (!actualName.isEmpty() || function->isHostOrBuiltinFunction()) {
|
||||
frame->function_name = Zig::toZigString(actualName);
|
||||
return;
|
||||
}
|
||||
|
||||
auto inferred_name = function->jsExecutable()->name();
|
||||
frame->function_name = Zig::toZigString(inferred_name.string());
|
||||
}
|
||||
|
||||
if (JSC::InternalFunction *function =
|
||||
JSC::jsDynamicCast<JSC::InternalFunction *>(m_codeBlock->vm(), callee)) {
|
||||
// Based on JSC::InternalFunction::calculatedDisplayName, skipping the "displayName" property
|
||||
frame->function_name = Zig::toZigString(function->name());
|
||||
}
|
||||
}
|
||||
// Based on
|
||||
// https://github.com/mceSystems/node-jsc/blob/master/deps/jscshim/src/shim/JSCStackTrace.cpp#L298
|
||||
static void populateStackFramePosition(const JSC::StackFrame *stackFrame, ZigString *source_lines,
|
||||
int32_t *source_line_numbers, uint8_t source_lines_count,
|
||||
ZigStackFramePosition *position) {
|
||||
auto m_codeBlock = stackFrame->codeBlock();
|
||||
if (!m_codeBlock) return;
|
||||
|
||||
JSC::BytecodeIndex bytecodeOffset =
|
||||
stackFrame->hasBytecodeIndex() ? stackFrame->bytecodeIndex() : JSC::BytecodeIndex();
|
||||
|
||||
/* Get the "raw" position info.
|
||||
* Note that we're using m_codeBlock->unlinkedCodeBlock()->expressionRangeForBytecodeOffset
|
||||
* rather than m_codeBlock->expressionRangeForBytecodeOffset in order get the "raw" offsets and
|
||||
* avoid the CodeBlock's expressionRangeForBytecodeOffset modifications to the line and column
|
||||
* numbers, (we don't need the column number from it, and we'll calculate the line "fixes"
|
||||
* ourselves). */
|
||||
int startOffset = 0;
|
||||
int endOffset = 0;
|
||||
int divotPoint = 0;
|
||||
unsigned line = 0;
|
||||
unsigned unusedColumn = 0;
|
||||
m_codeBlock->unlinkedCodeBlock()->expressionRangeForBytecodeIndex(
|
||||
bytecodeOffset, divotPoint, startOffset, endOffset, line, unusedColumn);
|
||||
divotPoint += m_codeBlock->sourceOffset();
|
||||
|
||||
// TODO: evaluate if using the API from UnlinkedCodeBlock can be used instead of iterating
|
||||
// through source text.
|
||||
|
||||
/* On the first line of the source code, it seems that we need to "fix" the column with the
|
||||
* starting offset. We currently use codeBlock->source()->startPosition().m_column.oneBasedInt()
|
||||
* as the offset in the first line rather than codeBlock->firstLineColumnOffset(), which seems
|
||||
* simpler (and what CodeBlock::expressionRangeForBytecodeOffset does). This is because
|
||||
* firstLineColumnOffset values seems different from what we expect (according to v8's tests)
|
||||
* and I haven't dove into the relevant parts in JSC (yet) to figure out why. */
|
||||
unsigned columnOffset = line ? 0 : m_codeBlock->source().startColumn().zeroBasedInt();
|
||||
|
||||
// "Fix" the line number
|
||||
JSC::ScriptExecutable *executable = m_codeBlock->ownerExecutable();
|
||||
if (std::optional<int> overrideLine = executable->overrideLineNumber(m_codeBlock->vm())) {
|
||||
line = overrideLine.value();
|
||||
} else {
|
||||
line += executable->firstLine();
|
||||
}
|
||||
|
||||
// Calculate the staring\ending offsets of the entire expression
|
||||
int expressionStart = divotPoint - startOffset;
|
||||
int expressionStop = divotPoint + endOffset;
|
||||
|
||||
// Make sure the range is valid
|
||||
WTF::StringView sourceString = m_codeBlock->source().provider()->source();
|
||||
if (!expressionStop || expressionStart > static_cast<int>(sourceString.length())) { return; }
|
||||
|
||||
// Search for the beginning of the line
|
||||
unsigned int lineStart = expressionStart;
|
||||
while ((lineStart > 0) && ('\n' != sourceString[lineStart - 1])) { lineStart--; }
|
||||
// Search for the end of the line
|
||||
unsigned int lineStop = expressionStop;
|
||||
unsigned int sourceLength = sourceString.length();
|
||||
while ((lineStop < sourceLength) && ('\n' != sourceString[lineStop])) { lineStop++; }
|
||||
if (source_lines_count > 1 && source_lines != nullptr) {
|
||||
auto chars = sourceString.characters8();
|
||||
|
||||
// Most of the time, when you look at a stack trace, you want a couple lines above
|
||||
|
||||
source_lines[0] = {&chars[lineStart], lineStop - lineStart};
|
||||
source_line_numbers[0] = line;
|
||||
|
||||
if (lineStart > 0) {
|
||||
auto byte_offset_in_source_string = lineStart - 1;
|
||||
uint8_t source_line_i = 1;
|
||||
auto remaining_lines_to_grab = source_lines_count - 1;
|
||||
|
||||
while (byte_offset_in_source_string > 0 && remaining_lines_to_grab > 0) {
|
||||
unsigned int end_of_line_offset = byte_offset_in_source_string;
|
||||
|
||||
// This should probably be code points instead of newlines
|
||||
while (byte_offset_in_source_string > 0 && chars[byte_offset_in_source_string] != '\n') {
|
||||
byte_offset_in_source_string--;
|
||||
}
|
||||
|
||||
// We are at the beginning of the line
|
||||
source_lines[source_line_i] = {&chars[byte_offset_in_source_string],
|
||||
end_of_line_offset - byte_offset_in_source_string + 1};
|
||||
|
||||
source_line_numbers[source_line_i] = line - source_line_i;
|
||||
source_line_i++;
|
||||
|
||||
remaining_lines_to_grab--;
|
||||
|
||||
byte_offset_in_source_string -= byte_offset_in_source_string > 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Finally, store the source "positions" info.
|
||||
* Notes:
|
||||
* - The retrieved column seem to point the "end column". To make sure we're current, we'll
|
||||
*calculate the columns ourselves, since we've already found where the line starts. Note that in
|
||||
*v8 it should be 0-based here (in contrast the 1-based column number in v8::StackFrame).
|
||||
* - The static_casts are ugly, but comes from differences between JSC and v8's api, and should
|
||||
*be OK since no source should be longer than "max int" chars.
|
||||
* TODO: If expressionStart == expressionStop, then m_endColumn will be equal to m_startColumn.
|
||||
*Should we handle this case?
|
||||
*/
|
||||
position->expression_start = expressionStart;
|
||||
position->expression_stop = expressionStop;
|
||||
position->line = WTF::OrdinalNumber::fromOneBasedInt(static_cast<int>(line)).zeroBasedInt();
|
||||
position->column_start = (expressionStart - lineStart) + columnOffset;
|
||||
position->column_stop = position->column_start + (expressionStop - expressionStart);
|
||||
position->line_start = lineStart;
|
||||
position->line_stop = lineStop;
|
||||
|
||||
return;
|
||||
}
|
||||
static void populateStackFrame(ZigStackTrace *trace, const JSC::StackFrame *stackFrame,
|
||||
ZigStackFrame *frame, bool is_top) {
|
||||
populateStackFrameMetadata(stackFrame, frame);
|
||||
populateStackFramePosition(stackFrame, is_top ? trace->source_lines_ptr : nullptr,
|
||||
is_top ? trace->source_lines_numbers : nullptr,
|
||||
is_top ? trace->source_lines_to_collect : 0, &frame->position);
|
||||
}
|
||||
static void populateStackTrace(const WTF::Vector<JSC::StackFrame> &frames, ZigStackTrace *trace) {
|
||||
uint8_t frame_i = 0;
|
||||
size_t stack_frame_i = 0;
|
||||
const size_t total_frame_count = frames.size();
|
||||
const uint8_t frame_count =
|
||||
total_frame_count < trace->frames_len ? total_frame_count : trace->frames_len;
|
||||
|
||||
while (frame_i < frame_count && stack_frame_i < total_frame_count) {
|
||||
// Skip native frames
|
||||
while (stack_frame_i < total_frame_count && !(&frames.at(stack_frame_i))->codeBlock() &&
|
||||
!(&frames.at(stack_frame_i))->isWasmFrame()) {
|
||||
stack_frame_i++;
|
||||
}
|
||||
if (stack_frame_i >= total_frame_count) break;
|
||||
|
||||
ZigStackFrame *frame = &trace->frames_ptr[frame_i];
|
||||
populateStackFrame(trace, &frames[stack_frame_i], frame, frame_i == 0);
|
||||
stack_frame_i++;
|
||||
frame_i++;
|
||||
}
|
||||
trace->frames_len = frame_i;
|
||||
}
|
||||
static void fromErrorInstance(ZigException *except, JSC::JSGlobalObject *global,
|
||||
JSC::ErrorInstance *err, const Vector<JSC::StackFrame> *stackTrace,
|
||||
JSC::JSValue val) {
|
||||
JSC::JSObject *obj = JSC::jsDynamicCast<JSC::JSObject *>(global->vm(), val);
|
||||
if (stackTrace != nullptr && stackTrace->size() > 0) {
|
||||
populateStackTrace(*stackTrace, &except->stack);
|
||||
} else if (err->stackTrace() != nullptr && err->stackTrace()->size() > 0) {
|
||||
populateStackTrace(*err->stackTrace(), &except->stack);
|
||||
}
|
||||
|
||||
except->code = (unsigned char)err->errorType();
|
||||
if (err->isStackOverflowError()) { except->code = 253; }
|
||||
if (err->isOutOfMemoryError()) { except->code = 8; }
|
||||
|
||||
if (obj->hasProperty(global, global->vm().propertyNames->message)) {
|
||||
except->message = Zig::toZigString(
|
||||
obj->getDirect(global->vm(), global->vm().propertyNames->message).toWTFString(global));
|
||||
|
||||
} else {
|
||||
except->message = Zig::toZigString(err->sanitizedMessageString(global));
|
||||
}
|
||||
except->name = Zig::toZigString(err->sanitizedNameString(global));
|
||||
except->runtime_type = err->runtimeTypeForCause();
|
||||
|
||||
except->exception = err;
|
||||
}
|
||||
void JSC__JSValue__putRecord(JSC__JSValue objectValue, JSC__JSGlobalObject *global, ZigString *key,
|
||||
ZigString *values, size_t valuesLen) {
|
||||
JSC::JSValue objValue = JSC::JSValue::decode(objectValue);
|
||||
@@ -268,6 +509,64 @@ bWTF__String JSC__JSString__value(JSC__JSString *arg0, JSC__JSGlobalObject *arg1
|
||||
|
||||
#pragma mark - JSC::JSModuleLoader
|
||||
|
||||
JSC__JSValue JSC__JSModuleLoader__callExportedFunction(JSC__JSGlobalObject *globalObject,
|
||||
ZigString specifier, ZigString functionName,
|
||||
JSC__JSValue *arguments,
|
||||
unsigned char argumentsCount,
|
||||
ZigException *zig_exception) {
|
||||
JSC::VM &vm = globalObject->vm();
|
||||
JSC::JSLockHolder lock(vm);
|
||||
|
||||
JSC::JSObject *loader = JSC::jsDynamicCast<JSC::JSObject *>(vm, globalObject->moduleLoader());
|
||||
JSC::JSMap *registry = JSC::jsDynamicCast<JSC::JSMap *>(
|
||||
vm, loader->getDirect(vm, JSC::Identifier::fromString(vm, "registry")));
|
||||
auto specifier_impl = WTF::ExternalStringImpl::createStatic(specifier.ptr, specifier.len);
|
||||
auto specifier_ident =
|
||||
JSC::jsOwnedString(vm, reinterpret_cast<WTF::UniquedStringImpl *>(specifier_impl.ptr()));
|
||||
auto entry_cell = registry->get(globalObject, specifier_ident);
|
||||
|
||||
if (JSC::JSObject *entry = JSC::jsDynamicCast<JSC::JSObject *>(vm, entry_cell)) {
|
||||
auto recordIdentifier = JSC::Identifier::fromString(vm, "module");
|
||||
|
||||
if (JSC::JSModuleRecord *record =
|
||||
JSC::jsDynamicCast<JSC::JSModuleRecord *>(vm, entry->getDirect(vm, recordIdentifier))) {
|
||||
auto fn_impl = WTF::ExternalStringImpl::createStatic(functionName.ptr, functionName.len);
|
||||
auto fn_ident = reinterpret_cast<WTF::UniquedStringImpl *>(fn_impl.ptr());
|
||||
auto moduleNamespace = record->getModuleNamespace(globalObject);
|
||||
|
||||
if (JSC::JSValue macroFunctionExport =
|
||||
moduleNamespace->getIfPropertyExists(globalObject, JSC::PropertyName(fn_ident))) {
|
||||
|
||||
if (JSC::JSObject *macroFunction = JSC::asObject(macroFunctionExport.asCell())) {
|
||||
JSC::VMEntryScope entryScope(vm, globalObject);
|
||||
|
||||
auto callData = JSC::getCallData(vm, macroFunction);
|
||||
if (callData.type == JSC::CallData::Type::None) return JSC::JSValue::encode({});
|
||||
|
||||
JSC::MarkedArgumentBuffer argList;
|
||||
for (size_t i = 0; i < argumentsCount; i++)
|
||||
argList.append(JSC::JSValue::decode(arguments[i]));
|
||||
|
||||
NakedPtr<JSC::Exception> uncaughtException;
|
||||
JSC::JSValue reval = JSC::call(globalObject, macroFunction, callData,
|
||||
globalObject->globalThis(), argList, uncaughtException);
|
||||
if (uncaughtException) {
|
||||
if (JSC::ErrorInstance *error =
|
||||
JSC::jsDynamicCast<JSC::ErrorInstance *>(vm, uncaughtException->value())) {
|
||||
fromErrorInstance(zig_exception, globalObject, error, &uncaughtException->stack(),
|
||||
JSC::JSValue(uncaughtException));
|
||||
return JSC::JSValue::encode({});
|
||||
}
|
||||
}
|
||||
return JSC::JSValue::encode(reval);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return JSC::JSValue::encode({});
|
||||
}
|
||||
|
||||
// JSC__JSValue
|
||||
// JSC__JSModuleLoader__dependencyKeysIfEvaluated(JSC__JSModuleLoader* arg0,
|
||||
// JSC__JSGlobalObject* arg1, JSC__JSModuleRecord* arg2) {
|
||||
@@ -1032,243 +1331,6 @@ bWTF__String JSC__JSValue__toWTFString(JSC__JSValue JSValue0, JSC__JSGlobalObjec
|
||||
return Wrap<WTF::String, bWTF__String>::wrap(value.toWTFString(arg1));
|
||||
};
|
||||
|
||||
static void populateStackFrameMetadata(const JSC::StackFrame *stackFrame, ZigStackFrame *frame) {
|
||||
frame->source_url = Zig::toZigString(stackFrame->sourceURL());
|
||||
|
||||
if (stackFrame->isWasmFrame()) {
|
||||
frame->code_type = ZigStackFrameCodeWasm;
|
||||
return;
|
||||
}
|
||||
|
||||
auto m_codeBlock = stackFrame->codeBlock();
|
||||
if (m_codeBlock) {
|
||||
switch (m_codeBlock->codeType()) {
|
||||
case JSC::EvalCode: {
|
||||
frame->code_type = ZigStackFrameCodeEval;
|
||||
return;
|
||||
}
|
||||
case JSC::ModuleCode: {
|
||||
frame->code_type = ZigStackFrameCodeModule;
|
||||
return;
|
||||
}
|
||||
case JSC::GlobalCode: {
|
||||
frame->code_type = ZigStackFrameCodeGlobal;
|
||||
return;
|
||||
}
|
||||
case JSC::FunctionCode: {
|
||||
frame->code_type =
|
||||
!m_codeBlock->isConstructor() ? ZigStackFrameCodeFunction : ZigStackFrameCodeConstructor;
|
||||
break;
|
||||
}
|
||||
default: ASSERT_NOT_REACHED();
|
||||
}
|
||||
}
|
||||
|
||||
auto calleeCell = stackFrame->callee();
|
||||
if (!calleeCell || !calleeCell->isObject()) return;
|
||||
|
||||
JSC::JSObject *callee = JSC::jsCast<JSC::JSObject *>(calleeCell);
|
||||
// Does the code block have a user-defined name property?
|
||||
JSC::JSValue name = callee->getDirect(m_codeBlock->vm(), m_codeBlock->vm().propertyNames->name);
|
||||
if (name && name.isString()) {
|
||||
auto str = name.toWTFString(m_codeBlock->globalObject());
|
||||
frame->function_name = Zig::toZigString(str);
|
||||
return;
|
||||
}
|
||||
|
||||
/* For functions (either JSFunction or InternalFunction), fallback to their "native" name
|
||||
* property. Based on JSC::getCalculatedDisplayName, "inlining" the
|
||||
* JSFunction::calculatedDisplayName\InternalFunction::calculatedDisplayName calls */
|
||||
if (JSC::JSFunction *function =
|
||||
JSC::jsDynamicCast<JSC::JSFunction *>(m_codeBlock->vm(), callee)) {
|
||||
|
||||
WTF::String actualName = function->name(m_codeBlock->vm());
|
||||
if (!actualName.isEmpty() || function->isHostOrBuiltinFunction()) {
|
||||
frame->function_name = Zig::toZigString(actualName);
|
||||
return;
|
||||
}
|
||||
|
||||
auto inferred_name = function->jsExecutable()->name();
|
||||
frame->function_name = Zig::toZigString(inferred_name.string());
|
||||
}
|
||||
|
||||
if (JSC::InternalFunction *function =
|
||||
JSC::jsDynamicCast<JSC::InternalFunction *>(m_codeBlock->vm(), callee)) {
|
||||
// Based on JSC::InternalFunction::calculatedDisplayName, skipping the "displayName" property
|
||||
frame->function_name = Zig::toZigString(function->name());
|
||||
}
|
||||
}
|
||||
// Based on
|
||||
// https://github.com/mceSystems/node-jsc/blob/master/deps/jscshim/src/shim/JSCStackTrace.cpp#L298
|
||||
static void populateStackFramePosition(const JSC::StackFrame *stackFrame, ZigString *source_lines,
|
||||
int32_t *source_line_numbers, uint8_t source_lines_count,
|
||||
ZigStackFramePosition *position) {
|
||||
auto m_codeBlock = stackFrame->codeBlock();
|
||||
if (!m_codeBlock) return;
|
||||
|
||||
JSC::BytecodeIndex bytecodeOffset =
|
||||
stackFrame->hasBytecodeIndex() ? stackFrame->bytecodeIndex() : JSC::BytecodeIndex();
|
||||
|
||||
/* Get the "raw" position info.
|
||||
* Note that we're using m_codeBlock->unlinkedCodeBlock()->expressionRangeForBytecodeOffset
|
||||
* rather than m_codeBlock->expressionRangeForBytecodeOffset in order get the "raw" offsets and
|
||||
* avoid the CodeBlock's expressionRangeForBytecodeOffset modifications to the line and column
|
||||
* numbers, (we don't need the column number from it, and we'll calculate the line "fixes"
|
||||
* ourselves). */
|
||||
int startOffset = 0;
|
||||
int endOffset = 0;
|
||||
int divotPoint = 0;
|
||||
unsigned line = 0;
|
||||
unsigned unusedColumn = 0;
|
||||
m_codeBlock->unlinkedCodeBlock()->expressionRangeForBytecodeIndex(
|
||||
bytecodeOffset, divotPoint, startOffset, endOffset, line, unusedColumn);
|
||||
divotPoint += m_codeBlock->sourceOffset();
|
||||
|
||||
// TODO: evaluate if using the API from UnlinkedCodeBlock can be used instead of iterating
|
||||
// through source text.
|
||||
|
||||
/* On the first line of the source code, it seems that we need to "fix" the column with the
|
||||
* starting offset. We currently use codeBlock->source()->startPosition().m_column.oneBasedInt()
|
||||
* as the offset in the first line rather than codeBlock->firstLineColumnOffset(), which seems
|
||||
* simpler (and what CodeBlock::expressionRangeForBytecodeOffset does). This is because
|
||||
* firstLineColumnOffset values seems different from what we expect (according to v8's tests)
|
||||
* and I haven't dove into the relevant parts in JSC (yet) to figure out why. */
|
||||
unsigned columnOffset = line ? 0 : m_codeBlock->source().startColumn().zeroBasedInt();
|
||||
|
||||
// "Fix" the line number
|
||||
JSC::ScriptExecutable *executable = m_codeBlock->ownerExecutable();
|
||||
if (std::optional<int> overrideLine = executable->overrideLineNumber(m_codeBlock->vm())) {
|
||||
line = overrideLine.value();
|
||||
} else {
|
||||
line += executable->firstLine();
|
||||
}
|
||||
|
||||
// Calculate the staring\ending offsets of the entire expression
|
||||
int expressionStart = divotPoint - startOffset;
|
||||
int expressionStop = divotPoint + endOffset;
|
||||
|
||||
// Make sure the range is valid
|
||||
WTF::StringView sourceString = m_codeBlock->source().provider()->source();
|
||||
if (!expressionStop || expressionStart > static_cast<int>(sourceString.length())) { return; }
|
||||
|
||||
// Search for the beginning of the line
|
||||
unsigned int lineStart = expressionStart;
|
||||
while ((lineStart > 0) && ('\n' != sourceString[lineStart - 1])) { lineStart--; }
|
||||
// Search for the end of the line
|
||||
unsigned int lineStop = expressionStop;
|
||||
unsigned int sourceLength = sourceString.length();
|
||||
while ((lineStop < sourceLength) && ('\n' != sourceString[lineStop])) { lineStop++; }
|
||||
if (source_lines_count > 1 && source_lines != nullptr) {
|
||||
auto chars = sourceString.characters8();
|
||||
|
||||
// Most of the time, when you look at a stack trace, you want a couple lines above
|
||||
|
||||
source_lines[0] = {&chars[lineStart], lineStop - lineStart};
|
||||
source_line_numbers[0] = line;
|
||||
|
||||
if (lineStart > 0) {
|
||||
auto byte_offset_in_source_string = lineStart - 1;
|
||||
uint8_t source_line_i = 1;
|
||||
auto remaining_lines_to_grab = source_lines_count - 1;
|
||||
|
||||
while (byte_offset_in_source_string > 0 && remaining_lines_to_grab > 0) {
|
||||
unsigned int end_of_line_offset = byte_offset_in_source_string;
|
||||
|
||||
// This should probably be code points instead of newlines
|
||||
while (byte_offset_in_source_string > 0 && chars[byte_offset_in_source_string] != '\n') {
|
||||
byte_offset_in_source_string--;
|
||||
}
|
||||
|
||||
// We are at the beginning of the line
|
||||
source_lines[source_line_i] = {&chars[byte_offset_in_source_string],
|
||||
end_of_line_offset - byte_offset_in_source_string + 1};
|
||||
|
||||
source_line_numbers[source_line_i] = line - source_line_i;
|
||||
source_line_i++;
|
||||
|
||||
remaining_lines_to_grab--;
|
||||
|
||||
byte_offset_in_source_string -= byte_offset_in_source_string > 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Finally, store the source "positions" info.
|
||||
* Notes:
|
||||
* - The retrieved column seem to point the "end column". To make sure we're current, we'll
|
||||
*calculate the columns ourselves, since we've already found where the line starts. Note that in
|
||||
*v8 it should be 0-based here (in contrast the 1-based column number in v8::StackFrame).
|
||||
* - The static_casts are ugly, but comes from differences between JSC and v8's api, and should
|
||||
*be OK since no source should be longer than "max int" chars.
|
||||
* TODO: If expressionStart == expressionStop, then m_endColumn will be equal to m_startColumn.
|
||||
*Should we handle this case?
|
||||
*/
|
||||
position->expression_start = expressionStart;
|
||||
position->expression_stop = expressionStop;
|
||||
position->line = WTF::OrdinalNumber::fromOneBasedInt(static_cast<int>(line)).zeroBasedInt();
|
||||
position->column_start = (expressionStart - lineStart) + columnOffset;
|
||||
position->column_stop = position->column_start + (expressionStop - expressionStart);
|
||||
position->line_start = lineStart;
|
||||
position->line_stop = lineStop;
|
||||
|
||||
return;
|
||||
}
|
||||
static void populateStackFrame(ZigStackTrace *trace, const JSC::StackFrame *stackFrame,
|
||||
ZigStackFrame *frame, bool is_top) {
|
||||
populateStackFrameMetadata(stackFrame, frame);
|
||||
populateStackFramePosition(stackFrame, is_top ? trace->source_lines_ptr : nullptr,
|
||||
is_top ? trace->source_lines_numbers : nullptr,
|
||||
is_top ? trace->source_lines_to_collect : 0, &frame->position);
|
||||
}
|
||||
static void populateStackTrace(const WTF::Vector<JSC::StackFrame> &frames, ZigStackTrace *trace) {
|
||||
uint8_t frame_i = 0;
|
||||
size_t stack_frame_i = 0;
|
||||
const size_t total_frame_count = frames.size();
|
||||
const uint8_t frame_count =
|
||||
total_frame_count < trace->frames_len ? total_frame_count : trace->frames_len;
|
||||
|
||||
while (frame_i < frame_count && stack_frame_i < total_frame_count) {
|
||||
// Skip native frames
|
||||
while (stack_frame_i < total_frame_count && !(&frames.at(stack_frame_i))->codeBlock() &&
|
||||
!(&frames.at(stack_frame_i))->isWasmFrame()) {
|
||||
stack_frame_i++;
|
||||
}
|
||||
if (stack_frame_i >= total_frame_count) break;
|
||||
|
||||
ZigStackFrame *frame = &trace->frames_ptr[frame_i];
|
||||
populateStackFrame(trace, &frames[stack_frame_i], frame, frame_i == 0);
|
||||
stack_frame_i++;
|
||||
frame_i++;
|
||||
}
|
||||
trace->frames_len = frame_i;
|
||||
}
|
||||
static void fromErrorInstance(ZigException *except, JSC::JSGlobalObject *global,
|
||||
JSC::ErrorInstance *err, const Vector<JSC::StackFrame> *stackTrace,
|
||||
JSC::JSValue val) {
|
||||
JSC::JSObject *obj = JSC::jsDynamicCast<JSC::JSObject *>(global->vm(), val);
|
||||
if (stackTrace != nullptr && stackTrace->size() > 0) {
|
||||
populateStackTrace(*stackTrace, &except->stack);
|
||||
} else if (err->stackTrace() != nullptr && err->stackTrace()->size() > 0) {
|
||||
populateStackTrace(*err->stackTrace(), &except->stack);
|
||||
}
|
||||
|
||||
except->code = (unsigned char)err->errorType();
|
||||
if (err->isStackOverflowError()) { except->code = 253; }
|
||||
if (err->isOutOfMemoryError()) { except->code = 8; }
|
||||
|
||||
if (obj->hasProperty(global, global->vm().propertyNames->message)) {
|
||||
except->message = Zig::toZigString(
|
||||
obj->getDirect(global->vm(), global->vm().propertyNames->message).toWTFString(global));
|
||||
|
||||
} else {
|
||||
except->message = Zig::toZigString(err->sanitizedMessageString(global));
|
||||
}
|
||||
except->name = Zig::toZigString(err->sanitizedNameString(global));
|
||||
except->runtime_type = err->runtimeTypeForCause();
|
||||
|
||||
except->exception = err;
|
||||
}
|
||||
|
||||
void exceptionFromString(ZigException *except, JSC::JSValue value, JSC::JSGlobalObject *global) {
|
||||
// Fallback case for when it's a user-defined ErrorLike-object that doesn't inherit from
|
||||
// ErrorInstance
|
||||
|
||||
@@ -373,12 +373,30 @@ pub const JSModuleLoader = extern struct {
        });
    }

+   pub fn callExportedFunction(
+       globalObject: *JSGlobalObject,
+       specifier: ZigString,
+       function_name: ZigString,
+       arguments_ptr: [*]JSValue,
+       arguments_len: u8,
+       exception: *ZigException,
+   ) JSValue {
+       return shim.cppFn("callExportedFunction", .{
+           globalObject,
+           specifier,
+           function_name,
+           arguments_ptr,
+           arguments_len,
+           exception,
+       });
+   }
    // pub fn dependencyKeysIfEvaluated(this: *JSModuleLoader, globalObject: *JSGlobalObject, moduleRecord: *JSModuleRecord) *JSValue {
    //     return shim.cppFn("dependencyKeysIfEvaluated", .{ this, globalObject, moduleRecord });
    // }

    pub const Extern = [_][]const u8{
        // "dependencyKeysIfEvaluated",
+       "callExportedFunction",
        "evaluate",
        "loadAndEvaluateModuleEntryPoint",
        "loadAndEvaluateModule",
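A hedged sketch of how the new binding might be driven from Zig. Everything here is an assumption rather than code from this diff: the specifier and function name are invented, a `global: *JSGlobalObject` is assumed to be in scope, and ZigString is assumed to be constructible from a slice's `ptr` and `len` (the C++ side only shows that it carries those two fields).

// Hypothetical call site, not part of this diff.
const spec: []const u8 = "macro:./dogeify";
const name: []const u8 = "dogeify";
var exception: ZigException = undefined;
var args = [_]JSValue{};
const ret = JSModuleLoader.callExportedFunction(
    global,
    ZigString{ .ptr = spec.ptr, .len = spec.len },
    ZigString{ .ptr = name.ptr, .len = name.len },
    &args,
    0,
    &exception,
);
// `ret` is the macro's return value as a JSValue; on an uncaught exception the
// binding fills `exception` and returns an empty value instead.
_ = ret;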
@@ -821,7 +839,7 @@ pub const JSGlobalObject = extern struct {

    const cppFn = shim.cppFn;

-   pub fn ref(this: *JSGlobalObject) C_API.JSContextRef {
+   pub inline fn ref(this: *JSGlobalObject) C_API.JSContextRef {
        return @ptrCast(C_API.JSContextRef, this);
    }
    pub const ctx = ref;
@@ -1427,11 +1445,11 @@ pub const JSValue = enum(i64) {
    }

    pub inline fn asRef(this: JSValue) C_API.JSValueRef {
-       return @intToPtr(C_API.JSValueRef, @intCast(usize, @enumToInt(this)));
+       return @intToPtr(C_API.JSValueRef, @bitCast(usize, @enumToInt(this)));
    }

    pub inline fn fromRef(this: C_API.JSValueRef) JSValue {
-       return @intToEnum(JSValue, @intCast(i64, @ptrToInt(this)));
+       return @intToEnum(JSValue, @bitCast(i64, @ptrToInt(this)));
    }

    pub inline fn asObjectRef(this: JSValue) C_API.JSObjectRef {

@@ -1439,7 +1457,7 @@ pub const JSValue = enum(i64) {
    }

    pub inline fn asVoid(this: JSValue) *c_void {
-       return @intToPtr(*c_void, @intCast(usize, @enumToInt(this)));
+       return @intToPtr(*c_void, @bitCast(usize, @enumToInt(this)));
    }

    pub const Extern = [_][]const u8{ "toZigString", "createStringArray", "createEmptyObject", "putRecord", "asPromise", "isClass", "getNameProperty", "getClassName", "getErrorsProperty", "toInt32", "toBoolean", "isInt32", "isIterable", "forEach", "isAggregateError", "toZigException", "isException", "toWTFString", "hasProperty", "getPropertyNames", "getDirect", "putDirect", "get", "getIfExists", "asString", "asObject", "asNumber", "isError", "jsNull", "jsUndefined", "jsTDZValue", "jsBoolean", "jsDoubleNumber", "jsNumberFromDouble", "jsNumberFromChar", "jsNumberFromU16", "jsNumberFromInt32", "jsNumberFromInt64", "jsNumberFromUint64", "isUndefined", "isNull", "isUndefinedOrNull", "isBoolean", "isAnyInt", "isUInt32AsAnyInt", "isInt32AsAnyInt", "isNumber", "isString", "isBigInt", "isHeapBigInt", "isBigInt32", "isSymbol", "isPrimitive", "isGetterSetter", "isCustomGetterSetter", "isObject", "isCell", "asCell", "toString", "toStringOrNull", "toPropertyKey", "toPropertyKeyValue", "toObject", "toString", "getPrototype", "getPropertyByPropertyName", "eqlValue", "eqlCell", "isCallable" };
@@ -1,4 +1,4 @@
|
||||
//-- AUTOGENERATED FILE -- 1631749917
|
||||
//-- AUTOGENERATED FILE -- 1632030969
|
||||
// clang-format off
|
||||
#pragma once
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
//-- AUTOGENERATED FILE -- 1631749917
|
||||
//-- AUTOGENERATED FILE -- 1632030969
|
||||
// clang-format off
|
||||
#pragma once
|
||||
|
||||
@@ -100,10 +100,10 @@ typedef void* JSClassRef;
|
||||
typedef bJSC__JSGlobalObject JSC__JSGlobalObject; // JSC::JSGlobalObject
|
||||
typedef bJSC__JSFunction JSC__JSFunction; // JSC::JSFunction
|
||||
typedef struct JSC__ArrayPrototype JSC__ArrayPrototype; // JSC::ArrayPrototype
|
||||
typedef struct JSC__AsyncFunctionPrototype JSC__AsyncFunctionPrototype; // JSC::AsyncFunctionPrototype
|
||||
typedef bJSC__Identifier JSC__Identifier; // JSC::Identifier
|
||||
typedef bJSC__JSPromise JSC__JSPromise; // JSC::JSPromise
|
||||
typedef ZigException ZigException;
|
||||
typedef struct JSC__AsyncFunctionPrototype JSC__AsyncFunctionPrototype; // JSC::AsyncFunctionPrototype
|
||||
typedef bJSC__JSPromise JSC__JSPromise; // JSC::JSPromise
|
||||
typedef bJSC__Identifier JSC__Identifier; // JSC::Identifier
|
||||
typedef struct JSC__SetIteratorPrototype JSC__SetIteratorPrototype; // JSC::SetIteratorPrototype
|
||||
typedef bJSC__SourceCode JSC__SourceCode; // JSC::SourceCode
|
||||
typedef bJSC__JSCell JSC__JSCell; // JSC::JSCell
|
||||
@@ -139,8 +139,8 @@ typedef void* JSClassRef;
|
||||
class JSObject;
|
||||
class AsyncIteratorPrototype;
|
||||
class AsyncGeneratorFunctionPrototype;
|
||||
class Identifier;
|
||||
class JSPromise;
|
||||
class Identifier;
|
||||
class RegExpPrototype;
|
||||
class AsyncFunctionPrototype;
|
||||
class CatchScope;
|
||||
@@ -193,8 +193,8 @@ typedef void* JSClassRef;
|
||||
using JSC__JSObject = JSC::JSObject;
|
||||
using JSC__AsyncIteratorPrototype = JSC::AsyncIteratorPrototype;
|
||||
using JSC__AsyncGeneratorFunctionPrototype = JSC::AsyncGeneratorFunctionPrototype;
|
||||
using JSC__Identifier = JSC::Identifier;
|
||||
using JSC__JSPromise = JSC::JSPromise;
|
||||
using JSC__Identifier = JSC::Identifier;
|
||||
using JSC__RegExpPrototype = JSC::RegExpPrototype;
|
||||
using JSC__AsyncFunctionPrototype = JSC::AsyncFunctionPrototype;
|
||||
using JSC__CatchScope = JSC::CatchScope;
|
||||
@@ -267,6 +267,7 @@ CPP_DECL void Inspector__ScriptArguments__release(Inspector__ScriptArguments* ar
|
||||
|
||||
#pragma mark - JSC::JSModuleLoader
|
||||
|
||||
CPP_DECL JSC__JSValue JSC__JSModuleLoader__callExportedFunction(JSC__JSGlobalObject* arg0, ZigString arg1, ZigString arg2, JSC__JSValue* arg3, unsigned char arg4, ZigException* arg5);
|
||||
CPP_DECL bool JSC__JSModuleLoader__checkSyntax(JSC__JSGlobalObject* arg0, const JSC__SourceCode* arg1, bool arg2);
|
||||
CPP_DECL JSC__JSValue JSC__JSModuleLoader__evaluate(JSC__JSGlobalObject* arg0, const unsigned char* arg1, size_t arg2, const unsigned char* arg3, size_t arg4, JSC__JSValue JSValue5, JSC__JSValue* arg6);
|
||||
CPP_DECL JSC__JSInternalPromise* JSC__JSModuleLoader__importModule(JSC__JSGlobalObject* arg0, const JSC__Identifier* arg1);
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -8,7 +8,7 @@ const NodeModuleBundle = @import("../../node_module_bundle.zig").NodeModuleBundl
|
||||
const logger = @import("../../logger.zig");
|
||||
const Api = @import("../../api/schema.zig").Api;
|
||||
const options = @import("../../options.zig");
|
||||
const Bundler = @import("../../bundler.zig").ServeBundler;
|
||||
const Bundler = @import("../../bundler.zig").Bundler;
|
||||
const js_printer = @import("../../js_printer.zig");
|
||||
const hash_map = @import("../../hash_map.zig");
|
||||
const http = @import("../../http.zig");
|
||||
|
||||
@@ -7,10 +7,12 @@ const NodeModuleBundle = @import("../../node_module_bundle.zig").NodeModuleBundl
|
||||
const logger = @import("../../logger.zig");
|
||||
const Api = @import("../../api/schema.zig").Api;
|
||||
const options = @import("../../options.zig");
|
||||
const Bundler = @import("../../bundler.zig").ServeBundler;
|
||||
const Bundler = @import("../../bundler.zig").Bundler;
|
||||
const ServerEntryPoint = @import("../../bundler.zig").ServerEntryPoint;
|
||||
const MacroEntryPoint = @import("../../bundler.zig").MacroEntryPoint;
|
||||
const js_printer = @import("../../js_printer.zig");
|
||||
const js_parser = @import("../../js_parser.zig");
|
||||
const js_ast = @import("../../js_ast.zig");
|
||||
const hash_map = @import("../../hash_map.zig");
|
||||
const http = @import("../../http.zig");
|
||||
const ImportKind = ast.ImportKind;
|
||||
@@ -158,6 +160,41 @@ pub const Bun = struct {
return JSValue.createStringArray(VirtualMachine.vm.global, styles.ptr, styles.len).asRef();
}

pub fn registerMacro(
this: void,
ctx: js.JSContextRef,
function: js.JSObjectRef,
thisObject: js.JSObjectRef,
arguments: []const js.JSValueRef,
exception: js.ExceptionRef,
) js.JSValueRef {
if (arguments.len != 2 or !js.JSValueIsNumber(ctx, arguments[0])) {
JSError(getAllocator(ctx), "Internal error registering macros: invalid args", .{}, ctx, exception);
return js.JSValueMakeUndefined(ctx);
}
// TODO: make this faster
const id = @truncate(i32, @floatToInt(i64, js.JSValueToNumber(ctx, arguments[0], exception)));
if (id == -1 or id == 0) {
JSError(getAllocator(ctx), "Internal error registering macros: invalid id", .{}, ctx, exception);
return js.JSValueMakeUndefined(ctx);
}

if (!js.JSValueIsObject(ctx, arguments[1]) or !js.JSObjectIsFunction(ctx, arguments[1])) {
JSError(getAllocator(ctx), "Macro must be a function. Received: {s}", .{@tagName(js.JSValueGetType(ctx, arguments[1]))}, ctx, exception);
return js.JSValueMakeUndefined(ctx);
}

var get_or_put_result = VirtualMachine.vm.macros.getOrPut(id) catch unreachable;
if (get_or_put_result.found_existing) {
js.JSValueUnprotect(ctx, get_or_put_result.value_ptr.*);
}

js.JSValueProtect(ctx, arguments[1]);
get_or_put_result.value_ptr.* = arguments[1];

return js.JSValueMakeUndefined(ctx);
}
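A minimal TypeScript sketch of the bookkeeping registerMacro performs, assuming the protected JSObjectRef callback can be modeled as a plain function held in an ordinary Map:

// Sketch of the id -> macro-callback registry. Replacing an existing entry mirrors
// the JSValueUnprotect / JSValueProtect swap in the Zig code above.
type MacroCallback = (...args: unknown[]) => unknown;

const macros = new Map<number, MacroCallback>();

function registerMacro(id: number, callback: MacroCallback): void {
  if (!Number.isInteger(id) || id === 0 || id === -1) {
    throw new Error("Internal error registering macros: invalid id");
  }
  if (typeof callback !== "function") {
    throw new Error(`Macro must be a function. Received: ${typeof callback}`);
  }
  macros.set(id, callback);
}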
pub fn getRouteFiles(
|
||||
this: void,
|
||||
ctx: js.JSContextRef,
|
||||
@@ -330,6 +367,13 @@ pub const Bun = struct {
|
||||
.@"return" = "string",
|
||||
},
|
||||
},
|
||||
.registerMacro = .{
|
||||
.rfn = Bun.registerMacro,
|
||||
.ts = d.ts{
|
||||
.name = "registerMacro",
|
||||
.@"return" = "undefined",
|
||||
},
|
||||
},
|
||||
},
|
||||
.{
|
||||
.main = .{
|
||||
@@ -359,28 +403,26 @@ pub const Bun = struct {
|
||||
const bun_file_import_path = "/node_modules.server.bun";
|
||||
pub const LazyClasses = [_]type{};
|
||||
|
||||
pub const Module = struct {
|
||||
reload_pending: bool = false,
|
||||
};
|
||||
|
||||
// If you read JavascriptCore/API/JSVirtualMachine.mm - https://github.com/WebKit/WebKit/blob/acff93fb303baa670c055cb24c2bad08691a01a0/Source/JavaScriptCore/API/JSVirtualMachine.mm#L101
|
||||
// We can see that it's sort of like std.mem.Allocator but for JSGlobalContextRef, to support Automatic Reference Counting
|
||||
// It's unavailable on Linux
|
||||
pub const VirtualMachine = struct {
|
||||
const RequireCacheType = std.AutoHashMap(u32, *Module);
|
||||
global: *JSGlobalObject,
|
||||
allocator: *std.mem.Allocator,
|
||||
node_modules: ?*NodeModuleBundle = null,
|
||||
bundler: Bundler,
|
||||
|
||||
macro_mode: bool = false,
|
||||
|
||||
watcher: ?*http.Watcher = null,
|
||||
console: *ZigConsoleClient,
|
||||
require_cache: RequireCacheType,
|
||||
log: *logger.Log,
|
||||
event_listeners: EventListenerMixin.Map,
|
||||
main: string = "",
|
||||
process: js.JSObjectRef = null,
|
||||
blobs: *Blob.Group = undefined,
|
||||
flush_list: std.ArrayList(string),
|
||||
macro_entry_points: std.AutoArrayHashMap(i32, *MacroEntryPoint),
|
||||
entry_point: ServerEntryPoint = undefined,
|
||||
|
||||
arena: *std.heap.ArenaAllocator = undefined,
|
||||
@@ -389,8 +431,21 @@ pub const VirtualMachine = struct {
transpiled_count: usize = 0,
resolved_count: usize = 0,
had_errors: bool = false,
pub var vm_loaded = false;
pub var vm: *VirtualMachine = undefined;
macros: MacroMap,
pub threadlocal var vm_loaded = false;
pub threadlocal var vm: *VirtualMachine = undefined;

pub const MacroMap = std.AutoArrayHashMap(i32, js.JSObjectRef);

pub fn enableMacroMode(this: *VirtualMachine) void {
this.bundler.options.platform = .bunMacro;
this.macro_mode = true;
}

pub fn disableMacroMode(this: *VirtualMachine) void {
this.bundler.options.platform = .bun;
this.macro_mode = false;
}
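A short sketch of how enableMacroMode / disableMacroMode are intended to be paired around a macro invocation, with try/finally standing in for the `defer vm.disableMacroMode()` seen at the call sites; the state object and names are illustrative:

// Toggle-with-restore pattern around macro execution.
type Platform = "bun" | "bunMacro";

const vmState = { platform: "bun" as Platform, macroMode: false };

function withMacroMode<T>(run: () => T): T {
  vmState.platform = "bunMacro";
  vmState.macroMode = true;
  try {
    return run();
  } finally {
    vmState.platform = "bun";
    vmState.macroMode = false;
  }
}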
pub fn init(
|
||||
allocator: *std.mem.Allocator,
|
||||
@@ -421,12 +476,13 @@ pub const VirtualMachine = struct {
|
||||
.global = undefined,
|
||||
.allocator = allocator,
|
||||
.entry_point = ServerEntryPoint{},
|
||||
.require_cache = RequireCacheType.init(allocator),
|
||||
.event_listeners = EventListenerMixin.Map.init(allocator),
|
||||
.bundler = bundler,
|
||||
.console = console,
|
||||
.node_modules = bundler.options.node_modules_bundle,
|
||||
.log = log,
|
||||
.macros = MacroMap.init(allocator),
|
||||
.macro_entry_points = @TypeOf(VirtualMachine.vm.macro_entry_points).init(allocator),
|
||||
.flush_list = std.ArrayList(string).init(allocator),
|
||||
.blobs = try Blob.Group.init(allocator),
|
||||
};
|
||||
@@ -557,7 +613,7 @@ pub const VirtualMachine = struct {
|
||||
var parse_result = ParseResult{ .source = vm.entry_point.source, .ast = main_ast, .loader = .js, .input_fd = null };
|
||||
var file_path = Fs.Path.init(bundler.fs.top_level_dir);
|
||||
file_path.name.dir = bundler.fs.top_level_dir;
|
||||
file_path.name.base = "bun:main";
|
||||
file_path.name.base = main_file_name;
|
||||
try bundler.linker.link(
|
||||
file_path,
|
||||
&parse_result,
|
||||
@@ -586,6 +642,19 @@ pub const VirtualMachine = struct {
|
||||
.hash = 0,
|
||||
.bytecodecache_fd = 0,
|
||||
};
|
||||
} else if (_specifier.len > js_ast.Macro.namespaceWithColon.len and
|
||||
strings.eqlComptimeIgnoreLen(_specifier[0..js_ast.Macro.namespaceWithColon.len], js_ast.Macro.namespaceWithColon))
|
||||
{
|
||||
if (vm.macro_entry_points.get(MacroEntryPoint.generateIDFromSpecifier(_specifier))) |entry| {
|
||||
return ResolvedSource{
|
||||
.allocator = null,
|
||||
.source_code = ZigString.init(entry.source.contents),
|
||||
.specifier = ZigString.init(_specifier),
|
||||
.source_url = ZigString.init(_specifier),
|
||||
.hash = 0,
|
||||
.bytecodecache_fd = 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const specifier = normalizeSpecifier(_specifier);
|
||||
@@ -698,10 +767,16 @@ pub const VirtualMachine = struct {
|
||||
ret.result = null;
|
||||
ret.path = vm.entry_point.source.path.text;
|
||||
return;
|
||||
} else if (specifier.len > js_ast.Macro.namespaceWithColon.len and strings.eqlComptimeIgnoreLen(specifier[0..js_ast.Macro.namespaceWithColon.len], js_ast.Macro.namespaceWithColon)) {
|
||||
ret.result = null;
|
||||
ret.path = specifier;
|
||||
return;
|
||||
}
|
||||
|
||||
const is_special_source = strings.eqlComptime(source, main_file_name) or js_ast.Macro.isMacroPath(source);
|
||||
|
||||
const result = try vm.bundler.resolver.resolve(
|
||||
if (!strings.eqlComptime(source, main_file_name)) Fs.PathName.init(source).dirWithTrailingSlash() else VirtualMachine.vm.bundler.fs.top_level_dir,
|
||||
if (!is_special_source) Fs.PathName.init(source).dirWithTrailingSlash() else VirtualMachine.vm.bundler.fs.top_level_dir,
|
||||
specifier,
|
||||
.stmt,
|
||||
);
|
||||
@@ -977,6 +1052,46 @@ pub const VirtualMachine = struct {
return promise;
}

pub fn loadMacroEntryPoint(this: *VirtualMachine, entry_path: string, function_name: string, specifier: string, hash: i32) !*JSInternalPromise {
var entry_point_entry = try this.macro_entry_points.getOrPut(hash);

if (!entry_point_entry.found_existing) {
var macro_entry_pointer: *MacroEntryPoint = this.allocator.create(MacroEntryPoint) catch unreachable;
entry_point_entry.value_ptr.* = macro_entry_pointer;
try macro_entry_pointer.generate(&this.bundler, Fs.PathName.init(entry_path), function_name, hash, specifier);
}
var entry_point = entry_point_entry.value_ptr.*;

var promise: *JSInternalPromise = undefined;
// We first import the node_modules bundle. This prevents any potential TDZ issues.
// The contents of the node_modules bundle are lazy, so hopefully this should be pretty quick.
if (this.node_modules != null) {
promise = JSModuleLoader.loadAndEvaluateModule(this.global, ZigString.init(std.mem.span(bun_file_import_path)));

this.global.vm().drainMicrotasks();

while (promise.status(this.global.vm()) == JSPromise.Status.Pending) {
this.global.vm().drainMicrotasks();
}

if (promise.status(this.global.vm()) == JSPromise.Status.Rejected) {
return promise;
}

_ = promise.result(this.global.vm());
}

promise = JSModuleLoader.loadAndEvaluateModule(this.global, ZigString.init(entry_point.source.path.text));

this.global.vm().drainMicrotasks();

while (promise.status(this.global.vm()) == JSPromise.Status.Pending) {
this.global.vm().drainMicrotasks();
}

return promise;
}
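A TypeScript sketch of the same flow, with async/await standing in for the drainMicrotasks() polling loop; the cache and generated-entry step are simplified and none of these names are Bun API:

// hash -> path of the (already generated) macro entry module.
const macroEntryPoints = new Map<number, string>();

async function loadMacroEntryPoint(
  entryPath: string,
  functionName: string,
  hash: number,
): Promise<unknown> {
  // getOrPut: generate the entry point at most once per hash.
  if (!macroEntryPoints.has(hash)) {
    macroEntryPoints.set(hash, entryPath);
  }
  const path = macroEntryPoints.get(hash)!;

  // The real implementation first evaluates the node_modules bundle
  // ("/node_modules.server.bun") so the macro module never observes a TDZ,
  // then loads and evaluates the generated entry module.
  const mod = (await import(path)) as Record<string, unknown>;
  return mod[functionName];
}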
// When the Error-like object is one of our own, it's best to rely on the object directly instead of serializing it to a ZigException.
|
||||
// This is for:
|
||||
// - BuildError
|
||||
@@ -1056,7 +1171,7 @@ pub const VirtualMachine = struct {
|
||||
var build_error = private_data_ptr.as(BuildError);
|
||||
if (!build_error.logged) {
|
||||
var writer = Output.errorWriter();
|
||||
build_error.msg.formatWriter(@TypeOf(writer), writer, allow_ansi_color) catch {};
|
||||
build_error.msg.writeFormat(writer, allow_ansi_color) catch {};
|
||||
build_error.logged = true;
|
||||
}
|
||||
this.had_errors = this.had_errors or build_error.msg.kind == .err;
|
||||
@@ -1072,7 +1187,7 @@ pub const VirtualMachine = struct {
|
||||
var resolve_error = private_data_ptr.as(ResolveError);
|
||||
if (!resolve_error.logged) {
|
||||
var writer = Output.errorWriter();
|
||||
resolve_error.msg.formatWriter(@TypeOf(writer), writer, allow_ansi_color) catch {};
|
||||
resolve_error.msg.writeFormat(writer, allow_ansi_color) catch {};
|
||||
resolve_error.logged = true;
|
||||
}
|
||||
|
||||
|
||||
501
src/js_ast.zig
@@ -321,7 +321,7 @@ pub const Binding = struct {
|
||||
*B.Object => {
|
||||
return Binding{ .loc = loc, .data = B{ .b_object = t } };
|
||||
},
|
||||
*B.Missing => {
|
||||
B.Missing => {
|
||||
return Binding{ .loc = loc, .data = B{ .b_missing = t } };
|
||||
},
|
||||
else => {
|
||||
@@ -1700,6 +1700,7 @@ pub const Stmt = struct {
|
||||
pub const All = NewBaseStore(Union, 128);
|
||||
|
||||
threadlocal var has_inited = false;
|
||||
pub threadlocal var disable_reset = false;
|
||||
pub fn create(allocator: *std.mem.Allocator) void {
|
||||
if (has_inited) {
|
||||
return;
|
||||
@@ -1710,6 +1711,7 @@ pub const Stmt = struct {
|
||||
}
|
||||
|
||||
pub fn reset() void {
|
||||
if (disable_reset) return;
|
||||
All.reset();
|
||||
}
|
||||
|
||||
@@ -2367,7 +2369,7 @@ pub const Expr = struct {
|
||||
return Expr{
|
||||
.loc = loc,
|
||||
.data = Data{
|
||||
.e_number = Data.Store.All.append(Type, st),
|
||||
.e_number = st,
|
||||
},
|
||||
};
|
||||
},
|
||||
@@ -2542,8 +2544,160 @@ pub const Expr = struct {
e_class,
e_require,

pub inline fn toPublicValue(this: Tag) u16 {
return @intCast(u16, @enumToInt(this)) + 16;
}

pub inline fn fromPublicValue(comptime ValueType: type, value: ValueType) ?Tag {
if (value < 16 or value > @enumToInt(Tag.e_require) + 16) return null;

switch (comptime ValueType) {
f64 => {
return @intToEnum(Tag, @floatToInt(u16, value - 16));
},
else => {
return @intToEnum(Tag, @intCast(u16, value) - 16);
},
}
}
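A sketch of the +16 offset round trip in TypeScript, using a stand-in enum rather than the real Expr.Tag:

// Internal tag values are shifted by a fixed offset before being exposed to JavaScript.
enum Tag {
  e_array,
  e_unary,
  e_binary,
  // ...remaining expression tags elided...
  e_require = 35,
}

const PUBLIC_OFFSET = 16;

function toPublicValue(tag: Tag): number {
  return tag + PUBLIC_OFFSET;
}

function fromPublicValue(value: number): Tag | null {
  const raw = Math.trunc(value) - PUBLIC_OFFSET;
  if (raw < 0 || raw > Tag.e_require) return null;
  return raw as Tag;
}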
pub const names_strings = [_]string{
|
||||
"<array>",
|
||||
"<unary>",
|
||||
"<binary>",
|
||||
"<boolean>",
|
||||
"<super>",
|
||||
"<null>",
|
||||
"<void>",
|
||||
"<new>",
|
||||
"<function>",
|
||||
"<ntarget>",
|
||||
"<import>",
|
||||
"<call>",
|
||||
"<dot>",
|
||||
"<index>",
|
||||
"<arrow>",
|
||||
"<id>",
|
||||
"<importid>",
|
||||
"<private>",
|
||||
"<jsx>",
|
||||
"<missing>",
|
||||
"<number>",
|
||||
"<bigint>",
|
||||
"<object>",
|
||||
"<spread>",
|
||||
"<string>",
|
||||
"<tpart>",
|
||||
"<template>",
|
||||
"<regexp>",
|
||||
"<await>",
|
||||
"<yield>",
|
||||
"<if>",
|
||||
"<resolve>",
|
||||
"<import>",
|
||||
"<this>",
|
||||
"<class>",
|
||||
"<require>",
|
||||
};
|
||||
pub const valid_names_list: string = brk: {
|
||||
var names_list = names_strings[0];
|
||||
for (names_strings[1..]) |name_str, i| {
|
||||
names_list = names_list ++ "\n " ++ name_str;
|
||||
}
|
||||
break :brk " " ++ names_list;
|
||||
};
|
||||
|
||||
pub const TagName = std.EnumArray(Tag, string);
|
||||
|
||||
pub const names: TagName = brk: {
|
||||
var array = TagName.initUndefined();
|
||||
array.set(.e_array, names_strings[0]);
|
||||
array.set(.e_unary, names_strings[1]);
|
||||
array.set(.e_binary, names_strings[2]);
|
||||
array.set(.e_boolean, names_strings[3]);
|
||||
array.set(.e_super, names_strings[4]);
|
||||
array.set(.e_null, names_strings[5]);
|
||||
array.set(.e_undefined, names_strings[6]);
|
||||
array.set(.e_new, names_strings[7]);
|
||||
array.set(.e_function, names_strings[8]);
|
||||
array.set(.e_new_target, names_strings[9]);
|
||||
array.set(.e_import_meta, names_strings[10]);
|
||||
array.set(.e_call, names_strings[11]);
|
||||
array.set(.e_dot, names_strings[12]);
|
||||
array.set(.e_index, names_strings[13]);
|
||||
array.set(.e_arrow, names_strings[14]);
|
||||
array.set(.e_identifier, names_strings[15]);
|
||||
array.set(.e_import_identifier, names_strings[16]);
|
||||
array.set(.e_private_identifier, names_strings[17]);
|
||||
array.set(.e_jsx_element, names_strings[18]);
|
||||
array.set(.e_missing, names_strings[19]);
|
||||
array.set(.e_number, names_strings[20]);
|
||||
array.set(.e_big_int, names_strings[21]);
|
||||
array.set(.e_object, names_strings[22]);
|
||||
array.set(.e_spread, names_strings[23]);
|
||||
array.set(.e_string, names_strings[24]);
|
||||
array.set(.e_template_part, names_strings[25]);
|
||||
array.set(.e_template, names_strings[26]);
|
||||
array.set(.e_reg_exp, names_strings[27]);
|
||||
array.set(.e_await, names_strings[28]);
|
||||
array.set(.e_yield, names_strings[29]);
|
||||
array.set(.e_if, names_strings[30]);
|
||||
array.set(.e_require_or_require_resolve, names_strings[31]);
|
||||
array.set(.e_import, names_strings[32]);
|
||||
array.set(.e_this, names_strings[33]);
|
||||
array.set(.e_class, names_strings[34]);
|
||||
array.set(.e_require, names_strings[35]);
|
||||
break :brk array;
|
||||
};
|
||||
pub const TagExactSizeMatcher = strings.ExactSizeMatcher(8);
|
||||
pub fn find(name_: string) ?Tag {
|
||||
return switch (TagExactSizeMatcher.match(name_)) {
|
||||
TagExactSizeMatcher.case("array") => Tag.e_array,
|
||||
TagExactSizeMatcher.case("unary") => Tag.e_unary,
|
||||
TagExactSizeMatcher.case("binary") => Tag.e_binary,
|
||||
TagExactSizeMatcher.case("boolean") => Tag.e_boolean,
|
||||
TagExactSizeMatcher.case("true") => Tag.e_boolean,
|
||||
TagExactSizeMatcher.case("false") => Tag.e_boolean,
|
||||
TagExactSizeMatcher.case("super") => Tag.e_super,
|
||||
TagExactSizeMatcher.case("null") => Tag.e_null,
|
||||
TagExactSizeMatcher.case("void") => Tag.e_undefined,
|
||||
TagExactSizeMatcher.case("new") => Tag.e_new,
|
||||
TagExactSizeMatcher.case("function") => Tag.e_function,
|
||||
TagExactSizeMatcher.case("ntarget") => Tag.e_new_target,
|
||||
TagExactSizeMatcher.case("imeta") => Tag.e_import_meta,
|
||||
TagExactSizeMatcher.case("call") => Tag.e_call,
|
||||
TagExactSizeMatcher.case("dot") => Tag.e_dot,
|
||||
TagExactSizeMatcher.case("index") => Tag.e_index,
|
||||
TagExactSizeMatcher.case("arrow") => Tag.e_arrow,
|
||||
TagExactSizeMatcher.case("id") => Tag.e_identifier,
|
||||
TagExactSizeMatcher.case("importid") => Tag.e_import_identifier,
|
||||
TagExactSizeMatcher.case("jsx") => Tag.e_jsx_element,
|
||||
TagExactSizeMatcher.case("missing") => Tag.e_missing,
|
||||
TagExactSizeMatcher.case("number") => Tag.e_number,
|
||||
TagExactSizeMatcher.case("bigint") => Tag.e_big_int,
|
||||
TagExactSizeMatcher.case("object") => Tag.e_object,
|
||||
TagExactSizeMatcher.case("spread") => Tag.e_spread,
|
||||
TagExactSizeMatcher.case("string") => Tag.e_string,
|
||||
TagExactSizeMatcher.case("tpart") => Tag.e_template_part,
|
||||
TagExactSizeMatcher.case("template") => Tag.e_template,
|
||||
TagExactSizeMatcher.case("regexp") => Tag.e_reg_exp,
|
||||
TagExactSizeMatcher.case("await") => Tag.e_await,
|
||||
TagExactSizeMatcher.case("yield") => Tag.e_yield,
|
||||
TagExactSizeMatcher.case("if") => Tag.e_if,
|
||||
TagExactSizeMatcher.case("import") => Tag.e_import,
|
||||
TagExactSizeMatcher.case("this") => Tag.e_this,
|
||||
TagExactSizeMatcher.case("class") => Tag.e_class,
|
||||
TagExactSizeMatcher.case("require") => Tag.e_require,
|
||||
else => null,
|
||||
};
|
||||
}
|
||||
|
||||
pub inline fn name(this: Tag) string {
|
||||
return names.get(this);
|
||||
}
|
||||
|
||||
pub fn jsonStringify(self: @This(), opts: anytype, o: anytype) !void {
|
||||
return try std.json.stringify(@tagName(self), opts, o);
|
||||
return try std.json.stringify(self.name(), opts, o);
|
||||
}
|
||||
|
||||
pub fn isArray(self: Tag) bool {
|
||||
@@ -3074,7 +3228,7 @@ pub const Expr = struct {
|
||||
e_import: *E.Import,
|
||||
|
||||
e_boolean: E.Boolean,
|
||||
e_number: *E.Number,
|
||||
e_number: E.Number,
|
||||
e_big_int: *E.BigInt,
|
||||
e_string: *E.String,
|
||||
|
||||
@@ -3129,6 +3283,7 @@ pub const Expr = struct {
|
||||
);
|
||||
|
||||
threadlocal var has_inited = false;
|
||||
pub threadlocal var disable_reset = false;
|
||||
pub fn create(allocator: *std.mem.Allocator) void {
|
||||
if (has_inited) {
|
||||
return;
|
||||
@@ -3140,6 +3295,7 @@ pub const Expr = struct {
|
||||
}
|
||||
|
||||
pub fn reset() void {
|
||||
if (disable_reset) return;
|
||||
All.reset();
|
||||
Identifier.reset();
|
||||
}
|
||||
@@ -3921,6 +4077,343 @@ pub fn printmem(comptime format: string, args: anytype) void {
|
||||
// Output.print(format, args);
|
||||
}
|
||||
|
||||
pub const Macro = struct {
const JavaScript = @import("./javascript/jsc/javascript.zig");
const JSC = @import("./javascript/jsc/bindings/bindings.zig");
const JSCBase = @import("./javascript/jsc/base.zig");
const Resolver = @import("./resolver/resolver.zig").Resolver;
const isPackagePath = @import("./resolver/resolver.zig").isPackagePath;
const ResolveResult = @import("./resolver/resolver.zig").Result;
const DotEnv = @import("./env_loader.zig");
const js = @import("./javascript/jsc/JavascriptCore.zig");
const Zig = @import("./javascript/jsc/bindings/exports.zig");
const Bundler = @import("./bundler.zig").Bundler;
const MacroEntryPoint = @import("./bundler.zig").MacroEntryPoint;

pub const namespace: string = "macro";
pub const namespaceWithColon: string = namespace ++ ":";

pub fn isMacroPath(str: string) bool {
return (str.len > namespaceWithColon.len and strings.eqlComptimeIgnoreLen(str[0..namespaceWithColon.len], namespaceWithColon));
}
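The same check in a brief TypeScript sketch, for readers skimming the Zig:

// A specifier participates in macro expansion when it carries the "macro:" namespace prefix.
const MACRO_PREFIX = "macro:";

function isMacroPath(specifier: string): boolean {
  return specifier.length > MACRO_PREFIX.length && specifier.startsWith(MACRO_PREFIX);
}

// isMacroPath("macro:./my-macro") === true
// isMacroPath("./my-macro") === false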
pub const MacroContext = struct {
|
||||
pub const MacroMap = std.AutoArrayHashMap(i32, Macro);
|
||||
|
||||
resolver: *Resolver,
|
||||
env: *DotEnv.Loader,
|
||||
macros: MacroMap,
|
||||
|
||||
pub fn init(bundler: *Bundler) MacroContext {
|
||||
return MacroContext{
|
||||
.macros = MacroMap.init(default_allocator),
|
||||
.resolver = &bundler.resolver,
|
||||
.env = bundler.env,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn call(
|
||||
this: *MacroContext,
|
||||
import_record_path: string,
|
||||
source_dir: string,
|
||||
log: *logger.Log,
|
||||
source: *const logger.Source,
|
||||
import_range: logger.Range,
|
||||
caller: Expr,
|
||||
args: []Expr,
|
||||
function_name: string,
|
||||
) anyerror!Expr {
|
||||
Expr.Data.Store.disable_reset = true;
|
||||
Stmt.Data.Store.disable_reset = true;
|
||||
defer Expr.Data.Store.disable_reset = false;
|
||||
defer Stmt.Data.Store.disable_reset = false;
|
||||
// const is_package_path = isPackagePath(specifier);
|
||||
std.debug.assert(isMacroPath(import_record_path));
|
||||
|
||||
const resolve_result = this.resolver.resolve(source_dir, import_record_path[namespaceWithColon.len..], .stmt) catch |err| {
|
||||
switch (err) {
|
||||
error.ModuleNotFound => {
|
||||
log.addResolveError(
|
||||
source,
|
||||
import_range,
|
||||
log.msgs.allocator,
|
||||
"Macro \"{s}\" not found",
|
||||
.{import_record_path},
|
||||
.stmt,
|
||||
) catch unreachable;
|
||||
return error.MacroNotFound;
|
||||
},
|
||||
else => {
|
||||
log.addRangeErrorFmt(
|
||||
source,
|
||||
import_range,
|
||||
log.msgs.allocator,
|
||||
"{s} resolving macro \"{s}\"",
|
||||
.{ @errorName(err), import_record_path },
|
||||
) catch unreachable;
|
||||
return err;
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
var specifier_buf: [64]u8 = undefined;
|
||||
var specifier_buf_len: u32 = 0;
|
||||
const hash = MacroEntryPoint.generateID(
|
||||
resolve_result.path_pair.primary.text,
|
||||
function_name,
|
||||
&specifier_buf,
|
||||
&specifier_buf_len,
|
||||
);
|
||||
|
||||
var macro_entry = this.macros.getOrPut(hash) catch unreachable;
|
||||
if (!macro_entry.found_existing) {
|
||||
macro_entry.value_ptr.* = Macro.init(
|
||||
default_allocator,
|
||||
this.resolver,
|
||||
resolve_result,
|
||||
log,
|
||||
this.env,
|
||||
function_name,
|
||||
specifier_buf[0..specifier_buf_len],
|
||||
hash,
|
||||
) catch |err| {
|
||||
macro_entry.value_ptr.* = Macro{ .resolver = undefined, .disabled = true };
|
||||
return err;
|
||||
};
|
||||
Output.flush();
|
||||
}
|
||||
defer Output.flush();
|
||||
|
||||
const macro = macro_entry.value_ptr.*;
|
||||
if (macro.disabled) {
|
||||
return caller;
|
||||
}
|
||||
macro.vm.enableMacroMode();
|
||||
defer macro.vm.disableMacroMode();
|
||||
return Macro.Runner.run(
|
||||
macro,
|
||||
log,
|
||||
default_allocator,
|
||||
function_name,
|
||||
caller,
|
||||
args,
|
||||
source,
|
||||
hash,
|
||||
);
|
||||
// this.macros.getOrPut(key: K)
|
||||
}
|
||||
};
|
||||
|
||||
pub const JSExpr = struct {
|
||||
expr: Expr,
|
||||
pub const Class = JSCBase.NewClass(
|
||||
JSExpr,
|
||||
.{
|
||||
.name = "JSExpr",
|
||||
.read_only = true,
|
||||
},
|
||||
.{
|
||||
.toString = .{
|
||||
.rfn = toString,
|
||||
},
|
||||
// .toNumber = .{
|
||||
// .rfn = toNumber,
|
||||
// },
|
||||
},
|
||||
.{
|
||||
.tag = .{
|
||||
.get = getTag,
|
||||
.ro = true,
|
||||
},
|
||||
.tagName = .{
|
||||
.get = getTagName,
|
||||
.ro = true,
|
||||
},
|
||||
.position = .{
|
||||
.get = getPosition,
|
||||
.ro = true,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
// pub fn isInstanceOf(
|
||||
// ctx: js.JSContextRef,
|
||||
// obj: js.JSObjectRef,
|
||||
// value: js.JSValueRef,
|
||||
// exception: js.ExceptionRef,
|
||||
// ) bool {
|
||||
// js.JSValueToNumber(ctx, value, exception);
|
||||
// }
|
||||
|
||||
pub fn toString(
|
||||
this: *JSExpr,
|
||||
ctx: js.JSContextRef,
|
||||
function: js.JSObjectRef,
|
||||
thisObject: js.JSObjectRef,
|
||||
arguments: []const js.JSValueRef,
|
||||
exception: js.ExceptionRef,
|
||||
) js.JSObjectRef {
|
||||
switch (this.expr.data) {
|
||||
.e_string => |str| {
|
||||
if (str.isBlank()) {
|
||||
return JSC.ZigString.init("").toValue(JavaScript.VirtualMachine.vm.global).asRef();
|
||||
}
|
||||
|
||||
if (str.isUTF8()) {
|
||||
return JSC.ZigString.init(str.utf8).toValue(JavaScript.VirtualMachine.vm.global).asRef();
|
||||
} else {
|
||||
return js.JSValueMakeString(ctx, js.JSStringCreateWithCharactersNoCopy(str.value.ptr, str.value.len));
|
||||
}
|
||||
},
|
||||
.e_template => |template| {
|
||||
const str = template.head;
|
||||
|
||||
if (str.isBlank()) {
|
||||
return JSC.ZigString.init("").toValue(JavaScript.VirtualMachine.vm.global).asRef();
|
||||
}
|
||||
|
||||
if (str.isUTF8()) {
|
||||
return JSC.ZigString.init(str.utf8).toValue(JavaScript.VirtualMachine.vm.global).asRef();
|
||||
} else {
|
||||
return js.JSValueMakeString(ctx, js.JSStringCreateWithCharactersNoCopy(str.value.ptr, str.value.len));
|
||||
}
|
||||
},
|
||||
else => {
|
||||
return JSC.ZigString.init("").toValue(JavaScript.VirtualMachine.vm.global).asRef();
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn getTag(
|
||||
this: *JSExpr,
|
||||
ctx: js.JSContextRef,
|
||||
thisObject: js.JSValueRef,
|
||||
prop: js.JSStringRef,
|
||||
exception: js.ExceptionRef,
|
||||
) js.JSObjectRef {
|
||||
return JSC.JSValue.jsNumberFromU16(@intCast(u16, @enumToInt(std.meta.activeTag(this.expr.data)))).asRef();
|
||||
}
|
||||
pub fn getTagName(
|
||||
this: *JSExpr,
|
||||
ctx: js.JSContextRef,
|
||||
thisObject: js.JSValueRef,
|
||||
prop: js.JSStringRef,
|
||||
exception: js.ExceptionRef,
|
||||
) js.JSObjectRef {
|
||||
return JSC.ZigString.init(@tagName(this.expr.data)).toValue(JavaScript.VirtualMachine.vm.global).asRef();
|
||||
}
|
||||
pub fn getPosition(
|
||||
this: *JSExpr,
|
||||
ctx: js.JSContextRef,
|
||||
thisObject: js.JSValueRef,
|
||||
prop: js.JSStringRef,
|
||||
exception: js.ExceptionRef,
|
||||
) js.JSObjectRef {
|
||||
return JSC.JSValue.jsNumberFromInt32(this.expr.loc.start).asRef();
|
||||
}
|
||||
};
|
||||
|
||||
resolver: *Resolver,
|
||||
vm: *JavaScript.VirtualMachine = undefined,
|
||||
|
||||
resolved: ResolveResult = undefined,
|
||||
disabled: bool = false,
|
||||
|
||||
pub fn init(
|
||||
allocator: *std.mem.Allocator,
|
||||
resolver: *Resolver,
|
||||
resolved: ResolveResult,
|
||||
log: *logger.Log,
|
||||
env: *DotEnv.Loader,
|
||||
function_name: string,
|
||||
specifier: string,
|
||||
hash: i32,
|
||||
) !Macro {
|
||||
const path = resolved.path_pair.primary;
|
||||
|
||||
var vm: *JavaScript.VirtualMachine = if (JavaScript.VirtualMachine.vm_loaded)
|
||||
JavaScript.VirtualMachine.vm
|
||||
else brk: {
|
||||
var _vm = try JavaScript.VirtualMachine.init(default_allocator, resolver.opts.transform_options, null, log, env);
|
||||
_vm.enableMacroMode();
|
||||
|
||||
_vm.bundler.configureLinker();
|
||||
_vm.bundler.configureDefines() catch unreachable;
|
||||
break :brk _vm;
|
||||
};
|
||||
|
||||
vm.enableMacroMode();
|
||||
|
||||
var loaded_result = try vm.loadMacroEntryPoint(path.text, function_name, specifier, hash);
|
||||
|
||||
if (loaded_result.status(vm.global.vm()) == JSC.JSPromise.Status.Rejected) {
|
||||
vm.defaultErrorHandler(loaded_result.result(vm.global.vm()), null);
|
||||
vm.disableMacroMode();
|
||||
return error.MacroLoadError;
|
||||
}
|
||||
|
||||
JavaScript.VirtualMachine.vm_loaded = true;
|
||||
|
||||
// We don't need to do anything with the result.
|
||||
// We just want to make sure the promise is finished.
|
||||
_ = loaded_result.result(vm.global.vm());
|
||||
|
||||
return Macro{
|
||||
.vm = vm,
|
||||
.resolved = resolved,
|
||||
.resolver = resolver,
|
||||
};
|
||||
}
|
||||
|
||||
pub const Runner = struct {
|
||||
threadlocal var args_buf: [32]js.JSObjectRef = undefined;
|
||||
threadlocal var expr_nodes_buf: [32]JSExpr = undefined;
|
||||
threadlocal var exception_holder: Zig.ZigException.Holder = undefined;
|
||||
pub fn run(
|
||||
macro: Macro,
|
||||
log: *logger.Log,
|
||||
allocator: *std.mem.Allocator,
|
||||
function_name: string,
|
||||
caller: Expr,
|
||||
args: []Expr,
|
||||
source: *const logger.Source,
|
||||
id: i32,
|
||||
) Expr {
|
||||
if (comptime isDebug) Output.prettyln("<r><d>[macro]<r> call <d><b>{s}<r>", .{function_name});
|
||||
|
||||
exception_holder = Zig.ZigException.Holder.init();
|
||||
expr_nodes_buf[0] = JSExpr{ .expr = caller };
|
||||
args_buf[0] = JSExpr.Class.make(
|
||||
macro.vm.global.ref(),
|
||||
&expr_nodes_buf[0],
|
||||
);
|
||||
for (args) |arg, i| {
|
||||
expr_nodes_buf[i + 1] = JSExpr{ .expr = arg };
|
||||
args_buf[i + 1] =
|
||||
JSExpr.Class.make(
|
||||
macro.vm.global.ref(),
|
||||
&expr_nodes_buf[i + 1],
|
||||
);
|
||||
}
|
||||
args_buf[args.len + 2] = null;
|
||||
|
||||
var macro_callback = macro.vm.macros.get(id) orelse return caller;
|
||||
var result = js.JSObjectCallAsFunctionReturnValue(macro.vm.global.ref(), macro_callback, null, args.len + 1, &args_buf);
|
||||
var promise = JSC.JSPromise.resolvedPromise(macro.vm.global, result);
|
||||
macro.vm.global.vm().drainMicrotasks();
|
||||
|
||||
if (promise.status(macro.vm.global.vm()) == .Rejected) {
|
||||
macro.vm.defaultErrorHandler(promise.result(macro.vm.global.vm()), null);
|
||||
return caller;
|
||||
}
|
||||
|
||||
const value = promise.result(macro.vm.global.vm());
|
||||
|
||||
return caller;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
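A TypeScript sketch of the calling convention Runner.run sets up: the registered callback receives the calling expression node first, then each argument node, and may return a promise. The node shape is a simplified stand-in for JSExpr, and converting the callback's result back into an AST node is still unimplemented at this point in the diff, so the caller expression is returned unchanged:

interface AstNode {
  tag: number;      // public tag value, see JSExpr.getTag
  tagName: string;  // see JSExpr.getTagName
  position: number; // see JSExpr.getPosition
}

type MacroFn = (caller: AstNode, ...args: AstNode[]) => unknown;

async function runMacro(macro: MacroFn, caller: AstNode, args: AstNode[]): Promise<AstNode> {
  try {
    // Promise.resolve + await mirrors JSPromise.resolvedPromise + drainMicrotasks.
    await Promise.resolve(macro(caller, ...args));
  } catch {
    // A rejected macro falls back to the original expression, as in Runner.run.
  }
  return caller;
}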
test "Binding.init" {
|
||||
var binding = Binding.alloc(
|
||||
std.heap.page_allocator,
|
||||
|
||||
111
src/js_parser/ast.js
Normal file
@@ -0,0 +1,111 @@
|
||||
globalThis.BunASTNode ??= class BunASTNode {
|
||||
position = -1;
|
||||
|
||||
};
|
||||
|
||||
if (!globalThis.BunAST) {
|
||||
globalThis.BunAST = {
|
||||
EArray: class EArray extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EUnary: class EUnary extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EBinary: class EBinary extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EClass: class EClass extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
ENew: class ENew extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EFunction: class EFunction extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
ECall: class ECall extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EDot: class EDot extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EIndex: class EIndex extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EArrow: class EArrow extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EIdentifier: class EIdentifier extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EImportIdentifier: class EImportIdentifier extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EPrivateIdentifier: class EPrivateIdentifier extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EJsxElement: class EJsxElement extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EObject: class EObject extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
ESpread: class ESpread extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
ETemplatePart: class ETemplatePart extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
ETemplate: class ETemplate extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
ERegExp: class ERegExp extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EAwait: class EAwait extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EYield: class EYield extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EIf: class EIf extends BunASTNode {
|
||||
no = Number.MAX_SAFE_INTEGER;
|
||||
yes = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
ERequire: class ERequire extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EImport: class EImport extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EBoolean: class EBoolean extends BunASTNode {
|
||||
val = false;
|
||||
},
|
||||
ENumber: class ENumber extends BunASTNode {
|
||||
val = 0;
|
||||
},
|
||||
EBigInt: class EBigInt extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EString: class EString extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EMissing: class EMissing extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EThis: class EThis extends BunASTNode {
|
||||
},
|
||||
ESuper: class ESuper extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
ENull: class ENull extends BunASTNode {
|
||||
},
|
||||
EUndefined: class EUndefined extends BunASTNode {
|
||||
},
|
||||
ENewTarget: class ENewTarget extends BunASTNode {
|
||||
#ptr = Number.MAX_SAFE_INTEGER;
|
||||
},
|
||||
EImportMeta: class EImportMeta extends BunASTNode {
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -87,6 +87,12 @@ pub const ImportScanner = struct {
.s_import => |st| {
var record: *ImportRecord = &p.import_records.items[st.import_record_index];

if (strings.eqlComptime(record.path.namespace, "macro")) {
record.is_unused = true;
record.path.is_disabled = true;
continue;
}

// The official TypeScript compiler always removes unused imported
// symbols. However, we deliberately deviate from the official
// TypeScript compiler's behavior doing this in a specific scenario:
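A simplified TypeScript sketch of this pass: import records in the "macro" namespace are flagged so nothing is emitted for them at runtime (the field names approximate the Zig ones):

interface ImportRecordLike {
  namespace: string;
  isUnused: boolean;
  isDisabled: boolean;
}

function stripMacroImports(records: ImportRecordLike[]): void {
  for (const record of records) {
    if (record.namespace === "macro") {
      record.isUnused = true;   // no binding is referenced at runtime
      record.isDisabled = true; // the path never reaches the printed output
    }
  }
}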
@@ -686,6 +692,98 @@ const StaticSymbolName = struct {
|
||||
};
|
||||
};
|
||||
|
||||
const BunJSX = struct {
|
||||
const TagNumberExprArray = std.EnumArray(Expr.Tag, E.Number);
|
||||
|
||||
pub const tag_numbers: TagNumberExprArray = brk: {
|
||||
var numbers = TagNumberExprArray.initFill(E.Number{ .value = 0 });
|
||||
tag_numbers.set(.e_array, E.Number{ .value = @intToFloat(f64, Tag.e_array.toPublicValue()) });
|
||||
tag_numbers.set(.e_unary, E.Number{ .value = @intToFloat(f64, Tag.e_unary.toPublicValue()) });
|
||||
tag_numbers.set(.e_binary, E.Number{ .value = @intToFloat(f64, Tag.e_binary.toPublicValue()) });
|
||||
tag_numbers.set(.e_class, E.Number{ .value = @intToFloat(f64, Tag.e_class.toPublicValue()) });
|
||||
tag_numbers.set(.e_new, E.Number{ .value = @intToFloat(f64, Tag.e_new.toPublicValue()) });
|
||||
tag_numbers.set(.e_function, E.Number{ .value = @intToFloat(f64, Tag.e_function.toPublicValue()) });
|
||||
tag_numbers.set(.e_call, E.Number{ .value = @intToFloat(f64, Tag.e_call.toPublicValue()) });
|
||||
tag_numbers.set(.e_dot, E.Number{ .value = @intToFloat(f64, Tag.e_dot.toPublicValue()) });
|
||||
tag_numbers.set(.e_index, E.Number{ .value = @intToFloat(f64, Tag.e_index.toPublicValue()) });
|
||||
tag_numbers.set(.e_arrow, E.Number{ .value = @intToFloat(f64, Tag.e_arrow.toPublicValue()) });
|
||||
tag_numbers.set(.e_identifier, E.Number{ .value = @intToFloat(f64, Tag.e_identifier.toPublicValue()) });
|
||||
tag_numbers.set(.e_import_identifier, E.Number{ .value = @intToFloat(f64, Tag.e_import_identifier.toPublicValue()) });
|
||||
tag_numbers.set(.e_private_identifier, E.Number{ .value = @intToFloat(f64, Tag.e_private_identifier.toPublicValue()) });
|
||||
tag_numbers.set(.e_jsx_element, E.Number{ .value = @intToFloat(f64, Tag.e_jsx_element.toPublicValue()) });
|
||||
tag_numbers.set(.e_object, E.Number{ .value = @intToFloat(f64, Tag.e_object.toPublicValue()) });
|
||||
tag_numbers.set(.e_spread, E.Number{ .value = @intToFloat(f64, Tag.e_spread.toPublicValue()) });
|
||||
tag_numbers.set(.e_template_part, E.Number{ .value = @intToFloat(f64, Tag.e_template_part.toPublicValue()) });
|
||||
tag_numbers.set(.e_template, E.Number{ .value = @intToFloat(f64, Tag.e_template.toPublicValue()) });
|
||||
tag_numbers.set(.e_reg_exp, E.Number{ .value = @intToFloat(f64, Tag.e_reg_exp.toPublicValue()) });
|
||||
tag_numbers.set(.e_await, E.Number{ .value = @intToFloat(f64, Tag.e_await.toPublicValue()) });
|
||||
tag_numbers.set(.e_yield, E.Number{ .value = @intToFloat(f64, Tag.e_yield.toPublicValue()) });
|
||||
tag_numbers.set(.e_if, E.Number{ .value = @intToFloat(f64, Tag.e_if.toPublicValue()) });
|
||||
tag_numbers.set(.e_require, E.Number{ .value = @intToFloat(f64, Tag.e_require.toPublicValue()) });
|
||||
tag_numbers.set(.e_require_or_require_resolve, E.Number{ .value = @intToFloat(f64, Tag.e_require_or_require_resolve.toPublicValue()) });
|
||||
tag_numbers.set(.e_import, E.Number{ .value = @intToFloat(f64, Tag.e_import.toPublicValue()) });
|
||||
tag_numbers.set(.e_boolean, E.Number{ .value = @intToFloat(f64, Tag.e_boolean.toPublicValue()) });
|
||||
tag_numbers.set(.e_number, E.Number{ .value = @intToFloat(f64, Tag.e_number.toPublicValue()) });
|
||||
tag_numbers.set(.e_big_int, E.Number{ .value = @intToFloat(f64, Tag.e_big_int.toPublicValue()) });
|
||||
tag_numbers.set(.e_string, E.Number{ .value = @intToFloat(f64, Tag.e_string.toPublicValue()) });
|
||||
tag_numbers.set(.e_missing, E.Number{ .value = @intToFloat(f64, Tag.e_missing.toPublicValue()) });
|
||||
tag_numbers.set(.e_this, E.Number{ .value = @intToFloat(f64, Tag.e_this.toPublicValue()) });
|
||||
tag_numbers.set(.e_super, E.Number{ .value = @intToFloat(f64, Tag.e_super.toPublicValue()) });
|
||||
tag_numbers.set(.e_null, E.Number{ .value = @intToFloat(f64, Tag.e_null.toPublicValue()) });
|
||||
tag_numbers.set(.e_undefined, E.Number{ .value = @intToFloat(f64, Tag.e_undefined.toPublicValue()) });
|
||||
tag_numbers.set(.e_new_target, E.Number{ .value = @intToFloat(f64, Tag.e_new_target.toPublicValue()) });
|
||||
tag_numbers.set(.e_import_meta, E.Number{ .value = @intToFloat(f64, Tag.e_import_meta.toPublicValue()) });
|
||||
break :brk tag_numbers;
|
||||
};
|
||||
pub const StaticExpr = struct {
|
||||
// pub const one: E.Expr = E.Expr{ .tag = .e_number, .data = };
|
||||
};
|
||||
|
||||
pub const tag_name_key: string = "t";
|
||||
pub const tag_name_key_string = E.String{ .utf8 = tag_name_key };
|
||||
|
||||
pub const children_name_key: string = "c";
|
||||
pub const children_name_key_string = E.String{ .utf8 = children_name_key };
|
||||
|
||||
pub const value_name_key: string = "v";
|
||||
pub const value_name_string = E.String{ .utf8 = value_name_key };
|
||||
|
||||
pub const number_name_key: string = "n";
|
||||
pub const number_name_string = E.String{ .utf8 = number_name_key };
|
||||
|
||||
pub const @"undefined" = E.Object{
|
||||
.properties = &[_]G.Property{
|
||||
.{ .key = &tag_name_key_string, .value = &tag_names.get(.e_undefined) },
|
||||
},
|
||||
};
|
||||
pub const @"null" = E.Object{
|
||||
.properties = &[_]G.Property{
|
||||
.{ .key = &tag_name_key_string, .value = &tag_names.get(.e_null) },
|
||||
},
|
||||
};
|
||||
pub const @"false" = E.Object{
|
||||
.properties = &[_]G.Property{
|
||||
.{ .key = &tag_name_key_string, .value = &tag_names.get(.e_boolean) },
|
||||
.{ .key = &value_name_key, .value = Expr{ .data = Prefill.Data.Zero, .loc = logger.Loc.Empty } },
|
||||
},
|
||||
.is_single_line = true,
|
||||
};
|
||||
pub const @"true" = E.Object{
|
||||
.properties = &[_]G.Property{
|
||||
.{ .key = &tag_name_key_string, .value = &tag_names.get(.e_boolean) },
|
||||
.{ .key = &value_name_key, .value = Expr{ .data = Prefill.Data.One, .loc = logger.Loc.Empty } },
|
||||
},
|
||||
.is_single_line = true,
|
||||
};
|
||||
pub const @"empty_string" = E.Object{
|
||||
.properties = &[_]G.Property{
|
||||
.{ .key = &tag_name_key_string, .value = &tag_names.get(.e_string) },
|
||||
.{ .key = &value_name_key, .value = Expr{ .data = Prefill.Data.EmptyString, .loc = logger.Loc.Empty } },
|
||||
},
|
||||
.is_single_line = true,
|
||||
};
|
||||
};
|
||||
|
||||
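Judging from the single-letter key constants above ("t", "c", "v", "n"), the serialized node shape the bun_macro transform targets appears to be roughly the following; this is an inference from the key names, not a documented format:

interface SerializedNode {
  t: number;            // public tag value (see toPublicValue)
  c?: SerializedNode[]; // children
  v?: unknown;          // value payload
  n?: number;           // numeric payload
}

// Analogous to the prefilled BunJSX objects: e_boolean is internal tag 3, public 19.
const FALSE_NODE: SerializedNode = { t: 19, v: 0 };
const TRUE_NODE: SerializedNode = { t: 19, v: 1 };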
pub const SideEffects = enum(u2) {
|
||||
could_have_side_effects,
|
||||
no_side_effects,
|
||||
@@ -1764,6 +1862,7 @@ pub const Parser = struct {
|
||||
suppress_warnings_about_weird_code: bool = true,
|
||||
filepath_hash_for_hmr: u32 = 0,
|
||||
features: RuntimeFeatures = RuntimeFeatures{},
|
||||
macro_context: *js_ast.Macro.MacroContext = undefined,
|
||||
|
||||
warn_about_unbundled_modules: bool = true,
|
||||
|
||||
@@ -1871,6 +1970,12 @@ pub const Parser = struct {
}

pub fn parse(self: *Parser) !js_ast.Result {
if (self.options.features.is_macro and self.options.ts) {
return try self._parse(TypeScriptMacroParser);
} else if (self.options.features.is_macro) {
return try self._parse(JavaScriptMacroParser);
}

if (self.options.ts and self.options.jsx.parse) {
if (self.options.features.react_fast_refresh) {
return try self._parse(TSXParserFastRefresh);
@@ -1980,7 +2085,7 @@ pub const Parser = struct {
|
||||
}
|
||||
|
||||
// Auto-import JSX
|
||||
if (p.options.jsx.parse) {
|
||||
if (self.options.jsx.parse and !self.options.features.is_macro) {
|
||||
const jsx_symbol: *const Symbol = &p.symbols.items[p.jsx_runtime.ref.inner_index];
|
||||
const jsx_static_symbol: *const Symbol = &p.symbols.items[p.jsxs_runtime.ref.inner_index];
|
||||
const jsx_fragment_symbol: *const Symbol = &p.symbols.items[p.jsx_fragment.ref.inner_index];
|
||||
@@ -2546,8 +2651,14 @@ pub const Prefill = struct {
|
||||
pub var ColumnNumber = [_]u16{ 'c', 'o', 'l', 'u', 'm', 'n', 'N', 'u', 'm', 'b', 'e', 'r' };
|
||||
};
|
||||
pub const Value = struct {
|
||||
pub var EThis = E.This{};
|
||||
pub var Zero = E.Number{ .value = 0.0 };
|
||||
pub const EThis = E.This{};
|
||||
pub const Zero = E.Number{ .value = 0.0 };
|
||||
pub const One = E.Number{ .value = 1.0 };
|
||||
pub const Two = E.Number{ .value = 2.0 };
|
||||
pub const Three = E.Number{ .value = 3.0 };
|
||||
pub const Four = E.Number{ .value = 4.0 };
|
||||
pub const Five = E.Number{ .value = 5.0 };
|
||||
pub var EmptyString = E.String{};
|
||||
};
|
||||
pub const String = struct {
|
||||
pub var Key = E.String{ .value = &Prefill.StringLiteral.Key };
|
||||
@@ -2557,20 +2668,27 @@ pub const Prefill = struct {
|
||||
pub var ColumnNumber = E.String{ .value = &Prefill.StringLiteral.ColumnNumber };
|
||||
};
|
||||
pub const Data = struct {
|
||||
pub var BMissing = B{ .b_missing = BMissing_ };
|
||||
pub var BMissing_ = B.Missing{};
|
||||
pub const BMissing = B{ .b_missing = BMissing_ };
|
||||
pub const BMissing_ = B.Missing{};
|
||||
|
||||
pub var EMissing = Expr.Data{ .e_missing = EMissing_ };
|
||||
pub var EMissing_ = E.Missing{};
|
||||
pub const EMissing = Expr.Data{ .e_missing = EMissing_ };
|
||||
pub const EMissing_ = E.Missing{};
|
||||
|
||||
pub var SEmpty = Stmt.Data{ .s_empty = SEmpty_ };
|
||||
pub var SEmpty_ = S.Empty{};
|
||||
pub const SEmpty = Stmt.Data{ .s_empty = SEmpty_ };
|
||||
pub const SEmpty_ = S.Empty{};
|
||||
|
||||
pub var Filename = Expr.Data{ .e_string = &Prefill.String.Filename };
|
||||
pub var LineNumber = Expr.Data{ .e_string = &Prefill.String.LineNumber };
|
||||
pub var ColumnNumber = Expr.Data{ .e_string = &Prefill.String.ColumnNumber };
|
||||
pub var This = Expr.Data{ .e_this = E.This{} };
|
||||
pub var Zero = Expr.Data{ .e_number = &Value.Zero };
|
||||
pub const EmptyString = Expr.Data{ .e_string = &Prefill.Value.EmptyString };
|
||||
|
||||
pub const Filename = Expr.Data{ .e_string = &Prefill.String.Filename };
|
||||
pub const LineNumber = Expr.Data{ .e_string = &Prefill.String.LineNumber };
|
||||
pub const ColumnNumber = Expr.Data{ .e_string = &Prefill.String.ColumnNumber };
|
||||
pub const This = Expr.Data{ .e_this = E.This{} };
|
||||
pub const Zero = Expr.Data{ .e_number = .{ .value = 0 } };
|
||||
pub const One = Expr.Data{ .e_number = .{ .value = 1 } };
|
||||
pub const Two = Expr.Data{ .e_number = .{ .value = 2 } };
|
||||
pub const Three = Expr.Data{ .e_number = .{ .value = 3 } };
|
||||
pub const Four = Expr.Data{ .e_number = .{ .value = 4 } };
|
||||
pub const Five = Expr.Data{ .e_number = .{ .value = 5 } };
|
||||
};
|
||||
pub const Runtime = struct {
|
||||
pub var JSXFilename = "__jsxFilename";
|
||||
@@ -2598,6 +2716,8 @@ const ParserFeatures = struct {
jsx: bool = false,
scan_only: bool = false,

is_macro: bool = false,

// *** How React Fast Refresh works ***
//
// Implementations:
@@ -2653,24 +2773,43 @@ const FastRefresh = struct {};
|
||||
|
||||
const ImportItemForNamespaceMap = std.StringArrayHashMap(LocRef);
|
||||
|
||||
const MacroRefs = std.AutoArrayHashMap(Ref, u32);
|
||||
|
||||
const JSXTransformType = enum {
|
||||
none,
|
||||
react,
|
||||
bun_macro,
|
||||
};
|
||||
|
||||
pub fn NewParser(
|
||||
comptime js_parser_features: ParserFeatures,
|
||||
) type {
|
||||
const is_typescript_enabled = js_parser_features.typescript;
|
||||
const is_jsx_enabled = js_parser_features.jsx;
|
||||
const only_scan_imports_and_do_not_visit = js_parser_features.scan_only;
|
||||
const is_react_fast_refresh_enabled = js_parser_features.react_fast_refresh;
|
||||
|
||||
const ImportRecordList = if (only_scan_imports_and_do_not_visit) *std.ArrayList(ImportRecord) else std.ArrayList(ImportRecord);
|
||||
const NamedImportsType = if (only_scan_imports_and_do_not_visit) *js_ast.Ast.NamedImports else js_ast.Ast.NamedImports;
|
||||
const NeedsJSXType = if (only_scan_imports_and_do_not_visit) bool else void;
|
||||
const ParsePassSymbolUsageType = if (only_scan_imports_and_do_not_visit and is_typescript_enabled) *ScanPassResult.ParsePassSymbolUsageMap else void;
|
||||
// P is for Parser!
|
||||
// public only because of Binding.ToExpr
|
||||
return struct {
|
||||
pub const is_typescript_enabled = js_parser_features.typescript;
|
||||
pub const is_jsx_enabled = js_parser_features.jsx;
|
||||
pub const jsx_transform_type: JSXTransformType = brk: {
|
||||
if (!is_jsx_enabled) break :brk JSXTransformType.none;
|
||||
|
||||
if (js_parser_features.is_macro) {
|
||||
break :brk JSXTransformType.bun_macro;
|
||||
} else {
|
||||
break :brk JSXTransformType.react;
|
||||
}
|
||||
};
|
||||
pub const only_scan_imports_and_do_not_visit = js_parser_features.scan_only;
|
||||
pub const is_react_fast_refresh_enabled = js_parser_features.react_fast_refresh;
|
||||
|
||||
const ImportRecordList = if (only_scan_imports_and_do_not_visit) *std.ArrayList(ImportRecord) else std.ArrayList(ImportRecord);
|
||||
const NamedImportsType = if (only_scan_imports_and_do_not_visit) *js_ast.Ast.NamedImports else js_ast.Ast.NamedImports;
|
||||
const NeedsJSXType = if (only_scan_imports_and_do_not_visit) bool else void;
|
||||
const ParsePassSymbolUsageType = if (only_scan_imports_and_do_not_visit and is_typescript_enabled) *ScanPassResult.ParsePassSymbolUsageMap else void;
|
||||
const P = @This();
|
||||
allocator: *std.mem.Allocator,
|
||||
options: Parser.Options,
|
||||
macro_refs: MacroRefs,
|
||||
log: *logger.Log,
|
||||
define: *Define,
|
||||
source: *const logger.Source,
|
||||
@@ -3524,7 +3663,7 @@ pub fn NewParser(
|
||||
}
|
||||
}
|
||||
|
||||
if (is_jsx_enabled) {
|
||||
if (jsx_transform_type == .react) {
|
||||
generated_symbols_count += 7;
|
||||
|
||||
if (p.options.jsx.development) generated_symbols_count += 1;
|
||||
@@ -3567,22 +3706,28 @@ pub fn NewParser(
|
||||
p.recordUsage(p.runtime_imports.__HMRClient.?.ref);
|
||||
}
|
||||
|
||||
if (is_jsx_enabled) {
|
||||
if (p.options.jsx.development) {
|
||||
p.jsx_filename = p.declareGeneratedSymbol(.other, "jsxFilename") catch unreachable;
|
||||
}
|
||||
p.jsx_fragment = p.declareGeneratedSymbol(.other, "Fragment") catch unreachable;
|
||||
p.jsx_runtime = p.declareGeneratedSymbol(.other, "jsx") catch unreachable;
|
||||
p.jsxs_runtime = p.declareGeneratedSymbol(.other, "jsxs") catch unreachable;
|
||||
p.jsx_factory = p.declareGeneratedSymbol(.other, "Factory") catch unreachable;
|
||||
switch (comptime jsx_transform_type) {
|
||||
.react => {
|
||||
if (p.options.jsx.development) {
|
||||
p.jsx_filename = p.declareGeneratedSymbol(.other, "jsxFilename") catch unreachable;
|
||||
}
|
||||
p.jsx_fragment = p.declareGeneratedSymbol(.other, "Fragment") catch unreachable;
|
||||
p.jsx_runtime = p.declareGeneratedSymbol(.other, "jsx") catch unreachable;
|
||||
p.jsxs_runtime = p.declareGeneratedSymbol(.other, "jsxs") catch unreachable;
|
||||
p.jsx_factory = p.declareGeneratedSymbol(.other, "Factory") catch unreachable;
|
||||
|
||||
if (p.options.jsx.factory.len > 1 or FeatureFlags.jsx_runtime_is_cjs) {
|
||||
p.jsx_classic = p.declareGeneratedSymbol(.other, "ClassicImportSource") catch unreachable;
|
||||
}
|
||||
if (p.options.jsx.factory.len > 1 or FeatureFlags.jsx_runtime_is_cjs) {
|
||||
p.jsx_classic = p.declareGeneratedSymbol(.other, "ClassicImportSource") catch unreachable;
|
||||
}
|
||||
|
||||
if (p.options.jsx.import_source.len > 0) {
|
||||
p.jsx_automatic = p.declareGeneratedSymbol(.other, "ImportSource") catch unreachable;
|
||||
}
|
||||
if (p.options.jsx.import_source.len > 0) {
|
||||
p.jsx_automatic = p.declareGeneratedSymbol(.other, "ImportSource") catch unreachable;
|
||||
}
|
||||
},
|
||||
.bun_macro => {
|
||||
p.jsx_fragment = p.declareGeneratedSymbol(.other, "Fragment") catch unreachable;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6091,10 +6236,20 @@ pub fn NewParser(
|
||||
}
|
||||
|
||||
const path = try p.parsePath();
|
||||
|
||||
stmt.import_record_index = p.addImportRecord(.stmt, path.loc, path.text);
|
||||
p.import_records.items[stmt.import_record_index].was_originally_bare_import = was_originally_bare_import;
|
||||
try p.lexer.expectOrInsertSemicolon();
|
||||
|
||||
const is_macro = js_ast.Macro.isMacroPath(path.text);

if (is_macro) {
p.import_records.items[stmt.import_record_index].path.namespace = js_ast.Macro.namespace;
if (comptime only_scan_imports_and_do_not_visit) {
p.import_records.items[stmt.import_record_index].path.is_disabled = true;
}
}
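For reference, the import forms this implies (the parser later rejects namespace imports of macros with "Macro cannot be a * import"); paths are placeholders:

// import myMacro from "macro:./my-macro";      // ok: default import
// import { myMacro } from "macro:./my-macro";  // ok: named item
// import * as macros from "macro:./my-macro";  // rejected: namespace import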
if (stmt.star_name_loc) |star| {
|
||||
const name = p.loadNameFromRef(stmt.namespace_ref);
|
||||
stmt.namespace_ref = try p.declareSymbol(.import, star, name);
|
||||
@@ -6104,6 +6259,16 @@ pub fn NewParser(
|
||||
.import_record_index = stmt.import_record_index,
|
||||
}) catch unreachable;
|
||||
}
|
||||
if (is_macro) {
|
||||
p.log.addErrorFmt(
|
||||
p.source,
|
||||
star,
|
||||
p.allocator,
|
||||
"Macro cannot be a * import, must be default or an {{item}}",
|
||||
.{},
|
||||
) catch unreachable;
|
||||
return error.SyntaxError;
|
||||
}
|
||||
} else {
|
||||
var path_name = fs.PathName.init(strings.append(p.allocator, "import_", path.text) catch unreachable);
|
||||
const name = try path_name.nonUniqueNameString(p.allocator);
|
||||
@@ -6126,6 +6291,10 @@ pub fn NewParser(
|
||||
.import_record_index = stmt.import_record_index,
|
||||
}) catch unreachable;
|
||||
}
|
||||
|
||||
if (is_macro) {
|
||||
try p.macro_refs.put(ref, stmt.import_record_index);
|
||||
}
|
||||
}
|
||||
|
||||
if (stmt.items.len > 0) {
|
||||
@@ -6144,6 +6313,9 @@ pub fn NewParser(
|
||||
.import_record_index = stmt.import_record_index,
|
||||
}) catch unreachable;
|
||||
}
|
||||
if (is_macro) {
|
||||
try p.macro_refs.put(ref, stmt.import_record_index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10000,7 +10172,7 @@ pub fn NewParser(
|
||||
// <A[]>(x)
|
||||
// <A>(x) => {}
|
||||
// <A = B>(x) => {}
|
||||
if (is_typescript_enabled and is_jsx_enabled) {
|
||||
if (comptime is_typescript_enabled and is_jsx_enabled) {
|
||||
var oldLexer = std.mem.toBytes(p.lexer);
|
||||
|
||||
try p.lexer.next();
|
||||
@@ -10027,7 +10199,7 @@ pub fn NewParser(
|
||||
}
|
||||
}
|
||||
|
||||
if (is_jsx_enabled) {
|
||||
if (comptime is_jsx_enabled) {
|
||||
// Use NextInsideJSXElement() instead of Next() so we parse "<<" as "<"
|
||||
try p.lexer.nextInsideJSXElement();
|
||||
const element = try p.parseJSXElement(loc);
|
||||
@@ -10040,7 +10212,7 @@ pub fn NewParser(
|
||||
return element;
|
||||
}
|
||||
|
||||
if (is_typescript_enabled) {
|
||||
if (comptime is_typescript_enabled) {
|
||||
// This is either an old-style type cast or a generic lambda function
|
||||
|
||||
// "<T>(x)"
|
||||
@@ -10846,204 +11018,257 @@ pub fn NewParser(
p.panic("Unexpected private identifier. This is an internal error - not your fault.", .{});
},
.e_jsx_element => |e_| {
const tag: Expr = tagger: {
if (e_.tag) |_tag| {
break :tagger p.visitExpr(_tag);
} else {
break :tagger p.jsxStringsToMemberExpression(expr.loc, p.jsx_fragment.ref);
}
};
switch (comptime jsx_transform_type) {
.bun_macro => {
const IdentifierOrNodeType = union(Tag) {
identifier: Expr,
expression: Expr.Tag,
pub const Tag = enum { identifier, expression };
};
const tag: IdentifierOrNodeType = tagger: {
if (e_.tag) |_tag| {
switch (_tag.data) {
.e_string => |str| {
if (Expr.Tag.find(str.utf8)) |tagname| {
break :tagger IdentifierOrNodeType{ .expression = tagname };
}

for (e_.properties) |property, i| {
if (property.kind != .spread) {
e_.properties[i].key = p.visitExpr(e_.properties[i].key.?);
}

if (property.value != null) {
e_.properties[i].value = p.visitExpr(e_.properties[i].value.?);
}

if (property.initializer != null) {
e_.properties[i].initializer = p.visitExpr(e_.properties[i].initializer.?);
}
}

const runtime = if (p.options.jsx.runtime == .automatic and !e_.flags.is_key_before_rest) options.JSX.Runtime.automatic else options.JSX.Runtime.classic;
var children_count = e_.children.len;

const is_childless_tag = FeatureFlags.react_specific_warnings and children_count > 0 and tag.data == .e_string and tag.data.e_string.isUTF8() and js_lexer.ChildlessJSXTags.has(tag.data.e_string.utf8);

children_count = if (is_childless_tag) 0 else children_count;

if (children_count != e_.children.len) {
// Error: meta is a void element tag and must neither have `children` nor use `dangerouslySetInnerHTML`.
// ^ from react-dom
p.log.addWarningFmt(p.source, tag.loc, p.allocator, "<{s} /> is a void element and must not have \"children\"", .{tag.data.e_string.utf8}) catch {};
}

// TODO: maybe we should split these into two different AST Nodes
// That would reduce the amount of allocations a little
switch (runtime) {
.classic => {
// Arguments to createElement()
const args = p.allocator.alloc(Expr, 2 + children_count) catch unreachable;
// There are at least two args:
// - name of the tag
// - props
var i: usize = 1;
args[0] = tag;
if (e_.properties.len > 0) {
for (e_.properties) |prop, prop_i| {
if (prop.key) |key| {
e_.properties[prop_i].key = p.visitExpr(key);
}

if (prop.value) |val| {
e_.properties[prop_i].value = p.visitExpr(val);
}
}

if (e_.key) |key| {
var props = p.allocator.alloc(G.Property, e_.properties.len + 1) catch unreachable;
std.mem.copy(G.Property, props, e_.properties);
props[props.len - 1] = G.Property{ .key = Expr{ .loc = key.loc, .data = keyExprData }, .value = key };
args[1] = p.e(E.Object{ .properties = props }, expr.loc);
} else {
args[1] = p.e(E.Object{ .properties = e_.properties }, expr.loc);
}
i = 2;
} else {
args[1] = p.e(E.Null{}, expr.loc);
i = 2;
}

for (e_.children[0..children_count]) |child| {
args[i] = p.visitExpr(child);
i += @intCast(usize, @boolToInt(args[i].data != .e_missing));
}

// Call createElement()
return p.e(E.Call{
.target = p.jsxStringsToMemberExpression(expr.loc, p.jsx_factory.ref),
.args = args[0..i],
// Enable tree shaking
.can_be_unwrapped_if_unused = !p.options.ignore_dce_annotations,
}, expr.loc);
},
// function jsxDEV(type, config, maybeKey, source, self) {
.automatic => {
// Either:
// jsxDEV(type, arguments, key, isStaticChildren, source, self)
// jsx(type, arguments, key)
const args = p.allocator.alloc(Expr, if (p.options.jsx.development) @as(usize, 6) else @as(usize, 4)) catch unreachable;
args[0] = tag;
var props = List(G.Property).fromOwnedSlice(p.allocator, e_.properties);
// arguments needs to be like
// {
// ...props,
// children: [el1, el2]
// }

const is_static_jsx = e_.children.len == 0 or e_.children.len > 1 or e_.children[0].data != .e_array;

// if (p.options.jsx.development) {
switch (children_count) {
0 => {},
1 => {
// static jsx must always be an array
if (is_static_jsx) {
const children_key = Expr{ .data = jsxChildrenKeyData, .loc = expr.loc };
e_.children[0] = p.visitExpr(e_.children[0]);
props.append(G.Property{
.key = children_key,
.value = p.e(E.Array{
.items = e_.children[0..children_count],
.is_single_line = e_.children.len < 2,
}, expr.loc),
}) catch unreachable;
} else {
const children_key = Expr{ .data = jsxChildrenKeyData, .loc = expr.loc };
props.append(G.Property{
.key = children_key,
.value = p.visitExpr(e_.children[0]),
}) catch unreachable;
}
},
else => {
for (e_.children[0..children_count]) |child, i| {
e_.children[i] = p.visitExpr(child);
}
const children_key = Expr{ .data = jsxChildrenKeyData, .loc = expr.loc };
props.append(G.Property{
.key = children_key,
.value = p.e(E.Array{
.items = e_.children[0..children_count],
.is_single_line = e_.children.len < 2,
}, expr.loc),
}) catch unreachable;
},
}

args[1] = p.e(E.Object{
.properties = props.toOwnedSlice(),
}, expr.loc);

if (e_.key) |key| {
args[2] = key;
} else {
// if (maybeKey !== undefined)
args[2] = Expr{
.loc = expr.loc,
.data = .{
.e_undefined = E.Undefined{},
},
};
}

if (p.options.jsx.development) {
// is the return type of the first child an array?
// It's dynamic
// Else, it's static
args[3] = Expr{
.loc = expr.loc,
.data = .{
.e_boolean = .{
.value = is_static_jsx,
p.log.addErrorFmt(
p.source,
expr.loc,
p.allocator,
"Invalid expression tag: \"<{s}>\". Valid tags are:\n" ++ Expr.Tag.valid_names_list ++ "\n",
.{str.utf8},
) catch unreachable;
break :tagger IdentifierOrNodeType{ .identifier = p.visitExpr(_tag) };
},
},
};
else => {
break :tagger IdentifierOrNodeType{ .identifier = p.visitExpr(_tag) };
},
}
} else {
break :tagger IdentifierOrNodeType{ .expression = Expr.Tag.e_array };
}
};

var source = p.allocator.alloc(G.Property, 2) catch unreachable;
p.recordUsage(p.jsx_filename.ref);
source[0] = G.Property{
.key = Expr{ .loc = expr.loc, .data = Prefill.Data.Filename },
.value = p.e(E.Identifier{ .ref = p.jsx_filename.ref }, expr.loc),
};
for (e_.properties) |property, i| {
if (property.kind != .spread) {
e_.properties[i].key = p.visitExpr(e_.properties[i].key.?);
}

source[1] = G.Property{
.key = Expr{ .loc = expr.loc, .data = Prefill.Data.LineNumber },
.value = p.e(E.Number{ .value = @intToFloat(f64, expr.loc.start) }, expr.loc),
};
if (property.value != null) {
e_.properties[i].value = p.visitExpr(e_.properties[i].value.?);
}

// Officially, they ask for columnNumber. But I don't see any usages of it in the code!
// source[2] = G.Property{
// .key = Expr{ .loc = expr.loc, .data = Prefill.Data.ColumnNumber },
// .value = p.e(E.Number{ .value = @intToFloat(f64, expr.loc.start) }, expr.loc),
// };

args[4] = p.e(E.Object{
.properties = source,
}, expr.loc);
args[5] = Expr{ .data = Prefill.Data.This, .loc = expr.loc };
if (property.initializer != null) {
e_.properties[i].initializer = p.visitExpr(e_.properties[i].initializer.?);
}
}

return p.e(E.Call{
.target = p.jsxStringsToMemberExpressionAutomatic(expr.loc, is_static_jsx),
.args = args,
// Enable tree shaking
.can_be_unwrapped_if_unused = !p.options.ignore_dce_annotations,
.was_jsx_element = true,
}, expr.loc);
return p.e(E.Missing{}, expr.loc);
},
.react => {
const tag: Expr = tagger: {
if (e_.tag) |_tag| {
break :tagger p.visitExpr(_tag);
} else {
break :tagger p.jsxStringsToMemberExpression(expr.loc, p.jsx_fragment.ref);
}
};

for (e_.properties) |property, i| {
if (property.kind != .spread) {
e_.properties[i].key = p.visitExpr(e_.properties[i].key.?);
}

if (property.value != null) {
e_.properties[i].value = p.visitExpr(e_.properties[i].value.?);
}

if (property.initializer != null) {
e_.properties[i].initializer = p.visitExpr(e_.properties[i].initializer.?);
}
}

const runtime = if (p.options.jsx.runtime == .automatic and !e_.flags.is_key_before_rest) options.JSX.Runtime.automatic else options.JSX.Runtime.classic;
var children_count = e_.children.len;

const is_childless_tag = FeatureFlags.react_specific_warnings and children_count > 0 and tag.data == .e_string and tag.data.e_string.isUTF8() and js_lexer.ChildlessJSXTags.has(tag.data.e_string.utf8);

children_count = if (is_childless_tag) 0 else children_count;

if (children_count != e_.children.len) {
// Error: meta is a void element tag and must neither have `children` nor use `dangerouslySetInnerHTML`.
// ^ from react-dom
p.log.addWarningFmt(p.source, tag.loc, p.allocator, "<{s} /> is a void element and must not have \"children\"", .{tag.data.e_string.utf8}) catch {};
}

// TODO: maybe we should split these into two different AST Nodes
// That would reduce the amount of allocations a little
switch (runtime) {
.classic => {
// Arguments to createElement()
const args = p.allocator.alloc(Expr, 2 + children_count) catch unreachable;
// There are at least two args:
// - name of the tag
// - props
var i: usize = 1;
args[0] = tag;
if (e_.properties.len > 0) {
for (e_.properties) |prop, prop_i| {
if (prop.key) |key| {
e_.properties[prop_i].key = p.visitExpr(key);
}

if (prop.value) |val| {
e_.properties[prop_i].value = p.visitExpr(val);
}
}

if (e_.key) |key| {
var props = p.allocator.alloc(G.Property, e_.properties.len + 1) catch unreachable;
std.mem.copy(G.Property, props, e_.properties);
props[props.len - 1] = G.Property{ .key = Expr{ .loc = key.loc, .data = keyExprData }, .value = key };
args[1] = p.e(E.Object{ .properties = props }, expr.loc);
} else {
args[1] = p.e(E.Object{ .properties = e_.properties }, expr.loc);
}
i = 2;
} else {
args[1] = p.e(E.Null{}, expr.loc);
i = 2;
}

for (e_.children[0..children_count]) |child| {
args[i] = p.visitExpr(child);
i += @intCast(usize, @boolToInt(args[i].data != .e_missing));
}

// Call createElement()
return p.e(E.Call{
.target = p.jsxStringsToMemberExpression(expr.loc, p.jsx_factory.ref),
.args = args[0..i],
// Enable tree shaking
.can_be_unwrapped_if_unused = !p.options.ignore_dce_annotations,
}, expr.loc);
},
// function jsxDEV(type, config, maybeKey, source, self) {
.automatic => {
// Either:
// jsxDEV(type, arguments, key, isStaticChildren, source, self)
// jsx(type, arguments, key)
const args = p.allocator.alloc(Expr, if (p.options.jsx.development) @as(usize, 6) else @as(usize, 4)) catch unreachable;
args[0] = tag;
var props = List(G.Property).fromOwnedSlice(p.allocator, e_.properties);
// arguments needs to be like
// {
// ...props,
// children: [el1, el2]
// }

const is_static_jsx = e_.children.len == 0 or e_.children.len > 1 or e_.children[0].data != .e_array;

// if (p.options.jsx.development) {
switch (children_count) {
0 => {},
1 => {
// static jsx must always be an array
if (is_static_jsx) {
const children_key = Expr{ .data = jsxChildrenKeyData, .loc = expr.loc };
e_.children[0] = p.visitExpr(e_.children[0]);
props.append(G.Property{
.key = children_key,
.value = p.e(E.Array{
.items = e_.children[0..children_count],
.is_single_line = e_.children.len < 2,
}, expr.loc),
}) catch unreachable;
} else {
const children_key = Expr{ .data = jsxChildrenKeyData, .loc = expr.loc };
props.append(G.Property{
.key = children_key,
.value = p.visitExpr(e_.children[0]),
}) catch unreachable;
}
},
else => {
for (e_.children[0..children_count]) |child, i| {
e_.children[i] = p.visitExpr(child);
}
const children_key = Expr{ .data = jsxChildrenKeyData, .loc = expr.loc };
props.append(G.Property{
.key = children_key,
.value = p.e(E.Array{
.items = e_.children[0..children_count],
.is_single_line = e_.children.len < 2,
}, expr.loc),
}) catch unreachable;
},
}

args[1] = p.e(E.Object{
.properties = props.toOwnedSlice(),
}, expr.loc);

if (e_.key) |key| {
args[2] = key;
} else {
// if (maybeKey !== undefined)
args[2] = Expr{
.loc = expr.loc,
.data = .{
.e_undefined = E.Undefined{},
},
};
}

if (p.options.jsx.development) {
// is the return type of the first child an array?
// It's dynamic
// Else, it's static
args[3] = Expr{
.loc = expr.loc,
.data = .{
.e_boolean = .{
.value = is_static_jsx,
},
},
};

var source = p.allocator.alloc(G.Property, 2) catch unreachable;
p.recordUsage(p.jsx_filename.ref);
source[0] = G.Property{
.key = Expr{ .loc = expr.loc, .data = Prefill.Data.Filename },
.value = p.e(E.Identifier{ .ref = p.jsx_filename.ref }, expr.loc),
};

source[1] = G.Property{
.key = Expr{ .loc = expr.loc, .data = Prefill.Data.LineNumber },
.value = p.e(E.Number{ .value = @intToFloat(f64, expr.loc.start) }, expr.loc),
};

// Officially, they ask for columnNumber. But I don't see any usages of it in the code!
// source[2] = G.Property{
// .key = Expr{ .loc = expr.loc, .data = Prefill.Data.ColumnNumber },
// .value = p.e(E.Number{ .value = @intToFloat(f64, expr.loc.start) }, expr.loc),
// };

args[4] = p.e(E.Object{
.properties = source,
}, expr.loc);
args[5] = Expr{ .data = Prefill.Data.This, .loc = expr.loc };
}

return p.e(E.Call{
.target = p.jsxStringsToMemberExpressionAutomatic(expr.loc, is_static_jsx),
.args = args,
// Enable tree shaking
.can_be_unwrapped_if_unused = !p.options.ignore_dce_annotations,
.was_jsx_element = true,
}, expr.loc);
},
else => unreachable,
}
},
else => unreachable,
}

@@ -11057,6 +11282,26 @@ pub fn NewParser(
for (e_.parts) |*part| {
part.value = p.visitExpr(part.value);
}

if (e_.tag) |tag| {
if (tag.data == .e_import_identifier) {
const ref = tag.data.e_import_identifier.ref;
if (p.macro_refs.get(ref)) |import_record_id| {
const name = p.symbols.items[ref.inner_index].original_name;
const record = &p.import_records.items[import_record_id];
return p.options.macro_context.call(
record.path.text,
p.source.path.sourceDir(),
p.log,
p.source,
record.range,
expr,
&.{},
name,
) catch return expr;
}
}
}
},

.e_binary => |e_| {
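The tagged-template hunk above is where a macro finally runs: when the template's tag is an e_import_identifier whose Ref was recorded in macro_refs during import parsing, the whole expression is handed to p.options.macro_context.call and the result replaces the template (falling back to the original expression on error). The real Ref and import record types live in js_ast.zig and import_record.zig; a simplified sketch of the bookkeeping with placeholder types:

const std = @import("std");

const Ref = struct { source_index: u32, inner_index: u32 };

// Maps the imported macro binding to the index of its "macro:" import record.
const MacroRefs = std.AutoHashMap(Ref, u32);

fn importRecordForTag(macro_refs: *const MacroRefs, tag_ref: Ref) ?u32 {
    // Null means the tag is an ordinary identifier and the template is left alone.
    return macro_refs.get(tag_ref);
}

test "macro ref lookup" {
    var refs = MacroRefs.init(std.testing.allocator);
    defer refs.deinit();
    try refs.put(.{ .source_index = 0, .inner_index = 1 }, 42);
    std.debug.assert(importRecordForTag(&refs, .{ .source_index = 0, .inner_index = 1 }).? == 42);
}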
@@ -14942,6 +15187,7 @@ pub fn NewParser(
scope_order.appendAssumeCapacity(ScopeOrder{ .loc = locModuleScope, .scope = scope });
this.* = P{
.cjs_import_stmts = @TypeOf(this.cjs_import_stmts).init(allocator),
.macro_refs = @TypeOf(this.macro_refs).init(allocator),
// This must default to true or else parsing "in" won't work right.
// It will fail for the case in the "in-keyword.js" file
.allow_in = true,
@@ -15021,6 +15267,9 @@ const JSXParser = NewParser(.{ .jsx = true });
const TSXParser = NewParser(.{ .jsx = true, .typescript = true });
const TypeScriptParser = NewParser(.{ .typescript = true });

const JavaScriptMacroParser = NewParser(.{ .is_macro = true });
const TypeScriptMacroParser = NewParser(.{ .typescript = true, .is_macro = true, .jsx = true });

const JavaScriptParserFastRefresh = NewParser(.{ .react_fast_refresh = true });
const JSXParserFastRefresh = NewParser(.{ .jsx = true, .react_fast_refresh = true });
const TSXParserFastRefresh = NewParser(.{ .jsx = true, .typescript = true, .react_fast_refresh = true });
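The two new macro parser variants above follow the existing pattern: NewParser is a comptime-generic function that returns a distinct, fully specialized struct type for every options value, so macro behavior is baked into the type rather than gated by runtime flags. A stripped-down sketch of the pattern, with invented field names, purely for illustration:

fn NewParser(comptime opts: struct { jsx: bool = false, typescript: bool = false, is_macro: bool = false }) type {
    return struct {
        // Each distinct `opts` value produces a separate type at compile time.
        pub const supports_jsx = opts.jsx;
        pub const is_macro_parser = opts.is_macro;
    };
}

const MacroParser = NewParser(.{ .typescript = true, .is_macro = true, .jsx = true });

comptime {
    if (!MacroParser.is_macro_parser) @compileError("specialization lost");
}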
1213
src/linker.zig
File diff suppressed because it is too large
@@ -499,3 +499,11 @@ pub fn contentsFromPath(path: string) ?string {
}

pub const buffer_fallback_import_name: string = "node:buffer";

pub fn isDisabledFallback(name: string) bool {
if (name.len >= 2) {
return (strings.eqlComptime(name, "fs") or strings.eqlComptime(name[0..3], "fs/")) or (name.len >= "module".len and strings.eqlComptime(name, "module"));
}

return false;
}
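For a quick read on what the new isDisabledFallback above matches: it returns true for "fs" itself, anything under "fs/", and "module", and false otherwise. A hedged usage sketch; the import path and module name below are assumptions, only the call shape is taken from the diff:

const std = @import("std");
const node_fallbacks = @import("./node_fallbacks.zig"); // import path is an assumption

test "disabled fallback specifiers" {
    std.debug.assert(node_fallbacks.isDisabledFallback("fs"));
    std.debug.assert(node_fallbacks.isDisabledFallback("fs/promises"));
    std.debug.assert(node_fallbacks.isDisabledFallback("module"));
    std.debug.assert(!node_fallbacks.isDisabledFallback("path"));
}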
@@ -337,18 +337,26 @@ pub const Platform = enum {
neutral,
browser,
bun,
bunMacro,
node,

pub inline fn isBun(this: Platform) bool {
return switch (this) {
.bunMacro, .bun => true,
else => false,
};
}

pub inline fn isClient(this: Platform) bool {
return switch (this) {
.bun => false,
.bunMacro, .bun => false,
else => true,
};
}

pub inline fn supportsBrowserField(this: Platform) bool {
return switch (this) {
.neutral, .browser, .bun => true,
.bunMacro, .neutral, .browser, .bun => true,
else => false,
};
}
@@ -359,7 +367,7 @@ pub const Platform = enum {
pub inline fn processBrowserDefineValue(this: Platform) ?string {
return switch (this) {
.browser => browser_define_value_true,
.bun, .node => browser_define_value_false,
.bunMacro, .bun, .node => browser_define_value_false,
else => null,
};
}
@@ -446,6 +454,7 @@ pub const Platform = enum {
var listc = [_]string{ MAIN_FIELD_NAMES[0], MAIN_FIELD_NAMES[1], MAIN_FIELD_NAMES[2] };
array.set(Platform.browser, &listc);
array.set(Platform.bun, &listc);
array.set(Platform.bunMacro, &listc);

// Original comment:
// The neutral platform is for people that don't want esbuild to try to
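Worth noting about the Platform changes above: isBun, isClient, supportsBrowserField, and processBrowserDefineValue all end in an else arm, so adding the bunMacro variant would not have caused a compile error by itself; each helper had to be updated explicitly so the macro target behaves like bun. A small illustration of that trade-off with a simplified enum (not the real options.zig):

const std = @import("std");

const Target = enum { browser, bun, bunMacro };

// With an `else` arm, a new variant silently falls into the default...
fn isBunWithElse(t: Target) bool {
    return switch (t) {
        .bun => true,
        else => false, // bunMacro lands here unless listed explicitly
    };
}

// ...whereas an exhaustive switch forces every call site to decide.
fn isBunExhaustive(t: Target) bool {
    return switch (t) {
        .bun, .bunMacro => true,
        .browser => false,
    };
}

test "bunMacro must be listed explicitly" {
    std.debug.assert(!isBunWithElse(.bunMacro));
    std.debug.assert(isBunExhaustive(.bunMacro));
}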
File diff suppressed because it is too large
@@ -205,6 +205,7 @@ pub const Runtime = struct {
hot_module_reloading: bool = false,
hot_module_reloading_entry: bool = false,
keep_names_for_arrow_functions: bool = true,
is_macro: bool = false,
};

pub const Names = struct {