Compare commits

...

22 Commits

Author SHA1 Message Date
Jarred Sumner
4f09b3beda Even More Code 2022-02-08 23:03:18 -08:00
Jarred Sumner
1f137ca78a port like 1/3 of it 2022-02-08 20:45:55 -08:00
Jarred Sumner
519c037e4b wip 2022-02-08 20:45:40 -08:00
Jarred Sumner
07436b6001 wip 2022-02-08 20:45:40 -08:00
Jarred Sumner
ac5ea3c875 Update runtime.version 2022-02-08 20:43:48 -08:00
Jarred Sumner
8f8278eca2 Possibly fix #98 2022-02-08 20:43:48 -08:00
Jarred Sumner
50802bdd58 📷 2022-02-08 20:43:32 -08:00
Jarred Sumner
7e8dcde203 Move __exportDefault transform to parser 2022-02-08 20:43:32 -08:00
Jarred Sumner
9a1177bf04 Update build-id 2022-02-08 20:43:32 -08:00
Jarred Sumner
cf634094cf Add test for #117 2022-02-08 20:43:32 -08:00
Jarred Sumner
a16a0ec670 Fix #117 2022-02-08 20:43:32 -08:00
Jarred Sumner
f126f0d2bb Fix #117 2022-02-08 20:43:32 -08:00
Jarred Sumner
42a22af968 Don't import __FastRefreshRuntime if it's a CJS transform 2022-02-08 20:43:32 -08:00
Jarred Sumner
c5cf6e5365 Reduce debug build memory usage 2022-02-08 20:43:32 -08:00
Jarred Sumner
57c3470852 [JS Parser] Fix bug with export default Foo when Foo was a bundled import 2022-02-08 20:43:18 -08:00
Jarred Sumner
906ca8ba1f Update transpiler.test.js 2022-02-08 20:43:18 -08:00
Jarred Sumner
7f3bbb20d0 [TS] Implement import {type foo} from 'bar'; (type inside clause) 2022-02-08 20:43:18 -08:00
Jarred Sumner
e6d972b6c5 [bun-macro-relay] resolve the artifact directory 2022-02-08 20:43:18 -08:00
Jarred Sumner
d935075776 wip but this is the wrong approach 2022-02-07 19:06:17 -08:00
Jarred Sumner
a28c11377a wip 2022-02-07 19:05:59 -08:00
Jarred Sumner
0ab0f1d886 wip 2022-02-07 16:25:48 -08:00
Jarred Sumner
a4ce8dd869 Update completions for bun --config 2022-02-06 17:04:27 -08:00
40 changed files with 2934 additions and 176 deletions

View File

@@ -1 +1 @@
69
70

View File

@@ -75,7 +75,11 @@ complete -c bun \
complete -c bun \
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'u' -l 'origin' -r -d 'Server URL. Rewrites import paths'
complete -c bun \
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'p' -l 'port' -r -d 'Port number to start server from'
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'p' -l 'port' -r -d 'Port number to start server from'
complete -c bun \
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts);" -s 'c' -l 'config' -r -d 'Load bun configuration from a bunfig.toml file'
complete -c bun \
-n "not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) install remove add;" --no-files -s 'd' -l 'define' -r -d 'Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:\"development\"'
complete -c bun \

View File

@@ -51,7 +51,8 @@ _bun() {
'--verbose[Excessively verbose logging]' \
'--cwd[Set a specific cwd]:cwd' \
'--backend[Platform-specific optimizations for installing dependencies]:backend:("clonefile" "copyfile" "hardlink" "clonefile_each_dir")' \
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' &&
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' \
'--config[Path to bunfig.toml file]:config' &&
ret=0
case $state in
@@ -72,7 +73,8 @@ _bun() {
'--cwd[Change directory]:cwd' \
'--help[Show command help]' \
'-h[Show command help]' \
'--use[Use a framework, e.g. "next"]:use' &&
'--use[Use a framework, e.g. "next"]:use' \
'--config[Path to bunfig.toml file]:config' &&
ret=0
case $state in
@@ -168,7 +170,8 @@ _bun() {
'--jsx-factory[Changes the function called when compiling JSX elements using the classic JSX runtime]:jsx-factory' \
'--jsx-fragment[Changes the function called when compiling JSX fragments]:jsx-fragment' \
'--jsx-import-source[Declares the module specifier to be used for importing the jsx and jsxs factory functions. Default: "react"]:jsx-import-source' \
'--port[Port number]:port' &&
'--port[Port number]:port' \
'--config[Path to bunfig.toml file]:config' &&
ret=0
;;
@@ -388,7 +391,8 @@ _bun() {
'--verbose[Excessively verbose logging]' \
'--cwd[Set a specific cwd]:cwd' \
'--backend[Platform-specific optimizations for installing dependencies]:backend:("clonefile" "copyfile" "hardlink" "clonefile_each_dir")' \
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' &&
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' \
'--config[Path to bunfig.toml file]:config' &&
ret=0
;;
@@ -418,7 +422,8 @@ _bun() {
'--silent[Don'"'"'t output anything]' \
'--verbose[Excessively verbose logging]' \
'--backend[Platform-specific optimizations for installing dependencies]:backend:("clonefile" "copyfile" "hardlink" "clonefile_each_dir")' \
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' &&
'--link-native-bins[Link "bin" from a matching platform-specific dependency instead. Default: esbuild, turbo]:link-native-bins' \
'--config[Path to bunfig.toml file]:config' &&
ret=0
case $state in
@@ -439,7 +444,8 @@ _bun() {
'--cwd[Change directory]:cwd' \
'--help[Show command help]' \
'-h[Show command help]' \
'--silent[Don'"'"'t echo the command]' &&
'--silent[Don'"'"'t echo the command]' \
'--config[Path to bunfig.toml file]:config' &&
ret=0
case $state in

View File

@@ -3,14 +3,12 @@ framework = "next"
origin = "http://localhost:5000"
inline.array = [1234, 4, 5, 6]
[macros]
react-relay = { "graphql" = "node_modules/bun-macro-relay/bun-macro-relay.tsx" }
[bundle.packages]
"@emotion/react" = true
[dev]
foo = 123
"foo.bar" = "baz"

View File

@@ -16,6 +16,18 @@ describe("Bun.Transpiler", () => {
const code = `import { useParams } from "remix";
import type { LoaderFunction, ActionFunction } from "remix";
import { type xx } from 'mod';
import { type xx as yy } from 'mod';
import { type 'xx' as yy } from 'mod';
import { type as } from 'mod';
import { type as as } from 'mod';
import { type as as as } from 'mod';
import { type xx } from 'mod';
import { type xx as yy } from 'mod';
import { type if as yy } from 'mod';
import { type 'xx' as yy } from 'mod';
import React, { type ReactNode, Component } from 'react';
export const loader: LoaderFunction = async ({
params
@@ -40,6 +52,8 @@ describe("Bun.Transpiler", () => {
it("reports import paths, excluding types", () => {
const imports = transpiler.scanImports(code);
expect(imports.filter(({ path }) => path === "remix")).toHaveLength(1);
expect(imports.filter(({ path }) => path === "mod")).toHaveLength(0);
expect(imports.filter(({ path }) => path === "react")).toHaveLength(1);
});
});
@@ -123,12 +137,22 @@ describe("Bun.Transpiler", () => {
});
it("removes types", () => {
expect(code.includes("mod")).toBe(true);
expect(code.includes("xx")).toBe(true);
expect(code.includes("ActionFunction")).toBe(true);
expect(code.includes("LoaderFunction")).toBe(true);
expect(code.includes("ReactNode")).toBe(true);
expect(code.includes("React")).toBe(true);
expect(code.includes("Component")).toBe(true);
const out = transpiler.transformSync(code);
expect(out.includes("ActionFunction")).toBe(false);
expect(out.includes("LoaderFunction")).toBe(false);
expect(out.includes("mod")).toBe(false);
expect(out.includes("xx")).toBe(false);
expect(out.includes("ReactNode")).toBe(false);
expect(out.includes("React")).toBe(true);
expect(out.includes("Component")).toBe(true);
const { exports } = transpiler.scan(out);
expect(exports[0]).toBe("action");

Binary file not shown.

View File

@@ -27,5 +27,6 @@
"/number-literal-bug.js",
"/caught-require.js",
"/package-json-utf8.js",
"/multiple-var.js"
"/multiple-var.js",
"/export-default-module-hot.js"
]

View File

@@ -0,0 +1,17 @@
import {
__cJS2eSM
} from "http://localhost:8080/bun:wrap";
import {
__exportDefault
} from "http://localhost:8080/bun:wrap";
export default __cJS2eSM(function(module, exports) {
__exportDefault(module.exports, module.id);
function test() {
testDone(import.meta.url);
}
Object.defineProperty(module.exports,"test",{get: () => test, enumerable: true, configurable: true});
}, "export-default-module-hot.js");

View File

@@ -0,0 +1,17 @@
import {
__cJS2eSM
} from "http://localhost:8080/bun:wrap";
import {
__exportDefault
} from "http://localhost:8080/bun:wrap";
export default __cJS2eSM(function(module, exports) {
__exportDefault(module.exports, module.id);
function test() {
testDone(import.meta.url);
}
Object.defineProperty(module.exports,"test",{get: () => test, enumerable: true, configurable: true});
}, "export-default-module-hot.js");

View File

@@ -0,0 +1,17 @@
import {
__cJS2eSM
} from "http://localhost:8080/bun:wrap";
import {
__exportDefault
} from "http://localhost:8080/bun:wrap";
export default __cJS2eSM(function(module, exports) {
__exportDefault(module.exports, module.id);
function test() {
testDone(import.meta.url);
}
Object.defineProperty(module.exports,"test",{get: () => test, enumerable: true, configurable: true});
}, "export-default-module-hot.js");

View File

@@ -0,0 +1,17 @@
import {
__cJS2eSM
} from "http://localhost:8080/bun:wrap";
import {
__exportDefault
} from "http://localhost:8080/bun:wrap";
export default __cJS2eSM(function(module, exports) {
__exportDefault(module.exports, module.id);
function test() {
testDone(import.meta.url);
}
Object.defineProperty(module.exports,"test",{get: () => test, enumerable: true, configurable: true});
}, "export-default-module-hot.js");

View File

@@ -4,7 +4,6 @@ export {default as auth} from "http://localhost:8080/_auth.js";
export {default as login} from "http://localhost:8080/_login.js";
export * from "http://localhost:8080/_bacon.js";
export let yoyoyo = "yoyoyo";
export default function hey() {
return true;
}

View File

@@ -4,7 +4,6 @@ export {default as auth} from "http://localhost:8080/_auth.js";
export {default as login} from "http://localhost:8080/_login.js";
export * from "http://localhost:8080/_bacon.js";
export let yoyoyo = "yoyoyo";
export default function hey() {
return true;
}

View File

@@ -0,0 +1,6 @@
// This test passes if there's no syntax error
export default module.id;
export function test() {
testDone(import.meta.url);
}

View File

@@ -1,4 +1,5 @@
import { parse, print } from "graphql/index.js";
import { resolve } from "path";
//
// 1. Parse the GraphQL tag.
@@ -21,17 +22,7 @@ if (BUN_MACRO_RELAY_ARTIFACT_DIRECTORY) {
artifactDirectory = BUN_MACRO_RELAY_ARTIFACT_DIRECTORY;
}
// TODO: platform-independent path cleaning
if (!artifactDirectory.startsWith("/")) {
while (artifactDirectory.endsWith("/")) {
artifactDirectory = artifactDirectory.substring(
0,
artifactDirectory.length - 1
);
}
artifactDirectory = Bun.cwd + artifactDirectory;
}
artifactDirectory = resolve(artifactDirectory);
export function graphql(node) {
let query;

3
src/api/schema.d.ts generated vendored
View File

@@ -21,6 +21,7 @@ export enum Loader {
file = 6,
json = 7,
toml = 8,
mdx = 9,
}
export const LoaderKeys = {
1: "jsx",
@@ -39,6 +40,8 @@ export const LoaderKeys = {
json: "json",
8: "toml",
toml: "toml",
9: "mdx",
mdx: "mdx",
};
export enum FrameworkEntryPointType {
client = 1,

4
src/api/schema.js generated
View File

@@ -7,6 +7,7 @@ const Loader = {
6: 6,
7: 7,
8: 8,
9: 9,
jsx: 1,
js: 2,
ts: 3,
@@ -15,6 +16,7 @@ const Loader = {
file: 6,
json: 7,
toml: 8,
mdx: 9,
};
const LoaderKeys = {
1: "jsx",
@@ -25,6 +27,7 @@ const LoaderKeys = {
6: "file",
7: "json",
8: "toml",
9: "mdx",
jsx: "jsx",
js: "js",
ts: "ts",
@@ -33,6 +36,7 @@ const LoaderKeys = {
file: "file",
json: "json",
toml: "toml",
mdx: "mdx",
};
const FrameworkEntryPointType = {
1: 1,

View File

@@ -9,6 +9,7 @@ smol Loader {
file = 6;
json = 7;
toml = 8;
mdx = 9;
}
smol FrameworkEntryPointType {

View File

@@ -356,6 +356,9 @@ pub const Api = struct {
/// toml
toml,
/// mdx
mdx,
_,
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {

View File

@@ -70,6 +70,16 @@ pub const Ref = packed struct {
return @intCast(Int, int);
}
pub inline fn slice(this: Ref, text: []const u8) []const u8 {
return if (!this.isNull()) text[this.source_index .. this.source_index + this.inner_index] else "";
}
pub fn from(text: []const u8, section: []const u8) Ref {
const start = Ref.toInt(@ptrToInt(section.ptr) - @ptrToInt(text.ptr));
const end = Ref.toInt(section.len);
return Ref{ .source_index = start, .inner_index = end, .is_source_contents_slice = true };
}
pub fn hash(key: Ref) u32 {
return @truncate(u32, key.hash64());
}

View File

@@ -54,6 +54,7 @@ const Report = @import("./report.zig");
const Linker = linker.Linker;
const Resolver = _resolver.Resolver;
const TOML = @import("./toml/toml_parser.zig").TOML;
const MDX = @import("./mdx/mdx_parser.zig").MDX;
const EntryPoints = @import("./bundler/entry_points.zig");
pub usingnamespace EntryPoints;
@@ -2439,7 +2440,7 @@ pub const Bundler = struct {
}
switch (loader) {
.jsx, .tsx, .js, .ts, .json, .toml => {
.jsx, .tsx, .js, .ts, .json, .toml, .mdx => {
var result = bundler.parse(
ParseOptions{
.allocator = bundler.allocator,
@@ -2769,6 +2770,55 @@ pub const Bundler = struct {
.input_fd = input_fd,
};
},
.mdx => {
var jsx = this_parse.jsx;
jsx.parse = loader.isJSX();
var opts = js_parser.Parser.Options.init(jsx, loader);
opts.enable_bundling = false;
opts.transform_require_to_import = true;
opts.can_import_from_bundle = bundler.options.node_modules_bundle != null;
// HMR is enabled when devserver is running
// unless you've explicitly disabled it
// or you're running in SSR
// or the file is a node_module
opts.features.hot_module_reloading = bundler.options.hot_module_reloading and
bundler.options.platform.isNotBun() and
(!opts.can_import_from_bundle or
(opts.can_import_from_bundle and !path.isNodeModule()));
opts.features.react_fast_refresh = opts.features.hot_module_reloading and
jsx.parse and
bundler.options.jsx.supports_fast_refresh;
opts.filepath_hash_for_hmr = file_hash orelse 0;
opts.warn_about_unbundled_modules = bundler.options.platform.isNotBun();
if (bundler.macro_context == null) {
bundler.macro_context = js_ast.Macro.MacroContext.init(bundler);
}
// we'll just always enable top-level await
// this is incorrect for Node.js files which are CommonJS modules
opts.features.top_level_await = true;
opts.macro_context = &bundler.macro_context.?;
opts.features.is_macro_runtime = bundler.options.platform == .bun_macro;
var mdx: MDX = undefined;
mdx.setup(opts, bundler.log, &source, bundler.options.define, allocator) catch return null;
if (mdx.parse()) |result| {
if (result.ok) {
return ParseResult{
.ast = result.ast,
.source = source,
.loader = loader,
.input_fd = input_fd,
};
}
} else |_| {}
return null;
},
.json => {
var expr = json_parser.ParseJSON(&source, bundler.log, allocator) catch return null;
var stmt = js_ast.Stmt.alloc(js_ast.S.ExportDefault, js_ast.S.ExportDefault{
@@ -2787,6 +2837,7 @@ pub const Bundler = struct {
.input_fd = input_fd,
};
},
.toml => {
var expr = TOML.parse(&source, bundler.log, allocator) catch return null;
var stmt = js_ast.Stmt.alloc(js_ast.S.ExportDefault, js_ast.S.ExportDefault{
@@ -2889,7 +2940,13 @@ pub const Bundler = struct {
const mime_type_ext = bundler.options.out_extensions.get(path.name.ext) orelse path.name.ext;
switch (loader) {
.js, .jsx, .ts, .tsx, .css => {
.js,
.jsx,
.ts,
.tsx,
.css,
.mdx,
=> {
return ServeResult{
.file = options.OutputFile.initPending(loader, resolved),
.mime_type = MimeType.byLoader(

View File

@@ -200,7 +200,7 @@ pub const JavaScript = struct {
const result = parser.parse() catch |err| {
if (temp_log.errors == 0) {
log.addRangeError(source, parser.lexer.range(), @errorName(err)) catch unreachable;
log.addError(source, logger.Loc{ .start = 0 }, @errorName(err)) catch unreachable;
}
temp_log.appendToMaybeRecycled(log, source) catch {};

View File

@@ -163,7 +163,7 @@ pub const Arguments = struct {
clap.parseParam("--bunfile <STR> Use a .bun file (default: node_modules.bun)") catch unreachable,
clap.parseParam("--server-bunfile <STR> Use a .server.bun file (default: node_modules.server.bun)") catch unreachable,
clap.parseParam("--cwd <STR> Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable,
clap.parseParam("-c, --config <PATH>? Config file to load bun from (e.g. -c bunfig.toml") catch unreachable,
clap.parseParam("-c, --config <PATH>? Config file to load bun from (e.g. -c bunfig.toml") catch unreachable,
clap.parseParam("--disable-react-fast-refresh Disable React Fast Refresh") catch unreachable,
clap.parseParam("--disable-hmr Disable Hot Module Reloading (disables fast refresh too)") catch unreachable,
clap.parseParam("--extension-order <STR>... defaults to: .tsx,.ts,.jsx,.js,.json ") catch unreachable,

View File

@@ -892,7 +892,7 @@ pub const RequestContext = struct {
}
switch (loader) {
.toml, .json, .ts, .tsx, .js, .jsx => {
.toml, .json, .ts, .tsx, .js, .jsx, .mdx => {
// Since we already have:
// - The file descriptor
// - The path
@@ -2230,7 +2230,7 @@ pub const RequestContext = struct {
if (written.empty) {
switch (loader) {
.css => try ctx.sendNoContent(),
.toml, .js, .jsx, .ts, .tsx, .json => {
.toml, .js, .jsx, .ts, .tsx, .json, .mdx => {
const buf = "export default {};";
const strong_etag = comptime std.hash.Wyhash.hash(0, buf);
const etag_content_slice = std.fmt.bufPrintIntToSlice(strong_etag_buffer[0..49], strong_etag, 16, .upper, .{});
@@ -3125,7 +3125,7 @@ pub const Server = struct {
// We use a secondary loop so that we avoid the extra branch in a hot code path
Analytics.Features.fast_refresh = server.bundler.options.jsx.supports_fast_refresh;
server.detectTSConfig();
server.detectFastRefresh();
try server.initWatcher();
did_init = true;
Analytics.enqueue(Analytics.EventName.http_start);
@@ -3478,6 +3478,8 @@ pub const Server = struct {
server.bundler.configureLinker();
try server.bundler.configureRouter(true);
server.detectFastRefresh();
if (debug.dump_environment_variables) {
server.bundler.dumpEnvironmentVariables();
return;

View File

@@ -54,7 +54,7 @@ fn init(comptime str: string, t: Category) MimeType {
// TODO: improve this
pub fn byLoader(loader: Loader, ext: string) MimeType {
switch (loader) {
.tsx, .ts, .js, .jsx, .json => {
.tsx, .ts, .js, .jsx, .json, .mdx => {
return javascript;
},
.css => {
@@ -83,7 +83,7 @@ pub fn byExtension(ext: string) MimeType {
Four.case("gif") => MimeType.init("image/gif", .image),
Four.case("png") => MimeType.init("image/png", .image),
Four.case("bmp") => MimeType.init("image/bmp", .image),
Four.case("jsx"), Four.case("mjs") => MimeType.javascript,
Four.case("jsx"), Four.case("mjs"), Four.case("mdx") => MimeType.javascript,
Four.case("wav") => MimeType.init("audio/wave", .audio),
Four.case("aac") => MimeType.init("audio/aic", .audio),
Four.case("mp4") => MimeType.init("video/mp4", .video),

View File

@@ -1632,7 +1632,7 @@ pub const VirtualMachine = struct {
const loader = vm.bundler.options.loaders.get(path.name.ext) orelse .file;
switch (loader) {
.js, .jsx, .ts, .tsx, .json, .toml => {
.js, .jsx, .ts, .tsx, .json, .toml, .mdx => {
vm.transpiled_count += 1;
vm.bundler.resetStore();
const hash = http.Watcher.getHash(path.text);

View File

@@ -268,6 +268,13 @@ pub fn BabyList(comptime Type: type) type {
this.update(list_);
}
pub fn fetchPush(this: *ListType, allocator: std.mem.Allocator, value: Type) OOM!*Type {
var list_ = this.list();
try list_.append(allocator, value);
this.update(list_);
return this.ptr[list_.items.len - 1];
}
pub inline fn slice(this: ListType) []Type {
@setRuntimeSafety(false);
return this.ptr[0..this.len];
@@ -1211,6 +1218,76 @@ pub const E = struct {
.{ "key", .key },
});
};
pub const Tag = enum {
a,
blockquote,
br,
caption,
code,
div,
em,
h1,
h2,
h3,
h4,
h5,
h6,
hr,
img,
li,
ol,
p,
pre,
span,
strong,
table,
tbody,
td,
tfoot,
th,
thead,
tr,
ul,
main,
pub const MapType = std.enums.EnumArray(Tag, E.String);
pub const map: MapType = brk: {
var map_ = MapType.initFill(E.String{ .utf8 = "" });
map_.set(.a, E.String{ .utf8 = "a" });
map_.set(.blockquote, E.String{ .utf8 = "blockquote" });
map_.set(.br, E.String{ .utf8 = "br" });
map_.set(.caption, E.String{ .utf8 = "caption" });
map_.set(.code, E.String{ .utf8 = "code" });
map_.set(.div, E.String{ .utf8 = "div" });
map_.set(.em, E.String{ .utf8 = "em" });
map_.set(.h1, E.String{ .utf8 = "h1" });
map_.set(.h2, E.String{ .utf8 = "h2" });
map_.set(.h3, E.String{ .utf8 = "h3" });
map_.set(.h4, E.String{ .utf8 = "h4" });
map_.set(.h5, E.String{ .utf8 = "h5" });
map_.set(.h6, E.String{ .utf8 = "h6" });
map_.set(.hr, E.String{ .utf8 = "hr" });
map_.set(.img, E.String{ .utf8 = "img" });
map_.set(.li, E.String{ .utf8 = "li" });
map_.set(.ol, E.String{ .utf8 = "ol" });
map_.set(.p, E.String{ .utf8 = "p" });
map_.set(.pre, E.String{ .utf8 = "pre" });
map_.set(.span, E.String{ .utf8 = "span" });
map_.set(.strong, E.String{ .utf8 = "strong" });
map_.set(.table, E.String{ .utf8 = "table" });
map_.set(.tbody, E.String{ .utf8 = "tbody" });
map_.set(.td, E.String{ .utf8 = "td" });
map_.set(.tfoot, E.String{ .utf8 = "tfoot" });
map_.set(.th, E.String{ .utf8 = "th" });
map_.set(.thead, E.String{ .utf8 = "thead" });
map_.set(.tr, E.String{ .utf8 = "tr" });
map_.set(.ul, E.String{ .utf8 = "ul" });
map_.set(.main, E.String{ .utf8 = "main" });
break :brk map_;
};
};
};
pub const Missing = struct {

View File

@@ -52,7 +52,6 @@ pub const JSONOptions = struct {
json_warn_duplicate_keys: bool = true,
};
pub fn NewLexer(comptime json_options: JSONOptions) type {
return struct {
const LexerType = @This();
@@ -205,7 +204,7 @@ pub fn NewLexer(comptime json_options: JSONOptions) type {
/// Look ahead at the next n codepoints without advancing the iterator.
/// If fewer than n codepoints are available, then return the remainder of the string.
fn peek(it: *LexerType, n: usize) string {
pub fn peek(it: *LexerType, n: usize) string {
const original_i = it.current;
defer it.current = original_i;
@@ -744,8 +743,8 @@ pub fn NewLexer(comptime json_options: JSONOptions) type {
return code_point;
}
inline fn step(lexer: *LexerType) void {
// public for usage by MDXLexer
pub inline fn step(lexer: *LexerType) void {
lexer.code_point = lexer.nextCodepoint();
// Track the approximate number of newlines in the file so we can preallocate
@@ -1747,10 +1746,10 @@ pub fn NewLexer(comptime json_options: JSONOptions) type {
return lex;
}
pub fn init(log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) !LexerType {
try tables.initJSXEntityMap();
pub fn initNoAutoStep(log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) LexerType {
tables.initJSXEntityMap();
var empty_string_literal: JavascriptString = &emptyJavaScriptString;
var lex = LexerType{
return LexerType{
.log = log,
.source = source,
.string_literal = empty_string_literal,
@@ -1759,6 +1758,10 @@ pub fn NewLexer(comptime json_options: JSONOptions) type {
.allocator = allocator,
.comments_to_preserve_before = std.ArrayList(js_ast.G.Comment).init(allocator),
};
}
pub fn init(log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) !LexerType {
var lex = initNoAutoStep(log, source, allocator);
lex.step();
try lex.next();

View File

@@ -569,7 +569,7 @@ pub var jsxEntity: JSXEntityMap = undefined;
var has_loaded_jsx_map = false;
// There's probably a way to move this to comptime
pub fn initJSXEntityMap() !void {
pub fn initJSXEntityMap() void {
if (has_loaded_jsx_map) {
return;
}

View File

@@ -313,7 +313,7 @@ pub const ImportScanner = struct {
kept_import_equals: bool = false,
removed_import_equals: bool = false,
pub fn scan(comptime P: type, p: P, stmts: []Stmt) !ImportScanner {
pub fn scan(comptime P: type, p: P, stmts: []Stmt, will_transform_to_common_js: bool) !ImportScanner {
var scanner = ImportScanner{};
var stmts_end: usize = 0;
const allocator = p.allocator;
@@ -655,6 +655,10 @@ pub const ImportScanner = struct {
.e_identifier => {
continue;
},
.e_import_identifier => |import_ident| {
st.default_name.ref = import_ident.ref;
continue;
},
.e_function => |func| {
if (func.func.name) |name_ref| {
if (name_ref.ref != null) {
@@ -742,6 +746,25 @@ pub const ImportScanner = struct {
},
}
}
} else if (will_transform_to_common_js) {
const expr: js_ast.Expr = switch (st.value) {
.expr => |exp| exp,
.stmt => |s2| brk2: {
switch (s2.data) {
.s_function => |func| {
break :brk2 p.e(E.Function{ .func = func.func }, s2.loc);
},
.s_class => |class| {
break :brk2 p.e(class.class, s2.loc);
},
else => unreachable,
}
},
};
var export_default_args = p.allocator.alloc(Expr, 2) catch unreachable;
export_default_args[0] = p.@"module.exports"(expr.loc);
export_default_args[1] = expr;
stmt = p.s(S.SExpr{ .value = p.callRuntime(expr.loc, "__exportDefault", export_default_args) }, expr.loc);
}
},
.s_export_clause => |st| {
@@ -1954,7 +1977,6 @@ pub const ScanPassResult = struct {
pub const Parser = struct {
options: Options,
lexer: js_lexer.Lexer,
log: *logger.Log,
source: *const logger.Source,
define: *Define,
@@ -2010,7 +2032,7 @@ pub const Parser = struct {
fn _scanImports(self: *Parser, comptime ParserType: type, scan_pass: *ScanPassResult) !void {
var p: ParserType = undefined;
try ParserType.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p);
try ParserType.init(self.allocator, self.log, self.source, self.define, try js_lexer.Lexer.init(self.log, self.source.*, self.allocator), self.options, &p);
p.import_records = &scan_pass.import_records;
p.named_imports = &scan_pass.named_imports;
@@ -2081,53 +2103,17 @@ pub const Parser = struct {
if (!self.options.ts and self.options.features.is_macro_runtime) return try self._parse(JSParserMacro);
if (self.options.ts and self.options.jsx.parse) {
if (self.options.features.react_fast_refresh) {
return try self._parse(TSXParserFastRefresh);
}
return try self._parse(TSXParser);
} else if (self.options.ts) {
if (self.options.features.react_fast_refresh) {
return try self._parse(TypeScriptParserFastRefresh);
}
return try self._parse(TypeScriptParser);
} else if (self.options.jsx.parse) {
if (self.options.features.react_fast_refresh) {
return try self._parse(JSXParserFastRefresh);
}
return try self._parse(JSXParser);
} else {
if (self.options.features.react_fast_refresh) {
return try self._parse(JavaScriptParserFastRefresh);
}
return try self._parse(JavaScriptParser);
}
}
fn _parse(self: *Parser, comptime ParserType: type) !js_ast.Result {
var p: ParserType = undefined;
try ParserType.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p);
defer p.lexer.deinit();
var result: js_ast.Result = undefined;
// Consume a leading hashbang comment
var hashbang: string = "";
if (p.lexer.token == .t_hashbang) {
hashbang = p.lexer.identifier;
try p.lexer.next();
}
// Parse the file in the first pass, but do not bind symbols
var opts = ParseStatementOptions{ .is_module_scope = true };
// Parsing seems to take around 2x as much time as visiting.
// Which makes sense.
// June 4: "Parsing took: 18028000"
// June 4: "Rest of this took: 8003000"
const stmts = try p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts);
pub fn runVisitPassAndFinish(comptime ParserType: type, p: *ParserType, stmts: []Stmt) !js_ast.Result {
try p.prepareForVisitPass();
// ESM is always strict mode. I don't think we need this.
@@ -2170,6 +2156,12 @@ pub const Parser = struct {
exports_kind = .cjs;
if (p.options.transform_require_to_import) {
var args = p.allocator.alloc(Expr, 2) catch unreachable;
if (p.runtime_imports.__exportDefault == null and p.has_export_default) {
p.runtime_imports.__exportDefault = try p.declareGeneratedSymbol(.other, "__exportDefault");
p.resolveGeneratedSymbol(&p.runtime_imports.__exportDefault.?);
}
wrapper_expr = p.callRuntime(logger.Loc.Empty, "__cJS2eSM", args);
p.resolveGeneratedSymbol(&p.runtime_imports.__cJS2eSM.?);
@@ -2180,6 +2172,7 @@ pub const Parser = struct {
p.options.features.react_fast_refresh = false;
p.runtime_imports.__HMRModule = null;
p.runtime_imports.__FastRefreshModule = null;
p.runtime_imports.__FastRefreshRuntime = null;
p.runtime_imports.__HMRClient = null;
}
} else {
@@ -2187,7 +2180,7 @@ pub const Parser = struct {
}
// Auto-import JSX
if (ParserType.jsx_transform_type == .react) {
if (ParserType.jsx_transform_type == .react or ParserType.jsx_transform_type == .mdx) {
const jsx_filename_symbol = p.symbols.items[p.jsx_filename.ref.inner_index];
{
@@ -2683,19 +2676,38 @@ pub const Parser = struct {
// Pop the module scope to apply the "ContainsDirectEval" rules
// p.popScope();
return js_ast.Result{ .ast = try p.toAST(parts_slice, exports_kind, wrapper_expr), .ok = true };
}
result.ast = try p.toAST(parts_slice, exports_kind, wrapper_expr);
result.ok = true;
fn _parse(self: *Parser, comptime ParserType: type) !js_ast.Result {
var p: ParserType = undefined;
try ParserType.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p);
defer p.lexer.deinit();
// Consume a leading hashbang comment
var hashbang: string = "";
if (p.lexer.token == .t_hashbang) {
hashbang = p.lexer.identifier;
try p.lexer.next();
}
// Parse the file in the first pass, but do not bind symbols
var opts = ParseStatementOptions{ .is_module_scope = true };
// Parsing seems to take around 2x as much time as visiting.
// Which makes sense.
// June 4: "Parsing took: 18028000"
// June 4: "Rest of this took: 8003000"
const stmts = try p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts);
const result = self.runVisitPassAndFinish(ParserType, &p, stmts);
return result;
}
pub fn init(_options: Options, log: *logger.Log, source: *const logger.Source, define: *Define, allocator: std.mem.Allocator) !Parser {
const lexer = try js_lexer.Lexer.init(log, source.*, allocator);
return Parser{
.options = _options,
.allocator = allocator,
.lexer = lexer,
.define = define,
.source = source,
.log = log,
@@ -2728,7 +2740,7 @@ const ParseClassOptions = struct {
is_type_script_declare: bool = false,
};
const ParseStatementOptions = struct {
pub const ParseStatementOptions = struct {
ts_decorators: ?DeferredTsDecorators = null,
lexical_decl: LexicalDecl = .forbid,
is_module_scope: bool = false,
@@ -2827,6 +2839,7 @@ const JSXTransformType = enum {
none,
react,
macro,
mdx,
};
const ParserFeatures = struct {
@@ -2910,7 +2923,6 @@ pub fn NewParser(
const is_typescript_enabled = js_parser_features.typescript;
const is_jsx_enabled = js_parser_jsx != .none;
const only_scan_imports_and_do_not_visit = js_parser_features.scan_only;
const is_react_fast_refresh_enabled = js_parser_features.react_fast_refresh;
// P is for Parser!
// public only because of Binding.ToExpr
@@ -2956,6 +2968,9 @@ pub fn NewParser(
scopes_in_order_visitor_index: usize = 0,
has_classic_runtime_warned: bool = false,
/// Used for transforming export default -> module.exports
has_export_default: bool = false,
hmr_module: GeneratedSymbol = GeneratedSymbol{ .primary = Ref.None, .backup = Ref.None, .ref = Ref.None },
has_called_runtime: bool = false,
@@ -3703,7 +3718,7 @@ pub fn NewParser(
if (p.options.features.hot_module_reloading) {
generated_symbols_count += 3;
if (is_react_fast_refresh_enabled) {
if (p.options.features.react_fast_refresh) {
generated_symbols_count += 1;
}
}
@@ -3738,7 +3753,7 @@ pub fn NewParser(
if (p.options.features.hot_module_reloading) {
p.hmr_module = try p.declareGeneratedSymbol(.other, "hmr");
if (is_react_fast_refresh_enabled) {
if (p.options.features.react_fast_refresh) {
if (p.options.jsx.use_embedded_refresh_runtime) {
p.runtime_imports.__FastRefreshRuntime = try p.declareGeneratedSymbol(.other, "__FastRefreshRuntime");
p.recordUsage(p.runtime_imports.__FastRefreshRuntime.?.ref);
@@ -3760,7 +3775,7 @@ pub fn NewParser(
}
switch (comptime jsx_transform_type) {
.react => {
.mdx, .react => {
if (p.options.jsx.development) {
p.jsx_filename = p.declareGeneratedSymbol(.other, "jsxFilename") catch unreachable;
}
@@ -5044,7 +5059,7 @@ pub fn NewParser(
}
}
fn createDefaultName(p: *P, loc: logger.Loc) !js_ast.LocRef {
pub fn createDefaultName(p: *P, loc: logger.Loc) !js_ast.LocRef {
var identifier = try std.fmt.allocPrint(p.allocator, "{s}_default", .{try p.source.path.name.nonUniqueNameString(p.allocator)});
const name = js_ast.LocRef{ .loc = loc, .ref = try p.newSymbol(Symbol.Kind.other, identifier) };
@@ -5185,7 +5200,7 @@ pub fn NewParser(
// pub fn maybeRewriteExportSymbol(p: *P, )
fn parseStmt(p: *P, opts: *ParseStatementOptions) anyerror!Stmt {
pub fn parseStmt(p: *P, opts: *ParseStatementOptions) anyerror!Stmt {
var loc = p.lexer.loc();
switch (p.lexer.token) {
@@ -5345,7 +5360,6 @@ pub fn NewParser(
} else {
defaultName = try p.createDefaultName(defaultLoc);
}
// this is probably a panic
var value = js_ast.StmtOrExpr{ .stmt = stmt };
return p.s(S.ExportDefault{ .default_name = defaultName, .value = value }, loc);
}
@@ -5355,8 +5369,8 @@ pub fn NewParser(
const prefix_expr = try p.parseAsyncPrefixExpr(async_range, Level.comma);
var expr = try p.parseSuffix(prefix_expr, Level.comma, null, Expr.EFlags.none);
try p.lexer.expectOrInsertSemicolon();
// this is probably a panic
var value = js_ast.StmtOrExpr{ .expr = expr };
p.has_export_default = true;
return p.s(S.ExportDefault{ .default_name = defaultName, .value = value }, loc);
}
@@ -5390,7 +5404,7 @@ pub fn NewParser(
break :default_name_getter createDefaultName(p, defaultLoc) catch unreachable;
};
p.has_export_default = true;
return p.s(
S.ExportDefault{ .default_name = default_name, .value = js_ast.StmtOrExpr{ .stmt = stmt } },
loc,
@@ -5434,7 +5448,7 @@ pub fn NewParser(
break :default_name_getter createDefaultName(p, defaultLoc) catch unreachable;
};
p.has_export_default = true;
return p.s(S.ExportDefault{ .default_name = default_name, .value = js_ast.StmtOrExpr{ .stmt = stmt } }, loc);
},
else => {
@@ -5458,6 +5472,9 @@ pub fn NewParser(
.e_identifier => |ident| {
break :default_name_getter LocRef{ .loc = defaultLoc, .ref = ident.ref };
},
.e_import_identifier => |ident| {
break :default_name_getter LocRef{ .loc = defaultLoc, .ref = ident.ref };
},
.e_class => |class| {
if (class.class_name) |_name| {
if (_name.ref) |ref| {
@@ -5471,6 +5488,7 @@ pub fn NewParser(
break :default_name_getter createDefaultName(p, defaultLoc) catch unreachable;
};
p.has_export_default = true;
return p.s(
S.ExportDefault{
.default_name = default_name,
@@ -6896,28 +6914,98 @@ pub fn NewParser(
var original_name = alias;
try p.lexer.next();
if (p.lexer.isContextualKeyword("as")) {
try p.lexer.next();
original_name = p.lexer.identifier;
name = LocRef{ .loc = alias_loc, .ref = try p.storeNameInRef(original_name) };
try p.lexer.expect(.t_identifier);
} else if (!isIdentifier) {
// An import where the name is a keyword must have an alias
try p.lexer.expectedString("\"as\"");
}
// "import { type xx } from 'mod'"
// "import { type xx as yy } from 'mod'"
// "import { type 'xx' as yy } from 'mod'"
// "import { type as } from 'mod'"
// "import { type as as } from 'mod'"
// "import { type as as as } from 'mod'"
if (is_typescript_enabled and
strings.eqlComptime(alias, "type") and
p.lexer.token != .t_comma and
p.lexer.token != .t_close_brace)
{
if (p.lexer.isContextualKeyword("as")) {
try p.lexer.next();
if (p.lexer.isContextualKeyword("as")) {
original_name = p.lexer.identifier;
name = LocRef{ .loc = p.lexer.loc(), .ref = try p.storeNameInRef(original_name) };
try p.lexer.next();
// Reject forbidden names
if (isEvalOrArguments(original_name)) {
const r = js_lexer.rangeOfIdentifier(p.source, name.loc);
try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use \"{s}\" as an identifier here", .{original_name});
}
if (p.lexer.token == .t_identifier) {
// "import { type as as as } from 'mod'"
// "import { type as as foo } from 'mod'"
try p.lexer.next();
} else {
// "import { type as as } from 'mod'"
try items.append(.{
.alias = alias,
.alias_loc = alias_loc,
.name = name,
.original_name = original_name,
});
}
} else if (p.lexer.token == .t_identifier) {
// "import { type as xxx } from 'mod'"
original_name = p.lexer.identifier;
name = LocRef{ .loc = p.lexer.loc(), .ref = try p.storeNameInRef(original_name) };
try p.lexer.expect(.t_identifier);
try items.append(js_ast.ClauseItem{
.alias = alias,
.alias_loc = alias_loc,
.name = name,
.original_name = original_name,
});
if (isEvalOrArguments(original_name)) {
const r = p.source.rangeOfString(name.loc);
try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use {s} as an identifier here", .{original_name});
}
try items.append(.{
.alias = alias,
.alias_loc = alias_loc,
.name = name,
.original_name = original_name,
});
}
} else {
const is_identifier = p.lexer.token == .t_identifier;
// "import { type xx } from 'mod'"
// "import { type xx as yy } from 'mod'"
// "import { type if as yy } from 'mod'"
// "import { type 'xx' as yy } from 'mod'"
_ = try p.parseClauseAlias("import");
try p.lexer.next();
if (p.lexer.isContextualKeyword("as")) {
try p.lexer.next();
try p.lexer.expect(.t_identifier);
} else if (!is_identifier) {
// An import where the name is a keyword must have an alias
try p.lexer.expectedString("\"as\"");
}
}
} else {
if (p.lexer.isContextualKeyword("as")) {
try p.lexer.next();
original_name = p.lexer.identifier;
name = LocRef{ .loc = alias_loc, .ref = try p.storeNameInRef(original_name) };
try p.lexer.expect(.t_identifier);
} else if (!isIdentifier) {
// An import where the name is a keyword must have an alias
try p.lexer.expectedString("\"as\"");
}
// Reject forbidden names
if (isEvalOrArguments(original_name)) {
const r = js_lexer.rangeOfIdentifier(p.source, name.loc);
try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use \"{s}\" as an identifier here", .{original_name});
}
try items.append(js_ast.ClauseItem{
.alias = alias,
.alias_loc = alias_loc,
.name = name,
.original_name = original_name,
});
}
if (p.lexer.token != .t_comma) {
break;
@@ -7510,7 +7598,7 @@ pub fn NewParser(
// TODO:
pub fn checkForNonBMPCodePoint(_: *P, _: logger.Loc, _: string) void {}
fn parseStmtsUpTo(p: *P, eend: js_lexer.T, _opts: *ParseStatementOptions) ![]Stmt {
pub fn parseStmtsUpTo(p: *P, eend: js_lexer.T, _opts: *ParseStatementOptions) ![]Stmt {
var opts = _opts.*;
var stmts = StmtList.init(p.allocator);
@@ -10430,6 +10518,7 @@ pub fn NewParser(
.utf8 = name,
}, loc) },
.range = tag_range,
.name = name,
};
}
@@ -11112,7 +11201,7 @@ pub fn NewParser(
var writer = WriterType.initWriter(p, &BunJSX.bun_jsx_identifier);
return writer.writeFunctionCall(e_.*);
},
.react => {
.mdx, .react => {
const tag: Expr = tagger: {
if (e_.tag) |_tag| {
break :tagger p.visitExpr(_tag);
@@ -14637,7 +14726,7 @@ pub fn NewParser(
p.import_records_for_current_part.shrinkRetainingCapacity(0);
p.declared_symbols.shrinkRetainingCapacity(0);
var result = try ImportScanner.scan(*P, p, part.stmts);
var result = try ImportScanner.scan(*P, p, part.stmts, commonjs_wrapper_expr != null);
kept_import_equals = kept_import_equals or result.kept_import_equals;
removed_import_equals = removed_import_equals or result.removed_import_equals;
part.import_record_indices = part.import_record_indices;
@@ -14872,7 +14961,7 @@ pub fn NewParser(
var args_list: []Expr = if (Environment.isDebug) &Prefill.HotModuleReloading.DebugEnabledArgs else &Prefill.HotModuleReloading.DebugDisabled;
const new_call_args_count: usize = comptime if (is_react_fast_refresh_enabled) 3 else 2;
const new_call_args_count: usize = if (p.options.features.react_fast_refresh) 3 else 2;
var call_args = try allocator.alloc(Expr, new_call_args_count + 1);
var new_call_args = call_args[0..new_call_args_count];
var hmr_module_ident = p.e(E.Identifier{ .ref = p.hmr_module.ref }, logger.Loc.Empty);
@@ -14880,7 +14969,7 @@ pub fn NewParser(
new_call_args[0] = p.e(E.Number{ .value = @intToFloat(f64, p.options.filepath_hash_for_hmr) }, logger.Loc.Empty);
// This helps us provide better error messages
new_call_args[1] = p.e(E.String{ .utf8 = p.source.path.pretty }, logger.Loc.Empty);
if (is_react_fast_refresh_enabled) {
if (p.options.features.react_fast_refresh) {
new_call_args[2] = p.e(E.Identifier{ .ref = p.jsx_refresh_runtime.ref }, logger.Loc.Empty);
}
@@ -14906,10 +14995,12 @@ pub fn NewParser(
var first_decl = decls[0..2];
// We cannot rely on import.meta.url because if we import it within a blob: url, it will be nonsensical
// var __hmrModule = new HMRModule(123123124, "/index.js"), __exports = __hmrModule.exports;
const hmr_import_ref = (if (comptime is_react_fast_refresh_enabled)
const hmr_import_module_ = if (p.options.features.react_fast_refresh)
p.runtime_imports.__FastRefreshModule.?
else
p.runtime_imports.__HMRModule.?).ref;
p.runtime_imports.__HMRModule.?;
const hmr_import_ref = hmr_import_module_.ref;
first_decl[0] = G.Decl{
.binding = p.b(B.Identifier{ .ref = p.hmr_module.ref }, logger.Loc.Empty),
@@ -15347,6 +15438,7 @@ pub fn NewParser(
const JavaScriptParser = NewParser(.{});
const JSXParser = NewParser(.{ .jsx = .react });
const TSXParser = NewParser(.{ .jsx = .react, .typescript = true });
const MDXParser = NewParser(.{ .jsx = .mdx });
const TypeScriptParser = NewParser(.{ .typescript = true });
const JSParserMacro = NewParser(.{
@@ -15357,11 +15449,6 @@ const TSParserMacro = NewParser(.{
.typescript = true,
});
const JavaScriptParserFastRefresh = NewParser(.{ .react_fast_refresh = true });
const JSXParserFastRefresh = NewParser(.{ .jsx = .react, .react_fast_refresh = true });
const TSXParserFastRefresh = NewParser(.{ .jsx = .react, .typescript = true, .react_fast_refresh = true });
const TypeScriptParserFastRefresh = NewParser(.{ .typescript = true, .react_fast_refresh = true });
const JavaScriptImportScanner = NewParser(.{ .scan_only = true });
const JSXImportScanner = NewParser(.{ .jsx = .react, .scan_only = true });
const TSXImportScanner = NewParser(.{ .jsx = .react, .typescript = true, .scan_only = true });

View File

@@ -2548,11 +2548,6 @@ pub fn NewPrinter(
p.printNewline();
},
.s_export_default => |s| {
// Give an extra newline for export default for readability
if (!prev_stmt_tag.isExportLike()) {
p.printNewline();
}
p.printIndent();
p.printSpaceBeforeIdentifier();
@@ -2608,7 +2603,9 @@ pub fn NewPrinter(
if (is_inside_bundle) {
p.printSemicolonAfterStatement();
}
if (is_inside_bundle) {
if (func.func.name) |name| {
p.printIndent();
p.printBundledExport("default", p.renamer.nameForSymbol(name.ref.?));
@@ -2639,7 +2636,9 @@ pub fn NewPrinter(
if (is_inside_bundle) {
p.printSemicolonAfterStatement();
}
if (is_inside_bundle) {
if (class.class.class_name) |name| {
p.printIndent();
p.printBundledExport("default", p.renamer.nameForSymbol(name.ref.?));

View File

@@ -42,7 +42,7 @@ const Op = js_ast.Op;
const Scope = js_ast.Scope;
const locModuleScope = logger.Loc.Empty;
const LEXER_DEBUGGER_WORKAROUND = false;
const LEXER_DEBUGGER_WORKAROUND = Environment.isDebug;
const HashMapPool = struct {
const HashMap = std.HashMap(u64, void, IdentityContext, 80);
@@ -464,7 +464,7 @@ pub const PackageJSONVersionChecker = struct {
if (p.lexer.token == closer) {
if (comptime !opts.allow_trailing_commas) {
p.log.addRangeError(p.source(), comma_range, "JSON does not support trailing commas") catch unreachable;
p.log.addRangeError(p.source, comma_range, "JSON does not support trailing commas") catch unreachable;
}
return false;
}

View File

@@ -218,7 +218,7 @@ pub const Linker = struct {
}
// Step 1. Resolve imports & requires
switch (result.loader) {
.jsx, .js, .ts, .tsx => {
.jsx, .js, .ts, .tsx, .mdx => {
var record_i: u32 = 0;
const record_count = @truncate(u32, import_records.len);

577
src/mdx/mdx_lexer.zig Normal file
View File

@@ -0,0 +1,577 @@
const std = @import("std");
const logger = @import("../logger.zig");
const js_ast = @import("../js_ast.zig");
const _global = @import("../global.zig");
const string = _global.string;
const Output = _global.Output;
const Global = _global.Global;
const Environment = _global.Environment;
const strings = _global.strings;
const CodePoint = _global.CodePoint;
const MutableString = _global.MutableString;
const stringZ = _global.stringZ;
const default_allocator = _global.default_allocator;
const js_lexer = @import("../js_lexer.zig");
const JSLexerTable = @import("../js_lexer_tables.zig");
/// MDX token kinds produced by `Lexer`.
/// Numbered variants (e.g. `t_star_2`, `t_hash_3`) represent the same
/// punctuation repeated that many times — markdown gives repeat counts
/// distinct meanings (emphasis vs. strong, ATX heading levels, etc.).
pub const T = enum {
    // Line-start keywords that hand parsing off to the JS parser.
    t_export,
    t_import,

    t_end_of_file,
    // Two consecutive newlines: separates markdown blocks.
    t_empty_line,

    // Emphasis / list / thematic-break markers.
    t_star,
    t_star_2,
    t_star_3,
    t_dash,
    t_dash_2,
    t_dash_3,
    t_dash_4,
    t_dash_5,
    t_dash_6,
    t_underscore,
    t_underscore_2,
    t_underscore_3,

    // ATX headings: "#" through "######".
    t_hash,
    t_hash_2,
    t_hash_3,
    t_hash_4,
    t_hash_5,
    t_hash_6,

    // Setext-style heading underline.
    t_equals,
    // Plain text run.
    t_text,

    // Link / image syntax.
    t_paren_open,
    t_paren_close,
    t_tilde,
    t_backtick,
    t_bang_bracket_open,
    t_bracket_open,
    t_bracket_close,

    // "{" / "}": embedded JS expression blocks.
    t_js_block_open,
    t_js_block_close,

    // JSX / blockquote-adjacent punctuation.
    t_less_than,
    t_greater_than,
    t_greater_than_greater_than,

    // "&name;" HTML entity introducer.
    t_ampersand,
    // Decoded string payload (e.g. a resolved HTML entity).
    t_string,
    // Table cell separator.
    t_bar,
};
/// Human-readable label for each token, used by `Lexer.expected` to
/// build "expected X" diagnostics. Tokens left at the `initFill` default
/// (empty string) fall back to a generic "unexpected token" error.
///
/// NOTE: `EnumArray.set` takes `(key: T, value: string)` — the enum tag
/// is the key. The previous version passed the string first (a compile
/// error) and additionally crossed the "import"/"export" labels.
const tokenToString: std.enums.EnumArray(T, string) = brk: {
    var map = std.enums.EnumArray(T, string).initFill("");
    map.set(.t_export, "export");
    map.set(.t_import, "import");
    map.set(.t_end_of_file, "end of file");
    map.set(.t_empty_line, "empty line");
    // Repeat-count variants get their literal spelling so error
    // messages show exactly what was expected.
    map.set(.t_star, "*");
    map.set(.t_star_2, "**");
    map.set(.t_star_3, "***");
    map.set(.t_dash, "-");
    map.set(.t_dash_2, "--");
    map.set(.t_dash_3, "---");
    map.set(.t_dash_4, "----");
    map.set(.t_dash_5, "-----");
    map.set(.t_dash_6, "------");
    map.set(.t_underscore, "_");
    map.set(.t_underscore_2, "__");
    map.set(.t_underscore_3, "___");
    map.set(.t_hash, "#");
    map.set(.t_hash_2, "##");
    map.set(.t_hash_3, "###");
    map.set(.t_hash_4, "####");
    map.set(.t_hash_5, "#####");
    map.set(.t_hash_6, "######");
    map.set(.t_equals, "=");
    map.set(.t_text, "text");
    map.set(.t_paren_open, "(");
    map.set(.t_paren_close, ")");
    map.set(.t_tilde, "~");
    map.set(.t_backtick, "`");
    map.set(.t_bang_bracket_open, "![");
    map.set(.t_bracket_open, "[");
    map.set(.t_bracket_close, "]");
    map.set(.t_js_block_open, "{");
    map.set(.t_js_block_close, "}");
    map.set(.t_less_than, "<");
    map.set(.t_greater_than, ">");
    map.set(.t_greater_than_greater_than, ">>");
    map.set(.t_ampersand, "&");
    map.set(.t_string, "''");
    // Was missing entirely before; t_bar fell through to "".
    map.set(.t_bar, "|");
    break :brk map;
};
/// MDX lexer: a thin layer over the JavaScript lexer that tokenizes
/// markdown-level syntax (headings, emphasis markers, links, HTML
/// entities, fenced-code fences) and recognizes line-start `import` /
/// `export` / `{` / `<` so the embedded JS/JSX parser can take over.
/// All character stepping, position tracking, and error reporting are
/// delegated to the wrapped `js_lexer.Lexer`.
pub const Lexer = struct {
    const JSLexer = js_lexer.Lexer;

    // Underlying JS lexer that owns the source buffer and cursor.
    js: *JSLexer,
    // Most recently scanned MDX token.
    token: T = T.t_end_of_file,

    // we only care about indentation up to 3 spaces
    // if it exceeds 3 spaces, it is not relevant for parsing
    // (u3 with saturating `+|=` cannot wrap past 7)
    indent: u3 = 0,
    // Scratch state for the link currently being parsed, if any.
    link: Link = Link{},
    // presumably the fenced-code info string (text after ```) — TODO confirm
    info: string = "",

    /// Prime the lexer: load the first code point, then scan the first
    /// token so `token` is valid before the parser starts.
    pub fn init(js: *JSLexer) !Lexer {
        var lex = Lexer{
            .js = js,
        };
        lex.step();
        try lex.next();
        return lex;
    }

    const Link = struct {
        title: string = "",
        href: string = "",
    };

    /// Report that `token` was expected: uses its printable form when
    /// one is registered in `tokenToString`, otherwise a generic error.
    /// NOTE(review): neither `expectedString` nor `unexpected` is
    /// defined on this struct in this file — presumably they should
    /// forward to `self.js`; confirm this compiles.
    pub fn expected(self: *Lexer, token: T) !void {
        if (tokenToString.get(token).len > 0) {
            try self.expectedString(tokenToString.get(token));
        } else {
            try self.unexpected();
        }
    }

    /// Error unless the current token is `token`, then advance.
    pub inline fn expect(self: *Lexer, comptime token: T) !void {
        if (self.token != token) {
            try self.expected(token);
        }
        try self.next();
    }

    pub inline fn codePoint(this: *const Lexer) CodePoint {
        return this.js.code_point;
    }

    pub inline fn log(this: *Lexer) *logger.Log {
        return this.js.log;
    }

    pub inline fn loc(self: *const Lexer) logger.Loc {
        return self.js.loc();
    }

    // --- Error-reporting helpers: all forward to the wrapped JS lexer. ---

    pub fn syntaxError(self: *Lexer) !void {
        return try self.js.syntaxError();
    }

    pub fn addError(self: *Lexer, _loc: usize, comptime format: []const u8, args: anytype, _: bool) void {
        return self.js.addError(_loc, format, args, false);
    }

    pub fn addDefaultError(self: *Lexer, msg: []const u8) !void {
        return try self.js.addDefaultError(msg);
    }

    pub fn addRangeError(self: *Lexer, r: logger.Range, comptime format: []const u8, args: anytype, _: bool) !void {
        return try self.js.addRangeError(r, format, args, false);
    }

    /// Advance the underlying cursor by one code point.
    inline fn step(lexer: *Lexer) void {
        lexer.js.step();
    }

    /// Raw source bytes of the current token (per the JS lexer's range).
    pub inline fn raw(self: *Lexer) []const u8 {
        return self.js.raw();
    }

    pub inline fn identifier(self: *Lexer) []const u8 {
        return self.js.identifier;
    }

    fn peek(self: *Lexer, n: usize) []const u8 {
        return self.js.peek(n);
    }

    /// Count leading spaces/tabs/form-feeds into `self.indent`, then
    /// stop. Bails out (consuming one extra column) once more than 3
    /// columns are seen, since deeper indentation is not significant.
    fn consumeIndent(self: *Lexer) void {
        self.indent = 0;
        while (true) {
            switch (self.codePoint()) {
                ' ', '\t', 0x000C => {
                    self.indent +|= 1; // saturating add: no wraparound
                    if (self.indent > 3) {
                        self.step();
                        return;
                    }
                },
                else => return,
            }
            self.step();
        }
    }

    pub inline fn toEString(
        lexer: *Lexer,
    ) js_ast.E.String {
        return lexer.js.toEString();
    }

    /// Tokenizer used while inside a markdown link (`[text](href)`).
    /// Returns true when a structural token (`]`, `(`, `)`) was
    /// produced; returns false when the link context is terminated by
    /// whitespace, a newline, or EOF. With `allow_space`, spaces are
    /// skipped instead of ending the context.
    pub fn nextInsideLink(lexer: *Lexer, comptime allow_space: bool) !bool {
        var js = lexer.js;
        js.has_newline_before = js.end == 0;

        while (true) {
            switch (js.code_point) {
                ']' => {
                    lexer.step();
                    lexer.token = T.t_bracket_close;
                    js.string_literal_slice = js.raw();
                    js.string_literal_is_ascii = true;
                    return true;
                },
                '(' => {
                    lexer.step();
                    lexer.token = T.t_paren_open;
                    return true;
                },
                ')' => {
                    lexer.step();
                    lexer.token = T.t_paren_close;
                    return true;
                },
                '\\' => {
                    // Backslash escape: skip the backslash and the
                    // escaped code point.
                    // NOTE(review): this branch falls through to the
                    // `lexer.step()` at the bottom of the loop, so a
                    // 2-char escape consumes 3 code points — confirm.
                    lexer.step();
                    lexer.step();
                    lexer.token = T.t_text;
                },
                ' ', '\t', 0x000C => {
                    lexer.step();
                    lexer.token = T.t_text;
                    if (allow_space) {
                        continue;
                    }
                    return false;
                },
                -1, '\r', '\n', 0x2028, 0x2029 => {
                    // Newline or EOF ends the link context.
                    lexer.step();
                    lexer.token = T.t_text;
                    return false;
                },
                else => {},
            }

            lexer.step();
        }

        unreachable;
    }

    /// Scan the next MDX token into `lexer.token`.
    /// A few tokens are also mirrored into the JS lexer (`js.token`) so
    /// the embedded JS/JSX parser can take over mid-stream.
    pub fn next(lexer: *Lexer) !void {
        var js = lexer.js;
        // At offset 0 we behave as if preceded by a newline so that
        // line-start-only constructs (headings, import/export) match.
        js.has_newline_before = js.end == 0;
        lexer.indent = 0;

        outer: while (true) {
            js.start = js.end;
            js.token = .t_end_of_file;
            switch (js.code_point) {
                -1 => {
                    lexer.token = T.t_end_of_file;
                    lexer.indent = 0;
                    return;
                },
                ' ', '\t', 0x000C => {
                    // Leading whitespace only adjusts indentation.
                    lexer.indent +|= 1;
                    lexer.step();
                    continue;
                },
                '\r', '\n', 0x2028, 0x2029 => {
                    // A newline immediately after another newline forms
                    // an "empty line" token (markdown block separator).
                    const was_empty_line = js.has_newline_before;
                    lexer.step();
                    js.has_newline_before = true;
                    if (was_empty_line) {
                        lexer.token = T.t_empty_line;
                        return;
                    }
                    continue;
                },
                '{' => {
                    lexer.token = T.t_js_block_open;
                    lexer.step();
                    return;
                },
                '<' => {
                    lexer.token = T.t_less_than;
                    lexer.js.token = .t_less_than;
                    lexer.step();
                    // "<!" starts an HTML comment: skip until "-->" and
                    // resume scanning after it.
                    if (lexer.codePoint() == '!') {
                        lexer.step();

                        // NOTE(review): `lexer.token` stays t_less_than
                        // inside this loop, and the inner scan below
                        // spins forever at EOF (code_point stays -1, so
                        // it never equals '-') — an unclosed comment
                        // looks like it would hang; confirm.
                        while (lexer.token != .t_end_of_file) {
                            while (lexer.codePoint() != '-') {
                                lexer.step();
                            }

                            if (lexer.codePoint() == '-') {
                                lexer.step();
                                if (lexer.codePoint() == '-') {
                                    lexer.step();
                                    if (lexer.codePoint() == '>') {
                                        lexer.step();
                                        lexer.js.start = lexer.js.current;
                                        continue :outer;
                                    }
                                }
                            }
                        }
                    }
                    return;
                },
                '>' => {
                    lexer.token = T.t_greater_than;
                    lexer.js.token = .t_greater_than;
                    lexer.step();
                    if (lexer.codePoint() == '>') {
                        lexer.step();
                        lexer.token = T.t_greater_than_greater_than;
                        js.token = .t_greater_than_greater_than;
                    }
                    return;
                },
                '*' => {
                    // "*", "**", "***" markers; indentation between the
                    // stars is tolerated (e.g. loose list items).
                    lexer.step();
                    lexer.token = T.t_star;
                    lexer.consumeIndent();
                    if (lexer.codePoint() == '*') {
                        lexer.token = T.t_star_2;
                        lexer.step();
                        lexer.consumeIndent();
                        if (lexer.codePoint() == '*') {
                            lexer.token = T.t_star_3;
                            lexer.step();
                            lexer.consumeIndent();
                            if (!js.has_newline_before and lexer.codePoint() == '*') {
                                // NOTE(review): peek(1) may return an
                                // empty slice at EOF, making [0] out of
                                // bounds — confirm against JSLexer.peek.
                                if (lexer.peek(1)[0] == '*') {
                                    lexer.token = T.t_star_2;
                                    lexer.step();
                                    lexer.step();
                                    js.string_literal_slice = "";
                                    return;
                                }
                            }
                        }
                    }
                    return;
                },
                '_' => {
                    // Same shape as '*' above, for "_" / "__" / "___".
                    lexer.step();
                    lexer.token = T.t_underscore;
                    lexer.consumeIndent();
                    if (lexer.codePoint() == '_') {
                        lexer.token = T.t_underscore_2;
                        lexer.step();
                        lexer.consumeIndent();
                        if (lexer.codePoint() == '_') {
                            lexer.token = T.t_underscore_3;
                            lexer.step();
                            lexer.consumeIndent();
                            if (!js.has_newline_before and lexer.codePoint() == '_') {
                                // NOTE(review): same peek(1)[0] EOF
                                // concern as the '*' branch.
                                if (lexer.peek(1)[0] == '_') {
                                    lexer.token = T.t_underscore_2;
                                    lexer.step();
                                    lexer.step();
                                    js.string_literal_slice = "";
                                    return;
                                }
                            }
                        }
                    }
                    return;
                },
                '#' => {
                    // ATX headings must start a line and be indented at
                    // most 3 columns; otherwise '#' is ordinary text.
                    if (!js.has_newline_before or lexer.indent > 3) {
                        lexer.step();
                        continue;
                    }
                    lexer.token = T.t_hash;
                    lexer.step();
                    lexer.consumeIndent();
                    if (lexer.codePoint() == '#') {
                        lexer.token = T.t_hash_2;
                        lexer.step();
                        lexer.consumeIndent();
                        if (lexer.codePoint() == '#') {
                            lexer.token = T.t_hash_3;
                            lexer.step();
                            lexer.consumeIndent();
                            if (lexer.codePoint() == '#') {
                                lexer.token = T.t_hash_4;
                                lexer.step();
                                lexer.consumeIndent();
                                if (lexer.codePoint() == '#') {
                                    lexer.token = T.t_hash_5;
                                    lexer.step();
                                    lexer.consumeIndent();
                                    if (lexer.codePoint() == '#') {
                                        lexer.token = T.t_hash_6;
                                        lexer.step();
                                        lexer.consumeIndent();
                                    }
                                }
                            }
                        }
                    }
                    return;
                },
                '=' => {
                    lexer.token = T.t_text;
                    lexer.step();
                    // Only meaningful at line start (setext-style
                    // heading underline); otherwise keep scanning.
                    if (lexer.js.has_newline_before) {
                        lexer.token = T.t_equals;
                        js.token = .t_equals;
                        return;
                    }
                },
                '!' => {
                    lexer.token = T.t_text;
                    lexer.step();
                    // "![" begins an image.
                    if (lexer.codePoint() == '[') {
                        lexer.token = T.t_bang_bracket_open;
                        lexer.step();
                        return;
                    }
                },
                '[' => {
                    lexer.token = T.t_bracket_open;
                    lexer.step();
                    return;
                },
                '`' => {
                    lexer.token = T.t_backtick;
                    lexer.step();
                    return;
                },
                '~' => {
                    // NOTE(review): no `return` here — the outer loop
                    // restarts and overwrites `token`, so t_tilde is
                    // never surfaced to the caller; confirm intent.
                    lexer.token = T.t_tilde;
                    lexer.step();
                },
                '\\' => {
                    // Backslash escape: treat the escaped code point as
                    // literal text and keep scanning.
                    js.string_literal_is_ascii = true;
                    lexer.token = T.t_text;
                    lexer.step();
                    lexer.step();
                    continue;
                },
                '&' => {
                    // Possible HTML entity ("&amp;"): scan to ';' and
                    // decode; bail back to plain text on whitespace,
                    // newline, or EOF.
                    const start = lexer.js.start;
                    lexer.step();
                    lexer.js.start = lexer.js.current;
                    inner: while (true) {
                        switch (lexer.codePoint()) {
                            ';' => {
                                const label = lexer.raw();
                                lexer.step();
                                js.string_literal = try js.fixWhitespaceAndDecodeJSXEntities(label);
                                js.string_literal_is_ascii = false;
                                lexer.token = T.t_string;
                                return;
                            },
                            -1, '\r', '\n', 0x2028, 0x2029, ' ', '\t' => {
                                // Not an entity: restore the saved token
                                // start and fall back to text.
                                lexer.js.start = start;
                                lexer.step();
                                lexer.token = T.t_text;
                                break :inner;
                            },
                            else => {},
                        }
                        lexer.step();
                    }
                    continue;
                },
                '|' => {
                    lexer.token = T.t_bar;
                    lexer.step();
                    return;
                },
                'i' => {
                    lexer.step();
                    lexer.token = T.t_text;
                    // "import " at line start hands off to ESM import
                    // parsing. The 'i' is already consumed; the five
                    // steps consume "mport" and consumeIndent eats the
                    // trailing space.
                    if (js.has_newline_before) {
                        if (strings.eqlComptime(lexer.peek("mport ".len), "mport ")) {
                            lexer.step();
                            lexer.step();
                            lexer.step();
                            lexer.step();
                            lexer.step();
                            lexer.token = T.t_import;
                            lexer.consumeIndent();
                            return;
                        }
                    }
                },
                'e' => {
                    lexer.step();
                    lexer.token = T.t_text;
                    // "export " at line start, symmetric to 'i' above.
                    if (js.has_newline_before) {
                        if (strings.eqlComptime(lexer.peek("xport ".len), "xport ")) {
                            lexer.step();
                            lexer.step();
                            lexer.step();
                            lexer.step();
                            lexer.step();
                            lexer.token = T.t_export;
                            lexer.consumeIndent();
                            return;
                        }
                    }
                },
                else => {
                    // Plain text run: consume until a code point that
                    // can begin markdown/JSX structure, a newline, or
                    // EOF, recording the slice for the parser.
                    lexer.step();
                    lexer.token = .t_text;
                    while (true) {
                        switch (lexer.codePoint()) {
                            '\\' => {
                                lexer.step();
                                lexer.step();
                                continue;
                            },
                            -1, '\r', '\n', 0x2028, 0x2029, '&', '~', '{', '<', '*', '_', '!', '[', '`' => {
                                js.string_literal_slice = lexer.raw();
                                js.string_literal_is_ascii = true;
                                return;
                            },
                            else => {},
                        }

                        lexer.step();
                    }
                },
            }
        }
    }
};
/// Convert an integer value to f64.
/// NOTE: `@intToFloat` is the pre-0.11 builtin spelling (newer Zig uses
/// `@floatFromInt`); kept as-is to match the toolchain this file targets.
inline fn float64(num: anytype) f64 {
    return @intToFloat(f64, num);
}

1862
src/mdx/mdx_parser.zig Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -630,6 +630,7 @@ pub const Loader = enum(u4) {
file,
json,
toml,
mdx,
pub const Map = std.EnumArray(Loader, string);
pub const stdin_name: Map = brk: {
var map = Map.initFill("");
@@ -641,6 +642,7 @@ pub const Loader = enum(u4) {
map.set(Loader.file, "input");
map.set(Loader.json, "input.json");
map.set(Loader.toml, "input.toml");
map.set(Loader.toml, "input.mdx");
break :brk map;
};
@@ -682,6 +684,7 @@ pub const Loader = enum(u4) {
LoaderMatcher.case("file") => Loader.file,
LoaderMatcher.case("json") => Loader.json,
LoaderMatcher.case("toml") => Loader.toml,
LoaderMatcher.case("md"), LoaderMatcher.case("mdx") => Loader.mdx,
else => null,
};
}
@@ -702,12 +705,13 @@ pub const Loader = enum(u4) {
.css => .css,
.json => .json,
.toml => .toml,
.mdx => .mdx,
else => .file,
};
}
pub fn isJSX(loader: Loader) bool {
return loader == .jsx or loader == .tsx;
return loader == .jsx or loader == .tsx or loader == .mdx;
}
pub fn isTypeScript(loader: Loader) bool {
return loader == .tsx or loader == .ts;
@@ -751,6 +755,7 @@ pub const defaultLoaders = std.ComptimeStringMap(Loader, .{
.{ ".cts", Loader.ts },
.{ ".toml", Loader.toml },
.{ ".mdx", Loader.mdx },
});
// https://webpack.js.org/guides/package-exports/#reference-syntax
@@ -1027,6 +1032,7 @@ pub fn loadersFromTransformOptions(allocator: std.mem.Allocator, _loaders: ?Api.
.tsx => Loader.tsx,
.json => Loader.json,
.toml => Loader.toml,
.mdx => Loader.mdx,
else => unreachable,
};
@@ -1048,7 +1054,7 @@ pub fn loadersFromTransformOptions(allocator: std.mem.Allocator, _loaders: ?Api.
".ts", ".tsx",
".mts", ".cts",
".toml",
".toml", ".mdx",
};
inline for (default_loader_ext) |ext| {

View File

@@ -1 +1 @@
6d5479b747f121cc
ee448e44f6cf1e3c

View File

@@ -905,6 +905,8 @@ if (typeof window !== "undefined") {
case API.Loader.js:
case API.Loader.jsx:
case API.Loader.mdx:
case API.Loader.toml:
case API.Loader.tsx:
case API.Loader.ts:
case API.Loader.json: {
@@ -970,6 +972,8 @@ if (typeof window !== "undefined") {
// Else, it will fall back to live reloading.
case API.Loader.js:
case API.Loader.jsx:
case API.Loader.mdx:
case API.Loader.toml:
case API.Loader.json:
case API.Loader.ts:
case API.Loader.tsx: {
@@ -1393,6 +1397,7 @@ if (typeof window !== "undefined") {
i++ // let i = HMRModule.dependencies.graph_used - 1; // i > this.module_index; // i--
) {
const mod = HMRModule.dependencies.modules[i];
if (!mod) continue; // this array is holey sometimes
let handled = false;
if (!mod.exports.__hmrDisable) {

View File

@@ -163,7 +163,7 @@ pub const Lexer = struct {
return code_point;
}
inline fn step(lexer: *Lexer) void {
pub inline fn step(lexer: *Lexer) void {
lexer.code_point = lexer.nextCodepoint();
lexer.line_number += @as(u32, @boolToInt(lexer.code_point == '\n'));

View File

@@ -41,40 +41,6 @@ const Symbol = js_ast.Symbol;
const Level = js_ast.Op.Level;
const Op = js_ast.Op;
const Scope = js_ast.Scope;
const locModuleScope = logger.Loc.Empty;
const LEXER_DEBUGGER_WORKAROUND = false;
const IdentityContext = @import("../identity_context.zig").IdentityContext;
const HashMapPool = struct {
const HashMap = std.HashMap(u64, void, IdentityContext, 80);
const LinkedList = std.SinglyLinkedList(HashMap);
threadlocal var list: LinkedList = undefined;
threadlocal var loaded: bool = false;
pub fn get(_: std.mem.Allocator) *LinkedList.Node {
if (loaded) {
if (list.popFirst()) |node| {
node.data.clearRetainingCapacity();
return node;
}
}
var new_node = default_allocator.create(LinkedList.Node) catch unreachable;
new_node.* = LinkedList.Node{ .data = HashMap.initContext(default_allocator, IdentityContext{}) };
return new_node;
}
pub fn release(node: *LinkedList.Node) void {
if (loaded) {
list.prepend(node);
return;
}
list = LinkedList{ .first = node };
loaded = true;
}
};
pub const TOML = struct {
lexer: Lexer,