Compare commits

...

77 Commits

Author SHA1 Message Date
pfg e0d01b6d94 more merge 2025-01-24 16:12:38 -08:00
pfg 6f90017716 Merge branch 'main' into pfg/only-parse-twice 2025-01-24 16:07:20 -08:00
pfg 703e1e1c28 switch to getOrPutContextAdapted & move ensureUnusedCapacity above the getorput 2025-01-14 13:12:01 -08:00
pfg d67742db28 Merge branch 'main' into pfg/only-parse-twice 2025-01-13 13:49:36 -08:00
pfg 3daed8d95c Merge branch 'main' into pfg/only-parse-twice 2025-01-09 14:36:15 -08:00
pfg 17662291da Merge branch 'main' into pfg/only-parse-twice 2025-01-08 14:05:23 -08:00
pfg 1cbd4699bc syntax error rather than crashing for multiple exports with the same name 2025-01-08 12:29:35 -08:00
pfg 3e1016a47a failing test case 2025-01-07 18:43:03 -08:00
pfg a7e9549832 Update BunAnalyzeTranspiledModule.cpp 2025-01-06 13:24:46 -08:00
pfg e7f6fa509d enable comparison in debug builds & windows 2025-01-03 15:06:42 -08:00
pfg 729d2e627e pass resolve.test.ts by allowing null bytes in ModuleInfo strings & fix a bug where strings_len was much larger than it should have been causing an unnecessarily large allocation 2025-01-03 12:26:09 -08:00
pfg f3d0e2e8ac . 2025-01-02 15:51:17 -08:00
pfg f0f8e9cdb9 try calling deinit, find ci failures 2025-01-02 15:47:08 -08:00
pfg 692ddc154d impl deinit for ModuleInfoDeserialized 2025-01-02 15:45:28 -08:00
pfg 78298e6607 merge 2025-01-02 12:46:24 -08:00
pfg fc24835489 Merge branch 'main' into pfg/only-parse-twice 2025-01-02 12:11:04 -08:00
pfg 0652407da4 set PROFILE_MODE 2024-12-19 20:03:34 -08:00
pfg 57260a9177 add an import not found test 2024-12-19 18:33:05 -08:00
pfg fb9a671bc4 type-export.test.ts was completely skipped 2024-12-19 18:13:36 -08:00
pfg 2834abecb5 infer module type in create & switch some to Module 2024-12-19 17:41:54 -08:00
pfg 43e4e48c7c transpilercache moduleinfo support & require .module_info on all ResolvedSource initializers 2024-12-19 17:00:49 -08:00
pfg 2ea00c33e5 skip duping every ModuleInfo 2024-12-19 15:52:49 -08:00
pfg b449b00bdd no forgiveness 2024-12-19 15:14:11 -08:00
pfg 92b23d216d forgive bun:main for not having module_info in profile mode 2024-12-19 14:11:54 -08:00
pfg 042c6b5e90 PROFILE_MODE switch 2024-12-18 19:45:28 -08:00
pfg 9cace81662 switch to storing in the source provider 2024-12-18 19:43:45 -08:00
pfg 9b6a962a17 no individually allocating strings 2024-12-18 19:00:40 -08:00
pfg 1aecc554de note in contributing.md 2024-12-18 15:56:15 -08:00
pfg 78581aa218 fix order 2024-12-18 15:56:09 -08:00
pfg 555ee1da0b add unused field 2024-12-18 15:10:57 -08:00
pfg 338931b88f Merge branch 'main' into pfg/only-parse-twice 2024-12-18 13:50:19 -08:00
pfg 2729b4b843 ? 2024-12-17 21:17:02 -08:00
pfg a21a1ccc3d fix everything being broken 2024-12-17 20:33:14 -08:00
pfg ce0b3793ff fix runtimetranspilercache 2024-12-17 18:59:34 -08:00
pfg ab7584b613 maybe in runtimetranspilercache? 2024-12-17 18:57:18 -08:00
pfg 05ce66c869 Merge branch 'main' into pfg/only-parse-twice 2024-12-17 18:31:34 -08:00
pfg 93dcd32c39 binary transpiled module format 2024-12-17 18:31:15 -08:00
pfg b1905bf983 use bun default_allocator 2024-12-17 15:18:16 -08:00
pfg 7e2c6b8560 add faster compile errors env var 2024-12-16 20:31:57 -08:00
pfg 46942678fc fix attributes comparison 2024-12-16 20:31:48 -08:00
pfg b0995f7e7f remove launch.json entry 2024-12-16 20:11:44 -08:00
pfg a992242dfd allow 'var' inside 'try' to add to varDeclarations 2024-12-16 20:11:27 -08:00
pfg b39151081a runtimetranspilercache notes 2024-12-16 19:44:31 -08:00
pfg a6ac4a6c47 temporary fix until runtimetranspilercache while it still serializes to json 2024-12-16 19:44:20 -08:00
pfg cb8214c068 Merge branch 'main' into pfg/only-parse-twice 2024-12-16 18:53:59 -08:00
pfg 542a069fcd Merge branch 'main' into pfg/only-parse-twice 2024-12-16 16:29:37 -08:00
pfg eeab914725 something 2024-12-16 15:50:14 -08:00
pfg ff0f561ee6 no more printing except for a diff fail 2024-12-16 15:48:18 -08:00
pfg ea04b81eea move the top level check inside toe p.moduleInfo 2024-12-16 15:42:43 -08:00
pfg d8aea25878 fix missing export for export default (expression); 2024-12-16 15:40:17 -08:00
pfg 55a36bba72 fix missing exports for shorthand property names 2024-12-16 15:38:02 -08:00
pfg 3fec10a982 sort exportEntries and mask only importmetafeature for comparison 2024-12-16 14:57:36 -08:00
pfg e9cb96cb98 fix missing is_top_level check for s_function and s_class 2024-12-16 14:40:25 -08:00
pfg f4418e3f82 fix 'class' missing lexicalVariable & jest import missing varDeclarations 2024-12-16 14:35:59 -08:00
pfg 1644142da0 diff print for error 2024-12-16 14:35:34 -08:00
pfg 605467db59 error if results are differebt 2024-12-16 14:09:15 -08:00
pfg 0268a4f14f fix 2024-12-13 20:57:09 -08:00
pfg c57fead7ae set webkit commit hash 2024-12-13 20:42:57 -08:00
pfg 5f887270f3 put vars in varDeclarations rather than lexicalDeclarations 2024-12-13 20:40:57 -08:00
pfg a655ad26e5 RELEASE_AND_RETURN & fix indirect checking 2024-12-13 19:48:08 -08:00
pfg d1dd1b9e13 support export {a, b, c} from "./d" and export {a, b} and indirect export {a, b} 2024-12-13 19:32:12 -08:00
pfg 7fe6556034 support 'export * from ' 2024-12-13 18:49:12 -08:00
pfg e6fab95c86 more progress on filling the ModuleInfo 2024-12-13 18:39:00 -08:00
pfg 57742010ac Merge branch 'main' into pfg/only-parse-twice 2024-12-13 13:52:24 -08:00
pfg 344c6a73c5 import notes 2024-12-12 20:23:55 -08:00
pfg 213bcfbe41 make sure to pass uses_import_meta flag 2024-12-12 20:11:43 -08:00
pfg 5817c3045c first module parse 2024-12-12 19:47:20 -08:00
pfg c6e1ca127c wip: starting to fill out ModuleInfo 2024-12-12 17:36:18 -08:00
pfg a72d2035db starting on zig side 2024-12-12 16:17:00 -08:00
pfg 0d3d11ac3a examples using our own manually defined module records (sample files in folder: c5b80dd5337b6903cc6ff8e4172c55f2) 2024-12-12 15:14:25 -08:00
pfg 2d59092efb Merge branch 'main' into pfg/fix-typescript-reexport 2024-12-12 13:24:47 -08:00
pfg 4ee546a24e wip 2024-12-10 18:51:38 -08:00
pfg 3904259c5d oops missing quote 2024-12-10 16:40:21 -08:00
pfg 2e5f949df9 Merge branch 'main' into pfg/fix-typescript-reexport 2024-12-10 16:08:38 -08:00
pfg 37fffb4b79 add another export test 2024-12-10 16:08:00 -08:00
pfg c57cf76f17 {todo: true} 2024-12-06 19:48:17 -08:00
pfg a0586da1a3 add a test demonstrating the re-export problem 2024-12-06 18:13:36 -08:00
21 changed files with 1513 additions and 190 deletions

View File

@@ -207,7 +207,7 @@ $ git clone https://github.com/oven-sh/WebKit vendor/WebKit
# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
$ make jsc-debug
$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
# Build bun with the local JSC build
$ bun run build:local

View File

@@ -473,6 +473,11 @@ pub fn addInstallObjectFile(
name: []const u8,
out_mode: ObjectFormat,
) *Step {
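// COMPILE_ERRORS_ONLY means the build is only being run to surface compile errors; if we got far
// enough to install an object file, compilation succeeded, so deliberately fail the step with a
// system command whose argv[0] is the error message.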
if (@import("builtin").os.tag != .windows and std.posix.getenvZ("COMPILE_ERRORS_ONLY") != null) {
const failstep = b.addSystemCommand(&.{"COMPILE_ERRORS_ONLY set but there were no compile errors"});
failstep.step.dependOn(&compile.step);
return &failstep.step;
}
// bin always needed to be computed or else the compilation will do nothing. zig build system bug?
const bin = compile.getEmittedBin();
return &b.addInstallFile(switch (out_mode) {

View File

@@ -30,6 +30,8 @@ if(WEBKIT_LOCAL)
${WEBKIT_PATH}
${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
${WEBKIT_PATH}/bmalloc/Headers
${WEBKIT_PATH}/WTF/Headers
${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector

View File

@@ -0,0 +1,471 @@
const std = @import("std");
const bun = @import("bun.zig");
const js_ast = bun.JSAst;
const Ast = js_ast.Ast;
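/// A module's records are stored as a flat stream: for each entry one RecordKind is appended to
/// `record_kinds` and `len()` StringIDs are appended to `buffer` (see `_addRecord` below).
/// As a sketch, `import { a } from "./b"` would append `.import_info_single` followed by the
/// StringIDs for "./b", "a", "a".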
pub const RecordKind = enum(u8) {
/// var_name
declared_variable,
/// let_name
lexical_variable,
/// module_name, import_name, local_name
import_info_single,
/// module_name, import_name = '*', local_name
import_info_namespace,
/// export_name, import_name, module_name
export_info_indirect,
/// export_name, local_name, padding (for local => indirect conversion)
export_info_local,
/// export_name, module_name
export_info_namespace,
/// module_name
export_info_star,
_,
pub fn len(record: RecordKind) !usize {
return switch (record) {
.declared_variable, .lexical_variable => 1,
.import_info_single => 3,
.import_info_namespace => 3,
.export_info_indirect => 3,
.export_info_local => 3,
.export_info_namespace => 2,
.export_info_star => 1,
else => return error.InvalidRecordKind,
};
}
};
pub const ModuleInfoDeserialized = struct {
strings_buf: []const u8,
strings_lens: []align(1) const u32,
requested_modules_keys: []align(1) const StringID,
requested_modules_values: []align(1) const ModuleInfo.FetchParameters,
buffer: []align(1) const StringID,
record_kinds: []align(1) const RecordKind,
contains_import_meta: bool,
owner: union(enum) {
module_info,
allocated_slice: struct {
slice: []const u8,
allocator: std.mem.Allocator,
},
},
dead: bool = false,
pub fn deinit(self: *ModuleInfoDeserialized) void {
switch (self.owner) {
.module_info => {
const mi: *ModuleInfo = @fieldParentPtr("_deserialized", self);
mi.destroy();
},
.allocated_slice => |as| {
as.allocator.free(as.slice);
as.allocator.destroy(self);
},
}
}
inline fn eat(rem: *[]const u8, len: usize) ![]const u8 {
if (rem.*.len < len) return error.BadModuleInfo;
const res = rem.*[0..len];
rem.* = rem.*[len..];
return res;
}
inline fn eatC(rem: *[]const u8, comptime len: usize) !*const [len]u8 {
if (rem.*.len < len) return error.BadModuleInfo;
const res = rem.*[0..len];
rem.* = rem.*[len..];
return res;
}
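/// Parses the serialized form produced by `serialize`: little-endian u32 counts followed by raw
/// slices, in order: record_kinds, buffer, requested_modules keys then values, a one-byte
/// contains_import_meta flag, strings_lens, and finally the string bytes running to the end of
/// the input.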
pub fn create(source: []const u8, gpa: std.mem.Allocator) !*ModuleInfoDeserialized {
std.log.info("ModuleInfoDeserialized.create", .{});
// Work on a private copy of the serialized bytes; the returned view borrows its slices from
// this copy and frees it in deinit().
const duped = try gpa.dupe(u8, source);
errdefer gpa.free(duped);
var rem: []const u8 = duped;
const res = try gpa.create(ModuleInfoDeserialized);
// res is not initialized yet, so on error only destroy the allocation
// (calling deinit() here would read undefined fields).
errdefer gpa.destroy(res);
const record_kinds_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
const record_kinds = std.mem.bytesAsSlice(RecordKind, try eat(&rem, record_kinds_len * @sizeOf(RecordKind)));
const buffer_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
const buffer = std.mem.bytesAsSlice(StringID, try eat(&rem, buffer_len * @sizeOf(StringID)));
const requested_modules_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
const requested_modules_keys = std.mem.bytesAsSlice(StringID, try eat(&rem, requested_modules_len * @sizeOf(StringID)));
const requested_modules_values = std.mem.bytesAsSlice(ModuleInfo.FetchParameters, try eat(&rem, requested_modules_len * @sizeOf(ModuleInfo.FetchParameters)));
const contains_import_meta = (try eatC(&rem, 1))[0] != 0;
const strings_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
const strings_lens = std.mem.bytesAsSlice(u32, try eat(&rem, strings_len * @sizeOf(u32)));
const strings_buf = rem;
res.* = .{
.strings_buf = strings_buf,
.strings_lens = strings_lens,
.requested_modules_keys = requested_modules_keys,
.requested_modules_values = requested_modules_values,
.buffer = buffer,
.record_kinds = record_kinds,
.contains_import_meta = contains_import_meta,
.owner = .{ .allocated_slice = .{
.slice = duped,
.allocator = gpa,
} },
};
return res;
}
pub fn serialize(self: *const ModuleInfoDeserialized, writer: anytype) !void {
try writer.writeInt(u32, @truncate(self.record_kinds.len), .little);
try writer.writeAll(std.mem.sliceAsBytes(self.record_kinds));
try writer.writeInt(u32, @truncate(self.buffer.len), .little);
try writer.writeAll(std.mem.sliceAsBytes(self.buffer));
try writer.writeInt(u32, @truncate(self.requested_modules_keys.len), .little);
try writer.writeAll(std.mem.sliceAsBytes(self.requested_modules_keys));
try writer.writeAll(std.mem.sliceAsBytes(self.requested_modules_values));
try writer.writeInt(u8, @intFromBool(self.contains_import_meta), .little);
try writer.writeInt(u32, @truncate(self.strings_lens.len), .little);
try writer.writeAll(std.mem.sliceAsBytes(self.strings_lens));
try writer.writeAll(self.strings_buf);
}
};
const StringMapKey = enum(u32) {
_,
};
pub const StringContext = struct {
strings_buf: []const u8,
strings_lens: []const u32,
pub fn hash(_: @This(), s: []const u8) u32 {
return @as(u32, @truncate(std.hash.Wyhash.hash(0, s)));
}
pub fn eql(self: @This(), fetch_key: []const u8, item_key: StringMapKey, item_i: usize) bool {
return bun.strings.eqlLong(fetch_key, self.strings_buf[@intFromEnum(item_key)..][0..self.strings_lens[item_i]], true);
}
};
pub const ModuleInfo = struct {
/// all strings in wtf-8. index in hashmap = StringID
gpa: std.mem.Allocator,
strings_map: std.ArrayHashMapUnmanaged(StringMapKey, void, void, true),
strings_buf: std.ArrayListUnmanaged(u8),
strings_lens: std.ArrayListUnmanaged(u32),
requested_modules: std.AutoArrayHashMap(StringID, FetchParameters),
buffer: std.ArrayList(StringID),
record_kinds: std.ArrayList(RecordKind),
exported_names: std.AutoArrayHashMapUnmanaged(StringID, void),
contains_import_meta: bool,
finalized: bool = false,
_deserialized: ModuleInfoDeserialized = undefined,
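/// After `finalize`, `_deserialized` is a borrowed view over this ModuleInfo's buffers; calling
/// `deinit` on that view destroys the owning ModuleInfo (see ModuleInfoDeserialized.deinit).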
pub fn asDeserialized(self: *ModuleInfo) *ModuleInfoDeserialized {
bun.assert(self.finalized);
return &self._deserialized;
}
pub const FetchParameters = enum(u32) {
none = std.math.maxInt(u32),
javascript = std.math.maxInt(u32) - 1,
webassembly = std.math.maxInt(u32) - 2,
json = std.math.maxInt(u32) - 3,
_, // host_defined: cast to StringID
pub fn hostDefined(value: StringID) FetchParameters {
return @enumFromInt(@intFromEnum(value));
}
};
pub const VarKind = enum { declared, lexical };
pub fn addVar(self: *ModuleInfo, name: []const u8, kind: VarKind) !void {
switch (kind) {
.declared => try self.addDeclaredVariable(name),
.lexical => try self.addLexicalVariable(name),
}
}
fn _addRecord(self: *ModuleInfo, kind: RecordKind, data: []const StringID) !void {
bun.assert(!self.finalized);
bun.assert(data.len == kind.len() catch unreachable);
try self.record_kinds.append(kind);
try self.buffer.appendSlice(data);
}
pub fn addDeclaredVariable(self: *ModuleInfo, id: []const u8) !void {
try self._addRecord(.declared_variable, &.{try self.str(id)});
}
pub fn addLexicalVariable(self: *ModuleInfo, id: []const u8) !void {
try self._addRecord(.lexical_variable, &.{try self.str(id)});
}
pub fn addImportInfoSingle(self: *ModuleInfo, module_name: []const u8, import_name: []const u8, local_name: []const u8) !void {
try self._addRecord(.import_info_single, &.{ try self.str(module_name), try self.str(import_name), try self.str(local_name) });
}
pub fn addImportInfoNamespace(self: *ModuleInfo, module_name: []const u8, local_name: []const u8) !void {
try self._addRecord(.import_info_namespace, &.{ try self.str(module_name), try self.str("*"), try self.str(local_name) });
}
pub fn addExportInfoIndirect(self: *ModuleInfo, export_name: []const u8, import_name: []const u8, module_name: []const u8) !void {
const export_name_id = try self.str(export_name);
if (try self._hasOrAddExportedName(export_name_id)) return; // a syntax error will be emitted later in this case
try self._addRecord(.export_info_indirect, &.{ export_name_id, try self.str(import_name), try self.str(module_name) });
}
pub fn addExportInfoLocal(self: *ModuleInfo, export_name: []const u8, local_name: []const u8) !void {
const export_name_id = try self.str(export_name);
if (try self._hasOrAddExportedName(export_name_id)) return; // a syntax error will be emitted later in this case
try self._addRecord(.export_info_local, &.{ export_name_id, try self.str(local_name), @enumFromInt(std.math.maxInt(u32)) });
}
pub fn addExportInfoNamespace(self: *ModuleInfo, export_name: []const u8, module_name: []const u8) !void {
const export_name_id = try self.str(export_name);
if (try self._hasOrAddExportedName(export_name_id)) return; // a syntax error will be emitted later in this case
try self._addRecord(.export_info_namespace, &.{ export_name_id, try self.str(module_name) });
}
pub fn addExportInfoStar(self: *ModuleInfo, module_name: []const u8) !void {
try self._addRecord(.export_info_star, &.{try self.str(module_name)});
}
pub fn _hasOrAddExportedName(self: *ModuleInfo, name: StringID) !bool {
if (try self.exported_names.fetchPut(self.gpa, name, {}) != null) return true;
return false;
}
pub fn create(gpa: std.mem.Allocator) !*ModuleInfo {
const res = try gpa.create(ModuleInfo);
res.* = ModuleInfo.init(gpa);
return res;
}
fn init(allocator: std.mem.Allocator) ModuleInfo {
return .{
.gpa = allocator,
.strings_map = .{},
.strings_buf = .{},
.strings_lens = .{},
.exported_names = .{},
.requested_modules = std.AutoArrayHashMap(StringID, FetchParameters).init(allocator),
.buffer = std.ArrayList(StringID).init(allocator),
.record_kinds = std.ArrayList(RecordKind).init(allocator),
.contains_import_meta = false,
};
}
fn deinit(self: *ModuleInfo) void {
self.strings_map.deinit(self.gpa);
self.strings_buf.deinit(self.gpa);
self.strings_lens.deinit(self.gpa);
self.exported_names.deinit(self.gpa);
self.requested_modules.deinit();
self.buffer.deinit();
self.record_kinds.deinit();
}
pub fn destroy(self: *ModuleInfo) void {
const alloc = self.gpa;
self.deinit();
alloc.destroy(self);
}
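/// Interns `value` and returns its StringID (its index in `strings_map`). String bytes are stored
/// back-to-back in `strings_buf`; the map key holds each string's byte offset and `strings_lens`
/// its length, so `StringContext` can compare a lookup against the stored bytes.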
pub fn str(self: *ModuleInfo, value: []const u8) !StringID {
try self.strings_buf.ensureUnusedCapacity(self.gpa, value.len);
try self.strings_lens.ensureUnusedCapacity(self.gpa, 1);
const gpres = try self.strings_map.getOrPutAdapted(self.gpa, value, StringContext{
.strings_buf = self.strings_buf.items,
.strings_lens = self.strings_lens.items,
});
if (gpres.found_existing) return @enumFromInt(@as(u32, @intCast(gpres.index)));
gpres.key_ptr.* = @enumFromInt(@as(u32, @truncate(self.strings_buf.items.len)));
gpres.value_ptr.* = {};
self.strings_buf.appendSliceAssumeCapacity(value);
self.strings_lens.appendAssumeCapacity(@as(u32, @truncate(value.len)));
return @enumFromInt(@as(u32, @intCast(gpres.index)));
}
pub const star_default = "*default*";
pub fn requestModule(self: *ModuleInfo, import_record_path: []const u8, fetch_parameters: FetchParameters) !void {
// jsc only records the attributes of the first import with the given import_record_path. so only put if not exists.
const gpres = try self.requested_modules.getOrPut(try self.str(import_record_path));
if (!gpres.found_existing) gpres.value_ptr.* = fetch_parameters;
}
/// find any exports marked as 'local' that are actually 'indirect' and fix them
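/// For example, `import { x } from "./m"; export { x };` is first recorded as a local export of
/// `x`; finalize rewrites it into an indirect export of `x` from "./m".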
pub fn finalize(self: *ModuleInfo) !void {
bun.assert(!self.finalized);
var local_name_to_module_name = std.AutoArrayHashMap(StringID, struct { module_name: StringID, import_name: StringID }).init(bun.default_allocator);
defer local_name_to_module_name.deinit();
{
var i: usize = 0;
for (self.record_kinds.items) |k| {
if (k == .import_info_single) {
try local_name_to_module_name.put(self.buffer.items[i + 2], .{ .module_name = self.buffer.items[i], .import_name = self.buffer.items[i + 1] });
}
i += k.len() catch unreachable;
}
}
{
var i: usize = 0;
for (self.record_kinds.items) |*k| {
if (k.* == .export_info_local) {
if (local_name_to_module_name.get(self.buffer.items[i + 1])) |ip| {
k.* = .export_info_indirect;
self.buffer.items[i + 1] = ip.import_name;
self.buffer.items[i + 2] = ip.module_name;
}
}
i += k.len() catch unreachable;
}
}
self._deserialized = .{
.strings_buf = self.strings_buf.items,
.strings_lens = self.strings_lens.items,
.requested_modules_keys = self.requested_modules.keys(),
.requested_modules_values = self.requested_modules.values(),
.buffer = self.buffer.items,
.record_kinds = self.record_kinds.items,
.contains_import_meta = self.contains_import_meta,
.owner = .module_info,
};
self.finalized = true;
}
};
pub const StringID = enum(u32) {
_,
};
export fn zig__renderDiff(expected_ptr: [*:0]const u8, expected_len: usize, received_ptr: [*:0]const u8, received_len: usize, globalThis: *bun.JSC.JSGlobalObject) void {
const DiffFormatter = @import("bun.js/test/diff_format.zig").DiffFormatter;
const formatter = DiffFormatter{
.received_string = received_ptr[0..received_len],
.expected_string = expected_ptr[0..expected_len],
.globalThis = globalThis,
};
const stderr = std.io.getStdErr().writer();
stderr.print("DIFF:\n{}\n", .{formatter}) catch {};
}
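// Builds a JSC JSModuleRecord from a deserialized ModuleInfo: first materializes every interned
// string as a JSC Identifier, then walks the record stream to fill the declared/lexical variable
// environments, creates the record, adds the requested modules, and walks the records again to
// add import/export entries. Returns null if the record stream is malformed, letting the caller
// reject the module.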
export fn zig__ModuleInfoDeserialized__toJSModuleRecord(
globalObject: *bun.JSC.JSGlobalObject,
vm: *bun.JSC.VM,
module_key: *const IdentifierArray,
source_code: *const SourceCode,
declared_variables: *VariableEnvironment,
lexical_variables: *VariableEnvironment,
res: *ModuleInfoDeserialized,
) ?*JSModuleRecord {
if (res.dead) @panic("ModuleInfoDeserialized already deinit()ed");
defer res.deinit();
var identifiers = IdentifierArray.create(res.strings_lens.len);
defer identifiers.destroy();
var offset: usize = 0;
for (0.., res.strings_lens) |index, len| {
if (res.strings_buf.len < offset + len) return null; // error!
const sub = res.strings_buf[offset..][0..len];
if (bun.strings.eqlComptime(sub, ModuleInfo.star_default)) {
identifiers.setFromStarDefault(index, vm);
} else {
identifiers.setFromUtf8(index, vm, sub);
}
offset += len;
}
{
var i: usize = 0;
for (res.record_kinds) |k| {
if (i + (k.len() catch 0) > res.buffer.len) return null;
switch (k) {
.declared_variable => declared_variables.add(identifiers, res.buffer[i]),
.lexical_variable => lexical_variables.add(identifiers, res.buffer[i]),
.import_info_single, .import_info_namespace, .export_info_indirect, .export_info_local, .export_info_namespace, .export_info_star => {},
else => return null,
}
i += k.len() catch unreachable; // handled above
}
}
const module_record = JSModuleRecord.create(globalObject, vm, module_key, source_code, declared_variables, lexical_variables, res.contains_import_meta);
for (res.requested_modules_keys, res.requested_modules_values) |reqk, reqv| {
switch (reqv) {
.none => module_record.addRequestedModuleNullAttributesPtr(identifiers, reqk),
.javascript => module_record.addRequestedModuleJavaScript(identifiers, reqk),
.webassembly => module_record.addRequestedModuleWebAssembly(identifiers, reqk),
.json => module_record.addRequestedModuleJSON(identifiers, reqk),
else => |uv| module_record.addRequestedModuleHostDefined(identifiers, reqk, @enumFromInt(@intFromEnum(uv))),
}
}
{
var i: usize = 0;
for (res.record_kinds) |k| {
if (i + (k.len() catch unreachable) > res.buffer.len) unreachable; // handled above
switch (k) {
.declared_variable, .lexical_variable => {},
.import_info_single => module_record.addImportEntrySingle(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
.import_info_namespace => module_record.addImportEntryNamespace(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
.export_info_indirect => module_record.addIndirectExport(identifiers, res.buffer[i + 0], res.buffer[i + 1], res.buffer[i + 2]),
.export_info_local => module_record.addLocalExport(identifiers, res.buffer[i], res.buffer[i + 1]),
.export_info_namespace => module_record.addNamespaceExport(identifiers, res.buffer[i], res.buffer[i + 1]),
.export_info_star => module_record.addStarExport(identifiers, res.buffer[i]),
else => unreachable, // handled above
}
i += k.len() catch unreachable; // handled above
}
}
return module_record;
}
export fn zig__ModuleInfo__destroy(info: *ModuleInfo) void {
info.deinit();
bun.default_allocator.destroy(info);
}
const VariableEnvironment = opaque {
extern fn JSC__VariableEnvironment__add(environment: *VariableEnvironment, identifier_array: *IdentifierArray, identifier_index: StringID) void;
pub const add = JSC__VariableEnvironment__add;
};
const IdentifierArray = opaque {
extern fn JSC__IdentifierArray__create(len: usize) *IdentifierArray;
pub const create = JSC__IdentifierArray__create;
extern fn JSC__IdentifierArray__destroy(identifier_array: *IdentifierArray) void;
pub const destroy = JSC__IdentifierArray__destroy;
extern fn JSC__IdentifierArray__setFromUtf8(identifier_array: *IdentifierArray, n: usize, vm: *bun.JSC.VM, str: [*]const u8, len: usize) void;
pub fn setFromUtf8(self: *IdentifierArray, n: usize, vm: *bun.JSC.VM, str: []const u8) void {
JSC__IdentifierArray__setFromUtf8(self, n, vm, str.ptr, str.len);
}
extern fn JSC__IdentifierArray__setFromStarDefault(identifier_array: *IdentifierArray, n: usize, vm: *bun.JSC.VM) void;
pub const setFromStarDefault = JSC__IdentifierArray__setFromStarDefault;
};
const SourceCode = opaque {};
const JSModuleRecord = opaque {
extern fn JSC_JSModuleRecord__create(global_object: *bun.JSC.JSGlobalObject, vm: *bun.JSC.VM, module_key: *const IdentifierArray, source_code: *const SourceCode, declared_variables: *VariableEnvironment, lexical_variables: *VariableEnvironment, has_import_meta: bool) *JSModuleRecord;
pub const create = JSC_JSModuleRecord__create;
extern fn JSC_JSModuleRecord__declaredVariables(module_record: *JSModuleRecord) *VariableEnvironment;
pub const declaredVariables = JSC_JSModuleRecord__declaredVariables;
extern fn JSC_JSModuleRecord__lexicalVariables(module_record: *JSModuleRecord) *VariableEnvironment;
pub const lexicalVariables = JSC_JSModuleRecord__lexicalVariables;
extern fn JSC_JSModuleRecord__addIndirectExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, import_name: StringID, module_name: StringID) void;
pub const addIndirectExport = JSC_JSModuleRecord__addIndirectExport;
extern fn JSC_JSModuleRecord__addLocalExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, local_name: StringID) void;
pub const addLocalExport = JSC_JSModuleRecord__addLocalExport;
extern fn JSC_JSModuleRecord__addNamespaceExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, module_name: StringID) void;
pub const addNamespaceExport = JSC_JSModuleRecord__addNamespaceExport;
extern fn JSC_JSModuleRecord__addStarExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
pub const addStarExport = JSC_JSModuleRecord__addStarExport;
extern fn JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
pub const addRequestedModuleNullAttributesPtr = JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr;
extern fn JSC_JSModuleRecord__addRequestedModuleJavaScript(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
pub const addRequestedModuleJavaScript = JSC_JSModuleRecord__addRequestedModuleJavaScript;
extern fn JSC_JSModuleRecord__addRequestedModuleWebAssembly(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
pub const addRequestedModuleWebAssembly = JSC_JSModuleRecord__addRequestedModuleWebAssembly;
extern fn JSC_JSModuleRecord__addRequestedModuleJSON(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
pub const addRequestedModuleJSON = JSC_JSModuleRecord__addRequestedModuleJSON;
extern fn JSC_JSModuleRecord__addRequestedModuleHostDefined(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID, host_defined_import_type: StringID) void;
pub const addRequestedModuleHostDefined = JSC_JSModuleRecord__addRequestedModuleHostDefined;
extern fn JSC_JSModuleRecord__addImportEntrySingle(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
pub const addImportEntrySingle = JSC_JSModuleRecord__addImportEntrySingle;
extern fn JSC_JSModuleRecord__addImportEntryNamespace(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
pub const addImportEntryNamespace = JSC_JSModuleRecord__addImportEntryNamespace;
};

View File

@@ -10,7 +10,8 @@
/// Version 11: Fix \uFFFF printing regression
/// Version 12: "use strict"; makes it CommonJS if we otherwise don't know which one to pick.
/// Version 13: Hoist `import.meta.require` definition, see #15738
const expected_version = 13;
/// Version 14: Include module info with an ES Module, see #15758
const expected_version = 14;
const bun = @import("root").bun;
const std = @import("std");
@@ -33,6 +34,7 @@ pub const RuntimeTranspilerCache = struct {
sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,
const seed = 42;
pub const Metadata = struct {
@@ -53,6 +55,10 @@ pub const RuntimeTranspilerCache = struct {
sourcemap_byte_length: u64 = 0,
sourcemap_hash: u64 = 0,
esm_record_byte_offset: u64 = 0,
esm_record_byte_length: u64 = 0,
esm_record_hash: u64 = 0,
pub const size = brk: {
var count: usize = 0;
const meta: Metadata = .{};
@@ -79,6 +85,10 @@ pub const RuntimeTranspilerCache = struct {
try writer.writeInt(u64, this.sourcemap_byte_offset, .little);
try writer.writeInt(u64, this.sourcemap_byte_length, .little);
try writer.writeInt(u64, this.sourcemap_hash, .little);
try writer.writeInt(u64, this.esm_record_byte_offset, .little);
try writer.writeInt(u64, this.esm_record_byte_length, .little);
try writer.writeInt(u64, this.esm_record_hash, .little);
}
pub fn decode(this: *Metadata, reader: anytype) !void {
@@ -103,6 +113,10 @@ pub const RuntimeTranspilerCache = struct {
this.sourcemap_byte_length = try reader.readInt(u64, .little);
this.sourcemap_hash = try reader.readInt(u64, .little);
this.esm_record_byte_offset = try reader.readInt(u64, .little);
this.esm_record_byte_length = try reader.readInt(u64, .little);
this.esm_record_hash = try reader.readInt(u64, .little);
switch (this.module_type) {
.esm, .cjs => {},
// Invalid module type
@@ -121,7 +135,7 @@ pub const RuntimeTranspilerCache = struct {
metadata: Metadata,
output_code: OutputCode = .{ .utf8 = "" },
sourcemap: []const u8 = "",
esm_record: []const u8 = "",
pub const OutputCode = union(enum) {
utf8: []const u8,
string: bun.String,
@@ -143,11 +157,14 @@ pub const RuntimeTranspilerCache = struct {
}
};
pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator) void {
pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator, esm_record_allocator: std.mem.Allocator) void {
this.output_code.deinit(output_code_allocator);
if (this.sourcemap.len > 0) {
sourcemap_allocator.free(this.sourcemap);
}
if (this.esm_record.len > 0) {
esm_record_allocator.free(this.esm_record);
}
}
pub fn save(
@@ -157,6 +174,7 @@ pub const RuntimeTranspilerCache = struct {
input_hash: u64,
features_hash: u64,
sourcemap: []const u8,
esm_record: []const u8,
output_code: OutputCode,
exports_kind: bun.JSAst.ExportsKind,
) !void {
@@ -202,6 +220,8 @@ pub const RuntimeTranspilerCache = struct {
.output_byte_offset = Metadata.size,
.output_byte_length = output_bytes.len,
.sourcemap_byte_offset = Metadata.size + output_bytes.len,
.esm_record_byte_offset = Metadata.size + output_bytes.len + sourcemap.len,
.esm_record_byte_length = esm_record.len,
};
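// The cache file layout becomes: Metadata | output_code | sourcemap | esm_record, so the esm
// record starts immediately after the sourcemap.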
metadata.output_hash = hash(output_bytes);
@@ -220,20 +240,26 @@ pub const RuntimeTranspilerCache = struct {
break :brk metadata_buf[0..metadata_stream.pos];
};
const vecs: []const bun.PlatformIOVecConst = if (output_bytes.len > 0)
&.{
bun.platformIOVecConstCreate(metadata_bytes),
bun.platformIOVecConstCreate(output_bytes),
bun.platformIOVecConstCreate(sourcemap),
}
else
&.{
bun.platformIOVecConstCreate(metadata_bytes),
bun.platformIOVecConstCreate(sourcemap),
};
var vecs_buf: [4]bun.PlatformIOVecConst = undefined;
var vecs_i: usize = 0;
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(metadata_bytes);
vecs_i += 1;
if (output_bytes.len > 0) {
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(output_bytes);
vecs_i += 1;
}
if (sourcemap.len > 0) {
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(sourcemap);
vecs_i += 1;
}
if (esm_record.len > 0) {
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(esm_record);
vecs_i += 1;
}
const vecs: []const bun.PlatformIOVecConst = vecs_buf[0..vecs_i];
var position: isize = 0;
const end_position = Metadata.size + output_bytes.len + sourcemap.len;
const end_position = Metadata.size + output_bytes.len + sourcemap.len + esm_record.len;
if (bun.Environment.allow_assert) {
var total: usize = 0;
@@ -243,7 +269,7 @@ pub const RuntimeTranspilerCache = struct {
}
bun.assert(end_position == total);
}
bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size)));
bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size + esm_record.len)));
bun.C.preallocate_file(tmpfile.fd.cast(), 0, @intCast(end_position)) catch {};
while (position < end_position) {
@@ -264,6 +290,7 @@ pub const RuntimeTranspilerCache = struct {
file: std.fs.File,
sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,
) !void {
const stat_size = try file.getEndPos();
if (stat_size < Metadata.size + this.metadata.output_byte_length + this.metadata.sourcemap_byte_length) {
@@ -339,6 +366,17 @@ pub const RuntimeTranspilerCache = struct {
this.sourcemap = sourcemap;
}
if (this.metadata.esm_record_byte_length > 0) {
const esm_record = try esm_record_allocator.alloc(u8, this.metadata.esm_record_byte_length);
errdefer esm_record_allocator.free(esm_record);
const read_bytes = try file.preadAll(esm_record, this.metadata.esm_record_byte_offset);
if (read_bytes != this.metadata.esm_record_byte_length) {
return error.MissingData;
}
this.esm_record = esm_record;
}
}
};
@@ -456,6 +494,7 @@ pub const RuntimeTranspilerCache = struct {
input_stat_size: u64,
sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,
) !Entry {
var tracer = bun.tracy.traceNamed(@src(), "RuntimeTranspilerCache.fromFile");
defer tracer.end();
@@ -470,6 +509,7 @@ pub const RuntimeTranspilerCache = struct {
input_stat_size,
sourcemap_allocator,
output_code_allocator,
esm_record_allocator,
);
}
@@ -480,6 +520,7 @@ pub const RuntimeTranspilerCache = struct {
input_stat_size: u64,
sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,
) !Entry {
var metadata_bytes_buf: [Metadata.size * 2]u8 = undefined;
const cache_fd = try bun.sys.open(cache_file_path.sliceAssumeZ(), bun.O.RDONLY, 0).unwrap();
@@ -511,7 +552,7 @@ pub const RuntimeTranspilerCache = struct {
return error.MismatchedFeatureHash;
}
try entry.load(file, sourcemap_allocator, output_code_allocator);
try entry.load(file, sourcemap_allocator, output_code_allocator, esm_record_allocator);
return entry;
}
@@ -528,6 +569,7 @@ pub const RuntimeTranspilerCache = struct {
input_hash: u64,
features_hash: u64,
sourcemap: []const u8,
esm_record: []const u8,
source_code: bun.String,
exports_kind: bun.JSAst.ExportsKind,
) !void {
@@ -567,6 +609,7 @@ pub const RuntimeTranspilerCache = struct {
input_hash,
features_hash,
sourcemap,
esm_record,
output_code,
exports_kind,
);
@@ -600,7 +643,7 @@ pub const RuntimeTranspilerCache = struct {
parser_options.hashForRuntimeTranspiler(&features_hasher, used_jsx);
this.features_hash = features_hasher.final();
this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator) catch |err| {
this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator) catch |err| {
debug("get(\"{s}\") = {s}", .{ source.path.text, @errorName(err) });
return false;
};
@@ -616,7 +659,7 @@ pub const RuntimeTranspilerCache = struct {
if (comptime bun.Environment.isDebug) {
if (!bun_debug_restore_from_cache) {
if (this.entry) |*entry| {
entry.deinit(this.sourcemap_allocator, this.output_code_allocator);
entry.deinit(this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator);
this.entry = null;
}
}
@@ -625,7 +668,7 @@ pub const RuntimeTranspilerCache = struct {
return this.entry != null;
}
pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8) void {
pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8, esm_record: []const u8) void {
if (comptime !bun.FeatureFlags.runtime_transpiler_cache)
@compileError("RuntimeTranspilerCache is disabled");
@@ -636,7 +679,7 @@ pub const RuntimeTranspilerCache = struct {
const output_code = bun.String.createLatin1(output_code_bytes);
this.output_code = output_code;
toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, output_code, this.exports_kind) catch |err| {
toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, esm_record, output_code, this.exports_kind) catch |err| {
debug("put() = {s}", .{@errorName(err)});
return;
};

View File

@@ -0,0 +1,313 @@
#include "root.h"
#include "JavaScriptCore/JSInternalPromise.h"
#include "JavaScriptCore/JSModuleRecord.h"
#include "JavaScriptCore/GlobalObjectMethodTable.h"
#include "JavaScriptCore/JSModuleRecord.h"
#include "JavaScriptCore/Nodes.h"
#include "JavaScriptCore/Parser.h"
#include "JavaScriptCore/ParserError.h"
#include "JavaScriptCore/SyntheticModuleRecord.h"
#include <wtf/text/MakeString.h>
#include "JavaScriptCore/JSGlobalObject.h"
#include "JavaScriptCore/JSModuleRecord.h"
#include "JavaScriptCore/ExceptionScope.h"
#include "ZigSourceProvider.h"
#include "BunAnalyzeTranspiledModule.h"
// ref: JSModuleLoader.cpp
// ref: ModuleAnalyzer.cpp
// ref: JSModuleRecord.cpp
// ref: NodesAnalyzeModule.cpp, search ::analyzeModule
// TODO: #include "JavaScriptCore/parser/ModuleAnalyzer.h"
#include "JavaScriptCore/ErrorType.h"
#include "JavaScriptCore/Nodes.h"
namespace JSC {
class JSModuleRecord;
class SourceCode;
class ScriptFetchParameters;
class ModuleAnalyzer {
WTF_MAKE_NONCOPYABLE(ModuleAnalyzer);
WTF_FORBID_HEAP_ALLOCATION;
public:
ModuleAnalyzer(JSGlobalObject*, const Identifier& moduleKey, const SourceCode&, const VariableEnvironment& declaredVariables, const VariableEnvironment& lexicalVariables, CodeFeatures);
Expected<JSModuleRecord*, std::tuple<ErrorType, String>> analyze(ModuleProgramNode&);
VM& vm() { return m_vm; }
JSModuleRecord* moduleRecord() { return m_moduleRecord; }
void appendRequestedModule(const Identifier&, RefPtr<ScriptFetchParameters>&&);
void fail(std::tuple<ErrorType, String>&& errorMessage) { m_errorMessage = errorMessage; }
private:
void exportVariable(ModuleProgramNode&, const RefPtr<UniquedStringImpl>&, const VariableEnvironmentEntry&);
VM& m_vm;
JSModuleRecord* m_moduleRecord;
IdentifierSet m_requestedModules;
std::tuple<ErrorType, String> m_errorMessage;
};
}
namespace JSC {
String dumpRecordInfo(JSModuleRecord* moduleRecord);
extern "C" JSModuleRecord* zig__ModuleInfoDeserialized__toJSModuleRecord(JSGlobalObject* globalObject, VM& vm, const Identifier& module_key, const SourceCode& source_code, VariableEnvironment& declared_variables, VariableEnvironment& lexical_variables, bun_ModuleInfoDeserialized* module_info);
extern "C" void zig__renderDiff(const char* expected_ptr, size_t expected_len, const char* received_ptr, size_t received_len, JSGlobalObject* globalObject);
extern "C" Identifier* JSC__IdentifierArray__create(size_t len)
{
return new Identifier[len];
}
extern "C" void JSC__IdentifierArray__destroy(Identifier* identifier)
{
delete[] identifier;
}
extern "C" void JSC__IdentifierArray__setFromUtf8(Identifier* identifierArray, size_t n, VM& vm, char* str, size_t len)
{
identifierArray[n] = Identifier::fromString(vm, AtomString::fromUTF8(std::span<const char>(str, len)));
}
extern "C" void JSC__IdentifierArray__setFromStarDefault(Identifier* identifierArray, size_t n, VM& vm)
{
identifierArray[n] = vm.propertyNames->starDefaultPrivateName;
}
extern "C" void JSC__VariableEnvironment__add(VariableEnvironment& environment, Identifier* identifierArray, uint32_t index)
{
environment.add(identifierArray[index]);
}
extern "C" VariableEnvironment* JSC_JSModuleRecord__declaredVariables(JSModuleRecord* moduleRecord)
{
return &moduleRecord->m_declaredVariables;
}
extern "C" VariableEnvironment* JSC_JSModuleRecord__lexicalVariables(JSModuleRecord* moduleRecord)
{
return &moduleRecord->m_lexicalVariables;
}
extern "C" JSModuleRecord* JSC_JSModuleRecord__create(JSGlobalObject* globalObject, VM& vm, const Identifier* moduleKey, const SourceCode& sourceCode, const VariableEnvironment& declaredVariables, const VariableEnvironment& lexicalVariables, bool hasImportMeta)
{
return JSModuleRecord::create(globalObject, vm, globalObject->moduleRecordStructure(), moduleKey[0], sourceCode, declaredVariables, lexicalVariables, hasImportMeta ? ImportMetaFeature : 0);
}
extern "C" void JSC_JSModuleRecord__addIndirectExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t importName, uint32_t moduleName)
{
moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createIndirect(identifierArray[exportName], identifierArray[importName], identifierArray[moduleName]));
}
extern "C" void JSC_JSModuleRecord__addLocalExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t localName)
{
moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createLocal(identifierArray[exportName], identifierArray[localName]));
}
extern "C" void JSC_JSModuleRecord__addNamespaceExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t moduleName)
{
moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createNamespace(identifierArray[exportName], identifierArray[moduleName]));
}
extern "C" void JSC_JSModuleRecord__addStarExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
moduleRecord->addStarExportEntry(identifierArray[moduleName]);
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
RefPtr<ScriptFetchParameters> attributes = RefPtr<ScriptFetchParameters> {};
moduleRecord->appendRequestedModule(identifierArray[moduleName], WTFMove(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleJavaScript(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::JavaScript);
moduleRecord->appendRequestedModule(identifierArray[moduleName], WTFMove(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleWebAssembly(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::WebAssembly);
moduleRecord->appendRequestedModule(identifierArray[moduleName], WTFMove(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleJSON(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
{
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::JSON);
moduleRecord->appendRequestedModule(identifierArray[moduleName], WTFMove(attributes));
}
extern "C" void JSC_JSModuleRecord__addRequestedModuleHostDefined(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName, uint32_t hostDefinedImportType)
{
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(identifierArray[hostDefinedImportType].string());
moduleRecord->appendRequestedModule(identifierArray[moduleName], WTFMove(attributes));
}
extern "C" void JSC_JSModuleRecord__addImportEntrySingle(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
{
moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
.type = JSModuleRecord::ImportEntryType::Single,
.moduleRequest = identifierArray[moduleName],
.importName = identifierArray[importName],
.localName = identifierArray[localName],
});
}
extern "C" void JSC_JSModuleRecord__addImportEntryNamespace(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
{
moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
.type = JSModuleRecord::ImportEntryType::Namespace,
.moduleRequest = identifierArray[moduleName],
.importName = identifierArray[importName],
.localName = identifierArray[localName],
});
}
static EncodedJSValue fallbackParse(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise, JSModuleRecord* resultValue = nullptr);
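// Builds the module record from the transpiler-provided ModuleInfo instead of re-parsing the
// transpiled source. In DEBUG builds the result is additionally cross-checked against
// fallbackParse (a real parse plus ModuleAnalyzer run); any difference is rendered with
// zig__renderDiff and the promise is rejected.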
extern "C" EncodedJSValue Bun__analyzeTranspiledModule(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise)
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
auto rejectWithError = [&](JSValue error) {
promise->reject(globalObject, error);
return promise;
};
VariableEnvironment declaredVariables = VariableEnvironment();
VariableEnvironment lexicalVariables = VariableEnvironment();
auto provider = static_cast<Zig::SourceProvider*>(sourceCode.provider());
if (provider->m_resolvedSource.module_info == nullptr) {
dataLog("[note] module_info is null for module: ", moduleKey.utf8(), "\n");
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("module_info is null")))));
}
auto moduleRecord = zig__ModuleInfoDeserialized__toJSModuleRecord(globalObject, vm, moduleKey, sourceCode, declaredVariables, lexicalVariables, provider->m_resolvedSource.module_info);
if (moduleRecord == nullptr) {
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("parseFromSourceCode failed")))));
}
#ifdef DEBUG
RELEASE_AND_RETURN(scope, fallbackParse(globalObject, moduleKey, sourceCode, promise, moduleRecord));
#else
promise->fulfillWithNonPromise(globalObject, moduleRecord);
RELEASE_AND_RETURN(scope, JSValue::encode(promise));
#endif
}
static EncodedJSValue fallbackParse(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise, JSModuleRecord* resultValue)
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
auto rejectWithError = [&](JSValue error) {
promise->reject(globalObject, error);
return promise;
};
ParserError error;
std::unique_ptr<ModuleProgramNode> moduleProgramNode = parseRootNode<ModuleProgramNode>(
vm, sourceCode, ImplementationVisibility::Public, JSParserBuiltinMode::NotBuiltin,
StrictModeLexicallyScopedFeature, JSParserScriptMode::Module, SourceParseMode::ModuleAnalyzeMode, error);
if (error.isValid())
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(error.toErrorObject(globalObject, sourceCode))));
ASSERT(moduleProgramNode);
ModuleAnalyzer moduleAnalyzer(globalObject, moduleKey, sourceCode, moduleProgramNode->varDeclarations(), moduleProgramNode->lexicalVariables(), moduleProgramNode->features());
RETURN_IF_EXCEPTION(scope, JSValue::encode(promise->rejectWithCaughtException(globalObject, scope)));
auto result = moduleAnalyzer.analyze(*moduleProgramNode);
if (!result) {
auto [errorType, message] = WTFMove(result.error());
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, errorType, message))));
}
JSModuleRecord* moduleRecord = result.value();
if (resultValue != nullptr) {
auto actual = dumpRecordInfo(resultValue);
auto expected = dumpRecordInfo(moduleRecord);
if (actual != expected) {
dataLog("\n\n\n\n\n\n\x1b[95mBEGIN analyzeTranspiledModule\x1b(B\x1b[m\n ---code---\n\n", sourceCode.toUTF8().data(), "\n");
dataLog(" ------", "\n");
dataLog(" BunAnalyzeTranspiledModule:", "\n");
zig__renderDiff(expected.utf8().data(), expected.utf8().length(), actual.utf8().data(), actual.utf8().length(), globalObject);
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("Imports different between parseFromSourceCode and fallbackParse")))));
}
}
scope.release();
promise->fulfillWithNonPromise(globalObject, resultValue == nullptr ? moduleRecord : resultValue);
return JSValue::encode(promise);
}
String dumpRecordInfo(JSModuleRecord* moduleRecord)
{
WTF::StringPrintStream stream;
stream.print(" varDeclarations:\n");
for (const auto& pair : moduleRecord->m_declaredVariables) {
stream.print(" - ", pair.key, "\n");
}
stream.print(" lexicalVariables:\n");
for (const auto& pair : moduleRecord->m_lexicalVariables) {
stream.print(" - ", pair.key, "\n");
}
stream.print(" features: ");
stream.print(moduleRecord->m_features & ImportMetaFeature);
stream.print("\n");
stream.print("\nAnalyzing ModuleRecord key(", moduleRecord->moduleKey().impl(), ")\n");
stream.print(" Dependencies: ", moduleRecord->requestedModules().size(), " modules\n");
for (const auto& request : moduleRecord->requestedModules())
if (request.m_attributes == nullptr) {
stream.print(" module(", request.m_specifier, ")\n");
} else {
stream.print(" module(", request.m_specifier, "),attributes(", (uint8_t)request.m_attributes->type(), ", ", request.m_attributes->hostDefinedImportType(), ")\n");
}
stream.print(" Import: ", moduleRecord->importEntries().size(), " entries\n");
for (const auto& pair : moduleRecord->importEntries()) {
auto& importEntry = pair.value;
stream.print(" import(", importEntry.importName, "), local(", importEntry.localName, "), module(", importEntry.moduleRequest, ")\n");
}
stream.print(" Export: ", moduleRecord->exportEntries().size(), " entries\n");
Vector<String> sortedEntries;
for (const auto& pair : moduleRecord->exportEntries()) {
WTF::StringPrintStream line;
auto& exportEntry = pair.value;
switch (exportEntry.type) {
case AbstractModuleRecord::ExportEntry::Type::Local:
line.print(" [Local] ", "export(", exportEntry.exportName, "), local(", exportEntry.localName, ")\n");
break;
case AbstractModuleRecord::ExportEntry::Type::Indirect:
line.print(" [Indirect] ", "export(", exportEntry.exportName, "), import(", exportEntry.importName, "), module(", exportEntry.moduleName, ")\n");
break;
case AbstractModuleRecord::ExportEntry::Type::Namespace:
line.print(" [Namespace] ", "export(", exportEntry.exportName, "), module(", exportEntry.moduleName, ")\n");
break;
}
sortedEntries.append(line.toString());
}
std::sort(sortedEntries.begin(), sortedEntries.end(), [](const String& a, const String& b) {
return a.utf8().toStdString() < b.utf8().toStdString();
});
for (const auto& entry : sortedEntries)
stream.print(entry);
for (const auto& moduleName : moduleRecord->starExportEntries())
stream.print(" [Star] module(", moduleName.get(), ")\n");
stream.print(" -> done\n");
return stream.toString();
}
}

View File

@@ -0,0 +1 @@
struct bun_ModuleInfoDeserialized;

View File

@@ -1145,7 +1145,7 @@ bool JSCommonJSModule::evaluate(
bool isBuiltIn)
{
auto& vm = globalObject->vm();
auto sourceProvider = Zig::SourceProvider::create(jsCast<Zig::GlobalObject*>(globalObject), source, JSC::SourceProviderSourceType::Program, isBuiltIn);
auto sourceProvider = Zig::SourceProvider::create(jsCast<Zig::GlobalObject*>(globalObject), source, isBuiltIn);
this->ignoreESModuleAnnotation = source.tag == ResolvedSourceTagPackageJSONTypeModule;
if (this->hasEvaluated)
return true;
@@ -1199,7 +1199,7 @@ std::optional<JSC::SourceCode> createCommonJSModule(
dirname = jsEmptyString(vm);
}
auto sourceProvider = Zig::SourceProvider::create(jsCast<Zig::GlobalObject*>(globalObject), source, JSC::SourceProviderSourceType::Program, isBuiltIn);
auto sourceProvider = Zig::SourceProvider::create(jsCast<Zig::GlobalObject*>(globalObject), source, isBuiltIn);
sourceOrigin = sourceProvider->sourceOrigin();
moduleObject = JSCommonJSModule::create(
vm,

View File

@@ -745,7 +745,7 @@ static JSValue fetchESMSourceCode(
auto tag = res->result.value.tag;
switch (tag) {
case SyntheticModuleType::ESM: {
auto&& provider = Zig::SourceProvider::create(globalObject, res->result.value, JSC::SourceProviderSourceType::Module, true);
auto&& provider = Zig::SourceProvider::create(globalObject, res->result.value, true);
return rejectOrResolve(JSSourceCode::create(vm, JSC::SourceCode(provider)));
}
@@ -764,7 +764,7 @@ static JSValue fetchESMSourceCode(
auto source = JSC::SourceCode(JSC::SyntheticSourceProvider::create(generateInternalModuleSourceCode(globalObject, static_cast<InternalModuleRegistry::Field>(tag & mask)), JSC::SourceOrigin(URL(makeString("builtins://"_s, moduleKey))), moduleKey));
return rejectOrResolve(JSSourceCode::create(vm, WTFMove(source)));
} else {
auto&& provider = Zig::SourceProvider::create(globalObject, res->result.value, JSC::SourceProviderSourceType::Module, true);
auto&& provider = Zig::SourceProvider::create(globalObject, res->result.value, true);
return rejectOrResolve(JSC::JSSourceCode::create(vm, JSC::SourceCode(provider)));
}
}

View File

@@ -75,9 +75,17 @@ extern "C" void Bun__removeSourceProviderSourceMap(void* bun_vm, SourceProvider*
Ref<SourceProvider> SourceProvider::create(
Zig::GlobalObject* globalObject,
ResolvedSource& resolvedSource,
JSC::SourceProviderSourceType sourceType,
bool isBuiltin)
{
JSC::SourceProviderSourceType sourceType = JSC::SourceProviderSourceType::BunTranspiledModule;
if (resolvedSource.isCommonJSModule) {
ASSERT(resolvedSource.module_info == nullptr, "isCommonJSModule should not have module_info");
sourceType = JSC::SourceProviderSourceType::Program;
} else if (resolvedSource.module_info == nullptr) {
sourceType = JSC::SourceProviderSourceType::Module;
}
auto string = resolvedSource.source_code.toWTFString(BunString::ZeroCopy);
auto sourceURLString = resolvedSource.source_url.toWTFString(BunString::ZeroCopy);

View File

@@ -39,7 +39,6 @@ public:
static Ref<SourceProvider> create(
Zig::GlobalObject*,
ResolvedSource& resolvedSource,
JSC::SourceProviderSourceType sourceType = JSC::SourceProviderSourceType::Module,
bool isBuiltIn = false);
~SourceProvider();
unsigned hash() const override;

View File

@@ -7142,3 +7142,7 @@ pub const DeferredError = struct {
return err;
}
};
comptime {
_ = @import("../../analyze_transpiled_module.zig");
}

View File

@@ -205,6 +205,7 @@ pub fn Errorable(comptime Type: type) type {
};
}
/// must be kept in sync with `ResolvedSource` in `headers-handwritten.h`
pub const ResolvedSource = extern struct {
pub const shim = Shimmer("Zig", "ResolvedSource", @This());
pub const name = "ResolvedSource";
@@ -234,6 +235,14 @@ pub const ResolvedSource = extern struct {
bytecode_cache: ?[*]u8 = null,
bytecode_cache_size: usize = 0,
/// - for esm: null means to use jsc's regular parsing step. more info: https://github.com/oven-sh/bun/pull/15758
/// - for cjs: must be null
module_info: ?*@import("../../analyze_transpiled_module.zig").ModuleInfoDeserialized,
pub const unfilled = ResolvedSource{
.module_info = null,
};
pub const Tag = @import("ResolvedSourceTag").ResolvedSourceTag;
};

View File

@@ -3,6 +3,7 @@
#include "wtf/text/OrdinalNumber.h"
#include "JavaScriptCore/JSCJSValue.h"
#include "JavaScriptCore/ArgList.h"
#include "BunAnalyzeTranspiledModule.h"
#include <set>
#ifndef HEADERS_HANDWRITTEN
@@ -107,6 +108,7 @@ typedef struct ResolvedSource {
bool already_bundled;
uint8_t* bytecode_cache;
size_t bytecode_cache_size;
bun_ModuleInfoDeserialized* module_info;
} ResolvedSource;
static const uint32_t ResolvedSourceTagPackageJSONTypeModule = 1;
typedef union ErrorableResolvedSourceResult {

View File

@@ -2336,6 +2336,7 @@ pub const VirtualMachine = struct {
.hash = 0,
.allocator = null,
.source_code_needs_deref = false,
.module_info = null,
};
}
var source = this.refCountedString(code, hash_, !add_double_ref);
@@ -2351,6 +2352,7 @@ pub const VirtualMachine = struct {
.hash = source.hash,
.allocator = source,
.source_code_needs_deref = false,
.module_info = null,
};
}

View File

@@ -13,6 +13,8 @@ const default_allocator = bun.default_allocator;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const Arena = @import("../allocators/mimalloc_arena.zig").Arena;
const C = bun.C;
const analyze_transpiled_module = @import("../analyze_transpiled_module.zig");
const ModuleInfo = analyze_transpiled_module.ModuleInfo;
const Allocator = std.mem.Allocator;
const IdentityContext = @import("../identity_context.zig").IdentityContext;
@@ -97,6 +99,7 @@ inline fn jsSyntheticModule(comptime name: ResolvedSource.Tag, specifier: String
.hash = 0,
.tag = name,
.source_code_needs_deref = false,
.module_info = null,
};
}
@@ -289,7 +292,7 @@ pub const RuntimeTranspilerStore = struct {
generation_number: u32 = 0,
log: logger.Log,
parse_error: ?anyerror = null,
resolved_source: ResolvedSource = ResolvedSource{},
resolved_source: ResolvedSource = ResolvedSource.unfilled,
work_task: JSC.WorkPoolTask = .{ .callback = runFromWorkerThread },
next: ?*TranspilerJob = null,
@@ -421,6 +424,7 @@ pub const RuntimeTranspilerStore = struct {
var cache = JSC.RuntimeTranspilerCache{
.output_code_allocator = allocator,
.sourcemap_allocator = bun.default_allocator,
.esm_record_allocator = bun.default_allocator,
};
var vm = this.vm;
@@ -565,6 +569,19 @@ pub const RuntimeTranspilerStore = struct {
dumpSourceString(vm, specifier, entry.output_code.byteSlice());
}
var module_info: ?*analyze_transpiled_module.ModuleInfoDeserialized = null;
if (entry.esm_record.len > 0) {
if (entry.metadata.module_type == .cjs) {
@panic("TranspilerCache contained cjs module with module info");
}
module_info = analyze_transpiled_module.ModuleInfoDeserialized.create(entry.esm_record, bun.default_allocator) catch |e| switch (e) {
error.OutOfMemory => bun.outOfMemory(),
// invalid module info in the cache; unclear how to recover, so treat it as fatal for now
error.BadModuleInfo => @panic("TranspilerCache contained invalid module info"),
};
}
this.resolved_source = ResolvedSource{
.allocator = null,
.source_code = switch (entry.output_code) {
@@ -578,6 +595,7 @@ pub const RuntimeTranspilerStore = struct {
},
.hash = 0,
.is_commonjs_module = entry.metadata.module_type == .cjs,
.module_info = module_info,
};
return;
@@ -593,6 +611,7 @@ pub const RuntimeTranspilerStore = struct {
.bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null,
.bytecode_cache_size = bytecode_slice.len,
.is_commonjs_module = parse_result.already_bundled.isCommonJS(),
.module_info = null,
};
this.resolved_source.source_code.ensureHash();
return;
@@ -645,6 +664,10 @@ pub const RuntimeTranspilerStore = struct {
var printer = source_code_printer.?.*;
printer.ctx.reset();
const is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
const module_info: ?*ModuleInfo = if (is_commonjs_module) null else ModuleInfo.create(bun.default_allocator) catch bun.outOfMemory();
// defer module_info.destroy(); // TODO: do not leak module_info
{
var mapper = vm.sourceMapHandler(&printer);
defer source_code_printer.?.* = printer;
@@ -654,6 +677,7 @@ pub const RuntimeTranspilerStore = struct {
&printer,
.esm_ascii,
mapper.get(),
module_info,
) catch |err| {
this.parse_error = err;
return;
@@ -686,11 +710,13 @@ pub const RuntimeTranspilerStore = struct {
break :brk result;
};
this.resolved_source = ResolvedSource{
.allocator = null,
.source_code = source_code,
.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
.is_commonjs_module = is_commonjs_module,
.hash = 0,
.module_info = if (module_info) |mi| mi.asDeserialized() else null,
};
}
};
@@ -1409,6 +1435,10 @@ pub const ModuleLoader = struct {
var printer = VirtualMachine.source_code_printer.?.*;
printer.ctx.reset();
const is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
const module_info: ?*ModuleInfo = if (is_commonjs_module) null else ModuleInfo.create(bun.default_allocator) catch bun.outOfMemory();
// defer module_info.destroy(); // TODO: do not leak module_info
{
var mapper = jsc_vm.sourceMapHandler(&printer);
defer VirtualMachine.source_code_printer.?.* = printer;
@@ -1418,6 +1448,7 @@ pub const ModuleLoader = struct {
&printer,
.esm_ascii,
mapper.get(),
module_info,
);
}
@@ -1442,7 +1473,7 @@ pub const ModuleLoader = struct {
}
}
resolved_source.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
resolved_source.is_commonjs_module = is_commonjs_module;
return resolved_source;
}
@@ -1452,9 +1483,10 @@ pub const ModuleLoader = struct {
.source_code = bun.String.createLatin1(printer.ctx.getWritten()),
.specifier = String.init(specifier),
.source_url = String.init(path.text),
.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
.is_commonjs_module = is_commonjs_module,
.hash = 0,
.module_info = if (module_info) |mi| mi.asDeserialized() else null,
};
}
@@ -1515,6 +1547,7 @@ pub const ModuleLoader = struct {
.specifier = input_specifier,
.source_url = input_specifier.createIfDifferent(path.text),
.hash = 0,
.module_info = null,
};
}
}
@@ -1582,6 +1615,7 @@ pub const ModuleLoader = struct {
var cache = JSC.RuntimeTranspilerCache{
.output_code_allocator = allocator,
.sourcemap_allocator = bun.default_allocator,
.esm_record_allocator = bun.default_allocator,
};
const old = jsc_vm.transpiler.log;
@@ -1747,6 +1781,7 @@ pub const ModuleLoader = struct {
.hash = 0,
.tag = ResolvedSource.Tag.json_for_object_loader,
.module_info = null,
};
}
@@ -1761,6 +1796,7 @@ pub const ModuleLoader = struct {
.specifier = input_specifier,
.source_url = input_specifier.createIfDifferent(path.text),
.hash = 0,
.module_info = null,
};
}
@@ -1773,6 +1809,7 @@ pub const ModuleLoader = struct {
.hash = 0,
.jsvalue_for_export = JSC.JSValue.createEmptyObject(jsc_vm.global, 0),
.tag = .exports_object,
.module_info = null,
};
}
@@ -1783,6 +1820,7 @@ pub const ModuleLoader = struct {
.hash = 0,
.jsvalue_for_export = parse_result.ast.parts.@"[0]"().stmts[0].data.s_expr.value.toJS(allocator, globalObject orelse jsc_vm.global) catch |e| panic("Unexpected JS error: {s}", .{@errorName(e)}),
.tag = .exports_object,
.module_info = null,
};
}
@@ -1798,6 +1836,7 @@ pub const ModuleLoader = struct {
.bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null,
.bytecode_cache_size = if (bytecode_slice.len > 0) bytecode_slice.len else 0,
.is_commonjs_module = parse_result.already_bundled.isCommonJS(),
.module_info = null,
};
}
@@ -1811,6 +1850,19 @@ pub const ModuleLoader = struct {
dumpSourceString(jsc_vm, specifier, entry.output_code.byteSlice());
}
var module_info: ?*analyze_transpiled_module.ModuleInfoDeserialized = null;
if (entry.esm_record.len > 0) {
if (entry.metadata.module_type == .cjs) {
@panic("TranspilerCache contained cjs module with module info");
}
module_info = analyze_transpiled_module.ModuleInfoDeserialized.create(entry.esm_record, bun.default_allocator) catch |e| switch (e) {
error.OutOfMemory => bun.outOfMemory(),
// invalid module info in the cache; unclear how to recover, so treat it as fatal for now
error.BadModuleInfo => @panic("TranspilerCache contained invalid module info"),
};
}
return ResolvedSource{
.allocator = null,
.source_code = switch (entry.output_code) {
@@ -1843,6 +1895,7 @@ pub const ModuleLoader = struct {
break :brk ResolvedSource.Tag.javascript;
},
.module_info = module_info,
};
}
@@ -1897,6 +1950,11 @@ pub const ModuleLoader = struct {
var printer = source_code_printer.*;
printer.ctx.reset();
defer source_code_printer.* = printer;
const is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
const module_info: ?*ModuleInfo = if (is_commonjs_module) null else ModuleInfo.create(bun.default_allocator) catch bun.outOfMemory();
// defer module_info.destroy(); // TODO: do not leak module_info
_ = brk: {
var mapper = jsc_vm.sourceMapHandler(&printer);
@@ -1906,6 +1964,7 @@ pub const ModuleLoader = struct {
&printer,
.esm_ascii,
mapper.get(),
module_info,
);
};
@@ -1958,9 +2017,10 @@ pub const ModuleLoader = struct {
},
.specifier = input_specifier,
.source_url = input_specifier.createIfDifferent(path.text),
.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
.is_commonjs_module = is_commonjs_module,
.hash = 0,
.tag = tag,
.module_info = if (module_info) |mi| mi.asDeserialized() else null,
};
},
// provideFetch() should be called
@@ -2029,6 +2089,7 @@ pub const ModuleLoader = struct {
.source_url = input_specifier.createIfDifferent(path.text),
.tag = .esm,
.hash = 0,
.module_info = null,
};
}
@@ -2088,6 +2149,7 @@ pub const ModuleLoader = struct {
.source_url = input_specifier.createIfDifferent(path.text),
.tag = .esm,
.hash = 0,
.module_info = null,
};
},
@@ -2100,6 +2162,7 @@ pub const ModuleLoader = struct {
.source_url = input_specifier.createIfDifferent(path.text),
.hash = 0,
.tag = .esm,
.module_info = null,
};
}
@@ -2115,6 +2178,7 @@ pub const ModuleLoader = struct {
.source_url = input_specifier.createIfDifferent(path.text),
.hash = 0,
.tag = .export_default_object,
.module_info = null,
};
},
@@ -2127,6 +2191,7 @@ pub const ModuleLoader = struct {
.source_url = input_specifier.createIfDifferent(path.text),
.hash = 0,
.tag = .esm,
.module_info = null,
};
}
@@ -2198,6 +2263,7 @@ pub const ModuleLoader = struct {
.specifier = input_specifier,
.source_url = input_specifier.createIfDifferent(path.text),
.hash = 0,
.module_info = null,
.tag = .export_default_object,
};
},
@@ -2477,6 +2543,7 @@ pub const ModuleLoader = struct {
.specifier = specifier,
.source_url = specifier,
.hash = Runtime.Runtime.versionHash(),
.module_info = null,
};
} else if (HardcodedModule.Map.getWithEql(specifier, bun.String.eqlComptime)) |hardcoded| {
Analytics.Features.builtin_modules.insert(hardcoded);
@@ -2491,6 +2558,7 @@ pub const ModuleLoader = struct {
.hash = 0,
.tag = .esm,
.source_code_needs_deref = true,
.module_info = null,
};
},
@@ -2590,6 +2658,7 @@ pub const ModuleLoader = struct {
.specifier = specifier,
.source_url = specifier.dupeRef(),
.hash = 0,
.module_info = null,
};
}
} else if (jsc_vm.standalone_module_graph) |graph| {
@@ -2613,6 +2682,7 @@ pub const ModuleLoader = struct {
.source_url = specifier.dupeRef(),
.hash = 0,
.source_code_needs_deref = false,
.module_info = null,
};
}
@@ -2626,6 +2696,7 @@ pub const ModuleLoader = struct {
.bytecode_cache = if (file.bytecode.len > 0) file.bytecode.ptr else null,
.bytecode_cache_size = file.bytecode.len,
.is_commonjs_module = file.module_format == .cjs,
.module_info = null,
};
}
}

View File

@@ -322,10 +322,6 @@ pub const Binding = struct {
loc: logger.Loc,
};
pub fn jsonStringify(self: *const @This(), writer: anytype) !void {
return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "binding", .value = self.data, .loc = self.loc });
}
pub fn ToExpr(comptime expr_type: type, comptime func_type: anytype) type {
const ExprType = expr_type;
return struct {
@@ -2851,6 +2847,89 @@ pub const Stmt = struct {
loc: logger.Loc,
};
pub fn print(self: *const Stmt, tree: Ast, writer: std.io.AnyWriter) !void {
_ = tree;
switch (self.data) {
.s_import => |simport| {
// const record = &tree.import_records.slice()[simport.import_record_index];
try writer.print(".s_import{{\n", .{});
try writer.print(" import_records[import_record_index = {d}] = ,\n", .{simport.import_record_index});
// simport.default_name
// simport.is_single_line
// simport.items
// simport.namespace_ref
// === record: ===
// range: logger.Range,
// path: fs.Path,
// kind: ImportKind,
// tag: Tag = .none,
// source_index: Index = Index.invalid,
// print_mode: PrintMode = .normal,
// handles_import_errors: bool = false,
// is_internal: bool = false,
// is_unused: bool = false,
// contains_import_star: bool = false,
// contains_default_alias: bool = false,
// contains_es_module_alias: bool = false,
// calls_runtime_re_export_fn: bool = false,
// is_inside_try_body: bool = false,
// was_originally_bare_import: bool = false,
// was_originally_require: bool = false,
// was_injected_by_macro: bool = false,
// is_external_without_side_effects: bool = false,
// print_namespace_in_path: bool = false,
// wrap_with_to_esm: bool = false,
// wrap_with_to_commonjs: bool = false,
try writer.print(" ", .{});
try writer.print("}}", .{});
},
.s_expr => |expr| {
try writer.print(".s_expr{{ .does_not_affect_tree_shaking = {}, .value = ", .{expr.does_not_affect_tree_shaking});
try expr.value.print(writer, 0);
try writer.print("}}", .{});
},
.s_local => |local| {
try writer.print(".s_local{{ .kind = .{s}, .is_export = {}, .was_ts_import_equals = {}, .was_commonjs_export = {}, .decls = .{{\n", .{ @tagName(local.kind), local.is_export, local.was_ts_import_equals, local.was_commonjs_export });
for (local.decls.slice()) |m| {
try writer.print(" .{{\n .binding = ", .{});
switch (m.binding.data) {
.b_array => |v| {
try writer.print(".b_array{{ .has_spread = {}, .is_single_line = {}, .items = .{{", .{ v.has_spread, v.is_single_line });
for (v.items, 0..) |item, i| {
if (i != 0) try writer.print(", ", .{});
try writer.print("(TODO)", .{});
_ = item;
}
try writer.print("}}}}", .{});
},
.b_identifier => |v| {
try writer.print(".b_identifier{{ .ref = {} }}", .{v.ref});
},
.b_object => {
try writer.print(".b_object", .{});
},
.b_missing => {
try writer.print(".b_missing", .{});
},
}
try writer.print(",\n .value = ", .{});
if (m.value == null) {
try writer.print("null", .{});
} else {
try m.value.?.print(writer, 2);
}
try writer.print(",\n }},\n", .{});
}
try writer.print("}} }}", .{});
},
else => {
try writer.print(".{s}._todo_print_stmt", .{@tagName(self.data)});
},
}
}
pub fn jsonStringify(self: *const Stmt, writer: anytype) !void {
return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "stmt", .value = self.data, .loc = self.loc });
}
@@ -3230,6 +3309,18 @@ pub const Expr = struct {
loc: logger.Loc,
data: Data,
pub fn print(self: *const Expr, writer: std.io.AnyWriter, depth: u32) !void {
_ = depth;
switch (self.data) {
.e_string => |str| {
try writer.print("(string: \"{s}\")", .{bun.strings.formatEscapes(str.data, .{ .str_encoding = .utf8, .quote_char = '"' })});
},
else => {
try writer.print("(expr: {s})", .{@tagName(self.data)});
},
}
}
pub const empty = Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = logger.Loc.Empty };
pub fn isAnonymousNamed(expr: Expr) bool {

View File

@@ -23,6 +23,8 @@ const Ref = @import("ast/base.zig").Ref;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const FeatureFlags = bun.FeatureFlags;
const FileDescriptorType = bun.FileDescriptor;
const analyze_transpiled_module = @import("analyze_transpiled_module.zig");
const ModuleInfo = analyze_transpiled_module.ModuleInfo;
const expect = std.testing.expect;
const ImportKind = importRecord.ImportKind;
@@ -691,8 +693,38 @@ fn NewPrinter(
binary_expression_stack: std.ArrayList(BinaryExpressionVisitor) = undefined,
module_info: if (!may_have_module_info) void else ?*ModuleInfo = if (!may_have_module_info) {} else null,
const Printer = @This();
const may_have_module_info = is_bun_platform and !rewrite_esm_to_cjs;
const TopLevelAndIsExport = if (!may_have_module_info) struct {} else struct {
is_export: bool = false,
is_top_level: ?ModuleInfo.VarKind = null,
};
const TopLevel = if (!may_have_module_info) struct {
pub inline fn init(_: IsTopLevel) @This() {
return .{};
}
pub inline fn subVar(_: @This()) @This() {
return .{};
}
} else struct {
is_top_level: IsTopLevel = .no,
pub inline fn init(is_top_level: IsTopLevel) @This() {
return .{ .is_top_level = is_top_level };
}
pub fn subVar(self: @This()) @This() {
if (self.is_top_level == .no) return @This().init(.no);
return @This().init(.var_only);
}
};
const IsTopLevel = enum { yes, var_only, no };
inline fn moduleInfo(self: *const @This()) ?*ModuleInfo {
if (!may_have_module_info) return null;
return self.module_info;
}
/// When Printer is used as an io.Writer, this represents its error type, aka nothing.
pub const Error = error{};
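A worked example of how the new TopLevel tracking composes, inferred from the definitions above (only meaningful when may_have_module_info is true; otherwise the struct is empty):

    // the module body starts at the top level (see printAst further down)
    const top = TopLevel.init(.yes);
    // entering e.g. a `try` body demotes to .var_only: `var` declarations are
    // still recorded as top-level declared vars, but `let`/`const` are not
    const inside_try = top.subVar(); // .is_top_level == .var_only
    // further nesting keeps .var_only; a .no context always stays .no
    const deeper = inside_try.subVar(); // .is_top_level == .var_only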
@@ -989,6 +1021,7 @@ fn NewPrinter(
printInternalBunImport(p, import, @TypeOf("globalThis.Bun.jest(__filename)"), "globalThis.Bun.jest(__filename)");
},
else => {
if (p.moduleInfo()) |mi| mi.contains_import_meta = true;
printInternalBunImport(p, import, @TypeOf("globalThis.Bun.jest(import.meta.path)"), "globalThis.Bun.jest(import.meta.path)");
},
}
@@ -1004,7 +1037,9 @@ fn NewPrinter(
if (import.star_name_loc != null) {
p.print("var ");
p.printSymbol(import.namespace_ref);
const name = p.renamer.nameForSymbol(import.namespace_ref);
p.printIdentifier(name);
if (p.moduleInfo()) |mi| mi.addVar(name, .declared) catch bun.outOfMemory();
p.printSpace();
p.print("=");
p.printSpaceBeforeIdentifier();
@@ -1027,7 +1062,9 @@ fn NewPrinter(
if (import.default_name) |default| {
p.print("var ");
p.printSymbol(default.ref.?);
const default_name = p.renamer.nameForSymbol(default.ref.?);
p.printIdentifier(default_name);
if (p.moduleInfo()) |mi| mi.addVar(default_name, .declared) catch bun.outOfMemory();
if (comptime Statement == void) {
p.@"print = "();
p.printRequireOrImportExpr(
@@ -1065,7 +1102,11 @@ fn NewPrinter(
}
}
p.printClauseItemAs(item, .@"var");
p.printVarClauseItem(item);
if (p.moduleInfo()) |mi| {
const varname = p.renamer.nameForSymbol(item.name.ref.?);
mi.addVar(varname, .declared) catch bun.outOfMemory();
}
}
if (!import.is_single_line) {
@@ -1137,32 +1178,32 @@ fn NewPrinter(
switch (stmt.data) {
.s_block => |block| {
p.printSpace();
p.printBlock(stmt.loc, block.stmts, block.close_brace_loc);
p.printBlock(stmt.loc, block.stmts, block.close_brace_loc, .{});
p.printNewline();
},
else => {
p.printNewline();
p.indent();
p.printStmt(stmt) catch unreachable;
p.printStmt(stmt, .{}) catch bun.outOfMemory();
p.unindent();
},
}
}
pub fn printBlockBody(p: *Printer, stmts: []const Stmt) void {
pub fn printBlockBody(p: *Printer, stmts: []const Stmt, tlmtlo: TopLevel) void {
for (stmts) |stmt| {
p.printSemicolonIfNeeded();
p.printStmt(stmt) catch unreachable;
p.printStmt(stmt, tlmtlo) catch bun.outOfMemory();
}
}
pub fn printBlock(p: *Printer, loc: logger.Loc, stmts: []const Stmt, close_brace_loc: ?logger.Loc) void {
pub fn printBlock(p: *Printer, loc: logger.Loc, stmts: []const Stmt, close_brace_loc: ?logger.Loc, tlmtlo: TopLevel) void {
p.addSourceMapping(loc);
p.print("{");
p.printNewline();
p.indent();
p.printBlockBody(stmts);
p.printBlockBody(stmts, tlmtlo);
p.unindent();
p.needs_semicolon = false;
@@ -1179,8 +1220,8 @@ fn NewPrinter(
p.printNewline();
p.indent();
p.printBlockBody(prepend);
p.printBlockBody(stmts);
p.printBlockBody(prepend, .{});
p.printBlockBody(stmts, .{});
p.unindent();
p.needs_semicolon = false;
@@ -1188,7 +1229,7 @@ fn NewPrinter(
p.print("}");
}
pub fn printDecls(p: *Printer, comptime keyword: string, decls_: []G.Decl, flags: ExprFlag.Set) void {
pub fn printDecls(p: *Printer, comptime keyword: string, decls_: []G.Decl, flags: ExprFlag.Set, tlm: TopLevelAndIsExport) void {
p.print(keyword);
p.printSpace();
var decls = decls_;
@@ -1296,7 +1337,7 @@ fn NewPrinter(
.is_single_line = true,
};
const binding = Binding.init(&b_object, target_e_dot.target.loc);
p.printBinding(binding);
p.printBinding(binding, tlm);
}
p.printWhitespacer(ws(" = "));
@@ -1312,7 +1353,7 @@ fn NewPrinter(
}
{
p.printBinding(decls[0].binding);
p.printBinding(decls[0].binding, tlm);
if (decls[0].value) |value| {
p.printWhitespacer(ws(" = "));
@@ -1324,7 +1365,7 @@ fn NewPrinter(
p.print(",");
p.printSpace();
p.printBinding(decl.binding);
p.printBinding(decl.binding, tlm);
if (decl.value) |value| {
p.printWhitespacer(ws(" = "));
@@ -1391,7 +1432,7 @@ fn NewPrinter(
p.print("...");
}
p.printBinding(arg.binding);
p.printBinding(arg.binding, .{});
if (arg.default) |default| {
p.printWhitespacer(ws(" = "));
@@ -1407,7 +1448,7 @@ fn NewPrinter(
pub fn printFunc(p: *Printer, func: G.Fn) void {
p.printFnArgs(func.open_parens_loc, func.args, func.flags.contains(.has_rest_arg), false);
p.printSpace();
p.printBlock(func.body.loc, func.body.stmts, null);
p.printBlock(func.body.loc, func.body.stmts, null, .{});
}
pub fn printClass(p: *Printer, class: G.Class) void {
if (class.extends) |extends| {
@@ -1430,7 +1471,7 @@ fn NewPrinter(
if (item.kind == .class_static_block) {
p.print("static");
p.printSpace();
p.printBlock(item.class_static_block.?.loc, item.class_static_block.?.stmts.slice(), null);
p.printBlock(item.class_static_block.?.loc, item.class_static_block.?.stmts.slice(), null, .{});
p.printNewline();
continue;
}
@@ -1671,12 +1712,14 @@ fn NewPrinter(
if (module_type == .cjs) {
p.print("Promise.resolve(globalThis.Bun.jest(__filename))");
} else {
if (p.moduleInfo()) |mi| mi.contains_import_meta = true;
p.print("Promise.resolve(globalThis.Bun.jest(import.meta.path))");
}
} else if (record.kind == .require) {
if (module_type == .cjs) {
p.print("globalThis.Bun.jest(__filename)");
} else {
if (p.moduleInfo()) |mi| mi.contains_import_meta = true;
p.print("globalThis.Bun.jest(import.meta.path)");
}
}
@@ -1897,45 +1940,28 @@ fn NewPrinter(
p.print(quote);
}
fn printClauseItem(p: *Printer, item: js_ast.ClauseItem) void {
return printClauseItemAs(p, item, .import);
}
fn printExportClauseItem(p: *Printer, item: js_ast.ClauseItem) void {
return printClauseItemAs(p, item, .@"export");
}
fn printClauseItemAs(p: *Printer, item: js_ast.ClauseItem, comptime as: @Type(.EnumLiteral)) void {
const name = p.renamer.nameForSymbol(item.name.ref.?);
if (comptime as == .import) {
if (strings.eql(name, item.alias)) {
p.printIdentifier(name);
} else {
p.printClauseAlias(item.alias);
p.print(" as ");
p.addSourceMapping(item.alias_loc);
p.printIdentifier(name);
}
} else if (comptime as == .@"var") {
p.printIdentifier(name);
if (!strings.eql(name, item.alias)) {
p.print(" as ");
p.addSourceMapping(item.alias_loc);
p.printClauseAlias(item.alias);
}
}
if (!strings.eql(name, item.alias)) {
p.print(":");
p.printSpace();
fn printVarClauseItem(p: *Printer, item: js_ast.ClauseItem) void {
const name = p.renamer.nameForSymbol(item.name.ref.?);
p.printClauseAlias(item.alias);
if (!strings.eql(name, item.alias)) {
p.print(":");
p.printSpace();
p.printIdentifier(name);
}
} else if (comptime as == .@"export") {
p.printIdentifier(name);
if (!strings.eql(name, item.alias)) {
p.print(" as ");
p.addSourceMapping(item.alias_loc);
p.printClauseAlias(item.alias);
}
} else {
@compileError("Unknown as");
}
}
@@ -2047,6 +2073,7 @@ fn NewPrinter(
p.print(".importMeta()");
} else if (!p.options.import_meta_ref.isValid()) {
// Most of the time, leave it in there
if (p.moduleInfo()) |mi| mi.contains_import_meta = true;
p.print("import.meta");
} else {
// Note: The bundler will not hit this code path. The bundler will replace
@@ -2073,6 +2100,7 @@ fn NewPrinter(
p.printSpaceBeforeIdentifier();
p.addSourceMapping(expr.loc);
}
if (p.moduleInfo()) |mi| mi.contains_import_meta = true;
p.print("import.meta.main");
} else {
bun.assert(p.options.module_type != .internal_bake_dev);
@@ -2538,7 +2566,7 @@ fn NewPrinter(
}
if (!wasPrinted) {
p.printBlock(e.body.loc, e.body.stmts, null);
p.printBlock(e.body.loc, e.body.stmts, null, .{});
}
if (wrap) {
@@ -3446,13 +3474,19 @@ fn NewPrinter(
p.printExpr(initial, .comma, ExprFlag.None());
}
pub fn printBinding(p: *Printer, binding: Binding) void {
pub fn printBinding(p: *Printer, binding: Binding, tlm: TopLevelAndIsExport) void {
switch (binding.data) {
.b_missing => {},
.b_identifier => |b| {
p.printSpaceBeforeIdentifier();
p.addSourceMapping(binding.loc);
p.printSymbol(b.ref);
const local_name = p.renamer.nameForSymbol(b.ref);
p.printIdentifier(local_name);
if (p.moduleInfo()) |mi| {
if (tlm.is_top_level) |vk| mi.addVar(local_name, vk) catch bun.outOfMemory();
if (tlm.is_export) mi.addExportInfoLocal(local_name, local_name) catch bun.outOfMemory();
}
},
.b_array => |b| {
p.print("[");
@@ -3479,7 +3513,7 @@ fn NewPrinter(
p.print("...");
}
p.printBinding(item.binding);
p.printBinding(item.binding, tlm);
p.maybePrintDefaultBindingValue(item);
@@ -3526,7 +3560,7 @@ fn NewPrinter(
p.print("]:");
p.printSpace();
p.printBinding(property.value);
p.printBinding(property.value, tlm);
p.maybePrintDefaultBindingValue(property);
continue;
}
@@ -3551,6 +3585,10 @@ fn NewPrinter(
switch (property.value.data) {
.b_identifier => |id| {
if (str.eql(string, p.renamer.nameForSymbol(id.ref))) {
if (p.moduleInfo()) |mi| {
if (tlm.is_top_level) |vk| mi.addVar(str.data, vk) catch bun.outOfMemory();
if (tlm.is_export) mi.addExportInfoLocal(str.data, str.data) catch bun.outOfMemory();
}
p.maybePrintDefaultBindingValue(property);
continue;
}
@@ -3568,6 +3606,11 @@ fn NewPrinter(
switch (property.value.data) {
.b_identifier => |id| {
if (strings.utf16EqlString(str.slice16(), p.renamer.nameForSymbol(id.ref))) {
if (p.moduleInfo()) |mi| {
const str8 = str.slice(p.options.allocator);
if (tlm.is_top_level) |vk| mi.addVar(str8, vk) catch bun.outOfMemory();
if (tlm.is_export) mi.addExportInfoLocal(str8, str8) catch bun.outOfMemory();
}
p.maybePrintDefaultBindingValue(property);
continue;
}
@@ -3587,7 +3630,7 @@ fn NewPrinter(
p.printSpace();
}
p.printBinding(property.value);
p.printBinding(property.value, tlm);
p.maybePrintDefaultBindingValue(property);
}
@@ -3613,7 +3656,7 @@ fn NewPrinter(
}
}
pub fn printStmt(p: *Printer, stmt: Stmt) !void {
pub fn printStmt(p: *Printer, stmt: Stmt, tlmtlo: TopLevel) !void {
const prev_stmt_tag = p.prev_stmt_tag;
defer {
@@ -3647,9 +3690,15 @@ fn NewPrinter(
p.printSpaceBeforeIdentifier();
p.addSourceMapping(name.loc);
p.printSymbol(nameRef);
const local_name = p.renamer.nameForSymbol(nameRef);
p.printIdentifier(local_name);
p.printFunc(s.func);
if (p.moduleInfo()) |mi| {
if (tlmtlo.is_top_level == .yes) try mi.addVar(local_name, .lexical);
if (s.func.flags.contains(.is_export)) try mi.addExportInfoLocal(local_name, local_name);
}
// if (rewrite_esm_to_cjs and s.func.flags.contains(.is_export)) {
// p.printSemicolonAfterStatement();
// p.print("var ");
@@ -3676,6 +3725,7 @@ fn NewPrinter(
p.printIndent();
p.printSpaceBeforeIdentifier();
const nameRef = s.class.class_name.?.ref.?;
const nameStr = p.renamer.nameForSymbol(nameRef);
if (s.is_export) {
if (!rewrite_esm_to_cjs) {
p.print("export ");
@@ -3684,9 +3734,14 @@ fn NewPrinter(
p.print("class ");
p.addSourceMapping(s.class.class_name.?.loc);
p.printSymbol(nameRef);
p.printIdentifier(nameStr);
p.printClass(s.class);
if (p.moduleInfo()) |mi| {
if (s.is_export) try mi.addExportInfoLocal(nameStr, nameStr);
if (tlmtlo.is_top_level == .yes) try mi.addVar(nameStr, .lexical);
}
if (rewrite_esm_to_cjs and s.is_export) {
p.printSemicolonAfterStatement();
} else {
@@ -3720,6 +3775,10 @@ fn NewPrinter(
p.export_default_start = p.writer.written;
p.printExpr(expr, .comma, ExprFlag.None());
p.printSemicolonAfterStatement();
if (p.moduleInfo()) |mi| {
try mi.addExportInfoLocal("default", ModuleInfo.star_default);
try mi.addVar(ModuleInfo.star_default, .lexical);
}
return;
},
@@ -3740,20 +3799,26 @@ fn NewPrinter(
p.maybePrintSpace();
}
if (func.func.name) |name| {
p.printSymbol(name.ref.?);
}
const func_name: ?[]const u8 = if (func.func.name) |f| p.renamer.nameForSymbol(f.ref.?) else null;
if (func_name) |f| p.printIdentifier(f);
p.printFunc(func.func);
p.printNewline();
if (p.moduleInfo()) |mi| {
const local_name = func_name orelse ModuleInfo.star_default;
try mi.addExportInfoLocal("default", local_name);
try mi.addVar(local_name, .lexical);
}
},
.s_class => |class| {
p.printSpaceBeforeIdentifier();
if (class.class.class_name) |name| {
const class_name: ?[]const u8 = if (class.class.class_name) |f| p.renamer.nameForSymbol(f.ref.?) else null;
if (class_name) |name| {
p.print("class ");
p.printSymbol(name.ref orelse Output.panic("Internal error: Expected class to have a name ref\n{any}", .{class}));
p.printIdentifier(name);
} else {
p.print("class");
}
@@ -3761,6 +3826,12 @@ fn NewPrinter(
p.printClass(class.class);
p.printNewline();
if (p.moduleInfo()) |mi| {
const local_name = class_name orelse ModuleInfo.star_default;
try mi.addExportInfoLocal("default", local_name);
try mi.addVar(local_name, .lexical);
}
},
else => {
Output.panic("Internal error: unexpected export default stmt data {any}", .{s});
@@ -3778,19 +3849,27 @@ fn NewPrinter(
p.printIndent();
p.printSpaceBeforeIdentifier();
if (s.alias != null)
p.printWhitespacer(comptime ws("export *").append(" as "))
else
p.printWhitespacer(comptime ws("export * from "));
if (s.alias) |alias| {
p.printWhitespacer(comptime ws("export *").append(" as "));
p.printClauseAlias(alias.original_name);
p.print(" ");
p.printWhitespacer(ws("from "));
} else {
p.printWhitespacer(comptime ws("export * from "));
}
p.printImportRecordPath(p.importRecord(s.import_record_index));
const irp = try p.fmtImportRecordPath(p.importRecord(s.import_record_index));
p.printStringLiteralUTF8(irp, false);
p.printSemicolonAfterStatement();
if (p.moduleInfo()) |mi| {
try mi.requestModule(irp, .none);
if (s.alias) |alias| {
try mi.addExportInfoNamespace(alias.original_name, irp);
} else {
try mi.addExportInfoStar(irp);
}
}
},
.s_export_clause => |s| {
if (rewrite_esm_to_cjs) {
@@ -3938,7 +4017,12 @@ fn NewPrinter(
p.printIndent();
}
const name = p.renamer.nameForSymbol(item.name.ref.?);
p.printExportClauseItem(item);
if (p.moduleInfo()) |mi| {
try mi.addExportInfoLocal(item.alias, name);
}
}
if (!s.is_single_line) {
@@ -3990,26 +4074,26 @@ fn NewPrinter(
}
p.printWhitespacer(ws("} from "));
p.printImportRecordPath(import_record);
const irp = try p.fmtImportRecordPath(import_record);
p.printStringLiteralUTF8(irp, false);
p.printSemicolonAfterStatement();
if (p.moduleInfo()) |mi| {
try mi.requestModule(irp, .none);
for (s.items) |item| {
// how could this be renamed, it's in `export from`?
const name = p.renamer.nameForSymbol(item.name.ref.?);
try mi.addExportInfoIndirect(item.alias, name, irp);
}
}
},
.s_local => |s| {
switch (s.kind) {
.k_const => {
p.printDeclStmt(s.is_export, "const", s.decls.slice());
},
.k_let => {
p.printDeclStmt(s.is_export, "let", s.decls.slice());
},
.k_var => {
p.printDeclStmt(s.is_export, "var", s.decls.slice());
},
.k_using => {
p.printDeclStmt(s.is_export, "using", s.decls.slice());
},
.k_await_using => {
p.printDeclStmt(s.is_export, "await using", s.decls.slice());
},
.k_const => p.printDeclStmt(s.is_export, "const", s.decls.slice(), tlmtlo),
.k_let => p.printDeclStmt(s.is_export, "let", s.decls.slice(), tlmtlo),
.k_var => p.printDeclStmt(s.is_export, "var", s.decls.slice(), tlmtlo),
.k_using => p.printDeclStmt(s.is_export, "using", s.decls.slice(), tlmtlo),
.k_await_using => p.printDeclStmt(s.is_export, "await using", s.decls.slice(), tlmtlo),
}
},
.s_if => |s| {
@@ -4023,13 +4107,13 @@ fn NewPrinter(
switch (s.body.data) {
.s_block => {
p.printSpace();
p.printBlock(s.body.loc, s.body.data.s_block.stmts, s.body.data.s_block.close_brace_loc);
p.printBlock(s.body.loc, s.body.data.s_block.stmts, s.body.data.s_block.close_brace_loc, .{});
p.printSpace();
},
else => {
p.printNewline();
p.indent();
p.printStmt(s.body) catch unreachable;
p.printStmt(s.body, .{}) catch unreachable;
p.printSemicolonIfNeeded();
p.unindent();
p.printIndent();
@@ -4112,7 +4196,7 @@ fn NewPrinter(
p.printSpaceBeforeIdentifier();
p.print("try");
p.printSpace();
p.printBlock(s.body_loc, s.body, null);
p.printBlock(s.body_loc, s.body, null, tlmtlo.subVar());
if (s.catch_) |catch_| {
p.printSpace();
@@ -4120,18 +4204,18 @@ fn NewPrinter(
if (catch_.binding) |binding| {
p.printSpace();
p.print("(");
p.printBinding(binding);
p.printBinding(binding, .{});
p.print(")");
}
p.printSpace();
p.printBlock(catch_.loc, catch_.body, null);
p.printBlock(catch_.loc, catch_.body, null, tlmtlo.subVar());
}
if (s.finally) |finally| {
p.printSpace();
p.print("finally");
p.printSpace();
p.printBlock(finally.loc, finally.stmts, null);
p.printBlock(finally.loc, finally.stmts, null, tlmtlo.subVar());
}
p.printNewline();
@@ -4196,7 +4280,7 @@ fn NewPrinter(
switch (c.body[0].data) {
.s_block => {
p.printSpace();
p.printBlock(c.body[0].loc, c.body[0].data.s_block.stmts, c.body[0].data.s_block.close_brace_loc);
p.printBlock(c.body[0].loc, c.body[0].data.s_block.stmts, c.body[0].data.s_block.close_brace_loc, .{});
p.printNewline();
continue;
},
@@ -4208,7 +4292,7 @@ fn NewPrinter(
p.indent();
for (c.body) |st| {
p.printSemicolonIfNeeded();
p.printStmt(st) catch unreachable;
p.printStmt(st, .{}) catch unreachable;
}
p.unindent();
}
@@ -4289,6 +4373,7 @@ fn NewPrinter(
p.printSymbol(s.namespace_ref);
p.@"print = "();
if (p.moduleInfo()) |mi| mi.contains_import_meta = true;
p.print("import.meta.require(");
p.printImportRecordPath(record);
p.print(")");
@@ -4341,7 +4426,7 @@ fn NewPrinter(
p.print(",");
p.printSpace();
for (s.items, 0..) |item, i| {
p.printClauseItemAs(item, .@"var");
p.printVarClauseItem(item);
if (i < s.items.len - 1) {
p.print(",");
@@ -4351,7 +4436,7 @@ fn NewPrinter(
}
} else {
for (s.items, 0..) |item, i| {
p.printClauseItemAs(item, .@"var");
p.printVarClauseItem(item);
if (i < s.items.len - 1) {
p.print(",");
@@ -4383,10 +4468,18 @@ fn NewPrinter(
var item_count: usize = 0;
const import_record_path = try p.fmtImportRecordPath(record);
if (s.default_name) |name| {
p.print(" ");
p.printSymbol(name.ref.?);
const local_name = p.renamer.nameForSymbol(name.ref.?);
p.printIdentifier(local_name);
item_count += 1;
if (p.moduleInfo()) |mi| {
try mi.addVar(local_name, .lexical);
try mi.addImportInfoSingle(import_record_path, "default", local_name);
}
}
if (s.items.len > 0) {
@@ -4415,7 +4508,20 @@ fn NewPrinter(
p.printIndent();
}
p.printClauseItem(item);
const local_name = p.renamer.nameForSymbol(item.name.ref.?);
if (strings.eql(local_name, item.alias)) {
p.printIdentifier(local_name);
} else {
p.printClauseAlias(item.alias);
p.print(" as ");
p.addSourceMapping(item.alias_loc);
p.printIdentifier(local_name);
}
if (p.moduleInfo()) |mi| {
try mi.addVar(local_name, .lexical);
try mi.addImportInfoSingle(import_record_path, item.alias, local_name);
}
}
if (!s.is_single_line) {
@@ -4435,10 +4541,17 @@ fn NewPrinter(
}
p.printSpace();
const local_name = p.renamer.nameForSymbol(s.namespace_ref);
p.printWhitespacer(ws("* as"));
p.print(" ");
p.printSymbol(s.namespace_ref);
p.printIdentifier(local_name);
item_count += 1;
if (p.moduleInfo()) |mi| {
try mi.addVar(local_name, .lexical);
try mi.addImportInfoNamespace(import_record_path, local_name);
}
}
if (item_count > 0) {
@@ -4450,43 +4563,54 @@ fn NewPrinter(
p.printWhitespacer(ws("from "));
}
p.printImportRecordPath(record);
p.printStringLiteralUTF8(import_record_path, false);
var fetch_parameters: ModuleInfo.FetchParameters = if (p.moduleInfo() != null) .none else undefined;
switch (record.tag) {
.with_type_sqlite, .with_type_sqlite_embedded => {
// we do not preserve "embed": "true" since it is not necessary
p.printWhitespacer(ws(" with { type: \"sqlite\" }"));
if (p.moduleInfo()) |mi| fetch_parameters = ModuleInfo.FetchParameters.hostDefined(try mi.str("sqlite"));
},
.with_type_text => {
if (comptime is_bun_platform) {
p.printWhitespacer(ws(" with { type: \"text\" }"));
if (p.moduleInfo()) |mi| fetch_parameters = ModuleInfo.FetchParameters.hostDefined(try mi.str("text"));
}
},
.with_type_json => {
// backwards compatibility: previously, we always stripped type json
if (comptime is_bun_platform) {
p.printWhitespacer(ws(" with { type: \"json\" }"));
if (p.moduleInfo() != null) fetch_parameters = .json;
}
},
.with_type_toml => {
// backwards compatibility: previously, we always stripped type
if (comptime is_bun_platform) {
p.printWhitespacer(ws(" with { type: \"toml\" }"));
if (p.moduleInfo()) |mi| fetch_parameters = ModuleInfo.FetchParameters.hostDefined(try mi.str("toml"));
}
},
.with_type_file => {
// backwards compatibility: previously, we always stripped type
if (comptime is_bun_platform) {
p.printWhitespacer(ws(" with { type: \"file\" }"));
if (p.moduleInfo()) |mi| fetch_parameters = ModuleInfo.FetchParameters.hostDefined(try mi.str("file"));
}
},
else => {},
}
if (p.moduleInfo()) |mi| {
try mi.requestModule(import_record_path, fetch_parameters);
}
p.printSemicolonAfterStatement();
},
.s_block => |s| {
p.printIndent();
p.printBlock(stmt.loc, s.stmts, s.close_brace_loc);
p.printBlock(stmt.loc, s.stmts, s.close_brace_loc, .{});
p.printNewline();
},
.s_debugger => {
@@ -4565,22 +4689,16 @@ fn NewPrinter(
p.print("module.exports");
}
pub fn printImportRecordPath(p: *Printer, import_record: *const ImportRecord) void {
if (comptime is_json)
unreachable;
pub fn fmtImportRecordPath(p: *Printer, import_record: *const ImportRecord) ![]const u8 {
if (comptime is_json) unreachable;
const quote = bestQuoteCharForString(u8, import_record.path.text, false);
if (import_record.print_namespace_in_path and !import_record.path.isFile()) {
p.print(quote);
p.printStringCharactersUTF8(import_record.path.namespace, quote);
p.print(":");
p.printStringCharactersUTF8(import_record.path.text, quote);
p.print(quote);
} else {
p.print(quote);
p.printStringCharactersUTF8(import_record.path.text, quote);
p.print(quote);
return try std.fmt.allocPrint(p.options.allocator, "{s}:{s}", .{ import_record.path.namespace, import_record.path.text });
}
return import_record.path.text;
}
pub fn printImportRecordPath(p: *Printer, import_record: *const ImportRecord) void {
p.printStringLiteralUTF8(p.fmtImportRecordPath(import_record) catch bun.outOfMemory(), false);
}
pub fn printBundledImport(p: *Printer, record: ImportRecord, s: *S.Import) void {
@@ -4756,21 +4874,11 @@ fn NewPrinter(
},
.s_local => |s| {
switch (s.kind) {
.k_var => {
p.printDecls("var", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
},
.k_let => {
p.printDecls("let", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
},
.k_const => {
p.printDecls("const", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
},
.k_using => {
p.printDecls("using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
},
.k_await_using => {
p.printDecls("await using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
},
.k_var => p.printDecls("var", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{}),
.k_let => p.printDecls("let", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{}),
.k_const => p.printDecls("const", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{}),
.k_using => p.printDecls("using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{}),
.k_await_using => p.printDecls("await using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{}),
}
},
// for(;)
@@ -4791,7 +4899,7 @@ fn NewPrinter(
switch (s.yes.data) {
.s_block => |block| {
p.printSpace();
p.printBlock(s.yes.loc, block.stmts, block.close_brace_loc);
p.printBlock(s.yes.loc, block.stmts, block.close_brace_loc, .{});
if (s.no != null) {
p.printSpace();
@@ -4806,7 +4914,7 @@ fn NewPrinter(
p.printNewline();
p.indent();
p.printStmt(s.yes) catch unreachable;
p.printStmt(s.yes, .{}) catch bun.outOfMemory();
p.unindent();
p.needs_semicolon = false;
@@ -4821,7 +4929,7 @@ fn NewPrinter(
} else {
p.printNewline();
p.indent();
p.printStmt(s.yes) catch unreachable;
p.printStmt(s.yes, .{}) catch bun.outOfMemory();
p.unindent();
if (s.no != null) {
@@ -4839,7 +4947,7 @@ fn NewPrinter(
switch (no_block.data) {
.s_block => {
p.printSpace();
p.printBlock(no_block.loc, no_block.data.s_block.stmts, null);
p.printBlock(no_block.loc, no_block.data.s_block.stmts, null, .{});
p.printNewline();
},
.s_if => {
@@ -4848,7 +4956,7 @@ fn NewPrinter(
else => {
p.printNewline();
p.indent();
p.printStmt(no_block) catch unreachable;
p.printStmt(no_block, .{}) catch bun.outOfMemory();
p.unindent();
},
}
@@ -4929,14 +5037,17 @@ fn NewPrinter(
}
}
pub fn printDeclStmt(p: *Printer, is_export: bool, comptime keyword: string, decls: []G.Decl) void {
pub fn printDeclStmt(p: *Printer, is_export: bool, comptime keyword: string, decls: []G.Decl, tlmtlo: TopLevel) void {
p.printIndent();
p.printSpaceBeforeIdentifier();
if (!rewrite_esm_to_cjs and is_export) {
p.print("export ");
}
p.printDecls(keyword, decls, ExprFlag.None());
p.printDecls(keyword, decls, ExprFlag.None(), if (may_have_module_info) .{
.is_export = is_export and !rewrite_esm_to_cjs,
.is_top_level = if (comptime std.mem.eql(u8, keyword, "var")) if (tlmtlo.is_top_level == .no) null else .declared else if (tlmtlo.is_top_level == .yes) .lexical else null,
} else .{});
p.printSemicolonAfterStatement();
if (rewrite_esm_to_cjs and is_export and decls.len > 0) {
for (decls) |decl| {
@@ -4981,7 +5092,7 @@ fn NewPrinter(
p.print("}");
},
else => {
p.printBinding(decl.binding);
p.printBinding(decl.binding, .{});
},
}
p.print(")");
@@ -5664,6 +5775,7 @@ pub fn printAst(
comptime ascii_only: bool,
opts: Options,
comptime generate_source_map: bool,
module_info: ?*@import("analyze_transpiled_module.zig").ModuleInfo,
) !usize {
var renamer: rename.Renamer = undefined;
var no_op_renamer: rename.NoOpRenamer = undefined;
@@ -5762,6 +5874,11 @@ pub fn printAst(
printer.source_map_builder.line_offset_tables.deinit(opts.allocator);
}
}
if (module_info != null) bun.assert(PrinterType.may_have_module_info);
const have_module_info = PrinterType.may_have_module_info and module_info != null;
if (have_module_info) printer.module_info = module_info;
var bin_stack_heap = std.heap.stackFallback(1024, bun.default_allocator);
printer.binary_expression_stack = std.ArrayList(PrinterType.BinaryExpressionVisitor).init(bin_stack_heap.get());
defer printer.binary_expression_stack.clearAndFree();
@@ -5785,12 +5902,13 @@ pub fn printAst(
//
// This is never a symbol collision because `uses_require_ref` means
// `require` must be an unbound variable.
if (printer.moduleInfo()) |mi| mi.contains_import_meta = true;
printer.print("var {require}=import.meta;");
}
for (tree.parts.slice()) |part| {
for (part.stmts) |stmt| {
try printer.printStmt(stmt);
try printer.printStmt(stmt, PrinterType.TopLevel.init(.yes));
if (printer.writer.getError()) {} else |err| {
return err;
}
@@ -5798,24 +5916,25 @@ pub fn printAst(
}
}
if (comptime FeatureFlags.runtime_transpiler_cache and generate_source_map) {
if (have_module_info) {
try module_info.?.finalize();
}
var sourcemap: []const u8 = "";
if (comptime generate_source_map) {
if (opts.source_map_handler) |handler| {
const source_maps_chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten());
if (opts.runtime_transpiler_cache) |cache| {
cache.put(printer.writer.ctx.getWritten(), source_maps_chunk.buffer.list.items);
}
sourcemap = source_maps_chunk.buffer.list.items;
try handler.onSourceMapChunk(source_maps_chunk, source.*);
} else {
if (opts.runtime_transpiler_cache) |cache| {
cache.put(printer.writer.ctx.getWritten(), "");
}
}
} else if (comptime generate_source_map) {
if (opts.source_map_handler) |handler| {
try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source.*);
}
}
if (opts.runtime_transpiler_cache) |cache| {
var srlz_res = std.ArrayList(u8).init(bun.default_allocator);
defer srlz_res.deinit();
if (have_module_info) try module_info.?.asDeserialized().serialize(srlz_res.writer());
cache.put(printer.writer.ctx.getWritten(), sourcemap, srlz_res.items);
}
try printer.writer.done();
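Taken together with the module-loader hunks above, the cache round trip looks roughly like this (a sketch; only the calls shown elsewhere in this diff are real, the surrounding variable names are illustrative):

    // cache miss: after printing, serialize the collected ESM record next to the code and sourcemap
    var srlz_res = std.ArrayList(u8).init(bun.default_allocator);
    defer srlz_res.deinit();
    try module_info.asDeserialized().serialize(srlz_res.writer());
    cache.put(printer.writer.ctx.getWritten(), sourcemap, srlz_res.items);

    // cache hit (ModuleLoader): rebuild the record from the stored bytes
    const mi = try analyze_transpiled_module.ModuleInfoDeserialized.create(entry.esm_record, bun.default_allocator);
    _ = mi;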
@@ -6003,7 +6122,7 @@ pub fn printWithWriterAndPlatform(
for (parts) |part| {
for (part.stmts) |stmt| {
printer.printStmt(stmt) catch |err| {
printer.printStmt(stmt, PrinterType.TopLevel.init(.yes)) catch |err| {
return .{ .err = err };
};
if (printer.writer.getError()) {} else |err| {
@@ -6066,7 +6185,7 @@ pub fn printCommonJS(
for (tree.parts.slice()) |part| {
for (part.stmts) |stmt| {
try printer.printStmt(stmt);
try printer.printStmt(stmt, PrinterType.TopLevel.init(.yes));
if (printer.writer.getError()) {} else |err| {
return err;
}

View File

@@ -6611,7 +6611,7 @@ fn QuoteEscapeFormat(comptime flags: QuoteEscapeFormatFlags) type {
data: []const u8,
pub fn format(self: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
try bun.js_printer.writePreQuotedString(self.data, @TypeOf(writer), writer, flags.quote_char, false, flags.json, flags.str_encoding);
try bun.js_printer.writePreQuotedString(self.data, @TypeOf(writer), writer, flags.quote_char, flags.ascii_only, flags.json, flags.str_encoding);
}
};
}

View File

@@ -1002,12 +1002,15 @@ pub const Transpiler = struct {
comptime enable_source_map: bool,
source_map_context: ?js_printer.SourceMapHandler,
runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache,
module_info: ?*@import("analyze_transpiled_module.zig").ModuleInfo,
) !usize {
const tracer = bun.tracy.traceNamed(@src(), if (enable_source_map) "JSPrinter.printWithSourceMap" else "JSPrinter.print");
defer tracer.end();
const symbols = js_ast.Symbol.NestedList.init(&[_]js_ast.Symbol.List{ast.symbols});
if (module_info != null) bun.assert(format == .esm or format == .esm_ascii);
return switch (format) {
.cjs => try js_printer.printCommonJS(
Writer,
@@ -1054,6 +1057,7 @@ pub const Transpiler = struct {
.print_dce_annotations = transpiler.options.emit_dce_annotations,
},
enable_source_map,
module_info,
),
.esm_ascii => switch (transpiler.options.target.isBun()) {
inline else => |is_bun| try js_printer.printAst(
@@ -1088,6 +1092,7 @@ pub const Transpiler = struct {
.print_dce_annotations = transpiler.options.emit_dce_annotations,
},
enable_source_map,
module_info,
),
},
else => unreachable,
@@ -1110,6 +1115,7 @@ pub const Transpiler = struct {
false,
null,
null,
null,
);
}
@@ -1120,6 +1126,7 @@ pub const Transpiler = struct {
writer: Writer,
comptime format: js_printer.Format,
handler: js_printer.SourceMapHandler,
module_info: ?*@import("analyze_transpiled_module.zig").ModuleInfo,
) !usize {
if (bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_SOURCE_MAPS")) {
return transpiler.printWithSourceMapMaybe(
@@ -1131,6 +1138,7 @@ pub const Transpiler = struct {
false,
handler,
result.runtime_transpiler_cache,
module_info,
);
}
return transpiler.printWithSourceMapMaybe(
@@ -1142,6 +1150,7 @@ pub const Transpiler = struct {
true,
handler,
result.runtime_transpiler_cache,
module_info,
);
}

View File

@@ -0,0 +1,174 @@
import { describe, test, expect } from "bun:test" with { todo: "true" };
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
/*
Potential solutions:
- Option 1: Make a fake export `export const my_string = undefined;` and make sure it is not enumerable
- Option 2: In b.ts, make JavaScriptCore skip re-exporting a name that is not found rather than throwing a SyntaxError
  - this won't work because in the `import {}` then `export {}` case, the error is raised on the import
*/
const a_file = `
export type my_string = "1";
export type my_value = "2";
export const my_value = "2";
export const my_only = "3";
`;
const a_no_value = `
export type my_string = "1";
export type my_value = "2";
export const my_only = "3";
`;
const a_with_value = `
export type my_string = "1";
export const my_value = "2";
`;
const b_files = [
{
name: "export from",
value: `export { my_string, my_value, my_only } from "./a.ts";`,
},
{
name: "import then export",
value: `
import { my_string, my_value, my_only } from "./a.ts";
export { my_string, my_value, my_only };
`,
},
{
name: "export star",
value: `export * from "./a.ts";`,
},
{
name: "export merge",
value: `export * from "./a_no_value.ts"; export * from "./a_with_value.ts"`,
},
];
const c_files = [
{ name: "require", value: `console.log(JSON.stringify(require("./b")));` },
{ name: "import star", value: `import * as b from "./b"; console.log(JSON.stringify(b));` },
{ name: "await import", value: `console.log(JSON.stringify(await import("./b")));` },
{
name: "import individual",
value: `
import { my_string, my_value, my_only } from "./b";
console.log(JSON.stringify({ my_only, my_value }));
`,
},
];
for (const b_file of b_files) {
describe(`re-export with ${b_file.name}`, () => {
for (const c_file of c_files) {
describe(`import with ${c_file.name}`, () => {
const dir = tempDirWithFiles("type-export", {
"a.ts": a_file,
"b.ts": b_file.value,
"c.ts": c_file.value,
"a_no_value.ts": a_no_value,
"a_with_value.ts": a_with_value,
});
const runAndVerify = (filename: string) => {
const result = Bun.spawnSync({
cmd: [bunExe(), "run", filename],
cwd: dir,
env: bunEnv,
stdio: ["inherit", "pipe", "inherit"],
});
expect(result.exitCode).toBe(0);
expect(JSON.parse(result.stdout.toString().trim())).toEqual({ my_value: "2", my_only: "3" });
};
test.todoIf(b_file.name !== "export star" && b_file.name !== "export merge")("run", () => {
runAndVerify("c.ts");
});
test("build", async () => {
const result = Bun.spawnSync({
cmd: [bunExe(), "build", "--target=bun", "--outfile", "bundle.js", "c.ts"],
cwd: dir,
env: bunEnv,
stdio: ["inherit", "inherit", "inherit"],
});
expect(result.exitCode).toBe(0);
runAndVerify("bundle.js");
});
});
}
});
}
test("import not found", () => {
const dir = tempDirWithFiles("type-export", {
"a.ts": `export const a = 25; export const c = "hello";`,
"b.ts": /*js*/ `
import { a, b, c } from "./a";
console.log(a, b, c);
`,
});
const result = Bun.spawnSync({
cmd: [bunExe(), "run", "b.ts"],
cwd: dir,
env: bunEnv,
stdio: ["inherit", "pipe", "pipe"],
});
expect(result.stderr?.toString().trim()).toContain("SyntaxError: Export named 'b' not found in module");
expect({
exitCode: result.exitCode,
stdout: result.stdout?.toString().trim(),
}).toEqual({
exitCode: 1,
stdout: "",
});
});
describe("through export merge", () => {
// this isn't allowed, even in TypeScript (tsc emits "Duplicate identifier 'value'.")
for (const fmt of ["js", "ts"]) {
describe(fmt, () => {
for (const [name, mode] of [
["through", "export {value} from './b'; export {value} from './c';"],
["direct", "export {value} from './b'; export const value = 'abc';"],
["direct2", "export const value = 'abc'; export {value};"],
["ns", "export * as value from './c'; export * as value from './c';"],
]) {
describe(name, () => {
const dir = tempDirWithFiles("type-import", {
["main." + fmt]: "import {value} from './a'; console.log(value);",
["a." + fmt]: mode,
["b." + fmt]: fmt === "ts" ? "export type value = 'b';" : "",
["c." + fmt]: "export const value = 'c';",
});
for (const file of ["main." + fmt, "a." + fmt]) {
test(file, () => {
const result = Bun.spawnSync({
cmd: [bunExe(), file],
cwd: dir,
env: bunEnv,
stdio: ["inherit", "pipe", "pipe"],
});
expect(result.stderr?.toString().trim()).toInclude(
file === "a." + fmt
? 'error: Multiple exports with the same name "value"\n' // bun's syntax error
: "SyntaxError: Cannot export a duplicate name 'value'.\n", // jsc's syntax error
);
expect(result.exitCode).toBe(1);
});
}
});
}
});
}
});
// TODO:
// - check ownkeys from a star import
// - check commonjs cases
// - what happens with `export * from ./a; export * from ./b` where a and b have different definitions of the same name?