mirror of
https://github.com/oven-sh/bun
synced 2026-02-25 02:57:27 +01:00
Compare commits
17 Commits
nektro-pat
...
pfg/fix-ty
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
de08acf2cd | ||
|
|
0800c4a9ea | ||
|
|
cb5118c778 | ||
|
|
88b473bf8b | ||
|
|
4b26e39fb9 | ||
|
|
24eb08199b | ||
|
|
179f41dbea | ||
|
|
a85df7fde6 | ||
|
|
485abb68f1 | ||
|
|
79e781d5b4 | ||
|
|
fcc1627a7a | ||
|
|
32778cd135 | ||
|
|
c1b1c8cc8a | ||
|
|
cb6c37f6a7 | ||
|
|
c6b059b2b3 | ||
|
|
4b700cd84b | ||
|
|
9c2fde82d2 |
@@ -509,6 +509,11 @@ pub fn addInstallObjectFile(
|
||||
name: []const u8,
|
||||
out_mode: ObjectFormat,
|
||||
) *Step {
|
||||
if (@import("builtin").os.tag != .windows and std.posix.getenvZ("COMPILE_ERRORS_ONLY") != null) {
|
||||
const failstep = b.addSystemCommand(&.{"COMPILE_ERRORS_ONLY set but there were no compile errors"});
|
||||
failstep.step.dependOn(&compile.step);
|
||||
return &failstep.step;
|
||||
}
|
||||
// bin always needed to be computed or else the compilation will do nothing. zig build system bug?
|
||||
const bin = compile.getEmittedBin();
|
||||
return &b.addInstallFile(switch (out_mode) {
|
||||
|
||||
@@ -30,6 +30,8 @@ if(WEBKIT_LOCAL)
|
||||
${WEBKIT_PATH}
|
||||
${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
|
||||
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
|
||||
${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector
|
||||
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
|
||||
${WEBKIT_PATH}/bmalloc/Headers
|
||||
${WEBKIT_PATH}/WTF/Headers
|
||||
${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector
|
||||
|
||||
480
src/analyze_transpiled_module.zig
Normal file
480
src/analyze_transpiled_module.zig
Normal file
@@ -0,0 +1,480 @@
|
||||
const std = @import("std");
|
||||
const bun = @import("bun.zig");
|
||||
const js_ast = bun.JSAst;
|
||||
const Ast = js_ast.Ast;
|
||||
|
||||
/// Tag for one record in a ModuleInfo record stream. The doc comment on each
/// tag lists the StringID operands that follow it in the operand buffer.
pub const RecordKind = enum(u8) {
    /// var_name
    declared_variable,
    /// let_name
    lexical_variable,
    /// module_name, import_name, local_name
    import_info_single,
    /// module_name, import_name, local_name
    import_info_single_type_script,
    /// module_name, import_name = '*', local_name
    import_info_namespace,
    /// export_name, import_name, module_name
    export_info_indirect,
    /// export_name, local_name, padding (for local => indirect conversion)
    export_info_local,
    /// export_name, module_name
    export_info_namespace,
    /// module_name
    export_info_star,
    _,

    /// Number of StringID operands this record kind consumes from the buffer.
    /// Returns error.InvalidRecordKind for an unnamed (corrupt) tag value.
    pub fn len(record: RecordKind) !usize {
        return switch (record) {
            .declared_variable,
            .lexical_variable,
            .export_info_star,
            => 1,
            .export_info_namespace => 2,
            .import_info_single,
            .import_info_single_type_script,
            .import_info_namespace,
            .export_info_indirect,
            .export_info_local,
            => 3,
            else => error.InvalidRecordKind,
        };
    }
};
|
||||
|
||||
/// Per-module boolean flags, bit-packed into one byte so they can be
/// serialized directly (see ModuleInfoDeserialized.serialize).
const Flags = packed struct(u8) {
    /// True if the module source references `import.meta`.
    contains_import_meta: bool,
    /// True if the module was parsed as TypeScript.
    is_typescript: bool,
    /// Reserved bits; kept zero so the serialized byte is stable.
    _padding: u6 = 0,
};
|
||||
/// Read-only view of a module's scope/import/export metadata, either borrowed
/// from a finalized ModuleInfo or parsed from serialized bytes by create().
pub const ModuleInfoDeserialized = struct {
    /// Concatenated wtf-8 bytes of all interned strings.
    strings_buf: []const u8,
    /// Byte length of each interned string, in intern (StringID) order.
    strings_lens: []align(1) const u32,
    /// Specifiers of requested modules (parallel with values).
    requested_modules_keys: []align(1) const StringID,
    /// Fetch parameters for each requested module (parallel with keys).
    requested_modules_values: []align(1) const ModuleInfo.FetchParameters,
    /// StringID operand stream; `record_kinds` says how to group it.
    buffer: []align(1) const StringID,
    /// One tag per record; each consumes RecordKind.len() operands.
    record_kinds: []align(1) const RecordKind,
    flags: Flags,
    /// Who owns the memory the slices above point into.
    owner: union(enum) {
        /// This struct is the `_deserialized` field of a finalized ModuleInfo,
        /// which owns all the backing memory.
        module_info,
        /// The slices point into `slice`, freed with `allocator` in deinit().
        allocated_slice: struct {
            slice: []const u8,
            allocator: std.mem.Allocator,
        },
    },
    dead: bool = false,

    /// Release the backing memory. For `module_info` this destroys the whole
    /// enclosing ModuleInfo; for `allocated_slice` it frees the serialized
    /// bytes and this struct itself.
    pub fn deinit(self: *ModuleInfoDeserialized) void {
        switch (self.owner) {
            .module_info => {
                const mi: *ModuleInfo = @fieldParentPtr("_deserialized", self);
                mi.destroy();
            },
            .allocated_slice => |as| {
                as.allocator.free(as.slice);
                as.allocator.destroy(self);
            },
        }
    }

    /// Consume `len` bytes from the front of `rem`, or error if short.
    inline fn eat(rem: *[]const u8, len: usize) ![]const u8 {
        if (rem.*.len < len) return error.BadModuleInfo;
        const res = rem.*[0..len];
        rem.* = rem.*[len..];
        return res;
    }
    /// Like eat(), but with a comptime length so the result is a fixed-size
    /// array pointer (directly usable with std.mem.readInt).
    inline fn eatC(rem: *[]const u8, comptime len: usize) !*const [len]u8 {
        if (rem.*.len < len) return error.BadModuleInfo;
        const res = rem.*[0..len];
        rem.* = rem.*[len..];
        return res;
    }

    /// Parse a serialized ModuleInfo (the format written by serialize()).
    ///
    /// Takes ownership of `source` on success: the returned struct's slices
    /// point directly into `source`, and deinit() frees it with `gpa`. On
    /// error, the caller keeps ownership of `source`.
    ///
    /// Note: the previous implementation duplicated `source`, parsed the
    /// duplicate, but recorded `source` itself as the owned slice — leaking
    /// the duplicate that all returned slices pointed into. It also ran
    /// `res.deinit()` on a still-undefined struct on the error path and freed
    /// an already-advanced sub-slice. Parsing `source` in place fixes all
    /// three without changing the caller-visible contract.
    pub fn create(source: []const u8, gpa: std.mem.Allocator) !*ModuleInfoDeserialized {
        var rem: []const u8 = source;
        const res = try gpa.create(ModuleInfoDeserialized);
        // res.* stays undefined until the final assignment below, so the
        // error path must only destroy the allocation, never call deinit().
        errdefer gpa.destroy(res);

        const record_kinds_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const record_kinds = std.mem.bytesAsSlice(RecordKind, try eat(&rem, record_kinds_len * @sizeOf(RecordKind)));
        const buffer_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const buffer = std.mem.bytesAsSlice(StringID, try eat(&rem, buffer_len * @sizeOf(StringID)));
        const requested_modules_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const requested_modules_keys = std.mem.bytesAsSlice(StringID, try eat(&rem, requested_modules_len * @sizeOf(StringID)));
        const requested_modules_values = std.mem.bytesAsSlice(ModuleInfo.FetchParameters, try eat(&rem, requested_modules_len * @sizeOf(ModuleInfo.FetchParameters)));
        const flags: Flags = @bitCast((try eatC(&rem, 1))[0]);
        const strings_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
        const strings_lens = std.mem.bytesAsSlice(u32, try eat(&rem, strings_len * @sizeOf(u32)));
        // Everything remaining is the concatenated string bytes.
        const strings_buf = rem;

        res.* = .{
            .strings_buf = strings_buf,
            .strings_lens = strings_lens,
            .requested_modules_keys = requested_modules_keys,
            .requested_modules_values = requested_modules_values,
            .buffer = buffer,
            .record_kinds = record_kinds,
            .flags = flags,
            .owner = .{ .allocated_slice = .{
                .slice = source,
                .allocator = gpa,
            } },
        };
        return res;
    }

    /// Write this module info in the binary format understood by create().
    /// Lengths are u32 little-endian; slice payloads are raw bytes.
    pub fn serialize(self: *const ModuleInfoDeserialized, writer: anytype) !void {
        try writer.writeInt(u32, @truncate(self.record_kinds.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.record_kinds));
        try writer.writeInt(u32, @truncate(self.buffer.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.buffer));

        try writer.writeInt(u32, @truncate(self.requested_modules_keys.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.requested_modules_keys));
        try writer.writeAll(std.mem.sliceAsBytes(self.requested_modules_values));

        try writer.writeInt(u8, @bitCast(self.flags), .little);

        try writer.writeInt(u32, @truncate(self.strings_lens.len), .little);
        try writer.writeAll(std.mem.sliceAsBytes(self.strings_lens));
        try writer.writeAll(self.strings_buf);
    }
};
|
||||
|
||||
/// Byte offset of an interned string's first byte within
/// ModuleInfo.strings_buf (see ModuleInfo.str, which stores
/// `strings_buf.items.len` here when inserting).
const StringMapKey = enum(u32) {
    _,
};
|
||||
/// Adapted hash-map context for looking up candidate byte strings against
/// strings already interned in a ModuleInfo string table.
pub const StringContext = struct {
    strings_buf: []const u8,
    strings_lens: []const u32,

    /// Hash the raw bytes of the string being looked up.
    pub fn hash(_: @This(), s: []const u8) u32 {
        const wide = std.hash.Wyhash.hash(0, s);
        return @as(u32, @truncate(wide));
    }
    /// `item_key` is the interned string's byte offset in `strings_buf`;
    /// `item_i` indexes its length in `strings_lens`.
    pub fn eql(ctx: @This(), fetch_key: []const u8, item_key: StringMapKey, item_i: usize) bool {
        const start = @intFromEnum(item_key);
        const existing = ctx.strings_buf[start..][0..ctx.strings_lens[item_i]];
        return bun.strings.eqlLong(fetch_key, existing, true);
    }
};
|
||||
|
||||
/// Mutable builder for a module's scope/import/export metadata. Populated
/// while analyzing a transpiled module, then converted into the read-only
/// ModuleInfoDeserialized view by finalize().
pub const ModuleInfo = struct {
    /// all strings in wtf-8. index in hashmap = StringID
    gpa: std.mem.Allocator,
    // Interning table: the key stores the string's byte offset into
    // strings_buf, and the entry's array index is its StringID. Lookups go
    // through StringContext (adapted context).
    strings_map: std.ArrayHashMapUnmanaged(StringMapKey, void, void, true),
    strings_buf: std.ArrayListUnmanaged(u8),
    strings_lens: std.ArrayListUnmanaged(u32),
    // Modules requested via import / export-from, with fetch attributes.
    requested_modules: std.AutoArrayHashMap(StringID, FetchParameters),
    // StringID operand stream, grouped by the parallel record_kinds tags.
    buffer: std.ArrayList(StringID),
    record_kinds: std.ArrayList(RecordKind),
    flags: Flags,
    // Export names recorded so far; used to drop duplicate exports.
    exported_names: std.AutoArrayHashMapUnmanaged(StringID, void),
    finalized: bool = false,

    /// only initialized after .finalize() is called
    _deserialized: ModuleInfoDeserialized,

    /// Borrow the read-only view; only valid after finalize().
    pub fn asDeserialized(self: *ModuleInfo) *ModuleInfoDeserialized {
        bun.assert(self.finalized);
        return &self._deserialized;
    }

    /// How a requested module should be fetched. Shares the u32 value space
    /// with StringID: values below the reserved top constants are
    /// host-defined attribute strings.
    pub const FetchParameters = enum(u32) {
        none = std.math.maxInt(u32),
        javascript = std.math.maxInt(u32) - 1,
        webassembly = std.math.maxInt(u32) - 2,
        json = std.math.maxInt(u32) - 3,
        _, // host_defined: cast to StringID
        pub fn hostDefined(value: StringID) FetchParameters {
            return @enumFromInt(@intFromEnum(value));
        }
    };

    pub const VarKind = enum { declared, lexical };
    /// Record a top-level variable in the matching environment kind.
    pub fn addVar(self: *ModuleInfo, name: StringID, kind: VarKind) !void {
        switch (kind) {
            .declared => try self.addDeclaredVariable(name),
            .lexical => try self.addLexicalVariable(name),
        }
    }

    /// Append one record: its kind tag plus its StringID operands.
    fn _addRecord(self: *ModuleInfo, kind: RecordKind, data: []const StringID) !void {
        bun.assert(!self.finalized);
        // kind is always a named tag at every call site, so len() cannot fail.
        bun.assert(data.len == kind.len() catch unreachable);
        try self.record_kinds.append(kind);
        try self.buffer.appendSlice(data);
    }
    pub fn addDeclaredVariable(self: *ModuleInfo, id: StringID) !void {
        try self._addRecord(.declared_variable, &.{id});
    }
    pub fn addLexicalVariable(self: *ModuleInfo, id: StringID) !void {
        try self._addRecord(.lexical_variable, &.{id});
    }
    /// Record `import { import_name as local_name } from module_name`;
    /// type-only imports get the TypeScript record kind.
    pub fn addImportInfoSingle(self: *ModuleInfo, module_name: StringID, import_name: StringID, local_name: StringID, only_used_as_type: bool) !void {
        try self._addRecord(if (only_used_as_type) .import_info_single_type_script else .import_info_single, &.{ module_name, import_name, local_name });
    }
    /// Record `import * as local_name from module_name`.
    pub fn addImportInfoNamespace(self: *ModuleInfo, module_name: StringID, local_name: StringID) !void {
        try self._addRecord(.import_info_namespace, &.{ module_name, try self.str("*"), local_name });
    }
    pub fn addExportInfoIndirect(self: *ModuleInfo, export_name: StringID, import_name: StringID, module_name: StringID) !void {
        if (try self._hasOrAddExportedName(export_name)) return; // a syntax error will be emitted later in this case
        try self._addRecord(.export_info_indirect, &.{ export_name, import_name, module_name });
    }
    pub fn addExportInfoLocal(self: *ModuleInfo, export_name: StringID, local_name: StringID) !void {
        if (try self._hasOrAddExportedName(export_name)) return; // a syntax error will be emitted later in this case
        // The third slot is padding so finalize() can rewrite this record to
        // export_info_indirect in place without resizing the buffer.
        try self._addRecord(.export_info_local, &.{ export_name, local_name, @enumFromInt(std.math.maxInt(u32)) });
    }
    pub fn addExportInfoNamespace(self: *ModuleInfo, export_name: StringID, module_name: StringID) !void {
        if (try self._hasOrAddExportedName(export_name)) return; // a syntax error will be emitted later in this case
        try self._addRecord(.export_info_namespace, &.{ export_name, module_name });
    }
    pub fn addExportInfoStar(self: *ModuleInfo, module_name: StringID) !void {
        try self._addRecord(.export_info_star, &.{module_name});
    }

    /// Returns true if `name` was already exported (and records it otherwise).
    pub fn _hasOrAddExportedName(self: *ModuleInfo, name: StringID) !bool {
        if (try self.exported_names.fetchPut(self.gpa, name, {}) != null) return true;
        return false;
    }

    /// Heap-allocate and initialize a ModuleInfo; free with destroy().
    pub fn create(gpa: std.mem.Allocator, is_typescript: bool) !*ModuleInfo {
        const res = try gpa.create(ModuleInfo);
        res.* = ModuleInfo.init(gpa, is_typescript);
        return res;
    }
    fn init(allocator: std.mem.Allocator, is_typescript: bool) ModuleInfo {
        return .{
            .gpa = allocator,
            .strings_map = .{},
            .strings_buf = .{},
            .strings_lens = .{},
            .exported_names = .{},
            .requested_modules = std.AutoArrayHashMap(StringID, FetchParameters).init(allocator),
            .buffer = std.ArrayList(StringID).init(allocator),
            .record_kinds = std.ArrayList(RecordKind).init(allocator),
            .flags = .{ .contains_import_meta = false, .is_typescript = is_typescript },
            ._deserialized = undefined,
        };
    }
    fn deinit(self: *ModuleInfo) void {
        self.strings_map.deinit(self.gpa);
        self.strings_buf.deinit(self.gpa);
        self.strings_lens.deinit(self.gpa);
        self.exported_names.deinit(self.gpa);
        self.requested_modules.deinit();
        self.buffer.deinit();
        self.record_kinds.deinit();
    }
    pub fn destroy(self: *ModuleInfo) void {
        // Copy the allocator out first: deinit() invalidates self's contents.
        const alloc = self.gpa;
        self.deinit();
        alloc.destroy(self);
    }
    /// Intern `value` and return its StringID (existing or newly assigned).
    pub fn str(self: *ModuleInfo, value: []const u8) !StringID {
        // Reserve capacity up front so the AssumeCapacity appends below
        // cannot fail after the map entry has already been inserted.
        try self.strings_buf.ensureUnusedCapacity(self.gpa, value.len);
        try self.strings_lens.ensureUnusedCapacity(self.gpa, 1);
        const gpres = try self.strings_map.getOrPutAdapted(self.gpa, value, StringContext{
            .strings_buf = self.strings_buf.items,
            .strings_lens = self.strings_lens.items,
        });
        if (gpres.found_existing) return @enumFromInt(@as(u32, @intCast(gpres.index)));

        // The key stores the string's byte offset; the map index is the ID.
        gpres.key_ptr.* = @enumFromInt(@as(u32, @truncate(self.strings_buf.items.len)));
        gpres.value_ptr.* = {};
        self.strings_buf.appendSliceAssumeCapacity(value);
        self.strings_lens.appendAssumeCapacity(@as(u32, @truncate(value.len)));
        return @enumFromInt(@as(u32, @intCast(gpres.index)));
    }
    /// Record that `import_record_path` is requested with the given fetch
    /// parameters.
    pub fn requestModule(self: *ModuleInfo, import_record_path: StringID, fetch_parameters: FetchParameters) !void {
        // jsc only records the attributes of the first import with the given import_record_path. so only put if not exists.
        const gpres = try self.requested_modules.getOrPut(import_record_path);
        if (!gpres.found_existing) gpres.value_ptr.* = fetch_parameters;
    }

    /// find any exports marked as 'local' that are actually 'indirect' and fix them
    pub fn finalize(self: *ModuleInfo) !void {
        bun.assert(!self.finalized);
        var local_name_to_module_name = std.AutoArrayHashMap(StringID, struct { module_name: StringID, import_name: StringID }).init(bun.default_allocator);
        defer local_name_to_module_name.deinit();
        {
            // Pass 1: map each imported local name to its module/import pair.
            var i: usize = 0;
            for (self.record_kinds.items) |k| {
                if (k == .import_info_single or k == .import_info_single_type_script) {
                    try local_name_to_module_name.put(self.buffer.items[i + 2], .{ .module_name = self.buffer.items[i], .import_name = self.buffer.items[i + 1] });
                }
                // Only named tags were ever appended, so len() cannot fail.
                i += k.len() catch unreachable;
            }
        }

        {
            // Pass 2: rewrite local exports of imported names as indirect
            // exports, using the padding slot reserved by addExportInfoLocal.
            var i: usize = 0;
            for (self.record_kinds.items) |*k| {
                if (k.* == .export_info_local) {
                    if (local_name_to_module_name.get(self.buffer.items[i + 1])) |ip| {
                        k.* = .export_info_indirect;
                        self.buffer.items[i + 1] = ip.import_name;
                        self.buffer.items[i + 2] = ip.module_name;
                    }
                }
                i += k.len() catch unreachable;
            }
        }

        // Build the read-only view over this ModuleInfo's own backing memory.
        self._deserialized = .{
            .strings_buf = self.strings_buf.items,
            .strings_lens = self.strings_lens.items,
            .requested_modules_keys = self.requested_modules.keys(),
            .requested_modules_values = self.requested_modules.values(),
            .buffer = self.buffer.items,
            .record_kinds = self.record_kinds.items,
            .flags = self.flags,
            .owner = .module_info,
        };

        self.finalized = true;
    }
};
|
||||
/// Index of an interned string (its insertion position in
/// ModuleInfo.strings_map). The two top u32 values are reserved sentinels
/// and never refer to real table entries.
pub const StringID = enum(u32) {
    // NOTE(review): sentinel semantics inferred from the names only — the
    // consumers of these two values are not visible in this file.
    star_default = std.math.maxInt(u32),
    star_namespace = std.math.maxInt(u32) - 1,
    _,
};
|
||||
|
||||
/// C ABI: format a diff of the expected vs. received strings with
/// DiffFormatter and print it to stderr. Print failures are ignored.
export fn zig__renderDiff(expected_ptr: [*:0]const u8, expected_len: usize, received_ptr: [*:0]const u8, received_len: usize, globalThis: *bun.JSC.JSGlobalObject) void {
    const DiffFormatter = @import("bun.js/test/diff_format.zig").DiffFormatter;
    const received = received_ptr[0..received_len];
    const expected = expected_ptr[0..expected_len];
    const diff = DiffFormatter{
        .received_string = received,
        .expected_string = expected,
        .globalThis = globalThis,
    };
    const err_writer = std.io.getStdErr().writer();
    err_writer.print("DIFF:\n{}\n", .{diff}) catch {};
}
|
||||
|
||||
/// C ABI: rebuild a JSC JSModuleRecord from deserialized module info.
/// Always consumes (deinits) `res`, even on failure. Returns null when the
/// serialized data is malformed (bad record kind or truncated operands).
export fn zig__ModuleInfoDeserialized__toJSModuleRecord(
    globalObject: *bun.JSC.JSGlobalObject,
    vm: *bun.JSC.VM,
    module_key: *const IdentifierArray,
    source_code: *const SourceCode,
    declared_variables: *VariableEnvironment,
    lexical_variables: *VariableEnvironment,
    res: *ModuleInfoDeserialized,
) ?*JSModuleRecord {
    if (res.dead) @panic("ModuleInfoDeserialized already deinit()ed");
    defer res.deinit();

    // Materialize every interned string as a JSC identifier, indexed by
    // StringID; strings are laid out back-to-back in strings_buf.
    var identifiers = IdentifierArray.create(res.strings_lens.len);
    defer identifiers.destroy();
    var offset: usize = 0;
    for (0.., res.strings_lens) |index, len| {
        if (res.strings_buf.len < offset + len) return null; // error!
        const sub = res.strings_buf[offset..][0..len];
        identifiers.setFromUtf8(index, vm, sub);
        offset += len;
    }

    {
        // Pass 1: validate every record (kind and operand bounds) and
        // register declared/lexical variables, which must be populated
        // before the record itself is created below.
        var i: usize = 0;
        for (res.record_kinds) |k| {
            // `catch 0` keeps the bounds check meaningful even for an
            // invalid kind; the switch's else arm then rejects it.
            if (i + (k.len() catch 0) > res.buffer.len) return null;
            switch (k) {
                .declared_variable => declared_variables.add(vm, identifiers, res.buffer[i]),
                .lexical_variable => lexical_variables.add(vm, identifiers, res.buffer[i]),
                .import_info_single, .import_info_single_type_script, .import_info_namespace, .export_info_indirect, .export_info_local, .export_info_namespace, .export_info_star => {},
                else => return null,
            }
            i += k.len() catch unreachable; // handled above
        }
    }

    const module_record = JSModuleRecord.create(globalObject, vm, module_key, source_code, declared_variables, lexical_variables, res.flags.contains_import_meta, res.flags.is_typescript);

    // Register each requested module with its fetch attributes; any value
    // outside the named tags is a host-defined attribute StringID.
    for (res.requested_modules_keys, res.requested_modules_values) |reqk, reqv| {
        switch (reqv) {
            .none => module_record.addRequestedModuleNullAttributesPtr(identifiers, reqk),
            .javascript => module_record.addRequestedModuleJavaScript(identifiers, reqk),
            .webassembly => module_record.addRequestedModuleWebAssembly(identifiers, reqk),
            .json => module_record.addRequestedModuleJSON(identifiers, reqk),
            else => |uv| module_record.addRequestedModuleHostDefined(identifiers, reqk, @enumFromInt(@intFromEnum(uv))),
        }
    }

    {
        // Pass 2: add import/export entries. All kinds and bounds were
        // already validated in pass 1, so failures here are unreachable.
        var i: usize = 0;
        for (res.record_kinds) |k| {
            if (i + (k.len() catch unreachable) > res.buffer.len) unreachable; // handled above
            switch (k) {
                .declared_variable, .lexical_variable => {},
                .import_info_single => module_record.addImportEntrySingle(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
                .import_info_single_type_script => module_record.addImportEntrySingleTypeScript(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
                .import_info_namespace => module_record.addImportEntryNamespace(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
                .export_info_indirect => module_record.addIndirectExport(identifiers, res.buffer[i + 0], res.buffer[i + 1], res.buffer[i + 2]),
                .export_info_local => module_record.addLocalExport(identifiers, res.buffer[i], res.buffer[i + 1]),
                .export_info_namespace => module_record.addNamespaceExport(identifiers, res.buffer[i], res.buffer[i + 1]),
                .export_info_star => module_record.addStarExport(identifiers, res.buffer[i]),
                else => unreachable, // handled above
            }
            i += k.len() catch unreachable; // handled above
        }
    }

    return module_record;
}
|
||||
/// C ABI: free a ModuleInfo created on the Zig side.
export fn zig__ModuleInfo__destroy(info: *ModuleInfo) void {
    // Use the allocator the ModuleInfo was created with (info.gpa) rather
    // than assuming bun.default_allocator — the previous code freed the
    // struct with the default allocator, which is only correct when the two
    // happen to match.
    info.destroy();
}
|
||||
|
||||
/// Opaque handle to a JSC VariableEnvironment, owned by the C++ side.
const VariableEnvironment = opaque {
    extern fn JSC__VariableEnvironment__add(environment: *VariableEnvironment, vm: *bun.JSC.VM, identifier_array: *IdentifierArray, identifier_index: StringID) void;
    /// Add the identifier at `identifier_index` to this environment.
    pub const add = JSC__VariableEnvironment__add;
};
|
||||
/// Opaque handle to a C++-side array of JSC identifiers, indexed by StringID.
const IdentifierArray = opaque {
    extern fn JSC__IdentifierArray__create(len: usize) *IdentifierArray;
    /// Allocate an identifier array with `len` slots; pair with destroy().
    pub const create = JSC__IdentifierArray__create;

    extern fn JSC__IdentifierArray__destroy(identifier_array: *IdentifierArray) void;
    pub const destroy = JSC__IdentifierArray__destroy;

    extern fn JSC__IdentifierArray__setFromUtf8(identifier_array: *IdentifierArray, n: usize, vm: *bun.JSC.VM, str: [*]const u8, len: usize) void;
    /// Set slot `n` from a utf-8 byte slice (split into ptr+len for the FFI).
    pub fn setFromUtf8(self: *IdentifierArray, n: usize, vm: *bun.JSC.VM, str: []const u8) void {
        JSC__IdentifierArray__setFromUtf8(self, n, vm, str.ptr, str.len);
    }
};
|
||||
/// Opaque handle to a JSC SourceCode, owned by the C++ side.
const SourceCode = opaque {};
|
||||
/// Opaque handle to a JSC JSModuleRecord, with thin Zig aliases over the
/// extern C ABI used to populate it (see zig__ModuleInfoDeserialized__toJSModuleRecord).
const JSModuleRecord = opaque {
    extern fn JSC_JSModuleRecord__create(global_object: *bun.JSC.JSGlobalObject, vm: *bun.JSC.VM, module_key: *const IdentifierArray, source_code: *const SourceCode, declared_variables: *VariableEnvironment, lexical_variables: *VariableEnvironment, has_import_meta: bool, is_typescript: bool) *JSModuleRecord;
    pub const create = JSC_JSModuleRecord__create;

    // Accessors for the record's variable environments.
    extern fn JSC_JSModuleRecord__declaredVariables(module_record: *JSModuleRecord) *VariableEnvironment;
    pub const declaredVariables = JSC_JSModuleRecord__declaredVariables;
    extern fn JSC_JSModuleRecord__lexicalVariables(module_record: *JSModuleRecord) *VariableEnvironment;
    pub const lexicalVariables = JSC_JSModuleRecord__lexicalVariables;

    // Export entries. StringID arguments index into the identifier array.
    extern fn JSC_JSModuleRecord__addIndirectExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, import_name: StringID, module_name: StringID) void;
    pub const addIndirectExport = JSC_JSModuleRecord__addIndirectExport;
    extern fn JSC_JSModuleRecord__addLocalExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, local_name: StringID) void;
    pub const addLocalExport = JSC_JSModuleRecord__addLocalExport;
    extern fn JSC_JSModuleRecord__addNamespaceExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, module_name: StringID) void;
    pub const addNamespaceExport = JSC_JSModuleRecord__addNamespaceExport;
    extern fn JSC_JSModuleRecord__addStarExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addStarExport = JSC_JSModuleRecord__addStarExport;

    // Requested modules, one variant per FetchParameters tag.
    extern fn JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleNullAttributesPtr = JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr;
    extern fn JSC_JSModuleRecord__addRequestedModuleJavaScript(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleJavaScript = JSC_JSModuleRecord__addRequestedModuleJavaScript;
    extern fn JSC_JSModuleRecord__addRequestedModuleWebAssembly(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleWebAssembly = JSC_JSModuleRecord__addRequestedModuleWebAssembly;
    extern fn JSC_JSModuleRecord__addRequestedModuleJSON(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
    pub const addRequestedModuleJSON = JSC_JSModuleRecord__addRequestedModuleJSON;
    extern fn JSC_JSModuleRecord__addRequestedModuleHostDefined(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID, host_defined_import_type: StringID) void;
    pub const addRequestedModuleHostDefined = JSC_JSModuleRecord__addRequestedModuleHostDefined;

    // Import entries.
    extern fn JSC_JSModuleRecord__addImportEntrySingle(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
    pub const addImportEntrySingle = JSC_JSModuleRecord__addImportEntrySingle;
    extern fn JSC_JSModuleRecord__addImportEntrySingleTypeScript(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
    pub const addImportEntrySingleTypeScript = JSC_JSModuleRecord__addImportEntrySingleTypeScript;
    extern fn JSC_JSModuleRecord__addImportEntryNamespace(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
    pub const addImportEntryNamespace = JSC_JSModuleRecord__addImportEntryNamespace;
};
|
||||
|
||||
/// C ABI: write a NUL-terminated message to stderr followed by a newline.
/// Write failures are ignored.
export fn zig_log(msg: [*:0]const u8) void {
    const text = std.mem.span(msg);
    std.io.getStdErr().writer().print("{s}\n", .{text}) catch {};
}
|
||||
@@ -10,7 +10,9 @@
|
||||
/// Version 11: Fix \uFFFF printing regression
|
||||
/// Version 12: "use strict"; makes it CommonJS if we otherwise don't know which one to pick.
|
||||
/// Version 13: Hoist `import.meta.require` definition, see #15738
|
||||
const expected_version = 13;
|
||||
/// Version 14: Include module info with an ES Module, see #15758
|
||||
/// Version 15: Support re-exporting typescript types, see #16296
|
||||
const expected_version = 15;
|
||||
|
||||
const bun = @import("root").bun;
|
||||
const std = @import("std");
|
||||
@@ -33,6 +35,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
|
||||
sourcemap_allocator: std.mem.Allocator,
|
||||
output_code_allocator: std.mem.Allocator,
|
||||
esm_record_allocator: std.mem.Allocator,
|
||||
|
||||
const seed = 42;
|
||||
pub const Metadata = struct {
|
||||
@@ -53,6 +56,10 @@ pub const RuntimeTranspilerCache = struct {
|
||||
sourcemap_byte_length: u64 = 0,
|
||||
sourcemap_hash: u64 = 0,
|
||||
|
||||
esm_record_byte_offset: u64 = 0,
|
||||
esm_record_byte_length: u64 = 0,
|
||||
esm_record_hash: u64 = 0,
|
||||
|
||||
pub const size = brk: {
|
||||
var count: usize = 0;
|
||||
const meta: Metadata = .{};
|
||||
@@ -79,6 +86,10 @@ pub const RuntimeTranspilerCache = struct {
|
||||
try writer.writeInt(u64, this.sourcemap_byte_offset, .little);
|
||||
try writer.writeInt(u64, this.sourcemap_byte_length, .little);
|
||||
try writer.writeInt(u64, this.sourcemap_hash, .little);
|
||||
|
||||
try writer.writeInt(u64, this.esm_record_byte_offset, .little);
|
||||
try writer.writeInt(u64, this.esm_record_byte_length, .little);
|
||||
try writer.writeInt(u64, this.esm_record_hash, .little);
|
||||
}
|
||||
|
||||
pub fn decode(this: *Metadata, reader: anytype) !void {
|
||||
@@ -103,6 +114,10 @@ pub const RuntimeTranspilerCache = struct {
|
||||
this.sourcemap_byte_length = try reader.readInt(u64, .little);
|
||||
this.sourcemap_hash = try reader.readInt(u64, .little);
|
||||
|
||||
this.esm_record_byte_offset = try reader.readInt(u64, .little);
|
||||
this.esm_record_byte_length = try reader.readInt(u64, .little);
|
||||
this.esm_record_hash = try reader.readInt(u64, .little);
|
||||
|
||||
switch (this.module_type) {
|
||||
.esm, .cjs => {},
|
||||
// Invalid module type
|
||||
@@ -121,7 +136,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
metadata: Metadata,
|
||||
output_code: OutputCode = .{ .utf8 = "" },
|
||||
sourcemap: []const u8 = "",
|
||||
|
||||
esm_record: []const u8 = "",
|
||||
pub const OutputCode = union(enum) {
|
||||
utf8: []const u8,
|
||||
string: bun.String,
|
||||
@@ -143,11 +158,14 @@ pub const RuntimeTranspilerCache = struct {
|
||||
}
|
||||
};
|
||||
|
||||
pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator) void {
|
||||
pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator, esm_record_allocator: std.mem.Allocator) void {
|
||||
this.output_code.deinit(output_code_allocator);
|
||||
if (this.sourcemap.len > 0) {
|
||||
sourcemap_allocator.free(this.sourcemap);
|
||||
}
|
||||
if (this.esm_record.len > 0) {
|
||||
esm_record_allocator.free(this.esm_record);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn save(
|
||||
@@ -157,6 +175,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_hash: u64,
|
||||
features_hash: u64,
|
||||
sourcemap: []const u8,
|
||||
esm_record: []const u8,
|
||||
output_code: OutputCode,
|
||||
exports_kind: bun.JSAst.ExportsKind,
|
||||
) !void {
|
||||
@@ -202,6 +221,8 @@ pub const RuntimeTranspilerCache = struct {
|
||||
.output_byte_offset = Metadata.size,
|
||||
.output_byte_length = output_bytes.len,
|
||||
.sourcemap_byte_offset = Metadata.size + output_bytes.len,
|
||||
.esm_record_byte_offset = Metadata.size + output_bytes.len + sourcemap.len,
|
||||
.esm_record_byte_length = esm_record.len,
|
||||
};
|
||||
|
||||
metadata.output_hash = hash(output_bytes);
|
||||
@@ -220,20 +241,26 @@ pub const RuntimeTranspilerCache = struct {
|
||||
break :brk metadata_buf[0..metadata_stream.pos];
|
||||
};
|
||||
|
||||
const vecs: []const bun.PlatformIOVecConst = if (output_bytes.len > 0)
|
||||
&.{
|
||||
bun.platformIOVecConstCreate(metadata_bytes),
|
||||
bun.platformIOVecConstCreate(output_bytes),
|
||||
bun.platformIOVecConstCreate(sourcemap),
|
||||
}
|
||||
else
|
||||
&.{
|
||||
bun.platformIOVecConstCreate(metadata_bytes),
|
||||
bun.platformIOVecConstCreate(sourcemap),
|
||||
};
|
||||
var vecs_buf: [4]bun.PlatformIOVecConst = undefined;
|
||||
var vecs_i: usize = 0;
|
||||
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(metadata_bytes);
|
||||
vecs_i += 1;
|
||||
if (output_bytes.len > 0) {
|
||||
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(output_bytes);
|
||||
vecs_i += 1;
|
||||
}
|
||||
if (sourcemap.len > 0) {
|
||||
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(sourcemap);
|
||||
vecs_i += 1;
|
||||
}
|
||||
if (esm_record.len > 0) {
|
||||
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(esm_record);
|
||||
vecs_i += 1;
|
||||
}
|
||||
const vecs: []const bun.PlatformIOVecConst = vecs_buf[0..vecs_i];
|
||||
|
||||
var position: isize = 0;
|
||||
const end_position = Metadata.size + output_bytes.len + sourcemap.len;
|
||||
const end_position = Metadata.size + output_bytes.len + sourcemap.len + esm_record.len;
|
||||
|
||||
if (bun.Environment.allow_assert) {
|
||||
var total: usize = 0;
|
||||
@@ -243,7 +270,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
}
|
||||
bun.assert(end_position == total);
|
||||
}
|
||||
bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size)));
|
||||
bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size + esm_record.len)));
|
||||
|
||||
bun.C.preallocate_file(tmpfile.fd.cast(), 0, @intCast(end_position)) catch {};
|
||||
while (position < end_position) {
|
||||
@@ -264,6 +291,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
file: std.fs.File,
|
||||
sourcemap_allocator: std.mem.Allocator,
|
||||
output_code_allocator: std.mem.Allocator,
|
||||
esm_record_allocator: std.mem.Allocator,
|
||||
) !void {
|
||||
const stat_size = try file.getEndPos();
|
||||
if (stat_size < Metadata.size + this.metadata.output_byte_length + this.metadata.sourcemap_byte_length) {
|
||||
@@ -339,6 +367,17 @@ pub const RuntimeTranspilerCache = struct {
|
||||
|
||||
this.sourcemap = sourcemap;
|
||||
}
|
||||
|
||||
if (this.metadata.esm_record_byte_length > 0) {
|
||||
const esm_record = try esm_record_allocator.alloc(u8, this.metadata.esm_record_byte_length);
|
||||
errdefer esm_record_allocator.free(esm_record);
|
||||
const read_bytes = try file.preadAll(esm_record, this.metadata.esm_record_byte_offset);
|
||||
if (read_bytes != this.metadata.esm_record_byte_length) {
|
||||
return error.MissingData;
|
||||
}
|
||||
|
||||
this.esm_record = esm_record;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -456,6 +495,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_stat_size: u64,
|
||||
sourcemap_allocator: std.mem.Allocator,
|
||||
output_code_allocator: std.mem.Allocator,
|
||||
esm_record_allocator: std.mem.Allocator,
|
||||
) !Entry {
|
||||
var tracer = bun.tracy.traceNamed(@src(), "RuntimeTranspilerCache.fromFile");
|
||||
defer tracer.end();
|
||||
@@ -470,6 +510,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_stat_size,
|
||||
sourcemap_allocator,
|
||||
output_code_allocator,
|
||||
esm_record_allocator,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -480,6 +521,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_stat_size: u64,
|
||||
sourcemap_allocator: std.mem.Allocator,
|
||||
output_code_allocator: std.mem.Allocator,
|
||||
esm_record_allocator: std.mem.Allocator,
|
||||
) !Entry {
|
||||
var metadata_bytes_buf: [Metadata.size * 2]u8 = undefined;
|
||||
const cache_fd = try bun.sys.open(cache_file_path.sliceAssumeZ(), bun.O.RDONLY, 0).unwrap();
|
||||
@@ -511,7 +553,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
return error.MismatchedFeatureHash;
|
||||
}
|
||||
|
||||
try entry.load(file, sourcemap_allocator, output_code_allocator);
|
||||
try entry.load(file, sourcemap_allocator, output_code_allocator, esm_record_allocator);
|
||||
|
||||
return entry;
|
||||
}
|
||||
@@ -528,6 +570,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_hash: u64,
|
||||
features_hash: u64,
|
||||
sourcemap: []const u8,
|
||||
esm_record: []const u8,
|
||||
source_code: bun.String,
|
||||
exports_kind: bun.JSAst.ExportsKind,
|
||||
) !void {
|
||||
@@ -567,6 +610,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_hash,
|
||||
features_hash,
|
||||
sourcemap,
|
||||
esm_record,
|
||||
output_code,
|
||||
exports_kind,
|
||||
);
|
||||
@@ -600,7 +644,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
parser_options.hashForRuntimeTranspiler(&features_hasher, used_jsx);
|
||||
this.features_hash = features_hasher.final();
|
||||
|
||||
this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator) catch |err| {
|
||||
this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator) catch |err| {
|
||||
debug("get(\"{s}\") = {s}", .{ source.path.text, @errorName(err) });
|
||||
return false;
|
||||
};
|
||||
@@ -616,7 +660,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
if (comptime bun.Environment.isDebug) {
|
||||
if (!bun_debug_restore_from_cache) {
|
||||
if (this.entry) |*entry| {
|
||||
entry.deinit(this.sourcemap_allocator, this.output_code_allocator);
|
||||
entry.deinit(this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator);
|
||||
this.entry = null;
|
||||
}
|
||||
}
|
||||
@@ -625,7 +669,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
return this.entry != null;
|
||||
}
|
||||
|
||||
pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8) void {
|
||||
pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8, esm_record: []const u8) void {
|
||||
if (comptime !bun.FeatureFlags.runtime_transpiler_cache)
|
||||
@compileError("RuntimeTranspilerCache is disabled");
|
||||
|
||||
@@ -636,7 +680,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
const output_code = bun.String.createLatin1(output_code_bytes);
|
||||
this.output_code = output_code;
|
||||
|
||||
toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, output_code, this.exports_kind) catch |err| {
|
||||
toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, esm_record, output_code, this.exports_kind) catch |err| {
|
||||
debug("put() = {s}", .{@errorName(err)});
|
||||
return;
|
||||
};
|
||||
|
||||
328
src/bun.js/bindings/BunAnalyzeTranspiledModule.cpp
Normal file
328
src/bun.js/bindings/BunAnalyzeTranspiledModule.cpp
Normal file
@@ -0,0 +1,328 @@
|
||||
#include "root.h"
|
||||
|
||||
#include "JavaScriptCore/JSInternalPromise.h"
|
||||
#include "JavaScriptCore/JSModuleRecord.h"
|
||||
#include "JavaScriptCore/GlobalObjectMethodTable.h"
|
||||
#include "JavaScriptCore/JSModuleRecord.h"
|
||||
#include "JavaScriptCore/Nodes.h"
|
||||
#include "JavaScriptCore/Parser.h"
|
||||
#include "JavaScriptCore/ParserError.h"
|
||||
#include "JavaScriptCore/SyntheticModuleRecord.h"
|
||||
#include <wtf/text/MakeString.h>
|
||||
#include "JavaScriptCore/JSGlobalObject.h"
|
||||
#include "JavaScriptCore/JSModuleRecord.h"
|
||||
#include "JavaScriptCore/ExceptionScope.h"
|
||||
#include "ZigSourceProvider.h"
|
||||
#include "BunAnalyzeTranspiledModule.h"
|
||||
|
||||
// ref: JSModuleLoader.cpp
|
||||
// ref: ModuleAnalyzer.cpp
|
||||
// ref: JSModuleRecord.cpp
|
||||
// ref: NodesAnalyzeModule.cpp, search ::analyzeModule
|
||||
|
||||
// TODO: #include "JavaScriptCore/parser/ModuleAnalyzer.h"
|
||||
#include "JavaScriptCore/ErrorType.h"
|
||||
#include "JavaScriptCore/Nodes.h"
|
||||
|
||||
namespace JSC {
|
||||
|
||||
class JSModuleRecord;
|
||||
class SourceCode;
|
||||
class ScriptFetchParameters;
|
||||
|
||||
class ModuleAnalyzer {
|
||||
WTF_MAKE_NONCOPYABLE(ModuleAnalyzer);
|
||||
WTF_FORBID_HEAP_ALLOCATION;
|
||||
|
||||
public:
|
||||
ModuleAnalyzer(JSGlobalObject*, const Identifier& moduleKey, const SourceCode&, const VariableEnvironment& declaredVariables, const VariableEnvironment& lexicalVariables, CodeFeatures);
|
||||
|
||||
Expected<JSModuleRecord*, std::tuple<ErrorType, String>> analyze(ModuleProgramNode&);
|
||||
|
||||
VM& vm() { return m_vm; }
|
||||
|
||||
JSModuleRecord* moduleRecord() { return m_moduleRecord; }
|
||||
|
||||
void appendRequestedModule(const Identifier&, RefPtr<ScriptFetchParameters>&&);
|
||||
|
||||
void fail(std::tuple<ErrorType, String>&& errorMessage) { m_errorMessage = errorMessage; }
|
||||
|
||||
private:
|
||||
void exportVariable(ModuleProgramNode&, const RefPtr<UniquedStringImpl>&, const VariableEnvironmentEntry&);
|
||||
|
||||
VM& m_vm;
|
||||
JSModuleRecord* m_moduleRecord;
|
||||
IdentifierSet m_requestedModules;
|
||||
std::tuple<ErrorType, String> m_errorMessage;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
namespace JSC {
|
||||
|
||||
String dumpRecordInfo(JSModuleRecord* moduleRecord);
|
||||
|
||||
Identifier getFromIdentifierArray(VM& vm, Identifier* identifierArray, uint32_t n)
|
||||
{
|
||||
if (n == std::numeric_limits<uint32_t>::max()) {
|
||||
return vm.propertyNames->starDefaultPrivateName;
|
||||
}
|
||||
return identifierArray[n];
|
||||
}
|
||||
|
||||
extern "C" JSModuleRecord* zig__ModuleInfoDeserialized__toJSModuleRecord(JSGlobalObject* globalObject, VM& vm, const Identifier& module_key, const SourceCode& source_code, VariableEnvironment& declared_variables, VariableEnvironment& lexical_variables, bun_ModuleInfoDeserialized* module_info);
|
||||
extern "C" void zig__renderDiff(const char* expected_ptr, size_t expected_len, const char* received_ptr, size_t received_len, JSGlobalObject* globalObject);
|
||||
|
||||
extern "C" Identifier* JSC__IdentifierArray__create(size_t len)
|
||||
{
|
||||
return new Identifier[len];
|
||||
}
|
||||
extern "C" void JSC__IdentifierArray__destroy(Identifier* identifier)
|
||||
{
|
||||
delete[] identifier;
|
||||
}
|
||||
extern "C" void JSC__IdentifierArray__setFromUtf8(Identifier* identifierArray, size_t n, VM& vm, char* str, size_t len)
|
||||
{
|
||||
identifierArray[n] = Identifier::fromString(vm, AtomString::fromUTF8(std::span<const char>(str, len)));
|
||||
}
|
||||
|
||||
extern "C" void JSC__VariableEnvironment__add(VariableEnvironment& environment, VM& vm, Identifier* identifierArray, uint32_t index)
|
||||
{
|
||||
environment.add(getFromIdentifierArray(vm, identifierArray, index));
|
||||
}
|
||||
|
||||
extern "C" VariableEnvironment* JSC_JSModuleRecord__declaredVariables(JSModuleRecord* moduleRecord)
|
||||
{
|
||||
return &moduleRecord->m_declaredVariables;
|
||||
}
|
||||
extern "C" VariableEnvironment* JSC_JSModuleRecord__lexicalVariables(JSModuleRecord* moduleRecord)
|
||||
{
|
||||
return &moduleRecord->m_lexicalVariables;
|
||||
}
|
||||
|
||||
extern "C" JSModuleRecord* JSC_JSModuleRecord__create(JSGlobalObject* globalObject, VM& vm, const Identifier* moduleKey, const SourceCode& sourceCode, const VariableEnvironment& declaredVariables, const VariableEnvironment& lexicalVariables, bool hasImportMeta, bool isTypescript)
|
||||
{
|
||||
JSModuleRecord* result = JSModuleRecord::create(globalObject, vm, globalObject->moduleRecordStructure(), *moduleKey, sourceCode, declaredVariables, lexicalVariables, hasImportMeta ? ImportMetaFeature : 0);
|
||||
result->m_isTypeScript = isTypescript;
|
||||
return result;
|
||||
}
|
||||
|
||||
extern "C" void JSC_JSModuleRecord__addIndirectExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t importName, uint32_t moduleName)
|
||||
{
|
||||
moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createIndirect(getFromIdentifierArray(moduleRecord->vm(), identifierArray, exportName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName)));
|
||||
}
|
||||
extern "C" void JSC_JSModuleRecord__addLocalExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t localName)
|
||||
{
|
||||
moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createLocal(getFromIdentifierArray(moduleRecord->vm(), identifierArray, exportName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName)));
|
||||
}
|
||||
extern "C" void JSC_JSModuleRecord__addNamespaceExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t exportName, uint32_t moduleName)
|
||||
{
|
||||
moduleRecord->addExportEntry(JSModuleRecord::ExportEntry::createNamespace(getFromIdentifierArray(moduleRecord->vm(), identifierArray, exportName), getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName)));
|
||||
}
|
||||
extern "C" void JSC_JSModuleRecord__addStarExport(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
|
||||
{
|
||||
moduleRecord->addStarExportEntry(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName));
|
||||
}
|
||||
extern "C" void JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
|
||||
{
|
||||
RefPtr<ScriptFetchParameters> attributes = RefPtr<ScriptFetchParameters> {};
|
||||
moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), WTFMove(attributes));
|
||||
}
|
||||
extern "C" void JSC_JSModuleRecord__addRequestedModuleJavaScript(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
|
||||
{
|
||||
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::JavaScript);
|
||||
moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), WTFMove(attributes));
|
||||
}
|
||||
extern "C" void JSC_JSModuleRecord__addRequestedModuleWebAssembly(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
|
||||
{
|
||||
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::WebAssembly);
|
||||
moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), WTFMove(attributes));
|
||||
}
|
||||
extern "C" void JSC_JSModuleRecord__addRequestedModuleJSON(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName)
|
||||
{
|
||||
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(ScriptFetchParameters::Type::JSON);
|
||||
moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), WTFMove(attributes));
|
||||
}
|
||||
extern "C" void JSC_JSModuleRecord__addRequestedModuleHostDefined(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t moduleName, uint32_t hostDefinedImportType)
|
||||
{
|
||||
Ref<ScriptFetchParameters> attributes = ScriptFetchParameters::create(identifierArray[hostDefinedImportType].string());
|
||||
moduleRecord->appendRequestedModule(getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName), WTFMove(attributes));
|
||||
}
|
||||
|
||||
extern "C" void JSC_JSModuleRecord__addImportEntrySingle(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
|
||||
{
|
||||
moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
|
||||
.type = JSModuleRecord::ImportEntryType::Single,
|
||||
.moduleRequest = getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName),
|
||||
.importName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName),
|
||||
.localName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName),
|
||||
});
|
||||
}
|
||||
extern "C" void JSC_JSModuleRecord__addImportEntrySingleTypeScript(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
|
||||
{
|
||||
moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
|
||||
.type = JSModuleRecord::ImportEntryType::SingleTypeScript,
|
||||
.moduleRequest = getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName),
|
||||
.importName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName),
|
||||
.localName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName),
|
||||
});
|
||||
}
|
||||
extern "C" void JSC_JSModuleRecord__addImportEntryNamespace(JSModuleRecord* moduleRecord, Identifier* identifierArray, uint32_t importName, uint32_t localName, uint32_t moduleName)
|
||||
{
|
||||
moduleRecord->addImportEntry(JSModuleRecord::ImportEntry {
|
||||
.type = JSModuleRecord::ImportEntryType::Namespace,
|
||||
.moduleRequest = getFromIdentifierArray(moduleRecord->vm(), identifierArray, moduleName),
|
||||
.importName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, importName),
|
||||
.localName = getFromIdentifierArray(moduleRecord->vm(), identifierArray, localName),
|
||||
});
|
||||
}
|
||||
|
||||
static EncodedJSValue fallbackParse(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise, JSModuleRecord* resultValue = nullptr);
|
||||
extern "C" EncodedJSValue Bun__analyzeTranspiledModule(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise)
|
||||
{
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
|
||||
auto rejectWithError = [&](JSValue error) {
|
||||
promise->reject(globalObject, error);
|
||||
return promise;
|
||||
};
|
||||
|
||||
VariableEnvironment declaredVariables = VariableEnvironment();
|
||||
VariableEnvironment lexicalVariables = VariableEnvironment();
|
||||
|
||||
auto provider = static_cast<Zig::SourceProvider*>(sourceCode.provider());
|
||||
|
||||
if (provider->m_resolvedSource.module_info == nullptr) {
|
||||
dataLog("[note] module_info is null for module: ", moduleKey.utf8(), "\n");
|
||||
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("module_info is null")))));
|
||||
}
|
||||
|
||||
auto moduleRecord = zig__ModuleInfoDeserialized__toJSModuleRecord(globalObject, vm, moduleKey, sourceCode, declaredVariables, lexicalVariables, provider->m_resolvedSource.module_info);
|
||||
if (moduleRecord == nullptr) {
|
||||
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("parseFromSourceCode failed")))));
|
||||
}
|
||||
|
||||
#if BUN_DEBUG or true
|
||||
RELEASE_AND_RETURN(scope, fallbackParse(globalObject, moduleKey, sourceCode, promise, moduleRecord));
|
||||
#else
|
||||
promise->fulfillWithNonPromise(globalObject, moduleRecord);
|
||||
RELEASE_AND_RETURN(scope, JSValue::encode(promise));
|
||||
#endif
|
||||
}
|
||||
static EncodedJSValue fallbackParse(JSGlobalObject* globalObject, const Identifier& moduleKey, const SourceCode& sourceCode, JSInternalPromise* promise, JSModuleRecord* resultValue)
|
||||
{
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
auto rejectWithError = [&](JSValue error) {
|
||||
promise->reject(globalObject, error);
|
||||
return promise;
|
||||
};
|
||||
|
||||
ParserError error;
|
||||
std::unique_ptr<ModuleProgramNode> moduleProgramNode = parseRootNode<ModuleProgramNode>(
|
||||
vm, sourceCode, ImplementationVisibility::Public, JSParserBuiltinMode::NotBuiltin,
|
||||
StrictModeLexicallyScopedFeature, JSParserScriptMode::Module, SourceParseMode::ModuleAnalyzeMode, error);
|
||||
if (error.isValid())
|
||||
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(error.toErrorObject(globalObject, sourceCode))));
|
||||
ASSERT(moduleProgramNode);
|
||||
|
||||
ModuleAnalyzer ModuleAnalyzer(globalObject, moduleKey, sourceCode, moduleProgramNode->varDeclarations(), moduleProgramNode->lexicalVariables(), moduleProgramNode->features());
|
||||
RETURN_IF_EXCEPTION(scope, JSValue::encode(promise->rejectWithCaughtException(globalObject, scope)));
|
||||
|
||||
auto result = ModuleAnalyzer.analyze(*moduleProgramNode);
|
||||
if (!result) {
|
||||
auto [errorType, message] = WTFMove(result.error());
|
||||
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, errorType, message))));
|
||||
}
|
||||
|
||||
JSModuleRecord* moduleRecord = result.value();
|
||||
|
||||
if (resultValue != nullptr) {
|
||||
auto actual = dumpRecordInfo(resultValue);
|
||||
auto expected = dumpRecordInfo(moduleRecord);
|
||||
if (actual != expected) {
|
||||
dataLog("\n\n\n\n\n\n\x1b[95mBEGIN analyzeTranspiledModule\x1b(B\x1b[m\n --- module key ---\n", moduleKey.utf8().data(), "\n --- code ---\n\n", sourceCode.toUTF8().data(), "\n");
|
||||
dataLog(" ------", "\n");
|
||||
dataLog(" BunAnalyzeTranspiledModule:", "\n");
|
||||
|
||||
zig__renderDiff(expected.utf8().data(), expected.utf8().length(), actual.utf8().data(), actual.utf8().length(), globalObject);
|
||||
|
||||
RELEASE_AND_RETURN(scope, JSValue::encode(rejectWithError(createError(globalObject, WTF::String::fromLatin1("Imports different between parseFromSourceCode and fallbackParse")))));
|
||||
}
|
||||
}
|
||||
|
||||
scope.release();
|
||||
promise->fulfillWithNonPromise(globalObject, resultValue == nullptr ? moduleRecord : resultValue);
|
||||
return JSValue::encode(promise);
|
||||
}
|
||||
|
||||
String dumpRecordInfo(JSModuleRecord* moduleRecord)
|
||||
{
|
||||
WTF::StringPrintStream stream;
|
||||
|
||||
stream.print(" varDeclarations:\n");
|
||||
for (const auto& pair : moduleRecord->m_declaredVariables) {
|
||||
stream.print(" - ", pair.key, "\n");
|
||||
}
|
||||
|
||||
stream.print(" lexicalVariables:\n");
|
||||
for (const auto& pair : moduleRecord->m_lexicalVariables) {
|
||||
stream.print(" - ", pair.key, "\n");
|
||||
}
|
||||
|
||||
stream.print(" features: ");
|
||||
stream.print(moduleRecord->m_features & ImportMetaFeature);
|
||||
stream.print("\n");
|
||||
|
||||
stream.print("\nAnalyzing ModuleRecord key(", moduleRecord->moduleKey().impl(), ")\n");
|
||||
|
||||
stream.print(" Dependencies: ", moduleRecord->requestedModules().size(), " modules\n");
|
||||
for (const auto& request : moduleRecord->requestedModules())
|
||||
if (request.m_attributes == nullptr) {
|
||||
stream.print(" module(", request.m_specifier, ")\n");
|
||||
} else {
|
||||
stream.print(" module(", request.m_specifier, "),attributes(", (uint8_t)request.m_attributes->type(), ", ", request.m_attributes->hostDefinedImportType(), ")\n");
|
||||
}
|
||||
|
||||
stream.print(" Import: ", moduleRecord->importEntries().size(), " entries\n");
|
||||
for (const auto& pair : moduleRecord->importEntries()) {
|
||||
auto& importEntry = pair.value;
|
||||
stream.print(" import(", importEntry.importName, "), local(", importEntry.localName, "), module(", importEntry.moduleRequest, ")\n");
|
||||
}
|
||||
|
||||
stream.print(" Export: ", moduleRecord->exportEntries().size(), " entries\n");
|
||||
Vector<String> sortedEntries;
|
||||
for (const auto& pair : moduleRecord->exportEntries()) {
|
||||
WTF::StringPrintStream line;
|
||||
auto& exportEntry = pair.value;
|
||||
switch (exportEntry.type) {
|
||||
case AbstractModuleRecord::ExportEntry::Type::Local:
|
||||
line.print(" [Local] ", "export(", exportEntry.exportName, "), local(", exportEntry.localName, ")\n");
|
||||
break;
|
||||
|
||||
case AbstractModuleRecord::ExportEntry::Type::Indirect:
|
||||
line.print(" [Indirect] ", "export(", exportEntry.exportName, "), import(", exportEntry.importName, "), module(", exportEntry.moduleName, ")\n");
|
||||
break;
|
||||
|
||||
case AbstractModuleRecord::ExportEntry::Type::Namespace:
|
||||
line.print(" [Namespace] ", "export(", exportEntry.exportName, "), module(", exportEntry.moduleName, ")\n");
|
||||
break;
|
||||
}
|
||||
sortedEntries.append(line.toString());
|
||||
}
|
||||
std::sort(sortedEntries.begin(), sortedEntries.end(), [](const String& a, const String& b) {
|
||||
return a.utf8().toStdString() < b.utf8().toStdString();
|
||||
});
|
||||
for (const auto& entry : sortedEntries)
|
||||
stream.print(entry);
|
||||
|
||||
for (const auto& moduleName : moduleRecord->starExportEntries())
|
||||
stream.print(" [Star] module(", moduleName.get(), ")\n");
|
||||
|
||||
stream.print(" -> done\n");
|
||||
|
||||
return stream.toString();
|
||||
}
|
||||
|
||||
}
|
||||
1
src/bun.js/bindings/BunAnalyzeTranspiledModule.h
Normal file
1
src/bun.js/bindings/BunAnalyzeTranspiledModule.h
Normal file
@@ -0,0 +1 @@
|
||||
struct bun_ModuleInfoDeserialized;
|
||||
@@ -1273,7 +1273,7 @@ void JSCommonJSModule::evaluate(
|
||||
}
|
||||
}
|
||||
|
||||
auto sourceProvider = Zig::SourceProvider::create(jsCast<Zig::GlobalObject*>(globalObject), source, JSC::SourceProviderSourceType::Program, isBuiltIn);
|
||||
auto sourceProvider = Zig::SourceProvider::create(jsCast<Zig::GlobalObject*>(globalObject), source, isBuiltIn);
|
||||
this->ignoreESModuleAnnotation = source.tag == ResolvedSourceTagPackageJSONTypeModule;
|
||||
if (this->hasEvaluated)
|
||||
return;
|
||||
@@ -1326,7 +1326,7 @@ std::optional<JSC::SourceCode> createCommonJSModule(
|
||||
source.source_code = Bun::toStringRef(concat);
|
||||
}
|
||||
|
||||
auto sourceProvider = Zig::SourceProvider::create(jsCast<Zig::GlobalObject*>(globalObject), source, JSC::SourceProviderSourceType::Program, isBuiltIn);
|
||||
auto sourceProvider = Zig::SourceProvider::create(jsCast<Zig::GlobalObject*>(globalObject), source, isBuiltIn);
|
||||
sourceOrigin = sourceProvider->sourceOrigin();
|
||||
moduleObject = JSCommonJSModule::create(
|
||||
vm,
|
||||
|
||||
@@ -862,7 +862,7 @@ static JSValue fetchESMSourceCode(
|
||||
auto tag = res->result.value.tag;
|
||||
switch (tag) {
|
||||
case SyntheticModuleType::ESM: {
|
||||
auto&& provider = Zig::SourceProvider::create(globalObject, res->result.value, JSC::SourceProviderSourceType::Module, true);
|
||||
auto&& provider = Zig::SourceProvider::create(globalObject, res->result.value, true);
|
||||
return rejectOrResolve(JSSourceCode::create(vm, JSC::SourceCode(provider)));
|
||||
}
|
||||
|
||||
@@ -881,7 +881,7 @@ static JSValue fetchESMSourceCode(
|
||||
auto source = JSC::SourceCode(JSC::SyntheticSourceProvider::create(generateInternalModuleSourceCode(globalObject, static_cast<InternalModuleRegistry::Field>(tag & mask)), JSC::SourceOrigin(URL(makeString("builtins://"_s, moduleKey))), moduleKey));
|
||||
return rejectOrResolve(JSSourceCode::create(vm, WTFMove(source)));
|
||||
} else {
|
||||
auto&& provider = Zig::SourceProvider::create(globalObject, res->result.value, JSC::SourceProviderSourceType::Module, true);
|
||||
auto&& provider = Zig::SourceProvider::create(globalObject, res->result.value, true);
|
||||
return rejectOrResolve(JSC::JSSourceCode::create(vm, JSC::SourceCode(provider)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,5 +29,13 @@ pub const ResolvedSource = extern struct {
|
||||
bytecode_cache: ?[*]u8 = null,
|
||||
bytecode_cache_size: usize = 0,
|
||||
|
||||
/// - for esm: null means to use jsc's regular parsing step. more info: https://github.com/oven-sh/bun/pull/15758
|
||||
/// - for cjs: must be null
|
||||
module_info: ?*@import("../../analyze_transpiled_module.zig").ModuleInfoDeserialized,
|
||||
|
||||
pub const unfilled = ResolvedSource{
|
||||
.module_info = null,
|
||||
};
|
||||
|
||||
pub const Tag = @import("ResolvedSourceTag").ResolvedSourceTag;
|
||||
};
|
||||
|
||||
@@ -75,9 +75,17 @@ extern "C" void Bun__removeSourceProviderSourceMap(void* bun_vm, SourceProvider*
|
||||
Ref<SourceProvider> SourceProvider::create(
|
||||
Zig::GlobalObject* globalObject,
|
||||
ResolvedSource& resolvedSource,
|
||||
JSC::SourceProviderSourceType sourceType,
|
||||
bool isBuiltin)
|
||||
{
|
||||
|
||||
JSC::SourceProviderSourceType sourceType = JSC::SourceProviderSourceType::BunTranspiledModule;
|
||||
if (resolvedSource.isCommonJSModule) {
|
||||
ASSERT(resolvedSource.module_info == nullptr, "isCommonJSModule should not have module_info");
|
||||
sourceType = JSC::SourceProviderSourceType::Program;
|
||||
} else if (resolvedSource.module_info == nullptr) {
|
||||
sourceType = JSC::SourceProviderSourceType::Module;
|
||||
}
|
||||
|
||||
auto string = resolvedSource.source_code.toWTFString(BunString::ZeroCopy);
|
||||
auto sourceURLString = resolvedSource.source_url.toWTFString(BunString::ZeroCopy);
|
||||
|
||||
|
||||
@@ -39,7 +39,6 @@ public:
|
||||
static Ref<SourceProvider> create(
|
||||
Zig::GlobalObject*,
|
||||
ResolvedSource& resolvedSource,
|
||||
JSC::SourceProviderSourceType sourceType = JSC::SourceProviderSourceType::Module,
|
||||
bool isBuiltIn = false);
|
||||
~SourceProvider();
|
||||
unsigned hash() const override;
|
||||
|
||||
@@ -402,3 +402,7 @@ comptime {
|
||||
@export(&@"windows process.dlopen", .{ .name = "Bun__LoadLibraryBunString" });
|
||||
}
|
||||
}
|
||||
|
||||
comptime {
|
||||
_ = @import("../../analyze_transpiled_module.zig");
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
#include "wtf/text/OrdinalNumber.h"
|
||||
#include "JavaScriptCore/JSCJSValue.h"
|
||||
#include "JavaScriptCore/ArgList.h"
|
||||
#include "BunAnalyzeTranspiledModule.h"
|
||||
#include <set>
|
||||
|
||||
#ifndef HEADERS_HANDWRITTEN
|
||||
@@ -114,6 +115,7 @@ typedef struct ResolvedSource {
|
||||
bool already_bundled;
|
||||
uint8_t* bytecode_cache;
|
||||
size_t bytecode_cache_size;
|
||||
bun_ModuleInfoDeserialized* module_info;
|
||||
} ResolvedSource;
|
||||
static const uint32_t ResolvedSourceTagPackageJSONTypeModule = 1;
|
||||
typedef union ErrorableResolvedSourceResult {
|
||||
|
||||
@@ -2382,6 +2382,7 @@ pub const VirtualMachine = struct {
|
||||
.source_url = specifier.createIfDifferent(source_url),
|
||||
.allocator = null,
|
||||
.source_code_needs_deref = false,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
var source = this.refCountedString(code, hash_, !add_double_ref);
|
||||
@@ -2396,6 +2397,7 @@ pub const VirtualMachine = struct {
|
||||
.source_url = specifier.createIfDifferent(source_url),
|
||||
.allocator = source,
|
||||
.source_code_needs_deref = false,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -11,6 +11,8 @@ const stringZ = bun.stringZ;
|
||||
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
|
||||
const Arena = @import("../allocators/mimalloc_arena.zig").Arena;
|
||||
const C = bun.C;
|
||||
const analyze_transpiled_module = @import("../analyze_transpiled_module.zig");
|
||||
const ModuleInfo = analyze_transpiled_module.ModuleInfo;
|
||||
|
||||
const Allocator = std.mem.Allocator;
|
||||
const IdentityContext = @import("../identity_context.zig").IdentityContext;
|
||||
@@ -81,6 +83,7 @@ inline fn jsSyntheticModule(name: ResolvedSource.Tag, specifier: String) Resolve
|
||||
.source_url = bun.String.static(@tagName(name)),
|
||||
.tag = name,
|
||||
.source_code_needs_deref = false,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -246,7 +249,7 @@ pub const RuntimeTranspilerStore = struct {
|
||||
|
||||
// NOTE: DirInfo should already be cached since module loading happens
|
||||
// after module resolution, so this should be cheap
|
||||
var resolved_source = ResolvedSource{};
|
||||
var resolved_source = ResolvedSource.unfilled;
|
||||
if (package_json) |pkg| {
|
||||
switch (pkg.module_type) {
|
||||
.cjs => {
|
||||
@@ -292,7 +295,7 @@ pub const RuntimeTranspilerStore = struct {
|
||||
generation_number: u32 = 0,
|
||||
log: logger.Log,
|
||||
parse_error: ?anyerror = null,
|
||||
resolved_source: ResolvedSource = ResolvedSource{},
|
||||
resolved_source: ResolvedSource = ResolvedSource.unfilled,
|
||||
work_task: JSC.WorkPoolTask = .{ .callback = runFromWorkerThread },
|
||||
next: ?*TranspilerJob = null,
|
||||
|
||||
@@ -406,6 +409,7 @@ pub const RuntimeTranspilerStore = struct {
|
||||
var cache = JSC.RuntimeTranspilerCache{
|
||||
.output_code_allocator = allocator,
|
||||
.sourcemap_allocator = bun.default_allocator,
|
||||
.esm_record_allocator = bun.default_allocator,
|
||||
};
|
||||
|
||||
var vm = this.vm;
|
||||
@@ -557,6 +561,19 @@ pub const RuntimeTranspilerStore = struct {
|
||||
dumpSourceString(vm, specifier, entry.output_code.byteSlice());
|
||||
}
|
||||
|
||||
var module_info: ?*analyze_transpiled_module.ModuleInfoDeserialized = null;
|
||||
if (entry.esm_record.len > 0) {
|
||||
if (entry.metadata.module_type == .cjs) {
|
||||
@panic("TranspilerCache contained cjs module with module info");
|
||||
}
|
||||
module_info = analyze_transpiled_module.ModuleInfoDeserialized.create(entry.esm_record, bun.default_allocator) catch |e| switch (e) {
|
||||
error.OutOfMemory => bun.outOfMemory(),
|
||||
// uh oh! invalid module info in cache
|
||||
// (not sure what to do here)
|
||||
error.BadModuleInfo => @panic("TranspilerCache contained invalid module info"),
|
||||
};
|
||||
}
|
||||
|
||||
this.resolved_source = ResolvedSource{
|
||||
.allocator = null,
|
||||
.source_code = switch (entry.output_code) {
|
||||
@@ -569,6 +586,7 @@ pub const RuntimeTranspilerStore = struct {
|
||||
},
|
||||
},
|
||||
.is_commonjs_module = entry.metadata.module_type == .cjs,
|
||||
.module_info = module_info,
|
||||
.tag = this.resolved_source.tag,
|
||||
};
|
||||
|
||||
@@ -584,6 +602,7 @@ pub const RuntimeTranspilerStore = struct {
|
||||
.bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null,
|
||||
.bytecode_cache_size = bytecode_slice.len,
|
||||
.is_commonjs_module = parse_result.already_bundled.isCommonJS(),
|
||||
.module_info = null,
|
||||
.tag = this.resolved_source.tag,
|
||||
};
|
||||
this.resolved_source.source_code.ensureHash();
|
||||
@@ -637,6 +656,10 @@ pub const RuntimeTranspilerStore = struct {
|
||||
var printer = source_code_printer.?.*;
|
||||
printer.ctx.reset();
|
||||
|
||||
const is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
|
||||
const module_info: ?*ModuleInfo = if (is_commonjs_module) null else ModuleInfo.create(bun.default_allocator, parse_result.ast.is_from_typescript) catch bun.outOfMemory();
|
||||
// defer module_info.destroy(); // TODO: do not leak module_info
|
||||
|
||||
{
|
||||
var mapper = vm.sourceMapHandler(&printer);
|
||||
defer source_code_printer.?.* = printer;
|
||||
@@ -646,6 +669,7 @@ pub const RuntimeTranspilerStore = struct {
|
||||
&printer,
|
||||
.esm_ascii,
|
||||
mapper.get(),
|
||||
module_info,
|
||||
) catch |err| {
|
||||
this.parse_error = err;
|
||||
return;
|
||||
@@ -678,10 +702,12 @@ pub const RuntimeTranspilerStore = struct {
|
||||
|
||||
break :brk result;
|
||||
};
|
||||
|
||||
this.resolved_source = ResolvedSource{
|
||||
.allocator = null,
|
||||
.source_code = source_code,
|
||||
.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
|
||||
.is_commonjs_module = is_commonjs_module,
|
||||
.module_info = if (module_info) |mi| mi.asDeserialized() else null,
|
||||
.tag = this.resolved_source.tag,
|
||||
};
|
||||
}
|
||||
@@ -1400,6 +1426,10 @@ pub const ModuleLoader = struct {
|
||||
var printer = VirtualMachine.source_code_printer.?.*;
|
||||
printer.ctx.reset();
|
||||
|
||||
const is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
|
||||
const module_info: ?*ModuleInfo = if (is_commonjs_module) null else ModuleInfo.create(bun.default_allocator, parse_result.ast.is_from_typescript) catch bun.outOfMemory();
|
||||
// defer module_info.destroy(); // TODO: do not leak module_info
|
||||
|
||||
{
|
||||
var mapper = jsc_vm.sourceMapHandler(&printer);
|
||||
defer VirtualMachine.source_code_printer.?.* = printer;
|
||||
@@ -1409,6 +1439,7 @@ pub const ModuleLoader = struct {
|
||||
&printer,
|
||||
.esm_ascii,
|
||||
mapper.get(),
|
||||
module_info,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1433,7 +1464,7 @@ pub const ModuleLoader = struct {
|
||||
}
|
||||
}
|
||||
|
||||
resolved_source.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
|
||||
resolved_source.is_commonjs_module = is_commonjs_module;
|
||||
|
||||
return resolved_source;
|
||||
}
|
||||
@@ -1443,7 +1474,9 @@ pub const ModuleLoader = struct {
|
||||
.source_code = bun.String.createLatin1(printer.ctx.getWritten()),
|
||||
.specifier = String.init(specifier),
|
||||
.source_url = String.init(path.text),
|
||||
.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
|
||||
.is_commonjs_module = is_commonjs_module,
|
||||
|
||||
.module_info = if (module_info) |mi| mi.asDeserialized() else null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1504,6 +1537,7 @@ pub const ModuleLoader = struct {
|
||||
.source_code = bun.String.empty,
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1571,6 +1605,7 @@ pub const ModuleLoader = struct {
|
||||
var cache = JSC.RuntimeTranspilerCache{
|
||||
.output_code_allocator = allocator,
|
||||
.sourcemap_allocator = bun.default_allocator,
|
||||
.esm_record_allocator = bun.default_allocator,
|
||||
};
|
||||
|
||||
const old = jsc_vm.transpiler.log;
|
||||
@@ -1736,6 +1771,7 @@ pub const ModuleLoader = struct {
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.tag = ResolvedSource.Tag.json_for_object_loader,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1749,6 +1785,7 @@ pub const ModuleLoader = struct {
|
||||
},
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1760,6 +1797,7 @@ pub const ModuleLoader = struct {
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.jsvalue_for_export = JSValue.createEmptyObject(jsc_vm.global, 0),
|
||||
.tag = .exports_object,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1769,6 +1807,7 @@ pub const ModuleLoader = struct {
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.jsvalue_for_export = parse_result.ast.parts.@"[0]"().stmts[0].data.s_expr.value.toJS(allocator, globalObject orelse jsc_vm.global) catch |e| panic("Unexpected JS error: {s}", .{@errorName(e)}),
|
||||
.tag = .exports_object,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1783,6 +1822,7 @@ pub const ModuleLoader = struct {
|
||||
.bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null,
|
||||
.bytecode_cache_size = bytecode_slice.len,
|
||||
.is_commonjs_module = parse_result.already_bundled.isCommonJS(),
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1799,6 +1839,7 @@ pub const ModuleLoader = struct {
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.is_commonjs_module = true,
|
||||
.tag = .javascript,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1813,6 +1854,19 @@ pub const ModuleLoader = struct {
|
||||
dumpSourceString(jsc_vm, specifier, entry.output_code.byteSlice());
|
||||
}
|
||||
|
||||
var module_info: ?*analyze_transpiled_module.ModuleInfoDeserialized = null;
|
||||
if (entry.esm_record.len > 0) {
|
||||
if (entry.metadata.module_type == .cjs) {
|
||||
@panic("TranspilerCache contained cjs module with module info");
|
||||
}
|
||||
module_info = analyze_transpiled_module.ModuleInfoDeserialized.create(entry.esm_record, bun.default_allocator) catch |e| switch (e) {
|
||||
error.OutOfMemory => bun.outOfMemory(),
|
||||
// uh oh! invalid module info in cache
|
||||
// (not sure what to do here)
|
||||
error.BadModuleInfo => @panic("TranspilerCache contained invalid module info"),
|
||||
};
|
||||
}
|
||||
|
||||
return ResolvedSource{
|
||||
.allocator = null,
|
||||
.source_code = switch (entry.output_code) {
|
||||
@@ -1844,6 +1898,7 @@ pub const ModuleLoader = struct {
|
||||
|
||||
break :brk ResolvedSource.Tag.javascript;
|
||||
},
|
||||
.module_info = module_info,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1898,6 +1953,11 @@ pub const ModuleLoader = struct {
|
||||
var printer = source_code_printer.*;
|
||||
printer.ctx.reset();
|
||||
defer source_code_printer.* = printer;
|
||||
|
||||
const is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;
|
||||
const module_info: ?*ModuleInfo = if (is_commonjs_module) null else ModuleInfo.create(bun.default_allocator, parse_result.ast.is_from_typescript) catch bun.outOfMemory();
|
||||
// defer module_info.destroy(); // TODO: do not leak module_info
|
||||
|
||||
_ = brk: {
|
||||
var mapper = jsc_vm.sourceMapHandler(&printer);
|
||||
|
||||
@@ -1907,6 +1967,7 @@ pub const ModuleLoader = struct {
|
||||
&printer,
|
||||
.esm_ascii,
|
||||
mapper.get(),
|
||||
module_info,
|
||||
);
|
||||
};
|
||||
|
||||
@@ -1955,8 +2016,9 @@ pub const ModuleLoader = struct {
|
||||
},
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
|
||||
.is_commonjs_module = is_commonjs_module,
|
||||
.tag = tag,
|
||||
.module_info = if (module_info) |mi| mi.asDeserialized() else null,
|
||||
};
|
||||
},
|
||||
// provideFetch() should be called
|
||||
@@ -2023,6 +2085,7 @@ pub const ModuleLoader = struct {
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.tag = .esm,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2082,6 +2145,7 @@ pub const ModuleLoader = struct {
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.tag = .esm,
|
||||
.module_info = null,
|
||||
};
|
||||
},
|
||||
|
||||
@@ -2093,6 +2157,7 @@ pub const ModuleLoader = struct {
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.tag = .esm,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2107,6 +2172,7 @@ pub const ModuleLoader = struct {
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.tag = .export_default_object,
|
||||
.module_info = null,
|
||||
};
|
||||
},
|
||||
|
||||
@@ -2118,6 +2184,7 @@ pub const ModuleLoader = struct {
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.tag = .esm,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2188,6 +2255,7 @@ pub const ModuleLoader = struct {
|
||||
.jsvalue_for_export = value,
|
||||
.specifier = input_specifier,
|
||||
.source_url = input_specifier.createIfDifferent(path.text),
|
||||
.module_info = null,
|
||||
.tag = .export_default_object,
|
||||
};
|
||||
},
|
||||
@@ -2295,6 +2363,7 @@ pub const ModuleLoader = struct {
|
||||
.source_url = .empty,
|
||||
.cjs_custom_extension_index = index,
|
||||
.tag = .common_js_custom_extension,
|
||||
.module_info = null,
|
||||
});
|
||||
return null;
|
||||
},
|
||||
@@ -2420,6 +2489,7 @@ pub const ModuleLoader = struct {
|
||||
.source_url = .empty,
|
||||
.cjs_custom_extension_index = index,
|
||||
.tag = .common_js_custom_extension,
|
||||
.module_info = null,
|
||||
});
|
||||
return null;
|
||||
},
|
||||
@@ -2511,6 +2581,7 @@ pub const ModuleLoader = struct {
|
||||
.source_url = specifier,
|
||||
.tag = .esm,
|
||||
.source_code_needs_deref = true,
|
||||
.module_info = null,
|
||||
},
|
||||
.@"bun:internal-for-testing" => {
|
||||
if (!Environment.isDebug) {
|
||||
@@ -2524,6 +2595,7 @@ pub const ModuleLoader = struct {
|
||||
.source_code = String.init(Runtime.Runtime.sourceCode()),
|
||||
.specifier = specifier,
|
||||
.source_url = specifier,
|
||||
.module_info = null,
|
||||
},
|
||||
inline else => |tag| jsSyntheticModule(@field(ResolvedSource.Tag, @tagName(tag)), specifier),
|
||||
};
|
||||
@@ -2543,6 +2615,7 @@ pub const ModuleLoader = struct {
|
||||
.source_code = bun.String.createUTF8(entry.source.contents),
|
||||
.specifier = specifier,
|
||||
.source_url = specifier.dupeRef(),
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
} else if (jsc_vm.standalone_module_graph) |graph| {
|
||||
@@ -2565,6 +2638,7 @@ pub const ModuleLoader = struct {
|
||||
.specifier = specifier,
|
||||
.source_url = specifier.dupeRef(),
|
||||
.source_code_needs_deref = false,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2577,6 +2651,7 @@ pub const ModuleLoader = struct {
|
||||
.bytecode_cache = if (file.bytecode.len > 0) file.bytecode.ptr else null,
|
||||
.bytecode_cache_size = file.bytecode.len,
|
||||
.is_commonjs_module = file.module_format == .cjs,
|
||||
.module_info = null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
109
src/js_ast.zig
109
src/js_ast.zig
@@ -321,10 +321,6 @@ pub const Binding = struct {
|
||||
loc: logger.Loc,
|
||||
};
|
||||
|
||||
pub fn jsonStringify(self: *const @This(), writer: anytype) !void {
|
||||
return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "binding", .value = self.data, .loc = self.loc });
|
||||
}
|
||||
|
||||
pub fn ToExpr(comptime expr_type: type, comptime func_type: anytype) type {
|
||||
const ExprType = expr_type;
|
||||
return struct {
|
||||
@@ -994,6 +990,14 @@ pub const Symbol = struct {
|
||||
/// code. But it should always be non-zero when the symbol is used.
|
||||
use_count_estimate: u32 = 0,
|
||||
|
||||
/// The number of times this symbol is used where it could be referring to a
|
||||
/// typescript type. Examples:
|
||||
/// - T `export { T }`
|
||||
/// - T in `@Decorate() myField?: T` when using emitDecoratorMetadata
|
||||
/// If this symbol came from an import statement, this is used to determine
|
||||
/// if an error should be emitted if the import is not found.
|
||||
use_count_as_type: u32 = 0,
|
||||
|
||||
/// This is for generating cross-chunk imports and exports for code splitting.
|
||||
///
|
||||
/// Do not use this directly. Use `chunkIndex()` instead.
|
||||
@@ -2898,6 +2902,89 @@ pub const Stmt = struct {
|
||||
loc: logger.Loc,
|
||||
};
|
||||
|
||||
pub fn print(self: *const Stmt, tree: Ast, writer: std.io.AnyWriter) !void {
|
||||
_ = tree;
|
||||
switch (self.data) {
|
||||
.s_import => |simport| {
|
||||
// const record = &tree.import_records.slice()[simport.import_record_index];
|
||||
try writer.print(".s_import{{\n", .{});
|
||||
try writer.print(" import_records[import_record_index = {d}] = ,\n", .{simport.import_record_index});
|
||||
// simport.default_name
|
||||
// simport.is_single_line
|
||||
// simport.items
|
||||
// simport.namespace_ref
|
||||
|
||||
// === record: ===
|
||||
// range: logger.Range,
|
||||
// path: fs.Path,
|
||||
// kind: ImportKind,
|
||||
// tag: Tag = .none,
|
||||
// source_index: Index = Index.invalid,
|
||||
// print_mode: PrintMode = .normal,
|
||||
// handles_import_errors: bool = false,
|
||||
// is_internal: bool = false,
|
||||
// is_unused: bool = false,
|
||||
// contains_import_star: bool = false,
|
||||
// contains_default_alias: bool = false,
|
||||
// contains_es_module_alias: bool = false,
|
||||
// calls_runtime_re_export_fn: bool = false,
|
||||
// is_inside_try_body: bool = false,
|
||||
// was_originally_bare_import: bool = false,
|
||||
// was_originally_require: bool = false,
|
||||
// was_injected_by_macro: bool = false,
|
||||
// is_external_without_side_effects: bool = false,
|
||||
// print_namespace_in_path: bool = false,
|
||||
// wrap_with_to_esm: bool = false,
|
||||
// wrap_with_to_commonjs: bool = false,
|
||||
|
||||
try writer.print(" ", .{});
|
||||
try writer.print("}}", .{});
|
||||
},
|
||||
.s_expr => |expr| {
|
||||
try writer.print(".s_expr{{ .does_not_affect_tree_shaking = {}, .value = ", .{expr.does_not_affect_tree_shaking});
|
||||
try expr.value.print(writer, 0);
|
||||
try writer.print("}}", .{});
|
||||
},
|
||||
.s_local => |local| {
|
||||
try writer.print(".s_local{{ .kind = .{s}, .is_export = {}, .was_ts_import_equals = {}, .was_commonjs_export = {}, .decls = .{{\n", .{ @tagName(local.kind), local.is_export, local.was_ts_import_equals, local.was_commonjs_export });
|
||||
for (local.decls.slice()) |m| {
|
||||
try writer.print(" .{{\n .binding = ", .{});
|
||||
switch (m.binding.data) {
|
||||
.b_array => |v| {
|
||||
try writer.print(".b_array{{ .has_spread = {}, .is_single_line = {}, .items = .{{", .{ v.has_spread, v.is_single_line });
|
||||
for (v.items, 0..) |item, i| {
|
||||
if (i != 0) try writer.print(", ", .{});
|
||||
try writer.print("(TODO)", .{});
|
||||
_ = item;
|
||||
}
|
||||
try writer.print("}}}}", .{});
|
||||
},
|
||||
.b_identifier => |v| {
|
||||
try writer.print(".b_identifier{{ .ref = {} }}", .{v.ref});
|
||||
},
|
||||
.b_object => {
|
||||
try writer.print(".b_object", .{});
|
||||
},
|
||||
.b_missing => {
|
||||
try writer.print(".b_missing", .{});
|
||||
},
|
||||
}
|
||||
try writer.print(",\n .value = ", .{});
|
||||
if (m.value == null) {
|
||||
try writer.print("null", .{});
|
||||
} else {
|
||||
try m.value.?.print(writer, 2);
|
||||
}
|
||||
try writer.print(",\n }},\n", .{});
|
||||
}
|
||||
try writer.print("}} }}", .{});
|
||||
},
|
||||
else => {
|
||||
try writer.print(".{s}._todo_print_stmt", .{@tagName(self.data)});
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn jsonStringify(self: *const Stmt, writer: anytype) !void {
|
||||
return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "stmt", .value = self.data, .loc = self.loc });
|
||||
}
|
||||
@@ -3277,6 +3364,18 @@ pub const Expr = struct {
|
||||
loc: logger.Loc,
|
||||
data: Data,
|
||||
|
||||
pub fn print(self: *const Expr, writer: std.io.AnyWriter, depth: u32) !void {
|
||||
_ = depth;
|
||||
switch (self.data) {
|
||||
.e_string => |str| {
|
||||
try writer.print("(string: \"{s}\")", .{bun.strings.formatEscapes(str.data, .{ .str_encoding = .utf8, .quote_char = '"' })});
|
||||
},
|
||||
else => {
|
||||
try writer.print("(expr: {s})", .{@tagName(self.data)});
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub const empty = Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = logger.Loc.Empty };
|
||||
|
||||
pub fn isAnonymousNamed(expr: Expr) bool {
|
||||
@@ -6961,6 +7060,7 @@ pub const Ast = struct {
|
||||
/// We use this with `commonjs_at_runtime` to re-export CommonJS
|
||||
has_commonjs_export_names: bool = false,
|
||||
import_meta_ref: Ref = Ref.None,
|
||||
is_from_typescript: bool = false,
|
||||
|
||||
pub const CommonJSNamedExport = struct {
|
||||
loc_ref: LocRef,
|
||||
@@ -7136,6 +7236,7 @@ pub const BundledAst = struct {
|
||||
.has_lazy_export = this.flags.has_lazy_export,
|
||||
.commonjs_module_exports_assigned_deoptimized = this.flags.commonjs_module_exports_assigned_deoptimized,
|
||||
.directive = if (this.flags.has_explicit_use_strict_directive) "use strict" else null,
|
||||
.is_from_typescript = false,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -8499,6 +8499,7 @@ fn NewParser_(
|
||||
else => {
|
||||
if (comptime get_metadata) {
|
||||
const find_result = p.findSymbol(logger.Loc.Empty, p.lexer.identifier) catch unreachable;
|
||||
find_result.ref.getSymbol(&p.symbols).use_count_as_type += 1;
|
||||
result.* = .{ .m_identifier = find_result.ref };
|
||||
}
|
||||
|
||||
@@ -19106,6 +19107,7 @@ fn NewParser_(
|
||||
const name = p.loadNameFromRef(item.name.ref.?);
|
||||
const symbol = try p.findSymbol(item.alias_loc, name);
|
||||
const ref = symbol.ref;
|
||||
if (is_typescript_enabled) ref.getSymbol(&p.symbols).use_count_as_type += 1;
|
||||
|
||||
if (p.symbols.items[ref.innerIndex()].kind == .unbound) {
|
||||
// Silently strip exports of non-local symbols in TypeScript, since
|
||||
@@ -21413,6 +21415,7 @@ fn NewParser_(
|
||||
.m_identifier => |ref| {
|
||||
p.recordUsage(ref);
|
||||
if (p.is_import_item.contains(ref)) {
|
||||
ref.getSymbol(&p.symbols).use_count_as_type += 1; // if this identifier is only used from ``
|
||||
return p.maybeDefinedHelper(p.newExpr(
|
||||
E.ImportIdentifier{
|
||||
.ref = ref,
|
||||
@@ -23908,6 +23911,7 @@ fn NewParser_(
|
||||
.top_level_await_keyword = p.top_level_await_keyword,
|
||||
.commonjs_named_exports = p.commonjs_named_exports,
|
||||
.has_commonjs_export_names = p.has_commonjs_export_names,
|
||||
.is_from_typescript = is_typescript_enabled,
|
||||
|
||||
.hashbang = hashbang,
|
||||
|
||||
|
||||
@@ -23,6 +23,8 @@ const Ref = @import("ast/base.zig").Ref;
|
||||
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
|
||||
const FeatureFlags = bun.FeatureFlags;
|
||||
const FileDescriptorType = bun.FileDescriptor;
|
||||
const analyze_transpiled_module = @import("analyze_transpiled_module.zig");
|
||||
const ModuleInfo = analyze_transpiled_module.ModuleInfo;
|
||||
|
||||
const expect = std.testing.expect;
|
||||
const ImportKind = importRecord.ImportKind;
|
||||
@@ -700,9 +702,38 @@ fn NewPrinter(
|
||||
binary_expression_stack: std.ArrayList(BinaryExpressionVisitor) = undefined,
|
||||
|
||||
was_lazy_export: bool = false,
|
||||
module_info: if (!may_have_module_info) void else ?*ModuleInfo = if (!may_have_module_info) {} else null,
|
||||
|
||||
const Printer = @This();
|
||||
|
||||
const may_have_module_info = is_bun_platform and !rewrite_esm_to_cjs;
|
||||
const TopLevelAndIsExport = if (!may_have_module_info) struct {} else struct {
|
||||
is_export: bool = false,
|
||||
is_top_level: ?ModuleInfo.VarKind = null,
|
||||
};
|
||||
const TopLevel = if (!may_have_module_info) struct {
|
||||
pub inline fn init(_: IsTopLevel) @This() {
|
||||
return .{};
|
||||
}
|
||||
pub inline fn subVar(_: @This()) @This() {
|
||||
return .{};
|
||||
}
|
||||
} else struct {
|
||||
is_top_level: IsTopLevel = .no,
|
||||
pub inline fn init(is_top_level: IsTopLevel) @This() {
|
||||
return .{ .is_top_level = is_top_level };
|
||||
}
|
||||
pub fn subVar(self: @This()) @This() {
|
||||
if (self.is_top_level == .no) return @This().init(.no);
|
||||
return @This().init(.var_only);
|
||||
}
|
||||
};
|
||||
const IsTopLevel = enum { yes, var_only, no };
|
||||
inline fn moduleInfo(self: *const @This()) ?*ModuleInfo {
|
||||
if (!may_have_module_info) return null;
|
||||
return self.module_info;
|
||||
}
|
||||
|
||||
/// When Printer is used as a io.Writer, this represents it's error type, aka nothing.
|
||||
pub const Error = error{};
|
||||
|
||||
@@ -1008,6 +1039,7 @@ fn NewPrinter(
|
||||
printInternalBunImport(p, import, @TypeOf("globalThis.Bun.jest(__filename)"), "globalThis.Bun.jest(__filename)");
|
||||
},
|
||||
else => {
|
||||
if (p.moduleInfo()) |mi| mi.flags.contains_import_meta = true;
|
||||
printInternalBunImport(p, import, @TypeOf("globalThis.Bun.jest(import.meta.path)"), "globalThis.Bun.jest(import.meta.path)");
|
||||
},
|
||||
}
|
||||
@@ -1023,7 +1055,9 @@ fn NewPrinter(
|
||||
|
||||
if (import.star_name_loc != null) {
|
||||
p.print("var ");
|
||||
p.printSymbol(import.namespace_ref);
|
||||
const name = p.renamer.nameForSymbol(import.namespace_ref);
|
||||
p.printIdentifier(name);
|
||||
if (p.moduleInfo()) |mi| mi.addVar(mi.str(name) catch bun.outOfMemory(), .declared) catch bun.outOfMemory();
|
||||
p.printSpace();
|
||||
p.print("=");
|
||||
p.printSpaceBeforeIdentifier();
|
||||
@@ -1046,7 +1080,9 @@ fn NewPrinter(
|
||||
|
||||
if (import.default_name) |default| {
|
||||
p.print("var ");
|
||||
p.printSymbol(default.ref.?);
|
||||
const default_name = p.renamer.nameForSymbol(default.ref.?);
|
||||
p.printIdentifier(default_name);
|
||||
if (p.moduleInfo()) |mi| mi.addVar(mi.str(default_name) catch bun.outOfMemory(), .declared) catch bun.outOfMemory();
|
||||
if (comptime Statement == void) {
|
||||
p.@"print = "();
|
||||
p.printRequireOrImportExpr(
|
||||
@@ -1084,7 +1120,11 @@ fn NewPrinter(
|
||||
}
|
||||
}
|
||||
|
||||
p.printClauseItemAs(item, .@"var");
|
||||
p.printVarClauseItem(item);
|
||||
if (p.moduleInfo()) |mi| {
|
||||
const varname = p.renamer.nameForSymbol(item.name.ref.?);
|
||||
mi.addVar(mi.str(varname) catch bun.outOfMemory(), .declared) catch bun.outOfMemory();
|
||||
}
|
||||
}
|
||||
|
||||
if (!import.is_single_line) {
|
||||
@@ -1156,26 +1196,26 @@ fn NewPrinter(
|
||||
switch (stmt.data) {
|
||||
.s_block => |block| {
|
||||
p.printSpace();
|
||||
p.printBlock(stmt.loc, block.stmts, block.close_brace_loc);
|
||||
p.printBlock(stmt.loc, block.stmts, block.close_brace_loc, .{});
|
||||
p.printNewline();
|
||||
},
|
||||
else => {
|
||||
p.printNewline();
|
||||
p.indent();
|
||||
p.printStmt(stmt) catch unreachable;
|
||||
p.printStmt(stmt, .{}) catch bun.outOfMemory();
|
||||
p.unindent();
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn printBlockBody(p: *Printer, stmts: []const Stmt) void {
|
||||
pub fn printBlockBody(p: *Printer, stmts: []const Stmt, tlmtlo: TopLevel) void {
|
||||
for (stmts) |stmt| {
|
||||
p.printSemicolonIfNeeded();
|
||||
p.printStmt(stmt) catch unreachable;
|
||||
p.printStmt(stmt, tlmtlo) catch bun.outOfMemory();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn printBlock(p: *Printer, loc: logger.Loc, stmts: []const Stmt, close_brace_loc: ?logger.Loc) void {
|
||||
pub fn printBlock(p: *Printer, loc: logger.Loc, stmts: []const Stmt, close_brace_loc: ?logger.Loc, tlmtlo: TopLevel) void {
|
||||
p.addSourceMapping(loc);
|
||||
p.print("{");
|
||||
if (stmts.len > 0) {
|
||||
@@ -1183,7 +1223,7 @@ fn NewPrinter(
|
||||
p.printNewline();
|
||||
|
||||
p.indent();
|
||||
p.printBlockBody(stmts);
|
||||
p.printBlockBody(stmts, tlmtlo);
|
||||
p.unindent();
|
||||
|
||||
p.printIndent();
|
||||
@@ -1202,8 +1242,8 @@ fn NewPrinter(
|
||||
p.printNewline();
|
||||
|
||||
p.indent();
|
||||
p.printBlockBody(prepend);
|
||||
p.printBlockBody(stmts);
|
||||
p.printBlockBody(prepend, .{});
|
||||
p.printBlockBody(stmts, .{});
|
||||
p.unindent();
|
||||
p.needs_semicolon = false;
|
||||
|
||||
@@ -1211,7 +1251,7 @@ fn NewPrinter(
|
||||
p.print("}");
|
||||
}
|
||||
|
||||
pub fn printDecls(p: *Printer, comptime keyword: string, decls_: []G.Decl, flags: ExprFlag.Set) void {
|
||||
pub fn printDecls(p: *Printer, comptime keyword: string, decls_: []G.Decl, flags: ExprFlag.Set, tlm: TopLevelAndIsExport) void {
|
||||
p.print(keyword);
|
||||
p.printSpace();
|
||||
var decls = decls_;
|
||||
@@ -1319,7 +1359,7 @@ fn NewPrinter(
|
||||
.is_single_line = true,
|
||||
};
|
||||
const binding = Binding.init(&b_object, target_e_dot.target.loc);
|
||||
p.printBinding(binding);
|
||||
p.printBinding(binding, tlm);
|
||||
}
|
||||
|
||||
p.printWhitespacer(ws(" = "));
|
||||
@@ -1335,7 +1375,7 @@ fn NewPrinter(
|
||||
}
|
||||
|
||||
{
|
||||
p.printBinding(decls[0].binding);
|
||||
p.printBinding(decls[0].binding, tlm);
|
||||
|
||||
if (decls[0].value) |value| {
|
||||
p.printWhitespacer(ws(" = "));
|
||||
@@ -1347,7 +1387,7 @@ fn NewPrinter(
|
||||
p.print(",");
|
||||
p.printSpace();
|
||||
|
||||
p.printBinding(decl.binding);
|
||||
p.printBinding(decl.binding, tlm);
|
||||
|
||||
if (decl.value) |value| {
|
||||
p.printWhitespacer(ws(" = "));
|
||||
@@ -1421,7 +1461,7 @@ fn NewPrinter(
|
||||
p.print("...");
|
||||
}
|
||||
|
||||
p.printBinding(arg.binding);
|
||||
p.printBinding(arg.binding, .{});
|
||||
|
||||
if (arg.default) |default| {
|
||||
p.printWhitespacer(ws(" = "));
|
||||
@@ -1437,7 +1477,7 @@ fn NewPrinter(
|
||||
pub fn printFunc(p: *Printer, func: G.Fn) void {
|
||||
p.printFnArgs(func.open_parens_loc, func.args, func.flags.contains(.has_rest_arg), false);
|
||||
p.printSpace();
|
||||
p.printBlock(func.body.loc, func.body.stmts, null);
|
||||
p.printBlock(func.body.loc, func.body.stmts, null, .{});
|
||||
}
|
||||
|
||||
pub fn printClass(p: *Printer, class: G.Class) void {
|
||||
@@ -1461,7 +1501,7 @@ fn NewPrinter(
|
||||
if (item.kind == .class_static_block) {
|
||||
p.print("static");
|
||||
p.printSpace();
|
||||
p.printBlock(item.class_static_block.?.loc, item.class_static_block.?.stmts.slice(), null);
|
||||
p.printBlock(item.class_static_block.?.loc, item.class_static_block.?.stmts.slice(), null, .{});
|
||||
p.printNewline();
|
||||
continue;
|
||||
}
|
||||
@@ -1702,12 +1742,14 @@ fn NewPrinter(
|
||||
if (module_type == .cjs) {
|
||||
p.print("Promise.resolve(globalThis.Bun.jest(__filename))");
|
||||
} else {
|
||||
if (p.moduleInfo()) |mi| mi.flags.contains_import_meta = true;
|
||||
p.print("Promise.resolve(globalThis.Bun.jest(import.meta.path))");
|
||||
}
|
||||
} else if (record.kind == .require) {
|
||||
if (module_type == .cjs) {
|
||||
p.print("globalThis.Bun.jest(__filename)");
|
||||
} else {
|
||||
if (p.moduleInfo()) |mi| mi.flags.contains_import_meta = true;
|
||||
p.print("globalThis.Bun.jest(import.meta.path)");
|
||||
}
|
||||
}
|
||||
@@ -1929,45 +1971,28 @@ fn NewPrinter(
|
||||
p.print(quote);
|
||||
}
|
||||
|
||||
fn printClauseItem(p: *Printer, item: js_ast.ClauseItem) void {
|
||||
return printClauseItemAs(p, item, .import);
|
||||
}
|
||||
|
||||
fn printExportClauseItem(p: *Printer, item: js_ast.ClauseItem) void {
|
||||
return printClauseItemAs(p, item, .@"export");
|
||||
}
|
||||
|
||||
fn printClauseItemAs(p: *Printer, item: js_ast.ClauseItem, comptime as: @Type(.enum_literal)) void {
|
||||
const name = p.renamer.nameForSymbol(item.name.ref.?);
|
||||
|
||||
if (comptime as == .import) {
|
||||
if (strings.eql(name, item.alias)) {
|
||||
p.printIdentifier(name);
|
||||
} else {
|
||||
p.printClauseAlias(item.alias);
|
||||
p.print(" as ");
|
||||
p.addSourceMapping(item.alias_loc);
|
||||
p.printIdentifier(name);
|
||||
}
|
||||
} else if (comptime as == .@"var") {
|
||||
p.printIdentifier(name);
|
||||
|
||||
if (!strings.eql(name, item.alias)) {
|
||||
p.print(" as ");
|
||||
p.addSourceMapping(item.alias_loc);
|
||||
p.printClauseAlias(item.alias);
|
||||
}
|
||||
}
|
||||
|
||||
if (!strings.eql(name, item.alias)) {
|
||||
p.print(":");
|
||||
p.printSpace();
|
||||
fn printVarClauseItem(p: *Printer, item: js_ast.ClauseItem) void {
|
||||
const name = p.renamer.nameForSymbol(item.name.ref.?);
|
||||
|
||||
p.printClauseAlias(item.alias);
|
||||
|
||||
if (!strings.eql(name, item.alias)) {
|
||||
p.print(":");
|
||||
p.printSpace();
|
||||
|
||||
p.printIdentifier(name);
|
||||
}
|
||||
} else if (comptime as == .@"export") {
|
||||
p.printIdentifier(name);
|
||||
|
||||
if (!strings.eql(name, item.alias)) {
|
||||
p.print(" as ");
|
||||
p.addSourceMapping(item.alias_loc);
|
||||
p.printClauseAlias(item.alias);
|
||||
}
|
||||
} else {
|
||||
@compileError("Unknown as");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2081,6 +2106,7 @@ fn NewPrinter(
|
||||
p.print(".importMeta");
|
||||
} else if (!p.options.import_meta_ref.isValid()) {
|
||||
// Most of the time, leave it in there
|
||||
if (p.moduleInfo()) |mi| mi.flags.contains_import_meta = true;
|
||||
p.print("import.meta");
|
||||
} else {
|
||||
// Note: The bundler will not hit this code path. The bundler will replace
|
||||
@@ -2106,6 +2132,7 @@ fn NewPrinter(
|
||||
p.printSpaceBeforeIdentifier();
|
||||
p.addSourceMapping(expr.loc);
|
||||
}
|
||||
if (p.moduleInfo()) |mi| mi.flags.contains_import_meta = true;
|
||||
p.print("import.meta.main");
|
||||
} else {
|
||||
bun.debugAssert(p.options.module_type != .internal_bake_dev);
|
||||
@@ -2608,7 +2635,7 @@ fn NewPrinter(
|
||||
}
|
||||
|
||||
if (!wasPrinted) {
|
||||
p.printBlock(e.body.loc, e.body.stmts, null);
|
||||
p.printBlock(e.body.loc, e.body.stmts, null, .{});
|
||||
}
|
||||
|
||||
if (wrap) {
|
||||
@@ -3581,13 +3608,19 @@ fn NewPrinter(
|
||||
p.printExpr(initial, .comma, ExprFlag.None());
|
||||
}
|
||||
|
||||
pub fn printBinding(p: *Printer, binding: Binding) void {
|
||||
pub fn printBinding(p: *Printer, binding: Binding, tlm: TopLevelAndIsExport) void {
|
||||
switch (binding.data) {
|
||||
.b_missing => {},
|
||||
.b_identifier => |b| {
|
||||
p.printSpaceBeforeIdentifier();
|
||||
p.addSourceMapping(binding.loc);
|
||||
p.printSymbol(b.ref);
|
||||
const local_name = p.renamer.nameForSymbol(b.ref);
|
||||
p.printIdentifier(local_name);
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
if (tlm.is_top_level) |vk| mi.addVar(mi.str(local_name) catch bun.outOfMemory(), vk) catch bun.outOfMemory();
|
||||
if (tlm.is_export) mi.addExportInfoLocal(mi.str(local_name) catch bun.outOfMemory(), mi.str(local_name) catch bun.outOfMemory()) catch bun.outOfMemory();
|
||||
}
|
||||
},
|
||||
.b_array => |b| {
|
||||
p.print("[");
|
||||
@@ -3614,7 +3647,7 @@ fn NewPrinter(
|
||||
p.print("...");
|
||||
}
|
||||
|
||||
p.printBinding(item.binding);
|
||||
p.printBinding(item.binding, tlm);
|
||||
|
||||
p.maybePrintDefaultBindingValue(item);
|
||||
|
||||
@@ -3661,7 +3694,7 @@ fn NewPrinter(
|
||||
p.print("]:");
|
||||
p.printSpace();
|
||||
|
||||
p.printBinding(property.value);
|
||||
p.printBinding(property.value, tlm);
|
||||
p.maybePrintDefaultBindingValue(property);
|
||||
continue;
|
||||
}
|
||||
@@ -3686,6 +3719,10 @@ fn NewPrinter(
|
||||
switch (property.value.data) {
|
||||
.b_identifier => |id| {
|
||||
if (str.eql(string, p.renamer.nameForSymbol(id.ref))) {
|
||||
if (p.moduleInfo()) |mi| {
|
||||
if (tlm.is_top_level) |vk| mi.addVar(mi.str(str.data) catch bun.outOfMemory(), vk) catch bun.outOfMemory();
|
||||
if (tlm.is_export) mi.addExportInfoLocal(mi.str(str.data) catch bun.outOfMemory(), mi.str(str.data) catch bun.outOfMemory()) catch bun.outOfMemory();
|
||||
}
|
||||
p.maybePrintDefaultBindingValue(property);
|
||||
continue;
|
||||
}
|
||||
@@ -3703,6 +3740,11 @@ fn NewPrinter(
|
||||
switch (property.value.data) {
|
||||
.b_identifier => |id| {
|
||||
if (strings.utf16EqlString(str.slice16(), p.renamer.nameForSymbol(id.ref))) {
|
||||
if (p.moduleInfo()) |mi| {
|
||||
const str8 = str.slice(p.options.allocator);
|
||||
if (tlm.is_top_level) |vk| mi.addVar(mi.str(str8) catch bun.outOfMemory(), vk) catch bun.outOfMemory();
|
||||
if (tlm.is_export) mi.addExportInfoLocal(mi.str(str8) catch bun.outOfMemory(), mi.str(str8) catch bun.outOfMemory()) catch bun.outOfMemory();
|
||||
}
|
||||
p.maybePrintDefaultBindingValue(property);
|
||||
continue;
|
||||
}
|
||||
@@ -3722,7 +3764,7 @@ fn NewPrinter(
|
||||
p.printSpace();
|
||||
}
|
||||
|
||||
p.printBinding(property.value);
|
||||
p.printBinding(property.value, tlm);
|
||||
p.maybePrintDefaultBindingValue(property);
|
||||
}
|
||||
|
||||
@@ -3748,7 +3790,7 @@ fn NewPrinter(
|
||||
}
|
||||
}
|
||||
|
||||
pub fn printStmt(p: *Printer, stmt: Stmt) !void {
|
||||
pub fn printStmt(p: *Printer, stmt: Stmt, tlmtlo: TopLevel) !void {
|
||||
const prev_stmt_tag = p.prev_stmt_tag;
|
||||
|
||||
defer {
|
||||
@@ -3783,9 +3825,15 @@ fn NewPrinter(
|
||||
}
|
||||
|
||||
p.addSourceMapping(name.loc);
|
||||
p.printSymbol(nameRef);
|
||||
const local_name = p.renamer.nameForSymbol(nameRef);
|
||||
p.printIdentifier(local_name);
|
||||
p.printFunc(s.func);
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
if (tlmtlo.is_top_level == .yes) try mi.addVar(try mi.str(local_name), .lexical);
|
||||
if (s.func.flags.contains(.is_export)) try mi.addExportInfoLocal(try mi.str(local_name), try mi.str(local_name));
|
||||
}
|
||||
|
||||
// if (rewrite_esm_to_cjs and s.func.flags.contains(.is_export)) {
|
||||
// p.printSemicolonAfterStatement();
|
||||
// p.print("var ");
|
||||
@@ -3813,6 +3861,7 @@ fn NewPrinter(
|
||||
p.printSpaceBeforeIdentifier();
|
||||
p.addSourceMapping(stmt.loc);
|
||||
const nameRef = s.class.class_name.?.ref.?;
|
||||
const nameStr = p.renamer.nameForSymbol(nameRef);
|
||||
if (s.is_export) {
|
||||
if (!rewrite_esm_to_cjs) {
|
||||
p.print("export ");
|
||||
@@ -3821,9 +3870,14 @@ fn NewPrinter(
|
||||
|
||||
p.print("class ");
|
||||
p.addSourceMapping(s.class.class_name.?.loc);
|
||||
p.printSymbol(nameRef);
|
||||
p.printIdentifier(nameStr);
|
||||
p.printClass(s.class);
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
if (s.is_export) try mi.addExportInfoLocal(try mi.str(nameStr), try mi.str(nameStr));
|
||||
if (tlmtlo.is_top_level == .yes) try mi.addVar(try mi.str(nameStr), .lexical);
|
||||
}
|
||||
|
||||
if (rewrite_esm_to_cjs and s.is_export) {
|
||||
p.printSemicolonAfterStatement();
|
||||
} else {
|
||||
@@ -3859,6 +3913,10 @@ fn NewPrinter(
|
||||
p.export_default_start = p.writer.written;
|
||||
p.printExpr(expr, .comma, ExprFlag.None());
|
||||
p.printSemicolonAfterStatement();
|
||||
if (p.moduleInfo()) |mi| {
|
||||
try mi.addExportInfoLocal(try mi.str("default"), .star_default);
|
||||
try mi.addVar(.star_default, .lexical);
|
||||
}
|
||||
return;
|
||||
},
|
||||
|
||||
@@ -3879,20 +3937,26 @@ fn NewPrinter(
|
||||
p.maybePrintSpace();
|
||||
}
|
||||
|
||||
if (func.func.name) |name| {
|
||||
p.printSymbol(name.ref.?);
|
||||
}
|
||||
const func_name: ?[]const u8 = if (func.func.name) |f| p.renamer.nameForSymbol(f.ref.?) else null;
|
||||
if (func_name) |f| p.printIdentifier(f);
|
||||
|
||||
p.printFunc(func.func);
|
||||
|
||||
p.printNewline();
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
const local_name: analyze_transpiled_module.StringID = if (func_name) |f| try mi.str(f) else .star_default;
|
||||
try mi.addExportInfoLocal(try mi.str("default"), local_name);
|
||||
try mi.addVar(local_name, .lexical);
|
||||
}
|
||||
},
|
||||
.s_class => |class| {
|
||||
p.printSpaceBeforeIdentifier();
|
||||
|
||||
if (class.class.class_name) |name| {
|
||||
const class_name: ?[]const u8 = if (class.class.class_name) |f| p.renamer.nameForSymbol(f.ref.?) else null;
|
||||
if (class_name) |name| {
|
||||
p.print("class ");
|
||||
p.printSymbol(name.ref orelse Output.panic("Internal error: Expected class to have a name ref\n{any}", .{class}));
|
||||
p.printIdentifier(name);
|
||||
} else {
|
||||
p.print("class");
|
||||
}
|
||||
@@ -3900,6 +3964,12 @@ fn NewPrinter(
|
||||
p.printClass(class.class);
|
||||
|
||||
p.printNewline();
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
const local_name: analyze_transpiled_module.StringID = if (class_name) |f| try mi.str(f) else .star_default;
|
||||
try mi.addExportInfoLocal(try mi.str("default"), local_name);
|
||||
try mi.addVar(local_name, .lexical);
|
||||
}
|
||||
},
|
||||
else => {
|
||||
Output.panic("Internal error: unexpected export default stmt data {any}", .{s});
|
||||
@@ -3918,19 +3988,28 @@ fn NewPrinter(
|
||||
p.printSpaceBeforeIdentifier();
|
||||
p.addSourceMapping(stmt.loc);
|
||||
|
||||
if (s.alias != null)
|
||||
p.printWhitespacer(comptime ws("export *").append(" as "))
|
||||
else
|
||||
p.printWhitespacer(comptime ws("export * from "));
|
||||
|
||||
if (s.alias) |alias| {
|
||||
p.printWhitespacer(comptime ws("export *").append(" as "));
|
||||
p.printClauseAlias(alias.original_name);
|
||||
p.print(" ");
|
||||
p.printWhitespacer(ws("from "));
|
||||
} else {
|
||||
p.printWhitespacer(comptime ws("export * from "));
|
||||
}
|
||||
|
||||
p.printImportRecordPath(p.importRecord(s.import_record_index));
|
||||
const irp = try p.fmtImportRecordPath(p.importRecord(s.import_record_index));
|
||||
p.printStringLiteralUTF8(irp, false);
|
||||
p.printSemicolonAfterStatement();
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
const irp_id = try mi.str(irp);
|
||||
try mi.requestModule(irp_id, .none);
|
||||
if (s.alias) |alias| {
|
||||
try mi.addExportInfoNamespace(try mi.str(alias.original_name), irp_id);
|
||||
} else {
|
||||
try mi.addExportInfoStar(irp_id);
|
||||
}
|
||||
}
|
||||
},
|
||||
.s_export_clause => |s| {
|
||||
if (rewrite_esm_to_cjs) {
|
||||
@@ -4080,7 +4159,12 @@ fn NewPrinter(
|
||||
p.printIndent();
|
||||
}
|
||||
|
||||
const name = p.renamer.nameForSymbol(item.name.ref.?);
|
||||
p.printExportClauseItem(item);
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
try mi.addExportInfoLocal(try mi.str(item.alias), try mi.str(name));
|
||||
}
|
||||
}
|
||||
|
||||
if (!s.is_single_line) {
|
||||
@@ -4133,29 +4217,30 @@ fn NewPrinter(
|
||||
}
|
||||
|
||||
p.printWhitespacer(ws("} from "));
|
||||
p.printImportRecordPath(import_record);
|
||||
const irp = try p.fmtImportRecordPath(import_record);
|
||||
p.printStringLiteralUTF8(irp, false);
|
||||
p.printSemicolonAfterStatement();
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
const irp_id = try mi.str(irp);
|
||||
try mi.requestModule(irp_id, .none);
|
||||
for (s.items) |item| {
|
||||
// how could this be renamed, it's in `export from`?
|
||||
const name = p.renamer.nameForSymbol(item.name.ref.?);
|
||||
try mi.addExportInfoIndirect(try mi.str(item.alias), try mi.str(name), irp_id);
|
||||
}
|
||||
}
|
||||
},
|
||||
.s_local => |s| {
|
||||
p.printIndent();
|
||||
p.printSpaceBeforeIdentifier();
|
||||
p.addSourceMapping(stmt.loc);
|
||||
switch (s.kind) {
|
||||
.k_const => {
|
||||
p.printDeclStmt(s.is_export, "const", s.decls.slice());
|
||||
},
|
||||
.k_let => {
|
||||
p.printDeclStmt(s.is_export, "let", s.decls.slice());
|
||||
},
|
||||
.k_var => {
|
||||
p.printDeclStmt(s.is_export, "var", s.decls.slice());
|
||||
},
|
||||
.k_using => {
|
||||
p.printDeclStmt(s.is_export, "using", s.decls.slice());
|
||||
},
|
||||
.k_await_using => {
|
||||
p.printDeclStmt(s.is_export, "await using", s.decls.slice());
|
||||
},
|
||||
.k_const => p.printDeclStmt(s.is_export, "const", s.decls.slice(), tlmtlo),
|
||||
.k_let => p.printDeclStmt(s.is_export, "let", s.decls.slice(), tlmtlo),
|
||||
.k_var => p.printDeclStmt(s.is_export, "var", s.decls.slice(), tlmtlo),
|
||||
.k_using => p.printDeclStmt(s.is_export, "using", s.decls.slice(), tlmtlo),
|
||||
.k_await_using => p.printDeclStmt(s.is_export, "await using", s.decls.slice(), tlmtlo),
|
||||
}
|
||||
},
|
||||
.s_if => |s| {
|
||||
@@ -4170,13 +4255,13 @@ fn NewPrinter(
|
||||
switch (s.body.data) {
|
||||
.s_block => {
|
||||
p.printSpace();
|
||||
p.printBlock(s.body.loc, s.body.data.s_block.stmts, s.body.data.s_block.close_brace_loc);
|
||||
p.printBlock(s.body.loc, s.body.data.s_block.stmts, s.body.data.s_block.close_brace_loc, .{});
|
||||
p.printSpace();
|
||||
},
|
||||
else => {
|
||||
p.printNewline();
|
||||
p.indent();
|
||||
p.printStmt(s.body) catch unreachable;
|
||||
p.printStmt(s.body, .{}) catch unreachable;
|
||||
p.printSemicolonIfNeeded();
|
||||
p.unindent();
|
||||
p.printIndent();
|
||||
@@ -4264,7 +4349,7 @@ fn NewPrinter(
|
||||
p.addSourceMapping(stmt.loc);
|
||||
p.print("try");
|
||||
p.printSpace();
|
||||
p.printBlock(s.body_loc, s.body, null);
|
||||
p.printBlock(s.body_loc, s.body, null, tlmtlo.subVar());
|
||||
|
||||
if (s.catch_) |catch_| {
|
||||
p.printSpace();
|
||||
@@ -4273,18 +4358,18 @@ fn NewPrinter(
|
||||
if (catch_.binding) |binding| {
|
||||
p.printSpace();
|
||||
p.print("(");
|
||||
p.printBinding(binding);
|
||||
p.printBinding(binding, .{});
|
||||
p.print(")");
|
||||
}
|
||||
p.printSpace();
|
||||
p.printBlock(catch_.body_loc, catch_.body, null);
|
||||
p.printBlock(catch_.body_loc, catch_.body, null, tlmtlo.subVar());
|
||||
}
|
||||
|
||||
if (s.finally) |finally| {
|
||||
p.printSpace();
|
||||
p.print("finally");
|
||||
p.printSpace();
|
||||
p.printBlock(finally.loc, finally.stmts, null);
|
||||
p.printBlock(finally.loc, finally.stmts, null, tlmtlo.subVar());
|
||||
}
|
||||
|
||||
p.printNewline();
|
||||
@@ -4351,7 +4436,7 @@ fn NewPrinter(
|
||||
switch (c.body[0].data) {
|
||||
.s_block => {
|
||||
p.printSpace();
|
||||
p.printBlock(c.body[0].loc, c.body[0].data.s_block.stmts, c.body[0].data.s_block.close_brace_loc);
|
||||
p.printBlock(c.body[0].loc, c.body[0].data.s_block.stmts, c.body[0].data.s_block.close_brace_loc, .{});
|
||||
p.printNewline();
|
||||
continue;
|
||||
},
|
||||
@@ -4363,7 +4448,7 @@ fn NewPrinter(
|
||||
p.indent();
|
||||
for (c.body) |st| {
|
||||
p.printSemicolonIfNeeded();
|
||||
p.printStmt(st) catch unreachable;
|
||||
p.printStmt(st, .{}) catch unreachable;
|
||||
}
|
||||
p.unindent();
|
||||
}
|
||||
@@ -4422,7 +4507,7 @@ fn NewPrinter(
|
||||
p.print(",");
|
||||
p.printSpace();
|
||||
for (s.items, 0..) |item, i| {
|
||||
p.printClauseItemAs(item, .@"var");
|
||||
p.printVarClauseItem(item);
|
||||
|
||||
if (i < s.items.len - 1) {
|
||||
p.print(",");
|
||||
@@ -4432,7 +4517,7 @@ fn NewPrinter(
|
||||
}
|
||||
} else {
|
||||
for (s.items, 0..) |item, i| {
|
||||
p.printClauseItemAs(item, .@"var");
|
||||
p.printVarClauseItem(item);
|
||||
|
||||
if (i < s.items.len - 1) {
|
||||
p.print(",");
|
||||
@@ -4464,10 +4549,19 @@ fn NewPrinter(
|
||||
|
||||
var item_count: usize = 0;
|
||||
|
||||
const import_record_path = try p.fmtImportRecordPath(record);
|
||||
|
||||
if (s.default_name) |name| {
|
||||
p.print(" ");
|
||||
p.printSymbol(name.ref.?);
|
||||
const local_name = p.renamer.nameForSymbol(name.ref.?);
|
||||
p.printIdentifier(local_name);
|
||||
item_count += 1;
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
const local_name_id = try mi.str(local_name);
|
||||
try mi.addVar(local_name_id, .lexical);
|
||||
try mi.addImportInfoSingle(try mi.str(import_record_path), try mi.str("default"), local_name_id, false);
|
||||
}
|
||||
}
|
||||
|
||||
if (s.items.len > 0) {
|
||||
@@ -4496,7 +4590,22 @@ fn NewPrinter(
|
||||
p.printIndent();
|
||||
}
|
||||
|
||||
p.printClauseItem(item);
|
||||
const local_name = p.renamer.nameForSymbol(item.name.ref.?);
|
||||
if (strings.eql(local_name, item.alias)) {
|
||||
p.printIdentifier(local_name);
|
||||
} else {
|
||||
p.printClauseAlias(item.alias);
|
||||
p.print(" as ");
|
||||
p.addSourceMapping(item.alias_loc);
|
||||
p.printIdentifier(local_name);
|
||||
}
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
const symbol = p.symbols().get(item.name.ref.?).?;
|
||||
const local_name_id = try mi.str(local_name);
|
||||
try mi.addVar(local_name_id, .lexical);
|
||||
try mi.addImportInfoSingle(try mi.str(import_record_path), try mi.str(item.alias), local_name_id, symbol.use_count_as_type > 0 and symbol.use_count_estimate <= symbol.use_count_as_type);
|
||||
}
|
||||
}
|
||||
|
||||
if (!s.is_single_line) {
|
||||
@@ -4516,10 +4625,17 @@ fn NewPrinter(
|
||||
}
|
||||
p.printSpace();
|
||||
|
||||
const local_name = p.renamer.nameForSymbol(s.namespace_ref);
|
||||
|
||||
p.printWhitespacer(ws("* as"));
|
||||
p.print(" ");
|
||||
p.printSymbol(s.namespace_ref);
|
||||
p.printIdentifier(local_name);
|
||||
item_count += 1;
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
try mi.addVar(try mi.str(local_name), .lexical);
|
||||
try mi.addImportInfoNamespace(try mi.str(import_record_path), try mi.str(local_name));
|
||||
}
|
||||
}
|
||||
|
||||
if (item_count > 0) {
|
||||
@@ -4531,34 +4647,48 @@ fn NewPrinter(
|
||||
p.printWhitespacer(ws("from "));
|
||||
}
|
||||
|
||||
p.printImportRecordPath(record);
|
||||
p.printStringLiteralUTF8(import_record_path, false);
|
||||
|
||||
// backwards compatibility: previously, we always stripped type
|
||||
if (comptime is_bun_platform) if (record.loader) |loader| switch (loader) {
|
||||
.jsx => p.printWhitespacer(ws(" with { type: \"jsx\" }")),
|
||||
.js => p.printWhitespacer(ws(" with { type: \"js\" }")),
|
||||
.ts => p.printWhitespacer(ws(" with { type: \"ts\" }")),
|
||||
.tsx => p.printWhitespacer(ws(" with { type: \"tsx\" }")),
|
||||
.css => p.printWhitespacer(ws(" with { type: \"css\" }")),
|
||||
.file => p.printWhitespacer(ws(" with { type: \"file\" }")),
|
||||
.json => p.printWhitespacer(ws(" with { type: \"json\" }")),
|
||||
.jsonc => p.printWhitespacer(ws(" with { type: \"jsonc\" }")),
|
||||
.toml => p.printWhitespacer(ws(" with { type: \"toml\" }")),
|
||||
.wasm => p.printWhitespacer(ws(" with { type: \"wasm\" }")),
|
||||
.napi => p.printWhitespacer(ws(" with { type: \"napi\" }")),
|
||||
.base64 => p.printWhitespacer(ws(" with { type: \"base64\" }")),
|
||||
.dataurl => p.printWhitespacer(ws(" with { type: \"dataurl\" }")),
|
||||
.text => p.printWhitespacer(ws(" with { type: \"text\" }")),
|
||||
.bunsh => p.printWhitespacer(ws(" with { type: \"sh\" }")),
|
||||
// sqlite_embedded only relevant when bundling
|
||||
.sqlite, .sqlite_embedded => p.printWhitespacer(ws(" with { type: \"sqlite\" }")),
|
||||
.html => p.printWhitespacer(ws(" with { type: \"html\" }")),
|
||||
var fetch_parameters: ModuleInfo.FetchParameters = .none;
|
||||
if (comptime is_bun_platform) if (record.loader) |loader| {
|
||||
const str: []const u8 = switch (loader) {
|
||||
.jsx => "jsx",
|
||||
.js => "js",
|
||||
.ts => "ts",
|
||||
.tsx => "tsx",
|
||||
.css => "css",
|
||||
.file => "file",
|
||||
.json => "json",
|
||||
.jsonc => "jsonc",
|
||||
.toml => "toml",
|
||||
.wasm => "wasm",
|
||||
.napi => "napi",
|
||||
.base64 => "base64",
|
||||
.dataurl => "dataurl",
|
||||
.text => "text",
|
||||
.bunsh => "sh",
|
||||
// sqlite_embedded only relevant when bundling
|
||||
.sqlite, .sqlite_embedded => "sqlite",
|
||||
.html => "html",
|
||||
};
|
||||
p.printWhitespacer(ws(" with { type: \""));
|
||||
p.print(str);
|
||||
p.printWhitespacer(ws("\" }"));
|
||||
|
||||
if (p.moduleInfo()) |mi| {
|
||||
fetch_parameters = switch (loader) {
|
||||
.json => .json,
|
||||
else => ModuleInfo.FetchParameters.hostDefined(try mi.str(str)),
|
||||
};
|
||||
}
|
||||
};
|
||||
if (p.moduleInfo()) |mi| try mi.requestModule(try mi.str(import_record_path), fetch_parameters);
|
||||
p.printSemicolonAfterStatement();
|
||||
},
|
||||
.s_block => |s| {
|
||||
p.printIndent();
|
||||
p.printBlock(stmt.loc, s.stmts, s.close_brace_loc);
|
||||
p.printBlock(stmt.loc, s.stmts, s.close_brace_loc, .{});
|
||||
p.printNewline();
|
||||
},
|
||||
.s_debugger => {
|
||||
@@ -4642,22 +4772,16 @@ fn NewPrinter(
|
||||
p.print("module.exports");
|
||||
}
|
||||
|
||||
pub fn printImportRecordPath(p: *Printer, import_record: *const ImportRecord) void {
|
||||
if (comptime is_json)
|
||||
unreachable;
|
||||
pub fn fmtImportRecordPath(p: *Printer, import_record: *const ImportRecord) ![]const u8 {
|
||||
if (comptime is_json) unreachable;
|
||||
|
||||
const quote = bestQuoteCharForString(u8, import_record.path.text, false);
|
||||
if (import_record.print_namespace_in_path and !import_record.path.isFile()) {
|
||||
p.print(quote);
|
||||
p.printStringCharactersUTF8(import_record.path.namespace, quote);
|
||||
p.print(":");
|
||||
p.printStringCharactersUTF8(import_record.path.text, quote);
|
||||
p.print(quote);
|
||||
} else {
|
||||
p.print(quote);
|
||||
p.printStringCharactersUTF8(import_record.path.text, quote);
|
||||
p.print(quote);
|
||||
return try std.fmt.allocPrint(p.options.allocator, "{s}:{s}", .{ import_record.path.namespace, import_record.path.text });
|
||||
}
|
||||
return import_record.path.text;
|
||||
}
|
||||
pub fn printImportRecordPath(p: *Printer, import_record: *const ImportRecord) void {
|
||||
p.printStringLiteralUTF8(p.fmtImportRecordPath(import_record) catch bun.outOfMemory(), false);
|
||||
}
|
||||
|
||||
pub fn printBundledImport(p: *Printer, record: ImportRecord, s: *S.Import) void {
|
||||
@@ -4833,21 +4957,11 @@ fn NewPrinter(
|
||||
},
|
||||
.s_local => |s| {
|
||||
switch (s.kind) {
|
||||
.k_var => {
|
||||
p.printDecls("var", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
|
||||
},
|
||||
.k_let => {
|
||||
p.printDecls("let", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
|
||||
},
|
||||
.k_const => {
|
||||
p.printDecls("const", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
|
||||
},
|
||||
.k_using => {
|
||||
p.printDecls("using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
|
||||
},
|
||||
.k_await_using => {
|
||||
p.printDecls("await using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
|
||||
},
|
||||
.k_var => p.printDecls("var", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{}),
|
||||
.k_let => p.printDecls("let", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{}),
|
||||
.k_const => p.printDecls("const", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{}),
|
||||
.k_using => p.printDecls("using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{}),
|
||||
.k_await_using => p.printDecls("await using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }), .{}),
|
||||
}
|
||||
},
|
||||
// for(;)
|
||||
@@ -4869,7 +4983,7 @@ fn NewPrinter(
|
||||
switch (s.yes.data) {
|
||||
.s_block => |block| {
|
||||
p.printSpace();
|
||||
p.printBlock(s.yes.loc, block.stmts, block.close_brace_loc);
|
||||
p.printBlock(s.yes.loc, block.stmts, block.close_brace_loc, .{});
|
||||
|
||||
if (s.no != null) {
|
||||
p.printSpace();
|
||||
@@ -4884,7 +4998,7 @@ fn NewPrinter(
|
||||
p.printNewline();
|
||||
|
||||
p.indent();
|
||||
p.printStmt(s.yes) catch unreachable;
|
||||
p.printStmt(s.yes, .{}) catch bun.outOfMemory();
|
||||
p.unindent();
|
||||
p.needs_semicolon = false;
|
||||
|
||||
@@ -4899,7 +5013,7 @@ fn NewPrinter(
|
||||
} else {
|
||||
p.printNewline();
|
||||
p.indent();
|
||||
p.printStmt(s.yes) catch unreachable;
|
||||
p.printStmt(s.yes, .{}) catch bun.outOfMemory();
|
||||
p.unindent();
|
||||
|
||||
if (s.no != null) {
|
||||
@@ -4918,7 +5032,7 @@ fn NewPrinter(
|
||||
switch (no_block.data) {
|
||||
.s_block => {
|
||||
p.printSpace();
|
||||
p.printBlock(no_block.loc, no_block.data.s_block.stmts, null);
|
||||
p.printBlock(no_block.loc, no_block.data.s_block.stmts, null, .{});
|
||||
p.printNewline();
|
||||
},
|
||||
.s_if => {
|
||||
@@ -4927,7 +5041,7 @@ fn NewPrinter(
|
||||
else => {
|
||||
p.printNewline();
|
||||
p.indent();
|
||||
p.printStmt(no_block) catch unreachable;
|
||||
p.printStmt(no_block, .{}) catch bun.outOfMemory();
|
||||
p.unindent();
|
||||
},
|
||||
}
|
||||
@@ -5008,11 +5122,14 @@ fn NewPrinter(
|
||||
}
|
||||
}
|
||||
|
||||
pub fn printDeclStmt(p: *Printer, is_export: bool, comptime keyword: string, decls: []G.Decl) void {
|
||||
pub fn printDeclStmt(p: *Printer, is_export: bool, comptime keyword: string, decls: []G.Decl, tlmtlo: TopLevel) void {
|
||||
if (!rewrite_esm_to_cjs and is_export) {
|
||||
p.print("export ");
|
||||
}
|
||||
p.printDecls(keyword, decls, ExprFlag.None());
|
||||
p.printDecls(keyword, decls, ExprFlag.None(), if (may_have_module_info) .{
|
||||
.is_export = is_export and !rewrite_esm_to_cjs,
|
||||
.is_top_level = if (comptime std.mem.eql(u8, keyword, "var")) if (tlmtlo.is_top_level == .no) null else .declared else if (tlmtlo.is_top_level == .yes) .lexical else null,
|
||||
} else .{});
|
||||
p.printSemicolonAfterStatement();
|
||||
if (rewrite_esm_to_cjs and is_export and decls.len > 0) {
|
||||
for (decls) |decl| {
|
||||
@@ -5057,7 +5174,7 @@ fn NewPrinter(
|
||||
p.print("}");
|
||||
},
|
||||
else => {
|
||||
p.printBinding(decl.binding);
|
||||
p.printBinding(decl.binding, .{});
|
||||
},
|
||||
}
|
||||
p.print(")");
|
||||
@@ -5388,7 +5505,7 @@ fn NewPrinter(
|
||||
p.printFnArgs(func.open_parens_loc, func.args, func.flags.contains(.has_rest_arg), false);
|
||||
p.print(" => {\n");
|
||||
p.indent();
|
||||
p.printBlockBody(func.body.stmts);
|
||||
p.printBlockBody(func.body.stmts, .init(.no));
|
||||
p.unindent();
|
||||
p.printIndent();
|
||||
p.print("}, ");
|
||||
@@ -5874,6 +5991,7 @@ pub fn printAst(
|
||||
comptime ascii_only: bool,
|
||||
opts: Options,
|
||||
comptime generate_source_map: bool,
|
||||
module_info: ?*@import("analyze_transpiled_module.zig").ModuleInfo,
|
||||
) !usize {
|
||||
var renamer: rename.Renamer = undefined;
|
||||
var no_op_renamer: rename.NoOpRenamer = undefined;
|
||||
@@ -5973,6 +6091,11 @@ pub fn printAst(
|
||||
}
|
||||
}
|
||||
printer.was_lazy_export = tree.has_lazy_export;
|
||||
if (module_info != null) bun.assert(PrinterType.may_have_module_info);
|
||||
const have_module_info = PrinterType.may_have_module_info and module_info != null;
|
||||
|
||||
if (have_module_info) printer.module_info = module_info;
|
||||
|
||||
var bin_stack_heap = std.heap.stackFallback(1024, bun.default_allocator);
|
||||
printer.binary_expression_stack = std.ArrayList(PrinterType.BinaryExpressionVisitor).init(bin_stack_heap.get());
|
||||
defer printer.binary_expression_stack.clearAndFree();
|
||||
@@ -5996,12 +6119,16 @@ pub fn printAst(
|
||||
//
|
||||
// This is never a symbol collision because `uses_require_ref` means
|
||||
// `require` must be an unbound variable.
|
||||
if (printer.moduleInfo()) |mi| {
|
||||
mi.flags.contains_import_meta = true;
|
||||
try mi.addVar(try mi.str("require"), .declared);
|
||||
}
|
||||
printer.print("var {require}=import.meta;");
|
||||
}
|
||||
|
||||
for (tree.parts.slice()) |part| {
|
||||
for (part.stmts) |stmt| {
|
||||
try printer.printStmt(stmt);
|
||||
try printer.printStmt(stmt, PrinterType.TopLevel.init(.yes));
|
||||
if (printer.writer.getError()) {} else |err| {
|
||||
return err;
|
||||
}
|
||||
@@ -6009,24 +6136,25 @@ pub fn printAst(
|
||||
}
|
||||
}
|
||||
|
||||
if (comptime FeatureFlags.runtime_transpiler_cache and generate_source_map) {
|
||||
if (have_module_info) {
|
||||
try module_info.?.finalize();
|
||||
}
|
||||
|
||||
var sourcemap: []const u8 = "";
|
||||
if (comptime generate_source_map) {
|
||||
if (opts.source_map_handler) |handler| {
|
||||
const source_maps_chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten());
|
||||
if (opts.runtime_transpiler_cache) |cache| {
|
||||
cache.put(printer.writer.ctx.getWritten(), source_maps_chunk.buffer.list.items);
|
||||
}
|
||||
sourcemap = source_maps_chunk.buffer.list.items;
|
||||
|
||||
try handler.onSourceMapChunk(source_maps_chunk, source.*);
|
||||
} else {
|
||||
if (opts.runtime_transpiler_cache) |cache| {
|
||||
cache.put(printer.writer.ctx.getWritten(), "");
|
||||
}
|
||||
}
|
||||
} else if (comptime generate_source_map) {
|
||||
if (opts.source_map_handler) |handler| {
|
||||
try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source.*);
|
||||
}
|
||||
}
|
||||
if (opts.runtime_transpiler_cache) |cache| {
|
||||
var srlz_res = std.ArrayList(u8).init(bun.default_allocator);
|
||||
defer srlz_res.deinit();
|
||||
if (have_module_info) try module_info.?.asDeserialized().serialize(srlz_res.writer());
|
||||
cache.put(printer.writer.ctx.getWritten(), sourcemap, srlz_res.items);
|
||||
}
|
||||
|
||||
try printer.writer.done();
|
||||
|
||||
@@ -6187,7 +6315,7 @@ pub fn printWithWriterAndPlatform(
|
||||
|
||||
for (parts) |part| {
|
||||
for (part.stmts) |stmt| {
|
||||
printer.printStmt(stmt) catch |err| {
|
||||
printer.printStmt(stmt, PrinterType.TopLevel.init(.yes)) catch |err| {
|
||||
return .{ .err = err };
|
||||
};
|
||||
if (printer.writer.getError()) {} else |err| {
|
||||
@@ -6258,7 +6386,7 @@ pub fn printCommonJS(
|
||||
|
||||
for (tree.parts.slice()) |part| {
|
||||
for (part.stmts) |stmt| {
|
||||
try printer.printStmt(stmt);
|
||||
try printer.printStmt(stmt, PrinterType.TopLevel.init(.yes));
|
||||
if (printer.writer.getError()) {} else |err| {
|
||||
return err;
|
||||
}
|
||||
|
||||
@@ -6681,7 +6681,7 @@ fn QuoteEscapeFormat(comptime flags: QuoteEscapeFormatFlags) type {
|
||||
data: []const u8,
|
||||
|
||||
pub fn format(self: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
|
||||
try bun.js_printer.writePreQuotedString(self.data, @TypeOf(writer), writer, flags.quote_char, false, flags.json, flags.str_encoding);
|
||||
try bun.js_printer.writePreQuotedString(self.data, @TypeOf(writer), writer, flags.quote_char, flags.ascii_only, flags.json, flags.str_encoding);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -821,12 +821,15 @@ pub const Transpiler = struct {
|
||||
comptime enable_source_map: bool,
|
||||
source_map_context: ?js_printer.SourceMapHandler,
|
||||
runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache,
|
||||
module_info: ?*@import("analyze_transpiled_module.zig").ModuleInfo,
|
||||
) !usize {
|
||||
const tracer = bun.tracy.traceNamed(@src(), if (enable_source_map) "JSPrinter.printWithSourceMap" else "JSPrinter.print");
|
||||
defer tracer.end();
|
||||
|
||||
const symbols = js_ast.Symbol.NestedList.init(&[_]js_ast.Symbol.List{ast.symbols});
|
||||
|
||||
if (module_info != null) bun.assert(format == .esm or format == .esm_ascii);
|
||||
|
||||
return switch (format) {
|
||||
.cjs => try js_printer.printCommonJS(
|
||||
Writer,
|
||||
@@ -876,6 +879,7 @@ pub const Transpiler = struct {
|
||||
.mangled_props = null,
|
||||
},
|
||||
enable_source_map,
|
||||
module_info,
|
||||
),
|
||||
.esm_ascii => switch (transpiler.options.target.isBun()) {
|
||||
inline else => |is_bun| try js_printer.printAst(
|
||||
@@ -912,6 +916,7 @@ pub const Transpiler = struct {
|
||||
.mangled_props = null,
|
||||
},
|
||||
enable_source_map,
|
||||
module_info,
|
||||
),
|
||||
},
|
||||
else => unreachable,
|
||||
@@ -934,6 +939,7 @@ pub const Transpiler = struct {
|
||||
false,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -944,6 +950,7 @@ pub const Transpiler = struct {
|
||||
writer: Writer,
|
||||
comptime format: js_printer.Format,
|
||||
handler: js_printer.SourceMapHandler,
|
||||
module_info: ?*@import("analyze_transpiled_module.zig").ModuleInfo,
|
||||
) !usize {
|
||||
if (bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_SOURCE_MAPS")) {
|
||||
return transpiler.printWithSourceMapMaybe(
|
||||
@@ -955,6 +962,7 @@ pub const Transpiler = struct {
|
||||
false,
|
||||
handler,
|
||||
result.runtime_transpiler_cache,
|
||||
module_info,
|
||||
);
|
||||
}
|
||||
return transpiler.printWithSourceMapMaybe(
|
||||
@@ -966,6 +974,7 @@ pub const Transpiler = struct {
|
||||
true,
|
||||
handler,
|
||||
result.runtime_transpiler_cache,
|
||||
module_info,
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
432
test/js/bun/typescript/type-export.test.ts
Normal file
432
test/js/bun/typescript/type-export.test.ts
Normal file
@@ -0,0 +1,432 @@
|
||||
import { describe, test, expect } from "bun:test" with { todo: "true" };
|
||||
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
|
||||
|
||||
/*
|
||||
Potential solutions:
|
||||
- Option 1: Make a fake export `export const my_string = undefined;` and make sure it is not enumerable
|
||||
- Option 2: In b.ts, make javascriptcore skip re-exporting something if it is not found rather than SyntaxErroring
|
||||
- this won't work because in the import {} export {} case, the error will be on the import
|
||||
*/
|
||||
|
||||
const a_file = `
|
||||
export type my_string = "1";
|
||||
|
||||
export type my_value = "2";
|
||||
export const my_value = "2";
|
||||
|
||||
export const my_only = "3";
|
||||
`;
|
||||
const a_no_value = `
|
||||
export type my_string = "1";
|
||||
export type my_value = "2";
|
||||
export const my_only = "3";
|
||||
`;
|
||||
const a_with_value = `
|
||||
export type my_string = "1";
|
||||
export const my_value = "2";
|
||||
`;
|
||||
const b_files = [
|
||||
{
|
||||
name: "export from",
|
||||
value: `export { my_string, my_value, my_only } from "./a.ts";`,
|
||||
},
|
||||
{
|
||||
name: "import then export",
|
||||
value: `
|
||||
import { my_string, my_value, my_only } from "./a.ts";
|
||||
export { my_string, my_value, my_only };
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "export star",
|
||||
value: `export * from "./a.ts";`,
|
||||
},
|
||||
{
|
||||
name: "export merge",
|
||||
value: `export * from "./a_no_value.ts"; export * from "./a_with_value.ts"`,
|
||||
},
|
||||
];
|
||||
const c_files = [
|
||||
{ name: "require", value: `console.log(JSON.stringify(require("./b")));` },
|
||||
{ name: "import star", value: `import * as b from "./b"; console.log(JSON.stringify(b));` },
|
||||
{ name: "await import", value: `console.log(JSON.stringify(await import("./b")));` },
|
||||
{
|
||||
name: "import individual",
|
||||
value: `
|
||||
import { my_string, my_value, my_only } from "./b";
|
||||
console.log(JSON.stringify({ my_only, my_value }));
|
||||
`,
|
||||
},
|
||||
];
|
||||
for (const b_file of b_files) {
|
||||
describe(`re-export with ${b_file.name}`, () => {
|
||||
for (const c_file of c_files) {
|
||||
describe(`import with ${c_file.name}`, () => {
|
||||
const dir = tempDirWithFiles("type-export", {
|
||||
"a.ts": a_file,
|
||||
"b.ts": b_file.value,
|
||||
"c.ts": c_file.value,
|
||||
|
||||
"a_no_value.ts": a_no_value,
|
||||
"a_with_value.ts": a_with_value,
|
||||
});
|
||||
|
||||
const runAndVerify = (filename: string) => {
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), "run", filename],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "inherit"],
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(JSON.parse(result.stdout.toString().trim())).toEqual({ my_value: "2", my_only: "3" });
|
||||
};
|
||||
|
||||
test("run", () => {
|
||||
runAndVerify("c.ts");
|
||||
});
|
||||
|
||||
test("build", async () => {
|
||||
const build_result = await Bun.build({
|
||||
entrypoints: [dir + "/c.ts"],
|
||||
outdir: dir + "/dist",
|
||||
});
|
||||
expect(build_result.success).toBe(true);
|
||||
runAndVerify(dir + "/dist/c.js");
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Importing a name the target module does not provide at runtime must fail
// with JSC's "Export named ... not found" SyntaxError. Three variants:
//   - "none": module is empty;
//   - "default with same name": a default export shares the name, so the
//     error additionally hints "Did you mean to import default?";
//   - "type": a type-only export must NOT satisfy a runtime import.
describe("import not found", () => {
  for (const [ccase, target_value, name] of [
    [``, /SyntaxError: Export named 'not_found' not found in module '[^']+?'\./, "none"],
    [
      `export default function not_found() {};`,
      /SyntaxError: Export named 'not_found' not found in module '[^']+?'\. Did you mean to import default\?/,
      "default with same name",
    ],
    [
      `export type not_found = "not_found";`,
      /SyntaxError: Export named 'not_found' not found in module '[^']+?'\./,
      "type",
    ],
  ] as const)
    // NOTE: the for-body is this single test() call (no braces).
    test(`${name}`, () => {
      const dir = tempDirWithFiles("type-export", {
        "a.ts": ccase,
        "b.ts": /*js*/ `
          import { not_found } from "./a";
          console.log(not_found);
        `,
        "nf.ts": "",
      });

      const result = Bun.spawnSync({
        cmd: [bunExe(), "run", "b.ts"],
        cwd: dir,
        env: bunEnv,
        stdio: ["inherit", "pipe", "pipe"],
      });

      // stderr must match the expected SyntaxError pattern; nothing may be
      // printed to stdout, and the process must exit with failure.
      expect(result.stderr?.toString().trim()).toMatch(target_value);
      expect({
        exitCode: result.exitCode,
        stdout: result.stdout?.toString().trim(),
      }).toEqual({
        exitCode: 1,
        stdout: "",
      });
    });
});
|
||||
|
||||
test("js file type export", () => {
|
||||
const dir = tempDirWithFiles("type-export", {
|
||||
"a.js": "export {not_found};",
|
||||
});
|
||||
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), "a.js"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toInclude('error: "not_found" is not declared in this file');
|
||||
expect(result.exitCode).toBe(1);
|
||||
});
|
||||
test("js file type import", () => {
|
||||
const dir = tempDirWithFiles("type-import", {
|
||||
"b.js": "import {type_only} from './ts.ts';",
|
||||
"ts.ts": "export type type_only = 'type_only';",
|
||||
});
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), "b.js"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toInclude("Export named 'type_only' not found in module '");
|
||||
expect(result.stderr?.toString().trim()).not.toInclude("Did you mean to import default?");
|
||||
expect(result.exitCode).toBe(1);
|
||||
});
|
||||
test("js file type import with default export", () => {
|
||||
const dir = tempDirWithFiles("type-import", {
|
||||
"b.js": "import {type_only} from './ts.ts';",
|
||||
"ts.ts": "export type type_only = 'type_only'; export default function type_only() {};",
|
||||
});
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), "b.js"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toInclude("Export named 'type_only' not found in module '");
|
||||
expect(result.stderr?.toString().trim()).toInclude("Did you mean to import default?");
|
||||
expect(result.exitCode).toBe(1);
|
||||
});
|
||||
|
||||
test("js file with through export", () => {
|
||||
const dir = tempDirWithFiles("type-import", {
|
||||
"b.js": "export {type_only} from './ts.ts';",
|
||||
"ts.ts": "export type type_only = 'type_only'; export default function type_only() {};",
|
||||
});
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), "b.js"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toInclude("SyntaxError: export 'type_only' not found in './ts.ts'\n");
|
||||
expect(result.exitCode).toBe(1);
|
||||
});
|
||||
|
||||
test("js file with through export 2", () => {
|
||||
const dir = tempDirWithFiles("type-import", {
|
||||
"b.js": "import {type_only} from './ts.ts'; export {type_only};",
|
||||
"ts.ts": "export type type_only = 'type_only'; export default function type_only() {};",
|
||||
});
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), "b.js"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toInclude("SyntaxError: export 'type_only' not found in './ts.ts'\n");
|
||||
expect(result.exitCode).toBe(1);
|
||||
});
|
||||
|
||||
// Exporting two bindings under one name from a single module is a syntax
// error in every variant tested here: duplicated through-exports, a
// through-export colliding with a direct export, a duplicated local export,
// and duplicated namespace re-exports.
describe("through export merge", () => {
  // this isn't allowed, even in typescript (tsc emits "Duplicate identifier 'value'.")
  for (const fmt of ["js", "ts"]) {
    describe(fmt, () => {
      for (const [name, mode] of [
        ["through", "export {value} from './b'; export {value} from './c';"],
        ["direct", "export {value} from './b'; export const value = 'abc';"],
        ["direct2", "export const value = 'abc'; export {value};"],
        ["ns", "export * as value from './c'; export * as value from './c';"],
      ]) {
        describe(name, () => {
          // In the TS variant, b provides only a type named `value`, so the
          // collision cannot be excused by type elision.
          const dir = tempDirWithFiles("type-import", {
            ["main." + fmt]: "import {value} from './a'; console.log(value);",
            ["a." + fmt]: mode,
            ["b." + fmt]: fmt === "ts" ? "export type value = 'b';" : "",
            ["c." + fmt]: "export const value = 'c';",
          });
          // Run both the importer and the offending module directly: the
          // error text differs depending on which component reports it.
          for (const file of ["main." + fmt, "a." + fmt]) {
            test(file, () => {
              const result = Bun.spawnSync({
                cmd: [bunExe(), file],
                cwd: dir,
                env: bunEnv,
                stdio: ["inherit", "pipe", "pipe"],
              });
              expect(result.stderr?.toString().trim()).toInclude(
                file === "a." + fmt
                  ? 'error: Multiple exports with the same name "value"\n' // bun's syntax error
                  : "SyntaxError: Cannot export a duplicate name 'value'.\n", // jsc's syntax error
              );
              expect(result.exitCode).toBe(1);
            });
          }
        });
      }
    });
  }
});
|
||||
}
|
||||
});
|
||||
|
||||
// TODO:
|
||||
test("check ownkeys from a star import", () => {
|
||||
const dir = tempDirWithFiles("ownkeys-star-import", {
|
||||
["main.ts"]: `
|
||||
import * as ns from './a';
|
||||
console.log(JSON.stringify({
|
||||
keys: Object.keys(ns),
|
||||
ns,
|
||||
has_sometype: Object.hasOwn(ns, 'sometype'),
|
||||
}));
|
||||
`,
|
||||
["a.ts"]: "export * from './b'; export {sometype} from './b';",
|
||||
["b.ts"]: "export const value = 'b'; export const anotherValue = 'another'; export type sometype = 'sometype';",
|
||||
});
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), "main.ts"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toBe("");
|
||||
expect(JSON.parse(result.stdout?.toString().trim())).toEqual({
|
||||
keys: ["anotherValue", "value"],
|
||||
ns: {
|
||||
anotherValue: "another",
|
||||
value: "b",
|
||||
},
|
||||
has_sometype: false,
|
||||
});
|
||||
expect(result.exitCode).toBe(0);
|
||||
});
|
||||
test("check commonjs", () => {
|
||||
const dir = tempDirWithFiles("commonjs", {
|
||||
["main.ts"]: "const {my_value, my_type} = require('./a'); console.log(my_value, my_type);",
|
||||
["a.ts"]: "module.exports = require('./b');",
|
||||
["b.ts"]: "export const my_value = 'my_value'; export type my_type = 'my_type';",
|
||||
});
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), "main.ts"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toBe("");
|
||||
expect(result.stdout?.toString().trim()).toBe("my_value undefined");
|
||||
expect(result.exitCode).toBe(0);
|
||||
});
|
||||
test("check merge", () => {
|
||||
const dir = tempDirWithFiles("merge", {
|
||||
["main.ts"]: "import {value} from './a'; console.log(value);",
|
||||
["a.ts"]: "export * from './b'; export * from './c';",
|
||||
["b.ts"]: "export const value = 'b';",
|
||||
["c.ts"]: "export const value = 'c';",
|
||||
});
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), "main.ts"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toInclude(
|
||||
"SyntaxError: Export named 'value' cannot be resolved due to ambiguous multiple bindings in module",
|
||||
);
|
||||
expect(result.exitCode).toBe(1);
|
||||
});
|
||||
describe("export * from './module'", () => {
|
||||
for (const fmt of ["js", "ts"]) {
|
||||
describe(fmt, () => {
|
||||
const dir = tempDirWithFiles("export-star", {
|
||||
["main." + fmt]: "import {value} from './a'; console.log(value);",
|
||||
["a." + fmt]: "export * from './b';",
|
||||
["b." + fmt]: "export const value = 'b';",
|
||||
});
|
||||
for (const file of ["main." + fmt, "a." + fmt]) {
|
||||
test(file, () => {
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), file],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toBe("");
|
||||
expect(result.exitCode).toBe(0);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
describe("export * as ns from './module'", () => {
|
||||
for (const fmt of ["js", "ts"]) {
|
||||
describe(fmt, () => {
|
||||
const dir = tempDirWithFiles("export-star-as", {
|
||||
["main." + fmt]: "import {ns} from './a'; console.log(ns.value);",
|
||||
["a." + fmt]: "export * as ns from './b';",
|
||||
["b." + fmt]: "export const value = 'b';",
|
||||
});
|
||||
for (const file of ["main." + fmt, "a." + fmt]) {
|
||||
test(file, () => {
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), file],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toBe("");
|
||||
expect(result.exitCode).toBe(0);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
describe("export type {Type} from './module'", () => {
|
||||
for (const fmt of ["ts"]) {
|
||||
describe(fmt, () => {
|
||||
const dir = tempDirWithFiles("export-type", {
|
||||
["main." + fmt]: "import {Type} from './a'; const x: Type = 'test'; console.log(x);",
|
||||
["a." + fmt]: "export type {Type} from './b';",
|
||||
["b." + fmt]: "export type Type = string;",
|
||||
});
|
||||
for (const file of ["main." + fmt, "a." + fmt]) {
|
||||
test(file, () => {
|
||||
const result = Bun.spawnSync({
|
||||
cmd: [bunExe(), file],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdio: ["inherit", "pipe", "pipe"],
|
||||
});
|
||||
expect(result.stderr?.toString().trim()).toBe("");
|
||||
expect(result.exitCode).toBe(0);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Regression test for issue #8439: an import that is referenced only in a
// decorated property's type position (plus a re-export) must not break the
// program when decorator metadata emission is enabled — the script must run
// cleanly with no stderr output.
test("import only used in decorator (#8439)", () => {
  const dir = tempDirWithFiles("import-only-used-in-decorator", {
    ["index.ts"]: /*js*/ `
      // index.ts
      import { TestInterface } from "./interface.ts";

      function Decorator(): PropertyDecorator {
        return () => {};
      }

      class TestClass {
        @Decorator()
        test?: TestInterface;
      }
      class OtherClass {
        other?: TestInterface;
      }

      export {TestInterface};
    `,
    ["interface.ts"]: "export interface TestInterface {};",
    // experimentalDecorators + emitDecoratorMetadata is the configuration
    // under which the original bug was reported.
    "tsconfig.json": JSON.stringify({
      "compilerOptions": {
        "experimentalDecorators": true,
        "emitDecoratorMetadata": true,
      },
    }),
  });
  const result = Bun.spawnSync({
    cmd: [bunExe(), "index.ts"],
    cwd: dir,
    env: bunEnv,
    stdio: ["inherit", "pipe", "pipe"],
  });
  // A clean run: no transpile or runtime errors, successful exit.
  expect(result.stderr?.toString().trim()).toBe("");
  expect(result.exitCode).toBe(0);
});
|
||||
Reference in New Issue
Block a user