diff --git a/build.zig b/build.zig
index e383b2f513..d3e9e85921 100644
--- a/build.zig
+++ b/build.zig
@@ -123,7 +123,7 @@ pub fn getOSGlibCVersion(os: OperatingSystem) ?Version {
 }
 pub fn build(b: *Build) !void {
-    std.debug.print("zig build v{s}\n", .{builtin.zig_version_string});
+    std.log.info("zig compiler v{s}", .{builtin.zig_version_string});
     b.zig_lib_dir = b.zig_lib_dir orelse b.path("src/deps/zig/lib");
diff --git a/package.json b/package.json
index c56f372c64..ffa7856496 100644
--- a/package.json
+++ b/package.json
@@ -46,6 +46,7 @@
     "lint:fix": "eslint './**/*.d.ts' --cache --fix",
     "test": "node scripts/runner.node.mjs ./build/bun-debug",
     "test:release": "node scripts/runner.node.mjs ./build-release/bun",
+    "banned": "bun packages/bun-internal-test/src/linter.ts",
     "zig-check": ".cache/zig/zig.exe build check --summary new",
     "zig-check-all": ".cache/zig/zig.exe build check-all --summary new",
     "zig-check-windows": ".cache/zig/zig.exe build check-windows --summary new",
diff --git a/packages/bun-internal-test/src/banned.json b/packages/bun-internal-test/src/banned.json
index dcb4f3c8dc..6b17b5358d 100644
--- a/packages/bun-internal-test/src/banned.json
+++ b/packages/bun-internal-test/src/banned.json
@@ -5,9 +5,13 @@
   "std.debug.assert": "Use bun.assert instead",
   "std.debug.dumpStackTrace": "Use bun.handleErrorReturnTrace or bun.crash_handler.dumpStackTrace instead",
   "std.debug.print": "Don't let this be committed",
-  "std.mem.indexOfAny": "Use bun.strings.indexAny or bun.strings.indexAnyComptime",
+  "std.mem.indexOfAny(": "Use bun.strings.indexOfAny",
   "undefined != ": "This is by definition Undefined Behavior.",
   "undefined == ": "This is by definition Undefined Behavior.",
   "bun.toFD(std.fs.cwd().fd)": "Use bun.FD.cwd()",
+  "std.StringArrayHashMapUnmanaged(": "bun.StringArrayHashMapUnmanaged has a faster `eql`",
+  "std.StringArrayHashMap(": "bun.StringArrayHashMap has a faster `eql`",
+  "std.StringHashMapUnmanaged(": "bun.StringHashMapUnmanaged has a faster `eql`",
+  "std.StringHashMap(": "bun.StringHashMap has a faster `eql`",
   "": ""
 }
diff --git a/packages/bun-internal-test/src/linter.ts b/packages/bun-internal-test/src/linter.ts
index e42dfac28f..080175115f 100644
--- a/packages/bun-internal-test/src/linter.ts
+++ b/packages/bun-internal-test/src/linter.ts
@@ -19,9 +19,7 @@ for (const [banned, suggestion] of Object.entries(BANNED)) {
   if (banned.length === 0) continue;
   // Run git grep to find occurrences of std.debug.assert in .zig files
   // .nothrow() is here since git will exit with non-zero if no matches are found.
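  // (each `git grep -n` match is printed as path:line:text, e.g. "src/foo.zig:42:    std.debug.print(...)";
  // the file and line number here are illustrative)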
- let stdout = await $`git grep -n -F "${banned}" "src/**/**.zig" | grep -v -F '//' | grep -v -F bench` - .nothrow() - .text(); + let stdout = await $`git grep -n -F "${banned}" "src/**.zig" | grep -v -F '//' | grep -v -F bench`.nothrow().text(); stdout = stdout.trim(); if (stdout.length === 0) continue; diff --git a/src/ArenaAllocator.zig b/src/ArenaAllocator.zig index fcc99ea15a..4c62038cab 100644 --- a/src/ArenaAllocator.zig +++ b/src/ArenaAllocator.zig @@ -1,5 +1,6 @@ const std = @import("std"); -const assert = @import("root").bun.assert; +const bun = @import("root").bun; +const assert = bun.assert; const mem = std.mem; const Allocator = std.mem.Allocator; diff --git a/src/Global.zig b/src/Global.zig index 2499f40461..d56b5fc269 100644 --- a/src/Global.zig +++ b/src/Global.zig @@ -171,18 +171,7 @@ pub inline fn configureAllocator(_: AllocatorConfiguration) void { // if (!config.long_running) Mimalloc.mi_option_set(Mimalloc.mi_option_reset_delay, 0); } -pub fn panic(comptime fmt: string, args: anytype) noreturn { - @setCold(true); - if (comptime Environment.isWasm) { - Output.printErrorln(fmt, args); - Output.flush(); - @panic(fmt); - } else { - Output.prettyErrorln(fmt, args); - Output.flush(); - std.debug.panic(fmt, args); - } -} +pub const panic = Output.panic; // deprecated pub fn notimpl() noreturn { @setCold(true); diff --git a/src/Progress.zig b/src/Progress.zig index da05df7f24..822feb7576 100644 --- a/src/Progress.zig +++ b/src/Progress.zig @@ -18,7 +18,7 @@ const std = @import("std"); const builtin = @import("builtin"); const windows = std.os.windows; const testing = std.testing; -const assert = std.debug.assert; +const assert = (std.debug).assert; const Progress = @This(); /// `null` if the current node (and its children) should @@ -246,7 +246,7 @@ fn clearWithHeldLock(p: *Progress, end_ptr: *usize) void { end += (std.fmt.bufPrint(p.output_buffer[end..], "\x1b[{d}D", .{p.columns_written}) catch unreachable).len; end += (std.fmt.bufPrint(p.output_buffer[end..], "\x1b[0K", .{}) catch unreachable).len; } else if (builtin.os.tag == .windows) winapi: { - std.debug.assert(p.is_windows_terminal); + assert(p.is_windows_terminal); var info: windows.CONSOLE_SCREEN_BUFFER_INFO = undefined; if (windows.kernel32.GetConsoleScreenBufferInfo(file.handle, &info) != windows.TRUE) { @@ -357,7 +357,7 @@ fn refreshWithHeldLock(self: *Progress) void { pub fn log(self: *Progress, comptime format: []const u8, args: anytype) void { const file = self.terminal orelse { - std.debug.print(format, args); + (std.debug).print(format, args); return; }; self.refresh(); diff --git a/src/StaticHashMap.zig b/src/StaticHashMap.zig index 2e8dd64949..e45a5823f7 100644 --- a/src/StaticHashMap.zig +++ b/src/StaticHashMap.zig @@ -6,7 +6,8 @@ const mem = std.mem; const math = std.math; const testing = std.testing; -const assert = @import("root").bun.assert; +const bun = @import("root").bun; +const assert = bun.assert; pub fn AutoHashMap(comptime K: type, comptime V: type, comptime max_load_percentage: comptime_int) type { return HashMap(K, V, std.hash_map.AutoContext(K), max_load_percentage); diff --git a/src/api/schema.zig b/src/api/schema.zig index 914eceb22a..8ad90cfc47 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -2863,7 +2863,7 @@ pub const Api = struct { }; pub const NpmRegistryMap = struct { - scopes: std.StringArrayHashMapUnmanaged(NpmRegistry) = .{}, + scopes: bun.StringArrayHashMapUnmanaged(NpmRegistry) = .{}, pub fn decode(reader: anytype) anyerror!NpmRegistryMap { var this = 
std.mem.zeroes(NpmRegistryMap); diff --git a/src/ast/base.zig b/src/ast/base.zig index 738ac64255..160bb28815 100644 --- a/src/ast/base.zig +++ b/src/ast/base.zig @@ -2,27 +2,10 @@ const std = @import("std"); const bun = @import("root").bun; const unicode = std.unicode; -pub const JavascriptString = []u16; -pub fn newJavascriptString(comptime text: []const u8) JavascriptString { - return unicode.utf8ToUtf16LeStringLiteral(text); -} - pub const NodeIndex = u32; pub const NodeIndexNone = 4294967293; // TODO: figure out if we actually need this -// -- original comment -- -// Files are parsed in parallel for speed. We want to allow each parser to -// generate symbol IDs that won't conflict with each other. We also want to be -// able to quickly merge symbol tables from all files into one giant symbol -// table. -// -// We can accomplish both goals by giving each symbol ID two parts: a source -// index that is unique to the parser goroutine, and an inner index that -// increments as the parser generates new symbol IDs. Then a symbol map can -// be an array of arrays indexed first by source index, then by inner index. -// The maps can be merged quickly by creating a single outer array containing -// all inner arrays from all parsed files. pub const RefHashCtx = struct { pub fn hash(_: @This(), key: Ref) u32 { @@ -44,89 +27,6 @@ pub const RefCtx = struct { } }; -/// Sets the range of bits starting at `start_bit` upto and excluding `start_bit` + `number_of_bits` -/// to be specific, if the range is N bits long, the N lower bits of `value` will be used; if any of -/// the other bits in `value` are set to 1, this function will panic. -/// -/// ```zig -/// var val: u8 = 0b10000000; -/// setBits(&val, 2, 4, 0b00001101); -/// try testing.expectEqual(@as(u8, 0b10110100), val); -/// ``` -/// -/// ## Panics -/// This method will panic if the `value` exceeds the bit range of the type of `target` -pub fn setBits( - comptime TargetType: type, - target: TargetType, - comptime start_bit: comptime_int, - comptime number_of_bits: comptime_int, - value: TargetType, -) TargetType { - const end_bit = start_bit + number_of_bits; - - comptime { - if (number_of_bits == 0) @compileError("non-zero number_of_bits must be provided"); - - if (@typeInfo(TargetType) == .Int) { - if (@typeInfo(TargetType).Int.signedness != .unsigned) { - @compileError("requires an unsigned integer, found " ++ @typeName(TargetType)); - } - if (start_bit >= @bitSizeOf(TargetType)) { - @compileError("start_bit index is out of bounds of the bit field"); - } - if (end_bit > @bitSizeOf(TargetType)) { - @compileError("start_bit + number_of_bits is out of bounds of the bit field"); - } - } else if (@typeInfo(TargetType) == .ComptimeInt) { - @compileError("comptime_int is unsupported"); - } else { - @compileError("requires an unsigned integer, found " ++ @typeName(TargetType)); - } - } - - if (comptime std.debug.runtime_safety) { - if (getBits(TargetType, value, 0, (end_bit - start_bit)) != value) @panic("value exceeds bit range"); - } - - const bitmask: TargetType = comptime blk: { - var bitmask = ~@as(TargetType, 0); - bitmask <<= (@bitSizeOf(TargetType) - end_bit); - bitmask >>= (@bitSizeOf(TargetType) - end_bit); - bitmask >>= start_bit; - bitmask <<= start_bit; - break :blk ~bitmask; - }; - - return (target & bitmask) | (value << start_bit); -} - -pub inline fn getBits(comptime TargetType: type, target: anytype, comptime start_bit: comptime_int, comptime number_of_bits: comptime_int) TargetType { - comptime { - if (number_of_bits == 0) 
@compileError("non-zero number_of_bits must be provided"); - - if (@typeInfo(TargetType) == .Int) { - if (@typeInfo(TargetType).Int.signedness != .unsigned) { - @compileError("requires an unsigned integer, found " ++ @typeName(TargetType)); - } - if (start_bit >= @bitSizeOf(TargetType)) { - @compileError("start_bit index is out of bounds of the bit field"); - } - if (start_bit + number_of_bits > @bitSizeOf(TargetType)) { - @compileError("start_bit + number_of_bits is out of bounds of the bit field"); - } - } else if (@typeInfo(TargetType) == .ComptimeInt) { - if (target < 0) { - @compileError("requires an unsigned integer, found " ++ @typeName(TargetType)); - } - } else { - @compileError("requires an unsigned integer, found " ++ @typeName(TargetType)); - } - } - - return @as(TargetType, @truncate(target >> start_bit)); -} - /// In some parts of Bun, we have many different IDs pointing to different things. /// It's easy for them to get mixed up, so we use this type to make sure we don't. /// @@ -186,6 +86,19 @@ pub const Index = packed struct(u32) { } }; +/// -- original comment from esbuild -- +/// +/// Files are parsed in parallel for speed. We want to allow each parser to +/// generate symbol IDs that won't conflict with each other. We also want to be +/// able to quickly merge symbol tables from all files into one giant symbol +/// table. +/// +/// We can accomplish both goals by giving each symbol ID two parts: a source +/// index that is unique to the parser goroutine, and an inner index that +/// increments as the parser generates new symbol IDs. Then a symbol map can +/// be an array of arrays indexed first by source index, then by inner index. +/// The maps can be merged quickly by creating a single outer array containing +/// all inner arrays from all parsed files. 
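+///
+/// For example (illustrative values): the symbol a parser creates for source #3
+/// with inner index 7 is `Ref{ .source_index = 3, .inner_index = 7 }`; a merged
+/// "array of arrays" symbol table would then be addressed as `tables[3][7]`
+/// (the name `tables` is hypothetical).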
 pub const Ref = packed struct(u64) {
     inner_index: Int = 0,
@@ -198,6 +111,9 @@ pub const Ref = packed struct(u64) {
     source_index: Int = 0,
+    /// Represents a null state without using an extra bit
+    pub const None = Ref{ .inner_index = 0, .source_index = 0, .tag = .invalid };
+
     pub inline fn isEmpty(this: Ref) bool {
         return this.asU64() == 0;
     }
@@ -222,7 +138,7 @@ pub const Ref = packed struct(u64) {
     pub fn format(ref: Ref, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
         try std.fmt.format(
             writer,
-            "Ref[{d}, {d}, {s}]",
+            "Ref[src={d}, inner={d}, .{s}]",
             .{
                 ref.sourceIndex(),
                 ref.innerIndex(),
@@ -235,9 +151,6 @@ pub const Ref = packed struct(u64) {
         return this.tag != .invalid;
     }
-    // 2 bits of padding for whatever is the parent
-    pub const None = Ref{ .inner_index = 0, .source_index = 0, .tag = .invalid };
-
     pub inline fn sourceIndex(this: Ref) Int {
         return this.source_index;
     }
@@ -253,10 +166,7 @@ pub const Ref = packed struct(u64) {
     pub fn init(inner_index: Int, source_index: usize, is_source_contents_slice: bool) Ref {
         return .{
             .inner_index = inner_index,
-
-            // if we overflow, we want a panic
-            .source_index = @as(Int, @intCast(source_index)),
-
+            .source_index = @intCast(source_index),
             .tag = if (is_source_contents_slice) .source_contents_slice else .allocated_name,
         };
     }
@@ -278,9 +188,10 @@ pub const Ref = packed struct(u64) {
         return bun.hash(&@as([8]u8, @bitCast(key.asU64())));
     }
-    pub fn eql(ref: Ref, b: Ref) bool {
-        return asU64(ref) == b.asU64();
+    pub fn eql(ref: Ref, other: Ref) bool {
+        return ref.asU64() == other.asU64();
     }
+
     pub inline fn isNull(self: Ref) bool {
         return self.tag == .invalid;
     }
diff --git a/src/bench/string-handling.zig b/src/bench/string-handling.zig
deleted file mode 100644
index 8d778e3564..0000000000
--- a/src/bench/string-handling.zig
+++ /dev/null
@@ -1,64 +0,0 @@
-const strings = bun.strings;
-const std = @import("std");
-
-pub fn main() anyerror!void {
-    const args = try std.process.argsAlloc(std.heap.c_allocator);
-    const filepath = args[args.len - 3];
-    const find = args[args.len - 2];
-    const amount = try std.fmt.parseInt(usize, args[args.len - 1], 10);
-    var file = try std.fs.cwd().openFile(filepath, .{ .mode = .read_only });
-    var contents = try file.readToEndAlloc(std.heap.c_allocator, std.math.maxInt(usize));
-    var list = try std.ArrayList(u8).initCapacity(std.heap.c_allocator, contents.len);
-    var duped = list.items.ptr[0..contents.len];
-    {
-        var timer = try std.time.Timer.start();
-        var index: usize = std.math.maxInt(usize);
-        var j: usize = 0;
-        var i: usize = 0;
-        while (j < amount) : (j += 1) {
-            i = 0;
-            strings.copy(duped, contents);
-        }
-
-        if (index == std.math.maxInt(usize)) {
-            std.debug.print("manual [{d} byte file] {s} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) });
-        } else {
-            std.debug.print("manual [{d} byte file] {s} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) });
-        }
-    }
-
-    {
-        var timer = try std.time.Timer.start();
-        var index: usize = std.math.maxInt(usize);
-        var j: usize = 0;
-        var i: usize = 0;
-        while (j < amount) : (j += 1) {
-            i = 0;
-            @memcpy(duped[0..contents.len], contents);
-        }
-
-        if (index == std.math.maxInt(usize)) {
-            std.debug.print("memcpy [{d} byte file] {s} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) });
-        } else {
-            std.debug.print("memcpy [{d} byte file] {s} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) });
-        }
-    }
-
-    {
-
var timer = try std.time.Timer.start(); - var index: usize = std.math.maxInt(usize); - var j: usize = 0; - var i: usize = 0; - while (j < amount) : (j += 1) { - i = 0; - list.clearRetainingCapacity(); - list.appendSliceAssumeCapacity(contents); - } - - if (index == std.math.maxInt(usize)) { - std.debug.print("ArrayList [{d} byte file] {s} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) }); - } else { - std.debug.print("ArrayList [{d} byte file] {s} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) }); - } - } -} diff --git a/src/bun.js/RuntimeTranspilerCache.zig b/src/bun.js/RuntimeTranspilerCache.zig index 113007648d..9b57d24fb6 100644 --- a/src/bun.js/RuntimeTranspilerCache.zig +++ b/src/bun.js/RuntimeTranspilerCache.zig @@ -1,6 +1,7 @@ -// ** Update the version number when any breaking changes are made to the cache format or to the JS parser ** -// Version 2 -> 3: "Infinity" becomes "1/0". -const expected_version = 3; +/// ** Update the version number when any breaking changes are made to the cache format or to the JS parser ** +/// Version 3: "Infinity" becomes "1/0". +/// Version 4: TypeScript enums are properly handled + more constant folding +const expected_version = 4; const bun = @import("root").bun; const std = @import("std"); @@ -203,7 +204,7 @@ pub const RuntimeTranspilerCache = struct { if (comptime bun.Environment.allow_assert) { var metadata_stream2 = std.io.fixedBufferStream(metadata_buf[0..Metadata.size]); var metadata2 = Metadata{}; - metadata2.decode(metadata_stream2.reader()) catch |err| bun.Output.panic("Metadata did not rountrip encode -> decode successfully: {s}", .{@errorName(err)}); + metadata2.decode(metadata_stream2.reader()) catch |err| bun.Output.panic("Metadata did not roundtrip encode -> decode successfully: {s}", .{@errorName(err)}); bun.assert(std.meta.eql(metadata, metadata2)); } diff --git a/src/bun.js/bindings/DoubleFormatter.cpp b/src/bun.js/bindings/DoubleFormatter.cpp index 019476f0f1..011ef4d937 100644 --- a/src/bun.js/bindings/DoubleFormatter.cpp +++ b/src/bun.js/bindings/DoubleFormatter.cpp @@ -1,5 +1,7 @@ #include "root.h" #include "wtf/dtoa.h" +#include "wtf/text/StringView.h" +#include "JavaScriptCore/JSGlobalObjectFunctions.h" #include /// Must be called with a buffer of exactly 124 @@ -9,3 +11,11 @@ extern "C" void WTF__dtoa(char* buf_124_bytes, double number) NumberToStringBuffer& buf = *reinterpret_cast(buf_124_bytes); WTF::numberToString(number, buf); } + +/// This is the equivalent of the unary '+' operator on a JS string +/// See https://262.ecma-international.org/14.0/#sec-stringtonumber +/// Grammar: https://262.ecma-international.org/14.0/#prod-StringNumericLiteral +extern "C" double JSC__jsToNumber(char* latin1_ptr, size_t len) +{ + return JSC::jsToNumber(WTF::StringView(latin1_ptr, len, true)); +} diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index d0d29ee0f7..c51732b51b 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -4186,18 +4186,18 @@ public: offset = end; // the proper singular spelling is parenthesis - auto openingParenthese = line.reverseFind('('); - auto closingParenthese = line.reverseFind(')'); + auto openingParentheses = line.reverseFind('('); + auto closingParentheses = line.reverseFind(')'); - if (openingParenthese > closingParenthese) - openingParenthese = WTF::notFound; + if (openingParentheses > closingParentheses) + openingParentheses = WTF::notFound; - if (closingParenthese == 
WTF::notFound || closingParenthese == WTF::notFound) {
+        if (openingParentheses == WTF::notFound || closingParentheses == WTF::notFound) {
             offset = stack.length();
             return false;
         }
-        auto lineInner = StringView_slice(line, openingParenthese + 1, closingParenthese);
+        auto lineInner = StringView_slice(line, openingParentheses + 1, closingParentheses);
         {
             auto marker1 = 0;
@@ -4270,7 +4270,7 @@ public:
         }
     done_block:
-        StringView functionName = line.substring(0, openingParenthese - 1);
+        StringView functionName = line.substring(0, openingParentheses - 1);
         if (functionName == ""_s) {
             functionName = StringView();
diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig
index 1438306b40..cf672895a9 100644
--- a/src/bun.js/javascript.zig
+++ b/src/bun.js/javascript.zig
@@ -3722,7 +3722,7 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime
     ctx: *Ctx,
     verbose: bool = false,
-    tombstones: std.StringHashMapUnmanaged(*bun.fs.FileSystem.RealFS.EntriesOption) = .{},
+    tombstones: bun.StringHashMapUnmanaged(*bun.fs.FileSystem.RealFS.EntriesOption) = .{},
     pub fn eventLoop(this: @This()) *EventLoopType {
         return this.ctx.eventLoop();
diff --git a/src/bun.zig b/src/bun.zig
index 97998d0ad1..6a9a1449d9 100644
--- a/src/bun.zig
+++ b/src/bun.zig
@@ -1961,10 +1961,10 @@ pub const ArenaAllocator = @import("./ArenaAllocator.zig").ArenaAllocator;
 pub const Wyhash11 = @import("./wyhash.zig").Wyhash11;
 pub const RegularExpression = @import("./bun.js/bindings/RegularExpression.zig").RegularExpression;
+
 pub inline fn assertComptime() void {
-    if (comptime !@inComptime()) {
-        @compileError("This function can only be called in comptime.");
-    }
+    var x = 0; // if you hit an error on this line, you are not in a comptime context
+    _ = &x;
 }
 const TODO_LOG = Output.scoped(.TODO, false);
@@ -1995,7 +1995,7 @@ pub inline fn toFD(fd: anytype) FileDescriptor {
         }).encode();
     } else {
         // TODO: remove intCast. we should not be casting u32 -> i32
-        // even though file descriptors are always positive, linux/mac repesents them as signed integers
+        // even though file descriptors are always positive, linux/mac represents them as signed integers
         return switch (T) {
             FileDescriptor => fd, // TODO: remove the toFD call from these places and make this a @compileError
             sys.File => fd.handle,
@@ -2584,7 +2584,7 @@ pub inline fn pathLiteral(comptime literal: anytype) *const [literal.len:0]u8 {
     var buf: [literal.len:0]u8 = undefined;
     for (literal, 0..) |c, i| {
         buf[i] = if (c == '/') '\\' else c;
-        std.debug.assert(buf[i] != 0 and buf[i] < 128);
+        assert(buf[i] != 0 and buf[i] < 128);
     }
     buf[buf.len] = 0;
     const final = buf[0..buf.len :0].*;
@@ -2599,7 +2599,7 @@ pub inline fn OSPathLiteral(comptime literal: anytype) *const [literal.len:0]OSP
     var buf: [literal.len:0]OSPathChar = undefined;
     for (literal, 0..) 
|c, i| { buf[i] = if (c == '/') '\\' else c; - std.debug.assert(buf[i] != 0 and buf[i] < 128); + assert(buf[i] != 0 and buf[i] < 128); } buf[buf.len] = 0; const final = buf[0..buf.len :0].*; diff --git a/src/bundler.zig b/src/bundler.zig index c7177379eb..56b4506ac5 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -1389,7 +1389,6 @@ pub const Bundler = struct { opts.features.allow_runtime = bundler.options.allow_runtime; opts.features.set_breakpoint_on_first_line = this_parse.set_breakpoint_on_first_line; opts.features.trim_unused_imports = bundler.options.trim_unused_imports orelse loader.isTypeScript(); - opts.features.should_fold_typescript_constant_expressions = loader.isTypeScript() or target.isBun() or bundler.options.minify_syntax; opts.features.use_import_meta_require = target.isBun(); opts.features.no_macros = bundler.options.no_macros; opts.features.runtime_transpiler_cache = this_parse.runtime_transpiler_cache; diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 02dde1721d..c79a518633 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -1370,7 +1370,7 @@ pub const BundleV2 = struct { bun.fmt.quote(source.path.namespace), }) catch {}; - // An error ocurred, prevent spinning the event loop forever + // An error occurred, prevent spinning the event loop forever _ = @atomicRmw(usize, &this.graph.parse_pending, .Sub, 1, .monotonic); }, .success => |code| { @@ -1389,7 +1389,7 @@ pub const BundleV2 = struct { log.errors += @as(usize, @intFromBool(err.kind == .err)); log.warnings += @as(usize, @intFromBool(err.kind == .warn)); - // An error ocurred, prevent spinning the event loop forever + // An error occurred, prevent spinning the event loop forever _ = @atomicRmw(usize, &this.graph.parse_pending, .Sub, 1, .monotonic); }, .pending, .consumed => unreachable, @@ -2797,11 +2797,11 @@ pub const ParseTask = struct { else null, ) catch |err| { - const source_ = &Logger.Source.initEmptyFile(log.msgs.allocator.dupe(u8, file_path.text) catch unreachable); + const source = &Logger.Source.initEmptyFile(log.msgs.allocator.dupe(u8, file_path.text) catch unreachable); switch (err) { error.ENOENT, error.FileNotFound => { log.addErrorFmt( - source_, + source, Logger.Loc.Empty, allocator, "File not found {}", @@ -2810,7 +2810,7 @@ pub const ParseTask = struct { }, else => { log.addErrorFmt( - source_, + source, Logger.Loc.Empty, allocator, "{s} reading file: {}", @@ -2875,7 +2875,6 @@ pub const ParseTask = struct { opts.features.inlining = bundler.options.minify_syntax; opts.features.minify_syntax = bundler.options.minify_syntax; opts.features.minify_identifiers = bundler.options.minify_identifiers; - opts.features.should_fold_typescript_constant_expressions = opts.features.inlining or loader.isTypeScript(); opts.features.emit_decorator_metadata = bundler.options.emit_decorator_metadata; opts.tree_shaking = if (source.index.isRuntime()) true else bundler.options.tree_shaking; @@ -3202,7 +3201,7 @@ pub const Graph = struct { use_directive_entry_points: UseDirective.List = .{}, - const_values: std.HashMapUnmanaged(Ref, Expr, Ref.HashCtx, 80) = .{}, + // const_values: std.HashMapUnmanaged(Ref, Expr, Ref.HashCtx, 80) = .{}, estimated_file_loader_count: usize = 0, @@ -3315,6 +3314,13 @@ const LinkerGraph = struct { ast: MultiArrayList(JSAst) = .{}, meta: MultiArrayList(JSMeta) = .{}, + /// We should avoid traversing all files in the bundle, because the linker + /// should be able to run a linking operation on a large bundle where only + /// a few files are 
needed (e.g. an incremental compilation scenario). This + /// holds all files that could possibly be reached through the entry points. + /// If you need to iterate over all files in the linking operation, iterate + /// over this array. This array is also sorted in a deterministic ordering + /// to help ensure deterministic builds (source indices are random). reachable_files: []Index = &[_]Index{}, stable_source_indices: []const u32 = &[_]u32{}, @@ -3324,7 +3330,10 @@ const LinkerGraph = struct { has_client_components: bool = false, has_server_components: bool = false, - const_values: std.HashMapUnmanaged(Ref, Expr, Ref.HashCtx, 80) = .{}, + /// This is for cross-module inlining of detected inlinable constants + // const_values: js_ast.Ast.ConstValuesMap = .{}, + /// This is for cross-module inlining of TypeScript enum constants + ts_enums: js_ast.Ast.TsEnumsMap = .{}, pub fn init(allocator: std.mem.Allocator, file_count: usize) !LinkerGraph { return LinkerGraph{ @@ -3708,24 +3717,40 @@ const LinkerGraph = struct { this.symbols = js_ast.Symbol.Map.initList(symbols); } + // TODO: const_values + // { + // var const_values = this.const_values; + // var count: usize = 0; + + // for (this.ast.items(.const_values)) |const_value| { + // count += const_value.count(); + // } + + // if (count > 0) { + // try const_values.ensureTotalCapacity(this.allocator, count); + // for (this.ast.items(.const_values)) |const_value| { + // for (const_value.keys(), const_value.values()) |key, value| { + // const_values.putAssumeCapacityNoClobber(key, value); + // } + // } + // } + + // this.const_values = const_values; + // } + { - var const_values = this.const_values; var count: usize = 0; - - for (this.ast.items(.const_values)) |const_value| { - count += const_value.count(); + for (this.ast.items(.ts_enums)) |ts_enums| { + count += ts_enums.count(); } - if (count > 0) { - try const_values.ensureTotalCapacity(this.allocator, @as(u32, @truncate(count))); - for (this.ast.items(.const_values)) |const_value| { - for (const_value.keys(), const_value.values()) |key, value| { - const_values.putAssumeCapacityNoClobber(key, value); + try this.ts_enums.ensureTotalCapacity(this.allocator, count); + for (this.ast.items(.ts_enums)) |ts_enums| { + for (ts_enums.keys(), ts_enums.values()) |key, value| { + this.ts_enums.putAssumeCapacityNoClobber(key, value); } } } - - this.const_values = const_values; } const in_resolved_exports: []ResolvedExports = this.meta.items(.resolved_exports); @@ -4671,7 +4696,7 @@ const LinkerContext = struct { const other_kind = exports_kind[other_file]; switch (record.kind) { - ImportKind.stmt => { + .stmt => { // Importing using ES6 syntax from a file without any ES6 syntax // causes that module to be considered CommonJS-style, even if it // doesn't have any CommonJS exports. 
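                        // e.g. `import foo from "./plain.js"` marks "./plain.js" as
                        // CommonJS when that file contains no import/export syntax of
                        // its own (the filename here is illustrative).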
@@ -4707,7 +4732,7 @@ const LinkerContext = struct { flags[other_file].wrap = .cjs; } }, - ImportKind.require => + .require => // Files that are imported with require() must be CommonJS modules { if (other_kind == .esm) { @@ -4718,7 +4743,7 @@ const LinkerContext = struct { exports_kind[other_file] = .cjs; } }, - ImportKind.dynamic => { + .dynamic => { if (!this.graph.code_splitting) { // If we're not splitting, then import() is just a require() that // returns a promise, so the imported file must be a CommonJS module @@ -5469,8 +5494,8 @@ const LinkerContext = struct { const needs_exports_variable = c.graph.meta.items(.flags)[id].needs_exports_variable; const stmts_count = - // 2 statements for every export - export_aliases.len * 2 + + // 1 statement for every export + export_aliases.len + // + 1 if there are non-zero exports @as(usize, @intFromBool(export_aliases.len > 0)) + // + 1 if we need to inject the exports variable @@ -5520,20 +5545,15 @@ const LinkerContext = struct { ); }; - const block = stmts.eat1( - js_ast.Stmt.allocate(allocator_, js_ast.S.Block, .{ - .stmts = stmts.eat1( - js_ast.Stmt.allocate( - allocator_, - js_ast.S.Return, - .{ .value = value }, - loc, - ), - ), - }, loc), - ); const fn_body = js_ast.G.FnBody{ - .stmts = block, + .stmts = stmts.eat1( + js_ast.Stmt.allocate( + allocator_, + js_ast.S.Return, + .{ .value = value }, + loc, + ), + ), .loc = loc, }; properties.appendAssumeCapacity( @@ -5683,20 +5703,20 @@ const LinkerContext = struct { const id = source_index; if (id > c.graph.meta.len) return; - var worker: *ThreadPool.Worker = ThreadPool.Worker.get(@fieldParentPtr("linker", c)); + const worker: *ThreadPool.Worker = ThreadPool.Worker.get(@fieldParentPtr("linker", c)); defer worker.unget(); // we must use this allocator here - const allocator_ = worker.allocator; + const allocator = worker.allocator; - var resolved_exports: *ResolvedExports = &c.graph.meta.items(.resolved_exports)[id]; + const resolved_exports: *ResolvedExports = &c.graph.meta.items(.resolved_exports)[id]; // Now that all exports have been resolved, sort and filter them to create // something we can iterate over later. 
- var aliases = std.ArrayList(string).initCapacity(allocator_, resolved_exports.count()) catch unreachable; + var aliases = std.ArrayList(string).initCapacity(allocator, resolved_exports.count()) catch unreachable; var alias_iter = resolved_exports.iterator(); - var imports_to_bind = c.graph.meta.items(.imports_to_bind); - var probably_typescript_type = c.graph.meta.items(.probably_typescript_type); + const imports_to_bind = c.graph.meta.items(.imports_to_bind); + const probably_typescript_type = c.graph.meta.items(.probably_typescript_type); // counting in here saves us an extra pass through the array var re_exports_count: usize = 0; @@ -5743,7 +5763,7 @@ const LinkerContext = struct { // Export creation uses "sortedAndFilteredExportAliases" so this must // come second after we fill in that array c.createExportsForFile( - allocator_, + allocator, id, resolved_exports, imports_to_bind, @@ -5752,60 +5772,103 @@ const LinkerContext = struct { ); // Each part tracks the other parts it depends on within this file - var local_dependencies = std.AutoHashMap(u32, u32).init(allocator_); + var local_dependencies = std.AutoHashMap(u32, u32).init(allocator); defer local_dependencies.deinit(); - var parts = &c.graph.ast.items(.parts)[id]; - const parts_slice: []js_ast.Part = parts.slice(); - var named_imports: *js_ast.Ast.NamedImports = &c.graph.ast.items(.named_imports)[id]; + + const parts_slice: []js_ast.Part = c.graph.ast.items(.parts)[id].slice(); + const named_imports: *js_ast.Ast.NamedImports = &c.graph.ast.items(.named_imports)[id]; + + const our_imports_to_bind = imports_to_bind[id]; outer: for (parts_slice, 0..) |*part, part_index| { + // Now that all files have been parsed, determine which property + // accesses off of imported symbols are inlined enum values and + // which ones aren't + for ( + part.import_symbol_property_uses.keys(), + part.import_symbol_property_uses.values(), + ) |ref, properties| { + const use = part.symbol_uses.getPtr(ref).?; - // TODO: inline const TypeScript enum here + // Rare path: this import is a TypeScript enum + if (our_imports_to_bind.get(ref)) |import_data| { + const import_ref = import_data.data.import_ref; + if (c.graph.symbols.get(import_ref)) |symbol| { + if (symbol.kind == .ts_enum) { + if (c.graph.ts_enums.get(import_ref)) |enum_data| { + var found_non_inlined_enum = false; - // TODO: inline function calls here + var it = properties.iterator(); + while (it.next()) |next| { + const name = next.key_ptr.*; + const prop_use = next.value_ptr; - // Inline cross-module constants - if (c.graph.const_values.count() > 0) { - // First, find any symbol usage that points to a constant value. - // This will be pretty rare. - const first_constant_i: ?usize = brk: { - for (part.symbol_uses.keys(), 0..) 
|ref, j| { - if (c.graph.const_values.contains(ref)) { - break :brk j; - } - } + if (enum_data.get(name) == null) { + found_non_inlined_enum = true; + use.count_estimate += prop_use.count_estimate; + } + } - break :brk null; - }; - if (first_constant_i) |j| { - var end_i: usize = 0; - // symbol_uses is an array - var keys = part.symbol_uses.keys()[j..]; - var values = part.symbol_uses.values()[j..]; - for (keys, values) |ref, val| { - if (c.graph.const_values.contains(ref)) { + if (!found_non_inlined_enum) { + _ = part.symbol_uses.swapRemove(ref); + } + } continue; } - - keys[end_i] = ref; - values[end_i] = val; - end_i += 1; } - part.symbol_uses.entries.len = end_i + j; + } - if (part.symbol_uses.entries.len == 0 and part.can_be_removed_if_unused) { - part.tag = .dead_due_to_inlining; - part.dependencies.len = 0; - continue :outer; - } - - part.symbol_uses.reIndex(allocator_) catch unreachable; + // Common path: this import isn't a TypeScript enum + var it = properties.valueIterator(); + while (it.next()) |prop_use| { + use.count_estimate += prop_use.count_estimate; } } - const symbol_uses = part.symbol_uses.keys(); + // TODO: inline function calls here + + // TODO: Inline cross-module constants + // if (c.graph.const_values.count() > 0) { + // // First, find any symbol usage that points to a constant value. + // // This will be pretty rare. + // const first_constant_i: ?usize = brk: { + // for (part.symbol_uses.keys(), 0..) |ref, j| { + // if (c.graph.const_values.contains(ref)) { + // break :brk j; + // } + // } + + // break :brk null; + // }; + // if (first_constant_i) |j| { + // var end_i: usize = 0; + // // symbol_uses is an array + // var keys = part.symbol_uses.keys()[j..]; + // var values = part.symbol_uses.values()[j..]; + // for (keys, values) |ref, val| { + // if (c.graph.const_values.contains(ref)) { + // continue; + // } + + // keys[end_i] = ref; + // values[end_i] = val; + // end_i += 1; + // } + // part.symbol_uses.entries.len = end_i + j; + + // if (part.symbol_uses.entries.len == 0 and part.can_be_removed_if_unused) { + // part.tag = .dead_due_to_inlining; + // part.dependencies.len = 0; + // continue :outer; + // } + + // part.symbol_uses.reIndex(allocator) catch unreachable; + // } + // } + if (false) break :outer; // this `if` is here to preserve the unused + // block label from the above commented code. // Now that we know this, we can determine cross-part dependencies - for (symbol_uses, 0..) |ref, j| { + for (part.symbol_uses.keys(), 0..) 
|ref, j| { if (comptime Environment.allow_assert) { bun.assert(part.symbol_uses.values()[j].count_estimate > 0); } @@ -5818,7 +5881,7 @@ const LinkerContext = struct { local.value_ptr.* = @as(u32, @intCast(part_index)); // note: if we crash on append, it is due to threadlocal heaps in mimalloc part.dependencies.push( - allocator_, + allocator, .{ .source_index = Index.source(source_index), .part_index = other_part_index, @@ -5829,7 +5892,7 @@ const LinkerContext = struct { // Also map from imports to parts that use them if (named_imports.getPtr(ref)) |existing| { - existing.local_parts_with_uses.push(allocator_, @as(u32, @intCast(part_index))) catch unreachable; + existing.local_parts_with_uses.push(allocator, @as(u32, @intCast(part_index))) catch unreachable; } } } @@ -6735,7 +6798,7 @@ const LinkerContext = struct { .minify_whitespace = c.options.minify_whitespace, .minify_identifiers = c.options.minify_identifiers, .minify_syntax = c.options.minify_syntax, - .const_values = c.graph.const_values, + // .const_values = c.graph.const_values, }; var cross_chunk_import_records = ImportRecord.List.initCapacity(worker.allocator, chunk.cross_chunk_imports.len) catch unreachable; @@ -7068,12 +7131,12 @@ const LinkerContext = struct { \\ "sourcesContent": [ ); - const source_indicies_for_contents = source_id_map.keys(); - if (source_indicies_for_contents.len > 0) { + const source_indices_for_contents = source_id_map.keys(); + if (source_indices_for_contents.len > 0) { j.pushStatic("\n "); - j.pushStatic(quoted_source_map_contents[source_indicies_for_contents[0]]); + j.pushStatic(quoted_source_map_contents[source_indices_for_contents[0]]); - for (source_indicies_for_contents[1..]) |index| { + for (source_indices_for_contents[1..]) |index| { j.pushStatic(",\n "); j.pushStatic(quoted_source_map_contents[index]); } @@ -7626,7 +7689,7 @@ const LinkerContext = struct { .minify_whitespace = c.options.minify_whitespace, .minify_syntax = c.options.minify_syntax, - .const_values = c.graph.const_values, + // .const_values = c.graph.const_values, }; return .{ @@ -7950,7 +8013,7 @@ const LinkerContext = struct { for (part_stmts) |stmt_| { var stmt = stmt_; - proccess_stmt: { + process_stmt: { switch (stmt.data) { .s_import => |s| { // "import * as ns from 'path'" @@ -8005,12 +8068,12 @@ const LinkerContext = struct { continue; } - break :proccess_stmt; + break :process_stmt; } // "export * from 'path'" if (!shouldStripExports) { - break :proccess_stmt; + break :process_stmt; } const record = ast.import_records.at(s.import_record_index); @@ -8901,7 +8964,9 @@ const LinkerContext = struct { .commonjs_named_exports = ast.commonjs_named_exports, .commonjs_named_exports_ref = ast.exports_ref, .commonjs_named_exports_deoptimized = flags.wrap == .cjs, - .const_values = c.graph.const_values, + // .const_values = c.graph.const_values, + .ts_enums = c.graph.ts_enums, + .minify_whitespace = c.options.minify_whitespace, .minify_syntax = c.options.minify_syntax, .module_type = c.options.output_format, @@ -9067,7 +9132,7 @@ const LinkerContext = struct { const DuplicateEntry = struct { sources: std.ArrayListUnmanaged(*Chunk) = .{}, }; - var duplicates_map: std.StringArrayHashMapUnmanaged(DuplicateEntry) = .{}; + var duplicates_map: bun.StringArrayHashMapUnmanaged(DuplicateEntry) = .{}; // Compute the final hashes of each chunk. 
This can technically be done in // parallel but it probably doesn't matter so much because we're not hashing @@ -11427,7 +11492,7 @@ pub const Chunk = struct { }; pub const OutputPiece = struct { - // layed out like this so it takes up the same amount of space as a []const u8 + // laid out like this so it takes up the same amount of space as a []const u8 data_ptr: [*]const u8 = undefined, data_len: u32 = 0, diff --git a/src/cli/pm_trusted_command.zig b/src/cli/pm_trusted_command.zig index 6c2d5fa193..0e133f0571 100644 --- a/src/cli/pm_trusted_command.zig +++ b/src/cli/pm_trusted_command.zig @@ -239,7 +239,7 @@ pub const TrustCommand = struct { try abs_node_modules_path.appendSlice(ctx.allocator, top_level_without_trailing_slash); try abs_node_modules_path.append(ctx.allocator, std.fs.path.sep); - var package_names_to_add: std.StringArrayHashMapUnmanaged(void) = .{}; + var package_names_to_add: bun.StringArrayHashMapUnmanaged(void) = .{}; var scripts_at_depth: std.AutoArrayHashMapUnmanaged(usize, std.ArrayListUnmanaged(struct { package_id: PackageID, scripts_list: Lockfile.Package.Scripts.List, diff --git a/src/compile_target.zig b/src/compile_target.zig index fb7328b909..0a8ec3ff79 100644 --- a/src/compile_target.zig +++ b/src/compile_target.zig @@ -12,7 +12,7 @@ const Output = bun.Output; const CompileTarget = @This(); os: Environment.OperatingSystem = Environment.os, -arch: Environment.Archictecture = Environment.arch, +arch: Environment.Architecture = Environment.arch, baseline: bool = !Environment.enableSIMD, version: bun.Semver.Version = .{ .major = @truncate(Environment.version.major), @@ -340,7 +340,7 @@ pub fn from(input_: []const u8) CompileTarget { const token = splitter.next() orelse break; if (token.len == 0) continue; - if (Environment.Archictecture.names.get(token)) |arch| { + if (Environment.Architecture.names.get(token)) |arch| { this.arch = arch; found_arch = true; continue; diff --git a/src/comptime_string_map.zig b/src/comptime_string_map.zig index 28eedeca42..0dad921d9f 100644 --- a/src/comptime_string_map.zig +++ b/src/comptime_string_map.zig @@ -462,44 +462,3 @@ const TestEnum2 = enum { .{ "00", .FL }, }); }; - -pub fn compareString(input: []const u8) !void { - const str = try std.heap.page_allocator.dupe(u8, input); - if (TestEnum2.map.has(str) != TestEnum2.official.has(str)) { - std.debug.panic("{s} - TestEnum2.map.has(str) ({d}) != TestEnum2.official.has(str) ({d})", .{ - str, - @intFromBool(TestEnum2.map.has(str)), - @intFromBool(TestEnum2.official.has(str)), - }); - } - - std.debug.print("For string: \"{s}\" (has a match? 
{d})\n", .{ str, @intFromBool(TestEnum2.map.has(str)) }); - - var is_eql = false; - var timer = try std.time.Timer.start(); - - for (0..99999999) |_| { - is_eql = @call(.never_inline, TestEnum2.map.has, .{str}); - } - const new = timer.lap(); - - std.debug.print("- new {}\n", .{std.fmt.fmtDuration(new)}); - - for (0..99999999) |_| { - is_eql = @call(.never_inline, TestEnum2.official.has, .{str}); - } - - const _std = timer.lap(); - - std.debug.print("- std {}\n\n", .{std.fmt.fmtDuration(_std)}); -} - -pub fn main() anyerror!void { - try compareString("naaaaaa"); - try compareString("nothinz"); - try compareString("these"); - try compareString("incommon"); - try compareString("noMatch"); - try compareString("0"); - try compareString("00"); -} diff --git a/src/crash_handler.zig b/src/crash_handler.zig index 3bdc961b9c..dc7b9aafa2 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -277,7 +277,7 @@ pub fn crashHandler( report(trace_str_buf.slice()); if (bun.auto_reload_on_crash and - // Do not reload if the panic arised FROM the reload function. + // Do not reload if the panic arose FROM the reload function. !bun.isProcessReloadInProgressOnAnotherThread()) { // attempt to prevent a double panic @@ -288,7 +288,7 @@ pub fn crashHandler( }); Output.flush(); - comptime std.debug.assert(void == @TypeOf(bun.reloadProcess(bun.default_allocator, false, true))); + comptime bun.assert(void == @TypeOf(bun.reloadProcess(bun.default_allocator, false, true))); bun.reloadProcess(bun.default_allocator, false, true); } }, @@ -621,7 +621,7 @@ else const metadata_version_line = std.fmt.comptimePrint( "Bun {s}v{s} {s} {s}{s}\n", .{ - if (bun.Environment.is_canary) "Canary " else "", + if (bun.Environment.isDebug) "Debug " else if (bun.Environment.is_canary) "Canary " else "", Global.package_json_version_with_sha, bun.Environment.os.displayString(), arch_display_string, @@ -633,7 +633,7 @@ fn handleSegfaultPosix(sig: i32, info: *const std.posix.siginfo_t, _: ?*const an const addr = switch (bun.Environment.os) { .linux => @intFromPtr(info.fields.sigfault.addr), .mac => @intFromPtr(info.addr), - else => unreachable, + else => @compileError(unreachable), }; crashHandler( diff --git a/src/deps/diffz/DiffMatchPatch.zig b/src/deps/diffz/DiffMatchPatch.zig index 405060238b..98a1d6e22c 100644 --- a/src/deps/diffz/DiffMatchPatch.zig +++ b/src/deps/diffz/DiffMatchPatch.zig @@ -691,7 +691,7 @@ fn diffLinesToChars( text2: []const u8, ) DiffError!LinesToCharsResult { var line_array = ArrayListUnmanaged([]const u8){}; - var line_hash = std.StringHashMapUnmanaged(usize){}; + var line_hash = bun.StringHashMapUnmanaged(usize){}; // e.g. line_array[4] == "Hello\n" // e.g. 
line_hash.get("Hello\n") == 4 @@ -716,7 +716,7 @@ fn diffLinesToCharsMunge( allocator: std.mem.Allocator, text: []const u8, line_array: *ArrayListUnmanaged([]const u8), - line_hash: *std.StringHashMapUnmanaged(usize), + line_hash: *bun.StringHashMapUnmanaged(usize), max_lines: usize, ) DiffError![]const u8 { var line_start: isize = 0; diff --git a/src/env.zig b/src/env.zig index f67ade6ec7..7d410c83e6 100644 --- a/src/env.zig +++ b/src/env.zig @@ -1,5 +1,6 @@ const std = @import("std"); const builtin = @import("builtin"); +const bun = @import("root").bun; pub const BuildTarget = enum { native, wasm, wasi }; pub const build_target: BuildTarget = brk: { @@ -63,26 +64,23 @@ pub const OperatingSystem = enum { // wAsM is nOt aN oPeRaTiNg SyStEm wasm, - pub const names = @import("root").bun.ComptimeStringMap( - OperatingSystem, - &.{ - .{ "windows", OperatingSystem.windows }, - .{ "win32", OperatingSystem.windows }, - .{ "win", OperatingSystem.windows }, - .{ "win64", OperatingSystem.windows }, - .{ "win_x64", OperatingSystem.windows }, - .{ "darwin", OperatingSystem.mac }, - .{ "macos", OperatingSystem.mac }, - .{ "macOS", OperatingSystem.mac }, - .{ "mac", OperatingSystem.mac }, - .{ "apple", OperatingSystem.mac }, - .{ "linux", OperatingSystem.linux }, - .{ "Linux", OperatingSystem.linux }, - .{ "linux-gnu", OperatingSystem.linux }, - .{ "gnu/linux", OperatingSystem.linux }, - .{ "wasm", OperatingSystem.wasm }, - }, - ); + pub const names = bun.ComptimeStringMap(OperatingSystem, &.{ + .{ "windows", .windows }, + .{ "win32", .windows }, + .{ "win", .windows }, + .{ "win64", .windows }, + .{ "win_x64", .windows }, + .{ "darwin", .mac }, + .{ "macos", .mac }, + .{ "macOS", .mac }, + .{ "mac", .mac }, + .{ "apple", .mac }, + .{ "linux", .linux }, + .{ "Linux", .linux }, + .{ "linux-gnu", .linux }, + .{ "gnu/linux", .linux }, + .{ "wasm", .wasm }, + }); /// user-facing name with capitalization pub fn displayString(self: OperatingSystem) []const u8 { @@ -125,23 +123,23 @@ pub const OperatingSystem = enum { }; pub const os: OperatingSystem = if (isMac) - OperatingSystem.mac + .mac else if (isLinux) - OperatingSystem.linux + .linux else if (isWindows) - OperatingSystem.windows + .windows else if (isWasm) - OperatingSystem.wasm + .wasm else @compileError("Please add your OS to the OperatingSystem enum"); -pub const Archictecture = enum { +pub const Architecture = enum { x64, arm64, wasm, /// npm package name, `@oven-sh/bun-{os}-{arch}` - pub fn npmName(this: Archictecture) []const u8 { + pub fn npmName(this: Architecture) []const u8 { return switch (this) { .x64 => "x64", .arm64 => "aarch64", @@ -149,22 +147,21 @@ pub const Archictecture = enum { }; } - pub const names = @import("root").bun.ComptimeStringMap( - Archictecture, - &.{ - .{ "x86_64", Archictecture.x64 }, - .{ "x64", Archictecture.x64 }, - .{ "amd64", Archictecture.x64 }, - .{ "aarch64", Archictecture.arm64 }, - .{ "arm64", Archictecture.arm64 }, - .{ "wasm", Archictecture.wasm }, - }, - ); + pub const names = bun.ComptimeStringMap(Architecture, &.{ + .{ "x86_64", .x64 }, + .{ "x64", .x64 }, + .{ "amd64", .x64 }, + .{ "aarch64", .arm64 }, + .{ "arm64", .arm64 }, + .{ "wasm", .wasm }, + }); }; -pub const arch = if (isX64) - Archictecture.x64 +pub const arch: Architecture = if (isWasm) + .wasm +else if (isX64) + .x64 else if (isAarch64) - Archictecture.arm64 + .arm64 else - @compileError("Please add your architecture to the Archictecture enum"); + @compileError("Please add your architecture to the Architecture enum"); diff --git 
a/src/env_loader.zig b/src/env_loader.zig index 315fc1ef1d..21f8d20878 100644 --- a/src/env_loader.zig +++ b/src/env_loader.zig @@ -38,7 +38,7 @@ pub const Loader = struct { @".env": ?logger.Source = null, // only populated with files specified explicitely (e.g. --env-file arg) - custom_files_loaded: std.StringArrayHashMap(logger.Source), + custom_files_loaded: bun.StringArrayHashMap(logger.Source), quiet: bool = false, @@ -451,7 +451,7 @@ pub const Loader = struct { return Loader{ .map = map, .allocator = allocator, - .custom_files_loaded = std.StringArrayHashMap(logger.Source).init(allocator), + .custom_files_loaded = bun.StringArrayHashMap(logger.Source).init(allocator), }; } diff --git a/src/feature_flags.zig b/src/feature_flags.zig index a53ad08ed1..0c4f214dfb 100644 --- a/src/feature_flags.zig +++ b/src/feature_flags.zig @@ -46,8 +46,6 @@ pub const allow_json_single_quotes = true; pub const react_specific_warnings = true; -pub const log_allocations = false; - pub const CSSInJSImportBehavior = enum { // When you import a .css file and you reference the import in JavaScript // Just return whatever the property key they referenced was diff --git a/src/futex.zig b/src/futex.zig index 7b20ea9150..49f4e83e77 100644 --- a/src/futex.zig +++ b/src/futex.zig @@ -13,7 +13,7 @@ const Futex = @This(); const target = builtin.target; const single_threaded = builtin.single_threaded; -const assert = @import("root").bun.assert; +const assert = bun.assert; const testing = std.testing; const Atomic = std.atomic.Value; diff --git a/src/hive_array.zig b/src/hive_array.zig index 05df5f1955..0f1ee8d8e5 100644 --- a/src/hive_array.zig +++ b/src/hive_array.zig @@ -1,5 +1,6 @@ const std = @import("std"); -const assert = @import("root").bun.assert; +const bun = @import("root").bun; +const assert = bun.assert; const mem = std.mem; const testing = std.testing; diff --git a/src/ini.zig b/src/ini.zig index 2a4ffe14a6..4e0ca6e7f1 100644 --- a/src/ini.zig +++ b/src/ini.zig @@ -64,7 +64,7 @@ pub const Parser = struct { var iter = std.mem.splitScalar(u8, this.src, '\n'); var head: *E.Object = this.out.data.e_object; - // var duplicates = std.StringArrayHashMapUnmanaged(u32){}; + // var duplicates = bun.StringArrayHashMapUnmanaged(u32){}; // defer duplicates.deinit(allocator); var rope_stack = std.heap.stackFallback(@sizeOf(Rope) * 6, arena_allocator); @@ -268,7 +268,7 @@ pub const Parser = struct { return "[Object object]"; }, else => { - const str = std.fmt.allocPrint(arena_allocator, "{}", .{toStringFormatter{ .d = json_val.data }}) catch |e| { + const str = std.fmt.allocPrint(arena_allocator, "{}", .{ToStringFormatter{ .d = json_val.data }}) catch |e| { this.logger.addErrorFmt(&this.source, Loc{ .start = offset }, arena_allocator, "failed to stringify value: {s}", .{@errorName(e)}) catch bun.outOfMemory(); return error.ParserError; }; @@ -653,7 +653,7 @@ pub const IniTestingAPIs = struct { } }; -pub const toStringFormatter = struct { +pub const ToStringFormatter = struct { d: js_ast.Expr.Data, pub fn format(this: *const @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { @@ -662,7 +662,7 @@ pub const toStringFormatter = struct { const last = this.d.e_array.items.len -| 1; for (this.d.e_array.items.slice(), 0..) 
|*e, i| { const is_last = i == last; - try writer.print("{}{s}", .{ toStringFormatter{ .d = e.data }, if (is_last) "" else "," }); + try writer.print("{}{s}", .{ ToStringFormatter{ .d = e.data }, if (is_last) "" else "," }); } }, .e_object => try writer.print("[Object object]", .{}), @@ -672,49 +672,9 @@ pub const toStringFormatter = struct { .e_null => try writer.print("null", .{}), .e_utf8_string => try writer.print("{s}", .{this.d.e_utf8_string.data}), - .e_unary => {}, - .e_binary => {}, - .e_class => {}, - - .e_new => {}, - .e_function => {}, - .e_call => {}, - .e_dot => {}, - .e_index => {}, - .e_arrow => {}, - - .e_jsx_element => {}, - .e_spread => {}, - .e_template_part => {}, - .e_template => {}, - .e_reg_exp => {}, - .e_await => {}, - .e_yield => {}, - .e_if => {}, - .e_import => {}, - - .e_identifier => {}, - .e_import_identifier => {}, - .e_private_identifier => {}, - .e_commonjs_export_identifier => {}, - - .e_big_int => {}, - - .e_require_string => {}, - .e_require_resolve_string => {}, - .e_require_call_target => {}, - .e_require_resolve_call_target => {}, - - .e_missing => {}, - .e_this => {}, - .e_super => {}, - .e_undefined => {}, - .e_new_target => {}, - .e_import_meta => {}, - - // This type should not exist outside of MacroContext - // If it ends up in JSParser or JSPrinter, it is a bug. - .inline_identifier => {}, + else => |tag| if (bun.Environment.isDebug) { + Output.panic("Unexpected AST node: {s}", .{@tagName(tag)}); + }, } } }; @@ -1068,7 +1028,7 @@ pub fn loadNpmrc( // The line that sets the auth token should only apply to the @myorg scope // The line that sets the username would apply to both @myorg and @another var url_map = url_map: { - var url_map = std.StringArrayHashMap(bun.URL).init(parser.arena.allocator()); + var url_map = bun.StringArrayHashMap(bun.URL).init(parser.arena.allocator()); url_map.ensureTotalCapacity(registry_map.scopes.keys().len) catch bun.outOfMemory(); for (registry_map.scopes.keys(), registry_map.scopes.values()) |*k, *v| { diff --git a/src/install/migration.zig b/src/install/migration.zig index 0f4cb9e5fd..f8b8e68faa 100644 --- a/src/install/migration.zig +++ b/src/install/migration.zig @@ -97,9 +97,9 @@ pub fn detectAndLoadOtherLockfile( return LoadFromDiskResult{ .not_found = {} }; } -const ResolvedURLsMap = std.StringHashMapUnmanaged(string); +const ResolvedURLsMap = bun.StringHashMapUnmanaged(string); -const IdMap = std.StringHashMapUnmanaged(IdMapValue); +const IdMap = bun.StringHashMapUnmanaged(IdMapValue); const IdMapValue = struct { /// index into the old package-lock.json package entries. 
old_json_index: u32, @@ -702,7 +702,7 @@ pub fn migrateNPMLockfile( } if (expr.data != .e_array) return error.InvalidNPMLockfile; const arr: *E.Array = expr.data.e_array; - var map = std.StringArrayHashMapUnmanaged(void){}; + var map = bun.StringArrayHashMapUnmanaged(void){}; try map.ensureTotalCapacity(allocator, arr.items.len); for (arr.items.slice()) |item| { map.putAssumeCapacity(item.asString(allocator) orelse return error.InvalidNPMLockfile, {}); diff --git a/src/js_ast.zig b/src/js_ast.zig index d2759ea07b..068a750032 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -837,7 +837,7 @@ pub const G = struct { // class Foo { a = 1 } // initializer: ?ExprNodeIndex = null, - kind: Kind = Kind.normal, + kind: Kind = .normal, flags: Flags.Property.Set = Flags.Property.None, class_static_block: ?*ClassStaticBlock = null, @@ -1396,6 +1396,7 @@ pub const Symbol = struct { } } + /// Equivalent to followSymbols in esbuild pub fn follow(symbols: *const Map, ref: Ref) Ref { var symbol = symbols.get(ref) orelse return ref; if (!symbol.hasLink()) { @@ -1433,13 +1434,12 @@ pub const Symbol = struct { }; pub const OptionalChain = enum(u1) { - - // "a?.b" + /// "a?.b" start, - // "a?.b.c" => ".c" is OptionalChainContinue - // "(a?.b).c" => ".c" is OptionalChain null - ccontinue, + /// "a?.b.c" => ".c" is .continuation + /// "(a?.b).c" => ".c" is null + continuation, pub fn jsonStringify(self: @This(), writer: anytype) !void { return try writer.write(@tagName(self)); @@ -1789,28 +1789,28 @@ pub const E = struct { const neg_double_digit = [_]string{ "-0", "-1", "-2", "-3", "-4", "-5", "-6", "-7", "-8", "-9", "-10", "-11", "-12", "-13", "-14", "-15", "-16", "-17", "-18", "-19", "-20", "-21", "-22", "-23", "-24", "-25", "-26", "-27", "-28", "-29", "-30", "-31", "-32", "-33", "-34", "-35", "-36", "-37", "-38", "-39", "-40", "-41", "-42", "-43", "-44", "-45", "-46", "-47", "-48", "-49", "-50", "-51", "-52", "-53", "-54", "-55", "-56", "-57", "-58", "-59", "-60", "-61", "-62", "-63", "-64", "-65", "-66", "-67", "-68", "-69", "-70", "-71", "-72", "-73", "-74", "-75", "-76", "-77", "-78", "-79", "-80", "-81", "-82", "-83", "-84", "-85", "-86", "-87", "-88", "-89", "-90", "-91", "-92", "-93", "-94", "-95", "-96", "-97", "-98", "-99", "-100" }; /// String concatenation with numbers is required by the TypeScript compiler for - /// "constant expression" handling in enums. However, we don't want to introduce - /// correctness bugs by accidentally stringifying a number differently than how - /// a real JavaScript VM would do it. So we are conservative and we only do this - /// when we know it'll be the same result. - pub fn toStringSafely(this: Number, allocator: std.mem.Allocator) ?string { - return toStringFromF64Safe(this.value, allocator); + /// "constant expression" handling in enums. We can match the behavior of a JS VM + /// by calling out to the APIs in WebKit which are responsible for this operation. 
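+    /// For example, JavaScript prints `0.1 + 0.2` as "0.30000000000000004" and
+    /// `1e21` as "1e+21"; this function's output must agree with that exactly.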
+ /// + /// This can return `null` in wasm builds to avoid linking JSC + pub fn toString(this: Number, allocator: std.mem.Allocator) ?string { + return toStringFromF64(this.value, allocator); } - pub fn toStringFromF64Safe(value: f64, allocator: std.mem.Allocator) ?string { - if (comptime !Environment.isWasm) { - if (value == @trunc(value) and (value < std.math.maxInt(i32) and value > std.math.minInt(i32))) { - const int_value = @as(i64, @intFromFloat(value)); - const abs = @as(u64, @intCast(@abs(int_value))); - if (abs < double_digit.len) { - return if (int_value < 0) - neg_double_digit[abs] - else - double_digit[abs]; - } + pub fn toStringFromF64(value: f64, allocator: std.mem.Allocator) ?string { + if (value == @trunc(value) and (value < std.math.maxInt(i32) and value > std.math.minInt(i32))) { + const int_value = @as(i64, @intFromFloat(value)); + const abs = @as(u64, @intCast(@abs(int_value))); - return std.fmt.allocPrint(allocator, "{d}", .{@as(i32, @intCast(int_value))}) catch return null; + // do not allocate for a small set of constant numbers: -100 through 100 + if (abs < double_digit.len) { + return if (int_value < 0) + neg_double_digit[abs] + else + double_digit[abs]; } + + return std.fmt.allocPrint(allocator, "{d}", .{@as(i32, @intCast(int_value))}) catch return null; } if (std.math.isNan(value)) { @@ -1825,6 +1825,13 @@ pub const E = struct { return "Infinity"; } + if (Environment.isNative) { + var buf: [124]u8 = undefined; + return allocator.dupe(u8, bun.fmt.FormatDouble.dtoa(&buf, value)) catch bun.outOfMemory(); + } else { + // do not attempt to implement the spec here, it would be error prone. + } + return null; } @@ -1892,9 +1899,7 @@ pub const E = struct { } const rope = try allocator.create(Rope); - rope.* = .{ - .head = expr, - }; + rope.* = .{ .head = expr }; this.next = rope; return rope; } @@ -2249,17 +2254,18 @@ pub const E = struct { return bun.js_lexer.isIdentifier(this.slice(allocator)); } - pub var class = E.String{ .data = "class" }; + pub const class = E.String{ .data = "class" }; + pub fn push(this: *String, other: *String) void { bun.assert(this.isUTF8()); bun.assert(other.isUTF8()); if (other.rope_len == 0) { - other.rope_len = @as(u32, @truncate(other.data.len)); + other.rope_len = @truncate(other.data.len); } if (this.rope_len == 0) { - this.rope_len = @as(u32, @truncate(this.data.len)); + this.rope_len = @truncate(this.data.len); } this.rope_len += other.rope_len; @@ -2274,6 +2280,28 @@ pub const E = struct { } } + /// Cloning the rope string is rarely needed, see `foldStringAddition`'s + /// comments and the 'edgecase/EnumInliningRopeStringPoison' test + pub fn cloneRopeNodes(s: String) String { + var root = s; + + if (root.next != null) { + var current: ?*String = &root; + while (true) { + const node = current.?; + if (node.next) |next| { + node.next = Expr.Data.Store.append(String, next.*); + current = node.next; + } else { + root.end = node; + break; + } + } + } + + return root; + } + pub fn toUTF8(this: *String, allocator: std.mem.Allocator) !void { if (!this.is_utf16) return; this.data = try strings.toUTF8Alloc(allocator, this.slice16()); @@ -2292,6 +2320,16 @@ pub const E = struct { return .{ .data = value }; } + /// E.String containing non-ascii characters may not fully work. + /// https://github.com/oven-sh/bun/issues/11963 + /// More investigation is needed. 
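+    /// ASCII input is stored as-is; any other input is re-encoded as UTF-16.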
+ pub fn initReEncodeUTF8(utf8: []const u8, allocator: std.mem.Allocator) String { + return if (bun.strings.isAllASCII(utf8)) + init(utf8) + else + init(bun.strings.toUTF16AllocForReal(allocator, utf8, false, false) catch bun.outOfMemory()); + } + pub fn slice16(this: *const String) []const u16 { bun.assert(this.is_utf16); return @as([*]const u16, @ptrCast(@alignCast(this.data.ptr)))[0..this.data.len]; @@ -2299,13 +2337,13 @@ pub const E = struct { pub fn resolveRopeIfNeeded(this: *String, allocator: std.mem.Allocator) void { if (this.next == null or !this.isUTF8()) return; - var str = this.next; - var bytes = std.ArrayList(u8).initCapacity(allocator, this.rope_len) catch unreachable; + var bytes = std.ArrayList(u8).initCapacity(allocator, this.rope_len) catch bun.outOfMemory(); bytes.appendSliceAssumeCapacity(this.data); - while (str) |strin| { - bytes.appendSlice(strin.data) catch unreachable; - str = strin.next; + var str = this.next; + while (str) |part| { + bytes.appendSlice(part.data) catch bun.outOfMemory(); + str = part.next; } this.data = bytes.items; this.next = null; @@ -2313,7 +2351,7 @@ pub const E = struct { pub fn slice(this: *String, allocator: std.mem.Allocator) []const u8 { this.resolveRopeIfNeeded(allocator); - return this.string(allocator) catch unreachable; + return this.string(allocator) catch bun.outOfMemory(); } pub var empty = String{}; @@ -2420,7 +2458,7 @@ pub const E = struct { } } - pub fn eqlComptime(s: *const String, comptime value: anytype) bool { + pub fn eqlComptime(s: *const String, comptime value: []const u8) bool { return if (s.isUTF8()) strings.eqlComptime(s.data, value) else @@ -2510,6 +2548,34 @@ pub const E = struct { } } + pub fn format(s: String, comptime fmt: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + comptime bun.assert(fmt.len == 0); + + try writer.writeAll("E.String"); + if (s.next == null) { + try writer.writeAll("("); + if (s.isUTF8()) { + try writer.print("\"{s}\"", .{s.data}); + } else { + try writer.print("\"{}\"", .{bun.fmt.utf16(s.slice16())}); + } + try writer.writeAll(")"); + } else { + try writer.writeAll("(rope: ["); + var it: ?*const String = &s; + while (it) |part| { + if (part.isUTF8()) { + try writer.print("\"{s}\"", .{part.data}); + } else { + try writer.print("\"{}\"", .{bun.fmt.utf16(part.slice16())}); + } + it = part.next; + if (it != null) try writer.writeAll(" "); + } + try writer.writeAll("])"); + } + } + pub fn jsonStringify(s: *const String, writer: anytype) !void { var buf = [_]u8{0} ** 4096; var i: usize = 0; @@ -2534,7 +2600,7 @@ pub const E = struct { pub const Template = struct { tag: ?ExprNodeIndex = null, - parts: []TemplatePart = &([_]TemplatePart{}), + parts: []TemplatePart = &.{}, head: Contents, pub const Contents = union(Tag) { @@ -2545,6 +2611,10 @@ pub const E = struct { cooked, raw, }; + + pub fn isUTF8(contents: Contents) bool { + return contents == .cooked and contents.cooked.isUTF8(); + } }; /// "`a${'b'}c`" => "`abc`" @@ -2556,9 +2626,7 @@ pub const E = struct { if (this.tag != null or (this.head == .cooked and !this.head.cooked.isUTF8())) { // we only fold utf-8/ascii for now return Expr{ - .data = .{ - .e_template = this, - }, + .data = .{ .e_template = this }, .loc = loc, }; } @@ -2571,13 +2639,15 @@ pub const E = struct { var parts = std.ArrayList(TemplatePart).initCapacity(allocator, this.parts.len) catch unreachable; var head = Expr.init(E.String, this.head.cooked, loc); - for (this.parts) |part_| { - var part = part_; + for (this.parts) |part_src| { + var part = part_src; 
bun.assert(part.tail == .cooked); + part.value = part.value.unwrapInlined(); + switch (part.value.data) { .e_number => { - if (part.value.data.e_number.toStringSafely(allocator)) |s| { + if (part.value.data.e_number.toString(allocator)) |s| { part.value = Expr.init(E.String, E.String.init(s), part.value.loc); } }, @@ -2593,6 +2663,9 @@ pub const E = struct { .e_undefined => { part.value = Expr.init(E.String, E.String.init("undefined"), part.value.loc); }, + .e_big_int => |value| { + part.value = Expr.init(E.String, E.String.init(value.value), part.value.loc); + }, else => {}, } @@ -2634,15 +2707,11 @@ pub const E = struct { return head; } - return Expr.init( - E.Template, - E.Template{ - .tag = null, - .parts = parts.items, - .head = .{ .cooked = head.data.e_string.* }, - }, - loc, - ); + return Expr.init(E.Template, .{ + .tag = null, + .parts = parts.items, + .head = .{ .cooked = head.data.e_string.* }, + }, loc); } }; @@ -2724,6 +2793,11 @@ pub const E = struct { close_paren_loc: logger.Loc = logger.Loc.Empty, }; + pub const InlinedEnum = struct { + value: ExprNodeIndex, + comment: string, + }; + pub const Import = struct { expr: ExprNodeIndex, import_record_index: u32, @@ -3206,6 +3280,11 @@ pub const Expr = struct { return this.data.canBeConstValue(); } + pub fn unwrapInlined(expr: Expr) Expr { + if (expr.data.as(.e_inlined_enum)) |inlined| return inlined.value; + return expr; + } + pub fn fromBlob( blob: *const JSC.WebCore.Blob, allocator: std.mem.Allocator, @@ -3559,11 +3638,10 @@ pub const Expr = struct { } pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 { - if (!(@as(Expr.Tag, left) == .e_number and @as(Expr.Tag, right) == .e_number)) { - return null; - } - - return [2]f64{ left.e_number.value, right.e_number.value }; + return .{ + left.extractNumericValue() orelse return null, + right.extractNumericValue() orelse return null, + }; } pub var icount: usize = 0; @@ -4338,6 +4416,9 @@ pub const Expr = struct { }, }; }, + E.InlinedEnum => return .{ .loc = loc, .data = .{ + .e_inlined_enum = Data.Store.append(@TypeOf(st), st), + } }, else => { @compileError("Invalid type passed to Expr.init: " ++ @typeName(Type)); @@ -4397,6 +4478,7 @@ pub const Expr = struct { e_undefined, e_new_target, e_import_meta, + e_inlined_enum, /// A string that is UTF-8 encoded without escaping for use in JavaScript. 
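+            /// (unlike e_string, which can hold either UTF-8 or UTF-16 data)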
e_utf8_string, @@ -4956,6 +5038,9 @@ pub const Expr = struct { else => {}, } }, + .e_inlined_enum => |inlined| { + return maybeSimplifyNot(inlined.value, allocator); + }, else => {}, } @@ -4963,6 +5048,35 @@ pub const Expr = struct { return null; } + pub fn toStringExprWithoutSideEffects(expr: Expr, allocator: std.mem.Allocator) ?Expr { + const unwrapped = expr.unwrapInlined(); + const slice = switch (unwrapped.data) { + .e_null => "null", + .e_string => return expr, + .e_undefined => "undefined", + .e_boolean => |data| if (data.value) "true" else "false", + .e_big_int => |bigint| bigint.value, + .e_number => |num| if (num.toString(allocator)) |str| + str + else + null, + .e_reg_exp => |regexp| regexp.value, + .e_dot => |dot| @as(?[]const u8, brk: { + // This is dumb but some JavaScript obfuscators use this to generate string literals + if (bun.strings.eqlComptime(dot.name, "constructor")) { + break :brk switch (dot.target.data) { + .e_string => "function String() { [native code] }", + .e_reg_exp => "function RegExp() { [native code] }", + else => null, + }; + } + break :brk null; + }), + else => null, + }; + return if (slice) |s| Expr.init(E.String, E.String.init(s), expr.loc) else null; + } + pub fn isOptionalChain(self: *const @This()) bool { return switch (self.data) { .e_dot => self.data.e_dot.optional_chain != null, @@ -5062,6 +5176,7 @@ pub const Expr = struct { e_undefined: E.Undefined, e_new_target: E.NewTarget, e_import_meta: E.ImportMeta, + e_inlined_enum: *E.InlinedEnum, e_utf8_string: *E.UTF8String, @@ -5069,6 +5184,10 @@ pub const Expr = struct { // If it ends up in JSParser or JSPrinter, it is a bug. inline_identifier: i32, + pub fn as(data: Data, comptime tag: Tag) ?std.meta.FieldType(Data, tag) { + return if (data == tag) @field(data, @tagName(tag)) else null; + } + pub fn clone(this: Expr.Data, allocator: std.mem.Allocator) !Data { return switch (this) { .e_array => |el| { @@ -5181,6 +5300,11 @@ pub const Expr = struct { item.* = el.*; return .{ .e_string = item }; }, + .e_inlined_enum => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_inlined_enum))); + item.* = el.*; + return .{ .e_inlined_enum = item }; + }, else => this, }; } @@ -5392,6 +5516,13 @@ pub const Expr = struct { }); return .{ .e_string = item }; }, + .e_inlined_enum => |el| { + const item = bun.create(allocator, E.InlinedEnum, .{ + .value = el.value, + .comment = el.comment, + }); + return .{ .e_inlined_enum = item }; + }, else => this, }; } @@ -5516,6 +5647,9 @@ pub const Expr = struct { else => PrimitiveType.unknown, }, + + .e_inlined_enum => |inlined| inlined.value.data.knownPrimitive(), + else => PrimitiveType.unknown, }; } @@ -5535,8 +5669,24 @@ pub const Expr = struct { return switch (data) { .e_null => 0, .e_undefined => std.math.nan(f64), + .e_string => |str| { + if (str.next != null) return null; + + // +'1' => 1 + return stringToEquivalentNumberValue(str.data); + }, .e_boolean => @as(f64, if (data.e_boolean.value) 1.0 else 0.0), .e_number => data.e_number.value, + .e_inlined_enum => |inlined| switch (inlined.value.data) { + .e_number => |num| num.value, + .e_string => |str| { + if (str.next != null) return null; + + // +'1' => 1 + return stringToEquivalentNumberValue(str.data); + }, + else => null, + }, else => null, }; } @@ -5548,6 +5698,24 @@ pub const Expr = struct { data.e_number.value else null, + .e_inlined_enum => |inlined| switch (inlined.value.data) { + .e_number => |num| if (std.math.isFinite(num.value)) + num.value + else + null, + else => null, + }, + else => 
null, + }; + } + + pub fn extractNumericValue(data: Expr.Data) ?f64 { + return switch (data) { + .e_number => data.e_number.value, + .e_inlined_enum => |inlined| switch (inlined.value.data) { + .e_number => |num| num.value, + else => null, + }, else => null, }; } @@ -5572,6 +5740,8 @@ pub const Expr = struct { ) Equality { // https://dorey.github.io/JavaScript-Equality-Table/ switch (left) { + .e_inlined_enum => |inlined| return inlined.value.data.eql(right, allocator, kind), + .e_null, .e_undefined => { const ok = switch (@as(Expr.Tag, right)) { .e_null, .e_undefined => true, @@ -5630,6 +5800,12 @@ pub const Expr = struct { .equal = l.value == r.value, }; }, + .e_inlined_enum => |r| if (r.value.data == .e_number) { + return .{ + .ok = true, + .equal = l.value == r.value.data.e_number.value, + }; + }, .e_boolean => |r| { if (comptime kind == .loose) { return .{ @@ -5682,6 +5858,19 @@ pub const Expr = struct { .equal = r.eql(E.String, l), }; }, + .e_inlined_enum => |inlined| { + if (inlined.value.data == .e_string) { + const r = inlined.value.data.e_string; + + r.resolveRopeIfNeeded(allocator); + l.resolveRopeIfNeeded(allocator); + + return .{ + .ok = true, + .equal = r.eql(E.String, l), + }; + } + }, .e_null, .e_undefined => { return Equality.false; }, @@ -5726,6 +5915,8 @@ pub const Expr = struct { .e_number => |e| e.toJS(), // .e_big_int => |e| e.toJS(ctx, exception), + .e_inlined_enum => |inlined| inlined.value.data.toJS(allocator, globalObject, .{}), + .e_identifier, .e_import_identifier, .inline_identifier, @@ -5812,7 +6003,7 @@ pub const Expr = struct { } } - pub fn append(comptime ValueType: type, value: anytype) *ValueType { + pub fn append(comptime ValueType: type, value: ValueType) *ValueType { if (memory_allocator) |allocator| { return allocator.append(ValueType, value); } @@ -5835,8 +6026,12 @@ pub const Expr = struct { pub const EnumValue = struct { loc: logger.Loc, ref: Ref, - name: E.String, + name: []const u8, value: ?ExprNodeIndex, + + pub fn nameAsEString(enum_value: EnumValue, allocator: std.mem.Allocator) E.String { + return E.String.initReEncodeUTF8(enum_value.name, allocator); + } }; pub const S = struct { @@ -6070,10 +6265,10 @@ pub const Op = struct { // If you add a new token, remember to add it to "Table" too pub const Code = enum { // Prefix - un_pos, - un_neg, - un_cpl, - un_not, + un_pos, // +expr + un_neg, // -expr + un_cpl, // ~expr + un_not, // !expr un_void, un_typeof, un_delete, @@ -6420,8 +6615,8 @@ pub const Ast = struct { /// Only populated when bundling target: bun.options.Target = .browser, - - const_values: ConstValuesMap = .{}, + // const_values: ConstValuesMap = .{}, + ts_enums: TsEnumsMap = .{}, /// Not to be confused with `commonjs_named_exports` /// This is a list of named exports that may exist in a CommonJS module @@ -6438,6 +6633,7 @@ pub const Ast = struct { pub const NamedImports = std.ArrayHashMap(Ref, NamedImport, RefHashCtx, true); pub const NamedExports = bun.StringArrayHashMap(NamedExport); pub const ConstValuesMap = std.ArrayHashMapUnmanaged(Ref, Expr, RefHashCtx, false); + pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged(InlinedEnumValue), RefHashCtx, false); pub fn fromParts(parts: []Part) Ast { return Ast{ @@ -6522,7 +6718,8 @@ pub const BundledAst = struct { /// Only populated when bundling target: bun.options.Target = .browser, - const_values: ConstValuesMap = .{}, + // const_values: ConstValuesMap = .{}, + ts_enums: Ast.TsEnumsMap = .{}, flags: BundledAst.Flags = .{}, @@ -6598,7 +6795,8 @@ pub 
const BundledAst = struct { .target = this.target, - .const_values = this.const_values, + // .const_values = this.const_values, + .ts_enums = this.ts_enums, .uses_exports_ref = this.flags.uses_exports_ref, .uses_module_ref = this.flags.uses_module_ref, @@ -6648,7 +6846,8 @@ pub const BundledAst = struct { .target = ast.target, - .const_values = ast.const_values, + // .const_values = ast.const_values, + .ts_enums = ast.ts_enums, .flags = .{ .uses_exports_ref = ast.uses_exports_ref, @@ -6668,6 +6867,178 @@ pub const Span = struct { range: logger.Range = .{}, }; +/// This is for TypeScript "enum" and "namespace" blocks. Each block can +/// potentially be instantiated multiple times. The exported members of each +/// block are merged into a single namespace while the non-exported code is +/// still scoped to just within that block: +/// +/// let x = 1; +/// namespace Foo { +/// let x = 2; +/// export let y = 3; +/// } +/// namespace Foo { +/// console.log(x); // 1 +/// console.log(y); // 3 +/// } +/// +/// Doing this also works inside an enum: +/// +/// enum Foo { +/// A = 3, +/// B = A + 1, +/// } +/// enum Foo { +/// C = A + 2, +/// } +/// console.log(Foo.B) // 4 +/// console.log(Foo.C) // 5 +/// +/// This is a form of identifier lookup that works differently than the +/// hierarchical scope-based identifier lookup in JavaScript. Lookup now needs +/// to search sibling scopes in addition to parent scopes. This is accomplished +/// by sharing the map of exported members between all matching sibling scopes. +pub const TSNamespaceScope = struct { + /// This is specific to this namespace block. It's the argument of the + /// immediately-invoked function expression that the namespace block is + /// compiled into: + /// + /// var ns; + /// (function (ns2) { + /// ns2.x = 123; + /// })(ns || (ns = {})); + /// + /// This variable is "ns2" in the above example. It's the symbol to use when + /// generating property accesses off of this namespace when it's in scope. + arg_ref: Ref, + + /// This is shared between all sibling namespace blocks + exported_members: *TSNamespaceMemberMap, + + /// This is a lazily-generated map of identifiers that actually represent + /// property accesses to this namespace's properties. For example: + /// + /// namespace x { + /// export let y = 123 + /// } + /// namespace x { + /// export let z = y + /// } + /// + /// This should be compiled into the following code: + /// + /// var x; + /// (function(x2) { + /// x2.y = 123; + /// })(x || (x = {})); + /// (function(x3) { + /// x3.z = x3.y; + /// })(x || (x = {})); + /// + /// When we try to find the symbol "y", we instead return one of these lazily + /// generated proxy symbols that represent the property access "x3.y". This + /// map is unique per namespace block because "x3" is the argument symbol that + /// is specific to that particular namespace block. + property_accesses: bun.StringArrayHashMapUnmanaged(Ref) = .{}, + + /// Even though enums are like namespaces and both enums and namespaces allow + /// implicit references to properties of sibling scopes, they behave like + /// separate, er, namespaces. Implicit references only work namespace-to- + /// namespace and enum-to-enum. They do not work enum-to-namespace. And I'm + /// not sure what's supposed to happen for the namespace-to-enum case because + /// the compiler crashes: https://github.com/microsoft/TypeScript/issues/46891. 
+ /// So basically these both work: + /// + /// enum a { b = 1 } + /// enum a { c = b } + /// + /// namespace x { export let y = 1 } + /// namespace x { export let z = y } + /// + /// This doesn't work: + /// + /// enum a { b = 1 } + /// namespace a { export let c = b } + /// + /// And this crashes the TypeScript compiler: + /// + /// namespace a { export let b = 1 } + /// enum a { c = b } + /// + /// Therefore we only allow enum/enum and namespace/namespace interactions. + is_enum_scope: bool, +}; + +pub const TSNamespaceMemberMap = bun.StringArrayHashMapUnmanaged(TSNamespaceMember); + +pub const TSNamespaceMember = struct { + loc: logger.Loc, + data: Data, + + pub const Data = union(enum) { + /// "namespace ns { export let it }" + property, + /// "namespace ns { export namespace it {} }" + namespace: *TSNamespaceMemberMap, + /// "enum ns { it }" + enum_number: f64, + /// "enum ns { it = 'it' }" + enum_string: *E.String, + /// "enum ns { it = something() }" + enum_property: void, + + pub fn isEnum(data: Data) bool { + return switch (data) { + inline else => |_, tag| comptime std.mem.startsWith(u8, @tagName(tag), "enum_"), + }; + } + }; +}; + +/// Inlined enum values can only be numbers and strings +/// This type special cases an encoding similar to JSValue, where nan-boxing is used +/// to encode both a 64-bit pointer or a 64-bit float using 64 bits. +pub const InlinedEnumValue = packed struct { + raw_data: u64, + + pub const Decoded = union(enum) { + string: *E.String, + number: f64, + }; + + /// See JSCJSValue.h in WebKit for more details + const double_encode_offset = 1 << 49; + /// See PureNaN.h in WebKit for more details + const pure_nan: f64 = @bitCast(@as(u64, 0x7ff8000000000000)); + + fn purifyNaN(value: f64) f64 { + return if (std.math.isNan(value)) pure_nan else value; + } + + pub fn encode(decoded: Decoded) InlinedEnumValue { + const encoded: InlinedEnumValue = .{ .raw_data = switch (decoded) { + .string => |ptr| @as(u48, @truncate(@intFromPtr(ptr))), + .number => |num| @as(u64, @bitCast(purifyNaN(num))) + double_encode_offset, + } }; + if (Environment.allow_assert) { + bun.assert(switch (encoded.decode()) { + .string => |str| str == decoded.string, + .number => |num| @as(u64, @bitCast(num)) == + @as(u64, @bitCast(purifyNaN(decoded.number))), + }); + } + return encoded; + } + + pub fn decode(encoded: InlinedEnumValue) Decoded { + if (encoded.raw_data > 0x0000FFFFFFFFFFFF) { + return .{ .number = @bitCast(encoded.raw_data - double_encode_offset) }; + } else { + return .{ .string = @ptrFromInt(encoded.raw_data) }; + } + } +}; + pub const ExportsKind = enum { // This file doesn't have any kind of export, so it's impossible to say what // kind of file this is. An empty file is in this category, for example. @@ -6841,40 +7212,45 @@ pub const Part = struct { stmts: []Stmt = &([_]Stmt{}), scopes: []*Scope = &([_]*Scope{}), - // Each is an index into the file-level import record list + /// Each is an index into the file-level import record list import_record_indices: ImportRecordIndices = .{}, - // All symbols that are declared in this part. Note that a given symbol may - // have multiple declarations, and so may end up being declared in multiple - // parts (e.g. multiple "var" declarations with the same name). Also note - // that this list isn't deduplicated and may contain duplicates. + /// All symbols that are declared in this part. Note that a given symbol may + /// have multiple declarations, and so may end up being declared in multiple + /// parts (e.g. 
multiple "var" declarations with the same name). Also note + /// that this list isn't deduplicated and may contain duplicates. declared_symbols: DeclaredSymbol.List = .{}, - // An estimate of the number of uses of all symbols used within this part. - symbol_uses: SymbolUseMap = SymbolUseMap{}, + /// An estimate of the number of uses of all symbols used within this part. + symbol_uses: SymbolUseMap = .{}, - // The indices of the other parts in this file that are needed if this part - // is needed. + /// This tracks property accesses off of imported symbols. We don't know + /// during parsing if an imported symbol is going to be an inlined enum + /// value or not. This is only known during linking. So we defer adding + /// a dependency on these imported symbols until we know whether the + /// property access is an inlined enum value or not. + import_symbol_property_uses: SymbolPropertyUseMap = .{}, + + /// The indices of the other parts in this file that are needed if this part + /// is needed. dependencies: Dependency.List = .{}, - // If true, this part can be removed if none of the declared symbols are - // used. If the file containing this part is imported, then all parts that - // don't have this flag enabled must be included. + /// If true, this part can be removed if none of the declared symbols are + /// used. If the file containing this part is imported, then all parts that + /// don't have this flag enabled must be included. can_be_removed_if_unused: bool = false, - // This is used for generated parts that we don't want to be present if they - // aren't needed. This enables tree shaking for these parts even if global - // tree shaking isn't enabled. + /// This is used for generated parts that we don't want to be present if they + /// aren't needed. This enables tree shaking for these parts even if global + /// tree shaking isn't enabled. force_tree_shaking: bool = false, - // This is true if this file has been marked as live by the tree shaking - // algorithm. + /// This is true if this file has been marked as live by the tree shaking + /// algorithm. is_live: bool = false, tag: Tag = Tag.none, - valid_in_development: if (bun.Environment.allow_assert) bool else void = bun.DebugOnlyDefault(true), - pub const Tag = enum { none, jsx_import, @@ -6889,6 +7265,8 @@ pub const Part = struct { }; pub const SymbolUseMap = std.ArrayHashMapUnmanaged(Ref, Symbol.Use, RefHashCtx, false); + pub const SymbolPropertyUseMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged(Symbol.Use), RefHashCtx, false); + pub fn jsonStringify(self: *const Part, writer: anytype) !void { return writer.write(self.stmts); } @@ -6968,6 +7346,9 @@ pub const Scope = struct { is_after_const_local_prefix: bool = false, + // This will be non-null if this is a TypeScript "namespace" or "enum" + ts_namespace: ?*TSNamespaceScope = null, + pub const NestedScopeMap = std.AutoArrayHashMap(u32, bun.BabyList(*Scope)); pub fn getMemberHash(name: []const u8) u64 { @@ -7028,6 +7409,7 @@ pub const Scope = struct { become_private_get_set_pair, become_private_static_get_set_pair, }; + pub fn canMergeSymbols( scope: *Scope, existing: Symbol.Kind, @@ -7060,9 +7442,12 @@ pub const Scope = struct { // "enum Foo {} namespace Foo { ... 
}" if (new == .ts_namespace) { switch (existing) { - .ts_namespace, .hoisted_function, .generator_or_async_function, .ts_enum, .class => { - return .keep_existing; - }, + .ts_namespace, + .ts_enum, + .hoisted_function, + .generator_or_async_function, + .class, + => return .keep_existing, else => {}, } } @@ -7933,6 +8318,16 @@ pub const GlobalStoreHandle = struct { } }; +extern fn JSC__jsToNumber(latin1_ptr: [*]const u8, len: usize) f64; + +fn stringToEquivalentNumberValue(str: []const u8) f64 { + // +"" -> 0 + if (str.len == 0) return 0; + if (!bun.strings.isAllASCII(str)) + return std.math.nan(f64); + return JSC__jsToNumber(str.ptr, str.len); +} + // test "Binding.init" { // var binding = Binding.alloc( // std.heap.page_allocator, diff --git a/src/js_lexer/identifier.zig b/src/js_lexer/identifier.zig index f97a1987b8..b8da1da65f 100644 --- a/src/js_lexer/identifier.zig +++ b/src/js_lexer/identifier.zig @@ -1851,7 +1851,7 @@ pub const JumpTableInline = struct { // } // } -// std.debug.print( +// .print( // \\---- Unicode text ----- // \\ // \\Timings (sum of running {d} times each, lower is better): @@ -1988,7 +1988,7 @@ pub const JumpTableInline = struct { // } // } -// std.debug.print( +// ( // \\---- ASCII text ----- // \\ // \\Timings (sum of running {d} times each, lower is better): diff --git a/src/js_parser.zig b/src/js_parser.zig index cdd3dce9c7..ac6c5a6959 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -46,7 +46,7 @@ const JSC = bun.JSC; const Index = @import("./ast/base.zig").Index; fn _disabledAssert(_: bool) void { - if (!Environment.allow_assert) @compileLog("assert is missing an if (Environment.allow_assert)"); + if (!Environment.allow_assert) @compileError("assert is missing an if (Environment.allow_assert)"); unreachable; } @@ -206,31 +206,221 @@ const Substitution = union(enum) { continue_: Expr, }; -fn foldStringAddition(lhs: Expr, rhs: Expr) ?Expr { +/// Concatenate two `E.String`s, mutating BOTH inputs +/// unless `has_inlined_enum_poison` is set. +/// +/// Currently inlined enum poison refers to where mutation would cause output +/// bugs due to inlined enum values sharing `E.String`s. If a new use case +/// besides inlined enums comes up to set this to true, please rename the +/// variable and document it. +fn joinStrings(left: *const E.String, right: *const E.String, has_inlined_enum_poison: bool) E.String { + var new = if (has_inlined_enum_poison) + // Inlined enums can be shared by multiple call sites. In + // this case, we need to ensure that the ENTIRE rope is + // cloned. In other situations, the lhs doesn't have any + // other owner, so it is fine to mutate `lhs.data.end.next`. + // + // Consider the following case: + // const enum A { + // B = "a" + "b", + // D = B + "d", + // }; + // console.log(A.B, A.D); + left.cloneRopeNodes() + else + left.*; + + // Similarly, the right side has to be cloned for an enum rope too. + // + // Consider the following case: + // const enum A { + // B = "1" + "2", + // C = ("3" + B) + "4", + // }; + // console.log(A.B, A.C); + const rhs_clone = Expr.Data.Store.append(E.String, if (has_inlined_enum_poison) + right.cloneRopeNodes() + else + right.*); + + new.push(rhs_clone); + new.prefer_template = new.prefer_template or rhs_clone.prefer_template; + + return new; +} + +/// Transforming the left operand into a string is not safe if it comes from a +/// nested AST node. 
+const FoldStringAdditionKind = enum { + // "x" + "y" -> "xy" + // 1 + "y" -> "1y" + normal, + // a + "x" + "y" -> a + "xy" + // a + 1 + "y" -> a + 1 + y + nested_left, +}; + +// NOTE: unlike esbuild's js_ast_helpers.FoldStringAddition, this does mutate +// the input AST in the case of rope strings +fn foldStringAddition(l: Expr, r: Expr, allocator: std.mem.Allocator, kind: FoldStringAdditionKind) ?Expr { + // "See through" inline enum constants + // TODO: implement foldAdditionPreProcess to fold some more things :) + var lhs = l.unwrapInlined(); + var rhs = r.unwrapInlined(); + + if (kind != .nested_left) { + // See comment on `FoldStringAdditionKind` for examples + switch (rhs.data) { + .e_string, .e_template => { + if (lhs.toStringExprWithoutSideEffects(allocator)) |str| { + lhs = str; + } + }, + else => {}, + } + } + switch (lhs.data) { .e_string => |left| { - if (rhs.data == .e_string and left.isUTF8() and rhs.data.e_string.isUTF8()) { - var orig = lhs.data.e_string.*; - const rhs_clone = Expr.init(E.String, rhs.data.e_string.*, rhs.loc); - orig.push( - rhs_clone.data.e_string, - ); - - orig.prefer_template = orig.prefer_template or rhs_clone.data.e_string.prefer_template; - - return Expr.init(E.String, orig, lhs.loc); + if (rhs.toStringExprWithoutSideEffects(allocator)) |str| { + rhs = str; } - }, - .e_binary => |bin| { - // 123 + "bar" + "baz" - if (bin.op == .bin_add) { - if (foldStringAddition(bin.right, rhs)) |out| { - return Expr.init(E.Binary, E.Binary{ .op = bin.op, .left = bin.left, .right = out }, lhs.loc); + + if (left.isUTF8()) { + switch (rhs.data) { + // "bar" + "baz" => "barbaz" + .e_string => |right| { + if (right.isUTF8()) { + const has_inlined_enum_poison = + l.data == .e_inlined_enum or + r.data == .e_inlined_enum; + + return Expr.init(E.String, joinStrings( + left, + right, + has_inlined_enum_poison, + ), lhs.loc); + } + }, + // "bar" + `baz${bar}` => `barbaz${bar}` + .e_template => |right| { + if (right.head.isUTF8()) { + return Expr.init(E.Template, E.Template{ + .parts = right.parts, + .head = .{ .cooked = joinStrings( + left, + &right.head.cooked, + l.data == .e_inlined_enum, + ) }, + }, l.loc); + } + }, + else => { + // other constant-foldable ast nodes would have been converted to .e_string + }, + } + + // "'x' + `y${z}`" => "`xy${z}`" + if (rhs.data == .e_template and rhs.data.e_template.tag == null) {} + } + + if (left.len() == 0 and rhs.knownPrimitive() == .string) { + return rhs; + } + + return null; + }, + + .e_template => |left| { + // "`${x}` + 0" => "`${x}` + '0'" + if (rhs.toStringExprWithoutSideEffects(allocator)) |str| { + rhs = str; + } + + if (left.tag == null) { + switch (rhs.data) { + // `foo${bar}` + "baz" => `foo${bar}baz` + .e_string => |right| { + if (right.isUTF8()) { + // Mutation of this node is fine because it will not + // be shared by other places. Note that e_template will + // be treated by enums as strings, but will not be + // inlined unless it can be converted into + // .e_string. 
+ if (left.parts.len > 0) { + const i = left.parts.len - 1; + const last = left.parts[i]; + if (last.tail.isUTF8()) { + left.parts[i].tail = .{ .cooked = joinStrings( + &last.tail.cooked, + right, + r.data == .e_inlined_enum, + ) }; + } + } else { + if (left.head.isUTF8()) { + left.head = .{ .cooked = joinStrings( + &left.head.cooked, + right, + r.data == .e_inlined_enum, + ) }; + } + } + + return lhs; + } + }, + // `foo${bar}` + `a${hi}b` => `foo${bar}a${hi}b` + .e_template => |right| { + if (right.tag == null and right.head.isUTF8()) { + if (left.parts.len > 0) { + const i = left.parts.len - 1; + const last = left.parts[i]; + if (last.tail.isUTF8() and right.head.isUTF8()) { + left.parts[i].tail = .{ .cooked = joinStrings( + &last.tail.cooked, + &right.head.cooked, + r.data == .e_inlined_enum, + ) }; + + left.parts = if (right.parts.len == 0) + left.parts + else + std.mem.concat( + allocator, + E.TemplatePart, + &.{ left.parts, right.parts }, + ) catch bun.outOfMemory(); + } + } else { + if (left.head.isUTF8() and right.head.isUTF8()) { + left.head = .{ .cooked = joinStrings( + &left.head.cooked, + &right.head.cooked, + r.data == .e_inlined_enum, + ) }; + left.parts = right.parts; + } + } + return lhs; + } + }, + else => { + // other constant-foldable ast nodes would have been converted to .e_string + }, } } }, - else => {}, + + else => { + // other constant-foldable ast nodes would have been converted to .e_string + }, + } + + if (rhs.data.as(.e_string)) |right| { + if (right.len() == 0 and lhs.knownPrimitive() == .string) { + return lhs; + } } return null; @@ -298,7 +488,7 @@ pub fn ExpressionTransposer( } } - pub fn tranposeKnownToBeIf(self: *This, arg: Expr, state: anytype) Expr { + pub fn transposeKnownToBeIf(self: *This, arg: Expr, state: anytype) Expr { return Expr.init( E.If, E.If{ @@ -1657,7 +1847,14 @@ pub const SideEffects = enum(u1) { pub fn isPrimitiveToReorder(data: Expr.Data) bool { return switch (data) { - .e_null, .e_undefined, .e_string, .e_boolean, .e_number, .e_big_int => true, + .e_null, + .e_undefined, + .e_string, + .e_boolean, + .e_number, + .e_big_int, + .e_inlined_enum, + => true, else => false, }; } @@ -1665,7 +1862,20 @@ pub const SideEffects = enum(u1) { pub fn simpifyUnusedExpr(p: anytype, expr: Expr) ?Expr { if (!p.options.features.dead_code_elimination) return expr; switch (expr.data) { - .e_null, .e_undefined, .e_missing, .e_boolean, .e_number, .e_big_int, .e_string, .e_this, .e_reg_exp, .e_function, .e_arrow, .e_import_meta => { + .e_null, + .e_undefined, + .e_missing, + .e_boolean, + .e_number, + .e_big_int, + .e_string, + .e_this, + .e_reg_exp, + .e_function, + .e_arrow, + .e_import_meta, + .e_inlined_enum, + => { return null; }, @@ -2014,7 +2224,14 @@ pub const SideEffects = enum(u1) { // cannot be removed due to side effects. 
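+    // For example, `typeof f()` always yields a primitive (a string), even
+    // though evaluating it may call `f` for its side effects.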
pub fn isPrimitiveWithSideEffects(data: Expr.Data) bool { switch (data) { - .e_null, .e_undefined, .e_boolean, .e_number, .e_big_int, .e_string => { + .e_null, + .e_undefined, + .e_boolean, + .e_number, + .e_big_int, + .e_string, + .e_inlined_enum, + => { return true; }, .e_unary => |e| { @@ -2124,7 +2341,7 @@ pub const SideEffects = enum(u1) { return Result{ .value = false, .side_effects = .could_have_side_effects, .ok = true }; }, - // always anull or undefined + // always a null or undefined .e_null, .e_undefined => { return Result{ .value = true, .side_effects = .no_side_effects, .ok = true }; }, @@ -2209,6 +2426,9 @@ pub const SideEffects = enum(u1) { else => {}, } }, + .e_inlined_enum => |inlined| { + return toNullOrUndefined(p, inlined.value.data); + }, else => {}, } @@ -2317,6 +2537,9 @@ pub const SideEffects = enum(u1) { else => {}, } }, + .e_inlined_enum => |inlined| { + return toBoolean(p, inlined.value.data); + }, else => {}, } @@ -2528,7 +2751,7 @@ const InvalidLoc = struct { pub const Tag = enum { spread, - parenthese, + parentheses, getter, setter, method, @@ -2539,7 +2762,7 @@ const InvalidLoc = struct { @setCold(true); const text = switch (loc.kind) { .spread => "Unexpected trailing comma after rest element", - .parenthese => "Unexpected parentheses in binding pattern", + .parentheses => "Unexpected parentheses in binding pattern", .getter => "Unexpected getter in binding pattern", .setter => "Unexpected setter in binding pattern", .method => "Unexpected method in binding pattern", @@ -2589,7 +2812,8 @@ const StringVoidMap = struct { pub const Node = Pool.Node; }; const RefCtx = @import("./ast/base.zig").RefCtx; -const SymbolUseMap = std.HashMapUnmanaged(Ref, js_ast.Symbol.Use, RefCtx, 80); +const SymbolUseMap = js_ast.Part.SymbolUseMap; +const SymbolPropertyUseMap = js_ast.Part.SymbolPropertyUseMap; const StringBoolMap = bun.StringHashMapUnmanaged(bool); const RefMap = std.HashMapUnmanaged(Ref, void, RefCtx, 80); const RefArrayMap = std.ArrayHashMapUnmanaged(Ref, void, @import("./ast/base.zig").RefHashCtx, false); @@ -2965,7 +3189,14 @@ pub const Parser = struct { var p: JavaScriptParser = undefined; try JavaScriptParser.init(this.allocator, this.log, this.source, this.define, this.lexer, this.options, &p); p.lexer.track_comments = this.options.features.minify_identifiers; - p.should_fold_typescript_constant_expressions = this.options.features.should_fold_typescript_constant_expressions; + // Instead of doing "should_fold_typescript_constant_expressions or features.minify_syntax" + // Let's enable this flag file-wide + if (p.options.features.minify_syntax or + p.options.features.inlining) + { + p.should_fold_typescript_constant_expressions = true; + } + defer p.lexer.deinit(); const result: js_ast.Result = undefined; _ = result; @@ -3030,7 +3261,6 @@ pub const Parser = struct { pub fn analyze(self: *Parser, context: *anyopaque, callback: *const fn (*anyopaque, *TSXParser, []js_ast.Part) anyerror!void) anyerror!void { var p: TSXParser = undefined; try TSXParser.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p); - p.should_fold_typescript_constant_expressions = false; defer p.lexer.deinit(); @@ -3099,7 +3329,15 @@ pub const Parser = struct { var p: ParserType = undefined; const orig_error_count = self.log.errors; try ParserType.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p); - p.should_fold_typescript_constant_expressions = self.options.features.should_fold_typescript_constant_expressions; + + // 
Instead of doing "should_fold_typescript_constant_expressions or features.minify_syntax" + // Let's enable this flag file-wide + if (p.options.features.minify_syntax or + p.options.features.inlining) + { + p.should_fold_typescript_constant_expressions = true; + } + defer p.lexer.deinit(); var binary_expression_stack_heap = std.heap.stackFallback(1024, bun.default_allocator); @@ -3251,6 +3489,11 @@ pub const Parser = struct { // that we're processing and expect to be able to access top-level variables. p.will_wrap_module_in_try_catch_for_using = p.shouldLowerUsingDeclarations(stmts); + // Bind symbols in a second pass over the AST. I started off doing this in a + // single pass, but it turns out it's pretty much impossible to do this + // correctly while handling arrow functions because of the grammar + // ambiguities. + // // Note that top-level lowered "using" declarations disable tree-shaking // because we only do tree-shaking on top-level statements and lowering // a top-level "using" declaration moves all top-level statements into a @@ -3259,6 +3502,34 @@ pub const Parser = struct { // When tree shaking is disabled, everything comes in a single part try p.appendPart(&parts, stmts); } else { + // Preprocess TypeScript enums to improve code generation. Otherwise + // uses of an enum before that enum has been declared won't be inlined: + // + // console.log(Foo.FOO) // We want "FOO" to be inlined here + // const enum Foo { FOO = 0 } + // + // The TypeScript compiler itself contains code with this pattern, so + // it's important to implement this optimization. + + var preprocessed_enums: std.ArrayListUnmanaged([]js_ast.Part) = .{}; + var preprocessed_enum_i: usize = 0; + if (p.scopes_in_order_for_enum.count() > 0) { + for (stmts) |*stmt| { + if (stmt.data == .s_enum) { + const old_scopes_in_order = p.scope_order_to_visit; + defer p.scope_order_to_visit = old_scopes_in_order; + + p.scope_order_to_visit = p.scopes_in_order_for_enum.get(stmt.loc).?; + + var enum_parts = ListManaged(js_ast.Part).init(p.allocator); + var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1); + sliced.appendAssumeCapacity(stmt.*); + try p.appendPart(&enum_parts, sliced.items); + try preprocessed_enums.append(p.allocator, enum_parts.items); + } + } + } + // When tree shaking is enabled, each top-level statement is potentially a separate part. for (stmts) |stmt| { switch (stmt.data) { @@ -3358,10 +3629,14 @@ pub const Parser = struct { parts.items.len -= 1; } }, + .s_enum => { + try parts.appendSlice(preprocessed_enums.items[preprocessed_enum_i]); + preprocessed_enum_i += 1; + p.scope_order_to_visit = p.scope_order_to_visit[1..]; + }, else => { var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1); - sliced.items.len = 1; - sliced.items[0] = stmt; + sliced.appendAssumeCapacity(stmt); try p.appendPart(&parts, sliced.items); }, } @@ -4695,7 +4970,7 @@ fn NewParser_( legacy_cjs_import_stmts: std.ArrayList(Stmt), injected_define_symbols: List(Ref) = .{}, - symbol_uses: js_ast.Part.SymbolUseMap = .{}, + symbol_uses: SymbolUseMap = .{}, declared_symbols: DeclaredSymbol.List = .{}, declared_symbols_for_reuse: DeclaredSymbol.List = .{}, runtime_imports: RuntimeImports = RuntimeImports{}, @@ -4742,11 +5017,13 @@ fn NewParser_( /// we don't implement certain items in this list. For example, we don't do all /// number-to-string conversions since ours might differ from how JavaScript /// would do it, which would be a correctness issue. 
+ /// + /// This flag is also set globally when minify_syntax is enabled, which means + /// we always fold constant expressions. should_fold_typescript_constant_expressions: bool = false, emitted_namespace_vars: RefMap = RefMap{}, is_exported_inside_namespace: RefRefMap = .{}, - known_enum_values: Map(Ref, StringHashMapUnmanaged(f64)) = .{}, local_type_names: StringBoolMap = StringBoolMap{}, // This is the reference to the generated function argument for the namespace, @@ -4786,6 +5063,7 @@ fn NewParser_( import_records: ImportRecordList, import_records_for_current_part: List(u32) = .{}, export_star_import_records: List(u32) = .{}, + import_symbol_property_uses: SymbolPropertyUseMap = .{}, // These are for handling ES6 imports and exports esm_import_keyword: logger.Range = logger.Range.None, @@ -4820,6 +5098,7 @@ fn NewParser_( // symbols to handle declaring a hoisted "var" symbol in a nested scope and // binding a name to it in a parent or sibling scope. scopes_in_order: ScopeOrderList = .{}, + scope_order_to_visit: []ScopeOrder = &.{}, // These properties are for the visit pass, which runs after the parse pass. // The visit pass binds identifiers to declared symbols, does constant @@ -4937,15 +5216,36 @@ fn NewParser_( // This is a general place to put lots of Expr objects expr_list: List(Expr) = .{}, - scope_order_to_visit: []ScopeOrder = &([_]ScopeOrder{}), - const_values: js_ast.Ast.ConstValuesMap = .{}, binary_expression_stack: std.ArrayList(BinaryExpressionVisitor) = undefined, + /// We build up enough information about the TypeScript namespace hierarchy to + /// be able to resolve scope lookups and property accesses for TypeScript enum + /// and namespace features. Each JavaScript scope object inside a namespace + /// has a reference to a map of exported namespace members from sibling scopes. + /// + /// In addition, there is a map from each relevant symbol reference to the data + /// associated with that namespace or namespace member: "ref_to_ts_namespace_member". + /// This gives enough info to be able to resolve queries into the namespace. + ref_to_ts_namespace_member: std.AutoHashMapUnmanaged(Ref, js_ast.TSNamespaceMember.Data) = .{}, + /// When visiting expressions, namespace metadata is associated with the most + /// recently visited node. If namespace metadata is present, "ts_namespace.ref" + /// will be set to the most recently visited node (as a way to mark that this + /// node has metadata) and "ts_namespace.data" will be set to the metadata. 
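+    ///
+    /// For example, while visiting `Foo.Bar` where `Foo` names a namespace,
+    /// visiting `Foo` records its member map here so that the subsequent
+    /// `.Bar` property access can be resolved (and possibly inlined) from it.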
+ ts_namespace: RecentlyVisitedTSNamespace = .{}, + top_level_enums: std.ArrayListUnmanaged(Ref) = .{}, + + scopes_in_order_for_enum: std.AutoArrayHashMapUnmanaged(logger.Loc, []ScopeOrder) = .{}, + // If this is true, then all top-level statements are wrapped in a try/catch will_wrap_module_in_try_catch_for_using: bool = false, + const RecentlyVisitedTSNamespace = struct { + ref: Ref = Ref.None, + data: ?*js_ast.TSNamespaceMemberMap = null, + }; + /// use this instead of checking p.source.index /// because when not bundling, p.source.index is `0` inline fn isSourceRuntime(p: *const P) bool { @@ -5520,24 +5820,6 @@ fn NewParser_( } } - pub fn deinit(parser: *P) void { - parser.allocated_names.deinit(); - parser.scopes_for_current_part.deinit(); - parser.symbols.deinit(); - parser.ts_use_counts.deinit(); - parser.declared_symbols.deinit(); - parser.known_enum_values.deinit(); - parser.import_records.deinit(); - parser.import_records_for_current_part.deinit(); - parser.export_star_import_records.deinit(); - parser.import_items_for_namespace.deinit(); - parser.named_imports.deinit(); - parser.import_namespace_cc_map.deinit(); - parser.scopes_in_order.deinit(); - parser.temp_refs_to_declare.deinit(); - parser.relocated_top_level_vars.deinit(); - } - pub fn findSymbol(p: *P, loc: logger.Loc, name: string) !FindSymbolResult { return findSymbolWithRecordUsage(p, loc, name, true); } @@ -5552,22 +5834,21 @@ fn NewParser_( const allocator = p.allocator; const ref: Ref = brk: { - var _scope: ?*Scope = p.current_scope; + var current: ?*Scope = p.current_scope; - var did_forbid_argumen = false; - - while (_scope) |scope| : (_scope = _scope.?.parent) { + var did_forbid_arguments = false; + while (current) |scope| : (current = current.?.parent) { // Track if we're inside a "with" statement body if (scope.kind == .with) { is_inside_with_scope = true; } // Forbid referencing "arguments" inside class bodies - if (scope.forbid_arguments and !did_forbid_argumen and strings.eqlComptime(name, "arguments")) { + if (scope.forbid_arguments and !did_forbid_arguments and strings.eqlComptime(name, "arguments")) { const r = js_lexer.rangeOfIdentifier(p.source, loc); p.log.addRangeErrorFmt(p.source, r, allocator, "Cannot access \"{s}\" here", .{name}) catch unreachable; - did_forbid_argumen = true; + did_forbid_arguments = true; } // Is the symbol a member of this scope? @@ -5575,6 +5856,29 @@ fn NewParser_( declare_loc = member.loc; break :brk member.ref; } + + // Is the symbol a member of this scope's TypeScript namespace? + if (scope.ts_namespace) |ts_namespace| { + if (ts_namespace.exported_members.get(name)) |member| { + if (member.data.isEnum() == ts_namespace.is_enum_scope) { + declare_loc = member.loc; + // If this is an identifier from a sibling TypeScript namespace, then we're + // going to have to generate a property access instead of a simple reference. + // Lazily-generate an identifier that represents this property access. + const gop = try ts_namespace.property_accesses.getOrPut(p.allocator, name); + if (!gop.found_existing) { + const ref = try p.newSymbol(.other, name); + gop.value_ptr.* = ref; + p.symbols.items[ref.inner_index].namespace_alias = .{ + .namespace_ref = ts_namespace.arg_ref, + .alias = name, + }; + break :brk ref; + } + break :brk gop.value_ptr.*; + } + } + } } // Allocate an "unbound" symbol @@ -5723,7 +6027,7 @@ fn NewParser_( } /// This function is very very hot. 
- pub fn handleIdentifier(p: *P, loc: logger.Loc, ident: E.Identifier, _original_name: ?string, opts: IdentifierOpts) Expr { + pub fn handleIdentifier(p: *P, loc: logger.Loc, ident: E.Identifier, original_name: ?string, opts: IdentifierOpts) Expr { const ref = ident.ref; if (p.options.features.inlining) { @@ -5733,18 +6037,57 @@ fn NewParser_( } } + // Create an error for assigning to an import namespace if ((opts.assign_target != .none or opts.is_delete_target) and p.symbols.items[ref.innerIndex()].kind == .import) { - // Create an error for assigning to an import namespace const r = js_lexer.rangeOfIdentifier(p.source, loc); p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot assign to import \"{s}\"", .{ p.symbols.items[ref.innerIndex()].original_name, }) catch unreachable; } - // TODO: TypeScript namespace - // if (opts.assign_target == .none and !opts.is_delete_target and p.options.bundle) { + // Substitute an EImportIdentifier now if this has a namespace alias + if (opts.assign_target == .none and !opts.is_delete_target) { + const symbol = &p.symbols.items[ref.inner_index]; + if (symbol.namespace_alias) |ns_alias| { + if (p.ref_to_ts_namespace_member.get(ns_alias.namespace_ref)) |ts_member_data| { + if (ts_member_data == .namespace) { + if (ts_member_data.namespace.get(ns_alias.alias)) |member| { + switch (member.data) { + .enum_number => |num| return p.wrapInlinedEnum( + .{ .loc = loc, .data = .{ .e_number = .{ .value = num } } }, + p.symbols.items[ref.inner_index].original_name, + ), - // } + .enum_string => |str| return p.wrapInlinedEnum( + .{ .loc = loc, .data = .{ .e_string = str } }, + p.symbols.items[ref.inner_index].original_name, + ), + + .namespace => |data| { + p.ts_namespace = .{ + .ref = ref, + .data = data, + }; + return p.newExpr(E.Dot{ + .target = p.newExpr(E.Identifier.init(ns_alias.namespace_ref), loc), + .name = ns_alias.alias, + .name_loc = loc, + }, loc); + }, + + else => {}, + } + } + } + } + + return p.newExpr(E.Dot{ + .target = p.newExpr(E.Identifier.init(ns_alias.namespace_ref), loc), + .name = ns_alias.alias, + .name_loc = loc, + }, loc); + } + } // Substitute an EImportIdentifier now if this is an import item if (p.is_import_item.contains(ref)) { @@ -5754,36 +6097,56 @@ fn NewParser_( ); } - // Substitute a namespace export reference now if appropriate if (is_typescript_enabled) { + if (p.ref_to_ts_namespace_member.get(ref)) |member_data| { + switch (member_data) { + .enum_number => |num| return p.wrapInlinedEnum( + .{ .loc = loc, .data = .{ .e_number = .{ .value = num } } }, + p.symbols.items[ref.inner_index].original_name, + ), + + .enum_string => |str| return p.wrapInlinedEnum( + .{ .loc = loc, .data = .{ .e_string = str } }, + p.symbols.items[ref.inner_index].original_name, + ), + + .namespace => |data| { + p.ts_namespace = .{ + .ref = ref, + .data = data, + }; + return .{ + .data = .{ .e_identifier = ident }, + .loc = loc, + }; + }, + + else => {}, + } + } + + // Substitute a namespace export reference now if appropriate if (p.is_exported_inside_namespace.get(ref)) |ns_ref| { const name = p.symbols.items[ref.innerIndex()].original_name; - // If this is a known enum value, inline the value of the enum - if (p.known_enum_values.get(ns_ref)) |enum_values| { - if (enum_values.get(name)) |number| { - return p.newExpr(E.Number{ .value = number }, loc); - } - } - - // Otherwise, create a property access on the namespace p.recordUsage(ns_ref); - - return p.newExpr(E.Dot{ .target = p.newExpr(E.Identifier{ .ref = ns_ref }, loc), .name = name, .name_loc = 
loc }, loc); + return p.newExpr(E.Dot{ + .target = p.newExpr(E.Identifier.init(ns_ref), loc), + .name = name, + .name_loc = loc, + }, loc); } } - if (_original_name) |original_name| { - const result = p.findSymbol(loc, original_name) catch unreachable; - var _ident = ident; - _ident.ref = result.ref; - return p.newExpr(_ident, loc); + if (original_name) |name| { + const result = p.findSymbol(loc, name) catch unreachable; + var id_clone = ident; + id_clone.ref = result.ref; + return p.newExpr(id_clone, loc); } - return Expr{ - .data = .{ - .e_identifier = ident, - }, + return .{ + .data = .{ .e_identifier = ident }, .loc = loc, }; } @@ -6479,7 +6842,6 @@ fn NewParser_( // "Foo.Bar.createElement" becomes: // import { Bar } from 'foo'; // Usages become Bar.createElement - switch (comptime jsx_transform_type) { .react => { if (!p.options.bundle) { @@ -6776,17 +7138,18 @@ fn NewParser_( } fn pushScopeForVisitPass(p: *P, kind: js_ast.Scope.Kind, loc: logger.Loc) anyerror!void { - // Output.print("\n+Loc: {d}\n", .{loc.start}); - // for (p.scopes_in_order.items[p.scopes_in_order_visitor_index..p.scopes_in_order.items.len]) |scope_order, i| { - // if (scope_order) |ord| { - // Output.print("Scope ({d}, {d})\n", .{ @intFromEnum(ord.scope.kind), ord.loc.start }); - // } - // } const order = p.nextScopeInOrderForVisitPass(); // Sanity-check that the scopes generated by the first and second passes match - if (order.loc.start != loc.start or order.scope.kind != kind) { - p.panic("Expected scope ({any}, {d}) in {s}, found scope ({any}, {d})", .{ kind, loc.start, p.source.path.pretty, order.scope.kind, order.loc.start }); + if (bun.Environment.allow_assert and + order.loc.start != loc.start or order.scope.kind != kind) + { + p.log.level = .verbose; + + p.log.addDebugFmt(p.source, loc, p.allocator, "Expected this scope (.{s})", .{@tagName(kind)}) catch bun.outOfMemory(); + p.log.addDebugFmt(p.source, order.loc, p.allocator, "Found this scope (.{s})", .{@tagName(order.scope.kind)}) catch bun.outOfMemory(); + + p.panic("Scope mismatch while visiting", .{}); } p.current_scope = order.scope; @@ -6819,7 +7182,7 @@ fn NewParser_( } } - if (comptime !Environment.isRelease) { + if (comptime Environment.isDebug) { // Enforce that scope locations are strictly increasing to help catch bugs // where the pushed scopes are mismatched between the first and second passes if (p.scopes_in_order.items.len > 0) { @@ -6830,6 +7193,9 @@ fn NewParser_( if (p.scopes_in_order.items[last_i]) |prev_scope| { if (prev_scope.loc.start >= loc.start) { + p.log.level = .verbose; + p.log.addDebugFmt(p.source, prev_scope.loc, p.allocator, "Previous Scope", .{}) catch bun.outOfMemory(); + p.log.addDebugFmt(p.source, loc, p.allocator, "Next Scope", .{}) catch bun.outOfMemory(); p.panic("Scope location {d} must be greater than {d}", .{ loc.start, prev_scope.loc.start }); } } @@ -6885,7 +7251,7 @@ fn NewParser_( if (ex.is_parenthesized) { invalid_loc.append(.{ .loc = p.source.rangeOfOperatorBefore(expr.loc, "(").loc, - .kind = .parenthese, + .kind = .parentheses, }) catch unreachable; } @@ -6922,7 +7288,7 @@ fn NewParser_( } if (ex.is_parenthesized) { - invalid_loc.append(.{ .loc = p.source.rangeOfOperatorBefore(expr.loc, "(").loc, .kind = .parenthese }) catch unreachable; + invalid_loc.append(.{ .loc = p.source.rangeOfOperatorBefore(expr.loc, "(").loc, .kind = .parentheses }) catch unreachable; } // p.markSyntaxFeature(compat.Destructuring, p.source.RangeOfOperatorAfter(expr.Loc, "{")) @@ -7188,10 +7554,24 @@ fn NewParser_( if 
(Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { return p.newExpr(E.Number{ .value = vals[0] + vals[1] }, v.loc); } - } - if (foldStringAddition(e_.left, e_.right)) |res| { - return res; + // "'abc' + 'xyz'" => "'abcxyz'" + if (foldStringAddition(e_.left, e_.right, p.allocator, .normal)) |res| { + return res; + } + + // "(x + 'abc') + 'xyz'" => "'abcxyz'" + if (e_.left.data.as(.e_binary)) |left| { + if (left.op == .bin_add) { + if (foldStringAddition(left.right, e_.right, p.allocator, .nested_left)) |result| { + return p.newExpr(E.Binary{ + .left = left.left, + .right = result, + .op = .bin_add, + }, e_.left.loc); + } + } + } } }, .bin_sub => { @@ -7235,108 +7615,74 @@ fn NewParser_( } }, .bin_shl => { - // TODO: - // if (p.should_fold_typescript_constant_expressions) { - // if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { - // return p.newExpr(E.Number{ .value = ((@intFromFloat(i32, vals[0]) << @intFromFloat(u32, vals[1])) & 31) }, expr.loc); - // } - // } - }, - .bin_shr => { - // TODO: - // if (p.should_fold_typescript_constant_expressions) { - // if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { - // return p.newExpr(E.Number{ .value = ((@intFromFloat(i32, vals[0]) >> @intFromFloat(u32, vals[1])) & 31) }, expr.loc); - // } - // } - }, - .bin_u_shr => { - // TODO: - // if (p.should_fold_typescript_constant_expressions) { - // if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { - // return p.newExpr(E.Number{ .value = ((@intFromFloat(i32, vals[0]) >> @intFromFloat(u32, vals[1])) & 31) }, expr.loc); - // } - // } - }, - .bin_bitwise_and => { - // TODO: - // if (p.should_fold_typescript_constant_expressions) { - // if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { - // return p.newExpr(E.Number{ .value = ((@intFromFloat(i32, vals[0]) >> @intFromFloat(u32, vals[1])) & 31) }, expr.loc); - // } - // } - }, - .bin_bitwise_or => { - // TODO: - // if (p.should_fold_typescript_constant_expressions) { - // if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { - // return p.newExpr(E.Number{ .value = ((@intFromFloat(i32, vals[0]) >> @intFromFloat(u32, vals[1])) & 31) }, expr.loc); - // } - // } - }, - .bin_bitwise_xor => { - // TODO: - // if (p.should_fold_typescript_constant_expressions) { - // if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { - // return p.newExpr(E.Number{ .value = ((@intFromFloat(i32, vals[0]) >> @intFromFloat(u32, vals[1])) & 31) }, expr.loc); - // } - // } - }, - // --------------------------------------------------------------------------------------------------- - // --------------------------------------------------------------------------------------------------- - // --------------------------------------------------------------------------------------------------- - // --------------------------------------------------------------------------------------------------- - .bin_assign => { - - // Optionally preserve the name - if (@as(Expr.Tag, e_.left.data) == .e_identifier) { - e_.right = p.maybeKeepExprSymbolName(e_.right, p.symbols.items[e_.left.data.e_identifier.ref.innerIndex()].original_name, was_anonymous_named_expr); + if (p.should_fold_typescript_constant_expressions) { + if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { + const left = floatToInt32(vals[0]); + const right: u8 = @intCast(@as(u32, @bitCast(floatToInt32(vals[1]))) % 32); + const result: i32 = @bitCast(std.math.shl(i32, left, right)); + return 
p.newExpr(E.Number{ + .value = @floatFromInt(result), + }, v.loc); + } } }, - .bin_add_assign => { - // notimpl(); + .bin_shr => { + if (p.should_fold_typescript_constant_expressions) { + if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { + const left = floatToInt32(vals[0]); + const right: u8 = @intCast(@as(u32, @bitCast(floatToInt32(vals[1]))) % 32); + const result: i32 = @bitCast(std.math.shr(i32, left, right)); + return p.newExpr(E.Number{ + .value = @floatFromInt(result), + }, v.loc); + } + } }, - .bin_sub_assign => { - // notimpl(); + .bin_u_shr => { + if (p.should_fold_typescript_constant_expressions) { + if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { + const left: u32 = @bitCast(floatToInt32(vals[0])); + const right: u8 = @intCast(@as(u32, @bitCast(floatToInt32(vals[1]))) % 32); + const result: u32 = std.math.shr(u32, left, right); + return p.newExpr(E.Number{ + .value = @floatFromInt(result), + }, v.loc); + } + } }, - .bin_mul_assign => { - // notimpl(); + .bin_bitwise_and => { + if (p.should_fold_typescript_constant_expressions) { + if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { + return p.newExpr(E.Number{ + .value = @floatFromInt((floatToInt32(vals[0]) & floatToInt32(vals[1]))), + }, v.loc); + } + } }, - .bin_div_assign => { - // notimpl(); + .bin_bitwise_or => { + if (p.should_fold_typescript_constant_expressions) { + if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { + return p.newExpr(E.Number{ + .value = @floatFromInt((floatToInt32(vals[0]) | floatToInt32(vals[1]))), + }, v.loc); + } + } }, - .bin_rem_assign => { - // notimpl(); + .bin_bitwise_xor => { + if (p.should_fold_typescript_constant_expressions) { + if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { + return p.newExpr(E.Number{ + .value = @floatFromInt((floatToInt32(vals[0]) ^ floatToInt32(vals[1]))), + }, v.loc); + } + } }, - .bin_pow_assign => { - // notimpl(); - }, - .bin_shl_assign => { - // notimpl(); - }, - .bin_shr_assign => { - // notimpl(); - }, - .bin_u_shr_assign => { - // notimpl(); - }, - .bin_bitwise_or_assign => { - // notimpl(); - }, - .bin_bitwise_and_assign => { - // notimpl(); - }, - .bin_bitwise_xor_assign => { - // notimpl(); - }, - .bin_nullish_coalescing_assign => { - // notimpl(); - }, - .bin_logical_and_assign => { - // notimpl(); - }, - .bin_logical_or_assign => { - // notimpl(); + // --------------------------------------------------------------------------------------------------- + .bin_assign => { + // Optionally preserve the name + if (e_.left.data == .e_identifier) { + e_.right = p.maybeKeepExprSymbolName(e_.right, p.symbols.items[e_.left.data.e_identifier.ref.innerIndex()].original_name, was_anonymous_named_expr); + } }, else => {}, } @@ -8286,11 +8632,7 @@ fn NewParser_( else => { if (comptime get_metadata) { const find_result = p.findSymbol(logger.Loc.Empty, p.lexer.identifier) catch unreachable; - if (p.known_enum_values.contains(find_result.ref)) { - result.* = .m_number; - } else { - result.* = .{ .m_identifier = find_result.ref }; - } + result.* = .{ .m_identifier = find_result.ref }; } try p.lexer.next(); @@ -9172,8 +9514,6 @@ fn NewParser_( // // } - // pub fn maybeRewriteExportSymbol(p: *P, ) - fn defaultNameForExpr(p: *P, expr: Expr, loc: logger.Loc) LocRef { switch (expr.data) { .e_function => |func_container| { @@ -10569,14 +10909,21 @@ fn NewParser_( const name_text = p.lexer.identifier; try p.lexer.next(); + // Generate the namespace object + const ts_namespace = 
p.getOrCreateExportedNamespaceMembers(name_text, opts.is_export, false); + const exported_members = ts_namespace.exported_members; + const ns_member_data = js_ast.TSNamespaceMember.Data{ .namespace = exported_members }; + + // Declare the namespace and create the scope var name = LocRef{ .loc = name_loc, .ref = null }; const scope_index = try p.pushScopeForParsePass(.entry, loc); + p.current_scope.ts_namespace = ts_namespace; const old_has_non_local_export_declare_inside_namespace = p.has_non_local_export_declare_inside_namespace; p.has_non_local_export_declare_inside_namespace = false; + // Parse the statements inside the namespace var stmts: ListManaged(Stmt) = ListManaged(Stmt).init(p.allocator); - if (p.lexer.token == .t_dot) { const dot_loc = p.lexer.loc(); try p.lexer.next(); @@ -10601,6 +10948,74 @@ fn NewParser_( const has_non_local_export_declare_inside_namespace = p.has_non_local_export_declare_inside_namespace; p.has_non_local_export_declare_inside_namespace = old_has_non_local_export_declare_inside_namespace; + // Add any exported members from this namespace's body as members of the + // associated namespace object. + for (stmts.items) |stmt| { + switch (stmt.data) { + .s_function => |func| { + if (func.func.flags.contains(.is_export)) { + const locref = func.func.name.?; + const fn_name = p.symbols.items[locref.ref.?.inner_index].original_name; + try exported_members.put(p.allocator, fn_name, .{ + .loc = locref.loc, + .data = .property, + }); + try p.ref_to_ts_namespace_member.put( + p.allocator, + locref.ref.?, + .property, + ); + } + }, + .s_class => |class| { + if (class.is_export) { + const locref = class.class.class_name.?; + const class_name = p.symbols.items[locref.ref.?.inner_index].original_name; + try exported_members.put(p.allocator, class_name, .{ + .loc = locref.loc, + .data = .property, + }); + try p.ref_to_ts_namespace_member.put( + p.allocator, + locref.ref.?, + .property, + ); + } + }, + inline .s_namespace, .s_enum => |ns| { + if (ns.is_export) { + if (p.ref_to_ts_namespace_member.get(ns.name.ref.?)) |member_data| { + bun.assert(member_data == .namespace); + try exported_members.put( + p.allocator, + p.symbols.items[ns.name.ref.?.inner_index].original_name, + .{ + .data = member_data, + .loc = ns.name.loc, + }, + ); + // try p.ref_to_ts_namespace_member.put( + // p.allocator, + // id.ref, + // member_data, + // ); + } + } + }, + .s_local => |local| { + if (local.is_export) { + for (local.decls.slice()) |decl| { + try p.defineExportedNamespaceBinding( + exported_members, + decl.binding, + ); + } + } + }, + else => {}, + } + } + // Import assignments may be only used in type expressions, not value // expressions. If this is the case, the TypeScript compiler removes // them entirely from the output. That can cause the namespace itself @@ -10636,7 +11051,7 @@ fn NewParser_( return p.s(S.TypeScript{}, loc); } - var arg_ref: ?Ref = null; + var arg_ref = Ref.None; if (!opts.is_typescript_declare) { // Avoid a collision with the namespace closure argument variable if the // namespace exports a symbol with the same name as the namespace itself: @@ -10659,21 +11074,58 @@ fn NewParser_( // run the renamer. For external-facing things the renamer will avoid // collisions automatically so this isn't important for correctness. arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable; - p.current_scope.generated.push(p.allocator, arg_ref.?) 
catch unreachable; + p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable; } else { arg_ref = p.newSymbol(.hoisted, name_text) catch unreachable; } + ts_namespace.arg_ref = arg_ref; } p.popScope(); if (!opts.is_typescript_declare) { - name.ref = p.declareSymbol(.ts_namespace, name_loc, name_text) catch unreachable; + name.ref = p.declareSymbol(.ts_namespace, name_loc, name_text) catch bun.outOfMemory(); + try p.ref_to_ts_namespace_member.put(p.allocator, name.ref.?, ns_member_data); } - return p.s( - S.Namespace{ .name = name, .arg = arg_ref orelse Ref.None, .stmts = stmts.items, .is_export = opts.is_export }, - loc, - ); + return p.s(S.Namespace{ + .name = name, + .arg = arg_ref, + .stmts = stmts.items, + .is_export = opts.is_export, + }, loc); + } + + fn defineExportedNamespaceBinding( + p: *P, + exported_members: *js_ast.TSNamespaceMemberMap, + binding: Binding, + ) !void { + switch (binding.data) { + .b_missing => {}, + .b_identifier => |id| { + const name = p.symbols.items[id.ref.inner_index].original_name; + try exported_members.put(p.allocator, name, .{ + .loc = binding.loc, + .data = .property, + }); + try p.ref_to_ts_namespace_member.put( + p.allocator, + id.ref, + .property, + ); + }, + .b_object => |obj| { + for (obj.properties) |prop| { + try p.defineExportedNamespaceBinding(exported_members, prop.value); + } + }, + .b_array => |obj| { + for (obj.items) |prop| { + try p.defineExportedNamespaceBinding(exported_members, prop.binding); + } + }, + else => Output.panic("Unexpected binding: {s}", .{@tagName(binding.data)}), + } } fn skipTypeScriptInterfaceStmt(p: *P, opts: *ParseStatementOptions) anyerror!void { @@ -11423,14 +11875,25 @@ fn NewParser_( const name_text = p.lexer.identifier; try p.lexer.expect(.t_identifier); var name = LocRef{ .loc = name_loc, .ref = Ref.None }; - var arg_ref = Ref.None; + + // Generate the namespace object + var arg_ref: Ref = undefined; + const ts_namespace = p.getOrCreateExportedNamespaceMembers(name_text, opts.is_export, true); + const exported_members = ts_namespace.exported_members; + const enum_member_data = js_ast.TSNamespaceMember.Data{ .namespace = exported_members }; + + // Declare the enum and create the scope + const scope_index = p.scopes_in_order.items.len; if (!opts.is_typescript_declare) { name.ref = try p.declareSymbol(.ts_enum, name_loc, name_text); _ = try p.pushScopeForParsePass(.entry, loc); + p.current_scope.ts_namespace = ts_namespace; + p.ref_to_ts_namespace_member.putNoClobber(p.allocator, name.ref.?, enum_member_data) catch bun.outOfMemory(); } try p.lexer.expect(.t_open_brace); + // Parse the body var values = std.ArrayList(js_ast.EnumValue).init(p.allocator); while (p.lexer.token != .t_close_brace) { var value = js_ast.EnumValue{ .loc = p.lexer.loc(), .ref = Ref.None, .name = undefined, .value = null }; @@ -11438,13 +11901,10 @@ fn NewParser_( // Parse the name if (p.lexer.token == .t_string_literal) { - value.name = p.lexer.toEString(); + value.name = p.lexer.toUTF8EString().data; + needs_symbol = js_lexer.isIdentifier(value.name); } else if (p.lexer.isIdentifierOrKeyword()) { - const id = p.lexer.identifier; - value.name = if (bun.strings.isAllASCII(id)) - .{ .data = id } - else - E.String.init(try bun.strings.toUTF16AllocForReal(p.allocator, id, false, false)); + value.name = p.lexer.identifier; needs_symbol = true; } else { try p.lexer.expect(.t_identifier); @@ -11453,7 +11913,7 @@ fn NewParser_( // Identifiers can be referenced by other values if (!opts.is_typescript_declare and needs_symbol) { - 
value.ref = try p.declareSymbol(.other, value.loc, try value.name.string(p.allocator)); + value.ref = try p.declareSymbol(.other, value.loc, value.name); } // Parse the initializer @@ -11464,6 +11924,11 @@ fn NewParser_( values.append(value) catch unreachable; + exported_members.put(p.allocator, value.name, .{ + .loc = value.loc, + .data = .enum_property, + }) catch bun.outOfMemory(); + if (p.lexer.token != .t_comma and p.lexer.token != .t_semicolon) { break; } @@ -11500,7 +11965,6 @@ fn NewParser_( // (function (foo) { // foo[foo["bar"] = foo] = "bar"; // })(foo || (foo = {})); - // if (p.current_scope.members.contains(name_text)) { // Add a "_" to make tests easier to read, since non-bundler tests don't // run the renamer. For external-facing things the renamer will avoid @@ -11510,6 +11974,8 @@ fn NewParser_( } else { arg_ref = p.declareSymbol(.hoisted, name_loc, name_text) catch unreachable; } + p.ref_to_ts_namespace_member.put(p.allocator, arg_ref, enum_member_data) catch bun.outOfMemory(); + ts_namespace.arg_ref = arg_ref; p.popScope(); } @@ -11524,6 +11990,30 @@ fn NewParser_( return p.s(S.TypeScript{}, loc); } + // Save these for when we do out-of-order enum visiting + // + // Make a copy of "scopesInOrder" instead of a slice or index since + // the original array may be flattened in the future by + // "popAndFlattenScope" + p.scopes_in_order_for_enum.putNoClobber( + p.allocator, + loc, + scope_order_clone: { + var count: usize = 0; + for (p.scopes_in_order.items[scope_index..]) |i| { + if (i != null) count += 1; + } + + const items = p.allocator.alloc(ScopeOrder, count) catch bun.outOfMemory(); + var i: usize = 0; + for (p.scopes_in_order.items[scope_index..]) |item| { + items[i] = item orelse continue; + i += 1; + } + break :scope_order_clone items; + }, + ) catch bun.outOfMemory(); + return p.s(S.Enum{ .name = name, .arg = arg_ref, @@ -11532,6 +12022,59 @@ fn NewParser_( }, loc); } + // Generate a TypeScript namespace object for this namespace's scope. If this + // namespace is another block that is to be merged with an existing namespace, + // use that earlier namespace's object instead. + pub fn getOrCreateExportedNamespaceMembers(p: *P, name: []const u8, is_export: bool, is_enum_scope: bool) *js_ast.TSNamespaceScope { + const map = brk: { + + // Merge with a sibling namespace from the same scope + if (p.current_scope.members.get(name)) |existing_member| { + if (p.ref_to_ts_namespace_member.get(existing_member.ref)) |member_data| { + if (member_data == .namespace) + break :brk member_data.namespace; + } + } + + // Merge with a sibling namespace from a different scope + if (is_export) { + if (p.current_scope.ts_namespace) |ns| { + if (ns.exported_members.get(name)) |member| { + if (member.data == .namespace) + break :brk member.data.namespace; + } + } + } + + break :brk null; + }; + + if (map) |existing| { + return bun.create(p.allocator, js_ast.TSNamespaceScope, .{ + .exported_members = existing, + .is_enum_scope = is_enum_scope, + .arg_ref = Ref.None, + }); + } + + // Otherwise, generate a new namespace object + // Batch the allocation of the namespace object and the map into a single allocation. 
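+            // (A sketch of why pointer identity matters here: given two sibling
+            // blocks
+            //
+            //     namespace A { export const x = 1 }
+            //     namespace A { export const y = 2 }
+            //
+            // the second block looks up and mutates the member map created by the
+            // first, so "exported_members" must stay behind one stable pointer
+            // rather than being copied into each TSNamespaceScope.)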
+            const Pair = struct {
+                map: js_ast.TSNamespaceMemberMap,
+                scope: js_ast.TSNamespaceScope,
+            };
+
+            var pair = p.allocator.create(Pair) catch bun.outOfMemory();
+            pair.map = .{};
+            pair.scope = .{
+                .exported_members = &pair.map,
+                .is_enum_scope = is_enum_scope,
+                .arg_ref = Ref.None,
+            };
+
+            return &pair.scope;
+        }
+
        fn parseExportClause(p: *P) !ExportClauseResult {
            var items = ListManaged(js_ast.ClauseItem).initCapacity(p.allocator, 1) catch unreachable;
            try p.lexer.expect(.t_open_brace);
@@ -12070,9 +12613,11 @@ fn NewParser_(
                        try p.log.addSymbolAlreadyDeclaredError(p.allocator, p.source, symbol.original_name, loc, existing.loc);
                        return existing.ref;
                    },
+
                    .keep_existing => {
                        ref = existing.ref;
                    },
+
                    .replace_with_new => {
                        symbol.link = ref;
@@ -12081,17 +12626,18 @@ fn NewParser_(
                            symbol.remove_overwritten_function_declaration = true;
                        }
                    },
+
                    .become_private_get_set_pair => {
                        ref = existing.ref;
                        symbol.kind = .private_get_set_pair;
                    },
+
                    .become_private_static_get_set_pair => {
                        ref = existing.ref;
                        symbol.kind = .private_static_get_set_pair;
                    },
                    .overwrite_with_new => {},
-                    // else => unreachable,
                }
            } else {
                p.symbols.items[ref.innerIndex()].link = existing.ref;
@@ -12245,41 +12791,22 @@ fn NewParser_(
                        bind.ref = try p.declareSymbol(kind, binding.loc, p.loadNameFromRef(bind.ref));
                    }
                },
-
                .b_array => |bind| {
                    for (bind.items) |*item| {
                        p.declareBinding(kind, &item.binding, opts) catch unreachable;
                    }
                },
-
                .b_object => |bind| {
                    for (bind.properties) |*prop| {
                        p.declareBinding(kind, &prop.value, opts) catch unreachable;
                    }
                },
-
                else => {
                    // @compileError("Missing binding type");
                },
            }
        }
-        // This is where the allocate memory to the heap for AST objects.
-        // This is a short name to keep the code more readable.
-        // It also swallows errors, but I think that's correct here.
-        // We can handle errors via the log.
-        // We'll have to deal with @wasmHeapGrow or whatever that thing is.
-        pub inline fn mm(self: *P, comptime ast_object_type: type, instance: anytype) *ast_object_type {
-            const obj = self.allocator.create(ast_object_type) catch unreachable;
-            obj.* = instance;
-            return obj;
-        }
-
-        // mmmm memory allocation
-        pub inline fn m(self: *P, kind: anytype) *@TypeOf(kind) {
-            return self.mm(@TypeOf(kind), kind);
-        }
-
        pub fn storeNameInRef(p: *P, name: string) !Ref {
            if (comptime track_symbol_usage_during_parse_pass) {
                if (p.parse_pass_symbol_uses.getPtr(name)) |res| {
@@ -13633,8 +14160,8 @@ fn NewParser_(
                    }
                    // Only continue if we have started
-                    if ((optional_start orelse .ccontinue) == .start) {
-                        optional_chain = .ccontinue;
+                    if ((optional_start orelse .continuation) == .start) {
+                        optional_chain = .continuation;
                    }
                },
                .t_no_substitution_template_literal => {
@@ -14270,16 +14797,32 @@ fn NewParser_(
            }
        };
-        pub fn panic(p: *P, comptime str: string, args: anytype) noreturn {
+        pub fn panic(p: *P, comptime fmt: string, args: anytype) noreturn {
            @setCold(true);
+            p.panicLoc(fmt, args, null);
+        }
+
+        pub fn panicLoc(p: *P, comptime fmt: string, args: anytype, loc: ?logger.Loc) noreturn {
            var panic_buffer = p.allocator.alloc(u8, 32 * 1024) catch unreachable;
            var panic_stream = std.io.fixedBufferStream(panic_buffer);
-            p.log.addRangeErrorFmt(p.source, p.lexer.range(), p.allocator, str, args) catch unreachable;
-            p.log.printForLogLevel(
-                panic_stream.writer(),
-            ) catch unreachable;
-            Global.panic("{s}", .{panic_buffer[0..panic_stream.pos]});
+            // A panic during the visit pass leaves the lexer at the end, which
+            // would make this location absolutely useless.
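+            // Callers that do know a better location pass one explicitly; the
+            // label handling later in this file does this, e.g.:
+            //
+            //     p.panicLoc("Expected label to have a ref", .{}, label.loc);
+            //
+            // so the diagnostic points at the label instead of the end of the file.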
+ const location = loc orelse p.lexer.loc(); + if (location.start < p.lexer.source.contents.len and !location.isEmpty()) { + p.log.addRangeErrorFmt( + p.source, + .{ .loc = location }, + p.allocator, + "panic here", + .{}, + ) catch bun.outOfMemory(); + } + + p.log.level = .verbose; + p.log.printForLogLevel(panic_stream.writer()) catch unreachable; + + Global.panic(fmt ++ "\n{s}", args ++ .{panic_buffer[0..panic_stream.pos]}); } pub fn parsePrefix(p: *P, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr { @@ -15399,6 +15942,7 @@ fn NewParser_( p.declared_symbols.clearRetainingCapacity(); p.scopes_for_current_part.clearRetainingCapacity(); p.import_records_for_current_part.clearRetainingCapacity(); + p.import_symbol_property_uses.clearRetainingCapacity(); p.had_commonjs_named_exports_this_visit = false; @@ -15406,8 +15950,6 @@ fn NewParser_( var opts = PrependTempRefsOpts{}; var partStmts = ListManaged(Stmt).fromOwnedSlice(allocator, stmts); - // - try p.visitStmtsAndPrependTempRefs(&partStmts, &opts); // Insert any relocated variable statements now @@ -15440,11 +15982,12 @@ fn NewParser_( } if (partStmts.items.len > 0) { - const _stmts = partStmts.items; + const final_stmts = partStmts.items; try parts.append(js_ast.Part{ - .stmts = _stmts, + .stmts = final_stmts, .symbol_uses = p.symbol_uses, + .import_symbol_property_uses = p.import_symbol_property_uses, .declared_symbols = p.declared_symbols.toOwnedSlice(), .import_record_indices = bun.BabyList(u32).init( p.import_records_for_current_part.toOwnedSlice( @@ -15452,13 +15995,13 @@ fn NewParser_( ) catch unreachable, ), .scopes = try p.scopes_for_current_part.toOwnedSlice(p.allocator), - .can_be_removed_if_unused = p.stmtsCanBeRemovedIfUnused(_stmts), + .can_be_removed_if_unused = p.stmtsCanBeRemovedIfUnused(final_stmts), .tag = if (p.had_commonjs_named_exports_this_visit) js_ast.Part.Tag.commonjs_named_export else .none, }); p.symbol_uses = .{}; + p.import_symbol_property_uses = .{}; p.had_commonjs_named_exports_this_visit = false; } else if (p.declared_symbols.len() > 0 or p.symbol_uses.count() > 0) { - // if the part is dead, invalidate all the usage counts p.clearSymbolUsagesFromDeadPart(.{ .stmts = undefined, .declared_symbols = p.declared_symbols, .symbol_uses = p.symbol_uses }); p.declared_symbols.clearRetainingCapacity(); @@ -15517,22 +16060,25 @@ fn NewParser_( // can remove a SImport statement. Otherwise the import must be kept for // its side effects. .s_import => {}, + .s_class => |st| { if (!p.classCanBeRemovedIfUnused(&st.class)) { return false; } }, + .s_expr => |st| { if (st.does_not_affect_tree_shaking) { // Expressions marked with this are automatically generated and have // no side effects by construction. 
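+                        // Note: this must be "continue", not "break". A bare "break"
+                        // exits the loop over the statement list, which would skip
+                        // the side-effect checks on every remaining statement and
+                        // wrongly report the whole list as removable.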
- break; + continue; } if (!p.exprCanBeRemovedIfUnused(&st.value)) { return false; } }, + .s_local => |st| { // "await" is a side effect because it affects code timing if (st.kind == .k_await_using) return false; @@ -15594,7 +16140,10 @@ fn NewParser_( }, } }, + else => { + // Assume that all statements not explicitly special-cased here have side + // effects, and cannot be removed even if unused return false; }, } @@ -15615,12 +16164,12 @@ fn NewParser_( try p.visitStmts(stmts, opts.kind); // Prepend values for "this" and "arguments" - if (opts.fn_body_loc != null) { + if (opts.fn_body_loc) |fn_body_loc| { // Capture "this" if (p.fn_only_data_visit.this_capture_ref) |ref| { try p.temp_refs_to_declare.append(p.allocator, TempRef{ .ref = ref, - .value = p.newExpr(E.This{}, opts.fn_body_loc orelse p.panic("Internal error: Expected opts.fn_body_loc to exist", .{})), + .value = p.newExpr(E.This{}, fn_body_loc), }); } } @@ -15776,7 +16325,6 @@ fn NewParser_( // return js_ast.Expr{Loc: expr.Loc, Data: &js_ast.EIdentifier{Ref: p.captureThis()}}, exprOut{} // } }, - .e_import_meta => { // TODO: delete import.meta might not work const is_delete_target = std.meta.activeTag(p.delete_target) == .e_import_meta; @@ -16373,38 +16921,41 @@ fn NewParser_( return current; }, .e_index => |e_| { - const is_call_target = std.meta.activeTag(p.call_target) == .e_index and expr.data.e_index == p.call_target.e_index; - const is_delete_target = std.meta.activeTag(p.delete_target) == .e_index and expr.data.e_index == p.delete_target.e_index; + const is_call_target = p.call_target == .e_index and expr.data.e_index == p.call_target.e_index; + const is_delete_target = p.delete_target == .e_index and expr.data.e_index == p.delete_target.e_index; - if (p.options.features.minify_syntax) { - if (e_.index.data == .e_string and e_.index.data.e_string.isUTF8() and e_.index.data.e_string.isIdentifier(p.allocator)) { - const dot = p.newExpr( - E.Dot{ - .name = e_.index.data.e_string.slice(p.allocator), - .name_loc = e_.index.loc, - .target = e_.target, - .optional_chain = e_.optional_chain, - }, - expr.loc, - ); + // "a['b']" => "a.b" + if (p.options.features.minify_syntax and + e_.index.data == .e_string and + e_.index.data.e_string.isUTF8() and + e_.index.data.e_string.isIdentifier(p.allocator)) + { + const dot = p.newExpr( + E.Dot{ + .name = e_.index.data.e_string.slice(p.allocator), + .name_loc = e_.index.loc, + .target = e_.target, + .optional_chain = e_.optional_chain, + }, + expr.loc, + ); - if (is_call_target) { - p.call_target = dot.data; - } - - if (is_delete_target) { - p.delete_target = dot.data; - } - - return p.visitExprInOut(dot, in); + if (is_call_target) { + p.call_target = dot.data; } + + if (is_delete_target) { + p.delete_target = dot.data; + } + + return p.visitExprInOut(dot, in); } - const target = p.visitExprInOut(e_.target, ExprIn{ - // this is awkward due to a zig compiler bug - .has_chain_parent = (e_.optional_chain orelse js_ast.OptionalChain.start) == js_ast.OptionalChain.ccontinue, + const target_visited = p.visitExprInOut(e_.target, ExprIn{ + .has_chain_parent = e_.optional_chain == .continuation, }); - e_.target = target; + e_.target = target_visited; + switch (e_.index.data) { .e_private_identifier => |_private| { var private = _private; @@ -16436,80 +16987,102 @@ fn NewParser_( else => { const index = p.visitExpr(e_.index); e_.index = index; + + const unwrapped = e_.index.unwrapInlined(); + if (unwrapped.data == .e_string and + unwrapped.data.e_string.isUTF8()) + { + // "a['b' + '']" => "a.b" + 
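+                                // (the '+' above is constant-folded during the visit,
+                                // so by this point the index is already a plain e_string)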
// "enum A { B = 'b' }; a[A.B]" => "a.b" + if (p.options.features.minify_syntax and + unwrapped.data.e_string.isIdentifier(p.allocator)) + { + const dot = p.newExpr( + E.Dot{ + .name = unwrapped.data.e_string.slice(p.allocator), + .name_loc = unwrapped.loc, + .target = e_.target, + .optional_chain = e_.optional_chain, + }, + expr.loc, + ); + + if (is_call_target) { + p.call_target = dot.data; + } + + if (is_delete_target) { + p.delete_target = dot.data; + } + + return p.visitExprInOut(dot, in); + } + + // Handle property rewrites to ensure things + // like .e_import_identifier tracking works + // Reminder that this can only be done after + // `target` is visited. + if (p.maybeRewritePropertyAccess( + expr.loc, + e_.target, + unwrapped.data.e_string.data, + unwrapped.loc, + .{ + .is_call_target = is_call_target, + // .is_template_tag = is_template_tag, + .is_delete_target = is_delete_target, + .assign_target = in.assign_target, + }, + )) |rewrite| { + return rewrite; + } + } }, } - if (e_.optional_chain == null and e_.index.data == .e_string and e_.index.data.e_string.isUTF8()) { - const literal = e_.index.data.e_string.slice(p.allocator); - if (p.maybeRewritePropertyAccess( - expr.loc, - e_.target, - literal, - e_.index.loc, - .{ - .is_call_target = is_call_target, - // .is_template_tag = is_template_tag, - .is_delete_target = is_delete_target, - .assign_target = in.assign_target, - }, - )) |val| { - return val; - } + const target = e_.target.unwrapInlined(); + const index = e_.index.unwrapInlined(); - // delete process.env["NODE_ENV"] - // shouldn't be transformed into - // delete undefined - if (!is_delete_target and !is_call_target and in.assign_target == .none) { - // We check for defines here as well - // esbuild doesn't do this - // In a lot of codebases, people will sometimes do: - // process.env["NODE_ENV"] - // Often not intentionally - // So we want to be able to detect this and still Do The Right Thing - if (p.define.dots.get(literal)) |parts| { - for (parts) |define| { - if (p.isDotDefineMatch(expr, define.parts)) { - if (!define.data.valueless) { - return p.valueForDefine(expr.loc, in.assign_target, is_delete_target, &define.data); + if (p.options.features.minify_syntax) { + if (index.data.as(.e_number)) |number| { + if (number.value >= 0 and + number.value < std.math.maxInt(usize) and + @mod(number.value, 1) == 0) + { + // "foo"[2] -> "o" + if (target.data.as(.e_string)) |str| { + if (str.isUTF8()) { + const literal = str.slice(p.allocator); + const num: usize = index.data.e_number.toUsize(); + if (Environment.allow_assert) { + bun.assert(bun.strings.isAllASCII(literal)); } + if (num < literal.len) { + return p.newExpr(E.String{ .data = literal[num .. 
num + 1] }, expr.loc); + } + } + } else if (target.data.as(.e_array)) |array| { + // [x][0] -> x + if (array.items.len == 1 and number.value == 0) { + const inlined = target.data.e_array.items.at(0).*; + if (inlined.canBeInlinedFromPropertyAccess()) + return inlined; + } + + // ['a', 'b', 'c'][1] -> 'b' + const int: usize = @intFromFloat(number.value); + if (int < array.items.len and p.exprCanBeRemovedIfUnused(&target)) { + const inlined = target.data.e_array.items.at(int).*; + // ['a', , 'c'][1] -> undefined + if (inlined.data == .e_missing) return p.newExpr(E.Undefined{}, inlined.loc); + if (Environment.allow_assert) assert(inlined.canBeInlinedFromPropertyAccess()); + return inlined; } } } } - // "foo"[2] - } else if ((comptime FeatureFlags.inline_properties_in_transpiler) and - e_.optional_chain == null and - target.data == .e_string and - e_.index.data == .e_number and - target.data.e_string.isUTF8() and - e_.index.data.e_number.value >= 0) - { - const literal = target.data.e_string.slice(p.allocator); - const index = e_.index.data.e_number.toUsize(); - if (literal.len > index) { - return p.newExpr(E.String{ .data = literal[index .. index + 1] }, expr.loc); - } - } else if ((comptime FeatureFlags.inline_properties_in_transpiler) and - // Input: - // - // [123][0] - // - // Output: - // - // 123 - in.assign_target == .none and - !is_delete_target and - !is_call_target and - // target should already be on the stack - target.data == .e_array and - target.data.e_array.items.len == 1 and - e_.index.data == .e_number and - e_.index.data.e_number.value == 0.0 and - e_.optional_chain == null and - target.data.e_array.items.ptr[0].canBeInlinedFromPropertyAccess()) - { - return target.data.e_array.items.ptr[0]; } + // Create an error for assigning to an import namespace when bundling. Even // though this is a run-time error, we make it a compile-time error when // bundling because scope hoisting means these will no longer be run-time @@ -16558,7 +17131,6 @@ fn NewParser_( e_.value = p.visitExprInOut(e_.value, ExprIn{ .assign_target = e_.op.unaryAssignTarget() }); // Post-process the unary expression - switch (e_.op) { .un_not => { if (p.options.features.minify_syntax) @@ -16575,6 +17147,15 @@ fn NewParser_( } } }, + .un_cpl => { + if (p.should_fold_typescript_constant_expressions) { + if (SideEffects.toNumber(e_.value.data)) |value| { + return p.newExpr(E.Number{ + .value = @floatFromInt(~floatToInt32(value)), + }, expr.loc); + } + } + }, .un_void => { if (p.exprCanBeRemovedIfUnused(&e_.value)) { return p.newExpr(E.Undefined{}, e_.value.loc); @@ -16914,6 +17495,18 @@ fn NewParser_( .type_attribute = e_.type_attribute, }; + // We want to forcefully fold constants inside of imports + // even when minification is disabled, so that if we have an + // import based on a string template, it does not cause a + // bundle error. 
This is especially relevant for bundling NAPI
+                // modules with 'bun build --compile':
+                //
+                // const binding = await import(`./${process.platform}-${process.arch}.node`);
+                //
+                const prev_should_fold_typescript_constant_expressions = p.should_fold_typescript_constant_expressions;
+                defer p.should_fold_typescript_constant_expressions = prev_should_fold_typescript_constant_expressions;
+                p.should_fold_typescript_constant_expressions = true;
+
                e_.expr = p.visitExpr(e_.expr);
                return p.import_transposer.maybeTransposeIf(e_.expr, state);
            },
@@ -16928,7 +17521,7 @@
                const target_was_identifier_before_visit = e_.target.data == .e_identifier;
                e_.target = p.visitExprInOut(e_.target, ExprIn{
-                    .has_chain_parent = (e_.optional_chain orelse js_ast.OptionalChain.start) == .ccontinue,
+                    .has_chain_parent = (e_.optional_chain orelse js_ast.OptionalChain.start) == .continuation,
                });
                // Copy the call side effect flag over if this is a known target
@@ -16982,8 +17575,23 @@
                {
                    const old_ce = p.options.ignore_dce_annotations;
                    defer p.options.ignore_dce_annotations = old_ce;
-                    if (is_macro_ref)
+                    const old_should_fold_typescript_constant_expressions = p.should_fold_typescript_constant_expressions;
+                    defer p.should_fold_typescript_constant_expressions = old_should_fold_typescript_constant_expressions;
+
+                    // We want to forcefully fold constants inside of
+                    // certain calls even when minification is disabled, so
+                    // that if we have an import based on a string template,
+                    // it does not cause a bundle error. This is relevant for
+                    // macros, as they require constant known values, but also
+                    // for `require` and `require.resolve`, as they go through
+                    // the module resolver.
+                    if (is_macro_ref or
+                        e_.target.data == .e_require_call_target or
+                        e_.target.data == .e_require_resolve_call_target)
+                    {
                        p.options.ignore_dce_annotations = true;
+                        p.should_fold_typescript_constant_expressions = true;
+                    }
                    for (e_.args.slice()) |*arg| {
                        arg.* = p.visitExpr(arg.*);
@@ -17009,7 +17617,7 @@
                            .e_if => {
                                // require(FOO ? '123' : '456') => FOO ? require('123') : require('456')
                                // This makes static analysis later easier
-                                return p.require_transposer.tranposeKnownToBeIf(first, state);
+                                return p.require_transposer.transposeKnownToBeIf(first, state);
                            },
                            else => {},
                        }
@@ -17055,7 +17663,7 @@
                                // =>
                                // FOO ? require.resolve('123') : require.resolve('456')
                                // This makes static analysis later easier
-                                return p.require_resolve_transposer.tranposeKnownToBeIf(first, e_.target);
+                                return p.require_resolve_transposer.transposeKnownToBeIf(first, e_.target);
                            },
                            else => {},
                        }
@@ -17088,8 +17696,7 @@
                        const copied = Expr{ .loc = expr.loc, .data = .{ .e_call = e_ } };
                        const start_error_count = p.log.msgs.items.len;
                        p.macro_call_count += 1;
-                        const macro_result =
-                            p.options.macro_context.call(
+                        const macro_result = p.options.macro_context.call(
                            record.path.text,
                            p.source.path.sourceDir(),
                            p.log,
@@ -17369,6 +17976,8 @@
                    return true;
                },
+                .e_inlined_enum => |e| return p.exprCanBeRemovedIfUnused(&e.value),
+
                .e_dot => |ex| {
                    return ex.can_be_removed_if_unused;
                },
@@ -17465,7 +18074,6 @@
                    return true;
                },
                .e_call => |ex| {
-
                    // A call that has been marked "__PURE__" can be removed if all arguments
                    // can be removed. The annotation causes us to ignore the target.
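+                    // For example:
+                    //
+                    //     const x = /* @__PURE__ */ factory(arg);
+                    //
+                    // can be dropped when "x" is unused, provided "arg" itself has
+                    // no side effects; only the call target is ignored, not the
+                    // arguments.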
if (ex.can_be_unwrapped_if_unused) { @@ -17914,24 +18522,6 @@ fn NewParser_( return .{ .stmt = p.s(S.SExpr{ .value = value }, value.loc), .ok = true }; } - // fn maybeInlineMacroObject(p: *P, decl: *G.Decl, macro: Expr) void { - // if (decl.value == null) return; - // switch (decl.binding.data) { - // .b_identifier => |ident| { - // if (macro.get(p.loadNameFromRef(ident.ref))) |val| { - // decl - // } - // } - // } - // } - // if (comptime allow_macros) { - // if (p.macro_call_count and data.decls[i].value != null and - // data.decls[i].value.?.data == .e_object and data.decls[i].value.?.data.e_object.was_originally_macro) - // { - // p.maybeInlineMacroObject(&data.decls[i], data.decls[i].value.?); - // } - // } - // EDot nodes represent a property access. This function may return an // expression to replace the property access with. It assumes that the // target of the EDot expression has already been visited. @@ -17945,7 +18535,6 @@ fn NewParser_( ) ?Expr { switch (target.data) { .e_identifier => |id| { - // Rewrite property accesses on explicit namespace imports as an identifier. // This lets us replace them easily in the printer to rebind them to // something else without paying the cost of a whole-tree traversal during @@ -18227,22 +18816,52 @@ fn NewParser_( } } - // If this is a known enum value, inline the value of the enum - if (is_typescript_enabled) { - if (p.known_enum_values.get(id.ref)) |enum_value_map| { - if (enum_value_map.get(name)) |enum_value| { - return p.newExpr(E.Number{ .value = enum_value }, loc); + // Handle references to namespaces or namespace members + if (id.ref.eql(p.ts_namespace.ref) and + identifier_opts.assign_target == .none and + !identifier_opts.is_delete_target) + { + if (p.ts_namespace.data.?.get(name)) |value| { + switch (value.data) { + .enum_number => |num| { + p.ignoreUsageOfIdentifierInDotChain(target); + return p.wrapInlinedEnum( + .{ .loc = loc, .data = .{ .e_number = .{ .value = num } } }, + name, + ); + }, + + .enum_string => |str| { + p.ignoreUsageOfIdentifierInDotChain(target); + return p.wrapInlinedEnum( + .{ .loc = loc, .data = .{ .e_string = str } }, + name, + ); + }, + + .namespace => |data| { + p.ts_namespace = .{ + .ref = id.ref, + .data = data, + }; + return .{ + .loc = loc, + .data = .{ .e_identifier = id }, + }; + }, + else => {}, } } } }, + // TODO: e_inlined_enum -> .e_string -> "length" should inline the length .e_string => |str| { // Disable until https://github.com/oven-sh/bun/issues/4217 is fixed if (comptime FeatureFlags.minify_javascript_string_length) { if (p.options.features.minify_syntax) { // minify "long-string".length to 11 if (strings.eqlComptime(name, "length")) { - return p.newExpr(E.Number{ .value = @as(f64, @floatFromInt(str.javascriptLength())) }, loc); + return p.newExpr(E.Number{ .value = @floatFromInt(str.javascriptLength()) }, loc); } } } @@ -18250,7 +18869,6 @@ fn NewParser_( .e_object => |obj| { if (comptime FeatureFlags.inline_properties_in_transpiler) { if (p.options.features.minify_syntax) { - // // Rewrite a property access like this: // { f: () => {} }.f // To: @@ -18268,7 +18886,8 @@ fn NewParser_( prop.flags.count() == 0 and prop.key != null and prop.key.?.data == .e_string and - prop.key.?.data.e_string.eql([]const u8, name)) + prop.key.?.data.e_string.eql([]const u8, name) and + !bun.strings.eqlComptime(name, "__proto__")) { return prop.value.?; } @@ -18290,6 +18909,31 @@ fn NewParser_( ); } }, + .e_import_identifier => |id| { + // Symbol uses due to a property access off of an imported symbol are 
tracked
+                    // specially. This lets us do tree shaking for cross-file TypeScript enums.
+                    if (p.options.bundle and !p.is_control_flow_dead) {
+                        const i = p.symbol_uses.getIndex(id.ref).?;
+                        const use = &p.symbol_uses.values()[i];
+                        use.count_estimate -= 1;
+                        if (use.count_estimate == 0) {
+                            // p.symbol_uses.swapRemoveAt(i);
+                        }
+
+                        // Add a special symbol use instead
+                        const gop = p.import_symbol_property_uses.getOrPutValue(
+                            p.allocator,
+                            id.ref,
+                            .{},
+                        ) catch bun.outOfMemory();
+                        const inner_use = gop.value_ptr.getOrPutValue(
+                            p.allocator,
+                            name,
+                            .{},
+                        ) catch bun.outOfMemory();
+                        inner_use.value_ptr.count_estimate += 1;
+                    }
+                },
                else => {},
            }
@@ -18313,6 +18957,30 @@
            // the value is ignored because that's what the TypeScript compiler does.
        }
+        pub fn ignoreUsageOfIdentifierInDotChain(p: *P, expr: Expr) void {
+            var current = expr;
+            while (true) {
+                switch (current.data) {
+                    .e_identifier => |id| {
+                        p.ignoreUsage(id.ref);
+                    },
+                    .e_dot => |dot| {
+                        current = dot.target;
+                        continue;
+                    },
+                    .e_index => |index| {
+                        if (index.index.isString()) {
+                            current = index.target;
+                            continue;
+                        }
+                    },
+                    else => return,
+                }
+
+                return;
+            }
+        }
+
        fn visitAndAppendStmt(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt) anyerror!void {
            // By default any statement ends the const local prefix
            const was_after_after_const_local_prefix = p.current_scope.is_after_const_local_prefix;
@@ -18686,7 +19354,7 @@
            },
            .s_break => |data| {
                if (data.label) |*label| {
-                    const name = p.loadNameFromRef(label.ref orelse p.panic("Expected label to have a ref", .{}));
+                    const name = p.loadNameFromRef(label.ref orelse p.panicLoc("Expected label to have a ref", .{}, label.loc));
                    const res = p.findLabelSymbol(label.loc, name);
                    if (res.found) {
                        label.ref = res.ref;
@@ -18700,7 +19368,7 @@
            },
            .s_continue => |data| {
                if (data.label) |*label| {
-                    const name = p.loadNameFromRef(label.ref orelse p.panic("Expected continue label to have a ref", .{}));
+                    const name = p.loadNameFromRef(label.ref orelse p.panicLoc("Expected continue label to have a ref", .{}, label.loc));
                    const res = p.findLabelSymbol(label.loc, name);
                    label.ref = res.ref;
                    if (res.found and !res.is_loop) {
@@ -19388,10 +20056,25 @@
                return;
            },
            .s_enum => |data| {
-                p.recordDeclaredSymbol(data.name.ref.?) catch unreachable;
-                p.pushScopeForVisitPass(.entry, stmt.loc) catch unreachable;
+                // Do not end the const local prefix after TypeScript enums. We process
+                // them first within their scope so that they are inlined into all code in
+                // that scope. We don't want that to cause the const local prefix to end.
+                p.current_scope.is_after_const_local_prefix = was_after_after_const_local_prefix;
+
+                // Track cross-module enum constants during bundling. This
+                // part of the code is different from esbuild in that we are
+                // only storing a list of enum indexes. At the time of
+                // referencing, `esbuild` builds a separate hash map of hash
+                // maps. We are avoiding that to reduce memory usage, since
+                // enum inlining already uses a lot of hash maps.
+                if (p.current_scope == p.module_scope and p.options.bundle) {
+                    try p.top_level_enums.append(p.allocator, data.name.ref.?);
+                }
+
+                p.recordDeclaredSymbol(data.name.ref.?)
catch bun.outOfMemory(); + p.pushScopeForVisitPass(.entry, stmt.loc) catch bun.outOfMemory(); defer p.popScope(); - p.recordDeclaredSymbol(data.arg) catch unreachable; + p.recordDeclaredSymbol(data.arg) catch bun.outOfMemory(); const allocator = p.allocator; // Scan ahead for any variables inside this namespace. This must be done @@ -19399,79 +20082,132 @@ fn NewParser_( // because we may end up visiting the uses before the declarations. // We need to convert the uses into property accesses on the namespace. for (data.values) |value| { - if (!value.ref.isNull()) { - p.is_exported_inside_namespace.put(allocator, value.ref, data.arg) catch unreachable; + if (value.ref.isValid()) { + p.is_exported_inside_namespace.put(allocator, value.ref, data.arg) catch bun.outOfMemory(); } } // Values without initializers are initialized to one more than the // previous value if the previous value is numeric. Otherwise values // without initializers are initialized to undefined. - var next_numeric_value: f64 = 0.0; - var has_numeric_value = true; + var next_numeric_value: ?f64 = 0.0; - var value_exprs = ListManaged(Expr).initCapacity(allocator, data.values.len) catch unreachable; + var value_exprs = ListManaged(Expr).initCapacity(allocator, data.values.len) catch bun.outOfMemory(); - // Track values so they can be used by constant folding. We need to follow - // links here in case the enum was merged with a preceding namespace - var values_so_far = StringHashMapUnmanaged(f64){}; + var all_values_are_pure = true; - p.known_enum_values.put(allocator, data.name.ref orelse p.panic("Expected data.name.ref", .{}), values_so_far) catch unreachable; - p.known_enum_values.put(allocator, data.arg, values_so_far) catch unreachable; + const exported_members = p.current_scope.ts_namespace.?.exported_members; // We normally don't fold numeric constants because they might increase code // size, but it's important to fold numeric constants inside enums since // that's what the TypeScript compiler does. 
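+                        // For example, the bitwise folding added above is what allows
+                        //
+                        //     enum Flags { A = 1 << 0, B = 1 << 1, AB = A | B }
+                        //
+                        // to evaluate to 1, 2 and 3 at compile time, so that a later
+                        // use of "Flags.AB" can be inlined as the literal 3.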
const old_should_fold_typescript_constant_expressions = p.should_fold_typescript_constant_expressions; p.should_fold_typescript_constant_expressions = true; - for (data.values) |*enum_value| { - // gotta allocate here so it lives after this function stack frame goes poof - const name = enum_value.name; - var assign_target: Expr = Expr{ .loc = logger.Loc.Empty, .data = Prefill.Data.EMissing }; + + // Create an assignment for each enum value + for (data.values) |*value| { + const name = value.name; + var has_string_value = false; + if (value.value) |enum_value| { + next_numeric_value = null; - if (enum_value.value != null) { - enum_value.value = p.visitExpr(enum_value.value.?); - switch (enum_value.value.?.data) { + const visited = p.visitExpr(enum_value); + + // "See through" any wrapped comments + const underlying_value = if (visited.data == .e_inlined_enum) + visited.data.e_inlined_enum.value + else + visited; + value.value = underlying_value; + + switch (underlying_value.data) { .e_number => |num| { + exported_members.getPtr(name).?.data = .{ .enum_number = num.value }; + + p.ref_to_ts_namespace_member.put( + p.allocator, + value.ref, + .{ .enum_number = num.value }, + ) catch bun.outOfMemory(); - // prob never allocates in practice - values_so_far.put(allocator, name.string(allocator) catch unreachable, num.value) catch unreachable; - has_numeric_value = true; next_numeric_value = num.value + 1.0; }, - .e_string => { + .e_string => |str| { has_string_value = true; + + exported_members.getPtr(name).?.data = .{ .enum_string = str }; + + p.ref_to_ts_namespace_member.put( + p.allocator, + value.ref, + .{ .enum_string = str }, + ) catch bun.outOfMemory(); + }, + else => { + if (enum_value.knownPrimitive() == .string) { + has_string_value = true; + } + + if (!p.exprCanBeRemovedIfUnused(&enum_value)) { + all_values_are_pure = false; + } }, - else => {}, } - } else if (has_numeric_value) { - enum_value.value = p.newExpr(E.Number{ .value = next_numeric_value }, enum_value.loc); - values_so_far.put(allocator, name.string(allocator) catch unreachable, next_numeric_value) catch unreachable; - next_numeric_value += 1; + } else if (next_numeric_value) |num| { + value.value = p.newExpr(E.Number{ .value = num }, value.loc); + + next_numeric_value = num + 1; + + exported_members.getPtr(name).?.data = .{ .enum_number = num }; + + p.ref_to_ts_namespace_member.put( + p.allocator, + value.ref, + .{ .enum_number = num }, + ) catch bun.outOfMemory(); } else { - enum_value.value = p.newExpr(E.Undefined{}, enum_value.loc); + value.value = p.newExpr(E.Undefined{}, value.loc); } - // "Enum['Name'] = value" - assign_target = Expr.assign( - p.newExpr(E.Index{ - .target = p.newExpr( - E.Identifier{ .ref = data.arg }, - enum_value.loc, - ), - .index = p.newExpr( - enum_value.name, - enum_value.loc, - ), - }, enum_value.loc), - enum_value.value orelse unreachable, - ); + + const is_assign_target = p.options.features.minify_syntax and bun.js_lexer.isIdentifier(value.name); + + const name_as_e_string = if (!is_assign_target or !has_string_value) + p.newExpr(value.nameAsEString(allocator), value.loc) + else + null; + + const assign_target = if (is_assign_target) + // "Enum.Name = value" + Expr.assign( + p.newExpr(E.Dot{ + .target = p.newExpr( + E.Identifier{ .ref = data.arg }, + value.loc, + ), + .name = value.name, + .name_loc = value.loc, + }, value.loc), + value.value.?, + ) + else + // "Enum['Name'] = value" + Expr.assign( + p.newExpr(E.Index{ + .target = p.newExpr( + E.Identifier{ .ref = data.arg }, + value.loc, + 
), + .index = name_as_e_string.?, + }, value.loc), + value.value.?, + ); p.recordUsage(data.arg); // String-valued enums do not form a two-way map if (has_string_value) { - value_exprs.append(assign_target) catch unreachable; + value_exprs.append(assign_target) catch bun.outOfMemory(); } else { // "Enum[assignTarget] = 'Name'" value_exprs.append( @@ -19479,15 +20215,15 @@ fn NewParser_( p.newExpr(E.Index{ .target = p.newExpr( E.Identifier{ .ref = data.arg }, - enum_value.loc, + value.loc, ), .index = assign_target, - }, enum_value.loc), - p.newExpr(enum_value.name, enum_value.loc), + }, value.loc), + name_as_e_string.?, ), - ) catch unreachable; + ) catch bun.outOfMemory(); + p.recordUsage(data.arg); } - p.recordUsage(data.arg); } p.should_fold_typescript_constant_expressions = old_should_fold_typescript_constant_expressions; @@ -19506,6 +20242,7 @@ fn NewParser_( data.name.ref.?, data.arg, value_stmts.items, + all_values_are_pure, ); return; }, @@ -19546,6 +20283,7 @@ fn NewParser_( data.name.ref.?, data.arg, prepend_list.items, + false, ); return; }, @@ -19877,11 +20615,13 @@ fn NewParser_( stmt_loc: logger.Loc, is_export: bool, name_loc: logger.Loc, - _name_ref: Ref, + original_name_ref: Ref, arg_ref: Ref, stmts_inside_closure: []Stmt, + all_values_are_pure: bool, ) anyerror!void { - var name_ref = _name_ref; + var name_ref = original_name_ref; + // Follow the link chain in case symbols were merged var symbol: Symbol = p.symbols.items[name_ref.innerIndex()]; while (symbol.hasLink()) { @@ -19893,49 +20633,47 @@ fn NewParser_( // Make sure to only emit a variable once for a given namespace, since there // can be multiple namespace blocks for the same namespace - if (symbol.kind == .ts_namespace or symbol.kind == .ts_enum and !p.emitted_namespace_vars.contains(name_ref)) { - p.emitted_namespace_vars.put(allocator, name_ref, {}) catch unreachable; + if ((symbol.kind == .ts_namespace or symbol.kind == .ts_enum) and + !p.emitted_namespace_vars.contains(name_ref)) + { + p.emitted_namespace_vars.putNoClobber(allocator, name_ref, {}) catch bun.outOfMemory(); - var decls = allocator.alloc(G.Decl, 1) catch unreachable; + var decls = allocator.alloc(G.Decl, 1) catch bun.outOfMemory(); decls[0] = G.Decl{ .binding = p.b(B.Identifier{ .ref = name_ref }, name_loc) }; if (p.enclosing_namespace_arg_ref == null) { - // Top-level namespace + // Top-level namespace: "var" stmts.append( - p.s( - S.Local{ - .kind = .k_var, - .decls = G.Decl.List.init(decls), - .is_export = is_export, - }, - stmt_loc, - ), - ) catch unreachable; + p.s(S.Local{ + .kind = .k_var, + .decls = G.Decl.List.init(decls), + .is_export = is_export, + }, stmt_loc), + ) catch bun.outOfMemory(); } else { - // Nested namespace + // Nested namespace: "let" stmts.append( - p.s( - S.Local{ - .kind = .k_let, - .decls = G.Decl.List.init(decls), - }, - stmt_loc, - ), - ) catch unreachable; + p.s(S.Local{ + .kind = .k_let, + .decls = G.Decl.List.init(decls), + }, stmt_loc), + ) catch bun.outOfMemory(); } } - var arg_expr: Expr = undefined; + const arg_expr: Expr = arg_expr: { + // TODO: unsupportedJSFeatures.has(.logical_assignment) + // If the "||=" operator is supported, our minified output can be slightly smaller + if (is_export) if (p.enclosing_namespace_arg_ref) |namespace| { + const name = p.symbols.items[name_ref.innerIndex()].original_name; - if (is_export and p.enclosing_namespace_arg_ref != null) { - const namespace = p.enclosing_namespace_arg_ref.?; - // "name = enclosing.name || (enclosing.name = {})" - const name = 
p.symbols.items[name_ref.innerIndex()].original_name; - arg_expr = Expr.assign( - Expr.initIdentifier(name_ref, name_loc), - p.newExpr( - E.Binary{ - .op = .bin_logical_or, + // "name = (enclosing.name ||= {})" + p.recordUsage(namespace); + p.recordUsage(name_ref); + break :arg_expr Expr.assign( + Expr.initIdentifier(name_ref, name_loc), + p.newExpr(E.Binary{ + .op = .bin_logical_or_assign, .left = p.newExpr( E.Dot{ .target = Expr.initIdentifier(namespace, name_loc), @@ -19944,75 +20682,74 @@ fn NewParser_( }, name_loc, ), - .right = Expr.assign( - p.newExpr( - E.Dot{ - .target = Expr.initIdentifier(namespace, name_loc), - .name = name, - .name_loc = name_loc, - }, - name_loc, - ), - p.newExpr(E.Object{}, name_loc), - ), - }, - name_loc, - ), - ); - p.recordUsage(namespace); - p.recordUsage(namespace); + .right = p.newExpr(E.Object{}, name_loc), + }, name_loc), + ); + }; + + // "name ||= {}" p.recordUsage(name_ref); - } else { - // "name || (name = {})" - arg_expr = p.newExpr(E.Binary{ - .op = .bin_logical_or, + break :arg_expr p.newExpr(E.Binary{ + .op = .bin_logical_or_assign, .left = Expr.initIdentifier(name_ref, name_loc), - .right = Expr.assign( - Expr.initIdentifier(name_ref, name_loc), - p.newExpr( - E.Object{}, - name_loc, - ), - ), + .right = p.newExpr(E.Object{}, name_loc), }, name_loc); - p.recordUsage(name_ref); - p.recordUsage(name_ref); - } - - var func_args = allocator.alloc(G.Arg, 1) catch unreachable; - func_args[0] = .{ .binding = p.b(B.Identifier{ .ref = arg_ref }, name_loc) }; - var args_list = allocator.alloc(ExprNodeIndex, 1) catch unreachable; - args_list[0] = arg_expr; - const func = G.Fn{ - .args = func_args, - .name = null, - .open_parens_loc = stmt_loc, - .body = G.FnBody{ - .loc = stmt_loc, - .stmts = try allocator.dupe(StmtNodeIndex, stmts_inside_closure), - }, }; - const target = p.newExpr( - E.Function{ - .func = func, - }, - stmt_loc, - ); + var func_args = allocator.alloc(G.Arg, 1) catch bun.outOfMemory(); + func_args[0] = .{ .binding = p.b(B.Identifier{ .ref = arg_ref }, name_loc) }; + + var args_list = allocator.alloc(ExprNodeIndex, 1) catch bun.outOfMemory(); + args_list[0] = arg_expr; + + // TODO: if unsupported features includes arrow functions + // const target = p.newExpr( + // E.Function{ .func = .{ + // .args = func_args, + // .name = null, + // .open_parens_loc = stmt_loc, + // .body = G.FnBody{ + // .loc = stmt_loc, + // .stmts = try allocator.dupe(StmtNodeIndex, stmts_inside_closure), + // }, + // } }, + // stmt_loc, + // ); + + const target = target: { + // "(() => { foo() })()" => "(() => foo())()" + if (p.options.features.minify_syntax and stmts_inside_closure.len == 1) { + if (stmts_inside_closure[0].data == .s_expr) { + stmts_inside_closure[0] = p.s(S.Return{ + .value = stmts_inside_closure[0].data.s_expr.value, + }, stmts_inside_closure[0].loc); + } + } + + break :target p.newExpr(E.Arrow{ + .args = func_args, + .body = .{ + .loc = stmt_loc, + .stmts = try allocator.dupe(StmtNodeIndex, stmts_inside_closure), + }, + .prefer_expr = true, + }, stmt_loc); + }; + + // Call the closure with the name object const call = p.newExpr( E.Call{ .target = target, .args = ExprNodeList.init(args_list), + .can_be_unwrapped_if_unused = all_values_are_pure, }, stmt_loc, ); - const closure = p.s( - S.SExpr{ - .value = call, - }, - stmt_loc, - ); + const closure = p.s(S.SExpr{ + .value = call, + .does_not_affect_tree_shaking = all_values_are_pure, + }, stmt_loc); stmts.append(closure) catch unreachable; } @@ -20604,6 +21341,19 @@ fn NewParser_( return 
Expr.initIdentifier(ref, loc); } + fn wrapInlinedEnum(p: *P, value: Expr, comment: string) Expr { + if (bun.strings.containsComptime(comment, "*/")) { + // Don't wrap with a comment + return value; + } + + // Wrap with a comment + return p.newExpr(E.InlinedEnum{ + .value = value, + .comment = comment, + }, value.loc); + } + fn valueForDefine(p: *P, loc: logger.Loc, assign_target: js_ast.AssignTarget, is_delete_target: bool, define_data: *const DefineData) Expr { switch (define_data.value) { .e_identifier => { @@ -21149,7 +21899,7 @@ fn NewParser_( @compileError("only_scan_imports_and_do_not_visit must not run this."); } - const initial_scope: *Scope = if (comptime Environment.allow_assert) p.current_scope else undefined; + const initial_scope = if (comptime Environment.allow_assert) p.current_scope else {}; { // Save the current control-flow liveness. This represents if we are @@ -21158,7 +21908,37 @@ fn NewParser_( defer p.is_control_flow_dead = old_is_control_flow_dead; var before = ListManaged(Stmt).init(p.allocator); + defer before.deinit(); + var after = ListManaged(Stmt).init(p.allocator); + defer after.deinit(); + + // Preprocess TypeScript enums to improve code generation. Otherwise + // uses of an enum before that enum has been declared won't be inlined: + // + // console.log(Foo.FOO) // We want "FOO" to be inlined here + // const enum Foo { FOO = 0 } + // + // The TypeScript compiler itself contains code with this pattern, so + // it's important to implement this optimization. + var preprocessed_enums: std.ArrayListUnmanaged([]Stmt) = .{}; + defer preprocessed_enums.deinit(p.allocator); + if (p.scopes_in_order_for_enum.count() > 0) { + var found: usize = 0; + for (stmts.items) |*stmt| { + if (stmt.data == .s_enum) { + const old_scopes_in_order = p.scope_order_to_visit; + defer p.scope_order_to_visit = old_scopes_in_order; + + p.scope_order_to_visit = p.scopes_in_order_for_enum.get(stmt.loc).?; + + var temp = ListManaged(Stmt).init(p.allocator); + try p.visitAndAppendStmt(&temp, stmt); + try preprocessed_enums.append(p.allocator, temp.items); + found += 1; + } + } + } if (p.current_scope == p.module_scope) { p.macro.prepend_stmts = &before; @@ -21166,10 +21946,9 @@ fn NewParser_( // visit all statements first var visited = try ListManaged(Stmt).initCapacity(p.allocator, stmts.items.len); - - defer before.deinit(); defer visited.deinit(); - defer after.deinit(); + + var preprocessed_enum_i: usize = 0; for (stmts.items) |*stmt| { const list = list_getter: { @@ -21194,6 +21973,13 @@ fn NewParser_( break :list_getter &before; } }, + .s_enum => { + const enum_stmts = preprocessed_enums.items[preprocessed_enum_i]; + preprocessed_enum_i += 1; + try visited.appendSlice(enum_stmts); + p.scope_order_to_visit = p.scope_order_to_visit[1..]; + continue; + }, else => {}, } break :list_getter &visited; @@ -21926,6 +22712,33 @@ fn NewParser_( return ref; } + pub fn computeTsEnumsMap(p: *const P, allocator: Allocator) !js_ast.Ast.TsEnumsMap { + const InlinedEnumValue = js_ast.InlinedEnumValue; + var map: js_ast.Ast.TsEnumsMap = .{}; + try map.ensureTotalCapacity(allocator, @intCast(p.top_level_enums.items.len)); + for (p.top_level_enums.items) |ref| { + const entry = p.ref_to_ts_namespace_member.getEntry(ref).?; + const namespace = entry.value_ptr.namespace; + var inner_map: bun.StringHashMapUnmanaged(InlinedEnumValue) = .{}; + try inner_map.ensureTotalCapacity(allocator, @intCast(namespace.count())); + for (namespace.keys(), namespace.values()) |key, val| { + switch (val.data) { + .enum_number => 
|num| inner_map.putAssumeCapacityNoClobber( + key, + InlinedEnumValue.encode(.{ .number = num }), + ), + .enum_string => |str| inner_map.putAssumeCapacityNoClobber( + key, + InlinedEnumValue.encode(.{ .string = str }), + ), + else => continue, + } + } + map.putAssumeCapacity(entry.key_ptr.*, inner_map); + } + return map; + } + fn shouldLowerUsingDeclarations(p: *const P, stmts: []Stmt) bool { // TODO: We do not support lowering await, but when we do this needs to point to that var const lower_await = false; @@ -22985,8 +23798,9 @@ fn NewParser_( .hashbang = hashbang, - // TODO: + // TODO: cross-module constant inlining // .const_values = p.const_values, + .ts_enums = try p.computeTsEnumsMap(allocator), .import_meta_ref = p.import_meta_ref, }; @@ -23189,3 +24003,14 @@ const CommonJSWrapper = union(enum) { bun_dev: Expr, bun_js: void, }; + +/// Equivalent of esbuild's js_ast_helpers.ToInt32 +fn floatToInt32(f: f64) i32 { + // Special-case non-finite numbers + if (!std.math.isFinite(f)) + return 0; + + const uint: u32 = @intFromFloat(@mod(@abs(f), std.math.maxInt(u32) + 1)); + const int: i32 = @bitCast(uint); + return if (f < 0) @as(i32, 0) -% int else int; +} diff --git a/src/js_printer.zig b/src/js_printer.zig index 1861990846..382c33bf57 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -544,7 +544,8 @@ pub const Options = struct { module_type: options.OutputFormat = .preserve, /// Used for cross-module inlining of import items when bundling - const_values: std.HashMapUnmanaged(Ref, Expr, Ref.HashCtx, 80) = .{}, + // const_values: Ast.ConstValuesMap = .{}, + ts_enums: Ast.TsEnumsMap = .{}, // TODO: remove this // The reason for this is: @@ -2549,13 +2550,17 @@ fn NewPrinter( } }, .e_dot => |e| { - // Ironic Zig compiler bug: e.optional_chain == null or e.optional_chain == .start causes broken LLVM IR - // https://github.com/ziglang/zig/issues/6059 - const isOptionalChain = (e.optional_chain orelse js_ast.OptionalChain.ccontinue) == js_ast.OptionalChain.start; + const isOptionalChain = e.optional_chain == .start; var wrap = false; if (e.optional_chain == null) { flags.insert(.has_non_optional_chain_parent); + + // Inline cross-module TypeScript enum references here + if (p.tryToGetImportedEnumValue(e.target, e.name)) |inlined| { + p.printInlinedEnum(inlined, e.name, level); + return; + } } else { if (flags.contains(.has_non_optional_chain_parent)) { wrap = true; @@ -2609,6 +2614,15 @@ fn NewPrinter( var wrap = false; if (e.optional_chain == null) { flags.insert(.has_non_optional_chain_parent); + + if (e.index.data.as(.e_string)) |str| { + str.resolveRopeIfNeeded(p.options.allocator); + + if (str.isUTF8()) if (p.tryToGetImportedEnumValue(e.target, str.data)) |value| { + p.printInlinedEnum(value, str.data, level); + return; + }; + } } else { if (flags.contains(.has_non_optional_chain_parent)) { wrap = true; @@ -2619,10 +2633,7 @@ fn NewPrinter( p.printExpr(e.target, .postfix, flags); - // Zig compiler bug: e.optional_chain == null or e.optional_chain == .start causes broken LLVM IR - // https://github.com/ziglang/zig/issues/6059 - const is_optional_chain_start = (e.optional_chain orelse js_ast.OptionalChain.ccontinue) == js_ast.OptionalChain.start; - + const is_optional_chain_start = e.optional_chain == .start; if (is_optional_chain_start) { p.print("?."); } @@ -2949,75 +2960,8 @@ fn NewPrinter( p.print('n'); }, .e_number => |e| { - const value = e.value; p.addSourceMapping(expr.loc); - - const absValue = @abs(value); - - if (std.math.isNan(value)) { - 
p.printSpaceBeforeIdentifier(); - - p.print("NaN"); - } else if (std.math.isPositiveInf(value) or std.math.isNegativeInf(value)) { - const wrap = ((!p.options.has_run_symbol_renamer or p.options.minify_syntax) and level.gte(.multiply)) or - (std.math.isNegativeInf(value) and level.gte(.prefix)); - - if (wrap) { - p.print("("); - } - - if (std.math.isNegativeInf(value)) { - p.printSpaceBeforeOperator(.un_neg); - p.print("-"); - } else { - p.printSpaceBeforeIdentifier(); - } - - // If we are not running the symbol renamer, we must not print "Infinity". - // Some code may assign `Infinity` to another idenitifier. - // - // We do not want: - // - // const Infinity = 1 / 0 - // - // to be transformed into: - // - // const Infinity = Infinity - // - if (is_json or (!p.options.minify_syntax and p.options.has_run_symbol_renamer)) { - p.print("Infinity"); - } else if (p.options.minify_whitespace) { - p.print("1/0"); - } else { - p.print("1 / 0"); - } - - if (wrap) { - p.print(")"); - } - } else if (!std.math.signbit(value)) { - p.printSpaceBeforeIdentifier(); - - p.printNonNegativeFloat(absValue); - - // Remember the end of the latest number - p.prev_num_end = p.writer.written; - } else if (level.gte(.prefix)) { - // Expressions such as "(-1).toString" need to wrap negative numbers. - // Instead of testing for "value < 0" we test for "signbit(value)" and - // "!isNaN(value)" because we need this to be true for "-0" and "-0 < 0" - // is false. - p.print("(-"); - p.printNonNegativeFloat(absValue); - p.print(")"); - } else { - p.printSpaceBeforeOperator(Op.Code.un_neg); - p.print("-"); - p.printNonNegativeFloat(absValue); - - // Remember the end of the latest number - p.prev_num_end = p.writer.written; - } + p.printNumber(e.value, level); }, .e_identifier => |e| { const name = p.renamer.nameForSymbol(e.ref); @@ -3112,16 +3056,15 @@ fn NewPrinter( p.print(")"); } } - } else if (p.options.const_values.count() > 0) { - if (p.options.const_values.get(ref)) |const_value| { - p.printSpaceBeforeIdentifier(); - // TODO: addSourceMappingForName - // p.addSourceMappingForName(renamer.nameForSymbol(e.ref)); - p.addSourceMapping(expr.loc); - p.printExpr(const_value, level, flags); - didPrint = true; - } } + // else if (p.options.const_values.get(ref)) |const_value| { + // p.printSpaceBeforeIdentifier(); + // // TODO: addSourceMappingForName + // // p.addSourceMappingForName(renamer.nameForSymbol(e.ref)); + // p.addSourceMapping(expr.loc); + // p.printExpr(const_value, level, flags); + // didPrint = true; + // } if (!didPrint) { p.printSpaceBeforeIdentifier(); @@ -3246,8 +3189,17 @@ fn NewPrinter( last.visitRightAndFinish(p); } }, + .e_inlined_enum => |e| { + p.printExpr(e.value, level, flags); + if (!p.options.minify_whitespace and !p.options.minify_identifiers) { + p.print(" /* "); + p.print(e.comment); + p.print(" */"); + } + }, else => { - // Global.panic("Unexpected expression of type {any}", .{std.meta.activeTag(expr.data}); + if (Environment.isDebug) + Output.panic("Unexpected expression of type .{s}", .{@tagName(expr.data)}); }, } } @@ -3465,7 +3417,8 @@ fn NewPrinter( p.prev_reg_exp_end = p.writer.written; } - pub fn printProperty(p: *Printer, item: G.Property) void { + pub fn printProperty(p: *Printer, item_in: G.Property) void { + var item = item_in; if (comptime !is_json) { if (item.kind == .spread) { if (comptime is_json and Environment.allow_assert) @@ -3475,6 +3428,28 @@ fn NewPrinter( return; } + // Handle key syntax compression for cross-module constant inlining of enums + if 
(p.options.minify_syntax and item.flags.contains(.is_computed)) { + if (item.key.?.data.as(.e_dot)) |dot| { + if (p.tryToGetImportedEnumValue(dot.target, dot.name)) |value| { + switch (value) { + .string => |str| { + item.key.?.data = .{ .e_string = str }; + + // Problematic key names must stay computed for correctness + if (!str.eqlComptime("__proto__") and !str.eqlComptime("constructor") and !str.eqlComptime("prototype")) { + item.flags.setPresent(.is_computed, false); + } + }, + .number => |num| { + item.key.?.data = .{ .e_number = .{ .value = num } }; + item.flags.setPresent(.is_computed, false); + }, + } + } + } + } + if (item.flags.contains(.is_static)) { if (comptime is_json and Environment.allow_assert) unreachable; @@ -3536,11 +3511,7 @@ fn NewPrinter( const _key = item.key.?; - if (item.flags.contains(.is_computed)) { - if (comptime is_json) { - unreachable; - } - + if (!is_json and item.flags.contains(.is_computed)) { p.print("["); p.printExpr(_key, .comma, ExprFlag.None()); p.print("]"); @@ -3610,10 +3581,11 @@ fn NewPrinter( } } }, - .e_import_identifier => |e| inner: { + // .e_import_identifier => |e| inner: { + .e_import_identifier => |e| { const ref = p.symbols().follow(e.ref); - if (p.options.const_values.count() > 0 and p.options.const_values.contains(ref)) - break :inner; + // if (p.options.const_values.count() > 0 and p.options.const_values.contains(ref)) + // break :inner; if (p.symbols().get(ref)) |symbol| { if (symbol.namespace_alias == null and strings.eql(key.data, p.renamer.nameForSymbol(e.ref))) { @@ -3650,11 +3622,12 @@ fn NewPrinter( } // if (strings) {} }, - .e_import_identifier => |e| inner: { + // .e_import_identifier => |e| inner: { + .e_import_identifier => |e| { const ref = p.symbols().follow(e.ref); - if (p.options.const_values.count() > 0 and p.options.const_values.contains(ref)) - break :inner; + // if (p.options.const_values.count() > 0 and p.options.const_values.contains(ref)) + // break :inner; if (p.symbols().get(ref)) |symbol| { if (symbol.namespace_alias == null and strings.utf16EqlString(key.slice16(), p.renamer.nameForSymbol(e.ref))) { @@ -5270,6 +5243,47 @@ fn NewPrinter( } } + pub fn tryToGetImportedEnumValue(p: *Printer, target: Expr, name: []const u8) ?js_ast.InlinedEnumValue.Decoded { + if (target.data.as(.e_import_identifier)) |id| { + const ref = p.symbols().follow(id.ref); + if (p.symbols().get(ref)) |symbol| { + if (symbol.kind == .ts_enum) { + if (p.options.ts_enums.get(ref)) |enum_value| { + if (enum_value.get(name)) |value| + return value.decode(); + } + } + } + } + return null; + } + + pub fn printInlinedEnum( + p: *Printer, + inlined: js_ast.InlinedEnumValue.Decoded, + comment: []const u8, + level: Level, + ) void { + switch (inlined) { + .number => |num| p.printNumber(num, level), + + // TODO: extract printString + .string => |str| p.printExpr(.{ + .data = .{ .e_string = str }, + .loc = logger.Loc.Empty, + }, level, .{}), + } + + if (!p.options.minify_whitespace and !p.options.minify_identifiers) { + // TODO: rewrite this to handle + if (!bun.strings.containsComptime(comment, "*/")) { + p.print(" /* "); + p.print(comment); + p.print(" */"); + } + } + } + pub fn printDeclStmt(p: *Printer, is_export: bool, comptime keyword: string, decls: []G.Decl) void { p.printIndent(); p.printSpaceBeforeIdentifier(); @@ -5414,6 +5428,73 @@ fn NewPrinter( } } + pub fn printNumber(p: *Printer, value: f64, level: Level) void { + const absValue = @abs(value); + if (std.math.isNan(value)) { + p.printSpaceBeforeIdentifier(); + p.print("NaN"); + } else 
if (std.math.isPositiveInf(value) or std.math.isNegativeInf(value)) {
+            const wrap = ((!p.options.has_run_symbol_renamer or p.options.minify_syntax) and level.gte(.multiply)) or
+                (std.math.isNegativeInf(value) and level.gte(.prefix));
+
+            if (wrap) {
+                p.print("(");
+            }
+
+            if (std.math.isNegativeInf(value)) {
+                p.printSpaceBeforeOperator(.un_neg);
+                p.print("-");
+            } else {
+                p.printSpaceBeforeIdentifier();
+            }
+
+            // If we are not running the symbol renamer, we must not print "Infinity".
+            // Some code may assign `Infinity` to another identifier.
+            //
+            // We do not want:
+            //
+            //   const Infinity = 1 / 0
+            //
+            // to be transformed into:
+            //
+            //   const Infinity = Infinity
+            //
+            if (is_json or (!p.options.minify_syntax and p.options.has_run_symbol_renamer)) {
+                p.print("Infinity");
+            } else if (p.options.minify_whitespace) {
+                p.print("1/0");
+            } else {
+                p.print("1 / 0");
+            }
+
+            if (wrap) {
+                p.print(")");
+            }
+        } else if (!std.math.signbit(value)) {
+            p.printSpaceBeforeIdentifier();
+
+            p.printNonNegativeFloat(absValue);
+
+            // Remember the end of the latest number
+            p.prev_num_end = p.writer.written;
+        } else if (level.gte(.prefix)) {
+            // Expressions such as "(-1).toString" need to wrap negative numbers.
+            // Instead of testing for "value < 0" we test for "signbit(value)" and
+            // "!isNaN(value)" because we need this to be true for "-0" and "-0 < 0"
+            // is false.
+            p.print("(-");
+            p.printNonNegativeFloat(absValue);
+            p.print(")");
+        } else {
+            p.printSpaceBeforeOperator(Op.Code.un_neg);
+            p.print("-");
+            p.printNonNegativeFloat(absValue);
+
+            // Remember the end of the latest number
+            p.prev_num_end = p.writer.written;
+        }
+    }
+
     pub fn printIndentedComment(p: *Printer, _text: string) void {
         var text = _text;
         if (strings.startsWith(text, "/*")) {
diff --git a/src/jsc.zig b/src/jsc.zig
index 842adb4c93..f3aa1580bd 100644
--- a/src/jsc.zig
+++ b/src/jsc.zig
@@ -106,7 +106,8 @@ pub const GeneratedClassesList = @import("./bun.js/bindings/generated_classes_li
 pub const RuntimeTranspilerCache = @import("./bun.js/RuntimeTranspilerCache.zig").RuntimeTranspilerCache;
 
 /// The calling convention used for JavaScript functions <> Native
-pub const conv = if (@import("root").bun.Environment.isWindows and @import("root").bun.Environment.isX64)
+const bun = @import("root").bun;
+pub const conv = if (bun.Environment.isWindows and bun.Environment.isX64)
     std.builtin.CallingConvention.SysV
 else
     std.builtin.CallingConvention.C;
diff --git a/src/memory_allocator.zig b/src/memory_allocator.zig
index 159cbc15d2..d479b35e52 100644
--- a/src/memory_allocator.zig
+++ b/src/memory_allocator.zig
@@ -1,7 +1,10 @@
 const mem = @import("std").mem;
 const builtin = @import("std").builtin;
 const std = @import("std");
-
+const bun = @import("root").bun;
+const log = bun.Output.scoped(.mimalloc, true);
+const assert = bun.assert;
+const Allocator = mem.Allocator;
 const mimalloc = @import("./allocators/mimalloc.zig");
 const FeatureFlags = @import("./feature_flags.zig");
 const Environment = @import("./env.zig");
@@ -27,39 +30,10 @@ fn mimalloc_free(
     }
 }
 
-const c = struct {
-    pub const malloc_size = mimalloc.mi_malloc_size;
-    pub const malloc_usable_size = mimalloc.mi_malloc_usable_size;
-    pub const malloc = struct {
-        pub inline fn malloc_wrapped(size: usize) ?*anyopaque {
-            if (comptime FeatureFlags.log_allocations) std.debug.print("Malloc: {d}\n", .{size});
-
-            return mimalloc.mi_malloc(size);
-        }
-    }.malloc_wrapped;
-    pub inline fn free(ptr: anytype) void {
-        if (comptime Environment.isDebug) {
assert(mimalloc.mi_is_in_heap_region(ptr)); - } - - mimalloc.mi_free(ptr); - } - pub const posix_memalign = struct { - pub inline fn mi_posix_memalign(p: [*c]?*anyopaque, alignment: usize, size: usize) c_int { - if (comptime FeatureFlags.log_allocations) std.debug.print("Posix_memalign: {d}\n", .{std.mem.alignForward(size, alignment)}); - return mimalloc.mi_posix_memalign(p, alignment, size); - } - }.mi_posix_memalign; -}; -const Allocator = mem.Allocator; -const assert = @import("root").bun.assert; const CAllocator = struct { - const malloc_size = c.malloc_size; pub const supports_posix_memalign = true; fn alignedAlloc(len: usize, alignment: usize) ?[*]u8 { - if (comptime FeatureFlags.log_allocations) std.debug.print("Malloc: {d}\n", .{len}); - const ptr: ?*anyopaque = if (mimalloc.canUseAlignedAlloc(len, alignment)) mimalloc.mi_malloc_aligned(len, alignment) else @@ -78,7 +52,7 @@ const CAllocator = struct { } fn alignedAllocSize(ptr: [*]u8) usize { - return CAllocator.malloc_size(ptr); + return mimalloc.mi_malloc_size(ptr); } fn alloc(_: *anyopaque, len: usize, log2_align: u8, _: usize) ?[*]u8 { @@ -117,11 +91,10 @@ const c_allocator_vtable = Allocator.VTable{ }; const ZAllocator = struct { - const malloc_size = c.malloc_size; pub const supports_posix_memalign = true; fn alignedAlloc(len: usize, alignment: usize) ?[*]u8 { - if (comptime FeatureFlags.log_allocations) std.debug.print("Malloc: {d}\n", .{len}); + log("ZAllocator.alignedAlloc: {d}\n", .{len}); const ptr = if (mimalloc.canUseAlignedAlloc(len, alignment)) mimalloc.mi_zalloc_aligned(len, alignment) @@ -141,7 +114,7 @@ const ZAllocator = struct { } fn alignedAllocSize(ptr: [*]u8) usize { - return CAllocator.malloc_size(ptr); + return mimalloc.mi_malloc_size(ptr); } fn alloc(_: *anyopaque, len: usize, ptr_align: u8, _: usize) ?[*]u8 { diff --git a/src/mimalloc_arena.zig b/src/mimalloc_arena.zig index a32a0ce3d0..1c2db8c09d 100644 --- a/src/mimalloc_arena.zig +++ b/src/mimalloc_arena.zig @@ -8,6 +8,7 @@ const FeatureFlags = @import("./feature_flags.zig"); const Allocator = mem.Allocator; const assert = bun.assert; const bun = @import("root").bun; +const log = bun.Output.scoped(.mimalloc, true); pub const GlobalArena = struct { arena: Arena, @@ -202,7 +203,7 @@ pub const Arena = struct { pub const supports_posix_memalign = true; fn alignedAlloc(heap: *mimalloc.Heap, len: usize, alignment: usize) ?[*]u8 { - if (comptime FeatureFlags.log_allocations) std.debug.print("Malloc: {d}\n", .{len}); + log("Malloc: {d}\n", .{len}); const ptr: ?*anyopaque = if (mimalloc.canUseAlignedAlloc(len, alignment)) mimalloc.mi_heap_malloc_aligned(heap, len, alignment) diff --git a/src/multi_array_list.zig b/src/multi_array_list.zig index 2fcd7453f3..8a85525034 100644 --- a/src/multi_array_list.zig +++ b/src/multi_array_list.zig @@ -1,6 +1,7 @@ const std = @import("std"); const builtin = @import("builtin"); -const assert = @import("root").bun.assert; +const bun = @import("root").bun; +const assert = bun.assert; const meta = std.meta; const mem = std.mem; const Allocator = mem.Allocator; diff --git a/src/output.zig b/src/output.zig index 9ed0aac8e0..61c6f3cabb 100644 --- a/src/output.zig +++ b/src/output.zig @@ -354,7 +354,7 @@ pub fn disableBuffering() void { if (comptime Environment.isNative) enable_buffering = false; } -pub noinline fn panic(comptime fmt: string, args: anytype) noreturn { +pub fn panic(comptime fmt: string, args: anytype) noreturn { @setCold(true); if (Output.isEmojiEnabled()) { diff --git a/src/patch.zig b/src/patch.zig index 
ab19217262..e64bca19dd 100644 --- a/src/patch.zig +++ b/src/patch.zig @@ -1043,7 +1043,10 @@ const PatchLinesParser = struct { if (b_part_start >= line.len) return null; const lmao_bro = line[b_part_start..]; std.mem.doNotOptimizeAway(lmao_bro); - const b_part_end = if (std.mem.indexOfAny(u8, line[b_part_start..], " \n\r\t")) |pos| pos + b_part_start else line.len; + const b_part_end = if (bun.strings.indexAnyComptime(line[b_part_start..], " \n\r\t")) |pos| + pos + b_part_start + else + line.len; const b_part = line[b_part_start..b_part_end]; for (a_part) |c| if (!VALID_CHARS.isSet(c)) return null; diff --git a/src/pool.zig b/src/pool.zig index ec5a07388b..7d994c752f 100644 --- a/src/pool.zig +++ b/src/pool.zig @@ -1,4 +1,5 @@ const std = @import("std"); +const bun = @import("root").bun; fn SinglyLinkedList(comptime T: type, comptime Parent: type) type { return struct { @@ -157,7 +158,7 @@ pub fn ObjectPool( pub fn push(allocator: std.mem.Allocator, pooled: Type) void { if (comptime @import("./env.zig").allow_assert) - @import("root").bun.assert(!full()); + bun.assert(!full()); const new_node = allocator.create(LinkedList.Node) catch unreachable; new_node.* = LinkedList.Node{ diff --git a/src/runtime.zig b/src/runtime.zig index 8892cad444..e369e567b1 100644 --- a/src/runtime.zig +++ b/src/runtime.zig @@ -244,7 +244,6 @@ pub const Runtime = struct { jsx_optimization_hoist: bool = false, trim_unused_imports: bool = false, - should_fold_typescript_constant_expressions: bool = false, /// Use `import.meta.require()` instead of require()? /// This is only supported in Bun. @@ -286,7 +285,6 @@ pub const Runtime = struct { .dead_code_elimination, .set_breakpoint_on_first_line, .trim_unused_imports, - .should_fold_typescript_constant_expressions, .use_import_meta_require, .dont_bundle_twice, .commonjs_at_runtime, diff --git a/src/sha.zig b/src/sha.zig index 0fac3db29c..9447f4bb7a 100644 --- a/src/sha.zig +++ b/src/sha.zig @@ -196,125 +196,3 @@ const labels = [_][]const u8{ "Blake2", "Blake3", }; -pub fn main() anyerror!void { - var file = try std.fs.cwd().openFileZ(bun.argv[bun.argv.len - 1], .{}); - const bytes = try file.readToEndAlloc(std.heap.c_allocator, std.math.maxInt(usize)); - - const engine = BoringSSL.ENGINE_new().?; - - std.debug.print( - "Hashing {any:3}\n\n", - .{bun.fmt.size(bytes.len)}, - ); - - { - var clock1 = try std.time.Timer.start(); - std.mem.doNotOptimizeAway(bun.hash(bytes)); - const zig_time = clock1.read(); - std.debug.print( - "Wyhash:\n\n zig: {any}\n\n", - .{std.fmt.fmtDuration(zig_time)}, - ); - } - - { - var clock1 = try std.time.Timer.start(); - std.mem.doNotOptimizeAway(std.hash.XxHash64.hash(0, bytes)); - const zig_time = clock1.read(); - std.debug.print( - "xxhash:\n\n zig: {any}\n\n", - .{std.fmt.fmtDuration(zig_time)}, - ); - } - - { - var clock1 = try std.time.Timer.start(); - std.mem.doNotOptimizeAway(std.hash.Murmur2_64.hash(bytes)); - const zig_time = clock1.read(); - std.debug.print( - "Murmur2_64:\n\n zig: {any}\n\n", - .{std.fmt.fmtDuration(zig_time)}, - ); - } - - inline for (evp, 0..) 
|BoringHasher, i| { - const ZigHasher = zig[i]; - std.debug.print( - comptime labels[i] ++ ":\n\n", - .{}, - ); - const DigestType = if (BoringHasher != void) BoringHasher.Digest else [32]u8; - var digest1 = std.mem.zeroes(DigestType); - var digest2 = std.mem.zeroes(DigestType); - var digest3 = std.mem.zeroes(DigestType); - var digest4 = std.mem.zeroes(DigestType); - defer { - std.mem.doNotOptimizeAway(&digest1); - std.mem.doNotOptimizeAway(&digest2); - std.mem.doNotOptimizeAway(&digest3); - std.mem.doNotOptimizeAway(&digest4); - } - - var clock1 = try std.time.Timer.start(); - ZigHasher.hash(bytes, &digest1, .{}); - const zig_time = clock1.read(); - - const boring_time = brk: { - if (BoringHasher != void) { - var clock2 = try std.time.Timer.start(); - BoringHasher.hash(bytes, &digest2, engine); - break :brk clock2.read(); - } else { - break :brk 0; - } - }; - - const evp_time: usize = brk: { - if (evp[i] != void) { - var clock3 = try std.time.Timer.start(); - evp[i].hash(bytes, &digest3, engine); - break :brk clock3.read(); - } - - break :brk 0; - }; - - const evp_in_time: usize = brk: { - if (evp[i] != void) { - var evp_in = evp[i].init(); - var clock4 = try std.time.Timer.start(); - evp_in.update(bytes); - evp_in.final(&digest4); - break :brk clock4.read(); - } - - break :brk 0; - }; - - std.debug.print( - " zig: {}\n", - .{std.fmt.fmtDuration(zig_time)}, - ); - - if (boring_time > 0) - std.debug.print( - " boring: {}\n", - .{std.fmt.fmtDuration(boring_time)}, - ); - if (evp_time > 0) - std.debug.print( - " evp: {}\n", - .{std.fmt.fmtDuration(evp_time)}, - ); - - if (evp_in_time > 0) - std.debug.print( - " evp in: {}\n\n", - .{std.fmt.fmtDuration(evp_in_time)}, - ); - - if (!std.mem.eql(u8, &digest3, &digest2)) { - @panic("\ndigests don't match! for " ++ labels[i]); - } - } -} diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index b61f7e8d18..dfd6b3a8aa 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -10474,8 +10474,8 @@ pub const Interpreter = struct { /// threadpool. 
const EbusyState = struct { tasks: std.ArrayListUnmanaged(*ShellCpTask) = .{}, - absolute_targets: std.StringArrayHashMapUnmanaged(void) = .{}, - absolute_srcs: std.StringArrayHashMapUnmanaged(void) = .{}, + absolute_targets: bun.StringArrayHashMapUnmanaged(void) = .{}, + absolute_srcs: bun.StringArrayHashMapUnmanaged(void) = .{}, pub fn deinit(this: *EbusyState) void { // The tasks themselves are freed in `ignoreEbusyErrorIfPossible()` @@ -12013,8 +12013,8 @@ fn closefd(fd: bun.FileDescriptor) void { } const CmdEnvIter = struct { - env: *const std.StringArrayHashMap([:0]const u8), - iter: std.StringArrayHashMap([:0]const u8).Iterator, + env: *const bun.StringArrayHashMap([:0]const u8), + iter: bun.StringArrayHashMap([:0]const u8).Iterator, const Entry = struct { key: Key, @@ -12041,7 +12041,7 @@ const CmdEnvIter = struct { } }; - pub fn fromEnv(env: *const std.StringArrayHashMap([:0]const u8)) CmdEnvIter { + pub fn fromEnv(env: *const bun.StringArrayHashMap([:0]const u8)) CmdEnvIter { const iter = env.iterator(); return .{ .env = env, diff --git a/src/shell/shell.zig b/src/shell/shell.zig index 500b3ee83e..ecd520cf9c 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -2124,20 +2124,6 @@ pub const Token = union(TokenTag) { .Eof => "EOF", }; } - - // pub fn debug(self: Token, buf: []const u8) void { - // switch (self) { - // .Var => |txt| { - // std.debug.print("(var) {s}\n", .{buf[txt.start..txt.end]}); - // }, - // .Text => |txt| { - // std.debug.print("(txt) {s}\n", .{buf[txt.start..txt.end]}); - // }, - // else => { - // std.debug.print("{s}\n", .{@tagName(self)}); - // }, - // } - // } }; pub const LexerAscii = NewLexer(.ascii); @@ -3629,8 +3615,8 @@ pub fn hasEqSignAsciiSlow(str: []const u8) ?u32 { } pub const CmdEnvIter = struct { - env: *const std.StringArrayHashMap([:0]const u8), - iter: std.StringArrayHashMap([:0]const u8).Iterator, + env: *const bun.StringArrayHashMap([:0]const u8), + iter: bun.StringArrayHashMap([:0]const u8).Iterator, const Entry = struct { key: Key, @@ -3657,7 +3643,7 @@ pub const CmdEnvIter = struct { } }; - pub fn fromEnv(env: *const std.StringArrayHashMap([:0]const u8)) CmdEnvIter { + pub fn fromEnv(env: *const bun.StringArrayHashMap([:0]const u8)) CmdEnvIter { const iter = env.iterator(); return .{ .env = env, @@ -4023,7 +4009,7 @@ const SPECIAL_CHARS_TABLE: std.bit_set.IntegerBitSet(256) = brk: { break :brk table; }; pub fn assertSpecialChar(c: u8) void { - comptime bun.assert(@inComptime()); + bun.assertComptime(); bun.assert(SPECIAL_CHARS_TABLE.isSet(c)); } /// Characters that need to be backslashed inside double quotes diff --git a/src/sourcemap/vlq_bench.zig b/src/sourcemap/vlq_bench.zig deleted file mode 100644 index 1d6c40f2fb..0000000000 --- a/src/sourcemap/vlq_bench.zig +++ /dev/null @@ -1,323 +0,0 @@ -const std = @import("std"); - -const SourceMap = struct { - const base64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; - - const vlq_lookup_table: [256]VLQ = brk: { - var entries: [256]VLQ = undefined; - var i: usize = 0; - var j: i32 = 0; - while (i < 256) : (i += 1) { - entries[i] = encodeVLQ(j); - j += 1; - } - break :brk entries; - }; - - const vlq_max_in_bytes = 8; - pub const VLQ = struct { - // We only need to worry about i32 - // That means the maximum VLQ-encoded value is 8 bytes - // because there are only 4 bits of number inside each VLQ value - // and it expects i32 - // therefore, it can never be more than 32 bits long - // I believe the actual number is 7 bytes long, however we can add an extra byte 
to be more cautious - bytes: [vlq_max_in_bytes]u8, - len: u4 = 0, - }; - - pub fn encodeVLQWithLookupTable( - value: i32, - ) VLQ { - return if (value >= 0 and value <= 255) - vlq_lookup_table[@as(usize, @intCast(value))] - else - encodeVLQ(value); - } - - // A single base 64 digit can contain 6 bits of data. For the base 64 variable - // length quantities we use in the source map spec, the first bit is the sign, - // the next four bits are the actual value, and the 6th bit is the continuation - // bit. The continuation bit tells us whether there are more digits in this - // value following this digit. - // - // Continuation - // | Sign - // | | - // V V - // 101011 - // - pub fn encodeVLQ( - value: i32, - ) VLQ { - var len: u4 = 0; - var bytes: [vlq_max_in_bytes]u8 = undefined; - - var vlq: u32 = if (value >= 0) - @as(u32, @bitCast(value << 1)) - else - @as(u32, @bitCast((-value << 1) | 1)); - - // source mappings are limited to i32 - comptime var i: usize = 0; - inline while (i < vlq_max_in_bytes) : (i += 1) { - var digit = vlq & 31; - vlq >>= 5; - - // If there are still more digits in this value, we must make sure the - // continuation bit is marked - if (vlq != 0) { - digit |= 32; - } - - bytes[len] = base64[digit]; - len += 1; - - if (vlq == 0) { - return VLQ{ - .bytes = bytes, - .len = len, - }; - } - } - - return .{ .bytes = bytes, .len = 0 }; - } - - pub const VLQResult = struct { - value: i32 = 0, - start: usize = 0, - }; - - // base64 stores values up to 7 bits - const base64_lut: [std.math.maxInt(u7)]u7 = brk: { - @setEvalBranchQuota(9999); - var bytes = [_]u7{std.math.maxInt(u7)} ** std.math.maxInt(u7); - - for (base64, 0..) |c, i| { - bytes[c] = i; - } - - break :brk bytes; - }; - - pub fn decodeVLQ(encoded: []const u8, start: usize) VLQResult { - var shift: u8 = 0; - var vlq: u32 = 0; - - // hint to the compiler what the maximum value is - const encoded_ = encoded[start..][0..@min(encoded.len - start, comptime (vlq_max_in_bytes + 1))]; - - // inlining helps for the 1 or 2 byte case, hurts a little for larger - comptime var i: usize = 0; - inline while (i < vlq_max_in_bytes + 1) : (i += 1) { - const index = @as(u32, base64_lut[@as(u7, @truncate(encoded_[i]))]); - - // decode a byte - vlq |= (index & 31) << @as(u5, @truncate(shift)); - shift += 5; - - // Stop if there's no continuation bit - if ((index & 32) == 0) { - return VLQResult{ - .start = i + start, - .value = if ((vlq & 1) == 0) - @as(i32, @intCast(vlq >> 1)) - else - -@as(i32, @intCast((vlq >> 1))), - }; - } - } - - return VLQResult{ .start = start + encoded_.len, .value = 0 }; - } -}; - -pub fn main() anyerror!void { - const args = try std.process.argsAlloc(std.heap.c_allocator); - const how_many = try std.fmt.parseInt(u64, args[args.len - 1], 10); - - var numbers = try std.heap.c_allocator.alloc(i32, how_many); - var results = try std.heap.c_allocator.alloc(SourceMap.VLQ, how_many); - var leb_buf = try std.heap.c_allocator.alloc(u8, how_many * 8); - const byte_size = std.mem.sliceAsBytes(numbers).len; - - var rand = std.rand.DefaultPrng.init(0); - - std.debug.print("Random values:\n\n", .{}); - - for (numbers, 0..) |_, i| { - numbers[i] = rand.random().int(i32); - } - - { - var timer = try std.time.Timer.start(); - - for (numbers, 0..) |n, i| { - results[i] = SourceMap.encodeVLQ(n); - } - const elapsed = timer.read(); - std.debug.print("[{d}] encode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - - for (numbers, 0..) 
|n, i| { - results[i] = SourceMap.encodeVLQWithLookupTable(n); - } - const elapsed = timer.read(); - std.debug.print("[{d}] encodeWithLookupTable: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - - for (results, 0..) |n, i| { - numbers[i] = SourceMap.decodeVLQ(n.bytes[0..n.len], 0).value; - } - - const elapsed = timer.read(); - std.debug.print("[{d}] decode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - var stream = std.io.fixedBufferStream(leb_buf); - var writer = stream.writer(); - for (numbers) |n| { - std.leb.writeILEB128(writer, n) catch unreachable; - } - const elapsed = timer.read(); - std.debug.print("[{d}] ILEB128 encode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - var stream = std.io.fixedBufferStream(leb_buf); - var reader = stream.reader(); - for (numbers, 0..) |_, i| { - numbers[i] = std.leb.readILEB128(i32, reader) catch unreachable; - } - const elapsed = timer.read(); - std.debug.print("[{d}] ILEB128 decode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - std.debug.print("\nNumbers between 0 - 8192:\n\n", .{}); - - for (numbers, 0..) |_, i| { - numbers[i] = rand.random().intRangeAtMost(i32, 0, 8192); - } - - { - var timer = try std.time.Timer.start(); - - for (numbers, 0..) |n, i| { - results[i] = SourceMap.encodeVLQ(n); - } - const elapsed = timer.read(); - std.debug.print("[{d}] encode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - - for (numbers, 0..) |n, i| { - results[i] = SourceMap.encodeVLQWithLookupTable(n); - } - const elapsed = timer.read(); - std.debug.print("[{d}] encodeWithLookupTable: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - - for (results, 0..) |n, i| { - numbers[i] = SourceMap.decodeVLQ(n.bytes[0..n.len], 0).value; - } - - const elapsed = timer.read(); - std.debug.print("[{d}] decode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - var stream = std.io.fixedBufferStream(leb_buf); - var writer = stream.writer(); - for (numbers) |n| { - std.leb.writeILEB128(writer, n) catch unreachable; - } - const elapsed = timer.read(); - std.debug.print("[{d}] ILEB128 encode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - var stream = std.io.fixedBufferStream(leb_buf); - var reader = stream.reader(); - for (numbers, 0..) |_, i| { - numbers[i] = std.leb.readILEB128(i32, reader) catch unreachable; - } - const elapsed = timer.read(); - std.debug.print("[{d}] ILEB128 decode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - std.debug.print("\nNumbers between 0 - 255:\n\n", .{}); - - for (numbers, 0..) |_, i| { - numbers[i] = rand.random().intRangeAtMost(i32, 0, 255); - } - - { - var timer = try std.time.Timer.start(); - - for (numbers, 0..) 
|n, i| { - results[i] = SourceMap.encodeVLQ(n); - } - const elapsed = timer.read(); - std.debug.print("[{d}] encode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - - for (numbers, 0..) |n, i| { - results[i] = SourceMap.encodeVLQWithLookupTable(n); - } - const elapsed = timer.read(); - std.debug.print("[{d}] encodeWithLookupTable: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - - for (results, 0..) |n, i| { - numbers[i] = SourceMap.decodeVLQ(n.bytes[0..n.len], 0).value; - } - - const elapsed = timer.read(); - std.debug.print("[{d}] decode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - var stream = std.io.fixedBufferStream(leb_buf); - var writer = stream.writer(); - for (numbers) |n| { - std.leb.writeILEB128(writer, n) catch unreachable; - } - const elapsed = timer.read(); - std.debug.print("[{d}] ILEB128 encode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } - - { - var timer = try std.time.Timer.start(); - var stream = std.io.fixedBufferStream(leb_buf); - var reader = stream.reader(); - for (numbers, 0..) |_, i| { - numbers[i] = std.leb.readILEB128(i32, reader) catch unreachable; - } - const elapsed = timer.read(); - std.debug.print("[{d}] ILEB128 decode: {} in {}\n", .{ how_many, std.fmt.fmtIntSizeDec(byte_size), std.fmt.fmtDuration(elapsed) }); - } -} diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 7d7b4111a8..25f76f7c51 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -200,11 +200,6 @@ pub inline fn isNPMPackageName(target: string) bool { return !scoped or slash_index > 0 and slash_index + 1 < target.len; } -pub inline fn indexAny(in: anytype, target: string) ?usize { - for (in, 0..) |str, i| if (indexOf(str, target) != null) return i; - return null; -} - pub inline fn indexAnyComptime(target: string, comptime chars: string) ?usize { for (target, 0..) 
|parent, i| { inline for (chars) |char| { diff --git a/src/util.zig b/src/util.zig index 30a42a3b3f..fbf2e5838d 100644 --- a/src/util.zig +++ b/src/util.zig @@ -256,22 +256,22 @@ pub fn Batcher(comptime Type: type) type { return @This(){ .head = all }; } - pub inline fn done(this: *@This()) void { - bun.assert(this.head.len == 0); + pub fn done(this: *@This()) void { + bun.assert(this.head.len == 0); // count to init() was too large, overallocation } - pub inline fn eat(this: *@This(), value: Type) *Type { + pub fn eat(this: *@This(), value: Type) *Type { return @as(*Type, @ptrCast(&this.head.eat1(value).ptr)); } - pub inline fn eat1(this: *@This(), value: Type) []Type { + pub fn eat1(this: *@This(), value: Type) []Type { var prev = this.head[0..1]; prev[0] = value; this.head = this.head[1..]; return prev; } - pub inline fn next(this: *@This(), values: anytype) []Type { + pub fn next(this: *@This(), values: anytype) []Type { this.head[0..values.len].* = values; const prev = this.head[0..values.len]; this.head = this.head[values.len..]; diff --git a/src/windows.zig b/src/windows.zig index de41aca60e..ad0fc86c29 100644 --- a/src/windows.zig +++ b/src/windows.zig @@ -3047,6 +3047,7 @@ pub fn translateNTStatusToErrno(err: win32.NTSTATUS) bun.C.E { .DIRECTORY_NOT_EMPTY => .NOTEMPTY, .FILE_TOO_LARGE => .@"2BIG", .NOT_SAME_DEVICE => .XDEV, + .DELETE_PENDING => .BUSY, .SHARING_VIOLATION => if (comptime Environment.isDebug) brk: { bun.Output.debugWarn("Received SHARING_VIOLATION, indicates file handle should've been opened with FILE_SHARE_DELETE", .{}); break :brk .BUSY; @@ -3058,9 +3059,9 @@ pub fn translateNTStatusToErrno(err: win32.NTSTATUS) bun.C.E { } else .INVAL, else => |t| { - // if (bun.Environment.isDebug) { - bun.Output.warn("Called translateNTStatusToErrno with {s} which does not have a mapping to errno.", .{@tagName(t)}); - // } + if (bun.Environment.isDebug) { + bun.Output.warn("Called translateNTStatusToErrno with {s} which does not have a mapping to errno.", .{@tagName(t)}); + } return .UNKNOWN; }, }; diff --git a/src/wyhash.zig b/src/wyhash.zig index 1d3a1a0ad8..b4b72410aa 100644 --- a/src/wyhash.zig +++ b/src/wyhash.zig @@ -1,7 +1,7 @@ // // this file is a copy of Wyhash from the zig standard library, version v0.11.0-dev.2609+5e19250a1 // -const assert = if (@hasDecl(@import("root"), "bun")) @import("root").bun.assert else @import("std").debug.assert; +const assert = if (@hasDecl(@import("root"), "bun")) (@import("root").bun).assert else @import("std").debug.assert; const std = @import("std"); const mem = std.mem; diff --git a/test/bundler/__snapshots__/bun-build-api.test.ts.snap b/test/bundler/__snapshots__/bun-build-api.test.ts.snap index 30746b67d6..790d9d2757 100644 --- a/test/bundler/__snapshots__/bun-build-api.test.ts.snap +++ b/test/bundler/__snapshots__/bun-build-api.test.ts.snap @@ -1,117 +1,105 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP - -exports[`Bun.build BuildArtifact properties: hash 1`] = `"e4885a8bc2de343a"`; - -exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"cb8abf3391c2971f"`; - -exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"e4885a8bc2de343a"`; - -exports[`Bun.build BuildArtifact properties sourcemap: hash index.js.map 1`] = `"0000000000000000"`; - -exports[`Bun.build Bun.write(BuildArtifact) 1`] = ` -"var __defProp = Object.defineProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { - get: all[name], - enumerable: true, - configurable: true, - set: (newValue) 
=> all[name] = () => newValue - }); -}; -var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res); - -// test/bundler/fixtures/trivial/fn.js -var exports_fn = {}; -__export(exports_fn, { - fn: () => { - { - return fn; - } - } -}); -function fn(a) { - return a + 42; -} -var init_fn = __esm(() => { -}); - -// test/bundler/fixtures/trivial/index.js -var NS = Promise.resolve().then(() => (init_fn(), exports_fn)); -NS.then(({ fn: fn2 }) => { - console.log(fn2(42)); -}); -" -`; - -exports[`Bun.build outdir + reading out blobs works 1`] = ` -"var __defProp = Object.defineProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { - get: all[name], - enumerable: true, - configurable: true, - set: (newValue) => all[name] = () => newValue - }); -}; -var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res); - -// test/bundler/fixtures/trivial/fn.js -var exports_fn = {}; -__export(exports_fn, { - fn: () => { - { - return fn; - } - } -}); -function fn(a) { - return a + 42; -} -var init_fn = __esm(() => { -}); - -// test/bundler/fixtures/trivial/index.js -var NS = Promise.resolve().then(() => (init_fn(), exports_fn)); -NS.then(({ fn: fn2 }) => { - console.log(fn2(42)); -}); -" -`; - -exports[`Bun.build new Response(BuildArtifact) sets content type: response text 1`] = ` -"var __defProp = Object.defineProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { - get: all[name], - enumerable: true, - configurable: true, - set: (newValue) => all[name] = () => newValue - }); -}; -var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res); - -// test/bundler/fixtures/trivial/fn.js -var exports_fn = {}; -__export(exports_fn, { - fn: () => { - { - return fn; - } - } -}); -function fn(a) { - return a + 42; -} -var init_fn = __esm(() => { -}); - -// test/bundler/fixtures/trivial/index.js -var NS = Promise.resolve().then(() => (init_fn(), exports_fn)); -NS.then(({ fn: fn2 }) => { - console.log(fn2(42)); -}); -" -`; +// Bun Snapshot v1, https://goo.gl/fbAQLP + +exports[`Bun.build Bun.write(BuildArtifact) 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; +var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res); + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} +var init_fn = __esm(() => { +}); + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => (init_fn(), exports_fn)); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build outdir + reading out blobs works 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; +var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res); + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} +var init_fn = __esm(() => { +}); + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => (init_fn(), exports_fn)); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build BuildArtifact properties: 
hash 1`] = `"3f0ccbb87bbfe04c"`; + +exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"aba9c75f86b1a251"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"3f0ccbb87bbfe04c"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js.map 1`] = `"0000000000000000"`; + +exports[`Bun.build new Response(BuildArtifact) sets content type: response text 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; +var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res); + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} +var init_fn = __esm(() => { +}); + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => (init_fn(), exports_fn)); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; diff --git a/test/bundler/bundler_edgecase.test.ts b/test/bundler/bundler_edgecase.test.ts index 95d8fccdcc..d30276f8ec 100644 --- a/test/bundler/bundler_edgecase.test.ts +++ b/test/bundler/bundler_edgecase.test.ts @@ -1169,6 +1169,138 @@ describe("bundler", () => { expect(count, "should only emit two constructors: " + content).toBe(2); }, }); + itBundled("edgecase/EnumInliningRopeStringPoison", { + files: { + "/entry.ts": ` + const enum A1 { + B = "1" + "2", + C = "3" + B, + }; + console.log(A1.B, A1.C); + + const enum A2 { + B = "1" + "2", + C = ("3" + B) + "4", + }; + console.log(A2.B, A2.C); + `, + }, + run: { + stdout: "12 312\n12 3124", + }, + }); + itBundled("edgecase/ProtoNullProtoInlining", { + files: { + "/entry.ts": ` + console.log({ __proto__: null }.__proto__ !== void 0) + `, + }, + run: { + stdout: "false", + }, + }); + itBundled("edgecase/ConstantFoldingShiftOperations", { + files: { + "/entry.ts": ` + capture(421 >> -542) + capture(421 >>> -542) + capture(1 << 32) + capture(1 >> 32) + capture(1 >>> 32) + capture(47849312 << 34) + capture(-9 >> 1) + capture(-5 >> 1) + `, + }, + minifySyntax: true, + capture: ["105", "105", "1", "1", "1", "191397248", "-5", "-3"], + }); + itBundled("edgecase/ConstantFoldingBitwiseCoersion", { + files: { + "/entry.ts": ` + capture(0 | 0) + capture(12582912 | 0) + capture(0xc00000 | 0) + capture(Infinity | 0) + capture(-Infinity | 0) + capture(NaN | 0) + // u32 limits + capture(-4294967295 | 0) + capture(-4294967296 | 0) + capture(-4294967297 | 0) + capture(4294967295 | 0) + capture(4294967296 | 0) + capture(4294967297 | 0) + // i32 limits + capture(-2147483647 | 0) + capture(-2147483648 | 0) + capture(-2147483649 | 0) + capture(2147483647 | 0) + capture(2147483648 | 0) + capture(2147483649 | 0) + capture(0.5 | 0) + `, + }, + minifySyntax: true, + capture: [ + "0", + "12582912", + "12582912", + "0", + "0", + "0", + "1", + "0", + "-1", + "-1", + "0", + "1", + "-2147483647", + "-2147483648", + "2147483647", + "2147483647", + "-2147483648", + "-2147483647", + "0", + ], + }); + itBundled("edgecase/EnumInliningNanBoxedEncoding", { + files: { + "/main.ts": ` + import { Enum } from './other.ts'; + capture(Enum.a); + capture(Enum.b); + capture(Enum.c); + capture(Enum.d); + capture(Enum.e); + capture(Enum.f); + capture(Enum.g); + `, + "/other.ts": ` + export const enum Enum { + a = 0, + b = NaN, + c = (0 / 0) + 1, + d = Infinity, + e = -Infinity, + f = 3e450, + // 
https://float.exposed/0xffefffffffffffff + g = -1.79769313486231570815e+308, + } + `, + }, + minifySyntax: true, + capture: [ + "0 /* a */", + "NaN /* b */", + "NaN /* c */", + "1 / 0 /* d */", + "-1 / 0 /* e */", + "1 / 0 /* f */", + // should probably fix this + "-179769313486231570000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 /* g */", + ], + }); // TODO(@paperdave): test every case of this. I had already tested it manually, but it may break later const requireTranspilationListESM = [ diff --git a/test/bundler/bundler_minify.test.ts b/test/bundler/bundler_minify.test.ts index 7ac0fef092..da39021791 100644 --- a/test/bundler/bundler_minify.test.ts +++ b/test/bundler/bundler_minify.test.ts @@ -55,6 +55,17 @@ describe("bundler", () => { minifySyntax: true, target: "bun", }); + itBundled("minify/StringAdditionFolding", { + files: { + "/entry.js": /* js */ ` + capture("Objects are not valid as a React child (found: " + (childString === "[object Object]" ? "object with keys {" + Object.keys(node).join(", ") + "}" : childString) + "). " + "If you meant to render a collection of children, use an array " + "instead.") + `, + }, + capture: [ + '"Objects are not valid as a React child (found: " + (childString === "[object Object]" ? "object with keys {" + Object.keys(node).join(", ") + "}" : childString) + "). If you meant to render a collection of children, use an array instead."', + ], + minifySyntax: true, + }); itBundled("minify/FunctionExpressionRemoveName", { todo: true, files: { @@ -156,8 +167,8 @@ describe("bundler", () => { "NaN", "1 / 0", "-1 / 0", - "~(1 / 0)", - "~(-1 / 0)", + "-1", + "-1", ], minifySyntax: true, }); @@ -180,22 +191,7 @@ describe("bundler", () => { capture(~-Infinity); `, }, - capture: [ - "1/0", - "-1/0", - "1/0", - "-1/0", - "1/0", - "-1/0", - "NaN", - "NaN", - "NaN", - "NaN", - "1/0", - "1/0", - "~(1/0)", - "~(-1/0)", - ], + capture: ["1/0", "-1/0", "1/0", "-1/0", "1/0", "-1/0", "NaN", "NaN", "NaN", "NaN", "1/0", "1/0", "-1", "-1"], minifySyntax: true, minifyWhitespace: true, }); diff --git a/test/bundler/esbuild/ts.test.ts b/test/bundler/esbuild/ts.test.ts index d98076e7fb..34732c65c4 100644 --- a/test/bundler/esbuild/ts.test.ts +++ b/test/bundler/esbuild/ts.test.ts @@ -226,9 +226,6 @@ describe("bundler", () => { }, }); itBundled("ts/ConstEnumComments", { - // When it comes time to implement this inlining, we may decide we do NOT - // want to insert helper comments. 
- todo: true, files: { "/bar.ts": /* ts */ ` export const enum Foo { @@ -383,12 +380,11 @@ describe("bundler", () => { }, }); itBundled("ts/MinifyEnum", { - todo: true, files: { "/a.ts": `enum Foo { A, B, C = Foo }\ncapture(Foo)`, - // "/b.ts": `export enum Foo { X, Y, Z = Foo }`, + "/b.ts": `export enum Foo { X, Y, Z = Foo }`, }, - entryPoints: ["/a.ts"], + entryPoints: ["/a.ts", "./b.ts"], minifySyntax: true, minifyWhitespace: true, minifyIdentifiers: true, @@ -396,20 +392,20 @@ describe("bundler", () => { onAfterBundle(api) { const a = api.readFile("/out.js"); api.writeFile("/out.edited.js", a.replace(/capture\((.*?)\)/, `export const Foo = $1`)); - // const b = api.readFile("/out/b.js"); + const b = api.readFile("/out/b.js"); // make sure the minification trick "enum[enum.K=V]=K" is used, but `enum` assert(a.match(/\b[a-zA-Z$]\[[a-zA-Z$]\.A=0]=["']A["']/), "should be using enum minification trick (1)"); assert(a.match(/\b[a-zA-Z$]\[[a-zA-Z$]\.B=1]=["']B["']/), "should be using enum minification trick (2)"); assert(a.match(/\b[a-zA-Z$]\[[a-zA-Z$]\.C=[a-zA-Z$]]=["']C["']/), "should be using enum minification trick (3)"); - // assert(b.match(/\b[a-zA-Z$]\[[a-zA-Z$]\.X=0]=["']X["']/), "should be using enum minification trick (4)"); - // assert(b.match(/\b[a-zA-Z$]\[[a-zA-Z$]\.Y=1]=["']Y["']/), "should be using enum minification trick (5)"); - // assert(b.match(/\b[a-zA-Z$]\[[a-zA-Z$]\.Z=[a-zA-Z$]]=["']Z["']/), "should be using enum minification trick (6)"); + assert(b.match(/\b[a-zA-Z$]\[[a-zA-Z$]\.X=0]=["']X["']/), "should be using enum minification trick (4)"); + assert(b.match(/\b[a-zA-Z$]\[[a-zA-Z$]\.Y=1]=["']Y["']/), "should be using enum minification trick (5)"); + assert(b.match(/\b[a-zA-Z$]\[[a-zA-Z$]\.Z=[a-zA-Z$]]=["']Z["']/), "should be using enum minification trick (6)"); }, runtimeFiles: { "/test.js": /* js */ ` import {Foo as FooA} from './out/a.edited.js' - // import {Foo as FooB} from './out/b.js' + import {Foo as FooB} from './out/b.js' import assert from 'assert'; assert.strictEqual(FooA.A, 0, 'a.ts Foo.A') assert.strictEqual(FooA.B, 1, 'a.ts Foo.B') @@ -417,17 +413,16 @@ describe("bundler", () => { assert.strictEqual(FooA[0], 'A', 'a.ts Foo[0]') assert.strictEqual(FooA[1], 'B', 'a.ts Foo[1]') assert.strictEqual(FooA[FooA], 'C', 'a.ts Foo[Foo]') - // assert.strictEqual(FooB.X, 0, 'b.ts Foo.X') - // assert.strictEqual(FooB.Y, 1, 'b.ts Foo.Y') - // assert.strictEqual(FooB.Z, FooB, 'b.ts Foo.Z') - // assert.strictEqual(FooB[0], 'X', 'b.ts Foo[0]') - // assert.strictEqual(FooB[1], 'Y', 'b.ts Foo[1]') - // assert.strictEqual(FooB[FooB], 'Z', 'b.ts Foo[Foo]') + assert.strictEqual(FooB.X, 0, 'b.ts Foo.X') + assert.strictEqual(FooB.Y, 1, 'b.ts Foo.Y') + assert.strictEqual(FooB.Z, FooB, 'b.ts Foo.Z') + assert.strictEqual(FooB[0], 'X', 'b.ts Foo[0]') + assert.strictEqual(FooB[1], 'Y', 'b.ts Foo[1]') + assert.strictEqual(FooB[FooB], 'Z', 'b.ts Foo[Foo]') `, }, }); itBundled("ts/MinifyEnumExported", { - todo: true, files: { "/b.ts": `export enum Foo { X, Y, Z = Foo }`, }, @@ -831,7 +826,6 @@ describe("bundler", () => { stdout: '[123,{"test":true}]', }, }); - // TODO: all situations with decorators are currently not runtime-checked. as of writing bun crashes when hitting them at all. 
itBundled("ts/TypeScriptDecoratorsSimpleCase", { files: { "/entry.ts": /* ts */ ` @@ -1782,7 +1776,6 @@ describe("bundler", () => { }, }); itBundled("ts/SiblingNamespaceLet", { - todo: true, files: { "/let.ts": /* ts */ ` export namespace x { export let y = 123 } @@ -1800,7 +1793,6 @@ describe("bundler", () => { }, }); itBundled("ts/SiblingNamespaceFunction", { - todo: true, files: { "/function.ts": /* ts */ ` export namespace x { export function y() {} } @@ -1818,14 +1810,13 @@ describe("bundler", () => { }, }); itBundled("ts/SiblingNamespaceClass", { - todo: true, files: { "/let.ts": /* ts */ ` export namespace x { export class y {} } export namespace x { export let z = y } `, }, - entryPoints: ["/function.ts"], + entryPoints: ["/let.ts"], bundling: false, runtimeFiles: { "/test.js": /* js */ ` @@ -1836,7 +1827,6 @@ describe("bundler", () => { }, }); itBundled("ts/SiblingNamespaceNamespace", { - todo: true, files: { "/namespace.ts": /* ts */ ` export namespace x { export namespace y { 0 } } @@ -1854,7 +1844,6 @@ describe("bundler", () => { }, }); itBundled("ts/SiblingNamespaceEnum", { - todo: true, files: { "/enum.ts": /* ts */ ` export namespace x { export enum y {} } @@ -1870,10 +1859,9 @@ describe("bundler", () => { assert(m.x === m.z, "it worked.ts worked") `, }, + minifySyntax: false, // intentionally disabled. enum inlining always happens }); itBundled("ts/SiblingEnum", { - todo: true, - // GENERATED files: { "/number.ts": /* ts */ ` (0, eval)('globalThis.y = 1234'); @@ -1965,9 +1953,9 @@ describe("bundler", () => { { file: "/out/nested-string.js", stdout: "1234 2345\na a" }, { file: "/out/nested-propagation.js", stdout: "100 100 100 625 625 625" }, ], + minifySyntax: false, // intentionally disabled. enum inlining always happens }); itBundled("ts/EnumTreeShaking", { - todo: true, files: { "/simple-member.ts": /* ts */ ` enum x_DROP { y_DROP = 123 } @@ -2033,8 +2021,11 @@ describe("bundler", () => { { file: "/out/namespace-before.js", stdout: "{} 1234" }, { file: "/out/namespace-after.js", stdout: '{"123":"y","y":123} 1234' }, ], + minifySyntax: false, // intentionally disabled. enum inlining always happens }); itBundled("ts/EnumJSX", { + // Blocking: + // - jsx bugs (configuration does not seem to be respected) todo: true, files: { "/element.tsx": /* tsx */ ` @@ -2074,6 +2065,7 @@ describe("bundler", () => { export const create = (tag, props, ...children) => [tag, props, children] `, }, + minifySyntax: false, // intentionally disabled. enum inlining always happens run: [ { file: "/out/element.js", stdout: '["div",null,[]]' }, { file: "/out/fragment.js", stdout: '["div",null,["test"]]' }, @@ -2085,17 +2077,17 @@ describe("bundler", () => { todo: true, files: { "/entry.ts": ` - enum a { b = 123, c = d } - console.log(a.b, a.c) + enum a { b = 123, c = d } + console.log(a.b, a.c) `, }, define: { d: "b", }, + minifySyntax: false, // intentionally disabled. enum inlining always happens run: { stdout: "123 123" }, }); itBundled("ts/EnumSameModuleInliningAccess", { - todo: true, files: { "/entry.ts": /* ts */ ` enum a_drop { x = 123 } @@ -2113,10 +2105,10 @@ describe("bundler", () => { `, }, dce: true, + minifySyntax: false, // intentionally disabled. 
enum inlining always happens
     run: { stdout: '[123,123,123,123,{"123":"x","x":123}]' },
   });
   itBundled("ts/EnumCrossModuleInliningAccess", {
-    todo: true,
     files: {
       "/entry.ts": /* ts */ `
         import { drop_a, drop_b, c, d, e } from './enums'
@@ -2136,14 +2128,14 @@ describe("bundler", () => {
         export enum e { x = 123 }
       `,
     },
+    minifySyntax: false, // intentionally disabled. enum inlining always happens
     dce: true,
   });
   itBundled("ts/EnumCrossModuleInliningDefinitions", {
-    todo: true,
     files: {
       "/entry.ts": /* ts */ `
         import { a } from './enums'
-        (0, eval)('globalThis.capture = x => x');
+        (0, eval)('globalThis["captu" + "re"] = x => x');
         console.log(JSON.stringify([
           capture(a.implicit_number),
           capture(a.explicit_number),
@@ -2162,12 +2154,12 @@ describe("bundler", () => {
         }
       `,
     },
+    minifySyntax: false, // intentionally disabled. enum inlining always happens
     onAfterBundle(api) {
       expect(api.captureFile("/out.js").map(x => x.replace(/\/\*.*\*\//g, "").trim())).toEqual(["0", "123", '"xyz"']);
     },
   });
   itBundled("ts/EnumCrossModuleInliningReExport", {
-    todo: true,
     files: {
       "/entry.js": /* js */ `
         import { a } from './re-export'
@@ -2187,12 +2179,12 @@ describe("bundler", () => {
         export enum c { x = 'c' }
       `,
     },
+    minifySyntax: false, // intentionally disabled. enum inlining always happens
     onAfterBundle(api) {
       expect(api.captureFile("/out.js").map(x => x.replace(/\/\*.*\*\//g, "").trim())).toEqual(['"a"', '"b"', '"c"']);
     },
   });
   itBundled("ts/EnumCrossModuleTreeShaking", {
-    todo: true,
     files: {
       "/entry.ts": /* ts */ `
         import {
@@ -2229,6 +2221,7 @@ describe("bundler", () => {
         export let e = {} // non-enum properties should be kept
       `,
     },
+    minifySyntax: false, // intentionally disabled. enum inlining always happens
     onAfterBundle(api) {
       expect(api.captureFile("/out.js").map(x => x.replace(/\/\*.*\*\//g, "").trim())).toEqual([
         "1",
@@ -2243,7 +2236,6 @@ describe("bundler", () => {
     },
   });
   itBundled("ts/EnumExportClause", {
-    todo: true,
     files: {
       "/entry.ts": /* ts */ `
         import {
@@ -2268,23 +2260,12 @@ describe("bundler", () => {
         export { B, D as d }
       `,
     },
+    minifySyntax: false, // intentionally disabled. enum inlining always happens
     onAfterBundle(api) {
       expect(api.captureFile("/out.js").map(x => x.replace(/\/\*.*\*\//g, "").trim())).toEqual(["1", "2", "3", "4"]);
     },
   });
-  // itBundled("ts/CommonJSVariableInESMTypeModule", {
-  //   // GENERATED
-  //   files: {
-  //     "/entry.ts": `module.exports = null`,
-  //     "/package.json": `{ "type": "module" }`,
-  //   },
-  //   /* TODO FIX expectedScanLog: `entry.ts: WARNING: The CommonJS "module" variable is treated as a global variable in an ECMAScript module and may not work as expected
-  // package.json: NOTE: This file is considered to be an ECMAScript module because the enclosing "package.json" file sets the type of this file to "module":
-  // NOTE: Node's package format requires that CommonJS files in a "type": "module" package use the ".cjs" file extension. If you are using TypeScript, you can use the ".cts" file extension with esbuild instead.
- // `, */ - // }); itBundled("ts/EnumRulesFrom_TypeScript_5_0", { - // GENERATED files: { "/supported.ts": ` @@ -2424,43 +2405,42 @@ describe("bundler", () => { ])) `, "/not-supported.ts": /* ts */ ` - (0, eval)('globalThis.capture = x => x'); + (0, eval)('globalThis["captu" + "re"] = x => x'); - const enum NonIntegerNumberToString { - SUPPORTED = '' + 1, - UNSUPPORTED = '' + 1.5, + const enum NumberToString { + DROP_One = '' + 1, + DROP_OnePointFive = '' + 1.5, + DROP_Other = '' + 4132879497321892437432187943789312894378237491578123414321431, + DROP_Billion = '' + 1_000_000_000, + DROP_Trillion = '' + 1_000_000_000_000, } console.log( - capture(NonIntegerNumberToString.SUPPORTED), - capture(NonIntegerNumberToString.UNSUPPORTED), + capture(NumberToString.DROP_One), + capture(NumberToString.DROP_OnePointFive), + capture(NumberToString.DROP_Other), + capture(NumberToString.DROP_Billion), + capture(NumberToString.DROP_Trillion), ) - const enum OutOfBoundsNumberToString { - SUPPORTED = '' + 1_000_000_000, - UNSUPPORTED = '' + 1_000_000_000_000, - } - console.log( - capture(OutOfBoundsNumberToString.SUPPORTED), - capture(OutOfBoundsNumberToString.UNSUPPORTED), - ) - - const enum TemplateExpressions { + const enum DROP_TemplateExpressions { // TypeScript enums don't handle any of these NULL = '' + null, TRUE = '' + true, FALSE = '' + false, BIGINT = '' + 123n, + BIGINT_2 = '' + 4132879497321892437432187943789312894378237491578123414321431n, } console.log( - capture(TemplateExpressions.NULL), - capture(TemplateExpressions.TRUE), - capture(TemplateExpressions.FALSE), - capture(TemplateExpressions.BIGINT), + capture(DROP_TemplateExpressions.NULL), + capture(DROP_TemplateExpressions.TRUE), + capture(DROP_TemplateExpressions.FALSE), + capture(DROP_TemplateExpressions.BIGINT), + capture(DROP_TemplateExpressions.BIGINT_2), ) `, }, - // dce: true, + dce: true, entryPoints: ["/supported.ts", "/not-supported.ts"], run: [ { @@ -2471,27 +2451,27 @@ describe("bundler", () => { { file: "/out/not-supported.js", stdout: ` - 1 1.5 - 1000000000 1000000000000 - null true false 123 + 1 1.5 4.1328794973218926e+60 1000000000 1000000000000 + null true false 123 4132879497321892437432187943789312894378237491578123414321431 `, }, ], onAfterBundle(api) { - // expect(api.captureFile("/out/not-supported.js").map(x => x.replace(/\/\*.*\*\//g, "").trim())).toEqual([ - // '"1"', - // "NonIntegerNumberToString.UNSUPPORTED", - // '"1000000000"', - // "OutOfBoundsNumberToString.UNSUPPORTED", - // "TemplateExpressions.NULL", - // "TemplateExpressions.TRUE", - // "TemplateExpressions.FALSE", - // "TemplateExpressions.BIGINT", - // ]); + expect(api.captureFile("/out/not-supported.js").map(x => x.replace(/\/\*.*\*\//g, "").trim())).toEqual([ + '"1"', + '"1.5"', + '"4.1328794973218926e+60"', + '"1000000000"', + '"1000000000000"', + '"null"', + '"true"', + '"false"', + '"123"', + '"4132879497321892437432187943789312894378237491578123414321431"', + ]); }, }); itBundled("ts/EnumUseBeforeDeclare", { - todo: true, files: { "/entry.ts": /* ts */ ` before(); diff --git a/test/js/bun/jsc/bun-jsc.test.ts b/test/js/bun/jsc/bun-jsc.test.ts index 6982eafb89..fd7120e5e0 100644 --- a/test/js/bun/jsc/bun-jsc.test.ts +++ b/test/js/bun/jsc/bun-jsc.test.ts @@ -73,7 +73,10 @@ describe("bun:jsc", () => { expect(setRandomSeed(2)).toBeUndefined(); }); it("isRope", () => { - expect(isRope("a" + 123 + "b")).toBe(true); + // https://twitter.com/bunjavascript/status/1806921203644571685 + let y; + y = 123; + expect(isRope("a" + y + "b")).toBe(true); 
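+    // Note: the indirection through `y` appears to be needed because the
+    // transpiler now constant-folds string additions at parse time (see the
+    // transpiler.test.js changes below), so the old literal "a" + 123 + "b"
+    // would reach JSC as the flat string "a123b" rather than a rope.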
expect(isRope("abcdefgh")).toBe(false); }); it("callerSourceOrigin", () => { diff --git a/test/transpiler/transpiler.test.js b/test/transpiler/transpiler.test.js index f02f71a371..357925e6ae 100644 --- a/test/transpiler/transpiler.test.js +++ b/test/transpiler/transpiler.test.js @@ -18,14 +18,31 @@ describe("Bun.Transpiler", () => { }, platform: "browser", }); + const transpilerMinifySyntax = new Bun.Transpiler({ + loader: "tsx", + define: { + "process.env.NODE_ENV": JSON.stringify("development"), + user_undefined: "undefined", + user_nested: "location.origin", + "hello.earth": "hello.mars", + "Math.log": "console.error", + }, + macro: { + react: { + bacon: `${import.meta.dir}/macro-check.js`, + }, + }, + minify: { syntax: true }, + platform: "browser", + }); const ts = { - parsed: (code, trim = true, autoExport = false) => { + parsed: (code, trim = true, autoExport = false, minify = false) => { if (autoExport) { code = "export default (" + code + ")"; } - var out = transpiler.transformSync(code, "ts"); + var out = (minify ? transpilerMinifySyntax : transpiler).transformSync(code, "ts"); if (autoExport && out.startsWith("export default ")) { out = out.substring("export default ".length); } @@ -43,6 +60,10 @@ describe("Bun.Transpiler", () => { return out; }, + parsedMin: (code, trim = true, autoExport = false) => { + return ts.parsed(code, trim, autoExport, true); + }, + expectPrinted: (code, out) => { expect(ts.parsed(code, true, true)).toBe(out); }, @@ -51,6 +72,10 @@ describe("Bun.Transpiler", () => { expect(ts.parsed(code, !out.endsWith(";\n"), false)).toBe(out); }, + expectPrintedMin_: (code, out) => { + expect(ts.parsedMin(code, !out.endsWith(";\n"), false)).toBe(out); + }, + expectParseError: (code, message) => { try { ts.parsed(code, false, false); @@ -89,17 +114,17 @@ describe("Bun.Transpiler", () => { describe("property access inlining", () => { it("bails out with spread", () => { - ts.expectPrinted_("const a = [...b][0];", "const a = [...b][0]"); - ts.expectPrinted_("const a = {...b}[0];", "const a = { ...b }[0]"); + ts.expectPrintedMin_("const a = [...b][0];", "const a = [...b][0]"); + ts.expectPrintedMin_("const a = {...b}[0];", "const a = { ...b }[0]"); }); it("bails out with multiple items", () => { - ts.expectPrinted_("const a = [b, c][0];", "const a = [b, c][0]"); + ts.expectPrintedMin_("const a = [b, c][0];", "const a = [b, c][0]"); }); it("works", () => { - ts.expectPrinted_('const a = ["hey"][0];', 'const a = "hey"'); + ts.expectPrintedMin_('const a = ["hey"][0];', 'const a = "hey"'); }); it("works nested", () => { - ts.expectPrinted_('const a = ["hey"][0][0];', 'const a = "h"'); + ts.expectPrintedMin_('const a = ["hey"][0][0];', 'const a = "h"'); }); }); @@ -905,12 +930,12 @@ export default class { export enum x { y } }`; const output1 = `var test; -(function(test) { +((test) => { let x; - (function(x) { + ((x) => { x[x["y"] = 0] = "y"; - })(x = test.x || (test.x = {})); -})(test || (test = {}))`; + })(x = test.x ||= {}); +})(test ||= {})`; it("namespace with exported enum", () => { ts.expectPrinted_(input1, output1); @@ -920,12 +945,12 @@ export default class { export enum x { y } }`; const output2 = `export var test; -(function(test) { +((test) => { let x; - (function(x) { + ((x) => { x[x["y"] = 0] = "y"; - })(x = test.x || (test.x = {})); -})(test || (test = {}))`; + })(x = test.x ||= {}); +})(test ||= {})`; it("exported namespace with exported enum", () => { ts.expectPrinted_(input2, output2); @@ -937,15 +962,15 @@ export default class { } }`; const output3 = `var 
@@ -937,15 +962,15 @@ export default class {
   }
 }`;
     const output3 = `var first;
-(function(first) {
+((first) => {
   let second;
-  (function(second) {
+  ((second) => {
     let x;
-    (function(x) {
+    ((x) => {
       x[x["y"] = 0] = "y";
-    })(x || (x = {}));
-  })(second = first.second || (first.second = {}));
-})(first || (first = {}))`;
+    })(x ||= {});
+  })(second = first.second ||= {});
+})(first ||= {})`;

     it("exported inner namespace", () => {
       ts.expectPrinted_(input3, output3);
     });

     const input4 = `export enum x { y }`;
     const output4 = `export var x;
-(function(x) {
+((x) => {
   x[x["y"] = 0] = "y";
-})(x || (x = {}))`;
+})(x ||= {})`;

     it("exported enum", () => {
       ts.expectPrinted_(input4, output4);
@@ -1512,8 +1537,10 @@ export var ComponentThatHasSpreadCausesDeopt =
   });

   it("CommonJS", () => {
-    var nodeTranspiler = new Bun.Transpiler({ platform: "node" });
-    expect(nodeTranspiler.transformSync("module.require('hi' + 123)")).toBe('require("hi" + 123);\n');
+    var nodeTranspiler = new Bun.Transpiler({ platform: "node", minify: { syntax: false } });
+
+    // note: even if minify syntax is off, constant folding must happen within require calls
+    expect(nodeTranspiler.transformSync("module.require('hi' + 123)")).toBe('require("hi123");\n');

     expect(nodeTranspiler.transformSync("module.require(1 ? 'foo' : 'bar')")).toBe('require("foo");\n');
     expect(nodeTranspiler.transformSync("require(1 ? 'foo' : 'bar')")).toBe('require("foo");\n');
@@ -1598,6 +1625,10 @@ export var ComponentThatHasSpreadCausesDeopt =
     expect(parsed(code, !out.endsWith(";\n"), false)).toBe(out);
   };

+  const expectPrintedMin_ = (code, out) => {
+    expect(parsed(code, !out.endsWith(";\n"), false, transpilerMinifySyntax)).toBe(out);
+  };
+
   const expectPrintedNoTrim = (code, out) => {
     expect(parsed(code, false, false)).toBe(out);
   };
@@ -1748,14 +1779,14 @@ console.log(a)
     `.trim(),
   );

-    expectPrinted_(`export const foo = "a" + "b";`, `export const foo = "ab"`);
-    expectPrinted_(
+    expectPrintedMin_(`export const foo = "a" + "b";`, `export const foo = "ab"`);
+    expectPrintedMin_(
       `export const foo = "F" + "0" + "F" + "0123456789" + "ABCDEF" + "0123456789ABCDEFF0123456789ABCDEF00" + "b";`,
       `export const foo = "F0F0123456789ABCDEF0123456789ABCDEFF0123456789ABCDEF00b"`,
     );
-    expectPrinted_(`export const foo = "a" + 1 + "b";`, `export const foo = "a" + 1 + "b"`);
-    expectPrinted_(`export const foo = "a" + "b" + 1 + "b";`, `export const foo = "ab" + 1 + "b"`);
-    expectPrinted_(`export const foo = "a" + "b" + 1 + "b" + "c";`, `export const foo = "ab" + 1 + "bc"`);
+    expectPrintedMin_(`export const foo = "a" + 1 + "b";`, `export const foo = "a1b"`);
+    expectPrintedMin_(`export const foo = "a" + "b" + 1 + "b";`, `export const foo = "ab1b"`);
+    expectPrintedMin_(`export const foo = "a" + "b" + 1 + "b" + "c";`, `export const foo = "ab1bc"`);
   });

   it("numeric constants", () => {
@@ -2866,6 +2897,10 @@ console.log(foo, array);
   });

   it("constant folding", () => {
+    const expectPrinted = (code, out) => {
+      expect(parsed(code, true, true, transpilerMinifySyntax)).toBe(out);
+    };
+
     // we have an optimization for numbers 0 - 100, -0 - -100 so we must test those specifically
     // https://github.com/oven-sh/bun/issues/2810
     for (let i = 1; i < 120; i++) {
@@ -2970,7 +3005,7 @@ console.log(foo, array);
     expectPrinted("x + 'a' + 'b'", 'x + "ab"');
     expectPrinted("x + 'a' + 'bc'", 'x + "abc"');
     expectPrinted("x + 'ab' + 'c'", 'x + "abc"');
-    expectPrinted("'a' + 1", '"a" + 1');
+    expectPrinted("'a' + 1", '"a1"');
     expectPrinted("x * 'a' + 'b'", 'x * "a" + "b"');

     // rope string push another rope string
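The rewritten expectations encode the folding rules exercised above: adjacent string literals always merge, a numeric literal next to a string is stringified and merged when syntax minification is on (so 'a' + 1 prints as "a1"), and an unknown operand such as x only lets the literals on its other side merge. A quick way to observe this outside the test harness (a sketch; loader: "js" and the variable name t are my choices, and exact output whitespace may differ):

    const t = new Bun.Transpiler({ loader: "js", minify: { syntax: true } });
    t.transformSync(`export const foo = "a" + 1 + "b";`); // => 'export const foo = "a1b";\n'
    t.transformSync(`export const foo = x + "a" + "b";`); // => 'export const foo = x + "ab";\n'
    t.transformSync(`export const foo = x * "a" + "b";`); // the multiplication is left unfolded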
@@ -3273,10 +3308,10 @@ console.log(foo, array);
   });

   it('`str` + "``"', () => {
-    expectPrinted_('const x = `str` + "``";', "const x = `str\\`\\``");
-    expectPrinted_('const x = `` + "`";', "const x = `\\``");
-    expectPrinted_('const x = `` + "``";', "const x = `\\`\\``");
-    expectPrinted_('const x = "``" + ``;', "const x = `\\`\\``");
+    expectPrintedMin_('const x = `str` + "``";', 'const x = "str``"');
+    expectPrintedMin_('const x = `` + "`";', 'const x = "`"');
+    expectPrintedMin_('const x = `` + "``";', 'const x = "``"');
+    expectPrintedMin_('const x = "``" + ``;', 'const x = "``"');
   });
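This last hunk asserts that when a template literal is folded together with a string literal under syntax minification, the printer now prefers a plain quoted string over a template full of escaped backticks, which is shorter and avoids the escapes entirely. The same hedged Bun.Transpiler sketch as above can confirm it:

    const t = new Bun.Transpiler({ loader: "js", minify: { syntax: true } });
    t.transformSync('const x = `str` + "``";');
    // => 'const x = "str``";\n' -- a quoted string needs no backtick escapes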