From f9e78c419fd9f515a203bbd1060a6285aa0ffffb Mon Sep 17 00:00:00 2001 From: dave caruso Date: Mon, 15 Jan 2024 23:14:41 -0800 Subject: [PATCH] feat: Implement `using` keyword (explicit resource management) (#8151) * port 'initial support for using declarations' 1:1 port of this commit: https://github.com/evanw/esbuild/commit/56a3e01244735c0966baf13b4d389ca215400214 * port 'initial support for await using declarations' 1:1 port of this commit: https://github.com/evanw/esbuild/commit/1634a0b5ad53018ea0d271da8e84774cd0f072de * fix cmake config for local jsc * add global defines for symbols * begin porting lowering implementation based off of https://github.com/evanw/esbuild/pull/3192 * [autofix.ci] apply automated fixes * add some fun webkit scripts * fix the minification bug * refactor runtime_js, etc * rename test file * finished yapping * silly silyl * Update src/bundler.zig * ok * a * Fix crash --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Jarred Sumner Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com> --- CMakeLists.txt | 1 + Dockerfile | 2 +- Makefile | 12 +- scripts/fork-webkit.sh | 56 ++ scripts/set-webkit-submodule-to-cmake.sh | 15 + src/bun.js/WebKit | 2 +- src/bun.js/bindings/GeneratedJS2Native.zig | 24 + .../webcore/JSReadableStreamDefaultReader.cpp | 2 + src/bun.js/module_loader.zig | 2 +- src/bundler.zig | 20 +- src/bundler/bundle_v2.zig | 89 +- src/defines-table.zig | 21 +- src/deps/tinycc | 2 +- src/js_ast.zig | 17 +- src/js_parser.zig | 920 ++++++++++++++---- src/js_printer.zig | 12 + src/runtime.bun.js | 36 + src/runtime.footer.bun.js | 19 - src/runtime.footer.js | 29 - src/runtime.footer.node.js | 85 -- src/runtime.footer.with-refresh.js | 29 - src/runtime.zig | 196 +--- src/runtime/index-without-hmr.ts | 2 +- .../web/explicit-resource-management.test.ts | 141 +++ .../__snapshots__/transpiler.test.js.snap | 183 ++++ test/transpiler/transpiler.test.js | 51 + 26 files changed, 1418 insertions(+), 550 deletions(-) create mode 100755 scripts/fork-webkit.sh create mode 100755 scripts/set-webkit-submodule-to-cmake.sh create mode 100644 src/bun.js/bindings/GeneratedJS2Native.zig create mode 100644 src/runtime.bun.js delete mode 100644 src/runtime.footer.bun.js delete mode 100644 src/runtime.footer.js delete mode 100644 src/runtime.footer.node.js delete mode 100644 src/runtime.footer.with-refresh.js create mode 100644 test/js/web/explicit-resource-management.test.ts create mode 100644 test/transpiler/__snapshots__/transpiler.test.js.snap diff --git a/CMakeLists.txt b/CMakeLists.txt index 732ceb4fcf..24b9b1cd76 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -469,6 +469,7 @@ else() # make jsc-compile-debug jsc-copy-headers include_directories( "${WEBKIT_DIR}/" + "${WEBKIT_DIR}/JavaScriptCore/Headers/JavaScriptCore" "${WEBKIT_DIR}/JavaScriptCore/PrivateHeaders" "${WEBKIT_DIR}/bmalloc/Headers" "${WEBKIT_DIR}/WTF/Headers" diff --git a/Dockerfile b/Dockerfile index a9f6ae4332..f0f9871de8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -380,7 +380,7 @@ FROM bun-base-with-zig as bun-codegen-for-zig COPY package.json bun.lockb Makefile .gitmodules .prettierrc.cjs ${BUN_DIR}/ COPY src/runtime ${BUN_DIR}/src/runtime -COPY src/runtime.js src/runtime.footer*.js src/react-refresh.js ${BUN_DIR}/src/ +COPY src/runtime.js src/runtime.bun.js ${BUN_DIR}/src/ COPY packages/bun-error ${BUN_DIR}/packages/bun-error COPY src/fallback.ts ${BUN_DIR}/src/fallback.ts COPY src/api ${BUN_DIR}/src/api diff --git 
a/Makefile b/Makefile index 1195d12a70..580f1c37ff 100644 --- a/Makefile +++ b/Makefile @@ -836,17 +836,7 @@ fallback_decoder: .PHONY: runtime_js runtime_js: - @NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js - @NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js - @NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.node.pre.out.js; cat src/runtime.node.pre.out.js src/runtime.footer.node.js > src/runtime.node.out.js - @NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.bun.pre.out.js; cat src/runtime.bun.pre.out.js src/runtime.footer.bun.js > src/runtime.bun.out.js - -.PHONY: runtime_js_dev -runtime_js_dev: - @NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js - @NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js - @NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.node.pre.out.js; cat src/runtime.node.pre.out.js src/runtime.footer.node.js > src/runtime.node.out.js - @NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.bun.pre.out.js; cat src/runtime.bun.pre.out.js src/runtime.footer.bun.js > src/runtime.bun.out.js + @NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime.bun.js --format=esm --platform=node --minify --external:/bun:* > src/runtime.out.js .PHONY: bun_error bun_error: diff --git a/scripts/fork-webkit.sh b/scripts/fork-webkit.sh new file mode 100755 index 0000000000..b9d43ee922 --- /dev/null +++ b/scripts/fork-webkit.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash +# this script is the magic script to configure your devenv for making a patch to WebKit +# once you are done with the patch you can run this again with --undo +# you can also run this with --danger-reset to force reset the submodule (danger) +set -euo pipefail + +cd "$(dirname "$0")/.." + +if [ "$#" == "0" ]; then + if ! 
[ -d build ]; then + bash ./scripts/setup.sh + fi + + bash ./scripts/update-submodules.sh --webkit + + platform=linux + if [ "$(uname)" == "Darwin" ]; then + platform=mac + fi + + make jsc-build-${platform}-compile-debug + cmake -Bbuild -DWEBKIT_DIR=$(pwd)/src/bun.js/WebKit/WebKitBuild/Debug + # ninja -Cbuild + + echo "" + echo "Ready" + echo "" + echo "TODO: add a better way to invoke the webkit build script" + echo "For now to recompile WebKit, run:" + echo "" + echo " $ make jsc-build-${platform}-compile-debug && ninja -Cbuild" + echo "" + echo "To reset this back to using prebuild, run:" + echo "" + echo " $ $0 --undo" + echo " $ $0 --danger-reset # this invokes 'git reset --hard'" + echo "" + + + exit; +fi + +if [ "$1" == '--undo' ]; then + cmake -Bbuild -UWEBKIT_DIR + echo Reset ./build to use the system WebKit + exit; +fi + +if [ "$1" == '--danger-reset' ]; then + cmake -Bbuild -UWEBKIT_DIR + bash ./scripts/set-webkit-submodule-to-cmake.sh + exit; +fi + +echo "Unknown argument: $1" +echo "Usage: $0 [--undo/--danger-reset]" \ No newline at end of file diff --git a/scripts/set-webkit-submodule-to-cmake.sh b/scripts/set-webkit-submodule-to-cmake.sh new file mode 100755 index 0000000000..9fc9a121f7 --- /dev/null +++ b/scripts/set-webkit-submodule-to-cmake.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash +set -euo pipefail + +cd -- "$(dirname -- "${BASH_SOURCE[0]}")/.." + +WEBKIT_TAG=$(grep 'set(WEBKIT_TAG' "CMakeLists.txt" | awk '{print $2}' | cut -f 1 -d ')') +if [ -z "${WEBKIT_TAG}" ]; then + echo "Could not find WEBKIT_TAG in CMakeLists.txt" + exit 1 +fi + +echo "Setting WebKit submodule to ${WEBKIT_TAG}" +cd src/bun.js/WebKit +git fetch origin "${WEBKIT_TAG}" +git reset --hard "${WEBKIT_TAG}" diff --git a/src/bun.js/WebKit b/src/bun.js/WebKit index 347037014a..9e975e808a 160000 --- a/src/bun.js/WebKit +++ b/src/bun.js/WebKit @@ -1 +1 @@ -Subproject commit 347037014ae069eed1c4f4687001a256949b124e +Subproject commit 9e975e808ab32043ae6c7927cdb51de4501b9f73 diff --git a/src/bun.js/bindings/GeneratedJS2Native.zig b/src/bun.js/bindings/GeneratedJS2Native.zig new file mode 100644 index 0000000000..f9ac086061 --- /dev/null +++ b/src/bun.js/bindings/GeneratedJS2Native.zig @@ -0,0 +1,24 @@ +const JSC = @import("root").bun.JSC; +export fn JS2Zig__createBinding(global: *JSC.JSGlobalObject) JSC.JSValue { + return @import("../node/node_fs_binding.zig").createBinding(global); +} +export fn JS2Zig__createNodeHttp_Binding(global: *JSC.JSGlobalObject) JSC.JSValue { + return @import("../api/bun/h2_frame_parser.zig").createNodeHttp2Binding(global); +} +export fn JS2Zig__OS_create(global: *JSC.JSGlobalObject) JSC.JSValue { + return @import("../node/node_os.zig").OS.create(global); +} +export fn JS2Zig__String_jsGetStringWidth(global: *JSC.JSGlobalObject, call_frame: *JSC.CallFrame) JSC.JSValue { + return @import("../../string.zig").String.jsGetStringWidth(global, call_frame); +} +export fn JS2Zig__parseArgs(global: *JSC.JSGlobalObject, call_frame: *JSC.CallFrame) JSC.JSValue { + return @import("../node/util/parse_args.zig").parseArgs(global, call_frame); +} +export fn JS2Zig__QuickAndDirtyJavaScriptSyntaxHighlighter_jsFunctionSyntaxHighlight(global: *JSC.JSGlobalObject, call_frame: *JSC.CallFrame) JSC.JSValue { + return @import("../../fmt.zig").QuickAndDirtyJavaScriptSyntaxHighlighter.jsFunctionSyntaxHighlight(global, call_frame); +} +comptime { + _ = &JS2Zig__createBinding; + _ = &JS2Zig__createNodeHttp_Binding; + _ = &JS2Zig__OS_create; +} \ No newline at end of file diff --git 
a/src/bun.js/bindings/webcore/JSReadableStreamDefaultReader.cpp b/src/bun.js/bindings/webcore/JSReadableStreamDefaultReader.cpp index 021901cebf..e86af40e38 100644 --- a/src/bun.js/bindings/webcore/JSReadableStreamDefaultReader.cpp +++ b/src/bun.js/bindings/webcore/JSReadableStreamDefaultReader.cpp @@ -122,6 +122,8 @@ void JSReadableStreamDefaultReaderPrototype::finishCreation(VM& vm) Base::finishCreation(vm); reifyStaticProperties(vm, JSReadableStreamDefaultReader::info(), JSReadableStreamDefaultReaderPrototypeTableValues, *this); JSC_TO_STRING_TAG_WITHOUT_TRANSITION(); + // As suggested by https://github.com/tc39/proposal-explicit-resource-management#relation-to-dom-apis + // putDirectWithoutTransition(vm, vm.propertyNames->disposeSymbol, get(globalObject(), PropertyName(Identifier::fromString(vm, "releaseLock"_s))), JSC::PropertyAttribute::DontEnum | 0); } const ClassInfo JSReadableStreamDefaultReader::s_info = { "ReadableStreamDefaultReader"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSReadableStreamDefaultReader) }; diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index 33a2987128..a0475dade2 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -2224,7 +2224,7 @@ pub const ModuleLoader = struct { if (specifier.eqlComptime(Runtime.Runtime.Imports.Name)) { return ResolvedSource{ .allocator = null, - .source_code = String.init(Runtime.Runtime.sourceContentBun()), + .source_code = String.init(Runtime.Runtime.source_code), .specifier = specifier, .source_url = specifier, .hash = Runtime.Runtime.versionHash(), diff --git a/src/bundler.zig b/src/bundler.zig index 8a0b86ab99..c4abeae9b6 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -1821,16 +1821,16 @@ pub const Bundler = struct { // } if (bundler.linker.any_needs_runtime) { - try bundler.output_files.append( - options.OutputFile.initBuf( - runtime.Runtime.sourceContent(false), - bun.default_allocator, - Linker.runtime_source_path, - .js, - null, - null, - ), - ); + // try bundler.output_files.append( + // options.OutputFile.initBuf( + // runtime.Runtime.source_code, + // bun.default_allocator, + // Linker.runtime_source_path, + // .js, + // null, + // null, + // ), + // ); } if (FeatureFlags.tracing and bundler.options.log.level.atLeast(.info)) { diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 19953f9835..6e633099a2 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -2357,6 +2357,7 @@ pub const ParseTask = struct { .node => \\import { createRequire } from "node:module"; \\export var __require = /* @__PURE__ */ createRequire(import.meta.url); + \\ , // Copied from esbuild's runtime.go: @@ -2372,16 +2373,90 @@ pub const ParseTask = struct { // is not always defined there. The `createRequire` call approach is more reliable. else => \\export var __require = /* @__PURE__ */ (x => - \\ typeof require !== 'undefined' ? require : - \\ typeof Proxy !== 'undefined' ? new Proxy(x, { - \\ get: (a, b) => (typeof require !== 'undefined' ? require : a)[b] - \\ }) : x + \\ typeof require !== 'undefined' ? require : + \\ typeof Proxy !== 'undefined' ? new Proxy(x, { + \\ get: (a, b) => (typeof require !== 'undefined' ? 
require : a)[b] + \\ }) : x \\)(function (x) { - \\ if (typeof require !== 'undefined') return require.apply(this, arguments) - \\ throw Error('Dynamic require of "' + x + '" is not supported') + \\ if (typeof require !== 'undefined') return require.apply(this, arguments) + \\ throw Error('Dynamic require of "' + x + '" is not supported') \\}); + \\ }; - const runtime_code = @embedFile("../runtime.js") ++ runtime_require; + const runtime_using_symbols = switch (target) { + // bun's webkit has Symbol.asyncDispose, Symbol.dispose, and SuppressedError, but not the syntax support + .bun => + \\export var __using = (stack, value, async) => { + \\ if (value != null) { + \\ if (typeof value !== 'object' && typeof value !== 'function') throw TypeError('Object expected to be assigned to "using" declaration') + \\ let dispose + \\ if (async) dispose = value[Symbol.asyncDispose] + \\ if (dispose === void 0) dispose = value[Symbol.dispose] + \\ if (typeof dispose !== 'function') throw TypeError('Object not disposable') + \\ stack.push([async, dispose, value]) + \\ } else if (async) { + \\ stack.push([async]) + \\ } + \\ return value + \\} + \\ + \\export var __callDispose = (stack, error, hasError) => { + \\ let fail = e => error = hasError ? new SuppressedError(e, error, 'An error was suppressed during disposal') : (hasError = true, e) + \\ , next = (it) => { + \\ while (it = stack.pop()) { + \\ try { + \\ var result = it[1] && it[1].call(it[2]) + \\ if (it[0]) return Promise.resolve(result).then(next, (e) => (fail(e), next())) + \\ } catch (e) { + \\ fail(e) + \\ } + \\ } + \\ if (hasError) throw error + \\ } + \\ return next() + \\} + \\ + , + // Other platforms may or may not have the symbol or errors + // The definitions of __dispose and __asyncDispose match what esbuild's __wellKnownSymbol() helper does + else => + \\var __dispose = Symbol.dispose || /* @__PURE__ */ Symbol.for('Symbol.dispose'); + \\var __asyncDispose = Symbol.dispose || /* @__PURE__ */ Symbol.for('Symbol.dispose'); + \\ + \\export var __using = (stack, value, async) => { + \\ if (value != null) { + \\ if (typeof value !== 'object' && typeof value !== 'function') throw TypeError('Object expected to be assigned to "using" declaration') + \\ var dispose + \\ if (async) dispose = value[__asyncDispose] + \\ if (dispose === void 0) dispose = value[__dispose] + \\ if (typeof dispose !== 'function') throw TypeError('Object not disposable') + \\ stack.push([async, dispose, value]) + \\ } else if (async) { + \\ stack.push([async]) + \\ } + \\ return value + \\} + \\ + \\export var __callDispose = (stack, error, hasError) => { + \\ var E = typeof SuppressedError === 'function' ? SuppressedError : + \\ function (e, s, m, _) { return _ = Error(m), _.name = 'SuppressedError', _.error = e, _.suppressed = s, _ }, + \\ fail = e => error = hasError ? 
new E(e, error, 'An error was suppressed during disposal') : (hasError = true, e), + \\ next = (it) => { + \\ while (it = stack.pop()) { + \\ try { + \\ var result = it[1] && it[1].call(it[2]) + \\ if (it[0]) return Promise.resolve(result).then(next, (e) => (fail(e), next())) + \\ } catch (e) { + \\ fail(e) + \\ } + \\ } + \\ if (hasError) throw error + \\ } + \\ return next() + \\} + \\ + }; + const runtime_code = @embedFile("../runtime.js") ++ runtime_require ++ runtime_using_symbols; const parse_task = ParseTask{ .ctx = undefined, diff --git a/src/defines-table.zig b/src/defines-table.zig index 1fa85a0b82..645d7ef913 100644 --- a/src/defines-table.zig +++ b/src/defines-table.zig @@ -114,9 +114,29 @@ pub const GlobalDefinesKey = [_][]const string{ &[_]string{ "Math", "tanh" }, &[_]string{ "Math", "trunc" }, + // Symbol: Static methods + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol#static_methods &[_]string{ "Symbol", "for" }, &[_]string{ "Symbol", "keyFor" }, + // Symbol: Static properties + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol#static_properties + &[_]string{ "Symbol", "asyncDispose" }, + &[_]string{ "Symbol", "asyncIterator" }, + &[_]string{ "Symbol", "dispose" }, + &[_]string{ "Symbol", "hasInstance" }, + &[_]string{ "Symbol", "isConcatSpreadable" }, + &[_]string{ "Symbol", "iterator" }, + &[_]string{ "Symbol", "match" }, + &[_]string{ "Symbol", "matchAll" }, + &[_]string{ "Symbol", "replace" }, + &[_]string{ "Symbol", "search" }, + &[_]string{ "Symbol", "species" }, + &[_]string{ "Symbol", "split" }, + &[_]string{ "Symbol", "toPrimitive" }, + &[_]string{ "Symbol", "toStringTag" }, + &[_]string{ "Symbol", "unscopables" }, + // Reflect: Static methods // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect#static_methods &[_]string{ "Reflect", "apply" }, @@ -859,7 +879,6 @@ pub const pure_global_identifiers = .{ .{ "top", pure_global_identifier_define }, .{ "webkitURL", pure_global_identifier_define }, .{ "window", pure_global_identifier_define }, - .{ "crypto", pure_global_identifier_define }, }; diff --git a/src/deps/tinycc b/src/deps/tinycc index ab631362d8..2d3ad9e0d3 160000 --- a/src/deps/tinycc +++ b/src/deps/tinycc @@ -1 +1 @@ -Subproject commit ab631362d839333660a265d3084d8ff060b96753 +Subproject commit 2d3ad9e0d32194ad7fd867b66ebe218dcc8cb5cd diff --git a/src/js_ast.zig b/src/js_ast.zig index 1ac0220003..5bec4dce2b 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -1160,7 +1160,7 @@ pub const Symbol = struct { import, // Assigning to a "const" symbol will throw a TypeError at runtime - cconst, + constant, // This annotates all other symbols that don't have special behavior. 
other, @@ -1361,7 +1361,7 @@ pub const Symbol = struct { pub fn isReactComponentishName(symbol: *const Symbol) bool { switch (symbol.kind) { - .hoisted, .hoisted_function, .cconst, .class, .other => { + .hoisted, .hoisted_function, .constant, .class, .other => { return switch (symbol.original_name[0]) { 'A'...'Z' => true, else => false, @@ -1375,7 +1375,7 @@ pub const Symbol = struct { } }; -pub const OptionalChain = enum(u2) { +pub const OptionalChain = enum(u1) { // "a?.b" start, @@ -5591,7 +5591,7 @@ pub const S = struct { pub const Throw = struct { value: ExprNodeIndex }; pub const Local = struct { - kind: Kind = Kind.k_var, + kind: Kind = .k_var, decls: G.Decl.List = .{}, is_export: bool = false, // The TypeScript compiler doesn't generate code for "import foo = bar" @@ -5605,13 +5605,20 @@ pub const S = struct { this.was_commonjs_export == other.was_commonjs_export; } - pub const Kind = enum(u2) { + pub const Kind = enum { k_var, k_let, k_const, + k_using, + k_await_using, + pub fn jsonStringify(self: @This(), writer: anytype) !void { return try writer.write(@tagName(self)); } + + pub fn isUsing(self: Kind) bool { + return self == .k_using or self == .k_await_using; + } }; }; diff --git a/src/js_parser.zig b/src/js_parser.zig index 515f760876..2983ea98eb 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -1316,7 +1316,7 @@ pub const ImportScanner = struct { } // when bundling, all top-level variables become var - if (p.options.bundle) { + if (p.options.bundle and !st.kind.isUsing()) { st.kind = .k_var; } }, @@ -1594,6 +1594,9 @@ const StaticSymbolName = struct { pub const delegateEvents = NewStaticSymbol("delegateEvents"); pub const __merge = NewStaticSymbol("__merge"); + + pub const __using = NewStaticSymbol("__using"); + pub const __callDispose = NewStaticSymbol("__callDispose"); }; }; @@ -2470,6 +2473,7 @@ const PrependTempRefsOpts = struct { pub const StmtsKind = enum { none, loop_body, + switch_stmt, fn_body, }; @@ -3206,7 +3210,49 @@ pub const Parser = struct { ) catch unreachable; } - if (!p.options.tree_shaking) { + // When "using" declarations appear at the top level, we change all TDZ + // variables in the top-level scope into "var" so that they aren't harmed + // when they are moved into the try/catch statement that lowering will + // generate. + // + // This is necessary because exported function declarations must be hoisted + // outside of the try/catch statement because they can be evaluated before + // this module is evaluated due to ESM cross-file function hoisting. And + // these function bodies might reference anything else in this scope, which + // must still work when those things are moved inside a try/catch statement. + // + // Before: + // + // using foo = get() + // export function fn() { + // return [foo, new Bar] + // } + // class Bar {} + // + // After ("fn" is hoisted, "Bar" is converted to "var"): + // + // export function fn() { + // return [foo, new Bar] + // } + // try { + // var foo = get(); + // var Bar = class {}; + // } catch (_) { + // ... + // } finally { + // ... + // } + // + // This is also necessary because other code might be appended to the code + // that we're processing and expect to be able to access top-level variables. 
+ p.will_wrap_module_in_try_catch_for_using = p.shouldLowerUsingDeclarations(stmts); + + // Note that top-level lowered "using" declarations disable tree-shaking + // because we only do tree-shaking on top-level statements and lowering + // a top-level "using" declaration moves all top-level statements into a + // nested scope. + if (!p.options.tree_shaking or p.will_wrap_module_in_try_catch_for_using) { + // When tree shaking is disabled, everything comes in a single part try p.appendPart(&parts, stmts); } else { // When tree shaking is enabled, each top-level statement is potentially a separate part. @@ -4150,8 +4196,10 @@ const ParseStatementOptions = struct { is_module_scope: bool = false, is_namespace_scope: bool = false, is_export: bool = false, + is_using_statement: bool = false, is_name_optional: bool = false, // For "export default" pseudo-statements, is_typescript_declare: bool = false, + is_for_loop_init: bool = false, pub fn hasDecorators(self: *ParseStatementOptions) bool { const decs = self.ts_decorators orelse return false; @@ -4888,6 +4936,9 @@ fn NewParser_( binary_expression_stack: std.ArrayList(BinaryExpressionVisitor) = undefined, + // If this is true, then all top-level statements are wrapped in a try/catch + will_wrap_module_in_try_catch_for_using: bool = false, + /// use this instead of checking p.source.index /// because when not bundling, p.source.index is `0` inline fn isSourceRuntime(p: *const P) bool { @@ -7570,7 +7621,7 @@ fn NewParser_( var is_typescript_ctor_field = false; const is_identifier = p.lexer.token == T.t_identifier; var text = p.lexer.identifier; - var arg = try p.parseBinding(); + var arg = try p.parseBinding(.{}); var ts_metadata = TypeScript.Metadata.default; if (comptime is_typescript_enabled) { @@ -7594,7 +7645,7 @@ fn NewParser_( text = p.lexer.identifier; // Re-parse the binding (the current binding is the TypeScript keyword) - arg = try p.parseBinding(); + arg = try p.parseBinding(.{}); }, else => { break; @@ -9618,11 +9669,11 @@ fn NewParser_( return p.parseTypescriptEnumStmt(loc, opts); } - const decls = try p.parseAndDeclareDecls(.cconst, opts); + const decls = try p.parseAndDeclareDecls(.constant, opts); try p.lexer.expectOrInsertSemicolon(); if (!opts.is_typescript_declare) { - try p.requireInitializers(decls.items); + try p.requireInitializers(.k_const, decls.items); } // When HMR is enabled, replace all const/let exports with var @@ -9774,7 +9825,7 @@ fn NewParser_( // jarred: TIL! 
if (p.lexer.token != .t_open_brace) { try p.lexer.expect(.t_open_paren); - var value = try p.parseBinding(); + var value = try p.parseBinding(.{}); // Skip over types if (is_typescript_enabled and p.lexer.token == .t_colon) { @@ -9882,13 +9933,16 @@ fn NewParser_( .t_const => { try p.lexer.next(); var stmtOpts = ParseStatementOptions{}; - decls.update(try p.parseAndDeclareDecls(.cconst, &stmtOpts)); + decls.update(try p.parseAndDeclareDecls(.constant, &stmtOpts)); init_ = p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls) }, init_loc); }, // for (;) .t_semicolon => {}, else => { - var stmtOpts = ParseStatementOptions{ .lexical_decl = .allow_all }; + var stmtOpts = ParseStatementOptions{ + .lexical_decl = .allow_all, + .is_for_loop_init = true, + }; const res = try p.parseExprOrLetStmt(&stmtOpts); switch (res.stmt_or_expr) { @@ -9949,7 +10003,7 @@ fn NewParser_( switch (init_stmt.data) { .s_local => { if (init_stmt.data.s_local.kind == .k_const) { - try p.requireInitializers(decls.slice()); + try p.requireInitializers(.k_const, decls.slice()); } }, else => {}, @@ -10658,7 +10712,7 @@ fn NewParser_( return p.s(S.TypeScript{}, loc); } - const ref = p.declareSymbol(.cconst, default_name_loc, default_name) catch unreachable; + const ref = p.declareSymbol(.constant, default_name_loc, default_name) catch unreachable; var decls = p.allocator.alloc(Decl, 1) catch unreachable; decls[0] = Decl{ .binding = p.b(B.Identifier{ .ref = ref }, default_name_loc), @@ -10868,62 +10922,176 @@ fn NewParser_( } fn parseExprOrLetStmt(p: *P, opts: *ParseStatementOptions) !ExprOrLetStmt { - const let_range = p.lexer.range(); - const raw = p.lexer.raw(); - if (p.lexer.token != .t_identifier or !strings.eqlComptime(raw, "let")) { - // Output.print("HI", .{}); + const token_range = p.lexer.range(); + + if (p.lexer.token != .t_identifier) { return ExprOrLetStmt{ .stmt_or_expr = js_ast.StmtOrExpr{ .expr = try p.parseExpr(.lowest) } }; } - try p.lexer.next(); + const raw = p.lexer.raw(); + if (strings.eqlComptime(raw, "let")) { + try p.lexer.next(); - switch (p.lexer.token) { - .t_identifier, .t_open_bracket, .t_open_brace => { - if (opts.lexical_decl == .allow_all or !p.lexer.has_newline_before or p.lexer.token == .t_open_bracket) { - if (opts.lexical_decl != .allow_all) { - try p.forbidLexicalDecl(let_range.loc); + switch (p.lexer.token) { + .t_identifier, .t_open_bracket, .t_open_brace => { + if (opts.lexical_decl == .allow_all or !p.lexer.has_newline_before or p.lexer.token == .t_open_bracket) { + if (opts.lexical_decl != .allow_all) { + try p.forbidLexicalDecl(token_range.loc); + } + + const decls = try p.parseAndDeclareDecls(.other, opts); + return ExprOrLetStmt{ + .stmt_or_expr = js_ast.StmtOrExpr{ + .stmt = p.s(S.Local{ + // Replace all "export let" with "export var" when HMR is enabled + .kind = if (opts.is_export and p.options.features.hot_module_reloading) .k_var else .k_let, + .decls = G.Decl.List.fromList(decls), + .is_export = opts.is_export, + }, token_range.loc), + }, + .decls = decls.items, + }; } + }, + else => {}, + } + } else if (strings.eqlComptime(raw, "using")) { + // Handle an "using" declaration + if (opts.is_export) { + try p.log.addError(p.source, token_range.loc, "Cannot use \"export\" with a \"using\" declaration"); + } - const decls = try p.parseAndDeclareDecls(.other, opts); + try p.lexer.next(); + + if (p.lexer.token == .t_identifier and !p.lexer.has_newline_before) { + if (opts.lexical_decl != .allow_all) { + try p.forbidLexicalDecl(token_range.loc); + } + // 
p.markSyntaxFeature(.using, token_range.loc); + opts.is_using_statement = true; + const decls = try p.parseAndDeclareDecls(.constant, opts); + if (!opts.is_for_loop_init) { + try p.requireInitializers(.k_using, decls.items); + } + return ExprOrLetStmt{ + .stmt_or_expr = js_ast.StmtOrExpr{ + .stmt = p.s(S.Local{ + .kind = .k_using, + .decls = G.Decl.List.fromList(decls), + .is_export = false, + }, token_range.loc), + }, + .decls = decls.items, + }; + } + } else if (p.fn_or_arrow_data_parse.allow_await == .allow_expr and strings.eqlComptime(raw, "await")) { + // Handle an "await using" declaration + if (opts.is_export) { + try p.log.addError(p.source, token_range.loc, "Cannot use \"export\" with an \"await using\" declaration"); + } + + if (p.fn_or_arrow_data_parse.is_top_level) { + p.top_level_await_keyword = token_range; + } + + try p.lexer.next(); + + const raw2 = p.lexer.raw(); + const value = if (p.lexer.token == .t_identifier and strings.eqlComptime(raw2, "using")) value: { + // const using_loc = p.saveExprCommentsHere(); + const using_range = p.lexer.range(); + try p.lexer.next(); + if (p.lexer.token == .t_identifier and !p.lexer.has_newline_before) { + // It's an "await using" declaration if we get here + if (opts.lexical_decl != .allow_all) { + try p.forbidLexicalDecl(using_range.loc); + } + // p.markSyntaxFeature(.using, using_range.loc); + opts.is_using_statement = true; + const decls = try p.parseAndDeclareDecls(.constant, opts); + if (!opts.is_for_loop_init) { + try p.requireInitializers(.k_await_using, decls.items); + } return ExprOrLetStmt{ .stmt_or_expr = js_ast.StmtOrExpr{ .stmt = p.s(S.Local{ - // Replace all "export let" with "export var" when HMR is enabled - .kind = if (opts.is_export and p.options.features.hot_module_reloading) .k_var else .k_let, + .kind = .k_await_using, .decls = G.Decl.List.fromList(decls), - .is_export = opts.is_export, - }, let_range.loc), + .is_export = false, + }, token_range.loc), }, .decls = decls.items, }; } - }, - else => {}, + break :value Expr{ + .data = .{ .e_identifier = .{ .ref = try p.storeNameInRef(raw) } }, + // TODO: implement saveExprCommentsHere and use using_loc here + .loc = using_range.loc, + }; + } else try p.parseExpr(.prefix); + + if (p.lexer.token == .t_asterisk_asterisk) { + try p.lexer.unexpected(); + } + const expr = p.newExpr( + E.Await{ .value = try p.parseSuffix(value, .prefix, null, .none) }, + token_range.loc, + ); + return ExprOrLetStmt{ + .stmt_or_expr = js_ast.StmtOrExpr{ + .expr = try p.parseSuffix(expr, .lowest, null, .none), + }, + }; + } else { + return ExprOrLetStmt{ + .stmt_or_expr = js_ast.StmtOrExpr{ + .expr = try p.parseExpr(.lowest), + }, + }; } - const ref = p.storeNameInRef(raw) catch unreachable; - const expr = p.newExpr(E.Identifier{ .ref = ref }, let_range.loc); - return ExprOrLetStmt{ .stmt_or_expr = js_ast.StmtOrExpr{ .expr = try p.parseSuffix(expr, .lowest, null, Expr.EFlags.none) } }; + // Parse the remainder of this expression that starts with an identifier + const ref = try p.storeNameInRef(raw); + const expr = p.newExpr(E.Identifier{ .ref = ref }, token_range.loc); + return ExprOrLetStmt{ + .stmt_or_expr = js_ast.StmtOrExpr{ + .expr = try p.parseSuffix(expr, .lowest, null, .none), + }, + }; } - fn requireInitializers(p: *P, decls: []G.Decl) anyerror!void { + fn requireInitializers(p: *P, comptime kind: S.Local.Kind, decls: []G.Decl) anyerror!void { + const what = switch (kind) { + .k_await_using, .k_using => "declaration", + .k_const => "constant", + else => comptime unreachable, + }; + for 
(decls) |decl| { if (decl.value == null) { switch (decl.binding.data) { .b_identifier => |ident| { const r = js_lexer.rangeOfIdentifier(p.source, decl.binding.loc); - try p.log.addRangeErrorFmt(p.source, r, p.allocator, "The constant \"{s}\" must be initialized", .{p.symbols.items[ident.ref.innerIndex()].original_name}); + try p.log.addRangeErrorFmt(p.source, r, p.allocator, "The " ++ what ++ " \"{s}\" must be initialized", .{ + p.symbols.items[ident.ref.innerIndex()].original_name, + }); // return;/ }, else => { - try p.log.addError(p.source, decl.binding.loc, "This constant must be initialized"); + try p.log.addError(p.source, decl.binding.loc, "This " ++ what ++ " must be initialized"); }, } } } } - fn parseBinding(p: *P) anyerror!Binding { + const ParseBindingOptions = struct { + /// This will prevent parsing of destructuring patterns, as using statement + /// is only allowed to be `using name, name2, name3`, nothing special. + is_using_statement: bool = false, + }; + + fn parseBinding(p: *P, comptime opts: ParseBindingOptions) anyerror!Binding { const loc = p.lexer.loc(); switch (p.lexer.token) { @@ -10939,119 +11107,123 @@ fn NewParser_( return p.b(B.Identifier{ .ref = ref }, loc); }, .t_open_bracket => { - try p.lexer.next(); - var is_single_line = !p.lexer.has_newline_before; - var items = ListManaged(js_ast.ArrayBinding).init(p.allocator); - var has_spread = false; + if (!opts.is_using_statement) { + try p.lexer.next(); + var is_single_line = !p.lexer.has_newline_before; + var items = ListManaged(js_ast.ArrayBinding).init(p.allocator); + var has_spread = false; - // "in" expressions are allowed - const old_allow_in = p.allow_in; - p.allow_in = true; + // "in" expressions are allowed + const old_allow_in = p.allow_in; + p.allow_in = true; - while (p.lexer.token != .t_close_bracket) { - if (p.lexer.token == .t_comma) { - items.append(js_ast.ArrayBinding{ - .binding = Binding{ .data = Prefill.Data.BMissing, .loc = p.lexer.loc() }, - }) catch unreachable; - } else { - if (p.lexer.token == .t_dot_dot_dot) { - try p.lexer.next(); - has_spread = true; + while (p.lexer.token != .t_close_bracket) { + if (p.lexer.token == .t_comma) { + items.append(js_ast.ArrayBinding{ + .binding = Binding{ .data = Prefill.Data.BMissing, .loc = p.lexer.loc() }, + }) catch unreachable; + } else { + if (p.lexer.token == .t_dot_dot_dot) { + try p.lexer.next(); + has_spread = true; - // This was a bug in the ES2015 spec that was fixed in ES2016 - if (p.lexer.token != .t_identifier) { - // p.markSyntaxFeature(compat.NestedRestBinding, p.lexer.Range()) + // This was a bug in the ES2015 spec that was fixed in ES2016 + if (p.lexer.token != .t_identifier) { + // p.markSyntaxFeature(compat.NestedRestBinding, p.lexer.Range()) + } + } + + const binding = try p.parseBinding(opts); + + var default_value: ?Expr = null; + if (!has_spread and p.lexer.token == .t_equals) { + try p.lexer.next(); + default_value = try p.parseExpr(.comma); + } + + items.append(js_ast.ArrayBinding{ .binding = binding, .default_value = default_value }) catch unreachable; + + // Commas after spread elements are not allowed + if (has_spread and p.lexer.token == .t_comma) { + p.log.addRangeError(p.source, p.lexer.range(), "Unexpected \",\" after rest pattern") catch unreachable; + return error.SyntaxError; } } - const binding = try p.parseBinding(); - - var default_value: ?Expr = null; - if (!has_spread and p.lexer.token == .t_equals) { - try p.lexer.next(); - default_value = try p.parseExpr(.comma); + if (p.lexer.token != .t_comma) { + break; } - 
items.append(js_ast.ArrayBinding{ .binding = binding, .default_value = default_value }) catch unreachable; + if (p.lexer.has_newline_before) { + is_single_line = false; + } + try p.lexer.next(); + + if (p.lexer.has_newline_before) { + is_single_line = false; + } + } + + p.allow_in = old_allow_in; + + if (p.lexer.has_newline_before) { + is_single_line = false; + } + try p.lexer.expect(.t_close_bracket); + return p.b(B.Array{ + .items = items.items, + .has_spread = has_spread, + .is_single_line = is_single_line, + }, loc); + } + }, + .t_open_brace => { + if (!opts.is_using_statement) { + // p.markSyntaxFeature(compat.Destructuring, p.lexer.Range()) + try p.lexer.next(); + var is_single_line = !p.lexer.has_newline_before; + var properties = ListManaged(js_ast.B.Property).init(p.allocator); + + // "in" expressions are allowed + const old_allow_in = p.allow_in; + p.allow_in = true; + + while (p.lexer.token != .t_close_brace) { + var property = try p.parsePropertyBinding(); + properties.append(property) catch unreachable; // Commas after spread elements are not allowed - if (has_spread and p.lexer.token == .t_comma) { + if (property.flags.contains(.is_spread) and p.lexer.token == .t_comma) { p.log.addRangeError(p.source, p.lexer.range(), "Unexpected \",\" after rest pattern") catch unreachable; return error.SyntaxError; } + + if (p.lexer.token != .t_comma) { + break; + } + + if (p.lexer.has_newline_before) { + is_single_line = false; + } + try p.lexer.next(); + if (p.lexer.has_newline_before) { + is_single_line = false; + } } - if (p.lexer.token != .t_comma) { - break; - } + p.allow_in = old_allow_in; if (p.lexer.has_newline_before) { is_single_line = false; } - try p.lexer.next(); + try p.lexer.expect(.t_close_brace); - if (p.lexer.has_newline_before) { - is_single_line = false; - } + return p.b(B.Object{ + .properties = properties.items, + .is_single_line = is_single_line, + }, loc); } - - p.allow_in = old_allow_in; - - if (p.lexer.has_newline_before) { - is_single_line = false; - } - try p.lexer.expect(.t_close_bracket); - return p.b(B.Array{ - .items = items.items, - .has_spread = has_spread, - .is_single_line = is_single_line, - }, loc); - }, - .t_open_brace => { - // p.markSyntaxFeature(compat.Destructuring, p.lexer.Range()) - try p.lexer.next(); - var is_single_line = !p.lexer.has_newline_before; - var properties = ListManaged(js_ast.B.Property).init(p.allocator); - - // "in" expressions are allowed - const old_allow_in = p.allow_in; - p.allow_in = true; - - while (p.lexer.token != .t_close_brace) { - var property = try p.parsePropertyBinding(); - properties.append(property) catch unreachable; - - // Commas after spread elements are not allowed - if (property.flags.contains(.is_spread) and p.lexer.token == .t_comma) { - p.log.addRangeError(p.source, p.lexer.range(), "Unexpected \",\" after rest pattern") catch unreachable; - return error.SyntaxError; - } - - if (p.lexer.token != .t_comma) { - break; - } - - if (p.lexer.has_newline_before) { - is_single_line = false; - } - try p.lexer.next(); - if (p.lexer.has_newline_before) { - is_single_line = false; - } - } - - p.allow_in = old_allow_in; - - if (p.lexer.has_newline_before) { - is_single_line = false; - } - try p.lexer.expect(.t_close_brace); - - return p.b(B.Object{ - .properties = properties.items, - .is_single_line = is_single_line, - }, loc); }, else => {}, } @@ -11135,7 +11307,7 @@ fn NewParser_( } try p.lexer.expect(.t_colon); - const value = try p.parseBinding(); + const value = try p.parseBinding(.{}); var default_value: ?Expr = 
null; if (p.lexer.token == .t_equals) { @@ -11158,12 +11330,16 @@ fn NewParser_( while (true) { // Forbid "let let" and "const let" but not "var let" - if ((kind == .other or kind == .cconst) and p.lexer.isContextualKeyword("let")) { + if ((kind == .other or kind == .constant) and p.lexer.isContextualKeyword("let")) { p.log.addRangeError(p.source, p.lexer.range(), "Cannot use \"let\" as an identifier here") catch unreachable; } var value: ?js_ast.Expr = null; - var local = try p.parseBinding(); + var local = switch (opts.is_using_statement) { + inline else => |is_using| try p.parseBinding(.{ + .is_using_statement = is_using, + }), + }; p.declareBinding(kind, &local, opts) catch unreachable; // Skip over types @@ -15265,6 +15441,9 @@ fn NewParser_( } }, .s_local => |st| { + // "await" is a side effect because it affects code timing + if (st.kind == .k_await_using) return false; + for (st.decls.slice()) |*decl| { if (!p.bindingCanBeRemovedIfUnused(decl.binding)) { return false; @@ -15273,6 +15452,11 @@ fn NewParser_( if (decl.value) |*decl_value| { if (!p.exprCanBeRemovedIfUnused(decl_value)) { return false; + } else if (st.kind == .k_using) { + // "using" declarations are only side-effect free if they are initialized to null or undefined + if (decl_value.data != .e_null and decl_value.data != .e_undefined) { + return false; + } } } } @@ -15537,7 +15721,7 @@ fn NewParser_( e_.ref = result.ref; // Handle assigning to a constant - if (in.assign_target != .none and p.symbols.items[result.ref.innerIndex()].kind == .cconst) { + if (in.assign_target != .none and p.symbols.items[result.ref.innerIndex()].kind == .constant) { const r = js_lexer.rangeOfIdentifier(p.source, expr.loc); var notes = p.allocator.alloc(logger.Data, 1) catch unreachable; notes[0] = logger.Data{ @@ -17581,10 +17765,17 @@ fn NewParser_( } fn selectLocalKind(p: *P, kind: S.Local.Kind) S.Local.Kind { - if (p.options.bundle and p.current_scope.parent == null) { + // Use "var" instead of "let" and "const" if the variable declaration may + // need to be separated from the initializer. This allows us to safely move + // this declaration into a nested scope. + if ((p.options.bundle or p.will_wrap_module_in_try_catch_for_using) and + (p.current_scope.parent == null and !kind.isUsing())) + { return .k_var; } + // Optimization: use "let" instead of "const" because it's shorter. This is + // only done when bundling because assigning to "const" is only an error when bundling. if (p.options.bundle and kind == .k_const and p.options.features.minify_syntax) { return .k_let; } @@ -18223,7 +18414,7 @@ fn NewParser_( } switch (data.value) { - .expr => |expr| { + .expr => |expr| brk_expr: { const was_anonymous_named_expr = expr.isAnonymousNamed(); data.value.expr = p.visitExpr(expr); @@ -18258,6 +18449,30 @@ fn NewParser_( } } + // If there are lowered "using" declarations, change this into a "var" + if (p.current_scope.parent == null and p.will_wrap_module_in_try_catch_for_using) { + try stmts.ensureUnusedCapacity(2); + + const decls = p.allocator.alloc(G.Decl, 1) catch bun.outOfMemory(); + decls[0] = .{ + .binding = p.b(B.Identifier{ .ref = data.default_name.ref.? 
}, data.default_name.loc), + .value = expr, + }; + stmts.appendAssumeCapacity(p.s(S.Local{ + .decls = G.Decl.List.init(decls), + }, stmt.loc)); + const items = p.allocator.alloc(js_ast.ClauseItem, 1) catch bun.outOfMemory(); + items[0] = js_ast.ClauseItem{ + .alias = "default", + .alias_loc = data.default_name.loc, + .name = data.default_name, + }; + stmts.appendAssumeCapacity(p.s(S.ExportClause{ + .items = items, + }, stmt.loc)); + break :brk_expr; + } + if (mark_for_replace) { const entry = p.options.features.replace_exports.getPtr("default").?; if (entry.* == .replace) { @@ -18421,6 +18636,9 @@ fn NewParser_( p.popScope(); }, .s_local => |data| { + // TODO: Silently remove unsupported top-level "await" in dead code branches + // (this was from 'await using' syntax) + // Local statements do not end the const local prefix p.current_scope.is_after_const_local_prefix = was_after_after_const_local_prefix; @@ -18451,6 +18669,21 @@ fn NewParser_( return; } + // Optimization: Avoid unnecessary "using" machinery by changing ones + // initialized to "null" or "undefined" into a normal variable. Note that + // "await using" still needs the "await", so we can't do it for those. + if (p.options.features.minify_syntax and data.kind == .k_using) { + data.kind = .k_let; + for (data.decls.slice()) |*d| { + if (d.value) |val| { + if (val.data != .e_null and val.data != .e_undefined) { + data.kind = .k_using; + break; + } + } + } + } + // We must relocate vars in order to safely handle removing if/else depending on NODE_ENV. // Edgecase: // `export var` is skipped because it's unnecessary. That *should* be a noop, but it loses the `is_export` flag if we're in HMR. @@ -18755,40 +18988,41 @@ fn NewParser_( } }, .s_for => |data| { - { - p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable; + p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable; - if (data.init) |initst| { - data.init = p.visitForLoopInit(initst, false); + if (data.init) |initst| { + data.init = p.visitForLoopInit(initst, false); + } + + if (data.test_) |test_| { + data.test_ = SideEffects.simplifyBoolean(p, p.visitExpr(test_)); + + const result = SideEffects.toBoolean(p, data.test_.?.data); + if (result.ok and result.value and result.side_effects == .no_side_effects) { + data.test_ = null; } + } - if (data.test_) |test_| { - data.test_ = SideEffects.simplifyBoolean(p, p.visitExpr(test_)); + if (data.update) |update| { + data.update = p.visitExpr(update); + } - const result = SideEffects.toBoolean(p, data.test_.?.data); - if (result.ok and result.value and result.side_effects == .no_side_effects) { - data.test_ = null; - } - } + data.body = p.visitLoopBody(data.body); - if (data.update) |update| { - data.update = p.visitExpr(update); - } - - data.body = p.visitLoopBody(data.body); - - // Potentially relocate "var" declarations to the top level. Note that this - // must be done inside the scope of the for loop or they won't be relocated. - if (data.init) |init_| { - if (init_.data == .s_local and init_.data.s_local.kind == .k_var) { - const relocate = p.maybeRelocateVarsToTopLevel(init_.data.s_local.decls.slice(), .normal); + if (data.init) |for_init| { + if (for_init.data == .s_local) { + // Potentially relocate "var" declarations to the top level. Note that this + // must be done inside the scope of the for loop or they won't be relocated. 
+ if (for_init.data.s_local.kind == .k_var) { + const relocate = p.maybeRelocateVarsToTopLevel(for_init.data.s_local.decls.slice(), .normal); if (relocate.stmt) |relocated| { data.init = relocated; } } } - p.popScope(); } + + p.popScope(); }, .s_for_in => |data| { { @@ -18800,7 +19034,6 @@ fn NewParser_( // Check for a variable initializer if (data.init.data == .s_local and data.init.data.s_local.kind == .k_var) { - // Lower for-in variable initializers in case the output is used in strict mode var local = data.init.data.s_local; if (local.decls.len == 1) { @@ -18818,9 +19051,7 @@ fn NewParser_( } } } - } - if (data.init.data == .s_local and data.init.data.s_local.kind == .k_var) { const relocate = p.maybeRelocateVarsToTopLevel(data.init.data.s_local.decls.slice(), RelocateVars.Mode.for_in_or_for_of); if (relocate.stmt) |relocated_stmt| { data.init = relocated_stmt; @@ -18835,10 +19066,56 @@ fn NewParser_( data.value = p.visitExpr(data.value); data.body = p.visitLoopBody(data.body); - if (data.init.data == .s_local and data.init.data.s_local.kind == .k_var) { - const relocate = p.maybeRelocateVarsToTopLevel(data.init.data.s_local.decls.slice(), RelocateVars.Mode.for_in_or_for_of); - if (relocate.stmt) |relocated_stmt| { - data.init = relocated_stmt; + if (data.init.data == .s_local) { + if (data.init.data.s_local.kind == .k_var) { + const relocate = p.maybeRelocateVarsToTopLevel(data.init.data.s_local.decls.slice(), RelocateVars.Mode.for_in_or_for_of); + if (relocate.stmt) |relocated_stmt| { + data.init = relocated_stmt; + } + } + + // Handle "for (using x of y)" and "for (await using x of y)" + if (data.init.data == .s_local and data.init.data.s_local.kind.isUsing() and p.options.features.lower_using) { + // fn lowerUsingDeclarationInForOf() + const loc = data.init.loc; + const init2 = data.init.data.s_local; + const binding = init2.decls.at(0).binding; + var id = binding.data.b_identifier; + const temp_ref = p.generateTempRef(p.symbols.items[id.ref.inner_index].original_name); + + const first = p.s(S.Local{ + .kind = init2.kind, + .decls = bindings: { + const decls = p.allocator.alloc(G.Decl, 1) catch bun.outOfMemory(); + decls[0] = .{ + .binding = p.b(B.Identifier{ .ref = id.ref }, loc), + .value = p.newExpr(E.Identifier{ .ref = temp_ref }, loc), + }; + break :bindings G.Decl.List.init(decls); + }, + }, loc); + + const length = if (data.body.data == .s_block) data.body.data.s_block.stmts.len else 1; + const statements = p.allocator.alloc(Stmt, 1 + length) catch bun.outOfMemory(); + statements[0] = first; + if (data.body.data == .s_block) { + @memcpy(statements[1..], data.body.data.s_block.stmts); + } else { + statements[1] = data.body; + } + + var ctx = try P.LowerUsingDeclarationsContext.init(p); + ctx.scanStmts(p, statements); + const visited_stmts = ctx.finalize(p, statements, p.will_wrap_module_in_try_catch_for_using and p.current_scope.parent == null); + if (data.body.data == .s_block) { + data.body.data.s_block.stmts = visited_stmts.items; + } else { + data.body = p.s(S.Block{ + .stmts = visited_stmts.items, + }, loc); + } + id.ref = temp_ref; + init2.kind = .k_const; } } }, @@ -20496,7 +20773,7 @@ fn NewParser_( ) catch unreachable; } else { const name_str: []const u8 = if (default_name_ref.isNull()) "_this" else "_default"; - shadow_ref = p.newSymbol(.cconst, name_str) catch unreachable; + shadow_ref = p.newSymbol(.constant, name_str) catch unreachable; } p.recordDeclaredSymbol(shadow_ref) catch unreachable; @@ -20758,7 +21035,7 @@ fn NewParser_( } // Try separating the list for 
appending, so that it's not a pointer. - fn visitStmts(p: *P, stmts: *ListManaged(Stmt), _: StmtsKind) anyerror!void { + fn visitStmts(p: *P, stmts: *ListManaged(Stmt), kind: StmtsKind) anyerror!void { if (only_scan_imports_and_do_not_visit) { @compileError("only_scan_imports_and_do_not_visit must not run this."); } @@ -20766,7 +21043,6 @@ fn NewParser_( const initial_scope: *Scope = if (comptime Environment.allow_assert) p.current_scope else undefined; { - // Save the current control-flow liveness. This represents if we are // currently inside an "if (false) { ... }" block. const old_is_control_flow_dead = p.is_control_flow_dead; @@ -20966,6 +21242,13 @@ fn NewParser_( } } + // Lower using declarations + if (kind != .switch_stmt and p.shouldLowerUsingDeclarations(stmts.items)) { + var ctx = try LowerUsingDeclarationsContext.init(p); + ctx.scanStmts(p, stmts.items); + stmts.* = ctx.finalize(p, stmts.items, p.current_scope.parent == null); + } + if (comptime Environment.allow_assert) // if this fails it means that scope pushing/popping is not balanced assert(p.current_scope == initial_scope); @@ -20975,7 +21258,6 @@ fn NewParser_( } if (p.current_scope.parent != null and !p.current_scope.contains_direct_eval) { - // Remove inlined constants now that we know whether any of these statements // contained a direct eval() or not. This can't be done earlier when we // encounter the constant because we haven't encountered the eval() yet. @@ -21512,6 +21794,312 @@ fn NewParser_( p.log.addRangeError(p.source, logger.Range{ .loc = comma_after_spread, .len = 1 }, "Unexpected \",\" after rest pattern") catch unreachable; } + /// When not transpiling we dont use the renamer, so our solution is to generate really + /// hard to collide with variables, instead of actually making things collision free + pub fn generateTempRef(p: *P, default_name: ?string) Ref { + var scope = p.current_scope; + + const name = (if (p.willUseRenamer()) default_name else null) orelse brk: { + p.temp_ref_count += 1; + break :brk std.fmt.allocPrint(p.allocator, "__bun_temp_ref_{x}$", .{p.temp_ref_count}) catch bun.outOfMemory(); + }; + const ref = p.newSymbol(.other, name) catch bun.outOfMemory(); + + p.temp_refs_to_declare.append(p.allocator, .{ + .ref = ref, + }) catch bun.outOfMemory(); + + scope.generated.append(p.allocator, &.{ref}) catch bun.outOfMemory(); + + return ref; + } + + fn shouldLowerUsingDeclarations(p: *const P, stmts: []Stmt) bool { + // TODO: We do not support lowering await, but when we do this needs to point to that var + const lower_await = false; + + // Check feature flags first, then iterate statements. 
+ if (!p.options.features.lower_using and !lower_await) return false; + + for (stmts) |stmt| { + if (stmt.data == .s_local and + // Need to re-check lower_using for the k_using case in case lower_await is true + ((stmt.data.s_local.kind == .k_using and p.options.features.lower_using) or + (stmt.data.s_local.kind == .k_await_using))) + { + return true; + } + } + + return false; + } + + const LowerUsingDeclarationsContext = struct { + first_using_loc: logger.Loc, + stack_ref: Ref, + has_await_using: bool, + + pub fn init(p: *P) !LowerUsingDeclarationsContext { + return LowerUsingDeclarationsContext{ + .first_using_loc = logger.Loc.Empty, + .stack_ref = p.generateTempRef("__stack"), + .has_await_using = false, + }; + } + + pub fn scanStmts(ctx: *LowerUsingDeclarationsContext, p: *P, stmts: []Stmt) void { + for (stmts) |stmt| { + switch (stmt.data) { + .s_local => |local| { + if (!local.kind.isUsing()) continue; + + if (ctx.first_using_loc.isEmpty()) { + ctx.first_using_loc = stmt.loc; + } + if (local.kind == .k_await_using) { + ctx.has_await_using = true; + } + for (local.decls.slice()) |*decl| { + if (decl.value) |*decl_value| { + const value_loc = decl_value.loc; + p.recordUsage(ctx.stack_ref); + const args = p.allocator.alloc(Expr, 3) catch bun.outOfMemory(); + args[0] = Expr{ + .data = .{ .e_identifier = .{ .ref = ctx.stack_ref } }, + .loc = stmt.loc, + }; + args[1] = decl_value.*; + // 1. always pass this param for hopefully better jit performance + // 2. pass 1 or 0 to be shorter than `true` or `false` + args[2] = Expr{ + .data = .{ .e_number = .{ .value = if (local.kind == .k_await_using) 1 else 0 } }, + .loc = stmt.loc, + }; + decl.value = p.callRuntime(value_loc, "__using", args); + } + } + if (p.will_wrap_module_in_try_catch_for_using and p.current_scope.kind == .entry) { + local.kind = .k_var; + } else { + local.kind = .k_const; + } + }, + else => {}, + } + } + } + + pub fn finalize(ctx: *LowerUsingDeclarationsContext, p: *P, stmts: []Stmt, should_hoist_fns: bool) ListManaged(Stmt) { + var result = ListManaged(Stmt).init(p.allocator); + var exports = ListManaged(js_ast.ClauseItem).init(p.allocator); + var end: u32 = 0; + for (stmts) |stmt| { + switch (stmt.data) { + .s_directive, + .s_import, + .s_export_from, + .s_export_star, + => { + // These can't go in a try/catch block + result.append(stmt) catch bun.outOfMemory(); + continue; + }, + + .s_export_clause => |data| { + // Merge export clauses together + exports.appendSlice(data.items) catch bun.outOfMemory(); + continue; + }, + + .s_function => { + if (should_hoist_fns) { + // Hoist function declarations for cross-file ESM references + result.append(stmt) catch bun.outOfMemory(); + continue; + } + }, + + .s_local => |local| { + // If any of these are exported, turn it into a "var" and add export clauses + if (local.is_export) { + local.is_export = false; + for (local.decls.slice()) |decl| { + if (decl.binding.data == .b_identifier) { + const identifier = decl.binding.data.b_identifier; + exports.append(js_ast.ClauseItem{ + .name = .{ + .loc = decl.binding.loc, + .ref = identifier.ref, + }, + .alias = p.symbols.items[identifier.ref.inner_index].original_name, + .alias_loc = decl.binding.loc, + }) catch bun.outOfMemory(); + local.kind = .k_var; + } + } + } + }, + + else => {}, + } + + stmts[end] = stmt; + end += 1; + } + + // TODO(@paperdave): leak + const non_exported_statements = stmts[0..end]; + + const caught_ref = p.generateTempRef("_catch"); + const err_ref = p.generateTempRef("_err"); + const has_err_ref = 
p.generateTempRef("_hasErr"); + + var scope = p.current_scope; + while (!scope.kindStopsHoisting()) { + scope = scope.parent.?; + } + + const is_top_level = scope == p.module_scope; + scope.generated.append(p.allocator, &.{ + ctx.stack_ref, + caught_ref, + err_ref, + has_err_ref, + }) catch bun.outOfMemory(); + p.declared_symbols.ensureUnusedCapacity( + p.allocator, + // 5 to include the _promise decl later on: + if (ctx.has_await_using) 5 else 4, + ) catch bun.outOfMemory(); + p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = ctx.stack_ref }); + p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = caught_ref }); + p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = err_ref }); + p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = has_err_ref }); + + const loc = ctx.first_using_loc; + const call_dispose = call_dispose: { + p.recordUsage(ctx.stack_ref); + p.recordUsage(err_ref); + p.recordUsage(has_err_ref); + const args = p.allocator.alloc(Expr, 3) catch bun.outOfMemory(); + args[0] = Expr{ + .data = .{ .e_identifier = .{ .ref = ctx.stack_ref } }, + .loc = loc, + }; + args[1] = Expr{ + .data = .{ .e_identifier = .{ .ref = err_ref } }, + .loc = loc, + }; + args[2] = Expr{ + .data = .{ .e_identifier = .{ .ref = has_err_ref } }, + .loc = loc, + }; + break :call_dispose p.callRuntime(loc, "__callDispose", args); + }; + + const finally_stmts = finally: { + if (ctx.has_await_using) { + const promise_ref = p.generateTempRef("_promise"); + scope.generated.append(p.allocator, &.{promise_ref}) catch bun.outOfMemory(); + p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = promise_ref }); + + const promise_ref_expr = p.newExpr(E.Identifier{ .ref = promise_ref }, loc); + + const await_expr = p.newExpr(E.Await{ + .value = promise_ref_expr, + }, loc); + p.recordUsage(promise_ref); + + const statements = p.allocator.alloc(Stmt, 2) catch bun.outOfMemory(); + statements[0] = p.s(S.Local{ + .decls = decls: { + const decls = p.allocator.alloc(Decl, 1) catch bun.outOfMemory(); + decls[0] = .{ + .binding = p.b(B.Identifier{ .ref = promise_ref }, loc), + .value = call_dispose, + }; + break :decls G.Decl.List.init(decls); + }, + }, loc); + + // The "await" must not happen if an error was thrown before the + // "await using", so we conditionally await here: + // + // var promise = __callDispose(stack, error, hasError); + // promise && await promise; + // + statements[1] = p.s(S.SExpr{ + .value = p.newExpr(E.Binary{ + .op = .bin_logical_and, + .left = promise_ref_expr, + .right = await_expr, + }, loc), + }, loc); + + break :finally statements; + } else { + const single = p.allocator.alloc(Stmt, 1) catch bun.outOfMemory(); + single[0] = p.s(S.SExpr{ .value = call_dispose }, call_dispose.loc); + break :finally single; + } + }; + + // Wrap everything in a try/catch/finally block + p.recordUsage(caught_ref); + result.ensureUnusedCapacity(2) catch bun.outOfMemory(); + result.appendAssumeCapacity(p.s(S.Local{ + .decls = decls: { + const decls = p.allocator.alloc(Decl, 1) catch bun.outOfMemory(); + decls[0] = .{ + .binding = p.b(B.Identifier{ .ref = ctx.stack_ref }, loc), + .value = p.newExpr(E.Array{}, loc), + }; + break :decls G.Decl.List.init(decls); + }, + .kind = .k_let, + }, loc)); + result.appendAssumeCapacity(p.s(S.Try{ + .body = non_exported_statements, + .body_loc = loc, + .catch_ = .{ + .binding = p.b(B.Identifier{ .ref = caught_ref }, loc), + .body = catch_body: { + const 
statements = p.allocator.alloc(Stmt, 1) catch bun.outOfMemory(); + statements[0] = p.s(S.Local{ + .decls = decls: { + const decls = p.allocator.alloc(Decl, 2) catch bun.outOfMemory(); + decls[0] = .{ + .binding = p.b(B.Identifier{ .ref = err_ref }, loc), + .value = p.newExpr(E.Identifier{ .ref = caught_ref }, loc), + }; + decls[1] = .{ + .binding = p.b(B.Identifier{ .ref = has_err_ref }, loc), + .value = p.newExpr(E.Number{ .value = 1 }, loc), + }; + break :decls G.Decl.List.init(decls); + }, + }, loc); + break :catch_body statements; + }, + .body_loc = loc, + .loc = loc, + }, + .finally = .{ + .loc = loc, + .stmts = finally_stmts, + }, + }, loc)); + + if (exports.items.len > 0) { + result.appendAssumeCapacity(p.s(S.ExportClause{ + .items = exports.items, + }, loc)); + } + + return result; + } + }; + pub fn toAST( p: *P, _parts: []js_ast.Part, diff --git a/src/js_printer.zig b/src/js_printer.zig index 4d6b9fb6eb..5146a0db2f 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -4239,6 +4239,12 @@ fn NewPrinter( .k_var => { p.printDeclStmt(s.is_export, "var", s.decls.slice()); }, + .k_using => { + p.printDeclStmt(s.is_export, "using", s.decls.slice()); + }, + .k_await_using => { + p.printDeclStmt(s.is_export, "await using", s.decls.slice()); + }, } }, .s_if => |s| { @@ -5009,6 +5015,12 @@ fn NewPrinter( .k_const => { p.printDecls("const", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true })); }, + .k_using => { + p.printDecls("using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true })); + }, + .k_await_using => { + p.printDecls("await using", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true })); + }, } }, // for(;) diff --git a/src/runtime.bun.js b/src/runtime.bun.js new file mode 100644 index 0000000000..a94eab4049 --- /dev/null +++ b/src/runtime.bun.js @@ -0,0 +1,36 @@ +export * from "./runtime"; + +// TODO: these are duplicated from bundle_v2.js, can we ... not do that? +export var __using = (stack, value, async) => { + if (value != null) { + if (typeof value !== "object" && typeof value !== "function") + throw TypeError('Object expected to be assigned to "using" declaration'); + let dispose; + if (async) dispose = value[Symbol.asyncDispose]; + if (dispose === void 0) dispose = value[Symbol.dispose]; + if (typeof dispose !== "function") throw TypeError("Object not disposable"); + stack.push([async, dispose, value]); + } else if (async) { + stack.push([async]); + } + return value; +}; + +export var __callDispose = (stack, error, hasError) => { + let fail = e => + (error = hasError + ? 
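// An earlier error is already pending: wrap the new disposal error in a
// SuppressedError so the pending error is kept as `.suppressed` and the new
// one becomes `.error` (matching the SuppressedError test below); the `:`
// branch simply records the first error and flips `hasError`.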
new SuppressedError(e, error, "An error was suppressed during disposal") + : ((hasError = true), e)), + next = it => { + while ((it = stack.pop())) { + try { + var result = it[1] && it[1].call(it[2]); + if (it[0]) return Promise.resolve(result).then(next, e => (fail(e), next())); + } catch (e) { + fail(e); + } + } + if (hasError) throw error; + }; + return next(); +}; diff --git a/src/runtime.footer.bun.js b/src/runtime.footer.bun.js deleted file mode 100644 index 95e99b9ba2..0000000000 --- a/src/runtime.footer.bun.js +++ /dev/null @@ -1,19 +0,0 @@ -export var $$m = BUN_RUNTIME.$$m; -export var __markAsModule = BUN_RUNTIME.__markAsModule; -export var $$lzy = BUN_RUNTIME.$$lzy; -export var __toModule = BUN_RUNTIME.__toModule; -export var __commonJS = BUN_RUNTIME.__commonJS; -export var __name = BUN_RUNTIME.__name; -export var __export = BUN_RUNTIME.__export; -export var __reExport = BUN_RUNTIME.__reExport; -export var __cJS2eSM = BUN_RUNTIME.__cJS2eSM; -export var regeneratorRuntime = BUN_RUNTIME.regeneratorRuntime; -export var __exportValue = BUN_RUNTIME.__exportValue; -export var __exportDefault = BUN_RUNTIME.__exportDefault; -export var __merge = BUN_RUNTIME.__merge; -export var __legacyDecorateClassTS = BUN_RUNTIME.__legacyDecorateClassTS; -export var __legacyDecorateParamTS = BUN_RUNTIME.__legacyDecorateParamTS; -export var __legacyMetadataTS = BUN_RUNTIME.__legacyMetadataTS; -export var $$bun_runtime_json_parse = JSON.parse; -export var __internalIsCommonJSNamespace = BUN_RUNTIME.__internalIsCommonJSNamespace; -export var $$typeof = BUN_RUNTIME.$$typeof; diff --git a/src/runtime.footer.js b/src/runtime.footer.js deleted file mode 100644 index 48c86f47bc..0000000000 --- a/src/runtime.footer.js +++ /dev/null @@ -1,29 +0,0 @@ -// --- -// Public exports from runtime -// Compatible with bun's Runtime Environment and web browsers. -export var $$m = "$primordials" in globalThis ? 
$primordials.require : BUN_RUNTIME.$$m; -export var __HMRModule = BUN_RUNTIME.__HMRModule; -export var __FastRefreshModule = BUN_RUNTIME.__FastRefreshModule; -export var __HMRClient = BUN_RUNTIME.__HMRClient; -export var __markAsModule = BUN_RUNTIME.__markAsModule; -export var $$lzy = BUN_RUNTIME.$$lzy; -export var __toModule = BUN_RUNTIME.__toModule; -export var __commonJS = BUN_RUNTIME.__commonJS; -export var __require = BUN_RUNTIME.__require; -export var __name = BUN_RUNTIME.__name; -export var __export = BUN_RUNTIME.__export; -export var __reExport = BUN_RUNTIME.__reExport; -export var __cJS2eSM = BUN_RUNTIME.__cJS2eSM; -export var regeneratorRuntime = BUN_RUNTIME.regeneratorRuntime; -export var __exportValue = BUN_RUNTIME.__exportValue; -export var __exportDefault = BUN_RUNTIME.__exportDefault; -export var __merge = BUN_RUNTIME.__merge; -export var __legacyDecorateClassTS = BUN_RUNTIME.__legacyDecorateClassTS; -export var __legacyDecorateParamTS = BUN_RUNTIME.__legacyDecorateParamTS; -export var __legacyMetadataTS = BUN_RUNTIME.__legacyMetadataTS; -export var $$bun_runtime_json_parse = JSON.parse; -export var __internalIsCommonJSNamespace = BUN_RUNTIME.__internalIsCommonJSNamespace; - -globalThis.__internalIsCommonJSNamespace ||= __internalIsCommonJSNamespace; -globalThis.require ||= BUN_RUNTIME.__require; -globalThis.self ||= globalThis; diff --git a/src/runtime.footer.node.js b/src/runtime.footer.node.js deleted file mode 100644 index 4318195c58..0000000000 --- a/src/runtime.footer.node.js +++ /dev/null @@ -1,85 +0,0 @@ -import * as __$module from "node:module"; -export var $$m = BUN_RUNTIME.$$m; -export var __markAsModule = BUN_RUNTIME.__markAsModule; -export var $$lzy = BUN_RUNTIME.$$lzy; -export var __toModule = BUN_RUNTIME.__toModule; -export var __commonJS = BUN_RUNTIME.__commonJS; -export var __require = BUN_RUNTIME.__require; -export var __name = BUN_RUNTIME.__name; -export var __export = BUN_RUNTIME.__export; -export var __reExport = BUN_RUNTIME.__reExport; -export var __cJS2eSM = BUN_RUNTIME.__cJS2eSM; -export var regeneratorRuntime = BUN_RUNTIME.regeneratorRuntime; -export var __exportValue = BUN_RUNTIME.__exportValue; -export var __exportDefault = BUN_RUNTIME.__exportDefault; -export var __legacyDecorateClassTS = BUN_RUNTIME.__legacyDecorateClassTS; -export var __legacyDecorateParamTS = BUN_RUNTIME.__legacyDecorateParamTS; -export var __legacyMetadataTS = BUN_RUNTIME.__legacyMetadataTS; -export var $$bun_runtime_json_parse = JSON.parse; -export var __internalIsCommonJSNamespace = BUN_RUNTIME.__internalIsCommonJSNamespace; -var require = __$module.createRequire(import.meta.url); -var process = - globalThis.process || - new Proxy( - {}, - { - get: function (target, prop, receiver) { - var _process = require("process"); - target = process = _process; - return Reflect.get(_process, prop, receiver); - }, - apply: function (target, thisArg, argumentsList) { - var _process = require("process"); - target = process = _process; - return Reflect.apply(target, thisArg, argumentsList); - }, - defineProperty(target, key, descriptor) { - var _process = require("process"); - target = process = _process; - return Reflect.defineProperty(_process, key, descriptor); - }, - construct: function (target, args) { - var _process = require("process"); - target = process = _process; - return Reflect.construct(_process, args); - }, - has: function (target, prop, receiver) { - var _process = require("process"); - target = process = _process; - return Reflect.has(_process, prop, receiver); - }, - }, - 
); - -var Buffer = - globalThis.Buffer || - new Proxy( - {}, - { - get: function (target, prop, receiver) { - var NewBuffer = require("buffer").Buffer; - target = Buffer = NewBuffer; - return Reflect.get(NewBuffer, prop, receiver); - }, - apply: function (target, thisArg, argumentsList) { - var NewBuffer = require("buffer").Buffer; - target = Buffer = NewBuffer; - return Reflect.apply(target, thisArg, argumentsList); - }, - defineProperty(target, key, descriptor) { - var NewBuffer = require("buffer").Buffer; - target = Buffer = NewBuffer; - return Reflect.defineProperty(NewBuffer, key, descriptor); - }, - construct: function (target, args) { - var NewBuffer = require("buffer").Buffer; - target = Buffer = NewBuffer; - return Reflect.construct(NewBuffer, args); - }, - has: function (target, prop, receiver) { - var NewBuffer = require("buffer").Buffer; - target = Buffer = NewBuffer; - return Reflect.has(NewBuffer, prop, receiver); - }, - }, - ); diff --git a/src/runtime.footer.with-refresh.js b/src/runtime.footer.with-refresh.js deleted file mode 100644 index 9aa9472e9f..0000000000 --- a/src/runtime.footer.with-refresh.js +++ /dev/null @@ -1,29 +0,0 @@ -// --- -// Public exports from runtime -// Compatible with bun's Runtime Environment and web browsers. -export var $$m = "$primordials" in globalThis ? $primordials.require : BUN_RUNTIME.$$m; -export var __HMRModule = BUN_RUNTIME.__HMRModule; -export var __FastRefreshModule = BUN_RUNTIME.__FastRefreshModule; -export var __HMRClient = BUN_RUNTIME.__HMRClient; -export var __markAsModule = BUN_RUNTIME.__markAsModule; -export var $$lzy = BUN_RUNTIME.$$lzy; -export var __toModule = BUN_RUNTIME.__toModule; -export var __commonJS = BUN_RUNTIME.__commonJS; -export var __require = BUN_RUNTIME.__require; -export var __name = BUN_RUNTIME.__name; -export var __export = BUN_RUNTIME.__export; -export var __reExport = BUN_RUNTIME.__reExport; -export var __cJS2eSM = BUN_RUNTIME.__cJS2eSM; -export var regeneratorRuntime = BUN_RUNTIME.regeneratorRuntime; -export var __exportValue = BUN_RUNTIME.__exportValue; -export var __exportDefault = BUN_RUNTIME.__exportDefault; -export var __legacyDecorateClassTS = BUN_RUNTIME.__legacyDecorateClassTS; -export var __legacyDecorateParamTS = BUN_RUNTIME.__legacyDecorateParamTS; -export var __legacyMetadataTS = BUN_RUNTIME.__legacyMetadataTS; -export var $$bun_runtime_json_parse = JSON.parse; -export var __FastRefreshRuntime = BUN_RUNTIME.__FastRefreshRuntime; -export var __internalIsCommonJSNamespace = BUN_RUNTIME.__internalIsCommonJSNamespace; - -globalThis.__internalIsCommonJSNamespace ||= __internalIsCommonJSNamespace; -globalThis.require ||= BUN_RUNTIME.__require; -globalThis.self ||= globalThis; diff --git a/src/runtime.zig b/src/runtime.zig index 424aa74a10..e80a524422 100644 --- a/src/runtime.zig +++ b/src/runtime.zig @@ -197,71 +197,7 @@ pub const Fallback = struct { }; pub const Runtime = struct { - pub const ProdSourceContent = @embedFile("./runtime.out.js"); - pub const ProdSourceContentNode = @embedFile("./runtime.node.out.js"); - pub const ProdSourceContentBun = @embedFile("./runtime.bun.out.js"); - pub const ProdSourceContentWithRefresh = @embedFile("./runtime.out.refresh.js"); - - pub inline fn sourceContentWithoutRefresh() string { - if (comptime Environment.isDebug) { - const dirpath = comptime bun.Environment.base_path ++ std.fs.path.dirname(@src().file).?; - var env = std.process.getEnvMap(default_allocator) catch unreachable; - - const dir = std.mem.replaceOwned( - u8, - default_allocator, - dirpath, - 
"jarred", - env.get("USER").?, - ) catch unreachable; - const runtime_path = std.fs.path.join(default_allocator, &[_]string{ dir, "runtime.out.js" }) catch unreachable; - const file = std.fs.openFileAbsolute(runtime_path, .{}) catch return embedDebugFallback( - "Missing bun/src/runtime.out.js. " ++ "Please run \"make runtime_js_dev\"", - ProdSourceContent, - ); - defer file.close(); - return file.readToEndAlloc(default_allocator, file.getEndPos() catch 0) catch unreachable; - } else { - return ProdSourceContent; - } - } - - pub inline fn sourceContent(with_refresh: bool) string { - if (with_refresh) return sourceContentWithRefresh(); - return sourceContentWithoutRefresh(); - } - - pub inline fn sourceContentNode() string { - return ProdSourceContentNode; - } - - pub inline fn sourceContentBun() string { - return ProdSourceContentBun; - } - - pub inline fn sourceContentWithRefresh() string { - if (comptime Environment.isDebug) { - const dirpath = comptime bun.Environment.base_path ++ std.fs.path.dirname(@src().file).?; - var env = std.process.getEnvMap(default_allocator) catch unreachable; - - const dir = std.mem.replaceOwned( - u8, - default_allocator, - dirpath, - "jarred", - env.get("USER").?, - ) catch unreachable; - const runtime_path = std.fs.path.join(default_allocator, &[_]string{ dir, "runtime.out.refresh.js" }) catch unreachable; - const file = std.fs.openFileAbsolute(runtime_path, .{}) catch return embedDebugFallback( - "Missing bun/src/runtime.out.refresh.js. " ++ "Please run \"make runtime_js_dev\"", - ProdSourceContentWithRefresh, - ); - defer file.close(); - return file.readToEndAlloc(default_allocator, file.getEndPos() catch 0) catch unreachable; - } else { - return ProdSourceContentWithRefresh; - } - } + pub const source_code = @embedFile("./runtime.out.js"); pub const version_hash = @import("build_options").runtime_js_version; var version_hash_int: u32 = 0; @@ -333,6 +269,9 @@ pub const Runtime = struct { runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = null, + // TODO: make this a bitset of all unsupported features + lower_using: bool = true, + const hash_fields_for_runtime_transpiler = .{ .top_level_await, .auto_import_jsx, @@ -349,6 +288,7 @@ pub const Runtime = struct { .dont_bundle_twice, .commonjs_at_runtime, .emit_decorator_metadata, + .lower_using, // note that we do not include .inject_jest_globals, as we bail out of the cache entirely if this is true }; @@ -412,6 +352,8 @@ pub const Runtime = struct { __legacyDecorateParamTS: ?GeneratedSymbol = null, __legacyMetadataTS: ?GeneratedSymbol = null, @"$$typeof": ?GeneratedSymbol = null, + __using: ?GeneratedSymbol = null, + __callDispose: ?GeneratedSymbol = null, pub const all = [_][]const u8{ // __HMRClient goes first @@ -436,6 +378,8 @@ pub const Runtime = struct { "__legacyDecorateParamTS", "__legacyMetadataTS", "$$typeof", + "__using", + "__callDispose", }; const all_sorted: [all.len]string = brk: { @setEvalBranchQuota(1000000); @@ -483,104 +427,9 @@ pub const Runtime = struct { defer this.i += 1; switch (this.i) { - 0 => { - if (@field(this.runtime_imports, all[0])) |val| { - return Entry{ .key = 0, .value = val.ref }; - } - }, - 1 => { - if (@field(this.runtime_imports, all[1])) |val| { - return Entry{ .key = 1, .value = val.ref }; - } - }, - 2 => { - if (@field(this.runtime_imports, all[2])) |val| { - return Entry{ .key = 2, .value = val.ref }; - } - }, - 3 => { - if (@field(this.runtime_imports, all[3])) |val| { - return Entry{ .key = 3, .value = val.ref }; - } - }, - 4 => { - if 
(@field(this.runtime_imports, all[4])) |val| { - return Entry{ .key = 4, .value = val.ref }; - } - }, - 5 => { - if (@field(this.runtime_imports, all[5])) |val| { - return Entry{ .key = 5, .value = val.ref }; - } - }, - 6 => { - if (@field(this.runtime_imports, all[6])) |val| { - return Entry{ .key = 6, .value = val.ref }; - } - }, - 7 => { - if (@field(this.runtime_imports, all[7])) |val| { - return Entry{ .key = 7, .value = val.ref }; - } - }, - 8 => { - if (@field(this.runtime_imports, all[8])) |val| { - return Entry{ .key = 8, .value = val.ref }; - } - }, - 9 => { - if (@field(this.runtime_imports, all[9])) |val| { - return Entry{ .key = 9, .value = val.ref }; - } - }, - 10 => { - if (@field(this.runtime_imports, all[10])) |val| { - return Entry{ .key = 10, .value = val.ref }; - } - }, - 11 => { - if (@field(this.runtime_imports, all[11])) |val| { - return Entry{ .key = 11, .value = val.ref }; - } - }, - 12 => { - if (@field(this.runtime_imports, all[12])) |val| { - return Entry{ .key = 12, .value = val.ref }; - } - }, - 13 => { - if (@field(this.runtime_imports, all[13])) |val| { - return Entry{ .key = 13, .value = val.ref }; - } - }, - 14 => { - if (@field(this.runtime_imports, all[14])) |val| { - return Entry{ .key = 14, .value = val.ref }; - } - }, - 15 => { - if (@field(this.runtime_imports, all[15])) |val| { - return Entry{ .key = 15, .value = val.ref }; - } - }, - 16 => { - if (@field(this.runtime_imports, all[16])) |val| { - return Entry{ .key = 16, .value = val.ref }; - } - }, - 17 => { - if (@field(this.runtime_imports, all[17])) |val| { - return Entry{ .key = 17, .value = val.ref }; - } - }, - 18 => { - if (@field(this.runtime_imports, all[18])) |val| { - return Entry{ .key = 18, .value = val.ref }; - } - }, - 19 => { - if (@field(this.runtime_imports, all[19])) |val| { - return Entry{ .key = 19, .value = val.ref }; + inline 0...21 => |t| { + if (@field(this.runtime_imports, all[t])) |val| { + return Entry{ .key = t, .value = val.ref }; } }, else => { @@ -627,26 +476,7 @@ pub const Runtime = struct { key: anytype, ) ?Ref { return switch (key) { - 0 => (@field(imports, all[0]) orelse return null).ref, - 1 => (@field(imports, all[1]) orelse return null).ref, - 2 => (@field(imports, all[2]) orelse return null).ref, - 3 => (@field(imports, all[3]) orelse return null).ref, - 4 => (@field(imports, all[4]) orelse return null).ref, - 5 => (@field(imports, all[5]) orelse return null).ref, - 6 => (@field(imports, all[6]) orelse return null).ref, - 7 => (@field(imports, all[7]) orelse return null).ref, - 8 => (@field(imports, all[8]) orelse return null).ref, - 9 => (@field(imports, all[9]) orelse return null).ref, - 10 => (@field(imports, all[10]) orelse return null).ref, - 11 => (@field(imports, all[11]) orelse return null).ref, - 12 => (@field(imports, all[12]) orelse return null).ref, - 13 => (@field(imports, all[13]) orelse return null).ref, - 14 => (@field(imports, all[14]) orelse return null).ref, - 15 => (@field(imports, all[15]) orelse return null).ref, - 16 => (@field(imports, all[16]) orelse return null).ref, - 17 => (@field(imports, all[17]) orelse return null).ref, - 18 => (@field(imports, all[18]) orelse return null).ref, - 19 => (@field(imports, all[19]) orelse return null).ref, + inline 0...21 => |t| (@field(imports, all[t]) orelse return null).ref, else => null, }; } diff --git a/src/runtime/index-without-hmr.ts b/src/runtime/index-without-hmr.ts index 5fcebbc285..514d6a23a0 100644 --- a/src/runtime/index-without-hmr.ts +++ b/src/runtime/index-without-hmr.ts @@ -1,3 
+1,3 @@ // @ts-nocheck export * from "../runtime.js"; -export { default as regeneratorRuntime } from "./regenerator"; +// export { default as regeneratorRuntime } from "./regenerator"; diff --git a/test/js/web/explicit-resource-management.test.ts b/test/js/web/explicit-resource-management.test.ts new file mode 100644 index 0000000000..c85513afb3 --- /dev/null +++ b/test/js/web/explicit-resource-management.test.ts @@ -0,0 +1,141 @@ +// tbh, we should have more tests for this +test('Symbol.dispose exists', () => { + expect(Symbol.dispose).toBeDefined() + expect(Symbol.dispose).toBeSymbol() + expect(Symbol.asyncDispose).toBeDefined() + expect(Symbol.asyncDispose).toBeSymbol() +}); + +test('SuppressedError works', () => { + const e = new SuppressedError(new Error("this is error"), new Error('this was suppressed'), 'this is a message'); + expect(e.message).toBe('this is a message'); + expect(() => { throw e.suppressed }).toThrow('this was suppressed'); + expect(() => { throw e.error }).toThrow('this is error'); +}) + +let disposeOrder = 0; +function useWithAsync() { + return { + status: 'none', + disposeOrder: -1, + [Symbol.dispose]() { + this.status = 'disposed'; + this.disposeOrder = disposeOrder++; + }, + [Symbol.asyncDispose]() { + this.status = 'async-disposed'; + this.disposeOrder = disposeOrder++; + } + } +} + +test('using syntax works and doesnt collide with user symbols', () => { + disposeOrder = 0; + { + let __using = 'break'; + let __callDispose = function () { + throw new Error('should not be called'); + }; + let __stack = { + push: () => { + throw new Error('stack corruption'); + } + } + + const a1 = useWithAsync(); + { + using u1 = a1; + expect(u1.status).toBe('none'); + } + expect(a1.status).toBe('disposed'); + } + + { + const a1 = useWithAsync(); + const a2 = useWithAsync(); + const a3 = useWithAsync(); + { + using u1 = a1, u2 = a2; + { + using u3 = a3; + expect(u3.status).toBe('none'); + } + expect(u1.status).toBe('none'); + expect(u2.status).toBe('none'); + expect(a3.status).toBe('disposed'); + } + expect(a1.status).toBe('disposed'); + expect(a2.status).toBe('disposed'); + + expect(a3.disposeOrder).toBe(1); + expect(a2.disposeOrder).toBe(2); + expect(a1.disposeOrder).toBe(3); + } + + const a1 = useWithAsync(); + { + using u1 = a1; + { + var __stack = 1; + var _catch = 1; + var _err = 1; + var _hasErr = 1; + } + } +}) + +test('await using syntax works and doesnt collide with user symbols', async () => { + disposeOrder = 0; + { + let __using = 'break'; + let __callDispose = function () { + throw new Error('should not be called'); + }; + let __stack = { + push: () => { + throw new Error('stack corruption'); + } + } + + const a1 = useWithAsync(); + { + using u1 = a1; + expect(u1.status).toBe('none'); + } + expect(a1.status).toBe('disposed'); + } + + { + const a1 = useWithAsync(); + const a2 = useWithAsync(); + const a3 = useWithAsync(); + { + using u1 = a1 + await using u2 = a2; + { + using u3 = a3; + expect(u3.status).toBe('none'); + } + expect(u1.status).toBe('none'); + expect(u2.status).toBe('none'); + expect(a3.status).toBe('disposed'); + } + expect(a1.status).toBe('disposed'); + expect(a2.status).toBe('async-disposed'); + + expect(a3.disposeOrder).toBe(1); + expect(a2.disposeOrder).toBe(2); + expect(a1.disposeOrder).toBe(3); + } + + const a1 = useWithAsync(); + { + await using u1 = a1; + { + var __stack = 1; + var _catch = 1; + var _err = 1; + var _hasErr = 1; + } + } +}) diff --git a/test/transpiler/__snapshots__/transpiler.test.js.snap 
b/test/transpiler/__snapshots__/transpiler.test.js.snap new file mode 100644 index 0000000000..5f19f866fe --- /dev/null +++ b/test/transpiler/__snapshots__/transpiler.test.js.snap @@ -0,0 +1,183 @@ +// Bun Snapshot v1, https://goo.gl/fbAQLP + +exports[`Bun.Transpiler using statements work right 1`] = ` +"let __bun_temp_ref_1$ = []; +try { +const x = __using(__bun_temp_ref_1$, a, 0); +} catch (__bun_temp_ref_2$) { +var __bun_temp_ref_3$ = __bun_temp_ref_2$, __bun_temp_ref_4$ = 1; +} finally { +__callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); +}" +`; + +exports[`Bun.Transpiler using statements work right 2`] = ` +"let __bun_temp_ref_1$ = []; +try { +const x = __using(__bun_temp_ref_1$, a, 1); +} catch (__bun_temp_ref_2$) { +var __bun_temp_ref_3$ = __bun_temp_ref_2$, __bun_temp_ref_4$ = 1; +} finally { +var __bun_temp_ref_5$ = __callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); +__bun_temp_ref_5$ && await __bun_temp_ref_5$; +}" +`; + +exports[`Bun.Transpiler using statements work right 3`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 4`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 5`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 6`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 7`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 8`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); 
+} +}" +`; + +exports[`Bun.Transpiler using statements work right 9`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 10`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using top level 1`] = ` +"import { +__callDispose as __callDispose, +__using as __using +} from "bun:wrap"; +export function c(e) { + let __bun_temp_ref_1$ = []; + try { + const f = __using(__bun_temp_ref_1$, g(a), 0); + return f.h; + } catch (__bun_temp_ref_2$) { + var __bun_temp_ref_3$ = __bun_temp_ref_2$, __bun_temp_ref_4$ = 1; + } finally { + __callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); + } +} +import {using} from "n"; +let __bun_temp_ref_5$ = []; +try { + var a = __using(__bun_temp_ref_5$, b, 0); + var j = __using(__bun_temp_ref_5$, c(i), 1); + var k = __using(__bun_temp_ref_5$, l(m), 0); + var o = __using(__bun_temp_ref_5$, using, 0); + var p = __using(__bun_temp_ref_5$, await using, 1); + var q = r; +} catch (__bun_temp_ref_6$) { + var __bun_temp_ref_7$ = __bun_temp_ref_6$, __bun_temp_ref_8$ = 1; +} finally { + var __bun_temp_ref_9$ = __callDispose(__bun_temp_ref_5$, __bun_temp_ref_7$, __bun_temp_ref_8$); + __bun_temp_ref_9$ && await __bun_temp_ref_9$; +} + +export { + k, + q +}; +" +`; diff --git a/test/transpiler/transpiler.test.js b/test/transpiler/transpiler.test.js index 2310cfc11b..34acf6db4b 100644 --- a/test/transpiler/transpiler.test.js +++ b/test/transpiler/transpiler.test.js @@ -3321,6 +3321,57 @@ console.log("boop"); ts.expectPrinted("a", "a"); expect(new Bun.Transpiler({ loader: "ts" }).transformSync(`a`)).toBe(`a;\n`); }); + + const prepareForSnapshot = code => { + return code.replace(/(__using|__callDispose)_([a-z0-9]+)/g, '$1') + } + const expectCapturePrintedSnapshot = code => { + const result = parsed(`(async() => {${code}})()`, false, false); + expect(result).toEndWith("})();\n"); + const of_relevance = result + .slice(result.indexOf("() => {") + 9, result.lastIndexOf("})();") - 1) + .trim() + .split("\n") + .map(x => x.trim()) + .filter(x => x.length > 0) + .join("\n"); + expect(prepareForSnapshot(of_relevance)).toMatchSnapshot(); + }; + const expectPrintedSnapshot = code => { + expect(prepareForSnapshot(parsed(`${code}`, false, false))).toMatchSnapshot(); + }; + + it("using statements work right", () => { + expectCapturePrintedSnapshot(`using x = a;`); + expectCapturePrintedSnapshot(`await using x = a;`); + + expectCapturePrintedSnapshot(`for (using a of b) c(a)`); + expectCapturePrintedSnapshot(`for await (using a of b) c(a)`); + expectCapturePrintedSnapshot(`for (await using a of b) c(a)`); + expectCapturePrintedSnapshot(`for await (await using a of b) c(a)`); + + expectCapturePrintedSnapshot(`for (using a of b) { c(a); a(c) }`); + 
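// Block-bodied loop variants: per the snapshots above, the loop binding is
// rewritten to a temporary, the body is wrapped in the same try/catch/finally
// lowering, and the conditional `await` of __callDispose's result is emitted
// only for `await using`.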
expectCapturePrintedSnapshot(`for await (using a of b) { c(a); a(c) }`); + expectCapturePrintedSnapshot(`for (await using a of b) { c(a); a(c) }`); + expectCapturePrintedSnapshot(`for await (await using a of b) { c(a); a(c) }`); + }); + + it("using top level", () => { + expectPrintedSnapshot(` + using a = b; + export function c(e) { + using f = g(a); + return f.h; + } + await using j = c(i); + using k = l(m); + export { k }; + import { using } from 'n'; + using o = using; + await using p = await using; + export var q = r; + `); + }); }); describe("await can only be used inside an async function message", () => {