mirror of https://github.com/oven-sh/bun
synced 2026-02-07 01:18:51 +00:00

Compare commits: ciro/fix-a...zack/shell (20 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 1139264d64 |  |
|  | 52c2487f24 |  |
|  | ee319cf55a |  |
|  | a6ac500a53 |  |
|  | 5f6f2e9547 |  |
|  | ecf1d1b8cb |  |
|  | d1bada7afe |  |
|  | ee44ef5a03 |  |
|  | a88a94c371 |  |
|  | 18b6b94c3a |  |
|  | aeddc6b8ba |  |
|  | f0a605af2e |  |
|  | 163e83fcbb |  |
|  | 3881ee1d61 |  |
|  | 6347c7f843 |  |
|  | 33d60b671a |  |
|  | 0b15f98664 |  |
|  | f3be9b5d6f |  |
|  | 5004362713 |  |
|  | b94bfc5ca8 |  |
@@ -121,6 +121,8 @@ src/bun.js/bindings/Exception.zig
src/bun.js/bindings/FetchHeaders.zig
src/bun.js/bindings/FFI.zig
src/bun.js/bindings/generated_classes_list.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GetterSetter.zig
src/bun.js/bindings/HTTPServerAgent.zig
src/bun.js/bindings/JSArray.zig

@@ -305,6 +307,7 @@ src/bundler/linker_context/generateCodeForLazyExport.zig
src/bundler/linker_context/generateCompileResultForCssChunk.zig
src/bundler/linker_context/generateCompileResultForHtmlChunk.zig
src/bundler/linker_context/generateCompileResultForJSChunk.zig
src/bundler/linker_context/OutputFileListBuilder.zig
src/bundler/linker_context/postProcessCSSChunk.zig
src/bundler/linker_context/postProcessHTMLChunk.zig
src/bundler/linker_context/postProcessJSChunk.zig
shell-crash-repo/.prettierrc (new file, 6 lines)
@@ -0,0 +1,6 @@
{
  "semi": true,
  "singleQuote": true,
  "tabWidth": 2,
  "trailingComma": "es5"
}
shell-crash-repo/README.md (new file, 26 lines)
@@ -0,0 +1,26 @@
# Shell Crash Reproduction

This repository reproduces a Bun crash on Windows when running `bun run format`.

## Reproduction Steps

1. Navigate to this directory
2. Run `bun install` to install dependencies
3. Run `bun run format`
4. The crash should occur

## Notes

- Running `npx prettier --write src/**/*.ts` directly works fine
- The crash only occurs when running through the npm script
- This appears to be related to Bun's shell implementation on Windows

## Expected vs Actual

**Expected**: The prettier command should format all TypeScript files in the src directory.

**Actual**: Bun crashes with a segmentation fault at address 0xFFFFFFFFFFFFFFFF.

## Environment

Tested with Bun v1.2.18 on Windows.
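Because `bun run` executes `package.json` scripts through Bun's own shell, the same glob expansion path can likely be exercised directly with Bun's documented `$` shell API. A diagnostic sketch, not part of the original repro repository:

```ts
// repro.ts -- run with `bun repro.ts` inside shell-crash-repo.
// Pipes the exact command line from the "format" script through Bun's
// shell, the component the README suspects.
import { $ } from "bun";

await $`npx prettier --write src/**/*.ts`;
```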
shell-crash-repo/package.json (new file, 10 lines)
@@ -0,0 +1,10 @@
{
  "name": "shell-crash-test",
  "version": "1.0.0",
  "scripts": {
    "format": "npx prettier --write src/**/*.ts"
  },
  "devDependencies": {
    "prettier": "^3.0.0"
  }
}
shell-crash-repo/src/cli.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
console.log('CLI entry point');

export function runCLI() {
  console.log('Running CLI...');
}
shell-crash-repo/src/cli/index.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/env node

import { runCLI } from '../cli';

runCLI();
shell-crash-repo/src/commands/commit.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
export function commit(message: string): void {
  console.log(`Committing with message: ${message}`);
  // Simulated git commit logic
}
shell-crash-repo/src/core/config.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
import { Config } from '../types';

export class ConfigManager {
  private config: Config;

  constructor() {
    this.config = {
      apiKey: '',
      baseUrl: 'https://api.example.com',
      timeout: 5000
    };
  }

  get(): Config {
    return this.config;
  }

  set(key: keyof Config, value: any): void {
    this.config[key] = value;
  }
}
shell-crash-repo/src/core/context.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
export class Context {
  private data: Map<string, any>;

  constructor() {
    this.data = new Map();
  }

  set(key: string, value: any): void {
    this.data.set(key, value);
  }

  get<T>(key: string): T | undefined {
    return this.data.get(key);
  }
}
shell-crash-repo/src/types/index.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
export interface User {
  id: string;
  name: string;
  email: string;
}

export interface Config {
  apiKey: string;
  baseUrl: string;
  timeout: number;
}
shell-crash-repo/src/utils/box.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
export function createBox(content: string): string {
  return `┌─────────────┐\n│ ${content} │\n└─────────────┘`;
}
shell-crash-repo/src/utils/clipboard.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
export function copyToClipboard(text: string): void {
  console.log('Copying to clipboard:', text);
}

export function pasteFromClipboard(): string {
  return 'clipboard content';
}
shell-crash-repo/src/utils/formatFileSize.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
export function formatFileSize(bytes: number): string {
  const units = ['B', 'KB', 'MB', 'GB', 'TB'];
  let size = bytes;
  let unitIndex = 0;

  while (size >= 1024 && unitIndex < units.length - 1) {
    size /= 1024;
    unitIndex++;
  }

  return `${size.toFixed(2)} ${units[unitIndex]}`;
}
shell-crash-repo/src/utils/interactive.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
export async function prompt(question: string): Promise<string> {
  console.log(question);
  return 'user input';
}

export function confirm(message: string): boolean {
  return true;
}
shell-crash-repo/src/utils/logger.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
export class Logger {
  private prefix: string;

  constructor(prefix: string) {
    this.prefix = prefix;
  }

  log(message: string): void {
    console.log(`[${this.prefix}] ${message}`);
  }

  error(message: string): void {
    console.error(`[${this.prefix}] ERROR: ${message}`);
  }
}
shell-crash-repo/src/utils/spinner.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
export class Spinner {
  private message: string;

  constructor(message: string) {
    this.message = message;
  }

  start(): void {
    console.log(`⏳ ${this.message}...`);
  }

  stop(): void {
    console.log('✓ Done');
  }
}
shell-crash-repo/src/utils/url.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
export function isValidUrl(url: string): boolean {
  try {
    new URL(url);
    return true;
  } catch {
    return false;
  }
}
shell-crash-repo/src/utils/validation.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
export function validateEmail(email: string): boolean {
  const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
  return emailRegex.test(email);
}

export function validatePhone(phone: string): boolean {
  const phoneRegex = /^\+?[1-9]\d{1,14}$/;
  return phoneRegex.test(phone);
}
@@ -20,7 +20,7 @@ side: ?bun.bake.Side,
 /// This is only set for the JS bundle, and not files associated with an
 /// entrypoint like sourcemaps and bytecode
 entry_point_index: ?u32,
-referenced_css_files: []const Index = &.{},
+referenced_css_chunks: []const Index = &.{},
 source_index: Index.Optional = .none,

 pub const Index = bun.GenericIndex(u32, OutputFile);

@@ -30,7 +30,7 @@ pub fn deinit(this: *OutputFile) void {

 bun.default_allocator.free(this.src_path.text);
 bun.default_allocator.free(this.dest_path);
-bun.default_allocator.free(this.referenced_css_files);
+bun.default_allocator.free(this.referenced_css_chunks);
 }

 // Depending on:

@@ -99,6 +99,13 @@ pub const Value = union(Kind) {
     }
 }

+pub fn asSlice(v: Value) []const u8 {
+    return switch (v) {
+        .buffer => |buf| buf.bytes,
+        else => "",
+    };
+}
+
 pub fn toBunString(v: Value) bun.String {
     return switch (v) {
         .noop => bun.String.empty,

@@ -206,7 +213,7 @@ pub const Options = struct {
 },
 side: ?bun.bake.Side,
 entry_point_index: ?u32,
-referenced_css_files: []const Index = &.{},
+referenced_css_chunks: []const Index = &.{},
 };

 pub fn init(options: Options) OutputFile {

@@ -240,7 +247,7 @@ pub fn init(options: Options) OutputFile {
 },
 .side = options.side,
 .entry_point_index = options.entry_point_index,
-.referenced_css_files = options.referenced_css_files,
+.referenced_css_chunks = options.referenced_css_chunks,
 };
 }
@@ -1956,6 +1956,27 @@ pub const Api = struct {

 _,

+pub fn fromJS(global: *bun.JSC.JSGlobalObject, value: bun.JSC.JSValue) bun.JSError!?SourceMapMode {
+    if (value.isString()) {
+        const str = try value.toSliceOrNull(global);
+        defer str.deinit();
+        const utf8 = str.slice();
+        if (bun.strings.eqlComptime(utf8, "none")) {
+            return .none;
+        }
+        if (bun.strings.eqlComptime(utf8, "inline")) {
+            return .@"inline";
+        }
+        if (bun.strings.eqlComptime(utf8, "external")) {
+            return .external;
+        }
+        if (bun.strings.eqlComptime(utf8, "linked")) {
+            return .linked;
+        }
+    }
+    return null;
+}
+
 pub fn jsonStringify(self: @This(), writer: anytype) !void {
     return try writer.write(@tagName(self));
 }
@@ -107,7 +107,13 @@ static JSC::JSInternalPromise* resolvedInternalPromise(JSC::JSGlobalObject* glob
     return promise;
 }

-extern "C" BunString BakeProdLoad(ProductionPerThread* perThreadData, BunString a);
+extern "C" BunString BakeProdLoad(void* perThreadData, BunString a);
+
+extern "C" void* BakeGlobalObject__getPerThreadData(JSC::JSGlobalObject* global)
+{
+    Bake::GlobalObject* bake = jsCast<Bake::GlobalObject*>(global);
+    return bake->m_perThreadData;
+}

 JSC::JSInternalPromise* bakeModuleLoaderFetch(JSC::JSGlobalObject* globalObject,
     JSC::JSModuleLoader* loader, JSC::JSValue key,

@@ -134,7 +140,17 @@ JSC::JSInternalPromise* bakeModuleLoaderFetch(JSC::JSGlobalObject* globalObject,
             JSC::SourceProviderSourceType::Module));
         return resolvedInternalPromise(globalObject, JSC::JSSourceCode::create(vm, WTFMove(sourceCode)));
     }
-    return rejectedInternalPromise(globalObject, createTypeError(globalObject, makeString("Bundle does not have \""_s, moduleKey, "\". This is a bug in Bun's bundler."_s)));
+
+    // We unconditionally prefix the key with "bake:" inside
+    // BakeProdResolve in production.zig.
+    //
+    // But if someone does: `await import(resolve(import.meta.dir, "nav.ts"))`
+    // we don't actually want to load it from the Bake production module
+    // map and instead make it go through the normal codepath.
+    auto bakePrefixRemoved = moduleKey.substringSharingImpl("bake:"_s.length());
+    JSString* bakePrefixRemovedString = jsNontrivialString(vm, bakePrefixRemoved);
+    JSValue bakePrefixRemovedJsvalue = bakePrefixRemovedString;
+    return Zig::GlobalObject::moduleLoaderFetch(globalObject, loader, bakePrefixRemovedJsvalue, parameters, script);
 }
 return rejectedInternalPromise(globalObject, createTypeError(globalObject, "BakeGlobalObject does not have per-thread data configured"_s));
 }

@@ -240,7 +256,7 @@ extern "C" GlobalObject* BakeCreateProdGlobal(void* console)
     return global;
 }

-extern "C" void BakeGlobalObject__attachPerThreadData(GlobalObject* global, ProductionPerThread* perThreadData)
+extern "C" void BakeGlobalObject__attachPerThreadData(GlobalObject* global, void* perThreadData)
 {
     global->m_perThreadData = perThreadData;
 }
@@ -4,13 +4,15 @@

 namespace Bake {

-struct ProductionPerThread;
+// Opaque pointer to Zig's bake.production.PerThread structure
+// This must never be dereferenced in C++ code
+// Using void* to avoid any issues with incomplete types

 class GlobalObject : public Zig::GlobalObject {
 public:
     using Base = Zig::GlobalObject;

-    ProductionPerThread* m_perThreadData = nullptr;
+    void* m_perThreadData = nullptr;
     DECLARE_INFO;

     template<typename, JSC::SubspaceAccess mode> static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)

@@ -39,4 +41,7 @@ public:
     }
 };

+extern "C" void* BakeGlobalObject__getPerThreadData(JSC::JSGlobalObject* global);
+extern "C" void BakeGlobalObject__attachPerThreadData(GlobalObject* global, void* perThreadData);
+
 }; // namespace Kit
src/bake/bake.d.ts (vendored, 8 lines changed)
@@ -418,9 +418,9 @@ declare module "bun" {
 }

 type GetParamIterator =
-  | AsyncIterable<Record<string, string>, GetParamsFinalOpts>
-  | Iterable<Record<string, string>, GetParamsFinalOpts>
-  | ({ pages: Array<Record<string, string>> } & GetParamsFinalOpts);
+  | AsyncIterable<Record<string, string | string[]>, GetParamsFinalOpts>
+  | Iterable<Record<string, string | string[]>, GetParamsFinalOpts>
+  | ({ pages: Array<Record<string, string | string[]>> } & GetParamsFinalOpts);

 type GetParamsFinalOpts = void | null | {
   /**

@@ -478,7 +478,7 @@ declare module "bun" {
  */
 readonly layouts: ReadonlyArray<any>;
 /** Received route params. `null` if the route does not take params */
-readonly params: null | Record<string, string>;
+readonly params: null | Record<string, string | string[]>;
 /**
  * A list of js files that the route will need to be interactive.
  */
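The widening from `Record<string, string>` to `Record<string, string | string[]>` is what lets catch-all routes report multi-segment params. A hedged sketch of a valid `pages` value under the new type (route names and values are hypothetical):

```ts
// Hypothetical params for a plain route "/blog/:slug" and a
// catch-all route "/docs/[...path]".
const pages: Array<Record<string, string | string[]>> = [
  { slug: "hello-world" },          // plain param stays a string
  { path: ["guides", "install"] },  // catch-all param is an array of segments
];
```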
@@ -155,6 +155,20 @@ const BuildConfigSubset = struct {
 env: bun.Schema.Api.DotEnvBehavior = ._none,
 env_prefix: ?[]const u8 = null,
 define: bun.Schema.Api.StringMap = .{ .keys = &.{}, .values = &.{} },
+source_map: bun.Schema.Api.SourceMapMode = .external,
+
+pub fn fromJS(global: *JSC.JSGlobalObject, js_options: JSValue) bun.JSError!BuildConfigSubset {
+    var options = BuildConfigSubset{};
+
+    if (try js_options.getOptional(global, "sourcemap", JSValue)) |val| {
+        if (try bun.Schema.Api.SourceMapMode.fromJS(global, val)) |sourcemap| {
+            options.source_map = sourcemap;
+        }
+        return bun.JSC.Node.validators.throwErrInvalidArgType(global, "sourcemap", .{}, "string", val);
+    }
+
+    return options;
+}
 };

 /// A "Framework" in our eyes is simply set of bundler options that a framework

@@ -578,8 +592,15 @@ pub const Framework = struct {
 }

 if (try opts.getOptional(global, "bundlerOptions", JSValue)) |js_options| {
-    _ = js_options; // TODO:
-    // try bundler_options.parseInto(global, js_options, .root);
+    if (try js_options.getOptional(global, "server", JSValue)) |server_options| {
+        bundler_options.server = try BuildConfigSubset.fromJS(global, server_options);
+    }
+    if (try js_options.getOptional(global, "client", JSValue)) |client_options| {
+        bundler_options.client = try BuildConfigSubset.fromJS(global, client_options);
+    }
+    if (try js_options.getOptional(global, "ssr", JSValue)) |ssr_options| {
+        bundler_options.ssr = try BuildConfigSubset.fromJS(global, ssr_options);
+    }
 }

 return framework;
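Taken together with `SourceMapMode.fromJS` above, a framework definition can now select a sourcemap mode per bundle graph. A hedged sketch of the accepted shape (the `bundlerOptions`, `server`/`client`/`ssr`, and `sourcemap` keys follow the diff; the surrounding object is hypothetical):

```ts
// Each sourcemap value must parse as one of the four strings accepted
// by SourceMapMode.fromJS: "none" | "inline" | "external" | "linked".
const frameworkOptions = {
  bundlerOptions: {
    server: { sourcemap: "linked" },
    client: { sourcemap: "external" },
    ssr: { sourcemap: "inline" },
  },
};
```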
@@ -82,7 +82,21 @@ pub fn buildCommand(ctx: bun.CLI.Command.Context) !void {

 const api_lock = vm.jsc.getAPILock();
 defer api_lock.release();
-buildWithVm(ctx, cwd, vm) catch |err| switch (err) {
+
+var pt: PerThread = .{
+    .input_files = &.{},
+    .bundled_outputs = &.{},
+    .output_indexes = &.{},
+    .module_keys = &.{},
+    .module_map = .{},
+    .source_maps = .{},
+
+    .vm = vm,
+    .loaded_files = bun.bit_set.AutoBitSet.initEmpty(vm.allocator, 0) catch unreachable,
+    .all_server_files = JSValue.null,
+};
+
+buildWithVm(ctx, cwd, vm, &pt) catch |err| switch (err) {
 error.JSError => |e| {
     bun.handleErrorReturnTrace(err, @errorReturnTrace());
     const err_value = vm.global.takeException(e);

@@ -96,7 +110,7 @@ pub fn buildCommand(ctx: bun.CLI.Command.Context) !void {
 };
 }

-pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine) !void {
+pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
 // Load and evaluate the configuration module
 const global = vm.global;
 const b = &vm.transpiler;
@@ -174,10 +188,10 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
 var client_transpiler: bun.transpiler.Transpiler = undefined;
 var server_transpiler: bun.transpiler.Transpiler = undefined;
 var ssr_transpiler: bun.transpiler.Transpiler = undefined;
-try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .server, &server_transpiler, &options.bundler_options.server, .@"inline");
-try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .client, &client_transpiler, &options.bundler_options.client, .@"inline");
+try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .server, &server_transpiler, &options.bundler_options.server, bun.options.SourceMapOption.fromApi(options.bundler_options.server.source_map));
+try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .client, &client_transpiler, &options.bundler_options.client, bun.options.SourceMapOption.fromApi(options.bundler_options.client.source_map));
 if (separate_ssr_graph) {
-    try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .ssr, &ssr_transpiler, &options.bundler_options.ssr, .@"inline");
+    try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .ssr, &ssr_transpiler, &options.bundler_options.ssr, bun.options.SourceMapOption.fromApi(options.bundler_options.ssr.source_map));
 }

 if (ctx.bundler_options.bake_debug_disable_minify) {

@@ -261,6 +275,11 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
     .{ .js = vm.event_loop },
 );
 const bundled_outputs = bundled_outputs_list.items;
+if (bundled_outputs.len == 0) {
+    Output.prettyln("done", .{});
+    Output.flush();
+    return;
+}

 Output.prettyErrorln("Rendering routes", .{});
 Output.flush();

@@ -278,6 +297,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
 const module_keys = try vm.allocator.alloc(bun.String, entry_points.files.count());
 const output_indexes = entry_points.files.values();
 var output_module_map: bun.StringArrayHashMapUnmanaged(OutputFile.Index) = .{};
+var source_maps: bun.StringArrayHashMapUnmanaged(OutputFile.Index) = .{};
 @memset(module_keys, bun.String.dead);
 for (bundled_outputs, 0..) |file, i| {
     log("{s} - {s} : {s} - {?d}\n", .{

@@ -287,7 +307,11 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
     file.entry_point_index,
 });
 if (file.loader.isCSS()) {
-    if (css_chunks_count == 0) css_chunks_first = i;
+    if (css_chunks_count == 0) {
+        css_chunks_first = i;
+    } else {
+        css_chunks_first = @min(css_chunks_first, i);
+    }
     css_chunks_count += 1;
 }

@@ -299,6 +323,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {

 switch (file.side orelse continue) {
     .client => {
+        // TODO: Maybe not do this all in 1 thread?
         // Client-side resources will be written to disk for usage in on the client side
         _ = file.writeToDisk(root_dir, ".") catch |err| {
             bun.handleErrorReturnTrace(err, @errorReturnTrace());

@@ -314,6 +339,27 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
     };
 }

+// If the file has a sourcemap, store it so we can put it on
+// `PerThread` so we can provide sourcemapped stacktraces for
+// server components.
+if (file.source_map_index != std.math.maxInt(u32)) {
+    const source_map_index = file.source_map_index;
+    const source_map_file: *const OutputFile = &bundled_outputs[source_map_index];
+    bun.assert(source_map_file.output_kind == .sourcemap);
+
+    const without_prefix = if (bun.strings.hasPrefixComptime(file.dest_path, "./") or
+        (Environment.isWindows and bun.strings.hasPrefixComptime(file.dest_path, ".\\")))
+        file.dest_path[2..]
+    else
+        file.dest_path;
+
+    try source_maps.put(
+        allocator,
+        try std.fmt.allocPrint(allocator, "bake:/{s}", .{without_prefix}),
+        OutputFile.Index.init(@intCast(source_map_index)),
+    );
+}
+
 switch (file.output_kind) {
     .@"entry-point", .chunk => {
         const without_prefix = if (bun.strings.hasPrefixComptime(file.dest_path, "./") or
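The map key scheme above is what `BakeProdSourceMap` later looks up. A small TypeScript sketch of the normalization (assumption: `dest_path` values look like `./chunk.js`, or `.\chunk.js` on Windows, as the Zig branch handles):

```ts
// Hypothetical helper mirroring the Zig logic: strip a leading "./" or
// ".\" from the chunk's dest_path, then prefix with "bake:/".
function sourceMapKey(destPath: string): string {
  const withoutPrefix =
    destPath.startsWith("./") || destPath.startsWith(".\\") ? destPath.slice(2) : destPath;
  return `bake:/${withoutPrefix}`;
}

console.log(sourceMapKey("./pages/index.js.map")); // "bake:/pages/index.js.map"
```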
@@ -350,9 +396,10 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
     .output_indexes = output_indexes,
     .module_keys = module_keys,
     .module_map = output_module_map,
+    .source_maps = source_maps,
 };

-var pt = try PerThread.init(vm, per_thread_options);
+pt.* = try PerThread.init(vm, per_thread_options);
 pt.attach();

 // Static site generator

@@ -417,6 +464,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
 const css_chunk_js_strings = try allocator.alloc(JSValue, css_chunks_count);
 for (bundled_outputs[css_chunks_first..][0..css_chunks_count], css_chunk_js_strings) |output_file, *str| {
     bun.assert(output_file.dest_path[0] != '.');
+    // CSS chunks must be in contiguous order!!
     bun.assert(output_file.loader.isCSS());
     str.* = (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(global);
 }

@@ -444,15 +492,18 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
 .param => {
     params_buf.append(ctx.allocator, route.part.param) catch unreachable;
 },
-.catch_all, .catch_all_optional => {
+.catch_all => {
     params_buf.append(ctx.allocator, route.part.catch_all) catch unreachable;
 },
+.catch_all_optional => {
+    return global.throw("catch-all routes are not supported in static site generation", .{});
+},
 else => {},
 }
 var file_count: u32 = 1;
-var css_file_count: u32 = @intCast(main_file.referenced_css_files.len);
+var css_file_count: u32 = @intCast(main_file.referenced_css_chunks.len);
 if (route.file_layout.unwrap()) |file| {
-    css_file_count += @intCast(pt.outputFile(file).referenced_css_files.len);
+    css_file_count += @intCast(pt.outputFile(file).referenced_css_chunks.len);
     file_count += 1;
 }
 var next: ?FrameworkRouter.Route.Index = route.parent.unwrap();

@@ -463,13 +514,16 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
 .param => {
     params_buf.append(ctx.allocator, parent.part.param) catch unreachable;
 },
-.catch_all, .catch_all_optional => {
+.catch_all => {
     params_buf.append(ctx.allocator, parent.part.catch_all) catch unreachable;
 },
+.catch_all_optional => {
+    return global.throw("catch-all routes are not supported in static site generation", .{});
+},
 else => {},
 }
 if (parent.file_layout.unwrap()) |file| {
-    css_file_count += @intCast(pt.outputFile(file).referenced_css_files.len);
+    css_file_count += @intCast(pt.outputFile(file).referenced_css_chunks.len);
     file_count += 1;
 }
 next = parent.parent.unwrap();

@@ -483,13 +537,13 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
 file_count = 1;
 css_file_count = 0;
 try file_list.putIndex(global, 0, try pt.preloadBundledModule(main_file_route_index));
-for (main_file.referenced_css_files) |ref| {
+for (main_file.referenced_css_chunks) |ref| {
     try styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]);
     css_file_count += 1;
 }
 if (route.file_layout.unwrap()) |file| {
     try file_list.putIndex(global, file_count, try pt.preloadBundledModule(file));
-    for (pt.outputFile(file).referenced_css_files) |ref| {
+    for (pt.outputFile(file).referenced_css_chunks) |ref| {
         try styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]);
         css_file_count += 1;
     }

@@ -500,7 +554,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMachine, pt: *PerThread) !void {
 const parent = router.routePtr(parent_index);
 if (parent.file_layout.unwrap()) |file| {
     try file_list.putIndex(global, file_count, try pt.preloadBundledModule(file));
-    for (pt.outputFile(file).referenced_css_files) |ref| {
+    for (pt.outputFile(file).referenced_css_chunks) |ref| {
         try styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]);
         css_file_count += 1;
     }

@@ -743,6 +797,7 @@ pub const PerThread = struct {
 module_keys: []const bun.String,
 /// Unordered
 module_map: bun.StringArrayHashMapUnmanaged(OutputFile.Index),
+source_maps: bun.StringArrayHashMapUnmanaged(OutputFile.Index),

 // Thread-local
 vm: *JSC.VirtualMachine,

@@ -761,6 +816,7 @@ pub const PerThread = struct {
 module_keys: []const bun.String,
 /// Unordered
 module_map: bun.StringArrayHashMapUnmanaged(OutputFile.Index),
+source_maps: bun.StringArrayHashMapUnmanaged(OutputFile.Index),
 };

 extern fn BakeGlobalObject__attachPerThreadData(global: *JSC.JSGlobalObject, pt: ?*PerThread) void;

@@ -782,6 +838,7 @@ pub const PerThread = struct {
     .vm = vm,
     .loaded_files = loaded_files,
     .all_server_files = all_server_files,
+    .source_maps = opts.source_maps,
 };
 }

@@ -848,6 +905,17 @@ pub export fn BakeProdLoad(pt: *PerThread, key: bun.String) bun.String {
     return bun.String.dead;
 }

+pub export fn BakeProdSourceMap(pt: *PerThread, key: bun.String) bun.String {
+    var sfa = std.heap.stackFallback(4096, bun.default_allocator);
+    const allocator = sfa.get();
+    const utf8 = key.toUTF8(allocator);
+    defer utf8.deinit();
+    if (pt.source_maps.get(utf8.slice())) |value| {
+        return pt.bundled_outputs[value.get()].value.toBunString();
+    }
+    return bun.String.dead;
+}
+
 const TypeAndFlags = packed struct(i32) {
     type: u8,
     unused: u24 = 0,
@@ -2,6 +2,8 @@ pub const LinkerContext = struct {
 pub const debug = Output.scoped(.LinkerCtx, false);
 pub const CompileResult = bundler.CompileResult;

+pub const OutputFileListBuilder = @import("./linker_context/OutputFileListBuilder.zig");
+
 parse_graph: *Graph = undefined,
 graph: LinkerGraph = undefined,
 allocator: std.mem.Allocator = undefined,

@@ -1484,6 +1484,10 @@ pub const BundleV2 = struct {
     reachable_files,
 );

+if (chunks.len == 0) {
+    return std.ArrayList(options.OutputFile).init(bun.default_allocator);
+}
+
 return try this.linker.generateChunksInParallel(chunks, false);
 }
src/bundler/linker_context/OutputFileListBuilder.zig (new file, 148 lines)
@@ -0,0 +1,148 @@
//! Q: What does this struct do?
//! A: This struct segments the `OutputFile` list into 3 separate spaces so
//! chunk indexing remains the same:
//!
//! 1. chunks
//! 2. sourcemaps and bytecode
//! 3. additional output files
//!
//! We can calculate the space ahead of time and avoid having to do something
//! more complicated or which requires extra work.
//!
//! Q: Why does it need to do that?
//! A: We would like it so if we have a chunk index, we can also index its
//! corresponding output file in the output file list.
//!
//! The DevServer uses the `referenced_css_chunks` (a list of chunk indices)
//! field on `OutputFile` to know which CSS files to hand to the rendering
//! function. For React this just adds <link> tags that point to each output CSS
//! file.
//!
//! However, we previously were pushing sourcemaps and bytecode output files
//! to the output file list directly after their corresponding chunk, meaning
//! the index of the chunk in the chunk list and its corresponding
//! `OutputFile` in the output file list got scrambled.
//!
//! If we maintain the property that `outputIndexForChunk(chunk[i]) == i`
//! then we don't need to do any allocations or extra work to get the output
//! file for a chunk.
pub const OutputFileList = @This();

output_files: std.ArrayList(options.OutputFile),
index_for_chunk: u32,
index_for_sourcemaps_and_bytecode: ?u32,
additional_output_files_start: u32,

total_insertions: u32,

pub fn init(
    allocator: std.mem.Allocator,
    c: *const bun.bundle_v2.LinkerContext,
    chunks: []const bun.bundle_v2.Chunk,
    _: usize,
) !@This() {
    const length, const source_map_and_bytecode_count = OutputFileList.calculateOutputFileListCapacity(c, chunks);
    var output_files = try std.ArrayList(options.OutputFile).initCapacity(
        allocator,
        length,
    );
    output_files.items.len = length;

    return .{
        .output_files = output_files,
        .index_for_chunk = 0,
        .index_for_sourcemaps_and_bytecode = if (source_map_and_bytecode_count == 0) null else @as(u32, @truncate(chunks.len)),
        .additional_output_files_start = @as(u32, @intCast(chunks.len)) + source_map_and_bytecode_count,
        .total_insertions = 0,
    };
}

pub fn take(this: *@This()) std.ArrayList(options.OutputFile) {
    bun.assertf(this.total_insertions == this.output_files.items.len, "total_insertions ({d}) != output_files.items.len ({d})", .{ this.total_insertions, this.output_files.items.len });
    const list = this.output_files;
    this.output_files = std.ArrayList(options.OutputFile).init(bun.default_allocator);
    return list;
}

pub fn calculateOutputFileListCapacity(c: *const bun.bundle_v2.LinkerContext, chunks: []const bun.bundle_v2.Chunk) struct { u32, u32 } {
    const source_map_count = if (c.options.source_maps.hasExternalFiles()) brk: {
        var count: usize = 0;
        for (chunks) |*chunk| {
            if (chunk.content.sourcemap(c.options.source_maps).hasExternalFiles()) {
                count += 1;
            }
        }
        break :brk count;
    } else 0;
    const bytecode_count = if (c.options.generate_bytecode_cache) bytecode_count: {
        var bytecode_count: usize = 0;
        for (chunks) |*chunk| {
            // TODO: this was the original logic, but it seems like it is
            // incorrect / does unnecessary work? Leaving it here just in-case,
            // as it moved from a different file and is not git blame-able.
            //
            // const loader: Loader = if (chunk.entry_point.is_entry_point)
            //     c.parse_graph.input_files.items(.loader)[
            //         chunk.entry_point.source_index
            //     ]
            // else
            //     .js;
            // if (loader.isJavaScriptLike()) {
            //     bytecode_count += 1;
            // }

            if (chunk.content == .javascript) {
                bytecode_count += 1;
            }
        }
        break :bytecode_count bytecode_count;
    } else 0;

    return .{ @intCast(chunks.len + source_map_count + bytecode_count + c.parse_graph.additional_output_files.items.len), @intCast(source_map_count + bytecode_count) };
}

pub fn insertForChunk(this: *OutputFileList, output_file: options.OutputFile) u32 {
    const index = this.indexForChunk();
    bun.assertf(index < this.index_for_sourcemaps_and_bytecode orelse std.math.maxInt(u32), "index ({d}) < index_for_sourcemaps_and_bytecode ({d})", .{ index, this.index_for_sourcemaps_and_bytecode orelse std.math.maxInt(u32) });
    this.output_files.items[index] = output_file;
    this.total_insertions += 1;
    return index;
}

pub fn insertForSourcemapOrBytecode(this: *OutputFileList, output_file: options.OutputFile) !u32 {
    const index = this.indexForSourcemapOrBytecode() orelse return error.NoSourceMapsOrBytecode;
    bun.assertf(index < this.additional_output_files_start, "index ({d}) < additional_output_files_start ({d})", .{ index, this.additional_output_files_start });
    this.output_files.items[index] = output_file;
    this.total_insertions += 1;
    return index;
}

pub fn insertAdditionalOutputFiles(this: *OutputFileList, additional_output_files: []const options.OutputFile) void {
    bun.assertf(this.index_for_sourcemaps_and_bytecode orelse std.math.maxInt(u32) <= this.additional_output_files_start, "index_for_sourcemaps_and_bytecode ({d}) <= additional_output_files_start ({d})", .{ this.index_for_sourcemaps_and_bytecode orelse std.math.maxInt(u32), this.additional_output_files_start });
    bun.copy(
        options.OutputFile,
        this.getMutableAdditionalOutputFiles(),
        additional_output_files,
    );
    this.total_insertions += @as(u32, @intCast(additional_output_files.len));
}

pub fn getMutableAdditionalOutputFiles(this: *OutputFileList) []options.OutputFile {
    return this.output_files.items[this.additional_output_files_start..];
}

fn indexForChunk(this: *@This()) u32 {
    const result = this.index_for_chunk;
    this.index_for_chunk += 1;
    return result;
}

fn indexForSourcemapOrBytecode(this: *@This()) ?u32 {
    const result = this.index_for_sourcemaps_and_bytecode orelse return null;
    this.index_for_sourcemaps_and_bytecode.? += 1;
    return result;
}

const std = @import("std");
const bun = @import("bun");
const options = bun.options;
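The invariant described in the doc comment at the top of OutputFileListBuilder.zig is easiest to see with concrete numbers. A short TypeScript sketch (hypothetical counts, not part of the Zig file) of the three fixed segments the builder reserves up front:

```ts
// With 3 chunks, 3 sourcemap/bytecode files, and 4 additional output
// files, the list is carved into fixed segments so that chunk i is
// always output_files[i], which keeps referenced_css_chunks indices valid.
function segmentLayout(chunks: number, mapsAndBytecode: number, additional: number) {
  return {
    chunkStart: 0,                       // chunks occupy [0, chunks)
    mapAndBytecodeStart: chunks,         // then [chunks, chunks + mapsAndBytecode)
    additionalStart: chunks + mapsAndBytecode,
    total: chunks + mapsAndBytecode + additional,
  };
}

console.log(segmentLayout(3, 3, 4));
// { chunkStart: 0, mapAndBytecodeStart: 3, additionalStart: 6, total: 10 }
```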
@@ -310,6 +310,12 @@ pub noinline fn computeChunks(
 // Determine the order of JS files (and parts) within the chunk ahead of time
 try this.findAllImportedPartsInJSOrder(temp_allocator, chunks);

+// Handle empty chunks case
+if (chunks.len == 0) {
+    this.unique_key_buf = "";
+    return chunks;
+}
+
 const unique_key_item_len = std.fmt.count("{any}C{d:0>8}", .{ bun.fmt.hexIntLower(unique_key), chunks.len });
 var unique_key_builder = try bun.StringBuilder.initCapacity(this.allocator, unique_key_item_len * chunks.len);
 this.unique_key_buf = unique_key_builder.allocatedSlice();
@@ -326,11 +326,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
     }
 }

-var output_files = std.ArrayList(options.OutputFile).initCapacity(
-    bun.default_allocator,
-    (if (c.options.source_maps.hasExternalFiles()) chunks.len * 2 else chunks.len) +
-        @as(usize, c.parse_graph.additional_output_files.items.len),
-) catch unreachable;
+var output_files = try OutputFileListBuilder.init(bun.default_allocator, c, chunks, c.parse_graph.additional_output_files.items.len);

 const root_path = c.resolver.opts.output_dir;
 const more_than_one_output = c.parse_graph.additional_output_files.items.len > 0 or c.options.generate_bytecode_cache or (has_css_chunk and has_js_chunk) or (has_html_chunk and (has_js_chunk or has_css_chunk));

@@ -346,7 +342,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
 try c.writeOutputFilesToDisk(root_path, chunks, &output_files);
 } else {
 // In-memory build
-for (chunks) |*chunk| {
+for (chunks, 0..) |*chunk, chunk_index_in_chunks_list| {
     var display_size: usize = 0;

     const public_path = if (chunk.is_browser_chunk_from_server_build)

@@ -495,14 +491,12 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
 };

 const source_map_index: ?u32 = if (sourcemap_output_file != null)
-    @as(u32, @truncate(output_files.items.len + 1))
+    try output_files.insertForSourcemapOrBytecode(sourcemap_output_file.?)
 else
     null;

-const bytecode_index: ?u32 = if (bytecode_output_file != null and source_map_index != null)
-    @as(u32, @truncate(output_files.items.len + 2))
-else if (bytecode_output_file != null)
-    @as(u32, @truncate(output_files.items.len + 1))
+const bytecode_index: ?u32 = if (bytecode_output_file != null)
+    try output_files.insertForSourcemapOrBytecode(bytecode_output_file.?)
 else
     null;

@@ -512,7 +506,8 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
     c.graph.files.items(.entry_point_kind)[chunk.entry_point.source_index].outputKind()
 else
     .chunk;
-try output_files.append(options.OutputFile.init(.{
+
+const chunk_index = output_files.insertForChunk(options.OutputFile.init(.{
     .data = .{
         .buffer = .{
             .data = code_result.buffer,

@@ -539,24 +534,21 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
     chunk.entry_point.source_index - @as(u32, (if (c.framework) |fw| if (fw.server_components != null) 3 else 1 else 1))
 else
     null,
-.referenced_css_files = switch (chunk.content) {
+.referenced_css_chunks = switch (chunk.content) {
     .javascript => |js| @ptrCast(try bun.default_allocator.dupe(u32, js.css_chunks)),
     .css => &.{},
     .html => &.{},
 },
 }));
-if (sourcemap_output_file) |sourcemap_file| {
-    try output_files.append(sourcemap_file);
-}
-if (bytecode_output_file) |bytecode_file| {
-    try output_files.append(bytecode_file);
-}
+
+// We want the chunk index to remain the same in `output_files` so the indices in `OutputFile.referenced_css_chunks` work
+bun.assertf(chunk_index == chunk_index_in_chunks_list, "chunk_index ({d}) != chunk_index_in_chunks_list ({d})", .{ chunk_index, chunk_index_in_chunks_list });
 }

-try output_files.appendSlice(c.parse_graph.additional_output_files.items);
+output_files.insertAdditionalOutputFiles(c.parse_graph.additional_output_files.items);
 }

-return output_files;
+return output_files.take();
 }

 const bun = @import("bun");

@@ -600,3 +592,4 @@ const base64 = bun.base64;
 const JSC = bun.JSC;

 pub const ThreadPoolLib = bun.ThreadPool;
+const OutputFileListBuilder = bun.bundle_v2.LinkerContext.OutputFileListBuilder;
@@ -2,7 +2,7 @@ pub fn writeOutputFilesToDisk(
     c: *LinkerContext,
     root_path: string,
     chunks: []Chunk,
-    output_files: *std.ArrayList(options.OutputFile),
+    output_files: *OutputFileListBuilder,
 ) !void {
     const trace = bun.perf.trace("Bundler.writeOutputFilesToDisk");
     defer trace.end();

@@ -41,7 +41,7 @@ pub fn writeOutputFilesToDisk(
 var pathbuf: bun.PathBuffer = undefined;
 const bv2: *bundler.BundleV2 = @fieldParentPtr("linker", c);

-for (chunks) |*chunk| {
+for (chunks, 0..) |*chunk, chunk_index_in_chunks_list| {
     const trace2 = bun.perf.trace("Bundler.writeChunkToDisk");
     defer trace2.end();
     defer max_heap_allocator.reset();

@@ -292,14 +292,12 @@ pub fn writeOutputFilesToDisk(
 }

 const source_map_index: ?u32 = if (source_map_output_file != null)
-    @as(u32, @truncate(output_files.items.len + 1))
+    try output_files.insertForSourcemapOrBytecode(source_map_output_file.?)
 else
     null;

-const bytecode_index: ?u32 = if (bytecode_output_file != null and source_map_index != null)
-    @as(u32, @truncate(output_files.items.len + 2))
-else if (bytecode_output_file != null)
-    @as(u32, @truncate(output_files.items.len + 1))
+const bytecode_index: ?u32 = if (bytecode_output_file != null)
+    try output_files.insertForSourcemapOrBytecode(bytecode_output_file.?)
 else
     null;

@@ -309,7 +307,8 @@ pub fn writeOutputFilesToDisk(
     c.graph.files.items(.entry_point_kind)[chunk.entry_point.source_index].outputKind()
 else
     .chunk;
-try output_files.append(options.OutputFile.init(.{
+
+const chunk_index = output_files.insertForChunk(options.OutputFile.init(.{
     .output_path = bun.default_allocator.dupe(u8, chunk.final_rel_path) catch unreachable,
     .input_path = input_path,
     .input_loader = if (chunk.entry_point.is_entry_point)

@@ -337,27 +336,19 @@ pub fn writeOutputFilesToDisk(
     chunk.entry_point.source_index - @as(u32, (if (c.framework) |fw| if (fw.server_components != null) 3 else 1 else 1))
 else
     null,
-.referenced_css_files = switch (chunk.content) {
+.referenced_css_chunks = switch (chunk.content) {
     .javascript => |js| @ptrCast(try bun.default_allocator.dupe(u32, js.css_chunks)),
     .css => &.{},
     .html => &.{},
 },
 }));

-if (source_map_output_file) |sourcemap_file| {
-    try output_files.append(sourcemap_file);
-}
-
-if (bytecode_output_file) |bytecode_file| {
-    try output_files.append(bytecode_file);
-}
+// We want the chunk index to remain the same in `output_files` so the indices in `OutputFile.referenced_css_chunks` work
+bun.assertf(chunk_index == chunk_index_in_chunks_list, "chunk_index ({d}) != chunk_index_in_chunks_list ({d})", .{ chunk_index, chunk_index_in_chunks_list });
 }

 {
-    const offset = output_files.items.len;
-    output_files.items.len += c.parse_graph.additional_output_files.items.len;
-
-    for (c.parse_graph.additional_output_files.items, output_files.items[offset..][0..c.parse_graph.additional_output_files.items.len]) |*src, *dest| {
+    for (c.parse_graph.additional_output_files.items, output_files.getMutableAdditionalOutputFiles()) |*src, *dest| {
         const bytes = src.value.buffer.bytes;
         src.value.buffer.bytes.len = 0;

@@ -442,3 +433,4 @@ pub const ParseTask = bun.bundle_v2.ParseTask;
 const Chunk = bundler.Chunk;
 const cheapPrefixNormalizer = bundler.cheapPrefixNormalizer;
 const debug = LinkerContext.debug;
+const OutputFileListBuilder = bun.bundle_v2.LinkerContext.OutputFileListBuilder;
@@ -40,6 +40,8 @@ export function renderRoutesForProdStatic(
 });
 const { join: pathJoin } = require("node:path");

+const regex = /:(\w+)/g;
+
 let loadedModules = new Array(allServerFiles.length);

 async function doGenerateRoute(

@@ -47,7 +49,7 @@ export function renderRoutesForProdStatic(
 i: number,
 layouts: any[],
 pageModule: any,
-params: Record<string, string> | null,
+params: Record<string, string | string[]> | null,
 ) {
 // Call the framework's rendering function
 const callback = renderStatic[type];

@@ -77,10 +79,20 @@ export function renderRoutesForProdStatic(
 Object.entries(files).map(([key, value]) => {
     if (params != null) {
         $assert(patterns[i].includes(`:`));
-        // replace the :paramName part of patterns[i] with the value of params[paramName]
-        // use a regex in replace with a callback
-        const newKey = patterns[i].replace(/:(\w+)/g, (_, p1) => params[p1]);
-        return Bun.write(pathJoin(outBase, newKey + key), value);
+        const matches = regex.exec(patterns[i]);
+        const [_, p1] = matches!;
+        if (typeof params[p1] === "string") {
+            // replace the :paramName part of patterns[i] with the value of params[paramName]
+            // use a regex in replace with a callback
+            const newKey = params[p1];
+            return Bun.write(pathJoin(outBase, newKey + key), value);
+        }
+        if (Array.isArray(params[p1])) {
+            return Bun.write(pathJoin(outBase, ...params[p1], key), value);
+        }
+        throw new Error(
+            `Route ${JSON.stringify(sourceRouteFiles[i])} has a param that is not a string or array of strings: ${p1}`,
+        );
     }
     return Bun.write(pathJoin(outBase, patterns[i] + key), value);
 }),
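A condensed sketch of the two write branches above (hypothetical values), showing where a string param and an array (catch-all) param end up on disk:

```ts
import { join } from "node:path";

// Mirrors the branch logic: string params are concatenated with the
// output key, array params become nested path segments.
function outputPath(outBase: string, param: string | string[], key: string): string {
  return typeof param === "string" ? join(outBase, param + key) : join(outBase, ...param, key);
}

console.log(outputPath("dist", "hello", "/index.html"));    // dist/hello/index.html
console.log(outputPath("dist", ["a", "b"], "/index.html")); // dist/a/b/index.html
```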
@@ -92,10 +104,10 @@ export function renderRoutesForProdStatic(
 i: number,
 layouts: any[],
 pageModule: any,
-params: Record<string, string>,
+params: Record<string, string | string[]>,
 ) {
 for (const param of paramInformation[i]!) {
-    if (!params[param]) {
+    if (params[param] === undefined) {
         throw new Error(`Missing param ${param} for route ${JSON.stringify(sourceRouteFiles[i])}`);
     }
 }
@@ -116,8 +116,16 @@ pub const Flags = struct {
 };

 pub const ClauseItem = struct {
+    /// The local alias used for the imported/exported symbol in the current module.
+    /// For imports: `import { foo as bar }` - "bar" is the alias
+    /// For exports: `export { foo as bar }` - "bar" is the alias
+    /// For re-exports: `export { foo as bar } from 'path'` - "bar" is the alias
     alias: string,
     alias_loc: logger.Loc = logger.Loc.Empty,
+    /// Reference to the actual symbol being imported/exported.
+    /// For imports: `import { foo as bar }` - ref to the symbol representing "foo" from the source module
+    /// For exports: `export { foo as bar }` - ref to the local symbol "foo"
+    /// For re-exports: `export { foo as bar } from 'path'` - ref to an intermediate symbol
     name: LocRef,

     /// This is the original name of the symbol stored in "Name". It's needed for

@@ -550,6 +558,13 @@ pub const NamedImport = struct {
 // Parts within this file that use this import
 local_parts_with_uses: BabyList(u32) = BabyList(u32){},

+// The original export name from the source module being imported.
+// Examples:
+// - `import { foo } from 'module'` → alias = "foo"
+// - `import { foo as bar } from 'module'` → alias = "foo" (original export name)
+// - `import * as ns from 'module'` → alias_is_star = true, alias = ""
+// This field is used by the bundler to match imports with their corresponding
+// exports and for error reporting when imports can't be resolved.
 alias: ?string,
 alias_loc: ?logger.Loc = null,
 namespace_ref: ?Ref,
@@ -9017,7 +9017,7 @@ fn NewParser_(
     }) catch unreachable;
 }

-item_refs.putAssumeCapacity(name, name_loc.*);
+// No need to add this to `item_refs` because `.scanForImportsAndExports` special cases `s
 }
 var end: usize = 0;

@@ -18761,6 +18761,26 @@ fn NewParser_(
 }, .loc = loc };
 }

+// Inline import.meta properties for Bake
+if (p.options.framework != null) {
+    if (strings.eqlComptime(name, "dir") or strings.eqlComptime(name, "dirname")) {
+        // Inline import.meta.dir
+        return p.newExpr(E.String.init(p.source.path.name.dir), name_loc);
+    } else if (strings.eqlComptime(name, "file")) {
+        // Inline import.meta.file (filename only)
+        return p.newExpr(E.String.init(p.source.path.name.filename), name_loc);
+    } else if (strings.eqlComptime(name, "path")) {
+        // Inline import.meta.path (full path)
+        return p.newExpr(E.String.init(p.source.path.text), name_loc);
+    } else if (strings.eqlComptime(name, "url")) {
+        // Inline import.meta.url as file:// URL
+        const bunstr = bun.String.fromBytes(p.source.path.text);
+        defer bunstr.deref();
+        const url = std.fmt.allocPrint(p.allocator, "{s}", .{JSC.URL.fileURLFromString(bunstr)}) catch unreachable;
+        return p.newExpr(E.String.init(url), name_loc);
+    }
+}
+
 // Make all property accesses on `import.meta.url` side effect free.
 return p.newExpr(
     E.Dot{
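The effect of the Bake-only inlining above, sketched as a before/after for a hypothetical source file at /app/routes/index.ts (the inlining only runs when a framework is configured, per the `p.options.framework != null` guard):

```ts
// Before (as written in the source file):
console.log(import.meta.dir, import.meta.file);

// After parsing with a Bake framework configured, the properties are
// replaced with string literals at parse time:
console.log("/app/routes", "index.ts");
```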
@@ -836,6 +836,33 @@ pub fn getSourceMapImpl(

 // try to load a .map file
 if (load_hint != .is_inline_map) try_external: {
+    if (comptime SourceProviderKind == BakeSourceProvider) {
+        const data = BakeSourceProvider.getExternal(
+            provider,
+            bun.JSC.VirtualMachine.get().global,
+            source_filename,
+        );
+        break :parsed .{
+            .is_external_map,
+            parseJSON(
+                bun.default_allocator,
+                allocator,
+                data,
+                result,
+            ) catch |err| {
+                // Print warning even if this came from non-visible code like
+                // calling `error.stack`. This message is only printed if
+                // the sourcemap has been found but is invalid, such as being
+                // invalid JSON text or corrupt mappings.
+                bun.Output.warn("Could not decode sourcemap in '{s}': {s}", .{
+                    source_filename,
+                    @errorName(err),
+                }); // Disable the "try using --sourcemap=external" hint
+                bun.JSC.SavedSourceMap.MissingSourceMapNoteInfo.seen_invalid = true;
+                return null;
+            },
+        };
+    }
     var load_path_buf: *bun.PathBuffer = bun.PathBufferPool.get();
     defer bun.PathBufferPool.put(load_path_buf);
     if (source_filename.len + 4 > load_path_buf.len)

@@ -918,6 +945,8 @@ pub const SourceProviderMap = opaque {
 }
 };

+extern "c" fn BakeGlobalObject__getPerThreadData(global: *bun.JSC.JSGlobalObject) *bun.bake.production.PerThread;
+
 pub const BakeSourceProvider = opaque {
     extern fn BakeSourceProvider__getSourceSlice(*BakeSourceProvider) bun.String;
     pub const getSourceSlice = BakeSourceProvider__getSourceSlice;

@@ -925,6 +954,14 @@ pub const BakeSourceProvider = opaque {
     return ParsedSourceMap.SourceContentPtr.fromBakeProvider(this);
 }

+pub fn getExternal(_: *BakeSourceProvider, global: *bun.JSC.JSGlobalObject, source_filename: []const u8) []const u8 {
+    const pt = BakeGlobalObject__getPerThreadData(global);
+    if (pt.source_maps.get(source_filename)) |value| {
+        return pt.bundled_outputs[value.get()].value.asSlice();
+    }
+    return "";
+}
+
 /// The last two arguments to this specify loading hints
 pub fn getSourceMap(
     provider: *BakeSourceProvider,
@@ -18,7 +18,7 @@ import { Matchers } from "bun:test";
 import { EventEmitter } from "node:events";
 // @ts-ignore
 import { dedent } from "../bundler/expectBundled.ts";
-import { bunEnv, bunExe, isCI, isWindows, mergeWindowEnvs } from "harness";
+import { bunEnv, bunExe, isCI, isWindows, mergeWindowEnvs, tempDirWithFiles } from "harness";
 import { expect } from "bun:test";
 import { exitCodeMapStrings } from "./exit-code-map.mjs";

@@ -318,7 +318,7 @@ export class Dev extends EventEmitter {
 if (wantsHmrEvent && interactive) {
     await seenFiles.promise;
 } else if (wantsHmrEvent) {
-    await Promise.race([seenFiles.promise, Bun.sleep(1000)]);
+    await Promise.race([seenFiles.promise]);
 }
 if (!fastBatches) {
     // Wait an extra delay to avoid double-triggering events.
@@ -1415,6 +1415,90 @@ async function installReactWithCache(root: string) {
  }
}

// Global React cache management
let reactCachePromise: Promise<void> | null = null;

/**
 * Ensures the React cache is populated. This is a global operation that
 * only happens once per test run.
 */
export async function ensureReactCache(): Promise<void> {
  if (!reactCachePromise) {
    reactCachePromise = (async () => {
      const cacheFiles = ["node_modules", "package.json", "bun.lock"];
      const cacheValid = cacheFiles.every(file => fs.existsSync(path.join(reactCacheDir, file)));

      if (!cacheValid) {
        // Create a temporary directory for installation
        const tempInstallDir = fs.mkdtempSync(path.join(tempDir, "react-install-"));

        // Create a minimal package.json
        fs.writeFileSync(
          path.join(tempInstallDir, "package.json"),
          JSON.stringify({
            name: "react-cache-install",
            version: "1.0.0",
            private: true,
          }),
        );

        try {
          // Install React packages
          await Bun.$`${bunExe()} i react@experimental react-dom@experimental react-server-dom-bun react-refresh@experimental && ${bunExe()} install`
            .cwd(tempInstallDir)
            .env({ ...bunEnv })
            .throws(true);

          // Copy to cache
          for (const file of cacheFiles) {
            const src = path.join(tempInstallDir, file);
            const dest = path.join(reactCacheDir, file);
            if (fs.existsSync(src)) {
              if (fs.statSync(src).isDirectory()) {
                fs.cpSync(src, dest, { recursive: true, force: true });
              } else {
                fs.copyFileSync(src, dest);
              }
            }
          }
        } finally {
          // Clean up temp directory
          fs.rmSync(tempInstallDir, { recursive: true, force: true });
        }
      }
    })();
  }

  return reactCachePromise;
}

/**
 * Copies cached React dependencies to the specified directory.
 * This ensures React is available without running install.
 */
export async function copyCachedReactDeps(root: string): Promise<void> {
  // Ensure cache is populated
  await ensureReactCache();

  // Copy node_modules from cache to target directory
  const src = path.join(reactCacheDir, "node_modules");
  const dest = path.join(root, "node_modules");

  if (fs.existsSync(src)) {
    fs.cpSync(src, dest, { recursive: true, force: true });
  }
}

/**
 * Creates a temporary directory with files and React dependencies pre-installed.
 * This is a convenience wrapper that combines tempDirWithFiles with copyCachedReactDeps.
 */
export async function tempDirWithBakeDeps(name: string, files: Record<string, string>): Promise<string> {
  const dir = tempDirWithFiles(name, files);
  await copyCachedReactDeps(dir);
  return dir;
}

const devTestRoot = path.join(import.meta.dir, "dev").replaceAll("\\", "/");
const prodTestRoot = path.join(import.meta.dir, "dev").replaceAll("\\", "/");
const counts: Record<string, number> = {};
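For context, a minimal usage sketch of the new helper (hypothetical test code, assuming only the exports added above):

  import { tempDirWithBakeDeps } from "../bake-harness";

  // The first call populates the shared React cache via ensureReactCache();
  // subsequent calls only copy the cached node_modules into the new temp dir.
  const dir = await tempDirWithBakeDeps("example", {
    "pages/index.tsx": `export default () => <h1>hi</h1>;`,
  });
  // `dir` now contains the files above plus a pre-installed node_modules tree.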
@@ -1632,6 +1716,7 @@ function testImpl<T extends DevServerTest>(
  await writeAll(root, options.files);
  const runInstall = options.framework === "react";
  if (runInstall) {
+   // await copyCachedReactDeps(root);
    await installReactWithCache(root);
  }
  if (options.files["bun.app.ts"] == undefined && htmlFiles.length === 0) {
40  test/bake/dev/import-meta-inline-negative.test.ts  Normal file
@@ -0,0 +1,40 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";

test("import.meta properties are NOT inlined without bake framework", async () => {
  const dir = tempDirWithFiles("import-meta-no-inline", {
    "index.ts": `
      console.log("dir:", import.meta.dir);
      console.log("dirname:", import.meta.dirname);
      console.log("file:", import.meta.file);
      console.log("path:", import.meta.path);
      console.log("url:", import.meta.url);
    `,
  });

  // Run without bundling - should show actual values
  await using proc = Bun.spawn({
    cmd: [bunExe(), "index.ts"],
    env: bunEnv,
    cwd: dir,
  });

  const [stdout, stderr, exitCode] = await Promise.all([
    new Response(proc.stdout).text(),
    new Response(proc.stderr).text(),
    proc.exited,
  ]);

  expect(exitCode).toBe(0);
  expect(stderr).toBe("");

  // When not bundled, these properties should resolve to actual values
  expect(stdout).toContain("dir:");
  expect(stdout).toContain("dirname:");
  expect(stdout).toContain("file:");
  expect(stdout).toContain("path:");
  expect(stdout).toContain("url:");

  // The values should NOT be inlined - they should be the actual runtime values
  expect(stdout).not.toContain("undefined");
});
298  test/bake/dev/import-meta-inline.test.ts  Normal file
@@ -0,0 +1,298 @@
// import.meta properties are inlined at parse time in Bake
import { expect } from "bun:test";
import { devTest, emptyHtmlFile, minimalFramework } from "../bake-harness";
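// Rough sketch of the transform under test (hypothetical input/output, for
// illustration only):
//   source:             console.log(import.meta.file);
//   after Bake parses:  console.log("index.ts");
// i.e. the property access is replaced with a string literal at parse time,
// so the served code carries the value rather than a runtime lookup.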
devTest("import.meta properties are inlined in bake", {
|
||||
framework: minimalFramework,
|
||||
files: {
|
||||
"routes/index.ts": `
|
||||
export default function (req, meta) {
|
||||
return Response.json({
|
||||
dir: import.meta.dir,
|
||||
dirname: import.meta.dirname,
|
||||
file: import.meta.file,
|
||||
path: import.meta.path,
|
||||
url: import.meta.url,
|
||||
});
|
||||
}
|
||||
`,
|
||||
},
|
||||
async test(dev) {
|
||||
const response = await dev.fetch("/");
|
||||
const json = await response.json();
|
||||
|
||||
// Check that all properties are strings, not undefined
|
||||
expect(typeof json.dir).toBe("string");
|
||||
expect(typeof json.dirname).toBe("string");
|
||||
expect(typeof json.file).toBe("string");
|
||||
expect(typeof json.path).toBe("string");
|
||||
expect(typeof json.url).toBe("string");
|
||||
|
||||
// Check that dir and dirname are the same
|
||||
expect(json.dir).toBe(json.dirname);
|
||||
|
||||
// Check that file is just the filename
|
||||
expect(json.file).toBe("index.ts");
|
||||
|
||||
// Check that path contains the full path including filename
|
||||
expect(json.path).toContain("routes/index.ts");
|
||||
expect(json.path).toEndWith("index.ts");
|
||||
|
||||
// Check that url is a file:// URL
|
||||
expect(json.url).toStartWith("file://");
|
||||
expect(json.url).toContain("routes/index.ts");
|
||||
},
|
||||
});
|
||||
|
||||
devTest("import.meta properties work with dynamic updates", {
|
||||
framework: minimalFramework,
|
||||
files: {
|
||||
"routes/test.ts": `
|
||||
export default function (req, meta) {
|
||||
const values = [
|
||||
"dir: " + import.meta.dir,
|
||||
"file: " + import.meta.file,
|
||||
"path: " + import.meta.path,
|
||||
];
|
||||
return new Response(values.join("\\n"));
|
||||
}
|
||||
`,
|
||||
},
|
||||
async test(dev) {
|
||||
const response = await dev.fetch("/test");
|
||||
const text = await response.text();
|
||||
|
||||
// Verify the values are inlined strings
|
||||
expect(text).toContain("dir: ");
|
||||
expect(text).toContain("file: test.ts");
|
||||
expect(text).toContain("path: ");
|
||||
expect(text).toContain("routes/test.ts");
|
||||
|
||||
// Update the file with a meaningful change
|
||||
await dev.patch("routes/test.ts", {
|
||||
find: '"dir: "',
|
||||
replace: '"directory: "',
|
||||
});
|
||||
|
||||
const response2 = await dev.fetch("/test");
|
||||
const text2 = await response2.text();
|
||||
|
||||
// After the patch, the first line should say "directory:" instead of "dir:"
|
||||
expect(text2).toContain("directory: ");
|
||||
expect(text2).toContain("file: test.ts");
|
||||
expect(text2).toContain("path: ");
|
||||
expect(text2).toContain("routes/test.ts");
|
||||
},
|
||||
});
|
||||
|
||||
devTest("import.meta properties with nested directories", {
|
||||
framework: minimalFramework,
|
||||
files: {
|
||||
"routes/api/v1/handler.ts": `
|
||||
export default function (req, meta) {
|
||||
return Response.json({
|
||||
dir: import.meta.dir,
|
||||
file: import.meta.file,
|
||||
path: import.meta.path,
|
||||
url: import.meta.url,
|
||||
});
|
||||
}
|
||||
`,
|
||||
},
|
||||
async test(dev) {
|
||||
const response = await dev.fetch("/api/v1/handler");
|
||||
const json = await response.json();
|
||||
|
||||
expect(json.file).toBe("handler.ts");
|
||||
expect(json.path).toContain("routes/api/v1/handler.ts");
|
||||
expect(json.dir).toContain("routes/api/v1");
|
||||
expect(json.url).toMatch(/^file:\/\/.*routes\/api\/v1\/handler\.ts$/);
|
||||
},
|
||||
});
|
||||
|
||||
devTest("import.meta properties in client-side code show runtime values", {
|
||||
framework: minimalFramework,
|
||||
files: {
|
||||
"test_import_meta_inline.js": `
|
||||
// Test file for import.meta inlining
|
||||
console.log("import.meta.dir:", import.meta.dir);
|
||||
console.log("import.meta.dirname:", import.meta.dirname);
|
||||
console.log("import.meta.file:", import.meta.file);
|
||||
console.log("import.meta.path:", import.meta.path);
|
||||
console.log("import.meta.url:", import.meta.url);
|
||||
`,
|
||||
"index.html": emptyHtmlFile({
|
||||
scripts: ["test_import_meta_inline.js"],
|
||||
}),
|
||||
},
|
||||
async test(dev) {
|
||||
await using c = await dev.client("/");
|
||||
|
||||
// In client-side code, import.meta properties show runtime values
|
||||
// They are NOT inlined because this is not server-side code
|
||||
const messages = [
|
||||
await c.getStringMessage(),
|
||||
await c.getStringMessage(),
|
||||
await c.getStringMessage(),
|
||||
await c.getStringMessage(),
|
||||
await c.getStringMessage(),
|
||||
];
|
||||
|
||||
// Verify all properties are logged
|
||||
expect(messages.some(m => m.startsWith("import.meta.dir:"))).toBe(true);
|
||||
expect(messages.some(m => m.startsWith("import.meta.dirname:"))).toBe(true);
|
||||
expect(messages.some(m => m.startsWith("import.meta.file:"))).toBe(true);
|
||||
expect(messages.some(m => m.startsWith("import.meta.path:"))).toBe(true);
|
||||
expect(messages.some(m => m.startsWith("import.meta.url:"))).toBe(true);
|
||||
},
|
||||
});
|
||||
|
||||
devTest("import.meta properties in catch-all routes", {
|
||||
framework: minimalFramework,
|
||||
files: {
|
||||
"routes/blog/[...slug].ts": `
|
||||
export default function BlogPost(req, meta) {
|
||||
const url = new URL(req.url);
|
||||
const slug = url.pathname.replace('/blog/', '').split('/').filter(Boolean);
|
||||
|
||||
const metaInfo = {
|
||||
file: import.meta.file,
|
||||
dir: import.meta.dir,
|
||||
path: import.meta.path,
|
||||
url: import.meta.url,
|
||||
dirname: import.meta.dirname,
|
||||
};
|
||||
|
||||
return Response.json({
|
||||
slug: slug,
|
||||
title: slug.map(s => s.charAt(0).toUpperCase() + s.slice(1)).join(' '),
|
||||
meta: metaInfo,
|
||||
content: "This is a blog post at: " + slug.join('/'),
|
||||
});
|
||||
}
|
||||
`,
|
||||
},
|
||||
async test(dev) {
|
||||
// Test single segment
|
||||
const post1 = await dev.fetch("/blog/hello");
|
||||
const json1 = await post1.json();
|
||||
|
||||
expect(json1.slug).toEqual(["hello"]);
|
||||
expect(json1.title).toBe("Hello");
|
||||
expect(json1.content).toBe("This is a blog post at: hello");
|
||||
|
||||
// Verify import.meta properties are inlined
|
||||
expect(json1.meta.file).toBe("[...slug].ts");
|
||||
expect(json1.meta.dir).toContain("routes/blog");
|
||||
expect(json1.meta.dirname).toBe(json1.meta.dir);
|
||||
expect(json1.meta.path).toContain("routes/blog/[...slug].ts");
|
||||
expect(json1.meta.url).toMatch(/^file:\/\/.*routes\/blog\/\[\.\.\.slug\]\.ts$/);
|
||||
|
||||
// Test multiple segments
|
||||
const post2 = await dev.fetch("/blog/2024/tech/bun-framework");
|
||||
const json2 = await post2.json();
|
||||
|
||||
expect(json2.slug).toEqual(["2024", "tech", "bun-framework"]);
|
||||
expect(json2.title).toBe("2024 Tech Bun-framework");
|
||||
expect(json2.content).toBe("This is a blog post at: 2024/tech/bun-framework");
|
||||
|
||||
// Meta properties should be the same regardless of the route
|
||||
expect(json2.meta.file).toBe("[...slug].ts");
|
||||
expect(json2.meta.path).toContain("routes/blog/[...slug].ts");
|
||||
|
||||
// Test empty slug (just /blog/)
|
||||
const post3 = await dev.fetch("/blog/");
|
||||
const json3 = await post3.json();
|
||||
|
||||
expect(json3.slug).toEqual([]);
|
||||
expect(json3.title).toBe("");
|
||||
expect(json3.content).toBe("This is a blog post at: ");
|
||||
},
|
||||
});
|
||||
|
||||
devTest("import.meta properties in nested catch-all routes with static siblings", {
|
||||
framework: minimalFramework,
|
||||
files: {
|
||||
"routes/docs/[...path].ts": `
|
||||
export default function DocsPage(req, meta) {
|
||||
const url = new URL(req.url);
|
||||
const path = url.pathname.replace('/docs/', '').split('/').filter(Boolean);
|
||||
|
||||
return Response.json({
|
||||
type: "catch-all",
|
||||
path: path,
|
||||
file: import.meta.file,
|
||||
dir: import.meta.dir,
|
||||
fullPath: import.meta.path,
|
||||
});
|
||||
}
|
||||
`,
|
||||
"routes/docs/api.ts": `
|
||||
export default function ApiDocs(req, meta) {
|
||||
return Response.json({
|
||||
type: "static",
|
||||
page: "API Documentation",
|
||||
file: import.meta.file,
|
||||
dir: import.meta.dir,
|
||||
fullPath: import.meta.path,
|
||||
});
|
||||
}
|
||||
`,
|
||||
"routes/docs/getting-started.ts": `
|
||||
export default function GettingStarted(req, meta) {
|
||||
return Response.json({
|
||||
type: "static",
|
||||
page: "Getting Started",
|
||||
file: import.meta.file,
|
||||
dir: import.meta.dir,
|
||||
fullPath: import.meta.path,
|
||||
});
|
||||
}
|
||||
`,
|
||||
},
|
||||
async test(dev) {
|
||||
// Test static route - should match api.ts, not catch-all
|
||||
const apiResponse = await dev.fetch("/docs/api");
|
||||
const apiJson = await apiResponse.json();
|
||||
|
||||
expect(apiJson.type).toBe("static");
|
||||
expect(apiJson.page).toBe("API Documentation");
|
||||
expect(apiJson.file).toBe("api.ts");
|
||||
expect(apiJson.dir).toContain("routes/docs");
|
||||
expect(apiJson.fullPath).toContain("routes/docs/api.ts");
|
||||
|
||||
// Test another static route
|
||||
const startResponse = await dev.fetch("/docs/getting-started");
|
||||
const startJson = await startResponse.json();
|
||||
|
||||
expect(startJson.type).toBe("static");
|
||||
expect(startJson.page).toBe("Getting Started");
|
||||
expect(startJson.file).toBe("getting-started.ts");
|
||||
expect(startJson.fullPath).toContain("routes/docs/getting-started.ts");
|
||||
|
||||
// Test catch-all route - should match for non-static paths
|
||||
const guideResponse = await dev.fetch("/docs/guides/advanced/optimization");
|
||||
const guideText = await guideResponse.text();
|
||||
console.log("Guide text", guideText);
|
||||
const guideJson = await guideResponse.json();
|
||||
|
||||
expect(guideJson.type).toBe("catch-all");
|
||||
expect(guideJson.path).toEqual(["guides", "advanced", "optimization"]);
|
||||
expect(guideJson.file).toBe("[...path].ts");
|
||||
expect(guideJson.dir).toContain("routes/docs");
|
||||
expect(guideJson.fullPath).toContain("routes/docs/[...path].ts");
|
||||
|
||||
// Update catch-all route and verify import.meta values remain inlined
|
||||
await dev.patch("routes/docs/[...path].ts", {
|
||||
find: '"catch-all"',
|
||||
replace: '"dynamic-catch-all"',
|
||||
});
|
||||
|
||||
const updatedResponse = await dev.fetch("/docs/tutorials/intro");
|
||||
const updatedJson = await updatedResponse.json();
|
||||
|
||||
expect(updatedJson.type).toBe("dynamic-catch-all");
|
||||
expect(updatedJson.file).toBe("[...path].ts");
|
||||
expect(updatedJson.fullPath).toContain("routes/docs/[...path].ts");
|
||||
},
|
||||
});
|
||||
320  test/bake/dev/production.test.ts  Normal file
@@ -0,0 +1,320 @@
import { describe, expect, test } from "bun:test";
import { devTest, minimalFramework, tempDirWithBakeDeps } from "../bake-harness";
import { bunEnv, bunExe } from "harness";
import path from "path";

/**
 * Production build tests
 */
describe("production", () => {
  test("works with sourcemaps - error thrown in React component", async () => {
    const dir = await tempDirWithBakeDeps("bake-production-sourcemap", {
      "src/index.tsx": `export default { app: { framework: "react" } };`,
      "pages/index.tsx": `export default function IndexPage() {
        throw new Error("oh no!");
        return <div>Hello World</div>;
      }`,
      "package.json": JSON.stringify({
        "name": "test-app",
        "version": "1.0.0",
        "devDependencies": {
          "react": "^18.0.0",
          "react-dom": "^18.0.0",
        },
      }),
    });

    // Run the build command
    const {
      exitCode: buildExitCode,
      stdout: buildStdout,
      stderr: buildStderr,
    } = await Bun.$`${bunExe()} build --app ./src/index.tsx`.cwd(dir).throws(false);

    // The build should fail due to the runtime error during SSG
    expect(buildExitCode).toBe(1);

    // Check that the error message shows the proper source location
    expect(buildStderr.toString()).toContain("throw new Error");
    expect(buildStderr.toString()).toContain("oh no!");
  });
test("import.meta properties are inlined in production build", async () => {
|
||||
const dir = await tempDirWithBakeDeps("bake-production-import-meta", {
|
||||
"src/index.tsx": `export default {
|
||||
app: {
|
||||
framework: "react",
|
||||
}
|
||||
};`,
|
||||
"pages/index.tsx": `
|
||||
export default function IndexPage() {
|
||||
const metaInfo = {
|
||||
dir: import.meta.dir,
|
||||
dirname: import.meta.dirname,
|
||||
file: import.meta.file,
|
||||
path: import.meta.path,
|
||||
url: import.meta.url,
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h1>Import Meta Test</h1>
|
||||
<pre>{JSON.stringify(metaInfo, null, 2)}</pre>
|
||||
<div id="meta-data" style={{display: 'none'}}>{JSON.stringify(metaInfo)}</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
`,
|
||||
"pages/api/test.tsx": `
|
||||
export default function TestPage() {
|
||||
const values = [
|
||||
"dir=" + import.meta.dir,
|
||||
"dirname=" + import.meta.dirname,
|
||||
"file=" + import.meta.file,
|
||||
"path=" + import.meta.path,
|
||||
"url=" + import.meta.url,
|
||||
];
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h1>API Test</h1>
|
||||
<pre>{values.join("\\n")}</pre>
|
||||
<div id="api-meta-data" style={{display: 'none'}}>{values.join("|")}</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
`,
|
||||
});
|
||||
|
||||
// Run the build command
|
||||
const buildProc = await Bun.$`${bunExe()} build --app ./src/index.tsx --outdir ./dist`
|
||||
.cwd(dir)
|
||||
.env(bunEnv)
|
||||
.throws(false);
|
||||
|
||||
expect(buildProc.exitCode).toBe(0);
|
||||
|
||||
// Check that the build output contains the generated files
|
||||
const distFiles = await Bun.$`ls -la dist/`.cwd(dir).text();
|
||||
expect(distFiles).toContain("index.html");
|
||||
expect(distFiles).toContain("_bun");
|
||||
|
||||
// In production SSG, the import.meta values are inlined during build time
|
||||
// and rendered into the static HTML. The values should appear in the HTML output.
|
||||
|
||||
// Check the generated static HTML files
|
||||
const indexHtml = await Bun.file(path.join(dir, "dist", "index.html")).text();
|
||||
const apiTestHtml = await Bun.file(path.join(dir, "dist", "api", "test", "index.html")).text();
|
||||
|
||||
// The HTML output should contain the rendered import.meta values
|
||||
// Check for the presence of the expected values in the HTML
|
||||
|
||||
// For the index page, check that it contains the expected file paths
|
||||
expect(indexHtml).toContain("index.tsx");
|
||||
expect(indexHtml).toContain("pages");
|
||||
|
||||
// Check if the HTML contains evidence of import.meta values being used
|
||||
// The exact format might be HTML-escaped, so we check for key patterns
|
||||
const hasIndexPath =
|
||||
indexHtml.includes("pages/index.tsx") ||
|
||||
indexHtml.includes("pages/index.tsx") ||
|
||||
indexHtml.includes("pages\\index.tsx");
|
||||
expect(hasIndexPath).toBe(true);
|
||||
|
||||
// For the API test page
|
||||
expect(apiTestHtml).toContain("test.tsx");
|
||||
expect(apiTestHtml).toContain("pages");
|
||||
|
||||
const hasApiPath =
|
||||
apiTestHtml.includes("pages/api/test.tsx") ||
|
||||
apiTestHtml.includes("pages/api/test.tsx") ||
|
||||
apiTestHtml.includes("pages\\api\\test.tsx");
|
||||
expect(hasApiPath).toBe(true);
|
||||
});
|
||||
|
||||
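  // Illustrative only (hypothetical rendered output, exact paths vary per run):
  // after SSG, the emitted HTML embeds the literal values, e.g.
  //   <div id="meta-data" style="display:none">{"dir":"/tmp/bake-.../pages","file":"index.tsx",...}</div>
  // which is why the assertions above check substrings rather than full paths.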
test("import.meta properties are inlined in catch-all routes during production build", async () => {
|
||||
const dir = await tempDirWithBakeDeps("bake-production-catch-all", {
|
||||
"src/index.tsx": `export default {
|
||||
app: {
|
||||
framework: "react",
|
||||
}
|
||||
};`,
|
||||
"pages/blog/[...slug].tsx": `
|
||||
export default function BlogPost({ params }) {
|
||||
const slug = params.slug || [];
|
||||
|
||||
const metaInfo = {
|
||||
file: import.meta.file,
|
||||
dir: import.meta.dir,
|
||||
path: import.meta.path,
|
||||
url: import.meta.url,
|
||||
dirname: import.meta.dirname,
|
||||
};
|
||||
|
||||
return (
|
||||
<article>
|
||||
<h1>Blog Post: {slug.join(' / ')}</h1>
|
||||
<p>You are reading: {slug.length === 0 ? 'the blog index' : slug.join('/')}</p>
|
||||
<div id="blog-meta" data-file={metaInfo.file} data-dir={metaInfo.dir} data-path={metaInfo.path}>
|
||||
<pre>{JSON.stringify(metaInfo, null, 2)}</pre>
|
||||
</div>
|
||||
</article>
|
||||
);
|
||||
}
|
||||
|
||||
export async function getStaticPaths() {
|
||||
return {
|
||||
paths: [
|
||||
{ params: { slug: ['2024', 'hello-world'] } },
|
||||
{ params: { slug: ['2024', 'tech', 'bun-framework'] } },
|
||||
{ params: { slug: ['tutorials', 'getting-started'] } },
|
||||
],
|
||||
fallback: false,
|
||||
};
|
||||
}
|
||||
`,
|
||||
"pages/docs/[...path].tsx": `
|
||||
export default function DocsPage({ params }) {
|
||||
const path = params.path || [];
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h1>Documentation</h1>
|
||||
<nav aria-label="Breadcrumb">
|
||||
<ol>
|
||||
<li>Docs</li>
|
||||
{path.map((segment, i) => (
|
||||
<li key={i}>{segment}</li>
|
||||
))}
|
||||
</ol>
|
||||
</nav>
|
||||
<div id="docs-content">
|
||||
<p>Reading docs at: /{path.join('/')}</p>
|
||||
<div id="docs-meta" style={{display: 'none'}}>
|
||||
<span data-file={import.meta.file}></span>
|
||||
<span data-dir={import.meta.dir}></span>
|
||||
<span data-path={import.meta.path}></span>
|
||||
<span data-url={import.meta.url}></span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export async function getStaticPaths() {
|
||||
return {
|
||||
paths: [
|
||||
{ params: { path: ['api', 'reference'] } },
|
||||
{ params: { path: ['guides', 'advanced', 'optimization'] } },
|
||||
{ params: { path: [] } }, // docs index
|
||||
],
|
||||
fallback: false,
|
||||
};
|
||||
}
|
||||
`,
|
||||
"pages/docs/getting-started.tsx": `
|
||||
export default function GettingStarted() {
|
||||
return (
|
||||
<div>
|
||||
<h1>Getting Started</h1>
|
||||
<p>This is a static page, not a catch-all route.</p>
|
||||
<div id="static-meta" style={{display: 'none'}}>
|
||||
<span data-file={import.meta.file}></span>
|
||||
<span data-path={import.meta.path}></span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
`,
|
||||
});
|
||||
|
||||
// Run the build command
|
||||
const buildProc = await Bun.$`${bunExe()} build --app ./src/index.tsx --outdir ./dist`
|
||||
.cwd(dir)
|
||||
.env(bunEnv)
|
||||
.throws(false);
|
||||
|
||||
expect(buildProc.exitCode).toBe(0);
|
||||
|
||||
// Check that the build output contains the generated files
|
||||
const distFiles = await Bun.$`find dist -name "*.html" -type f | sort`.cwd(dir).text();
|
||||
const htmlFiles = distFiles.trim().split("\n").filter(Boolean);
|
||||
|
||||
// Should have generated all the static paths
|
||||
// Note: React's routing may flatten the paths
|
||||
expect(htmlFiles).toContain("dist/2024/hello-world/index.html");
|
||||
expect(htmlFiles).toContain("dist/2024/tech/bun-framework/index.html");
|
||||
expect(htmlFiles).toContain("dist/tutorials/getting-started/index.html");
|
||||
expect(htmlFiles).toContain("dist/api/reference/index.html");
|
||||
expect(htmlFiles).toContain("dist/guides/advanced/optimization/index.html");
|
||||
expect(htmlFiles).toContain("dist/index.html");
|
||||
expect(htmlFiles).toContain("dist/docs/getting-started/index.html");
|
||||
|
||||
// Check blog post with multiple segments
|
||||
const blogPostHtml = await Bun.file(path.join(dir, "dist", "2024", "tech", "bun-framework", "index.html")).text();
|
||||
|
||||
// Verify the content is rendered (may include HTML comments)
|
||||
expect(blogPostHtml).toContain("Blog Post:");
|
||||
expect(blogPostHtml).toContain("2024 / tech / bun-framework");
|
||||
expect(blogPostHtml).toContain("You are reading:");
|
||||
expect(blogPostHtml).toContain("2024/tech/bun-framework");
|
||||
|
||||
// Check that import.meta values are inlined in the HTML
|
||||
expect(blogPostHtml).toContain('data-file="[...slug].tsx"');
|
||||
expect(blogPostHtml).toContain("data-dir=");
|
||||
expect(blogPostHtml).toContain('/pages/blog"'); // The full path will include the temp directory
|
||||
expect(blogPostHtml).toContain("data-path=");
|
||||
expect(blogPostHtml).toContain('/pages/blog/[...slug].tsx"');
|
||||
|
||||
// Check docs catch-all route
|
||||
const docsHtml = await Bun.file(path.join(dir, "dist", "guides", "advanced", "optimization", "index.html")).text();
|
||||
|
||||
expect(docsHtml).toContain("Reading docs at:");
|
||||
expect(docsHtml).toContain("guides/advanced/optimization");
|
||||
expect(docsHtml).toContain('data-file="[...path].tsx"');
|
||||
expect(docsHtml).toContain('/pages/docs/[...path].tsx"');
|
||||
|
||||
// Check that the static getting-started page uses its own file name, not the catch-all
|
||||
const staticHtml = await Bun.file(path.join(dir, "dist", "docs", "getting-started", "index.html")).text();
|
||||
|
||||
expect(staticHtml).toContain("Getting Started");
|
||||
expect(staticHtml).toContain("This is a static page");
|
||||
expect(staticHtml).toContain('data-file="getting-started.tsx"');
|
||||
expect(staticHtml).toContain('/pages/docs/getting-started.tsx"');
|
||||
expect(staticHtml).not.toContain("[...path].tsx");
|
||||
|
||||
// Verify that import.meta values are consistent across all catch-all instances
|
||||
const blogIndex = await Bun.file(path.join(dir, "dist", "tutorials", "getting-started", "index.html")).text();
|
||||
expect(blogIndex).toContain('data-file="[...slug].tsx"');
|
||||
expect(blogIndex).toContain('/pages/blog/[...slug].tsx"');
|
||||
});
|
||||
|
||||
test("handles build with no pages directory without crashing", async () => {
|
||||
const dir = await tempDirWithBakeDeps("bake-production-no-pages", {
|
||||
"app.ts": `export default { app: { framework: "react" } };`,
|
||||
"package.json": JSON.stringify({
|
||||
"name": "test-app",
|
||||
"version": "1.0.0",
|
||||
"devDependencies": {
|
||||
"react": "^18.0.0",
|
||||
"react-dom": "^18.0.0",
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
// Run the build command - should not crash even with no pages
|
||||
const { exitCode, stderr } = await Bun.$`${bunExe()} build --app ./app.ts`
|
||||
.cwd(dir)
|
||||
.throws(false);
|
||||
|
||||
// The build should complete successfully (or fail gracefully, not crash)
|
||||
// We're testing that it doesn't crash with the StringBuilder assertion
|
||||
expect(exitCode).toBeDefined();
|
||||
|
||||
// If it fails, it should be a graceful failure, not a crash
|
||||
if (exitCode !== 0) {
|
||||
expect(stderr.toString()).not.toContain("reached unreachable code");
|
||||
expect(stderr.toString()).not.toContain("assert(this.cap > 0)");
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -293,3 +293,34 @@ devTest("SSG pages router - file loading with Bun.file", {
    expect(await c2.elemText("div div")).toBe("This is the second post content");
  },
});

devTest("SSG pages router - named import edge case", {
  framework: "react",
  fixture: "ssg-pages-router",
  files: {
    "pages/index.tsx": `
      import Markdoc, * as md from '../src/ooga'

      console.log(md);

      export default function IndexPage() {
        return <h1>Welcome to SSG</h1>;
      }
    `,
    "src/ooga.ts": `var Markdoc = function () {
      return {
        parse: () => {},
        transform: () => {},
      };
    };

    export { Markdoc as default };`,
    "posts/hello-world.txt": "This is the content of hello world post",
    "posts/second-post.txt": "This is the second post content",
  },
  async test(dev) {
    // Should not error
    await using c1 = await dev.client("/");
    expect(await c1.elemText("h1")).toBe("Welcome to SSG");
  },
});