Compare commits

...

5 Commits

Author SHA1 Message Date
pfg
fc6a168981 . 2025-01-06 13:29:20 -08:00
pfg
ef5427ddc6 Merge branch 'main' into pfg/fix-15536 2025-01-06 13:10:50 -08:00
pfg
93fdbed85d add back whitespace checks but limited to 33 bytes 2024-12-06 20:03:20 -08:00
pfg
1982df220b remove empty file checks & remove isAllWhitespace 2024-12-06 15:37:50 -08:00
pfg
7cd823feeb Fix importing empty .txt file 2024-12-06 15:23:40 -08:00
9 changed files with 219 additions and 228 deletions

View File

@@ -2051,7 +2051,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
if (Environment.allow_assert) {
switch (kind) {
.css => bun.assert(code.len == 0),
.js => if (bun.strings.isAllWhitespace(code)) {
.js => if (bun.strings.isSmallAndOnlyWhitespace(code)) {
// Should at least contain the function wrapper
bun.Output.panic("Empty chunk is impossible: {s} {s}", .{
key,

View File

@@ -3634,6 +3634,10 @@ pub const ParseTask = struct {
) !JSAst {
switch (loader) {
.jsx, .tsx, .js, .ts => {
if (bun.strings.isSmallAndOnlyWhitespace(source.contents)) return switch (opts.module_type == .esm) {
inline else => |as_undefined| try getEmptyAST(log, transpiler, opts, allocator, source, if (as_undefined) E.Undefined else E.Object),
};
const trace = tracer(@src(), "ParseJS");
defer trace.end();
return if (try resolver.caches.js.parse(
@@ -3656,12 +3660,20 @@ pub const ParseTask = struct {
};
},
.json => {
if (strings.isSmallAndOnlyWhitespace(source.contents)) return switch (opts.module_type == .esm) {
inline else => |as_undefined| try getEmptyAST(log, transpiler, opts, allocator, source, if (as_undefined) E.Undefined else E.Object),
};
const trace = tracer(@src(), "ParseJSON");
defer trace.end();
const root = (try resolver.caches.json.parsePackageJSON(log, source, allocator, false)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty);
return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?);
},
.toml => {
if (strings.isSmallAndOnlyWhitespace(source.contents)) return switch (opts.module_type == .esm) {
inline else => |as_undefined| try getEmptyAST(log, transpiler, opts, allocator, source, if (as_undefined) E.Undefined else E.Object),
};
const trace = tracer(@src(), "ParseTOML");
defer trace.end();
const root = try TOML.parse(&source, log, allocator, false);
@@ -4361,9 +4373,7 @@ pub const ParseTask = struct {
}
step.* = .parse;
const is_empty = strings.isAllWhitespace(entry.contents);
const use_directive: UseDirective = if (!is_empty and transpiler.options.server_components)
const use_directive: UseDirective = if (transpiler.options.server_components)
if (UseDirective.parse(entry.contents)) |use|
use
else
@@ -4456,24 +4466,7 @@ pub const ParseTask = struct {
task.jsx.parse = loader.isJSX();
var unique_key_for_additional_file: []const u8 = "";
var ast: JSAst = if (!is_empty)
try getAST(log, transpiler, opts, allocator, resolver, source, loader, task.ctx.unique_key, &unique_key_for_additional_file)
else switch (opts.module_type == .esm) {
inline else => |as_undefined| if (loader == .css and this.ctx.transpiler.options.experimental.css) try getEmptyCSSAST(
log,
transpiler,
opts,
allocator,
source,
) else try getEmptyAST(
log,
transpiler,
opts,
allocator,
source,
if (as_undefined) E.Undefined else E.Object,
),
};
var ast: JSAst = try getAST(log, transpiler, opts, allocator, resolver, source, loader, task.ctx.unique_key, &unique_key_for_additional_file);
ast.target = target;
if (ast.parts.len <= 1 and ast.css == null and (task.loader == null or task.loader.? != .html)) {

View File

@@ -4872,10 +4872,8 @@ pub fn trim(slice: anytype, comptime values_to_strip: []const u8) @TypeOf(slice)
return slice[begin..end];
}
pub fn isAllWhitespace(slice: []const u8) bool {
var begin: usize = 0;
while (begin < slice.len and std.mem.indexOfScalar(u8, &whitespace_chars, slice[begin]) != null) : (begin += 1) {}
return begin == slice.len;
/// Returns true when `slice` is effectively an empty source file: either
/// zero-length, or shorter than 33 bytes and consisting only of whitespace.
///
/// The 33-byte cap deliberately bounds the scan so this stays a cheap
/// "is this file empty?" probe rather than a full-buffer whitespace check
/// (see the replaced `isAllWhitespace`, which scanned the whole slice).
pub fn isSmallAndOnlyWhitespace(slice: string) bool {
    // Trim against the shared `whitespace_chars` set (space, \t, \n, \r, VT, FF)
    // so the whitespace definition stays consistent with `trim` in this file.
    // The previous literal "\n\r " silently excluded tabs, so a file containing
    // only '\t' characters was wrongly treated as non-empty.
    return slice.len == 0 or (slice.len < 33 and std.mem.trimLeft(u8, slice, &whitespace_chars).len == 0);
}
pub const whitespace_chars = [_]u8{ ' ', '\t', '\n', '\r', std.ascii.control_code.vt, std.ascii.control_code.ff };

View File

@@ -635,204 +635,6 @@ pub const Transpiler = struct {
empty: bool = false,
};
pub fn buildWithResolveResult(
transpiler: *Transpiler,
resolve_result: _resolver.Result,
allocator: std.mem.Allocator,
loader: options.Loader,
comptime Writer: type,
writer: Writer,
comptime import_path_format: options.BundleOptions.ImportPathFormat,
file_descriptor: ?StoredFileDescriptorType,
filepath_hash: u32,
comptime WatcherType: type,
watcher: *WatcherType,
client_entry_point: ?*EntryPoints.ClientEntryPoint,
origin: URL,
comptime is_source_map: bool,
source_map_handler: ?js_printer.SourceMapHandler,
) !BuildResolveResultPair {
if (resolve_result.is_external) {
return BuildResolveResultPair{
.written = 0,
.input_fd = null,
};
}
errdefer transpiler.resetStore();
var file_path = (resolve_result.pathConst() orelse {
return BuildResolveResultPair{
.written = 0,
.input_fd = null,
};
}).*;
if (strings.indexOf(file_path.text, transpiler.fs.top_level_dir)) |i| {
file_path.pretty = file_path.text[i + transpiler.fs.top_level_dir.len ..];
} else if (!file_path.is_symlink) {
file_path.pretty = allocator.dupe(u8, transpiler.fs.relativeTo(file_path.text)) catch unreachable;
}
const old_bundler_allocator = transpiler.allocator;
transpiler.allocator = allocator;
defer transpiler.allocator = old_bundler_allocator;
const old_linker_allocator = transpiler.linker.allocator;
defer transpiler.linker.allocator = old_linker_allocator;
transpiler.linker.allocator = allocator;
switch (loader) {
.css => {
const CSSBundlerHMR = Css.NewBundler(
Writer,
@TypeOf(&transpiler.linker),
@TypeOf(&transpiler.resolver.caches.fs),
WatcherType,
@TypeOf(transpiler.fs),
true,
import_path_format,
);
const CSSBundler = Css.NewBundler(
Writer,
@TypeOf(&transpiler.linker),
@TypeOf(&transpiler.resolver.caches.fs),
WatcherType,
@TypeOf(transpiler.fs),
false,
import_path_format,
);
const written = brk: {
if (transpiler.options.hot_module_reloading) {
break :brk (try CSSBundlerHMR.bundle(
file_path.text,
transpiler.fs,
writer,
watcher,
&transpiler.resolver.caches.fs,
filepath_hash,
file_descriptor,
allocator,
transpiler.log,
&transpiler.linker,
origin,
)).written;
} else {
break :brk (try CSSBundler.bundle(
file_path.text,
transpiler.fs,
writer,
watcher,
&transpiler.resolver.caches.fs,
filepath_hash,
file_descriptor,
allocator,
transpiler.log,
&transpiler.linker,
origin,
)).written;
}
};
return BuildResolveResultPair{
.written = written,
.input_fd = file_descriptor,
};
},
else => {
var result = transpiler.parse(
ParseOptions{
.allocator = allocator,
.path = file_path,
.loader = loader,
.dirname_fd = resolve_result.dirname_fd,
.file_descriptor = file_descriptor,
.file_hash = filepath_hash,
.macro_remappings = transpiler.options.macro_remap,
.emit_decorator_metadata = resolve_result.emit_decorator_metadata,
.jsx = resolve_result.jsx,
},
client_entry_point,
) orelse {
transpiler.resetStore();
return BuildResolveResultPair{
.written = 0,
.input_fd = null,
};
};
if (result.empty) {
return BuildResolveResultPair{ .written = 0, .input_fd = result.input_fd, .empty = true };
}
if (transpiler.options.target.isBun()) {
if (!transpiler.options.transform_only) {
try transpiler.linker.link(file_path, &result, origin, import_path_format, false, true);
}
return BuildResolveResultPair{
.written = switch (result.ast.exports_kind) {
.esm => try transpiler.printWithSourceMapMaybe(
result.ast,
&result.source,
Writer,
writer,
.esm_ascii,
is_source_map,
source_map_handler,
null,
),
.cjs => try transpiler.printWithSourceMapMaybe(
result.ast,
&result.source,
Writer,
writer,
.cjs,
is_source_map,
source_map_handler,
null,
),
else => unreachable,
},
.input_fd = result.input_fd,
};
}
if (!transpiler.options.transform_only) {
try transpiler.linker.link(file_path, &result, origin, import_path_format, false, false);
}
return BuildResolveResultPair{
.written = switch (result.ast.exports_kind) {
.none, .esm => try transpiler.printWithSourceMapMaybe(
result.ast,
&result.source,
Writer,
writer,
.esm,
is_source_map,
source_map_handler,
null,
),
.cjs => try transpiler.printWithSourceMapMaybe(
result.ast,
&result.source,
Writer,
writer,
.cjs,
is_source_map,
source_map_handler,
null,
),
else => unreachable,
},
.input_fd = result.input_fd,
};
},
}
}
pub fn buildWithResolveResultEager(
transpiler: *Transpiler,
resolve_result: _resolver.Result,
@@ -1330,16 +1132,16 @@ pub const Transpiler = struct {
return ParseResult{ .source = source, .input_fd = input_fd, .loader = loader, .empty = true, .ast = js_ast.Ast.empty };
}
if (loader != .wasm and source.contents.len == 0 and source.contents.len < 33 and std.mem.trim(u8, source.contents, "\n\r ").len == 0) {
return ParseResult{ .source = source, .input_fd = input_fd, .loader = loader, .empty = true, .ast = js_ast.Ast.empty };
}
switch (loader) {
.js,
.jsx,
.ts,
.tsx,
=> {
if (bun.strings.isSmallAndOnlyWhitespace(source.contents)) {
return ParseResult{ .source = source, .input_fd = input_fd, .loader = loader, .empty = true, .ast = js_ast.Ast.empty };
}
// wasm magic number
if (source.isWebAssembly()) {
return ParseResult{
@@ -1454,6 +1256,10 @@ pub const Transpiler = struct {
},
// TODO: use lazy export AST
inline .toml, .json => |kind| {
if (bun.strings.isSmallAndOnlyWhitespace(source.contents)) {
return ParseResult{ .source = source, .input_fd = input_fd, .loader = loader, .empty = true, .ast = js_ast.Ast.empty };
}
var expr = if (kind == .json)
// We allow importing tsconfig.*.json or jsconfig.*.json with comments
// These files implicitly become JSONC files, which aligns with the behavior of text editors.

View File

@@ -0,0 +1,81 @@
import { $ } from "bun";
import { test, expect } from "bun:test";
import { bunExe, bunEnv, tempDirWithFiles } from "harness";
import * as empty_text from "./15536/empty_text.html" with { type: "text" };
import * as partial_text from "./15536/partial_text.html" with { type: "text" };
import * as empty_script from "./15536/empty_script.js";
import * as empty_script_2 from "./15536/empty_script_2.js";
// Regression test for #15536: importing empty / whitespace-only files must not
// crash; empty JS modules resolve to `{}` and text imports keep their content.
test("empty files from import", () => {
  const observed = JSON.stringify({
    empty_text,
    partial_text,
    empty_script,
    empty_script_2,
  });
  expect(observed).toMatchInlineSnapshot(
    `"{"empty_text":{"default":""},"partial_text":{"default":"\\n\\n\\n\\n\\n"},"empty_script":{},"empty_script_2":{}}"`,
  );
});
// Regression test for #15536: Bun.build with an empty .html entry (loaded as
// text) must produce a bundle whose default export is the empty string.
test("empty files from build (#15536)", async () => {
  // Project layout: a.js imports an empty a.html via the "text" loader, and
  // demo.js runs Bun.build over it, printing the bundled output.
  const tmp = tempDirWithFiles("15536", {
    "demo": {
      "a.js": 'import html from "./a.html";\nconsole.log(html);',
      "a.html": "",
    },
    "demo.js": `\
const { outputs } = await Bun.build({
  loader: {
    ".html": "text"
  },
  entrypoints: ["./demo/a.js"]
});
console.log(await outputs[0].text());`,
  });

  const proc = Bun.spawnSync({
    cmd: [bunExe(), "demo.js"],
    cwd: tmp,
    env: { ...bunEnv },
    stdio: ["inherit", "pipe", "inherit"],
  });
  expect(proc.exitCode).toBe(0);

  // Strip nondeterministic ParseTask debug lines before snapshotting.
  const bundled = proc.stdout
    .toString()
    .replaceAll(/\[parsetask\] ParseTask\(.+?, runtime\) callback\n/g, "");
  expect(bundled).toMatchInlineSnapshot(`
"// demo/a.html
var a_default = "";
// demo/a.js
console.log(a_default);
"
`);
});
// Regression test for #15536: bundling an import of an empty .js file must
// succeed (exit code 0) instead of erroring on the empty module.
test("empty js file", async () => {
  // a.js imports a zero-byte empty.js; demo.js bundles it and prints any logs.
  const tmp = tempDirWithFiles("15536", {
    "demo": {
      "a.js": 'import value from "./empty.js";\nconsole.log(value);',
      "empty.js": "",
    },
    "demo.js": `\
const { logs } = await Bun.build({
  loader: {
    ".html": "text"
  },
  entrypoints: ["./demo/a.js"]
});
console.log(logs.join("\\n"));`,
  });

  const run = Bun.spawnSync({
    cmd: [bunExe(), "demo.js"],
    cwd: tmp,
    env: { ...bunEnv },
    stdio: ["inherit", "pipe", "inherit"],
  });
  // Only the exit status is asserted; the build's log output is not inspected.
  expect(run.exitCode).toBe(0);
});

View File

@@ -0,0 +1,108 @@

View File

@@ -0,0 +1,5 @@