mirror of https://github.com/oven-sh/bun
fix a lot of leaks
@@ -25,12 +25,12 @@ source_index: Index.Optional = .none,

 pub const Index = bun.GenericIndex(u32, OutputFile);

-pub fn deinit(this: *OutputFile) void {
+pub fn deinit(this: *OutputFile, default_allocator: std.mem.Allocator) void {
     this.value.deinit();

-    bun.default_allocator.free(this.src_path.text);
-    bun.default_allocator.free(this.dest_path);
-    bun.default_allocator.free(this.referenced_css_files);
+    default_allocator.free(this.src_path.text);
+    default_allocator.free(this.dest_path);
+    default_allocator.free(this.referenced_css_files);
 }

 // Depending on:
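The new deinit signature threads the owning allocator through explicitly instead of reaching for the global bun.default_allocator, so call sites inside a tracked scope can pass the scope's allocator. A minimal self-contained sketch of that pattern (a simplified stand-in, not bun's actual OutputFile):

const std = @import("std");

// Simplified stand-in for OutputFile: deinit takes the allocator that owns
// the strings, so a call site can pass a tracked scope allocator instead of
// a hard-coded global default.
const File = struct {
    src_path: []u8,
    dest_path: []u8,

    pub fn deinit(this: *File, allocator: std.mem.Allocator) void {
        allocator.free(this.src_path);
        allocator.free(this.dest_path);
    }
};

test File {
    const a = std.testing.allocator;
    var f = File{
        .src_path = try a.dupe(u8, "src/index.ts"),
        .dest_path = try a.dupe(u8, "dist/index.js"),
    };
    f.deinit(a); // must be the same allocator that duped the fields
}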
@@ -1639,7 +1639,7 @@ pub const BundleV2 = struct {

         pub fn deinit(this: *BuildResult) void {
             for (this.output_files.items) |*output_file| {
-                output_file.deinit();
+                output_file.deinit(bun.default_allocator);
             }

             this.output_files.clearAndFree();
@@ -1,4 +1,22 @@
 pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_dev_server: bool) !if (is_dev_server) void else std.ArrayList(options.OutputFile) {
+    // TODO: Ideally we would end the scope unconditionally, but that would require a
+    // lot of changes since we also transfer ownership of the memory in the
+    // success case.
+    //
+    // If we hit successful codepaths, we should return a list of output files
+    // and the caller should handle it and all should be fine and dandy.
+    //
+    // But it is *very* easy to leak memory if we hit a codepath which returns an
+    // error and we forget an `errdefer` somewhere.
+    var error_alloc_scope = bun.shell.AllocScope{
+        .__scope = bun.AllocationScope.init(bun.default_allocator),
+    };
+    errdefer error_alloc_scope.endScope();
+    // **IMPORTANT**:
+    // Any usages of `bun.default_allocator` which own their allocations and
+    // *don't* pass them somewhere else should use this allocator.
+    const tracked_default_allocator = error_alloc_scope.allocator();

     const trace = bun.perf.trace("Bundler.generateChunksInParallel");
     defer trace.end();
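This comment block is the heart of the commit: route error-path-owned allocations through a tracking scope, and end the scope (which reports anything still live) only when an error unwinds. A rough self-contained sketch of that shape, using std.heap.GeneralPurposeAllocator as a stand-in for bun.AllocationScope; the function and names are hypothetical:

const std = @import("std");

// Sketch of the error-scope pattern. The leak checker is torn down only when
// an error unwinds, because on success ownership of the allocation (and, as
// the TODO above notes, the scope itself) transfers to the caller.
fn renderChunk(fail: bool) error{ OutOfMemory, WriteFailed }![]u8 {
    var scope = std.heap.GeneralPurposeAllocator(.{}){};
    errdefer std.debug.assert(scope.deinit() == .ok); // trips if the error path leaked
    const tracked = scope.allocator();

    const path = try tracked.dupe(u8, "./dist/index.js");
    errdefer tracked.free(path); // omitting this is exactly the leak the scope catches

    if (fail) return error.WriteFailed; // unwinds: frees path, then checks the scope

    return path; // success: caller owns path, scope is never ended
}

test renderChunk {
    try std.testing.expectError(error.WriteFailed, renderChunk(true));
    // The success path intentionally leaves the scope open, mirroring the
    // TODO in the real code, so it is not exercised here.
}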
@@ -327,10 +345,16 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
     }

     var output_files = std.ArrayList(options.OutputFile).initCapacity(
-        bun.default_allocator,
+        error_alloc_scope.allocator(),
         (if (c.options.source_maps.hasExternalFiles()) chunks.len * 2 else chunks.len) +
             @as(usize, c.parse_graph.additional_output_files.items.len),
     ) catch unreachable;
+    errdefer {
+        for (output_files.items) |*file| {
+            file.deinit(tracked_default_allocator);
+        }
+        output_files.deinit();
+    }

     const root_path = c.resolver.opts.output_dir;
     const more_than_one_output = c.parse_graph.additional_output_files.items.len > 0 or c.options.generate_bytecode_cache or (has_css_chunk and has_js_chunk) or (has_html_chunk and (has_js_chunk or has_css_chunk));
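The errdefer added above is the usual Zig idiom for a partially built list of owning elements: on error, unwind each element's buffers first, then the list itself, or everything appended so far leaks. A self-contained sketch of the same idiom with hypothetical names:

const std = @import("std");

// Build a list of heap-owned strings; on error, free each item (like
// file.deinit(...) above) and then the backing array.
fn collect(alloc: std.mem.Allocator, names: []const []const u8, fail_at: ?usize) ![][]u8 {
    var out = std.ArrayList([]u8).init(alloc);
    errdefer {
        for (out.items) |item| alloc.free(item); // per-item cleanup
        out.deinit(); // then the list itself
    }

    for (names, 0..) |name, i| {
        if (fail_at == i) return error.WriteFailed;
        try out.append(try alloc.dupe(u8, name));
    }
    return try out.toOwnedSlice();
}

test collect {
    const ok = try collect(std.testing.allocator, &.{ "a.js", "b.js" }, null);
    defer {
        for (ok) |s| std.testing.allocator.free(s);
        std.testing.allocator.free(ok);
    }
    try std.testing.expectError(error.WriteFailed, collect(std.testing.allocator, &.{"a.js"}, 0));
}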
@@ -343,7 +367,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
     const bundler = @as(*bun.bundle_v2.BundleV2, @fieldParentPtr("linker", c));

     if (root_path.len > 0) {
-        try c.writeOutputFilesToDisk(root_path, chunks, &output_files);
+        try c.writeOutputFilesToDisk(tracked_default_allocator, root_path, chunks, &output_files);
     } else {
         // In-memory build
         for (chunks) |*chunk| {
@@ -367,7 +391,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
             var code_result = _code_result catch @panic("Failed to allocate memory for output file");

             var sourcemap_output_file: ?options.OutputFile = null;
-            const input_path = try bun.default_allocator.dupe(
+            const input_path = try error_alloc_scope.allocator().dupe(
                 u8,
                 if (chunk.entry_point.is_entry_point)
                     c.parse_graph.input_files.items(.source)[chunk.entry_point.source_index].path.text
@@ -377,8 +401,8 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_

             switch (chunk.content.sourcemap(c.options.source_maps)) {
                 .external, .linked => |tag| {
-                    const output_source_map = chunk.output_source_map.finalize(bun.default_allocator, code_result.shifts) catch @panic("Failed to allocate memory for external source map");
-                    var source_map_final_rel_path = bun.default_allocator.alloc(u8, chunk.final_rel_path.len + ".map".len) catch unreachable;
+                    const output_source_map = chunk.output_source_map.finalize(error_alloc_scope.allocator(), code_result.shifts) catch @panic("Failed to allocate memory for external source map");
+                    var source_map_final_rel_path = error_alloc_scope.allocator().alloc(u8, chunk.final_rel_path.len + ".map".len) catch unreachable;
                     bun.copy(u8, source_map_final_rel_path, chunk.final_rel_path);
                     bun.copy(u8, source_map_final_rel_path[chunk.final_rel_path.len..], ".map");

@@ -405,7 +429,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
                         .data = .{
                             .buffer = .{
                                 .data = output_source_map,
-                                .allocator = bun.default_allocator,
+                                .allocator = error_alloc_scope.allocator(),
                             },
                         },
                         .hash = null,
@@ -413,14 +437,14 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
                         .input_loader = .file,
                         .output_path = source_map_final_rel_path,
                         .output_kind = .sourcemap,
-                        .input_path = try strings.concat(bun.default_allocator, &.{ input_path, ".map" }),
+                        .input_path = try strings.concat(error_alloc_scope.allocator(), &.{ input_path, ".map" }),
                         .side = null,
                         .entry_point_index = null,
                         .is_executable = false,
                     });
                 },
                 .@"inline" => {
-                    const output_source_map = chunk.output_source_map.finalize(bun.default_allocator, code_result.shifts) catch @panic("Failed to allocate memory for external source map");
+                    const output_source_map = chunk.output_source_map.finalize(error_alloc_scope.allocator(), code_result.shifts) catch @panic("Failed to allocate memory for external source map");
                     const encode_len = base64.encodeLen(output_source_map);

                     const source_map_start = "//# sourceMappingURL=data:application/json;base64,";
@@ -460,15 +484,15 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_

                     if (JSC.CachedBytecode.generate(c.options.output_format, code_result.buffer, &source_provider_url)) |result| {
                         const bytecode, const cached_bytecode = result;
-                        const source_provider_url_str = source_provider_url.toSlice(bun.default_allocator);
+                        const source_provider_url_str = source_provider_url.toSlice(error_alloc_scope.allocator());
                         defer source_provider_url_str.deinit();
                         debug("Bytecode cache generated {s}: {}", .{ source_provider_url_str.slice(), bun.fmt.size(bytecode.len, .{ .space_between_number_and_unit = true }) });
                         @memcpy(fdpath[0..chunk.final_rel_path.len], chunk.final_rel_path);
                         fdpath[chunk.final_rel_path.len..][0..bun.bytecode_extension.len].* = bun.bytecode_extension.*;

                         break :brk options.OutputFile.init(.{
-                            .output_path = bun.default_allocator.dupe(u8, source_provider_url_str.slice()) catch unreachable,
-                            .input_path = std.fmt.allocPrint(bun.default_allocator, "{s}" ++ bun.bytecode_extension, .{chunk.final_rel_path}) catch unreachable,
+                            .output_path = error_alloc_scope.allocator().dupe(u8, source_provider_url_str.slice()) catch unreachable,
+                            .input_path = std.fmt.allocPrint(error_alloc_scope.allocator(), "{s}" ++ bun.bytecode_extension, .{chunk.final_rel_path}) catch unreachable,
                             .input_loader = .js,
                             .hash = if (chunk.template.placeholder.hash != null) bun.hash(bytecode) else null,
                             .output_kind = .bytecode,
@@ -484,7 +508,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
                         });
                     } else {
                         // an error
-                        c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "Failed to generate bytecode for {s}", .{
+                        c.log.addErrorFmt(null, Logger.Loc.Empty, error_alloc_scope.allocator(), "Failed to generate bytecode for {s}", .{
                             chunk.final_rel_path,
                         }) catch unreachable;
                     }
@@ -525,7 +549,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
                 .display_size = @as(u32, @truncate(display_size)),
                 .output_kind = output_kind,
                 .input_loader = if (chunk.entry_point.is_entry_point) c.parse_graph.input_files.items(.loader)[chunk.entry_point.source_index] else .js,
-                .output_path = try bun.default_allocator.dupe(u8, chunk.final_rel_path),
+                .output_path = try error_alloc_scope.allocator().dupe(u8, chunk.final_rel_path),
                 .is_executable = chunk.is_executable,
                 .source_map_index = source_map_index,
                 .bytecode_index = bytecode_index,
@@ -540,7 +564,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
                 else
                     null,
                 .referenced_css_files = switch (chunk.content) {
-                    .javascript => |js| @ptrCast(try bun.default_allocator.dupe(u32, js.css_chunks)),
+                    .javascript => |js| @ptrCast(try error_alloc_scope.allocator().dupe(u32, js.css_chunks)),
                     .css => &.{},
                     .html => &.{},
                 },
@@ -1,5 +1,6 @@
 pub fn writeOutputFilesToDisk(
     c: *LinkerContext,
+    tracked_default_allocator: std.mem.Allocator,
     root_path: string,
     chunks: []Chunk,
     output_files: *std.ArrayList(options.OutputFile),
@@ -26,17 +27,17 @@ pub fn writeOutputFilesToDisk(
     var max_heap_allocator: bun.MaxHeapAllocator = undefined;
     defer max_heap_allocator.deinit();

-    const code_allocator = max_heap_allocator.init(bun.default_allocator);
+    const code_allocator = max_heap_allocator.init(tracked_default_allocator);

     var max_heap_allocator_source_map: bun.MaxHeapAllocator = undefined;
     defer max_heap_allocator_source_map.deinit();

-    const source_map_allocator = max_heap_allocator_source_map.init(bun.default_allocator);
+    const source_map_allocator = max_heap_allocator_source_map.init(tracked_default_allocator);

     var max_heap_allocator_inline_source_map: bun.MaxHeapAllocator = undefined;
     defer max_heap_allocator_inline_source_map.deinit();

-    const code_with_inline_source_map_allocator = max_heap_allocator_inline_source_map.init(bun.default_allocator);
+    const code_with_inline_source_map_allocator = max_heap_allocator_inline_source_map.init(tracked_default_allocator);

     var pathbuf: bun.PathBuffer = undefined;
     const bv2: *bundler.BundleV2 = @fieldParentPtr("linker", c);
@@ -77,22 +78,31 @@ pub fn writeOutputFilesToDisk(
         ) catch |err| bun.Output.panic("Failed to create output chunk: {s}", .{@errorName(err)});

         var source_map_output_file: ?options.OutputFile = null;
+        errdefer {
+            if (source_map_output_file) |*output_file| {
+                output_file.deinit(tracked_default_allocator);
+            }
+        }

-        const input_path = try bun.default_allocator.dupe(
+        var input_path = try tracked_default_allocator.dupe(
             u8,
             if (chunk.entry_point.is_entry_point)
                 c.parse_graph.input_files.items(.source)[chunk.entry_point.source_index].path.text
             else
                 chunk.final_rel_path,
         );
+        errdefer {
+            tracked_default_allocator.free(input_path);
+        }

         switch (chunk.content.sourcemap(c.options.source_maps)) {
             .external, .linked => |tag| {
                 const output_source_map = chunk.output_source_map.finalize(source_map_allocator, code_result.shifts) catch @panic("Failed to allocate memory for external source map");
-                const source_map_final_rel_path = strings.concat(default_allocator, &.{
+                const source_map_final_rel_path = strings.concat(tracked_default_allocator, &.{
                     chunk.final_rel_path,
                     ".map",
                 }) catch @panic("Failed to allocate memory for external source map path");
+                errdefer tracked_default_allocator.free(source_map_final_rel_path);

                 if (tag == .linked) {
                     const a, const b = if (public_path.len > 0)
@@ -108,6 +118,7 @@ pub fn writeOutputFilesToDisk(
                     buf.appendSliceAssumeCapacity(a);
                     buf.appendSliceAssumeCapacity(b);
                     buf.appendAssumeCapacity('\n');
+                    code_allocator.free(code_result.buffer);
                     code_result.buffer = buf.items;
                 }

@@ -142,9 +153,11 @@ pub fn writeOutputFilesToDisk(
                     .result => {},
                 }

+                const input_path_joined = try strings.concat(tracked_default_allocator, &.{ input_path, ".map" });
+
                 source_map_output_file = options.OutputFile.init(.{
                     .output_path = source_map_final_rel_path,
-                    .input_path = try strings.concat(bun.default_allocator, &.{ input_path, ".map" }),
+                    .input_path = input_path_joined,
                     .loader = .json,
                     .input_loader = .file,
                     .output_kind = .sourcemap,
@@ -172,11 +185,12 @@ pub fn writeOutputFilesToDisk(
                 _ = base64.encode(buf.items[buf.items.len - encode_len ..], output_source_map);

                 buf.appendAssumeCapacity('\n');
+                code_allocator.free(code_result.buffer);
                 code_result.buffer = buf.items;
             },
             .none => {},
         }
-        const bytecode_output_file: ?options.OutputFile = brk: {
+        var bytecode_output_file: ?options.OutputFile = brk: {
            if (c.options.generate_bytecode_cache) {
                const loader: Loader = if (chunk.entry_point.is_entry_point)
                    c.parse_graph.input_files.items(.loader)[
@@ -236,8 +250,8 @@ pub fn writeOutputFilesToDisk(
                     }

                     break :brk options.OutputFile.init(.{
-                        .output_path = bun.default_allocator.dupe(u8, source_provider_url_str.slice()) catch unreachable,
-                        .input_path = std.fmt.allocPrint(bun.default_allocator, "{s}" ++ bun.bytecode_extension, .{chunk.final_rel_path}) catch unreachable,
+                        .output_path = tracked_default_allocator.dupe(u8, source_provider_url_str.slice()) catch unreachable,
+                        .input_path = std.fmt.allocPrint(tracked_default_allocator, "{s}" ++ bun.bytecode_extension, .{chunk.final_rel_path}) catch unreachable,
                         .input_loader = .file,
                         .hash = if (chunk.template.placeholder.hash != null) bun.hash(bytecode) else null,
                         .output_kind = .bytecode,
@@ -257,6 +271,9 @@ pub fn writeOutputFilesToDisk(

             break :brk null;
         };
+        errdefer if (bytecode_output_file) |*output_file| {
+            output_file.deinit(tracked_default_allocator);
+        };

         switch (JSC.Node.fs.NodeFS.writeFileWithPathBuffer(
             &pathbuf,
@@ -310,8 +327,12 @@ pub fn writeOutputFilesToDisk(
             else
                 .chunk;
         try output_files.append(options.OutputFile.init(.{
-            .output_path = bun.default_allocator.dupe(u8, chunk.final_rel_path) catch unreachable,
-            .input_path = input_path,
+            .output_path = tracked_default_allocator.dupe(u8, chunk.final_rel_path) catch unreachable,
+            .input_path = brk: {
+                const ret = input_path;
+                input_path = "";
+                break :brk ret;
+            },
             .input_loader = if (chunk.entry_point.is_entry_point)
                 c.parse_graph.input_files.items(.loader)[chunk.entry_point.source_index]
             else
@@ -338,17 +359,17 @@ pub fn writeOutputFilesToDisk(
             else
                 null,
             .referenced_css_files = switch (chunk.content) {
-                .javascript => |js| @ptrCast(try bun.default_allocator.dupe(u32, js.css_chunks)),
+                .javascript => |js| @ptrCast(try tracked_default_allocator.dupe(u32, js.css_chunks)),
                 .css => &.{},
                 .html => &.{},
             },
         }));

-        if (source_map_output_file) |sourcemap_file| {
+        if (bun.take(&source_map_output_file)) |sourcemap_file| {
             try output_files.append(sourcemap_file);
         }

-        if (bytecode_output_file) |bytecode_file| {
+        if (bun.take(&bytecode_output_file)) |bytecode_file| {
             try output_files.append(bytecode_file);
         }
     }
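bun.take, used in the two conditionals above, moves the value out of an optional and leaves null behind, so the errdefer blocks earlier in the function cannot double-free a file that was already appended to output_files. A sketch of that helper as inferred from its usage here (the real implementation may differ):

const std = @import("std");

// Move the value out of an optional and null the source, so later cleanup
// code sees "nothing left to free" once ownership has been handed off.
fn take(opt: anytype) @TypeOf(opt.*) {
    const v = opt.*;
    opt.* = null;
    return v;
}

test take {
    var pending: ?u32 = 7;
    try std.testing.expectEqual(@as(?u32, 7), take(&pending));
    try std.testing.expectEqual(@as(?u32, null), pending);
    try std.testing.expectEqual(@as(?u32, null), take(&pending)); // second take is a no-op
}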
@@ -427,7 +448,6 @@ const LinkerContext = bun.bundle_v2.LinkerContext;
 const string = bun.string;
 const Output = bun.Output;
 const strings = bun.strings;
-const default_allocator = bun.default_allocator;

 const std = @import("std");
 const sourcemap = bun.sourcemap;
test/bundler/bundler_error_leak.test.ts (new file, 123 lines)
@@ -0,0 +1,123 @@
+import { describe, expect, test } from "bun:test";
+import { itBundled } from "./expectBundled";
+import { bunEnv, bunExe, DirectoryTree, isASAN, isDebug, tempDirWithFiles } from "harness";
+import { build } from "bun";
+
+async function doesNotLeak(tempdir: string, expectedError: string) {
+  const { exitCode, stdout, stderr } = await Bun.$`${bunExe()} build.ts`
+    .env({ ...bunEnv })
+    .cwd(tempdir)
+    .quiet()
+    .nothrow();
+
+  console.log("STDOUT:\n", stdout.toString());
+  console.error("STDERR:\n", stderr.toString());
+  // these tests should fail
+  expect(exitCode).toBe(1);
+  expect(stderr.toString()).not.toContain("leak");
+  expect(stderr.toString()).toContain(expectedError);
+}
+
+async function runTest(
+  testKinds: Array<"in-memory" | "disk">,
+  testname: string,
+  basename: string,
+  filesOrAbsolutePathToCopyFolderFrom: DirectoryTree,
+  buildOptions: Bun.BuildConfig,
+  expectedError: string,
+) {
+  for (const kind of testKinds) {
+    test(`${kind}: ${testname}`, async () => {
+      const options = { ...buildOptions, outdir: kind === "disk" ? buildOptions.outdir : undefined };
+      const tempdir = tempDirWithFiles(`${basename}-${kind}`, {
+        ...filesOrAbsolutePathToCopyFolderFrom,
+        "build.ts": /* ts */ `
+          const output = await Bun.build(${JSON.stringify(options)});
+          console.log(output);
+        `,
+      });
+
+      await doesNotLeak(tempdir, expectedError);
+    });
+  }
+}
+
+// Only run if AllocationScope is enabled
+describe.if(isDebug || isASAN)("bundler", () => {
+  describe("should not leak memory in error codepaths", () => {
+    runTest(
+      ["disk"],
+      "output directory is a file",
+      "output-directory-is-a-file",
+      {
+        "index.ts": `console.log('ooga booga!')`,
+        "output": "lol!",
+      },
+      {
+        entrypoints: ["./index.ts"],
+        outdir: "./output",
+        target: "bun",
+        format: "esm",
+        sourcemap: "external",
+      },
+      'Failed to create output directory "./output" is a file. Please choose a different outdir or delete "./output"',
+    );
+
+    runTest(
+      ["disk"],
+      "trying to write a chunk but a folder exists with same name",
+      "chunk-folder-exists",
+      {
+        "index.ts": `console.log('ooga booga!')`,
+        "dist/index.js": {
+          "hi": "hi",
+        },
+      },
+      {
+        entrypoints: ["./index.ts"],
+        outdir: "./dist",
+        target: "bun",
+        format: "esm",
+        sourcemap: "external",
+      },
+      'Is a directory: writing chunk "./index.js"',
+    );
+
+    runTest(
+      ["disk"],
+      "trying to write a sourcemap but a folder exists with same name",
+      "sourcemap-folder-exists",
+      {
+        "index.ts": `console.log('ooga booga!')`,
+        "dist/index.js.map": {},
+      },
+      {
+        entrypoints: ["./index.ts"],
+        outdir: "./dist",
+        target: "bun",
+        format: "esm",
+        sourcemap: "external",
+      },
+      'Is a directory: writing sourcemap for chunk "./index.js"',
+    );
+
+    runTest(
+      ["disk"],
+      "trying to write a bytecode file but a folder exists with same name",
+      "bytecode-folder-exists",
+      {
+        "index.ts": `console.log('ooga booga!')`,
+        "dist/index.js.jsc": {},
+      },
+      {
+        entrypoints: ["./index.ts"],
+        outdir: "./dist",
+        target: "bun",
+        format: "cjs",
+        sourcemap: "external",
+        bytecode: true,
+      },
+      "EISDIR: ./index.js.jsc: Is a directory",
+    );
+  });
+});