mirror of https://github.com/oven-sh/bun
synced 2026-02-05 16:38:55 +00:00

Compare commits: ciro/fix-a...jarred/run (23 commits)
| SHA1 |
|---|
| 1052e75910 |
| 3760e5acd5 |
| 7ff59e7f79 |
| 348a3e984a |
| 34fdc21350 |
| bad003ce9f |
| cb0bfd9eb7 |
| f61c84c9b5 |
| 17a5e59d34 |
| 71315ef358 |
| b867e2fdf8 |
| f8b35ab094 |
| 844d48d346 |
| 609675a50b |
| 2b9213aeb5 |
| cf1051191e |
| e1c8fb08e5 |
| 4408f80e4d |
| 98a018b212 |
| b3e3573a50 |
| acd79c864a |
| f872b90b8f |
| 5d13a3e737 |
@@ -91,6 +91,11 @@ RUN apk --no-cache add \

FROM alpine:3.18

# Disable the runtime transpiler cache by default inside Docker containers.
# On ephemeral containers, the cache is not useful
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}

COPY --from=build /tmp/glibc.apk /tmp/
COPY --from=build /tmp/glibc-bin.apk /tmp/
COPY --from=build /usr/local/bin/bun /usr/local/bin/
@@ -57,6 +57,11 @@ RUN apt-get update -qq \

FROM debian:bullseye-slim

# Disable the runtime transpiler cache by default inside Docker containers.
# On ephemeral containers, the cache is not useful
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}

COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
@@ -58,6 +58,11 @@ FROM debian:bullseye

COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun

# Disable the runtime transpiler cache by default inside Docker containers.
# On ephemeral containers, the cache is not useful
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}

RUN groupadd bun \
    --gid 1000 \
    && useradd bun \
@@ -57,6 +57,11 @@ RUN apt-get update -qq \

FROM gcr.io/distroless/base-nossl-debian11

# Disable the runtime transpiler cache by default inside Docker containers.
# On ephemeral containers, the cache is not useful
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}

COPY --from=build /usr/local/bin/bun /usr/local/bin/

# Temporarily use the `build`-stage image binaries to create a symlink:
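Because every image wires the cache path through a build `ARG`, the default can be overridden at build time without editing the Dockerfile. A minimal sketch (the image tag and cache path are illustrative):

```sh
# Re-enable the runtime transpiler cache inside the image by overriding the build arg
docker build --build-arg BUN_RUNTIME_TRANSPILER_CACHE_PATH=/var/cache/bun -t my-bun-app .
```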
@@ -137,6 +137,11 @@ These environment variables are read by Bun and configure aspects of its behavior

---

- `BUN_RUNTIME_TRANSPILER_CACHE_PATH`
- The runtime transpiler caches the transpiled output of source files larger than 50 KB, which makes CLIs built on Bun load faster. If `BUN_RUNTIME_TRANSPILER_CACHE_PATH` is set, transpiled output is cached in the specified directory. If it is set to an empty string or the string `"0"`, caching is disabled. If it is unset, transpiled output is cached in the platform-specific cache directory.

---

- `TMPDIR`
- Bun occasionally requires a directory to store intermediate assets during bundling or other operations. If unset, defaults to the platform-specific temporary directory: `/tmp` on Linux, `/private/tmp` on macOS.
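`TMPDIR` behaves like the standard Unix variable and can be set per-invocation. A sketch (the path is illustrative):

```sh
# Redirect Bun's intermediate bundling assets to a scratch volume
TMPDIR=/mnt/scratch bun build ./index.ts
```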
@@ -153,6 +158,31 @@ These environment variables are read by Bun and configure aspects of its behavior

---

- `DO_NOT_TRACK`
- If `DO_NOT_TRACK=1`, then analytics are [disabled](https://do-not-track.dev/). Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data. Equivalent of `telemetry=false` in bunfig.
- Telemetry is not sent yet as of November 28th, 2023, but we are planning to add telemetry in the coming months. If `DO_NOT_TRACK=1`, then analytics are [disabled](https://do-not-track.dev/). Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data. Equivalent of `telemetry=false` in bunfig.

{% /table %}

## Runtime transpiler caching

For files larger than 50 KB, Bun caches transpiled output into `$BUN_RUNTIME_TRANSPILER_CACHE_PATH` or the platform-specific cache directory. This makes CLIs using Bun load faster.

This transpiler cache is global and shared across all projects. It is safe to delete the cache at any time. It is a content-addressable cache, so it will never contain duplicate entries. It is also safe to delete the cache while a Bun process is running.

It is recommended to disable this cache on ephemeral filesystems, such as inside Docker containers. Bun's official Docker images disable it automatically.

### Disable the runtime transpiler cache

To disable the runtime transpiler cache, set `BUN_RUNTIME_TRANSPILER_CACHE_PATH` to an empty string or the string `"0"`.

```sh
BUN_RUNTIME_TRANSPILER_CACHE_PATH=0 bun run dev
```

### What does it cache?

It caches:

- The transpiled output of source files larger than 50 KB.
- The sourcemap for the transpiled output of the file.

These cached files use the `.pile` file extension.
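To relocate the cache instead of disabling it, point the same variable at a writable directory; entries there are named by input hash with the `.pile` extension described above. A sketch (the directory is illustrative):

```sh
# Keep the transpiler cache on a persistent volume across container restarts
BUN_RUNTIME_TRANSPILER_CACHE_PATH=/var/cache/bun-transpiler bun run dev
```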
@@ -49,6 +49,7 @@ async function runTest(path) {
      FORCE_COLOR: "1",
      BUN_GARBAGE_COLLECTOR_LEVEL: "1",
      BUN_JSC_forceRAMSize,
      BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
    },
  });
} catch (e) {
@@ -53,6 +53,7 @@ pub const Features = struct {
    pub var fetch = false;
    pub var bunfig = false;
    pub var extracted_packages = false;
    pub var transpiler_cache = false;

    pub fn formatter() Formatter {
        return Formatter{};

@@ -81,6 +82,7 @@ pub const Features = struct {
        "fetch",
        "bunfig",
        "extracted_packages",
        "transpiler_cache",
    };
    inline for (fields) |field| {
        if (@field(Features, field)) {
src/api/schema.d.ts (generated, vendored): 3 lines changed
@@ -201,6 +201,7 @@ export const enum DotEnvBehavior {
  disable = 1,
  prefix = 2,
  load_all = 3,
  load_all_without_inlining = 4,
}
export const DotEnvBehaviorKeys: {
  1: "disable";

@@ -209,6 +210,8 @@ export const DotEnvBehaviorKeys: {
  prefix: "prefix";
  3: "load_all";
  load_all: "load_all";
  4: "load_all_without_inlining";
  load_all_without_inlining: "load_all_without_inlining";
};
export const enum SourceMapMode {
  inline_into_file = 1,
src/api/schema.js (generated): 4 lines changed
@@ -1101,17 +1101,21 @@ const DotEnvBehavior = {
  "1": 1,
  "2": 2,
  "3": 3,
  "4": 4,
  "disable": 1,
  "prefix": 2,
  "load_all": 3,
  "load_all_without_inlining": 4,
};
const DotEnvBehaviorKeys = {
  "1": "disable",
  "2": "prefix",
  "3": "load_all",
  "4": "load_all_without_inlining",
  "disable": "disable",
  "prefix": "prefix",
  "load_all": "load_all",
  "load_all_without_inlining": "load_all_without_inlining",
};

function decodeEnvConfig(bb) {
@@ -241,6 +241,7 @@ enum DotEnvBehavior {
  disable = 1;
  prefix = 2;
  load_all = 3;
  load_all_without_inlining = 4;
}

message EnvConfig {
@@ -1244,6 +1244,9 @@ pub const Api = struct {
    /// load_all
    load_all,

    /// load_all_without_inlining
    load_all_without_inlining,

    _,

    pub fn jsonStringify(self: @This(), writer: anytype) !void {
src/bun.js/RuntimeTranspilerCache.zig (new file): 601 lines
@@ -0,0 +1,601 @@
// ** Update the version number when any breaking changes are made to the cache format or to the JS parser **
const expected_version = 1;

const bun = @import("root").bun;
const std = @import("std");
const Output = bun.Output;
const JSC = bun.JSC;

const debug = Output.scoped(.cache, false);
const MINIMUM_CACHE_SIZE = 50 * 1024;

pub const RuntimeTranspilerCache = struct {
    input_hash: ?u64 = null,
    input_byte_length: ?u64 = null,
    features_hash: ?u64 = null,
    exports_kind: bun.JSAst.ExportsKind = .none,
    output_code: ?bun.String = null,
    entry: ?Entry = null,

    sourcemap_allocator: std.mem.Allocator,
    output_code_allocator: std.mem.Allocator,

    const seed = 42;
    pub const Metadata = struct {
        cache_version: u32 = expected_version,
        output_encoding: Encoding = Encoding.none,
        module_type: ModuleType = ModuleType.none,

        features_hash: u64 = 0,

        input_byte_length: u64 = 0,
        input_hash: u64 = 0,

        output_byte_offset: u64 = 0,
        output_byte_length: u64 = 0,
        output_hash: u64 = 0,

        sourcemap_byte_offset: u64 = 0,
        sourcemap_byte_length: u64 = 0,
        sourcemap_hash: u64 = 0,

        pub const size = brk: {
            var count: usize = 0;
            var meta: Metadata = undefined;
            for (std.meta.fieldNames(Metadata)) |name| {
                count += @sizeOf(@TypeOf(@field(meta, name)));
            }

            break :brk count;
        };

        pub fn encode(this: *const Metadata, writer: anytype) !void {
            try writer.writeInt(u32, this.cache_version, .little);
            try writer.writeInt(u8, @intFromEnum(this.module_type), .little);
            try writer.writeInt(u8, @intFromEnum(this.output_encoding), .little);

            try writer.writeInt(u64, this.features_hash, .little);

            try writer.writeInt(u64, this.input_byte_length, .little);
            try writer.writeInt(u64, this.input_hash, .little);

            try writer.writeInt(u64, this.output_byte_offset, .little);
            try writer.writeInt(u64, this.output_byte_length, .little);
            try writer.writeInt(u64, this.output_hash, .little);

            try writer.writeInt(u64, this.sourcemap_byte_offset, .little);
            try writer.writeInt(u64, this.sourcemap_byte_length, .little);
            try writer.writeInt(u64, this.sourcemap_hash, .little);
        }

        pub fn decode(this: *Metadata, reader: anytype) !void {
            this.cache_version = try reader.readInt(u32, .little);
            if (this.cache_version != expected_version) {
                return error.StaleCache;
            }

            this.module_type = @enumFromInt(try reader.readInt(u8, .little));
            this.output_encoding = @enumFromInt(try reader.readInt(u8, .little));

            this.features_hash = try reader.readInt(u64, .little);

            this.input_byte_length = try reader.readInt(u64, .little);
            this.input_hash = try reader.readInt(u64, .little);

            this.output_byte_offset = try reader.readInt(u64, .little);
            this.output_byte_length = try reader.readInt(u64, .little);
            this.output_hash = try reader.readInt(u64, .little);

            this.sourcemap_byte_offset = try reader.readInt(u64, .little);
            this.sourcemap_byte_length = try reader.readInt(u64, .little);
            this.sourcemap_hash = try reader.readInt(u64, .little);

            switch (this.module_type) {
                .esm, .cjs => {},
                // Invalid module type
                else => return error.InvalidModuleType,
            }

            switch (this.output_encoding) {
                .utf8, .utf16, .latin1 => {},
                // Invalid encoding
                else => return error.UnknownEncoding,
            }
        }
    };

    pub const Entry = struct {
        metadata: Metadata,
        output_code: OutputCode = .{ .utf8 = "" },
        sourcemap: []const u8 = "",

        pub const OutputCode = union(enum) {
            utf8: []const u8,
            string: bun.String,

            pub fn byteSlice(this: *const OutputCode) []const u8 {
                switch (this.*) {
                    .utf8 => return this.utf8,
                    .string => return this.string.byteSlice(),
                }
            }
        };

        pub fn save(
            destination_dir: bun.FileDescriptor,
            destination_path: bun.PathString,
            input_byte_length: u64,
            input_hash: u64,
            features_hash: u64,
            sourcemap: []const u8,
            output_code: OutputCode,
            exports_kind: bun.JSAst.ExportsKind,
        ) !void {
            var tracer = bun.tracy.traceNamed(@src(), "RuntimeTranspilerCache.save");
            defer tracer.end();

            // atomically write to a tmpfile and then move it to the final destination
            var tmpname_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
            const tmpfilename = bun.sliceTo(try bun.fs.FileSystem.instance.tmpname(std.fs.path.extension(destination_path.slice()), &tmpname_buf, input_hash), 0);

            const output_bytes = output_code.byteSlice();

            // First we open the tmpfile, to avoid any other work in the event of failure.
            var tmpfile = try bun.Tmpfile.create(destination_dir, tmpfilename).unwrap();
            defer {
                _ = bun.sys.close(tmpfile.fd);
            }
            {
                errdefer {
                    if (!tmpfile.using_tmpfile) {
                        _ = bun.sys.unlinkat(destination_dir, tmpfilename);
                    }
                }
                var metadata_buf = [_]u8{0} ** (Metadata.size * 2);
                const metadata_bytes = brk: {
                    var metadata = Metadata{
                        .input_byte_length = input_byte_length,
                        .input_hash = input_hash,
                        .features_hash = features_hash,
                        .module_type = switch (exports_kind) {
                            .cjs => ModuleType.cjs,
                            else => ModuleType.esm,
                        },
                        .output_encoding = switch (output_code) {
                            .utf8 => Encoding.utf8,
                            .string => |str| switch (str.encoding()) {
                                .utf8 => Encoding.utf8,
                                .utf16 => Encoding.utf16,
                                .latin1 => Encoding.latin1,
                                else => @panic("Unexpected encoding"),
                            },
                        },
                        .sourcemap_byte_length = sourcemap.len,
                        .output_byte_offset = Metadata.size,
                        .output_byte_length = output_bytes.len,
                        .sourcemap_byte_offset = Metadata.size + output_bytes.len,
                    };

                    metadata.output_hash = hash(output_bytes);
                    metadata.sourcemap_hash = hash(sourcemap);
                    var metadata_stream = std.io.fixedBufferStream(&metadata_buf);

                    try metadata.encode(metadata_stream.writer());

                    if (comptime bun.Environment.allow_assert) {
                        var metadata_stream2 = std.io.fixedBufferStream(metadata_buf[0..Metadata.size]);
                        var metadata2 = Metadata{};
                        metadata2.decode(metadata_stream2.reader()) catch |err| bun.Output.panic("Metadata did not roundtrip encode -> decode successfully: {s}", .{@errorName(err)});
                        std.debug.assert(std.meta.eql(metadata, metadata2));
                    }

                    break :brk metadata_buf[0..metadata_stream.pos];
                };

                var vecs: [3]std.os.iovec = .{
                    .{ .iov_base = metadata_bytes.ptr, .iov_len = metadata_bytes.len },
                    .{ .iov_base = @constCast(output_bytes.ptr), .iov_len = output_bytes.len },
                    .{ .iov_base = @constCast(sourcemap.ptr), .iov_len = sourcemap.len },
                };

                var position: isize = 0;
                const end_position = Metadata.size + output_bytes.len + sourcemap.len;
                std.debug.assert(end_position == @as(i64, @intCast(vecs[0].iov_len + vecs[1].iov_len + vecs[2].iov_len)));
                std.debug.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size)));

                bun.C.preallocate_file(tmpfile.fd, 0, @intCast(end_position)) catch {};
                var current_vecs: []std.os.iovec = vecs[0..];
                while (position < end_position) {
                    const written = try bun.sys.pwritev(tmpfile.fd, current_vecs, position).unwrap();
                    if (written <= 0) {
                        return error.WriteFailed;
                    }

                    position += @intCast(written);
                }
            }

            try tmpfile.finish(destination_path.sliceAssumeZ());
        }

        pub fn load(
            this: *Entry,
            file: std.fs.File,
            sourcemap_allocator: std.mem.Allocator,
            output_code_allocator: std.mem.Allocator,
        ) !void {
            const stat_size = try file.getEndPos();
            if (stat_size < Metadata.size + this.metadata.output_byte_length + this.metadata.sourcemap_byte_length) {
                return error.MissingData;
            }

            std.debug.assert(this.output_code == .utf8 and this.output_code.utf8.len == 0); // this should be the default value

            this.output_code = brk: {
                switch (this.metadata.output_encoding) {
                    .utf8 => {
                        var utf8 = try output_code_allocator.alloc(u8, this.metadata.output_byte_length);
                        errdefer output_code_allocator.free(utf8);
                        const read_bytes = try file.preadAll(utf8, this.metadata.output_byte_offset);
                        if (read_bytes != this.metadata.output_byte_length) {
                            return error.MissingData;
                        }

                        if (this.metadata.output_hash != 0) {
                            if (hash(utf8) != this.metadata.output_hash) {
                                return error.InvalidHash;
                            }
                        }

                        break :brk .{ .utf8 = utf8 };
                    },
                    .latin1 => {
                        var latin1 = bun.String.createUninitializedLatin1(this.metadata.output_byte_length);
                        errdefer latin1.deref();
                        const read_bytes = try file.preadAll(@constCast(latin1.latin1()), this.metadata.output_byte_offset);

                        if (this.metadata.output_hash != 0) {
                            if (hash(latin1.latin1()) != this.metadata.output_hash) {
                                return error.InvalidHash;
                            }
                        }

                        if (read_bytes != this.metadata.output_byte_length) {
                            return error.MissingData;
                        }

                        break :brk .{ .string = latin1 };
                    },

                    .utf16 => {
                        var utf16 = bun.String.createUninitializedUTF16(this.metadata.output_byte_length / 2);
                        errdefer utf16.deref();
                        const read_bytes = try file.preadAll(std.mem.sliceAsBytes(@constCast(utf16.utf16())), this.metadata.output_byte_offset);
                        if (read_bytes != this.metadata.output_byte_length) {
                            return error.MissingData;
                        }

                        if (this.metadata.output_hash != 0) {
                            if (hash(std.mem.sliceAsBytes(utf16.utf16())) != this.metadata.output_hash) {
                                return error.InvalidHash;
                            }
                        }

                        break :brk .{ .string = utf16 };
                    },

                    else => @panic("Unexpected output encoding"),
                }
            };

            errdefer {
                switch (this.output_code) {
                    .utf8 => output_code_allocator.free(this.output_code.utf8),
                    .string => this.output_code.string.deref(),
                }
            }

            if (this.metadata.sourcemap_byte_length > 0) {
                var sourcemap = try sourcemap_allocator.alloc(u8, this.metadata.sourcemap_byte_length);
                errdefer sourcemap_allocator.free(sourcemap);
                const read_bytes = try file.preadAll(sourcemap, this.metadata.sourcemap_byte_offset);
                if (read_bytes != this.metadata.sourcemap_byte_length) {
                    return error.MissingData;
                }

                this.sourcemap = sourcemap;
            }
        }
    };

    pub fn hash(bytes: []const u8) u64 {
        return std.hash.Wyhash.hash(seed, bytes);
    }

    pub const ModuleType = enum(u8) {
        none = 0,
        esm = 1,
        cjs = 2,
    };

    pub const Encoding = enum(u8) {
        none = 0,
        utf8 = 1,
        utf16 = 2,
        latin1 = 3,
        _,
    };

    pub fn writeCacheFilename(
        buf: []u8,
        input_hash: u64,
    ) !usize {
        const fmt_name = if (comptime bun.Environment.allow_assert) "{any}.debug.pile" else "{any}.pile";

        const printed = try std.fmt.bufPrint(buf, fmt_name, .{bun.fmt.fmtSliceHexLower(std.mem.asBytes(&input_hash))});
        return printed.len;
    }

    pub fn getCacheFilePath(
        buf: *[bun.MAX_PATH_BYTES]u8,
        input_hash: u64,
    ) ![:0]const u8 {
        const cache_dir = getCacheDir(buf);
        if (cache_dir.len == 0) {
            return "";
        }
        buf[cache_dir.len] = std.fs.path.sep;
        const cache_filename_len = try writeCacheFilename(buf[cache_dir.len + 1 ..], input_hash);
        buf[cache_dir.len + 1 + cache_filename_len] = 0;

        return buf[0 .. cache_dir.len + 1 + cache_filename_len :0];
    }

    fn reallyGetCacheDir(
        buf: *[bun.MAX_PATH_BYTES]u8,
    ) [:0]const u8 {
        if (bun.getenvZ("BUN_RUNTIME_TRANSPILER_CACHE_PATH")) |dir| {
            if (dir.len == 0 or (dir.len == 1 and dir[0] == '0')) {
                return "";
            }

            const len = @min(dir.len, bun.MAX_PATH_BYTES - 1);
            @memcpy(buf[0..len], dir[0..len]);
            buf[len] = 0;
            return buf[0..len :0];
        }

        if (bun.getenvZ("XDG_CACHE_HOME")) |dir| {
            var parts = &[_][]const u8{ dir, "bun", "@t@" };
            return bun.fs.FileSystem.instance.absBufZ(parts, buf);
        }

        if (comptime bun.Environment.isMac) {
            // On a mac, default to ~/Library/Caches/bun/*
            // This is different than ~/.bun/install/cache, and not configurable by the user.
            if (bun.getenvZ("HOME")) |home| {
                const parts = &[_][]const u8{
                    home,
                    "Library/",
                    "Caches/",
                    "bun",
                    "@t@",
                };
                return bun.fs.FileSystem.instance.absBufZ(parts, buf);
            }
        }

        if (bun.getenvZ(bun.DotEnv.home_env)) |dir| {
            var parts = &[_][]const u8{ dir, ".bun", "install", "cache", "@t@" };
            return bun.fs.FileSystem.instance.absBufZ(parts, buf);
        }

        {
            var parts = &[_][]const u8{ bun.fs.FileSystem.instance.fs.tmpdirPath(), "bun", "@t@" };
            return bun.fs.FileSystem.instance.absBufZ(parts, buf);
        }
    }

    // Only do this at most once per-thread.
    threadlocal var runtime_transpiler_cache_static_buffer: [bun.MAX_PATH_BYTES]u8 = undefined;
    threadlocal var runtime_transpiler_cache: [:0]u8 = undefined;
    threadlocal var runtime_transpiler_cache_loaded: bool = false;
    pub var is_disabled = false;

    fn getCacheDir(
        buf: *[bun.MAX_PATH_BYTES]u8,
    ) [:0]const u8 {
        if (is_disabled) return "";

        if (!runtime_transpiler_cache_loaded) {
            runtime_transpiler_cache_loaded = true;
            runtime_transpiler_cache = @constCast(reallyGetCacheDir(&runtime_transpiler_cache_static_buffer));
            if (runtime_transpiler_cache.len == 0) {
                is_disabled = true;
                return "";
            }
        }

        @memcpy(buf[0..runtime_transpiler_cache.len], runtime_transpiler_cache);
        buf[runtime_transpiler_cache.len] = 0;

        return buf[0..runtime_transpiler_cache.len :0];
    }

    pub fn fromFile(
        input_hash: u64,
        feature_hash: u64,
        input_stat_size: u64,
        sourcemap_allocator: std.mem.Allocator,
        output_code_allocator: std.mem.Allocator,
    ) !Entry {
        var tracer = bun.tracy.traceNamed(@src(), "RuntimeTranspilerCache.fromFile");
        defer tracer.end();

        var cache_file_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
        const cache_file_path = try getCacheFilePath(&cache_file_path_buf, input_hash);
        if (cache_file_path.len == 0) {
            return error.CacheDisabled;
        }
        return fromFileWithCacheFilePath(
            bun.PathString.init(cache_file_path),
            input_hash,
            feature_hash,
            input_stat_size,
            sourcemap_allocator,
            output_code_allocator,
        );
    }

    pub fn fromFileWithCacheFilePath(
        cache_file_path: bun.PathString,
        input_hash: u64,
        feature_hash: u64,
        input_stat_size: u64,
        sourcemap_allocator: std.mem.Allocator,
        output_code_allocator: std.mem.Allocator,
    ) !Entry {
        var metadata_bytes_buf: [Metadata.size * 2]u8 = undefined;
        const cache_fd = try bun.sys.open(cache_file_path.sliceAssumeZ(), std.os.O.RDONLY, 0).unwrap();
        defer _ = bun.sys.close(cache_fd);
        errdefer {
            // On any error, we delete the cache file
            _ = bun.sys.unlink(cache_file_path.sliceAssumeZ());
        }

        const file = std.fs.File{ .handle = bun.fdcast(cache_fd) };
        const metadata_bytes = try file.preadAll(&metadata_bytes_buf, 0);
        var metadata_stream = std.io.fixedBufferStream(metadata_bytes_buf[0..metadata_bytes]);

        var entry = Entry{
            .metadata = Metadata{},
            .output_code = .{ .utf8 = "" },
            .sourcemap = "",
        };
        var reader = metadata_stream.reader();
        try entry.metadata.decode(reader);
        if (entry.metadata.input_hash != input_hash or entry.metadata.input_byte_length != input_stat_size) {
            // delete the cache in this case
            return error.InvalidInputHash;
        }

        if (entry.metadata.features_hash != feature_hash) {
            // delete the cache in this case
            return error.MismatchedFeatureHash;
        }

        try entry.load(file, sourcemap_allocator, output_code_allocator);

        return entry;
    }

    pub fn isEligible(
        _: *const @This(),
        path: *const bun.fs.Path,
    ) bool {
        return path.isFile();
    }

    pub fn toFile(
        input_byte_length: u64,
        input_hash: u64,
        features_hash: u64,
        sourcemap: []const u8,
        source_code: bun.String,
        exports_kind: bun.JSAst.ExportsKind,
    ) !void {
        var tracer = bun.tracy.traceNamed(@src(), "RuntimeTranspilerCache.toFile");
        defer tracer.end();

        var cache_file_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
        const output_code: Entry.OutputCode = switch (source_code.encoding()) {
            .utf8 => .{ .utf8 = source_code.byteSlice() },
            else => .{ .string = source_code },
        };

        const cache_file_path = try getCacheFilePath(&cache_file_path_buf, input_hash);

        if (cache_file_path.len == 0) {
            return;
        }

        const cache_dir_fd = brk: {
            if (std.fs.path.dirname(cache_file_path)) |dirname| {
                const dir = try std.fs.cwd().makeOpenPathIterable(dirname, .{ .access_sub_paths = true });
                break :brk bun.toFD(dir.dir.fd);
            }

            break :brk bun.toFD(std.fs.cwd().fd);
        };
        defer {
            if (cache_dir_fd != bun.toFD(std.fs.cwd().fd)) _ = bun.sys.close(cache_dir_fd);
        }

        try Entry.save(
            cache_dir_fd,
            bun.PathString.init(cache_file_path),
            input_byte_length,
            input_hash,
            features_hash,
            sourcemap,
            output_code,
            exports_kind,
        );
    }

    pub fn get(
        this: *RuntimeTranspilerCache,
        source: *const bun.logger.Source,
        parser_options: *const bun.js_parser.Parser.Options,
        used_jsx: bool,
    ) bool {
        if (comptime !bun.FeatureFlags.runtime_transpiler_cache)
            return false;

        if (this.entry != null) return true;

        if (source.contents.len < MINIMUM_CACHE_SIZE)
            return false;

        if (is_disabled)
            return false;

        if (!source.path.isFile())
            return false;

        const input_hash = this.input_hash orelse hash(source.contents);
        this.input_hash = input_hash;
        this.input_byte_length = source.contents.len;

        var features_hasher = std.hash.Wyhash.init(seed);
        parser_options.hashForRuntimeTranspiler(&features_hasher, used_jsx);
        this.features_hash = features_hasher.final();

        this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator) catch |err| {
            debug("get(\"{s}\") = {s}", .{ source.path.text, @errorName(err) });
            return false;
        };
        if (comptime bun.Environment.allow_assert)
            debug("get(\"{s}\") = {d} bytes", .{ source.path.text, this.entry.?.output_code.byteSlice().len });

        bun.Analytics.Features.transpiler_cache = true;

        return this.entry != null;
    }

    pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8) void {
        if (this.input_hash == null or is_disabled) {
            return;
        }
        std.debug.assert(this.entry == null);
        const output_code = bun.String.createLatin1(output_code_bytes);
        this.output_code = output_code;

        toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, output_code, this.exports_kind) catch |err| {
            debug("put() = {s}", .{@errorName(err)});
            return;
        };
        if (comptime bun.Environment.allow_assert)
            debug("put() = {d} bytes", .{output_code.latin1().len});
    }
};
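The on-disk format implied by `Metadata.encode` above is a fixed-size little-endian header followed by the output code and then the sourcemap, at the byte offsets recorded in the header. A hedged way to eyeball a cache entry (the file name and directory are illustrative; the real location depends on the environment variables checked in `reallyGetCacheDir`):

```sh
# The first 78 bytes are the metadata header: u32 cache_version, u8 module_type,
# u8 output_encoding, then nine u64 fields (features hash, input length/hash,
# output offset/length/hash, sourcemap offset/length/hash)
xxd -l 78 "$HOME/.cache/bun/@t@/1f3a9c2d4e5b6a70.pile"
```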
Submodule src/bun.js/WebKit updated: 63d0e18c06...0aa1f6dfc9
@@ -135,7 +135,26 @@ JSValue createEnvironmentVariablesMap(Zig::GlobalObject* globalObject)
            hasTZ = true;
            continue;
        }
        object->putDirectCustomAccessor(vm, Identifier::fromString(vm, name), JSC::CustomGetterSetter::create(vm, jsGetterEnvironmentVariable, jsSetterEnvironmentVariable), JSC::PropertyAttribute::CustomAccessor | 0);
        ASSERT(len > 0);

        Identifier identifier = Identifier::fromString(vm, name);

        // CustomGetterSetter doesn't support indexed properties yet.
        // This causes strange issues when the environment variable name is an integer.
        if (UNLIKELY(chars[0] >= '0' && chars[0] <= '9')) {
            if (auto index = parseIndex(identifier)) {
                ZigString valueString = { nullptr, 0 };
                ZigString nameStr = toZigString(name);
                JSValue value = jsUndefined();
                if (Bun__getEnvValue(globalObject, &nameStr, &valueString)) {
                    value = jsString(vm, Zig::toStringCopy(valueString));
                }
                object->putDirectIndex(globalObject, *index, value, 0, PutDirectIndexLikePutDirect);
                continue;
            }
        }

        object->putDirectCustomAccessor(vm, identifier, JSC::CustomGetterSetter::create(vm, jsGetterEnvironmentVariable, jsSetterEnvironmentVariable), JSC::PropertyAttribute::CustomAccessor | 0);
    }

    unsigned int TZAttrs = JSC::PropertyAttribute::CustomAccessor | 0;
@@ -159,6 +159,10 @@ const BunDebugHolder = struct {
/// This can technically fail if concurrent access across processes happens, or permission issues.
/// Errors here should always be ignored.
fn dumpSource(specifier: string, printer: anytype) void {
    dumpSourceString(specifier, printer.ctx.getWritten());
}

fn dumpSourceString(specifier: string, written: []const u8) void {
    if (comptime Environment.isWindows) return;
    if (BunDebugHolder.dir == null) {
        BunDebugHolder.dir = std.fs.cwd().makeOpenPathIterable("/tmp/bun-debug-src/", .{}) catch return;

@@ -172,9 +176,9 @@ fn dumpSource(specifier: string, printer: anytype) void {
    if (std.fs.path.dirname(specifier)) |dir_path| {
        var parent = dir.dir.makeOpenPathIterable(dir_path[1..], .{}) catch return;
        defer parent.close();
        parent.dir.writeFile(std.fs.path.basename(specifier), printer.ctx.getWritten()) catch return;
        parent.dir.writeFile(std.fs.path.basename(specifier), written) catch return;
    } else {
        dir.dir.writeFile(std.fs.path.basename(specifier), printer.ctx.getWritten()) catch return;
        dir.dir.writeFile(std.fs.path.basename(specifier), written) catch return;
    }
}

@@ -356,6 +360,11 @@ pub const RuntimeTranspilerStore = struct {
        const loader = this.loader;
        this.log = logger.Log.init(bun.default_allocator);

        var cache = JSC.RuntimeTranspilerCache{
            .output_code_allocator = allocator,
            .sourcemap_allocator = bun.default_allocator,
        };

        var vm = this.vm;
        var bundler: bun.Bundler = undefined;
        bundler = vm.bundler;

@@ -417,6 +426,7 @@ pub const RuntimeTranspilerStore = struct {
            vm.main_hash == hash and
                strings.eqlLong(vm.main, path.text, false),
            .set_breakpoint_on_first_line = vm.debugger != null and vm.debugger.?.set_breakpoint_on_first_line and strings.eqlLong(vm.main, path.text, true) and setBreakPointOnFirstLine(),
            .runtime_transpiler_cache = if (!JSC.RuntimeTranspilerCache.is_disabled) &cache else null,
        };

        defer {

@@ -480,6 +490,37 @@ pub const RuntimeTranspilerStore = struct {
            }
        }

        if (cache.entry) |*entry| {
            const duped = String.create(specifier);
            vm.source_mappings.putMappings(parse_result.source, .{
                .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len },
                .allocator = bun.default_allocator,
            }) catch {};

            if (comptime Environment.allow_assert) {
                dumpSourceString(specifier, entry.output_code.byteSlice());
            }

            this.resolved_source = ResolvedSource{
                .allocator = null,
                .source_code = switch (entry.output_code) {
                    .string => entry.output_code.string,
                    .utf8 => brk: {
                        const result = bun.String.create(entry.output_code.utf8);
                        cache.output_code_allocator.free(entry.output_code.utf8);
                        entry.output_code.utf8 = "";
                        break :brk result;
                    },
                },
                .specifier = duped,
                .source_url = if (duped.eqlUTF8(path.text)) duped.dupeRef() else String.init(path.text),
                .hash = 0,
                .commonjs_exports_len = if (entry.metadata.module_type == .cjs) std.math.maxInt(u32) else 0,
            };

            return;
        }

        if (parse_result.already_bundled) {
            const duped = String.create(specifier);
            this.resolved_source = ResolvedSource{

@@ -560,7 +601,8 @@ pub const RuntimeTranspilerStore = struct {
            .allocator = null,
            .source_code = brk: {
                const written = printer.ctx.getWritten();
                const result = bun.String.createLatin1(written);

                const result = cache.output_code orelse bun.String.createLatin1(written);

                if (written.len > 1024 * 1024 * 2 or vm.smol) {
                    printer.ctx.buffer.deinit();

@@ -1429,6 +1471,11 @@ pub const ModuleLoader = struct {
            package_json = jsc_vm.bun_watcher.watchlist().items(.package_json)[index];
        }

        var cache = JSC.RuntimeTranspilerCache{
            .output_code_allocator = allocator,
            .sourcemap_allocator = bun.default_allocator,
        };

        var old = jsc_vm.bundler.log;
        jsc_vm.bundler.log = log;
        jsc_vm.bundler.linker.log = log;

@@ -1480,6 +1527,8 @@ pub const ModuleLoader = struct {
            .allow_commonjs = true,
            .inject_jest_globals = jsc_vm.bundler.options.rewrite_jest_for_tests and is_main,
            .set_breakpoint_on_first_line = is_main and jsc_vm.debugger != null and jsc_vm.debugger.?.set_breakpoint_on_first_line and setBreakPointOnFirstLine(),

            .runtime_transpiler_cache = if (!disable_transpilying and !JSC.RuntimeTranspilerCache.is_disabled) &cache else null,
        };
        defer {
            if (should_close_input_file_fd and input_file_fd != bun.invalid_fd) {

@@ -1608,6 +1657,51 @@ pub const ModuleLoader = struct {
            };
        }

        if (cache.entry) |*entry| {
            jsc_vm.source_mappings.putMappings(parse_result.source, .{
                .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len },
                .allocator = bun.default_allocator,
            }) catch {};

            if (comptime Environment.allow_assert) {
                dumpSourceString(specifier, entry.output_code.byteSlice());
            }

            return ResolvedSource{
                .allocator = null,
                .source_code = switch (entry.output_code) {
                    .string => entry.output_code.string,
                    .utf8 => brk: {
                        const result = bun.String.create(entry.output_code.utf8);
                        cache.output_code_allocator.free(entry.output_code.utf8);
                        entry.output_code.utf8 = "";
                        break :brk result;
                    },
                },
                .specifier = input_specifier,
                .source_url = if (input_specifier.eqlUTF8(path.text)) input_specifier.dupeRef() else String.init(path.text),
                .hash = 0,
                .commonjs_exports_len = if (entry.metadata.module_type == .cjs) std.math.maxInt(u32) else 0,
                .tag = brk: {
                    if (entry.metadata.module_type == .cjs and parse_result.source.path.isFile()) {
                        var actual_package_json: *PackageJSON = package_json orelse brk2: {
                            // this should already be cached virtually always so it's fine to do this
                            var dir_info = (jsc_vm.bundler.resolver.readDirInfo(parse_result.source.path.name.dir) catch null) orelse
                                break :brk .javascript;

                            break :brk2 dir_info.package_json orelse dir_info.enclosing_package_json;
                        } orelse break :brk .javascript;

                        if (actual_package_json.module_type == .esm) {
                            break :brk ResolvedSource.Tag.package_json_type_module;
                        }
                    }

                    break :brk ResolvedSource.Tag.javascript;
                },
            };
        }

        const start_count = jsc_vm.bundler.linker.import_counter;

        // We _must_ link because:

@@ -1725,7 +1819,7 @@ pub const ModuleLoader = struct {
            .allocator = null,
            .source_code = brk: {
                const written = printer.ctx.getWritten();
                const result = bun.String.createLatin1(written);
                const result = cache.output_code orelse bun.String.createLatin1(written);

                if (written.len > 1024 * 1024 * 2 or jsc_vm.smol) {
                    printer.ctx.buffer.deinit();
@@ -31,6 +31,8 @@ pub const meta = @import("./meta.zig");
pub const ComptimeStringMap = @import("./comptime_string_map.zig").ComptimeStringMap;
pub const base64 = @import("./base64/base64.zig");
pub const path = @import("./resolver/resolve_path.zig");
pub const resolver = @import("./resolver/resolver.zig");
pub const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;
pub const fmt = struct {
    pub usingnamespace std.fmt;

@@ -2056,3 +2058,5 @@ pub fn exitThread() noreturn {
pub fn outOfMemory() noreturn {
    @panic("Out of memory");
}

pub const Tmpfile = @import("./tmp.zig").Tmpfile;
@@ -103,6 +103,8 @@ pub const Run = struct {
            .unspecified => {},
        }

        b.options.env.behavior = .load_all_without_inlining;

        b.configureRouter(false) catch {
            if (Output.enable_ansi_colors_stderr) {
                vm.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};

@@ -191,6 +193,7 @@ pub const Run = struct {
        b.resolver.opts.minify_identifiers = ctx.bundler_options.minify_identifiers;
        b.resolver.opts.minify_whitespace = ctx.bundler_options.minify_whitespace;

        b.options.env.behavior = .load_all_without_inlining;
        // b.options.minify_syntax = ctx.bundler_options.minify_syntax;

        switch (ctx.debug.macros) {
@@ -76,6 +76,8 @@ pub const ParseResult = struct {
    empty: bool = false,
    pending_imports: _resolver.PendingResolution.List = .{},

    runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = null,

    pub fn isPendingImport(this: *const ParseResult, id: u32) bool {
        const import_record_ids = this.pending_imports.items(.import_record_id);

@@ -455,7 +457,7 @@ pub const Bundler = struct {

    pub fn runEnvLoader(this: *Bundler) !void {
        switch (this.options.env.behavior) {
            .prefix, .load_all => {
            .prefix, .load_all, .load_all_without_inlining => {
                // Step 1. Load the project root.
                const dir_info = this.resolver.readDirInfo(this.fs.top_level_dir) catch return orelse return;

@@ -813,6 +815,7 @@ pub const Bundler = struct {
                .esm_ascii,
                is_source_map,
                source_map_handler,
                null,
            ),
            .cjs => try bundler.printWithSourceMapMaybe(
                result.ast,

@@ -822,6 +825,7 @@ pub const Bundler = struct {
                .cjs,
                is_source_map,
                source_map_handler,
                null,
            ),
            else => unreachable,
        },

@@ -843,6 +847,7 @@ pub const Bundler = struct {
                .esm,
                is_source_map,
                source_map_handler,
                null,
            ),
            .cjs => try bundler.printWithSourceMapMaybe(
                result.ast,

@@ -852,6 +857,7 @@ pub const Bundler = struct {
                .cjs,
                is_source_map,
                source_map_handler,
                null,
            ),
            else => unreachable,
        },

@@ -1059,6 +1065,7 @@ pub const Bundler = struct {
        comptime format: js_printer.Format,
        comptime enable_source_map: bool,
        source_map_context: ?js_printer.SourceMapHandler,
        runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache,
    ) !usize {
        const tracer = bun.tracy.traceNamed(@src(), if (enable_source_map) "JSPrinter.printWithSourceMap" else "JSPrinter.print");
        defer tracer.end();

@@ -1084,6 +1091,7 @@ pub const Bundler = struct {
                .minify_syntax = bundler.options.minify_syntax,
                .minify_identifiers = bundler.options.minify_identifiers,
                .transform_only = bundler.options.transform_only,
                .runtime_transpiler_cache = runtime_transpiler_cache,
            },
            enable_source_map,
        ),

@@ -1107,6 +1115,7 @@ pub const Bundler = struct {
                .minify_identifiers = bundler.options.minify_identifiers,
                .transform_only = bundler.options.transform_only,
                .import_meta_ref = ast.import_meta_ref,
                .runtime_transpiler_cache = runtime_transpiler_cache,
            },
            enable_source_map,
        ),

@@ -1131,6 +1140,7 @@ pub const Bundler = struct {
                .module_type = if (ast.exports_kind == .cjs) .cjs else .esm,
                .inline_require_and_import_errors = false,
                .import_meta_ref = ast.import_meta_ref,
                .runtime_transpiler_cache = runtime_transpiler_cache,
            },
            enable_source_map,
        ),

@@ -1154,6 +1164,7 @@ pub const Bundler = struct {
            format,
            false,
            null,
            null,
        );
    }

@@ -1173,6 +1184,7 @@ pub const Bundler = struct {
            format,
            true,
            handler,
            result.runtime_transpiler_cache,
        );
    }

@@ -1198,6 +1210,8 @@ pub const Bundler = struct {

        dont_bundle_twice: bool = false,
        allow_commonjs: bool = false,

        runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = null,
    };

    pub fn parse(

@@ -1316,6 +1330,7 @@ pub const Bundler = struct {
        opts.features.should_fold_typescript_constant_expressions = loader.isTypeScript() or target.isBun() or bundler.options.minify_syntax;
        opts.features.dynamic_require = target.isBun();
        opts.features.no_macros = bundler.options.no_macros;
        opts.features.runtime_transpiler_cache = this_parse.runtime_transpiler_cache;
        opts.transform_only = bundler.options.transform_only;

        // @bun annotation

@@ -1374,6 +1389,14 @@ pub const Bundler = struct {
                .source = source,
                .loader = loader,
                .input_fd = input_fd,
                .runtime_transpiler_cache = this_parse.runtime_transpiler_cache,
            },
            .cached => ParseResult{
                .ast = undefined,
                .runtime_transpiler_cache = this_parse.runtime_transpiler_cache,
                .source = source,
                .loader = loader,
                .input_fd = input_fd,
            },
            .already_bundled => ParseResult{
                .ast = undefined,

@@ -1743,7 +1766,7 @@ pub const Bundler = struct {
        var entry_points = try allocator.alloc(_resolver.Result, bundler.options.entry_points.len);
        entry_points = entry_points[0..bundler.enqueueEntryPoints(entry_points, true)];

        if (log.level == .verbose) {
        if (log.level.atLeast(.debug)) {
            bundler.resolver.debug_logs = try DebugLogs.init(allocator);
        }
        bundler.options.transform_only = true;
src/c.zig: 117 lines changed
@@ -105,90 +105,75 @@ pub fn lstat_absolute(path: [:0]const u8) !Stat {

// renameatZ fails when renaming across mount points
// we assume that this is relatively uncommon
pub fn moveFileZ(from_dir: std.os.fd_t, filename: [*:0]const u8, to_dir: std.os.fd_t, destination: [*:0]const u8) !void {
    std.os.renameatZ(from_dir, filename, to_dir, destination) catch |err| {
        switch (err) {
            error.RenameAcrossMountPoints => {
pub fn moveFileZ(from_dir: std.os.fd_t, filename: [:0]const u8, to_dir: std.os.fd_t, destination: [:0]const u8) !void {
    switch (bun.sys.renameat(from_dir, filename, to_dir, destination)) {
        .err => |err| {
            // allow over-writing an empty directory
            if (err.getErrno() == .ISDIR) {
                _ = bun.sys.rmdirat(to_dir, destination.ptr);

                try (bun.sys.renameat(from_dir, filename, to_dir, destination).unwrap());
                return;
            }

            if (err.getErrno() == .XDEV) {
                try moveFileZSlow(from_dir, filename, to_dir, destination);
            },
            else => {
                return err;
            },
        }
    };
            } else {
                return bun.AsyncIO.asError(err.errno);
            }
        },
        .result => {},
    }
}

pub fn moveFileZWithHandle(from_handle: std.os.fd_t, from_dir: std.os.fd_t, filename: [*:0]const u8, to_dir: std.os.fd_t, destination: [*:0]const u8) !void {
    std.os.renameatZ(from_dir, filename, to_dir, destination) catch |err| {
        switch (err) {
            error.RenameAcrossMountPoints => {
pub fn moveFileZWithHandle(from_handle: std.os.fd_t, from_dir: std.os.fd_t, filename: [:0]const u8, to_dir: std.os.fd_t, destination: [:0]const u8) !void {
    switch (bun.sys.renameat(from_dir, filename, to_dir, destination)) {
        .err => |err| {
            // allow over-writing an empty directory
            if (err.getErrno() == .ISDIR) {
                _ = bun.sys.rmdirat(to_dir, destination.ptr);

                try (bun.sys.renameat(from_dir, filename, to_dir, destination).unwrap());
                return;
            }

            if (err.getErrno() == .XDEV) {
                try copyFileZSlowWithHandle(from_handle, to_dir, destination);
                std.os.unlinkatZ(from_dir, filename, 0) catch {};
            },
            else => {
                return err;
            },
        }
    };
                _ = bun.sys.unlinkat(from_dir, filename);
            }

            return bun.AsyncIO.asError(err.errno);
        },
        .result => {},
    }
}

// On Linux, this will be fast because sendfile() supports copying between two file descriptors on disk
// macOS & BSDs will be slow because
pub fn moveFileZSlow(from_dir: std.os.fd_t, filename: [*:0]const u8, to_dir: std.os.fd_t, destination: [*:0]const u8) !void {
    const in_handle = try std.os.openatZ(from_dir, filename, std.os.O.RDONLY | std.os.O.CLOEXEC, if (Environment.isWindows) 0 else 0o600);
    defer std.os.close(in_handle);
pub fn moveFileZSlow(from_dir: std.os.fd_t, filename: [:0]const u8, to_dir: std.os.fd_t, destination: [:0]const u8) !void {
    const in_handle = try bun.sys.openat(from_dir, filename, std.os.O.RDONLY | std.os.O.CLOEXEC, if (Environment.isWindows) 0 else 0o644).unwrap();
    defer _ = bun.sys.close(in_handle);
    _ = bun.sys.unlinkat(from_dir, filename);
    try copyFileZSlowWithHandle(in_handle, to_dir, destination);
    std.os.unlinkatZ(from_dir, filename, 0) catch {};
}

pub fn copyFileZSlowWithHandle(in_handle: std.os.fd_t, to_dir: std.os.fd_t, destination: [*:0]const u8) !void {
pub fn copyFileZSlowWithHandle(in_handle: std.os.fd_t, to_dir: std.os.fd_t, destination: [:0]const u8) !void {
    const stat_ = if (comptime Environment.isPosix) try std.os.fstat(in_handle) else void{};
    const size = brk: {
        if (comptime Environment.isPosix) {
            break :brk stat_.size;
        }

        break :brk try std.os.windows.GetFileSizeEx(in_handle);
    };

    // delete if exists, don't care if it fails. it may fail due to the file not existing
    // delete here because we run into weird truncation issues if we do not
    // ftruncate() instead didn't work.
    // this is technically racy because it could end up deleting the file without saving
    std.os.unlinkatZ(to_dir, destination, 0) catch {};
    const out_handle = try std.os.openatZ(
    const out_handle = try bun.sys.openat(
        to_dir,
        destination,
        std.os.O.WRONLY | std.os.O.CREAT | std.os.O.CLOEXEC,
        if (comptime Environment.isPosix) 0o022 else 0,
    );
    defer std.os.close(out_handle);
    if (comptime Environment.isLinux) {
        _ = std.os.system.fallocate(out_handle, 0, 0, @as(i64, @intCast(size)));
        _ = try std.os.sendfile(out_handle, in_handle, 0, @as(usize, @intCast(size)), &[_]std.os.iovec_const{}, &[_]std.os.iovec_const{}, 0);
    } else {
        if (comptime Environment.isMac) {
            // if this fails, it doesn't matter
            // we only really care about read & write succeeding
            PlatformSpecific.preallocate_file(
                out_handle,
                @as(std.os.off_t, @intCast(0)),
                @as(std.os.off_t, @intCast(size)),
            ) catch {};
        }
        std.os.O.WRONLY | std.os.O.CREAT | std.os.O.CLOEXEC | std.os.O.TRUNC,
        if (comptime Environment.isPosix) 0o644 else 0,
    ).unwrap();
    defer _ = bun.sys.close(out_handle);

        var buf: [8092 * 2]u8 = undefined;
        var total_read: usize = 0;
        while (true) {
            const read = try std.os.pread(in_handle, &buf, total_read);
            total_read += read;
            if (read == 0) break;
            const bytes = buf[0..read];
            const written = try std.os.write(out_handle, bytes);
            if (written == 0) break;
        }
    if (comptime Environment.isLinux) {
        _ = std.os.linux.fallocate(out_handle, 0, 0, @intCast(stat_.size));
    }

    try bun.copyFile(in_handle, out_handle);

    if (comptime Environment.isPosix) {
        _ = fchmod(out_handle, stat_.mode);
        _ = fchown(out_handle, stat_.uid, stat_.gid);
src/cli.zig: 11 lines changed
@@ -558,6 +558,8 @@ pub const Arguments = struct {
                Command.Debugger{ .enable = .{
                    .path_or_port = inspect_flag,
                } };

            bun.JSC.RuntimeTranspilerCache.is_disabled = true;
        } else if (args.option("--inspect-wait")) |inspect_flag| {
            ctx.runtime_options.debugger = if (inspect_flag.len == 0)
                Command.Debugger{ .enable = .{

@@ -568,6 +570,8 @@ pub const Arguments = struct {
                    .path_or_port = inspect_flag,
                    .wait_for_connection = true,
                } };

            bun.JSC.RuntimeTranspilerCache.is_disabled = true;
        } else if (args.option("--inspect-brk")) |inspect_flag| {
            ctx.runtime_options.debugger = if (inspect_flag.len == 0)
                Command.Debugger{ .enable = .{

@@ -580,6 +584,8 @@ pub const Arguments = struct {
                    .wait_for_connection = true,
                    .set_breakpoint_on_first_line = true,
                } };

            bun.JSC.RuntimeTranspilerCache.is_disabled = true;
        }
    }

@@ -739,6 +745,11 @@ pub const Arguments = struct {
        if (cmd == .AutoCommand or cmd == .RunCommand) {
            ctx.debug.silent = args.flag("--silent");
            ctx.debug.run_in_bun = args.flag("--bun") or ctx.debug.run_in_bun;

            if (opts.define) |define| {
                if (define.keys.len > 0)
                    bun.JSC.RuntimeTranspilerCache.is_disabled = true;
            }
        }

        opts.resolve = Api.ResolveMode.lazy;
@@ -27,7 +27,6 @@ pub fn copyFile(fd_in: os.fd_t, fd_out: os.fd_t) CopyFileError!void {
    const rc = os.system.fcopyfile(fd_in, fd_out, null, os.system.COPYFILE_DATA);
    switch (os.errno(rc)) {
        .SUCCESS => return,
        .INVAL => unreachable,
        .NOMEM => return error.SystemResources,
        // The source file is not a directory, symbolic link, or regular file.
        // Try with the fallback path before giving up.
@@ -240,7 +240,7 @@ pub const Loader = struct {
        var key_buf_len: usize = 0;
        var e_strings_to_allocate: usize = 0;

        if (behavior != .disable) {
        if (behavior != .disable and behavior != .load_all_without_inlining) {
            if (behavior == .prefix) {
                std.debug.assert(prefix.len > 0);
@@ -181,3 +181,5 @@ pub const disable_on_windows_due_to_bugs = env.isWindows;

// https://github.com/oven-sh/bun/issues/5426#issuecomment-1813865316
pub const disable_auto_js_to_ts_in_node_modules = true;

pub const runtime_transpiler_cache = true;
src/fs.zig: 32 lines changed
@@ -72,7 +72,7 @@ pub const FileSystem = struct {

    var tmpname_id_number = std.atomic.Atomic(u32).init(0);
    pub fn tmpname(_: *const FileSystem, extname: string, buf: []u8, hash: u64) ![*:0]u8 {
        const hex_value = @as(u64, @truncate(@as(u128, @intCast(hash)) ^ @as(u128, @intCast(std.time.nanoTimestamp()))));
        const hex_value = @as(u64, @truncate(@as(u128, @intCast(hash)) | @as(u128, @intCast(std.time.nanoTimestamp()))));

        return try std.fmt.bufPrintZ(buf, ".{any}-{any}.{s}", .{
            bun.fmt.hexIntLower(hex_value),

@@ -494,6 +494,10 @@ pub const FileSystem = struct {
        return path_handler.joinAbsStringBuf(f.top_level_dir, buf, parts, .loose);
    }

    pub fn absBufZ(f: *@This(), parts: anytype, buf: []u8) stringZ {
        return path_handler.joinAbsStringBufZ(f.top_level_dir, buf, parts, .loose);
    }

    pub fn joinAlloc(f: *@This(), allocator: std.mem.Allocator, parts: anytype) !string {
        const joined = f.join(parts);
        return try allocator.dupe(u8, joined);

@@ -617,14 +621,6 @@ pub const FileSystem = struct {
        tmpdir_path_set = true;
    }

    pub fn fetchCacheFile(fs: *RealFS, basename: string) !std.fs.File {
        const file = try fs._fetchCacheFile(basename);
        if (comptime FeatureFlags.store_file_descriptors) {
            setMaxFd(file.handle);
        }
        return file;
    }

    pub const TmpfilePosix = struct {
        fd: bun.FileDescriptor = bun.invalid_fd,
        dir_fd: bun.FileDescriptor = bun.invalid_fd,

@@ -659,7 +655,7 @@ pub const FileSystem = struct {
        std.debug.assert(this.fd != bun.invalid_fd);
        std.debug.assert(this.dir_fd != bun.invalid_fd);

        try C.moveFileZWithHandle(bun.fdcast(this.fd), bun.fdcast(this.dir_fd), from_name, std.fs.cwd().fd, name);
        try C.moveFileZWithHandle(bun.fdcast(this.fd), bun.fdcast(this.dir_fd), bun.sliceTo(from_name, 0), std.fs.cwd().fd, bun.sliceTo(name, 0));
        this.close();
    }

@@ -729,18 +725,6 @@ pub const FileSystem = struct {
        }
    };

    inline fn _fetchCacheFile(fs: *RealFS, basename: string) !std.fs.File {
        var parts = [_]string{ "node_modules", ".cache", basename };
        var path = fs.parent_fs.join(&parts);
        return std.fs.cwd().openFile(path, .{ .mode = .read_write, .lock = .Shared }) catch {
            path = fs.parent_fs.join(parts[0..2]);
            try std.fs.cwd().makePath(path);

            path = fs.parent_fs.join(&parts);
            return try std.fs.cwd().createFile(path, .{ .mode = .read_write, .lock = .Shared });
        };
    }

    pub fn needToCloseFiles(rfs: *const RealFS) bool {
        // On Windows, we must always close open file handles
        // Windows locks files

@@ -1761,6 +1745,10 @@ pub const Path = struct {
    pub fn isNodeModule(this: *const Path) bool {
        return strings.lastIndexOf(this.name.dir, std.fs.path.sep_str ++ "node_modules" ++ std.fs.path.sep_str) != null;
    }

    pub fn isJSXFile(this: *const Path) bool {
        return strings.hasSuffixComptime(this.name.filename, ".jsx") or strings.hasSuffixComptime(this.name.filename, ".tsx");
    }
};

// pub fn customRealpath(allocator: std.mem.Allocator, path: string) !string {
@@ -279,16 +279,19 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
|
||||
}
|
||||
|
||||
// Now that we've extracted the archive, we rename.
|
||||
std.os.renameatZ(tmpdir.fd, tmpname, cache_dir.fd, folder_name) catch |err| {
|
||||
this.package_manager.log.addErrorFmt(
|
||||
null,
|
||||
logger.Loc.Empty,
|
||||
this.package_manager.allocator,
|
||||
"moving \"{s}\" to cache dir failed: {s}\n From: {s}\n To: {s}",
|
||||
.{ name, @errorName(err), tmpname, folder_name },
|
||||
) catch unreachable;
|
||||
return error.InstallFailed;
|
||||
};
|
||||
switch (bun.sys.renameat(tmpdir.fd, bun.sliceTo(tmpname, 0), cache_dir.fd, folder_name)) {
|
||||
.err => |err| {
|
||||
this.package_manager.log.addErrorFmt(
|
||||
null,
|
||||
logger.Loc.Empty,
|
||||
this.package_manager.allocator,
|
||||
"moving \"{s}\" to cache dir failed: {}\n From: {s}\n To: {s}",
|
||||
.{ name, err, tmpname, folder_name },
|
||||
) catch unreachable;
|
||||
return error.InstallFailed;
|
||||
},
|
||||
.result => {},
|
||||
}
|
||||
|
||||
// We return a resolved absolute file path to the cache dir.
// To get that directory, we open the directory again.

@@ -6470,6 +6470,7 @@ pub const Part = struct {

pub const Result = union(enum) {
already_bundled: void,
cached: void,
ast: Ast,
};
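
How a call site can consume this union (a hedged sketch; `parse_result` and the handler names are invented for illustration):

switch (parse_result) {
    // `// @bun` pragma seen: the source is already transpiled, reuse it.
    .already_bundled => useSourceVerbatim(),
    // Runtime transpiler cache hit: transpiled output was loaded from disk,
    // so there is no AST to visit or print.
    .cached => useCachedOutput(),
    // Normal path: a freshly parsed AST.
    .ast => |ast| try printAst(ast),
}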

@@ -1,3 +1,7 @@
/// ** IMPORTANT **
/// ** When making changes to the JavaScript Parser that impact runtime behavior or fix bugs **
/// ** you must also increment the `expected_version` in RuntimeTranspilerCache.zig **
/// ** IMPORTANT **
pub const std = @import("std");
pub const logger = @import("root").bun.logger;
pub const js_lexer = bun.js_lexer;
@@ -2798,7 +2802,6 @@ pub const Parser = struct {
jsx: options.JSX.Pragma,
ts: bool = false,
keep_names: bool = true,
omit_runtime_for_tests: bool = false,
ignore_dce_annotations: bool = false,
preserve_unused_imports_ts: bool = false,
use_define_for_class_fields: bool = false,
@@ -2820,6 +2823,31 @@

transform_only: bool = false,

pub fn hashForRuntimeTranspiler(this: *const Options, hasher: *std.hash.Wyhash, did_use_jsx: bool) void {
std.debug.assert(!this.bundle);

if (did_use_jsx) {
if (this.jsx.parse) {
this.jsx.hashForRuntimeTranspiler(hasher);
const jsx_optimizations = [_]bool{
this.features.jsx_optimization_inline,
this.features.jsx_optimization_hoist,
};
hasher.update(std.mem.asBytes(&jsx_optimizations));
} else {
hasher.update("NO_JSX");
}
}

if (this.ts) {
hasher.update("TS");
} else {
hasher.update("NO_TS");
}

this.features.hashForRuntimeTranspiler(hasher);
}
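
Taken together with the source bytes, this is enough to derive a cache key. A hedged sketch of how a caller might assemble one (`source`, `opts`, and `did_use_jsx` are illustrative names, not the exact call site):

var hasher = std.hash.Wyhash.init(0);
hasher.update(source.contents);
opts.hashForRuntimeTranspiler(&hasher, did_use_jsx);
// Any change to the source or to a transpiler-relevant option flips the key,
// which is what makes stale cache entries unreachable.
const cache_key = hasher.final();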

pub fn init(jsx: options.JSX.Pragma, loader: options.Loader) Options {
var opts = Options{
.ts = loader.isTypeScript(),
@@ -3070,12 +3098,26 @@
try p.lexer.next();
}

// Detect a leading "// @bun" pragma
if (p.lexer.bun_pragma and p.options.features.dont_bundle_twice) {
return js_ast.Result{
.already_bundled = {},
};
}

// We must check the cache only after we've consumed the hashbang and leading // @bun pragma
// We don't want to ever put files with `// @bun` into this cache, as that would be wasteful.
if (comptime Environment.isNative and bun.FeatureFlags.runtime_transpiler_cache) {
var runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = p.options.features.runtime_transpiler_cache;
if (runtime_transpiler_cache) |cache| {
if (cache.get(p.source, &p.options, p.options.jsx.parse and (!p.source.path.isNodeModule() or p.source.path.isJSXFile()))) {
return js_ast.Result{
.cached = {},
};
}
}
}

// Parse the file in the first pass, but do not bind symbols
var opts = ParseStatementOptions{ .is_module_scope = true };
const parse_tracer = bun.tracy.traceNamed(@src(), "JSParser.parse");
@@ -3271,45 +3313,57 @@
const postvisit_tracer = bun.tracy.traceNamed(@src(), "JSParser.postvisit");
defer postvisit_tracer.end();

const uses_dirname = p.symbols.items[p.dirname_ref.innerIndex()].use_count_estimate > 0;
const uses_filename = p.symbols.items[p.filename_ref.innerIndex()].use_count_estimate > 0;
var uses_dirname = p.symbols.items[p.dirname_ref.innerIndex()].use_count_estimate > 0;
var uses_filename = p.symbols.items[p.filename_ref.innerIndex()].use_count_estimate > 0;

if (uses_dirname or uses_filename) {
const count = @as(usize, @intFromBool(uses_dirname)) + @as(usize, @intFromBool(uses_filename));
var declared_symbols = DeclaredSymbol.List.initCapacity(p.allocator, count) catch unreachable;
var decls = p.allocator.alloc(G.Decl, count) catch unreachable;
if (uses_dirname) {
decls[0] = .{
.binding = p.b(B.Identifier{ .ref = p.dirname_ref }, logger.Loc.Empty),
.value = p.newExpr(
// TODO: test UTF-8 file paths
E.String.init(p.source.path.name.dir),
logger.Loc.Empty,
),
};
declared_symbols.appendAssumeCapacity(.{ .ref = p.dirname_ref, .is_top_level = true });
}
if (uses_filename) {
decls[@as(usize, @intFromBool(uses_dirname))] = .{
.binding = p.b(B.Identifier{ .ref = p.filename_ref }, logger.Loc.Empty),
.value = p.newExpr(
E.String.init(p.source.path.text),
logger.Loc.Empty,
),
};
declared_symbols.appendAssumeCapacity(.{ .ref = p.filename_ref, .is_top_level = true });
}
// Handle dirname and filename at bundle-time
// We always inject it at the top of the module
//
// This inlines
//
// var __dirname = "foo/bar"
// var __filename = "foo/bar/baz.js"
//
if (p.options.bundle or !p.options.features.commonjs_at_runtime) {
if (uses_dirname or uses_filename) {
const count = @as(usize, @intFromBool(uses_dirname)) + @as(usize, @intFromBool(uses_filename));
var declared_symbols = DeclaredSymbol.List.initCapacity(p.allocator, count) catch unreachable;
var decls = p.allocator.alloc(G.Decl, count) catch unreachable;
if (uses_dirname) {
decls[0] = .{
.binding = p.b(B.Identifier{ .ref = p.dirname_ref }, logger.Loc.Empty),
.value = p.newExpr(
// TODO: test UTF-8 file paths
E.String.init(p.source.path.name.dir),
logger.Loc.Empty,
),
};
declared_symbols.appendAssumeCapacity(.{ .ref = p.dirname_ref, .is_top_level = true });
}
if (uses_filename) {
decls[@as(usize, @intFromBool(uses_dirname))] = .{
.binding = p.b(B.Identifier{ .ref = p.filename_ref }, logger.Loc.Empty),
.value = p.newExpr(
E.String.init(p.source.path.text),
logger.Loc.Empty,
),
};
declared_symbols.appendAssumeCapacity(.{ .ref = p.filename_ref, .is_top_level = true });
}

var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
part_stmts[0] = p.s(S.Local{
.kind = .k_var,
.decls = Decl.List.init(decls),
}, logger.Loc.Empty);
before.append(js_ast.Part{
.stmts = part_stmts,
.declared_symbols = declared_symbols,
.tag = .dirname_filename,
}) catch unreachable;
var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
part_stmts[0] = p.s(S.Local{
.kind = .k_var,
.decls = Decl.List.init(decls),
}, logger.Loc.Empty);
before.append(js_ast.Part{
.stmts = part_stmts,
.declared_symbols = declared_symbols,
.tag = .dirname_filename,
}) catch unreachable;
uses_dirname = false;
uses_filename = false;
}
}

var did_import_fast_refresh = false;
@@ -3752,6 +3806,62 @@
}
}

// Handle dirname and filename at runtime.
//
// If we reach this point, it means:
//
// 1) we are building an ESM file that uses __dirname or __filename
// 2) we are targeting bun's runtime.
// 3) we are not bundling.
//
if (exports_kind == .esm and (uses_dirname or uses_filename)) {
std.debug.assert(!p.options.bundle);
const count = @as(usize, @intFromBool(uses_dirname)) + @as(usize, @intFromBool(uses_filename));
var declared_symbols = DeclaredSymbol.List.initCapacity(p.allocator, count) catch unreachable;
var decls = p.allocator.alloc(G.Decl, count) catch unreachable;
if (uses_dirname) {
// var __dirname = import.meta.dir
decls[0] = .{
.binding = p.b(B.Identifier{ .ref = p.dirname_ref }, logger.Loc.Empty),
.value = p.newExpr(
E.Dot{
.name = "dir",
.name_loc = logger.Loc.Empty,
.target = p.newExpr(E.ImportMeta{}, logger.Loc.Empty),
},
logger.Loc.Empty,
),
};
declared_symbols.appendAssumeCapacity(.{ .ref = p.dirname_ref, .is_top_level = true });
}
if (uses_filename) {
// var __filename = import.meta.path
decls[@as(usize, @intFromBool(uses_dirname))] = .{
.binding = p.b(B.Identifier{ .ref = p.filename_ref }, logger.Loc.Empty),
.value = p.newExpr(
E.Dot{
.name = "path",
.name_loc = logger.Loc.Empty,
.target = p.newExpr(E.ImportMeta{}, logger.Loc.Empty),
},
logger.Loc.Empty,
),
};
declared_symbols.appendAssumeCapacity(.{ .ref = p.filename_ref, .is_top_level = true });
}

var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
part_stmts[0] = p.s(S.Local{
.kind = .k_var,
.decls = Decl.List.init(decls),
}, logger.Loc.Empty);
before.append(js_ast.Part{
.stmts = part_stmts,
.declared_symbols = declared_symbols,
.tag = .dirname_filename,
}) catch unreachable;
}

if (exports_kind == .esm and p.commonjs_named_exports.count() > 0 and !p.unwrap_all_requires and !force_esm) {
exports_kind = .esm_with_dynamic_fallback_from_cjs;
}
@@ -3763,6 +3873,13 @@
for (p.import_records.items) |*item| {
// skip if they did import it
if (strings.eqlComptime(item.path.text, "bun:test") or strings.eqlComptime(item.path.text, "@jest/globals") or strings.eqlComptime(item.path.text, "vitest")) {
if (p.options.features.runtime_transpiler_cache) |cache| {
// If we rewrote import paths, we need to disable the runtime transpiler cache
if (!strings.eqlComptime(item.path.text, "bun:test")) {
cache.input_hash = null;
}
}

break :outer;
}
}
@@ -3819,6 +3936,11 @@
.import_record_indices = bun.BabyList(u32).init(import_record_indices),
.tag = .bun_test,
}) catch unreachable;

// If we injected jest globals, we need to disable the runtime transpiler cache
if (p.options.features.runtime_transpiler_cache) |cache| {
cache.input_hash = null;
}
}

if (p.legacy_cjs_import_stmts.items.len > 0 and p.options.legacy_transform_require_to_import) {
@@ -3937,6 +4059,19 @@
// Pop the module scope to apply the "ContainsDirectEval" rules
// p.popScope();

if (comptime Environment.isNative and bun.FeatureFlags.runtime_transpiler_cache) {
var runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = p.options.features.runtime_transpiler_cache;
if (runtime_transpiler_cache) |cache| {
if (p.macro_call_count != 0) {
// disable this for:
// - macros
cache.input_hash = null;
} else {
cache.exports_kind = exports_kind;
}
}
}

return js_ast.Result{ .ast = try p.toAST(parts_slice, exports_kind, wrapper_expr, hashbang) };
}

@@ -21985,6 +22120,11 @@ fn NewParser_(
this.fn_or_arrow_data_parse.allow_await = .allow_expr;
this.fn_or_arrow_data_parse.is_top_level = true;
}

if (comptime !is_typescript_enabled) {
// This is so it doesn't impact runtime transpiler caching when not in use
this.options.features.emit_decorator_metadata = false;
}
}
};
}

@@ -494,6 +494,8 @@ pub const Options = struct {
source_map_builder: ?*bun.sourcemap.Chunk.Builder = null,
css_import_behavior: Api.CssInJsBehavior = Api.CssInJsBehavior.facade,

runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = null,

commonjs_named_exports: js_ast.Ast.CommonJSNamedExports = .{},
commonjs_named_exports_deoptimized: bool = false,
commonjs_named_exports_ref: Ref = Ref.None,
@@ -5790,7 +5792,20 @@ pub fn printAst(
}
}

if (comptime generate_source_map) {
if (comptime FeatureFlags.runtime_transpiler_cache and generate_source_map) {
if (opts.source_map_handler) |handler| {
const source_maps_chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten());
if (opts.runtime_transpiler_cache) |cache| {
cache.put(printer.writer.ctx.getWritten(), source_maps_chunk.buffer.list.items);
}

try handler.onSourceMapChunk(source_maps_chunk, source.*);
} else {
if (opts.runtime_transpiler_cache) |cache| {
cache.put(printer.writer.ctx.getWritten(), "");
}
}
} else if (comptime generate_source_map) {
if (opts.source_map_handler) |handler| {
try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source.*);
}

@@ -94,3 +94,5 @@ pub const Codegen = struct {
};

pub const GeneratedClassesList = @import("./bun.js/bindings/generated_classes_list.zig").Classes;

pub const RuntimeTranspilerCache = @import("./bun.js/RuntimeTranspilerCache.zig").RuntimeTranspilerCache;

@@ -979,6 +979,15 @@ pub const JSX = struct {
production: string = "react/jsx-runtime",
};

pub fn hashForRuntimeTranspiler(this: *const Pragma, hasher: *std.hash.Wyhash) void {
for (this.factory) |factory| hasher.update(factory);
for (this.fragment) |fragment| hasher.update(fragment);
hasher.update(this.import_source.development);
hasher.update(this.import_source.production);
hasher.update(this.classic_import_source);
hasher.update(this.package_name);
}
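
The point of hashing every pragma field: two configurations that differ anywhere in their JSX setup must map to different cache keys. A self-contained illustration of that property (the import sources are just example strings):

var h1 = std.hash.Wyhash.init(0);
h1.update("react/jsx-runtime");
var h2 = std.hash.Wyhash.init(0);
h2.update("preact/jsx-runtime");
// Different pragma bytes yield different keys, so a project that switches
// its JSX import source cannot be served stale transpiled output.
std.debug.assert(h1.final() != h2.final());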

pub fn importSource(this: *const Pragma) string {
return switch (this.development) {
true => this.import_source.development,
@@ -1707,9 +1716,8 @@ pub const BundleOptions = struct {
opts.allow_runtime = false;
},
.bun => {
// If we're doing SSR, we want all the URLs to be the same as what it would be in the browser
// If we're not doing SSR, we want all the import paths to be absolute
opts.import_path_format = if (opts.import_path_format == .absolute_url) .absolute_url else .absolute_path;

opts.env.behavior = .load_all;
if (transform.extension_order.len == 0) {
// we must also support require'ing .node files
@@ -2018,7 +2026,7 @@ pub const OutputFile = struct {
}

pub fn moveTo(file: *const OutputFile, _: string, rel_path: []u8, dir: FileDescriptorType) !void {
try bun.C.moveFileZ(bun.fdcast(file.value.move.dir), &(try std.os.toPosixPath(file.value.move.getPathname())), bun.fdcast(dir), &(try std.os.toPosixPath(rel_path)));
try bun.C.moveFileZ(bun.fdcast(file.value.move.dir), bun.sliceTo(&(try std.os.toPosixPath(file.value.move.getPathname())), 0), bun.fdcast(dir), bun.sliceTo(&(try std.os.toPosixPath(rel_path)), 0));
}

pub fn copyTo(file: *const OutputFile, _: string, rel_path: []u8, dir: FileDescriptorType) !void {

@@ -331,6 +331,39 @@ pub const Runtime = struct {

emit_decorator_metadata: bool = false,

runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = null,

const hash_fields_for_runtime_transpiler = .{
.top_level_await,
.auto_import_jsx,
.allow_runtime,
.inlining,
.commonjs_named_exports,
.minify_syntax,
.minify_identifiers,
.dead_code_elimination,
.set_breakpoint_on_first_line,
.trim_unused_imports,
.should_fold_typescript_constant_expressions,
.dynamic_require,
.dont_bundle_twice,
.commonjs_at_runtime,
.emit_decorator_metadata,

// note that we do not include .inject_jest_globals, as we bail out of the cache entirely if this is true
};

pub fn hashForRuntimeTranspiler(this: *const Features, hasher: *std.hash.Wyhash) void {
std.debug.assert(this.runtime_transpiler_cache != null);

var bools: [std.meta.fieldNames(@TypeOf(hash_fields_for_runtime_transpiler)).len]bool = undefined;
inline for (hash_fields_for_runtime_transpiler, 0..) |field, i| {
bools[i] = @field(this, @tagName(field));
}

hasher.update(std.mem.asBytes(&bools));
}
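
The `inline for` above is unrolled at compile time: each enum literal in the tuple becomes a direct field access. A reduced, standalone version of the pattern (the `Flags` struct is invented for illustration):

const Flags = struct { a: bool = true, b: bool = false };
const flag_fields = .{ .a, .b };

fn packFlags(flags: Flags) [flag_fields.len]bool {
    var bools: [flag_fields.len]bool = undefined;
    // @tagName turns each comptime literal into a field name for @field.
    inline for (flag_fields, 0..) |field, i| {
        bools[i] = @field(flags, @tagName(field));
    }
    return bools;
}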

pub fn shouldUnwrapRequire(this: *const Features, package_name: string) bool {
return package_name.len > 0 and strings.indexEqualAny(this.unwrap_commonjs_packages, package_name) != null;
}

@@ -222,7 +222,7 @@ pub fn main() anyerror!void {

{
var clock1 = try std.time.Timer.start();
std.mem.doNotOptimizeAway(std.hash.XxHash64.hash(bytes));
std.mem.doNotOptimizeAway(std.hash.XxHash64.hash(0, bytes));
const zig_time = clock1.read();
std.debug.print(
"xxhash:\n\n zig: {any}\n\n",

@@ -446,9 +446,9 @@ pub const StandaloneModuleGraph = struct {
bun.C.moveFileZWithHandle(
fd,
std.fs.cwd().fd,
&(try std.os.toPosixPath(temp_location)),
bun.sliceTo(&(try std.os.toPosixPath(temp_location)), 0),
root_dir.dir.fd,
&(try std.os.toPosixPath(std.fs.path.basename(outfile))),
bun.sliceTo(&(try std.os.toPosixPath(std.fs.path.basename(outfile))), 0),
) catch |err| {
if (err == error.IsDir) {
Output.prettyErrorln("<r><red>error<r><d>:<r> {} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.quote(outfile)});

src/sys.zig
@@ -1013,6 +1013,17 @@ pub fn rename(from: [:0]const u8, to: [:0]const u8) Maybe(void) {
unreachable;
}

pub fn renameat(from_dir: bun.FileDescriptor, from: [:0]const u8, to_dir: bun.FileDescriptor, to: [:0]const u8) Maybe(void) {
while (true) {
if (Maybe(void).errnoSys(sys.renameat(from_dir, from, to_dir, to), .rename)) |err| {
if (err.getErrno() == .INTR) continue;
return err;
}
return Maybe(void).success;
}
unreachable;
}

pub fn chown(path: [:0]const u8, uid: os.uid_t, gid: os.gid_t) Maybe(void) {
while (true) {
if (Maybe(void).errnoSys(C.chown(path, uid, gid), .chown)) |err| {
@@ -1085,6 +1096,28 @@ pub fn unlink(from: [:0]const u8) Maybe(void) {
unreachable;
}

pub fn rmdirat(dirfd: bun.FileDescriptor, to: anytype) Maybe(void) {
while (true) {
if (Maybe(void).errnoSys(sys.unlinkat(dirfd, to, 1), .unlink)) |err| {
if (err.getErrno() == .INTR) continue;
return err;
}
return Maybe(void).success;
}
unreachable;
}

pub fn unlinkat(dirfd: bun.FileDescriptor, to: anytype) Maybe(void) {
while (true) {
if (Maybe(void).errnoSys(sys.unlinkat(dirfd, to, 0), .unlink)) |err| {
if (err.getErrno() == .INTR) continue;
return err;
}
return Maybe(void).success;
}
unreachable;
}
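
Both wrappers share one shape: retry on `.INTR` so callers never observe interrupted syscalls, and report everything else through `Maybe`; only the flag passed to the underlying `unlinkat` distinguishes directory removal from file removal. A hedged usage sketch (the descriptor and names are placeholders):

// Remove a cache entry file, then its now-empty parent directory.
switch (unlinkat(cache_dir_fd, "entry.pile")) {
    .err => |err| return .{ .err = err },
    .result => {},
}
switch (rmdirat(cache_dir_fd, "stale-subdir")) {
    .err => |err| return .{ .err = err },
    .result => {},
}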

pub fn getFdPath(fd_: bun.FileDescriptor, out_buffer: *[MAX_PATH_BYTES]u8) Maybe([]u8) {
const fd = bun.fdcast(fd_);
switch (comptime builtin.os.tag) {
@@ -1537,3 +1570,21 @@ pub fn linkat(dir_fd: bun.FileDescriptor, basename: []const u8, dest_dir_fd: bun
basename,
) orelse Maybe(void).success;
}

pub fn linkatTmpfile(tmpfd: bun.FileDescriptor, dirfd: bun.FileDescriptor, name: [:0]const u8) Maybe(void) {
if (comptime !Environment.isLinux) {
@compileError("Linux only.");
}

return Maybe(void).errnoSysP(
std.os.linux.linkat(
bun.fdcast(tmpfd),
"",
dirfd,
name,
os.AT.EMPTY_PATH,
),
.link,
name,
) orelse Maybe(void).success;
}
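
This is the second half of the `O.TMPFILE` trick: a tmpfile has no name until it is linked in, and `AT.EMPTY_PATH` with an empty path links the open descriptor itself. A hedged sketch of the call (the fds and the final name are placeholders):

// Publish the anonymous tmpfile under its real name; readers see either
// the old entry or the fully written new one, never a partial file.
try linkatTmpfile(tmp_fd, cache_dir_fd, "entry.pile").unwrap();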

src/tmp.zig (new file)
@@ -0,0 +1,60 @@
const bun = @import("root").bun;
const std = @import("std");
const Environment = bun.Environment;
const O = std.os.O;
// To be used with files
// not folders!
pub const Tmpfile = struct {
destination_dir: bun.FileDescriptor = bun.invalid_fd,
tmpfilename: [:0]const u8 = "",
fd: bun.FileDescriptor = bun.invalid_fd,
using_tmpfile: bool = Environment.isLinux,

pub fn create(
destination_dir: bun.FileDescriptor,
tmpfilename: [:0]const u8,
) bun.JSC.Maybe(Tmpfile) {
const perm = 0o644;
var tmpfile = Tmpfile{
.destination_dir = destination_dir,
.tmpfilename = tmpfilename,
};

open: while (true) {
if (comptime Environment.isLinux) {
switch (bun.sys.openat(destination_dir, ".", O.WRONLY | O.TMPFILE | O.CLOEXEC, perm)) {
.result => |fd| {
tmpfile.fd = fd;
break :open;
},
.err => |err| {
switch (err.getErrno()) {
.INVAL, .OPNOTSUPP, .NOSYS => {
tmpfile.using_tmpfile = false;
},
else => return .{ .err = err },
}
},
}
}

tmpfile.fd = switch (bun.sys.openat(destination_dir, tmpfilename, O.CREAT | O.CLOEXEC | O.WRONLY, perm)) {
.result => |fd| fd,
.err => |err| return .{ .err = err },
};
break :open;
}

return .{ .result = tmpfile };
}

pub fn finish(this: *Tmpfile, destname: [:0]const u8) !void {
if (comptime Environment.isLinux) {
if (this.using_tmpfile) {
return try bun.sys.linkatTmpfile(this.fd, this.destination_dir, destname).unwrap();
}
}

try bun.C.moveFileZWithHandle(bun.fdcast(this.fd), this.destination_dir, this.tmpfilename, bun.fdcast(this.destination_dir), destname);
}
};
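
A hedged end-to-end sketch of the intended write-then-publish flow (the `bun.sys.write` call mirrors usage elsewhere in Bun; fds, names, and `payload` are placeholders):

var tmpfile = switch (Tmpfile.create(cache_dir_fd, ".entry.tmp")) {
    .result => |t| t,
    .err => |err| return .{ .err = err },
};
// Write the whole payload before the file is visible under its final name...
_ = try bun.sys.write(tmpfile.fd, payload).unwrap();
// ...then publish it atomically (linkat on Linux, rename elsewhere).
try tmpfile.finish("entry.pile");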

test/cli/run/transpiler-cache-aggressive-remover.js (new file)
@@ -0,0 +1,7 @@
import { rmSync } from "fs";

while (1) {
try {
rmSync(process.argv[2], { recursive: true, force: true });
} catch (error) {}
}

test/cli/run/transpiler-cache.test.ts (new file)
@@ -0,0 +1,162 @@
import assert from "assert";
import { Subprocess } from "bun";
import { beforeEach, describe, expect, test } from "bun:test";
import { realpathSync, chmodSync, existsSync, mkdirSync, readdirSync, rmSync, writeFileSync } from "fs";
import { bunEnv, bunExe, bunRun } from "harness";
import { tmpdir } from "os";
import { join } from "path";

function dummyFile(size: number, cache_bust: string, value: string) {
const data = Buffer.alloc(size);
data.write("/*" + cache_bust);
const end = `*/\nconsole.log(${JSON.stringify(value)});`;
data.fill("*", 2 + cache_bust.length, size - end.length, "utf-8");
data.write(end, size - end.length, "utf-8");
return data;
}

let temp_dir: string = "";
let cache_dir = "";

const env = {
...bunEnv,
BUN_RUNTIME_TRANSPILER_CACHE_PATH: cache_dir,
};

let prev_cache_count = 0;
function newCacheCount() {
let new_count = readdirSync(cache_dir).length;
let delta = new_count - prev_cache_count;
prev_cache_count = new_count;
return delta;
}

function removeCache() {
prev_cache_count = 0;
try {
rmSync(cache_dir, { recursive: true, force: true });
} catch (error) {
chmodSync(cache_dir, 0o777);
readdirSync(cache_dir).forEach(item => {
chmodSync(join(cache_dir, item), 0o777);
});
rmSync(cache_dir, { recursive: true, force: true });
}
}

beforeEach(() => {
if (cache_dir) {
rmSync(temp_dir, { recursive: true, force: true });
removeCache();
}

temp_dir = join(tmpdir(), `bun-test-transpiler-cache-${Date.now()}-` + (Math.random() * 81023).toString(36).slice(2));
mkdirSync(temp_dir, { recursive: true });
temp_dir = realpathSync(temp_dir);
cache_dir = join(temp_dir, ".cache");
env.BUN_RUNTIME_TRANSPILER_CACHE_PATH = cache_dir;
});
describe("transpiler cache", () => {
test("works", async () => {
writeFileSync(join(temp_dir, "a.js"), dummyFile((50 * 1024 * 1.5) | 0, "1", "a"));
const a = bunRun(join(temp_dir, "a.js"), env);
expect(a.stdout).toBe("a");
assert(existsSync(cache_dir));
expect(newCacheCount()).toBe(1);
const b = bunRun(join(temp_dir, "a.js"), env);
expect(b.stdout).toBe("a");
expect(newCacheCount()).toBe(0);
});
test("ignores files under 50kb", async () => {
writeFileSync(join(temp_dir, "a.js"), dummyFile(50 * 1024 - 1, "1", "a"));
const a = bunRun(join(temp_dir, "a.js"), env);
expect(a.stdout).toBe("a");
assert(!existsSync(cache_dir));
});
test("it is indeed content addressable", async () => {
writeFileSync(join(temp_dir, "a.js"), dummyFile(50 * 1024, "1", "b"));
const a = bunRun(join(temp_dir, "a.js"), env);
expect(a.stdout).toBe("b");
expect(newCacheCount()).toBe(1);

writeFileSync(join(temp_dir, "a.js"), dummyFile(50 * 1024, "1", "c"));
const b = bunRun(join(temp_dir, "a.js"), env);
expect(b.stdout).toBe("c");
expect(newCacheCount()).toBe(1);

writeFileSync(join(temp_dir, "b.js"), dummyFile(50 * 1024, "1", "b"));
const c = bunRun(join(temp_dir, "b.js"), env);
expect(c.stdout).toBe("b");
expect(newCacheCount()).toBe(0);
});
test("doing 500 buns at once does not crash", async () => {
writeFileSync(join(temp_dir, "a.js"), dummyFile(50 * 1024, "1", "b"));
writeFileSync(join(temp_dir, "b.js"), dummyFile(50 * 1024, "2", "b"));

const remover = Bun.spawn({
cmd: [bunExe(), join(import.meta.dir, "transpiler-cache-aggressive-remover.js"), cache_dir],
env,
cwd: temp_dir,
});

let processes: Subprocess<"ignore", "pipe", "inherit">[] = [];
let killing = false;
for (let i = 0; i < 500; i++) {
processes.push(
Bun.spawn({
cmd: [bunExe(), i % 2 == 0 ? "a.js" : "b.js"],
env,
cwd: temp_dir,
onExit(subprocess, exitCode, signalCode, error) {
if (exitCode != 0 && !killing) {
killing = true;
processes.forEach(x => x.kill(9));
remover.kill(9);
}
},
}),
);
}

await Promise.all(processes.map(x => x.exited));

assert(!killing);

remover.kill(9);

for (const proc of processes) {
expect(proc.exitCode).toBe(0);
expect(await Bun.readableStreamToText(proc.stdout)).toBe("b\n");
}
}, 99999999);
test("works if the cache is not user-readable", () => {
mkdirSync(cache_dir, { recursive: true });
writeFileSync(join(temp_dir, "a.js"), dummyFile((50 * 1024 * 1.5) | 0, "1", "b"));
const a = bunRun(join(temp_dir, "a.js"), env);
expect(a.stdout).toBe("b");
expect(newCacheCount()).toBe(1);

const cache_item = readdirSync(cache_dir)[0];

chmodSync(join(cache_dir, cache_item), 0);
const b = bunRun(join(temp_dir, "a.js"), env);
expect(b.stdout).toBe("b");
expect(newCacheCount()).toBe(0);

chmodSync(join(cache_dir), "0");
const c = bunRun(join(temp_dir, "a.js"), env);
expect(c.stdout).toBe("b");
});
test("works if the cache is not user-writable", () => {
mkdirSync(cache_dir, { recursive: true });
writeFileSync(join(temp_dir, "a.js"), dummyFile((50 * 1024 * 1.5) | 0, "1", "b"));

chmodSync(join(cache_dir), "0");

const a = bunRun(join(temp_dir, "a.js"), env);
expect(a.stdout).toBe("b");

chmodSync(join(cache_dir), "777");
});
});

@@ -8,6 +8,7 @@ export const bunEnv: any = {
FORCE_COLOR: undefined,
TZ: "Etc/UTC",
CI: "1",
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
};

export function bunExe() {