mirror of https://github.com/oven-sh/bun, synced 2026-02-15 05:12:29 +00:00
Fixes ENG-21287
Build times, measured with `bun run build && echo '//' >> src/main.zig && time bun run build`:
| Platform | Zig 0.14.1 | Zig 0.15.2 | Speedup |
| - | - | - | - |
| macos debug asan | 126.90s | 106.27s | 1.19x |
| macos debug noasan | 60.62s | 50.85s | 1.19x |
| linux debug asan | 292.77s | 241.45s | 1.21x |
| linux debug noasan | 146.58s | 130.94s | 1.12x |
| linux debug use_llvm=false | n/a | 78.27s | 1.87x |
| windows debug asan | 177.13s | 142.55s | 1.24x |
Runtime performance:
- `next build` memory usage may have gone up by ~5%; otherwise performance looks about the same. Some code that uses writers may have gotten slower, in particular one counting writer and a few unbuffered writers that now go through vtable dispatch (see the sketch after this list).
- File size is down by 800 KB (from 100.2 MB to 99.4 MB).
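
A minimal sketch of where that overhead comes from, using made-up types (`CountingWriter`, `AnyWriter`) rather than bun's or the standard library's actual writer interfaces: with a comptime-generic writer the compiler can inline and devirtualize each write, while a vtable-style writer routes every call through a function pointer.

```
const std = @import("std");

// Made-up counting writer used by both styles below.
const CountingWriter = struct {
    count: usize = 0,

    fn writeAll(self: *CountingWriter, data: []const u8) !void {
        self.count += data.len;
    }

    fn writeAllOpaque(ctx: *anyopaque, data: []const u8) anyerror!void {
        const self: *CountingWriter = @ptrCast(@alignCast(ctx));
        self.count += data.len;
    }
};

// Generic style (the old pattern): the concrete writer type is known at
// compile time, so the write can be inlined.
fn writeGreeting(comptime W: type, w: *W) !void {
    try w.writeAll("hello");
}

// Interface style: every write goes through a function pointer, which is
// the per-call vtable overhead mentioned above.
const AnyWriter = struct {
    ctx: *anyopaque,
    writeAllFn: *const fn (ctx: *anyopaque, data: []const u8) anyerror!void,

    fn writeAll(self: AnyWriter, data: []const u8) anyerror!void {
        return self.writeAllFn(self.ctx, data);
    }
};

test "both styles count the same bytes" {
    var cw = CountingWriter{};
    try writeGreeting(CountingWriter, &cw);
    const vw = AnyWriter{ .ctx = &cw, .writeAllFn = CountingWriter.writeAllOpaque };
    try vw.writeAll("hello");
    try std.testing.expectEqual(@as(usize, 10), cw.count);
}
```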
Improvements:
- The `@export` hack is no longer needed for watch.
- The native x86_64 backend makes Linux builds faster. To use it, set `use_llvm` to false and `no_link_obj` to false, and set `ASAN_OPTIONS=detect_leaks=0`, otherwise the output is spammed with tens of thousands of lines of debug info errors. You may need the Zig LLDB fork for debugging.
- `zig test-obj`, which we will be able to use for Zig unit tests.
Still an issue:
- False 'dependency loop' errors remain in watch mode.
- Watch mode crashes have been observed.
Follow-up:
- [ ] search for `comptime Writer: type` and `comptime W: type` and remove them
- [ ] remove format_mode in our zig fork
- [ ] remove deprecated.zig autoFormatLabelFallback
- [ ] remove deprecated.zig autoFormatLabel
- [ ] remove deprecated.BufferedWriter and BufferedReader
- [ ] remove override_no_export_cpp_apis as it is no longer needed
- [ ] css `Parser(W)` -> `Parser`, and remove all the `comptime writer: type` params (see the sketch after this list)
- [ ] remove deprecated writer fully
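
A rough before/after sketch of the `Parser(W)` item, with hypothetical names (`Sink`, `GenericPrinter`, `Printer`) rather than the real css printer types: the `comptime W: type` parameter goes away once there is a single concrete writer type to print into.

```
// Hypothetical sketch only; `Sink` stands in for whatever single runtime
// writer type replaces the comptime parameter.
const Sink = struct {
    bytes_written: usize = 0,

    pub fn writeAll(self: *Sink, data: []const u8) !void {
        self.bytes_written += data.len;
    }
};

// Before: callers must instantiate the printer per writer type, and every
// call site threads `comptime W: type` through.
pub fn GenericPrinter(comptime W: type) type {
    return struct {
        dest: *W,

        pub fn print(self: *@This(), data: []const u8) !void {
            try self.dest.writeAll(data);
        }
    };
}

// After: one concrete type, no comptime writer parameter to thread around.
pub const Printer = struct {
    dest: *Sink,

    pub fn print(self: *Printer, data: []const u8) !void {
        try self.dest.writeAll(data);
    }
};
```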
Files that add lines:
```
649 src/deprecated.zig
167 scripts/pack-codegen-for-zig-team.ts
54 scripts/cleartrace-impl.js
46 scripts/cleartrace.ts
43 src/windows.zig
18 src/fs.zig
17 src/bun.js/ConsoleObject.zig
16 src/output.zig
12 src/bun.js/test/debug.zig
12 src/bun.js/node/node_fs.zig
8 src/env_loader.zig
7 src/css/printer.zig
7 src/cli/init_command.zig
7 src/bun.js/node.zig
6 src/string/escapeRegExp.zig
6 src/install/PnpmMatcher.zig
5 src/bun.js/webcore/Blob.zig
4 src/crash_handler.zig
4 src/bun.zig
3 src/install/lockfile/bun.lock.zig
3 src/cli/update_interactive_command.zig
3 src/cli/pack_command.zig
3 build.zig
2 src/Progress.zig
2 src/install/lockfile/lockfile_json_stringify_for_debugging.zig
2 src/css/small_list.zig
2 src/bun.js/webcore/prompt.zig
1 test/internal/ban-words.test.ts
1 test/internal/ban-limits.json
1 src/watcher/WatcherTrace.zig
1 src/transpiler.zig
1 src/shell/builtin/cp.zig
1 src/js_printer.zig
1 src/io/PipeReader.zig
1 src/install/bin.zig
1 src/css/selectors/selector.zig
1 src/cli/run_command.zig
1 src/bun.js/RuntimeTranspilerStore.zig
1 src/bun.js/bindings/JSRef.zig
1 src/bake/DevServer.zig
```
Files that remove lines:
```
-1 src/test/recover.zig
-1 src/sql/postgres/SocketMonitor.zig
-1 src/sql/mysql/MySQLRequestQueue.zig
-1 src/sourcemap/CodeCoverage.zig
-1 src/css/values/color_js.zig
-1 src/compile_target.zig
-1 src/bundler/linker_context/convertStmtsForChunk.zig
-1 src/bundler/bundle_v2.zig
-1 src/bun.js/webcore/blob/read_file.zig
-1 src/ast/base.zig
-2 src/sql/postgres/protocol/ArrayList.zig
-2 src/shell/builtin/mkdir.zig
-2 src/install/PackageManager/patchPackage.zig
-2 src/install/PackageManager/PackageManagerDirectories.zig
-2 src/fmt.zig
-2 src/css/declaration.zig
-2 src/css/css_parser.zig
-2 src/collections/baby_list.zig
-2 src/bun.js/bindings/ZigStackFrame.zig
-2 src/ast/E.zig
-3 src/StandaloneModuleGraph.zig
-3 src/deps/picohttp.zig
-3 src/deps/libuv.zig
-3 src/btjs.zig
-4 src/threading/Futex.zig
-4 src/shell/builtin/touch.zig
-4 src/meta.zig
-4 src/install/lockfile.zig
-4 src/css/selectors/parser.zig
-5 src/shell/interpreter.zig
-5 src/css/error.zig
-5 src/bun.js/web_worker.zig
-5 src/bun.js.zig
-6 src/cli/test_command.zig
-6 src/bun.js/VirtualMachine.zig
-6 src/bun.js/uuid.zig
-6 src/bun.js/bindings/JSValue.zig
-9 src/bun.js/test/pretty_format.zig
-9 src/bun.js/api/BunObject.zig
-14 src/install/install_binding.zig
-14 src/fd.zig
-14 src/bun.js/node/path.zig
-14 scripts/pack-codegen-for-zig-team.sh
-17 src/bun.js/test/diff_format.zig
```
The lists above were generated with `git diff --numstat origin/main...HEAD | awk '{ print ($1-$2)"\t"$3 }' | sort -rn`.
---------
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Meghan Denny <meghan@bun.com>
Co-authored-by: tayor.fish <contact@taylor.fish>
387 lines, 13 KiB, Zig
```
const HashMapPool = struct {
    const HashMap = std.HashMap(u64, void, IdentityContext, 80);
    const LinkedList = bun.deprecated.SinglyLinkedList(HashMap);
    threadlocal var list: LinkedList = undefined;
    threadlocal var loaded: bool = false;

    pub fn get(_: std.mem.Allocator) *LinkedList.Node {
        if (loaded) {
            if (list.popFirst()) |node| {
                node.data.clearRetainingCapacity();
                return node;
            }
        }

        const new_node = default_allocator.create(LinkedList.Node) catch unreachable;
        new_node.* = LinkedList.Node{ .data = HashMap.initContext(default_allocator, IdentityContext{}) };
        return new_node;
    }

    pub fn release(node: *LinkedList.Node) void {
        if (loaded) {
            list.prepend(node);
            return;
        }

        list = LinkedList{ .first = node };
        loaded = true;
    }
};

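// Recursive-descent TOML parser: lexes with `Lexer` and builds a js_ast `Expr`
// tree (E.Object / E.Array / E.String / E.Number / E.Boolean).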
pub const TOML = struct {
    lexer: Lexer,
    log: *logger.Log,
    allocator: std.mem.Allocator,

    pub fn init(allocator: std.mem.Allocator, source_: logger.Source, log: *logger.Log, redact_logs: bool) !TOML {
        return TOML{
            .lexer = try Lexer.init(log, source_, allocator, redact_logs),
            .allocator = allocator,
            .log = log,
        };
    }

    pub inline fn source(p: *const TOML) *const logger.Source {
        return &p.lexer.source;
    }

    pub fn e(_: *TOML, t: anytype, loc: logger.Loc) Expr {
        const Type = @TypeOf(t);
        if (@typeInfo(Type) == .pointer) {
            return Expr.init(std.meta.Child(Type), t.*, loc);
        } else {
            return Expr.init(Type, t, loc);
        }
    }

    const Rope = js_ast.E.Object.Rope;

    pub fn parseKeySegment(p: *TOML) anyerror!?Expr {
        const loc = p.lexer.loc();

        switch (p.lexer.token) {
            .t_string_literal => {
                const str = p.lexer.toString(loc);
                try p.lexer.next();
                return str;
            },
            .t_identifier => {
                const str = E.String{ .data = p.lexer.identifier };
                try p.lexer.next();
                return p.e(str, loc);
            },
            .t_false => {
                try p.lexer.next();
                return p.e(
                    E.String{
                        .data = "false",
                    },
                    loc,
                );
            },
            .t_true => {
                try p.lexer.next();
                return p.e(
                    E.String{
                        .data = "true",
                    },
                    loc,
                );
            },
            // what we see as a number here could actually be a string
            .t_numeric_literal => {
                const literal = p.lexer.raw();
                try p.lexer.next();
                return p.e(E.String{ .data = literal }, loc);
            },

            else => return null,
        }
    }

    pub fn parseKey(p: *TOML, allocator: std.mem.Allocator) anyerror!*Rope {
        var rope = try allocator.create(Rope);
        const head = rope;
        rope.* = .{
            .head = (try p.parseKeySegment()) orelse {
                try p.lexer.expectedString("key");
                return error.SyntaxError;
            },
            .next = null,
        };
        while (p.lexer.token == .t_dot) {
            try p.lexer.next();

            rope = try rope.append((try p.parseKeySegment()) orelse break, allocator);
        }

        return head;
    }

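    // Entry point. An empty source parses to an empty object; anything else runs the full parser.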
    pub fn parse(source_: *const logger.Source, log: *logger.Log, allocator: std.mem.Allocator, redact_logs: bool) !Expr {
        switch (source_.contents.len) {
            // This is to be consistent with how disabled JS files are handled
            0 => {
                return Expr{ .loc = logger.Loc{ .start = 0 }, .data = Expr.init(E.Object, E.Object{}, logger.Loc.Empty).data };
            },
            else => {},
        }

        var parser = try TOML.init(allocator, source_.*, log, redact_logs);

        return try parser.runParser();
    }

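    // Top-level parse loop: handles `[table]` headers, `[[array-of-tables]]` headers,
    // and plain key/value assignments until end of file.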
    fn runParser(p: *TOML) anyerror!Expr {
        var root = p.e(E.Object{}, p.lexer.loc());
        var head = root.data.e_object;

        var stack = std.heap.stackFallback(@sizeOf(Rope) * 6, p.allocator);
        const key_allocator = stack.get();

        while (true) {
            const loc = p.lexer.loc();
            switch (p.lexer.token) {
                .t_end_of_file => {
                    return root;
                },
                // child table
                .t_open_bracket => {
                    try p.lexer.next();
                    const key = try p.parseKey(key_allocator);

                    try p.lexer.expect(.t_close_bracket);
                    if (!p.lexer.has_newline_before) {
                        try p.lexer.expectedString("line break");
                    }

                    const parent_object = root.data.e_object.getOrPutObject(key, p.allocator) catch |err| {
                        switch (err) {
                            error.Clobber => {
                                try p.lexer.addDefaultError("Table already defined");
                                return error.SyntaxError;
                            },
                            else => return err,
                        }
                    };
                    head = parent_object.data.e_object;
                    stack.fixed_buffer_allocator.reset();
                },
                // child table array
                .t_open_bracket_double => {
                    try p.lexer.next();

                    const key = try p.parseKey(key_allocator);

                    try p.lexer.expect(.t_close_bracket_double);
                    if (!p.lexer.has_newline_before) {
                        try p.lexer.expectedString("line break");
                    }

                    var array = root.data.e_object.getOrPutArray(key, p.allocator) catch |err| {
                        switch (err) {
                            error.Clobber => {
                                try p.lexer.addDefaultError("Cannot overwrite table array");
                                return error.SyntaxError;
                            },
                            else => return err,
                        }
                    };
                    const new_head = p.e(E.Object{}, loc);
                    try array.data.e_array.push(p.allocator, new_head);
                    head = new_head.data.e_object;
                    stack.fixed_buffer_allocator.reset();
                },
                else => {
                    try p.parseAssignment(head, key_allocator);
                    stack.fixed_buffer_allocator.reset();
                },
            }
        }
    }

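    // Parses one `key = value` assignment into `obj`; dotted keys arrive as a Rope,
    // and redefining an existing key is reported as a syntax error.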
    pub fn parseAssignment(p: *TOML, obj: *E.Object, allocator: std.mem.Allocator) anyerror!void {
        p.lexer.allow_double_bracket = false;
        const rope = try p.parseKey(allocator);
        const rope_end = p.lexer.start;

        const is_array = p.lexer.token == .t_empty_array;
        if (is_array) {
            try p.lexer.next();
        }

        try p.lexer.expectAssignment();
        if (!is_array) {
            obj.setRope(rope, p.allocator, try p.parseValue()) catch |err| {
                switch (err) {
                    error.Clobber => {
                        const loc = rope.head.loc;
                        assert(loc.start > 0);
                        const start: u32 = @intCast(loc.start);
                        const key_name = std.mem.trimRight(u8, p.source().contents[start..rope_end], &std.ascii.whitespace);
                        p.lexer.addError(start, "Cannot redefine key '{s}'", .{key_name});
                        return error.SyntaxError;
                    },
                    else => return err,
                }
            };
        }
        p.lexer.allow_double_bracket = true;
    }

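    // Parses a single TOML value: booleans, strings, numbers (with optional sign),
    // inline tables `{ ... }`, and arrays `[ ... ]`.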
    pub fn parseValue(p: *TOML) anyerror!Expr {
        const loc = p.lexer.loc();

        p.lexer.allow_double_bracket = true;

        switch (p.lexer.token) {
            .t_false => {
                try p.lexer.next();

                return p.e(E.Boolean{
                    .value = false,
                }, loc);
            },
            .t_true => {
                try p.lexer.next();
                return p.e(E.Boolean{
                    .value = true,
                }, loc);
            },
            .t_string_literal => {
                const result = p.lexer.toString(loc);
                try p.lexer.next();
                return result;
            },
            .t_identifier => {
                const str: E.String = E.String{ .data = p.lexer.identifier };

                try p.lexer.next();
                return p.e(str, loc);
            },
            .t_numeric_literal => {
                const value = p.lexer.number;
                try p.lexer.next();
                return p.e(E.Number{ .value = value }, loc);
            },
            .t_minus => {
                try p.lexer.next();
                const value = p.lexer.number;

                try p.lexer.expect(.t_numeric_literal);
                return p.e(E.Number{ .value = -value }, loc);
            },
            .t_plus => {
                try p.lexer.next();
                const value = p.lexer.number;

                try p.lexer.expect(.t_numeric_literal);
                return p.e(E.Number{ .value = value }, loc);
            },
            .t_open_brace => {
                try p.lexer.next();
                var is_single_line = !p.lexer.has_newline_before;
                var stack = std.heap.stackFallback(@sizeOf(Rope) * 6, p.allocator);
                const key_allocator = stack.get();
                const expr = p.e(E.Object{}, loc);
                const obj = expr.data.e_object;

                while (p.lexer.token != .t_close_brace) {
                    if (obj.properties.len > 0) {
                        if (p.lexer.has_newline_before) {
                            is_single_line = false;
                        }
                        if (!try p.parseMaybeTrailingComma(.t_close_brace)) {
                            break;
                        }
                        if (p.lexer.has_newline_before) {
                            is_single_line = false;
                        }
                    }
                    try p.parseAssignment(obj, key_allocator);
                    p.lexer.allow_double_bracket = false;
                    stack.fixed_buffer_allocator.reset();
                }

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
                p.lexer.allow_double_bracket = true;
                try p.lexer.expect(.t_close_brace);
                return expr;
            },
            .t_empty_array => {
                try p.lexer.next();
                p.lexer.allow_double_bracket = true;
                return p.e(E.Array{}, loc);
            },
            .t_open_bracket => {
                try p.lexer.next();
                var is_single_line = !p.lexer.has_newline_before;
                const array_ = p.e(E.Array{}, loc);
                var array = array_.data.e_array;
                const allocator = p.allocator;
                p.lexer.allow_double_bracket = false;

                while (p.lexer.token != .t_close_bracket) {
                    if (array.items.len > 0) {
                        if (p.lexer.has_newline_before) {
                            is_single_line = false;
                        }

                        if (!try p.parseMaybeTrailingComma(.t_close_bracket)) {
                            break;
                        }

                        if (p.lexer.has_newline_before) {
                            is_single_line = false;
                        }
                    }

                    array.push(allocator, try p.parseValue()) catch unreachable;
                }

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
                p.lexer.allow_double_bracket = true;
                try p.lexer.expect(.t_close_bracket);
                return array_;
            },
            else => {
                try p.lexer.unexpected();
                return error.SyntaxError;
            },
        }
    }

    pub fn parseMaybeTrailingComma(p: *TOML, closer: T) !bool {
        try p.lexer.expect(.t_comma);

        if (p.lexer.token == closer) {
            return false;
        }

        return true;
    }
};

pub const lexer = @import("./toml/lexer.zig");
pub const Lexer = lexer.Lexer;
const T = lexer.T;

const string = []const u8;

const std = @import("std");
const IdentityContext = @import("../identity_context.zig").IdentityContext;
const expect = std.testing.expect;

const bun = @import("bun");
const assert = bun.assert;
const default_allocator = bun.default_allocator;
const logger = bun.logger;

const js_ast = bun.ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
```