mirror of https://github.com/oven-sh/bun
synced 2026-02-05 00:18:53 +00:00

Compare commits: dylan/pyth...dylan/prin (14 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | a5a289d0ef |  |
|  | 3f9438d4d3 |  |
|  | 139db58fbb |  |
|  | 9ece7aa41e |  |
|  | 691c5f2ff6 |  |
|  | acbd1ea3a2 |  |
|  | 4336306d33 |  |
|  | 6eabaa9cfe |  |
|  | 7faf1bd1bd |  |
|  | 9fb82966c4 |  |
|  | 26bdf8a282 |  |
|  | 512b96c407 |  |
|  | 2bbd7af77d |  |
|  | d3e42455df |  |
@@ -165,7 +165,7 @@ pub const NameMinifier = struct {
         };
     }
 
-    pub fn numberToMinifiedName(this: *NameMinifier, name: *std.ArrayList(u8), _i: isize) !void {
+    pub fn numberToMinifiedName(this: *NameMinifier, name: *std.ArrayList(u8), _i: isize) bun.OOM!void {
         name.clearRetainingCapacity();
         var i = _i;
         var j = @as(usize, @intCast(@mod(i, 54)));
@@ -180,7 +180,7 @@ pub const NameMinifier = struct {
         }
     }
 
-    pub fn defaultNumberToMinifiedName(allocator: std.mem.Allocator, _i: isize) !string {
+    pub fn defaultNumberToMinifiedName(allocator: std.mem.Allocator, _i: isize) bun.OOM!string {
         var i = _i;
         var j = @as(usize, @intCast(@mod(i, 54)));
         var name = std.ArrayList(u8).init(allocator);
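The two hunks above are representative of the whole series: inferred error sets (`!void`, `!string`) are narrowed to `bun.OOM`, so allocation failure becomes the only error a caller has to handle. A minimal standalone sketch of the idea (`OOM` here is a stand-in for `bun.OOM`, which is not defined in this excerpt):

```zig
const std = @import("std");

// Stand-in for bun.OOM: the only failure these helpers can produce.
const OOM = error{OutOfMemory};

fn numberToName(name: *std.ArrayList(u8), i: usize) OOM!void {
    name.clearRetainingCapacity();
    // `try` propagates error.OutOfMemory; no other error can escape.
    try name.append('a' + @as(u8, @intCast(i % 26)));
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    var name = std.ArrayList(u8).init(gpa.allocator());
    defer name.deinit();
    try numberToName(&name, 3);
    std.debug.print("{s}\n", .{name.items});
}
```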
@@ -206,7 +206,7 @@ pub const OptionalChain = enum(u1) {
     /// "(a?.b).c" => ".c" is null
     continuation,
 
-    pub fn jsonStringify(self: @This(), writer: anytype) !void {
+    pub fn jsonStringify(self: @This(), writer: anytype) @TypeOf(writer).Error!void {
         return try writer.write(@tagName(self));
     }
 };
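Tying `jsonStringify`'s error set to `@TypeOf(writer).Error` means the function fails exactly when the underlying writer fails, with no inferred-set guessing. A sketch of the same pattern:

```zig
const std = @import("std");

// The error set is inherited from whatever writer the caller passes in.
fn writeTagName(value: anytype, writer: anytype) @TypeOf(writer).Error!void {
    return writer.writeAll(@tagName(value));
}

pub fn main() !void {
    const OptionalChain = enum { start, continuation };
    var buf: [32]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    // Here the error set resolves to error{NoSpaceLeft}.
    try writeTagName(OptionalChain.continuation, fbs.writer());
    std.debug.print("{s}\n", .{fbs.getWritten()});
}
```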
@@ -97,7 +97,7 @@ pub fn fromBlob(
     if (mime_type.category.isTextLike()) {
         var output = MutableString.initEmpty(allocator);
         try JSPrinter.quoteForJSON(bytes, &output, true);
-        var list = output.toOwnedSlice();
+        var list = try output.toOwnedSlice();
         // remove the quotes
         if (list.len > 0) {
             list = list[1 .. list.len - 1];
@@ -209,7 +209,7 @@ pub fn Parse(
         }
 
         pub fn parseTemplateParts(p: *P, include_raw: bool) ![]E.TemplatePart {
-            var parts = ListManaged(E.TemplatePart).initCapacity(p.allocator, 1) catch unreachable;
+            var parts = try ListManaged(E.TemplatePart).initCapacity(p.allocator, 1);
             // Allow "in" inside template literals
             const oldAllowIn = p.allow_in;
             p.allow_in = true;
@@ -222,14 +222,14 @@ pub fn Parse(
 
                 const tail: E.Template.Contents = brk: {
                     if (!include_raw) break :brk .{ .cooked = try p.lexer.toEString() };
-                    break :brk .{ .raw = p.lexer.rawTemplateContents() };
+                    break :brk .{ .raw = try p.lexer.rawTemplateContents() };
                 };
 
-                parts.append(E.TemplatePart{
+                try parts.append(E.TemplatePart{
                     .value = value,
                     .tail_loc = tail_loc,
                     .tail = tail,
-                }) catch unreachable;
+                });
 
                 if (p.lexer.token == .t_template_tail) {
                     try p.lexer.next();
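The recurring change in this hunk: `catch unreachable` (which invokes illegal behavior if the allocator ever does fail) is replaced with `try`, so out-of-memory propagates to the caller. A self-contained illustration:

```zig
const std = @import("std");

fn collect(allocator: std.mem.Allocator) error{OutOfMemory}![]u32 {
    var parts = try std.ArrayList(u32).initCapacity(allocator, 1);
    errdefer parts.deinit();
    // Before: parts.append(42) catch unreachable; // crashes the process on OOM
    try parts.append(42); // After: the caller decides what OOM means
    return parts.toOwnedSlice();
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const items = try collect(gpa.allocator());
    defer gpa.allocator().free(items);
    std.debug.print("{any}\n", .{items});
}
```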
@@ -213,7 +213,7 @@ pub fn ParseSuffix(
             p.log.addRangeError(p.source, p.lexer.range(), "Template literals cannot have an optional chain as a tag") catch unreachable;
         }
         // p.markSyntaxFeature(compat.TemplateLiteral, p.lexer.Range());
-        const head = p.lexer.rawTemplateContents();
+        const head = try p.lexer.rawTemplateContents();
         try p.lexer.next();
 
         left.* = p.newExpr(E.Template{
@@ -222,12 +222,12 @@ pub fn ParseSuffix(
         }, left.loc);
         return .next;
     }
-    fn t_template_head(p: *P, _: Level, _: *?OptionalChain, old_optional_chain: ?OptionalChain, left: *Expr) anyerror!Continuation {
+    fn t_template_head(p: *P, _: Level, _: *?OptionalChain, old_optional_chain: ?OptionalChain, left: *Expr) !Continuation {
         if (old_optional_chain != null) {
-            p.log.addRangeError(p.source, p.lexer.range(), "Template literals cannot have an optional chain as a tag") catch unreachable;
+            try p.log.addRangeError(p.source, p.lexer.range(), "Template literals cannot have an optional chain as a tag");
         }
         // p.markSyntaxFeature(compat.TemplateLiteral, p.lexer.Range());
-        const head = p.lexer.rawTemplateContents();
+        const head = try p.lexer.rawTemplateContents();
         const partsGroup = try p.parseTemplateParts(true);
         const tag = left.*;
         left.* = p.newExpr(E.Template{
@@ -545,7 +545,7 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
         if (js.source_map) |*source_map| {
             bun.assert(html_route_bundle_index == null); // suspect behind #17956
             if (source_map.chunk.buffer.len() > 0) {
-                break :blk .{ .some = PackedMap.newNonEmpty(
+                break :blk .{ .some = try PackedMap.newNonEmpty(
                     source_map.chunk,
                     source_map.escaped_source.take().?,
                 ) };
@@ -19,12 +19,12 @@ end_state: struct {
     original_column: i32,
 },
 
-pub fn newNonEmpty(chunk: SourceMap.Chunk, escaped_source: Owned([]u8)) bun.ptr.Shared(*Self) {
+pub fn newNonEmpty(chunk: SourceMap.Chunk, escaped_source: Owned([]u8)) bun.OOM!bun.ptr.Shared(*Self) {
    var buffer = chunk.buffer;
    assert(!buffer.isEmpty());
    const dev_allocator = DevAllocator.downcast(buffer.allocator);
    return .new(.{
-        .vlq_ = .fromRawIn(buffer.toOwnedSlice(), dev_allocator),
+        .vlq_ = .fromRawIn(try buffer.toOwnedSlice(), dev_allocator),
         .escaped_source = escaped_source,
         .end_state = .{
             .original_line = chunk.end_state.original_line,
@@ -817,6 +817,13 @@ pub export fn Bun__getDefaultLoader(global: *JSGlobalObject, str: *const bun.Str
     return loader;
 }
 
+pub const TranspileSourceCodeError = js_printer.Error || bun.JSError || error{
+    AsyncModule,
+    ParseError,
+    ResolveMessage,
+    UnexpectedPendingResolution,
+};
+
 pub fn transpileSourceCode(
     jsc_vm: *VirtualMachine,
     specifier: string,
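The new `TranspileSourceCodeError` is built with `||`, Zig's error-set union operator, so the function below can name every failure mode precisely instead of using an inferred or `anyerror` set. A sketch with stand-in sets (neither `PrintError` nor `JSError` below is Bun's real definition):

```zig
const std = @import("std");

// Stand-ins for js_printer.Error and bun.JSError.
const PrintError = error{ OutOfMemory, StackOverflow };
const JSError = error{ JSError, OutOfMemory };

// `||` merges error sets; duplicates (OutOfMemory) collapse into one.
const TranspileError = PrintError || JSError || error{
    AsyncModule,
    ParseError,
};

fn transpile(fail: bool) TranspileError![]const u8 {
    if (fail) return error.ParseError;
    return "export default 1;";
}

pub fn main() void {
    _ = transpile(true) catch |err| {
        std.debug.print("transpile failed: {s}\n", .{@errorName(err)});
        return;
    };
}
```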
@@ -829,9 +836,9 @@ pub fn transpileSourceCode(
     virtual_source: ?*const logger.Source,
     promise_ptr: ?*?*jsc.JSInternalPromise,
     source_code_printer: *js_printer.BufferPrinter,
-    globalObject: ?*JSGlobalObject,
+    globalObject: *JSGlobalObject,
     comptime flags: FetchFlags,
-) !ResolvedSource {
+) TranspileSourceCodeError!ResolvedSource {
     const disable_transpilying = comptime flags.disableTranspiling();
 
     if (comptime disable_transpilying) {
@@ -1111,7 +1118,10 @@ pub fn transpileSourceCode(
                 .allocator = null,
                 .specifier = input_specifier,
                 .source_url = input_specifier.createIfDifferent(path.text),
-                .jsvalue_for_export = parse_result.ast.parts.at(0).stmts[0].data.s_expr.value.toJS(allocator, globalObject orelse jsc_vm.global) catch |e| panic("Unexpected JS error: {s}", .{@errorName(e)}),
+                .jsvalue_for_export = parse_result.ast.parts.at(0).stmts[0].data.s_expr.value.toJS(allocator, globalObject) catch |err| switch (err) {
+                    error.JSError, error.OutOfMemory => |e| return e,
+                    else => panic("Unexpected JS error: {s}", .{@errorName(err)}),
+                },
                 .tag = .exports_object,
             };
         }
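The `catch |err| switch (err)` form above forwards the errors the new signature can express and keeps the panic only for genuinely unexpected cases. Sketched in isolation:

```zig
const std = @import("std");

const AnyErr = error{ JSError, OutOfMemory, WouldBlock };

fn toJS(simulate: ?AnyErr) AnyErr!u32 {
    if (simulate) |e| return e;
    return 1;
}

// Only JSError/OutOfMemory are representable in our return type; anything
// else is treated as a bug, mirroring the hunk above.
fn exportValue(simulate: ?AnyErr) error{ JSError, OutOfMemory }!u32 {
    return toJS(simulate) catch |err| switch (err) {
        error.JSError, error.OutOfMemory => |e| return e,
        else => std.debug.panic("Unexpected JS error: {s}", .{@errorName(err)}),
    };
}

pub fn main() !void {
    std.debug.print("{d}\n", .{try exportValue(null)});
}
```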
@@ -1217,7 +1227,7 @@ pub fn transpileSourceCode(
     }
 
     jsc_vm.modules.enqueue(
-        globalObject.?,
+        globalObject,
         .{
             .parse_result = parse_result,
             .path = path,
@@ -1348,18 +1358,16 @@ pub fn transpileSourceCode(
         .wasm => {
             if (strings.eqlComptime(referrer, "undefined") and strings.eqlLong(jsc_vm.main, path.text, true)) {
                 if (virtual_source) |source| {
-                    if (globalObject) |globalThis| {
-                        // attempt to avoid reading the WASM file twice.
-                        const encoded = jsc.EncodedJSValue{
-                            .asPtr = globalThis,
-                        };
-                        const globalValue = @as(JSValue, @enumFromInt(encoded.asInt64));
-                        globalValue.put(
-                            globalThis,
-                            ZigString.static("wasmSourceBytes"),
-                            try jsc.ArrayBuffer.create(globalThis, source.contents, .Uint8Array),
-                        );
-                    }
+                    // attempt to avoid reading the WASM file twice.
+                    const encoded = jsc.EncodedJSValue{
+                        .asPtr = globalObject,
+                    };
+                    const globalValue = @as(JSValue, @enumFromInt(encoded.asInt64));
+                    globalValue.put(
+                        globalObject,
+                        ZigString.static("wasmSourceBytes"),
+                        try jsc.ArrayBuffer.create(globalObject, source.contents, .Uint8Array),
+                    );
                 }
                 return ResolvedSource{
                     .allocator = null,
@@ -1440,14 +1448,10 @@ pub fn transpileSourceCode(
         };
     }
 
-    if (globalObject == null) {
-        return error.NotSupported;
-    }
-
-    const html_bundle = try jsc.API.HTMLBundle.init(globalObject.?, path.text);
+    const html_bundle = try jsc.API.HTMLBundle.init(globalObject, path.text);
     return ResolvedSource{
         .allocator = &jsc_vm.allocator,
-        .jsvalue_for_export = html_bundle.toJS(globalObject.?),
+        .jsvalue_for_export = html_bundle.toJS(globalObject),
         .specifier = input_specifier,
         .source_url = input_specifier.createIfDifferent(path.text),
         .tag = .export_default_object,
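Several hunks in this file drop the `?` from `globalObject: ?*JSGlobalObject` and delete the `.?` unwraps, `orelse` fallbacks, and the `error.NotSupported` escape hatch the optional forced. The difference in caller obligations, sketched with a hypothetical type:

```zig
const std = @import("std");

const GlobalObject = struct { id: u32 };

// Before: every use needs `.?` or an `orelse` fallback, plus a runtime
// null check that can fail.
fn initOptional(global: ?*GlobalObject) error{NotSupported}!u32 {
    if (global == null) return error.NotSupported;
    return global.?.id;
}

// After: the type system guarantees a valid pointer; no unwraps, no error.
fn initRequired(global: *GlobalObject) u32 {
    return global.id;
}

pub fn main() !void {
    var g = GlobalObject{ .id = 7 };
    std.debug.print("{d} {d}\n", .{ try initOptional(&g), initRequired(&g) });
}
```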
@@ -1521,10 +1525,10 @@ pub fn transpileSourceCode(
             defer buf.deinit();
             var writer = buf.writer();
             jsc.API.Bun.getPublicPath(specifier, jsc_vm.origin, @TypeOf(&writer), &writer);
-            break :brk try bun.String.createUTF8ForJS(globalObject.?, buf.slice());
+            break :brk try bun.String.createUTF8ForJS(globalObject, buf.slice());
         }
 
-        break :brk try bun.String.createUTF8ForJS(globalObject.?, path.text);
+        break :brk try bun.String.createUTF8ForJS(globalObject, path.text);
     };
 
     return ResolvedSource{
@@ -1562,26 +1566,14 @@ pub export fn Bun__resolveAndFetchBuiltinModule(
 
 pub export fn Bun__fetchBuiltinModule(
     jsc_vm: *VirtualMachine,
-    globalObject: *JSGlobalObject,
     specifier: *bun.String,
-    referrer: *bun.String,
     ret: *jsc.ErrorableResolvedSource,
 ) bool {
     jsc.markBinding(@src());
-    var log = logger.Log.init(jsc_vm.transpiler.allocator);
-    defer log.deinit();
 
-    if (ModuleLoader.fetchBuiltinModule(
-        jsc_vm,
-        specifier.*,
-    ) catch |err| {
-        if (err == error.AsyncModule) {
-            unreachable;
-        }
-
-        VirtualMachine.processFetchLog(globalObject, specifier.*, referrer.*, &log, ret, err);
-        return true;
-    }) |builtin| {
+    if (ModuleLoader.fetchBuiltinModule(jsc_vm, specifier.*)) |builtin| {
         ret.* = jsc.ErrorableResolvedSource.ok(builtin);
         return true;
     } else {
@@ -1816,9 +1808,12 @@ pub export fn Bun__transpileFile(
             bun.assert(promise != null);
             return promise;
         },
         error.PluginError => return null,
-        error.JSError => {
-            ret.* = jsc.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(error.JSError));
+        error.JSError, error.OutOfMemory => |e| {
+            ret.* = jsc.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(e));
+            return null;
+        },
+        error.StackOverflow => {
+            ret.* = .err(error.JSError, globalObject.takeError(globalObject.throwStackOverflow()));
             return null;
         },
         else => {
@@ -1882,7 +1877,7 @@ fn getHardcodedModule(jsc_vm: *VirtualMachine, specifier: bun.String, hardcoded:
     };
 }
 
-pub fn fetchBuiltinModule(jsc_vm: *VirtualMachine, specifier: bun.String) !?ResolvedSource {
+pub fn fetchBuiltinModule(jsc_vm: *VirtualMachine, specifier: bun.String) ?ResolvedSource {
     if (HardcodedModule.map.getWithEql(specifier, bun.String.eqlComptime)) |hardcoded| {
         return getHardcodedModule(jsc_vm, specifier, hardcoded);
     }
@@ -1993,9 +1988,12 @@ export fn Bun__transpileVirtualModule(
         FetchFlags.transpile,
     ) catch |err| {
         switch (err) {
             error.PluginError => return true,
-            error.JSError => {
-                ret.* = jsc.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(error.JSError));
+            error.JSError, error.OutOfMemory => |e| {
+                ret.* = .err(error.JSError, globalObject.takeError(e));
+                return true;
+            },
+            error.StackOverflow => {
+                ret.* = .err(error.JSError, globalObject.takeError(globalObject.throwStackOverflow()));
                 return true;
             },
             else => {
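Both exported entry points above follow the same shape: a `bool`-returning, non-throwing ABI boundary that converts every Zig error into state the host can read. A reduced sketch of that boundary (types and names here are illustrative, not Bun's):

```zig
const std = @import("std");

const TranspileError = error{ PluginError, JSError, OutOfMemory, StackOverflow };

fn transpileInner(simulate: ?TranspileError) TranspileError![]const u8 {
    if (simulate) |e| return e;
    return "ok";
}

// The exported function cannot return a Zig error union, so each error is
// mapped to a result the host (here: an out-pointer) can represent.
fn transpileVirtualModule(out: *[]const u8, simulate: ?TranspileError) bool {
    const src = transpileInner(simulate) catch |err| switch (err) {
        error.PluginError => return true, // error already reported elsewhere
        error.JSError, error.OutOfMemory => {
            out.* = "js exception pending";
            return true;
        },
        error.StackOverflow => {
            out.* = "stack overflow";
            return true;
        },
    };
    out.* = src;
    return true;
}

pub fn main() void {
    var result: []const u8 = "";
    _ = transpileVirtualModule(&result, error.StackOverflow);
    std.debug.print("{s}\n", .{result});
}
```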
@@ -136,7 +136,7 @@ pub fn removeZigSourceProvider(this: *SavedSourceMap, opaque_source_provider: *a
 
 pub const HashTable = std.HashMap(u64, *anyopaque, bun.IdentityContext(u64), 80);
 
-pub fn onSourceMapChunk(this: *SavedSourceMap, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void {
+pub fn onSourceMapChunk(this: *SavedSourceMap, chunk: SourceMap.Chunk, source: *const logger.Source) bun.OOM!void {
     try this.putMappings(source, chunk.buffer);
 }
@@ -165,11 +165,11 @@ pub fn deinit(this: *SavedSourceMap) void {
     this.map.deinit();
 }
 
-pub fn putMappings(this: *SavedSourceMap, source: *const logger.Source, mappings: MutableString) !void {
+pub fn putMappings(this: *SavedSourceMap, source: *const logger.Source, mappings: MutableString) bun.OOM!void {
     try this.putValue(source.path.text, Value.init(bun.cast(*SavedMappings, try bun.default_allocator.dupe(u8, mappings.list.items))));
 }
 
-pub fn putValue(this: *SavedSourceMap, path: []const u8, value: Value) !void {
+pub fn putValue(this: *SavedSourceMap, path: []const u8, value: Value) bun.OOM!void {
     this.lock();
     defer this.unlock();
@@ -367,7 +367,7 @@ const SourceMapHandlerGetter = struct {
     /// When the inspector is enabled, we want to generate an inline sourcemap.
     /// And, for now, we also store it in source_mappings like normal
     /// This is hideously expensive memory-wise...
-    pub fn onChunk(this: *SourceMapHandlerGetter, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void {
+    pub fn onChunk(this: *SourceMapHandlerGetter, chunk: SourceMap.Chunk, source: *const logger.Source) bun.OOM!void {
         var temp_json_buffer = bun.MutableString.initEmpty(bun.default_allocator);
         defer temp_json_buffer.deinit();
         try chunk.printSourceMapContentsAtOffset(source, &temp_json_buffer, true, SavedSourceMap.vlq_offset, true);
@@ -1491,6 +1491,8 @@ pub fn refCountedString(this: *VirtualMachine, input_: []const u8, hash_: ?u32,
     return this.refCountedStringWithWasNew(&_was_new, input_, hash_, comptime dupe);
 }
 
+const FetchWithoutOnLoadPluginsError = ModuleLoader.TranspileSourceCodeError || error{ModuleNotFound};
+
 pub fn fetchWithoutOnLoadPlugins(
     jsc_vm: *VirtualMachine,
     globalObject: *JSGlobalObject,
@@ -1498,10 +1500,10 @@ pub fn fetchWithoutOnLoadPlugins(
     referrer: String,
     log: *logger.Log,
     comptime flags: FetchFlags,
-) anyerror!ResolvedSource {
+) FetchWithoutOnLoadPluginsError!ResolvedSource {
     bun.assert(VirtualMachine.isLoaded());
 
-    if (try ModuleLoader.fetchBuiltinModule(jsc_vm, _specifier)) |builtin| {
+    if (ModuleLoader.fetchBuiltinModule(jsc_vm, _specifier)) |builtin| {
         return builtin;
     }
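Replacing `anyerror` with the composed `FetchWithoutOnLoadPluginsError` restores compile-time knowledge of what can fail: callers can switch exhaustively, and the compiler flags unhandled or impossible cases. A sketch of the payoff:

```zig
const std = @import("std");

const FetchError = error{ ModuleNotFound, OutOfMemory };

// With a named set the compiler checks this switch for exhaustiveness;
// with `anyerror` it could not.
fn describe(err: FetchError) []const u8 {
    return switch (err) {
        error.ModuleNotFound => "module not found",
        error.OutOfMemory => "out of memory",
    };
}

fn fetch(found: bool) FetchError![]const u8 {
    if (!found) return error.ModuleNotFound;
    return "source";
}

pub fn main() void {
    _ = fetch(false) catch |err| {
        std.debug.print("{s}\n", .{describe(err)});
        return;
    };
}
```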
@@ -1686,7 +1688,7 @@ pub fn resolve(
     source: bun.String,
     query_string: ?*ZigString,
     is_esm: bool,
-) !void {
+) bun.JSError!void {
     try resolveMaybeNeedsTrailingSlash(res, global, specifier, source, query_string, is_esm, true, false);
 }
@@ -45,8 +45,9 @@ pub fn parse(
         .{
             .mangled_props = null,
         },
-    ) catch {
-        return globalThis.throwValue(try log.toJS(globalThis, default_allocator, "Failed to print toml"));
+    ) catch |err| return switch (err) {
+        error.OutOfMemory => |oom| oom,
+        error.StackOverflow => globalThis.throwStackOverflow(),
     };
 
     const slice = writer.ctx.buffer.slice();
@@ -1812,7 +1812,10 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d
         bun.Global.BunInfo.generate(*Transpiler, &jsc.VirtualMachine.get().transpiler, allocator) catch unreachable,
         source,
         .{ .mangled_props = null },
-    ) catch unreachable;
+    ) catch |err| switch (err) {
+        error.OutOfMemory => unreachable, // TODO
+        error.StackOverflow => unreachable, // TODO
+    };
 
     resp.writeStatus("200 OK");
     resp.writeHeader("Content-Type", MimeType.json.value);
@@ -526,8 +526,7 @@ JSValue fetchBuiltinModuleWithoutResolution(
     void* bunVM = globalObject->bunVM();
     auto& vm = JSC::getVM(globalObject);
     auto scope = DECLARE_THROW_SCOPE(vm);
-    BunString referrer = BunStringEmpty;
-    if (Bun__fetchBuiltinModule(bunVM, globalObject, specifier, &referrer, res)) {
+    if (Bun__fetchBuiltinModule(bunVM, specifier, res)) {
         if (!res->success) {
             return {};
         }
@@ -943,7 +942,7 @@ static JSValue fetchESMSourceCode(
         }
     }
 
-    if (Bun__fetchBuiltinModule(bunVM, globalObject, specifier, referrer, res)) {
+    if (Bun__fetchBuiltinModule(bunVM, specifier, res)) {
         if (!res->success) {
             throwException(scope, res->result.err, globalObject);
             auto* exception = scope.exception();
@@ -527,7 +527,7 @@ pub const ZigString = extern struct {
         return str;
     }
 
-    pub fn toBase64DataURL(this: ZigString, allocator: std.mem.Allocator) ![]const u8 {
+    pub fn toBase64DataURL(this: ZigString, allocator: std.mem.Allocator) OOM![]const u8 {
         const slice_ = this.slice();
         const size = std.base64.standard.Encoder.calcSize(slice_.len);
         var buf = try allocator.alloc(u8, size + "data:;base64,".len);
@@ -363,9 +363,7 @@ extern "C" JSC::JSInternalPromise* Bun__transpileFile(
 
 extern "C" bool Bun__fetchBuiltinModule(
     void* bunVM,
-    JSC::JSGlobalObject* global,
     const BunString* specifier,
-    const BunString* referrer,
     ErrorableResolvedSource* result);
 extern "C" bool Bun__resolveAndFetchBuiltinModule(
     void* bunVM,
@@ -779,7 +779,7 @@ pub const Expect = struct {
     try runner.snapshots.addInlineSnapshotToWrite(file_id, .{
         .line = srcloc.line,
         .col = srcloc.column,
-        .value = pretty_value.toOwnedSlice(),
+        .value = try pretty_value.toOwnedSlice(),
         .has_matchers = property_matchers != null,
         .is_added = result == null,
         .kind = fn_name,
@@ -101,7 +101,7 @@ pub const LinkerContext = struct {
 
         const worker = ThreadPool.Worker.get(@fieldParentPtr("linker", task.ctx));
         defer worker.unget();
-        SourceMapData.computeLineOffsets(task.ctx, worker.allocator, task.source_index);
+        bun.handleOom(SourceMapData.computeLineOffsets(task.ctx, worker.allocator, task.source_index));
     }
 
     pub fn runQuotedSourceContents(thread_task: *ThreadPoolLib.Task) void {
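These thread-pool callbacks return `void`, so the newly fallible `computeLineOffsets` is wrapped in `bun.handleOom` at the call site. Presumably `handleOom` unwraps an error union whose only error is `OutOfMemory` and terminates otherwise; a non-generic sketch of that contract (the real helper is generic over the payload type):

```zig
const std = @import("std");

// Sketch only: pinned to []u8 for simplicity.
fn handleOom(result: error{OutOfMemory}![]u8) []u8 {
    return result catch |err| switch (err) {
        error.OutOfMemory => std.debug.panic("unrecoverable: out of memory", .{}),
    };
}

fn makeGreeting(allocator: std.mem.Allocator) error{OutOfMemory}![]u8 {
    return allocator.dupe(u8, "hello");
}

pub fn main() void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    // Inside a callback that must not fail, OOM is terminated here instead
    // of being silently ignored.
    const greeting = handleOom(makeGreeting(gpa.allocator()));
    defer gpa.allocator().free(greeting);
    std.debug.print("{s}\n", .{greeting});
}
```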
@@ -123,11 +123,11 @@ pub const LinkerContext = struct {
         else
             worker.allocator;
 
-        SourceMapData.computeQuotedSourceContents(task.ctx, alloc, task.source_index);
+        bun.handleOom(SourceMapData.computeQuotedSourceContents(task.ctx, alloc, task.source_index));
     }
 };
 
-pub fn computeLineOffsets(this: *LinkerContext, alloc: std.mem.Allocator, source_index: Index.Int) void {
+pub fn computeLineOffsets(this: *LinkerContext, alloc: std.mem.Allocator, source_index: Index.Int) bun.OOM!void {
     debug("Computing LineOffsetTable: {d}", .{source_index});
     const line_offset_table: *bun.sourcemap.LineOffsetTable.List = &this.graph.files.items(.line_offset_table)[source_index];
@@ -142,7 +142,7 @@ pub const LinkerContext = struct {
 
     const approximate_line_count = this.graph.ast.items(.approximate_newline_count)[source_index];
 
-    line_offset_table.* = bun.sourcemap.LineOffsetTable.generate(
+    line_offset_table.* = try bun.sourcemap.LineOffsetTable.generate(
         alloc,
         source.contents,
@@ -151,7 +151,7 @@ pub const LinkerContext = struct {
     );
 }
 
-pub fn computeQuotedSourceContents(this: *LinkerContext, _: std.mem.Allocator, source_index: Index.Int) void {
+pub fn computeQuotedSourceContents(this: *LinkerContext, _: std.mem.Allocator, source_index: Index.Int) bun.OOM!void {
     debug("Computing Quoted Source Contents: {d}", .{source_index});
     const quoted_source_contents = &this.graph.files.items(.quoted_source_contents)[source_index];
     quoted_source_contents.reset();
@@ -164,7 +164,7 @@ pub const LinkerContext = struct {
     const source: *const Logger.Source = &this.parse_graph.input_files.items(.source)[source_index];
     var mutable = MutableString.initEmpty(bun.default_allocator);
    bun.handleOom(js_printer.quoteForJSON(source.contents, &mutable, false));
-    var mutableOwned = mutable.toDefaultOwned();
+    var mutableOwned = try mutable.toDefaultOwned();
     quoted_source_contents.* = mutableOwned.toOptional();
 }
 };
@@ -1529,10 +1529,6 @@ fn updatePackageJsonAfterMigration(allocator: Allocator, manager: *PackageManage
         },
     ) catch return;
 
-    package_json_writer.flush() catch {
-        return error.OutOfMemory;
-    };
-
     root_pkg_json.source.contents = try allocator.dupe(u8, package_json_writer.ctx.writtenWithoutTrailingZero());
 
     // Write the updated package.json
@@ -962,49 +962,6 @@ pub fn parseTSConfig(source: *const logger.Source, log: *logger.Log, allocator:
     return parser.parseExpr(false, force_utf8);
 }
 
-fn expectPrintedJSON(_contents: string, expected: string) !void {
-    Expr.Data.Store.create(default_allocator);
-    Stmt.Data.Store.create(default_allocator);
-    defer {
-        Expr.Data.Store.reset();
-        Stmt.Data.Store.reset();
-    }
-    var contents = default_allocator.alloc(u8, _contents.len + 1) catch unreachable;
-    bun.copy(u8, contents, _contents);
-    contents[contents.len - 1] = ';';
-    var log = logger.Log.init(default_allocator);
-    defer log.msgs.deinit();
-
-    const source = &logger.Source.initPathString(
-        "source.json",
-        contents,
-    );
-    const expr = try parse(source, &log, default_allocator);
-
-    if (log.msgs.items.len > 0) {
-        Output.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text });
-    }
-
-    const buffer_writer = js_printer.BufferWriter.init(default_allocator);
-    var writer = js_printer.BufferPrinter.init(buffer_writer);
-    const written = try js_printer.printJSON(@TypeOf(&writer), &writer, expr, source, .{
-        .mangled_props = null,
-    });
-    var js = writer.ctx.buffer.list.items.ptr[0 .. written + 1];
-
-    if (js.len > 1) {
-        while (js[js.len - 1] == '\n') {
-            js = js[0 .. js.len - 1];
-        }
-
-        if (js[js.len - 1] == ';') {
-            js = js[0 .. js.len - 1];
-        }
-    }
-
-    try std.testing.expectEqualStrings(expected, js);
-}
-
 const string = []const u8;
 
 const std = @import("std");
@@ -1013,7 +970,6 @@ const expect = std.testing.expect;
 const bun = @import("bun");
 const Environment = bun.Environment;
-const MutableString = bun.MutableString;
 const Output = bun.Output;
 const assert = bun.assert;
 const default_allocator = bun.default_allocator;
 const js_printer = bun.js_printer;
@@ -2648,7 +2648,7 @@ fn NewLexer_(
         lexer.rescan_close_brace_as_template_token = false;
     }
 
-    pub fn rawTemplateContents(noalias lexer: *LexerType) string {
+    pub fn rawTemplateContents(noalias lexer: *LexerType) bun.OOM!string {
         lexer.assertNotJSON();
 
         var text: string = undefined;
@@ -2675,7 +2675,7 @@ fn NewLexer_(
         // them. <CR><LF> and <CR> LineTerminatorSequences are normalized to
         // <LF> for both TV and TRV. An explicit EscapeSequence is needed to
         // include a <CR> or <CR><LF> sequence.
-        var bytes = bun.handleOom(MutableString.initCopy(lexer.allocator, text));
+        var bytes = try MutableString.initCopy(lexer.allocator, text);
         var end: usize = 0;
         var i: usize = 0;
         var c: u8 = '0';
src/js_printer.zig (3159 lines changed): diff suppressed because it is too large.
@@ -234,7 +234,7 @@ pub const Linker = struct {
         import_record: *ImportRecord,
         result: *_transpiler.ParseResult,
         comptime is_bun: bool,
-    ) !bool {
+    ) bun.OOM!bool {
         if (import_record.handles_import_errors) {
             import_record.path.is_disabled = true;
             return false;
@@ -132,7 +132,7 @@ pub const MinifyRenamer = struct {
         symbols: js_ast.Symbol.Map,
         first_top_level_slots: js_ast.SlotCounts,
         reserved_names: bun.StringHashMapUnmanaged(u32),
-    ) !*MinifyRenamer {
+    ) bun.OOM!*MinifyRenamer {
         const renamer = try allocator.create(MinifyRenamer);
         var slots = SymbolSlot.List.initUndefined();
@@ -209,7 +209,7 @@ pub const MinifyRenamer = struct {
         _ref: Ref,
         count: u32,
         stable_source_indices: []const u32,
-    ) !void {
+    ) bun.OOM!void {
         var ref = this.symbols.follow(_ref);
         var symbol = this.symbols.get(ref).?;
@@ -239,7 +239,7 @@ pub const MinifyRenamer = struct {
         });
     }
 
-    pub fn allocateTopLevelSymbolSlots(this: *MinifyRenamer, top_level_symbols: StableSymbolCount.Array) !void {
+    pub fn allocateTopLevelSymbolSlots(this: *MinifyRenamer, top_level_symbols: StableSymbolCount.Array) bun.OOM!void {
         for (top_level_symbols.items) |stable| {
             const symbol = this.symbols.get(stable.ref).?;
             var slots = this.slots.getPtr(symbol.slotNamespace());
@@ -261,7 +261,7 @@ pub const MinifyRenamer = struct {
         }
     }
 
-    pub fn assignNamesByFrequency(this: *MinifyRenamer, name_minifier: *js_ast.NameMinifier) !void {
+    pub fn assignNamesByFrequency(this: *MinifyRenamer, name_minifier: *js_ast.NameMinifier) bun.OOM!void {
         var name_buf = try std.ArrayList(u8).initCapacity(this.allocator, 64);
         defer name_buf.deinit();
@@ -859,7 +859,7 @@ pub const ExportRenamer = struct {
 pub fn computeInitialReservedNames(
     allocator: std.mem.Allocator,
     output_format: bun.options.Format,
-) !bun.StringHashMapUnmanaged(u32) {
+) bun.OOM!bun.StringHashMapUnmanaged(u32) {
     if (comptime bun.Environment.isWasm) {
         unreachable;
     }
@@ -387,7 +387,7 @@ pub const ByteRangeMapping = struct {
         var source_contents = source_contents_str.toUTF8(bun.default_allocator);
         defer source_contents.deinit();
 
-        entry.value_ptr.* = compute(source_contents.slice(), source_id, slice);
+        entry.value_ptr.* = bun.handleOom(compute(source_contents.slice(), source_id, slice));
     }
 
     pub fn getSourceID(this: *ByteRangeMapping) callconv(.C) i32 {
@@ -689,9 +689,9 @@ pub const ByteRangeMapping = struct {
         return bun.String.createUTF8ForJS(globalThis, mutable_str.slice()) catch return .zero;
     }
 
-    pub fn compute(source_contents: []const u8, source_id: i32, source_url: bun.jsc.ZigString.Slice) ByteRangeMapping {
+    pub fn compute(source_contents: []const u8, source_id: i32, source_url: bun.jsc.ZigString.Slice) bun.OOM!ByteRangeMapping {
         return ByteRangeMapping{
-            .line_offset_table = LineOffsetTable.generate(bun.jsc.VirtualMachine.get().allocator, source_contents, 0),
+            .line_offset_table = try LineOffsetTable.generate(bun.jsc.VirtualMachine.get().allocator, source_contents, 0),
             .source_id = source_id,
             .source_url = source_url,
         };
@@ -72,10 +72,10 @@ pub fn findIndex(byte_offsets_to_start_of_line: []const u32, loc: Logger.Loc) ?u
     return null;
 }
 
-pub fn generate(allocator: std.mem.Allocator, contents: []const u8, approximate_line_count: i32) List {
+pub fn generate(allocator: std.mem.Allocator, contents: []const u8, approximate_line_count: i32) bun.OOM!List {
     var list = List{};
     // Preallocate the top-level table using the approximate line count from the lexer
-    list.ensureUnusedCapacity(allocator, @as(usize, @intCast(@max(approximate_line_count, 1)))) catch unreachable;
+    try list.ensureUnusedCapacity(allocator, @as(usize, @intCast(@max(approximate_line_count, 1))));
     var column: i32 = 0;
     var byte_offset_to_first_non_ascii: u32 = 0;
     var column_byte_offset: u32 = 0;
@@ -85,7 +85,7 @@ pub fn generate(allocator: std.mem.Allocator, contents: []const u8, approximate_
     // we want to avoid re-allocating this array _most_ of the time
     // when lines _do_ have unicode characters, they probably still won't be longer than 255 much
     var stack_fallback = std.heap.stackFallback(@sizeOf(i32) * 256, allocator);
-    var columns_for_non_ascii = std.ArrayList(i32).initCapacity(stack_fallback.get(), 120) catch unreachable;
+    var columns_for_non_ascii = try std.ArrayList(i32).initCapacity(stack_fallback.get(), 120);
     const reset_end_index = stack_fallback.fixed_buffer_allocator.end_index;
     const initial_columns_for_non_ascii = columns_for_non_ascii;
@@ -128,7 +128,7 @@ pub fn generate(allocator: std.mem.Allocator, contents: []const u8, approximate_
                     u32,
                     @truncate(@intFromPtr(remaining.ptr) - @intFromPtr(contents.ptr)),
                 ))) - line_byte_offset;
-                columns_for_non_ascii.ensureUnusedCapacity((line_bytes_so_far - column_byte_offset) + 1) catch unreachable;
+                try columns_for_non_ascii.ensureUnusedCapacity((line_bytes_so_far - column_byte_offset) + 1);
                 while (column_byte_offset <= line_bytes_so_far) : (column_byte_offset += 1) {
                     columns_for_non_ascii.appendAssumeCapacity(column);
                 }
@@ -165,14 +165,14 @@ pub fn generate(allocator: std.mem.Allocator, contents: []const u8, approximate_
                 // hideously expensive
                 var owned = columns_for_non_ascii.items;
                 if (stack_fallback.fixed_buffer_allocator.ownsSlice(std.mem.sliceAsBytes(owned))) {
-                    owned = allocator.dupe(i32, owned) catch unreachable;
+                    owned = try allocator.dupe(i32, owned);
                 }
 
-                list.append(allocator, .{
+                try list.append(allocator, .{
                     .byte_offset_to_start_of_line = line_byte_offset,
                     .byte_offset_to_first_non_ascii = byte_offset_to_first_non_ascii,
                     .columns_for_non_ascii = BabyList(i32).fromOwnedSlice(owned),
-                }) catch unreachable;
+                });
 
                 column = 0;
                 byte_offset_to_first_non_ascii = 0;
@@ -200,21 +200,21 @@ pub fn generate(allocator: std.mem.Allocator, contents: []const u8, approximate_
 
     if (columns_for_non_ascii.items.len > 0) {
         const line_bytes_so_far = @as(u32, @intCast(contents.len)) - line_byte_offset;
-        columns_for_non_ascii.ensureUnusedCapacity((line_bytes_so_far - column_byte_offset) + 1) catch unreachable;
+        try columns_for_non_ascii.ensureUnusedCapacity((line_bytes_so_far - column_byte_offset) + 1);
         while (column_byte_offset <= line_bytes_so_far) : (column_byte_offset += 1) {
             columns_for_non_ascii.appendAssumeCapacity(column);
        }
     }
     {
-        var owned = columns_for_non_ascii.toOwnedSlice() catch unreachable;
+        var owned = try columns_for_non_ascii.toOwnedSlice();
         if (stack_fallback.fixed_buffer_allocator.ownsSlice(std.mem.sliceAsBytes(owned))) {
-            owned = allocator.dupe(i32, owned) catch unreachable;
+            owned = try allocator.dupe(i32, owned);
         }
-        list.append(allocator, .{
+        try list.append(allocator, .{
             .byte_offset_to_start_of_line = line_byte_offset,
             .byte_offset_to_first_non_ascii = byte_offset_to_first_non_ascii,
             .columns_for_non_ascii = BabyList(i32).fromOwnedSlice(owned),
-        }) catch unreachable;
+        });
     }
 
     if (list.capacity > list.len) {
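`generate` relies on `std.heap.stackFallback`: per-line column tables usually fit in a fixed stack buffer and only spill to the heap for unusually long lines, which is why the `ownsSlice` checks above dupe before the slice outlives the frame. A minimal sketch of that allocator:

```zig
const std = @import("std");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();

    // Up to 256 i32s come from the stack; anything larger falls back to gpa.
    var stack_fallback = std.heap.stackFallback(@sizeOf(i32) * 256, gpa.allocator());
    const allocator = stack_fallback.get();

    var columns = try std.ArrayList(i32).initCapacity(allocator, 120);
    defer columns.deinit();
    try columns.append(42);

    // Memory owned by the stack buffer must be duplicated before it escapes
    // this frame -- the same reason generate() calls allocator.dupe above.
    const escaped = try gpa.allocator().dupe(i32, columns.items);
    defer gpa.allocator().free(escaped);
    std.debug.print("{any}\n", .{escaped});
}
```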
@@ -1483,7 +1483,7 @@ pub fn appendSourceMapChunk(
     prev_end_state_: SourceMapState,
     start_state_: SourceMapState,
     source_map_: []const u8,
-) !void {
+) bun.OOM!void {
     var prev_end_state = prev_end_state_;
     var start_state = start_state_;
     // Handle line breaks in between this mapping and the previous one
@@ -1527,7 +1527,7 @@ pub fn appendSourceMapChunk(
     start_state.original_column += original_column.value;
 
     var str = MutableString.initEmpty(allocator);
-    appendMappingToBuffer(&str, j.lastByte(), prev_end_state, start_state);
+    try appendMappingToBuffer(&str, j.lastByte(), prev_end_state, start_state);
     j.push(str.slice(), allocator);
 
     // Then append everything after that without modification.
@@ -1553,7 +1553,7 @@ pub fn appendSourceMappingURLRemote(
 }
 
 /// This function is extremely hot.
-pub fn appendMappingToBuffer(buffer: *MutableString, last_byte: u8, prev_state: SourceMapState, current_state: SourceMapState) void {
+pub fn appendMappingToBuffer(buffer: *MutableString, last_byte: u8, prev_state: SourceMapState, current_state: SourceMapState) bun.OOM!void {
     const needs_comma = last_byte != 0 and last_byte != ';' and last_byte != '"';
 
     const vlqs = [_]VLQ{
@@ -1574,7 +1574,7 @@ pub fn appendMappingToBuffer(buffer: *MutableString, last_byte: u8, prev_state:
         @as(usize, vlqs[3].len);
 
     // Instead of updating .len 5 times, we only need to update it once.
-    var writable = buffer.writableNBytes(total_len + @as(usize, @intFromBool(needs_comma))) catch unreachable;
+    var writable = try buffer.writableNBytes(total_len + @as(usize, @intFromBool(needs_comma)));
 
     // Put commas in between mappings
     if (needs_comma) {
@@ -1625,7 +1625,7 @@ pub const Chunk = struct {
         mutable: *MutableString,
         include_sources_contents: bool,
         comptime ascii_only: bool,
-    ) !void {
+    ) bun.OOM!void {
         try printSourceMapContentsAtOffset(
             chunk,
             source,
@@ -1643,7 +1643,7 @@ pub const Chunk = struct {
         include_sources_contents: bool,
         offset: usize,
         comptime ascii_only: bool,
-    ) !void {
+    ) bun.OOM!void {
         // attempt to pre-allocate
 
         var filename_buf: bun.PathBuffer = undefined;
@@ -1656,9 +1656,9 @@ pub const Chunk = struct {
             filename = filename_buf[0 .. filename.len + 1];
         }
 
-        mutable.growIfNeeded(
+        try mutable.growIfNeeded(
             filename.len + 2 + (source.contents.len * @as(usize, @intFromBool(include_sources_contents))) + (chunk.buffer.list.items.len - offset) + 32 + 39 + 29 + 22 + 20,
-        ) catch unreachable;
+        );
         try mutable.append("{\n \"version\":3,\n \"sources\": [");
 
         try JSPrinter.quoteForJSON(filename, mutable, ascii_only);
@@ -1683,11 +1683,11 @@ pub const Chunk = struct {
             return .{ .ctx = Type.init(allocator, prepend_count) };
         }
 
-        pub inline fn appendLineSeparator(this: *Format) anyerror!void {
+        pub inline fn appendLineSeparator(this: *Format) bun.OOM!void {
             try this.ctx.appendLineSeparator();
         }
 
-        pub inline fn append(this: *Format, current_state: SourceMapState, prev_state: SourceMapState) anyerror!void {
+        pub inline fn append(this: *Format, current_state: SourceMapState, prev_state: SourceMapState) bun.OOM!void {
             try this.ctx.append(current_state, prev_state);
         }
@@ -1729,17 +1729,17 @@ pub const Chunk = struct {
             return map;
         }
 
-        pub fn appendLineSeparator(this: *VLQSourceMap) anyerror!void {
+        pub fn appendLineSeparator(this: *VLQSourceMap) bun.OOM!void {
             try this.data.appendChar(';');
         }
 
-        pub fn append(this: *VLQSourceMap, current_state: SourceMapState, prev_state: SourceMapState) anyerror!void {
+        pub fn append(this: *VLQSourceMap, current_state: SourceMapState, prev_state: SourceMapState) bun.OOM!void {
             const last_byte: u8 = if (this.data.list.items.len > this.offset)
                 this.data.list.items[this.data.list.items.len - 1]
             else
                 0;
 
-            appendMappingToBuffer(&this.data, last_byte, prev_state, current_state);
+            try appendMappingToBuffer(&this.data, last_byte, prev_state, current_state);
             this.count += 1;
         }
@@ -1792,8 +1792,8 @@ pub const Chunk = struct {
 
         pub const SourceMapper = SourceMapFormat(SourceMapFormatType);
 
-        pub noinline fn generateChunk(b: *ThisBuilder, output: []const u8) Chunk {
-            b.updateGeneratedLineAndColumn(output);
+        pub noinline fn generateChunk(b: *ThisBuilder, output: []const u8) bun.OOM!Chunk {
+            try b.updateGeneratedLineAndColumn(output);
             var buffer = b.source_map.getBuffer();
             if (b.prepend_count) {
                 buffer.list.items[0..8].* = @as([8]u8, @bitCast(buffer.list.items.len));
@@ -1811,7 +1811,7 @@ pub const Chunk = struct {
 
         // Scan over the printed text since the last source mapping and update the
         // generated line and column numbers
-        pub fn updateGeneratedLineAndColumn(b: *ThisBuilder, output: []const u8) void {
+        pub fn updateGeneratedLineAndColumn(b: *ThisBuilder, output: []const u8) bun.OOM!void {
             const slice = output[b.last_generated_update..];
             var needs_mapping = b.cover_lines_without_mappings and !b.line_starts_with_mapping and b.has_prev_state;
@@ -1859,7 +1859,7 @@ pub const Chunk = struct {
                         b.prev_state.generated_line += 1;
                         b.prev_state.generated_column = 0;
                         b.generated_column = 0;
-                        b.source_map.appendLineSeparator() catch unreachable;
+                        try b.source_map.appendLineSeparator();
 
                         // This new line doesn't have a mapping yet
                         b.line_starts_with_mapping = false;
@@ -1887,7 +1887,7 @@ pub const Chunk = struct {
             b.has_prev_state = true;
         }
 
-        pub fn addSourceMapping(b: *ThisBuilder, loc: Logger.Loc, output: []const u8) void {
+        pub fn addSourceMapping(b: *ThisBuilder, loc: Logger.Loc, output: []const u8) bun.OOM!void {
             if (
             // don't insert mappings for same location twice
             b.prev_loc.eql(loc) or
@@ -1917,7 +1917,7 @@ pub const Chunk = struct {
                 original_column = line.columns_for_non_ascii.slice()[@as(u32, @intCast(original_column)) - line.byte_offset_to_first_non_ascii];
             }
 
-            b.updateGeneratedLineAndColumn(output);
+            try b.updateGeneratedLineAndColumn(output);
 
             // If this line doesn't start with a mapping and we're about to add a mapping
             // that's not at the start, insert a mapping first so the line starts with one.
@@ -240,8 +240,8 @@ pub inline fn lenI(self: *MutableString) i32 {
     return @as(i32, @intCast(self.list.items.len));
 }
 
-pub fn toOwnedSlice(self: *MutableString) []u8 {
-    return bun.handleOom(self.list.toOwnedSlice(self.allocator)); // TODO
+pub fn toOwnedSlice(self: *MutableString) bun.OOM![]u8 {
+    return self.list.toOwnedSlice(self.allocator);
 }
 
 pub fn toDynamicOwned(self: *MutableString) DynamicOwned([]u8) {
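`MutableString.toOwnedSlice` now surfaces the potential resize failure instead of hiding it behind `handleOom`, and the many `try output.toOwnedSlice()` call sites earlier in this compare follow from that. The underlying `ArrayList.toOwnedSlice` contract, sketched:

```zig
const std = @import("std");

// toOwnedSlice may resize the buffer to its exact length (an allocation that
// can fail) and then hands it to the caller, leaving the list empty but reusable.
fn finish(list: *std.ArrayList(u8)) error{OutOfMemory}![]u8 {
    return list.toOwnedSlice();
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    var list = std.ArrayList(u8).init(gpa.allocator());
    defer list.deinit();
    try list.appendSlice("abc");
    const owned = try finish(&list);
    defer gpa.allocator().free(owned);
    std.debug.print("{s} (list len now {d})\n", .{ owned, list.items.len });
}
```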
@@ -249,9 +249,9 @@ pub fn toDynamicOwned(self: *MutableString) DynamicOwned([]u8) {
 }
 
 /// `self.allocator` must be `bun.default_allocator`.
-pub fn toDefaultOwned(self: *MutableString) Owned([]u8) {
+pub fn toDefaultOwned(self: *MutableString) bun.OOM!Owned([]u8) {
     bun.safety.alloc.assertEq(self.allocator, bun.default_allocator);
-    return .fromRaw(self.toOwnedSlice());
+    return .fromRaw(try self.toOwnedSlice());
 }
 
 pub fn slice(self: *MutableString) []u8 {
@@ -266,7 +266,7 @@ pub fn sliceWithSentinel(self: *MutableString) [:0]u8 {
     return self.list.items[0 .. self.list.items.len - 1 :0];
 }
 
-pub fn toOwnedSliceLength(self: *MutableString, length: usize) string {
+pub fn toOwnedSliceLength(self: *MutableString, length: usize) bun.OOM!string {
     self.list.items.len = length;
     return self.toOwnedSlice();
 }
@@ -379,7 +379,7 @@ pub fn indexEqualAny(in: anytype, target: string) ?usize {
     return null;
 }
 
-pub fn repeatingAlloc(allocator: std.mem.Allocator, count: usize, char: u8) ![]u8 {
+pub fn repeatingAlloc(allocator: std.mem.Allocator, count: usize, char: u8) bun.OOM![]u8 {
     const buf = try allocator.alloc(u8, count);
     repeatingBuf(buf, char);
     return buf;
@@ -775,7 +775,7 @@ pub const Transpiler = struct {
         comptime enable_source_map: bool,
         source_map_context: ?js_printer.SourceMapHandler,
         runtime_transpiler_cache: ?*bun.jsc.RuntimeTranspilerCache,
-    ) !usize {
+    ) js_printer.Error!usize {
         const tracer = if (enable_source_map)
             bun.perf.trace("JSPrinter.printWithSourceMap")
         else
@@ -881,7 +881,7 @@ pub const Transpiler = struct {
         comptime Writer: type,
         writer: Writer,
         comptime format: js_printer.Format,
-    ) !usize {
+    ) js_printer.Error!usize {
         return transpiler.printWithSourceMapMaybe(
             result.ast,
             &result.source,
@@ -901,7 +901,7 @@ pub const Transpiler = struct {
         writer: Writer,
         comptime format: js_printer.Format,
         handler: js_printer.SourceMapHandler,
-    ) !usize {
+    ) js_printer.Error!usize {
         if (bun.getRuntimeFeatureFlag(.BUN_FEATURE_FLAG_DISABLE_SOURCE_MAPS)) {
             return transpiler.printWithSourceMapMaybe(
                 result.ast,
@@ -211,7 +211,7 @@ pub const URL = struct {
         try writer.print("{s}/{s}", .{ this.origin, normalized_path });
     }
 
-    pub fn joinAlloc(this: *const URL, allocator: std.mem.Allocator, prefix: string, dirname: string, basename: string, extname: string, absolute_path: string) !string {
+    pub fn joinAlloc(this: *const URL, allocator: std.mem.Allocator, prefix: string, dirname: string, basename: string, extname: string, absolute_path: string) bun.OOM!string {
         const has_uplevels = std.mem.indexOf(u8, dirname, "../") != null;
 
         if (has_uplevels) {