Mirror of https://github.com/oven-sh/bun (synced 2026-02-02 23:18:47 +00:00)

Compare commits: ciro/fix-p ... jarred/dum (1 commit)

Commit c7e3b4aaa6
.vscode/c_cpp_properties.json (vendored, 2 changed lines)

@@ -4,7 +4,6 @@
"name": "Mac",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"includePath": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/",

@@ -23,7 +22,6 @@
],
"browse": {
"path": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/**",
.vscode/launch.json (generated, vendored, 17 changed lines)

@@ -7,7 +7,7 @@
"name": "bun test",
"program": "bun-debug",
"args": ["wiptest", "${file}"],
- "cwd": "${workspaceFolder}/test",
+ "cwd": "${workspaceFolder}/test/bun.js",
"env": {
"FORCE_COLOR": "1"
},

@@ -19,7 +19,7 @@
"name": "bun test (all)",
"program": "bun-debug",
"args": ["wiptest"],
- "cwd": "${workspaceFolder}/test",
+ "cwd": "${workspaceFolder}/test/bun.js",
"env": {
"FORCE_COLOR": "1"
},

@@ -30,8 +30,8 @@
"request": "launch",
"name": "bun run current file",
"program": "bun-debug",
- "args": ["${file}"],
- "cwd": "${file}/../../",
+ "args": ["-i", "${file}"],
+ "cwd": "${file}/../",
"env": {
"FORCE_COLOR": "1"
},

@@ -104,6 +104,15 @@
"args": ["https://example.com", "--verbose"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun link test",
"program": "bun-debug",
"args": ["install"],
"cwd": "${workspaceFolder}/test/fixtures/bun-link-to-pkg-fixture",
"console": "internalConsole"
}
]
}
packages/bun-crash-report-symbolizer/index.ts (new file, 42 lines)

@@ -0,0 +1,42 @@
import { which } from "bun";

const symbolizerPath = ["llvm-symbolizer-13", "llvm-symbolizer"].find((a) =>
  which(a),
);

if (!symbolizerPath) {
  throw new Error("llvm-symbolizer not found in $PATH");
}

export const symbolizer = symbolizerPath;

function readCrashReport(text: string) {
  const lines = text
    .split("\n")
    .map((a) => a.trim())
    .filter((a) => a.length > 0);

  const metaOffset = lines.findIndex((a) => a.includes(" bun meta "));
  let lastMetaLine = metaOffset + 1;
  for (; lastMetaLine < lines.length; lastMetaLine++) {
    const line = lines[lastMetaLine];
    if (line.includes(" bun meta ")) break;
  }

  const meta = lines.slice(metaOffset, lastMetaLine);
  console.log(metaOffset, lastMetaLine);
  const version = /v(\d+\.\d+\.\d+)/.exec(meta[0])?.[1];
  var stack = lines
    .slice(lastMetaLine + 1)
    .filter((a) => a.length > 0 && !a.includes("ask for"));

  return { version, stack };
}

console.log(
  readCrashReport(
    await Bun.file(
      "/Users/jarred/.bun/.bun-crash/v0.2.3-1668157348119.crash",
    ).text(),
  ),
);
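For orientation, a minimal sketch of what readCrashReport() extracts. The real crash-report layout is not shown in this diff, so the sample text below is an assumption built only from what the parser greps for: a " bun meta " delimiter pair, a vX.Y.Z version string on the first meta line, and a trailing "ask for" help line.

// Hypothetical sample input; delimiters assumed from the parser above.
const sample = [
  "======== bun meta ======== Bun v0.2.3 macos aarch64",
  "elapsed: 12ms",
  "======== bun meta ========",
  "0x1027e4a10 JSC::JSObject::put",
  "0x1027e5b20 Bun__handleCrash",
  "ask for #help at https://bun.sh/discord",
].join("\n");

// The same parsing steps as readCrashReport(), condensed here because it is not exported.
const lines = sample.split("\n").map((l) => l.trim()).filter((l) => l.length > 0);
const metaOffset = lines.findIndex((l) => l.includes(" bun meta "));
let lastMetaLine = metaOffset + 1;
while (lastMetaLine < lines.length && !lines[lastMetaLine].includes(" bun meta ")) lastMetaLine++;
const version = /v(\d+\.\d+\.\d+)/.exec(lines[metaOffset])?.[1];
const stack = lines.slice(lastMetaLine + 1).filter((l) => !l.includes("ask for"));
console.log({ version, stack });
// => { version: "0.2.3", stack: ["0x1027e4a10 JSC::JSObject::put", "0x1027e5b20 Bun__handleCrash"] }

Presumably the exported symbolizer binary would then be invoked with these addresses; that step is not implemented in this file.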
@@ -155,6 +155,11 @@ JSObject* Zig::ImportMetaObject::createRequireFunction(VM& vm, JSGlobalObject* g
auto clientData = WebCore::clientData(vm);
requireFunction->putDirectCustomAccessor(vm, clientData->builtinNames().resolvePublicName(), JSC::CustomGetterSetter::create(vm, functionRequireResolveLazyGetter, functionRequireResolveLazySetter), 0);
requireFunction->putDirect(vm, clientData->builtinNames().pathPrivateName(), jsOwnedString(vm, pathString), JSC::PropertyAttribute::DontEnum | 0);

// TODO: use a polyfill with @requireMap to Proxy it so it works for real
// This is just a temporary workaround
requireFunction->putDirect(vm, JSC::Identifier::fromString(vm, "cache"_s), JSC::constructEmptyObject(globalObject), 0);

return requireFunction;
}
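Observable effect of this workaround, as a rough sketch (this assumes Bun's import.meta.require, which the test added later in this diff exercises; it is not Node's module-cache semantics):

// After this change, require() gains a `cache` property, but it is a plain
// empty object rather than a live registry of loaded modules (see the TODO above).
const require = import.meta.require;
console.log(typeof require.cache);              // "object"
console.log(require.cache !== null);            // true
console.log(Object.keys(require.cache).length); // 0, since nothing populates it yet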
@@ -49,6 +49,18 @@
namespace WebCore {

const JSC::ConstructAbility s_processObjectEmitWarningCodeConstructAbility = JSC::ConstructAbility::CannotConstruct;
const JSC::ConstructorKind s_processObjectEmitWarningCodeConstructorKind = JSC::ConstructorKind::None;
const JSC::ImplementationVisibility s_processObjectEmitWarningCodeImplementationVisibility = JSC::ImplementationVisibility::Public;
const int s_processObjectEmitWarningCodeLength = 83;
static const JSC::Intrinsic s_processObjectEmitWarningCodeIntrinsic = JSC::NoIntrinsic;
const char* const s_processObjectEmitWarningCode =
"(function (warning) {\n" \
" \"use strict\";\n" \
" console.warn.apply(console, arguments);\n" \
"})\n" \
;

#define DEFINE_BUILTIN_GENERATOR(codeName, functionName, overriddenName, argumentCount) \
JSC::FunctionExecutable* codeName##Generator(JSC::VM& vm) \
@@ -48,13 +48,22 @@ class FunctionExecutable;
namespace WebCore {

/* ProcessObject */
extern const char* const s_processObjectEmitWarningCode;
extern const int s_processObjectEmitWarningCodeLength;
extern const JSC::ConstructAbility s_processObjectEmitWarningCodeConstructAbility;
extern const JSC::ConstructorKind s_processObjectEmitWarningCodeConstructorKind;
extern const JSC::ImplementationVisibility s_processObjectEmitWarningCodeImplementationVisibility;

#define WEBCORE_FOREACH_PROCESSOBJECT_BUILTIN_DATA(macro) \
macro(emitWarning, processObjectEmitWarning, 1) \

#define WEBCORE_BUILTIN_PROCESSOBJECT_EMITWARNING 1

#define WEBCORE_FOREACH_PROCESSOBJECT_BUILTIN_CODE(macro) \
macro(processObjectEmitWarningCode, emitWarning, ASCIILiteral(), s_processObjectEmitWarningCodeLength) \

#define WEBCORE_FOREACH_PROCESSOBJECT_BUILTIN_FUNCTION_NAME(macro) \
macro(emitWarning) \

#define DECLARE_BUILTIN_GENERATOR(codeName, functionName, overriddenName, argumentCount) \
JSC::FunctionExecutable* codeName##Generator(JSC::VM&);
@@ -22,3 +22,8 @@
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

function emitWarning(warning) {
  "use strict";
  console.warn.apply(console, arguments);
}
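The builtin above simply forwards its arguments to console.warn, so as a usage sketch (behavior implied by this stub only; Node's richer emitWarning semantics, such as Warning objects and the "warning" event, are not implemented here):

process.emitWarning("something is deprecated");
// prints: something is deprecated
process.emitWarning("something is deprecated", "DeprecationWarning");
// prints both arguments, because the stub does console.warn.apply(console, arguments)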
@@ -1047,6 +1047,10 @@ pub const VirtualMachine = struct {
return;
};

if (log.msgs.items.len > 0) {
log.appendToWithRecycled(old_log, true) catch {};
}

res.* = ErrorableZigString.ok(ZigString.init(result.path));
}
@@ -2039,7 +2039,7 @@ var require_destroy = __commonJS({
r.destroyed = true;
}
if (!s.constructed) {
- this.once(kDestroy, function (er) {
+ this.once(kDestroy, (er) => {
_destroy(this, aggregateTwoErrors(er, err), cb);
});
} else {

@@ -5825,6 +5825,7 @@ function createNativeStream(nativeType, Readable) {
}

_construct(cb) {
// TODO: why do we need to set this flag here?
this._readableState.constructed = true;
cb();
}
@@ -3762,6 +3762,24 @@ pub const FileReader = struct {
this.buf = read_buf;
}

this.watch(fd);
return .{
.pending = &this.pending,
};
}
} else if (this.isFIFO() and this.poll_ref == null and available_to_read == null) {
// we don't know if it's readable or not
if (!bun.isReadable(fd)) {
if (free_buffer_on_error) {
bun.default_allocator.free(buf_to_use);
buf_to_use = read_buf;
}

if (view != .zero) {
this.view.set(this.globalThis(), view);
this.buf = read_buf;
}

this.watch(fd);
return .{
.pending = &this.pending,

@@ -3769,21 +3787,6 @@ pub const FileReader = struct {
}
}

// const rc: JSC.Node.Maybe(usize) = if (comptime Environment.isLinux) brk: {
// if (len == 65536 and this.has_adjusted_pipe_size_on_linux and buf_to_use.len > len) {
// var iovecs = [_]std.os.iovec{.{ .iov_base = @intToPtr([*]u8, @ptrToInt(buf_to_use.ptr)), .iov_len = @intCast(usize, buf_to_use.len) }};
// const rc = bun.C.linux.vmsplice(fd, &iovecs, 1, 0);
// Output.debug("vmsplice({d}, {d}) = {d}", .{ fd, buf_to_use.len, rc });
// if (JSC.Node.Maybe(usize).errnoSys(rc, .read)) |err| {
// break :brk err;
// }

// break :brk JSC.Node.Maybe(usize){ .result = @intCast(usize, rc) };
// }

// break :brk Syscall.read(fd, buf_to_use);
// } else Syscall.read(fd, buf_to_use);

switch (Syscall.read(fd, buf_to_use)) {
.err => |err| {
const retry = std.os.E.AGAIN;
@@ -60,6 +60,12 @@ pub const Run = struct {
run.vm.argv = ctx.passthrough;
run.vm.arena = &run.arena;

if (ctx.debug.log_level) |level| {
run.vm.bundler.options.log.level = level;
run.vm.bundler.resolver.log.level = level;
run.vm.log.level = level;
}

run.vm.bundler.options.install = ctx.install;
run.vm.bundler.resolver.opts.install = ctx.install;
run.vm.bundler.resolver.opts.global_cache = ctx.debug.global_cache;
@@ -104,18 +104,11 @@ pub const Bunfig = struct {

fn loadLogLevel(this: *Parser, expr: js_ast.Expr) !void {
try this.expect(expr, .e_string);
- const Matcher = strings.ExactSizeMatcher(8);
- this.bunfig.log_level = switch (Matcher.match(expr.asString(this.allocator).?)) {
- Matcher.case("debug") => Api.MessageLevel.debug,
- Matcher.case("error") => Api.MessageLevel.err,
- Matcher.case("warn") => Api.MessageLevel.warn,
- Matcher.case("info") => Api.MessageLevel.info,
- else => {
- try this.addError(expr.loc, "Invalid log level, must be one of debug, error, or warn");
- unreachable;
- },
+ this.ctx.debug.log_level = logger.Log.Level.Map.get(expr.asString(this.allocator).?) orelse {
+ try this.addError(expr.loc, "Invalid log level, must be one of \"debug\", \"error\", \"warn\", \"info\", \"verbose\"");
+ unreachable;
+ };
+ logger.Log.default_log_level = this.ctx.debug.log_level.?;
}

pub fn parse(this: *Parser, comptime cmd: Command.Tag) !void {
@@ -671,6 +671,9 @@ pub const Arguments = struct {
else => logger.Log.Level.err,
};
ctx.log.level = logger.Log.default_log_level;
} else if (ctx.debug.log_level) |level| {
logger.Log.default_log_level = level;
ctx.log.level = logger.Log.default_log_level;
}

opts.output_dir = output_dir;

@@ -816,6 +819,8 @@ pub const Command = struct {
global_cache: options.GlobalCache = .auto,
offline_mode_setting: ?Bunfig.OfflineMode = null,

log_level: ?logger.Log.Level = null,

// technical debt
macros: ?MacroMap = null,
editor: string = "",
@@ -23,7 +23,11 @@ pub fn put(mutable: *MutableString) void {
}

pub fn decompress(compressed_data: []const u8, output: *MutableString) Zlib.ZlibError!void {
- var reader = try Zlib.ZlibReaderArrayList.init(compressed_data, &output.list, output.allocator);
+ var reader = try Zlib.ZlibReaderArrayList.init(
+     compressed_data,
+     &output.list,
+     output.allocator,
+ );
try reader.readAll();
reader.deinit();
}
@@ -2437,9 +2437,7 @@ fn NewLexer_(
else => {},
}

- if (strings.indexOfChar(text, '\r') == null) {
-     return text;
- }
+ var i = strings.indexOfChar(text, '\r') orelse return text;

// From the specification:
//

@@ -2450,28 +2448,27 @@
// <LF> for both TV and TRV. An explicit EscapeSequence is needed to
// include a <CR> or <CR><LF> sequence.
var bytes = MutableString.init(lexer.allocator, text.len) catch unreachable;
var end: usize = 0;
var i: usize = 0;
var c: u8 = '0';
while (i < bytes.list.items.len) {
c = bytes.list.items[i];
i += 1;

if (c == '\r') {
// Convert '\r\n' into '\n'
if (i < bytes.list.items.len and bytes.list.items[i] == '\n') {
i += 1;
}

// Convert '\r' into '\n'
c = '\n';
}

bytes.list.items[end] = c;
end += 1;
bytes.appendAssumeCapacity(text[0..i]) catch unreachable;
var remain = text[i..];
if (remain[0] == '\n') {
bytes.appendChar('\n');
remain = remain[1..];
}

return bytes.toOwnedSliceLength(end + 1);
while (strings.indexofChar(remain, '\r')) |j| {
bytes.appendAssumeCapacity(remain[0..j]) catch unreachable;
remain = remain[j..];
if (remain[0] == '\n') {
bytes.appendCharAssumeCapacity('\n');
remain = remain[1..];
} else {
bytes.appendCharAssumeCapacity('\r');
}
}

bytes.appendAssumeCapacity(remain) catch unreachable;

return bytes.toOwnedSlice();
}

fn parseNumericLiteralOrDot(lexer: *LexerType) !void {
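The rule this lexer change implements comes from the template-literal grammar: in the cooked value, <CR><LF> and <CR> are both normalized to <LF>. A standalone TypeScript sketch of that normalization (not Bun's Zig code, just the rule it encodes):

// Normalize template-literal line terminators per ECMA-262:
// "\r\n" and "\r" both become "\n" in the cooked value.
function normalizeTemplateNewlines(text: string): string {
  let out = "";
  let i = 0;
  while (i < text.length) {
    const c = text[i++];
    if (c === "\r") {
      if (text[i] === "\n") i++; // swallow the LF of a CRLF pair
      out += "\n";
    } else {
      out += c;
    }
  }
  return out;
}

console.log(JSON.stringify(normalizeTemplateNewlines("a\r\nb\rc"))); // "a\nb\nc"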
@@ -803,6 +803,7 @@ pub const Linker = struct {
result.ast.runtime_import_record_id = @truncate(u32, new_import_records.len - 1);
import_records = new_import_records;
}
// if (comptime !is_bun) {

// We _assume_ you're importing ESM.
// But, that assumption can be wrong without parsing code of the imports.

@@ -833,6 +834,26 @@

result.ast.prepend_part = js_ast.Part{ .stmts = std.mem.span(&require_part_stmts) };
}
// } else {
// if (needs_require and !result.ast.uses_require_ref) {
// result.ast.uses_require_ref = true;
// var decls = try linker.allocator.alloc(js_ast.G.Decl, 1);
// decls[0] = .{
// .binding = js_ast.Binding.alloc(linker.allocator, js_ast.B.Identifier{ .ref = result.ast.require_ref }),
// };
// var require_local_statement = js_ast.Stmt.alloc(js_ast.S.Local, js_ast.S.Local{
// .kind = .k_var,
// .decls = decls,
// });

// require_part_stmts[0] = js_ast.Stmt{
// .data = .{ .s_import = &require_part_import_statement },
// .loc = logger.Loc.Empty,
// };

// result.ast.prepend_part = js_ast.Part{ .stmts = std.mem.span(&require_part_stmts) };
// }
// }
}

const ImportPathsList = allocators.BSSStringList(512, 128);
@@ -541,11 +541,14 @@ pub const Resolver = struct {

pub fn flushDebugLogs(r: *ThisResolver, flush_mode: DebugLogs.FlushMode) !void {
if (r.debug_logs) |*debug| {
- if (flush_mode == DebugLogs.FlushMode.fail) {
- try r.log.addRangeDebugWithNotes(null, logger.Range{ .loc = logger.Loc{} }, debug.what, debug.notes.toOwnedSlice());
- } else if (@enumToInt(r.log.level) <= @enumToInt(logger.Log.Level.verbose)) {
- try r.log.addVerboseWithNotes(null, logger.Loc.Empty, debug.what, debug.notes.toOwnedSlice());
+ if (debug.notes.items.len > 0) {
+ if (flush_mode == DebugLogs.FlushMode.fail) {
+ try r.log.addRangeDebugWithNotes(null, logger.Range{ .loc = logger.Loc{} }, debug.what, debug.notes.toOwnedSlice());
+ } else if (@enumToInt(r.log.level) <= @enumToInt(logger.Log.Level.verbose)) {
+ try r.log.addVerboseWithNotes(null, logger.Loc.Empty, debug.what, debug.notes.toOwnedSlice());
+ }
}
r.debug_logs = null;
}
}
var tracing_start: i128 = if (FeatureFlags.tracing) 0 else undefined;
@@ -155,6 +155,7 @@ export var __require = (namespace) => {
__require.d = (namespace) => {
return namespace;
};
__require.cache = {};

export var $$m = __commonJS;
@@ -2,13 +2,13 @@ import { transform, transformSync } from "esbuild";
import { describe, it, expect } from "bun:test";

describe("child_process.spawn - esbuild", () => {
it("should transform successfully", async () => {
const result = await transform("console.log('hello world')", {
loader: "js",
target: "node12",
});
expect(result.code).toBe('console.log("hello world");\n');
});
// it("should transform successfully", async () => {
// const result = await transform("console.log('hello world')", {
// loader: "js",
// target: "node12",
// });
// expect(result.code).toBe('console.log("hello world");\n');
// });

it("works for input exceeding the pipe capacity", async () => {
const hugeString = `console.log(${JSON.stringify("a".repeat(1000000))});`;
@@ -22,6 +22,12 @@ it("import.meta.resolveSync", () => {
).toBe(import.meta.path);
});

it("require.cache", () => {
expect(require.cache).toBe(import.meta.require.cache);
expect(typeof require.cache).toBe("object");
expect(require.cache !== null).toBe(true);
});

it("import.meta.require (json)", () => {
expect(import.meta.require("./require-json.json").hello).toBe(sync.hello);
const require = Module.createRequire(import.meta.path);
test/fixtures/bun-link-to-pkg-fixture/bun.lockb (vendored, binary file not shown)