fix bug with freeing typed arrays

Jarred Sumner
2022-03-11 03:09:57 -08:00
parent 44b0c8153a
commit 313ad01e42
3 changed files with 63 additions and 12 deletions

View File

@@ -17,7 +17,12 @@ import {
   problemsToMarkdown,
   withBunInfo,
 } from "./markdown";
-import { fetchMappings, remapPosition, sourceMappings } from "./sourcemap";
+import {
+  fetchAllMappings,
+  fetchMappings,
+  remapPosition,
+  sourceMappings,
+} from "./sourcemap";
 
 export enum StackFrameScope {
   Eval = 1,
@@ -1319,13 +1324,15 @@ export function renderRuntimeError(error: Error) {
   // But! If we've already fetched the source mappings in this page load before
   // Rely on the cached ones
   // and don't fetch them again
-  const framePromises = exception.stack.frames
+  const framePromises = fetchAllMappings(
+    exception.stack.frames.map((frame) =>
+      normalizedFilename(frame.file, thisCwd)
+    ),
+    signal
+  )
     .map((frame, i) => {
       if (stopThis.stopped) return null;
-      return [
-        fetchMappings(normalizedFilename(frame.file, thisCwd), signal),
-        i,
-      ];
+      return [frame, i];
     })
     .map((result) => {
       if (!result) return;

View File

@@ -233,7 +233,7 @@ export function remapPosition(
   const index = indexOfMapping(decodedMappings, line, column);
   if (index === -1) return null;
-  return [decodedMappings[index + 3], decodedMappings[index + 4]];
+  return [decodedMappings[index + 3] + 1, decodedMappings[index + 4]];
 }
 
 async function fetchRemoteSourceMap(file: string, signal) {
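The `+ 1` above looks like an off-by-one fix: decoded source-map entries store 0-based source lines, while stack frames are conventionally displayed 1-based, so the remapped line is bumped before being handed back (the column stays 0-based). A minimal sketch of that conversion, with made-up mapping values that are not from this commit:

// Illustrative only: one decoded mapping laid out as
// [generatedLine, generatedColumn, sourceIndex, sourceLine, sourceColumn, nameIndex]
const decodedMappings = Int32Array.from([0, 10, 0, 4, 2, -1]);
const index = 0;

// sourceLine 4 is 0-based, so a human-facing stack frame shows line 5
const remappedLine = decodedMappings[index + 3] + 1; // 5, 1-based for display
const remappedColumn = decodedMappings[index + 4]; // 2, left 0-based
console.log(remappedLine, remappedColumn);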
@@ -255,6 +255,7 @@ async function fetchRemoteSourceMap(file: string, signal) {
 
 export var sourceMappings = new Map();
 export function fetchMappings(file, signal) {
+  if (file.includes(".bun")) return null;
   if (sourceMappings.has(file)) {
     return sourceMappings.get(file);
   }
@@ -267,6 +268,48 @@ export function fetchMappings(file, signal) {
   });
 }
 
+// this batches duplicate requests
+export function fetchAllMappings(files, signal) {
+  var results = new Array(files.length);
+  var map = new Map();
+  for (var i = 0; i < files.length; i++) {
+    const existing = map.get(files[i]);
+    if (existing) {
+      existing.push(i);
+    } else map.set(files[i], [i]);
+  }
+
+  for (const [file, indices] of [...map.entries()]) {
+    const mapped = fetchMappings(file, signal);
+    if (mapped.then) {
+      var resolvers = [];
+      for (let i = 0; i < indices.length; i++) {
+        results[indices[i]] = new Promise((resolve, reject) => {
+          resolvers[i] = (res) => resolve(res ? [res, i] : null);
+        });
+      }
+
+      mapped.finally((a) => {
+        for (let resolve of resolvers) {
+          try {
+            resolve(a);
+          } catch {
+          } finally {
+          }
+        }
+        resolvers.length = 0;
+        resolvers = null;
+      });
+    } else {
+      for (let i = 0; i < indices.length; i++) {
+        results[indices[i]] = mapped ? [mapped, indices[i]] : null;
+      }
+    }
+  }
+
+  return results;
+}
+
 function indexOfMapping(mappings: Int32Array, line: number, column: number) {
   // the array is [generatedLine, generatedColumn, sourceIndex, sourceLine, sourceColumn, nameIndex]
   // 0 - generated line
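A hedged usage sketch of the new batching helper (the file paths, AbortController, and the Promise.all step are illustrative assumptions, not part of this commit): duplicate filenames share a single underlying fetchMappings call, and the returned array keeps one slot per input frame, each holding either null or a [mappings, index] pair, possibly wrapped in a Promise while the fetch is still in flight.

import { fetchAllMappings } from "./sourcemap";

const controller = new AbortController();

// Both "/app/index.ts" entries reuse one fetch; every input index still gets
// its own result slot.
const results = fetchAllMappings(
  ["/app/index.ts", "/app/index.ts", "/app/util.ts"],
  controller.signal
);

// Promise.all passes plain values through and awaits the pending ones.
for (const entry of await Promise.all(results)) {
  if (!entry) continue;
  const [mappings, index] = entry;
  // ...remap the stack frame at `index` using `mappings`
}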

View File

@@ -1829,11 +1829,12 @@ pub const MarkedArrayBuffer = struct {
     pub const toJS = toJSObjectRef;
 };
 
-export fn MarkedArrayBuffer_deallocator(bytes_: *anyopaque, ctx_: *anyopaque) void {
-    var ctx = @ptrCast(*MarkedArrayBuffer, @alignCast(@alignOf(*MarkedArrayBuffer), ctx_));
-    if (comptime Environment.allow_assert) std.debug.assert(ctx.buffer.ptr == @ptrCast([*]u8, bytes_));
-
-    ctx.destroy();
+export fn MarkedArrayBuffer_deallocator(bytes_: *anyopaque, _: *anyopaque) void {
+    const mimalloc = @import("../../allocators/mimalloc.zig");
+    // zig's memory allocator interface won't work here
+    // mimalloc knows the size of things
+    // but we don't
+    mimalloc.mi_free(bytes_);
 }
 
 pub fn castObj(obj: js.JSObjectRef, comptime Type: type) *Type {