Compare commits

..

1 Commits

Author SHA1 Message Date
Claude Bot
53282ab45b Implement Istanbul-style coverage ignore comments
This implements support for Istanbul-style ignore comments in code coverage:
- /* istanbul ignore next */ - ignores the next statement
- /* istanbul ignore file */ - ignores the entire file
- /* istanbul ignore if */ - ignores if branch (TODO)
- /* istanbul ignore else */ - ignores else branch (TODO)
- Support for both single-line (//) and multi-line (/* */) comments
- Support for optional explanatory text after colon

Changes:
- Add CoverageIgnoreDirective structure in js_lexer.zig
- Parse ignore comments during lexing
- Add ignore directive parsing in ByteRangeMapping.compute()
- Apply ignore logic in generateReportFromBlocks()
- Add comprehensive test cases for different ignore patterns

Fixes #7662

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-08-05 21:24:25 +00:00
6 changed files with 725 additions and 264 deletions

View File

@@ -47,7 +47,6 @@ const {
const { Agent, NODE_HTTP_WARNING } = require("node:_http_agent");
const { IncomingMessage } = require("node:_http_incoming");
const { OutgoingMessage } = require("node:_http_outgoing");
const { freeParser, parsers, HTTPParser, isLenient, prepareError } = require("node:_http_common");
const globalReportError = globalThis.reportError;
const setTimeout = globalThis.setTimeout;
@@ -56,29 +55,6 @@ const fetch = Bun.fetch;
const { URL } = globalThis;
// HTTP parser constants for lenient parsing.
// Bit flags passed to parser.initialize() when lenient mode is requested
// (see tickOnSocket); each flag relaxes one strictness check in the parser.
// Presumably these mirror Node's llhttp lenient flags — TODO confirm values.
const kLenientNone = 0;
const kLenientHeaders = 1 << 0;
const kLenientChunkedLength = 1 << 1;
const kLenientTransferEncoding = 1 << 2;
const kLenientVersion = 1 << 3;
const kLenientDataAfterClose = 1 << 4;
const kLenientOptionalLFAfterCR = 1 << 5;
const kLenientOptionalCRLFAfterChunk = 1 << 6;
const kLenientOptionalCRBeforeLF = 1 << 7;
const kLenientSpacesAfterChunkSize = 1 << 8;
// Union of every lenient flag; used when the request opts into the
// insecure/lenient parser.
const kLenientAll =
  kLenientHeaders |
  kLenientChunkedLength |
  kLenientTransferEncoding |
  kLenientVersion |
  kLenientDataAfterClose |
  kLenientOptionalLFAfterCR |
  kLenientOptionalCRLFAfterChunk |
  kLenientOptionalCRBeforeLF |
  kLenientSpacesAfterChunkSize;
// Primordials
const ObjectAssign = Object.assign;
const RegExpPrototypeExec = RegExp.prototype.exec;
@@ -91,157 +67,6 @@ function emitErrorEventNT(self, err) {
}
}
/**
 * Whether an HTTP status code is informational (1xx).
 * @param {number} status - HTTP status code.
 * @returns {boolean} true for codes in [100, 200).
 */
function statusIsInformational(status) {
  if (status < 100) {
    return false;
  }
  return status < 200;
}
// Parser callback for handling incoming responses (from Node.js implementation)
/**
 * Invoked by the HTTP parser when a response's headers are complete.
 * `this` is the parser (tickOnSocket sets parser.socket and parser.onIncoming),
 * so `this.socket._httpMessage` is the owning ClientRequest.
 *
 * The return value tells the parser how to treat the body, per the existing
 * inline comments: 0 = parse normally, 1 = skip body, 2 = skip body and
 * treat the connection as an Upgrade.
 *
 * @param res response object produced by the parser.
 * @param shouldKeepAlive part of the parser callback signature; unused here.
 */
function parserOnIncomingClient(res, shouldKeepAlive) {
  const socket = this.socket;
  const req = socket._httpMessage;
  if (req.res) {
    // We already have a response object, something is wrong
    socket.destroy();
    return 0;
  }
  req.res = res;
  res.req = req;
  // Handle upgrade responses
  if (res.upgrade) {
    return 2; // Skip body and treat as Upgrade
  }
  // Handle CONNECT method responses: mark them as upgrades too, since the
  // raw socket is handed to the caller rather than parsed further.
  if (req.method === "CONNECT") {
    res.upgrade = true;
    return 2; // Skip body and treat as Upgrade
  }
  // Handle informational responses (1xx status codes)
  if (statusIsInformational(res.statusCode)) {
    req.res = null; // Clear res so we can handle the final response
    if (res.statusCode === 100) {
      req.emit("continue");
    }
    // Snapshot of the 1xx response for the 'information' event.
    req.emit("information", {
      statusCode: res.statusCode,
      statusMessage: res.statusMessage,
      httpVersion: res.httpVersion,
      httpVersionMajor: res.httpVersionMajor,
      httpVersionMinor: res.httpVersionMinor,
      headers: res.headers,
      rawHeaders: res.rawHeaders,
    });
    return 1; // Skip body but don't treat as Upgrade
  }
  // Emit the response event asynchronously so listeners attached after this
  // callback still fire.
  process.nextTick(() => {
    if (!req.aborted && !req.emit("response", res)) {
      // If no listeners, dump the response
      res._dump();
    }
  });
  return 0; // No special treatment
}
// Socket event handlers (from Node.js implementation)
/**
 * "data" handler for the request's socket; `this` is the socket.
 * Feeds each chunk through the HTTP parser and tears the connection down
 * when the parser reports an error.
 * @param {Buffer} chunk - raw bytes received from the socket.
 */
function socketOnData(chunk) {
  const socket = this;
  const parser = socket.parser;
  if (!parser) return;

  const request = socket._httpMessage;
  const result = parser.execute(chunk);

  if (result instanceof Error) {
    // Parse failure: annotate the error, recycle the parser, drop the socket.
    prepareError(result, parser, chunk);
    freeParser(parser, request, socket);
    socket.destroy();
    return;
  }

  // Upgraded connections (WebSocket / CONNECT) hand the socket off elsewhere;
  // nothing more to do here.
  if (parser.incoming?.upgrade) {
    return;
  }
}
/**
 * "end" handler for the request's socket; `this` is the socket.
 * Surfaces an EOF-before-response as the classic "socket hang up" error,
 * then flushes and releases the parser.
 */
function socketOnEnd() {
  const socket = this;
  const request = socket._httpMessage;

  // EOF arrived before any response and no error was reported yet.
  if (!request.res && !request._hadError) {
    request._hadError = true;
    emitErrorEventNT(request, new ConnResetException("socket hang up"));
  }

  const parser = socket.parser;
  if (parser) {
    parser.finish();
    freeParser(parser, request, socket);
  }
}
/**
 * "error" handler for the request's socket; `this` is the socket.
 * Forwards transport errors to the in-flight request, if any.
 * @param {Error} err - error emitted by the socket.
 */
function socketOnError(err) {
  // Optional call: there may be no request attached to this socket.
  this._httpMessage?.emit("error", err);
}
/**
 * "close" handler for the request's socket; `this` is the socket.
 * Releases the parser, reports an incomplete response as a connection
 * reset, and emits "close" on the request.
 */
function socketOnClose() {
  const socket = this;
  const request = socket._httpMessage;
  const parser = socket.parser;

  if (parser) {
    freeParser(parser, request, socket);
  }
  if (!request) return;

  // Closed before a complete response arrived -> connection reset.
  if (!request.res?.complete) {
    request.emit("error", new ConnResetException("socket hang up"));
  }
  request.emit("close");
}
// Initialize parser on socket connection (from Node.js implementation)
/**
 * Attach a response parser to a freshly connected socket, cross-link
 * socket <-> request, and emit the "socket" event on the next tick.
 * @param req ClientRequest that owns this connection.
 * @param socket the connected socket to attach the parser to.
 */
function tickOnSocket(req, socket) {
  const parser = parsers.alloc();
  // Per-request insecureHTTPParser overrides the process-wide setting.
  const lenient = req.insecureHTTPParser === undefined ? isLenient() : req.insecureHTTPParser;
  // Initialize parser for response parsing
  parser.initialize(
    HTTPParser.RESPONSE,
    undefined, // asyncResource - not implemented
    req.maxHeaderSize || 0,
    lenient ? kLenientAll : kLenientNone,
  );
  // Cross-link parser, socket, and request so event handlers can find
  // each other from any of the three objects.
  parser.socket = socket;
  parser.outgoing = req;
  req.parser = parser;
  socket.parser = parser;
  socket._httpMessage = req;
  if (typeof req.maxHeadersCount === "number") {
    // << 1: parser limit counts key and value entries separately —
    // TODO confirm against the parser's maxHeaderPairs semantics.
    parser.maxHeaderPairs = req.maxHeadersCount << 1;
  }
  parser.joinDuplicateHeaders = req.joinDuplicateHeaders;
  parser.onIncoming = parserOnIncomingClient;
  // Emit socket event asynchronously, matching Node's event ordering.
  process.nextTick(() => {
    req.emit("socket", socket);
  });
}
function ClientRequest(input, options, cb) {
if (!(this instanceof ClientRequest)) {
return new (ClientRequest as any)(input, options, cb);
@@ -269,9 +94,7 @@ function ClientRequest(input, options, cb) {
const pushChunk = chunk => {
this[kBodyChunks].push(chunk);
if (writeCount > 1) {
connectToServer().catch(err => {
this.emit("error", err);
});
startFetch();
}
resolveNextChunk?.(false);
};
@@ -365,9 +188,7 @@ function ClientRequest(input, options, cb) {
this[kAbortController].signal.addEventListener("abort", onAbort, {
once: true,
});
connectToServer().catch(err => {
this.emit("error", err);
});
startFetch();
}
};
@@ -437,7 +258,7 @@ function ClientRequest(input, options, cb) {
let fetching = false;
const connectToServer = async () => {
const startFetch = (customBody?) => {
if (fetching) {
return false;
}
@@ -445,81 +266,274 @@ function ClientRequest(input, options, cb) {
fetching = true;
const method = this[kMethod];
const protocol = this[kProtocol];
const path = this[kPath];
const host = this[kHost];
const port = this[kPort];
const socketPath = this[kSocketPath];
// Create connection options for Bun.connect()
let connectionOptions;
if (socketPath) {
connectionOptions = { unix: socketPath };
} else {
connectionOptions = {
hostname: host,
port: port,
};
// Add TLS options for HTTPS
if (protocol === "https:" && this[kTls]) {
connectionOptions.tls = { ...this[kTls], serverName: this[kTls].servername };
}
let keepalive = true;
const agentKeepalive = this[kAgent]?.keepalive;
if (agentKeepalive !== undefined) {
keepalive = agentKeepalive;
}
// Use Bun.connect() to create a real TCP socket
try {
const socket = await Bun.connect({
...connectionOptions,
socket: {
open: socket => {
// Initialize the HTTP parser
tickOnSocket(this, socket);
const protocol = this[kProtocol];
const path = this[kPath];
let host = this[kHost];
// Send the HTTP request
const requestLine = `${method} ${path} HTTP/1.1\r\n`;
const headers = this.getHeaders();
let headerString = "";
const getURL = host => {
if (isIPv6(host)) {
host = `[${host}]`;
}
for (const [key, value] of Object.entries(headers)) {
headerString += `${key}: ${value}\r\n`;
if (path.startsWith("http://") || path.startsWith("https://")) {
return [path, `${protocol}//${host}${this[kUseDefaultPort] ? "" : ":" + this[kPort]}`];
} else {
let proxy: string | undefined;
const url = `${protocol}//${host}${this[kUseDefaultPort] ? "" : ":" + this[kPort]}${path}`;
// support agent proxy url/string for http/https
try {
// getters can throw
const agentProxy = this[kAgent]?.proxy;
// this should work for URL like objects and strings
proxy = agentProxy?.href || agentProxy;
} catch {}
return [url, proxy];
}
};
const go = (url, proxy, softFail = false) => {
const tls =
protocol === "https:" && this[kTls] ? { ...this[kTls], serverName: this[kTls].servername } : undefined;
const fetchOptions: any = {
method,
headers: this.getHeaders(),
redirect: "manual",
signal: this[kAbortController]?.signal,
// Timeouts are handled via this.setTimeout.
timeout: false,
// Disable auto gzip/deflate
decompress: false,
keepalive,
};
let keepOpen = false;
// no body and not finished
const isDuplex = customBody === undefined && !this.finished;
if (isDuplex) {
fetchOptions.duplex = "half";
keepOpen = true;
}
if (method !== "GET" && method !== "HEAD" && method !== "OPTIONS") {
const self = this;
if (customBody !== undefined) {
fetchOptions.body = customBody;
} else if (isDuplex) {
fetchOptions.body = async function* () {
while (self[kBodyChunks]?.length > 0) {
yield self[kBodyChunks].shift();
}
// Add Host header if not present
if (!this.hasHeader("Host")) {
headerString += `Host: ${host}${port !== 80 && port !== 443 ? `:${port}` : ""}\r\n`;
if (self[kBodyChunks]?.length === 0) {
self.emit("drain");
}
headerString += "\r\n";
while (!self.finished) {
yield await new Promise(resolve => {
resolveNextChunk = end => {
resolveNextChunk = undefined;
if (end) {
resolve(undefined);
} else {
resolve(self[kBodyChunks].shift());
}
};
});
const requestHeader = requestLine + headerString;
socket.write(requestHeader);
// Send request body if present
if (this[kBodyChunks] && this[kBodyChunks].length > 0) {
for (const chunk of this[kBodyChunks]) {
socket.write(chunk);
if (self[kBodyChunks]?.length === 0) {
self.emit("drain");
}
}
// Store socket reference
this.socket = socket;
socket._httpMessage = this;
},
handleResponse?.();
};
}
}
data: (socket, data) => {
socketOnData.$call(socket, data);
},
end: socket => {
socketOnEnd.$call(socket);
},
error: (socket, error) => {
socketOnError.$call(socket, error);
},
close: socket => {
socketOnClose.$call(socket);
},
},
if (tls) {
fetchOptions.tls = tls;
}
if (!!$debug) {
fetchOptions.verbose = true;
}
if (proxy) {
fetchOptions.proxy = proxy;
}
const socketPath = this[kSocketPath];
if (socketPath) {
fetchOptions.unix = socketPath;
}
//@ts-ignore
this[kFetchRequest] = fetch(url, fetchOptions).then(response => {
if (this.aborted) {
maybeEmitClose();
return;
}
handleResponse = () => {
this[kFetchRequest] = null;
this[kClearTimeout]();
handleResponse = undefined;
const prevIsHTTPS = getIsNextIncomingMessageHTTPS();
setIsNextIncomingMessageHTTPS(response.url.startsWith("https:"));
var res = (this.res = new IncomingMessage(response, {
[typeSymbol]: NodeHTTPIncomingRequestType.FetchResponse,
[reqSymbol]: this,
}));
setIsNextIncomingMessageHTTPS(prevIsHTTPS);
res.req = this;
let timer;
res.setTimeout = (msecs, callback) => {
if (timer) {
clearTimeout(timer);
}
timer = setTimeout(() => {
if (res.complete) {
return;
}
res.emit("timeout");
callback?.();
}, msecs);
};
process.nextTick(
(self, res) => {
// If the user did not listen for the 'response' event, then they
// can't possibly read the data, so we ._dump() it into the void
// so that the socket doesn't hang there in a paused state.
const contentLength = res.headers["content-length"];
if (contentLength && isNaN(Number(contentLength))) {
emitErrorEventNT(self, $HPE_UNEXPECTED_CONTENT_LENGTH("Parse Error"));
res.complete = true;
maybeEmitClose();
return;
}
try {
if (self.aborted || !self.emit("response", res)) {
res._dump();
}
} finally {
maybeEmitClose();
if (res.statusCode === 304) {
res.complete = true;
maybeEmitClose();
return;
}
}
},
this,
res,
);
};
if (!keepOpen) {
handleResponse();
}
onEnd();
});
if (!softFail) {
// Don't emit an error if we're iterating over multiple possible addresses and we haven't reached the end yet.
// This is for the happy eyeballs implementation.
this[kFetchRequest]
.catch(err => {
if (err.code === "ConnectionRefused") {
err = new Error("ECONNREFUSED");
err.code = "ECONNREFUSED";
}
// Node treats AbortError separately.
// The "abort" listener on the abort controller should have called this
if (isAbortError(err)) {
return;
}
if (!!$debug) globalReportError(err);
try {
this.emit("error", err);
} catch (_err) {
void _err;
}
})
.finally(() => {
if (!keepOpen) {
fetching = false;
this[kFetchRequest] = null;
this[kClearTimeout]();
}
});
}
return this[kFetchRequest];
};
if (isIP(host) || !options.lookup) {
// Don't need to bother with lookup if it's already an IP address or no lookup function is provided.
const [url, proxy] = getURL(host);
go(url, proxy, false);
return true;
}
try {
options.lookup(host, { all: true }, (err, results) => {
if (err) {
if (!!$debug) globalReportError(err);
process.nextTick((self, err) => self.emit("error", err), this, err);
return;
}
let candidates = results.sort((a, b) => b.family - a.family); // prefer IPv6
const fail = (message, name, code, syscall) => {
const error = new Error(message);
error.name = name;
error.code = code;
error.syscall = syscall;
if (!!$debug) globalReportError(error);
process.nextTick((self, err) => self.emit("error", err), this, error);
};
if (candidates.length === 0) {
fail("No records found", "DNSException", "ENOTFOUND", "getaddrinfo");
return;
}
if (!this.hasHeader("Host")) {
this.setHeader("Host", `${host}:${port}`);
}
// We want to try all possible addresses, beginning with the IPv6 ones, until one succeeds.
// All addresses except for the last are allowed to "soft fail" -- instead of reporting
// an error to the user, we'll just skip to the next address.
// The last address is required to work, and if it fails we'll throw an error.
const iterate = () => {
if (candidates.length === 0) {
// If we get to this point, it means that none of the addresses could be connected to.
fail(`connect ECONNREFUSED ${host}:${port}`, "Error", "ECONNREFUSED", "connect");
return;
}
const [url, proxy] = getURL(candidates.shift().address);
go(url, proxy, candidates.length > 0).catch(iterate);
};
iterate();
});
return true;
@@ -530,19 +544,27 @@ function ClientRequest(input, options, cb) {
}
};
let onEnd = () => {};
let handleResponse: (() => void) | undefined = () => {};
const send = () => {
this.finished = true;
this[kAbortController] ??= new AbortController();
this[kAbortController].signal.addEventListener("abort", onAbort, { once: true });
connectToServer()
.catch(err => {
if (!!$debug) globalReportError(err);
this.emit("error", err);
})
.finally(() => {
process.nextTick(maybeEmitFinish.bind(this));
});
var body = this[kBodyChunks] && this[kBodyChunks].length > 1 ? new Blob(this[kBodyChunks]) : this[kBodyChunks]?.[0];
try {
startFetch(body);
onEnd = () => {
handleResponse?.();
};
} catch (err) {
if (!!$debug) globalReportError(err);
this.emit("error", err);
} finally {
process.nextTick(maybeEmitFinish.bind(this));
}
};
// --- For faking the events in the right order ---
@@ -783,11 +805,6 @@ function ClientRequest(input, options, cb) {
this[kHost] = host;
this[kProtocol] = protocol;
// Initialize socket-related properties
this.socket = null;
this.parser = null;
this._hadError = false;
if (options.timeout !== undefined) {
const timeout = getTimerDuration(options.timeout, "timeout");
this.timeout = timeout;

View File

@@ -34,6 +34,19 @@ pub const JSXPragma = struct {
}
};
/// A parsed Istanbul-style coverage ignore comment, e.g.
/// `/* istanbul ignore next */` or `// istanbul ignore file`.
/// Recorded during lexing and consumed by the test-coverage reporter.
pub const CoverageIgnoreDirective = struct {
    // Which construct the directive suppresses.
    kind: Kind,
    // 0-based line the comment appears on (see scanIstanbulIgnoreDirective).
    line: u32,
    end_line: u32 = 0, // For tracking the end of ignored ranges

    pub const Kind = enum {
        ignore_next,
        ignore_if,
        ignore_else,
        ignore_file,
    };
};
pub const JSONOptions = struct {
/// Enable JSON-specific warnings/errors
is_json: bool = false,
@@ -150,6 +163,7 @@ fn NewLexer_(
is_ascii_only: JSONBool = JSONBoolDefault,
track_comments: bool = false,
all_comments: std.ArrayList(logger.Range),
coverage_ignore_directives: std.ArrayList(CoverageIgnoreDirective),
indent_info: if (json_options.guess_indentation)
struct {
@@ -247,19 +261,23 @@ fn NewLexer_(
const all_comments = this.all_comments;
const comments_to_preserve_before = this.comments_to_preserve_before;
const temp_buffer_u16 = this.temp_buffer_u16;
const coverage_ignore_directives = this.coverage_ignore_directives;
this.* = original.*;
// make sure pointers are valid
this.all_comments = all_comments;
this.comments_to_preserve_before = comments_to_preserve_before;
this.temp_buffer_u16 = temp_buffer_u16;
this.coverage_ignore_directives = coverage_ignore_directives;
bun.debugAssert(all_comments.items.len >= original.all_comments.items.len);
bun.debugAssert(comments_to_preserve_before.items.len >= original.comments_to_preserve_before.items.len);
bun.debugAssert(temp_buffer_u16.items.len == 0 and original.temp_buffer_u16.items.len == 0);
bun.debugAssert(coverage_ignore_directives.items.len >= original.coverage_ignore_directives.items.len);
this.all_comments.items.len = original.all_comments.items.len;
this.comments_to_preserve_before.items.len = original.comments_to_preserve_before.items.len;
this.coverage_ignore_directives.items.len = original.coverage_ignore_directives.items.len;
}
/// Look ahead at the next n codepoints without advancing the iterator.
@@ -286,6 +304,7 @@ fn NewLexer_(
this.temp_buffer_u16.clearAndFree();
this.all_comments.clearAndFree();
this.comments_to_preserve_before.clearAndFree();
this.coverage_ignore_directives.clearAndFree();
}
fn decodeEscapeSequences(lexer: *LexerType, start: usize, text: string, comptime BufType: type, buf_: *BufType) !void {
@@ -1887,6 +1906,11 @@ fn NewLexer_(
}) catch unreachable;
}
// Scan for Istanbul ignore directives (always active, not just for pragmas)
if (comptime !is_json) {
_ = lexer.scanIstanbulIgnoreDirective(text, lexer.loc());
}
// tsconfig.json doesn't care about annotations
if (comptime is_json)
return;
@@ -2017,6 +2041,58 @@ fn NewLexer_(
return 0;
}
/// Scan for Istanbul ignore directives in comment text.
/// Handles both "/* istanbul ignore next */" and "// istanbul ignore next"
/// forms, with optional explanatory text after the keyword (": reason").
/// Returns true if an ignore directive was found and recorded.
fn scanIstanbulIgnoreDirective(noalias lexer: *LexerType, comment_text: string, comment_loc: logger.Loc) bool {
    const trimmed = strings.trim(comment_text, " \t\r\n");

    // Strip comment delimiters if they are still present in the text.
    var text = trimmed;
    if (strings.hasPrefix(text, "//")) {
        text = text[2..];
    } else if (strings.hasPrefix(text, "/*") and strings.hasSuffix(text, "*/")) {
        text = text[2 .. text.len - 2];
    }
    text = strings.trim(text, " \t\r\n");

    // Check for the "istanbul ignore" prefix.
    if (!strings.hasPrefix(text, "istanbul ignore")) {
        return false;
    }
    text = text["istanbul ignore".len..];
    text = strings.trim(text, " \t\r\n");

    // Parse the directive type. Keywords must match as whole words so that
    // e.g. "istanbul ignore iffy" is not misread as "ignore if": a keyword
    // may only be followed by end-of-text, whitespace, or ":" (reason text).
    const candidates = [_]struct { word: []const u8, kind: CoverageIgnoreDirective.Kind }{
        .{ .word = "next", .kind = .ignore_next },
        .{ .word = "if", .kind = .ignore_if },
        .{ .word = "else", .kind = .ignore_else },
        .{ .word = "file", .kind = .ignore_file },
    };
    var directive_kind: ?CoverageIgnoreDirective.Kind = null;
    for (candidates) |candidate| {
        if (!strings.hasPrefix(text, candidate.word)) continue;
        const boundary_ok = text.len == candidate.word.len or switch (text[candidate.word.len]) {
            ' ', '\t', ':', '\r', '\n' => true,
            else => false,
        };
        if (boundary_ok) {
            directive_kind = candidate.kind;
            break;
        }
    }

    if (directive_kind) |kind| {
        // Convert location to line number (0-based).
        const line_number = @as(u32, @intCast(lexer.source.lineColFromLoc(comment_loc).line));
        lexer.coverage_ignore_directives.append(.{
            .kind = kind,
            .line = line_number,
        }) catch return false; // OOM: silently skip tracking this directive.
        return true;
    }

    return false;
}
// TODO: implement this
/// Intended to strip the shared leading indentation from a multi-line
/// comment's text; currently a no-op that returns `text` unchanged.
pub fn removeMultilineCommentIndent(_: *LexerType, _: string, text: string) string {
    return text;
}
@@ -2038,6 +2114,7 @@ fn NewLexer_(
.allocator = allocator,
.comments_to_preserve_before = std.ArrayList(js_ast.G.Comment).init(allocator),
.all_comments = std.ArrayList(logger.Range).init(allocator),
.coverage_ignore_directives = std.ArrayList(CoverageIgnoreDirective).init(allocator),
};
lex.step();
try lex.next();
@@ -2054,6 +2131,7 @@ fn NewLexer_(
.allocator = allocator,
.comments_to_preserve_before = std.ArrayList(js_ast.G.Comment).init(allocator),
.all_comments = std.ArrayList(logger.Range).init(allocator),
.coverage_ignore_directives = std.ArrayList(CoverageIgnoreDirective).init(allocator),
};
}

View File

@@ -1,5 +1,8 @@
const LinesHits = bun.collections.BabyList(u32);
/// Coverage ignore directive for Istanbul-style ignore comments
pub const CoverageIgnoreDirective = bun.js_lexer.CoverageIgnoreDirective;
/// Our code coverage currently only deals with lines of code, not statements or branches.
/// JSC doesn't expose function names in their coverage data, so we don't include that either :(.
/// Since we only need to store line numbers, our job gets simpler
@@ -359,6 +362,7 @@ pub const ByteRangeMapping = struct {
line_offset_table: LineOffsetTable.List = .{},
source_id: i32,
source_url: bun.jsc.ZigString.Slice,
coverage_ignore_directives: std.ArrayListUnmanaged(CoverageIgnoreDirective) = .{},
pub fn isLessThan(_: void, a: ByteRangeMapping, b: ByteRangeMapping) bool {
return bun.strings.order(a.source_url.slice(), b.source_url.slice()) == .lt;
@@ -368,6 +372,7 @@ pub const ByteRangeMapping = struct {
/// Free the heap allocations owned by this mapping (line-offset table and
/// coverage-ignore directives). NOTE(review): `source_url` is not released
/// here — presumably owned/freed by the caller; confirm.
pub fn deinit(this: *ByteRangeMapping) void {
    this.line_offset_table.deinit(bun.default_allocator);
    this.coverage_ignore_directives.deinit(bun.default_allocator);
}
pub threadlocal var map: ?*HashMap = null;
@@ -404,6 +409,26 @@ pub const ByteRangeMapping = struct {
return entry;
}
/// Check if a line should be ignored based on coverage ignore directives.
/// `line` is 0-based, matching `CoverageIgnoreDirective.line`.
fn shouldIgnoreLine(this: *const ByteRangeMapping, line: u32) bool {
    for (this.coverage_ignore_directives.items) |directive| {
        switch (directive.kind) {
            // An "ignore file" directive anywhere suppresses every line.
            .ignore_file => return true,
            // "ignore next" suppresses only the line immediately after it.
            .ignore_next => if (line == directive.line + 1) return true,
            // TODO: "ignore if" / "ignore else" require knowledge of the
            // if-statement structure and are not applied yet.
            .ignore_if, .ignore_else => {},
        }
    }
    return false;
}
pub fn generateReportFromBlocks(
this: *ByteRangeMapping,
allocator: std.mem.Allocator,
@@ -468,10 +493,13 @@ pub const ByteRangeMapping = struct {
min_line = @min(min_line, line);
max_line = @max(max_line, line);
executable_lines.set(line);
if (has_executed) {
lines_which_have_executed.set(line);
line_hits_slice[line] += 1;
// Skip lines that should be ignored based on coverage directives
if (!this.shouldIgnoreLine(line)) {
executable_lines.set(line);
if (has_executed) {
lines_which_have_executed.set(line);
line_hits_slice[line] += 1;
}
}
}
@@ -559,10 +587,13 @@ pub const ByteRangeMapping = struct {
const line: u32 = @as(u32, @intCast(point.original.lines));
executable_lines.set(line);
if (has_executed) {
lines_which_have_executed.set(line);
line_hits_slice[line] += 1;
// Skip lines that should be ignored based on coverage directives
if (!this.shouldIgnoreLine(line)) {
executable_lines.set(line);
if (has_executed) {
lines_which_have_executed.set(line);
line_hits_slice[line] += 1;
}
}
min_line = @min(min_line, line);
@@ -690,12 +721,111 @@ pub const ByteRangeMapping = struct {
}
/// Build the ByteRangeMapping for one source file: a line-offset table for
/// byte-offset -> line translation plus any coverage-ignore directives found
/// in the source text.
pub fn compute(source_contents: []const u8, source_id: i32, source_url: bun.jsc.ZigString.Slice) ByteRangeMapping {
    var coverage_ignore_directives = std.ArrayListUnmanaged(CoverageIgnoreDirective){};

    // Parse the source code to extract coverage ignore directives.
    // Best-effort: on failure, coverage is reported without honoring
    // ignore comments rather than failing the whole run.
    parseIgnoreDirectives(source_contents, &coverage_ignore_directives) catch {};

    return ByteRangeMapping{
        .line_offset_table = LineOffsetTable.generate(bun.jsc.VirtualMachine.get().allocator, source_contents, 0),
        .source_id = source_id,
        .source_url = source_url,
        .coverage_ignore_directives = coverage_ignore_directives,
    };
}
/// Parse coverage ignore directives from source code comments.
/// This is a lightweight character scan, not a real lexer: it treats any
/// `//` or `/* ... */` sequence as a comment, so comment-looking text inside
/// string literals (e.g. "http://...") is scanned too. That is mostly
/// harmless since only "istanbul ignore <kind>" text is recorded, but
/// NOTE(review): a string literal that *contains* an ignore directive would
/// be picked up — confirm this is acceptable.
fn parseIgnoreDirectives(source_contents: []const u8, directives: *std.ArrayListUnmanaged(CoverageIgnoreDirective)) !void {
    const allocator = bun.default_allocator;
    // 0-based line of the current scan position.
    var line_number: u32 = 0;
    var i: usize = 0;

    while (i < source_contents.len) {
        if (source_contents[i] == '\n') {
            line_number += 1;
            i += 1;
            continue;
        }

        // Look for comment starts (needs two characters of lookahead).
        if (i + 1 < source_contents.len) {
            // Single line comment: runs to end of line.
            if (source_contents[i] == '/' and source_contents[i + 1] == '/') {
                const comment_start = i;
                // Find end of line
                while (i < source_contents.len and source_contents[i] != '\n') {
                    i += 1;
                }
                const comment_text = source_contents[comment_start..i];
                try parseIgnoreDirectiveFromComment(comment_text, line_number, directives, allocator);
                continue;
            }

            // Multi-line comment
            if (source_contents[i] == '/' and source_contents[i + 1] == '*') {
                const comment_start = i;
                i += 2; // Skip /*

                // Find end of comment, counting newlines so line_number stays
                // correct. The recorded line is therefore the line the comment
                // *ends* on, which is the right anchor for "ignore next".
                while (i + 1 < source_contents.len and !(source_contents[i] == '*' and source_contents[i + 1] == '/')) {
                    if (source_contents[i] == '\n') {
                        line_number += 1;
                    }
                    i += 1;
                }

                if (i + 1 < source_contents.len) {
                    i += 2; // Skip */
                    const comment_text = source_contents[comment_start..i];
                    try parseIgnoreDirectiveFromComment(comment_text, line_number, directives, allocator);
                    continue;
                }
                // Unterminated comment: fall through; the partial comment is
                // not inspected and the loop terminates normally.
            }
        }
        i += 1;
    }
}
/// Parse a single comment for ignore directives.
/// Strips `//` or `/* ... */` delimiters, then matches
/// "istanbul ignore <next|if|else|file>" with an optional ": reason" suffix,
/// appending a directive to `directives` when found.
fn parseIgnoreDirectiveFromComment(comment_text: []const u8, line_number: u32, directives: *std.ArrayListUnmanaged(CoverageIgnoreDirective), allocator: std.mem.Allocator) !void {
    const strings = bun.strings;

    // Remove comment prefixes and whitespace.
    var text = strings.trim(comment_text, " \t\r\n");
    if (strings.hasPrefix(text, "//")) {
        text = text[2..];
    } else if (strings.hasPrefix(text, "/*") and strings.hasSuffix(text, "*/")) {
        text = text[2 .. text.len - 2];
    }
    text = strings.trim(text, " \t\r\n");

    // Check for the "istanbul ignore" prefix.
    if (!strings.hasPrefix(text, "istanbul ignore")) {
        return;
    }
    text = text["istanbul ignore".len..];
    text = strings.trim(text, " \t\r\n");

    // Parse the directive type. Keywords must match as whole words so that
    // e.g. "istanbul ignore iffy" is not misread as "ignore if": a keyword
    // may only be followed by end-of-text, whitespace, or ":" (reason text).
    const candidates = [_]struct { word: []const u8, kind: CoverageIgnoreDirective.Kind }{
        .{ .word = "next", .kind = .ignore_next },
        .{ .word = "if", .kind = .ignore_if },
        .{ .word = "else", .kind = .ignore_else },
        .{ .word = "file", .kind = .ignore_file },
    };
    var directive_kind: ?CoverageIgnoreDirective.Kind = null;
    for (candidates) |candidate| {
        if (!strings.hasPrefix(text, candidate.word)) continue;
        const boundary_ok = text.len == candidate.word.len or switch (text[candidate.word.len]) {
            ' ', '\t', ':', '\r', '\n' => true,
            else => false,
        };
        if (boundary_ok) {
            directive_kind = candidate.kind;
            break;
        }
    }

    if (directive_kind) |kind| {
        try directives.append(allocator, .{
            .kind = kind,
            .line = line_number, // 0-based line the comment ends on
        });
    }
}
};
comptime {

View File

@@ -589,3 +589,219 @@ Ran 1 test across 1 file."
`);
expect(result.exitCode).toBe(0);
});
// NOTE(review): despite its name, `alsoIgnored` in the fixture below has no
// ignore directive in front of it, so it is expected to be *covered*; only
// `ignored` (directly after "istanbul ignore next") should be excluded.
test("istanbul ignore next - single line comment", () => {
  const dir = tempDirWithFiles("cov", {
    "demo.ts": `
export function covered() {
return "covered";
}
// istanbul ignore next
export function ignored() {
return "ignored";
}
export function alsoIgnored() {
return "also ignored";
}
`,
    "demo.test.ts": `
import { test, expect } from "bun:test";
import { covered, ignored, alsoIgnored } from "./demo";
test("should call all functions but ignore specified ones", () => {
expect(covered()).toBe("covered");
expect(ignored()).toBe("ignored");
expect(alsoIgnored()).toBe("also ignored");
});
`,
  });
  const result = Bun.spawnSync([bunExe(), "test", "--coverage"], {
    cwd: dir,
    env: {
      ...bunEnv,
    },
    stdio: [null, null, "pipe"],
  });
  let stderr = result.stderr.toString("utf-8");
  // Normalize output for cross-platform consistency
  stderr = normalizeBunSnapshot(stderr, dir);
  // The ignored function should not appear in coverage
  expect(stderr).toContain("covered");
  expect(stderr).toContain("alsoIgnored");
  // Function coverage should be less than 100% due to ignored function.
  // NOTE(review): brittle — if any *other* row of the coverage table (e.g.
  // the test file itself) prints exactly "100.00", this assertion fails for
  // the wrong reason; confirm which files appear in the table.
  expect(stderr).not.toContain("100.00");
  expect(result.exitCode).toBe(0);
});
// The block-comment form of the directive must be honored the same way as
// the line-comment form.
test("istanbul ignore next - multi-line comment", () => {
  const fixtures = {
    "demo.ts": `
export function covered() {
return "covered";
}
/* istanbul ignore next */
export function ignored() {
return "ignored";
}
`,
    "demo.test.ts": `
import { test, expect } from "bun:test";
import { covered, ignored } from "./demo";
test("should call all functions", () => {
expect(covered()).toBe("covered");
expect(ignored()).toBe("ignored");
});
`,
  };
  const dir = tempDirWithFiles("cov", fixtures);
  const proc = Bun.spawnSync([bunExe(), "test", "--coverage"], {
    cwd: dir,
    env: { ...bunEnv },
    stdio: [null, null, "pipe"],
  });
  // Normalize output for cross-platform consistency before asserting.
  const output = normalizeBunSnapshot(proc.stderr.toString("utf-8"), dir);
  // The file still appears in the coverage table; the ignored function is
  // simply excluded from it.
  expect(output).toContain("covered");
  expect(proc.exitCode).toBe(0);
});
// "istanbul ignore file" should drop the whole file from the coverage table,
// even though its functions execute during the test run.
test("istanbul ignore file", () => {
  const dir = tempDirWithFiles("cov", {
    "ignored-file.ts": `
/* istanbul ignore file */
export function shouldBeIgnored() {
return "ignored";
}
export function alsoIgnored() {
return "also ignored";
}
`,
    "normal-file.ts": `
export function shouldBeCovered() {
return "covered";
}
`,
    "demo.test.ts": `
import { test, expect } from "bun:test";
import { shouldBeIgnored, alsoIgnored } from "./ignored-file";
import { shouldBeCovered } from "./normal-file";
test("should call all functions", () => {
expect(shouldBeIgnored()).toBe("ignored");
expect(alsoIgnored()).toBe("also ignored");
expect(shouldBeCovered()).toBe("covered");
});
`,
  });
  const result = Bun.spawnSync([bunExe(), "test", "--coverage"], {
    cwd: dir,
    env: {
      ...bunEnv,
    },
    stdio: [null, null, "pipe"],
  });
  let stderr = result.stderr.toString("utf-8");
  // Normalize output for cross-platform consistency
  stderr = normalizeBunSnapshot(stderr, dir);
  // Should only show coverage for normal-file.ts, not ignored-file.ts
  expect(stderr).toContain("normal-file.ts");
  expect(stderr).not.toContain("ignored-file.ts");
  expect(result.exitCode).toBe(0);
});
// A directive may carry an explanatory reason after a colon
// ("istanbul ignore next: <reason>") and must still be recognized.
test("istanbul ignore with reason", () => {
  const fixtures = {
    "demo.ts": `
export function covered() {
return "covered";
}
// istanbul ignore next: difficult to test in unit tests
export function ignored() {
return "ignored";
}
`,
    "demo.test.ts": `
import { test, expect } from "bun:test";
import { covered, ignored } from "./demo";
test("should call all functions", () => {
expect(covered()).toBe("covered");
expect(ignored()).toBe("ignored");
});
`,
  };
  const dir = tempDirWithFiles("cov", fixtures);
  const proc = Bun.spawnSync([bunExe(), "test", "--coverage"], {
    cwd: dir,
    env: { ...bunEnv },
    stdio: [null, null, "pipe"],
  });
  // Normalize output for cross-platform consistency before asserting.
  const output = normalizeBunSnapshot(proc.stderr.toString("utf-8"), dir);
  // Coverage still reports the file; the annotated function is excluded.
  expect(output).toContain("covered");
  expect(proc.exitCode).toBe(0);
});
// Ignore directives must also be reflected in the machine-readable lcov
// output, not just the console coverage table.
test("istanbul ignore next - lcov reporter", () => {
  const dir = tempDirWithFiles("cov", {
    "demo.ts": `
export function covered() {
return "covered";
}
// istanbul ignore next
export function ignored() {
return "ignored";
}
`,
    "demo.test.ts": `
import { test, expect } from "bun:test";
import { covered, ignored } from "./demo";
test("should call all functions", () => {
expect(covered()).toBe("covered");
expect(ignored()).toBe("ignored");
});
`,
  });
  const result = Bun.spawnSync([bunExe(), "test", "--coverage", "--coverage-reporter", "lcov"], {
    cwd: dir,
    env: {
      ...bunEnv,
    },
    stdio: [null, null, "pipe"],
  });
  // The lcov reporter writes coverage/lcov.info inside the project dir.
  let lcovContent = readFileSync(path.join(dir, "coverage", "lcov.info"), "utf-8");
  // Normalize LCOV content for cross-platform consistency
  lcovContent = normalizeBunSnapshot(lcovContent, dir);
  // Should contain coverage data but with ignored lines excluded
  expect(lcovContent).toContain("demo.ts");
  expect(result.exitCode).toBe(0);
});

12
test_istanbul_ignore.js Normal file
View File

@@ -0,0 +1,12 @@
// Fixture module for coverage-ignore testing: `ignored` sits directly after
// an "istanbul ignore next" comment and should be excluded from coverage;
// `covered` and `alsoCovered` have no directive and should be reported.
export function covered() {
  return "covered";
}

// istanbul ignore next
export function ignored() {
  return "ignored";
}

export function alsoCovered() {
  return "also covered";
}

View File

@@ -0,0 +1,8 @@
// Driver for the test_istanbul_ignore.js fixture: calls every export so
// coverage marks them executed; the "istanbul ignore next" directive in the
// fixture should keep `ignored` out of the report regardless.
import { test, expect } from "bun:test";
import { covered, ignored, alsoCovered } from "./test_istanbul_ignore.js";

test("should call all functions", () => {
  expect(covered()).toBe("covered");
  expect(ignored()).toBe("ignored");
  expect(alsoCovered()).toBe("also covered");
});