Compare commits

...

22 Commits

Author SHA1 Message Date
Erik Dunteman
39c017aeb9 tests fixed (?) with help from Jarred 2024-07-07 23:29:17 -07:00
Erik Dunteman
77ff2e3d09 merge 2024-07-05 21:15:30 -07:00
Erik Dunteman
cb259e7147 add TS perf_hooks tests 2024-07-05 18:30:30 -07:00
Erik Dunteman
b67ba78f0c add timerify function naming and histogram.toJSON 2024-07-05 18:29:12 -07:00
Erik Dunteman
178b41695b added bigint support 2024-07-04 16:53:53 -07:00
Erik Dunteman
d6472f0999 callconvs fixed 2024-07-04 15:12:08 -07:00
Erik Dunteman
06919f1670 fixed post-merge 2024-07-04 15:05:57 -07:00
Erik Dunteman
d7a2641da5 merge 2024-07-04 14:48:02 -07:00
Erik Dunteman
b25dceadc9 applied more of daves reccs 2024-07-04 14:44:41 -07:00
Erik Dunteman
895a2da4d2 reverd readme nuke 2024-07-04 14:30:59 -07:00
Erik Dunteman
533f6a3718 added dave's reccs 2024-07-04 14:30:07 -07:00
Erik Dunteman
8e15221904 removed comments 2024-07-03 22:32:22 -07:00
Erik Dunteman
23c5116122 moved to hdr_histogram file 2024-07-03 22:14:26 -07:00
Erik Dunteman
a1b872baf0 cleanup 2024-07-03 22:06:32 -07:00
Erik Dunteman
7d5cd8a3f1 should... be done 2024-07-03 22:04:05 -07:00
Erik Dunteman
7d66a545e8 moved into bindings 2024-07-03 09:32:25 -07:00
Erik Dunteman
a7a22656ac basic histogram integration into JS 2024-07-02 18:46:19 -07:00
Erik Dunteman
5ee28d0c22 record value tests passing hell yeah brother 2024-07-02 17:13:32 -07:00
Erik Dunteman
c77c83e8ae record value compiles, still unclear if works 2024-07-01 20:41:55 -07:00
Erik Dunteman
4d48e7ea7c started implementing hdr histogram 2024-07-01 20:27:27 -07:00
Erik Dunteman
a722a21fe7 massive progress 2024-06-28 18:17:32 -07:00
Erik Dunteman
34cd762581 working WIP 2024-06-27 22:40:26 -07:00
14 changed files with 915 additions and 12 deletions

View File

@@ -0,0 +1,36 @@
#include "root.h"
#include "blob.h"
#include "headers-handwritten.h"
#include "JavaScriptCore/JSCJSValue.h"
#include "JavaScriptCore/JSCast.h"
#include "JavaScriptCore/JSBigInt.h"
#include <JavaScriptCore/PropertySlot.h>
#include <JavaScriptCore/JSMap.h>
#include "JavaScriptCore/JSMapInlines.h"
#include <JavaScriptCore/JSString.h>
#include "ZigGlobalObject.h"
struct DoubleToIntMapKV {
double key;
uint64_t value;
};
extern "C" JSC::EncodedJSValue Bun__createMapFromDoubleUint64KVArray(Zig::GlobalObject* globalObject, const DoubleToIntMapKV* kvs, size_t length, bool asBigInt)
{
JSC::JSMap* map
= JSC::JSMap::create(globalObject->vm(), globalObject->mapStructure());
for (size_t i = 0; i < length; i++) {
if (asBigInt) {
map->set(globalObject, JSC::jsDoubleNumber(kvs[i].key), JSC::JSBigInt::createFrom(globalObject, kvs[i].value));
} else {
map->set(globalObject, JSC::jsDoubleNumber(kvs[i].key), JSC::jsNumber(kvs[i].value));
}
}
return JSC::JSValue::encode(map);
}

View File

@@ -1,5 +1,6 @@
#include "JSFFIFunction.h"
#include "root.h"
#include "JSFFIFunction.h"
#include "JavaScriptCore/JSCast.h"
#include "JavaScriptCore/JSType.h"
#include "JavaScriptCore/NumberObject.h"

View File

@@ -3903,6 +3903,24 @@ pub const JSValue = enum(JSValueReprInt) {
JSC__JSValue__putBunString(value, global, key, result);
}
pub fn putMaybeDouble(value: JSValue, global: *JSGlobalObject, double_key: f64, result: JSC.JSValue) void {
// if WebKit can convert the stringified key back to an int, it will, and then crash
// so if our double has a decimal part, we can safely put it as a string key
// otherwise, we route it through putMayBeIndex
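// illustrative sketch of the two paths (keys chosen to match the percentiles use case below):
//   putMaybeDouble(obj, global, 87.5, v) -> "87.5" contains a ".", so it is put as a plain string key
//   putMaybeDouble(obj, global, 50.0, v) -> "50" has no decimal part, so it is routed through putMayBeIndex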
var doubleBuf: [256]u8 = undefined;
const doubleStr: []const u8 = std.fmt.bufPrint(&doubleBuf, "{d}", .{double_key}) catch {
// only fails if the float formatted as a string exceeds 256 bytes, which doesn't happen in practice
return;
};
const hasDecimal = std.mem.containsAtLeast(u8, doubleStr, 1, ".");
if (hasDecimal) {
put(value, global, bun.String.fromBytes(doubleStr), result);
} else {
JSC__JSValue__putMayBeIndex(value, global, &bun.String.fromBytes(doubleStr), result);
}
}
pub fn put(value: JSValue, global: *JSGlobalObject, key: anytype, result: JSC.JSValue) void {
const Key = @TypeOf(key);
if (comptime @typeInfo(Key) == .Pointer) {
@@ -5105,6 +5123,17 @@ pub const JSValue = enum(JSValueReprInt) {
return null;
}
pub const DoubleToIntMapKV = extern struct {
key: f64,
value: u64,
};
extern fn Bun__createMapFromDoubleUint64KVArray(globalObject: *JSC.JSGlobalObject, array: [*]DoubleToIntMapKV, length: usize, asBigInt: bool) JSValue;
pub fn createMapFromDoubleUint64KVArray(global: *JSC.JSGlobalObject, kvs: []DoubleToIntMapKV, asBigInt: bool) JSValue {
return Bun__createMapFromDoubleUint64KVArray(global, kvs.ptr, kvs.len, asBigInt);
}
/// Alias for getIfPropertyExists
pub const getIfPropertyExists = get;

View File

@@ -73,4 +73,5 @@ pub const Classes = struct {
pub const BytesInternalReadableStreamSource = JSC.WebCore.ByteStream.Source;
pub const BrotliEncoder = JSC.API.BrotliEncoder;
pub const BrotliDecoder = JSC.API.BrotliDecoder;
pub const RecordableHistogram = JSC.Node.RecordableHistogram;
};

View File

@@ -52,6 +52,8 @@
// #include "JSPerformanceNavigation.h"
#include "JSPerformanceTiming.h"
#include "JavaScriptCore/Error.h"
#include "JavaScriptCore/JSFunction.h"
#include "ScriptExecutionContext.h"
#include "WebCoreJSClientData.h"
// #include "WebCoreOpaqueRootInlines.h"
@@ -207,6 +209,7 @@ static const HashTableValue JSPerformancePrototypeTableValues[] = {
// { "clearResourceTimings"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsPerformancePrototypeFunction_clearResourceTimings, 0 } },
// { "setResourceTimingBufferSize"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsPerformancePrototypeFunction_setResourceTimingBufferSize, 1 } },
{ "mark"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsPerformancePrototypeFunction_mark, 1 } },
{ "timerify"_s, static_cast<unsigned>(JSC::PropertyAttribute::Builtin), NoIntrinsic, { HashTableValue::BuiltinGeneratorType, performanceTimerifyCodeGenerator, 2 } }, // this routes through codegen'ed CPP to timerify in Performance.ts
{ "clearMarks"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsPerformancePrototypeFunction_clearMarks, 0 } },
{ "measure"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsPerformancePrototypeFunction_measure, 1 } },
{ "clearMeasures"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsPerformancePrototypeFunction_clearMeasures, 0 } },

View File

@@ -31,6 +31,8 @@
*/
#include "config.h"
#include "JavaScriptCore/JSFunction.h"
#include "JavaScriptCore/JSType.h"
#include "Performance.h"
// #include "Document.h"
@@ -79,8 +81,8 @@ void Performance::contextDestroyed()
DOMHighResTimeStamp Performance::now() const
{
auto* globalObject = scriptExecutionContext()->globalObject();
auto* bunVM = jsCast<Zig::GlobalObject*>(globalObject)->bunVM();
auto nowNano = Bun__readOriginTimer(bunVM);
return static_cast<double>(nowNano) / 1000000.0;
}
@@ -172,7 +174,7 @@ Vector<RefPtr<PerformanceEntry>> Performance::getEntriesByType(const String& ent
// if (m_navigationTiming && entryType == "navigation"_s)
// entries.append(m_navigationTiming);
// if (entryType == "resource"_s)
// entries.appendVector(m_resourceTimingBuffer);
@@ -453,7 +455,7 @@ void Performance::scheduleTaskIfNeeded()
return;
m_hasScheduledTimingBufferDeliveryTask = true;
context->postTask([protectedThis = Ref { *this }, this](ScriptExecutionContext& context) {
m_hasScheduledTimingBufferDeliveryTask = false;
for (auto& observer : copyToVector(m_observers))
observer->deliver();

View File

@@ -37,6 +37,8 @@
#include "EventTarget.h"
#include "ExceptionOr.h"
// #include "ReducedResolutionSeconds.h"
#include "JavaScriptCore/JSCJSValue.h"
#include "JavaScriptCore/JSFunction.h"
#include "ScriptExecutionContext.h"
// #include "Timer.h"
#include <variant>
@@ -78,6 +80,7 @@ struct PerformanceMeasureOptions;
class Performance final : public RefCounted<Performance>, public ContextDestructionObserver, public EventTarget {
WTF_MAKE_ISO_ALLOCATED(Performance);
public:
static Ref<Performance> create(ScriptExecutionContext* context, MonotonicTime timeOrigin) { return adoptRef(*new Performance(context, timeOrigin)); }
~Performance();
@@ -123,14 +126,13 @@ public:
ScriptExecutionContext* scriptExecutionContext() const final { return ContextDestructionObserver::scriptExecutionContext(); }
using RefCounted::ref;
using RefCounted::deref;
// void scheduleNavigationObservationTaskIfNeeded();
// PerformanceNavigationTiming* navigationTiming() { return m_navigationTiming.get(); }
// EventTargetData* eventTargetData() override;
// EventTargetData* eventTargetDataConcurrently() override;
// EventTargetData& ensureEventTargetData() override;
@@ -174,7 +176,6 @@ private:
ListHashSet<RefPtr<PerformanceObserver>> m_observers;
EventTargetData* eventTargetData() final { return &m_eventTargetData; }
EventTargetData* eventTargetDataConcurrently() final { return &m_eventTargetData; }
EventTargetData& ensureEventTargetData() final { return m_eventTargetData; }

View File

@@ -0,0 +1,503 @@
const std = @import("std");
pub const HistogramOptions = struct {
lowest_trackable_value: u64 = 1,
highest_trackable_value: u64 = 9007199254740991, // Number.MAX_SAFE_INTEGER, as per node perf_hooks
significant_figures: u8 = 3,
};
// Zig port of High Dynamic Range (HDR) Histogram algorithm
pub const HDRHistogram = struct {
// TLDR: an HDR histogram has buckets, with each bucket having a fixed number of sub-buckets
// Using default sig-figure of 3, the first bucket has 2048 sub-buckets
// In the 0th bucket, each sub-bucket represents a value range of 1
// In the 1st bucket, each sub-bucket represents a value range of 2
// In the 2nd bucket, each sub-bucket represents a value range of 4 and so on
// The sub-buckets are used to track the frequency of values within their range
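// Worked example with the default 3 significant figures (values match the test comments below):
//   bucket 0 covers 0..2047 with 2048 sub-buckets of width 1
//   bucket 1 covers 2048..4095 using the upper 1024 sub-buckets, each of width 2
//   bucket 2 covers 4096..8191 using the upper 1024 sub-buckets, each of width 4
//   so a value of 5000 lands in bucket 2, sub-bucket 1250 (5000 >> 2), counts index 3298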
// visible to user
min: u64,
max: u64,
total_count: u64 = 0,
// internals
allocator: std.mem.Allocator,
lowest_trackable_value: u64,
highest_trackable_value: u64,
significant_figures: u64,
sub_bucket_count: u64,
sub_bucket_half_count: u64,
sub_bucket_half_count_magnitude: u6,
unit_magnitude: u8,
sub_bucket_mask: u64,
bucket_count: u64,
counts: []u64,
const This = @This();
pub fn init(allocator: std.mem.Allocator, options: HistogramOptions) !This {
// Validate input
if (options.significant_figures < 1 or options.significant_figures > 5) {
return error.InvalidSignificantFigures;
}
if (options.lowest_trackable_value < 1) {
return error.InvalidLowestTrackableValue;
}
// Calculate derived values for efficient bucketing
// HDR Histogram is optimized for writes using bitwise operations and bit shifting, so we precalculate bitmasks and other helpful values
// upper value bound of each bucket
const largest_value_in_bucket = 2 * std.math.pow(u64, 10, options.significant_figures);
const log2largest_value = std.math.log2(@as(f64, @floatFromInt(largest_value_in_bucket)));
const sub_bucket_count_magnitude: u8 = @intFromFloat(@ceil(log2largest_value)); // bits required to represent largest value, rounded up
const sub_bucket_count = std.math.pow(u64, 2, sub_bucket_count_magnitude); // actual quantity of sub-buckets per bucket, defaults to 2048
const sub_bucket_half_count = sub_bucket_count / 2;
const sub_bucket_half_count_magnitude: u6 = @truncate(sub_bucket_count_magnitude - 1);
// lower bound of each bucket
const log2lowest_value = std.math.log2(@as(f64, @floatFromInt(options.lowest_trackable_value)));
const unit_magnitude = @as(u8, @intFromFloat(std.math.floor(log2lowest_value)));
// represent this as a mask of 1s for efficient bitwise operations
const sub_bucket_mask = (sub_bucket_count - 1) * std.math.pow(u64, 2, unit_magnitude);
// add more buckets if we need to track higher values
var bucket_count: u32 = 1;
var smallest_untrackable_value = sub_bucket_count * std.math.pow(u64, 2, unit_magnitude);
while (smallest_untrackable_value <= options.highest_trackable_value) {
if (smallest_untrackable_value > std.math.maxInt(u64) / 2) {
// next step would overflow, so we just increment the bucket count and break
bucket_count += 1;
break;
}
smallest_untrackable_value = 2 * smallest_untrackable_value;
bucket_count += 1;
}
const counts_len = (bucket_count + 1) * sub_bucket_half_count;
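// Worked example (matches the "init sigfig=3 lowest=1 highest=10_000" test below):
//   largest_value_in_bucket = 2 * 10^3 = 2000 -> sub_bucket_count_magnitude = 11
//   sub_bucket_count = 2048, sub_bucket_half_count = 1024, sub_bucket_mask = 2047, unit_magnitude = 0
//   smallest untrackable value doubles 2048 -> 4096 -> 8192 -> 16384 (> 10_000), so bucket_count = 4
//   counts_len = (4 + 1) * 1024 = 5120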
const counts = try allocator.alloc(u64, counts_len);
for (0..counts_len) |i| {
counts[i] = 0;
}
return This{
.allocator = allocator,
.lowest_trackable_value = options.lowest_trackable_value,
.highest_trackable_value = options.highest_trackable_value,
.significant_figures = options.significant_figures,
.sub_bucket_count = sub_bucket_count,
.sub_bucket_half_count = sub_bucket_half_count,
.sub_bucket_half_count_magnitude = sub_bucket_half_count_magnitude,
.unit_magnitude = unit_magnitude,
.sub_bucket_mask = sub_bucket_mask,
.bucket_count = bucket_count,
.counts = counts,
.total_count = 0,
.min = @intCast(std.math.maxInt(u64)),
.max = 0,
};
}
pub fn deinit(self: *This) void {
self.allocator.free(self.counts);
}
pub fn mean(self: *This) ?f64 {
if (self.total_count == 0) {
return null;
}
var total_sum: u64 = 0;
for (self.counts, 0..) |count, index| {
if (count > 0) {
const median_equiv_value = self.value_from_index(index);
total_sum += count * median_equiv_value;
}
}
return @as(f64, @floatFromInt(total_sum)) / @as(f64, @floatFromInt(self.total_count));
}
pub fn stddev(self: *This) ?f64 {
if (self.total_count == 0) {
return null;
}
const m = self.mean() orelse return null;
var geometric_dev_total: f64 = 0.0;
for (self.counts, 0..) |count, index| {
if (count > 0) {
const median_equiv_value = self.value_from_index(index);
const dev = @as(f64, @floatFromInt(median_equiv_value)) - m;
geometric_dev_total += (dev * dev) * @as(f64, @floatFromInt(count));
}
}
return std.math.sqrt(geometric_dev_total / @as(f64, @floatFromInt(self.total_count)));
}
pub fn reset(self: *This) void {
for (0..self.counts.len) |index| {
self.counts[index] = 0;
}
self.total_count = 0;
self.min = @intCast(std.math.maxInt(u64));
self.max = 0;
}
pub fn add(self: *This, other: *const This) !void {
if (self.lowest_trackable_value != other.lowest_trackable_value or self.highest_trackable_value != other.highest_trackable_value or self.significant_figures != other.significant_figures) {
return error.InvalidHistograms;
}
for (other.counts, 0..) |count, index| {
self.counts[index] += count;
}
self.total_count += other.total_count;
if (self.min > other.min) self.min = other.min;
if (self.max < other.max) self.max = other.max;
}
//
// Writes to the histogram
//
pub fn record_value(self: *This, value: u64, quantity: u64) void {
if (value < self.lowest_trackable_value or value > self.highest_trackable_value) return;
const counts_index = self.calculate_index(value);
if (counts_index >= self.counts.len) return;
self.counts[counts_index] += quantity;
self.total_count += quantity;
if (self.min > value) self.min = value;
if (self.max < value) self.max = value;
}
fn calculate_index(self: *const This, value: u64) usize {
const bucket_index = self.get_bucket_index(value);
const sub_bucket_index = self.get_sub_bucket_index(value, bucket_index);
const counts_index = self.get_counts_index(bucket_index, sub_bucket_index);
return counts_index;
}
fn get_counts_index(self: *const This, bucket_index: u64, sub_bucket_index: u64) usize {
const bucket_base_index = (bucket_index + 1) << self.sub_bucket_half_count_magnitude;
return @as(usize, bucket_base_index + sub_bucket_index - self.sub_bucket_half_count);
}
fn get_bucket_index(self: *const This, value: u64) u8 {
const pow2ceiling = 64 - @clz(value | self.sub_bucket_mask);
return pow2ceiling - self.unit_magnitude - (self.sub_bucket_half_count_magnitude + 1);
}
fn get_sub_bucket_index(self: *const This, value: u64, bucket_index: u8) u64 {
return value >> @as(u6, @intCast(bucket_index + self.unit_magnitude));
}
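// Worked example with the defaults (unit_magnitude = 0, sub_bucket_mask = 2047, half-count magnitude 10):
//   value = 5000: 5000 | 2047 = 6143, 64 - @clz(6143) = 13, bucket_index = 13 - 0 - 11 = 2
//   sub_bucket_index = 5000 >> (2 + 0) = 1250
//   counts_index = ((2 + 1) << 10) + 1250 - 1024 = 3298, matching the test comments below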
//
// Reads from the histogram
//
fn value_from_index(self: *This, index: u64) u64 {
const bucket_index = self.get_bucket_index_from_idx(index);
const sub_bucket_index = self.get_sub_bucket_index_from_idx(index, bucket_index);
// Directly compute the value from the bucket index and sub-bucket index
return @as(u64, sub_bucket_index) << @as(u6, @truncate(bucket_index + self.unit_magnitude));
}
fn get_bucket_index_from_idx(self: *const This, index: u64) u8 {
var bucket_index: u8 = 0;
var remaining_index = index;
while (remaining_index >= self.sub_bucket_count) {
bucket_index += 1;
remaining_index -= self.sub_bucket_half_count;
}
return bucket_index;
}
fn get_sub_bucket_index_from_idx(self: *const This, index: u64, bucket_index: u8) u64 {
const sub_bucket_index = index - (bucket_index * self.sub_bucket_half_count);
return sub_bucket_index;
}
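// Worked example, inverting the forward mapping above:
//   index = 3298: the loop subtracts sub_bucket_half_count (1024) twice before the remainder (1250)
//   drops below sub_bucket_count (2048), so bucket_index = 2 and sub_bucket_index = 3298 - 2 * 1024 = 1250
//   value_from_index then returns 1250 << (2 + 0) = 5000, the lower bound of that sub-bucket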
// percentile is a value between 0 and 100
pub fn value_at_percentile(self: *This, percentile: f64) ?u64 {
if (percentile < 0 or percentile > 100) {
return null;
}
if (percentile == 0 and self.total_count > 0) {
return self.min;
}
const total = self.total_count;
const target_count = @as(f64, @floatFromInt(total)) * percentile / 100;
var running_total: f64 = 0;
for (self.counts, 0..) |count, index| {
running_total += @floatFromInt(count);
if (running_total >= target_count) {
// we found the index that corresponds to the percentile
return self.value_from_index(index);
}
}
return null;
}
};
test "value_at_percentile" {
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{});
defer histogram.deinit();
histogram.record_value(100, 9000); // 0-90%
histogram.record_value(200, 990); // 90-99.9%
histogram.record_value(1000, 9); // 99.9-99.99%
histogram.record_value(2000, 1); // 99.99-100%
try std.testing.expect(histogram.value_at_percentile(0) == 100);
try std.testing.expect(histogram.value_at_percentile(50) == 100);
try std.testing.expect(histogram.value_at_percentile(90) == 100);
try std.testing.expect(histogram.value_at_percentile(99) == 200);
try std.testing.expect(histogram.value_at_percentile(99.9) == 200);
try std.testing.expect(histogram.value_at_percentile(99.99) == 1000);
}
test "value_from_index" {
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{});
defer histogram.deinit();
histogram.record_value(100, 1); // -> bucket_index: 0, sub_bucket_index: 100 ==> counts_index: 100
var value = histogram.value_from_index(100);
// first bucket has a unit width of 1, so the value returned is guaranteed to be 100
try std.testing.expect(value == 100);
histogram.record_value(5000, 1); // -> bucket_index: 2, sub_bucket_index: 1250 ==> counts_index: 3298
value = histogram.value_from_index(3298);
// value is calculated as the lower bound of the sub-bucket, which in this case is 5000-5003
try std.testing.expect(value == 5000);
}
test "get_indices_from_idx" {
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{});
defer histogram.deinit();
histogram.record_value(100, 1); // -> bucket_index: 0, sub_bucket_index: 100 ==> counts_index: 100
histogram.record_value(200, 1); // -> bucket_index: 0, sub_bucket_index: 200 ==> counts_index: 200
histogram.record_value(1000, 1); // -> bucket_index: 0, sub_bucket_index: 1000 ==> counts_index: 1000
histogram.record_value(2000, 1); // -> bucket_index: 0, sub_bucket_index: 2000 ==> counts_index: 2000
histogram.record_value(3000, 1); // -> bucket_index: 1, sub_bucket_index: 1500 ==> counts_index: 2524
histogram.record_value(5000, 1); // -> bucket_index: 2, sub_bucket_index: 1250 ==> counts_index: 3298
histogram.record_value(1000000, 1); // -> bucket_index: 9, sub_bucket_index: 1953 ==> counts_index: 11169
var bucket_index = histogram.get_bucket_index_from_idx(100);
try std.testing.expect(bucket_index == 0);
var sub_bucket_index = histogram.get_sub_bucket_index_from_idx(100, bucket_index);
try std.testing.expect(sub_bucket_index == 100);
bucket_index = histogram.get_bucket_index_from_idx(200);
try std.testing.expect(bucket_index == 0);
sub_bucket_index = histogram.get_sub_bucket_index_from_idx(200, bucket_index);
try std.testing.expect(sub_bucket_index == 200);
bucket_index = histogram.get_bucket_index_from_idx(1000);
try std.testing.expect(bucket_index == 0);
sub_bucket_index = histogram.get_sub_bucket_index_from_idx(1000, bucket_index);
try std.testing.expect(sub_bucket_index == 1000);
bucket_index = histogram.get_bucket_index_from_idx(2000);
try std.testing.expect(bucket_index == 0);
sub_bucket_index = histogram.get_sub_bucket_index_from_idx(2000, bucket_index);
try std.testing.expect(sub_bucket_index == 2000);
bucket_index = histogram.get_bucket_index_from_idx(2524);
try std.testing.expect(bucket_index == 1);
sub_bucket_index = histogram.get_sub_bucket_index_from_idx(2524, bucket_index);
try std.testing.expect(sub_bucket_index == 1500);
bucket_index = histogram.get_bucket_index_from_idx(3298);
try std.testing.expect(bucket_index == 2);
sub_bucket_index = histogram.get_sub_bucket_index_from_idx(3298, bucket_index);
try std.testing.expect(sub_bucket_index == 1250);
bucket_index = histogram.get_bucket_index_from_idx(11169);
try std.testing.expect(bucket_index == 9);
sub_bucket_index = histogram.get_sub_bucket_index_from_idx(11169, bucket_index);
try std.testing.expect(sub_bucket_index == 1953);
}
test "record_value" {
const significant_figures = 3;
const lowest_trackable_value = 1;
const highest_trackable_value = 1000;
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{ .lowest_trackable_value = lowest_trackable_value, .highest_trackable_value = highest_trackable_value, .significant_figures = significant_figures });
defer histogram.deinit();
histogram.record_value(1, 1);
try std.testing.expect(histogram.total_count == 1);
try std.testing.expect(histogram.min == 1);
try std.testing.expect(histogram.max == 1);
try std.testing.expect(histogram.counts.len == 2048);
try std.testing.expect(histogram.counts[1] == 1);
histogram.record_value(1, 1);
try std.testing.expect(histogram.total_count == 2);
try std.testing.expect(histogram.min == 1);
try std.testing.expect(histogram.max == 1);
try std.testing.expect(histogram.counts[1] == 2);
histogram.record_value(100, 1);
histogram.record_value(900, 1);
try std.testing.expect(histogram.total_count == 4);
try std.testing.expect(histogram.min == 1);
try std.testing.expect(histogram.max == 900);
try std.testing.expect(histogram.counts[1] == 2);
try std.testing.expect(histogram.counts[100] == 1);
try std.testing.expect(histogram.counts[900] == 1);
}
test "record_value_multiple_buckets" {
const significant_figures = 1;
const lowest_trackable_value = 1;
const highest_trackable_value = 10000;
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{ .lowest_trackable_value = lowest_trackable_value, .highest_trackable_value = highest_trackable_value, .significant_figures = significant_figures });
defer histogram.deinit();
histogram.record_value(1, 1);
histogram.record_value(2, 1);
histogram.record_value(3, 1);
histogram.record_value(4, 1);
histogram.record_value(5, 1);
histogram.record_value(10, 1);
histogram.record_value(100, 1);
histogram.record_value(1000, 1);
try std.testing.expect(histogram.total_count == 8);
try std.testing.expect(histogram.min == 1);
try std.testing.expect(histogram.max == 1000);
try std.testing.expect(histogram.counts[1] == 1);
try std.testing.expect(histogram.counts[2] == 1);
try std.testing.expect(histogram.counts[3] == 1);
try std.testing.expect(histogram.counts[4] == 1);
try std.testing.expect(histogram.counts[5] == 1);
try std.testing.expect(histogram.counts[10] == 1);
try std.testing.expect(histogram.counts[57] == 1); // indices pulled from official implementation
try std.testing.expect(histogram.counts[111] == 1); // indices pulled from official implementation
}
test "init sigfig=3 lowest=1 highest=1000" {
// used official implementation to verify the values
const significant_figures = 3;
const lowest_trackable_value = 1;
const highest_trackable_value = 1000;
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{ .lowest_trackable_value = lowest_trackable_value, .highest_trackable_value = highest_trackable_value, .significant_figures = significant_figures });
defer histogram.deinit();
try std.testing.expect(histogram.lowest_trackable_value == lowest_trackable_value);
try std.testing.expect(histogram.highest_trackable_value == highest_trackable_value);
try std.testing.expect(histogram.significant_figures == significant_figures);
try std.testing.expect(histogram.sub_bucket_count == 2048);
try std.testing.expect(histogram.sub_bucket_half_count == 1024);
try std.testing.expect(histogram.unit_magnitude == 0);
try std.testing.expect(histogram.sub_bucket_mask == 2047);
try std.testing.expect(histogram.bucket_count == 1);
try std.testing.expect(histogram.counts.len == 2048);
}
test "init sigfig=3 lowest=1 highest=10_000" {
const significant_figures = 3;
const lowest_trackable_value = 1;
const highest_trackable_value = 10_000;
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{ .lowest_trackable_value = lowest_trackable_value, .highest_trackable_value = highest_trackable_value, .significant_figures = significant_figures });
defer histogram.deinit();
try std.testing.expect(histogram.lowest_trackable_value == lowest_trackable_value);
try std.testing.expect(histogram.highest_trackable_value == highest_trackable_value);
try std.testing.expect(histogram.significant_figures == significant_figures);
try std.testing.expect(histogram.sub_bucket_count == 2048);
try std.testing.expect(histogram.sub_bucket_half_count == 1024);
try std.testing.expect(histogram.unit_magnitude == 0);
try std.testing.expect(histogram.sub_bucket_mask == 2047);
try std.testing.expect(histogram.bucket_count == 4);
try std.testing.expect(histogram.counts.len == 5120);
}
test "init sigfig=4 lowest=1 highest=10_000" {
const significant_figures = 4;
const lowest_trackable_value = 1;
const highest_trackable_value = 10_000;
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{ .lowest_trackable_value = lowest_trackable_value, .highest_trackable_value = highest_trackable_value, .significant_figures = significant_figures });
defer histogram.deinit();
//&{lowestDiscernibleValue:1 highestTrackableValue:10000 unitMagnitude:0 significantFigures:4 subBucketHalfCountMagnitude:14 subBucketHalfCount:16384 subBucketMask:32767 subBucketCount:32768 bucketCount:1 countsLen:32768 totalCount:0 counts
try std.testing.expect(histogram.lowest_trackable_value == lowest_trackable_value);
try std.testing.expect(histogram.highest_trackable_value == highest_trackable_value);
try std.testing.expect(histogram.significant_figures == significant_figures);
try std.testing.expect(histogram.sub_bucket_count == 32768);
try std.testing.expect(histogram.sub_bucket_half_count == 16384);
try std.testing.expect(histogram.unit_magnitude == 0);
try std.testing.expect(histogram.sub_bucket_mask == 32767);
try std.testing.expect(histogram.bucket_count == 1);
try std.testing.expect(histogram.counts.len == 32768);
}
test "init sigfig=4 lowest=5 highest=1000" {
const significant_figures = 4;
const lowest_trackable_value = 5;
const highest_trackable_value = 1000;
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{ .lowest_trackable_value = lowest_trackable_value, .highest_trackable_value = highest_trackable_value, .significant_figures = significant_figures });
defer histogram.deinit();
try std.testing.expect(histogram.lowest_trackable_value == lowest_trackable_value);
try std.testing.expect(histogram.highest_trackable_value == highest_trackable_value);
try std.testing.expect(histogram.significant_figures == significant_figures);
try std.testing.expect(histogram.sub_bucket_count == 32768);
try std.testing.expect(histogram.sub_bucket_half_count == 16384);
try std.testing.expect(histogram.unit_magnitude == 2);
try std.testing.expect(histogram.sub_bucket_mask == 131068);
try std.testing.expect(histogram.bucket_count == 1);
try std.testing.expect(histogram.counts.len == 32768);
}
test "init sigfig=5 lowest=10 highest=200" {
const significant_figures = 5;
const lowest_trackable_value = 10;
const highest_trackable_value = 200;
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{ .lowest_trackable_value = lowest_trackable_value, .highest_trackable_value = highest_trackable_value, .significant_figures = significant_figures });
defer histogram.deinit();
try std.testing.expect(histogram.lowest_trackable_value == lowest_trackable_value);
try std.testing.expect(histogram.highest_trackable_value == highest_trackable_value);
try std.testing.expect(histogram.significant_figures == significant_figures);
try std.testing.expect(histogram.sub_bucket_count == 262144);
try std.testing.expect(histogram.sub_bucket_half_count == 131072);
try std.testing.expect(histogram.unit_magnitude == 3);
try std.testing.expect(histogram.sub_bucket_mask == 2097144);
try std.testing.expect(histogram.bucket_count == 1);
try std.testing.expect(histogram.counts.len == 262144);
}
// default node timerify histogram
test "default init" {
const allocator = std.testing.allocator;
var histogram = try HDRHistogram.init(allocator, .{});
defer histogram.deinit();
histogram.record_value(100, 1); // -> bucket_index: 0, sub_bucket_index: 100 ==> counts_index: 100
histogram.record_value(200, 1); // -> bucket_index: 0, sub_bucket_index: 200 ==> counts_index: 200
histogram.record_value(1000, 1); // -> bucket_index: 0, sub_bucket_index: 1000 ==> counts_index: 1000
histogram.record_value(2000, 1); // -> bucket_index: 0, sub_bucket_index: 2000 ==> counts_index: 2000
histogram.record_value(3000, 1); // -> bucket_index: 1, sub_bucket_index: 1500 ==> counts_index: 2524
histogram.record_value(5000, 1); // -> bucket_index: 2, sub_bucket_index: 1250 ==> counts_index: 3298
histogram.record_value(1000000, 1); // -> bucket_index: 9, sub_bucket_index: 1953 ==> counts_index: 11169
try std.testing.expect(histogram.total_count == 7);
try std.testing.expect(histogram.min == 100);
try std.testing.expect(histogram.max == 1000000);
try std.testing.expect(histogram.counts[100] == 1);
try std.testing.expect(histogram.counts[200] == 1);
try std.testing.expect(histogram.counts[1000] == 1);
try std.testing.expect(histogram.counts[2000] == 1);
try std.testing.expect(histogram.counts[2524] == 1);
try std.testing.expect(histogram.counts[3298] == 1);
try std.testing.expect(histogram.counts[11169] == 1);
}

View File

@@ -549,4 +549,36 @@ export default [
// createWriteStream: { fn: "createWriteStream", length: 2 },
},
}),
define({
name: "RecordableHistogram",
construct: false,
noConstructor: true,
finalize: true, // this triggers the deallocation for bun.destroy
configurable: false,
hasPendingActivity: false,
klass: {},
JSType: "0b11101110",
proto: {
min: { getter: "min" },
max: { getter: "max" },
mean: { getter: "mean" },
// exceeds: { getter: "exceeds" }, // not implemented
stddev: { getter: "stddev" },
count: { getter: "count" },
percentiles: { getter: "percentiles" },
reset: { fn: "reset", length: 0 },
record: { fn: "record", length: 1 },
recordDelta: { fn: "recordDelta", length: 0 },
add: { fn: "add", length: 1 },
percentile: { fn: "percentile", length: 1 },
minBigInt: { fn: "minBigInt", length: 0 },
maxBigInt: { fn: "maxBigInt", length: 0 },
// exceedsBigInt: { fn: "exceedsBigInt", length: 0 }, // not implemented
countBigInt: { fn: "countBigInt", length: 0 },
percentilesBigInt: { fn: "percentilesBigInt", length: 0 },
percentileBigInt: { fn: "percentileBigInt", length: 1 },
toJSON: { fn: "toJSON", length: 0 },
},
values: [],
}),
];

View File

@@ -0,0 +1,229 @@
const std = @import("std");
const bun = @import("root").bun;
const HDRHistogram = @import("hdr_histogram.zig").HDRHistogram;
const meta = bun.meta;
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const ZigString = JSC.ZigString;
// Wrapper around HDR Histogram
pub const RecordableHistogram = struct {
pub usingnamespace JSC.Codegen.JSRecordableHistogram;
hdrHist: HDRHistogram,
// RecordableHistogram specific internals
delta_start: ?bun.timespec = null,
const This = @This();
const PropertyGetter = fn (this: *This, globalThis: *JSC.JSGlobalObject) JSC.JSValue;
pub fn min(this: *This, globalThis: *JSC.JSGlobalObject) JSValue {
return globalThis.toJS(this.hdrHist.min, .temporary);
}
pub fn minBigInt(this: *This, globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSValue {
return JSC.JSValue.fromUInt64NoTruncate(globalThis, this.hdrHist.min);
}
pub fn max(this: *This, globalThis: *JSC.JSGlobalObject) JSValue {
return globalThis.toJS(this.hdrHist.max, .temporary);
}
pub fn maxBigInt(this: *This, globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSValue {
return JSC.JSValue.fromUInt64NoTruncate(globalThis, this.hdrHist.max);
}
pub fn count(this: *This, globalThis: *JSC.JSGlobalObject) JSValue {
return globalThis.toJS(this.hdrHist.total_count, .temporary);
}
pub fn countBigInt(this: *This, globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSValue {
return JSC.JSValue.fromUInt64NoTruncate(globalThis, this.hdrHist.total_count);
}
pub fn mean(this: *This, globalThis: *JSC.JSGlobalObject) JSValue {
if (this.hdrHist.mean()) |m| {
return globalThis.toJS(m, .temporary);
}
return globalThis.toJS(std.math.nan(f64), .temporary);
}
pub fn stddev(this: *This, globalThis: *JSC.JSGlobalObject) JSValue {
if (this.hdrHist.stddev()) |sd| {
return globalThis.toJS(sd, .temporary);
}
return globalThis.toJS(std.math.nan(f64), .temporary);
}
pub fn percentile_calc(this: *This, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) ?u64 {
const args = callframe.arguments(1).slice();
if (args.len < 1) {
globalThis.throwInvalidArguments("Expected query percent as argument", .{});
return null;
}
const percent = args[0].getNumber() orelse {
globalThis.throwInvalidArguments("Expected a number", .{});
return null;
};
const value = this.hdrHist.value_at_percentile(percent) orelse return null;
return value;
}
pub fn percentile(this: *This, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
const value = this.percentile_calc(globalThis, callframe);
if (value) |v| {
return globalThis.toJS(v, .temporary);
}
return .zero;
}
pub fn percentileBigInt(this: *This, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
const value = this.percentile_calc(globalThis, callframe);
if (value) |v| {
return JSC.JSValue.fromUInt64NoTruncate(globalThis, v);
}
return .zero;
}
pub fn percentiles_calc(this: *This, globalThis: *JSC.JSGlobalObject) ?std.ArrayList(JSValue.DoubleToIntMapKV) {
// first get the 100th percentile, then loop 0, 50, 75, 87.5, ... until we reach the highest recorded value
const maxPercentileValue = this.hdrHist.value_at_percentile(100) orelse return null;
var percent: f64 = 0;
var stack_allocator = std.heap.stackFallback(4096, bun.default_allocator);
var kvs = std.ArrayList(JSValue.DoubleToIntMapKV).init(stack_allocator.get());
while (true) {
if (this.hdrHist.value_at_percentile(percent)) |val| {
const kv = JSValue.DoubleToIntMapKV{ .key = percent, .value = val };
kvs.append(kv) catch {
globalThis.throwOutOfMemory();
return null;
};
if (val >= maxPercentileValue) {
break;
}
}
percent += ((100 - percent) / 2);
}
kvs.append(JSValue.DoubleToIntMapKV{ .key = 100, .value = maxPercentileValue }) catch {
globalThis.throwOutOfMemory();
return null;
};
return kvs;
}
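// e.g. with a single recorded value, value_at_percentile(0) already equals the max, so the loop
// exits on its first pass and the map contains just the 0 and 100 keys; this matches the
// "timerify with histogram" test expecting percentiles.size == 2 after one entry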
pub fn percentiles(this: *This, globalThis: *JSC.JSGlobalObject) JSValue {
const kvs = this.percentiles_calc(globalThis) orelse return .zero;
defer kvs.deinit();
const asBigInt = false;
return JSValue.createMapFromDoubleUint64KVArray(globalThis, kvs.items, asBigInt);
}
pub fn percentilesBigInt(this: *This, globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSValue {
const kvs = this.percentiles_calc(globalThis) orelse return .zero;
defer kvs.deinit();
const asBigInt = true;
return JSValue.createMapFromDoubleUint64KVArray(globalThis, kvs.items, asBigInt);
}
//
// additional functions
// record duration in nanoseconds
pub fn record(this: *This, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
const args = callframe.arguments(1).slice();
if (args.len < 1) {
globalThis.throwInvalidArguments("Expected the value to record as an argument", .{});
return .zero;
}
const value = args[0].toUInt64NoTruncate();
this.hdrHist.record_value(value, 1);
return .undefined;
}
// record time since last call to recordDelta
pub fn recordDelta(this: *This, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) callconv(JSC.conv) JSValue {
if (this.delta_start) |start| {
const end = bun.timespec.now();
const diff = end.duration(&start);
this.hdrHist.record_value(@intCast(diff.nsec), 1);
this.delta_start = end;
return .undefined;
}
// first call no-ops
this.delta_start = bun.timespec.now();
return .undefined;
}
pub fn reset(this: *This, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
_ = globalThis;
_ = callframe;
this.hdrHist.reset();
return .undefined;
}
pub fn add(this: *This, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
const args = callframe.arguments(1).slice();
if (args.len < 1) {
globalThis.throwInvalidArguments("Expected other histogram to add as an argument", .{});
return .zero;
}
const other = RecordableHistogram.fromJS(args[0]) orelse {
globalThis.throwInvalidArguments("Expected a RecordableHistogram", .{});
return .zero;
};
this.hdrHist.add(&other.hdrHist) catch |err| {
globalThis.throwError(err, "failed to add histograms");
return .zero;
};
return .undefined;
}
pub fn toJSON(this: *This, globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSValue {
var object = JSC.JSValue.createEmptyObject(globalThis, 4);
// put() API examples, kept for reference:
// object.put(globalThis, ZigString.static("name"), ZigString.init("BuildMessage").toJS(globalThis));
// object.put(globalThis, ZigString.static("position"), this.getPosition(globalThis));
object.put(globalThis, ZigString.static("min"), this.min(globalThis));
object.put(globalThis, ZigString.static("max"), this.max(globalThis));
object.put(globalThis, ZigString.static("count"), this.count(globalThis));
object.put(globalThis, ZigString.static("mean"), this.mean(globalThis));
object.put(globalThis, ZigString.static("stddev"), this.stddev(globalThis));
const percentilesKV = this.percentiles_calc(globalThis) orelse return .undefined;
defer percentilesKV.deinit();
var percentagesObj = JSC.JSValue.createEmptyObject(globalThis, percentilesKV.items.len);
for (percentilesKV.items) |kv| {
// many of the percentiles are integers which crash WebKit when used as stringified keys, so treat them as MaybeDouble
percentagesObj.putMaybeDouble(globalThis, kv.key, globalThis.toJS(kv.value, .temporary));
}
object.put(globalThis, ZigString.static("percentiles"), percentagesObj);
return object;
}
// since we create this with bun.new, we need to have it be destroyable
// our node.classes.ts has finalize=true to generate the call to finalize
pub fn finalize(this: *This) callconv(JSC.conv) void {
this.hdrHist.deinit();
bun.destroy(this);
}
};
fn createHistogram(globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue {
const hdrHist = HDRHistogram.init(bun.default_allocator, .{}) catch |err| {
globalThis.throwError(err, "failed to initialize histogram");
return .zero;
};
var histogram = bun.new(RecordableHistogram, .{ .hdrHist = hdrHist });
return histogram.toJS(globalThis);
}
pub fn createPerfHooksHistogramBinding(global: *JSC.JSGlobalObject) callconv(JSC.conv) JSC.JSValue {
return JSC.JSFunction.create(
global,
"createHistogram",
createHistogram,
3, // function length
.{},
);
}

View File

@@ -0,0 +1,27 @@
export function timerify(fn: Function, options?: { histogram?: any }) {
// histogram is an optional parameter
let { histogram } = options || {};
if (!histogram?.record) {
var wrapped = function wrapper() {
return fn.$apply(this, arguments);
};
} else {
// wrap fn in a timer and return the wrapped function
var wrapped = function () {
const start = performance.now();
const result = fn.$apply(this, arguments);
const end = performance.now();
histogram.record(Math.ceil((end - start) * 1e6));
return result;
};
}
// set the name of the wrapped function
Object.defineProperty(wrapped, "name", {
value: `timerified ${fn.name || "anonymous"}`,
configurable: true,
});
return wrapped;
}
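// Illustrative usage, mirroring the perf_hooks tests in this change (`work` is any function to be timed):
//   const histogram = createHistogram();
//   const timed = performance.timerify(work, { histogram });
//   timed(); // the duration is recorded as Math.ceil((end - start) * 1e6) nanoseconds
//   histogram.min and histogram.max now reflect that single call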

View File

@@ -1,6 +1,8 @@
// Hardcoded module "node:perf_hooks"
const { throwNotImplemented } = require("internal/shared");
const createHistogram = $zig("node_perf_hooks_histogram_binding.zig", "createPerfHooksHistogramBinding");
var {
Performance,
PerformanceEntry,
@@ -104,6 +106,9 @@ export default {
measure(f) {
return performance.measure(...arguments);
},
timerify(f) {
return performance.timerify(...arguments);
},
clearMarks(f) {
return performance.clearMarks(...arguments);
},
@@ -158,8 +163,6 @@ export default {
monitorEventLoopDelay() {
throwNotImplemented("perf_hooks.monitorEventLoopDelay");
},
createHistogram() {
throwNotImplemented("perf_hooks.createHistogram");
},
createHistogram,
PerformanceResourceTiming,
};

View File

@@ -61,6 +61,7 @@ pub const Node = struct {
pub usingnamespace @import("./bun.js/node/node_fs_stat_watcher.zig");
pub usingnamespace @import("./bun.js/node/node_fs_binding.zig");
pub usingnamespace @import("./bun.js/node/node_os.zig");
pub usingnamespace @import("./bun.js/node/node_perf_hooks_histogram_binding.zig");
pub const fs = @import("./bun.js/node/node_fs_constant.zig");
pub const Util = struct {
pub const parseArgs = @import("./bun.js/node/util/parse_args.zig").parseArgs;

View File

@@ -3,7 +3,6 @@ import { test, expect } from "bun:test";
test("stubs", () => {
expect(() => perf.monitorEventLoopDelay()).toThrow();
expect(() => perf.createHistogram()).toThrow();
expect(perf.performance.nodeTiming).toBeObject();
expect(perf.performance.now()).toBeNumber();
@@ -21,4 +20,40 @@ test("doesn't throw", () => {
expect(() => performance.getEntriesByType("measure")).not.toThrow();
expect(() => performance.now()).not.toThrow();
expect(() => performance.timeOrigin).not.toThrow();
expect(() => perf.createHistogram()).not.toThrow();
expect(() => performance.timerify(() => {})).not.toThrow();
expect(() => performance.timerify(() => {}, { histogram: perf.createHistogram() })).not.toThrow();
});
test("timerify with histogram", () => {
const histogram = perf.createHistogram({ auto: true });
const fn = performance.timerify(() => {}, { histogram: histogram });
expect(histogram.max).toBe(0); // should default to 0
fn();
expect(histogram.toJSON()).toBeObject();
expect(histogram.min).toBeGreaterThan(0);
expect(histogram.max).toBe(histogram.min); // one entry
expect(histogram.percentiles.size).toBe(2); // 0th and 100th
fn();
expect(histogram.min).toBeGreaterThan(0);
expect(histogram.max).toBeGreaterThan(histogram.min);
expect(histogram.percentiles.size).toBeGreaterThan(2);
});
test("nested timerify", () => {
const zeroth = (a, b = 1) => {};
const first = performance.timerify(zeroth);
const second = performance.timerify(first);
expect(first).not.toBe(second);
expect(second).not.toBe(first);
expect(first.name).toBe("timerified zeroth");
expect(second.name).toBe("timerified timerified zeroth");
// assert.notStrictEqual(n, o);
// assert.notStrictEqual(n, p);
// assert.notStrictEqual(o, p);
// assert.strictEqual(n.length, m.length);
// assert.strictEqual(n.name, "timerified m");
// assert.strictEqual(p.name, "timerified timerified m");
});