mirror of
https://github.com/oven-sh/bun
synced 2026-03-02 13:31:01 +01:00
Compare commits
18 Commits
dylan/fix-
...
kai/fix-pr
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7c5db11ead | ||
|
|
2970c85d2c | ||
|
|
270c09bd90 | ||
|
|
d40662760a | ||
|
|
6a922be010 | ||
|
|
dcf1b1fbf5 | ||
|
|
4e234ded2a | ||
|
|
3f360b0682 | ||
|
|
efdeac0a85 | ||
|
|
d900309354 | ||
|
|
da7dfa1b76 | ||
|
|
a182c313e2 | ||
|
|
a2ae0d06b3 | ||
|
|
26579c3ba5 | ||
|
|
dd53294a25 | ||
|
|
2bc637ccff | ||
|
|
2e9e448498 | ||
|
|
3ee9af7649 |
@@ -78,7 +78,7 @@
|
||||
"clang-tidy:diff": "bun run analysis --target clang-tidy-diff",
|
||||
"zig-format": "bun run analysis:no-llvm --target zig-format",
|
||||
"zig-format:check": "bun run analysis:no-llvm --target zig-format-check",
|
||||
"prettier": "bunx prettier@latest --plugin=prettier-plugin-organize-imports --config .prettierrc --write src docs 'test/**/*.{test,spec}.{ts,tsx,js,jsx,mts,mjs,cjs,cts}' '!test/**/*fixture*.*'",
|
||||
"prettier": "bunx prettier@latest --plugin=prettier-plugin-organize-imports --config .prettierrc --write scripts packages src docs 'test/**/*.{test,spec}.{ts,tsx,js,jsx,mts,mjs,cjs,cts}' '!test/**/*fixture*.*'",
|
||||
"node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests ",
|
||||
"clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true"
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import test from 'ava'
|
||||
import test from "ava";
|
||||
|
||||
import { sum } from '../index.js'
|
||||
import { sum } from "../index.js";
|
||||
|
||||
test('sum from native', (t) => {
|
||||
t.is(sum(1, 2), 3)
|
||||
})
|
||||
test("sum from native", t => {
|
||||
t.is(sum(1, 2), 3);
|
||||
});
|
||||
|
||||
@@ -15,4 +15,4 @@
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,4 +15,4 @@
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,4 +18,4 @@
|
||||
"libc": [
|
||||
"glibc"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,4 +18,4 @@
|
||||
"libc": [
|
||||
"musl"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,4 +18,4 @@
|
||||
"libc": [
|
||||
"glibc"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,4 +18,4 @@
|
||||
"libc": [
|
||||
"musl"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,4 +15,4 @@
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,4 +34,4 @@
|
||||
"universal": "napi universal",
|
||||
"version": "napi version"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
:host {
|
||||
--bun-error-color: #e33737;
|
||||
--bun-error-monospace: ui-monospace, Menlo, Monaco, "Cascadia Mono",
|
||||
"Segoe UI Mono", "Roboto Mono", "Oxygen Mono", "Ubuntu Monospace",
|
||||
--bun-error-monospace:
|
||||
ui-monospace, Menlo, Monaco, "Cascadia Mono", "Segoe UI Mono", "Roboto Mono", "Oxygen Mono", "Ubuntu Monospace",
|
||||
"Source Code Pro", "Fira Mono", "Droid Sans Mono", "Courier New", monospace;
|
||||
--bun-error-width: 512px;
|
||||
}
|
||||
@@ -15,7 +15,9 @@
|
||||
text-decoration: underline;
|
||||
}
|
||||
#BunErrorOverlay-container {
|
||||
box-shadow: 0px 16px 24px rgba(0, 0, 0, 0.06), 0px 2px 6px rgba(0, 0, 0, 0.1),
|
||||
box-shadow:
|
||||
0px 16px 24px rgba(0, 0, 0, 0.06),
|
||||
0px 2px 6px rgba(0, 0, 0, 0.1),
|
||||
0px 0px 1px rgba(0, 0, 0, 0.04);
|
||||
backdrop-filter: blur(42px);
|
||||
backface-visibility: visible;
|
||||
@@ -29,8 +31,9 @@
|
||||
right: 48px;
|
||||
z-index: 999999;
|
||||
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica,
|
||||
Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
|
||||
font-family:
|
||||
-apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji",
|
||||
"Segoe UI Emoji", "Segoe UI Symbol";
|
||||
}
|
||||
|
||||
.BunErrorRoot--FullPage #BunErrorOverlay-container {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import React from "react";
|
||||
import { useContext, createContext } from "react";
|
||||
import React, { createContext, useContext } from "react";
|
||||
import { render, unmountComponentAtNode } from "react-dom";
|
||||
import type {
|
||||
FallbackMessageContainer,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { normalizedFilename, StackFrameIdentifier, thisCwd, StackFrameScope } from "./index";
|
||||
import type { JSException, JSException as JSExceptionType, Message, Problems } from "../../src/api/schema";
|
||||
import { normalizedFilename, StackFrameIdentifier, StackFrameScope, thisCwd } from "./index";
|
||||
|
||||
export function problemsToMarkdown(problems: Problems) {
|
||||
var markdown = "";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// Based on https://github.com/stacktracejs/error-stack-parser/blob/master/error-stack-parser.js
|
||||
|
||||
import type { StackFrame as StackFrameType, StackFramePosition, StackFrameScope } from "../../src/api/schema";
|
||||
import type { StackFramePosition, StackFrameScope, StackFrame as StackFrameType } from "../../src/api/schema";
|
||||
|
||||
export class StackFrame implements StackFrameType {
|
||||
function_name: string;
|
||||
|
||||
@@ -1,13 +1,5 @@
|
||||
const UNKNOWN_FUNCTION = "<unknown>";
|
||||
import type {
|
||||
FallbackMessageContainer,
|
||||
JSException,
|
||||
Location,
|
||||
Message,
|
||||
SourceLine,
|
||||
StackFrame,
|
||||
WebsocketMessageBuildFailure,
|
||||
} from "../../src/api/schema";
|
||||
import type { StackFrame } from "../../src/api/schema";
|
||||
|
||||
/**
|
||||
* This parses the different stack traces and puts them into one format
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"jsx": "react",
|
||||
"lib": [
|
||||
"ESNext",
|
||||
"DOM"
|
||||
],
|
||||
"lib": ["ESNext", "DOM"],
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "node",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"allowSyntheticDefaultImports": true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export type * from "./src/inspector/index";
|
||||
export * from "./src/inspector/websocket";
|
||||
export * from "./src/inspector/node-socket";
|
||||
export * from "./src/inspector/websocket";
|
||||
export type * from "./src/protocol/index";
|
||||
export * from "./src/util/preview";
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { spawnSync } from "node:child_process";
|
||||
import { readFileSync, writeFileSync, realpathSync } from "node:fs";
|
||||
import type { Domain, Property, Protocol } from "../src/protocol/schema";
|
||||
import { readFileSync, realpathSync, writeFileSync } from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { Property, Protocol } from "../src/protocol/schema";
|
||||
|
||||
function formatProtocol(protocol: Protocol, extraTs?: string): string {
|
||||
const { name, domains } = protocol;
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import { describe, test, expect } from "bun:test";
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { mkdirSync, mkdtempSync, writeFileSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { mkdtempSync, mkdirSync, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import type { FindTestOptions, ParseTestResult, RunTestResult } from "./runner";
|
||||
import { bunSpawn, nodeSpawn, findTests, runTest, runTests } from "./runner";
|
||||
import { bunSpawn, findTests, nodeSpawn, runTest, runTests } from "./runner";
|
||||
|
||||
describe("runTests()", () => {
|
||||
const cwd = createFs({
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
//
|
||||
// In the future, a version of this will be built-in to Bun.
|
||||
|
||||
import { join } from "node:path";
|
||||
import { readdirSync, writeSync, fsyncSync, symlinkSync, unlinkSync } from "node:fs";
|
||||
import { spawn } from "node:child_process";
|
||||
import { fsyncSync, readdirSync, symlinkSync, unlinkSync, writeSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
|
||||
export type TestInfo = {
|
||||
name: string;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { Assert } from "./qunit.d";
|
||||
import type { BunExpect } from "bun-test";
|
||||
import type { Assert } from "./qunit.d";
|
||||
|
||||
export { $Assert as Assert };
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { DataInit, EachFn, Fn, Hooks, HooksFn, ModuleFn, TestEachFn, TestFn, TestOrEachFn } from "./qunit.d";
|
||||
import { deepEquals, inspect } from "bun";
|
||||
import type { TestContext } from "bun-test";
|
||||
import { inspect, deepEquals } from "bun";
|
||||
import { Assert } from "./assert";
|
||||
import type { DataInit, EachFn, Fn, Hooks, HooksFn, ModuleFn, TestEachFn, TestFn, TestOrEachFn } from "./qunit.d";
|
||||
|
||||
type Status = "todo" | "skip" | "only" | undefined;
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import { a, br, code, count, duration, h, table, ul } from "html";
|
||||
import { appendFileSync } from "node:fs";
|
||||
import { resolve, basename } from "node:path";
|
||||
import { a, h, count, duration, table, br, ul, code } from "html";
|
||||
import { TestError, TestStatus, printTest } from "runner";
|
||||
import { runTests } from "runner";
|
||||
import { basename, resolve } from "node:path";
|
||||
import { TestError, TestStatus, printTest, runTests } from "runner";
|
||||
|
||||
const cwd = resolve(import.meta.dir, "..", "..", "..", "test");
|
||||
const filters = process.argv.slice(2); // TODO
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import { readFileSync, existsSync, appendFileSync } from "node:fs";
|
||||
import { spawnSync } from "node:child_process";
|
||||
import { TestSummary, TestError, TestStatus, TestFile, Test, printTest } from "runner";
|
||||
import { runTests } from "runner";
|
||||
import { a, br, code, count, details, duration, h, percent, table, ul } from "html";
|
||||
import { spawnSync } from "node:child_process";
|
||||
import { appendFileSync, existsSync, readFileSync } from "node:fs";
|
||||
import { Test, TestError, TestFile, TestStatus, TestSummary, printTest, runTests } from "runner";
|
||||
|
||||
const [filter] = process.argv.slice(2);
|
||||
const packagesText = readFileSync(resolve("resources/packages.json"), "utf8");
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import * as action from "@actions/core";
|
||||
import { spawn, spawnSync } from "child_process";
|
||||
import { rmSync, writeFileSync, readFileSync, mkdirSync, openSync, closeSync } from "fs";
|
||||
import { closeSync, mkdirSync, openSync, readFileSync, rmSync, writeFileSync } from "fs";
|
||||
import { readdirSync } from "node:fs";
|
||||
import { resolve, basename } from "node:path";
|
||||
import { basename, resolve } from "node:path";
|
||||
import { cpus, hostname, tmpdir, totalmem, userInfo } from "os";
|
||||
import PQueue from "p-queue";
|
||||
import { join, normalize, posix, relative } from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import PQueue from "p-queue";
|
||||
|
||||
const run_start = new Date();
|
||||
const TIMEOUT_DURATION = 1000 * 60 * 5;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { Expect, test, describe, beforeAll, beforeEach, afterAll, afterEach } from "bun:test";
|
||||
import type { afterAll, afterEach, beforeAll, beforeEach, describe, Expect, test } from "bun:test";
|
||||
|
||||
export type BunExpect = (value: unknown) => Expect;
|
||||
export type BunDescribe = typeof describe;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { Server, ServerWebSocket } from "bun";
|
||||
import { AwsClient } from "aws4fetch";
|
||||
import type { Server, ServerWebSocket } from "bun";
|
||||
|
||||
type Lambda = {
|
||||
fetch: (request: Request, server: Server) => Promise<Response | undefined>;
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
process.stdout.getWindowSize = () => [80, 80];
|
||||
process.stderr.getWindowSize = () => [80, 80];
|
||||
|
||||
import { createReadStream, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { Command, Flags } from "@oclif/core";
|
||||
import JSZip from "jszip";
|
||||
import { createReadStream, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
|
||||
export class BuildCommand extends Command {
|
||||
static summary = "Build a custom Lambda layer for Bun.";
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { describe, it, expect } from "bun:test";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import { SveltePlugin } from "./index";
|
||||
|
||||
describe("SveltePlugin", () => {
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import type { BunPlugin, BuildConfig, OnLoadResult } from "bun";
|
||||
import type { BuildConfig, BunPlugin, OnLoadResult } from "bun";
|
||||
import { basename } from "node:path";
|
||||
import { compile, compileModule } from "svelte/compiler";
|
||||
import {
|
||||
getBaseCompileOptions,
|
||||
getBaseModuleCompileOptions,
|
||||
hash,
|
||||
validateOptions,
|
||||
type SvelteOptions,
|
||||
hash,
|
||||
getBaseModuleCompileOptions,
|
||||
} from "./options";
|
||||
|
||||
const kEmptyObject = Object.create(null);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { describe, beforeAll, it, expect } from "bun:test";
|
||||
import type { BuildConfig } from "bun";
|
||||
import { beforeAll, describe, expect, it } from "bun:test";
|
||||
import type { CompileOptions } from "svelte/compiler";
|
||||
|
||||
import { getBaseCompileOptions, validateOptions, type SvelteOptions } from "./options";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { strict as assert } from "node:assert";
|
||||
import { type BuildConfig } from "bun";
|
||||
import { strict as assert } from "node:assert";
|
||||
import type { CompileOptions, ModuleCompileOptions } from "svelte/compiler";
|
||||
|
||||
type OverrideCompileOptions = Pick<CompileOptions, "customElement" | "runes" | "modernAst" | "namespace">;
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { describe, beforeAll, it, expect, afterEach, afterAll } from "bun:test";
|
||||
import path from "node:path";
|
||||
import type { BuildOutput } from "bun";
|
||||
import { afterAll, afterEach, beforeAll, describe, expect, it } from "bun:test";
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { render } from "svelte/server";
|
||||
import { SveltePlugin } from "../src";
|
||||
import type { BuildOutput } from "bun";
|
||||
|
||||
const fixturePath = (...segs: string[]) => path.join(import.meta.dirname, "fixtures", ...segs);
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { test, expect } from "bun:test";
|
||||
import { expect, test } from "bun:test";
|
||||
import YamlPlugin from ".";
|
||||
import data from "./data.yml";
|
||||
|
||||
test("yaml loader - no plugin", async () => {
|
||||
expect(async () => {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { importBun, optimizeBun } from "../src/npm/install";
|
||||
import { execFileSync } from "child_process";
|
||||
import { importBun } from "../src/npm/install";
|
||||
|
||||
importBun()
|
||||
.then(bun => {
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { getRelease, uploadAsset } from "../src/github";
|
||||
import { fetch } from "../src/fetch";
|
||||
import { spawn } from "../src/spawn";
|
||||
import { confirm, exit, log, stdin, warn } from "../src/console";
|
||||
import { hash, join, rm, tmp, write, basename, blob } from "../src/fs";
|
||||
import { fetch } from "../src/fetch";
|
||||
import { basename, blob, hash, join, rm, tmp, write } from "../src/fs";
|
||||
import { getRelease, uploadAsset } from "../src/github";
|
||||
import { spawn } from "../src/spawn";
|
||||
|
||||
const [tag, ...paths] = process.argv.slice(2);
|
||||
|
||||
|
||||
@@ -1,20 +1,19 @@
|
||||
import { join, copy, exists, chmod, write, writeJson } from "../src/fs";
|
||||
import { mkdtemp } from "fs/promises";
|
||||
import { rmSync, mkdirSync } from "fs";
|
||||
import { tmpdir } from "os";
|
||||
import { dirname } from "path";
|
||||
import { fetch } from "../src/fetch";
|
||||
import { spawn } from "../src/spawn";
|
||||
import type { Platform } from "../src/platform";
|
||||
import { platforms } from "../src/platform";
|
||||
import { getSemver } from "../src/github";
|
||||
import { getRelease } from "../src/github";
|
||||
import { expect } from "bun:test";
|
||||
import type { BuildOptions } from "esbuild";
|
||||
import { buildSync, formatMessagesSync } from "esbuild";
|
||||
import { mkdirSync, rmSync } from "fs";
|
||||
import { mkdtemp } from "fs/promises";
|
||||
import type { JSZipObject } from "jszip";
|
||||
import { loadAsync } from "jszip";
|
||||
import { debug, log, error } from "../src/console";
|
||||
import { expect } from "bun:test";
|
||||
import { tmpdir } from "os";
|
||||
import { dirname } from "path";
|
||||
import { debug, error, log } from "../src/console";
|
||||
import { fetch } from "../src/fetch";
|
||||
import { chmod, copy, exists, join, write, writeJson } from "../src/fs";
|
||||
import { getRelease, getSemver } from "../src/github";
|
||||
import type { Platform } from "../src/platform";
|
||||
import { platforms } from "../src/platform";
|
||||
import { spawn } from "../src/spawn";
|
||||
|
||||
const module = "bun";
|
||||
const owner = "@oven";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { AwsClient } from "aws4fetch";
|
||||
import { getBuild, getRelease, getSemver, getSha } from "../src/github";
|
||||
import { join, tmp } from "../src/fs";
|
||||
import { getBuild, getRelease, getSemver, getSha } from "../src/github";
|
||||
|
||||
// The source of truth for the git sha is what's in the local build, extracted from features.json
|
||||
// NOT the git tag revision.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { isatty } from "tty";
|
||||
import { createInterface } from "readline";
|
||||
import { isatty } from "tty";
|
||||
|
||||
export const isAction = !!process.env["GITHUB_ACTION"];
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import path from "path";
|
||||
import crypto from "crypto";
|
||||
import fs from "fs";
|
||||
import os from "os";
|
||||
import crypto from "crypto";
|
||||
import path from "path";
|
||||
import { debug } from "./console";
|
||||
|
||||
export function join(...paths: (string | string[])[]): string {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { Endpoints, RequestParameters, Route } from "@octokit/types";
|
||||
import { Octokit } from "octokit";
|
||||
import { debug, error, log, warn } from "./console";
|
||||
import { fetch } from "./fetch";
|
||||
import { debug, log, warn, error } from "./console";
|
||||
|
||||
const [owner, repo] = process.env["GITHUB_REPOSITORY"]?.split("/") ?? ["oven-sh", "bun"];
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { fetch } from "../fetch";
|
||||
import { spawn } from "../spawn";
|
||||
import { chmod, join, rename, rm, tmp, write } from "../fs";
|
||||
import { unzipSync } from "zlib";
|
||||
import type { Platform } from "../platform";
|
||||
import { os, arch, abi, supportedPlatforms } from "../platform";
|
||||
import { debug, error } from "../console";
|
||||
import { fetch } from "../fetch";
|
||||
import { chmod, join, rename, rm, tmp, write } from "../fs";
|
||||
import type { Platform } from "../platform";
|
||||
import { abi, arch, os, supportedPlatforms } from "../platform";
|
||||
import { spawn } from "../spawn";
|
||||
|
||||
declare const version: string;
|
||||
declare const module: string;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { spawn } from "./spawn";
|
||||
import { exists, read } from "./fs";
|
||||
import { debug } from "./console";
|
||||
import { exists, read } from "./fs";
|
||||
import { spawn } from "./spawn";
|
||||
|
||||
export const os = process.platform;
|
||||
|
||||
|
||||
@@ -10,8 +10,5 @@
|
||||
"strict": true,
|
||||
"resolveJsonModule": true
|
||||
},
|
||||
"include": [
|
||||
"src",
|
||||
"scripts"
|
||||
]
|
||||
"include": ["src", "scripts"]
|
||||
}
|
||||
|
||||
@@ -2,13 +2,6 @@ import { join } from "node:path";
|
||||
|
||||
import pkg from "../package.json";
|
||||
|
||||
const BUN_VERSION = (
|
||||
process.env.BUN_VERSION ||
|
||||
Bun.version ||
|
||||
process.versions.bun
|
||||
).replace(/^.*v/, "");
|
||||
const BUN_VERSION = (process.env.BUN_VERSION || Bun.version || process.versions.bun).replace(/^.*v/, "");
|
||||
|
||||
Bun.write(
|
||||
join(import.meta.dir, "..", "package.json"),
|
||||
JSON.stringify({ version: BUN_VERSION, ...pkg }, null, 2),
|
||||
);
|
||||
Bun.write(join(import.meta.dir, "..", "package.json"), JSON.stringify({ version: BUN_VERSION, ...pkg }, null, 2));
|
||||
|
||||
@@ -1,15 +1,19 @@
|
||||
# libusockets.h
|
||||
|
||||
This is the only header you include. Following documentation has been extracted from this header. It may be outdated, go read the header directly for up-to-date documentation.
|
||||
|
||||
These interfaces are "beta" and subject to smaller changes. Last updated **2019-06-11**.
|
||||
|
||||
# A quick note on compilation
|
||||
|
||||
Major differences in performance can be seen based solely on compiler and/or linker options. Important is to compile with some kind of link-time-optimization mode, preferably with static linking of this library such as including all C source files in the user program build step itself. Proper compilation and linking can lead to over 25% performance increase (in my case, YMMV).
|
||||
|
||||
# Cross-platform benchmarks
|
||||
|
||||
While the library is compatible with many platforms, Linux in particular is the preferred production system. Benchmarking has been done on Windows, Linux and macOS where Linux clearly stood out as significant winner. Windows performed about half that of Linux and macOS was not much better than Windows. Do run your production systems on Linux.
|
||||
|
||||
# us_loop_t - The root per-thread resource and callback emitter
|
||||
|
||||
```c
|
||||
/* Returns a new event loop with user data extension */
|
||||
WIN32_EXPORT struct us_loop_t *us_create_loop(void *hint, void (*wakeup_cb)(struct us_loop_t *loop), void (*pre_cb)(struct us_loop_t *loop), void (*post_cb)(struct us_loop_t *loop), unsigned int ext_size);
|
||||
@@ -35,6 +39,7 @@ WIN32_EXPORT long long us_loop_iteration_number(struct us_loop_t *loop);
|
||||
```
|
||||
|
||||
# us_socket_context_t - The per-behavior group of networking sockets
|
||||
|
||||
```c
|
||||
struct us_socket_context_options_t {
|
||||
const char *key_file_name;
|
||||
@@ -87,6 +92,7 @@ WIN32_EXPORT struct us_socket_context_t *us_create_child_socket_context(int ssl,
|
||||
```
|
||||
|
||||
# us_socket_t - The network connection (SSL or non-SSL)
|
||||
|
||||
```c
|
||||
/* Write up to length bytes of data. Returns actual bytes written. Will call the on_writable callback of active socket context on failure to write everything off in one go.
|
||||
* Set hint msg_more if you have more immediate data to write. */
|
||||
@@ -145,6 +151,7 @@ WIN32_EXPORT struct us_loop_t *us_timer_loop(struct us_timer_t *t);
|
||||
```
|
||||
|
||||
## us_poll_t - The eventing foundation of a socket or anything that has a file descriptor
|
||||
|
||||
```c
|
||||
/* A fallthrough poll does not keep the loop running, it falls through */
|
||||
WIN32_EXPORT struct us_poll_t *us_create_poll(struct us_loop_t *loop, int fallthrough, unsigned int ext_size);
|
||||
|
||||
@@ -12,6 +12,7 @@ It comes with built-in pub/sub support, URL routing, TLS 1.3, SNI, IPv6, permess
|
||||
Unlike other "pub/sub brokers", µWS does not assume or push any particular application protocol but only operates over raw, standard WebSockets. You need nothing but a standards compliant web browser and a handful of standards compliant JavaScript to communicate with it. No particular client library is needed or enforced - this unlike inadequate solutions like Socket.IO where you end up locked to a set of proprietary non-standard protocols with horrible performance.
|
||||
|
||||
### Performant
|
||||
|
||||
The implementation is header-only C++17 (but examples use C++20 features for brevity and elegance!), cross-platform and compiles down to a tiny binary of a handful kilobytes.
|
||||
It depends on µSockets, which is a standard C project for Linux, macOS & Windows.
|
||||
|
||||
@@ -20,6 +21,7 @@ Performance wise you can expect to outperform, or equal, just about anything sim
|
||||
We've [openly presented](https://medium.com/swlh/100k-secure-websockets-with-raspberry-pi-4-1ba5d2127a23) detailed cases where a single Raspberry Pi 4 can serve more than 100k very active TLS 1.3 WebSockets, simultaneously, with excellent stability. This is entirely impossible with the vast majority of alternative solutions. Most solutions cramp up and become unreliable at a tiny fraction of this load, on such a limited hardware. We also have measurements where we [serve 12x the HTTP requests per second](https://levelup.gitconnected.com/serving-100k-requests-second-from-a-fanless-raspberry-pi-4-over-ethernet-fdd2c2e05a1e) as compared to Node.js.
|
||||
|
||||
### Simple to use
|
||||
|
||||
Another goal of the project is minimalism, simplicity and elegance.
|
||||
Design wise it follows an ExpressJS-like interface where you attach callbacks to different URL routes.
|
||||
This way you can easily build complete REST/WebSocket services in a few lines of code.
|
||||
@@ -29,23 +31,26 @@ Boilerplate logic like heartbeat timeouts, backpressure handling, ping/pong and
|
||||
The project is async only and runs local to one thread. You scale it as individual threads much like Node.js scales as individual processes. That is, the implementation only sees a single thread and is not thread-safe. There are simple ways to do threading via async delegates though, if you really need to.
|
||||
|
||||
## Compiling
|
||||
|
||||
µWebSockets is 100% standard header-only C++17 - it compiles on any platform. However, it depends on µSockets in all cases, which is platform-specific C code that runs on Linux, Windows and macOS.
|
||||
|
||||
There are a few compilation flags for µSockets (see its documentation), but common between µSockets and µWebSockets flags are as follows:
|
||||
|
||||
* LIBUS_NO_SSL - disable OpenSSL dependency/functionality for uSockets and uWebSockets builds
|
||||
* UWS_NO_ZLIB - disable Zlib dependency/functionality for uWebSockets
|
||||
- LIBUS_NO_SSL - disable OpenSSL dependency/functionality for uSockets and uWebSockets builds
|
||||
- UWS_NO_ZLIB - disable Zlib dependency/functionality for uWebSockets
|
||||
|
||||
You can use the Makefile on Linux and macOS. It is simple to use and builds the examples for you. `WITH_OPENSSL=1 make` builds all examples with SSL enabled. Examples will fail to listen if cert and key cannot be found, so make sure to specify a path that works for you.
|
||||
|
||||
## User manual
|
||||
|
||||
### uWS::App & uWS::SSLApp
|
||||
|
||||
You begin your journey by constructing an "App". Either an SSL-app or a regular TCP-only App. The uWS::SSLApp constructor takes a struct holding SSL options such as cert and key. Interfaces for both apps are identical, so let's call them both "App" from now on.
|
||||
|
||||
Apps follow the builder pattern, member functions return the App so that you can chain calls.
|
||||
|
||||
### App.get, post, put, [...] and any routes
|
||||
|
||||
You attach behavior to "URL routes". A lambda is paired with a "method" (Http method that is) and a pattern (the URL matching pattern).
|
||||
|
||||
Methods are many, but most common are probably get & post. They all have the same signature, let's look at one example:
|
||||
@@ -67,40 +72,44 @@ Data that you capture in a res follows RAII and is move-only so you can properly
|
||||
The "any" route will match any method.
|
||||
|
||||
#### Pattern matching
|
||||
|
||||
Routes are matched in **order of specificity**, not by the order you register them:
|
||||
|
||||
* Highest priority - static routes, think "/hello/this/is/static".
|
||||
* Middle priority - parameter routes, think "/candy/:kind", where value of :kind is retrieved by req.getParameter(0).
|
||||
* Lowest priority - wildcard routes, think "/hello/*".
|
||||
- Highest priority - static routes, think "/hello/this/is/static".
|
||||
- Middle priority - parameter routes, think "/candy/:kind", where value of :kind is retrieved by req.getParameter(0).
|
||||
- Lowest priority - wildcard routes, think "/hello/\*".
|
||||
|
||||
In other words, the more specific a route is, the earlier it will match. This allows you to define wildcard routes that match a wide range of URLs and then "carve" out more specific behavior from that.
|
||||
|
||||
"Any" routes, those who match any HTTP method, will match with lower priority than routes which specify their specific HTTP method (such as GET) if and only if the two routes otherwise are equally specific.
|
||||
|
||||
#### Middlewares
|
||||
|
||||
A very commonly asked question is how to achieve something like middlewares. We don't support middlewares as something built into the router itself. Partly because routes cannot pass data to other routes, partly because the HttpRequest object being stack-allocated and only valid in one single callback invocation, but most importantly - you can **easily** achieve the same function-chaining that is middlewares by instead using simple high-order functions and functional programming. There are tons of examples of this under Discussions (since it is a commonly asked question). A middleware isn't really something that has to be built-in to the server library itself, it really is just **a regular function**. By passing functions to other functions you can build chains of behaviors in very elegant and efficient ways.
|
||||
|
||||
Whether this library should keep a set of commonly used functions is another question - we might do that in the future and we might add an example of its usage but right now there is nothing like this provided. We aim to provide an easy to use server implementation that you can build things on. Not complete business logic puzzle pieces.
|
||||
|
||||
#### Streaming data
|
||||
|
||||
You should never call res.end(huge buffer). res.end guarantees sending so backpressure will probably spike. Instead you should use res.tryEnd to stream huge data part by part. Use in combination with res.onWritable and res.onAborted callbacks.
|
||||
|
||||
Tip: Check out the JavaScript project, it has many useful examples of async streaming of huge data.
|
||||
|
||||
#### Corking
|
||||
|
||||
It is very important to understand the corking mechanism, as that is responsible for efficiently formatting, packing and sending data. Without corking your app will still work reliably, but can perform very bad and use excessive networking. In some cases the performance can be dreadful without proper corking.
|
||||
|
||||
That's why your sockets will be corked by default in most simple cases, including all of the examples provided. However there are cases where default corking cannot happen automatically.
|
||||
|
||||
* Whenever your registered business logic (your callbacks) are called from the library, such as when receiving a message or when a socket opens, you'll be corked by default. Whatever you do with the socket inside of that callback will be efficient and properly corked.
|
||||
- Whenever your registered business logic (your callbacks) are called from the library, such as when receiving a message or when a socket opens, you'll be corked by default. Whatever you do with the socket inside of that callback will be efficient and properly corked.
|
||||
|
||||
* If you have callbacks registered to some other library, say libhiredis, those callbacks will not be called with corked sockets (how could **we** know when to cork the socket if we don't control the third-party library!?).
|
||||
- If you have callbacks registered to some other library, say libhiredis, those callbacks will not be called with corked sockets (how could **we** know when to cork the socket if we don't control the third-party library!?).
|
||||
|
||||
* Only one single socket can be corked at any point in time (isolated per thread, of course). It is efficient to cork-and-uncork.
|
||||
- Only one single socket can be corked at any point in time (isolated per thread, of course). It is efficient to cork-and-uncork.
|
||||
|
||||
* Whenever your callback is a coroutine, such as the JavaScript async/await, automatic corking can only happen in the very first portion of the coroutine (consider await a separator which essentially cuts the coroutine into smaller segments). Only the first "segment" of the coroutine will be called from µWS, the following async segments will be called by the JavaScript runtime at a later point in time and will thus not be under our control with default corking enabled.
|
||||
- Whenever your callback is a coroutine, such as the JavaScript async/await, automatic corking can only happen in the very first portion of the coroutine (consider await a separator which essentially cuts the coroutine into smaller segments). Only the first "segment" of the coroutine will be called from µWS, the following async segments will be called by the JavaScript runtime at a later point in time and will thus not be under our control with default corking enabled.
|
||||
|
||||
* Corking is important even for calls which seem to be "atomic" and only send one chunk. res->end, res->tryEnd, res->writeStatus, res->writeHeader will most likely send multiple chunks of data and is very important to properly cork.
|
||||
- Corking is important even for calls which seem to be "atomic" and only send one chunk. res->end, res->tryEnd, res->writeStatus, res->writeHeader will most likely send multiple chunks of data and is very important to properly cork.
|
||||
|
||||
You can make sure corking is enabled, even for cases where default corking would be enabled, by wrapping whatever sending function calls in a lambda like so:
|
||||
|
||||
@@ -115,6 +124,7 @@ The above res->end call will actually call three separate send functions; res->w
|
||||
Keep this in mind, corking is by far the single most important performance trick to use. Even when streaming huge amounts of data it can be useful to cork. At least in the very tip of the response, as that holds the headers and status.
|
||||
|
||||
### The App.ws route
|
||||
|
||||
WebSocket "routes" are registered similarly, but not identically.
|
||||
|
||||
Every websocket route has the same pattern and pattern matching as for Http, but instead of one single callback you have a whole set of them, here's an example:
|
||||
@@ -155,39 +165,47 @@ WebSocket routes specify a user data type that should be used to keep per-websoc
|
||||
which should belong to the websocket by putting the pointer and the user data in a std::map. That's wrong! Don't do that!
|
||||
|
||||
#### Use the WebSocket.getUserData() feature
|
||||
|
||||
You should use the provided user data feature to store and attach any per-socket user data. Going from user data to WebSocket is possible if you make your user data hold a pointer to WebSocket, and hook things up in the WebSocket open handler. Your user data memory is valid while your WebSocket is.
|
||||
|
||||
If you want to create something more elaborate you could have the user data hold a pointer to some dynamically allocated memory block that keeps a boolean whether the WebSocket is still valid or not. Sky is the limit here, you should never need any std::map for this.
|
||||
|
||||
#### WebSockets are valid from open to close
|
||||
|
||||
All given WebSocket pointers are guaranteed to live from open event (where you got your WebSocket) until close event is called. So is the user data memory. One open event will always end in exactly one close event, they are 1-to-1 and will always be balanced no matter what. Use them to drive your RAII data types, they can be seen as constructor and destructor.
|
||||
|
||||
Message events will never emit outside of open/close. Calling WebSocket.close or WebSocket.end will immediately call the close handler.
|
||||
|
||||
#### Backpressure in websockets
|
||||
|
||||
Similarly to for Http, methods such as ws.send(...) can cause backpressure. Make sure to check ws.getBufferedAmount() before sending, and check the return value of ws.send before sending any more data. WebSockets do not have .onWritable, but instead make use of the .drain handler of the websocket route handler.
|
||||
|
||||
Inside of .drain event you should check ws.getBufferedAmount(), it might have drained, or even increased. Most likely drained but don't assume that it has, .drain event is only a hint that it has changed.
|
||||
|
||||
#### Ping/pongs "heartbeats"
|
||||
|
||||
The library will automatically send pings to clients according to the `idleTimeout` specified. If you set idleTimeout = 120 seconds a ping will go out a few seconds before this timeout unless the client has sent something to the server recently. If the client responds to the ping, the socket will stay open. When client fails to respond in time, the socket will be forcefully closed and the close event will trigger. On disconnect all resources are freed, including subscriptions to topics and any backpressure. You can easily let the browser reconnect using 3-lines-or-so of JavaScript if you want to.
|
||||
|
||||
#### Backpressure
|
||||
|
||||
Sending on a WebSocket can build backpressure. WebSocket::send returns an enum of BACKPRESSURE, SUCCESS or DROPPED. When send returns BACKPRESSURE it means you should stop sending data until the drain event fires and WebSocket::getBufferedAmount() returns a reasonable amount of bytes. But in case you specified a maxBackpressure when creating the WebSocketContext, this limit will automatically be enforced. That means an attempt at sending a message which would result in too much backpressure will be canceled and send will return DROPPED. This means the message was dropped and will not be put in the queue. maxBackpressure is an essential setting when using pub/sub as a slow receiver otherwise could build up a lot of backpressure. By setting maxBackpressure the library will automatically manage an enforce a maximum allowed backpressure per socket for you.
|
||||
|
||||
#### Threading
|
||||
|
||||
The library is single threaded. You cannot, absolutely not, mix threads. A socket created from an App on thread 1 cannot be used in any way from thread 2. The only function in the whole entire library which is thread-safe and can be used from any thread is Loop:defer. Loop::defer takes a function (such as a lambda with data) and defers the execution of said function until the specified loop's thread is ready to execute the function in a single-threaded fashion on correct thread. So in case you want to publish a message under a topic, or send on some other thread's sockets you can, but it requires a bit of indirection. You should aim for having as isolated apps and threads as possible.
|
||||
|
||||
#### Settings
|
||||
|
||||
Compression (permessage-deflate) has three main modes; uWS::DISABLED, uWS::SHARED_COMPRESSOR and any of the uWS::DEDICATED_COMPRESSOR_xKB. Disabled and shared options require no memory, while dedicated compressor requires the amount of memory you selected. For instance, uWS::DEDICATED_COMPRESSOR_4KB adds an overhead of 4KB per WebSocket while uWS::DEDICATED_COMPRESSOR_256KB adds - you guessed it - 256KB!
|
||||
|
||||
Compressing using shared means that every WebSocket message is an isolated compression stream, it does not have a sliding compression window, kept between multiple send calls like the dedicated variants do.
|
||||
|
||||
You probably want shared compressor if dealing with larger JSON messages, or 4kb dedicated compressor if dealing with smaller JSON messages and if doing binary messaging you probably want to disable it completely.
|
||||
|
||||
* idleTimeout is roughly the amount of seconds that may pass between messages. Being idle for more than this, and the connection is severed. This means you should make your clients send small ping messages every now and then, to keep the connection alive. You can also make the server send ping messages but I would definitely put that labor on the client side. (outdated text - this is not entirely true anymore. The server will automatically send pings in case it needs to).
|
||||
- idleTimeout is roughly the amount of seconds that may pass between messages. Being idle for more than this, and the connection is severed. This means you should make your clients send small ping messages every now and then, to keep the connection alive. You can also make the server send ping messages but I would definitely put that labor on the client side. (outdated text - this is not entirely true anymore. The server will automatically send pings in case it needs to).
|
||||
|
||||
### Listening on a port
|
||||
|
||||
Once you have defined your routes and their behavior, it is time to start listening for new connections. You do this by calling
|
||||
|
||||
```c++
|
||||
@@ -199,6 +217,7 @@ App.listen(port, [](auto *listenSocket) {
|
||||
Canceling listening is done with the uSockets function call `us_listen_socket_close`.
|
||||
|
||||
### App.run and fallthrough
|
||||
|
||||
When you are done and want to enter the event loop, you call, once and only once, App.run.
|
||||
This will block the calling thread until "fallthrough". The event loop will block until no more async work is scheduled, just like for Node.js.
|
||||
|
||||
@@ -231,6 +250,7 @@ If you want to, you can simply take the previous example, put it inside of a few
|
||||
Recent Node.js versions may scale using multiple threads, via the new Worker threads support. Scaling using that feature is identical to scaling using multiple threads in C++.
|
||||
|
||||
### Compression
|
||||
|
||||
We aren't as careful with resources as we used to be. Just look at, how many web developers represent time - it is not uncommon for web developers to send an entire textual representation of time as 30-something actual letters inside a JSON document with an actual textual key. This is just awful. We have had standardized, time-zone neutral representation of time in binary, efficient, 4-byte (or more commonly the 8 byte variant) representation since the 1970s. It's called unix timestamp and is an elegant and efficient way of representing time-zone neutral time down to the seconds.
|
||||
|
||||
This is just an example of how we have regressed in our algorithmic thinking. Today it is common to use textual representations such as bloated JSON to represent data, even though most of that bloat is obvious repetitions and inefficient in nature. But we don't care because we have compression. True, even the most bloated source format can be compressed down to a small payload with few repetitions - however - this comes at TREMENDOUS cost.
|
||||
@@ -248,4 +268,5 @@ It is true that we can do more permessage-deflate messages/second than many othe
|
||||
So you might say - hey - that's too complex. Well build an SDK for your users then. Just wrap that "complex" protocol up in a JavaScript library that internally knows about this palette and exposes only simple-to-use functions for the end user. It's not that hard of a problem to solve.
|
||||
|
||||
##### What about TLS/SSL then? I still have to encrypt!
|
||||
|
||||
TLS is nothing like compression. With TLS 1.3 you're still looking at around 80% performance retention over non-TLS. This because TLS is block based and efficiently maps to modern CPUs. Modern CPUs also have hardware offloads for this. It's not that demanding to encrypt traffic using modern encryption standards. Compression is by far the most CPU-demanding thing you can do with your connection, and it requires TONS of per-socket memory.
|
||||
|
||||
@@ -3,4 +3,4 @@
|
||||
"title": "JSON schema for bun.lock files.",
|
||||
"allowTrailingCommas": true,
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,69 +1,129 @@
|
||||
.mtk1 { color: #cccccc; }
|
||||
.mtk2 { color: #1f1f1f; }
|
||||
.mtk3 { color: #d4d4d4; }
|
||||
.mtk4 { color: #000080; }
|
||||
.mtk5 { color: #6a9955; }
|
||||
.mtk6 { color: #569cd6; }
|
||||
.mtk7 { color: #b5cea8; }
|
||||
.mtk8 { color: #646695; }
|
||||
.mtk9 { color: #d7ba7d; }
|
||||
.mtk10 { color: #9cdcfe; }
|
||||
.mtk11 { color: #f44747; }
|
||||
.mtk12 { color: #ce9178; }
|
||||
.mtk13 { color: #6796e6; }
|
||||
.mtk14 { color: #808080; }
|
||||
.mtk15 { color: #d16969; }
|
||||
.mtk16 { color: #dcdcaa; }
|
||||
.mtk17 { color: #4ec9b0; }
|
||||
.mtk18 { color: #c586c0; }
|
||||
.mtk19 { color: #4fc1ff; }
|
||||
.mtk20 { color: #c8c8c8; }
|
||||
.mtk21 { color: #606060; }
|
||||
.mtk22 { color: #ffffff; }
|
||||
.mtk23 { color: #cd9731; }
|
||||
.mtk24 { color: #b267e6; }
|
||||
.mtki { font-style: italic; }
|
||||
.mtkb { font-weight: bold; }
|
||||
.mtku { text-decoration: underline; text-underline-position: under; }
|
||||
.mtks { text-decoration: line-through; }
|
||||
.mtks.mtku { text-decoration: underline line-through; text-underline-position: under; }
|
||||
.mtk1 {
|
||||
color: #cccccc;
|
||||
}
|
||||
.mtk2 {
|
||||
color: #1f1f1f;
|
||||
}
|
||||
.mtk3 {
|
||||
color: #d4d4d4;
|
||||
}
|
||||
.mtk4 {
|
||||
color: #000080;
|
||||
}
|
||||
.mtk5 {
|
||||
color: #6a9955;
|
||||
}
|
||||
.mtk6 {
|
||||
color: #569cd6;
|
||||
}
|
||||
.mtk7 {
|
||||
color: #b5cea8;
|
||||
}
|
||||
.mtk8 {
|
||||
color: #646695;
|
||||
}
|
||||
.mtk9 {
|
||||
color: #d7ba7d;
|
||||
}
|
||||
.mtk10 {
|
||||
color: #9cdcfe;
|
||||
}
|
||||
.mtk11 {
|
||||
color: #f44747;
|
||||
}
|
||||
.mtk12 {
|
||||
color: #ce9178;
|
||||
}
|
||||
.mtk13 {
|
||||
color: #6796e6;
|
||||
}
|
||||
.mtk14 {
|
||||
color: #808080;
|
||||
}
|
||||
.mtk15 {
|
||||
color: #d16969;
|
||||
}
|
||||
.mtk16 {
|
||||
color: #dcdcaa;
|
||||
}
|
||||
.mtk17 {
|
||||
color: #4ec9b0;
|
||||
}
|
||||
.mtk18 {
|
||||
color: #c586c0;
|
||||
}
|
||||
.mtk19 {
|
||||
color: #4fc1ff;
|
||||
}
|
||||
.mtk20 {
|
||||
color: #c8c8c8;
|
||||
}
|
||||
.mtk21 {
|
||||
color: #606060;
|
||||
}
|
||||
.mtk22 {
|
||||
color: #ffffff;
|
||||
}
|
||||
.mtk23 {
|
||||
color: #cd9731;
|
||||
}
|
||||
.mtk24 {
|
||||
color: #b267e6;
|
||||
}
|
||||
.mtki {
|
||||
font-style: italic;
|
||||
}
|
||||
.mtkb {
|
||||
font-weight: bold;
|
||||
}
|
||||
.mtku {
|
||||
text-decoration: underline;
|
||||
text-underline-position: under;
|
||||
}
|
||||
.mtks {
|
||||
text-decoration: line-through;
|
||||
}
|
||||
.mtks.mtku {
|
||||
text-decoration: underline line-through;
|
||||
text-underline-position: under;
|
||||
}
|
||||
|
||||
.bunlock {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
}
|
||||
|
||||
.lines {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
width: 30px;
|
||||
margin-right: 15px;
|
||||
text-align: right;
|
||||
opacity: 0.5;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
width: 30px;
|
||||
margin-right: 15px;
|
||||
text-align: right;
|
||||
opacity: 0.5;
|
||||
|
||||
font-size: var(--vscode-editor-font-size);
|
||||
font-weight: var(--vscode-editor-font-weight);
|
||||
font-family: var(--vscode-editor-font-family);
|
||||
background-color: var(--vscode-editor-background);
|
||||
font-size: var(--vscode-editor-font-size);
|
||||
font-weight: var(--vscode-editor-font-weight);
|
||||
font-family: var(--vscode-editor-font-family);
|
||||
background-color: var(--vscode-editor-background);
|
||||
}
|
||||
|
||||
.lines > span {
|
||||
margin-top: 1px;
|
||||
margin-bottom: 1px;
|
||||
margin-top: 1px;
|
||||
margin-bottom: 1px;
|
||||
}
|
||||
|
||||
code {
|
||||
white-space: pre;
|
||||
white-space: pre;
|
||||
|
||||
font-size: var(--vscode-editor-font-size);
|
||||
font-weight: var(--vscode-editor-font-weight);
|
||||
font-family: var(--vscode-editor-font-family);
|
||||
background-color: var(--vscode-editor-background);
|
||||
font-size: var(--vscode-editor-font-size);
|
||||
font-weight: var(--vscode-editor-font-weight);
|
||||
font-family: var(--vscode-editor-font-family);
|
||||
background-color: var(--vscode-editor-background);
|
||||
}
|
||||
|
||||
code > span {
|
||||
display: inline-block;
|
||||
width: 100%;
|
||||
margin-top: 1px;
|
||||
margin-bottom: 1px;
|
||||
display: inline-block;
|
||||
width: 100%;
|
||||
margin-top: 1px;
|
||||
margin-bottom: 1px;
|
||||
}
|
||||
|
||||
@@ -1,17 +1,16 @@
|
||||
"use client";
|
||||
|
||||
|
||||
import { useState } from "react";
|
||||
|
||||
function App() {
|
||||
const [count, setCount] = useState(null);
|
||||
const [count, setCount] = useState(null);
|
||||
|
||||
return (
|
||||
<>
|
||||
return (
|
||||
<>
|
||||
{/* @ts-expect-error */}
|
||||
<button onClick={() => setCount(count => count.charAt(0))}>count is {count}</button>
|
||||
</>
|
||||
);
|
||||
<button onClick={() => setCount(count => count.charAt(0))}>count is {count}</button>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default App;
|
||||
export default App;
|
||||
|
||||
@@ -32,5 +32,5 @@ describe("example", () => {
|
||||
test("can run with special chars :)", () => {
|
||||
// if this test runs, it's a success.
|
||||
// a failure is if it's either skipped or fails the runner
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
@@ -379,4 +379,4 @@
|
||||
"../bun-debug-adapter-protocol",
|
||||
"../bun-inspector-protocol"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -67,7 +67,7 @@ export function registerDebugger(context: vscode.ExtensionContext, factory?: vsc
|
||||
);
|
||||
|
||||
if (getConfig("debugTerminal.enabled")) {
|
||||
injectDebugTerminal2().then(context.subscriptions.push)
|
||||
injectDebugTerminal2().then(context.subscriptions.push);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -136,15 +136,17 @@ async function injectDebugTerminal(terminal: vscode.Terminal): Promise<void> {
|
||||
}
|
||||
|
||||
async function injectDebugTerminal2() {
|
||||
const jsDebugExt = vscode.extensions.getExtension('ms-vscode.js-debug-nightly') || vscode.extensions.getExtension('ms-vscode.js-debug');
|
||||
const jsDebugExt =
|
||||
vscode.extensions.getExtension("ms-vscode.js-debug-nightly") ||
|
||||
vscode.extensions.getExtension("ms-vscode.js-debug");
|
||||
if (!jsDebugExt) {
|
||||
return vscode.window.onDidOpenTerminal(injectDebugTerminal)
|
||||
return vscode.window.onDidOpenTerminal(injectDebugTerminal);
|
||||
}
|
||||
|
||||
await jsDebugExt.activate()
|
||||
const jsDebug: import('@vscode/js-debug').IExports = jsDebugExt.exports;
|
||||
await jsDebugExt.activate();
|
||||
const jsDebug: import("@vscode/js-debug").IExports = jsDebugExt.exports;
|
||||
if (!jsDebug) {
|
||||
return vscode.window.onDidOpenTerminal(injectDebugTerminal)
|
||||
return vscode.window.onDidOpenTerminal(injectDebugTerminal);
|
||||
}
|
||||
|
||||
return jsDebug.registerDebugTerminalOptionsProvider({
|
||||
@@ -331,7 +333,7 @@ class FileDebugSession extends DebugSession {
|
||||
}
|
||||
|
||||
this.adapter.on("Adapter.reverseRequest", ({ command, arguments: args }) =>
|
||||
this.sendRequest(command, args, 5000, () => { }),
|
||||
this.sendRequest(command, args, 5000, () => {}),
|
||||
);
|
||||
|
||||
adapters.set(url, this);
|
||||
|
||||
@@ -10,8 +10,8 @@ import {
|
||||
UnixSignal,
|
||||
} from "../../../../bun-debug-adapter-protocol";
|
||||
import type { JSC } from "../../../../bun-inspector-protocol";
|
||||
import { typedGlobalState } from "../../global-state";
|
||||
import { getConfig } from "../../extension";
|
||||
import { typedGlobalState } from "../../global-state";
|
||||
|
||||
const output = vscode.window.createOutputChannel("Bun - Diagnostics");
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import * as vscode from "vscode";
|
||||
import { styleLockfile } from "./lockfile.style";
|
||||
import { getConfig } from "../../extension";
|
||||
import { styleLockfile } from "./lockfile.style";
|
||||
|
||||
export type BunLockfile = vscode.CustomDocument & {
|
||||
readonly preview: string;
|
||||
@@ -38,8 +38,8 @@ export class BunLockfileEditorProvider implements vscode.CustomReadonlyEditorPro
|
||||
|
||||
function renderLockfile({ webview }: vscode.WebviewPanel, preview: string, extensionUri: vscode.Uri): void {
|
||||
if (!getConfig("bunlockb.enabled")) {
|
||||
webview.html = "<code>bun.bunlockb</code> config option is disabled."
|
||||
return
|
||||
webview.html = "<code>bun.bunlockb</code> config option is disabled.";
|
||||
return;
|
||||
}
|
||||
|
||||
const styleVSCodeUri = webview.asWebviewUri(vscode.Uri.joinPath(extensionUri, "assets", "vscode.css"));
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
import * as vscode from "vscode";
|
||||
import { debugCommand } from "../debug";
|
||||
import { BunTask } from "./tasks";
|
||||
import { getConfig } from "../../extension";
|
||||
|
||||
/**
|
||||
* Parses tasks defined in the package.json.
|
||||
|
||||
@@ -204,7 +204,6 @@ export function registerTestRunner(context: vscode.ExtensionContext) {
|
||||
context.subscriptions.push(watchTestCommand);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Escape any special characters in the input string, so that regex-matching on it
|
||||
* will work as expected.
|
||||
|
||||
62
packages/bun-vscode/src/vscode-js-debug.d.ts
vendored
62
packages/bun-vscode/src/vscode-js-debug.d.ts
vendored
@@ -2,38 +2,38 @@
|
||||
* Copyright (C) Microsoft Corporation. All rights reserved.
|
||||
*--------------------------------------------------------*/
|
||||
|
||||
declare module '@vscode/js-debug' {
|
||||
import type * as vscode from 'vscode';
|
||||
|
||||
/** @see {IExports.registerDebugTerminalOptionsProvider} */
|
||||
export interface IDebugTerminalOptionsProvider {
|
||||
/**
|
||||
* Called when the user creates a JavaScript Debug Terminal. It's called
|
||||
* with the options js-debug wants to use to create the terminal. It should
|
||||
* modify and return the options to use in the terminal.
|
||||
*
|
||||
* In order to avoid conflicting with existing logic, participants should
|
||||
* try to modify options in a additive way. For example prefer appending
|
||||
* to rather than reading and overwriting `options.env.PATH`.
|
||||
*/
|
||||
provideTerminalOptions(options: vscode.TerminalOptions): vscode.ProviderResult<vscode.TerminalOptions>;
|
||||
}
|
||||
declare module "@vscode/js-debug" {
|
||||
import type * as vscode from "vscode";
|
||||
|
||||
/** @see {IExports.registerDebugTerminalOptionsProvider} */
|
||||
export interface IDebugTerminalOptionsProvider {
|
||||
/**
|
||||
* Defines the exports of the `js-debug` extension. Once you have this typings
|
||||
* file, these can be acquired in your extension using the following code:
|
||||
* Called when the user creates a JavaScript Debug Terminal. It's called
|
||||
* with the options js-debug wants to use to create the terminal. It should
|
||||
* modify and return the options to use in the terminal.
|
||||
*
|
||||
* ```
|
||||
* const jsDebugExt = vscode.extensions.getExtension('ms-vscode.js-debug-nightly')
|
||||
* || vscode.extensions.getExtension('ms-vscode.js-debug');
|
||||
* await jsDebugExt.activate()
|
||||
* const jsDebug: import('@vscode/js-debug').IExports = jsDebug.exports;
|
||||
* ```
|
||||
* In order to avoid conflicting with existing logic, participants should
|
||||
* try to modify options in a additive way. For example prefer appending
|
||||
* to rather than reading and overwriting `options.env.PATH`.
|
||||
*/
|
||||
export interface IExports {
|
||||
/**
|
||||
* Registers a participant used when the user creates a JavaScript Debug Terminal.
|
||||
*/
|
||||
registerDebugTerminalOptionsProvider(provider: IDebugTerminalOptionsProvider): vscode.Disposable;
|
||||
}
|
||||
}
|
||||
provideTerminalOptions(options: vscode.TerminalOptions): vscode.ProviderResult<vscode.TerminalOptions>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines the exports of the `js-debug` extension. Once you have this typings
|
||||
* file, these can be acquired in your extension using the following code:
|
||||
*
|
||||
* ```
|
||||
* const jsDebugExt = vscode.extensions.getExtension('ms-vscode.js-debug-nightly')
|
||||
* || vscode.extensions.getExtension('ms-vscode.js-debug');
|
||||
* await jsDebugExt.activate()
|
||||
* const jsDebug: import('@vscode/js-debug').IExports = jsDebug.exports;
|
||||
* ```
|
||||
*/
|
||||
export interface IExports {
|
||||
/**
|
||||
* Registers a participant used when the user creates a JavaScript Debug Terminal.
|
||||
*/
|
||||
registerDebugTerminalOptionsProvider(provider: IDebugTerminalOptionsProvider): vscode.Disposable;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -77,10 +77,8 @@ const Wasi = {
|
||||
},
|
||||
proc_exit() {},
|
||||
|
||||
fd_seek(fd: number, offset_bigint: bigint, whence: unknown, newOffset: unknown) {
|
||||
},
|
||||
fd_write(fd: unknown, iov: unknown, iovcnt: unknown, pnum: unknown) {
|
||||
},
|
||||
fd_seek(fd: number, offset_bigint: bigint, whence: unknown, newOffset: unknown) {},
|
||||
fd_write(fd: unknown, iov: unknown, iovcnt: unknown, pnum: unknown) {},
|
||||
};
|
||||
|
||||
const env = {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { readFileSync } from "fs";
|
||||
import { init, getTests } from "../index.mjs";
|
||||
import { getTests, init } from "../index.mjs";
|
||||
|
||||
const filePath = process.argv[2];
|
||||
if (!filePath) throw new Error("Usage: node node.mjs <file>");
|
||||
|
||||
@@ -13,11 +13,6 @@
|
||||
"baseUrl": ".",
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": [
|
||||
"./node_modules/peechy",
|
||||
"./schema.d.ts",
|
||||
"./index.ts",
|
||||
"./schema.js"
|
||||
],
|
||||
"include": ["./node_modules/peechy", "./schema.d.ts", "./index.ts", "./schema.js"],
|
||||
"exclude": []
|
||||
}
|
||||
|
||||
@@ -2,27 +2,27 @@
|
||||
|
||||
// An agent that starts buildkite-agent and runs others services.
|
||||
|
||||
import { join } from "node:path";
|
||||
import { realpathSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { parseArgs } from "node:util";
|
||||
import {
|
||||
isWindows,
|
||||
getOs,
|
||||
getArch,
|
||||
getKernel,
|
||||
getAbi,
|
||||
getAbiVersion,
|
||||
getDistro,
|
||||
getDistroVersion,
|
||||
getHostname,
|
||||
getArch,
|
||||
getCloud,
|
||||
getCloudMetadataTag,
|
||||
which,
|
||||
getDistro,
|
||||
getDistroVersion,
|
||||
getEnv,
|
||||
writeFile,
|
||||
spawnSafe,
|
||||
getHostname,
|
||||
getKernel,
|
||||
getOs,
|
||||
isWindows,
|
||||
mkdir,
|
||||
spawnSafe,
|
||||
which,
|
||||
writeFile,
|
||||
} from "./utils.mjs";
|
||||
import { parseArgs } from "node:util";
|
||||
|
||||
/**
|
||||
* @param {"install" | "start"} action
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { spawn as nodeSpawn } from "node:child_process";
|
||||
import { existsSync, readFileSync, mkdirSync, cpSync, chmodSync } from "node:fs";
|
||||
import { chmodSync, cpSync, existsSync, mkdirSync, readFileSync } from "node:fs";
|
||||
import { basename, join, relative, resolve } from "node:path";
|
||||
import {
|
||||
formatAnnotationToHtml,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
// @bun
|
||||
// Used to generate a features.json file after building Bun.
|
||||
|
||||
import { writeFileSync } from "node:fs";
|
||||
import { crash_handler } from "bun:internal-for-testing";
|
||||
import { writeFileSync } from "node:fs";
|
||||
|
||||
writeFileSync("./features.json", JSON.stringify(crash_handler.getFeatureData()));
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { write, Glob, file } from "bun";
|
||||
import { join, resolve, relative } from "path";
|
||||
import { Glob, file, write } from "bun";
|
||||
import { join, relative, resolve } from "path";
|
||||
import { normalize } from "path/posix";
|
||||
|
||||
const root = resolve(import.meta.dirname, "..");
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { $, spawnSafe, which, getUsernameForDistro } from "./utils.mjs";
|
||||
import { $, getUsernameForDistro, spawnSafe, which } from "./utils.mjs";
|
||||
|
||||
export const google = {
|
||||
get cloud() {
|
||||
|
||||
@@ -1,48 +1,47 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { existsSync, mkdtempSync, readdirSync } from "node:fs";
|
||||
import { basename, extname, join, relative, resolve } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { inspect, parseArgs } from "node:util";
|
||||
import { docker } from "./docker.mjs";
|
||||
import { google } from "./google.mjs";
|
||||
import { orbstack } from "./orbstack.mjs";
|
||||
import { tart } from "./tart.mjs";
|
||||
import {
|
||||
$,
|
||||
copyFile,
|
||||
curlSafe,
|
||||
escapePowershell,
|
||||
getBootstrapVersion,
|
||||
getBuildNumber,
|
||||
getGithubApiUrl,
|
||||
getGithubUrl,
|
||||
getSecret,
|
||||
getUsernameForDistro,
|
||||
homedir,
|
||||
isCI,
|
||||
isMacOS,
|
||||
isWindows,
|
||||
mkdir,
|
||||
mkdtemp,
|
||||
parseArch,
|
||||
parseOs,
|
||||
readFile,
|
||||
rm,
|
||||
setupUserData,
|
||||
sha256,
|
||||
spawn,
|
||||
spawnSafe,
|
||||
spawnSsh,
|
||||
spawnSshSafe,
|
||||
spawnSyncSafe,
|
||||
startGroup,
|
||||
spawnSshSafe,
|
||||
spawnSsh,
|
||||
tmpdir,
|
||||
waitForPort,
|
||||
which,
|
||||
escapePowershell,
|
||||
getGithubUrl,
|
||||
getGithubApiUrl,
|
||||
curlSafe,
|
||||
mkdtemp,
|
||||
writeFile,
|
||||
copyFile,
|
||||
isMacOS,
|
||||
mkdir,
|
||||
rm,
|
||||
homedir,
|
||||
isWindows,
|
||||
setupUserData,
|
||||
sha256,
|
||||
isPrivileged,
|
||||
getUsernameForDistro,
|
||||
} from "./utils.mjs";
|
||||
import { basename, extname, join, relative, resolve } from "node:path";
|
||||
import { existsSync, mkdtempSync, readdirSync } from "node:fs";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { orbstack } from "./orbstack.mjs";
|
||||
import { docker } from "./docker.mjs";
|
||||
import { google } from "./google.mjs";
|
||||
import { tart } from "./tart.mjs";
|
||||
|
||||
const aws = {
|
||||
get name() {
|
||||
|
||||
@@ -5,9 +5,9 @@
|
||||
// bun ./scripts/nav2readme.ts
|
||||
//
|
||||
//
|
||||
import nav from "../docs/nav";
|
||||
import { readdirSync } from "fs";
|
||||
import path from "path";
|
||||
import nav from "../docs/nav";
|
||||
function getQuickLinks() {
|
||||
let md = "";
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { inspect } from "node:util";
|
||||
import { $, mkdtemp, rm, spawnSafe, writeFile, getUsernameForDistro, spawnSshSafe, setupUserData } from "./utils.mjs";
|
||||
import { getUserData } from "./machine.mjs";
|
||||
import { $, getUsernameForDistro, mkdtemp, rm, setupUserData, spawnSafe, spawnSshSafe, writeFile } from "./utils.mjs";
|
||||
|
||||
/**
|
||||
* @link https://docs.orbstack.dev/
|
||||
|
||||
@@ -14,21 +14,21 @@ import {
|
||||
appendFileSync,
|
||||
existsSync,
|
||||
constants as fs,
|
||||
linkSync,
|
||||
mkdirSync,
|
||||
mkdtempSync,
|
||||
readdirSync,
|
||||
readFileSync,
|
||||
realpathSync,
|
||||
statSync,
|
||||
symlinkSync,
|
||||
unlink,
|
||||
unlinkSync,
|
||||
writeFileSync,
|
||||
linkSync,
|
||||
symlinkSync,
|
||||
} from "node:fs";
|
||||
import { readFile } from "node:fs/promises";
|
||||
import { userInfo } from "node:os";
|
||||
import { basename, dirname, join, relative, sep, extname } from "node:path";
|
||||
import { basename, dirname, extname, join, relative, sep } from "node:path";
|
||||
import { parseArgs } from "node:util";
|
||||
import {
|
||||
getAbi,
|
||||
@@ -749,7 +749,8 @@ async function spawnSafe(options) {
|
||||
(error = /(Illegal instruction) at address/i.exec(buffer)) ||
|
||||
(error = /panic: (.*) at address/i.exec(buffer)) ||
|
||||
(error = /oh no: Bun has crashed/i.exec(buffer)) ||
|
||||
(error = /(ERROR: AddressSanitizer)/.exec(buffer))
|
||||
(error = /(ERROR: AddressSanitizer)/.exec(buffer)) ||
|
||||
(error = /(SIGABRT)/.exec(buffer))
|
||||
) {
|
||||
const [, message] = error || [];
|
||||
error = message ? message.split("\n")[0].toLowerCase() : "crash";
|
||||
|
||||
@@ -15,7 +15,7 @@ import {
|
||||
writeFileSync,
|
||||
} from "node:fs";
|
||||
import { connect } from "node:net";
|
||||
import { hostname, tmpdir as nodeTmpdir, homedir as nodeHomedir, userInfo, release } from "node:os";
|
||||
import { hostname, homedir as nodeHomedir, tmpdir as nodeTmpdir, release, userInfo } from "node:os";
|
||||
import { dirname, join, relative, resolve } from "node:path";
|
||||
import { normalize as normalizeWindows } from "node:path/win32";
|
||||
|
||||
|
||||
@@ -3041,9 +3041,6 @@ pub const H2FrameParser = struct {
|
||||
}
|
||||
|
||||
const error_code = error_arg.toU32();
|
||||
if (error_code > 13) {
|
||||
return globalObject.throw("Invalid ErrorCode", .{});
|
||||
}
|
||||
|
||||
this.endStream(stream, @enumFromInt(error_code));
|
||||
|
||||
@@ -3472,6 +3469,16 @@ pub const H2FrameParser = struct {
|
||||
return stream_id;
|
||||
}
|
||||
|
||||
pub fn setNextStreamID(this: *H2FrameParser, _: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
JSC.markBinding(@src());
|
||||
const args_list = callframe.arguments();
|
||||
bun.debugAssert(args_list.len >= 1);
|
||||
const stream_id_arg = args_list.ptr[0];
|
||||
bun.debugAssert(stream_id_arg.isNumber());
|
||||
this.lastStreamID = stream_id_arg.to(u32);
|
||||
return .undefined;
|
||||
}
|
||||
|
||||
pub fn hasNativeRead(this: *H2FrameParser, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
|
||||
return JSC.JSValue.jsBoolean(this.native_socket == .tcp or this.native_socket == .tls);
|
||||
}
|
||||
@@ -3562,6 +3569,7 @@ pub const H2FrameParser = struct {
|
||||
}
|
||||
return .undefined;
|
||||
}
|
||||
|
||||
pub fn emitErrorToAllStreams(this: *H2FrameParser, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue {
|
||||
JSC.markBinding(@src());
|
||||
|
||||
|
||||
@@ -101,6 +101,10 @@ export default [
|
||||
fn: "hasNativeRead",
|
||||
length: 1,
|
||||
},
|
||||
setNextStreamID: {
|
||||
fn: "setNextStreamID",
|
||||
length: 1,
|
||||
},
|
||||
getAllStreams: {
|
||||
fn: "getAllStreams",
|
||||
length: 0,
|
||||
|
||||
@@ -116,6 +116,8 @@ const errors: ErrorCodeMapping = [
|
||||
["ERR_HTTP2_STREAM_ERROR", Error],
|
||||
["ERR_HTTP2_TRAILERS_ALREADY_SENT", Error],
|
||||
["ERR_HTTP2_TRAILERS_NOT_READY", Error],
|
||||
["ERR_HTTP2_TOO_MANY_CUSTOM_SETTINGS", Error],
|
||||
["ERR_HTTP2_UNSUPPORTED_PROTOCOL", Error],
|
||||
["ERR_ILLEGAL_CONSTRUCTOR", TypeError],
|
||||
["ERR_INCOMPATIBLE_OPTION_PAIR", TypeError],
|
||||
["ERR_INVALID_ADDRESS", Error],
|
||||
|
||||
@@ -615,11 +615,14 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_validateBuffer, (JSC::JSGlobalObject * globa
|
||||
auto buffer = callFrame->argument(0);
|
||||
auto name = callFrame->argument(1);
|
||||
|
||||
if (!buffer.isCell()) return JSValue::encode(jsUndefined());
|
||||
auto ty = buffer.asCell()->type();
|
||||
if (!buffer.isUndefined()) {
|
||||
if (!buffer.isCell()) return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, name, "Buffer, TypedArray, or DataView"_s, buffer);
|
||||
|
||||
if (JSC::typedArrayType(ty) == NotTypedArray) {
|
||||
return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, name, "Buffer, TypedArray, or DataView"_s, buffer);
|
||||
auto ty = buffer.asCell()->type();
|
||||
|
||||
if (JSC::typedArrayType(ty) == NotTypedArray) {
|
||||
return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, name, "Buffer, TypedArray, or DataView"_s, buffer);
|
||||
}
|
||||
}
|
||||
return JSValue::encode(jsUndefined());
|
||||
}
|
||||
|
||||
@@ -31,23 +31,12 @@
|
||||
namespace WebCore {
|
||||
namespace Process {
|
||||
|
||||
static std::optional<ProcessIdentifier> globalIdentifier;
|
||||
|
||||
void setIdentifier(ProcessIdentifier processIdentifier)
|
||||
{
|
||||
ASSERT(isUIThread());
|
||||
globalIdentifier = processIdentifier;
|
||||
}
|
||||
// Bun only has 1 process
|
||||
static ProcessIdentifier globalIdentifier { 1 };
|
||||
|
||||
ProcessIdentifier identifier()
|
||||
{
|
||||
static std::once_flag onceFlag;
|
||||
std::call_once(onceFlag, [] {
|
||||
if (!globalIdentifier)
|
||||
globalIdentifier = ProcessIdentifier::generate();
|
||||
});
|
||||
|
||||
return *globalIdentifier;
|
||||
return globalIdentifier;
|
||||
}
|
||||
|
||||
} // namespace ProcessIdent
|
||||
|
||||
@@ -34,7 +34,6 @@ using ProcessIdentifier = ObjectIdentifier<ProcessIdentifierType>;
|
||||
|
||||
namespace Process {
|
||||
|
||||
WEBCORE_EXPORT void setIdentifier(ProcessIdentifier);
|
||||
WEBCORE_EXPORT ProcessIdentifier identifier();
|
||||
|
||||
} // namespace Process
|
||||
|
||||
@@ -143,7 +143,6 @@
|
||||
#include "Performance.h"
|
||||
#include "ProcessBindingConstants.h"
|
||||
#include "ProcessBindingTTYWrap.h"
|
||||
#include "ProcessIdentifier.h"
|
||||
#include "ReadableStream.h"
|
||||
#include "SerializedScriptValue.h"
|
||||
#include "StructuredClone.h"
|
||||
|
||||
121
src/bun.js/bindings/sqlite/sqlite3.c
vendored
121
src/bun.js/bindings/sqlite/sqlite3.c
vendored
@@ -1,7 +1,7 @@
|
||||
// clang-format off
|
||||
/******************************************************************************
|
||||
** This file is an amalgamation of many separate C source files from SQLite
|
||||
** version 3.49.1. By combining all the individual C code files into this
|
||||
** version 3.49.2. By combining all the individual C code files into this
|
||||
** single large file, the entire code can be compiled as a single translation
|
||||
** unit. This allows many compilers to do optimizations that would not be
|
||||
** possible if the files were compiled separately. Performance improvements
|
||||
@@ -19,7 +19,7 @@
|
||||
** separate file. This file contains only code for the core SQLite library.
|
||||
**
|
||||
** The content in this amalgamation comes from Fossil check-in
|
||||
** 873d4e274b4988d260ba8354a9718324a1c2 with changes in files:
|
||||
** 17144570b0d96ae63cd6f3edca39e27ebd74 with changes in files:
|
||||
**
|
||||
**
|
||||
*/
|
||||
@@ -466,9 +466,9 @@ extern "C" {
|
||||
** [sqlite3_libversion_number()], [sqlite3_sourceid()],
|
||||
** [sqlite_version()] and [sqlite_source_id()].
|
||||
*/
|
||||
#define SQLITE_VERSION "3.49.1"
|
||||
#define SQLITE_VERSION_NUMBER 3049001
|
||||
#define SQLITE_SOURCE_ID "2025-02-18 13:38:58 873d4e274b4988d260ba8354a9718324a1c26187a4ab4c1cc0227c03d0f10e70"
|
||||
#define SQLITE_VERSION "3.49.2"
|
||||
#define SQLITE_VERSION_NUMBER 3049002
|
||||
#define SQLITE_SOURCE_ID "2025-05-07 10:39:52 17144570b0d96ae63cd6f3edca39e27ebd74925252bbaf6723bcb2f6b4861fb1"
|
||||
|
||||
/*
|
||||
** CAPI3REF: Run-Time Library Version Numbers
|
||||
@@ -19065,6 +19065,7 @@ struct Index {
|
||||
unsigned bLowQual:1; /* sqlite_stat1 says this is a low-quality index */
|
||||
unsigned bNoQuery:1; /* Do not use this index to optimize queries */
|
||||
unsigned bAscKeyBug:1; /* True if the bba7b69f9849b5bf bug applies */
|
||||
unsigned bIdxRowid:1; /* One or more of the index keys is the ROWID */
|
||||
unsigned bHasVCol:1; /* Index references one or more VIRTUAL columns */
|
||||
unsigned bHasExpr:1; /* Index contains an expression, either a literal
|
||||
** expression, or a reference to a VIRTUAL column */
|
||||
@@ -97242,6 +97243,7 @@ case OP_MakeRecord: {
|
||||
zHdr += sqlite3PutVarint(zHdr, serial_type);
|
||||
if( pRec->n ){
|
||||
assert( pRec->z!=0 );
|
||||
assert( pRec->z!=(const char*)sqlite3CtypeMap );
|
||||
memcpy(zPayload, pRec->z, pRec->n);
|
||||
zPayload += pRec->n;
|
||||
}
|
||||
@@ -115469,11 +115471,11 @@ SQLITE_PRIVATE void sqlite3ExprIfTrue(Parse *pParse, Expr *pExpr, int dest, int
|
||||
assert( TK_ISNULL==OP_IsNull ); testcase( op==TK_ISNULL );
|
||||
assert( TK_NOTNULL==OP_NotNull ); testcase( op==TK_NOTNULL );
|
||||
r1 = sqlite3ExprCodeTemp(pParse, pExpr->pLeft, ®Free1);
|
||||
sqlite3VdbeTypeofColumn(v, r1);
|
||||
assert( regFree1==0 || regFree1==r1 );
|
||||
if( regFree1 ) sqlite3VdbeTypeofColumn(v, r1);
|
||||
sqlite3VdbeAddOp2(v, op, r1, dest);
|
||||
VdbeCoverageIf(v, op==TK_ISNULL);
|
||||
VdbeCoverageIf(v, op==TK_NOTNULL);
|
||||
testcase( regFree1==0 );
|
||||
break;
|
||||
}
|
||||
case TK_BETWEEN: {
|
||||
@@ -115644,11 +115646,11 @@ SQLITE_PRIVATE void sqlite3ExprIfFalse(Parse *pParse, Expr *pExpr, int dest, int
|
||||
case TK_ISNULL:
|
||||
case TK_NOTNULL: {
|
||||
r1 = sqlite3ExprCodeTemp(pParse, pExpr->pLeft, ®Free1);
|
||||
sqlite3VdbeTypeofColumn(v, r1);
|
||||
assert( regFree1==0 || regFree1==r1 );
|
||||
if( regFree1 ) sqlite3VdbeTypeofColumn(v, r1);
|
||||
sqlite3VdbeAddOp2(v, op, r1, dest);
|
||||
testcase( op==TK_ISNULL ); VdbeCoverageIf(v, op==TK_ISNULL);
|
||||
testcase( op==TK_NOTNULL ); VdbeCoverageIf(v, op==TK_NOTNULL);
|
||||
testcase( regFree1==0 );
|
||||
break;
|
||||
}
|
||||
case TK_BETWEEN: {
|
||||
@@ -126337,6 +126339,7 @@ SQLITE_PRIVATE void sqlite3CreateIndex(
|
||||
assert( j<=0x7fff );
|
||||
if( j<0 ){
|
||||
j = pTab->iPKey;
|
||||
pIndex->bIdxRowid = 1;
|
||||
}else{
|
||||
if( pTab->aCol[j].notNull==0 ){
|
||||
pIndex->uniqNotNull = 0;
|
||||
@@ -139133,48 +139136,48 @@ static const char *const pragCName[] = {
|
||||
/* 13 */ "pk",
|
||||
/* 14 */ "hidden",
|
||||
/* table_info reuses 8 */
|
||||
/* 15 */ "schema", /* Used by: table_list */
|
||||
/* 16 */ "name",
|
||||
/* 15 */ "name", /* Used by: function_list */
|
||||
/* 16 */ "builtin",
|
||||
/* 17 */ "type",
|
||||
/* 18 */ "ncol",
|
||||
/* 19 */ "wr",
|
||||
/* 20 */ "strict",
|
||||
/* 21 */ "seqno", /* Used by: index_xinfo */
|
||||
/* 22 */ "cid",
|
||||
/* 23 */ "name",
|
||||
/* 24 */ "desc",
|
||||
/* 25 */ "coll",
|
||||
/* 26 */ "key",
|
||||
/* 27 */ "name", /* Used by: function_list */
|
||||
/* 28 */ "builtin",
|
||||
/* 29 */ "type",
|
||||
/* 30 */ "enc",
|
||||
/* 31 */ "narg",
|
||||
/* 32 */ "flags",
|
||||
/* 33 */ "tbl", /* Used by: stats */
|
||||
/* 34 */ "idx",
|
||||
/* 35 */ "wdth",
|
||||
/* 36 */ "hght",
|
||||
/* 37 */ "flgs",
|
||||
/* 38 */ "seq", /* Used by: index_list */
|
||||
/* 39 */ "name",
|
||||
/* 40 */ "unique",
|
||||
/* 41 */ "origin",
|
||||
/* 42 */ "partial",
|
||||
/* 18 */ "enc",
|
||||
/* 19 */ "narg",
|
||||
/* 20 */ "flags",
|
||||
/* 21 */ "schema", /* Used by: table_list */
|
||||
/* 22 */ "name",
|
||||
/* 23 */ "type",
|
||||
/* 24 */ "ncol",
|
||||
/* 25 */ "wr",
|
||||
/* 26 */ "strict",
|
||||
/* 27 */ "seqno", /* Used by: index_xinfo */
|
||||
/* 28 */ "cid",
|
||||
/* 29 */ "name",
|
||||
/* 30 */ "desc",
|
||||
/* 31 */ "coll",
|
||||
/* 32 */ "key",
|
||||
/* 33 */ "seq", /* Used by: index_list */
|
||||
/* 34 */ "name",
|
||||
/* 35 */ "unique",
|
||||
/* 36 */ "origin",
|
||||
/* 37 */ "partial",
|
||||
/* 38 */ "tbl", /* Used by: stats */
|
||||
/* 39 */ "idx",
|
||||
/* 40 */ "wdth",
|
||||
/* 41 */ "hght",
|
||||
/* 42 */ "flgs",
|
||||
/* 43 */ "table", /* Used by: foreign_key_check */
|
||||
/* 44 */ "rowid",
|
||||
/* 45 */ "parent",
|
||||
/* 46 */ "fkid",
|
||||
/* index_info reuses 21 */
|
||||
/* 47 */ "seq", /* Used by: database_list */
|
||||
/* 48 */ "name",
|
||||
/* 49 */ "file",
|
||||
/* 50 */ "busy", /* Used by: wal_checkpoint */
|
||||
/* 51 */ "log",
|
||||
/* 52 */ "checkpointed",
|
||||
/* collation_list reuses 38 */
|
||||
/* 47 */ "busy", /* Used by: wal_checkpoint */
|
||||
/* 48 */ "log",
|
||||
/* 49 */ "checkpointed",
|
||||
/* 50 */ "seq", /* Used by: database_list */
|
||||
/* 51 */ "name",
|
||||
/* 52 */ "file",
|
||||
/* index_info reuses 27 */
|
||||
/* 53 */ "database", /* Used by: lock_status */
|
||||
/* 54 */ "status",
|
||||
/* collation_list reuses 33 */
|
||||
/* 55 */ "cache_size", /* Used by: default_cache_size */
|
||||
/* module_list pragma_list reuses 9 */
|
||||
/* 56 */ "timeout", /* Used by: busy_timeout */
|
||||
@@ -139267,7 +139270,7 @@ static const PragmaName aPragmaName[] = {
|
||||
{/* zName: */ "collation_list",
|
||||
/* ePragTyp: */ PragTyp_COLLATION_LIST,
|
||||
/* ePragFlg: */ PragFlg_Result0,
|
||||
/* ColNames: */ 38, 2,
|
||||
/* ColNames: */ 33, 2,
|
||||
/* iArg: */ 0 },
|
||||
#endif
|
||||
#if !defined(SQLITE_OMIT_COMPILEOPTION_DIAGS)
|
||||
@@ -139302,7 +139305,7 @@ static const PragmaName aPragmaName[] = {
|
||||
{/* zName: */ "database_list",
|
||||
/* ePragTyp: */ PragTyp_DATABASE_LIST,
|
||||
/* ePragFlg: */ PragFlg_Result0,
|
||||
/* ColNames: */ 47, 3,
|
||||
/* ColNames: */ 50, 3,
|
||||
/* iArg: */ 0 },
|
||||
#endif
|
||||
#if !defined(SQLITE_OMIT_PAGER_PRAGMAS) && !defined(SQLITE_OMIT_DEPRECATED)
|
||||
@@ -139382,7 +139385,7 @@ static const PragmaName aPragmaName[] = {
|
||||
{/* zName: */ "function_list",
|
||||
/* ePragTyp: */ PragTyp_FUNCTION_LIST,
|
||||
/* ePragFlg: */ PragFlg_Result0,
|
||||
/* ColNames: */ 27, 6,
|
||||
/* ColNames: */ 15, 6,
|
||||
/* iArg: */ 0 },
|
||||
#endif
|
||||
#endif
|
||||
@@ -139411,17 +139414,17 @@ static const PragmaName aPragmaName[] = {
|
||||
{/* zName: */ "index_info",
|
||||
/* ePragTyp: */ PragTyp_INDEX_INFO,
|
||||
/* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt,
|
||||
/* ColNames: */ 21, 3,
|
||||
/* ColNames: */ 27, 3,
|
||||
/* iArg: */ 0 },
|
||||
{/* zName: */ "index_list",
|
||||
/* ePragTyp: */ PragTyp_INDEX_LIST,
|
||||
/* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt,
|
||||
/* ColNames: */ 38, 5,
|
||||
/* ColNames: */ 33, 5,
|
||||
/* iArg: */ 0 },
|
||||
{/* zName: */ "index_xinfo",
|
||||
/* ePragTyp: */ PragTyp_INDEX_INFO,
|
||||
/* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt,
|
||||
/* ColNames: */ 21, 6,
|
||||
/* ColNames: */ 27, 6,
|
||||
/* iArg: */ 1 },
|
||||
#endif
|
||||
#if !defined(SQLITE_OMIT_INTEGRITY_CHECK)
|
||||
@@ -139600,7 +139603,7 @@ static const PragmaName aPragmaName[] = {
|
||||
{/* zName: */ "stats",
|
||||
/* ePragTyp: */ PragTyp_STATS,
|
||||
/* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0|PragFlg_SchemaReq,
|
||||
/* ColNames: */ 33, 5,
|
||||
/* ColNames: */ 38, 5,
|
||||
/* iArg: */ 0 },
|
||||
#endif
|
||||
#if !defined(SQLITE_OMIT_PAGER_PRAGMAS)
|
||||
@@ -139619,7 +139622,7 @@ static const PragmaName aPragmaName[] = {
|
||||
{/* zName: */ "table_list",
|
||||
/* ePragTyp: */ PragTyp_TABLE_LIST,
|
||||
/* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1,
|
||||
/* ColNames: */ 15, 6,
|
||||
/* ColNames: */ 21, 6,
|
||||
/* iArg: */ 0 },
|
||||
{/* zName: */ "table_xinfo",
|
||||
/* ePragTyp: */ PragTyp_TABLE_INFO,
|
||||
@@ -139696,7 +139699,7 @@ static const PragmaName aPragmaName[] = {
|
||||
{/* zName: */ "wal_checkpoint",
|
||||
/* ePragTyp: */ PragTyp_WAL_CHECKPOINT,
|
||||
/* ePragFlg: */ PragFlg_NeedSchema,
|
||||
/* ColNames: */ 50, 3,
|
||||
/* ColNames: */ 47, 3,
|
||||
/* iArg: */ 0 },
|
||||
#endif
|
||||
#if !defined(SQLITE_OMIT_FLAG_PRAGMAS)
|
||||
@@ -147074,6 +147077,7 @@ static int multiSelect(
|
||||
multi_select_end:
|
||||
pDest->iSdst = dest.iSdst;
|
||||
pDest->nSdst = dest.nSdst;
|
||||
pDest->iSDParm2 = dest.iSDParm2;
|
||||
if( pDelete ){
|
||||
sqlite3ParserAddCleanup(pParse, sqlite3SelectDeleteGeneric, pDelete);
|
||||
}
|
||||
@@ -151028,6 +151032,7 @@ static void agginfoFree(sqlite3 *db, void *pArg){
|
||||
** * There is no WHERE or GROUP BY or HAVING clauses on the subqueries
|
||||
** * The outer query is a simple count(*) with no WHERE clause or other
|
||||
** extraneous syntax.
|
||||
** * None of the subqueries are DISTINCT (forumpost/a860f5fb2e 2025-03-10)
|
||||
**
|
||||
** Return TRUE if the optimization is undertaken.
|
||||
*/
|
||||
@@ -151060,7 +151065,11 @@ static int countOfViewOptimization(Parse *pParse, Select *p){
|
||||
if( pSub->op!=TK_ALL && pSub->pPrior ) return 0; /* Must be UNION ALL */
|
||||
if( pSub->pWhere ) return 0; /* No WHERE clause */
|
||||
if( pSub->pLimit ) return 0; /* No LIMIT clause */
|
||||
if( pSub->selFlags & SF_Aggregate ) return 0; /* Not an aggregate */
|
||||
if( pSub->selFlags & (SF_Aggregate|SF_Distinct) ){
|
||||
testcase( pSub->selFlags & SF_Aggregate );
|
||||
testcase( pSub->selFlags & SF_Distinct );
|
||||
return 0; /* Not an aggregate nor DISTINCT */
|
||||
}
|
||||
assert( pSub->pHaving==0 ); /* Due to the previous */
|
||||
pSub = pSub->pPrior; /* Repeat over compound */
|
||||
}while( pSub );
|
||||
@@ -166882,7 +166891,7 @@ static int whereLoopAddBtreeIndex(
|
||||
if( (pNew->wsFlags & WHERE_TOP_LIMIT)==0
|
||||
&& pNew->u.btree.nEq<pProbe->nColumn
|
||||
&& (pNew->u.btree.nEq<pProbe->nKeyCol ||
|
||||
pProbe->idxType!=SQLITE_IDXTYPE_PRIMARYKEY)
|
||||
(pProbe->idxType!=SQLITE_IDXTYPE_PRIMARYKEY && !pProbe->bIdxRowid))
|
||||
){
|
||||
if( pNew->u.btree.nEq>3 ){
|
||||
sqlite3ProgressCheck(pParse);
|
||||
@@ -255875,7 +255884,7 @@ static void fts5SourceIdFunc(
|
||||
){
|
||||
assert( nArg==0 );
|
||||
UNUSED_PARAM2(nArg, apUnused);
|
||||
sqlite3_result_text(pCtx, "fts5: 2025-02-18 13:38:58 873d4e274b4988d260ba8354a9718324a1c26187a4ab4c1cc0227c03d0f10e70", -1, SQLITE_TRANSIENT);
|
||||
sqlite3_result_text(pCtx, "fts5: 2025-05-07 10:39:52 17144570b0d96ae63cd6f3edca39e27ebd74925252bbaf6723bcb2f6b4861fb1", -1, SQLITE_TRANSIENT);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
6
src/bun.js/bindings/sqlite/sqlite3_local.h
vendored
6
src/bun.js/bindings/sqlite/sqlite3_local.h
vendored
@@ -147,9 +147,9 @@ extern "C" {
|
||||
** [sqlite3_libversion_number()], [sqlite3_sourceid()],
|
||||
** [sqlite_version()] and [sqlite_source_id()].
|
||||
*/
|
||||
#define SQLITE_VERSION "3.49.1"
|
||||
#define SQLITE_VERSION_NUMBER 3049001
|
||||
#define SQLITE_SOURCE_ID "2025-02-18 13:38:58 873d4e274b4988d260ba8354a9718324a1c26187a4ab4c1cc0227c03d0f10e70"
|
||||
#define SQLITE_VERSION "3.49.2"
|
||||
#define SQLITE_VERSION_NUMBER 3049002
|
||||
#define SQLITE_SOURCE_ID "2025-05-07 10:39:52 17144570b0d96ae63cd6f3edca39e27ebd74925252bbaf6723bcb2f6b4861fb1"
|
||||
|
||||
/*
|
||||
** CAPI3REF: Run-Time Library Version Numbers
|
||||
|
||||
@@ -53,6 +53,7 @@
|
||||
#include <wtf/GetPtr.h>
|
||||
#include <wtf/PointerPreparations.h>
|
||||
#include <wtf/URL.h>
|
||||
#include "ErrorCode.h"
|
||||
|
||||
namespace WebCore {
|
||||
using namespace JSC;
|
||||
@@ -308,14 +309,28 @@ static inline JSC::EncodedJSValue jsAbortSignalConstructorFunction_anyBody(JSC::
|
||||
auto throwScope = DECLARE_THROW_SCOPE(vm);
|
||||
UNUSED_PARAM(throwScope);
|
||||
UNUSED_PARAM(callFrame);
|
||||
if (UNLIKELY(callFrame->argumentCount() < 1))
|
||||
return throwVMError(lexicalGlobalObject, throwScope, createNotEnoughArgumentsError(lexicalGlobalObject));
|
||||
auto* context = jsCast<JSDOMGlobalObject*>(lexicalGlobalObject)->scriptExecutionContext();
|
||||
if (UNLIKELY(!context))
|
||||
return JSValue::encode(jsUndefined());
|
||||
EnsureStillAliveScope argument0 = callFrame->uncheckedArgument(0);
|
||||
auto signals = convert<IDLSequence<IDLInterface<AbortSignal>>>(*lexicalGlobalObject, argument0.value());
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
EnsureStillAliveScope argument0 = callFrame->argument(0);
|
||||
|
||||
// manual conversion to nodejs error handling
|
||||
Vector<RefPtr<AbortSignal>> signals;
|
||||
if (argument0.value().isUndefinedOrNull()) {
|
||||
Bun::ERR::INVALID_ARG_TYPE(throwScope, lexicalGlobalObject, "signals can not be converted to sequence"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
size_t i = 0;
|
||||
forEachInIterable(lexicalGlobalObject, argument0.value(), [&](VM& vm, JSGlobalObject* globalObject, JSValue item) {
|
||||
if (auto* signal = JSAbortSignal::toWrapped(vm, item)) {
|
||||
signals.append(signal);
|
||||
} else {
|
||||
Bun::ERR::INVALID_ARG_INSTANCE(throwScope, lexicalGlobalObject, makeString("signals["_s, i, "]"_s), "AbortSignal"_s, item);
|
||||
}
|
||||
i++;
|
||||
});
|
||||
|
||||
RELEASE_AND_RETURN(throwScope, JSValue::encode(toJSNewlyCreated<IDLInterface<AbortSignal>>(*lexicalGlobalObject, *jsCast<JSDOMGlobalObject*>(lexicalGlobalObject), throwScope, AbortSignal::any(*context, WTFMove(signals)))));
|
||||
}
|
||||
|
||||
|
||||
@@ -138,13 +138,13 @@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSWorkerDOMConstructor::
|
||||
}
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
EnsureStillAliveScope argument1 = callFrame->argument(1);
|
||||
|
||||
WorkerOptions options {};
|
||||
JSValue nodeWorkerObject {};
|
||||
if (callFrame->argumentCount() == 3) {
|
||||
nodeWorkerObject = callFrame->argument(2);
|
||||
options.kind = WorkerOptions::Kind::Node;
|
||||
}
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
|
||||
auto options = WorkerOptions {};
|
||||
JSValue workerData = jsUndefined();
|
||||
Vector<JSC::Strong<JSC::JSObject>> transferList;
|
||||
|
||||
|
||||
@@ -389,12 +389,10 @@ void Worker::fireEarlyMessages(Zig::GlobalObject* workerGlobalObject)
|
||||
}
|
||||
}
|
||||
|
||||
void Worker::dispatchError(WTF::String message)
|
||||
void Worker::dispatchErrorWithMessage(WTF::String message)
|
||||
{
|
||||
|
||||
auto* ctx = scriptExecutionContext();
|
||||
if (!ctx)
|
||||
return;
|
||||
if (!ctx) return;
|
||||
|
||||
ScriptExecutionContext::postTaskTo(ctx->identifier(), [protectedThis = Ref { *this }, message = message.isolatedCopy()](ScriptExecutionContext& context) -> void {
|
||||
ErrorEvent::Init init;
|
||||
@@ -404,6 +402,27 @@ void Worker::dispatchError(WTF::String message)
|
||||
protectedThis->dispatchEvent(event);
|
||||
});
|
||||
}
|
||||
|
||||
bool Worker::dispatchErrorWithValue(Zig::GlobalObject* workerGlobalObject, JSValue value)
|
||||
{
|
||||
auto* ctx = scriptExecutionContext();
|
||||
if (!ctx) return false;
|
||||
auto serialized = SerializedScriptValue::create(*workerGlobalObject, value, SerializationForStorage::No, SerializationErrorMode::NonThrowing);
|
||||
if (!serialized) return false;
|
||||
|
||||
ScriptExecutionContext::postTaskTo(ctx->identifier(), [protectedThis = Ref { *this }, serialized](ScriptExecutionContext& context) -> void {
|
||||
auto* globalObject = context.globalObject();
|
||||
ErrorEvent::Init init;
|
||||
JSValue deserialized = serialized->deserialize(*globalObject, globalObject, SerializationErrorMode::NonThrowing);
|
||||
if (!deserialized) return;
|
||||
init.error = deserialized;
|
||||
|
||||
auto event = ErrorEvent::create(eventNames().errorEvent, init, EventIsTrusted::Yes);
|
||||
protectedThis->dispatchEvent(event);
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
void Worker::dispatchExit(int32_t exitCode)
|
||||
{
|
||||
auto* ctx = scriptExecutionContext();
|
||||
@@ -483,7 +502,16 @@ extern "C" void WebWorker__dispatchError(Zig::GlobalObject* globalObject, Worker
|
||||
init.bubbles = false;
|
||||
|
||||
globalObject->globalEventScope->dispatchEvent(ErrorEvent::create(eventNames().errorEvent, init, EventIsTrusted::Yes));
|
||||
worker->dispatchError(message.toWTFString(BunString::ZeroCopy));
|
||||
switch (worker->options().kind) {
|
||||
case WorkerOptions::Kind::Web:
|
||||
return worker->dispatchErrorWithMessage(message.toWTFString(BunString::ZeroCopy));
|
||||
case WorkerOptions::Kind::Node:
|
||||
if (!worker->dispatchErrorWithValue(globalObject, error)) {
|
||||
// If serialization threw an error, use the string instead
|
||||
worker->dispatchErrorWithMessage(message.toWTFString(BunString::ZeroCopy));
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" WebCore::Worker* WebWorker__getParentWorker(void* bunVM);
|
||||
|
||||
@@ -27,10 +27,7 @@
|
||||
|
||||
#include "ActiveDOMObject.h"
|
||||
#include "EventTarget.h"
|
||||
// #include "MessagePort.h"
|
||||
#include "WorkerOptions.h"
|
||||
// #include "WorkerScriptLoaderClient.h"
|
||||
// #include "WorkerType.h"
|
||||
#include <JavaScriptCore/RuntimeFlags.h>
|
||||
#include <wtf/Deque.h>
|
||||
#include <wtf/MonotonicTime.h>
|
||||
@@ -50,7 +47,6 @@ class RTCRtpScriptTransform;
|
||||
class RTCRtpScriptTransformer;
|
||||
class ScriptExecutionContext;
|
||||
class WorkerGlobalScopeProxy;
|
||||
// class WorkerScriptLoader;
|
||||
|
||||
struct StructuredSerializeOptions;
|
||||
struct WorkerOptions;
|
||||
@@ -80,15 +76,6 @@ public:
|
||||
void dispatchCloseEvent(Event&);
|
||||
void setKeepAlive(bool);
|
||||
|
||||
#if ENABLE(WEB_RTC)
|
||||
void createRTCRtpScriptTransformer(RTCRtpScriptTransform&, MessageWithMessagePorts&&);
|
||||
#endif
|
||||
|
||||
// WorkerType type() const
|
||||
// {
|
||||
// return m_options.type;
|
||||
// }
|
||||
|
||||
void postTaskToWorkerGlobalScope(Function<void(ScriptExecutionContext&)>&&);
|
||||
|
||||
static void forEachWorker(const Function<Function<void(ScriptExecutionContext&)>()>&);
|
||||
@@ -97,7 +84,9 @@ public:
|
||||
void dispatchOnline(Zig::GlobalObject* workerGlobalObject);
|
||||
// Fire a 'message' event in the Worker for messages that were sent before the Worker started running
|
||||
void fireEarlyMessages(Zig::GlobalObject* workerGlobalObject);
|
||||
void dispatchError(WTF::String message);
|
||||
void dispatchErrorWithMessage(WTF::String message);
|
||||
// true if successful
|
||||
bool dispatchErrorWithValue(Zig::GlobalObject* workerGlobalObject, JSValue value);
|
||||
void dispatchExit(int32_t exitCode);
|
||||
ScriptExecutionContext* scriptExecutionContext() const final { return ContextDestructionObserver::scriptExecutionContext(); }
|
||||
ScriptExecutionContextIdentifier clientIdentifier() const { return m_clientIdentifier; }
|
||||
@@ -111,16 +100,6 @@ private:
|
||||
void derefEventTarget() final { deref(); }
|
||||
void eventListenersDidChange() final {};
|
||||
|
||||
// void didReceiveResponse(ResourceLoaderIdentifier, const ResourceResponse&) final;
|
||||
// void notifyFinished() final;
|
||||
|
||||
// ActiveDOMObject.
|
||||
// void stop() final;
|
||||
// void suspend(ReasonForSuspension) final;
|
||||
// void resume() final;
|
||||
// const char* activeDOMObjectName() const final;
|
||||
// bool virtualHasPendingActivity() const final;
|
||||
|
||||
static void networkStateChanged(bool isOnLine);
|
||||
|
||||
static constexpr uint8_t OnlineFlag = 1 << 0;
|
||||
@@ -128,15 +107,9 @@ private:
|
||||
static constexpr uint8_t TerminateRequestedFlag = 1 << 0;
|
||||
static constexpr uint8_t TerminatedFlag = 1 << 1;
|
||||
|
||||
// RefPtr<WorkerScriptLoader> m_scriptLoader;
|
||||
WorkerOptions m_options;
|
||||
String m_identifier;
|
||||
// WorkerGlobalScopeProxy& m_contextProxy; // The proxy outlives the worker to perform thread shutdown.
|
||||
// std::optional<ContentSecurityPolicyResponseHeaders> m_contentSecurityPolicyResponseHeaders;
|
||||
MonotonicTime m_workerCreationTime;
|
||||
// bool m_shouldBypassMainWorldContentSecurityPolicy { false };
|
||||
// bool m_isSuspendedForBackForwardCache { false };
|
||||
// JSC::RuntimeFlags m_runtimeFlags;
|
||||
Deque<RefPtr<Event>> m_pendingEvents;
|
||||
Lock m_pendingTasksMutex;
|
||||
Deque<Function<void(ScriptExecutionContext&)>> m_pendingTasks;
|
||||
|
||||
@@ -8,6 +8,13 @@
|
||||
namespace WebCore {
|
||||
|
||||
struct WorkerOptions {
|
||||
enum class Kind : uint8_t {
|
||||
// Created by the global Worker constructor
|
||||
Web,
|
||||
// Created by the `require("node:worker_threads").Worker` constructor
|
||||
Node,
|
||||
};
|
||||
|
||||
String name;
|
||||
bool mini { false };
|
||||
bool unref { false };
|
||||
@@ -16,6 +23,7 @@ struct WorkerOptions {
|
||||
// true, then we need to make sure that `process.argv` contains "[worker eval]" instead of the
|
||||
// Blob URL.
|
||||
bool evalMode { false };
|
||||
Kind kind { Kind::Web };
|
||||
// Serialized array containing [workerData, environmentData]
|
||||
// (environmentData is always a Map)
|
||||
RefPtr<SerializedScriptValue> workerDataAndEnvironmentData;
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -14,7 +14,7 @@ pub const ByteListPool = bun.ObjectPool(bun.ByteList, null, true, 8);
|
||||
|
||||
pub const Crypto = @import("webcore/Crypto.zig");
|
||||
pub const AbortSignal = @import("bindings/AbortSignal.zig").AbortSignal;
|
||||
pub const WebWorker = @import("web_worker.zig").WebWorker;
|
||||
pub const WebWorker = @import("web_worker.zig");
|
||||
pub const AutoFlusher = @import("webcore/AutoFlusher.zig");
|
||||
pub const EncodingLabel = @import("webcore/EncodingLabel.zig").EncodingLabel;
|
||||
pub const Fetch = @import("webcore/fetch.zig");
|
||||
|
||||
@@ -356,14 +356,14 @@ pub const TokenList = struct {
|
||||
.{ .color = color },
|
||||
) catch unreachable;
|
||||
last_is_delim = false;
|
||||
last_is_whitespace = true;
|
||||
last_is_whitespace = false;
|
||||
} else if (input.tryParse(UnresolvedColor.parse, .{ f, options }).asValue()) |color| {
|
||||
tokens.append(
|
||||
input.allocator(),
|
||||
.{ .unresolved_color = color },
|
||||
) catch unreachable;
|
||||
last_is_delim = false;
|
||||
last_is_whitespace = true;
|
||||
last_is_whitespace = false;
|
||||
} else if (bun.strings.eql(f, "url")) {
|
||||
input.reset(&state);
|
||||
tokens.append(
|
||||
|
||||
@@ -96,14 +96,23 @@ Agent.prototype.createConnection = function () {
|
||||
};
|
||||
|
||||
Agent.prototype.getName = function (options = kEmptyObject) {
|
||||
let name = `http:${options.host || "localhost"}:`;
|
||||
if (options.port) name += options.port;
|
||||
let name = options.host || "localhost";
|
||||
name += ":";
|
||||
if (options.localAddress) name += options.localAddress;
|
||||
if (options.port) {
|
||||
name += options.port;
|
||||
}
|
||||
name += ":";
|
||||
if (options.localAddress) {
|
||||
name += options.localAddress;
|
||||
}
|
||||
// Pacify parallel/test-http-agent-getname by only appending
|
||||
// the ':' when options.family is set.
|
||||
if (options.family === 4 || options.family === 6) name += `:${options.family}`;
|
||||
if (options.socketPath) name += `:${options.socketPath}`;
|
||||
if (options.family === 4 || options.family === 6) {
|
||||
name += `:${options.family}`;
|
||||
}
|
||||
if (options.socketPath) {
|
||||
name += `:${options.socketPath}`;
|
||||
}
|
||||
return name;
|
||||
};
|
||||
|
||||
|
||||
@@ -803,8 +803,7 @@ const ServerPrototype = {
|
||||
}
|
||||
if (typeof optionalCallback === "function") setCloseCallback(this, optionalCallback);
|
||||
this.listening = false;
|
||||
server.stop(true);
|
||||
return this;
|
||||
server.stop();
|
||||
},
|
||||
[EventEmitter.captureRejectionSymbol]: function (err, event, ...args) {
|
||||
switch (event) {
|
||||
|
||||
@@ -1,8 +1,34 @@
|
||||
// Hardcoded module "node:http2"
|
||||
|
||||
/*
|
||||
* Portions of this code are derived from the Node.js project (https://nodejs.org/),
|
||||
* originally developed by Node.js contributors and Joyent, Inc.
|
||||
*
|
||||
* Copyright Node.js contributors. All rights reserved.
|
||||
* Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*
|
||||
* Modifications were made to the original code.
|
||||
*/
|
||||
const { isTypedArray } = require("node:util/types");
|
||||
const { hideFromStack, throwNotImplemented } = require("internal/shared");
|
||||
|
||||
const { STATUS_CODES } = require("internal/http");
|
||||
const tls = require("node:tls");
|
||||
const net = require("node:net");
|
||||
const fs = require("node:fs");
|
||||
@@ -12,7 +38,7 @@ const bunTLSConnectOptions = Symbol.for("::buntlsconnectoptions::");
|
||||
const bunSocketServerOptions = Symbol.for("::bunnetserveroptions::");
|
||||
const kInfoHeaders = Symbol("sent-info-headers");
|
||||
const kQuotedString = /^[\x09\x20-\x5b\x5d-\x7e\x80-\xff]*$/;
|
||||
|
||||
const MAX_ADDITIONAL_SETTINGS = 10;
|
||||
const Stream = require("node:stream");
|
||||
const { Readable } = Stream;
|
||||
type Http2ConnectOptions = {
|
||||
@@ -73,6 +99,8 @@ const kAppendHeader = Symbol("appendHeader");
|
||||
const kAborted = Symbol("aborted");
|
||||
const kRequest = Symbol("request");
|
||||
const kHeadRequest = Symbol("headRequest");
|
||||
const kMaxStreams = 2 ** 32 - 1;
|
||||
const kMaxInt = 4294967295;
|
||||
const {
|
||||
validateInteger,
|
||||
validateString,
|
||||
@@ -81,6 +109,8 @@ const {
|
||||
checkIsHttpToken,
|
||||
validateLinkHeaderValue,
|
||||
validateUint32,
|
||||
validateBuffer,
|
||||
validateNumber,
|
||||
} = require("internal/validators");
|
||||
|
||||
let utcCache;
|
||||
@@ -267,7 +297,19 @@ function assertValidHeader(name, value) {
|
||||
connectionHeaderMessageWarn();
|
||||
}
|
||||
}
|
||||
function assertIsObject(value: any, name: string, types?: string): asserts value is object {
|
||||
if (value !== undefined && (!$isObject(value) || $isArray(value))) {
|
||||
throw $ERR_INVALID_ARG_TYPE(name, types || "Object", value);
|
||||
}
|
||||
}
|
||||
|
||||
function assertIsArray(value: any, name: string, types?: string): asserts value is any[] {
|
||||
if (value !== undefined && !$isArray(value)) {
|
||||
throw $ERR_INVALID_ARG_TYPE(name, types || "Array", value);
|
||||
}
|
||||
}
|
||||
hideFromStack(assertIsObject);
|
||||
hideFromStack(assertIsArray);
|
||||
hideFromStack(assertValidHeader);
|
||||
|
||||
class Http2ServerRequest extends Readable {
|
||||
@@ -1512,6 +1554,17 @@ type Settings = {
|
||||
class Http2Session extends EventEmitter {
|
||||
[bunHTTP2Socket]: TLSSocket | Socket | null;
|
||||
[bunHTTP2OriginSet]: Set<string> | undefined = undefined;
|
||||
[EventEmitter.captureRejectionSymbol](err, event, ...args) {
|
||||
switch (event) {
|
||||
case "stream": {
|
||||
const stream = args[0];
|
||||
stream.destroy(err);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
this.destroy(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function streamErrorFromCode(code: number) {
|
||||
@@ -1537,6 +1590,15 @@ function assertSession(session) {
|
||||
hideFromStack(assertSession);
|
||||
|
||||
function pushToStream(stream, data) {
|
||||
if (data && stream[bunHTTP2StreamStatus] & StreamState.Closed) {
|
||||
if (!stream._readableState.ended) {
|
||||
// closed, but not ended, so resume and push null to end the stream
|
||||
stream.resume();
|
||||
stream.push(null);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
stream.push(data);
|
||||
}
|
||||
|
||||
@@ -1567,7 +1629,10 @@ function markStreamClosed(stream: Http2Stream) {
|
||||
markWritableDone(stream);
|
||||
}
|
||||
}
|
||||
|
||||
function rstNextTick(id: number, rstCode: number) {
|
||||
const session = this as Http2Session;
|
||||
session[bunHTTP2Native]?.rstStream(id, rstCode);
|
||||
}
|
||||
class Http2Stream extends Duplex {
|
||||
#id: number;
|
||||
[bunHTTP2Session]: ClientHttp2Session | ServerHttp2Session | null = null;
|
||||
@@ -1720,21 +1785,21 @@ class Http2Stream extends Duplex {
|
||||
const session = this[bunHTTP2Session];
|
||||
assertSession(session);
|
||||
code = code || 0;
|
||||
validateInteger(code, "code", 0, 13);
|
||||
validateInteger(code, "code", 0, kMaxInt);
|
||||
|
||||
if (typeof callback !== "undefined") {
|
||||
validateFunction(callback, "callback");
|
||||
this.once("close", callback);
|
||||
}
|
||||
this.rstCode = code;
|
||||
markStreamClosed(this);
|
||||
|
||||
session[bunHTTP2Native]?.rstStream(this.#id, code);
|
||||
}
|
||||
|
||||
if (typeof callback === "function") {
|
||||
this.once("close", callback);
|
||||
}
|
||||
}
|
||||
_destroy(err, callback) {
|
||||
const { ending } = this._writableState;
|
||||
this.push(null);
|
||||
|
||||
if (!ending) {
|
||||
// If the writable side of the Http2Stream is still open, emit the
|
||||
// 'aborted' event and set the aborted flag.
|
||||
@@ -1765,10 +1830,23 @@ class Http2Stream extends Duplex {
|
||||
rstCode = this.rstCode = 0;
|
||||
}
|
||||
}
|
||||
this.rstCode = rstCode;
|
||||
// RST code 8 not emitted as an error as its used by clients to signify
|
||||
// abort and is already covered by aborted event, also allows more
|
||||
// seamless compatibility with http1
|
||||
if (err == null && rstCode !== NGHTTP2_NO_ERROR && rstCode !== NGHTTP2_CANCEL)
|
||||
err = $ERR_HTTP2_STREAM_ERROR(nameForErrorCode[rstCode] || rstCode);
|
||||
|
||||
markStreamClosed(this);
|
||||
session[bunHTTP2Native]?.rstStream(this.#id, rstCode);
|
||||
this[bunHTTP2Session] = null;
|
||||
// This notifies the session that this stream has been destroyed and
|
||||
// gives the session the opportunity to clean itself up. The session
|
||||
// will destroy if it has been closed and there are no other open or
|
||||
// pending streams. Delay with setImmediate so we don't do it on the
|
||||
// nghttp2 stack.
|
||||
if (session) {
|
||||
setImmediate(rstNextTick.bind(session, this.#id, rstCode));
|
||||
}
|
||||
callback(err);
|
||||
}
|
||||
|
||||
@@ -1866,6 +1944,18 @@ class Http2Stream extends Duplex {
|
||||
callback();
|
||||
}
|
||||
}
|
||||
|
||||
[EventEmitter.captureRejectionSymbol](err, event, ...args) {
|
||||
switch (event) {
|
||||
case "stream": {
|
||||
const stream = args[0];
|
||||
stream.destroy(err);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
this.destroy(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
class ClientHttp2Stream extends Http2Stream {
|
||||
constructor(streamId, session, headers) {
|
||||
@@ -2201,16 +2291,17 @@ class ServerHttp2Stream extends Http2Stream {
|
||||
options = { ...options, endStream: true };
|
||||
endStream = true;
|
||||
}
|
||||
const sendDate = options?.sendDate;
|
||||
if (sendDate == null || sendDate) {
|
||||
const current_date = headers["date"];
|
||||
if (current_date == null) {
|
||||
headers["date"] = utcDate();
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof options === "undefined") {
|
||||
session[bunHTTP2Native]?.request(this.id, undefined, headers, sensitiveNames);
|
||||
} else {
|
||||
if (options.sendDate == null || options.sendDate) {
|
||||
const current_date = headers["date"];
|
||||
if (current_date === null || current_date === undefined) {
|
||||
headers["date"] = utcDate();
|
||||
}
|
||||
}
|
||||
session[bunHTTP2Native]?.request(this.id, undefined, headers, sensitiveNames, options);
|
||||
}
|
||||
this.headersSent = true;
|
||||
@@ -2445,7 +2536,7 @@ class ServerHttp2Session extends Http2Session {
|
||||
sensitiveHeadersValue: string[] | undefined,
|
||||
flags: number,
|
||||
) {
|
||||
if (!self || typeof stream !== "object") return;
|
||||
if (!self || typeof stream !== "object" || self.closed || stream.closed) return;
|
||||
const headers = toHeaderObject(rawheaders, sensitiveHeadersValue || []);
|
||||
if (headers[HTTP2_HEADER_METHOD] === HTTP2_METHOD_HEAD) {
|
||||
stream[kHeadRequest] = true;
|
||||
@@ -2755,8 +2846,15 @@ class ServerHttp2Session extends Http2Session {
|
||||
parser.ping(payload);
|
||||
return true;
|
||||
}
|
||||
goaway(errorCode, lastStreamId, opaqueData) {
|
||||
return this.#parser?.goaway(errorCode, lastStreamId, opaqueData);
|
||||
goaway(code = NGHTTP2_NO_ERROR, lastStreamID = 0, opaqueData) {
|
||||
if (this.destroyed) throw $ERR_HTTP2_INVALID_SESSION();
|
||||
|
||||
if (opaqueData !== undefined) {
|
||||
validateBuffer(opaqueData, "opaqueData");
|
||||
}
|
||||
validateNumber(code, "code");
|
||||
validateNumber(lastStreamID, "lastStreamID");
|
||||
return this.#parser?.goaway(code, lastStreamID, opaqueData);
|
||||
}
|
||||
|
||||
setLocalWindowSize(windowSize) {
|
||||
@@ -2787,9 +2885,14 @@ class ServerHttp2Session extends Http2Session {
|
||||
}
|
||||
}
|
||||
|
||||
destroy(error?: Error, code?: number) {
|
||||
const socket = this[bunHTTP2Socket];
|
||||
destroy(error: Error | number | undefined = NGHTTP2_NO_ERROR, code?: number) {
|
||||
if (typeof error === "number") {
|
||||
code = error;
|
||||
error = code !== NGHTTP2_NO_ERROR ? $ERR_HTTP2_SESSION_ERROR(code) : undefined;
|
||||
}
|
||||
|
||||
const socket = this[bunHTTP2Socket];
|
||||
if (!this.#connected) return;
|
||||
this.#closed = true;
|
||||
this.#connected = false;
|
||||
if (socket) {
|
||||
@@ -2904,7 +3007,7 @@ class ClientHttp2Session extends Http2Session {
|
||||
sensitiveHeadersValue: string[] | undefined,
|
||||
flags: number,
|
||||
) {
|
||||
if (!self || typeof stream !== "object") return;
|
||||
if (!self || typeof stream !== "object" || stream.rstCode) return;
|
||||
const headers = toHeaderObject(rawheaders, sensitiveHeadersValue || []);
|
||||
const status = stream[bunHTTP2StreamStatus];
|
||||
const header_status = headers[HTTP2_HEADER_STATUS];
|
||||
@@ -2973,10 +3076,8 @@ class ClientHttp2Session extends Http2Session {
|
||||
goaway(self: ClientHttp2Session, errorCode: number, lastStreamId: number, opaqueData: Buffer) {
|
||||
if (!self) return;
|
||||
self.emit("goaway", errorCode, lastStreamId, opaqueData || Buffer.allocUnsafe(0));
|
||||
if (errorCode !== 0) {
|
||||
self.#parser.emitErrorToAllStreams(errorCode);
|
||||
}
|
||||
self.close();
|
||||
if (self.closed) return;
|
||||
self.destroy(undefined, errorCode);
|
||||
},
|
||||
end(self: ClientHttp2Session, errorCode: number, lastStreamId: number, opaqueData: Buffer) {
|
||||
if (!self) return;
|
||||
@@ -3125,6 +3226,13 @@ class ClientHttp2Session extends Http2Session {
|
||||
ref() {
|
||||
return this[bunHTTP2Socket]?.ref();
|
||||
}
|
||||
setNextStreamID(id) {
|
||||
if (this.destroyed) throw $ERR_HTTP2_INVALID_SESSION();
|
||||
|
||||
validateNumber(id, "id");
|
||||
if (id <= 0 || id > kMaxStreams) throw $ERR_OUT_OF_RANGE("id", `> 0 and <= ${kMaxStreams}`, id);
|
||||
this.#parser?.setNextStreamID(id);
|
||||
}
|
||||
setTimeout(msecs, callback) {
|
||||
return this[bunHTTP2Socket]?.setTimeout(msecs, callback);
|
||||
}
|
||||
@@ -3192,19 +3300,34 @@ class ClientHttp2Session extends Http2Session {
|
||||
constructor(url: string | URL, options?: Http2ConnectOptions, listener?: Function) {
|
||||
super();
|
||||
|
||||
if (typeof url === "string") {
|
||||
url = new URL(url);
|
||||
}
|
||||
if (!(url instanceof URL)) {
|
||||
throw $ERR_INVALID_ARG_TYPE("url", "URL", url);
|
||||
}
|
||||
if (typeof options === "function") {
|
||||
listener = options;
|
||||
options = undefined;
|
||||
}
|
||||
|
||||
assertIsObject(options, "options");
|
||||
options = { ...options };
|
||||
|
||||
assertIsArray(options.remoteCustomSettings, "options.remoteCustomSettings");
|
||||
if (options.remoteCustomSettings) {
|
||||
options.remoteCustomSettings = [...options.remoteCustomSettings];
|
||||
if (options.remoteCustomSettings.length > MAX_ADDITIONAL_SETTINGS) throw $ERR_HTTP2_TOO_MANY_CUSTOM_SETTINGS();
|
||||
}
|
||||
|
||||
if (typeof url === "string") url = new URL(url);
|
||||
|
||||
assertIsObject(url, "authority", ["string", "Object", "URL"]);
|
||||
|
||||
this.#url = url;
|
||||
|
||||
const protocol = url.protocol || options?.protocol || "https:";
|
||||
switch (protocol) {
|
||||
case "http:":
|
||||
case "https:":
|
||||
break;
|
||||
default:
|
||||
throw $ERR_HTTP2_UNSUPPORTED_PROTOCOL(protocol);
|
||||
}
|
||||
const port = url.port ? parseInt(url.port, 10) : protocol === "http:" ? 80 : 443;
|
||||
|
||||
function onConnect() {
|
||||
@@ -3474,17 +3597,48 @@ function connectionListener(socket: Socket) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function initializeOptions(options) {
|
||||
assertIsObject(options, "options");
|
||||
options = { ...options };
|
||||
assertIsObject(options.settings, "options.settings");
|
||||
options.settings = { ...options.settings };
|
||||
|
||||
assertIsArray(options.remoteCustomSettings, "options.remoteCustomSettings");
|
||||
if (options.remoteCustomSettings) {
|
||||
options.remoteCustomSettings = [...options.remoteCustomSettings];
|
||||
if (options.remoteCustomSettings.length > MAX_ADDITIONAL_SETTINGS) throw $ERR_HTTP2_TOO_MANY_CUSTOM_SETTINGS();
|
||||
}
|
||||
|
||||
if (options.maxSessionInvalidFrames !== undefined)
|
||||
validateUint32(options.maxSessionInvalidFrames, "maxSessionInvalidFrames");
|
||||
|
||||
if (options.maxSessionRejectedStreams !== undefined) {
|
||||
validateUint32(options.maxSessionRejectedStreams, "maxSessionRejectedStreams");
|
||||
}
|
||||
|
||||
if (options.unknownProtocolTimeout !== undefined)
|
||||
validateUint32(options.unknownProtocolTimeout, "unknownProtocolTimeout");
|
||||
else options.unknownProtocolTimeout = 10000;
|
||||
|
||||
// Used only with allowHTTP1
|
||||
// options.Http1IncomingMessage ||= http.IncomingMessage;
|
||||
// options.Http1ServerResponse ||= http.ServerResponse;
|
||||
|
||||
options.Http2ServerRequest ||= Http2ServerRequest;
|
||||
options.Http2ServerResponse ||= Http2ServerResponse;
|
||||
return options;
|
||||
}
|
||||
|
||||
class Http2Server extends net.Server {
|
||||
timeout = 0;
|
||||
constructor(options, onRequestHandler) {
|
||||
if (typeof options === "function") {
|
||||
onRequestHandler = options;
|
||||
options = {};
|
||||
} else if (options == null || typeof options == "object") {
|
||||
options = { ...options };
|
||||
} else {
|
||||
throw $ERR_INVALID_ARG_TYPE("options", "object", options);
|
||||
}
|
||||
options = initializeOptions(options);
|
||||
|
||||
super(options, connectionListener);
|
||||
this.setMaxListeners(0);
|
||||
|
||||
@@ -3509,6 +3663,39 @@ class Http2Server extends net.Server {
|
||||
}
|
||||
}
|
||||
|
||||
Http2Server.prototype[EventEmitter.captureRejectionSymbol] = function (err, event, ...args) {
|
||||
switch (event) {
|
||||
case "stream": {
|
||||
const { 0: stream } = args;
|
||||
if (stream.sentHeaders) {
|
||||
stream.destroy(err);
|
||||
} else {
|
||||
stream.respond({ [HTTP2_HEADER_STATUS]: 500 });
|
||||
stream.end();
|
||||
}
|
||||
break;
|
||||
}
|
||||
case "request": {
|
||||
const { 1: res } = args;
|
||||
if (!res.headersSent && !res.finished) {
|
||||
// Don't leak headers.
|
||||
for (const name of res.getHeaderNames()) {
|
||||
res.removeHeader(name);
|
||||
}
|
||||
res.statusCode = 500;
|
||||
res.end(STATUS_CODES[500]);
|
||||
} else {
|
||||
res.destroy();
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
// args.unshift(err, event);
|
||||
// ReflectApply(net.Server.prototype[EventEmitter.captureRejectionSymbol], this, args);
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
function onErrorSecureServerSession(err, socket) {
|
||||
if (!this.emit("clientError", err, socket)) socket.destroy(err);
|
||||
}
|
||||
|
||||
@@ -362,7 +362,9 @@ class Worker extends EventEmitter {
|
||||
#onError(event: ErrorEvent) {
|
||||
this.#isRunning = false;
|
||||
let error = event?.error;
|
||||
if (!error) {
|
||||
// if the thrown value serialized successfully, the message will be empty
|
||||
// if not the message is the actual error
|
||||
if (event.message !== "") {
|
||||
error = new Error(event.message, { cause: event });
|
||||
const stack = event?.stack;
|
||||
if (stack) {
|
||||
|
||||
@@ -647,10 +647,10 @@ fn NewLexer_(
|
||||
}
|
||||
}
|
||||
|
||||
pub const InnerStringLiteral = packed struct { suffix_len: u8, needs_decode: bool };
|
||||
pub const InnerStringLiteral = packed struct(u8) { suffix_len: u2, needs_decode: bool, _padding: u5 = 0 };
|
||||
|
||||
fn parseStringLiteralInner(lexer: *LexerType, comptime quote: CodePoint) !InnerStringLiteral {
|
||||
var suffix_len: u8 = if (comptime quote == 0) 0 else 1;
|
||||
var suffix_len: u2 = if (comptime quote == 0) 0 else 1;
|
||||
var needs_decode = false;
|
||||
stringLiteral: while (true) {
|
||||
switch (lexer.code_point) {
|
||||
|
||||
@@ -154,7 +154,7 @@ pub const URL = struct {
|
||||
}
|
||||
|
||||
pub fn hasValidPort(this: *const URL) bool {
|
||||
return (this.getPort() orelse 0) > 0;
|
||||
return this.port.len == 0 or (this.getPort() orelse 0) > 0;
|
||||
}
|
||||
|
||||
pub fn isEmpty(this: *const URL) bool {
|
||||
|
||||
@@ -263,17 +263,23 @@ describe("bundler", () => {
|
||||
run: { stdout: "ok" },
|
||||
});
|
||||
|
||||
for (const additionalOptions of [
|
||||
const additionalOptionsIters: Array<{
|
||||
bytecode?: boolean;
|
||||
minify?: boolean;
|
||||
format: "cjs" | "esm";
|
||||
}> = [
|
||||
{ bytecode: true, minify: true, format: "cjs" },
|
||||
{ format: "cjs" },
|
||||
{ format: "cjs", minify: true },
|
||||
{ format: "esm" },
|
||||
{ format: "esm", minify: true },
|
||||
]) {
|
||||
];
|
||||
|
||||
for (const additionalOptions of additionalOptionsIters) {
|
||||
const { bytecode = false, format, minify = false } = additionalOptions;
|
||||
const NODE_ENV = minify ? "'production'" : undefined;
|
||||
itBundled("compile/ReactSSR" + (bytecode ? "+bytecode" : "") + "+" + format + (minify ? "+minify" : ""), {
|
||||
install: ["react@next", "react-dom@next"],
|
||||
install: ["react@19.2.0-canary-b94603b9-20250513", "react-dom@19.2.0-canary-b94603b9-20250513"],
|
||||
format,
|
||||
minifySyntax: minify,
|
||||
minifyIdentifiers: minify,
|
||||
@@ -316,8 +322,7 @@ describe("bundler", () => {
|
||||
`,
|
||||
},
|
||||
run: {
|
||||
stdout:
|
||||
'<!DOCTYPE html><html><head><link rel="expect" href="#«R»" blocking="render"/></head><body><h1>Hello World</h1><p>This is an example.</p><template id="«R»"></template></body></html>',
|
||||
stdout: "<!DOCTYPE html><html><head></head><body><h1>Hello World</h1><p>This is an example.</p></body></html>",
|
||||
stderr: bytecode
|
||||
? "[Disk Cache] Cache hit for sourceCode\n[Disk Cache] Cache miss for sourceCode\n"
|
||||
: undefined,
|
||||
|
||||
@@ -1434,3 +1434,28 @@ exports[`esbuild-bundler css/CSSAndJavaScriptCodeSplittingESBuildIssue1064: /d.c
|
||||
"@import "./shared.css";
|
||||
body { color: blue }"
|
||||
`;
|
||||
|
||||
exports[`bundler css/ComposesWithSharedPropertiesError 1`] = `
|
||||
"// styles.module.css
|
||||
var styles_module_default = {
|
||||
button: "otherButton_NlEjJA button_-MSaAA"
|
||||
};
|
||||
|
||||
// entry.js
|
||||
console.log(styles_module_default);
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`bundler css/ComposesWithSharedPropertiesError 2`] = `
|
||||
"/* other.module.css */
|
||||
.otherButton_NlEjJA {
|
||||
color: red;
|
||||
font-size: 16px;
|
||||
}
|
||||
|
||||
/* styles.module.css */
|
||||
.button_-MSaAA {
|
||||
color: #00f;
|
||||
}
|
||||
"
|
||||
`;
|
||||
|
||||
@@ -122,7 +122,7 @@ export interface BundlerTestInput {
|
||||
todo?: boolean;
|
||||
|
||||
// file options
|
||||
files: Record<string, string | Buffer | Blob>;
|
||||
files: Record<string, string | Buffer | Uint8ClampedArray | Blob>;
|
||||
/** Files to be written only after the bundle is done. */
|
||||
runtimeFiles?: Record<string, string | Buffer>;
|
||||
/** Defaults to the first item in `files` */
|
||||
|
||||
@@ -53,6 +53,51 @@ describe("css tests", () => {
|
||||
'.foo{content:"+";}',
|
||||
);
|
||||
|
||||
describe("custom property cases", () => {
|
||||
cssTest(
|
||||
`div {
|
||||
--foo: 1 1 1 rgba(1, 1, 1, 0.1), 2 2 2 rgba(2, 2, 2, 0.2);
|
||||
--bar: 1 1 1 #01010116, 2 2 2 #02020233;
|
||||
}`,
|
||||
`div {
|
||||
--foo: 1 1 1 #0101011a, 2 2 2 #02020233;
|
||||
--bar: 1 1 1 #01010116, 2 2 2 #02020233;
|
||||
}`,
|
||||
);
|
||||
cssTest(
|
||||
`:root {
|
||||
--my-color: red;
|
||||
--my-bg: white;
|
||||
--my-font-size: 16px;
|
||||
}
|
||||
.element {
|
||||
color: var(--my-color);
|
||||
background-color: var(--my-bg);
|
||||
font-size: var(--my-font-size);
|
||||
}`,
|
||||
`:root {
|
||||
--my-color: red;
|
||||
--my-bg: white;
|
||||
--my-font-size: 16px;
|
||||
}
|
||||
.element {
|
||||
color: var(--my-color);
|
||||
background-color: var(--my-bg);
|
||||
font-size: var(--my-font-size);
|
||||
}`,
|
||||
);
|
||||
cssTest(
|
||||
`div {
|
||||
--custom-padding: calc(20px * 10px);
|
||||
padding: var(--custom-padding);
|
||||
}`,
|
||||
`div {
|
||||
--custom-padding: calc(20px * 10px);
|
||||
padding: var(--custom-padding);
|
||||
}`,
|
||||
);
|
||||
});
|
||||
|
||||
describe("pseudo-class edge case", () => {
|
||||
cssTest(
|
||||
indoc`[type="file"]::file-selector-button:-moz-any() {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { afterEach, beforeEach, describe, expect, it } from "bun:test";
|
||||
import { bunEnv, bunExe, isCI, nodeExe } from "harness";
|
||||
import { createTest } from "node-harness";
|
||||
import fs from "node:fs";
|
||||
import http2 from "node:http2";
|
||||
import net from "node:net";
|
||||
@@ -9,7 +9,16 @@ import tls from "node:tls";
|
||||
import { Duplex } from "stream";
|
||||
import http2utils from "./helpers";
|
||||
import { nodeEchoServer, TLS_CERT, TLS_OPTIONS } from "./http2-helpers";
|
||||
const { afterEach, beforeEach, describe, expect, it, createCallCheckCtx } = createTest(import.meta.path);
|
||||
function invalidArgTypeHelper(input) {
|
||||
if (input === null) return " Received null";
|
||||
|
||||
if (typeof input == "symbol") return ` Received type symbol`;
|
||||
if (typeof input == "object")
|
||||
return ` Received an instance of ${Object.prototype.toString.call(input).split(" ")[1]?.replace("]", "")?.replace("[", "")}`;
|
||||
if (typeof input == "string") return ` Received type string ('${input}')`;
|
||||
return ` Received type ${typeof input} (${input})`;
|
||||
}
|
||||
for (const nodeExecutable of [nodeExe(), bunExe()]) {
|
||||
describe(`${path.basename(nodeExecutable)}`, () => {
|
||||
let nodeEchoServer_;
|
||||
@@ -899,21 +908,7 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) {
|
||||
expect(stream).toBeDefined();
|
||||
expect(stream.id).toBe(1);
|
||||
});
|
||||
it("should wait request to be sent before closing", async () => {
|
||||
const { promise, resolve, reject } = Promise.withResolvers();
|
||||
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
|
||||
client.on("error", reject);
|
||||
const req = client.request({ ":path": "/" });
|
||||
let response_headers = null;
|
||||
req.on("response", (headers, flags) => {
|
||||
response_headers = headers;
|
||||
});
|
||||
client.close(resolve);
|
||||
req.end();
|
||||
await promise;
|
||||
expect(response_headers).toBeTruthy();
|
||||
expect(response_headers[":status"]).toBe(200);
|
||||
});
|
||||
|
||||
it("wantTrailers should work", async () => {
|
||||
const { promise, resolve, reject } = Promise.withResolvers();
|
||||
const client = http2.connect(HTTPS_SERVER, TLS_OPTIONS);
|
||||
@@ -1334,3 +1329,585 @@ it("sensitive headers should work", async () => {
|
||||
client?.close?.();
|
||||
}
|
||||
});
|
||||
|
||||
it("http2 session.goaway() validates input types", async done => {
|
||||
const { mustCall } = createCallCheckCtx(done);
|
||||
const server = http2.createServer((req, res) => {
|
||||
res.end();
|
||||
});
|
||||
const types = [true, {}, [], null, new Date()];
|
||||
return await new Promise(resolve => {
|
||||
server.on(
|
||||
"stream",
|
||||
mustCall(stream => {
|
||||
const session = stream.session;
|
||||
|
||||
for (const input of types) {
|
||||
const received = invalidArgTypeHelper(input);
|
||||
|
||||
// Test code argument
|
||||
expect(() => session.goaway(input)).toThrow('The "code" argument must be of type number.' + received);
|
||||
|
||||
// Test lastStreamID argument
|
||||
expect(() => session.goaway(0, input)).toThrow(
|
||||
'The "lastStreamID" argument must be of type number.' + received,
|
||||
);
|
||||
|
||||
// Test opaqueData argument
|
||||
expect(() => session.goaway(0, 0, input)).toThrow(
|
||||
'The "opaqueData" argument must be of type Buffer, ' + `TypedArray, or DataView.${received}`,
|
||||
);
|
||||
}
|
||||
|
||||
server.close();
|
||||
resolve();
|
||||
}),
|
||||
);
|
||||
|
||||
server.listen(0, () => {
|
||||
const port = server.address().port;
|
||||
const client = http2.connect(`http://localhost:${port}`);
|
||||
const req = client.request();
|
||||
|
||||
req.resume();
|
||||
req.end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("http2 stream.close() validates input types and ranges", async () => {
|
||||
const server = http2.createServer();
|
||||
|
||||
return await new Promise(resolve => {
|
||||
server.on("stream", stream => {
|
||||
// Test string input
|
||||
expect(() => stream.close("string")).toThrow(
|
||||
'The "code" argument must be of type number. ' + "Received type string ('string')",
|
||||
);
|
||||
|
||||
// Test non-integer number
|
||||
expect(() => stream.close(1.01)).toThrow(
|
||||
'The value of "code" is out of range. It must be an integer. ' + "Received 1.01",
|
||||
);
|
||||
|
||||
// Test out of range values
|
||||
[-1, 2 ** 32].forEach(code => {
|
||||
expect(() => stream.close(code)).toThrow(
|
||||
`The value of "code" is out of range. It must be >= 0 and <= 4294967295. Received ${code}`,
|
||||
);
|
||||
});
|
||||
|
||||
// Complete the stream
|
||||
stream.respond();
|
||||
stream.end("ok");
|
||||
});
|
||||
|
||||
server.listen(0, () => {
|
||||
const port = server.address().port;
|
||||
const client = http2.connect(`http://localhost:${port}`);
|
||||
const req = client.request();
|
||||
|
||||
req.resume();
|
||||
req.on("close", () => {
|
||||
server.close();
|
||||
client.close();
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("http2 session.goaway() sends custom data", async done => {
|
||||
const { mustCall } = createCallCheckCtx(done);
|
||||
|
||||
const data = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]);
|
||||
|
||||
let session;
|
||||
|
||||
const server = http2.createServer();
|
||||
|
||||
return await new Promise(resolve => {
|
||||
server.on("stream", stream => {
|
||||
session = stream.session;
|
||||
session.on("close", () => {});
|
||||
|
||||
// Send GOAWAY frame with custom data
|
||||
session.goaway(0, 0, data);
|
||||
|
||||
// Complete the stream
|
||||
stream.respond();
|
||||
stream.end();
|
||||
});
|
||||
|
||||
server.on("close", mustCall());
|
||||
|
||||
server.listen(0, () => {
|
||||
const port = server.address().port;
|
||||
const client = http2.connect(`http://localhost:${port}`);
|
||||
|
||||
client.once("goaway", (code, lastStreamID, buf) => {
|
||||
// Verify the GOAWAY frame parameters
|
||||
expect(code).toBe(0);
|
||||
expect(lastStreamID).toBe(1);
|
||||
expect(buf).toEqual(data);
|
||||
|
||||
// Clean up
|
||||
session.close();
|
||||
server.close();
|
||||
resolve();
|
||||
});
|
||||
|
||||
const req = client.request();
|
||||
req.resume();
|
||||
req.on("end", mustCall());
|
||||
req.on("close", mustCall());
|
||||
req.end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("http2 server with minimal maxSessionMemory handles multiple requests", async () => {
|
||||
const server = http2.createServer({ maxSessionMemory: 1 });
|
||||
|
||||
return await new Promise(resolve => {
|
||||
server.on("session", session => {
|
||||
session.on("stream", stream => {
|
||||
stream.on("end", function () {
|
||||
this.respond(
|
||||
{
|
||||
":status": 200,
|
||||
},
|
||||
{
|
||||
endStream: true,
|
||||
},
|
||||
);
|
||||
});
|
||||
stream.resume();
|
||||
});
|
||||
});
|
||||
|
||||
server.listen(0, () => {
|
||||
const port = server.address().port;
|
||||
const client = http2.connect(`http://localhost:${port}`);
|
||||
|
||||
function next(i) {
|
||||
if (i === 10000) {
|
||||
client.close();
|
||||
server.close();
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
const stream = client.request({ ":method": "POST" });
|
||||
|
||||
stream.on("response", function (headers) {
|
||||
expect(headers[":status"]).toBe(200);
|
||||
|
||||
this.on("close", () => next(i + 1));
|
||||
});
|
||||
|
||||
stream.end();
|
||||
}
|
||||
|
||||
// Start the sequence with the first request
|
||||
next(0);
|
||||
});
|
||||
});
|
||||
}, 10_000);
|
||||
|
||||
it("http2.createServer validates input options", () => {
|
||||
// Test invalid options passed to createServer
|
||||
const invalidOptions = [1, true, "test", null, Symbol("test")];
|
||||
|
||||
invalidOptions.forEach(invalidOption => {
|
||||
expect(() => http2.createServer(invalidOption)).toThrow(
|
||||
'The "options" argument must be of type Object.' + invalidArgTypeHelper(invalidOption),
|
||||
);
|
||||
});
|
||||
|
||||
// Test invalid options.settings passed to createServer
|
||||
invalidOptions.forEach(invalidSettingsOption => {
|
||||
expect(() => http2.createServer({ settings: invalidSettingsOption })).toThrow(
|
||||
'The "options.settings" property must be of type Object.' + invalidArgTypeHelper(invalidSettingsOption),
|
||||
);
|
||||
});
|
||||
|
||||
// Test that http2.createServer validates numeric range options
|
||||
const rangeTests = {
|
||||
maxSessionInvalidFrames: [
|
||||
{
|
||||
val: -1,
|
||||
err: {
|
||||
name: "RangeError",
|
||||
code: "ERR_OUT_OF_RANGE",
|
||||
},
|
||||
},
|
||||
{
|
||||
val: Number.NEGATIVE_INFINITY,
|
||||
err: {
|
||||
name: "RangeError",
|
||||
code: "ERR_OUT_OF_RANGE",
|
||||
},
|
||||
},
|
||||
],
|
||||
maxSessionRejectedStreams: [
|
||||
{
|
||||
val: -1,
|
||||
err: {
|
||||
name: "RangeError",
|
||||
code: "ERR_OUT_OF_RANGE",
|
||||
},
|
||||
},
|
||||
{
|
||||
val: Number.NEGATIVE_INFINITY,
|
||||
err: {
|
||||
name: "RangeError",
|
||||
code: "ERR_OUT_OF_RANGE",
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
Object.entries(rangeTests).forEach(([opt, tests]) => {
|
||||
tests.forEach(({ val, err }) => {
|
||||
expect(() => http2.createServer({ [opt]: val })).toThrow();
|
||||
|
||||
// Note: Bun's expect doesn't have the same detailed error matching as Node's assert,
|
||||
// so we're just checking that it throws an error with the expected name
|
||||
let error;
|
||||
try {
|
||||
http2.createServer({ [opt]: val });
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
|
||||
expect(error).toBeTruthy();
|
||||
expect(error?.name).toBe(err.name);
|
||||
expect(error?.code).toBe(err.code);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("http2 server handles multiple concurrent requests", async () => {
|
||||
const body = "<html><head></head><body><h1>this is some data</h2></body></html>";
|
||||
const server = http2.createServer();
|
||||
const count = 100;
|
||||
|
||||
// Stream handler
|
||||
function onStream(stream, headers, flags) {
|
||||
expect(headers[":scheme"]).toBe("http");
|
||||
expect(headers[":authority"]).toBeTruthy();
|
||||
expect(headers[":method"]).toBe("GET");
|
||||
expect(flags).toBe(5);
|
||||
|
||||
stream.respond({
|
||||
"content-type": "text/html",
|
||||
":status": 200,
|
||||
});
|
||||
|
||||
stream.write(body.slice(0, 20));
|
||||
stream.end(body.slice(20));
|
||||
}
|
||||
|
||||
// Register stream handler
|
||||
server.on("stream", (stream, headers, flags) => onStream(stream, headers, flags));
|
||||
|
||||
return await new Promise(resolve => {
|
||||
server.on("close", () => {
|
||||
resolve();
|
||||
});
|
||||
|
||||
server.listen(0);
|
||||
|
||||
server.on("listening", () => {
|
||||
const port = server.address().port;
|
||||
const client = http2.connect(`http://localhost:${port}`);
|
||||
|
||||
client.setMaxListeners(101);
|
||||
client.on("goaway", console.log);
|
||||
|
||||
client.on("connect", () => {
|
||||
expect(client.encrypted).toBeFalsy();
|
||||
expect(client.originSet).toBeFalsy();
|
||||
expect(client.alpnProtocol).toBe("h2c");
|
||||
});
|
||||
|
||||
let countdown = count;
|
||||
function countDown() {
|
||||
countdown--;
|
||||
if (countdown === 0) {
|
||||
client.close();
|
||||
server.close();
|
||||
}
|
||||
}
|
||||
|
||||
for (let n = 0; n < count; n++) {
|
||||
const req = client.request();
|
||||
|
||||
req.on("response", function (headers) {
|
||||
expect(headers[":status"]).toBe(200);
|
||||
expect(headers["content-type"]).toBe("text/html");
|
||||
expect(headers.date).toBeTruthy();
|
||||
});
|
||||
|
||||
let data = "";
|
||||
req.setEncoding("utf8");
|
||||
req.on("data", d => (data += d));
|
||||
|
||||
req.on("end", () => {
|
||||
expect(body).toBe(data);
|
||||
});
|
||||
|
||||
req.on("close", () => countDown());
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("http2 connect supports various URL formats", async done => {
|
||||
const { mustCall } = createCallCheckCtx(done);
|
||||
return await new Promise(resolve => {
|
||||
const server = http2.createServer();
|
||||
server.listen(0);
|
||||
|
||||
server.on("listening", function () {
|
||||
const port = this.address().port;
|
||||
|
||||
const items = [
|
||||
[`http://localhost:${port}`],
|
||||
[new URL(`http://localhost:${port}`)],
|
||||
[{ protocol: "http:", hostname: "localhost", port }],
|
||||
[{ port }, { protocol: "http:" }],
|
||||
[{ port, hostname: "127.0.0.1" }, { protocol: "http:" }],
|
||||
];
|
||||
|
||||
let countdown = items.length + 1;
|
||||
function countDown() {
|
||||
countdown--;
|
||||
if (countdown === 0) {
|
||||
setImmediate(() => {
|
||||
server.close();
|
||||
resolve();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const maybeClose = client => {
|
||||
client.close();
|
||||
countDown();
|
||||
};
|
||||
|
||||
items.forEach(i => {
|
||||
const client = http2.connect.apply(null, i);
|
||||
client.on("connect", () => maybeClose(client));
|
||||
client.on("close", mustCall());
|
||||
});
|
||||
|
||||
// Will fail because protocol does not match the server.
|
||||
const client = http2.connect({
|
||||
port: port,
|
||||
protocol: "https:",
|
||||
});
|
||||
client.on("error", () => countDown());
|
||||
client.on("close", mustCall());
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it("http2 request.close() validates input and manages stream state", async done => {
  const { mustCall } = createCallCheckCtx(done);
  const server = http2.createServer();

  server.on("stream", stream => {
    stream.on("close", () => {});
    stream.respond();
    stream.end("ok");
  });

  return await new Promise(resolve => {
    server.listen(0, () => {
      const port = server.address().port;
      const client = http2.connect(`http://localhost:${port}`);
      const req = client.request();
      const expectedCode = 1;

      // A code outside the uint32 range must be rejected up front.
      expect(() => req.close(2 ** 32)).toThrow(
        'The value of "code" is out of range. It must be >= 0 and <= 4294967295. Received 4294967296',
      );
      expect(req.closed).toBe(false);

      // Anything that is not a function is an invalid callback argument.
      for (const notFunction of [true, 1, {}, [], null, "test"]) {
        expect(() => req.close(expectedCode, notFunction)).toThrow();
        expect(req.closed).toBe(false);
      }

      // A valid close() call with a callback marks the stream closed.
      req.close(expectedCode, mustCall());
      expect(req.closed).toBe(true);

      // Wrap _destroy so we can assert it is actually invoked.
      const realDestroy = req._destroy;
      req._destroy = mustCall((...args) => {
        return realDestroy.apply(req, args);
      });

      // A second close() with a different code must be a no-op.
      req.close(expectedCode + 1);

      req.on("close", () => {
        expect(req.destroyed).toBe(true);
        expect(req.rstCode).toBe(expectedCode);

        server.close();
        client.close();
        resolve();
      });

      req.on("error", err => {
        expect(err.code).toBe("ERR_HTTP2_STREAM_ERROR");
        expect(err.name).toBe("Error");
        expect(err.message).toBe("Stream closed with error code NGHTTP2_PROTOCOL_ERROR");
      });

      // The `response` event should not fire as the server should receive the
      // RST_STREAM frame before it ever has a chance to reply.
      req.on("response", () => {
        throw new Error("Response event should not be called");
      });

      // The `end` event should still fire as we close the readable stream by
      // pushing a `null` chunk.
      req.on("end", mustCall());

      req.resume();
      req.end();
    });
  });
});
|
||||
|
||||
it("http2 client.setNextStreamID validates input", async () => {
  const server = http2.createServer();

  server.on("stream", stream => {
    stream.respond();
    stream.end("ok");
  });

  // Sample values covering each non-number type; the key names are the
  // type labels used to skip the one valid case below.
  const samplesByType = {
    boolean: true,
    function: () => {},
    number: 1,
    object: {},
    array: [],
    null: null,
    symbol: Symbol("test"),
  };

  return await new Promise(resolve => {
    server.listen(0, () => {
      const port = server.address().port;
      const client = http2.connect(`http://localhost:${port}`);

      client.on("connect", () => {
        // A stream id above the uint32 range must throw a RangeError.
        const outOfRangeNum = 2 ** 32;
        expect(() => client.setNextStreamID(outOfRangeNum)).toThrow(
          `The value of "id" is out of range. It must be > 0 and <= 4294967295. Received ${outOfRangeNum}`,
        );

        // Every non-number type must be rejected with ERR_INVALID_ARG_TYPE.
        for (const [type, value] of Object.entries(samplesByType)) {
          if (type === "number") {
            continue;
          }

          try {
            client.setNextStreamID(value);
            // Reaching this point means no throw happened — fail the test.
            expect(false).toBe(true);
          } catch (err) {
            expect(err.name).toBe("TypeError");
            expect(err.code).toBe("ERR_INVALID_ARG_TYPE");
            expect(err.message).toContain('The "id" argument must be of type number');
          }
        }

        server.close();
        client.close();
        resolve();
      });
    });
  });
});
|
||||
|
||||
it("http2 request.destroy() with error", async () => {
  const server = http2.createServer();

  // Do not mustCall the server side callbacks, they may or may not be called
  // depending on the OS. The determination is based largely on operating
  // system specific timings
  server.on("stream", stream => {
    // Do not wrap in a must call or use common.expectsError (which now uses
    // must call). The error may or may not be reported depending on operating
    // system specific timings.
    stream.on("error", err => {
      expect(err.code).toBe("ERR_HTTP2_STREAM_ERROR");
      expect(err.message).toBe("Stream closed with error code NGHTTP2_INTERNAL_ERROR");
    });

    stream.respond();
    stream.end();
  });

  return new Promise(resolve => {
    server.listen(0, () => {
      const port = server.address().port;
      const client = http2.connect(`http://localhost:${port}`);

      // Resolve once both the connection and the request close have happened.
      let pending = 2;
      const settle = () => {
        pending -= 1;
        if (pending === 0) {
          server.close();
          client.close();
          resolve();
        }
      };

      client.on("connect", () => settle());

      const req = client.request();

      // Destroy the request with an error
      req.destroy(new Error("test"));

      // Error event should receive the provided error
      req.on("error", err => {
        expect(err.name).toBe("Error");
        expect(err.message).toBe("test");
      });

      // Close event should fire with the correct reset code
      req.on("close", () => {
        expect(req.rstCode).toBe(http2.constants.NGHTTP2_INTERNAL_ERROR);
        settle();
      });

      // These events should not fire since the stream is destroyed
      req.on("response", () => {
        throw new Error("response event should not be called");
      });

      req.resume();

      req.on("end", () => {
        throw new Error("end event should not be called");
      });
    });
  });
});
|
||||
|
||||
134
test/js/node/test/parallel/test-abortsignal-any.mjs
Normal file
134
test/js/node/test/parallel/test-abortsignal-any.mjs
Normal file
@@ -0,0 +1,134 @@
|
||||
import * as common from '../common/index.mjs';
|
||||
import { describe, it } from 'node:test';
|
||||
import { once } from 'node:events';
|
||||
import assert from 'node:assert';
|
||||
|
||||
describe('AbortSignal.any()', { concurrency: !process.env.TEST_PARALLEL }, () => {
  it('should throw when not receiving an array', () => {
    const expectedError = { code: 'ERR_INVALID_ARG_TYPE' };
    for (const badInput of [undefined, null]) {
      assert.throws(() => AbortSignal.any(badInput), expectedError);
    }
    assert.throws(() => AbortSignal.any(), expectedError);
  });

  it('should throw when input contains non-signal values', () => {
    assert.throws(
      () => AbortSignal.any([AbortSignal.abort(), undefined]),
      {
        code: 'ERR_INVALID_ARG_TYPE',
        message: 'The "signals[1]" argument must be an instance of AbortSignal. Received undefined'
      },
    );
  });

  it('creates a non-aborted signal for an empty input', () => {
    const combined = AbortSignal.any([]);
    assert.strictEqual(combined.aborted, false);
    combined.addEventListener('abort', common.mustNotCall());
  });

  it('returns a new signal', () => {
    const source = new AbortController().signal;
    const combined = AbortSignal.any([source]);
    assert.notStrictEqual(source, combined);
  });

  it('returns an aborted signal if input has an aborted signal', () => {
    const combined = AbortSignal.any([AbortSignal.abort('some reason')]);
    assert.strictEqual(combined.aborted, true);
    assert.strictEqual(combined.reason, 'some reason');
    combined.addEventListener('abort', common.mustNotCall());
  });

  it('returns an aborted signal with the reason of first aborted signal input', () => {
    const combined = AbortSignal.any([AbortSignal.abort('some reason'), AbortSignal.abort('another reason')]);
    assert.strictEqual(combined.aborted, true);
    assert.strictEqual(combined.reason, 'some reason');
    combined.addEventListener('abort', common.mustNotCall());
  });

  it('returns the correct signal in the event target', async () => {
    const combined = AbortSignal.any([AbortSignal.timeout(5)]);
    const keepAlive = setInterval(() => {}, 100000); // Keep event loop alive
    const [{ target }] = await once(combined, 'abort');
    clearInterval(keepAlive);
    assert.strictEqual(target, combined);
    assert.ok(combined.aborted);
    assert.strictEqual(combined.reason.name, 'TimeoutError');
  });

  it('aborts with reason of first aborted signal', () => {
    const controllers = Array.from({ length: 3 }, () => new AbortController());
    const combined = AbortSignal.any(controllers.map((c) => c.signal));
    controllers[1].abort(1);
    controllers[2].abort(2);
    assert.ok(combined.aborted);
    assert.strictEqual(combined.reason, 1);
  });

  it('can accept the same signal more than once', () => {
    const controller = new AbortController();
    const combined = AbortSignal.any([controller.signal, controller.signal]);
    assert.strictEqual(combined.aborted, false);
    controller.abort('reason');
    assert.ok(combined.aborted);
    assert.strictEqual(combined.reason, 'reason');
  });

  it('handles deeply aborted signals', async () => {
    const controllers = Array.from({ length: 2 }, () => new AbortController());
    const inner = AbortSignal.any([controllers[0].signal]);
    const outer = AbortSignal.any([inner, controllers[1].signal]);

    outer.onabort = common.mustCall();
    controllers[0].abort();
    assert.ok(outer.aborted);
    assert.ok(outer.reason instanceof DOMException);
    assert.strictEqual(outer.reason.name, 'AbortError');
  });

  it('executes abort handlers in correct order', () => {
    const controller = new AbortController();
    const signals = [];
    signals.push(controller.signal);
    signals.push(AbortSignal.any([controller.signal]));
    signals.push(AbortSignal.any([controller.signal]));
    signals.push(AbortSignal.any([signals[0]]));
    signals.push(AbortSignal.any([signals[1]]));

    // Each handler appends its index; abort order must be registration order.
    let result = '';
    for (let i = 0; i < signals.length; i++) {
      signals[i].addEventListener('abort', () => result += i);
    }
    controller.abort();
    assert.strictEqual(result, '01234');
  });

  it('must accept WebIDL sequence', () => {
    const controller = new AbortController();
    const iterable = {
      *[Symbol.iterator]() {
        yield controller.signal;
        yield new AbortController().signal;
        yield new AbortController().signal;
        yield new AbortController().signal;
      },
    };
    const combined = AbortSignal.any(iterable);
    let fired = 0;
    combined.addEventListener('abort', () => fired += 1);
    controller.abort();
    assert.strictEqual(fired, 1);
  });

  it('throws TypeError if any value does not implement AbortSignal', () => {
    const expectedError = { code: 'ERR_INVALID_ARG_TYPE' };
    // Primitives, plain objects, and duck-typed lookalikes must all fail.
    const nonSignals = [
      [null],
      [undefined],
      ['123'],
      [123],
      [{}],
      [{ aborted: true }],
      [{ aborted: true, reason: '', throwIfAborted: null }],
    ];
    for (const input of nonSignals) {
      assert.throws(() => AbortSignal.any(input), expectedError);
    }
  });
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user