Compare commits

...

3 Commits

Author SHA1 Message Date
Claude Bot
d8eb0909a7 Consolidate dependency order and --parallel docs
Merged the two sections to reduce repetition and make the
documentation clearer and more concise.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-10-09 20:23:29 +00:00
Claude Bot
1f07fc505f Add comprehensive tests and documentation for --parallel flag
- Added 8 new tests in test/cli/run/filter-parallel.test.ts covering:
  - Running scripts in parallel without respecting dependency order
  - Parallel execution across multiple packages
  - Ignoring dependency chains with --parallel
  - Handling circular dependencies
  - Pre/post script ordering within packages
  - Exit code propagation
  - Pattern matching with --filter
  - Workspace integration

- Updated docs/cli/filter.md with new section on --parallel flag
- Updated docs/cli/run.md with example of --parallel usage

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-10-09 20:13:58 +00:00
Harry
b280e06bd8 feat: parallel arg for filter
2025-10-09 20:05:17 +00:00
6 changed files with 397 additions and 36 deletions

View File: docs/cli/filter.md

@@ -85,6 +85,20 @@ Filters respect your [workspace configuration](https://bun.com/docs/install/work
bun run --filter foo myscript
```
-### Dependency Order
+### Dependency Order and `--parallel`
-Bun will respect package dependency order when running scripts. Say you have a package `foo` that depends on another package `bar` in your workspace, and both packages have a `build` script. When you run `bun --filter '*' build`, you will notice that `foo` will only start running once `bar` is done.
+By default, Bun respects package dependency order when running scripts. If package `foo` depends on package `bar`, and both have a `build` script, `foo` will only start after `bar` completes:
+```bash
+bun --filter '*' build # bar runs first, then foo
+```
+To skip dependency ordering and run all scripts immediately, use the `--parallel` flag:
+```bash
+bun --filter '*' --parallel dev # all packages start at once
+```
+This is useful for tasks like running tests, linters, or dev servers where dependency order doesn't matter.
+**Note:** Pre and post scripts (e.g., `predev`, `postdev`) within the same package still run in order.
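
The note above is exercised by one of the new tests; as a standalone illustration (not part of this diff), the following sketch sets up a single package with `predev`/`dev`/`postdev` scripts and runs it through the branch's `--parallel` flag using the same `Bun.spawnSync` API the tests use. The scratch-directory path and package layout are invented for the example, and it assumes a `bun` build from this branch is on `PATH`.

```ts
// Illustrative sketch only -- not part of the diff above. Assumes a bun build
// that includes the --parallel flag added in this branch is on PATH.
import { spawnSync } from "bun";
import { mkdirSync, writeFileSync } from "node:fs";
import { join } from "node:path";

const dir = "/tmp/parallel-prepost-demo"; // hypothetical scratch directory
mkdirSync(join(dir, "app"), { recursive: true });
writeFileSync(
  join(dir, "app", "package.json"),
  JSON.stringify({
    name: "app",
    scripts: {
      predev: "echo predev",
      dev: "echo dev",
      postdev: "echo postdev",
    },
  }),
);

const { exitCode, stdout } = spawnSync({
  cwd: dir,
  cmd: ["bun", "run", "--filter", "*", "--parallel", "dev"],
  stdout: "pipe",
  stderr: "pipe",
});

// Per the note above, the output should show predev before dev and dev before
// postdev, even though --parallel skips cross-package ordering.
console.log(exitCode, stdout.toString());
```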

View File: docs/cli/run.md

@@ -172,6 +172,12 @@ bun run --filter 'ba*' <script>
will execute `<script>` in both `bar` and `baz`, but not in `foo`.
+To run scripts in parallel without waiting for dependency order, add the `--parallel` flag:
+```bash
+bun run --filter '*' --parallel test
+```
Find more details in the docs page for [filter](https://bun.com/docs/cli/filter#running-scripts-with-filter).
### `--workspaces`

View File

@@ -409,6 +409,7 @@ pub const Command = struct {
filters: []const []const u8 = &.{},
workspaces: bool = false,
if_present: bool = false,
+parallel: bool = false,
preloads: []const string = &.{},
has_loaded_global_config: bool = false,

View File

@@ -120,6 +120,7 @@ pub const auto_or_run_params = [_]ParamType{
clap.parseParam("-b, --bun Force a script or package to use Bun's runtime instead of Node.js (via symlinking node)") catch unreachable,
clap.parseParam("--shell <STR> Control the shell used for package.json scripts. Supports either 'bun' or 'system'") catch unreachable,
clap.parseParam("--workspaces Run a script in all workspace packages (from the \"workspaces\" field in package.json)") catch unreachable,
clap.parseParam("--parallel Run scripts in parallel without waiting for dependencies") catch unreachable,
};
pub const auto_only_params = [_]ParamType{
@@ -389,6 +390,7 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
ctx.filters = args.options("--filter");
ctx.workspaces = args.flag("--workspaces");
ctx.if_present = args.flag("--if-present");
+ctx.parallel = args.flag("--parallel");
if (args.option("--elide-lines")) |elide_lines| {
if (elide_lines.len > 0) {

View File

@@ -578,47 +578,50 @@ pub fn runScriptsWithFilter(ctx: Command.Context) !noreturn {
// &state.handles[i];
}
}
// compute dependencies (TODO: maybe we should do this only in a workspace?)
for (state.handles) |*handle| {
var iter = handle.config.deps.map.iterator();
while (iter.next()) |entry| {
var sfa = std.heap.stackFallback(256, ctx.allocator);
const alloc = sfa.get();
const buf = try alloc.alloc(u8, entry.key_ptr.len());
defer alloc.free(buf);
const name = entry.key_ptr.slice(buf);
// is it a workspace dependency?
if (map.get(name)) |pkgs| {
for (pkgs.items) |dep| {
try dep.dependents.append(handle);
handle.remaining_dependencies += 1;
// Skip dependency resolution if --parallel flag is set
if (!ctx.parallel) {
// compute dependencies (TODO: maybe we should do this only in a workspace?)
for (state.handles) |*handle| {
var iter = handle.config.deps.map.iterator();
while (iter.next()) |entry| {
var sfa = std.heap.stackFallback(256, ctx.allocator);
const alloc = sfa.get();
const buf = try alloc.alloc(u8, entry.key_ptr.len());
defer alloc.free(buf);
const name = entry.key_ptr.slice(buf);
// is it a workspace dependency?
if (map.get(name)) |pkgs| {
for (pkgs.items) |dep| {
try dep.dependents.append(handle);
handle.remaining_dependencies += 1;
}
}
}
}
}
// check if there is a dependency cycle
var has_cycle = false;
for (state.handles) |*handle| {
if (hasCycle(handle)) {
has_cycle = true;
break;
}
}
// if there is, we ignore dependency order completely
if (has_cycle) {
// check if there is a dependency cycle
var has_cycle = false;
for (state.handles) |*handle| {
handle.dependents.clearRetainingCapacity();
handle.remaining_dependencies = 0;
if (hasCycle(handle)) {
has_cycle = true;
break;
}
}
// if there is, we ignore dependency order completely
if (has_cycle) {
for (state.handles) |*handle| {
handle.dependents.clearRetainingCapacity();
handle.remaining_dependencies = 0;
}
}
}
// set up dependencies between pre/post scripts
// this is done after the cycle check because we don't want these to be removed if there is a cycle
for (0..state.handles.len - 1) |i| {
if (bun.strings.eql(state.handles[i].config.package_name, state.handles[i + 1].config.package_name)) {
try state.handles[i].dependents.append(&state.handles[i + 1]);
state.handles[i + 1].remaining_dependencies += 1;
// set up dependencies between pre/post scripts
// this is done after the cycle check because we don't want these to be removed if there is a cycle
for (0..state.handles.len - 1) |i| {
if (bun.strings.eql(state.handles[i].config.package_name, state.handles[i + 1].config.package_name)) {
try state.handles[i].dependents.append(&state.handles[i + 1]);
state.handles[i + 1].remaining_dependencies += 1;
}
}
}
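
To summarize what the hunk above changes: the workspace dependency-graph construction (the `remaining_dependencies` counters and `dependents` lists) is now wrapped in `if (!ctx.parallel)`, so with `--parallel` no cross-package edges are created and every script is immediately runnable. The following is a minimal TypeScript sketch of that scheduling model, not the actual Zig implementation; the edge-building step is left as a comment, and the `hasCycle` fallback is only mentioned, not modeled.

```ts
// A rough model of the scheduler the Zig code implements: each handle tracks
// how many dependencies it is still waiting on, and starts once that count
// reaches zero. This is a sketch for explanation, not Bun's implementation.
type Handle = {
  name: string;
  run: () => Promise<void>;
  dependents: Handle[];          // handles that wait on this one
  remainingDependencies: number; // how many handles this one waits on
};

// Mirrors `dep.dependents.append(handle); handle.remaining_dependencies += 1;`
function addEdge(dependency: Handle, dependent: Handle) {
  dependency.dependents.push(dependent);
  dependent.remainingDependencies += 1;
}

async function start(handle: Handle): Promise<void> {
  await handle.run();
  // Finishing this handle may make its dependents runnable.
  await Promise.all(
    handle.dependents
      .filter(dep => --dep.remainingDependencies === 0)
      .map(dep => start(dep)),
  );
}

async function runAll(handles: Handle[], parallel: boolean): Promise<void> {
  if (!parallel) {
    // Not shown: walk each package's workspace dependencies and call
    // addEdge(dependencyHandle, dependentHandle), as the `if (!ctx.parallel)`
    // block above does. (The real code also clears all edges again if
    // hasCycle() detects a dependency cycle.)
  }
  // Per the docs note in this branch, pre/post script edges within a package
  // are still added with addEdge() here, regardless of --parallel.

  // With --parallel, nothing has remaining dependencies, so every script
  // starts immediately.
  await Promise.all(
    handles.filter(h => h.remainingDependencies === 0).map(h => start(h)),
  );
}
```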

View File: test/cli/run/filter-parallel.test.ts

@@ -0,0 +1,335 @@
import { spawnSync } from "bun";
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
describe("bun run --filter --parallel", () => {
test("runs scripts in parallel without respecting dependency order", () => {
const dir = tempDirWithFiles("parallel-workspace", {
dep0: {
"write.js": "await Bun.write('dep0.txt', 'dep0-done'); await Bun.sleep(100);",
"package.json": JSON.stringify({
name: "dep0",
scripts: {
script: `${bunExe()} run write.js`,
},
}),
},
dep1: {
// This depends on dep0, but with --parallel it should run immediately
// and potentially fail or run without waiting
"read.js": "await Bun.sleep(50); console.log('dep1-started')",
"package.json": JSON.stringify({
name: "dep1",
dependencies: {
dep0: "*",
},
scripts: {
script: `${bunExe()} run read.js`,
},
}),
},
});
const { exitCode, stdout } = spawnSync({
cwd: dir,
cmd: [bunExe(), "run", "--filter", "*", "--parallel", "script"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
expect(exitCode).toBe(0);
expect(stdout.toString()).toContain("dep1-started");
});
test("runs multiple packages in parallel with --parallel flag", () => {
const dir = tempDirWithFiles("parallel-workspace", {
pkg1: {
"index.js": "console.log('pkg1'); await Bun.sleep(100);",
"package.json": JSON.stringify({
name: "pkg1",
scripts: {
test: `${bunExe()} run index.js`,
},
}),
},
pkg2: {
"index.js": "console.log('pkg2'); await Bun.sleep(100);",
"package.json": JSON.stringify({
name: "pkg2",
scripts: {
test: `${bunExe()} run index.js`,
},
}),
},
pkg3: {
"index.js": "console.log('pkg3'); await Bun.sleep(100);",
"package.json": JSON.stringify({
name: "pkg3",
scripts: {
test: `${bunExe()} run index.js`,
},
}),
},
});
const { exitCode, stdout } = spawnSync({
cwd: dir,
cmd: [bunExe(), "run", "--filter", "*", "--parallel", "test"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
expect(exitCode).toBe(0);
const output = stdout.toString();
expect(output).toContain("pkg1");
expect(output).toContain("pkg2");
expect(output).toContain("pkg3");
});
test("--parallel ignores dependency chains", () => {
const dir = tempDirWithFiles("parallel-workspace", {
dep0: {
"index.js": "await Bun.write('dep0.txt', 'done'); console.log('dep0');",
"package.json": JSON.stringify({
name: "dep0",
scripts: {
script: `${bunExe()} run index.js`,
},
}),
},
dep1: {
"index.js": "console.log('dep1');",
"package.json": JSON.stringify({
name: "dep1",
dependencies: {
dep0: "*",
},
scripts: {
script: `${bunExe()} run index.js`,
},
}),
},
dep2: {
"index.js": "console.log('dep2');",
"package.json": JSON.stringify({
name: "dep2",
dependencies: {
dep1: "*",
},
scripts: {
script: `${bunExe()} run index.js`,
},
}),
},
});
const { exitCode, stdout } = spawnSync({
cwd: dir,
cmd: [bunExe(), "run", "--filter", "*", "--parallel", "script"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
expect(exitCode).toBe(0);
const output = stdout.toString();
expect(output).toContain("dep0");
expect(output).toContain("dep1");
expect(output).toContain("dep2");
});
test("--parallel with circular dependencies runs all scripts", () => {
const dir = tempDirWithFiles("parallel-workspace", {
dep0: {
"package.json": JSON.stringify({
name: "dep0",
scripts: {
script: "echo dep0",
},
dependencies: {
dep1: "*",
},
}),
},
dep1: {
"package.json": JSON.stringify({
name: "dep1",
dependencies: {
dep0: "*",
},
scripts: {
script: "echo dep1",
},
}),
},
});
const { exitCode, stdout } = spawnSync({
cwd: dir,
cmd: [bunExe(), "run", "--filter", "*", "--parallel", "script"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
expect(exitCode).toBe(0);
const output = stdout.toString();
expect(output).toContain("dep0");
expect(output).toContain("dep1");
});
test("--parallel still runs pre and post scripts in order within same package", () => {
const dir = tempDirWithFiles("parallel-workspace", {
pkg1: {
"package.json": JSON.stringify({
name: "pkg1",
scripts: {
prescript: "echo pre",
script: "echo main",
postscript: "echo post",
},
}),
},
});
const { exitCode, stdout } = spawnSync({
cwd: dir,
cmd: [bunExe(), "run", "--filter", "*", "--parallel", "script"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
expect(exitCode).toBe(0);
const output = stdout.toString();
// Verify that pre/main/post scripts all ran
expect(output).toContain("pre");
expect(output).toContain("main");
expect(output).toContain("post");
});
test("--parallel propagates exit codes correctly", () => {
const dir = tempDirWithFiles("parallel-workspace", {
pkg1: {
"package.json": JSON.stringify({
name: "pkg1",
scripts: {
script: "exit 0",
},
}),
},
pkg2: {
"package.json": JSON.stringify({
name: "pkg2",
scripts: {
script: "exit 42",
},
}),
},
});
const { exitCode, stdout } = spawnSync({
cwd: dir,
cmd: [bunExe(), "run", "--filter", "*", "--parallel", "script"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const output = stdout.toString();
expect(output).toMatch(/code 0/);
expect(output).toMatch(/code 42/);
expect(exitCode).toBe(42);
});
test("--parallel works with --filter pattern matching", () => {
const dir = tempDirWithFiles("parallel-workspace", {
packages: {
"pkg-a": {
"package.json": JSON.stringify({
name: "pkg-a",
scripts: {
test: "echo pkg-a",
},
}),
},
"pkg-b": {
"package.json": JSON.stringify({
name: "pkg-b",
scripts: {
test: "echo pkg-b",
},
}),
},
"other": {
"package.json": JSON.stringify({
name: "other",
scripts: {
test: "echo other",
},
}),
},
},
"package.json": JSON.stringify({
name: "ws",
workspaces: ["packages/*"],
}),
});
const { exitCode, stdout } = spawnSync({
cwd: dir,
cmd: [bunExe(), "run", "--filter", "pkg-*", "--parallel", "test"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
expect(exitCode).toBe(0);
const output = stdout.toString();
expect(output).toContain("pkg-a");
expect(output).toContain("pkg-b");
expect(output).not.toContain("other");
});
test("--parallel works with workspaces", () => {
const dir = tempDirWithFiles("parallel-workspace", {
packages: {
pkg1: {
"package.json": JSON.stringify({
name: "pkg1",
scripts: {
build: "echo pkg1-build",
},
}),
},
pkg2: {
"package.json": JSON.stringify({
name: "pkg2",
scripts: {
build: "echo pkg2-build",
},
}),
},
},
"package.json": JSON.stringify({
name: "ws",
workspaces: ["packages/*"],
}),
});
const { exitCode, stdout } = spawnSync({
cwd: dir,
cmd: [bunExe(), "run", "--filter", "*", "--parallel", "build"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
expect(exitCode).toBe(0);
const output = stdout.toString();
expect(output).toContain("pkg1-build");
expect(output).toContain("pkg2-build");
});
});