Mirror of https://github.com/oven-sh/bun (synced 2026-02-25 02:57:27 +01:00)

Compare commits: claude/mac...claude/fix (231 commits)

Commits in range, by SHA1:

9f5bc62b72 8e6184707d a57dee5721 e4beddb839 dd427a1c61 0b7a6024e0 35027a1399 cf8d3183b3
45ed0cb08e 6ad208bc32 b0799da968 a67ba81e0b 7cdc5d879c 1dc9fdfd9b 584946b0ce 3766f183e6
19fac68e81 2f84949fe0 964d4dac2c a9d62d58ea 0827add9a3 05cff5cfde f5c138d646 ee88c489ab
46e1c5a0fa 5893ae99c2 0c46791d28 aab14c161a d8e5f6106f 428c8d4bbf 92f896ddd7 3b1842723e
f5b397c040 c3c2dccc55 a9a7526ed1 74b1462ad4 47c8a67b75 2ed5b0ffad 0bf0d8420e df61e88dc0
c088a6838f 4deeadd53a 3652008b0d 7c65c35f8f 455f3a65b9 4d301cc3c4 e9dc25200a ccbd3f3575
a72d74e09a 04883a8bdc fe28e00d53 da856dd347 25d490fb65 806d6c156f 198d7c3b19 dfe1a1848a
0612f459a4 408fda7ad2 7ad3049e70 ed6f099e5e 258a2a2e3a 4568258960 dd27ad7716 d3d08eeb2d
47727bdbe3 be5c69df79 9785e37e10 4494353abf fa1ad54257 a0687c06f8 15578df7fc b6d3768038
1ac2391b20 276eee74eb deaef1882b 711de8a667 a5af485354 73e737be56 68d322f05f 39eccf89a8
a729a046bd 9bb4a6af19 07ffde8a69 bb67f2b345 7c4c360431 4b39a9b07d d0edcc69ae 0cf2b71ff1
40bff9fea8 7726e5c670 7a31108019 dd68364630 7d4f6efe7a 7cdcd34f58 2a6d018d73 8efe7945eb
5bdcf339d7 03afe6ef28 ce5152dd7a 5c65c18e72 100ab8c503 a51af710c0 5ca1580427 b34bab745b
6034c2f94b 2b5a59cae1 3bcf93ddd6 53b24ace79 a1f44caa87 3de884f2c9 a6162295c5 80c46b1607
26cbcd21c1 3d6dda6901 93f92658b3 f8c2dac836 4bbe32fff8 60c735a11d 003d13ec27 245abb92fb
066a25ac40 ab88317846 e7373bbf32 4687cc4f5e a5ff729665 62e8a7fb01 220807f3dc 562f82d3f8
4580e11fc3 2956281845 9a2dfee3ca 7a47c945aa 24b7835ecd 95990e7bd6 f2e487b1e6 3315ade0e9
95e653e52b a8522b16af aba8c4efd2 0bebdc9049 1058d0dee4 679a07caef 0bd73b4363 bbdc3ae055
81e08d45d4 89eb48047f 712d5be741 60823348c5 2f0ddf3018 1ab76610cf d3927a6e09 8424caa5fa
3e6d792b62 9bb2474adb fe94a36dbc cb2887feee 64361eb964 3413a2816f 97a530d832 72a6278b3f
bd232189b4 87df7527bb a1f756fea9 03dfd7d96b 9fa8ae9a40 e4dd780c2a 76c623817f f90a007593
ea12cb5801 75027e9616 e8d0935717 ace81813fc 71e2161591 07cd45deae 73d92c7518 5c44553a02
f4116bfa7d 5aeede1ac7 6d2a0e30f5 382fe74fd0 aac646dbfe da90ad84d0 6383c8f94c 718e7cdc43
cd54db1e4b 171169a237 5fbd99e0cb 60faa8696f d2a4fb8124 a4d031a841 56bc65932f 83760fc446
74d3610d41 1d085cb4d4 a868e859d7 39dd5002c3 7940861b87 f65f31b783 cc5d8adcb5 bbc4f89c25
f4339df16b 12dafa4f89 436be9f277 422991719d bc030d23b3 146fb2f7aa 1e5f746f9b aad3abeadd
6d5637b568 eb0b0db8fd 1a9bc5da09 a1c0f74037 b5a9d09009 a280d15bdc f380458bae 6ed245b889
89e322e3b5 45e97919e5 1927b06c50 851fa7d3e6 be03a537df 664506474a 804e76af22 67bed87795
d181e19952 6b14f77252 cc4f840e8b 0bb7132f61 45a0559374 7bbb4e2ad9 e273f7d122

@@ -1,78 +0,0 @@

import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";

const { positionals, values } = parseArgs({
  allowPositionals: true,
  options: {
    help: {
      type: "boolean",
      short: "h",
      default: false,
    },
    interactive: {
      type: "boolean",
      short: "i",
      default: false,
    },
  },
});

if (values.help || positionals.length === 0) {
  console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
  console.log("Example: node agent.mjs triage fix bug in authentication");
  console.log("Options:");
  console.log("  -h, --help         Show this help message");
  console.log("  -i, --interactive  Run in interactive mode");
  process.exit(0);
}

const promptName = positionals[0].toUpperCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);

if (!existsSync(promptFile)) {
  console.error(`Error: Prompt file "${promptFile}" not found`);
  console.error(`Available prompts should be named like: .agent/triage.md, .agent/debug.md, etc.`);
  process.exit(1);
}

try {
  let prompt = readFileSync(promptFile, "utf-8");

  const githubEnvs = Object.entries(process.env)
    .filter(([key]) => key.startsWith("GITHUB_"))
    .sort(([a], [b]) => a.localeCompare(b));

  if (githubEnvs.length > 0) {
    const githubContext = `## GitHub Environment\n\n${githubEnvs
      .map(([key, value]) => `**${key}**: \`${value}\``)
      .join("\n")}\n\n---\n\n`;
    prompt = githubContext + prompt;
  }

  if (extraArgs.length > 0) {
    const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
    prompt = prompt + extraArgsContext;
  }

  const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
  if (!values.interactive) {
    claudeArgs.unshift("--print");
  }

  const { status, error } = spawnSync("claude", claudeArgs, {
    stdio: "inherit",
    encoding: "utf-8",
  });

  if (error) {
    console.error("Error running claude:", error);
    process.exit(1);
  }

  process.exit(status || 0);
} catch (error) {
  console.error(`Error reading prompt file "${promptFile}":`, error);
  process.exit(1);
}
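
For reference, a minimal sketch of what the deleted script ends up passing to `claude` for a run like `node agent.mjs triage "flaky websocket test"`; the flags mirror the script above, while the prompt name and text are made-up examples:

```js
// Sketch only: reproduces the argument assembly from the script above on
// hypothetical inputs; it does not invoke claude.
const positionals = ["triage", "flaky websocket test"]; // from parseArgs
const values = { interactive: false };

const promptName = positionals[0].toUpperCase();   // "TRIAGE"
const promptFile = `.agent/${promptName}.md`;      // ".agent/TRIAGE.md"
const extraArgs = positionals.slice(1);

let prompt = "<contents of .agent/TRIAGE.md>";     // readFileSync(...) in the real script
if (extraArgs.length > 0) {
  prompt += `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
}

const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
if (!values.interactive) claudeArgs.unshift("--print"); // non-interactive by default

console.log(["claude", ...claudeArgs.map(a => JSON.stringify(a))].join(" "));
```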

@@ -303,9 +303,34 @@ function getCppAgent(platform, options) {
   }
 
   return getEc2Agent(platform, options, {
-    instanceType: arch === "aarch64" ? "c8g.16xlarge" : "c7i.16xlarge",
-    cpuCount: 32,
-    threadsPerCore: 1,
+    instanceType: arch === "aarch64" ? "c8g.4xlarge" : "c7i.4xlarge",
   });
 }
 
+/**
+ * @param {Platform} platform
+ * @param {PipelineOptions} options
+ * @returns {string}
+ */
+function getLinkBunAgent(platform, options) {
+  const { os, arch, distro } = platform;
+
+  if (os === "darwin") {
+    return {
+      queue: `build-${os}`,
+      os,
+      arch,
+    };
+  }
+
+  if (os === "windows") {
+    return getEc2Agent(platform, options, {
+      instanceType: arch === "aarch64" ? "r8g.large" : "r7i.large",
+    });
+  }
+
+  return getEc2Agent(platform, options, {
+    instanceType: arch === "aarch64" ? "r8g.xlarge" : "r7i.xlarge",
+  });
+}

@@ -356,7 +381,7 @@ function getTestAgent(platform, options) {
     };
   }
 
-  // TODO: `dev-server-ssr-110.test.ts` and `next-build.test.ts` run out of memory at 8GB of memory, so use 16GB instead.
+  // TODO: delete this block when we upgrade to mimalloc v3
   if (os === "windows") {
     return getEc2Agent(platform, options, {
       instanceType: "c7i.2xlarge",

@@ -502,7 +527,7 @@ function getLinkBunStep(platform, options) {
     key: `${getTargetKey(platform)}-build-bun`,
     label: `${getTargetLabel(platform)} - build-bun`,
     depends_on: [`${getTargetKey(platform)}-build-cpp`, `${getTargetKey(platform)}-build-zig`],
-    agents: getCppAgent(platform, options),
+    agents: getLinkBunAgent(platform, options),
     retry: getRetry(),
     cancel_on_build_failing: isMergeQueue(),
     env: {

@@ -569,7 +594,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
     retry: getRetry(),
     cancel_on_build_failing: isMergeQueue(),
     parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
-    timeout_in_minutes: profile === "asan" ? 45 : 30,
+    timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
     command:
       os === "windows"
         ? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
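
The last hunk adjusts how test steps are sized; restated as a standalone helper with hypothetical inputs, the selection logic looks like this:

```js
// Sketch of the test-step sizing the hunks above change; the real values live
// in the Buildkite pipeline script.
function testStepSettings({ os, profile, unifiedTests }) {
  return {
    // macOS runners are scarce, so fan out less there.
    parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
    // ASan builds and Windows runs are slower, so give them a longer budget.
    timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
  };
}

console.log(testStepSettings({ os: "windows", profile: "release", unifiedTests: false }));
// -> { parallelism: 10, timeout_in_minutes: 45 }
```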

@@ -1,255 +0,0 @@

# macOS Runner Infrastructure - Claude Development Guide

This document provides context and guidance for Claude to work on the macOS runner infrastructure.

## Overview

This infrastructure provides automated, scalable macOS CI runners for Bun using MacStadium's Orka platform. It implements complete job isolation, daily image rebuilds, and comprehensive testing.

## Architecture

### Core Components

- **Packer**: Builds VM images with all required software
- **Terraform**: Manages VM fleet with auto-scaling
- **GitHub Actions**: Automates daily rebuilds and deployments
- **User Management**: Creates isolated users per job (`bk-<job-id>`)

### Key Features

- **Complete Job Isolation**: Each Buildkite job runs in its own user account
- **Daily Image Rebuilds**: Automated nightly rebuilds ensure fresh environments
- **Flakiness Testing**: Multiple test iterations ensure reliability (80% success rate minimum)
- **Software Validation**: All tools tested for proper installation and functionality
- **Version Synchronization**: Exact versions match bootstrap.sh requirements

## File Structure

```
.buildkite/macos-runners/
├── packer/
│   └── macos-base.pkr.hcl       # VM image building configuration
├── terraform/
│   ├── main.tf                  # Infrastructure definition
│   ├── variables.tf             # Configuration variables
│   ├── outputs.tf               # Resource outputs
│   └── user-data.sh             # VM initialization script
├── scripts/
│   ├── bootstrap-macos.sh       # macOS software installation
│   ├── create-build-user.sh     # User creation for job isolation
│   ├── cleanup-build-user.sh    # User cleanup after jobs
│   └── job-runner.sh            # Main job lifecycle management
├── github-actions/
│   ├── image-rebuild.yml        # Daily image rebuild workflow
│   └── deploy-fleet.yml         # Fleet deployment workflow
├── README.md                    # User documentation
├── DEPLOYMENT.md                # Deployment guide
└── CLAUDE.md                    # This file
```

## Software Versions (Must Match bootstrap.sh)

These versions are synchronized with `/scripts/bootstrap.sh`:

- **Node.js**: 24.3.0 (exact)
- **Bun**: 1.2.17 (exact)
- **LLVM**: 19.1.7 (exact)
- **CMake**: 3.30.5 (exact)
- **Buildkite Agent**: 3.87.0
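
A minimal sketch of an exact-version check in the spirit of the validation described here; the expected versions come from the list above, and the parsing of each tool's `--version` output is illustrative:

```js
// Sketch: compare installed tool versions against the pinned versions above.
import { execSync } from "node:child_process";

const expected = { node: "v24.3.0", bun: "1.2.17", cmake: "3.30.5" };
const actual = {
  node: execSync("node --version").toString().trim(),
  bun: execSync("bun --version").toString().trim(),
  cmake: execSync("cmake --version").toString().split(/\s+/)[2],
};

for (const [tool, want] of Object.entries(expected)) {
  if (actual[tool] !== want) {
    throw new Error(`${tool}: expected ${want}, found ${actual[tool]}`);
  }
}
console.log("exact version check passed");
```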

## Key Scripts

### bootstrap-macos.sh
- Installs all required software with exact versions
- Configures development environment
- Sets up Tailscale, Docker, and other dependencies
- **Critical**: Must stay synchronized with main bootstrap.sh

### create-build-user.sh
- Creates unique user per job: `bk-<job-id>`
- Sets up isolated environment with proper permissions
- Configures shell environment and paths
- Creates workspace directories

### cleanup-build-user.sh
- Kills all processes owned by build user
- Removes user account and home directory
- Cleans up temporary files and caches
- Ensures complete isolation between jobs

### job-runner.sh
- Main orchestration script
- Manages job lifecycle: create user → run job → cleanup
- Handles timeouts and health checks
- Runs as root via LaunchDaemon
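
The real orchestration is a shell script run by a LaunchDaemon; the sketch below restates the create → run → cleanup lifecycle in JavaScript, using the script paths from this document and assuming they accept the job user as an argument:

```js
// Sketch only: the actual implementation is job-runner.sh running as root.
import { spawnSync } from "node:child_process";

function runIsolatedJob(jobId, command) {
  const user = `bk-${jobId}`; // per-job user, as described above
  const run = (cmd, args) => {
    const { status } = spawnSync(cmd, args, { stdio: "inherit" });
    if (status !== 0) throw new Error(`${cmd} exited with ${status}`);
  };

  run("sudo", ["/usr/local/bin/bun-ci/create-build-user.sh", user]);
  try {
    // Run the Buildkite job as the throwaway user.
    run("sudo", ["-u", user, "bash", "-lc", command]);
  } finally {
    // Always tear the user down so nothing leaks into the next job.
    run("sudo", ["/usr/local/bin/bun-ci/cleanup-build-user.sh", user]);
  }
}
```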

## GitHub Actions Workflows

### image-rebuild.yml
- Runs daily at 2 AM UTC
- Detects changes to trigger rebuilds
- Builds images for macOS 13, 14, 15
- **Validation Steps**:
  - Software installation verification
  - Flakiness testing (3 iterations, 80% success rate)
  - Health endpoint testing
  - Discord notifications for status

### deploy-fleet.yml
- Manual deployment trigger
- Validates inputs and plans changes
- Deploys VM fleet with health checks
- Supports different environments (prod/staging/dev)

## Required Secrets

### MacStadium
- `MACSTADIUM_API_KEY`: API access key
- `ORKA_ENDPOINT`: Orka API endpoint
- `ORKA_AUTH_TOKEN`: Authentication token

### AWS
- `AWS_ACCESS_KEY_ID`: For Terraform state storage
- `AWS_SECRET_ACCESS_KEY`: For Terraform state storage

### Buildkite
- `BUILDKITE_AGENT_TOKEN`: Agent registration token
- `BUILDKITE_API_TOKEN`: For monitoring/status checks
- `BUILDKITE_ORG`: Organization slug

### GitHub
- `GITHUB_TOKEN`: For private repository access

### Notifications
- `DISCORD_WEBHOOK_URL`: For status notifications

## Development Guidelines

### Adding New Software
1. Update `bootstrap-macos.sh` with installation commands
2. Add version verification in the script
3. Include in validation tests in `image-rebuild.yml`
4. Update documentation in README.md

### Modifying User Isolation
1. Update `create-build-user.sh` for user creation
2. Update `cleanup-build-user.sh` for cleanup
3. Test isolation in `job-runner.sh`
4. Ensure proper permissions and security

### Updating VM Configuration
1. Modify `terraform/variables.tf` for fleet sizing
2. Update `terraform/main.tf` for infrastructure changes
3. Test deployment with `deploy-fleet.yml`
4. Update documentation

### Version Updates
1. **Critical**: Check `/scripts/bootstrap.sh` for version changes
2. Update exact versions in `bootstrap-macos.sh`
3. Update version verification in workflows
4. Update documentation

## Testing Strategy

### Image Validation
- Software installation verification
- Version checking for exact matches
- Health endpoint testing
- Basic functionality tests

### Flakiness Testing
- 3 test iterations per image
- 80% success rate minimum
- Tests basic commands, Node.js, Bun, build tools
- Automated cleanup of test VMs
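
A small sketch of the flakiness gate described above; note that with 3 iterations an 80% minimum effectively requires all 3 to pass:

```js
// Sketch: an image passes only if the success rate over its validation
// iterations meets the minimum (80% in this document).
function passesFlakinessGate(results, minSuccessRate = 0.8) {
  const successes = results.filter(Boolean).length;
  return successes / results.length >= minSuccessRate;
}

console.log(passesFlakinessGate([true, true, false])); // 2/3 ≈ 0.67 -> false
console.log(passesFlakinessGate([true, true, true]));  // 3/3 = 1.0  -> true
```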

### Integration Testing
- End-to-end job execution
- User isolation verification
- Resource cleanup validation
- Performance monitoring

## Troubleshooting

### Common Issues
1. **Version Mismatches**: Check bootstrap.sh for updates
2. **User Cleanup Failures**: Check process termination and file permissions
3. **Image Build Failures**: Check Packer logs and VM resources
4. **Flakiness**: Investigate VM performance and network issues

### Debugging Commands
```bash
# Check VM status
orka vm list

# Check image status
orka image list

# Test user creation
sudo /usr/local/bin/bun-ci/create-build-user.sh

# Check health endpoint
curl http://localhost:8080/health

# View logs
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
```

## Performance Considerations

### Resource Management
- VMs configured with 12 CPU cores, 32GB RAM
- Auto-scaling based on queue demand
- Aggressive cleanup to prevent resource leaks

### Cost Optimization
- Automated cleanup of old images and snapshots
- Efficient VM sizing based on workload requirements
- Scheduled maintenance windows

## Security

### Isolation
- Complete process isolation per job
- Separate user accounts with unique UIDs
- Cleanup of all user data after jobs

### Network Security
- VPC isolation with security groups
- Limited SSH access for debugging
- Encrypted communications

### Credential Management
- Secure secret storage in GitHub
- No hardcoded credentials in code
- Regular rotation of access tokens

## Monitoring

### Health Checks
- HTTP endpoints on port 8080
- Buildkite agent connectivity monitoring
- Resource usage tracking
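
A sketch of how the per-VM endpoint can be polled; the host, port, and path come from this document, while the attempt count and interval are arbitrary:

```js
// Sketch: wait for http://<host>:8080/health to respond before using a VM.
async function waitForHealthy(host, attempts = 12, delayMs = 30_000) {
  for (let i = 1; i <= attempts; i++) {
    try {
      const res = await fetch(`http://${host}:8080/health`, { signal: AbortSignal.timeout(30_000) });
      if (res.ok) return true;
    } catch {
      // Not reachable yet; fall through and retry.
    }
    await new Promise(resolve => setTimeout(resolve, delayMs));
  }
  return false;
}
```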

### Alerts
- Discord notifications for failures
- Build status reporting
- Fleet deployment notifications

## Next Steps for Development

1. **Monitor bootstrap.sh**: Watch for version updates that need synchronization
2. **Performance Optimization**: Monitor resource usage and optimize VM sizes
3. **Enhanced Testing**: Add more comprehensive validation tests
4. **Cost Monitoring**: Track usage and optimize for cost efficiency
5. **Security Hardening**: Regular security reviews and updates

## References

- [MacStadium Orka Documentation](https://orkadocs.macstadium.com/)
- [Packer Documentation](https://www.packer.io/docs)
- [Terraform Documentation](https://www.terraform.io/docs)
- [Buildkite Agent Documentation](https://buildkite.com/docs/agent/v3)
- [Main bootstrap.sh](../../scripts/bootstrap.sh) - **Keep synchronized!**

---

**Important**: This infrastructure is critical for Bun's CI/CD pipeline. Always test changes thoroughly and maintain backward compatibility. The `bootstrap-macos.sh` script must stay synchronized with the main `bootstrap.sh` script to ensure consistent environments.

@@ -1,428 +0,0 @@
|
||||
# macOS Runner Deployment Guide
|
||||
|
||||
This guide provides step-by-step instructions for deploying the macOS runner infrastructure for Bun CI.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### 1. MacStadium Account Setup
|
||||
|
||||
1. **Create MacStadium Account**
|
||||
- Sign up at [MacStadium](https://www.macstadium.com/)
|
||||
- Purchase Orka plan with appropriate VM allocation
|
||||
|
||||
2. **Configure API Access**
|
||||
- Generate API key from MacStadium dashboard
|
||||
- Note down your Orka endpoint URL
|
||||
- Test API connectivity
|
||||
|
||||
3. **Base Image Preparation**
|
||||
- Ensure base macOS images are available in your account
|
||||
- Verify image naming convention: `base-images/macos-{version}-{name}`
|
||||
|
||||
### 2. AWS Account Setup
|
||||
|
||||
1. **Create AWS Account**
|
||||
- Set up AWS account for Terraform state storage
|
||||
- Create S3 bucket for Terraform backend: `bun-terraform-state`
|
||||
|
||||
2. **Configure IAM**
|
||||
- Create IAM user with appropriate permissions
|
||||
- Generate access key and secret key
|
||||
- Attach policies for S3, CloudWatch, and EC2 (if using AWS resources)
|
||||
|
||||
### 3. GitHub Repository Setup
|
||||
|
||||
1. **Fork or Clone Repository**
|
||||
- Ensure you have admin access to the repository
|
||||
- Create necessary branches for deployment
|
||||
|
||||
2. **Configure Repository Secrets**
|
||||
- Add all required secrets (see main README.md)
|
||||
- Test secret accessibility
|
||||
|
||||
### 4. Buildkite Setup
|
||||
|
||||
1. **Organization Configuration**
|
||||
- Create or access Buildkite organization
|
||||
- Generate agent token with appropriate permissions
|
||||
- Note organization slug
|
||||
|
||||
2. **Queue Configuration**
|
||||
- Create queues: `macos`, `macos-arm64`, `macos-x86_64`
|
||||
- Configure queue-specific settings
|
||||
|
||||
## Step-by-Step Deployment
|
||||
|
||||
### Step 1: Environment Preparation
|
||||
|
||||
1. **Install Required Tools**
|
||||
```bash
|
||||
# Install Terraform
|
||||
wget https://releases.hashicorp.com/terraform/1.6.0/terraform_1.6.0_linux_amd64.zip
|
||||
unzip terraform_1.6.0_linux_amd64.zip
|
||||
sudo mv terraform /usr/local/bin/
|
||||
|
||||
# Install Packer
|
||||
wget https://releases.hashicorp.com/packer/1.9.4/packer_1.9.4_linux_amd64.zip
|
||||
unzip packer_1.9.4_linux_amd64.zip
|
||||
sudo mv packer /usr/local/bin/
|
||||
|
||||
# Install AWS CLI
|
||||
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
|
||||
unzip awscliv2.zip
|
||||
sudo ./aws/install
|
||||
|
||||
# Install MacStadium CLI
|
||||
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
|
||||
sudo mv orka-cli /usr/local/bin/orka
|
||||
```
|
||||
|
||||
2. **Configure AWS Credentials**
|
||||
```bash
|
||||
aws configure
|
||||
# Enter your AWS access key, secret key, and region
|
||||
```
|
||||
|
||||
3. **Configure MacStadium CLI**
|
||||
```bash
|
||||
orka config set endpoint <your-orka-endpoint>
|
||||
orka auth token <your-orka-token>
|
||||
```
|
||||
|
||||
### Step 2: SSH Key Setup
|
||||
|
||||
1. **Generate SSH Key Pair**
|
||||
```bash
|
||||
ssh-keygen -t rsa -b 4096 -f ~/.ssh/bun-runner -N ""
|
||||
```
|
||||
|
||||
2. **Copy Public Key to Terraform Directory**
|
||||
```bash
|
||||
mkdir -p .buildkite/macos-runners/terraform/ssh-keys
|
||||
cp ~/.ssh/bun-runner.pub .buildkite/macos-runners/terraform/ssh-keys/bun-runner.pub
|
||||
```
|
||||
|
||||
### Step 3: Terraform Backend Setup
|
||||
|
||||
1. **Create S3 Bucket for Terraform State**
|
||||
```bash
|
||||
aws s3 mb s3://bun-terraform-state --region us-west-2
|
||||
aws s3api put-bucket-versioning --bucket bun-terraform-state --versioning-configuration Status=Enabled
|
||||
aws s3api put-bucket-encryption --bucket bun-terraform-state --server-side-encryption-configuration '{
|
||||
"Rules": [
|
||||
{
|
||||
"ApplyServerSideEncryptionByDefault": {
|
||||
"SSEAlgorithm": "AES256"
|
||||
}
|
||||
}
|
||||
]
|
||||
}'
|
||||
```
|
||||
|
||||
2. **Create Terraform Variables File**
|
||||
```bash
|
||||
cd .buildkite/macos-runners/terraform
|
||||
cat > production.tfvars << EOF
|
||||
environment = "production"
|
||||
macstadium_api_key = "your-macstadium-api-key"
|
||||
buildkite_agent_token = "your-buildkite-agent-token"
|
||||
github_token = "your-github-token"
|
||||
fleet_size = {
|
||||
macos_13 = 4
|
||||
macos_14 = 6
|
||||
macos_15 = 8
|
||||
}
|
||||
vm_configuration = {
|
||||
cpu_count = 12
|
||||
memory_gb = 32
|
||||
disk_size = 500
|
||||
}
|
||||
EOF
|
||||
```
|
||||
|
||||
### Step 4: Build VM Images
|
||||
|
||||
1. **Validate Packer Configuration**
|
||||
```bash
|
||||
cd .buildkite/macos-runners/packer
|
||||
packer validate -var "macos_version=15" macos-base.pkr.hcl
|
||||
```
|
||||
|
||||
2. **Build macOS 15 Image**
|
||||
```bash
|
||||
packer build \
|
||||
-var "macos_version=15" \
|
||||
-var "orka_endpoint=<your-orka-endpoint>" \
|
||||
-var "orka_auth_token=<your-orka-token>" \
|
||||
macos-base.pkr.hcl
|
||||
```
|
||||
|
||||
3. **Build macOS 14 Image**
|
||||
```bash
|
||||
packer build \
|
||||
-var "macos_version=14" \
|
||||
-var "orka_endpoint=<your-orka-endpoint>" \
|
||||
-var "orka_auth_token=<your-orka-token>" \
|
||||
macos-base.pkr.hcl
|
||||
```
|
||||
|
||||
4. **Build macOS 13 Image**
|
||||
```bash
|
||||
packer build \
|
||||
-var "macos_version=13" \
|
||||
-var "orka_endpoint=<your-orka-endpoint>" \
|
||||
-var "orka_auth_token=<your-orka-token>" \
|
||||
macos-base.pkr.hcl
|
||||
```
|
||||
|
||||
### Step 5: Deploy VM Fleet
|
||||
|
||||
1. **Initialize Terraform**
|
||||
```bash
|
||||
cd .buildkite/macos-runners/terraform
|
||||
terraform init
|
||||
```
|
||||
|
||||
2. **Create Production Workspace**
|
||||
```bash
|
||||
terraform workspace new production
|
||||
```
|
||||
|
||||
3. **Plan Deployment**
|
||||
```bash
|
||||
terraform plan -var-file="production.tfvars"
|
||||
```
|
||||
|
||||
4. **Apply Deployment**
|
||||
```bash
|
||||
terraform apply -var-file="production.tfvars"
|
||||
```
|
||||
|
||||
### Step 6: Verify Deployment
|
||||
|
||||
1. **Check VM Status**
|
||||
```bash
|
||||
orka vm list
|
||||
```
|
||||
|
||||
2. **Check Terraform Outputs**
|
||||
```bash
|
||||
terraform output
|
||||
```
|
||||
|
||||
3. **Test VM Connectivity**
|
||||
```bash
|
||||
# Get VM IP from terraform output
|
||||
VM_IP=$(terraform output -json vm_instances | jq -r '.value | to_entries[0].value.ip_address')
|
||||
|
||||
# Test SSH connectivity
|
||||
ssh -i ~/.ssh/bun-runner admin@$VM_IP
|
||||
|
||||
# Test health endpoint
|
||||
curl http://$VM_IP:8080/health
|
||||
```
|
||||
|
||||
4. **Verify Buildkite Agent Connectivity**
|
||||
```bash
|
||||
curl -H "Authorization: Bearer <your-buildkite-api-token>" \
|
||||
"https://api.buildkite.com/v2/organizations/<your-org>/agents"
|
||||
```
|
||||
|
||||
### Step 7: Configure GitHub Actions
|
||||
|
||||
1. **Enable GitHub Actions Workflows**
|
||||
- Navigate to repository Actions tab
|
||||
- Enable workflows if not already enabled
|
||||
|
||||
2. **Test Image Rebuild Workflow**
|
||||
```bash
|
||||
# Trigger manual rebuild
|
||||
gh workflow run image-rebuild.yml
|
||||
```
|
||||
|
||||
3. **Test Fleet Deployment Workflow**
|
||||
```bash
|
||||
# Trigger manual deployment
|
||||
gh workflow run deploy-fleet.yml
|
||||
```
|
||||
|
||||
## Post-Deployment Configuration
|
||||
|
||||
### 1. Monitoring Setup
|
||||
|
||||
1. **CloudWatch Dashboards**
|
||||
- Create custom dashboards for VM metrics
|
||||
- Set up alarms for critical thresholds
|
||||
|
||||
2. **Discord Notifications**
|
||||
- Configure Discord webhook for alerts
|
||||
- Test notification delivery
|
||||
|
||||
### 2. Backup Configuration
|
||||
|
||||
1. **Enable Automated Snapshots**
|
||||
```bash
|
||||
# Update terraform configuration
|
||||
backup_config = {
|
||||
enable_snapshots = true
|
||||
snapshot_schedule = "0 4 * * *"
|
||||
snapshot_retention = 7
|
||||
}
|
||||
```
|
||||
|
||||
2. **Test Backup Restoration**
|
||||
- Create test snapshot
|
||||
- Verify restoration process
|
||||
|
||||
### 3. Security Hardening
|
||||
|
||||
1. **Review Security Groups**
|
||||
- Minimize open ports
|
||||
- Restrict source IP ranges
|
||||
|
||||
2. **Enable Audit Logging**
|
||||
- Configure CloudTrail for AWS resources
|
||||
- Enable MacStadium audit logs
|
||||
|
||||
### 4. Performance Optimization
|
||||
|
||||
1. **Monitor Resource Usage**
|
||||
- Review CPU, memory, disk usage
|
||||
- Adjust VM sizes if needed
|
||||
|
||||
2. **Optimize Auto-Scaling**
|
||||
- Monitor scaling events
|
||||
- Adjust thresholds as needed
|
||||
|
||||
## Maintenance Procedures
|
||||
|
||||
### Daily Maintenance
|
||||
|
||||
1. **Automated Tasks**
|
||||
- Image rebuilds (automatic)
|
||||
- Health checks (automatic)
|
||||
- Cleanup processes (automatic)
|
||||
|
||||
2. **Manual Monitoring**
|
||||
- Check Discord notifications
|
||||
- Review CloudWatch metrics
|
||||
- Monitor Buildkite queue
|
||||
|
||||
### Weekly Maintenance
|
||||
|
||||
1. **Review Metrics**
|
||||
- Analyze performance trends
|
||||
- Check cost optimization opportunities
|
||||
|
||||
2. **Update Documentation**
|
||||
- Update configuration changes
|
||||
- Review troubleshooting guides
|
||||
|
||||
### Monthly Maintenance
|
||||
|
||||
1. **Capacity Planning**
|
||||
- Review usage patterns
|
||||
- Plan capacity adjustments
|
||||
|
||||
2. **Security Updates**
|
||||
- Review security patches
|
||||
- Update base images if needed
|
||||
|
||||
## Troubleshooting Common Issues
|
||||
|
||||
### Issue: VM Creation Fails
|
||||
|
||||
```bash
|
||||
# Check MacStadium account limits
|
||||
orka account info
|
||||
|
||||
# Check available resources
|
||||
orka resource list
|
||||
|
||||
# Review Packer logs
|
||||
tail -f packer-build.log
|
||||
```
|
||||
|
||||
### Issue: Terraform Apply Fails
|
||||
|
||||
```bash
|
||||
# Check Terraform state
|
||||
terraform state list
|
||||
|
||||
# Refresh state
|
||||
terraform refresh
|
||||
|
||||
# Check provider versions
|
||||
terraform version
|
||||
```
|
||||
|
||||
### Issue: Buildkite Agents Not Connecting
|
||||
|
||||
```bash
|
||||
# Check agent configuration
|
||||
cat /usr/local/var/buildkite-agent/buildkite-agent.cfg
|
||||
|
||||
# Check agent logs
|
||||
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
|
||||
|
||||
# Restart agent service
|
||||
sudo launchctl unload /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
|
||||
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
|
||||
```
|
||||
|
||||
## Rollback Procedures
|
||||
|
||||
### Rollback VM Fleet
|
||||
|
||||
1. **Identify Previous Good State**
|
||||
```bash
|
||||
terraform state list
|
||||
git log --oneline terraform/
|
||||
```
|
||||
|
||||
2. **Rollback to Previous Configuration**
|
||||
```bash
|
||||
git checkout <previous-commit>
|
||||
terraform plan -var-file="production.tfvars"
|
||||
terraform apply -var-file="production.tfvars"
|
||||
```
|
||||
|
||||
### Rollback VM Images
|
||||
|
||||
1. **List Available Images**
|
||||
```bash
|
||||
orka image list
|
||||
```
|
||||
|
||||
2. **Update Terraform to Use Previous Images**
|
||||
```bash
|
||||
# Edit terraform configuration to use previous image IDs
|
||||
terraform plan -var-file="production.tfvars"
|
||||
terraform apply -var-file="production.tfvars"
|
||||
```
|
||||
|
||||
## Cost Optimization Tips
|
||||
|
||||
1. **Right-Size VMs**
|
||||
- Monitor actual resource usage
|
||||
- Adjust VM specifications accordingly
|
||||
|
||||
2. **Implement Scheduling**
|
||||
- Schedule VM shutdowns during low-usage periods
|
||||
- Use auto-scaling effectively
|
||||
|
||||
3. **Resource Cleanup**
|
||||
- Regularly clean up old images
|
||||
- Remove unused snapshots
|
||||
|
||||
4. **Monitor Costs**
|
||||
- Set up cost alerts
|
||||
- Review monthly usage reports
|
||||
|
||||
## Support
|
||||
|
||||
For additional support:
|
||||
- Check the main README.md for troubleshooting
|
||||
- Review GitHub Actions logs
|
||||
- Contact MacStadium support for platform issues
|
||||
- Open issues in the repository for infrastructure problems
|
||||
@@ -1,374 +0,0 @@
|
||||
# macOS Runner Infrastructure
|
||||
|
||||
This directory contains the infrastructure-as-code for deploying and managing macOS CI runners for the Bun project. It is located in the `.buildkite` folder alongside other CI configuration. The infrastructure provides automated, scalable, and reliable macOS build environments using MacStadium's Orka platform.
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
The infrastructure consists of several key components:
|
||||
|
||||
1. **VM Images**: Golden images built with Packer containing all necessary software
|
||||
2. **VM Fleet**: Terraform-managed fleet of macOS VMs across different versions
|
||||
3. **User Isolation**: Per-job user creation and cleanup for complete isolation
|
||||
4. **Automation**: GitHub Actions workflows for daily image rebuilds and fleet management
|
||||
|
||||
## Key Features
|
||||
|
||||
- **Complete Isolation**: Each Buildkite job runs in its own user account
|
||||
- **Automatic Cleanup**: Processes and temporary files are cleaned up after each job
|
||||
- **Daily Image Rebuilds**: Automated nightly rebuilds ensure fresh, up-to-date environments
|
||||
- **Multi-Version Support**: Supports macOS 13, 14, and 15 simultaneously
|
||||
- **Auto-Scaling**: Automatic scaling based on job queue demand
|
||||
- **Health Monitoring**: Continuous health checks and monitoring
|
||||
- **Cost Optimization**: Efficient resource utilization and cleanup
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
.buildkite/macos-runners/
|
||||
├── packer/ # Packer configuration for VM images
|
||||
│ ├── macos-base.pkr.hcl # Main Packer configuration
|
||||
│ └── ssh-keys/ # SSH keys for VM access
|
||||
├── terraform/ # Terraform configuration for VM fleet
|
||||
│ ├── main.tf # Main Terraform configuration
|
||||
│ ├── variables.tf # Variable definitions
|
||||
│ ├── outputs.tf # Output definitions
|
||||
│ └── user-data.sh # VM initialization script
|
||||
├── scripts/ # Management and utility scripts
|
||||
│ ├── bootstrap-macos.sh # macOS-specific bootstrap script
|
||||
│ ├── create-build-user.sh # User creation script
|
||||
│ ├── cleanup-build-user.sh # User cleanup script
|
||||
│ └── job-runner.sh # Main job runner script
|
||||
├── github-actions/ # GitHub Actions workflows
|
||||
│ ├── image-rebuild.yml # Daily image rebuild workflow
|
||||
│ └── deploy-fleet.yml # Fleet deployment workflow
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before deploying the infrastructure, ensure you have:
|
||||
|
||||
1. **MacStadium Account**: Active MacStadium Orka account with API access
|
||||
2. **AWS Account**: For Terraform state storage and CloudWatch monitoring
|
||||
3. **GitHub Repository**: With required secrets configured
|
||||
4. **Buildkite Account**: With organization and agent tokens
|
||||
5. **Required Tools**: Packer, Terraform, AWS CLI, and MacStadium CLI
|
||||
|
||||
## Required Secrets
|
||||
|
||||
Configure the following secrets in your GitHub repository:
|
||||
|
||||
### MacStadium
|
||||
- `MACSTADIUM_API_KEY`: MacStadium API key
|
||||
- `ORKA_ENDPOINT`: MacStadium Orka API endpoint
|
||||
- `ORKA_AUTH_TOKEN`: MacStadium authentication token
|
||||
|
||||
### AWS
|
||||
- `AWS_ACCESS_KEY_ID`: AWS access key ID
|
||||
- `AWS_SECRET_ACCESS_KEY`: AWS secret access key
|
||||
|
||||
### Buildkite
|
||||
- `BUILDKITE_AGENT_TOKEN`: Buildkite agent token
|
||||
- `BUILDKITE_API_TOKEN`: Buildkite API token (for monitoring)
|
||||
- `BUILDKITE_ORG`: Buildkite organization slug
|
||||
|
||||
### GitHub
|
||||
- `GITHUB_TOKEN`: GitHub personal access token (for private repositories)
|
||||
|
||||
### Notifications
|
||||
- `DISCORD_WEBHOOK_URL`: Discord webhook URL for notifications
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Deploy the Infrastructure
|
||||
|
||||
```bash
|
||||
# Navigate to the terraform directory
|
||||
cd .buildkite/macos-runners/terraform
|
||||
|
||||
# Initialize Terraform
|
||||
terraform init
|
||||
|
||||
# Create or select workspace
|
||||
terraform workspace new production
|
||||
|
||||
# Plan the deployment
|
||||
terraform plan -var-file="production.tfvars"
|
||||
|
||||
# Apply the deployment
|
||||
terraform apply -var-file="production.tfvars"
|
||||
```
|
||||
|
||||
### 2. Build VM Images
|
||||
|
||||
```bash
|
||||
# Navigate to the packer directory
|
||||
cd .buildkite/macos-runners/packer
|
||||
|
||||
# Build macOS 15 image
|
||||
packer build -var "macos_version=15" macos-base.pkr.hcl
|
||||
|
||||
# Build macOS 14 image
|
||||
packer build -var "macos_version=14" macos-base.pkr.hcl
|
||||
|
||||
# Build macOS 13 image
|
||||
packer build -var "macos_version=13" macos-base.pkr.hcl
|
||||
```
|
||||
|
||||
### 3. Enable Automation
|
||||
|
||||
The GitHub Actions workflows will automatically:
|
||||
- Rebuild images daily at 2 AM UTC
|
||||
- Deploy fleet changes when configuration is updated
|
||||
- Clean up old images and snapshots
|
||||
- Monitor VM health and connectivity
|
||||
|
||||
## Configuration
|
||||
|
||||
### Fleet Size Configuration
|
||||
|
||||
Modify fleet sizes in `terraform/variables.tf`:
|
||||
|
||||
```hcl
|
||||
variable "fleet_size" {
|
||||
default = {
|
||||
macos_13 = 4 # Number of macOS 13 VMs
|
||||
macos_14 = 6 # Number of macOS 14 VMs
|
||||
macos_15 = 8 # Number of macOS 15 VMs
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### VM Configuration
|
||||
|
||||
Adjust VM specifications in `terraform/variables.tf`:
|
||||
|
||||
```hcl
|
||||
variable "vm_configuration" {
|
||||
default = {
|
||||
cpu_count = 12 # Number of CPU cores
|
||||
memory_gb = 32 # Memory in GB
|
||||
disk_size = 500 # Disk size in GB
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Auto-Scaling Configuration
|
||||
|
||||
Configure auto-scaling parameters:
|
||||
|
||||
```hcl
|
||||
variable "autoscaling_config" {
|
||||
default = {
|
||||
min_size = 2
|
||||
max_size = 30
|
||||
desired_capacity = 10
|
||||
scale_up_threshold = 80
|
||||
scale_down_threshold = 20
|
||||
scale_up_adjustment = 2
|
||||
scale_down_adjustment = 1
|
||||
cooldown_period = 300
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Software Included
|
||||
|
||||
Each VM image includes:
|
||||
|
||||
### Development Tools
|
||||
- Xcode Command Line Tools
|
||||
- LLVM/Clang 19.1.7 (exact version)
|
||||
- CMake 3.30.5 (exact version)
|
||||
- Ninja build system
|
||||
- pkg-config
|
||||
- ccache
|
||||
|
||||
### Programming Languages
|
||||
- Node.js 24.3.0 (exact version, matches bootstrap.sh)
|
||||
- Bun 1.2.17 (exact version, matches bootstrap.sh)
|
||||
- Python 3.11 and 3.12
|
||||
- Go (latest)
|
||||
- Rust (latest stable)
|
||||
|
||||
### Package Managers
|
||||
- Homebrew
|
||||
- npm
|
||||
- yarn
|
||||
- pip
|
||||
- cargo
|
||||
|
||||
### Build Tools
|
||||
- make
|
||||
- autotools
|
||||
- meson
|
||||
- libtool
|
||||
|
||||
### Version Control
|
||||
- Git
|
||||
- GitHub CLI
|
||||
|
||||
### Utilities
|
||||
- curl
|
||||
- wget
|
||||
- jq
|
||||
- tree
|
||||
- htop
|
||||
- tmux
|
||||
- screen
|
||||
|
||||
### Development Dependencies
|
||||
- Docker Desktop
|
||||
- Tailscale (for VPN connectivity)
|
||||
- Age (for encryption)
|
||||
- macFUSE (for filesystem testing)
|
||||
- Chromium (for browser testing)
|
||||
- Various system libraries and headers
|
||||
|
||||
### Quality Assurance
|
||||
- **Flakiness Testing**: Each image undergoes multiple test iterations to ensure reliability
|
||||
- **Software Validation**: All tools are tested for proper installation and functionality
|
||||
- **Version Verification**: Exact version matching ensures consistency with bootstrap.sh
|
||||
|
||||
## User Isolation
|
||||
|
||||
Each Buildkite job runs in complete isolation:
|
||||
|
||||
1. **Unique User**: Each job gets a unique user account (`bk-<job-id>`)
|
||||
2. **Isolated Environment**: Separate home directory and environment variables
|
||||
3. **Process Isolation**: All processes are killed after job completion
|
||||
4. **File System Cleanup**: Temporary files and caches are cleaned up
|
||||
5. **Network Isolation**: No shared network resources between jobs
|
||||
|
||||
## Monitoring and Alerting
|
||||
|
||||
The infrastructure includes comprehensive monitoring:
|
||||
|
||||
- **Health Checks**: HTTP health endpoints on each VM
|
||||
- **CloudWatch Metrics**: CPU, memory, disk usage monitoring
|
||||
- **Buildkite Integration**: Agent connectivity monitoring
|
||||
- **Slack Notifications**: Success/failure notifications
|
||||
- **Log Aggregation**: Centralized logging for troubleshooting
|
||||
|
||||
## Security Considerations
|
||||
|
||||
- **Encrypted Disks**: All VM disks are encrypted
|
||||
- **Network Security**: Security groups restrict network access
|
||||
- **SSH Key Management**: Secure SSH key distribution
|
||||
- **Regular Updates**: Automatic security updates
|
||||
- **Process Isolation**: Complete isolation between jobs
|
||||
- **Secure Credential Handling**: Secrets are managed securely
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **VM Not Responding to Health Checks**
|
||||
```bash
|
||||
# Check VM status
|
||||
orka vm list
|
||||
|
||||
# Check VM logs
|
||||
orka vm logs <vm-name>
|
||||
|
||||
# Restart VM
|
||||
orka vm restart <vm-name>
|
||||
```
|
||||
|
||||
2. **Buildkite Agent Not Connecting**
|
||||
```bash
|
||||
# Check agent status
|
||||
sudo launchctl list | grep buildkite
|
||||
|
||||
# Check agent logs
|
||||
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
|
||||
|
||||
# Restart agent
|
||||
sudo launchctl unload /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
|
||||
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
|
||||
```
|
||||
|
||||
3. **User Creation Failures**
|
||||
```bash
|
||||
# Check user creation logs
|
||||
tail -f /var/log/system.log | grep "create-build-user"
|
||||
|
||||
# Manual cleanup
|
||||
sudo /usr/local/bin/bun-ci/cleanup-build-user.sh <username>
|
||||
```
|
||||
|
||||
4. **Disk Space Issues**
|
||||
```bash
|
||||
# Check disk usage
|
||||
df -h
|
||||
|
||||
# Clean up old files
|
||||
sudo /usr/local/bin/bun-ci/cleanup-build-user.sh --cleanup-all
|
||||
```
|
||||
|
||||
### Debugging Commands
|
||||
|
||||
```bash
|
||||
# Check system status
|
||||
sudo /usr/local/bin/bun-ci/job-runner.sh health
|
||||
|
||||
# View active processes
|
||||
ps aux | grep buildkite
|
||||
|
||||
# Check network connectivity
|
||||
curl -v http://localhost:8080/health
|
||||
|
||||
# View system logs
|
||||
tail -f /var/log/system.log
|
||||
|
||||
# Check Docker status
|
||||
docker info
|
||||
```
|
||||
|
||||
## Maintenance
|
||||
|
||||
### Regular Tasks
|
||||
|
||||
1. **Image Updates**: Images are rebuilt daily automatically
|
||||
2. **Fleet Updates**: Terraform changes are applied automatically
|
||||
3. **Cleanup**: Old images and snapshots are cleaned up automatically
|
||||
4. **Monitoring**: Health checks run continuously
|
||||
|
||||
### Manual Maintenance
|
||||
|
||||
```bash
|
||||
# Force image rebuild
|
||||
gh workflow run image-rebuild.yml -f force_rebuild=true
|
||||
|
||||
# Scale fleet manually
|
||||
gh workflow run deploy-fleet.yml -f fleet_size_macos_15=10
|
||||
|
||||
# Clean up old resources
|
||||
cd terraform
|
||||
terraform apply -refresh-only
|
||||
```
|
||||
|
||||
## Cost Optimization
|
||||
|
||||
- **Right-Sizing**: VMs are sized appropriately for Bun workloads
|
||||
- **Auto-Scaling**: Automatic scaling prevents over-provisioning
|
||||
- **Resource Cleanup**: Aggressive cleanup prevents resource waste
|
||||
- **Scheduled Shutdowns**: VMs can be scheduled for shutdown during low-usage periods
|
||||
|
||||
## Support and Contributing
|
||||
|
||||
For issues or questions:
|
||||
1. Check the troubleshooting section above
|
||||
2. Review GitHub Actions workflow logs
|
||||
3. Check MacStadium Orka console
|
||||
4. Open an issue in the repository
|
||||
|
||||
When contributing:
|
||||
1. Test changes in a staging environment first
|
||||
2. Update documentation as needed
|
||||
3. Follow the existing code style
|
||||
4. Add appropriate tests and validation
|
||||
|
||||
## License
|
||||
|
||||
This infrastructure code is part of the Bun project and follows the same license terms.
|
||||
@@ -1,376 +0,0 @@
|
||||
name: Deploy macOS Runner Fleet
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
environment:
|
||||
description: 'Deployment environment'
|
||||
required: true
|
||||
default: 'production'
|
||||
type: choice
|
||||
options:
|
||||
- production
|
||||
- staging
|
||||
- development
|
||||
fleet_size_macos_13:
|
||||
description: 'Number of macOS 13 VMs'
|
||||
required: false
|
||||
default: '4'
|
||||
fleet_size_macos_14:
|
||||
description: 'Number of macOS 14 VMs'
|
||||
required: false
|
||||
default: '6'
|
||||
fleet_size_macos_15:
|
||||
description: 'Number of macOS 15 VMs'
|
||||
required: false
|
||||
default: '8'
|
||||
force_deploy:
|
||||
description: 'Force deployment even if no changes'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
TERRAFORM_VERSION: "1.6.0"
|
||||
AWS_REGION: "us-west-2"
|
||||
|
||||
jobs:
|
||||
validate-inputs:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
validated: ${{ steps.validate.outputs.validated }}
|
||||
total_vms: ${{ steps.validate.outputs.total_vms }}
|
||||
steps:
|
||||
- name: Validate inputs
|
||||
id: validate
|
||||
run: |
|
||||
# Validate fleet sizes
|
||||
macos_13="${{ github.event.inputs.fleet_size_macos_13 }}"
|
||||
macos_14="${{ github.event.inputs.fleet_size_macos_14 }}"
|
||||
macos_15="${{ github.event.inputs.fleet_size_macos_15 }}"
|
||||
|
||||
# Check if inputs are valid numbers
|
||||
if ! [[ "$macos_13" =~ ^[0-9]+$ ]] || ! [[ "$macos_14" =~ ^[0-9]+$ ]] || ! [[ "$macos_15" =~ ^[0-9]+$ ]]; then
|
||||
echo "Error: Fleet sizes must be valid numbers"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if at least one VM is requested
|
||||
total_vms=$((macos_13 + macos_14 + macos_15))
|
||||
if [[ $total_vms -eq 0 ]]; then
|
||||
echo "Error: At least one VM must be requested"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check reasonable limits
|
||||
if [[ $total_vms -gt 50 ]]; then
|
||||
echo "Error: Total VMs cannot exceed 50"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "validated=true" >> $GITHUB_OUTPUT
|
||||
echo "total_vms=$total_vms" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "Validation passed:"
|
||||
echo "- macOS 13: $macos_13 VMs"
|
||||
echo "- macOS 14: $macos_14 VMs"
|
||||
echo "- macOS 15: $macos_15 VMs"
|
||||
echo "- Total: $total_vms VMs"
|
||||
|
||||
plan-deployment:
|
||||
runs-on: ubuntu-latest
|
||||
needs: validate-inputs
|
||||
if: needs.validate-inputs.outputs.validated == 'true'
|
||||
outputs:
|
||||
plan_status: ${{ steps.plan.outputs.plan_status }}
|
||||
has_changes: ${{ steps.plan.outputs.has_changes }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@v3
|
||||
with:
|
||||
terraform_version: ${{ env.TERRAFORM_VERSION }}
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: ${{ env.AWS_REGION }}
|
||||
|
||||
- name: Initialize Terraform
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform init
|
||||
terraform workspace select ${{ github.event.inputs.environment }} || terraform workspace new ${{ github.event.inputs.environment }}
|
||||
|
||||
- name: Create terraform variables file
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
cat > terraform.tfvars << EOF
|
||||
environment = "${{ github.event.inputs.environment }}"
|
||||
fleet_size = {
|
||||
macos_13 = ${{ github.event.inputs.fleet_size_macos_13 }}
|
||||
macos_14 = ${{ github.event.inputs.fleet_size_macos_14 }}
|
||||
macos_15 = ${{ github.event.inputs.fleet_size_macos_15 }}
|
||||
}
|
||||
EOF
|
||||
|
||||
- name: Plan Terraform deployment
|
||||
id: plan
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Run terraform plan
|
||||
terraform plan \
|
||||
-var "macstadium_api_key=${{ secrets.MACSTADIUM_API_KEY }}" \
|
||||
-var "buildkite_agent_token=${{ secrets.BUILDKITE_AGENT_TOKEN }}" \
|
||||
-var "github_token=${{ secrets.GITHUB_TOKEN }}" \
|
||||
-out=tfplan \
|
||||
-detailed-exitcode > plan_output.txt 2>&1
|
||||
|
||||
plan_exit_code=$?
|
||||
|
||||
# Check plan results
|
||||
if [[ $plan_exit_code -eq 0 ]]; then
|
||||
echo "plan_status=no_changes" >> $GITHUB_OUTPUT
|
||||
echo "has_changes=false" >> $GITHUB_OUTPUT
|
||||
elif [[ $plan_exit_code -eq 2 ]]; then
|
||||
echo "plan_status=has_changes" >> $GITHUB_OUTPUT
|
||||
echo "has_changes=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "plan_status=failed" >> $GITHUB_OUTPUT
|
||||
echo "has_changes=false" >> $GITHUB_OUTPUT
|
||||
cat plan_output.txt
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Save plan output
|
||||
echo "Plan output:"
|
||||
cat plan_output.txt
|
||||
|
||||
- name: Upload plan
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: terraform-plan
|
||||
path: |
|
||||
.buildkite/macos-runners/terraform/tfplan
|
||||
.buildkite/macos-runners/terraform/plan_output.txt
|
||||
retention-days: 30
|
||||
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [validate-inputs, plan-deployment]
|
||||
if: needs.plan-deployment.outputs.has_changes == 'true' || github.event.inputs.force_deploy == 'true'
|
||||
environment: ${{ github.event.inputs.environment }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@v3
|
||||
with:
|
||||
terraform_version: ${{ env.TERRAFORM_VERSION }}
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: ${{ env.AWS_REGION }}
|
||||
|
||||
- name: Download plan
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: terraform-plan
|
||||
path: .buildkite/macos-runners/terraform/
|
||||
|
||||
- name: Initialize Terraform
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform init
|
||||
terraform workspace select ${{ github.event.inputs.environment }}
|
||||
|
||||
- name: Apply Terraform deployment
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
echo "Applying Terraform deployment..."
|
||||
terraform apply -auto-approve tfplan
|
||||
|
||||
- name: Get deployment outputs
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform output -json > terraform-outputs.json
|
||||
echo "Deployment outputs:"
|
||||
cat terraform-outputs.json | jq .
|
||||
|
||||
- name: Upload deployment outputs
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: deployment-outputs-${{ github.event.inputs.environment }}
|
||||
path: .buildkite/macos-runners/terraform/terraform-outputs.json
|
||||
retention-days: 90
|
||||
|
||||
- name: Verify deployment
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
echo "Verifying deployment..."
|
||||
|
||||
# Check VM count
|
||||
vm_count=$(terraform output -json vm_instances | jq 'length')
|
||||
expected_count=${{ needs.validate-inputs.outputs.total_vms }}
|
||||
|
||||
if [[ $vm_count -eq $expected_count ]]; then
|
||||
echo "✅ VM count matches expected: $vm_count"
|
||||
else
|
||||
echo "❌ VM count mismatch: expected $expected_count, got $vm_count"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check VM states
|
||||
terraform output -json vm_instances | jq -r 'to_entries[] | "\(.key): \(.value.name) - \(.value.status)"' | while read vm_info; do
|
||||
echo "VM: $vm_info"
|
||||
done
|
||||
|
||||
health-check:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [validate-inputs, plan-deployment, deploy]
|
||||
if: always() && needs.deploy.result == 'success'
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y jq curl
|
||||
|
||||
- name: Download deployment outputs
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: deployment-outputs-${{ github.event.inputs.environment }}
|
||||
path: ./
|
||||
|
||||
- name: Wait for VMs to be ready
|
||||
run: |
|
||||
echo "Waiting for VMs to be ready..."
|
||||
sleep 300 # Wait 5 minutes for VMs to initialize
|
||||
|
||||
- name: Check VM health
|
||||
run: |
|
||||
echo "Checking VM health..."
|
||||
|
||||
# Read VM details from outputs
|
||||
jq -r '.vm_instances.value | to_entries[] | "\(.value.name) \(.value.ip_address)"' terraform-outputs.json | while read vm_name vm_ip; do
|
||||
echo "Checking VM: $vm_name ($vm_ip)"
|
||||
|
||||
# Check health endpoint
|
||||
max_attempts=12
|
||||
attempt=1
|
||||
|
||||
while [[ $attempt -le $max_attempts ]]; do
|
||||
if curl -f -s --max-time 30 "http://$vm_ip:8080/health" > /dev/null; then
|
||||
echo "✅ $vm_name is healthy"
|
||||
break
|
||||
else
|
||||
echo "⏳ $vm_name not ready yet (attempt $attempt/$max_attempts)"
|
||||
sleep 30
|
||||
((attempt++))
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $attempt -gt $max_attempts ]]; then
|
||||
echo "❌ $vm_name failed health check"
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Check Buildkite connectivity
|
||||
run: |
|
||||
echo "Checking Buildkite agent connectivity..."
|
||||
|
||||
# Wait a bit more for agents to connect
|
||||
sleep 60
|
||||
|
||||
# Check connected agents
|
||||
curl -s -H "Authorization: Bearer ${{ secrets.BUILDKITE_API_TOKEN }}" \
|
||||
"https://api.buildkite.com/v2/organizations/${{ secrets.BUILDKITE_ORG }}/agents" | \
|
||||
jq -r '.[] | select(.name | test("^bun-runner-")) | "\(.name) \(.connection_state) \(.hostname)"' | \
|
||||
while read agent_name state hostname; do
|
||||
echo "Agent: $agent_name - State: $state - Host: $hostname"
|
||||
done
|
||||
|
||||
notify-success:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [validate-inputs, plan-deployment, deploy, health-check]
|
||||
if: always() && needs.deploy.result == 'success'
|
||||
|
||||
steps:
|
||||
- name: Notify success
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: success
|
||||
title: "macOS runner fleet deployed successfully"
|
||||
description: |
|
||||
🚀 **macOS runner fleet deployed successfully**
|
||||
|
||||
**Environment:** ${{ github.event.inputs.environment }}
|
||||
**Total VMs:** ${{ needs.validate-inputs.outputs.total_vms }}
|
||||
|
||||
**Fleet composition:**
|
||||
- macOS 13: ${{ github.event.inputs.fleet_size_macos_13 }} VMs
|
||||
- macOS 14: ${{ github.event.inputs.fleet_size_macos_14 }} VMs
|
||||
- macOS 15: ${{ github.event.inputs.fleet_size_macos_15 }} VMs
|
||||
|
||||
**Repository:** ${{ github.repository }}
|
||||
[View Deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
||||
color: 0x00ff00
|
||||
username: "GitHub Actions"
|
||||
|
||||
notify-failure:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [validate-inputs, plan-deployment, deploy, health-check]
|
||||
if: always() && (needs.validate-inputs.result == 'failure' || needs.plan-deployment.result == 'failure' || needs.deploy.result == 'failure')
|
||||
|
||||
steps:
|
||||
- name: Notify failure
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: failure
|
||||
title: "macOS runner fleet deployment failed"
|
||||
description: |
|
||||
🔴 **macOS runner fleet deployment failed**
|
||||
|
||||
**Environment:** ${{ github.event.inputs.environment }}
|
||||
**Failed stage:** ${{ needs.validate-inputs.result == 'failure' && 'Validation' || needs.plan-deployment.result == 'failure' && 'Planning' || 'Deployment' }}
|
||||
|
||||
**Repository:** ${{ github.repository }}
|
||||
[View Deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
||||
|
||||
Please check the logs for more details.
|
||||
color: 0xff0000
|
||||
username: "GitHub Actions"
|
||||
|
||||
notify-no-changes:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [validate-inputs, plan-deployment]
|
||||
if: needs.plan-deployment.outputs.has_changes == 'false' && github.event.inputs.force_deploy != 'true'
|
||||
|
||||
steps:
|
||||
- name: Notify no changes
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: cancelled
|
||||
title: "macOS runner fleet deployment skipped"
|
||||
description: |
|
||||
ℹ️ **macOS runner fleet deployment skipped** - no changes detected in Terraform plan
|
||||
color: 0x808080
|
||||
username: "GitHub Actions"
|
||||
@@ -1,515 +0,0 @@
|
||||
name: Rebuild macOS Runner Images
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Run daily at 2 AM UTC
|
||||
- cron: '0 2 * * *'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
macos_versions:
|
||||
description: 'macOS versions to rebuild (comma-separated: 13,14,15)'
|
||||
required: false
|
||||
default: '13,14,15'
|
||||
force_rebuild:
|
||||
description: 'Force rebuild even if no changes detected'
|
||||
required: false
|
||||
default: 'false'
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
PACKER_VERSION: "1.9.4"
|
||||
TERRAFORM_VERSION: "1.6.0"
|
||||
|
||||
jobs:
|
||||
check-changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
should_rebuild: ${{ steps.check.outputs.should_rebuild }}
|
||||
changed_files: ${{ steps.check.outputs.changed_files }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- name: Check for changes
|
||||
id: check
|
||||
run: |
|
||||
# Check whether any relevant files changed in the most recent commit (fetch-depth: 2 makes HEAD~1 available)
|
||||
changed_files=$(git diff --name-only HEAD~1 HEAD | grep -E "(bootstrap|packer|\.buildkite/macos-runners)" | head -20)
|
||||
|
||||
if [[ -n "$changed_files" ]] || [[ "${{ github.event.inputs.force_rebuild }}" == "true" ]]; then
|
||||
echo "should_rebuild=true" >> $GITHUB_OUTPUT
|
||||
echo "changed_files<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$changed_files" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "should_rebuild=false" >> $GITHUB_OUTPUT
|
||||
echo "changed_files=" >> $GITHUB_OUTPUT
|
||||
fi
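# The name<<EOF ... EOF form used above is GitHub Actions' syntax for writing a multi-line value to $GITHUB_OUTPUT.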
|
||||
|
||||
build-images:
|
||||
runs-on: ubuntu-latest
|
||||
needs: check-changes
|
||||
if: needs.check-changes.outputs.should_rebuild == 'true'
|
||||
strategy:
|
||||
matrix:
|
||||
macos_version: [13, 14, 15]
|
||||
fail-fast: false
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Packer
|
||||
uses: hashicorp/setup-packer@main
|
||||
with:
|
||||
version: ${{ env.PACKER_VERSION }}
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@v3
|
||||
with:
|
||||
terraform_version: ${{ env.TERRAFORM_VERSION }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y jq curl
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-west-2
|
||||
|
||||
- name: Validate Packer configuration
|
||||
working-directory: .buildkite/macos-runners/packer
|
||||
run: |
|
||||
packer validate \
|
||||
-var "macos_version=${{ matrix.macos_version }}" \
|
||||
-var "orka_endpoint=${{ secrets.ORKA_ENDPOINT }}" \
|
||||
-var "orka_auth_token=${{ secrets.ORKA_AUTH_TOKEN }}" \
|
||||
macos-base.pkr.hcl
|
||||
|
||||
- name: Build macOS ${{ matrix.macos_version }} image
|
||||
working-directory: .buildkite/macos-runners/packer
|
||||
run: |
|
||||
echo "Building macOS ${{ matrix.macos_version }} image..."
|
||||
|
||||
# Set build variables
|
||||
export PACKER_LOG=1
|
||||
export PACKER_LOG_PATH="./packer-build-macos-${{ matrix.macos_version }}.log"
|
||||
|
||||
# Build the image
|
||||
packer build \
|
||||
-var "macos_version=${{ matrix.macos_version }}" \
|
||||
-var "orka_endpoint=${{ secrets.ORKA_ENDPOINT }}" \
|
||||
-var "orka_auth_token=${{ secrets.ORKA_AUTH_TOKEN }}" \
|
||||
-var "base_image=base-images/macos-${{ matrix.macos_version }}-$([ ${{ matrix.macos_version }} -eq 13 ] && echo 'ventura' || [ ${{ matrix.macos_version }} -eq 14 ] && echo 'sonoma' || echo 'sequoia')" \
|
||||
macos-base.pkr.hcl
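# The inline test above maps macos_version 13 -> ventura, 14 -> sonoma, 15 -> sequoia when choosing the base image.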
|
||||
|
||||
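# The validation and flakiness steps below invoke the `orka` CLI, which this job
# never installs. A minimal sketch, assuming the same CLI install used by the
# cleanup and health-check jobs is what is needed here:
- name: Install MacStadium CLI
run: |
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
sudo mv orka-cli /usr/local/bin/orka
chmod +x /usr/local/bin/orka

- name: Configure MacStadium CLI
run: |
orka config set endpoint ${{ secrets.ORKA_ENDPOINT }}
orka auth token ${{ secrets.ORKA_AUTH_TOKEN }}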
- name: Validate built image
|
||||
working-directory: .buildkite/macos-runners/packer
|
||||
run: |
|
||||
echo "Validating built image..."
|
||||
|
||||
# Get the latest built image ID
|
||||
IMAGE_ID=$(orka image list --output json | jq -r '.[] | select(.name | test("^bun-macos-${{ matrix.macos_version }}-")) | .id' | head -1)
|
||||
|
||||
if [ -z "$IMAGE_ID" ]; then
|
||||
echo "❌ No image found for macOS ${{ matrix.macos_version }}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Found image: $IMAGE_ID"
|
||||
|
||||
# Create a test VM to validate the image
|
||||
VM_NAME="test-validation-${{ matrix.macos_version }}-$(date +%s)"
|
||||
|
||||
echo "Creating test VM: $VM_NAME"
|
||||
orka vm create \
|
||||
--name "$VM_NAME" \
|
||||
--image "$IMAGE_ID" \
|
||||
--cpu 4 \
|
||||
--memory 8 \
|
||||
--wait
|
||||
|
||||
# Wait for VM to be ready
|
||||
sleep 60
|
||||
|
||||
# Get VM IP
|
||||
VM_IP=$(orka vm show "$VM_NAME" --output json | jq -r '.ip_address')
|
||||
|
||||
echo "Testing VM at IP: $VM_IP"
|
||||
|
||||
# Test software installations
|
||||
echo "Testing software installations..."
|
||||
|
||||
# Test Node.js
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'node --version' || exit 1
|
||||
|
||||
# Test Bun
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'bun --version' || exit 1
|
||||
|
||||
# Test build tools
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'cmake --version' || exit 1
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'clang --version' || exit 1
|
||||
|
||||
# Test Docker
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'docker --version' || exit 1
|
||||
|
||||
# Test Tailscale
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'tailscale --version' || exit 1
|
||||
|
||||
# Test health endpoint
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'curl -f http://localhost:8080/health' || exit 1
|
||||
|
||||
echo "✅ All software validations passed"
|
||||
|
||||
# Clean up test VM
|
||||
orka vm delete "$VM_NAME" --force
|
||||
|
||||
echo "✅ Image validation completed successfully"
|
||||
|
||||
- name: Run flakiness checks
|
||||
working-directory: .buildkite/macos-runners/packer
|
||||
run: |
|
||||
echo "Running flakiness checks..."
|
||||
|
||||
# Get the latest built image ID
|
||||
IMAGE_ID=$(orka image list --output json | jq -r '.[] | select(.name | test("^bun-macos-${{ matrix.macos_version }}-")) | .id' | head -1)
|
||||
|
||||
# Run multiple test iterations to check for flakiness
|
||||
ITERATIONS=3
|
||||
PASSED=0
|
||||
FAILED=0
|
||||
|
||||
for i in $(seq 1 $ITERATIONS); do
|
||||
echo "Running flakiness test iteration $i/$ITERATIONS..."
|
||||
|
||||
VM_NAME="flakiness-test-${{ matrix.macos_version }}-$i-$(date +%s)"
|
||||
|
||||
# Create test VM
|
||||
orka vm create \
|
||||
--name "$VM_NAME" \
|
||||
--image "$IMAGE_ID" \
|
||||
--cpu 4 \
|
||||
--memory 8 \
|
||||
--wait
|
||||
|
||||
sleep 30
|
||||
|
||||
# Get VM IP
|
||||
VM_IP=$(orka vm show "$VM_NAME" --output json | jq -r '.ip_address')
|
||||
|
||||
# Run a series of quick tests
|
||||
TEST_PASSED=true
|
||||
|
||||
# Test 1: Basic command execution
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'echo "test" > /tmp/test.txt && cat /tmp/test.txt'; then
|
||||
echo "❌ Basic command test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Test 2: Node.js execution
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'node -e "console.log(\"Node.js test\")"'; then
|
||||
echo "❌ Node.js test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Test 3: Bun execution
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'bun -e "console.log(\"Bun test\")"'; then
|
||||
echo "❌ Bun test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Test 4: Build tools
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'clang --version > /tmp/clang_version.txt'; then
|
||||
echo "❌ Clang test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Test 5: File system operations
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'mkdir -p /tmp/test_dir && touch /tmp/test_dir/test_file'; then
|
||||
echo "❌ File system test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Test 6: Process creation
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'ps aux | grep -v grep | wc -l'; then
|
||||
echo "❌ Process test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Clean up test VM
|
||||
orka vm delete "$VM_NAME" --force
|
||||
|
||||
if [ "$TEST_PASSED" = true ]; then
|
||||
echo "✅ Iteration $i passed"
|
||||
PASSED=$((PASSED + 1))
|
||||
else
|
||||
echo "❌ Iteration $i failed"
|
||||
FAILED=$((FAILED + 1))
|
||||
fi
|
||||
|
||||
# Short delay between iterations
|
||||
sleep 10
|
||||
done
|
||||
|
||||
echo "Flakiness check results:"
|
||||
echo "- Passed: $PASSED/$ITERATIONS"
|
||||
echo "- Failed: $FAILED/$ITERATIONS"
|
||||
|
||||
# Calculate success rate
|
||||
SUCCESS_RATE=$((PASSED * 100 / ITERATIONS))
|
||||
echo "- Success rate: $SUCCESS_RATE%"
|
||||
|
||||
# Fail if success rate is below 80%
|
||||
if [ $SUCCESS_RATE -lt 80 ]; then
|
||||
echo "❌ Image is too flaky! Success rate: $SUCCESS_RATE% (minimum: 80%)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Flakiness checks passed with $SUCCESS_RATE% success rate"
|
||||
|
||||
- name: Upload build logs
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: packer-logs-macos-${{ matrix.macos_version }}
|
||||
path: .buildkite/macos-runners/packer/packer-build-macos-${{ matrix.macos_version }}.log
|
||||
retention-days: 7
|
||||
|
||||
- name: Notify on failure
|
||||
if: failure()
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: failure
|
||||
title: "macOS ${{ matrix.macos_version }} image build failed"
|
||||
description: |
|
||||
🔴 **macOS ${{ matrix.macos_version }} image build failed**
|
||||
|
||||
**Repository:** ${{ github.repository }}
|
||||
**Branch:** ${{ github.ref }}
|
||||
**Commit:** ${{ github.sha }}
|
||||
|
||||
[Check the logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
||||
color: 0xff0000
|
||||
username: "GitHub Actions"
|
||||
|
||||
update-terraform:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check-changes, build-images]
|
||||
if: needs.check-changes.outputs.should_rebuild == 'true' && needs.build-images.result == 'success'
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@v3
|
||||
with:
|
||||
terraform_version: ${{ env.TERRAFORM_VERSION }}
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-west-2
|
||||
|
||||
- name: Initialize Terraform
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform init
|
||||
terraform workspace select production || terraform workspace new production
|
||||
|
||||
- name: Plan Terraform changes
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform plan \
|
||||
-var "macstadium_api_key=${{ secrets.MACSTADIUM_API_KEY }}" \
|
||||
-var "buildkite_agent_token=${{ secrets.BUILDKITE_AGENT_TOKEN }}" \
|
||||
-var "github_token=${{ secrets.GITHUB_TOKEN }}" \
|
||||
-out=tfplan
|
||||
|
||||
- name: Apply Terraform changes
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform apply -auto-approve tfplan
|
||||
|
||||
- name: Save Terraform outputs
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform output -json > terraform-outputs.json
|
||||
|
||||
- name: Upload Terraform outputs
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: terraform-outputs
|
||||
path: .buildkite/macos-runners/terraform/terraform-outputs.json
|
||||
retention-days: 30
|
||||
|
||||
cleanup-old-images:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check-changes, build-images, update-terraform]
|
||||
if: always() && needs.check-changes.outputs.should_rebuild == 'true'
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup AWS CLI
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-west-2
|
||||
|
||||
- name: Install MacStadium CLI
|
||||
run: |
|
||||
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
|
||||
sudo mv orka-cli /usr/local/bin/orka
|
||||
chmod +x /usr/local/bin/orka
|
||||
|
||||
- name: Configure MacStadium CLI
|
||||
run: |
|
||||
orka config set endpoint ${{ secrets.ORKA_ENDPOINT }}
|
||||
orka auth token ${{ secrets.ORKA_AUTH_TOKEN }}
|
||||
|
||||
- name: Clean up old images
|
||||
run: |
|
||||
echo "Cleaning up old images..."
|
||||
|
||||
# Get list of all images
|
||||
orka image list --output json > images.json
|
||||
|
||||
# Find images older than 7 days
|
||||
cutoff_date=$(date -d '7 days ago' +%s)
|
||||
|
||||
# Parse and delete old images
|
||||
jq -r '.[] | select(.name | test("^bun-macos-")) | select(.created_at | strptime("%Y-%m-%dT%H:%M:%SZ") | mktime < '$cutoff_date') | .name' images.json | while read image_name; do
|
||||
echo "Deleting old image: $image_name"
|
||||
orka image delete "$image_name" || echo "Failed to delete $image_name"
|
||||
done
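# Any image matching bun-macos-* whose created_at is older than the 7-day epoch cutoff is deleted; individual failures are logged and skipped.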
|
||||
|
||||
- name: Clean up old snapshots
|
||||
run: |
|
||||
echo "Cleaning up old snapshots..."
|
||||
|
||||
# Get list of all snapshots
|
||||
orka snapshot list --output json > snapshots.json
|
||||
|
||||
# Find snapshots older than 7 days
|
||||
cutoff_date=$(date -d '7 days ago' +%s)
|
||||
|
||||
# Parse and delete old snapshots
|
||||
jq -r '.[] | select(.name | test("^bun-macos-")) | select(.created_at | strptime("%Y-%m-%dT%H:%M:%SZ") | mktime < '$cutoff_date') | .name' snapshots.json | while read snapshot_name; do
|
||||
echo "Deleting old snapshot: $snapshot_name"
|
||||
orka snapshot delete "$snapshot_name" || echo "Failed to delete $snapshot_name"
|
||||
done
|
||||
|
||||
health-check:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check-changes, build-images, update-terraform]
|
||||
if: always() && needs.check-changes.outputs.should_rebuild == 'true'
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup AWS CLI
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-west-2
|
||||
|
||||
- name: Install MacStadium CLI
|
||||
run: |
|
||||
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
|
||||
sudo mv orka-cli /usr/local/bin/orka
|
||||
chmod +x /usr/local/bin/orka
|
||||
|
||||
- name: Configure MacStadium CLI
|
||||
run: |
|
||||
orka config set endpoint ${{ secrets.ORKA_ENDPOINT }}
|
||||
orka auth token ${{ secrets.ORKA_AUTH_TOKEN }}
|
||||
|
||||
- name: Health check VMs
|
||||
run: |
|
||||
echo "Performing health check on VMs..."
|
||||
|
||||
# Get list of running VMs
|
||||
orka vm list --output json > vms.json
|
||||
|
||||
# Check each VM
|
||||
jq -r '.[] | select(.name | test("^bun-runner-")) | select(.status == "running") | "\(.name) \(.ip_address)"' vms.json | while read vm_name vm_ip; do
|
||||
echo "Checking VM: $vm_name ($vm_ip)"
|
||||
|
||||
# Check if VM is responding to health checks
|
||||
if curl -f -s --max-time 30 "http://$vm_ip:8080/health" > /dev/null; then
|
||||
echo "✅ $vm_name is healthy"
|
||||
else
|
||||
echo "❌ $vm_name is not responding to health checks"
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Check Buildkite agent connectivity
|
||||
run: |
|
||||
echo "Checking Buildkite agent connectivity..."
|
||||
|
||||
# Use Buildkite API to check connected agents
|
||||
curl -s -H "Authorization: Bearer ${{ secrets.BUILDKITE_API_TOKEN }}" \
|
||||
"https://api.buildkite.com/v2/organizations/${{ secrets.BUILDKITE_ORG }}/agents" | \
|
||||
jq -r '.[] | select(.name | test("^bun-runner-")) | "\(.name) \(.connection_state)"' | \
|
||||
while read agent_name state; do
|
||||
echo "Agent: $agent_name - State: $state"
|
||||
done
|
||||
|
||||
notify-success:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check-changes, build-images, update-terraform, cleanup-old-images, health-check]
|
||||
if: always() && needs.check-changes.outputs.should_rebuild == 'true' && needs.build-images.result == 'success'
|
||||
|
||||
steps:
|
||||
- name: Notify success
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: success
|
||||
title: "macOS runner images rebuilt successfully"
|
||||
description: |
|
||||
✅ **macOS runner images rebuilt successfully**
|
||||
|
||||
**Repository:** ${{ github.repository }}
|
||||
**Branch:** ${{ github.ref }}
|
||||
**Commit:** ${{ github.sha }}
|
||||
|
||||
**Changes detected in:**
|
||||
${{ needs.check-changes.outputs.changed_files }}
|
||||
|
||||
**Images built:** ${{ join(github.event.inputs.macos_versions || '13,14,15', ', ') }}
|
||||
|
||||
[Check the deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
||||
color: 0x00ff00
|
||||
username: "GitHub Actions"
|
||||
|
||||
notify-skip:
|
||||
runs-on: ubuntu-latest
|
||||
needs: check-changes
|
||||
if: needs.check-changes.outputs.should_rebuild == 'false'
|
||||
|
||||
steps:
|
||||
- name: Notify skip
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: cancelled
|
||||
title: "macOS runner image rebuild skipped"
|
||||
description: |
|
||||
ℹ️ **macOS runner image rebuild skipped** - no changes detected in the last 24 hours
|
||||
color: 0x808080
|
||||
username: "GitHub Actions"
|
||||
@@ -1,270 +0,0 @@
|
||||
packer {
|
||||
required_plugins {
|
||||
macstadium-orka = {
|
||||
version = ">= 3.0.0"
|
||||
source = "github.com/macstadium/macstadium-orka"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable "orka_endpoint" {
|
||||
description = "MacStadium Orka endpoint"
|
||||
type = string
|
||||
default = env("ORKA_ENDPOINT")
|
||||
}
|
||||
|
||||
variable "orka_auth_token" {
|
||||
description = "MacStadium Orka auth token"
|
||||
type = string
|
||||
default = env("ORKA_AUTH_TOKEN")
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "base_image" {
|
||||
description = "Base macOS image to use"
|
||||
type = string
|
||||
default = "base-images/macos-15-sequoia"
|
||||
}
|
||||
|
||||
variable "macos_version" {
|
||||
description = "macOS version (13, 14, 15)"
|
||||
type = string
|
||||
default = "15"
|
||||
}
|
||||
|
||||
variable "cpu_count" {
|
||||
description = "Number of CPU cores"
|
||||
type = number
|
||||
default = 12
|
||||
}
|
||||
|
||||
variable "memory_gb" {
|
||||
description = "Memory in GB"
|
||||
type = number
|
||||
default = 32
|
||||
}
|
||||
|
||||
source "macstadium-orka" "base" {
|
||||
orka_endpoint = var.orka_endpoint
|
||||
orka_auth_token = var.orka_auth_token
|
||||
|
||||
source_image = var.base_image
|
||||
image_name = "bun-macos-${var.macos_version}-${formatdate("YYYY-MM-DD", timestamp())}"
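# formatdate stamps the build date into the image name (e.g. bun-macos-15-2025-01-01), which the rebuild workflow later matches with the ^bun-macos-<version>- prefix.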
|
||||
|
||||
ssh_username = "admin"
|
||||
ssh_password = "admin"
|
||||
ssh_timeout = "20m"
|
||||
|
||||
vm_name = "packer-build-${formatdate("YYYY-MM-DD-hhmm", timestamp())}"
|
||||
cpu_count = var.cpu_count
|
||||
memory_gb = var.memory_gb
|
||||
|
||||
# Enable GPU acceleration for better performance
|
||||
gpu_passthrough = true
|
||||
|
||||
# Network configuration
|
||||
vnc_bind_address = "0.0.0.0"
|
||||
vnc_port_min = 5900
|
||||
vnc_port_max = 5999
|
||||
|
||||
# Cleanup settings
|
||||
cleanup_pause_time = "30s"
|
||||
create_snapshot = true
|
||||
|
||||
# Boot wait time
|
||||
boot_wait = "2m"
|
||||
}
|
||||
|
||||
build {
|
||||
sources = [
|
||||
"source.macstadium-orka.base"
|
||||
]
|
||||
|
||||
# Wait for SSH to be ready
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Waiting for system to be ready...'",
|
||||
"until ping -c1 google.com &>/dev/null; do sleep 1; done",
|
||||
"echo 'Network is ready'"
|
||||
]
|
||||
timeout = "10m"
|
||||
}
|
||||
|
||||
# Install Xcode Command Line Tools
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Installing Xcode Command Line Tools...'",
|
||||
"xcode-select --install || true",
|
||||
"until xcode-select -p &>/dev/null; do sleep 10; done",
|
||||
"echo 'Xcode Command Line Tools installed'"
|
||||
]
|
||||
timeout = "30m"
|
||||
}
|
||||
|
||||
# Copy and run bootstrap script
|
||||
provisioner "file" {
|
||||
source = "${path.root}/../scripts/bootstrap-macos.sh"
|
||||
destination = "/tmp/bootstrap-macos.sh"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"chmod +x /tmp/bootstrap-macos.sh",
|
||||
"sudo /tmp/bootstrap-macos.sh --ci"
|
||||
]
|
||||
timeout = "60m"
|
||||
}
|
||||
|
||||
# Install additional macOS-specific tools
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Installing additional macOS tools...'",
|
||||
"brew install --cask docker",
|
||||
"brew install gh",
|
||||
"brew install jq",
|
||||
"brew install coreutils",
|
||||
"brew install gnu-sed",
|
||||
"brew install gnu-tar",
|
||||
"brew install findutils",
|
||||
"brew install grep",
|
||||
"brew install make",
|
||||
"brew install cmake",
|
||||
"brew install ninja",
|
||||
"brew install pkg-config",
|
||||
"brew install python@3.11",
|
||||
"brew install python@3.12",
|
||||
"brew install go",
|
||||
"brew install rust",
|
||||
"brew install node",
|
||||
"brew install bun",
|
||||
"brew install wget",
|
||||
"brew install tree",
|
||||
"brew install htop",
|
||||
"brew install watch",
|
||||
"brew install tmux",
|
||||
"brew install screen"
|
||||
]
|
||||
timeout = "30m"
|
||||
}
|
||||
|
||||
# Install Buildkite agent
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Installing Buildkite agent...'",
|
||||
"brew install buildkite/buildkite/buildkite-agent",
|
||||
"sudo mkdir -p /usr/local/var/buildkite-agent",
|
||||
"sudo mkdir -p /usr/local/var/log/buildkite-agent",
|
||||
"sudo chown -R admin:admin /usr/local/var/buildkite-agent",
|
||||
"sudo chown -R admin:admin /usr/local/var/log/buildkite-agent"
|
||||
]
|
||||
timeout = "10m"
|
||||
}
|
||||
|
||||
# Copy user management scripts
|
||||
provisioner "file" {
|
||||
source = "${path.root}/../scripts/"
|
||||
destination = "/tmp/scripts/"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"sudo mkdir -p /usr/local/bin/bun-ci",
|
||||
"sudo cp /tmp/scripts/create-build-user.sh /usr/local/bin/bun-ci/",
|
||||
"sudo cp /tmp/scripts/cleanup-build-user.sh /usr/local/bin/bun-ci/",
|
||||
"sudo cp /tmp/scripts/job-runner.sh /usr/local/bin/bun-ci/",
|
||||
"sudo chmod +x /usr/local/bin/bun-ci/*.sh"
|
||||
]
|
||||
}
|
||||
|
||||
# Configure system settings for CI
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Configuring system for CI...'",
|
||||
"# Disable sleep and screensaver",
|
||||
"sudo pmset -a displaysleep 0 sleep 0 disksleep 0",
|
||||
"sudo pmset -a womp 1",
|
||||
"# Disable automatic updates",
|
||||
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false",
|
||||
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticDownload -bool false",
|
||||
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticallyInstallMacOSUpdates -bool false",
|
||||
"# Increase file descriptor limits",
|
||||
"echo 'kern.maxfiles=1048576' | sudo tee -a /etc/sysctl.conf",
|
||||
"echo 'kern.maxfilesperproc=1048576' | sudo tee -a /etc/sysctl.conf",
|
||||
"# Enable core dumps",
|
||||
"sudo mkdir -p /cores",
|
||||
"sudo chmod 777 /cores",
|
||||
"echo 'kern.corefile=/cores/core.%P' | sudo tee -a /etc/sysctl.conf"
|
||||
]
|
||||
}
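# %P in kern.corefile expands to the PID of the crashing process, so dumps land in /cores/core.<pid>.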
|
||||
|
||||
# Configure LaunchDaemon for Buildkite agent
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Configuring Buildkite LaunchDaemon...'",
|
||||
"sudo tee /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist > /dev/null <<EOF",
|
||||
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
|
||||
"<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">",
|
||||
"<plist version=\"1.0\">",
|
||||
"<dict>",
|
||||
" <key>Label</key>",
|
||||
" <string>com.buildkite.buildkite-agent</string>",
|
||||
" <key>ProgramArguments</key>",
|
||||
" <array>",
|
||||
" <string>/usr/local/bin/bun-ci/job-runner.sh</string>",
|
||||
" </array>",
|
||||
" <key>RunAtLoad</key>",
|
||||
" <true/>",
|
||||
" <key>KeepAlive</key>",
|
||||
" <true/>",
|
||||
" <key>StandardOutPath</key>",
|
||||
" <string>/usr/local/var/log/buildkite-agent/buildkite-agent.log</string>",
|
||||
" <key>StandardErrorPath</key>",
|
||||
" <string>/usr/local/var/log/buildkite-agent/buildkite-agent.error.log</string>",
|
||||
" <key>EnvironmentVariables</key>",
|
||||
" <dict>",
|
||||
" <key>PATH</key>",
|
||||
" <string>/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin</string>",
|
||||
" </dict>",
|
||||
"</dict>",
|
||||
"</plist>",
|
||||
"EOF"
|
||||
]
|
||||
}
|
||||
|
||||
# Clean up
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Cleaning up...'",
|
||||
"rm -rf /tmp/bootstrap-macos.sh /tmp/scripts/",
|
||||
"sudo rm -rf /var/log/*.log /var/log/*/*.log",
|
||||
"sudo rm -rf /tmp/* /var/tmp/*",
|
||||
"# Clean Homebrew cache",
|
||||
"brew cleanup --prune=all",
|
||||
"# Clean npm cache",
|
||||
"npm cache clean --force",
|
||||
"# Clean pip cache",
|
||||
"pip3 cache purge || true",
|
||||
"# Clean cargo cache",
|
||||
"cargo cache --remove-if-older-than 1d || true",
|
||||
"# Clean system caches",
|
||||
"sudo rm -rf /System/Library/Caches/*",
|
||||
"sudo rm -rf /Library/Caches/*",
|
||||
"rm -rf ~/Library/Caches/*",
|
||||
"echo 'Cleanup completed'"
|
||||
]
|
||||
}
|
||||
|
||||
# Final system preparation
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Final system preparation...'",
|
||||
"# Ensure proper permissions",
|
||||
"sudo chown -R admin:admin /usr/local/bin/bun-ci",
|
||||
"sudo chown -R admin:admin /usr/local/var/buildkite-agent",
|
||||
"sudo chown -R admin:admin /usr/local/var/log/buildkite-agent",
|
||||
"# Load the LaunchDaemon",
|
||||
"sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist",
|
||||
"echo 'Image preparation completed'"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -1,400 +0,0 @@
|
||||
#!/bin/bash
|
||||
# macOS-specific bootstrap script for Bun CI runners
|
||||
# Based on the main bootstrap.sh but optimized for macOS CI environments
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
print() {
|
||||
echo "$@"
|
||||
}
|
||||
|
||||
error() {
|
||||
print "error: $@" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
execute() {
|
||||
print "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
error "Command failed: $@"
|
||||
fi
|
||||
}
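# Example: `execute brew update` prints the command before running it and aborts the bootstrap if it fails.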
|
||||
|
||||
# Check if running as root
|
||||
if [[ $EUID -eq 0 ]]; then
|
||||
error "This script should not be run as root"
|
||||
fi
|
||||
|
||||
# Check if running on macOS
|
||||
if [[ "$(uname -s)" != "Darwin" ]]; then
|
||||
error "This script is designed for macOS only"
|
||||
fi
|
||||
|
||||
print "Starting macOS bootstrap for Bun CI..."
|
||||
|
||||
# Get macOS version
|
||||
MACOS_VERSION=$(sw_vers -productVersion)
|
||||
MACOS_MAJOR=$(echo "$MACOS_VERSION" | cut -d. -f1)
|
||||
MACOS_MINOR=$(echo "$MACOS_VERSION" | cut -d. -f2)
|
||||
|
||||
print "macOS Version: $MACOS_VERSION"
|
||||
|
||||
# Install Xcode Command Line Tools if not already installed
|
||||
if ! xcode-select -p &>/dev/null; then
|
||||
print "Installing Xcode Command Line Tools..."
|
||||
xcode-select --install
|
||||
# Wait for installation to complete
|
||||
until xcode-select -p &>/dev/null; do
|
||||
sleep 10
|
||||
done
|
||||
fi
|
||||
|
||||
# Install Homebrew if not already installed
|
||||
if ! command -v brew &>/dev/null; then
|
||||
print "Installing Homebrew..."
|
||||
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
|
||||
|
||||
# Add Homebrew to PATH
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
echo 'export PATH="/opt/homebrew/bin:$PATH"' >> ~/.zprofile
|
||||
export PATH="/opt/homebrew/bin:$PATH"
|
||||
else
|
||||
echo 'export PATH="/usr/local/bin:$PATH"' >> ~/.zprofile
|
||||
export PATH="/usr/local/bin:$PATH"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Configure Homebrew for CI
|
||||
export HOMEBREW_NO_INSTALL_CLEANUP=1
|
||||
export HOMEBREW_NO_AUTO_UPDATE=1
|
||||
export HOMEBREW_NO_ANALYTICS=1
|
||||
|
||||
# Update Homebrew
|
||||
print "Updating Homebrew..."
|
||||
brew update
|
||||
|
||||
# Install essential packages
|
||||
print "Installing essential packages..."
|
||||
brew install \
|
||||
bash \
|
||||
coreutils \
|
||||
findutils \
|
||||
gnu-tar \
|
||||
gnu-sed \
|
||||
gawk \
|
||||
gnutls \
|
||||
gnu-indent \
|
||||
gnu-getopt \
|
||||
grep \
|
||||
make \
|
||||
cmake \
|
||||
ninja \
|
||||
pkg-config \
|
||||
python@3.11 \
|
||||
python@3.12 \
|
||||
go \
|
||||
rust \
|
||||
node \
|
||||
bun \
|
||||
git \
|
||||
wget \
|
||||
curl \
|
||||
jq \
|
||||
tree \
|
||||
htop \
|
||||
watch \
|
||||
tmux \
|
||||
screen \
|
||||
gh
|
||||
|
||||
# Install Docker Desktop
|
||||
print "Installing Docker Desktop..."
|
||||
if [[ ! -d "/Applications/Docker.app" ]]; then
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
curl -L "https://desktop.docker.com/mac/main/arm64/Docker.dmg" -o /tmp/Docker.dmg
|
||||
else
|
||||
curl -L "https://desktop.docker.com/mac/main/amd64/Docker.dmg" -o /tmp/Docker.dmg
|
||||
fi
|
||||
|
||||
hdiutil attach /tmp/Docker.dmg
|
||||
cp -R /Volumes/Docker/Docker.app /Applications/
|
||||
hdiutil detach /Volumes/Docker
|
||||
rm /tmp/Docker.dmg
|
||||
fi
|
||||
|
||||
# Install Buildkite agent
|
||||
print "Installing Buildkite agent..."
|
||||
brew install buildkite/buildkite/buildkite-agent
|
||||
|
||||
# Create directories for Buildkite
|
||||
sudo mkdir -p /usr/local/var/buildkite-agent
|
||||
sudo mkdir -p /usr/local/var/log/buildkite-agent
|
||||
sudo chown -R "$(whoami):admin" /usr/local/var/buildkite-agent
|
||||
sudo chown -R "$(whoami):admin" /usr/local/var/log/buildkite-agent
|
||||
|
||||
# Install Node.js versions (exact version from bootstrap.sh)
|
||||
print "Installing specific Node.js version..."
|
||||
NODE_VERSION="24.3.0"
|
||||
if [[ "$(node --version 2>/dev/null || echo '')" != "v$NODE_VERSION" ]]; then
|
||||
# Remove any existing Node.js installations
|
||||
brew uninstall --ignore-dependencies node 2>/dev/null || true
|
||||
|
||||
# Install specific Node.js version
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
NODE_ARCH="arm64"
|
||||
else
|
||||
NODE_ARCH="x64"
|
||||
fi
|
||||
|
||||
NODE_URL="https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-darwin-$NODE_ARCH.tar.gz"
|
||||
NODE_TAR="/tmp/node-v$NODE_VERSION-darwin-$NODE_ARCH.tar.gz"
|
||||
|
||||
curl -fsSL "$NODE_URL" -o "$NODE_TAR"
|
||||
sudo tar -xzf "$NODE_TAR" -C /usr/local --strip-components=1
|
||||
rm "$NODE_TAR"
|
||||
|
||||
# Verify installation
|
||||
if [[ "$(node --version)" != "v$NODE_VERSION" ]]; then
|
||||
error "Node.js installation failed: expected v$NODE_VERSION, got $(node --version)"
|
||||
fi
|
||||
|
||||
print "Node.js v$NODE_VERSION installed successfully"
|
||||
fi
|
||||
|
||||
# Install Node.js headers (matching bootstrap.sh)
|
||||
print "Installing Node.js headers..."
|
||||
NODE_HEADERS_URL="https://nodejs.org/download/release/v$NODE_VERSION/node-v$NODE_VERSION-headers.tar.gz"
|
||||
NODE_HEADERS_TAR="/tmp/node-v$NODE_VERSION-headers.tar.gz"
|
||||
curl -fsSL "$NODE_HEADERS_URL" -o "$NODE_HEADERS_TAR"
|
||||
sudo tar -xzf "$NODE_HEADERS_TAR" -C /usr/local --strip-components=1
|
||||
rm "$NODE_HEADERS_TAR"
|
||||
|
||||
# Set up node-gyp cache
|
||||
NODE_GYP_CACHE_DIR="$HOME/.cache/node-gyp/$NODE_VERSION"
|
||||
mkdir -p "$NODE_GYP_CACHE_DIR/include"
|
||||
cp -R /usr/local/include/node "$NODE_GYP_CACHE_DIR/include/" 2>/dev/null || true
|
||||
echo "11" > "$NODE_GYP_CACHE_DIR/installVersion" 2>/dev/null || true
|
||||
|
||||
# Install Bun specific version (exact version from bootstrap.sh)
|
||||
print "Installing specific Bun version..."
|
||||
BUN_VERSION="1.2.17"
|
||||
if [[ "$(bun --version 2>/dev/null || echo '')" != "$BUN_VERSION" ]]; then
|
||||
# Remove any existing Bun installations
|
||||
brew uninstall --ignore-dependencies bun 2>/dev/null || true
|
||||
rm -rf "$HOME/.bun" 2>/dev/null || true
|
||||
|
||||
# Install specific Bun version
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
BUN_TRIPLET="bun-darwin-aarch64"
|
||||
else
|
||||
BUN_TRIPLET="bun-darwin-x64"
|
||||
fi
|
||||
|
||||
BUN_URL="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v$BUN_VERSION/$BUN_TRIPLET.zip"
|
||||
BUN_ZIP="/tmp/$BUN_TRIPLET.zip"
|
||||
|
||||
curl -fsSL "$BUN_URL" -o "$BUN_ZIP"
|
||||
unzip -q "$BUN_ZIP" -d /tmp/
|
||||
sudo mv "/tmp/$BUN_TRIPLET/bun" /usr/local/bin/
|
||||
sudo ln -sf /usr/local/bin/bun /usr/local/bin/bunx
|
||||
rm -rf "$BUN_ZIP" "/tmp/$BUN_TRIPLET"
|
||||
|
||||
# Verify installation
|
||||
if [[ "$(bun --version)" != "$BUN_VERSION" ]]; then
|
||||
error "Bun installation failed: expected $BUN_VERSION, got $(bun --version)"
|
||||
fi
|
||||
|
||||
print "Bun v$BUN_VERSION installed successfully"
|
||||
fi
|
||||
|
||||
# Install Rust toolchain
|
||||
print "Configuring Rust toolchain..."
|
||||
if command -v rustup &>/dev/null; then
|
||||
rustup update
|
||||
rustup target add x86_64-apple-darwin
|
||||
rustup target add aarch64-apple-darwin
|
||||
fi
|
||||
|
||||
# Install LLVM (exact version from bootstrap.sh)
|
||||
print "Installing LLVM..."
|
||||
LLVM_VERSION="19"
|
||||
brew install "llvm@$LLVM_VERSION"
|
||||
|
||||
# Install additional development tools
|
||||
print "Installing additional development tools..."
|
||||
brew install \
|
||||
clang-format \
|
||||
ccache \
|
||||
ninja \
|
||||
meson \
|
||||
autoconf \
|
||||
automake \
|
||||
libtool \
|
||||
gettext \
|
||||
openssl \
|
||||
readline \
|
||||
sqlite \
|
||||
xz \
|
||||
zlib \
|
||||
libyaml \
|
||||
libffi \
|
||||
pkg-config
|
||||
|
||||
# Install CMake (specific version from bootstrap.sh)
|
||||
print "Installing CMake..."
|
||||
CMAKE_VERSION="3.30.5"
|
||||
brew uninstall --ignore-dependencies cmake 2>/dev/null || true
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
CMAKE_ARCH="macos-universal"
|
||||
else
|
||||
CMAKE_ARCH="macos-universal"
|
||||
fi
|
||||
CMAKE_URL="https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-$CMAKE_ARCH.tar.gz"
|
||||
CMAKE_TAR="/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH.tar.gz"
|
||||
curl -fsSL "$CMAKE_URL" -o "$CMAKE_TAR"
|
||||
tar -xzf "$CMAKE_TAR" -C /tmp/
|
||||
sudo cp -R "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH/CMake.app/Contents/bin/"* /usr/local/bin/
|
||||
sudo cp -R "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH/CMake.app/Contents/share/"* /usr/local/share/
|
||||
rm -rf "$CMAKE_TAR" "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH"
|
||||
|
||||
# Install Age for core dump encryption (macOS equivalent)
|
||||
print "Installing Age for encryption..."
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
AGE_URL="https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-darwin-arm64.tar.gz"
|
||||
AGE_SHA256="4a3c7d8e12fb8b8b7b8c8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b"
|
||||
else
|
||||
AGE_URL="https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-darwin-amd64.tar.gz"
|
||||
AGE_SHA256="5a3c7d8e12fb8b8b7b8c8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b"
|
||||
fi
|
||||
AGE_TAR="/tmp/age.tar.gz"
|
||||
curl -fsSL "$AGE_URL" -o "$AGE_TAR"
|
||||
tar -xzf "$AGE_TAR" -C /tmp/
|
||||
sudo mv /tmp/age/age /usr/local/bin/
|
||||
rm -rf "$AGE_TAR" /tmp/age
|
||||
|
||||
# Install Tailscale (matching bootstrap.sh implementation)
|
||||
print "Installing Tailscale..."
|
||||
if [[ "$docker" != "1" ]]; then
|
||||
if [[ ! -d "/Applications/Tailscale.app" ]]; then
|
||||
# Install via Homebrew for easier management
|
||||
brew install --cask tailscale
|
||||
fi
|
||||
fi
|
||||
|
||||
# Install Chromium dependencies for testing
|
||||
print "Installing Chromium for testing..."
|
||||
brew install --cask chromium
|
||||
|
||||
# Install Python FUSE equivalent for macOS
|
||||
print "Installing macFUSE..."
|
||||
if [[ ! -d "/Library/Frameworks/macFUSE.framework" ]]; then
|
||||
brew install --cask macfuse
|
||||
fi
|
||||
|
||||
# Install python-fuse
|
||||
pip3 install fusepy
|
||||
|
||||
# Configure system settings
|
||||
print "Configuring system settings..."
|
||||
|
||||
# Disable sleep and screensaver
|
||||
sudo pmset -a displaysleep 0 sleep 0 disksleep 0
|
||||
sudo pmset -a womp 1
|
||||
|
||||
# Disable automatic updates
|
||||
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
|
||||
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticDownload -bool false
|
||||
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticallyInstallMacOSUpdates -bool false
|
||||
|
||||
# Increase file descriptor limits
|
||||
echo 'kern.maxfiles=1048576' | sudo tee -a /etc/sysctl.conf
|
||||
echo 'kern.maxfilesperproc=1048576' | sudo tee -a /etc/sysctl.conf
|
||||
|
||||
# Enable core dumps
|
||||
sudo mkdir -p /cores
|
||||
sudo chmod 777 /cores
|
||||
echo 'kern.corefile=/cores/core.%P' | sudo tee -a /etc/sysctl.conf
|
||||
|
||||
# Configure shell environment
|
||||
print "Configuring shell environment..."
|
||||
|
||||
# Add Homebrew paths to shell profiles
|
||||
SHELL_PROFILES=(.zshrc .zprofile .bash_profile .bashrc)
|
||||
for profile in "${SHELL_PROFILES[@]}"; do
|
||||
if [[ -f "$HOME/$profile" ]] || [[ "$1" == "--ci" ]]; then
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
echo 'export PATH="/opt/homebrew/bin:$PATH"' >> "$HOME/$profile"
|
||||
else
|
||||
echo 'export PATH="/usr/local/bin:$PATH"' >> "$HOME/$profile"
|
||||
fi
|
||||
|
||||
# Add other useful paths
|
||||
echo 'export PATH="/usr/local/bin/bun-ci:$PATH"' >> "$HOME/$profile"
|
||||
echo 'export PATH="/usr/local/sbin:$PATH"' >> "$HOME/$profile"
|
||||
|
||||
# Environment variables for CI
|
||||
echo 'export HOMEBREW_NO_INSTALL_CLEANUP=1' >> "$HOME/$profile"
|
||||
echo 'export HOMEBREW_NO_AUTO_UPDATE=1' >> "$HOME/$profile"
|
||||
echo 'export HOMEBREW_NO_ANALYTICS=1' >> "$HOME/$profile"
|
||||
echo 'export CI=1' >> "$HOME/$profile"
|
||||
echo 'export BUILDKITE=true' >> "$HOME/$profile"
|
||||
|
||||
# Development environment variables
|
||||
echo 'export DEVELOPER_DIR="/Applications/Xcode.app/Contents/Developer"' >> "$HOME/$profile"
|
||||
echo 'export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"' >> "$HOME/$profile"
|
||||
|
||||
# Node.js and npm configuration
|
||||
echo 'export NODE_OPTIONS="--max-old-space-size=8192"' >> "$HOME/$profile"
|
||||
echo 'export NPM_CONFIG_CACHE="$HOME/.npm"' >> "$HOME/$profile"
|
||||
|
||||
# Rust configuration
|
||||
echo 'export CARGO_HOME="$HOME/.cargo"' >> "$HOME/$profile"
|
||||
echo 'export RUSTUP_HOME="$HOME/.rustup"' >> "$HOME/$profile"
|
||||
echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> "$HOME/$profile"
|
||||
|
||||
# Go configuration
|
||||
echo 'export GOPATH="$HOME/go"' >> "$HOME/$profile"
|
||||
echo 'export PATH="$GOPATH/bin:$PATH"' >> "$HOME/$profile"
|
||||
|
||||
# Python configuration
|
||||
echo 'export PYTHONPATH="/usr/local/lib/python3.11/site-packages:/usr/local/lib/python3.12/site-packages:$PYTHONPATH"' >> "$HOME/$profile"
|
||||
|
||||
# Bun configuration
|
||||
echo 'export BUN_INSTALL="$HOME/.bun"' >> "$HOME/$profile"
|
||||
echo 'export PATH="$BUN_INSTALL/bin:$PATH"' >> "$HOME/$profile"
|
||||
|
||||
# LLVM configuration
|
||||
echo 'export PATH="/usr/local/opt/llvm/bin:$PATH"' >> "$HOME/$profile"
|
||||
echo 'export LDFLAGS="-L/usr/local/opt/llvm/lib"' >> "$HOME/$profile"
|
||||
echo 'export CPPFLAGS="-I/usr/local/opt/llvm/include"' >> "$HOME/$profile"
|
||||
fi
|
||||
done
|
||||
|
||||
# Create symbolic links for GNU tools
|
||||
print "Creating symbolic links for GNU tools..."
|
||||
GNU_TOOLS=(
|
||||
"tar:gtar"
|
||||
"sed:gsed"
|
||||
"awk:gawk"
|
||||
"find:gfind"
|
||||
"xargs:gxargs"
|
||||
"grep:ggrep"
|
||||
"make:gmake"
|
||||
)
|
||||
|
||||
for tool_pair in "${GNU_TOOLS[@]}"; do
|
||||
tool_name="${tool_pair%%:*}"
|
||||
gnu_name="${tool_pair##*:}"
|
||||
|
||||
if command -v "$gnu_name" &>/dev/null; then
|
||||
sudo ln -sf "$(which "$gnu_name")" "/usr/local/bin/$tool_name"
|
||||
fi
|
||||
done
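# With /usr/local/bin ahead of the system paths (arranged by the profile edits above), plain tar/sed/grep now resolve to the GNU variants.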
|
||||
|
||||
# Clean up
|
||||
print "Cleaning up..."
|
||||
brew cleanup --prune=all
|
||||
sudo rm -rf /tmp/* /var/tmp/* || true
|
||||
|
||||
print "macOS bootstrap completed successfully!"
|
||||
print "System is ready for Bun CI workloads."
|
||||
@@ -1,141 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Clean up build user and all associated processes/files
|
||||
# This ensures complete cleanup after each job
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
print() {
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
|
||||
}
|
||||
|
||||
error() {
|
||||
print "ERROR: $*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Check if running as root
|
||||
if [[ $EUID -ne 0 ]]; then
|
||||
error "This script must be run as root"
|
||||
fi
|
||||
|
||||
USERNAME="${1:-}"
|
||||
if [[ -z "$USERNAME" ]]; then
|
||||
error "Usage: $0 <username>"
|
||||
fi
|
||||
|
||||
print "Cleaning up build user: ${USERNAME}"
|
||||
|
||||
# Check if user exists
|
||||
if ! id "${USERNAME}" &>/dev/null; then
|
||||
print "User ${USERNAME} does not exist, nothing to clean up"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
USER_HOME="/Users/${USERNAME}"
|
||||
|
||||
# Stop any background timeout processes
|
||||
pkill -f "job-timeout.sh" || true
|
||||
|
||||
# Kill all processes owned by the user
|
||||
print "Killing all processes owned by ${USERNAME}..."
|
||||
pkill -TERM -u "${USERNAME}" || true
|
||||
sleep 2
|
||||
pkill -KILL -u "${USERNAME}" || true
|
||||
|
||||
# Wait for processes to be cleaned up
|
||||
sleep 1
|
||||
|
||||
# Remove from groups
|
||||
dscl . delete /Groups/admin GroupMembership "${USERNAME}" 2>/dev/null || true
|
||||
dscl . delete /Groups/wheel GroupMembership "${USERNAME}" 2>/dev/null || true
|
||||
dscl . delete /Groups/_developer GroupMembership "${USERNAME}" 2>/dev/null || true
|
||||
|
||||
# Remove sudo access
|
||||
rm -f "/etc/sudoers.d/${USERNAME}"
|
||||
|
||||
# Clean up temporary files and caches
|
||||
print "Cleaning up temporary files..."
|
||||
if [[ -d "${USER_HOME}" ]]; then
|
||||
# Clean up known cache directories
|
||||
rm -rf "${USER_HOME}/.npm/_cacache" || true
|
||||
rm -rf "${USER_HOME}/.npm/_logs" || true
|
||||
rm -rf "${USER_HOME}/.cargo/registry" || true
|
||||
rm -rf "${USER_HOME}/.cargo/git" || true
|
||||
rm -rf "${USER_HOME}/.rustup/tmp" || true
|
||||
rm -rf "${USER_HOME}/.cache" || true
|
||||
rm -rf "${USER_HOME}/Library/Caches" || true
|
||||
rm -rf "${USER_HOME}/Library/Logs" || true
|
||||
rm -rf "${USER_HOME}/Library/Application Support/Crash Reports" || true
|
||||
rm -rf "${USER_HOME}/tmp" || true
|
||||
rm -rf "${USER_HOME}/.bun/install/cache" || true
|
||||
|
||||
# Clean up workspace
|
||||
rm -rf "${USER_HOME}/workspace" || true
|
||||
|
||||
# Clean up any Docker containers/images created by this user
|
||||
if command -v docker &>/dev/null; then
|
||||
docker ps -a --filter "label=bk_user=${USERNAME}" -q | xargs -r docker rm -f || true
|
||||
docker images --filter "label=bk_user=${USERNAME}" -q | xargs -r docker rmi -f || true
|
||||
fi
|
||||
fi
|
||||
|
||||
# Clean up system-wide temporary files related to this user
|
||||
rm -rf "/tmp/${USERNAME}-"* || true
|
||||
rm -rf "/var/tmp/${USERNAME}-"* || true
|
||||
|
||||
# Clean up any core dumps
|
||||
rm -f "/cores/core.${USERNAME}."* || true
|
||||
|
||||
# Clean up any launchd jobs
|
||||
launchctl list | grep -E "^[0-9].*${USERNAME}" | awk '{print $3}' | xargs -I {} launchctl remove {} || true
|
||||
|
||||
# Remove user account
|
||||
print "Removing user account..."
|
||||
dscl . delete "/Users/${USERNAME}"
|
||||
|
||||
# Remove home directory
|
||||
print "Removing home directory..."
|
||||
if [[ -d "${USER_HOME}" ]]; then
|
||||
rm -rf "${USER_HOME}"
|
||||
fi
|
||||
|
||||
# Clean up any remaining processes that might have been missed
|
||||
print "Final process cleanup..."
|
||||
ps -axo user,pid | awk -v u="${USERNAME}" '$1 == u {print $2}' | xargs -r kill -9 || true
|
||||
|
||||
# Clean up shared memory segments
|
||||
ipcs -m | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -m || true
|
||||
|
||||
# Clean up semaphores
|
||||
ipcs -s | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -s || true
|
||||
|
||||
# Clean up message queues
|
||||
ipcs -q | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -q || true
|
||||
|
||||
# Clean up any remaining files owned by the user
|
||||
print "Cleaning up remaining files..."
|
||||
find /tmp -user "${USERNAME}" -exec rm -rf {} + 2>/dev/null || true
|
||||
find /var/tmp -user "${USERNAME}" -exec rm -rf {} + 2>/dev/null || true
|
||||
|
||||
# Clean up any network interfaces or ports that might be held
|
||||
lsof -t -u "${USERNAME}" 2>/dev/null | xargs -r kill -9 || true
|
||||
|
||||
# Clean up any mount points
|
||||
mount | grep "${USERNAME}" | awk '{print $3}' | xargs -r umount || true
|
||||
|
||||
# Verify cleanup
|
||||
if id "${USERNAME}" &>/dev/null; then
|
||||
error "Failed to remove user ${USERNAME}"
|
||||
fi
|
||||
|
||||
if [[ -d "${USER_HOME}" ]]; then
|
||||
error "Failed to remove home directory ${USER_HOME}"
|
||||
fi
|
||||
|
||||
print "Build user ${USERNAME} cleaned up successfully"
|
||||
|
||||
# Free up memory
|
||||
sync
|
||||
purge || true
|
||||
|
||||
print "Cleanup completed"
|
||||
@@ -1,158 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Create isolated build user for each Buildkite job
|
||||
# This ensures complete isolation between jobs
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
print() {
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
|
||||
}
|
||||
|
||||
error() {
|
||||
print "ERROR: $*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Check if running as root
|
||||
if [[ $EUID -ne 0 ]]; then
|
||||
error "This script must be run as root"
|
||||
fi
|
||||
|
||||
# Generate unique user name
|
||||
JOB_ID="${BUILDKITE_JOB_ID:-$(uuidgen | tr '[:upper:]' '[:lower:]' | tr -d '-' | cut -c1-8)}"
|
||||
USERNAME="bk-${JOB_ID}"
|
||||
USER_HOME="/Users/${USERNAME}"
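# Example: job id 1a2b3c4d yields user bk-1a2b3c4d with home /Users/bk-1a2b3c4d.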
|
||||
|
||||
print "Creating build user: ${USERNAME}"
|
||||
|
||||
# Check if user already exists
|
||||
if id "${USERNAME}" &>/dev/null; then
|
||||
print "User ${USERNAME} already exists, cleaning up first..."
|
||||
/usr/local/bin/bun-ci/cleanup-build-user.sh "${USERNAME}"
|
||||
fi
|
||||
|
||||
# Find next available UID (starting from 1000)
|
||||
NEXT_UID=1000
|
||||
while id -u "${NEXT_UID}" &>/dev/null; do
|
||||
((NEXT_UID++))
|
||||
done
|
||||
|
||||
print "Using UID: ${NEXT_UID}"
|
||||
|
||||
# Create user account
|
||||
dscl . create "/Users/${USERNAME}"
|
||||
dscl . create "/Users/${USERNAME}" UserShell /bin/bash
|
||||
dscl . create "/Users/${USERNAME}" RealName "Buildkite Job ${JOB_ID}"
|
||||
dscl . create "/Users/${USERNAME}" UniqueID "${NEXT_UID}"
|
||||
dscl . create "/Users/${USERNAME}" PrimaryGroupID 20 # staff group
|
||||
dscl . create "/Users/${USERNAME}" NFSHomeDirectory "${USER_HOME}"
|
||||
|
||||
# Set password (random, but user won't need to login interactively)
|
||||
RANDOM_PASSWORD=$(openssl rand -base64 32)
|
||||
dscl . passwd "/Users/${USERNAME}" "${RANDOM_PASSWORD}"
|
||||
|
||||
# Create home directory
|
||||
mkdir -p "${USER_HOME}"
|
||||
chown "${USERNAME}:staff" "${USER_HOME}"
|
||||
chmod 755 "${USER_HOME}"
|
||||
|
||||
# Copy skeleton files
|
||||
cp -R /System/Library/User\ Template/English.lproj/. "${USER_HOME}/"
|
||||
chown -R "${USERNAME}:staff" "${USER_HOME}"
|
||||
|
||||
# Set up shell environment
|
||||
cat > "${USER_HOME}/.zshrc" << 'EOF'
|
||||
# Buildkite job environment
|
||||
export PATH="/usr/local/bin:/usr/local/sbin:/opt/homebrew/bin:/opt/homebrew/sbin:$PATH"
|
||||
export HOMEBREW_NO_INSTALL_CLEANUP=1
|
||||
export HOMEBREW_NO_AUTO_UPDATE=1
|
||||
export HOMEBREW_NO_ANALYTICS=1
|
||||
export CI=1
|
||||
export BUILDKITE=true
|
||||
|
||||
# Development environment
|
||||
export DEVELOPER_DIR="/Applications/Xcode.app/Contents/Developer"
|
||||
export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"
|
||||
|
||||
# Node.js and npm
|
||||
export NODE_OPTIONS="--max-old-space-size=8192"
|
||||
export NPM_CONFIG_CACHE="$HOME/.npm"
|
||||
|
||||
# Rust
|
||||
export CARGO_HOME="$HOME/.cargo"
|
||||
export RUSTUP_HOME="$HOME/.rustup"
|
||||
export PATH="$HOME/.cargo/bin:$PATH"
|
||||
|
||||
# Go
|
||||
export GOPATH="$HOME/go"
|
||||
export PATH="$GOPATH/bin:$PATH"
|
||||
|
||||
# Python
|
||||
export PYTHONPATH="/usr/local/lib/python3.11/site-packages:/usr/local/lib/python3.12/site-packages:$PYTHONPATH"
|
||||
|
||||
# Bun
|
||||
export BUN_INSTALL="$HOME/.bun"
|
||||
export PATH="$BUN_INSTALL/bin:$PATH"
|
||||
|
||||
# LLVM
|
||||
export PATH="/usr/local/opt/llvm/bin:$PATH"
|
||||
export LDFLAGS="-L/usr/local/opt/llvm/lib"
|
||||
export CPPFLAGS="-I/usr/local/opt/llvm/include"
|
||||
|
||||
# Job isolation
|
||||
export TMPDIR="$HOME/tmp"
|
||||
export TEMP="$HOME/tmp"
|
||||
export TMP="$HOME/tmp"
|
||||
mkdir -p "$TMPDIR"
|
||||
EOF
|
||||
|
||||
# Copy .zshrc to other shell profiles
|
||||
cp "${USER_HOME}/.zshrc" "${USER_HOME}/.bash_profile"
|
||||
cp "${USER_HOME}/.zshrc" "${USER_HOME}/.bashrc"
|
||||
|
||||
# Create necessary directories
|
||||
mkdir -p "${USER_HOME}/tmp"
|
||||
mkdir -p "${USER_HOME}/.npm"
|
||||
mkdir -p "${USER_HOME}/.cargo"
|
||||
mkdir -p "${USER_HOME}/.rustup"
|
||||
mkdir -p "${USER_HOME}/go"
|
||||
mkdir -p "${USER_HOME}/.bun"
|
||||
|
||||
# Set ownership
|
||||
chown -R "${USERNAME}:staff" "${USER_HOME}"
|
||||
|
||||
# Create workspace directory
|
||||
WORKSPACE_DIR="${USER_HOME}/workspace"
|
||||
mkdir -p "${WORKSPACE_DIR}"
|
||||
chown "${USERNAME}:staff" "${WORKSPACE_DIR}"
|
||||
|
||||
# Add user to necessary groups
|
||||
dscl . append /Groups/admin GroupMembership "${USERNAME}"
|
||||
dscl . append /Groups/wheel GroupMembership "${USERNAME}"
|
||||
dscl . append /Groups/_developer GroupMembership "${USERNAME}"
|
||||
|
||||
# Set up sudo access (for this user only during the job)
|
||||
cat > "/etc/sudoers.d/${USERNAME}" << EOF
|
||||
${USERNAME} ALL=(ALL) NOPASSWD: ALL
|
||||
EOF
|
||||
|
||||
# Create job timeout script
|
||||
cat > "${USER_HOME}/job-timeout.sh" << 'EOF'
|
||||
#!/bin/bash
|
||||
# Kill all processes after job timeout
|
||||
sleep ${BUILDKITE_TIMEOUT:-3600}
|
||||
pkill -u "${USERNAME}" || true
|
||||
EOF
|
||||
|
||||
chmod +x "${USER_HOME}/job-timeout.sh"
|
||||
chown "${USERNAME}:staff" "${USER_HOME}/job-timeout.sh"
|
||||
|
||||
print "Build user ${USERNAME} created successfully"
|
||||
print "Home directory: ${USER_HOME}"
|
||||
print "Workspace directory: ${WORKSPACE_DIR}"
|
||||
|
||||
# Output user info for the calling script
|
||||
echo "BK_USER=${USERNAME}"
|
||||
echo "BK_HOME=${USER_HOME}"
|
||||
echo "BK_WORKSPACE=${WORKSPACE_DIR}"
|
||||
echo "BK_UID=${NEXT_UID}"
|
||||
@@ -1,242 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Main job runner script that manages the lifecycle of Buildkite jobs
|
||||
# This script creates users, runs jobs, and cleans up afterward
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
print() {
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
|
||||
}
|
||||
|
||||
error() {
|
||||
print "ERROR: $*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Ensure running as root
|
||||
if [[ $EUID -ne 0 ]]; then
|
||||
error "This script must be run as root"
|
||||
fi
|
||||
|
||||
# Configuration
|
||||
BUILDKITE_AGENT_TOKEN="${BUILDKITE_AGENT_TOKEN:-}"
|
||||
BUILDKITE_QUEUE="${BUILDKITE_QUEUE:-default}"
|
||||
BUILDKITE_TAGS="${BUILDKITE_TAGS:-queue=$BUILDKITE_QUEUE,os=macos,arch=$(uname -m)}"
|
||||
LOG_DIR="/usr/local/var/log/buildkite-agent"
|
||||
AGENT_CONFIG_DIR="/usr/local/var/buildkite-agent"
|
||||
|
||||
# Ensure directories exist
|
||||
mkdir -p "$LOG_DIR"
|
||||
mkdir -p "$AGENT_CONFIG_DIR"
|
||||
|
||||
# Function to cleanup on exit
|
||||
cleanup() {
|
||||
local exit_code=$?
|
||||
print "Job runner exiting with code $exit_code"
|
||||
|
||||
# Clean up current user if set
|
||||
if [[ -n "${CURRENT_USER:-}" ]]; then
|
||||
print "Cleaning up user: $CURRENT_USER"
|
||||
/usr/local/bin/bun-ci/cleanup-build-user.sh "$CURRENT_USER" || true
|
||||
fi
|
||||
|
||||
# Kill any remaining buildkite-agent processes
|
||||
pkill -f "buildkite-agent" || true
|
||||
|
||||
exit $exit_code
|
||||
}
|
||||
|
||||
trap cleanup EXIT INT TERM
|
||||
|
||||
# Function to run a single job
|
||||
run_job() {
|
||||
local job_id="$1"
|
||||
local user_info
|
||||
|
||||
print "Starting job: $job_id"
|
||||
|
||||
# Create isolated user for this job
|
||||
print "Creating isolated build user..."
|
||||
user_info=$(/usr/local/bin/bun-ci/create-build-user.sh)
|
||||
|
||||
# Parse user info
|
||||
export BK_USER=$(echo "$user_info" | grep "BK_USER=" | cut -d= -f2)
|
||||
export BK_HOME=$(echo "$user_info" | grep "BK_HOME=" | cut -d= -f2)
|
||||
export BK_WORKSPACE=$(echo "$user_info" | grep "BK_WORKSPACE=" | cut -d= -f2)
|
||||
export BK_UID=$(echo "$user_info" | grep "BK_UID=" | cut -d= -f2)
|
||||
|
||||
CURRENT_USER="$BK_USER"
|
||||
|
||||
print "Job will run as user: $BK_USER"
|
||||
print "Workspace: $BK_WORKSPACE"
|
||||
|
||||
# Create job-specific configuration
|
||||
local job_config="${AGENT_CONFIG_DIR}/buildkite-agent-${job_id}.cfg"
|
||||
cat > "$job_config" << EOF
|
||||
token="${BUILDKITE_AGENT_TOKEN}"
|
||||
name="macos-$(hostname)-${job_id}"
|
||||
tags="${BUILDKITE_TAGS}"
|
||||
build-path="${BK_WORKSPACE}"
|
||||
hooks-path="/usr/local/bin/bun-ci/hooks"
|
||||
plugins-path="${BK_HOME}/.buildkite-agent/plugins"
|
||||
git-clean-flags="-fdq"
|
||||
git-clone-flags="-v"
|
||||
shell="/bin/bash -l"
|
||||
spawn=1
|
||||
priority=normal
|
||||
disconnect-after-job=true
|
||||
disconnect-after-idle-timeout=300
|
||||
cancel-grace-period=10
|
||||
enable-job-log-tmpfile=true
|
||||
job-log-tmpfile-path="/tmp/buildkite-job-${job_id}.log"
|
||||
timestamp-lines=true
|
||||
EOF
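# disconnect-after-job=true with spawn=1 means each agent handles exactly one job before run_job tears the throwaway user down.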
|
||||
|
||||
# Set permissions
|
||||
chown "$BK_USER:staff" "$job_config"
|
||||
chmod 600 "$job_config"
|
||||
|
||||
# Start timeout monitor in background
|
||||
(
|
||||
sleep "${BUILDKITE_TIMEOUT:-3600}"
|
||||
print "Job timeout reached, killing all processes for user $BK_USER"
|
||||
pkill -TERM -u "$BK_USER" || true
|
||||
sleep 10
|
||||
pkill -KILL -u "$BK_USER" || true
|
||||
) &
|
||||
local timeout_pid=$!
|
||||
|
||||
# Run buildkite-agent as the isolated user
|
||||
print "Starting Buildkite agent for job $job_id..."
|
||||
|
||||
local agent_exit_code=0
|
||||
sudo -u "$BK_USER" -H /usr/local/bin/buildkite-agent start \
|
||||
--config "$job_config" \
|
||||
--log-level info \
|
||||
--no-color \
|
||||
2>&1 | tee -a "$LOG_DIR/job-${job_id}.log" || agent_exit_code=$?
|
||||
|
||||
# Kill timeout monitor
|
||||
kill $timeout_pid 2>/dev/null || true
|
||||
|
||||
print "Job $job_id completed with exit code: $agent_exit_code"
|
||||
|
||||
# Clean up job-specific files
|
||||
rm -f "$job_config"
|
||||
rm -f "/tmp/buildkite-job-${job_id}.log"
|
||||
|
||||
# Clean up the user
|
||||
print "Cleaning up user $BK_USER..."
|
||||
/usr/local/bin/bun-ci/cleanup-build-user.sh "$BK_USER" || true
|
||||
CURRENT_USER=""
|
||||
|
||||
return $agent_exit_code
|
||||
}
|
||||
|
||||
# Function to wait for jobs
|
||||
wait_for_jobs() {
|
||||
print "Waiting for Buildkite jobs..."
|
||||
|
||||
# Check for required configuration
|
||||
if [[ -z "$BUILDKITE_AGENT_TOKEN" ]]; then
|
||||
error "BUILDKITE_AGENT_TOKEN is required"
|
||||
fi
|
||||
|
||||
# Main loop to handle jobs
|
||||
while true; do
|
||||
# Generate unique job ID
|
||||
local job_id=$(uuidgen | tr '[:upper:]' '[:lower:]' | tr -d '-' | cut -c1-8)
|
||||
|
||||
print "Ready to accept job with ID: $job_id"
|
||||
|
||||
# Try to run a job
|
||||
if ! run_job "$job_id"; then
|
||||
print "Job $job_id failed, continuing..."
|
||||
fi
|
||||
|
||||
# Brief pause before accepting next job
|
||||
sleep 5
|
||||
|
||||
# Clean up any remaining processes
|
||||
print "Performing system cleanup..."
|
||||
pkill -f "buildkite-agent" || true
|
||||
|
||||
# Clean up temporary files
|
||||
find /tmp -name "buildkite-*" -mtime +1 -delete 2>/dev/null || true
|
||||
find /var/tmp -name "buildkite-*" -mtime +1 -delete 2>/dev/null || true
|
||||
|
||||
# Clean up any orphaned users (safety net)
|
||||
for user in $(dscl . list /Users | grep "^bk-"); do
|
||||
if [[ -n "$user" ]]; then
|
||||
print "Cleaning up orphaned user: $user"
|
||||
/usr/local/bin/bun-ci/cleanup-build-user.sh "$user" || true
|
||||
fi
|
||||
done
|
||||
|
||||
# Free up memory
|
||||
sync
|
||||
purge || true
|
||||
|
||||
print "System cleanup completed, ready for next job"
|
||||
done
|
||||
}
|
||||
|
||||
# Function to perform health checks
|
||||
health_check() {
|
||||
print "Performing health check..."
|
||||
|
||||
# Check disk space
|
||||
local disk_usage=$(df -h / | awk 'NR==2 {print $5}' | sed 's/%//')
|
||||
if [[ $disk_usage -gt 90 ]]; then
|
||||
error "Disk usage is too high: ${disk_usage}%"
|
||||
fi
|
||||
|
||||
# Check memory
|
||||
local memory_pressure=$(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}' | sed 's/%//')
|
||||
if [[ $memory_pressure -lt 10 ]]; then
|
||||
error "Memory pressure is too high: ${memory_pressure}% free"
|
||||
fi
|
||||
|
||||
# Check if Docker is running
|
||||
if ! pgrep -x "Docker" > /dev/null; then
|
||||
print "Docker is not running, attempting to start..."
|
||||
open -a Docker || true
|
||||
sleep 30
|
||||
fi
|
||||
|
||||
# Check if required commands are available
|
||||
local required_commands=("git" "node" "npm" "bun" "python3" "go" "rustc" "cargo" "cmake" "make")
|
||||
for cmd in "${required_commands[@]}"; do
|
||||
if ! command -v "$cmd" &>/dev/null; then
|
||||
error "Required command not found: $cmd"
|
||||
fi
|
||||
done
|
||||
|
||||
print "Health check passed"
|
||||
}
|
||||
|
||||
# Main execution
|
||||
case "${1:-start}" in
|
||||
start)
|
||||
print "Starting Buildkite job runner for macOS"
|
||||
health_check
|
||||
wait_for_jobs
|
||||
;;
|
||||
health)
|
||||
health_check
|
||||
;;
|
||||
cleanup)
|
||||
print "Performing manual cleanup..."
|
||||
# Clean up any existing users
|
||||
for user in $(dscl . list /Users | grep "^bk-"); do
|
||||
if [[ -n "$user" ]]; then
|
||||
print "Cleaning up user: $user"
|
||||
/usr/local/bin/bun-ci/cleanup-build-user.sh "$user" || true
|
||||
fi
|
||||
done
|
||||
print "Manual cleanup completed"
|
||||
;;
|
||||
*)
|
||||
error "Usage: $0 {start|health|cleanup}"
|
||||
;;
|
||||
esac
|
||||
@@ -1,433 +0,0 @@
|
||||
terraform {
|
||||
required_version = ">= 1.0"
|
||||
|
||||
required_providers {
|
||||
macstadium = {
|
||||
source = "macstadium/macstadium"
|
||||
version = "~> 1.0"
|
||||
}
|
||||
}
|
||||
|
||||
backend "s3" {
|
||||
bucket = "bun-terraform-state"
|
||||
key = "macos-runners/terraform.tfstate"
|
||||
region = "us-west-2"
|
||||
}
|
||||
}
|
||||
|
||||
provider "macstadium" {
|
||||
api_key = var.macstadium_api_key
|
||||
endpoint = var.macstadium_endpoint
|
||||
}
|
||||
|
||||
# Variables
|
||||
variable "macstadium_api_key" {
|
||||
description = "MacStadium API key"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "macstadium_endpoint" {
|
||||
description = "MacStadium API endpoint"
|
||||
type = string
|
||||
default = "https://api.macstadium.com"
|
||||
}
|
||||
|
||||
variable "buildkite_agent_token" {
|
||||
description = "Buildkite agent token"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "github_token" {
|
||||
description = "GitHub token for accessing private repositories"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "image_name_prefix" {
|
||||
description = "Prefix for VM image names"
|
||||
type = string
|
||||
default = "bun-macos"
|
||||
}
|
||||
|
||||
variable "fleet_size" {
|
||||
description = "Number of VMs per macOS version"
|
||||
type = object({
|
||||
macos_13 = number
|
||||
macos_14 = number
|
||||
macos_15 = number
|
||||
})
|
||||
default = {
|
||||
macos_13 = 4
|
||||
macos_14 = 6
|
||||
macos_15 = 8
|
||||
}
|
||||
}
|
||||
|
||||
variable "vm_configuration" {
|
||||
description = "VM configuration settings"
|
||||
type = object({
|
||||
cpu_count = number
|
||||
memory_gb = number
|
||||
disk_size = number
|
||||
})
|
||||
default = {
|
||||
cpu_count = 12
|
||||
memory_gb = 32
|
||||
disk_size = 500
|
||||
}
|
||||
}
|
||||
|
||||
# Data sources to get latest images
|
||||
data "macstadium_image" "macos_13" {
|
||||
name_regex = "^${var.image_name_prefix}-13-.*"
|
||||
most_recent = true
|
||||
}
|
||||
|
||||
data "macstadium_image" "macos_14" {
|
||||
name_regex = "^${var.image_name_prefix}-14-.*"
|
||||
most_recent = true
|
||||
}
|
||||
|
||||
data "macstadium_image" "macos_15" {
|
||||
name_regex = "^${var.image_name_prefix}-15-.*"
|
||||
most_recent = true
|
||||
}
|
||||
|
||||
# Local values
|
||||
locals {
|
||||
common_tags = {
|
||||
Project = "bun-ci"
|
||||
Environment = "production"
|
||||
ManagedBy = "terraform"
|
||||
Purpose = "buildkite-runners"
|
||||
}
|
||||
|
||||
vm_configs = {
|
||||
macos_13 = {
|
||||
image_id = data.macstadium_image.macos_13.id
|
||||
count = var.fleet_size.macos_13
|
||||
version = "13"
|
||||
}
|
||||
macos_14 = {
|
||||
image_id = data.macstadium_image.macos_14.id
|
||||
count = var.fleet_size.macos_14
|
||||
version = "14"
|
||||
}
|
||||
macos_15 = {
|
||||
image_id = data.macstadium_image.macos_15.id
|
||||
count = var.fleet_size.macos_15
|
||||
version = "15"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# VM instances for each macOS version
|
||||
resource "macstadium_vm" "runners" {
|
||||
for_each = {
|
||||
for vm_combo in flatten([
|
||||
for version, config in local.vm_configs : [
|
||||
for i in range(config.count) : {
|
||||
key = "${version}-${i + 1}"
|
||||
version = version
|
||||
config = config
|
||||
index = i + 1
|
||||
}
|
||||
]
|
||||
]) : vm_combo.key => vm_combo
|
||||
}
|
||||
|
||||
name = "bun-runner-${each.value.version}-${each.value.index}"
|
||||
image_id = each.value.config.image_id
|
||||
|
||||
cpu_count = var.vm_configuration.cpu_count
|
||||
memory_gb = var.vm_configuration.memory_gb
|
||||
disk_size = var.vm_configuration.disk_size
|
||||
|
||||
# Network configuration
|
||||
network_interface {
|
||||
network_id = macstadium_network.runner_network.id
|
||||
ip_address = cidrhost(macstadium_network.runner_network.cidr_block, 10 + index(keys(local.vm_configs), each.value.version) * 100 + each.value.index)
|
||||
}
|
||||
|
||||
# Enable GPU passthrough for better performance
|
||||
gpu_passthrough = true
|
||||
|
||||
# Enable VNC for debugging
|
||||
vnc_enabled = true
|
||||
|
||||
# SSH configuration
|
||||
ssh_keys = [macstadium_ssh_key.runner_key.id]
|
||||
|
||||
# Startup script
|
||||
user_data = templatefile("${path.module}/user-data.sh", {
|
||||
buildkite_agent_token = var.buildkite_agent_token
|
||||
github_token = var.github_token
|
||||
macos_version = each.value.version
|
||||
vm_name = "bun-runner-${each.value.version}-${each.value.index}"
|
||||
})
|
||||
|
||||
# Auto-start VM
|
||||
auto_start = true
|
||||
|
||||
# Shutdown behavior
|
||||
auto_shutdown = false
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-${each.value.version}-${each.value.index}"
|
||||
MacOSVersion = each.value.version
|
||||
VmIndex = each.value.index
|
||||
})
|
||||
}
|
||||
|
||||
# Network configuration
|
||||
resource "macstadium_network" "runner_network" {
|
||||
name = "bun-runner-network"
|
||||
cidr_block = "10.0.0.0/16"
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-network"
|
||||
})
|
||||
}
|
||||
|
||||
# SSH key for VM access
|
||||
resource "macstadium_ssh_key" "runner_key" {
|
||||
name = "bun-runner-key"
|
||||
public_key = file("${path.module}/ssh-keys/bun-runner.pub")
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-key"
|
||||
})
|
||||
}
|
||||
|
||||
# Security group for runner VMs
|
||||
resource "macstadium_security_group" "runner_sg" {
|
||||
name = "bun-runner-sg"
|
||||
description = "Security group for Bun CI runner VMs"
|
||||
|
||||
# SSH access
|
||||
ingress {
|
||||
from_port = 22
|
||||
to_port = 22
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
# VNC access (for debugging)
|
||||
ingress {
|
||||
from_port = 5900
|
||||
to_port = 5999
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["10.0.0.0/16"]
|
||||
}
|
||||
|
||||
# HTTP/HTTPS outbound
|
||||
egress {
|
||||
from_port = 80
|
||||
to_port = 80
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
egress {
|
||||
from_port = 443
|
||||
to_port = 443
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
# Git (SSH)
|
||||
egress {
|
||||
from_port = 22
|
||||
to_port = 22
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
# DNS
|
||||
egress {
|
||||
from_port = 53
|
||||
to_port = 53
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
egress {
|
||||
from_port = 53
|
||||
to_port = 53
|
||||
protocol = "udp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-sg"
|
||||
})
|
||||
}
|
||||
|
||||
# Load balancer for distributing jobs
|
||||
resource "macstadium_load_balancer" "runner_lb" {
|
||||
name = "bun-runner-lb"
|
||||
load_balancer_type = "application"
|
||||
|
||||
# Health check configuration
|
||||
health_check {
|
||||
enabled = true
|
||||
healthy_threshold = 2
|
||||
unhealthy_threshold = 3
|
||||
timeout = 5
|
||||
interval = 30
|
||||
path = "/health"
|
||||
port = 8080
|
||||
protocol = "HTTP"
|
||||
}
|
||||
|
||||
# Target group for all runner VMs
|
||||
target_group {
|
||||
name = "bun-runners"
|
||||
port = 8080
|
||||
protocol = "HTTP"
|
||||
|
||||
targets = [
|
||||
for vm in macstadium_vm.runners : {
|
||||
id = vm.id
|
||||
port = 8080
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-lb"
|
||||
})
|
||||
}
|
||||
|
||||
# Auto-scaling configuration
|
||||
resource "macstadium_autoscaling_group" "runner_asg" {
|
||||
name = "bun-runner-asg"
|
||||
min_size = 2
|
||||
max_size = 20
|
||||
desired_capacity = sum(values(var.fleet_size))
|
||||
health_check_type = "ELB"
|
||||
health_check_grace_period = 300
|
||||
|
||||
# Launch template reference
|
||||
launch_template {
|
||||
id = macstadium_launch_template.runner_template.id
|
||||
version = "$Latest"
|
||||
}
|
||||
|
||||
# Scaling policies
|
||||
target_group_arns = [macstadium_load_balancer.runner_lb.target_group[0].arn]
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-asg"
|
||||
})
|
||||
}
|
||||
|
||||
# Launch template for auto-scaling
|
||||
resource "macstadium_launch_template" "runner_template" {
|
||||
name = "bun-runner-template"
|
||||
image_id = data.macstadium_image.macos_15.id
|
||||
instance_type = "mac-mini-m2-pro"
|
||||
|
||||
key_name = macstadium_ssh_key.runner_key.name
|
||||
|
||||
security_group_ids = [macstadium_security_group.runner_sg.id]
|
||||
|
||||
user_data = base64encode(templatefile("${path.module}/user-data.sh", {
|
||||
buildkite_agent_token = var.buildkite_agent_token
|
||||
github_token = var.github_token
|
||||
macos_version = "15"
|
||||
vm_name = "bun-runner-asg-${timestamp()}"
|
||||
}))
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-template"
|
||||
})
|
||||
}
|
||||
|
||||
# CloudWatch alarms for scaling
|
||||
resource "macstadium_cloudwatch_metric_alarm" "scale_up" {
|
||||
alarm_name = "bun-runner-scale-up"
|
||||
comparison_operator = "GreaterThanThreshold"
|
||||
evaluation_periods = "2"
|
||||
metric_name = "CPUUtilization"
|
||||
namespace = "AWS/EC2"
|
||||
period = "300"
|
||||
statistic = "Average"
|
||||
threshold = "80"
|
||||
alarm_description = "This metric monitors ec2 cpu utilization"
|
||||
alarm_actions = [macstadium_autoscaling_policy.scale_up.arn]
|
||||
|
||||
dimensions = {
|
||||
AutoScalingGroupName = macstadium_autoscaling_group.runner_asg.name
|
||||
}
|
||||
}
|
||||
|
||||
resource "macstadium_cloudwatch_metric_alarm" "scale_down" {
|
||||
alarm_name = "bun-runner-scale-down"
|
||||
comparison_operator = "LessThanThreshold"
|
||||
evaluation_periods = "2"
|
||||
metric_name = "CPUUtilization"
|
||||
namespace = "AWS/EC2"
|
||||
period = "300"
|
||||
statistic = "Average"
|
||||
threshold = "20"
|
||||
alarm_description = "This metric monitors ec2 cpu utilization"
|
||||
alarm_actions = [macstadium_autoscaling_policy.scale_down.arn]
|
||||
|
||||
dimensions = {
|
||||
AutoScalingGroupName = macstadium_autoscaling_group.runner_asg.name
|
||||
}
|
||||
}
|
||||
|
||||
# Scaling policies
|
||||
resource "macstadium_autoscaling_policy" "scale_up" {
|
||||
name = "bun-runner-scale-up"
|
||||
scaling_adjustment = 2
|
||||
adjustment_type = "ChangeInCapacity"
|
||||
cooldown = 300
|
||||
autoscaling_group_name = macstadium_autoscaling_group.runner_asg.name
|
||||
}
|
||||
|
||||
resource "macstadium_autoscaling_policy" "scale_down" {
|
||||
name = "bun-runner-scale-down"
|
||||
scaling_adjustment = -1
|
||||
adjustment_type = "ChangeInCapacity"
|
||||
cooldown = 300
|
||||
autoscaling_group_name = macstadium_autoscaling_group.runner_asg.name
|
||||
}
|
||||
|
||||
# Outputs
|
||||
output "vm_instances" {
|
||||
description = "Details of created VM instances"
|
||||
value = {
|
||||
for key, vm in macstadium_vm.runners : key => {
|
||||
id = vm.id
|
||||
name = vm.name
|
||||
ip_address = vm.network_interface[0].ip_address
|
||||
image_id = vm.image_id
|
||||
status = vm.status
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output "load_balancer_dns" {
|
||||
description = "DNS name of the load balancer"
|
||||
value = macstadium_load_balancer.runner_lb.dns_name
|
||||
}
|
||||
|
||||
output "network_id" {
|
||||
description = "ID of the runner network"
|
||||
value = macstadium_network.runner_network.id
|
||||
}
|
||||
|
||||
output "security_group_id" {
|
||||
description = "ID of the runner security group"
|
||||
value = macstadium_security_group.runner_sg.id
|
||||
}
|
||||
|
||||
output "autoscaling_group_name" {
|
||||
description = "Name of the autoscaling group"
|
||||
value = macstadium_autoscaling_group.runner_asg.name
|
||||
}
|
||||
@@ -1,245 +0,0 @@
|
||||
# VM instance outputs
|
||||
output "vm_instances" {
|
||||
description = "Details of all created VM instances"
|
||||
value = {
|
||||
for key, vm in macstadium_vm.runners : key => {
|
||||
id = vm.id
|
||||
name = vm.name
|
||||
ip_address = vm.network_interface[0].ip_address
|
||||
image_id = vm.image_id
|
||||
status = vm.status
|
||||
macos_version = regex("macos_([0-9]+)", key)[0]
|
||||
instance_type = vm.instance_type
|
||||
cpu_count = vm.cpu_count
|
||||
memory_gb = vm.memory_gb
|
||||
disk_size = vm.disk_size
|
||||
created_at = vm.created_at
|
||||
updated_at = vm.updated_at
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output "vm_instances_by_version" {
|
||||
description = "VM instances grouped by macOS version"
|
||||
value = {
|
||||
for version in ["13", "14", "15"] : "macos_${version}" => {
|
||||
for key, vm in macstadium_vm.runners : key => {
|
||||
id = vm.id
|
||||
name = vm.name
|
||||
ip_address = vm.network_interface[0].ip_address
|
||||
status = vm.status
|
||||
}
|
||||
if can(regex("^macos_${version}-", key))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Network outputs
|
||||
output "network_details" {
|
||||
description = "Network configuration details"
|
||||
value = {
|
||||
network_id = macstadium_network.runner_network.id
|
||||
cidr_block = macstadium_network.runner_network.cidr_block
|
||||
name = macstadium_network.runner_network.name
|
||||
status = macstadium_network.runner_network.status
|
||||
}
|
||||
}
|
||||
|
||||
output "security_group_details" {
|
||||
description = "Security group configuration details"
|
||||
value = {
|
||||
security_group_id = macstadium_security_group.runner_sg.id
|
||||
name = macstadium_security_group.runner_sg.name
|
||||
description = macstadium_security_group.runner_sg.description
|
||||
ingress_rules = macstadium_security_group.runner_sg.ingress
|
||||
egress_rules = macstadium_security_group.runner_sg.egress
|
||||
}
|
||||
}
|
||||
|
||||
# Load balancer outputs
|
||||
output "load_balancer_details" {
|
||||
description = "Load balancer configuration details"
|
||||
value = {
|
||||
dns_name = macstadium_load_balancer.runner_lb.dns_name
|
||||
zone_id = macstadium_load_balancer.runner_lb.zone_id
|
||||
load_balancer_type = macstadium_load_balancer.runner_lb.load_balancer_type
|
||||
target_group_arn = macstadium_load_balancer.runner_lb.target_group[0].arn
|
||||
health_check = macstadium_load_balancer.runner_lb.health_check[0]
|
||||
}
|
||||
}
|
||||
|
||||
# Auto-scaling outputs
|
||||
output "autoscaling_details" {
|
||||
description = "Auto-scaling group configuration details"
|
||||
value = {
|
||||
asg_name = macstadium_autoscaling_group.runner_asg.name
|
||||
min_size = macstadium_autoscaling_group.runner_asg.min_size
|
||||
max_size = macstadium_autoscaling_group.runner_asg.max_size
|
||||
desired_capacity = macstadium_autoscaling_group.runner_asg.desired_capacity
|
||||
launch_template = macstadium_autoscaling_group.runner_asg.launch_template[0]
|
||||
}
|
||||
}
|
||||
|
||||
# SSH key outputs
|
||||
output "ssh_key_details" {
|
||||
description = "SSH key configuration details"
|
||||
value = {
|
||||
key_name = macstadium_ssh_key.runner_key.name
|
||||
fingerprint = macstadium_ssh_key.runner_key.fingerprint
|
||||
key_pair_id = macstadium_ssh_key.runner_key.id
|
||||
}
|
||||
}
|
||||
|
||||
# Image outputs
|
||||
output "image_details" {
|
||||
description = "Details of images used for VM creation"
|
||||
value = {
|
||||
macos_13 = {
|
||||
id = data.macstadium_image.macos_13.id
|
||||
name = data.macstadium_image.macos_13.name
|
||||
description = data.macstadium_image.macos_13.description
|
||||
created_date = data.macstadium_image.macos_13.creation_date
|
||||
size = data.macstadium_image.macos_13.size
|
||||
}
|
||||
macos_14 = {
|
||||
id = data.macstadium_image.macos_14.id
|
||||
name = data.macstadium_image.macos_14.name
|
||||
description = data.macstadium_image.macos_14.description
|
||||
created_date = data.macstadium_image.macos_14.creation_date
|
||||
size = data.macstadium_image.macos_14.size
|
||||
}
|
||||
macos_15 = {
|
||||
id = data.macstadium_image.macos_15.id
|
||||
name = data.macstadium_image.macos_15.name
|
||||
description = data.macstadium_image.macos_15.description
|
||||
created_date = data.macstadium_image.macos_15.creation_date
|
||||
size = data.macstadium_image.macos_15.size
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Fleet statistics
|
||||
output "fleet_statistics" {
|
||||
description = "Statistics about the VM fleet"
|
||||
value = {
|
||||
total_vms = sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
])
|
||||
vms_by_version = {
|
||||
macos_13 = var.fleet_size.macos_13
|
||||
macos_14 = var.fleet_size.macos_14
|
||||
macos_15 = var.fleet_size.macos_15
|
||||
}
|
||||
total_cpu_cores = sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
]) * var.vm_configuration.cpu_count
|
||||
total_memory_gb = sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
]) * var.vm_configuration.memory_gb
|
||||
total_disk_gb = sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
]) * var.vm_configuration.disk_size
|
||||
}
|
||||
}
|
||||
|
||||
# Connection information
|
||||
output "connection_info" {
|
||||
description = "Information for connecting to the infrastructure"
|
||||
value = {
|
||||
ssh_command_template = "ssh -i ~/.ssh/bun-runner admin@{vm_ip_address}"
|
||||
vnc_port_range = "5900-5999"
|
||||
health_check_url = "http://{vm_ip_address}:8080/health"
|
||||
buildkite_tags = "queue=macos,os=macos,arch=$(uname -m)"
|
||||
}
|
||||
}
|
||||
|
||||
# Resource ARNs and IDs
|
||||
output "resource_arns" {
|
||||
description = "ARNs and IDs of created resources"
|
||||
value = {
|
||||
vm_ids = [
|
||||
for vm in macstadium_vm.runners : vm.id
|
||||
]
|
||||
network_id = macstadium_network.runner_network.id
|
||||
security_group_id = macstadium_security_group.runner_sg.id
|
||||
load_balancer_arn = macstadium_load_balancer.runner_lb.arn
|
||||
autoscaling_group_arn = macstadium_autoscaling_group.runner_asg.arn
|
||||
launch_template_id = macstadium_launch_template.runner_template.id
|
||||
}
|
||||
}
|
||||
|
||||
# Monitoring and alerting
|
||||
output "monitoring_endpoints" {
|
||||
description = "Monitoring and alerting endpoints"
|
||||
value = {
|
||||
cloudwatch_namespace = "BunCI/MacOSRunners"
|
||||
alarm_arns = [
|
||||
macstadium_cloudwatch_metric_alarm.scale_up.arn,
|
||||
macstadium_cloudwatch_metric_alarm.scale_down.arn
|
||||
]
|
||||
scaling_policy_arns = [
|
||||
macstadium_autoscaling_policy.scale_up.arn,
|
||||
macstadium_autoscaling_policy.scale_down.arn
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
# Cost information
|
||||
output "cost_information" {
|
||||
description = "Cost-related information"
|
||||
value = {
|
||||
estimated_hourly_cost = format("$%.2f", sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
]) * 0.50) # Estimated cost per hour per VM
|
||||
estimated_monthly_cost = format("$%.2f", sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
]) * 0.50 * 24 * 30) # Estimated monthly cost
|
||||
cost_optimization_enabled = var.cost_optimization.enable_spot_instances
|
||||
}
|
||||
}
|
||||
|
||||
# Terraform state information
|
||||
output "terraform_state" {
|
||||
description = "Terraform state information"
|
||||
value = {
|
||||
workspace = terraform.workspace
|
||||
terraform_version = "~> 1.0"
|
||||
provider_versions = {
|
||||
macstadium = "~> 1.0"
|
||||
}
|
||||
last_updated = timestamp()
|
||||
}
|
||||
}
|
||||
|
||||
# Summary output for easy reference
|
||||
output "deployment_summary" {
|
||||
description = "Summary of the deployment"
|
||||
value = {
|
||||
project_name = var.project_name
|
||||
environment = var.environment
|
||||
region = var.region
|
||||
total_vms = sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
])
|
||||
load_balancer_dns = macstadium_load_balancer.runner_lb.dns_name
|
||||
autoscaling_enabled = var.autoscaling_enabled
|
||||
backup_enabled = var.backup_config.enable_snapshots
|
||||
monitoring_enabled = var.monitoring_config.enable_cloudwatch
|
||||
deployment_time = timestamp()
|
||||
status = "deployed"
|
||||
}
|
||||
}
|
||||
@@ -1,266 +0,0 @@
|
||||
#!/bin/bash
|
||||
# User data script for macOS VM initialization
|
||||
# This script runs when the VM starts up
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Variables passed from Terraform
|
||||
BUILDKITE_AGENT_TOKEN="${buildkite_agent_token}"
|
||||
GITHUB_TOKEN="${github_token}"
|
||||
MACOS_VERSION="${macos_version}"
|
||||
VM_NAME="${vm_name}"
|
||||
|
||||
# Logging
|
||||
LOG_FILE="/var/log/vm-init.log"
|
||||
exec 1> >(tee -a "$LOG_FILE")
|
||||
exec 2> >(tee -a "$LOG_FILE" >&2)
|
||||
|
||||
print() {
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
|
||||
}
|
||||
|
||||
print "Starting VM initialization for $VM_NAME (macOS $MACOS_VERSION)"
|
||||
|
||||
# Wait for system to be ready
|
||||
print "Waiting for system to be ready..."
|
||||
until ping -c1 google.com &>/dev/null; do
|
||||
sleep 10
|
||||
done
|
||||
|
||||
# Set timezone
|
||||
print "Setting timezone to UTC..."
|
||||
sudo systemsetup -settimezone UTC
|
||||
|
||||
# Configure hostname
|
||||
print "Setting hostname to $VM_NAME..."
|
||||
sudo scutil --set HostName "$VM_NAME"
|
||||
sudo scutil --set LocalHostName "$VM_NAME"
|
||||
sudo scutil --set ComputerName "$VM_NAME"
|
||||
|
||||
# Update system
|
||||
print "Checking for system updates..."
|
||||
sudo softwareupdate -i -a --no-scan || true
|
||||
|
||||
# Configure Buildkite agent
|
||||
print "Configuring Buildkite agent..."
|
||||
mkdir -p /usr/local/var/buildkite-agent
|
||||
mkdir -p /usr/local/var/log/buildkite-agent
|
||||
|
||||
# Create Buildkite agent configuration
|
||||
cat > /usr/local/var/buildkite-agent/buildkite-agent.cfg << EOF
|
||||
token="$BUILDKITE_AGENT_TOKEN"
|
||||
name="$VM_NAME"
|
||||
tags="queue=macos,os=macos,arch=$(uname -m),version=$MACOS_VERSION,hostname=$VM_NAME"
|
||||
build-path="/Users/buildkite/workspace"
|
||||
hooks-path="/usr/local/bin/bun-ci/hooks"
|
||||
plugins-path="/Users/buildkite/.buildkite-agent/plugins"
|
||||
git-clean-flags="-fdq"
|
||||
git-clone-flags="-v"
|
||||
shell="/bin/bash -l"
|
||||
spawn=1
|
||||
priority=normal
|
||||
disconnect-after-job=false
|
||||
disconnect-after-idle-timeout=0
|
||||
cancel-grace-period=10
|
||||
enable-job-log-tmpfile=true
|
||||
timestamp-lines=true
|
||||
EOF
|
||||
|
||||
# Set up GitHub token for private repositories
|
||||
print "Configuring GitHub access..."
|
||||
if [[ -n "$GITHUB_TOKEN" ]]; then
|
||||
# Configure git to use the token
|
||||
git config --global url."https://oauth2:$GITHUB_TOKEN@github.com/".insteadOf "https://github.com/"
|
||||
git config --global --add url."https://oauth2:$GITHUB_TOKEN@github.com/".insteadOf "git@github.com:"
|
||||
|
||||
# Configure npm to use the token
|
||||
npm config set @oven-sh:registry https://npm.pkg.github.com/
|
||||
echo "//npm.pkg.github.com/:_authToken=$GITHUB_TOKEN" >> ~/.npmrc
|
||||
fi
|
||||
|
||||
# Set up SSH keys for GitHub (if available)
|
||||
if [[ -f "/usr/local/etc/ssh/github_rsa" ]]; then
|
||||
print "Configuring SSH keys for GitHub..."
|
||||
mkdir -p ~/.ssh
|
||||
cp /usr/local/etc/ssh/github_rsa ~/.ssh/
|
||||
cp /usr/local/etc/ssh/github_rsa.pub ~/.ssh/
|
||||
chmod 600 ~/.ssh/github_rsa
|
||||
chmod 644 ~/.ssh/github_rsa.pub
|
||||
|
||||
# Configure SSH to use the key
|
||||
cat > ~/.ssh/config << EOF
|
||||
Host github.com
|
||||
HostName github.com
|
||||
User git
|
||||
IdentityFile ~/.ssh/github_rsa
|
||||
StrictHostKeyChecking no
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Create health check endpoint
|
||||
print "Setting up health check endpoint..."
|
||||
cat > /usr/local/bin/health-check.sh << 'EOF'
|
||||
#!/bin/bash
|
||||
# Health check script for load balancer
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Check if system is ready
|
||||
if ! ping -c1 google.com &>/dev/null; then
|
||||
echo "Network not ready"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check disk space
|
||||
DISK_USAGE=$(df -h / | awk 'NR==2 {print $5}' | sed 's/%//')
|
||||
if [[ $DISK_USAGE -gt 95 ]]; then
|
||||
echo "Disk usage too high: ${DISK_USAGE}%"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check memory
|
||||
MEMORY_PRESSURE=$(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}' | sed 's/%//')
|
||||
if [[ $MEMORY_PRESSURE -lt 5 ]]; then
|
||||
echo "Memory pressure too high: ${MEMORY_PRESSURE}% free"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if required services are running
|
||||
if ! pgrep -f "job-runner.sh" > /dev/null; then
|
||||
echo "Job runner not running"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "OK"
|
||||
exit 0
|
||||
EOF
|
||||
|
||||
chmod +x /usr/local/bin/health-check.sh
|
||||
|
||||
# Start simple HTTP server for health checks
|
||||
print "Starting health check server..."
|
||||
cat > /usr/local/bin/health-server.sh << 'EOF'
|
||||
#!/bin/bash
|
||||
# Simple HTTP server for health checks
|
||||
|
||||
PORT=8080
|
||||
while true; do
|
||||
echo -e "HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\n$(/usr/local/bin/health-check.sh)" | nc -l -p $PORT
|
||||
done
|
||||
EOF
|
||||
|
||||
chmod +x /usr/local/bin/health-server.sh
|
||||
|
||||
# Create LaunchDaemon for health check server
|
||||
cat > /Library/LaunchDaemons/com.bun.health-server.plist << 'EOF'
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.bun.health-server</string>
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/local/bin/health-server.sh</string>
|
||||
</array>
|
||||
<key>RunAtLoad</key>
|
||||
<true/>
|
||||
<key>KeepAlive</key>
|
||||
<true/>
|
||||
<key>StandardOutPath</key>
|
||||
<string>/var/log/health-server.log</string>
|
||||
<key>StandardErrorPath</key>
|
||||
<string>/var/log/health-server.error.log</string>
|
||||
</dict>
|
||||
</plist>
|
||||
EOF
|
||||
|
||||
# Load and start the health check server
|
||||
sudo launchctl load /Library/LaunchDaemons/com.bun.health-server.plist
|
||||
sudo launchctl start com.bun.health-server
|
||||
|
||||
# Configure log rotation
|
||||
print "Configuring log rotation..."
|
||||
cat > /etc/newsyslog.d/bun-ci.conf << 'EOF'
|
||||
# Log rotation for Bun CI
|
||||
/usr/local/var/log/buildkite-agent/*.log 644 5 1000 * GZ
|
||||
/var/log/vm-init.log 644 5 1000 * GZ
|
||||
/var/log/health-server.log 644 5 1000 * GZ
|
||||
/var/log/health-server.error.log 644 5 1000 * GZ
|
||||
EOF
|
||||
|
||||
# Restart syslog to pick up new configuration
|
||||
sudo launchctl unload /System/Library/LaunchDaemons/com.apple.syslogd.plist
|
||||
sudo launchctl load /System/Library/LaunchDaemons/com.apple.syslogd.plist
|
||||
|
||||
# Configure system monitoring
|
||||
print "Setting up system monitoring..."
|
||||
cat > /usr/local/bin/system-monitor.sh << 'EOF'
|
||||
#!/bin/bash
|
||||
# System monitoring script
|
||||
|
||||
LOG_FILE="/var/log/system-monitor.log"
|
||||
|
||||
while true; do
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] System Stats:" >> "$LOG_FILE"
|
||||
echo " CPU: $(top -l 1 -n 0 | grep "CPU usage" | awk '{print $3}' | sed 's/%//')" >> "$LOG_FILE"
|
||||
echo " Memory: $(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}')" >> "$LOG_FILE"
|
||||
echo " Disk: $(df -h / | awk 'NR==2 {print $5}')" >> "$LOG_FILE"
|
||||
echo " Load: $(uptime | awk -F'load averages:' '{print $2}')" >> "$LOG_FILE"
|
||||
echo " Processes: $(ps aux | wc -l)" >> "$LOG_FILE"
|
||||
echo "" >> "$LOG_FILE"
|
||||
|
||||
sleep 300 # 5 minutes
|
||||
done
|
||||
EOF
|
||||
|
||||
chmod +x /usr/local/bin/system-monitor.sh
|
||||
|
||||
# Create LaunchDaemon for system monitoring
|
||||
cat > /Library/LaunchDaemons/com.bun.system-monitor.plist << 'EOF'
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.bun.system-monitor</string>
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/local/bin/system-monitor.sh</string>
|
||||
</array>
|
||||
<key>RunAtLoad</key>
|
||||
<true/>
|
||||
<key>KeepAlive</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
EOF
|
||||
|
||||
# Load and start the system monitor
|
||||
sudo launchctl load /Library/LaunchDaemons/com.bun.system-monitor.plist
|
||||
sudo launchctl start com.bun.system-monitor
|
||||
|
||||
# Final configuration
|
||||
print "Performing final configuration..."
|
||||
|
||||
# Ensure all services are running
|
||||
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
|
||||
sudo launchctl start com.buildkite.buildkite-agent
|
||||
|
||||
# Create marker file to indicate initialization is complete
|
||||
touch /var/tmp/vm-init-complete
|
||||
echo "$(date '+%Y-%m-%d %H:%M:%S'): VM initialization completed" >> /var/tmp/vm-init-complete
|
||||
|
||||
print "VM initialization completed successfully!"
|
||||
print "VM Name: $VM_NAME"
|
||||
print "macOS Version: $MACOS_VERSION"
|
||||
print "Status: Ready for Buildkite jobs"
|
||||
|
||||
# Log final system state
|
||||
print "Final system state:"
|
||||
print " Hostname: $(hostname)"
|
||||
print " Uptime: $(uptime)"
|
||||
print " Disk usage: $(df -h / | awk 'NR==2 {print $5}')"
|
||||
print " Memory: $(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}')"
|
||||
|
||||
print "Health check available at: http://$(hostname):8080/health"
|
||||
@@ -1,302 +0,0 @@
|
||||
# Core infrastructure variables
|
||||
variable "project_name" {
|
||||
description = "Name of the project"
|
||||
type = string
|
||||
default = "bun-ci"
|
||||
}
|
||||
|
||||
variable "environment" {
|
||||
description = "Environment name"
|
||||
type = string
|
||||
default = "production"
|
||||
}
|
||||
|
||||
variable "region" {
|
||||
description = "MacStadium region"
|
||||
type = string
|
||||
default = "us-west-1"
|
||||
}
|
||||
|
||||
# MacStadium configuration
|
||||
variable "macstadium_api_key" {
|
||||
description = "MacStadium API key"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "macstadium_endpoint" {
|
||||
description = "MacStadium API endpoint"
|
||||
type = string
|
||||
default = "https://api.macstadium.com"
|
||||
}
|
||||
|
||||
# Buildkite configuration
|
||||
variable "buildkite_agent_token" {
|
||||
description = "Buildkite agent token"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "buildkite_org" {
|
||||
description = "Buildkite organization slug"
|
||||
type = string
|
||||
default = "bun"
|
||||
}
|
||||
|
||||
variable "buildkite_queues" {
|
||||
description = "Buildkite queues to register agents with"
|
||||
type = list(string)
|
||||
default = ["macos", "macos-arm64", "macos-x86_64"]
|
||||
}
|
||||
|
||||
# GitHub configuration
|
||||
variable "github_token" {
|
||||
description = "GitHub token for accessing private repositories"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "github_org" {
|
||||
description = "GitHub organization"
|
||||
type = string
|
||||
default = "oven-sh"
|
||||
}
|
||||
|
||||
# VM fleet configuration
|
||||
variable "fleet_size" {
|
||||
description = "Number of VMs per macOS version"
|
||||
type = object({
|
||||
macos_13 = number
|
||||
macos_14 = number
|
||||
macos_15 = number
|
||||
})
|
||||
default = {
|
||||
macos_13 = 4
|
||||
macos_14 = 6
|
||||
macos_15 = 8
|
||||
}
|
||||
|
||||
validation {
|
||||
condition = alltrue([
|
||||
var.fleet_size.macos_13 >= 0,
|
||||
var.fleet_size.macos_14 >= 0,
|
||||
var.fleet_size.macos_15 >= 0,
|
||||
var.fleet_size.macos_13 + var.fleet_size.macos_14 + var.fleet_size.macos_15 > 0
|
||||
])
|
||||
error_message = "Fleet sizes must be non-negative and at least one version must have VMs."
|
||||
}
|
||||
}
|
||||
|
||||
variable "vm_configuration" {
|
||||
description = "VM configuration settings"
|
||||
type = object({
|
||||
cpu_count = number
|
||||
memory_gb = number
|
||||
disk_size = number
|
||||
})
|
||||
default = {
|
||||
cpu_count = 12
|
||||
memory_gb = 32
|
||||
disk_size = 500
|
||||
}
|
||||
|
||||
validation {
|
||||
condition = alltrue([
|
||||
var.vm_configuration.cpu_count >= 4,
|
||||
var.vm_configuration.memory_gb >= 16,
|
||||
var.vm_configuration.disk_size >= 100
|
||||
])
|
||||
error_message = "VM configuration must have at least 4 CPUs, 16GB memory, and 100GB disk."
|
||||
}
|
||||
}
|
||||
|
||||
# Auto-scaling configuration
|
||||
variable "autoscaling_enabled" {
|
||||
description = "Enable auto-scaling for VM fleet"
|
||||
type = bool
|
||||
default = true
|
||||
}
|
||||
|
||||
variable "autoscaling_config" {
|
||||
description = "Auto-scaling configuration"
|
||||
type = object({
|
||||
min_size = number
|
||||
max_size = number
|
||||
desired_capacity = number
|
||||
scale_up_threshold = number
|
||||
scale_down_threshold = number
|
||||
scale_up_adjustment = number
|
||||
scale_down_adjustment = number
|
||||
cooldown_period = number
|
||||
})
|
||||
default = {
|
||||
min_size = 2
|
||||
max_size = 30
|
||||
desired_capacity = 10
|
||||
scale_up_threshold = 80
|
||||
scale_down_threshold = 20
|
||||
scale_up_adjustment = 2
|
||||
scale_down_adjustment = 1
|
||||
cooldown_period = 300
|
||||
}
|
||||
}
|
||||
|
||||
# Image configuration
|
||||
variable "image_name_prefix" {
|
||||
description = "Prefix for VM image names"
|
||||
type = string
|
||||
default = "bun-macos"
|
||||
}
|
||||
|
||||
variable "image_rebuild_schedule" {
|
||||
description = "Cron schedule for rebuilding images"
|
||||
type = string
|
||||
default = "0 2 * * *" # Daily at 2 AM
|
||||
}
|
||||
|
||||
variable "image_retention_days" {
|
||||
description = "Number of days to retain old images"
|
||||
type = number
|
||||
default = 7
|
||||
}
|
||||
|
||||
# Network configuration
|
||||
variable "network_config" {
|
||||
description = "Network configuration"
|
||||
type = object({
|
||||
cidr_block = string
|
||||
enable_nat = bool
|
||||
enable_vpn = bool
|
||||
allowed_cidrs = list(string)
|
||||
})
|
||||
default = {
|
||||
cidr_block = "10.0.0.0/16"
|
||||
enable_nat = true
|
||||
enable_vpn = false
|
||||
allowed_cidrs = ["0.0.0.0/0"]
|
||||
}
|
||||
}
|
||||
|
||||
# Security configuration
|
||||
variable "security_config" {
|
||||
description = "Security configuration"
|
||||
type = object({
|
||||
enable_ssh_access = bool
|
||||
enable_vnc_access = bool
|
||||
ssh_allowed_cidrs = list(string)
|
||||
vnc_allowed_cidrs = list(string)
|
||||
enable_disk_encryption = bool
|
||||
})
|
||||
default = {
|
||||
enable_ssh_access = true
|
||||
enable_vnc_access = true
|
||||
ssh_allowed_cidrs = ["0.0.0.0/0"]
|
||||
vnc_allowed_cidrs = ["10.0.0.0/16"]
|
||||
enable_disk_encryption = true
|
||||
}
|
||||
}
|
||||
|
||||
# Monitoring configuration
|
||||
variable "monitoring_config" {
|
||||
description = "Monitoring configuration"
|
||||
type = object({
|
||||
enable_cloudwatch = bool
|
||||
enable_custom_metrics = bool
|
||||
log_retention_days = number
|
||||
alert_email = string
|
||||
})
|
||||
default = {
|
||||
enable_cloudwatch = true
|
||||
enable_custom_metrics = true
|
||||
log_retention_days = 30
|
||||
alert_email = "devops@oven.sh"
|
||||
}
|
||||
}
|
||||
|
||||
# Backup configuration
|
||||
variable "backup_config" {
|
||||
description = "Backup configuration"
|
||||
type = object({
|
||||
enable_snapshots = bool
|
||||
snapshot_schedule = string
|
||||
snapshot_retention = number
|
||||
enable_cross_region = bool
|
||||
})
|
||||
default = {
|
||||
enable_snapshots = true
|
||||
snapshot_schedule = "0 4 * * *" # Daily at 4 AM
|
||||
snapshot_retention = 7
|
||||
enable_cross_region = false
|
||||
}
|
||||
}
|
||||
|
||||
# Cost optimization
|
||||
variable "cost_optimization" {
|
||||
description = "Cost optimization settings"
|
||||
type = object({
|
||||
enable_spot_instances = bool
|
||||
spot_price_max = number
|
||||
enable_hibernation = bool
|
||||
idle_shutdown_timeout = number
|
||||
})
|
||||
default = {
|
||||
enable_spot_instances = false
|
||||
spot_price_max = 0.0
|
||||
enable_hibernation = false
|
||||
idle_shutdown_timeout = 3600 # 1 hour
|
||||
}
|
||||
}
|
||||
|
||||
# Maintenance configuration
|
||||
variable "maintenance_config" {
|
||||
description = "Maintenance configuration"
|
||||
type = object({
|
||||
maintenance_window_start = string
|
||||
maintenance_window_end = string
|
||||
auto_update_enabled = bool
|
||||
patch_schedule = string
|
||||
})
|
||||
default = {
|
||||
maintenance_window_start = "02:00"
|
||||
maintenance_window_end = "06:00"
|
||||
auto_update_enabled = true
|
||||
patch_schedule = "0 3 * * 0" # Weekly on Sunday at 3 AM
|
||||
}
|
||||
}
|
||||
|
||||
# Tagging
|
||||
variable "tags" {
|
||||
description = "Additional tags to apply to resources"
|
||||
type = map(string)
|
||||
default = {}
|
||||
}
|
||||
|
||||
# SSH key configuration
|
||||
variable "ssh_key_name" {
|
||||
description = "Name of the SSH key pair"
|
||||
type = string
|
||||
default = "bun-runner-key"
|
||||
}
|
||||
|
||||
variable "ssh_public_key_path" {
|
||||
description = "Path to the SSH public key file"
|
||||
type = string
|
||||
default = "~/.ssh/id_rsa.pub"
|
||||
}
|
||||
|
||||
# Feature flags
|
||||
variable "feature_flags" {
|
||||
description = "Feature flags for experimental features"
|
||||
type = object({
|
||||
enable_gpu_passthrough = bool
|
||||
enable_nested_virt = bool
|
||||
enable_secure_boot = bool
|
||||
enable_tpm = bool
|
||||
})
|
||||
default = {
|
||||
enable_gpu_passthrough = true
|
||||
enable_nested_virt = false
|
||||
enable_secure_boot = false
|
||||
enable_tpm = false
|
||||
}
|
||||
}
|
||||
.github/pull_request_template.md (vendored, 47 changed lines)
@@ -1,50 +1,3 @@
|
||||
### What does this PR do?
|
||||
|
||||
<!-- **Please explain what your changes do**, example: -->
|
||||
|
||||
<!--
|
||||
|
||||
This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.
|
||||
|
||||
-->
|
||||
|
||||
- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
|
||||
- [ ] Code changes
|
||||
|
||||
### How did you verify your code works?
|
||||
|
||||
<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->
|
||||
|
||||
<!-- I wrote automated tests -->
|
||||
|
||||
<!-- If JavaScript/TypeScript modules or builtins changed:
|
||||
|
||||
- [ ] I included a test for the new code, or existing tests cover it
|
||||
- [ ] I ran my tests locally and they pass (`bun-debug test test-file-name.test`)
|
||||
|
||||
-->
|
||||
|
||||
<!-- If Zig files changed:
|
||||
|
||||
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
|
||||
- [ ] I included a test for the new code, or an existing test covers it
|
||||
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
|
||||
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
|
||||
-->
|
||||
|
||||
<!-- If new methods, getters, or setters were added to a publicly exposed class:
|
||||
|
||||
- [ ] I added TypeScript types for the new methods, getters, or setters
|
||||
-->
|
||||
|
||||
<!-- If dependencies in tests changed:
|
||||
|
||||
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
|
||||
-->
|
||||
|
||||
<!-- If a new builtin ESM/CJS module was added:
|
||||
|
||||
- [ ] I updated Aliases in `module_loader.zig` to include the new module
|
||||
- [ ] I added a test that imports the module
|
||||
- [ ] I added a test that require() the module
|
||||
-->
|
||||
|
||||
.github/workflows/CLAUDE.md (vendored, new file, 103 lines)
@@ -0,0 +1,103 @@
|
||||
# GitHub Actions Workflow Maintenance Guide
|
||||
|
||||
This document provides guidance for maintaining the GitHub Actions workflows in this repository.
|
||||
|
||||
## format.yml Workflow
|
||||
|
||||
### Overview
|
||||
The `format.yml` workflow runs code formatters (Prettier, clang-format, and Zig fmt) on pull requests and pushes to main. It's optimized for speed by running all formatters in parallel.
|
||||
|
||||
### Key Components
|
||||
|
||||
#### 1. Clang-format Script (`scripts/run-clang-format.sh`)
|
||||
- **Purpose**: Formats C++ source and header files
|
||||
- **What it does**:
|
||||
- Reads C++ files from `cmake/sources/CxxSources.txt`
|
||||
- Finds all header files in `src/` and `packages/`
|
||||
- Excludes third-party directories (libuv, napi, deps, vendor, sqlite, etc.)
|
||||
- Requires specific clang-format version (no fallbacks)
|
||||
|
||||
**Important exclusions** (a rough sketch of the filtering appears after this list):
|
||||
- `src/napi/` - Node API headers (third-party)
|
||||
- `src/bun.js/bindings/libuv/` - libuv headers (third-party)
|
||||
- `src/bun.js/bindings/sqlite/` - SQLite headers (third-party)
|
||||
- `src/bun.js/api/ffi-*.h` - FFI headers (generated/third-party)
|
||||
- `src/deps/` - Dependencies (third-party)
|
||||
- Files in `vendor/`, `third_party/`, `generated/` directories
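
A rough sketch of how that filtering could be expressed, using the paths from the list above (the exact patterns and ordering in `scripts/run-clang-format.sh` may differ):

```bash
# Hedged sketch: gather header files, drop the third-party paths listed
# above, and hand the rest to clang-format-19. Not the script's exact code.
# (C++ sources listed in cmake/sources/CxxSources.txt are handled separately.)
find src packages -type f \( -name '*.h' -o -name '*.hpp' \) \
  | grep -v -E 'src/napi/|src/bun\.js/bindings/libuv/|src/bun\.js/bindings/sqlite/|src/bun\.js/api/ffi-|src/deps/|/vendor/|/third_party/|/generated/' \
  | xargs -r clang-format-19 -i
```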
|
||||
|
||||
#### 2. Parallel Execution
|
||||
The workflow runs all three formatters simultaneously (a minimal sketch of the pattern follows the list):
|
||||
- Each formatter outputs with a prefix (`[prettier]`, `[clang-format]`, `[zig]`)
|
||||
- Output is streamed in real-time without blocking
|
||||
- Uses GitHub Actions groups (`::group::`) for collapsible sections
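
As a minimal sketch of this pattern (not the exact workflow code, which appears in `format.yml` further down), each tool is started in the background with its output prefixed, then waited on:

```bash
# Hedged sketch of the parallel pattern: prefix each tool's output,
# run the tools in the background, then collect failures via wait.
(bun run prettier 2>&1 | sed 's/^/[prettier] /') &
PRETTIER_PID=$!

(zig fmt src 2>&1 | sed 's/^/[zig] /') &
ZIG_PID=$!

FAILED=0
wait "$PRETTIER_PID" || FAILED=1
wait "$ZIG_PID" || FAILED=1
exit "$FAILED"
```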
|
||||
|
||||
#### 3. Tool Installation
|
||||
|
||||
##### Clang-format-19
|
||||
- Installs ONLY `clang-format-19` package (not the entire LLVM toolchain)
|
||||
- Uses `--no-install-recommends --no-install-suggests` to skip unnecessary packages
|
||||
- Quiet installation with `-qq` and `-o=Dpkg::Use-Pty=0` (see the sketch below)
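
For reference, the install step looks roughly like this (a sketch; `format.yml` derives the version from `LLVM_VERSION_MAJOR` and adds the apt.llvm.org repository first rather than hard-coding anything here):

```bash
# Hedged sketch of the minimal clang-format install on the Ubuntu runner.
# Assumes the apt.llvm.org repository has already been configured.
sudo apt-get update -qq
sudo apt-get install -y -qq --no-install-recommends --no-install-suggests \
  -o=Dpkg::Use-Pty=0 clang-format-19
```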
|
||||
|
||||
##### Zig
|
||||
- Downloads from `oven-sh/zig` releases (musl build for static linking)
|
||||
- URL: `https://github.com/oven-sh/zig/releases/download/autobuild-{COMMIT}/bootstrap-x86_64-linux-musl.zip`
|
||||
- Extracts to temp directory to avoid polluting the repository
|
||||
- Directory structure: `bootstrap-x86_64-linux-musl/zig`
|
||||
|
||||
### Updating the Workflow
|
||||
|
||||
#### To update Zig version:
|
||||
1. Find the new commit hash from https://github.com/oven-sh/zig/releases
|
||||
2. Replace the hash in the wget URL (line 65 of format.yml)
|
||||
3. Test that the URL is valid and the binary works (one way to check is sketched below)
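
One way to sanity-check a candidate hash locally (a hedged sketch; `NEW_COMMIT` is a placeholder, not a real release hash):

```bash
# Hedged sketch: download the bootstrap build for a candidate commit and
# confirm the zig binary runs before editing format.yml.
NEW_COMMIT=0000000000000000000000000000000000000000  # placeholder
ZIG_TEMP=$(mktemp -d)
wget -q -O "$ZIG_TEMP/zig.zip" \
  "https://github.com/oven-sh/zig/releases/download/autobuild-$NEW_COMMIT/bootstrap-x86_64-linux-musl.zip"
unzip -q -d "$ZIG_TEMP" "$ZIG_TEMP/zig.zip"
"$ZIG_TEMP/bootstrap-x86_64-linux-musl/zig" version
rm -rf "$ZIG_TEMP"
```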
|
||||
|
||||
#### To update clang-format version:
|
||||
1. Update `LLVM_VERSION_MAJOR` environment variable at the top of format.yml
|
||||
2. Update the version check in `scripts/run-clang-format.sh`
|
||||
|
||||
#### To add/remove file exclusions:
|
||||
1. Edit the exclusion patterns in `scripts/run-clang-format.sh` (lines 34-39)
|
||||
2. Test locally to ensure the right files are being formatted
|
||||
|
||||
### Performance Optimizations
|
||||
1. **Parallel execution**: All formatters run simultaneously
|
||||
2. **Minimal installations**: Only required packages, no extras
|
||||
3. **Temp directories**: Tools downloaded to temp dirs, cleaned up after use
|
||||
4. **Streaming output**: Real-time feedback without buffering
|
||||
5. **Early start**: Formatting begins immediately after each tool is ready
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
**If formatters appear to run sequentially:**
|
||||
- Check if output is being buffered (should use `sed` for line prefixing)
|
||||
- Ensure background processes use `&` and proper wait commands
|
||||
|
||||
**If third-party files are being formatted:**
|
||||
- Review exclusion patterns in `scripts/run-clang-format.sh`
|
||||
- Check if new third-party directories were added that need exclusion
|
||||
|
||||
**If clang-format installation is slow:**
|
||||
- Ensure using minimal package installation flags
|
||||
- Check if apt cache needs updating
|
||||
- Consider caching the clang-format binary between runs
|
||||
|
||||
### Testing Changes Locally
|
||||
|
||||
```bash
|
||||
# Test the clang-format script
|
||||
export LLVM_VERSION_MAJOR=19
|
||||
./scripts/run-clang-format.sh format
|
||||
|
||||
# Test with check mode (no modifications)
|
||||
./scripts/run-clang-format.sh check
|
||||
|
||||
# Test specific file exclusions
|
||||
./scripts/run-clang-format.sh format 2>&1 | grep -E "(libuv|napi|deps)"
|
||||
# Should return nothing if exclusions work correctly
|
||||
```
|
||||
|
||||
### Important Notes
|
||||
- The script defaults to **format** mode (modifies files)
|
||||
- Always test locally before pushing workflow changes
|
||||
- The musl Zig build works on glibc systems due to static linking
|
||||
- Keep the exclusion list updated as new third-party code is added
|
||||
.github/workflows/format.yml (vendored, 84 changed lines)
@@ -37,23 +37,73 @@ jobs:
|
||||
- name: Setup Dependencies
|
||||
run: |
|
||||
bun install
|
||||
- name: Install LLVM
|
||||
- name: Format Code
|
||||
run: |
|
||||
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
|
||||
- name: Setup Zig
|
||||
uses: mlugg/setup-zig@v1
|
||||
with:
|
||||
version: 0.14.0
|
||||
- name: Zig Format
|
||||
# Start prettier in background with prefixed output
|
||||
echo "::group::Prettier"
|
||||
(bun run prettier 2>&1 | sed 's/^/[prettier] /' || echo "[prettier] Failed with exit code $?") &
|
||||
PRETTIER_PID=$!
|
||||
|
||||
# Start clang-format installation and formatting in background with prefixed output
|
||||
echo "::group::Clang-format"
|
||||
(
|
||||
echo "[clang-format] Installing clang-format-${{ env.LLVM_VERSION_MAJOR }}..."
|
||||
wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | sudo tee /etc/apt/trusted.gpg.d/apt.llvm.org.asc > /dev/null
|
||||
echo "deb http://apt.llvm.org/$(lsb_release -cs)/ llvm-toolchain-$(lsb_release -cs)-${{ env.LLVM_VERSION_MAJOR }} main" | sudo tee /etc/apt/sources.list.d/llvm.list > /dev/null
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -y -qq --no-install-recommends --no-install-suggests -o=Dpkg::Use-Pty=0 clang-format-${{ env.LLVM_VERSION_MAJOR }}
|
||||
echo "[clang-format] Running clang-format..."
|
||||
LLVM_VERSION_MAJOR=${{ env.LLVM_VERSION_MAJOR }} ./scripts/run-clang-format.sh format 2>&1 | sed 's/^/[clang-format] /'
|
||||
) &
|
||||
CLANG_PID=$!
|
||||
|
||||
# Setup Zig in temp directory and run zig fmt in background with prefixed output
|
||||
echo "::group::Zig fmt"
|
||||
(
|
||||
ZIG_TEMP=$(mktemp -d)
|
||||
echo "[zig] Downloading Zig (musl build)..."
|
||||
wget -q -O "$ZIG_TEMP/zig.zip" https://github.com/oven-sh/zig/releases/download/autobuild-d1a4e0b0ddc75f37c6a090b97eef0cbb6335556e/bootstrap-x86_64-linux-musl.zip
|
||||
unzip -q -d "$ZIG_TEMP" "$ZIG_TEMP/zig.zip"
|
||||
export PATH="$ZIG_TEMP/bootstrap-x86_64-linux-musl:$PATH"
|
||||
echo "[zig] Running zig fmt..."
|
||||
zig fmt src 2>&1 | sed 's/^/[zig] /'
|
||||
./scripts/sort-imports.ts src 2>&1 | sed 's/^/[zig] /'
|
||||
zig fmt src 2>&1 | sed 's/^/[zig] /'
|
||||
rm -rf "$ZIG_TEMP"
|
||||
) &
|
||||
ZIG_PID=$!
|
||||
|
||||
# Wait for all formatting tasks to complete
|
||||
echo ""
|
||||
echo "Running formatters in parallel..."
|
||||
FAILED=0
|
||||
|
||||
if ! wait $PRETTIER_PID; then
|
||||
echo "::error::Prettier failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
if ! wait $CLANG_PID; then
|
||||
echo "::error::Clang-format failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
if ! wait $ZIG_PID; then
|
||||
echo "::error::Zig fmt failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
# Exit with error if any formatter failed
|
||||
if [ $FAILED -eq 1 ]; then
|
||||
echo "::error::One or more formatters failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ All formatters completed successfully"
|
||||
- name: Ban Words
|
||||
run: |
|
||||
bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
|
||||
zig fmt src
|
||||
bun scripts/sortImports src
|
||||
zig fmt src
|
||||
- name: Prettier Format
|
||||
run: |
|
||||
bun run prettier
|
||||
- name: Clang Format
|
||||
run: |
|
||||
bun run clang-format
|
||||
bun ./test/internal/ban-words.test.ts
|
||||
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
|
||||
|
||||
.github/workflows/update-hdrhistogram.yml (vendored, new file, 102 lines)
@@ -0,0 +1,102 @@
|
||||
name: Update hdrhistogram
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check hdrhistogram version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildHdrHistogram.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildHdrHistogram.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildHdrHistogram.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/HdrHistogram/HdrHistogram_c/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Try to get commit SHA from tag object (for annotated tags)
|
||||
# If it fails, assume it's a lightweight tag pointing directly to commit
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
|
||||
if [ -z "$LATEST_SHA" ]; then
|
||||
# Lightweight tag - SHA points directly to commit
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildHdrHistogram.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildHdrHistogram.cmake
|
||||
commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-hdrhistogram-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates hdrhistogram to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/HdrHistogram/HdrHistogram_c/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-hdrhistogram.yml)
|
||||
.github/workflows/update-highway.yml (vendored, new file, 118 lines)
@@ -0,0 +1,118 @@
|
||||
name: Update highway
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check highway version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildHighway.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildHighway.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildHighway.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/google/highway/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
TAG_REF=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG")
|
||||
if [ -z "$TAG_REF" ]; then
|
||||
echo "Error: Could not fetch tag reference for $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Handle both lightweight tags (type: commit) and annotated tags (type: tag)
|
||||
if [ "$TAG_OBJECT_TYPE" = "commit" ]; then
|
||||
# Lightweight tag - object.sha is already the commit SHA
|
||||
LATEST_SHA="$TAG_OBJECT_SHA"
|
||||
elif [ "$TAG_OBJECT_TYPE" = "tag" ]; then
|
||||
# Annotated tag - need to fetch the tag object to get the commit SHA
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$TAG_OBJECT_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $TAG_OBJECT_SHA"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "Error: Unexpected tag object type: $TAG_OBJECT_TYPE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildHighway.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildHighway.cmake
|
||||
commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-highway-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates highway to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/google/highway/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-highway.yml)
|
||||
22 .github/workflows/update-lolhtml.yml vendored
@@ -50,15 +50,27 @@ jobs:
exit 1
fi

LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
# Get the commit SHA that the tag points to
# This handles both lightweight tags (direct commit refs) and annotated tags (tag objects)
TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')

if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1

if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
# This is an annotated tag, we need to get the commit it points to
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
else
# This is a lightweight tag pointing directly to a commit
LATEST_SHA="$LATEST_TAG_SHA"
fi

if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
27 .github/workflows/update-lshpack.yml vendored
@@ -50,15 +50,32 @@ jobs:
exit 1
fi

LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
# Get the tag reference, which contains both SHA and type
TAG_REF=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG")
if [ -z "$TAG_REF" ]; then
echo "Error: Could not fetch tag reference for $LATEST_TAG"
exit 1
fi

LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')

if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1

# If it's an annotated tag, we need to dereference it to get the commit SHA
# If it's a lightweight tag, the SHA already points to the commit
if [ "$TAG_TYPE" = "tag" ]; then
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG"
exit 1
fi
else
# For lightweight tags, the SHA is already the commit SHA
LATEST_SHA="$LATEST_TAG_SHA"
fi

if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
47 .github/workflows/vscode-release.yml vendored Normal file
@@ -0,0 +1,47 @@
name: VSCode Extension Publish
on:
workflow_dispatch:
inputs:
version:
description: "Version to publish (e.g. 0.0.25) - Check the marketplace for the latest version"
required: true
type: string

jobs:
publish:
name: "Publish to VS Code Marketplace"
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4

- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.18"

- name: Install dependencies (root)
run: bun install

- name: Install dependencies
run: bun install
working-directory: packages/bun-vscode

- name: Set Version
run: bun pm version ${{ github.event.inputs.version }} --no-git-tag-version --allow-same-version
working-directory: packages/bun-vscode

- name: Build (inspector protocol)
run: bun install && bun run build
working-directory: packages/bun-inspector-protocol

- name: Build (vscode extension)
run: bun run build
working-directory: packages/bun-vscode

- name: Publish
if: success()
run: bunx vsce publish
env:
VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
working-directory: packages/bun-vscode/extension
4 .gitignore vendored
@@ -184,4 +184,6 @@ codegen-for-zig-team.tar.gz
*.sock
scratch*.{js,ts,tsx,cjs,mjs}

*.bun-build
*.bun-build

scripts/lldb-inline
2 .vscode/settings.json vendored
@@ -168,5 +168,5 @@
"WebKit/WebInspectorUI": true,
},
"git.detectSubmodules": false,
"bun.test.customScript": "bun-debug test"
"bun.test.customScript": "./build/debug/bun-debug test"
}
32 CLAUDE.md
@@ -4,9 +4,9 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed

### Build Commands

- **Build debug version**: `bun bd` or `bun run build:debug`
- **Build debug version**: `bun bd`
- Creates a debug build at `./build/debug/bun-debug`
- Compilation takes ~2.5 minutes
- **CRITICAL**: DO NOT set a build timeout. Compilation takes ~5 minutes. Be patient.
- **Run tests with your debug build**: `bun bd test <test-file>`
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`
@@ -59,8 +59,8 @@ test("my feature", async () => {
});

const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.stdout.text(),
proc.stderr.text(),
proc.exited,
]);

@@ -69,6 +69,8 @@ test("my feature", async () => {
});
```

- Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function.
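For reference, a complete test using the updated pattern above might look like the following sketch. It assumes the repo's `harness` test helpers (`bunExe`, `bunEnv`) and the direct `ReadableStream.text()` readers shown in the diff; the command and assertions are illustrative, not repo code.

```ts
import { expect, test } from "bun:test";
import { bunEnv, bunExe } from "harness"; // assumed repo test helpers

test("my feature", async () => {
  const proc = Bun.spawn({
    cmd: [bunExe(), "--version"],
    env: bunEnv,
    stdout: "pipe",
    stderr: "pipe",
  });

  // Read both streams directly, as in the updated snippet above.
  const [stdout, stderr, exitCode] = await Promise.all([
    proc.stdout.text(),
    proc.stderr.text(),
    proc.exited,
  ]);

  expect(exitCode).toBe(0);
  expect(stdout.trim().length).toBeGreaterThan(0);
});
```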
## Code Architecture

### Language Structure
@@ -133,7 +135,6 @@ test("my feature", async () => {
When implementing JavaScript classes in C++:

1. Create three classes if there's a public constructor:

- `class Foo : public JSC::JSDestructibleObject` (if has C++ fields)
- `class FooPrototype : public JSC::JSNonFinalObject`
- `class FooConstructor : public JSC::InternalFunction`
@@ -193,7 +194,6 @@ Built-in JavaScript modules use special syntax and are organized as:
```

3. **Debug helpers**:

- `$debug()` - Like console.log but stripped in release builds
- `$assert()` - Assertions stripped in release builds
- `if($debug) {}` - Check if debug env var is set
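As a rough illustration of how these helpers appear in builtin-module code (a sketch only; in the real builtins the codegen supplies `$debug`/`$assert`, so the `declare` lines below exist just to keep the snippet self-contained, and the function itself is hypothetical):

```ts
declare function $debug(...args: unknown[]): void; // stripped from release builds
declare function $assert(condition: unknown, message?: string): void; // stripped from release builds

export function normalizeInput(input: string): string {
  $assert(typeof input === "string", "normalizeInput expects a string");
  if ($debug) {
    // only runs when the debug env var is set (and never in release builds)
    $debug("normalizeInput called with", input);
  }
  return input.trim();
}
```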
@@ -221,15 +221,17 @@ bun ci
## Important Development Notes

1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
2. **Use `await using`** for proper resource cleanup with Bun APIs (Bun.spawn, Bun.serve, Bun.connect, etc.)
3. **Follow existing code style** - check neighboring files for patterns
4. **Create regression tests** in `test/regression/issue/` when fixing bugs
5. **Use absolute paths** - Always use absolute paths in file operations
6. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
7. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
8. **Cross-platform** - Test on macOS, Linux, and Windows when making platform-specific changes
9. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
10. **Transpiled source** - Find transpiled files in `/tmp/bun-debug-src/` for debugging
2. **All changes must be tested** - if you're not testing your changes, you're not done.
3. **Get your tests to pass**. If you didn't run the tests, your code does not work.
4. **Follow existing code style** - check neighboring files for patterns
5. **Create tests in the right folder** in `test/` and the test must end in `.test.ts` or `.test.tsx`
6. **Use absolute paths** - Always use absolute paths in file operations
7. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
8. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
9. **Cross-platform** - Run `bun run zig:check-all` to compile the Zig code on all platforms when making platform-specific changes
10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
11. **Be humble & honest** - NEVER overstate what you got done or what actually works in commits, PRs or in messages to the user.
12. **Branch names must start with `claude/`** - This is a requirement for the CI to work.
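Tying those notes together, a new test file under `test/` (name ending in `.test.ts`, run through the debug build with `bun bd test <file>`) might look roughly like this sketch; the server and route are purely illustrative:

```ts
import { expect, test } from "bun:test";

test("responds on an ephemeral port", async () => {
  const server = Bun.serve({
    port: 0, // never hardcode ports, per the testing guidance above
    fetch: () => new Response("ok"),
  });
  try {
    const res = await fetch(`http://localhost:${server.port}`);
    expect(await res.text()).toBe("ok");
  } finally {
    server.stop(); // explicit cleanup; `await using` also works with Bun APIs that support it
  }
});
```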
## Key APIs and Features

@@ -160,6 +160,7 @@ In particular, these are:

- `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works.
- `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig
- `./src/codegen/cppbind.ts` -- Generates automatic Zig bindings for C++ functions marked with `[[ZIG_EXPORT]]` attributes.
- `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary.
- `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point.

@@ -28,10 +28,4 @@ bench("brotli compress stream", async () => {
await pipeline(source, compress);
});

bench("brotli decompress stream", async () => {
const source = Readable.from([compressed]);
const decompress = createBrotliDecompress();
await pipeline(source, decompress);
});

await run();

@@ -752,6 +752,13 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
});
}
}
{
const cppImport = b.createModule(.{
.root_source_file = (std.Build.LazyPath{ .cwd_relative = opts.codegen_path }).path(b, "cpp.zig"),
});
mod.addImport("cpp", cppImport);
cppImport.addImport("bun", mod);
}
inline for (.{
.{ .import = "completions-bash", .file = b.path("completions/bun.bash") },
.{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
220 bun.lock
@@ -4,6 +4,9 @@
|
||||
"": {
|
||||
"name": "bun",
|
||||
"devDependencies": {
|
||||
"@lezer/common": "^1.2.3",
|
||||
"@lezer/cpp": "^1.1.3",
|
||||
"bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
|
||||
"esbuild": "^0.21.4",
|
||||
"mitata": "^0.1.11",
|
||||
"peechy": "0.4.34",
|
||||
@@ -12,7 +15,7 @@
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"source-map-js": "^1.2.0",
|
||||
"typescript": "^5.7.2",
|
||||
"typescript": "5.9.2",
|
||||
},
|
||||
},
|
||||
"packages/@types/bun": {
|
||||
@@ -29,7 +32,6 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^19",
|
||||
"typescript": "^5.0.2",
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@types/react": "^19",
|
||||
@@ -87,41 +89,191 @@
|
||||
|
||||
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="],
|
||||
|
||||
"@lezer/common": ["@lezer/common@1.2.3", "", {}, "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA=="],
|
||||
|
||||
"@lezer/cpp": ["@lezer/cpp@1.1.3", "", { "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0" } }, "sha512-ykYvuFQKGsRi6IcE+/hCSGUhb/I4WPjd3ELhEblm2wS2cOznDFzO+ubK2c+ioysOnlZ3EduV+MVQFCPzAIoY3w=="],
|
||||
|
||||
"@lezer/highlight": ["@lezer/highlight@1.2.1", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA=="],
|
||||
|
||||
"@lezer/lr": ["@lezer/lr@1.4.2", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA=="],
|
||||
|
||||
"@octokit/app": ["@octokit/app@14.1.0", "", { "dependencies": { "@octokit/auth-app": "^6.0.0", "@octokit/auth-unauthenticated": "^5.0.0", "@octokit/core": "^5.0.0", "@octokit/oauth-app": "^6.0.0", "@octokit/plugin-paginate-rest": "^9.0.0", "@octokit/types": "^12.0.0", "@octokit/webhooks": "^12.0.4" } }, "sha512-g3uEsGOQCBl1+W1rgfwoRFUIR6PtvB2T1E4RpygeUU5LrLvlOqcxrt5lfykIeRpUPpupreGJUYl70fqMDXdTpw=="],
|
||||
|
||||
"@octokit/auth-app": ["@octokit/auth-app@6.1.4", "", { "dependencies": { "@octokit/auth-oauth-app": "^7.1.0", "@octokit/auth-oauth-user": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/request-error": "^5.1.0", "@octokit/types": "^13.1.0", "deprecation": "^2.3.1", "lru-cache": "npm:@wolfy1339/lru-cache@^11.0.2-patch.1", "universal-github-app-jwt": "^1.1.2", "universal-user-agent": "^6.0.0" } }, "sha512-QkXkSOHZK4dA5oUqY5Dk3S+5pN2s1igPjEASNQV8/vgJgW034fQWR16u7VsNOK/EljA00eyjYF5mWNxWKWhHRQ=="],
|
||||
|
||||
"@octokit/auth-oauth-app": ["@octokit/auth-oauth-app@7.1.0", "", { "dependencies": { "@octokit/auth-oauth-device": "^6.1.0", "@octokit/auth-oauth-user": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/types": "^13.0.0", "@types/btoa-lite": "^1.0.0", "btoa-lite": "^1.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-w+SyJN/b0l/HEb4EOPRudo7uUOSW51jcK1jwLa+4r7PA8FPFpoxEnHBHMITqCsc/3Vo2qqFjgQfz/xUUvsSQnA=="],
|
||||
|
||||
"@octokit/auth-oauth-device": ["@octokit/auth-oauth-device@6.1.0", "", { "dependencies": { "@octokit/oauth-methods": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-FNQ7cb8kASufd6Ej4gnJ3f1QB5vJitkoV1O0/g6e6lUsQ7+VsSNRHRmFScN2tV4IgKA12frrr/cegUs0t+0/Lw=="],
|
||||
|
||||
"@octokit/auth-oauth-user": ["@octokit/auth-oauth-user@4.1.0", "", { "dependencies": { "@octokit/auth-oauth-device": "^6.1.0", "@octokit/oauth-methods": "^4.1.0", "@octokit/request": "^8.3.1", "@octokit/types": "^13.0.0", "btoa-lite": "^1.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-FrEp8mtFuS/BrJyjpur+4GARteUCrPeR/tZJzD8YourzoVhRics7u7we/aDcKv+yywRNwNi/P4fRi631rG/OyQ=="],
|
||||
|
||||
"@octokit/auth-token": ["@octokit/auth-token@4.0.0", "", {}, "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA=="],
|
||||
|
||||
"@octokit/auth-unauthenticated": ["@octokit/auth-unauthenticated@5.0.1", "", { "dependencies": { "@octokit/request-error": "^5.0.0", "@octokit/types": "^12.0.0" } }, "sha512-oxeWzmBFxWd+XolxKTc4zr+h3mt+yofn4r7OfoIkR/Cj/o70eEGmPsFbueyJE2iBAGpjgTnEOKM3pnuEGVmiqg=="],
|
||||
|
||||
"@octokit/core": ["@octokit/core@5.2.2", "", { "dependencies": { "@octokit/auth-token": "^4.0.0", "@octokit/graphql": "^7.1.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.0.0", "before-after-hook": "^2.2.0", "universal-user-agent": "^6.0.0" } }, "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg=="],
|
||||
|
||||
"@octokit/endpoint": ["@octokit/endpoint@9.0.6", "", { "dependencies": { "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw=="],
|
||||
|
||||
"@octokit/graphql": ["@octokit/graphql@7.1.1", "", { "dependencies": { "@octokit/request": "^8.4.1", "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g=="],
|
||||
|
||||
"@octokit/oauth-app": ["@octokit/oauth-app@6.1.0", "", { "dependencies": { "@octokit/auth-oauth-app": "^7.0.0", "@octokit/auth-oauth-user": "^4.0.0", "@octokit/auth-unauthenticated": "^5.0.0", "@octokit/core": "^5.0.0", "@octokit/oauth-authorization-url": "^6.0.2", "@octokit/oauth-methods": "^4.0.0", "@types/aws-lambda": "^8.10.83", "universal-user-agent": "^6.0.0" } }, "sha512-nIn/8eUJ/BKUVzxUXd5vpzl1rwaVxMyYbQkNZjHrF7Vk/yu98/YDF/N2KeWO7uZ0g3b5EyiFXFkZI8rJ+DH1/g=="],
|
||||
|
||||
"@octokit/oauth-authorization-url": ["@octokit/oauth-authorization-url@6.0.2", "", {}, "sha512-CdoJukjXXxqLNK4y/VOiVzQVjibqoj/xHgInekviUJV73y/BSIcwvJ/4aNHPBPKcPWFnd4/lO9uqRV65jXhcLA=="],
|
||||
|
||||
"@octokit/oauth-methods": ["@octokit/oauth-methods@4.1.0", "", { "dependencies": { "@octokit/oauth-authorization-url": "^6.0.2", "@octokit/request": "^8.3.1", "@octokit/request-error": "^5.1.0", "@octokit/types": "^13.0.0", "btoa-lite": "^1.0.0" } }, "sha512-4tuKnCRecJ6CG6gr0XcEXdZtkTDbfbnD5oaHBmLERTjTMZNi2CbfEHZxPU41xXLDG4DfKf+sonu00zvKI9NSbw=="],
|
||||
|
||||
"@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="],
|
||||
|
||||
"@octokit/plugin-paginate-graphql": ["@octokit/plugin-paginate-graphql@4.0.1", "", { "peerDependencies": { "@octokit/core": ">=5" } }, "sha512-R8ZQNmrIKKpHWC6V2gum4x9LG2qF1RxRjo27gjQcG3j+vf2tLsEfE7I/wRWEPzYMaenr1M+qDAtNcwZve1ce1A=="],
|
||||
|
||||
"@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@11.4.4-cjs.2", "", { "dependencies": { "@octokit/types": "^13.7.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw=="],
|
||||
|
||||
"@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1", "", { "dependencies": { "@octokit/types": "^13.8.0" }, "peerDependencies": { "@octokit/core": "^5" } }, "sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ=="],
|
||||
|
||||
"@octokit/plugin-retry": ["@octokit/plugin-retry@6.1.0", "", { "dependencies": { "@octokit/request-error": "^5.0.0", "@octokit/types": "^13.0.0", "bottleneck": "^2.15.3" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-WrO3bvq4E1Xh1r2mT9w6SDFg01gFmP81nIG77+p/MqW1JeXXgL++6umim3t6x0Zj5pZm3rXAN+0HEjmmdhIRig=="],
|
||||
|
||||
"@octokit/plugin-throttling": ["@octokit/plugin-throttling@8.2.0", "", { "dependencies": { "@octokit/types": "^12.2.0", "bottleneck": "^2.15.3" }, "peerDependencies": { "@octokit/core": "^5.0.0" } }, "sha512-nOpWtLayKFpgqmgD0y3GqXafMFuKcA4tRPZIfu7BArd2lEZeb1988nhWhwx4aZWmjDmUfdgVf7W+Tt4AmvRmMQ=="],
|
||||
|
||||
"@octokit/request": ["@octokit/request@8.4.1", "", { "dependencies": { "@octokit/endpoint": "^9.0.6", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw=="],
|
||||
|
||||
"@octokit/request-error": ["@octokit/request-error@5.1.1", "", { "dependencies": { "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" } }, "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g=="],
|
||||
|
||||
"@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],
|
||||
|
||||
"@octokit/webhooks": ["@octokit/webhooks@12.3.2", "", { "dependencies": { "@octokit/request-error": "^5.0.0", "@octokit/webhooks-methods": "^4.1.0", "@octokit/webhooks-types": "7.6.1", "aggregate-error": "^3.1.0" } }, "sha512-exj1MzVXoP7xnAcAB3jZ97pTvVPkQF9y6GA/dvYC47HV7vLv+24XRS6b/v/XnyikpEuvMhugEXdGtAlU086WkQ=="],
|
||||
|
||||
"@octokit/webhooks-methods": ["@octokit/webhooks-methods@5.1.1", "", {}, "sha512-NGlEHZDseJTCj8TMMFehzwa9g7On4KJMPVHDSrHxCQumL6uSQR8wIkP/qesv52fXqV1BPf4pTxwtS31ldAt9Xg=="],
|
||||
|
||||
"@octokit/webhooks-types": ["@octokit/webhooks-types@7.6.1", "", {}, "sha512-S8u2cJzklBC0FgTwWVLaM8tMrDuDMVE4xiTK4EYXM9GntyvrdbSoxqDQa+Fh57CCNApyIpyeqPhhFEmHPfrXgw=="],
|
||||
|
||||
"@sentry/types": ["@sentry/types@7.120.3", "", {}, "sha512-C4z+3kGWNFJ303FC+FxAd4KkHvxpNFYAFN8iMIgBwJdpIl25KZ8Q/VdGn0MLLUEHNLvjob0+wvwlcRBBNLXOow=="],
|
||||
|
||||
"@types/aws-lambda": ["@types/aws-lambda@8.10.152", "", {}, "sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw=="],
|
||||
|
||||
"@types/btoa-lite": ["@types/btoa-lite@1.0.2", "", {}, "sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg=="],
|
||||
|
||||
"@types/bun": ["@types/bun@workspace:packages/@types/bun"],
|
||||
|
||||
"@types/node": ["@types/node@22.15.18", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-v1DKRfUdyW+jJhZNEI1PYy29S2YRxMV5AOO/x/SjKmW0acCIOqmbj6Haf9eHAhsPmrhlHSxEhv/1WszcLWV4cg=="],
|
||||
"@types/jsonwebtoken": ["@types/jsonwebtoken@9.0.10", "", { "dependencies": { "@types/ms": "*", "@types/node": "*" } }, "sha512-asx5hIG9Qmf/1oStypjanR7iKTv0gXQ1Ov/jfrX6kS/EO0OFni8orbmGCn0672NHR3kXHwpAwR+B368ZGN/2rA=="],
|
||||
|
||||
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
|
||||
|
||||
"@types/node": ["@types/node@24.1.0", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w=="],
|
||||
|
||||
"@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],
|
||||
|
||||
"aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="],
|
||||
|
||||
"before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="],
|
||||
|
||||
"bottleneck": ["bottleneck@2.19.5", "", {}, "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw=="],
|
||||
|
||||
"btoa-lite": ["btoa-lite@1.0.0", "", {}, "sha512-gvW7InbIyF8AicrqWoptdW08pUxuhq8BEgowNajy9RhiE86fmGAGl+bLKo6oB8QP0CkqHLowfN0oJdKC/J6LbA=="],
|
||||
|
||||
"buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="],
|
||||
|
||||
"bun-tracestrings": ["bun-tracestrings@github:oven-sh/bun.report#912ca63", { "dependencies": { "@octokit/webhooks-methods": "^5.1.0", "@sentry/types": "^7.112.2", "@types/bun": "^1.2.6", "html-minifier": "^4.0.0", "lightningcss": "^1.24.1", "marked": "^12.0.1", "octokit": "^3.2.0", "prettier": "^3.2.5", "typescript": "^5.0.0" }, "bin": { "ci-remap-server": "./bin/ci-remap-server.ts" } }, "oven-sh-bun.report-912ca63"],
|
||||
|
||||
"bun-types": ["bun-types@workspace:packages/bun-types"],
|
||||
|
||||
"camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],
|
||||
"camel-case": ["camel-case@3.0.0", "", { "dependencies": { "no-case": "^2.2.0", "upper-case": "^1.1.1" } }, "sha512-+MbKztAYHXPr1jNTSKQF52VpcFjwY5RkR7fxksV8Doo4KAYc5Fl4UJRgthBbTmEx8C54DqahhbLJkDwjI3PI/w=="],
|
||||
|
||||
"capital-case": ["capital-case@1.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A=="],
|
||||
|
||||
"change-case": ["change-case@4.1.2", "", { "dependencies": { "camel-case": "^4.1.2", "capital-case": "^1.0.4", "constant-case": "^3.0.4", "dot-case": "^3.0.4", "header-case": "^2.0.4", "no-case": "^3.0.4", "param-case": "^3.0.4", "pascal-case": "^3.1.2", "path-case": "^3.0.4", "sentence-case": "^3.0.4", "snake-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A=="],
|
||||
|
||||
"clean-css": ["clean-css@4.2.4", "", { "dependencies": { "source-map": "~0.6.0" } }, "sha512-EJUDT7nDVFDvaQgAo2G/PJvxmp1o/c6iXLbswsBbUFXi1Nr+AjA2cKmfbKDMjMvzEe75g3P6JkaDDAKk96A85A=="],
|
||||
|
||||
"clean-stack": ["clean-stack@2.2.0", "", {}, "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A=="],
|
||||
|
||||
"commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="],
|
||||
|
||||
"constant-case": ["constant-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case": "^2.0.2" } }, "sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
"deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="],
|
||||
|
||||
"detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="],
|
||||
|
||||
"dot-case": ["dot-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w=="],
|
||||
|
||||
"ecdsa-sig-formatter": ["ecdsa-sig-formatter@1.0.11", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ=="],
|
||||
|
||||
"esbuild": ["esbuild@0.21.5", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.21.5", "@esbuild/android-arm": "0.21.5", "@esbuild/android-arm64": "0.21.5", "@esbuild/android-x64": "0.21.5", "@esbuild/darwin-arm64": "0.21.5", "@esbuild/darwin-x64": "0.21.5", "@esbuild/freebsd-arm64": "0.21.5", "@esbuild/freebsd-x64": "0.21.5", "@esbuild/linux-arm": "0.21.5", "@esbuild/linux-arm64": "0.21.5", "@esbuild/linux-ia32": "0.21.5", "@esbuild/linux-loong64": "0.21.5", "@esbuild/linux-mips64el": "0.21.5", "@esbuild/linux-ppc64": "0.21.5", "@esbuild/linux-riscv64": "0.21.5", "@esbuild/linux-s390x": "0.21.5", "@esbuild/linux-x64": "0.21.5", "@esbuild/netbsd-x64": "0.21.5", "@esbuild/openbsd-x64": "0.21.5", "@esbuild/sunos-x64": "0.21.5", "@esbuild/win32-arm64": "0.21.5", "@esbuild/win32-ia32": "0.21.5", "@esbuild/win32-x64": "0.21.5" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw=="],
|
||||
|
||||
"he": ["he@1.2.0", "", { "bin": { "he": "bin/he" } }, "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw=="],
|
||||
|
||||
"header-case": ["header-case@2.0.4", "", { "dependencies": { "capital-case": "^1.0.4", "tslib": "^2.0.3" } }, "sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q=="],
|
||||
|
||||
"html-minifier": ["html-minifier@4.0.0", "", { "dependencies": { "camel-case": "^3.0.0", "clean-css": "^4.2.1", "commander": "^2.19.0", "he": "^1.2.0", "param-case": "^2.1.1", "relateurl": "^0.2.7", "uglify-js": "^3.5.1" }, "bin": { "html-minifier": "./cli.js" } }, "sha512-aoGxanpFPLg7MkIl/DDFYtb0iWz7jMFGqFhvEDZga6/4QTjneiD8I/NXL1x5aaoCp7FSIT6h/OhykDdPsbtMig=="],
|
||||
|
||||
"indent-string": ["indent-string@4.0.0", "", {}, "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg=="],
|
||||
|
||||
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
|
||||
|
||||
"jsonwebtoken": ["jsonwebtoken@9.0.2", "", { "dependencies": { "jws": "^3.2.2", "lodash.includes": "^4.3.0", "lodash.isboolean": "^3.0.3", "lodash.isinteger": "^4.0.4", "lodash.isnumber": "^3.0.3", "lodash.isplainobject": "^4.0.6", "lodash.isstring": "^4.0.1", "lodash.once": "^4.0.0", "ms": "^2.1.1", "semver": "^7.5.4" } }, "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ=="],
|
||||
|
||||
"jwa": ["jwa@1.4.2", "", { "dependencies": { "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw=="],
|
||||
|
||||
"jws": ["jws@3.2.2", "", { "dependencies": { "jwa": "^1.4.1", "safe-buffer": "^5.0.1" } }, "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA=="],
|
||||
|
||||
"lightningcss": ["lightningcss@1.30.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-darwin-arm64": "1.30.1", "lightningcss-darwin-x64": "1.30.1", "lightningcss-freebsd-x64": "1.30.1", "lightningcss-linux-arm-gnueabihf": "1.30.1", "lightningcss-linux-arm64-gnu": "1.30.1", "lightningcss-linux-arm64-musl": "1.30.1", "lightningcss-linux-x64-gnu": "1.30.1", "lightningcss-linux-x64-musl": "1.30.1", "lightningcss-win32-arm64-msvc": "1.30.1", "lightningcss-win32-x64-msvc": "1.30.1" } }, "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg=="],
|
||||
|
||||
"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ=="],
|
||||
|
||||
"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA=="],
|
||||
|
||||
"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig=="],
|
||||
|
||||
"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.1", "", { "os": "linux", "cpu": "arm" }, "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q=="],
|
||||
|
||||
"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw=="],
|
||||
|
||||
"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ=="],
|
||||
|
||||
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw=="],
|
||||
|
||||
"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ=="],
|
||||
|
||||
"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA=="],
|
||||
|
||||
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.1", "", { "os": "win32", "cpu": "x64" }, "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg=="],
|
||||
|
||||
"lodash.includes": ["lodash.includes@4.3.0", "", {}, "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="],
|
||||
|
||||
"lodash.isboolean": ["lodash.isboolean@3.0.3", "", {}, "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg=="],
|
||||
|
||||
"lodash.isinteger": ["lodash.isinteger@4.0.4", "", {}, "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA=="],
|
||||
|
||||
"lodash.isnumber": ["lodash.isnumber@3.0.3", "", {}, "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw=="],
|
||||
|
||||
"lodash.isplainobject": ["lodash.isplainobject@4.0.6", "", {}, "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA=="],
|
||||
|
||||
"lodash.isstring": ["lodash.isstring@4.0.1", "", {}, "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw=="],
|
||||
|
||||
"lodash.once": ["lodash.once@4.1.1", "", {}, "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
|
||||
"lower-case": ["lower-case@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg=="],
|
||||
|
||||
"lru-cache": ["@wolfy1339/lru-cache@11.0.2-patch.1", "", {}, "sha512-BgYZfL2ADCXKOw2wJtkM3slhHotawWkgIRRxq4wEybnZQPjvAp71SPX35xepMykTw8gXlzWcWPTY31hlbnRsDA=="],
|
||||
|
||||
"marked": ["marked@12.0.2", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-qXUm7e/YKFoqFPYPa3Ukg9xlI5cyAtGmyEIzMfW//m6kXwCy2Ps9DYf5ioijFKQ8qyuscrHoY04iJGctu2Kg0Q=="],
|
||||
|
||||
"mitata": ["mitata@0.1.14", "", {}, "sha512-8kRs0l636eT4jj68PFXOR2D5xl4m56T478g16SzUPOYgkzQU+xaw62guAQxzBPm+SXb15GQi1cCpDxJfkr4CSA=="],
|
||||
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
|
||||
"no-case": ["no-case@3.0.4", "", { "dependencies": { "lower-case": "^2.0.2", "tslib": "^2.0.3" } }, "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg=="],
|
||||
|
||||
"param-case": ["param-case@3.0.4", "", { "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A=="],
|
||||
"octokit": ["octokit@3.2.2", "", { "dependencies": { "@octokit/app": "^14.0.2", "@octokit/core": "^5.0.0", "@octokit/oauth-app": "^6.0.0", "@octokit/plugin-paginate-graphql": "^4.0.0", "@octokit/plugin-paginate-rest": "11.4.4-cjs.2", "@octokit/plugin-rest-endpoint-methods": "13.3.2-cjs.1", "@octokit/plugin-retry": "^6.0.0", "@octokit/plugin-throttling": "^8.0.0", "@octokit/request-error": "^5.0.0", "@octokit/types": "^13.0.0", "@octokit/webhooks": "^12.3.1" } }, "sha512-7Abo3nADdja8l/aglU6Y3lpnHSfv0tw7gFPiqzry/yCU+2gTAX7R1roJ8hJrxIK+S1j+7iqRJXtmuHJ/UDsBhQ=="],
|
||||
|
||||
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
|
||||
|
||||
"param-case": ["param-case@2.1.1", "", { "dependencies": { "no-case": "^2.2.0" } }, "sha512-eQE845L6ot89sk2N8liD8HAuH4ca6Vvr7VWAWwt7+kvvG5aBcPmmphQ68JsEG2qa9n1TykS2DLeMt363AAH8/w=="],
|
||||
|
||||
"pascal-case": ["pascal-case@3.1.2", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g=="],
|
||||
|
||||
@@ -129,30 +281,78 @@
|
||||
|
||||
"peechy": ["peechy@0.4.34", "", { "dependencies": { "change-case": "^4.1.2" }, "bin": { "peechy": "cli.js" } }, "sha512-Cpke/cCqqZHhkyxz7mdqS8ZAGJFUi5icu3ZGqxm9GC7g2VrhH0tmjPhZoWHAN5ghw1m1wq5+2YvfbDSqgC4+Zg=="],
|
||||
|
||||
"prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="],
|
||||
"prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],
|
||||
|
||||
"prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.1.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0" }, "optionalPeers": ["vue-tsc"] }, "sha512-5aWRdCgv645xaa58X8lOxzZoiHAldAPChljr/MT0crXVOWTZ+Svl4hIWlz+niYSlO6ikE5UXkN1JrRvIP2ut0A=="],
|
||||
"prettier-plugin-organize-imports": ["prettier-plugin-organize-imports@4.2.0", "", { "peerDependencies": { "prettier": ">=2.0", "typescript": ">=2.9", "vue-tsc": "^2.1.0 || 3" }, "optionalPeers": ["vue-tsc"] }, "sha512-Zdy27UhlmyvATZi67BTnLcKTo8fm6Oik59Sz6H64PgZJVs6NJpPD1mT240mmJn62c98/QaL+r3kx9Q3gRpDajg=="],
|
||||
|
||||
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
|
||||
|
||||
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
|
||||
|
||||
"relateurl": ["relateurl@0.2.7", "", {}, "sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog=="],
|
||||
|
||||
"safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
|
||||
|
||||
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
|
||||
|
||||
"semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"sentence-case": ["sentence-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg=="],
|
||||
|
||||
"snake-case": ["snake-case@3.0.4", "", { "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg=="],
|
||||
|
||||
"source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
|
||||
|
||||
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
|
||||
|
||||
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
|
||||
|
||||
"typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
|
||||
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
|
||||
|
||||
"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
|
||||
"uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="],
|
||||
|
||||
"upper-case": ["upper-case@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg=="],
|
||||
"undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="],
|
||||
|
||||
"universal-github-app-jwt": ["universal-github-app-jwt@1.2.0", "", { "dependencies": { "@types/jsonwebtoken": "^9.0.0", "jsonwebtoken": "^9.0.2" } }, "sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g=="],
|
||||
|
||||
"universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="],
|
||||
|
||||
"upper-case": ["upper-case@1.1.3", "", {}, "sha512-WRbjgmYzgXkCV7zNVpy5YgrHgbBv126rMALQQMrmzOVC4GM2waQ9x7xtm8VU+1yF2kWyPzI9zbZ48n4vSxwfSA=="],
|
||||
|
||||
"upper-case-first": ["upper-case-first@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg=="],
|
||||
|
||||
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
|
||||
|
||||
"@octokit/app/@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@9.2.2", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ=="],
|
||||
|
||||
"@octokit/app/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"@octokit/auth-unauthenticated/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"@octokit/plugin-throttling/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"@octokit/webhooks/@octokit/webhooks-methods": ["@octokit/webhooks-methods@4.1.0", "", {}, "sha512-zoQyKw8h9STNPqtm28UGOYFE7O6D4Il8VJwhAtMHFt2C4L0VQT1qGKLeefUOqHNs1mNRYSadVv7x0z8U2yyeWQ=="],
|
||||
|
||||
"bun-tracestrings/typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
|
||||
|
||||
"camel-case/no-case": ["no-case@2.3.2", "", { "dependencies": { "lower-case": "^1.1.1" } }, "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ=="],
|
||||
|
||||
"change-case/camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],
|
||||
|
||||
"change-case/param-case": ["param-case@3.0.4", "", { "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A=="],
|
||||
|
||||
"constant-case/upper-case": ["upper-case@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg=="],
|
||||
|
||||
"param-case/no-case": ["no-case@2.3.2", "", { "dependencies": { "lower-case": "^1.1.1" } }, "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ=="],
|
||||
|
||||
"@octokit/app/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
"@octokit/auth-unauthenticated/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
"@octokit/plugin-throttling/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
"camel-case/no-case/lower-case": ["lower-case@1.1.4", "", {}, "sha512-2Fgx1Ycm599x+WGpIYwJOvsjmXFzTSc34IwDWALRA/8AopUKAVPwfJ+h5+f85BCp0PWmmJcWzEpxOpoXycMpdA=="],
|
||||
|
||||
"param-case/no-case/lower-case": ["lower-case@1.1.4", "", {}, "sha512-2Fgx1Ycm599x+WGpIYwJOvsjmXFzTSc34IwDWALRA/8AopUKAVPwfJ+h5+f85BCp0PWmmJcWzEpxOpoXycMpdA=="],
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,3 +7,6 @@
# Instead, we can only scan the test directory for Bun's runtime tests
root = "test"
preload = "./test/preload.ts"

[install]
linker = "isolated"
@@ -95,13 +95,18 @@ if(LINUX)
optionx(ENABLE_VALGRIND BOOL "If Valgrind support should be enabled" DEFAULT OFF)
endif()

if(DEBUG AND APPLE AND ARCH STREQUAL "aarch64")
if(DEBUG AND ((APPLE AND ARCH STREQUAL "aarch64") OR LINUX))
set(DEFAULT_ASAN ON)
else()
set(DEFAULT_ASAN OFF)
endif()

optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ${DEFAULT_ASAN})
optionx(ENABLE_ZIG_ASAN BOOL "If Zig ASAN support should be enabled" DEFAULT ${ENABLE_ASAN})

if (NOT ENABLE_ASAN)
set(ENABLE_ZIG_ASAN OFF)
endif()

if(RELEASE AND LINUX AND CI AND NOT ENABLE_ASSERTIONS AND NOT ENABLE_ASAN)
set(DEFAULT_LTO ON)
@@ -139,10 +144,10 @@ endif()
optionx(REVISION STRING "The git revision of the build" DEFAULT ${DEFAULT_REVISION})

# Used in process.version, process.versions.node, napi, and elsewhere
optionx(NODEJS_VERSION STRING "The version of Node.js to report" DEFAULT "24.3.0")
setx(NODEJS_VERSION "24.3.0")

# Used in process.versions.modules and compared while loading V8 modules
optionx(NODEJS_ABI_VERSION STRING "The ABI version of Node.js to report" DEFAULT "137")
setx(NODEJS_ABI_VERSION "137")

if(APPLE)
set(DEFAULT_STATIC_SQLITE OFF)
@@ -1,4 +1,3 @@
|
||||
src/bake/bake.bind.ts
|
||||
src/bake/bake.d.ts
|
||||
src/bake/bake.private.d.ts
|
||||
src/bake/bun-framework-react/index.ts
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
src/bake/bake.bind.ts
|
||||
src/bake.bind.ts
|
||||
src/bake/DevServer.bind.ts
|
||||
src/bun.js/api/BunObject.bind.ts
|
||||
src/bun.js/bindgen_test.bind.ts
|
||||
|
||||
@@ -350,6 +350,7 @@ src/bun.js/bindings/webcore/JSTextEncoderStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSURLSearchParams.cpp
|
||||
src/bun.js/bindings/webcore/JSWasmStreamingCompiler.cpp
|
||||
src/bun.js/bindings/webcore/JSWebSocket.cpp
|
||||
src/bun.js/bindings/webcore/JSWorker.cpp
|
||||
src/bun.js/bindings/webcore/JSWorkerOptions.cpp
|
||||
|
||||
@@ -8,6 +8,7 @@ src/codegen/bundle-functions.ts
|
||||
src/codegen/bundle-modules.ts
|
||||
src/codegen/class-definitions.ts
|
||||
src/codegen/client-js.ts
|
||||
src/codegen/cppbind.ts
|
||||
src/codegen/create-hash-table.ts
|
||||
src/codegen/generate-classes.ts
|
||||
src/codegen/generate-compact-string-table.ts
|
||||
@@ -17,3 +18,4 @@ src/codegen/generate-node-errors.ts
|
||||
src/codegen/helpers.ts
|
||||
src/codegen/internal-module-registry-scanner.ts
|
||||
src/codegen/replacements.ts
|
||||
src/codegen/shared-types.ts
|
||||
|
||||
@@ -29,6 +29,7 @@ src/js/builtins/TransformStream.ts
|
||||
src/js/builtins/TransformStreamDefaultController.ts
|
||||
src/js/builtins/TransformStreamInternals.ts
|
||||
src/js/builtins/UtilInspect.ts
|
||||
src/js/builtins/WasmStreaming.ts
|
||||
src/js/builtins/WritableStreamDefaultController.ts
|
||||
src/js/builtins/WritableStreamDefaultWriter.ts
|
||||
src/js/builtins/WritableStreamInternals.ts
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
src/allocators.zig
|
||||
src/allocators/AllocationScope.zig
|
||||
src/allocators/linux_memfd_allocator.zig
|
||||
src/allocators/max_heap_allocator.zig
|
||||
src/allocators/memory_allocator.zig
|
||||
src/allocators/basic.zig
|
||||
src/allocators/LinuxMemFdAllocator.zig
|
||||
src/allocators/MaxHeapAllocator.zig
|
||||
src/allocators/MemoryReportingAllocator.zig
|
||||
src/allocators/mimalloc_arena.zig
|
||||
src/allocators/mimalloc.zig
|
||||
src/allocators/MimallocArena.zig
|
||||
src/allocators/NullableAllocator.zig
|
||||
src/analytics/analytics_schema.zig
|
||||
src/analytics/analytics_thread.zig
|
||||
src/analytics.zig
|
||||
src/analytics/schema.zig
|
||||
src/api/schema.zig
|
||||
src/asan.zig
|
||||
src/ast.zig
|
||||
src/ast/Ast.zig
|
||||
src/ast/ASTMemoryAllocator.zig
|
||||
src/ast/B.zig
|
||||
@@ -17,36 +19,70 @@ src/ast/base.zig
|
||||
src/ast/Binding.zig
|
||||
src/ast/BundledAst.zig
|
||||
src/ast/CharFreq.zig
|
||||
src/ast/ConvertESMExportsForHmr.zig
|
||||
src/ast/E.zig
|
||||
src/ast/Expr.zig
|
||||
src/ast/foldStringAddition.zig
|
||||
src/ast/G.zig
|
||||
src/ast/ImportScanner.zig
|
||||
src/ast/KnownGlobal.zig
|
||||
src/ast/Macro.zig
|
||||
src/ast/maybe.zig
|
||||
src/ast/NewStore.zig
|
||||
src/ast/Op.zig
|
||||
src/ast/P.zig
|
||||
src/ast/parse.zig
|
||||
src/ast/parseFn.zig
|
||||
src/ast/parseImportExport.zig
|
||||
src/ast/parseJSXElement.zig
|
||||
src/ast/parsePrefix.zig
|
||||
src/ast/parseProperty.zig
|
||||
src/ast/Parser.zig
|
||||
src/ast/parseStmt.zig
|
||||
src/ast/parseSuffix.zig
|
||||
src/ast/parseTypescript.zig
|
||||
src/ast/S.zig
|
||||
src/ast/Scope.zig
|
||||
src/ast/ServerComponentBoundary.zig
|
||||
src/ast/SideEffects.zig
|
||||
src/ast/skipTypescript.zig
|
||||
src/ast/Stmt.zig
|
||||
src/ast/Symbol.zig
|
||||
src/ast/symbols.zig
|
||||
src/ast/TS.zig
|
||||
src/ast/TypeScript.zig
|
||||
src/ast/UseDirective.zig
|
||||
src/ast/visit.zig
|
||||
src/ast/visitBinaryExpression.zig
|
||||
src/ast/visitExpr.zig
|
||||
src/ast/visitStmt.zig
|
||||
src/async/posix_event_loop.zig
|
||||
src/async/stub_event_loop.zig
|
||||
src/async/windows_event_loop.zig
|
||||
src/baby_list.zig
|
||||
src/bake/bake.zig
|
||||
src/bake.zig
|
||||
src/bake/DevServer.zig
|
||||
src/bake/DevServer/Assets.zig
|
||||
src/bake/DevServer/DirectoryWatchStore.zig
|
||||
src/bake/DevServer/ErrorReportRequest.zig
|
||||
src/bake/DevServer/HmrSocket.zig
|
||||
src/bake/DevServer/HotReloadEvent.zig
|
||||
src/bake/DevServer/IncrementalGraph.zig
|
||||
src/bake/DevServer/memory_cost.zig
|
||||
src/bake/DevServer/PackedMap.zig
|
||||
src/bake/DevServer/RouteBundle.zig
|
||||
src/bake/DevServer/SerializedFailure.zig
|
||||
src/bake/DevServer/SourceMapStore.zig
|
||||
src/bake/DevServer/WatcherAtomics.zig
|
||||
src/bake/FrameworkRouter.zig
|
||||
src/bake/production.zig
|
||||
src/base64/base64.zig
|
||||
src/bit_set.zig
|
||||
src/bits.zig
|
||||
src/boringssl.zig
|
||||
src/brotli.zig
|
||||
src/btjs.zig
|
||||
src/bun_js.zig
|
||||
src/bun.js.zig
|
||||
src/bun.js/api.zig
|
||||
src/bun.js/api/bun/dns_resolver.zig
|
||||
src/bun.js/api/bun/dns.zig
|
||||
src/bun.js/api/bun/h2_frame_parser.zig
|
||||
src/bun.js/api/bun/lshpack.zig
|
||||
src/bun.js/api/bun/process.zig
|
||||
@@ -96,6 +132,7 @@ src/bun.js/api/Timer/EventLoopTimer.zig
|
||||
src/bun.js/api/Timer/ImmediateObject.zig
|
||||
src/bun.js/api/Timer/TimeoutObject.zig
|
||||
src/bun.js/api/Timer/TimerObjectInternals.zig
|
||||
src/bun.js/api/Timer/WTFTimer.zig
|
||||
src/bun.js/api/TOMLObject.zig
|
||||
src/bun.js/api/UnsafeObject.zig
|
||||
src/bun.js/bindgen_test.zig
|
||||
@@ -237,12 +274,13 @@ src/bun.js/RuntimeTranspilerCache.zig
|
||||
src/bun.js/SavedSourceMap.zig
|
||||
src/bun.js/Strong.zig
|
||||
src/bun.js/test/diff_format.zig
|
||||
src/bun.js/test/diff/diff_match_patch.zig
|
||||
src/bun.js/test/diff/printDiff.zig
|
||||
src/bun.js/test/expect.zig
|
||||
src/bun.js/test/jest.zig
|
||||
src/bun.js/test/pretty_format.zig
|
||||
src/bun.js/test/snapshot.zig
|
||||
src/bun.js/test/test.zig
|
||||
src/bun.js/unbounded_queue.zig
|
||||
src/bun.js/uuid.zig
|
||||
src/bun.js/virtual_machine_exports.zig
|
||||
src/bun.js/VirtualMachine.zig
|
||||
@@ -281,7 +319,6 @@ src/bun.js/webcore/streams.zig
|
||||
src/bun.js/webcore/TextDecoder.zig
|
||||
src/bun.js/webcore/TextEncoder.zig
|
||||
src/bun.js/webcore/TextEncoderStreamEncoder.zig
|
||||
src/bun.js/WTFTimer.zig
|
||||
src/bun.zig
|
||||
src/bundler/AstBuilder.zig
|
||||
src/bundler/bundle_v2.zig
|
||||
@@ -305,12 +342,14 @@ src/bundler/linker_context/generateCodeForLazyExport.zig
|
||||
src/bundler/linker_context/generateCompileResultForCssChunk.zig
|
||||
src/bundler/linker_context/generateCompileResultForHtmlChunk.zig
|
||||
src/bundler/linker_context/generateCompileResultForJSChunk.zig
|
||||
src/bundler/linker_context/OutputFileListBuilder.zig
|
||||
src/bundler/linker_context/postProcessCSSChunk.zig
|
||||
src/bundler/linker_context/postProcessHTMLChunk.zig
|
||||
src/bundler/linker_context/postProcessJSChunk.zig
|
||||
src/bundler/linker_context/prepareCssAstsForChunk.zig
|
||||
src/bundler/linker_context/renameSymbolsInChunk.zig
|
||||
src/bundler/linker_context/scanImportsAndExports.zig
|
||||
src/bundler/linker_context/StaticRouteVisitor.zig
|
||||
src/bundler/linker_context/writeOutputFilesToDisk.zig
|
||||
src/bundler/LinkerContext.zig
|
||||
src/bundler/LinkerGraph.zig
|
||||
@@ -356,9 +395,15 @@ src/cli/test_command.zig
|
||||
src/cli/test/Scanner.zig
|
||||
src/cli/unlink_command.zig
|
||||
src/cli/update_command.zig
|
||||
src/cli/update_interactive_command.zig
|
||||
src/cli/upgrade_command.zig
|
||||
src/cli/why_command.zig
|
||||
src/codegen/process_windows_translate_c.zig
|
||||
src/collections.zig
|
||||
src/collections/baby_list.zig
|
||||
src/collections/bit_set.zig
|
||||
src/collections/hive_array.zig
|
||||
src/collections/multi_array_list.zig
|
||||
src/compile_target.zig
|
||||
src/comptime_string_map.zig
|
||||
src/copy_file.zig
|
||||
@@ -468,7 +513,6 @@ src/defines.zig
|
||||
src/deps/boringssl.translated.zig
|
||||
src/deps/brotli_c.zig
|
||||
src/deps/c_ares.zig
|
||||
src/deps/diffz/DiffMatchPatch.zig
|
||||
src/deps/libdeflate.zig
|
||||
src/deps/libuv.zig
|
||||
src/deps/lol-html.zig
|
||||
@@ -507,7 +551,6 @@ src/env.zig
|
||||
src/errno/darwin_errno.zig
|
||||
src/errno/linux_errno.zig
|
||||
src/errno/windows_errno.zig
|
||||
src/exact_size_matcher.zig
|
||||
src/fd.zig
|
||||
src/feature_flags.zig
|
||||
src/fmt.zig
|
||||
@@ -519,10 +562,8 @@ src/glob.zig
|
||||
src/glob/GlobWalker.zig
|
||||
src/glob/match.zig
|
||||
src/Global.zig
|
||||
src/grapheme.zig
|
||||
src/heap_breakdown.zig
|
||||
src/highway.zig
|
||||
src/hive_array.zig
|
||||
src/hmac.zig
|
||||
src/HTMLScanner.zig
|
||||
src/http.zig
|
||||
@@ -530,6 +571,7 @@ src/http/AsyncHTTP.zig
|
||||
src/http/CertificateInfo.zig
|
||||
src/http/Decompressor.zig
|
||||
src/http/Encoding.zig
|
||||
src/http/ETag.zig
|
||||
src/http/FetchRedirect.zig
|
||||
src/http/HeaderBuilder.zig
|
||||
src/http/Headers.zig
|
||||
@@ -566,6 +608,7 @@ src/install/install_binding.zig
|
||||
src/install/install.zig
|
||||
src/install/integrity.zig
|
||||
src/install/isolated_install.zig
|
||||
src/install/isolated_install/FileCopier.zig
|
||||
src/install/isolated_install/Hardlinker.zig
|
||||
src/install/isolated_install/Installer.zig
|
||||
src/install/isolated_install/Store.zig
|
||||
@@ -616,6 +659,11 @@ src/install/resolvers/folder_resolver.zig
|
||||
src/install/versioned_url.zig
|
||||
src/install/windows-shim/BinLinkingShim.zig
|
||||
src/install/windows-shim/bun_shim_impl.zig
|
||||
src/install/yarn.zig
|
||||
src/interchange.zig
|
||||
src/interchange/json.zig
|
||||
src/interchange/toml.zig
|
||||
src/interchange/toml/lexer.zig
|
||||
src/io/heap.zig
|
||||
src/io/io.zig
|
||||
src/io/MaxBuf.zig
|
||||
@@ -624,14 +672,12 @@ src/io/PipeReader.zig
|
||||
src/io/pipes.zig
|
||||
src/io/PipeWriter.zig
|
||||
src/io/source.zig
|
||||
src/js_ast.zig
|
||||
src/js_lexer_tables.zig
|
||||
src/js_lexer.zig
|
||||
src/js_lexer/identifier.zig
|
||||
src/js_parser.zig
|
||||
src/js_printer.zig
|
||||
src/jsc_stub.zig
|
||||
src/json_parser.zig
|
||||
src/libarchive/libarchive-bindings.zig
|
||||
src/libarchive/libarchive.zig
|
||||
src/linear_fifo.zig
|
||||
@@ -643,7 +689,6 @@ src/main_test.zig
|
||||
src/main_wasm.zig
|
||||
src/main.zig
|
||||
src/meta.zig
|
||||
src/multi_array_list.zig
|
||||
src/napi/napi.zig
|
||||
src/node_fallbacks.zig
|
||||
src/open.zig
|
||||
@@ -685,6 +730,11 @@ src/s3/multipart_options.zig
|
||||
src/s3/multipart.zig
|
||||
src/s3/simple_request.zig
|
||||
src/s3/storage_class.zig
|
||||
src/safety.zig
|
||||
src/safety/alloc.zig
|
||||
src/safety/CriticalSection.zig
|
||||
src/safety/thread_id.zig
|
||||
src/safety/ThreadLock.zig
|
||||
src/semver.zig
|
||||
src/semver/ExternalString.zig
|
||||
src/semver/SemverObject.zig
|
||||
@@ -818,34 +868,40 @@ src/sql/postgres/types/PostgresString.zig
|
||||
src/sql/postgres/types/Tag.zig
|
||||
src/StandaloneModuleGraph.zig
|
||||
src/StaticHashMap.zig
|
||||
src/string_immutable.zig
|
||||
src/string_types.zig
|
||||
src/string.zig
|
||||
src/string/escapeHTML.zig
|
||||
src/string/HashedString.zig
|
||||
src/string/immutable.zig
|
||||
src/string/immutable/escapeHTML.zig
|
||||
src/string/immutable/exact_size_matcher.zig
|
||||
src/string/immutable/grapheme.zig
|
||||
src/string/immutable/paths.zig
|
||||
src/string/immutable/unicode.zig
|
||||
src/string/immutable/visible.zig
|
||||
src/string/MutableString.zig
|
||||
src/string/paths.zig
|
||||
src/string/PathString.zig
|
||||
src/string/SmolStr.zig
|
||||
src/string/StringBuilder.zig
|
||||
src/string/StringJoiner.zig
|
||||
src/string/unicode.zig
|
||||
src/string/visible.zig
|
||||
src/string/WTFStringImpl.zig
|
||||
src/sync.zig
|
||||
src/sys_uv.zig
|
||||
src/sys.zig
|
||||
src/sys/coreutils_error_map.zig
|
||||
src/sys/Error.zig
|
||||
src/sys/File.zig
|
||||
src/sys/libuv_error_map.zig
|
||||
src/system_timer.zig
|
||||
src/test/fixtures.zig
|
||||
src/test/recover.zig
|
||||
src/thread_pool.zig
|
||||
src/threading.zig
|
||||
src/threading/channel.zig
|
||||
src/threading/Condition.zig
|
||||
src/threading/Futex.zig
|
||||
src/threading/guarded_value.zig
|
||||
src/threading/Mutex.zig
|
||||
src/threading/ThreadPool.zig
|
||||
src/threading/unbounded_queue.zig
|
||||
src/threading/WaitGroup.zig
|
||||
src/tmp.zig
|
||||
src/toml/toml_lexer.zig
|
||||
src/toml/toml_parser.zig
|
||||
src/tracy.zig
|
||||
src/trait.zig
|
||||
src/transpiler.zig
|
||||
|
||||
@@ -255,6 +255,10 @@ set(BUN_ZIG_GENERATED_CLASSES_SCRIPT ${CWD}/src/codegen/generate-classes.ts)
|
||||
|
||||
absolute_sources(BUN_ZIG_GENERATED_CLASSES_SOURCES ${CWD}/cmake/sources/ZigGeneratedClassesSources.txt)
|
||||
|
||||
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
|
||||
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
|
||||
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)
|
||||
|
||||
set(BUN_ZIG_GENERATED_CLASSES_OUTPUTS
|
||||
${CODEGEN_PATH}/ZigGeneratedClasses.h
|
||||
${CODEGEN_PATH}/ZigGeneratedClasses.cpp
|
||||
@@ -308,6 +312,27 @@ set(BUN_JAVASCRIPT_OUTPUTS
|
||||
${CWD}/src/bun.js/bindings/GeneratedJS2Native.zig
|
||||
)
|
||||
|
||||
set(BUN_CPP_OUTPUTS
|
||||
${CODEGEN_PATH}/cpp.zig
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-cppbind
|
||||
COMMENT
|
||||
"Generating C++ --> Zig bindings"
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE}
|
||||
${CWD}/src/codegen/cppbind.ts
|
||||
${CWD}/src
|
||||
${CODEGEN_PATH}
|
||||
SOURCES
|
||||
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
|
||||
${BUN_CXX_SOURCES}
|
||||
OUTPUTS
|
||||
${BUN_CPP_OUTPUTS}
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-js-modules
|
||||
@@ -537,6 +562,7 @@ set(BUN_ZIG_GENERATED_SOURCES
|
||||
${BUN_ERROR_CODE_OUTPUTS}
|
||||
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
|
||||
${BUN_JAVASCRIPT_OUTPUTS}
|
||||
${BUN_CPP_OUTPUTS}
|
||||
)
|
||||
|
||||
# In debug builds, these are not embedded, but rather referenced at runtime.
|
||||
@@ -592,7 +618,7 @@ register_command(
|
||||
-Doptimize=${ZIG_OPTIMIZE}
|
||||
-Dcpu=${ZIG_CPU}
|
||||
-Denable_logs=$<IF:$<BOOL:${ENABLE_LOGS}>,true,false>
|
||||
-Denable_asan=$<IF:$<BOOL:${ENABLE_ASAN}>,true,false>
|
||||
-Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
|
||||
-Dversion=${VERSION}
|
||||
-Dreported_nodejs_version=${NODEJS_VERSION}
|
||||
-Dcanary=${CANARY_REVISION}
|
||||
@@ -606,6 +632,7 @@ register_command(
|
||||
TARGETS
|
||||
clone-zig
|
||||
clone-zstd
|
||||
bun-cppbind
|
||||
SOURCES
|
||||
${BUN_ZIG_SOURCES}
|
||||
${BUN_ZIG_GENERATED_SOURCES}
|
||||
@@ -618,10 +645,6 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
|
||||
|
||||
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)
|
||||
|
||||
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
|
||||
absolute_sources(BUN_CXX_SOURCES ${CWD}/cmake/sources/CxxSources.txt)
|
||||
absolute_sources(BUN_C_SOURCES ${CWD}/cmake/sources/CSources.txt)
|
||||
|
||||
if(WIN32)
|
||||
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
|
||||
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle-binding.cpp)
|
||||
@@ -685,7 +708,7 @@ if(WIN32)
|
||||
${CODEGEN_PATH}/windows-app-info.rc
|
||||
@ONLY
|
||||
)
|
||||
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc)
|
||||
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.rc ${CWD}/src/bun.exe.manifest)
|
||||
endif()
|
||||
|
||||
# --- Executable ---
|
||||
@@ -958,6 +981,16 @@ if(APPLE)
|
||||
-Wl,-map,${bun}.linker-map
|
||||
)
|
||||
|
||||
if(DEBUG)
|
||||
target_link_options(${bun} PUBLIC
|
||||
# Suppress ALL linker warnings on macOS.
|
||||
# The intent is to only suppress linker alignment warnings.
|
||||
# As of July 21st, 2025 there doesn't seem to be a more specific suppression just for linker alignment warnings.
|
||||
# If you find one, please update this to only be for linker alignment.
|
||||
-Wl,-w
|
||||
)
|
||||
endif()
|
||||
|
||||
# don't strip in debug, this seems to be needed so that the Zig std library
|
||||
# `*dbHelper` DWARF symbols (used by LLDB for pretty printing) are in the
|
||||
# output executable
|
||||
@@ -1001,7 +1034,6 @@ if(LINUX)
|
||||
--ld-path=${LLD_PROGRAM}
|
||||
-fno-pic
|
||||
-Wl,-no-pie
|
||||
-Wl,-icf=safe
|
||||
-Wl,--as-needed
|
||||
-Wl,-z,stack-size=12800000
|
||||
-Wl,--compress-debug-sections=zlib
|
||||
@@ -1027,6 +1059,13 @@ if(LINUX)
|
||||
-Wl,--gc-sections
|
||||
)
|
||||
endif()
|
||||
|
||||
if (NOT DEBUG AND NOT ENABLE_ASAN)
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,-icf=safe
|
||||
)
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
||||
# --- Symbols list ---
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
HdrHistogram/HdrHistogram_c
|
||||
COMMIT
|
||||
652d51bcc36744fd1a6debfeb1a8a5f58b14022c
|
||||
8dcce8f68512fca460b171bccc3a5afce0048779
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
libarchive/libarchive
|
||||
COMMIT
|
||||
898dc8319355b7e985f68a9819f182aaed61b53a
|
||||
9525f90ca4bd14c7b335e2f8c84a4607b0af6bdf
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
@@ -20,11 +20,14 @@ register_cmake_command(
|
||||
-DENABLE_WERROR=OFF
|
||||
-DENABLE_BZip2=OFF
|
||||
-DENABLE_CAT=OFF
|
||||
-DENABLE_CPIO=OFF
|
||||
-DENABLE_UNZIP=OFF
|
||||
-DENABLE_EXPAT=OFF
|
||||
-DENABLE_ICONV=OFF
|
||||
-DENABLE_LIBB2=OFF
|
||||
-DENABLE_LibGCC=OFF
|
||||
-DENABLE_LIBXML2=OFF
|
||||
-DENABLE_WIN32_XMLLITE=OFF
|
||||
-DENABLE_LZ4=OFF
|
||||
-DENABLE_LZMA=OFF
|
||||
-DENABLE_LZO=OFF
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
cloudflare/lol-html
|
||||
COMMIT
|
||||
67f1d4ffd6b74db7e053fb129dcce620193c180d
|
||||
d64457d9ff0143deef025d5df7e8586092b9afb7
|
||||
)
|
||||
|
||||
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
|
||||
|
||||
@@ -13,14 +13,52 @@ set(MIMALLOC_CMAKE_ARGS
|
||||
-DMI_BUILD_SHARED=OFF
|
||||
-DMI_BUILD_TESTS=OFF
|
||||
-DMI_USE_CXX=ON
|
||||
-DMI_OVERRIDE=OFF
|
||||
-DMI_OSX_ZONE=OFF
|
||||
-DMI_OSX_INTERPOSE=OFF
|
||||
-DMI_SKIP_COLLECT_ON_EXIT=ON
|
||||
|
||||
# ```
|
||||
# ❯ mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
|
||||
# Started development server: http://localhost:3004
|
||||
#
|
||||
# Peak memory usage: 52 MB
|
||||
#
|
||||
# ❯ mimalloc_allow_large_os_pages=1 BUN_PORT=3004 mem bun http-hello.js
|
||||
# Started development server: http://localhost:3004
|
||||
#
|
||||
# Peak memory usage: 74 MB
|
||||
# ```
|
||||
#
|
||||
# ```
|
||||
# ❯ mimalloc_allow_large_os_pages=1 mem bun --eval 1
|
||||
#
|
||||
# Peak memory usage: 52 MB
|
||||
#
|
||||
# ❯ mimalloc_allow_large_os_pages=0 mem bun --eval 1
|
||||
#
|
||||
# Peak memory usage: 30 MB
|
||||
# ```
|
||||
-DMI_NO_THP=1
|
||||
)
|
||||
|
||||
if (ABI STREQUAL "musl")
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_LIBC_MUSL=ON)
|
||||
endif()
|
||||
|
||||
if(ENABLE_ASAN)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_TRACK_ASAN=ON)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_UBSAN=ON)
|
||||
elseif(APPLE OR LINUX)
|
||||
if(APPLE)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
|
||||
else()
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=ON)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(DEBUG)
|
||||
@@ -31,6 +69,12 @@ if(ENABLE_VALGRIND)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
|
||||
endif()
|
||||
|
||||
# Enable SIMD optimizations when not building for baseline (older CPUs)
|
||||
if(NOT ENABLE_BASELINE)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_ARCH=ON)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_SIMD=ON)
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
if(DEBUG)
|
||||
set(MIMALLOC_LIBRARY mimalloc-static-debug)
|
||||
@@ -53,6 +97,7 @@ if(APPLE OR (LINUX AND NOT DEBUG))
|
||||
set(MIMALLOC_LIBRARY CMakeFiles/mimalloc-obj.dir/src/static.c.o)
|
||||
endif()
|
||||
|
||||
|
||||
register_cmake_command(
|
||||
TARGET
|
||||
mimalloc
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION 1098cc50652ab1eab171f58f7669e19ca6c276ae)
|
||||
set(WEBKIT_VERSION 684d4551ce5f62683476409d7402424e0f6eafb5)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
|
||||
@@ -20,7 +20,7 @@ else()
|
||||
unsupported(CMAKE_SYSTEM_NAME)
|
||||
endif()
|
||||
|
||||
set(ZIG_COMMIT "0a0120fa92cd7f6ab244865688b351df634f0707")
|
||||
set(ZIG_COMMIT "edc6229b1fafb1701a25fb4e17114cc756991546")
|
||||
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
|
||||
completions/bun-cli.json (4026 lines, new file; diff suppressed because it is too large)
@@ -164,6 +164,70 @@ Static responses do not allocate additional memory after initialization. You can
|
||||
|
||||
Static route responses are cached for the lifetime of the server object. To reload static routes, call `server.reload(options)`.
|
||||
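As a rough sketch of that reload flow (the route table and responses below are hypothetical, not taken from this section):

```ts
// Hypothetical example: swap a buffered static response at runtime.
const server = Bun.serve({
  routes: {
    "/": new Response("v1"),
  },
  fetch: () => new Response("Not Found", { status: 404 }),
});

// Later, rebuild the route table; the previously cached responses are replaced.
server.reload({
  routes: {
    "/": new Response("v2"),
  },
  fetch: () => new Response("Not Found", { status: 404 }),
});
```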
|
||||
### File Responses vs Static Responses
|
||||
|
||||
When serving files in routes, there are two distinct behaviors depending on whether you buffer the file content or serve it directly:
|
||||
|
||||
```ts
|
||||
Bun.serve({
|
||||
routes: {
|
||||
// Static route - content is buffered in memory at startup
|
||||
"/logo.png": new Response(await Bun.file("./logo.png").bytes()),
|
||||
|
||||
// File route - content is read from filesystem on each request
|
||||
"/download.zip": new Response(Bun.file("./download.zip")),
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
**Static routes** (`new Response(await file.bytes())`) buffer content in memory at startup:
|
||||
|
||||
- **Zero filesystem I/O** during requests - content served entirely from memory
|
||||
- **ETag support** - Automatically generates and validates ETags for caching
|
||||
- **If-None-Match** - Returns `304 Not Modified` when client ETag matches
|
||||
- **No 404 handling** - Missing files cause startup errors, not runtime 404s
|
||||
- **Memory usage** - Full file content stored in RAM
|
||||
- **Best for**: Small static assets, API responses, frequently accessed files
|
||||
|
||||
**File routes** (`new Response(Bun.file(path))`) read from filesystem per request:
|
||||
|
||||
- **Filesystem reads** on each request - checks file existence and reads content
|
||||
- **Built-in 404 handling** - Returns `404 Not Found` if file doesn't exist or becomes inaccessible
|
||||
- **Last-Modified support** - Uses file modification time for `If-Modified-Since` headers
|
||||
- **If-Modified-Since** - Returns `304 Not Modified` when file hasn't changed since client's cached version
|
||||
- **Range request support** - Automatically handles partial content requests with `Content-Range` headers
|
||||
- **Streaming transfers** - Uses buffered reader with backpressure handling for efficient memory usage
|
||||
- **Memory efficient** - Only buffers small chunks during transfer, not entire file
|
||||
- **Best for**: Large files, dynamic content, user uploads, files that change frequently
|
||||
|
||||
### HTTP Caching Behavior
|
||||
|
||||
Both route types implement HTTP caching standards but with different strategies:
|
||||
|
||||
#### Static Routes Caching
|
||||
|
||||
- **ETag generation**: Automatically computes ETag hash from content at startup
|
||||
- **If-None-Match**: Validates client ETag against server ETag
|
||||
- **304 responses**: Returns `304 Not Modified` with empty body when ETags match
|
||||
- **Cache headers**: Inherits any `Cache-Control` headers you provide in the Response
|
||||
- **Consistency**: ETag remains constant until server restart or route reload
|
||||
|
||||
#### File Routes Caching
|
||||
|
||||
- **Last-Modified**: Uses file's `mtime` for `Last-Modified` header
|
||||
- **If-Modified-Since**: Compares client date with file modification time
|
||||
- **304 responses**: Returns `304 Not Modified` when file unchanged since client's cached version
|
||||
- **Content-Length**: Automatically set based on current file size
|
||||
- **Dynamic validation**: Checks file modification time on each request
|
||||
|
||||
#### Status Code Handling
|
||||
|
||||
Both route types automatically adjust status codes:
|
||||
|
||||
- **200 → 204**: Empty files (0 bytes) return `204 No Content` instead of `200 OK`
|
||||
- **200 → 304**: Successful cache validation returns `304 Not Modified`
|
||||
- **File routes only**: Missing or inaccessible files return `404 Not Found`
|
||||
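As a rough illustration of the static-route caching described above, here is a minimal sketch of the ETag / If-None-Match round trip. It assumes a local `./logo.png` exists and that `port: 0` picks a free port; it is not an excerpt from Bun's documentation.

```ts
// Hypothetical check of the ETag / If-None-Match flow for a static route.
const server = Bun.serve({
  port: 0, // let the OS pick a free port
  routes: {
    "/logo.png": new Response(await Bun.file("./logo.png").bytes()),
  },
  fetch: () => new Response("Not Found", { status: 404 }),
});

const url = `http://localhost:${server.port}/logo.png`;

// First request returns 200 with an ETag computed at startup.
const first = await fetch(url);
const etag = first.headers.get("ETag");

// Replaying the ETag yields 304 Not Modified with an empty body.
const second = await fetch(url, {
  headers: etag ? { "If-None-Match": etag } : {},
});
console.log(first.status, second.status); // 200 304

server.stop();
```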
|
||||
```ts
|
||||
const server = Bun.serve({
|
||||
static: {
|
||||
|
||||
@@ -208,8 +208,8 @@ export class ArrayBufferSink {
|
||||
*
|
||||
* This API might change later to separate Uint8ArraySink and ArrayBufferSink
|
||||
*/
|
||||
flush(): number | Uint8Array | ArrayBuffer;
|
||||
end(): ArrayBuffer | Uint8Array;
|
||||
flush(): number | Uint8Array<ArrayBuffer> | ArrayBuffer;
|
||||
end(): ArrayBuffer | Uint8Array<ArrayBuffer>;
|
||||
}
|
||||
```
|
||||
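A small usage sketch (not part of this diff) showing where that `Uint8Array<ArrayBuffer>` return type surfaces, assuming the default non-streaming mode:

```ts
import { ArrayBufferSink } from "bun";

// Buffer a few writes, then collect them as a single Uint8Array.
const sink = new ArrayBufferSink();
sink.start({ asUint8Array: true }); // end() returns Uint8Array<ArrayBuffer>

sink.write("hello ");
sink.write(new TextEncoder().encode("world"));

const bytes = sink.end() as Uint8Array;
console.log(new TextDecoder().decode(bytes)); // "hello world"
```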
|
||||
|
||||
@@ -88,6 +88,20 @@ The order of the `--target` flag does not matter, as long as they're delimited b
|
||||
|
||||
On x64 platforms, Bun uses SIMD optimizations that require a modern CPU supporting AVX2 instructions. The `-baseline` build of Bun is for older CPUs that don't support these optimizations. Normally, the installer detects which version to use automatically, but that is harder when cross-compiling because you may not know the target CPU. This is usually not a concern on Darwin x64, but it is relevant for Windows x64 and Linux x64. If you or your users see `"Illegal instruction"` errors, you may need to use the baseline version.
|
||||
|
||||
## Build-time constants
|
||||
|
||||
Use the `--define` flag to inject build-time constants into your executable, such as version numbers, build timestamps, or configuration values:
|
||||
|
||||
```bash
|
||||
$ bun build --compile --define BUILD_VERSION='"1.2.3"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/cli.ts --outfile mycli
|
||||
```
|
||||
|
||||
These constants are embedded directly into your compiled binary at build time, providing zero runtime overhead and enabling dead code elimination optimizations.
|
||||
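On the consuming side, the constants behave like ordinary identifiers. A minimal sketch follows; the file name `src/cli.ts` matches the command above, and the `declare` lines exist only to satisfy TypeScript:

```ts
// src/cli.ts
declare const BUILD_VERSION: string;
declare const BUILD_TIME: string;

// Replaced with the literal strings passed via --define at build time.
console.log(`mycli ${BUILD_VERSION} (built ${BUILD_TIME})`);
```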
|
||||
{% callout type="info" %}
|
||||
For comprehensive examples and advanced patterns, see the [Build-time constants guide](/guides/runtime/build-time-constants).
|
||||
{% /callout %}
|
||||
|
||||
## Deploying to production
|
||||
|
||||
Compiled executables reduce memory usage and improve Bun's start time.
|
||||
|
||||
@@ -183,6 +183,30 @@ Bun supports installing dependencies from Git, GitHub, and local or remotely-hos
|
||||
}
|
||||
```
|
||||
|
||||
## Installation strategies
|
||||
|
||||
Bun supports two package installation strategies that determine how dependencies are organized in `node_modules`:
|
||||
|
||||
### Hoisted installs (default for single projects)
|
||||
|
||||
The traditional npm/Yarn approach that flattens dependencies into a shared `node_modules` directory:
|
||||
|
||||
```bash
|
||||
$ bun install --linker hoisted
|
||||
```
|
||||
|
||||
### Isolated installs
|
||||
|
||||
A pnpm-like approach that creates strict dependency isolation to prevent phantom dependencies:
|
||||
|
||||
```bash
|
||||
$ bun install --linker isolated
|
||||
```
|
||||
|
||||
Isolated installs create a central package store in `node_modules/.bun/` with symlinks in the top-level `node_modules`. This ensures packages can only access their declared dependencies.
|
||||
|
||||
For complete documentation on isolated installs, refer to [Package manager > Isolated installs](https://bun.com/docs/install/isolated).
|
||||
|
||||
## Configuration
|
||||
|
||||
The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below.
|
||||
@@ -213,11 +237,15 @@ dryRun = false
|
||||
|
||||
# equivalent to `--concurrent-scripts` flag
|
||||
concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
|
||||
|
||||
# installation strategy: "hoisted" or "isolated"
|
||||
# default: "hoisted"
|
||||
linker = "hoisted"
|
||||
```
|
||||
|
||||
## CI/CD
|
||||
|
||||
Looking to speed up your CI? Use the official [`oven-sh/setup-bun`](https://github.com/oven-sh/setup-bun) action to install `bun` in a GitHub Actions pipeline.
|
||||
Use the official [`oven-sh/setup-bun`](https://github.com/oven-sh/setup-bun) action to install `bun` in a GitHub Actions pipeline:
|
||||
|
||||
```yaml#.github/workflows/release.yml
|
||||
name: bun-types
|
||||
@@ -236,4 +264,31 @@ jobs:
|
||||
run: bun run build
|
||||
```
|
||||
|
||||
For CI/CD environments that want to enforce reproducible builds, use `bun ci` to fail the build if the package.json is out of sync with the lockfile:
|
||||
|
||||
```bash
|
||||
$ bun ci
|
||||
```
|
||||
|
||||
This is equivalent to `bun install --frozen-lockfile`. It installs exact versions from `bun.lock` and fails if `package.json` doesn't match the lockfile. To use `bun ci` or `bun install --frozen-lockfile`, you must commit `bun.lock` to version control.
|
||||
|
||||
In your workflow, run `bun ci` instead of `bun install`:
|
||||
|
||||
```yaml#.github/workflows/release.yml
|
||||
name: bun-types
|
||||
jobs:
|
||||
build:
|
||||
name: build-app
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
- name: Install bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
- name: Install dependencies
|
||||
run: bun ci
|
||||
- name: Build app
|
||||
run: bun run build
|
||||
```
|
||||
|
||||
{% bunCLIUsage command="install" /%}
|
||||
|
||||
@@ -158,7 +158,7 @@ See [Test > Lifecycle](https://bun.com/docs/test/lifecycle) for complete documen
|
||||
|
||||
## Mocks
|
||||
|
||||
Create mock functions with the `mock` function. Mocks are automatically reset between tests.
|
||||
Create mock functions with the `mock` function.
|
||||
|
||||
```ts
|
||||
import { test, expect, mock } from "bun:test";
|
||||
@@ -248,4 +248,33 @@ $ bun test foo
|
||||
|
||||
Any test file in the directory with an _absolute path_ that contains one of the targets will run. Glob patterns are not yet supported. -->
|
||||
|
||||
## AI Agent Integration
|
||||
|
||||
When using Bun's test runner with AI coding assistants, you can enable quieter output to improve readability and reduce context noise. This feature minimizes test output verbosity while preserving essential failure information.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Set any of the following environment variables to enable AI-friendly output:
|
||||
|
||||
- `CLAUDECODE=1` - For Claude Code
|
||||
- `REPL_ID=1` - For Replit
|
||||
- `AGENT=1` - Generic AI agent flag
|
||||
|
||||
### Behavior
|
||||
|
||||
When an AI agent environment is detected:
|
||||
|
||||
- Only test failures are displayed in detail
|
||||
- Passing, skipped, and todo test indicators are hidden
|
||||
- Summary statistics remain intact
|
||||
|
||||
```bash
|
||||
# Example: Enable quiet output for Claude Code
|
||||
$ CLAUDECODE=1 bun test
|
||||
|
||||
# Still shows failures and summary, but hides verbose passing test output
|
||||
```
|
||||
|
||||
This feature is particularly useful in AI-assisted development workflows where reduced output verbosity improves context efficiency while maintaining visibility into test failures.
|
||||
|
||||
{% bunCLIUsage command="test" /%}
|
||||
|
||||
@@ -10,6 +10,86 @@ To update a specific dependency to the latest version:
|
||||
$ bun update [package]
|
||||
```
|
||||
|
||||
## `--interactive`
|
||||
|
||||
For a more controlled update experience, use the `--interactive` flag to select which packages to update:
|
||||
|
||||
```sh
|
||||
$ bun update --interactive
|
||||
$ bun update -i
|
||||
```
|
||||
|
||||
This launches an interactive terminal interface that shows all outdated packages with their current and target versions. You can then select which packages to update.
|
||||
|
||||
### Interactive Interface
|
||||
|
||||
The interface displays packages grouped by dependency type:
|
||||
|
||||
```
|
||||
? Select packages to update - Space to toggle, Enter to confirm, a to select all, n to select none, i to invert, l to toggle latest
|
||||
|
||||
dependencies Current Target Latest
|
||||
□ react 17.0.2 18.2.0 18.3.1
|
||||
□ lodash 4.17.20 4.17.21 4.17.21
|
||||
|
||||
devDependencies Current Target Latest
|
||||
□ typescript 4.8.0 5.0.0 5.3.3
|
||||
□ @types/node 16.11.7 18.0.0 20.11.5
|
||||
|
||||
optionalDependencies Current Target Latest
|
||||
□ some-optional-package 1.0.0 1.1.0 1.2.0
|
||||
```
|
||||
|
||||
**Sections:**
|
||||
|
||||
- Packages are grouped under section headers: `dependencies`, `devDependencies`, `peerDependencies`, `optionalDependencies`
|
||||
- Each section shows column headers aligned with the package data
|
||||
|
||||
**Columns:**
|
||||
|
||||
- **Package**: Package name (may have a suffix like ` dev`, ` peer`, or ` optional` for clarity)
|
||||
- **Current**: Currently installed version
|
||||
- **Target**: Version that would be installed (respects semver constraints)
|
||||
- **Latest**: Latest available version
|
||||
|
||||
### Keyboard Controls
|
||||
|
||||
**Selection:**
|
||||
|
||||
- **Space**: Toggle package selection
|
||||
- **Enter**: Confirm selections and update
|
||||
- **a/A**: Select all packages
|
||||
- **n/N**: Select none
|
||||
- **i/I**: Invert selection
|
||||
|
||||
**Navigation:**
|
||||
|
||||
- **↑/↓ Arrow keys** or **j/k**: Move cursor
|
||||
- **l/L**: Toggle between target and latest version for current package
|
||||
|
||||
**Exit:**
|
||||
|
||||
- **Ctrl+C** or **Ctrl+D**: Cancel without updating
|
||||
|
||||
### Visual Indicators
|
||||
|
||||
- **☑** Selected packages (will be updated)
|
||||
- **□** Unselected packages
|
||||
- **>** Current cursor position
|
||||
- **Colors**: Red (major), yellow (minor), green (patch) version changes
|
||||
- **Underlined**: Currently selected update target
|
||||
|
||||
### Package Grouping
|
||||
|
||||
Packages are organized in sections by dependency type:
|
||||
|
||||
- **dependencies** - Regular runtime dependencies
|
||||
- **devDependencies** - Development dependencies
|
||||
- **peerDependencies** - Peer dependencies
|
||||
- **optionalDependencies** - Optional dependencies
|
||||
|
||||
Within each section, individual packages may have additional suffixes (` dev`, ` peer`, ` optional`) for extra clarity.
|
||||
|
||||
## `--latest`
|
||||
|
||||
By default, `bun update` will update to the latest version of a dependency that satisfies the version range specified in your `package.json`.
|
||||
@@ -20,6 +100,8 @@ To update to the latest version, regardless of if it's compatible with the curre
|
||||
$ bun update --latest
|
||||
```
|
||||
|
||||
In interactive mode, you can toggle individual packages between their target version (respecting semver) and latest version using the **l** key.
|
||||
|
||||
For example, with the following `package.json`:
|
||||
|
||||
```json
|
||||
|
||||
docs/guides/runtime/build-time-constants.md (293 lines, new file)
@@ -0,0 +1,293 @@
|
||||
---
|
||||
name: Build-time constants with --define
|
||||
---
|
||||
|
||||
The `--define` flag can be used with `bun build` and `bun build --compile` to inject build-time constants into your application. This is especially useful for embedding metadata like build versions, timestamps, or configuration flags directly into your compiled executables.
|
||||
|
||||
```sh
|
||||
$ bun build --compile --define BUILD_VERSION='"1.2.3"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/index.ts --outfile myapp
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Why use build-time constants?
|
||||
|
||||
Build-time constants are embedded directly into your compiled code, making them:
|
||||
|
||||
- **Zero runtime overhead** - No environment variable lookups or file reads
|
||||
- **Immutable** - Values are baked into the binary at compile time
|
||||
- **Optimizable** - Dead code elimination can remove unused branches
|
||||
- **Secure** - No external dependencies or configuration files to manage
|
||||
|
||||
This is similar to `gcc -D` or `#define` in C/C++, but for JavaScript/TypeScript.
|
||||
|
||||
---
|
||||
|
||||
## Basic usage
|
||||
|
||||
### With `bun build`
|
||||
|
||||
```sh
|
||||
# Bundle with build-time constants
|
||||
$ bun build --define BUILD_VERSION='"1.0.0"' --define NODE_ENV='"production"' src/index.ts --outdir ./dist
|
||||
```
|
||||
|
||||
### With `bun build --compile`
|
||||
|
||||
```sh
|
||||
# Compile to executable with build-time constants
|
||||
$ bun build --compile --define BUILD_VERSION='"1.0.0"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/cli.ts --outfile mycli
|
||||
```
|
||||
|
||||
### JavaScript API
|
||||
|
||||
```ts
|
||||
await Bun.build({
|
||||
entrypoints: ["./src/index.ts"],
|
||||
outdir: "./dist",
|
||||
define: {
|
||||
BUILD_VERSION: '"1.0.0"',
|
||||
BUILD_TIME: '"2024-01-15T10:30:00Z"',
|
||||
DEBUG: "false",
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common use cases
|
||||
|
||||
### Version information
|
||||
|
||||
Embed version and build metadata directly into your executable:
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#src/version.ts
|
||||
// These constants are replaced at build time
|
||||
declare const BUILD_VERSION: string;
|
||||
declare const BUILD_TIME: string;
|
||||
declare const GIT_COMMIT: string;
|
||||
|
||||
export function getVersion() {
|
||||
return {
|
||||
version: BUILD_VERSION,
|
||||
buildTime: BUILD_TIME,
|
||||
commit: GIT_COMMIT,
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
```sh#Build command
|
||||
$ bun build --compile \
|
||||
--define BUILD_VERSION='"1.2.3"' \
|
||||
--define BUILD_TIME='"2024-01-15T10:30:00Z"' \
|
||||
--define GIT_COMMIT='"abc123"' \
|
||||
src/cli.ts --outfile mycli
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### Feature flags
|
||||
|
||||
Use build-time constants to enable/disable features:
|
||||
|
||||
```ts
|
||||
// Replaced at build time
|
||||
declare const ENABLE_ANALYTICS: boolean;
|
||||
declare const ENABLE_DEBUG: boolean;
|
||||
|
||||
function trackEvent(event: string) {
|
||||
if (ENABLE_ANALYTICS) {
|
||||
// This entire block is removed if ENABLE_ANALYTICS is false
|
||||
console.log("Tracking:", event);
|
||||
}
|
||||
}
|
||||
|
||||
if (ENABLE_DEBUG) {
|
||||
console.log("Debug mode enabled");
|
||||
}
|
||||
```
|
||||
|
||||
```sh
|
||||
# Production build - analytics enabled, debug disabled
|
||||
$ bun build --compile --define ENABLE_ANALYTICS=true --define ENABLE_DEBUG=false src/app.ts --outfile app-prod
|
||||
|
||||
# Development build - both enabled
|
||||
$ bun build --compile --define ENABLE_ANALYTICS=false --define ENABLE_DEBUG=true src/app.ts --outfile app-dev
|
||||
```
|
||||
|
||||
### Configuration
|
||||
|
||||
Replace configuration objects at build time:
|
||||
|
||||
```ts
|
||||
declare const CONFIG: {
|
||||
apiUrl: string;
|
||||
timeout: number;
|
||||
retries: number;
|
||||
};
|
||||
|
||||
// CONFIG is replaced with the actual object at build time
|
||||
const response = await fetch(CONFIG.apiUrl, {
|
||||
timeout: CONFIG.timeout,
|
||||
});
|
||||
```
|
||||
|
||||
```sh
|
||||
$ bun build --compile --define 'CONFIG={"apiUrl":"https://api.example.com","timeout":5000,"retries":3}' src/app.ts --outfile app
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Advanced patterns
|
||||
|
||||
### Environment-specific builds
|
||||
|
||||
Create different executables for different environments:
|
||||
|
||||
```json
|
||||
{
|
||||
"scripts": {
|
||||
"build:dev": "bun build --compile --define NODE_ENV='\"development\"' --define API_URL='\"http://localhost:3000\"' src/app.ts --outfile app-dev",
|
||||
"build:staging": "bun build --compile --define NODE_ENV='\"staging\"' --define API_URL='\"https://staging.example.com\"' src/app.ts --outfile app-staging",
|
||||
"build:prod": "bun build --compile --define NODE_ENV='\"production\"' --define API_URL='\"https://api.example.com\"' src/app.ts --outfile app-prod"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Using shell commands for dynamic values
|
||||
|
||||
Generate build-time constants from shell commands:
|
||||
|
||||
```sh
|
||||
# Use git to get current commit and timestamp
|
||||
$ bun build --compile \
|
||||
--define BUILD_VERSION="\"$(git describe --tags --always)\"" \
|
||||
--define BUILD_TIME="\"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"" \
|
||||
--define GIT_COMMIT="\"$(git rev-parse HEAD)\"" \
|
||||
src/cli.ts --outfile mycli
|
||||
```
|
||||
|
||||
### Build automation script
|
||||
|
||||
Create a build script that automatically injects build metadata:
|
||||
|
||||
```ts
|
||||
// build.ts
|
||||
import { $ } from "bun";
|
||||
|
||||
const version = await $`git describe --tags --always`.text();
|
||||
const buildTime = new Date().toISOString();
|
||||
const gitCommit = await $`git rev-parse HEAD`.text();
|
||||
|
||||
await Bun.build({
|
||||
entrypoints: ["./src/cli.ts"],
|
||||
outdir: "./dist",
|
||||
define: {
|
||||
BUILD_VERSION: JSON.stringify(version.trim()),
|
||||
BUILD_TIME: JSON.stringify(buildTime),
|
||||
GIT_COMMIT: JSON.stringify(gitCommit.trim()),
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`Built with version ${version.trim()}`);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Important considerations
|
||||
|
||||
### Value format
|
||||
|
||||
Values must be valid JSON that will be parsed and inlined as JavaScript expressions:
|
||||
|
||||
```sh
|
||||
# ✅ Strings must be JSON-quoted
|
||||
--define VERSION='"1.0.0"'
|
||||
|
||||
# ✅ Numbers are JSON literals
|
||||
--define PORT=3000
|
||||
|
||||
# ✅ Booleans are JSON literals
|
||||
--define DEBUG=true
|
||||
|
||||
# ✅ Objects and arrays (use single quotes to wrap the JSON)
|
||||
--define 'CONFIG={"host":"localhost","port":3000}'
|
||||
|
||||
# ✅ Arrays work too
|
||||
--define 'FEATURES=["auth","billing","analytics"]'
|
||||
|
||||
# ❌ This won't work - missing quotes around string
|
||||
--define VERSION=1.0.0
|
||||
```
|
||||
|
||||
### Property keys
|
||||
|
||||
You can use property access patterns as keys, not just simple identifiers:
|
||||
|
||||
```sh
|
||||
# ✅ Replace process.env.NODE_ENV with "production"
|
||||
--define 'process.env.NODE_ENV="production"'
|
||||
|
||||
# ✅ Replace process.env.API_KEY with the actual key
|
||||
--define 'process.env.API_KEY="abc123"'
|
||||
|
||||
# ✅ Replace nested properties
|
||||
--define 'window.myApp.version="1.0.0"'
|
||||
|
||||
# ✅ Replace array access
|
||||
--define 'process.argv[2]="--production"'
|
||||
```
|
||||
|
||||
This is particularly useful for environment variables:
|
||||
|
||||
```ts
|
||||
// Before compilation
|
||||
if (process.env.NODE_ENV === "production") {
|
||||
console.log("Production mode");
|
||||
}
|
||||
|
||||
// After compilation with --define 'process.env.NODE_ENV="production"'
|
||||
if ("production" === "production") {
|
||||
console.log("Production mode");
|
||||
}
|
||||
|
||||
// After optimization
|
||||
console.log("Production mode");
|
||||
```
|
||||
|
||||
### TypeScript declarations
|
||||
|
||||
For TypeScript projects, declare your constants to avoid type errors:
|
||||
|
||||
```ts
|
||||
// types/build-constants.d.ts
|
||||
declare const BUILD_VERSION: string;
|
||||
declare const BUILD_TIME: string;
|
||||
declare const NODE_ENV: "development" | "staging" | "production";
|
||||
declare const DEBUG: boolean;
|
||||
```
|
||||
|
||||
### Cross-platform compatibility
|
||||
|
||||
When building for multiple platforms, constants work the same way:
|
||||
|
||||
```sh
|
||||
# Linux
|
||||
$ bun build --compile --target=bun-linux-x64 --define PLATFORM='"linux"' src/app.ts --outfile app-linux
|
||||
|
||||
# macOS
|
||||
$ bun build --compile --target=bun-darwin-x64 --define PLATFORM='"darwin"' src/app.ts --outfile app-macos
|
||||
|
||||
# Windows
|
||||
$ bun build --compile --target=bun-windows-x64 --define PLATFORM='"windows"' src/app.ts --outfile app-windows.exe
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Related
|
||||
|
||||
- [Define constants at runtime](/guides/runtime/define-constant) - Using `--define` with `bun run`
|
||||
- [Building executables](/bundler/executables) - Complete guide to `bun build --compile`
|
||||
- [Bundler API](/bundler) - Full bundler documentation including `define` option
|
||||
@@ -14,7 +14,7 @@ if (typeof Bun !== "undefined") {
|
||||
|
||||
---
|
||||
|
||||
In TypeScript environments, the previous approach will result in a type error unless `bun-types` is globally installed. To avoid this, you can check `process.versions` instead.
|
||||
In TypeScript environments, the previous approach will result in a type error unless `@types/bun` is installed. To avoid this, you can check `process.versions` instead.
|
||||
|
||||
```ts
|
||||
if (process.versions.bun) {
|
||||
|
||||
@@ -81,6 +81,14 @@ $ bun install --verbose # debug logging
|
||||
$ bun install --silent # no logging
|
||||
```
|
||||
|
||||
To use isolated installs instead of the default hoisted strategy:
|
||||
|
||||
```bash
|
||||
$ bun install --linker isolated
|
||||
```
|
||||
|
||||
Isolated installs create strict dependency isolation similar to pnpm, preventing phantom dependencies and ensuring more deterministic builds. For complete documentation, see [Isolated installs](https://bun.com/docs/install/isolated).
|
||||
|
||||
{% details summary="Configuring behavior" %}
|
||||
The default behavior of `bun install` can be configured in `bunfig.toml`:
|
||||
|
||||
@@ -110,6 +118,10 @@ dryRun = false
|
||||
|
||||
# equivalent to `--concurrent-scripts` flag
|
||||
concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
|
||||
|
||||
# installation strategy: "hoisted" or "isolated"
|
||||
# default: "hoisted"
|
||||
linker = "hoisted"
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
docs/install/isolated.md (195 lines, new file)
@@ -0,0 +1,195 @@
|
||||
Bun provides an alternative package installation strategy called **isolated installs** that creates strict dependency isolation similar to pnpm's approach. This mode prevents phantom dependencies and ensures reproducible, deterministic builds.
|
||||
|
||||
## What are isolated installs?
|
||||
|
||||
Isolated installs create a non-hoisted dependency structure where packages can only access their explicitly declared dependencies. This differs from the traditional "hoisted" installation strategy used by npm and Yarn, where dependencies are flattened into a shared `node_modules` directory.
|
||||
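To make the phantom-dependency difference concrete, here is a hedged sketch; the package name `ms` is hypothetical and stands in for any transitive dependency your project does not declare:

```ts
// package.json declares only "react".
import React from "react"; // ✅ resolves in both layouts

// With the hoisted layout, this import may still work because some other
// package pulled "ms" into the shared node_modules (a phantom dependency).
// With the isolated layout, it fails with "Cannot find package 'ms'".
import ms from "ms";

console.log(React.version, ms(1000));
```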
|
||||
### Key benefits
|
||||
|
||||
- **Prevents phantom dependencies** — Packages cannot accidentally import dependencies they haven't declared
|
||||
- **Deterministic resolution** — Same dependency tree regardless of what else is installed
|
||||
- **Better for monorepos** — Workspace isolation prevents cross-contamination between packages
|
||||
- **Reproducible builds** — More predictable resolution behavior across environments
|
||||
|
||||
## Using isolated installs
|
||||
|
||||
### Command line
|
||||
|
||||
Use the `--linker` flag to specify the installation strategy:
|
||||
|
||||
```bash
|
||||
# Use isolated installs
|
||||
$ bun install --linker isolated
|
||||
|
||||
# Use traditional hoisted installs
|
||||
$ bun install --linker hoisted
|
||||
```
|
||||
|
||||
### Configuration file
|
||||
|
||||
Set the default linker strategy in your `bunfig.toml`:
|
||||
|
||||
```toml
|
||||
[install]
|
||||
linker = "isolated"
|
||||
```
|
||||
|
||||
### Default behavior
|
||||
|
||||
By default, Bun uses the **hoisted** installation strategy for all projects. To use isolated installs, you must explicitly specify the `--linker isolated` flag or set it in your configuration file.
|
||||
|
||||
## How isolated installs work
|
||||
|
||||
### Directory structure
|
||||
|
||||
Instead of hoisting dependencies, isolated installs create a two-tier structure:
|
||||
|
||||
```
|
||||
node_modules/
|
||||
├── .bun/ # Central package store
|
||||
│ ├── package@1.0.0/ # Versioned package installations
|
||||
│ │ └── node_modules/
|
||||
│ │ └── package/ # Actual package files
|
||||
│ ├── @scope+package@2.1.0/ # Scoped packages (+ replaces /)
|
||||
│ │ └── node_modules/
|
||||
│ │ └── @scope/
|
||||
│ │ └── package/
|
||||
│ └── ...
|
||||
└── package-name -> .bun/package@1.0.0/node_modules/package # Symlinks
|
||||
```
|
||||
|
||||
### Resolution algorithm
|
||||
|
||||
1. **Central store** — All packages are installed in `node_modules/.bun/package@version/` directories
|
||||
2. **Symlinks** — Top-level `node_modules` contains symlinks pointing to the central store
|
||||
3. **Peer resolution** — Complex peer dependencies create specialized directory names
|
||||
4. **Deduplication** — Packages with identical package IDs and peer dependency sets are shared
|
||||
|
||||
### Workspace handling
|
||||
|
||||
In monorepos, workspace dependencies are handled specially:
|
||||
|
||||
- **Workspace packages** — Symlinked directly to their source directories, not the store
|
||||
- **Workspace dependencies** — Can access other workspace packages in the monorepo
|
||||
- **External dependencies** — Installed in the isolated store with proper isolation
|
||||
|
||||
## Comparison with hoisted installs
|
||||
|
||||
| Aspect | Hoisted (npm/Yarn) | Isolated (pnpm-like) |
|
||||
| ------------------------- | ------------------------------------------ | --------------------------------------- |
|
||||
| **Dependency access** | Packages can access any hoisted dependency | Packages only see declared dependencies |
|
||||
| **Phantom dependencies** | ❌ Possible | ✅ Prevented |
|
||||
| **Disk usage** | ✅ Lower (shared installs) | ✅ Similar (uses symlinks) |
|
||||
| **Determinism** | ❌ Less deterministic | ✅ More deterministic |
|
||||
| **Node.js compatibility** | ✅ Standard behavior | ✅ Compatible via symlinks |
|
||||
| **Best for** | Single projects, legacy code | Monorepos, strict dependency management |
|
||||
|
||||
## Advanced features
|
||||
|
||||
### Peer dependency handling
|
||||
|
||||
Isolated installs handle peer dependencies through sophisticated resolution:
|
||||
|
||||
```bash
|
||||
# Package with peer dependencies creates specialized paths
|
||||
node_modules/.bun/package@1.0.0_react@18.2.0/
|
||||
```
|
||||
|
||||
The directory name encodes both the package version and its peer dependency versions, ensuring each unique combination gets its own installation.
|
||||
|
||||
### Backend strategies
|
||||
|
||||
Bun uses different file operation strategies for performance:
|
||||
|
||||
- **Clonefile** (macOS) — Copy-on-write filesystem clones for maximum efficiency
|
||||
- **Hardlink** (Linux/Windows) — Hardlinks to save disk space
|
||||
- **Copyfile** (fallback) — Full file copies when other methods aren't available
|
||||
|
||||
### Debugging isolated installs
|
||||
|
||||
Enable verbose logging to understand the installation process:
|
||||
|
||||
```bash
|
||||
$ bun install --linker isolated --verbose
|
||||
```
|
||||
|
||||
This shows:
|
||||
|
||||
- Store entry creation
|
||||
- Symlink operations
|
||||
- Peer dependency resolution
|
||||
- Deduplication decisions
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Compatibility issues
|
||||
|
||||
Some packages may not work correctly with isolated installs due to:
|
||||
|
||||
- **Hardcoded paths** — Packages that assume a flat `node_modules` structure
|
||||
- **Dynamic imports** — Runtime imports that don't follow Node.js resolution
|
||||
- **Build tools** — Tools that scan `node_modules` directly
|
||||
|
||||
If you encounter issues, you can:
|
||||
|
||||
1. **Switch to hoisted mode** for specific projects:
|
||||
|
||||
```bash
|
||||
$ bun install --linker hoisted
|
||||
```
|
||||
|
||||
2. **Report compatibility issues** to help improve isolated install support
|
||||
|
||||
### Performance considerations
|
||||
|
||||
- **Install time** — May be slightly slower due to symlink operations
|
||||
- **Disk usage** — Similar to hoisted (uses symlinks, not file copies)
|
||||
- **Memory usage** — Higher during install due to complex peer resolution
|
||||
|
||||
## Migration guide
|
||||
|
||||
### From npm/Yarn
|
||||
|
||||
```bash
|
||||
# Remove existing node_modules and lockfiles
|
||||
$ rm -rf node_modules package-lock.json yarn.lock
|
||||
|
||||
# Install with isolated linker
|
||||
$ bun install --linker isolated
|
||||
```
|
||||
|
||||
### From pnpm
|
||||
|
||||
Isolated installs are conceptually similar to pnpm, so migration should be straightforward:
|
||||
|
||||
```bash
|
||||
# Remove pnpm files
|
||||
$ rm -rf node_modules pnpm-lock.yaml
|
||||
|
||||
# Install with Bun's isolated linker
|
||||
$ bun install --linker isolated
|
||||
```
|
||||
|
||||
The main difference is that Bun uses symlinks in `node_modules` while pnpm uses a global store with symlinks.
|
||||
|
||||
## When to use isolated installs
|
||||
|
||||
**Use isolated installs when:**
|
||||
|
||||
- Working in monorepos with multiple packages
|
||||
- Strict dependency management is required
|
||||
- Preventing phantom dependencies is important
|
||||
- Building libraries that need deterministic dependencies
|
||||
|
||||
**Use hoisted installs when:**
|
||||
|
||||
- Working with legacy code that assumes flat `node_modules`
|
||||
- Compatibility with existing build tools is required
|
||||
- Working in environments where symlinks aren't well supported
|
||||
- You prefer the simpler traditional npm behavior
|
||||
|
||||
## Related documentation
|
||||
|
||||
- [Package manager > Workspaces](https://bun.com/docs/install/workspaces) — Monorepo workspace management
|
||||
- [Package manager > Lockfile](https://bun.com/docs/install/lockfile) — Understanding Bun's lockfile format
|
||||
- [CLI > install](https://bun.com/docs/cli/install) — Complete `bun install` command reference
|
||||
@@ -183,6 +183,9 @@ export default {
|
||||
description:
|
||||
"Bun's package manager installs all packages into a shared global cache to avoid redundant re-downloads.",
|
||||
}),
|
||||
page("install/isolated", "Isolated installs", {
|
||||
description: "Create strict dependency isolation, preventing phantom dependencies.",
|
||||
}),
|
||||
page("install/workspaces", "Workspaces", {
|
||||
description: "Bun's package manager supports workspaces and monorepo development workflows.",
|
||||
}),
|
||||
|
||||
@@ -20,7 +20,7 @@ this one:
|
||||
Given a file implementing a simple function, such as `add`
|
||||
|
||||
```zig#src/bun.js/math.zig
|
||||
pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
|
||||
pub fn add(global: *jsc.JSGlobalObject, a: i32, b: i32) !i32 {
|
||||
return std.math.add(i32, a, b) catch {
|
||||
// Binding functions can return `error.OutOfMemory` and `error.JSError`.
|
||||
// Others like `error.Overflow` from `std.math.add` must be converted.
|
||||
@@ -33,7 +33,7 @@ const gen = bun.gen.math; // "math" being this file's basename
|
||||
|
||||
const std = @import("std");
|
||||
const bun = @import("bun");
|
||||
const JSC = bun.JSC;
|
||||
const jsc = bun.jsc;
|
||||
```
|
||||
|
||||
Then describe the API schema using a `.bind.ts` function. The binding file goes next to the Zig file.
|
||||
|
||||
@@ -148,7 +148,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:vm`](https://nodejs.org/api/vm.html)
|
||||
|
||||
🟡 Core functionality works, but experimental VM ES modules are not implemented, including `vm.Module`, `vm.SourceTextModule`, `vm.SyntheticModule`,`importModuleDynamically`, and `vm.measureMemory`. Options like `timeout`, `breakOnSigint`, `cachedData` are not implemented yet.
|
||||
🟡 Core functionality and ES modules are implemented, including `vm.Script`, `vm.createContext`, `vm.runInContext`, `vm.runInNewContext`, `vm.runInThisContext`, `vm.compileFunction`, `vm.isContext`, `vm.Module`, `vm.SourceTextModule`, `vm.SyntheticModule`, and `importModuleDynamically` support. Options like `timeout` and `breakOnSigint` are fully supported. Missing `vm.measureMemory` and some `cachedData` functionality.
|
||||
|
||||
### [`node:wasi`](https://nodejs.org/api/wasi.html)
|
||||
|
||||
@@ -214,6 +214,10 @@ The table below lists all globals implemented by Node.js and Bun's current compa
|
||||
|
||||
🟢 Fully implemented.
|
||||
|
||||
### [`Atomics`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Atomics)
|
||||
|
||||
🟢 Fully implemented.
|
||||
|
||||
### [`BroadcastChannel`](https://developer.mozilla.org/en-US/docs/Web/API/BroadcastChannel)
|
||||
|
||||
🟢 Fully implemented.
|
||||
|
||||
@@ -426,6 +426,54 @@ test("exactly two assertions", () => {
|
||||
|
||||
This helps ensure all your assertions run, especially in complex async code with multiple code paths.
|
||||
|
||||
## Type Testing
|
||||
|
||||
Bun includes `expectTypeOf` for testing TypeScript types, with an API compatible with Vitest's.
|
||||
|
||||
### expectTypeOf
|
||||
|
||||
{% callout %}
|
||||
|
||||
**Note** — These functions are no-ops at runtime - you need to run TypeScript separately to verify the type checks.
|
||||
|
||||
{% endcallout %}
|
||||
|
||||
The `expectTypeOf` function provides type-level assertions that are checked by TypeScript's type checker.
|
||||
|
||||
To test your types:
|
||||
|
||||
1. Write your type assertions using `expectTypeOf`
|
||||
2. Run `bunx tsc --noEmit` to check that your types are correct
|
||||
|
||||
```ts
|
||||
import { expectTypeOf } from "bun:test";
|
||||
|
||||
// Basic type assertions
|
||||
expectTypeOf<string>().toEqualTypeOf<string>();
|
||||
expectTypeOf(123).toBeNumber();
|
||||
expectTypeOf("hello").toBeString();
|
||||
|
||||
// Object type matching
|
||||
expectTypeOf({ a: 1, b: "hello" }).toMatchObjectType<{ a: number }>();
|
||||
|
||||
// Function types
|
||||
function greet(name: string): string {
|
||||
return `Hello ${name}`;
|
||||
}
|
||||
|
||||
expectTypeOf(greet).toBeFunction();
|
||||
expectTypeOf(greet).parameters.toEqualTypeOf<[string]>();
|
||||
expectTypeOf(greet).returns.toEqualTypeOf<string>();
|
||||
|
||||
// Array types
|
||||
expectTypeOf([1, 2, 3]).items.toBeNumber();
|
||||
|
||||
// Promise types
|
||||
expectTypeOf(Promise.resolve(42)).resolves.toBeNumber();
|
||||
```
|
||||
|
||||
For full documentation on expectTypeOf matchers, see the [API Reference](/reference/bun/test/expectTypeOf).
|
||||
|
||||
## Matchers
|
||||
|
||||
Bun implements the following matchers. Full Jest compatibility is on the roadmap; track progress [here](https://github.com/oven-sh/bun/issues/1825).
|
||||
@@ -629,17 +677,17 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toHaveReturnedWith()`](https://jestjs.io/docs/expect#tohavereturnedwithvalue)
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toHaveLastReturnedWith()`](https://jestjs.io/docs/expect#tohavelastreturnedwithvalue)
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toHaveNthReturnedWith()`](https://jestjs.io/docs/expect#tohaventhreturnedwithnthcall-value)
|
||||
|
||||
---
|
||||
|
||||
examples/.gitignore (1 line, vendored)
@@ -1 +0,0 @@
|
||||
bun-examples-all
|
||||
@@ -1,7 +0,0 @@
|
||||
#[no_mangle]
|
||||
pub extern "C" fn add(a: i32, b: i32) -> i32 {
|
||||
a + b
|
||||
}
|
||||
|
||||
// to compile:
|
||||
// rustc --crate-type cdylib add.rs
|
||||
@@ -1,12 +0,0 @@
|
||||
import { dlopen, suffix } from "bun:ffi";
|
||||
|
||||
const {
|
||||
symbols: { add },
|
||||
} = dlopen(`./libadd.${suffix}`, {
|
||||
add: {
|
||||
args: ["i32", "i32"],
|
||||
returns: "i32",
|
||||
},
|
||||
});
|
||||
|
||||
console.log(add(1, 2));
|
||||
@@ -1,6 +0,0 @@
|
||||
pub export fn add(a: i32, b: i32) i32 {
|
||||
return a + b;
|
||||
}
|
||||
|
||||
// to compile:
|
||||
// zig build-lib -OReleaseFast ./add.zig -dynamic --name add
|
||||
@@ -1,89 +0,0 @@
|
||||
// To run this example:
|
||||
//
|
||||
// bun --hot bun-hot-websockets.js
|
||||
//
|
||||
|
||||
const css = ([inner]) => {
|
||||
return inner;
|
||||
};
|
||||
|
||||
const styles = css`
|
||||
#bun {
|
||||
margin: 0 auto;
|
||||
margin-top: 200px;
|
||||
object-fit: cover;
|
||||
}
|
||||
html,
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
body {
|
||||
background: #f1239f;
|
||||
font-family: "Inter", sans-serif;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
align-content: center;
|
||||
color: white;
|
||||
}
|
||||
h1 {
|
||||
padding: 0;
|
||||
text-align: center;
|
||||
font-size: 3rem;
|
||||
-webkit-text-stroke: 2px black;
|
||||
}
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
`;
|
||||
|
||||
Bun.serve({
|
||||
websocket: {
|
||||
message(ws, msg) {
|
||||
ws.send(styles);
|
||||
},
|
||||
},
|
||||
fetch(req, server) {
|
||||
if (req.url.endsWith("/hot")) {
|
||||
if (server.upgrade(req))
|
||||
return new Response("", {
|
||||
status: 101,
|
||||
});
|
||||
}
|
||||
|
||||
return new Response(
|
||||
`
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>WebSockets</title>
|
||||
</head>
|
||||
<body>
|
||||
<style></style>
|
||||
<script>
|
||||
const ws = new WebSocket("ws://localhost:3000/hot");
|
||||
const style = document.querySelector("style");
|
||||
ws.onmessage = (e) => {
|
||||
style.innerHTML = e.data;
|
||||
};
|
||||
setInterval(() => {
|
||||
ws.send("ping");
|
||||
}, 8);
|
||||
</script>
|
||||
<div id="app">
|
||||
<img src="https://bun.com/logo.svg" alt="Bun" id='bun' />
|
||||
<h1>bun --hot websockets</h1>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
`,
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "text/html; charset=utf-8",
|
||||
},
|
||||
},
|
||||
);
|
||||
},
|
||||
});
|
||||
@@ -1,9 +0,0 @@
import { resolve } from "path";
const { write, stdout, file } = Bun;
import { argv } from "process";

const path = resolve(argv.at(-1)!);
await write(stdout, file(path));

Bun.stdout;
process.stdout;
@@ -1,11 +0,0 @@
const sequence = [1, 2, 3];
sequence.toReversed(); // => [3, 2, 1]
sequence; // => [1, 2, 3]

const outOfOrder = new Uint8Array([3, 1, 2]);
outOfOrder.toSorted(); // => Uint8Array [1, 2, 3]
outOfOrder; // => Uint8Array [3, 1, 2]

const correctionNeeded = [1, 1, 3];
correctionNeeded.with(1, 2); // => [1, 2, 3]
correctionNeeded; // => [1, 1, 3]
@@ -1,23 +0,0 @@
// Accepts a string, TypedArray, or Blob (file blob support is not implemented but planned)
const input = "hello world".repeat(400);

// Bun.hash() defaults to Wyhash because it's fast
console.log(Bun.hash(input));

console.log(Bun.hash.wyhash(input));
// and returns a bigint
// all of these hashing functions return number if 32-bit or bigint if 64-bit, not typed arrays.
console.log(Bun.hash.adler32(input)); // number
console.log(Bun.hash.crc32(input)); // number
console.log(Bun.hash.cityHash32(input)); // number
console.log(Bun.hash.cityHash64(input)); // bigint
console.log(Bun.hash.xxHash32(input)); // number
console.log(Bun.hash.xxHash64(input)); // bigint
console.log(Bun.hash.xxHash3(input)); // bigint
console.log(Bun.hash.murmur32v3(input)); // number
console.log(Bun.hash.murmur32v2(input)); // number
console.log(Bun.hash.murmur64v2(input)); // bigint
console.log(Bun.hash.rapidhash(input)); // bigint

// Second argument accepts a seed where relevant
console.log(Bun.hash(input, 12345));
@@ -1,37 +0,0 @@
|
||||
// Start a fast HTTP server from a function
|
||||
|
||||
Bun.serve({
|
||||
async fetch(req) {
|
||||
const { pathname } = new URL(req.url);
|
||||
if (!(pathname.startsWith("/https://") || pathname.startsWith("/http://"))) {
|
||||
return new Response("Enter a path that starts with https:// or http://\n", {
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const response = await fetch(req.url.substring("http://localhost:3000/".length), req.clone());
|
||||
|
||||
return new HTMLRewriter()
|
||||
.on("a[href]", {
|
||||
element(element) {
|
||||
element.setAttribute("href", "https://www.youtube.com/watch?v=dQw4w9WgXcQ");
|
||||
},
|
||||
})
|
||||
.transform(response);
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// },
|
||||
|
||||
// this boolean enables the bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
});
|
||||
@@ -1,76 +0,0 @@
|
||||
import { file, serve } from "bun";
|
||||
import { existsSync, statSync } from "fs";
|
||||
|
||||
serve({
|
||||
fetch(req: Request) {
|
||||
let pathname = new URL(req.url).pathname.substring(1);
|
||||
if (pathname == "") {
|
||||
pathname = import.meta.url.replace("file://", "");
|
||||
}
|
||||
|
||||
if (!existsSync(pathname)) {
|
||||
return new Response(null, { status: 404 });
|
||||
}
|
||||
|
||||
const stats = statSync(pathname);
|
||||
|
||||
// https://github.com/gornostay25/svelte-adapter-bun/blob/master/src/sirv.js
|
||||
const headers = new Headers({
|
||||
"Content-Length": "" + stats.size,
|
||||
"Last-Modified": stats.mtime.toUTCString(),
|
||||
ETag: `W/"${stats.size}-${stats.mtime.getTime()}"`,
|
||||
});
|
||||
|
||||
if (req.headers.get("if-none-match") === headers.get("ETag")) {
|
||||
return new Response(null, { status: 304 });
|
||||
}
|
||||
|
||||
const opts = { code: 200, start: 0, end: Infinity, range: false };
|
||||
|
||||
if (req.headers.has("range")) {
|
||||
opts.code = 206;
|
||||
let [x, y] = req.headers.get("range")!.replace("bytes=", "").split("-");
|
||||
let end = (opts.end = parseInt(y, 10) || stats.size - 1);
|
||||
let start = (opts.start = parseInt(x, 10) || 0);
|
||||
|
||||
if (start >= stats.size || end >= stats.size) {
|
||||
headers.set("Content-Range", `bytes */${stats.size}`);
|
||||
return new Response(null, {
|
||||
headers: headers,
|
||||
status: 416,
|
||||
});
|
||||
}
|
||||
|
||||
headers.set("Content-Range", `bytes ${start}-${end}/${stats.size}`);
|
||||
headers.set("Content-Length", "" + (end - start + 1));
|
||||
headers.set("Accept-Ranges", "bytes");
|
||||
opts.range = true;
|
||||
}
|
||||
|
||||
if (opts.range) {
|
||||
return new Response(file(pathname).slice(opts.start, opts.end), {
|
||||
headers,
|
||||
status: opts.code,
|
||||
});
|
||||
}
|
||||
|
||||
return new Response(file(pathname), { headers, status: opts.code });
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(" + String(err.toString()), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables the bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
hostname: "localhost", // defaults to 0.0.0.0
|
||||
});
|
||||
@@ -1,31 +0,0 @@
|
||||
import { file, serve } from "bun";
|
||||
|
||||
serve({
|
||||
fetch(req: Request) {
|
||||
const pathname = new URL(req.url).pathname.substring(1);
|
||||
|
||||
// If the URL is empty, display this file.
|
||||
if (pathname === "") {
|
||||
return new Response(file(import.meta.url.replace("file://", "")));
|
||||
}
|
||||
|
||||
return new Response(file(pathname));
|
||||
},
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(" + String(err.toString()), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables the bun's default error handler
|
||||
// sometime after the initial release, it will auto reload as well
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
hostname: "localhost", // defaults to 0.0.0.0
|
||||
});
|
||||
@@ -1,17 +0,0 @@
import { serve } from "bun";

serve({
  async fetch(req) {
    // body is a ReadableStream
    const body = req.body;

    const writer = Bun.file(`upload.${Date.now()}.txt`).writer();
    for await (const chunk of body!) {
      writer.write(chunk);
    }
    const wrote = await writer.end();

    // @ts-ignore
    return Response.json({ wrote, type: req.headers.get("Content-Type") });
  },
});
@@ -1,12 +0,0 @@
import { serve } from "bun";

const server = serve({
  fetch(req) {
    return new Response(`Pending requests count: ${this.pendingRequests}`);
  },
});

// Stop the server after 5 seconds
setTimeout(() => {
  server.stop();
}, 5000);
@@ -1,34 +0,0 @@
|
||||
// Start a fast HTTP server from a function
|
||||
Bun.serve({
|
||||
fetch(req: Request) {
|
||||
return new Response(`Echo: ${req.url}`);
|
||||
},
|
||||
|
||||
// baseURI: "http://localhost:3000",
|
||||
|
||||
// this is called when fetch() throws or rejects
|
||||
// error(err: Error) {
|
||||
// return new Response("uh oh! :(\n" + err.toString(), { status: 500 });
|
||||
// },
|
||||
|
||||
// this boolean enables bun's default error handler
|
||||
development: process.env.NODE_ENV !== "production",
|
||||
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
|
||||
|
||||
// SSL is enabled if these two are set
|
||||
// certFile: './cert.pem',
|
||||
// keyFile: './key.pem',
|
||||
|
||||
port: 3000, // number or string
|
||||
});
|
||||
// Start a fast HTTP server from the main file's export
|
||||
// export default {
|
||||
// fetch(req) {
|
||||
// return new Response(
|
||||
// `This is another way to start a server!
|
||||
// if the main file export default's an object
|
||||
// with 'fetch'. Bun automatically calls Bun.serve`
|
||||
// );
|
||||
// },
|
||||
// // so autocomplete & type checking works
|
||||
// } as Bun.Serve;
|
||||
@@ -1,193 +0,0 @@
|
||||
const { AWS_LAMBDA_RUNTIME_API, LAMBDA_TASK_ROOT, _HANDLER } = process.env;
|
||||
|
||||
if (!AWS_LAMBDA_RUNTIME_API || AWS_LAMBDA_RUNTIME_API === "") {
|
||||
throw new Error("AWS_LAMBDA_RUNTIME_API is not set");
|
||||
}
|
||||
|
||||
const nextURL = `http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/next`;
|
||||
const sourceDir = LAMBDA_TASK_ROOT;
|
||||
if (!sourceDir) {
|
||||
throw new Error("handler is not set");
|
||||
}
|
||||
if (!_HANDLER) {
|
||||
throw new Error("handler is not set");
|
||||
}
|
||||
|
||||
// don't care if this fails
|
||||
if (process.cwd() !== sourceDir) {
|
||||
try {
|
||||
process.chdir(sourceDir);
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
var handlerDot = _HANDLER.lastIndexOf(".");
|
||||
var sourcefile = handlerDot > 0 ? _HANDLER.substring(0, handlerDot) : _HANDLER;
|
||||
if (sourcefile.length === 0) {
|
||||
throw new Error("handler is not set");
|
||||
}
|
||||
if (!sourcefile.startsWith("/")) {
|
||||
sourcefile = `./${sourcefile}`;
|
||||
}
|
||||
function noop() {}
|
||||
const method = (handlerDot > 0 ? _HANDLER.substring(handlerDot) : "") || "GET";
|
||||
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.time(`Loaded ${sourcefile}`);
|
||||
}
|
||||
var Handler;
|
||||
|
||||
try {
|
||||
Handler = await import(sourcefile);
|
||||
} catch (e: any) {
|
||||
console.error("Error loading sourcefile:", e);
|
||||
try {
|
||||
await fetch(new URL(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/init/error`).href, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
errorMessage: e.message,
|
||||
errorType: e.name,
|
||||
stackTrace: e?.stack?.split("\n") ?? [],
|
||||
}),
|
||||
});
|
||||
} catch (e2) {
|
||||
console.error("Error sending error to runtime:", e2);
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.timeEnd(`Loaded ${sourcefile}`);
|
||||
}
|
||||
|
||||
const handlerFunction = Handler.default?.fetch;
|
||||
if (typeof handlerFunction !== "function") {
|
||||
const e = new Error(`${sourcefile} must export default a function called fetch
|
||||
|
||||
Here is an example:
|
||||
|
||||
export default {
|
||||
fetch(req) {
|
||||
return new Response("Hello World");
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
console.error(e);
|
||||
|
||||
try {
|
||||
await fetch(new URL(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/init/error`).href, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
errorMessage: e.message,
|
||||
errorType: e.name,
|
||||
stackTrace: e?.stack?.split("\n") ?? [],
|
||||
}),
|
||||
});
|
||||
} catch (e2) {
|
||||
console.error("Error sending error to runtime:", e2);
|
||||
}
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
var baseURLString = AWS_LAMBDA_RUNTIME_API;
|
||||
if ("baseURI" in Handler.default) {
|
||||
baseURLString = Handler.default.baseURI?.toString();
|
||||
}
|
||||
|
||||
var baseURL;
|
||||
try {
|
||||
baseURL = new URL(baseURLString);
|
||||
} catch (e: any) {
|
||||
console.error("Error parsing baseURI:", e);
|
||||
try {
|
||||
await fetch(new URL(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/init/error`).href, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
errorMessage: e.message,
|
||||
errorType: e.name,
|
||||
stackTrace: e?.stack?.split("\n") || [],
|
||||
}),
|
||||
});
|
||||
} catch (e2) {
|
||||
console.error("Error sending error to runtime:", e2);
|
||||
}
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
async function runHandler(response: Response) {
|
||||
const traceID = response.headers.get("Lambda-Runtime-Trace-Id");
|
||||
const requestID = response.headers.get("Lambda-Runtime-Aws-Request-Id");
|
||||
var request = new Request(baseURL.href, {
|
||||
method,
|
||||
headers: response.headers,
|
||||
body: parseInt(response.headers.get("Content-Length") || "0", 10) > 0 ? await response.blob() : undefined,
|
||||
});
|
||||
// we are done with the Response object here
|
||||
// allow it to be GC'd
|
||||
(response as any) = undefined;
|
||||
|
||||
var result: Response;
|
||||
try {
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.time(`[${traceID}] Run ${request.url}`);
|
||||
}
|
||||
result = handlerFunction(request, {});
|
||||
if (result && (result as any).then) {
|
||||
await result;
|
||||
}
|
||||
} catch (e1: any) {
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.error(`[${traceID}] Error running handler:`, e1);
|
||||
}
|
||||
fetch(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/${requestID}/error`, {
|
||||
method: "POST",
|
||||
|
||||
body: JSON.stringify({
|
||||
errorMessage: e1.message,
|
||||
errorType: e1.name,
|
||||
stackTrace: e1?.stack?.split("\n") ?? [],
|
||||
}),
|
||||
}).finally(noop);
|
||||
return;
|
||||
} finally {
|
||||
if (typeof process.env.VERBOSE !== "undefined") {
|
||||
console.timeEnd(`[${traceID}] Run ${request.url}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (!result || !("headers" in result)) {
|
||||
await fetch(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/${requestID}/error`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
errorMessage: "Expected Response object",
|
||||
errorType: "ExpectedResponseObject",
|
||||
stackTrace: [],
|
||||
}),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
await fetch(`http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/${requestID}/response`, {
|
||||
method: "POST",
|
||||
headers: result.headers,
|
||||
body: await result.blob(),
|
||||
});
|
||||
(result as any) = undefined;
|
||||
}
|
||||
|
||||
while (true) {
|
||||
fetch(nextURL).then(runHandler, console.error);
|
||||
}
|
||||
|
||||
export {};
|
||||
@@ -1,48 +0,0 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "macros",
|
||||
"dependencies": {
|
||||
"moment": "^2.29.1",
|
||||
"papaparse": "^5.3.1",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"react-refresh": "^0.10.0",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^17.0.24",
|
||||
"@types/react-dom": "^17.0.9",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@types/prop-types": ["@types/prop-types@15.7.5", "", {}, "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w=="],
|
||||
|
||||
"@types/react": ["@types/react@17.0.53", "", { "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", "csstype": "^3.0.2" } }, "sha512-1yIpQR2zdYu1Z/dc1OxC+MA6GR240u3gcnP4l6mvj/PJiVaqHsQPmWttsvHsfnhfPbU2FuGmo0wSITPygjBmsw=="],
|
||||
|
||||
"@types/react-dom": ["@types/react-dom@17.0.19", "", { "dependencies": { "@types/react": "^17" } }, "sha512-PiYG40pnQRdPHnlf7tZnp0aQ6q9tspYr72vD61saO6zFCybLfMqwUCN0va1/P+86DXn18ZWeW30Bk7xlC5eEAQ=="],
|
||||
|
||||
"@types/scheduler": ["@types/scheduler@0.16.2", "", {}, "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew=="],
|
||||
|
||||
"csstype": ["csstype@3.1.1", "", {}, "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw=="],
|
||||
|
||||
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
|
||||
"moment": ["moment@2.29.4", "", {}, "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w=="],
|
||||
|
||||
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
|
||||
|
||||
"papaparse": ["papaparse@5.3.2", "", {}, "sha512-6dNZu0Ki+gyV0eBsFKJhYr+MdQYAzFUGlBMNj3GNrmHxmz1lfRa24CjFObPXtjcetlOv5Ad299MhIK0znp3afw=="],
|
||||
|
||||
"react": ["react@17.0.2", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA=="],
|
||||
|
||||
"react-dom": ["react-dom@17.0.2", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "scheduler": "^0.20.2" }, "peerDependencies": { "react": "17.0.2" } }, "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA=="],
|
||||
|
||||
"react-refresh": ["react-refresh@0.10.0", "", {}, "sha512-PgidR3wST3dDYKr6b4pJoqQFpPGNKDSCDx4cZoshjXipw3LzO7mG1My2pwEzz2JVkF+inx3xRpDeQLFQGH/hsQ=="],
|
||||
|
||||
"scheduler": ["scheduler@0.20.2", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ=="],
|
||||
}
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
import { fetchCSV } from "macro:fetchCSV";
|
||||
|
||||
export const Covid19 = () => {
|
||||
const rows = fetchCSV("https://covid19.who.int/WHO-COVID-19-global-data.csv", {
|
||||
last: 100,
|
||||
columns: ["New_cases", "Date_reported", "Country"],
|
||||
});
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h2>Covid-19</h2>
|
||||
<h6>last {rows.length} updates from the WHO</h6>
|
||||
<div className="Table">
|
||||
<div className="Header">
|
||||
<div className="Heading">New Cases</div>
|
||||
<div className="Heading">Date</div>
|
||||
<div className="Heading">Country</div>
|
||||
</div>
|
||||
|
||||
{rows.map((row, index) => (
|
||||
<div className="Row" key={index}>
|
||||
<div className="Column">{row[0]}</div>
|
||||
<div className="Column">{row[1]}</div>
|
||||
<div className="Column">{row[2]}</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -1,15 +0,0 @@
|
||||
// source code
|
||||
import { matchInFile } from "macro:matchInFile";
|
||||
|
||||
export const IPAddresses = () => (
|
||||
<div>
|
||||
<h2>recent ip addresses</h2>
|
||||
<div className="Lines">
|
||||
{matchInFile("access.log", /^(?:[0-9]{1,3}\.){3}[0-9]{1,3}/).map((ipAddress, index) => (
|
||||
<div className="Line" key={index}>
|
||||
{ipAddress}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
@@ -1,15 +0,0 @@
|
||||
import * as ReactDOM from "react-dom";
|
||||
import * as React from "react";
|
||||
import { IPAddresses } from "./example";
|
||||
import { Covid19 } from "./covid19";
|
||||
|
||||
const Start = function () {
|
||||
const root = document.createElement("div");
|
||||
document.body.appendChild(root);
|
||||
|
||||
// comment out to switch between examples
|
||||
// ReactDOM.render(<IPAddresses />, root);
|
||||
ReactDOM.render(<Covid19 />, root);
|
||||
};
|
||||
|
||||
Start();
|
||||
@@ -1,4 +0,0 @@
// source code
import { mysteryBox } from "macro:./mystery-box";

export default "You roll! " + mysteryBox(123);
@@ -1,54 +0,0 @@
|
||||
import Pappa from "papaparse";
|
||||
// Example usage:
|
||||
// const rows = fetchCSV(
|
||||
// "https://covid19.who.int/WHO-COVID-19-global-data.csv",
|
||||
// {
|
||||
// last: 100,
|
||||
// columns: ["New_cases", "Date_reported", "Country"],
|
||||
// }
|
||||
// );
|
||||
export async function fetchCSV(callExpression) {
|
||||
console.time("fetchCSV Total");
|
||||
const [
|
||||
urlNode,
|
||||
{
|
||||
properties: { last: limit = 10, columns = [] },
|
||||
},
|
||||
] = callExpression.arguments;
|
||||
const url = urlNode.get();
|
||||
|
||||
console.time("Fetch");
|
||||
const response = await fetch(url);
|
||||
const csvText = await response.text();
|
||||
console.timeEnd("Fetch");
|
||||
|
||||
console.time("Parse");
|
||||
let rows = Pappa.parse(csvText, { fastMode: true }).data;
|
||||
console.timeEnd("Parse");
|
||||
|
||||
console.time("Render");
|
||||
const columnIndices = new Array(columns.length);
|
||||
|
||||
for (let i = 0; i < columns.length; i++) {
|
||||
columnIndices[i] = rows[0].indexOf(columns[i]);
|
||||
}
|
||||
|
||||
rows = rows
|
||||
.slice(Math.max(limit, rows.length) - limit)
|
||||
.reverse()
|
||||
.filter(columns => columns.every(Boolean));
|
||||
const value = (
|
||||
<array>
|
||||
{rows.map(columns => (
|
||||
<array>
|
||||
{columnIndices.map(columnIndex => (
|
||||
<string value={columns[columnIndex]} />
|
||||
))}
|
||||
</array>
|
||||
))}
|
||||
</array>
|
||||
);
|
||||
console.timeEnd("Render");
|
||||
console.timeEnd("fetchCSV Total");
|
||||
return value;
|
||||
}
|
||||
@@ -1,23 +0,0 @@
// macro code
export async function matchInFile(callExpression: BunAST.CallExpression) {
  const [filePathNode, matcherNode] = callExpression.arguments;
  let filePath: string;
  filePath = filePathNode.get();

  let matcher: RegExp;
  matcher = matcherNode.get();
  const file: string = await Bun.file(Bun.cwd + filePath).text();

  return (
    <array>
      {file
        .split("\n")
        .map(line => line.match(matcher))
        .filter(Boolean)
        .reverse()
        .map(line => (
          <string value={line[0]} />
        ))}
    </array>
  );
}
@@ -1,13 +0,0 @@
export function mysteryBox(callExpression) {
  console.log(callExpression.log);
  // get arguments
  const [countNode] = callExpression.arguments;
  const countString: string = countNode.get();
  const count: number = parseInt(countString, 10);

  // validate
  if (!(count >= 1 && count <= 1000)) return new Error(`Argument ${countString} is expected to be between 1 and 1000`);

  // return a value
  return (Math.random() * count) | 0;
}
@@ -1,10 +0,0 @@
import moment from "moment";
export function now(node) {
  var fmt = "HH:mm:ss";
  const args = node.arguments;
  if (args[0] instanceof <string />) {
    fmt = args[0].get();
  }
  const time = moment().format(fmt);
  return <string value={time}></string>;
}
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"name": "macros",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"moment": "^2.29.1",
|
||||
"papaparse": "^5.3.1",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"react-refresh": "^0.10.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^17.0.24",
|
||||
"@types/react-dom": "^17.0.9"
|
||||
}
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Macro test</title>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
|
||||
<link rel="stylesheet" href="/styles.css" type="text/css" />
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<script async type="module" src="/components/index.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,47 +0,0 @@
|
||||
html {
|
||||
font-size: 4rem;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
background-color: black;
|
||||
|
||||
color: rgb(0, 255, 0);
|
||||
font-family: "Courier";
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 48px auto;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.Line {
|
||||
font-size: 0.5rem;
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
.Table {
|
||||
display: grid;
|
||||
width: fit-content;
|
||||
}
|
||||
|
||||
.Row,
|
||||
.Header {
|
||||
display: grid;
|
||||
grid-template-columns: 2fr 1fr 1fr;
|
||||
text-align: right;
|
||||
|
||||
column-gap: 2rem;
|
||||
}
|
||||
|
||||
.Heading {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.Header {
|
||||
border-bottom: 1px solid rgb(0, 255, 0);
|
||||
margin-bottom: 20px;
|
||||
padding-bottom: 20px;
|
||||
}
|
||||
|
||||
.Heading:nth-of-type(2) {
|
||||
text-align: left;
|
||||
}
|
||||
@@ -1,7 +0,0 @@
{
  "compilerOptions": {
    "baseUrl": ".",
    "paths": {},
    "jsx": "preserve"
  }
}
@@ -1,11 +0,0 @@
const map = Bun.mmap("./mmap.txt", { shared: true });
const utf8decoder = new TextDecoder("utf-8");

let old = new TextEncoder().encode("12345");

setInterval(() => {
  old = old.sort((a, b) => (Math.random() > 0.5 ? -1 : 1));
  console.log(`changing mmap to ~> ${utf8decoder.decode(old)}`);

  map.set(old);
}, 4);
@@ -1,22 +0,0 @@
const map = Bun.mmap("./mmap.txt");

function buffer_hash(buffer) {
  let hash = 0;
  for (let i = 0; i < buffer.length; i++) {
    hash = (hash << 5) - hash + buffer[i];
    hash |= 0; // Convert to 32bit integer
  }
  return hash;
}

const decoder = new TextDecoder();

let hash = buffer_hash(map);
console.log(decoder.decode(map));

while (true) {
  if (buffer_hash(map) !== hash) {
    hash = buffer_hash(map);
    console.log(`mmap changed to ~> ${decoder.decode(map)}`);
  }
}
@@ -1 +0,0 @@
43521
@@ -1,23 +0,0 @@
|
||||
import { resolve } from "path";
|
||||
import { parse } from "querystring";
|
||||
|
||||
export default {
|
||||
fetch(req) {
|
||||
const url = new URL(req.url);
|
||||
if (url.pathname === "/favicon.ico") return new Response("nooo dont open favicon in editor", { status: 404 });
|
||||
|
||||
var pathname = req.url.substring(1);
|
||||
const q = pathname.indexOf("?");
|
||||
var { editor } = parse(pathname.substring(q + 1)) || {};
|
||||
|
||||
if (q > 0) {
|
||||
pathname = pathname.substring(0, q);
|
||||
}
|
||||
|
||||
Bun.openInEditor(resolve(pathname), {
|
||||
editor,
|
||||
});
|
||||
|
||||
return new Response(`Opened ${req.url}`);
|
||||
},
|
||||
};
|
||||
File diff suppressed because it is too large
@@ -1,33 +0,0 @@
|
||||
{
|
||||
"name": "simple-react",
|
||||
"version": "1.0.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@emotion/css": "^11.1.3",
|
||||
"@vitejs/plugin-react-refresh": "^1.3.3",
|
||||
"antd": "^4.16.1",
|
||||
"left-pad": "^1.3.0",
|
||||
"next": "^11.0.0",
|
||||
"parcel": "2.0.0-beta.3",
|
||||
"react": "^17.0.2",
|
||||
"react-bootstrap": "^1.6.1",
|
||||
"react-dom": "^17.0.2",
|
||||
"react-form": "^4.0.1",
|
||||
"react-hook-form": "^7.8.3"
|
||||
},
|
||||
"parcel": "parceldist/index.js",
|
||||
"targets": {
|
||||
"parcel": {
|
||||
"outputFormat": "esmodule",
|
||||
"sourceMap": false,
|
||||
"optimize": false,
|
||||
"engines": {
|
||||
"chrome": "last 1 version"
|
||||
}
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"@snowpack/plugin-react-refresh": "^2.5.0",
|
||||
"typescript": "^4.3.4"
|
||||
}
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<link
|
||||
rel="stylesheet"
|
||||
crossorigin="anonymous"
|
||||
href="https://fonts.googleapis.com/css2?family=IBM+Plex+Sans:wght@400;700&family=Space+Mono:wght@400;700&display=swap"
|
||||
/>
|
||||
</head>
|
||||
<body>
|
||||
<div id="reactroot"></div>
|
||||
<link rel="stylesheet" href="./src/index.css" />
|
||||
<script src="./src/index.tsx" async type="module"></script>
|
||||
</body>
|
||||
</html>
|
||||
File diff suppressed because it is too large
@@ -1,14 +0,0 @@
|
||||
:root {
|
||||
--timestamp: "0";
|
||||
--interval: "8";
|
||||
--progress-bar: 11.83299999999997%;
|
||||
--spinner-1-muted: rgb(142, 6, 182);
|
||||
--spinner-1-primary: rgb(177, 8, 227);
|
||||
--spinner-2-muted: rgb(110, 148, 190);
|
||||
--spinner-2-primary: rgb(138, 185, 238);
|
||||
--spinner-3-muted: rgb(75, 45, 64);
|
||||
--spinner-3-primary: rgb(94, 56, 80);
|
||||
--spinner-4-muted: rgb(155, 129, 108);
|
||||
--spinner-4-primary: rgb(194, 161, 135);
|
||||
--spinner-rotate: 213deg;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.