Compare commits

...

5 Commits

Author         SHA1        Message                                                            Date
Jarred Sumner  11a95d2cf8  [build images]                                                     2024-12-15 05:25:56 -08:00
Jarred Sumner  3cf2636a53  [build images]                                                     2024-12-15 04:44:11 -08:00
Jarred Sumner  9dd2790f9d  [build images]                                                     2024-12-15 01:53:08 -08:00
Jarred Sumner  d6d651b768  [build images]                                                     2024-12-15 00:34:44 -08:00
Jarred Sumner  b825ba4e64  Use a consistent version of node for most targets [build images]  2024-12-15 00:33:53 -08:00
3 changed files with 87 additions and 37 deletions

View File

@@ -724,6 +724,29 @@ nodejs_version() {
 }

 install_nodejs() {
+  if [ "$os" = "linux" ]; then
+    if [ "$arch" = "x64" ]; then
+      if [ "$abi" = "musl" ]; then
+        # Use the version of Node.js compiled specifically for musl
+        nodejs_url="https://unofficial-builds.nodejs.org/download/release/v22.9.0/node-v22.9.0-linux-x64-musl.tar.xz"
+        install_nodejs_from_tar "$nodejs_url"
+        return
+      elif [ "$abi" = "gnu" ]; then
+        # Use a Node.js version that will work with very old versions of glibc, so we don't have to worry about it.
+        nodejs_url="https://unofficial-builds.nodejs.org/download/release/v22.9.0/node-v22.9.0-linux-x64-glibc-217.tar.xz"
+        install_nodejs_from_tar "$nodejs_url"
+        return
+      fi
+    elif [ "$arch" = "aarch64" ]; then
+      if [ "$abi" = "musl" ]; then
+        nodejs_url="https://pub-63496db56c0141b88a12fb16f0e966b5.r2.dev/node-v22.9.0-linux-arm64-musl.tar.xz"
+        install_nodejs_from_tar "$nodejs_url"
+        return
+      fi
+      # unofficial-builds.nodejs.org does not have a Node.js version for old glibc on aarch64
+    fi
+  fi
   case "$pm" in
   dnf | yum)
     bash="$(require bash)"
@@ -755,6 +778,26 @@ install_nodejs() {
   esac
 }

+install_nodejs_from_tar() {
+  nodejs_url="$1"
+  nodejs_tar="$(download_file "$nodejs_url")"
+  nodejs_dir="$(dirname "$nodejs_tar")"
+
+  # Extract to /usr/local
+  execute_sudo tar -xJf "$nodejs_tar" -C /usr/local --strip-components=1
+
+  # Create symlinks if they don't exist
+  nodejs_bin_dir="/usr/local/bin"
+  for binary in node npm npx corepack; do
+    execute_sudo ln -sf "$nodejs_bin_dir/$binary" "/usr/bin/$binary"
+    execute_sudo chmod +x "/usr/bin/$binary"
+  done
+
+  # Skip downloading v8 headers
+  execute_sudo cp -r /usr/local/include/node /usr/include
+  append_to_profile "export npm_config_nodedir=/usr"
+}
+
 install_nodejs_headers() {
   nodejs_headers_tar="$(download_file "https://nodejs.org/download/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-headers.tar.gz")"
   nodejs_headers_dir="$(dirname "$nodejs_headers_tar")"
@@ -762,6 +805,9 @@ install_nodejs_headers() {
   nodejs_headers_include="$nodejs_headers_dir/node-v$(nodejs_version_exact)/include"
   execute_sudo cp -R "$nodejs_headers_include/" "/usr"
+
+  # Skip downloading v8 headers
+  append_to_profile "export npm_config_nodedir=/usr"
 }

 bun_version_exact() {

View File

@@ -433,6 +433,9 @@ const aws = {
       ...device,
       Ebs: {
         VolumeSize: getDiskSize(options),
+        VolumeType: "gp3",
+        Iops: 6000,
+        Throughput: 250,
       },
     };
   }

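The AWS change above pins the instance's EBS root volume to gp3 with explicit performance settings instead of the gp3 defaults (3,000 IOPS, 125 MiB/s). As a rough sketch of the resulting block-device mapping sent to EC2 (not taken from this diff; the device name and 30 GiB size are placeholders standing in for values the script computes):

    // Sketch only: shape of one BlockDeviceMappings entry after this change.
    const mapping = {
      DeviceName: "/dev/sda1",   // hypothetical; the real name comes from the AMI's device list
      Ebs: {
        VolumeSize: 30,          // GiB; in the diff this is getDiskSize(options)
        VolumeType: "gp3",       // gp3 lets IOPS/throughput be set independently of size
        Iops: 6000,              // vs. the 3,000 IOPS gp3 baseline
        Throughput: 250,         // MiB/s, vs. the 125 MiB/s gp3 baseline
      },
    };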
View File

@@ -15,47 +15,48 @@ describe("doesnt_crash", async () => {
   files = readdirSync(files_dir).map(file => path.join(files_dir, file));
   console.log("Tempdir", temp_dir);

-  files.map(absolute => {
+  files.forEach(absolute => {
     absolute = absolute.replaceAll("\\", "/");
     const file = path.basename(absolute);
-    const outfile1 = path.join(temp_dir, "file-1" + file).replaceAll("\\", "/");
-    const outfile2 = path.join(temp_dir, "file-2" + file).replaceAll("\\", "/");
-    const outfile3 = path.join(temp_dir, "file-3" + file).replaceAll("\\", "/");
-    const outfile4 = path.join(temp_dir, "file-4" + file).replaceAll("\\", "/");
-    test(file, async () => {
-      {
-        const { stdout, stderr, exitCode } =
-          await Bun.$`${bunExe()} build --experimental-css ${absolute} --outfile=${outfile1}`.quiet().env(bunEnv);
-        expect(exitCode).toBe(0);
-        expect(stdout.toString()).not.toContain("error");
-        expect(stderr.toString()).toBeEmpty();
-      }
+    for (let minify of [false, true]) {
+      test(`${file} - ${minify ? "minify" : "not minify"}`, async () => {
+        const timeLog = `Transpiled ${file} - ${minify ? "minify" : "not minify"}`;
+        console.time(timeLog);
+        const { logs, outputs } = await Bun.build({
+          entrypoints: [absolute],
+          experimentalCss: true,
+          minify: minify,
+        });
+        console.timeEnd(timeLog);
-      const { stdout, stderr, exitCode } =
-        await Bun.$`${bunExe()} build --experimental-css ${outfile1} --outfile=${outfile2}`.quiet().env(bunEnv);
-      expect(exitCode).toBe(0);
-      expect(stdout.toString()).not.toContain("error");
-      expect(stderr.toString()).toBeEmpty();
-    });
+        if (logs?.length) {
+          throw new Error(logs.join("\n"));
+        }
-    test(`(minify) ${file}`, async () => {
-      {
-        const { stdout, stderr, exitCode } =
-          await Bun.$`${bunExe()} build --experimental-css ${absolute} --minify --outfile=${outfile3}`
-            .quiet()
-            .env(bunEnv);
-        expect(exitCode).toBe(0);
-        expect(stdout.toString()).not.toContain("error");
-        expect(stderr.toString()).toBeEmpty();
-      }
-      const { stdout, stderr, exitCode } =
-        await Bun.$`${bunExe()} build --experimental-css ${outfile3} --minify --outfile=${outfile4}`
-          .quiet()
-          .env(bunEnv);
-      expect(exitCode).toBe(0);
-      expect(stdout.toString()).not.toContain("error");
-      expect(stderr.toString()).toBeEmpty();
-    });
+        expect(outputs.length).toBe(1);
+        const outfile1 = path.join(temp_dir, "file-1" + file).replaceAll("\\", "/");
+        await Bun.write(outfile1, outputs[0]);
+        {
+          const timeLog = `Re-transpiled ${file} - ${minify ? "minify" : "not minify"}`;
+          console.time(timeLog);
+          const { logs, outputs } = await Bun.build({
+            entrypoints: [outfile1],
+            experimentalCss: true,
+            minify: minify,
+          });
+          if (logs?.length) {
+            throw new Error(logs.join("\n"));
+          }
+          expect(outputs.length).toBe(1);
+          expect(await outputs[0].text()).not.toBeEmpty();
+          console.timeEnd(timeLog);
+        }
+      });
+    }
   });
 });
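The test rewrite above replaces the shelled-out "bun build --experimental-css" invocations with direct calls to the Bun.build API, and round-trips each fixture through the bundler twice (build, write the output, build the output again). A minimal standalone sketch of that round-trip check, using a hypothetical entry point path rather than the repo's fixture files:

    import { expect, test } from "bun:test";

    test("css round-trip (sketch)", async () => {
      // "./input.css" is a placeholder path, not from the diff.
      const first = await Bun.build({ entrypoints: ["./input.css"], experimentalCss: true, minify: true });
      if (first.logs?.length) throw new Error(first.logs.join("\n"));
      expect(first.outputs.length).toBe(1);

      // Feed the first output back through the bundler and expect non-empty output.
      await Bun.write("./roundtrip.css", first.outputs[0]);
      const second = await Bun.build({ entrypoints: ["./roundtrip.css"], experimentalCss: true, minify: true });
      expect(await second.outputs[0].text()).not.toBeEmpty();
    });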