Mirror of https://github.com/isledecomp/isle.git
Improve performance of entropy build action (#1407)

* Use multiple threads for entropy builds
* Verify builds parameter
* Revert "Verify builds parameter" (reverts commit 460d3d3b55)
* Use options instead
* Seed fix
* 256 samples on push
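
In rough terms, the change replaces the old 16x16 matrix (one entropy build per job) with 16 matrix jobs that each run several builds in parallel. A back-of-the-envelope sketch of the resulting sample count (PowerShell, illustration only, not part of the commit):

    $jobs = 16               # matrix "job: [0..15]" in compare.yml below
    $builds_per_job = 16     # default on push; workflow_dispatch can pick 1, 2, 4, 8, 16, 32 or 64
    $jobs * $builds_per_job  # 256 entropy samples per push, matching the last bullet above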

.github/workflows/compare.yml (vendored), 130 lines changed
@@ -5,6 +5,13 @@ on:
     branches:
       - master
   workflow_dispatch:
+    inputs:
+      builds_per_job:
+        description: 'How many builds to run in parallel on each job.'
+        default: 4
+        required: true
+        type: choice
+        options: [1, 2, 4, 8, 16, 32, 64]

 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
@@ -15,14 +22,40 @@ jobs:
     name: Download original binaries
     uses: ./.github/workflows/legobin.yml

+  reccmp:
+    name: Setup python environment
+    runs-on: windows-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      # The typical cache key would include a hash on requirements.txt.
+      # We currently run reccmp from latest so we would have to manually purge it.
+      # The goal is simply to restore the cache across entropy build jobs.
+      - name: Cache venv
+        uses: actions/cache@v4
+        with:
+          key: venv-entropy-${{ github.run_id }}
+          path: .venv
+
+      - name: Install python packages
+        run: |
+          python -m venv .venv
+          .venv\Scripts\Activate
+          pip install -r tools/requirements.txt
+
   build:
     name: 'MSVC 4.20'
-    needs: [fetch-deps]
+    needs: [fetch-deps, reccmp]
     runs-on: windows-latest
     strategy:
       matrix:
-        high: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
-        low: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+        builds:
+          - ${{ inputs.builds_per_job && inputs.builds_per_job || 16 }}

     steps:
       - uses: actions/checkout@v4
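
The single-element builds matrix above leans on the short-circuit behaviour of GitHub's expression syntax: on a push event inputs.builds_per_job is empty, so && yields the empty value and || falls back to 16, while a manual dispatch uses the selected option. A rough PowerShell analogue of that fallback (illustration only, not workflow code):

    $builds_per_job = $null                                             # empty on a push event
    $builds = if ($builds_per_job) { [int]$builds_per_job } else { 16 }
    $builds                                                             # 16 on push; the chosen option on dispatch
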
@@ -46,26 +79,6 @@ jobs:
         run: |
           tools/patch_c2.py msvc420/bin/C2.EXE

-      - name: Generate Entropy
-        shell: bash
-        run: |
-          # Calculate instance number based on matrix inputs
-          INSTANCE=$((${{ matrix.high }} << 4 | ${{ matrix.low }}))
-
-          # Get the first 8 characters of the SHA (enough for a decent seed)
-          SHA_PREFIX=$(echo "${{ github.sha }}" | cut -c 1-8)
-          ENTROPY_SEED=$((16#$SHA_PREFIX + $INSTANCE))
-
-          echo "Using seed: $ENTROPY_SEED (instance $INSTANCE)"
-          python3 tools/entropy.py $ENTROPY_SEED > entropy.h
-
-      - name: Build
-        shell: cmd
-        run: |
-          call .\msvc420\bin\VCVARS32.BAT x86
-          cmake -B build -DCMAKE_BUILD_TYPE=RelWithDebInfo -DISLE_INCLUDE_ENTROPY=ON -G "NMake Makefiles"
-          cmake --build build
-
       - name: Restore cached original binaries
         id: cache-original-binaries
         uses: actions/cache/restore@v4
@@ -73,32 +86,47 @@
           enableCrossOsArchive: true
           path: legobin
           key: legobin

-      - name: Install python packages
-        shell: bash
-        run: |
-          pip install -r tools/requirements.txt
-
-      - name: Detect binaries
-        run: |
-          reccmp-project detect --what original --search-path legobin
-          reccmp-project detect --what recompiled --search-path build
-
-      - name: Summarize Accuracy
-        shell: bash
-        run: |
-          reccmp-reccmp --target CONFIG --json CONFIGPROGRESS.json
-          reccmp-reccmp --target ISLE --json ISLEPROGRESS.json
-          reccmp-reccmp --target LEGO1 --json LEGO1PROGRESS.json
+      - name: Install python packages
+        run: |
+          python -m venv .venv
+          .venv\Scripts\Activate
+          echo ($env:VIRTUAL_ENV + "\Scripts") >> $env:GITHUB_PATH
+          echo ("VIRTUAL_ENV=" + $env:VIRTUAL_ENV) >> $env:GITHUB_ENV
+
+      - name: Restore cached virtualenv
+        uses: actions/cache@v4
+        with:
+          key: venv-entropy-${{ github.run_id }}
+          path: .venv
+
+      - name: Prepare builds
+        shell: pwsh
+        run: |
+          cmd /c "call `".\msvc420\bin\VCVARS32.BAT`" x86 && set > %temp%\vcvars32.txt"
+          Get-Content "$env:temp\vcvars32.txt" | Foreach-Object { if ($_ -match "^(.*?)=(.*)$") { Set-Content "env:\$($matches[1])" $matches[2] } }
+          .\tools\multi-prepare.ps1 ${{ matrix.job }} ${{ matrix.builds }}
+
+      - name: Run builds
+        shell: pwsh
+        run: |
+          cmd /c "call `".\msvc420\bin\VCVARS32.BAT`" x86 && set > %temp%\vcvars32.txt"
+          Get-Content "$env:temp\vcvars32.txt" | Foreach-Object { if ($_ -match "^(.*?)=(.*)$") { Set-Content "env:\$($matches[1])" $matches[2] } }
+          .\tools\multi-build.ps1 ${{ matrix.builds }}
+
+      - name: Analyze builds
+        shell: pwsh
+        run: |
+          .\tools\multi-analyze.ps1 ${{ matrix.builds }}

       - name: Upload Artifact
         uses: actions/upload-artifact@main
         with:
-          name: Win32-Entropy-${{ matrix.high }}-${{ matrix.low }}
+          name: Win32-Entropy-${{ matrix.job }}
           path: |
-            CONFIGPROGRESS.json
-            ISLEPROGRESS.json
-            LEGO1PROGRESS.json
+            CONFIGPROGRESS*
+            ISLEPROGRESS*
+            LEGO1PROGRESS*

   merge-artifacts:
     name: 'Merge entropy artifacts'
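
Taken together, the three new per-job steps amount to the following local reproduction (a sketch only: it assumes the MSVC 4.20 toolchain is unpacked in .\msvc420, the original binaries are in .\legobin, reccmp is installed from tools/requirements.txt, and it uses job 0 with 4 builds as example arguments):

    # Sketch of one matrix job run locally; 0 (job index) and 4 (build count) are example values.
    $env:GITHUB_SHA = (git rev-parse HEAD)    # multi-prepare.ps1 derives its seeds from this

    # Import the MSVC 4.20 environment into the current PowerShell session, as the steps above do.
    cmd /c "call `".\msvc420\bin\VCVARS32.BAT`" x86 && set > %temp%\vcvars32.txt"
    Get-Content "$env:temp\vcvars32.txt" | Foreach-Object { if ($_ -match "^(.*?)=(.*)$") { Set-Content "env:\$($matches[1])" $matches[2] } }

    .\tools\multi-prepare.ps1 0 4    # write entropy0.h..entropy3.h and configure build0..build3
    .\tools\multi-build.ps1 4        # run the four builds in parallel
    .\tools\multi-analyze.ps1 4      # compare each build against legobin, producing *PROGRESS<i>.json
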
@@ -129,16 +157,24 @@
           path: build-entropy

       - name: Install python packages
-        shell: bash
         run: |
-          pip install -r tools/requirements.txt
+          python -m venv .venv
+          .venv\Scripts\Activate
+          echo ($env:VIRTUAL_ENV + "\Scripts") >> $env:GITHUB_PATH
+          echo ("VIRTUAL_ENV=" + $env:VIRTUAL_ENV) >> $env:GITHUB_ENV
+
+      - name: Restore cached virtualenv
+        uses: actions/cache@v4
+        with:
+          key: venv-entropy-${{ github.run_id }}
+          path: .venv

       - name: Aggregate Accuracy
         shell: bash
         run: |
-          reccmp-aggregate --samples $(find build-entropy -type f -name "CONFIGPROGRESS.json") --output CONFIGPROGRESS-agg.json --html CONFIGPROGRESS-agg.html
-          reccmp-aggregate --samples $(find build-entropy -type f -name "ISLEPROGRESS.json") --output ISLEPROGRESS-agg.json --html ISLEPROGRESS-agg.html
-          reccmp-aggregate --samples $(find build-entropy -type f -name "LEGO1PROGRESS.json") --output LEGO1PROGRESS-agg.json --html LEGO1PROGRESS-agg.html
+          reccmp-aggregate --samples $(find build-entropy -type f -name "CONFIGPROGRESS*.json") --output CONFIGPROGRESS-agg.json --html CONFIGPROGRESS-agg.html
+          reccmp-aggregate --samples $(find build-entropy -type f -name "ISLEPROGRESS*.json") --output ISLEPROGRESS-agg.json --html ISLEPROGRESS-agg.html
+          reccmp-aggregate --samples $(find build-entropy -type f -name "LEGO1PROGRESS*.json") --output LEGO1PROGRESS-agg.json --html LEGO1PROGRESS-agg.html

       - name: Compare Aggregate Accuracy With Current Master
         shell: bash

CMakeLists.txt

@@ -67,6 +67,7 @@ cmake_dependent_option(ISLE_USE_DX5_LIBS "Build with internal DirectX 5 SDK Libr
 option(ISLE_BUILD_LEGO1 "Build LEGO1.DLL library" ON)
 option(ISLE_BUILD_BETA10 "Build BETA10.DLL library" OFF)
 option(ISLE_INCLUDE_ENTROPY "Build with entropy.h" OFF)
+option(ISLE_ENTROPY_FILENAME "Entropy header filename" "entropy.h")

 if(NOT (ISLE_BUILD_LEGO1 OR ISLE_BUILD_BETA10))
   message(FATAL_ERROR "ISLE_BUILD_LEGO1 AND ISLE_BUILD_BETA10 cannot be both disabled")
@@ -586,14 +587,15 @@ if (MSVC_FOR_DECOMP)
 endif()

 if (ISLE_INCLUDE_ENTROPY)
+  message(STATUS "Using entropy file: ${ISLE_ENTROPY_FILENAME}")
   foreach(tgt IN LISTS lego1_targets beta10_targets)
-    target_compile_options(${tgt} PRIVATE /FI${PROJECT_SOURCE_DIR}/entropy.h)
+    target_compile_options(${tgt} PRIVATE /FI${PROJECT_SOURCE_DIR}/${ISLE_ENTROPY_FILENAME})
   endforeach()
   if (TARGET isle)
-    target_compile_options(isle PRIVATE /FI${PROJECT_SOURCE_DIR}/entropy.h)
+    target_compile_options(isle PRIVATE /FI${PROJECT_SOURCE_DIR}/${ISLE_ENTROPY_FILENAME})
   endif()
   if (TARGET config)
-    target_compile_options(config PRIVATE /FI${PROJECT_SOURCE_DIR}/entropy.h)
+    target_compile_options(config PRIVATE /FI${PROJECT_SOURCE_DIR}/${ISLE_ENTROPY_FILENAME})
   endif()
 endif()
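
This is the hook that lets each parallel build inject a different header: every build directory can be configured with its own ISLE_ENTROPY_FILENAME, which ends up as an /FI force-include on the decomp targets. An illustrative configure call, mirroring what tools/multi-prepare.ps1 below does (build3 and entropy3.h are example names):

    cmake -B build3 -DCMAKE_BUILD_TYPE=RelWithDebInfo `
          -DISLE_INCLUDE_ENTROPY=ON -DISLE_ENTROPY_FILENAME=entropy3.h `
          -G "NMake Makefiles"
    # The LEGO1/BETA10, isle and config targets in build3 are then compiled with
    # /FI<source-dir>/entropy3.h instead of the previously hard-coded entropy.h.
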

tools/multi-analyze.ps1 (new file, 62 lines)
if ($args.count -lt 1) {
    Write-Error "Requires 1 arg: number of builds for this job."
    exit 1
}

$BuildCount = [int]$args[0]
$build_ids = 0..($BuildCount-1)

$build_dirs = foreach($i in $build_ids) { "build$i" }
$stdout_files = foreach($i in $build_ids) { "stdout$i.txt" }
$stderr_files = foreach($i in $build_ids) { "stderr$i.txt" }

$artifacts = @(
    @{prog = "CONFIGPROGRESS"; binfile = "CONFIG.EXE"; pdbfile = "CONFIG.PDB"; codedir = "."}
    @{prog = "ISLEPROGRESS"; binfile = "ISLE.EXE"; pdbfile = "ISLE.PDB"; codedir = "."}
    @{prog = "LEGO1PROGRESS"; binfile = "LEGO1.DLL"; pdbfile = "LEGO1.PDB"; codedir = "LEGO1"}
)

foreach($a in $artifacts) {
    $procs = New-Object System.Collections.Generic.List[System.Diagnostics.Process]

    foreach($i in $build_ids) {
        $params = @{
            FilePath = "reccmp-reccmp"
            PassThru = $null
            ArgumentList = @(
                "--paths",
                $("legobin/" + $a["binfile"]),
                $($build_dirs[$i] + "/" + $a["binfile"]),
                $($build_dirs[$i] + "/" + $a["pdbfile"]),
                $a["codedir"],
                "--json",
                $($a["prog"] + "$i.json"),
                "--silent"
            )
        }

        # For the first job, display stdout and stderr.
        # Else dump to file so we don't see 50 at once.
        if ($i -eq 0) {
            $params.Add("NoNewWindow", $null)
        } else {
            $params.Add("RedirectStandardOutput", $stdout_files[$i])
            $params.Add("RedirectStandardError", $stderr_files[$i])
        }

        $procs.Add($(Start-Process @params))
    }

    $failed = $false
    try { Wait-Process -InputObject $procs } catch { $failed = $true }

    foreach($i in $build_ids) {
        if ($procs[$i].ExitCode -ne 0) {
            Get-Content $stdout_files[$i] -Tail 20
            Get-Content $stderr_files[$i] -Tail 20
            $failed = $true
        }
    }

    if ($failed) { exit 1 }
}
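
For a job running four builds, the loops above leave CONFIGPROGRESS0.json through LEGO1PROGRESS3.json in the workspace, which is why the artifact paths and the aggregate step switched to *PROGRESS* wildcards. A quick sketch of the names produced (illustrative, assuming 4 builds):

    foreach ($prog in "CONFIGPROGRESS", "ISLEPROGRESS", "LEGO1PROGRESS") {
        foreach ($i in 0..3) { "$prog$i.json" }   # CONFIGPROGRESS0.json ... LEGO1PROGRESS3.json
    }
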

tools/multi-build.ps1 (new file, 56 lines)
if ($args.count -lt 1) {
    Write-Error "Requires 1 arg: number of builds for this job."
    exit 1
}

$BuildCount = [int]$args[0]

$build_ids = 0..($BuildCount - 1)
$build_dirs = foreach($i in $build_ids) { "build$i" }
$stdout_files = foreach($i in $build_ids) { "stdout$i.txt" }
$stderr_files = foreach($i in $build_ids) { "stderr$i.txt" }

# Create unique temp dir for each build thread
$temp_dirs = foreach($dir in $build_dirs) { "$env:temp\$dir" }
New-Item -ItemType Directory -Force -Path $temp_dirs

$procs = New-Object System.Collections.Generic.List[System.Diagnostics.Process]

foreach($i in $build_ids) {
    $params = @{
        FilePath = "cmake"
        PassThru = $null
        ArgumentList = @("--build", $build_dirs[$i])
        Environment = @{ TEMP = $temp_dirs[$i]; TMP = $temp_dirs[$i] }
    }

    # For the first job, display stdout and stderr.
    # Else dump to file so we don't see 50 at once.
    if ($i -eq 0) {
        $params.Add("NoNewWindow", $null)
    } else {
        $params.Add("RedirectStandardOutput", $stdout_files[$i])
        $params.Add("RedirectStandardError", $stderr_files[$i])
    }

    $procs.Add($(Start-Process @params))
}


$failed = $false

# Wait for all builds to finish
try { Wait-Process -InputObject $procs } catch { $failed = $true }

# Check for failure
foreach($i in $build_ids) {
    if ($procs[$i].ExitCode -ne 0) {
        if ($i -ne 0) {
            Get-Content $stdout_files[$i] -Tail 10
            Get-Content $stderr_files[$i] -Tail 10
        }
        $failed = $true
    }
}

if ($failed) { exit 1 }
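
The Environment entry above gives each cmake/nmake child its own TEMP and TMP, presumably so the parallel MSVC instances do not collide on intermediate files in a shared temp directory. A minimal standalone sketch of the same pattern, using the Start-Process -Environment parameter the script splats:

    $p = Start-Process -FilePath "cmake" -ArgumentList @("--build", "build0") -PassThru -NoNewWindow `
         -Environment @{ TEMP = "$env:TEMP\build0"; TMP = "$env:TEMP\build0" }
    $p.WaitForExit()
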

tools/multi-prepare.ps1 (new file, 81 lines)
if ($args.count -lt 2) {
    Write-Error "Requires 2 args: job matrix number and number of builds for this job."
    exit 1
}

function Get-BaseSeed {
    Param (
        [int]$Matrix
    )

    # GITHUB_SHA is the commit hash. This means the entropy files will be consistent
    # unless you push a new commit.
    $sha = [System.Convert]::ToUInt32($env:GITHUB_SHA.Substring(0, 8), 16)

    # Mask off the last 16 bits
    $base_seed = ($sha -band 0xffff0000)

    # Add the matrix number * 256. We can run 256 unique builds on this job.
    return $base_seed + ($Matrix -shl 8)
}

$MatrixNo = [int]$args[0]
$BuildCount = [int]$args[1]
$base_seed = $(Get-BaseSeed -Matrix $MatrixNo)

$build_ids = 0..($BuildCount - 1)

$build_dirs = foreach($i in $build_ids) { "build$i" }
$stdout_files = foreach($i in $build_ids) { "stdout$i.txt" }
$stderr_files = foreach($i in $build_ids) { "stderr$i.txt" }

$procs = New-Object System.Collections.Generic.List[System.Diagnostics.Process]

foreach($i in $build_ids) {
    # Create the entropy file
    $entropy_file = "entropy$i.h"
    $seed = $base_seed + $i

    Write-Output "Using seed: $seed (instance $i)"
    python3 tools/entropy.py $seed > $entropy_file

    # Prepare to build
    $params = @{
        FilePath = "cmake"
        PassThru = $null
        ArgumentList = @(
            "-B", $build_dirs[$i],
            "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
            "-DISLE_INCLUDE_ENTROPY=ON",
            "-DISLE_ENTROPY_FILENAME=$entropy_file",
            "-G", "`"NMake Makefiles`""
        )
    }

    # For the first job, display stdout and stderr.
    # Else dump to file so we don't see 50 at once.
    if ($i -eq 0) {
        $params.Add("NoNewWindow", $null)
    } else {
        $params.Add("RedirectStandardOutput", $stdout_files[$i])
        $params.Add("RedirectStandardError", $stderr_files[$i])
    }

    $procs.Add($(Start-Process @params))
}

$failed = $false
try { Wait-Process -InputObject $procs } catch { $failed = $true }

# Check for failure
foreach($i in $build_ids) {
    if ($procs[$i].ExitCode -ne 0) {
        if ($i -ne 0) {
            Get-Content $stdout_files[$i] -Tail 10
            Get-Content $stderr_files[$i] -Tail 10
        }
        $failed = $true
    }
}

if ($failed) { exit 1 }
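
A worked example of the seeding scheme, using a made-up SHA prefix of 1a2b3c4d: the low 16 bits of the prefix are masked off, each matrix job gets a 256-seed window, and the build index selects a seed within it, so seeds only change when a new commit is pushed.

    $sha       = [System.Convert]::ToUInt32("1a2b3c4d", 16)  # hypothetical commit SHA prefix
    $base_seed = $sha -band 0xffff0000                       # 0x1A2B0000
    $job_seed  = $base_seed + (3 -shl 8)                     # matrix job 3 -> 0x1A2B0300
    $job_seed + 5                                            # build 5 -> seed 0x1A2B0305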