30 commits
b10438d  unity-cli@v1.8.2 (StephenHodgson, Dec 20, 2025)
ed17047  bump (StephenHodgson, Dec 20, 2025)
80515c3  integrate tests (StephenHodgson, Dec 20, 2025)
f38b099  default permissions (StephenHodgson, Dec 20, 2025)
ca76f67  update test logic (StephenHodgson, Dec 20, 2025)
b531116  update artifact names (StephenHodgson, Dec 20, 2025)
0d99a0c  test dev job builder (StephenHodgson, Dec 22, 2025)
7d0df3d  fix permissions (StephenHodgson, Dec 22, 2025)
2ce7a94  misc (StephenHodgson, Dec 22, 2025)
28d2f8e  don't write summary if no telemetry output (StephenHodgson, Dec 22, 2025)
fe5423a  add additional utp types for logging [skip ci] (StephenHodgson, Dec 28, 2025)
545be7a  rework tests (StephenHodgson, Dec 28, 2025)
5b67358  tweaks (StephenHodgson, Dec 28, 2025)
c7bbdd1  fixes to workflow (StephenHodgson, Dec 28, 2025)
4a6cacc  fix openupm installs (StephenHodgson, Dec 28, 2025)
32278ac  update matrix build artifact names (StephenHodgson, Jan 4, 2026)
15bce31  don't clean between runs (StephenHodgson, Jan 5, 2026)
937832a  cleanup artifacts between tests (StephenHodgson, Jan 5, 2026)
1c53207  upgate utp tests (StephenHodgson, Jan 10, 2026)
a9f7246  update run utp tests (StephenHodgson, Jan 11, 2026)
c48ad26  relax expected message success check (StephenHodgson, Jan 11, 2026)
cf870e6  update utp tests (StephenHodgson, Jan 11, 2026)
5e644f2  update utp tests (StephenHodgson, Jan 11, 2026)
78cd83d  update edit mode test error (StephenHodgson, Jan 11, 2026)
e4a60c6  update tests (StephenHodgson, Jan 11, 2026)
556902e  add editor assembly for editor tests (StephenHodgson, Jan 12, 2026)
f7bfda9  remove cp warning for gnu clobber (StephenHodgson, Jan 12, 2026)
9410cc8  fix test (StephenHodgson, Jan 12, 2026)
ede76ee  fix macos tests (StephenHodgson, Jan 12, 2026)
0f3a2c5  refactor tets (StephenHodgson, Jan 12, 2026)
1 change: 1 addition & 0 deletions .gitattributes
@@ -0,0 +1 @@
*.sh text eol=lf
54 changes: 54 additions & 0 deletions .github/actions/run-unity-test-batch/action.yml
@@ -0,0 +1,54 @@
name: Run Unity UTP Test Batch
description: Runs a batch of Unity UTP tests in a given Unity project.
inputs:
  unity-project-path:
    description: Absolute path to the Unity project.
    required: true
  build-target:
    description: Build target to use.
    required: true
  build-args:
    description: Additional build args.
    required: false
    default: ""
  artifact-name:
    description: Artifact name for uploaded UTP logs (must be unique per matrix job).
    required: false
    default: unity-tests-batch-utp-logs
runs:
  using: composite
  steps:
    - name: Prepare test list and install packages
      shell: bash
      working-directory: ${{ inputs.unity-project-path }}
      run: |
        set -euo pipefail
        tests_input="CompilerWarnings,CompilerErrors,BuildWarnings,BuildErrors,PlaymodeTestsErrors,EditmodeTestsErrors"
        echo "TESTS_INPUT=$tests_input" >> $GITHUB_ENV

        needs_test_framework=false
        if [[ "$tests_input" == *"PlaymodeTestsErrors"* || "$tests_input" == *"EditmodeTestsErrors"* ]]; then
          needs_test_framework=true
        fi

        npm install -g openupm-cli
        openupm add com.utilities.buildpipeline
        if [ "$needs_test_framework" = true ]; then
          openupm add com.unity.test-framework
        fi

    - name: Run tests
      shell: bash
      env:
        UNITY_PROJECT_PATH: ${{ inputs.unity-project-path }}
        BUILD_TARGET: ${{ inputs.build-target }}
        BUILD_ARGS: ${{ inputs.build-args }}
      run: |
        bash "${GITHUB_WORKSPACE}/.github/actions/scripts/run-utp-tests.sh"

    - name: Upload UTP logs
      uses: actions/upload-artifact@v6
      with:
        name: ${{ inputs.artifact-name }}
        path: utp-artifacts/**/*-utp-json.log
        if-no-files-found: ignore
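
For reference, a minimal sketch of how a workflow job might call this composite action. The workflow trigger, job name, project path, and build target below are illustrative assumptions, not part of this PR; per the action's description, artifact-name must be unique per matrix job so parallel uploads do not collide.

# Hypothetical caller workflow; project path and build target are assumptions.
on: push
jobs:
  utp-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - uses: ./.github/actions/run-unity-test-batch
        with:
          unity-project-path: ${{ github.workspace }}/UnityProject
          build-target: StandaloneLinux64
          artifact-name: unity-tests-utp-logs-StandaloneLinux64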
219 changes: 219 additions & 0 deletions .github/actions/scripts/run-utp-tests.sh
@@ -0,0 +1,219 @@
#!/usr/bin/env bash
set -uo pipefail

UNITY_PROJECT_PATH=${UNITY_PROJECT_PATH:?UNITY_PROJECT_PATH is required}
BUILD_TARGET=${BUILD_TARGET:?BUILD_TARGET is required}
BUILD_ARGS=${BUILD_ARGS:-}
TESTS_INPUT=${TESTS_INPUT:-}

if printf '%s' "$BUILD_ARGS" | grep -qE '[;&`|]'; then
  echo "::error::BUILD_ARGS contains disallowed shell metacharacters"
  exit 1
fi

build_args=()
if [ -n "$BUILD_ARGS" ]; then
  # Split on whitespace into an array without invoking the shell
  read -r -a build_args <<< "$BUILD_ARGS"
fi

IFS=',' read -ra tests <<< "$TESTS_INPUT"
failures=0

clean_tests() {
  rm -f "$UNITY_PROJECT_PATH/Assets/UnityCliTests"/*.cs 2>/dev/null || true
  rm -f "$UNITY_PROJECT_PATH/Assets/Editor/UnityCliTests"/*.cs 2>/dev/null || true
  rm -f "$UNITY_PROJECT_PATH/Assets/Tests/PlayMode/UnityCliTests"/*.cs 2>/dev/null || true
  rm -f "$UNITY_PROJECT_PATH/Assets/Tests/EditMode/UnityCliTests"/*.cs 2>/dev/null || true
  rm -f "$UNITY_PROJECT_PATH/Assets/Tests/EditMode/UnityCliTests"/*.asmdef 2>/dev/null || true
  rm -f "$UNITY_PROJECT_PATH/Assets/Tests/EditMode/Editor/UnityCliTests"/*.cs 2>/dev/null || true
}

clean_build_outputs() {
  rm -rf "$UNITY_PROJECT_PATH/Builds" 2>/dev/null || true
  mkdir -p "$UNITY_PROJECT_PATH/Builds/Logs"
}

# Expectations for each synthetic test
# expected_status: 0 = should succeed, 1 = should fail
expected_status_for() {
  case "$1" in
    CompilerWarnings) echo 0 ;;
    BuildWarnings) echo 0 ;;
    CompilerErrors) echo 1 ;;
    BuildErrors) echo 1 ;;
    PlaymodeTestsErrors) echo 1 ;;
    EditmodeTestsErrors) echo 1 ;;
    *) echo 0 ;;
  esac
}

expected_message_for() {
  case "$1" in
    CompilerErrors) echo "Intentional compiler error" ;;
    BuildErrors) echo "Intentional build failure" ;;
    PlaymodeTestsErrors) echo "Intentional playmode failure" ;;
    EditmodeTestsErrors) echo "Intentional editmode failure" ;;
    CompilerWarnings) echo "Intentional warning" ;;
    BuildWarnings) echo "Intentional build warning" ;;
    *) echo "" ;;
  esac
}

mkdir -p "$GITHUB_WORKSPACE/utp-artifacts"

for raw_test in "${tests[@]}"; do
  test_name="$(echo "$raw_test" | xargs)"
  if [ -z "$test_name" ] || [ "$test_name" = "None" ]; then
    echo "Skipping empty/None test entry"
    continue
  fi

  src="$GITHUB_WORKSPACE/unity-tests/${test_name}.cs"
  if [ ! -f "$src" ]; then
    echo "::error::Requested test '$test_name' not found at $src"
    failures=$((failures+1))
    continue
  fi

  clean_tests
  clean_build_outputs

  asmdef_src=""

  case "$test_name" in
    CompilerWarnings|CompilerErrors)
      dest="$UNITY_PROJECT_PATH/Assets/UnityCliTests"
      ;;
    BuildWarnings|BuildErrors)
      dest="$UNITY_PROJECT_PATH/Assets/Editor/UnityCliTests"
      ;;
    PlaymodeTestsErrors)
      dest="$UNITY_PROJECT_PATH/Assets/Tests/PlayMode/UnityCliTests"
      ;;
    EditmodeTestsErrors)
      dest="$UNITY_PROJECT_PATH/Assets/Tests/EditMode/UnityCliTests"
      asmdef_src="$GITHUB_WORKSPACE/unity-tests/UnityCliTests.EditMode.Editor.asmdef"
      ;;
    *)
      echo "::error::Unknown test selection '$test_name'"
      failures=$((failures+1))
      continue
      ;;
  esac

  mkdir -p "$dest"
  if [ -n "$asmdef_src" ]; then
    if [ ! -f "$asmdef_src" ]; then
      echo "::error::Assembly definition for editmode tests not found at $asmdef_src"
      failures=$((failures+1))
      continue
    fi
    cp "$asmdef_src" "$dest/"
  fi
  cp "$src" "$dest/"
  echo "Running test: $test_name (copied to $dest)"

  validate_rc=0
  build_rc=0

  ran_custom_flow=0

  if [ "$test_name" = "EditmodeTestsErrors" ]; then
    unity-cli run --log-name "${test_name}-EditMode" -runTests -testPlatform editmode -assemblyNames "UnityCli.EditMode.EditorTests" -testResults "$UNITY_PROJECT_PATH/Builds/Logs/${test_name}-results.xml" -quit || validate_rc=$?

    results_xml="$UNITY_PROJECT_PATH/Builds/Logs/${test_name}-results.xml"
    if ! grep -q "<test-case " "$results_xml" 2>/dev/null; then
      validate_rc=1
    fi
    build_rc=$validate_rc
    ran_custom_flow=1
  fi

  if [ "$ran_custom_flow" -eq 0 ]; then
    unity-cli run --log-name "${test_name}-Validate" -quit -executeMethod Utilities.Editor.BuildPipeline.UnityPlayerBuildTools.ValidateProject -importTMProEssentialsAsset || validate_rc=$?
    unity-cli run --log-name "${test_name}-Build" -buildTarget "$BUILD_TARGET" -quit -executeMethod Utilities.Editor.BuildPipeline.UnityPlayerBuildTools.StartCommandLineBuild -sceneList Assets/Scenes/SampleScene.unity "${build_args[@]}" || build_rc=$?
  fi

  expected=$(expected_status_for "$test_name")
  exp_msg=$(expected_message_for "$test_name")

  test_failed=0
  message_found=0
  utp_error_found=0

  if [ -n "$exp_msg" ]; then
    while IFS= read -r log_file; do
      if [ -z "$log_file" ]; then
        continue
      fi
      if grep -qi -- "$exp_msg" "$log_file" 2>/dev/null; then
        message_found=1
        break
      fi
    done < <(find "$UNITY_PROJECT_PATH/Builds/Logs" -maxdepth 1 -type f -name "*${test_name}*.log")
  fi

  # Look for error-level UTP entries for this test to treat as expected failure evidence.
  while IFS= read -r utp_file; do
    if [ -z "$utp_file" ]; then
      continue
    fi
    if grep -qi '"severity"[[:space:]]*:[[:space:]]*"\(Error\|Exception\|Assert\)"' "$utp_file" 2>/dev/null; then
      utp_error_found=1
      break
    fi
  done < <(find "$UNITY_PROJECT_PATH/Builds/Logs" -maxdepth 1 -type f -name "*${test_name}*-utp-json.log")

  if [ "$expected" -eq 0 ]; then
    if [ "$validate_rc" -ne 0 ] || [ "$build_rc" -ne 0 ]; then
      echo "::error::Test $test_name was expected to succeed but failed (validate_rc=$validate_rc, build_rc=$build_rc)"
      test_failed=1
    fi
    if [ "$utp_error_found" -eq 1 ]; then
      echo "::error::Test $test_name produced UTP errors but was expected to succeed"
      test_failed=1
    fi
    if [ -n "$exp_msg" ] && [ "$message_found" -eq 0 ]; then
      echo "::error::Test $test_name did not emit expected message '$exp_msg'"
      test_failed=1
    fi
  else
    if [ "$validate_rc" -ne 0 ] || [ "$build_rc" -ne 0 ] || [ "$message_found" -eq 1 ] || [ "$utp_error_found" -eq 1 ]; then
      : # Expected failure observed
    else
      echo "::error::Test $test_name was expected to fail but succeeded"
      test_failed=1
    fi

    # Only insist on the expected message if both invocations claimed success.
    if [ -n "$exp_msg" ] && [ "$message_found" -eq 0 ] && [ "$validate_rc" -eq 0 ] && [ "$build_rc" -eq 0 ]; then
      echo "::error::Test $test_name did not emit expected message '$exp_msg'"
      test_failed=1
    fi
  fi

  if [ "$test_failed" -eq 0 ]; then
    echo "::notice::Test $test_name behaved as expected (validate_rc=$validate_rc, build_rc=$build_rc)"
  else
    failures=$((failures+1))
  fi

  test_artifacts="$GITHUB_WORKSPACE/utp-artifacts/$test_name"
  mkdir -p "$test_artifacts"
  find "$GITHUB_WORKSPACE" -path "$test_artifacts" -prune -o -type f -name "*${test_name}*-utp-json.log" -print | while IFS= read -r utp_src; do
    [ -z "$utp_src" ] && continue
    dest_file="$test_artifacts/$(basename "$utp_src")"
    if [ ! -f "$dest_file" ]; then
      cp "$utp_src" "$dest_file" || true
    fi
  done || true

done

if [ "$failures" -gt 0 ]; then
  echo "::error::One or more tests did not meet expectations ($failures)"
  exit 1
fi

exit 0
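
Note that the script sets "set -uo pipefail" but deliberately omits -e: each unity-cli exit code is captured with "|| validate_rc=$?" and compared against the expected status, so an intentional failure must not abort the run. To exercise it outside CI, something like the following should work, assuming unity-cli is on PATH and the synthetic test sources live under unity-tests/ in the workspace; every value below is an illustrative assumption, not taken from this PR.

# Hypothetical local invocation; all paths and values here are assumptions.
export GITHUB_WORKSPACE="$PWD"                        # script writes utp-artifacts/ here
export UNITY_PROJECT_PATH="$PWD/UnityProject"         # must point at the Unity project
export BUILD_TARGET="StandaloneOSX"
export BUILD_ARGS=""                                  # optional extra build args
export TESTS_INPUT="CompilerWarnings,CompilerErrors"  # comma-separated test names
bash .github/actions/scripts/run-utp-tests.sh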
3 changes: 2 additions & 1 deletion .github/workflows/build-options.json
@@ -15,7 +15,8 @@
"2022.3.*",
"6000.0.x",
"6000.1.*",
"6000.2"
"6000.2",
"6000"
],
"include": [
{
2 changes: 2 additions & 0 deletions .github/workflows/integration-tests.yml
@@ -15,6 +15,7 @@ jobs:
     runs-on: ubuntu-latest
     permissions:
       contents: read
+      checks: write # to publish unit test results via the GitHub Checks API
     steps:
       - uses: actions/checkout@v6
         with:
@@ -32,6 +33,7 @@
     name: build ${{ matrix.jobs.name }}
     permissions:
       contents: read
+      checks: write # required by nested unity-build workflow
     strategy:
       matrix: ${{ fromJSON(needs.setup.outputs.jobs) }}
       fail-fast: false
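
As an aside on the checks: write comment above: GitHub caps a reusable workflow's permissions at whatever the calling job grants, so the nested unity-build workflow can only publish check runs if the caller declares it. A minimal sketch of that calling shape follows; the reusable workflow path is an assumption.

# Hypothetical caller job; the reusable workflow path is an assumption.
jobs:
  build:
    permissions:
      contents: read
      checks: write  # the nested workflow's permissions cannot exceed these
    uses: ./.github/workflows/unity-build.yml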
2 changes: 1 addition & 1 deletion .github/workflows/publish.yml
@@ -1,4 +1,4 @@
-name: Publish
+name: publish
 on:
   push:
     branches: [main]