# NOTE: removed web-page scraping artifacts (page chrome and duplicated
# "Deployment E2E Tests #262" run titles) that are not part of the workflow.
# End-to-end deployment tests that deploy Aspire applications to real Azure infrastructure
#
# Triggers:
# - workflow_dispatch: Manual trigger with scenario selection
# - schedule: Nightly at 03:00 UTC
# - /deployment-test command on PRs (via deployment-test-command.yml)
#
# Security:
# - Uses OIDC (Workload Identity Federation) for Azure authentication
# - No stored Azure secrets
# - Only dotnet org members can trigger via PR command
#
name: Deployment E2E Tests

on:
  workflow_dispatch:
    inputs:
      pr_number:
        description: 'PR number to test (for testing PR builds)'
        required: false
        type: string
        default: ''
  schedule:
    # Run nightly at 03:00 UTC
    - cron: '0 3 * * *'

# Limit concurrent runs to avoid Azure quota issues
concurrency:
  group: deployment-e2e-${{ github.ref }}
  cancel-in-progress: true
jobs:
  # Post "starting" comment to PR when triggered via /deployment-test command
  notify-start:
    name: Notify PR
    runs-on: ubuntu-latest
    # Only in the upstream repo, and only when a PR number was supplied
    # (scheduled runs have no inputs, so inputs.pr_number coerces to '').
    if: ${{ github.repository_owner == 'microsoft' && inputs.pr_number != '' }}
    permissions:
      pull-requests: write
    steps:
      - name: Post starting comment
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          PR_NUMBER="${{ inputs.pr_number }}"
          RUN_URL="https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
          gh pr comment "${PR_NUMBER}" --repo "${{ github.repository }}" --body \
            "🚀 **Deployment tests starting** on PR #${PR_NUMBER}...
          This will deploy to real Azure infrastructure. Results will be posted here when complete.
          [View workflow run](${RUN_URL})"
# Enumerate test classes to build the matrix
  enumerate:
    name: Enumerate Tests
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'microsoft' }}
    permissions:
      contents: read
    outputs:
      # JSON matrix consumed by the deploy-test job via fromJson()
      matrix: ${{ steps.enumerate.outputs.all_tests }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - uses: ./.github/actions/enumerate-tests
        id: enumerate
        with:
          buildArgs: '/p:OnlyDeploymentTests=true'
      - name: Display test matrix
        run: |
          echo "Deployment test matrix:"
          echo '${{ steps.enumerate.outputs.all_tests }}' | jq .
# Build solution and CLI once, share via artifacts
  build:
    name: Build
    runs-on: 8-core-ubuntu-latest
    if: ${{ github.repository_owner == 'microsoft' }}
    permissions:
      contents: read
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - name: Setup .NET
        uses: actions/setup-dotnet@c2fa09f4bde5ebb9d1777cf28262a3eb3db3ced7 # v5.2.0
        with:
          global-json-file: global.json
      - name: Restore solution
        run: ./restore.sh
      - name: Build solution and pack CLI
        run: |
          # Build the full solution and pack CLI for local testing
          ./build.sh --build --pack -c Release
        env:
          # Skip native build to save time - we'll use the non-native CLI.
          # Quoted so the consumer receives the literal string "true".
          SkipNativeBuild: "true"
      - name: Prepare CLI artifacts
        run: |
          # Create a clean artifact directory with CLI and packages
          ARTIFACT_DIR="${{ github.workspace }}/cli-artifacts"
          mkdir -p "$ARTIFACT_DIR/bin"
          mkdir -p "$ARTIFACT_DIR/packages"
          # Copy CLI binary and dependencies
          cp -r "${{ github.workspace }}/artifacts/bin/Aspire.Cli/Release/net10.0/"* "$ARTIFACT_DIR/bin/"
          # Copy NuGet packages
          PACKAGES_DIR="${{ github.workspace }}/artifacts/packages/Release/Shipping"
          if [ -d "$PACKAGES_DIR" ]; then
            find "$PACKAGES_DIR" -name "*.nupkg" -exec cp {} "$ARTIFACT_DIR/packages/" \;
          fi
          echo "CLI artifacts prepared:"
          ls -la "$ARTIFACT_DIR/bin/"
          echo "Package count: $(find "$ARTIFACT_DIR/packages" -name "*.nupkg" | wc -l)"
      - name: Upload CLI artifacts
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: aspire-cli-artifacts
          path: ${{ github.workspace }}/cli-artifacts/
          retention-days: 1
# Run each test class in parallel
  deploy-test:
    name: Deploy (${{ matrix.shortname }})
    needs: [enumerate, build]
    # Skip entirely when enumeration produced an empty or missing matrix.
    if: ${{ needs.enumerate.outputs.matrix != '{"include":[]}' && needs.enumerate.outputs.matrix != '' }}
    runs-on: 8-core-ubuntu-latest
    environment: deployment-testing
    permissions:
      id-token: write # For OIDC Azure login
      contents: read
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.enumerate.outputs.matrix) }}
    env:
      ASPIRE_DEPLOYMENT_TEST_SUBSCRIPTION: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
      ASPIRE_DEPLOYMENT_TEST_RG_PREFIX: ${{ vars.ASPIRE_DEPLOYMENT_TEST_RG_PREFIX || 'aspire-e2e' }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - name: Setup .NET
        uses: actions/setup-dotnet@c2fa09f4bde5ebb9d1777cf28262a3eb3db3ced7 # v5.2.0
        with:
          global-json-file: global.json
      - name: Restore and build test project
        run: |
          ./restore.sh
          ./build.sh -restore -ci -build -projects ${{ github.workspace }}/tests/Aspire.Deployment.EndToEnd.Tests/Aspire.Deployment.EndToEnd.Tests.csproj -c Release
        env:
          # Quoted so the consumer receives the literal string "true".
          SkipNativeBuild: "true"
      - name: Download CLI artifacts
        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
        with:
          name: aspire-cli-artifacts
          path: ${{ github.workspace }}/cli-artifacts
      - name: Install Aspire CLI from artifacts
        run: |
          ASPIRE_HOME="$HOME/.aspire"
          mkdir -p "$ASPIRE_HOME/bin"
          # Copy CLI binary and dependencies
          cp -r "${{ github.workspace }}/cli-artifacts/bin/"* "$ASPIRE_HOME/bin/"
          chmod +x "$ASPIRE_HOME/bin/aspire"
          # Add to PATH for this job
          echo "$ASPIRE_HOME/bin" >> "$GITHUB_PATH"
          # Set up NuGet hive for local packages
          HIVE_DIR="$ASPIRE_HOME/hives/local/packages"
          mkdir -p "$HIVE_DIR"
          cp "${{ github.workspace }}/cli-artifacts/packages/"*.nupkg "$HIVE_DIR/" 2>/dev/null || true
          # Configure CLI to use local channel
          "$ASPIRE_HOME/bin/aspire" config set channel local --global || true
          echo "✅ Aspire CLI installed:"
          "$ASPIRE_HOME/bin/aspire" --version
      - name: Azure Login (OIDC)
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        env:
          AZURE_CLIENT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_CLIENT_ID }}
          AZURE_TENANT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_TENANT_ID }}
          AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
        with:
          script: |
            const token = await core.getIDToken('api://AzureADTokenExchange');
            core.setSecret(token);
            // Login directly - token never leaves this step
            await exec.exec('az', [
              'login', '--service-principal',
              '--username', process.env.AZURE_CLIENT_ID,
              '--tenant', process.env.AZURE_TENANT_ID,
              '--federated-token', token,
              '--allow-no-subscriptions'
            ]);
            await exec.exec('az', [
              'account', 'set',
              '--subscription', process.env.AZURE_SUBSCRIPTION_ID
            ]);
      - name: Verify Azure authentication
        run: |
          echo "Verifying Azure authentication..."
          az account show --query "{subscriptionId:id, tenantId:tenantId, user:user.name}" -o table
          echo "✅ Azure authentication successful"
      - name: Verify Docker is running
        run: |
          echo "Verifying Docker daemon..."
          docker version
          docker info | head -20
          echo "✅ Docker is available"
      - name: Run deployment test (${{ matrix.shortname }})
        id: run_tests
        env:
          GITHUB_PR_NUMBER: ${{ inputs.pr_number || '' }}
          GITHUB_PR_HEAD_SHA: ${{ github.sha }}
          AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
          AZURE_TENANT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_TENANT_ID }}
          AZURE_CLIENT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_CLIENT_ID }}
          Azure__SubscriptionId: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
          Azure__Location: westus3
          GH_TOKEN: ${{ github.token }}
        run: |
          # Record failure in an output instead of failing the step, so the
          # result/recording uploads below always run before we fail the job.
          ./dotnet.sh test tests/Aspire.Deployment.EndToEnd.Tests/Aspire.Deployment.EndToEnd.Tests.csproj \
            -c Release \
            --logger "trx;LogFileName=${{ matrix.shortname }}.trx" \
            --results-directory ${{ github.workspace }}/testresults \
            -- \
            --filter-not-trait "quarantined=true" \
            ${{ matrix.extraTestArgs }} \
            || echo "test_failed=true" >> "$GITHUB_OUTPUT"
      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: deployment-test-results-${{ matrix.shortname }}
          path: |
            ${{ github.workspace }}/testresults/
          retention-days: 30
      - name: Upload recordings
        if: always()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: deployment-test-recordings-${{ matrix.shortname }}
          path: |
            ${{ github.workspace }}/testresults/recordings/
          retention-days: 30
          if-no-files-found: ignore
      - name: Check for test failures
        if: steps.run_tests.outputs.test_failed == 'true'
        run: |
          echo "::error::Deployment test ${{ matrix.shortname }} failed. Check the test results artifact for details."
          exit 1
# Create GitHub issue on nightly failure
  create_issue_on_failure:
    name: Create Issue on Failure
    needs: [deploy-test]
    runs-on: ubuntu-latest
    # Only file issues for scheduled (nightly) runs that failed.
    if: ${{ failure() && github.event_name == 'schedule' }}
    permissions:
      issues: write
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - name: Create GitHub Issue
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const runUrl = `https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
            const date = new Date().toISOString().split('T')[0];
            const issueTitle = `[Deployment E2E] Nightly test failure - ${date}`;
            const issueBody = `## Deployment E2E Test Failure
            The nightly deployment E2E tests failed on ${date}.
            **Workflow Run:** ${runUrl}
            ### Next Steps
            1. Check the workflow run for detailed error logs
            2. Download test artifacts for asciinema recordings
            3. Investigate and fix the failing tests
            ### Labels
            This issue was automatically created by the deployment E2E test workflow.
            /cc @microsoft/aspire-team
            `;
            // Check if a similar issue already exists (created today)
            const existingIssues = await github.rest.issues.listForRepo({
              owner: context.repo.owner,
              repo: context.repo.repo,
              state: 'open',
              labels: 'area-testing,deployment-e2e',
              per_page: 10
            });
            const todayIssue = existingIssues.data.find(issue =>
              issue.title.includes(date) && issue.title.includes('[Deployment E2E]')
            );
            if (todayIssue) {
              console.log(`Issue already exists for today: ${todayIssue.html_url}`);
              // Add a comment instead
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: todayIssue.number,
                body: `Another failure occurred. See: ${runUrl}`
              });
            } else {
              // Create new issue
              const issue = await github.rest.issues.create({
                owner: context.repo.owner,
                repo: context.repo.repo,
                title: issueTitle,
                body: issueBody,
                labels: ['area-testing', 'deployment-e2e']
              });
              console.log(`Created issue: ${issue.data.html_url}`);
            }
# Post completion comment back to PR when triggered via /deployment-test command
  post_pr_comment:
    name: Post PR Comment
    needs: [deploy-test]
    # always() so we comment even when deploy-test failed; the pr_number guard
    # skips scheduled runs (inputs coerce to '' there).
    if: ${{ always() && inputs.pr_number != '' }}
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
      actions: read
    steps:
      - name: Get job results and download recording artifacts
        id: get_results
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          script: |
            const fs = require('fs');
            const path = require('path');
            // Get all jobs for this workflow run to determine per-test results
            const jobs = await github.paginate(
              github.rest.actions.listJobsForWorkflowRun,
              {
                owner: context.repo.owner,
                repo: context.repo.repo,
                run_id: context.runId,
                per_page: 100
              }
            );
            console.log(`Total jobs found: ${jobs.length}`);
            // Filter for deploy-test matrix jobs (format: "Deploy (TestClassName)")
            const deployJobs = jobs.filter(job => job.name.startsWith('Deploy ('));
            const passedTests = [];
            const failedTests = [];
            const cancelledTests = [];
            for (const job of deployJobs) {
              // Extract test name from job name "Deploy (TestClassName)"
              const match = job.name.match(/^Deploy \((.+)\)$/);
              const testName = match ? match[1] : job.name;
              console.log(`Job "${job.name}" - conclusion: ${job.conclusion}, status: ${job.status}`);
              if (job.conclusion === 'success') {
                passedTests.push(testName);
              } else if (job.conclusion === 'failure') {
                failedTests.push(testName);
              } else if (job.conclusion === 'cancelled') {
                cancelledTests.push(testName);
              }
            }
            console.log(`Passed: ${passedTests.length}, Failed: ${failedTests.length}, Cancelled: ${cancelledTests.length}`);
            // Output results for later steps
            core.setOutput('passed_tests', JSON.stringify(passedTests));
            core.setOutput('failed_tests', JSON.stringify(failedTests));
            core.setOutput('cancelled_tests', JSON.stringify(cancelledTests));
            core.setOutput('total_tests', passedTests.length + failedTests.length + cancelledTests.length);
            // List all artifacts for the current workflow run
            const allArtifacts = await github.paginate(
              github.rest.actions.listWorkflowRunArtifacts,
              {
                owner: context.repo.owner,
                repo: context.repo.repo,
                run_id: context.runId,
                per_page: 100
              }
            );
            console.log(`Total artifacts found: ${allArtifacts.length}`);
            // Filter for deployment test recording artifacts
            const recordingArtifacts = allArtifacts.filter(a =>
              a.name.startsWith('deployment-test-recordings-')
            );
            console.log(`Found ${recordingArtifacts.length} recording artifacts`);
            // Create recordings directory
            const recordingsDir = 'recordings';
            fs.mkdirSync(recordingsDir, { recursive: true });
            // Download each artifact
            for (const artifact of recordingArtifacts) {
              console.log(`Downloading ${artifact.name}...`);
              const download = await github.rest.actions.downloadArtifact({
                owner: context.repo.owner,
                repo: context.repo.repo,
                artifact_id: artifact.id,
                archive_format: 'zip'
              });
              const artifactPath = path.join(recordingsDir, `${artifact.name}.zip`);
              fs.writeFileSync(artifactPath, Buffer.from(download.data));
              console.log(`Saved to ${artifactPath}`);
            }
            core.setOutput('artifact_count', recordingArtifacts.length);
      - name: Extract recordings from artifacts
        shell: bash
        run: |
          mkdir -p cast_files
          for zipfile in recordings/*.zip; do
            if [ -f "$zipfile" ]; then
              echo "Extracting $zipfile..."
              # Artifact zip name: deployment-test-recordings-{shortname}.zip
              ARTIFACT_NAME=$(basename "$zipfile" .zip)
              SHORTNAME=${ARTIFACT_NAME#deployment-test-recordings-}
              EXTRACT_DIR="recordings/extracted_${ARTIFACT_NAME}"
              unzip -o "$zipfile" -d "$EXTRACT_DIR" || true
              # Rename .cast files to use the shortname (matching the job/test name)
              CAST_INDEX=0
              while IFS= read -r -d '' castfile; do
                if [ $CAST_INDEX -eq 0 ]; then
                  cp "$castfile" "cast_files/${SHORTNAME}.cast"
                else
                  cp "$castfile" "cast_files/${SHORTNAME}-${CAST_INDEX}.cast"
                fi
                CAST_INDEX=$((CAST_INDEX + 1))
              done < <(find "$EXTRACT_DIR" -name "*.cast" -print0)
            fi
          done
          echo "Found recordings:"
          ls -la cast_files/ || echo "No .cast files found"
      - name: Upload recordings to asciinema and post comment
        env:
          GH_TOKEN: ${{ github.token }}
          PASSED_TESTS: ${{ steps.get_results.outputs.passed_tests }}
          FAILED_TESTS: ${{ steps.get_results.outputs.failed_tests }}
          CANCELLED_TESTS: ${{ steps.get_results.outputs.cancelled_tests }}
          TOTAL_TESTS: ${{ steps.get_results.outputs.total_tests }}
        shell: bash
        run: |
          PR_NUMBER="${{ inputs.pr_number }}"
          RUN_ID="${{ github.run_id }}"
          RUN_URL="https://github.com/${{ github.repository }}/actions/runs/${RUN_ID}"
          # Parse the test results from JSON
          PASSED_COUNT=$(echo "$PASSED_TESTS" | jq 'length')
          FAILED_COUNT=$(echo "$FAILED_TESTS" | jq 'length')
          CANCELLED_COUNT=$(echo "$CANCELLED_TESTS" | jq 'length')
          # Determine overall status
          if [ "$FAILED_COUNT" -gt 0 ]; then
            EMOJI="❌"
            STATUS="failed"
          elif [ "$CANCELLED_COUNT" -gt 0 ] && [ "$PASSED_COUNT" -eq 0 ]; then
            EMOJI="⚠️"
            STATUS="cancelled"
          elif [ "$PASSED_COUNT" -gt 0 ]; then
            EMOJI="✅"
            STATUS="passed"
          else
            EMOJI="❓"
            STATUS="unknown"
          fi
          # Upload recordings first so we can include links in the unified table
          RECORDINGS_DIR="cast_files"
          declare -A RECORDING_URLS
          if [ -d "$RECORDINGS_DIR" ] && compgen -G "$RECORDINGS_DIR"/*.cast > /dev/null; then
            pip install --quiet asciinema
            # Retry configuration for asciinema uploads
            MAX_UPLOAD_RETRIES=5
            RETRY_BASE_DELAY_SECONDS=30
            UPLOAD_COUNT=0
            for castfile in "$RECORDINGS_DIR"/*.cast; do
              if [ -f "$castfile" ]; then
                filename=$(basename "$castfile" .cast)
                echo "Uploading $castfile..."
                # Upload to asciinema with retry logic for transient failures
                ASCIINEMA_URL=""
                for attempt in $(seq 1 "$MAX_UPLOAD_RETRIES"); do
                  UPLOAD_OUTPUT=$(asciinema upload "$castfile" 2>&1) || true
                  ASCIINEMA_URL=$(echo "$UPLOAD_OUTPUT" | grep -oP 'https://asciinema\.org/a/[a-zA-Z0-9_-]+' | head -1) || true
                  if [ -n "$ASCIINEMA_URL" ]; then
                    break
                  fi
                  if [ "$attempt" -lt "$MAX_UPLOAD_RETRIES" ]; then
                    DELAY=$((attempt * RETRY_BASE_DELAY_SECONDS))
                    echo "Upload attempt $attempt failed, retrying in ${DELAY}s..."
                    sleep "$DELAY"
                  fi
                done
                if [ -n "$ASCIINEMA_URL" ]; then
                  RECORDING_URLS["$filename"]="$ASCIINEMA_URL"
                  echo "Uploaded: $ASCIINEMA_URL"
                  UPLOAD_COUNT=$((UPLOAD_COUNT + 1))
                else
                  RECORDING_URLS["$filename"]="FAILED"
                  echo "Failed to upload $castfile after $MAX_UPLOAD_RETRIES attempts"
                fi
              fi
            done
            echo "Uploaded $UPLOAD_COUNT recordings"
          else
            echo "No recordings found in $RECORDINGS_DIR"
          fi
          # Build the comment with summary outside collapsible and details inside.
          # NOTE: continuation lines of quoted strings stay at the block-scalar
          # base indent so no leading whitespace leaks into the markdown.
          COMMENT_MARKER="<!-- deployment-e2e-tests -->"
          COMMENT_BODY="${COMMENT_MARKER}
          ${EMOJI} **Deployment E2E Tests ${STATUS}** — ${PASSED_COUNT} passed, ${FAILED_COUNT} failed, ${CANCELLED_COUNT} cancelled
          <details>
          <summary>View test results and recordings</summary>
          [View workflow run](${RUN_URL})
          | Test | Result | Recording |
          |------|--------|-----------|"
          # Add passed tests
          while IFS= read -r test; do
            RECORDING_LINK=""
            if [ -n "${RECORDING_URLS[$test]+x}" ]; then
              if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
                RECORDING_LINK="❌ Upload failed"
              else
                RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
              fi
            fi
            COMMENT_BODY="${COMMENT_BODY}
          | ${test} | ✅ Passed | ${RECORDING_LINK} |"
          done < <(echo "$PASSED_TESTS" | jq -r '.[]')
          # Add failed tests
          while IFS= read -r test; do
            RECORDING_LINK=""
            if [ -n "${RECORDING_URLS[$test]+x}" ]; then
              if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
                RECORDING_LINK="❌ Upload failed"
              else
                RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
              fi
            fi
            COMMENT_BODY="${COMMENT_BODY}
          | ${test} | ❌ Failed | ${RECORDING_LINK} |"
          done < <(echo "$FAILED_TESTS" | jq -r '.[]')
          # Add cancelled tests
          while IFS= read -r test; do
            RECORDING_LINK=""
            if [ -n "${RECORDING_URLS[$test]+x}" ]; then
              if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
                RECORDING_LINK="❌ Upload failed"
              else
                RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
              fi
            fi
            COMMENT_BODY="${COMMENT_BODY}
          | ${test} | ⚠️ Cancelled | ${RECORDING_LINK} |"
          done < <(echo "$CANCELLED_TESTS" | jq -r '.[]')
          COMMENT_BODY="${COMMENT_BODY}
          </details>"
          # Delete any existing deployment test comments, then post the new one
          EXISTING_COMMENT_IDS=$(gh api graphql -f query='
          query($owner: String!, $repo: String!, $pr: Int!) {
            repository(owner: $owner, name: $repo) {
              pullRequest(number: $pr) {
                comments(first: 100) {
                  nodes {
                    databaseId
                    author { login }
                    body
                  }
                }
              }
            }
          }' -f owner="${{ github.repository_owner }}" -f repo="${{ github.event.repository.name }}" -F pr="$PR_NUMBER" \
            --jq '.data.repository.pullRequest.comments.nodes[] | select(.author.login == "github-actions" and (.body | contains("'"${COMMENT_MARKER}"'"))) | .databaseId') || true
          for COMMENT_ID in $EXISTING_COMMENT_IDS; do
            echo "Deleting old comment $COMMENT_ID"
            gh api \
              --method DELETE \
              -H "Accept: application/vnd.github+json" \
              "/repos/${{ github.repository }}/issues/comments/${COMMENT_ID}" || true
          done
          echo "Creating new comment on PR #${PR_NUMBER}"
          gh pr comment "${PR_NUMBER}" --repo "${{ github.repository }}" --body "$COMMENT_BODY"
          echo "Posted comment to PR #${PR_NUMBER}"