-
Notifications
You must be signed in to change notification settings - Fork 866
649 lines (554 loc) · 24.5 KB
/
deployment-tests.yml
File metadata and controls
649 lines (554 loc) · 24.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
# End-to-end deployment tests that deploy Aspire applications to real Azure infrastructure
#
# Triggers:
# - workflow_dispatch: Manual trigger with scenario selection
# - schedule: Nightly at 03:00 UTC
# - /deployment-test command on PRs (via deployment-test-command.yml)
#
# Security:
# - Uses OIDC (Workload Identity Federation) for Azure authentication
# - No stored Azure secrets
# - Only dotnet org members can trigger via PR command
#
name: Deployment E2E Tests

on:
  workflow_dispatch:
    inputs:
      pr_number:
        description: 'PR number to test (for testing PR builds)'
        required: false
        type: string
        default: ''
  schedule:
    # Run nightly at 03:00 UTC
    - cron: '0 3 * * *'

# Limit concurrent runs to avoid Azure quota issues
concurrency:
  group: deployment-e2e-${{ github.ref }}
  cancel-in-progress: true
jobs:
  # Post "starting" comment to PR when triggered via /deployment-test command
  notify-start:
    name: Notify PR
    runs-on: ubuntu-latest
    # Only runs for PR-triggered runs in the upstream repository (not forks, not nightly)
    if: ${{ github.repository_owner == 'microsoft' && inputs.pr_number != '' }}
    permissions:
      pull-requests: write
    steps:
      - name: Post starting comment
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          PR_NUMBER="${{ inputs.pr_number }}"
          RUN_URL="https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
          gh pr comment "${PR_NUMBER}" --repo "${{ github.repository }}" --body \
          "🚀 **Deployment tests starting** on PR #${PR_NUMBER}...
          This will deploy to real Azure infrastructure. Results will be posted here when complete.
          [View workflow run](${RUN_URL})"
# Enumerate test classes to build the matrix
  enumerate:
    name: Enumerate Tests
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'microsoft' }}
    permissions:
      contents: read
    outputs:
      # JSON matrix consumed by the deploy-test job's strategy
      matrix: ${{ steps.enumerate.outputs.all_tests }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - uses: ./.github/actions/enumerate-tests
        id: enumerate
        with:
          buildArgs: '/p:OnlyDeploymentTests=true'
      - name: Display test matrix
        run: |
          echo "Deployment test matrix:"
          echo '${{ steps.enumerate.outputs.all_tests }}' | jq .
# Build solution and CLI once, share via artifacts
  build:
    name: Build
    runs-on: 8-core-ubuntu-latest
    if: ${{ github.repository_owner == 'microsoft' }}
    permissions:
      contents: read
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - name: Setup .NET
        uses: actions/setup-dotnet@c2fa09f4bde5ebb9d1777cf28262a3eb3db3ced7 # v5.2.0
        with:
          global-json-file: global.json
      - name: Restore solution
        run: ./restore.sh
      - name: Build solution and pack CLI
        run: |
          # Build the full solution and pack CLI for local testing
          ./build.sh --build --pack -c Release
        env:
          # Skip native build to save time - we'll use the non-native CLI
          # (quoted so the consumer receives the literal string "true")
          SkipNativeBuild: "true"
      - name: Prepare CLI artifacts
        run: |
          # Create a clean artifact directory with CLI and packages
          ARTIFACT_DIR="${{ github.workspace }}/cli-artifacts"
          mkdir -p "$ARTIFACT_DIR/bin"
          mkdir -p "$ARTIFACT_DIR/packages"
          # Copy CLI binary and dependencies
          cp -r "${{ github.workspace }}/artifacts/bin/Aspire.Cli/Release/net10.0/"* "$ARTIFACT_DIR/bin/"
          # Copy NuGet packages
          PACKAGES_DIR="${{ github.workspace }}/artifacts/packages/Release/Shipping"
          if [ -d "$PACKAGES_DIR" ]; then
            find "$PACKAGES_DIR" -name "*.nupkg" -exec cp {} "$ARTIFACT_DIR/packages/" \;
          fi
          echo "CLI artifacts prepared:"
          ls -la "$ARTIFACT_DIR/bin/"
          echo "Package count: $(find "$ARTIFACT_DIR/packages" -name "*.nupkg" | wc -l)"
      - name: Upload CLI artifacts
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: aspire-cli-artifacts
          path: ${{ github.workspace }}/cli-artifacts/
          retention-days: 1
# Run each test class in parallel
  deploy-test:
    name: Deploy (${{ matrix.shortname }})
    needs: [enumerate, build]
    # Skip entirely when enumeration produced no deployment tests
    if: ${{ needs.enumerate.outputs.matrix != '{"include":[]}' && needs.enumerate.outputs.matrix != '' }}
    runs-on: 8-core-ubuntu-latest
    environment: deployment-testing
    permissions:
      id-token: write # For OIDC Azure login
      contents: read
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.enumerate.outputs.matrix) }}
    env:
      ASPIRE_DEPLOYMENT_TEST_SUBSCRIPTION: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
      ASPIRE_DEPLOYMENT_TEST_RG_PREFIX: ${{ vars.ASPIRE_DEPLOYMENT_TEST_RG_PREFIX || 'aspire-e2e' }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - name: Setup .NET
        uses: actions/setup-dotnet@c2fa09f4bde5ebb9d1777cf28262a3eb3db3ced7 # v5.2.0
        with:
          global-json-file: global.json
      - name: Restore and build test project
        run: |
          ./restore.sh
          ./build.sh -restore -ci -build -projects ${{ github.workspace }}/tests/Aspire.Deployment.EndToEnd.Tests/Aspire.Deployment.EndToEnd.Tests.csproj -c Release
        env:
          # Quoted so the consumer receives the literal string "true"
          SkipNativeBuild: "true"
      - name: Download CLI artifacts
        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
        with:
          name: aspire-cli-artifacts
          path: ${{ github.workspace }}/cli-artifacts
      - name: Install Aspire CLI from artifacts
        run: |
          ASPIRE_HOME="$HOME/.aspire"
          mkdir -p "$ASPIRE_HOME/bin"
          # Copy CLI binary and dependencies
          cp -r "${{ github.workspace }}/cli-artifacts/bin/"* "$ASPIRE_HOME/bin/"
          chmod +x "$ASPIRE_HOME/bin/aspire"
          # Add to PATH for this job
          echo "$ASPIRE_HOME/bin" >> "$GITHUB_PATH"
          # Set up NuGet hive for local packages
          HIVE_DIR="$ASPIRE_HOME/hives/local/packages"
          mkdir -p "$HIVE_DIR"
          cp "${{ github.workspace }}/cli-artifacts/packages/"*.nupkg "$HIVE_DIR/" 2>/dev/null || true
          # Configure CLI to use local channel
          "$ASPIRE_HOME/bin/aspire" config set channel local --global || true
          echo "✅ Aspire CLI installed:"
          "$ASPIRE_HOME/bin/aspire" --version
      - name: Azure Login (OIDC)
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        env:
          AZURE_CLIENT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_CLIENT_ID }}
          AZURE_TENANT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_TENANT_ID }}
          AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
        with:
          script: |
            const token = await core.getIDToken('api://AzureADTokenExchange');
            core.setSecret(token);
            // Login directly - token never leaves this step
            await exec.exec('az', [
              'login', '--service-principal',
              '--username', process.env.AZURE_CLIENT_ID,
              '--tenant', process.env.AZURE_TENANT_ID,
              '--federated-token', token,
              '--allow-no-subscriptions'
            ]);
            await exec.exec('az', [
              'account', 'set',
              '--subscription', process.env.AZURE_SUBSCRIPTION_ID
            ]);
      - name: Verify Azure authentication
        run: |
          echo "Verifying Azure authentication..."
          az account show --query "{subscriptionId:id, tenantId:tenantId, user:user.name}" -o table
          echo "✅ Azure authentication successful"
      - name: Verify Docker is running
        run: |
          echo "Verifying Docker daemon..."
          docker version
          docker info | head -20
          echo "✅ Docker is available"
      - name: Run deployment test (${{ matrix.shortname }})
        id: run_tests
        env:
          GITHUB_PR_NUMBER: ${{ inputs.pr_number || '' }}
          GITHUB_PR_HEAD_SHA: ${{ github.sha }}
          AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
          AZURE_TENANT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_TENANT_ID }}
          AZURE_CLIENT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_CLIENT_ID }}
          Azure__SubscriptionId: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
          Azure__Location: westus3
          GH_TOKEN: ${{ github.token }}
        # Capture failure in an output instead of failing here so the
        # artifact-upload steps below always run before the job fails.
        run: |
          ./dotnet.sh test tests/Aspire.Deployment.EndToEnd.Tests/Aspire.Deployment.EndToEnd.Tests.csproj \
            -c Release \
            --logger "trx;LogFileName=${{ matrix.shortname }}.trx" \
            --results-directory ${{ github.workspace }}/testresults \
            -- \
            --filter-not-trait "quarantined=true" \
            ${{ matrix.extraTestArgs }} \
            || echo "test_failed=true" >> $GITHUB_OUTPUT
      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: deployment-test-results-${{ matrix.shortname }}
          path: |
            ${{ github.workspace }}/testresults/
          retention-days: 30
      - name: Upload recordings
        if: always()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: deployment-test-recordings-${{ matrix.shortname }}
          path: |
            ${{ github.workspace }}/testresults/recordings/
          retention-days: 30
          if-no-files-found: ignore
      - name: Check for test failures
        if: steps.run_tests.outputs.test_failed == 'true'
        run: |
          echo "::error::Deployment test ${{ matrix.shortname }} failed. Check the test results artifact for details."
          exit 1
# Create GitHub issue on nightly failure
  create_issue_on_failure:
    name: Create Issue on Failure
    needs: [deploy-test]
    runs-on: ubuntu-latest
    # Only file issues for scheduled (nightly) runs, not PR-triggered ones
    if: ${{ failure() && github.event_name == 'schedule' }}
    permissions:
      issues: write
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - name: Create GitHub Issue
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const runUrl = `https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
            const date = new Date().toISOString().split('T')[0];
            const issueTitle = `[Deployment E2E] Nightly test failure - ${date}`;
            const issueBody = `## Deployment E2E Test Failure
            The nightly deployment E2E tests failed on ${date}.
            **Workflow Run:** ${runUrl}
            ### Next Steps
            1. Check the workflow run for detailed error logs
            2. Download test artifacts for asciinema recordings
            3. Investigate and fix the failing tests
            ### Labels
            This issue was automatically created by the deployment E2E test workflow.
            /cc @microsoft/aspire-team
            `;
            // Check if a similar issue already exists (created today)
            const existingIssues = await github.rest.issues.listForRepo({
              owner: context.repo.owner,
              repo: context.repo.repo,
              state: 'open',
              labels: 'area-testing,deployment-e2e',
              per_page: 10
            });
            const todayIssue = existingIssues.data.find(issue =>
              issue.title.includes(date) && issue.title.includes('[Deployment E2E]')
            );
            if (todayIssue) {
              console.log(`Issue already exists for today: ${todayIssue.html_url}`);
              // Add a comment instead
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: todayIssue.number,
                body: `Another failure occurred. See: ${runUrl}`
              });
            } else {
              // Create new issue
              const issue = await github.rest.issues.create({
                owner: context.repo.owner,
                repo: context.repo.repo,
                title: issueTitle,
                body: issueBody,
                labels: ['area-testing', 'deployment-e2e']
              });
              console.log(`Created issue: ${issue.data.html_url}`);
            }
# Post completion comment back to PR when triggered via /deployment-test command
  post_pr_comment:
    name: Post PR Comment
    needs: [deploy-test]
    runs-on: ubuntu-latest
    # always() so results are reported even when deploy-test failed
    if: ${{ always() && inputs.pr_number != '' }}
    permissions:
      pull-requests: write
      actions: read
    steps:
      - name: Get job results and download recording artifacts
        id: get_results
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          script: |
            const fs = require('fs');
            const path = require('path');
            // Get all jobs for this workflow run to determine per-test results
            const jobs = await github.paginate(
              github.rest.actions.listJobsForWorkflowRun,
              {
                owner: context.repo.owner,
                repo: context.repo.repo,
                run_id: context.runId,
                per_page: 100
              }
            );
            console.log(`Total jobs found: ${jobs.length}`);
            // Filter for deploy-test matrix jobs (format: "Deploy (TestClassName)")
            const deployJobs = jobs.filter(job => job.name.startsWith('Deploy ('));
            const passedTests = [];
            const failedTests = [];
            const cancelledTests = [];
            for (const job of deployJobs) {
              // Extract test name from job name "Deploy (TestClassName)"
              const match = job.name.match(/^Deploy \((.+)\)$/);
              const testName = match ? match[1] : job.name;
              console.log(`Job "${job.name}" - conclusion: ${job.conclusion}, status: ${job.status}`);
              if (job.conclusion === 'success') {
                passedTests.push(testName);
              } else if (job.conclusion === 'failure') {
                failedTests.push(testName);
              } else if (job.conclusion === 'cancelled') {
                cancelledTests.push(testName);
              }
            }
            console.log(`Passed: ${passedTests.length}, Failed: ${failedTests.length}, Cancelled: ${cancelledTests.length}`);
            // Output results for later steps
            core.setOutput('passed_tests', JSON.stringify(passedTests));
            core.setOutput('failed_tests', JSON.stringify(failedTests));
            core.setOutput('cancelled_tests', JSON.stringify(cancelledTests));
            core.setOutput('total_tests', passedTests.length + failedTests.length + cancelledTests.length);
            // List all artifacts for the current workflow run
            const allArtifacts = await github.paginate(
              github.rest.actions.listWorkflowRunArtifacts,
              {
                owner: context.repo.owner,
                repo: context.repo.repo,
                run_id: context.runId,
                per_page: 100
              }
            );
            console.log(`Total artifacts found: ${allArtifacts.length}`);
            // Filter for deployment test recording artifacts
            const recordingArtifacts = allArtifacts.filter(a =>
              a.name.startsWith('deployment-test-recordings-')
            );
            console.log(`Found ${recordingArtifacts.length} recording artifacts`);
            // Create recordings directory
            const recordingsDir = 'recordings';
            fs.mkdirSync(recordingsDir, { recursive: true });
            // Download each artifact
            for (const artifact of recordingArtifacts) {
              console.log(`Downloading ${artifact.name}...`);
              const download = await github.rest.actions.downloadArtifact({
                owner: context.repo.owner,
                repo: context.repo.repo,
                artifact_id: artifact.id,
                archive_format: 'zip'
              });
              const artifactPath = path.join(recordingsDir, `${artifact.name}.zip`);
              fs.writeFileSync(artifactPath, Buffer.from(download.data));
              console.log(`Saved to ${artifactPath}`);
            }
            core.setOutput('artifact_count', recordingArtifacts.length);
      - name: Extract recordings from artifacts
        shell: bash
        run: |
          mkdir -p cast_files
          for zipfile in recordings/*.zip; do
            if [ -f "$zipfile" ]; then
              echo "Extracting $zipfile..."
              # Artifact zip name: deployment-test-recordings-{shortname}.zip
              ARTIFACT_NAME=$(basename "$zipfile" .zip)
              SHORTNAME=${ARTIFACT_NAME#deployment-test-recordings-}
              EXTRACT_DIR="recordings/extracted_${ARTIFACT_NAME}"
              unzip -o "$zipfile" -d "$EXTRACT_DIR" || true
              # Rename .cast files to use the shortname (matching the job/test name)
              CAST_INDEX=0
              while IFS= read -r -d '' castfile; do
                if [ $CAST_INDEX -eq 0 ]; then
                  cp "$castfile" "cast_files/${SHORTNAME}.cast"
                else
                  cp "$castfile" "cast_files/${SHORTNAME}-${CAST_INDEX}.cast"
                fi
                CAST_INDEX=$((CAST_INDEX + 1))
              done < <(find "$EXTRACT_DIR" -name "*.cast" -print0)
            fi
          done
          echo "Found recordings:"
          ls -la cast_files/ || echo "No .cast files found"
      - name: Upload recordings to asciinema and post comment
        env:
          GH_TOKEN: ${{ github.token }}
          PASSED_TESTS: ${{ steps.get_results.outputs.passed_tests }}
          FAILED_TESTS: ${{ steps.get_results.outputs.failed_tests }}
          CANCELLED_TESTS: ${{ steps.get_results.outputs.cancelled_tests }}
          TOTAL_TESTS: ${{ steps.get_results.outputs.total_tests }}
        shell: bash
        run: |
          PR_NUMBER="${{ inputs.pr_number }}"
          RUN_ID="${{ github.run_id }}"
          RUN_URL="https://github.com/${{ github.repository }}/actions/runs/${RUN_ID}"
          TEST_RESULT="${{ needs.deploy-test.result }}"
          # Parse the test results from JSON
          PASSED_COUNT=$(echo "$PASSED_TESTS" | jq 'length')
          FAILED_COUNT=$(echo "$FAILED_TESTS" | jq 'length')
          CANCELLED_COUNT=$(echo "$CANCELLED_TESTS" | jq 'length')
          # Determine overall status
          if [ "$FAILED_COUNT" -gt 0 ]; then
            EMOJI="❌"
            STATUS="failed"
          elif [ "$CANCELLED_COUNT" -gt 0 ] && [ "$PASSED_COUNT" -eq 0 ]; then
            EMOJI="⚠️"
            STATUS="cancelled"
          elif [ "$PASSED_COUNT" -gt 0 ]; then
            EMOJI="✅"
            STATUS="passed"
          else
            EMOJI="❓"
            STATUS="unknown"
          fi
          # Upload recordings first so we can include links in the unified table
          RECORDINGS_DIR="cast_files"
          declare -A RECORDING_URLS
          if [ -d "$RECORDINGS_DIR" ] && compgen -G "$RECORDINGS_DIR"/*.cast > /dev/null; then
            pip install --quiet asciinema
            # Retry configuration for asciinema uploads
            MAX_UPLOAD_RETRIES=5
            RETRY_BASE_DELAY_SECONDS=30
            UPLOAD_COUNT=0
            for castfile in "$RECORDINGS_DIR"/*.cast; do
              if [ -f "$castfile" ]; then
                filename=$(basename "$castfile" .cast)
                echo "Uploading $castfile..."
                # Upload to asciinema with retry logic for transient failures
                ASCIINEMA_URL=""
                for attempt in $(seq 1 "$MAX_UPLOAD_RETRIES"); do
                  UPLOAD_OUTPUT=$(asciinema upload "$castfile" 2>&1) || true
                  ASCIINEMA_URL=$(echo "$UPLOAD_OUTPUT" | grep -oP 'https://asciinema\.org/a/[a-zA-Z0-9_-]+' | head -1) || true
                  if [ -n "$ASCIINEMA_URL" ]; then
                    break
                  fi
                  if [ "$attempt" -lt "$MAX_UPLOAD_RETRIES" ]; then
                    DELAY=$((attempt * RETRY_BASE_DELAY_SECONDS))
                    echo "Upload attempt $attempt failed, retrying in ${DELAY}s..."
                    sleep "$DELAY"
                  fi
                done
                if [ -n "$ASCIINEMA_URL" ]; then
                  RECORDING_URLS["$filename"]="$ASCIINEMA_URL"
                  echo "Uploaded: $ASCIINEMA_URL"
                  UPLOAD_COUNT=$((UPLOAD_COUNT + 1))
                else
                  RECORDING_URLS["$filename"]="FAILED"
                  echo "Failed to upload $castfile after $MAX_UPLOAD_RETRIES attempts"
                fi
              fi
            done
            echo "Uploaded $UPLOAD_COUNT recordings"
          else
            echo "No recordings found in $RECORDINGS_DIR"
          fi
          # Build the comment with summary outside collapsible and details inside.
          # String-continuation lines sit at the block-scalar base indent so the
          # embedded markdown has no leading whitespace.
          COMMENT_MARKER="<!-- deployment-e2e-tests -->"
          COMMENT_BODY="${COMMENT_MARKER}
          ${EMOJI} **Deployment E2E Tests ${STATUS}** — ${PASSED_COUNT} passed, ${FAILED_COUNT} failed, ${CANCELLED_COUNT} cancelled
          <details>
          <summary>View test results and recordings</summary>
          [View workflow run](${RUN_URL})
          | Test | Result | Recording |
          |------|--------|-----------|"
          # Add passed tests
          while IFS= read -r test; do
            RECORDING_LINK=""
            if [ -n "${RECORDING_URLS[$test]+x}" ]; then
              if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
                RECORDING_LINK="❌ Upload failed"
              else
                RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
              fi
            fi
            COMMENT_BODY="${COMMENT_BODY}
          | ${test} | ✅ Passed | ${RECORDING_LINK} |"
          done < <(echo "$PASSED_TESTS" | jq -r '.[]')
          # Add failed tests
          while IFS= read -r test; do
            RECORDING_LINK=""
            if [ -n "${RECORDING_URLS[$test]+x}" ]; then
              if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
                RECORDING_LINK="❌ Upload failed"
              else
                RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
              fi
            fi
            COMMENT_BODY="${COMMENT_BODY}
          | ${test} | ❌ Failed | ${RECORDING_LINK} |"
          done < <(echo "$FAILED_TESTS" | jq -r '.[]')
          # Add cancelled tests
          while IFS= read -r test; do
            RECORDING_LINK=""
            if [ -n "${RECORDING_URLS[$test]+x}" ]; then
              if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
                RECORDING_LINK="❌ Upload failed"
              else
                RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
              fi
            fi
            COMMENT_BODY="${COMMENT_BODY}
          | ${test} | ⚠️ Cancelled | ${RECORDING_LINK} |"
          done < <(echo "$CANCELLED_TESTS" | jq -r '.[]')
          COMMENT_BODY="${COMMENT_BODY}
          </details>"
          # Delete any existing deployment test comments, then post the new one
          EXISTING_COMMENT_IDS=$(gh api graphql -f query='
            query($owner: String!, $repo: String!, $pr: Int!) {
              repository(owner: $owner, name: $repo) {
                pullRequest(number: $pr) {
                  comments(first: 100) {
                    nodes {
                      databaseId
                      author { login }
                      body
                    }
                  }
                }
              }
            }' -f owner="${{ github.repository_owner }}" -f repo="${{ github.event.repository.name }}" -F pr="$PR_NUMBER" \
            --jq '.data.repository.pullRequest.comments.nodes[] | select(.author.login == "github-actions" and (.body | contains("'"${COMMENT_MARKER}"'"))) | .databaseId') || true
          for COMMENT_ID in $EXISTING_COMMENT_IDS; do
            echo "Deleting old comment $COMMENT_ID"
            gh api \
              --method DELETE \
              -H "Accept: application/vnd.github+json" \
              "/repos/${{ github.repository }}/issues/comments/${COMMENT_ID}" || true
          done
          echo "Creating new comment on PR #${PR_NUMBER}"
          gh pr comment "${PR_NUMBER}" --repo "${{ github.repository }}" --body "$COMMENT_BODY"
          echo "Posted comment to PR #${PR_NUMBER}"