name: Upload test results
on:
  workflow_run:
    workflows: ["ci", "docs"]
    types:
      - completed
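# Note: workflow_run triggers execute in the context of the repository's default
# branch, so this workflow can read repository secrets even when the triggering
# run came from a fork.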
permissions:
  actions: read
defaults:
  run:
    shell: bash --noprofile --norc -euo pipefail {0}
jobs:
  upload-to-s3:
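    # Skip when no secrets are available (github.secret_source is 'None')
    # or when the triggering workflow run was cancelled.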
    if: github.secret_source != 'None' && github.event.workflow_run.conclusion != 'cancelled'
    runs-on: ubuntu-latest
    steps:
      - name: 'Download artifact'
        uses: actions/github-script@v7
        with:
          script: |
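            // List all artifacts of the triggering workflow run, then download
            // only the 'test report *' archives into the workspace.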
            const fs = require('fs');
            const opts = github.rest.actions.listWorkflowRunArtifacts.endpoint.merge({
              owner: context.repo.owner,
              repo: context.repo.repo,
              run_id: ${{ github.event.workflow_run.id }},
            });
            const artifacts = await github.paginate(opts);
            for (const artifact of artifacts) {
              if (!artifact.name.startsWith('test report ')) {
                continue;
              }
              const download = await github.rest.actions.downloadArtifact({
                owner: context.repo.owner,
                repo: context.repo.repo,
                artifact_id: artifact.id,
                archive_format: 'zip',
              });
              fs.writeFileSync('${{ github.workspace }}/' + artifact.name + '.zip', Buffer.from(download.data));
            }
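      # Unpack each downloaded archive into a directory named after the artifact.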
      - run: |
          for archive in *.zip; do
            # $archive is literally '*.zip' if there are no zip files matching the glob expression
            [ -f "$archive" ] || continue
            name=$(basename "$archive" .zip)
            mkdir "$name"
            (cd "$name" && unzip ../"$archive")
          done
      - name: Upload test results to S3
        env:
          S3_BUCKET: ${{ vars.TEST_RESULTS_BUCKET }}
          AWS_ACCESS_KEY_ID: ${{ vars.TEST_RESULTS_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.TEST_RESULTS_AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: us-east-2
          BRANCH_NAME: ${{ github.event.workflow_run.head_branch }}
        if: env.S3_BUCKET != '' && env.AWS_ACCESS_KEY_ID != '' && env.AWS_SECRET_ACCESS_KEY != ''
        shell: bash --noprofile --norc -euo pipefail {0}
        run: |
          # Don't prefix attributes: '+@' (the default prefix) contains characters that
          # are not valid in nested row field names in the Hive connector for JSON files.
          yq_opts=(
            --input-format=xml
            --output-format=json
            --xml-attribute-prefix=''
            --xml-content-name='content'
            --xml-skip-directives
            --xml-skip-proc-inst
          )
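          # Illustrative example (assumed sample input): with these options,
          # <testcase name="t" time="0.1"/> converts to {"testcase":{"name":"t","time":"0.1"}}
          # rather than {"testcase":{"+@name":"t","+@time":"0.1"}}.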
          # 1. When converting to JSON, make sure 'testcase' is always an array: https://mikefarah.gitbook.io/yq/usage/xml#parse-xml-force-as-an-array
          # 2. Remove system-err and system-out, because they cannot be easily parsed and add significant bloat, making storing and processing the data much more costly.
          # 3. Remove properties, because they leak secret values.
          # 4. Truncate all strings to 1k characters to avoid having lines longer than 100MB.
          surefire_selector='.testsuite.testcase |= ([] + .) | .testsuite.testcase[] |= del(.system-err, .system-out) | .testsuite |= del(.properties) | .. |= select(tag == "!!str") |= sub("(.{0,1000}).*", "${1}")'
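          # Illustrative example (assumed sample input): a report with a single test case,
          # such as <testsuite name="s"><testcase name="t"/></testsuite>, becomes
          # {"testsuite":{"name":"s","testcase":[{"name":"t"}]}} - 'testcase' stays an array.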
          # 1. Convert dashes to underscores in all map keys.
          # 2. Make sure arrays with only one item never get converted to an object.
          # 3. Truncate all strings to 1k characters to avoid having lines longer than 100MB.
          testng_selector='.. |= select(tag == "!!map") |= with_entries(.key |= sub("-", "_")) | .suite.group.method |= ([] + .) | .suite.test.class[].test_method |= ([] + .) | .suite.test.class[].test_method[].params |= ([] + .) | .. |= select(tag == "!!str") |= sub("(.{0,1000}).*", "${1}")'
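          # Illustrative example: a TestNG key such as 'test-method' is renamed to
          # 'test_method', keeping field names valid for the downstream Hive/JSON schema.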
          artifact_id='${{ github.event.workflow_run.id }}-${{ github.event.workflow_run.run_attempt }}.json.gz'
          find . \
            -name TEST-\*.xml \
            -exec yq "${yq_opts[@]}" "$surefire_selector" {} \; \
            | jq -c > surefire.ndjson
          find . \
            -name testng-results.xml \
            -exec yq "${yq_opts[@]}" "$testng_selector" {} \; \
            | jq -c > testng.ndjson
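          # jq -c emits one compact JSON object per line, so each .ndjson file is
          # newline-delimited JSON ready for line-oriented processing below.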
          for filename in *.ndjson; do
            if [ ! -s "$filename" ]; then
              continue
            fi
            jq -c \
              --argjson addObj '{"branch":"","git_sha":"${{ github.event.workflow_run.head_sha }}","workflow_name":"${{ github.event.workflow.name }}","workflow_run":"${{ github.event.workflow_run.id }}","workflow_conclusion":"${{ github.event.workflow_run.conclusion }}","workflow_job":"","workflow_run_attempt":"${{ github.event.workflow_run.run_attempt }}","timestamp":""}' \
              --arg timestamp "$(date -u '+%F %T.%3NZ')" \
              --arg branch "$BRANCH_NAME" \
              '. + $addObj | .branch=$branch | .timestamp=$timestamp' "$filename" | gzip -c > "$artifact_id"
            aws s3 cp --no-progress "$artifact_id" "s3://$S3_BUCKET/tests/results/type=$(basename "$filename" .ndjson)/repo=$(basename "${{ github.repository }}")/date_created=$(date -u '+%Y-%m-%d')/$artifact_id"
          done
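          # Resulting object keys look like (illustrative):
          # tests/results/type=surefire/repo=<repo>/date_created=<YYYY-MM-DD>/<run_id>-<attempt>.json.gz
          # where the type=, repo= and date_created= segments act as Hive-style partition keys.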