-
Notifications
You must be signed in to change notification settings - Fork 0
/
action.yaml
134 lines (119 loc) · 4.86 KB
/
action.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
name: S3 upload
description: Upload a set of artifacts to S3

inputs:
  path:
    required: true
    description: Path(s) to the artifacts to upload, one per line
  s3uri:
    required: true
    description: S3 url for bucket and path prefix of the artifact
  key:
    required: false
    description: Artifact key name (a unique hash or timestamp or other identifier)
    default: ${{ github.sha }}-${{ github.run_number }}-${{ github.run_attempt }}
  aws-access-key-id:
    required: false
    description: AWS access key ID of the S3 location
  aws-secret-access-key:
    required: false
    description: AWS secret access key of the S3 location
  aws-region:
    required: false
    description: AWS region of the S3 location
    default: us-east-1
  compress:
    required: false
    description: Whether to build a tarball of the artifacts before uploading
    # Action inputs are always strings, so the boolean is quoted deliberately
    default: "true"

outputs:
  s3uri:
    description: S3 URL for uploaded artifact
    value: ${{ steps.s3.outputs.s3uri }}
runs:
  using: composite
  steps:
    - name: Collect artifacts into temporary path
      id: collect
      shell: bash
      # Inputs are passed through env instead of interpolating ${{ }} into the
      # script body, so a crafted input value cannot inject shell commands.
      env:
        INPUT_PATH: ${{ inputs.path }}
        INPUT_COMPRESS: ${{ inputs.compress }}
      run: |
        # Create our temporary directory parent for our artifacts
        mkdir -p "$RUNNER_TEMP/actions-s3-artifact"
        # Create a unique directory for this particular action run
        TMPDIR="$(mktemp -d -p "$RUNNER_TEMP" "actions-s3-artifact/upload.XXXXXXXX")"
        echo "tmpdir=$TMPDIR" >> "$GITHUB_OUTPUT"
        echo "::debug::Created temporary directory $TMPDIR"
        # Assign the tarball file name for future use
        TMPTAR="$TMPDIR/artifacts.tar.gz"
        # Create a path within our temporary directory to collect all the artifacts
        TMPARTIFACT="$TMPDIR/artifacts"
        mkdir -p "$TMPARTIFACT"
        # Read the (possibly multi-line) path input into a bash array, one
        # entry per line
        readarray -t ARTIFACT_PATHS <<< "$INPUT_PATH"
        # Iterate through each artifact path and copy it to the temporary path.
        # NOTE(review): the expansion is intentionally unquoted so that each
        # line undergoes glob expansion (e.g. `dist/*`); as a consequence,
        # paths containing whitespace are not supported — confirm callers rely
        # on globbing before quoting this.
        for name in ${ARTIFACT_PATHS[@]}; do
          if [[ -z "$name" ]]; then
            echo "::debug::Skipping empty"
            continue
          fi
          echo "Adding '$name'"
          mkdir -p "$TMPARTIFACT/$(dirname "$name")"
          cp -r "$name" "$TMPARTIFACT/$(dirname "$name")"
        done
        # List out everything in the temporary path. `tree` may be absent on
        # some runners; debug output must never fail the step (GitHub runs
        # `shell: bash` steps with -e).
        if [[ -n "${RUNNER_DEBUG:-}" ]]; then
          echo "::debug::Contents of our temporary artifact build"
          tree -a "$TMPDIR" 2>&1 || true
        fi
        # Tarball the temporary path into a single object if "compress" is
        # true, return the folder path otherwise
        if [[ "$INPUT_COMPRESS" == "true" ]]; then
          echo "Creating artifact tarball"
          # Strip the leading ./ from member names so the archive unpacks flat
          tar -czvf "$TMPTAR" -C "$TMPARTIFACT" --transform='s/^\.\///' \
            --show-transformed-names .
          # List the actual contents of the archive
          if [[ -n "${RUNNER_DEBUG:-}" ]]; then
            echo "::debug::Artifact contents"
            tar -ztvf "$TMPTAR" 2>&1 || true
          fi
          # Output the compressed file keyname for use in subsequent steps
          echo "tarball=$TMPTAR" >> "$GITHUB_OUTPUT"
        else
          # Output the folder keyname for use in subsequent steps
          echo "folder=$TMPARTIFACT" >> "$GITHUB_OUTPUT"
        fi
- name: Configure AWS credentials
if: inputs.aws-access-key-id != '' && inputs.aws-secret-access-key != ''
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ inputs.aws-access-key-id }}
aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
aws-region: ${{ inputs.aws-region }}
- name: Upload artifact to S3
id: s3
shell: bash
run: |
AWSCMD=${AWSCMD:-aws}
S3URI="${{ inputs.s3uri }}"
if [[ "$S3URI" != s3://* ]]; then
echo "::warning::'s3uri' should start with s3://"
S3URI="s3://$S3URI"
fi
S3URI="${S3URI%/}/${{ inputs.key }}"
if [[ "$S3URI" == *.tgz || "$S3URI" == *.tar.gz || "${{ inputs.compress }}" != "true" ]]; then
echo "::debug::s3uri does not need .tgz extension, skipping"
else
S3URI="$S3URI.tgz"
fi
echo "::debug::Using AWS CLI: $AWSCMD"
# Upload the artifact to S3 based on keyname from previous step
if [[ "${{ inputs.compress }}" == "true" ]]; then
echo "Uploading '${{ steps.collect.outputs.tarball }}' to S3 '$S3URI'"
$AWSCMD s3 cp "${{ steps.collect.outputs.tarball }}" "$S3URI"
else
echo "Uploading '${{ steps.collect.outputs.folder }}' to S3 '$S3URI'"
$AWSCMD s3 cp --recursive "${{ steps.collect.outputs.folder }}" "$S3URI"
fi
# Output the S3 URL for use in subsequent steps
echo "s3uri=$S3URI" >> $GITHUB_OUTPUT