-
Notifications
You must be signed in to change notification settings - Fork 9
235 lines (200 loc) · 6.35 KB
/
unit_tests.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
---
name: Unit Tests & Code Coverage

# NOTE: `on` is a YAML 1.1 boolean-looking key; GitHub's loader handles it,
# but generic YAML tools may read it as `true` (suppress yamllint `truthy`).
on:
  # Triggers the workflow on push or pull request events but only for the main branch
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
  workflow_dispatch:  # allow manual triggering

defaults:
  run:
    # Login shell so conda/micromamba activation hooks are sourced in every step.
    shell: bash -l {0}
jobs:
  # Code-style gate: flake8 + black (check only). Downstream jobs `needs` this.
  lint:
    name: Code style
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8
      - name: Run flake8
        run: |
          flake8
      - name: Run black
        uses: rickstaa/action-black@v1
        with:
          # --check: report formatting violations without rewriting files.
          black_args: ". --check"
install-catalogs:
name: Install & cache databroker catalogs
runs-on: ubuntu-latest
needs: lint
strategy:
matrix:
python-version:
- "3.11"
max-parallel: 5
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Create Python ${{ matrix.python-version }} environment
uses: mamba-org/setup-micromamba@v1
with:
cache-environment: true
cache-environment-key: env-key-${{ matrix.python-version }}
condarc: |
channel-priority: flexible
environment-file: environment.yml
environment-name: anaconda-test-env-py-${{ matrix.python-version }}
- name: Unpack
run: |
which databroker-pack
which databroker-unpack
cd resources
bash ./unpack.sh
cd ..
- name: Directory Listings
run: |
ls -lAFghR ~/.local/
ls -lAFghR /tmp/*_test/
- name: Prepare archival content
run: |
mkdir -p ~/databroker_catalogs/
mv ~/.local ~/databroker_catalogs/
mv /tmp/*_test ~/databroker_catalogs/
- name: Archive catalog artifacts
uses: actions/upload-artifact@v3
with:
name: databroker_catalogs
path: ~/databroker_catalogs
test-matrix:
name: Python ${{ matrix.python-version }}
runs-on: ubuntu-latest
needs: install-catalogs
strategy:
matrix:
python-version:
- "3.8"
- "3.9"
- "3.10"
- "3.11"
max-parallel: 5
steps:
- uses: actions/checkout@v3
- name: Create Python ${{ matrix.python-version }} environment
uses: mamba-org/setup-micromamba@v1
with:
cache-environment: true
cache-environment-key: env-key-${{ matrix.python-version }}
condarc: |
channel-priority: flexible
environment-file: environment.yml
environment-name: anaconda-test-env-py-${{ matrix.python-version }}
create-args: >-
coveralls
pytest
pytest-cov
python=${{ matrix.python-version }}
setuptools-scm
- name: Initial diagnostics
run: |
micromamba info
micromamba list
conda config --show-sources
conda config --show
micromamba env list
printenv | sort
- name: Directories before Docker
run: ls -lAFghrt ~/
- name: Start EPICS IOCs in Docker
run: |
bash ./.github/scripts/iocmgr.sh start GP gp
bash ./.github/scripts/iocmgr.sh start ADSIM ad
docker ps -a
ls -lAFgh /tmp/docker_ioc/iocad/
ls -lAFgh /tmp/docker_ioc/iocgp/
- name: Directories after Docker
run: ls -lAFghrt ~/
- name: Confirm EPICS IOC is available via caget
shell: bash -l {0}
run: |
docker exec iocgp grep float1 /home/iocgp/dbl-all.txt
docker exec iocgp /opt/base/bin/linux-x86_64/caget gp:UPTIME gp:gp:float1
docker exec iocad /opt/base/bin/linux-x86_64/caget ad:cam1:Acquire_RBV
which caget
caget gp:UPTIME
caget gp:gp:float1
caget ad:cam1:Acquire_RBV
- name: Confirm EPICS IOC is available via PyEpics
shell: bash -l {0}
run: |
python -c "import epics; print(epics.caget('gp:UPTIME'))"
- name: Confirm EPICS IOC is available via ophyd
shell: bash -l {0}
run: |
CMD="import ophyd"
CMD+="; up = ophyd.EpicsSignalRO('gp:UPTIME', name='up')"
CMD+="; pv = ophyd.EpicsSignalRO('gp:gp:float1', name='pv')"
CMD+="; up.wait_for_connection()"
CMD+="; print(up.get(), pv.get())"
python -c "${CMD}"
- name: Download catalog artifacts
uses: actions/download-artifact@v3
with:
name: databroker_catalogs
path: ~/databroker_catalogs
- name: Restore archival content
run: |
mkdir -p ~/.local/share/intake
mv ~/databroker_catalogs/.local/share/intake/* ~/.local/share/intake
mv ~/databroker_catalogs/*_test /tmp/
- name: Diagnostics
shell: bash -l {0}
run: |
df -HT
micromamba list
- name: Test catalog length, expect 53
shell: bash -l {0}
run: python -c "import databroker; print(len(databroker.catalog['apstools_test']))"
- name: Run tests with pytest & coverage
shell: bash -l {0}
run: |
coverage run --concurrency=thread --parallel-mode -m pytest -vvv --exitfirst .
coverage combine
coverage report --precision 3
- name: Upload coverage data to coveralls.io
shell: bash -l {0}
run: |
micromamba list coveralls
which coveralls
coveralls debug
coveralls --service=github
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_FLAG_NAME: ${{ matrix.python-version }}
COVERALLS_PARALLEL: true
# https://coveralls-python.readthedocs.io/en/latest/usage/configuration.html#github-actions-support
coveralls:
name: Report unit test coverage to coveralls
needs: test-matrix
runs-on: ubuntu-latest
container: python:3-slim
steps:
- name: Gather coverage and report to Coveralls
run: |
echo "Finally!"
pip3 install --upgrade coveralls
# debug mode: output prepared json and reported files list to stdout
# https://coveralls-python.readthedocs.io/en/latest/troubleshooting.html
coveralls debug
coveralls --service=github --finish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}