# This example shows how to run the same pipeline across multiple platforms
# using common sections of code that are executed on both platforms.
# The shared sections are pulled in with the GitLab CI `extends` keyword
# and YAML anchors. It also shows job dependencies and uses allow_failure
# so the other platform's jobs continue even if one path fails.
stages:
  - build
  - test
.project_activation: &project_activation
  # Ensure that the CI pipeline only runs when initiated via the
  # web GUI (CI / CD -> Pipelines -> Run Pipeline).
  only:
    - web
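  # Note: on newer GitLab releases the same restriction can also be written
  # with `rules:`, for example:
  #   rules:
  #     - if: '$CI_PIPELINE_SOURCE == "web"'
  # The `only:` form above is kept as in the original example.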
# Set up these general platform 1 shell runner arguments
# We can use this section in multiple jobs below
.runner_platform1_shell: &runner_platform1_shell
  tags:
    - SLES12
    - shell
    - anl-theta-xeon-001
    - cray-xc40
    - ecp-theta
    - ecp
    - test
# Set up these general platform 2 batch runner arguments
# We can use this section in multiple jobs below
.runner_platform2_batch: &runner_platform2_batch
  # We are leveraging the GitLab batch runner and as
  # such we will need to provide the job submission parameters.
  variables:
    SCHEDULER_PARAMETERS: "-A Operations -n 1 -t 10 -q default"
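  # (The flags above appear to be Cobalt qsub-style submission options:
  #  -A project/account, -n node count, -t walltime in minutes, -q queue.)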
  tags:
    - CLE
    - batch
    - anl-theta-knl-001
    - cray-xc40
    - ecp-theta
    - ecp
    - cobalt
    - test
# These commands are prepended to every job's script section below
before_script:
  - export TERM=xterm
  - source $HOME/spack/share/spack/setup-env.sh
# Share this common section of code across jobs/machines
.build_spack_stage:
  stage: build
  script:
    - spack --version
    - spack env create myciminiqmc || echo "Spack env myciminiqmc already created"
    - spack env list
    - spack env activate myciminiqmc
    - spack env status
    - spack install miniqmc^[email protected] ^mpich os=cnl6 target=knl
    - ls -lastr $HOME/spack/var/spack/environments/myciminiqmc
    - spack env status
    - spack env deactivate
    - spack env status
    - spack env create myopenspeedshop || echo "Spack env myopenspeedshop already created"
    - spack env list
    - spack env activate myopenspeedshop
    - spack env status
    - module unload PrgEnv-intel
    - spack install openspeedshop %[email protected] +cuda+mpich ^[email protected] os=sles12 target=x86_64
    - ls -lastr $HOME/spack/var/spack/environments/myopenspeedshop
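  # Note: the `spack env create <name> || echo ...` pattern above keeps the job
  # from failing when the environment already exists from a previous pipeline run.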
# Share this common section of code across jobs/machines
.test_spack_stage:
  stage: test
  script:
    - pwd
    - source $HOME/spack/share/spack/setup-env.sh
    - module avail
    - hostname
    - spack env list
    - spack env activate myciminiqmc
    - spack env status
    - ls -lastr $HOME/spack/var/spack/environments/myciminiqmc
    - spack find
    - which miniqmc
    - which aprun
    #- aprun -np 2 miniqmc < ./input_args
    - spack env list
    - spack env create myopenspeedshop || echo "Spack env myopenspeedshop already created"
    - spack env list
    - spack env activate myopenspeedshop
    # If you need to load the module associated with the package you are
    # building, this code will load the package's module for you
    - spack module tcl find --full-path openspeedshop > /tmp/openspeedshop_module
    - while read line; do echo $line; module load $line; done < /tmp/openspeedshop_module
    - module list
    #- osspcsamp "aprun -n 2 `which miniqmc` < ./input_args"
# Use shared .build_spack_stage code and project activation code
# Build on platform 1 - shell executor by extending the common shared .build_spack_stage code
# Also funnel in the project activation settings and the tags/variables for the platform
build_spack_shell:
  extends: .build_spack_stage
  <<: *runner_platform1_shell
  <<: *project_activation
  # Because this job may not be the last one executed, allow it to fail so the others can still run
  allow_failure: true
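# For reference, once `extends` and the YAML merge keys are resolved, the job
# above is roughly equivalent to writing it out in full:
#   build_spack_shell:
#     stage: build
#     script: [... the .build_spack_stage commands ...]
#     tags: [SLES12, shell, anl-theta-xeon-001, cray-xc40, ecp-theta, ecp, test]
#     only: [web]
#     allow_failure: true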
# Use shared .build_spack_stage code and project activation code
# Build on platform 2 - batch executor by extending the common shared .build_spack_stage code
# Also funnel in the project activation settings and the tags/variables for the platform
build_spack_batch:
  extends: .build_spack_stage
  <<: *runner_platform2_batch
  <<: *project_activation
  # Because this job may not be the last one executed, allow it to fail so the others can still run
  allow_failure: true
# Use shared .test_spack_stage code and project activation code
# Test on platform 1 - shell executor by extending the common shared .test_spack_stage code
# Also funnel in the project activation settings and the tags/variables for the platform
test_spack_shell:
  extends: .test_spack_stage
  <<: *runner_platform1_shell
  <<: *project_activation
  dependencies:
    - build_spack_shell
  # Because this job may not be the last one executed, allow it to fail so the others can still run
  allow_failure: true
# Use shared .test_spack_stage code and project activation code
# Test on platform 2 - batch executor by extending the common shared .test_spack_stage code
# Also funnel in the project activation settings and the tags/variables for the platform
test_spack_batch:
  extends: .test_spack_stage
  <<: *runner_platform2_batch
  <<: *project_activation
  dependencies:
    - build_spack_batch
  # Because this job may not be the last one executed, allow it to fail so the others can still run
  allow_failure: true
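# Note: `dependencies:` only controls which earlier jobs' artifacts (if any) the
# test jobs fetch; combined with `allow_failure: true` on the build jobs, the
# test stage still starts even if one platform's build fails.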