// Jenkinsfile support utilities
import BuildConfig.BuildConfig
import groovy.io.FileType
import groovy.json.JsonOutput
import org.apache.commons.lang3.SerializationUtils
import org.apache.commons.io.FilenameUtils
// Clone the source repository and examine the most recent commit message.
// If a '[ci skip]' or '[skip ci]' directive is present, mark the build as
// SUCCESS and return a nonzero value so the calling Jenkinsfile can
// terminate the job early. If no skip directive is found, or skip_disable
// is true, stash all the source files for efficient retrieval by
// subsequent nodes.
def scm_checkout(args = ['skip_disable':false]) {
def skip_job = 0
node("on-master") {
stage("Setup") {
checkout(scm)
println("args['skip_disable'] = ${args['skip_disable']}")
if (args['skip_disable'] == false) {
// Obtain the last commit message and examine it for skip directives.
def logoutput = sh(script:"git log -1 --pretty=%B", returnStdout: true).trim()
if (logoutput.contains("[ci skip]") || logoutput.contains("[skip ci]")) {
skip_job = 1
currentBuild.result = 'SUCCESS'
println("\nBuild skipped due to commit message directive.\n")
return skip_job
}
}
stash includes: '**/*', name: 'source_tree', useDefaultExcludes: false
}
}
return skip_job
}
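// A minimal usage sketch from a Jenkinsfile (hypothetical; assumes this
// library is loaded under the name 'utils'):
//
//   if (utils.scm_checkout()) return
//
// The early return honors a skip directive without marking the build failed.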
// Returns true if the conda exe is somewhere in the $PATH, false otherwise.
def conda_present() {
def status = sh(script: "conda --version", returnStatus: true)
return (status == 0)
}
// Install a particular version of conda by downloading and running the
// miniconda installer and then pinning conda at the requested version.
// A version argument of null results in the default conda version
// being installed.
def install_conda(version, install_dir) {
def installer_ver = '4.5.4'
def default_conda_version = '4.5.4'
def default_dir = 'miniconda'
if (version == null) {
version = default_conda_version
}
if (install_dir == null) {
install_dir = default_dir
}
def conda_base_url = "https://repo.continuum.io/miniconda"
def OSname = null
def uname = sh(script: "uname", returnStdout: true).trim()
if (uname == "Darwin") {
OSname = "MacOSX"
}
if (uname == "Linux") {
OSname = uname
}
println("OSname=${OSname}")
// Fail fast if the platform is not one of the supported OSes.
assert OSname != null
// Check for the availability of a download tool and then use it
// to get the conda installer.
def dl_cmds = ["curl -OSs",
"wget --no-verbose --server-response --no-check-certificate"]
def dl_cmd = null
def stat1 = 999
for (cmd in dl_cmds) {
stat1 = sh(script: "which ${cmd.tokenize()[0]}", returnStatus: true)
if( stat1 == 0 ) {
dl_cmd = cmd
break
}
}
if (stat1 != 0) {
println("Could not find a download tool for obtaining conda. Unable to proceed.")
return false
}
def conda_installer = "Miniconda3-${installer_ver}-${OSname}-x86_64.sh"
dl_cmd = dl_cmd + " ${conda_base_url}/${conda_installer}"
if (!fileExists("./${conda_installer}")) {
sh dl_cmd
}
// Install miniconda in batch mode at the requested prefix.
sh "bash ./${conda_installer} -b -p ${install_dir}"
// The installer ships with its own bundled conda; pin conda to the
// requested version when it differs from the installer's.
if (version != installer_ver) {
    sh "${install_dir}/bin/conda install -q -y conda=${version}"
}
return true
}
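// Usage sketch (hypothetical values): install conda 4.5.4 into a
// build-local prefix and invoke the resulting executable directly:
//
//   install_conda('4.5.4', "${env.WORKSPACE}/miniconda")
//   sh "${env.WORKSPACE}/miniconda/bin/conda --version"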
// Execute build/test task(s) based on passed-in configuration(s).
// Each task is defined by a BuildConfig object.
// A list of such objects is iterated over to process all configurations.
// Arguments:
// configs - list of BuildConfig objects
// (optional) concurrent - boolean, whether or not to run all build
// configurations in parallel. The default is
// true when no value is provided.
def run(configs, concurrent = true) {
def tasks = [:]
configs.eachWithIndex { config, index ->
// Deep-copy the config so that each task operates on its own instance.
// The local variable MUST be declared inside the eachWithIndex loop so
// that every task closure captures a distinct copy.
def myconfig = SerializationUtils.clone(config)
def config_name = config.name
println("config_name: ${config_name}")
// Test for GStrings (double quoted). These perform string interpolation
// immediately, which may not be what the user intends when defining
// environment variables for use in the build. Disallow them here.
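// For example (hypothetical variable names):
//   env_vars = ["PATH=${env.WORKSPACE}/bin"]  // GString: interpolated immediately
//   env_vars = ['PATH=$WORKSPACE/bin']        // String: expanded later, per-node
// Only the single-quoted form defers expansion to the node where the
// task runs (see the expansion loop below).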
config.env_vars.each { evar ->
println(evar)
if (evar.getClass() == org.codehaus.groovy.runtime.GStringImpl) {
def msg = "Immediate interpolation of variables in the 'env_vars'" +
" list is not supported and will probably not do what" +
" you expect. Please change the double quotes (\") to " +
"single quotes (') in each value of the 'env_vars' list."
println(msg)
error('Abort the build.')
}
}
def conda_exe = null
def conda_inst_dir = null
// For containerized CI builds, code defined within 'tasks' is eventually executed
// on a separate node. Parallel builds on the RT system each get assigned a new
// workspace directory by Jenkins, e.g. workspace, workspace@2, etc.
// 'tasks' is a java.util.LinkedHashMap, which preserves insertion order.
tasks["${myconfig.nodetype}/${config_name}"] = {
node(myconfig.nodetype) {
deleteDir()
def runtime = []
// If conda packages were specified, create an environment containing
// them and then 'activate' it. If a specific python version is
// desired, it must be specified as a package, i.e. 'python=3.6'
// in the list config.conda_packages.
if (myconfig.conda_packages.size() > 0) {
// Test for presence of conda. If not available, install it in
// a prefix unique to this build configuration.
if (!conda_present()) {
println('Conda not found. Installing.')
conda_inst_dir = "${env.WORKSPACE}/miniconda-bconf${index}"
println("conda_inst_dir = ${conda_inst_dir}")
install_conda(myconfig.conda_ver, conda_inst_dir)
conda_exe = "${conda_inst_dir}/bin/conda"
println("conda_exe = ${conda_exe}")
} else {
conda_exe = sh(script: "which conda", returnStdout: true).trim()
println("Found conda exe at ${conda_exe}.")
}
def conda_root = conda_exe.replace("/bin/conda", "").trim()
def env_name = "tmp_env${index}"
def conda_prefix = "${conda_root}/envs/${env_name}".trim()
def packages = ""
for (pkg in myconfig.conda_packages) {
packages = "${packages} ${pkg}"
}
// Override removes the implicit 'defaults' channel from the channels
// to be used. The conda_channels list is then used verbatim (in
// priority order) by conda.
def override = ""
if (myconfig.conda_override_channels.toString() == 'true') {
override = "--override-channels"
}
def chans = ""
for (chan in myconfig.conda_channels) {
chans = "${chans} -c ${chan}"
}
sh(script: "${conda_exe} create -q -y -n ${env_name} ${override} ${chans} ${packages}")
// Configure job to use this conda environment.
myconfig.env_vars.add(0, "CONDA_SHLVL=1")
myconfig.env_vars.add(0, "CONDA_PROMPT_MODIFIER=${env_name}")
myconfig.env_vars.add(0, "CONDA_EXE=${conda_exe}")
myconfig.env_vars.add(0, "CONDA_PREFIX=${conda_prefix}")
myconfig.env_vars.add(0, "CONDA_PYTHON_EXE=${conda_prefix}/bin/python")
myconfig.env_vars.add(0, "CONDA_DEFAULT_ENV=${env_name}")
// Prepend the PATH adjustment to the list processed below. The backslash
// keeps '$PATH' unexpanded here so the shell-based expansion loop below
// resolves it on the build node.
def conda_path = "PATH=${conda_prefix}/bin:\$PATH"
myconfig.env_vars.add(0, conda_path)
}
// Expand environment variable specifications by using the shell
// to dereference any var references and then render the entire
// value as a canonical path.
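// For example (hypothetical values): 'TEST_ROOT=./data' becomes
// "TEST_ROOT=${WORKSPACE}/data", and in 'PATH=./bin:$PATH' both the
// leading './' and the '$PATH' reference get expanded.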
for (var in myconfig.env_vars) {
// Process each var in an environment defined by all the prior vars.
withEnv(runtime) {
def varName = var.tokenize("=")[0].trim()
// Split on the first '=' only so values that themselves contain '='
// are preserved intact.
def varValue = var.substring(var.indexOf('=') + 1).trim()
// examine var value, if it contains var refs, expand them.
def expansion = varValue
if (varValue.contains("\$")) {
expansion = sh(script: "echo \"${varValue}\"", returnStdout: true)
}
// Change values of '.' or './' to the node's WORKSPACE, and replace
// a leading './' with the WORKSPACE path.
if (expansion == '.' || expansion == './') {
expansion = env.WORKSPACE
} else if(expansion.size() > 2 && expansion[0..1] == './') {
expansion = "${env.WORKSPACE}/${expansion[2..-1]}"
}
// Replace all ':.' combinations with the node's WORKSPACE.
expansion = expansion.replaceAll(':\\.', ":${env.WORKSPACE}")
// Convert var value to canonical based on a WORKSPACE base directory.
if (expansion.contains('..')) {
expansion = new File(expansion).getCanonicalPath()
}
expansion = expansion.trim()
runtime.add("${varName}=${expansion}")
} // end withEnv
}
for (var in myconfig.env_vars_raw) {
runtime.add(var)
}
withEnv(runtime) {
stage("Build (${myconfig.name})") {
println('About to unstash')
sh "pwd"
sh "ls -al"
unstash "source_tree"
println('Unstash complete')
for (cmd in myconfig.build_cmds) {
sh(script: cmd)
}
}
if (myconfig.test_cmds.size() > 0) {
try {
stage("Test (${myconfig.name})") {
for (cmd in myconfig.test_cmds) {
sh(script: cmd)
}
}
}
finally {
// Perform Artifactory upload if required
if (myconfig.test_configs.size() > 0) {
stage("Artifactory (${myconfig.name})") {
def buildInfo = Artifactory.newBuildInfo()
buildInfo.env.capture = true
buildInfo.env.collect()
def server
for (artifact in myconfig.test_configs) {
server = Artifactory.server artifact.server_id
// Construct absolute path to data
def path = FilenameUtils.getFullPath(
"${env.WORKSPACE}/${artifact.root}"
)
// Record listing of all files starting at ${path}
// (Native Java and Groovy approaches will not
// work here)
sh(script: "find ${path} -type f",
returnStdout: true).trim().tokenize('\n').each {
// Semi-wildcard matching of JSON input files
// ex:
// it = "test_1234_result.json"
// artifact.match_prefix = "(.*)_result"
//
// pattern becomes: (.*)_result(.*)\\.json
if (it.matches(
artifact.match_prefix + '(.*)\\.json')) {
def basename = FilenameUtils.getBaseName(it)
def data = readFile(it)
// Store JSON in a logical map
// i.e. ["basename": [data]]
artifact.insert(basename, data)
}
} // end find.each
// Submit each request to the Artifactory server
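// Each blob.value is expected to hold an Artifactory "File Spec"
// describing what to upload, e.g. (hypothetical):
//   {"files": [{"pattern": "test_*_results.json",
//               "target": "generic-local/results/"}]}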
artifact.data.each { blob ->
def bi_temp = server.upload spec: blob.value
// Define retention scheme
// Defaults: see DataConfig.groovy
bi_temp.retention \
maxBuilds: artifact.keep_builds, \
maxDays: artifact.keep_days, \
deleteBuildArtifacts: !artifact.keep_data
buildInfo.append bi_temp
}
} // end for-loop
server.publishBuildInfo buildInfo
} // end stage Artifactory
} // end test_configs check
// If a non-JUnit format .xml file exists in the
// root of the workspace, the XUnitBuilder report
// ingestion will fail.
// 'test -e' cannot handle a glob matching more than one file, so use
// 'ls' to detect whether any .xml report files exist.
def report_exists = sh(script: "ls *.xml > /dev/null 2>&1", returnStatus: true)
if (report_exists == 0) {
step([$class: 'XUnitBuilder',
thresholds: [
[$class: 'SkippedThreshold', unstableThreshold: "${myconfig.skippedUnstableThresh}"],
[$class: 'SkippedThreshold', failureThreshold: "${myconfig.skippedFailureThresh}"],
[$class: 'FailedThreshold', unstableThreshold: "${myconfig.failedUnstableThresh}"],
[$class: 'FailedThreshold', failureThreshold: "${myconfig.failedFailureThresh}"]],
tools: [[$class: 'JUnitType', pattern: '*.xml']]])
} else {
println("No .xml files found in workspace. Test report ingestion skipped.")
}
} // end finally clause
} // end test_cmds check
} // end withEnv
} // end node
} //end tasks
} //end for
if (concurrent == true) {
stage("Matrix") {
parallel(tasks)
}
} else {
// Run tasks sequentially. Any failure halts the sequence.
def iter = 0
for (task in tasks) {
def localtask = [:]
localtask[task.key] = task.value
stage("Serial-${iter}") {
parallel(localtask)
}
iter++
}
}
}
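// Usage sketch (hypothetical values): run the same build against two
// python versions, one configuration at a time:
//
//   def config = new BuildConfig()
//   config.nodetype = 'linux'
//   config.name = 'py36'
//   config.conda_packages = ['python=3.6']
//   config.build_cmds = ['python setup.py install']
//   config.test_cmds = ['pytest --junitxml=results.xml']
//   def config2 = copy(config)
//   config2.name = 'py35'
//   config2.conda_packages = ['python=3.5']
//   run([config, config2], false)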
// Convenience function that performs a deep copy on the supplied object.
def copy(obj) {
return SerializationUtils.clone(obj)
}
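// Usage sketch: deep-copying a config before varying a field prevents
// later mutations from leaking into configurations already queued
// (hypothetical values):
//
//   def variant = copy(base_config)
//   variant.env_vars = ['BUILD_MODE=debug']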