author     Matt Rendina <rendinam@users.noreply.github.com>   2019-02-12 16:35:02 -0500
committer  GitHub <noreply@github.com>                        2019-02-12 16:35:02 -0500
commit     7d9d15de3935e853034367df3012ecaf14a77d0f (patch)
tree       6c8821b25986fa23561c85574407ff080f089094
parent     d5acca400a4d5648f70a6e01815e24bf4dc5c4ad (diff)
parent     68341c08eca5025fe17a8e737afea7c51ea2c050 (diff)
download   delivery_control-7d9d15de3935e853034367df3012ecaf14a77d0f.tar.gz
Merge pull request #11 from rendinam/build_and_test
Update to handle integrated regression testing of HSTDP delivery set
-rw-r--r--  .gitignore       1
-rw-r--r--  JenkinsfileRT   58
-rw-r--r--  deliver.groovy 231
3 files changed, 263 insertions, 27 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..1377554
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+*.swp
diff --git a/JenkinsfileRT b/JenkinsfileRT
new file mode 100644
index 0000000..aac2be4
--- /dev/null
+++ b/JenkinsfileRT
@@ -0,0 +1,58 @@
+// Obtain files from source control system.
+// [skip ci] and [ci skip] have no effect here.
+ if (utils.scm_checkout(['skip_disable':true])) return
+
+// Allow modification of the job configuration; affects all relevant build configs.
+// Pass this object in the argument list to the `run()` function below to apply these settings to the job's execution.
+jobconfig = new JobConfig()
+jobconfig.post_test_summary = true
+
+// Config data to share between builds.
+CFLAGS = ''
+LDFLAGS = ''
+DEFAULT_FLAGS = "${CFLAGS} ${LDFLAGS}"
+// Some waf flags cause a prompt for input during configuration, hence the 'yes'.
+configure_cmd = "yes '' | ./waf configure --prefix=./_install ${DEFAULT_FLAGS}"
+
+// Configure artifactory ingest
+data_config = new DataConfig()
+data_config.server_id = 'bytesalad'
+data_config.root = 'tests_output'
+data_config.match_prefix = '(.*)_result' // .json is appended automatically
+
+// Run nightly tests, which include the slow ones.
+bc = new BuildConfig()
+bc.nodetype = "RHEL-6"
+bc.name = "release"
+bc.env_vars = ['PATH=./_install/bin:$PATH',
+ 'OMP_NUM_THREADS=8',
+ 'TEST_BIGDATA=https://bytesalad.stsci.edu/artifactory',
+ 'jref=/grp/hst/cdbs/jref/',
+ 'iref=/grp/hst/cdbs/iref/',
+ 'oref=/grp/hst/cdbs/oref/']
+bc.conda_channels = ['http://ssb.stsci.edu/astroconda']
+bc.conda_packages = ['python=3.6',
+ 'cfitsio',
+ 'pkg-config',
+ 'pytest',
+ 'requests',
+ 'astropy',
+ 'ci-watson']
+
+bc.build_cmds = [//"pip install git+https://github.com/spacetelescope/ci_watson.git@master",
+ "${configure_cmd} --release-with-symbols",
+ "./waf build",
+ "./waf install",
+ "calacs.e --version"]
+//bc.test_cmds = ["pytest tests --basetemp=tests_output --junitxml results.xml --bigdata --slow -v"]
+bc.test_cmds = ["pytest tests/acs/test_hrc_single.py --basetemp=tests_output --junitxml results.xml --bigdata --slow -v"]
+bc.failedUnstableThresh = 1
+bc.failedFailureThresh = 6
+
+// Upload artifacts
+bc.test_configs = [data_config]
+
+// Iterate over configurations that define the (distributed) build matrix.
+// Spawn a host of the given nodetype for each combination and run in parallel.
+// Also apply the job configuration defined in `jobconfig` above.
+ utils.run([bc, jobconfig])
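For illustration (not part of this commit): the matrix above can be extended by passing additional BuildConfig objects to utils.run(). A minimal sketch, assuming only the shared-library API already visible in this JenkinsfileRT; 'bc2' and its Python 3.7 variant are hypothetical:

// Hypothetical second configuration (illustrative only). utils.run()
// spawns one node of the requested nodetype per BuildConfig and runs
// the configurations in parallel.
bc2 = new BuildConfig()
bc2.nodetype = "RHEL-6"
bc2.name = "release-py37"                 // hypothetical name
bc2.env_vars = bc.env_vars
bc2.conda_channels = bc.conda_channels
bc2.conda_packages = bc.conda_packages - ['python=3.6'] + ['python=3.7']
bc2.build_cmds = bc.build_cmds
bc2.test_cmds = bc.test_cmds
bc2.failedUnstableThresh = 1
bc2.failedFailureThresh = 6
utils.run([bc, bc2, jobconfig])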
diff --git a/deliver.groovy b/deliver.groovy
index cb45747..5d78e88 100644
--- a/deliver.groovy
+++ b/deliver.groovy
@@ -1,12 +1,30 @@
+import org.jenkinsci.plugins.pipeline.utility.steps.shaded.org.yaml.snakeyaml.Yaml
@Library('env_utils') _
+library('utils')
+//
+// Note: Jenkinsfiles for projects that produce dev packages used in
+// assembling delivery sets must name the build config that runs the
+// intended regression test suite 'bc'.
+//
// Parameters made available from Jenkins job configuration:
// delivery_name
+// python_version
// aux_packages
// conda_installer_version
// conda_version
// output_dir
-def gen_specfiles(label) {
+
+// To sidestep NotSerializableException errors, encapsulate the
+// YAML-reading step in a @NonCPS method of its own, used below.
+@NonCPS
+def readYaml(data) {
+ def yaml = new Yaml()
+ payload = yaml.load(data)
+ return payload
+}
+
+def gen_specfiles(label, run_tests) {
node(label) {
@@ -14,6 +32,9 @@ def gen_specfiles(label) {
// The directory deleted is the one named after the jenkins pipeline job.
deleteDir()
+ println("PATH ----")
+ println("${env.PATH}")
+
unstash "script"
env.PYTHONPATH = ""
@@ -23,29 +44,195 @@ def gen_specfiles(label) {
def WORKDIR = pwd()
println("WORKDIR = ${WORKDIR}")
- conda.install()
-
- PATH = "${WORKDIR}/miniconda/bin:${PATH}"
- def cpkgs = "conda=${conda_version}"
- def pkg_list = aux_packages.replaceAll('\n', ' ')
-
def flags = ""
println("${finalize} - finalize")
if ("${finalize}" == "true") {
flags = "--final"
}
- withEnv(["HOME=${HOME}", "PATH=${PATH}"]) {
+ conda.install()
+
+ path_with_conda = "${WORKDIR}/miniconda/bin:${PATH}"
+ def cpkgs = "conda=${conda_version}"
+ def pkg_list = aux_packages.replaceAll('\n', ' ')
+
+
+ withEnv(["HOME=${WORKDIR}", "PATH=${path_with_conda}"]) {
+ sh "env | sort"
+ conda_exe = sh(script: "which conda", returnStdout: true).trim()
+ println("Found conda exe at ${conda_exe}.")
+ conda_root = conda_exe.replace("/bin/conda", "").trim()
sh "conda install --quiet --yes ${cpkgs}"
// Generate spec files
- sh "${WORKDIR}/mktestenv.sh -n ${delivery_name} -p 3.5 ${flags} ${pkg_list}"
- sh "${WORKDIR}/mktestenv.sh -n ${delivery_name} -p 3.6 ${flags} ${pkg_list}"
+ sh "${WORKDIR}/mktestenv.sh -n ${delivery_name} -p ${python_version} ${flags} ${pkg_list}"
// Make spec files available to master node.
stash name: "spec-stash-${label}", includes: "hstdp*.txt"
+
+ // List environments for testing
+ sh "conda env list"
}
- }
+
+ // Get the source repo for each aux package at the commit used to
+ // build the package.
+ // For each package in pkg_list, locate installed package in local conda
+ // dir, extract metadata to obtain:
+ // commit hash
+ // source repository
+ if (run_tests) {
+ def env_name = sh(script: "ls hstdp*", returnStdout: true).trim()[0..-5]
+ println("env_name: ${env_name}")
+ for (pkg in pkg_list.tokenize()) {
+ println("Extracting metadata for ${pkg}...")
+ pkg_name = pkg.tokenize('=')[0]
+ println("pkg_name: ${pkg_name}")
+ pkg_version = pkg.tokenize('=')[1]
+ ccmd = "${conda_exe} list -n ${env_name} | grep ${pkg_name} | grep dev"
+ pkg_info = sh(script: ccmd,
+ returnStdout: true).trim()
+ println("pkg_info: ${pkg_info}")
+ meta_base = pkg_info.tokenize()[1]
+ meta_rev = pkg_info.tokenize()[2]
+ meta_dir = "${pkg_name}-${meta_base}-${meta_rev}"
+ metafile = "${conda_root}/pkgs/${meta_dir}/info/recipe/meta.yaml"
+ filedata = readFile(metafile)
+ meta = readYaml(filedata)
+ git_url = meta['source'].git_url
+
+ // Use this clone to run the full test suite.
+ sh "git clone ${git_url} ./${pkg_name}"
+
+ // For development only. Disable before use.
+ //if (pkg_name == "hstcal") {
+ // jenkinsfile = "./JenkinsfileRT"
+ //} else {
+ jenkinsfile = "${WORKDIR}/${pkg_name}/JenkinsfileRT"
+ //}
+
+ // Post-process each -dev project's JenkinsfileRT to allow importing
+ // only the configuration values, without running the processing
+ // machinery it calls, which would complicate matters here.
+ // Disable scm_checkout call in project's JenkinsfileRT.
+ // NOTE: 'if' statement must be on same line as scm_checkout call.
+ // TODO: Generalize to make this more robust against layout
+ // changes in file.
+ sh "sed -i 's/^\\s*if/\\/\\/if/' ${jenkinsfile}"
+ // Disable the utils.run line in the project's JenkinsfileRT.
+ sh "sed -i 's/^\\s*utils.run/\\/\\/utils.run/' ${jenkinsfile}"
+
+ println("About lo load local modified JenkinsfileRT")
+ sh "ls -al"
+ // Add declarations from file to this namespace. Provides 'bc'.
+ // TODO: Iterate through namespace to locate buildConfig
+ // objects without a standard name?
+ // NOTE: 'evaluate' runs any method calls it encounters; the sed
+ // edits above disable them to prevent undesired side effects.
+ jf = evaluate readFile(jenkinsfile)
+
+ def conda_prefix = "${conda_root}/envs/${env_name}".trim()
+
+ def path_found = false
+ for (var in bc.env_vars) {
+ println("--> ${var}")
+ if (var[0..3] == 'PATH') {
+ path_found = true
+ pathvar_idx = bc.env_vars.indexOf(var)
+ bc.env_vars.remove(pathvar_idx)
+ println(" --- ADJUSTING PATH FOR CONDA ENV ----")
+ var = "PATH=${conda_prefix}/bin:${WORKDIR}/miniconda/bin:${var[5..-1]}"
+ println(var)
+ bc.env_vars.add(var)
+ }
+ }
+ if (!path_found) {
+ println("--- Adding PATH prefix for conda environment and conda exe.")
+ bc.env_vars.add("PATH=${conda_prefix}/bin:${WORKDIR}/miniconda/bin:\$PATH")
+ }
+ bc.env_vars.add("ENVIRONMENT=VARIABLE-${pkg}")
+
+ def bconfig = utils.expandEnvVars(bc) // expand vars into .runtime
+
+ def env_vars = bconfig.runtime
+ // 'Activate' conda env and operate within it.
+ env_vars.add(0, "CONDA_SHLVL=1")
+ env_vars.add(0, "CONDA_PROMPT_MODIFIER=${env_name}")
+ env_vars.add(0, "CONDA_EXE=${conda_exe}")
+ env_vars.add(0, "CONDA_PREFIX=${conda_prefix}")
+ env_vars.add(0, "CONDA_PYTHON_EXE=${conda_prefix}/bin/python")
+ env_vars.add(0, "CONDA_DEFAULT_ENV=${env_name}")
+
+ println('FINAL ENV VARS')
+ for (var in env_vars) {
+ println(var)
+ }
+
+ // Run testing commands
+ withEnv(env_vars) {
+ println("============= withEnv ==========")
+ sh "env | sort"
+ // Obtain pip for build/install use
+ sh "conda install pip"
+ dir(pkg_name) {
+ sh "ls -l"
+ def conda_pkgs = ""
+ for (cpkg in bc.conda_packages) {
+ conda_pkgs = "${conda_pkgs} ${cpkg} "
+ }
+ sh "conda install ${conda_pkgs} pytest -c http://ssb.stsci.edu/astroconda -c defaults -q -y"
+
+ // If setup.py exists in project, run `python setup.py build` to prepare
+ // local source tree for testing. This is required for projects with C
+ // extensions.
+ if (fileExists("setup.py")) {
+ sh(script: "pip install --no-deps -e .")
+ }
+ println("Test commands(s):")
+ println(bc.test_cmds)
+ for (tcmd in bc.test_cmds) {
+ sh(script: "${tcmd} || true")
+ }
+ // Uninstall packages pulled in for this test run.
+ sh "conda remove ${conda_pkgs} --force -y"
+
+ // Read in test reports for display.
+ // TODO: Use shared lib for this functionality.
+ // Process the results file to include the build config name as a prefix
+ // on each test name so that it is obvious where each test
+ // result comes from.
+ report_exists = sh(script: "test -e *.xml", returnStatus: true)
+ if (report_exists == 0) {
+ repfile = sh(script:"find *.xml", returnStdout: true).trim()
+ command = "cp ${repfile} ${repfile}.modified"
+ sh(script:command)
+ sh(script:"sed -i 's/ name=\"/ name=\"[${pkg_name}:${bc.name}] /g' *.xml.modified")
+ step([$class: 'XUnitBuilder',
+ thresholds: [
+ [$class: 'SkippedThreshold', unstableThreshold: "${bc.skippedUnstableThresh}"],
+ [$class: 'SkippedThreshold', failureThreshold: "${bc.skippedFailureThresh}"],
+ [$class: 'FailedThreshold', unstableThreshold: "${bc.failedUnstableThresh}"],
+ [$class: 'FailedThreshold', failureThreshold: "${bc.failedFailureThresh}"]],
+ tools: [[$class: 'JUnitType', pattern: '*.xml.modified']]])
+ } else {
+ println("No .xml files found in directory ${pkg_name}. Test report ingestion skipped.")
+ }
+ }
+ }
+ } // endfor pkg
+ }
+
+ stage('archive') {
+ hostname = sh(script: "hostname", returnStdout: true).tokenize(".")[0]
+ println(hostname)
+ withCredentials([usernamePassword(credentialsId: '322ad15d-2f5b-4d06-87fa-b45a88596f30',
+ usernameVariable: 'USERNAME',
+ passwordVariable: 'PASSWORD')]) {
+ sh "rsync -avzr hstdp*.txt ${USERNAME}@${hostname}:${output_dir}"
+ }
+ }
+
+ } // end node()
}
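For illustration (not part of this commit): once the sed edits above have commented out the scm_checkout and utils.run lines, evaluating the neutered JenkinsfileRT executes only the remaining assignments, so the project's BuildConfig lands in this script's binding. A minimal sketch reusing the names from the hunk above:

// Sketch: import a project's build configuration without triggering
// its pipeline machinery. Assumes the 'if (utils.scm_checkout...' and
// 'utils.run(...)' lines were already commented out via sed.
jf = evaluate readFile("${WORKDIR}/${pkg_name}/JenkinsfileRT")
assert binding.hasVariable('bc')   // the per-project BuildConfig
println("Imported config '${bc.name}' with ${bc.test_cmds.size()} test command(s)")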
@@ -59,23 +246,13 @@ node('master') {
numToKeepStr: '4')), pipelineTriggers([])])
stage('create specfiles') {
deleteDir()
- sh "cp -r ${WORKSPACE}@script/*.sh ."
- stash name: "script", includes: "*.sh"
+ sh "cp -r ${WORKSPACE}@script/* ."
+ // Carry delivery_control files into each remote node's workspace.
+ stash name: "script", includes: "*"
parallel(
- Linux: { gen_specfiles('RHEL-6') },
- MacOS: { gen_specfiles('OSX-10.11') }
+ // Generate spec files. Only run tests on Linux (for now).
+ Linux: { gen_specfiles('RHEL-6', true) },
+ //MacOS: { gen_specfiles('OSX-10.13', false) }
)
}
-
- stage('archive') {
- // Retrieve the spec files from the nodes where they were created.
- unstash "spec-stash-RHEL-6"
- unstash "spec-stash-OSX-10.11"
- hostname = sh(script: "hostname", returnStdout: true).tokenize(".")[0]
- withCredentials([usernamePassword(credentialsId: '322ad15d-2f5b-4d06-87fa-b45a88596f30',
- usernameVariable: 'USERNAME',
- passwordVariable: 'PASSWORD')]) {
- sh "rsync -avzr hstdp*.txt ${USERNAME}@${hostname}:${output_dir}"
- }
- }
}
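For illustration (not part of this commit): the CONDA_* variables injected in gen_specfiles stand in for a shell-level `conda activate`, by exporting the variables a real activation would set. A minimal sketch of the same idea, reusing the variable names from the hunk above:

// Sketch: "activate" a conda environment via environment variables
// alone; PATH must lead with the environment's bin directory.
def conda_prefix = "${conda_root}/envs/${env_name}"
withEnv(["CONDA_PREFIX=${conda_prefix}",
         "CONDA_DEFAULT_ENV=${env_name}",
         "CONDA_SHLVL=1",
         "PATH=${conda_prefix}/bin:${env.PATH}"]) {
    sh "which python"   // resolves to ${conda_prefix}/bin/python
}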