Implement an optional compiler bootstrapping phase

Scott Wittenburg 2019-07-16 08:36:31 -06:00 committed by Todd Gamblin
parent 5323a5cff9
commit 6d745a56fd
10 changed files with 695 additions and 352 deletions

View file

@ -1,9 +1,11 @@
generate ci jobs: generate ci jobs:
variables:
git_strategy: clone
script: script:
- "./bin/generate-gitlab-ci-yml.sh" - "./bin/generate-gitlab-ci-yml.sh"
tags: tags:
- "spack-k8s" - "spack-k8s"
image: "spack/ubuntu:18.04" image: "scottwittenburg/spack_ci_generator_alpine"
artifacts: artifacts:
paths: paths:
- ci-generation - ci-generation

View file

@ -19,8 +19,10 @@ if [ -z "${SPACK_RELEASE_ENVIRONMENT_PATH}" ] ; then
fi fi
if [ -z "${CDASH_AUTH_TOKEN}" ] ; then if [ -z "${CDASH_AUTH_TOKEN}" ] ; then
echo "ERROR: missing variable: CDASH_AUTH_TOKEN" >&2 echo "WARNING: missing variable: CDASH_AUTH_TOKEN" >&2
exit 1 else
token_file="${temp_dir}/cdash_auth_token"
echo ${CDASH_AUTH_TOKEN} > ${token_file}
fi fi
if [ -z "${SPACK_RELEASE_ENVIRONMENT_REPO}" ] ; then if [ -z "${SPACK_RELEASE_ENVIRONMENT_REPO}" ] ; then
@ -51,11 +53,14 @@ fi
cd $env_dir cd $env_dir
token_file="${temp_dir}/cdash_auth_token" # The next command generates the .gitlab-ci.yml (and optionally creates a
echo ${CDASH_AUTH_TOKEN} > ${token_file} # buildgroup in cdash)
RELEASE_JOBS_ARGS=("--output-file" "${gen_ci_file}")
if [ ! -z "${token_file}" ]; then
RELEASE_JOBS_ARGS+=("--cdash-credentials" "${token_file}")
fi
# This commands generates the .gitlab-ci.yml and creates buildgroup in cdash spack release-jobs "${RELEASE_JOBS_ARGS[@]}"
spack release-jobs --force --output-file ${gen_ci_file} --cdash-credentials ${token_file}
if [[ $? -ne 0 ]]; then if [[ $? -ne 0 ]]; then
echo "spack release-jobs command failed" echo "spack release-jobs command failed"
@ -64,6 +69,7 @@ fi
cp ${gen_ci_file} "${original_directory}/.gitlab-ci.yml" cp ${gen_ci_file} "${original_directory}/.gitlab-ci.yml"
# Remove global from here, it's clobbering people's git identity config
git config --global user.email "robot@spack.io" git config --global user.email "robot@spack.io"
git config --global user.name "Build Robot" git config --global user.name "Build Robot"

View file

@ -12,29 +12,44 @@
### not (i.e. the source code has changed in a way that caused a change in the ### not (i.e. the source code has changed in a way that caused a change in the
### full_hash of the spec), this script will build the package, create a ### full_hash of the spec), this script will build the package, create a
### binary cache for it, and then push all related files to the remote binary ### binary cache for it, and then push all related files to the remote binary
### mirror. This script also communicates with a remote CDash instance to ### mirror. This script also optionally communicates with a remote CDash
### share status on the package build process. ### instance to share status on the package build process.
### ###
### The following environment variables are expected to be set in order for ### The following environment variables are (possibly) used within this script
### the various elements in this script to function properly. Listed first ### in order for the various elements to function properly.
### are two defaults we rely on from gitlab, then three we set up in the ###
### variables section of gitlab ourselves, and finally four variables ### First are two defaults we rely on from gitlab:
### written into the .gitlab-ci.yml file.
### ###
### CI_PROJECT_DIR ### CI_PROJECT_DIR
### CI_JOB_NAME ### CI_JOB_NAME
### ###
### The following must be set up in the variables section of gitlab:
###
### AWS_ACCESS_KEY_ID ### AWS_ACCESS_KEY_ID
### AWS_SECRET_ACCESS_KEY ### AWS_SECRET_ACCESS_KEY
### SPACK_SIGNING_KEY ### SPACK_SIGNING_KEY
### ###
### CDASH_BASE_URL ### SPACK_S3_UPLOAD_MIRROR_URL // only required in the short term for the cloud case
### CDASH_PROJECT ###
### CDASH_PROJECT_ENC ### The following variables are defined by the ci generation process and are
### CDASH_BUILD_NAME ### required:
### ROOT_SPEC ###
### DEPENDENCIES ### SPACK_ENABLE_CDASH
### MIRROR_URL ### SPACK_ROOT_SPEC
### SPACK_MIRROR_URL
### SPACK_JOB_SPEC_PKG_NAME
### SPACK_COMPILER_ACTION
###
### Finally, these variables are optionally defined by the ci generation
### process, and may or may not be present:
###
### SPACK_CDASH_BASE_URL
### SPACK_CDASH_PROJECT
### SPACK_CDASH_PROJECT_ENC
### SPACK_CDASH_BUILD_NAME
### SPACK_CDASH_SITE
### SPACK_RELATED_BUILDS
### SPACK_JOB_SPEC_BUILDGROUP
### ###
shopt -s expand_aliases shopt -s expand_aliases
@ -48,14 +63,19 @@ SPEC_DIR="${TEMP_DIR}/specs"
LOCAL_MIRROR="${CI_PROJECT_DIR}/local_mirror" LOCAL_MIRROR="${CI_PROJECT_DIR}/local_mirror"
BUILD_CACHE_DIR="${LOCAL_MIRROR}/build_cache" BUILD_CACHE_DIR="${LOCAL_MIRROR}/build_cache"
SPACK_BIN_DIR="${CI_PROJECT_DIR}/bin" SPACK_BIN_DIR="${CI_PROJECT_DIR}/bin"
CDASH_UPLOAD_URL="${CDASH_BASE_URL}/submit.php?project=${CDASH_PROJECT_ENC}"
DEP_JOB_RELATEBUILDS_URL="${CDASH_BASE_URL}/api/v1/relateBuilds.php" if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
CDASH_UPLOAD_URL="${SPACK_CDASH_BASE_URL}/submit.php?project=${SPACK_CDASH_PROJECT_ENC}"
DEP_JOB_RELATEBUILDS_URL="${SPACK_CDASH_BASE_URL}/api/v1/relateBuilds.php"
declare -a JOB_DEPS_PKG_NAMES declare -a JOB_DEPS_PKG_NAMES
fi
export SPACK_ROOT=${CI_PROJECT_DIR} export SPACK_ROOT=${CI_PROJECT_DIR}
export PATH="${SPACK_BIN_DIR}:${PATH}" # export PATH="${SPACK_BIN_DIR}:${PATH}"
export GNUPGHOME="${CI_PROJECT_DIR}/opt/spack/gpg" export GNUPGHOME="${CI_PROJECT_DIR}/opt/spack/gpg"
. "${CI_PROJECT_DIR}/share/spack/setup-env.sh"
mkdir -p ${JOB_LOG_DIR} mkdir -p ${JOB_LOG_DIR}
mkdir -p ${SPEC_DIR} mkdir -p ${SPEC_DIR}
@ -160,53 +180,43 @@ EOF
} }
gen_full_specs_for_job_and_deps() { gen_full_specs_for_job_and_deps() {
SPEC_YAML_PATH="${SPEC_DIR}/${SPACK_JOB_SPEC_PKG_NAME}.yaml"
local spec_names_to_save="${SPACK_JOB_SPEC_PKG_NAME}"
read -ra PARTSARRAY <<< "${CI_JOB_NAME}" if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
local pkgName="${PARTSARRAY[0]}" IFS=';' read -ra DEPS <<< "${SPACK_RELATED_BUILDS}"
local pkgVersion="${PARTSARRAY[1]}"
local compiler="${PARTSARRAY[2]}"
local osarch="${PARTSARRAY[3]}"
local buildGroup="${PARTSARRAY[@]:4}" # get everything after osarch
JOB_GROUP="${buildGroup}"
JOB_PKG_NAME="${pkgName}"
SPEC_YAML_PATH="${SPEC_DIR}/${pkgName}.yaml"
local root_spec_name="${ROOT_SPEC}"
local spec_names_to_save="${pkgName}"
IFS=';' read -ra DEPS <<< "${DEPENDENCIES}"
for i in "${DEPS[@]}"; do for i in "${DEPS[@]}"; do
read -ra PARTSARRAY <<< "${i}" depPkgName="${i}"
pkgName="${PARTSARRAY[0]}" spec_names_to_save="${spec_names_to_save} ${depPkgName}"
spec_names_to_save="${spec_names_to_save} ${pkgName}" JOB_DEPS_PKG_NAMES+=("${depPkgName}")
JOB_DEPS_PKG_NAMES+=("${pkgName}")
done done
fi
spack -d buildcache save-yaml --specs "${spec_names_to_save}" --root-spec "${root_spec_name}" --yaml-dir "${SPEC_DIR}" if [ "${SPACK_COMPILER_ACTION}" == "FIND_ANY" ]; then
# This corresponds to a bootstrapping phase where we need to
# rely on any available compiler to build the package (i.e. the
# compiler needed to be stripped from the spec), and thus we need
# to concretize the root spec again.
spack -d buildcache save-yaml --specs "${spec_names_to_save}" --root-spec "${SPACK_ROOT_SPEC}" --yaml-dir "${SPEC_DIR}"
else
# In this case, either we're relying on Spack to install a missing compiler
# bootstrapped in a previous phase, or else we only had one phase (like a
# site which already knows what compilers are available on its runners),
# so we don't want to concretize that root spec again. The reason we need
# this in the first case (bootstrapped compiler) is that we can't concretize
# a spec at this point if we're going to ask spack to "install_missing_compilers".
tmp_dir=$(mktemp -d)
TMP_YAML_PATH="${tmp_dir}/root.yaml"
ROOT_SPEC_YAML=$(spack python -c "import base64 ; import zlib ; print(str(zlib.decompress(base64.b64decode('${SPACK_ROOT_SPEC}')).decode('utf-8')))")
echo "${ROOT_SPEC_YAML}" > "${TMP_YAML_PATH}"
spack -d buildcache save-yaml --specs "${spec_names_to_save}" --root-spec-yaml "${TMP_YAML_PATH}" --yaml-dir "${SPEC_DIR}"
rm -rf ${tmp_dir}
fi
} }
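For reference, the SPACK_ROOT_SPEC decoded above is produced by format_root_spec (later in this commit): the concrete root spec's yaml is zlib-compressed and base64-encoded so it can ride along as a plain CI variable. A minimal Python sketch of that round trip, using a made-up yaml string:

    import base64
    import zlib

    root_spec_yaml = "spec:\n- readline: {version: '7.0'}\n"   # illustrative only

    # Generation side: compress and base64-encode the concrete spec yaml.
    encoded = base64.b64encode(zlib.compress(root_spec_yaml.encode('utf-8'))).decode('utf-8')

    # Job side (the `spack python -c ...` one-liner above): recover the yaml text.
    decoded = zlib.decompress(base64.b64decode(encoded)).decode('utf-8')
    assert decoded == root_spec_yaml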
begin_logging begin_logging
gen_full_specs_for_job_and_deps echo "Running job for spec: ${CI_JOB_NAME}"
echo "Building package ${CDASH_BUILD_NAME}, ${HASH}, ${MIRROR_URL}"
# Finally, list the compilers spack knows about
echo "Compiler Configurations:"
spack config get compilers
# Make the build_cache directory if it doesn't exist
mkdir -p "${BUILD_CACHE_DIR}"
# Get buildcache name so we can write a CDash build id file in the right place.
# If we're unable to get the buildcache name, we may have encountered a problem
# concretizing the spec, or some other issue that will eventually cause the job
# to fail.
JOB_BUILD_CACHE_ENTRY_NAME=`spack -d buildcache get-buildcache-name --spec-yaml "${SPEC_YAML_PATH}"`
if [[ $? -ne 0 ]]; then
echo "ERROR, unable to get buildcache entry name for job ${CI_JOB_NAME} (spec: ${CDASH_BUILD_NAME})"
exit 1
fi
# This should create the directory we referred to as GNUPGHOME earlier # This should create the directory we referred to as GNUPGHOME earlier
spack gpg list spack gpg list
@ -221,25 +231,83 @@ set -x
spack gpg list --trusted spack gpg list --trusted
spack gpg list --signing spack gpg list --signing
# To have spack install missing compilers, we need to add a custom
# configuration scope, then we pass that to the package installation
# command
CUSTOM_CONFIG_SCOPE_DIR="${TEMP_DIR}/config_scope"
mkdir -p "${CUSTOM_CONFIG_SCOPE_DIR}"
CUSTOM_CONFIG_SCOPE_ARG=""
if [ "${SPACK_COMPILER_ACTION}" == "INSTALL_MISSING" ]; then
echo "Make sure bootstrapped compiler will be installed"
custom_config_file_path="${CUSTOM_CONFIG_SCOPE_DIR}/config.yaml"
cat <<CONFIG_STUFF > "${custom_config_file_path}"
config:
install_missing_compilers: true
CONFIG_STUFF
CUSTOM_CONFIG_SCOPE_ARG="-C ${CUSTOM_CONFIG_SCOPE_DIR}"
# Configure the binary mirror where, if needed, this job's compiler
# was installed in binary package form, then tell spack to
# install_missing_compilers.
elif [ "${SPACK_COMPILER_ACTION}" == "FIND_ANY" ]; then
echo "Just find any available compiler"
spack compiler find
else
echo "No compiler action to be taken"
fi
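The SPACK_COMPILER_ACTION value handled above is chosen on the generation side (see release_jobs further down in this commit). A small Python sketch of that decision logic, assuming only the main 'specs' phase installs bootstrapped compilers:

    def choose_compiler_action(num_phases, is_main_phase):
        # Single-phase pipelines assume the runners already have usable compilers.
        if num_phases <= 1:
            return 'NONE'
        # The main phase asks spack to install any compiler bootstrapped earlier.
        if is_main_phase:
            return 'INSTALL_MISSING'
        # Bootstrap phases just use whatever compiler the runner provides.
        return 'FIND_ANY'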
# Finally, list the compilers spack knows about
echo "Compiler Configurations:"
spack config get compilers
# Write full-deps yamls for this job spec and its dependencies
gen_full_specs_for_job_and_deps
# Make the build_cache directory if it doesn't exist
mkdir -p "${BUILD_CACHE_DIR}"
# Get buildcache name so we can write a CDash build id file in the right place.
# If we're unable to get the buildcache name, we may have encountered a problem
# concretizing the spec, or some other issue that will eventually cause the job
# to fail.
JOB_BUILD_CACHE_ENTRY_NAME=`spack -d buildcache get-buildcache-name --spec-yaml "${SPEC_YAML_PATH}"`
if [[ $? -ne 0 ]]; then
echo "ERROR, unable to get buildcache entry name for job ${CI_JOB_NAME}"
exit 1
fi
if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
# Whether we have to build the spec or download it pre-built, we expect to find # Whether we have to build the spec or download it pre-built, we expect to find
# the cdash build id file sitting in this location afterwards. # the cdash build id file sitting in this location afterwards.
JOB_CDASH_ID_FILE="${BUILD_CACHE_DIR}/${JOB_BUILD_CACHE_ENTRY_NAME}.cdashid" JOB_CDASH_ID_FILE="${BUILD_CACHE_DIR}/${JOB_BUILD_CACHE_ENTRY_NAME}.cdashid"
fi
# Finally, we can check the spec we have been tasked with build against # Finally, we can check the spec we have been tasked with build against
# the built binary on the remote mirror to see if it needs to be rebuilt # the built binary on the remote mirror to see if it needs to be rebuilt
spack -d buildcache check --spec-yaml "${SPEC_YAML_PATH}" --mirror-url "${MIRROR_URL}" --rebuild-on-error spack -d buildcache check --spec-yaml "${SPEC_YAML_PATH}" --mirror-url "${SPACK_MIRROR_URL}" --rebuild-on-error
if [[ $? -ne 0 ]]; then if [[ $? -ne 0 ]]; then
# Configure mirror # Configure mirror
spack mirror add local_artifact_mirror "file://${LOCAL_MIRROR}" spack mirror add local_artifact_mirror "file://${LOCAL_MIRROR}"
if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
JOB_CDASH_ID="NONE" JOB_CDASH_ID="NONE"
# Install package, using the buildcache from the local mirror to # Install package, using the buildcache from the local mirror to
# satisfy dependencies. # satisfy dependencies.
BUILD_ID_LINE=`spack -d -k -v install --use-cache --keep-stage --cdash-upload-url "${CDASH_UPLOAD_URL}" --cdash-build "${CDASH_BUILD_NAME}" --cdash-site "Spack AWS Gitlab Instance" --cdash-track "${JOB_GROUP}" -f "${SPEC_YAML_PATH}" | grep "buildSummary\\.php"` BUILD_ID_LINE=`spack -d -k -v "${CUSTOM_CONFIG_SCOPE_ARG}" install --keep-stage --cdash-upload-url "${CDASH_UPLOAD_URL}" --cdash-build "${SPACK_CDASH_BUILD_NAME}" --cdash-site "${SPACK_CDASH_SITE}" --cdash-track "${SPACK_JOB_SPEC_BUILDGROUP}" -f "${SPEC_YAML_PATH}" | grep "buildSummary\\.php"`
check_error $? "spack install" check_error $? "spack install"
# By parsing the output of the "spack install" command, we can get the
# buildid generated for us by CDash
JOB_CDASH_ID=$(extract_build_id "${BUILD_ID_LINE}")
# Write the .cdashid file to the buildcache as well
echo "${JOB_CDASH_ID}" >> ${JOB_CDASH_ID_FILE}
else
spack -d -k -v "${CUSTOM_CONFIG_SCOPE_ARG}" install --keep-stage -f "${SPEC_YAML_PATH}"
fi
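The extract_build_id helper used above is defined earlier in this script and is not part of this hunk; a rough Python equivalent of what it has to do, assuming the matched line carries a CDash buildSummary.php URL with a buildid parameter:

    import re

    def extract_build_id(line):
        # e.g. "... https://cdash.example.com/buildSummary.php?buildid=12345"
        match = re.search(r'buildSummary\.php\?buildid=(\d+)', line)
        return match.group(1) if match else 'NONE'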
# Copy some log files into an artifact location, once we have a way # Copy some log files into an artifact location, once we have a way
# to provide a spec.yaml file to more spack commands (e.g. "location") # to provide a spec.yaml file to more spack commands (e.g. "location")
# stage_dir=$(spack location --stage-dir -f "${SPEC_YAML_PATH}") # stage_dir=$(spack location --stage-dir -f "${SPEC_YAML_PATH}")
@ -248,35 +316,37 @@ if [[ $? -ne 0 ]]; then
# cp "${build_log_file}" "${JOB_LOG_DIR}/" # cp "${build_log_file}" "${JOB_LOG_DIR}/"
# cp "${config_log_file}" "${JOB_LOG_DIR}/" # cp "${config_log_file}" "${JOB_LOG_DIR}/"
# By parsing the output of the "spack install" command, we can get the
# buildid generated for us by CDash
JOB_CDASH_ID=$(extract_build_id "${BUILD_ID_LINE}")
# Create buildcache entry for this package, reading the spec from the yaml # Create buildcache entry for this package, reading the spec from the yaml
# file. # file.
spack -d buildcache create --spec-yaml "${SPEC_YAML_PATH}" -a -f -d "${LOCAL_MIRROR}" --no-rebuild-index spack -d buildcache create --spec-yaml "${SPEC_YAML_PATH}" -a -f -d "${LOCAL_MIRROR}" --no-rebuild-index
check_error $? "spack buildcache create" check_error $? "spack buildcache create"
# Write the .cdashid file to the buildcache as well
echo "${JOB_CDASH_ID}" >> ${JOB_CDASH_ID_FILE}
# TODO: The upload-s3 command should eventually be replaced with something # TODO: The upload-s3 command should eventually be replaced with something
# like: "spack buildcache put <mirror> <spec>", when that subcommand is # like: "spack buildcache put <mirror> <spec>", when that subcommand is
# properly implemented. # properly implemented.
spack -d upload-s3 spec --base-dir "${LOCAL_MIRROR}" --spec-yaml "${SPEC_YAML_PATH}" if [ ! -z "${SPACK_S3_UPLOAD_MIRROR_URL}" ] ; then
spack -d upload-s3 spec --base-dir "${LOCAL_MIRROR}" --spec-yaml "${SPEC_YAML_PATH}" --endpoint-url "${SPACK_S3_UPLOAD_MIRROR_URL}"
check_error $? "spack upload-s3 spec" check_error $? "spack upload-s3 spec"
else else
echo "spec ${CDASH_BUILD_NAME} is already up to date on remote mirror, downloading it" spack -d buildcache copy --base-dir "${LOCAL_MIRROR}" --spec-yaml "${SPEC_YAML_PATH}" --destination-url "${SPACK_MIRROR_URL}"
fi
else
echo "spec ${CI_JOB_NAME} is already up to date on remote mirror, downloading it"
# Configure remote mirror so we can download buildcache entry # Configure remote mirror so we can download buildcache entry
spack mirror add remote_binary_mirror ${MIRROR_URL} spack mirror add remote_binary_mirror ${SPACK_MIRROR_URL}
# Now download it # Now download it
spack -d buildcache download --spec-yaml "${SPEC_YAML_PATH}" --path "${BUILD_CACHE_DIR}/" --require-cdashid BUILDCACHE_DL_ARGS=("--spec-yaml" "${SPEC_YAML_PATH}" "--path" "${BUILD_CACHE_DIR}/" )
if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
BUILDCACHE_DL_ARGS+=( "--require-cdashid" )
fi
spack -d buildcache download "${BUILDCACHE_DL_ARGS[@]}"
check_error $? "spack buildcache download" check_error $? "spack buildcache download"
fi fi
# The next step is to relate this job to the jobs it depends on # The next step is to relate this job to the jobs it depends on
if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
if [ -f "${JOB_CDASH_ID_FILE}" ]; then if [ -f "${JOB_CDASH_ID_FILE}" ]; then
JOB_CDASH_BUILD_ID=$(<${JOB_CDASH_ID_FILE}) JOB_CDASH_BUILD_ID=$(<${JOB_CDASH_ID_FILE})
@ -299,8 +369,8 @@ if [ -f "${JOB_CDASH_ID_FILE}" ]; then
if [ -f "${DEP_JOB_ID_FILE}" ]; then if [ -f "${DEP_JOB_ID_FILE}" ]; then
DEP_JOB_CDASH_BUILD_ID=$(<${DEP_JOB_ID_FILE}) DEP_JOB_CDASH_BUILD_ID=$(<${DEP_JOB_ID_FILE})
echo "File ${DEP_JOB_ID_FILE} contained value ${DEP_JOB_CDASH_BUILD_ID}" echo "File ${DEP_JOB_ID_FILE} contained value ${DEP_JOB_CDASH_BUILD_ID}"
echo "Relating builds -> ${CDASH_BUILD_NAME} (buildid=${JOB_CDASH_BUILD_ID}) depends on ${DEP_PKG_NAME} (buildid=${DEP_JOB_CDASH_BUILD_ID})" echo "Relating builds -> ${SPACK_CDASH_BUILD_NAME} (buildid=${JOB_CDASH_BUILD_ID}) depends on ${DEP_PKG_NAME} (buildid=${DEP_JOB_CDASH_BUILD_ID})"
relateBuildsPostBody="$(get_relate_builds_post_data "${CDASH_PROJECT}" ${JOB_CDASH_BUILD_ID} ${DEP_JOB_CDASH_BUILD_ID})" relateBuildsPostBody="$(get_relate_builds_post_data "${SPACK_CDASH_PROJECT}" ${JOB_CDASH_BUILD_ID} ${DEP_JOB_CDASH_BUILD_ID})"
relateBuildsResult=`curl "${DEP_JOB_RELATEBUILDS_URL}" -H "Content-Type: application/json" -H "Accept: application/json" -d "${relateBuildsPostBody}"` relateBuildsResult=`curl "${DEP_JOB_RELATEBUILDS_URL}" -H "Content-Type: application/json" -H "Accept: application/json" -d "${relateBuildsPostBody}"`
echo "Result of curl request: ${relateBuildsResult}" echo "Result of curl request: ${relateBuildsResult}"
else else
@ -316,6 +386,7 @@ else
echo "ERROR: Did not find expected .cdashid file ${JOB_CDASH_ID_FILE}" echo "ERROR: Did not find expected .cdashid file ${JOB_CDASH_ID_FILE}"
exit 1 exit 1
fi fi
fi
# Show the size of the buildcache and a list of what's in it, directly # Show the size of the buildcache and a list of what's in it, directly
# in the gitlab log output # in the gitlab log output

View file

@ -1,11 +1,19 @@
spack: spack:
definitions: definitions:
- compiler-pkgs:
- 'llvm+clang@6.0.1 os=centos7'
- 'gcc@6.5.0 os=centos7'
- 'llvm+clang@6.0.1 os=ubuntu18.04'
- 'gcc@6.5.0 os=ubuntu18.04'
- pkgs: - pkgs:
- readline@7.0 - readline@7.0
# - xsdk@0.4.0
- compilers: - compilers:
- '%gcc@5.5.0' - '%gcc@5.5.0'
- '%gcc@6.5.0'
- '%gcc@7.3.0' - '%gcc@7.3.0'
- '%clang@6.0.0' - '%clang@6.0.0'
- '%clang@6.0.1'
- oses: - oses:
- os=ubuntu18.04 - os=ubuntu18.04
- os=centos7 - os=centos7
@ -17,15 +25,15 @@ spack:
- [$oses] - [$oses]
exclude: exclude:
- '%gcc@7.3.0 os=centos7' - '%gcc@7.3.0 os=centos7'
- '%gcc@5.5.0 os=ubuntu18.04'
mirrors: mirrors:
cloud_gitlab: https://mirror.spack.io cloud_gitlab: https://mirror.spack.io
compilers: compilers:
# The .gitlab-ci.yml for this project picks a Docker container which is # The .gitlab-ci.yml for this project picks a Docker container which does
# based on ubuntu18.04 and which already has some compilers configured. # not have any compilers pre-built and ready to use, so we need to fake the
# Here we just add some of the ones which are defined on a different # existence of those here.
# builder image.
- compiler: - compiler:
operating_system: centos7 operating_system: centos7
modules: [] modules: []
@ -36,6 +44,16 @@ spack:
fc: /not/used fc: /not/used
spec: gcc@5.5.0 spec: gcc@5.5.0
target: x86_64 target: x86_64
- compiler:
operating_system: centos7
modules: []
paths:
cc: /not/used
cxx: /not/used
f77: /not/used
fc: /not/used
spec: gcc@6.5.0
target: x86_64
- compiler: - compiler:
operating_system: centos7 operating_system: centos7
modules: [] modules: []
@ -46,11 +64,64 @@ spack:
fc: /not/used fc: /not/used
spec: clang@6.0.0 spec: clang@6.0.0
target: x86_64 target: x86_64
- compiler:
operating_system: centos7
modules: []
paths:
cc: /not/used
cxx: /not/used
f77: /not/used
fc: /not/used
spec: clang@6.0.1
target: x86_64
- compiler:
operating_system: ubuntu18.04
modules: []
paths:
cc: /not/used
cxx: /not/used
f77: /not/used
fc: /not/used
spec: clang@6.0.0
target: x86_64
- compiler:
operating_system: ubuntu18.04
modules: []
paths:
cc: /not/used
cxx: /not/used
f77: /not/used
fc: /not/used
spec: clang@6.0.1
target: x86_64
- compiler:
operating_system: ubuntu18.04
modules: []
paths:
cc: /not/used
cxx: /not/used
f77: /not/used
fc: /not/used
spec: gcc@6.5.0
target: x86_64
- compiler:
operating_system: ubuntu18.04
modules: []
paths:
cc: /not/used
cxx: /not/used
f77: /not/used
fc: /not/used
spec: gcc@7.3.0
target: x86_64
gitlab-ci: gitlab-ci:
bootstrap:
- name: compiler-pkgs
compiler-agnostic: true
mappings: mappings:
- spack-cloud-ubuntu: - # spack-cloud-ubuntu
match: match:
# these are specs, if *any* match the spec under consideration, this # these are specs, if *any* match the spec under consideration, this
# 'mapping' will be used to generate the CI job # 'mapping' will be used to generate the CI job
@ -61,8 +132,10 @@ spack:
# a part of the CI workflow # a part of the CI workflow
tags: tags:
- spack-k8s - spack-k8s
image: scottwittenburg/spack_builder_ubuntu_18.04 image:
- spack-cloud-centos: name: scottwittenburg/spack_builder_ubuntu_18.04
entrypoint: [""]
- # spack-cloud-centos
match: match:
# these are specs, if *any* match the spec under consideration, this # these are specs, if *any* match the spec under consideration, this
# 'mapping' will be used to generate the CI job # 'mapping' will be used to generate the CI job
@ -70,28 +143,15 @@ spack:
runner-attributes: runner-attributes:
tags: tags:
- spack-k8s - spack-k8s
image: spack/centos:7 image:
- summit: name: scottwittenburg/spack_builder_centos_7
match: entrypoint: [""]
- os=rhel7
- target=power9
- platform=secret-sauce
runner-attributes:
tags:
# this is a set of tags
- summit
- '{os}-{target}'
- rhel7
- centos7
- x86_64
variables:
SCHEDULER_ARGS: "arg2 arg2"
cdash: cdash:
build-group: Release Testing build-group: Release Testing
url: https://cdash.spack.io url: http://cdash
project: Spack Testing project: Spack Testing
site: Spack AWS Gitlab Instance site: Spack Docker-Compose Workflow
repos: [] repos: []
upstreams: {} upstreams: {}

View file

@ -801,7 +801,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
for description in descriptions: for description in descriptions:
url = os.path.join(mirror_root, description['url']) url = os.path.join(mirror_root, description['url'])
path = description['path'] path = description['path']
fail_if_missing = not description['required'] fail_if_missing = description['required']
mkdirp(path) mkdirp(path)

View file

@ -5,6 +5,7 @@
import argparse import argparse
import os import os
import shutil
import sys import sys
import llnl.util.tty as tty import llnl.util.tty as tty
@ -176,8 +177,11 @@ def setup_parser(subparser):
saveyaml = subparsers.add_parser('save-yaml', saveyaml = subparsers.add_parser('save-yaml',
help=save_spec_yamls.__doc__) help=save_spec_yamls.__doc__)
saveyaml.add_argument( saveyaml.add_argument(
'-r', '--root-spec', default=None, '--root-spec', default=None,
help='Root spec of dependent spec') help='Root spec of dependent spec')
saveyaml.add_argument(
'--root-spec-yaml', default=None,
help='Path to yaml file containing root spec of dependent spec')
saveyaml.add_argument( saveyaml.add_argument(
'-s', '--specs', default=None, '-s', '--specs', default=None,
help='List of dependent specs for which saved yaml is desired') help='List of dependent specs for which saved yaml is desired')
@ -186,6 +190,19 @@ def setup_parser(subparser):
help='Path to directory where spec yamls should be saved') help='Path to directory where spec yamls should be saved')
saveyaml.set_defaults(func=save_spec_yamls) saveyaml.set_defaults(func=save_spec_yamls)
# Copy buildcache from some directory to another mirror url
copy = subparsers.add_parser('copy', help=buildcache_copy.__doc__)
copy.add_argument(
'--base-dir', default=None,
help='Path to mirror directory (root of existing buildcache)')
copy.add_argument(
'--spec-yaml', default=None,
help='Path to spec yaml file representing buildcache entry to copy')
copy.add_argument(
'--destination-url', default=None,
help='Destination mirror url')
copy.set_defaults(func=buildcache_copy)
def find_matching_specs(pkgs, allow_multiple_matches=False, env=None): def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
"""Returns a list of specs matching the not necessarily """Returns a list of specs matching the not necessarily
@ -526,7 +543,7 @@ def save_spec_yamls(args):
successful. If any errors or exceptions are encountered, or if expected successful. If any errors or exceptions are encountered, or if expected
command-line arguments are not provided, then the exit code will be command-line arguments are not provided, then the exit code will be
non-zero.""" non-zero."""
if not args.root_spec: if not args.root_spec and not args.root_spec_yaml:
tty.msg('No root spec provided, exiting.') tty.msg('No root spec provided, exiting.')
sys.exit(1) sys.exit(1)
@ -538,6 +555,10 @@ def save_spec_yamls(args):
tty.msg('No yaml directory provided, exiting.') tty.msg('No yaml directory provided, exiting.')
sys.exit(1) sys.exit(1)
if args.root_spec_yaml:
with open(args.root_spec_yaml) as fd:
root_spec_as_yaml = fd.read()
else:
root_spec = Spec(args.root_spec) root_spec = Spec(args.root_spec)
root_spec.concretize() root_spec.concretize()
root_spec_as_yaml = root_spec.to_yaml(hash=ht.build_hash) root_spec_as_yaml = root_spec.to_yaml(hash=ht.build_hash)
@ -548,6 +569,78 @@ def save_spec_yamls(args):
sys.exit(0) sys.exit(0)
def buildcache_copy(args):
"""Copy a buildcache entry and all its files from one mirror, given as
'--base-dir', to some other mirror, specified as '--destination-url'.
The specific buildcache entry to be copied from one location to the
other is identified using the '--spec-yaml' argument."""
# TODO: This sub-command should go away once #11117 is merged
if not args.spec_yaml:
tty.msg('No spec yaml provided, exiting.')
sys.exit(1)
if not args.base_dir:
tty.msg('No base directory provided, exiting.')
sys.exit(1)
if not args.destination_url:
tty.msg('No destination mirror url provided, exiting.')
sys.exit(1)
dest_url = args.destination_url
if dest_url[0:7] != 'file://' and dest_url[0] != '/':
tty.msg('Only urls beginning with "file://" or "/" are supported ' +
'by buildcache copy.')
sys.exit(1)
try:
with open(args.spec_yaml, 'r') as fd:
spec = Spec.from_yaml(fd.read())
except Exception as e:
tty.debug(e)
tty.error('Unable to concretize spec from yaml {0}'.format(
args.spec_yaml))
sys.exit(1)
dest_root_path = dest_url
if dest_url[0:7] == 'file://':
dest_root_path = dest_url[7:]
build_cache_dir = bindist.build_cache_relative_path()
tarball_rel_path = os.path.join(
build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)
specfile_rel_path = os.path.join(
build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)
cdashidfile_rel_path = os.path.join(
build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)
# Make sure directory structure exists before attempting to copy
os.makedirs(os.path.dirname(tarball_dest_path))
# Now copy the specfile and tarball files to the destination mirror
tty.msg('Copying {0}'.format(tarball_rel_path))
shutil.copyfile(tarball_src_path, tarball_dest_path)
tty.msg('Copying {0}'.format(specfile_rel_path))
shutil.copyfile(specfile_src_path, specfile_dest_path)
# Copy the cdashid file (if exists) to the destination mirror
if os.path.exists(cdashid_src_path):
tty.msg('Copying {0}'.format(cdashidfile_rel_path))
shutil.copyfile(cdashid_src_path, cdashid_dest_path)
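A typical invocation of the new sub-command might look like (paths and urls here are hypothetical): spack buildcache copy --base-dir /tmp/local_mirror --spec-yaml /tmp/specs/readline.spec.yaml --destination-url file:///mirrors/shared. It copies the .spack tarball, the .spec.yaml file, and the .cdashid file (if present) for that one buildcache entry.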
def buildcache(parser, args): def buildcache(parser, args):
if args.func: if args.func:
args.func(args) args.func(args)

View file

@ -3,9 +3,10 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import base64
import json import json
import zlib
from jsonschema import validate, ValidationError
from six import iteritems from six import iteritems
from six.moves.urllib.error import HTTPError, URLError from six.moves.urllib.error import HTTPError, URLError
from six.moves.urllib.parse import urlencode from six.moves.urllib.parse import urlencode
@ -14,10 +15,11 @@
import llnl.util.tty as tty import llnl.util.tty as tty
import spack.environment as ev import spack.environment as ev
import spack.compilers as compilers
from spack.dependency import all_deptypes from spack.dependency import all_deptypes
from spack.error import SpackError from spack.error import SpackError
import spack.hash_types as ht
from spack.spec import Spec from spack.spec import Spec
from spack.schema.specs_deps import schema as specs_deps_schema
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
description = "generate release build set as .gitlab-ci.yml" description = "generate release build set as .gitlab-ci.yml"
@ -26,18 +28,10 @@
def setup_parser(subparser): def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true', default=False,
help="Force re-concretization of environment first")
subparser.add_argument( subparser.add_argument(
'-o', '--output-file', default=".gitlab-ci.yml", '-o', '--output-file', default=".gitlab-ci.yml",
help="path to output file to write") help="path to output file to write")
subparser.add_argument(
'-k', '--signing-key', default=None,
help="hash of gpg key to use for package signing")
subparser.add_argument( subparser.add_argument(
'-p', '--print-summary', action='store_true', default=False, '-p', '--print-summary', action='store_true', default=False,
help="Print summary of staged jobs to standard output") help="Print summary of staged jobs to standard output")
@ -54,7 +48,9 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
"type": group_type "type": group_type
} }
request = Request(url, data=json.dumps(data), headers=headers) enc_data = json.dumps(data).encode('utf-8')
request = Request(url, data=enc_data, headers=headers)
response = opener.open(request) response = opener.open(request)
response_code = response.getcode() response_code = response.getcode()
@ -103,7 +99,9 @@ def populate_buildgroup(job_names, group_name, project, site,
} for name in job_names] } for name in job_names]
} }
request = Request(url, data=json.dumps(data), headers=headers) enc_data = json.dumps(data).encode('utf-8')
request = Request(url, data=enc_data, headers=headers)
request.get_method = lambda: 'PUT' request.get_method = lambda: 'PUT'
response = opener.open(request) response = opener.open(request)
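The switch to json.dumps(...).encode('utf-8') in the two hunks above reflects that urllib's Request on Python 3 only accepts a bytes body. A minimal sketch of the pattern (the URL and payload fields are illustrative, not the exact ones this command sends):

    import json
    from six.moves.urllib.request import Request

    data = {'name': 'Release Testing', 'project': 'Spack Testing', 'type': 'Daily'}
    # Python 3's urllib rejects str bodies, so encode the JSON payload to bytes.
    request = Request('http://cdash/api/v1/buildgroup.php',
                      data=json.dumps(data).encode('utf-8'),
                      headers={'Content-Type': 'application/json'})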
@ -115,9 +113,43 @@ def populate_buildgroup(job_names, group_name, project, site,
raise SpackError(msg) raise SpackError(msg)
def get_job_name(spec, osarch, build_group): def is_main_phase(phase_name):
return '{0} {1} {2} {3} {4}'.format( return True if phase_name == 'specs' else False
spec.name, spec.version, spec.compiler, osarch, build_group)
def get_job_name(phase, strip_compiler, spec, osarch, build_group):
item_idx = 0
format_str = ''
format_args = []
if phase:
format_str += '({{{0}}})'.format(item_idx)
format_args.append(phase)
item_idx += 1
format_str += ' {{{0}}}'.format(item_idx)
format_args.append(spec.name)
item_idx += 1
format_str += ' {{{0}}}'.format(item_idx)
format_args.append(spec.version)
item_idx += 1
if is_main_phase(phase) is True or strip_compiler is False:
format_str += ' {{{0}}}'.format(item_idx)
format_args.append(spec.compiler)
item_idx += 1
format_str += ' {{{0}}}'.format(item_idx)
format_args.append(osarch)
item_idx += 1
if build_group:
format_str += ' {{{0}}}'.format(item_idx)
format_args.append(build_group)
item_idx += 1
return format_str.format(*format_args)
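For example (values illustrative), a bootstrap-phase job generated with compiler-agnostic: true would get a name like '(compiler-pkgs) gcc 6.5.0 linux-ubuntu18.04-x86_64 Release Testing', with the compiler token stripped, while a main-phase job keeps it, e.g. '(specs) readline 7.0 gcc@6.5.0 linux-ubuntu18.04-x86_64 Release Testing'.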
def get_cdash_build_name(spec, build_group): def get_cdash_build_name(spec, build_group):
@ -137,6 +169,17 @@ def get_spec_string(spec):
return spec.format(''.join(format_elements)) return spec.format(''.join(format_elements))
def format_root_spec(spec, main_phase, strip_compiler):
if main_phase is False and strip_compiler is True:
return '{0}@{1} arch={2}'.format(
spec.name, spec.version, spec.architecture)
else:
spec_yaml = spec.to_yaml(hash=ht.build_hash).encode('utf-8')
return str(base64.b64encode(zlib.compress(spec_yaml)).decode('utf-8'))
# return '{0}@{1}%{2} arch={3}'.format(
# spec.name, spec.version, spec.compiler, spec.architecture)
def spec_deps_key_label(s): def spec_deps_key_label(s):
return s.dag_hash(), "%s/%s" % (s.name, s.dag_hash(7)) return s.dag_hash(), "%s/%s" % (s.name, s.dag_hash(7))
@ -152,14 +195,6 @@ def _add_dependency(spec_label, dep_label, deps):
def get_spec_dependencies(specs, deps, spec_labels): def get_spec_dependencies(specs, deps, spec_labels):
spec_deps_obj = compute_spec_deps(specs) spec_deps_obj = compute_spec_deps(specs)
try:
validate(spec_deps_obj, specs_deps_schema)
except ValidationError as val_err:
tty.error('Ill-formed specs dependencies JSON object')
tty.error(spec_deps_obj)
tty.debug(val_err)
return
if spec_deps_obj: if spec_deps_obj:
dependencies = spec_deps_obj['dependencies'] dependencies = spec_deps_obj['dependencies']
specs = spec_deps_obj['specs'] specs = spec_deps_obj['specs']
@ -259,7 +294,7 @@ def print_staging_summary(spec_labels, dependencies, stages):
stage_index += 1 stage_index += 1
def compute_spec_deps(spec_list, stream_like=None): def compute_spec_deps(spec_list):
""" """
Computes all the dependencies for the spec(s) and generates a JSON Computes all the dependencies for the spec(s) and generates a JSON
object which provides both a list of unique spec names as well as a object which provides both a list of unique spec names as well as a
@ -311,10 +346,6 @@ def compute_spec_deps(spec_list, stream_like=None):
] ]
} }
The object can be optionally written out to some stream. This is
useful, for example, when we need to concretize and generate the
dependencies of a spec in a specific docker container.
""" """
deptype = all_deptypes deptype = all_deptypes
spec_labels = {} spec_labels = {}
@ -331,7 +362,8 @@ def append_dep(s, d):
for spec in spec_list: for spec in spec_list:
spec.concretize() spec.concretize()
root_spec = get_spec_string(spec) # root_spec = get_spec_string(spec)
root_spec = spec
rkey, rlabel = spec_deps_key_label(spec) rkey, rlabel = spec_deps_key_label(spec)
@ -359,9 +391,6 @@ def append_dep(s, d):
'dependencies': dependencies, 'dependencies': dependencies,
} }
if stream_like:
stream_like.write(json.dumps(deps_json_obj))
return deps_json_obj return deps_json_obj
@ -379,7 +408,6 @@ def find_matching_config(spec, ci_mappings):
def release_jobs(parser, args): def release_jobs(parser, args):
env = ev.get_env(args, 'release-jobs', required=True) env = ev.get_env(args, 'release-jobs', required=True)
env.concretize(force=args.force)
# FIXME: What's the difference between one that opens with 'spack' # FIXME: What's the difference between one that opens with 'spack'
# and one that opens with 'env'? This will only handle the former. # and one that opens with 'env'? This will only handle the former.
@ -390,6 +418,12 @@ def release_jobs(parser, args):
ci_mappings = yaml_root['gitlab-ci']['mappings'] ci_mappings = yaml_root['gitlab-ci']['mappings']
build_group = None
enable_cdash_reporting = False
cdash_auth_token = None
if 'cdash' in yaml_root:
enable_cdash_reporting = True
ci_cdash = yaml_root['cdash'] ci_cdash = yaml_root['cdash']
build_group = ci_cdash['build-group'] build_group = ci_cdash['build-group']
cdash_url = ci_cdash['url'] cdash_url = ci_cdash['url']
@ -398,7 +432,6 @@ def release_jobs(parser, args):
eq_idx = proj_enc.find('=') + 1 eq_idx = proj_enc.find('=') + 1
cdash_project_enc = proj_enc[eq_idx:] cdash_project_enc = proj_enc[eq_idx:]
cdash_site = ci_cdash['site'] cdash_site = ci_cdash['site']
cdash_auth_token = None
if args.cdash_credentials: if args.cdash_credentials:
with open(args.cdash_credentials) as fd: with open(args.cdash_credentials) as fd:
@ -406,32 +439,71 @@ def release_jobs(parser, args):
cdash_auth_token = cdash_auth_token.strip() cdash_auth_token = cdash_auth_token.strip()
ci_mirrors = yaml_root['mirrors'] ci_mirrors = yaml_root['mirrors']
mirror_urls = ci_mirrors.values() mirror_urls = [url for url in ci_mirrors.values()]
spec_labels, dependencies, stages = stage_spec_jobs(env.all_specs()) bootstrap_specs = []
phases = []
if 'bootstrap' in yaml_root['gitlab-ci']:
for phase in yaml_root['gitlab-ci']['bootstrap']:
try:
phase_name = phase.get('name')
strip_compilers = phase.get('compiler-agnostic')
except AttributeError:
phase_name = phase
strip_compilers = False
phases.append({
'name': phase_name,
'strip-compilers': strip_compilers,
})
if not stages: for bs in env.spec_lists[phase_name]:
tty.msg('No jobs staged, exiting.') bootstrap_specs.append({
return 'spec': bs,
'phase-name': phase_name,
'strip-compilers': strip_compilers,
})
phases.append({
'name': 'specs',
'strip-compilers': False,
})
staged_phases = {}
for phase in phases:
phase_name = phase['name']
staged_phases[phase_name] = stage_spec_jobs(env.spec_lists[phase_name])
if args.print_summary: if args.print_summary:
print_staging_summary(spec_labels, dependencies, stages) for phase in phases:
phase_name = phase['name']
tty.msg('Stages for phase "{0}"'.format(phase_name))
phase_stages = staged_phases[phase_name]
print_staging_summary(*phase_stages)
all_job_names = [] all_job_names = []
output_object = {} output_object = {}
job_count = 0 job_id = 0
stage_id = 0
stage_names = ['stage-{0}'.format(i) for i in range(len(stages))] stage_names = []
stage = 0
for phase in phases:
phase_name = phase['name']
strip_compilers = phase['strip-compilers']
main_phase = is_main_phase(phase_name)
spec_labels, dependencies, stages = staged_phases[phase_name]
for stage_jobs in stages: for stage_jobs in stages:
stage_name = stage_names[stage] stage_name = 'stage-{0}'.format(stage_id)
stage_names.append(stage_name)
stage_id += 1
for spec_label in stage_jobs: for spec_label in stage_jobs:
release_spec = spec_labels[spec_label]['spec'] release_spec = spec_labels[spec_label]['spec']
root_spec = spec_labels[spec_label]['rootSpec'] root_spec = spec_labels[spec_label]['rootSpec']
runner_attribs = find_matching_config(release_spec, ci_mappings) runner_attribs = find_matching_config(root_spec, ci_mappings)
if not runner_attribs: if not runner_attribs:
tty.warn('No match found for {0}, skipping it'.format( tty.warn('No match found for {0}, skipping it'.format(
@ -444,68 +516,121 @@ def release_jobs(parser, args):
if 'variables' in runner_attribs: if 'variables' in runner_attribs:
variables.update(runner_attribs['variables']) variables.update(runner_attribs['variables'])
build_image = None image_name = None
image_entry = None
if 'image' in runner_attribs: if 'image' in runner_attribs:
build_image = runner_attribs['image'] build_image = runner_attribs['image']
try:
image_name = build_image.get('name')
entrypoint = build_image.get('entrypoint')
image_entry = [p for p in entrypoint]
except AttributeError:
image_name = build_image
osname = str(release_spec.architecture) osname = str(release_spec.architecture)
job_name = get_job_name(release_spec, osname, build_group) job_name = get_job_name(phase_name, strip_compilers,
cdash_build_name = get_cdash_build_name(release_spec, build_group) release_spec, osname, build_group)
all_job_names.append(cdash_build_name)
job_scripts = ['./bin/rebuild-package.sh'] job_scripts = ['./bin/rebuild-package.sh']
compiler_action = 'NONE'
if len(phases) > 1:
compiler_action = 'FIND_ANY'
if is_main_phase(phase_name):
compiler_action = 'INSTALL_MISSING'
job_vars = {
'SPACK_MIRROR_URL': mirror_urls[0],
'SPACK_ROOT_SPEC': format_root_spec(
root_spec, main_phase, strip_compilers),
'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
'SPACK_COMPILER_ACTION': compiler_action,
}
job_dependencies = [] job_dependencies = []
if spec_label in dependencies: if spec_label in dependencies:
job_dependencies = ( job_dependencies = (
[get_job_name(spec_labels[d]['spec'], osname, build_group) [get_job_name(phase_name, strip_compilers,
spec_labels[dep_label]['spec'],
osname, build_group)
for dep_label in dependencies[spec_label]])
# This next section helps gitlab make sure the right
# bootstrapped compiler exists in the artifacts buildcache by
# creating an artificial dependency between this spec and its
# compiler. So, if we are in the main phase, and if the
# compiler we are supposed to use is listed in any of the
# bootstrap spec lists, then we will add one more dependency to
# "job_dependencies" (that compiler).
if is_main_phase(phase_name):
compiler_pkg_spec = compilers.pkg_spec_for_compiler(
release_spec.compiler)
for bs in bootstrap_specs:
bs_arch = bs['spec'].architecture
if (bs['spec'].satisfies(compiler_pkg_spec) and
bs_arch == release_spec.architecture):
c_job_name = get_job_name(bs['phase-name'],
bs['strip-compilers'],
bs['spec'],
str(bs_arch),
build_group)
job_dependencies.append(c_job_name)
if enable_cdash_reporting:
cdash_build_name = get_cdash_build_name(
release_spec, build_group)
all_job_names.append(cdash_build_name)
related_builds = [] # Used for relating CDash builds
if spec_label in dependencies:
related_builds = (
[spec_labels[d]['spec'].name
for d in dependencies[spec_label]]) for d in dependencies[spec_label]])
job_variables = { job_vars['SPACK_CDASH_BASE_URL'] = cdash_url
'MIRROR_URL': mirror_urls[0], job_vars['SPACK_CDASH_PROJECT'] = cdash_project
'CDASH_BASE_URL': cdash_url, job_vars['SPACK_CDASH_PROJECT_ENC'] = cdash_project_enc
'CDASH_PROJECT': cdash_project, job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
'CDASH_PROJECT_ENC': cdash_project_enc, job_vars['SPACK_CDASH_SITE'] = cdash_site
'CDASH_BUILD_NAME': cdash_build_name, job_vars['SPACK_RELATED_BUILDS'] = ';'.join(related_builds)
'DEPENDENCIES': ';'.join(job_dependencies), job_vars['SPACK_JOB_SPEC_BUILDGROUP'] = build_group
'ROOT_SPEC': str(root_spec),
}
if args.signing_key: job_vars['SPACK_ENABLE_CDASH'] = str(enable_cdash_reporting)
job_variables['SIGN_KEY_HASH'] = args.signing_key
variables.update(job_variables) variables.update(job_vars)
job_object = { job_object = {
'stage': stage_name, 'stage': stage_name,
'variables': variables, 'variables': variables,
'script': job_scripts, 'script': job_scripts,
'tags': tags,
'artifacts': { 'artifacts': {
'paths': [ 'paths': [
'local_mirror/build_cache',
'jobs_scratch_dir', 'jobs_scratch_dir',
'cdash_report', 'cdash_report',
'local_mirror/build_cache',
], ],
'when': 'always', 'when': 'always',
}, },
'dependencies': job_dependencies, 'dependencies': job_dependencies,
'tags': tags,
} }
if build_image: if image_name:
job_object['image'] = build_image job_object['image'] = image_name
if image_entry is not None:
job_object['image'] = {
'name': image_name,
'entrypoint': image_entry,
}
output_object[job_name] = job_object output_object[job_name] = job_object
job_count += 1 job_id += 1
stage += 1
tty.msg('{0} build jobs generated in {1} stages'.format( tty.msg('{0} build jobs generated in {1} stages'.format(
job_count, len(stages))) job_id, stage_id))
# Use "all_job_names" to populate the build group for this set # Use "all_job_names" to populate the build group for this set
if cdash_auth_token: if enable_cdash_reporting and cdash_auth_token:
try: try:
populate_buildgroup(all_job_names, build_group, cdash_project, populate_buildgroup(all_job_names, build_group, cdash_project,
cdash_site, cdash_auth_token, cdash_url) cdash_site, cdash_auth_token, cdash_url)
@ -521,7 +646,7 @@ def release_jobs(parser, args):
'variables': { 'variables': {
'MIRROR_URL': mirror_urls[0], 'MIRROR_URL': mirror_urls[0],
}, },
'image': 'scottwittenburg/spack_ci_generator_alpine', # just needs some basic python image 'image': 'scottwittenburg/spack_ci_generator_alpine',
'script': './bin/rebuild-index.sh', 'script': './bin/rebuild-index.sh',
'tags': ['spack-k8s'] # may want a runner to handle this 'tags': ['spack-k8s'] # may want a runner to handle this
} }

View file

@ -17,19 +17,38 @@
'additionalProperties': False, 'additionalProperties': False,
'required': ['mappings'], 'required': ['mappings'],
'patternProperties': { 'patternProperties': {
r'mappings': { 'bootstrap': {
'type': 'array', 'type': 'array',
'default': {}, 'items': {
'anyOf': [
{
'type': 'string',
}, {
'type': 'object',
'additionalProperties': False, 'additionalProperties': False,
'patternProperties': { 'required': ['name'],
r'[\w\d\-_\.]+': { 'properties': {
'name': {
'type': 'string',
},
'compiler-agnostic': {
'type': 'boolean',
'default': False,
},
},
},
],
},
},
'mappings': {
'type': 'array',
'items': {
'type': 'object', 'type': 'object',
'additionalProperties': False, 'additionalProperties': False,
'required': ['match', 'runner-attributes'], 'required': ['match', 'runner-attributes'],
'properties': { 'properties': {
'match': { 'match': {
'type': 'array', 'type': 'array',
'default': [],
'items': { 'items': {
'type': 'string', 'type': 'string',
}, },
@ -39,7 +58,24 @@
'additionalProperties': True, 'additionalProperties': True,
'required': ['tags'], 'required': ['tags'],
'properties': { 'properties': {
'image': {'type': 'string'}, 'image': {
'oneOf': [
{
'type': 'string'
}, {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'entrypoint': {
'type': 'array',
'items': {
'type': 'string',
},
},
},
},
],
},
'tags': { 'tags': {
'type': 'array', 'type': 'array',
'default': [], 'default': [],
@ -61,7 +97,6 @@
}, },
}, },
}, },
},
} }
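To make the new schema entries concrete, here is a hypothetical gitlab-ci section expressed as the already-parsed Python dict the validator would see; it exercises both accepted forms of a bootstrap item and the object form of image (all names and tags are illustrative):

    # Hypothetical environment fragment, as the dict handed to the jsonschema
    # validator; names, tags, and the image are illustrative only.
    gitlab_ci = {
        'bootstrap': [
            'compiler-pkgs',                                        # plain string form
            {'name': 'compiler-pkgs', 'compiler-agnostic': True},   # object form
        ],
        'mappings': [
            {
                'match': ['os=ubuntu18.04'],
                'runner-attributes': {
                    'tags': ['spack-k8s'],
                    'image': {'name': 'example/builder:latest', 'entrypoint': ['']},
                },
            },
        ],
    }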

View file

@ -1,48 +0,0 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for expressing dependencies of a set of specs in a JSON file
.. literalinclude:: _spack_root/lib/spack/spack/schema/specs_deps.py
:lines: 32-
"""
schema = {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack schema for the dependencies of a set of specs',
'type': 'object',
'additionalProperties': False,
'required': ['specs'],
'properties': {
r'dependencies': {
'type': 'array',
'default': [],
'items': {
'type': 'object',
'additionalProperties': False,
'required': ['depends', 'spec'],
'properties': {
r'depends': {'type': 'string'},
r'spec': {'type': 'string'},
},
},
},
r'specs': {
'type': 'array',
'default': [],
'items': {
'type': 'object',
'additionalProperties': False,
'required': ['root_spec', 'spec', 'label'],
'properties': {
r'root_spec': {'type': 'string'},
r'spec': {'type': 'string'},
r'label': {'type': 'string'},
}
},
},
},
}

View file

@ -102,8 +102,7 @@ def test_release_jobs_with_env(tmpdir, mutable_mock_env_path, env_deactivate,
some-mirror: https://my.fake.mirror some-mirror: https://my.fake.mirror
gitlab-ci: gitlab-ci:
mappings: mappings:
- some-runner-mapping: - match:
match:
- archive-files - archive-files
runner-attributes: runner-attributes:
tags: tags: