Refactor release-jobs cmd based on use of environments (no docker either)
parent 73c161b7de
commit fabbb3d58a

4 changed files with 198 additions and 315 deletions
bin/rebuild-package.sh
@@ -29,6 +29,9 @@
 ### SPACK_SIGNING_KEY
 ###
 ### CDASH_BASE_URL
+### CDASH_PROJECT
+### CDASH_PROJECT_ENC
+### CDASH_BUILD_NAME
 ### ROOT_SPEC
 ### DEPENDENCIES
 ### MIRROR_URL
@@ -45,7 +48,7 @@ SPEC_DIR="${TEMP_DIR}/specs"
 LOCAL_MIRROR="${CI_PROJECT_DIR}/local_mirror"
 BUILD_CACHE_DIR="${LOCAL_MIRROR}/build_cache"
 SPACK_BIN_DIR="${CI_PROJECT_DIR}/bin"
-CDASH_UPLOAD_URL="${CDASH_BASE_URL}/submit.php?project=Spack"
+CDASH_UPLOAD_URL="${CDASH_BASE_URL}/submit.php?project=${CDASH_PROJECT_ENC}"
 DEP_JOB_RELATEBUILDS_URL="${CDASH_BASE_URL}/api/v1/relateBuilds.php"
 declare -a JOB_DEPS_PKG_NAMES
 
@@ -163,8 +166,9 @@ gen_full_specs_for_job_and_deps() {
     local pkgVersion="${PARTSARRAY[1]}"
     local compiler="${PARTSARRAY[2]}"
     local osarch="${PARTSARRAY[3]}"
+    local buildGroup="${PARTSARRAY[@]:4}" # get everything after osarch
 
-    JOB_SPEC_NAME="${pkgName}@${pkgVersion}%${compiler} arch=${osarch}"
+    JOB_GROUP="${buildGroup}"
     JOB_PKG_NAME="${pkgName}"
     SPEC_YAML_PATH="${SPEC_DIR}/${pkgName}.yaml"
     local root_spec_name="${ROOT_SPEC}"
@@ -185,7 +189,7 @@ begin_logging
 
 gen_full_specs_for_job_and_deps
 
-echo "Building package ${JOB_SPEC_NAME}, ${HASH}, ${MIRROR_URL}"
+echo "Building package ${CDASH_BUILD_NAME}, ${HASH}, ${MIRROR_URL}"
 
 # Finally, list the compilers spack knows about
 echo "Compiler Configurations:"
@@ -200,7 +204,7 @@ mkdir -p "${BUILD_CACHE_DIR}"
 # to fail.
 JOB_BUILD_CACHE_ENTRY_NAME=`spack -d buildcache get-buildcache-name --spec-yaml "${SPEC_YAML_PATH}"`
 if [[ $? -ne 0 ]]; then
-    echo "ERROR, unable to get buildcache entry name for job ${CI_JOB_NAME} (spec: ${JOB_SPEC_NAME})"
+    echo "ERROR, unable to get buildcache entry name for job ${CI_JOB_NAME} (spec: ${CDASH_BUILD_NAME})"
     exit 1
 fi
 
@@ -233,9 +237,17 @@ if [[ $? -ne 0 ]]; then
 
 # Install package, using the buildcache from the local mirror to
 # satisfy dependencies.
-BUILD_ID_LINE=`spack -d -k -v install --use-cache --cdash-upload-url "${CDASH_UPLOAD_URL}" --cdash-build "${JOB_SPEC_NAME}" --cdash-site "Spack AWS Gitlab Instance" --cdash-track "Experimental" -f "${SPEC_YAML_PATH}" | grep "buildSummary\\.php"`
+BUILD_ID_LINE=`spack -d -k -v install --use-cache --keep-stage --cdash-upload-url "${CDASH_UPLOAD_URL}" --cdash-build "${CDASH_BUILD_NAME}" --cdash-site "Spack AWS Gitlab Instance" --cdash-track "${JOB_GROUP}" -f "${SPEC_YAML_PATH}" | grep "buildSummary\\.php"`
 check_error $? "spack install"
 
+# Copy some log files into an artifact location, once we have a way
+# to provide a spec.yaml file to more spack commands (e.g. "location")
+# stage_dir=$(spack location --stage-dir -f "${SPEC_YAML_PATH}")
+# build_log_file=$(find -L "${stage_dir}" | grep "spack-build\\.out")
+# config_log_file=$(find -L "${stage_dir}" | grep "config\\.log")
+# cp "${build_log_file}" "${JOB_LOG_DIR}/"
+# cp "${config_log_file}" "${JOB_LOG_DIR}/"
+
 # By parsing the output of the "spack install" command, we can get the
 # buildid generated for us by CDash
 JOB_CDASH_ID=$(extract_build_id "${BUILD_ID_LINE}")
@@ -254,7 +266,7 @@ if [[ $? -ne 0 ]]; then
     spack -d upload-s3 spec --base-dir "${LOCAL_MIRROR}" --spec-yaml "${SPEC_YAML_PATH}"
     check_error $? "spack upload-s3 spec"
 else
-    echo "spec ${JOB_SPEC_NAME} is already up to date on remote mirror, downloading it"
+    echo "spec ${CDASH_BUILD_NAME} is already up to date on remote mirror, downloading it"
 
     # Configure remote mirror so we can download buildcache entry
    spack mirror add remote_binary_mirror ${MIRROR_URL}
@@ -287,8 +299,8 @@ if [ -f "${JOB_CDASH_ID_FILE}" ]; then
     if [ -f "${DEP_JOB_ID_FILE}" ]; then
         DEP_JOB_CDASH_BUILD_ID=$(<${DEP_JOB_ID_FILE})
         echo "File ${DEP_JOB_ID_FILE} contained value ${DEP_JOB_CDASH_BUILD_ID}"
-        echo "Relating builds -> ${JOB_SPEC_NAME} (buildid=${JOB_CDASH_BUILD_ID}) depends on ${DEP_PKG_NAME} (buildid=${DEP_JOB_CDASH_BUILD_ID})"
-        relateBuildsPostBody="$(get_relate_builds_post_data "Spack" ${JOB_CDASH_BUILD_ID} ${DEP_JOB_CDASH_BUILD_ID})"
+        echo "Relating builds -> ${CDASH_BUILD_NAME} (buildid=${JOB_CDASH_BUILD_ID}) depends on ${DEP_PKG_NAME} (buildid=${DEP_JOB_CDASH_BUILD_ID})"
+        relateBuildsPostBody="$(get_relate_builds_post_data "${CDASH_PROJECT}" ${JOB_CDASH_BUILD_ID} ${DEP_JOB_CDASH_BUILD_ID})"
         relateBuildsResult=`curl "${DEP_JOB_RELATEBUILDS_URL}" -H "Content-Type: application/json" -H "Accept: application/json" -d "${relateBuildsPostBody}"`
         echo "Result of curl request: ${relateBuildsResult}"
     else
lib/spack/spack/cmd/release_jobs.py
@@ -3,26 +3,20 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-import argparse
 import json
-import os
-import shutil
-import tempfile
+import sys
 
-import subprocess
 from jsonschema import validate, ValidationError
 from six import iteritems
+from six.moves.urllib.request import build_opener, HTTPHandler, Request
+from six.moves.urllib.parse import urlencode
 
 import llnl.util.tty as tty
 
-from spack.architecture import sys_type
+import spack.environment as ev
 from spack.dependency import all_deptypes
-from spack.spec import Spec, CompilerSpec
-from spack.paths import spack_root
-from spack.error import SpackError
-from spack.schema.os_container_mapping import schema as mapping_schema
+from spack.spec import Spec
 from spack.schema.specs_deps import schema as specs_deps_schema
-from spack.spec_set import CombinatorialSpecSet
 import spack.util.spack_yaml as syaml
 
 description = "generate release build set as .gitlab-ci.yml"
@@ -32,57 +26,101 @@
 
 def setup_parser(subparser):
     subparser.add_argument(
-        '-s', '--spec-set', default=None,
-        help="path to release spec-set yaml file")
-
-    subparser.add_argument(
-        '-m', '--mirror-url', default=None,
-        help="url of binary mirror where builds should be pushed")
+        '-f', '--force', action='store_true', default=False,
+        help="Force re-concretization of environment first")
 
     subparser.add_argument(
         '-o', '--output-file', default=".gitlab-ci.yml",
         help="path to output file to write")
 
-    subparser.add_argument(
-        '-t', '--shared-runner-tag', default=None,
-        help="tag to add to jobs for shared runner selection")
-
     subparser.add_argument(
         '-k', '--signing-key', default=None,
         help="hash of gpg key to use for package signing")
 
-    subparser.add_argument(
-        '-c', '--cdash-url', default='https://cdash.spack.io',
-        help="Base url of CDash instance jobs should communicate with")
-
     subparser.add_argument(
         '-p', '--print-summary', action='store_true', default=False,
         help="Print summary of staged jobs to standard output")
 
     subparser.add_argument(
-        '--resolve-deps-locally', action='store_true', default=False,
-        help="Use only the current machine to concretize specs, " +
-             "instead of iterating over items in os-container-mapping.yaml " +
-             "and using docker run. Assumes the current machine architecure " +
-             "is listed in the os-container-mapping.yaml config file.")
-
-    subparser.add_argument(
-        '--specs-deps-output', default='/dev/stdout',
-        help="A file path to which spec deps should be written. This " +
-             "argument is generally for internal use, and should not be " +
-             "provided by end-users under normal conditions.")
-
-    subparser.add_argument(
-        'specs', nargs=argparse.REMAINDER,
-        help="These positional arguments are generally for internal use. " +
-             "The --spec-set argument should be used to identify a yaml " +
-             "file describing the set of release specs to include in the " +
-             ".gitlab-ci.yml file.")
+        '-c', '--cdash-credentials', default=None,
+        help="Path to file containing CDash authentication token")
 
 
-def get_job_name(spec, osarch):
-    return '{0} {1} {2} {3}'.format(spec.name, spec.version,
-                                    spec.compiler, osarch)
+def _create_buildgroup(opener, headers, url, project, group_name, group_type):
+    data = {
+        "newbuildgroup": group_name,
+        "project": project,
+        "type": group_type
+    }
+
+    request = Request(url, data=json.dumps(data), headers=headers)
+
+    response = opener.open(request)
+    response_code = response.getcode()
+
+    if response_code != 200 and response_code != 201:
+        print('Creating buildgroup failed (response code = {0}'.format(
+            response_code))
+        return None
+
+    response_text = response.read()
+    response_json = json.loads(response_text)
+    build_group_id = response_json['id']
+
+    return build_group_id
+
+
+def populate_buildgroup(job_names, group_name, project, site,
+                        credentials, cdash_url, exit_on_fail=False):
+    url = "{0}/api/v1/buildgroup.php".format(cdash_url)
+
+    headers = {
+        'Authorization': 'Bearer {0}'.format(credentials),
+        'Content-Type': 'application/json',
+    }
+
+    opener = build_opener(HTTPHandler)
+
+    parent_group_id = _create_buildgroup(
+        opener, headers, url, project, group_name, 'Daily')
+    group_id = _create_buildgroup(
+        opener, headers, url, project, 'Latest {0}'.format(group_name),
+        'Latest')
+
+    if not parent_group_id or not group_id:
+        print('Unable to create or retrieve the build groups')
+        sys.exit(1)
+
+    data = {
+        'project': project,
+        'buildgroupid': group_id,
+        'dynamiclist': [{
+            'match': name,
+            'parentgroupid': parent_group_id,
+            'site': site
+        } for name in job_names]
+    }
+
+    request = Request(url, data=json.dumps(data), headers=headers)
+    request.get_method = lambda: 'PUT'
+
+    response = opener.open(request)
+    response_code = response.getcode()
+
+    if response_code != 200 and exit_on_fail:
+        print('Unexpected response ({0}) when populating buildgroup'.format(
+            response_code))
+        sys.exit(1)
+
+
+def get_job_name(spec, osarch, build_group):
+    return '{0} {1} {2} {3} {4}'.format(
+        spec.name, spec.version, spec.compiler, osarch, build_group)
+
+
+def get_cdash_build_name(spec, build_group):
+    return '{0}@{1}%{2} arch={3} ({4})'.format(
+        spec.name, spec.version, spec.compiler, spec.architecture, build_group)
 
 
 def get_spec_string(spec):
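
Note: the two buildgroup helpers added above drive CDash's buildgroup.php API: _create_buildgroup POSTs a new group and returns its id, while populate_buildgroup creates a 'Daily' parent group plus a 'Latest {group}' child group, then PUTs a 'dynamiclist' registering each expected build name under them. A minimal usage sketch (every value below is hypothetical, for illustration only):

    populate_buildgroup(
        job_names=['readline@7.0%gcc@5.5.0 arch=linux-centos7-x86_64 (Release Testing)'],
        group_name='Release Testing',
        project='Spack',
        site='Spack AWS Gitlab Instance',
        credentials='0123456789abcdef',  # bearer token, e.g. read via --cdash-credentials
        cdash_url='https://cdash.spack.io')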
@@ -109,84 +147,8 @@ def _add_dependency(spec_label, dep_label, deps):
     deps[spec_label].add(dep_label)
 
 
-def get_deps_using_container(specs, image):
-    image_home_dir = '/home/spackuser'
-    repo_mount_location = '{0}/spack'.format(image_home_dir)
-    temp_dir = tempfile.mkdtemp(dir='/tmp')
-
-    # The paths this module will see (from outside the container)
-    temp_file = os.path.join(temp_dir, 'spec_deps.json')
-    temp_err = os.path.join(temp_dir, 'std_err.log')
-
-    # The paths the bash_command will see inside the container
-    json_output = '/work/spec_deps.json'
-    std_error = '/work/std_err.log'
-
-    specs_arg = ' '.join([str(spec) for spec in specs])
-
-    bash_command = " ".join(["source {0}/share/spack/setup-env.sh ;",
-                             "spack release-jobs",
-                             "--specs-deps-output {1}",
-                             "{2}",
-                             "2> {3}"]).format(
-        repo_mount_location, json_output, specs_arg, std_error)
-
-    docker_cmd_to_run = [
-        'docker', 'run', '--rm',
-        '-v', '{0}:{1}'.format(spack_root, repo_mount_location),
-        '-v', '{0}:{1}'.format(temp_dir, '/work'),
-        '--entrypoint', 'bash',
-        '-t', str(image),
-        '-c',
-        bash_command,
-    ]
-
-    tty.debug('Running subprocess command:')
-    tty.debug(' '.join(docker_cmd_to_run))
-
-    # Docker is going to merge the stdout/stderr from the script and write it
-    # all to the stdout of the running container. For this reason, we won't
-    # pipe any stdout/stderr from the docker command, but rather write the
-    # output we care about to a file in a mounted directory. Similarly, any
-    # errors from running the spack command inside the container are redirected
-    # to another file in the mounted directory.
-    proc = subprocess.Popen(docker_cmd_to_run)
-    proc.wait()
-
-    # Check for errors from spack command
-    if os.path.exists(temp_err) and os.path.getsize(temp_err) > 0:
-        # Spack wrote something to stderr inside the container. We will
-        # print out whatever it is, but attempt to carry on with the process.
-        tty.error('Encountered spack error running command in container:')
-        with open(temp_err, 'r') as err:
-            tty.error(err.read())
-
-    spec_deps_obj = {}
-
-    try:
-        # Finally, try to read/parse the output we really care about: the
-        # specs and dependency edges for the provided spec, as it was
-        # concretized in the appropriate container.
-        with open(temp_file, 'r') as fd:
-            spec_deps_obj = json.loads(fd.read())
-
-    except ValueError as val_err:
-        tty.error('Failed to read json object from spec-deps output file:')
-        tty.error(str(val_err))
-    except IOError as io_err:
-        tty.error('Problem reading from spec-deps json output file:')
-        tty.error(str(io_err))
-    finally:
-        shutil.rmtree(temp_dir)
-
-    return spec_deps_obj
-
-
-def get_spec_dependencies(specs, deps, spec_labels, image=None):
-    if image:
-        spec_deps_obj = get_deps_using_container(specs, image)
-    else:
-        spec_deps_obj = compute_spec_deps(specs)
+def get_spec_dependencies(specs, deps, spec_labels):
+    spec_deps_obj = compute_spec_deps(specs)
 
     try:
         validate(spec_deps_obj, specs_deps_schema)
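
Note: get_spec_dependencies now always concretizes on the current machine via compute_spec_deps and validates the result against specs_deps_schema. The piece consumed in the next hunk is a list of dependency edges between spec labels; a sketch of that shape (the top-level key and label values are assumptions for illustration; the 'spec'/'depends' keys are taken from the code below):

    spec_deps_obj = {
        'dependencies': [
            # each entry is one edge: 'spec' depends on 'depends'
            {'spec': 'readline/abc123', 'depends': 'ncurses/def456'},
        ],
    }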
@@ -210,7 +172,7 @@ def get_spec_dependencies(specs, deps, spec_labels, image=None):
             _add_dependency(entry['spec'], entry['depends'], deps)
 
 
-def stage_spec_jobs(spec_set, containers, current_system=None):
+def stage_spec_jobs(specs):
     """Take a set of release specs along with a dictionary describing the
     available docker containers and what compilers they have, and generate
     a list of "stages", where the jobs in any stage are dependent only on
@@ -265,46 +227,7 @@ def remove_satisfied_deps(deps, satisfied_list):
     deps = {}
     spec_labels = {}
 
-    if current_system:
-        if current_system not in containers:
-            error_msg = ' '.join(['Current system ({0}) does not appear in',
-                                  'os_container_mapping.yaml, ignoring',
-                                  'request']).format(current_system)
-            raise SpackError(error_msg)
-        os_names = [current_system]
-    else:
-        os_names = [name for name in containers]
-
-    container_specs = {}
-    for name in os_names:
-        container_specs[name] = {'image': None, 'specs': []}
-
-    # Collect together all the specs that should be concretized in each
-    # container so they can all be done at once, avoiding the need to
-    # run the docker container for each spec separately.
-    for spec in spec_set:
-        for osname in os_names:
-            container_info = containers[osname]
-            image = None if current_system else container_info['image']
-            if image:
-                container_specs[osname]['image'] = image
-            if 'compilers' in container_info:
-                found_at_least_one = False
-                for item in container_info['compilers']:
-                    container_compiler_spec = CompilerSpec(item['name'])
-                    if spec.compiler == container_compiler_spec:
-                        container_specs[osname]['specs'].append(spec)
-                        found_at_least_one = True
-                if not found_at_least_one:
-                    tty.warn('No compiler in {0} satisfied {1}'.format(
-                        osname, spec.compiler))
-
-    for osname in container_specs:
-        if container_specs[osname]['specs']:
-            image = container_specs[osname]['image']
-            specs = container_specs[osname]['specs']
-            get_spec_dependencies(specs, deps, spec_labels, image)
+    get_spec_dependencies(specs, deps, spec_labels)
 
     # Save the original deps, as we need to return them at the end of the
     # function. In the while loop below, the "dependencies" variable is
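
Note: with the container bookkeeping gone, stage_spec_jobs takes the environment's specs directly and builds the label-to-dependencies map in one call. The staging loop that follows partitions jobs so each stage depends only on earlier stages; a toy illustration of that invariant (labels are hypothetical):

    # label -> set of labels it depends on
    deps = {'b/hash2': set(['a/hash1']),
            'c/hash3': set(['a/hash1', 'b/hash2'])}
    # expected staging: [['a/hash1'], ['b/hash2'], ['c/hash3']], since each
    # job's dependencies must all appear in earlier stages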
@@ -452,49 +375,48 @@ def append_dep(s, d):
     return deps_json_obj
 
 
+def spec_matches(spec, match_string):
+    return spec.satisfies(match_string)
+
+
+def find_matching_config(spec, ci_mappings):
+    for ci_mapping in ci_mappings:
+        for match_string in ci_mapping['match']:
+            if spec_matches(spec, match_string):
+                return ci_mapping['runner-attributes']
+    return None
+
+
 def release_jobs(parser, args):
-    share_path = os.path.join(spack_root, 'share', 'spack', 'docker')
-    os_container_mapping_path = os.path.join(
-        share_path, 'os-container-mapping.yaml')
+    env = ev.get_env(args, 'release-jobs', required=True)
+    env.concretize(force=args.force)
 
-    with open(os_container_mapping_path, 'r') as fin:
-        os_container_mapping = syaml.load(fin)
+    yaml_root = env.yaml['spack']
 
-    try:
-        validate(os_container_mapping, mapping_schema)
-    except ValidationError as val_err:
-        tty.error('Ill-formed os-container-mapping configuration object')
-        tty.error(os_container_mapping)
-        tty.debug(val_err)
-        return
+    if 'gitlab-ci' not in yaml_root:
+        tty.die('Environment yaml does not have "gitlab-ci" section')
 
-    containers = os_container_mapping['containers']
+    ci_mappings = yaml_root['gitlab-ci']['mappings']
 
-    if args.specs:
-        # Just print out the spec labels and all dependency edges in
-        # a json format.
-        spec_list = [Spec(s) for s in args.specs]
-        with open(args.specs_deps_output, 'w') as out:
-            compute_spec_deps(spec_list, out)
-        return
-
-    current_system = sys_type() if args.resolve_deps_locally else None
-
-    release_specs_path = args.spec_set
-    if not release_specs_path:
-        raise SpackError('Must provide path to release spec-set')
-
-    release_spec_set = CombinatorialSpecSet.from_file(release_specs_path)
-
-    mirror_url = args.mirror_url
-
-    if not mirror_url:
-        raise SpackError('Must provide url of target binary mirror')
-
-    cdash_url = args.cdash_url
-
-    spec_labels, dependencies, stages = stage_spec_jobs(
-        release_spec_set, containers, current_system)
+    ci_cdash = yaml_root['cdash']
+    build_group = ci_cdash['build-group']
+    cdash_url = ci_cdash['url']
+    cdash_project = ci_cdash['project']
+    proj_enc = urlencode({'project': cdash_project})
+    eq_idx = proj_enc.find('=') + 1
+    cdash_project_enc = proj_enc[eq_idx:]
+    cdash_site = ci_cdash['site']
+    cdash_auth_token = None
+
+    if args.cdash_credentials:
+        with open(args.cdash_credentials) as fd:
+            cdash_auth_token = fd.read()
+            cdash_auth_token = cdash_auth_token.strip()
+
+    ci_mirrors = yaml_root['mirrors']
+    mirror_urls = ci_mirrors.values()
+
+    spec_labels, dependencies, stages = stage_spec_jobs(env.all_specs())
 
     if not stages:
         tty.msg('No jobs staged, exiting.')
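
Note: the CDASH_PROJECT_ENC computation above exists because bin/rebuild-package.sh splices the project name straight into the submit.php query string, so it must be URL-encoded exactly once, at generation time. Since urlencode only accepts key/value pairs, the code encodes a one-entry dict and strips the 'project=' prefix; a worked example (the project name is hypothetical):

    from six.moves.urllib.parse import urlencode

    proj_enc = urlencode({'project': 'Spack Testing'})     # 'project=Spack+Testing'
    cdash_project_enc = proj_enc[proj_enc.find('=') + 1:]  # 'Spack+Testing'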
@@ -503,6 +425,7 @@ def release_jobs(parser, args):
     if args.print_summary:
         print_staging_summary(spec_labels, dependencies, stages)
 
+    all_job_names = []
     output_object = {}
     job_count = 0
 
@@ -516,37 +439,56 @@
         release_spec = spec_labels[spec_label]['spec']
         root_spec = spec_labels[spec_label]['rootSpec']
 
-        pkg_compiler = release_spec.compiler
-        pkg_hash = release_spec.dag_hash()
+        runner_attribs = find_matching_config(release_spec, ci_mappings)
+
+        if not runner_attribs:
+            tty.warn('No match found for {0}, skipping it'.format(
+                release_spec))
+            continue
+
+        tags = [tag for tag in runner_attribs['tags']]
+
+        variables = {}
+        if 'variables' in runner_attribs:
+            variables.update(runner_attribs['variables'])
+
+        build_image = None
+        if 'image' in runner_attribs:
+            build_image = runner_attribs['image']
 
         osname = str(release_spec.architecture)
-        job_name = get_job_name(release_spec, osname)
-        container_info = containers[osname]
-        build_image = container_info['image']
+        job_name = get_job_name(release_spec, osname, build_group)
+        cdash_build_name = get_cdash_build_name(release_spec, build_group)
+
+        all_job_names.append(cdash_build_name)
 
         job_scripts = ['./bin/rebuild-package.sh']
 
-        if 'setup_script' in container_info:
-            job_scripts.insert(
-                0, container_info['setup_script'] % pkg_compiler)
-
         job_dependencies = []
         if spec_label in dependencies:
             job_dependencies = (
-                [get_job_name(spec_labels[dep_label]['spec'], osname)
-                 for dep_label in dependencies[spec_label]])
+                [get_job_name(spec_labels[d]['spec'], osname, build_group)
+                 for d in dependencies[spec_label]])
+
+        job_variables = {
+            'MIRROR_URL': mirror_urls[0],
+            'CDASH_BASE_URL': cdash_url,
+            'CDASH_PROJECT': cdash_project,
+            'CDASH_PROJECT_ENC': cdash_project_enc,
+            'CDASH_BUILD_NAME': cdash_build_name,
+            'DEPENDENCIES': ';'.join(job_dependencies),
+            'ROOT_SPEC': str(root_spec),
+        }
+
+        if args.signing_key:
+            job_variables['SIGN_KEY_HASH'] = args.signing_key
+
+        variables.update(job_variables)
 
         job_object = {
             'stage': stage_name,
-            'variables': {
-                'MIRROR_URL': mirror_url,
-                'CDASH_BASE_URL': cdash_url,
-                'HASH': pkg_hash,
-                'DEPENDENCIES': ';'.join(job_dependencies),
-                'ROOT_SPEC': str(root_spec),
-            },
+            'variables': variables,
             'script': job_scripts,
-            'image': build_image,
             'artifacts': {
                 'paths': [
                     'local_mirror/build_cache',
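
Note: the per-spec loop above replaces the old container/compiler filtering with find_matching_config: each release spec is tested against the 'match' lists from the environment's gitlab-ci mappings, and the first match supplies the runner tags, variables, and image. A sketch of the data this assumes (match string and attribute values are hypothetical):

    ci_mappings = [{
        'match': ['%gcc@5.5.0'],
        'runner-attributes': {
            'tags': ['spack-k8s'],
            'image': 'spack/builder-centos7',  # hypothetical image name
        },
    }]

    runner_attribs = find_matching_config(release_spec, ci_mappings)
    # -> that runner-attributes dict when release_spec satisfies '%gcc@5.5.0';
    #    None otherwise, in which case the spec is skipped with a warning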
@@ -556,51 +498,41 @@
                 'when': 'always',
             },
             'dependencies': job_dependencies,
+            'tags': tags,
         }
 
-        # If we see 'compilers' in the container iformation, it's a
-        # filter for the compilers this container can handle, else we
-        # assume it can handle any compiler
-        if 'compilers' in container_info:
-            do_job = False
-            for item in container_info['compilers']:
-                container_compiler_spec = CompilerSpec(item['name'])
-                if pkg_compiler == container_compiler_spec:
-                    do_job = True
-        else:
-            do_job = True
-
-        if args.shared_runner_tag:
-            job_object['tags'] = [args.shared_runner_tag]
-
-        if args.signing_key:
-            job_object['variables']['SIGN_KEY_HASH'] = args.signing_key
-
-        if do_job:
-            output_object[job_name] = job_object
-            job_count += 1
+        if build_image:
+            job_object['image'] = build_image
+
+        output_object[job_name] = job_object
+        job_count += 1
 
         stage += 1
 
     tty.msg('{0} build jobs generated in {1} stages'.format(
         job_count, len(stages)))
 
-    final_stage = 'stage-rebuild-index'
+    # Use "all_job_names" to populate the build group for this set
+    if cdash_auth_token:
+        populate_buildgroup(all_job_names, build_group, cdash_project,
+                            cdash_site, cdash_auth_token, cdash_url)
+    else:
+        tty.warn('Unable to populate buildgroup without CDash credentials')
+
+    # Add an extra, final job to regenerate the index
+    final_stage = 'stage-rebuild-index'
     final_job = {
         'stage': final_stage,
         'variables': {
-            'MIRROR_URL': mirror_url,
+            'MIRROR_URL': mirror_urls[0],
         },
-        'image': build_image,
+        'image': 'scottwittenburg/spack_ci_generator_alpine',  # just needs some basic python image
         'script': './bin/rebuild-index.sh',
+        'tags': ['spack-k8s']  # may want a runner to handle this
     }
 
-    if args.shared_runner_tag:
-        final_job['tags'] = [args.shared_runner_tag]
-
     output_object['rebuild-index'] = final_job
     stage_names.append(final_stage)
 
     output_object['stages'] = stage_names
 
     with open(args.output_file, 'w') as outf:
lib/spack/spack/schema/os_container_mapping.py (deleted)
@@ -1,50 +0,0 @@
-# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-"""Schema for os-container-mapping.yaml configuration file.
-
-.. literalinclude:: _spack_root/lib/spack/spack/schema/os_container_mapping.py
-   :lines: 32-
-"""
-
-
-schema = {
-    '$schema': 'http://json-schema.org/schema#',
-    'title': 'Spack release builds os/container mapping config file schema',
-    'type': 'object',
-    'additionalProperties': False,
-    'patternProperties': {
-        r'containers': {
-            'type': 'object',
-            'default': {},
-            'patternProperties': {
-                r'[\w\d\-_\.]+': {
-                    'type': 'object',
-                    'default': {},
-                    'additionalProperties': False,
-                    'required': ['image'],
-                    'properties': {
-                        'image': {'type': 'string'},
-                        'setup_script': {'type': 'string'},
-                        'compilers': {
-                            'type': 'array',
-                            'default': [],
-                            'items': {
-                                'type': 'object',
-                                'default': {},
-                                'additionalProperties': False,
-                                'required': ['name'],
-                                'properties': {
-                                    'name': {'type': 'string'},
-                                    'path': {'type': 'string'},
-                                },
-                            },
-                        },
-                    },
-                },
-            },
-        },
-    },
-}
share/spack/docker/os-container-mapping.yaml (deleted)
@@ -1,11 +0,0 @@
-containers:
-  linux-ubuntu18.04-x86_64:
-    image: scottwittenburg/spack_builder_ubuntu_18.04
-    compilers:
-      - name: gcc@5.5.0
-      - name: clang@6.0.0-1ubuntu2
-  linux-centos7-x86_64:
-    image: scottwittenburg/spack_builder_centos_7
-    compilers:
-      - name: gcc@5.5.0
-      - name: clang@6.0.0