Spack CI: Refactor process_command for Cross Platform support (#39739)
Generate CI scripts as PowerShell on Windows; on Linux this is intended to produce exactly the same bash scripts as before.

Co-authored-by: Ryan Krattiger <ryan.krattiger@kitware.com>
parent 434836be81
commit 397334a4be

1 changed file with 191 additions and 163 deletions
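The heart of the change is that process_command now renders the CI command list as a platform-appropriate script (PowerShell on Windows, sh elsewhere) instead of always writing "#!/bin/sh". Below is a minimal sketch of that idea; it is not the Spack implementation (see the process_command hunk further down), and the helper name write_ci_script and its simplified quoting and error handling are illustrative only.

import subprocess
import sys

IS_WINDOWS = sys.platform == "win32"


def write_ci_script(name, commands, exit_on_failure=True):
    """Illustrative only: render a list of argv lists as a bash or PowerShell
    script, mirroring the approach this commit takes in process_command()."""
    lines = []
    if IS_WINDOWS:
        script = f"{name}.ps1"
        if exit_on_failure:
            # stop on the first failing PowerShell commandlet
            lines.append('$ErrorActionPreference = "Stop"')
    else:
        script = f"{name}.sh"
        lines.append("#!/bin/sh")
        if exit_on_failure:
            # stop on the first failing command
            lines.append("set -e")
    for args in commands:
        # quote arguments for sh; PowerShell handles argument lists differently
        lines.append(" ".join(args if IS_WINDOWS else [f'"{a}"' for a in args]))
    with open(script, "w") as fd:
        fd.write("\n".join(lines) + "\n")
    return script


if __name__ == "__main__":
    # the same command list yields rebuild.sh on Linux and rebuild.ps1 on Windows
    path = write_ci_script("rebuild", [["spack", "env", "activate", "."], ["spack", "install"]])
    subprocess.run(["powershell.exe" if IS_WINDOWS else "/bin/sh", f"./{path}"], check=False)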
@@ -70,7 +70,7 @@
 JOB_NAME_FORMAT = (
     "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"
 )
-
+IS_WINDOWS = sys.platform == "win32"
 spack_gpg = spack.main.SpackCommand("gpg")
 spack_compiler = spack.main.SpackCommand("compiler")
 
@@ -103,7 +103,7 @@ def get_job_name(spec: spack.spec.Spec, build_group: str = ""):
     job_name = spec.format(JOB_NAME_FORMAT)
 
     if build_group:
-        job_name = "{0} {1}".format(job_name, build_group)
+        job_name = f"{job_name} {build_group}"
 
     return job_name[:255]
 
@@ -114,7 +114,7 @@ def _remove_reserved_tags(tags):
 
 
 def _spec_deps_key(s):
-    return "{0}/{1}".format(s.name, s.dag_hash(7))
+    return f"{s.name}/{s.dag_hash(7)}"
 
 
 def _add_dependency(spec_label, dep_label, deps):
@@ -213,7 +213,7 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi
     mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True)
     tty.msg("Checked the following mirrors for binaries:")
     for m in mirrors.values():
-        tty.msg(" {0}".format(m.fetch_url))
+        tty.msg(f" {m.fetch_url}")
 
     tty.msg("Staging summary ([x] means a job needs rebuilding):")
     for stage_index, stage in enumerate(stages):
@@ -296,7 +296,7 @@ def append_dep(s, d):
     for spec in spec_list:
         for s in spec.traverse(deptype="all"):
             if s.external:
-                tty.msg("Will not stage external pkg: {0}".format(s))
+                tty.msg(f"Will not stage external pkg: {s}")
                 continue
 
             skey = _spec_deps_key(s)
@@ -305,7 +305,7 @@ def append_dep(s, d):
             for d in s.dependencies(deptype="all"):
                 dkey = _spec_deps_key(d)
                 if d.external:
-                    tty.msg("Will not stage external dep: {0}".format(d))
+                    tty.msg(f"Will not stage external dep: {d}")
                     continue
 
                 append_dep(skey, dkey)
@@ -374,8 +374,8 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
 
             for path in lines:
                 if ".gitlab-ci.yml" in path or path in env_path:
-                    tty.debug("env represented by {0} changed".format(env_path))
-                    tty.debug("touched file: {0}".format(path))
+                    tty.debug(f"env represented by {env_path} changed")
+                    tty.debug(f"touched file: {path}")
                     return True
     return False
 
@@ -419,7 +419,7 @@ def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
     all_concrete_specs = env.all_specs()
     tty.debug("All concrete environment specs:")
     for s in all_concrete_specs:
-        tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
+        tty.debug(f" {s.name}/{s.dag_hash()[:7]}")
     affected_pkgs = frozenset(affected_pkgs)
     env_matches = [s for s in all_concrete_specs if s.name in affected_pkgs]
     visited = set()
@@ -510,7 +510,7 @@ def __is_named(self, section):
         and if so return the name otherwise return none.
         """
         for _name in self.named_jobs:
-            keys = ["{0}-job".format(_name), "{0}-job-remove".format(_name)]
+            keys = [f"{_name}-job", f"{_name}-job-remove"]
             if any([key for key in keys if key in section]):
                 return _name
 
@@ -525,9 +525,9 @@ def __job_name(name, suffix=""):
 
         jname = name
         if suffix:
-            jname = "{0}-job{1}".format(name, suffix)
+            jname = f"{name}-job{suffix}"
         else:
-            jname = "{0}-job".format(name)
+            jname = f"{name}-job"
 
         return jname
 
@@ -739,7 +739,7 @@ def generate_gitlab_ci_yaml(
         # Requested to prune untouched packages, but assume we won't do that
         # unless we're actually in a git repo.
         rev1, rev2 = get_change_revisions()
-        tty.debug("Got following revisions: rev1={0}, rev2={1}".format(rev1, rev2))
+        tty.debug(f"Got following revisions: rev1={rev1}, rev2={rev2}")
         if rev1 and rev2:
             # If the stack file itself did not change, proceed with pruning
             if not get_stack_changed(env.manifest_path, rev1, rev2):
@@ -747,13 +747,13 @@ def generate_gitlab_ci_yaml(
                 affected_pkgs = compute_affected_packages(rev1, rev2)
                 tty.debug("affected pkgs:")
                 for p in affected_pkgs:
-                    tty.debug(" {0}".format(p))
+                    tty.debug(f" {p}")
                 affected_specs = get_spec_filter_list(
                     env, affected_pkgs, dependent_traverse_depth=dependent_depth
                 )
                 tty.debug("all affected specs:")
                 for s in affected_specs:
-                    tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
+                    tty.debug(f" {s.name}/{s.dag_hash()[:7]}")
 
     # Allow overriding --prune-dag cli opt with environment variable
     prune_dag_override = os.environ.get("SPACK_PRUNE_UP_TO_DATE", None)
@@ -978,7 +978,7 @@ def generate_gitlab_ci_yaml(
     rebuild_decisions = {}
 
     for stage_jobs in stages:
-        stage_name = "stage-{0}".format(stage_id)
+        stage_name = f"stage-{stage_id}"
         stage_names.append(stage_name)
         stage_id += 1
 
@@ -1009,7 +1009,7 @@ def generate_gitlab_ci_yaml(
             job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
 
             if not job_object:
-                tty.warn("No match found for {0}, skipping it".format(release_spec))
+                tty.warn(f"No match found for {release_spec}, skipping it")
                 continue
 
             if spack_pipeline_type is not None:
@@ -1119,7 +1119,7 @@ def main_script_replacements(cmd):
 
             if artifacts_root:
                 job_object["needs"].append(
-                    {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
+                    {"job": generate_job_name, "pipeline": f"{parent_pipeline_id}"}
                 )
 
             # Let downstream jobs know whether the spec needed rebuilding, regardless
@@ -1185,19 +1185,17 @@ def main_script_replacements(cmd):
     if spack_pipeline_type == "spack_pull_request":
         spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
 
-    tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))
+    tty.debug(f"{job_id} build jobs generated in {stage_id} stages")
 
     if job_id > 0:
-        tty.debug(
-            "The max_needs_job is {0}, with {1} needs".format(max_needs_job, max_length_needs)
-        )
+        tty.debug(f"The max_needs_job is {max_needs_job}, with {max_length_needs} needs")
 
     # Use "all_job_names" to populate the build group for this set
     if cdash_handler and cdash_handler.auth_token:
         try:
             cdash_handler.populate_buildgroup(all_job_names)
         except (SpackError, HTTPError, URLError) as err:
-            tty.warn("Problem populating buildgroup: {0}".format(err))
+            tty.warn(f"Problem populating buildgroup: {err}")
     else:
         tty.warn("Unable to populate buildgroup without CDash credentials")
 
@@ -1211,9 +1209,7 @@ def main_script_replacements(cmd):
         sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
         sync_job["stage"] = "copy"
         if artifacts_root:
-            sync_job["needs"] = [
-                {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
-            ]
+            sync_job["needs"] = [{"job": generate_job_name, "pipeline": f"{parent_pipeline_id}"}]
 
         if "variables" not in sync_job:
             sync_job["variables"] = {}
@@ -1230,6 +1226,7 @@ def main_script_replacements(cmd):
             # TODO: Remove this condition in Spack 0.23
             buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
             sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
+        sync_job["dependencies"] = []
 
         output_object["copy"] = sync_job
         job_id += 1
@@ -1348,7 +1345,7 @@ def main_script_replacements(cmd):
 
         copy_specs_file = os.path.join(
             copy_specs_dir,
-            "copy_{}_specs.json".format(spack_stack_name if spack_stack_name else "rebuilt"),
+            f"copy_{spack_stack_name if spack_stack_name else 'rebuilt'}_specs.json",
         )
 
         with open(copy_specs_file, "w") as fd:
@@ -1440,7 +1437,7 @@ def import_signing_key(base64_signing_key):
             fd.write(decoded_key)
 
         key_import_output = spack_gpg("trust", sign_key_path, output=str)
-        tty.debug("spack gpg trust {0}".format(sign_key_path))
+        tty.debug(f"spack gpg trust {sign_key_path}")
         tty.debug(key_import_output)
 
     # Now print the keys we have for verifying and signing
@@ -1469,7 +1466,7 @@ def can_verify_binaries():
 def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
     """Unchecked version of the public API, for easier mocking"""
     unsigned = not sign_binaries
-    tty.debug("Creating buildcache ({0})".format("unsigned" if unsigned else "signed"))
+    tty.debug(f"Creating buildcache ({'unsigned' if unsigned else 'signed'})")
     push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
     return bindist.push(input_spec, push_url, bindist.PushOptions(force=True, unsigned=unsigned))
 
@@ -1498,9 +1495,9 @@ def push_mirror_contents(input_spec: spack.spec.Spec, mirror_url, sign_binaries)
         # Exception
         # BaseException
         # object
-        err_msg = "Error msg: {0}".format(inst)
+        err_msg = f"Error msg: {inst}"
         if any(x in err_msg for x in ["Access Denied", "InvalidAccessKeyId"]):
-            tty.msg("Permission problem writing to {0}".format(mirror_url))
+            tty.msg(f"Permission problem writing to {mirror_url}")
             tty.msg(err_msg)
             return False
         else:
@@ -1531,8 +1528,9 @@ def copy_files_to_artifacts(src, artifacts_dir):
     try:
         fs.copy(src, artifacts_dir)
     except Exception as err:
-        msg = ("Unable to copy files ({0}) to artifacts {1} due to " "exception: {2}").format(
-            src, artifacts_dir, str(err)
+        msg = (
+            f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
+            f"exception: {str(err)}"
         )
         tty.warn(msg)
 
@@ -1548,23 +1546,23 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
         job_spec: spec associated with spack install log
         job_log_dir: path into which build log should be copied
     """
-    tty.debug("job spec: {0}".format(job_spec))
+    tty.debug(f"job spec: {job_spec}")
     if not job_spec:
-        msg = "Cannot copy stage logs: job spec ({0}) is required"
-        tty.error(msg.format(job_spec))
+        msg = f"Cannot copy stage logs: job spec ({job_spec}) is required"
+        tty.error(msg)
         return
 
     try:
         pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
         job_pkg = pkg_cls(job_spec)
-        tty.debug("job package: {0}".format(job_pkg))
+        tty.debug(f"job package: {job_pkg}")
     except AssertionError:
-        msg = "Cannot copy stage logs: job spec ({0}) must be concrete"
-        tty.error(msg.format(job_spec))
+        msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
+        tty.error(msg)
        return
 
     stage_dir = job_pkg.stage.path
-    tty.debug("stage dir: {0}".format(stage_dir))
+    tty.debug(f"stage dir: {stage_dir}")
     for file in [job_pkg.log_path, job_pkg.env_mods_path, *job_pkg.builder.archive_files]:
         copy_files_to_artifacts(file, job_log_dir)
 
@@ -1577,10 +1575,10 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
         test_stage (str): test stage path
         job_test_dir (str): the destination artifacts test directory
     """
-    tty.debug("test stage: {0}".format(test_stage))
+    tty.debug(f"test stage: {test_stage}")
     if not os.path.exists(test_stage):
-        msg = "Cannot copy test logs: job test stage ({0}) does not exist"
-        tty.error(msg.format(test_stage))
+        msg = f"Cannot copy test logs: job test stage ({test_stage}) does not exist"
+        tty.error(msg)
         return
 
     copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
@@ -1595,7 +1593,7 @@ def download_and_extract_artifacts(url, work_dir):
         url (str): Complete url to artifacts.zip file
         work_dir (str): Path to destination where artifacts should be extracted
     """
-    tty.msg("Fetching artifacts from: {0}\n".format(url))
+    tty.msg(f"Fetching artifacts from: {url}\n")
 
     headers = {"Content-Type": "application/zip"}
 
@@ -1612,7 +1610,7 @@ def download_and_extract_artifacts(url, work_dir):
     response_code = response.getcode()
 
     if response_code != 200:
-        msg = "Error response code ({0}) in reproduce_ci_job".format(response_code)
+        msg = f"Error response code ({response_code}) in reproduce_ci_job"
         raise SpackError(msg)
 
     artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
@@ -1642,7 +1640,7 @@ def get_spack_info():
 
         return git_log
 
-    return "no git repo, use spack {0}".format(spack.spack_version)
+    return f"no git repo, use spack {spack.spack_version}"
 
 
 def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
@@ -1665,8 +1663,8 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
     """
     # figure out the path to the spack git version being used for the
     # reproduction
-    print("checkout_commit: {0}".format(checkout_commit))
-    print("merge_commit: {0}".format(merge_commit))
+    print(f"checkout_commit: {checkout_commit}")
+    print(f"merge_commit: {merge_commit}")
 
     dot_git_path = os.path.join(spack.paths.prefix, ".git")
     if not os.path.exists(dot_git_path):
@@ -1685,14 +1683,14 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
         git("log", "-1", checkout_commit, output=str, error=os.devnull, fail_on_error=False)
 
         if git.returncode != 0:
-            tty.error("Missing commit: {0}".format(checkout_commit))
+            tty.error(f"Missing commit: {checkout_commit}")
             return False
 
         if merge_commit:
             git("log", "-1", merge_commit, output=str, error=os.devnull, fail_on_error=False)
 
             if git.returncode != 0:
-                tty.error("Missing commit: {0}".format(merge_commit))
+                tty.error(f"Missing commit: {merge_commit}")
                 return False
 
     # Next attempt to clone your local spack repo into the repro dir
@@ -1715,7 +1713,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
         )
 
         if git.returncode != 0:
-            tty.error("Unable to checkout {0}".format(checkout_commit))
+            tty.error(f"Unable to checkout {checkout_commit}")
             tty.msg(co_out)
             return False
 
@@ -1734,7 +1732,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
             )
 
             if git.returncode != 0:
-                tty.error("Unable to merge {0}".format(merge_commit))
+                tty.error(f"Unable to merge {merge_commit}")
                 tty.msg(merge_out)
                 return False
 
@@ -1755,6 +1753,7 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     commands to run to reproduce the build once inside the container.
     """
     work_dir = os.path.realpath(work_dir)
+    platform_script_ext = "ps1" if IS_WINDOWS else "sh"
     download_and_extract_artifacts(url, work_dir)
 
     gpg_path = None
@@ -1765,13 +1764,13 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     lock_file = fs.find(work_dir, "spack.lock")[0]
     repro_lock_dir = os.path.dirname(lock_file)
 
-    tty.debug("Found lock file in: {0}".format(repro_lock_dir))
+    tty.debug(f"Found lock file in: {repro_lock_dir}")
 
     yaml_files = fs.find(work_dir, ["*.yaml", "*.yml"])
 
     tty.debug("yaml files:")
     for yaml_file in yaml_files:
-        tty.debug(" {0}".format(yaml_file))
+        tty.debug(f" {yaml_file}")
 
     pipeline_yaml = None
 
@@ -1786,10 +1785,10 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
                 pipeline_yaml = yaml_obj
 
     if pipeline_yaml:
-        tty.debug("\n{0} is likely your pipeline file".format(yf))
+        tty.debug(f"\n{yf} is likely your pipeline file")
 
     relative_concrete_env_dir = pipeline_yaml["variables"]["SPACK_CONCRETE_ENV_DIR"]
-    tty.debug("Relative environment path used by cloud job: {0}".format(relative_concrete_env_dir))
+    tty.debug(f"Relative environment path used by cloud job: {relative_concrete_env_dir}")
 
     # Using the relative concrete environment path found in the generated
     # pipeline variable above, copy the spack environment files so they'll
@@ -1803,10 +1802,11 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
         shutil.copyfile(orig_yaml_path, copy_yaml_path)
 
     # Find the install script in the unzipped artifacts and make it executable
-    install_script = fs.find(work_dir, "install.sh")[0]
-    st = os.stat(install_script)
-    os.chmod(install_script, st.st_mode | stat.S_IEXEC)
+    install_script = fs.find(work_dir, f"install.{platform_script_ext}")[0]
+    if not IS_WINDOWS:
+        # pointless on Windows
+        st = os.stat(install_script)
+        os.chmod(install_script, st.st_mode | stat.S_IEXEC)
 
     # Find the repro details file. This just includes some values we wrote
     # during `spack ci rebuild` to make reproduction easier. E.g. the job
     # name is written here so we can easily find the configuration of the
@@ -1844,7 +1844,7 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
             job_image = job_image_elt["name"]
         else:
             job_image = job_image_elt
-        tty.msg("Job ran with the following image: {0}".format(job_image))
+        tty.msg(f"Job ran with the following image: {job_image}")
 
         # Because we found this job was run with a docker image, so we will try
         # to print a "docker run" command that bind-mounts the directory where
@@ -1919,40 +1919,49 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     job_tags = None
     if "tags" in job_yaml:
         job_tags = job_yaml["tags"]
-        tty.msg("Job ran with the following tags: {0}".format(job_tags))
+        tty.msg(f"Job ran with the following tags: {job_tags}")
 
     entrypoint_script = [
         ["git", "config", "--global", "--add", "safe.directory", mount_as_dir],
-        [".", os.path.join(mount_as_dir if job_image else work_dir, "share/spack/setup-env.sh")],
+        [
+            ".",
+            os.path.join(
+                mount_as_dir if job_image else work_dir,
+                f"share/spack/setup-env.{platform_script_ext}",
+            ),
+        ],
         ["spack", "gpg", "trust", mounted_gpg_path if job_image else gpg_path] if gpg_path else [],
         ["spack", "env", "activate", mounted_env_dir if job_image else repro_dir],
-        [os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script],
+        [
+            (
+                os.path.join(mounted_repro_dir, f"install.{platform_script_ext}")
+                if job_image
+                else install_script
+            )
+        ],
     ]
+    entry_script = os.path.join(mounted_workdir, f"entrypoint.{platform_script_ext}")
     inst_list = []
     # Finally, print out some instructions to reproduce the build
     if job_image:
         # Allow interactive
-        entrypoint_script.extend(
-            [
-                [
-                    "echo",
-                    "Re-run install script using:\n\t{0}".format(
-                        os.path.join(mounted_repro_dir, "install.sh")
-                        if job_image
-                        else install_script
-                    ),
-                ],
-                # Allow interactive
-                ["exec", "$@"],
-            ]
-        )
+        install_mechanism = (
+            os.path.join(mounted_repro_dir, f"install.{platform_script_ext}")
+            if job_image
+            else install_script
+        )
+        entrypoint_script.append(["echo", f"Re-run install script using:\n\t{install_mechanism}"])
+        # Allow interactive
+        if IS_WINDOWS:
+            entrypoint_script.extend(["&", "($args -Join ' ')", "-NoExit"])
+        else:
+            entrypoint_script.extend(["exec", "$@"])
 
         process_command(
             "entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
         )
 
         docker_command = [
-            [
             runtime,
             "run",
             "-i",
@@ -1973,11 +1982,12 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
             "-v",
             ":".join([os.path.join(work_dir, "spack"), mount_as_dir, "Z"]),
             "--entrypoint",
-            os.path.join(mounted_workdir, "entrypoint.sh"),
-            job_image,
-            "bash",
-            ]
         ]
+        if IS_WINDOWS:
+            docker_command.extend(["powershell.exe", job_image, entry_script, "powershell.exe"])
+        else:
+            docker_command.extend([entry_script, job_image, "bash"])
+        docker_command = [docker_command]
         autostart = autostart and setup_result
         process_command("start", docker_command, work_dir, run=autostart)
 
@@ -1986,22 +1996,20 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
         inst_list.extend(
             [
                 " - Start the docker container install",
-                " $ {0}/start.sh".format(work_dir),
+                f" $ {work_dir}/start.{platform_script_ext}",
             ]
         )
     else:
         process_command("reproducer", entrypoint_script, work_dir, run=False)
 
         inst_list.append("\nOnce on the tagged runner:\n\n")
-        inst_list.extent(
-            [" - Run the reproducer script", " $ {0}/reproducer.sh".format(work_dir)]
-        )
+        inst_list.extent([" - Run the reproducer script", f" $ {work_dir}/reproducer.py"])
 
     if not setup_result:
         inst_list.append("\n - Clone spack and acquire tested commit")
-        inst_list.append("\n {0}\n".format(spack_info))
+        inst_list.append(f"\n {spack_info}\n")
         inst_list.append("\n")
-        inst_list.append("\n Path to clone spack: {0}/spack\n\n".format(work_dir))
+        inst_list.append(f"\n Path to clone spack: {work_dir}/spack\n\n")
 
     tty.msg("".join(inst_list))
 
@@ -2020,50 +2028,78 @@ def process_command(name, commands, repro_dir, run=True, exit_on_failure=True):
 
     Returns: the exit code from processing the command
     """
-    tty.debug("spack {0} arguments: {1}".format(name, commands))
 
+    tty.debug(f"spack {name} arguments: {commands}")
     if len(commands) == 0 or isinstance(commands[0], str):
         commands = [commands]
 
-    # Create a string [command 1] && [command 2] && ... && [command n] with commands
-    # quoted using double quotes.
-    args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args)
-    full_command = " \n ".join(map(args_to_string, commands))
+    def compose_command_err_handling(args):
+        if not IS_WINDOWS:
+            args = [f'"{arg}"' for arg in args]
+        arg_str = " ".join(args)
+        result = arg_str + "\n"
+        # ErrorActionPreference will handle PWSH commandlets (Spack calls),
+        # but we need to handle EXEs (git, etc) ourselves
+        catch_exe_failure = (
+            """
+if ($LASTEXITCODE -ne 0){
+    throw "Command {} has failed"
+}
+"""
+            if IS_WINDOWS
+            else ""
+        )
+        if exit_on_failure and catch_exe_failure:
+            result += catch_exe_failure.format(arg_str)
+        return result
 
-    # Write the command to a shell script
-    script = "{0}.sh".format(name)
-    with open(script, "w") as fd:
-        fd.write("#!/bin/sh\n\n")
-        fd.write("\n# spack {0} command\n".format(name))
-        if exit_on_failure:
-            fd.write("set -e\n")
-        if os.environ.get("SPACK_VERBOSE_SCRIPT"):
-            fd.write("set -x\n")
-        fd.write(full_command)
-        fd.write("\n")
+    # Create a string [command 1] \n [command 2] \n ... \n [command n] with
+    # commands composed into a platform dependent shell script, pwsh on Windows,
+    full_command = "\n".join(map(compose_command_err_handling, commands))
+    # Write the command to a python script
+    if IS_WINDOWS:
+        script = f"{name}.ps1"
+        script_content = [f"\n# spack {name} command\n"]
+        if exit_on_failure:
+            script_content.append('$ErrorActionPreference = "Stop"\n')
+        if os.environ.get("SPACK_VERBOSE_SCRIPT"):
+            script_content.append("Set-PSDebug -Trace 2\n")
+    else:
+        script = f"{name}.sh"
+        script_content = ["#!/bin/sh\n\n", f"\n# spack {name} command\n"]
+        if exit_on_failure:
+            script_content.append("set -e\n")
+        if os.environ.get("SPACK_VERBOSE_SCRIPT"):
+            script_content.append("set -x\n")
+    script_content.append(full_command)
+    script_content.append("\n")
 
-    st = os.stat(script)
-    os.chmod(script, st.st_mode | stat.S_IEXEC)
+    with open(script, "w") as fd:
+        for line in script_content:
+            fd.write(line)
 
     copy_path = os.path.join(repro_dir, script)
     shutil.copyfile(script, copy_path)
+    if not IS_WINDOWS:
         st = os.stat(copy_path)
         os.chmod(copy_path, st.st_mode | stat.S_IEXEC)
 
-    # Run the generated install.sh shell script as if it were being run in
+    # Run the generated shell script as if it were being run in
     # a login shell.
     exit_code = None
     if run:
         try:
-            cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
+            # We use sh as executor on Linux like platforms, pwsh on Windows
+            interpreter = "powershell.exe" if IS_WINDOWS else "/bin/sh"
+            cmd_process = subprocess.Popen([interpreter, f"./{script}"])
             cmd_process.wait()
             exit_code = cmd_process.returncode
         except (ValueError, subprocess.CalledProcessError, OSError) as err:
-            tty.error("Encountered error running {0} script".format(name))
+            tty.error(f"Encountered error running {name} script")
             tty.error(err)
             exit_code = 1
 
-        tty.debug("spack {0} exited {1}".format(name, exit_code))
+        tty.debug(f"spack {name} exited {exit_code}")
     else:
         # Delete the script, it is copied to the destination dir
         os.remove(script)
@@ -2122,7 +2158,7 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
         # If there is an S3 error (e.g., access denied or connection
         # error), the first non boto-specific class in the exception
         # hierarchy is Exception. Just print a warning and return
-        msg = "Error writing to broken specs list {0}: {1}".format(url, err)
+        msg = f"Error writing to broken specs list {url}: {err}"
         tty.warn(msg)
     finally:
         shutil.rmtree(tmpdir)
@@ -2135,7 +2171,7 @@ def read_broken_spec(broken_spec_url):
     try:
         _, _, fs = web_util.read_from_url(broken_spec_url)
     except (URLError, web_util.SpackWebError, HTTPError):
-        tty.warn("Unable to read broken spec from {0}".format(broken_spec_url))
+        tty.warn(f"Unable to read broken spec from {broken_spec_url}")
         return None
 
     broken_spec_contents = codecs.getreader("utf-8")(fs).read()
@@ -2150,14 +2186,14 @@ def display_broken_spec_messages(base_url, hashes):
     for spec_hash, broken_spec in [tup for tup in broken_specs if tup[1]]:
         details = broken_spec["broken-spec"]
         if "job-name" in details:
-            item_name = "{0}/{1}".format(details["job-name"], spec_hash[:7])
+            item_name = f"{details['job-name']}/{spec_hash[:7]}"
         else:
            item_name = spec_hash
 
         if "job-stack" in details:
-            item_name = "{0} (in stack {1})".format(item_name, details["job-stack"])
+            item_name = f"{item_name} (in stack {details['job-stack']})"
 
-        msg = " {0} was reported broken here: {1}".format(item_name, details["job-url"])
+        msg = f" {item_name} was reported broken here: {details['job-url']}"
         tty.msg(msg)
 
 
@@ -2180,7 +2216,7 @@ def run_standalone_tests(**kwargs):
     log_file = kwargs.get("log_file")
 
     if cdash and log_file:
-        tty.msg("The test log file {0} option is ignored with CDash reporting".format(log_file))
+        tty.msg(f"The test log file {log_file} option is ignored with CDash reporting")
         log_file = None
 
     # Error out but do NOT terminate if there are missing required arguments.
@@ -2206,10 +2242,10 @@ def run_standalone_tests(**kwargs):
         test_args.extend(["--log-file", log_file])
     test_args.append(job_spec.name)
 
-    tty.debug("Running {0} stand-alone tests".format(job_spec.name))
+    tty.debug(f"Running {job_spec.name} stand-alone tests")
     exit_code = process_command("test", test_args, repro_dir)
 
-    tty.debug("spack test exited {0}".format(exit_code))
+    tty.debug(f"spack test exited {exit_code}")
 
 
 class CDashHandler:
@@ -2232,7 +2268,7 @@ def __init__(self, ci_cdash):
         # append runner description to the site if available
         runner = os.environ.get("CI_RUNNER_DESCRIPTION")
         if runner:
-            self.site += " ({0})".format(runner)
+            self.site += f" ({runner})"
 
         # track current spec, if any
         self.current_spec = None
@@ -2260,21 +2296,13 @@ def build_name(self):
         Returns: (str) current spec's CDash build name."""
         spec = self.current_spec
         if spec:
-            build_name = "{0}@{1}%{2} hash={3} arch={4} ({5})".format(
-                spec.name,
-                spec.version,
-                spec.compiler,
-                spec.dag_hash(),
-                spec.architecture,
-                self.build_group,
-            )
-            tty.debug(
-                "Generated CDash build name ({0}) from the {1}".format(build_name, spec.name)
-            )
+            build_name = f"{spec.name}@{spec.version}%{spec.compiler} \
+hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
+            tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
             return build_name
 
         build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
-        tty.debug("Using CDash build name ({0}) from the environment".format(build_name))
+        tty.debug(f"Using CDash build name ({build_name}) from the environment")
         return build_name
 
     @property  # type: ignore
@@ -2288,25 +2316,25 @@ def build_stamp(self):
         Returns: (str) current CDash build stamp"""
         build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
         if build_stamp:
-            tty.debug("Using build stamp ({0}) from the environment".format(build_stamp))
+            tty.debug(f"Using build stamp ({build_stamp}) from the environment")
             return build_stamp
 
         build_stamp = cdash_build_stamp(self.build_group, time.time())
-        tty.debug("Generated new build stamp ({0})".format(build_stamp))
+        tty.debug(f"Generated new build stamp ({build_stamp})")
         return build_stamp
 
     @property  # type: ignore
     @memoized
     def project_enc(self):
-        tty.debug("Encoding project ({0}): {1})".format(type(self.project), self.project))
+        tty.debug(f"Encoding project ({type(self.project)}): {self.project})")
         encode = urlencode({"project": self.project})
         index = encode.find("=") + 1
         return encode[index:]
 
     @property
     def upload_url(self):
-        url_format = "{0}/submit.php?project={1}"
-        return url_format.format(self.url, self.project_enc)
+        url_format = f"{self.url}/submit.php?project={self.project_enc}"
+        return url_format
 
     def copy_test_results(self, source, dest):
         """Copy test results to artifacts directory."""
@@ -2324,7 +2352,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):
         response_code = response.getcode()
 
         if response_code not in [200, 201]:
-            msg = "Creating buildgroup failed (response code = {0})".format(response_code)
+            msg = f"Creating buildgroup failed (response code = {response_code})"
             tty.warn(msg)
             return None
 
@@ -2335,10 +2363,10 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):
         return build_group_id
 
     def populate_buildgroup(self, job_names):
-        url = "{0}/api/v1/buildgroup.php".format(self.url)
+        url = f"{self.url}/api/v1/buildgroup.php"
 
         headers = {
-            "Authorization": "Bearer {0}".format(self.auth_token),
+            "Authorization": f"Bearer {self.auth_token}",
             "Content-Type": "application/json",
         }
 
@@ -2346,11 +2374,11 @@ def populate_buildgroup(self, job_names):
 
         parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
         group_id = self.create_buildgroup(
-            opener, headers, url, "Latest {0}".format(self.build_group), "Latest"
+            opener, headers, url, f"Latest {self.build_group}", "Latest"
         )
 
         if not parent_group_id or not group_id:
-            msg = "Failed to create or retrieve buildgroups for {0}".format(self.build_group)
+            msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
             tty.warn(msg)
             return
 
@@ -2370,7 +2398,7 @@ def populate_buildgroup(self, job_names):
         response_code = response.getcode()
 
         if response_code != 200:
-            msg = "Error response code ({0}) in populate_buildgroup".format(response_code)
+            msg = f"Error response code ({response_code}) in populate_buildgroup"
             tty.warn(msg)
 
     def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
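On the Windows branch above, "$ErrorActionPreference = 'Stop'" only aborts on failing PowerShell commandlets, so the new compose_command_err_handling helper also appends an explicit $LASTEXITCODE check after each command so failures of external executables (git, curl, ...) stop the script too. A standalone sketch of just that wrapping step, with an illustrative function name rather than the Spack API:

def wrap_for_powershell(args, exit_on_failure=True):
    """Sketch: join the argv list unquoted (unlike the sh path, which quotes each
    argument) and append an exit-code check so a failing EXE aborts the script."""
    line = " ".join(args)
    if exit_on_failure:
        line += (
            "\nif ($LASTEXITCODE -ne 0){\n"
            f'    throw "Command {line} has failed"\n'
            "}\n"
        )
    return line


print(wrap_for_powershell(["git", "clone", "https://github.com/spack/spack"]))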