pipelines: Store details about specs broken on develop (#24637)
When a spec fails to build on `develop`, instead of storing an empty file as the entry in the broken-specs list, this change stores the full spec YAML as well as links to the failing pipeline and job.
This commit is contained in:
parent
3df1d9062e
commit
f591e9788d
2 changed files with 30 additions and 3 deletions
|
@ -22,6 +22,7 @@
|
|||
import spack.environment as ev
|
||||
import spack.hash_types as ht
|
||||
import spack.mirror
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
|
||||
|
@ -30,6 +31,7 @@
|
|||
level = "long"
|
||||
|
||||
CI_REBUILD_INSTALL_BASE_ARGS = ['spack', '-d', '-v']
|
||||
INSTALL_FAIL_CODE = 1
|
||||
|
||||
|
||||
def get_env_var(variable_name):
|
||||
|
@ -491,7 +493,7 @@ def ci_rebuild(args):
|
|||
# If a spec fails to build in a spack develop pipeline, we add it to a
|
||||
# list of known broken full hashes. This allows spack PR pipelines to
|
||||
# avoid wasting compute cycles attempting to build those hashes.
|
||||
if install_exit_code == 1 and spack_is_develop_pipeline:
|
||||
if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
|
||||
tty.debug('Install failed on develop')
|
||||
if 'broken-specs-url' in gitlab_ci:
|
||||
broken_specs_url = gitlab_ci['broken-specs-url']
|
||||
|
@ -502,9 +504,17 @@ def ci_rebuild(args):
|
|||
tmpdir = tempfile.mkdtemp()
|
||||
empty_file_path = os.path.join(tmpdir, 'empty.txt')
|
||||
|
||||
broken_spec_details = {
|
||||
'broken-spec': {
|
||||
'job-url': get_env_var('CI_JOB_URL'),
|
||||
'pipeline-url': get_env_var('CI_PIPELINE_URL'),
|
||||
'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
|
||||
}
|
||||
}
|
||||
|
||||
try:
|
||||
with open(empty_file_path, 'w') as efd:
|
||||
efd.write('')
|
||||
efd.write(syaml.dump(broken_spec_details))
|
||||
web_util.push_to_url(
|
||||
empty_file_path,
|
||||
broken_spec_path,
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
import spack.repo as repo
|
||||
import spack.util.gpg
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
from spack.schema.buildcache_spec import schema as spec_yaml_schema
|
||||
from spack.schema.database_index import schema as db_idx_schema
|
||||
from spack.schema.gitlab_ci import schema as gitlab_ci_schema
|
||||
|
@ -689,9 +690,13 @@ def test_ci_rebuild(tmpdir, mutable_mock_env_path, env_deactivate,
|
|||
mirror_dir = working_dir.join('mirror')
|
||||
mirror_url = 'file://{0}'.format(mirror_dir.strpath)
|
||||
|
||||
broken_specs_url = 's3://some-bucket/naughty-list'
|
||||
broken_specs_path = os.path.join(working_dir.strpath, 'naughty-list')
|
||||
broken_specs_url = url_util.join('file://', broken_specs_path)
|
||||
temp_storage_url = 'file:///path/to/per/pipeline/storage'
|
||||
|
||||
ci_job_url = 'https://some.domain/group/project/-/jobs/42'
|
||||
ci_pipeline_url = 'https://some.domain/group/project/-/pipelines/7'
|
||||
|
||||
signing_key_dir = spack_paths.mock_gpg_keys_path
|
||||
signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
|
||||
with open(signing_key_path) as fd:
|
||||
|
@ -743,14 +748,17 @@ def test_ci_rebuild(tmpdir, mutable_mock_env_path, env_deactivate,
|
|||
|
||||
root_spec_build_hash = None
|
||||
job_spec_dag_hash = None
|
||||
job_spec_full_hash = None
|
||||
|
||||
for h, s in env.specs_by_hash.items():
|
||||
if s.name == 'archive-files':
|
||||
root_spec_build_hash = h
|
||||
job_spec_dag_hash = s.dag_hash()
|
||||
job_spec_full_hash = s.full_hash()
|
||||
|
||||
assert root_spec_build_hash
|
||||
assert job_spec_dag_hash
|
||||
assert job_spec_full_hash
|
||||
|
||||
def fake_cdash_register(build_name, base_url, project, site, track):
|
||||
return ('fakebuildid', 'fakestamp')
|
||||
|
@ -760,6 +768,7 @@ def fake_cdash_register(build_name, base_url, project, site, track):
|
|||
monkeypatch.setattr(spack.cmd.ci, 'CI_REBUILD_INSTALL_BASE_ARGS', [
|
||||
'notcommand'
|
||||
])
|
||||
monkeypatch.setattr(spack.cmd.ci, 'INSTALL_FAIL_CODE', 127)
|
||||
|
||||
with env_dir.as_cwd():
|
||||
env_cmd('activate', '--without-view', '--sh', '-d', '.')
|
||||
|
@ -780,6 +789,8 @@ def fake_cdash_register(build_name, base_url, project, site, track):
|
|||
set_env_var('SPACK_RELATED_BUILDS_CDASH', '')
|
||||
set_env_var('SPACK_REMOTE_MIRROR_URL', mirror_url)
|
||||
set_env_var('SPACK_PIPELINE_TYPE', 'spack_protected_branch')
|
||||
set_env_var('CI_JOB_URL', ci_job_url)
|
||||
set_env_var('CI_PIPELINE_URL', ci_pipeline_url)
|
||||
|
||||
ci_cmd('rebuild', fail_on_error=False)
|
||||
|
||||
|
@ -815,6 +826,12 @@ def mystrip(s):
|
|||
flag_index = install_parts.index('-f')
|
||||
assert('archive-files.yaml' in install_parts[flag_index + 1])
|
||||
|
||||
broken_spec_file = os.path.join(broken_specs_path, job_spec_full_hash)
|
||||
with open(broken_spec_file) as fd:
|
||||
broken_spec_content = fd.read()
|
||||
assert(ci_job_url in broken_spec_content)
|
||||
assert(ci_pipeline_url) in broken_spec_content
|
||||
|
||||
env_cmd('deactivate')
|
||||
|
||||
|
||||
|
|
Loading…
Reference in a new issue