Pipelines: Move PR testing stacks (currently only E4S) into spack (#21714)

This commit is contained in:
Scott Wittenburg 2021-02-18 18:50:57 -07:00 committed by GitHub
parent 0da2b82df2
commit 6b509a95da
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 273 additions and 61 deletions

View file

@ -22,7 +22,7 @@
import spack import spack
import spack.binary_distribution as bindist import spack.binary_distribution as bindist
import spack.cmd.buildcache as buildcache import spack.cmd
import spack.compilers as compilers import spack.compilers as compilers
import spack.config as cfg import spack.config as cfg
import spack.environment as ev import spack.environment as ev
@ -41,7 +41,7 @@
'always', 'always',
] ]
SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-pr-mirrors' SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror' TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
spack_gpg = spack.main.SpackCommand('gpg') spack_gpg = spack.main.SpackCommand('gpg')
@ -943,7 +943,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, prune_dag=False,
cleanup_job['stage'] = 'cleanup-temp-storage' cleanup_job['stage'] = 'cleanup-temp-storage'
cleanup_job['script'] = [ cleanup_job['script'] = [
'spack mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.format( 'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.format(
temp_storage_url_prefix) temp_storage_url_prefix)
] ]
cleanup_job['when'] = 'always' cleanup_job['when'] = 'always'
@ -1262,16 +1262,35 @@ def read_cdashid_from_mirror(spec, mirror_url):
def push_mirror_contents(env, spec, yaml_path, mirror_url, build_id, def push_mirror_contents(env, spec, yaml_path, mirror_url, build_id,
sign_binaries): sign_binaries):
if mirror_url: if mirror_url:
unsigned = not sign_binaries try:
tty.debug('Creating buildcache ({0})'.format( unsigned = not sign_binaries
'unsigned' if unsigned else 'signed')) tty.debug('Creating buildcache ({0})'.format(
buildcache._createtarball(env, spec_yaml=yaml_path, add_deps=False, 'unsigned' if unsigned else 'signed'))
output_location=mirror_url, force=True, spack.cmd.buildcache._createtarball(
allow_root=True, unsigned=unsigned) env, spec_yaml=yaml_path, add_deps=False,
if build_id: output_location=mirror_url, force=True, allow_root=True,
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format( unsigned=unsigned)
build_id, mirror_url)) if build_id:
write_cdashid_to_mirror(build_id, spec, mirror_url) tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
build_id, mirror_url))
write_cdashid_to_mirror(build_id, spec, mirror_url)
except Exception as inst:
# If the mirror we're pushing to is on S3 and there's some
# permissions problem, for example, we can't just target
# that exception type here, since users of the
# `spack ci rebuild' may not need or want any dependency
# on boto3. So we use the first non-boto exception type
# in the hierarchy:
# boto3.exceptions.S3UploadFailedError
# boto3.exceptions.Boto3Error
# Exception
# BaseException
# object
err_msg = 'Error msg: {0}'.format(inst)
if 'Access Denied' in err_msg:
tty.msg('Permission problem writing to {0}'.format(
mirror_url))
tty.msg(err_msg)
def copy_stage_logs_to_artifacts(job_spec, job_log_dir): def copy_stage_logs_to_artifacts(job_spec, job_log_dir):

View file

@ -413,33 +413,18 @@ def add_mirror(mirror_name, mirror_url):
else: else:
buildcache_mirror_url = remote_mirror_url buildcache_mirror_url = remote_mirror_url
try: # Create buildcache in either the main remote mirror, or in the
spack_ci.push_mirror_contents( # per-PR mirror, if this is a PR pipeline
env, job_spec, job_spec_yaml_path, buildcache_mirror_url, spack_ci.push_mirror_contents(
cdash_build_id, sign_binaries) env, job_spec, job_spec_yaml_path, buildcache_mirror_url,
except Exception as inst: cdash_build_id, sign_binaries)
# If the mirror we're pushing to is on S3 and there's some
# permissions problem, for example, we can't just target
# that exception type here, since users of the
# `spack ci rebuild' may not need or want any dependency
# on boto3. So we use the first non-boto exception type
# in the hierarchy:
# boto3.exceptions.S3UploadFailedError
# boto3.exceptions.Boto3Error
# Exception
# BaseException
# object
err_msg = 'Error msg: {0}'.format(inst)
if 'Access Denied' in err_msg:
tty.msg('Permission problem writing to mirror')
tty.msg(err_msg)
# Create another copy of that buildcache in the per-pipeline # Create another copy of that buildcache in the per-pipeline
# temporary storage mirror (this is only done if either artifacts # temporary storage mirror (this is only done if either artifacts
# buildcache is enabled or a temporary storage url prefix is set) # buildcache is enabled or a temporary storage url prefix is set)
spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path, spack_ci.push_mirror_contents(
pipeline_mirror_url, cdash_build_id, env, job_spec, job_spec_yaml_path, pipeline_mirror_url,
sign_binaries) cdash_build_id, sign_binaries)
# Relate this build to its dependencies on CDash (if enabled) # Relate this build to its dependencies on CDash (if enabled)
if enable_cdash: if enable_cdash:

View file

@ -878,6 +878,26 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
assert(len(dl_dir_list) == 3) assert(len(dl_dir_list) == 3)
def test_push_mirror_contents_exceptions(monkeypatch, capsys):
    """Check that push_mirror_contents surfaces permission errors.

    The buildcache tarball-creation helper is patched to raise an
    exception whose message contains 'Access Denied'; the test asserts
    that push_mirror_contents reports the problem on stdout (including
    the mirror url) instead of letting the exception propagate.
    """
    import spack.cmd.buildcache as buildcache

    def raise_access_denied(env, spec_yaml=None, packages=None, add_spec=True,
                            add_deps=True, output_location=os.getcwd(),
                            signing_key=None, force=False, make_relative=False,
                            unsigned=False, allow_root=False,
                            rebuild_index=False):
        # Simulate the error text seen on an S3 permissions failure.
        raise Exception('Error: Access Denied')

    monkeypatch.setattr(buildcache, '_createtarball', raise_access_denied)

    url = 'fakejunk'
    ci.push_mirror_contents(None, None, None, url, None, None)

    std_out = capsys.readouterr()[0]
    expect_msg = 'Permission problem writing to {0}'.format(url)
    assert expect_msg in std_out
def test_ci_generate_override_runner_attrs(tmpdir, mutable_mock_env_path, def test_ci_generate_override_runner_attrs(tmpdir, mutable_mock_env_path,
env_deactivate, install_mockery, env_deactivate, install_mockery,
mock_packages, monkeypatch): mock_packages, monkeypatch):
@ -1373,7 +1393,7 @@ def test_ci_generate_temp_storage_url(tmpdir, mutable_mock_env_path,
assert('script' in cleanup_job) assert('script' in cleanup_job)
cleanup_task = cleanup_job['script'][0] cleanup_task = cleanup_job['script'][0]
assert(cleanup_task.startswith('spack mirror destroy')) assert(cleanup_task.startswith('spack -d mirror destroy'))
assert('stages' in pipeline_doc) assert('stages' in pipeline_doc)
stages = pipeline_doc['stages'] stages = pipeline_doc['stages']

View file

@ -1,24 +0,0 @@
pr_pipeline:
only:
- /^github\/pr[\d]+_.*$/
variables:
SPACK_REF: ${CI_COMMIT_SHA}
SPACK_PR_BRANCH: ${CI_COMMIT_REF_NAME}
SPACK_IS_PR_PIPELINE: "True"
AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
trigger:
project: spack/e4s
strategy: depend
develop_pipeline:
only:
- /^github\/develop$/
variables:
SPACK_REF: ${CI_COMMIT_SHA}
AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
SPACK_SIGNING_KEY: ${SPACK_SIGNING_KEY}
trigger:
project: spack/e4s
strategy: depend

View file

@ -0,0 +1,61 @@
# GitLab CI configuration for the E4S stack pipelines, now hosted in the
# spack repository itself (previously triggered in the external spack/e4s
# project). Two stages: "generate" emits a dynamic child-pipeline yaml as
# an artifact, and "build" triggers that artifact as a downstream pipeline.
stages: [generate, build]

# Hidden template shared by the PR and develop generate jobs: activate the
# e4s stack environment and run `spack ci generate`.
.generate:
  stage: generate
  script:
    - . "./share/spack/setup-env.sh"
    - spack --version
    # The e4s stack environment (spack.yaml) lives inside the spack repo.
    - cd share/spack/gitlab/cloud_e4s_pipelines/stacks/e4s
    - spack env activate --without-view .
    # --check-index-only: consult only buildcache indices when deciding
    # which specs are already built (avoids per-spec mirror probing).
    - spack -d ci generate --check-index-only
      --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/e4s_pipeline.yml"
  artifacts:
    paths:
      - "${CI_PROJECT_DIR}/jobs_scratch_dir/e4s_pipeline.yml"
  tags: ["spack", "public", "medium"]

# Generate the child pipeline for PR branches (github/pr<N>_...).
# NOTE(review): the extra 'move-pr-stack-into-spack' ref looks like a
# development-branch hook for this change — confirm before keeping long-term.
e4s-pr-generate:
  only:
    - /^github\/pr[\d]+_.*$/
    - move-pr-stack-into-spack
  extends: .generate
  before_script:
    # Mark this as a PR pipeline so generated jobs behave accordingly.
    - export SPACK_PR_BRANCH=${CI_COMMIT_REF_NAME}
    - export SPACK_IS_PR_PIPELINE="True"
  image: { "name": "ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01", "entrypoint": [""] }

# Generate the child pipeline for the protected develop branch.
e4s-develop-generate:
  only:
    - /^github\/develop$/
  extends: .generate
  image: { "name": "ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01", "entrypoint": [""] }

# Trigger the generated child pipeline for PRs, passing the PR-mirror
# credentials through to the child jobs.
e4s-pr-build:
  only:
    - /^github\/pr[\d]+_.*$/
    - move-pr-stack-into-spack
  stage: build
  trigger:
    include:
      - artifact: "jobs_scratch_dir/e4s_pipeline.yml"
        job: e4s-pr-generate
    strategy: depend
  variables:
    AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
    AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
    SPACK_PR_BRANCH: ${CI_COMMIT_REF_NAME}
    SPACK_IS_PR_PIPELINE: "True"

# Trigger the generated child pipeline for develop; the signing key is
# provided so child jobs can sign the binaries they push.
e4s-develop-build:
  only:
    - /^github\/develop$/
  stage: build
  trigger:
    include:
      - artifact: "jobs_scratch_dir/e4s_pipeline.yml"
        job: e4s-develop-generate
    strategy: depend
  variables:
    AWS_ACCESS_KEY_ID: ${MIRRORS_AWS_ACCESS_KEY_ID}
    AWS_SECRET_ACCESS_KEY: ${MIRRORS_AWS_SECRET_ACCESS_KEY}
    SPACK_SIGNING_KEY: ${PACKAGE_SIGNING_KEY}

View file

@ -0,0 +1,151 @@
# Spack environment defining the E4S package stack built by the cloud
# gitlab pipelines (see the sibling .gitlab-ci configuration, which
# activates this environment and runs `spack ci generate` / `rebuild`).
spack:
  view: false
  # Concretize each root spec independently rather than together.
  concretization: separately

  config:
    install_tree:
      root: /home/software/spack
      # Pad install prefixes to a fixed length so binaries built here
      # can be relocated into shorter paths on install.
      padded_length: 512
      projections:
        all: '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'

  # Global and per-package concretization preferences for the stack.
  packages:
    all:
      target: [x86_64]
      providers:
        blas:
          - openblas
        mpi:
          - mpich
      variants: +mpi
    binutils:
      variants: +gold+headers+libiberty~nls
      version:
        - 2.33.1
    cmake:
      version: [3.18.4]
    hdf5:
      variants: +fortran
    mpich:
      variants: ~wrapperrpath
    openblas:
      version: [0.3.10]
    slepc:
      version: [3.14.0]

  definitions:
    # The E4S package list proper.
    - e4s:
      - adios
      - adios2
      - aml
      - amrex
      - arborx
      - argobots
      - ascent
      - axom
      - bolt
      - caliper
      - darshan-runtime
      - darshan-util
      - dyninst
      - faodel
      - flecsi+cinch
      - flit
      - gasnet
      - ginkgo
      - globalarrays
      - gotcha
      - hdf5
      - hpctoolkit
      - hpx
      - hypre
      - kokkos-kernels+openmp
      - kokkos+openmp
      - legion
      - libnrm
      - libquo
      - magma cuda_arch=70 ^cuda@10.2.89
      - mercury
      - mfem
      - mpifileutils@develop~xattr
      - ninja
      - omega-h
      - openmpi
      - openpmd-api
      - papi
      - papyrus@1.0.1
      - parallel-netcdf
      - pdt
      - petsc
      - phist
      - plasma
      - precice
      - pumi
      - py-jupyterhub
      - py-libensemble
      - py-petsc4py
      - qthreads scheduler=distrib
      - raja
      - rempi
      - scr
      - slate ^openblas@0.3.5 threads=openmp ^cuda@10.2.89
      - slepc
      - stc
      - strumpack ~slate
      - sundials
      - superlu
      - superlu-dist
      - swig
      - sz
      - tasmanian
      - tau
      - trilinos
      - turbine
      - umap
      - umpire
      - unifyfs
      - upcxx
      - veloc
      - zfp
    # Compiler/architecture axis for the build matrix.
    - arch:
      - '%gcc@7.5.0 arch=linux-ubuntu18.04-x86_64'

  # Roots = every e4s package crossed with every arch entry.
  specs:
    - matrix:
      - - $e4s
      - - $arch

  mirrors: { "mirror": "s3://spack-binaries-develop/e4s-new-cluster" }

  gitlab-ci:
    # Script run by every generated rebuild job.
    script:
      - . "./share/spack/setup-env.sh"
      - spack --version
      - cd share/spack/gitlab/cloud_e4s_pipelines/stacks/e4s
      - spack env activate --without-view .
      - spack -d ci rebuild
    # Map specs to runners; heavier packages get larger runner instances.
    mappings:
      - match: [cuda, dyninst, hpx, precice, strumpack, sundials, trilinos, vtk-h, vtk-m]
        runner-attributes:
          image: { "name": "ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01", "entrypoint": [""] }
          tags: ["spack", "public", "xlarge"]
      - match: ['os=ubuntu18.04']
        runner-attributes:
          image: { "name": "ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01", "entrypoint": [""] }
          tags: ["spack", "public", "large"]
    # Per-pipeline scratch mirror (one subdir per CI pipeline id); a
    # cleanup job destroys it when the pipeline finishes.
    temporary-storage-url-prefix: "s3://spack-binaries-prs/pipeline-storage"
    # Attributes applied to generated service jobs (cleanup, no-op, etc.).
    service-job-attributes:
      before_script:
        - . "./share/spack/setup-env.sh"
        - spack --version
        - cd share/spack/gitlab/cloud_e4s_pipelines/stacks/e4s
        - spack env activate --without-view .
      image: { "name": "ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01", "entrypoint": [""] }
      tags: ["spack", "public", "medium"]

  # Build results are reported to this CDash instance.
  cdash:
    build-group: New PR testing workflow
    url: https://cdash.spack.io
    project: Spack Testing
    site: Cloud Gitlab Infrastructure