spack buildcache create: push all deps / cleanup (#34860)

commit 44ed0de8c0
parent 09eb86e077
Author: Harmen Stoppels
Date:   2023-02-16 15:08:49 +01:00 (committed by GitHub)
2 changed files with 28 additions and 38 deletions


@@ -40,6 +40,7 @@
 import spack.relocate as relocate
 import spack.repo
 import spack.store
+import spack.traverse as traverse
 import spack.util.file_cache as file_cache
 import spack.util.gpg
 import spack.util.spack_json as sjson
@@ -1308,57 +1309,44 @@ def _build_tarball(
     return None
 
 
-def nodes_to_be_packaged(specs, include_root=True, include_dependencies=True):
+def nodes_to_be_packaged(specs, root=True, dependencies=True):
     """Return the list of nodes to be packaged, given a list of specs.
 
     Args:
         specs (List[spack.spec.Spec]): list of root specs to be processed
-        include_root (bool): include the root of each spec in the nodes
-        include_dependencies (bool): include the dependencies of each
+        root (bool): include the root of each spec in the nodes
+        dependencies (bool): include the dependencies of each
             spec in the nodes
     """
-    if not include_root and not include_dependencies:
-        return set()
+    if not root and not dependencies:
+        return []
+    elif dependencies:
+        nodes = traverse.traverse_nodes(specs, root=root, deptype="all")
+    else:
+        nodes = set(specs)
 
-    def skip_node(current_node):
-        if current_node.external or current_node.virtual:
-            return True
-        return spack.store.db.query_one(current_node) is None
+    # Limit to installed non-externals.
+    packageable = lambda n: not n.external and n.installed
 
-    expanded_set = set()
-    for current_spec in specs:
-        if not include_dependencies:
-            nodes = [current_spec]
-        else:
-            nodes = [
-                n
-                for n in current_spec.traverse(
-                    order="post", root=include_root, deptype=("link", "run")
-                )
-            ]
-
-        for node in nodes:
-            if not skip_node(node):
-                expanded_set.add(node)
-
-    return expanded_set
+    # Mass install check
+    with spack.store.db.read_transaction():
+        return list(filter(packageable, nodes))
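The rewrite above changes behavior as well as shape: the old code did one query_one() database lookup per node and traversed only ("link", "run") dependencies per root, while the new code traverses all dependency types across all roots at once (hence "push all deps" in the commit title), deduplicates during traversal, and performs a single mass install check inside one read transaction. A minimal standalone sketch of the selection logic, using a toy spec class and traversal (hypothetical stand-ins, not spack APIs):

# Toy model of the new nodes_to_be_packaged() logic. TinySpec and
# traverse_nodes are simplified stand-ins for spack.spec.Spec and
# spack.traverse.traverse_nodes; deptypes and the database are omitted.


class TinySpec:
    def __init__(self, name, deps=(), external=False, installed=True):
        self.name = name
        self.deps = list(deps)
        self.external = external
        self.installed = installed

    def __repr__(self):
        return self.name


def traverse_nodes(specs, root=True):
    """Visit every node at most once across *all* roots."""
    seen = []
    stack = list(specs) if root else [d for s in specs for d in s.deps]
    while stack:
        node = stack.pop()
        if node not in seen:
            seen.append(node)
            stack.extend(node.deps)
    return seen


def nodes_to_be_packaged(specs, root=True, dependencies=True):
    if not root and not dependencies:
        return []
    elif dependencies:
        nodes = traverse_nodes(specs, root=root)
    else:
        nodes = set(specs)
    # Single filtering pass instead of one DB query per node.
    return [n for n in nodes if not n.external and n.installed]


zlib = TinySpec("zlib")
openssl = TinySpec("openssl", deps=[zlib])
curl = TinySpec("curl", deps=[openssl, zlib])
print(nodes_to_be_packaged([curl]))
# -> [curl, zlib, openssl]: zlib appears once although two nodes depend on it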
-def push(specs, push_url, specs_kwargs=None, **kwargs):
+def push(specs, push_url, include_root=True, include_dependencies=True, **kwargs):
     """Create a binary package for each of the specs passed as input and push them
     to a given push URL.
 
     Args:
         specs (List[spack.spec.Spec]): installed specs to be packaged
         push_url (str): url where to push the binary package
-        specs_kwargs (dict): dictionary with two possible boolean keys, "include_root"
-            and "include_dependencies", which determine which part of each spec is
-            packaged and pushed to the mirror
+        include_root (bool): include the root of each spec in the nodes
+        include_dependencies (bool): include the dependencies of each
+            spec in the nodes
         **kwargs: TODO
     """
-    specs_kwargs = specs_kwargs or {"include_root": True, "include_dependencies": True}
-    nodes = nodes_to_be_packaged(specs, **specs_kwargs)
+    nodes = nodes_to_be_packaged(specs, root=include_root, dependencies=include_dependencies)
 
     # TODO: This seems to be an easy target for task
     # TODO: distribution using a parallel pool
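With the specs_kwargs dict gone, callers pass the selection flags directly; everything else still travels through **kwargs to tarball creation. A sketch of a call site, assuming an installed spec and a local directory mirror (both illustrative):

import spack.binary_distribution as bindist
import spack.store

# Push every installed zlib, plus all of its dependencies, to a
# (hypothetical) local mirror. `force` is one of the **kwargs that
# create_fn forwards, as shown in the second file below.
specs = spack.store.db.query("zlib")
bindist.push(
    specs,
    "file:///tmp/my-mirror",
    include_root=True,
    include_dependencies=True,
    force=True,
)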


@@ -402,7 +402,7 @@ def _matching_specs(specs, spec_file):
         return spack.store.find(constraints, hashes=hashes)
 
     if env:
-        return [env.specs_by_hash[h] for h in env.concretized_order]
+        return [concrete for _, concrete in env.concretized_specs()]
 
     tty.die(
         "build cache file creation requires at least one"
@@ -461,10 +461,6 @@ def create_fn(args):
     msg = "Pushing binary packages to {0}/build_cache".format(url)
     tty.msg(msg)
 
-    specs_kwargs = {
-        "include_root": "package" in args.things_to_install,
-        "include_dependencies": "dependencies" in args.things_to_install,
-    }
     kwargs = {
         "key": args.key,
         "force": args.force,
@@ -473,7 +469,13 @@
         "allow_root": args.allow_root,
         "regenerate_index": args.rebuild_index,
     }
-    bindist.push(matches, url, specs_kwargs, **kwargs)
+    bindist.push(
+        matches,
+        url,
+        include_root="package" in args.things_to_install,
+        include_dependencies="dependencies" in args.things_to_install,
+        **kwargs,
+    )
 
 
 def install_fn(args):
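The membership tests above are the whole CLI-to-API mapping: args.things_to_install carries the user's selection (from the command's --only-style option; the flag itself is outside this diff), and each keyword argument is just a substring check. For illustration, with hypothetical values:

# Hypothetical values of args.things_to_install and the resulting flags:
for things in ("package,dependencies", "package", "dependencies"):
    include_root = "package" in things
    include_dependencies = "dependencies" in things
    print(f"{things!r}: root={include_root}, dependencies={include_dependencies}")
# 'package,dependencies': root=True, dependencies=True
# 'package': root=True, dependencies=False
# 'dependencies': root=False, dependencies=True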