Add a new configuration option to select among different concretization modes

The "concretizer" section has been extended with a "duplicates:strategy"
attribute, that can take three values:

- "none": only 1 node per package
- "minimal": allow multiple nodes opf specific packages
- "full": allow full duplication for a build tool
This commit is contained in:
Massimiliano Culpo 2023-06-29 11:26:25 +02:00 committed by Todd Gamblin
parent a4301badef
commit 9f8edbf6bf
5 changed files with 198 additions and 49 deletions

View file

@ -36,3 +36,9 @@ concretizer:
# on each root spec, allowing different versions and variants of the same package in
# an environment.
unify: true
# Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
duplicates:
# "none": allows a single node for any package in the DAG.
# "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: none

View file

@ -28,6 +28,12 @@
"unify": {
"oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["when_possible"]}]
},
"duplicates": {
"type": "object",
"properties": {
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
},
},
},
}
}

View file

@ -50,6 +50,8 @@
import spack.version as vn
import spack.version.git_ref_lookup
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
# these are from clingo.ast and bootstrapped later
ASTType = None
parse_files = None
@ -324,6 +326,15 @@ def __getattr__(self, name):
fn = AspFunctionBuilder()
def _create_counter(specs, tests):
    """Return the node counter matching the configured duplicates strategy.

    Reads ``concretizer:duplicates:strategy`` from the configuration
    (defaulting to "none") and instantiates the corresponding counter class.

    Args:
        specs: abstract specs to concretize
        tests: if True, include test dependencies in the possible packages
    """
    strategy = spack.config.config.get("concretizer:duplicates:strategy", "none")
    counter_by_strategy = {
        "full": FullDuplicatesCounter,
        "minimal": MinimalDuplicatesCounter,
    }
    counter_cls = counter_by_strategy.get(strategy, NoDuplicatesCounter)
    return counter_cls(specs, tests=tests)
def all_compilers_in_config():
return spack.compilers.all_compilers()
@ -2290,34 +2301,18 @@ def setup(self, driver, specs, reuse=None):
# get list of all possible dependencies
self.possible_virtuals = set(x.name for x in specs if x.virtual)
link_run_dependency_types = ("link", "run", "test")
dependency_types = spack.dependency.all_deptypes
if not self.tests:
link_run_dependency_types = ("link", "run")
dependency_types = ("link", "run", "build")
link_run = spack.package_base.possible_dependencies(
*specs, virtuals=self.possible_virtuals, deptype=link_run_dependency_types
)
direct_build = set()
for x in link_run:
current = spack.repo.path.get_pkg_class(x).dependencies_of_type("build")
direct_build.update(current)
total_build = spack.package_base.possible_dependencies(
*direct_build, virtuals=self.possible_virtuals, deptype=dependency_types
)
possible = set(link_run) | set(total_build)
node_counter = _create_counter(specs, tests=self.tests)
self.possible_virtuals = node_counter.possible_virtuals()
self.pkgs = node_counter.possible_dependencies()
# Fail if we already know an unreachable node is requested
for spec in specs:
missing_deps = [
str(d) for d in spec.traverse() if d.name not in possible and not d.virtual
str(d) for d in spec.traverse() if d.name not in self.pkgs and not d.virtual
]
if missing_deps:
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
self.pkgs = set(possible)
# driver is used by all the functions below to add facts and
# rules to generate an ASP program.
self.gen = driver
@ -2341,31 +2336,16 @@ def setup(self, driver, specs, reuse=None):
self.possible_compilers = self.generate_possible_compilers(specs)
self.gen.h1("Concrete input spec definitions")
self.define_concrete_input_specs(specs, possible)
self.define_concrete_input_specs(specs, self.pkgs)
if reuse:
self.gen.h1("Reusable specs")
self.gen.fact(fn.optimize_for_reuse())
for reusable_spec in reuse:
self._facts_from_concrete_spec(reusable_spec, possible)
self._facts_from_concrete_spec(reusable_spec, self.pkgs)
self.gen.h1("Generic statements on possible packages")
counter = collections.Counter(list(link_run) + list(total_build) + list(set(direct_build)))
self.gen.h2("Maximum number of nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
count = min(count, 1)
self.gen.fact(fn.max_nodes(pkg, count))
self.gen.newline()
self.gen.h2("Build unification sets ")
for name in spack.repo.path.packages_with_tags("build-tools"):
self.gen.fact(fn.multiple_unification_sets(name))
self.gen.newline()
self.gen.h2("Possible package in link-run subDAG")
for name in sorted(link_run):
self.gen.fact(fn.possible_in_link_run(name))
self.gen.newline()
node_counter.possible_packages_facts(self.gen, fn)
self.gen.h1("Possible flags on nodes")
for flag in spack.spec.FlagMap.valid_compiler_flags():
@ -2386,7 +2366,7 @@ def setup(self, driver, specs, reuse=None):
self.external_packages()
# traverse all specs and packages to build dict of possible versions
self.build_version_dict(possible)
self.build_version_dict(self.pkgs)
self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
@ -2948,7 +2928,7 @@ def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_o
Arguments:
specs (list): List of ``Spec`` objects to solve for.
out: Optionally write the generate ASP program to a file-like object.
timers (bool): Print out coarse fimers for different solve phases.
timers (bool): Print out coarse timers for different solve phases.
stats (bool): Print out detailed stats from clingo.
tests (bool or tuple): If True, concretize test dependencies for all packages.
If a tuple of package names, concretize test dependencies for named

View file

@ -10,7 +10,8 @@
#const root_node_id = 0.
#const link_run = 0.
#const direct_build = 1.
#const direct_link_run =1.
#const direct_build = 2.
% Allow clingo to create nodes
{ attr("node", node(0..X-1, Package)) } :- max_nodes(Package, X), not virtual(Package).
@ -58,17 +59,18 @@ unification_set(SetID, ChildNode) :- attr("depends_on", ParentNode, ChildNode, T
unification_set(("build", node(X, Child)), node(X, Child))
:- attr("depends_on", ParentNode, node(X, Child), Type),
Type == "build",
SetID != "generic_build",
multiple_unification_sets(Child),
unification_set("root", ParentNode).
unification_set(SetID, ParentNode).
unification_set("generic_build", node(X, Child))
:- attr("depends_on", ParentNode, node(X, Child), Type),
Type == "build",
not multiple_unification_sets(Child),
unification_set("root", ParentNode).
unification_set(_, ParentNode).
% Any dependency of type "build" in a unification set that is not "root", stays in that unification set
unification_set(SetID, ChildNode) :- attr("depends_on", ParentNode, ChildNode, Type), Type == "build", SetID != "root", unification_set(SetID, ParentNode).
unification_set(SetID, ChildNode) :- attr("depends_on", ParentNode, ChildNode, Type), Type == "build", SetID == "generic_build", unification_set(SetID, ParentNode).
unification_set(SetID, VirtualNode) :- provider(PackageNode, VirtualNode), unification_set(SetID, PackageNode).
#defined multiple_unification_sets/1.
@ -261,12 +263,19 @@ condition_set(PackageNode, PackageNode, link_run) :- provider(PackageNode, Virtu
condition_set(PackageNode, VirtualNode, link_run) :- provider(PackageNode, VirtualNode).
condition_set(ID, DependencyNode, link_run)
:- condition_set(ID, PackageNode, link_run),
attr("depends_on", PackageNode, DependencyNode, Type),
Type != "build".
condition_set(PackageNode, DependencyNode, direct_build) :- condition_set(PackageNode, PackageNode, link_run), attr("depends_on", PackageNode, DependencyNode, "build").
condition_set(PackageNode, DependencyNode, direct_link_run) :- condition_set(PackageNode, PackageNode, link_run), attr("depends_on", PackageNode, DependencyNode, Type), Type != "build".
% Add transitive link_run dependencies, but only if they are not clashing with some direct dependency
% (otherwise we might create an unsolvable problem when the transitive dependency has requirements that
% are in conflict with the direct dependency)
condition_set(ID, node(DependencyID, Dependency), link_run)
:- condition_set(ID, PackageNode, link_run),
PackageNode != ID, Type != "build",
not condition_set(ID, node(_, Dependency), direct_build),
not condition_set(ID, node(_, Dependency), direct_link_run),
attr("depends_on", PackageNode, node(DependencyID, Dependency), Type).
condition_set(ID, VirtualNode, Type) :- condition_set(ID, PackageNode, Type), provider(PackageNode, VirtualNode).
condition_set(ID, PackageNode) :- condition_set(ID, PackageNode, _).

View file

@ -0,0 +1,148 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
from typing import List, Set, Tuple

import spack.dependency
import spack.package_base
import spack.repo
PossibleDependencies = Set[str]
class Counter:
    """Computes the possible packages and the maximum number of duplicates
    allowed for each of them.

    Args:
        specs: abstract specs to concretize
        tests: if True, add test dependencies to the list of possible packages
    """

    def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
        self.specs = specs
        # With tests enabled we also follow "test" edges; otherwise the
        # traversal is limited to the non-test dependency types.
        if tests:
            self.link_run_types: Tuple[str, ...] = ("link", "run", "test")
            self.all_types: Tuple[str, ...] = spack.dependency.all_deptypes
        else:
            self.link_run_types = ("link", "run")
            self.all_types = ("link", "run", "build")
        # Lazily-computed caches, filled on first access by _compute_cache_values()
        self._possible_dependencies: PossibleDependencies = set()
        self._possible_virtuals: Set[str] = {x.name for x in specs if x.virtual}

    def possible_dependencies(self) -> PossibleDependencies:
        """Returns the list of possible dependencies"""
        self.ensure_cache_values()
        return self._possible_dependencies

    def possible_virtuals(self) -> Set[str]:
        """Returns the list of possible virtuals"""
        self.ensure_cache_values()
        return self._possible_virtuals

    def ensure_cache_values(self) -> None:
        """Ensure the cache values have been computed"""
        if not self._possible_dependencies:
            self._compute_cache_values()

    def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
        """Emit facts associated with the possible packages"""
        raise NotImplementedError("must be implemented by derived classes")

    def _compute_cache_values(self):
        # Subclasses fill self._possible_dependencies (and any helper caches)
        raise NotImplementedError("must be implemented by derived classes")
class NoDuplicatesCounter(Counter):
    """Counter emitting facts that allow a single node per package in the DAG."""

    def _compute_cache_values(self):
        # Possible dependencies are the full closure of the roots over all
        # relevant dependency types.
        self._possible_dependencies = set(
            spack.package_base.possible_dependencies(
                *self.specs, virtuals=self._possible_virtuals, deptype=self.all_types
            )
        )

    def possible_packages_facts(self, gen, fn):
        """Emit max_nodes and link-run facts: every package gets exactly one node."""
        gen.h2("Maximum number of nodes (packages)")
        for name in sorted(self.possible_dependencies()):
            gen.fact(fn.max_nodes(name, 1))
        gen.newline()

        gen.h2("Maximum number of nodes (virtual packages)")
        for name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_nodes(name, 1))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self.possible_dependencies()):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
    """Counter allowing 'build-tools' packages to be duplicated in the DAG."""

    def __init__(self, specs, tests):
        super().__init__(specs, tests)
        # Sub-DAG caches, computed lazily alongside _possible_dependencies
        self._link_run: PossibleDependencies = set()
        self._direct_build: PossibleDependencies = set()
        self._total_build: PossibleDependencies = set()

    def _compute_cache_values(self):
        # Closure of the roots following only link/run (and possibly test) edges
        self._link_run = set(
            spack.package_base.possible_dependencies(
                *self.specs, virtuals=self._possible_virtuals, deptype=self.link_run_types
            )
        )
        # Direct build dependencies of every package in the link/run closure
        for pkg_name in self._link_run:
            pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
            self._direct_build.update(pkg_cls.dependencies_of_type("build"))
        # Full closure of those direct build dependencies
        self._total_build = set(
            spack.package_base.possible_dependencies(
                *self._direct_build, virtuals=self._possible_virtuals, deptype=self.all_types
            )
        )
        self._possible_dependencies = self._link_run | self._total_build

    def possible_packages_facts(self, gen, fn):
        """Emit max_nodes and unification facts; build tools may appear twice."""
        gen.h2("Maximum number of nodes (packages)")
        for package_name in sorted(self.possible_dependencies()):
            gen.fact(fn.max_nodes(package_name, 1))
        gen.newline()

        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_nodes(package_name, 1))
        gen.newline()

        gen.h2("Build unification sets ")
        for name in spack.repo.path.packages_with_tags("build-tools"):
            # Only build tools that can actually appear in this DAG matter
            if name in self.possible_dependencies():
                gen.fact(fn.multiple_unification_sets(name))
                gen.fact(fn.max_nodes(name, 2))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
    """Counter allowing duplication of the entire build-tool sub-DAG."""

    def possible_packages_facts(self, gen, fn):
        """Emit max_nodes facts sized by how many sub-DAGs mention each package."""
        # Count in how many of the three sub-DAGs each package occurs
        # (link/run closure, build closure, direct build deps)
        occurrences = collections.Counter()
        occurrences.update(self._link_run)
        occurrences.update(self._total_build)
        occurrences.update(self._direct_build)

        gen.h2("Maximum number of nodes")
        for pkg, count in sorted(occurrences.items(), key=lambda item: (item[1], item[0])):
            # Cap the allowed duplicates at 2 nodes per package
            gen.fact(fn.max_nodes(pkg, min(count, 2)))
        gen.newline()

        gen.h2("Build unification sets ")
        for name in spack.repo.path.packages_with_tags("build-tools"):
            gen.fact(fn.multiple_unification_sets(name))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()