Speed-up spack external find execution (#39843)
* Perform external spec detection with multiple workers

  The logic to perform external spec detection has been refactored into classes. These classes use the GoF "template" pattern to account for the small differences between searching for "executables" and for "libraries", while unifying the larger part of the algorithm. A ProcessPoolExecutor is used to parallelize the work (see the sketch after this list).

* Speed-up external find by tagging detectable packages automatically

  Querying packages by tag is much faster than inspecting the repository, since tags are cached. This commit adds a "detectable" tag to every package that implements the detection protocol, and external detection uses it to search for packages.

* Pass package names instead of package classes to workers

  The slowest part of the search is importing the Python modules associated with the candidate packages. Previously the import was done serially, before the work was distributed to the pool of workers. This commit pushes the import of the Python module into the job performed by the workers, and passes just the names of the packages to them, so that the imports can run in parallel.

* Rework unit tests for Windows

  Some unit tests were doing a full end-to-end run of a command just to check input handling; make them more focused by stressing a specific function instead. Mark 2 tests as xfail on Windows; they will be fixed by a PR in the queue. These tests fail because we monkeypatch internals in the parent process, but the monkeypatching is not applied in the "spawned" child processes.
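Taken together, the points above describe a simple fan-out: the command collects candidate package names through the cached tag index and hands each name to a pool of worker processes, which import the package module and scan the filesystem. The following is a condensed, self-contained sketch of that dispatch pattern, not the actual Spack code; detect() here is a placeholder standing in for the real Finder.find().

import collections
import concurrent.futures
from typing import Dict, List, Optional

DETECTION_TIMEOUT = 60  # seconds; the change below doubles this on Windows


def detect(pkg_name: str) -> List[str]:
    # Stand-in for Finder.find(): importing the package module and scanning
    # the filesystem both happen here, inside the worker process.
    return []


def detect_all(names: List[str], max_workers: Optional[int] = None) -> Dict[str, List[str]]:
    result: Dict[str, List[str]] = collections.defaultdict(list)
    futures = {}
    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
        # Submit plain package names, so the expensive module import runs in the workers
        for name in names:
            futures[name] = executor.submit(detect, name)
        for name, future in futures.items():
            try:
                result[name].extend(future.result(timeout=DETECTION_TIMEOUT))
            except Exception:
                pass  # a slow or failing package must not abort the whole search
    return result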
13 changed files with 324 additions and 349 deletions
.github/workflows/windows_python.yml

@@ -75,6 +75,5 @@ jobs:
       - name: Build Test
         run: |
           spack compiler find
-          spack external find cmake
-          spack external find ninja
+          spack -d external find cmake ninja
           spack -d install abseil-cpp
@@ -476,16 +476,16 @@ def ensure_executables_in_path_or_raise(
 def _add_externals_if_missing() -> None:
     search_list = [
         # clingo
-        spack.repo.PATH.get_pkg_class("cmake"),
-        spack.repo.PATH.get_pkg_class("bison"),
+        "cmake",
+        "bison",
         # GnuPG
-        spack.repo.PATH.get_pkg_class("gawk"),
+        "gawk",
         # develop deps
-        spack.repo.PATH.get_pkg_class("git"),
+        "git",
     ]
     if IS_WINDOWS:
-        search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
-    externals = spack.detection.by_executable(search_list)
+        search_list.append("winbison")
+    externals = spack.detection.by_path(search_list)
     # System git is typically deprecated, so mark as non-buildable to force it as external
     non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
     spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
@@ -300,8 +300,8 @@ def get_external_python_for_prefix(self):
         if python_externals_configured:
             return python_externals_configured[0]

-        python_externals_detection = spack.detection.by_executable(
-            [spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
+        python_externals_detection = spack.detection.by_path(
+            ["python"], path_hints=[self.spec.external_path]
         )

         python_externals_detected = [
@@ -6,6 +6,7 @@
 import errno
 import os
 import sys
+from typing import List, Optional

 import llnl.util.tty as tty
 import llnl.util.tty.colify as colify
@@ -54,7 +55,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--all", action="store_true", help="search for all packages that Spack knows about"
     )
-    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags"])
+    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags", "jobs"])
     find_parser.add_argument("packages", nargs=argparse.REMAINDER)
     find_parser.epilog = (
         'The search is by default on packages tagged with the "build-tools" or '
@@ -120,46 +121,23 @@ def external_find(args):
         else:
             tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)

-    # If the user didn't specify anything, search for build tools by default
-    if not args.tags and not args.all and not args.packages:
-        args.tags = ["core-packages", "build-tools"]
+    # Outside the Cray manifest, the search is done by tag for performance reasons,
+    # since tags are cached.

     # If the user specified both --all and --tag, then --all has precedence
     if args.all and args.tags:
         args.tags = []
+    if args.all or args.packages:
+        # Each detectable package has at least the detectable tag
+        args.tags = ["detectable"]
+    elif not args.tags:
+        # If the user didn't specify anything, search for build tools by default
+        args.tags = ["core-packages", "build-tools"]

-    # Construct the list of possible packages to be detected
-    pkg_cls_to_check = []
-
-    # Add the packages that have been required explicitly
-    if args.packages:
-        pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
-        if args.tags:
-            allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
-            pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
-
-    if args.tags and not pkg_cls_to_check:
-        # If we arrived here we didn't have any explicit package passed
-        # as argument, which means to search all packages.
-        # Since tags are cached it's much faster to construct what we need
-        # to search directly, rather than filtering after the fact
-        pkg_cls_to_check = [
-            spack.repo.PATH.get_pkg_class(pkg_name)
-            for tag in args.tags
-            for pkg_name in spack.repo.PATH.packages_with_tags(tag)
-        ]
-        pkg_cls_to_check = list(set(pkg_cls_to_check))
-
-    # If the list of packages is empty, search for every possible package
-    if not args.tags and not pkg_cls_to_check:
-        pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())
-
-    # If the user specified any packages to exclude from external find, add them here
-    if args.exclude:
-        pkg_cls_to_check = [pkg for pkg in pkg_cls_to_check if pkg.name not in args.exclude]
-
-    detected_packages = spack.detection.by_executable(pkg_cls_to_check, path_hints=args.path)
-    detected_packages.update(spack.detection.by_library(pkg_cls_to_check, path_hints=args.path))
+    candidate_packages = packages_to_search_for(
+        names=args.packages, tags=args.tags, exclude=args.exclude
+    )
+    detected_packages = spack.detection.by_path(
+        candidate_packages, path_hints=args.path, max_workers=args.jobs
+    )

     new_entries = spack.detection.update_configuration(
         detected_packages, scope=args.scope, buildable=not args.not_buildable
@@ -173,6 +151,19 @@ def external_find(args):
         tty.msg("No new external packages detected")


+def packages_to_search_for(
+    *, names: Optional[List[str]], tags: List[str], exclude: Optional[List[str]]
+):
+    result = []
+    for current_tag in tags:
+        result.extend(spack.repo.PATH.packages_with_tags(current_tag))
+    if names:
+        result = [x for x in result if x in names]
+    if exclude:
+        result = [x for x in result if x not in exclude]
+    return result
+
+
 def external_read_cray_manifest(args):
     _collect_and_consume_cray_manifest_files(
         manifest_file=args.file,
@@ -3,12 +3,11 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from .common import DetectedPackage, executable_prefix, update_configuration
-from .path import by_executable, by_library, executables_in_path
+from .path import by_path, executables_in_path

 __all__ = [
     "DetectedPackage",
-    "by_library",
-    "by_executable",
+    "by_path",
     "executables_in_path",
     "executable_prefix",
     "update_configuration",
@@ -38,6 +38,16 @@ class DetectedPackage(NamedTuple):
     #: Prefix of the spec
     prefix: str

+    def __reduce__(self):
+        return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)
+
+    @staticmethod
+    def restore(
+        spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
+    ) -> "DetectedPackage":
+        spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
+        return DetectedPackage(spec=spec, prefix=prefix)
+

 def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
     """Returns all the specs mentioned as externals in packages.yaml"""
@@ -383,7 +393,7 @@ def find_win32_additional_install_paths() -> List[str]:
     return windows_search_ext


-def compute_windows_program_path_for_package(pkg: spack.package_base.PackageBase) -> List[str]:
+def compute_windows_program_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
     """Given a package, attempts to compute its Windows program files location,
     and returns the list of best guesses.

@@ -403,7 +413,7 @@ def compute_windows_program_path_for_package(pkg: spack.package_base.PackageBase
     ]


-def compute_windows_user_path_for_package(pkg: spack.package_base.PackageBase) -> List[str]:
+def compute_windows_user_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
     """Given a package attempt to compute its user scoped
     install location, return list of potential locations based
     on common heuristics. For more info on Windows user specific
@@ -6,12 +6,13 @@
 and running executables.
 """
 import collections
+import concurrent.futures
 import os
 import os.path
 import re
 import sys
 import warnings
-from typing import Dict, List, Optional, Set
+from typing import Dict, List, Optional, Set, Tuple

 import llnl.util.filesystem
 import llnl.util.tty
@@ -32,6 +33,11 @@
     path_to_dict,
 )

+#: Timeout used for package detection (seconds)
+DETECTION_TIMEOUT = 60
+if sys.platform == "win32":
+    DETECTION_TIMEOUT = 120
+

 def common_windows_package_paths() -> List[str]:
     paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
@@ -116,215 +122,243 @@ def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
     return groups


-# TODO consolidate this with by_executable
-# Packages should be able to define both .libraries and .executables in the future
-# determine_spec_details should get all relevant libraries and executables in one call
-def by_library(
-    packages_to_check: List[spack.package_base.PackageBase], path_hints: Optional[List[str]] = None
-) -> Dict[str, List[DetectedPackage]]:
-    # Techniques for finding libraries is determined on a per recipe basis in
-    # the determine_version class method. Some packages will extract the
-    # version number from a shared libraries filename.
-    # Other libraries could use the strings function to extract it as described
-    # in https://unix.stackexchange.com/questions/58846/viewing-linux-library-executable-version-info
-    """Return the list of packages that have been detected on the system,
-    searching by LD_LIBRARY_PATH, LIBRARY_PATH, DYLD_LIBRARY_PATH,
-    DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths.
+class Finder:
+    """Inspects the file-system looking for packages. Guesses places where to look using PATH."""

-    Args:
-        packages_to_check: list of packages to be detected
-        path_hints: list of paths to be searched. If None the list will be
-            constructed based on the LD_LIBRARY_PATH, LIBRARY_PATH,
-            DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH environment variables
-            and standard system library paths.
-    """
-    # If no path hints from command line, intialize to empty list so
-    # we can add default hints on a per package basis
-    path_hints = [] if path_hints is None else path_hints
+    def path_hints(
+        self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
+    ) -> List[str]:
+        """Returns the list of paths to be searched.

-    lib_pattern_to_pkgs = collections.defaultdict(list)
-    for pkg in packages_to_check:
-        if hasattr(pkg, "libraries"):
-            for lib in pkg.libraries:
-                lib_pattern_to_pkgs[lib].append(pkg)
-        path_hints.extend(compute_windows_user_path_for_package(pkg))
-        path_hints.extend(compute_windows_program_path_for_package(pkg))
+        Args:
+            pkg: package being detected
+            initial_guess: initial list of paths from caller
+        """
+        result = initial_guess or []
+        result.extend(compute_windows_user_path_for_package(pkg))
+        result.extend(compute_windows_program_path_for_package(pkg))
+        return result

-    path_to_lib_name = (
-        libraries_in_ld_and_system_library_path(path_hints=path_hints)
-        if sys.platform != "win32"
-        else libraries_in_windows_paths(path_hints)
-    )
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
+        """Returns the list of patterns used to match candidate files.

-    pkg_to_found_libs = collections.defaultdict(set)
-    for lib_pattern, pkgs in lib_pattern_to_pkgs.items():
-        compiled_re = re.compile(lib_pattern)
-        for path, lib in path_to_lib_name.items():
-            if compiled_re.search(lib):
-                for pkg in pkgs:
-                    pkg_to_found_libs[pkg].add(path)
+        Args:
+            pkg: package being detected
+        """
+        raise NotImplementedError("must be implemented by derived classes")

-    pkg_to_entries = collections.defaultdict(list)
-    resolved_specs: Dict[spack.spec.Spec, str] = {}  # spec -> lib found for the spec
+    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
+        """Returns a list of candidate files found on the system.

-    for pkg, libs in pkg_to_found_libs.items():
+        Args:
+            patterns: search patterns to be used for matching files
+            paths: paths where to search for files
+        """
+        raise NotImplementedError("must be implemented by derived classes")
+
+    def prefix_from_path(self, *, path: str) -> str:
+        """Given a path where a file was found, returns the corresponding prefix.
+
+        Args:
+            path: path of a detected file
+        """
+        raise NotImplementedError("must be implemented by derived classes")
+
+    def detect_specs(
+        self, *, pkg: "spack.package_base.PackageBase", paths: List[str]
+    ) -> List[DetectedPackage]:
+        """Given a list of files matching the search patterns, returns a list of detected specs.
+
+        Args:
+            pkg: package being detected
+            paths: files matching the package search patterns
+        """
         if not hasattr(pkg, "determine_spec_details"):
-            llnl.util.tty.warn(
-                "{0} must define 'determine_spec_details' in order"
-                " for Spack to detect externally-provided instances"
-                " of the package.".format(pkg.name)
+            warnings.warn(
+                f"{pkg.name} must define 'determine_spec_details' in order"
+                f" for Spack to detect externally-provided instances"
+                f" of the package."
             )
-            continue
+            return []

-        for prefix, libs_in_prefix in sorted(_group_by_prefix(libs).items()):
-            try:
-                specs = _convert_to_iterable(pkg.determine_spec_details(prefix, libs_in_prefix))
-            except Exception as e:
-                specs = []
-                msg = 'error detecting "{0}" from prefix {1} [{2}]'
-                warnings.warn(msg.format(pkg.name, prefix, str(e)))
-
-            if not specs:
-                llnl.util.tty.debug(
-                    "The following libraries in {0} were decidedly not "
-                    "part of the package {1}: {2}".format(
-                        prefix, pkg.name, ", ".join(_convert_to_iterable(libs_in_prefix))
-                    )
-                )
-
-            for spec in specs:
-                pkg_prefix = library_prefix(prefix)
-
-                if not pkg_prefix:
-                    msg = "no lib/ or lib64/ dir found in {0}. Cannot "
-                    "add it as a Spack package"
-                    llnl.util.tty.debug(msg.format(prefix))
-                    continue
-
-                if spec in resolved_specs:
-                    prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
-
-                    llnl.util.tty.debug(
-                        "Libraries in {0} and {1} are both associated"
-                        " with the same spec {2}".format(prefix, prior_prefix, str(spec))
-                    )
-                    continue
-                else:
-                    resolved_specs[spec] = prefix
-
-                try:
-                    spec.validate_detection()
-                except Exception as e:
-                    msg = (
-                        '"{0}" has been detected on the system but will '
-                        "not be added to packages.yaml [reason={1}]"
-                    )
-                    llnl.util.tty.warn(msg.format(spec, str(e)))
-                    continue
-
-                if spec.external_path:
-                    pkg_prefix = spec.external_path
-
-                pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
-
-    return pkg_to_entries
-
-
-def by_executable(
-    packages_to_check: List[spack.package_base.PackageBase], path_hints: Optional[List[str]] = None
-) -> Dict[str, List[DetectedPackage]]:
-    """Return the list of packages that have been detected on the system,
-    searching by path.
-
-    Args:
-        packages_to_check: list of package classes to be detected
-        path_hints: list of paths to be searched. If None the list will be
-            constructed based on the PATH environment variable.
-    """
-    path_hints = spack.util.environment.get_path("PATH") if path_hints is None else path_hints
-    exe_pattern_to_pkgs = collections.defaultdict(list)
-    for pkg in packages_to_check:
-        if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
-            for exe in pkg.platform_executables():
-                exe_pattern_to_pkgs[exe].append(pkg)
-        # Add Windows specific, package related paths to the search paths
-        path_hints.extend(compute_windows_user_path_for_package(pkg))
-        path_hints.extend(compute_windows_program_path_for_package(pkg))
-
-    path_to_exe_name = executables_in_path(path_hints=path_hints)
-    pkg_to_found_exes = collections.defaultdict(set)
-    for exe_pattern, pkgs in exe_pattern_to_pkgs.items():
-        compiled_re = re.compile(exe_pattern)
-        for path, exe in path_to_exe_name.items():
-            if compiled_re.search(exe):
-                for pkg in pkgs:
-                    pkg_to_found_exes[pkg].add(path)
-
-    pkg_to_entries = collections.defaultdict(list)
-    resolved_specs: Dict[spack.spec.Spec, str] = {}  # spec -> exe found for the spec
-
-    for pkg, exes in pkg_to_found_exes.items():
-        if not hasattr(pkg, "determine_spec_details"):
-            llnl.util.tty.warn(
-                "{0} must define 'determine_spec_details' in order"
-                " for Spack to detect externally-provided instances"
-                " of the package.".format(pkg.name)
-            )
-            continue
-
-        for prefix, exes_in_prefix in sorted(_group_by_prefix(exes).items()):
+        result = []
+        for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
             # TODO: multiple instances of a package can live in the same
             # prefix, and a package implementation can return multiple specs
             # for one prefix, but without additional details (e.g. about the
             # naming scheme which differentiates them), the spec won't be
             # usable.
             try:
-                specs = _convert_to_iterable(pkg.determine_spec_details(prefix, exes_in_prefix))
+                specs = _convert_to_iterable(
+                    pkg.determine_spec_details(candidate_path, items_in_prefix)
+                )
             except Exception as e:
                 specs = []
-                msg = 'error detecting "{0}" from prefix {1} [{2}]'
-                warnings.warn(msg.format(pkg.name, prefix, str(e)))
+                warnings.warn(
+                    f'error detecting "{pkg.name}" from prefix {candidate_path} [{str(e)}]'
+                )

             if not specs:
-                llnl.util.tty.debug(
-                    "The following executables in {0} were decidedly not "
-                    "part of the package {1}: {2}".format(
-                        prefix, pkg.name, ", ".join(_convert_to_iterable(exes_in_prefix))
-                    )
-                )
-
-            for spec in specs:
-                pkg_prefix = executable_prefix(prefix)
-
-                if not pkg_prefix:
-                    msg = "no bin/ dir found in {0}. Cannot add it as a Spack package"
-                    llnl.util.tty.debug(msg.format(prefix))
+                files = ", ".join(_convert_to_iterable(items_in_prefix))
+                llnl.util.tty.debug(
+                    f"The following files in {candidate_path} were decidedly not "
+                    f"part of the package {pkg.name}: {files}"
+                )
+
+            resolved_specs: Dict[spack.spec.Spec, str] = {}  # spec -> exe found for the spec
+            for spec in specs:
+                prefix = self.prefix_from_path(path=candidate_path)
+                if not prefix:
                     continue

                 if spec in resolved_specs:
                     prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
-
                     llnl.util.tty.debug(
-                        "Executables in {0} and {1} are both associated"
-                        " with the same spec {2}".format(prefix, prior_prefix, str(spec))
+                        f"Files in {candidate_path} and {prior_prefix} are both associated"
+                        f" with the same spec {str(spec)}"
                     )
                     continue
-                else:
-                    resolved_specs[spec] = prefix

+                resolved_specs[spec] = candidate_path
                 try:
                     spec.validate_detection()
                 except Exception as e:
                     msg = (
-                        '"{0}" has been detected on the system but will '
-                        "not be added to packages.yaml [reason={1}]"
+                        f'"{spec}" has been detected on the system but will '
+                        f"not be added to packages.yaml [reason={str(e)}]"
                     )
-                    llnl.util.tty.warn(msg.format(spec, str(e)))
+                    warnings.warn(msg)
                     continue

                 if spec.external_path:
-                    pkg_prefix = spec.external_path
+                    prefix = spec.external_path

-                pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
+                result.append(DetectedPackage(spec=spec, prefix=prefix))

-    return pkg_to_entries
+        return result
+
+    def find(
+        self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
+    ) -> List[DetectedPackage]:
+        """For a given package, returns a list of detected specs.
+
+        Args:
+            pkg_name: package being detected
+            initial_guess: initial list of paths to search from the caller
+        """
+        import spack.repo
+
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        patterns = self.search_patterns(pkg=pkg_cls)
+        if not patterns:
+            return []
+        path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
+        candidates = self.candidate_files(patterns=patterns, paths=path_hints)
+        result = self.detect_specs(pkg=pkg_cls, paths=candidates)
+        return result
+
+
+class ExecutablesFinder(Finder):
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
+        result = []
+        if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
+            result = pkg.platform_executables()
+        return result
+
+    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
+        executables_by_path = executables_in_path(path_hints=paths)
+        patterns = [re.compile(x) for x in patterns]
+        result = []
+        for compiled_re in patterns:
+            for path, exe in executables_by_path.items():
+                if compiled_re.search(exe):
+                    result.append(path)
+        return list(sorted(set(result)))
+
+    def prefix_from_path(self, *, path: str) -> str:
+        result = executable_prefix(path)
+        if not result:
+            msg = f"no bin/ dir found in {path}. Cannot add it as a Spack package"
+            llnl.util.tty.debug(msg)
+        return result
+
+
+class LibrariesFinder(Finder):
+    """Finds libraries on the system, searching by LD_LIBRARY_PATH, LIBRARY_PATH,
+    DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
+    """
+
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
+        result = []
+        if hasattr(pkg, "libraries"):
+            result = pkg.libraries
+        return result
+
+    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
+        libraries_by_path = (
+            libraries_in_ld_and_system_library_path(path_hints=paths)
+            if sys.platform != "win32"
+            else libraries_in_windows_paths(paths)
+        )
+        patterns = [re.compile(x) for x in patterns]
+        result = []
+        for compiled_re in patterns:
+            for path, exe in libraries_by_path.items():
+                if compiled_re.search(exe):
+                    result.append(path)
+        return result
+
+    def prefix_from_path(self, *, path: str) -> str:
+        result = library_prefix(path)
+        if not result:
+            msg = f"no lib/ or lib64/ dir found in {path}. Cannot add it as a Spack package"
+            llnl.util.tty.debug(msg)
+        return result
+
+
+def by_path(
+    packages_to_search: List[str],
+    *,
+    path_hints: Optional[List[str]] = None,
+    max_workers: Optional[int] = None,
+) -> Dict[str, List[DetectedPackage]]:
+    """Return the list of packages that have been detected on the system,
+    searching by path.
+
+    Args:
+        packages_to_search: list of package classes to be detected
+        path_hints: initial list of paths to be searched
+    """
+    # TODO: Packages should be able to define both .libraries and .executables in the future
+    # TODO: determine_spec_details should get all relevant libraries and executables in one call
+    executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
+
+    executables_path_guess = (
+        spack.util.environment.get_path("PATH") if path_hints is None else path_hints
+    )
+    libraries_path_guess = [] if path_hints is None else path_hints
+    detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
+
+    result = collections.defaultdict(list)
+    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
+        for pkg in packages_to_search:
+            executable_future = executor.submit(
+                executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
+            )
+            library_future = executor.submit(
+                libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
+            )
+            detected_specs_by_package[pkg] = executable_future, library_future
+
+        for pkg_name, futures in detected_specs_by_package.items():
+            for future in futures:
+                try:
+                    detected = future.result(timeout=DETECTION_TIMEOUT)
+                    if detected:
+                        result[pkg_name].extend(detected)
+                except Exception:
+                    llnl.util.tty.debug(
+                        f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
+                    )
+
+    return result
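The tests later in this diff drive the finders directly, which is also a convenient way to see the template-method split in action: the subclass supplies search_patterns, candidate_files, and prefix_from_path, while Finder.find() strings them together. A short usage sketch (the search path below is just a placeholder):

import spack.detection.path

finder = spack.detection.path.ExecutablesFinder()
# find() imports the package class, builds path hints, matches candidate files,
# and returns DetectedPackage entries for anything that validates.
detected = finder.find(pkg_name="cmake", initial_guess=["/usr"])
for item in detected:
    print(item.spec, item.prefix)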
@@ -180,6 +180,8 @@ class DetectablePackageMeta:
     for the detection function.
     """

+    TAG = "detectable"
+
     def __init__(cls, name, bases, attr_dict):
         if hasattr(cls, "executables") and hasattr(cls, "libraries"):
             msg = "a package can have either an 'executables' or 'libraries' attribute"
@@ -195,6 +197,11 @@ def __init__(cls, name, bases, attr_dict):
         # If a package has the executables or libraries attribute then it's
         # assumed to be detectable
         if hasattr(cls, "executables") or hasattr(cls, "libraries"):
+            # Append a tag to each detectable package, so that finding them is faster
+            if hasattr(cls, "tags"):
+                getattr(cls, "tags").append(DetectablePackageMeta.TAG)
+            else:
+                setattr(cls, "tags", [DetectablePackageMeta.TAG])
+
             @classmethod
             def platform_executables(cls):
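The "detectable" tag appended by the metaclass above is what lets spack external find build its candidate list from the cached tag index instead of importing every package module (see packages_to_search_for earlier in this diff). A small usage sketch, assuming it runs inside a Spack session where spack.repo.PATH is initialized:

import spack.repo

# Names (not classes) of every package that implements the detection protocol.
# The tag index is cached, so this does not import the package modules.
detectable = sorted(spack.repo.PATH.packages_with_tags("detectable"))
print(f"{len(detectable)} detectable packages, e.g. {detectable[:5]}")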
@@ -42,45 +42,36 @@ def define_plat_exe(exe):
     return exe


-def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
-    pkgs_to_check = [spack.repo.PATH.get_pkg_class("cmake")]
+@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
+def test_find_external_single_package(mock_executable):
     cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
-    executables_found({str(cmake_path): define_plat_exe("cmake")})
+    search_dir = cmake_path.parent.parent

-    pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
+    specs_by_package = spack.detection.by_path(["cmake"], path_hints=[str(search_dir)])

-    pkg, entries = next(iter(pkg_to_entries.items()))
-    single_entry = next(iter(entries))
-
-    assert single_entry.spec == Spec("cmake@1.foo")
+    assert len(specs_by_package) == 1 and "cmake" in specs_by_package
+    detected_spec = specs_by_package["cmake"]
+    assert len(detected_spec) == 1 and detected_spec[0].spec == Spec("cmake@1.foo")


-def test_find_external_two_instances_same_package(
-    mock_executable, executables_found, _platform_executables
-):
-    pkgs_to_check = [spack.repo.PATH.get_pkg_class("cmake")]
-
+def test_find_external_two_instances_same_package(mock_executable, _platform_executables):
     # Each of these cmake instances is created in a different prefix
     # In Windows, quoted strings are echo'd with quotes includes
     # we need to avoid that for proper regex.
-    cmake_path1 = mock_executable(
-        "cmake", output="echo cmake version 1.foo", subdir=("base1", "bin")
-    )
-    cmake_path2 = mock_executable(
-        "cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin")
-    )
-    cmake_exe = define_plat_exe("cmake")
-    executables_found({str(cmake_path1): cmake_exe, str(cmake_path2): cmake_exe})
+    cmake1 = mock_executable("cmake", output="echo cmake version 1.foo", subdir=("base1", "bin"))
+    cmake2 = mock_executable("cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin"))
+    search_paths = [str(cmake1.parent.parent), str(cmake2.parent.parent)]

-    pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
+    finder = spack.detection.path.ExecutablesFinder()
+    detected_specs = finder.find(pkg_name="cmake", initial_guess=search_paths)

-    pkg, entries = next(iter(pkg_to_entries.items()))
-    spec_to_path = dict((e.spec, e.prefix) for e in entries)
+    assert len(detected_specs) == 2
+    spec_to_path = {e.spec: e.prefix for e in detected_specs}
     assert spec_to_path[Spec("cmake@1.foo")] == (
-        spack.detection.executable_prefix(os.path.dirname(cmake_path1))
+        spack.detection.executable_prefix(str(cmake1.parent))
     )
     assert spec_to_path[Spec("cmake@3.17.2")] == (
-        spack.detection.executable_prefix(os.path.dirname(cmake_path2))
+        spack.detection.executable_prefix(str(cmake2.parent))
     )
@@ -112,23 +103,6 @@ def test_get_executables(working_env, mock_executable):
 external = SpackCommand("external")


-def test_find_external_cmd(mutable_config, working_env, mock_executable, _platform_executables):
-    """Test invoking 'spack external find' with additional package arguments,
-    which restricts the set of packages that Spack looks for.
-    """
-    cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
-    prefix = os.path.dirname(os.path.dirname(cmake_path1))
-
-    os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
-    external("find", "cmake")
-
-    pkgs_cfg = spack.config.get("packages")
-    cmake_cfg = pkgs_cfg["cmake"]
-    cmake_externals = cmake_cfg["externals"]
-
-    assert {"spec": "cmake@1.foo", "prefix": prefix} in cmake_externals
-
-
 def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_executable):
     """When the user invokes 'spack external find --not-buildable', the config
     for any package where Spack finds an external version should be marked as
@@ -138,50 +112,29 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu
     os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
     external("find", "--not-buildable", "cmake")
     pkgs_cfg = spack.config.get("packages")
     assert "cmake" in pkgs_cfg
     assert not pkgs_cfg["cmake"]["buildable"]


-def test_find_external_cmd_full_repo(
-    mutable_config, working_env, mock_executable, mutable_mock_repo, _platform_executables
-):
-    """Test invoking 'spack external find' with no additional arguments, which
-    iterates through each package in the repository.
-    """
-    exe_path1 = mock_executable("find-externals1-exe", output="echo find-externals1 version 1.foo")
-    prefix = os.path.dirname(os.path.dirname(exe_path1))
-    os.environ["PATH"] = os.pathsep.join([os.path.dirname(exe_path1)])
-    external("find", "--all")
-
-    pkgs_cfg = spack.config.get("packages")
-    pkg_cfg = pkgs_cfg["find-externals1"]
-    pkg_externals = pkg_cfg["externals"]
-
-    assert {"spec": "find-externals1@1.foo", "prefix": prefix} in pkg_externals
+@pytest.mark.parametrize(
+    "names,tags,exclude,expected",
+    [
+        # find --all
+        (None, ["detectable"], [], ["find-externals1"]),
+        # find --all --exclude find-externals1
+        (None, ["detectable"], ["find-externals1"], []),
+        # find cmake (and cmake is not detectable)
+        (["cmake"], ["detectable"], [], []),
+    ],
+)
+def test_package_selection(names, tags, exclude, expected, mutable_mock_repo):
+    """Tests various cases of selecting packages"""
+    # In the mock repo we only have 'find-externals1' that is detectable
+    result = spack.cmd.external.packages_to_search_for(names=names, tags=tags, exclude=exclude)
+    assert set(result) == set(expected)


 def test_find_external_cmd_exclude(
     mutable_config, working_env, mock_executable, mutable_mock_repo, _platform_executables
 ):
     """Test invoking 'spack external find --all --exclude', to ensure arbitary
     external packages can be ignored.
     """
     exe_path1 = mock_executable("find-externals1-exe", output="echo find-externals1 version 1.foo")
     os.environ["PATH"] = os.pathsep.join([os.path.dirname(exe_path1)])
     external("find", "--all", "--exclude=find-externals1")

     pkgs_cfg = spack.config.get("packages")

     assert "find-externals1" not in pkgs_cfg.keys()


-def test_find_external_no_manifest(
-    mutable_config,
-    working_env,
-    mock_executable,
-    mutable_mock_repo,
-    _platform_executables,
-    monkeypatch,
-):
+def test_find_external_no_manifest(mutable_config, working_env, mutable_mock_repo, monkeypatch):
     """The user runs 'spack external find'; the default path for storing
     manifest files does not exist. Ensure that the command does not
     fail.
@@ -194,13 +147,7 @@ def test_find_external_no_manifest(


 def test_find_external_empty_default_manifest_dir(
-    mutable_config,
-    working_env,
-    mock_executable,
-    mutable_mock_repo,
-    _platform_executables,
-    tmpdir,
-    monkeypatch,
+    mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
 ):
     """The user runs 'spack external find'; the default path for storing
     manifest files exists but is empty. Ensure that the command does not
@@ -215,13 +162,7 @@ def test_find_external_empty_default_manifest_dir(
 @pytest.mark.not_on_windows("Can't chmod on Windows")
 @pytest.mark.skipif(getuid() == 0, reason="user is root")
 def test_find_external_manifest_with_bad_permissions(
-    mutable_config,
-    working_env,
-    mock_executable,
-    mutable_mock_repo,
-    _platform_executables,
-    tmpdir,
-    monkeypatch,
+    mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
 ):
     """The user runs 'spack external find'; the default path for storing
     manifest files exists but with insufficient permissions. Check that
@@ -262,12 +203,7 @@ def fail():


 def test_find_external_nonempty_default_manifest_dir(
-    mutable_database,
-    mutable_mock_repo,
-    _platform_executables,
-    tmpdir,
-    monkeypatch,
-    directory_with_manifest,
+    mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
 ):
     """The user runs 'spack external find'; the default manifest directory
     contains a manifest file. Ensure that the specs are read.
@@ -312,6 +248,7 @@ def test_list_detectable_packages(mutable_config, mutable_mock_repo):
     assert external.returncode == 0


+@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
 def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _platform_executables):
     # Prepare an environment to detect a fake gcc
     gcc_exe = mock_executable("gcc", output="echo 4.2.1")
@@ -337,11 +274,8 @@ def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _pla


 def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):
-    # Prepare an environment to detect a fake gcc that
-    # override its external prefix
     gcc_exe = mock_executable("gcc", output="echo 4.2.1")
-    prefix = os.path.dirname(gcc_exe)
-    monkeypatch.setenv("PATH", prefix)
+    search_dir = gcc_exe.parent

     @classmethod
     def _determine_variants(cls, exes, version_str):
@@ -350,18 +284,17 @@ def _determine_variants(cls, exes, version_str):
     gcc_cls = spack.repo.PATH.get_pkg_class("gcc")
     monkeypatch.setattr(gcc_cls, "determine_variants", _determine_variants)

-    # Find the external spec
-    external("find", "gcc")
+    finder = spack.detection.path.ExecutablesFinder()
+    detected_specs = finder.find(pkg_name="gcc", initial_guess=[str(search_dir)])

-    # Check entries in 'packages.yaml'
-    packages_yaml = spack.config.get("packages")
-    assert "gcc" in packages_yaml
-    assert "externals" in packages_yaml["gcc"]
-    externals = packages_yaml["gcc"]["externals"]
-    assert len(externals) == 1
-    assert externals[0]["prefix"] == os.path.sep + os.path.join("opt", "gcc", "bin")
+    assert len(detected_specs) == 1
+
+    gcc = detected_specs[0].spec
+    assert gcc.name == "gcc"
+    assert gcc.external_path == os.path.sep + os.path.join("opt", "gcc", "bin")


+@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
 def test_new_entries_are_reported_correctly(
     mock_executable, mutable_config, monkeypatch, _platform_executables
 ):
@@ -1943,7 +1943,7 @@ def test_external_python_extension_find_dependency_from_detection(self, monkeypa
         def find_fake_python(classes, path_hints):
             return {"python": [spack.detection.DetectedPackage(python_spec, prefix=path_hints[0])]}

-        monkeypatch.setattr(spack.detection, "by_executable", find_fake_python)
+        monkeypatch.setattr(spack.detection, "by_path", find_fake_python)
         external_conf = {
             "py-extension1": {
                 "buildable": False,
@@ -1119,7 +1119,7 @@ _spack_external() {
 _spack_external_find() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help --not-buildable --exclude -p --path --scope --all -t --tag"
+        SPACK_COMPREPLY="-h --help --not-buildable --exclude -p --path --scope --all -t --tag -j --jobs"
     else
         _all_packages
     fi
@@ -1586,7 +1586,7 @@ complete -c spack -n '__fish_spack_using_command external' -s h -l help -f -a he
 complete -c spack -n '__fish_spack_using_command external' -s h -l help -d 'show this help message and exit'

 # spack external find
-set -g __fish_spack_optspecs_spack_external_find h/help not-buildable exclude= p/path= scope= all t/tag=
+set -g __fish_spack_optspecs_spack_external_find h/help not-buildable exclude= p/path= scope= all t/tag= j/jobs=

 complete -c spack -n '__fish_spack_using_command external find' -s h -l help -f -a help
 complete -c spack -n '__fish_spack_using_command external find' -s h -l help -d 'show this help message and exit'
@@ -1602,6 +1602,8 @@ complete -c spack -n '__fish_spack_using_command external find' -l all -f -a all
 complete -c spack -n '__fish_spack_using_command external find' -l all -d 'search for all packages that Spack knows about'
 complete -c spack -n '__fish_spack_using_command external find' -s t -l tag -r -f -a tags
 complete -c spack -n '__fish_spack_using_command external find' -s t -l tag -r -d 'filter a package query by tag (multiple use allowed)'
+complete -c spack -n '__fish_spack_using_command external find' -s j -l jobs -r -f -a jobs
+complete -c spack -n '__fish_spack_using_command external find' -s j -l jobs -r -d 'explicitly set number of parallel jobs'

 # spack external list
 set -g __fish_spack_optspecs_spack_external_list h/help
@@ -1020,11 +1020,11 @@ def detect_gdc(self):
         """
         # Detect GCC package in the directory of the GCC compiler
         # or in the $PATH if self.compiler.cc is not an absolute path:
-        from spack.detection import by_executable
+        from spack.detection import by_path

         compiler_dir = os.path.dirname(self.compiler.cc)
-        detected_packages = by_executable(
-            [self.__class__], path_hints=([compiler_dir] if os.path.isdir(compiler_dir) else None)
+        detected_packages = by_path(
+            [self.name], path_hints=([compiler_dir] if os.path.isdir(compiler_dir) else None)
         )

         # We consider only packages that satisfy the following constraint: