diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml index 828cc44773..ad70cd82ee 100644 --- a/.github/workflows/unit_tests.yaml +++ b/.github/workflows/unit_tests.yaml @@ -145,7 +145,7 @@ jobs: shell: runuser -u spack-test -- bash {0} run: | source share/spack/setup-env.sh - spack -d solve zlib + spack -d bootstrap now --dev spack unit-test -k 'not cvs and not svn and not hg' -x --verbose # Test for the clingo based solver (using clingo-cffi) clingo-cffi: diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py deleted file mode 100644 index c4cd1b1dd7..0000000000 --- a/lib/spack/spack/bootstrap.py +++ /dev/null @@ -1,1066 +0,0 @@ -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) -from __future__ import print_function - -import contextlib -import copy -import fnmatch -import functools -import json -import os -import os.path -import platform -import re -import sys -import sysconfig -import uuid -from typing import List - -import archspec.cpu - -import llnl.util.filesystem as fs -import llnl.util.tty as tty -from llnl.util.lang import GroupedExceptionHandler - -import spack.binary_distribution -import spack.config -import spack.detection -import spack.environment -import spack.modules -import spack.paths -import spack.platforms -import spack.repo -import spack.spec -import spack.store -import spack.user_environment -import spack.util.environment -import spack.util.executable -import spack.util.path -import spack.util.spack_yaml -import spack.util.url -import spack.version - -#: Name of the file containing metadata about the bootstrapping source -METADATA_YAML_FILENAME = "metadata.yaml" - -is_windows = sys.platform == "win32" - -#: Map a bootstrapper type to the corresponding class -_bootstrap_methods = {} - - -def _bootstrapper(type): - """Decorator to register classes implementing bootstrapping - methods. - - Args: - type (str): string identifying the class - """ - - def _register(cls): - _bootstrap_methods[type] = cls - return cls - - return _register - - -def _try_import_from_store(module, query_spec, query_info=None): - """Return True if the module can be imported from an already - installed spec, False otherwise. 
- - Args: - module: Python module to be imported - query_spec: spec that may provide the module - query_info (dict or None): if a dict is passed it is populated with the - command found and the concrete spec providing it - """ - # If it is a string assume it's one of the root specs by this module - if isinstance(query_spec, str): - # We have to run as part of this python interpreter - query_spec += " ^" + spec_for_current_python() - - installed_specs = spack.store.db.query(query_spec, installed=True) - - for candidate_spec in installed_specs: - pkg = candidate_spec["python"].package - module_paths: List[str] = [ - os.path.join(candidate_spec.prefix, pkg.purelib), - os.path.join(candidate_spec.prefix, pkg.platlib), - ] - path_before = list(sys.path) - - # NOTE: try module_paths first and last, last allows an existing version in path - # to be picked up and used, possibly depending on something in the store, first - # allows the bootstrap version to work when an incompatible version is in - # sys.path - orders = [ - module_paths + sys.path, - sys.path + module_paths, - ] - for path in orders: - sys.path = path - try: - _fix_ext_suffix(candidate_spec) - if _python_import(module): - msg = ( - '[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" ' - 'provides the "{0}" Python module' - ).format(module, query_spec, candidate_spec.dag_hash()) - tty.debug(msg) - if query_info is not None: - query_info["spec"] = candidate_spec - return True - except Exception as e: - msg = ( - "unexpected error while trying to import module " - '"{0}" from spec "{1}" [error="{2}"]' - ) - tty.warn(msg.format(module, candidate_spec, str(e))) - else: - msg = "Spec {0} did not provide module {1}" - tty.warn(msg.format(candidate_spec, module)) - - sys.path = path_before - - return False - - -def _fix_ext_suffix(candidate_spec): - """Fix the external suffixes of Python extensions on the fly for - platforms that may need it - - Args: - candidate_spec (Spec): installed spec with a Python module - to be checked. - """ - # Here we map target families to the patterns expected - # by pristine CPython. Only architectures with known issues - # are included. Known issues: - # - # [RHEL + ppc64le]: https://github.com/spack/spack/issues/25734 - # - _suffix_to_be_checked = { - "ppc64le": { - "glob": "*.cpython-*-powerpc64le-linux-gnu.so", - "re": r".cpython-[\w]*-powerpc64le-linux-gnu.so", - "fmt": r"{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so", - } - } - - # If the current architecture is not problematic return - generic_target = archspec.cpu.host().family - if str(generic_target) not in _suffix_to_be_checked: - return - - # If there's no EXT_SUFFIX (Python < 3.5) or the suffix matches - # the expectations, return since the package is surely good - ext_suffix = sysconfig.get_config_var("EXT_SUFFIX") - if ext_suffix is None: - return - - expected = _suffix_to_be_checked[str(generic_target)] - if fnmatch.fnmatch(ext_suffix, expected["glob"]): - return - - # If we are here it means the current interpreter expects different names - # than pristine CPython. So: - # 1. Find what we have installed - # 2. 
Create symbolic links for the other names, it they're not there already - - # Check if standard names are installed and if we have to create - # link for this interpreter - standard_extensions = fs.find(candidate_spec.prefix, expected["glob"]) - link_names = [re.sub(expected["re"], ext_suffix, s) for s in standard_extensions] - for file_name, link_name in zip(standard_extensions, link_names): - if os.path.exists(link_name): - continue - os.symlink(file_name, link_name) - - # Check if this interpreter installed something and we have to create - # links for a standard CPython interpreter - non_standard_extensions = fs.find(candidate_spec.prefix, "*" + ext_suffix) - for abs_path in non_standard_extensions: - directory, filename = os.path.split(abs_path) - module = filename.split(".")[0] - link_name = os.path.join( - directory, - expected["fmt"].format( - module=module, major=sys.version_info[0], minor=sys.version_info[1] - ), - ) - if os.path.exists(link_name): - continue - os.symlink(abs_path, link_name) - - -def _executables_in_store(executables, query_spec, query_info=None): - """Return True if at least one of the executables can be retrieved from - a spec in store, False otherwise. - - The different executables must provide the same functionality and are - "alternate" to each other, i.e. the function will exit True on the first - executable found. - - Args: - executables: list of executables to be searched - query_spec: spec that may provide the executable - query_info (dict or None): if a dict is passed it is populated with the - command found and the concrete spec providing it - """ - executables_str = ", ".join(executables) - msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'" - tty.debug(msg.format(executables_str, query_spec)) - installed_specs = spack.store.db.query(query_spec, installed=True) - if installed_specs: - for concrete_spec in installed_specs: - bin_dir = concrete_spec.prefix.bin - # IF we have a "bin" directory and it contains - # the executables we are looking for - if ( - os.path.exists(bin_dir) - and os.path.isdir(bin_dir) - and spack.util.executable.which_string(*executables, path=bin_dir) - ): - spack.util.environment.path_put_first("PATH", [bin_dir]) - if query_info is not None: - query_info["command"] = spack.util.executable.which(*executables, path=bin_dir) - query_info["spec"] = concrete_spec - return True - return False - - -class _BootstrapperBase(object): - """Base class to derive types that can bootstrap software for Spack""" - - config_scope_name = "" - - def __init__(self, conf): - self.name = conf["name"] - self.url = conf["info"]["url"] - - @property - def mirror_url(self): - # Absolute paths - if os.path.isabs(self.url): - return spack.util.url.format(self.url) - - # Check for :// and assume it's an url if we find it - if "://" in self.url: - return self.url - - # Otherwise, it's a relative path - return spack.util.url.format(os.path.join(self.metadata_dir, self.url)) - - @property - def mirror_scope(self): - return spack.config.InternalConfigScope( - self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}} - ) - - -@_bootstrapper(type="buildcache") -class _BuildcacheBootstrapper(_BootstrapperBase): - """Install the software needed during bootstrapping from a buildcache.""" - - def __init__(self, conf): - super(_BuildcacheBootstrapper, self).__init__(conf) - self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"]) - self.last_search = None - self.config_scope_name = 
"bootstrap_buildcache-{}".format(uuid.uuid4()) - - @staticmethod - def _spec_and_platform(abstract_spec_str): - """Return the spec object and platform we need to use when - querying the buildcache. - - Args: - abstract_spec_str: abstract spec string we are looking for - """ - # This import is local since it is needed only on Cray - import spack.platforms.linux - - # Try to install from an unsigned binary cache - abstract_spec = spack.spec.Spec(abstract_spec_str) - # On Cray we want to use Linux binaries if available from mirrors - bincache_platform = spack.platforms.real_host() - return abstract_spec, bincache_platform - - def _read_metadata(self, package_name): - """Return metadata about the given package.""" - json_filename = "{0}.json".format(package_name) - json_dir = self.metadata_dir - json_path = os.path.join(json_dir, json_filename) - with open(json_path) as f: - data = json.load(f) - return data - - def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform): - index_spec = next(x for x in index if x.dag_hash() == pkg_hash) - # Reconstruct the compiler that we need to use for bootstrapping - compiler_entry = { - "modules": [], - "operating_system": str(index_spec.os), - "paths": { - "cc": "/dev/null", - "cxx": "/dev/null", - "f77": "/dev/null", - "fc": "/dev/null", - }, - "spec": str(index_spec.compiler), - "target": str(index_spec.target.family), - } - with spack.platforms.use_platform(bincache_platform): - with spack.config.override("compilers", [{"compiler": compiler_entry}]): - spec_str = "/" + pkg_hash - query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True) - matches = spack.store.find([spec_str], multiple=False, query_fn=query) - for match in matches: - spack.binary_distribution.install_root_node( - match, allow_root=True, unsigned=True, force=True, sha256=pkg_sha256 - ) - - def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, test_fn): - # Ensure we see only the buildcache being used to bootstrap - with spack.config.override(self.mirror_scope): - # This index is currently needed to get the compiler used to build some - # specs that we know by dag hash. - spack.binary_distribution.binary_index.regenerate_spec_cache() - index = spack.binary_distribution.update_cache_and_get_specs() - - if not index: - raise RuntimeError("The binary index is empty") - - for item in bincache_data["verified"]: - candidate_spec = item["spec"] - # This will be None for things that don't depend on python - python_spec = item.get("python", None) - # Skip specs which are not compatible - if not abstract_spec.satisfies(candidate_spec): - continue - - if python_spec is not None and python_spec not in abstract_spec: - continue - - for pkg_name, pkg_hash, pkg_sha256 in item["binaries"]: - # TODO: undo installations that didn't complete? 
- self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform) - - info = {} - if test_fn(query_spec=abstract_spec, query_info=info): - self.last_search = info - return True - return False - - def try_import(self, module, abstract_spec_str): - test_fn, info = functools.partial(_try_import_from_store, module), {} - if test_fn(query_spec=abstract_spec_str, query_info=info): - return True - - tty.info("Bootstrapping {0} from pre-built binaries".format(module)) - abstract_spec, bincache_platform = self._spec_and_platform( - abstract_spec_str + " ^" + spec_for_current_python() - ) - data = self._read_metadata(module) - return self._install_and_test(abstract_spec, bincache_platform, data, test_fn) - - def try_search_path(self, executables, abstract_spec_str): - test_fn, info = functools.partial(_executables_in_store, executables), {} - if test_fn(query_spec=abstract_spec_str, query_info=info): - self.last_search = info - return True - - abstract_spec, bincache_platform = self._spec_and_platform(abstract_spec_str) - tty.info("Bootstrapping {0} from pre-built binaries".format(abstract_spec.name)) - data = self._read_metadata(abstract_spec.name) - return self._install_and_test(abstract_spec, bincache_platform, data, test_fn) - - -@_bootstrapper(type="install") -class _SourceBootstrapper(_BootstrapperBase): - """Install the software needed during bootstrapping from sources.""" - - def __init__(self, conf): - super(_SourceBootstrapper, self).__init__(conf) - self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"]) - self.conf = conf - self.last_search = None - self.config_scope_name = "bootstrap_source-{}".format(uuid.uuid4()) - - def try_import(self, module, abstract_spec_str): - info = {} - if _try_import_from_store(module, abstract_spec_str, query_info=info): - self.last_search = info - return True - - tty.info("Bootstrapping {0} from sources".format(module)) - - # If we compile code from sources detecting a few build tools - # might reduce compilation time by a fair amount - _add_externals_if_missing() - - # Try to build and install from sources - with spack_python_interpreter(): - # Add hint to use frontend operating system on Cray - concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python()) - - if module == "clingo": - # TODO: remove when the old concretizer is deprecated - concrete_spec._old_concretize(deprecation_warning=False) - else: - concrete_spec.concretize() - - msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources" - tty.debug(msg.format(module, abstract_spec_str)) - - # Install the spec that should make the module importable - with spack.config.override(self.mirror_scope): - concrete_spec.package.do_install(fail_fast=True) - - if _try_import_from_store(module, query_spec=concrete_spec, query_info=info): - self.last_search = info - return True - return False - - def try_search_path(self, executables, abstract_spec_str): - info = {} - if _executables_in_store(executables, abstract_spec_str, query_info=info): - self.last_search = info - return True - - tty.info("Bootstrapping {0} from sources".format(abstract_spec_str)) - - # If we compile code from sources detecting a few build tools - # might reduce compilation time by a fair amount - _add_externals_if_missing() - - concrete_spec = spack.spec.Spec(abstract_spec_str) - if concrete_spec.name == "patchelf": - concrete_spec._old_concretize(deprecation_warning=False) - else: - concrete_spec.concretize() - - msg = "[BOOTSTRAP] Try installing '{0}' from sources" - 
tty.debug(msg.format(abstract_spec_str)) - with spack.config.override(self.mirror_scope): - concrete_spec.package.do_install() - if _executables_in_store(executables, concrete_spec, query_info=info): - self.last_search = info - return True - return False - - -def _make_bootstrapper(conf): - """Return a bootstrap object built according to the - configuration argument - """ - btype = conf["type"] - return _bootstrap_methods[btype](conf) - - -def source_is_enabled_or_raise(conf): - """Raise ValueError if the source is not enabled for bootstrapping""" - trusted, name = spack.config.get("bootstrap:trusted"), conf["name"] - if not trusted.get(name, False): - raise ValueError("source is not trusted") - - -def spec_for_current_python(): - """For bootstrapping purposes we are just interested in the Python - minor version (all patches are ABI compatible with the same minor). - - See: - https://www.python.org/dev/peps/pep-0513/ - https://stackoverflow.com/a/35801395/771663 - """ - version_str = ".".join(str(x) for x in sys.version_info[:2]) - return "python@{0}".format(version_str) - - -@contextlib.contextmanager -def spack_python_interpreter(): - """Override the current configuration to set the interpreter under - which Spack is currently running as the only Python external spec - available. - """ - python_prefix = sys.exec_prefix - external_python = spec_for_current_python() - - entry = { - "buildable": False, - "externals": [{"prefix": python_prefix, "spec": str(external_python)}], - } - - with spack.config.override("packages:python::", entry): - yield - - -def ensure_module_importable_or_raise(module, abstract_spec=None): - """Make the requested module available for import, or raise. - - This function tries to import a Python module in the current interpreter - using, in order, the methods configured in bootstrap.yaml. - - If none of the methods succeed, an exception is raised. The function exits - on first success. - - Args: - module (str): module to be imported in the current interpreter - abstract_spec (str): abstract spec that might provide the module. If not - given it defaults to "module" - - Raises: - ImportError: if the module couldn't be imported - """ - # If we can import it already, that's great - tty.debug("[BOOTSTRAP MODULE {0}] Try importing from Python".format(module)) - if _python_import(module): - return - - abstract_spec = abstract_spec or module - - h = GroupedExceptionHandler() - - for current_config in bootstrapping_sources(): - with h.forward(current_config["name"]): - source_is_enabled_or_raise(current_config) - - b = _make_bootstrapper(current_config) - if b.try_import(module, abstract_spec): - return - - assert h, ( - "expected at least one exception to have been raised at this point: " - "while bootstrapping {0}".format(module) - ) - msg = 'cannot bootstrap the "{0}" Python module '.format(module) - if abstract_spec: - msg += 'from spec "{0}" '.format(abstract_spec) - if tty.is_debug(): - msg += h.grouped_message(with_tracebacks=True) - else: - msg += h.grouped_message(with_tracebacks=False) - msg += "\nRun `spack --debug ...` for more detailed errors" - raise ImportError(msg) - - -def ensure_executables_in_path_or_raise(executables, abstract_spec, cmd_check=None): - """Ensure that some executables are in path or raise. - - Args: - executables (list): list of executables to be searched in the PATH, - in order. The function exits on the first one found. 
- abstract_spec (str): abstract spec that provides the executables - cmd_check (object): callable predicate that takes a - ``spack.util.executable.Executable`` command and validate it. Should return - ``True`` if the executable is acceptable, ``False`` otherwise. - Can be used to, e.g., ensure a suitable version of the command before - accepting for bootstrapping. - - Raises: - RuntimeError: if the executables cannot be ensured to be in PATH - - Return: - Executable object - - """ - cmd = spack.util.executable.which(*executables) - if cmd: - if not cmd_check or cmd_check(cmd): - return cmd - - executables_str = ", ".join(executables) - - h = GroupedExceptionHandler() - - for current_config in bootstrapping_sources(): - with h.forward(current_config["name"]): - source_is_enabled_or_raise(current_config) - - b = _make_bootstrapper(current_config) - if b.try_search_path(executables, abstract_spec): - # Additional environment variables needed - concrete_spec, cmd = b.last_search["spec"], b.last_search["command"] - env_mods = spack.util.environment.EnvironmentModifications() - for dep in concrete_spec.traverse( - root=True, order="post", deptype=("link", "run") - ): - env_mods.extend( - spack.user_environment.environment_modifications_for_spec( - dep, set_package_py_globals=False - ) - ) - cmd.add_default_envmod(env_mods) - return cmd - - assert h, ( - "expected at least one exception to have been raised at this point: " - "while bootstrapping {0}".format(executables_str) - ) - msg = "cannot bootstrap any of the {0} executables ".format(executables_str) - if abstract_spec: - msg += 'from spec "{0}" '.format(abstract_spec) - if tty.is_debug(): - msg += h.grouped_message(with_tracebacks=True) - else: - msg += h.grouped_message(with_tracebacks=False) - msg += "\nRun `spack --debug ...` for more detailed errors" - raise RuntimeError(msg) - - -def _python_import(module): - try: - __import__(module) - except ImportError: - return False - return True - - -def _bootstrap_config_scopes(): - tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin") - config_scopes = [spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)] - configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path())) - for name, path in configuration_paths: - platform = spack.platforms.host().name - platform_scope = spack.config.ConfigScope( - "/".join([name, platform]), os.path.join(path, platform) - ) - generic_scope = spack.config.ConfigScope(name, path) - config_scopes.extend([generic_scope, platform_scope]) - msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}" - tty.debug(msg.format(generic_scope.name, generic_scope.path)) - tty.debug(msg.format(platform_scope.name, platform_scope.path)) - return config_scopes - - -def _add_compilers_if_missing(): - arch = spack.spec.ArchSpec.frontend_arch() - if not spack.compilers.compilers_for_arch(arch): - new_compilers = spack.compilers.find_new_compilers() - if new_compilers: - spack.compilers.add_compilers_to_config(new_compilers, init_config=False) - - -def _add_externals_if_missing(): - search_list = [ - # clingo - spack.repo.path.get_pkg_class("cmake"), - spack.repo.path.get_pkg_class("bison"), - # GnuPG - spack.repo.path.get_pkg_class("gawk"), - ] - if is_windows: - search_list.append(spack.repo.path.get_pkg_class("winbison")) - detected_packages = spack.detection.by_executable(search_list) - spack.detection.update_configuration(detected_packages, scope="bootstrap") - - -#: Reference counter for the bootstrapping configuration context 
manager -_REF_COUNT = 0 - - -def is_bootstrapping(): - global _REF_COUNT - return _REF_COUNT > 0 - - -@contextlib.contextmanager -def ensure_bootstrap_configuration(): - # The context manager is reference counted to ensure we don't swap multiple - # times if there's nested use of it in the stack. One compelling use case - # is bootstrapping patchelf during the bootstrap of clingo. - global _REF_COUNT - already_swapped = bool(_REF_COUNT) - _REF_COUNT += 1 - try: - if already_swapped: - yield - else: - with _ensure_bootstrap_configuration(): - yield - finally: - _REF_COUNT -= 1 - - -@contextlib.contextmanager -def _ensure_bootstrap_configuration(): - bootstrap_store_path = store_path() - user_configuration = _read_and_sanitize_configuration() - with spack.environment.no_active_environment(): - with spack.platforms.prevent_cray_detection(): - with spack.platforms.use_platform(spack.platforms.real_host()): - with spack.repo.use_repositories(spack.paths.packages_path): - with spack.store.use_store(bootstrap_store_path): - # Default configuration scopes excluding command line - # and builtin but accounting for platform specific scopes - config_scopes = _bootstrap_config_scopes() - with spack.config.use_configuration(*config_scopes): - # We may need to compile code from sources, so ensure we - # have compilers for the current platform - _add_compilers_if_missing() - spack.config.set("bootstrap", user_configuration["bootstrap"]) - spack.config.set("config", user_configuration["config"]) - with spack.modules.disable_modules(): - with spack_python_interpreter(): - yield - - -def _read_and_sanitize_configuration(): - """Read the user configuration that needs to be reused for bootstrapping - and remove the entries that should not be copied over. - """ - # Read the "config" section but pop the install tree (the entry will not be - # considered due to the use_store context manager, so it will be confusing - # to have it in the configuration). - config_yaml = spack.config.get("config") - config_yaml.pop("install_tree", None) - user_configuration = {"bootstrap": spack.config.get("bootstrap"), "config": config_yaml} - return user_configuration - - -def store_path(): - """Path to the store used for bootstrapped software""" - enabled = spack.config.get("bootstrap:enable", True) - if not enabled: - msg = "bootstrapping is currently disabled. " 'Use "spack bootstrap enable" to enable it' - raise RuntimeError(msg) - - return _store_path() - - -def _root_path(): - """Root of all the bootstrap related folders""" - return spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path) - - -def _store_path(): - bootstrap_root_path = _root_path() - return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "store")) - - -def _config_path(): - bootstrap_root_path = _root_path() - return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "config")) - - -def _root_spec(spec_str): - """Add a proper compiler and target to a spec used during bootstrapping. - - Args: - spec_str (str): spec to be bootstrapped. Must be without compiler and target. - """ - # Add a proper compiler hint to the root spec. We use GCC for - # everything but MacOS and Windows. 
- if str(spack.platforms.host()) == "darwin": - spec_str += " %apple-clang" - elif str(spack.platforms.host()) == "windows": - spec_str += " %msvc" - else: - spec_str += " %gcc" - - target = archspec.cpu.host().family - spec_str += " target={0}".format(target) - - tty.debug("[BOOTSTRAP ROOT SPEC] {0}".format(spec_str)) - return spec_str - - -def clingo_root_spec(): - """Return the root spec used to bootstrap clingo""" - return _root_spec("clingo-bootstrap@spack+python") - - -def ensure_clingo_importable_or_raise(): - """Ensure that the clingo module is available for import.""" - ensure_module_importable_or_raise(module="clingo", abstract_spec=clingo_root_spec()) - - -def gnupg_root_spec(): - """Return the root spec used to bootstrap GnuPG""" - return _root_spec("gnupg@2.3:") - - -def ensure_gpg_in_path_or_raise(): - """Ensure gpg or gpg2 are in the PATH or raise.""" - return ensure_executables_in_path_or_raise( - executables=["gpg2", "gpg"], abstract_spec=gnupg_root_spec() - ) - - -def patchelf_root_spec(): - """Return the root spec used to bootstrap patchelf""" - # 0.13.1 is the last version not to require C++17. - return _root_spec("patchelf@0.13.1:") - - -def verify_patchelf(patchelf): - """Older patchelf versions can produce broken binaries, so we - verify the version here. - - Arguments: - - patchelf (spack.util.executable.Executable): patchelf executable - """ - out = patchelf("--version", output=str, error=os.devnull, fail_on_error=False).strip() - if patchelf.returncode != 0: - return False - parts = out.split(" ") - if len(parts) < 2: - return False - try: - version = spack.version.Version(parts[1]) - except ValueError: - return False - return version >= spack.version.Version("0.13.1") - - -def ensure_patchelf_in_path_or_raise(): - """Ensure patchelf is in the PATH or raise.""" - # The old concretizer is not smart and we're doing its job: if the latest patchelf - # does not concretize because the compiler doesn't support C++17, we try to - # concretize again with an upperbound @:13. 
- try: - return ensure_executables_in_path_or_raise( - executables=["patchelf"], abstract_spec=patchelf_root_spec(), cmd_check=verify_patchelf - ) - except RuntimeError: - return ensure_executables_in_path_or_raise( - executables=["patchelf"], - abstract_spec=_root_spec("patchelf@0.13.1:0.13"), - cmd_check=verify_patchelf, - ) - - -### -# Development dependencies -### - - -def isort_root_spec(): - return _root_spec("py-isort@4.3.5:") - - -def ensure_isort_in_path_or_raise(): - """Ensure that isort is in the PATH or raise.""" - executable, root_spec = "isort", isort_root_spec() - return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec) - - -def mypy_root_spec(): - return _root_spec("py-mypy@0.900:") - - -def ensure_mypy_in_path_or_raise(): - """Ensure that mypy is in the PATH or raise.""" - executable, root_spec = "mypy", mypy_root_spec() - return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec) - - -def black_root_spec(): - return _root_spec("py-black") - - -def ensure_black_in_path_or_raise(): - """Ensure that black is in the PATH or raise.""" - root_spec = black_root_spec() - - def check_black(black_cmd): - """Ensure sutable black version.""" - try: - output = black_cmd("--version", output=str) - except Exception as e: - tty.debug("Error getting version of %s: %s" % (black_cmd, e)) - return False - - match = re.match("black, ([^ ]+)", output) - if not match: - return False - - black_version = spack.version.Version(match.group(1)) - return black_version.satisfies(spack.spec.Spec(root_spec).versions) - - return ensure_executables_in_path_or_raise(["black"], root_spec, check_black) - - -def flake8_root_spec(): - return _root_spec("py-flake8") - - -def ensure_flake8_in_path_or_raise(): - """Ensure that flake8 is in the PATH or raise.""" - executable, root_spec = "flake8", flake8_root_spec() - return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec) - - -def all_root_specs(development=False): - """Return a list of all the root specs that may be used to bootstrap Spack. 
- - Args: - development (bool): if True include dev dependencies - """ - specs = [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()] - if development: - specs += [isort_root_spec(), mypy_root_spec(), black_root_spec(), flake8_root_spec()] - return specs - - -def _missing(name, purpose, system_only=True): - """Message to be printed if an executable is not found""" - msg = '[{2}] MISSING "{0}": {1}' - if not system_only: - return msg.format(name, purpose, "@*y{{B}}") - return msg.format(name, purpose, "@*y{{-}}") - - -def _required_system_executable(exes, msg): - """Search for an executable is the system path only.""" - if isinstance(exes, str): - exes = (exes,) - if spack.util.executable.which_string(*exes): - return True, None - return False, msg - - -def _required_python_module(module, query_spec, msg): - """Check if a Python module is available in the current interpreter or - if it can be loaded from the bootstrap store - """ - if _python_import(module) or _try_import_from_store(module, query_spec): - return True, None - return False, msg - - -def _required_executable(exes, query_spec, msg): - """Search for an executable in the system path or in the bootstrap store.""" - if isinstance(exes, str): - exes = (exes,) - if spack.util.executable.which_string(*exes) or _executables_in_store(exes, query_spec): - return True, None - return False, msg - - -def _core_requirements(): - _core_system_exes = { - "make": _missing("make", "required to build software from sources"), - "patch": _missing("patch", "required to patch source code before building"), - "bash": _missing("bash", "required for Spack compiler wrapper"), - "tar": _missing("tar", "required to manage code archives"), - "gzip": _missing("gzip", "required to compress/decompress code archives"), - "unzip": _missing("unzip", "required to compress/decompress code archives"), - "bzip2": _missing("bzip2", "required to compress/decompress code archives"), - "git": _missing("git", "required to fetch/manage git repositories"), - } - if platform.system().lower() == "linux": - _core_system_exes["xz"] = _missing("xz", "required to compress/decompress code archives") - - # Executables that are not bootstrapped yet - result = [_required_system_executable(exe, msg) for exe, msg in _core_system_exes.items()] - # Python modules - result.append( - _required_python_module( - "clingo", clingo_root_spec(), _missing("clingo", "required to concretize specs", False) - ) - ) - return result - - -def _buildcache_requirements(): - _buildcache_exes = { - "file": _missing("file", "required to analyze files for buildcaches"), - ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False), - } - if platform.system().lower() == "darwin": - _buildcache_exes["otool"] = _missing("otool", "required to relocate binaries") - - # Executables that are not bootstrapped yet - result = [_required_system_executable(exe, msg) for exe, msg in _buildcache_exes.items()] - - if platform.system().lower() == "linux": - result.append( - _required_executable( - "patchelf", - patchelf_root_spec(), - _missing("patchelf", "required to relocate binaries", False), - ) - ) - - return result - - -def _optional_requirements(): - _optional_exes = { - "zstd": _missing("zstd", "required to compress/decompress code archives"), - "svn": _missing("svn", "required to manage subversion repositories"), - "hg": _missing("hg", "required to manage mercurial repositories"), - } - # Executables that are not bootstrapped yet - result = [_required_system_executable(exe, msg) for exe, 
msg in _optional_exes.items()] - return result - - -def _development_requirements(): - return [ - _required_executable( - "isort", isort_root_spec(), _missing("isort", "required for style checks", False) - ), - _required_executable( - "mypy", mypy_root_spec(), _missing("mypy", "required for style checks", False) - ), - _required_executable( - "flake8", flake8_root_spec(), _missing("flake8", "required for style checks", False) - ), - _required_executable( - "black", black_root_spec(), _missing("black", "required for code formatting", False) - ), - ] - - -def status_message(section): - """Return a status message to be printed to screen that refers to the - section passed as argument and a bool which is True if there are missing - dependencies. - - Args: - section (str): either 'core' or 'buildcache' or 'optional' or 'develop' - """ - pass_token, fail_token = "@*g{[PASS]}", "@*r{[FAIL]}" - - # Contain the header of the section and a list of requirements - spack_sections = { - "core": ("{0} @*{{Core Functionalities}}", _core_requirements), - "buildcache": ("{0} @*{{Binary packages}}", _buildcache_requirements), - "optional": ("{0} @*{{Optional Features}}", _optional_requirements), - "develop": ("{0} @*{{Development Dependencies}}", _development_requirements), - } - msg, required_software = spack_sections[section] - - with ensure_bootstrap_configuration(): - missing_software = False - for found, err_msg in required_software(): - if not found: - missing_software = True - msg += "\n " + err_msg - msg += "\n" - msg = msg.format(pass_token if not missing_software else fail_token) - return msg, missing_software - - -def bootstrapping_sources(scope=None): - """Return the list of configured sources of software for bootstrapping Spack - - Args: - scope (str or None): if a valid configuration scope is given, return the - list only from that scope - """ - source_configs = spack.config.get("bootstrap:sources", default=None, scope=scope) - source_configs = source_configs or [] - list_of_sources = [] - for entry in source_configs: - current = copy.copy(entry) - metadata_dir = spack.util.path.canonicalize_path(entry["metadata"]) - metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME) - with open(metadata_yaml) as f: - current.update(spack.util.spack_yaml.load(f)) - list_of_sources.append(current) - return list_of_sources diff --git a/lib/spack/spack/bootstrap/__init__.py b/lib/spack/spack/bootstrap/__init__.py new file mode 100644 index 0000000000..8e3737657c --- /dev/null +++ b/lib/spack/spack/bootstrap/__init__.py @@ -0,0 +1,25 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Function and classes needed to bootstrap Spack itself.""" + +from .config import ensure_bootstrap_configuration, is_bootstrapping +from .core import ( + all_core_root_specs, + ensure_core_dependencies, + ensure_patchelf_in_path_or_raise, +) +from .environment import BootstrapEnvironment, ensure_environment_dependencies +from .status import status_message + +__all__ = [ + "is_bootstrapping", + "ensure_bootstrap_configuration", + "ensure_core_dependencies", + "ensure_patchelf_in_path_or_raise", + "all_core_root_specs", + "ensure_environment_dependencies", + "BootstrapEnvironment", + "status_message", +] diff --git a/lib/spack/spack/bootstrap/_common.py b/lib/spack/spack/bootstrap/_common.py new file mode 100644 index 0000000000..0798c5a0b7 --- /dev/null +++ b/lib/spack/spack/bootstrap/_common.py @@ -0,0 +1,218 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Common basic functions used through the spack.bootstrap package""" +import fnmatch +import os.path +import re +import sys +import sysconfig +import warnings + +import archspec.cpu + +import llnl.util.filesystem as fs +from llnl.util import tty + +import spack.store +import spack.util.environment +import spack.util.executable + +from .config import spec_for_current_python + + +def _python_import(module): + try: + __import__(module) + except ImportError: + return False + return True + + +def _try_import_from_store(module, query_spec, query_info=None): + """Return True if the module can be imported from an already + installed spec, False otherwise. + + Args: + module: Python module to be imported + query_spec: spec that may provide the module + query_info (dict or None): if a dict is passed it is populated with the + command found and the concrete spec providing it + """ + # If it is a string assume it's one of the root specs by this module + if isinstance(query_spec, str): + # We have to run as part of this python interpreter + query_spec += " ^" + spec_for_current_python() + + installed_specs = spack.store.db.query(query_spec, installed=True) + + for candidate_spec in installed_specs: + pkg = candidate_spec["python"].package + module_paths = [ + os.path.join(candidate_spec.prefix, pkg.purelib), + os.path.join(candidate_spec.prefix, pkg.platlib), + ] # type: list[str] + path_before = list(sys.path) + + # NOTE: try module_paths first and last, last allows an existing version in path + # to be picked up and used, possibly depending on something in the store, first + # allows the bootstrap version to work when an incompatible version is in + # sys.path + orders = [ + module_paths + sys.path, + sys.path + module_paths, + ] + for path in orders: + sys.path = path + try: + _fix_ext_suffix(candidate_spec) + if _python_import(module): + msg = ( + f"[BOOTSTRAP MODULE {module}] The installed spec " + f'"{query_spec}/{candidate_spec.dag_hash()}" ' + f'provides the "{module}" Python module' + ) + tty.debug(msg) + if query_info is not None: + query_info["spec"] = candidate_spec + return True + except Exception as exc: # pylint: disable=broad-except + msg = ( + "unexpected error while trying to import module " + f'"{module}" from spec "{candidate_spec}" [error="{str(exc)}"]' + ) + warnings.warn(msg) + else: + msg = "Spec {0} did not provide module {1}" + warnings.warn(msg.format(candidate_spec, module)) + + sys.path = path_before + + return 
False + + +def _fix_ext_suffix(candidate_spec): + """Fix the external suffixes of Python extensions on the fly for + platforms that may need it + + Args: + candidate_spec (Spec): installed spec with a Python module + to be checked. + """ + # Here we map target families to the patterns expected + # by pristine CPython. Only architectures with known issues + # are included. Known issues: + # + # [RHEL + ppc64le]: https://github.com/spack/spack/issues/25734 + # + _suffix_to_be_checked = { + "ppc64le": { + "glob": "*.cpython-*-powerpc64le-linux-gnu.so", + "re": r".cpython-[\w]*-powerpc64le-linux-gnu.so", + "fmt": r"{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so", + } + } + + # If the current architecture is not problematic return + generic_target = archspec.cpu.host().family + if str(generic_target) not in _suffix_to_be_checked: + return + + # If there's no EXT_SUFFIX (Python < 3.5) or the suffix matches + # the expectations, return since the package is surely good + ext_suffix = sysconfig.get_config_var("EXT_SUFFIX") + if ext_suffix is None: + return + + expected = _suffix_to_be_checked[str(generic_target)] + if fnmatch.fnmatch(ext_suffix, expected["glob"]): + return + + # If we are here it means the current interpreter expects different names + # than pristine CPython. So: + # 1. Find what we have installed + # 2. Create symbolic links for the other names, it they're not there already + + # Check if standard names are installed and if we have to create + # link for this interpreter + standard_extensions = fs.find(candidate_spec.prefix, expected["glob"]) + link_names = [re.sub(expected["re"], ext_suffix, s) for s in standard_extensions] + for file_name, link_name in zip(standard_extensions, link_names): + if os.path.exists(link_name): + continue + os.symlink(file_name, link_name) + + # Check if this interpreter installed something and we have to create + # links for a standard CPython interpreter + non_standard_extensions = fs.find(candidate_spec.prefix, "*" + ext_suffix) + for abs_path in non_standard_extensions: + directory, filename = os.path.split(abs_path) + module = filename.split(".")[0] + link_name = os.path.join( + directory, + expected["fmt"].format( + module=module, major=sys.version_info[0], minor=sys.version_info[1] + ), + ) + if os.path.exists(link_name): + continue + os.symlink(abs_path, link_name) + + +def _executables_in_store(executables, query_spec, query_info=None): + """Return True if at least one of the executables can be retrieved from + a spec in store, False otherwise. + + The different executables must provide the same functionality and are + "alternate" to each other, i.e. the function will exit True on the first + executable found. 
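# A minimal, self-contained sketch (not part of this diff) of the renaming idea behind
# _fix_ext_suffix above: compare the running interpreter's EXT_SUFFIX against the pattern
# pristine CPython would use, and derive the alternate file name that a symlink would need.
# The powerpc64le glob is taken from the table above; the helper name and the sample
# module file are illustrative only, and the real code additionally restricts this to
# affected target families (archspec.cpu.host().family) before creating any links.
import fnmatch
import sysconfig


def alternate_extension_name(installed_name, expected_glob="*.cpython-*-powerpc64le-linux-gnu.so"):
    """Return the file name the current interpreter expects for an extension that was
    installed under the pristine CPython naming scheme, or None if no rename is needed."""
    ext_suffix = sysconfig.get_config_var("EXT_SUFFIX")
    if ext_suffix is None or fnmatch.fnmatch(ext_suffix, expected_glob):
        return None  # suffixes already agree, nothing to link
    module = installed_name.split(".")[0]
    return module + ext_suffix


if __name__ == "__main__":
    # e.g. on a non-matching interpreter this prints "example" + the local EXT_SUFFIX
    print(alternate_extension_name("example.cpython-38-powerpc64le-linux-gnu.so"))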
+ + Args: + executables: list of executables to be searched + query_spec: spec that may provide the executable + query_info (dict or None): if a dict is passed it is populated with the + command found and the concrete spec providing it + """ + executables_str = ", ".join(executables) + msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'" + tty.debug(msg.format(executables_str, query_spec)) + installed_specs = spack.store.db.query(query_spec, installed=True) + if installed_specs: + for concrete_spec in installed_specs: + bin_dir = concrete_spec.prefix.bin + # IF we have a "bin" directory and it contains + # the executables we are looking for + if ( + os.path.exists(bin_dir) + and os.path.isdir(bin_dir) + and spack.util.executable.which_string(*executables, path=bin_dir) + ): + spack.util.environment.path_put_first("PATH", [bin_dir]) + if query_info is not None: + query_info["command"] = spack.util.executable.which(*executables, path=bin_dir) + query_info["spec"] = concrete_spec + return True + return False + + +def _root_spec(spec_str): + """Add a proper compiler and target to a spec used during bootstrapping. + + Args: + spec_str (str): spec to be bootstrapped. Must be without compiler and target. + """ + # Add a proper compiler hint to the root spec. We use GCC for + # everything but MacOS and Windows. + if str(spack.platforms.host()) == "darwin": + spec_str += " %apple-clang" + elif str(spack.platforms.host()) == "windows": + spec_str += " %msvc" + else: + spec_str += " %gcc" + + target = archspec.cpu.host().family + spec_str += f" target={target}" + + tty.debug(f"[BOOTSTRAP ROOT SPEC] {spec_str}") + return spec_str diff --git a/lib/spack/spack/bootstrap/config.py b/lib/spack/spack/bootstrap/config.py new file mode 100644 index 0000000000..8fdda20966 --- /dev/null +++ b/lib/spack/spack/bootstrap/config.py @@ -0,0 +1,169 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Manage configuration swapping for bootstrapping purposes""" + +import contextlib +import os.path +import sys + +from llnl.util import tty + +import spack.compilers +import spack.config +import spack.environment +import spack.paths +import spack.platforms +import spack.repo +import spack.spec +import spack.store +import spack.util.path + +#: Reference counter for the bootstrapping configuration context manager +_REF_COUNT = 0 + + +def is_bootstrapping(): + """Return True if we are in a bootstrapping context, False otherwise.""" + return _REF_COUNT > 0 + + +def spec_for_current_python(): + """For bootstrapping purposes we are just interested in the Python + minor version (all patches are ABI compatible with the same minor). + + See: + https://www.python.org/dev/peps/pep-0513/ + https://stackoverflow.com/a/35801395/771663 + """ + version_str = ".".join(str(x) for x in sys.version_info[:2]) + return f"python@{version_str}" + + +def root_path(): + """Root of all the bootstrap related folders""" + return spack.util.path.canonicalize_path( + spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path) + ) + + +def store_path(): + """Path to the store used for bootstrapped software""" + enabled = spack.config.get("bootstrap:enable", True) + if not enabled: + msg = 'bootstrapping is currently disabled. 
Use "spack bootstrap enable" to enable it' + raise RuntimeError(msg) + + return _store_path() + + +@contextlib.contextmanager +def spack_python_interpreter(): + """Override the current configuration to set the interpreter under + which Spack is currently running as the only Python external spec + available. + """ + python_prefix = sys.exec_prefix + external_python = spec_for_current_python() + + entry = { + "buildable": False, + "externals": [{"prefix": python_prefix, "spec": str(external_python)}], + } + + with spack.config.override("packages:python::", entry): + yield + + +def _store_path(): + bootstrap_root_path = root_path() + return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "store")) + + +def _config_path(): + bootstrap_root_path = root_path() + return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "config")) + + +@contextlib.contextmanager +def ensure_bootstrap_configuration(): + """Swap the current configuration for the one used to bootstrap Spack. + + The context manager is reference counted to ensure we don't swap multiple + times if there's nested use of it in the stack. One compelling use case + is bootstrapping patchelf during the bootstrap of clingo. + """ + global _REF_COUNT # pylint: disable=global-statement + already_swapped = bool(_REF_COUNT) + _REF_COUNT += 1 + try: + if already_swapped: + yield + else: + with _ensure_bootstrap_configuration(): + yield + finally: + _REF_COUNT -= 1 + + +def _read_and_sanitize_configuration(): + """Read the user configuration that needs to be reused for bootstrapping + and remove the entries that should not be copied over. + """ + # Read the "config" section but pop the install tree (the entry will not be + # considered due to the use_store context manager, so it will be confusing + # to have it in the configuration). 
+ config_yaml = spack.config.get("config") + config_yaml.pop("install_tree", None) + user_configuration = {"bootstrap": spack.config.get("bootstrap"), "config": config_yaml} + return user_configuration + + +def _bootstrap_config_scopes(): + tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin") + config_scopes = [spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)] + configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path())) + for name, path in configuration_paths: + platform = spack.platforms.host().name + platform_scope = spack.config.ConfigScope( + "/".join([name, platform]), os.path.join(path, platform) + ) + generic_scope = spack.config.ConfigScope(name, path) + config_scopes.extend([generic_scope, platform_scope]) + msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}" + tty.debug(msg.format(generic_scope.name, generic_scope.path)) + tty.debug(msg.format(platform_scope.name, platform_scope.path)) + return config_scopes + + +def _add_compilers_if_missing(): + arch = spack.spec.ArchSpec.frontend_arch() + if not spack.compilers.compilers_for_arch(arch): + new_compilers = spack.compilers.find_new_compilers() + if new_compilers: + spack.compilers.add_compilers_to_config(new_compilers, init_config=False) + + +@contextlib.contextmanager +def _ensure_bootstrap_configuration(): + bootstrap_store_path = store_path() + user_configuration = _read_and_sanitize_configuration() + with spack.environment.no_active_environment(): + with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform( + spack.platforms.real_host() + ), spack.repo.use_repositories(spack.paths.packages_path), spack.store.use_store( + bootstrap_store_path + ): + # Default configuration scopes excluding command line + # and builtin but accounting for platform specific scopes + config_scopes = _bootstrap_config_scopes() + with spack.config.use_configuration(*config_scopes): + # We may need to compile code from sources, so ensure we + # have compilers for the current platform + _add_compilers_if_missing() + spack.config.set("bootstrap", user_configuration["bootstrap"]) + spack.config.set("config", user_configuration["config"]) + with spack.modules.disable_modules(): + with spack_python_interpreter(): + yield diff --git a/lib/spack/spack/bootstrap/core.py b/lib/spack/spack/bootstrap/core.py new file mode 100644 index 0000000000..9cf25b29e9 --- /dev/null +++ b/lib/spack/spack/bootstrap/core.py @@ -0,0 +1,574 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Bootstrap Spack core dependencies from binaries. + +This module contains logic to bootstrap software required by Spack from binaries served in the +bootstrapping mirrors. The logic is quite different from an installation done from a Spack user, +because of the following reasons: + + 1. The binaries are all compiled on the same OS for a given platform (e.g. they are compiled on + ``centos7`` on ``linux``), but they will be installed and used on the host OS. They are also + targeted at the most generic architecture possible. That makes the binaries difficult to reuse + with other specs in an environment without ad-hoc logic. + 2. Bootstrapping has a fallback procedure where we try to install software by default from the + most recent binaries, and proceed to older versions of the mirror, until we try building from + sources as a last resort. 
This allows us not to be blocked on architectures where we don't + have binaries readily available, but is also not compatible with the working of environments + (they don't have fallback procedures). + 3. Among the binaries we have clingo, so we can't concretize that with clingo :-) + 4. clingo, GnuPG and patchelf binaries need to be verified by sha256 sum (all the other binaries + we might add on top of that in principle can be verified with GPG signatures). +""" + +import copy +import functools +import json +import os +import os.path +import sys +import uuid + +from llnl.util import tty +from llnl.util.lang import GroupedExceptionHandler + +import spack.binary_distribution +import spack.config +import spack.detection +import spack.environment +import spack.modules +import spack.paths +import spack.platforms +import spack.platforms.linux +import spack.repo +import spack.spec +import spack.store +import spack.user_environment +import spack.util.environment +import spack.util.executable +import spack.util.path +import spack.util.spack_yaml +import spack.util.url +import spack.version + +from ._common import ( + _executables_in_store, + _python_import, + _root_spec, + _try_import_from_store, +) +from .config import spack_python_interpreter, spec_for_current_python + +#: Name of the file containing metadata about the bootstrapping source +METADATA_YAML_FILENAME = "metadata.yaml" + +#: Whether the current platform is Windows +IS_WINDOWS = sys.platform == "win32" + +#: Map a bootstrapper type to the corresponding class +_bootstrap_methods = {} + + +def bootstrapper(bootstrapper_type): + """Decorator to register classes implementing bootstrapping + methods. + + Args: + bootstrapper_type (str): string identifying the class + """ + + def _register(cls): + _bootstrap_methods[bootstrapper_type] = cls + return cls + + return _register + + +class Bootstrapper: + """Interface for "core" software bootstrappers""" + + config_scope_name = "" + + def __init__(self, conf): + self.conf = conf + self.name = conf["name"] + self.url = conf["info"]["url"] + self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"]) + + @property + def mirror_url(self): + """Mirror url associated with this bootstrapper""" + # Absolute paths + if os.path.isabs(self.url): + return spack.util.url.format(self.url) + + # Check for :// and assume it's an url if we find it + if "://" in self.url: + return self.url + + # Otherwise, it's a relative path + return spack.util.url.format(os.path.join(self.metadata_dir, self.url)) + + @property + def mirror_scope(self): + """Mirror scope to be pushed onto the bootstrapping configuration when using + this bootstrapper. + """ + return spack.config.InternalConfigScope( + self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}} + ) + + def try_import(self, module: str, abstract_spec_str: str): # pylint: disable=unused-argument + """Try to import a Python module from a spec satisfying the abstract spec + passed as argument. + + Args: + module (str): Python module name to try importing + abstract_spec_str (str): abstract spec that can provide the Python module + + Return: + True if the Python module could be imported, False otherwise + """ + return False + + def try_search_path(self, executables, abstract_spec_str): # pylint: disable=unused-argument + """Try to search some executables in the prefix of specs satisfying the abstract + spec passed as argument. 
+ + Args: + executables (list of str): executables to be found + abstract_spec_str (str): abstract spec that can provide the Python module + + Return: + True if the executables are found, False otherwise + """ + return False + + +@bootstrapper(bootstrapper_type="buildcache") +class BuildcacheBootstrapper(Bootstrapper): + """Install the software needed during bootstrapping from a buildcache.""" + + def __init__(self, conf): + super().__init__(conf) + self.last_search = None + self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}" + + @staticmethod + def _spec_and_platform(abstract_spec_str): + """Return the spec object and platform we need to use when + querying the buildcache. + + Args: + abstract_spec_str: abstract spec string we are looking for + """ + # Try to install from an unsigned binary cache + abstract_spec = spack.spec.Spec(abstract_spec_str) + # On Cray we want to use Linux binaries if available from mirrors + bincache_platform = spack.platforms.real_host() + return abstract_spec, bincache_platform + + def _read_metadata(self, package_name): + """Return metadata about the given package.""" + json_filename = f"{package_name}.json" + json_dir = self.metadata_dir + json_path = os.path.join(json_dir, json_filename) + with open(json_path, encoding="utf-8") as stream: + data = json.load(stream) + return data + + def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform): + index_spec = next(x for x in index if x.dag_hash() == pkg_hash) + # Reconstruct the compiler that we need to use for bootstrapping + compiler_entry = { + "modules": [], + "operating_system": str(index_spec.os), + "paths": { + "cc": "/dev/null", + "cxx": "/dev/null", + "f77": "/dev/null", + "fc": "/dev/null", + }, + "spec": str(index_spec.compiler), + "target": str(index_spec.target.family), + } + with spack.platforms.use_platform(bincache_platform): + with spack.config.override("compilers", [{"compiler": compiler_entry}]): + spec_str = "/" + pkg_hash + query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True) + matches = spack.store.find([spec_str], multiple=False, query_fn=query) + for match in matches: + spack.binary_distribution.install_root_node( + match, allow_root=True, unsigned=True, force=True, sha256=pkg_sha256 + ) + + def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, test_fn): + # Ensure we see only the buildcache being used to bootstrap + with spack.config.override(self.mirror_scope): + # This index is currently needed to get the compiler used to build some + # specs that we know by dag hash. 
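# A minimal sketch (not part of this diff) of the registry pattern behind the
# @bootstrapper decorator above: classes register themselves under a string key, and a
# factory later instantiates the matching class from a configuration entry. The class
# DummyBuildcache, the register/make names, and the sample configuration are
# illustrative placeholders, not the real Spack types.
_methods = {}


def register(method_type):
    def _register(cls):
        _methods[method_type] = cls
        return cls
    return _register


@register("buildcache")
class DummyBuildcache:
    def __init__(self, conf):
        self.name = conf["name"]


def make(conf):
    # Equivalent in spirit to create_bootstrapper(conf): dispatch on conf["type"]
    return _methods[conf["type"]](conf)


if __name__ == "__main__":
    obj = make({"type": "buildcache", "name": "example-mirror"})
    print(type(obj).__name__, obj.name)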
+ spack.binary_distribution.binary_index.regenerate_spec_cache() + index = spack.binary_distribution.update_cache_and_get_specs() + + if not index: + raise RuntimeError("The binary index is empty") + + for item in bincache_data["verified"]: + candidate_spec = item["spec"] + # This will be None for things that don't depend on python + python_spec = item.get("python", None) + # Skip specs which are not compatible + if not abstract_spec.satisfies(candidate_spec): + continue + + if python_spec is not None and python_spec not in abstract_spec: + continue + + for _, pkg_hash, pkg_sha256 in item["binaries"]: + self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform) + + info = {} + if test_fn(query_spec=abstract_spec, query_info=info): + self.last_search = info + return True + return False + + def try_import(self, module, abstract_spec_str): + test_fn, info = functools.partial(_try_import_from_store, module), {} + if test_fn(query_spec=abstract_spec_str, query_info=info): + return True + + tty.debug(f"Bootstrapping {module} from pre-built binaries") + abstract_spec, bincache_platform = self._spec_and_platform( + abstract_spec_str + " ^" + spec_for_current_python() + ) + data = self._read_metadata(module) + return self._install_and_test(abstract_spec, bincache_platform, data, test_fn) + + def try_search_path(self, executables, abstract_spec_str): + test_fn, info = functools.partial(_executables_in_store, executables), {} + if test_fn(query_spec=abstract_spec_str, query_info=info): + self.last_search = info + return True + + abstract_spec, bincache_platform = self._spec_and_platform(abstract_spec_str) + tty.debug(f"Bootstrapping {abstract_spec.name} from pre-built binaries") + data = self._read_metadata(abstract_spec.name) + return self._install_and_test(abstract_spec, bincache_platform, data, test_fn) + + +@bootstrapper(bootstrapper_type="install") +class SourceBootstrapper(Bootstrapper): + """Install the software needed during bootstrapping from sources.""" + + def __init__(self, conf): + super().__init__(conf) + self.last_search = None + self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}" + + def try_import(self, module, abstract_spec_str): + info = {} + if _try_import_from_store(module, abstract_spec_str, query_info=info): + self.last_search = info + return True + + tty.debug(f"Bootstrapping {module} from sources") + + # If we compile code from sources detecting a few build tools + # might reduce compilation time by a fair amount + _add_externals_if_missing() + + # Try to build and install from sources + with spack_python_interpreter(): + # Add hint to use frontend operating system on Cray + concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python()) + + if module == "clingo": + # TODO: remove when the old concretizer is deprecated # pylint: disable=fixme + concrete_spec._old_concretize( # pylint: disable=protected-access + deprecation_warning=False + ) + else: + concrete_spec.concretize() + + msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources" + tty.debug(msg.format(module, abstract_spec_str)) + + # Install the spec that should make the module importable + with spack.config.override(self.mirror_scope): + concrete_spec.package.do_install(fail_fast=True) + + if _try_import_from_store(module, query_spec=concrete_spec, query_info=info): + self.last_search = info + return True + return False + + def try_search_path(self, executables, abstract_spec_str): + info = {} + if _executables_in_store(executables, abstract_spec_str, 
query_info=info): + self.last_search = info + return True + + tty.debug(f"Bootstrapping {abstract_spec_str} from sources") + + # If we compile code from sources detecting a few build tools + # might reduce compilation time by a fair amount + _add_externals_if_missing() + + concrete_spec = spack.spec.Spec(abstract_spec_str) + if concrete_spec.name == "patchelf": + concrete_spec._old_concretize( # pylint: disable=protected-access + deprecation_warning=False + ) + else: + concrete_spec.concretize() + + msg = "[BOOTSTRAP] Try installing '{0}' from sources" + tty.debug(msg.format(abstract_spec_str)) + with spack.config.override(self.mirror_scope): + concrete_spec.package.do_install() + if _executables_in_store(executables, concrete_spec, query_info=info): + self.last_search = info + return True + return False + + +def create_bootstrapper(conf): + """Return a bootstrap object built according to the configuration argument""" + btype = conf["type"] + return _bootstrap_methods[btype](conf) + + +def source_is_enabled_or_raise(conf): + """Raise ValueError if the source is not enabled for bootstrapping""" + trusted, name = spack.config.get("bootstrap:trusted"), conf["name"] + if not trusted.get(name, False): + raise ValueError("source is not trusted") + + +def ensure_module_importable_or_raise(module, abstract_spec=None): + """Make the requested module available for import, or raise. + + This function tries to import a Python module in the current interpreter + using, in order, the methods configured in bootstrap.yaml. + + If none of the methods succeed, an exception is raised. The function exits + on first success. + + Args: + module (str): module to be imported in the current interpreter + abstract_spec (str): abstract spec that might provide the module. If not + given it defaults to "module" + + Raises: + ImportError: if the module couldn't be imported + """ + # If we can import it already, that's great + tty.debug(f"[BOOTSTRAP MODULE {module}] Try importing from Python") + if _python_import(module): + return + + abstract_spec = abstract_spec or module + + exception_handler = GroupedExceptionHandler() + + for current_config in bootstrapping_sources(): + with exception_handler.forward(current_config["name"]): + source_is_enabled_or_raise(current_config) + current_bootstrapper = create_bootstrapper(current_config) + if current_bootstrapper.try_import(module, abstract_spec): + return + + assert exception_handler, ( + f"expected at least one exception to have been raised at this point: " + f"while bootstrapping {module}" + ) + msg = f'cannot bootstrap the "{module}" Python module ' + if abstract_spec: + msg += f'from spec "{abstract_spec}" ' + if tty.is_debug(): + msg += exception_handler.grouped_message(with_tracebacks=True) + else: + msg += exception_handler.grouped_message(with_tracebacks=False) + msg += "\nRun `spack --debug ...` for more detailed errors" + raise ImportError(msg) + + +def ensure_executables_in_path_or_raise(executables, abstract_spec, cmd_check=None): + """Ensure that some executables are in path or raise. + + Args: + executables (list): list of executables to be searched in the PATH, + in order. The function exits on the first one found. + abstract_spec (str): abstract spec that provides the executables + cmd_check (object): callable predicate that takes a + ``spack.util.executable.Executable`` command and validate it. Should return + ``True`` if the executable is acceptable, ``False`` otherwise. 
+ Can be used to, e.g., ensure a suitable version of the command before + accepting for bootstrapping. + + Raises: + RuntimeError: if the executables cannot be ensured to be in PATH + + Return: + Executable object + + """ + cmd = spack.util.executable.which(*executables) + if cmd: + if not cmd_check or cmd_check(cmd): + return cmd + + executables_str = ", ".join(executables) + + exception_handler = GroupedExceptionHandler() + + for current_config in bootstrapping_sources(): + with exception_handler.forward(current_config["name"]): + source_is_enabled_or_raise(current_config) + current_bootstrapper = create_bootstrapper(current_config) + if current_bootstrapper.try_search_path(executables, abstract_spec): + # Additional environment variables needed + concrete_spec, cmd = ( + current_bootstrapper.last_search["spec"], + current_bootstrapper.last_search["command"], + ) + env_mods = spack.util.environment.EnvironmentModifications() + for dep in concrete_spec.traverse( + root=True, order="post", deptype=("link", "run") + ): + env_mods.extend( + spack.user_environment.environment_modifications_for_spec( + dep, set_package_py_globals=False + ) + ) + cmd.add_default_envmod(env_mods) + return cmd + + assert exception_handler, ( + f"expected at least one exception to have been raised at this point: " + f"while bootstrapping {executables_str}" + ) + msg = f"cannot bootstrap any of the {executables_str} executables " + if abstract_spec: + msg += f'from spec "{abstract_spec}" ' + if tty.is_debug(): + msg += exception_handler.grouped_message(with_tracebacks=True) + else: + msg += exception_handler.grouped_message(with_tracebacks=False) + msg += "\nRun `spack --debug ...` for more detailed errors" + raise RuntimeError(msg) + + +def _add_externals_if_missing(): + search_list = [ + # clingo + spack.repo.path.get_pkg_class("cmake"), + spack.repo.path.get_pkg_class("bison"), + # GnuPG + spack.repo.path.get_pkg_class("gawk"), + ] + if IS_WINDOWS: + search_list.append(spack.repo.path.get_pkg_class("winbison")) + detected_packages = spack.detection.by_executable(search_list) + spack.detection.update_configuration(detected_packages, scope="bootstrap") + + +def clingo_root_spec(): + """Return the root spec used to bootstrap clingo""" + return _root_spec("clingo-bootstrap@spack+python") + + +def ensure_clingo_importable_or_raise(): + """Ensure that the clingo module is available for import.""" + ensure_module_importable_or_raise(module="clingo", abstract_spec=clingo_root_spec()) + + +def gnupg_root_spec(): + """Return the root spec used to bootstrap GnuPG""" + return _root_spec("gnupg@2.3:") + + +def ensure_gpg_in_path_or_raise(): + """Ensure gpg or gpg2 are in the PATH or raise.""" + return ensure_executables_in_path_or_raise( + executables=["gpg2", "gpg"], abstract_spec=gnupg_root_spec() + ) + + +def patchelf_root_spec(): + """Return the root spec used to bootstrap patchelf""" + # 0.13.1 is the last version not to require C++17. + return _root_spec("patchelf@0.13.1:") + + +def verify_patchelf(patchelf): + """Older patchelf versions can produce broken binaries, so we + verify the version here. 
+ + Arguments: + + patchelf (spack.util.executable.Executable): patchelf executable + """ + out = patchelf("--version", output=str, error=os.devnull, fail_on_error=False).strip() + if patchelf.returncode != 0: + return False + parts = out.split(" ") + if len(parts) < 2: + return False + try: + version = spack.version.Version(parts[1]) + except ValueError: + return False + return version >= spack.version.Version("0.13.1") + + +def ensure_patchelf_in_path_or_raise(): + """Ensure patchelf is in the PATH or raise.""" + # The old concretizer is not smart and we're doing its job: if the latest patchelf + # does not concretize because the compiler doesn't support C++17, we try to + # concretize again with an upperbound @:13. + try: + return ensure_executables_in_path_or_raise( + executables=["patchelf"], abstract_spec=patchelf_root_spec(), cmd_check=verify_patchelf + ) + except RuntimeError: + return ensure_executables_in_path_or_raise( + executables=["patchelf"], + abstract_spec=_root_spec("patchelf@0.13.1:0.13"), + cmd_check=verify_patchelf, + ) + + +def ensure_core_dependencies(): + """Ensure the presence of all the core dependencies.""" + if sys.platform.lower() == "linux": + ensure_patchelf_in_path_or_raise() + ensure_clingo_importable_or_raise() + ensure_gpg_in_path_or_raise() + + +def all_core_root_specs(): + """Return a list of all the core root specs that may be used to bootstrap Spack""" + return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()] + + +def bootstrapping_sources(scope=None): + """Return the list of configured sources of software for bootstrapping Spack + + Args: + scope (str or None): if a valid configuration scope is given, return the + list only from that scope + """ + source_configs = spack.config.get("bootstrap:sources", default=None, scope=scope) + source_configs = source_configs or [] + list_of_sources = [] + for entry in source_configs: + current = copy.copy(entry) + metadata_dir = spack.util.path.canonicalize_path(entry["metadata"]) + metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME) + with open(metadata_yaml, encoding="utf-8") as stream: + current.update(spack.util.spack_yaml.load(stream)) + list_of_sources.append(current) + return list_of_sources diff --git a/lib/spack/spack/bootstrap/environment.py b/lib/spack/spack/bootstrap/environment.py new file mode 100644 index 0000000000..f92fd451d3 --- /dev/null +++ b/lib/spack/spack/bootstrap/environment.py @@ -0,0 +1,191 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Bootstrap non-core Spack dependencies from an environment.""" +import glob +import hashlib +import os +import pathlib +import sys +import warnings + +import archspec.cpu + +from llnl.util import tty + +import spack.build_environment +import spack.environment +import spack.tengine +import spack.util.executable + +from ._common import _root_spec +from .config import root_path, spec_for_current_python, store_path + + +class BootstrapEnvironment(spack.environment.Environment): + """Environment to install dependencies of Spack for a given interpreter and architecture""" + + @classmethod + def spack_dev_requirements(cls): + """Spack development requirements""" + return [ + isort_root_spec(), + mypy_root_spec(), + black_root_spec(), + flake8_root_spec(), + pytest_root_spec(), + ] + + @classmethod + def environment_root(cls): + """Environment root directory""" + bootstrap_root_path = root_path() + python_part = spec_for_current_python().replace("@", "") + arch_part = archspec.cpu.host().family + interpreter_part = hashlib.md5(sys.exec_prefix.encode()).hexdigest()[:5] + environment_dir = f"{python_part}-{arch_part}-{interpreter_part}" + return pathlib.Path( + spack.util.path.canonicalize_path( + os.path.join(bootstrap_root_path, "environments", environment_dir) + ) + ) + + @classmethod + def view_root(cls): + """Location of the view""" + return cls.environment_root().joinpath("view") + + @classmethod + def pythonpaths(cls): + """Paths to be added to sys.path or PYTHONPATH""" + python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}" + glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**")) + result = glob.glob(glob_expr) + if not result: + msg = f"Cannot find any Python path in {cls.view_root()}" + warnings.warn(msg) + return result + + @classmethod + def bin_dirs(cls): + """Paths to be added to PATH""" + return [cls.view_root().joinpath("bin")] + + @classmethod + def spack_yaml(cls): + """Environment spack.yaml file""" + return cls.environment_root().joinpath("spack.yaml") + + def __init__(self): + if not self.spack_yaml().exists(): + self._write_spack_yaml_file() + super().__init__(self.environment_root()) + + def update_installations(self): + """Update the installations of this environment. + + The update is done using a depfile on Linux and macOS, and using the ``install_all`` + method of environments on Windows. + """ + with tty.SuppressOutput(msg_enabled=False, warn_enabled=False): + specs = self.concretize() + if specs: + colorized_specs = [ + spack.spec.Spec(x).cformat("{name}{@version}") + for x in self.spack_dev_requirements() + ] + tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})") + self.write(regenerate=False) + if sys.platform == "win32": + self.install_all() + else: + self._install_with_depfile() + self.write(regenerate=True) + + def update_syspath_and_environ(self): + """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to + the environment view. + """ + # Do minimal modifications to sys.path and environment variables. 
In particular, pay + # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact + # the performance of the current interpreter + sys.path.extend(self.pythonpaths()) + os.environ["PATH"] = os.pathsep.join( + [str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep) + ) + os.environ["PYTHONPATH"] = os.pathsep.join( + os.environ.get("PYTHONPATH", "").split(os.pathsep) + + [str(x) for x in self.pythonpaths()] + ) + + def _install_with_depfile(self): + spackcmd = spack.util.executable.which("spack") + spackcmd( + "-e", + str(self.environment_root()), + "env", + "depfile", + "-o", + str(self.environment_root().joinpath("Makefile")), + ) + make = spack.util.executable.which("make") + kwargs = {} + if not tty.is_debug(): + kwargs = {"output": os.devnull, "error": os.devnull} + make( + "-C", + str(self.environment_root()), + "-j", + str(spack.build_environment.determine_number_of_jobs(parallel=True)), + **kwargs, + ) + + def _write_spack_yaml_file(self): + tty.msg( + "[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment" + ) + env = spack.tengine.make_environment() + template = env.get_template("bootstrap/spack.yaml") + context = { + "python_spec": spec_for_current_python(), + "python_prefix": sys.exec_prefix, + "architecture": archspec.cpu.host().family, + "environment_path": self.environment_root(), + "environment_specs": self.spack_dev_requirements(), + "store_path": store_path(), + } + self.environment_root().mkdir(parents=True, exist_ok=True) + self.spack_yaml().write_text(template.render(context), encoding="utf-8") + + +def isort_root_spec(): + """Return the root spec used to bootstrap isort""" + return _root_spec("py-isort@4.3.5:") + + +def mypy_root_spec(): + """Return the root spec used to bootstrap mypy""" + return _root_spec("py-mypy@0.900:") + + +def black_root_spec(): + """Return the root spec used to bootstrap black""" + return _root_spec("py-black") + + +def flake8_root_spec(): + """Return the root spec used to bootstrap flake8""" + return _root_spec("py-flake8") + + +def pytest_root_spec(): + """Return the root spec used to bootstrap flake8""" + return _root_spec("py-pytest") + + +def ensure_environment_dependencies(): + """Ensure Spack dependencies from the bootstrap environment are installed and ready to use""" + with BootstrapEnvironment() as env: + env.update_installations() + env.update_syspath_and_environ() diff --git a/lib/spack/spack/bootstrap/status.py b/lib/spack/spack/bootstrap/status.py new file mode 100644 index 0000000000..c8569ce7ea --- /dev/null +++ b/lib/spack/spack/bootstrap/status.py @@ -0,0 +1,169 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Query the status of bootstrapping on this machine""" +import platform + +import spack.util.executable + +from ._common import _executables_in_store, _python_import, _try_import_from_store +from .config import ensure_bootstrap_configuration +from .core import clingo_root_spec, patchelf_root_spec +from .environment import ( + BootstrapEnvironment, + black_root_spec, + flake8_root_spec, + isort_root_spec, + mypy_root_spec, + pytest_root_spec, +) + + +def _required_system_executable(exes, msg): + """Search for an executable is the system path only.""" + if isinstance(exes, str): + exes = (exes,) + if spack.util.executable.which_string(*exes): + return True, None + return False, msg + + +def _required_executable(exes, query_spec, msg): + """Search for an executable in the system path or in the bootstrap store.""" + if isinstance(exes, str): + exes = (exes,) + if spack.util.executable.which_string(*exes) or _executables_in_store(exes, query_spec): + return True, None + return False, msg + + +def _required_python_module(module, query_spec, msg): + """Check if a Python module is available in the current interpreter or + if it can be loaded from the bootstrap store + """ + if _python_import(module) or _try_import_from_store(module, query_spec): + return True, None + return False, msg + + +def _missing(name, purpose, system_only=True): + """Message to be printed if an executable is not found""" + msg = '[{2}] MISSING "{0}": {1}' + if not system_only: + return msg.format(name, purpose, "@*y{{B}}") + return msg.format(name, purpose, "@*y{{-}}") + + +def _core_requirements(): + _core_system_exes = { + "make": _missing("make", "required to build software from sources"), + "patch": _missing("patch", "required to patch source code before building"), + "bash": _missing("bash", "required for Spack compiler wrapper"), + "tar": _missing("tar", "required to manage code archives"), + "gzip": _missing("gzip", "required to compress/decompress code archives"), + "unzip": _missing("unzip", "required to compress/decompress code archives"), + "bzip2": _missing("bzip2", "required to compress/decompress code archives"), + "git": _missing("git", "required to fetch/manage git repositories"), + } + if platform.system().lower() == "linux": + _core_system_exes["xz"] = _missing("xz", "required to compress/decompress code archives") + + # Executables that are not bootstrapped yet + result = [_required_system_executable(exe, msg) for exe, msg in _core_system_exes.items()] + # Python modules + result.append( + _required_python_module( + "clingo", clingo_root_spec(), _missing("clingo", "required to concretize specs", False) + ) + ) + return result + + +def _buildcache_requirements(): + _buildcache_exes = { + "file": _missing("file", "required to analyze files for buildcaches"), + ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False), + } + if platform.system().lower() == "darwin": + _buildcache_exes["otool"] = _missing("otool", "required to relocate binaries") + + # Executables that are not bootstrapped yet + result = [_required_system_executable(exe, msg) for exe, msg in _buildcache_exes.items()] + + if platform.system().lower() == "linux": + result.append( + _required_executable( + "patchelf", + patchelf_root_spec(), + _missing("patchelf", "required to relocate binaries", False), + ) + ) + + return result + + +def _optional_requirements(): + _optional_exes = { + "zstd": _missing("zstd", "required to compress/decompress code archives"), + "svn": 
_missing("svn", "required to manage subversion repositories"), + "hg": _missing("hg", "required to manage mercurial repositories"), + } + # Executables that are not bootstrapped yet + result = [_required_system_executable(exe, msg) for exe, msg in _optional_exes.items()] + return result + + +def _development_requirements(): + # Ensure we trigger environment modifications if we have an environment + if BootstrapEnvironment.spack_yaml().exists(): + with BootstrapEnvironment() as env: + env.update_syspath_and_environ() + + return [ + _required_executable( + "isort", isort_root_spec(), _missing("isort", "required for style checks", False) + ), + _required_executable( + "mypy", mypy_root_spec(), _missing("mypy", "required for style checks", False) + ), + _required_executable( + "flake8", flake8_root_spec(), _missing("flake8", "required for style checks", False) + ), + _required_executable( + "black", black_root_spec(), _missing("black", "required for code formatting", False) + ), + _required_python_module( + "pytest", pytest_root_spec(), _missing("pytest", "required to run unit-test", False) + ), + ] + + +def status_message(section): + """Return a status message to be printed to screen that refers to the + section passed as argument and a bool which is True if there are missing + dependencies. + + Args: + section (str): either 'core' or 'buildcache' or 'optional' or 'develop' + """ + pass_token, fail_token = "@*g{[PASS]}", "@*r{[FAIL]}" + + # Contain the header of the section and a list of requirements + spack_sections = { + "core": ("{0} @*{{Core Functionalities}}", _core_requirements), + "buildcache": ("{0} @*{{Binary packages}}", _buildcache_requirements), + "optional": ("{0} @*{{Optional Features}}", _optional_requirements), + "develop": ("{0} @*{{Development Dependencies}}", _development_requirements), + } + msg, required_software = spack_sections[section] + + with ensure_bootstrap_configuration(): + missing_software = False + for found, err_msg in required_software(): + if not found: + missing_software = True + msg += "\n " + err_msg + msg += "\n" + msg = msg.format(pass_token if not missing_software else fail_token) + return msg, missing_software diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py index 6536cec10f..bef1979f0a 100644 --- a/lib/spack/spack/cmd/bootstrap.py +++ b/lib/spack/spack/cmd/bootstrap.py @@ -5,7 +5,6 @@ from __future__ import print_function import os.path -import platform import shutil import tempfile @@ -15,6 +14,8 @@ import spack import spack.bootstrap +import spack.bootstrap.config +import spack.bootstrap.core import spack.cmd.common.arguments import spack.config import spack.main @@ -75,7 +76,8 @@ def _add_scope_option(parser): def setup_parser(subparser): sp = subparser.add_subparsers(dest="subcommand") - sp.add_parser("now", help="Spack ready, right now!") + now = sp.add_parser("now", help="Spack ready, right now!") + now.add_argument("--dev", action="store_true", help="bootstrap dev dependencies too") status = sp.add_parser("status", help="get the status of Spack") status.add_argument( @@ -194,7 +196,7 @@ def _root(args): def _list(args): - sources = spack.bootstrap.bootstrapping_sources(scope=args.scope) + sources = spack.bootstrap.core.bootstrapping_sources(scope=args.scope) if not sources: llnl.util.tty.msg("No method available for bootstrapping Spack's dependencies") return @@ -298,7 +300,7 @@ def _status(args): sections.append("develop") header = "@*b{{Spack v{0} - {1}}}".format( - spack.spack_version, 
spack.bootstrap.spec_for_current_python() + spack.spack_version, spack.bootstrap.config.spec_for_current_python() ) print(llnl.util.tty.color.colorize(header)) print() @@ -323,7 +325,7 @@ def _status(args): def _add(args): - initial_sources = spack.bootstrap.bootstrapping_sources() + initial_sources = spack.bootstrap.core.bootstrapping_sources() names = [s["name"] for s in initial_sources] # If the name is already used error out @@ -353,7 +355,7 @@ def _add(args): def _remove(args): - initial_sources = spack.bootstrap.bootstrapping_sources() + initial_sources = spack.bootstrap.core.bootstrapping_sources() names = [s["name"] for s in initial_sources] if args.name not in names: msg = ( @@ -386,7 +388,10 @@ def _mirror(args): # TODO: Here we are adding gnuconfig manually, but this can be fixed # TODO: as soon as we have an option to add to a mirror all the possible # TODO: dependencies of a spec - root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ["gnuconfig"] + root_specs = spack.bootstrap.all_core_root_specs() + ["gnuconfig"] + if args.dev: + root_specs += spack.bootstrap.BootstrapEnvironment.spack_dev_requirements() + for spec_str in root_specs: msg = 'Adding "{0}" and dependencies to the mirror at {1}' llnl.util.tty.msg(msg.format(spec_str, mirror_dir)) @@ -436,10 +441,9 @@ def write_metadata(subdir, metadata): def _now(args): with spack.bootstrap.ensure_bootstrap_configuration(): - if platform.system().lower() == "linux": - spack.bootstrap.ensure_patchelf_in_path_or_raise() - spack.bootstrap.ensure_clingo_importable_or_raise() - spack.bootstrap.ensure_gpg_in_path_or_raise() + spack.bootstrap.ensure_core_dependencies() + if args.dev: + spack.bootstrap.ensure_environment_dependencies() def bootstrap(parser, args): diff --git a/lib/spack/spack/cmd/style.py b/lib/spack/spack/cmd/style.py index 0172c9f6d4..2be043425c 100644 --- a/lib/spack/spack/cmd/style.py +++ b/lib/spack/spack/cmd/style.py @@ -2,9 +2,6 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from __future__ import print_function - import argparse import os import re @@ -15,7 +12,6 @@ import llnl.util.tty.color as color from llnl.util.filesystem import working_dir -import spack.bootstrap import spack.paths from spack.util.executable import which @@ -25,7 +21,7 @@ def grouper(iterable, n, fillvalue=None): - "Collect data into fixed-length chunks or blocks" + """Collect data into fixed-length chunks or blocks""" # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx" args = [iter(iterable)] * n for group in zip_longest(*args, fillvalue=fillvalue): @@ -41,16 +37,13 @@ def grouper(iterable, n, fillvalue=None): #: double-check the results of other tools (if, e.g., --fix was provided) #: The list maps an executable name to a method to ensure the tool is #: bootstrapped or present in the environment. 
-tool_order = [ - ("isort", spack.bootstrap.ensure_isort_in_path_or_raise), - ("mypy", spack.bootstrap.ensure_mypy_in_path_or_raise), - ("black", spack.bootstrap.ensure_black_in_path_or_raise), - ("flake8", spack.bootstrap.ensure_flake8_in_path_or_raise), +tool_names = [ + "isort", + "mypy", + "black", + "flake8", ] -#: list of just the tool names -- for argparse -tool_names = [k for k, _ in tool_order] - #: tools we run in spack style tools = {} @@ -222,10 +215,8 @@ def translate(match): print(line) -def print_style_header(file_list, args, selected): - tools = [tool for tool in tool_names if tool in selected] - tty.msg("Running style checks on spack", "selected: " + ", ".join(tools)) - +def print_style_header(file_list, args, tools_to_run): + tty.msg("Running style checks on spack", "selected: " + ", ".join(tools_to_run)) # translate modified paths to cwd_relative if needed paths = [filename.strip() for filename in file_list] if not args.root_relative: @@ -384,6 +375,17 @@ def validate_toolset(arg_value): return tools +def missing_tools(tools_to_run): + return [t for t in tools_to_run if which(t) is None] + + +def _bootstrap_dev_dependencies(): + import spack.bootstrap + + with spack.bootstrap.ensure_bootstrap_configuration(): + spack.bootstrap.ensure_environment_dependencies() + + def style(parser, args): # save initial working directory for relativizing paths later args.initial_working_dir = os.getcwd() @@ -418,25 +420,20 @@ def prefix_relative(path): tty.msg("Nothing to run.") return + tools_to_run = [t for t in tool_names if t in selected] + if missing_tools(tools_to_run): + _bootstrap_dev_dependencies() + return_code = 0 with working_dir(args.root): if not file_list: file_list = changed_files(args.base, args.untracked, args.all) - print_style_header(file_list, args, selected) - - tools_to_run = [(tool, fn) for tool, fn in tool_order if tool in selected] - commands = {} - with spack.bootstrap.ensure_bootstrap_configuration(): - # bootstrap everything first to get commands - for tool_name, bootstrap_fn in tools_to_run: - commands[tool_name] = bootstrap_fn() - - # run tools once bootstrapping is done - for tool_name, bootstrap_fn in tools_to_run: - run_function, required = tools[tool_name] - print_tool_header(tool_name) - return_code |= run_function(commands[tool_name], file_list, args) + print_style_header(file_list, args, tools_to_run) + for tool_name in tools_to_run: + run_function, required = tools[tool_name] + print_tool_header(tool_name) + return_code |= run_function(which(tool_name), file_list, args) if return_code == 0: tty.msg(color.colorize("@*{spack style checks were clean}")) diff --git a/lib/spack/spack/cmd/unit_test.py b/lib/spack/spack/cmd/unit_test.py index 55c8b17f9c..8583c35710 100644 --- a/lib/spack/spack/cmd/unit_test.py +++ b/lib/spack/spack/cmd/unit_test.py @@ -21,7 +21,6 @@ import llnl.util.tty.color as color from llnl.util.tty.colify import colify -import spack.bootstrap import spack.paths description = "run spack's unit tests (wrapper around pytest)" @@ -207,6 +206,7 @@ def add_back_pytest_args(args, unknown_args): def unit_test(parser, args, unknown_args): global pytest + import spack.bootstrap # Ensure clingo is available before switching to the # mock configuration used by unit tests @@ -214,12 +214,10 @@ def unit_test(parser, args, unknown_args): # clingo is wholly unsupported from bootstrap if not is_windows: with spack.bootstrap.ensure_bootstrap_configuration(): - spack.bootstrap.ensure_clingo_importable_or_raise() - - if pytest is None: - 
vendored_pytest_dir = os.path.join(spack.paths.external_path, "pytest-fallback") - sys.path.append(vendored_pytest_dir) - import pytest + spack.bootstrap.ensure_core_dependencies() + if pytest is None: + spack.bootstrap.ensure_environment_dependencies() + import pytest if args.pytest_help: # make the pytest.main help output more accurate diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 6ac6908ff3..7d23f6aa6c 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -19,7 +19,6 @@ from llnl.util.lang import dedupe from llnl.util.symlink import symlink -import spack.bootstrap import spack.compilers import spack.concretize import spack.config @@ -1344,6 +1343,8 @@ def _concretize_separately(self, tests=False): """Concretization strategy that concretizes separately one user spec after the other. """ + import spack.bootstrap + # keep any concretized specs whose user specs are still in the manifest old_concretized_user_specs = self.concretized_user_specs old_concretized_order = self.concretized_order @@ -1368,7 +1369,7 @@ def _concretize_separately(self, tests=False): # Ensure we don't try to bootstrap clingo in parallel if spack.config.get("config:concretizer", "clingo") == "clingo": with spack.bootstrap.ensure_bootstrap_configuration(): - spack.bootstrap.ensure_clingo_importable_or_raise() + spack.bootstrap.ensure_core_dependencies() # Ensure all the indexes have been built or updated, since # otherwise the processes in the pool may timeout on waiting diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py index f7231a9b34..ec864c2d84 100644 --- a/lib/spack/spack/relocate.py +++ b/lib/spack/spack/relocate.py @@ -18,7 +18,6 @@ from llnl.util.lang import memoized from llnl.util.symlink import symlink -import spack.bootstrap import spack.paths import spack.platforms import spack.repo @@ -92,6 +91,8 @@ def __init__(self, old, new, full_old_string): @memoized def _patchelf(): """Return the full path to the patchelf binary, if available, else None.""" + import spack.bootstrap + if is_macos: return None diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 8809121e1d..c15793a230 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -30,7 +30,6 @@ import spack import spack.binary_distribution -import spack.bootstrap import spack.cmd import spack.compilers import spack.config @@ -541,8 +540,10 @@ def bootstrap_clingo(): global clingo, ASTType, parse_files if not clingo: + import spack.bootstrap + with spack.bootstrap.ensure_bootstrap_configuration(): - spack.bootstrap.ensure_clingo_importable_or_raise() + spack.bootstrap.ensure_core_dependencies() import clingo from clingo.ast import ASTType diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py index bfb8eee194..5823800751 100644 --- a/lib/spack/spack/store.py +++ b/lib/spack/spack/store.py @@ -191,18 +191,6 @@ def _store(): root, unpadded_root, projections = parse_install_tree(config_dict) hash_length = spack.config.get("config:install_hash_length") - # Check that the user is not trying to install software into the store - # reserved by Spack to bootstrap its own dependencies, since this would - # lead to bizarre behaviors (e.g. 
cleaning the bootstrap area would wipe - # user installed software) - enable_bootstrap = spack.config.get("bootstrap:enable", True) - if enable_bootstrap and spack.bootstrap.store_path() == root: - msg = ( - 'please change the install tree root "{0}" in your ' - "configuration [path reserved for Spack internal use]" - ) - raise ValueError(msg.format(root)) - return Store( root=root, unpadded_root=unpadded_root, projections=projections, hash_length=hash_length ) diff --git a/lib/spack/spack/test/bootstrap.py b/lib/spack/spack/test/bootstrap.py index 06e9e5f2bc..507bcf5e54 100644 --- a/lib/spack/spack/test/bootstrap.py +++ b/lib/spack/spack/test/bootstrap.py @@ -7,6 +7,8 @@ import pytest import spack.bootstrap +import spack.bootstrap.config +import spack.bootstrap.core import spack.compilers import spack.environment import spack.store @@ -33,7 +35,7 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir): # Test that within the context manager we use the bootstrap store # and that outside we restore the correct location with spack.bootstrap.ensure_bootstrap_configuration(): - assert spack.store.root == spack.bootstrap.store_path() + assert spack.store.root == spack.bootstrap.config.store_path() assert spack.store.root == user_path @@ -51,7 +53,7 @@ def test_store_path_customization(config_value, expected, mutable_config): spack.config.set("bootstrap:root", config_value) # Check the store path - current = spack.bootstrap.store_path() + current = spack.bootstrap.config.store_path() assert current == spack.util.path.canonicalize_path(expected) @@ -61,7 +63,7 @@ def test_raising_exception_if_bootstrap_disabled(mutable_config): # Check the correct exception is raised with pytest.raises(RuntimeError, match="bootstrapping is currently disabled"): - spack.bootstrap.store_path() + spack.bootstrap.config.store_path() def test_raising_exception_module_importable(): @@ -69,7 +71,7 @@ def test_raising_exception_module_importable(): ImportError, match='cannot bootstrap the "asdf" Python module', ): - spack.bootstrap.ensure_module_importable_or_raise("asdf") + spack.bootstrap.core.ensure_module_importable_or_raise("asdf") def test_raising_exception_executables_in_path(): @@ -77,7 +79,7 @@ def test_raising_exception_executables_in_path(): RuntimeError, match="cannot bootstrap any of the asdf, fdsa executables", ): - spack.bootstrap.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python") + spack.bootstrap.core.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python") @pytest.mark.regression("25603") @@ -175,13 +177,15 @@ def test_nested_use_of_context_manager(mutable_config): def test_status_function_find_files( mutable_config, mock_executable, tmpdir, monkeypatch, expected_missing ): + import spack.bootstrap.status + if not expected_missing: mock_executable("foo", "echo Hello WWorld!") monkeypatch.setattr( - spack.bootstrap, + spack.bootstrap.status, "_optional_requirements", - lambda: [spack.bootstrap._required_system_executable("foo", "NOT FOUND")], + lambda: [spack.bootstrap.status._required_system_executable("foo", "NOT FOUND")], ) monkeypatch.setenv("PATH", str(tmpdir.join("bin"))) @@ -192,15 +196,15 @@ def test_status_function_find_files( @pytest.mark.regression("31042") def test_source_is_disabled(mutable_config): # Get the configuration dictionary of the current bootstrapping source - conf = next(iter(spack.bootstrap.bootstrapping_sources())) + conf = next(iter(spack.bootstrap.core.bootstrapping_sources())) # The source is not explicitly enabled or disabled, so 
the following # call should raise to skip using it for bootstrapping with pytest.raises(ValueError): - spack.bootstrap.source_is_enabled_or_raise(conf) + spack.bootstrap.core.source_is_enabled_or_raise(conf) # Try to explicitly disable the source and verify that the behavior # is the same as above spack.config.add("bootstrap:trusted:{0}:{1}".format(conf["name"], False)) with pytest.raises(ValueError): - spack.bootstrap.source_is_enabled_or_raise(conf) + spack.bootstrap.core.source_is_enabled_or_raise(conf) diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index 68d8c8abaf..410799a011 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -149,7 +149,7 @@ @pytest.fixture(scope="function") -def wrapper_environment(): +def wrapper_environment(working_env): with set_env( SPACK_CC=real_cc, SPACK_CXX=real_cc, diff --git a/lib/spack/spack/test/cmd/bootstrap.py b/lib/spack/spack/test/cmd/bootstrap.py index e969f7fc6f..7c39806ac1 100644 --- a/lib/spack/spack/test/cmd/bootstrap.py +++ b/lib/spack/spack/test/cmd/bootstrap.py @@ -7,6 +7,8 @@ import pytest +import spack.bootstrap +import spack.bootstrap.core import spack.config import spack.environment as ev import spack.main @@ -157,17 +159,17 @@ def test_remove_failure_for_non_existing_names(mutable_config): def test_remove_and_add_a_source(mutable_config): # Check we start with a single bootstrapping source - sources = spack.bootstrap.bootstrapping_sources() + sources = spack.bootstrap.core.bootstrapping_sources() assert len(sources) == 1 # Remove it and check the result _bootstrap("remove", "github-actions") - sources = spack.bootstrap.bootstrapping_sources() + sources = spack.bootstrap.core.bootstrapping_sources() assert not sources # Add it back and check we restored the initial state _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.3") - sources = spack.bootstrap.bootstrapping_sources() + sources = spack.bootstrap.core.bootstrapping_sources() assert len(sources) == 1 @@ -206,4 +208,4 @@ def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir _bootstrap("add", "--trust", "test-mirror", str(metadata_dir)) assert _bootstrap.returncode == 0 - assert any(m["name"] == "test-mirror" for m in spack.bootstrap.bootstrapping_sources()) + assert any(m["name"] == "test-mirror" for m in spack.bootstrap.core.bootstrapping_sources()) diff --git a/lib/spack/spack/util/gpg.py b/lib/spack/spack/util/gpg.py index ffee274c3f..0efdef99c2 100644 --- a/lib/spack/spack/util/gpg.py +++ b/lib/spack/spack/util/gpg.py @@ -8,7 +8,6 @@ import os import re -import spack.bootstrap import spack.error import spack.paths import spack.util.executable @@ -47,6 +46,8 @@ def init(gnupghome=None, force=False): global objects are set already """ global GPG, GPGCONF, SOCKET_DIR, GNUPGHOME + import spack.bootstrap + if force: clear() @@ -59,7 +60,7 @@ def init(gnupghome=None, force=False): # Set the executable objects for "gpg" and "gpgconf" with spack.bootstrap.ensure_bootstrap_configuration(): - spack.bootstrap.ensure_gpg_in_path_or_raise() + spack.bootstrap.ensure_core_dependencies() GPG, GPGCONF = _gpg(), _gpgconf() GPG.add_default_env("GNUPGHOME", GNUPGHOME) diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index b66f4cb1aa..5c90b1b5f3 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -408,7 +408,7 @@ _spack_bootstrap() { } _spack_bootstrap_now() { - SPACK_COMPREPLY="-h --help" + SPACK_COMPREPLY="-h --help --dev" 
} _spack_bootstrap_status() { diff --git a/share/spack/templates/bootstrap/spack.yaml b/share/spack/templates/bootstrap/spack.yaml new file mode 100644 index 0000000000..4573bb485e --- /dev/null +++ b/share/spack/templates/bootstrap/spack.yaml @@ -0,0 +1,34 @@ +# This environment contains Spack non-core dependencies for the +# following configuration +# +# Python spec: {{ python_spec }} +# Python interpreter: {{ python_prefix }} +# Architecture: {{ architecture }} +# +spack: + specs: +{% for spec in environment_specs %} + - "{{ spec }}" +{% endfor %} + view: {{ environment_path }}/view + + config: + install_tree: + root: {{ store_path }} + + packages: + python: + buildable: false + externals: + - spec: "{{ python_spec }}" + prefix: "{{ python_prefix }}" + + py-typed-ast: + require: "+wheel" + + py-platformdirs: + require: "+wheel" + + concretizer: + reuse: false + unify: true diff --git a/var/spack/repos/builtin/packages/py-black/package.py b/var/spack/repos/builtin/packages/py-black/package.py index d2d3cd286f..21c114aa0b 100644 --- a/var/spack/repos/builtin/packages/py-black/package.py +++ b/var/spack/repos/builtin/packages/py-black/package.py @@ -52,6 +52,9 @@ class PyBlack(PythonPackage): depends_on("py-ipython@7.8:", when="+jupyter", type=("build", "run")) depends_on("py-tokenize-rt@3.2:", when="+jupyter", type=("build", "run")) + # Needed because this package is used to bootstrap Spack (Spack supports Python 3.6+) + depends_on("py-dataclasses@0.6:", when="^python@:3.6", type=("build", "run")) + # see: https://github.com/psf/black/issues/2964 # note that pip doesn't know this constraint. depends_on("py-click@:8.0", when="@:22.2", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-dataclasses/package.py b/var/spack/repos/builtin/packages/py-dataclasses/package.py new file mode 100644 index 0000000000..a3de7b2313 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dataclasses/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyDataclasses(PythonPackage): + """A backport of the dataclasses module for Python 3.6""" + + homepage = "https://github.com/ericvsmith/dataclasses" + pypi = "dataclasses/dataclasses-0.7.tar.gz" + + version("0.8", sha256="8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97") + version("0.7", sha256="494a6dcae3b8bcf80848eea2ef64c0cc5cd307ffc263e17cdf42f3e5420808e6") + + depends_on("python@3.6.00:3.6", type=("build", "run")) + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-platformdirs/package.py b/var/spack/repos/builtin/packages/py-platformdirs/package.py index f226506db5..ee5a832315 100644 --- a/var/spack/repos/builtin/packages/py-platformdirs/package.py +++ b/var/spack/repos/builtin/packages/py-platformdirs/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import spack.build_systems.python from spack.package import * @@ -18,9 +19,24 @@ class PyPlatformdirs(PythonPackage): version("2.4.0", sha256="367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2") version("2.3.0", sha256="15b056538719b1c94bdaccb29e5f81879c7f7f0f4a153f46086d155dffcd4f0f") + variant( + "wheel", + default=False, + sticky=True, + description="Install from wheel (required for bootstrapping Spack)", + ) + depends_on("python@3.7:", when="@2.4.1:", type=("build", "run")) depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools@44:", when="@:2.5.1", type="build") depends_on("py-setuptools-scm@5:+toml", when="@:2.5.1", type="build") depends_on("py-hatchling@0.22.0:", when="@2.5.2:", type="build") depends_on("py-hatch-vcs", when="@2.5.2:", type="build") + + +class PythonPipBuilder(spack.build_systems.python.PythonPipBuilder): + @when("+wheel") + def install(self, pkg, spec, prefix): + args = list(filter(lambda x: x != "--no-index", self.std_args(self.pkg))) + args += [f"--prefix={prefix}", self.spec.format("platformdirs=={version}")] + pip(*args) diff --git a/var/spack/repos/builtin/packages/py-typed-ast/package.py b/var/spack/repos/builtin/packages/py-typed-ast/package.py index 4dc338e06a..16f6fc03ab 100644 --- a/var/spack/repos/builtin/packages/py-typed-ast/package.py +++ b/var/spack/repos/builtin/packages/py-typed-ast/package.py @@ -2,7 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +import spack.build_systems.python from spack.package import * @@ -23,7 +23,22 @@ class PyTypedAst(PythonPackage): url="https://files.pythonhosted.org/packages/source/t/typed-ast/typed-ast-1.3.5.tar.gz", ) + variant( + "wheel", + default=False, + sticky=True, + description="Install from wheel (required for bootstrapping Spack)", + ) + depends_on("python@3.3:", type=("build", "link", "run")) depends_on("python@3.6:", when="@1.5.4:", type=("build", "link", "run")) depends_on("python@:3.8", when="@:1.4.0") # build errors with 3.9 until 1.4.1 depends_on("py-setuptools", type="build") + + +class PythonPipBuilder(spack.build_systems.python.PythonPipBuilder): + @when("+wheel") + def install(self, pkg, spec, prefix): + args = list(filter(lambda x: x != "--no-index", self.std_args(self.pkg))) + args += [f"--prefix={prefix}", self.spec.format("typed-ast=={version}")] + pip(*args) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 513364e05e..3d3e955024 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -42,7 +42,7 @@ class Python(Package): #: phase install_targets = ["install"] - build_targets = [] # type: List[str] + build_targets: List[str] = [] version("3.11.0", sha256="64424e96e2457abbac899b90f9530985b51eef2905951febd935f0e73414caeb") version( @@ -107,6 +107,13 @@ class Python(Package): version("3.7.1", sha256="36c1b81ac29d0f8341f727ef40864d99d8206897be96be73dc34d4739c9c9f06") version("3.7.0", sha256="85bb9feb6863e04fb1700b018d9d42d1caac178559ffa453d7e6a436e259fd0d") + # Python 3.6.15 has been added back only to allow bootstrapping Spack on Python 3.6 + version( + "3.6.15", + sha256="54570b7e339e2cfd72b29c7e2fdb47c0b7b18b7412e61de5b463fc087c13b043", + deprecated=True, + ) + extendable = True # Variants to avoid cyclical dependencies for concretizer @@ -226,7 +233,7 @@ class Python(Package): conflicts("%nvhpc") # Used to cache various attributes that are expensive to compute - _config_vars = {} # type: Dict[str, Dict[str, str]] + _config_vars: Dict[str, Dict[str, str]] = {} # An in-source build with --enable-optimizations fails for python@3.X build_directory = "spack-build" @@ -727,6 +734,12 @@ def command(self): return Executable(path) else: + # Give a last try at rhel8 platform python + if self.spec.external and self.prefix == "/usr" and self.spec.satisfies("os=rhel8"): + path = os.path.join(self.prefix, "libexec", "platform-python") + if os.path.exists(path): + return Executable(path) + msg = "Unable to locate {0} command in {1}" raise RuntimeError(msg.format(self.name, self.prefix.bin))
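
For readers following the refactor, a minimal standalone sketch of the registry pattern the new bootstrap core module is built on: a decorator maps a type string to a class, and a factory instantiates the matching class from a configuration entry. The class and configuration values below are simplified stand-ins, not the real Spack objects.

# Simplified illustration of the bootstrapper()/create_bootstrapper() pattern.
# Only the dispatch mechanics are shown; the real classes live in
# lib/spack/spack/bootstrap/core.py.
_bootstrap_methods = {}


def bootstrapper(bootstrapper_type):
    """Register a class under the given bootstrapper type string."""

    def _register(cls):
        _bootstrap_methods[bootstrapper_type] = cls
        return cls

    return _register


@bootstrapper(bootstrapper_type="buildcache")
class FakeBuildcacheBootstrapper:
    """Toy stand-in for the buildcache bootstrapper."""

    def __init__(self, conf):
        self.name = conf["name"]

    def try_import(self, module, abstract_spec_str):
        return False


def create_bootstrapper(conf):
    """Dispatch on conf["type"] to build the matching bootstrapper."""
    return _bootstrap_methods[conf["type"]](conf)


if __name__ == "__main__":
    source = {"type": "buildcache", "name": "github-actions"}
    print(type(create_bootstrapper(source)).__name__)  # FakeBuildcacheBootstrapper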
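
The ensure_*_or_raise functions share one control flow: try every configured source in order, record failures instead of aborting, and raise a single grouped error only if no source succeeds. A rough standard-library-only model of that loop, with a toy error collector standing in for llnl.util.lang.GroupedExceptionHandler, might look like this.

# Minimal sketch of the "try each source, group the failures" loop; the source
# callables here are hypothetical and do not correspond to real bootstrappers.
import importlib


def try_sources(module, sources):
    errors = []
    for source in sources:
        try:
            if source(module):
                return True
        except Exception as exc:  # collect the failure, keep trying other sources
            errors.append((source.__name__, exc))
    details = "\n".join(f"  {name}: {exc}" for name, exc in errors)
    raise ImportError(f"cannot bootstrap '{module}':\n{details}")


def _already_importable(module):
    importlib.import_module(module)  # raises ImportError if the module is missing
    return True


def _pretend_buildcache(module):
    raise RuntimeError("no binary available for this platform")


if __name__ == "__main__":
    # "json" is importable, so the first source succeeds and the loop exits early.
    print(try_sources("json", [_already_importable, _pretend_buildcache]))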
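
The status module reduces each requirement to a (found, message) pair and marks a section as failed when any message survives. A small illustrative sketch of that idea, using shutil.which in place of Spack's which_string and made-up requirement messages, could be:

# Hypothetical mini version of the _required_system_executable() checks behind
# `spack bootstrap status`; tool names and messages are illustrative only.
import shutil


def required_system_executable(exes, msg):
    """Return (True, None) if any of the executables is on PATH, else (False, msg)."""
    if isinstance(exes, str):
        exes = (exes,)
    if any(shutil.which(exe) for exe in exes):
        return True, None
    return False, msg


def core_status():
    checks = [
        required_system_executable("tar", "tar is required to manage code archives"),
        required_system_executable(("gpg2", "gpg"), "gpg is required to verify buildcaches"),
    ]
    missing = [msg for found, msg in checks if not found]
    return not missing, missing


if __name__ == "__main__":
    ok, problems = core_status()
    print("PASS" if ok else "FAIL: " + "; ".join(problems))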