diff --git a/lib/spack/docs/build_systems/autotoolspackage.rst b/lib/spack/docs/build_systems/autotoolspackage.rst
index abf25f149b..8b8ccb8f35 100644
--- a/lib/spack/docs/build_systems/autotoolspackage.rst
+++ b/lib/spack/docs/build_systems/autotoolspackage.rst
@@ -127,9 +127,9 @@ check out a commit from the ``master`` branch, you would want to add:

.. code-block:: python

-   depends_on('autoconf', type='build', when='@master')
-   depends_on('automake', type='build', when='@master')
-   depends_on('libtool', type='build', when='@master')
+   depends_on("autoconf", type="build", when="@master")
+   depends_on("automake", type="build", when="@master")
+   depends_on("libtool", type="build", when="@master")

It is typically redundant to list the ``m4`` macro processor package as
a dependency, since ``autoconf`` already depends on it.
@@ -145,7 +145,7 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.

.. code-block:: python

   def autoreconf(self, spec, prefix):
-       which('bash')('autogen.sh')
+       which("bash")("autogen.sh")

"""""""""""""""""""""""""""""""""""""""
patching configure or Makefile.in files
@@ -186,9 +186,9 @@ To opt out of this feature, use the following setting:

To enable it conditionally on different architectures, define a
property and make the package depend on ``gnuconfig`` as a build dependency:

-.. code-block
+.. code-block:: python

-   depends_on('gnuconfig', when='@1.0:')
+   depends_on("gnuconfig", when="@1.0:")

   @property
   def patch_config_files(self):
@@ -230,7 +230,7 @@ version, this can be done like so:

   @property
   def force_autoreconf(self):
-       return self.version == Version('1.2.3')
+       return self.version == Version("1.2.3")

^^^^^^^^^^^^^^^^^^^^^^^
Finding configure flags
@@ -278,13 +278,22 @@ function like so:

   def configure_args(self):
       args = []

-       if '+mpi' in self.spec:
-           args.append('--enable-mpi')
+       if self.spec.satisfies("+mpi"):
+           args.append("--enable-mpi")
       else:
-           args.append('--disable-mpi')
+           args.append("--disable-mpi")

       return args
+
+Alternatively, you can use the :ref:`enable_or_disable ` helper:
+
+.. code-block:: python
+
+   def configure_args(self):
+       return [self.enable_or_disable("mpi")]
+
+

Note that we are explicitly disabling MPI support if it is not
requested. This is important, as many Autotools packages will enable
options by default if the dependencies are found, and disable them
@@ -295,9 +304,11 @@ and `here `_
@@ -113,7 +113,7 @@ you can do this like so:

.. code-block:: python

-   build_targets = ['CC=cc']
+   build_targets = ["CC=cc"]

If you do need access to the spec, you can create a property like so:

@@ -125,8 +125,8 @@ If you do need access to the spec, you can create a property like so:

       spec = self.spec

       return [
-           'CC=cc',
-           'BLASLIB={0}'.format(spec['blas'].libs.ld_flags),
+           "CC=cc",
+           f"BLASLIB={spec['blas'].libs.ld_flags}",
       ]

@@ -145,12 +145,12 @@ and a ``filter_file`` method to help with this. For example:
.. code-block:: python

   def edit(self, spec, prefix):
-       makefile = FileFilter('Makefile')
+       makefile = FileFilter("Makefile")

-       makefile.filter(r'^\s*CC\s*=.*', 'CC = ' + spack_cc)
-       makefile.filter(r'^\s*CXX\s*=.*', 'CXX = ' + spack_cxx)
-       makefile.filter(r'^\s*F77\s*=.*', 'F77 = ' + spack_f77)
-       makefile.filter(r'^\s*FC\s*=.*', 'FC = ' + spack_fc)
+       makefile.filter(r"^\s*CC\s*=.*", f"CC = {spack_cc}")
+       makefile.filter(r"^\s*CXX\s*=.*", f"CXX = {spack_cxx}")
+       makefile.filter(r"^\s*F77\s*=.*", f"F77 = {spack_f77}")
+       makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")

`stream `_
@@ -181,16 +181,16 @@ well for storing variables:

   def edit(self, spec, prefix):
       config = {
-           'CC': 'cc',
-           'MAKE': 'make',
+           "CC": "cc",
+           "MAKE": "make",
       }

-       if '+blas' in spec:
-           config['BLAS_LIBS'] = spec['blas'].libs.joined()
+       if spec.satisfies("+blas"):
+           config["BLAS_LIBS"] = spec["blas"].libs.joined()

-       with open('make.inc', 'w') as inc:
+       with open("make.inc", "w") as inc:
           for key in config:
-               inc.write('{0} = {1}\n'.format(key, config[key]))
+               inc.write(f"{key} = {config[key]}\n")

`elk `_
@@ -204,14 +204,14 @@ them in a list:

   def edit(self, spec, prefix):
       config = [
-           'INSTALL_DIR = {0}'.format(prefix),
-           'INCLUDE_DIR = $(INSTALL_DIR)/include',
-           'LIBRARY_DIR = $(INSTALL_DIR)/lib',
+           f"INSTALL_DIR = {prefix}",
+           "INCLUDE_DIR = $(INSTALL_DIR)/include",
+           "LIBRARY_DIR = $(INSTALL_DIR)/lib",
       ]

-       with open('make.inc', 'w') as inc:
+       with open("make.inc", "w") as inc:
           for var in config:
-               inc.write('{0}\n'.format(var))
+               inc.write(f"{var}\n")

`hpl `_
@@ -284,7 +284,7 @@ can tell Spack where to locate it like so:

.. code-block:: python

-   build_directory = 'src'
+   build_directory = "src"

^^^^^^^^^^^^^^^^^^^
@@ -299,8 +299,8 @@ install the package:

   def install(self, spec, prefix):
       mkdir(prefix.bin)
-       install('foo', prefix.bin)
-       install_tree('lib', prefix.lib)
+       install("foo", prefix.bin)
+       install_tree("lib", prefix.lib)

^^^^^^^^^^^^^^^^^^^^^^
diff --git a/lib/spack/docs/build_systems/pythonpackage.rst b/lib/spack/docs/build_systems/pythonpackage.rst
index 17295a457f..168ff5dc88 100644
--- a/lib/spack/docs/build_systems/pythonpackage.rst
+++ b/lib/spack/docs/build_systems/pythonpackage.rst
@@ -152,16 +152,16 @@ set. Once set, ``pypi`` will be used to define the ``homepage``,

.. code-block:: python

-   homepage = 'https://pypi.org/project/setuptools/'
-   url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip'
-   list_url = 'https://pypi.org/simple/setuptools/'
+   homepage = "https://pypi.org/project/setuptools/"
+   url = "https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip"
+   list_url = "https://pypi.org/simple/setuptools/"

is equivalent to:

.. code-block:: python

-   pypi = 'setuptools/setuptools-49.2.0.zip'
+   pypi = "setuptools/setuptools-49.2.0.zip"

If a package has a different homepage listed on PyPI, you can
@@ -208,7 +208,7 @@ dependencies to your package:

.. code-block:: python

-   depends_on('py-setuptools@42:', type='build')
+   depends_on("py-setuptools@42:", type="build")

Note that ``py-wheel`` is already listed as a build dependency in the
@@ -232,7 +232,7 @@ Look for dependencies under the following keys:

* ``dependencies`` under ``[project]``

  These packages are required for building and installation. You can
-  add them with ``type=('build', 'run')``.
+  add them with ``type=("build", "run")``.

* ``[project.optional-dependencies]``

@@ -279,12 +279,12 @@ distutils library, and has almost the exact same API. In addition to
* ``setup_requires``

  These packages are usually only needed at build-time, so you can
-  add them with ``type='build'``.
+  add them with ``type="build"``.

* ``install_requires``

  These packages are required for building and installation. You can
-  add them with ``type=('build', 'run')``.
+  add them with ``type=("build", "run")``.

* ``extras_require``

@@ -296,7 +296,7 @@ distutils library, and has almost the exact same API. In addition to

  These are packages that are required to run the unit tests for the
  package. These dependencies can be specified using the
-  ``type='test'`` dependency type. However, the PyPI tarballs rarely
+  ``type="test"`` dependency type. However, the PyPI tarballs rarely
  contain unit tests, so there is usually no reason to add these.

  See https://setuptools.pypa.io/en/latest/userguide/dependency_management.html
@@ -321,7 +321,7 @@ older versions of flit may use the following keys:

* ``requires`` under ``[tool.flit.metadata]``

  These packages are required for building and installation. You can
-  add them with ``type=('build', 'run')``.
+  add them with ``type=("build", "run")``.

* ``[tool.flit.metadata.requires-extra]``

@@ -434,12 +434,12 @@ the BLAS/LAPACK library you want pkg-config to search for:

.. code-block:: python

-   depends_on('py-pip@22.1:', type='build')
+   depends_on("py-pip@22.1:", type="build")

   def config_settings(self, spec, prefix):
       return {
-           'blas': spec['blas'].libs.names[0],
-           'lapack': spec['lapack'].libs.names[0],
+           "blas": spec["blas"].libs.names[0],
+           "lapack": spec["lapack"].libs.names[0],
       }

@@ -463,10 +463,10 @@ has an optional dependency on ``libyaml`` that can be enabled like so:

   def global_options(self, spec, prefix):
       options = []
-       if '+libyaml' in spec:
-           options.append('--with-libyaml')
+       if spec.satisfies("+libyaml"):
+           options.append("--with-libyaml")
       else:
-           options.append('--without-libyaml')
+           options.append("--without-libyaml")
       return options

@@ -492,10 +492,10 @@ allows you to specify the directories to search for ``libyaml``:

   def install_options(self, spec, prefix):
       options = []
-       if '+libyaml' in spec:
+       if spec.satisfies("+libyaml"):
           options.extend([
-               spec['libyaml'].libs.search_flags,
-               spec['libyaml'].headers.include_flags,
+               spec["libyaml"].libs.search_flags,
+               spec["libyaml"].headers.include_flags,
           ])
       return options

@@ -556,7 +556,7 @@ detected are wrong, you can provide the names yourself by overriding

.. code-block:: python

-   import_modules = ['six']
+   import_modules = ["six"]

Sometimes the list of module names to import depends on how the
@@ -571,9 +571,9 @@ This can be expressed like so:

   @property
   def import_modules(self):
-       modules = ['yaml']
-       if '+libyaml' in self.spec:
-           modules.append('yaml.cyaml')
+       modules = ["yaml"]
+       if self.spec.satisfies("+libyaml"):
+           modules.append("yaml.cyaml")
       return modules

@@ -586,14 +586,14 @@ Instead of defining the ``import_modules`` explicitly, only the subset
of module names to be skipped can be defined by using ``skip_modules``.
If a defined module has submodules, they are skipped as well, e.g., in
case the ``plotting`` modules should be excluded from the
-automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface',
-'nilearn.plotting', 'nilearn.plotting.data']`` set:
+automatically detected ``import_modules`` ``["nilearn", "nilearn.surface",
+"nilearn.plotting", "nilearn.plotting.data"]`` set:
.. code-block:: python

-   skip_modules = ['nilearn.plotting']
+   skip_modules = ["nilearn.plotting"]

-This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']``
+This will set ``import_modules`` to ``["nilearn", "nilearn.surface"]``

Import tests can be run during the installation using ``spack install
--test=root`` or at any time after the installation using
@@ -612,11 +612,11 @@ after the ``install`` phase:

.. code-block:: python

-   @run_after('install')
+   @run_after("install")
   @on_package_attributes(run_tests=True)
   def install_test(self):
-       with working_dir('spack-test', create=True):
-           python('-c', 'import numpy; numpy.test("full", verbose=2)')
+       with working_dir("spack-test", create=True):
+           python("-c", "import numpy; numpy.test('full', verbose=2)")

when testing is enabled during the installation (i.e., ``spack install
@@ -638,7 +638,7 @@ provides Python bindings in a ``python`` directory, you can use:

.. code-block:: python

-   build_directory = 'python'
+   build_directory = "python"

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/lib/spack/docs/build_systems/rocmpackage.rst b/lib/spack/docs/build_systems/rocmpackage.rst
index 636e5b8126..8f90794dfb 100644
--- a/lib/spack/docs/build_systems/rocmpackage.rst
+++ b/lib/spack/docs/build_systems/rocmpackage.rst
@@ -81,28 +81,27 @@ class of your package. For example, you can add it to your

   class MyRocmPackage(CMakePackage, ROCmPackage):
       ...
       # Ensure +rocm and amdgpu_targets are passed to dependencies
-       depends_on('mydeppackage', when='+rocm')
+       depends_on("mydeppackage", when="+rocm")
       for val in ROCmPackage.amdgpu_targets:
-           depends_on('mydeppackage amdgpu_target={0}'.format(val),
-                      when='amdgpu_target={0}'.format(val))
+           depends_on(f"mydeppackage amdgpu_target={val}",
+                      when=f"amdgpu_target={val}")
       ...

       def cmake_args(self):
           spec = self.spec
           args = []
           ...
-           if '+rocm' in spec:
+           if spec.satisfies("+rocm"):
               # Set up the hip macros needed by the build
               args.extend([
-                   '-DENABLE_HIP=ON',
-                   '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
-               rocm_archs = spec.variants['amdgpu_target'].value
-               if 'none' not in rocm_archs:
-                   args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
-                               .format(",".join(rocm_archs)))
+                   "-DENABLE_HIP=ON",
+                   f"-DHIP_ROOT_DIR={spec['hip'].prefix}"])
+               rocm_archs = spec.variants["amdgpu_target"].value
+               if "none" not in rocm_archs:
+                   args.append(f"-DHIP_HIPCC_FLAGS=--amdgpu-target={','.join(rocm_archs)}")
           else:
               # Ensure build with hip is disabled
-               args.append('-DENABLE_HIP=OFF')
+               args.append("-DENABLE_HIP=OFF")
           ...
           return args
       ...
@@ -114,7 +113,7 @@ build. This example also illustrates how to check for the ``rocm`` variant
using ``self.spec`` and how to retrieve the ``amdgpu_target`` variant's value
-using ``self.spec.variants['amdgpu_target'].value``.
+using ``self.spec.variants["amdgpu_target"].value``.

All five packages using ``ROCmPackage`` as of January 2021 also use the
:ref:`CudaPackage `. So it is worth looking at those packages
diff --git a/lib/spack/docs/build_systems/sconspackage.rst b/lib/spack/docs/build_systems/sconspackage.rst
index 18002586a0..a17e1271b8 100644
--- a/lib/spack/docs/build_systems/sconspackage.rst
+++ b/lib/spack/docs/build_systems/sconspackage.rst
@@ -57,7 +57,7 @@ overridden like so:

.. code-block:: python

   def test(self):
-       scons('check')
+       scons("check")

^^^^^^^^^^^^^^^
@@ -88,7 +88,7 @@ base class already contains:
.. code-block:: python

-   depends_on('scons', type='build')
+   depends_on("scons", type="build")

If you want to specify a particular version requirement, you can override
@@ -96,7 +96,7 @@ this in your package:

.. code-block:: python

-   depends_on('scons@2.3.0:', type='build')
+   depends_on("scons@2.3.0:", type="build")

^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -238,14 +238,14 @@ the package build phase. This is done by overriding ``build_args`` like so:

   def build_args(self, spec, prefix):
       args = [
-           'PREFIX={0}'.format(prefix),
-           'ZLIB={0}'.format(spec['zlib'].prefix),
+           f"PREFIX={prefix}",
+           f"ZLIB={spec['zlib'].prefix}",
       ]

-       if '+debug' in spec:
-           args.append('DEBUG=yes')
+       if spec.satisfies("+debug"):
+           args.append("DEBUG=yes")
       else:
-           args.append('DEBUG=no')
+           args.append("DEBUG=no")

       return args
@@ -275,8 +275,8 @@ environment variables. For example, cantera has the following option:

   * env_vars: [ string ]
       Environment variables to propagate through to SCons. Either the
       string "all" or a comma separated list of variable names, e.g.
-       'LD_LIBRARY_PATH,HOME'.
-     - default: 'LD_LIBRARY_PATH,PYTHONPATH'
+       "LD_LIBRARY_PATH,HOME".
+     - default: "LD_LIBRARY_PATH,PYTHONPATH"

In the case of cantera, using ``env_vars=all`` allows us to use
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 157236ebfc..d488ae0c7f 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1549,7 +1549,7 @@ its value:

   def configure_args(self):
       ...
-       if "+shared" in self.spec:
+       if self.spec.satisfies("+shared"):
           extra_args.append("--enable-shared")
       else:
           extra_args.append("--disable-shared")
@@ -1636,7 +1636,7 @@ Within a package recipe a multi-valued variant is tested using a ``key=value`` s

.. code-block:: python

-   if "languages=jit" in spec:
+   if spec.satisfies("languages=jit"):
       options.append("--enable-host-shared")

"""""""""""""""""""""""""""""""""""""""""""
@@ -3528,7 +3528,7 @@ need to override methods like ``configure_args``:

   def configure_args(self):
       args = ["--enable-cxx"] + self.enable_or_disable("libs")
-       if "libs=static" in self.spec:
+       if self.spec.satisfies("libs=static"):
           args.append("--with-pic")
       return args

@@ -4391,7 +4391,7 @@ for supported features, for instance:

.. code-block:: python

-   if "avx512" in spec.target:
+   if spec.satisfies("target=avx512"):
       args.append("--with-avx512")

The snippet above will append the ``--with-avx512`` item to a list of arguments only if the corresponding