From 59f89dd3bee0ee2b668554c537d1d18404108ec4 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Fri, 29 May 2015 11:04:57 -0700 Subject: [PATCH 001/189] Allow long names in format string variables --- lib/spack/spack/spec.py | 71 ++++++++++++++++++++++++++++++++++++----- 1 file changed, 63 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index e1fbb84423..3b5d16c7a7 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1503,14 +1503,28 @@ def format(self, format_string='$_$@$%@$+$=', **kwargs): in the format string. The format strings you can provide are:: $_ Package name - $@ Version - $% Compiler - $%@ Compiler & compiler version - $+ Options - $= Architecture - $# 7-char prefix of DAG hash + $@ Version with '@' prefix + $% Compiler with '%' prefix + $%@ Compiler with '%' prefix & compiler version with '@' prefix + $+ Options + $= Architecture with '=' prefix + $# 7-char prefix of DAG hash with '-' prefix $$ $ + You can also use full-string versions, which leave off the prefixes: + + ${PACKAGE} Package name + ${VERSION} Version + ${COMPILER} Full compiler string + ${COMPILERNAME} Compiler name + ${COMPILERVER} Compiler version + ${OPTIONS} Options + ${ARCHITECTURE} Architecture + ${SHA1} Dependencies 8-char sha1 prefix + + ${SPACK_ROOT} The spack root directory + ${SPACK_INSTALL} The default spack install directory, ${SPACK_PREFIX}/opt + Optionally you can provide a width, e.g. $20_ for a 20-wide name. Like printf, you can provide '-' for left justification, e.g. $-20_ for a left-justified name. @@ -1526,7 +1540,8 @@ def format(self, format_string='$_$@$%@$+$=', **kwargs): color = kwargs.get('color', False) length = len(format_string) out = StringIO() - escape = compiler = False + named = escape = compiler = False + named_str = fmt = '' def write(s, c): if color: @@ -1566,9 +1581,12 @@ def write(s, c): elif c == '#': out.write('-' + fmt % (self.dag_hash(7))) elif c == '$': - if fmt != '': + if fmt != '%s': raise ValueError("Can't use format width with $$.") out.write('$') + elif c == '{': + named = True + named_str = '' escape = False elif compiler: @@ -1582,6 +1600,43 @@ def write(s, c): out.write(c) compiler = False + elif named: + if not c == '}': + if i == length - 1: + raise ValueError("Error: unterminated ${ in format: '%s'" + % format_string) + named_str += c + continue; + if named_str == 'PACKAGE': + write(fmt % self.name, '@') + if named_str == 'VERSION': + if self.versions and self.versions != _any_version: + write(fmt % str(self.versions), '@') + elif named_str == 'COMPILER': + if self.compiler: + write(fmt % self.compiler, '%') + elif named_str == 'COMPILERNAME': + if self.compiler: + write(fmt % self.compiler.name, '%') + elif named_str == 'COMPILERVER': + if self.compiler: + write(fmt % self.compiler.versions, '%') + elif named_str == 'OPTIONS': + if self.variants: + write(fmt % str(self.variants), '+') + elif named_str == 'ARCHITECTURE': + if self.architecture: + write(fmt % str(self.architecture), '=') + elif named_str == 'SHA1': + if self.dependencies: + out.write(fmt % str(self.dep_hash(8))) + elif named_str == 'SPACK_ROOT': + out.write(fmt % spack.prefix) + elif named_str == 'SPACK_INSTALL': + out.write(fmt % spack.install_path) + + named = False + elif c == '$': escape = True if i == length - 1: From b5c597b31864826050162b358998e240761c5d7e Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Fri, 29 May 2015 12:19:57 -0700 Subject: [PATCH 002/189] Allow specs to be sorted based on preferred packages, 
versions, compilers, variants and dependencies. --- lib/spack/spack/__init__.py | 8 ++ lib/spack/spack/config.py | 12 +- lib/spack/spack/preferred_packages.py | 172 ++++++++++++++++++++++++++ lib/spack/spack/spec.py | 34 +++++ 4 files changed, 222 insertions(+), 4 deletions(-) create mode 100644 lib/spack/spack/preferred_packages.py diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index caa09eb6e0..bd8478fb98 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -69,6 +69,14 @@ from spack.directory_layout import YamlDirectoryLayout install_layout = YamlDirectoryLayout(install_path) +# +# This controls how packages are sorted when trying to choose +# the most preferred package. More preferred packages are sorted +# first. +# +from spack.preferred_packages import PreferredPackages +pkgsort = PreferredPackages() + # # This controls how things are concretized in spack. # Replace it with a subclass if you want different diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 3e91958c2c..dbe225960a 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -89,8 +89,8 @@ import os import exceptions import sys +import copy -from external.ordereddict import OrderedDict from llnl.util.lang import memoized import spack.error @@ -114,8 +114,7 @@ def __init__(self, n, f, m): _ConfigCategory('compilers', 'compilers.yaml', True) _ConfigCategory('mirrors', 'mirrors.yaml', True) -_ConfigCategory('view', 'views.yaml', True) -_ConfigCategory('order', 'orders.yaml', True) +_ConfigCategory('preferred', 'preferred.yaml', True) """Names of scopes and their corresponding configuration files.""" config_scopes = [('site', os.path.join(spack.etc_path, 'spack')), @@ -156,7 +155,7 @@ def _merge_dicts(d1, d2): """Recursively merges two configuration trees, with entries in d2 taking precedence over d1""" if not d1: - return d2.copy() + return copy.copy(d2) if not d2: return d1 @@ -230,6 +229,11 @@ def get_mirror_config(): return get_config('mirrors') +def get_preferred_config(): + """Get the preferred configuration from config files""" + return get_config('preferred') + + def get_config_scope_dirname(scope): """For a scope return the config directory""" global config_scopes diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py new file mode 100644 index 0000000000..248508fe80 --- /dev/null +++ b/lib/spack/spack/preferred_packages.py @@ -0,0 +1,172 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +import spack +from spack.version import * + +class PreferredPackages(object): + _default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] }, #Arbitrary, but consistent + + def __init__(self): + self.preferred = spack.config.get_preferred_config() + self._spec_for_pkgname_cache = {} + + #Given a package name, sort component (e.g, version, compiler, ...), and + # a second_key (used by providers), return the list + def _order_for_package(self, pkgname, component, second_key): + pkglist = [pkgname] + pkglist.append('all') + for pkg in pkglist: + if not pkg in self.preferred: + continue + orders = self.preferred[pkg] + if not type(orders) is dict: + continue + if not component in orders: + continue + order = orders[component] + if type(order) is dict: + if not second_key in order: + continue; + order = order[second_key] + if not type(order) is str: + tty.die('Expected version list in preferred config, but got %s' % str(order)) + order_list = order.split(',') + return [s.strip() for s in order_list] + return [] + + + # A generic sorting function. Given a package name and sort + # component, return less-than-0, 0, or greater-than-0 if + # a is respectively less-than, equal to, or greater than b. + def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key): + orderlist = self._order_for_package(pkgname, component, second_key) + a_in_list = str(a) in orderlist + b_in_list = str(b) in orderlist + if a_in_list and not b_in_list: + return -1 + elif b_in_list and not a_in_list: + return 1 + + cmp_a = None + cmp_b = None + reverse = None + if not a_in_list and not b_in_list: + cmp_a = a + cmp_b = b + reverse = -1 if reverse_natural_compare else 1 + else: + cmp_a = orderlist.index(str(a)) + cmp_b = orderlist.index(str(b)) + reverse = 1 + + if cmp_a < cmp_b: + return -1 * reverse + elif cmp_a > cmp_b: + return 1 * reverse + else: + return 0 + + + # A sorting function for specs. Similar to component_compare, but + # a and b are considered to match entries in the sorting list if they + # satisfy the list component. + def _spec_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key): + specs = self._spec_for_pkgname(pkgname, component, second_key) + a_index = None + b_index = None + reverse = -1 if reverse_natural_compare else 1 + for i, cspec in enumerate(specs): + if a_index == None and cspec.satisfies(a): + a_index = i + if b_index: + break + if b_index == None and cspec.satisfies(b): + b_index = i + if a_index: + break + + if a_index != None and b_index == None: return -1 + elif a_index == None and b_index != None: return 1 + elif a_index != None and b_index == a_index: return -1 * cmp(a, b) + elif a_index != None and b_index != None and a_index != b_index: return cmp(a_index, b_index) + elif a < b: return 1 * reverse + elif b < a: return -1 * reverse + else: return 0 + + + # Given a sort order specified by the pkgname/component/second_key, return + # a list of CompilerSpecs, VersionLists, or Specs for that sorting list. 
+ def _spec_for_pkgname(self, pkgname, component, second_key): + key = (pkgname, component, second_key) + if not key in self._spec_for_pkgname_cache: + pkglist = self._order_for_package(pkgname, component, second_key) + if not pkglist: + if component in self._default_order: + pkglist = self._default_order[component] + if component == 'compiler': + self._spec_for_pkgname_cache[key] = [spack.spec.CompilerSpec(s) for s in pkglist] + elif component == 'version': + self._spec_for_pkgname_cache[key] = [VersionList(s) for s in pkglist] + else: + self._spec_for_pkgname_cache[key] = [spack.spec.Spec(s) for s in pkglist] + return self._spec_for_pkgname_cache[key] + + + def provider_compare(self, pkgname, provider_str, a, b): + """Return less-than-0, 0, or greater than 0 if a is respecively less-than, equal-to, or + greater-than b. A and b are possible implementations of provider_str. + One provider is less-than another if it is preferred over the other. + For example, provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would return -1 if + mvapich should be preferred over openmpi for scorep.""" + return self._spec_compare(pkgname, 'providers', a, b, False, provider_str) + + + def version_compare(self, pkgname, a, b): + """Return less-than-0, 0, or greater than 0 if version a of pkgname is + respecively less-than, equal-to, or greater-than version b of pkgname. + One version is less-than another if it is preferred over the other.""" + return self._spec_compare(pkgname, 'version', a, b, False, None) + + + def variant_compare(self, pkgname, a, b): + """Return less-than-0, 0, or greater than 0 if variant a of pkgname is + respecively less-than, equal-to, or greater-than variant b of pkgname. + One variant is less-than another if it is preferred over the other.""" + return self._component_compare(pkgname, 'variant', a, b, False, None) + + + def architecture_compare(self, pkgname, a, b): + """Return less-than-0, 0, or greater than 0 if architecture a of pkgname is + respecively less-than, equal-to, or greater-than architecture b of pkgname. + One architecture is less-than another if it is preferred over the other.""" + return self._component_compare(pkgname, 'architecture', a, b, False, None) + + + def compiler_compare(self, pkgname, a, b): + """Return less-than-0, 0, or greater than 0 if compiler a of pkgname is + respecively less-than, equal-to, or greater-than compiler b of pkgname. 
+ One compiler is less-than another if it is preferred over the other.""" + return self._spec_compare(pkgname, 'compiler', a, b, False, None) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 3b5d16c7a7..83b1416e36 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1653,6 +1653,40 @@ def dep_string(self): return ''.join("^" + dep.format() for dep in self.sorted_deps()) + def __cmp__(self, other): + #Package name sort order is not configurable, always goes alphabetical + if self.name != other.name: + return cmp(self.name, other.name) + + #Package version is second in compare order + pkgname = self.name + if self.versions != other.versions: + return spack.pkgsort.version_compare(pkgname, + self.versions, other.versions) + + #Compiler is third + if self.compiler != other.compiler: + return spack.pkgsort.compiler_compare(pkgname, + self.compiler, other.compiler) + + #Variants + if self.variants != other.variants: + return spack.pkgsort.variant_compare(pkgname, + self.variants, other.variants) + + #Architecture + if self.architecture != other.architecture: + return spack.pkgsort.architecture_compare(pkgname, + self.architecture, other.architecture) + + #Dependency is not configurable + if self.dep_hash() != other.dep_hash(): + return -1 if self.dep_hash() < other.dep_hash() else 1 + + #Equal specs + return 0 + + def __str__(self): return self.format() + self.dep_string() From 8d7b7e5d5dadcec9b997b94d95898a4134e122b2 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Fri, 29 May 2015 12:20:32 -0700 Subject: [PATCH 003/189] Use preferred package rules when concretize'ing specs --- lib/spack/spack/concretize.py | 68 +++++++++++++++++++---------------- lib/spack/spack/spec.py | 2 +- 2 files changed, 38 insertions(+), 32 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 66002492cb..0f258c9096 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -38,6 +38,7 @@ import spack.architecture import spack.error from spack.version import * +from functools import partial @@ -49,8 +50,8 @@ class DefaultConcretizer(object): def concretize_version(self, spec): """If the spec is already concrete, return. Otherwise take - the most recent available version, and default to the package's - version if there are no avaialble versions. + the preferred version from spackconfig, and default to the package's + version if there are no available versions. TODO: In many cases we probably want to look for installed versions of each package and use an installed version @@ -68,12 +69,14 @@ def concretize_version(self, spec): # If there are known available versions, return the most recent # version that satisfies the spec pkg = spec.package + cmp_versions = partial(spack.pkgsort.version_compare, spec.name) valid_versions = sorted( [v for v in pkg.versions - if any(v.satisfies(sv) for sv in spec.versions)]) + if any(v.satisfies(sv) for sv in spec.versions)], + cmp=cmp_versions) if valid_versions: - spec.versions = ver([valid_versions[-1]]) + spec.versions = ver([valid_versions[0]]) else: # We don't know of any SAFE versions that match the given # spec. Grab the spec's versions and grab the highest @@ -138,10 +141,10 @@ def concretize_compiler(self, spec): """If the spec already has a compiler, we're done. If not, then take the compiler used for the nearest ancestor with a compiler spec and use that. If the ancestor's compiler is not - concrete, then give it a valid version. 
If there is no - ancestor with a compiler, use the system default compiler. + concrete, then used the preferred compiler as specified in + spackconfig. - Intuition: Use the system default if no package that depends on + Intuition: Use the spackconfig default if no package that depends on this one has a strict compiler requirement. Otherwise, try to build with the compiler that will be used by libraries that link to this one, to maximize compatibility. @@ -153,40 +156,43 @@ def concretize_compiler(self, spec): spec.compiler in all_compilers): return False - try: - nearest = next(p for p in spec.traverse(direction='parents') - if p.compiler is not None).compiler - - if not nearest in all_compilers: - # Take the newest compiler that saisfies the spec - matches = sorted(spack.compilers.find(nearest)) - if not matches: - raise UnavailableCompilerVersionError(nearest) - - # copy concrete version into nearest spec - nearest.versions = matches[-1].versions.copy() - assert(nearest.concrete) - - spec.compiler = nearest.copy() - - except StopIteration: - spec.compiler = spack.compilers.default_compiler().copy() - + # Find the parent spec that has a compiler, or the root if none do + parent_spec = next(p for p in spec.traverse(direction='parents') + if p.compiler is not None or not p.dependents) + parent_compiler = parent_spec.compiler + assert(parent_spec) + + # Check if the compiler is already fully specified + if parent_compiler in all_compilers: + spec.compiler = parent_compiler.copy() + return True + + # Filter the compilers into a sorted list based on the compiler_order from spackconfig + compiler_list = all_compilers if not parent_compiler else spack.compilers.find(parent_compiler) + cmp_compilers = partial(spack.pkgsort.compiler_compare, parent_spec.name) + matches = sorted(compiler_list, cmp=cmp_compilers) + if not matches: + raise UnavailableCompilerVersionError(parent_compiler) + + # copy concrete version into parent_compiler + spec.compiler = matches[0].copy() + assert(spec.compiler.concrete) return True # things changed. - def choose_provider(self, spec, providers): + def choose_provider(self, package_spec, spec, providers): """This is invoked for virtual specs. Given a spec with a virtual name, say "mpi", and a list of specs of possible providers of that spec, select a provider and return it. 
""" assert(spec.virtual) assert(providers) + + provider_cmp = partial(spack.pkgsort.provider_compare, package_spec.name, spec.name) + sorted_providers = sorted(providers, cmp=provider_cmp) + first_key = sorted_providers[0] - index = spack.spec.index_specs(providers) - first_key = sorted(index.keys())[0] - latest_version = sorted(index[first_key])[-1] - return latest_version + return first_key class UnavailableCompilerVersionError(spack.error.SpackError): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 83b1416e36..41496b0e9d 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -796,7 +796,7 @@ def _expand_virtual_packages(self): for spec in virtuals: providers = spack.db.providers_for(spec) - concrete = spack.concretizer.choose_provider(spec, providers) + concrete = spack.concretizer.choose_provider(self, spec, providers) concrete = concrete.copy() spec._replace_with(concrete) changed = True From ee68a76a193890231d6df7fa7934d42e3708540b Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Fri, 29 May 2015 14:57:24 -0700 Subject: [PATCH 004/189] Bug fixes from testing spack preferred packages --- lib/spack/spack/preferred_packages.py | 7 +++---- var/spack/mock_packages/mpich/package.py | 1 + 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index 248508fe80..bc5271f693 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -112,9 +112,8 @@ def _spec_compare(self, pkgname, component, a, b, reverse_natural_compare, secon elif a_index == None and b_index != None: return 1 elif a_index != None and b_index == a_index: return -1 * cmp(a, b) elif a_index != None and b_index != None and a_index != b_index: return cmp(a_index, b_index) - elif a < b: return 1 * reverse - elif b < a: return -1 * reverse - else: return 0 + else: return cmp(a, b) * reverse + # Given a sort order specified by the pkgname/component/second_key, return @@ -148,7 +147,7 @@ def version_compare(self, pkgname, a, b): """Return less-than-0, 0, or greater than 0 if version a of pkgname is respecively less-than, equal-to, or greater-than version b of pkgname. 
One version is less-than another if it is preferred over the other.""" - return self._spec_compare(pkgname, 'version', a, b, False, None) + return self._spec_compare(pkgname, 'version', a, b, True, None) def variant_compare(self, pkgname, a, b): diff --git a/var/spack/mock_packages/mpich/package.py b/var/spack/mock_packages/mpich/package.py index f77d3efc5d..e4110ad530 100644 --- a/var/spack/mock_packages/mpich/package.py +++ b/var/spack/mock_packages/mpich/package.py @@ -38,6 +38,7 @@ class Mpich(Package): version('3.0.2', 'foobarbaz') version('3.0.1', 'foobarbaz') version('3.0', 'foobarbaz') + version('1.0', 'foobarbas') provides('mpi@:3', when='@3:') provides('mpi@:1', when='@:1') From 987cd9e78f99aa6ee6f3a48a4b8f556a68cb2965 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Tue, 7 Jul 2015 16:27:13 -0700 Subject: [PATCH 005/189] Update docs for YAML configuration files and preferred concretization --- lib/spack/docs/basic_usage.rst | 37 +++++++----- lib/spack/docs/mirrors.rst | 9 ++- lib/spack/docs/packaging_guide.rst | 66 ++++++++++++++++++++- lib/spack/docs/site_configuration.rst | 83 --------------------------- 4 files changed, 91 insertions(+), 104 deletions(-) diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 0578f0c8db..5d5438220c 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -357,7 +357,7 @@ Spack, you can simply run ``spack compiler add`` with the path to where the compiler is installed. For example:: $ spack compiler add /usr/local/tools/ic-13.0.079 - ==> Added 1 new compiler to /Users/gamblin2/.spackconfig + ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml intel@13.0.079 Or you can run ``spack compiler add`` with no arguments to force @@ -367,7 +367,7 @@ installed, but you know that new compilers have been added to your $ module load gcc-4.9.0 $ spack compiler add - ==> Added 1 new compiler to /Users/gamblin2/.spackconfig + ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml gcc@4.9.0 This loads the environment module for gcc-4.9.0 to get it into the @@ -398,27 +398,34 @@ Manual compiler configuration ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If auto-detection fails, you can manually configure a compiler by -editing your ``~/.spackconfig`` file. You can do this by running -``spack config edit``, which will open the file in your ``$EDITOR``. +editing your ``~/.spack/compilers.yaml`` file. You can do this by running +``spack config edit compilers``, which will open the file in your ``$EDITOR``. Each compiler configuration in the file looks like this:: ... - [compiler "intel@15.0.0"] - cc = /usr/local/bin/icc-15.0.024-beta - cxx = /usr/local/bin/icpc-15.0.024-beta - f77 = /usr/local/bin/ifort-15.0.024-beta - fc = /usr/local/bin/ifort-15.0.024-beta - ... + chaos_5_x86_64_ib: + ... + intel@15.0.0: + cc: /usr/local/bin/icc-15.0.024-beta + cxx: /usr/local/bin/icpc-15.0.024-beta + f77: /usr/local/bin/ifort-15.0.024-beta + fc: /usr/local/bin/ifort-15.0.024-beta + ... + +The chaos_5_x86_64_ib string is an architecture string, and multiple +compilers can be listed underneath an architecture. The architecture +string may be replaced with the string 'all' to signify compilers that +work on all architectures. 
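For example, a compiler that should be picked up on every architecture
could be listed under ``all`` like this (an illustrative sketch; the
compiler version and paths will differ on your system)::

    all:
      gcc@4.7.2:
        cc: /usr/bin/gcc
        cxx: /usr/bin/g++
        f77: /usr/bin/gfortran
        fc: /usr/bin/gfortran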
For compilers, like ``clang``, that do not support Fortran, put ``None`` for ``f77`` and ``fc``:: - [compiler "clang@3.3svn"] - cc = /usr/bin/clang - cxx = /usr/bin/clang++ - f77 = None - fc = None + clang@3.3svn: + cc: /usr/bin/clang + cxx: /usr/bin/clang++ + f77: None + fc: None Once you save the file, the configured compilers will show up in the list displayed by ``spack compilers``. diff --git a/lib/spack/docs/mirrors.rst b/lib/spack/docs/mirrors.rst index d732a3dd54..7581a0e9ed 100644 --- a/lib/spack/docs/mirrors.rst +++ b/lib/spack/docs/mirrors.rst @@ -205,12 +205,11 @@ And, if you want to remove a mirror, just remove it by name:: Mirror precedence ---------------------------- -Adding a mirror really just adds a section in ``~/.spackconfig``:: +Adding a mirror really just adds a section in ``~/.spack/mirrors.yaml``:: - [mirror "local_filesystem"] - url = file:///Users/gamblin2/spack-mirror-2014-06-24 - [mirror "remote_server"] - url = https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24 + mirrors: + - local_filesystem: file:///Users/gamblin2/spack-mirror-2014-06-24 + - remote_server: https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24 If you want to change the order in which mirrors are searched for packages, you can edit this file and reorder the sections. Spack will diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 59ba63fa35..5094f739c4 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -632,7 +632,7 @@ Default revision instead. Revisions - Add ``hg`` and ``revision``parameters: + Add ``hg`` and ``revision`` parameters: .. code-block:: python @@ -1524,6 +1524,70 @@ This is useful when you want to know exactly what Spack will do when you ask for a particular spec. +``Concretization Policies`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A user may have certain perferrences for how packages should +be concretized on their system. For example, one user may prefer packages +built with OpenMPI and the Intel compiler. Another user may prefer +packages be built with MVAPICH and GCC. + +Spack's ``preferred`` configuration can be used to set defaults for sites or users. +Spack uses this configuration to make decisions about which compilers, package +versions, depends_on, and variants it should prefer during concretization. + +The preferred configuration can be controlled by editing the +``~/.spack/preferred.yaml`` file for user configuations, or the + + +Here's an example preferred.yaml file: + +.. code-block:: sh + + preferred: + dyninst: + compiler: gcc@4.9 + variants: +debug + gperftools: + version: 2.2, 2.4, 2.3 + all: + compiler: gcc@4.4.7, gcc@4.6:, intel, clang, pgi + providers: + mpi: mvapich, mpich, openmpi + +At a high level, this example is specifying how packages should be +concretized. The dyninst package should prefer using gcc 4.9 and +be built with debug options. The gperftools package should prefer version +2.2 over 2.4. Every package on the system should prefer mvapich for +its MPI and gcc 4.4.7 (except for Dyninst, which perfers gcc 4.9). +These options are used to fill in implicit defaults. Any of them can be overwritten +on the command line if explicitly requested. + +Each preferred.yaml file begin with the string ``preferred:`` and +each subsequent entry is indented underneath it. The next layer contains +package names or the special string ``all`` (which applies to +every package). 
Underneath each package name is +one or more components: ``compiler``, ``variants``, ``version``, +or ``providers``. Each component has an ordered list of spec +``constraints``, with earlier entries in the list being prefered over +latter entries. + +Sometimes a package installation may have constraints that forbid +the first concretization rule, in which case Spack will use the first +legal concretization rule. Going back to the example, if a user +requests gperftools 2.3 or latter, then Spack will install version 2.4 +as the 2.4 version of gperftools is preferred over 2.3. + +An explicit concretization rule in the preferred section will always +take preference over unlisted concretizations. In the above example, +xlc isn't listed in the compiler list. Every listed compiler from +gcc to pgi will thus be preferred over the xlc compiler. + +The syntax for the ``providers`` section differs slightly from other +concretization rules. A provider lists a value that packages may +``depend_on`` (e.g, mpi) and a list of rules for fulfilling that +dependency. + .. _install-method: Implementing the ``install`` method diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst index b03df29573..1e6740a434 100644 --- a/lib/spack/docs/site_configuration.rst +++ b/lib/spack/docs/site_configuration.rst @@ -54,89 +54,6 @@ more elements to the list to indicate where your own site's temporary directory is. -.. _concretization-policies: - -Concretization policies ----------------------------- - -When a user asks for a package like ``mpileaks`` to be installed, -Spack has to make decisions like what version should be installed, -what compiler to use, and how its dependencies should be configured. -This process is called *concretization*, and it's covered in detail in -:ref:`its own section `. - -The default concretization policies are in the -:py:mod:`spack.concretize` module, specifically in the -:py:class:`spack.concretize.DefaultConcretizer` class. These are the -important methods used in the concretization process: - -* :py:meth:`concretize_version(self, spec) ` -* :py:meth:`concretize_architecture(self, spec) ` -* :py:meth:`concretize_compiler(self, spec) ` -* :py:meth:`choose_provider(self, spec, providers) ` - -The first three take a :py:class:`Spec ` object and -modify it by adding constraints for the version. For example, if the -input spec had a version range like `1.0:5.0.3`, then the -``concretize_version`` method should set the spec's version to a -*single* version in that range. Likewise, ``concretize_architecture`` -selects an architecture when the input spec does not have one, and -``concretize_compiler`` needs to set both a concrete compiler and a -concrete compiler version. - -``choose_provider()`` affects how concrete implementations are chosen -based on a virtual dependency spec. The input spec is some virtual -dependency and the ``providers`` index is a :py:class:`ProviderIndex -` object. The ``ProviderIndex`` maps -the virtual spec to specs for possible implementations, and -``choose_provider()`` should simply choose one of these. The -``concretize_*`` methods will be called on the chosen implementation -later, so there is no need to fully concretize the spec when returning -it. - -The ``DefaultConcretizer`` is intended to provide sensible defaults -for each policy, but there are certain choices that it can't know -about. For example, one site might prefer ``OpenMPI`` over ``MPICH``, -or another might prefer an old version of some packages. 
These types -of special cases can be integrated with custom concretizers. - -Writing a custom concretizer -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To write your own concretizer, you need only subclass -``DefaultConcretizer`` and override the methods you want to change. -For example, you might write a class like this to change *only* the -``concretize_version()`` behavior: - -.. code-block:: python - - from spack.concretize import DefaultConcretizer - - class MyConcretizer(DefaultConcretizer): - def concretize_version(self, spec): - # implement custom logic here. - -Once you have written your custom concretizer, you can make Spack use -it by editing ``globals.py``. Find this part of the file: - -.. code-block:: python - - # - # This controls how things are concretized in spack. - # Replace it with a subclass if you want different - # policies. - # - concretizer = DefaultConcretizer() - -Set concretizer to *your own* class instead of the default: - -.. code-block:: python - - concretizer = MyConcretizer() - -The next time you run Spack, your changes should take effect. - - Profiling ~~~~~~~~~~~~~~~~~~~~~ From 53cde110b1d25f8fcf89d7c26ef02bb073cb6a29 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Tue, 7 Jul 2015 16:27:46 -0700 Subject: [PATCH 006/189] Update Spack mirror command to match docs --- lib/spack/spack/cmd/mirror.py | 12 ++++++------ lib/spack/spack/stage.py | 5 ++--- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 2356170a9a..baf64d30fc 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -75,8 +75,8 @@ def mirror_add(args): if url.startswith('/'): url = 'file://' + url - mirror_dict = { args.name : url } - spack.config.add_to_mirror_config({ args.name : url }) + newmirror = [ { args.name : url } ] + spack.config.add_to_mirror_config(newmirror) def mirror_remove(args): @@ -90,15 +90,15 @@ def mirror_remove(args): def mirror_list(args): """Print out available mirrors to the console.""" - sec_names = spack.config.get_mirror_config() - if not sec_names: + mirrors = spack.config.get_mirror_config() + if not mirrors: tty.msg("No mirrors configured.") return - max_len = max(len(s) for s in sec_names) + max_len = max(len(name) for name,path in mirrors) fmt = "%%-%ds%%s" % (max_len + 4) - for name, val in sec_names.iteritems(): + for name, val in mirrors: print fmt % (name, val) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 008c5f0429..c70c7a84a4 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -26,6 +26,7 @@ import re import shutil import tempfile +import sys import llnl.util.tty as tty from llnl.util.filesystem import * @@ -344,9 +345,7 @@ def destroy(self): def _get_mirrors(): """Get mirrors from spack configuration.""" - config = spack.config.get_mirror_config() - return [val for name, val in config.iteritems()] - + return [path for name, path in spack.config.get_mirror_config()] def ensure_access(file=spack.stage_path): From 53d70fff0121a05fb21c02363570f81573bbeffa Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Tue, 7 Jul 2015 16:28:51 -0700 Subject: [PATCH 007/189] Fix type error with YAML config when merging lists from different configs. 
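The rewritten _merge_dicts now treats same-typed containers specially:
two lists are concatenated, two dicts are merged key by key (recursing,
with d2's entries still taking precedence), and any other combination is
simply replaced by d2. A minimal sketch of the intended behavior, using
made-up mirror and compiler entries:

    from spack.config import _merge_dicts

    # list-valued categories (e.g. mirrors.yaml) concatenate:
    _merge_dicts([{'a': 'file:///mirror-a'}], [{'b': 'file:///mirror-b'}])
    # -> [{'a': 'file:///mirror-a'}, {'b': 'file:///mirror-b'}]

    # dict-valued categories (e.g. compilers.yaml) merge recursively,
    # with the second argument winning on conflicting leaf values:
    _merge_dicts({'gcc@4.9': {'cc': '/old/gcc'}},
                 {'gcc@4.9': {'cc': '/new/gcc'}})
    # -> {'gcc@4.9': {'cc': '/new/gcc'}}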
--- lib/spack/spack/config.py | 33 ++++++++++++++++----------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index dbe225960a..f3526b19fa 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -98,6 +98,7 @@ from external.yaml.error import MarkedYAMLError import llnl.util.tty as tty from llnl.util.filesystem import mkdirp +import copy _config_sections = {} class _ConfigCategory: @@ -159,21 +160,19 @@ def _merge_dicts(d1, d2): if not d2: return d1 - for key2, val2 in d2.iteritems(): - if not key2 in d1: - d1[key2] = val2 - continue - val1 = d1[key2] - if isinstance(val1, dict) and isinstance(val2, dict): - d1[key2] = _merge_dicts(val1, val2) - continue - if isinstance(val1, list) and isinstance(val2, list): - val1.extend(val2) - seen = set() - d1[key2] = [ x for x in val1 if not (x in seen or seen.add(x)) ] - continue - d1[key2] = val2 - return d1 + if (type(d1) is list) and (type(d2) is list): + d1.extend(d2) + return d1 + + if (type(d1) is dict) and (type(d2) is dict): + for key2, val2 in d2.iteritems(): + if not key2 in d1: + d1[key2] = val2 + else: + d1[key2] = _merge_dicts(d1[key2], val2) + return d1 + + return d2 def get_config(category_name): @@ -225,8 +224,8 @@ def get_compilers_config(arch=None): def get_mirror_config(): - """Get the mirror configuration from config files""" - return get_config('mirrors') + """Get the mirror configuration from config files as a list of name/location tuples""" + return [x.items()[0] for x in get_config('mirrors')] def get_preferred_config(): From 650c9d4e36c6a58cf6bca0e6abd580ee54d8e175 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Wed, 16 Sep 2015 10:56:11 -0700 Subject: [PATCH 008/189] Allow spack to build against external non-spack-installed packages. --- lib/spack/spack/__init__.py | 6 ++ lib/spack/spack/abi.py | 128 ++++++++++++++++++++++ lib/spack/spack/concretize.py | 140 ++++++++++++++++++++++--- lib/spack/spack/config.py | 59 ++++++++++- lib/spack/spack/directory_layout.py | 8 ++ lib/spack/spack/package.py | 3 + lib/spack/spack/preferred_packages.py | 10 +- lib/spack/spack/spec.py | 58 +++++----- var/spack/packages/mpich/package.py | 1 + var/spack/packages/mvapich2/package.py | 17 ++- 10 files changed, 387 insertions(+), 43 deletions(-) create mode 100644 lib/spack/spack/abi.py diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index bd8478fb98..5783005b5b 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -77,6 +77,12 @@ from spack.preferred_packages import PreferredPackages pkgsort = PreferredPackages() +# +# This tests ABI compatibility between packages +# +from spack.abi import ABI +abi = ABI() + # # This controls how things are concretized in spack. # Replace it with a subclass if you want different diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py new file mode 100644 index 0000000000..f0a997703c --- /dev/null +++ b/lib/spack/spack/abi.py @@ -0,0 +1,128 @@ +############################################################################## +# Copyright (c) 2015, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +import os +import spack +import spack.spec +from spack.spec import CompilerSpec +from spack.util.executable import Executable, ProcessError +from llnl.util.lang import memoized + +class ABI(object): + """This class provides methods to test ABI compatibility between specs. + The current implementation is rather rough and could be improved.""" + + def architecture_compatible(self, parent, child): + """Returns true iff the parent and child specs have ABI compatible architectures.""" + return not parent.architecture or not child.architecture or parent.architecture == child.architecture + + + @memoized + def _gcc_get_libstdcxx_version(self, version): + """Returns gcc ABI compatibility info by getting the library version of + a compiler's libstdc++.so or libgcc_s.so""" + spec = CompilerSpec("gcc", version) + compilers = spack.compilers.compilers_for_spec(spec) + if not compilers: + return None + compiler = compilers[0] + rungcc = None + libname = None + output = None + if compiler.cxx: + rungcc = Executable(compiler.cxx) + libname = "libstdc++.so" + elif compiler.cc: + rungcc = Executable(compiler.cc) + libname = "libgcc_s.so" + else: + return None + try: + output = rungcc("--print-file-name=%s" % libname, return_output=True) + except ProcessError, e: + return None + if not output: + return None + libpath = os.readlink(output.strip()) + if not libpath: + return None + return os.path.basename(libpath) + + + @memoized + def _gcc_compiler_compare(self, pversion, cversion): + """Returns true iff the gcc version pversion and cversion + are ABI compatible.""" + plib = self._gcc_get_libstdcxx_version(pversion) + clib = self._gcc_get_libstdcxx_version(cversion) + if not plib or not clib: + return False + return plib == clib + + + def _intel_compiler_compare(self, pversion, cversion): + """Returns true iff the intel version pversion and cversion + are ABI compatible""" + + #Test major and minor versions. Ignore build version. + if (len(pversion.version) < 2 or len(cversion.version) < 2): + return False + return (pversion.version[0] == cversion.version[0]) and \ + (pversion.version[1] == cversion.version[1]) + + + def compiler_compatible(self, parent, child, **kwargs): + """Returns true iff the compilers for parent and child specs are ABI compatible""" + if not parent.compiler or not child.compiler: + return True + + if parent.compiler.name != child.compiler.name: + #Different compiler families are assumed ABI incompatible + return False + + if kwargs.get('loose', False): + return True + + for pversion in parent.compiler.versions: + for cversion in child.compiler.versions: + #For a few compilers use specialized comparisons. Otherwise + # match on version match. 
+ if pversion.satisfies(cversion): + return True + elif parent.compiler.name == "gcc" and \ + self._gcc_compiler_compare(pversion, cversion): + return True + elif parent.compiler.name == "intel" and \ + self._intel_compiler_compare(pversion, cversion): + return True + return False + + + def compatible(self, parent, child, **kwargs): + """Returns true iff a parent and child spec are ABI compatible""" + loosematch = kwargs.get('loose', False) + return self.architecture_compatible(parent, child) and \ + self.compiler_compatible(parent, child, loose=loosematch) + diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 0f258c9096..01ff163493 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -33,13 +33,16 @@ TODO: make this customizable and allow users to configure concretization policies. """ +import spack import spack.spec import spack.compilers import spack.architecture import spack.error from spack.version import * from functools import partial - +from spec import DependencyMap +from itertools import chain +from spack.config import * class DefaultConcretizer(object): @@ -48,6 +51,107 @@ class DefaultConcretizer(object): default concretization strategies, or you can override all of them. """ + def _find_other_spec(self, spec, condition): + """Searches the dag from spec in an intelligent order and looks + for a spec that matches a condition""" + dagiter = chain(spec.traverse(direction='parents'), spec.traverse(direction='children')) + found = next((x for x in dagiter if x is not spec and condition(x)), None) + if found: + return found + dagiter = chain(spec.traverse(direction='parents'), spec.traverse(direction='children')) + searched = list(dagiter) + found = next((x for x in spec.root.traverse() if x not in searched and x is not spec and condition(x)), None) + if found: + return found + if condition(spec): + return spec + return None + + + def _valid_virtuals_and_externals(self, spec): + """Returns a list of spec/external-path pairs for both virtuals and externals + that can concretize this spec.""" + + # Get a list of candidate packages that could satisfy this spec + packages = [] + if spec.virtual: + providers = spack.db.providers_for(spec) + if not providers: + raise UnsatisfiableProviderSpecError(providers[0], spec) + spec_w_preferred_providers = self._find_other_spec(spec, \ + lambda(x): spack.pkgsort.spec_has_preferred_provider(x.name, spec.name)) + if not spec_w_preferred_providers: + spec_w_preferred_providers = spec + provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name) + packages = sorted(providers, cmp=provider_cmp) + else: + if not spec_externals(spec) or spec.external: + return None + packages = [spec] + + # For each candidate package, if it has externals add those to the candidates + # if it's a nobuild, then only add the externals. 
+ result = [] + all_compilers = spack.compilers.all_compilers() + for pkg in packages: + externals = spec_externals(pkg) + buildable = not is_spec_nobuild(pkg) + if buildable: + result.append((pkg, None)) + if externals: + sorted_externals = sorted(externals, cmp=lambda a,b: a[0].__cmp__(b[0])) + for external in sorted_externals: + if external[0].satisfies(spec): + result.append(external) + if not result: + raise NoBuildError(spec) + return result + + + def concretize_virtual_and_external(self, spec): + """From a list of candidate virtual and external packages, concretize to one that + is ABI compatible with the rest of the DAG.""" + candidates = self._valid_virtuals_and_externals(spec) + if not candidates: + return False + + #Find the another spec in the dag that has a compiler. We'll use that + # spec to test compiler compatibility. + other_spec = self._find_other_spec(spec, lambda(x): x.compiler) + if not other_spec: + other_spec = spec.root + + #Choose an ABI-compatible candidate, or the first match otherwise. + candidate = None + if other_spec: + candidate = next((c for c in candidates if spack.abi.compatible(c[0], other_spec)), None) + if not candidate: + #Try a looser ABI matching + candidate = next((c for c in candidates if spack.abi.compatible(c[0], other_spec, loose=True)), None) + if not candidate: + #Pick the first choice + candidate = candidates[0] + external = candidate[1] + candidate_spec = candidate[0] + + #Refine this spec to the candidate. + changed = False + if spec.virtual: + spec._replace_with(candidate_spec) + changed = True + if spec._dup(candidate_spec, deps=False, cleardeps=False): + changed = True + if not spec.external and external: + spec.external = external + changed = True + #If we're external then trim the dependencies + if external and spec.dependencies: + changed = True + spec.depencencies = DependencyMap() + + return changed + + def concretize_version(self, spec): """If the spec is already concrete, return. Otherwise take the preferred version from spackconfig, and default to the package's @@ -150,31 +254,32 @@ def concretize_compiler(self, spec): link to this one, to maximize compatibility. 
""" all_compilers = spack.compilers.all_compilers() - + if (spec.compiler and spec.compiler.concrete and spec.compiler in all_compilers): return False - # Find the parent spec that has a compiler, or the root if none do - parent_spec = next(p for p in spec.traverse(direction='parents') - if p.compiler is not None or not p.dependents) - parent_compiler = parent_spec.compiler - assert(parent_spec) + #Find the another spec that has a compiler, or the root if none do + other_spec = self._find_other_spec(spec, lambda(x) : x.compiler) + if not other_spec: + other_spec = spec.root + other_compiler = other_spec.compiler + assert(other_spec) # Check if the compiler is already fully specified - if parent_compiler in all_compilers: - spec.compiler = parent_compiler.copy() + if other_compiler in all_compilers: + spec.compiler = other_compiler.copy() return True # Filter the compilers into a sorted list based on the compiler_order from spackconfig - compiler_list = all_compilers if not parent_compiler else spack.compilers.find(parent_compiler) - cmp_compilers = partial(spack.pkgsort.compiler_compare, parent_spec.name) + compiler_list = all_compilers if not other_compiler else spack.compilers.find(other_compiler) + cmp_compilers = partial(spack.pkgsort.compiler_compare, other_spec.name) matches = sorted(compiler_list, cmp=cmp_compilers) if not matches: - raise UnavailableCompilerVersionError(parent_compiler) + raise UnavailableCompilerVersionError(other_compiler) - # copy concrete version into parent_compiler + # copy concrete version into other_compiler spec.compiler = matches[0].copy() assert(spec.compiler.concrete) return True # things changed. @@ -210,3 +315,12 @@ class NoValidVersionError(spack.error.SpackError): def __init__(self, spec): super(NoValidVersionError, self).__init__( "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)) + + +class NoBuildError(spack.error.SpackError): + """Raised when a package is configured with the nobuild option, but + no satisfactory external versions can be found""" + def __init__(self, spec): + super(NoBuildError, self).__init__( + "The spec '%s' is configured as nobuild, and no matching external installs were found" % spec.name) + diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index f3526b19fa..60577c45b3 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -90,9 +90,12 @@ import exceptions import sys import copy - -from llnl.util.lang import memoized +import inspect +import glob +import imp +import spack.spec import spack.error +from llnl.util.lang import memoized from external import yaml from external.yaml.error import MarkedYAMLError @@ -116,6 +119,9 @@ def __init__(self, n, f, m): _ConfigCategory('compilers', 'compilers.yaml', True) _ConfigCategory('mirrors', 'mirrors.yaml', True) _ConfigCategory('preferred', 'preferred.yaml', True) +_ConfigCategory('view', 'views.yaml', True) +_ConfigCategory('preferred', 'preferred.yaml', True) +_ConfigCategory('packages', 'packages.yaml', True) """Names of scopes and their corresponding configuration files.""" config_scopes = [('site', os.path.join(spack.etc_path, 'spack')), @@ -233,6 +239,55 @@ def get_preferred_config(): return get_config('preferred') +@memoized +def get_packages_config(): + """Get the externals configuration from config files""" + package_config = get_config('packages') + if not package_config: + return {} + indexed_packages = {} + for p in package_config: + package_name = spack.spec.Spec(p.keys()[0]).name + if package_name not in 
indexed_packages: + indexed_packages[package_name] = [] + indexed_packages[package_name].append({ spack.spec.Spec(key) : val for key, val in p.iteritems() }) + return indexed_packages + + +def is_spec_nobuild(spec): + """Return true if the spec pkgspec is configured as nobuild""" + allpkgs = get_packages_config() + name = spec.name + if not name in allpkgs: + return False + for itm in allpkgs[name]: + for pkg,conf in itm.iteritems(): + if pkg.satisfies(spec): + if conf.get('nobuild', False): + return True + return False + + +def spec_externals(spec): + """Return a list of spec, directory pairs for each external location for spec""" + allpkgs = get_packages_config() + name = spec.name + spec_locations = [] + + if not name in allpkgs: + return [] + for itm in allpkgs[name]: + for pkg,conf in itm.iteritems(): + if not pkg.satisfies(spec): + continue + path = conf.get('path', None) + if not path: + continue + spec_locations.append( (pkg, path) ) + return spec_locations + + + def get_config_scope_dirname(scope): """For a scope return the config directory""" global config_scopes diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 85ecc1ce2b..83e6eb566a 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -187,6 +187,14 @@ def hidden_file_paths(self): def relative_path_for_spec(self, spec): _check_concrete(spec) + + if spec.external: + return spec.external + + enabled_variants = ( + '-' + v.name for v in spec.variants.values() + if v.enabled) + dir_name = "%s-%s-%s" % ( spec.name, spec.version, diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 61606d0590..1e2f0378c8 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -752,6 +752,9 @@ def do_install(self, if not self.spec.concrete: raise ValueError("Can only install concrete packages.") + if self.spec.external: + return + if os.path.exists(self.prefix): tty.msg("%s is already installed in %s." % (self.name, self.prefix)) return diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index bc5271f693..bc2a4ac234 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -35,9 +35,10 @@ def __init__(self): #Given a package name, sort component (e.g, version, compiler, ...), and # a second_key (used by providers), return the list - def _order_for_package(self, pkgname, component, second_key): + def _order_for_package(self, pkgname, component, second_key, test_all=True): pkglist = [pkgname] - pkglist.append('all') + if test_all: + pkglist.append('all') for pkg in pkglist: if not pkg in self.preferred: continue @@ -143,6 +144,11 @@ def provider_compare(self, pkgname, provider_str, a, b): return self._spec_compare(pkgname, 'providers', a, b, False, provider_str) + def spec_has_preferred_provider(self, pkgname, provider_str): + """Return True iff the named package has a list of preferred provider""" + return bool(self._order_for_package(pkgname, 'providers', provider_str, False)) + + def version_compare(self, pkgname, a, b): """Return less-than-0, 0, or greater than 0 if version a of pkgname is respecively less-than, equal-to, or greater-than version b of pkgname. diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 41496b0e9d..6984b4a174 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -419,6 +419,7 @@ def __init__(self, spec_like, *dep_like, **kwargs): # package.py files for. 
self._normal = kwargs.get('normal', False) self._concrete = kwargs.get('concrete', False) + self.external = None # This allows users to construct a spec DAG with literals. # Note that given two specs a and b, Spec(a) copies a, but @@ -426,7 +427,7 @@ def __init__(self, spec_like, *dep_like, **kwargs): for dep in dep_like: spec = dep if isinstance(dep, Spec) else Spec(dep) self._add_dependency(spec) - + # # Private routines here are called by the parser when building a spec. @@ -751,12 +752,11 @@ def _concretize_helper(self, presets=None, visited=None): # Concretize virtual dependencies last. Because they're added # to presets below, their constraints will all be merged, but we'll # still need to select a concrete package later. - if not self.virtual: - changed |= any( - (spack.concretizer.concretize_architecture(self), - spack.concretizer.concretize_compiler(self), - spack.concretizer.concretize_version(self), - spack.concretizer.concretize_variants(self))) + changed |= any( + (spack.concretizer.concretize_architecture(self), + spack.concretizer.concretize_compiler(self), + spack.concretizer.concretize_version(self), + spack.concretizer.concretize_variants(self))) presets[self.name] = self visited.add(self.name) @@ -789,21 +789,18 @@ def _expand_virtual_packages(self): a problem. """ changed = False - while True: - virtuals =[v for v in self.traverse() if v.virtual] - if not virtuals: - return changed + done = False + while not done: + done = True + for spec in list(self.traverse()): + if spack.concretizer.concretize_virtual_and_external(spec): + done = False + changed = True - for spec in virtuals: - providers = spack.db.providers_for(spec) - concrete = spack.concretizer.choose_provider(self, spec, providers) - concrete = concrete.copy() - spec._replace_with(concrete) - changed = True - - # If there are duplicate providers or duplicate provider deps, this - # consolidates them and merge constraints. - changed |= self.normalize(force=True) + # If there are duplicate providers or duplicate provider deps, this + # consolidates them and merge constraints. + changed |= self.normalize(force=True) + return changed def concretize(self): @@ -830,7 +827,6 @@ def concretize(self): self._concretize_helper()) changed = any(changes) force=True - self._concrete = True @@ -1346,15 +1342,26 @@ def _dup(self, other, **kwargs): Whether deps should be copied too. Set to false to copy a spec but not its dependencies. """ + + # We don't count dependencies as changes here + changed = True + if hasattr(self, 'name'): + changed = (self.name != other.name and self.versions != other.versions and \ + self.architecture != other.architecture and self.compiler != other.compiler and \ + self.variants != other.variants and self._normal != other._normal and \ + self.concrete != other.concrete and self.external != other.external) + # Local node attributes get copied first. self.name = other.name self.versions = other.versions.copy() self.architecture = other.architecture self.compiler = other.compiler.copy() if other.compiler else None - self.dependents = DependencyMap() - self.dependencies = DependencyMap() + if kwargs.get('cleardeps', True): + self.dependents = DependencyMap() + self.dependencies = DependencyMap() self.variants = other.variants.copy() self.variants.spec = self + self.external = other.external # If we copy dependencies, preserve DAG structure in the new spec if kwargs.get('deps', True): @@ -1372,6 +1379,8 @@ def _dup(self, other, **kwargs): # Since we preserved structure, we can copy _normal safely. 
self._normal = other._normal self._concrete = other._concrete + self.external = other.external + return changed def copy(self, **kwargs): @@ -1796,6 +1805,7 @@ def spec(self): spec.variants = VariantMap(spec) spec.architecture = None spec.compiler = None + spec.external = None spec.dependents = DependencyMap() spec.dependencies = DependencyMap() diff --git a/var/spack/packages/mpich/package.py b/var/spack/packages/mpich/package.py index b6b2dfde21..dfff22152d 100644 --- a/var/spack/packages/mpich/package.py +++ b/var/spack/packages/mpich/package.py @@ -45,6 +45,7 @@ def setup_dependent_environment(self, module, spec, dep_spec): os.environ['MPICH_F77'] = 'f77' os.environ['MPICH_F90'] = 'f90' + module.mpicc = join_path(self.prefix.bin, 'mpicc') def install(self, spec, prefix): config_args = ["--prefix=" + prefix, diff --git a/var/spack/packages/mvapich2/package.py b/var/spack/packages/mvapich2/package.py index ca0b1287c1..93bce011b7 100644 --- a/var/spack/packages/mvapich2/package.py +++ b/var/spack/packages/mvapich2/package.py @@ -11,10 +11,17 @@ class Mvapich2(Package): version('2.0', '9fbb68a4111a8b6338e476dc657388b4', url='http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz') + + version('2.1', '0095ceecb19bbb7fb262131cb9c2cdd6', + url='http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz') provides('mpi@:2.2', when='@1.9') # MVAPICH2-1.9 supports MPI 2.2 provides('mpi@:3.0', when='@2.0') # MVAPICH2-2.0 supports MPI 3.0 + variant('psm', default=False, description="build with psm") + + variant('pmi', default=False, description="build with pmi") + depends_on('pmgr_collective', when='+pmi') def install(self, spec, prefix): # we'll set different configure flags depending on our environment @@ -80,7 +87,13 @@ def install(self, spec, prefix): configure_args.append("--with-device=ch3:psm") else: # throw this flag on IB systems - configure_args.append("--with-device=ch3:mrail", "--with-rdma=gen2") + configure_args.append("--with-device=ch3:mrail") + configure_args.append("--with-rdma=gen2") + + if "+pmi" in spec: + configure_args.append("--with-pmi=pmgr_collective" % spec['pmgr_collective'].prefix) + else: + configure_args.append("--with-pmi=slurm") # TODO: shared-memory build @@ -93,7 +106,7 @@ def install(self, spec, prefix): "--enable-f77", "--enable-fc", "--enable-cxx", "--enable-shared", "--enable-sharedlibs=gcc", "--enable-debuginfo", - "--with-pm=no", "--with-pmi=slurm", + "--with-pm=no", "--enable-romio", "--with-file-system=lustre+nfs+ufs", "--disable-mpe", "--without-mpe", "--disable-silent-rules", From e4d2ba30b57618da388a1a990381d149b33d7aba Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Fri, 2 Oct 2015 11:00:41 -0700 Subject: [PATCH 009/189] Fix failure in spack.test.config.ConfigTest from incorrect compiler config merging --- lib/spack/spack/config.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 60577c45b3..712a2b78fc 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -174,8 +174,12 @@ def _merge_dicts(d1, d2): for key2, val2 in d2.iteritems(): if not key2 in d1: d1[key2] = val2 - else: + elif type(d1[key2]) is dict and type(val2) is dict: d1[key2] = _merge_dicts(d1[key2], val2) + elif (type(d1) is list) and (type(d2) is list): + d1.extend(d2) + else: + d1[key2] = val2 return d1 return d2 @@ -360,7 +364,7 @@ def add_to_mirror_config(addition_dict, scope=None): def add_to_compiler_config(addition_dict, scope=None, 
arch=None): - """Add compilerss to the configuration files""" + """Add compilers to the configuration files""" if not arch: arch = spack.architecture.sys_type() add_to_config('compilers', { arch : addition_dict }, scope) From 18f0b24a7f21ec7b46510f45867386b7600bbc55 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Mon, 5 Oct 2015 11:30:48 -0700 Subject: [PATCH 010/189] Add tests for spack external dependencies, plus fixes for issues found by those tests. --- lib/spack/spack/concretize.py | 9 +++-- lib/spack/spack/spec.py | 2 +- lib/spack/spack/test/concretize.py | 28 ++++++++++++++ .../site_spackconfig/packages.yaml | 13 +++++++ .../mock_packages/externalprereq/package.py | 34 +++++++++++++++++ .../mock_packages/externaltest/package.py | 37 +++++++++++++++++++ .../mock_packages/externaltool/package.py | 36 ++++++++++++++++++ .../mock_packages/externalvirtual/package.py | 37 +++++++++++++++++++ 8 files changed, 191 insertions(+), 5 deletions(-) create mode 100644 var/spack/mock_configs/site_spackconfig/packages.yaml create mode 100644 var/spack/mock_packages/externalprereq/package.py create mode 100644 var/spack/mock_packages/externaltest/package.py create mode 100644 var/spack/mock_packages/externaltool/package.py create mode 100644 var/spack/mock_packages/externalvirtual/package.py diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 01ff163493..c27a023136 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -85,8 +85,8 @@ def _valid_virtuals_and_externals(self, spec): provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name) packages = sorted(providers, cmp=provider_cmp) else: - if not spec_externals(spec) or spec.external: - return None + if spec.external: + return False packages = [spec] # For each candidate package, if it has externals add those to the candidates @@ -129,7 +129,7 @@ def concretize_virtual_and_external(self, spec): #Try a looser ABI matching candidate = next((c for c in candidates if spack.abi.compatible(c[0], other_spec, loose=True)), None) if not candidate: - #Pick the first choice + #No ABI matches. Pick the top choice based on the orignal preferences. candidate = candidates[0] external = candidate[1] candidate_spec = candidate[0] @@ -144,10 +144,11 @@ def concretize_virtual_and_external(self, spec): if not spec.external and external: spec.external = external changed = True + #If we're external then trim the dependencies if external and spec.dependencies: changed = True - spec.depencencies = DependencyMap() + spec.dependencies = DependencyMap() return changed diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 6984b4a174..49b67cd361 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1022,7 +1022,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index): # if we descend into a virtual spec, there's nothing more # to normalize. Concretize will finish resolving it later. - if self.virtual: + if self.virtual or self.external: return False # Combine constraints from package deps with constraints from diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index b3a77d076a..f81a2f5af8 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -192,3 +192,31 @@ def test_compiler_inheritance(self): # TODO: not exactly the syntax I would like. 
self.assertTrue(spec['libdwarf'].compiler.satisfies('clang')) self.assertTrue(spec['libelf'].compiler.satisfies('clang')) + + + def test_external_package(self): + spec = Spec('externaltool') + spec.concretize() + + self.assertEqual(spec['externaltool'].external, '/path/to/external_tool') + self.assertFalse('externalprereq' in spec) + self.assertTrue(spec['externaltool'].compiler.satisfies('gcc')) + + + def test_nobuild_package(self): + got_error = False + spec = Spec('externaltool%clang') + try: + spec.concretize() + except spack.concretize.NoBuildError: + got_error = True + self.assertTrue(got_error) + + + def test_external_and_virtual(self): + spec = Spec('externaltest') + spec.concretize() + self.assertTrue(spec['externaltool'].external, '/path/to/external_tool') + self.assertTrue(spec['stuff'].external, '/path/to/external_virtual_gcc') + self.assertTrue(spec['externaltool'].compiler.satisfies('gcc')) + self.assertTrue(spec['stuff'].compiler.satisfies('gcc')) diff --git a/var/spack/mock_configs/site_spackconfig/packages.yaml b/var/spack/mock_configs/site_spackconfig/packages.yaml new file mode 100644 index 0000000000..eb52c6cf11 --- /dev/null +++ b/var/spack/mock_configs/site_spackconfig/packages.yaml @@ -0,0 +1,13 @@ +packages: + - externaltool: + nobuild: True + - externaltool@1.0%gcc@4.5.0: + path: /path/to/external_tool + - externalvirtual@2.0%clang@3.3: + path: /path/to/external_virtual_clang + nobuild: True + - externalvirtual@1.0%gcc@4.5.0: + path: /path/to/external_virtual_gcc + nobuild: True + + diff --git a/var/spack/mock_packages/externalprereq/package.py b/var/spack/mock_packages/externalprereq/package.py new file mode 100644 index 0000000000..7d63925693 --- /dev/null +++ b/var/spack/mock_packages/externalprereq/package.py @@ -0,0 +1,34 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Externalprereq(Package): + homepage = "http://somewhere.com" + url = "http://somewhere.com/prereq-1.0.tar.gz" + + version('1.4', 'f1234567890abcdef1234567890abcde') + + def install(self, spec, prefix): + pass diff --git a/var/spack/mock_packages/externaltest/package.py b/var/spack/mock_packages/externaltest/package.py new file mode 100644 index 0000000000..c546922f87 --- /dev/null +++ b/var/spack/mock_packages/externaltest/package.py @@ -0,0 +1,37 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Externaltest(Package): + homepage = "http://somewhere.com" + url = "http://somewhere.com/test-1.0.tar.gz" + + version('1.0', '1234567890abcdef1234567890abcdef') + + depends_on('stuff') + depends_on('externaltool') + + def install(self, spec, prefix): + pass diff --git a/var/spack/mock_packages/externaltool/package.py b/var/spack/mock_packages/externaltool/package.py new file mode 100644 index 0000000000..af902bd70e --- /dev/null +++ b/var/spack/mock_packages/externaltool/package.py @@ -0,0 +1,36 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Externaltool(Package): + homepage = "http://somewhere.com" + url = "http://somewhere.com/tool-1.0.tar.gz" + + version('1.0', '1234567890abcdef1234567890abcdef') + + depends_on('externalprereq') + + def install(self, spec, prefix): + pass diff --git a/var/spack/mock_packages/externalvirtual/package.py b/var/spack/mock_packages/externalvirtual/package.py new file mode 100644 index 0000000000..722c1e1c53 --- /dev/null +++ b/var/spack/mock_packages/externalvirtual/package.py @@ -0,0 +1,37 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Externalvirtual(Package): + homepage = "http://somewhere.com" + url = "http://somewhere.com/stuff-1.0.tar.gz" + + version('1.0', '1234567890abcdef1234567890abcdef') + version('2.0', '234567890abcdef1234567890abcdef1') + + provides('stuff') + + def install(self, spec, prefix): + pass From fac4428766fb0a6b6cd357b654215f55df1220d4 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Mon, 5 Oct 2015 14:04:33 -0700 Subject: [PATCH 011/189] Documentation for external packages. --- lib/spack/docs/site_configuration.rst | 73 +++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst index 1e6740a434..a7211a9d95 100644 --- a/lib/spack/docs/site_configuration.rst +++ b/lib/spack/docs/site_configuration.rst @@ -54,6 +54,79 @@ more elements to the list to indicate where your own site's temporary directory is. +External Packages +~~~~~~~~~~~~~~~~~~~~~ +It's possible for Spack to use certain externally-installed +packages rather than always rebuilding packages. This may be desirable +if machines ship with system packages, such as a customized MPI +that should be used instead of Spack building its own MPI. + +External packages are configured through the ``packages.yaml`` file found +in a Spack installation's ``etc/spack/`` or a user's ``~/.spack/`` +directory. Here's an example of an external configuration:: + +.. 
code-block:: yaml + + packages: + - openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: + path: /opt/openmpi-1.4.3 + - openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: + path: /opt/openmpi-1.4.3-debug + - openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: + path: /opt/openmpi-1.6.5-intel + +This example lists three installations of OpenMPI, one built with gcc, +one built with gcc and debug information, and another built with OpenMPI. +If Spack is asked to build a package that uses one of these MPIs as a +dependency, it link the package to the pre-installed OpenMPI in +the given directory. + +Each ``packages.yaml`` should begin with a ``packages:`` token, followed +by a list of package specs. Specs in the ``packages.yaml`` have at most +one ``path`` tag, which specifies the top-level directory where the +spec is installed. + +Each spec should be as well-defined as reasonably possible. If a +package lacks a spec component, such as missing a compiler or +package version, then Spack will guess the missing component based +on its most-favored packages, and it may guess incorrectly. + +All package versions and compilers listed in ``packages.yaml`` should +have entries in Spack's packages and compiler configuration, even +the package and compiler may not actually be used. + +The packages configuration can tell Spack to use an external location +for certain package versions, but it does not restrict Spack to using +external packages. In the above example, if an OpenMPI 1.8.4 became +available Spack may choose to start building and linking with that version +rather than continue using the pre-installed OpenMPI versions. + +To prevent this, the ``packages.yaml`` configuration also allows packages +to be flagged as non-buildable. The previous example could be modified to +be:: + +.. code-block:: yaml + + packages: + - openmpi: + nobuild: True + - openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: + path: /opt/openmpi-1.4.3 + - openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: + path: /opt/openmpi-1.4.3-debug + - openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: + path: /opt/openmpi-1.6.5-intel + +The addition of the ``nobuild`` flag tells Spack that it should never build +its own version of OpenMPI, and it will instead always rely on a pre-built +OpenMPI. Similar to ``path``, ``nobuild`` is specified as a property under +a spec and will prevent building of anything that satisfies that spec. + +The ``nobuild`` does not need to be paired with external packages. +It could also be used alone to forbid versions of packages that may be +buggy or otherwise undesirable. + + Profiling ~~~~~~~~~~~~~~~~~~~~~ From dcddb19e5b55c8c61279a084bdbaa95047236ccb Mon Sep 17 00:00:00 2001 From: alalazo Date: Fri, 1 Jan 2016 17:35:01 +0100 Subject: [PATCH 012/189] added class decorator to define composite classes --- lib/spack/spack/util/pattern.py | 98 +++++++++++++++++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 lib/spack/spack/util/pattern.py diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py new file mode 100644 index 0000000000..8d1584b4c2 --- /dev/null +++ b/lib/spack/spack/util/pattern.py @@ -0,0 +1,98 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import inspect +import collections +import functools + + +def composite(interface=None, method_list=None, container=list): + """ + Returns a class decorator that patches a class adding all the methods it needs to be a composite for a given + interface. + + :param interface: class exposing the interface to which the composite object must conform. Only non-private and + non-special methods will be taken into account + + :param method_list: names of methods that should be part of the composite + + :param container: container for the composite object (default = list). Must fulfill the MutableSequence contract. + The composite class will expose the container API to manage object composition + + :return: class decorator + """ + # Check if container fulfills the MutableSequence contract and raise an exception if it doesn't + # The patched class returned by the decorator will inherit from the container class to expose the + # interface needed to manage objects composition + if not issubclass(container, collections.MutableSequence): + raise TypeError("Container must fulfill the MutableSequence contract") + + # Check if at least one of the 'interface' or the 'method_list' arguments are defined + if interface is None and method_list is None: + raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite") + + def cls_decorator(cls): + # Retrieve the base class of the composite. Inspect its the methods and decide which ones will be overridden + def no_special_no_private(x): + return inspect.ismethod(x) and not x.__name__.startswith('_') + + # Patch the behavior of each of the methods in the previous list. This is done associating an instance of the + # descriptor below to any method that needs to be patched. + class IterateOver(object): + """ + Decorator used to patch methods in a composite. It iterates over all the items in the instance containing the + associated attribute and calls for each of them an attribute with the same name + """ + def __init__(self, name, func=None): + self.name = name + self.func = func + + def __get__(self, instance, owner): + def getter(*args, **kwargs): + for item in instance: + getattr(item, self.name)(*args, **kwargs) + # If we are using this descriptor to wrap a method from an interface, then we must conditionally + # use the `functools.wraps` decorator to set the appropriate fields. 
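+                # (functools.wraps copies metadata such as __name__ and __doc__ from
+                # the wrapped interface method onto 'getter'; when the composite is
+                # built from a bare method_list there is no function object to copy
+                # from, hence the conditional.)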
+ if self.func is not None: + getter = functools.wraps(self.func)(getter) + return getter + + dictionary_for_type_call = {} + # Construct a dictionary with the methods explicitly passed as name + if method_list is not None: + method_list_dict = {name: IterateOver(name) for name in method_list} + dictionary_for_type_call.update(method_list_dict) + # Construct a dictionary with the methods inspected from the interface + if interface is not None: + interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)} + interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()} + dictionary_for_type_call.update(interface_methods_dict) + # Get the methods that are defined in the scope of the composite class and override any previous definition + cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)} + dictionary_for_type_call.update(cls_method) + # Generate the new class on the fly and return it + wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call) + return wrapper_class + + return cls_decorator From f499f71f64994d1914ca3e8195a79828f442f6e8 Mon Sep 17 00:00:00 2001 From: alalazo Date: Fri, 1 Jan 2016 17:36:58 +0100 Subject: [PATCH 013/189] Package : factored out code in do_stage and do_fetch, changed mirror command accordingly --- lib/spack/spack/fetch_strategy.py | 116 ++++++++++++++---------------- lib/spack/spack/mirror.py | 80 ++++++++------------- lib/spack/spack/package.py | 113 +++++++++++++---------------- lib/spack/spack/stage.py | 51 ++++++++++++- 4 files changed, 181 insertions(+), 179 deletions(-) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index a9374fb34b..72b6abe270 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -55,53 +55,59 @@ from spack.version import Version, ver from spack.util.compression import decompressor_for, extension +import spack.util.pattern as pattern + """List of all fetch strategies, created by FetchStrategy metaclass.""" all_strategies = [] + def _needs_stage(fun): """Many methods on fetch strategies require a stage to be set using set_stage(). This decorator adds a check for self.stage.""" + @wraps(fun) def wrapper(self, *args, **kwargs): if not self.stage: raise NoStageError(fun) return fun(self, *args, **kwargs) + return wrapper class FetchStrategy(object): """Superclass of all fetch strategies.""" - enabled = False # Non-abstract subclasses should be enabled. + enabled = False # Non-abstract subclasses should be enabled. required_attributes = None # Attributes required in version() args. class __metaclass__(type): """This metaclass registers all fetch strategies in a list.""" + def __init__(cls, name, bases, dict): type.__init__(cls, name, bases, dict) if cls.enabled: all_strategies.append(cls) - def __init__(self): # The stage is initialized late, so that fetch strategies can be constructed # at package construction time. This is where things will be fetched. self.stage = None - def set_stage(self, stage): """This is called by Stage before any of the fetching methods are called on the stage.""" self.stage = stage - # Subclasses need to implement these methods def fetch(self): pass # Return True on success, False on fail. + def check(self): pass # Do checksum. + def expand(self): pass # Expand archive. + def reset(self): pass # Revert to freshly downloaded state. 
def archive(self, destination): pass # Used to create tarball for mirror. - def __str__(self): # Should be human readable URL. + def __str__(self): # Should be human readable URL. return "FetchStrategy.__str___" # This method is used to match fetch strategies to version() @@ -111,6 +117,15 @@ def matches(cls, args): return any(k in args for k in cls.required_attributes) +@pattern.composite(interface=FetchStrategy) +class FetchStrategyComposite(object): + """ + Composite for a FetchStrategy object. Implements the GoF composite pattern. + """ + matches = FetchStrategy.matches + set_stage = FetchStrategy.set_stage + + class URLFetchStrategy(FetchStrategy): """FetchStrategy that pulls source code from a URL for an archive, checks the archive against a checksum,and decompresses the archive. @@ -142,19 +157,19 @@ def fetch(self): tty.msg("Trying to fetch from %s" % self.url) - curl_args = ['-O', # save file to disk - '-f', # fail on >400 errors - '-D', '-', # print out HTML headers - '-L', self.url,] + curl_args = ['-O', # save file to disk + '-f', # fail on >400 errors + '-D', '-', # print out HTML headers + '-L', self.url, ] if sys.stdout.isatty(): curl_args.append('-#') # status bar when using a tty else: - curl_args.append('-sS') # just errors when not. + curl_args.append('-sS') # just errors when not. # Run curl but grab the mime type from the http headers headers = spack.curl( - *curl_args, return_output=True, fail_on_error=False) + *curl_args, return_output=True, fail_on_error=False) if spack.curl.returncode != 0: # clean up archive on failure. @@ -164,24 +179,23 @@ def fetch(self): if spack.curl.returncode == 22: # This is a 404. Curl will print the error. raise FailedDownloadError( - self.url, "URL %s was not found!" % self.url) + self.url, "URL %s was not found!" % self.url) elif spack.curl.returncode == 60: # This is a certificate error. Suggest spack -k raise FailedDownloadError( - self.url, - "Curl was unable to fetch due to invalid certificate. " - "This is either an attack, or your cluster's SSL configuration " - "is bad. If you believe your SSL configuration is bad, you " - "can try running spack -k, which will not check SSL certificates." - "Use this at your own risk.") + self.url, + "Curl was unable to fetch due to invalid certificate. " + "This is either an attack, or your cluster's SSL configuration " + "is bad. If you believe your SSL configuration is bad, you " + "can try running spack -k, which will not check SSL certificates." + "Use this at your own risk.") else: # This is some other curl error. Curl will print the # error, but print a spack message too raise FailedDownloadError( - self.url, "Curl failed with error %d" % spack.curl.returncode) - + self.url, "Curl failed with error %d" % spack.curl.returncode) # Check if we somehow got an HTML file rather than the archive we # asked for. We only look at the last content type, to handle @@ -196,7 +210,6 @@ def fetch(self): if not self.archive_file: raise FailedDownloadError(self.url) - @property def archive_file(self): """Path to the source archive within this stage directory.""" @@ -209,7 +222,7 @@ def expand(self): self.stage.chdir() if not self.archive_file: raise NoArchiveFileError("URLFetchStrategy couldn't find archive file", - "Failed on expand() for URL %s" % self.url) + "Failed on expand() for URL %s" % self.url) decompress = decompressor_for(self.archive_file) @@ -241,7 +254,6 @@ def expand(self): # Set the wd back to the stage when done. 
self.stage.chdir() - def archive(self, destination): """Just moves this archive to the destination.""" if not self.archive_file: @@ -252,7 +264,6 @@ def archive(self, destination): shutil.move(self.archive_file, destination) - @_needs_stage def check(self): """Check the downloaded archive against a checksum digest. @@ -263,9 +274,8 @@ def check(self): checker = crypto.Checker(self.digest) if not checker.check(self.archive_file): raise ChecksumError( - "%s checksum failed for %s." % (checker.hash_name, self.archive_file), - "Expected %s but got %s." % (self.digest, checker.sum)) - + "%s checksum failed for %s." % (checker.hash_name, self.archive_file), + "Expected %s but got %s." % (self.digest, checker.sum)) @_needs_stage def reset(self): @@ -277,12 +287,10 @@ def reset(self): shutil.rmtree(self.stage.source_path, ignore_errors=True) self.expand() - def __repr__(self): url = self.url if self.url else "no url" return "URLFetchStrategy<%s>" % url - def __str__(self): if self.url: return self.url @@ -298,33 +306,30 @@ def __init__(self, name, *rev_types, **kwargs): # Set a URL based on the type of fetch strategy. self.url = kwargs.get(name, None) if not self.url: raise ValueError( - "%s requires %s argument." % (self.__class__, name)) + "%s requires %s argument." % (self.__class__, name)) # Ensure that there's only one of the rev_types if sum(k in kwargs for k in rev_types) > 1: raise FetchStrategyError( - "Supply only one of %s to fetch with %s." % ( - comma_or(rev_types), name)) + "Supply only one of %s to fetch with %s." % ( + comma_or(rev_types), name)) # Set attributes for each rev type. for rt in rev_types: setattr(self, rt, kwargs.get(rt, None)) - @_needs_stage def check(self): tty.msg("No checksum needed when fetching with %s." % self.name) - @_needs_stage def expand(self): tty.debug("Source fetched with %s is already expanded." % self.name) - @_needs_stage def archive(self, destination, **kwargs): - assert(extension(destination) == 'tar.gz') - assert(self.stage.source_path.startswith(self.stage.path)) + assert (extension(destination) == 'tar.gz') + assert (self.stage.source_path.startswith(self.stage.path)) tar = which('tar', required=True) @@ -338,16 +343,13 @@ def archive(self, destination, **kwargs): self.stage.chdir() tar('-czf', destination, os.path.basename(self.stage.source_path)) - def __str__(self): return "VCS: %s" % self.url - def __repr__(self): return "%s<%s>" % (self.__class__, self.url) - class GitFetchStrategy(VCSFetchStrategy): """Fetch strategy that gets source code from a git repository. Use like this in a package: @@ -369,23 +371,20 @@ class GitFetchStrategy(VCSFetchStrategy): def __init__(self, **kwargs): super(GitFetchStrategy, self).__init__( - 'git', 'tag', 'branch', 'commit', **kwargs) + 'git', 'tag', 'branch', 'commit', **kwargs) self._git = None - @property def git_version(self): vstring = self.git('--version', return_output=True).lstrip('git version ') return Version(vstring) - @property def git(self): if not self._git: self._git = which('git', required=True) return self._git - @_needs_stage def fetch(self): self.stage.chdir() @@ -418,7 +417,7 @@ def fetch(self): if self.branch: args.extend(['--branch', self.branch]) elif self.tag and self.git_version >= ver('1.8.5.2'): - args.extend(['--branch', self.tag]) + args.extend(['--branch', self.tag]) # Try to be efficient if we're using a new enough git. 
# This checks out only one branch's history @@ -429,7 +428,7 @@ def fetch(self): # Yet more efficiency, only download a 1-commit deep tree if self.git_version >= ver('1.7.1'): try: - self.git(*(args + ['--depth','1', self.url])) + self.git(*(args + ['--depth', '1', self.url])) cloned = True except spack.error.SpackError: # This will fail with the dumb HTTP transport @@ -452,18 +451,15 @@ def fetch(self): self.git('pull', '--tags', ignore_errors=1) self.git('checkout', self.tag) - def archive(self, destination): super(GitFetchStrategy, self).archive(destination, exclude='.git') - @_needs_stage def reset(self): self.stage.chdir_to_source() self.git('checkout', '.') self.git('clean', '-f') - def __str__(self): return "[git] %s" % self.url @@ -484,19 +480,17 @@ class SvnFetchStrategy(VCSFetchStrategy): def __init__(self, **kwargs): super(SvnFetchStrategy, self).__init__( - 'svn', 'revision', **kwargs) + 'svn', 'revision', **kwargs) self._svn = None if self.revision is not None: self.revision = str(self.revision) - @property def svn(self): if not self._svn: self._svn = which('svn', required=True) return self._svn - @_needs_stage def fetch(self): self.stage.chdir() @@ -515,7 +509,6 @@ def fetch(self): self.svn(*args) self.stage.chdir_to_source() - def _remove_untracked_files(self): """Removes untracked files in an svn repository.""" status = self.svn('status', '--no-ignore', return_output=True) @@ -529,23 +522,19 @@ def _remove_untracked_files(self): elif os.path.isdir(path): shutil.rmtree(path, ignore_errors=True) - def archive(self, destination): super(SvnFetchStrategy, self).archive(destination, exclude='.svn') - @_needs_stage def reset(self): self.stage.chdir_to_source() self._remove_untracked_files() self.svn('revert', '.', '-R') - def __str__(self): return "[svn] %s" % self.url - class HgFetchStrategy(VCSFetchStrategy): """Fetch strategy that gets source code from a Mercurial repository. 
Use like this in a package: @@ -568,10 +557,9 @@ class HgFetchStrategy(VCSFetchStrategy): def __init__(self, **kwargs): super(HgFetchStrategy, self).__init__( - 'hg', 'revision', **kwargs) + 'hg', 'revision', **kwargs) self._hg = None - @property def hg(self): if not self._hg: @@ -597,11 +585,9 @@ def fetch(self): self.hg(*args) - def archive(self, destination): super(HgFetchStrategy, self).archive(destination, exclude='.hg') - @_needs_stage def reset(self): self.stage.chdir() @@ -619,7 +605,6 @@ def reset(self): shutil.move(scrubbed, source_path) self.stage.chdir_to_source() - def __str__(self): return "[hg] %s" % self.url @@ -693,9 +678,10 @@ def __init__(self, msg, long_msg): class FailedDownloadError(FetchError): """Raised wen a download fails.""" + def __init__(self, url, msg=""): super(FailedDownloadError, self).__init__( - "Failed to fetch file from URL: %s" % url, msg) + "Failed to fetch file from URL: %s" % url, msg) self.url = url @@ -718,12 +704,14 @@ def __init__(self, pkg, version): class ChecksumError(FetchError): """Raised when archive fails to checksum.""" + def __init__(self, message, long_msg=None): super(ChecksumError, self).__init__(message, long_msg) class NoStageError(FetchError): """Raised when fetch operations are called before set_stage().""" + def __init__(self, method): super(NoStageError, self).__init__( - "Must call FetchStrategy.set_stage() before calling %s" % method.__name__) + "Must call FetchStrategy.set_stage() before calling %s" % method.__name__) diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 1d9b0e7ef2..44e03a8a45 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -45,12 +45,11 @@ from spack.util.compression import extension, allowed_archive -def mirror_archive_filename(spec): +def mirror_archive_filename(spec, fetcher): """Get the name of the spec's archive in the mirror.""" if not spec.version.concrete: raise ValueError("mirror.path requires spec with concrete version.") - fetcher = spec.package.fetcher if isinstance(fetcher, fs.URLFetchStrategy): # If we fetch this version with a URLFetchStrategy, use URL's archive type ext = url.downloaded_file_extension(fetcher.url) @@ -61,9 +60,9 @@ def mirror_archive_filename(spec): return "%s-%s.%s" % (spec.package.name, spec.version, ext) -def mirror_archive_path(spec): +def mirror_archive_path(spec, fetcher): """Get the relative path to the spec's archive within a mirror.""" - return join_path(spec.name, mirror_archive_filename(spec)) + return join_path(spec.name, mirror_archive_filename(spec, fetcher)) def get_matching_versions(specs, **kwargs): @@ -158,68 +157,47 @@ def create(path, specs, **kwargs): everything_already_exists = True for spec in version_specs: pkg = spec.package - - stage = None + tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@"))) try: - # create a subdirectory for the current package@version - archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec))) - subdir = os.path.dirname(archive_path) - mkdirp(subdir) + for ii, stage in enumerate(pkg.stage): + fetcher = stage.fetcher + if ii == 0: + # create a subdirectory for the current package@version + archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher))) + name = spec.format("$_$@") + else: + resource = stage.resource + archive_path = join_path(subdir, suggest_archive_basename(resource)) + name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@")) + subdir = os.path.dirname(archive_path) + 
mkdirp(subdir) - if os.path.exists(archive_path): - tty.msg("Already added %s" % spec.format("$_$@")) - else: - everything_already_exists = False - # Set up a stage and a fetcher for the download - unique_fetch_name = spec.format("$_$@") - fetcher = fs.for_package_version(pkg, pkg.version) - stage = Stage(fetcher, name=unique_fetch_name) - fetcher.set_stage(stage) + if os.path.exists(archive_path): + tty.msg("{name} : already added".format(name=name)) + else: + everything_already_exists = False + fetcher.fetch() + if not kwargs.get('no_checksum', False): + fetcher.check() + tty.msg("{name} : checksum passed".format(name=name)) - # Do the fetch and checksum if necessary - fetcher.fetch() - if not kwargs.get('no_checksum', False): - fetcher.check() - tty.msg("Checksum passed for %s@%s" % (pkg.name, pkg.version)) - - # Fetchers have to know how to archive their files. Use - # that to move/copy/create an archive in the mirror. - fetcher.archive(archive_path) - tty.msg("Added %s." % spec.format("$_$@")) - - # Fetch resources if they are associated with the spec - resources = pkg._get_resources() - for resource in resources: - resource_archive_path = join_path(subdir, suggest_archive_basename(resource)) - if os.path.exists(resource_archive_path): - tty.msg("Already added resource %s (%s@%s)." % (resource.name, pkg.name, pkg.version)) - continue - everything_already_exists = False - resource_stage_folder = pkg._resource_stage(resource) - resource_stage = Stage(resource.fetcher, name=resource_stage_folder) - resource.fetcher.set_stage(resource_stage) - resource.fetcher.fetch() - if not kwargs.get('no_checksum', False): - resource.fetcher.check() - tty.msg("Checksum passed for the resource %s (%s@%s)" % (resource.name, pkg.name, pkg.version)) - resource.fetcher.archive(resource_archive_path) - tty.msg("Added resource %s (%s@%s)." % (resource.name, pkg.name, pkg.version)) + # Fetchers have to know how to archive their files. Use + # that to move/copy/create an archive in the mirror. + fetcher.archive(archive_path) + tty.msg("{name} : added".format(name=name)) if everything_already_exists: present.append(spec) else: mirrored.append(spec) - except Exception, e: if spack.debug: sys.excepthook(*sys.exc_info()) else: tty.warn("Error while fetching %s." 
% spec.format('$_$@'), e.message) error.append(spec) - finally: - if stage: - stage.destroy() + pkg.stage.destroy() return (present, mirrored, error) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 6673e4f392..3ccedaa458 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -61,7 +61,7 @@ import spack.util.web import spack.fetch_strategy as fs from spack.version import * -from spack.stage import Stage +from spack.stage import Stage, ResourceStage, StageComposite from spack.util.compression import allowed_archive, extension from spack.util.executable import ProcessError @@ -431,23 +431,47 @@ def url_for_version(self, version): return spack.url.substitute_version(self.nearest_url(version), self.url_version(version)) + def _make_resource_stage(self, root_stage, fetcher, resource): + resource_stage_folder = self._resource_stage(resource) + # FIXME : works only for URLFetchStrategy + resource_mirror = join_path(self.name, os.path.basename(fetcher.url)) + stage = ResourceStage(resource.fetcher, root=root_stage, resource=resource, + name=resource_stage_folder, mirror_path=resource_mirror) + return stage + + def _make_root_stage(self, fetcher): + # Construct a mirror path (TODO: get this out of package.py) + mp = spack.mirror.mirror_archive_path(self.spec, fetcher) + # Construct a path where the stage should build.. + s = self.spec + stage_name = "%s-%s-%s" % (s.name, s.version, s.dag_hash()) + # Build the composite stage + stage = Stage(fetcher, mirror_path=mp, name=stage_name) + return stage + + def _make_stage(self): + # Construct a composite stage on top of the composite FetchStrategy + composite_fetcher = self.fetcher + composite_stage = StageComposite() + resources = self._get_resources() + for ii, fetcher in enumerate(composite_fetcher): + if ii == 0: + # Construct root stage first + stage = self._make_root_stage(fetcher) + else: + # Construct resource stage + resource = resources[ii - 1] # ii == 0 is root! + stage = self._make_resource_stage(composite_stage[0], fetcher, resource) + # Append the item to the composite + composite_stage.append(stage) + return composite_stage @property def stage(self): if not self.spec.concrete: raise ValueError("Can only get a stage for a concrete package.") - if self._stage is None: - # Construct a mirror path (TODO: get this out of package.py) - mp = spack.mirror.mirror_archive_path(self.spec) - - # Construct a path where the stage should build.. - s = self.spec - stage_name = "%s-%s-%s" % (s.name, s.version, s.dag_hash()) - - # Build the stage - self._stage = Stage(self.fetcher, mirror_path=mp, name=stage_name) - + self._stage = self._make_stage() return self._stage @@ -457,17 +481,25 @@ def stage(self, stage): self._stage = stage + def _make_fetcher(self): + # Construct a composite fetcher that always contains at least one element (the root package). In case there + # are resources associated with the package, append their fetcher to the composite. 
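+        # The resulting composite behaves like a list of fetchers: item 0 fetches
+        # the package itself and each subsequent item fetches one resource, e.g.
+        # [root_fetcher, resource1_fetcher, resource2_fetcher].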
+ root_fetcher = fs.for_package_version(self, self.version) + fetcher = fs.FetchStrategyComposite() # Composite fetcher + fetcher.append(root_fetcher) # Root fetcher is always present + resources = self._get_resources() + for resource in resources: + fetcher.append(resource.fetcher) + return fetcher + @property def fetcher(self): if not self.spec.versions.concrete: - raise ValueError( - "Can only get a fetcher for a package with concrete versions.") - + raise ValueError("Can only get a fetcher for a package with concrete versions.") if not self._fetcher: - self._fetcher = fs.for_package_version(self, self.version) + self._fetcher = self._make_fetcher() return self._fetcher - @fetcher.setter def fetcher(self, f): self._fetcher = f @@ -630,7 +662,7 @@ def remove_prefix(self): def do_fetch(self): - """Creates a stage directory and downloads the taball for this package. + """Creates a stage directory and downloads the tarball for this package. Working directory will be set to the stage directory. """ if not self.spec.concrete: @@ -656,20 +688,6 @@ def do_fetch(self): self.stage.fetch() - ########## - # Fetch resources - resources = self._get_resources() - for resource in resources: - resource_stage_folder = self._resource_stage(resource) - # FIXME : works only for URLFetchStrategy - resource_mirror = join_path(self.name, os.path.basename(resource.fetcher.url)) - resource_stage = Stage(resource.fetcher, name=resource_stage_folder, mirror_path=resource_mirror) - resource.fetcher.set_stage(resource_stage) - # Delegate to stage object to trigger mirror logic - resource_stage.fetch() - resource_stage.check() - ########## - self._fetch_time = time.time() - start_time if spack.do_checksum and self.version in self.versions: @@ -681,39 +699,10 @@ def do_stage(self): if not self.spec.concrete: raise ValueError("Can only stage concrete packages.") - def _expand_archive(stage, name=self.name): - archive_dir = stage.source_path - if not archive_dir: - stage.expand_archive() - tty.msg("Created stage in %s." % stage.path) - else: - tty.msg("Already staged %s in %s." 
% (name, stage.path)) - - self.do_fetch() - _expand_archive(self.stage) - - ########## - # Stage resources in appropriate path - resources = self._get_resources() - for resource in resources: - stage = resource.fetcher.stage - _expand_archive(stage, resource.name) - # Turn placement into a dict with relative paths - placement = os.path.basename(stage.source_path) if resource.placement is None else resource.placement - if not isinstance(placement, dict): - placement = {'': placement} - # Make the paths in the dictionary absolute and link - for key, value in placement.iteritems(): - link_path = join_path(self.stage.source_path, resource.destination, value) - source_path = join_path(stage.source_path, key) - if not os.path.exists(link_path): - # Create a symlink - os.symlink(source_path, link_path) - ########## + self.stage.expand_archive() self.stage.chdir_to_source() - def do_patch(self): """Calls do_stage(), then applied patches to the expanded tarball if they haven't been applied already.""" diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 754344fc01..9b4c8fdc1d 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -30,6 +30,8 @@ import llnl.util.tty as tty from llnl.util.filesystem import * +import spack.util.pattern as pattern + import spack import spack.config import spack.fetch_strategy as fs @@ -40,7 +42,7 @@ class Stage(object): - """A Stage object manaages a directory where some source code is + """A Stage object manages a directory where some source code is downloaded and built before being installed. It handles fetching the source code, either as an archive to be expanded or by checking it out of a repository. A stage's lifecycle @@ -276,7 +278,12 @@ def expand_archive(self): archive. Fail if the stage is not set up or if the archive is not yet downloaded. """ - self.fetcher.expand() + archive_dir = self.source_path + if not archive_dir: + self.fetcher.expand() + tty.msg("Created stage in %s." % self.path) + else: + tty.msg("Already staged %s in %s." % (self.name, self.path)) def chdir_to_source(self): @@ -310,6 +317,46 @@ def destroy(self): os.chdir(os.path.dirname(self.path)) +class ResourceStage(Stage): + def __init__(self, url_or_fetch_strategy, root, resource, **kwargs): + super(ResourceStage, self).__init__(url_or_fetch_strategy, **kwargs) + self.root_stage = root + self.resource = resource + + def expand_archive(self): + super(ResourceStage, self).expand_archive() + root_stage = self.root_stage + resource = self.resource + placement = os.path.basename(self.source_path) if resource.placement is None else resource.placement + if not isinstance(placement, dict): + placement = {'': placement} + # Make the paths in the dictionary absolute and link + for key, value in placement.iteritems(): + link_path = join_path(root_stage.source_path, resource.destination, value) + source_path = join_path(self.source_path, key) + if not os.path.exists(link_path): + # Create a symlink + os.symlink(source_path, link_path) + + +@pattern.composite(method_list=['fetch', 'check', 'expand_archive', 'restage', 'destroy']) +class StageComposite: + """ + Composite for Stage type objects. The first item in this composite is considered to be the root package, and + operations that return a value are forwarded to it. 
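+    # The decorator above makes this class inherit the list container API and
+    # turns each name in method_list into a call that is fanned out to every
+    # contained stage; the properties below instead delegate to self[0], the
+    # root stage.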
+ """ + @property + def source_path(self): + return self[0].source_path + + @property + def path(self): + return self[0].path + + def chdir_to_source(self): + return self[0].chdir_to_source() + + class DIYStage(object): """Simple class that allows any directory to be a spack stage.""" def __init__(self, path): From d95d169ac50a3c253ef9712782626af1e1610ca7 Mon Sep 17 00:00:00 2001 From: alalazo Date: Sat, 2 Jan 2016 19:04:06 +0100 Subject: [PATCH 014/189] fixed broken unit tests --- lib/spack/spack/test/install.py | 7 +++++-- lib/spack/spack/util/pattern.py | 26 ++++++++++++++++++++++---- 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 1ef4171fb2..bb7d3f4fd4 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -31,7 +31,7 @@ import spack from spack.stage import Stage -from spack.fetch_strategy import URLFetchStrategy +from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite from spack.directory_layout import YamlDirectoryLayout from spack.util.executable import which from spack.test.mock_packages_test import * @@ -81,7 +81,10 @@ def test_install_and_uninstall(self): pkg = spack.db.get(spec) # Fake the URL for the package so it downloads from a file. - pkg.fetcher = URLFetchStrategy(self.repo.url) + + fetcher = FetchStrategyComposite() + fetcher.append(URLFetchStrategy(self.repo.url)) + pkg.fetcher = fetcher try: pkg.do_install() diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py index 8d1584b4c2..73c1e26aa5 100644 --- a/lib/spack/spack/util/pattern.py +++ b/lib/spack/spack/util/pattern.py @@ -81,17 +81,35 @@ def getter(*args, **kwargs): dictionary_for_type_call = {} # Construct a dictionary with the methods explicitly passed as name if method_list is not None: - method_list_dict = {name: IterateOver(name) for name in method_list} + # python@2.7: method_list_dict = {name: IterateOver(name) for name in method_list} + method_list_dict = {} + for name in method_list: + method_list_dict[name] = IterateOver(name) dictionary_for_type_call.update(method_list_dict) # Construct a dictionary with the methods inspected from the interface if interface is not None: - interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)} - interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()} + ########## + # python@2.7: interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)} + interface_methods = {} + for name, method in inspect.getmembers(interface, predicate=no_special_no_private): + interface_methods[name] = method + ########## + # python@2.7: interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()} + interface_methods_dict = {} + for name, method in interface_methods.iteritems(): + interface_methods_dict[name] = IterateOver(name, method) + ########## dictionary_for_type_call.update(interface_methods_dict) # Get the methods that are defined in the scope of the composite class and override any previous definition - cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)} + ########## + # python@2.7: cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)} + cls_method = {} + for name, method in inspect.getmembers(cls, 
predicate=inspect.ismethod): + cls_method[name] = method + ########## dictionary_for_type_call.update(cls_method) # Generate the new class on the fly and return it + # FIXME : inherit from interface if we start to use ABC classes? wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call) return wrapper_class From bf4231dd4b906f20d522b9df5f2cae2ba6291174 Mon Sep 17 00:00:00 2001 From: alalazo Date: Sat, 2 Jan 2016 22:48:14 +0100 Subject: [PATCH 015/189] depends_on and when multimethod support boolean --- lib/spack/spack/directives.py | 6 +++- lib/spack/spack/multimethod.py | 9 ++++-- lib/spack/spack/util/architecture.py | 46 +++++++++++++++++++++++++++ var/spack/packages/openssl/package.py | 15 +++++++++ 4 files changed, 72 insertions(+), 4 deletions(-) create mode 100644 lib/spack/spack/util/architecture.py diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index aa9fbd8d33..02477bb904 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -174,7 +174,11 @@ def version(pkg, ver, checksum=None, **kwargs): def _depends_on(pkg, spec, when=None): - if when is None: + # If when is False do nothing + if when is False: + return + # If when is None or True make sure the condition is always satisfied + if when is None or when is True: when = pkg.name when_spec = parse_anonymous_spec(when, pkg.name) diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py index df9b9b2ab1..51c6a8e89d 100644 --- a/lib/spack/spack/multimethod.py +++ b/lib/spack/spack/multimethod.py @@ -193,10 +193,11 @@ def install(self, prefix): platform-specific versions. There's not much we can do to get around this because of the way decorators work. """ -class when(object): def __init__(self, spec): pkg = get_calling_module_name() - self.spec = parse_anonymous_spec(spec, pkg) + if spec is True: + spec = pkg + self.spec = parse_anonymous_spec(spec, pkg) if spec is not False else None def __call__(self, method): # Get the first definition of the method in the calling scope @@ -207,7 +208,9 @@ def __call__(self, method): if not type(original_method) == SpecMultiMethod: original_method = SpecMultiMethod(original_method) - original_method.register(self.spec, method) + if self.spec is not None: + original_method.register(self.spec, method) + return original_method diff --git a/lib/spack/spack/util/architecture.py b/lib/spack/spack/util/architecture.py new file mode 100644 index 0000000000..a020c74a7a --- /dev/null +++ b/lib/spack/spack/util/architecture.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +import sys + + +def os_is_in(*args): + """ + Return True if any element in the list is equal to sys.platform, False otherwise + + :param args: list of names to be checked + :return: True or False + """ + return any(map(lambda x: x == sys.platform, args)) + + +def os_is_not_in(*args): + """ + Return True if none of the elements in the list is equal to sys.platform, False otherwise + + :param args: list of names to be checked + :return: True or False + """ + return not os_is_in(*args) diff --git a/var/spack/packages/openssl/package.py b/var/spack/packages/openssl/package.py index 40648fca49..1fa288a36d 100644 --- a/var/spack/packages/openssl/package.py +++ b/var/spack/packages/openssl/package.py @@ -1,5 +1,7 @@ from spack import * +import spack.util.architecture as arch + class Openssl(Package): """The OpenSSL Project is a collaborative effort to develop a robust, commercial-grade, full-featured, and Open Source @@ -26,3 +28,16 @@ def install(self, spec, prefix): make() make("install") + + @when(arch.os_is_in('darwin')) + def install(self, spec, prefix): + perl = which('perl') + perl("./Configure", + "--prefix=%s" % prefix, + "--openssldir=%s/etc/openssl" % prefix, + "zlib", + "no-krb5", + "shared", + "darwin64-x86_64-cc") + filter_file(r'-arch x86_64', '', 'Makefile') + From 5520ce3c4a46476f5eec7a340471dd044932ac06 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Tue, 12 Jan 2016 12:19:03 -0600 Subject: [PATCH 016/189] Removed unnecessary URLs --- var/spack/packages/xz/package.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/var/spack/packages/xz/package.py b/var/spack/packages/xz/package.py index ba6c9733a7..fdcac95345 100644 --- a/var/spack/packages/xz/package.py +++ b/var/spack/packages/xz/package.py @@ -8,11 +8,9 @@ class Xz(Package): homepage = "http://tukaani.org/xz/" url = "http://tukaani.org/xz/xz-5.2.0.tar.bz2" - version('5.2.0', '867cc8611760240ebf3440bd6e170bb9', - url = 'http://tukaani.org/xz/xz-5.2.0.tar.bz2') - version('5.2.2', 'f90c9a0c8b259aee2234c4e0d7fd70af', - url = 'http://tukaani.org/xz/xz-5.2.2.tar.bz2') - + version('5.2.0', '867cc8611760240ebf3440bd6e170bb9') + version('5.2.2', 'f90c9a0c8b259aee2234c4e0d7fd70af') + def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() From 180c673c782d10864dce6cd7df43465dd05579a1 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 12 Jan 2016 13:21:53 -0600 Subject: [PATCH 017/189] Add different URL for older source code --- var/spack/packages/mpc/package.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/mpc/package.py b/var/spack/packages/mpc/package.py index 50477a0ccb..65c0ba7ea1 100644 --- a/var/spack/packages/mpc/package.py +++ b/var/spack/packages/mpc/package.py @@ -34,8 +34,14 @@ class Mpc(Package): version('1.0.3', 'd6a1d5f8ddea3abd2cc3e98f58352d26') version('1.0.2', '68fadff3358fb3e7976c7a398a0af4c3') - depends_on("gmp") - depends_on("mpfr") + depends_on("gmp@4.3.2:") + depends_on("mpfr@2.4.2:") + + def url_for_version(self, version): + if version < Version("1.0.1"): + return "http://www.multiprecision.org/mpc/download/mpc-%s.tar.gz" % version + else: + return "ftp://ftp.gnu.org/gnu/mpc/mpc-%s.tar.gz" % version def install(self, spec, prefix): configure("--prefix=%s" % prefix) From 7d0256c9fb5c3b8824cdeb0455d2a3e3b3f1eeb4 Mon Sep 17 00:00:00 2001 From: Nicola Varini Date: Fri, 15 Jan 2016 10:27:38 +0100 Subject: [PATCH 018/189] Quantum-ESPRESSO package --- var/spack/packages/espresso/package.py | 61 ++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 var/spack/packages/espresso/package.py diff --git a/var/spack/packages/espresso/package.py b/var/spack/packages/espresso/package.py new file mode 100644 index 0000000000..f9df74c0c0 --- /dev/null +++ b/var/spack/packages/espresso/package.py @@ -0,0 +1,61 @@ +# FIXME: +# This is a template package file for Spack. We've conveniently +# put "FIXME" labels next to all the things you'll want to change. +# +# Once you've edited all the FIXME's, delete this whole message, +# save this file, and test out your package like this: +# +# spack install espresso +# +# You can always get back here to change things with: +# +# spack edit espresso +# +# See the spack documentation for more information on building +# packages. +# +from spack import * + +class Espresso(Package): + """FIXME: put a proper description of your package here.""" + # FIXME: add a proper url for your package's homepage here. + homepage = "http://quantum-espresso.org" + url = "http://www.qe-forge.org/gf/download/frsrelease/199/855/espresso-5.2.1.tar.gz" + + version('5.2.1', 'da3ec5302e4343804e65de60f6004c2d') + variant('mpi', default=True, description='Build Quantum-ESPRESSO with mpi support') + variant('openmp', default=False, description='Build Quantum-ESPRESSO with mpi openmp') + variant('scalapack', default=False, description='Build Quantum-ESPRESSO with mpi openmp') + + + # FIXME: Add dependencies if this package requires them. + # depends_on("foo") + depends_on('mpi', when='+mpi') + + +# def install(self, spec, prefix): + # FIXME: Modify the configure line to suit your build system here. +# configure('--prefix=%s' % prefix) + + # FIXME: Add logic to build and install here +# make() +# make("install") + + def install(self, spec, prefix): + # TAU isn't happy with directories that have '@' in the path. Sigh. + + # TAU configure, despite the name , seems to be a manually written script (nothing related to autotools). + # As such it has a few #peculiarities# that make this build quite hackish. 
+ options = ["-prefix=%s" % prefix, + "--enable-parallel"] + + if '+openmp' in spec: + options.append('--enable-openmp') + + if '+scalapack' in spec: + options.append('--with-scalapack=yes') + + configure(*options) + make("all") + make("install") + From 7fc308ed26d7abfcb71af8165523b421a1f315c0 Mon Sep 17 00:00:00 2001 From: Nicola Varini Date: Fri, 15 Jan 2016 17:04:25 +0100 Subject: [PATCH 019/189] Update to version 5.3 --- var/spack/packages/espresso/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/packages/espresso/package.py b/var/spack/packages/espresso/package.py index f9df74c0c0..ce5dcc2acc 100644 --- a/var/spack/packages/espresso/package.py +++ b/var/spack/packages/espresso/package.py @@ -20,9 +20,9 @@ class Espresso(Package): """FIXME: put a proper description of your package here.""" # FIXME: add a proper url for your package's homepage here. homepage = "http://quantum-espresso.org" - url = "http://www.qe-forge.org/gf/download/frsrelease/199/855/espresso-5.2.1.tar.gz" + url = "http://www.qe-forge.org/gf/download/frsrelease/204/912/espresso-5.3.0.tar.gz" - version('5.2.1', 'da3ec5302e4343804e65de60f6004c2d') + version('5.3.0', '6848fcfaeb118587d6be36bd10b7f2c3') variant('mpi', default=True, description='Build Quantum-ESPRESSO with mpi support') variant('openmp', default=False, description='Build Quantum-ESPRESSO with mpi openmp') variant('scalapack', default=False, description='Build Quantum-ESPRESSO with mpi openmp') From 72d5cdf9abaee156d80dd027911a6cadbfebd7bb Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 20 Jan 2016 11:19:36 +0100 Subject: [PATCH 020/189] fixed two minor typos --- lib/spack/docs/site_configuration.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst index a7211a9d95..8ab4e89dfc 100644 --- a/lib/spack/docs/site_configuration.rst +++ b/lib/spack/docs/site_configuration.rst @@ -76,9 +76,9 @@ directory. Here's an example of an external configuration:: path: /opt/openmpi-1.6.5-intel This example lists three installations of OpenMPI, one built with gcc, -one built with gcc and debug information, and another built with OpenMPI. +one built with gcc and debug information, and another built with Intel. If Spack is asked to build a package that uses one of these MPIs as a -dependency, it link the package to the pre-installed OpenMPI in +dependency, it links the package to the pre-installed OpenMPI in the given directory. Each ``packages.yaml`` should begin with a ``packages:`` token, followed From ea9d0c0263126c370119e5fc435652c7214e658a Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 21 Jan 2016 09:58:47 -0600 Subject: [PATCH 021/189] Remove constraints on dependency versions --- var/spack/repos/builtin/packages/mpc/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/mpc/package.py b/var/spack/repos/builtin/packages/mpc/package.py index 65c0ba7ea1..108fec678f 100644 --- a/var/spack/repos/builtin/packages/mpc/package.py +++ b/var/spack/repos/builtin/packages/mpc/package.py @@ -34,8 +34,8 @@ class Mpc(Package): version('1.0.3', 'd6a1d5f8ddea3abd2cc3e98f58352d26') version('1.0.2', '68fadff3358fb3e7976c7a398a0af4c3') - depends_on("gmp@4.3.2:") - depends_on("mpfr@2.4.2:") + depends_on("gmp") + depends_on("mpfr") def url_for_version(self, version): if version < Version("1.0.1"): From 093b8317998c395ed3e2dee7f19d1bdcea1b0560 Mon Sep 17 00:00:00 2001 From: alalazo Date: Tue, 26 Jan 2016 12:08:28 +0100 Subject: [PATCH 022/189] Fixes issues introduced after merge with conflicts --- lib/spack/spack/package.py | 3 +++ lib/spack/spack/stage.py | 43 +++++++++++++++++++------------------- 2 files changed, 25 insertions(+), 21 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 06aecf11bd..14cbc0dbce 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -804,6 +804,9 @@ def _get_resources(self): for when_spec, resource_list in self.resources.items(): if when_spec in self.spec: resources.extend(resource_list) + # Sorts the resources by the length of the string representing their destination. Since any nested resource + # must contain another resource's name in its path, it seems that should work + resources = sorted(resources, key=lambda res: len(res.destination)) return resources def _resource_stage(self, resource): diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 1deac1137c..6ba301d95f 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -1,4 +1,4 @@ -############################################################################## +1 ############################################################################## # Copyright (c) 2013, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # @@ -23,7 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import re +import errno import shutil import tempfile from urlparse import urljoin @@ -38,7 +38,6 @@ import spack.fetch_strategy as fs import spack.error - STAGE_PREFIX = 'spack-stage-' @@ -95,7 +94,7 @@ def __init__(self, url_or_fetch_strategy, **kwargs): raise ValueError("Can't construct Stage without url or fetch strategy") self.fetcher.set_stage(self) self.default_fetcher = self.fetcher # self.fetcher can change with mirrors. - self.skip_checksum_for_mirror = True # used for mirrored archives of repositories. + self.skip_checksum_for_mirror = True # used for mirrored archives of repositories. self.name = kwargs.get('name') self.mirror_path = kwargs.get('mirror_path') @@ -104,7 +103,6 @@ def __init__(self, url_or_fetch_strategy, **kwargs): self.path = None self._setup() - def _cleanup_dead_links(self): """Remove any dead links in the stage directory.""" for file in os.listdir(spack.stage_path): @@ -114,7 +112,6 @@ def _cleanup_dead_links(self): if not os.path.exists(path): os.unlink(path) - def _need_to_create_path(self): """Makes sure nothing weird has happened since the last time we looked at path. 
Returns True if path already exists and is ok. @@ -132,7 +129,7 @@ def _need_to_create_path(self): # Path looks ok, but need to check the target of the link. if os.path.islink(self.path): real_path = os.path.realpath(self.path) - real_tmp = os.path.realpath(self.tmp_root) + real_tmp = os.path.realpath(self.tmp_root) if spack.use_tmp_stage: # If we're using a tmp dir, it's a link, and it points at the right spot, @@ -151,7 +148,6 @@ def _need_to_create_path(self): return False - def _setup(self): """Creates the stage directory. If spack.use_tmp_stage is False, the stage directory is created @@ -200,7 +196,6 @@ def _setup(self): # Make sure we can actually do something with the stage we made. ensure_access(self.path) - @property def archive_file(self): """Path to the source archive within this stage directory.""" @@ -217,7 +212,6 @@ def archive_file(self): else: return None - @property def source_path(self): """Returns the path to the expanded/checked out source code @@ -232,7 +226,6 @@ def source_path(self): return p return None - def chdir(self): """Changes directory to the stage path. Or dies if it is not set up.""" if os.path.isdir(self.path): @@ -240,7 +233,6 @@ def chdir(self): else: tty.die("Setup failed: no such directory: " + self.path) - def fetch(self, mirror_only=False): """Downloads an archive or checks out code from a repository.""" self.chdir() @@ -293,7 +285,6 @@ def fetch(self, mirror_only=False): self.fetcher = self.default_fetcher raise fs.FetchError(errMessage, None) - def check(self): """Check the downloaded archive against a checksum digest. No-op if this stage checks code out of a repository.""" @@ -307,7 +298,6 @@ def check(self): else: self.fetcher.check() - def expand_archive(self): """Changes to the stage directory and attempt to expand the downloaded archive. Fail if the stage is not set up or if the archive is not yet @@ -320,7 +310,6 @@ def expand_archive(self): else: tty.msg("Already staged %s in %s." % (self.name, self.path)) - def chdir_to_source(self): """Changes directory to the expanded archive directory. Dies with an error if there was no expanded archive. @@ -333,14 +322,12 @@ def chdir_to_source(self): if not os.listdir(path): tty.die("Archive was empty for %s" % self.name) - def restage(self): """Removes the expanded archive path if it exists, then re-expands the archive. """ self.fetcher.reset() - def destroy(self): """Remove this stage directory.""" remove_linked_tree(self.path) @@ -367,11 +354,24 @@ def expand_archive(self): placement = {'': placement} # Make the paths in the dictionary absolute and link for key, value in placement.iteritems(): - link_path = join_path(root_stage.source_path, resource.destination, value) + target_path = join_path(root_stage.source_path, resource.destination) + destination_path = join_path(target_path, value) source_path = join_path(self.source_path, key) - if not os.path.exists(link_path): + + try: + os.makedirs(target_path) + except OSError as err: + if err.errno == errno.EEXIST and os.path.isdir(target_path): + pass + else: + raise + + if not os.path.exists(destination_path): # Create a symlink - os.symlink(source_path, link_path) + tty.info('Moving resource stage\n\tsource : {stage}\n\tdestination : {destination}'.format( + stage=source_path, destination=destination_path + )) + shutil.move(source_path, destination_path) @pattern.composite(method_list=['fetch', 'check', 'expand_archive', 'restage', 'destroy']) @@ -380,6 +380,7 @@ class StageComposite: Composite for Stage type objects. 
The first item in this composite is considered to be the root package, and operations that return a value are forwarded to it. """ + @property def source_path(self): return self[0].source_path @@ -394,6 +395,7 @@ def chdir_to_source(self): class DIYStage(object): """Simple class that allows any directory to be a spack stage.""" + def __init__(self, path): self.archive_file = None self.path = path @@ -431,7 +433,6 @@ def _get_mirrors(): return [val for name, val in config.iteritems()] - def ensure_access(file=spack.stage_path): """Ensure we can access a directory and die with an error if we can't.""" if not can_access(file): From 5e3b7a424765ee64f71192bdca129c104a1246f6 Mon Sep 17 00:00:00 2001 From: alalazo Date: Tue, 26 Jan 2016 14:11:23 +0100 Subject: [PATCH 023/189] Fixes issues introduced after merge with conflicts --- lib/spack/spack/package.py | 2 +- lib/spack/spack/stage.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 14cbc0dbce..a1b8d12ec2 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -702,7 +702,7 @@ def do_stage(self, mirror_only=False): if not self.spec.concrete: raise ValueError("Can only stage concrete packages.") - self.do_fetch() + self.do_fetch(mirror_only) self.stage.expand_archive() self.stage.chdir_to_source() diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 6ba301d95f..f217450d42 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -1,4 +1,4 @@ -1 ############################################################################## +############################################################################## # Copyright (c) 2013, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # From ee6f69a227a4ee005759a64bdaa542745972d8e0 Mon Sep 17 00:00:00 2001 From: alalazo Date: Tue, 26 Jan 2016 14:25:16 +0100 Subject: [PATCH 024/189] Unit tests : hack to make them pass while a decision on how to proceed is made --- lib/spack/spack/test/mirror.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index 046ec56604..f83cc8090c 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -102,6 +102,7 @@ def check_mirror(self): spec = Spec(name).concretized() pkg = spec.package + pkg._stage = None saved_checksum_setting = spack.do_checksum try: # Stage the archive from the mirror and cd to it. 
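A brief illustration of the resource-ordering rationale in PATCH 022/189 above: sorting by the length of each resource's destination path stages an enclosing resource before anything nested inside it, because a nested resource's destination string necessarily contains, and is therefore longer than, its parent's. The sketch below is only a demonstration of that sort key, not the real staging code; the namedtuple stands in for Spack's Resource object and the names and destinations are made up::

    from collections import namedtuple

    # Minimal stand-in for the real Resource class; only 'destination' matters here.
    Resource = namedtuple('Resource', ['name', 'destination'])

    resources = [
        Resource('clang-tools-extra', 'tools/clang/tools/extra'),  # nested inside clang
        Resource('clang',             'tools/clang'),
        Resource('openmp',            'projects/openmp'),
    ]

    # Same key as the patch: shorter destinations (enclosing directories) sort first,
    # so each destination is staged before anything that lands underneath it.
    for res in sorted(resources, key=lambda r: len(r.destination)):
        print(res.name)   # clang, openmp, clang-tools-extra
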
From f7f192e12b0387537652ba4914e6e484d29ef728 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 27 Jan 2016 17:12:24 +0100 Subject: [PATCH 025/189] Added unit tests for util.pattern --- lib/spack/spack/test/__init__.py | 1 + lib/spack/spack/test/pattern.py | 101 +++++++++++++++++++++++++++++++ lib/spack/spack/util/pattern.py | 2 +- 3 files changed, 103 insertions(+), 1 deletion(-) create mode 100644 lib/spack/spack/test/pattern.py diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index a569cbbf35..4b9a361d4b 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -48,6 +48,7 @@ 'package_sanity', 'config', 'directory_layout', + 'pattern', 'python_version', 'git_fetch', 'svn_fetch', diff --git a/lib/spack/spack/test/pattern.py b/lib/spack/spack/test/pattern.py new file mode 100644 index 0000000000..64fc9187f9 --- /dev/null +++ b/lib/spack/spack/test/pattern.py @@ -0,0 +1,101 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +import unittest + +import spack.util.pattern as pattern + + +class CompositeTest(unittest.TestCase): + + def setUp(self): + class Base: + counter = 0 + + def add(self): + raise NotImplemented('add not implemented') + + def subtract(self): + raise NotImplemented('subtract not implemented') + + class One(Base): + def add(self): + Base.counter += 1 + + def subtract(self): + Base.counter -= 1 + + class Two(Base): + def add(self): + Base.counter += 2 + + def subtract(self): + Base.counter -= 2 + + self.Base = Base + self.One = One + self.Two = Two + + def test_composite_from_method_list(self): + + @pattern.composite(method_list=['add', 'subtract']) + class CompositeFromMethodList: + pass + + composite = CompositeFromMethodList() + composite.append(self.One()) + composite.append(self.Two()) + composite.add() + self.assertEqual(self.Base.counter, 3) + composite.pop() + composite.subtract() + self.assertEqual(self.Base.counter, 2) + + def test_composite_from_interface(self): + + @pattern.composite(interface=self.Base) + class CompositeFromInterface: + pass + + composite = CompositeFromInterface() + composite.append(self.One()) + composite.append(self.Two()) + composite.add() + self.assertEqual(self.Base.counter, 3) + composite.pop() + composite.subtract() + self.assertEqual(self.Base.counter, 2) + + def test_error_conditions(self): + + with self.assertRaises(TypeError): + @pattern.composite(interface=self.Base, container=2) + class CompositeFromInterface: + pass + + with self.assertRaises(TypeError): + @pattern.composite() + class CompositeFromInterface: + pass diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py index 73c1e26aa5..17a126498b 100644 --- a/lib/spack/spack/util/pattern.py +++ b/lib/spack/spack/util/pattern.py @@ -53,7 +53,7 @@ def composite(interface=None, method_list=None, container=list): raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite") def cls_decorator(cls): - # Retrieve the base class of the composite. Inspect its the methods and decide which ones will be overridden + # Retrieve the base class of the composite. 
Inspect its methods and decide which ones will be overridden def no_special_no_private(x): return inspect.ismethod(x) and not x.__name__.startswith('_') From 47035671e80f071c970dc66b15ad7ced2b87329d Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 27 Jan 2016 17:22:12 +0100 Subject: [PATCH 026/189] unit tests : now compliant with 2.6 --- lib/spack/spack/test/pattern.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/test/pattern.py b/lib/spack/spack/test/pattern.py index 64fc9187f9..6c783c6a5f 100644 --- a/lib/spack/spack/test/pattern.py +++ b/lib/spack/spack/test/pattern.py @@ -90,12 +90,15 @@ class CompositeFromInterface: def test_error_conditions(self): - with self.assertRaises(TypeError): + def wrong_container(): @pattern.composite(interface=self.Base, container=2) class CompositeFromInterface: pass - with self.assertRaises(TypeError): + def no_methods(): @pattern.composite() class CompositeFromInterface: pass + + self.assertRaises(TypeError, wrong_container) + self.assertRaises(TypeError, no_methods) From 07bb6fef01bfe48aa22c39e53b75e4c779ac0c2e Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 28 Jan 2016 10:58:56 +0100 Subject: [PATCH 027/189] resource directive : now works with all the fetch strategies available --- lib/spack/spack/directives.py | 2 +- lib/spack/spack/fetch_strategy.py | 19 ++++++++++++++++--- lib/spack/spack/package.py | 1 - .../repos/builtin/packages/llvm/package.py | 19 +++++++++++++++++++ 4 files changed, 36 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 0b98211cb9..5745adce63 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -296,8 +296,8 @@ def resource(pkg, **kwargs): raise RuntimeError(message) when_spec = parse_anonymous_spec(when, pkg.name) resources = pkg.resources.setdefault(when_spec, []) - fetcher = from_kwargs(**kwargs) name = kwargs.get('name') + fetcher = from_kwargs(**kwargs) resources.append(Resource(name, fetcher, destination, placement)) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index b2ff587a60..83a2dbb59c 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -44,6 +44,7 @@ import sys import re import shutil +import copy from functools import wraps import llnl.util.tty as tty from llnl.util.filesystem import * @@ -370,8 +371,12 @@ class GitFetchStrategy(VCSFetchStrategy): required_attributes = ('git',) def __init__(self, **kwargs): + # Discards the keywords in kwargs that may conflict with the next call to __init__ + forwarded_args = copy.copy(kwargs) + forwarded_args.pop('name', None) + super(GitFetchStrategy, self).__init__( - 'git', 'tag', 'branch', 'commit', **kwargs) + 'git', 'tag', 'branch', 'commit', **forwarded_args) self._git = None @property @@ -479,8 +484,12 @@ class SvnFetchStrategy(VCSFetchStrategy): required_attributes = ['svn'] def __init__(self, **kwargs): + # Discards the keywords in kwargs that may conflict with the next call to __init__ + forwarded_args = copy.copy(kwargs) + forwarded_args.pop('name', None) + super(SvnFetchStrategy, self).__init__( - 'svn', 'revision', **kwargs) + 'svn', 'revision', **forwarded_args) self._svn = None if self.revision is not None: self.revision = str(self.revision) @@ -556,8 +565,12 @@ class HgFetchStrategy(VCSFetchStrategy): required_attributes = ['hg'] def __init__(self, **kwargs): + # Discards the keywords in kwargs that may conflict with the next call to __init__ + 
forwarded_args = copy.copy(kwargs) + forwarded_args.pop('name', None) + super(HgFetchStrategy, self).__init__( - 'hg', 'revision', **kwargs) + 'hg', 'revision', **forwarded_args) self._hg = None @property diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index a1b8d12ec2..8019b29cba 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -435,7 +435,6 @@ def url_for_version(self, version): def _make_resource_stage(self, root_stage, fetcher, resource): resource_stage_folder = self._resource_stage(resource) - # FIXME : works only for URLFetchStrategy resource_mirror = join_path(self.name, os.path.basename(fetcher.url)) stage = ResourceStage(resource.fetcher, root=root_stage, resource=resource, name=resource_stage_folder, mirror_path=resource_mirror) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index a2b2c6eccc..1805d3ded8 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -171,6 +171,25 @@ class Llvm(Package): when='@%(version)s' % release, placement=resources[name].get('placement', None)) + # SVN - current develop + version('develop', svn='http://llvm.org/svn/llvm-project/llvm/trunk') + resource(name='clang', svn='http://llvm.org/svn/llvm-project/cfe/trunk', + destination='tools', when='@develop', placement='clang') + resource(name='compiler-rt', svn='http://llvm.org/svn/llvm-project/compiler-rt/trunk', + destination='projects', when='@develop', placement='compiler-rt') + resource(name='openmp', svn='http://llvm.org/svn/llvm-project/openmp/trunk', + destination='projects', when='@develop', placement='openmp') + resource(name='libcxx', svn='http://llvm.org/svn/llvm-project/libcxx/trunk', + destination='projects', when='@develop', placement='libcxx') + resource(name='libcxxabi', svn='http://llvm.org/svn/llvm-project/libcxxabi/trunk', + destination='projects', when='@develop', placement='libcxxabi') + resource(name='polly', svn='http://llvm.org/svn/llvm-project/polly/trunk', + destination='tools', when='@develop', placement='polly') + resource(name='lldb', svn='http://llvm.org/svn/llvm-project/lldb/trunk', + destination='tools', when='@develop', placement='lldb') + + + def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag cmake_args = [ arg for arg in std_cmake_args if 'BUILD_TYPE' not in arg ] From 603f5e69ee43e1249e413c966cde3e02ba10a1a3 Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 28 Jan 2016 11:30:28 +0100 Subject: [PATCH 028/189] removed outdated code : os detection (will be handled by platforms) and openssl modifications --- lib/spack/spack/architecture.py | 89 ------------------- .../repos/builtin/packages/openssl/package.py | 15 ---- 2 files changed, 104 deletions(-) delete mode 100644 lib/spack/spack/architecture.py diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py deleted file mode 100644 index 2701fab90c..0000000000 --- a/lib/spack/spack/architecture.py +++ /dev/null @@ -1,89 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://github.com/llnl/spack -# Please also see the LICENSE file for our notice and the LGPL. 
-# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -import os -import re -import platform - -from llnl.util.lang import memoized - -import spack -import spack.error as serr - - -class InvalidSysTypeError(serr.SpackError): - def __init__(self, sys_type): - super(InvalidSysTypeError, self).__init__( - "Invalid sys_type value for Spack: " + sys_type) - - -class NoSysTypeError(serr.SpackError): - def __init__(self): - super(NoSysTypeError, self).__init__( - "Could not determine sys_type for this machine.") - - -def get_sys_type_from_spack_globals(): - """Return the SYS_TYPE from spack globals, or None if it isn't set.""" - if not hasattr(spack, "sys_type"): - return None - elif hasattr(spack.sys_type, "__call__"): - return spack.sys_type() - else: - return spack.sys_type - - -def get_sys_type_from_environment(): - """Return $SYS_TYPE or None if it's not defined.""" - return os.environ.get('SYS_TYPE') - - -def get_sys_type_from_platform(): - """Return the architecture from Python's platform module.""" - sys_type = platform.system() + '-' + platform.machine() - sys_type = re.sub(r'[^\w-]', '_', sys_type) - return sys_type.lower() - - -@memoized -def sys_type(): - """Returns a SysType for the current machine.""" - methods = [get_sys_type_from_spack_globals, - get_sys_type_from_environment, - get_sys_type_from_platform] - - # search for a method that doesn't return None - sys_type = None - for method in methods: - sys_type = method() - if sys_type: break - - # Couldn't determine the sys_type for this machine. 
- if sys_type is None: - return "unknown_arch" - - if not isinstance(sys_type, basestring): - raise InvalidSysTypeError(sys_type) - - return sys_type diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index 8f0427796b..bbb169ec6b 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -1,7 +1,5 @@ from spack import * -import spack.util.architecture as arch - class Openssl(Package): """The OpenSSL Project is a collaborative effort to develop a robust, commercial-grade, full-featured, and Open Source @@ -40,16 +38,3 @@ def install(self, spec, prefix): make() make("install") - - @when(arch.os_is_in('darwin')) - def install(self, spec, prefix): - perl = which('perl') - perl("./Configure", - "--prefix=%s" % prefix, - "--openssldir=%s/etc/openssl" % prefix, - "zlib", - "no-krb5", - "shared", - "darwin64-x86_64-cc") - filter_file(r'-arch x86_64', '', 'Makefile') - From 0cf03518f3edf8b70331b1b5f2eae171d0a0dccf Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 28 Jan 2016 12:06:30 +0100 Subject: [PATCH 029/189] reverted deletion of the wrong `architecture.py` --- lib/spack/spack/architecture.py | 89 ++++++++++++++++++++++++++++ lib/spack/spack/util/architecture.py | 46 -------------- 2 files changed, 89 insertions(+), 46 deletions(-) create mode 100644 lib/spack/spack/architecture.py delete mode 100644 lib/spack/spack/util/architecture.py diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py new file mode 100644 index 0000000000..2701fab90c --- /dev/null +++ b/lib/spack/spack/architecture.py @@ -0,0 +1,89 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +import re +import platform + +from llnl.util.lang import memoized + +import spack +import spack.error as serr + + +class InvalidSysTypeError(serr.SpackError): + def __init__(self, sys_type): + super(InvalidSysTypeError, self).__init__( + "Invalid sys_type value for Spack: " + sys_type) + + +class NoSysTypeError(serr.SpackError): + def __init__(self): + super(NoSysTypeError, self).__init__( + "Could not determine sys_type for this machine.") + + +def get_sys_type_from_spack_globals(): + """Return the SYS_TYPE from spack globals, or None if it isn't set.""" + if not hasattr(spack, "sys_type"): + return None + elif hasattr(spack.sys_type, "__call__"): + return spack.sys_type() + else: + return spack.sys_type + + +def get_sys_type_from_environment(): + """Return $SYS_TYPE or None if it's not defined.""" + return os.environ.get('SYS_TYPE') + + +def get_sys_type_from_platform(): + """Return the architecture from Python's platform module.""" + sys_type = platform.system() + '-' + platform.machine() + sys_type = re.sub(r'[^\w-]', '_', sys_type) + return sys_type.lower() + + +@memoized +def sys_type(): + """Returns a SysType for the current machine.""" + methods = [get_sys_type_from_spack_globals, + get_sys_type_from_environment, + get_sys_type_from_platform] + + # search for a method that doesn't return None + sys_type = None + for method in methods: + sys_type = method() + if sys_type: break + + # Couldn't determine the sys_type for this machine. + if sys_type is None: + return "unknown_arch" + + if not isinstance(sys_type, basestring): + raise InvalidSysTypeError(sys_type) + + return sys_type diff --git a/lib/spack/spack/util/architecture.py b/lib/spack/spack/util/architecture.py deleted file mode 100644 index a020c74a7a..0000000000 --- a/lib/spack/spack/util/architecture.py +++ /dev/null @@ -1,46 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://github.com/llnl/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## - -import sys - - -def os_is_in(*args): - """ - Return True if any element in the list is equal to sys.platform, False otherwise - - :param args: list of names to be checked - :return: True or False - """ - return any(map(lambda x: x == sys.platform, args)) - - -def os_is_not_in(*args): - """ - Return True if none of the elements in the list is equal to sys.platform, False otherwise - - :param args: list of names to be checked - :return: True or False - """ - return not os_is_in(*args) From 587d356d6ec8820cf04043b3650ac66b82a651b2 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 3 Feb 2016 08:58:34 +0100 Subject: [PATCH 030/189] openssl : added logic to version computation. The package now warns user if he depends on an old version of the library --- .../repos/builtin/packages/openssl/package.py | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index 05de35fca0..c73102f05d 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -1,3 +1,6 @@ +import urllib +import llnl.util.tty as tty + from spack import * class Openssl(Package): @@ -10,6 +13,7 @@ class Openssl(Package): url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz" version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf') + version('1.0.1r', '1abd905e079542ccae948af37e393d28') version('1.0.2d', '38dd619b2e77cbac69b99f52a053d25a') version('1.0.2e', '5262bfa25b60ed9de9f28d5d52d77fc5') version('1.0.2f', 'b3bf73f507172be9292ea2a8c28b659d') @@ -17,6 +21,42 @@ class Openssl(Package): depends_on("zlib") parallel = False + def url_for_version(self, version): + # This URL is computed pinging the place where the latest version is stored. To avoid slowdown + # due to repeated pinging, we store the URL in a private class attribute to do the job only once per version + openssl_urls = getattr(Openssl, '_openssl_url', {}) + openssl_url = openssl_urls.get(version, None) + # Same idea, but just to avoid issuing the same message multiple times + warnings_given_to_user = getattr(Openssl, '_warnings_given', {}) + if openssl_url is None: + latest = 'http://www.openssl.org/source/openssl-{version}.tar.gz' + older = 'http://www.openssl.org/source/old/{version_number}/openssl-{version_full}.tar.gz' + # Try to use the url where the latest tarballs are stored. If the url does not exist (404), then + # return the url for older format + version_number = '.'.join([str(x) for x in version[:-1]]) + older_url = older.format(version_number=version_number, version_full=version) + latest_url = latest.format(version=version) + response = urllib.urlopen(latest.format(version=version)) + if response.getcode() == 404: + openssl_url = older_url + # Checks if we already warned the user for this particular version of OpenSSL. + # If not we display a warning message and mark this version + if not warnings_given_to_user.get(version, False): + tty.warn('This installation depends on an old version of OpenSSL, which may have known security issues. 
') + tty.warn('Consider updating to the latest version of this package.') + tty.warn('More details at {homepage}'.format(homepage=Openssl.homepage)) + warnings_given_to_user[version] = True + else: + openssl_url = latest_url + # Store the computed URL + openssl_urls[version] = openssl_url + # Store the updated dictionary of URLS + Openssl._openssl_url = openssl_urls + # Store the updated dictionary of warnings + Openssl._warnings_given = warnings_given_to_user + + return openssl_url + def install(self, spec, prefix): # OpenSSL uses a variable APPS in its Makefile. If it happens to be set # in the environment, then this will override what is set in the From 157ec210a74c2897abc093c14824975c49944f16 Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 11 Feb 2016 13:05:31 +0100 Subject: [PATCH 031/189] espresso : synchronized with current develop --- var/spack/{ => repos/builtin}/packages/espresso/package.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename var/spack/{ => repos/builtin}/packages/espresso/package.py (100%) diff --git a/var/spack/packages/espresso/package.py b/var/spack/repos/builtin/packages/espresso/package.py similarity index 100% rename from var/spack/packages/espresso/package.py rename to var/spack/repos/builtin/packages/espresso/package.py From ca3cdb445825126776c5269481540d3afac02c9f Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 11 Feb 2016 17:45:09 +0100 Subject: [PATCH 032/189] espresso : current working tree --- .../builtin/packages/espresso/package.py | 73 +++++++++---------- 1 file changed, 33 insertions(+), 40 deletions(-) diff --git a/var/spack/repos/builtin/packages/espresso/package.py b/var/spack/repos/builtin/packages/espresso/package.py index ce5dcc2acc..56b8c056b8 100644 --- a/var/spack/repos/builtin/packages/espresso/package.py +++ b/var/spack/repos/builtin/packages/espresso/package.py @@ -1,53 +1,44 @@ -# FIXME: -# This is a template package file for Spack. We've conveniently -# put "FIXME" labels next to all the things you'll want to change. -# -# Once you've edited all the FIXME's, delete this whole message, -# save this file, and test out your package like this: -# -# spack install espresso -# -# You can always get back here to change things with: -# -# spack edit espresso -# -# See the spack documentation for more information on building -# packages. -# +import llnl.util.tty as tty + from spack import * + class Espresso(Package): - """FIXME: put a proper description of your package here.""" - # FIXME: add a proper url for your package's homepage here. - homepage = "http://quantum-espresso.org" - url = "http://www.qe-forge.org/gf/download/frsrelease/204/912/espresso-5.3.0.tar.gz" + """ + QE is an integrated suite of Open-Source computer codes for electronic-structure calculations and materials + modeling at the nanoscale. It is based on density-functional theory, plane waves, and pseudopotentials. 
+ """ + homepage = 'http://quantum-espresso.org' + url = 'http://www.qe-forge.org/gf/download/frsrelease/204/912/espresso-5.3.0.tar.gz' version('5.3.0', '6848fcfaeb118587d6be36bd10b7f2c3') + variant('mpi', default=True, description='Build Quantum-ESPRESSO with mpi support') - variant('openmp', default=False, description='Build Quantum-ESPRESSO with mpi openmp') - variant('scalapack', default=False, description='Build Quantum-ESPRESSO with mpi openmp') + variant('openmp', default=False, description='Enables openMP support') + variant('scalapack', default=False, description='Enables scalapack support') + variant('elpa', default=True, description='Use elpa as an eigenvalue solver') + depends_on('blas') + depends_on('lapack') - # FIXME: Add dependencies if this package requires them. - # depends_on("foo") depends_on('mpi', when='+mpi') + depends_on('elpa', when='+elpa') + depends_on('scalapack', when='+scalapack') - -# def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. -# configure('--prefix=%s' % prefix) - - # FIXME: Add logic to build and install here -# make() -# make("install") + def check_variants(self, spec): + error = 'you cannot ask for \'+{variant}\' when \'+mpi\' is not active' + if '+scalapack' in spec and '~mpi' in spec: + raise RuntimeError(error.format(variant='scalapack')) + if '+elpa' in spec and '~mpi' in spec: + raise RuntimeError(error.format(variant='elpa')) def install(self, spec, prefix): - # TAU isn't happy with directories that have '@' in the path. Sigh. + self.check_variants(spec) - # TAU configure, despite the name , seems to be a manually written script (nothing related to autotools). - # As such it has a few #peculiarities# that make this build quite hackish. - options = ["-prefix=%s" % prefix, - "--enable-parallel"] + options = ['-prefix=%s' % prefix] + + if '+mpi' in spec: + options.append('--enable-parallel') if '+openmp' in spec: options.append('--enable-openmp') @@ -55,7 +46,9 @@ def install(self, spec, prefix): if '+scalapack' in spec: options.append('--with-scalapack=yes') - configure(*options) - make("all") - make("install") + if '+elpa' in spec: + options.append('--with-elpa=%s' % spec['elpa'].prefix) + configure(*options) + make('all') + make('install') From b272a8881becf205c09ac11228dd035b99fa0fcd Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 11 Feb 2016 18:18:05 +0100 Subject: [PATCH 033/189] espresso : fixed dependency handling --- var/spack/repos/builtin/packages/espresso/package.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/espresso/package.py b/var/spack/repos/builtin/packages/espresso/package.py index 56b8c056b8..df37bb0d71 100644 --- a/var/spack/repos/builtin/packages/espresso/package.py +++ b/var/spack/repos/builtin/packages/espresso/package.py @@ -1,5 +1,3 @@ -import llnl.util.tty as tty - from spack import * @@ -15,21 +13,21 @@ class Espresso(Package): variant('mpi', default=True, description='Build Quantum-ESPRESSO with mpi support') variant('openmp', default=False, description='Enables openMP support') - variant('scalapack', default=False, description='Enables scalapack support') + variant('scalapack', default=True, description='Enables scalapack support') variant('elpa', default=True, description='Use elpa as an eigenvalue solver') depends_on('blas') depends_on('lapack') depends_on('mpi', when='+mpi') - depends_on('elpa', when='+elpa') - depends_on('scalapack', when='+scalapack') + depends_on('elpa', 
when='+elpa+scalapack+mpi') # TODO : + mpi needed to avoid false dependencies installation + depends_on('scalapack', when='+scalapack+mpi') # TODO : + mpi needed to avoid false dependencies installation def check_variants(self, spec): error = 'you cannot ask for \'+{variant}\' when \'+mpi\' is not active' if '+scalapack' in spec and '~mpi' in spec: raise RuntimeError(error.format(variant='scalapack')) - if '+elpa' in spec and '~mpi' in spec: + if '+elpa' in spec and ('~mpi' in spec or '~scalapack' in spec): raise RuntimeError(error.format(variant='elpa')) def install(self, spec, prefix): From e8704433debe35d7893c0c20672d06973226c4e8 Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 11 Feb 2016 18:57:40 +0100 Subject: [PATCH 034/189] espresso : added directories to search path --- var/spack/repos/builtin/packages/espresso/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/var/spack/repos/builtin/packages/espresso/package.py b/var/spack/repos/builtin/packages/espresso/package.py index df37bb0d71..f85257b3cc 100644 --- a/var/spack/repos/builtin/packages/espresso/package.py +++ b/var/spack/repos/builtin/packages/espresso/package.py @@ -47,6 +47,14 @@ def install(self, spec, prefix): if '+elpa' in spec: options.append('--with-elpa=%s' % spec['elpa'].prefix) + # Add a list of directories to search + search_list = [] + for name, dependency_spec in spec.dependencies.iteritems(): + print name + search_list.extend([dependency_spec.prefix.lib, + dependency_spec.prefix.lib64]) + search_list = " ".join(search_list) + options.append('LIBDIRS=%s' % search_list) configure(*options) make('all') make('install') From d7f674ce9678ddb3b964ad457b29ed058e2cb3c7 Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Thu, 14 Jan 2016 17:41:44 -0500 Subject: [PATCH 035/189] libedit depends on ncurses --- var/spack/repos/builtin/packages/libedit/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/libedit/package.py b/var/spack/repos/builtin/packages/libedit/package.py index bcd5212b9e..faed8bad37 100644 --- a/var/spack/repos/builtin/packages/libedit/package.py +++ b/var/spack/repos/builtin/packages/libedit/package.py @@ -7,6 +7,8 @@ class Libedit(Package): version('3.1', '43cdb5df3061d78b5e9d59109871b4f6', url="http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz") + depends_on('ncurses') + def install(self, spec, prefix): configure('--prefix=%s' % prefix) From 70985170e5f248d0d6a1b6245bd5581185ecaa64 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 12 Feb 2016 12:08:31 +0100 Subject: [PATCH 036/189] qe : compiled on rhle6 --- .../repos/builtin/packages/espresso/package.py | 15 ++++++++++----- var/spack/repos/builtin/packages/mpich/package.py | 8 ++++---- .../repos/builtin/packages/openblas/package.py | 2 ++ 3 files changed, 16 insertions(+), 9 deletions(-) diff --git a/var/spack/repos/builtin/packages/espresso/package.py b/var/spack/repos/builtin/packages/espresso/package.py index f85257b3cc..a2bf58f585 100644 --- a/var/spack/repos/builtin/packages/espresso/package.py +++ b/var/spack/repos/builtin/packages/espresso/package.py @@ -1,5 +1,6 @@ from spack import * +import os class Espresso(Package): """ @@ -20,9 +21,10 @@ class Espresso(Package): depends_on('lapack') depends_on('mpi', when='+mpi') - depends_on('elpa', when='+elpa+scalapack+mpi') # TODO : + mpi needed to avoid false dependencies installation + depends_on('fftw~mpi', when='~mpi') + depends_on('fftw+mpi', when='+mpi') depends_on('scalapack', when='+scalapack+mpi') # TODO : + mpi 
needed to avoid false dependencies installation - + def check_variants(self, spec): error = 'you cannot ask for \'+{variant}\' when \'+mpi\' is not active' if '+scalapack' in spec and '~mpi' in spec: @@ -45,16 +47,19 @@ def install(self, spec, prefix): options.append('--with-scalapack=yes') if '+elpa' in spec: - options.append('--with-elpa=%s' % spec['elpa'].prefix) + options.append('--with-elpa=yes') # Add a list of directories to search search_list = [] for name, dependency_spec in spec.dependencies.iteritems(): - print name search_list.extend([dependency_spec.prefix.lib, dependency_spec.prefix.lib64]) + search_list = " ".join(search_list) options.append('LIBDIRS=%s' % search_list) + options.append('F90=%s' % os.environ['FC']) + configure(*options) make('all') - make('install') + make('install') + diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index c856cfe277..26d3bc0c94 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -48,10 +48,10 @@ class Mpich(Package): def setup_dependent_environment(self, module, spec, dep_spec): """For dependencies, make mpicc's use spack wrapper.""" - os.environ['MPICH_CC'] = 'cc' - os.environ['MPICH_CXX'] = 'c++' - os.environ['MPICH_F77'] = 'f77' - os.environ['MPICH_F90'] = 'f90' + os.environ['MPICH_CC'] = os.environ['CC'] + os.environ['MPICH_CXX'] = os.environ['CXX'] + os.environ['MPICH_F77'] = os.environ['F77'] + os.environ['MPICH_FC'] = os.environ['FC'] def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index 9c8fa1c694..3c909360a4 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -19,7 +19,9 @@ def install(self, spec, prefix): with working_dir(prefix.lib): symlink('libopenblas.a', 'blas.a') symlink('libopenblas.a', 'libblas.a') + symlink('libopenblas.so', 'libblas.so') # Lapack virtual package should provide liblapack.a with working_dir(prefix.lib): symlink('libopenblas.a', 'liblapack.a') + symlink('libopenblas.so', 'liblapack.so') From 247a4bc75108aa2106c1b191345a0c3387a8afac Mon Sep 17 00:00:00 2001 From: alalazo Date: Fri, 12 Feb 2016 12:15:09 +0100 Subject: [PATCH 037/189] mpich : added back MPI_F90 --- var/spack/repos/builtin/packages/mpich/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index 26d3bc0c94..c517defa83 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -51,6 +51,7 @@ def setup_dependent_environment(self, module, spec, dep_spec): os.environ['MPICH_CC'] = os.environ['CC'] os.environ['MPICH_CXX'] = os.environ['CXX'] os.environ['MPICH_F77'] = os.environ['F77'] + os.environ['MPICH_F90'] = os.environ['FC'] os.environ['MPICH_FC'] = os.environ['FC'] From 90e1b1f0ea4320a5bdc0f54ec14a12fdda6584ff Mon Sep 17 00:00:00 2001 From: David Beckingsale Date: Fri, 12 Feb 2016 09:33:19 -0800 Subject: [PATCH 038/189] Use hash to make shorter module and dotkit names, fixes #433 --- lib/spack/spack/modules.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 7036626e29..c834763564 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -194,12 +194,14 
@@ class Dotkit(EnvModule): @property def file_name(self): return join_path(Dotkit.path, self.spec.architecture, - self.spec.format('$_$@$%@$+$#.dk')) + '%s.dk' % self.use_name) @property def use_name(self): - return self.spec.format('$_$@$%@$+$#') - + return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version, + self.spec.compiler.name, + self.spec.compiler.version, + self.spec.dag_hash()) def _write(self, dk_file): # Category @@ -235,7 +237,10 @@ def file_name(self): @property def use_name(self): - return self.spec.format('$_$@$%@$+$#') + return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version, + self.spec.compiler.name, + self.spec.compiler.version, + self.spec.dag_hash()) def _write(self, m_file): From 2cd9ad8ce63ea29093fcc2d8e1bd749d5cbccf0b Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Sun, 17 Jan 2016 10:46:21 -0500 Subject: [PATCH 039/189] Use "-Wl,-rpath," instead of "-Wl,-rpath=" The former translates to a linker argument "-rpath DIR", whereas the latter translates to "-rpath=DIR". The latter is not support on OS X. --- lib/spack/docs/packaging_guide.rst | 6 +++--- lib/spack/spack/package.py | 4 ++-- lib/spack/spack/test/cc.py | 10 +++++----- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 59ba63fa35..bb8a26ad02 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1711,15 +1711,15 @@ Compile-time library search paths * ``-L$dep_prefix/lib`` * ``-L$dep_prefix/lib64`` Runtime library search paths (RPATHs) - * ``-Wl,-rpath=$dep_prefix/lib`` - * ``-Wl,-rpath=$dep_prefix/lib64`` + * ``-Wl,-rpath,$dep_prefix/lib`` + * ``-Wl,-rpath,$dep_prefix/lib64`` Include search paths * ``-I$dep_prefix/include`` An example of this would be the ``libdwarf`` build, which has one dependency: ``libelf``. Every call to ``cc`` in the ``libdwarf`` build will have ``-I$LIBELF_PREFIX/include``, -``-L$LIBELF_PREFIX/lib``, and ``-Wl,-rpath=$LIBELF_PREFIX/lib`` +``-L$LIBELF_PREFIX/lib``, and ``-Wl,-rpath,$LIBELF_PREFIX/lib`` inserted on the command line. This is done transparently to the project's build system, which will just think it's using a system where ``libelf`` is readily available. 
Because of this, you **do diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 8cb947c276..49d4fb6b23 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1220,8 +1220,8 @@ def rpath(self): @property def rpath_args(self): - """Get the rpath args as a string, with -Wl,-rpath= for each element.""" - return " ".join("-Wl,-rpath=%s" % p for p in self.rpath) + """Get the rpath args as a string, with -Wl,-rpath, for each element.""" + return " ".join("-Wl,-rpath,%s" % p for p in self.rpath) def validate_package_url(url_string): diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index 905af28a06..54d5638394 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -39,11 +39,11 @@ 'arg1', '-Wl,--start-group', 'arg2', - '-Wl,-rpath=/first/rpath', 'arg3', '-Wl,-rpath', '-Wl,/second/rpath', + '-Wl,-rpath,/first/rpath', 'arg3', '-Wl,-rpath', '-Wl,/second/rpath', '-llib1', '-llib2', 'arg4', '-Wl,--end-group', - '-Xlinker,-rpath', '-Xlinker,/third/rpath', '-Xlinker,-rpath=/fourth/rpath', + '-Xlinker,-rpath', '-Xlinker,/third/rpath', '-Xlinker,-rpath,/fourth/rpath', '-llib3', '-llib4', 'arg5', 'arg6'] @@ -95,13 +95,13 @@ def test_cpp_mode(self): def test_ccld_mode(self): self.check_cc('dump-mode', [], "ccld") self.check_cc('dump-mode', ['foo.c', '-o', 'foo'], "ccld") - self.check_cc('dump-mode', ['foo.c', '-o', 'foo', '-Wl,-rpath=foo'], "ccld") - self.check_cc('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath=foo'], "ccld") + self.check_cc('dump-mode', ['foo.c', '-o', 'foo', '-Wl,-rpath,foo'], "ccld") + self.check_cc('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ccld") def test_ld_mode(self): self.check_ld('dump-mode', [], "ld") - self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath=foo'], "ld") + self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ld") def test_includes(self): From 5038a38e296940f463c13d6fdda728d8673bc95a Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Mon, 18 Jan 2016 18:13:18 -0500 Subject: [PATCH 040/189] Correct -Xlinker arguments --- lib/spack/spack/test/cc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index 54d5638394..11420ec44a 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -43,7 +43,7 @@ '-llib1', '-llib2', 'arg4', '-Wl,--end-group', - '-Xlinker,-rpath', '-Xlinker,/third/rpath', '-Xlinker,-rpath,/fourth/rpath', + '-Xlinker,-rpath', '-Xlinker,/third/rpath', '-Xlinker,-rpath', '-Xlinker,/fourth/rpath', '-llib3', '-llib4', 'arg5', 'arg6'] From dc6a33b716dd6712da2e65a78a6a3ed98ca72d4d Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Fri, 12 Feb 2016 12:09:29 -0500 Subject: [PATCH 041/189] Handle multiple -Wl,-rpath,... 
paths --- lib/spack/env/cc | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index aacba996b3..c156b7b607 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -176,14 +176,21 @@ while [ -n "$1" ]; do -Wl,*) arg="${1#-Wl,}" if [ -z "$arg" ]; then shift; arg="$1"; fi - if [[ "$arg" = -rpath=* ]]; then - rpaths+=("${arg#-rpath=}") - elif [[ "$arg" = -rpath ]]; then + if [[ $arg = -rpath=* ]]; then + arg="${arg#-rpath=}" + for rpath in ${arg//,/ }; do + rpaths+=("$rpath") + done + elif [[ $arg = -rpath ]]; then shift; arg="$1" - if [[ "$arg" != -Wl,* ]]; then + if [[ $arg != -Wl,* ]]; then die "-Wl,-rpath was not followed by -Wl,*" fi - rpaths+=("${arg#-Wl,}") + # TODO: Handle multiple -Wl, continuations of -Wl,-rpath + arg="${arg#-Wl,}" + for rpath in ${arg//,/ }; do + rpaths+=("$rpath") + done else other_args+=("-Wl,$arg") fi @@ -191,11 +198,11 @@ while [ -n "$1" ]; do -Xlinker,*) arg="${1#-Xlinker,}" if [ -z "$arg" ]; then shift; arg="$1"; fi - if [[ "$arg" = -rpath=* ]]; then + if [[ $arg = -rpath=* ]]; then rpaths+=("${arg#-rpath=}") - elif [[ "$arg" = -rpath ]]; then + elif [[ $arg = -rpath ]]; then shift; arg="$1" - if [[ "$arg" != -Xlinker,* ]]; then + if [[ $arg != -Xlinker,* ]]; then die "-Xlinker,-rpath was not followed by -Xlinker,*" fi rpaths+=("${arg#-Xlinker,}") From 9a2c1090a6f40db762ac155d7d25063c2965c841 Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Fri, 12 Feb 2016 12:37:03 -0500 Subject: [PATCH 042/189] Handle -Wl,-rpath,... syntax --- lib/spack/env/cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index c156b7b607..41933f5e1f 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -181,6 +181,11 @@ while [ -n "$1" ]; do for rpath in ${arg//,/ }; do rpaths+=("$rpath") done + elif [[ $arg = -rpath,* ]]; then + arg="${arg#-rpath,}" + for rpath in ${arg//,/ }; do + rpaths+=("$rpath") + done elif [[ $arg = -rpath ]]; then shift; arg="$1" if [[ $arg != -Wl,* ]]; then From 69064395eb61db3b03d1ed14f16bef7ec2c94ee3 Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Sat, 13 Feb 2016 15:12:09 -0500 Subject: [PATCH 043/189] Add debug output --- lib/spack/env/cc | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 41933f5e1f..a431cffacf 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -174,19 +174,26 @@ while [ -n "$1" ]; do libs+=("$arg") ;; -Wl,*) + echo "FOUND arg=[$arg]" >&2 arg="${1#-Wl,}" if [ -z "$arg" ]; then shift; arg="$1"; fi + echo "SHIFTED, arg=[$arg]" >&2 if [[ $arg = -rpath=* ]]; then + echo "CASE 1" >&2 arg="${arg#-rpath=}" for rpath in ${arg//,/ }; do + echo " RPATH=[$rpath]" >&2 rpaths+=("$rpath") done elif [[ $arg = -rpath,* ]]; then + echo "CASE 2" >&2 arg="${arg#-rpath,}" for rpath in ${arg//,/ }; do + echo " RPATH=[$rpath]" >&2 rpaths+=("$rpath") done elif [[ $arg = -rpath ]]; then + echo "CASE 3" >&2 shift; arg="$1" if [[ $arg != -Wl,* ]]; then die "-Wl,-rpath was not followed by -Wl,*" @@ -194,9 +201,11 @@ while [ -n "$1" ]; do # TODO: Handle multiple -Wl, continuations of -Wl,-rpath arg="${arg#-Wl,}" for rpath in ${arg//,/ }; do + echo " RPATH=[$rpath]" >&2 rpaths+=("$rpath") done else + echo "OTHER" >&2 other_args+=("-Wl,$arg") fi ;; From 52647b9a5d8ab4b2fff7387f65347164576be088 Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Sat, 13 Feb 2016 17:55:14 -0500 Subject: [PATCH 044/189] Using regexes instead of globbing to match path names --- lib/spack/env/cc | 15 
+++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index a431cffacf..c46986e19c 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -174,26 +174,19 @@ while [ -n "$1" ]; do libs+=("$arg") ;; -Wl,*) - echo "FOUND arg=[$arg]" >&2 arg="${1#-Wl,}" if [ -z "$arg" ]; then shift; arg="$1"; fi - echo "SHIFTED, arg=[$arg]" >&2 - if [[ $arg = -rpath=* ]]; then - echo "CASE 1" >&2 + if [[ $arg =~ -rpath=.* ]]; then arg="${arg#-rpath=}" for rpath in ${arg//,/ }; do - echo " RPATH=[$rpath]" >&2 rpaths+=("$rpath") done - elif [[ $arg = -rpath,* ]]; then - echo "CASE 2" >&2 + elif [[ $arg =~ -rpath,.* ]]; then arg="${arg#-rpath,}" for rpath in ${arg//,/ }; do - echo " RPATH=[$rpath]" >&2 - rpaths+=("$rpath") + rpaths+=("$rpath") done elif [[ $arg = -rpath ]]; then - echo "CASE 3" >&2 shift; arg="$1" if [[ $arg != -Wl,* ]]; then die "-Wl,-rpath was not followed by -Wl,*" @@ -201,11 +194,9 @@ while [ -n "$1" ]; do # TODO: Handle multiple -Wl, continuations of -Wl,-rpath arg="${arg#-Wl,}" for rpath in ${arg//,/ }; do - echo " RPATH=[$rpath]" >&2 rpaths+=("$rpath") done else - echo "OTHER" >&2 other_args+=("-Wl,$arg") fi ;; From 43670cbbd01562f6748f3ee6e2505be52e24bccb Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Sat, 13 Feb 2016 22:01:36 -0500 Subject: [PATCH 045/189] More games with quoting --- lib/spack/env/cc | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index c46986e19c..37483e0073 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -175,20 +175,19 @@ while [ -n "$1" ]; do ;; -Wl,*) arg="${1#-Wl,}" - if [ -z "$arg" ]; then shift; arg="$1"; fi - if [[ $arg =~ -rpath=.* ]]; then + if [[ $arg = "-rpath=*" ]]; then arg="${arg#-rpath=}" for rpath in ${arg//,/ }; do rpaths+=("$rpath") done - elif [[ $arg =~ -rpath,.* ]]; then + elif [[ $arg = "-rpath,*" ]]; then arg="${arg#-rpath,}" for rpath in ${arg//,/ }; do - rpaths+=("$rpath") + rpaths+=("$rpath") done - elif [[ $arg = -rpath ]]; then + elif [[ $arg = "-rpath" ]]; then shift; arg="$1" - if [[ $arg != -Wl,* ]]; then + if [[ $arg != "-Wl,*" ]]; then die "-Wl,-rpath was not followed by -Wl,*" fi # TODO: Handle multiple -Wl, continuations of -Wl,-rpath From a06e29fecbaf100f089c1eab7c6447cd469b95f6 Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Sun, 14 Feb 2016 10:43:55 -0500 Subject: [PATCH 046/189] More quoting experiments --- lib/spack/env/cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 37483e0073..853a19dfdd 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -175,22 +175,22 @@ while [ -n "$1" ]; do ;; -Wl,*) arg="${1#-Wl,}" - if [[ $arg = "-rpath=*" ]]; then + # TODO: Handle multiple -Wl, continuations of -Wl,-rpath + if [[ $arg == '-rpath='* ]]; then arg="${arg#-rpath=}" for rpath in ${arg//,/ }; do rpaths+=("$rpath") done - elif [[ $arg = "-rpath,*" ]]; then + elif [[ $arg == '-rpath,'* ]]; then arg="${arg#-rpath,}" for rpath in ${arg//,/ }; do rpaths+=("$rpath") done - elif [[ $arg = "-rpath" ]]; then + elif [[ $arg == '-rpath' ]]; then shift; arg="$1" if [[ $arg != "-Wl,*" ]]; then die "-Wl,-rpath was not followed by -Wl,*" fi - # TODO: Handle multiple -Wl, continuations of -Wl,-rpath arg="${arg#-Wl,}" for rpath in ${arg//,/ }; do rpaths+=("$rpath") From 9868333e8e2586ffc55ee996a48ef62601a9c874 Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Sun, 14 Feb 2016 11:34:08 -0500 Subject: [PATCH 047/189] Shell quoting is difficult 
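The quoting churn in the last few patches comes down to one bash rule: inside ``[[ ... ]]``, an unquoted ``*`` on the right-hand side is a glob, while a quoted ``*`` is a literal asterisk, so ``[[ $arg = "-rpath=*" ]]`` never matches a real argument and only ``[[ $arg == '-rpath='* ]]`` (literal prefix, unquoted glob) does what the wrapper needs. The matching that patches 041 through 047 converge on can be summarized with a small Python sketch (illustration only; the real logic lives in bash in ``lib/spack/env/cc``)::

    def classify_wl_arg(arg, next_arg=None):
        """Toy model of the rpath handling described above. Returns a pair
        (rpaths, passthrough); next_arg is consulted only for the bare
        '-Wl,-rpath' form, which expects a following '-Wl,DIR'."""
        body = arg[len("-Wl,"):]
        if body.startswith("-rpath="):
            return body[len("-rpath="):].split(","), None   # -Wl,-rpath=DIR[,DIR...]
        if body.startswith("-rpath,"):
            return body[len("-rpath,"):].split(","), None   # -Wl,-rpath,DIR[,DIR...]
        if body == "-rpath":
            if next_arg is None or not next_arg.startswith("-Wl,"):
                raise ValueError("-Wl,-rpath was not followed by -Wl,*")
            return next_arg[len("-Wl,"):].split(","), None  # -Wl,-rpath -Wl,DIR
        return [], "-Wl," + body                            # anything else passes through

    classify_wl_arg("-Wl,-rpath,/first/rpath")           # (['/first/rpath'], None)
    classify_wl_arg("-Wl,-rpath=/a,/b")                  # (['/a', '/b'], None)
    classify_wl_arg("-Wl,-rpath", "-Wl,/second/rpath")   # (['/second/rpath'], None)
    classify_wl_arg("-Wl,--start-group")                 # ([], '-Wl,--start-group')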
--- lib/spack/env/cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 853a19dfdd..c3d1135722 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -188,7 +188,7 @@ while [ -n "$1" ]; do done elif [[ $arg == '-rpath' ]]; then shift; arg="$1" - if [[ $arg != "-Wl,*" ]]; then + if [[ $arg != '-Wl,'* ]]; then die "-Wl,-rpath was not followed by -Wl,*" fi arg="${arg#-Wl,}" From d8c0edcc818b8f0fb2220993d469381ae072ebe4 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 14 Feb 2016 14:40:25 -0800 Subject: [PATCH 048/189] Minor code cleanup for gcc. --- var/spack/repos/builtin/packages/gcc/package.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py index 3e5895cfb8..f8958ee290 100644 --- a/var/spack/repos/builtin/packages/gcc/package.py +++ b/var/spack/repos/builtin/packages/gcc/package.py @@ -36,8 +36,6 @@ class Gcc(Package): list_url = 'http://open-source-box.org/gcc/' list_depth = 2 - DEPENDS_ON_ISL_PREDICATE = '@5.0:' - version('5.3.0', 'c9616fd448f980259c31de613e575719') version('5.2.0', 'a51bcfeb3da7dd4c623e27207ed43467') version('4.9.3', '6f831b4d251872736e8e9cc09746f327') @@ -53,12 +51,11 @@ class Gcc(Package): depends_on("mpfr") depends_on("gmp") - depends_on("mpc") # when @4.5: + depends_on("mpc", when='@4.5:') + depends_on("isl", when='@5.0:') depends_on("binutils~libiberty", when='~gold') depends_on("binutils~libiberty+gold", when='+gold') - # Save these until we can do optional deps. - depends_on("isl", when=DEPENDS_ON_ISL_PREDICATE) #depends_on("ppl") #depends_on("cloog") @@ -91,7 +88,7 @@ def install(self, spec, prefix): "--with-as=%s/bin/as" % spec['binutils'].prefix] options.extend(binutils_options) # Isl - if spec.satisfies(Gcc.DEPENDS_ON_ISL_PREDICATE): + if 'isl' in spec: isl_options = ["--with-isl=%s" % spec['isl'].prefix] options.extend(isl_options) From bf162e60f17f8aebcd62184d72a194eb91e4ef4f Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 15 Feb 2016 10:53:50 -0600 Subject: [PATCH 049/189] Add latest version --- var/spack/repos/builtin/packages/py-mpi4py/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-mpi4py/package.py b/var/spack/repos/builtin/packages/py-mpi4py/package.py index 8001689a18..f599205644 100644 --- a/var/spack/repos/builtin/packages/py-mpi4py/package.py +++ b/var/spack/repos/builtin/packages/py-mpi4py/package.py @@ -5,7 +5,9 @@ class PyMpi4py(Package): homepage = "https://pypi.python.org/pypi/mpi4py" url = "https://pypi.python.org/packages/source/m/mpi4py/mpi4py-1.3.1.tar.gz" + version('2.0.0', '4f7d8126d7367c239fd67615680990e3') version('1.3.1', 'dbe9d22bdc8ed965c23a7ceb6f32fc3c') + extends('python') depends_on('py-setuptools') depends_on('mpi') From db50f52bbc47ef6dbe2e26c54ce828c0e6076e96 Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Tue, 16 Feb 2016 11:40:00 -0800 Subject: [PATCH 050/189] Adding the "Crypto++" package installation files. 
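An aside on the gcc cleanup a few hunks above: the class-level ``DEPENDS_ON_ISL_PREDICATE`` constant is replaced by a conditional dependency (``depends_on("isl", when='@5.0:')``) plus a plain membership test at install time. A minimal sketch of that pattern with a hypothetical package (the name, URL and checksums below are placeholders, not from the repo)::

    from spack import *

    class Foolib(Package):
        """Sketch of the conditional-dependency pattern used in the gcc cleanup."""
        homepage = "http://example.com/foolib"
        url      = "http://example.com/foolib-5.3.0.tar.gz"

        version('5.3.0', '00000000000000000000000000000000')  # placeholder md5
        version('4.9.3', '00000000000000000000000000000000')  # placeholder md5

        depends_on("isl", when='@5.0:')   # only applies to specs at version 5.0 or newer

        def install(self, spec, prefix):
            options = ["--prefix=%s" % prefix]
            if 'isl' in spec:             # true when isl ended up in the concretized DAG
                options.append("--with-isl=%s" % spec['isl'].prefix)
            configure(*options)
            make()
            make("install")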
--- .../builtin/packages/cryptopp/package.py | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 var/spack/repos/builtin/packages/cryptopp/package.py diff --git a/var/spack/repos/builtin/packages/cryptopp/package.py b/var/spack/repos/builtin/packages/cryptopp/package.py new file mode 100644 index 0000000000..1693c4b160 --- /dev/null +++ b/var/spack/repos/builtin/packages/cryptopp/package.py @@ -0,0 +1,31 @@ +import glob +from spack import * + +class Cryptopp(Package): + """Crypto++ is an open-source C++ library of cryptographic schemes. The + library supports a number of different cryptography algorithms, including + authenticated encryption schemes (GCM, CCM), hash functions (SHA-1, SHA2), + public-key encryption (RSA, DSA), and a few obsolete/historical encryption + algorithms (MD5, Panama).""" + + homepage = "http://www.cryptopp.com/" + url = "http://www.cryptopp.com/cryptopp563.zip" + + version('5.6.3', '3c5b70e2ec98b7a24988734446242d07') + version('5.6.2', '7ed022585698df48e65ce9218f6c6a67') + + def install(self, spec, prefix): + make() + + mkdirp(prefix.include) + for hfile in glob.glob('*.h*'): + install(hfile, prefix.include) + + mkdirp(prefix.lib) + install('libcryptopp.a', prefix.lib) + + def url_for_version(self, version): + version_tuple = tuple(v for v in iter(version)) + version_string = reduce(lambda vs, nv: vs + str(nv), version_tuple, "") + + return "%scryptopp%s.zip" % (Cryptopp.homepage, version_string) From 9a6221ea40e8cda3bdfa495b7c4a93284e8f4895 Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Tue, 16 Feb 2016 12:47:14 -0800 Subject: [PATCH 051/189] Added the installation files for the "ndiff" package. --- .../repos/builtin/packages/ndiff/package.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 var/spack/repos/builtin/packages/ndiff/package.py diff --git a/var/spack/repos/builtin/packages/ndiff/package.py b/var/spack/repos/builtin/packages/ndiff/package.py new file mode 100644 index 0000000000..10e445c81e --- /dev/null +++ b/var/spack/repos/builtin/packages/ndiff/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Ndiff(Package): + """The ndiff tool is a binary utility that compares putatively similar files + while ignoring small numeric differernces. This utility is most often used + to compare files containing a lot of floating-point numeric data that + may be slightly different due to numeric error.""" + + homepage = "http://ftp.math.utah.edu/pub/ndiff/" + url = "http://ftp.math.utah.edu/pub/ndiff/ndiff-2.00.tar.gz" + + version('2.00', '885548b4dc26e72c5455bebb5ba6c16d') + version('1.00', 'f41ffe5d12f36cd36b6311acf46eccdc') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + mkdirp(prefix.bin) + mkdirp('%s/lib' % prefix.share) + + make('install-exe', 'install-shrlib') From 3c8bbeafc78c00cd93fa4526a0e55bf16d36b454 Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Tue, 16 Feb 2016 13:01:18 -0800 Subject: [PATCH 052/189] Added the installation files for the "Triangle" package. 
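For reference, the ``url_for_version`` in the Crypto++ package above just strips the dots out of the version and appends the digits to the homepage; the ``reduce`` over ``iter(version)`` produces the same string as a plain join. A standalone illustration, with no Spack imports needed::

    homepage = "http://www.cryptopp.com/"
    version_parts = (5, 6, 3)                         # what iterating over version 5.6.3 yields
    version_string = "".join(str(p) for p in version_parts)
    url = "%scryptopp%s.zip" % (homepage, version_string)
    # -> 'http://www.cryptopp.com/cryptopp563.zip', matching the url attribute above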
--- .../builtin/packages/Triangle/package.py | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 var/spack/repos/builtin/packages/Triangle/package.py diff --git a/var/spack/repos/builtin/packages/Triangle/package.py b/var/spack/repos/builtin/packages/Triangle/package.py new file mode 100644 index 0000000000..f65d93776d --- /dev/null +++ b/var/spack/repos/builtin/packages/Triangle/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Triangle(Package): + """Triangle is a two-dimensional mesh generator and Delaunay + triangulator. Triangle generates exact Delaunay triangulations, + constrained Delaunay triangulations, conforming Delaunay + triangulations, Voronoi diagrams, and high-quality triangular + meshes.""" + + homepage = "http://www.cs.cmu.edu/~quake/triangle.html" + url = "http://www.netlib.org/voronoi/triangle.zip" + + version('1.6', '10aff8d7950f5e0e2fb6dd2e340be2c9') + + def install(self, spec, prefix): + make() + mkdirp(prefix.bin) + + install('triangle', prefix.bin) + install('showme', prefix.bin) From 09254014b182ccf4cbc4ce291141b7ab39b9171d Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Tue, 16 Feb 2016 13:24:01 -0800 Subject: [PATCH 053/189] Added the installation files for the "TetGen" package. --- .../repos/builtin/packages/tetgen/package.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 var/spack/repos/builtin/packages/tetgen/package.py diff --git a/var/spack/repos/builtin/packages/tetgen/package.py b/var/spack/repos/builtin/packages/tetgen/package.py new file mode 100644 index 0000000000..30c2b76655 --- /dev/null +++ b/var/spack/repos/builtin/packages/tetgen/package.py @@ -0,0 +1,28 @@ +from spack import * + +class Tetgen(Package): + """TetGen is a program and library that can be used to generate tetrahedral + meshes for given 3D polyhedral domains. TetGen generates exact constrained + Delaunay tetrahedralizations, boundary conforming Delaunay meshes, and + Voronoi paritions.""" + + homepage = "http://www.tetgen.org" + url = "http://www.tetgen.org/files/tetgen1.4.3.tar.gz" + + version('1.4.3', 'd6a4bcdde2ac804f7ec66c29dcb63c18') + + # TODO: Make this a build dependency once build dependencies are supported + # (see: https://github.com/LLNL/spack/pull/378). + depends_on('cmake@2.8.7:', when='@1.5.0:') + + def install(self, spec, prefix): + make('tetgen', 'tetlib') + + mkdirp(prefix.bin) + install('tetgen', prefix.bin) + + mkdirp(prefix.include) + install('tetgen.h', prefix.include) + + mkdirp(prefix.lib) + install('libtet.a', prefix.lib) From 8cab10214e63344b77ffc34df3609dbc9f1690a5 Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Tue, 16 Feb 2016 16:43:24 -0800 Subject: [PATCH 054/189] Added the installation files for the "ExodusII" package. Added the "+static" variant to "hdf5" to enable "ExodusII" support. 
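A note on the tetgen package above: the only listed version is 1.4.3, so the ``depends_on('cmake@2.8.7:', when='@1.5.0:')`` line stays dormant until a 1.5.x release is added; the ``when`` constraint is an open-ended version range that concretized specs are checked against. A quick sketch of how such a range behaves, assuming Spack's ``lib/spack`` is on ``sys.path`` (illustration only)::

    from spack.spec import Spec

    Spec('tetgen@1.4.3').satisfies('@1.5.0:')   # False -> no cmake dependency is added
    Spec('tetgen@1.5.0').satisfies('@1.5.0:')   # True  -> cmake@2.8.7: would be required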
--- .../packages/exodusii/exodus-cmake.patch | 12 +++++ .../builtin/packages/exodusii/package.py | 49 +++++++++++++++++++ .../repos/builtin/packages/hdf5/package.py | 4 ++ 3 files changed, 65 insertions(+) create mode 100644 var/spack/repos/builtin/packages/exodusii/exodus-cmake.patch create mode 100644 var/spack/repos/builtin/packages/exodusii/package.py diff --git a/var/spack/repos/builtin/packages/exodusii/exodus-cmake.patch b/var/spack/repos/builtin/packages/exodusii/exodus-cmake.patch new file mode 100644 index 0000000000..25355269ca --- /dev/null +++ b/var/spack/repos/builtin/packages/exodusii/exodus-cmake.patch @@ -0,0 +1,12 @@ +diff --git a/cmake-exodus b/cmake-exodus +index 787fd9d..ed073a2 100755 +--- a/cmake-exodus ++++ b/cmake-exodus +@@ -1,4 +1,6 @@ +-EXTRA_ARGS=$@ ++#!/bin/bash ++ ++EXTRA_ARGS=-DSEACASProj_ENABLE_CXX11=OFF + + ### Change this to point to the compilers you want to use + CC=gcc diff --git a/var/spack/repos/builtin/packages/exodusii/package.py b/var/spack/repos/builtin/packages/exodusii/package.py new file mode 100644 index 0000000000..89c04bf79c --- /dev/null +++ b/var/spack/repos/builtin/packages/exodusii/package.py @@ -0,0 +1,49 @@ +from spack import * + +# TODO: Add support for a C++11 enabled installation that filters out the +# TODO: "C++11-Disabled" flag (but only if the spec compiler supports C++11). + +# TODO: Add support for parallel installation that uses MPI. + +# TODO: Create installation options for NetCDF that support larger page size +# TODO: suggested by Exodus (see the repository "README" file). + +class Exodusii(Package): + """Exodus II is a C++/Fortran library developed to store and retrieve data for + finite element analyses. It's used for preprocessing (problem definition), + postprocessing (results visualization), and data transfer between codes. + An Exodus II data file is a random access, machine independent, binary + file that is written and read via C, C++, or Fortran API routines.""" + + homepage = "https://github.com/gsjaardema/seacas" + url = "https://github.com/gsjaardema/seacas/archive/master.zip" + + version('2016-02-08', git='https://github.com/gsjaardema/seacas.git', commit='dcf3529') + + # TODO: Make this a build dependency once build dependencies are supported + # (see: https://github.com/LLNL/spack/pull/378). 
+ depends_on('cmake@2.8.7:') + depends_on('hdf5+static~mpi') + depends_on('netcdf~mpi') + + patch('exodus-cmake.patch') + + def patch(self): + ff = FileFilter('cmake-exodus') + + ff.filter('CMAKE_INSTALL_PREFIX:PATH=${ACCESS}', + 'CMAKE_INSTALL_PREFIX:PATH=%s' % self.spec.prefix, string=True) + ff.filter('NetCDF_DIR:PATH=${TPL}', + 'NetCDF_DIR:PATH=%s' % self.spec['netcdf'].prefix, string=True) + ff.filter('HDF5_ROOT:PATH=${TPL}', + 'HDF5_ROOT:PATH=%s' % self.spec['hdf5'].prefix, string=True) + + def install(self, spec, prefix): + mkdirp('build') + cd('build') + + cmake_exodus = Executable('../cmake-exodus') + cmake_exodus() + + make() + make('install') diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index 80f79539c0..7db4aff631 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -42,6 +42,7 @@ class Hdf5(Package): version('1.8.13', 'c03426e9e77d7766944654280b467289') variant('debug', default=False, description='Builds a debug version of the library') + variant('static', default=False, description='Builds a static executable version of the library') variant('cxx', default=True, description='Enable C++ support') variant('fortran', default=True, description='Enable Fortran support') @@ -78,6 +79,9 @@ def install(self, spec, prefix): else: extra_args.append('--enable-production') + if '+static' in spec: + extra_args.append('--enable-static-exec') + if '+unsupported' in spec: extra_args.append("--enable-unsupported") From fca7ef2f7b3d8091e935073ec24570ebb163fe6d Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Tue, 16 Feb 2016 17:23:37 -0800 Subject: [PATCH 055/189] Moved "Triangle" to "triangle" to be more in line with Spack package naming conventions. --- .../repos/builtin/packages/{Triangle => triangle}/package.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename var/spack/repos/builtin/packages/{Triangle => triangle}/package.py (100%) diff --git a/var/spack/repos/builtin/packages/Triangle/package.py b/var/spack/repos/builtin/packages/triangle/package.py similarity index 100% rename from var/spack/repos/builtin/packages/Triangle/package.py rename to var/spack/repos/builtin/packages/triangle/package.py From 0388093f7aed92bd1d3282fe4f53c27e66030fd4 Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Tue, 16 Feb 2016 17:33:19 -0800 Subject: [PATCH 056/189] Changed the hdf5 "+static" variant to become the "+shared" variant. --- var/spack/repos/builtin/packages/exodusii/package.py | 2 +- var/spack/repos/builtin/packages/hdf5/package.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/exodusii/package.py b/var/spack/repos/builtin/packages/exodusii/package.py index 89c04bf79c..d68baaa6d9 100644 --- a/var/spack/repos/builtin/packages/exodusii/package.py +++ b/var/spack/repos/builtin/packages/exodusii/package.py @@ -23,7 +23,7 @@ class Exodusii(Package): # TODO: Make this a build dependency once build dependencies are supported # (see: https://github.com/LLNL/spack/pull/378). 
depends_on('cmake@2.8.7:') - depends_on('hdf5+static~mpi') + depends_on('hdf5+shared~mpi') depends_on('netcdf~mpi') patch('exodus-cmake.patch') diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index 7db4aff631..f4de92aa83 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -42,7 +42,7 @@ class Hdf5(Package): version('1.8.13', 'c03426e9e77d7766944654280b467289') variant('debug', default=False, description='Builds a debug version of the library') - variant('static', default=False, description='Builds a static executable version of the library') + variant('shared', default=False, description='Builds a static executable version of the library') variant('cxx', default=True, description='Enable C++ support') variant('fortran', default=True, description='Enable Fortran support') From 6cd76d69c3fa4b9eb2d30660de28206697af722d Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Wed, 17 Feb 2016 11:51:24 -0800 Subject: [PATCH 057/189] Fixed the "+shared" variant in the "hdf5" package. --- var/spack/repos/builtin/packages/exodusii/package.py | 2 +- var/spack/repos/builtin/packages/hdf5/package.py | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/exodusii/package.py b/var/spack/repos/builtin/packages/exodusii/package.py index d68baaa6d9..af258b7e6e 100644 --- a/var/spack/repos/builtin/packages/exodusii/package.py +++ b/var/spack/repos/builtin/packages/exodusii/package.py @@ -23,7 +23,7 @@ class Exodusii(Package): # TODO: Make this a build dependency once build dependencies are supported # (see: https://github.com/LLNL/spack/pull/378). depends_on('cmake@2.8.7:') - depends_on('hdf5+shared~mpi') + depends_on('hdf5~shared~mpi') depends_on('netcdf~mpi') patch('exodus-cmake.patch') diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index f4de92aa83..ed4e7c35c9 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -42,7 +42,7 @@ class Hdf5(Package): version('1.8.13', 'c03426e9e77d7766944654280b467289') variant('debug', default=False, description='Builds a debug version of the library') - variant('shared', default=False, description='Builds a static executable version of the library') + variant('shared', default=True, description='Builds a shared version of the library') variant('cxx', default=True, description='Enable C++ support') variant('fortran', default=True, description='Enable Fortran support') @@ -79,7 +79,9 @@ def install(self, spec, prefix): else: extra_args.append('--enable-production') - if '+static' in spec: + if '+shared' in spec: + extra_args.append('--enable-shared') + else: extra_args.append('--enable-static-exec') if '+unsupported' in spec: @@ -123,7 +125,6 @@ def install(self, spec, prefix): configure( "--prefix=%s" % prefix, "--with-zlib=%s" % spec['zlib'].prefix, - "--enable-shared", # TODO : this should be enabled by default, remove it? *extra_args) make() make("install") From 00125e4688bb457937b4907f8e69afe2a06737fc Mon Sep 17 00:00:00 2001 From: Tom Scogland Date: Wed, 17 Feb 2016 16:43:36 -0800 Subject: [PATCH 058/189] remove the unprotected key name from create.py Small fix to use dicts correctly. 
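Before moving on: the last few patches flip the hdf5 variant from ``+static`` to ``+shared`` (now defaulting to ``True``), and ExodusII now asks for ``hdf5~shared~mpi``. The flag selection hdf5 ends up doing can be summarized with a toy function (illustration only, not Spack API)::

    def hdf5_configure_args(spec_variants):
        """Sketch of the variant-to-flag mapping in the hdf5 package above.
        spec_variants is a set like {'+shared'} or {'~shared'}."""
        if '+shared' in spec_variants:
            return ['--enable-shared']        # default: build shared libraries
        return ['--enable-static-exec']       # ~shared: static executables instead

    hdf5_configure_args({'+shared'})   # ['--enable-shared']
    hdf5_configure_args({'~shared'})   # ['--enable-static-exec']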
--- lib/spack/spack/cmd/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index edcea0718c..6809209046 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -222,7 +222,7 @@ def fetch_tarballs(url, name, args): archives_to_fetch = 1 if not versions: # If the fetch failed for some reason, revert to what the user provided - versions = { version : url } + versions = { "version" : url } elif len(versions) > 1: tty.msg("Found %s versions of %s:" % (len(versions), name), *spack.cmd.elide_list( From d8a70166d3abd14b5a34025f735cf1825179f84f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 18 Feb 2016 00:56:29 -0800 Subject: [PATCH 059/189] Fixes #434 Compiler detection was not getting triggered properly with some of the new config logic. Adjust the conditions under which Spack will serach for compilers. --- lib/spack/spack/compilers/__init__.py | 32 +++++++++++++++++---------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 6159ef576c..3a04bc2ebc 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -74,28 +74,36 @@ def _to_dict(compiler): def get_compiler_config(arch=None, scope=None): """Return the compiler configuration for the specified architecture. """ - # If any configuration file has compilers, just stick with the - # ones already configured. - config = spack.config.get_config('compilers', scope=scope) - + # Check whether we're on a front-end (native) architecture. my_arch = spack.architecture.sys_type() if arch is None: arch = my_arch - if arch in config: - return config[arch] - - # Only for the current arch in *highest* scope: automatically try to - # find compilers if none are configured yet. - if arch == my_arch and scope == 'user': + def init_compiler_config(): + """Compiler search used when Spack has no compilers.""" config[arch] = {} compilers = find_compilers(*get_path('PATH')) for compiler in compilers: config[arch].update(_to_dict(compiler)) spack.config.update_config('compilers', config, scope=scope) - return config[arch] - return {} + config = spack.config.get_config('compilers', scope=scope) + + # Update the configuration if there are currently no compilers + # configured. Avoid updating automatically if there ARE site + # compilers configured but no user ones. + if arch == my_arch and arch not in config: + if scope is None: + # We know no compilers were configured in any scope. + init_compiler_config() + elif scope == 'user': + # Check the site config and update the user config if + # nothing is configured at the site level. + site_config = spack.config.get_config('compilers', scope='site') + if not site_config: + init_compiler_config() + + return config[arch] if arch in config else {} def add_compilers_to_config(compilers, arch=None, scope=None): From 19d10291bfeae45315fcef852baddec63b69247a Mon Sep 17 00:00:00 2001 From: "Gregory L. 
Lee" Date: Thu, 18 Feb 2016 15:45:29 -0800 Subject: [PATCH 060/189] modify compiler commands in python config files, fix for #431 --- var/spack/repos/builtin/packages/python/package.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index a1ce06feb0..58d401244e 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -55,6 +55,20 @@ def install(self, spec, prefix): make() make("install") + # Modify compiler paths in configuration files. This is necessary for + # building site packages outside of spack + filter_file(r'([/s]=?)([\S=]*)/lib/spack/env(/[^\s/]*)?/(\S*)(\s)', + (r'\4\5'), + join_path(prefix.lib, 'python%d.%d' % self.version[:2], '_sysconfigdata.py')) + + python3_version = '' + if spec.satisfies('@3:'): + python3_version = '-%d.%dm' % self.version[:2] + makefile_filepath = join_path(prefix.lib, 'python%d.%d' % self.version[:2], 'config%s' % python3_version, 'Makefile') + filter_file(r'([/s]=?)([\S=]*)/lib/spack/env(/[^\s/]*)?/(\S*)(\s)', + (r'\4\5'), + makefile_filepath) + # ======================================================================== # Set up environment to make install easy for python extensions. From 30c304748213bee3669c75d4384bad146f68dbd7 Mon Sep 17 00:00:00 2001 From: alalazo Date: Fri, 19 Feb 2016 12:39:38 +0100 Subject: [PATCH 061/189] gromacs : added package --- .../repos/builtin/packages/fftw/package.py | 2 + .../repos/builtin/packages/gromacs/package.py | 56 +++++++++++++++++++ 2 files changed, 58 insertions(+) create mode 100644 var/spack/repos/builtin/packages/gromacs/package.py diff --git a/var/spack/repos/builtin/packages/fftw/package.py b/var/spack/repos/builtin/packages/fftw/package.py index 4d2b964242..bc129aaf1a 100644 --- a/var/spack/repos/builtin/packages/fftw/package.py +++ b/var/spack/repos/builtin/packages/fftw/package.py @@ -47,6 +47,8 @@ class Fftw(Package): depends_on('mpi', when='+mpi') + # TODO : add support for architecture specific optimizations as soon as targets are supported + def install(self, spec, prefix): options = ['--prefix=%s' % prefix, '--enable-shared', diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py new file mode 100644 index 0000000000..5fe8399308 --- /dev/null +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -0,0 +1,56 @@ +from spack import * + + +class Gromacs(Package): + """ + GROMACS (GROningen MAchine for Chemical Simulations) is a molecular dynamics package primarily designed for + simulations of proteins, lipids and nucleic acids. It was originally developed in the Biophysical Chemistry + department of University of Groningen, and is now maintained by contributors in universities and research centers + across the world. + + GROMACS is one of the fastest and most popular software packages available and can run on CPUs as well as GPUs. + It is free, open source released under the GNU General Public License. Starting from version 4.6, GROMACS is + released under the GNU Lesser General Public License. 
+ """ + + homepage = 'http://www.gromacs.org' + url = 'ftp://ftp.gromacs.org/pub/gromacs/gromacs-5.1.2.tar.gz' + + version('5.1.2', '614d0be372f1a6f1f36382b7a6fcab98') + + variant('mpi', default=True, description='Activate MPI support') + variant('shared', default=True, description='Enables the build of shared libraries') + variant('debug', default=False, description='Enables debug mode') + variant('double', default=False, description='Produces a double precision version of the executables') + + depends_on('mpi', when='+mpi') + + depends_on('fftw') + + # TODO : add GPU support + + def install(self, spec, prefix): + + options = [] + + if '+mpi' in spec: + options.append('-DGMX_MPI:BOOL=ON') + + if '+double' in spec: + options.append('-DGMX_DOUBLE:BOOL=ON') + + if '~shared' in spec: + options.append('-DBUILD_SHARED_LIBS:BOOL=OFF') + + if '+debug' in spec: + options.append('-DCMAKE_BUILD_TYPE:STRING=Debug') + else: + options.append('-DCMAKE_BUILD_TYPE:STRING=Release') + + options.extend(std_cmake_args) + + with working_dir('spack-build', create=True): + + cmake('..', *options) + make() + make('install') From 5c8dd6c3c86400ac82b061ce45f58b543526bfff Mon Sep 17 00:00:00 2001 From: Tom Scogland Date: Sat, 20 Feb 2016 17:18:49 -0800 Subject: [PATCH 062/189] llvm trunk version Adding a trunk version to the llvm package. This has all the features and requirements of the others, with the additional caveat that the llvm project makes no guarantee that trunk on all repositories together will necessarily make a working compiler. It has been tested, and worked with a version today, but not yesterday, so if you test keep that in mind. --- .../repos/builtin/packages/llvm/package.py | 41 +++++++++++++++---- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 1805d3ded8..a8f19f9071 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -117,6 +117,21 @@ class Llvm(Package): }, } releases = [ + { + 'version' : 'trunk', + 'repo' : 'http://llvm.org/svn/llvm-project/llvm/trunk', + 'resources' : { + 'compiler-rt' : 'http://llvm.org/svn/llvm-project/compiler-rt/trunk', + 'openmp' : 'http://llvm.org/svn/llvm-project/openmp/trunk', + 'polly' : 'http://llvm.org/svn/llvm-project/polly/trunk', + 'libcxx' : 'http://llvm.org/svn/llvm-project/libcxx/trunk', + 'libcxxabi' : 'http://llvm.org/svn/llvm-project/libcxxabi/trunk', + 'clang' : 'http://llvm.org/svn/llvm-project/cfe/trunk', + 'clang-tools-extra' : 'http://llvm.org/svn/llvm-project/clang-tools-extra/trunk', + 'lldb' : 'http://llvm.org/svn/llvm-project/lldb/trunk', + 'llvm-libunwind' : 'http://llvm.org/svn/llvm-project/libunwind/trunk', + } + }, { 'version' : '3.7.0', 'md5':'b98b9495e5655a672d6cb83e1a180f8e', @@ -161,15 +176,25 @@ class Llvm(Package): ] for release in releases: - version(release['version'], release['md5'], url=llvm_url % release) + if release['version'] == 'trunk' : + version(release['version'], svn=release['repo']) - for name, md5 in release['resources'].items(): - resource(name=name, - url=resources[name]['url'] % release, - md5=md5, - destination=resources[name]['destination'], - when='@%(version)s' % release, - placement=resources[name].get('placement', None)) + for name, repo in release['resources'].items(): + resource(name=name, + svn=repo, + destination=resources[name]['destination'], + when='@%(version)s' % release, + placement=resources[name].get('placement', 
None)) + else: + version(release['version'], release['md5'], url=llvm_url % release) + + for name, md5 in release['resources'].items(): + resource(name=name, + url=resources[name]['url'] % release, + md5=md5, + destination=resources[name]['destination'], + when='@%(version)s' % release, + placement=resources[name].get('placement', None)) # SVN - current develop version('develop', svn='http://llvm.org/svn/llvm-project/llvm/trunk') From 4316f1cd31afd18e35b5ebd9852a156d2c68c8df Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 23 Feb 2016 16:57:19 +0100 Subject: [PATCH 063/189] Modified wrapper to have a different behavior when modeis vcheck --- lib/spack/env/cc | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index aacba996b3..dfb0801c56 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -113,14 +113,22 @@ case "$command" in ;; esac -# Finish setting up the mode. +# If any of the arguments below is present then the mode is vcheck. In vcheck mode nothing is added in terms of extra search paths or libraries if [ -z "$mode" ]; then - mode=ccld for arg in "$@"; do if [ "$arg" = -v -o "$arg" = -V -o "$arg" = --version -o "$arg" = -dumpversion ]; then mode=vcheck break - elif [ "$arg" = -E ]; then + fi + done +fi + +# Finish setting up the mode. + +if [ -z "$mode" ]; then + mode=ccld + for arg in "$@"; do + if [ "$arg" = -E ]; then mode=cpp break elif [ "$arg" = -c ]; then @@ -145,6 +153,11 @@ fi # Save original command for debug logging input_command="$@" +if [ "$mode" == vcheck ] ; then + exec "${input_command}" + exit +fi + # # Now do real parsing of the command line args, trying hard to keep # non-rpath linker arguments in the proper order w.r.t. other command @@ -330,3 +343,5 @@ if [ "$SPACK_DEBUG" = "TRUE" ]; then fi exec "${full_command[@]}" + + From 5ce97eeda52944128dc32c0d7728550d95520b0d Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 24 Feb 2016 09:17:04 +0100 Subject: [PATCH 064/189] intel : polished code --- lib/spack/env/cc | 3 --- 1 file changed, 3 deletions(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index dfb0801c56..644d2be1d6 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -155,7 +155,6 @@ input_command="$@" if [ "$mode" == vcheck ] ; then exec "${input_command}" - exit fi # @@ -343,5 +342,3 @@ if [ "$SPACK_DEBUG" = "TRUE" ]; then fi exec "${full_command[@]}" - - From 976ae91dccd36d1feb6d0d20cd1ef5b3470c4c39 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 24 Feb 2016 14:11:57 +0100 Subject: [PATCH 065/189] llvm : removed duplicate version --- .../repos/builtin/packages/llvm/package.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index a8f19f9071..934d994bd3 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -196,25 +196,6 @@ class Llvm(Package): when='@%(version)s' % release, placement=resources[name].get('placement', None)) - # SVN - current develop - version('develop', svn='http://llvm.org/svn/llvm-project/llvm/trunk') - resource(name='clang', svn='http://llvm.org/svn/llvm-project/cfe/trunk', - destination='tools', when='@develop', placement='clang') - resource(name='compiler-rt', svn='http://llvm.org/svn/llvm-project/compiler-rt/trunk', - destination='projects', when='@develop', placement='compiler-rt') - resource(name='openmp', 
svn='http://llvm.org/svn/llvm-project/openmp/trunk', - destination='projects', when='@develop', placement='openmp') - resource(name='libcxx', svn='http://llvm.org/svn/llvm-project/libcxx/trunk', - destination='projects', when='@develop', placement='libcxx') - resource(name='libcxxabi', svn='http://llvm.org/svn/llvm-project/libcxxabi/trunk', - destination='projects', when='@develop', placement='libcxxabi') - resource(name='polly', svn='http://llvm.org/svn/llvm-project/polly/trunk', - destination='tools', when='@develop', placement='polly') - resource(name='lldb', svn='http://llvm.org/svn/llvm-project/lldb/trunk', - destination='tools', when='@develop', placement='lldb') - - - def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag cmake_args = [ arg for arg in std_cmake_args if 'BUILD_TYPE' not in arg ] From 6f42dd556d1ead8e7cad9788004dc33c11240564 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 24 Feb 2016 17:37:58 +0100 Subject: [PATCH 066/189] stage : on-going refactoring --- lib/spack/spack/package.py | 149 +++++++++++++++++----------------- lib/spack/spack/stage.py | 68 ++++++++++------ lib/spack/spack/test/stage.py | 122 +++++++++++----------------- 3 files changed, 167 insertions(+), 172 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 8019b29cba..4fdc582479 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -847,93 +847,94 @@ def do_install(self, make_jobs=make_jobs) start_time = time.time() - if not fake: - if not skip_patch: - self.do_patch() - else: - self.do_stage() - - # create the install directory. The install layout - # handles this in case so that it can use whatever - # package naming scheme it likes. - spack.install_layout.create_install_directory(self.spec) - - def cleanup(): - if not keep_prefix: - # If anything goes wrong, remove the install prefix - self.remove_prefix() - else: - tty.warn("Keeping install prefix in place despite error.", - "Spack will think this package is installed." + - "Manually remove this directory to fix:", - self.prefix, wrap=True) - - - def real_work(): - try: - tty.msg("Building %s." % self.name) - - # Run the pre-install hook in the child process after - # the directory is created. - spack.hooks.pre_install(self) - - # Set up process's build environment before running install. - if fake: - self.do_fake_install() + with self.stage: + if not fake: + if not skip_patch: + self.do_patch() else: - # Do the real install in the source directory. - self.stage.chdir_to_source() + self.do_stage() - # This redirects I/O to a build log (and optionally to the terminal) - log_path = join_path(os.getcwd(), 'spack-build.out') - log_file = open(log_path, 'w') - with log_output(log_file, verbose, sys.stdout.isatty(), True): - self.install(self.spec, self.prefix) + # create the install directory. The install layout + # handles this in case so that it can use whatever + # package naming scheme it likes. + spack.install_layout.create_install_directory(self.spec) - # Ensure that something was actually installed. - self._sanity_check_install() + def cleanup(): + if not keep_prefix: + # If anything goes wrong, remove the install prefix + self.remove_prefix() + else: + tty.warn("Keeping install prefix in place despite error.", + "Spack will think this package is installed." 
+ + "Manually remove this directory to fix:", + self.prefix, wrap=True) - # Move build log into install directory on success - if not fake: - log_install_path = spack.install_layout.build_log_path(self.spec) - install(log_path, log_install_path) - # On successful install, remove the stage. - if not keep_stage: - self.stage.destroy() + def real_work(): + try: + tty.msg("Building %s." % self.name) - # Stop timer. - self._total_time = time.time() - start_time - build_time = self._total_time - self._fetch_time + # Run the pre-install hook in the child process after + # the directory is created. + spack.hooks.pre_install(self) - tty.msg("Successfully installed %s." % self.name, - "Fetch: %s. Build: %s. Total: %s." - % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) - print_pkg(self.prefix) + # Set up process's build environment before running install. + if fake: + self.do_fake_install() + else: + # Do the real install in the source directory. + self.stage.chdir_to_source() - except ProcessError, e: - # Annotate with location of build log. - e.build_log = log_path - cleanup() - raise e + # This redirects I/O to a build log (and optionally to the terminal) + log_path = join_path(os.getcwd(), 'spack-build.out') + log_file = open(log_path, 'w') + with log_output(log_file, verbose, sys.stdout.isatty(), True): + self.install(self.spec, self.prefix) - except: - # other exceptions just clean up and raise. - cleanup() - raise + # Ensure that something was actually installed. + self._sanity_check_install() - # Set parallelism before starting build. - self.make_jobs = make_jobs + # Move build log into install directory on success + if not fake: + log_install_path = spack.install_layout.build_log_path(self.spec) + install(log_path, log_install_path) - # Do the build. - spack.build_environment.fork(self, real_work) + # On successful install, remove the stage. + if not keep_stage: + self.stage.destroy() - # note: PARENT of the build process adds the new package to - # the database, so that we don't need to re-read from file. - spack.installed_db.add(self.spec, self.prefix) + # Stop timer. + self._total_time = time.time() - start_time + build_time = self._total_time - self._fetch_time - # Once everything else is done, run post install hooks - spack.hooks.post_install(self) + tty.msg("Successfully installed %s." % self.name, + "Fetch: %s. Build: %s. Total: %s." + % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) + print_pkg(self.prefix) + + except ProcessError, e: + # Annotate with location of build log. + e.build_log = log_path + cleanup() + raise e + + except: + # other exceptions just clean up and raise. + cleanup() + raise + + # Set parallelism before starting build. + self.make_jobs = make_jobs + + # Do the build. + spack.build_environment.fork(self, real_work) + + # note: PARENT of the build process adds the new package to + # the database, so that we don't need to re-read from file. + spack.installed_db.add(self.spec, self.prefix) + + # Once everything else is done, run post install hooks + spack.hooks.post_install(self) def _sanity_check_install(self): diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index f217450d42..96b1eaf3f2 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -96,12 +96,44 @@ def __init__(self, url_or_fetch_strategy, **kwargs): self.default_fetcher = self.fetcher # self.fetcher can change with mirrors. self.skip_checksum_for_mirror = True # used for mirrored archives of repositories. 
- self.name = kwargs.get('name') + # TODO : this uses a protected member of tempfile, but seemed the only way to get a temporary name + # TODO : besides, the temporary link name won't be the same as the temporary stage area in tmp_root + self.name = kwargs.get('name') if 'name' in kwargs else STAGE_PREFIX + next(tempfile._get_candidate_names()) self.mirror_path = kwargs.get('mirror_path') self.tmp_root = find_tmp_root() - self.path = None + # Try to construct here a temporary name for the stage directory + # If this is a named stage, then construct a named path. + self.path = join_path(spack.stage_path, self.name) + + self.delete_on_exit = True + + def __enter__(self): + """ + Entering a stage context will create the stage directory + """ + # FIXME : if _setup is used only here, then it makes no sense to retain the function self._setup() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """ + Exiting from a stage context will delete the stage directory unless: + - it was explicitly requested not to do so + - an exception has been raised + + Args: + exc_type: exception type + exc_val: exception value + exc_tb: exception traceback + + Returns: + Boolean + """ + self.delete_on_exit = False if exc_type is not None else self.delete_on_exit + + if self.delete_on_exit: + self.destroy() def _cleanup_dead_links(self): """Remove any dead links in the stage directory.""" @@ -163,35 +195,17 @@ def _setup(self): mkdirp(spack.stage_path) self._cleanup_dead_links() - # If this is a named stage, then construct a named path. - if self.name is not None: - self.path = join_path(spack.stage_path, self.name) - # If this is a temporary stage, them make the temp directory - tmp_dir = None if self.tmp_root: - if self.name is None: - # Unnamed tmp root. Link the path in + if self._need_to_create_path(): tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) - self.name = os.path.basename(tmp_dir) - self.path = join_path(spack.stage_path, self.name) - if self._need_to_create_path(): - os.symlink(tmp_dir, self.path) - - else: - if self._need_to_create_path(): - tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) - os.symlink(tmp_dir, self.path) + os.symlink(tmp_dir, self.path) # if we're not using a tmp dir, create the stage directly in the # stage dir, rather than linking to it. else: - if self.name is None: - self.path = tempfile.mkdtemp('', STAGE_PREFIX, spack.stage_path) - self.name = os.path.basename(self.path) - else: - if self._need_to_create_path(): - mkdirp(self.path) + if self._need_to_create_path(): + mkdirp(self.path) # Make sure we can actually do something with the stage we made. 
ensure_access(self.path) @@ -389,6 +403,12 @@ def source_path(self): def path(self): return self[0].path + def __enter__(self): + return self[0].__enter__() + + def __exit__(self, exc_type, exc_val, exc_tb): + return self[0].__exit__(exc_type, exc_val, exc_tb) + def chdir_to_source(self): return self[0].chdir_to_source() diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index c1b2a2a573..a7314eba4c 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -192,116 +192,90 @@ def check_destroy(self, stage, stage_name): def test_setup_and_destroy_name_with_tmp(self): with use_tmp(True): - stage = Stage(archive_url, name=stage_name) - self.check_setup(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + self.check_setup(stage, stage_name) self.check_destroy(stage, stage_name) def test_setup_and_destroy_name_without_tmp(self): with use_tmp(False): - stage = Stage(archive_url, name=stage_name) - self.check_setup(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + self.check_setup(stage, stage_name) self.check_destroy(stage, stage_name) def test_setup_and_destroy_no_name_with_tmp(self): with use_tmp(True): - stage = Stage(archive_url) - self.check_setup(stage, None) - - stage.destroy() + with Stage(archive_url) as stage: + self.check_setup(stage, None) self.check_destroy(stage, None) def test_setup_and_destroy_no_name_without_tmp(self): with use_tmp(False): - stage = Stage(archive_url) - self.check_setup(stage, None) - - stage.destroy() + with Stage(archive_url) as stage: + self.check_setup(stage, None) self.check_destroy(stage, None) def test_chdir(self): - stage = Stage(archive_url, name=stage_name) - - stage.chdir() - self.check_setup(stage, stage_name) - self.check_chdir(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + stage.chdir() + self.check_setup(stage, stage_name) + self.check_chdir(stage, stage_name) self.check_destroy(stage, stage_name) def test_fetch(self): - stage = Stage(archive_url, name=stage_name) - - stage.fetch() - self.check_setup(stage, stage_name) - self.check_chdir(stage, stage_name) - self.check_fetch(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + stage.fetch() + self.check_setup(stage, stage_name) + self.check_chdir(stage, stage_name) + self.check_fetch(stage, stage_name) self.check_destroy(stage, stage_name) def test_expand_archive(self): - stage = Stage(archive_url, name=stage_name) - - stage.fetch() - self.check_setup(stage, stage_name) - self.check_fetch(stage, stage_name) - - stage.expand_archive() - self.check_expand_archive(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + stage.fetch() + self.check_setup(stage, stage_name) + self.check_fetch(stage, stage_name) + stage.expand_archive() + self.check_expand_archive(stage, stage_name) self.check_destroy(stage, stage_name) def test_expand_archive(self): - stage = Stage(archive_url, name=stage_name) - - stage.fetch() - self.check_setup(stage, stage_name) - self.check_fetch(stage, stage_name) - - stage.expand_archive() - stage.chdir_to_source() - self.check_expand_archive(stage, stage_name) - self.check_chdir_to_source(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + stage.fetch() + self.check_setup(stage, stage_name) + self.check_fetch(stage, stage_name) + stage.expand_archive() + stage.chdir_to_source() + 
self.check_expand_archive(stage, stage_name) + self.check_chdir_to_source(stage, stage_name) self.check_destroy(stage, stage_name) def test_restage(self): - stage = Stage(archive_url, name=stage_name) + with Stage(archive_url, name=stage_name) as stage: + stage.fetch() + stage.expand_archive() + stage.chdir_to_source() + self.check_expand_archive(stage, stage_name) + self.check_chdir_to_source(stage, stage_name) - stage.fetch() - stage.expand_archive() - stage.chdir_to_source() - self.check_expand_archive(stage, stage_name) - self.check_chdir_to_source(stage, stage_name) + # Try to make a file in the old archive dir + with open('foobar', 'w') as file: + file.write("this file is to be destroyed.") - # Try to make a file in the old archive dir - with open('foobar', 'w') as file: - file.write("this file is to be destroyed.") + self.assertTrue('foobar' in os.listdir(stage.source_path)) - self.assertTrue('foobar' in os.listdir(stage.source_path)) - - # Make sure the file is not there after restage. - stage.restage() - self.check_chdir(stage, stage_name) - self.check_fetch(stage, stage_name) - - stage.chdir_to_source() - self.check_chdir_to_source(stage, stage_name) - self.assertFalse('foobar' in os.listdir(stage.source_path)) - - stage.destroy() + # Make sure the file is not there after restage. + stage.restage() + self.check_chdir(stage, stage_name) + self.check_fetch(stage, stage_name) + stage.chdir_to_source() + self.check_chdir_to_source(stage, stage_name) + self.assertFalse('foobar' in os.listdir(stage.source_path)) self.check_destroy(stage, stage_name) From 20845a739f15190cf9610da4375d3e3c9fc61b6b Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Wed, 24 Feb 2016 22:18:51 -0500 Subject: [PATCH 067/189] Libevent depends on OpenSSL --- var/spack/repos/builtin/packages/libevent/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py index 11b1083d67..2a44c49325 100644 --- a/var/spack/repos/builtin/packages/libevent/package.py +++ b/var/spack/repos/builtin/packages/libevent/package.py @@ -23,6 +23,9 @@ class Libevent(Package): version('2.0.12', '42986228baf95e325778ed328a93e070') + depends_on('openssl') + + def install(self, spec, prefix): configure("--prefix=%s" % prefix) From 30d9ca2bde033da28b6f6105f93ee05f4fd0acfa Mon Sep 17 00:00:00 2001 From: Joseph Ciurej Date: Fri, 26 Feb 2016 16:06:17 -0800 Subject: [PATCH 068/189] Updated the silo package and added the '+fortran' variant. 
--- .../repos/builtin/packages/silo/package.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py index 9eda11df15..d1aed78e0e 100644 --- a/var/spack/repos/builtin/packages/silo/package.py +++ b/var/spack/repos/builtin/packages/silo/package.py @@ -1,19 +1,28 @@ from spack import * class Silo(Package): - """Silo is a library for reading and writing a wide variety of scientific data to binary, disk files.""" + """Silo is a library for reading and writing a wide variety of scientific + data to binary, disk files.""" homepage = "http://wci.llnl.gov/simulation/computer-codes/silo" url = "https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo/silo-4.8/silo-4.8.tar.gz" - #version('4.9', 'a83eda4f06761a86726e918fc55e782a') version('4.8', 'b1cbc0e7ec435eb656dc4b53a23663c9') - depends_on("hdf5@:1.8.12") + variant('fortran', default=True, description='Enable Fortran support') + + depends_on("hdf5") def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--with-hdf5=%s" %spec['hdf5'].prefix) + config_args = [ + '--enable-fortran' if '+fortran' in spec else '--disable-fortran', + ] + + configure( + "--prefix=%s" % prefix, + "--with-hdf5=%s,%s" % (spec['hdf5'].prefix.include, spec['hdf5'].prefix.lib), + "--with-zlib=%s,%s" % (spec['zlib'].prefix.include, spec['zlib'].prefix.lib), + *config_args) make() make("install") From 8f3ac9ac8b7b1d16672e7a52f691966de1f8483f Mon Sep 17 00:00:00 2001 From: Luigi Calori Date: Sat, 27 Feb 2016 01:40:32 +0100 Subject: [PATCH 069/189] adding new version to praview, compiled with spack -d install -j 8 --keep-stage paraview@5.0.0+qt+python+tcl+opengl2%gcc@4.8.2 ^netcdf -mpi --- .../repos/builtin/packages/paraview/package.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index e43bdd4493..ccf2d14c06 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -2,9 +2,11 @@ class Paraview(Package): homepage = 'http://www.paraview.org' - url = 'http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz' + url = 'http://www.paraview.org/files/v5.0/ParaView-v' + _url_str = 'http://www.paraview.org/files/v%s/ParaView-v%s-source.tar.gz' - version('4.4.0', 'fa1569857dd680ebb4d7ff89c2227378', url='http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz') + version('4.4.0', 'fa1569857dd680ebb4d7ff89c2227378') + version('5.0.0', '4598f0b421460c8bbc635c9a1c3bdbee') variant('python', default=False, description='Enable Python support') @@ -25,8 +27,8 @@ class Paraview(Package): depends_on('bzip2') depends_on('freetype') - depends_on('hdf5') depends_on('hdf5+mpi', when='+mpi') + depends_on('hdf5~mpi', when='~mpi') depends_on('jpeg') depends_on('libpng') depends_on('libtiff') @@ -35,6 +37,11 @@ class Paraview(Package): #depends_on('protobuf') # version mismatches? 
#depends_on('sqlite') # external version not supported depends_on('zlib') + + def url_for_version(self, version): + """Handle ParaView version-based custom URLs.""" + return self._url_str % (version.up_to(2), version) + def install(self, spec, prefix): with working_dir('spack-build', create=True): From 67f327f805868d369eec0392631392281b68c39d Mon Sep 17 00:00:00 2001 From: Ian Lee Date: Sun, 28 Feb 2016 19:47:19 -0800 Subject: [PATCH 070/189] Updated links to use new llnl.gov address --- README.md | 4 ++-- lib/spack/docs/getting_started.rst | 2 +- lib/spack/spack/cmd/repo.py | 2 +- lib/spack/spack/repository.py | 2 +- lib/spack/spack/resource.py | 2 +- lib/spack/spack/test/namespace_trie.py | 2 +- lib/spack/spack/test/tally_plugin.py | 8 ++++---- var/spack/repos/builtin/packages/gdb/package.py | 2 +- var/spack/repos/builtin/packages/texinfo/package.py | 2 +- 9 files changed, 13 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index bdce345764..8664953c0c 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ written in pure Python, and specs allow package authors to write a single build script for many different builds of the same package. See the -[Feature Overview](http://llnl.github.io/spack/features.html) +[Feature Overview](http://software.llnl.gov/spack/features.html) for examples and highlights. To install spack and install your first package: @@ -31,7 +31,7 @@ To install spack and install your first package: Documentation ---------------- -[**Full documentation**](http://llnl.github.io/spack) for Spack is +[**Full documentation**](http://software.llnl.gov/spack) for Spack is the first place to look. See also: diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst index 67ca18e71a..2c5b68ea65 100644 --- a/lib/spack/docs/getting_started.rst +++ b/lib/spack/docs/getting_started.rst @@ -22,7 +22,7 @@ go: $ spack install libelf For a richer experience, use Spack's `shell support -`_: +`_: .. code-block:: sh diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index 34c755fb67..908f5400ab 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -6,7 +6,7 @@ # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # -# For details, see https://llnl.github.io/spack +# For details, see https://software.llnl.gov/spack # Please also see the LICENSE file for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index f58cd52125..e8d0cc09ec 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -6,7 +6,7 @@ # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # -# For details, see https://llnl.github.io/spack +# For details, see https://software.llnl.gov/spack # Please also see the LICENSE file for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify diff --git a/lib/spack/spack/resource.py b/lib/spack/spack/resource.py index 2bf92947fd..ddfaaf4cb0 100644 --- a/lib/spack/spack/resource.py +++ b/lib/spack/spack/resource.py @@ -6,7 +6,7 @@ # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # -# For details, see https://llnl.github.io/spack +# For details, see https://software.llnl.gov/spack # Please also see the LICENSE file for our notice and the LGPL. 
# # This program is free software; you can redistribute it and/or modify diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py index d0d809004d..647976df21 100644 --- a/lib/spack/spack/test/namespace_trie.py +++ b/lib/spack/spack/test/namespace_trie.py @@ -6,7 +6,7 @@ # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # -# For details, see https://llnl.github.io/spack +# For details, see https://software.llnl.gov/spack # Please also see the LICENSE file for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify diff --git a/lib/spack/spack/test/tally_plugin.py b/lib/spack/spack/test/tally_plugin.py index 9ca898c47c..e0b9618e0c 100644 --- a/lib/spack/spack/test/tally_plugin.py +++ b/lib/spack/spack/test/tally_plugin.py @@ -6,7 +6,7 @@ # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # -# For details, see https://scalability-llnl.github.io/spack +# For details, see https://scalability-software.llnl.gov/spack # Please also see the LICENSE file for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify @@ -34,7 +34,7 @@ def __init__(self): self.successCount = 0 self.failCount = 0 self.errorCount = 0 - + @property def numberOfTestsRun(self): """Excludes skipped tests""" @@ -48,10 +48,10 @@ def configure(self, options, conf): def addSuccess(self, test): self.successCount += 1 - + def addError(self, test, err): self.errorCount += 1 - + def addFailure(self, test, err): self.failCount += 1 diff --git a/var/spack/repos/builtin/packages/gdb/package.py b/var/spack/repos/builtin/packages/gdb/package.py index dd02b426b9..b346fe80c2 100644 --- a/var/spack/repos/builtin/packages/gdb/package.py +++ b/var/spack/repos/builtin/packages/gdb/package.py @@ -6,7 +6,7 @@ # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # -# For details, see https://llnl.github.io/spack +# For details, see https://software.llnl.gov/spack # Please also see the LICENSE file for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify diff --git a/var/spack/repos/builtin/packages/texinfo/package.py b/var/spack/repos/builtin/packages/texinfo/package.py index a83c10c0c1..6cf8d79072 100644 --- a/var/spack/repos/builtin/packages/texinfo/package.py +++ b/var/spack/repos/builtin/packages/texinfo/package.py @@ -6,7 +6,7 @@ # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # -# For details, see https://llnl.github.io/spack +# For details, see https://software.llnl.gov/spack # Please also see the LICENSE file for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify From a339ac0a727458ab55b2e0d27ff17dcc139c0a32 Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Mon, 29 Feb 2016 22:29:30 -0500 Subject: [PATCH 071/189] Bug Fix: When Spack create roots around for other versions, it sometimes finds files it thinks are tarballs, but are not. Previously, it would crash if any such files are found. This change allows it to simply skip them and move on, processing the rest of the files it finds correctly. 
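Stripped of the staging machinery, the fix below is a broad try/except around the per-URL work, so that one bad candidate is reported and skipped instead of aborting the whole loop. A self-contained sketch of that pattern (fetch_and_hash is a hypothetical stand-in for Stage.fetch() plus the md5 checksum, so the example runs without any network access):

    import hashlib

    def fetch_and_hash(url):
        # Stand-in for staging a URL and checksumming the downloaded archive;
        # here we just hash the URL string and reject non-tarball names.
        if not url.endswith('.tar.gz'):
            raise ValueError("does not look like a tarball")
        return hashlib.md5(url.encode('utf-8')).hexdigest()

    def get_checksums(urls):
        hashes = []
        for url in urls:
            try:
                hashes.append((url, fetch_and_hash(url)))
            except Exception as e:
                # Report the failure and keep processing the remaining URLs.
                print("Something failed on %s, skipping. (%s)" % (url, e))
                continue
        return hashes

    print(get_checksums(['pkg-1.0.tar.gz', 'index.html', 'pkg-1.1.tar.gz']))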
--- lib/spack/spack/cmd/checksum.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index b1ad89dbb8..c451993233 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -58,24 +58,29 @@ def get_checksums(versions, urls, **kwargs): tty.msg("Downloading...") hashes = [] - for i, (url, version) in enumerate(zip(urls, versions)): + i = 0 + for url, version in zip(urls, versions): stage = Stage(url) try: stage.fetch() if i == 0 and first_stage_function: first_stage_function(stage) - hashes.append( - spack.util.crypto.checksum(hashlib.md5, stage.archive_file)) + hashes.append((version, + spack.util.crypto.checksum(hashlib.md5, stage.archive_file))) except FailedDownloadError, e: tty.msg("Failed to fetch %s" % url) continue + except Exception, e: + tty.msg('Something failed on %s, skipping.\n (%s)' % (url, e)) + continue finally: if not keep_stage: stage.destroy() + i += 1 - return zip(versions, hashes) + return hashes From e414c5fdfbc91e5934ee59233475f8f862e2e0ce Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Mon, 29 Feb 2016 22:31:18 -0500 Subject: [PATCH 072/189] Added missing cmake dependencies --- var/spack/repos/builtin/packages/cgal/package.py | 1 + var/spack/repos/builtin/packages/curl/package.py | 6 +++--- var/spack/repos/builtin/packages/expat/package.py | 1 + 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/cgal/package.py b/var/spack/repos/builtin/packages/cgal/package.py index 97356433be..ef4a2736db 100644 --- a/var/spack/repos/builtin/packages/cgal/package.py +++ b/var/spack/repos/builtin/packages/cgal/package.py @@ -46,6 +46,7 @@ class Cgal(Package): depends_on('mpfr') depends_on('gmp') depends_on('zlib') + depends_on('cmake') # FIXME : Qt5 dependency missing (needs Qt5 and OpenGL) # FIXME : Optional third party libraries missing diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py index 9e684445c7..6c302281a7 100644 --- a/var/spack/repos/builtin/packages/curl/package.py +++ b/var/spack/repos/builtin/packages/curl/package.py @@ -13,13 +13,13 @@ class Curl(Package): version('7.43.0', '11bddbb452a8b766b932f859aaeeed39') version('7.42.1', '296945012ce647b94083ed427c1877a8') - depends_on("openssl") +# depends_on("openssl") depends_on("zlib") def install(self, spec, prefix): configure('--prefix=%s' % prefix, - '--with-zlib=%s' % spec['zlib'].prefix, - '--with-ssl=%s' % spec['openssl'].prefix) + '--with-zlib=%s' % spec['zlib'].prefix) +# '--with-ssl=%s' % spec['openssl'].prefix) make() make("install") diff --git a/var/spack/repos/builtin/packages/expat/package.py b/var/spack/repos/builtin/packages/expat/package.py index 082da5bf0b..3f925c6546 100644 --- a/var/spack/repos/builtin/packages/expat/package.py +++ b/var/spack/repos/builtin/packages/expat/package.py @@ -7,6 +7,7 @@ class Expat(Package): version('2.1.0', 'dd7dab7a5fea97d2a6a43f511449b7cd') + depends_on('cmake') def install(self, spec, prefix): From 15ae92aae9c8ac521bb5d7ee5f210d265494d87a Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Mon, 29 Feb 2016 22:59:28 -0500 Subject: [PATCH 073/189] New packages added: blitz, netcdf-cxx4, netcdf-fortran, proj, udunits2 --- .../repos/builtin/packages/blitz/package.py | 40 ++++++++++++++++++ .../builtin/packages/netcdf-cxx4/package.py | 41 +++++++++++++++++++ .../packages/netcdf-fortran/package.py | 21 ++++++++++ .../repos/builtin/packages/proj/package.py 
| 37 +++++++++++++++++ .../builtin/packages/udunits2/package.py | 16 ++++++++ 5 files changed, 155 insertions(+) create mode 100644 var/spack/repos/builtin/packages/blitz/package.py create mode 100644 var/spack/repos/builtin/packages/netcdf-cxx4/package.py create mode 100644 var/spack/repos/builtin/packages/netcdf-fortran/package.py create mode 100644 var/spack/repos/builtin/packages/proj/package.py create mode 100644 var/spack/repos/builtin/packages/udunits2/package.py diff --git a/var/spack/repos/builtin/packages/blitz/package.py b/var/spack/repos/builtin/packages/blitz/package.py new file mode 100644 index 0000000000..82ff634925 --- /dev/null +++ b/var/spack/repos/builtin/packages/blitz/package.py @@ -0,0 +1,40 @@ +# FIXME: +# This is a template package file for Spack. We've conveniently +# put "FIXME" labels next to all the things you'll want to change. +# +# Once you've edited all the FIXME's, delete this whole message, +# save this file, and test out your package like this: +# +# spack install blitz +# +# You can always get back here to change things with: +# +# spack edit blitz +# +# See the spack documentation for more information on building +# packages. +# +from spack import * + +class Blitz(Package): + """N-dimensional arrays for C++""" + homepage = "http://github.com/blitzpp/blitz" + +# This version doesn't have the configure script generated yet. + url = "https://github.com/blitzpp/blitz/tarball/1.0.0" +#http://prdownloads.sourceforge.net/%(namelower)s + + version('1.0.0', '9f040b9827fe22228a892603671a77af') + + # FIXME: Add dependencies if this package requires them. + # depends_on("foo") + + def install(self, spec, prefix): + # FIXME: Modify the configure line to suit your build system here. + # FIXME: Spack couldn't guess one, so here are some options: + configure('--prefix=%s' % prefix) + # cmake('.', *std_cmake_args) + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py new file mode 100644 index 0000000000..9d70eab05f --- /dev/null +++ b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py @@ -0,0 +1,41 @@ +# FIXME: +# This is a template package file for Spack. We've conveniently +# put "FIXME" labels next to all the things you'll want to change. +# +# Once you've edited all the FIXME's, delete this whole message, +# save this file, and test out your package like this: +# +# spack install netcdf-cxx4 +# +# You can always get back here to change things with: +# +# spack edit netcdf-cxx4 +# +# See the spack documentation for more information on building +# packages. +# +from spack import * + +class NetcdfCxx4(Package): + """C++ interface for NetCDF4""" + homepage = "http://www.unidata.ucar.edu/downloads/netcdf/netcdf-cxx/index.jsp" + url = "http://www.unidata.ucar.edu/downloads/netcdf/ftp/netcdf-cxx4-4.2.tar.gz" + + version('4.2', 'd019853802092cf686254aaba165fc81') + + + variant('mpi', default=True, description='Enables MPI parallelism') +# variant('hdf4', default=False, description="Enable HDF4 support") + + # NetCDF-CXX4 doesn't really depend (directly) on MPI. However... this + # depndency ensures taht the right version of MPI is selected (eg: ^openmpi) + depends_on('mpi', when='+mpi') + depends_on('netcdf') + + def install(self, spec, prefix): + # FIXME: Modify the configure line to suit your build system here. 
+ configure('--prefix=%s' % prefix) + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/repos/builtin/packages/netcdf-fortran/package.py b/var/spack/repos/builtin/packages/netcdf-fortran/package.py new file mode 100644 index 0000000000..8e5c8ecc3a --- /dev/null +++ b/var/spack/repos/builtin/packages/netcdf-fortran/package.py @@ -0,0 +1,21 @@ +from spack import * + +class NetcdfFortran(Package): + """Fortran interface for NetCDF4""" + + homepage = "http://www.unidata.ucar.edu/downloads/netcdf/netcdf-cxx/index.jsp" + url = "http://www.unidata.ucar.edu/downloads/netcdf/ftp/netcdf-fortran-4.4.3.tar.gz" + + version('4.4.3', 'bfd4ae23a34635b273d3eb0d91cbde9e') + + variant('mpi', default=True, description='Enables MPI parallelism') + + # NetCDF-CXX4 doesn't really depend (directly) on MPI. However... this + # depndency ensures taht the right version of MPI is selected (eg: ^openmpi) + depends_on('mpi', when='+mpi') + depends_on('netcdf') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/proj/package.py b/var/spack/repos/builtin/packages/proj/package.py new file mode 100644 index 0000000000..4a0d3feac7 --- /dev/null +++ b/var/spack/repos/builtin/packages/proj/package.py @@ -0,0 +1,37 @@ +# FIXME: +# This is a template package file for Spack. We've conveniently +# put "FIXME" labels next to all the things you'll want to change. +# +# Once you've edited all the FIXME's, delete this whole message, +# save this file, and test out your package like this: +# +# spack install proj +# +# You can always get back here to change things with: +# +# spack edit proj +# +# See the spack documentation for more information on building +# packages. +# +from spack import * + +class Proj(Package): + """Cartographic Projections""" + homepage = "https://github.com/OSGeo/proj.4/wiki" + url = "http://download.osgeo.org/proj/proj-4.9.2.tar.gz" + + version('4.9.2', '9843131676e31bbd903d60ae7dc76cf9') + version('4.9.1', '3cbb2a964fd19a496f5f4265a717d31c') + version('4.8.0', 'd815838c92a29179298c126effbb1537') + version('4.7.0', '927d34623b52e0209ba2bfcca18fe8cd') + version('4.6.1', '7dbaab8431ad50c25669fd3fb28dc493') + + # FIXME: Add dependencies if this package requires them. + # depends_on("foo") + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/udunits2/package.py b/var/spack/repos/builtin/packages/udunits2/package.py new file mode 100644 index 0000000000..9954a733bb --- /dev/null +++ b/var/spack/repos/builtin/packages/udunits2/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Udunits2(Package): + """Automated units conversion""" + + homepage = "http://www.unidata.ucar.edu/software/udunits" + url = "ftp://ftp.unidata.ucar.edu/pub/udunits/udunits-2.2.20.tar.gz" + + version('2.2.20', '1586b70a49dfe05da5fcc29ef239dce0') + + depends_on('expat') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") From eb1d90a8cfadeb556754b57e5d21e5605b9a0e9d Mon Sep 17 00:00:00 2001 From: citibeth Date: Mon, 29 Feb 2016 23:31:54 -0500 Subject: [PATCH 074/189] Undid accidental change on this branch. 
--- var/spack/repos/builtin/packages/curl/package.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py index 6c302281a7..9e684445c7 100644 --- a/var/spack/repos/builtin/packages/curl/package.py +++ b/var/spack/repos/builtin/packages/curl/package.py @@ -13,13 +13,13 @@ class Curl(Package): version('7.43.0', '11bddbb452a8b766b932f859aaeeed39') version('7.42.1', '296945012ce647b94083ed427c1877a8') -# depends_on("openssl") + depends_on("openssl") depends_on("zlib") def install(self, spec, prefix): configure('--prefix=%s' % prefix, - '--with-zlib=%s' % spec['zlib'].prefix) -# '--with-ssl=%s' % spec['openssl'].prefix) + '--with-zlib=%s' % spec['zlib'].prefix, + '--with-ssl=%s' % spec['openssl'].prefix) make() make("install") From a9f0b98d4d30e44032f5b9c86c5b6e27425098f5 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Tue, 1 Mar 2016 15:53:53 -0600 Subject: [PATCH 075/189] Typo fix --- lib/spack/env/cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index aacba996b3..a323c48124 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -130,7 +130,7 @@ if [ -z "$mode" ]; then done fi -# Dump the version and exist if we're in testing mode. +# Dump the version and exit if we're in testing mode. if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then echo "$mode" exit From 8174489787c56cee1726ca36799c236e4869f471 Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Tue, 1 Mar 2016 15:25:57 -0700 Subject: [PATCH 076/189] + Provide two new variants for cmake: 1) +qt - build the cmake-gui Qt application. - adds a dependency on Qt. 2) +sphinxbuild - build the html CMake documentation. - adds a dependency on python and py-sphinx --- .../repos/builtin/packages/cmake/package.py | 26 ++++++++++++++++--- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index e20c1e4aeb..f39a681284 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -37,16 +37,34 @@ class Cmake(Package): version('2.8.10.2', '097278785da7182ec0aea8769d06860c') variant('ncurses', default=True, description='Enables the build of the ncurses gui') + variant('qt', default=False, description='Enables the build of cmake-gui') + variant('sphinxbuild', default=False, description='Enables the generation of html and man page documentation') + depends_on('ncurses', when='+ncurses') + depends_on('qt', when='+qt') + depends_on('python@2.7.11:', when='+sphinxbuild') + depends_on('py-sphinx', when='+sphinxbuild') def url_for_version(self, version): """Handle CMake's version-based custom URLs.""" return 'https://cmake.org/files/v%s/cmake-%s.tar.gz' % (version.up_to(2), version) - def install(self, spec, prefix): - configure('--prefix=' + prefix, - '--parallel=' + str(make_jobs), - '--', '-DCMAKE_USE_OPENSSL=ON') + + options = ['--prefix=%s' % prefix] + options.append('--parallel=%s' % str(make_jobs)) + + if '+qt' in spec: + options.append('--qt-gui') + + if '+sphinxbuild' in spec: + options.append('--sphinx-html') + options.append('--sphinx-man') + + options.append('--') + options.append('-DCMAKE_USE_OPENSSL=ON') + + configure(*options) + make() make('install') From c488f7c4d8e2ff240d561820df11a51518199a53 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 1 Mar 2016 23:57:34 -0800 Subject: [PATCH 
077/189] Fix bug in install permission setting. --- lib/spack/llnl/util/filesystem.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 24cfbfde71..da3cf96050 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -152,15 +152,20 @@ def set_install_permissions(path): def copy_mode(src, dest): src_mode = os.stat(src).st_mode dest_mode = os.stat(dest).st_mode - if src_mode | stat.S_IXUSR: dest_mode |= stat.S_IXUSR - if src_mode | stat.S_IXGRP: dest_mode |= stat.S_IXGRP - if src_mode | stat.S_IXOTH: dest_mode |= stat.S_IXOTH + if src_mode & stat.S_IXUSR: dest_mode |= stat.S_IXUSR + if src_mode & stat.S_IXGRP: dest_mode |= stat.S_IXGRP + if src_mode & stat.S_IXOTH: dest_mode |= stat.S_IXOTH os.chmod(dest, dest_mode) def install(src, dest): """Manually install a file to a particular location.""" tty.debug("Installing %s to %s" % (src, dest)) + + # Expand dsst to its eventual full path if it is a directory. + if os.path.isdir(dest): + dest = join_path(dest, os.path.basename(src)) + shutil.copy(src, dest) set_install_permissions(dest) copy_mode(src, dest) From be306d09e99a69732afc3f44724222ab6c6d71cc Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 2 Mar 2016 00:04:46 -0800 Subject: [PATCH 078/189] Move repo creation code into repository.py --- lib/spack/spack/cmd/repo.py | 46 +-------------------------- lib/spack/spack/repository.py | 58 ++++++++++++++++++++++++++++++++++- 2 files changed, 58 insertions(+), 46 deletions(-) diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index 908f5400ab..c2e352786d 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -74,51 +74,7 @@ def setup_parser(subparser): def repo_create(args): """Create a new package repository.""" - root = canonicalize_path(args.directory) - namespace = args.namespace - - if not args.namespace: - namespace = os.path.basename(root) - - if not re.match(r'\w[\.\w-]*', namespace): - tty.die("'%s' is not a valid namespace." % namespace) - - existed = False - if os.path.exists(root): - if os.path.isfile(root): - tty.die('File %s already exists and is not a directory' % root) - elif os.path.isdir(root): - if not os.access(root, os.R_OK | os.W_OK): - tty.die('Cannot create new repo in %s: cannot access directory.' % root) - if os.listdir(root): - tty.die('Cannot create new repo in %s: directory is not empty.' % root) - existed = True - - full_path = os.path.realpath(root) - parent = os.path.dirname(full_path) - if not os.access(parent, os.R_OK | os.W_OK): - tty.die("Cannot create repository in %s: can't access parent!" % root) - - try: - config_path = os.path.join(root, repo_config_name) - packages_path = os.path.join(root, packages_dir_name) - - mkdirp(packages_path) - with open(config_path, 'w') as config: - config.write("repo:\n") - config.write(" namespace: '%s'\n" % namespace) - - except (IOError, OSError) as e: - tty.die('Failed to create new repository in %s.' % root, - "Caused by %s: %s" % (type(e), e)) - - # try to clean up. - if existed: - shutil.rmtree(config_path, ignore_errors=True) - shutil.rmtree(packages_path, ignore_errors=True) - else: - shutil.rmtree(root, ignore_errors=True) - + full_path, namespace = create_repo(args.directory, args.namespace) tty.msg("Created repo with namespace '%s'." 
% namespace) tty.msg("To register it with spack, run this command:", 'spack repo add %s' % full_path) diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index e8d0cc09ec..6aa75cb43e 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -33,7 +33,7 @@ from external import yaml import llnl.util.tty as tty -from llnl.util.filesystem import join_path +from llnl.util.filesystem import * import spack.error import spack.config @@ -705,6 +705,58 @@ def __contains__(self, pkg_name): return self.exists(pkg_name) +def create_repo(root, namespace=None): + """Create a new repository in root with the specified namespace. + + If the namespace is not provided, use basename of root. + Return the canonicalized path and the namespace of the created repository. + """ + root = canonicalize_path(root) + if not namespace: + namespace = os.path.basename(root) + + if not re.match(r'\w[\.\w-]*', namespace): + raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace) + + existed = False + if os.path.exists(root): + if os.path.isfile(root): + raise BadRepoError('File %s already exists and is not a directory' % root) + elif os.path.isdir(root): + if not os.access(root, os.R_OK | os.W_OK): + raise BadRepoError('Cannot create new repo in %s: cannot access directory.' % root) + if os.listdir(root): + raise BadRepoError('Cannot create new repo in %s: directory is not empty.' % root) + existed = True + + full_path = os.path.realpath(root) + parent = os.path.dirname(full_path) + if not os.access(parent, os.R_OK | os.W_OK): + raise BadRepoError("Cannot create repository in %s: can't access parent!" % root) + + try: + config_path = os.path.join(root, repo_config_name) + packages_path = os.path.join(root, packages_dir_name) + + mkdirp(packages_path) + with open(config_path, 'w') as config: + config.write("repo:\n") + config.write(" namespace: '%s'\n" % namespace) + + except (IOError, OSError) as e: + raise BadRepoError('Failed to create new repository in %s.' % root, + "Caused by %s: %s" % (type(e), e)) + + # try to clean up. + if existed: + shutil.rmtree(config_path, ignore_errors=True) + shutil.rmtree(packages_path, ignore_errors=True) + else: + shutil.rmtree(root, ignore_errors=True) + + return full_path, namespace + + class RepoError(spack.error.SpackError): """Superclass for repository-related errors.""" @@ -713,6 +765,10 @@ class NoRepoConfiguredError(RepoError): """Raised when there are no repositories configured.""" +class InvalidNamespaceError(RepoError): + """Raised when an invalid namespace is encountered.""" + + class BadRepoError(RepoError): """Raised when repo layout is invalid.""" From 21d125c9147f5d3cfc0eb8a9516c8ce24df7279b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 2 Mar 2016 00:08:36 -0800 Subject: [PATCH 079/189] Fixes #175: Dump environment provenance as well as build log. --- lib/spack/spack/directory_layout.py | 14 +++++++++++++- lib/spack/spack/package.py | 7 +++++++ lib/spack/spack/util/environment.py | 7 +++++++ 3 files changed, 27 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 3e416a6a1f..29d87b65b3 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -173,7 +173,9 @@ def __init__(self, root, **kwargs): self.spec_file_name = 'spec.yaml' self.extension_file_name = 'extensions.yaml' - self.build_log_name = 'build.out' # TODO: use config file. + self.build_log_name = 'build.out' # build log. 
+ self.build_env_name = 'build.env' # build environment + self.packages_dir = 'repos' # archive of package.py files # Cache of already written/read extension maps. self._extension_maps = {} @@ -231,6 +233,16 @@ def build_log_path(self, spec): self.build_log_name) + def build_env_path(self, spec): + return join_path(self.path_for_spec(spec), self.metadata_dir, + self.build_env_name) + + + def build_packages_path(self, spec): + return join_path(self.path_for_spec(spec), self.metadata_dir, + self.packages_dir) + + def create_install_directory(self, spec): _check_concrete(spec) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 8019b29cba..5b1927fe8f 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -66,6 +66,7 @@ from spack.stage import Stage, ResourceStage, StageComposite from spack.util.compression import allowed_archive, extension from spack.util.executable import ProcessError +from spack.util.environment import dump_environment """Allowed URL schemes for spack packages.""" _ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"] @@ -884,10 +885,14 @@ def real_work(): # Do the real install in the source directory. self.stage.chdir_to_source() + # Save the build environment in a file before building. + env_path = join_path(os.getcwd(), 'spack-build.env') + # This redirects I/O to a build log (and optionally to the terminal) log_path = join_path(os.getcwd(), 'spack-build.out') log_file = open(log_path, 'w') with log_output(log_file, verbose, sys.stdout.isatty(), True): + dump_environment(env_path) self.install(self.spec, self.prefix) # Ensure that something was actually installed. @@ -896,7 +901,9 @@ def real_work(): # Move build log into install directory on success if not fake: log_install_path = spack.install_layout.build_log_path(self.spec) + env_install_path = spack.install_layout.build_env_path(self.spec) install(log_path, log_install_path) + install(env_path, env_install_path) # On successful install, remove the stage. if not keep_stage: diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py index cd413dcfbc..ae8e5708be 100644 --- a/lib/spack/spack/util/environment.py +++ b/lib/spack/spack/util/environment.py @@ -63,3 +63,10 @@ def pop_keys(dictionary, *keys): for key in keys: if key in dictionary: dictionary.pop(key) + + +def dump_environment(path): + """Dump the current environment out to a file.""" + with open(path, 'w') as env_file: + for key,val in sorted(os.environ.items()): + env_file.write("%s=%s\n" % (key, val)) From 52081c46d641fcf679e70a04e9937db67e8787e4 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 2 Mar 2016 00:09:24 -0800 Subject: [PATCH 080/189] Fixes #469: Store package.py files in the .spack directory. - Adds packages in spack repos inside the .spack directory, so that packages can be rebuilt automatically later. 
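Together with the package.py snapshot this change adds, the build.env file from the previous patch records how an install was produced. dump_environment() writes it as flat KEY=VALUE lines, one variable per line, so it can be read back trivially; a minimal, self-contained sketch (read_environment is a hypothetical helper, not part of either patch, and values containing embedded newlines are not handled since the writer does not escape them):

    def read_environment(path):
        # Inverse of dump_environment(): parse KEY=VALUE lines into a dict.
        env = {}
        with open(path) as env_file:
            for line in env_file:
                line = line.rstrip('\n')
                if not line or '=' not in line:
                    continue
                key, _, value = line.partition('=')
                env[key] = value
        return env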
--- lib/spack/spack/package.py | 51 +++++++++++++++++++++++++++++++++++ lib/spack/spack/repository.py | 39 +++++++++++++++++++++++++++ 2 files changed, 90 insertions(+) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 5b1927fe8f..9f1825ca21 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -58,6 +58,7 @@ import spack.mirror import spack.hooks import spack.directives +import spack.repository import spack.build_environment import spack.url import spack.util.web @@ -502,6 +503,7 @@ def fetcher(self): self._fetcher = self._make_fetcher() return self._fetcher + @fetcher.setter def fetcher(self, f): self._fetcher = f @@ -905,6 +907,9 @@ def real_work(): install(log_path, log_install_path) install(env_path, env_install_path) + packages_dir = spack.install_layout.build_packages_path(self.spec) + dump_packages(self.spec, packages_dir) + # On successful install, remove the stage. if not keep_stage: self.stage.destroy() @@ -1219,6 +1224,52 @@ def validate_package_url(url_string): tty.die("Invalid file type in URL: '%s'" % url_string) +def dump_packages(spec, path): + """Dump all package information for a spec and its dependencies. + + This creates a package repository within path for every + namespace in the spec DAG, and fills the repos wtih package + files and patch files for every node in the DAG. + """ + mkdirp(path) + + # Copy in package.py files from any dependencies. + # Note that we copy them in as they are in the *install* directory + # NOT as they are in the repository, because we want a snapshot of + # how *this* particular build was done. + for node in spec.traverse(): + if node is not spec: + # Locate the dependency package in the install tree and find + # its provenance information. + source = spack.install_layout.build_packages_path(node) + source_repo_root = join_path(source, node.namespace) + + # There's no provenance installed for the source package. Skip it. + # User can always get something current from the builtin repo. + if not os.path.isdir(source_repo_root): + continue + + # Create a source repo and get the pkg directory out of it. + try: + source_repo = spack.repository.Repo(source_repo_root) + source_pkg_dir = source_repo.dirname_for_package_name(node.name) + except RepoError as e: + tty.warn("Warning: Couldn't copy in provenance for %s" % node.name) + + # Create a destination repository + dest_repo_root = join_path(path, node.namespace) + if not os.path.exists(dest_repo_root): + spack.repository.create_repo(dest_repo_root) + repo = spack.repository.Repo(dest_repo_root) + + # Get the location of the package in the dest repo. + dest_pkg_dir = repo.dirname_for_package_name(node.name) + if node is not spec: + install_tree(source_pkg_dir, dest_pkg_dir) + else: + spack.repo.dump_provenance(node, dest_pkg_dir) + + def print_pkg(message): """Outputs a message with a package icon.""" from llnl.util.tty.color import cwrite diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 6aa75cb43e..8d06fefe7f 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -316,6 +316,16 @@ def get(self, spec, new=False): return self.repo_for_pkg(spec).get(spec) + @_autospec + def dump_provenance(self, spec, path): + """Dump provenance information for a spec to a particular path. + + This dumps the package file and any associated patch files. + Raises UnknownPackageError if not found. 
+ """ + return self.repo_for_pkg(spec).dump_provenance(spec, path) + + def dirname_for_package_name(self, pkg_name): return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name) @@ -552,6 +562,35 @@ def get(self, spec, new=False): return self._instances[key] + @_autospec + def dump_provenance(self, spec, path): + """Dump provenance information for a spec to a particular path. + + This dumps the package file and any associated patch files. + Raises UnknownPackageError if not found. + """ + # Some preliminary checks. + if spec.virtual: + raise UnknownPackageError(spec.name) + + if spec.namespace and spec.namespace != self.namespace: + raise UnknownPackageError("Repository %s does not contain package %s." + % (self.namespace, spec.fullname)) + + # Install any patch files needed by packages. + mkdirp(path) + for spec, patches in spec.package.patches.items(): + for patch in patches: + if patch.path: + if os.path.exists(patch.path): + install(patch.path, path) + else: + tty.warn("Patch file did not exist: %s" % patch.path) + + # Install the package.py file itself. + install(self.filename_for_package_name(spec), path) + + def purge(self): """Clear entire package instance cache.""" self._instances.clear() From a0c6519de9d03744e33b5187d42ec0d966dbd3e0 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 2 Mar 2016 01:41:43 -0800 Subject: [PATCH 081/189] Fixes #476: create was broken for FTP URLs. --- lib/spack/spack/util/web.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index e26daef296..73f4858b02 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -86,12 +86,12 @@ def _spider(args): if not "Content-type" in resp.headers: tty.debug("ignoring page " + url) - return pages + return pages, links if not resp.headers["Content-type"].startswith('text/html'): tty.debug("ignoring page " + url + " with content type " + resp.headers["Content-type"]) - return pages + return pages, links # Do the real GET request when we know it's just HTML. req.get_method = lambda: "GET" @@ -173,7 +173,7 @@ def spider(root_url, **kwargs): performance over a sequential fetch. 
""" max_depth = kwargs.setdefault('depth', 1) - pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False)) + pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False)) return pages, links From 726b350689bf6da7b82eec170001adc8100beb71 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 2 Mar 2016 12:52:38 +0100 Subject: [PATCH 082/189] test : fixed failing unit tests --- lib/spack/spack/mirror.py | 49 +++++++++---------- lib/spack/spack/stage.py | 7 ++- lib/spack/spack/test/configure_guess.py | 12 ++--- lib/spack/spack/test/git_fetch.py | 31 +++++++------ lib/spack/spack/test/hg_fetch.py | 31 +++++++------ lib/spack/spack/test/link_tree.py | 6 +-- lib/spack/spack/test/mirror.py | 62 ++++++++++++------------- lib/spack/spack/test/svn_fetch.py | 31 +++++++------ 8 files changed, 116 insertions(+), 113 deletions(-) diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index fa29e20803..bc8870926f 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -168,32 +168,33 @@ def create(path, specs, **kwargs): pkg = spec.package tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@"))) try: - for ii, stage in enumerate(pkg.stage): - fetcher = stage.fetcher - if ii == 0: - # create a subdirectory for the current package@version - archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher))) - name = spec.format("$_$@") - else: - resource = stage.resource - archive_path = join_path(subdir, suggest_archive_basename(resource)) - name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@")) - subdir = os.path.dirname(archive_path) - mkdirp(subdir) + with pkg.stage: + for ii, stage in enumerate(pkg.stage): + fetcher = stage.fetcher + if ii == 0: + # create a subdirectory for the current package@version + archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher))) + name = spec.format("$_$@") + else: + resource = stage.resource + archive_path = join_path(subdir, suggest_archive_basename(resource)) + name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@")) + subdir = os.path.dirname(archive_path) + mkdirp(subdir) - if os.path.exists(archive_path): - tty.msg("{name} : already added".format(name=name)) - else: - everything_already_exists = False - fetcher.fetch() - if not kwargs.get('no_checksum', False): - fetcher.check() - tty.msg("{name} : checksum passed".format(name=name)) + if os.path.exists(archive_path): + tty.msg("{name} : already added".format(name=name)) + else: + everything_already_exists = False + fetcher.fetch() + if not kwargs.get('no_checksum', False): + fetcher.check() + tty.msg("{name} : checksum passed".format(name=name)) - # Fetchers have to know how to archive their files. Use - # that to move/copy/create an archive in the mirror. - fetcher.archive(archive_path) - tty.msg("{name} : added".format(name=name)) + # Fetchers have to know how to archive their files. Use + # that to move/copy/create an archive in the mirror. 
+ fetcher.archive(archive_path) + tty.msg("{name} : added".format(name=name)) if everything_already_exists: present.append(spec) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 96b1eaf3f2..956d1c8706 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -404,10 +404,13 @@ def path(self): return self[0].path def __enter__(self): - return self[0].__enter__() + for item in self: + item.__enter__() + return self def __exit__(self, exc_type, exc_val, exc_tb): - return self[0].__exit__(exc_type, exc_val, exc_tb) + for item in reversed(self): + item.__exit__(exc_type, exc_val, exc_tb) def chdir_to_source(self): return self[0].chdir_to_source() diff --git a/lib/spack/spack/test/configure_guess.py b/lib/spack/spack/test/configure_guess.py index a4e8565b62..bc2332acc2 100644 --- a/lib/spack/spack/test/configure_guess.py +++ b/lib/spack/spack/test/configure_guess.py @@ -52,8 +52,6 @@ def setUp(self): def tearDown(self): shutil.rmtree(self.tmpdir, ignore_errors=True) - if self.stage: - self.stage.destroy() os.chdir(self.orig_dir) @@ -64,12 +62,12 @@ def check_archive(self, filename, system): url = 'file://' + join_path(os.getcwd(), 'archive.tar.gz') print url - self.stage = Stage(url) - self.stage.fetch() + with Stage(url) as stage: + stage.fetch() - guesser = ConfigureGuesser() - guesser(self.stage) - self.assertEqual(system, guesser.build_system) + guesser = ConfigureGuesser() + guesser(stage) + self.assertEqual(system, guesser.build_system) def test_python(self): diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index d84433176a..caa076823e 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -76,26 +76,27 @@ def try_fetch(self, rev, test_file, args): """ self.pkg.versions[ver('git')] = args - self.pkg.do_stage() - self.assert_rev(rev) + with self.pkg.stage: + self.pkg.do_stage() + self.assert_rev(rev) - file_path = join_path(self.pkg.stage.source_path, test_file) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + file_path = join_path(self.pkg.stage.source_path, test_file) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - os.unlink(file_path) - self.assertFalse(os.path.isfile(file_path)) + os.unlink(file_path) + self.assertFalse(os.path.isfile(file_path)) - untracked_file = 'foobarbaz' - touch(untracked_file) - self.assertTrue(os.path.isfile(untracked_file)) - self.pkg.do_restage() - self.assertFalse(os.path.isfile(untracked_file)) + untracked_file = 'foobarbaz' + touch(untracked_file) + self.assertTrue(os.path.isfile(untracked_file)) + self.pkg.do_restage() + self.assertFalse(os.path.isfile(untracked_file)) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - self.assert_rev(rev) + self.assert_rev(rev) def test_fetch_master(self): diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py index bbcb64e4c1..75aa7ab17e 100644 --- a/lib/spack/spack/test/hg_fetch.py +++ b/lib/spack/spack/test/hg_fetch.py @@ -68,26 +68,27 @@ def try_fetch(self, rev, test_file, args): """ self.pkg.versions[ver('hg')] = args - self.pkg.do_stage() - self.assertEqual(self.repo.get_rev(), rev) + with self.pkg.stage: + self.pkg.do_stage() + self.assertEqual(self.repo.get_rev(), rev) - file_path = 
join_path(self.pkg.stage.source_path, test_file) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + file_path = join_path(self.pkg.stage.source_path, test_file) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - os.unlink(file_path) - self.assertFalse(os.path.isfile(file_path)) + os.unlink(file_path) + self.assertFalse(os.path.isfile(file_path)) - untracked = 'foobarbaz' - touch(untracked) - self.assertTrue(os.path.isfile(untracked)) - self.pkg.do_restage() - self.assertFalse(os.path.isfile(untracked)) + untracked = 'foobarbaz' + touch(untracked) + self.assertTrue(os.path.isfile(untracked)) + self.pkg.do_restage() + self.assertFalse(os.path.isfile(untracked)) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - self.assertEqual(self.repo.get_rev(), rev) + self.assertEqual(self.repo.get_rev(), rev) def test_fetch_default(self): diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py index 886b7ef4c5..7b67e873dd 100644 --- a/lib/spack/spack/test/link_tree.py +++ b/lib/spack/spack/test/link_tree.py @@ -38,6 +38,8 @@ class LinkTreeTest(unittest.TestCase): def setUp(self): self.stage = Stage('link-tree-test') + # FIXME : possibly this test needs to be refactored to avoid the explicit call to __enter__ and __exit__ + self.stage.__enter__() with working_dir(self.stage.path): touchp('source/1') @@ -51,10 +53,8 @@ def setUp(self): source_path = os.path.join(self.stage.path, 'source') self.link_tree = LinkTree(source_path) - def tearDown(self): - if self.stage: - self.stage.destroy() + self.stage.__exit__(None, None, None) def check_file_link(self, filename): diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index f83cc8090c..9e2c631978 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -74,14 +74,14 @@ def set_up_package(self, name, MockRepoClass, url_attr): def check_mirror(self): - stage = Stage('spack-mirror-test') - mirror_root = join_path(stage.path, 'test-mirror') + with Stage('spack-mirror-test') as stage: + mirror_root = join_path(stage.path, 'test-mirror') + + # register mirror with spack config + mirrors = { 'spack-mirror-test' : 'file://' + mirror_root } + spack.config.update_config('mirrors', mirrors) - # register mirror with spack config - mirrors = { 'spack-mirror-test' : 'file://' + mirror_root } - spack.config.update_config('mirrors', mirrors) - try: os.chdir(stage.path) spack.mirror.create( mirror_root, self.repos, no_checksum=True) @@ -97,38 +97,36 @@ def check_mirror(self): files = os.listdir(subdir) self.assertEqual(len(files), 1) - # Now try to fetch each package. - for name, mock_repo in self.repos.items(): - spec = Spec(name).concretized() - pkg = spec.package + # Now try to fetch each package. + for name, mock_repo in self.repos.items(): + spec = Spec(name).concretized() + pkg = spec.package - pkg._stage = None - saved_checksum_setting = spack.do_checksum - try: - # Stage the archive from the mirror and cd to it. - spack.do_checksum = False - pkg.do_stage(mirror_only=True) + saved_checksum_setting = spack.do_checksum + with pkg.stage: + try: + # Stage the archive from the mirror and cd to it. 
+ spack.do_checksum = False + pkg.do_stage(mirror_only=True) - # Compare the original repo with the expanded archive - original_path = mock_repo.path - if 'svn' in name: - # have to check out the svn repo to compare. - original_path = join_path(mock_repo.path, 'checked_out') - svn('checkout', mock_repo.url, original_path) + # Compare the original repo with the expanded archive + original_path = mock_repo.path + if 'svn' in name: + # have to check out the svn repo to compare. + original_path = join_path(mock_repo.path, 'checked_out') + svn('checkout', mock_repo.url, original_path) - dcmp = dircmp(original_path, pkg.stage.source_path) + dcmp = dircmp(original_path, pkg.stage.source_path) - # make sure there are no new files in the expanded tarball - self.assertFalse(dcmp.right_only) + # make sure there are no new files in the expanded tarball + self.assertFalse(dcmp.right_only) - # and that all original files are present. - self.assertTrue(all(l in exclude for l in dcmp.left_only)) + # and that all original files are present. + self.assertTrue(all(l in exclude for l in dcmp.left_only)) - finally: - spack.do_checksum = saved_checksum_setting - pkg.do_clean() - finally: - stage.destroy() + finally: + spack.do_checksum = saved_checksum_setting + pkg.do_clean() def test_git_mirror(self): diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py index 454a7f1d1f..6ac9e2f343 100644 --- a/lib/spack/spack/test/svn_fetch.py +++ b/lib/spack/spack/test/svn_fetch.py @@ -82,26 +82,27 @@ def try_fetch(self, rev, test_file, args): """ self.pkg.versions[ver('svn')] = args - self.pkg.do_stage() - self.assert_rev(rev) + with self.pkg.stage: + self.pkg.do_stage() + self.assert_rev(rev) - file_path = join_path(self.pkg.stage.source_path, test_file) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + file_path = join_path(self.pkg.stage.source_path, test_file) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - os.unlink(file_path) - self.assertFalse(os.path.isfile(file_path)) + os.unlink(file_path) + self.assertFalse(os.path.isfile(file_path)) - untracked = 'foobarbaz' - touch(untracked) - self.assertTrue(os.path.isfile(untracked)) - self.pkg.do_restage() - self.assertFalse(os.path.isfile(untracked)) + untracked = 'foobarbaz' + touch(untracked) + self.assertTrue(os.path.isfile(untracked)) + self.pkg.do_restage() + self.assertFalse(os.path.isfile(untracked)) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - self.assert_rev(rev) + self.assert_rev(rev) def test_fetch_default(self): From 65b2a24f7c12380d0815e5705d1c1e66e5fd22a9 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 2 Mar 2016 13:03:40 +0100 Subject: [PATCH 083/189] stage : removed _setup method --- lib/spack/spack/stage.py | 57 ++++++++++++++++++---------------------- 1 file changed, 25 insertions(+), 32 deletions(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 956d1c8706..48770fb407 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -111,9 +111,32 @@ def __init__(self, url_or_fetch_strategy, **kwargs): def __enter__(self): """ Entering a stage context will create the stage directory + + If self.tmp_root evaluates to False, the stage directory is created directly under spack.stage_path, otherwise + this will 
attempt to create a stage in a temporary directory and link it into spack.stage_path. + + Spack will use the first writable location in spack.tmp_dirs to create a stage. If there is no valid location + in tmp_dirs, fall back to making the stage inside spack.stage_path. """ - # FIXME : if _setup is used only here, then it makes no sense to retain the function - self._setup() + # Create the top-level stage directory + mkdirp(spack.stage_path) + self._cleanup_dead_links() + + # If this is a temporary stage, them make the temp directory + if self.tmp_root: + if self._need_to_create_path(): + tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) + os.symlink(tmp_dir, self.path) + + # if we're not using a tmp dir, create the stage directly in the + # stage dir, rather than linking to it. + else: + if self._need_to_create_path(): + mkdirp(self.path) + + # Make sure we can actually do something with the stage we made. + ensure_access(self.path) + return self def __exit__(self, exc_type, exc_val, exc_tb): @@ -180,36 +203,6 @@ def _need_to_create_path(self): return False - def _setup(self): - """Creates the stage directory. - If spack.use_tmp_stage is False, the stage directory is created - directly under spack.stage_path. - - If spack.use_tmp_stage is True, this will attempt to create a - stage in a temporary directory and link it into spack.stage_path. - Spack will use the first writable location in spack.tmp_dirs to - create a stage. If there is no valid location in tmp_dirs, fall - back to making the stage inside spack.stage_path. - """ - # Create the top-level stage directory - mkdirp(spack.stage_path) - self._cleanup_dead_links() - - # If this is a temporary stage, them make the temp directory - if self.tmp_root: - if self._need_to_create_path(): - tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) - os.symlink(tmp_dir, self.path) - - # if we're not using a tmp dir, create the stage directly in the - # stage dir, rather than linking to it. - else: - if self._need_to_create_path(): - mkdirp(self.path) - - # Make sure we can actually do something with the stage we made. - ensure_access(self.path) - @property def archive_file(self): """Path to the source archive within this stage directory.""" From d649b715edae0871254695e2deacf078554a1475 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 2 Mar 2016 13:16:04 +0100 Subject: [PATCH 084/189] stage : updated functions doc _cleanup_dead_links : fixed minor bug --- lib/spack/spack/stage.py | 41 +++++++++++++++++----------------------- 1 file changed, 17 insertions(+), 24 deletions(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 48770fb407..900acd664d 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -42,33 +42,26 @@ class Stage(object): - """A Stage object manages a directory where some source code is - downloaded and built before being installed. It handles - fetching the source code, either as an archive to be expanded - or by checking it out of a repository. A stage's lifecycle - looks like this: + """ + A Stage object is a context manager that handles a directory where some source code is downloaded and built + before being installed. It handles fetching the source code, either as an archive to be expanded or by checking + it out of a repository. A stage's lifecycle looks like this: - Stage() - Constructor creates the stage directory. - fetch() - Fetch a source archive into the stage. - expand_archive() - Expand the source archive. - - Build and install the archive. 
This is handled by the Package class. - destroy() - Remove the stage once the package has been installed. + ``` + with Stage() as stage: # Context manager creates and destroys the stage directory + fetch() # Fetch a source archive into the stage. + expand_archive() # Expand the source archive. + # Build and install the archive. This is handled by the Package class. + ``` - If spack.use_tmp_stage is True, spack will attempt to create stages - in a tmp directory. Otherwise, stages are created directly in - spack.stage_path. + If spack.use_tmp_stage is True, spack will attempt to create stages in a tmp directory. + Otherwise, stages are created directly in spack.stage_path. - There are two kinds of stages: named and unnamed. Named stages can - persist between runs of spack, e.g. if you fetched a tarball but - didn't finish building it, you won't have to fetch it again. + There are two kinds of stages: named and unnamed. Named stages can persist between runs of spack, e.g. if you + fetched a tarball but didn't finish building it, you won't have to fetch it again. - Unnamed stages are created using standard mkdtemp mechanisms or - similar, and are intended to persist for only one run of spack. + Unnamed stages are created using standard mkdtemp mechanisms or similar, and are intended to persist for + only one run of spack. """ def __init__(self, url_or_fetch_strategy, **kwargs): @@ -164,7 +157,7 @@ def _cleanup_dead_links(self): path = join_path(spack.stage_path, file) if os.path.islink(path): real_path = os.path.realpath(path) - if not os.path.exists(path): + if not os.path.exists(real_path): os.unlink(path) def _need_to_create_path(self): From 9001b9ed3c01f8ccaceaca60d6a34c3551f77240 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 2 Mar 2016 15:56:09 +0100 Subject: [PATCH 085/189] package : minor syntax fixes mirror : extracted add_single_spec from create --- lib/spack/spack/mirror.py | 98 ++++++++++++++++++++------------------ lib/spack/spack/package.py | 4 +- 2 files changed, 54 insertions(+), 48 deletions(-) diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index bc8870926f..5ed7aff176 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -110,7 +110,6 @@ def suggest_archive_basename(resource): return basename - def create(path, specs, **kwargs): """Create a directory to be used as a spack mirror, and fill it with package archives. @@ -158,58 +157,65 @@ def create(path, specs, **kwargs): "Cannot create directory '%s':" % mirror_root, str(e)) # Things to keep track of while parsing specs. 
- present = [] - mirrored = [] - error = [] + categories = { + 'present': [], + 'mirrored': [], + 'error': [] + } # Iterate through packages and download all the safe tarballs for each of them - everything_already_exists = True for spec in version_specs: - pkg = spec.package - tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@"))) - try: - with pkg.stage: - for ii, stage in enumerate(pkg.stage): - fetcher = stage.fetcher - if ii == 0: - # create a subdirectory for the current package@version - archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher))) - name = spec.format("$_$@") - else: - resource = stage.resource - archive_path = join_path(subdir, suggest_archive_basename(resource)) - name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@")) - subdir = os.path.dirname(archive_path) - mkdirp(subdir) + add_single_spec(spec, mirror_root, categories, **kwargs) - if os.path.exists(archive_path): - tty.msg("{name} : already added".format(name=name)) - else: - everything_already_exists = False - fetcher.fetch() - if not kwargs.get('no_checksum', False): - fetcher.check() - tty.msg("{name} : checksum passed".format(name=name)) + return categories['present'], categories['mirrored'], categories['error'] - # Fetchers have to know how to archive their files. Use - # that to move/copy/create an archive in the mirror. - fetcher.archive(archive_path) - tty.msg("{name} : added".format(name=name)) - if everything_already_exists: - present.append(spec) - else: - mirrored.append(spec) - except Exception, e: - if spack.debug: - sys.excepthook(*sys.exc_info()) - else: - tty.warn("Error while fetching %s." % spec.format('$_$@'), e.message) - error.append(spec) - finally: - pkg.stage.destroy() +def add_single_spec(spec, mirror_root, categories, **kwargs): + tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@"))) + spec_exists_in_mirror = True + try: + with spec.package.stage: + # fetcher = stage.fetcher + # fetcher.fetch() + # ... + # fetcher.archive(archive_path) + for ii, stage in enumerate(spec.package.stage): + fetcher = stage.fetcher + if ii == 0: + # create a subdirectory for the current package@version + archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher))) + name = spec.format("$_$@") + else: + resource = stage.resource + archive_path = join_path(subdir, suggest_archive_basename(resource)) + name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@")) + subdir = os.path.dirname(archive_path) + mkdirp(subdir) - return (present, mirrored, error) + if os.path.exists(archive_path): + tty.msg("{name} : already added".format(name=name)) + else: + spec_exists_in_mirror = False + fetcher.fetch() + if not kwargs.get('no_checksum', False): + fetcher.check() + tty.msg("{name} : checksum passed".format(name=name)) + + # Fetchers have to know how to archive their files. Use + # that to move/copy/create an archive in the mirror. + fetcher.archive(archive_path) + tty.msg("{name} : added".format(name=name)) + + if spec_exists_in_mirror: + categories['present'].append(spec) + else: + categories['mirrored'].append(spec) + except Exception as e: + if spack.debug: + sys.excepthook(*sys.exc_info()) + else: + tty.warn("Error while fetching %s." 
% spec.format('$_$@'), e.message) + categories['error'].append(spec) class MirrorError(spack.error.SpackError): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 4fdc582479..29b781a749 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -732,7 +732,7 @@ def do_patch(self): # If we encounter an archive that failed to patch, restage it # so that we can apply all the patches again. if os.path.isfile(bad_file): - tty.msg("Patching failed last time. Restaging.") + tty.msg("Patching failed last time. Restaging.") self.stage.restage() self.stage.chdir_to_source() @@ -912,7 +912,7 @@ def real_work(): % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) print_pkg(self.prefix) - except ProcessError, e: + except ProcessError as e: # Annotate with location of build log. e.build_log = log_path cleanup() From 901e4851b993f73c4bdd92b85b2469d6673b7617 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 2 Mar 2016 16:19:32 +0100 Subject: [PATCH 086/189] _cleanup_dead_links : factored method into a function and put it in llnl.filesystem --- lib/spack/llnl/util/filesystem.py | 19 +++++++++++++++++-- lib/spack/spack/stage.py | 20 ++++---------------- 2 files changed, 21 insertions(+), 18 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 24cfbfde71..4a708b639a 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -25,7 +25,7 @@ __all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree', 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file', - 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink'] + 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'remove_dead_links'] import os import sys @@ -235,7 +235,7 @@ def touchp(path): def force_symlink(src, dest): try: os.symlink(src, dest) - except OSError, e: + except OSError as e: os.remove(dest) os.symlink(src, dest) @@ -339,3 +339,18 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): if order == 'post': yield (source_path, dest_path) + +def remove_dead_links(root): + """ + Removes any dead link that is present in root + + Args: + root: path where to search for dead links + + """ + for file in os.listdir(root): + path = join_path(root, file) + if os.path.islink(path): + real_path = os.path.realpath(path) + if not os.path.exists(real_path): + os.unlink(path) \ No newline at end of file diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 900acd664d..e910643192 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -98,7 +98,7 @@ def __init__(self, url_or_fetch_strategy, **kwargs): # Try to construct here a temporary name for the stage directory # If this is a named stage, then construct a named path. 
self.path = join_path(spack.stage_path, self.name) - + # Flag to decide whether to delete the stage folder on exit or not self.delete_on_exit = True def __enter__(self): @@ -113,20 +113,17 @@ def __enter__(self): """ # Create the top-level stage directory mkdirp(spack.stage_path) - self._cleanup_dead_links() + remove_dead_links(spack.stage_path) - # If this is a temporary stage, them make the temp directory + # If a tmp_root exists then create a directory there and then link it in the stage area, + # otherwise create the stage directory in self.path if self.tmp_root: if self._need_to_create_path(): tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) os.symlink(tmp_dir, self.path) - - # if we're not using a tmp dir, create the stage directly in the - # stage dir, rather than linking to it. else: if self._need_to_create_path(): mkdirp(self.path) - # Make sure we can actually do something with the stage we made. ensure_access(self.path) @@ -151,15 +148,6 @@ def __exit__(self, exc_type, exc_val, exc_tb): if self.delete_on_exit: self.destroy() - def _cleanup_dead_links(self): - """Remove any dead links in the stage directory.""" - for file in os.listdir(spack.stage_path): - path = join_path(spack.stage_path, file) - if os.path.islink(path): - real_path = os.path.realpath(path) - if not os.path.exists(real_path): - os.unlink(path) - def _need_to_create_path(self): """Makes sure nothing weird has happened since the last time we looked at path. Returns True if path already exists and is ok. From 4d63544fe918393de9265f8879bb0199a542cc3b Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 2 Mar 2016 16:55:57 +0100 Subject: [PATCH 087/189] remove_link_tree : moved to llnl.util.filesystem --- lib/spack/llnl/util/filesystem.py | 20 ++++++++++++++++++-- lib/spack/spack/stage.py | 21 +++------------------ 2 files changed, 21 insertions(+), 20 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 4a708b639a..015eeb9aa1 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -25,7 +25,7 @@ __all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree', 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file', - 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'remove_dead_links'] + 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'remove_dead_links', 'remove_linked_tree'] import os import sys @@ -353,4 +353,20 @@ def remove_dead_links(root): if os.path.islink(path): real_path = os.path.realpath(path) if not os.path.exists(real_path): - os.unlink(path) \ No newline at end of file + os.unlink(path) + +def remove_linked_tree(path): + """ + Removes a directory and its contents. If the directory is a symlink, follows the link and removes the real + directory before removing the link. 
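A short sketch of how `remove_dead_links` (added in the previous patch) and the `remove_linked_tree` helper being added here might be combined to clean a stage root; the `purge_stage_area` wrapper and its assumptions about the directory layout are hypothetical, not part of the patch:

```
import os

import spack
from llnl.util.filesystem import join_path, remove_dead_links, remove_linked_tree

def purge_stage_area(root=None):
    # Remove each stage directory (following its symlink into the tmp root,
    # if any), then drop whatever dead links are left behind.
    root = root or spack.stage_path
    for entry in os.listdir(root):
        remove_linked_tree(join_path(root, entry))
    remove_dead_links(root)
```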
+ + Args: + path: directory to be removed + + """ + if os.path.exists(path): + if os.path.islink(path): + shutil.rmtree(os.path.realpath(path), True) + os.unlink(path) + else: + shutil.rmtree(path, True) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index e910643192..e87b822a8f 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -114,15 +114,13 @@ def __enter__(self): # Create the top-level stage directory mkdirp(spack.stage_path) remove_dead_links(spack.stage_path) - # If a tmp_root exists then create a directory there and then link it in the stage area, # otherwise create the stage directory in self.path - if self.tmp_root: - if self._need_to_create_path(): + if self._need_to_create_path(): + if self.tmp_root: tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) os.symlink(tmp_dir, self.path) - else: - if self._need_to_create_path(): + else: mkdirp(self.path) # Make sure we can actually do something with the stage we made. ensure_access(self.path) @@ -436,19 +434,6 @@ def ensure_access(file=spack.stage_path): tty.die("Insufficient permissions for %s" % file) -def remove_linked_tree(path): - """Removes a directory and its contents. If the directory is a symlink, - follows the link and reamoves the real directory before removing the - link. - """ - if os.path.exists(path): - if os.path.islink(path): - shutil.rmtree(os.path.realpath(path), True) - os.unlink(path) - else: - shutil.rmtree(path, True) - - def purge(): """Remove all build directories in the top-level stage path.""" if os.path.isdir(spack.stage_path): From ca41909ec52ab2b8f82ca91421d3882300531ac2 Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 2 Mar 2016 17:19:27 +0100 Subject: [PATCH 088/189] package : removed do_clean() --- lib/spack/spack/cmd/clean.py | 2 +- lib/spack/spack/package.py | 9 +------- lib/spack/spack/test/git_fetch.py | 4 ---- lib/spack/spack/test/hg_fetch.py | 3 --- lib/spack/spack/test/mirror.py | 38 ++++++++++++------------------- lib/spack/spack/test/svn_fetch.py | 4 ---- 6 files changed, 17 insertions(+), 43 deletions(-) diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index 6e7179122c..0c8bd1d528 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -43,4 +43,4 @@ def clean(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: package = spack.repo.get(spec) - package.do_clean() + package.stage.destroy() diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 29b781a749..fa64b2c047 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1138,13 +1138,6 @@ def do_restage(self): """Reverts expanded/checked out source to a pristine state.""" self.stage.restage() - - def do_clean(self): - """Removes the package's build stage and source tarball.""" - if os.path.exists(self.stage.path): - self.stage.destroy() - - def format_doc(self, **kwargs): """Wrap doc string at 72 characters and format nicely""" indent = kwargs.get('indent', 0) @@ -1181,7 +1174,7 @@ def fetch_remote_versions(self): try: return spack.util.web.find_versions_of_archive( *self.all_urls, list_url=self.list_url, list_depth=self.list_depth) - except spack.error.NoNetworkConnectionError, e: + except spack.error.NoNetworkConnectionError as e: tty.die("Package.fetch_versions couldn't connect to:", e.url, e.message) diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index caa076823e..6d6a67a1d3 100644 --- a/lib/spack/spack/test/git_fetch.py +++ 
b/lib/spack/spack/test/git_fetch.py @@ -52,19 +52,15 @@ def setUp(self): spec.concretize() self.pkg = spack.repo.get(spec, new=True) - def tearDown(self): """Destroy the stage space used by this test.""" super(GitFetchTest, self).tearDown() self.repo.destroy() - self.pkg.do_clean() - def assert_rev(self, rev): """Check that the current git revision is equal to the supplied rev.""" self.assertEqual(self.repo.rev_hash('HEAD'), self.repo.rev_hash(rev)) - def try_fetch(self, rev, test_file, args): """Tries to: 1. Fetch the repo using a fetch strategy constructed with diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py index 75aa7ab17e..d884ed78a0 100644 --- a/lib/spack/spack/test/hg_fetch.py +++ b/lib/spack/spack/test/hg_fetch.py @@ -49,13 +49,10 @@ def setUp(self): spec.concretize() self.pkg = spack.repo.get(spec, new=True) - def tearDown(self): """Destroy the stage space used by this test.""" super(HgFetchTest, self).tearDown() self.repo.destroy() - self.pkg.do_clean() - def try_fetch(self, rev, test_file, args): """Tries to: diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index 9e2c631978..f117e04242 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -104,29 +104,21 @@ def check_mirror(self): saved_checksum_setting = spack.do_checksum with pkg.stage: - try: - # Stage the archive from the mirror and cd to it. - spack.do_checksum = False - pkg.do_stage(mirror_only=True) - - # Compare the original repo with the expanded archive - original_path = mock_repo.path - if 'svn' in name: - # have to check out the svn repo to compare. - original_path = join_path(mock_repo.path, 'checked_out') - svn('checkout', mock_repo.url, original_path) - - dcmp = dircmp(original_path, pkg.stage.source_path) - - # make sure there are no new files in the expanded tarball - self.assertFalse(dcmp.right_only) - - # and that all original files are present. - self.assertTrue(all(l in exclude for l in dcmp.left_only)) - - finally: - spack.do_checksum = saved_checksum_setting - pkg.do_clean() + # Stage the archive from the mirror and cd to it. + spack.do_checksum = False + pkg.do_stage(mirror_only=True) + # Compare the original repo with the expanded archive + original_path = mock_repo.path + if 'svn' in name: + # have to check out the svn repo to compare. + original_path = join_path(mock_repo.path, 'checked_out') + svn('checkout', mock_repo.url, original_path) + dcmp = dircmp(original_path, pkg.stage.source_path) + # make sure there are no new files in the expanded tarball + self.assertFalse(dcmp.right_only) + # and that all original files are present. + self.assertTrue(all(l in exclude for l in dcmp.left_only)) + spack.do_checksum = saved_checksum_setting def test_git_mirror(self): diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py index 6ac9e2f343..13a00f5df7 100644 --- a/lib/spack/spack/test/svn_fetch.py +++ b/lib/spack/spack/test/svn_fetch.py @@ -51,13 +51,10 @@ def setUp(self): spec.concretize() self.pkg = spack.repo.get(spec, new=True) - def tearDown(self): """Destroy the stage space used by this test.""" super(SvnFetchTest, self).tearDown() self.repo.destroy() - self.pkg.do_clean() - def assert_rev(self, rev): """Check that the current revision is equal to the supplied rev.""" @@ -70,7 +67,6 @@ def get_rev(): return match.group(1) self.assertEqual(get_rev(), rev) - def try_fetch(self, rev, test_file, args): """Tries to: 1. 
Fetch the repo using a fetch strategy constructed with From 21cd05aad5e8f45a7ebac7b213855a16685d229f Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 2 Mar 2016 17:28:21 +0100 Subject: [PATCH 089/189] package : updated doc --- lib/spack/spack/package.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index fa64b2c047..0214dcd771 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -291,7 +291,6 @@ class SomePackage(Package): .. code-block:: python - p.do_clean() # removes the stage directory entirely p.do_restage() # removes the build directory and # re-expands the archive. From 6bbadbfaac406f9abe81e64f174d8a2b4c9c255e Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Wed, 2 Mar 2016 13:18:43 -0600 Subject: [PATCH 090/189] Add CPATH pointing to include directories. --- lib/spack/docs/basic_usage.rst | 2 +- lib/spack/spack/modules.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 0578f0c8db..f94ac3d2ba 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -896,7 +896,7 @@ Or, similarly with modules, you could type: $ spack load mpich %gcc@4.4.7 These commands will add appropriate directories to your ``PATH``, -``MANPATH``, and ``LD_LIBRARY_PATH``. When you no longer want to use +``MANPATH``, ``CPATH``, and ``LD_LIBRARY_PATH``. When you no longer want to use a package, you can type unload or unuse similarly: .. code-block:: sh diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index c834763564..c27043db8c 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -33,6 +33,7 @@ * /bin directories to be appended to PATH * /lib* directories for LD_LIBRARY_PATH + * /include directories for CPATH * /man* and /share/man* directories for MANPATH * the package prefix for CMAKE_PREFIX_PATH @@ -121,6 +122,7 @@ def add_path(path_name, directory): ('LIBRARY_PATH', self.spec.prefix.lib64), ('LD_LIBRARY_PATH', self.spec.prefix.lib), ('LD_LIBRARY_PATH', self.spec.prefix.lib64), + ('CPATH', self.spec.prefix.include), ('PKG_CONFIG_PATH', join_path(self.spec.prefix.lib, 'pkgconfig')), ('PKG_CONFIG_PATH', join_path(self.spec.prefix.lib64, 'pkgconfig'))]: From 976d0240c467b9ff74333fafcd2ac1e896819a80 Mon Sep 17 00:00:00 2001 From: citibeth Date: Wed, 2 Mar 2016 18:58:47 -0500 Subject: [PATCH 091/189] Removed FIXMEs. --- .../repos/builtin/packages/blitz/package.py | 27 +------------------ .../builtin/packages/netcdf-cxx4/package.py | 21 +-------------- .../packages/netcdf-fortran/package.py | 2 +- .../repos/builtin/packages/proj/package.py | 19 +------------ 4 files changed, 4 insertions(+), 65 deletions(-) diff --git a/var/spack/repos/builtin/packages/blitz/package.py b/var/spack/repos/builtin/packages/blitz/package.py index 82ff634925..9413b276fe 100644 --- a/var/spack/repos/builtin/packages/blitz/package.py +++ b/var/spack/repos/builtin/packages/blitz/package.py @@ -1,40 +1,15 @@ -# FIXME: -# This is a template package file for Spack. We've conveniently -# put "FIXME" labels next to all the things you'll want to change. -# -# Once you've edited all the FIXME's, delete this whole message, -# save this file, and test out your package like this: -# -# spack install blitz -# -# You can always get back here to change things with: -# -# spack edit blitz -# -# See the spack documentation for more information on building -# packages. 
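Once the template boilerplate above is deleted, the packages cleaned up in this patch all reduce to the same minimal autotools recipe; a generic sketch in which the class name, homepage, URL, and checksum are placeholders:

```
from spack import *

class Example(Package):
    """Hypothetical minimal autotools-based package."""
    homepage = "http://example.com/example"
    url      = "http://example.com/example-1.0.0.tar.gz"

    version('1.0.0', '00000000000000000000000000000000')  # placeholder md5

    def install(self, spec, prefix):
        configure('--prefix=%s' % prefix)
        make()
        make("install")
```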
-# from spack import * class Blitz(Package): """N-dimensional arrays for C++""" homepage = "http://github.com/blitzpp/blitz" - -# This version doesn't have the configure script generated yet. url = "https://github.com/blitzpp/blitz/tarball/1.0.0" -#http://prdownloads.sourceforge.net/%(namelower)s version('1.0.0', '9f040b9827fe22228a892603671a77af') - # FIXME: Add dependencies if this package requires them. - # depends_on("foo") + # No dependencies def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. - # FIXME: Spack couldn't guess one, so here are some options: configure('--prefix=%s' % prefix) - # cmake('.', *std_cmake_args) - - # FIXME: Add logic to build and install here make() make("install") diff --git a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py index 9d70eab05f..8d51a10679 100644 --- a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py +++ b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py @@ -1,19 +1,3 @@ -# FIXME: -# This is a template package file for Spack. We've conveniently -# put "FIXME" labels next to all the things you'll want to change. -# -# Once you've edited all the FIXME's, delete this whole message, -# save this file, and test out your package like this: -# -# spack install netcdf-cxx4 -# -# You can always get back here to change things with: -# -# spack edit netcdf-cxx4 -# -# See the spack documentation for more information on building -# packages. -# from spack import * class NetcdfCxx4(Package): @@ -25,17 +9,14 @@ class NetcdfCxx4(Package): variant('mpi', default=True, description='Enables MPI parallelism') -# variant('hdf4', default=False, description="Enable HDF4 support") - # NetCDF-CXX4 doesn't really depend (directly) on MPI. However... this + # netcdf-cxx4 doesn't really depend (directly) on MPI. However... this # depndency ensures taht the right version of MPI is selected (eg: ^openmpi) depends_on('mpi', when='+mpi') depends_on('netcdf') def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. configure('--prefix=%s' % prefix) - # FIXME: Add logic to build and install here make() make("install") diff --git a/var/spack/repos/builtin/packages/netcdf-fortran/package.py b/var/spack/repos/builtin/packages/netcdf-fortran/package.py index 8e5c8ecc3a..9e4aee95fb 100644 --- a/var/spack/repos/builtin/packages/netcdf-fortran/package.py +++ b/var/spack/repos/builtin/packages/netcdf-fortran/package.py @@ -10,7 +10,7 @@ class NetcdfFortran(Package): variant('mpi', default=True, description='Enables MPI parallelism') - # NetCDF-CXX4 doesn't really depend (directly) on MPI. However... this + # netcdf-fortran doesn't really depend (directly) on MPI. However... this # depndency ensures taht the right version of MPI is selected (eg: ^openmpi) depends_on('mpi', when='+mpi') depends_on('netcdf') diff --git a/var/spack/repos/builtin/packages/proj/package.py b/var/spack/repos/builtin/packages/proj/package.py index 4a0d3feac7..797772f4f6 100644 --- a/var/spack/repos/builtin/packages/proj/package.py +++ b/var/spack/repos/builtin/packages/proj/package.py @@ -1,19 +1,3 @@ -# FIXME: -# This is a template package file for Spack. We've conveniently -# put "FIXME" labels next to all the things you'll want to change. 
-# -# Once you've edited all the FIXME's, delete this whole message, -# save this file, and test out your package like this: -# -# spack install proj -# -# You can always get back here to change things with: -# -# spack edit proj -# -# See the spack documentation for more information on building -# packages. -# from spack import * class Proj(Package): @@ -27,8 +11,7 @@ class Proj(Package): version('4.7.0', '927d34623b52e0209ba2bfcca18fe8cd') version('4.6.1', '7dbaab8431ad50c25669fd3fb28dc493') - # FIXME: Add dependencies if this package requires them. - # depends_on("foo") + # No dependencies def install(self, spec, prefix): configure('--prefix=%s' % prefix) From 717bcb862428a1b1a3b9c70f925c3fe6c325f622 Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 3 Mar 2016 09:17:49 +0100 Subject: [PATCH 092/189] test : optimized import statements --- lib/spack/spack/test/concretize.py | 2 -- lib/spack/spack/test/config.py | 6 ++--- lib/spack/spack/test/configure_guess.py | 9 ++------ lib/spack/spack/test/database.py | 11 +++++---- lib/spack/spack/test/directory_layout.py | 13 +++++------ lib/spack/spack/test/git_fetch.py | 11 ++------- lib/spack/spack/test/hg_fetch.py | 10 +++------ lib/spack/spack/test/install.py | 9 ++------ lib/spack/spack/test/link_tree.py | 7 ++---- lib/spack/spack/test/lock.py | 8 +++---- lib/spack/spack/test/make_executable.py | 6 ++--- lib/spack/spack/test/mirror.py | 5 ++--- lib/spack/spack/test/mock_packages_test.py | 8 +++---- lib/spack/spack/test/mock_repo.py | 4 ---- lib/spack/spack/test/multimethod.py | 5 +---- lib/spack/spack/test/namespace_trie.py | 1 + lib/spack/spack/test/optional_deps.py | 4 +--- lib/spack/spack/test/packages.py | 26 ++++++---------------- lib/spack/spack/test/python_version.py | 3 +-- lib/spack/spack/test/spec_dag.py | 2 -- lib/spack/spack/test/spec_semantics.py | 1 - lib/spack/spack/test/spec_syntax.py | 3 ++- lib/spack/spack/test/stage.py | 8 +++---- lib/spack/spack/test/svn_fetch.py | 14 ++++-------- lib/spack/spack/test/tally_plugin.py | 4 ++-- lib/spack/spack/test/unit_install.py | 3 ++- lib/spack/spack/test/url_extrapolate.py | 3 --- lib/spack/spack/test/url_parse.py | 2 +- lib/spack/spack/test/url_substitution.py | 1 - lib/spack/spack/test/versions.py | 1 + lib/spack/spack/test/yaml.py | 1 + 31 files changed, 62 insertions(+), 129 deletions(-) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 7f2938aec5..794344fb6a 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -22,8 +22,6 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest - import spack from spack.spec import Spec, CompilerSpec from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index d8be5a855b..0562d2d620 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -22,13 +22,13 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest -import shutil import os +import shutil from tempfile import mkdtemp -from ordereddict_backport import OrderedDict + import spack import spack.config +from ordereddict_backport import OrderedDict from 
spack.test.mock_packages_test import * # Some sample compiler config data diff --git a/lib/spack/spack/test/configure_guess.py b/lib/spack/spack/test/configure_guess.py index bc2332acc2..2440d120e5 100644 --- a/lib/spack/spack/test/configure_guess.py +++ b/lib/spack/spack/test/configure_guess.py @@ -23,20 +23,15 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import unittest import shutil import tempfile +import unittest from llnl.util.filesystem import * - from spack.cmd.create import ConfigureGuesser from spack.stage import Stage - -from spack.fetch_strategy import URLFetchStrategy -from spack.directory_layout import YamlDirectoryLayout -from spack.util.executable import which from spack.test.mock_packages_test import * -from spack.test.mock_repo import MockArchive +from spack.util.executable import which class InstallTest(unittest.TestCase): diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 0205f4b8ce..9a57e1f03e 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -26,19 +26,18 @@ These tests check the database is functioning properly, both in memory and in its file """ -import tempfile -import shutil import multiprocessing - -from llnl.util.lock import * -from llnl.util.filesystem import join_path +import shutil +import tempfile import spack +from llnl.util.filesystem import join_path +from llnl.util.lock import * +from llnl.util.tty.colify import colify from spack.database import Database from spack.directory_layout import YamlDirectoryLayout from spack.test.mock_packages_test import * -from llnl.util.tty.colify import colify def _print_ref_counts(): """Print out all ref counts for the graph used here, for debugging""" diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py index 925cb648ed..d814572d4a 100644 --- a/lib/spack/spack/test/directory_layout.py +++ b/lib/spack/spack/test/directory_layout.py @@ -25,20 +25,17 @@ """\ This test verifies that the Spack directory layout works properly. 
""" -import unittest -import tempfile -import shutil import os - -from llnl.util.filesystem import * +import shutil +import tempfile import spack -from spack.spec import Spec -from spack.repository import RepoPath +from llnl.util.filesystem import * from spack.directory_layout import YamlDirectoryLayout +from spack.repository import RepoPath +from spack.spec import Spec from spack.test.mock_packages_test import * - # number of packages to test (to reduce test time) max_packages = 10 diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index 6d6a67a1d3..3578044116 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -23,19 +23,12 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import unittest -import shutil -import tempfile - -from llnl.util.filesystem import * import spack -from spack.version import ver -from spack.stage import Stage -from spack.util.executable import which - +from llnl.util.filesystem import * from spack.test.mock_packages_test import * from spack.test.mock_repo import MockGitRepo +from spack.version import ver class GitFetchTest(MockPackagesTest): diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py index d884ed78a0..b8a0c1ec46 100644 --- a/lib/spack/spack/test/hg_fetch.py +++ b/lib/spack/spack/test/hg_fetch.py @@ -23,16 +23,12 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import unittest - -from llnl.util.filesystem import * - import spack + from spack.version import ver -from spack.stage import Stage -from spack.util.executable import which -from spack.test.mock_packages_test import * from spack.test.mock_repo import MockHgRepo +from llnl.util.filesystem import * +from spack.test.mock_packages_test import * class HgFetchTest(MockPackagesTest): diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 8863d13c42..8297893f01 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -22,18 +22,13 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os -import unittest import shutil import tempfile -from llnl.util.filesystem import * - import spack -from spack.stage import Stage -from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite +from llnl.util.filesystem import * from spack.directory_layout import YamlDirectoryLayout -from spack.util.executable import which +from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite from spack.test.mock_packages_test import * from spack.test.mock_repo import MockArchive diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py index 7b67e873dd..ee37e765c7 100644 --- a/lib/spack/spack/test/link_tree.py +++ b/lib/spack/spack/test/link_tree.py @@ -24,8 +24,6 @@ ############################################################################## import os import unittest -import shutil -import tempfile from llnl.util.filesystem import * from llnl.util.link_tree import LinkTree @@ -38,8 +36,7 @@ class LinkTreeTest(unittest.TestCase): def setUp(self): self.stage = Stage('link-tree-test') - # FIXME : possibly this test needs to be refactored to avoid the 
explicit call to __enter__ and __exit__ - self.stage.__enter__() + self.stage.create() with working_dir(self.stage.path): touchp('source/1') @@ -54,7 +51,7 @@ def setUp(self): self.link_tree = LinkTree(source_path) def tearDown(self): - self.stage.__exit__(None, None, None) + self.stage.destroy() def check_file_link(self, filename): diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py index bc68df01db..3b11d18da4 100644 --- a/lib/spack/spack/test/lock.py +++ b/lib/spack/spack/test/lock.py @@ -25,15 +25,13 @@ """ These tests ensure that our lock works correctly. """ -import unittest -import os -import tempfile import shutil +import tempfile +import unittest from multiprocessing import Process -from llnl.util.lock import * from llnl.util.filesystem import join_path, touch - +from llnl.util.lock import * from spack.util.multiproc import Barrier # This is the longest a failed test will take, as the barriers will diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py index d568a28d44..a2606acf19 100644 --- a/lib/spack/spack/test/make_executable.py +++ b/lib/spack/spack/test/make_executable.py @@ -28,13 +28,13 @@ This just tests whether the right args are getting passed to make. """ import os -import unittest -import tempfile import shutil +import tempfile +import unittest from llnl.util.filesystem import * -from spack.util.environment import path_put_first from spack.build_environment import MakeExecutable +from spack.util.environment import path_put_first class MakeExecutableTest(unittest.TestCase): diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index f117e04242..e707adfe9d 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -23,11 +23,10 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -from filecmp import dircmp - import spack import spack.mirror -from spack.util.compression import decompressor_for + +from filecmp import dircmp from spack.test.mock_packages_test import * from spack.test.mock_repo import * diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index e9f1f95df5..0b8867b61e 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -22,17 +22,15 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys import os import shutil -import unittest import tempfile -from ordereddict_backport import OrderedDict - -from llnl.util.filesystem import mkdirp +import unittest import spack import spack.config +from llnl.util.filesystem import mkdirp +from ordereddict_backport import OrderedDict from spack.repository import RepoPath from spack.spec import Spec diff --git a/lib/spack/spack/test/mock_repo.py b/lib/spack/spack/test/mock_repo.py index ed94023b0e..a8bdfb5571 100644 --- a/lib/spack/spack/test/mock_repo.py +++ b/lib/spack/spack/test/mock_repo.py @@ -26,13 +26,9 @@ import shutil from llnl.util.filesystem import * - -import spack -from spack.version import ver from spack.stage import Stage from spack.util.executable import which - # # VCS Systems used by mock repo code. 
# diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py index 7bf4ff0a0a..2d4b8cd584 100644 --- a/lib/spack/spack/test/multimethod.py +++ b/lib/spack/spack/test/multimethod.py @@ -25,14 +25,11 @@ """ Test for multi_method dispatch. """ -import unittest import spack from spack.multimethod import * -from spack.version import * -from spack.spec import Spec -from spack.multimethod import when from spack.test.mock_packages_test import * +from spack.version import * class MultiMethodTest(MockPackagesTest): diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py index d0d809004d..83fb34bf76 100644 --- a/lib/spack/spack/test/namespace_trie.py +++ b/lib/spack/spack/test/namespace_trie.py @@ -23,6 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import unittest + from spack.util.naming import NamespaceTrie diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py index ebd7281999..55f35ea4c9 100644 --- a/lib/spack/spack/test/optional_deps.py +++ b/lib/spack/spack/test/optional_deps.py @@ -22,10 +22,8 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest -import spack -from spack.spec import Spec, CompilerSpec +from spack.spec import Spec from spack.test.mock_packages_test import * class ConcretizeTest(MockPackagesTest): diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index 83984dc5f6..07ff0b21af 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -22,14 +22,12 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest - -from llnl.util.filesystem import join_path import spack +from llnl.util.filesystem import join_path from spack.repository import Repo -from spack.util.naming import mod_to_class from spack.test.mock_packages_test import * +from spack.util.naming import mod_to_class class PackagesTest(MockPackagesTest): @@ -77,30 +75,20 @@ def test_package_class_names(self): # def test_import_package(self): - import spack.pkg.builtin.mock.mpich + pass def test_import_package_as(self): - import spack.pkg.builtin.mock.mpich as mp + pass def test_import_class_from_package(self): - from spack.pkg.builtin.mock.mpich import Mpich + pass def test_import_module_from_package(self): - from spack.pkg.builtin.mock import mpich + pass def test_import_namespace_container_modules(self): - import spack.pkg - import spack.pkg as p - from spack import pkg - - import spack.pkg.builtin - import spack.pkg.builtin as b - from spack.pkg import builtin - - import spack.pkg.builtin.mock - import spack.pkg.builtin.mock as m - from spack.pkg.builtin import mock + pass diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py index d74d3b9b7d..4294975304 100644 --- a/lib/spack/spack/test/python_version.py +++ b/lib/spack/spack/test/python_version.py @@ -28,12 +28,11 @@ Spack was originally 2.7, but enough systems in 2014 are still using 2.6 on their frontend nodes that we need 2.6 to get adopted. 
""" -import unittest import os import re +import unittest import llnl.util.tty as tty - import pyqver2 import spack diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 632f777cde..5e6162b6e6 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -31,8 +31,6 @@ import spack import spack.package -from llnl.util.lang import list_modules - from spack.spec import Spec from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 44a09cbd7f..8c33d1ff6e 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -22,7 +22,6 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest from spack.spec import * from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index 1daaa4be8f..6e08e30e13 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -23,9 +23,10 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import unittest + import spack.spec -from spack.spec import * from spack.parse import Token +from spack.spec import * # Sample output for a complex lexing. complex_lex = [Token(ID, 'mvapich_foo'), diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index a7314eba4c..dbcf89d864 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -25,15 +25,13 @@ """\ Test that the Stage class works correctly. 
""" -import unittest -import shutil import os -import getpass +import shutil +import unittest from contextlib import * -from llnl.util.filesystem import * - import spack +from llnl.util.filesystem import * from spack.stage import Stage from spack.util.executable import which diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py index 13a00f5df7..1ee4ee700e 100644 --- a/lib/spack/spack/test/svn_fetch.py +++ b/lib/spack/spack/test/svn_fetch.py @@ -24,18 +24,12 @@ ############################################################################## import os import re -import unittest -import shutil -import tempfile - -from llnl.util.filesystem import * - import spack -from spack.version import ver -from spack.stage import Stage -from spack.util.executable import which -from spack.test.mock_packages_test import * + from spack.test.mock_repo import svn, MockSvnRepo +from spack.version import ver +from spack.test.mock_packages_test import * +from llnl.util.filesystem import * class SvnFetchTest(MockPackagesTest): diff --git a/lib/spack/spack/test/tally_plugin.py b/lib/spack/spack/test/tally_plugin.py index 9ca898c47c..eb1e4a3240 100644 --- a/lib/spack/spack/test/tally_plugin.py +++ b/lib/spack/spack/test/tally_plugin.py @@ -22,10 +22,10 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -from nose.plugins import Plugin - import os +from nose.plugins import Plugin + class Tally(Plugin): name = 'tally' diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py index ccc409dd60..18615b7efe 100644 --- a/lib/spack/spack/test/unit_install.py +++ b/lib/spack/spack/test/unit_install.py @@ -22,10 +22,11 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest import itertools +import unittest import spack + test_install = __import__("spack.cmd.test-install", fromlist=["BuildId", "create_test_output", "TestResult"]) diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py index 87adf89401..068a335b49 100644 --- a/lib/spack/spack/test/url_extrapolate.py +++ b/lib/spack/spack/test/url_extrapolate.py @@ -25,10 +25,7 @@ """\ Tests ability of spack to extrapolate URL versions from existing versions. """ -import spack import spack.url as url -from spack.spec import Spec -from spack.version import ver from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py index efde7c0c73..561d4658a1 100644 --- a/lib/spack/spack/test/url_parse.py +++ b/lib/spack/spack/test/url_parse.py @@ -27,8 +27,8 @@ detection in Homebrew. 
""" import unittest + import spack.url as url -from pprint import pprint class UrlParseTest(unittest.TestCase): diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py index aec8baf4ea..2be38af0d3 100644 --- a/lib/spack/spack/test/url_substitution.py +++ b/lib/spack/spack/test/url_substitution.py @@ -27,7 +27,6 @@ """ import unittest -import spack import spack.url as url diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py index 108450e098..2732006eb3 100644 --- a/lib/spack/spack/test/versions.py +++ b/lib/spack/spack/test/versions.py @@ -28,6 +28,7 @@ where it makes sense. """ import unittest + from spack.version import * diff --git a/lib/spack/spack/test/yaml.py b/lib/spack/spack/test/yaml.py index 5a357b8e69..b930c022f2 100644 --- a/lib/spack/spack/test/yaml.py +++ b/lib/spack/spack/test/yaml.py @@ -26,6 +26,7 @@ Test Spack's custom YAML format. """ import unittest + import spack.util.spack_yaml as syaml test_file = """\ From 353d12d2e6e24c0f62795dd9e649a801b566c4d8 Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 3 Mar 2016 09:18:47 +0100 Subject: [PATCH 093/189] stage : added create function as decided in #477 comments --- lib/spack/spack/stage.py | 50 ++++++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 20 deletions(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index e87b822a8f..5d8b09fa9e 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -105,26 +105,10 @@ def __enter__(self): """ Entering a stage context will create the stage directory - If self.tmp_root evaluates to False, the stage directory is created directly under spack.stage_path, otherwise - this will attempt to create a stage in a temporary directory and link it into spack.stage_path. - - Spack will use the first writable location in spack.tmp_dirs to create a stage. If there is no valid location - in tmp_dirs, fall back to making the stage inside spack.stage_path. + Returns: + self """ - # Create the top-level stage directory - mkdirp(spack.stage_path) - remove_dead_links(spack.stage_path) - # If a tmp_root exists then create a directory there and then link it in the stage area, - # otherwise create the stage directory in self.path - if self._need_to_create_path(): - if self.tmp_root: - tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) - os.symlink(tmp_dir, self.path) - else: - mkdirp(self.path) - # Make sure we can actually do something with the stage we made. - ensure_access(self.path) - + self.create() return self def __exit__(self, exc_type, exc_val, exc_tb): @@ -314,8 +298,34 @@ def restage(self): """ self.fetcher.reset() + def create(self): + """ + Creates the stage directory + + If self.tmp_root evaluates to False, the stage directory is created directly under spack.stage_path, otherwise + this will attempt to create a stage in a temporary directory and link it into spack.stage_path. + + Spack will use the first writable location in spack.tmp_dirs to create a stage. If there is no valid location + in tmp_dirs, fall back to making the stage inside spack.stage_path. 
+ """ + # Create the top-level stage directory + mkdirp(spack.stage_path) + remove_dead_links(spack.stage_path) + # If a tmp_root exists then create a directory there and then link it in the stage area, + # otherwise create the stage directory in self.path + if self._need_to_create_path(): + if self.tmp_root: + tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) + os.symlink(tmp_dir, self.path) + else: + mkdirp(self.path) + # Make sure we can actually do something with the stage we made. + ensure_access(self.path) + def destroy(self): - """Remove this stage directory.""" + """ + Removes this stage directory + """ remove_linked_tree(self.path) # Make sure we don't end up in a removed directory From 5ef36215569e1151164ecb630011de425387732a Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 3 Mar 2016 10:10:30 +0100 Subject: [PATCH 094/189] stage : minor syntax fix --- lib/spack/spack/stage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 5d8b09fa9e..985043c103 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -246,7 +246,7 @@ def fetch(self, mirror_only=False): self.fetcher = fetcher self.fetcher.fetch() break - except spack.error.SpackError, e: + except spack.error.SpackError as e: tty.msg("Fetching from %s failed." % fetcher) tty.debug(e) continue From db92699c02f9aaf435d6bc073c89cd2c75d8824c Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 3 Mar 2016 11:05:36 +0100 Subject: [PATCH 095/189] test : fixed tests that were broken by import optimization --- lib/spack/spack/test/packages.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index 07ff0b21af..f0b5e05f3b 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -75,20 +75,30 @@ def test_package_class_names(self): # def test_import_package(self): - pass + import spack.pkg.builtin.mock.mpich def test_import_package_as(self): - pass + import spack.pkg.builtin.mock.mpich as mp def test_import_class_from_package(self): - pass + from spack.pkg.builtin.mock.mpich import Mpich def test_import_module_from_package(self): - pass + from spack.pkg.builtin.mock import mpich def test_import_namespace_container_modules(self): - pass + import spack.pkg + import spack.pkg as p + from spack import pkg + + import spack.pkg.builtin + import spack.pkg.builtin as b + from spack.pkg import builtin + + import spack.pkg.builtin.mock + import spack.pkg.builtin.mock as m + from spack.pkg.builtin import mock From a531a4f5e306388fcb6aa3c2f9db7bb93b530242 Mon Sep 17 00:00:00 2001 From: Luigi Calori Date: Thu, 3 Mar 2016 18:00:50 +0100 Subject: [PATCH 096/189] help spack versions paraview to get versions without v --- lib/spack/spack/url.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index 02c0b83e26..ad551a6ded 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -225,7 +225,7 @@ def parse_version_offset(path): (r'_((\d+\.)+\d+[a-z]?)[.]orig$', stem), # e.g. http://www.openssl.org/source/openssl-0.9.8s.tar.gz - (r'-([^-]+(-alpha|-beta)?)', stem), + (r'-v?([^-]+(-alpha|-beta)?)', stem), # e.g. 
astyle_1.23_macosx.tar.gz (r'_([^_]+(_alpha|_beta)?)', stem), From 7183db1b7df3bce7e5f174ce38a50476c2e3b218 Mon Sep 17 00:00:00 2001 From: Scott Pakin Date: Thu, 3 Mar 2016 11:30:57 -0700 Subject: [PATCH 097/189] Added LLVM 3.7.1 support --- .../repos/builtin/packages/llvm/package.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 934d994bd3..280e400f69 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -1,5 +1,5 @@ ############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Copyright (c) 2016, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. @@ -34,7 +34,7 @@ class Llvm(Package): it is the full name of the project. """ homepage = 'http://llvm.org/' - url = 'http://llvm.org/releases/3.7.0/llvm-3.7.0.src.tar.xz' + url = 'http://llvm.org/releases/3.7.1/llvm-3.7.1.src.tar.xz' version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') # currently required by mesa package @@ -132,6 +132,21 @@ class Llvm(Package): 'llvm-libunwind' : 'http://llvm.org/svn/llvm-project/libunwind/trunk', } }, + { + 'version' : '3.7.1', + 'md5':'bf8b3a2c79e61212c5409041dfdbd319', + 'resources' : { + 'compiler-rt' : '1c6975daf30bb3b0473b53c3a1a6ff01', + 'openmp' : 'b4ad08cda4e5c22e42b66062b140438e', + 'polly' : '3a2a7367002740881637f4d47bca4dc3', + 'libcxx' : 'f9c43fa552a10e14ff53b94d04bea140', + 'libcxxabi' : '52d925afac9f97e9dcac90745255c169', + 'clang' : '0acd026b5529164197563d135a8fd83e', + 'clang-tools-extra' : '5d49ff745037f061a7c86aeb6a24c3d2', + 'lldb' : 'a106d8a0d21fc84d76953822fbaf3398', + 'llvm-libunwind' : '814bd52c9247c5d04629658fbcb3ab8c', + } + }, { 'version' : '3.7.0', 'md5':'b98b9495e5655a672d6cb83e1a180f8e', From c5d9ee89246b3d2aeddb756a04588424051d3295 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 3 Mar 2016 14:41:20 -0800 Subject: [PATCH 098/189] Revert "refactoring proposal : turn Stage into a context manager" --- lib/spack/llnl/util/filesystem.py | 35 +--- lib/spack/spack/cmd/clean.py | 2 +- lib/spack/spack/mirror.py | 55 +++--- lib/spack/spack/package.py | 176 +++++++++---------- lib/spack/spack/stage.py | 186 +++++++++++---------- lib/spack/spack/test/concretize.py | 2 + lib/spack/spack/test/config.py | 6 +- lib/spack/spack/test/configure_guess.py | 21 ++- lib/spack/spack/test/database.py | 11 +- lib/spack/spack/test/directory_layout.py | 13 +- lib/spack/spack/test/git_fetch.py | 46 +++-- lib/spack/spack/test/hg_fetch.py | 44 ++--- lib/spack/spack/test/install.py | 9 +- lib/spack/spack/test/link_tree.py | 7 +- lib/spack/spack/test/lock.py | 8 +- lib/spack/spack/test/make_executable.py | 6 +- lib/spack/spack/test/mirror.py | 69 ++++---- lib/spack/spack/test/mock_packages_test.py | 8 +- lib/spack/spack/test/mock_repo.py | 4 + lib/spack/spack/test/multimethod.py | 5 +- lib/spack/spack/test/namespace_trie.py | 1 - lib/spack/spack/test/optional_deps.py | 4 +- lib/spack/spack/test/packages.py | 6 +- lib/spack/spack/test/python_version.py | 3 +- lib/spack/spack/test/spec_dag.py | 2 + lib/spack/spack/test/spec_semantics.py | 1 + lib/spack/spack/test/spec_syntax.py | 3 +- lib/spack/spack/test/stage.py | 130 ++++++++------ lib/spack/spack/test/svn_fetch.py | 49 +++--- 
lib/spack/spack/test/tally_plugin.py | 4 +- lib/spack/spack/test/unit_install.py | 3 +- lib/spack/spack/test/url_extrapolate.py | 3 + lib/spack/spack/test/url_parse.py | 2 +- lib/spack/spack/test/url_substitution.py | 1 + lib/spack/spack/test/versions.py | 1 - lib/spack/spack/test/yaml.py | 1 - 36 files changed, 504 insertions(+), 423 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 366237ef8f..da3cf96050 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -25,7 +25,7 @@ __all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree', 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file', - 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'remove_dead_links', 'remove_linked_tree'] + 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink'] import os import sys @@ -240,7 +240,7 @@ def touchp(path): def force_symlink(src, dest): try: os.symlink(src, dest) - except OSError as e: + except OSError, e: os.remove(dest) os.symlink(src, dest) @@ -344,34 +344,3 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): if order == 'post': yield (source_path, dest_path) - -def remove_dead_links(root): - """ - Removes any dead link that is present in root - - Args: - root: path where to search for dead links - - """ - for file in os.listdir(root): - path = join_path(root, file) - if os.path.islink(path): - real_path = os.path.realpath(path) - if not os.path.exists(real_path): - os.unlink(path) - -def remove_linked_tree(path): - """ - Removes a directory and its contents. If the directory is a symlink, follows the link and removes the real - directory before removing the link. - - Args: - path: directory to be removed - - """ - if os.path.exists(path): - if os.path.islink(path): - shutil.rmtree(os.path.realpath(path), True) - os.unlink(path) - else: - shutil.rmtree(path, True) diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index 0c8bd1d528..6e7179122c 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -43,4 +43,4 @@ def clean(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: package = spack.repo.get(spec) - package.stage.destroy() + package.do_clean() diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 5ed7aff176..fa29e20803 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -110,6 +110,7 @@ def suggest_archive_basename(resource): return basename + def create(path, specs, **kwargs): """Create a directory to be used as a spack mirror, and fill it with package archives. @@ -157,29 +158,17 @@ def create(path, specs, **kwargs): "Cannot create directory '%s':" % mirror_root, str(e)) # Things to keep track of while parsing specs. 
- categories = { - 'present': [], - 'mirrored': [], - 'error': [] - } + present = [] + mirrored = [] + error = [] # Iterate through packages and download all the safe tarballs for each of them + everything_already_exists = True for spec in version_specs: - add_single_spec(spec, mirror_root, categories, **kwargs) - - return categories['present'], categories['mirrored'], categories['error'] - - -def add_single_spec(spec, mirror_root, categories, **kwargs): - tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@"))) - spec_exists_in_mirror = True - try: - with spec.package.stage: - # fetcher = stage.fetcher - # fetcher.fetch() - # ... - # fetcher.archive(archive_path) - for ii, stage in enumerate(spec.package.stage): + pkg = spec.package + tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@"))) + try: + for ii, stage in enumerate(pkg.stage): fetcher = stage.fetcher if ii == 0: # create a subdirectory for the current package@version @@ -195,7 +184,7 @@ def add_single_spec(spec, mirror_root, categories, **kwargs): if os.path.exists(archive_path): tty.msg("{name} : already added".format(name=name)) else: - spec_exists_in_mirror = False + everything_already_exists = False fetcher.fetch() if not kwargs.get('no_checksum', False): fetcher.check() @@ -206,16 +195,20 @@ def add_single_spec(spec, mirror_root, categories, **kwargs): fetcher.archive(archive_path) tty.msg("{name} : added".format(name=name)) - if spec_exists_in_mirror: - categories['present'].append(spec) - else: - categories['mirrored'].append(spec) - except Exception as e: - if spack.debug: - sys.excepthook(*sys.exc_info()) - else: - tty.warn("Error while fetching %s." % spec.format('$_$@'), e.message) - categories['error'].append(spec) + if everything_already_exists: + present.append(spec) + else: + mirrored.append(spec) + except Exception, e: + if spack.debug: + sys.excepthook(*sys.exc_info()) + else: + tty.warn("Error while fetching %s." % spec.format('$_$@'), e.message) + error.append(spec) + finally: + pkg.stage.destroy() + + return (present, mirrored, error) class MirrorError(spack.error.SpackError): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 106c546d5c..9f1825ca21 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -293,6 +293,7 @@ class SomePackage(Package): .. code-block:: python + p.do_clean() # removes the stage directory entirely p.do_restage() # removes the build directory and # re-expands the archive. @@ -502,6 +503,7 @@ def fetcher(self): self._fetcher = self._make_fetcher() return self._fetcher + @fetcher.setter def fetcher(self, f): self._fetcher = f @@ -733,7 +735,7 @@ def do_patch(self): # If we encounter an archive that failed to patch, restage it # so that we can apply all the patches again. if os.path.isfile(bad_file): - tty.msg("Patching failed last time. Restaging.") + tty.msg("Patching failed last time. Restaging.") self.stage.restage() self.stage.chdir_to_source() @@ -848,103 +850,102 @@ def do_install(self, make_jobs=make_jobs) start_time = time.time() - with self.stage: - if not fake: - if not skip_patch: - self.do_patch() + if not fake: + if not skip_patch: + self.do_patch() + else: + self.do_stage() + + # create the install directory. The install layout + # handles this in case so that it can use whatever + # package naming scheme it likes. 
+ spack.install_layout.create_install_directory(self.spec) + + def cleanup(): + if not keep_prefix: + # If anything goes wrong, remove the install prefix + self.remove_prefix() + else: + tty.warn("Keeping install prefix in place despite error.", + "Spack will think this package is installed." + + "Manually remove this directory to fix:", + self.prefix, wrap=True) + + + def real_work(): + try: + tty.msg("Building %s." % self.name) + + # Run the pre-install hook in the child process after + # the directory is created. + spack.hooks.pre_install(self) + + # Set up process's build environment before running install. + if fake: + self.do_fake_install() else: - self.do_stage() + # Do the real install in the source directory. + self.stage.chdir_to_source() - # create the install directory. The install layout - # handles this in case so that it can use whatever - # package naming scheme it likes. - spack.install_layout.create_install_directory(self.spec) + # Save the build environment in a file before building. + env_path = join_path(os.getcwd(), 'spack-build.env') - def cleanup(): - if not keep_prefix: - # If anything goes wrong, remove the install prefix - self.remove_prefix() - else: - tty.warn("Keeping install prefix in place despite error.", - "Spack will think this package is installed." + - "Manually remove this directory to fix:", - self.prefix, wrap=True) + # This redirects I/O to a build log (and optionally to the terminal) + log_path = join_path(os.getcwd(), 'spack-build.out') + log_file = open(log_path, 'w') + with log_output(log_file, verbose, sys.stdout.isatty(), True): + dump_environment(env_path) + self.install(self.spec, self.prefix) + # Ensure that something was actually installed. + self._sanity_check_install() - def real_work(): - try: - tty.msg("Building %s." % self.name) + # Move build log into install directory on success + if not fake: + log_install_path = spack.install_layout.build_log_path(self.spec) + env_install_path = spack.install_layout.build_env_path(self.spec) + install(log_path, log_install_path) + install(env_path, env_install_path) - # Run the pre-install hook in the child process after - # the directory is created. - spack.hooks.pre_install(self) + packages_dir = spack.install_layout.build_packages_path(self.spec) + dump_packages(self.spec, packages_dir) - # Set up process's build environment before running install. - if fake: - self.do_fake_install() - else: - # Do the real install in the source directory. - self.stage.chdir_to_source() + # On successful install, remove the stage. + if not keep_stage: + self.stage.destroy() - # Save the build environment in a file before building. - env_path = join_path(os.getcwd(), 'spack-build.env') + # Stop timer. + self._total_time = time.time() - start_time + build_time = self._total_time - self._fetch_time - # This redirects I/O to a build log (and optionally to the terminal) - log_path = join_path(os.getcwd(), 'spack-build.out') - log_file = open(log_path, 'w') - with log_output(log_file, verbose, sys.stdout.isatty(), True): - dump_environment(env_path) - self.install(self.spec, self.prefix) + tty.msg("Successfully installed %s." % self.name, + "Fetch: %s. Build: %s. Total: %s." + % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) + print_pkg(self.prefix) - # Ensure that something was actually installed. - self._sanity_check_install() + except ProcessError, e: + # Annotate with location of build log. 
+ e.build_log = log_path + cleanup() + raise e - # Move build log into install directory on success - if not fake: - log_install_path = spack.install_layout.build_log_path(self.spec) - env_install_path = spack.install_layout.build_env_path(self.spec) - install(log_path, log_install_path) - install(env_path, env_install_path) + except: + # other exceptions just clean up and raise. + cleanup() + raise - packages_dir = spack.install_layout.build_packages_path(self.spec) - dump_packages(self.spec, packages_dir) + # Set parallelism before starting build. + self.make_jobs = make_jobs - # On successful install, remove the stage. - if not keep_stage: - self.stage.destroy() + # Do the build. + spack.build_environment.fork(self, real_work) - # Stop timer. - self._total_time = time.time() - start_time - build_time = self._total_time - self._fetch_time + # note: PARENT of the build process adds the new package to + # the database, so that we don't need to re-read from file. + spack.installed_db.add(self.spec, self.prefix) - tty.msg("Successfully installed %s." % self.name, - "Fetch: %s. Build: %s. Total: %s." - % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) - print_pkg(self.prefix) - - except ProcessError as e: - # Annotate with location of build log. - e.build_log = log_path - cleanup() - raise e - - except: - # other exceptions just clean up and raise. - cleanup() - raise - - # Set parallelism before starting build. - self.make_jobs = make_jobs - - # Do the build. - spack.build_environment.fork(self, real_work) - - # note: PARENT of the build process adds the new package to - # the database, so that we don't need to re-read from file. - spack.installed_db.add(self.spec, self.prefix) - - # Once everything else is done, run post install hooks - spack.hooks.post_install(self) + # Once everything else is done, run post install hooks + spack.hooks.post_install(self) def _sanity_check_install(self): @@ -1148,6 +1149,13 @@ def do_restage(self): """Reverts expanded/checked out source to a pristine state.""" self.stage.restage() + + def do_clean(self): + """Removes the package's build stage and source tarball.""" + if os.path.exists(self.stage.path): + self.stage.destroy() + + def format_doc(self, **kwargs): """Wrap doc string at 72 characters and format nicely""" indent = kwargs.get('indent', 0) @@ -1184,7 +1192,7 @@ def fetch_remote_versions(self): try: return spack.util.web.find_versions_of_archive( *self.all_urls, list_url=self.list_url, list_depth=self.list_depth) - except spack.error.NoNetworkConnectionError as e: + except spack.error.NoNetworkConnectionError, e: tty.die("Package.fetch_versions couldn't connect to:", e.url, e.message) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 985043c103..f217450d42 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -42,26 +42,33 @@ class Stage(object): - """ - A Stage object is a context manager that handles a directory where some source code is downloaded and built - before being installed. It handles fetching the source code, either as an archive to be expanded or by checking - it out of a repository. A stage's lifecycle looks like this: + """A Stage object manages a directory where some source code is + downloaded and built before being installed. It handles + fetching the source code, either as an archive to be expanded + or by checking it out of a repository. 
A stage's lifecycle + looks like this: - ``` - with Stage() as stage: # Context manager creates and destroys the stage directory - fetch() # Fetch a source archive into the stage. - expand_archive() # Expand the source archive. - # Build and install the archive. This is handled by the Package class. - ``` + Stage() + Constructor creates the stage directory. + fetch() + Fetch a source archive into the stage. + expand_archive() + Expand the source archive. + + Build and install the archive. This is handled by the Package class. + destroy() + Remove the stage once the package has been installed. - If spack.use_tmp_stage is True, spack will attempt to create stages in a tmp directory. - Otherwise, stages are created directly in spack.stage_path. + If spack.use_tmp_stage is True, spack will attempt to create stages + in a tmp directory. Otherwise, stages are created directly in + spack.stage_path. - There are two kinds of stages: named and unnamed. Named stages can persist between runs of spack, e.g. if you - fetched a tarball but didn't finish building it, you won't have to fetch it again. + There are two kinds of stages: named and unnamed. Named stages can + persist between runs of spack, e.g. if you fetched a tarball but + didn't finish building it, you won't have to fetch it again. - Unnamed stages are created using standard mkdtemp mechanisms or similar, and are intended to persist for - only one run of spack. + Unnamed stages are created using standard mkdtemp mechanisms or + similar, and are intended to persist for only one run of spack. """ def __init__(self, url_or_fetch_strategy, **kwargs): @@ -89,46 +96,21 @@ def __init__(self, url_or_fetch_strategy, **kwargs): self.default_fetcher = self.fetcher # self.fetcher can change with mirrors. self.skip_checksum_for_mirror = True # used for mirrored archives of repositories. - # TODO : this uses a protected member of tempfile, but seemed the only way to get a temporary name - # TODO : besides, the temporary link name won't be the same as the temporary stage area in tmp_root - self.name = kwargs.get('name') if 'name' in kwargs else STAGE_PREFIX + next(tempfile._get_candidate_names()) + self.name = kwargs.get('name') self.mirror_path = kwargs.get('mirror_path') self.tmp_root = find_tmp_root() - # Try to construct here a temporary name for the stage directory - # If this is a named stage, then construct a named path. 
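To make the lifecycle described in the revised Stage docstring concrete, here is a minimal usage sketch of the non-context-manager interface; the URL and stage name are placeholders, not values from this patch::

    from spack.stage import Stage

    stage = Stage('file:///tmp/foo-1.0.tar.gz', name='foo-example')
    try:
        stage.fetch()             # download the archive into the stage
        stage.expand_archive()    # unpack it alongside the archive
        stage.chdir_to_source()   # cd into the expanded source tree
        # ... build and install; normally the Package class drives this ...
    finally:
        stage.destroy()           # the caller now owns cleanup
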
- self.path = join_path(spack.stage_path, self.name) - # Flag to decide whether to delete the stage folder on exit or not - self.delete_on_exit = True + self.path = None + self._setup() - def __enter__(self): - """ - Entering a stage context will create the stage directory - - Returns: - self - """ - self.create() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """ - Exiting from a stage context will delete the stage directory unless: - - it was explicitly requested not to do so - - an exception has been raised - - Args: - exc_type: exception type - exc_val: exception value - exc_tb: exception traceback - - Returns: - Boolean - """ - self.delete_on_exit = False if exc_type is not None else self.delete_on_exit - - if self.delete_on_exit: - self.destroy() + def _cleanup_dead_links(self): + """Remove any dead links in the stage directory.""" + for file in os.listdir(spack.stage_path): + path = join_path(spack.stage_path, file) + if os.path.islink(path): + real_path = os.path.realpath(path) + if not os.path.exists(path): + os.unlink(path) def _need_to_create_path(self): """Makes sure nothing weird has happened since the last time we @@ -166,6 +148,54 @@ def _need_to_create_path(self): return False + def _setup(self): + """Creates the stage directory. + If spack.use_tmp_stage is False, the stage directory is created + directly under spack.stage_path. + + If spack.use_tmp_stage is True, this will attempt to create a + stage in a temporary directory and link it into spack.stage_path. + Spack will use the first writable location in spack.tmp_dirs to + create a stage. If there is no valid location in tmp_dirs, fall + back to making the stage inside spack.stage_path. + """ + # Create the top-level stage directory + mkdirp(spack.stage_path) + self._cleanup_dead_links() + + # If this is a named stage, then construct a named path. + if self.name is not None: + self.path = join_path(spack.stage_path, self.name) + + # If this is a temporary stage, them make the temp directory + tmp_dir = None + if self.tmp_root: + if self.name is None: + # Unnamed tmp root. Link the path in + tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) + self.name = os.path.basename(tmp_dir) + self.path = join_path(spack.stage_path, self.name) + if self._need_to_create_path(): + os.symlink(tmp_dir, self.path) + + else: + if self._need_to_create_path(): + tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) + os.symlink(tmp_dir, self.path) + + # if we're not using a tmp dir, create the stage directly in the + # stage dir, rather than linking to it. + else: + if self.name is None: + self.path = tempfile.mkdtemp('', STAGE_PREFIX, spack.stage_path) + self.name = os.path.basename(self.path) + else: + if self._need_to_create_path(): + mkdirp(self.path) + + # Make sure we can actually do something with the stage we made. + ensure_access(self.path) + @property def archive_file(self): """Path to the source archive within this stage directory.""" @@ -246,7 +276,7 @@ def fetch(self, mirror_only=False): self.fetcher = fetcher self.fetcher.fetch() break - except spack.error.SpackError as e: + except spack.error.SpackError, e: tty.msg("Fetching from %s failed." 
% fetcher) tty.debug(e) continue @@ -298,34 +328,8 @@ def restage(self): """ self.fetcher.reset() - def create(self): - """ - Creates the stage directory - - If self.tmp_root evaluates to False, the stage directory is created directly under spack.stage_path, otherwise - this will attempt to create a stage in a temporary directory and link it into spack.stage_path. - - Spack will use the first writable location in spack.tmp_dirs to create a stage. If there is no valid location - in tmp_dirs, fall back to making the stage inside spack.stage_path. - """ - # Create the top-level stage directory - mkdirp(spack.stage_path) - remove_dead_links(spack.stage_path) - # If a tmp_root exists then create a directory there and then link it in the stage area, - # otherwise create the stage directory in self.path - if self._need_to_create_path(): - if self.tmp_root: - tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) - os.symlink(tmp_dir, self.path) - else: - mkdirp(self.path) - # Make sure we can actually do something with the stage we made. - ensure_access(self.path) - def destroy(self): - """ - Removes this stage directory - """ + """Remove this stage directory.""" remove_linked_tree(self.path) # Make sure we don't end up in a removed directory @@ -385,15 +389,6 @@ def source_path(self): def path(self): return self[0].path - def __enter__(self): - for item in self: - item.__enter__() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - for item in reversed(self): - item.__exit__(exc_type, exc_val, exc_tb) - def chdir_to_source(self): return self[0].chdir_to_source() @@ -444,6 +439,19 @@ def ensure_access(file=spack.stage_path): tty.die("Insufficient permissions for %s" % file) +def remove_linked_tree(path): + """Removes a directory and its contents. If the directory is a symlink, + follows the link and reamoves the real directory before removing the + link. 
+ """ + if os.path.exists(path): + if os.path.islink(path): + shutil.rmtree(os.path.realpath(path), True) + os.unlink(path) + else: + shutil.rmtree(path, True) + + def purge(): """Remove all build directories in the top-level stage path.""" if os.path.isdir(spack.stage_path): diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 794344fb6a..7f2938aec5 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -22,6 +22,8 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import unittest + import spack from spack.spec import Spec, CompilerSpec from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index 0562d2d620..d8be5a855b 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -22,13 +22,13 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os +import unittest import shutil +import os from tempfile import mkdtemp - +from ordereddict_backport import OrderedDict import spack import spack.config -from ordereddict_backport import OrderedDict from spack.test.mock_packages_test import * # Some sample compiler config data diff --git a/lib/spack/spack/test/configure_guess.py b/lib/spack/spack/test/configure_guess.py index 2440d120e5..a4e8565b62 100644 --- a/lib/spack/spack/test/configure_guess.py +++ b/lib/spack/spack/test/configure_guess.py @@ -23,15 +23,20 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +import unittest import shutil import tempfile -import unittest from llnl.util.filesystem import * + from spack.cmd.create import ConfigureGuesser from spack.stage import Stage -from spack.test.mock_packages_test import * + +from spack.fetch_strategy import URLFetchStrategy +from spack.directory_layout import YamlDirectoryLayout from spack.util.executable import which +from spack.test.mock_packages_test import * +from spack.test.mock_repo import MockArchive class InstallTest(unittest.TestCase): @@ -47,6 +52,8 @@ def setUp(self): def tearDown(self): shutil.rmtree(self.tmpdir, ignore_errors=True) + if self.stage: + self.stage.destroy() os.chdir(self.orig_dir) @@ -57,12 +64,12 @@ def check_archive(self, filename, system): url = 'file://' + join_path(os.getcwd(), 'archive.tar.gz') print url - with Stage(url) as stage: - stage.fetch() + self.stage = Stage(url) + self.stage.fetch() - guesser = ConfigureGuesser() - guesser(stage) - self.assertEqual(system, guesser.build_system) + guesser = ConfigureGuesser() + guesser(self.stage) + self.assertEqual(system, guesser.build_system) def test_python(self): diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 9a57e1f03e..0205f4b8ce 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -26,18 +26,19 @@ These tests check the database is functioning properly, both in memory and in its file """ -import multiprocessing -import shutil import tempfile +import shutil +import multiprocessing + +from llnl.util.lock import * +from llnl.util.filesystem import join_path import spack -from 
llnl.util.filesystem import join_path -from llnl.util.lock import * -from llnl.util.tty.colify import colify from spack.database import Database from spack.directory_layout import YamlDirectoryLayout from spack.test.mock_packages_test import * +from llnl.util.tty.colify import colify def _print_ref_counts(): """Print out all ref counts for the graph used here, for debugging""" diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py index d814572d4a..925cb648ed 100644 --- a/lib/spack/spack/test/directory_layout.py +++ b/lib/spack/spack/test/directory_layout.py @@ -25,17 +25,20 @@ """\ This test verifies that the Spack directory layout works properly. """ -import os -import shutil +import unittest import tempfile +import shutil +import os + +from llnl.util.filesystem import * import spack -from llnl.util.filesystem import * -from spack.directory_layout import YamlDirectoryLayout -from spack.repository import RepoPath from spack.spec import Spec +from spack.repository import RepoPath +from spack.directory_layout import YamlDirectoryLayout from spack.test.mock_packages_test import * + # number of packages to test (to reduce test time) max_packages = 10 diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index 3578044116..d84433176a 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -23,12 +23,19 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +import unittest +import shutil +import tempfile + +from llnl.util.filesystem import * import spack -from llnl.util.filesystem import * +from spack.version import ver +from spack.stage import Stage +from spack.util.executable import which + from spack.test.mock_packages_test import * from spack.test.mock_repo import MockGitRepo -from spack.version import ver class GitFetchTest(MockPackagesTest): @@ -45,15 +52,19 @@ def setUp(self): spec.concretize() self.pkg = spack.repo.get(spec, new=True) + def tearDown(self): """Destroy the stage space used by this test.""" super(GitFetchTest, self).tearDown() self.repo.destroy() + self.pkg.do_clean() + def assert_rev(self, rev): """Check that the current git revision is equal to the supplied rev.""" self.assertEqual(self.repo.rev_hash('HEAD'), self.repo.rev_hash(rev)) + def try_fetch(self, rev, test_file, args): """Tries to: 1. 
Fetch the repo using a fetch strategy constructed with @@ -65,27 +76,26 @@ def try_fetch(self, rev, test_file, args): """ self.pkg.versions[ver('git')] = args - with self.pkg.stage: - self.pkg.do_stage() - self.assert_rev(rev) + self.pkg.do_stage() + self.assert_rev(rev) - file_path = join_path(self.pkg.stage.source_path, test_file) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + file_path = join_path(self.pkg.stage.source_path, test_file) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - os.unlink(file_path) - self.assertFalse(os.path.isfile(file_path)) + os.unlink(file_path) + self.assertFalse(os.path.isfile(file_path)) - untracked_file = 'foobarbaz' - touch(untracked_file) - self.assertTrue(os.path.isfile(untracked_file)) - self.pkg.do_restage() - self.assertFalse(os.path.isfile(untracked_file)) + untracked_file = 'foobarbaz' + touch(untracked_file) + self.assertTrue(os.path.isfile(untracked_file)) + self.pkg.do_restage() + self.assertFalse(os.path.isfile(untracked_file)) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - self.assert_rev(rev) + self.assert_rev(rev) def test_fetch_master(self): diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py index b8a0c1ec46..bbcb64e4c1 100644 --- a/lib/spack/spack/test/hg_fetch.py +++ b/lib/spack/spack/test/hg_fetch.py @@ -23,12 +23,16 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import spack +import unittest -from spack.version import ver -from spack.test.mock_repo import MockHgRepo from llnl.util.filesystem import * + +import spack +from spack.version import ver +from spack.stage import Stage +from spack.util.executable import which from spack.test.mock_packages_test import * +from spack.test.mock_repo import MockHgRepo class HgFetchTest(MockPackagesTest): @@ -45,10 +49,13 @@ def setUp(self): spec.concretize() self.pkg = spack.repo.get(spec, new=True) + def tearDown(self): """Destroy the stage space used by this test.""" super(HgFetchTest, self).tearDown() self.repo.destroy() + self.pkg.do_clean() + def try_fetch(self, rev, test_file, args): """Tries to: @@ -61,27 +68,26 @@ def try_fetch(self, rev, test_file, args): """ self.pkg.versions[ver('hg')] = args - with self.pkg.stage: - self.pkg.do_stage() - self.assertEqual(self.repo.get_rev(), rev) + self.pkg.do_stage() + self.assertEqual(self.repo.get_rev(), rev) - file_path = join_path(self.pkg.stage.source_path, test_file) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + file_path = join_path(self.pkg.stage.source_path, test_file) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - os.unlink(file_path) - self.assertFalse(os.path.isfile(file_path)) + os.unlink(file_path) + self.assertFalse(os.path.isfile(file_path)) - untracked = 'foobarbaz' - touch(untracked) - self.assertTrue(os.path.isfile(untracked)) - self.pkg.do_restage() - self.assertFalse(os.path.isfile(untracked)) + untracked = 'foobarbaz' + touch(untracked) + self.assertTrue(os.path.isfile(untracked)) + self.pkg.do_restage() + self.assertFalse(os.path.isfile(untracked)) - 
self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - self.assertEqual(self.repo.get_rev(), rev) + self.assertEqual(self.repo.get_rev(), rev) def test_fetch_default(self): diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 8297893f01..8863d13c42 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -22,13 +22,18 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import os +import unittest import shutil import tempfile -import spack from llnl.util.filesystem import * -from spack.directory_layout import YamlDirectoryLayout + +import spack +from spack.stage import Stage from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite +from spack.directory_layout import YamlDirectoryLayout +from spack.util.executable import which from spack.test.mock_packages_test import * from spack.test.mock_repo import MockArchive diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py index ee37e765c7..886b7ef4c5 100644 --- a/lib/spack/spack/test/link_tree.py +++ b/lib/spack/spack/test/link_tree.py @@ -24,6 +24,8 @@ ############################################################################## import os import unittest +import shutil +import tempfile from llnl.util.filesystem import * from llnl.util.link_tree import LinkTree @@ -36,7 +38,6 @@ class LinkTreeTest(unittest.TestCase): def setUp(self): self.stage = Stage('link-tree-test') - self.stage.create() with working_dir(self.stage.path): touchp('source/1') @@ -50,8 +51,10 @@ def setUp(self): source_path = os.path.join(self.stage.path, 'source') self.link_tree = LinkTree(source_path) + def tearDown(self): - self.stage.destroy() + if self.stage: + self.stage.destroy() def check_file_link(self, filename): diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py index 3b11d18da4..bc68df01db 100644 --- a/lib/spack/spack/test/lock.py +++ b/lib/spack/spack/test/lock.py @@ -25,13 +25,15 @@ """ These tests ensure that our lock works correctly. """ -import shutil -import tempfile import unittest +import os +import tempfile +import shutil from multiprocessing import Process -from llnl.util.filesystem import join_path, touch from llnl.util.lock import * +from llnl.util.filesystem import join_path, touch + from spack.util.multiproc import Barrier # This is the longest a failed test will take, as the barriers will diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py index a2606acf19..d568a28d44 100644 --- a/lib/spack/spack/test/make_executable.py +++ b/lib/spack/spack/test/make_executable.py @@ -28,13 +28,13 @@ This just tests whether the right args are getting passed to make. 
""" import os -import shutil -import tempfile import unittest +import tempfile +import shutil from llnl.util.filesystem import * -from spack.build_environment import MakeExecutable from spack.util.environment import path_put_first +from spack.build_environment import MakeExecutable class MakeExecutableTest(unittest.TestCase): diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index e707adfe9d..f83cc8090c 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -23,10 +23,11 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +from filecmp import dircmp + import spack import spack.mirror - -from filecmp import dircmp +from spack.util.compression import decompressor_for from spack.test.mock_packages_test import * from spack.test.mock_repo import * @@ -73,14 +74,14 @@ def set_up_package(self, name, MockRepoClass, url_attr): def check_mirror(self): - with Stage('spack-mirror-test') as stage: - mirror_root = join_path(stage.path, 'test-mirror') - - # register mirror with spack config - mirrors = { 'spack-mirror-test' : 'file://' + mirror_root } - spack.config.update_config('mirrors', mirrors) + stage = Stage('spack-mirror-test') + mirror_root = join_path(stage.path, 'test-mirror') + # register mirror with spack config + mirrors = { 'spack-mirror-test' : 'file://' + mirror_root } + spack.config.update_config('mirrors', mirrors) + try: os.chdir(stage.path) spack.mirror.create( mirror_root, self.repos, no_checksum=True) @@ -96,28 +97,38 @@ def check_mirror(self): files = os.listdir(subdir) self.assertEqual(len(files), 1) - # Now try to fetch each package. - for name, mock_repo in self.repos.items(): - spec = Spec(name).concretized() - pkg = spec.package + # Now try to fetch each package. + for name, mock_repo in self.repos.items(): + spec = Spec(name).concretized() + pkg = spec.package - saved_checksum_setting = spack.do_checksum - with pkg.stage: - # Stage the archive from the mirror and cd to it. - spack.do_checksum = False - pkg.do_stage(mirror_only=True) - # Compare the original repo with the expanded archive - original_path = mock_repo.path - if 'svn' in name: - # have to check out the svn repo to compare. - original_path = join_path(mock_repo.path, 'checked_out') - svn('checkout', mock_repo.url, original_path) - dcmp = dircmp(original_path, pkg.stage.source_path) - # make sure there are no new files in the expanded tarball - self.assertFalse(dcmp.right_only) - # and that all original files are present. - self.assertTrue(all(l in exclude for l in dcmp.left_only)) - spack.do_checksum = saved_checksum_setting + pkg._stage = None + saved_checksum_setting = spack.do_checksum + try: + # Stage the archive from the mirror and cd to it. + spack.do_checksum = False + pkg.do_stage(mirror_only=True) + + # Compare the original repo with the expanded archive + original_path = mock_repo.path + if 'svn' in name: + # have to check out the svn repo to compare. + original_path = join_path(mock_repo.path, 'checked_out') + svn('checkout', mock_repo.url, original_path) + + dcmp = dircmp(original_path, pkg.stage.source_path) + + # make sure there are no new files in the expanded tarball + self.assertFalse(dcmp.right_only) + + # and that all original files are present. 
+ self.assertTrue(all(l in exclude for l in dcmp.left_only)) + + finally: + spack.do_checksum = saved_checksum_setting + pkg.do_clean() + finally: + stage.destroy() def test_git_mirror(self): diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 0b8867b61e..e9f1f95df5 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -22,15 +22,17 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import sys import os import shutil -import tempfile import unittest +import tempfile +from ordereddict_backport import OrderedDict + +from llnl.util.filesystem import mkdirp import spack import spack.config -from llnl.util.filesystem import mkdirp -from ordereddict_backport import OrderedDict from spack.repository import RepoPath from spack.spec import Spec diff --git a/lib/spack/spack/test/mock_repo.py b/lib/spack/spack/test/mock_repo.py index a8bdfb5571..ed94023b0e 100644 --- a/lib/spack/spack/test/mock_repo.py +++ b/lib/spack/spack/test/mock_repo.py @@ -26,9 +26,13 @@ import shutil from llnl.util.filesystem import * + +import spack +from spack.version import ver from spack.stage import Stage from spack.util.executable import which + # # VCS Systems used by mock repo code. # diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py index 2d4b8cd584..7bf4ff0a0a 100644 --- a/lib/spack/spack/test/multimethod.py +++ b/lib/spack/spack/test/multimethod.py @@ -25,11 +25,14 @@ """ Test for multi_method dispatch. """ +import unittest import spack from spack.multimethod import * -from spack.test.mock_packages_test import * from spack.version import * +from spack.spec import Spec +from spack.multimethod import when +from spack.test.mock_packages_test import * class MultiMethodTest(MockPackagesTest): diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py index 2023ba6d96..647976df21 100644 --- a/lib/spack/spack/test/namespace_trie.py +++ b/lib/spack/spack/test/namespace_trie.py @@ -23,7 +23,6 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import unittest - from spack.util.naming import NamespaceTrie diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py index 55f35ea4c9..ebd7281999 100644 --- a/lib/spack/spack/test/optional_deps.py +++ b/lib/spack/spack/test/optional_deps.py @@ -22,8 +22,10 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import unittest -from spack.spec import Spec +import spack +from spack.spec import Spec, CompilerSpec from spack.test.mock_packages_test import * class ConcretizeTest(MockPackagesTest): diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index f0b5e05f3b..83984dc5f6 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -22,12 +22,14 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import unittest + +from 
llnl.util.filesystem import join_path import spack -from llnl.util.filesystem import join_path from spack.repository import Repo -from spack.test.mock_packages_test import * from spack.util.naming import mod_to_class +from spack.test.mock_packages_test import * class PackagesTest(MockPackagesTest): diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py index 4294975304..d74d3b9b7d 100644 --- a/lib/spack/spack/test/python_version.py +++ b/lib/spack/spack/test/python_version.py @@ -28,11 +28,12 @@ Spack was originally 2.7, but enough systems in 2014 are still using 2.6 on their frontend nodes that we need 2.6 to get adopted. """ +import unittest import os import re -import unittest import llnl.util.tty as tty + import pyqver2 import spack diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 5e6162b6e6..632f777cde 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -31,6 +31,8 @@ import spack import spack.package +from llnl.util.lang import list_modules + from spack.spec import Spec from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 8c33d1ff6e..44a09cbd7f 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -22,6 +22,7 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import unittest from spack.spec import * from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index 6e08e30e13..1daaa4be8f 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -23,10 +23,9 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import unittest - import spack.spec -from spack.parse import Token from spack.spec import * +from spack.parse import Token # Sample output for a complex lexing. complex_lex = [Token(ID, 'mvapich_foo'), diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index dbcf89d864..c1b2a2a573 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -25,13 +25,15 @@ """\ Test that the Stage class works correctly. 
""" -import os -import shutil import unittest +import shutil +import os +import getpass from contextlib import * -import spack from llnl.util.filesystem import * + +import spack from spack.stage import Stage from spack.util.executable import which @@ -190,90 +192,116 @@ def check_destroy(self, stage, stage_name): def test_setup_and_destroy_name_with_tmp(self): with use_tmp(True): - with Stage(archive_url, name=stage_name) as stage: - self.check_setup(stage, stage_name) + stage = Stage(archive_url, name=stage_name) + self.check_setup(stage, stage_name) + + stage.destroy() self.check_destroy(stage, stage_name) def test_setup_and_destroy_name_without_tmp(self): with use_tmp(False): - with Stage(archive_url, name=stage_name) as stage: - self.check_setup(stage, stage_name) + stage = Stage(archive_url, name=stage_name) + self.check_setup(stage, stage_name) + + stage.destroy() self.check_destroy(stage, stage_name) def test_setup_and_destroy_no_name_with_tmp(self): with use_tmp(True): - with Stage(archive_url) as stage: - self.check_setup(stage, None) + stage = Stage(archive_url) + self.check_setup(stage, None) + + stage.destroy() self.check_destroy(stage, None) def test_setup_and_destroy_no_name_without_tmp(self): with use_tmp(False): - with Stage(archive_url) as stage: - self.check_setup(stage, None) + stage = Stage(archive_url) + self.check_setup(stage, None) + + stage.destroy() self.check_destroy(stage, None) def test_chdir(self): - with Stage(archive_url, name=stage_name) as stage: - stage.chdir() - self.check_setup(stage, stage_name) - self.check_chdir(stage, stage_name) + stage = Stage(archive_url, name=stage_name) + + stage.chdir() + self.check_setup(stage, stage_name) + self.check_chdir(stage, stage_name) + + stage.destroy() self.check_destroy(stage, stage_name) def test_fetch(self): - with Stage(archive_url, name=stage_name) as stage: - stage.fetch() - self.check_setup(stage, stage_name) - self.check_chdir(stage, stage_name) - self.check_fetch(stage, stage_name) + stage = Stage(archive_url, name=stage_name) + + stage.fetch() + self.check_setup(stage, stage_name) + self.check_chdir(stage, stage_name) + self.check_fetch(stage, stage_name) + + stage.destroy() self.check_destroy(stage, stage_name) def test_expand_archive(self): - with Stage(archive_url, name=stage_name) as stage: - stage.fetch() - self.check_setup(stage, stage_name) - self.check_fetch(stage, stage_name) - stage.expand_archive() - self.check_expand_archive(stage, stage_name) + stage = Stage(archive_url, name=stage_name) + + stage.fetch() + self.check_setup(stage, stage_name) + self.check_fetch(stage, stage_name) + + stage.expand_archive() + self.check_expand_archive(stage, stage_name) + + stage.destroy() self.check_destroy(stage, stage_name) def test_expand_archive(self): - with Stage(archive_url, name=stage_name) as stage: - stage.fetch() - self.check_setup(stage, stage_name) - self.check_fetch(stage, stage_name) - stage.expand_archive() - stage.chdir_to_source() - self.check_expand_archive(stage, stage_name) - self.check_chdir_to_source(stage, stage_name) + stage = Stage(archive_url, name=stage_name) + + stage.fetch() + self.check_setup(stage, stage_name) + self.check_fetch(stage, stage_name) + + stage.expand_archive() + stage.chdir_to_source() + self.check_expand_archive(stage, stage_name) + self.check_chdir_to_source(stage, stage_name) + + stage.destroy() self.check_destroy(stage, stage_name) def test_restage(self): - with Stage(archive_url, name=stage_name) as stage: - stage.fetch() - stage.expand_archive() - 
stage.chdir_to_source() - self.check_expand_archive(stage, stage_name) - self.check_chdir_to_source(stage, stage_name) + stage = Stage(archive_url, name=stage_name) - # Try to make a file in the old archive dir - with open('foobar', 'w') as file: - file.write("this file is to be destroyed.") + stage.fetch() + stage.expand_archive() + stage.chdir_to_source() + self.check_expand_archive(stage, stage_name) + self.check_chdir_to_source(stage, stage_name) - self.assertTrue('foobar' in os.listdir(stage.source_path)) + # Try to make a file in the old archive dir + with open('foobar', 'w') as file: + file.write("this file is to be destroyed.") - # Make sure the file is not there after restage. - stage.restage() - self.check_chdir(stage, stage_name) - self.check_fetch(stage, stage_name) - stage.chdir_to_source() - self.check_chdir_to_source(stage, stage_name) - self.assertFalse('foobar' in os.listdir(stage.source_path)) + self.assertTrue('foobar' in os.listdir(stage.source_path)) + + # Make sure the file is not there after restage. + stage.restage() + self.check_chdir(stage, stage_name) + self.check_fetch(stage, stage_name) + + stage.chdir_to_source() + self.check_chdir_to_source(stage, stage_name) + self.assertFalse('foobar' in os.listdir(stage.source_path)) + + stage.destroy() self.check_destroy(stage, stage_name) diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py index 1ee4ee700e..454a7f1d1f 100644 --- a/lib/spack/spack/test/svn_fetch.py +++ b/lib/spack/spack/test/svn_fetch.py @@ -24,13 +24,19 @@ ############################################################################## import os import re -import spack +import unittest +import shutil +import tempfile -from spack.test.mock_repo import svn, MockSvnRepo -from spack.version import ver -from spack.test.mock_packages_test import * from llnl.util.filesystem import * +import spack +from spack.version import ver +from spack.stage import Stage +from spack.util.executable import which +from spack.test.mock_packages_test import * +from spack.test.mock_repo import svn, MockSvnRepo + class SvnFetchTest(MockPackagesTest): """Tests fetching from a dummy git repository.""" @@ -45,10 +51,13 @@ def setUp(self): spec.concretize() self.pkg = spack.repo.get(spec, new=True) + def tearDown(self): """Destroy the stage space used by this test.""" super(SvnFetchTest, self).tearDown() self.repo.destroy() + self.pkg.do_clean() + def assert_rev(self, rev): """Check that the current revision is equal to the supplied rev.""" @@ -61,6 +70,7 @@ def get_rev(): return match.group(1) self.assertEqual(get_rev(), rev) + def try_fetch(self, rev, test_file, args): """Tries to: 1. 
Fetch the repo using a fetch strategy constructed with @@ -72,27 +82,26 @@ def try_fetch(self, rev, test_file, args): """ self.pkg.versions[ver('svn')] = args - with self.pkg.stage: - self.pkg.do_stage() - self.assert_rev(rev) + self.pkg.do_stage() + self.assert_rev(rev) - file_path = join_path(self.pkg.stage.source_path, test_file) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + file_path = join_path(self.pkg.stage.source_path, test_file) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - os.unlink(file_path) - self.assertFalse(os.path.isfile(file_path)) + os.unlink(file_path) + self.assertFalse(os.path.isfile(file_path)) - untracked = 'foobarbaz' - touch(untracked) - self.assertTrue(os.path.isfile(untracked)) - self.pkg.do_restage() - self.assertFalse(os.path.isfile(untracked)) + untracked = 'foobarbaz' + touch(untracked) + self.assertTrue(os.path.isfile(untracked)) + self.pkg.do_restage() + self.assertFalse(os.path.isfile(untracked)) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - self.assert_rev(rev) + self.assert_rev(rev) def test_fetch_default(self): diff --git a/lib/spack/spack/test/tally_plugin.py b/lib/spack/spack/test/tally_plugin.py index 4163ab95dd..e0b9618e0c 100644 --- a/lib/spack/spack/test/tally_plugin.py +++ b/lib/spack/spack/test/tally_plugin.py @@ -22,10 +22,10 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os - from nose.plugins import Plugin +import os + class Tally(Plugin): name = 'tally' diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py index 18615b7efe..ccc409dd60 100644 --- a/lib/spack/spack/test/unit_install.py +++ b/lib/spack/spack/test/unit_install.py @@ -22,11 +22,10 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import itertools import unittest +import itertools import spack - test_install = __import__("spack.cmd.test-install", fromlist=["BuildId", "create_test_output", "TestResult"]) diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py index 068a335b49..87adf89401 100644 --- a/lib/spack/spack/test/url_extrapolate.py +++ b/lib/spack/spack/test/url_extrapolate.py @@ -25,7 +25,10 @@ """\ Tests ability of spack to extrapolate URL versions from existing versions. """ +import spack import spack.url as url +from spack.spec import Spec +from spack.version import ver from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py index 561d4658a1..efde7c0c73 100644 --- a/lib/spack/spack/test/url_parse.py +++ b/lib/spack/spack/test/url_parse.py @@ -27,8 +27,8 @@ detection in Homebrew. 
""" import unittest - import spack.url as url +from pprint import pprint class UrlParseTest(unittest.TestCase): diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py index 2be38af0d3..aec8baf4ea 100644 --- a/lib/spack/spack/test/url_substitution.py +++ b/lib/spack/spack/test/url_substitution.py @@ -27,6 +27,7 @@ """ import unittest +import spack import spack.url as url diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py index 2732006eb3..108450e098 100644 --- a/lib/spack/spack/test/versions.py +++ b/lib/spack/spack/test/versions.py @@ -28,7 +28,6 @@ where it makes sense. """ import unittest - from spack.version import * diff --git a/lib/spack/spack/test/yaml.py b/lib/spack/spack/test/yaml.py index b930c022f2..5a357b8e69 100644 --- a/lib/spack/spack/test/yaml.py +++ b/lib/spack/spack/test/yaml.py @@ -26,7 +26,6 @@ Test Spack's custom YAML format. """ import unittest - import spack.util.spack_yaml as syaml test_file = """\ From 311d71ec86bc7e245520a816cee4053c31a8a086 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 3 Mar 2016 16:50:51 -0600 Subject: [PATCH 099/189] Modifications to get PGI working --- lib/spack/env/cc | 6 +++--- lib/spack/env/pgi/case-insensitive/pgCC | 1 - lib/spack/env/pgi/{pgf77 => pgc++} | 0 lib/spack/env/pgi/{pgf90 => pgfortran} | 0 lib/spack/spack/compilers/pgi.py | 18 +++++++++--------- 5 files changed, 12 insertions(+), 13 deletions(-) delete mode 120000 lib/spack/env/pgi/case-insensitive/pgCC rename lib/spack/env/pgi/{pgf77 => pgc++} (100%) rename lib/spack/env/pgi/{pgf90 => pgfortran} (100%) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index a323c48124..b8b6c86e01 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -90,15 +90,15 @@ case "$command" in command="$SPACK_CC" language="C" ;; - c++|CC|g++|clang++|icpc|pgCC|xlc++) + c++|CC|g++|clang++|icpc|pgc++|xlc++) command="$SPACK_CXX" language="C++" ;; - f90|fc|f95|gfortran|ifort|pgf90|xlf90|nagfor) + f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor) command="$SPACK_FC" language="Fortran 90" ;; - f77|gfortran|ifort|pgf77|xlf|nagfor) + f77|gfortran|ifort|pgfortran|xlf|nagfor) command="$SPACK_F77" language="Fortran 77" ;; diff --git a/lib/spack/env/pgi/case-insensitive/pgCC b/lib/spack/env/pgi/case-insensitive/pgCC deleted file mode 120000 index e2deb67f3b..0000000000 --- a/lib/spack/env/pgi/case-insensitive/pgCC +++ /dev/null @@ -1 +0,0 @@ -../../cc \ No newline at end of file diff --git a/lib/spack/env/pgi/pgf77 b/lib/spack/env/pgi/pgc++ similarity index 100% rename from lib/spack/env/pgi/pgf77 rename to lib/spack/env/pgi/pgc++ diff --git a/lib/spack/env/pgi/pgf90 b/lib/spack/env/pgi/pgfortran similarity index 100% rename from lib/spack/env/pgi/pgf90 rename to lib/spack/env/pgi/pgfortran diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py index 9ac74cfbdb..c6a1078bd9 100644 --- a/lib/spack/spack/compilers/pgi.py +++ b/lib/spack/spack/compilers/pgi.py @@ -29,28 +29,28 @@ class Pgi(Compiler): cc_names = ['pgcc'] # Subclasses use possible names of C++ compiler - cxx_names = ['pgCC'] + cxx_names = ['pgc++', 'pgCC'] # Subclasses use possible names of Fortran 77 compiler - f77_names = ['pgf77'] + f77_names = ['pgfortran', 'pgf77'] # Subclasses use possible names of Fortran 90 compiler - fc_names = ['pgf95', 'pgf90'] + fc_names = ['pgfortran', 'pgf95', 'pgf90'] # Named wrapper links within spack.build_env_path link_paths = { 'cc' : 'pgi/pgcc', - 'cxx' : 'pgi/case-insensitive/pgCC', - 'f77' : 'pgi/pgf77', - 
'fc' : 'pgi/pgf90' } + 'cxx' : 'pgi/pgc++', + 'f77' : 'pgi/pgfortran', + 'fc' : 'pgi/pgfortran' } @classmethod def default_version(cls, comp): """The '-V' option works for all the PGI compilers. Output looks like this:: - pgf95 10.2-0 64-bit target on x86-64 Linux -tp nehalem-64 - Copyright 1989-2000, The Portland Group, Inc. All Rights Reserved. - Copyright 2000-2010, STMicroelectronics, Inc. All Rights Reserved. + pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge + The Portland Group - PGI Compilers and Tools + Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved. """ return get_compiler_version( comp, '-V', r'pg[^ ]* ([^ ]+) \d\d\d?-bit target') From 202fbc681265bd26739e0af3c37bb09a80b7e564 Mon Sep 17 00:00:00 2001 From: citibeth Date: Thu, 3 Mar 2016 23:40:29 -0500 Subject: [PATCH 100/189] Rmoved periods. Should help use of cut'n'paste from console windows. --- lib/spack/spack/cmd/checksum.py | 6 +++--- lib/spack/spack/cmd/compiler.py | 6 +++--- lib/spack/spack/cmd/create.py | 10 +++++----- lib/spack/spack/cmd/mirror.py | 4 ++-- lib/spack/spack/cmd/module.py | 6 +++--- lib/spack/spack/cmd/repo.py | 12 ++++++------ lib/spack/spack/config.py | 6 +++--- lib/spack/spack/database.py | 2 +- lib/spack/spack/directives.py | 4 ++-- lib/spack/spack/directory_layout.py | 4 ++-- lib/spack/spack/fetch_strategy.py | 16 ++++++++-------- lib/spack/spack/mirror.py | 4 ++-- lib/spack/spack/package.py | 26 +++++++++++++------------- lib/spack/spack/repository.py | 6 +++--- lib/spack/spack/stage.py | 4 ++-- 15 files changed, 58 insertions(+), 58 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index c451993233..966ff9a5e9 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -100,11 +100,11 @@ def checksum(parser, args): else: versions = pkg.fetch_remote_versions() if not versions: - tty.die("Could not fetch any versions for %s." % pkg.name) + tty.die("Could not fetch any versions for %s" % pkg.name) sorted_versions = sorted(versions, reverse=True) - tty.msg("Found %s versions of %s." % (len(versions), pkg.name), + tty.msg("Found %s versions of %s" % (len(versions), pkg.name), *spack.cmd.elide_list( ["%-10s%s" % (v, versions[v]) for v in sorted_versions])) print @@ -121,7 +121,7 @@ def checksum(parser, args): keep_stage=args.keep_stage) if not version_hashes: - tty.die("Could not fetch any versions for %s." % pkg.name) + tty.die("Could not fetch any versions for %s" % pkg.name) version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes] tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines) diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index 75b51f6b49..3e58e82184 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -96,7 +96,7 @@ def compiler_remove(args): compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope) if not compilers: - tty.die("No compilers match spec %s." % cspec) + tty.die("No compilers match spec %s" % cspec) elif not args.all and len(compilers) > 1: tty.error("Multiple compilers match spec %s. Choose one:" % cspec) colify(reversed(sorted([c.spec for c in compilers])), indent=4) @@ -105,7 +105,7 @@ def compiler_remove(args): for compiler in compilers: spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope) - tty.msg("Removed compiler %s." 
% compiler.spec) + tty.msg("Removed compiler %s" % compiler.spec) def compiler_info(args): @@ -114,7 +114,7 @@ def compiler_info(args): compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope) if not compilers: - tty.error("No compilers match spec %s." % cspec) + tty.error("No compilers match spec %s" % cspec) else: for c in compilers: print str(c.spec) + ":" diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 6809209046..4564143f83 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -156,7 +156,7 @@ def guess_name_and_version(url, args): # Try to deduce name and version of the new package from the URL version = spack.url.parse_version(url) if not version: - tty.die("Couldn't guess a version string from %s." % url) + tty.die("Couldn't guess a version string from %s" % url) # Try to guess a name. If it doesn't work, allow the user to override. if args.alternate_name: @@ -189,7 +189,7 @@ def find_repository(spec, args): try: repo = Repo(repo_path) if spec.namespace and spec.namespace != repo.namespace: - tty.die("Can't create package with namespace %s in repo with namespace %s." + tty.die("Can't create package with namespace %s in repo with namespace %s" % (spec.namespace, repo.namespace)) except RepoError as e: tty.die(str(e)) @@ -252,7 +252,7 @@ def create(parser, args): name = spec.name # factors out namespace, if any repo = find_repository(spec, args) - tty.msg("This looks like a URL for %s version %s." % (name, version)) + tty.msg("This looks like a URL for %s version %s" % (name, version)) tty.msg("Creating template for package %s" % name) # Fetch tarballs (prompting user if necessary) @@ -266,7 +266,7 @@ def create(parser, args): keep_stage=args.keep_stage) if not ver_hash_tuples: - tty.die("Could not fetch any tarballs for %s." % name) + tty.die("Could not fetch any tarballs for %s" % name) # Prepend 'py-' to python package names, by convention. if guesser.build_system == 'python': @@ -291,4 +291,4 @@ def create(parser, args): # If everything checks out, go ahead and edit. spack.editor(pkg_path) - tty.msg("Created package %s." % pkg_path) + tty.msg("Created package %s" % pkg_path) diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 8e9438c1a3..fcd15a6a90 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -126,7 +126,7 @@ def mirror_remove(args): old_value = mirrors.pop(name) spack.config.update_config('mirrors', mirrors, scope=args.scope) - tty.msg("Removed mirror %s with url %s." % (name, old_value)) + tty.msg("Removed mirror %s with url %s" % (name, old_value)) def mirror_list(args): @@ -203,7 +203,7 @@ def mirror_create(args): verb = "updated" if existed else "created" tty.msg( - "Successfully %s mirror in %s." % (verb, directory), + "Successfully %s mirror in %s" % (verb, directory), "Archive stats:", " %-4d already present" % p, " %-4d added" % m, diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py index a5a9570eb5..1d6867c1d9 100644 --- a/lib/spack/spack/cmd/module.py +++ b/lib/spack/spack/cmd/module.py @@ -58,7 +58,7 @@ def module_find(mtype, spec_array): should type to use that package's module. """ if mtype not in module_types: - tty.die("Invalid module type: '%s'. Options are %s." % (mtype, comma_or(module_types))) + tty.die("Invalid module type: '%s'. 
Options are %s" % (mtype, comma_or(module_types))) specs = spack.cmd.parse_specs(spec_array) if len(specs) > 1: @@ -78,7 +78,7 @@ def module_find(mtype, spec_array): mt = module_types[mtype] mod = mt(specs[0]) if not os.path.isfile(mod.file_name): - tty.die("No %s module is installed for %s." % (mtype, spec)) + tty.die("No %s module is installed for %s" % (mtype, spec)) print mod.use_name @@ -94,7 +94,7 @@ def module_refresh(): shutil.rmtree(cls.path, ignore_errors=False) mkdirp(cls.path) for spec in specs: - tty.debug(" Writing file for %s." % spec) + tty.debug(" Writing file for %s" % spec) cls(spec).write() diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index c2e352786d..87c782000f 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -89,11 +89,11 @@ def repo_add(args): # check if the path exists if not os.path.exists(canon_path): - tty.die("No such file or directory: '%s'." % path) + tty.die("No such file or directory: %s" % path) # Make sure the path is a directory. if not os.path.isdir(canon_path): - tty.die("Not a Spack repository: '%s'." % path) + tty.die("Not a Spack repository: %s" % path) # Make sure it's actually a spack repository by constructing it. repo = Repo(canon_path) @@ -103,7 +103,7 @@ def repo_add(args): if not repos: repos = [] if repo.root in repos or path in repos: - tty.die("Repository is already registered with Spack: '%s'" % path) + tty.die("Repository is already registered with Spack: %s" % path) repos.insert(0, canon_path) spack.config.update_config('repos', repos, args.scope) @@ -122,7 +122,7 @@ def repo_remove(args): if canon_path == repo_canon_path: repos.remove(repo_path) spack.config.update_config('repos', repos, args.scope) - tty.msg("Removed repository '%s'." % repo_path) + tty.msg("Removed repository %s" % repo_path) return # If it is a namespace, remove corresponding repo @@ -132,13 +132,13 @@ def repo_remove(args): if repo.namespace == path_or_namespace: repos.remove(path) spack.config.update_config('repos', repos, args.scope) - tty.msg("Removed repository '%s' with namespace %s." + tty.msg("Removed repository %s with namespace '%s'." % (repo.root, repo.namespace)) return except RepoError as e: continue - tty.die("No repository with path or namespace: '%s'" + tty.die("No repository with path or namespace: %s" % path_or_namespace) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 6fecde9980..576a5afa2e 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -205,7 +205,7 @@ def validate_section_name(section): """Raise a ValueError if the section is not a valid section.""" if section not in section_schemas: - raise ValueError("Invalid config section: '%s'. Options are %s." + raise ValueError("Invalid config section: '%s'. Options are %s" % (section, section_schemas)) @@ -335,7 +335,7 @@ def validate_scope(scope): return config_scopes[scope] else: - raise ValueError("Invalid config scope: '%s'. Must be one of %s." + raise ValueError("Invalid config scope: '%s'. Must be one of %s" % (scope, config_scopes.keys())) @@ -350,7 +350,7 @@ def _read_config_file(filename, schema): "Invlaid configuration. %s exists but is not a file." % filename) elif not os.access(filename, os.R_OK): - raise ConfigFileError("Config file is not readable: %s." 
% filename) + raise ConfigFileError("Config file is not readable: %s" % filename) try: tty.debug("Reading config file %s" % filename) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 9cbe7de44a..089d29325e 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -330,7 +330,7 @@ def _check_ref_counts(self): found = rec.ref_count if not expected == found: raise AssertionError( - "Invalid ref_count: %s: %d (expected %d), in DB %s." + "Invalid ref_count: %s: %d (expected %d), in DB %s" % (key, found, expected, self._index_path)) diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index c8542f55f0..61cd303012 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -125,7 +125,7 @@ def __init__(self, dicts=None): dicts = (dicts,) elif type(dicts) not in (list, tuple): raise TypeError( - "dicts arg must be list, tuple, or string. Found %s." + "dicts arg must be list, tuple, or string. Found %s" % type(dicts)) self.dicts = dicts @@ -317,5 +317,5 @@ class CircularReferenceError(DirectiveError): def __init__(self, directive, package): super(CircularReferenceError, self).__init__( directive, - "Package '%s' cannot pass itself to %s." % (package, directive)) + "Package '%s' cannot pass itself to %s" % (package, directive)) self.package = package diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 29d87b65b3..08c23627f4 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -335,7 +335,7 @@ def _extension_map(self, spec): if not dag_hash in by_hash: raise InvalidExtensionSpecError( - "Spec %s not found in %s." % (dag_hash, prefix)) + "Spec %s not found in %s" % (dag_hash, prefix)) ext_spec = by_hash[dag_hash] if not prefix == ext_spec.prefix: @@ -450,7 +450,7 @@ class ExtensionConflictError(DirectoryLayoutError): """Raised when an extension is added to a package that already has it.""" def __init__(self, spec, ext_spec, conflict): super(ExtensionConflictError, self).__init__( - "%s cannot be installed in %s because it conflicts with %s."% ( + "%s cannot be installed in %s because it conflicts with %s"% ( ext_spec.short_spec, spec.short_spec, conflict.short_spec)) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 83a2dbb59c..ec17cb97f1 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -153,7 +153,7 @@ def fetch(self): self.stage.chdir() if self.archive_file: - tty.msg("Already downloaded %s." % self.archive_file) + tty.msg("Already downloaded %s" % self.archive_file) return tty.msg("Trying to fetch from %s" % self.url) @@ -275,8 +275,8 @@ def check(self): checker = crypto.Checker(self.digest) if not checker.check(self.archive_file): raise ChecksumError( - "%s checksum failed for %s." % (checker.hash_name, self.archive_file), - "Expected %s but got %s." % (self.digest, checker.sum)) + "%s checksum failed for %s" % (checker.hash_name, self.archive_file), + "Expected %s but got %s" % (self.digest, checker.sum)) @_needs_stage def reset(self): @@ -312,7 +312,7 @@ def __init__(self, name, *rev_types, **kwargs): # Ensure that there's only one of the rev_types if sum(k in kwargs for k in rev_types) > 1: raise FetchStrategyError( - "Supply only one of %s to fetch with %s." % ( + "Supply only one of %s to fetch with %s" % ( comma_or(rev_types), name)) # Set attributes for each rev type. 
@@ -321,7 +321,7 @@ def __init__(self, name, *rev_types, **kwargs): @_needs_stage def check(self): - tty.msg("No checksum needed when fetching with %s." % self.name) + tty.msg("No checksum needed when fetching with %s" % self.name) @_needs_stage def expand(self): @@ -395,7 +395,7 @@ def fetch(self): self.stage.chdir() if self.stage.source_path: - tty.msg("Already fetched %s." % self.stage.source_path) + tty.msg("Already fetched %s" % self.stage.source_path) return args = [] @@ -505,7 +505,7 @@ def fetch(self): self.stage.chdir() if self.stage.source_path: - tty.msg("Already fetched %s." % self.stage.source_path) + tty.msg("Already fetched %s" % self.stage.source_path) return tty.msg("Trying to check out svn repository: %s" % self.url) @@ -584,7 +584,7 @@ def fetch(self): self.stage.chdir() if self.stage.source_path: - tty.msg("Already fetched %s." % self.stage.source_path) + tty.msg("Already fetched %s" % self.stage.source_path) return args = [] diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index fa29e20803..58e31c2c7b 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -73,7 +73,7 @@ def get_matching_versions(specs, **kwargs): # Skip any package that has no known versions. if not pkg.versions: - tty.msg("No safe (checksummed) versions for package %s." % pkg.name) + tty.msg("No safe (checksummed) versions for package %s" % pkg.name) continue num_versions = kwargs.get('num_versions', 0) @@ -203,7 +203,7 @@ def create(path, specs, **kwargs): if spack.debug: sys.excepthook(*sys.exc_info()) else: - tty.warn("Error while fetching %s." % spec.format('$_$@'), e.message) + tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message) error.append(spec) finally: pkg.stage.destroy() diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 9f1825ca21..fb96f61de9 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -688,7 +688,7 @@ def do_fetch(self, mirror_only=False): if not ignore_checksum: raise FetchError( - "Will not fetch %s." % self.spec.format('$_$@'), checksum_msg) + "Will not fetch %s" % self.spec.format('$_$@'), checksum_msg) self.stage.fetch(mirror_only) @@ -722,7 +722,7 @@ def do_patch(self): # If there are no patches, note it. if not self.patches and not has_patch_fun: - tty.msg("No patches needed for %s." % self.name) + tty.msg("No patches needed for %s" % self.name) return # Construct paths to special files in the archive dir used to @@ -745,7 +745,7 @@ def do_patch(self): tty.msg("Already patched %s" % self.name) return elif os.path.isfile(no_patches_file): - tty.msg("No patches needed for %s." % self.name) + tty.msg("No patches needed for %s" % self.name) return # Apply all the patches for specs that match this one @@ -766,10 +766,10 @@ def do_patch(self): if has_patch_fun: try: self.patch() - tty.msg("Ran patch() for %s." % self.name) + tty.msg("Ran patch() for %s" % self.name) patched = True except: - tty.msg("patch() function failed for %s." % self.name) + tty.msg("patch() function failed for %s" % self.name) touch(bad_file) raise @@ -838,7 +838,7 @@ def do_install(self, raise ValueError("Can only install concrete packages.") if os.path.exists(self.prefix): - tty.msg("%s is already installed in %s." % (self.name, self.prefix)) + tty.msg("%s is already installed in %s" % (self.name, self.prefix)) return tty.msg("Installing %s" % self.name) @@ -874,7 +874,7 @@ def cleanup(): def real_work(): try: - tty.msg("Building %s." 
% self.name) + tty.msg("Building %s" % self.name) # Run the pre-install hook in the child process after # the directory is created. @@ -918,8 +918,8 @@ def real_work(): self._total_time = time.time() - start_time build_time = self._total_time - self._fetch_time - tty.msg("Successfully installed %s." % self.name, - "Fetch: %s. Build: %s. Total: %s." + tty.msg("Successfully installed %s" % self.name, + "Fetch: %s. Build: %s. Total: %s" % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) print_pkg(self.prefix) @@ -1025,7 +1025,7 @@ def do_uninstall(self, force=False): # Uninstalling in Spack only requires removing the prefix. self.remove_prefix() spack.installed_db.remove(self.spec) - tty.msg("Successfully uninstalled %s." % self.spec.short_spec) + tty.msg("Successfully uninstalled %s" % self.spec.short_spec) # Once everything else is done, run post install hooks spack.hooks.post_uninstall(self) @@ -1072,7 +1072,7 @@ def do_activate(self, force=False): self.extendee_spec.package.activate(self, **self.extendee_args) spack.install_layout.add_extension(self.extendee_spec, self.spec) - tty.msg("Activated extension %s for %s." + tty.msg("Activated extension %s for %s" % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@"))) @@ -1124,7 +1124,7 @@ def do_deactivate(self, **kwargs): if self.activated: spack.install_layout.remove_extension(self.extendee_spec, self.spec) - tty.msg("Deactivated extension %s for %s." + tty.msg("Deactivated extension %s for %s" % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@"))) @@ -1320,7 +1320,7 @@ class PackageVersionError(PackageError): """Raised when a version URL cannot automatically be determined.""" def __init__(self, version): super(PackageVersionError, self).__init__( - "Cannot determine a URL automatically for version %s." % version, + "Cannot determine a URL automatically for version %s" % version, "Please provide a url for this version in the package.py file.") diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 8d06fefe7f..3c3ba08bcc 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -156,7 +156,7 @@ def _add(self, repo): if repo.namespace in self.by_namespace: raise DuplicateRepoError( - "Package repos '%s' and '%s' both provide namespace %s." + "Package repos '%s' and '%s' both provide namespace %s" % (repo.root, self.by_namespace[repo.namespace].root, repo.namespace)) # Add repo to the pkg indexes @@ -545,7 +545,7 @@ def get(self, spec, new=False): raise UnknownPackageError(spec.name) if spec.namespace and spec.namespace != self.namespace: - raise UnknownPackageError("Repository %s does not contain package %s." + raise UnknownPackageError("Repository %s does not contain package %s" % (self.namespace, spec.fullname)) key = hash(spec) @@ -825,7 +825,7 @@ class UnknownPackageError(PackageLoadError): def __init__(self, name, repo=None): msg = None if repo: - msg = "Package %s not found in repository %s." % (name, repo) + msg = "Package %s not found in repository %s" % (name, repo) else: msg = "Package %s not found." % name super(UnknownPackageError, self).__init__(msg) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index f217450d42..5591cb9ba5 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -306,9 +306,9 @@ def expand_archive(self): archive_dir = self.source_path if not archive_dir: self.fetcher.expand() - tty.msg("Created stage in %s." 
% self.path) + tty.msg("Created stage in %s" % self.path) else: - tty.msg("Already staged %s in %s." % (self.name, self.path)) + tty.msg("Already staged %s in %s" % (self.name, self.path)) def chdir_to_source(self): """Changes directory to the expanded archive directory. From 0eb6ef2cd031d1668e1891425bdc6234df0594df Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Fri, 4 Mar 2016 11:09:40 -0500 Subject: [PATCH 101/189] 1. Removed fake MPI dependency from netcdf-cxx4 and netcdf-fortran. 2. Removed Fortran bootstrap variant from netcdf. Users who need NetCDF Fortran interface should install netcdf-fortran. 3. Added result of ./configure --help on the netcdf-fortran. Verified the package has no additional options that should be exposed through Spack. --- .../builtin/packages/netcdf-cxx4/package.py | 6 - .../packages/netcdf-fortran/package.py | 156 +++++++++++++++++- .../repos/builtin/packages/netcdf/package.py | 12 +- 3 files changed, 154 insertions(+), 20 deletions(-) diff --git a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py index 8d51a10679..fb4c2886cd 100644 --- a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py +++ b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py @@ -7,12 +7,6 @@ class NetcdfCxx4(Package): version('4.2', 'd019853802092cf686254aaba165fc81') - - variant('mpi', default=True, description='Enables MPI parallelism') - - # netcdf-cxx4 doesn't really depend (directly) on MPI. However... this - # depndency ensures taht the right version of MPI is selected (eg: ^openmpi) - depends_on('mpi', when='+mpi') depends_on('netcdf') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/netcdf-fortran/package.py b/var/spack/repos/builtin/packages/netcdf-fortran/package.py index 9e4aee95fb..4e0b14d012 100644 --- a/var/spack/repos/builtin/packages/netcdf-fortran/package.py +++ b/var/spack/repos/builtin/packages/netcdf-fortran/package.py @@ -10,12 +10,162 @@ class NetcdfFortran(Package): variant('mpi', default=True, description='Enables MPI parallelism') - # netcdf-fortran doesn't really depend (directly) on MPI. However... this - # depndency ensures taht the right version of MPI is selected (eg: ^openmpi) - depends_on('mpi', when='+mpi') depends_on('netcdf') def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install") + + + + +# netcdf-fortran configure parameters are below +# --------------------------------------------- +# +# `configure' configures netCDF-Fortran 4.4.3 to adapt to many kinds of systems. +# +# Usage: ./configure [OPTION]... [VAR=VALUE]... +# +# To assign environment variables (e.g., CC, CFLAGS...), specify them as +# VAR=VALUE. See below for descriptions of some of the useful variables. +# +# Defaults for the options are specified in brackets. +# +# Configuration: +# -h, --help display this help and exit +# --help=short display options specific to this package +# --help=recursive display the short help of all the included packages +# -V, --version display version information and exit +# -q, --quiet, --silent do not print `checking ...' 
messages +# --cache-file=FILE cache test results in FILE [disabled] +# -C, --config-cache alias for `--cache-file=config.cache' +# -n, --no-create do not create output files +# --srcdir=DIR find the sources in DIR [configure dir or `..'] +# +# Installation directories: +# --prefix=PREFIX install architecture-independent files in PREFIX +# [/usr/local] +# --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX +# [PREFIX] +# +# By default, `make install' will install all the files in +# `/usr/local/bin', `/usr/local/lib' etc. You can specify +# an installation prefix other than `/usr/local' using `--prefix', +# for instance `--prefix=$HOME'. +# +# For better control, use the options below. +# +# Fine tuning of the installation directories: +# --bindir=DIR user executables [EPREFIX/bin] +# --sbindir=DIR system admin executables [EPREFIX/sbin] +# --libexecdir=DIR program executables [EPREFIX/libexec] +# --sysconfdir=DIR read-only single-machine data [PREFIX/etc] +# --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] +# --localstatedir=DIR modifiable single-machine data [PREFIX/var] +# --libdir=DIR object code libraries [EPREFIX/lib] +# --includedir=DIR C header files [PREFIX/include] +# --oldincludedir=DIR C header files for non-gcc [/usr/include] +# --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] +# --datadir=DIR read-only architecture-independent data [DATAROOTDIR] +# --infodir=DIR info documentation [DATAROOTDIR/info] +# --localedir=DIR locale-dependent data [DATAROOTDIR/locale] +# --mandir=DIR man documentation [DATAROOTDIR/man] +# --docdir=DIR documentation root [DATAROOTDIR/doc/netcdf-fortran] +# --htmldir=DIR html documentation [DOCDIR] +# --dvidir=DIR dvi documentation [DOCDIR] +# --pdfdir=DIR pdf documentation [DOCDIR] +# --psdir=DIR ps documentation [DOCDIR] +# +# Program names: +# --program-prefix=PREFIX prepend PREFIX to installed program names +# --program-suffix=SUFFIX append SUFFIX to installed program names +# --program-transform-name=PROGRAM run sed PROGRAM on installed program names +# +# System types: +# --build=BUILD configure for building on BUILD [guessed] +# --host=HOST cross-compile to build programs to run on HOST [BUILD] +# --target=TARGET configure for building compilers for TARGET [HOST] +# +# Optional Features: +# --disable-option-checking ignore unrecognized --enable/--with options +# --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) +# --enable-FEATURE[=ARG] include FEATURE [ARG=yes] +# --enable-silent-rules less verbose build output (undo: "make V=1") +# --disable-silent-rules verbose build output (undo: "make V=0") +# --enable-maintainer-mode +# enable make rules and dependencies not useful (and +# sometimes confusing) to the casual installer +# --enable-valgrind-tests build with valgrind-tests (valgrind is required, +# static builds only) +# --enable-parallel-tests Run extra parallel IO tests. Ignored if netCDF-4 is +# not enabled, or built on a system without parallel +# I/O support. +# --enable-extra-tests run some extra tests that may not pass because of +# known issues +# --enable-doxygen Enable generation of documentation with doxygen. +# --enable-dot Use dot (provided by graphviz) to generate charts +# and graphs in the doxygen-based documentation. +# --enable-internal-docs Include documentation of library internals. This is +# of interest only to those developing the netCDF +# library. 
+# --enable-dependency-tracking +# do not reject slow dependency extractors +# --disable-dependency-tracking +# speeds up one-time build +# --disable-f03-compiler-check +# disable check of ISO_C_BINDING support in Fortran +# compiler +# --disable-f03 suppress netCDF Fortran 2003 native code +# --disable-fortran-type-check +# cause the Fortran type sizes checks to be skipped +# --enable-large-file-tests +# Run tests which create very large data files (~13 GB +# disk space required, but it will be recovered when +# tests are complete). See option --with-temp-large to +# specify temporary directory +# --enable-benchmarks Run benchmarks. This is an experimental feature. +# --enable-shared[=PKGS] build shared libraries [default=yes] +# --enable-static[=PKGS] build static libraries [default=yes] +# --enable-fast-install[=PKGS] +# optimize for fast installation [default=yes] +# --disable-libtool-lock avoid locking (might break parallel builds) +# --disable-largefile omit support for large files +# --enable-extra-example-tests +# Run extra example tests; requires GNU sed. Ignored +# if netCDF-4 is not enabled. +# --enable-dll build a win32 DLL (only works on mingw) +# +# Optional Packages: +# --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] +# --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) +# --with-temp-large= +# specify directory where large files (i.e. >2 GB) +# will be written, if large files tests are run with +# --enable-large-file-tests +# --with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use +# both] +# --with-gnu-ld assume the C compiler uses GNU ld [default=no] +# --with-sysroot=DIR Search for dependent libraries within DIR +# (or the compiler's sysroot if not specified). +# +# Some influential environment variables: +# CC C compiler command +# CFLAGS C compiler flags +# LDFLAGS linker flags, e.g. -L if you have libraries in a +# nonstandard directory +# LIBS libraries to pass to the linker, e.g. -l +# CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if +# you have headers in a nonstandard directory +# FC Fortran compiler command +# FCFLAGS Fortran compiler flags +# F77 Fortran 77 compiler command +# FFLAGS Fortran 77 compiler flags +# CPP C preprocessor +# +# Use these variables to override the choices made by `configure' or to help +# it to find libraries and programs with nonstandard names/locations. +# +# Report bugs to . +# +# from spack import * diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py index 41a0d2b6f9..0b112a59ce 100644 --- a/var/spack/repos/builtin/packages/netcdf/package.py +++ b/var/spack/repos/builtin/packages/netcdf/package.py @@ -13,7 +13,6 @@ class Netcdf(Package): version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae') variant('mpi', default=True, description='Enables MPI parallelism') - variant('fortran', default=False, description="Download and install NetCDF-Fortran") variant('hdf4', default=False, description="Enable HDF4 support") # Dependencies: @@ -66,11 +65,7 @@ def install(self, spec, prefix): # Fortran support # In version 4.2+, NetCDF-C and NetCDF-Fortran have split. - # They can be installed separately, but this bootstrap procedure - # should be able to install both at the same time. - # Note: this is a new experimental feature. - if '+fortran' in spec: - config_args.append("--enable-remote-fortran-bootstrap") + # Use the netcdf-fortran package to install Fortran support. 
config_args.append('CPPFLAGS=%s' % ' '.join(CPPFLAGS)) config_args.append('LDFLAGS=%s' % ' '.join(LDFLAGS)) @@ -79,8 +74,3 @@ def install(self, spec, prefix): configure(*config_args) make() make("install") - - # After installing NetCDF-C, install NetCDF-Fortran - if '+fortran' in spec: - make("build-netcdf-fortran") - make("install-netcdf-fortran") From 5db967390450659f794d202bfb5c4b4f11fb964b Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 4 Mar 2016 10:51:41 -0600 Subject: [PATCH 102/189] Typo fix in compiler docstring --- lib/spack/spack/compiler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 12c02e0ea2..d38c0b00b1 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -256,12 +256,12 @@ def find(cls, *path): def __repr__(self): - """Return a string represntation of the compiler toolchain.""" + """Return a string representation of the compiler toolchain.""" return self.__str__() def __str__(self): - """Return a string represntation of the compiler toolchain.""" + """Return a string representation of the compiler toolchain.""" return "%s(%s)" % ( self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc)))) From b043c4a5b83f36ad4837fc7302f0919a3d759940 Mon Sep 17 00:00:00 2001 From: Elizabeth Fischer Date: Fri, 4 Mar 2016 12:04:41 -0500 Subject: [PATCH 103/189] Update package.py Removed comments on configure options. --- .../packages/netcdf-fortran/package.py | 153 ------------------ 1 file changed, 153 deletions(-) diff --git a/var/spack/repos/builtin/packages/netcdf-fortran/package.py b/var/spack/repos/builtin/packages/netcdf-fortran/package.py index 4e0b14d012..954e7dc3e8 100644 --- a/var/spack/repos/builtin/packages/netcdf-fortran/package.py +++ b/var/spack/repos/builtin/packages/netcdf-fortran/package.py @@ -16,156 +16,3 @@ def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install") - - - - -# netcdf-fortran configure parameters are below -# --------------------------------------------- -# -# `configure' configures netCDF-Fortran 4.4.3 to adapt to many kinds of systems. -# -# Usage: ./configure [OPTION]... [VAR=VALUE]... -# -# To assign environment variables (e.g., CC, CFLAGS...), specify them as -# VAR=VALUE. See below for descriptions of some of the useful variables. -# -# Defaults for the options are specified in brackets. -# -# Configuration: -# -h, --help display this help and exit -# --help=short display options specific to this package -# --help=recursive display the short help of all the included packages -# -V, --version display version information and exit -# -q, --quiet, --silent do not print `checking ...' messages -# --cache-file=FILE cache test results in FILE [disabled] -# -C, --config-cache alias for `--cache-file=config.cache' -# -n, --no-create do not create output files -# --srcdir=DIR find the sources in DIR [configure dir or `..'] -# -# Installation directories: -# --prefix=PREFIX install architecture-independent files in PREFIX -# [/usr/local] -# --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX -# [PREFIX] -# -# By default, `make install' will install all the files in -# `/usr/local/bin', `/usr/local/lib' etc. You can specify -# an installation prefix other than `/usr/local' using `--prefix', -# for instance `--prefix=$HOME'. -# -# For better control, use the options below. 
-# -# Fine tuning of the installation directories: -# --bindir=DIR user executables [EPREFIX/bin] -# --sbindir=DIR system admin executables [EPREFIX/sbin] -# --libexecdir=DIR program executables [EPREFIX/libexec] -# --sysconfdir=DIR read-only single-machine data [PREFIX/etc] -# --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] -# --localstatedir=DIR modifiable single-machine data [PREFIX/var] -# --libdir=DIR object code libraries [EPREFIX/lib] -# --includedir=DIR C header files [PREFIX/include] -# --oldincludedir=DIR C header files for non-gcc [/usr/include] -# --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] -# --datadir=DIR read-only architecture-independent data [DATAROOTDIR] -# --infodir=DIR info documentation [DATAROOTDIR/info] -# --localedir=DIR locale-dependent data [DATAROOTDIR/locale] -# --mandir=DIR man documentation [DATAROOTDIR/man] -# --docdir=DIR documentation root [DATAROOTDIR/doc/netcdf-fortran] -# --htmldir=DIR html documentation [DOCDIR] -# --dvidir=DIR dvi documentation [DOCDIR] -# --pdfdir=DIR pdf documentation [DOCDIR] -# --psdir=DIR ps documentation [DOCDIR] -# -# Program names: -# --program-prefix=PREFIX prepend PREFIX to installed program names -# --program-suffix=SUFFIX append SUFFIX to installed program names -# --program-transform-name=PROGRAM run sed PROGRAM on installed program names -# -# System types: -# --build=BUILD configure for building on BUILD [guessed] -# --host=HOST cross-compile to build programs to run on HOST [BUILD] -# --target=TARGET configure for building compilers for TARGET [HOST] -# -# Optional Features: -# --disable-option-checking ignore unrecognized --enable/--with options -# --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) -# --enable-FEATURE[=ARG] include FEATURE [ARG=yes] -# --enable-silent-rules less verbose build output (undo: "make V=1") -# --disable-silent-rules verbose build output (undo: "make V=0") -# --enable-maintainer-mode -# enable make rules and dependencies not useful (and -# sometimes confusing) to the casual installer -# --enable-valgrind-tests build with valgrind-tests (valgrind is required, -# static builds only) -# --enable-parallel-tests Run extra parallel IO tests. Ignored if netCDF-4 is -# not enabled, or built on a system without parallel -# I/O support. -# --enable-extra-tests run some extra tests that may not pass because of -# known issues -# --enable-doxygen Enable generation of documentation with doxygen. -# --enable-dot Use dot (provided by graphviz) to generate charts -# and graphs in the doxygen-based documentation. -# --enable-internal-docs Include documentation of library internals. This is -# of interest only to those developing the netCDF -# library. -# --enable-dependency-tracking -# do not reject slow dependency extractors -# --disable-dependency-tracking -# speeds up one-time build -# --disable-f03-compiler-check -# disable check of ISO_C_BINDING support in Fortran -# compiler -# --disable-f03 suppress netCDF Fortran 2003 native code -# --disable-fortran-type-check -# cause the Fortran type sizes checks to be skipped -# --enable-large-file-tests -# Run tests which create very large data files (~13 GB -# disk space required, but it will be recovered when -# tests are complete). See option --with-temp-large to -# specify temporary directory -# --enable-benchmarks Run benchmarks. This is an experimental feature. 
-# --enable-shared[=PKGS] build shared libraries [default=yes] -# --enable-static[=PKGS] build static libraries [default=yes] -# --enable-fast-install[=PKGS] -# optimize for fast installation [default=yes] -# --disable-libtool-lock avoid locking (might break parallel builds) -# --disable-largefile omit support for large files -# --enable-extra-example-tests -# Run extra example tests; requires GNU sed. Ignored -# if netCDF-4 is not enabled. -# --enable-dll build a win32 DLL (only works on mingw) -# -# Optional Packages: -# --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] -# --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) -# --with-temp-large= -# specify directory where large files (i.e. >2 GB) -# will be written, if large files tests are run with -# --enable-large-file-tests -# --with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use -# both] -# --with-gnu-ld assume the C compiler uses GNU ld [default=no] -# --with-sysroot=DIR Search for dependent libraries within DIR -# (or the compiler's sysroot if not specified). -# -# Some influential environment variables: -# CC C compiler command -# CFLAGS C compiler flags -# LDFLAGS linker flags, e.g. -L if you have libraries in a -# nonstandard directory -# LIBS libraries to pass to the linker, e.g. -l -# CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if -# you have headers in a nonstandard directory -# FC Fortran compiler command -# FCFLAGS Fortran compiler flags -# F77 Fortran 77 compiler command -# FFLAGS Fortran 77 compiler flags -# CPP C preprocessor -# -# Use these variables to override the choices made by `configure' or to help -# it to find libraries and programs with nonstandard names/locations. -# -# Report bugs to . -# -# from spack import * From 648d08eb305e04d95ee702b819923f31fd829955 Mon Sep 17 00:00:00 2001 From: Elizabeth Fischer Date: Fri, 4 Mar 2016 12:05:43 -0500 Subject: [PATCH 104/189] Update package.py Updated homepage URL to general NetCDF homepage. --- var/spack/repos/builtin/packages/netcdf-cxx4/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py index fb4c2886cd..ab717ac6ff 100644 --- a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py +++ b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py @@ -2,7 +2,7 @@ class NetcdfCxx4(Package): """C++ interface for NetCDF4""" - homepage = "http://www.unidata.ucar.edu/downloads/netcdf/netcdf-cxx/index.jsp" + homepage = "http://www.unidata.ucar.edu/software/netcdf" url = "http://www.unidata.ucar.edu/downloads/netcdf/ftp/netcdf-cxx4-4.2.tar.gz" version('4.2', 'd019853802092cf686254aaba165fc81') From a7b918837eab02ebff5af783d06e56ec7f868d9e Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Fri, 4 Mar 2016 15:22:28 -0600 Subject: [PATCH 105/189] GNU m4 depends on libsigsegv --- .../repos/builtin/packages/libsigsegv/package.py | 15 +++++++++++++++ var/spack/repos/builtin/packages/m4/package.py | 2 ++ 2 files changed, 17 insertions(+) create mode 100644 var/spack/repos/builtin/packages/libsigsegv/package.py diff --git a/var/spack/repos/builtin/packages/libsigsegv/package.py b/var/spack/repos/builtin/packages/libsigsegv/package.py new file mode 100644 index 0000000000..4b486198ec --- /dev/null +++ b/var/spack/repos/builtin/packages/libsigsegv/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Libsigsegv(Package): + """GNU libsigsegv is a library for handling page faults in user mode.""" + homepage = "https://www.gnu.org/software/libsigsegv/" + url = "ftp://ftp.gnu.org/gnu/libsigsegv/libsigsegv-2.10.tar.gz" + + version('2.10', '7f96fb1f65b3b8cbc1582fb7be774f0f') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix, + '--enable-shared') + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/m4/package.py b/var/spack/repos/builtin/packages/m4/package.py index 5d76d8866b..3890663ad1 100644 --- a/var/spack/repos/builtin/packages/m4/package.py +++ b/var/spack/repos/builtin/packages/m4/package.py @@ -7,6 +7,8 @@ class M4(Package): version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76') + depends_on('libsigsegv') + def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() From f90eaa5f46bfd64b9b0adb4ab5a58d169efe3c69 Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Fri, 4 Mar 2016 14:53:08 -0800 Subject: [PATCH 106/189] Fixed unmatched function signature for do_fetch in jdk package --- var/spack/repos/builtin/packages/jdk/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py index f8f5fc21bd..cbcc53ac0a 100644 --- a/var/spack/repos/builtin/packages/jdk/package.py +++ b/var/spack/repos/builtin/packages/jdk/package.py @@ -28,7 +28,7 @@ class Jdk(Package): '-H', # specify required License Agreement cookie 'Cookie: oraclelicense=accept-securebackup-cookie'] - def do_fetch(self): + def do_fetch(self, mirror_only=False): # Add our custom curl commandline options tty.msg( "[Jdk] Adding required commandline options to curl " + @@ -39,7 +39,7 @@ def do_fetch(self): spack.curl.add_default_arg(option) # Now perform the actual fetch - super(Jdk, self).do_fetch() + super(Jdk, self).do_fetch(mirror_only) def install(self, spec, prefix): From f663d37da75e4ef0f266c63707e77d29a651e07d Mon Sep 17 00:00:00 2001 From: Mark Miller Date: Fri, 4 Mar 2016 17:08:11 -0800 Subject: [PATCH 107/189] Adding zfp package. --- .../repos/builtin/packages/zfp/package.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 var/spack/repos/builtin/packages/zfp/package.py diff --git a/var/spack/repos/builtin/packages/zfp/package.py b/var/spack/repos/builtin/packages/zfp/package.py new file mode 100644 index 0000000000..620fe9d456 --- /dev/null +++ b/var/spack/repos/builtin/packages/zfp/package.py @@ -0,0 +1,26 @@ +from spack import * + +class Zfp(Package): + """zfp is an open source C library for compressed floating-point arrays that supports + very high throughput read and write random acces, target error bounds or bit rates. 
+ Although bit-for-bit lossless compression is not always possible, zfp is usually + accurate to within machine epsilon in near-lossless mode, and is often orders of + magnitude more accurate than other lossy compressors. + """ + + homepage = "http://computation.llnl.gov/projects/floating-point-compression" + url = "http://computation.llnl.gov/projects/floating-point-compression/download/zfp-0.5.0.tar.gz" + + version('0.5.0', '2ab29a852e65ad85aae38925c5003654') + + def install(self, spec, prefix): + make("shared") + + # No install provided + mkdirp(prefix.lib) + mkdirp(prefix.include) + install('lib/libzfp.so', prefix.lib) + install('inc/zfp.h', prefix.include) + install('inc/types.h', prefix.include) + install('inc/bitstream.h', prefix.include) + install('inc/system.h', prefix.include) From fc6aa7374a243928af38fd1e583c6c67146951cd Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 5 Mar 2016 04:18:48 -0800 Subject: [PATCH 108/189] Fix #104, #54: issues with overlong shebang in deep directories. This does several things: - Add `sbang`: a script to run scripts with long shebang lines. - Documentation for `sbang` is in `bin/sbang`. - Add an `sbang` hook that filters the `bin` directory after install and modifies any scripts wtih shebangs that are too long to use `sbang` instead. - `sbang` is at the top level, so it should be runnable (not much we can do if spack itself is too deep for shebang) - `sbang`, when used as the interpreter, runs the *second* shebang line it finds in a script. - shoud fix issues with too long shebang paths. --- bin/sbang | 84 +++++++++++++++++++++++++++++++ lib/spack/llnl/util/filesystem.py | 11 +++- lib/spack/spack/hooks/sbang.py | 74 +++++++++++++++++++++++++++ 3 files changed, 168 insertions(+), 1 deletion(-) create mode 100755 bin/sbang create mode 100644 lib/spack/spack/hooks/sbang.py diff --git a/bin/sbang b/bin/sbang new file mode 100755 index 0000000000..ebfbe2e7a1 --- /dev/null +++ b/bin/sbang @@ -0,0 +1,84 @@ +#!/bin/bash +# +# `sbang`: Run scripts with long shebang lines. +# +# Many operating systems limit the length of shebang lines, making it +# hard to use interpreters that are deep in the directory hierarchy. +# `sbang` can run such scripts, either as a shebang interpreter, or +# directly on the command line. +# +# Usage +# ----------------------------- +# Suppose you have a script, long-shebang.sh, like this: +# +# 1 #!/very/long/path/to/some/interpreter +# 2 +# 3 echo "success!" +# +# Invoking this script will result in an error on some OS's. On +# Linux, you get this: +# +# $ ./long-shebang.sh +# -bash: ./long: /very/long/path/to/some/interp: bad interpreter: +# No such file or directory +# +# On Mac OS X, the system simply assumes the interpreter is the shell +# and tries to run with it, which is likely not what you want. +# +# +# `sbang` on the command line +# ----------------------------- +# You can use `sbang` in two ways. The first is to use it directly, +# from the command line, like this: +# +# $ sbang ./long-shebang.sh +# success! +# +# +# `sbang` as the interpreter +# ----------------------------- +# You can also use `sbang` *as* the interpreter for your script. Put +# `#!/bin/bash /path/to/sbang` on line 1, and move the original +# shebang to line 2 of the script: +# +# 1 #!/bin/bash /path/to/sbang +# 2 #!/long/path/to/real/interpreter with arguments +# 3 +# 4 echo "success!" +# +# $ ./long-shebang.sh +# success! 
+# +# On Linux, you could shorten line 1 to `#!/path/to/sbang`, but other +# operating systems like Mac OS X require the interpreter to be a +# binary, so it's best to use `sbang` as a `bash` argument. +# Obviously, for this to work, `sbang` needs to have a short enough +# path that *it* will run without hitting OS limits. +# +# +# How it works +# ----------------------------- +# `sbang` is a very simple bash script. It looks at the first two +# lines of a script argument and runs the last line starting with +# `#!`, with the script as an argument. It also forwards arguments. +# + +# First argument is the script we want to actually run. +script="$1" + +# Search the first two lines of script for interpreters. +lines=0 +while read line && ((lines < 2)) ; do + if [[ "$line" = '#!'* ]]; then + interpreter="${line#\#!}" + fi + lines=$((lines+1)) +done < "$script" + +# Invoke any interpreter found, or raise an error if none was found. +if [ -n "$interpreter" ]; then + exec $interpreter "$@" +else + echo "error: sbang found no interpreter in $script" + exit 1 +fi diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index da3cf96050..10d25bdce8 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -25,7 +25,8 @@ __all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree', 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file', - 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink'] + 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', + 'copy_mode', 'unset_executable_mode'] import os import sys @@ -158,6 +159,14 @@ def copy_mode(src, dest): os.chmod(dest, dest_mode) +def unset_executable_mode(path): + mode = os.stat(path).st_mode + mode &= ~stat.S_IXUSR + mode &= ~stat.S_IXGRP + mode &= ~stat.S_IXOTH + os.chmod(path, mode) + + def install(src, dest): """Manually install a file to a particular location.""" tty.debug("Installing %s to %s" % (src, dest)) diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py new file mode 100644 index 0000000000..6117c4809d --- /dev/null +++ b/lib/spack/spack/hooks/sbang.py @@ -0,0 +1,74 @@ +############################################################################## +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os + +from llnl.util.filesystem import * +import llnl.util.tty as tty + +import spack +import spack.modules + +# Character limit for shebang line. Using Linux's 127 characters +# here, as it is the shortest I could find on a modern OS. +shebang_limit = 127 + +def shebang_too_long(path): + """Detects whether an file has a shebang line that is too long.""" + with open(path, 'r') as script: + bytes = script.read(2) + if bytes != '#!': + return False + + line = bytes + script.readline() + return len(line) > shebang_limit + + +def filter_shebang(path): + """Adds a second shebang line, using sbang, at the beginning of a file.""" + backup = path + ".shebang.bak" + os.rename(path, backup) + + with open(backup, 'r') as bak_file: + original = bak_file.read() + + with open(path, 'w') as new_file: + new_file.write('#!/bin/bash %s/bin/sbang\n' % spack.spack_root) + new_file.write(original) + + copy_mode(backup, path) + unset_executable_mode(backup) + + tty.warn("Patched overly long shebang in %s" % path) + + +def post_install(pkg): + """This hook edits scripts so that they call /bin/bash + $spack_prefix/bin/sbang instead of something longer than the + shebang limit.""" + for file in os.listdir(pkg.prefix.bin): + path = os.path.join(pkg.prefix.bin, file) + if shebang_too_long(path): + filter_shebang(path) + From bfce2c7508ada675ebe4398395b30ede82b67093 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 5 Mar 2016 04:42:52 -0800 Subject: [PATCH 109/189] Fix bug in hook: ensure bin directory exists before listing. --- lib/spack/spack/hooks/sbang.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py index 6117c4809d..3390ecea29 100644 --- a/lib/spack/spack/hooks/sbang.py +++ b/lib/spack/spack/hooks/sbang.py @@ -67,6 +67,9 @@ def post_install(pkg): """This hook edits scripts so that they call /bin/bash $spack_prefix/bin/sbang instead of something longer than the shebang limit.""" + if not os.path.isdir(pkg.prefix.bin): + return + for file in os.listdir(pkg.prefix.bin): path = os.path.join(pkg.prefix.bin, file) if shebang_too_long(path): From 3dd630d0a567964acd11bee23ebbc52c7d1c61c5 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 5 Mar 2016 14:33:23 -0800 Subject: [PATCH 110/189] Make openssl a variant in libevent. 
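Note on the hunk below: it introduces the Spack pattern of pairing a variant with a conditional dependency and a matching configure switch. One detail looks like an oversight: both branches of the if/else append '--enable-openssl'. A minimal sketch of the presumably intended shape, using the package API as it appears elsewhere in this series (version/url boilerplate omitted):

```
from spack import *

class Libevent(Package):
    """Sketch only: variant-gated dependency plus matching configure switch."""

    variant('openssl', default=True,
            description="Build with encryption enabled at the libevent level.")

    # Pull in openssl only when the variant is enabled.
    depends_on('openssl', when='+openssl')

    def install(self, spec, prefix):
        configure_args = []
        # Translate the variant into the corresponding configure switch.
        if '+openssl' in spec:
            configure_args.append('--enable-openssl')
        else:
            configure_args.append('--disable-openssl')

        configure("--prefix=%s" % prefix, *configure_args)
        make()
        make("install")
```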
--- var/spack/repos/builtin/packages/libevent/package.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py index 2a44c49325..714a155dc0 100644 --- a/var/spack/repos/builtin/packages/libevent/package.py +++ b/var/spack/repos/builtin/packages/libevent/package.py @@ -22,12 +22,16 @@ class Libevent(Package): version('2.0.13', 'af786b4b3f790c9d3279792edf7867fc') version('2.0.12', '42986228baf95e325778ed328a93e070') - - depends_on('openssl') - + variant('openssl', default=True, description="Build with encryption enabled at the libevent level.") + depends_on('openssl', when='+openssl') def install(self, spec, prefix): - configure("--prefix=%s" % prefix) + configure_args = [] + if '+openssl' in spec: + configure_args.append('--enable-openssl') + else: + configure_args.append('--enable-openssl') + configure("--prefix=%s" % prefix, *configure_args) make() make("install") From 220b6a9fee0f307d4de1e48b29093812f7dd10ec Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Sat, 5 Mar 2016 16:46:32 -0600 Subject: [PATCH 111/189] Make libsigsegv an optional dependency --- var/spack/repos/builtin/packages/m4/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/m4/package.py b/var/spack/repos/builtin/packages/m4/package.py index 3890663ad1..9d522dfccf 100644 --- a/var/spack/repos/builtin/packages/m4/package.py +++ b/var/spack/repos/builtin/packages/m4/package.py @@ -7,7 +7,9 @@ class M4(Package): version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76') - depends_on('libsigsegv') + variant('sigsegv', default=True, description="Build the libsigsegv dependency") + + depends_on('libsigsegv', when='+sigsegv') def install(self, spec, prefix): configure("--prefix=%s" % prefix) From 7d847f4dc4f8895d57e492c11cfd7c3fbb794945 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 5 Mar 2016 15:14:21 -0800 Subject: [PATCH 112/189] Minor tweaks to m4 -- be sure to add sigsegv args explicitly. --- var/spack/repos/builtin/packages/m4/package.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/m4/package.py b/var/spack/repos/builtin/packages/m4/package.py index 9d522dfccf..d6829dbcd4 100644 --- a/var/spack/repos/builtin/packages/m4/package.py +++ b/var/spack/repos/builtin/packages/m4/package.py @@ -12,6 +12,12 @@ class M4(Package): depends_on('libsigsegv', when='+sigsegv') def install(self, spec, prefix): - configure("--prefix=%s" % prefix) + configure_args = [] + if 'libsigsegv' in spec: + configure_args.append('--with-libsigsegv-prefix=%s' % spec['libsigsegv'].prefix) + else: + configure_args.append('--without-libsigsegv-prefix') + + configure("--prefix=%s" % prefix, *configure_args) make() make("install") From 14d48eba461596b3c5b03cfe269318946406e02c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 5 Mar 2016 18:40:28 -0800 Subject: [PATCH 113/189] Revert c5d9ee8924 for bug fixes. - This reverts commit c5d9ee89246b3d2aeddb756a04588424051d3295. - merged too soon before - reverting and fixing bugs now. 
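Context for the hunk below: Spack installs libsigsegv into its own prefix, so m4's configure will not find it on the default search paths; the arguments have to name that prefix explicitly. A combined sketch of PATCH 111 and this patch, assuming the 'sigsegv' variant is the only thing pulling libsigsegv into the spec (the hunk itself tests 'libsigsegv' in spec, which is equivalent once the variant gates the dependency):

```
from spack import *

class M4(Package):
    """Sketch only: optional dependency whose prefix is handed to configure."""

    variant('sigsegv', default=True,
            description="Build the libsigsegv dependency")
    depends_on('libsigsegv', when='+sigsegv')

    def install(self, spec, prefix):
        configure_args = []
        # libsigsegv lives in its own Spack prefix, so point configure
        # at it explicitly, or tell it not to look at all.
        if 'libsigsegv' in spec:
            configure_args.append('--with-libsigsegv-prefix=%s'
                                  % spec['libsigsegv'].prefix)
        else:
            configure_args.append('--without-libsigsegv-prefix')

        configure("--prefix=%s" % prefix, *configure_args)
        make()
        make("install")
```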
Conflicts: lib/spack/spack/mirror.py lib/spack/spack/package.py --- lib/spack/llnl/util/filesystem.py | 35 +++- lib/spack/spack/cmd/clean.py | 2 +- lib/spack/spack/mirror.py | 55 +++--- lib/spack/spack/package.py | 175 +++++++++---------- lib/spack/spack/stage.py | 186 ++++++++++----------- lib/spack/spack/test/concretize.py | 2 - lib/spack/spack/test/config.py | 6 +- lib/spack/spack/test/configure_guess.py | 21 +-- lib/spack/spack/test/database.py | 11 +- lib/spack/spack/test/directory_layout.py | 13 +- lib/spack/spack/test/git_fetch.py | 46 ++--- lib/spack/spack/test/hg_fetch.py | 44 +++-- lib/spack/spack/test/install.py | 9 +- lib/spack/spack/test/link_tree.py | 7 +- lib/spack/spack/test/lock.py | 8 +- lib/spack/spack/test/make_executable.py | 6 +- lib/spack/spack/test/mirror.py | 69 ++++---- lib/spack/spack/test/mock_packages_test.py | 8 +- lib/spack/spack/test/mock_repo.py | 4 - lib/spack/spack/test/multimethod.py | 5 +- lib/spack/spack/test/namespace_trie.py | 1 + lib/spack/spack/test/optional_deps.py | 4 +- lib/spack/spack/test/packages.py | 6 +- lib/spack/spack/test/python_version.py | 3 +- lib/spack/spack/test/spec_dag.py | 2 - lib/spack/spack/test/spec_semantics.py | 1 - lib/spack/spack/test/spec_syntax.py | 3 +- lib/spack/spack/test/stage.py | 130 ++++++-------- lib/spack/spack/test/svn_fetch.py | 49 +++--- lib/spack/spack/test/tally_plugin.py | 4 +- lib/spack/spack/test/unit_install.py | 3 +- lib/spack/spack/test/url_extrapolate.py | 3 - lib/spack/spack/test/url_parse.py | 2 +- lib/spack/spack/test/url_substitution.py | 1 - lib/spack/spack/test/versions.py | 1 + lib/spack/spack/test/yaml.py | 1 + 36 files changed, 422 insertions(+), 504 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index da3cf96050..366237ef8f 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -25,7 +25,7 @@ __all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree', 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file', - 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink'] + 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'remove_dead_links', 'remove_linked_tree'] import os import sys @@ -240,7 +240,7 @@ def touchp(path): def force_symlink(src, dest): try: os.symlink(src, dest) - except OSError, e: + except OSError as e: os.remove(dest) os.symlink(src, dest) @@ -344,3 +344,34 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): if order == 'post': yield (source_path, dest_path) + +def remove_dead_links(root): + """ + Removes any dead link that is present in root + + Args: + root: path where to search for dead links + + """ + for file in os.listdir(root): + path = join_path(root, file) + if os.path.islink(path): + real_path = os.path.realpath(path) + if not os.path.exists(real_path): + os.unlink(path) + +def remove_linked_tree(path): + """ + Removes a directory and its contents. If the directory is a symlink, follows the link and removes the real + directory before removing the link. 
+ + Args: + path: directory to be removed + + """ + if os.path.exists(path): + if os.path.islink(path): + shutil.rmtree(os.path.realpath(path), True) + os.unlink(path) + else: + shutil.rmtree(path, True) diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index 6e7179122c..0c8bd1d528 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -43,4 +43,4 @@ def clean(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: package = spack.repo.get(spec) - package.do_clean() + package.stage.destroy() diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 58e31c2c7b..fdc4e7967f 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -110,7 +110,6 @@ def suggest_archive_basename(resource): return basename - def create(path, specs, **kwargs): """Create a directory to be used as a spack mirror, and fill it with package archives. @@ -158,17 +157,29 @@ def create(path, specs, **kwargs): "Cannot create directory '%s':" % mirror_root, str(e)) # Things to keep track of while parsing specs. - present = [] - mirrored = [] - error = [] + categories = { + 'present': [], + 'mirrored': [], + 'error': [] + } # Iterate through packages and download all the safe tarballs for each of them - everything_already_exists = True for spec in version_specs: - pkg = spec.package - tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@"))) - try: - for ii, stage in enumerate(pkg.stage): + add_single_spec(spec, mirror_root, categories, **kwargs) + + return categories['present'], categories['mirrored'], categories['error'] + + +def add_single_spec(spec, mirror_root, categories, **kwargs): + tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@"))) + spec_exists_in_mirror = True + try: + with spec.package.stage: + # fetcher = stage.fetcher + # fetcher.fetch() + # ... + # fetcher.archive(archive_path) + for ii, stage in enumerate(spec.package.stage): fetcher = stage.fetcher if ii == 0: # create a subdirectory for the current package@version @@ -184,7 +195,7 @@ def create(path, specs, **kwargs): if os.path.exists(archive_path): tty.msg("{name} : already added".format(name=name)) else: - everything_already_exists = False + spec_exists_in_mirror = False fetcher.fetch() if not kwargs.get('no_checksum', False): fetcher.check() @@ -195,20 +206,16 @@ def create(path, specs, **kwargs): fetcher.archive(archive_path) tty.msg("{name} : added".format(name=name)) - if everything_already_exists: - present.append(spec) - else: - mirrored.append(spec) - except Exception, e: - if spack.debug: - sys.excepthook(*sys.exc_info()) - else: - tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message) - error.append(spec) - finally: - pkg.stage.destroy() - - return (present, mirrored, error) + if spec_exists_in_mirror: + categories['present'].append(spec) + else: + categories['mirrored'].append(spec) + except Exception as e: + if spack.debug: + sys.excepthook(*sys.exc_info()) + else: + tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message) + categories['error'].append(spec) class MirrorError(spack.error.SpackError): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index fb96f61de9..be45415b75 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -293,7 +293,6 @@ class SomePackage(Package): .. code-block:: python - p.do_clean() # removes the stage directory entirely p.do_restage() # removes the build directory and # re-expands the archive. 
@@ -503,7 +502,6 @@ def fetcher(self): self._fetcher = self._make_fetcher() return self._fetcher - @fetcher.setter def fetcher(self, f): self._fetcher = f @@ -735,7 +733,7 @@ def do_patch(self): # If we encounter an archive that failed to patch, restage it # so that we can apply all the patches again. if os.path.isfile(bad_file): - tty.msg("Patching failed last time. Restaging.") + tty.msg("Patching failed last time. Restaging.") self.stage.restage() self.stage.chdir_to_source() @@ -850,102 +848,102 @@ def do_install(self, make_jobs=make_jobs) start_time = time.time() - if not fake: - if not skip_patch: - self.do_patch() - else: - self.do_stage() - - # create the install directory. The install layout - # handles this in case so that it can use whatever - # package naming scheme it likes. - spack.install_layout.create_install_directory(self.spec) - - def cleanup(): - if not keep_prefix: - # If anything goes wrong, remove the install prefix - self.remove_prefix() - else: - tty.warn("Keeping install prefix in place despite error.", - "Spack will think this package is installed." + - "Manually remove this directory to fix:", - self.prefix, wrap=True) - - - def real_work(): - try: - tty.msg("Building %s" % self.name) - - # Run the pre-install hook in the child process after - # the directory is created. - spack.hooks.pre_install(self) - - # Set up process's build environment before running install. - if fake: - self.do_fake_install() + with self.stage: + if not fake: + if not skip_patch: + self.do_patch() else: - # Do the real install in the source directory. - self.stage.chdir_to_source() + self.do_stage() - # Save the build environment in a file before building. - env_path = join_path(os.getcwd(), 'spack-build.env') + # create the install directory. The install layout + # handles this in case so that it can use whatever + # package naming scheme it likes. + spack.install_layout.create_install_directory(self.spec) - # This redirects I/O to a build log (and optionally to the terminal) - log_path = join_path(os.getcwd(), 'spack-build.out') - log_file = open(log_path, 'w') - with log_output(log_file, verbose, sys.stdout.isatty(), True): - dump_environment(env_path) - self.install(self.spec, self.prefix) + def cleanup(): + if not keep_prefix: + # If anything goes wrong, remove the install prefix + self.remove_prefix() + else: + tty.warn("Keeping install prefix in place despite error.", + "Spack will think this package is installed." + + "Manually remove this directory to fix:", + self.prefix, wrap=True) - # Ensure that something was actually installed. - self._sanity_check_install() + def real_work(): + try: + tty.msg("Building %s" % self.name) - # Move build log into install directory on success - if not fake: - log_install_path = spack.install_layout.build_log_path(self.spec) - env_install_path = spack.install_layout.build_env_path(self.spec) - install(log_path, log_install_path) - install(env_path, env_install_path) + # Run the pre-install hook in the child process after + # the directory is created. + spack.hooks.pre_install(self) - packages_dir = spack.install_layout.build_packages_path(self.spec) - dump_packages(self.spec, packages_dir) + # Set up process's build environment before running install. + if fake: + self.do_fake_install() + else: + # Do the real install in the source directory. + self.stage.chdir_to_source() - # On successful install, remove the stage. - if not keep_stage: - self.stage.destroy() + # Save the build environment in a file before building. 
+ env_path = join_path(os.getcwd(), 'spack-build.env') - # Stop timer. - self._total_time = time.time() - start_time - build_time = self._total_time - self._fetch_time + # This redirects I/O to a build log (and optionally to the terminal) + log_path = join_path(os.getcwd(), 'spack-build.out') + log_file = open(log_path, 'w') + with log_output(log_file, verbose, sys.stdout.isatty(), True): + dump_environment(env_path) + self.install(self.spec, self.prefix) - tty.msg("Successfully installed %s" % self.name, - "Fetch: %s. Build: %s. Total: %s" - % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) - print_pkg(self.prefix) + # Ensure that something was actually installed. + self._sanity_check_install() - except ProcessError, e: - # Annotate with location of build log. - e.build_log = log_path - cleanup() - raise e + # Move build log into install directory on success + if not fake: + log_install_path = spack.install_layout.build_log_path(self.spec) + env_install_path = spack.install_layout.build_env_path(self.spec) + install(log_path, log_install_path) + install(env_path, env_install_path) - except: - # other exceptions just clean up and raise. - cleanup() - raise + packages_dir = spack.install_layout.build_packages_path(self.spec) + dump_packages(self.spec, packages_dir) - # Set parallelism before starting build. - self.make_jobs = make_jobs + # On successful install, remove the stage. + if not keep_stage: + self.stage.destroy() - # Do the build. - spack.build_environment.fork(self, real_work) + # Stop timer. + self._total_time = time.time() - start_time + build_time = self._total_time - self._fetch_time - # note: PARENT of the build process adds the new package to - # the database, so that we don't need to re-read from file. - spack.installed_db.add(self.spec, self.prefix) + tty.msg("Successfully installed %s" % self.name, + "Fetch: %s. Build: %s. Total: %s." + % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) + print_pkg(self.prefix) - # Once everything else is done, run post install hooks - spack.hooks.post_install(self) + except ProcessError as e: + # Annotate with location of build log. + e.build_log = log_path + cleanup() + raise e + + except: + # other exceptions just clean up and raise. + cleanup() + raise + + # Set parallelism before starting build. + self.make_jobs = make_jobs + + # Do the build. + spack.build_environment.fork(self, real_work) + + # note: PARENT of the build process adds the new package to + # the database, so that we don't need to re-read from file. 
+ spack.installed_db.add(self.spec, self.prefix) + + # Once everything else is done, run post install hooks + spack.hooks.post_install(self) def _sanity_check_install(self): @@ -1149,13 +1147,6 @@ def do_restage(self): """Reverts expanded/checked out source to a pristine state.""" self.stage.restage() - - def do_clean(self): - """Removes the package's build stage and source tarball.""" - if os.path.exists(self.stage.path): - self.stage.destroy() - - def format_doc(self, **kwargs): """Wrap doc string at 72 characters and format nicely""" indent = kwargs.get('indent', 0) @@ -1192,7 +1183,7 @@ def fetch_remote_versions(self): try: return spack.util.web.find_versions_of_archive( *self.all_urls, list_url=self.list_url, list_depth=self.list_depth) - except spack.error.NoNetworkConnectionError, e: + except spack.error.NoNetworkConnectionError as e: tty.die("Package.fetch_versions couldn't connect to:", e.url, e.message) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 5591cb9ba5..a22982a6d4 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -42,33 +42,26 @@ class Stage(object): - """A Stage object manages a directory where some source code is - downloaded and built before being installed. It handles - fetching the source code, either as an archive to be expanded - or by checking it out of a repository. A stage's lifecycle - looks like this: + """ + A Stage object is a context manager that handles a directory where some source code is downloaded and built + before being installed. It handles fetching the source code, either as an archive to be expanded or by checking + it out of a repository. A stage's lifecycle looks like this: - Stage() - Constructor creates the stage directory. - fetch() - Fetch a source archive into the stage. - expand_archive() - Expand the source archive. - - Build and install the archive. This is handled by the Package class. - destroy() - Remove the stage once the package has been installed. + ``` + with Stage() as stage: # Context manager creates and destroys the stage directory + fetch() # Fetch a source archive into the stage. + expand_archive() # Expand the source archive. + # Build and install the archive. This is handled by the Package class. + ``` - If spack.use_tmp_stage is True, spack will attempt to create stages - in a tmp directory. Otherwise, stages are created directly in - spack.stage_path. + If spack.use_tmp_stage is True, spack will attempt to create stages in a tmp directory. + Otherwise, stages are created directly in spack.stage_path. - There are two kinds of stages: named and unnamed. Named stages can - persist between runs of spack, e.g. if you fetched a tarball but - didn't finish building it, you won't have to fetch it again. + There are two kinds of stages: named and unnamed. Named stages can persist between runs of spack, e.g. if you + fetched a tarball but didn't finish building it, you won't have to fetch it again. - Unnamed stages are created using standard mkdtemp mechanisms or - similar, and are intended to persist for only one run of spack. + Unnamed stages are created using standard mkdtemp mechanisms or similar, and are intended to persist for + only one run of spack. """ def __init__(self, url_or_fetch_strategy, **kwargs): @@ -96,21 +89,46 @@ def __init__(self, url_or_fetch_strategy, **kwargs): self.default_fetcher = self.fetcher # self.fetcher can change with mirrors. self.skip_checksum_for_mirror = True # used for mirrored archives of repositories. 
- self.name = kwargs.get('name') + # TODO : this uses a protected member of tempfile, but seemed the only way to get a temporary name + # TODO : besides, the temporary link name won't be the same as the temporary stage area in tmp_root + self.name = kwargs.get('name') if 'name' in kwargs else STAGE_PREFIX + next(tempfile._get_candidate_names()) self.mirror_path = kwargs.get('mirror_path') self.tmp_root = find_tmp_root() - self.path = None - self._setup() + # Try to construct here a temporary name for the stage directory + # If this is a named stage, then construct a named path. + self.path = join_path(spack.stage_path, self.name) + # Flag to decide whether to delete the stage folder on exit or not + self.delete_on_exit = True - def _cleanup_dead_links(self): - """Remove any dead links in the stage directory.""" - for file in os.listdir(spack.stage_path): - path = join_path(spack.stage_path, file) - if os.path.islink(path): - real_path = os.path.realpath(path) - if not os.path.exists(path): - os.unlink(path) + def __enter__(self): + """ + Entering a stage context will create the stage directory + + Returns: + self + """ + self.create() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """ + Exiting from a stage context will delete the stage directory unless: + - it was explicitly requested not to do so + - an exception has been raised + + Args: + exc_type: exception type + exc_val: exception value + exc_tb: exception traceback + + Returns: + Boolean + """ + self.delete_on_exit = False if exc_type is not None else self.delete_on_exit + + if self.delete_on_exit: + self.destroy() def _need_to_create_path(self): """Makes sure nothing weird has happened since the last time we @@ -148,54 +166,6 @@ def _need_to_create_path(self): return False - def _setup(self): - """Creates the stage directory. - If spack.use_tmp_stage is False, the stage directory is created - directly under spack.stage_path. - - If spack.use_tmp_stage is True, this will attempt to create a - stage in a temporary directory and link it into spack.stage_path. - Spack will use the first writable location in spack.tmp_dirs to - create a stage. If there is no valid location in tmp_dirs, fall - back to making the stage inside spack.stage_path. - """ - # Create the top-level stage directory - mkdirp(spack.stage_path) - self._cleanup_dead_links() - - # If this is a named stage, then construct a named path. - if self.name is not None: - self.path = join_path(spack.stage_path, self.name) - - # If this is a temporary stage, them make the temp directory - tmp_dir = None - if self.tmp_root: - if self.name is None: - # Unnamed tmp root. Link the path in - tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) - self.name = os.path.basename(tmp_dir) - self.path = join_path(spack.stage_path, self.name) - if self._need_to_create_path(): - os.symlink(tmp_dir, self.path) - - else: - if self._need_to_create_path(): - tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) - os.symlink(tmp_dir, self.path) - - # if we're not using a tmp dir, create the stage directly in the - # stage dir, rather than linking to it. - else: - if self.name is None: - self.path = tempfile.mkdtemp('', STAGE_PREFIX, spack.stage_path) - self.name = os.path.basename(self.path) - else: - if self._need_to_create_path(): - mkdirp(self.path) - - # Make sure we can actually do something with the stage we made. 
- ensure_access(self.path) - @property def archive_file(self): """Path to the source archive within this stage directory.""" @@ -276,7 +246,7 @@ def fetch(self, mirror_only=False): self.fetcher = fetcher self.fetcher.fetch() break - except spack.error.SpackError, e: + except spack.error.SpackError as e: tty.msg("Fetching from %s failed." % fetcher) tty.debug(e) continue @@ -328,8 +298,34 @@ def restage(self): """ self.fetcher.reset() + def create(self): + """ + Creates the stage directory + + If self.tmp_root evaluates to False, the stage directory is created directly under spack.stage_path, otherwise + this will attempt to create a stage in a temporary directory and link it into spack.stage_path. + + Spack will use the first writable location in spack.tmp_dirs to create a stage. If there is no valid location + in tmp_dirs, fall back to making the stage inside spack.stage_path. + """ + # Create the top-level stage directory + mkdirp(spack.stage_path) + remove_dead_links(spack.stage_path) + # If a tmp_root exists then create a directory there and then link it in the stage area, + # otherwise create the stage directory in self.path + if self._need_to_create_path(): + if self.tmp_root: + tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root) + os.symlink(tmp_dir, self.path) + else: + mkdirp(self.path) + # Make sure we can actually do something with the stage we made. + ensure_access(self.path) + def destroy(self): - """Remove this stage directory.""" + """ + Removes this stage directory + """ remove_linked_tree(self.path) # Make sure we don't end up in a removed directory @@ -389,6 +385,15 @@ def source_path(self): def path(self): return self[0].path + def __enter__(self): + for item in self: + item.__enter__() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for item in reversed(self): + item.__exit__(exc_type, exc_val, exc_tb) + def chdir_to_source(self): return self[0].chdir_to_source() @@ -439,19 +444,6 @@ def ensure_access(file=spack.stage_path): tty.die("Insufficient permissions for %s" % file) -def remove_linked_tree(path): - """Removes a directory and its contents. If the directory is a symlink, - follows the link and reamoves the real directory before removing the - link. 
- """ - if os.path.exists(path): - if os.path.islink(path): - shutil.rmtree(os.path.realpath(path), True) - os.unlink(path) - else: - shutil.rmtree(path, True) - - def purge(): """Remove all build directories in the top-level stage path.""" if os.path.isdir(spack.stage_path): diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 7f2938aec5..794344fb6a 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -22,8 +22,6 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest - import spack from spack.spec import Spec, CompilerSpec from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index d8be5a855b..0562d2d620 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -22,13 +22,13 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest -import shutil import os +import shutil from tempfile import mkdtemp -from ordereddict_backport import OrderedDict + import spack import spack.config +from ordereddict_backport import OrderedDict from spack.test.mock_packages_test import * # Some sample compiler config data diff --git a/lib/spack/spack/test/configure_guess.py b/lib/spack/spack/test/configure_guess.py index a4e8565b62..2440d120e5 100644 --- a/lib/spack/spack/test/configure_guess.py +++ b/lib/spack/spack/test/configure_guess.py @@ -23,20 +23,15 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import unittest import shutil import tempfile +import unittest from llnl.util.filesystem import * - from spack.cmd.create import ConfigureGuesser from spack.stage import Stage - -from spack.fetch_strategy import URLFetchStrategy -from spack.directory_layout import YamlDirectoryLayout -from spack.util.executable import which from spack.test.mock_packages_test import * -from spack.test.mock_repo import MockArchive +from spack.util.executable import which class InstallTest(unittest.TestCase): @@ -52,8 +47,6 @@ def setUp(self): def tearDown(self): shutil.rmtree(self.tmpdir, ignore_errors=True) - if self.stage: - self.stage.destroy() os.chdir(self.orig_dir) @@ -64,12 +57,12 @@ def check_archive(self, filename, system): url = 'file://' + join_path(os.getcwd(), 'archive.tar.gz') print url - self.stage = Stage(url) - self.stage.fetch() + with Stage(url) as stage: + stage.fetch() - guesser = ConfigureGuesser() - guesser(self.stage) - self.assertEqual(system, guesser.build_system) + guesser = ConfigureGuesser() + guesser(stage) + self.assertEqual(system, guesser.build_system) def test_python(self): diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 0205f4b8ce..9a57e1f03e 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -26,19 +26,18 @@ These tests check the database is functioning properly, both in memory and in its file """ -import tempfile -import shutil import multiprocessing - -from llnl.util.lock import * -from llnl.util.filesystem import join_path +import shutil +import tempfile import spack +from llnl.util.filesystem 
import join_path +from llnl.util.lock import * +from llnl.util.tty.colify import colify from spack.database import Database from spack.directory_layout import YamlDirectoryLayout from spack.test.mock_packages_test import * -from llnl.util.tty.colify import colify def _print_ref_counts(): """Print out all ref counts for the graph used here, for debugging""" diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py index 925cb648ed..d814572d4a 100644 --- a/lib/spack/spack/test/directory_layout.py +++ b/lib/spack/spack/test/directory_layout.py @@ -25,20 +25,17 @@ """\ This test verifies that the Spack directory layout works properly. """ -import unittest -import tempfile -import shutil import os - -from llnl.util.filesystem import * +import shutil +import tempfile import spack -from spack.spec import Spec -from spack.repository import RepoPath +from llnl.util.filesystem import * from spack.directory_layout import YamlDirectoryLayout +from spack.repository import RepoPath +from spack.spec import Spec from spack.test.mock_packages_test import * - # number of packages to test (to reduce test time) max_packages = 10 diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index d84433176a..3578044116 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -23,19 +23,12 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import unittest -import shutil -import tempfile - -from llnl.util.filesystem import * import spack -from spack.version import ver -from spack.stage import Stage -from spack.util.executable import which - +from llnl.util.filesystem import * from spack.test.mock_packages_test import * from spack.test.mock_repo import MockGitRepo +from spack.version import ver class GitFetchTest(MockPackagesTest): @@ -52,19 +45,15 @@ def setUp(self): spec.concretize() self.pkg = spack.repo.get(spec, new=True) - def tearDown(self): """Destroy the stage space used by this test.""" super(GitFetchTest, self).tearDown() self.repo.destroy() - self.pkg.do_clean() - def assert_rev(self, rev): """Check that the current git revision is equal to the supplied rev.""" self.assertEqual(self.repo.rev_hash('HEAD'), self.repo.rev_hash(rev)) - def try_fetch(self, rev, test_file, args): """Tries to: 1. 
Fetch the repo using a fetch strategy constructed with @@ -76,26 +65,27 @@ def try_fetch(self, rev, test_file, args): """ self.pkg.versions[ver('git')] = args - self.pkg.do_stage() - self.assert_rev(rev) + with self.pkg.stage: + self.pkg.do_stage() + self.assert_rev(rev) - file_path = join_path(self.pkg.stage.source_path, test_file) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + file_path = join_path(self.pkg.stage.source_path, test_file) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - os.unlink(file_path) - self.assertFalse(os.path.isfile(file_path)) + os.unlink(file_path) + self.assertFalse(os.path.isfile(file_path)) - untracked_file = 'foobarbaz' - touch(untracked_file) - self.assertTrue(os.path.isfile(untracked_file)) - self.pkg.do_restage() - self.assertFalse(os.path.isfile(untracked_file)) + untracked_file = 'foobarbaz' + touch(untracked_file) + self.assertTrue(os.path.isfile(untracked_file)) + self.pkg.do_restage() + self.assertFalse(os.path.isfile(untracked_file)) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - self.assert_rev(rev) + self.assert_rev(rev) def test_fetch_master(self): diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py index bbcb64e4c1..b8a0c1ec46 100644 --- a/lib/spack/spack/test/hg_fetch.py +++ b/lib/spack/spack/test/hg_fetch.py @@ -23,16 +23,12 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -import unittest - -from llnl.util.filesystem import * - import spack + from spack.version import ver -from spack.stage import Stage -from spack.util.executable import which -from spack.test.mock_packages_test import * from spack.test.mock_repo import MockHgRepo +from llnl.util.filesystem import * +from spack.test.mock_packages_test import * class HgFetchTest(MockPackagesTest): @@ -49,13 +45,10 @@ def setUp(self): spec.concretize() self.pkg = spack.repo.get(spec, new=True) - def tearDown(self): """Destroy the stage space used by this test.""" super(HgFetchTest, self).tearDown() self.repo.destroy() - self.pkg.do_clean() - def try_fetch(self, rev, test_file, args): """Tries to: @@ -68,26 +61,27 @@ def try_fetch(self, rev, test_file, args): """ self.pkg.versions[ver('hg')] = args - self.pkg.do_stage() - self.assertEqual(self.repo.get_rev(), rev) + with self.pkg.stage: + self.pkg.do_stage() + self.assertEqual(self.repo.get_rev(), rev) - file_path = join_path(self.pkg.stage.source_path, test_file) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + file_path = join_path(self.pkg.stage.source_path, test_file) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - os.unlink(file_path) - self.assertFalse(os.path.isfile(file_path)) + os.unlink(file_path) + self.assertFalse(os.path.isfile(file_path)) - untracked = 'foobarbaz' - touch(untracked) - self.assertTrue(os.path.isfile(untracked)) - self.pkg.do_restage() - self.assertFalse(os.path.isfile(untracked)) + untracked = 'foobarbaz' + touch(untracked) + self.assertTrue(os.path.isfile(untracked)) + self.pkg.do_restage() + self.assertFalse(os.path.isfile(untracked)) - 
self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - self.assertEqual(self.repo.get_rev(), rev) + self.assertEqual(self.repo.get_rev(), rev) def test_fetch_default(self): diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 8863d13c42..8297893f01 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -22,18 +22,13 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os -import unittest import shutil import tempfile -from llnl.util.filesystem import * - import spack -from spack.stage import Stage -from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite +from llnl.util.filesystem import * from spack.directory_layout import YamlDirectoryLayout -from spack.util.executable import which +from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite from spack.test.mock_packages_test import * from spack.test.mock_repo import MockArchive diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py index 886b7ef4c5..ee37e765c7 100644 --- a/lib/spack/spack/test/link_tree.py +++ b/lib/spack/spack/test/link_tree.py @@ -24,8 +24,6 @@ ############################################################################## import os import unittest -import shutil -import tempfile from llnl.util.filesystem import * from llnl.util.link_tree import LinkTree @@ -38,6 +36,7 @@ class LinkTreeTest(unittest.TestCase): def setUp(self): self.stage = Stage('link-tree-test') + self.stage.create() with working_dir(self.stage.path): touchp('source/1') @@ -51,10 +50,8 @@ def setUp(self): source_path = os.path.join(self.stage.path, 'source') self.link_tree = LinkTree(source_path) - def tearDown(self): - if self.stage: - self.stage.destroy() + self.stage.destroy() def check_file_link(self, filename): diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py index bc68df01db..3b11d18da4 100644 --- a/lib/spack/spack/test/lock.py +++ b/lib/spack/spack/test/lock.py @@ -25,15 +25,13 @@ """ These tests ensure that our lock works correctly. """ -import unittest -import os -import tempfile import shutil +import tempfile +import unittest from multiprocessing import Process -from llnl.util.lock import * from llnl.util.filesystem import join_path, touch - +from llnl.util.lock import * from spack.util.multiproc import Barrier # This is the longest a failed test will take, as the barriers will diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py index d568a28d44..a2606acf19 100644 --- a/lib/spack/spack/test/make_executable.py +++ b/lib/spack/spack/test/make_executable.py @@ -28,13 +28,13 @@ This just tests whether the right args are getting passed to make. 
""" import os -import unittest -import tempfile import shutil +import tempfile +import unittest from llnl.util.filesystem import * -from spack.util.environment import path_put_first from spack.build_environment import MakeExecutable +from spack.util.environment import path_put_first class MakeExecutableTest(unittest.TestCase): diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index f83cc8090c..e707adfe9d 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -23,11 +23,10 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os -from filecmp import dircmp - import spack import spack.mirror -from spack.util.compression import decompressor_for + +from filecmp import dircmp from spack.test.mock_packages_test import * from spack.test.mock_repo import * @@ -74,14 +73,14 @@ def set_up_package(self, name, MockRepoClass, url_attr): def check_mirror(self): - stage = Stage('spack-mirror-test') - mirror_root = join_path(stage.path, 'test-mirror') + with Stage('spack-mirror-test') as stage: + mirror_root = join_path(stage.path, 'test-mirror') + + # register mirror with spack config + mirrors = { 'spack-mirror-test' : 'file://' + mirror_root } + spack.config.update_config('mirrors', mirrors) - # register mirror with spack config - mirrors = { 'spack-mirror-test' : 'file://' + mirror_root } - spack.config.update_config('mirrors', mirrors) - try: os.chdir(stage.path) spack.mirror.create( mirror_root, self.repos, no_checksum=True) @@ -97,38 +96,28 @@ def check_mirror(self): files = os.listdir(subdir) self.assertEqual(len(files), 1) - # Now try to fetch each package. - for name, mock_repo in self.repos.items(): - spec = Spec(name).concretized() - pkg = spec.package + # Now try to fetch each package. + for name, mock_repo in self.repos.items(): + spec = Spec(name).concretized() + pkg = spec.package - pkg._stage = None - saved_checksum_setting = spack.do_checksum - try: - # Stage the archive from the mirror and cd to it. - spack.do_checksum = False - pkg.do_stage(mirror_only=True) - - # Compare the original repo with the expanded archive - original_path = mock_repo.path - if 'svn' in name: - # have to check out the svn repo to compare. - original_path = join_path(mock_repo.path, 'checked_out') - svn('checkout', mock_repo.url, original_path) - - dcmp = dircmp(original_path, pkg.stage.source_path) - - # make sure there are no new files in the expanded tarball - self.assertFalse(dcmp.right_only) - - # and that all original files are present. - self.assertTrue(all(l in exclude for l in dcmp.left_only)) - - finally: - spack.do_checksum = saved_checksum_setting - pkg.do_clean() - finally: - stage.destroy() + saved_checksum_setting = spack.do_checksum + with pkg.stage: + # Stage the archive from the mirror and cd to it. + spack.do_checksum = False + pkg.do_stage(mirror_only=True) + # Compare the original repo with the expanded archive + original_path = mock_repo.path + if 'svn' in name: + # have to check out the svn repo to compare. + original_path = join_path(mock_repo.path, 'checked_out') + svn('checkout', mock_repo.url, original_path) + dcmp = dircmp(original_path, pkg.stage.source_path) + # make sure there are no new files in the expanded tarball + self.assertFalse(dcmp.right_only) + # and that all original files are present. 
+ self.assertTrue(all(l in exclude for l in dcmp.left_only)) + spack.do_checksum = saved_checksum_setting def test_git_mirror(self): diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index e9f1f95df5..0b8867b61e 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -22,17 +22,15 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys import os import shutil -import unittest import tempfile -from ordereddict_backport import OrderedDict - -from llnl.util.filesystem import mkdirp +import unittest import spack import spack.config +from llnl.util.filesystem import mkdirp +from ordereddict_backport import OrderedDict from spack.repository import RepoPath from spack.spec import Spec diff --git a/lib/spack/spack/test/mock_repo.py b/lib/spack/spack/test/mock_repo.py index ed94023b0e..a8bdfb5571 100644 --- a/lib/spack/spack/test/mock_repo.py +++ b/lib/spack/spack/test/mock_repo.py @@ -26,13 +26,9 @@ import shutil from llnl.util.filesystem import * - -import spack -from spack.version import ver from spack.stage import Stage from spack.util.executable import which - # # VCS Systems used by mock repo code. # diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py index 7bf4ff0a0a..2d4b8cd584 100644 --- a/lib/spack/spack/test/multimethod.py +++ b/lib/spack/spack/test/multimethod.py @@ -25,14 +25,11 @@ """ Test for multi_method dispatch. """ -import unittest import spack from spack.multimethod import * -from spack.version import * -from spack.spec import Spec -from spack.multimethod import when from spack.test.mock_packages_test import * +from spack.version import * class MultiMethodTest(MockPackagesTest): diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py index 647976df21..2023ba6d96 100644 --- a/lib/spack/spack/test/namespace_trie.py +++ b/lib/spack/spack/test/namespace_trie.py @@ -23,6 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import unittest + from spack.util.naming import NamespaceTrie diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py index ebd7281999..55f35ea4c9 100644 --- a/lib/spack/spack/test/optional_deps.py +++ b/lib/spack/spack/test/optional_deps.py @@ -22,10 +22,8 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest -import spack -from spack.spec import Spec, CompilerSpec +from spack.spec import Spec from spack.test.mock_packages_test import * class ConcretizeTest(MockPackagesTest): diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index 83984dc5f6..f0b5e05f3b 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -22,14 +22,12 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest - -from llnl.util.filesystem import join_path import spack +from llnl.util.filesystem import join_path 
from spack.repository import Repo -from spack.util.naming import mod_to_class from spack.test.mock_packages_test import * +from spack.util.naming import mod_to_class class PackagesTest(MockPackagesTest): diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py index d74d3b9b7d..4294975304 100644 --- a/lib/spack/spack/test/python_version.py +++ b/lib/spack/spack/test/python_version.py @@ -28,12 +28,11 @@ Spack was originally 2.7, but enough systems in 2014 are still using 2.6 on their frontend nodes that we need 2.6 to get adopted. """ -import unittest import os import re +import unittest import llnl.util.tty as tty - import pyqver2 import spack diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 632f777cde..5e6162b6e6 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -31,8 +31,6 @@ import spack import spack.package -from llnl.util.lang import list_modules - from spack.spec import Spec from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 44a09cbd7f..8c33d1ff6e 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -22,7 +22,6 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest from spack.spec import * from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index 1daaa4be8f..6e08e30e13 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -23,9 +23,10 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import unittest + import spack.spec -from spack.spec import * from spack.parse import Token +from spack.spec import * # Sample output for a complex lexing. complex_lex = [Token(ID, 'mvapich_foo'), diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index c1b2a2a573..dbcf89d864 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -25,15 +25,13 @@ """\ Test that the Stage class works correctly. 
""" -import unittest -import shutil import os -import getpass +import shutil +import unittest from contextlib import * -from llnl.util.filesystem import * - import spack +from llnl.util.filesystem import * from spack.stage import Stage from spack.util.executable import which @@ -192,116 +190,90 @@ def check_destroy(self, stage, stage_name): def test_setup_and_destroy_name_with_tmp(self): with use_tmp(True): - stage = Stage(archive_url, name=stage_name) - self.check_setup(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + self.check_setup(stage, stage_name) self.check_destroy(stage, stage_name) def test_setup_and_destroy_name_without_tmp(self): with use_tmp(False): - stage = Stage(archive_url, name=stage_name) - self.check_setup(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + self.check_setup(stage, stage_name) self.check_destroy(stage, stage_name) def test_setup_and_destroy_no_name_with_tmp(self): with use_tmp(True): - stage = Stage(archive_url) - self.check_setup(stage, None) - - stage.destroy() + with Stage(archive_url) as stage: + self.check_setup(stage, None) self.check_destroy(stage, None) def test_setup_and_destroy_no_name_without_tmp(self): with use_tmp(False): - stage = Stage(archive_url) - self.check_setup(stage, None) - - stage.destroy() + with Stage(archive_url) as stage: + self.check_setup(stage, None) self.check_destroy(stage, None) def test_chdir(self): - stage = Stage(archive_url, name=stage_name) - - stage.chdir() - self.check_setup(stage, stage_name) - self.check_chdir(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + stage.chdir() + self.check_setup(stage, stage_name) + self.check_chdir(stage, stage_name) self.check_destroy(stage, stage_name) def test_fetch(self): - stage = Stage(archive_url, name=stage_name) - - stage.fetch() - self.check_setup(stage, stage_name) - self.check_chdir(stage, stage_name) - self.check_fetch(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + stage.fetch() + self.check_setup(stage, stage_name) + self.check_chdir(stage, stage_name) + self.check_fetch(stage, stage_name) self.check_destroy(stage, stage_name) def test_expand_archive(self): - stage = Stage(archive_url, name=stage_name) - - stage.fetch() - self.check_setup(stage, stage_name) - self.check_fetch(stage, stage_name) - - stage.expand_archive() - self.check_expand_archive(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + stage.fetch() + self.check_setup(stage, stage_name) + self.check_fetch(stage, stage_name) + stage.expand_archive() + self.check_expand_archive(stage, stage_name) self.check_destroy(stage, stage_name) def test_expand_archive(self): - stage = Stage(archive_url, name=stage_name) - - stage.fetch() - self.check_setup(stage, stage_name) - self.check_fetch(stage, stage_name) - - stage.expand_archive() - stage.chdir_to_source() - self.check_expand_archive(stage, stage_name) - self.check_chdir_to_source(stage, stage_name) - - stage.destroy() + with Stage(archive_url, name=stage_name) as stage: + stage.fetch() + self.check_setup(stage, stage_name) + self.check_fetch(stage, stage_name) + stage.expand_archive() + stage.chdir_to_source() + self.check_expand_archive(stage, stage_name) + self.check_chdir_to_source(stage, stage_name) self.check_destroy(stage, stage_name) def test_restage(self): - stage = Stage(archive_url, name=stage_name) + with Stage(archive_url, 
name=stage_name) as stage: + stage.fetch() + stage.expand_archive() + stage.chdir_to_source() + self.check_expand_archive(stage, stage_name) + self.check_chdir_to_source(stage, stage_name) - stage.fetch() - stage.expand_archive() - stage.chdir_to_source() - self.check_expand_archive(stage, stage_name) - self.check_chdir_to_source(stage, stage_name) + # Try to make a file in the old archive dir + with open('foobar', 'w') as file: + file.write("this file is to be destroyed.") - # Try to make a file in the old archive dir - with open('foobar', 'w') as file: - file.write("this file is to be destroyed.") + self.assertTrue('foobar' in os.listdir(stage.source_path)) - self.assertTrue('foobar' in os.listdir(stage.source_path)) - - # Make sure the file is not there after restage. - stage.restage() - self.check_chdir(stage, stage_name) - self.check_fetch(stage, stage_name) - - stage.chdir_to_source() - self.check_chdir_to_source(stage, stage_name) - self.assertFalse('foobar' in os.listdir(stage.source_path)) - - stage.destroy() + # Make sure the file is not there after restage. + stage.restage() + self.check_chdir(stage, stage_name) + self.check_fetch(stage, stage_name) + stage.chdir_to_source() + self.check_chdir_to_source(stage, stage_name) + self.assertFalse('foobar' in os.listdir(stage.source_path)) self.check_destroy(stage, stage_name) diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py index 454a7f1d1f..1ee4ee700e 100644 --- a/lib/spack/spack/test/svn_fetch.py +++ b/lib/spack/spack/test/svn_fetch.py @@ -24,18 +24,12 @@ ############################################################################## import os import re -import unittest -import shutil -import tempfile - -from llnl.util.filesystem import * - import spack -from spack.version import ver -from spack.stage import Stage -from spack.util.executable import which -from spack.test.mock_packages_test import * + from spack.test.mock_repo import svn, MockSvnRepo +from spack.version import ver +from spack.test.mock_packages_test import * +from llnl.util.filesystem import * class SvnFetchTest(MockPackagesTest): @@ -51,13 +45,10 @@ def setUp(self): spec.concretize() self.pkg = spack.repo.get(spec, new=True) - def tearDown(self): """Destroy the stage space used by this test.""" super(SvnFetchTest, self).tearDown() self.repo.destroy() - self.pkg.do_clean() - def assert_rev(self, rev): """Check that the current revision is equal to the supplied rev.""" @@ -70,7 +61,6 @@ def get_rev(): return match.group(1) self.assertEqual(get_rev(), rev) - def try_fetch(self, rev, test_file, args): """Tries to: 1. 
Fetch the repo using a fetch strategy constructed with @@ -82,26 +72,27 @@ def try_fetch(self, rev, test_file, args): """ self.pkg.versions[ver('svn')] = args - self.pkg.do_stage() - self.assert_rev(rev) + with self.pkg.stage: + self.pkg.do_stage() + self.assert_rev(rev) - file_path = join_path(self.pkg.stage.source_path, test_file) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + file_path = join_path(self.pkg.stage.source_path, test_file) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - os.unlink(file_path) - self.assertFalse(os.path.isfile(file_path)) + os.unlink(file_path) + self.assertFalse(os.path.isfile(file_path)) - untracked = 'foobarbaz' - touch(untracked) - self.assertTrue(os.path.isfile(untracked)) - self.pkg.do_restage() - self.assertFalse(os.path.isfile(untracked)) + untracked = 'foobarbaz' + touch(untracked) + self.assertTrue(os.path.isfile(untracked)) + self.pkg.do_restage() + self.assertFalse(os.path.isfile(untracked)) - self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) - self.assertTrue(os.path.isfile(file_path)) + self.assertTrue(os.path.isdir(self.pkg.stage.source_path)) + self.assertTrue(os.path.isfile(file_path)) - self.assert_rev(rev) + self.assert_rev(rev) def test_fetch_default(self): diff --git a/lib/spack/spack/test/tally_plugin.py b/lib/spack/spack/test/tally_plugin.py index e0b9618e0c..4163ab95dd 100644 --- a/lib/spack/spack/test/tally_plugin.py +++ b/lib/spack/spack/test/tally_plugin.py @@ -22,10 +22,10 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -from nose.plugins import Plugin - import os +from nose.plugins import Plugin + class Tally(Plugin): name = 'tally' diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py index ccc409dd60..18615b7efe 100644 --- a/lib/spack/spack/test/unit_install.py +++ b/lib/spack/spack/test/unit_install.py @@ -22,10 +22,11 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import unittest import itertools +import unittest import spack + test_install = __import__("spack.cmd.test-install", fromlist=["BuildId", "create_test_output", "TestResult"]) diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py index 87adf89401..068a335b49 100644 --- a/lib/spack/spack/test/url_extrapolate.py +++ b/lib/spack/spack/test/url_extrapolate.py @@ -25,10 +25,7 @@ """\ Tests ability of spack to extrapolate URL versions from existing versions. """ -import spack import spack.url as url -from spack.spec import Spec -from spack.version import ver from spack.test.mock_packages_test import * diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py index efde7c0c73..561d4658a1 100644 --- a/lib/spack/spack/test/url_parse.py +++ b/lib/spack/spack/test/url_parse.py @@ -27,8 +27,8 @@ detection in Homebrew. 
""" import unittest + import spack.url as url -from pprint import pprint class UrlParseTest(unittest.TestCase): diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py index aec8baf4ea..2be38af0d3 100644 --- a/lib/spack/spack/test/url_substitution.py +++ b/lib/spack/spack/test/url_substitution.py @@ -27,7 +27,6 @@ """ import unittest -import spack import spack.url as url diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py index 108450e098..2732006eb3 100644 --- a/lib/spack/spack/test/versions.py +++ b/lib/spack/spack/test/versions.py @@ -28,6 +28,7 @@ where it makes sense. """ import unittest + from spack.version import * diff --git a/lib/spack/spack/test/yaml.py b/lib/spack/spack/test/yaml.py index 5a357b8e69..b930c022f2 100644 --- a/lib/spack/spack/test/yaml.py +++ b/lib/spack/spack/test/yaml.py @@ -26,6 +26,7 @@ Test Spack's custom YAML format. """ import unittest + import spack.util.spack_yaml as syaml test_file = """\ From ad103dcafa652a839590f5fce28b2e2ce3b5a56d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 5 Mar 2016 19:55:07 -0800 Subject: [PATCH 114/189] Small refactor: add keep parameter to stage, get rid of stage.destroy call. - package.py uses context manager more effectively. - Stage.__init__ has easier to understand method signature now. - keep can be used to override the default behavior either to keep the stage ALL the time or to delete the stage ALL the time. --- lib/spack/llnl/util/filesystem.py | 5 +- lib/spack/spack/cmd/clean.py | 2 +- lib/spack/spack/package.py | 108 +++++++++++++++------------- lib/spack/spack/stage.py | 116 ++++++++++++++++++++---------- 4 files changed, 141 insertions(+), 90 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 366237ef8f..a92cb0706d 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -362,8 +362,9 @@ def remove_dead_links(root): def remove_linked_tree(path): """ - Removes a directory and its contents. If the directory is a symlink, follows the link and removes the real - directory before removing the link. + Removes a directory and its contents. If the directory is a + symlink, follows the link and removes the real directory before + removing the link. Args: path: directory to be removed diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index 0c8bd1d528..6e7179122c 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -43,4 +43,4 @@ def clean(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: package = spack.repo.get(spec) - package.stage.destroy() + package.do_clean() diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index be45415b75..47d259968a 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -293,6 +293,7 @@ class SomePackage(Package): .. code-block:: python + p.do_clean() # removes the stage directory entirely p.do_restage() # removes the build directory and # re-expands the archive. 
@@ -455,7 +456,7 @@ def _make_stage(self): # Construct a composite stage on top of the composite FetchStrategy composite_fetcher = self.fetcher composite_stage = StageComposite() - resources = self._get_resources() + resources = self._get_needed_resources() for ii, fetcher in enumerate(composite_fetcher): if ii == 0: # Construct root stage first @@ -484,12 +485,14 @@ def stage(self, stage): def _make_fetcher(self): - # Construct a composite fetcher that always contains at least one element (the root package). In case there - # are resources associated with the package, append their fetcher to the composite. + # Construct a composite fetcher that always contains at least + # one element (the root package). In case there are resources + # associated with the package, append their fetcher to the + # composite. root_fetcher = fs.for_package_version(self, self.version) fetcher = fs.FetchStrategyComposite() # Composite fetcher fetcher.append(root_fetcher) # Root fetcher is always present - resources = self._get_resources() + resources = self._get_needed_resources() for resource in resources: fetcher.append(resource.fetcher) return fetcher @@ -706,6 +709,7 @@ def do_stage(self, mirror_only=False): self.stage.expand_archive() self.stage.chdir_to_source() + def do_patch(self): """Calls do_stage(), then applied patches to the expanded tarball if they haven't been applied already.""" @@ -798,7 +802,7 @@ def do_fake_install(self): mkdirp(self.prefix.man1) - def _get_resources(self): + def _get_needed_resources(self): resources = [] # Select the resources that are needed for this build for when_spec, resource_list in self.resources.items(): @@ -816,7 +820,7 @@ def _resource_stage(self, resource): def do_install(self, - keep_prefix=False, keep_stage=False, ignore_deps=False, + keep_prefix=False, keep_stage=None, ignore_deps=False, skip_patch=False, verbose=False, make_jobs=None, fake=False): """Called by commands to install a package and its dependencies. @@ -825,7 +829,8 @@ def do_install(self, Args: keep_prefix -- Keep install prefix on failure. By default, destroys it. - keep_stage -- Keep stage on successful build. By default, destroys it. + keep_stage -- Set to True or false to always keep or always delete stage. + By default, stage is destroyed only if there are no exceptions. ignore_deps -- Do not install dependencies before installing this package. fake -- Don't really build -- install fake stub files instead. skip_patch -- Skip patch stage of build if True. @@ -848,32 +853,33 @@ def do_install(self, make_jobs=make_jobs) start_time = time.time() - with self.stage: - if not fake: - if not skip_patch: - self.do_patch() - else: - self.do_stage() + if not fake: + if not skip_patch: + self.do_patch() + else: + self.do_stage() - # create the install directory. The install layout - # handles this in case so that it can use whatever - # package naming scheme it likes. - spack.install_layout.create_install_directory(self.spec) + # create the install directory. The install layout + # handles this in case so that it can use whatever + # package naming scheme it likes. + spack.install_layout.create_install_directory(self.spec) - def cleanup(): - if not keep_prefix: - # If anything goes wrong, remove the install prefix - self.remove_prefix() - else: - tty.warn("Keeping install prefix in place despite error.", - "Spack will think this package is installed." 
+ - "Manually remove this directory to fix:", - self.prefix, wrap=True) + def cleanup(): + if not keep_prefix: + # If anything goes wrong, remove the install prefix + self.remove_prefix() + else: + tty.warn("Keeping install prefix in place despite error.", + "Spack will think this package is installed." + + "Manually remove this directory to fix:", + self.prefix, wrap=True) - def real_work(): - try: - tty.msg("Building %s" % self.name) + def real_work(): + try: + tty.msg("Building %s" % self.name) + self.stage.keep = keep_stage + with self.stage: # Run the pre-install hook in the child process after # the directory is created. spack.hooks.pre_install(self) @@ -888,7 +894,7 @@ def real_work(): # Save the build environment in a file before building. env_path = join_path(os.getcwd(), 'spack-build.env') - # This redirects I/O to a build log (and optionally to the terminal) + # Redirect I/O to a build log (and optionally to the terminal) log_path = join_path(os.getcwd(), 'spack-build.out') log_file = open(log_path, 'w') with log_output(log_file, verbose, sys.stdout.isatty(), True): @@ -908,29 +914,25 @@ def real_work(): packages_dir = spack.install_layout.build_packages_path(self.spec) dump_packages(self.spec, packages_dir) - # On successful install, remove the stage. - if not keep_stage: - self.stage.destroy() + # Stop timer. + self._total_time = time.time() - start_time + build_time = self._total_time - self._fetch_time - # Stop timer. - self._total_time = time.time() - start_time - build_time = self._total_time - self._fetch_time + tty.msg("Successfully installed %s" % self.name, + "Fetch: %s. Build: %s. Total: %s." + % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) + print_pkg(self.prefix) - tty.msg("Successfully installed %s" % self.name, - "Fetch: %s. Build: %s. Total: %s." - % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) - print_pkg(self.prefix) + except ProcessError as e: + # Annotate with location of build log. + e.build_log = log_path + cleanup() + raise e - except ProcessError as e: - # Annotate with location of build log. - e.build_log = log_path - cleanup() - raise e - - except: - # other exceptions just clean up and raise. - cleanup() - raise + except: + # other exceptions just clean up and raise. + cleanup() + raise # Set parallelism before starting build. self.make_jobs = make_jobs @@ -1147,6 +1149,12 @@ def do_restage(self): """Reverts expanded/checked out source to a pristine state.""" self.stage.restage() + + def do_clean(self): + """Removes the package's build stage and source tarball.""" + self.stage.destroy() + + def format_doc(self, **kwargs): """Wrap doc string at 72 characters and format nicely""" indent = kwargs.get('indent', 0) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index a22982a6d4..b117c76aa1 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -42,29 +42,53 @@ class Stage(object): - """ - A Stage object is a context manager that handles a directory where some source code is downloaded and built - before being installed. It handles fetching the source code, either as an archive to be expanded or by checking - it out of a repository. A stage's lifecycle looks like this: + """Manages a temporary stage directory for building. + + A Stage object is a context manager that handles a directory where + some source code is downloaded and built before being installed. + It handles fetching the source code, either as an archive to be + expanded or by checking it out of a repository. 
A stage's + lifecycle looks like this: ``` - with Stage() as stage: # Context manager creates and destroys the stage directory - fetch() # Fetch a source archive into the stage. - expand_archive() # Expand the source archive. - # Build and install the archive. This is handled by the Package class. + with Stage() as stage: # Context manager creates and destroys the stage directory + stage.fetch() # Fetch a source archive into the stage. + stage.expand_archive() # Expand the source archive. + # Build and install the archive. (handled by user of Stage) ``` - If spack.use_tmp_stage is True, spack will attempt to create stages in a tmp directory. - Otherwise, stages are created directly in spack.stage_path. + When used as a context manager, the stage is automatically + destroyed if no exception is raised by the context. If an + excpetion is raised, the stage is left in the filesystem and NOT + destroyed, for potential reuse later. - There are two kinds of stages: named and unnamed. Named stages can persist between runs of spack, e.g. if you - fetched a tarball but didn't finish building it, you won't have to fetch it again. + You can also use the stage's create/destroy functions manually, + like this: - Unnamed stages are created using standard mkdtemp mechanisms or similar, and are intended to persist for - only one run of spack. + ``` + stage = Stage() + try: + stage.create() # Explicitly create the stage directory. + stage.fetch() # Fetch a source archive into the stage. + stage.expand_archive() # Expand the source archive. + # Build and install the archive. (handled by user of Stage) + finally: + stage.destroy() # Explicitly destroy the stage directory. + ``` + + If spack.use_tmp_stage is True, spack will attempt to create + stages in a tmp directory. Otherwise, stages are created directly + in spack.stage_path. + + There are two kinds of stages: named and unnamed. Named stages + can persist between runs of spack, e.g. if you fetched a tarball + but didn't finish building it, you won't have to fetch it again. + + Unnamed stages are created using standard mkdtemp mechanisms or + similar, and are intended to persist for only one run of spack. """ - def __init__(self, url_or_fetch_strategy, **kwargs): + def __init__(self, url_or_fetch_strategy, name=None, mirror_path=None, keep=None): """Create a stage object. Parameters: url_or_fetch_strategy @@ -76,6 +100,18 @@ def __init__(self, url_or_fetch_strategy, **kwargs): and will persist between runs (or if you construct another stage object later). If name is not provided, then this stage will be given a unique name automatically. + + mirror_path + If provided, Stage will search Spack's mirrors for + this archive at the mirror_path, before using the + default fetch strategy. + + keep + By default, when used as a context manager, the Stage + is cleaned up when everything goes well, and it is + kept intact when an exception is raised. You can + override this behavior by setting keep to True + (always keep) or False (always delete). """ # TODO: fetch/stage coupling needs to be reworked -- the logic # TODO: here is convoluted and not modular enough. 
@@ -91,15 +127,19 @@ def __init__(self, url_or_fetch_strategy, **kwargs): # TODO : this uses a protected member of tempfile, but seemed the only way to get a temporary name # TODO : besides, the temporary link name won't be the same as the temporary stage area in tmp_root - self.name = kwargs.get('name') if 'name' in kwargs else STAGE_PREFIX + next(tempfile._get_candidate_names()) - self.mirror_path = kwargs.get('mirror_path') + self.name = name + if name is None: + self.name = STAGE_PREFIX + next(tempfile._get_candidate_names()) + self.mirror_path = mirror_path self.tmp_root = find_tmp_root() # Try to construct here a temporary name for the stage directory # If this is a named stage, then construct a named path. self.path = join_path(spack.stage_path, self.name) + # Flag to decide whether to delete the stage folder on exit or not - self.delete_on_exit = True + self.keep = keep + def __enter__(self): """ @@ -111,6 +151,7 @@ def __enter__(self): self.create() return self + def __exit__(self, exc_type, exc_val, exc_tb): """ Exiting from a stage context will delete the stage directory unless: @@ -125,11 +166,15 @@ def __exit__(self, exc_type, exc_val, exc_tb): Returns: Boolean """ - self.delete_on_exit = False if exc_type is not None else self.delete_on_exit + if self.keep is None: + # Default: delete when there are no exceptions. + if exc_type is None: self.destroy() - if self.delete_on_exit: + elif not self.keep: + # Overridden. Either always keep or always delete. self.destroy() + def _need_to_create_path(self): """Makes sure nothing weird has happened since the last time we looked at path. Returns True if path already exists and is ok. @@ -201,7 +246,7 @@ def chdir(self): if os.path.isdir(self.path): os.chdir(self.path) else: - tty.die("Setup failed: no such directory: " + self.path) + raise ChdirError("Setup failed: no such directory: " + self.path) def fetch(self, mirror_only=False): """Downloads an archive or checks out code from a repository.""" @@ -302,11 +347,14 @@ def create(self): """ Creates the stage directory - If self.tmp_root evaluates to False, the stage directory is created directly under spack.stage_path, otherwise - this will attempt to create a stage in a temporary directory and link it into spack.stage_path. + If self.tmp_root evaluates to False, the stage directory is + created directly under spack.stage_path, otherwise this will + attempt to create a stage in a temporary directory and link it + into spack.stage_path. - Spack will use the first writable location in spack.tmp_dirs to create a stage. If there is no valid location - in tmp_dirs, fall back to making the stage inside spack.stage_path. + Spack will use the first writable location in spack.tmp_dirs + to create a stage. If there is no valid location in tmp_dirs, + fall back to making the stage inside spack.stage_path. """ # Create the top-level stage directory mkdirp(spack.stage_path) @@ -323,9 +371,7 @@ def create(self): ensure_access(self.path) def destroy(self): - """ - Removes this stage directory - """ + """Removes this stage directory.""" remove_linked_tree(self.path) # Make sure we don't end up in a removed directory @@ -370,7 +416,7 @@ def expand_archive(self): shutil.move(source_path, destination_path) -@pattern.composite(method_list=['fetch', 'check', 'expand_archive', 'restage', 'destroy']) +@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy']) class StageComposite: """ Composite for Stage type objects. 
The first item in this composite is considered to be the root package, and @@ -410,7 +456,7 @@ def chdir(self): if os.path.isdir(self.path): os.chdir(self.path) else: - tty.die("Setup failed: no such directory: " + self.path) + raise ChdirError("Setup failed: no such directory: " + self.path) def chdir_to_source(self): self.chdir() @@ -472,19 +518,15 @@ def find_tmp_root(): class StageError(spack.error.SpackError): - def __init__(self, message, long_message=None): - super(self, StageError).__init__(message, long_message) + """"Superclass for all errors encountered during staging.""" class RestageError(StageError): - def __init__(self, message, long_msg=None): - super(RestageError, self).__init__(message, long_msg) + """"Error encountered during restaging.""" class ChdirError(StageError): - def __init__(self, message, long_msg=None): - super(ChdirError, self).__init__(message, long_msg) - + """Raised when Spack can't change directories.""" # Keep this in namespace for convenience FailedDownloadError = fs.FailedDownloadError From ae87948a232c17982a7987e5423ac77acdc59194 Mon Sep 17 00:00:00 2001 From: citibeth Date: Sat, 5 Mar 2016 23:05:45 -0500 Subject: [PATCH 115/189] Fixed URLs Removed vestigal mpi variant in netcdf-fortran --- var/spack/repos/builtin/packages/netcdf-cxx4/package.py | 1 - var/spack/repos/builtin/packages/netcdf-fortran/package.py | 4 +--- var/spack/repos/builtin/packages/netcdf/package.py | 2 +- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py index ab717ac6ff..b83e964b00 100644 --- a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py +++ b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py @@ -11,6 +11,5 @@ class NetcdfCxx4(Package): def install(self, spec, prefix): configure('--prefix=%s' % prefix) - make() make("install") diff --git a/var/spack/repos/builtin/packages/netcdf-fortran/package.py b/var/spack/repos/builtin/packages/netcdf-fortran/package.py index 954e7dc3e8..e4e33445e5 100644 --- a/var/spack/repos/builtin/packages/netcdf-fortran/package.py +++ b/var/spack/repos/builtin/packages/netcdf-fortran/package.py @@ -3,13 +3,11 @@ class NetcdfFortran(Package): """Fortran interface for NetCDF4""" - homepage = "http://www.unidata.ucar.edu/downloads/netcdf/netcdf-cxx/index.jsp" + homepage = "http://www.unidata.ucar.edu/software/netcdf" url = "http://www.unidata.ucar.edu/downloads/netcdf/ftp/netcdf-fortran-4.4.3.tar.gz" version('4.4.3', 'bfd4ae23a34635b273d3eb0d91cbde9e') - variant('mpi', default=True, description='Enables MPI parallelism') - depends_on('netcdf') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py index 0b112a59ce..227362399a 100644 --- a/var/spack/repos/builtin/packages/netcdf/package.py +++ b/var/spack/repos/builtin/packages/netcdf/package.py @@ -6,7 +6,7 @@ class Netcdf(Package): data formats that support the creation, access, and sharing of array-oriented scientific data.""" - homepage = "http://www.unidata.ucar.edu/software/netcdf/" + homepage = "http://www.unidata.ucar.edu/software/netcdf" url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz" version('4.4.0', 'cffda0cbd97fdb3a06e9274f7aef438e') From e515042a36e7aa21e52943dab1e8b5594f3f0e94 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 6 Mar 2016 01:41:48 -0800 Subject: [PATCH 116/189] Fix stage creation bug, simplify do_install code. 
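For context on the fix: fetch, stage, and patch can each be invoked on their own, outside the ``with self.stage:`` block that ``do_install`` uses, so the composite stage directory must already exist when any of them runs. A sketch of that calling pattern (the package name is illustrative):

```
import spack

# `spack fetch` / `spack patch` end up here without ever entering the
# stage context manager, which is why _make_stage() now calls create()
# up front (see the diff below).
pkg = spack.repo.get('libelf')
pkg.do_fetch()
pkg.do_patch()
```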
--- lib/spack/spack/package.py | 73 +++++++++++++++++++++----------------- 1 file changed, 40 insertions(+), 33 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 47d259968a..ce8cce27e2 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -467,6 +467,11 @@ def _make_stage(self): stage = self._make_resource_stage(composite_stage[0], fetcher, resource) # Append the item to the composite composite_stage.append(stage) + + # Create stage on first access. Needed because fetch, stage, + # patch, and install can be called independently of each + # other, so `with self.stage:` in do_install isn't sufficient. + composite_stage.create() return composite_stage @property @@ -846,35 +851,38 @@ def do_install(self, tty.msg("Installing %s" % self.name) + # First, install dependencies recursively. if not ignore_deps: self.do_install_dependencies( keep_prefix=keep_prefix, keep_stage=keep_stage, ignore_deps=ignore_deps, - fake=fake, skip_patch=skip_patch, verbose=verbose, - make_jobs=make_jobs) - - start_time = time.time() - if not fake: - if not skip_patch: - self.do_patch() - else: - self.do_stage() - - # create the install directory. The install layout - # handles this in case so that it can use whatever - # package naming scheme it likes. - spack.install_layout.create_install_directory(self.spec) + fake=fake, skip_patch=skip_patch, verbose=verbose, make_jobs=make_jobs) def cleanup(): + """Handles removing install prefix on error.""" if not keep_prefix: - # If anything goes wrong, remove the install prefix self.remove_prefix() else: tty.warn("Keeping install prefix in place despite error.", - "Spack will think this package is installed." + + "Spack will think this package is installed. " + "Manually remove this directory to fix:", self.prefix, wrap=True) + # Then install the package itself. def real_work(): + """Forked for each build. Has its own process and python + module space set up by build_environment.fork().""" + start_time = time.time() + if not fake: + if not skip_patch: + self.do_patch() + else: + self.do_stage() + + # create the install directory. The install layout + # handles this in case so that it can use whatever + # package naming scheme it likes. + spack.install_layout.create_install_directory(self.spec) + try: tty.msg("Building %s" % self.name) @@ -884,9 +892,9 @@ def real_work(): # the directory is created. spack.hooks.pre_install(self) - # Set up process's build environment before running install. if fake: self.do_fake_install() + else: # Do the real install in the source directory. self.stage.chdir_to_source() @@ -901,18 +909,17 @@ def real_work(): dump_environment(env_path) self.install(self.spec, self.prefix) - # Ensure that something was actually installed. - self._sanity_check_install() + # Ensure that something was actually installed. + self._sanity_check_install() - # Move build log into install directory on success - if not fake: + # Copy provenance into the install directory on success log_install_path = spack.install_layout.build_log_path(self.spec) env_install_path = spack.install_layout.build_env_path(self.spec) + packages_dir = spack.install_layout.build_packages_path(self.spec) + install(log_path, log_install_path) install(env_path, env_install_path) - - packages_dir = spack.install_layout.build_packages_path(self.spec) - dump_packages(self.spec, packages_dir) + dump_packages(self.spec, packages_dir) # Stop timer. 
self._total_time = time.time() - start_time @@ -934,18 +941,18 @@ def real_work(): cleanup() raise - # Set parallelism before starting build. - self.make_jobs = make_jobs + # Set parallelism before starting build. + self.make_jobs = make_jobs - # Do the build. - spack.build_environment.fork(self, real_work) + # Do the build. + spack.build_environment.fork(self, real_work) - # note: PARENT of the build process adds the new package to - # the database, so that we don't need to re-read from file. - spack.installed_db.add(self.spec, self.prefix) + # note: PARENT of the build process adds the new package to + # the database, so that we don't need to re-read from file. + spack.installed_db.add(self.spec, self.prefix) - # Once everything else is done, run post install hooks - spack.hooks.post_install(self) + # Once everything else is done, run post install hooks + spack.hooks.post_install(self) def _sanity_check_install(self): From 240ada5775c7857932279d86e4305ef001d33717 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 6 Mar 2016 16:51:09 -0800 Subject: [PATCH 117/189] Add `expand=False` option for URL downloads. - Allows skipping the expand step for downloads. - Fixed stage so that it knows expansion didn't fail when there is a no-expand URLFetchStrategy. - Updated docs to reflect new option, and provided an example. --- lib/spack/docs/packaging_guide.rst | 29 ++++++++++++++++++ lib/spack/llnl/util/filesystem.py | 9 +++++- lib/spack/spack/fetch_strategy.py | 7 ++++- lib/spack/spack/mirror.py | 13 ++++++-- lib/spack/spack/stage.py | 48 ++++++++++++++++++++---------- 5 files changed, 86 insertions(+), 20 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 59ba63fa35..bae8c34d52 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -401,6 +401,35 @@ construct the new one for ``8.2.1``. When you supply a custom URL for a version, Spack uses that URL *verbatim* and does not perform extrapolation. +Skipping the expand step +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Spack normally expands archives automatically after downloading +them. If you want to skip this step (e.g., for self-extracting +executables and other custom archive types), you can add +``expand=False`` to a ``version`` directive. + +.. code-block:: python + + version('8.2.1', '4136d7b4c04df68b686570afa26988ac', + url='http://example.com/foo-8.2.1-special-version.tar.gz', 'expand=False') + +When ``expand`` is set to ``False``, Spack sets the current working +directory to the directory containing the downloaded archive before it +calls your ``install`` method. Within ``install``, the path to the +downloaded archive is available as ``self.stage.archive_file``. + +Here is an example snippet for packages distribuetd as self-extracting +archives. The example sets permissions on the downloaded file to make +it executable, then runs it with some arguments. + +.. 
code-block:: python + + def install(self, spec, prefix): + set_executable(self.stage.archive_file) + installer = Executable(self.stage.archive_file) + installer('--prefix=%s' % prefix, 'arg1', 'arg2', 'etc.') + Checksums ~~~~~~~~~~~~~~~~~ diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index a92cb0706d..f218b7c424 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -25,7 +25,8 @@ __all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree', 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file', - 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'remove_dead_links', 'remove_linked_tree'] + 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', + 'set_executable', 'remove_dead_links', 'remove_linked_tree'] import os import sys @@ -345,6 +346,12 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): if order == 'post': yield (source_path, dest_path) + +def set_executable(path): + st = os.stat(path) + os.chmod(path, st.st_mode | stat.S_IEXEC) + + def remove_dead_links(root): """ Removes any dead link that is present in root diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index ec17cb97f1..0d0a7db8a9 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -82,7 +82,6 @@ class FetchStrategy(object): class __metaclass__(type): """This metaclass registers all fetch strategies in a list.""" - def __init__(cls, name, bases, dict): type.__init__(cls, name, bases, dict) if cls.enabled: all_strategies.append(cls) @@ -145,6 +144,8 @@ def __init__(self, url=None, digest=None, **kwargs): self.digest = kwargs.get('md5', None) if not self.digest: self.digest = digest + self.expand_archive = kwargs.get('expand', True) + if not self.url: raise ValueError("URLFetchStrategy requires a url for fetching.") @@ -218,6 +219,10 @@ def archive_file(self): @_needs_stage def expand(self): + if not self.expand_archive: + tty.msg("Skipping expand step for %s" % self.archive_file) + return + tty.msg("Staging archive: %s" % self.archive_file) self.stage.chdir() diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index fdc4e7967f..6981f69ac0 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -51,13 +51,20 @@ def mirror_archive_filename(spec, fetcher): raise ValueError("mirror.path requires spec with concrete version.") if isinstance(fetcher, fs.URLFetchStrategy): - # If we fetch this version with a URLFetchStrategy, use URL's archive type - ext = url.downloaded_file_extension(fetcher.url) + if fetcher.expand_archive: + # If we fetch this version with a URLFetchStrategy, use URL's archive type + ext = url.downloaded_file_extension(fetcher.url) + else: + # If the archive shouldn't be expanded, don't check for its extension. 
+ ext = None else: # Otherwise we'll make a .tar.gz ourselves ext = 'tar.gz' - return "%s-%s.%s" % (spec.package.name, spec.version, ext) + filename = "%s-%s" % (spec.package.name, spec.version) + if ext: + filename += ".%s" % ext + return filename def mirror_archive_path(spec, fetcher): diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index b117c76aa1..b405915a75 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -229,13 +229,22 @@ def archive_file(self): @property def source_path(self): - """Returns the path to the expanded/checked out source code - within this fetch strategy's path. + """Returns the path to the expanded/checked out source code. - This assumes nothing else is going ot be put in the - FetchStrategy's path. It searches for the first - subdirectory of the path it can find, then returns that. + To find the source code, this method searches for the first + subdirectory of the stage that it can find, and returns it. + This assumes nothing besides the archive file will be in the + stage path, but it has the advantage that we don't need to + know the name of the archive or its contents. + + If the fetch strategy is not supposed to expand the downloaded + file, it will just return the stage path. If the archive needs + to be expanded, it will return None when no archive is found. """ + if isinstance(self.fetcher, fs.URLFetchStrategy): + if not self.fetcher.expand_archive: + return self.path + for p in [os.path.join(self.path, f) for f in os.listdir(self.path)]: if os.path.isdir(p): return p @@ -416,21 +425,15 @@ def expand_archive(self): shutil.move(source_path, destination_path) -@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy']) +@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy']) class StageComposite: """ Composite for Stage type objects. The first item in this composite is considered to be the root package, and operations that return a value are forwarded to it. """ - - @property - def source_path(self): - return self[0].source_path - - @property - def path(self): - return self[0].path - + # + # __enter__ and __exit__ delegate to all stages in the composite. + # def __enter__(self): for item in self: item.__enter__() @@ -440,9 +443,24 @@ def __exit__(self, exc_type, exc_val, exc_tb): for item in reversed(self): item.__exit__(exc_type, exc_val, exc_tb) + # + # Below functions act only on the *first* stage in the composite. + # + @property + def source_path(self): + return self[0].source_path + + @property + def path(self): + return self[0].path + def chdir_to_source(self): return self[0].chdir_to_source() + @property + def archive_file(self): + return self[0].archive_file + class DIYStage(object): """Simple class that allows any directory to be a spack stage.""" From 1ee90a6fe792bce1730bfd3fddb4fa12b4597361 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 6 Mar 2016 16:52:34 -0800 Subject: [PATCH 118/189] Make spack_cc, spack_cxx, spack_f77, spack_fc available in build env. - Add for convenience for packages that need to refer to these. - Added an example in the documentation. 
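Besides the ``filter_file`` example added to the docs below, the new module-scope names can be passed straight to a build. A sketch for a hypothetical package whose handwritten Makefile honors ``CC``/``CXX``:

```
def install(self, spec, prefix):
    # spack_cc and spack_cxx point at Spack's compiler wrappers, so the
    # Makefile picks up the wrapped compilers without any filtering.
    make('CC=%s' % spack_cc,
         'CXX=%s' % spack_cxx,
         'PREFIX=%s' % prefix,
         'install')
```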
--- lib/spack/docs/packaging_guide.rst | 9 +++++++++ lib/spack/spack/build_environment.py | 7 +++++++ 2 files changed, 16 insertions(+) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index bae8c34d52..983adb28b0 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2137,6 +2137,15 @@ Filtering functions Examples: + #. Filtering a Makefile to force it to use Spack's compiler wrappers: + + .. code-block:: python + + filter_file(r'^CC\s*=.*', spack_cc, 'Makefile') + filter_file(r'^CXX\s*=.*', spack_cxx, 'Makefile') + filter_file(r'^F77\s*=.*', spack_f77, 'Makefile') + filter_file(r'^FC\s*=.*', spack_fc, 'Makefile') + #. Replacing ``#!/usr/bin/perl`` with ``#!/usr/bin/env perl`` in ``bib2xhtml``: .. code-block:: python diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 1b87778080..e22597a789 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -214,6 +214,13 @@ def set_module_variables_for_package(pkg, m): m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE') m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg))) + # Put spack compiler paths in module scope. + link_dir = spack.build_env_path + m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc']) + m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx']) + m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77']) + m.spack_f90 = join_path(link_dir, pkg.compiler.link_paths['fc']) + # Emulate some shell commands for convenience m.pwd = os.getcwd m.cd = os.chdir From 108277fb5dc648c0e93fbd22e6ea6613329acc9d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 6 Mar 2016 19:32:00 -0800 Subject: [PATCH 119/189] Fix docs that have gone stale since repos were added. --- lib/spack/docs/developer_guide.rst | 15 ++++++++++++++- lib/spack/docs/features.rst | 2 +- lib/spack/docs/packaging_guide.rst | 20 ++++++++++---------- 3 files changed, 25 insertions(+), 12 deletions(-) diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst index db47de80f5..0b618aa683 100644 --- a/lib/spack/docs/developer_guide.rst +++ b/lib/spack/docs/developer_guide.rst @@ -73,19 +73,32 @@ with a high level view of Spack's directory structure:: spack/ <- installation root bin/ spack <- main spack executable + + etc/ + spack/ <- Spack config files. + Can be overridden by files in ~/.spack. + var/ spack/ <- build & stage directories + repos/ <- contains package repositories + builtin/ <- pkg repository that comes with Spack + repo.yaml <- descriptor for the builtin repository + packages/ <- directories under here contain packages + opt/ spack/ <- packages are installed here + lib/ spack/ docs/ <- source for this documentation env/ <- compiler wrappers for build environment + external/ <- external libs included in Spack distro + llnl/ <- some general-use libraries + spack/ <- spack module; contains Python code cmd/ <- each file in here is a spack subcommand compilers/ <- compiler description files - packages/ <- each file in here is a spack package test/ <- unit test modules util/ <- common code diff --git a/lib/spack/docs/features.rst b/lib/spack/docs/features.rst index fcb810086d..0998ba8da4 100644 --- a/lib/spack/docs/features.rst +++ b/lib/spack/docs/features.rst @@ -103,7 +103,7 @@ creates a simple python file: It doesn't take much python coding to get from there to a working package: -.. 
literalinclude:: ../../../var/spack/packages/libelf/package.py +.. literalinclude:: ../../../var/spack/repos/builtin/packages/libelf/package.py :lines: 25- Spack also provides wrapper functions around common commands like diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 59ba63fa35..a3e1f4de59 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -84,7 +84,7 @@ always choose to download just one tarball initially, and run If it fails entirely, you can get minimal boilerplate by using :ref:`spack-edit-f`, or you can manually create a directory and - ``package.py`` file for the package in ``var/spack/packages``. + ``package.py`` file for the package in ``var/spack/repos/builtin/packages``. .. note:: @@ -203,7 +203,7 @@ edit`` command: So, if you used ``spack create`` to create a package, then saved and closed the resulting file, you can get back to it with ``spack edit``. The ``cmake`` package actually lives in -``$SPACK_ROOT/var/spack/packages/cmake/package.py``, but this provides +``$SPACK_ROOT/var/spack/repos/builtin/packages/cmake/package.py``, but this provides a much simpler shortcut and saves you the trouble of typing the full path. @@ -269,18 +269,18 @@ live in Spack's directory structure. In general, `spack-create`_ and `spack-edit`_ handle creating package files for you, so you can skip most of the details here. -``var/spack/packages`` +``var/spack/repos/builtin/packages`` ~~~~~~~~~~~~~~~~~~~~~~~ A Spack installation directory is structured like a standard UNIX install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``, etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``. -Packages themselves live in ``$SPACK_ROOT/var/spack/packages``. +Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``. If you ``cd`` to that directory, you will see directories for each package: -.. command-output:: cd $SPACK_ROOT/var/spack/packages; ls -CF +.. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages; ls -CF :shell: :ellipsis: 10 @@ -288,7 +288,7 @@ Each directory contains a file called ``package.py``, which is where all the python code for the package goes. For example, the ``libelf`` package lives in:: - $SPACK_ROOT/var/spack/packages/libelf/package.py + $SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py Alongside the ``package.py`` file, a package may contain extra directories or files (like patches) that it needs to build. @@ -301,7 +301,7 @@ Packages are named after the directory containing ``package.py``. So, ``libelf``'s ``package.py`` lives in a directory called ``libelf``. The ``package.py`` file defines a class called ``Libelf``, which extends Spack's ``Package`` class. for example, here is -``$SPACK_ROOT/var/spack/packages/libelf/package.py``: +``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``: .. code-block:: python :linenos: @@ -328,7 +328,7 @@ these: $ spack install libelf@0.8.13 Spack sees the package name in the spec and looks for -``libelf/package.py`` in ``var/spack/packages``. Likewise, if you say +``libelf/package.py`` in ``var/spack/repos/builtin/packages``. Likewise, if you say ``spack install py-numpy``, then Spack looks for ``py-numpy/package.py``. @@ -703,7 +703,7 @@ supply is a filename, then the patch needs to live within the spack source tree. 
For example, the patch above lives in a directory structure like this:: - $SPACK_ROOT/var/spack/packages/ + $SPACK_ROOT/var/spack/repos/builtin/packages/ mvapich2/ package.py ad_lustre_rwcontig_open_source.patch @@ -1533,7 +1533,7 @@ The last element of a package is its ``install()`` method. This is where the real work of installation happens, and it's the main part of the package you'll need to customize for each piece of software. -.. literalinclude:: ../../../var/spack/packages/libelf/package.py +.. literalinclude:: ../../../var/spack/repos/builtin/packages/libelf/package.py :start-after: 0.8.12 :linenos: From 265ef337a8cdb7397aa01858077787ca4c2669fb Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Thu, 3 Mar 2016 10:40:06 -0500 Subject: [PATCH 120/189] Don't quote -rpath literal --- lib/spack/env/cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index c3d1135722..fb0df79d33 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -176,17 +176,17 @@ while [ -n "$1" ]; do -Wl,*) arg="${1#-Wl,}" # TODO: Handle multiple -Wl, continuations of -Wl,-rpath - if [[ $arg == '-rpath='* ]]; then + if [[ $arg == -rpath=* ]]; then arg="${arg#-rpath=}" for rpath in ${arg//,/ }; do rpaths+=("$rpath") done - elif [[ $arg == '-rpath,'* ]]; then + elif [[ $arg == -rpath,* ]]; then arg="${arg#-rpath,}" for rpath in ${arg//,/ }; do rpaths+=("$rpath") done - elif [[ $arg == '-rpath' ]]; then + elif [[ $arg == -rpath ]]; then shift; arg="$1" if [[ $arg != '-Wl,'* ]]; then die "-Wl,-rpath was not followed by -Wl,*" From a399451e1e32c9a294f5b735237cef93ddbd4131 Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Sun, 6 Mar 2016 23:34:48 -0500 Subject: [PATCH 121/189] Update OpenSSL to 1.0.2g --- var/spack/repos/builtin/packages/openssl/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index c73102f05d..70afaf4038 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -17,6 +17,7 @@ class Openssl(Package): version('1.0.2d', '38dd619b2e77cbac69b99f52a053d25a') version('1.0.2e', '5262bfa25b60ed9de9f28d5d52d77fc5') version('1.0.2f', 'b3bf73f507172be9292ea2a8c28b659d') + version('1.0.2g', 'f3c710c045cdee5fd114feb69feba7aa') depends_on("zlib") parallel = False From 547933e7e4997a175bf03363b5c0cf7e687f2000 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 7 Mar 2016 01:08:08 -0800 Subject: [PATCH 122/189] Fix bug in -Xlinker argument handling - also update test to match. 
--- lib/spack/env/cc | 15 ++++++++------- lib/spack/spack/test/cc.py | 2 +- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 5e6ed93985..a19346ce97 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -199,19 +199,20 @@ while [ -n "$1" ]; do other_args+=("-Wl,$arg") fi ;; - -Xlinker,*) - arg="${1#-Xlinker,}" - if [ -z "$arg" ]; then shift; arg="$1"; fi + -Xlinker) + shift; arg="$1"; if [[ $arg = -rpath=* ]]; then rpaths+=("${arg#-rpath=}") elif [[ $arg = -rpath ]]; then shift; arg="$1" - if [[ $arg != -Xlinker,* ]]; then - die "-Xlinker,-rpath was not followed by -Xlinker,*" + if [[ $arg != -Xlinker ]]; then + die "-Xlinker -rpath was not followed by -Xlinker " fi - rpaths+=("${arg#-Xlinker,}") + shift; arg="$1" + rpaths+=("$arg") else - other_args+=("-Xlinker,$arg") + other_args+=("-Xlinker") + other_args+=("$arg") fi ;; *) diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index 11420ec44a..f3f6d4a22e 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -43,7 +43,7 @@ '-llib1', '-llib2', 'arg4', '-Wl,--end-group', - '-Xlinker,-rpath', '-Xlinker,/third/rpath', '-Xlinker,-rpath', '-Xlinker,/fourth/rpath', + '-Xlinker', '-rpath', '-Xlinker', '/third/rpath', '-Xlinker', '-rpath', '-Xlinker', '/fourth/rpath', '-llib3', '-llib4', 'arg5', 'arg6'] From 12ddf241a53f85d871c981307e5f682d2e979c75 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 7 Mar 2016 02:39:14 -0800 Subject: [PATCH 123/189] Fix code block in docs. --- lib/spack/spack/multimethod.py | 36 ++++++++++++++++++---------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py index 51c6a8e89d..3cd17e796a 100644 --- a/lib/spack/spack/multimethod.py +++ b/lib/spack/spack/multimethod.py @@ -138,7 +138,7 @@ class when(object): methods like install() that depend on the package's spec. For example: - .. code-block:: + .. code-block:: python class SomePackage(Package): ... @@ -163,26 +163,28 @@ def install(self, prefix): if you only have part of the install that is platform specific, you could do this: - class SomePackage(Package): - ... - # virtual dependence on MPI. - # could resolve to mpich, mpich2, OpenMPI - depends_on('mpi') + .. code-block:: python - def setup(self): - # do nothing in the default case - pass + class SomePackage(Package): + ... + # virtual dependence on MPI. + # could resolve to mpich, mpich2, OpenMPI + depends_on('mpi') - @when('^openmpi') - def setup(self): - # do something special when this is built with OpenMPI for - # its MPI implementations. + def setup(self): + # do nothing in the default case + pass + + @when('^openmpi') + def setup(self): + # do something special when this is built with OpenMPI for + # its MPI implementations. - def install(self, prefix): - # Do common install stuff - self.setup() - # Do more common install stuff + def install(self, prefix): + # Do common install stuff + self.setup() + # Do more common install stuff There must be one (and only one) @when clause that matches the package's spec. If there is more than one, or if none match, From 1e7d946d9116dee4900b95def1614bd3f788f6f0 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 7 Mar 2016 12:37:51 -0600 Subject: [PATCH 124/189] Add patch to allow PGI to build M4 --- var/spack/repos/builtin/packages/m4/inline-pgi.patch | 10 ++++++++++ var/spack/repos/builtin/packages/m4/package.py | 2 ++ 2 files changed, 12 insertions(+) create mode 100644 var/spack/repos/builtin/packages/m4/inline-pgi.patch diff --git a/var/spack/repos/builtin/packages/m4/inline-pgi.patch b/var/spack/repos/builtin/packages/m4/inline-pgi.patch new file mode 100644 index 0000000000..da5eb57a93 --- /dev/null +++ b/var/spack/repos/builtin/packages/m4/inline-pgi.patch @@ -0,0 +1,10 @@ +--- a/m4/extern-inline.m4 ++++ b/m4/extern-inline.m4 +@@ -34,6 +34,7 @@ + ? defined __GNUC_STDC_INLINE__ && __GNUC_STDC_INLINE__ \ + : (199901L <= __STDC_VERSION__ \ + && !defined __HP_cc \ ++ && !defined __PGI \ + && !(defined __SUNPRO_C && __STDC__))) \ + && !defined _GL_EXTERN_INLINE_STDHEADER_BUG) + # define _GL_INLINE inline diff --git a/var/spack/repos/builtin/packages/m4/package.py b/var/spack/repos/builtin/packages/m4/package.py index d6829dbcd4..ef70add18a 100644 --- a/var/spack/repos/builtin/packages/m4/package.py +++ b/var/spack/repos/builtin/packages/m4/package.py @@ -7,6 +7,8 @@ class M4(Package): version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76') + patch('inline-pgi.patch', when='@1.4.17') + variant('sigsegv', default=True, description="Build the libsigsegv dependency") depends_on('libsigsegv', when='+sigsegv') From b7750cf61c51704d87d679efeba9ca8cb8c2b768 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 7 Mar 2016 15:52:22 -0600 Subject: [PATCH 125/189] Autoreconf is necessary after patch --- var/spack/repos/builtin/packages/m4/inline-pgi.patch | 12 ++++++++++++ var/spack/repos/builtin/packages/m4/package.py | 4 ++++ 2 files changed, 16 insertions(+) diff --git a/var/spack/repos/builtin/packages/m4/inline-pgi.patch b/var/spack/repos/builtin/packages/m4/inline-pgi.patch index da5eb57a93..bc31ad918e 100644 --- a/var/spack/repos/builtin/packages/m4/inline-pgi.patch +++ b/var/spack/repos/builtin/packages/m4/inline-pgi.patch @@ -8,3 +8,15 @@ && !(defined __SUNPRO_C && __STDC__))) \ && !defined _GL_EXTERN_INLINE_STDHEADER_BUG) # define _GL_INLINE inline +--- a/configure.ac ++++ b/configure.ac +@@ -22,7 +22,7 @@ + [bug-m4@gnu.org]) + AC_CONFIG_AUX_DIR([build-aux]) + +-AM_INIT_AUTOMAKE([1.11.6 dist-bzip2 dist-xz color-tests parallel-tests ++AM_INIT_AUTOMAKE([dist-bzip2 dist-xz color-tests parallel-tests + silent-rules subdir-objects gnu]) + + m4_pattern_forbid([^M4_[A-Z]]) + diff --git a/var/spack/repos/builtin/packages/m4/package.py b/var/spack/repos/builtin/packages/m4/package.py index ef70add18a..aa0c775f08 100644 --- a/var/spack/repos/builtin/packages/m4/package.py +++ b/var/spack/repos/builtin/packages/m4/package.py @@ -14,6 +14,10 @@ class M4(Package): depends_on('libsigsegv', when='+sigsegv') def install(self, spec, prefix): + # After patch, update generated configuration files that depend on extern-inline.m4 + autoreconf = which('autoreconf') + autoreconf() + configure_args = [] if 'libsigsegv' in spec: configure_args.append('--with-libsigsegv-prefix=%s' % spec['libsigsegv'].prefix) From 77ec27c73013eb14821a4249f6a08a8321e10eef Mon Sep 17 00:00:00 2001 From: alalazo Date: Tue, 8 Mar 2016 11:09:41 +0100 Subject: [PATCH 126/189] fixed bug : similar issues in checksum and md5 as were solved in ad103dcafa652a839590f5fce28b2e2ce3b5a56d --- lib/spack/spack/cmd/checksum.py | 39 +++++++++--------------- lib/spack/spack/cmd/md5.py | 54 ++++++++++++++++----------------- 
lib/spack/spack/cmd/stage.py | 4 ++- lib/spack/spack/stage.py | 1 + 4 files changed, 45 insertions(+), 53 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 966ff9a5e9..5504673c9e 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -22,23 +22,18 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os -import re import argparse import hashlib -from pprint import pprint -from subprocess import CalledProcessError import llnl.util.tty as tty -from llnl.util.tty.colify import colify - import spack import spack.cmd import spack.util.crypto from spack.stage import Stage, FailedDownloadError from spack.version import * -description ="Checksum available versions of a package." +description = "Checksum available versions of a package." + def setup_parser(subparser): subparser.add_argument( @@ -60,30 +55,24 @@ def get_checksums(versions, urls, **kwargs): hashes = [] i = 0 for url, version in zip(urls, versions): - stage = Stage(url) try: - stage.fetch() - if i == 0 and first_stage_function: - first_stage_function(stage) + with Stage(url) as stage: + stage.delete_on_exit = not keep_stage + stage.fetch() + if i == 0 and first_stage_function: + first_stage_function(stage) - hashes.append((version, - spack.util.crypto.checksum(hashlib.md5, stage.archive_file))) - except FailedDownloadError, e: + hashes.append((version, + spack.util.crypto.checksum(hashlib.md5, stage.archive_file))) + i += 1 + except FailedDownloadError as e: tty.msg("Failed to fetch %s" % url) - continue - except Exception, e: + except Exception as e: tty.msg('Something failed on %s, skipping.\n (%s)' % (url, e)) - continue - - finally: - if not keep_stage: - stage.destroy() - i += 1 return hashes - def checksum(parser, args): # get the package we're going to generate checksums for pkg = spack.repo.get(args.package) @@ -106,8 +95,8 @@ def checksum(parser, args): tty.msg("Found %s versions of %s" % (len(versions), pkg.name), *spack.cmd.elide_list( - ["%-10s%s" % (v, versions[v]) for v in sorted_versions])) - print + ["%-10s%s" % (v, versions[v]) for v in sorted_versions])) + print() archives_to_fetch = tty.get_number( "How many would you like to checksum?", default=5, abort='q') diff --git a/lib/spack/spack/cmd/md5.py b/lib/spack/spack/cmd/md5.py index 879ef9f7b7..20508abf99 100644 --- a/lib/spack/spack/cmd/md5.py +++ b/lib/spack/spack/cmd/md5.py @@ -22,51 +22,51 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import os import argparse import hashlib - -from contextlib import contextmanager +import os import llnl.util.tty as tty -from llnl.util.filesystem import * - import spack.util.crypto from spack.stage import Stage, FailedDownloadError description = "Calculate md5 checksums for files/urls." 
-@contextmanager -def stager(url): - _cwd = os.getcwd() - _stager = Stage(url) - try: - _stager.fetch() - yield _stager - except FailedDownloadError: - tty.msg("Failed to fetch %s" % url) - finally: - _stager.destroy() - os.chdir(_cwd) # the Stage class changes the current working dir so it has to be restored def setup_parser(subparser): setup_parser.parser = subparser subparser.add_argument('files', nargs=argparse.REMAINDER, help="Files to checksum.") + +def compute_md5_checksum(url): + if not os.path.isfile(url): + with Stage(url) as stage: + stage.fetch() + value = spack.util.crypto.checksum(hashlib.md5, stage.archive_file) + else: + value = spack.util.crypto.checksum(hashlib.md5, url) + return value + + def md5(parser, args): if not args.files: setup_parser.parser.print_help() return 1 - for f in args.files: - if not os.path.isfile(f): - with stager(f) as stage: - checksum = spack.util.crypto.checksum(hashlib.md5, stage.archive_file) - print "%s %s" % (checksum, f) - else: - if not can_access(f): - tty.die("Cannot read file: %s" % f) + results = [] + for url in args.files: + try: + checksum = compute_md5_checksum(url) + results.append((checksum, url)) + except FailedDownloadError as e: + tty.warn("Failed to fetch %s" % url) + tty.warn("%s" % e) + except IOError as e: + tty.warn("Error when reading %s" % url) + tty.warn("%s" % e) - checksum = spack.util.crypto.checksum(hashlib.md5, f) - print "%s %s" % (checksum, f) + # Dump the MD5s at last without interleaving them with downloads + tty.msg("Number of MD5 check-sums computed: %s " % len(results)) + for checksum, url in results: + tty.msg("%s %s" % (checksum, url)) diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py index 5786780efb..b575f6c456 100644 --- a/lib/spack/spack/cmd/stage.py +++ b/lib/spack/spack/cmd/stage.py @@ -50,4 +50,6 @@ def stage(parser, args): specs = spack.cmd.parse_specs(args.specs, concretize=True) for spec in specs: package = spack.repo.get(spec) - package.do_stage() + with package.stage as stage: + stage.delete_on_exit = False + package.do_stage() diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index b117c76aa1..9404e12e84 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -438,6 +438,7 @@ def __enter__(self): def __exit__(self, exc_type, exc_val, exc_tb): for item in reversed(self): + item.delete_on_exit = getattr(self, 'delete_on_exit', True) item.__exit__(exc_type, exc_val, exc_tb) def chdir_to_source(self): From 18ce5ccf8fb31dae71848f541f3e139bd582953c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 8 Mar 2016 02:49:11 -0800 Subject: [PATCH 127/189] Avoid race in pango's `make install`, set parallel=False --- var/spack/repos/builtin/packages/pango/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/pango/package.py b/var/spack/repos/builtin/packages/pango/package.py index df43625bf5..79dad3a3d2 100644 --- a/var/spack/repos/builtin/packages/pango/package.py +++ b/var/spack/repos/builtin/packages/pango/package.py @@ -16,4 +16,4 @@ class Pango(Package): def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() - make("install") + make("install", parallel=False) From 5aadb6df19ea6d7575ee5f5e5f18d92934245d98 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 8 Mar 2016 02:50:26 -0800 Subject: [PATCH 128/189] Fixes #502. Create install prefix before build, clean up do_install. - Fix bug introduced during merge of stage refactor. 
- install prefix was not created before build_environment.fork() - build_environment.fork() calls setup_dependent_environment - python's setup_dependent_environment can inadvertently create the install prefix before directory_layout expects it. - Clean up Package.do_install: - simplify control flow: parent process now entirely responsible for creating/destroying the install prefix. cleanup is now in one place. - Hoisting cleanup out of the child improves nesting of try/catch in `real_work`. - `real_work` renamed to `build_process` --- lib/spack/spack/directory_layout.py | 56 +++++++++----- lib/spack/spack/package.py | 115 +++++++++++++--------------- 2 files changed, 91 insertions(+), 80 deletions(-) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 08c23627f4..242eb1afa0 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -85,6 +85,16 @@ def create_install_directory(self, spec): raise NotImplementedError() + def check_installed(self, spec): + """Checks whether a spec is installed. + + Return the spec's prefix, if it is installed, None otherwise. + + Raise an exception if the install is inconsistent or corrupt. + """ + raise NotImplementedError() + + def extension_map(self, spec): """Get a dict of currently installed extension packages for a spec. @@ -246,26 +256,36 @@ def build_packages_path(self, spec): def create_install_directory(self, spec): _check_concrete(spec) + prefix = self.check_installed(spec) + if prefix: + raise InstallDirectoryAlreadyExistsError(prefix) + + mkdirp(self.metadata_path(spec)) + self.write_spec(spec, self.spec_file_path(spec)) + + + def check_installed(self, spec): + _check_concrete(spec) path = self.path_for_spec(spec) spec_file_path = self.spec_file_path(spec) - if os.path.isdir(path): - if not os.path.isfile(spec_file_path): - raise InconsistentInstallDirectoryError( - 'No spec file found at path %s' % spec_file_path) + if not os.path.isdir(path): + return None - installed_spec = self.read_spec(spec_file_path) - if installed_spec == self.spec: - raise InstallDirectoryAlreadyExistsError(path) + if not os.path.isfile(spec_file_path): + raise InconsistentInstallDirectoryError( + 'Inconsistent state: install prefix exists but contains no spec.yaml:', + " " + path) - if spec.dag_hash() == installed_spec.dag_hash(): - raise SpecHashCollisionError(installed_hash, spec_hash) - else: - raise InconsistentInstallDirectoryError( - 'Spec file in %s does not match hash!' % spec_file_path) + installed_spec = self.read_spec(spec_file_path) + if installed_spec == spec: + return path - mkdirp(self.metadata_path(spec)) - self.write_spec(spec, spec_file_path) + if spec.dag_hash() == installed_spec.dag_hash(): + raise SpecHashCollisionError(installed_hash, spec_hash) + else: + raise InconsistentInstallDirectoryError( + 'Spec file in %s does not match hash!' 
% spec_file_path) def all_specs(self): @@ -399,8 +419,8 @@ def remove_extension(self, spec, ext_spec): class DirectoryLayoutError(SpackError): """Superclass for directory layout errors.""" - def __init__(self, message): - super(DirectoryLayoutError, self).__init__(message) + def __init__(self, message, long_msg=None): + super(DirectoryLayoutError, self).__init__(message, long_msg) class SpecHashCollisionError(DirectoryLayoutError): @@ -422,8 +442,8 @@ def __init__(self, installed_spec, prefix, error): class InconsistentInstallDirectoryError(DirectoryLayoutError): """Raised when a package seems to be installed to the wrong place.""" - def __init__(self, message): - super(InconsistentInstallDirectoryError, self).__init__(message) + def __init__(self, message, long_msg=None): + super(InconsistentInstallDirectoryError, self).__init__(message, long_msg) class InstallDirectoryAlreadyExistsError(DirectoryLayoutError): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index d4acbf5024..972a0410b9 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -845,7 +845,8 @@ def do_install(self, if not self.spec.concrete: raise ValueError("Can only install concrete packages.") - if os.path.exists(self.prefix): + # Ensure package is not already installed + if spack.install_layout.check_installed(self.spec): tty.msg("%s is already installed in %s" % (self.name, self.prefix)) return @@ -857,18 +858,11 @@ def do_install(self, keep_prefix=keep_prefix, keep_stage=keep_stage, ignore_deps=ignore_deps, fake=fake, skip_patch=skip_patch, verbose=verbose, make_jobs=make_jobs) - def cleanup(): - """Handles removing install prefix on error.""" - if not keep_prefix: - self.remove_prefix() - else: - tty.warn("Keeping install prefix in place despite error.", - "Spack will think this package is installed. " + - "Manually remove this directory to fix:", - self.prefix, wrap=True) + # Set parallelism before starting build. + self.make_jobs = make_jobs # Then install the package itself. - def real_work(): + def build_process(): """Forked for each build. Has its own process and python module space set up by build_environment.fork().""" start_time = time.time() @@ -878,30 +872,24 @@ def real_work(): else: self.do_stage() - # create the install directory. The install layout - # handles this in case so that it can use whatever - # package naming scheme it likes. - spack.install_layout.create_install_directory(self.spec) + tty.msg("Building %s" % self.name) - try: - tty.msg("Building %s" % self.name) + self.stage.keep = keep_stage + with self.stage: + # Run the pre-install hook in the child process after + # the directory is created. + spack.hooks.pre_install(self) - self.stage.keep = keep_stage - with self.stage: - # Run the pre-install hook in the child process after - # the directory is created. - spack.hooks.pre_install(self) + if fake: + self.do_fake_install() + else: + # Do the real install in the source directory. + self.stage.chdir_to_source() - if fake: - self.do_fake_install() - - else: - # Do the real install in the source directory. - self.stage.chdir_to_source() - - # Save the build environment in a file before building. - env_path = join_path(os.getcwd(), 'spack-build.env') + # Save the build environment in a file before building. 
+ env_path = join_path(os.getcwd(), 'spack-build.env') + try: # Redirect I/O to a build log (and optionally to the terminal) log_path = join_path(os.getcwd(), 'spack-build.out') log_file = open(log_path, 'w') @@ -909,43 +897,46 @@ def real_work(): dump_environment(env_path) self.install(self.spec, self.prefix) - # Ensure that something was actually installed. - self._sanity_check_install() + except ProcessError as e: + # Annotate ProcessErrors with the location of the build log. + e.build_log = log_path + raise e - # Copy provenance into the install directory on success - log_install_path = spack.install_layout.build_log_path(self.spec) - env_install_path = spack.install_layout.build_env_path(self.spec) - packages_dir = spack.install_layout.build_packages_path(self.spec) + # Ensure that something was actually installed. + self._sanity_check_install() - install(log_path, log_install_path) - install(env_path, env_install_path) - dump_packages(self.spec, packages_dir) + # Copy provenance into the install directory on success + log_install_path = spack.install_layout.build_log_path(self.spec) + env_install_path = spack.install_layout.build_env_path(self.spec) + packages_dir = spack.install_layout.build_packages_path(self.spec) - # Stop timer. - self._total_time = time.time() - start_time - build_time = self._total_time - self._fetch_time + install(log_path, log_install_path) + install(env_path, env_install_path) + dump_packages(self.spec, packages_dir) - tty.msg("Successfully installed %s" % self.name, - "Fetch: %s. Build: %s. Total: %s." - % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) - print_pkg(self.prefix) + # Stop timer. + self._total_time = time.time() - start_time + build_time = self._total_time - self._fetch_time - except ProcessError as e: - # Annotate with location of build log. - e.build_log = log_path - cleanup() - raise e + tty.msg("Successfully installed %s" % self.name, + "Fetch: %s. Build: %s. Total: %s." + % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time))) + print_pkg(self.prefix) - except: - # other exceptions just clean up and raise. - cleanup() - raise - - # Set parallelism before starting build. - self.make_jobs = make_jobs - - # Do the build. - spack.build_environment.fork(self, real_work) + try: + # Create the install prefix and fork the build process. + spack.install_layout.create_install_directory(self.spec) + spack.build_environment.fork(self, build_process) + except: + # remove the install prefix if anything went wrong during install. + if not keep_prefix: + self.remove_prefix() + else: + tty.warn("Keeping install prefix in place despite error.", + "Spack will think this package is installed. " + + "Manually remove this directory to fix:", + self.prefix, wrap=True) + raise # note: PARENT of the build process adds the new package to # the database, so that we don't need to re-read from file. From e0e545774aaddb5386367bd900a31b85692abc51 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 8 Mar 2016 12:15:40 -0600 Subject: [PATCH 129/189] Add more versions to Autotools --- var/spack/repos/builtin/packages/autoconf/package.py | 1 + var/spack/repos/builtin/packages/automake/package.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/autoconf/package.py b/var/spack/repos/builtin/packages/autoconf/package.py index 5189faf054..6412e810a6 100644 --- a/var/spack/repos/builtin/packages/autoconf/package.py +++ b/var/spack/repos/builtin/packages/autoconf/package.py @@ -6,6 +6,7 @@ class Autoconf(Package): url = "http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz" version('2.69', '82d05e03b93e45f5a39b828dc9c6c29b') + version('2.62', '6c1f3b3734999035d77da5024aab4fbd') def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/automake/package.py b/var/spack/repos/builtin/packages/automake/package.py index 9115822730..2172a42030 100644 --- a/var/spack/repos/builtin/packages/automake/package.py +++ b/var/spack/repos/builtin/packages/automake/package.py @@ -5,7 +5,9 @@ class Automake(Package): homepage = "http://www.gnu.org/software/automake/" url = "http://ftp.gnu.org/gnu/automake/automake-1.14.tar.gz" + version('1.15', '716946a105ca228ab545fc37a70df3a3') version('1.14.1', 'd052a3e884631b9c7892f2efce542d75') + version('1.11.6', '0286dc30295b62985ca51919202ecfcc') depends_on('autoconf') From a4861a30729e32aef493b3418d03874837eecc08 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 8 Mar 2016 10:39:46 -0800 Subject: [PATCH 130/189] Minor changes/bugfixes on md5/checksum PR --- lib/spack/spack/cmd/checksum.py | 5 ++--- lib/spack/spack/cmd/md5.py | 4 ++-- lib/spack/spack/cmd/stage.py | 4 +--- lib/spack/spack/stage.py | 2 +- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 5504673c9e..518d2703dc 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -56,8 +56,7 @@ def get_checksums(versions, urls, **kwargs): i = 0 for url, version in zip(urls, versions): try: - with Stage(url) as stage: - stage.delete_on_exit = not keep_stage + with Stage(url, keep=keep_stage) as stage: stage.fetch() if i == 0 and first_stage_function: first_stage_function(stage) @@ -96,7 +95,7 @@ def checksum(parser, args): tty.msg("Found %s versions of %s" % (len(versions), pkg.name), *spack.cmd.elide_list( ["%-10s%s" % (v, versions[v]) for v in sorted_versions])) - print() + print archives_to_fetch = tty.get_number( "How many would you like to checksum?", default=5, abort='q') diff --git a/lib/spack/spack/cmd/md5.py b/lib/spack/spack/cmd/md5.py index 20508abf99..f99fc0f8c2 100644 --- a/lib/spack/spack/cmd/md5.py +++ b/lib/spack/spack/cmd/md5.py @@ -67,6 +67,6 @@ def md5(parser, args): tty.warn("%s" % e) # Dump the MD5s at last without interleaving them with downloads - tty.msg("Number of MD5 check-sums computed: %s " % len(results)) + tty.msg("%d MD5 checksums:" % len(results)) for checksum, url in results: - tty.msg("%s %s" % (checksum, url)) + print "%s %s" % (checksum, url) diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py index b575f6c456..5786780efb 100644 --- a/lib/spack/spack/cmd/stage.py +++ b/lib/spack/spack/cmd/stage.py @@ -50,6 +50,4 @@ def stage(parser, args): specs = spack.cmd.parse_specs(args.specs, concretize=True) for spec in specs: package = spack.repo.get(spec) - with package.stage as stage: - stage.delete_on_exit = False - package.do_stage() + 
package.do_stage() diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 0d35511c34..d2ed03c271 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -441,7 +441,7 @@ def __enter__(self): def __exit__(self, exc_type, exc_val, exc_tb): for item in reversed(self): - item.delete_on_exit = getattr(self, 'delete_on_exit', True) + item.keep = getattr(self, 'keep', None) item.__exit__(exc_type, exc_val, exc_tb) # From 0c7d0c0b6c650cd0fd6f4814a255613e3efa1814 Mon Sep 17 00:00:00 2001 From: Alfredo Gimenez Date: Tue, 8 Mar 2016 12:26:40 -0800 Subject: [PATCH 131/189] Variants and fixes to thrift package --- .../repos/builtin/packages/thrift/package.py | 58 +++++++++++-------- 1 file changed, 35 insertions(+), 23 deletions(-) diff --git a/var/spack/repos/builtin/packages/thrift/package.py b/var/spack/repos/builtin/packages/thrift/package.py index 0e15052f64..ec3b13e563 100644 --- a/var/spack/repos/builtin/packages/thrift/package.py +++ b/var/spack/repos/builtin/packages/thrift/package.py @@ -12,33 +12,45 @@ class Thrift(Package): version('0.9.2', '89f63cc4d0100912f4a1f8a9dee63678') - extends("python") + # Currently only support for c-family and python + variant('c', default=True, description="Build support for C-family languages") + variant('python', default=True, description="Build support for python") - depends_on("autoconf") - depends_on("automake") - depends_on("bison") - depends_on("boost") - depends_on("flex") - depends_on("jdk") - depends_on("libtool") - depends_on("openssl") - depends_on("python") + depends_on('jdk') + depends_on('autoconf') + depends_on('automake') + depends_on('libtool') + depends_on('boost@1.53:') + depends_on('bison') + depends_on('flex') + depends_on('openssl') + + # Variant dependencies + extends('python', when='+python') + depends_on('python', when='+python') + + depends_on('zlib', when='+c') + depends_on('libevent', when='+c') - # Compilation fails for most languages, fortunately cpp installs fine - # All other languages (yes, including C) are omitted until someone needs them def install(self, spec, prefix): - env["PY_PREFIX"] = prefix - env["JAVA_PREFIX"] = prefix + env['PY_PREFIX'] = prefix + env['JAVA_HOME'] = spec['jdk'].prefix - configure("--prefix=%s" % prefix, - "--with-boost=%s" % spec['boost'].prefix, - "--with-c=no", - "--with-go=no", - "--with-python=yes", - "--with-lua=no", - "--with-php=no", - "--with-qt4=no", - "--enable-tests=no") + # configure options + options = ['--prefix=%s' % prefix] + + options.append('--with-boost=%s' % spec['boost'].prefix) + options.append('--enable-tests=no') + + options.append('--with-c=%s' % ('yes' if '+c' in spec else 'no')) + options.append('--with-python=%s' % ('yes' if '+python' in spec else 'no')) + options.append('--with-java=%s' % ('yes' if '+java' in spec else 'no')) + options.append('--with-go=%s' % ('yes' if '+go' in spec else 'no')) + options.append('--with-lua=%s' % ('yes' if '+lua' in spec else 'no')) + options.append('--with-php=%s' % ('yes' if '+php' in spec else 'no')) + options.append('--with-qt4=%s' % ('yes' if '+qt4' in spec else 'no')) + + configure(*options) make() make("install") From a1be45d0e7f1921176b40b2aa497309029f1f7ad Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 8 Mar 2016 13:56:44 -0800 Subject: [PATCH 132/189] Fix bug with setting module-scope vars in derived package classes. 
--- lib/spack/spack/build_environment.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index e22597a789..87fc310b5a 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -177,8 +177,6 @@ def set_module_variables_for_package(pkg, m): """Populate the module scope of install() with some useful functions. This makes things easier for package writers. """ - m = pkg.module - # number of jobs spack will to build with. jobs = multiprocessing.cpu_count() if not pkg.parallel: From df84677d1640139673c26a4c407698486905fbb3 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 8 Mar 2016 13:57:13 -0800 Subject: [PATCH 133/189] Make diy generate verbose build output by default. - added -q option to shut it up --- lib/spack/spack/cmd/diy.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 9df53312f8..2c3a8761ab 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -45,6 +45,9 @@ def setup_parser(subparser): subparser.add_argument( '--skip-patch', action='store_true', help="Skip patching for the DIY build.") + subparser.add_argument( + '-q', '--quiet', action='store_true', dest='quiet', + help="Do not display verbose build output while installing.") subparser.add_argument( 'spec', nargs=argparse.REMAINDER, help="specs to use for install. Must contain package AND verison.") @@ -92,4 +95,5 @@ def diy(self, args): package.do_install( keep_prefix=args.keep_prefix, ignore_deps=args.ignore_deps, + verbose=not args.quiet, keep_stage=True) # don't remove source dir for DIY. From 9c6184373752746e1219263c1afd9d1955911891 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 8 Mar 2016 13:58:41 -0800 Subject: [PATCH 134/189] Fix bugs in DIYStage: fetch & context handling. - DIYStage needs to be a context handler - DIYStage.fetch needs to take 2 args. --- lib/spack/spack/stage.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index d2ed03c271..5354135e6a 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -477,10 +477,14 @@ def chdir(self): else: raise ChdirError("Setup failed: no such directory: " + self.path) + # DIY stages do nothing as context managers. + def __enter__(self): pass + def __exit__(self, exc_type, exc_val, exc_tb): pass + def chdir_to_source(self): self.chdir() - def fetch(self): + def fetch(self, mirror_only): tty.msg("No need to fetch for DIY.") def check(self): From 5b22873b3dce37c0d7bad418bf6e0b73d381d19e Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Wed, 9 Mar 2016 00:18:20 -0500 Subject: [PATCH 135/189] Fixed issues with hypre: 1. --with-lapack-lib was wrong. 2. --with-MPI was wrong; set env vars for MPI wrappers instead. 3. Added version 2.10.1 4. Added shared library variant (True by default). Hypre can build shared or static libraries, but not both in the same build. 
--- .../repos/builtin/packages/hypre/package.py | 28 ++++++++++++++----- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index 0f7f14dd89..242ee100d7 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -1,4 +1,5 @@ from spack import * +import os class Hypre(Package): """Hypre is a library of high performance preconditioners that @@ -8,8 +9,11 @@ class Hypre(Package): homepage = "http://computation.llnl.gov/project/linear_solvers/software.php" url = "http://computation.llnl.gov/project/linear_solvers/download/hypre-2.10.0b.tar.gz" + version('2.10.1', 'dc048c4cabb3cd549af72591474ad674') version('2.10.0b', '768be38793a35bb5d055905b271f5b8e') + variant('shared', default=True, description="Build shared library version (disables static library)") + depends_on("mpi") depends_on("blas") depends_on("lapack") @@ -17,16 +21,26 @@ class Hypre(Package): def install(self, spec, prefix): blas_dir = spec['blas'].prefix lapack_dir = spec['lapack'].prefix + mpi_dir = spec['mpi'].prefix + + os.environ['CC'] = os.path.join(mpi_dir, 'bin', 'mpicc') + os.environ['CXX'] = os.path.join(mpi_dir, 'bin', 'mpicxx') + os.environ['F77'] = os.path.join(mpi_dir, 'bin', 'mpif77') + + + configure_args = [ + "--prefix=%s" % prefix, + "--with-lapack-libs=lapack", + "--with-lapack-lib-dirs=%s/lib" % lapack_dir, + "--with-blas-libs=blas", + "--with-blas-lib-dirs=%s/lib" % blas_dir] + if '+shared' in self.spec: + configure_args.append("--enable-shared") # Hypre's source is staged under ./src so we'll have to manually # cd into it. with working_dir("src"): - configure( - "--prefix=%s" % prefix, - "--with-blas-libs=blas", - "--with-blas-lib-dirs=%s/lib" % blas_dir, - "--with-lapack-libs=\"lapack blas\"", - "--with-lapack-lib-dirs=%s/lib" % lapack_dir, - "--with-MPI") + configure(*configure_args) + make() make("install") From c67b922185ebf5b7686d5b4958e2b31b8f0f1469 Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Wed, 9 Mar 2016 00:34:08 -0500 Subject: [PATCH 136/189] Added shared library capability. --- var/spack/repos/builtin/packages/petsc/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 87f700629d..e42425bcef 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -11,8 +11,11 @@ class Petsc(Package): version('3.5.3', 'd4fd2734661e89f18ac6014b5dd1ef2f') version('3.5.2', 'ad170802b3b058b5deb9cd1f968e7e13') version('3.5.1', 'a557e029711ebf425544e117ffa44d8f') + version('3.4.4', '7edbc68aa6d8d6a3295dd5f6c2f6979d') - depends_on("python @2.6:2.9") # requires Python for building + variant('shared', default=True, description="Build shared library version") + + depends_on("python @2.6:2.7") # requires Python for building depends_on("boost") depends_on("blas") @@ -33,7 +36,7 @@ def install(self, spec, prefix): "--with-metis-dir=%s" % spec['metis'].prefix, "--with-hdf5-dir=%s" % spec['hdf5'].prefix, "--with-mpi-dir=%s" % spec['mpi'].prefix, - "--with-shared-libraries=0") + "--with-shared-libraries=%d" % (1 if '+shared' in self.spec else 0)) # PETSc has its own way of doing parallel make. 
make('MAKE_NP=%s' % make_jobs, parallel=False) From f01d1c4385f05dd7bda7efb3a9e7f2c8008e177a Mon Sep 17 00:00:00 2001 From: alalazo Date: Wed, 9 Mar 2016 14:36:37 +0100 Subject: [PATCH 137/189] petsc : added variants and logic to build various flavors --- .../repos/builtin/packages/petsc/package.py | 92 ++++++++++++++----- 1 file changed, 69 insertions(+), 23 deletions(-) diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 87f700629d..5be187f348 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -1,40 +1,86 @@ +import os from spack import * + class Petsc(Package): - """PETSc is a suite of data structures and routines for the - scalable (parallel) solution of scientific applications modeled by - partial differential equations.""" + """ + PETSc is a suite of data structures and routines for the scalable (parallel) solution of scientific applications + modeled by partial differential equations. + """ homepage = "http://www.mcs.anl.gov/petsc/index.html" - url = "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-3.5.3.tar.gz" + url = "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-3.5.3.tar.gz" + version('3.6.3', '91dd3522de5a5ef039ff8f50800db606') version('3.5.3', 'd4fd2734661e89f18ac6014b5dd1ef2f') version('3.5.2', 'ad170802b3b058b5deb9cd1f968e7e13') version('3.5.1', 'a557e029711ebf425544e117ffa44d8f') - depends_on("python @2.6:2.9") # requires Python for building + variant('shared', default=True, description='Enables the build of shared libraries') + variant('mpi', default=True, description='Activates MPI support') + variant('double', default=True, description='Switches between single and double precision') - depends_on("boost") - depends_on("blas") - depends_on("lapack") - depends_on("hypre") - depends_on("parmetis") - depends_on("metis") - depends_on("hdf5+mpi") - depends_on("mpi") + variant('metis', default=True, description='Activates support for metis and parmetis') + variant('hdf5', default=True, description='Activates support for HDF5 (only parallel)') + variant('boost', default=True, description='Activates support for Boost') + variant('hypre', default=True, description='Activates support for Hypre') + + # Build dependencies + depends_on('python @2.6:2.9') # requires Python for building + + # Virtual dependencies + depends_on('blas') + depends_on('lapack') + depends_on('mpi', when='+mpi') + + # Other dependencies + depends_on('boost', when='+boost') + depends_on('metis', when='+metis') + + depends_on('hdf5~cxx~unsupported+mpi', when='+hdf5+mpi') + depends_on('parmetis', when='+metis+mpi') + depends_on('hypre', when='+hypre+mpi') + + def mpi_dependent_options(self): + if '~mpi' in self.spec: + compiler_opts = [ + '--with-cc=%s' % os.environ['CC'], + '--with-cxx=%s' % (os.environ['CXX'] if self.compiler.cxx is not None else '0'), + '--with-fc=%s' % (os.environ['FC'] if self.compiler.fc is not None else '0'), + '--with-mpi=0' + ] + error_message_fmt = '\t{library} support requires "+mpi" to be activated' + errors = [error_message_fmt.format(library=x) for x in ('hdf5', 'hypre') if ('+'+x) in self.spec] + if errors: + errors = ['incompatible variants given'] + errors + raise RuntimeError('\n'.join(errors)) + else: + compiler_opts = [ + '--with-mpi=1', + '--with-mpi-dir=%s' % self.spec['mpi'].prefix, + ] + return compiler_opts def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--with-blas-lib=%s/libblas.a" % 
spec['blas'].prefix.lib, - "--with-lapack-lib=%s/liblapack.a" % spec['lapack'].prefix.lib, - "--with-boost-dir=%s" % spec['boost'].prefix, - "--with-hypre-dir=%s" % spec['hypre'].prefix, - "--with-parmetis-dir=%s" % spec['parmetis'].prefix, - "--with-metis-dir=%s" % spec['metis'].prefix, - "--with-hdf5-dir=%s" % spec['hdf5'].prefix, - "--with-mpi-dir=%s" % spec['mpi'].prefix, - "--with-shared-libraries=0") + options = [] + options.extend(self.mpi_dependent_options()) + options.extend([ + '--with-precision=%s' % ('double' if '+double' in spec else 'single'), + '--with-shared-libraries=%s' % ('1' if '+shared' in spec else '0'), + '--with-blas-lapack-dir=%s' % spec['lapack'].prefix + ]) + # Activates library support if needed + for library in ('metis', 'boost', 'hfd5', 'hypre', 'parmetis'): + options.append( + '--with-{library}={value}'.format(library=library, value=('1' if library in spec else '0')) + ) + if library in spec: + options.append( + '--with-{library}-dir={path}'.format(library=library, path=spec[library].prefix) + ) + + configure('--prefix=%s' % prefix, *options) # PETSc has its own way of doing parallel make. make('MAKE_NP=%s' % make_jobs, parallel=False) make("install") From 8e76cda200bb3da159cdc27726c7812f66c5a5ed Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 9 Mar 2016 10:41:31 -0600 Subject: [PATCH 138/189] Patch config.hin, not extern-inline.m4 --- .../builtin/packages/m4/inline-pgi.patch | 22 ------------------- .../repos/builtin/packages/m4/package.py | 6 +---- var/spack/repos/builtin/packages/m4/pgi.patch | 10 +++++++++ 3 files changed, 11 insertions(+), 27 deletions(-) delete mode 100644 var/spack/repos/builtin/packages/m4/inline-pgi.patch create mode 100644 var/spack/repos/builtin/packages/m4/pgi.patch diff --git a/var/spack/repos/builtin/packages/m4/inline-pgi.patch b/var/spack/repos/builtin/packages/m4/inline-pgi.patch deleted file mode 100644 index bc31ad918e..0000000000 --- a/var/spack/repos/builtin/packages/m4/inline-pgi.patch +++ /dev/null @@ -1,22 +0,0 @@ ---- a/m4/extern-inline.m4 -+++ b/m4/extern-inline.m4 -@@ -34,6 +34,7 @@ - ? 
defined __GNUC_STDC_INLINE__ && __GNUC_STDC_INLINE__ \ - : (199901L <= __STDC_VERSION__ \ - && !defined __HP_cc \ -+ && !defined __PGI \ - && !(defined __SUNPRO_C && __STDC__))) \ - && !defined _GL_EXTERN_INLINE_STDHEADER_BUG) - # define _GL_INLINE inline ---- a/configure.ac -+++ b/configure.ac -@@ -22,7 +22,7 @@ - [bug-m4@gnu.org]) - AC_CONFIG_AUX_DIR([build-aux]) - --AM_INIT_AUTOMAKE([1.11.6 dist-bzip2 dist-xz color-tests parallel-tests -+AM_INIT_AUTOMAKE([dist-bzip2 dist-xz color-tests parallel-tests - silent-rules subdir-objects gnu]) - - m4_pattern_forbid([^M4_[A-Z]]) - diff --git a/var/spack/repos/builtin/packages/m4/package.py b/var/spack/repos/builtin/packages/m4/package.py index aa0c775f08..a4b9dcb623 100644 --- a/var/spack/repos/builtin/packages/m4/package.py +++ b/var/spack/repos/builtin/packages/m4/package.py @@ -7,17 +7,13 @@ class M4(Package): version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76') - patch('inline-pgi.patch', when='@1.4.17') + patch('pgi.patch', when='@1.4.17') variant('sigsegv', default=True, description="Build the libsigsegv dependency") depends_on('libsigsegv', when='+sigsegv') def install(self, spec, prefix): - # After patch, update generated configuration files that depend on extern-inline.m4 - autoreconf = which('autoreconf') - autoreconf() - configure_args = [] if 'libsigsegv' in spec: configure_args.append('--with-libsigsegv-prefix=%s' % spec['libsigsegv'].prefix) diff --git a/var/spack/repos/builtin/packages/m4/pgi.patch b/var/spack/repos/builtin/packages/m4/pgi.patch new file mode 100644 index 0000000000..1ad63e2cf1 --- /dev/null +++ b/var/spack/repos/builtin/packages/m4/pgi.patch @@ -0,0 +1,10 @@ +--- a/lib/config.hin ++++ b/lib/config.hin +@@ -1510,6 +1510,7 @@ + ? defined __GNUC_STDC_INLINE__ && __GNUC_STDC_INLINE__ \ + : (199901L <= __STDC_VERSION__ \ + && !defined __HP_cc \ ++ && !defined __PGI \ + && !(defined __SUNPRO_C && __STDC__))) \ + && !defined _GL_EXTERN_INLINE_APPLE_BUG) + # define _GL_INLINE inline From bf4d51ea0b00e091728ecf8d4d51aa295448a416 Mon Sep 17 00:00:00 2001 From: Luigi Calori Date: Wed, 9 Mar 2016 14:45:29 +0100 Subject: [PATCH 139/189] fix error exit when doing spack patch without parameters, same as spack stage --- lib/spack/spack/cmd/patch.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py index 44fc8696db..b04b402738 100644 --- a/lib/spack/spack/cmd/patch.py +++ b/lib/spack/spack/cmd/patch.py @@ -24,6 +24,7 @@ ############################################################################## import argparse +import llnl.util.tty as tty import spack.cmd import spack From b43c277dc617020d7aaa1e292e01e79d0840999e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 9 Mar 2016 10:55:50 -0800 Subject: [PATCH 140/189] Merge @citibeth and @alalazo's petsc fixes from #515 and #517 --- .../repos/builtin/packages/petsc/package.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 5be187f348..41fd859945 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -15,6 +15,7 @@ class Petsc(Package): version('3.5.3', 'd4fd2734661e89f18ac6014b5dd1ef2f') version('3.5.2', 'ad170802b3b058b5deb9cd1f968e7e13') version('3.5.1', 'a557e029711ebf425544e117ffa44d8f') + version('3.4.4', '7edbc68aa6d8d6a3295dd5f6c2f6979d') variant('shared', default=True, description='Enables the build 
of shared libraries') variant('mpi', default=True, description='Activates MPI support') @@ -25,21 +26,21 @@ class Petsc(Package): variant('boost', default=True, description='Activates support for Boost') variant('hypre', default=True, description='Activates support for Hypre') - # Build dependencies - depends_on('python @2.6:2.9') # requires Python for building - # Virtual dependencies depends_on('blas') depends_on('lapack') depends_on('mpi', when='+mpi') + # Build dependencies + depends_on('python @2.6:2.7') + # Other dependencies depends_on('boost', when='+boost') depends_on('metis', when='+metis') - depends_on('hdf5~cxx~unsupported+mpi', when='+hdf5+mpi') + depends_on('hdf5+mpi', when='+hdf5+mpi') depends_on('parmetis', when='+metis+mpi') - depends_on('hypre', when='+hypre+mpi') + depends_on('hypre', when='+hypre+mpi') def mpi_dependent_options(self): if '~mpi' in self.spec: @@ -50,7 +51,9 @@ def mpi_dependent_options(self): '--with-mpi=0' ] error_message_fmt = '\t{library} support requires "+mpi" to be activated' - errors = [error_message_fmt.format(library=x) for x in ('hdf5', 'hypre') if ('+'+x) in self.spec] + errors = [error_message_fmt.format(library=x) + for x in ('hdf5', 'hypre', 'parmetis') + if ('+'+x) in self.spec] if errors: errors = ['incompatible variants given'] + errors raise RuntimeError('\n'.join(errors)) @@ -70,7 +73,7 @@ def install(self, spec, prefix): '--with-blas-lapack-dir=%s' % spec['lapack'].prefix ]) # Activates library support if needed - for library in ('metis', 'boost', 'hfd5', 'hypre', 'parmetis'): + for library in ('metis', 'boost', 'hdf5', 'hypre', 'parmetis'): options.append( '--with-{library}={value}'.format(library=library, value=('1' if library in spec else '0')) ) @@ -79,8 +82,8 @@ def install(self, spec, prefix): '--with-{library}-dir={path}'.format(library=library, path=spec[library].prefix) ) - configure('--prefix=%s' % prefix, *options) + # PETSc has its own way of doing parallel make. make('MAKE_NP=%s' % make_jobs, parallel=False) make("install") From d06ebf23d4ffe3499edca3f34f60be9b561f5f8c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 9 Mar 2016 11:16:35 -0800 Subject: [PATCH 141/189] Removing `unsupported` variant from HDF5. - `unsupported` shouldn't be a variant. --- var/spack/repos/builtin/packages/hdf5/package.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index ed4e7c35c9..513a38ee8a 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -46,7 +46,6 @@ class Hdf5(Package): variant('cxx', default=True, description='Enable C++ support') variant('fortran', default=True, description='Enable Fortran support') - variant('unsupported', default=True, description='Enables unsupported configuration options') variant('mpi', default=False, description='Enable MPI support') variant('szip', default=False, description='Enable szip support') @@ -74,6 +73,13 @@ def install(self, spec, prefix): self.validate(spec) # Handle compilation after spec validation extra_args = [] + + # Always enable this option. This does not actually enable any + # features: it only *allows* the user to specify certain + # combinations of other arguments. Enabling it just skips a + # sanity check in configure, so this doesn't merit a variant. 
+ extra_args.append("--enable-unsupported") + if '+debug' in spec: extra_args.append('--enable-debug=all') else: @@ -84,9 +90,6 @@ def install(self, spec, prefix): else: extra_args.append('--enable-static-exec') - if '+unsupported' in spec: - extra_args.append("--enable-unsupported") - if '+cxx' in spec: extra_args.append('--enable-cxx') From a4d40177fdfece3c25d7ebb48855e8fee2a7093d Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Tue, 8 Mar 2016 09:44:21 -0700 Subject: [PATCH 142/189] + Provide download/build instructions for qt/5.4.2. - This version provides updates to provided cmake scripts that are required for building cmake-gui. --- var/spack/repos/builtin/packages/qt/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index 91afa420c1..ef5f05601f 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -8,6 +8,9 @@ class Qt(Package): list_url = 'http://download.qt-project.org/official_releases/qt/' list_depth = 2 + version('5.4.2', 'fa1c4d819b401b267eb246a543a63ea5', + url='http://download.qt-project.org/official_releases/qt/5.4/5.4.2/single/qt-everywhere-opensource-src-5.4.2.tar.gz') + version('5.4.0', 'e8654e4b37dd98039ba20da7a53877e6', url='http://download.qt-project.org/official_releases/qt/5.4/5.4.0/single/qt-everywhere-opensource-src-5.4.0.tar.gz') From 383e73a5f53e0ab3d1f448789bc5a1b4a7c85292 Mon Sep 17 00:00:00 2001 From: Alfredo Adolfo Gimenez Date: Wed, 9 Mar 2016 11:25:51 -0800 Subject: [PATCH 143/189] Remove unneccessary depends_on --- var/spack/repos/builtin/packages/thrift/package.py | 1 - 1 file changed, 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/thrift/package.py b/var/spack/repos/builtin/packages/thrift/package.py index ec3b13e563..6430f40e80 100644 --- a/var/spack/repos/builtin/packages/thrift/package.py +++ b/var/spack/repos/builtin/packages/thrift/package.py @@ -27,7 +27,6 @@ class Thrift(Package): # Variant dependencies extends('python', when='+python') - depends_on('python', when='+python') depends_on('zlib', when='+c') depends_on('libevent', when='+c') From 45ef496dd553e023903b8b4d5bcace59c56eb486 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 9 Mar 2016 11:26:37 -0800 Subject: [PATCH 144/189] Add some descriptive language to a list comprehension. --- var/spack/repos/builtin/packages/petsc/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 41fd859945..efe172fc08 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -51,6 +51,9 @@ def mpi_dependent_options(self): '--with-mpi=0' ] error_message_fmt = '\t{library} support requires "+mpi" to be activated' + + # If mpi is disabled (~mpi), it's an error to have any of these enabled. + # This generates a list of any such errors. 
errors = [error_message_fmt.format(library=x) for x in ('hdf5', 'hypre', 'parmetis') if ('+'+x) in self.spec] From 6e82ab1f1501c8b64383f1d9a26d743284f4054f Mon Sep 17 00:00:00 2001 From: Nicolas Richart Date: Wed, 9 Mar 2016 21:18:44 +0100 Subject: [PATCH 145/189] change of url for mpfr --- var/spack/repos/builtin/packages/mpfr/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/mpfr/package.py b/var/spack/repos/builtin/packages/mpfr/package.py index a1bd7529cf..7e6e7d5bb6 100644 --- a/var/spack/repos/builtin/packages/mpfr/package.py +++ b/var/spack/repos/builtin/packages/mpfr/package.py @@ -28,8 +28,9 @@ class Mpfr(Package): """The MPFR library is a C library for multiple-precision floating-point computations with correct rounding.""" homepage = "http://www.mpfr.org" - url = "http://www.mpfr.org/mpfr-current/mpfr-3.1.3.tar.bz2" + url = "https://gforge.inria.fr/frs/download.php/latestfile/159/mpfr-3.1.2.tar.bz2" + version('3.1.4', 'b8a2f6b0e68bef46e53da2ac439e1cf4') version('3.1.3', '5fdfa3cfa5c86514ee4a241a1affa138') version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19') From ca102295657adef7100f638be0dabc0e6d7cf37f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 9 Mar 2016 14:56:21 -0800 Subject: [PATCH 146/189] Fixes #524 - Had attempted to add more functionality by assigning different meanign None, True, and False values "keep_stage" (where False was "always delete"). - Turns out that's not really worth the complexity. Having the third "always delete" sense is hardly ever useful but makes the code hard to understand. --- lib/spack/spack/package.py | 7 ++++--- lib/spack/spack/stage.py | 24 ++++++++++-------------- lib/spack/spack/test/stage.py | 35 +++++++++++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 17 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 972a0410b9..ca9e9c4bd1 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -825,7 +825,7 @@ def _resource_stage(self, resource): def do_install(self, - keep_prefix=False, keep_stage=None, ignore_deps=False, + keep_prefix=False, keep_stage=False, ignore_deps=False, skip_patch=False, verbose=False, make_jobs=None, fake=False): """Called by commands to install a package and its dependencies. @@ -834,8 +834,9 @@ def do_install(self, Args: keep_prefix -- Keep install prefix on failure. By default, destroys it. - keep_stage -- Set to True or false to always keep or always delete stage. - By default, stage is destroyed only if there are no exceptions. + keep_stage -- By default, stage is destroyed only if there are no + exceptions during build. Set to True to keep the stage + even with exceptions. ignore_deps -- Do not install dependencies before installing this package. fake -- Don't really build -- install fake stub files instead. skip_patch -- Skip patch stage of build if True. diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 5354135e6a..f88f82fc2d 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -88,7 +88,8 @@ class Stage(object): similar, and are intended to persist for only one run of spack. """ - def __init__(self, url_or_fetch_strategy, name=None, mirror_path=None, keep=None): + def __init__(self, url_or_fetch_strategy, + name=None, mirror_path=None, keep=False): """Create a stage object. 
Parameters: url_or_fetch_strategy @@ -108,10 +109,9 @@ def __init__(self, url_or_fetch_strategy, name=None, mirror_path=None, keep=None keep By default, when used as a context manager, the Stage - is cleaned up when everything goes well, and it is - kept intact when an exception is raised. You can - override this behavior by setting keep to True - (always keep) or False (always delete). + is deleted on exit when no exceptions are raised. + Pass True to keep the stage intact even if no + exceptions are raised. """ # TODO: fetch/stage coupling needs to be reworked -- the logic # TODO: here is convoluted and not modular enough. @@ -166,12 +166,8 @@ def __exit__(self, exc_type, exc_val, exc_tb): Returns: Boolean """ - if self.keep is None: - # Default: delete when there are no exceptions. - if exc_type is None: self.destroy() - - elif not self.keep: - # Overridden. Either always keep or always delete. + # Delete when there are no exceptions, unless asked to keep. + if exc_type is None and not self.keep: self.destroy() @@ -195,8 +191,8 @@ def _need_to_create_path(self): real_tmp = os.path.realpath(self.tmp_root) if spack.use_tmp_stage: - # If we're using a tmp dir, it's a link, and it points at the right spot, - # then keep it. + # If we're using a tmp dir, it's a link, and it points at the + # right spot, then keep it. if (real_path.startswith(real_tmp) and os.path.exists(real_path)): return False else: @@ -441,7 +437,7 @@ def __enter__(self): def __exit__(self, exc_type, exc_val, exc_tb): for item in reversed(self): - item.keep = getattr(self, 'keep', None) + item.keep = getattr(self, 'keep', False) item.__exit__(exc_type, exc_val, exc_tb) # diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index dbcf89d864..ea425127c4 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -277,3 +277,38 @@ def test_restage(self): self.check_chdir_to_source(stage, stage_name) self.assertFalse('foobar' in os.listdir(stage.source_path)) self.check_destroy(stage, stage_name) + + + def test_no_keep_without_exceptions(self): + with Stage(archive_url, name=stage_name, keep=False) as stage: + pass + self.check_destroy(stage, stage_name) + + + def test_keep_without_exceptions(self): + with Stage(archive_url, name=stage_name, keep=True) as stage: + pass + path = self.get_stage_path(stage, stage_name) + self.assertTrue(os.path.isdir(path)) + + + def test_no_keep_with_exceptions(self): + try: + with Stage(archive_url, name=stage_name, keep=False) as stage: + raise Exception() + + path = self.get_stage_path(stage, stage_name) + self.assertTrue(os.path.isdir(path)) + except: + pass # ignore here. + + + def test_keep_exceptions(self): + try: + with Stage(archive_url, name=stage_name, keep=True) as stage: + raise Exception() + + path = self.get_stage_path(stage, stage_name) + self.assertTrue(os.path.isdir(path)) + except: + pass # ignore here. 
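The tests above pin down the simplified semantics: with the default keep=False the stage directory is destroyed on a clean exit, keep=True preserves it, and an exception leaves the stage in place either way so it can be inspected. A short usage sketch, assuming the archive_url and stage_name values from the test module are in scope:

    # Usage sketch of the simplified keep semantics; archive_url and
    # stage_name are assumed to come from the surrounding test module.
    from spack.stage import Stage

    with Stage(archive_url, name=stage_name) as stage:
        pass        # default keep=False: stage removed on clean exit

    with Stage(archive_url, name=stage_name, keep=True) as stage:
        pass        # stage directory is preserved even on clean exit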
From a384ad5b1270140d71110e46d39144a0f0e9081e Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Wed, 9 Mar 2016 16:11:33 -0800 Subject: [PATCH 147/189] Fix problem with pure integer arguments in preferred versions list (e.g, 2 instead of 2.7.3) --- lib/spack/spack/config.py | 5 +++-- lib/spack/spack/preferred_packages.py | 6 +++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 807a898644..95a988f7ff 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -214,7 +214,8 @@ 'version': { 'type' : 'array', 'default' : [], - 'items' : { 'type' : 'string' } }, #version strings + 'items' : { 'anyOf' : [ { 'type' : 'string' }, + { 'type' : 'number'}]}}, #version strings 'compiler': { 'type' : 'array', 'default' : [], @@ -573,7 +574,7 @@ def __init__(self, validation_error, data): # Try to get line number from erroneous instance and its parent instance_mark = getattr(validation_error.instance, '_start_mark', None) parent_mark = getattr(validation_error.parent, '_start_mark', None) - path = getattr(validation_error, 'path', None) + path = [str(s) for s in getattr(validation_error, 'path', None)] # Try really hard to get the parent (which sometimes is not # set) This digs it out of the validated structure if it's not diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index 2b0ba791b6..eaea016a85 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -45,7 +45,7 @@ def _order_for_package(self, pkgname, component, second_key, test_all=True): order = order.get(second_key, {}) if not order: continue - return [s.strip() for s in order] + return [str(s).strip() for s in order] return [] @@ -98,11 +98,11 @@ def _spec_compare(self, pkgname, component, a, b, reverse_natural_compare, secon b_index = None reverse = -1 if reverse_natural_compare else 1 for i, cspec in enumerate(specs): - if a_index == None and cspec.satisfies(a): + if a_index == None and (cspec.satisfies(a) or a.satisfies(cspec)): a_index = i if b_index: break - if b_index == None and cspec.satisfies(b): + if b_index == None and (cspec.satisfies(b) or b.satisfies(cspec)): b_index = i if a_index: break From 1f06dd40f7e65252568da23e9758bf5af02833eb Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Wed, 9 Mar 2016 16:11:53 -0800 Subject: [PATCH 148/189] Update documentation for new packages.yaml config format. --- lib/spack/docs/packaging_guide.rst | 41 +++++++++---------- lib/spack/docs/site_configuration.rst | 59 +++++++++++++-------------- 2 files changed, 48 insertions(+), 52 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index f368d0a4fa..ef9fd89b62 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1561,50 +1561,49 @@ be concretized on their system. For example, one user may prefer packages built with OpenMPI and the Intel compiler. Another user may prefer packages be built with MVAPICH and GCC. -Spack's ``preferred`` configuration can be used to set defaults for sites or users. -Spack uses this configuration to make decisions about which compilers, package -versions, depends_on, and variants it should prefer during concretization. 
- -The preferred configuration can be controlled by editing the -``~/.spack/preferred.yaml`` file for user configuations, or the +Spack can be configurated to prefer certain compilers, package +versions, depends_on, and variants during concretization. +The preferred configuration can be controlled via the +``~/.spack/packages.yaml`` file for user configuations, or the +``etc/spack/packages.yaml`` site configuration. -Here's an example preferred.yaml file: +Here's an example packages.yaml file that sets preferred packages: .. code-block:: sh - preferred: + packages: dyninst: - compiler: gcc@4.9 + compiler: [gcc@4.9] variants: +debug gperftools: - version: 2.2, 2.4, 2.3 + version: [2.2, 2.4, 2.3] all: - compiler: gcc@4.4.7, gcc@4.6:, intel, clang, pgi - providers: - mpi: mvapich, mpich, openmpi + compiler: [gcc@4.4.7, gcc@4.6:, intel, clang, pgi] + providers: + mpi: [mvapich, mpich, openmpi] + At a high level, this example is specifying how packages should be concretized. The dyninst package should prefer using gcc 4.9 and be built with debug options. The gperftools package should prefer version 2.2 over 2.4. Every package on the system should prefer mvapich for -its MPI and gcc 4.4.7 (except for Dyninst, which perfers gcc 4.9). +its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by perfering gcc 4.9). These options are used to fill in implicit defaults. Any of them can be overwritten on the command line if explicitly requested. -Each preferred.yaml file begin with the string ``preferred:`` and -each subsequent entry is indented underneath it. The next layer contains -package names or the special string ``all`` (which applies to -every package). Underneath each package name is +Each packages.yaml file begin with the string ``packages:`` and +package names are specified on the next level. The special string ``all`` +applies settings to each package. Underneath each package name is one or more components: ``compiler``, ``variants``, ``version``, or ``providers``. Each component has an ordered list of spec ``constraints``, with earlier entries in the list being prefered over -latter entries. +later entries. Sometimes a package installation may have constraints that forbid the first concretization rule, in which case Spack will use the first legal concretization rule. Going back to the example, if a user -requests gperftools 2.3 or latter, then Spack will install version 2.4 +requests gperftools 2.3 or later, then Spack will install version 2.4 as the 2.4 version of gperftools is preferred over 2.3. An explicit concretization rule in the preferred section will always @@ -1612,7 +1611,7 @@ take preference over unlisted concretizations. In the above example, xlc isn't listed in the compiler list. Every listed compiler from gcc to pgi will thus be preferred over the xlc compiler. -The syntax for the ``providers`` section differs slightly from other +The syntax for the ``provider`` section differs slightly from other concretization rules. A provider lists a value that packages may ``depend_on`` (e.g, mpi) and a list of rules for fulfilling that dependency. diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst index a7211a9d95..ebf0437106 100644 --- a/lib/spack/docs/site_configuration.rst +++ b/lib/spack/docs/site_configuration.rst @@ -56,44 +56,43 @@ directory is. External Packages ~~~~~~~~~~~~~~~~~~~~~ -It's possible for Spack to use certain externally-installed -packages rather than always rebuilding packages. 
This may be desirable +Spack can be configured to use externally-installed +packages rather than building its own packages. This may be desirable if machines ship with system packages, such as a customized MPI that should be used instead of Spack building its own MPI. External packages are configured through the ``packages.yaml`` file found in a Spack installation's ``etc/spack/`` or a user's ``~/.spack/`` -directory. Here's an example of an external configuration:: +directory. Here's an example of an external configuration: .. code-block:: yaml - packages: - - openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: - path: /opt/openmpi-1.4.3 - - openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: - path: /opt/openmpi-1.4.3-debug - - openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: - path: /opt/openmpi-1.6.5-intel + packages: + openmpi: + paths: + openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3 + openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug + openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel This example lists three installations of OpenMPI, one built with gcc, one built with gcc and debug information, and another built with OpenMPI. If Spack is asked to build a package that uses one of these MPIs as a -dependency, it link the package to the pre-installed OpenMPI in -the given directory. +dependency, it will use the the pre-installed OpenMPI in +the given directory. This example also specifies that Spack should never +build its own OpenMPI via the ``nobuild: True`` option. -Each ``packages.yaml`` should begin with a ``packages:`` token, followed -by a list of package specs. Specs in the ``packages.yaml`` have at most -one ``path`` tag, which specifies the top-level directory where the -spec is installed. - -Each spec should be as well-defined as reasonably possible. If a +Each ``packages.yaml`` begins with a ``packages:`` token, followed +by a list of package names. To specify externals, add a ``paths`` +token under the package name, which lists externals in a +``spec : /path`` format. Each spec should be as +well-defined as reasonably possible. If a package lacks a spec component, such as missing a compiler or package version, then Spack will guess the missing component based on its most-favored packages, and it may guess incorrectly. -All package versions and compilers listed in ``packages.yaml`` should +Each package version and compilers listed in an external should have entries in Spack's packages and compiler configuration, even -the package and compiler may not actually be used. +though the package and compiler may not every be built. The packages configuration can tell Spack to use an external location for certain package versions, but it does not restrict Spack to using @@ -103,27 +102,25 @@ rather than continue using the pre-installed OpenMPI versions. To prevent this, the ``packages.yaml`` configuration also allows packages to be flagged as non-buildable. The previous example could be modified to -be:: +be: .. 
code-block:: yaml packages: - - openmpi: - nobuild: True - - openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: - path: /opt/openmpi-1.4.3 - - openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: - path: /opt/openmpi-1.4.3-debug - - openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: - path: /opt/openmpi-1.6.5-intel + openmpi: + paths: + openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3 + openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug + openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel + nobuild: True The addition of the ``nobuild`` flag tells Spack that it should never build its own version of OpenMPI, and it will instead always rely on a pre-built OpenMPI. Similar to ``path``, ``nobuild`` is specified as a property under -a spec and will prevent building of anything that satisfies that spec. +a package name. The ``nobuild`` does not need to be paired with external packages. -It could also be used alone to forbid versions of packages that may be +It could also be used alone to forbid packages that may be buggy or otherwise undesirable. From 23cbc2b1d9dc5816c63f330640105c5435ccdc22 Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Tue, 8 Mar 2016 09:44:21 -0700 Subject: [PATCH 149/189] + Provide download/build instructions for qt/5.4.2. - This version provides updates to provided cmake scripts that are required for building cmake-gui. + Provide download/build instructions for version 3.5.0. - When building the +qt variant, add a validate function to ensure that qt-5.4.0 is not used (this version of qt has errors related to cmake). --- .../repos/builtin/packages/cmake/package.py | 21 +++++++++++++++++-- .../repos/builtin/packages/qt/package.py | 3 +++ 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index f39a681284..806c37a68c 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -30,6 +30,7 @@ class Cmake(Package): homepage = 'https://www.cmake.org' url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz' + version('3.5.0', '33c5d09d4c33d4ffcc63578a6ba8777e') version('3.4.3', '4cb3ff35b2472aae70f542116d616e63') version('3.4.0', 'cd3034e0a44256a0917e254167217fc8') version('3.3.1', '52638576f4e1e621fed6c3410d3a1b12') @@ -49,8 +50,25 @@ def url_for_version(self, version): """Handle CMake's version-based custom URLs.""" return 'https://cmake.org/files/v%s/cmake-%s.tar.gz' % (version.up_to(2), version) - def install(self, spec, prefix): + def validate(self, spec): + """ + Checks if incompatible versions of qt were specified + :param spec: spec of the package + :raises RuntimeError: in case of inconsistencies + """ + + print spec + + if '+qt' in spec and spec.satisfies('^qt@5.4.0'): + msg = 'qt-5.4.0 has broken CMake modules.' 
+ raise RuntimeError(msg) + + def install(self, spec, prefix): + # Consistency check + self.validate(spec) + + # configure, build, install: options = ['--prefix=%s' % prefix] options.append('--parallel=%s' % str(make_jobs)) @@ -65,6 +83,5 @@ def install(self, spec, prefix): options.append('-DCMAKE_USE_OPENSSL=ON') configure(*options) - make() make('install') diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index 91afa420c1..ef5f05601f 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -8,6 +8,9 @@ class Qt(Package): list_url = 'http://download.qt-project.org/official_releases/qt/' list_depth = 2 + version('5.4.2', 'fa1c4d819b401b267eb246a543a63ea5', + url='http://download.qt-project.org/official_releases/qt/5.4/5.4.2/single/qt-everywhere-opensource-src-5.4.2.tar.gz') + version('5.4.0', 'e8654e4b37dd98039ba20da7a53877e6', url='http://download.qt-project.org/official_releases/qt/5.4/5.4.0/single/qt-everywhere-opensource-src-5.4.0.tar.gz') From f56939c16c36ee946f40571bdc330b432af93cce Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 9 Mar 2016 17:01:01 -0800 Subject: [PATCH 150/189] Add unit test for sbang patching. --- lib/spack/spack/test/__init__.py | 13 ++--- lib/spack/spack/test/sbang.py | 93 ++++++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+), 6 deletions(-) create mode 100644 lib/spack/spack/test/sbang.py diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 4b9a361d4b..d5d8b64765 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -65,7 +65,8 @@ 'lock', 'database', 'namespace_trie', - 'yaml'] + 'yaml', + 'sbang'] def list_tests(): @@ -87,20 +88,20 @@ def run(names, outputDir, verbose=False): "Valid names are:") colify(sorted(test_names), indent=4) sys.exit(1) - + tally = Tally() for test in names: module = 'spack.test.' + test print module - + tty.msg("Running test: %s" % test) - + runOpts = ["--with-%s" % spack.test.tally_plugin.Tally.name] - + if outputDir: xmlOutputFname = "unittests-{0}.xml".format(test) xmlOutputPath = join_path(outputDir, xmlOutputFname) - runOpts += ["--with-xunit", + runOpts += ["--with-xunit", "--xunit-file={0}".format(xmlOutputPath)] argv = [""] + runOpts + [module] result = nose.run(argv=argv, addplugins=[tally]) diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py new file mode 100644 index 0000000000..825bc4be98 --- /dev/null +++ b/lib/spack/spack/test/sbang.py @@ -0,0 +1,93 @@ +############################################################################## +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""\ +Test that Spack's shebang filtering works correctly. +""" +import os +import unittest +import tempfile +import shutil + +from llnl.util.filesystem import * +from spack.hooks.sbang import filter_shebangs_in_directory +import spack + +short_line = "#!/this/is/short/bin/bash\n" +long_line = "#!/this/" + ('x' * 200) + "/is/long\n" +sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.spack_root +last_line = "last!\n" + +class SbangTest(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.mkdtemp() + + # make sure we can ignore non-files + directory = os.path.join(self.tempdir, 'dir') + mkdirp(directory) + + # Script with short shebang + self.short_shebang = os.path.join(self.tempdir, 'short') + with open(self.short_shebang, 'w') as f: + f.write(short_line) + f.write(last_line) + + # Script with long shebang + self.long_shebang = os.path.join(self.tempdir, 'long') + with open(self.long_shebang, 'w') as f: + f.write(long_line) + f.write(last_line) + + # Script already using sbang. + self.has_shebang = os.path.join(self.tempdir, 'shebang') + with open(self.has_shebang, 'w') as f: + f.write(sbang_line) + f.write(long_line) + f.write(last_line) + + + def tearDown(self): + shutil.rmtree(self.tempdir, ignore_errors=True) + + + + def test_shebang_handling(self): + filter_shebangs_in_directory(self.tempdir) + + # Make sure this is untouched + with open(self.short_shebang, 'r') as f: + self.assertEqual(f.readline(), short_line) + self.assertEqual(f.readline(), last_line) + + # Make sure this got patched. + with open(self.long_shebang, 'r') as f: + self.assertEqual(f.readline(), sbang_line) + self.assertEqual(f.readline(), long_line) + self.assertEqual(f.readline(), last_line) + + # Make sure this is untouched + with open(self.has_shebang, 'r') as f: + self.assertEqual(f.readline(), sbang_line) + self.assertEqual(f.readline(), long_line) + self.assertEqual(f.readline(), last_line) From 52cdcdde76b272ba4905a23642323d116b00a7e7 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 9 Mar 2016 17:13:04 -0800 Subject: [PATCH 151/189] Fix #525: sbang handles symlinks & directories properly. --- lib/spack/spack/hooks/sbang.py | 43 +++++++++++++++++++++++++--------- 1 file changed, 32 insertions(+), 11 deletions(-) diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py index 3390ecea29..d78adb576e 100644 --- a/lib/spack/spack/hooks/sbang.py +++ b/lib/spack/spack/hooks/sbang.py @@ -35,7 +35,7 @@ shebang_limit = 127 def shebang_too_long(path): - """Detects whether an file has a shebang line that is too long.""" + """Detects whether a file has a shebang line that is too long.""" with open(path, 'r') as script: bytes = script.read(2) if bytes != '#!': @@ -47,14 +47,21 @@ def shebang_too_long(path): def filter_shebang(path): """Adds a second shebang line, using sbang, at the beginning of a file.""" + with open(path, 'r') as original_file: + original = original_file.read() + + # This line will be prepended to file + new_sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.spack_root + + # Skip files that are already using sbang. 
+ if original.startswith(new_sbang_line): + return + backup = path + ".shebang.bak" os.rename(path, backup) - with open(backup, 'r') as bak_file: - original = bak_file.read() - with open(path, 'w') as new_file: - new_file.write('#!/bin/bash %s/bin/sbang\n' % spack.spack_root) + new_file.write(new_sbang_line) new_file.write(original) copy_mode(backup, path) @@ -63,15 +70,29 @@ def filter_shebang(path): tty.warn("Patched overly long shebang in %s" % path) +def filter_shebangs_in_directory(directory): + for file in os.listdir(directory): + path = os.path.join(directory, file) + + # only handle files + if not os.path.isfile(path): + continue + + # only handle links that resolve within THIS package's prefix. + if os.path.islink(path): + real_path = os.path.realpath(path) + if not real_path.startswith(directory + os.sep): + continue + + # test the file for a long shebang, and filter + if shebang_too_long(path): + filter_shebang(path) + + def post_install(pkg): """This hook edits scripts so that they call /bin/bash $spack_prefix/bin/sbang instead of something longer than the shebang limit.""" if not os.path.isdir(pkg.prefix.bin): return - - for file in os.listdir(pkg.prefix.bin): - path = os.path.join(pkg.prefix.bin, file) - if shebang_too_long(path): - filter_shebang(path) - + filter_shebangs_in_directory(pkg.prefix.bin) From 267e83d8a70324d9b8a70a86085853dd762c7f93 Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Wed, 9 Mar 2016 21:12:11 -0500 Subject: [PATCH 152/189] Added emacs package. --- .../repos/builtin/packages/emacs/package.py | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 var/spack/repos/builtin/packages/emacs/package.py diff --git a/var/spack/repos/builtin/packages/emacs/package.py b/var/spack/repos/builtin/packages/emacs/package.py new file mode 100644 index 0000000000..e496ffc3d1 --- /dev/null +++ b/var/spack/repos/builtin/packages/emacs/package.py @@ -0,0 +1,36 @@ +# FIXME: +# This is a template package file for Spack. We've conveniently +# put "FIXME" labels next to all the things you'll want to change. +# +# Once you've edited all the FIXME's, delete this whole message, +# save this file, and test out your package like this: +# +# spack install emacs +# +# You can always get back here to change things with: +# +# spack edit emacs +# +# See the spack documentation for more information on building +# packages. +# +from spack import * + +class Emacs(Package): + """FIXME: put a proper description of your package here.""" + # FIXME: add a proper url for your package's homepage here. + homepage = "http://www.example.com" + url = "http://ftp.gnu.org/gnu/emacs/emacs-24.5.tar.gz" + + version('24.5', 'd74b597503a68105e61b5b9f6d065b44') + + # FIXME: Add dependencies if this package requires them. + depends_on('ncurses') + + def install(self, spec, prefix): + # FIXME: Modify the configure line to suit your build system here. 
+ configure('--prefix=%s' % prefix) + + # FIXME: Add logic to build and install here + make() + make("install") From b701aa10d4b40eda3d5ea3f0c96b5aed7493c7a7 Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Wed, 9 Mar 2016 21:21:25 -0500 Subject: [PATCH 153/189] Fixed up --- .../repos/builtin/packages/emacs/package.py | 29 ++++--------------- 1 file changed, 6 insertions(+), 23 deletions(-) diff --git a/var/spack/repos/builtin/packages/emacs/package.py b/var/spack/repos/builtin/packages/emacs/package.py index e496ffc3d1..09eb05d5a7 100644 --- a/var/spack/repos/builtin/packages/emacs/package.py +++ b/var/spack/repos/builtin/packages/emacs/package.py @@ -1,36 +1,19 @@ -# FIXME: -# This is a template package file for Spack. We've conveniently -# put "FIXME" labels next to all the things you'll want to change. -# -# Once you've edited all the FIXME's, delete this whole message, -# save this file, and test out your package like this: -# -# spack install emacs -# -# You can always get back here to change things with: -# -# spack edit emacs -# -# See the spack documentation for more information on building -# packages. -# from spack import * class Emacs(Package): - """FIXME: put a proper description of your package here.""" - # FIXME: add a proper url for your package's homepage here. - homepage = "http://www.example.com" + """The Emacs programmable text editor.""" + homepage = "https://www.gnu.org/software/emacs" url = "http://ftp.gnu.org/gnu/emacs/emacs-24.5.tar.gz" version('24.5', 'd74b597503a68105e61b5b9f6d065b44') - # FIXME: Add dependencies if this package requires them. depends_on('ncurses') + # Emacs also depends on: + # GTK or other widget library + # libtiff, png, etc. + # For now, we assume the system provides all that stuff. def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. configure('--prefix=%s' % prefix) - - # FIXME: Add logic to build and install here make() make("install") From b0377da771d9154956a408a59e97000049d7c2fb Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 31 Jan 2016 12:21:04 -0800 Subject: [PATCH 154/189] update mirror config documentation. - mirrors.yaml uses Spack's OrderedDict rather than lists. --- lib/spack/docs/mirrors.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/spack/docs/mirrors.rst b/lib/spack/docs/mirrors.rst index 7581a0e9ed..b20fedb55f 100644 --- a/lib/spack/docs/mirrors.rst +++ b/lib/spack/docs/mirrors.rst @@ -186,7 +186,7 @@ Each mirror has a name so that you can refer to it again later. 
``spack mirror list`` ---------------------------- -If you want to see all the mirrors Spack knows about you can run ``spack mirror list``:: +To see all the mirrors Spack knows about, run ``spack mirror list``:: $ spack mirror list local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24 @@ -196,7 +196,7 @@ If you want to see all the mirrors Spack knows about you can run ``spack mirror ``spack mirror remove`` ---------------------------- -And, if you want to remove a mirror, just remove it by name:: +To remove a mirror by name:: $ spack mirror remove local_filesystem $ spack mirror list @@ -205,11 +205,11 @@ And, if you want to remove a mirror, just remove it by name:: Mirror precedence ---------------------------- -Adding a mirror really just adds a section in ``~/.spack/mirrors.yaml``:: +Adding a mirror really adds a line in ``~/.spack/mirrors.yaml``:: mirrors: - - local_filesystem: file:///Users/gamblin2/spack-mirror-2014-06-24 - - remote_server: https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24 + local_filesystem: file:///Users/gamblin2/spack-mirror-2014-06-24 + remote_server: https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24 If you want to change the order in which mirrors are searched for packages, you can edit this file and reorder the sections. Spack will From 0244d794cd68efd68edd6a797dd1db158aca87b6 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 31 Jan 2016 13:28:12 -0800 Subject: [PATCH 155/189] remove unnecessary import --- lib/spack/spack/stage.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 4703a3aae6..5354135e6a 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -26,7 +26,6 @@ import errno import shutil import tempfile -import sys from urlparse import urljoin import llnl.util.tty as tty From b0572a546242ef1c570f5dd3c9c6336bc1d55607 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 7 Feb 2016 11:27:39 -0700 Subject: [PATCH 156/189] Minor tweaks to abi code. --- lib/spack/spack/abi.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py index f0a997703c..7e565bcbf9 100644 --- a/lib/spack/spack/abi.py +++ b/lib/spack/spack/abi.py @@ -69,11 +69,11 @@ def _gcc_get_libstdcxx_version(self, version): if not libpath: return None return os.path.basename(libpath) - - + + @memoized def _gcc_compiler_compare(self, pversion, cversion): - """Returns true iff the gcc version pversion and cversion + """Returns true iff the gcc version pversion and cversion are ABI compatible.""" plib = self._gcc_get_libstdcxx_version(pversion) clib = self._gcc_get_libstdcxx_version(cversion) @@ -86,43 +86,43 @@ def _intel_compiler_compare(self, pversion, cversion): """Returns true iff the intel version pversion and cversion are ABI compatible""" - #Test major and minor versions. Ignore build version. + # Test major and minor versions. Ignore build version. 
if (len(pversion.version) < 2 or len(cversion.version) < 2): return False - return (pversion.version[0] == cversion.version[0]) and \ - (pversion.version[1] == cversion.version[1]) - - + return pversion.version[:2] == cversion.version[:2] + + def compiler_compatible(self, parent, child, **kwargs): """Returns true iff the compilers for parent and child specs are ABI compatible""" if not parent.compiler or not child.compiler: return True - + if parent.compiler.name != child.compiler.name: - #Different compiler families are assumed ABI incompatible + # Different compiler families are assumed ABI incompatible return False - + if kwargs.get('loose', False): return True + # TODO: Can we move the specialized ABI matching stuff + # TODO: into compiler classes? for pversion in parent.compiler.versions: for cversion in child.compiler.versions: - #For a few compilers use specialized comparisons. Otherwise + # For a few compilers use specialized comparisons. Otherwise # match on version match. if pversion.satisfies(cversion): return True - elif parent.compiler.name == "gcc" and \ - self._gcc_compiler_compare(pversion, cversion): + elif (parent.compiler.name == "gcc" and + self._gcc_compiler_compare(pversion, cversion)): return True - elif parent.compiler.name == "intel" and \ - self._intel_compiler_compare(pversion, cversion): + elif (parent.compiler.name == "intel" and + self._intel_compiler_compare(pversion, cversion)): return True return False - + def compatible(self, parent, child, **kwargs): """Returns true iff a parent and child spec are ABI compatible""" loosematch = kwargs.get('loose', False) return self.architecture_compatible(parent, child) and \ self.compiler_compatible(parent, child, loose=loosematch) - From 048c406f49a3c7a30008268590ab57b74ea60b6b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 7 Feb 2016 11:32:26 -0700 Subject: [PATCH 157/189] Remove vestigial variants in directory name. 
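The deleted lines in the diff below only built a generator expression that was never iterated, so they could not have affected the directory name assembled just after them; that is what makes them vestigial. A tiny sketch, with hypothetical values, of why an unconsumed generator is dead code:

    # Tiny sketch with hypothetical values: a generator expression that is
    # never iterated has no observable effect on the result built below it.
    variants = ['debug', 'shared']
    unused = ('-' + v for v in variants)   # built, but never consumed
    dir_name = "%s-%s-%s" % ('mypkg', '1.0', 'abc1234')
    print(dir_name)                        # mypkg-1.0-abc1234 (unchanged)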
--- lib/spack/spack/directory_layout.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index b94468faf0..39ee4e203d 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -198,14 +198,10 @@ def hidden_file_paths(self): def relative_path_for_spec(self, spec): _check_concrete(spec) - + if spec.external: return spec.external - enabled_variants = ( - '-' + v.name for v in spec.variants.values() - if v.enabled) - dir_name = "%s-%s-%s" % ( spec.name, spec.version, From 1fe196f95cc26cac73abe64752ff67b150f4d50a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 2 Mar 2016 22:38:21 -0800 Subject: [PATCH 158/189] whitespace and formatting --- lib/spack/spack/preferred_packages.py | 38 +++++++++---------- lib/spack/spack/spec.py | 8 ++-- .../repos/builtin/packages/python/package.py | 5 ++- 3 files changed, 26 insertions(+), 25 deletions(-) diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index eaea016a85..4ff0f18b31 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -33,8 +33,8 @@ def __init__(self): self.preferred = spack.config.get_config('packages') self._spec_for_pkgname_cache = {} - #Given a package name, sort component (e.g, version, compiler, ...), and - # a second_key (used by providers), return the list + # Given a package name, sort component (e.g, version, compiler, ...), and + # a second_key (used by providers), return the list def _order_for_package(self, pkgname, component, second_key, test_all=True): pkglist = [pkgname] if test_all: @@ -47,10 +47,10 @@ def _order_for_package(self, pkgname, component, second_key, test_all=True): continue return [str(s).strip() for s in order] return [] - - # A generic sorting function. Given a package name and sort - # component, return less-than-0, 0, or greater-than-0 if + + # A generic sorting function. Given a package name and sort + # component, return less-than-0, 0, or greater-than-0 if # a is respectively less-than, equal to, or greater than b. def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key): if a is None: @@ -76,7 +76,7 @@ def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, cmp_a = orderlist.index(str(a)) cmp_b = orderlist.index(str(b)) reverse = 1 - + if cmp_a < cmp_b: return -1 * reverse elif cmp_a > cmp_b: @@ -87,7 +87,7 @@ def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, # A sorting function for specs. Similar to component_compare, but # a and b are considered to match entries in the sorting list if they - # satisfy the list component. + # satisfy the list component. 
def _spec_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key): if not a or not a.concrete: return -1 @@ -121,7 +121,7 @@ def _spec_for_pkgname(self, pkgname, component, second_key): key = (pkgname, component, second_key) if not key in self._spec_for_pkgname_cache: pkglist = self._order_for_package(pkgname, component, second_key) - if not pkglist: + if not pkglist: if component in self._default_order: pkglist = self._default_order[component] if component == 'compiler': @@ -132,9 +132,9 @@ def _spec_for_pkgname(self, pkgname, component, second_key): self._spec_for_pkgname_cache[key] = [spack.spec.Spec(s) for s in pkglist] return self._spec_for_pkgname_cache[key] - + def provider_compare(self, pkgname, provider_str, a, b): - """Return less-than-0, 0, or greater than 0 if a is respecively less-than, equal-to, or + """Return less-than-0, 0, or greater than 0 if a is respecively less-than, equal-to, or greater-than b. A and b are possible implementations of provider_str. One provider is less-than another if it is preferred over the other. For example, provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would return -1 if @@ -148,28 +148,28 @@ def spec_has_preferred_provider(self, pkgname, provider_str): def version_compare(self, pkgname, a, b): - """Return less-than-0, 0, or greater than 0 if version a of pkgname is - respecively less-than, equal-to, or greater-than version b of pkgname. + """Return less-than-0, 0, or greater than 0 if version a of pkgname is + respecively less-than, equal-to, or greater-than version b of pkgname. One version is less-than another if it is preferred over the other.""" return self._spec_compare(pkgname, 'version', a, b, True, None) - + def variant_compare(self, pkgname, a, b): - """Return less-than-0, 0, or greater than 0 if variant a of pkgname is - respecively less-than, equal-to, or greater-than variant b of pkgname. + """Return less-than-0, 0, or greater than 0 if variant a of pkgname is + respecively less-than, equal-to, or greater-than variant b of pkgname. One variant is less-than another if it is preferred over the other.""" return self._component_compare(pkgname, 'variant', a, b, False, None) def architecture_compare(self, pkgname, a, b): - """Return less-than-0, 0, or greater than 0 if architecture a of pkgname is - respecively less-than, equal-to, or greater-than architecture b of pkgname. + """Return less-than-0, 0, or greater than 0 if architecture a of pkgname is + respecively less-than, equal-to, or greater-than architecture b of pkgname. One architecture is less-than another if it is preferred over the other.""" return self._component_compare(pkgname, 'architecture', a, b, False, None) def compiler_compare(self, pkgname, a, b): - """Return less-than-0, 0, or greater than 0 if compiler a of pkgname is - respecively less-than, equal-to, or greater-than compiler b of pkgname. + """Return less-than-0, 0, or greater than 0 if compiler a of pkgname is + respecively less-than, equal-to, or greater-than compiler b of pkgname. 
One compiler is less-than another if it is preferred over the other.""" return self._spec_compare(pkgname, 'compiler', a, b, False, None) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 6f55065f01..b8c0d0ef9c 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -428,7 +428,7 @@ def __init__(self, spec_like, *dep_like, **kwargs): for dep in dep_like: spec = dep if isinstance(dep, Spec) else Spec(dep) self._add_dependency(spec) - + # # Private routines here are called by the parser when building a spec. @@ -1410,7 +1410,7 @@ def _dup(self, other, **kwargs): self.architecture != other.architecture and self.compiler != other.compiler and \ self.variants != other.variants and self._normal != other._normal and \ self.concrete != other.concrete and self.external != other.external) - + # Local node attributes get copied first. self.name = other.name self.versions = other.versions.copy() @@ -1585,7 +1585,7 @@ def format(self, format_string='$_$@$%@$+$=', **kwargs): $@ Version with '@' prefix $% Compiler with '%' prefix $%@ Compiler with '%' prefix & compiler version with '@' prefix - $+ Options + $+ Options $= Architecture with '=' prefix $# 7-char prefix of DAG hash with '-' prefix $$ $ @@ -1738,7 +1738,7 @@ def __cmp__(self, other): #Package name sort order is not configurable, always goes alphabetical if self.name != other.name: return cmp(self.name, other.name) - + #Package version is second in compare order pkgname = self.name if self.versions != other.versions: diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 58d401244e..dd240d1ea0 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -34,8 +34,9 @@ def install(self, spec, prefix): env['PYTHONHOME'] = prefix env['MACOSX_DEPLOYMENT_TARGET'] = '10.6' - # Rest of install is pretty standard except setup.py needs to be able to read the CPPFLAGS - # and LDFLAGS as it scans for the library and headers to build + # Rest of install is pretty standard except setup.py needs to + # be able to read the CPPFLAGS and LDFLAGS as it scans for the + # library and headers to build configure_args= [ "--prefix=%s" % prefix, "--with-threads", From 82b7067fdfc3f2fb90cff9014ab5379e334b40fd Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 3 Mar 2016 00:44:00 -0800 Subject: [PATCH 159/189] Refactored external packages slightly. - Move `Spec.__cmp__` out of spec, into concretize as `cmp_specs`. - `Spec.__cmp__` was never called (except explicitly) due to rich comparison operators from `key_ordering` - Refactor `_find_other_spec` to free function `find_spec`. Add a test for it to make sure it works. --- lib/spack/spack/concretize.py | 142 +++++++++++++++++--------- lib/spack/spack/preferred_packages.py | 2 +- lib/spack/spack/spec.py | 34 ------ lib/spack/spack/test/concretize.py | 64 ++++++++++++ 4 files changed, 158 insertions(+), 84 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 8da7011b53..bad67c34e3 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -50,34 +50,17 @@ class DefaultConcretizer(object): default concretization strategies, or you can override all of them. 
""" - def _find_other_spec(self, spec, condition): - """Searches the dag from spec in an intelligent order and looks - for a spec that matches a condition""" - dagiter = chain(spec.traverse(direction='parents'), spec.traverse(direction='children')) - found = next((x for x in dagiter if x is not spec and condition(x)), None) - if found: - return found - dagiter = chain(spec.traverse(direction='parents'), spec.traverse(direction='children')) - searched = list(dagiter) - found = next((x for x in spec.root.traverse() if x not in searched and x is not spec and condition(x)), None) - if found: - return found - if condition(spec): - return spec - return None - - def _valid_virtuals_and_externals(self, spec): """Returns a list of spec/external-path pairs for both virtuals and externals - that can concretize this spec.""" + that can concretize this spec.""" # Get a list of candidate packages that could satisfy this spec packages = [] if spec.virtual: providers = spack.repo.providers_for(spec) if not providers: raise UnsatisfiableProviderSpecError(providers[0], spec) - spec_w_preferred_providers = self._find_other_spec(spec, \ - lambda(x): spack.pkgsort.spec_has_preferred_provider(x.name, spec.name)) + spec_w_preferred_providers = find_spec( + spec, lambda(x): spack.pkgsort.spec_has_preferred_provider(x.name, spec.name)) if not spec_w_preferred_providers: spec_w_preferred_providers = spec provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name) @@ -101,15 +84,15 @@ def _valid_virtuals_and_externals(self, spec): raise NoBuildError(spec) def cmp_externals(a, b): - result = a[0].__cmp__(b[0]) - if result != 0: return result + result = cmp_specs(a[0], b[0]) + if result != 0: + return result if not a[1] and b[1]: return 1 if not b[1] and a[1]: return -1 - return a[1].__cmp__(b[1]) + return cmp_specs(a[1], b[1]) - #result = sorted(result, cmp=lambda a,b: a[0].__cmp__(b[0])) result = sorted(result, cmp=cmp_externals) return result @@ -121,27 +104,27 @@ def concretize_virtual_and_external(self, spec): if not candidates: return False - #Find the nearest spec in the dag that has a compiler. We'll use that + # Find the nearest spec in the dag that has a compiler. We'll use that # spec to test compiler compatibility. - other_spec = self._find_other_spec(spec, lambda(x): x.compiler) + other_spec = find_spec(spec, lambda(x): x.compiler) if not other_spec: other_spec = spec.root - #Choose an ABI-compatible candidate, or the first match otherwise. + # Choose an ABI-compatible candidate, or the first match otherwise. candidate = None if other_spec: candidate = next((c for c in candidates if spack.abi.compatible(c[0], other_spec)), None) if not candidate: - #Try a looser ABI matching + # Try a looser ABI matching candidate = next((c for c in candidates if spack.abi.compatible(c[0], other_spec, loose=True)), None) if not candidate: - #No ABI matches. Pick the top choice based on the orignal preferences. + # No ABI matches. Pick the top choice based on the orignal preferences. 
candidate = candidates[0] candidate_spec = candidate[0] external = candidate[1] changed = False - #If we're external then trim the dependencies + # If we're external then trim the dependencies if external: if (spec.dependencies): changed = True @@ -150,26 +133,26 @@ def concretize_virtual_and_external(self, spec): def fequal(candidate_field, spec_field): return (not candidate_field) or (candidate_field == spec_field) - if fequal(candidate_spec.name, spec.name) and \ - fequal(candidate_spec.versions, spec.versions) and \ - fequal(candidate_spec.compiler, spec.compiler) and \ - fequal(candidate_spec.architecture, spec.architecture) and \ - fequal(candidate_spec.dependencies, spec.dependencies) and \ - fequal(candidate_spec.variants, spec.variants) and \ - fequal(external, spec.external): + if (fequal(candidate_spec.name, spec.name) and + fequal(candidate_spec.versions, spec.versions) and + fequal(candidate_spec.compiler, spec.compiler) and + fequal(candidate_spec.architecture, spec.architecture) and + fequal(candidate_spec.dependencies, spec.dependencies) and + fequal(candidate_spec.variants, spec.variants) and + fequal(external, spec.external)): return changed - - #Refine this spec to the candidate. + + # Refine this spec to the candidate. if spec.virtual: spec._replace_with(candidate_spec) changed = True if spec._dup(candidate_spec, deps=False, cleardeps=False): changed = True - spec.external = external + spec.external = external return changed - - + + def concretize_version(self, spec): """If the spec is already concrete, return. Otherwise take the preferred version from spackconfig, and default to the package's @@ -263,7 +246,7 @@ def concretize_compiler(self, spec): """If the spec already has a compiler, we're done. If not, then take the compiler used for the nearest ancestor with a compiler spec and use that. If the ancestor's compiler is not - concrete, then used the preferred compiler as specified in + concrete, then used the preferred compiler as specified in spackconfig. Intuition: Use the spackconfig default if no package that depends on @@ -272,37 +255,99 @@ def concretize_compiler(self, spec): link to this one, to maximize compatibility. """ all_compilers = spack.compilers.all_compilers() - + if (spec.compiler and spec.compiler.concrete and spec.compiler in all_compilers): return False #Find the another spec that has a compiler, or the root if none do - other_spec = self._find_other_spec(spec, lambda(x) : x.compiler) + other_spec = find_spec(spec, lambda(x) : x.compiler) if not other_spec: other_spec = spec.root other_compiler = other_spec.compiler assert(other_spec) - + # Check if the compiler is already fully specified if other_compiler in all_compilers: spec.compiler = other_compiler.copy() return True - + # Filter the compilers into a sorted list based on the compiler_order from spackconfig compiler_list = all_compilers if not other_compiler else spack.compilers.find(other_compiler) cmp_compilers = partial(spack.pkgsort.compiler_compare, other_spec.name) matches = sorted(compiler_list, cmp=cmp_compilers) if not matches: raise UnavailableCompilerVersionError(other_compiler) - + # copy concrete version into other_compiler spec.compiler = matches[0].copy() assert(spec.compiler.concrete) return True # things changed. 
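For reference, the two free functions introduced below take over the roles of the removed `_find_other_spec` method and `Spec.__cmp__`: `find_spec` walks the DAG in a deliberate order (parents first, then children, then the remaining relatives, and finally the spec itself) and returns the first spec that satisfies a condition, while `cmp_specs` ranks two specs by the configured preferences. An illustrative sketch of how the concretizer above uses `find_spec` (not part of the patch; `spec` is assumed to be a node in a partially concretized DAG):

    # Nearest relative that already has a compiler assigned; fall back to root.
    other_spec = find_spec(spec, lambda s: s.compiler)
    if not other_spec:
        other_spec = spec.root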
+def find_spec(spec, condition): + """Searches the dag from spec in an intelligent order and looks + for a spec that matches a condition""" + # First search parents, then search children + dagiter = chain(spec.traverse(direction='parents', root=False), + spec.traverse(direction='children', root=False)) + visited = set() + for relative in dagiter: + if condition(relative): + return relative + visited.add(id(relative)) + + # Then search all other relatives in the DAG *except* spec + for relative in spec.root.traverse(): + if relative is spec: continue + if id(relative) in visited: continue + if condition(relative): + return relative + + # Finally search spec itself. + if condition(spec): + return spec + + return None # Nothing matched the condition. + + +def cmp_specs(lhs, rhs): + # Package name sort order is not configurable, always goes alphabetical + if lhs.name != rhs.name: + return cmp(lhs.name, rhs.name) + + # Package version is second in compare order + pkgname = lhs.name + if lhs.versions != rhs.versions: + return spack.pkgsort.version_compare( + pkgname, lhs.versions, rhs.versions) + + # Compiler is third + if lhs.compiler != rhs.compiler: + return spack.pkgsort.compiler_compare( + pkgname, lhs.compiler, rhs.compiler) + + # Variants + if lhs.variants != rhs.variants: + return spack.pkgsort.variant_compare( + pkgname, lhs.variants, rhs.variants) + + # Architecture + if lhs.architecture != rhs.architecture: + return spack.pkgsort.architecture_compare( + pkgname, lhs.architecture, rhs.architecture) + + # Dependency is not configurable + lhash, rhash = hash(lhs), hash(rhs) + if lhash != rhash: + return -1 if lhash < rhash else 1 + + # Equal specs + return 0 + + + class UnavailableCompilerVersionError(spack.error.SpackError): """Raised when there is no available compiler that satisfies a compiler spec.""" @@ -326,4 +371,3 @@ class NoBuildError(spack.error.SpackError): def __init__(self, spec): super(NoBuildError, self).__init__( "The spec '%s' is configured as nobuild, and no matching external installs were found" % spec.name) - diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index 4ff0f18b31..9d219a1a6e 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -27,7 +27,7 @@ from spack.version import * class PreferredPackages(object): - _default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] }, #Arbitrary, but consistent + _default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] }, # Arbitrary, but consistent def __init__(self): self.preferred = spack.config.get_config('packages') diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index b8c0d0ef9c..c045e80365 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1734,40 +1734,6 @@ def dep_string(self): return ''.join("^" + dep.format() for dep in self.sorted_deps()) - def __cmp__(self, other): - #Package name sort order is not configurable, always goes alphabetical - if self.name != other.name: - return cmp(self.name, other.name) - - #Package version is second in compare order - pkgname = self.name - if self.versions != other.versions: - return spack.pkgsort.version_compare(pkgname, - self.versions, other.versions) - - #Compiler is third - if self.compiler != other.compiler: - return spack.pkgsort.compiler_compare(pkgname, - self.compiler, other.compiler) - - #Variants - if self.variants != other.variants: - return spack.pkgsort.variant_compare(pkgname, - self.variants, other.variants) - -
#Architecture - if self.architecture != other.architecture: - return spack.pkgsort.architecture_compare(pkgname, - self.architecture, other.architecture) - - #Dependency is not configurable - if self.dag_hash() != other.dag_hash(): - return -1 if self.dag_hash() < other.dag_hash() else 1 - - #Equal specs - return 0 - - def __str__(self): return self.format() + self.dep_string() diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 820c5d84a8..07828d8ea6 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -24,6 +24,7 @@ ############################################################################## import spack from spack.spec import Spec, CompilerSpec +from spack.concretize import find_spec from spack.test.mock_packages_test import * class ConcretizeTest(MockPackagesTest): @@ -218,3 +219,66 @@ def test_external_and_virtual(self): self.assertEqual(spec['stuff'].external, '/path/to/external_virtual_gcc') self.assertTrue(spec['externaltool'].compiler.satisfies('gcc')) self.assertTrue(spec['stuff'].compiler.satisfies('gcc')) + + + def test_find_spec_parents(self): + """Tests the spec finding logic used by concretization. """ + s = Spec('a +foo', + Spec('b +foo', + Spec('c'), + Spec('d +foo')), + Spec('e +foo')) + + self.assertEqual('a', find_spec(s['b'], lambda s: '+foo' in s).name) + + + def test_find_spec_children(self): + s = Spec('a', + Spec('b +foo', + Spec('c'), + Spec('d +foo')), + Spec('e +foo')) + self.assertEqual('d', find_spec(s['b'], lambda s: '+foo' in s).name) + s = Spec('a', + Spec('b +foo', + Spec('c +foo'), + Spec('d')), + Spec('e +foo')) + self.assertEqual('c', find_spec(s['b'], lambda s: '+foo' in s).name) + + + def test_find_spec_sibling(self): + s = Spec('a', + Spec('b +foo', + Spec('c'), + Spec('d')), + Spec('e +foo')) + self.assertEqual('e', find_spec(s['b'], lambda s: '+foo' in s).name) + self.assertEqual('b', find_spec(s['e'], lambda s: '+foo' in s).name) + + s = Spec('a', + Spec('b +foo', + Spec('c'), + Spec('d')), + Spec('e', + Spec('f +foo'))) + self.assertEqual('f', find_spec(s['b'], lambda s: '+foo' in s).name) + + + def test_find_spec_self(self): + s = Spec('a', + Spec('b +foo', + Spec('c'), + Spec('d')), + Spec('e')) + self.assertEqual('b', find_spec(s['b'], lambda s: '+foo' in s).name) + + + def test_find_spec_none(self): + s = Spec('a', + Spec('b', + Spec('c'), + Spec('d')), + Spec('e')) + self.assertEqual(None, find_spec(s['b'], lambda s: '+foo' in s)) + From c4fddcc6e977eb7040c48581eb90f1a441cf3e71 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 10 Mar 2016 03:15:49 -0800 Subject: [PATCH 160/189] Add 'provders' back into packages.yaml schema --- lib/spack/spack/config.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 95a988f7ff..3a785fe692 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -224,6 +224,15 @@ 'type': 'boolean', 'default': False, }, + 'providers': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'patternProperties': { + r'\w[\w-]*': { + 'type' : 'array', + 'default' : [], + 'items' : { 'type' : 'string' },},},}, 'paths': { 'type' : 'object', 'default' : {}, @@ -534,11 +543,11 @@ def spec_externals(spec): allpkgs = get_config('packages') name = spec.name spec_locations = [] - + pkg_paths = allpkgs.get(name, {}).get('paths', None) if not pkg_paths: return [] - + for pkg,path in pkg_paths.iteritems(): if not 
spec.satisfies(pkg): continue From 7ae6c62ddb0271d9e2364cf4249adbc7abbc0b31 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 10 Mar 2016 14:08:43 +0100 Subject: [PATCH 161/189] Fixed shell quoting error --- lib/spack/env/cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 6b6073db43..2888c28c48 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -154,7 +154,7 @@ fi input_command="$@" if [ "$mode" == vcheck ] ; then - exec "${input_command}" + exec ${command} ${input_command} fi # From 78ef0618bc6d60f50edc6bb83b5cb1a5a48ac22b Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 10 Mar 2016 14:35:09 +0100 Subject: [PATCH 162/189] cc : converted indents to spaces --- lib/spack/env/cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 2888c28c48..3e429fdf22 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -119,7 +119,7 @@ if [ -z "$mode" ]; then if [ "$arg" = -v -o "$arg" = -V -o "$arg" = --version -o "$arg" = -dumpversion ]; then mode=vcheck break - fi + fi done fi From 30adc4c9b8cdbe099ebe4ef9bec4b2594cda7b61 Mon Sep 17 00:00:00 2001 From: alalazo Date: Thu, 10 Mar 2016 14:59:31 +0100 Subject: [PATCH 163/189] cc : handle spaces in folders? --- lib/spack/env/cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 3e429fdf22..4a3e6eddc9 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -154,7 +154,7 @@ fi input_command="$@" if [ "$mode" == vcheck ] ; then - exec ${command} ${input_command} + exec ${command} "$@" fi # From 2bbf42b49d874adad2d9f53e1c8f6225df80415b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 10 Mar 2016 16:18:11 -0800 Subject: [PATCH 164/189] Indentation change. --- lib/spack/docs/site_configuration.rst | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst index 76fe8fdd7d..7ae541d4a3 100644 --- a/lib/spack/docs/site_configuration.rst +++ b/lib/spack/docs/site_configuration.rst @@ -56,7 +56,7 @@ directory is. External Packages ~~~~~~~~~~~~~~~~~~~~~ -Spack can be configured to use externally-installed +Spack can be configured to use externally-installed packages rather than building its own packages. This may be desirable if machines ship with system packages, such as a customized MPI that should be used instead of Spack building its own MPI. @@ -76,7 +76,7 @@ directory. Here's an example of an external configuration: This example lists three installations of OpenMPI, one built with gcc, one built with gcc and debug information, and another built with Intel. -If Spack is asked to build a package that uses one of these MPIs as a +If Spack is asked to build a package that uses one of these MPIs as a dependency, it will use the the pre-installed OpenMPI in the given directory. This example also specifies that Spack should never build its own OpenMPI via the ``nobuild: True`` option. @@ -85,12 +85,12 @@ Each ``packages.yaml`` begins with a ``packages:`` token, followed by a list of package names. To specify externals, add a ``paths`` token under the package name, which lists externals in a ``spec : /path`` format. Each spec should be as -well-defined as reasonably possible. If a -package lacks a spec component, such as missing a compiler or -package version, then Spack will guess the missing component based +well-defined as reasonably possible. 
If a +package lacks a spec component, such as missing a compiler or +package version, then Spack will guess the missing component based on its most-favored packages, and it may guess incorrectly. -Each package version and compilers listed in an external should +Each package version and compilers listed in an external should have entries in Spack's packages and compiler configuration, even though the package and compiler may not every be built. @@ -111,18 +111,18 @@ be: paths: openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3 openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug - openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel - nobuild: True + openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel + nobuild: True The addition of the ``nobuild`` flag tells Spack that it should never build its own version of OpenMPI, and it will instead always rely on a pre-built OpenMPI. Similar to ``paths``, ``nobuild`` is specified as a property under a package name. -The ``nobuild`` does not need to be paired with external packages. -It could also be used alone to forbid packages that may be +The ``nobuild`` does not need to be paired with external packages. +It could also be used alone to forbid packages that may be buggy or otherwise undesirable. - + Profiling ~~~~~~~~~~~~~~~~~~~~~ From 4693af0736910244abcb193a65041314447f0da6 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Thu, 10 Mar 2016 16:23:35 -0800 Subject: [PATCH 165/189] Fix type error that was causing mis-ordering of compiler versions --- lib/spack/spack/preferred_packages.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py index 9d219a1a6e..4d8526c75f 100644 --- a/lib/spack/spack/preferred_packages.py +++ b/lib/spack/spack/preferred_packages.py @@ -27,7 +27,7 @@ from spack.version import * class PreferredPackages(object): - _default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] }, # Arbitrary, but consistent + _default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] } # Arbitrary, but consistent def __init__(self): self.preferred = spack.config.get_config('packages') From ac88cab68ffa16d4ff448de5f186d746cb36b40a Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Thu, 10 Mar 2016 17:00:27 -0800 Subject: [PATCH 166/189] Fix issue with preferred satisfies not being respected --- lib/spack/spack/concretize.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index bad67c34e3..2268084e56 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -70,20 +70,23 @@ def _valid_virtuals_and_externals(self, spec): # For each candidate package, if it has externals add those to the candidates # if it's a nobuild, then only add the externals. - result = [] + candidates = [] all_compilers = spack.compilers.all_compilers() for pkg in packages: externals = spec_externals(pkg) buildable = not is_spec_nobuild(pkg) if buildable: - result.append((pkg, None)) + candidates.append((pkg, None)) for ext in externals: if ext[0].satisfies(spec): - result.append(ext) - if not result: + candidates.append(ext) + if not candidates: raise NoBuildError(spec) def cmp_externals(a, b): + if a[0].name != b[0].name: + #We're choosing between different providers. 
Maintain order from above sort + return candidates.index(a) - candidates.index(b) result = cmp_specs(a[0], b[0]) if result != 0: return result @@ -91,10 +94,10 @@ def cmp_externals(a, b): return 1 if not b[1] and a[1]: return -1 - return cmp_specs(a[1], b[1]) + return cmp(a[1], b[1]) - result = sorted(result, cmp=cmp_externals) - return result + candidates = sorted(candidates, cmp=cmp_externals) + return candidates def concretize_virtual_and_external(self, spec): From f5e8857c5e44719778e531bcc149c9fe228240b3 Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Fri, 11 Mar 2016 09:51:12 -0700 Subject: [PATCH 167/189] + Rename variant 'sphinxbuild' to 'doc' as recommended in the discussion of PR#526. Also, remove a debug print statement that was accidentally committed. --- var/spack/repos/builtin/packages/cmake/package.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index 806c37a68c..cc93c7067c 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -39,12 +39,12 @@ class Cmake(Package): variant('ncurses', default=True, description='Enables the build of the ncurses gui') variant('qt', default=False, description='Enables the build of cmake-gui') - variant('sphinxbuild', default=False, description='Enables the generation of html and man page documentation') + variant('doc', default=False, description='Enables the generation of html and man page documentation') depends_on('ncurses', when='+ncurses') depends_on('qt', when='+qt') - depends_on('python@2.7.11:', when='+sphinxbuild') - depends_on('py-sphinx', when='+sphinxbuild') + depends_on('python@2.7.11:', when='+doc') + depends_on('py-sphinx', when='+doc') def url_for_version(self, version): """Handle CMake's version-based custom URLs.""" @@ -58,8 +58,6 @@ def validate(self, spec): :raises RuntimeError: in case of inconsistencies """ - print spec - if '+qt' in spec and spec.satisfies('^qt@5.4.0'): msg = 'qt-5.4.0 has broken CMake modules.' raise RuntimeError(msg) @@ -75,7 +73,7 @@ def install(self, spec, prefix): if '+qt' in spec: options.append('--qt-gui') - if '+sphinxbuild' in spec: + if '+doc' in spec: options.append('--sphinx-html') options.append('--sphinx-man') From 1c7f754e5b8cd7740f3c4a91ee22a0354b40844a Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Fri, 11 Mar 2016 10:00:00 -0800 Subject: [PATCH 168/189] Invert and rename the `nobuild` option in package.yaml configs to `buildable`. --- lib/spack/docs/site_configuration.rst | 15 +++++++-------- lib/spack/spack/concretize.py | 8 ++++---- lib/spack/spack/config.py | 16 ++++++++-------- lib/spack/spack/test/mock_packages_test.py | 4 ++-- 4 files changed, 21 insertions(+), 22 deletions(-) diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst index 7ae541d4a3..3abfa21a9d 100644 --- a/lib/spack/docs/site_configuration.rst +++ b/lib/spack/docs/site_configuration.rst @@ -78,8 +78,7 @@ This example lists three installations of OpenMPI, one built with gcc, one built with gcc and debug information, and another built with Intel. If Spack is asked to build a package that uses one of these MPIs as a dependency, it will use the the pre-installed OpenMPI in -the given directory. This example also specifies that Spack should never -build its own OpenMPI via the ``nobuild: True`` option. +the given directory. 
Each ``packages.yaml`` begins with a ``packages:`` token, followed by a list of package names. To specify externals, add a ``paths`` @@ -111,16 +110,16 @@ be: paths: openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3 openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug - openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel - nobuild: True + openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel + buildable: False -The addition of the ``nobuild`` flag tells Spack that it should never build +The addition of the ``buildable`` flag tells Spack that it should never build its own version of OpenMPI, and it will instead always rely on a pre-built -OpenMPI. Similar to ``paths``, ``nobuild`` is specified as a property under +OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under a package name. -The ``nobuild`` does not need to be paired with external packages. -It could also be used alone to forbid packages that may be +The ``buildable`` does not need to be paired with external packages. +It could also be used alone to forbid packages that may be buggy or otherwise undesirable. diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 2268084e56..8d29a03f93 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -69,12 +69,12 @@ def _valid_virtuals_and_externals(self, spec): packages = [spec] # For each candidate package, if it has externals add those to the candidates - # if it's a nobuild, then only add the externals. + # if it's not buildable, then only add the externals. candidates = [] all_compilers = spack.compilers.all_compilers() for pkg in packages: externals = spec_externals(pkg) - buildable = not is_spec_nobuild(pkg) + buildable = is_spec_buildable(pkg) if buildable: candidates.append((pkg, None)) for ext in externals: @@ -369,8 +369,8 @@ def __init__(self, spec): class NoBuildError(spack.error.SpackError): - """Raised when a package is configured with the nobuild option, but + """Raised when a package is configured with the buildable option False, but no satisfactory external versions can be found""" def __init__(self, spec): super(NoBuildError, self).__init__( - "The spec '%s' is configured as nobuild, and no matching external installs were found" % spec.name) + "The spec '%s' is configured as not buildable, and no matching external installs were found" % spec.name) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 3a785fe692..a21dd6dbe1 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -220,9 +220,9 @@ 'type' : 'array', 'default' : [], 'items' : { 'type' : 'string' } }, #compiler specs - 'nobuild': { + 'buildable': { 'type': 'boolean', - 'default': False, + 'default': True, }, 'providers': { 'type': 'object', @@ -557,15 +557,15 @@ def spec_externals(spec): return spec_locations -def is_spec_nobuild(spec): - """Return true if the spec pkgspec is configured as nobuild""" +def is_spec_buildable(spec): + """Return true if the spec pkgspec is configured as buildable""" allpkgs = get_config('packages') name = spec.name if not spec.name in allpkgs: - return False - if not 'nobuild' in allpkgs[spec.name]: - return False - return allpkgs[spec.name]['nobuild'] + return True + if not 'buildable' in allpkgs[spec.name]: + return True + return allpkgs[spec.name]['buildable'] class ConfigError(SpackError): pass diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 
079cbcc136..6d24a84150 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -52,11 +52,11 @@ mock_packages_config = """\ packages: externaltool: - nobuild: True + buildable: False paths: externaltool@1.0%gcc@4.5.0: /path/to/external_tool externalvirtual: - nobuild: True + buildable: False paths: externalvirtual@2.0%clang@3.3: /path/to/external_virtual_clang externalvirtual@1.0%gcc@4.5.0: /path/to/external_virtual_gcc From bae03404f44ac60439e85e31a85bb6848db2a5e4 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 11 Mar 2016 12:51:45 -0600 Subject: [PATCH 169/189] Documentation typo fixes --- lib/spack/docs/index.rst | 2 +- lib/spack/docs/packaging_guide.rst | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index 79757208c9..d6ce52b747 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -18,7 +18,7 @@ configurations can coexist on the same system. Most importantly, Spack is *simple*. It offers a simple *spec* syntax so that users can specify versions and configuration options concisely. Spack is also simple for package authors: package files -are writtin in pure Python, and specs allow package authors to +are written in pure Python, and specs allow package authors to maintain a single file for many different builds of the same package. See the :doc:`features` for examples and highlights. diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index ef9fd89b62..169899212d 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -419,7 +419,7 @@ directory to the directory containing the downloaded archive before it calls your ``install`` method. Within ``install``, the path to the downloaded archive is available as ``self.stage.archive_file``. -Here is an example snippet for packages distribuetd as self-extracting +Here is an example snippet for packages distributed as self-extracting archives. The example sets permissions on the downloaded file to make it executable, then runs it with some arguments. @@ -1556,12 +1556,12 @@ you ask for a particular spec. ``Concretization Policies`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~ -A user may have certain perferrences for how packages should +A user may have certain preferences for how packages should be concretized on their system. For example, one user may prefer packages built with OpenMPI and the Intel compiler. Another user may prefer packages be built with MVAPICH and GCC. -Spack can be configurated to prefer certain compilers, package +Spack can be configured to prefer certain compilers, package versions, depends_on, and variants during concretization. The preferred configuration can be controlled via the ``~/.spack/packages.yaml`` file for user configuations, or the @@ -1588,16 +1588,16 @@ At a high level, this example is specifying how packages should be concretized. The dyninst package should prefer using gcc 4.9 and be built with debug options. The gperftools package should prefer version 2.2 over 2.4. Every package on the system should prefer mvapich for -its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by perfering gcc 4.9). +its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9). These options are used to fill in implicit defaults. Any of them can be overwritten on the command line if explicitly requested. 
-Each packages.yaml file begin with the string ``packages:`` and +Each packages.yaml file begins with the string ``packages:`` and package names are specified on the next level. The special string ``all`` applies settings to each package. Underneath each package name is one or more components: ``compiler``, ``variants``, ``version``, or ``providers``. Each component has an ordered list of spec -``constraints``, with earlier entries in the list being prefered over +``constraints``, with earlier entries in the list being preferred over later entries. Sometimes a package installation may have constraints that forbid From 6ec65cd4ca1e6c2d346c949e21902680f27364c5 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 11 Mar 2016 15:03:37 -0600 Subject: [PATCH 170/189] Add GNU Octave package --- .../repos/builtin/packages/octave/package.py | 182 ++++++++++++++++++ 1 file changed, 182 insertions(+) create mode 100644 var/spack/repos/builtin/packages/octave/package.py diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py new file mode 100644 index 0000000000..6810da6d98 --- /dev/null +++ b/var/spack/repos/builtin/packages/octave/package.py @@ -0,0 +1,182 @@ +from spack import * + +class Octave(Package): + """GNU Octave is a high-level language, primarily intended for numerical + computations. It provides a convenient command line interface for solving + linear and nonlinear problems numerically, and for performing other + numerical experiments using a language that is mostly compatible with + Matlab. It may also be used as a batch-oriented language.""" + + homepage = "https://www.gnu.org/software/octave/" + url = "ftp://ftp.gnu.org/gnu/octave/octave-4.0.0.tar.gz" + + version('4.0.0' , 'a69f8320a4f20a8480c1b278b1adb799') + + variant('readline', default=True) + variant('arpack', default=False) + variant('curl', default=False) + variant('fftw', default=False) + variant('fltk', default=False) + variant('fontconfig', default=False) + variant('freetype', default=False) + variant('glpk', default=False) + variant('gl2ps', default=False) + variant('gnuplot', default=False) + variant('magick', default=False) + variant('hdf5', default=False) + variant('jdk', default=False) + variant('llvm', default=False) + variant('opengl', default=False) + variant('qhull', default=False) + variant('qrupdate', default=False) + variant('qscintilla', default=False) + variant('qt', default=False) + variant('suiteparse', default=False) + variant('zlib', default=False) + + # Required dependencies + depends_on('blas') + depends_on('lapack') + depends_on('pcre') + + # Strongly recommended dependencies + depends_on('readline', when='+readline') + + # Optional dependencies + depends_on('arpack', when='+arpack') + depends_on('curl', when='+curl') + depends_on('fftw@3', when='+fftw') + depends_on('fltk', when='+fltk') + depends_on('fontconfig', when='+fontconfig') + depends_on('freetype', when='+freetype') + depends_on('glpk', when='+glpk') + #depends_on('gl2ps', when='+gl2ps') + depends_on('gnuplot', when='+gnuplot') + depends_on('ImageMagick', when='+magick') + depends_on('hdf5', when='+hdf5') + depends_on('jdk', when='+jdk') + depends_on('llvm', when='+llvm') + #depends_on('opengl', when='+opengl') + depends_on('qhull', when='+qhull') + #depends_on('qrupdate', when='+qrupdate') + #depends_on('qscintilla', when='+qscintilla) + depends_on('qt', when='+qt') + depends_on('SuiteSparse', when='suitesparse') + depends_on('zlib', when='+zlib') + + + def install(self, spec, 
prefix): + config_args = [ + "--prefix=%s" % prefix + ] + + # Required dependencies + config_args.extend([ + "--with-blas=%s" % spec['blas'].prefix.lib, + "--with-lapack=%s" % spec['lapack'].prefix.lib + ]) + + # Strongly recommended dependencies + if '+readline' in spec: + config_args.append('--enable-readline') + else: + config_args.append('--disable-readline') + + # Optional dependencies + if '+arpack' in spec: + config_args.extend([ + "--with-arpack-includedir=%s" % spec['arpack'].prefix.include, + "--with-arpack-libdir=%s" % spec['arpack'].prefix.lib + ]) + else: + config_args.append("--without-arpack") + + if '+curl' in spec: + config_args.extend([ + "--with-curl-includedir=%s" % spec['curl'].prefix.include, + "--with-curl-libdir=%s" % spec['curl'].prefix.lib + ]) + else: + config_args.append("--without-curl") + + if '+fftw' in spec: + config_args.extend([ + "--with-fftw3-includedir=%s" % spec['fftw'].prefix.include, + "--with-fftw3-libdir=%s" % spec['fftw'].prefix.lib, + "--with-fftw3f-includedir=%s" % spec['fftw'].prefix.include, + "--with-fftw3f-libdir=%s" % spec['fftw'].prefix.lib + ]) + else: + config_args.extend([ + "--without-fftw3", + "--without-fftw3f" + ]) + + if '+fltk' in spec: + config_args.extend([ + "--with-fltk-prefix=%s" % spec['fltk'].prefix, + "--with-fltk-exec-prefix=%s" % spec['fltk'].prefix + ]) + else: + config_args.append("--without-fltk") + + if '+glpk' in spec: + config_args.extend([ + "--with-glpk-includedir=%s" % spec['glpk'].prefix.include + "--with-glpk-libdir=%s" % spec['glpk'].prefix.lib + ]) + else: + config_args.append("--without-glpk") + + if '+magick' in spec: + config_args.append("--with-magick=%s" % spec['ImageMagick'].prefix.lib) + + if '+hdf5' in spec: + config_args.extend([ + "--with-hdf5-includedir=%s" % spec['hdf5'].prefix.include, + "--with-hdf5-libdir=%s" % spec['hdf5'].prefix.lib + ]) + else: + config_args.append("--without-hdf5") + + if '+jdk' in spec: + config_args.extend([ + "--with-java-homedir=%s" % spec['jdk'].prefix, + "--with-java-includedir=%s" % spec['jdk'].prefix.include, + "--with-java-libdir=%s" % spec['jdk'].prefix.lib + ]) + + #if '~opengl' in spec: + # config_args.extend([ + # "--without-opengl", + # "--without-framework-opengl" + # ]) + + if '+qhull' in spec: + config_args.extend([ + "--with-qhull-includedir=%s" % spec['qhull'].prefix.include, + "--with-qhull-libdir=%s" % spec['qhull'].prefix.lib + ]) + else: + config_args.append("--without-qhull") + + #if '+qrupdate' in spec: + # config_args.extend([ + # "--with-qrupdate-includedir=%s" % spec['qrupdate'].prefix.include, + # "--with-qrupdate-libdir=%s" % spec['qrupdate'].prefix.lib + # ]) + #else: + # config_args.append("--without-qrupdate") + + if '+zlib' in spec: + config_args.extend([ + "--with-z-includedir=%s" % spec['zlib'].prefix.include, + "--with-z-libdir=%s" % spec['zlib'].prefix.lib + ]) + else: + config_args.append("--without-z") + + configure(*config_args) + + make() + make("install") From 1179217334386984211645d508d8e02692297183 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 11 Mar 2016 13:33:56 -0800 Subject: [PATCH 171/189] Add compiler info and a simple libdwarf build to the checks. --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index ab379be486..1bed6b0874 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,10 @@ before_install: script: - . 
share/spack/setup-env.sh + - spack compilers + - spack config get compilers - spack test + - spack install -v libdwarf notifications: email: From 145390c7f3a9de6e679f56dc9b275973a459de91 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 11 Mar 2016 16:57:37 -0600 Subject: [PATCH 172/189] Add gl2ps and qrupdate packages --- .../repos/builtin/packages/gl2ps/package.py | 18 ++++++++++++++ .../repos/builtin/packages/octave/package.py | 24 +++++++++---------- .../builtin/packages/qrupdate/package.py | 18 ++++++++++++++ 3 files changed, 48 insertions(+), 12 deletions(-) create mode 100644 var/spack/repos/builtin/packages/gl2ps/package.py create mode 100644 var/spack/repos/builtin/packages/qrupdate/package.py diff --git a/var/spack/repos/builtin/packages/gl2ps/package.py b/var/spack/repos/builtin/packages/gl2ps/package.py new file mode 100644 index 0000000000..cb376b3f03 --- /dev/null +++ b/var/spack/repos/builtin/packages/gl2ps/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Gl2ps(Package): + """GL2PS is a C library providing high quality vector output for any + OpenGL application.""" + + homepage = "http://www.geuz.org/gl2ps/" + url = "http://geuz.org/gl2ps/src/gl2ps-1.3.9.tgz" + + version('1.3.9', '377b2bcad62d528e7096e76358f41140') + + depends_on("libpng") + + def install(self, spec, prefix): + cmake('.', *std_cmake_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py index 6810da6d98..99847e1dbe 100644 --- a/var/spack/repos/builtin/packages/octave/package.py +++ b/var/spack/repos/builtin/packages/octave/package.py @@ -50,7 +50,7 @@ class Octave(Package): depends_on('fontconfig', when='+fontconfig') depends_on('freetype', when='+freetype') depends_on('glpk', when='+glpk') - #depends_on('gl2ps', when='+gl2ps') + depends_on('gl2ps', when='+gl2ps') depends_on('gnuplot', when='+gnuplot') depends_on('ImageMagick', when='+magick') depends_on('hdf5', when='+hdf5') @@ -58,10 +58,10 @@ class Octave(Package): depends_on('llvm', when='+llvm') #depends_on('opengl', when='+opengl') depends_on('qhull', when='+qhull') - #depends_on('qrupdate', when='+qrupdate') + depends_on('qrupdate', when='+qrupdate') #depends_on('qscintilla', when='+qscintilla) depends_on('qt', when='+qt') - depends_on('SuiteSparse', when='suitesparse') + depends_on('SuiteSparse', when='+suitesparse') depends_on('zlib', when='+zlib') @@ -72,7 +72,7 @@ def install(self, spec, prefix): # Required dependencies config_args.extend([ - "--with-blas=%s" % spec['blas'].prefix.lib, + "--with-blas=%s" % spec['blas'].prefix.lib, "--with-lapack=%s" % spec['lapack'].prefix.lib ]) @@ -122,7 +122,7 @@ def install(self, spec, prefix): if '+glpk' in spec: config_args.extend([ - "--with-glpk-includedir=%s" % spec['glpk'].prefix.include + "--with-glpk-includedir=%s" % spec['glpk'].prefix.include, "--with-glpk-libdir=%s" % spec['glpk'].prefix.lib ]) else: @@ -160,13 +160,13 @@ def install(self, spec, prefix): else: config_args.append("--without-qhull") - #if '+qrupdate' in spec: - # config_args.extend([ - # "--with-qrupdate-includedir=%s" % spec['qrupdate'].prefix.include, - # "--with-qrupdate-libdir=%s" % spec['qrupdate'].prefix.lib - # ]) - #else: - # config_args.append("--without-qrupdate") + if '+qrupdate' in spec: + config_args.extend([ + "--with-qrupdate-includedir=%s" % spec['qrupdate'].prefix.include, + "--with-qrupdate-libdir=%s" % spec['qrupdate'].prefix.lib + ]) + else: + config_args.append("--without-qrupdate") if '+zlib' in 
spec: config_args.extend([ diff --git a/var/spack/repos/builtin/packages/qrupdate/package.py b/var/spack/repos/builtin/packages/qrupdate/package.py new file mode 100644 index 0000000000..aff44bb2d8 --- /dev/null +++ b/var/spack/repos/builtin/packages/qrupdate/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Qrupdate(Package): + """qrupdate is a Fortran library for fast updates of QR and + Cholesky decompositions.""" + + homepage = "http://sourceforge.net/projects/qrupdate/" + url = "https://downloads.sourceforge.net/qrupdate/qrupdate-1.1.2.tar.gz" + + version('1.1.2', '6d073887c6e858c24aeda5b54c57a8c4') + + depends_on("openblas") + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") From 90f2e40ff92077da1f1b9b4d9aa317b89aa07960 Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Fri, 11 Mar 2016 23:28:16 -0500 Subject: [PATCH 173/189] Added comment to Emacs. --- var/spack/repos/builtin/packages/emacs/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/emacs/package.py b/var/spack/repos/builtin/packages/emacs/package.py index 09eb05d5a7..caa264857e 100644 --- a/var/spack/repos/builtin/packages/emacs/package.py +++ b/var/spack/repos/builtin/packages/emacs/package.py @@ -12,6 +12,8 @@ class Emacs(Package): # GTK or other widget library # libtiff, png, etc. # For now, we assume the system provides all that stuff. + # For Ubuntu 14.04 LTS: + # sudo apt-get install libgtk-3-dev libxpm-dev libtiff5-dev libjpeg8-dev libgif-dev libpng12-dev def install(self, spec, prefix): configure('--prefix=%s' % prefix) From 7cd478418d5b8a204a8e6b9a5307ecf71ea83939 Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Fri, 11 Mar 2016 23:28:36 -0500 Subject: [PATCH 174/189] New version of LAPACK --- var/spack/repos/builtin/packages/netlib-lapack/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py index fb6b99e27c..741f4af421 100644 --- a/var/spack/repos/builtin/packages/netlib-lapack/package.py +++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py @@ -12,6 +12,7 @@ class NetlibLapack(Package): homepage = "http://www.netlib.org/lapack/" url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" + version('3.6.0', 'f2f6c67134e851fe189bb3ca1fbb5101') version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf') version('3.4.2', '61bf1a8a4469d4bdb7604f5897179478') version('3.4.1', '44c3869c38c8335c2b9c2a8bb276eb55') From 3383486adc86ba03456aae3703c0176620888e77 Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Sun, 13 Mar 2016 19:38:05 -0400 Subject: [PATCH 175/189] Fixed typo bug. Made error comment more explicit --- lib/spack/spack/cmd/diy.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 2c3a8761ab..199362d915 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -75,8 +75,8 @@ def diy(self, args): edit_package(spec.name, spack.repo.first_repo(), None, True) return - if not spec.version.concrete: - tty.die("spack diy spec must have a single, concrete version.") + if not spec.versions.concrete: + tty.die("spack spconfig spec must have a single, concrete version. 
Did you forget a package version number?") spec.concretize() package = spack.repo.get(spec) From 5c865b9b702695797ab29263fb629b53e56983fb Mon Sep 17 00:00:00 2001 From: Elizabeth F Date: Sun, 13 Mar 2016 19:42:15 -0400 Subject: [PATCH 176/189] Fixed typo in typo fix. --- lib/spack/spack/cmd/diy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 199362d915..45f13e4463 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -76,7 +76,7 @@ def diy(self, args): return if not spec.versions.concrete: - tty.die("spack spconfig spec must have a single, concrete version. Did you forget a package version number?") + tty.die("spack diy spec must have a single, concrete version. Did you forget a package version number?") spec.concretize() package = spack.repo.get(spec) From 8b715d9c3fabdf4658ff4c0c500d01f74e7ed64e Mon Sep 17 00:00:00 2001 From: Erik Schnetter Date: Sun, 13 Mar 2016 21:14:41 -0400 Subject: [PATCH 177/189] Update tmux --- var/spack/repos/builtin/packages/tmux/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/tmux/package.py b/var/spack/repos/builtin/packages/tmux/package.py index 23d36db427..f2067d1366 100644 --- a/var/spack/repos/builtin/packages/tmux/package.py +++ b/var/spack/repos/builtin/packages/tmux/package.py @@ -7,10 +7,11 @@ class Tmux(Package): do a lot more. """ - homepage = "http://tmux.sourceforge.net" - url = "http://downloads.sourceforge.net/project/tmux/tmux/tmux-1.9/tmux-1.9a.tar.gz" + homepage = "http://tmux.github.io" + url = "https://github.com/tmux/tmux/releases/download/2.1/tmux-2.1.tar.gz" version('1.9a', 'b07601711f96f1d260b390513b509a2d') + version('2.1', '74a2855695bccb51b6e301383ad4818c') depends_on('libevent') depends_on('ncurses') From 003fd4d834d3e06ea89c7f3c2fa13241b0730f06 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 14 Mar 2016 04:55:30 -0700 Subject: [PATCH 178/189] Optimize __eq__ and __ne__ in key_ordering - use `is` when possible before calling `_cmp_key()` --- lib/spack/llnl/util/lang.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 1c4d1ed623..13d301f84e 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -235,11 +235,11 @@ def setter(name, value): if not has_method(cls, '_cmp_key'): raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__) - setter('__eq__', lambda s,o: o is not None and s._cmp_key() == o._cmp_key()) + setter('__eq__', lambda s,o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key())) setter('__lt__', lambda s,o: o is not None and s._cmp_key() < o._cmp_key()) setter('__le__', lambda s,o: o is not None and s._cmp_key() <= o._cmp_key()) - setter('__ne__', lambda s,o: o is None or s._cmp_key() != o._cmp_key()) + setter('__ne__', lambda s,o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key())) setter('__gt__', lambda s,o: o is None or s._cmp_key() > o._cmp_key()) setter('__ge__', lambda s,o: o is None or s._cmp_key() >= o._cmp_key()) From 05c761dee9c86faf9ce6d5b98ae57c8737694898 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 14 Mar 2016 04:59:29 -0700 Subject: [PATCH 179/189] Add `package_class` method to spec. - Shouldn't call .package from within things like normalize() and concretize() because spec may be inconsistent.
- Add `.package_class` property so that we can get at package metadata without constructing a Package with a Spec. - should be faster than `.package` was, anyway. Use where possible. --- lib/spack/spack/concretize.py | 2 +- lib/spack/spack/repository.py | 9 +++++++-- lib/spack/spack/spec.py | 12 ++++++++++-- 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 8d29a03f93..445ecd8896 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -238,7 +238,7 @@ def concretize_variants(self, spec): the default variants from the package specification. """ changed = False - for name, variant in spec.package.variants.items(): + for name, variant in spec.package_class.variants.items(): if name not in spec.variants: spec.variants[name] = spack.spec.VariantSpec(name, variant.default) changed = True diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 3c3ba08bcc..d2fdc937f7 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -316,6 +316,11 @@ def get(self, spec, new=False): return self.repo_for_pkg(spec).get(spec) + def get_pkg_class(self, pkg_name): + """Find a class for the spec's package and return the class object.""" + return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name) + + @_autospec def dump_provenance(self, spec, path): """Dump provenance information for a spec to a particular path. @@ -550,7 +555,7 @@ def get(self, spec, new=False): key = hash(spec) if new or key not in self._instances: - package_class = self._get_pkg_class(spec.name) + package_class = self.get_pkg_class(spec.name) try: copy = spec.copy() # defensive copy. Package owns its spec. self._instances[key] = package_class(copy) @@ -715,7 +720,7 @@ def _get_pkg_module(self, pkg_name): return self._modules[pkg_name] - def _get_pkg_class(self, pkg_name): + def get_pkg_class(self, pkg_name): """Get the class for the package out of its module. First loads (or fetches from cache) a module for the diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index c045e80365..573e288d17 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -353,7 +353,7 @@ def constrain(self, other): @property def concrete(self): return self.spec._concrete or all( - v in self for v in self.spec.package.variants) + v in self for v in self.spec.package_class.variants) def copy(self): @@ -498,6 +498,14 @@ def package(self): return spack.repo.get(self) + @property + def package_class(self): + """Internal package call gets only the class object for a package. + Use this to just get package metadata. + """ + return spack.repo.get_pkg_class(self.name) + + @property def virtual(self): """Right now, a spec is virtual if no package exists with its name. @@ -1161,7 +1169,7 @@ def validate_names(self): # Ensure that variants all exist. for vname, variant in spec.variants.items(): - if vname not in spec.package.variants: + if vname not in spec.package_class.variants: raise UnknownVariantError(spec.name, vname) From f45b8b1083e5f628dd31fa4b7b873b6df7119d0e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 14 Mar 2016 05:02:50 -0700 Subject: [PATCH 180/189] Add some tests for packages with multiple virtual dependencies. - Added mock `hypre` package, depends on `lapack` and `blas`. - test cases where some packages provide both `lapack` and `blas`, but others do not. 
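Before the diff, a quick sketch of the spec shapes these tests exercise (illustrative only, using the mock packages named in the message above; the real assertions are in the test bodies below):

    # hypre depends on both the 'blas' and 'lapack' virtuals.
    Spec('hypre').concretize()                            # providers chosen for both
    Spec('hypre ^openblas').concretize()                  # blas pinned, lapack still open
    Spec('hypre ^openblas ^netlib-lapack').concretize()   # each virtual pinned separately
    Spec('hypre ^openblas-with-lapack').concretize()      # one package provides both
    # Forcing a dual provider plus a conflicting single provider must fail:
    # 'hypre ^openblas-with-lapack ^netlib-lapack' raises MultipleProviderError.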
--- lib/spack/spack/test/concretize.py | 29 +++++++++++++- .../builtin.mock/packages/hypre/package.py | 39 +++++++++++++++++++ .../packages/openblas-with-lapack/package.py | 38 ++++++++++++++++++ 3 files changed, 105 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin.mock/packages/hypre/package.py create mode 100644 var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 07828d8ea6..f264faf17a 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -142,6 +142,34 @@ def test_concretize_with_provides_when(self): for spec in spack.repo.providers_for('mpi@3'))) + def test_concretize_two_virtuals(self): + """Test a package with multiple virtual dependencies.""" + s = Spec('hypre').concretize() + + + def test_concretize_two_virtuals_with_one_bound(self): + """Test a package with multiple virtual dependencies and one preset.""" + s = Spec('hypre ^openblas').concretize() + + + def test_concretize_two_virtuals_with_two_bound(self): + """Test a package with multiple virtual dependencies and two of them preset.""" + s = Spec('hypre ^openblas ^netlib-lapack').concretize() + + + def test_concretize_two_virtuals_with_dual_provider(self): + """Test a package with multiple virtual dependencies and force a provider + that provides both.""" + s = Spec('hypre ^openblas-with-lapack').concretize() + + + def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): + """Test a package with multiple virtual dependencies and force a provider + that provides both, and another conflicting package that provides one.""" + s = Spec('hypre ^openblas-with-lapack ^netlib-lapack') + self.assertRaises(spack.spec.MultipleProviderError, s.concretize) + + def test_virtual_is_fully_expanded_for_callpath(self): # force dependence on fake "zmpi" by asking for MPI 10.0 spec = Spec('callpath ^mpi@10.0') @@ -281,4 +309,3 @@ def test_find_spec_none(self): Spec('d')), Spec('e')) self.assertEqual(None, find_spec(s['b'], lambda s: '+foo' in s)) - diff --git a/var/spack/repos/builtin.mock/packages/hypre/package.py b/var/spack/repos/builtin.mock/packages/hypre/package.py new file mode 100644 index 0000000000..f69f16d2cc --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/hypre/package.py @@ -0,0 +1,39 @@ +############################################################################## +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Hypre(Package): + """Hypre is included here as an example of a package that depends on + both LAPACK and BLAS.""" + homepage = "http://www.openblas.net" + url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz" + + version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9') + + depends_on('lapack') + depends_on('blas') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py b/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py new file mode 100644 index 0000000000..509bfb71e5 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py @@ -0,0 +1,38 @@ +############################################################################## +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class OpenblasWithLapack(Package): + """Dummy version of OpenBLAS that also provides LAPACK, for testing.""" + homepage = "http://www.openblas.net" + url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz" + + version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9') + + provides('lapack') + provides('blas') + + def install(self, spec, prefix): + pass From f2761270f3c0506a689b484b0e12d7d6e9f4300d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 14 Mar 2016 05:04:01 -0700 Subject: [PATCH 181/189] Make concretization less greedy: add backtracking for virtuals. - `_expand_virtual_packages` now gets a candidate list and will try all the candidates. - Good news: If the first virtual in the list conflicts with something else in the spec, we'll keep trying until we find a good one. - Bad news: Only looks as far as the next normalize(); can't see conflicts further ahead than that if they're inevitable some other virtual expansion. - Refactor `concretize.py` to keep all the nasty spec graph stitching in `spec.py`. This is more similar to before externals support. - `concretize.py` now just returns a list of candidates sorted by ABI compatibility to `_expand_virtual_packages`, and `spec.py` handles testing the candidates. 
- Refactor the way external paths are handled in `config.py` and `concretize.py`: - previously, `spec_externals` returned spec/path pairs. Now it returns specs with `external` set. Makes code in `concretize.py` more natural. --- lib/spack/spack/concretize.py | 130 ++++++++++++++-------------------- lib/spack/spack/config.py | 17 +++-- lib/spack/spack/spec.py | 129 +++++++++++++++++++++++++++------ 3 files changed, 169 insertions(+), 107 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 445ecd8896..8083f91982 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -51,10 +51,10 @@ class DefaultConcretizer(object): """ def _valid_virtuals_and_externals(self, spec): - """Returns a list of spec/external-path pairs for both virtuals and externals - that can concretize this spec.""" - # Get a list of candidate packages that could satisfy this spec - packages = [] + """Returns a list of candidate virtual dep providers and external + packages that coiuld be used to concretize a spec.""" + # First construct a list of concrete candidates to replace spec with. + candidates = [spec] if spec.virtual: providers = spack.repo.providers_for(spec) if not providers: @@ -64,96 +64,72 @@ def _valid_virtuals_and_externals(self, spec): if not spec_w_preferred_providers: spec_w_preferred_providers = spec provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name) - packages = sorted(providers, cmp=provider_cmp) - else: - packages = [spec] + candidates = sorted(providers, cmp=provider_cmp) - # For each candidate package, if it has externals add those to the candidates - # if it's not buildable, then only add the externals. - candidates = [] - all_compilers = spack.compilers.all_compilers() - for pkg in packages: - externals = spec_externals(pkg) - buildable = is_spec_buildable(pkg) - if buildable: - candidates.append((pkg, None)) + # For each candidate package, if it has externals, add those to the usable list. + # if it's not buildable, then *only* add the externals. + usable = [] + for cspec in candidates: + if is_spec_buildable(cspec): + usable.append(cspec) + externals = spec_externals(cspec) for ext in externals: - if ext[0].satisfies(spec): - candidates.append(ext) - if not candidates: + if ext.satisfies(spec): + usable.append(ext) + + # If nothing is in the usable list now, it's because we aren't + # allowed to build anything. + if not usable: raise NoBuildError(spec) def cmp_externals(a, b): - if a[0].name != b[0].name: - #We're choosing between different providers. Maintain order from above sort + if a.name != b.name: + # We're choosing between different providers, so + # maintain order from provider sort return candidates.index(a) - candidates.index(b) - result = cmp_specs(a[0], b[0]) + + result = cmp_specs(a, b) if result != 0: return result - if not a[1] and b[1]: - return 1 - if not b[1] and a[1]: - return -1 - return cmp(a[1], b[1]) - candidates = sorted(candidates, cmp=cmp_externals) - return candidates + # prefer external packages to internal packages. 
+ if a.external is None or b.external is None: + return -cmp(a.external, b.external) + else: + return cmp(a.external, b.external) + + usable.sort(cmp=cmp_externals) + return usable - def concretize_virtual_and_external(self, spec): - """From a list of candidate virtual and external packages, concretize to one that - is ABI compatible with the rest of the DAG.""" + def choose_virtual_or_external(self, spec): + """Given a list of candidate virtual and external packages, try to + find one that is most ABI compatible. + """ candidates = self._valid_virtuals_and_externals(spec) if not candidates: - return False + return candidates - # Find the nearest spec in the dag that has a compiler. We'll use that - # spec to test compiler compatibility. - other_spec = find_spec(spec, lambda(x): x.compiler) - if not other_spec: - other_spec = spec.root + # Find the nearest spec in the dag that has a compiler. We'll + # use that spec to calibrate compiler compatibility. + abi_exemplar = find_spec(spec, lambda(x): x.compiler) + if not abi_exemplar: + abi_exemplar = spec.root - # Choose an ABI-compatible candidate, or the first match otherwise. - candidate = None - if other_spec: - candidate = next((c for c in candidates if spack.abi.compatible(c[0], other_spec)), None) - if not candidate: - # Try a looser ABI matching - candidate = next((c for c in candidates if spack.abi.compatible(c[0], other_spec, loose=True)), None) - if not candidate: - # No ABI matches. Pick the top choice based on the orignal preferences. - candidate = candidates[0] - candidate_spec = candidate[0] - external = candidate[1] - changed = False + # Make a list including ABI compatibility of specs with the exemplar. + strict = [spack.abi.compatible(c, abi_exemplar) for c in candidates] + loose = [spack.abi.compatible(c, abi_exemplar, loose=True) for c in candidates] + keys = zip(strict, loose, candidates) - # If we're external then trim the dependencies - if external: - if (spec.dependencies): - changed = True - spec.dependencies = DependencyMap() - candidate_spec.dependencies = DependencyMap() + # Sort candidates from most to least compatibility. + # Note: + # 1. We reverse because True > False. + # 2. Sort is stable, so c's keep their order. + keys.sort(key=lambda k:k[:2], reverse=True) - def fequal(candidate_field, spec_field): - return (not candidate_field) or (candidate_field == spec_field) - if (fequal(candidate_spec.name, spec.name) and - fequal(candidate_spec.versions, spec.versions) and - fequal(candidate_spec.compiler, spec.compiler) and - fequal(candidate_spec.architecture, spec.architecture) and - fequal(candidate_spec.dependencies, spec.dependencies) and - fequal(candidate_spec.variants, spec.variants) and - fequal(external, spec.external)): - return changed - - # Refine this spec to the candidate. 
- if spec.virtual: - spec._replace_with(candidate_spec) - changed = True - if spec._dup(candidate_spec, deps=False, cleardeps=False): - changed = True - spec.external = external - - return changed + # Pull the candidates back out and return them in order + candidates = [c for s,l,c in keys] + return candidates def concretize_version(self, spec): diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index a21dd6dbe1..6afd69b3ac 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -539,22 +539,25 @@ def print_section(section): def spec_externals(spec): - """Return a list of spec, directory pairs for each external location for spec""" + """Return a list of external specs (with external directory path filled in), + one for each known external installation.""" allpkgs = get_config('packages') name = spec.name - spec_locations = [] + external_specs = [] pkg_paths = allpkgs.get(name, {}).get('paths', None) if not pkg_paths: return [] - for pkg,path in pkg_paths.iteritems(): - if not spec.satisfies(pkg): - continue + for external_spec, path in pkg_paths.iteritems(): if not path: + # skip entries without paths (avoid creating extra Specs) continue - spec_locations.append( (spack.spec.Spec(pkg), path) ) - return spec_locations + + external_spec = spack.spec.Spec(external_spec, external=path) + if external_spec.satisfies(spec): + external_specs.append(external_spec) + return external_specs def is_spec_buildable(spec): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 573e288d17..d04135860e 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -418,9 +418,11 @@ def __init__(self, spec_like, *dep_like, **kwargs): # cases we've read them from a file want to assume normal. # This allows us to manipulate specs that Spack doesn't have # package.py files for. - self._normal = kwargs.get('normal', False) + self._normal = kwargs.get('normal', False) self._concrete = kwargs.get('concrete', False) - self.external = None + + # Allow a spec to be constructed with an external path. + self.external = kwargs.get('external', None) # This allows users to construct a spec DAG with literals. # Note that given two specs a and b, Spec(a) copies a, but @@ -794,8 +796,30 @@ def _replace_with(self, concrete): """Replace this virtual spec with a concrete spec.""" assert(self.virtual) for name, dependent in self.dependents.items(): + # remove self from all dependents. del dependent.dependencies[self.name] - dependent._add_dependency(concrete) + + # add the replacement, unless it is already a dep of dependent. + if concrete.name not in dependent.dependencies: + dependent._add_dependency(concrete) + + + def _replace_node(self, replacement): + """Replace this spec with another. + + Connects all dependents of this spec to its replacement, and + disconnects this spec from any dependencies it has. New spec + will have any dependencies the replacement had, and may need + to be normalized. + + """ + for name, dependent in self.dependents.items(): + del dependent.dependencies[self.name] + dependent._add_dependency(replacement) + + for name, dep in self.dependencies.items(): + del dep.dependents[self.name] + del self.dependencies[dep.name] def _expand_virtual_packages(self): @@ -815,18 +839,80 @@ def _expand_virtual_packages(self): this are infrequent, but should implement this before it is a problem. 
""" + # Make an index of stuff this spec already provides + self_index = ProviderIndex(self.traverse(), restrict=True) + changed = False done = False while not done: done = True for spec in list(self.traverse()): - if spack.concretizer.concretize_virtual_and_external(spec): - done = False + replacement = None + if spec.virtual: + replacement = self._find_provider(spec, self_index) + if replacement: + # TODO: may break if in-place on self but + # shouldn't happen if root is traversed first. + spec._replace_with(replacement) + done=False + break + + if not replacement: + # Get a list of possible replacements in order of preference. + candidates = spack.concretizer.choose_virtual_or_external(spec) + + # Try the replacements in order, skipping any that cause + # satisfiability problems. + for replacement in candidates: + if replacement is spec: + break + + # Replace spec with the candidate and normalize + copy = self.copy() + copy[spec.name]._dup(replacement.copy(deps=False)) + + try: + # If there are duplicate providers or duplicate provider + # deps, consolidate them and merge constraints. + copy.normalize(force=True) + break + except SpecError as e: + # On error, we'll try the next replacement. + continue + + # If replacement is external then trim the dependencies + if replacement.external: + if (spec.dependencies): + changed = True + spec.dependencies = DependencyMap() + replacement.dependencies = DependencyMap() + + # TODO: could this and the stuff in _dup be cleaned up? + def feq(cfield, sfield): + return (not cfield) or (cfield == sfield) + + if replacement is spec or (feq(replacement.name, spec.name) and + feq(replacement.versions, spec.versions) and + feq(replacement.compiler, spec.compiler) and + feq(replacement.architecture, spec.architecture) and + feq(replacement.dependencies, spec.dependencies) and + feq(replacement.variants, spec.variants) and + feq(replacement.external, spec.external)): + continue + + # Refine this spec to the candidate. This uses + # replace_with AND dup so that it can work in + # place. TODO: make this more efficient. + if spec.virtual: + spec._replace_with(replacement) + changed = True + if spec._dup(replacement, deps=False, cleardeps=False): changed = True - # If there are duplicate providers or duplicate provider deps, this - # consolidates them and merge constraints. - changed |= self.normalize(force=True) + self_index.update(spec) + done=False + break + return changed @@ -850,7 +936,7 @@ def concretize(self): force = False while changed: - changes = (self.normalize(force=force), + changes = (self.normalize(force), self._expand_virtual_packages(), self._concretize_helper()) changed = any(changes) @@ -976,8 +1062,8 @@ def _evaluate_dependency_conditions(self, name): def _find_provider(self, vdep, provider_index): """Find provider for a virtual spec in the provider index. - Raise an exception if there is a conflicting virtual - dependency already in this spec. + Raise an exception if there is a conflicting virtual + dependency already in this spec. """ assert(vdep.virtual) providers = provider_index.providers_for(vdep) @@ -1018,17 +1104,14 @@ def _merge_dependency(self, dep, visited, spec_deps, provider_index): """ changed = False - # If it's a virtual dependency, try to find a provider and - # merge that. + # If it's a virtual dependency, try to find an existing + # provider in the spec, and merge that. 
if dep.virtual: visited.add(dep.name) provider = self._find_provider(dep, provider_index) if provider: dep = provider - else: - # if it's a real dependency, check whether it provides - # something already required in the spec. index = ProviderIndex([dep], restrict=True) for vspec in (v for v in spec_deps.values() if v.virtual): if index.providers_for(vspec): @@ -1125,13 +1208,14 @@ def normalize(self, force=False): # Get all the dependencies into one DependencyMap spec_deps = self.flat_dependencies(copy=False) - # Initialize index of virtual dependency providers - index = ProviderIndex(spec_deps.values(), restrict=True) + # Initialize index of virtual dependency providers if + # concretize didn't pass us one already + provider_index = ProviderIndex(spec_deps.values(), restrict=True) # traverse the package DAG and fill out dependencies according # to package files & their 'when' specs visited = set() - any_change = self._normalize_helper(visited, spec_deps, index) + any_change = self._normalize_helper(visited, spec_deps, provider_index) # If there are deps specified but not visited, they're not # actually deps of this package. Raise an error. @@ -1410,13 +1494,12 @@ def _dup(self, other, **kwargs): Whether deps should be copied too. Set to false to copy a spec but not its dependencies. """ - # We don't count dependencies as changes here changed = True if hasattr(self, 'name'): - changed = (self.name != other.name and self.versions != other.versions and \ - self.architecture != other.architecture and self.compiler != other.compiler and \ - self.variants != other.variants and self._normal != other._normal and \ + changed = (self.name != other.name and self.versions != other.versions and + self.architecture != other.architecture and self.compiler != other.compiler and + self.variants != other.variants and self._normal != other._normal and self.concrete != other.concrete and self.external != other.external) # Local node attributes get copied first. From 0d9a6d3c25de2b8490622c49362b09f706459bc0 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 14 Mar 2016 14:19:30 -0500 Subject: [PATCH 182/189] Updates to qrupdate --- .../repos/builtin/packages/octave/package.py | 17 +++++++++-------- .../repos/builtin/packages/qrupdate/package.py | 10 +++++----- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py index 99847e1dbe..38b355159d 100644 --- a/var/spack/repos/builtin/packages/octave/package.py +++ b/var/spack/repos/builtin/packages/octave/package.py @@ -12,6 +12,7 @@ class Octave(Package): version('4.0.0' , 'a69f8320a4f20a8480c1b278b1adb799') + # Variants variant('readline', default=True) variant('arpack', default=False) variant('curl', default=False) @@ -45,7 +46,7 @@ class Octave(Package): # Optional dependencies depends_on('arpack', when='+arpack') depends_on('curl', when='+curl') - depends_on('fftw@3', when='+fftw') + depends_on('fftw', when='+fftw') depends_on('fltk', when='+fltk') depends_on('fontconfig', when='+fontconfig') depends_on('freetype', when='+freetype') @@ -56,10 +57,10 @@ class Octave(Package): depends_on('hdf5', when='+hdf5') depends_on('jdk', when='+jdk') depends_on('llvm', when='+llvm') - #depends_on('opengl', when='+opengl') + #depends_on('opengl', when='+opengl') # TODO: add package depends_on('qhull', when='+qhull') depends_on('qrupdate', when='+qrupdate') - #depends_on('qscintilla', when='+qscintilla) + #depends_on('qscintilla', when='+qscintilla) # TODO: add package depends_on('qt', when='+qt') depends_on('SuiteSparse', when='+suitesparse') depends_on('zlib', when='+zlib') @@ -146,11 +147,11 @@ def install(self, spec, prefix): "--with-java-libdir=%s" % spec['jdk'].prefix.lib ]) - #if '~opengl' in spec: - # config_args.extend([ - # "--without-opengl", - # "--without-framework-opengl" - # ]) + if '~opengl' in spec: + config_args.extend([ + "--without-opengl", + "--without-framework-opengl" + ]) if '+qhull' in spec: config_args.extend([ diff --git a/var/spack/repos/builtin/packages/qrupdate/package.py b/var/spack/repos/builtin/packages/qrupdate/package.py index aff44bb2d8..5374d02c97 100644 --- a/var/spack/repos/builtin/packages/qrupdate/package.py +++ b/var/spack/repos/builtin/packages/qrupdate/package.py @@ -9,10 +9,10 @@ class Qrupdate(Package): version('1.1.2', '6d073887c6e858c24aeda5b54c57a8c4') - depends_on("openblas") + depends_on("blas") + depends_on("lapack") def install(self, spec, prefix): - configure('--prefix=%s' % prefix) - - make() - make("install") + # Build static and dynamic libraries + make("lib", "solib") + make("install", "PREFIX=%s" % prefix) From d8a402ae5bcfbea627967531f7ec35cf77946b4a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 11 Mar 2016 13:33:56 -0800 Subject: [PATCH 183/189] Add compiler info and a simple libdwarf build to the checks. --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index ab379be486..1bed6b0874 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,10 @@ before_install: script: - . share/spack/setup-env.sh + - spack compilers + - spack config get compilers - spack test + - spack install -v libdwarf notifications: email: From 15bbd088e6007a9a6df8c4427340a371e5871506 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 15 Mar 2016 14:38:06 -0700 Subject: [PATCH 184/189] Fix #551: version bug in `spack create` - `spack create` now sets a proper version in generated file, based on the filename, even if it can't find any tarballs for the package. 
--- lib/spack/spack/cmd/create.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 4564143f83..f0cd50b8df 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -208,7 +208,7 @@ def find_repository(spec, args): return repo -def fetch_tarballs(url, name, args): +def fetch_tarballs(url, name, version): """Try to find versions of the supplied archive by scraping the web. Prompts the user to select how many to download if many are found. @@ -222,7 +222,7 @@ def fetch_tarballs(url, name, args): archives_to_fetch = 1 if not versions: # If the fetch failed for some reason, revert to what the user provided - versions = { "version" : url } + versions = { version : url } elif len(versions) > 1: tty.msg("Found %s versions of %s:" % (len(versions), name), *spack.cmd.elide_list( @@ -256,7 +256,7 @@ def create(parser, args): tty.msg("Creating template for package %s" % name) # Fetch tarballs (prompting user if necessary) - versions, urls = fetch_tarballs(url, name, args) + versions, urls = fetch_tarballs(url, name, version) # Try to guess what configure system is used. guesser = ConfigureGuesser() From 7eca383b10f017666c320ffd7deb6cae83b5c26b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 10 Mar 2016 01:12:25 -0800 Subject: [PATCH 185/189] Add sanity check paths to packages; fix #505 --- lib/spack/spack/package.py | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 696adaf896..02fb3e5834 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -318,6 +318,17 @@ class SomePackage(Package): """Most packages are NOT extendable. Set to True if you want extensions.""" extendable = False + """List of prefix-relative file paths. If these do not exist after + install, or if they exist but are not files, sanity checks fail. + """ + sanity_check_files = [] + + """List of prefix-relative directory paths. If these do not exist + after install, or if they exist but are not directories, sanity + checks will fail. + """ + sanity_check_dirs = [] + def __init__(self, spec): # this determines how the package should be built. @@ -909,7 +920,7 @@ def build_process(): raise e # Ensure that something was actually installed. - self._sanity_check_install() + self.sanity_check_prefix() # Copy provenance into the install directory on success log_install_path = spack.install_layout.build_log_path(self.spec) @@ -952,7 +963,18 @@ def build_process(): spack.hooks.post_install(self) - def _sanity_check_install(self): + def sanity_check_prefix(self): + """This function checks whether install succeeded.""" + def check_paths(path_list, filetype, predicate): + for path in path_list: + abs_path = os.path.join(self.prefix, path) + if not predicate(abs_path): + raise InstallError("Install failed for %s. No such %s in prefix: %s" + % (self.name, filetype, path)) + + check_paths(self.sanity_check_files, 'file', os.path.isfile) + check_paths(self.sanity_check_dirs, 'directory', os.path.isdir) + installed = set(os.listdir(self.prefix)) installed.difference_update(spack.install_layout.hidden_file_paths) if not installed: From 68d22253eca40bc4223234d6c26204f292be6a0c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 15 Mar 2016 17:22:48 -0700 Subject: [PATCH 186/189] Add sanity checks to the libelf build. 
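With the hook added in PATCH 185, a missing entry in either list aborts the install; for libelf a failure would read roughly:

    InstallError: Install failed for libelf. No such file in prefix: include/libelf.h
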
--- var/spack/repos/builtin/packages/libelf/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/libelf/package.py b/var/spack/repos/builtin/packages/libelf/package.py index 29bc21b65c..0fcb56c164 100644 --- a/var/spack/repos/builtin/packages/libelf/package.py +++ b/var/spack/repos/builtin/packages/libelf/package.py @@ -38,6 +38,9 @@ class Libelf(Package): provides('elf') + sanity_check_files = ['include/libelf.h'] + sanity_check_dirs = ['lib'] + def install(self, spec, prefix): configure("--prefix=" + prefix, "--enable-shared", From 9c2996667433e4f69eb25cce2d74a94caeb1646c Mon Sep 17 00:00:00 2001 From: Nicolas Richart Date: Wed, 16 Mar 2016 12:57:17 +0100 Subject: [PATCH 187/189] Change urls in binutils to help 'spack checksum/versions' + adding latest version --- var/spack/repos/builtin/packages/binutils/package.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py index de04221e33..897539a439 100644 --- a/var/spack/repos/builtin/packages/binutils/package.py +++ b/var/spack/repos/builtin/packages/binutils/package.py @@ -4,10 +4,13 @@ class Binutils(Package): """GNU binutils, which contain the linker, assembler, objdump and others""" homepage = "http://www.gnu.org/software/binutils/" - version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66',url="ftp://ftp.gnu.org/gnu/binutils/binutils-2.25.tar.bz2") - version('2.24', 'e0f71a7b2ddab0f8612336ac81d9636b',url="ftp://ftp.gnu.org/gnu/binutils/binutils-2.24.tar.bz2") - version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e',url="ftp://ftp.gnu.org/gnu/binutils/binutils-2.23.2.tar.bz2") - version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764',url="ftp://ftp.gnu.org/gnu/binutils/binutils-2.20.1.tar.bz2") + url="https://ftp.gnu.org/gnu/binutils/binutils-2.25.tar.bz2" + + version('2.26', '64146a0faa3b411ba774f47d41de239f') + version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66') + version('2.24', 'e0f71a7b2ddab0f8612336ac81d9636b') + version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e') + version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764') # Add a patch that creates binutils libiberty_pic.a which is preferred by OpenSpeedShop and cbtf-krell variant('krellpatch', default=False, description="build with openspeedshop based patch.") From bb04d5cc63182b6129f9abf78db260a2dab9f506 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 16 Mar 2016 14:27:22 -0500 Subject: [PATCH 188/189] Fix grammar in mirrors documentation --- lib/spack/docs/mirrors.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/mirrors.rst b/lib/spack/docs/mirrors.rst index b20fedb55f..dad04d053b 100644 --- a/lib/spack/docs/mirrors.rst +++ b/lib/spack/docs/mirrors.rst @@ -38,7 +38,7 @@ contains tarballs for each package, named after each package. .. note:: - Archives are **not** named exactly they were in the package's fetch + Archives are **not** named exactly the way they were in the package's fetch URL. They have the form ``-.``, where ```` is Spack's name for the package, ```` is the version of the tarball, and ```` is whatever format the From fa2a66db6721ef5d7b54b9cede69aa2f1b5b7531 Mon Sep 17 00:00:00 2001 From: citibeth Date: Wed, 16 Mar 2016 15:46:59 -0400 Subject: [PATCH 189/189] 1. Disabled git versions known to have vulnerabilities. 2. Added autoconf command to allow building directly from GitHub source. 
--- .../repos/builtin/packages/git/package.py | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index ddc5078c4d..586b6ce3c3 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -5,14 +5,22 @@ class Git(Package): system designed to handle everything from small to very large projects with speed and efficiency.""" homepage = "http://git-scm.com" - url = "https://www.kernel.org/pub/software/scm/git/git-2.2.1.tar.gz" + url = "https://github.com/git/git/tarball/v2.7.1" - version('2.6.3', 'b711be7628a4a2c25f38d859ee81b423') - version('2.6.2', 'da293290da69f45a86a311ad3cd43dc8') - version('2.6.1', '4c62ee9c5991fe93d99cf2a6b68397fd') - version('2.6.0', 'eb76a07148d94802a1745d759716a57e') - version('2.5.4', '3eca2390cf1fa698b48e2a233563a76b') - version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c') + version('2.8.0-rc2', 'c2cf9f2cc70e35f2fafbaf9258f82e4c') + version('2.7.3', 'fa1c008b56618c355a32ba4a678305f6') + version('2.7.1', 'bf0706b433a8dedd27a63a72f9a66060') + + + # See here for info on vulnerable Git versions: + # http://www.theregister.co.uk/2016/03/16/git_server_client_patch_now/ + # All the following are vulnerable + #version('2.6.3', 'b711be7628a4a2c25f38d859ee81b423') + #version('2.6.2', 'da293290da69f45a86a311ad3cd43dc8') + #version('2.6.1', '4c62ee9c5991fe93d99cf2a6b68397fd') + #version('2.6.0', 'eb76a07148d94802a1745d759716a57e') + #version('2.5.4', '3eca2390cf1fa698b48e2a233563a76b') + #version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c') # Git compiles with curl support by default on but if your system @@ -24,6 +32,7 @@ class Git(Package): variant("expat", default=False, description="Add the internal support of expat for https push") depends_on("openssl") + depends_on("autoconf") depends_on("curl", when="+curl") depends_on("expat", when="+expat") @@ -47,6 +56,7 @@ def install(self, spec, prefix): if '+expat' in spec: configure_args.append("--with-expat=%s" % spec['expat'].prefix) + which('autoreconf')('-i') configure(*configure_args) make() make("install")
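
As a closing illustration of the accessor split introduced in PATCH 179 (the spec name here is arbitrary), a rough sketch of when each property applies:

    spec = Spec('libelf')

    # Constructs a full Package bound to this spec; assumes the spec is
    # consistent, so avoid it inside normalize()/concretize().
    pkg = spec.package

    # Returns only the class object from the repository; safe for metadata
    # queries such as the declared variants, and cheaper to obtain.
    variants = spec.package_class.variants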