diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index 5774034062..498518057b 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -29,9 +29,11 @@ description = "Get detailed information on a particular package" + def padder(str_list, extra=0): """Return a function to pad elements of a list.""" length = max(len(str(s)) for s in str_list) + extra + def pad(string): string = str(string) padding = max(0, length - len(string)) @@ -40,7 +42,8 @@ def pad(string): def setup_parser(subparser): - subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.") + subparser.add_argument( + 'name', metavar="PACKAGE", help="Name of package to get info for.") def print_text_info(pkg): @@ -84,7 +87,7 @@ def print_text_info(pkg): for deptype in ('build', 'link', 'run'): print print "%s Dependencies:" % deptype.capitalize() - deps = pkg.dependencies(deptype) + deps = pkg.dependencies_of_type(deptype) if deps: colify(deps, indent=4) else: diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index eabf740dbc..317b0d5784 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -60,7 +60,7 @@ _db_dirname = '.spack-db' # DB version. This is stuck in the DB file to track changes in format. -_db_version = Version('0.9.1') +_db_version = Version('0.9.2') # Default timeout for spack database locks is 5 min. _db_lock_timeout = 60 @@ -215,14 +215,10 @@ def _read_spec_from_yaml(self, hash_key, installs, parent_key=None): # Add dependencies from other records in the install DB to # form a full spec. if 'dependencies' in spec_dict[spec.name]: - for dep in spec_dict[spec.name]['dependencies'].values(): - if type(dep) == tuple: - dep_hash, deptypes = dep - else: - dep_hash = dep - deptypes = spack.alldeps - child = self._read_spec_from_yaml(dep_hash, installs, hash_key) - spec._add_dependency(child, deptypes) + yaml_deps = spec_dict[spec.name]['dependencies'] + for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps): + child = self._read_spec_from_yaml(dhash, installs, hash_key) + spec._add_dependency(child, dtypes) # Specs from the database need to be marked concrete because # they represent actual installations. @@ -639,8 +635,8 @@ def _exit(self): class CorruptDatabaseError(SpackError): def __init__(self, path, msg=''): super(CorruptDatabaseError, self).__init__( - "Spack database is corrupt: %s. %s." + \ - "Try running `spack reindex` to fix." % (path, msg)) + "Spack database is corrupt: %s. %s." 
% (path, msg), + "Try running `spack reindex` to fix.") class InvalidDatabaseVersionError(SpackError): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 7f53b461b2..6a92c548fb 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -37,7 +37,6 @@ import re import textwrap import time -import glob import string import llnl.util.tty as tty @@ -62,10 +61,10 @@ from spack.stage import Stage, ResourceStage, StageComposite from spack.util.compression import allowed_archive from spack.util.environment import dump_environment -from spack.util.executable import ProcessError, Executable, which +from spack.util.executable import ProcessError, which from spack.version import * from spack import directory_layout -from urlparse import urlparse + """Allowed URL schemes for spack packages.""" _ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"] @@ -410,7 +409,6 @@ def package_dir(self): """Return the directory where the package.py file lives.""" return os.path.dirname(self.module.__file__) - @property def global_license_dir(self): """Returns the directory where global license files for all @@ -565,6 +563,11 @@ def fetcher(self): def fetcher(self, f): self._fetcher = f + def dependencies_of_type(self, *deptypes): + """Get subset of the dependencies with certain types.""" + return dict((name, conds) for name, conds in self.dependencies.items() + if any(d in self._deptypes[name] for d in deptypes)) + @property def extendee_spec(self): """ @@ -687,7 +690,7 @@ def installed_dependents(self): if self.name == spec.name: continue # XXX(deptype): Should build dependencies not count here? - #for dep in spec.traverse(deptype=('run')): + # for dep in spec.traverse(deptype=('run')): for dep in spec.traverse(deptype=spack.alldeps): if self.spec == dep: dependents.append(spec) @@ -699,13 +702,13 @@ def prefix(self): return self.spec.prefix @property - #TODO: Change this to architecture + # TODO: Change this to architecture def compiler(self): """Get the spack.compiler.Compiler object used to build this package""" if not self.spec.concrete: raise ValueError("Can only get a compiler for a concrete package.") return spack.compilers.compiler_for_spec(self.spec.compiler, - self.spec.architecture) + self.spec.architecture) def url_version(self, version): """ @@ -761,7 +764,6 @@ def do_fetch(self, mirror_only=False): self.stage.cache_local() - def do_stage(self, mirror_only=False): """Unpacks the fetched tarball, then changes into the expanded tarball directory.""" @@ -879,6 +881,7 @@ def _resource_stage(self, resource): return resource_stage_folder install_phases = set(['configure', 'build', 'install', 'provenance']) + def do_install(self, keep_prefix=False, keep_stage=False, @@ -890,7 +893,7 @@ def do_install(self, fake=False, explicit=False, dirty=False, - install_phases = install_phases): + install_phases=install_phases): """Called by commands to install a package and its dependencies. Package implementations should override install() to describe @@ -911,7 +914,8 @@ def do_install(self, run_tests -- Runn tests within the package's install() """ if not self.spec.concrete: - raise ValueError("Can only install concrete packages: %s." % self.spec.name) + raise ValueError("Can only install concrete packages: %s." 
+ % self.spec.name) # No installation needed if package is external if self.spec.external: @@ -920,7 +924,8 @@ def do_install(self, return # Ensure package is not already installed - if 'install' in install_phases and spack.install_layout.check_installed(self.spec): + layout = spack.install_layout + if 'install' in install_phases and layout.check_installed(self.spec): tty.msg("%s is already installed in %s" % (self.name, self.prefix)) rec = spack.installed_db.get_record(self.spec) if (not rec.explicit) and explicit: @@ -1001,20 +1006,17 @@ def build_process(): if 'install' in self.install_phases: self.sanity_check_prefix() - # Copy provenance into the install directory on success if 'provenance' in self.install_phases: - log_install_path = spack.install_layout.build_log_path( - self.spec) - env_install_path = spack.install_layout.build_env_path( - self.spec) - packages_dir = spack.install_layout.build_packages_path( - self.spec) + log_install_path = layout.build_log_path(self.spec) + env_install_path = layout.build_env_path(self.spec) + packages_dir = layout.build_packages_path(self.spec) # Remove first if we're overwriting another build # (can happen with spack setup) try: - shutil.rmtree(packages_dir) # log_install_path and env_install_path are inside this + # log_install_path and env_install_path are here + shutil.rmtree(packages_dir) except: pass @@ -1041,7 +1043,7 @@ def build_process(): except directory_layout.InstallDirectoryAlreadyExistsError: if 'install' in install_phases: # Abort install if install directory exists. - # But do NOT remove it (you'd be overwriting someon else's stuff) + # But do NOT remove it (you'd be overwriting someone's data) tty.warn("Keeping existing install prefix in place.") raise else: @@ -1533,24 +1535,29 @@ def _hms(seconds): parts.append("%.2fs" % s) return ' '.join(parts) + class StagedPackage(Package): """A Package subclass where the install() is split up into stages.""" def install_setup(self): - """Creates an spack_setup.py script to configure the package later if we like.""" - raise InstallError("Package %s provides no install_setup() method!" % self.name) + """Creates a spack_setup.py script to configure the package later.""" + raise InstallError( + "Package %s provides no install_setup() method!" % self.name) def install_configure(self): """Runs the configure process.""" - raise InstallError("Package %s provides no install_configure() method!" % self.name) + raise InstallError( + "Package %s provides no install_configure() method!" % self.name) def install_build(self): """Runs the build process.""" - raise InstallError("Package %s provides no install_build() method!" % self.name) + raise InstallError( + "Package %s provides no install_build() method!" % self.name) def install_install(self): """Runs the install process.""" - raise InstallError("Package %s provides no install_install() method!" % self.name) + raise InstallError( + "Package %s provides no install_install() method!" % self.name) def install(self, spec, prefix): if 'setup' in self.install_phases: @@ -1567,9 +1574,10 @@ def install(self, spec, prefix): else: # Create a dummy file so the build doesn't fail. # That way, the module file will also be created. 
- with open(os.path.join(prefix, 'dummy'), 'w') as fout: + with open(os.path.join(prefix, 'dummy'), 'w'): pass + # stackoverflow.com/questions/12791997/how-do-you-do-a-simple-chmod-x-from-within-python def make_executable(path): mode = os.stat(path).st_mode @@ -1577,9 +1585,7 @@ def make_executable(path): os.chmod(path, mode) - class CMakePackage(StagedPackage): - def make_make(self): import multiprocessing # number of jobs spack will to build with. @@ -1593,37 +1599,41 @@ def make_make(self): return make def configure_args(self): - """Returns package-specific arguments to be provided to the configure command.""" + """Returns package-specific arguments to be provided to + the configure command. + """ return list() def configure_env(self): - """Returns package-specific environment under which the configure command should be run.""" + """Returns package-specific environment under which the + configure command should be run. + """ return dict() - def spack_transitive_include_path(self): + def transitive_inc_path(self): return ';'.join( os.path.join(dep, 'include') for dep in os.environ['SPACK_DEPENDENCIES'].split(os.pathsep) ) def install_setup(self): - cmd = [str(which('cmake'))] + \ - spack.build_environment.get_std_cmake_args(self) + \ - ['-DCMAKE_INSTALL_PREFIX=%s' % os.environ['SPACK_PREFIX'], - '-DCMAKE_C_COMPILER=%s' % os.environ['SPACK_CC'], - '-DCMAKE_CXX_COMPILER=%s' % os.environ['SPACK_CXX'], - '-DCMAKE_Fortran_COMPILER=%s' % os.environ['SPACK_FC']] + \ - self.configure_args() + cmd = [str(which('cmake'))] + cmd += spack.build_environment.get_std_cmake_args(self) + cmd += ['-DCMAKE_INSTALL_PREFIX=%s' % os.environ['SPACK_PREFIX'], + '-DCMAKE_C_COMPILER=%s' % os.environ['SPACK_CC'], + '-DCMAKE_CXX_COMPILER=%s' % os.environ['SPACK_CXX'], + '-DCMAKE_Fortran_COMPILER=%s' % os.environ['SPACK_FC']] + cmd += self.configure_args() - env = dict() - env['PATH'] = os.environ['PATH'] - env['SPACK_TRANSITIVE_INCLUDE_PATH'] = self.spack_transitive_include_path() - env['CMAKE_PREFIX_PATH'] = os.environ['CMAKE_PREFIX_PATH'] + env = { + 'PATH': os.environ['PATH'], + 'SPACK_TRANSITIVE_INCLUDE_PATH': self.transitive_inc_path(), + 'CMAKE_PREFIX_PATH': os.environ['CMAKE_PREFIX_PATH'] + } setup_fname = 'spconfig.py' with open(setup_fname, 'w') as fout: - fout.write(\ -r"""#!%s + fout.write(r"""#!%s # import sys @@ -1631,7 +1641,7 @@ def install_setup(self): import subprocess def cmdlist(str): - return list(x.strip().replace("'",'') for x in str.split('\n') if x) + return list(x.strip().replace("'",'') for x in str.split('\n') if x) env = dict(os.environ) """ % sys.executable) @@ -1639,34 +1649,39 @@ def cmdlist(str): for name in env_vars: val = env[name] if string.find(name, 'PATH') < 0: - fout.write('env[%s] = %s\n' % (repr(name),repr(val))) + fout.write('env[%s] = %s\n' % (repr(name), repr(val))) else: if name == 'SPACK_TRANSITIVE_INCLUDE_PATH': sep = ';' else: sep = ':' - fout.write('env[%s] = "%s".join(cmdlist("""\n' % (repr(name),sep)) + fout.write('env[%s] = "%s".join(cmdlist("""\n' + % (repr(name), sep)) for part in string.split(val, sep): fout.write(' %s\n' % part) fout.write('"""))\n') - fout.write("env['CMAKE_TRANSITIVE_INCLUDE_PATH'] = env['SPACK_TRANSITIVE_INCLUDE_PATH'] # Deprecated\n") + fout.write("env['CMAKE_TRANSITIVE_INCLUDE_PATH'] = " + "env['SPACK_TRANSITIVE_INCLUDE_PATH'] # Deprecated\n") fout.write('\ncmd = cmdlist("""\n') fout.write('%s\n' % cmd[0]) for arg in cmd[1:]: fout.write(' %s\n' % arg) fout.write('""") + sys.argv[1:]\n') - fout.write('\nproc = subprocess.Popen(cmd, 
env=env)\nproc.wait()\n') + fout.write('\nproc = subprocess.Popen(cmd, env=env)\n') + fout.write('proc.wait()\n') make_executable(setup_fname) - def install_configure(self): cmake = which('cmake') with working_dir(self.build_directory, create=True): - os.environ.update(self.configure_env()) - os.environ['SPACK_TRANSITIVE_INCLUDE_PATH'] = self.spack_transitive_include_path() - options = self.configure_args() + spack.build_environment.get_std_cmake_args(self) + env = os.environ + env.update(self.configure_env()) + env['SPACK_TRANSITIVE_INCLUDE_PATH'] = self.transitive_inc_path() + + options = self.configure_args() + options += spack.build_environment.get_std_cmake_args(self) cmake(self.source_directory, *options) def install_build(self): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index d3a5f66e57..e694f2b2da 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -96,7 +96,6 @@ expansion when it is the first character in an id typed on the command line. """ import sys -import itertools import hashlib import base64 import imp @@ -116,8 +115,6 @@ import spack.error import spack.compilers as compilers -# TODO: move display_specs to some other location. -from spack.cmd.find import display_specs from spack.version import * from spack.util.string import * from spack.util.prefix import Prefix @@ -155,6 +152,7 @@ every time we call str()""" _any_version = VersionList([':']) +# Special types of dependencies. alldeps = ('build', 'link', 'run') nolink = ('build', 'run') @@ -296,10 +294,15 @@ def __repr__(self): @key_ordering class DependencySpec(object): - """ - Dependencies have conditions in which they apply. + """Dependencies can be one (or more) of several types: - This stores both what is depended on and why it is a dependency. + - build: needs to be in the PATH at build time. + - link: is linked to and added to compiler flags. + - run: needs to be in the PATH for the package to run. + + Fields: + - spec: the spack.spec.Spec description of a dependency. + - deptypes: strings representing the type of dependency this is. """ def __init__(self, spec, deptypes): self.spec = spec @@ -558,15 +561,15 @@ def dependents(self, deptype=None): def _find_deps_dict(self, where, deptype): deptype = self._deptype_norm(deptype) - return [(dep.spec.name, dep) - for dep in where.values() - if deptype and any(d in deptype for d in dep.deptypes)] + return dict((dep.spec.name, dep) + for dep in where.values() + if deptype and any(d in deptype for d in dep.deptypes)) def dependencies_dict(self, deptype=None): - return dict(self._find_deps_dict(self._dependencies, deptype)) + return self._find_deps_dict(self._dependencies, deptype) def dependents_dict(self, deptype=None): - return dict(self._find_deps_dict(self._dependents, deptype)) + return self._find_deps_dict(self._dependents, deptype) # # Private routines here are called by the parser when building a spec. 
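Note on the deptype filtering above: the new Package.dependencies_of_type() helper (package.py) and the reworked Spec._find_deps_dict() select dependencies with the same any(d in deptypes ...) test and now return dicts keyed by dependency name. A minimal standalone sketch of that idiom follows, using illustrative data; deps_by_name and deptypes_by_name are stand-ins for Spack's internal attributes, not real API.

    # Sketch of deptype filtering, independent of Spack's classes.
    # deps_by_name:     dependency name -> spec-like object
    # deptypes_by_name: dependency name -> tuple of type strings (like Package._deptypes)
    def dependencies_of_type(deps_by_name, deptypes_by_name, *requested):
        """Return the subset of dependencies having any of the requested types."""
        return dict((name, dep) for name, dep in deps_by_name.items()
                    if any(t in deptypes_by_name[name] for t in requested))

    deps = {'cmake': 'cmake@3.5', 'zlib': 'zlib@1.2'}
    types = {'cmake': ('build',), 'zlib': ('build', 'link')}
    print(dependencies_of_type(deps, types, 'link'))    # {'zlib': 'zlib@1.2'}
    print(dependencies_of_type(deps, types, 'build'))   # both entries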
@@ -644,7 +647,8 @@ def _set_platform(self, value): mod = imp.load_source(mod_name, path) class_name = mod_to_class(value) if not hasattr(mod, class_name): - tty.die('No class %s defined in %s' % (class_name, mod_name)) + tty.die( + 'No class %s defined in %s' % (class_name, mod_name)) cls = getattr(mod, class_name) if not inspect.isclass(cls): tty.die('%s.%s is not a class' % (mod_name, class_name)) @@ -667,13 +671,15 @@ def _set_platform(self, value): def _set_os(self, value): """Called by the parser to set the architecture operating system""" - if self.architecture.platform: - self.architecture.platform_os = self.architecture.platform.operating_system(value) + arch = self.architecture + if arch.platform: + arch.platform_os = arch.platform.operating_system(value) def _set_target(self, value): """Called by the parser to set the architecture target""" - if self.architecture.platform: - self.architecture.target = self.architecture.platform.target(value) + arch = self.architecture + if arch.platform: + arch.target = arch.platform.target(value) def _add_dependency(self, spec, deptypes): """Called by the parser to add another spec as a dependency.""" @@ -688,8 +694,9 @@ def _add_dependency(self, spec, deptypes): # @property def fullname(self): - return (('%s.%s' % (self.namespace, self.name)) if self.namespace else - (self.name if self.name else '')) + return ( + ('%s.%s' % (self.namespace, self.name)) if self.namespace else + (self.name if self.name else '')) @property def root(self): @@ -745,15 +752,15 @@ def concrete(self): if self._concrete: return True - self._concrete = bool(not self.virtual - and self.namespace is not None - and self.versions.concrete - and self.variants.concrete - and self.architecture - and self.architecture.concrete - and self.compiler and self.compiler.concrete - and self.compiler_flags.concrete - and self._dependencies.concrete) + self._concrete = bool(not self.virtual and + self.namespace is not None and + self.versions.concrete and + self.variants.concrete and + self.architecture and + self.architecture.concrete and + self.compiler and self.compiler.concrete and + self.compiler_flags.concrete and + self._dependencies.concrete) return self._concrete def traverse(self, visited=None, deptype=None, **kwargs): @@ -864,9 +871,9 @@ def return_val(res): for name in sorted(successors): child = successors[name] children = child.spec.traverse_with_deptype( - visited, d=d + 1, deptype=deptype_query, - deptype_query=deptype_query, - _self_deptype=child.deptypes, **kwargs) + visited, d=d + 1, deptype=deptype_query, + deptype_query=deptype_query, + _self_deptype=child.deptypes, **kwargs) for elt in children: yield elt @@ -914,9 +921,11 @@ def to_node_dict(self): d = { 'parameters': params, 'arch': self.architecture, - 'dependencies': dict((d, (deps[d].spec.dag_hash(), - deps[d].deptypes)) - for d in sorted(deps.keys())) + 'dependencies': dict( + (name, { + 'hash': dspec.spec.dag_hash(), + 'type': [str(s) for s in dspec.deptypes]}) + for name, dspec in deps.items()) } # Older concrete specs do not have a namespace. Omit for @@ -982,13 +991,33 @@ def from_node_dict(node): raise SpackRecordError( "Did not find a valid format for variants in YAML file") - # XXX(deptypes): why are dependencies not meant to be read here? 
- #for name, dep_info in node['dependencies'].items(): - # (dag_hash, deptypes) = dep_info - # spec._dependencies[name] = DependencySpec(dag_hash, deptypes) + # Don't read dependencies here; from_node_dict() is used by + # from_yaml() to read the root *and* each dependency spec. return spec + @staticmethod + def read_yaml_dep_specs(dependency_dict): + """Read the DependencySpec portion of a YAML-formatted Spec. + + This needs to be backward-compatible with older spack spec + formats so that reindex will work on old specs/databases. + """ + for dep_name, elt in dependency_dict.items(): + if isinstance(elt, basestring): + # original format, elt is just the dependency hash. + dag_hash, deptypes = elt, ['build', 'link'] + elif isinstance(elt, tuple): + # original deptypes format: (used tuples, not future-proof) + dag_hash, deptypes = elt + elif isinstance(elt, dict): + # new format: elements of dependency spec are keyed. + dag_hash, deptypes = elt['hash'], elt['type'] + else: + raise SpecError("Couldn't parse dependency types in spec.") + + yield dep_name, dag_hash, list(deptypes) + @staticmethod def from_yaml(stream): """Construct a spec from YAML. @@ -1000,27 +1029,30 @@ def from_yaml(stream): represent more than the DAG does. """ - deps = {} - spec = None - try: yfile = yaml.load(stream) except MarkedYAMLError, e: raise SpackYAMLError("error parsing YAML spec:", str(e)) - for node in yfile['spec']: - name = next(iter(node)) - dep = Spec.from_node_dict(node) - if not spec: - spec = dep - deps[dep.name] = dep + nodes = yfile['spec'] - for node in yfile['spec']: + # Read nodes out of list. Root spec is the first element; + # dependencies are the following elements. + dep_list = [Spec.from_node_dict(node) for node in nodes] + if not dep_list: + raise SpecError("YAML spec contains no nodes.") + deps = dict((spec.name, spec) for spec in dep_list) + spec = dep_list[0] + + for node in nodes: + # get dependency dict from the node. name = next(iter(node)) - for dep_name, (dep, deptypes) in \ - node[name]['dependencies'].items(): - deps[name]._dependencies[dep_name] = \ - DependencySpec(deps[dep_name], deptypes) + yaml_deps = node[name]['dependencies'] + for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps): + # Fill in dependencies by looking them up by name in deps dict + deps[name]._dependencies[dname] = DependencySpec( + deps[dname], set(dtypes)) + return spec def _concretize_helper(self, presets=None, visited=None): @@ -1171,14 +1203,16 @@ def _expand_virtual_packages(self): def feq(cfield, sfield): return (not cfield) or (cfield == sfield) - if replacement is spec or (feq(replacement.name, spec.name) and - feq(replacement.versions, spec.versions) and - feq(replacement.compiler, spec.compiler) and - feq(replacement.architecture, spec.architecture) and - feq(replacement._dependencies, spec._dependencies) and - feq(replacement.variants, spec.variants) and - feq(replacement.external, spec.external) and - feq(replacement.external_module, spec.external_module)): + if replacement is spec or ( + feq(replacement.name, spec.name) and + feq(replacement.versions, spec.versions) and + feq(replacement.compiler, spec.compiler) and + feq(replacement.architecture, spec.architecture) and + feq(replacement._dependencies, spec._dependencies) and + feq(replacement.variants, spec.variants) and + feq(replacement.external, spec.external) and + feq(replacement.external_module, + spec.external_module)): continue # Refine this spec to the candidate. 
This uses # replace_with AND dup so that it can work in @@ -1235,10 +1269,10 @@ def concretize(self): if s.namespace is None: s.namespace = spack.repo.repo_for_pkg(s.name).namespace - for s in self.traverse(root=False): if s.external_module: - compiler = spack.compilers.compiler_for_spec(s.compiler, s.architecture) + compiler = spack.compilers.compiler_for_spec( + s.compiler, s.architecture) for mod in compiler.modules: load_module(mod) @@ -1505,13 +1539,13 @@ def normalize(self, force=False): # Ensure first that all packages & compilers in the DAG exist. self.validate_names() # Get all the dependencies into one DependencyMap - spec_deps = self.flat_dependencies_with_deptype(copy=False, - deptype_query=alldeps) + spec_deps = self.flat_dependencies_with_deptype( + copy=False, deptype_query=alldeps) # Initialize index of virtual dependency providers if # concretize didn't pass us one already - provider_index = ProviderIndex([s.spec for s in spec_deps.values()], - restrict=True) + provider_index = ProviderIndex( + [s.spec for s in spec_deps.values()], restrict=True) # traverse the package DAG and fill out dependencies according # to package files & their 'when' specs @@ -1584,20 +1618,17 @@ def constrain(self, other, deps=True): other.variants[v]) # TODO: Check out the logic here - if self.architecture is not None and other.architecture is not None: - if self.architecture.platform is not None and other.architecture.platform is not None: - if self.architecture.platform != other.architecture.platform: - raise UnsatisfiableArchitectureSpecError(self.architecture, - other.architecture) - if self.architecture.platform_os is not None and other.architecture.platform_os is not None: - if self.architecture.platform_os != other.architecture.platform_os: - raise UnsatisfiableArchitectureSpecError(self.architecture, - other.architecture) - if self.architecture.target is not None and other.architecture.target is not None: - if self.architecture.target != other.architecture.target: - raise UnsatisfiableArchitectureSpecError(self.architecture, - other.architecture) - + sarch, oarch = self.architecture, other.architecture + if sarch is not None and oarch is not None: + if sarch.platform is not None and oarch.platform is not None: + if sarch.platform != oarch.platform: + raise UnsatisfiableArchitectureSpecError(sarch, oarch) + if sarch.platform_os is not None and oarch.platform_os is not None: + if sarch.platform_os != oarch.platform_os: + raise UnsatisfiableArchitectureSpecError(sarch, oarch) + if sarch.target is not None and oarch.target is not None: + if sarch.target != oarch.target: + raise UnsatisfiableArchitectureSpecError(sarch, oarch) changed = False if self.compiler is not None and other.compiler is not None: @@ -1612,15 +1643,16 @@ def constrain(self, other, deps=True): changed |= self.compiler_flags.constrain(other.compiler_flags) old = str(self.architecture) - if self.architecture is None or other.architecture is None: - self.architecture = self.architecture or other.architecture + sarch, oarch = self.architecture, other.architecture + if sarch is None or other.architecture is None: + self.architecture = sarch or oarch else: - if self.architecture.platform is None or other.architecture.platform is None: - self.architecture.platform = self.architecture.platform or other.architecture.platform - if self.architecture.platform_os is None or other.architecture.platform_os is None: - self.architecture.platform_os = self.architecture.platform_os or other.architecture.platform_os - if 
self.architecture.target is None or other.architecture.target is None: - self.architecture.target = self.architecture.target or other.architecture.target + if sarch.platform is None or oarch.platform is None: + self.architecture.platform = sarch.platform or oarch.platform + if sarch.platform_os is None or oarch.platform_os is None: + sarch.platform_os = sarch.platform_os or oarch.platform_os + if sarch.target is None or oarch.target is None: + sarch.target = sarch.target or oarch.target changed |= (str(self.architecture) != old) if deps: @@ -1751,15 +1783,25 @@ def satisfies(self, other, deps=True, strict=False): # Architecture satisfaction is currently just string equality. # If not strict, None means unconstrained. - if self.architecture and other.architecture: - if ((self.architecture.platform and other.architecture.platform and self.architecture.platform != other.architecture.platform) or - (self.architecture.platform_os and other.architecture.platform_os and self.architecture.platform_os != other.architecture.platform_os) or - (self.architecture.target and other.architecture.target and self.architecture.target != other.architecture.target)): + sarch, oarch = self.architecture, other.architecture + if sarch and oarch: + if ((sarch.platform and + oarch.platform and + sarch.platform != oarch.platform) or + + (sarch.platform_os and + oarch.platform_os and + sarch.platform_os != oarch.platform_os) or + + (sarch.target and + oarch.target and + sarch.target != oarch.target)): return False - elif strict and ((other.architecture and not self.architecture) or - (other.architecture.platform and not self.architecture.platform) or - (other.architecture.platform_os and not self.architecture.platform_os) or - (other.architecture.target and not self.architecture.target)): + + elif strict and ((oarch and not sarch) or + (oarch.platform and not sarch.platform) or + (oarch.platform_os and not sarch.platform_os) or + (oarch.target and not sarch.target)): return False if not self.compiler_flags.satisfies( @@ -1841,11 +1883,16 @@ def _dup(self, other, **kwargs): # We don't count dependencies as changes here changed = True if hasattr(self, 'name'): - changed = (self.name != other.name and self.versions != other.versions and \ - self.architecture != other.architecture and self.compiler != other.compiler and \ - self.variants != other.variants and self._normal != other._normal and \ - self.concrete != other.concrete and self.external != other.external and \ - self.external_module != other.external_module and self.compiler_flags != other.compiler_flags) + changed = (self.name != other.name and + self.versions != other.versions and + self.architecture != other.architecture and + self.compiler != other.compiler and + self.variants != other.variants and + self._normal != other._normal and + self.concrete != other.concrete and + self.external != other.external and + self.external_module != other.external_module and + self.compiler_flags != other.compiler_flags) # Local node attributes get copied first. self.name = other.name @@ -1889,7 +1936,7 @@ def _dup(self, other, **kwargs): # here. if depspec.spec.name not in new_spec._dependencies: new_spec._add_dependency( - new_nodes[depspec.spec.name], depspec.deptypes) + new_nodes[depspec.spec.name], depspec.deptypes) # Since we preserved structure, we can copy _normal safely. 
self._normal = other._normal @@ -2000,7 +2047,6 @@ def _cmp_node(self): self.compiler, self.compiler_flags) - def eq_node(self, other): """Equality with another spec, not including dependencies.""" return self._cmp_node() == other._cmp_node() @@ -2196,41 +2242,39 @@ def write(s, c): def dep_string(self): return ''.join("^" + dep.format() for dep in self.sorted_deps()) - def __cmp__(self, other): - #Package name sort order is not configurable, always goes alphabetical + # Package name sort order is not configurable, always goes alphabetical if self.name != other.name: return cmp(self.name, other.name) - #Package version is second in compare order + # Package version is second in compare order pkgname = self.name if self.versions != other.versions: - return spack.pkgsort.version_compare(pkgname, - self.versions, other.versions) + return spack.pkgsort.version_compare( + pkgname, self.versions, other.versions) - #Compiler is third + # Compiler is third if self.compiler != other.compiler: - return spack.pkgsort.compiler_compare(pkgname, - self.compiler, other.compiler) + return spack.pkgsort.compiler_compare( + pkgname, self.compiler, other.compiler) - #Variants + # Variants if self.variants != other.variants: - return spack.pkgsort.variant_compare(pkgname, - self.variants, other.variants) + return spack.pkgsort.variant_compare( + pkgname, self.variants, other.variants) - #Target + # Target if self.architecture != other.architecture: - return spack.pkgsort.architecture_compare(pkgname, - self.architecture, other.architecture) + return spack.pkgsort.architecture_compare( + pkgname, self.architecture, other.architecture) - #Dependency is not configurable + # Dependency is not configurable if self._dependencies != other._dependencies: return -1 if self._dependencies < other._dependencies else 1 - #Equal specs + # Equal specs return 0 - def __str__(self): return self.format() + self.dep_string() @@ -2244,12 +2288,14 @@ def tree(self, **kwargs): indent = kwargs.pop('indent', 0) fmt = kwargs.pop('format', '$_$@$%@+$+$=') prefix = kwargs.pop('prefix', None) + deptypes = kwargs.pop('deptypes', ('build', 'link')) check_kwargs(kwargs, self.tree) out = "" cur_id = 0 ids = {} - for d, node in self.traverse(order='pre', cover=cover, depth=True): + for d, node in self.traverse( + order='pre', cover=cover, depth=True, deptypes=deptypes): if prefix is not None: out += prefix(node) out += " " * indent @@ -2303,8 +2349,8 @@ def __init__(self): # Lexer is always the same for every parser. _lexer = SpecLexer() -class SpecParser(spack.parse.Parser): +class SpecParser(spack.parse.Parser): def __init__(self): super(SpecParser, self).__init__(_lexer) self.previous = None @@ -2357,8 +2403,8 @@ def do_parse(self): except spack.parse.ParseError, e: raise SpecParseError(e) - - # If the spec has an os or a target and no platform, give it the default platform + # If the spec has an os or a target and no platform, give it + # the default platform for spec in specs: for s in spec.traverse(): if s.architecture.os_string or s.architecture.target_string: diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index ae3f08deed..09bdb021af 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -1,7 +1,31 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. 
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## """ Test checks if the architecture class is created correctly and also that the functions are looking for the correct architecture name """ -import unittest +import itertools import os import platform as py_platform import spack @@ -14,9 +38,8 @@ from spack.test.mock_packages_test import * -#class ArchitectureTest(unittest.TestCase): -class ArchitectureTest(MockPackagesTest): +class ArchitectureTest(MockPackagesTest): def setUp(self): super(ArchitectureTest, self).setUp() self.platform = spack.architecture.platform() @@ -36,24 +59,22 @@ def test_dict_functions_for_architecture(self): self.assertEqual(arch, new_arch) - self.assertTrue( isinstance(arch, spack.architecture.Arch) ) - self.assertTrue( isinstance(arch.platform, spack.architecture.Platform) ) - self.assertTrue( isinstance(arch.platform_os, - spack.architecture.OperatingSystem) ) - self.assertTrue( isinstance(arch.target, - spack.architecture.Target) ) - self.assertTrue( isinstance(new_arch, spack.architecture.Arch) ) - self.assertTrue( isinstance(new_arch.platform, - spack.architecture.Platform) ) - self.assertTrue( isinstance(new_arch.platform_os, - spack.architecture.OperatingSystem) ) - self.assertTrue( isinstance(new_arch.target, - spack.architecture.Target) ) - + self.assertTrue(isinstance(arch, spack.architecture.Arch)) + self.assertTrue(isinstance(arch.platform, spack.architecture.Platform)) + self.assertTrue(isinstance(arch.platform_os, + spack.architecture.OperatingSystem)) + self.assertTrue(isinstance(arch.target, + spack.architecture.Target)) + self.assertTrue(isinstance(new_arch, spack.architecture.Arch)) + self.assertTrue(isinstance(new_arch.platform, + spack.architecture.Platform)) + self.assertTrue(isinstance(new_arch.platform_os, + spack.architecture.OperatingSystem)) + self.assertTrue(isinstance(new_arch.target, + spack.architecture.Target)) def test_platform(self): output_platform_class = spack.architecture.platform() - my_arch_class = None if os.path.exists('/opt/cray/craype'): my_platform_class = CrayXc() elif os.path.exists('/bgsys'): @@ -91,7 +112,7 @@ def test_user_defaults(self): default_os = self.platform.operating_system("default_os") default_target = self.platform.target("default_target") - default_spec = Spec("libelf") # default is no args + default_spec = Spec("libelf") # default is no args default_spec.concretize() self.assertEqual(default_os, default_spec.architecture.platform_os) self.assertEqual(default_target, default_spec.architecture.target) @@ -107,10 +128,11 @@ def test_user_input_combination(self): 
combinations = itertools.product(os_list, target_list) results = [] for arch in combinations: - o,t = arch + o, t = arch spec = Spec("libelf os=%s target=%s" % (o, t)) spec.concretize() - results.append(spec.architecture.platform_os == self.platform.operating_system(o)) + results.append(spec.architecture.platform_os == + self.platform.operating_system(o)) results.append(spec.architecture.target == self.platform.target(t)) res = all(results)
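End note on the format change driving this diff: to_node_dict() now records each dependency as a small dict with 'hash' and 'type' keys, and Spec.read_yaml_dep_specs() reads that format while staying backward-compatible with the older bare-hash and (hash, deptypes) layouts, so that `spack reindex` keeps working on existing databases (hence the DB version bump to 0.9.2). A simplified standalone sketch of that reader, assuming Python 2 as in the diff, with made-up hashes; it raises a plain ValueError here rather than Spack's SpecError.

    # Backward-compatible dependency reader, mirroring Spec.read_yaml_dep_specs().
    def read_yaml_dep_specs(dependency_dict):
        for dep_name, elt in dependency_dict.items():
            if isinstance(elt, basestring):      # oldest format: bare hash string
                dag_hash, deptypes = elt, ['build', 'link']
            elif isinstance(elt, tuple):         # interim format: (hash, deptypes)
                dag_hash, deptypes = elt
            elif isinstance(elt, dict):          # new format: keyed fields
                dag_hash, deptypes = elt['hash'], elt['type']
            else:
                raise ValueError("Couldn't parse dependency types in spec.")
            yield dep_name, dag_hash, list(deptypes)

    old_style = {'zlib': 'abc123'}                                        # pre-0.9.2 entry
    new_style = {'zlib': {'hash': 'abc123', 'type': ['build', 'link']}}   # 0.9.2 entry
    print(list(read_yaml_dep_specs(old_style)))  # [('zlib', 'abc123', ['build', 'link'])]
    print(list(read_yaml_dep_specs(new_style)))  # same result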