Dependencies now work. Added libelf, libdwarf.

Todd Gamblin 2013-02-18 23:46:04 -08:00
parent cc76c0f5f9
commit 38becacace
40 changed files with 1163 additions and 260 deletions
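For context, the heart of this change is that a package file can now declare its prerequisites with depends_on() and have spack install build them first. Below is a minimal sketch of a hypothetical package using the new mechanism, modeled on the libdwarf and libelf packages added in this commit; the name, URL, and md5 are placeholders.

# Hypothetical package file, e.g. lib/spack/spack/packages/mylib.py.
from spack import *

class Mylib(Package):
    homepage = "http://www.example.com"
    url      = "http://www.example.com/mylib-1.0.tar.gz"
    md5      = "0123456789abcdef0123456789abcdef"

    # New in this commit: libelf is installed before this package is built.
    depends_on("libelf")

    def install(self, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")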


@ -19,7 +19,7 @@ import spack
# Command parsing
parser = argparse.ArgumentParser(
description='Spack: the Supercomputing PACKage Manager.')
parser.add_argument('-V', '--version', action='version', version="%s" % spack.version)
parser.add_argument('-V', '--version', action='version', version="%s" % spack.spack_version)
parser.add_argument('-v', '--verbose', action='store_true', dest='verbose')
# each command module implements a parser() function, to which we pass its

lib/spack/env/c++ vendored Symbolic link

@ -0,0 +1 @@
cc

lib/spack/env/c89 vendored Symbolic link

@ -0,0 +1 @@
cc

lib/spack/env/c99 vendored Symbolic link

@ -0,0 +1 @@
cc

lib/spack/env/case-insensitive/CC vendored Symbolic link

@ -0,0 +1 @@
../cc

lib/spack/env/cc vendored Executable file

@ -0,0 +1,69 @@
#!/usr/bin/env python
import sys
import os
import subprocess
import argparse
def get_path(name):
path = os.environ.get(name, "")
return path.split(":")
# Import spack parameters through the build environment.
spack_lib = os.environ.get("SPACK_LIB")
spack_deps = get_path("SPACK_DEPENDENCIES")
spack_env_path = get_path("SPACK_ENV_PATH")
if not spack_lib or spack_deps == None:
print "%s must be run from spack." % os.path.abspath(sys.argv[0])
sys.exit(1)
# Figure out what type of operation we're doing
command = os.path.basename(sys.argv[0])
# Grab a minimal set of spack packages
sys.path.append(spack_lib)
from spack.utils import *
from spack.compilation import parse_rpaths
import spack.tty as tty
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("-I", action='append', default=[], dest='include_path')
parser.add_argument("-L", action='append', default=[], dest='lib_path')
parser.add_argument("-l", action='append', default=[], dest='libs')
options, other_args = parser.parse_known_args()
rpaths, other_args = parse_rpaths(other_args)
if rpaths:
tty.warn("Spack stripping non-spack rpaths: ", *rpaths)
# Find the actual command the build is trying to run by removing
# Spack's env paths from the path. We use this later for which()
script_dir = os.path.dirname(os.path.expanduser(__file__))
clean_path = get_path("PATH")
remove_items(clean_path, '.')
for path in spack_env_path:
remove_items(clean_path, path)
# Add dependence's paths to our compiler flags.
def append_if_dir(path_list, prefix, *dirs):
full_path = os.path.join(prefix, *dirs)
if os.path.isdir(full_path):
path_list.append(full_path)
for prefix in spack_deps:
append_if_dir(options.include_path, prefix, "include")
append_if_dir(options.lib_path, prefix, "lib")
append_if_dir(options.lib_path, prefix, "lib64")
# Add our modified arguments to it.
cmd = which(command, path=clean_path)
arguments = ['-I%s' % path for path in options.include_path]
arguments += other_args
arguments += ['-L%s' % path for path in options.lib_path]
arguments += ['-l%s' % path for path in options.libs]
# Unset some pesky environment variables
pop_keys(os.environ, "LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH")
rcode = cmd(*arguments, fail_on_error=False)
sys.exit(rcode)
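For reference, a rough sketch of the build environment this wrapper assumes, mirroring what Package.setup_install_environment() exports later in this commit; every path here is an illustrative placeholder.

# Illustrative only: the environment the cc wrapper expects when it runs.
import os
import subprocess

os.environ["SPACK_LIB"] = "/opt/spack/lib/spack"                            # lets the wrapper import spack.utils, spack.tty, ...
os.environ["SPACK_DEPENDENCIES"] = "/opt/spack/opt/sys_type/libelf/0.8.13"  # ':'-separated dependency prefixes
os.environ["SPACK_ENV_PATH"] = "/opt/spack/lib/spack/env"                   # wrapper dir, stripped from PATH to find the real compiler
os.environ["PATH"] = "/opt/spack/lib/spack/env:" + os.environ["PATH"]       # so gcc/cc/ld resolve to the wrapper
subprocess.call(["gcc", "-c", "foo.c"])                                     # wrapper adds -I/-L flags for each dependency prefix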

lib/spack/env/clang vendored Symbolic link

@ -0,0 +1 @@
cc

lib/spack/env/clang++ vendored Symbolic link

@ -0,0 +1 @@
cc

lib/spack/env/cpp vendored Symbolic link

@ -0,0 +1 @@
cc

lib/spack/env/g++ vendored Symbolic link

@ -0,0 +1 @@
cc

lib/spack/env/gcc vendored Symbolic link

@ -0,0 +1 @@
cc

lib/spack/env/ld vendored Symbolic link

@ -0,0 +1 @@
cc


@ -2,16 +2,39 @@
import os
import re
import subprocess
import platform
import shutil
from spack import *
import packages
import tty
import attr
import validate
import version
import shutil
import platform
import arch
from stage import Stage
DEPENDS_ON = "depends_on"
class Dependency(object):
"""Represents a dependency from one package to another."""
def __init__(self, name, **kwargs):
self.name = name
for key in kwargs:
setattr(self, key, kwargs[key])
@property
def package(self):
return packages.get(self.name)
def __repr__(self):
return "<dep: %s>" % self.name
def __str__(self):
return self.__repr__()
def depends_on(*args, **kwargs):
"""Adds a depends_on local variable in the locals of
the calling class, based on args.
@ -21,40 +44,170 @@ def depends_on(*args, **kwargs):
locals = stack[1][0].f_locals
finally:
del stack
print locals
locals["depends_on"] = kwargs
dependencies = locals.setdefault("dependencies", [])
for name in args:
dependencies.append(Dependency(name))
class Package(object):
def __init__(self):
def __init__(self, arch=arch.sys_type()):
attr.required(self, 'homepage')
attr.required(self, 'url')
attr.required(self, 'md5')
attr.setdefault(self, "dependencies", [])
# Name of package is just the classname lowercased
self.name = self.__class__.__name__.lower()
# Architecture for this package.
self.arch = arch
# Name of package is the name of its module (the file that contains it)
self.name = inspect.getmodulename(self.module.__file__)
# Make sure URL is an allowed type
validate.url(self.url)
v = version.parse(self.url)
if not v:
# Set up version
attr.setdefault(self, 'version', version.parse_version(self.url))
if not self.version:
tty.die("Couldn't extract version from '%s'. " +
"You must specify it explicitly for this URL." % self.url)
self.version = v
# This adds a bunch of convenient commands to the package's module scope.
self.add_commands_to_module()
# Controls whether install and uninstall check deps before acting.
self.ignore_dependencies = False
# Empty at first; only compute dependents if necessary
self._dependents = None
def add_commands_to_module(self):
"""Populate the module scope of install() with some useful functions.
This makes things easier for package writers.
"""
self.module.make = make_make()
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
self.module.configure = Executable('./configure')
self.module.cmake = which("cmake")
# standard CMake arguments
self.module.std_cmake_args = [
'-DCMAKE_INSTALL_PREFIX=%s' % self.prefix,
'-DCMAKE_BUILD_TYPE=None']
if platform.mac_ver()[0]:
self.module.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST')
# Emulate some shell commands for convenience
self.module.cd = os.chdir
self.module.mkdir = os.mkdir
self.module.makedirs = os.makedirs
self.module.removedirs = os.removedirs
self.module.mkdirp = mkdirp
self.module.install = install
self.module.rmtree = shutil.rmtree
self.module.move = shutil.move
# Useful directories within the prefix
self.module.prefix = self.prefix
self.module.bin = new_path(self.prefix, 'bin')
self.module.sbin = new_path(self.prefix, 'sbin')
self.module.etc = new_path(self.prefix, 'etc')
self.module.include = new_path(self.prefix, 'include')
self.module.lib = new_path(self.prefix, 'lib')
self.module.libexec = new_path(self.prefix, 'libexec')
self.module.share = new_path(self.prefix, 'share')
self.module.doc = new_path(self.module.share, 'doc')
self.module.info = new_path(self.module.share, 'info')
self.module.man = new_path(self.module.share, 'man')
self.module.man1 = new_path(self.module.man, 'man1')
self.module.man2 = new_path(self.module.man, 'man2')
self.module.man3 = new_path(self.module.man, 'man3')
self.module.man4 = new_path(self.module.man, 'man4')
self.module.man5 = new_path(self.module.man, 'man5')
self.module.man6 = new_path(self.module.man, 'man6')
self.module.man7 = new_path(self.module.man, 'man7')
self.module.man8 = new_path(self.module.man, 'man8')
@property
def dependents(self):
"""List of names of packages that depend on this one."""
if self._dependents is None:
packages.compute_dependents()
return tuple(self._dependents)
@property
def installed(self):
return os.path.exists(self.prefix)
@property
def installed_dependents(self):
installed = [d for d in self.dependents if packages.get(d).installed]
all_deps = []
for d in installed:
all_deps.append(d)
all_deps.extend(packages.get(d).installed_dependents)
return tuple(all_deps)
@property
def all_dependents(self):
all_deps = list(self.dependents)
for pkg in self.dependents:
all_deps.extend(packages.get(pkg).all_dependents)
return tuple(all_deps)
@property
def stage(self):
return Stage(self.stage_name)
return Stage(self.stage_name, self.url)
@property
def stage_name(self):
return "%s-%s" % (self.name, self.version)
@property
def platform_path(self):
"""Directory for binaries for the current platform."""
return new_path(install_path, self.arch)
@property
def package_path(self):
"""Directory for different versions of this package. Lives just above prefix."""
return new_path(self.platform_path, self.name)
@property
def installed_versions(self):
return [ver for ver in os.listdir(self.package_path)
if os.path.isdir(new_path(self.package_path, ver))]
@property
def prefix(self):
return new_path(install_path, self.stage_name)
"""Packages are installed in $spack_prefix/opt/<sys_type>/<name>/<version>"""
return new_path(self.package_path, self.version)
def remove_prefix(self):
"""Removes the prefix for a package along with any empty parent directories."""
shutil.rmtree(self.prefix, True)
for dir in (self.package_path, self.platform_path):
if not os.listdir(dir):
os.rmdir(dir)
else:
break
def do_fetch(self):
"""Creates a stage directory and downloads the taball for this package.
@ -62,49 +215,28 @@ def do_fetch(self):
"""
stage = self.stage
stage.setup()
stage.chdir()
stage.fetch()
archive_file = os.path.basename(self.url)
if not os.path.exists(archive_file):
tty.msg("Fetching %s" % self.url)
# Run curl but grab the mime type from the http headers
headers = curl('-#', '-O', '-D', '-', self.url, return_output=True)
# output this if we somehow got an HTML file rather than the archive we
# asked for.
if re.search(r'Content-Type: text/html', headers):
tty.warn("The contents of '%s' look like HTML. The checksum will "+
"likely fail. Use 'spack clean %s' to delete this file. "
"The fix the gateway issue and install again." % (archive_file, self.name))
if not os.path.exists(archive_file):
tty.die("Failed to download '%s'!" % self.url)
else:
tty.msg("Already downloaded %s." % self.name)
archive_md5 = md5(archive_file)
archive_md5 = md5(stage.archive_file)
if archive_md5 != self.md5:
tty.die("MD5 Checksum failed for %s. Expected %s but got %s."
% (self.name, self.md5, archive_md5))
return archive_file
def do_stage(self):
"""Unpacks the fetched tarball, then changes into the expanded tarball directory."""
archive_file = self.do_fetch()
self.do_fetch()
stage = self.stage
archive_dir = stage.archive_path
archive_dir = stage.expanded_archive_path
if not archive_dir:
tty.msg("Staging archive: '%s'" % archive_file)
decompress = decompressor_for(archive_file)
decompress(archive_file)
tty.msg("Staging archive: '%s'" % stage.archive_file)
stage.expand_archive()
else:
tty.msg("Alredy staged %s" % self.name)
tty.msg("Already staged %s" % self.name)
stage.chdir_to_archive()
def do_install(self):
"""This class should call this version of the install method.
Package implementations should override install().
@ -114,17 +246,55 @@ def do_install(self):
tty.pkg(self.prefix)
return
if not self.ignore_dependencies:
self.do_install_dependencies()
self.do_stage()
self.setup_install_environment()
# Populate the module scope of install() with some useful functions.
# This makes things easier for package writers.
self.module.configure = which("configure", [self.stage.archive_path])
self.module.cmake = which("cmake")
try:
self.install(self.prefix)
if not os.path.isdir(self.prefix):
tty.die("Install failed for %s. No install dir created." % self.name)
except Exception, e:
# Blow away the install tree if anything goes wrong.
self.remove_prefix()
tty.die("Install failed for %s" % self.name, e.message)
tty.msg("Successfully installed %s" % self.name)
tty.pkg(self.prefix)
def setup_install_environment(self):
"""This ensures a clean install environment when we build packages."""
pop_keys(os.environ, "LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH")
# Add spack environment at front of path and pass the
# lib location along so the compiler script can find spack
os.environ["SPACK_LIB"] = lib_path
# Fix for case-insensitive file systems. Conflicting links are
# in directories called "case*" within the env directory.
env_paths = [env_path]
for file in os.listdir(env_path):
path = new_path(env_path, file)
if file.startswith("case") and os.path.isdir(path):
env_paths.append(path)
path_prepend("PATH", *env_paths)
path_prepend("SPACK_ENV_PATH", *env_paths)
# Pass along paths of dependencies here
for dep in self.dependencies:
path_prepend("SPACK_DEPENDENCIES", dep.package.prefix)
def do_install_dependencies(self):
# Pass along paths of dependencies here
for dep in self.dependencies:
dep.package.do_install()
@property
def module(self):
"""Use this to add variables to the class's module's scope.
@ -133,36 +303,49 @@ def module(self):
return __import__(self.__class__.__module__,
fromlist=[self.__class__.__name__])
def install(self, prefix):
"""Package implementations override this with their own build configuration."""
tty.die("Packages must provide an install method!")
def do_uninstall(self):
self.uninstall(self.prefix)
if not os.path.exists(self.prefix):
tty.die(self.name + " is not installed.")
if not self.ignore_dependencies:
deps = self.installed_dependents
if deps: tty.die(
"Cannot uninstall %s. The following installed packages depend on it:"
% self.name, " ".join(deps))
self.remove_prefix()
tty.msg("Successfully uninstalled %s." % self.name)
def uninstall(self, prefix):
"""By default just blows the install dir away."""
shutil.rmtree(self.prefix, True)
def do_clean(self):
if self.stage.expanded_archive_path:
self.stage.chdir_to_archive()
self.clean()
def clean(self):
"""By default just runs make clean. Override if this isn't good."""
stage = self.stage
if stage.archive_path:
stage.chdir_to_archive()
try:
make("clean")
make = make_make()
make('clean')
tty.msg("Successfully cleaned %s" % self.name)
except subprocess.CalledProcessError:
# Might not be configured. Ignore.
pass
except subprocess.CalledProcessError, e:
tty.warn("Warning: 'make clean' didn't work. Consider 'spack clean --work'.")
def do_clean_all(self):
def do_clean_work(self):
"""By default just blows away the stage directory and re-stages."""
self.stage.restage()
def do_clean_dist(self):
"""Removes the stage directory where this package was built."""
if os.path.exists(self.stage.path):
self.stage.destroy()
tty.msg("Successfully cleaned %s" % self.name)


@ -1,5 +1,6 @@
from globals import *
from fileutils import *
from utils import *
from exception import *
from Package import Package, depends_on

lib/spack/spack/arch.py Normal file

@ -0,0 +1,34 @@
import os
import platform
from version import Version
from utils import memoized
instances = {}
macos_versions = [
('10.8', 'mountain_lion'),
('10.7', 'lion'),
('10.6', 'snow_leopard'),
('10.5', 'leopard')]
class SysType(object):
def __init__(self, arch_string):
self.arch_string = arch_string
def __repr__(self):
return self.arch_string
def __str__(self):
return self.__repr__()
@memoized
def sys_type():
stype = os.environ.get('SYS_TYPE')
if stype:
return SysType(stype)
elif platform.mac_ver()[0]:
version = Version(platform.mac_ver()[0])
for mac_ver, name in macos_versions:
if version >= Version(mac_ver):
return SysType(name)


@ -6,3 +6,9 @@ def required(obj, attr_name):
tty.die("No required attribute '%s' in class '%s'"
% (attr_name, obj.__class__.__name__))
def setdefault(obj, name, value):
"""Like dict.setdefault, but for objects."""
if not hasattr(obj, name):
setattr(obj, name, value)
return getattr(obj, name)


@ -4,16 +4,20 @@
import spack
import spack.tty as tty
SETUP_PARSER = "setup_parser"
# Patterns to ignore in the commands directory when looking for commands.
ignore_files = r'^\.|^__init__.py$|^#'
setup_parser = "setup_parser"
command_path = os.path.join(spack.lib_path, "spack", "cmd")
commands = []
for file in os.listdir(command_path):
if file.endswith(".py") and not file == "__init__.py":
if file.endswith(".py") and not re.search(ignore_files, file):
cmd = re.sub(r'.py$', '', file)
commands.append(cmd)
commands.sort()
def null_op(*args):
pass
@ -21,8 +25,8 @@ def null_op(*args):
def get_module(name):
"""Imports the module for a particular command name and returns it."""
module_name = "%s.%s" % (__name__, name)
module = __import__(module_name, fromlist=[name, SETUP_PARSER], level=0)
module.setup_parser = getattr(module, SETUP_PARSER, null_op)
module = __import__(module_name, fromlist=[name, setup_parser], level=0)
module.setup_parser = getattr(module, setup_parser, null_op)
if not hasattr(module, name):
tty.die("Command module %s (%s) must define function '%s'."


@ -1,12 +0,0 @@
from spack import *
import spack.version as version
import multiprocessing
import platform
def arch(args):
print multiprocessing.cpu_count()
print platform.mac_ver()
print version.canonical(platform.mac_ver()[0])


@ -1,16 +1,21 @@
import spack.packages as packages
def setup_parser(subparser):
subparser.add_argument('name', help="name of package to clean")
subparser.add_argument('-a', "--all", action="store_true", dest="all",
help="delete the entire stage directory")
subparser.add_argument('names', nargs='+', help="name(s) of package(s) to clean")
subparser.add_mutually_exclusive_group()
subparser.add_argument('-c', "--clean", action="store_true", dest='clean',
help="run make clean in the stage directory (default)")
subparser.add_argument('-w', "--work", action="store_true", dest='work',
help="delete and re-expand the entire stage directory")
subparser.add_argument('-d', "--dist", action="store_true", dest='dist',
help="delete the downloaded archive.")
def clean(args):
package_class = packages.get(args.name)
package = package_class()
if args.all:
package.do_clean_all()
for name in args.names:
package = packages.get(name)
if args.dist:
package.do_clean_dist()
elif args.work:
package.do_clean_work()
else:
package.do_clean()


@ -1,27 +1,30 @@
import string
import os
import spack
import spack.packages as packages
import spack.tty as tty
import spack.version
pacakge_tempate = string.Template("""\
from spack.stage import Stage
from contextlib import closing
package_template = string.Template("""\
from spack import *
class $name(Package):
homepage = "${homepage}"
class ${class_name}(Package):
homepage = "http://www.example.com"
url = "${url}"
md5 = "${md5}"
def install(self):
# Insert your installation code here.
pass
def install(self, prefix):
# Insert the configure line for your build system here.
configure("--prefix=%s" % prefix)
# cmake(".", *std_cmake_args)
make()
make("install")
""")
def create_template(name):
class_name = name.capitalize()
return new_pacakge_tempate % class_name
def setup_parser(subparser):
subparser.add_argument('url', nargs='?', help="url of package archive")
@ -30,28 +33,42 @@ def setup_parser(subparser):
def create(args):
url = args.url
version = spack.version.parse(url)
if not version:
tty.die("Couldn't figure out a version string from '%s'." % url)
# By default open the directory where packages live.
# Try to deduce name and version of the new package from the URL
name, version = spack.version.parse(url)
if not name:
path = spack.packages_path
print "Couldn't guess a name for this package."
while not name:
new_name = raw_input("Name: ")
if packages.valid_name(name):
name = new_name
else:
path = packages.filename_for(name)
print "Package names must contain letters, numbers, and '_' or '-'"
if not version:
tty.die("Couldn't guess a version string from %s." % url)
path = packages.filename_for(name)
if os.path.exists(path):
if not os.path.isfile(path):
tty.die("Something's wrong. '%s' is not a file!" % path)
if not os.access(path, os.R_OK|os.W_OK):
tty.die("Insufficient permissions on '%s'!" % path)
else:
tty.msg("Editing new file: '%s'." % path)
file = open(path, "w")
file.write(create_template(name))
file.close()
tty.die("%s already exists." % path)
# make a stage and fetch the archive.
try:
stage = Stage(name, url)
archive_file = stage.fetch()
except spack.FailedDownloadException, e:
tty.die(e.message)
md5 = spack.md5(archive_file)
class_name = packages.class_for(name)
# Write out a template for the file
tty.msg("Editing %s." % path)
with closing(open(path, "w")) as pkg_file:
pkg_file.write(
package_template.substitute(
class_name=class_name,
url=url,
md5=md5))
# If everything checks out, go ahead and edit.
spack.editor(path)


@ -3,29 +3,12 @@
import spack.packages as packages
import spack.tty as tty
new_pacakge_tempate = """\
from spack import *
class %s(Package):
homepage = "https://www.example.com"
url = "https://www.example.com/download/example-1.0.tar.gz"
md5 = "nomd5"
def install(self):
# Insert your installation code here.
pass
"""
def create_template(name):
class_name = name.capitalize()
return new_pacakge_tempate % class_name
def setup_parser(subparser):
subparser.add_argument(
'name', nargs='?', default=None, help="name of package to edit")
def edit(args):
name = args.name
@ -41,10 +24,7 @@ def edit(args):
if not os.access(path, os.R_OK|os.W_OK):
tty.die("Insufficient permissions on '%s'!" % path)
else:
tty.msg("Editing new file: '%s'." % path)
file = open(path, "w")
file.write(create_template(name))
file.close()
tty.die("No package for %s. Use spack create.")
# If everything checks out, go ahead and edit.
spack.editor(path)


@ -4,6 +4,5 @@ def setup_parser(subparser):
subparser.add_argument('name', help="name of package to fetch")
def fetch(args):
package_class = packages.get(args.name)
package = package_class()
package = packages.get(args.name)
package.do_fetch()


@ -0,0 +1,6 @@
import spack
import spack.packages as packages
def graph(args):
packages.graph_dependencies()


@ -1,11 +1,14 @@
import spack
import spack.packages as packages
def setup_parser(subparser):
subparser.add_argument('name', help="name of package to install")
subparser.add_argument('names', nargs='+', help="name(s) of package(s) to install")
subparser.add_argument('-i', '--ignore-dependencies',
action='store_true', dest='ignore_dependencies',
help="Do not try to install dependencies of requested packages.")
def install(args):
package_class = packages.get(args.name)
package = package_class()
spack.ignore_dependencies = args.ignore_dependencies
for name in args.names:
package = packages.get(name)
package.do_install()


@ -0,0 +1,22 @@
import spack
import spack.packages as packages
from spack.colify import colify
def setup_parser(subparser):
subparser.add_argument('-i', '--installed', action='store_true', dest='installed',
help='List installed packages for each platform along with versions.')
def list(args):
if args.installed:
pkgs = packages.installed_packages()
for sys_type in pkgs:
print "%s:" % sys_type
package_vers = []
for pkg in pkgs[sys_type]:
pv = [pkg.name + "/" + v for v in pkg.installed_versions]
package_vers.extend(pv)
colify(sorted(package_vers), indent=4)
else:
colify(packages.all_package_names())


@ -4,6 +4,5 @@ def setup_parser(subparser):
subparser.add_argument('name', help="name of package to stage")
def stage(args):
package_class = packages.get(args.name)
package = package_class()
package = packages.get(args.name)
package.do_stage()


@ -1,9 +1,20 @@
import spack.packages as packages
def setup_parser(subparser):
subparser.add_argument('name', help="name of package to uninstall")
subparser.add_argument('names', nargs='+', help="name(s) of package(s) to uninstall")
subparser.add_argument('-f', '--force', action='store_true', dest='force',
help="Ignore installed packages that depend on this one and remove it anyway.")
def uninstall(args):
package_class = packages.get(args.name)
package = package_class()
package.do_uninstall()
# get packages to uninstall as a list.
pkgs = [packages.get(name) for name in args.names]
# Sort packages to be uninstalled by the number of installed dependents
# This ensures we do things in the right order
def num_installed_deps(pkg):
return len(pkg.installed_dependents)
pkgs.sort(key=num_installed_deps)
# Uninstall packages in order now.
for pkg in pkgs:
pkg.do_uninstall()

lib/spack/spack/colify.py Normal file

@ -0,0 +1,159 @@
#!/usr/bin/env python
#
# colify
# By Todd Gamblin, tgamblin@llnl.gov
#
# Takes a list of items as input and finds a good columnization of them,
# similar to how gnu ls does. You can pipe output to this script and
# get a tight display for it. This supports both uniform-width and
# variable-width (tighter) columns.
#
# Run colify -h for more information.
#
def get_terminal_size():
import os
"""Get the dimensions of the console."""
def ioctl_GWINSZ(fd):
try:
import fcntl, termios, struct
cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
except:
return
return cr
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
return int(cr[1]), int(cr[0])
class ColumnConfig:
def __init__(self, cols):
self.cols = cols
self.line_length = 0
self.valid = True
self.widths = [0] * cols
def __repr__(self):
attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")]
return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)
def config_variable_cols(elts, console_cols, padding):
# Get a bound on the most columns we could possibly have.
lengths = [len(elt) for elt in elts]
max_cols = max(1, console_cols / (min(lengths) + padding))
max_cols = min(len(elts), max_cols)
configs = [ColumnConfig(c) for c in xrange(1, max_cols+1)]
for elt, length in enumerate(lengths):
for i, conf in enumerate(configs):
if conf.valid:
col = elt / ((len(elts) + i) / (i + 1))
padded = length
if col < i:
padded += padding
if conf.widths[col] < padded:
conf.line_length += padded - conf.widths[col]
conf.widths[col] = padded
conf.valid = (conf.line_length < console_cols)
try:
config = next(conf for conf in reversed(configs) if conf.valid)
except StopIteration:
# If nothing was valid the screen was too narrow -- just use 1 col.
config = configs[0]
config.widths = [w for w in config.widths if w != 0]
config.cols = len(config.widths)
return config
def config_uniform_cols(elts, console_cols, padding):
max_len = max(len(elt) for elt in elts) + padding
cols = max(1, console_cols / max_len)
cols = min(len(elts), cols)
config = ColumnConfig(cols)
config.widths = [max_len] * cols
return config
def colify(elts, **options):
import sys
# Get keyword arguments or set defaults
output = options.get("output", sys.stdout)
indent = options.get("indent", 0)
padding = options.get("padding", 2)
# elts needs to be in an array so we can count the elements
if not type(elts) == list:
elts = list(elts)
console_cols = options.get("cols", None)
if not console_cols:
console_cols, console_rows = get_terminal_size()
elif type(console_cols) != int:
raise ValueError("Number of columns must be an int")
console_cols = max(1, console_cols - indent)
method = options.get("method", "variable")
if method == "variable":
config = config_variable_cols(elts, console_cols, padding)
elif method == "uniform":
config = config_uniform_cols(elts, console_cols, padding)
else:
raise ValueError("method must be one of: " + allowed_methods)
cols = config.cols
formats = ["%%-%ds" % width for width in config.widths[:-1]]
formats.append("%s") # last column has no trailing space
rows = (len(elts) + cols - 1) / cols
rows_last_col = len(elts) % rows
for row in xrange(rows):
output.write(" " * indent)
for col in xrange(cols):
elt = col * rows + row
output.write(formats[col] % elts[elt])
output.write("\n")
row += 1
if row == rows_last_col:
cols -= 1
if __name__ == "__main__":
import optparse, sys
cols, rows = get_terminal_size()
parser = optparse.OptionParser()
parser.add_option("-u", "--uniform", action="store_true", default=False,
help="Use uniformly sized columns instead of variable-size.")
parser.add_option("-p", "--padding", metavar="PADDING", action="store",
type=int, default=2, help="Spaces to add between columns. Default is 2.")
parser.add_option("-i", "--indent", metavar="SPACES", action="store",
type=int, default=0, help="Indent the output by SPACES. Default is 0.")
parser.add_option("-w", "--width", metavar="COLS", action="store",
type=int, default=cols, help="Width of the output in columns. Default is the terminal width.")
options, args = parser.parse_args()
method = "variable"
if options.uniform:
method = "uniform"
if sys.stdin.isatty():
parser.print_help()
sys.exit(1)
else:
colify([line.strip() for line in sys.stdin], method=method, **options.__dict__)
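A hedged usage sketch of the new colify helper, mirroring how the new spack list -i command calls it; the entries are the two packages added in this commit.

# Print entries in ls-style columns, indented four spaces.
from spack.colify import colify

colify(sorted(["libdwarf/20130207", "libelf/0.8.13"]), indent=4)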


@ -0,0 +1,52 @@
import os
def parse_rpaths(arguments):
"""argparse, for all its features, cannot understand most compilers'
rpath arguments. This handles '-Wl,', '-Xlinker', and '-R'"""
linker_args = []
other_args = []
def get_next(arg, args):
"""Get an expected next value of an iterator, or die if it's not there"""
try:
return next(args)
except StopIteration:
# quietly ignore -rpath and -Xlinker without args.
return None
# Separate linker args from non-linker args
args = iter(arguments)
for arg in args:
if arg.startswith('-Wl,'):
sub_args = [sub for sub in arg.replace('-Wl,', '', 1).split(',')]
linker_args.extend(sub_args)
elif arg == '-Xlinker':
target = get_next(arg, args)
if target != None:
linker_args.append(target)
else:
other_args.append(arg)
# Extract all the possible ways rpath can appear in linker args
# and append non-rpaths to other_args
rpaths = []
largs = iter(linker_args)
for arg in largs:
if arg == '-rpath':
target = get_next(arg, largs)
if target != None:
rpaths.append(target)
elif arg.startswith('-R'):
target = arg.replace('-R', '', 1)
if not target:
target = get_next(arg, largs)
if target == None: break
if os.path.isdir(target):
rpaths.append(target)
else:
other_args.extend([arg, target])
else:
other_args.append(arg)
return rpaths, other_args
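A small sketch of what parse_rpaths() does with typical linker arguments, as the cc wrapper uses it; the library path is an illustrative placeholder.

# Separate rpath-related flags from everything else.
from spack.compilation import parse_rpaths

args = ['-O2', '-Wl,-rpath,/opt/spack/opt/sys_type/libelf/0.8.13/lib', '-lelf']
rpaths, other_args = parse_rpaths(args)
# rpaths     -> ['/opt/spack/opt/sys_type/libelf/0.8.13/lib']
# other_args -> ['-O2', '-lelf']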


@ -0,0 +1,23 @@
class SpackException(Exception):
def __init__(self, message):
self.message = message
class FailedDownloadException(SpackException):
def __init__(self, url):
super(FailedDownloadException, self).__init__("Failed to fetch file from URL: " + url)
self.url = url
class InvalidPackageNameException(SpackException):
def __init__(self, name):
super(InvalidPackageNameException, self).__init__("Invalid package name: " + name)
self.name = name
class CommandFailedException(SpackException):
def __init__(self, command):
super(CommandFailedException, self).__init__("Failed to execute command: " + command)
self.command = command


@ -4,7 +4,8 @@
from version import Version
import tty
from fileutils import *
from utils import *
from spack.exception import *
# This lives in $prefix/lib/spack/spack/__file__
prefix = ancestor(__file__, 4)
@ -14,6 +15,7 @@
# spack directory hierarchy
lib_path = new_path(prefix, "lib", "spack")
env_path = new_path(lib_path, "env")
module_path = new_path(lib_path, "spack")
packages_path = new_path(module_path, "packages")
@ -23,20 +25,13 @@
install_path = new_path(prefix, "opt")
# Version information
version = Version("0.1")
spack_version = Version("0.1")
# User's editor from the environment
editor = Executable(os.environ.get("EDITOR", ""))
# Curl tool for fetching files.
curl = which("curl")
if not curl:
tty.die("spack requires curl. Make sure it is in your path.")
make = which("make")
make.add_default_arg("-j%d" % multiprocessing.cpu_count())
if not make:
tty.die("spack requires make. Make sure it is in your path.")
curl = which("curl", required=True)
verbose = False
debug = False


@ -1,17 +1,86 @@
import spack
from spack.fileutils import *
import re
import os
import sys
import string
import inspect
import glob
import spack
from spack.utils import *
import spack.arch as arch
import spack.version as version
import spack.attr as attr
# Valid package names
valid_package = r'^[a-zA-Z0-9_-]*$'
# Don't allow consecutive [_-] in package names
invalid_package = r'[_-][_-]+'
instances = {}
def filename_for(package):
def valid_name(pkg):
return re.match(valid_package, pkg) and not re.search(invalid_package, pkg)
def validate_name(pkg):
if not valid_name(pkg):
raise spack.InvalidPackageNameException(pkg)
def filename_for(pkg):
"""Get the filename where a package name should be stored."""
return new_path(spack.packages_path, "%s.py" % package.lower())
validate_name(pkg)
return new_path(spack.packages_path, "%s.py" % pkg)
def get(name):
file = filename_for(name)
def installed_packages(**kwargs):
"""Returns a dict from SysType to lists of Package objects."""
list_installed = kwargs.get('installed', False)
pkgs = {}
if not os.path.isdir(spack.install_path):
return pkgs
for sys_type in os.listdir(spack.install_path):
sys_type = arch.SysType(sys_type)
sys_path = new_path(spack.install_path, sys_type)
pkgs[sys_type] = [get(pkg) for pkg in os.listdir(sys_path)
if os.path.isdir(new_path(sys_path, pkg))]
return pkgs
def all_package_names():
"""Generator function for all packages."""
os.chdir(spack.packages_path)
for name in glob.glob("*.py"):
if name != '__init__.py':
yield re.sub('.py$', '', name)
def all_packages():
for name in all_package_names():
yield get(name)
def class_for(pkg):
"""Get a name for the class the package file should contain. Note that
conflicts don't matter because the classes are in different modules.
"""
validate_name(pkg)
class_name = string.capwords(pkg.replace('_', '-'), '-')
# If a class starts with a number, prefix it with Number_ to make it a valid
# Python class name.
if re.match(r'^[0-9]', class_name):
class_name = "Number_%s" % class_name
return class_name
def get_class(pkg):
file = filename_for(pkg)
if os.path.exists(file):
if not os.path.isfile(file):
@ -19,17 +88,63 @@ def get(name):
if not os.access(file, os.R_OK):
tty.die("Cannot read '%s'!" % file)
class_name = name.capitalize()
class_name = pkg.capitalize()
try:
module_name = "%s.%s" % (__name__, name)
module_name = "%s.%s" % (__name__, pkg)
module = __import__(module_name, fromlist=[class_name])
except ImportError, e:
tty.die("Error while importing %s.%s:\n%s" % (name, class_name, e.message))
tty.die("Error while importing %s.%s:\n%s" % (pkg, class_name, e.message))
klass = getattr(module, class_name)
if not inspect.isclass(klass):
tty.die("%s.%s is not a class" % (name, class_name))
tty.die("%s.%s is not a class" % (pkg, class_name))
return klass
def get(pkg, arch=arch.sys_type()):
key = (pkg, arch)
if not key in instances:
package_class = get_class(pkg)
instances[key] = package_class(arch)
return instances[key]
def compute_dependents():
"""Reads in all package files and sets dependence information on
Package objects in memory.
"""
for pkg in all_packages():
if pkg._dependents is None:
pkg._dependents = []
for dep in pkg.dependencies:
dpkg = get(dep.name)
if dpkg._dependents is None:
dpkg._dependents = []
dpkg._dependents.append(pkg.name)
def graph_dependencies(out=sys.stdout):
"""Print out a graph of all the dependencies between package.
Graph is in dot format."""
out.write('digraph G {\n')
out.write(' label = "Spack Dependencies"\n')
out.write(' labelloc = "b"\n')
out.write(' rankdir = "LR"\n')
out.write(' ranksep = "5"\n')
out.write('\n')
def quote(string):
return '"%s"' % string
deps = []
for pkg in all_packages():
out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))
for dep in pkg.dependencies:
deps.append((pkg.name, dep.name))
out.write('\n')
for pair in deps:
out.write(' "%s" -> "%s"\n' % pair)
out.write('}\n')
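A hedged sketch of the reworked packages module: get() now returns one cached Package instance per (name, architecture), and dependency metadata hangs off it.

# Illustrative use of the new packages.get() and dependency plumbing.
import spack.packages as packages

libdwarf = packages.get('libdwarf')            # one instance per (name, sys_type)
print(libdwarf.dependencies)                   # -> [<dep: libelf>], from depends_on("libelf")
print(packages.get('libdwarf') is libdwarf)    # -> True; cached in packages.instances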


@ -0,0 +1,41 @@
from spack import *
import os
# Only build certain parts of dwarf because the other ones break.
dwarf_dirs = ['libdwarf', 'dwarfdump2']
class Libdwarf(Package):
homepage = "http://www.example.com"
url = "http://reality.sgiweb.org/davea/libdwarf-20130207.tar.gz"
md5 = "64b42692e947d5180e162e46c689dfbf"
depends_on("libelf")
def clean(self):
for dir in dwarf_dirs:
with working_dir(dir):
if os.path.exists('Makefile'):
make('clean')
def install(self, prefix):
make.add_default_arg('ARFLAGS=rcs')
for dir in dwarf_dirs:
with working_dir(dir):
#configure("--prefix=%s" % prefix, '--enable-shared')
configure("--prefix=%s" % prefix)
make()
# Dwarf doesn't provide an install. Annoying.
mkdirp(bin, include, lib, man1)
with working_dir('libdwarf'):
install('libdwarf.a', lib)
#install('libdwarf.so', lib)
install('libdwarf.h', include)
install('dwarf.h', include)
with working_dir('dwarfdump2'):
install('dwarfdump', bin)
install('dwarfdump.conf', lib)
install('dwarfdump.1', man1)


@ -0,0 +1,13 @@
from spack import *
class Libelf(Package):
homepage = "http://www.mr511.de/software/english.html"
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
md5 = "4136d7b4c04df68b686570afa26988ac"
def install(self, prefix):
configure("--prefix=%s" % prefix,
"--disable-dependency-tracking",
"--disable-debug")
make()
make("install")


@ -1,7 +1,9 @@
import os
import re
import shutil
import spack
import packages
import tty
@ -11,8 +13,9 @@ def ensure_access(dir=spack.stage_path):
class Stage(object):
def __init__(self, stage_name):
def __init__(self, stage_name, url):
self.stage_name = stage_name
self.url = url
@property
def path(self):
@ -30,7 +33,15 @@ def setup(self):
@property
def archive_path(self):
def archive_file(self):
path = os.path.join(self.path, os.path.basename(self.url))
if os.path.exists(path):
return path
return None
@property
def expanded_archive_path(self):
""""Returns the path to the expanded archive directory if it's expanded;
None if the archive hasn't been expanded.
"""
@ -43,17 +54,54 @@ def archive_path(self):
def chdir(self):
"""Changes directory to the stage path. Or dies if it is not set up."""
self.setup()
if os.path.isdir(self.path):
os.chdir(self.path)
else:
tty.die("Attempt to chdir to stage before setup.")
tty.die("Setup failed: no such directory: " + self.path)
def fetch(self):
"""Downloads the file at URL to the stage. Returns true if it was downloaded,
false if it already existed."""
self.chdir()
if self.archive_file:
tty.msg("Already downloaded %s." % self.archive_file)
else:
tty.msg("Fetching %s" % self.url)
# Run curl but grab the mime type from the http headers
headers = spack.curl('-#', '-O', '-D', '-', self.url, return_output=True)
# output this if we somehow got an HTML file rather than the archive we
# asked for.
if re.search(r'Content-Type: text/html', headers):
tty.warn("The contents of '%s' look like HTML. The checksum will "+
"likely fail. Use 'spack clean %s' to delete this file. "
"The fix the gateway issue and install again." % (self.archive_file, self.name))
if not self.archive_file:
raise spack.FailedDownloadException(self.url)
return self.archive_file
def expand_archive(self):
self.chdir()
if not self.archive_file:
tty.die("Attempt to expand archive before fetching.")
decompress = spack.decompressor_for(self.archive_file)
decompress(self.archive_file)
def chdir_to_archive(self):
"""Changes directory to the expanded archive directory if it exists.
Dies with an error otherwise.
"""
path = self.archive_path
path = self.expanded_archive_path
if not path:
tty.die("Attempt to chdir before expanding archive.")
else:
@ -62,6 +110,16 @@ def chdir_to_archive(self):
tty.die("Archive was empty for '%s'" % self.name)
def restage(self):
"""Removes the expanded archive path if it exists, then re-expands the archive."""
if not self.archive_file:
tty.die("Attempt to restage when not staged.")
if self.expanded_archive_path:
shutil.rmtree(self.expanded_archive_path, True)
self.expand_archive()
def destroy(self):
"""Blows away the stage directory. Can always call setup() again."""
if os.path.exists(self.path):
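A hedged sketch of the reworked Stage API, mirroring how Package.do_fetch()/do_stage() and spack create drive it.

# Fetch and unpack libelf into its stage directory.
from spack.stage import Stage

stage = Stage('libelf-0.8.13', 'http://www.mr511.de/software/libelf-0.8.13.tar.gz')
stage.setup()
archive = stage.fetch()          # curl the tarball unless it is already present
stage.expand_archive()           # untar it next to the archive
stage.chdir_to_archive()         # cd into the expanded source tree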


@ -10,20 +10,24 @@
class VersionTest(unittest.TestCase):
def assert_not_detected(self, string):
self.assertIsNone(version.parse(string))
name, v = version.parse(string)
self.assertIsNone(v)
def assert_detected(self, name, v, string):
parsed_name, parsed_v = version.parse(string)
self.assertEqual(parsed_name, name)
self.assertEqual(parsed_v, version.Version(v))
def assert_detected(self, v, string):
self.assertEqual(v, version.parse(string))
def test_wwwoffle_version(self):
self.assert_detected(
'2.9h', 'http://www.gedanken.demon.co.uk/download-wwwoffle/wwwoffle-2.9h.tgz')
'wwwoffle', '2.9h', 'http://www.gedanken.demon.co.uk/download-wwwoffle/wwwoffle-2.9h.tgz')
def test_version_sourceforge_download(self):
self.assert_detected(
'1.21', 'http://sourceforge.net/foo_bar-1.21.tar.gz/download')
'foo_bar', '1.21', 'http://sourceforge.net/foo_bar-1.21.tar.gz/download')
self.assert_detected(
'1.21', 'http://sf.net/foo_bar-1.21.tar.gz/download')
'foo_bar', '1.21', 'http://sf.net/foo_bar-1.21.tar.gz/download')
def test_no_version(self):
self.assert_not_detected('http://example.com/blah.tar')
@ -31,159 +35,158 @@ def test_no_version(self):
def test_version_all_dots(self):
self.assert_detected(
'1.14','http://example.com/foo.bar.la.1.14.zip')
'foo.bar.la', '1.14','http://example.com/foo.bar.la.1.14.zip')
def test_version_underscore_separator(self):
self.assert_detected(
'1.1', 'http://example.com/grc_1.1.tar.gz')
'grc', '1.1', 'http://example.com/grc_1.1.tar.gz')
def test_boost_version_style(self):
self.assert_detected(
'1.39.0', 'http://example.com/boost_1_39_0.tar.bz2')
'boost', '1.39.0', 'http://example.com/boost_1_39_0.tar.bz2')
def test_erlang_version_style(self):
self.assert_detected(
'R13B', 'http://erlang.org/download/otp_src_R13B.tar.gz')
'otp', 'R13B', 'http://erlang.org/download/otp_src_R13B.tar.gz')
def test_another_erlang_version_style(self):
self.assert_detected(
'R15B01', 'https://github.com/erlang/otp/tarball/OTP_R15B01')
'otp', 'R15B01', 'https://github.com/erlang/otp/tarball/OTP_R15B01')
def test_yet_another_erlang_version_style(self):
self.assert_detected(
'R15B03-1', 'https://github.com/erlang/otp/tarball/OTP_R15B03-1')
'otp', 'R15B03-1', 'https://github.com/erlang/otp/tarball/OTP_R15B03-1')
def test_p7zip_version_style(self):
self.assert_detected(
'9.04',
'http://kent.dl.sourceforge.net/sourceforge/p7zip/p7zip_9.04_src_all.tar.bz2')
'p7zip', '9.04', 'http://kent.dl.sourceforge.net/sourceforge/p7zip/p7zip_9.04_src_all.tar.bz2')
def test_new_github_style(self):
self.assert_detected(
'1.1.4', 'https://github.com/sam-github/libnet/tarball/libnet-1.1.4')
'libnet', '1.1.4', 'https://github.com/sam-github/libnet/tarball/libnet-1.1.4')
def test_gloox_beta_style(self):
self.assert_detected(
'1.0-beta7', 'http://camaya.net/download/gloox-1.0-beta7.tar.bz2')
'gloox', '1.0-beta7', 'http://camaya.net/download/gloox-1.0-beta7.tar.bz2')
def test_sphinx_beta_style(self):
self.assert_detected(
'1.10-beta', 'http://sphinxsearch.com/downloads/sphinx-1.10-beta.tar.gz')
'sphinx', '1.10-beta', 'http://sphinxsearch.com/downloads/sphinx-1.10-beta.tar.gz')
def test_astyle_verson_style(self):
self.assert_detected(
'1.23', 'http://kent.dl.sourceforge.net/sourceforge/astyle/astyle_1.23_macosx.tar.gz')
'astyle', '1.23', 'http://kent.dl.sourceforge.net/sourceforge/astyle/astyle_1.23_macosx.tar.gz')
def test_version_dos2unix(self):
self.assert_detected(
'3.1', 'http://www.sfr-fresh.com/linux/misc/dos2unix-3.1.tar.gz')
'dos2unix', '3.1', 'http://www.sfr-fresh.com/linux/misc/dos2unix-3.1.tar.gz')
def test_version_internal_dash(self):
self.assert_detected(
'1.1-2', 'http://example.com/foo-arse-1.1-2.tar.gz')
'foo-arse', '1.1-2', 'http://example.com/foo-arse-1.1-2.tar.gz')
def test_version_single_digit(self):
self.assert_detected(
'45', 'http://example.com/foo_bar.45.tar.gz')
'foo_bar', '45', 'http://example.com/foo_bar.45.tar.gz')
def test_noseparator_single_digit(self):
self.assert_detected(
'45', 'http://example.com/foo_bar45.tar.gz')
'foo_bar', '45', 'http://example.com/foo_bar45.tar.gz')
def test_version_developer_that_hates_us_format(self):
self.assert_detected(
'1.2.3', 'http://example.com/foo-bar-la.1.2.3.tar.gz')
'foo-bar-la', '1.2.3', 'http://example.com/foo-bar-la.1.2.3.tar.gz')
def test_version_regular(self):
self.assert_detected(
'1.21', 'http://example.com/foo_bar-1.21.tar.gz')
'foo_bar', '1.21', 'http://example.com/foo_bar-1.21.tar.gz')
def test_version_github(self):
self.assert_detected(
'1.0.5', 'http://github.com/lloyd/yajl/tarball/1.0.5')
'yajl', '1.0.5', 'http://github.com/lloyd/yajl/tarball/1.0.5')
def test_version_github_with_high_patch_number(self):
self.assert_detected(
'1.2.34', 'http://github.com/lloyd/yajl/tarball/v1.2.34')
'yajl', '1.2.34', 'http://github.com/lloyd/yajl/tarball/v1.2.34')
def test_yet_another_version(self):
self.assert_detected(
'0.15.1b', 'http://example.com/mad-0.15.1b.tar.gz')
'mad', '0.15.1b', 'http://example.com/mad-0.15.1b.tar.gz')
def test_lame_version_style(self):
self.assert_detected(
'398-2', 'http://kent.dl.sourceforge.net/sourceforge/lame/lame-398-2.tar.gz')
'lame', '398-2', 'http://kent.dl.sourceforge.net/sourceforge/lame/lame-398-2.tar.gz')
def test_ruby_version_style(self):
self.assert_detected(
'1.9.1-p243', 'ftp://ftp.ruby-lang.org/pub/ruby/1.9/ruby-1.9.1-p243.tar.gz')
'ruby', '1.9.1-p243', 'ftp://ftp.ruby-lang.org/pub/ruby/1.9/ruby-1.9.1-p243.tar.gz')
def test_omega_version_style(self):
self.assert_detected(
'0.80.2', 'http://www.alcyone.com/binaries/omega/omega-0.80.2-src.tar.gz')
'omega', '0.80.2', 'http://www.alcyone.com/binaries/omega/omega-0.80.2-src.tar.gz')
def test_rc_style(self):
self.assert_detected(
'1.2.2rc1', 'http://downloads.xiph.org/releases/vorbis/libvorbis-1.2.2rc1.tar.bz2')
'libvorbis', '1.2.2rc1', 'http://downloads.xiph.org/releases/vorbis/libvorbis-1.2.2rc1.tar.bz2')
def test_dash_rc_style(self):
self.assert_detected(
'1.8.0-rc1', 'http://ftp.mozilla.org/pub/mozilla.org/js/js-1.8.0-rc1.tar.gz')
'js', '1.8.0-rc1', 'http://ftp.mozilla.org/pub/mozilla.org/js/js-1.8.0-rc1.tar.gz')
def test_angband_version_style(self):
self.assert_detected(
'3.0.9b', 'http://rephial.org/downloads/3.0/angband-3.0.9b-src.tar.gz')
'angband', '3.0.9b', 'http://rephial.org/downloads/3.0/angband-3.0.9b-src.tar.gz')
def test_stable_suffix(self):
self.assert_detected(
'1.4.14b', 'http://www.monkey.org/~provos/libevent-1.4.14b-stable.tar.gz')
'libevent', '1.4.14b', 'http://www.monkey.org/~provos/libevent-1.4.14b-stable.tar.gz')
def test_debian_style_1(self):
self.assert_detected(
'3.03', 'http://ftp.de.debian.org/debian/pool/main/s/sl/sl_3.03.orig.tar.gz')
'sl', '3.03', 'http://ftp.de.debian.org/debian/pool/main/s/sl/sl_3.03.orig.tar.gz')
def test_debian_style_2(self):
self.assert_detected(
'1.01b', 'http://ftp.de.debian.org/debian/pool/main/m/mmv/mmv_1.01b.orig.tar.gz')
'mmv', '1.01b', 'http://ftp.de.debian.org/debian/pool/main/m/mmv/mmv_1.01b.orig.tar.gz')
def test_imagemagick_style(self):
self.assert_detected(
'6.7.5-7', 'http://downloads.sf.net/project/machomebrew/mirror/ImageMagick-6.7.5-7.tar.bz2')
'ImageMagick', '6.7.5-7', 'http://downloads.sf.net/project/machomebrew/mirror/ImageMagick-6.7.5-7.tar.bz2')
def test_dash_version_dash_style(self):
self.assert_detected(
'3.4', 'http://www.antlr.org/download/antlr-3.4-complete.jar')
'antlr', '3.4', 'http://www.antlr.org/download/antlr-3.4-complete.jar')
def test_apache_version_style(self):
self.assert_detected(
'1.2.0-rc2', 'http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz')
'apache-cassandra', '1.2.0-rc2', 'http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz')
def test_jpeg_style(self):
self.assert_detected(
'8d', 'http://www.ijg.org/files/jpegsrc.v8d.tar.gz')
'jpegsrc', '8d', 'http://www.ijg.org/files/jpegsrc.v8d.tar.gz')
def test_more_versions(self):
self.assert_detected(
'1.4.1', 'http://pypy.org/download/pypy-1.4.1-osx.tar.bz2')
'pypy', '1.4.1', 'http://pypy.org/download/pypy-1.4.1-osx.tar.bz2')
self.assert_detected(
'0.9.8s', 'http://www.openssl.org/source/openssl-0.9.8s.tar.gz')
'openssl', '0.9.8s', 'http://www.openssl.org/source/openssl-0.9.8s.tar.gz')
self.assert_detected(
'1.5E', 'ftp://ftp.visi.com/users/hawkeyd/X/Xaw3d-1.5E.tar.gz')
'Xaw3d', '1.5E', 'ftp://ftp.visi.com/users/hawkeyd/X/Xaw3d-1.5E.tar.gz')
self.assert_detected(
'2.1.0beta', 'http://downloads.sourceforge.net/project/fann/fann/2.1.0beta/fann-2.1.0beta.zip')
'fann', '2.1.0beta', 'http://downloads.sourceforge.net/project/fann/fann/2.1.0beta/fann-2.1.0beta.zip')
self.assert_detected(
'2.0.1', 'ftp://iges.org/grads/2.0/grads-2.0.1-bin-darwin9.8-intel.tar.gz')
'grads', '2.0.1', 'ftp://iges.org/grads/2.0/grads-2.0.1-bin-darwin9.8-intel.tar.gz')
self.assert_detected(
'2.08', 'http://haxe.org/file/haxe-2.08-osx.tar.gz')
'haxe', '2.08', 'http://haxe.org/file/haxe-2.08-osx.tar.gz')
self.assert_detected(
'2007f', 'ftp://ftp.cac.washington.edu/imap/imap-2007f.tar.gz')
'imap', '2007f', 'ftp://ftp.cac.washington.edu/imap/imap-2007f.tar.gz')
self.assert_detected(
'3.3.12ga7', 'http://sourceforge.net/projects/x3270/files/x3270/3.3.12ga7/suite3270-3.3.12ga7-src.tgz')
'suite3270', '3.3.12ga7', 'http://sourceforge.net/projects/x3270/files/x3270/3.3.12ga7/suite3270-3.3.12ga7-src.tgz')
self.assert_detected(
'1.3.6p2', 'http://synergy.googlecode.com/files/synergy-1.3.6p2-MacOSX-Universal.zip')
'synergy', '1.3.6p2', 'http://synergy.googlecode.com/files/synergy-1.3.6p2-MacOSX-Universal.zip')
if __name__ == "__main__":
unittest.main()
unittest.main(failfast=True)


@ -32,6 +32,11 @@ def msg(msg, *args, **kwargs):
print "{}==>{} {}{}".format(color, white, str(msg), reset)
for arg in args: print indent + str(arg)
def info(msg, *args, **kwargs):
color = kwargs.get("color", blue)
print "{}==>{} {}".format(color, reset, str(msg))
for arg in args: print indent + str(arg)
def verbose(*args):
if spack.verbose: msg(*args, color=green)
@ -46,8 +51,8 @@ def warn(msg, *args):
print "{}Warning{}: {}".format(yellow, reset, str(msg))
for arg in args: print indent + str(arg)
def die(msg):
error(msg)
def die(msg, *args):
error(msg, *args)
sys.exit(1)
def pkg(msg):


@ -1,8 +1,12 @@
import os
import subprocess
import re
import errno
import shutil
import subprocess
import multiprocessing
from itertools import product
from contextlib import closing
import functools
from contextlib import closing, contextmanager
import tty
@ -14,17 +18,85 @@
ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(PRE_EXTS, EXTS)] + EXTS
def memoized(obj):
"""Decorator that caches the results of a function, storing them
in an attribute of that function."""
cache = obj.cache = {}
@functools.wraps(obj)
def memoizer(*args, **kwargs):
if args not in cache:
cache[args] = obj(*args, **kwargs)
return cache[args]
return memoizer
def make_make():
"""Gets a make set up with the proper default arguments."""
make = which('make', required=True)
if not env_flag("SPACK_NO_PARALLEL_MAKE"):
make.add_default_arg("-j%d" % multiprocessing.cpu_count())
return make
def install(src, dest):
tty.info("Installing %s to %s" % (src, dest))
shutil.copy(src, dest)
@contextmanager
def working_dir(dirname):
orig_dir = os.getcwd()
os.chdir(dirname)
yield
os.chdir(orig_dir)
def mkdirp(*paths):
for path in paths:
if not os.path.exists(path):
os.makedirs(path)
elif not os.path.isdir(path):
raise OSError(errno.EEXIST, "File already exists", path)
def env_flag(name):
if name in os.environ:
return os.environ[name].lower() == "true"
return False
def path_prepend(var_name, *directories):
path = os.environ.get(var_name, "")
path_str = ":".join(str(dir) for dir in directories)
if path == "":
os.environ[var_name] = path_str
else:
os.environ[var_name] = "%s:%s" % (path_str, path)
def pop_keys(dictionary, *keys):
for key in keys:
if key in dictionary:
dictionary.pop(key)
def remove_items(item_list, *items):
for item in items:
if item in item_list:
item_list.remove(item)
def has_whitespace(string):
return re.search(r'\s', string)
def new_path(prefix, *args):
path=prefix
path=str(prefix)
for elt in args:
path = os.path.join(path, elt)
path = os.path.join(path, str(elt))
if has_whitespace(path):
tty.die("Invalid path: '%s'. Use a path without whitespace.")
tty.die("Invalid path: '%s'. Use a path without whitespace." % path)
return path
@ -48,6 +120,7 @@ def add_default_arg(self, arg):
def __call__(self, *args, **kwargs):
"""Run the executable with subprocess.check_output, return output."""
return_output = kwargs.get("return_output", False)
fail_on_error = kwargs.get("fail_on_error", True)
quoted_args = [arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)]
if quoted_args:
@ -61,24 +134,30 @@ def __call__(self, *args, **kwargs):
if return_output:
return subprocess.check_output(cmd)
else:
elif fail_on_error:
return subprocess.check_call(cmd)
else:
return subprocess.call(cmd)
def __repr__(self):
return "<exe: %s>" % self.exe
def which(name, path=None):
def which(name, **kwargs):
"""Finds an executable in the path like command-line which."""
path = kwargs.get('path', os.environ.get('PATH', '').split(os.pathsep))
required = kwargs.get('required', False)
if not path:
path = os.environ.get('PATH', '').split(os.pathsep)
if not path:
return None
path = []
for dir in path:
exe = os.path.join(dir, name)
if os.access(exe, os.X_OK):
return Executable(exe)
if required:
tty.die("spack requires %s. Make sure it is in your path." % name)
return None
@ -94,10 +173,9 @@ def stem(path):
def decompressor_for(path):
"""Get the appropriate decompressor for a path."""
if which("tar"):
return Executable("tar -xf")
else:
tty.die("spack requires tar. Make sure it's on your path.")
tar = which('tar', required=True)
tar.add_default_arg('-xf')
return tar
def md5(filename, block_size=2**20):
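A few of the new helpers in utils.py in action (hedged sketch; the paths are placeholders).

# Illustrative usage of which(), new_path(), and working_dir() added above.
from spack.utils import new_path, which, working_dir

tar = which('tar', required=True)        # dies via tty if tar is not on PATH
prefix = new_path('/opt/spack/opt', 'sys_type', 'libelf', '0.8.13')
with working_dir('/tmp'):                # chdir in, chdir back on exit
    tar('--version')                     # Executable.__call__ runs subprocess.check_call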


@ -1,5 +1,5 @@
import tty
from fileutils import ALLOWED_ARCHIVE_TYPES
from utils import ALLOWED_ARCHIVE_TYPES
from urlparse import urlparse
ALLOWED_SCHEMES = ["http", "https", "ftp"]


@ -1,7 +1,7 @@
import os
import re
import fileutils
import utils
class Version(object):
"""Class to represent versions"""
@ -48,16 +48,16 @@ def intify(part):
return tuple(intify(v) for v in re.split(r'[_.-]+', v))
def parse(spec):
"""Try to extract a version from a filename. This is taken largely from
Homebrew's Version class."""
def parse_version(spec):
"""Try to extract a version from a filename or URL. This is taken
largely from Homebrew's Version class."""
if os.path.isdir(spec):
stem = os.path.basename(spec)
elif re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', spec):
stem = fileutils.stem(os.path.dirname(spec))
stem = utils.stem(os.path.dirname(spec))
else:
stem = fileutils.stem(spec)
stem = utils.stem(spec)
version_types = [
# GitHub tarballs, e.g. v1.2.3
@ -115,12 +115,36 @@ def parse(spec):
# e.g. http://www.ijg.org/files/jpegsrc.v8d.tar.gz
(r'\.v(\d+[a-z]?)', stem)]
for type in version_types:
regex, match_string = type[:2]
for vtype in version_types:
regex, match_string = vtype[:2]
match = re.search(regex, match_string)
if match and match.group(1) is not None:
if type[2:]:
return Version(type[2](match.group(1)))
if vtype[2:]:
return Version(vtype[2](match.group(1)))
else:
return Version(match.group(1))
return None
def parse_name(spec, ver=None):
if ver is None:
ver = parse_version(spec)
ntypes = (r'/sourceforge/([^/]+)/',
r'/([^/]+)/(tarball|zipball)/',
r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % ver,
r'/([^/]+)[_.-]v?%s' % ver,
r'/([^/]+)%s' % ver,
r'^([^/]+)[_.-]v?%s' % ver,
r'^([^/]+)%s' % ver)
for nt in ntypes:
match = re.search(nt, spec)
if match:
return match.group(1)
return None
def parse(spec):
ver = parse_version(spec)
name = parse_name(spec, ver)
return (name, ver)
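The updated tests earlier in this commit pin down the new behavior of parse(): it now returns a (name, version) pair instead of just a version. A short usage sketch:

import spack.version as version

name, v = version.parse('http://example.com/foo_bar-1.21.tar.gz')
# name -> 'foo_bar', v -> Version('1.21'), matching test_version_regular above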