Added libunwind and fixed link issues in cc.
parent 48b0351945
commit fb172bc702
9 changed files with 170 additions and 85 deletions
lib/spack/env/cc (vendored): 100 changed lines
@@ -1,12 +1,10 @@
 #!/usr/bin/env python
 import sys
 import os
+import re
 import subprocess
 import argparse
+from contextlib import closing
-# reimplement some tty stuff to minimize imports
-blue, green, yellow, reset = [
-    '\033[1;39m', '\033[1;92m', '\033[4;33m', '\033[0m']
 
 # Import spack parameters through the build environment.
 spack_lib = os.environ.get("SPACK_LIB")
@@ -19,14 +17,27 @@ sys.path.append(spack_lib)
 from spack.compilation import *
 import spack.tty as tty
 
 spack_prefix = get_env_var("SPACK_PREFIX")
+spack_build_root = get_env_var("SPACK_BUILD_ROOT")
 spack_debug = get_env_flag("SPACK_DEBUG")
 spack_deps = get_path("SPACK_DEPENDENCIES")
 spack_env_path = get_path("SPACK_ENV_PATH")
 
 # Figure out what type of operation we're doing
 command = os.path.basename(sys.argv[0])
+cpp, cc, ccld, ld = range(4)
+if command == 'cpp':
+    mode = cpp
+elif command == 'ld':
+    mode = ld
+elif '-E' in sys.argv:
+    mode = cpp
+elif '-c' in sys.argv:
+    mode = cc
+else:
+    mode = ccld
 
+# Parse out the includes, libs, etc. so we can adjust them if need be.
 parser = argparse.ArgumentParser(add_help=False)
 parser.add_argument("-I", action='append', default=[], dest='include_path')
 parser.add_argument("-L", action='append', default=[], dest='lib_path')
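The wrapper relies on argparse's parse_known_args(), which pulls out the flags registered above (-I, -L, -l) and returns everything it does not recognize untouched. A minimal standalone sketch of that behavior (illustration only, not part of the diff; the example compile line is made up):

import argparse

# Same flags the wrapper registers above.
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("-I", action='append', default=[], dest='include_path')
parser.add_argument("-L", action='append', default=[], dest='lib_path')
parser.add_argument("-l", action='append', default=[], dest='libs')

# parse_known_args() returns (known, leftovers) instead of erroring out on
# arguments it does not recognize, such as -O2, -c, or source files.
options, other_args = parser.parse_known_args(
    ['-I/usr/include', '-O2', '-c', '-L/usr/lib', '-lm', 'foo.c'])

print(options.include_path)   # ['/usr/include']
print(options.lib_path)       # ['/usr/lib']
print(options.libs)           # ['m']
print(other_args)             # ['-O2', '-c', 'foo.c']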
@@ -35,9 +46,38 @@ parser.add_argument("-l", action='append', default=[], dest='libs')
 options, other_args = parser.parse_known_args()
 rpaths, other_args = parse_rpaths(other_args)
 
-if rpaths:
-    print "{}Warning{}: Spack stripping non-spack rpaths: ".format(yellow, reset)
-    for rp in rpaths: print " %s" % rp
+# Add dependencies' include and lib paths to our compiler flags.
+def append_if_dir(path_list, *dirs):
+    full_path = os.path.join(*dirs)
+    if os.path.isdir(full_path):
+        path_list.append(full_path)
+
+for dep_dir in spack_deps:
+    append_if_dir(options.include_path, dep_dir, "include")
+    append_if_dir(options.lib_path, dep_dir, "lib")
+    append_if_dir(options.lib_path, dep_dir, "lib64")
+
+# Add our modified arguments to it.
+arguments = ['-I%s' % path for path in options.include_path]
+arguments += other_args
+arguments += ['-L%s' % path for path in options.lib_path]
+arguments += ['-l%s' % path for path in options.libs]
+
+# Add rpaths to install dir and its dependencies. We add both lib and lib64
+# here because we don't know which will be created.
+rpaths.extend(options.lib_path)
+rpaths.append('%s/lib' % spack_prefix)
+rpaths.append('%s/lib64' % spack_prefix)
+if mode == ccld:
+    arguments += ['-Wl,-rpath,%s' % p for p in rpaths]
+elif mode == ld:
+    pairs = [('-rpath', '%s' % p) for p in rpaths]
+    arguments += [item for sublist in pairs for item in sublist]
+
+# Unset some pesky environment variables
+for var in ["LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH"]:
+    if var in os.environ:
+        os.environ.pop(var)
+
 # Ensure that the delegated command doesn't just call this script again.
 clean_path = get_path("PATH")
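The two link modes above spell the same rpath list differently: the compiler driver takes the single token -Wl,-rpath,DIR, while a direct ld invocation takes the two-token form -rpath DIR. A standalone sketch with made-up install paths (illustration only):

# Made-up rpath list, encoded the two ways the wrapper does it above.
rpaths = ['/opt/spack/libdwarf/lib', '/opt/spack/libunwind/lib64']

# ccld mode: one token per rpath, tunneled through the compiler driver.
ccld_args = ['-Wl,-rpath,%s' % p for p in rpaths]
print(ccld_args)
# ['-Wl,-rpath,/opt/spack/libdwarf/lib', '-Wl,-rpath,/opt/spack/libunwind/lib64']

# ld mode: two tokens per rpath, passed straight to the linker.
pairs = [('-rpath', p) for p in rpaths]
ld_args = [item for sublist in pairs for item in sublist]
print(ld_args)
# ['-rpath', '/opt/spack/libdwarf/lib', '-rpath', '/opt/spack/libunwind/lib64']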
@@ -46,35 +86,15 @@ for item in ['.'] + spack_env_path:
     clean_path.remove(item)
 os.environ["PATH"] = ":".join(clean_path)
 
-# Add dependence's paths to our compiler flags.
-def append_if_dir(path_list, prefix, *dirs):
-    full_path = os.path.join(prefix, *dirs)
-    if os.path.isdir(full_path):
-        path_list.append(full_path)
-
-for prefix in spack_deps:
-    append_if_dir(options.include_path, prefix, "include")
-    append_if_dir(options.lib_path, prefix, "lib")
-    append_if_dir(options.lib_path, prefix, "lib64")
-
-# Add our modified arguments to it.
-arguments = ['-I%s' % path for path in options.include_path]
-arguments += other_args
-arguments += ['-L%s' % path for path in options.lib_path]
-arguments += ['-l%s' % path for path in options.libs]
-
-spack_rpaths = [spack_prefix] + spack_deps
-arguments += ['-Wl,-rpath,%s/lib64' % path for path in spack_rpaths]
-arguments += ['-Wl,-rpath,%s/lib' % path for path in spack_rpaths]
-
-# Unset some pesky environment variables
-for var in ["LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH"]:
-    if var in os.environ:
-        os.environ.pop(var)
+full_command = [command] + arguments
 
 if spack_debug:
-    sys.stderr.write("{}==>{} {} {}\n".format(
-        green, reset, command, " ".join(arguments)))
+    input_log = os.path.join(spack_build_root, 'spack_cc_in.log')
+    output_log = os.path.join(spack_build_root, 'spack_cc_out.log')
+    with closing(open(input_log, 'a')) as log:
+        args = [os.path.basename(sys.argv[0])] + sys.argv[1:]
+        log.write("%s\n" % " ".join(arg.replace(' ', r'\ ') for arg in args))
+    with closing(open(output_log, 'a')) as log:
+        log.write("%s\n" % " ".join(full_command))
 
-rcode = subprocess.call([command] + arguments)
+rcode = subprocess.call(full_command)
 sys.exit(rcode)
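The debug branch escapes embedded spaces before writing each command line to spack_cc_in.log and spack_cc_out.log, so a logged line can later be split back into its original arguments. A small sketch of that escaping (illustration only; the argument list is made up):

# Spaces inside a single argument are escaped, so each logged line can be
# split on unescaped whitespace to recover the original argument list.
args = ['cc', '-DMSG="hello world"', '-c', 'foo.c']
line = " ".join(arg.replace(' ', r'\ ') for arg in args)
print(line)   # cc -DMSG="hello\ world" -c foo.c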
@@ -65,7 +65,7 @@ def __init__(self, name, parallel):
 
     def __call__(self, *args, **kwargs):
         parallel = kwargs.get('parallel', self.parallel)
-        env_parallel = not env_flag("SPACK_NO_PARALLEL_MAKE")
+        env_parallel = not env_flag(SPACK_NO_PARALLEL_MAKE)
 
         if parallel and env_parallel:
            args += ("-j%d" % multiprocessing.cpu_count(),)
@@ -88,13 +88,17 @@ def __init__(self, arch=arch.sys_type()):
         # Name of package is the name of its module (the file that contains it)
         self.name = inspect.getmodulename(self.module.__file__)
 
+        # Don't allow the default homepage.
+        if re.search(r'example.com', self.homepage):
+            tty.die("Bad homepage in %s: %s" % (self.name, self.homepage))
+
         # Make sure URL is an allowed type
         validate.url(self.url)
 
         # Set up version
         attr.setdefault(self, 'version', version.parse_version(self.url))
         if not self.version:
-            tty.die("Couldn't extract version from '%s'. " +
+            tty.die("Couldn't extract version from %s. " +
                     "You must specify it explicitly for this URL." % self.url)
 
         # This adds a bunch of convenient commands to the package's module scope.
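Because re.search() matches anywhere in the string, the new homepage check rejects any value that still contains the placeholder domain, which is the same default the libdwarf package drops further down. A quick standalone illustration (not part of the diff):

import re

# re.search finds the pattern anywhere in the string, so a homepage still
# pointing at the placeholder domain is rejected.
for homepage in ("http://www.example.com", "http://www.nongnu.org/libunwind/"):
    if re.search(r'example.com', homepage):
        print("rejected: %s" % homepage)
    else:
        print("accepted: %s" % homepage)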
@@ -109,6 +113,9 @@ def __init__(self, arch=arch.sys_type()):
         # Whether to remove intermediate build/install when things go wrong.
         self.dirty = False
 
+        # stage used to build this package.
+        self.stage = Stage(self.stage_name, self.url)
+
 
     def make_make(self):
         """Create a make command set up with the proper default arguments."""
@@ -201,11 +208,6 @@ def all_dependents(self):
         return tuple(all_deps)
 
 
-    @property
-    def stage(self):
-        return Stage(self.stage_name, self.url)
-
-
     @property
     def stage_name(self):
         return "%s-%s" % (self.name, self.version)
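The removed @property built a brand-new Stage on every attribute access; the replacement set up in __init__ above creates one Stage and reuses it. A stripped-down sketch of the difference, with a stand-in Stage class and made-up values (illustration only):

class Stage(object):
    # Stand-in for spack's Stage, just enough to show object identity.
    def __init__(self, name, url):
        self.name, self.url = name, url

class PropertyStyle(object):
    @property
    def stage(self):
        # A new Stage is constructed on every attribute access.
        return Stage("libunwind-1.1", "http://example.invalid/libunwind-1.1.tar.gz")

class CachedStyle(object):
    def __init__(self):
        # One Stage, created up front and shared by every later use.
        self.stage = Stage("libunwind-1.1", "http://example.invalid/libunwind-1.1.tar.gz")

p, c = PropertyStyle(), CachedStyle()
print(p.stage is p.stage)   # False: a different object each time
print(c.stage is c.stage)   # True: the same object each time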
@@ -270,7 +272,7 @@ def do_stage(self):
 
         archive_dir = stage.expanded_archive_path
         if not archive_dir:
-            tty.msg("Staging archive: '%s'" % stage.archive_file)
+            tty.msg("Staging archive: %s" % stage.archive_file)
             stage.expand_archive()
         else:
             tty.msg("Already staged %s" % self.name)
@@ -316,7 +318,7 @@ def setup_install_environment(self):
 
         # Add spack environment at front of path and pass the
         # lib location along so the compiler script can find spack
-        os.environ["SPACK_LIB"] = lib_path
+        os.environ[SPACK_LIB] = lib_path
 
         # Fix for case-insensitive file systems. Conflicting links are
         # in directories called "case*" within the env directory.
@@ -326,14 +328,17 @@ def setup_install_environment(self):
             if file.startswith("case") and os.path.isdir(path):
                 env_paths.append(path)
         path_put_first("PATH", env_paths)
-        path_set("SPACK_ENV_PATH", env_paths)
+        path_set(SPACK_ENV_PATH, env_paths)
 
         # Pass along prefixes of dependencies here
-        path_set("SPACK_DEPENDENCIES",
+        path_set(SPACK_DEPENDENCIES,
                  [dep.package.prefix for dep in self.dependencies])
 
         # Install location
-        os.environ["SPACK_PREFIX"] = self.prefix
+        os.environ[SPACK_PREFIX] = self.prefix
 
+        # Build root for logging.
+        os.environ[SPACK_BUILD_ROOT] = self.stage.expanded_archive_path
+
 
     def do_install_dependencies(self):
@@ -379,7 +384,7 @@ def do_clean(self):
     def clean(self):
         """By default just runs make clean. Override if this isn't good."""
         try:
-            make = MakeExecutable('make')
+            make = MakeExecutable('make', self.parallel)
             make('clean')
             tty.msg("Successfully cleaned %s" % self.name)
         except subprocess.CalledProcessError, e:

@@ -1,7 +1,10 @@
 import spack.packages as packages
+import spack.tty as tty
+import spack.stage as stage
 
 def setup_parser(subparser):
     subparser.add_argument('names', nargs='+', help="name(s) of package(s) to clean")
 
     subparser.add_mutually_exclusive_group()
     subparser.add_argument('-c', "--clean", action="store_true", dest='clean',
                            help="run make clean in the stage directory (default)")
@@ -9,8 +12,17 @@ def setup_parser(subparser):
                            help="delete and re-expand the entire stage directory")
     subparser.add_argument('-d', "--dist", action="store_true", dest='dist',
                            help="delete the downloaded archive.")
+    subparser.add_argument('-a', "--all", action="store_true", dest='purge',
+                           help="delete the entire build staging area")
 
 def clean(args):
+    if args.purge:
+        stage.purge()
+        return
+
+    if not args.names:
+        tty.die("spack clean requires at least one package name.")
+
     for name in args.names:
         package = packages.get(name)
         if args.dist:

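The new -a/--all flag stores into args.purge, which clean() now checks before it ever looks at package names. A minimal standalone sketch of that flag wiring (illustration only):

import argparse

# Mirrors the option added above: '--all' stores True into dest='purge'.
parser = argparse.ArgumentParser(prog='spack clean')
parser.add_argument('names', nargs='+', help="name(s) of package(s) to clean")
parser.add_argument('-a', "--all", action="store_true", dest='purge',
                    help="delete the entire build staging area")

args = parser.parse_args(['--all', 'libdwarf'])
print("%s %s" % (args.purge, args.names))   # True ['libdwarf']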
@@ -28,6 +28,8 @@ def install(self, prefix):
 
 def setup_parser(subparser):
     subparser.add_argument('url', nargs='?', help="url of package archive")
+    subparser.add_argument('-f', '--force', action='store_true', dest='force',
+                           help="Remove existing package file.")
 
 
 def create(args):
@@ -48,12 +50,12 @@ def create(args):
         tty.die("Couldn't guess a version string from %s." % url)
 
     path = packages.filename_for(name)
-    if os.path.exists(path):
+    if not args.force and os.path.exists(path):
         tty.die("%s already exists." % path)
 
     # make a stage and fetch the archive.
     try:
-        stage = Stage(name, url)
+        stage = Stage("%s-%s" % (name, version), url)
         archive_file = stage.fetch()
     except spack.FailedDownloadException, e:
         tty.die(e.message)

@@ -1,4 +1,6 @@
 import os
+import sys
 
 
 def get_env_var(name, required=True):
     value = os.environ.get(name)
@@ -16,16 +18,16 @@ def get_env_flag(name, required=False):
 
 
 def get_path(name):
-    path = os.environ.get(name, "")
-    return path.split(":")
+    path = os.environ.get(name, "").strip()
+    if path:
+        return path.split(":")
+    else:
+        return []
 
 
 def parse_rpaths(arguments):
     """argparse, for all its features, cannot understand most compilers'
        rpath arguments. This handles '-Wl,', '-Xlinker', and '-R'"""
-    linker_args = []
-    other_args = []
 
     def get_next(arg, args):
         """Get an expected next value of an iterator, or die if it's not there"""
         try:
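The get_path() fix works around a str.split() surprise: splitting an empty string still yields one element, so an unset variable used to come back as [''] rather than []. A standalone sketch (the helper below takes the raw string instead of reading os.environ, and its name is made up):

# Why the guard matters: splitting an empty string does NOT give an empty list.
print("".split(":"))         # ['']  -- one bogus, empty path entry
print("/a:/b".split(":"))    # ['/a', '/b']

def split_env_path(value):
    # Simplified stand-in for the fixed get_path(): strip first, then only
    # split when something is left.
    value = value.strip()
    return value.split(":") if value else []

print(split_env_path(""))         # []
print(split_env_path(" /a:/b "))  # ['/a', '/b']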
@@ -34,23 +36,32 @@ def get_next(arg, args):
         # quietly ignore -rpath and -Xlinker without args.
         return None
 
-    # Separate linker args from non-linker args
-    args = iter(arguments)
-    for arg in args:
-        if arg.startswith('-Wl,'):
-            sub_args = [sub for sub in arg.replace('-Wl,', '', 1).split(',')]
-            linker_args.extend(sub_args)
-        elif arg == '-Xlinker':
-            target = get_next(arg, args)
-            if target != None:
-                linker_args.append(target)
-        else:
-            other_args.append(arg)
+    other_args = []
+    def linker_args():
+        """This generator function allows us to parse the linker args separately
+           from the compiler args, so that we can handle them more naturally.
+        """
+        args = iter(arguments)
+        for arg in args:
+            if arg.startswith('-Wl,'):
+                sub_args = [sub for sub in arg.replace('-Wl,', '', 1).split(',')]
+                for arg in sub_args:
+                    yield arg
+            elif arg == '-Xlinker':
+                target = get_next(arg, args)
+                if target != None:
+                    yield target
+            else:
+                other_args.append(arg)
 
-    # Extract all the possible ways rpath can appear in linker args
-    # and append non-rpaths to other_args
+    # Extract all the possible ways rpath can appear in linker args, then
+    # append non-rpaths to other_args. This happens in-line as the linker
+    # args are extracted, so we preserve the original order of arguments.
+    # This is important for args like --whole-archive, --no-whole-archive,
+    # and others that tell the linker how to handle the next few libraries
+    # it encounters on the command line.
     rpaths = []
-    largs = iter(linker_args)
+    largs = linker_args()
     for arg in largs:
         if arg == '-rpath':
             target = get_next(arg, largs)

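A condensed, standalone version of the token splitting that the new linker_args() generator performs, with the later -rpath extraction left out (function name and argument values below are made up for illustration):

def iter_linker_args(arguments, other_args):
    # Yield linker-level tokens hidden inside -Wl,... and -Xlinker arguments,
    # collecting everything else into other_args (simplified sketch).
    args = iter(arguments)
    for arg in args:
        if arg.startswith('-Wl,'):
            for sub in arg.replace('-Wl,', '', 1).split(','):
                yield sub
        elif arg == '-Xlinker':
            target = next(args, None)
            if target is not None:
                yield target
        else:
            other_args.append(arg)

other = []
tokens = list(iter_linker_args(
    ['-O2', '-Wl,-rpath,/opt/lib', '-Xlinker', '-soname',
     '-Xlinker', 'libfoo.so', 'foo.o'], other))
print(tokens)   # ['-rpath', '/opt/lib', '-soname', 'libfoo.so']
print(other)    # ['-O2', 'foo.o']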
@@ -35,3 +35,14 @@
 
 verbose = False
 debug = False
+
+# Whether stage should use tmp filesystem or build in the spack prefix
+use_tmp_stage = True
+
+# Important environment variables
+SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE'
+SPACK_LIB = 'SPACK_LIB'
+SPACK_ENV_PATH = 'SPACK_ENV_PATH'
+SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
+SPACK_PREFIX = 'SPACK_PREFIX'
+SPACK_BUILD_ROOT = 'SPACK_BUILD_ROOT'

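Replacing scattered string literals with these module-level constants means a misspelled name now fails loudly as a NameError instead of silently reading or setting the wrong environment variable. A quick sketch (the misspelling is deliberate):

import os

SPACK_PREFIX = 'SPACK_PREFIX'
os.environ[SPACK_PREFIX] = '/opt/spack/libunwind'

# A typo in a string key is silent: it just misses the real variable.
print(os.environ.get('SPACK_PERFIX'))   # None

# A typo in the constant's name fails immediately.
try:
    os.environ[SPACK_PERFIX]
except NameError as e:
    print(e)   # name 'SPACK_PERFIX' is not defined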
@@ -5,7 +5,7 @@
 dwarf_dirs = ['libdwarf', 'dwarfdump2']
 
 class Libdwarf(Package):
-    homepage = "http://www.example.com"
+    homepage = "http://reality.sgiweb.org/davea/dwarf.html"
     url = "http://reality.sgiweb.org/davea/libdwarf-20130207.tar.gz"
     md5 = "64b42692e947d5180e162e46c689dfbf"

lib/spack/spack/packages/libunwind.py (new file): 11 lines
@@ -0,0 +1,11 @@
+from spack import *
+
+class Libunwind(Package):
+    homepage = "http://www.nongnu.org/libunwind/"
+    url = "http://download.savannah.gnu.org/releases/libunwind/libunwind-1.1.tar.gz"
+    md5 = "fb4ea2f6fbbe45bf032cd36e586883ce"
+
+    def install(self, prefix):
+        configure("--prefix=%s" % prefix)
+        make()
+        make("install")

@@ -1,6 +1,7 @@
 import os
 import re
 import shutil
+import tempfile
 
 import spack
 import packages
@@ -12,6 +13,12 @@ def ensure_access(dir=spack.stage_path):
         tty.die("Insufficient permissions on directory %s" % dir)
 
 
+def purge():
+    """Remove the entire stage path."""
+    if os.path.isdir(spack.stage_path):
+        shutil.rmtree(spack.stage_path, True)
+
+
 class Stage(object):
     def __init__(self, stage_name, url):
         self.stage_name = stage_name
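In purge() above, the second positional argument to shutil.rmtree() is ignore_errors, so wiping a missing or half-removed staging tree does not raise. A tiny standalone sketch using a throwaway temporary directory (illustration only):

import os
import shutil
import tempfile

# rmtree(path, True) passes ignore_errors=True positionally, so purging a
# partially removed tree will not blow up the clean command.
stage_path = tempfile.mkdtemp(prefix='spack-stage-')
os.mkdir(os.path.join(stage_path, 'libdwarf-20130207'))

if os.path.isdir(stage_path):
    shutil.rmtree(stage_path, True)

print(os.path.isdir(stage_path))   # False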
@@ -42,8 +49,8 @@ def archive_file(self):
 
     @property
     def expanded_archive_path(self):
-        """"Returns the path to the expanded archive directory if it's expanded;
+        """Returns the path to the expanded archive directory if it's expanded;
            None if the archive hasn't been expanded.
         """
         for file in os.listdir(self.path):
             archive_path = spack.new_path(self.path, file)
@@ -72,14 +79,20 @@ def fetch(self):
         tty.msg("Fetching %s" % self.url)
 
         # Run curl but grab the mime type from the http headers
-        headers = spack.curl('-#', '-O', '-D', '-', self.url, return_output=True)
+        headers = spack.curl('-#',        # status bar
+                             '-O',        # save file to disk
+                             '-D', '-',   # print out HTML headers
+                             '-L', self.url, return_output=True)
 
-        # output this if we somehow got an HTML file rather than the archive we
-        # asked for.
-        if re.search(r'Content-Type: text/html', headers):
-            tty.warn("The contents of %s look like HTML. The checksum will "+
-                     "likely fail. Use 'spack clean %s' to delete this file. "
-                     "The fix the gateway issue and install again." % (self.archive_file, self.name))
+        # Check if we somehow got an HTML file rather than the archive we
+        # asked for. We only look at the last content type, to handle
+        # redirects properly.
+        content_types = re.findall(r'Content-Type:[^\r\n]+', headers)
+        if content_types and 'text/html' in content_types[-1]:
+            tty.warn("The contents of " + self.archive_file + " look like HTML.",
+                     "The checksum will likely be bad. If it is, you can use",
+                     "'spack clean --all' to remove the bad archive, then fix",
+                     "your internet gateway issue and install again.")
 
         if not self.archive_file:
             raise FailedDownloadException(url)
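With -L curl follows redirects and -D - dumps the headers of every response, so the captured text can contain several Content-Type lines; only the last one describes the body that actually landed on disk. A standalone sketch with a made-up header dump (illustration only):

import re

# Made-up header dump: a redirect response followed by the real download.
headers = (
    "HTTP/1.1 302 Found\r\n"
    "Location: http://mirror.example/libunwind-1.1.tar.gz\r\n"
    "Content-Type: text/html; charset=UTF-8\r\n"
    "\r\n"
    "HTTP/1.1 200 OK\r\n"
    "Content-Type: application/x-gzip\r\n"
    "\r\n"
)

content_types = re.findall(r'Content-Type:[^\r\n]+', headers)
print(content_types)
# ['Content-Type: text/html; charset=UTF-8', 'Content-Type: application/x-gzip']

# Only the last response describes the file that was actually saved.
if content_types and 'text/html' in content_types[-1]:
    print("warning: download looks like an HTML page")
else:
    print("looks like a real archive")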