Merge pull request #8 in SCALE/spack from features/boost to develop
# By Todd Gamblin
# Via Todd Gamblin
* commit 'a0c91791c5ac995b80111a92c94d8fb760caa6e3':
  SPACK-25: Add package for boost. Works on Linux currently.
  Better temp directory selection. Status messages now show install and stage directories.
  Better version wildcard handling, better spidering
Commit c59e23c1f9
9 changed files with 108 additions and 28 deletions
.gitignore (vendored): 2 changes

@@ -1,6 +1,6 @@
-/var/spack/stage
 *.pyc
 /opt/
+/var/
 *~
 .DS_Store
 .idea
@@ -41,5 +41,7 @@ def __init__(self, message):
 class NoNetworkConnectionError(SpackError):
     """Raised when an operation needs an internet connection."""
     def __init__(self, message, url):
-        super(NoNetworkConnectionError, self).__init__(message)
+        super(NoNetworkConnectionError, self).__init__(
+            "No network connection: " + str(message),
+            "URL was: " + str(url))
         self.url = url
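For context, the new constructor now passes two arguments up to SpackError. A minimal sketch of how the error might be raised and caught, using a stand-in SpackError base class (assumed here for illustration; the real base class is not shown in this diff):

    class SpackError(Exception):
        # Stand-in: assumes SpackError keeps a short message plus a longer one.
        def __init__(self, message, long_message=None):
            super(SpackError, self).__init__(message)
            self.long_message = long_message

    class NoNetworkConnectionError(SpackError):
        """Raised when an operation needs an internet connection."""
        def __init__(self, message, url):
            super(NoNetworkConnectionError, self).__init__(
                "No network connection: " + str(message),
                "URL was: " + str(url))
            self.url = url

    try:
        raise NoNetworkConnectionError("connection timed out", "http://example.com")
    except NoNetworkConnectionError as e:
        print(e)               # No network connection: connection timed out
        print(e.long_message)  # URL was: http://example.com
        print(e.url)           # http://example.com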
@@ -23,6 +23,7 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import os
+import tempfile
 
 from llnl.util.filesystem import *
@@ -92,9 +93,11 @@
 # Use a %u to add a username to the stage paths here, in case this
 # is a shared filesystem. Spack will use the first of these paths
 # that it can create.
-tmp_dirs = ['/nfs/tmp2/%u/spack-stage',
-            '/var/tmp/%u/spack-stage',
-            '/tmp/%u/spack-stage']
+tmp_dirs = []
+_default_tmp = tempfile.gettempdir()
+if _default_tmp != os.getcwd():
+    tmp_dirs.append(os.path.join(_default_tmp, 'spack-stage'))
+tmp_dirs.append('/nfs/tmp2/%u/spack-stage')
 
 # Whether spack should allow installation of unsafe versions of
 # software. "Unsafe" versions are ones it doesn't have a checksum
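For context, a minimal sketch of how candidates like these might be tried in order, picking the first directory that can actually be created. The %u-to-username expansion and the first-writable selection live elsewhere in Spack and are assumptions here, not part of this hunk:

    import getpass
    import os
    import tempfile

    # Build the candidate list the same way the new code above does.
    tmp_dirs = []
    _default_tmp = tempfile.gettempdir()
    if _default_tmp != os.getcwd():
        tmp_dirs.append(os.path.join(_default_tmp, 'spack-stage'))
    tmp_dirs.append('/nfs/tmp2/%u/spack-stage')

    def first_usable_stage_dir(candidates):
        # Assumed behavior: expand %u to the user name and return the first
        # path that already exists or can be created.
        for path in candidates:
            path = path.replace('%u', getpass.getuser())
            try:
                if not os.path.isdir(path):
                    os.makedirs(path)
                return path
            except OSError:
                continue
        return None

    print(first_usable_stage_dir(tmp_dirs))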
@@ -568,8 +568,9 @@ def do_stage(self):
         if not archive_dir:
             tty.msg("Staging archive: %s" % self.stage.archive_file)
             self.stage.expand_archive()
+            tty.msg("Created stage directory in %s." % self.stage.path)
         else:
-            tty.msg("Already staged %s" % self.name)
+            tty.msg("Already staged %s in %s." % (self.name, self.stage.path))
         self.stage.chdir_to_archive()
@@ -631,7 +632,7 @@ def do_install(self, **kwargs):
             raise ValueError("Can only install concrete packages.")
 
         if os.path.exists(self.prefix):
-            tty.msg("%s is already installed." % self.name)
+            tty.msg("%s is already installed in %s." % (self.name, self.prefix))
             return
 
         if not ignore_deps:
@@ -206,7 +206,7 @@ def wildcard_version(path):
    ver, start, end = parse_version_string_with_indices(path)
 
    v = Version(ver)
-   parts = list(re.escape(p) for p in path.split(str(v)))
+   parts = [re.escape(p) for p in re.split(v.wildcard(), path)]
 
    # Make a group for the wildcard, so it will be captured by the regex.
    version_group = '(%s)' % v.wildcard()
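For context, splitting on the wildcard regex instead of the literal version string matters for URLs that spell the version two ways. A small illustration using a hypothetical wildcard pattern for 1.55.0 (the real pattern comes from Version.wildcard() and is more general):

    import re

    path = ("http://downloads.sourceforge.net/project/boost/"
            "boost/1.55.0/boost_1_55_0.tar.bz2")

    # Assumed shape of the wildcard for 1.55.0: separators may be _, ., or -
    wildcard = r"1[_.-]55[_.-]0"

    # Splitting on the literal version only breaks at the dotted spelling,
    # leaving "1_55_0" embedded in one of the parts.
    print(path.split("1.55.0"))

    # Splitting on the wildcard regex breaks at both spellings.
    print(re.split(wildcard, path))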
@@ -23,11 +23,12 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import re
+import sys
 import subprocess
 import urllib2
 import urlparse
 from multiprocessing import Pool
-from HTMLParser import HTMLParser
+from HTMLParser import HTMLParser, HTMLParseError
 
 import llnl.util.tty as tty
@@ -67,7 +68,7 @@ def _spider(args):
       pool. Firing off all the child links at once makes the fetch MUCH
       faster for pages with lots of children.
    """
-   url, depth, max_depth = args
+   url, depth, max_depth, raise_on_error = args
 
    pages = {}
    try:
@@ -81,11 +82,12 @@ def _spider(args):
        resp = urllib2.urlopen(req, timeout=TIMEOUT)
 
        if not "Content-type" in resp.headers:
-           print "ignoring page " + url
+           tty.warn("ignoring page " + url)
            return pages
 
        if not resp.headers["Content-type"].startswith('text/html'):
-           print "ignoring page " + url + " with content type " + resp.headers["Content-type"]
+           tty.warn("ignoring page " + url + " with content type " +
+                    resp.headers["Content-type"])
            return pages
 
        # Do the real GET request when we know it's just HTML.
@@ -100,9 +102,9 @@ def _spider(args):
        # If we're not at max depth, parse out the links in the page
        if depth < max_depth:
            link_parser = LinkParser()
 
            subcalls = []
            link_parser.feed(page)
 
            while link_parser.links:
                raw_link = link_parser.links.pop()
@@ -112,7 +114,7 @@ def _spider(args):
 
                # Evaluate the link relative to the page it came from.
                abs_link = urlparse.urljoin(response_url, raw_link)
-               subcalls.append((abs_link, depth+1, max_depth))
+               subcalls.append((abs_link, depth+1, max_depth, raise_on_error))
 
            if subcalls:
                pool = Pool(processes=len(subcalls))
@@ -121,13 +123,21 @@ def _spider(args):
                    pages.update(d)
 
    except urllib2.URLError, e:
-       # Only report it if it's the root page. We ignore errors when spidering.
-       if depth == 1:
-           raise spack.error.NoNetworkConnectionError(e.reason, url)
+       if raise_on_error:
+           raise spack.error.NoNetworkConnectionError(str(e), url)
+
+   except HTMLParseError, e:
+       # This error indicates that Python's HTML parser sucks.
+       msg = "Got an error parsing HTML."
+
+       # Pre-2.7.3 Pythons in particular have rather prickly HTML parsing.
+       if sys.version_info[:3] < (2,7,3):
+           msg += " Use Python 2.7.3 or newer for better HTML parsing."
+
+       tty.warn(msg, url, "HTMLParseError: " + str(e))
 
    except Exception, e:
-       # Other types of errors are completely ignored.
-       pass
+       pass  # Other types of errors are completely ignored.
 
    return pages
@@ -141,5 +151,5 @@ def get_pages(root_url, **kwargs):
       performance over a sequential fetch.
    """
    max_depth = kwargs.setdefault('depth', 1)
-   pages = _spider((root_url, 1, max_depth))
+   pages = _spider((root_url, 1, max_depth, False))
    return pages
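For context, the extra tuple element exists because multiprocessing.Pool.map hands each worker exactly one argument, so _spider packs its parameters into a single tuple. A simplified, self-contained sketch of that pattern (the fetch itself is faked here; this is not Spack's actual spidering logic):

    from multiprocessing import Pool

    def _visit(args):
        # Pool.map passes one object per call, so parameters travel as a tuple.
        url, depth, max_depth, raise_on_error = args
        # Stand-in for the real fetch; a real implementation would only
        # re-raise errors when raise_on_error is set.
        return {url: depth}

    if __name__ == '__main__':
        subcalls = [('http://example.com/a', 2, 3, False),
                    ('http://example.com/b', 2, 3, False)]
        pool = Pool(processes=len(subcalls))
        pages = {}
        for d in pool.map(_visit, subcalls):
            pages.update(d)
        print(pages)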
@@ -152,21 +152,24 @@ def a_or_n(seg):
            return r'[a-zA-Z]+'
 
        version = self.version
-       separators = ('',) + self.separators
+
+       # Use a wildcard for separators, in case a version is written
+       # two different ways (e.g., boost writes 1_55_0 and 1.55.0)
+       sep_re = '[_.-]'
+       separators = ('',) + (sep_re,) * len(self.separators)
 
        version += (version[-1],) * 2
-       separators += (separators[-1],) * 2
+       separators += (sep_re,) * 2
 
-       sep_res = [re.escape(sep) for sep in separators]
-       seg_res = [a_or_n(seg) for seg in version]
+       segments = [a_or_n(seg) for seg in version]
 
-       wc = seg_res[0]
-       for i in xrange(1, len(sep_res)):
-           wc += '(?:' + sep_res[i] + seg_res[i]
+       wc = segments[0]
+       for i in xrange(1, len(separators)):
+           wc += '(?:' + separators[i] + segments[i]
 
        # Add possible alpha or beta indicator at the end of each segemnt
        # We treat these specially b/c they're so common.
-       wc += '[ab]?)?' * (len(seg_res) - 1)
+       wc += '[ab]?)?' * (len(segments) - 1)
        return wc
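For context, a toy reconstruction of the wildcard idea for a three-part version like 1.55.0: join the numeric segments with the separator character class so either spelling matches. Sketch only; the real Version.wildcard() also duplicates trailing segments and handles alphanumeric segments differently:

    import re

    segments = ['1', '55', '0']
    sep_re = '[_.-]'

    # Each later segment is optional and may carry an 'a' or 'b' suffix.
    wc = segments[0]
    for seg in segments[1:]:
        wc += '(?:' + sep_re + seg
    wc += '[ab]?)?' * (len(segments) - 1)

    print(wc)                                  # 1(?:[_.-]55(?:[_.-]0[ab]?)?[ab]?)?
    print(bool(re.match(wc + '$', '1.55.0')))  # True
    print(bool(re.match(wc + '$', '1_55_0')))  # True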
var/spack/packages/boost/package.py (new file): 60 additions

@@ -0,0 +1,60 @@
+from spack import *
+
+class Boost(Package):
+    """Boost provides free peer-reviewed portable C++ source
+       libraries, emphasizing libraries that work well with the C++
+       Standard Library.
+
+       Boost libraries are intended to be widely useful, and usable
+       across a broad spectrum of applications. The Boost license
+       encourages both commercial and non-commercial use.
+    """
+    homepage = "http://www.boost.org"
+    url = "http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2"
+    list_url = "http://sourceforge.net/projects/boost/files/boost/"
+    list_depth = 2
+
+    versions = {
+        '1.55.0' : 'd6eef4b4cacb2183f2bf265a5a03a354',
+        '1.54.0' : '15cb8c0803064faef0c4ddf5bc5ca279',
+        '1.53.0' : 'a00d22605d5dbcfb4c9936a9b35bc4c2',
+        '1.52.0' : '3a855e0f919107e0ca4de4d84ad3f750',
+        '1.51.0' : '4b6bd483b692fd138aef84ed2c8eb679',
+        '1.50.0' : '52dd00be775e689f55a987baebccc462',
+        '1.49.0' : '0d202cb811f934282dea64856a175698',
+        '1.48.0' : 'd1e9a7a7f532bb031a3c175d86688d95',
+        '1.47.0' : 'a2dc343f7bc7f83f8941e47ed4a18200',
+        '1.46.1' : '7375679575f4c8db605d426fc721d506',
+        '1.46.0' : '37b12f1702319b73876b0097982087e0',
+        '1.45.0' : 'd405c606354789d0426bc07bea617e58',
+        '1.44.0' : 'f02578f5218f217a9f20e9c30e119c6a',
+        '1.43.0' : 'dd49767bfb726b0c774f7db0cef91ed1',
+        '1.42.0' : '7bf3b4eb841b62ffb0ade2b82218ebe6',
+        '1.41.0' : '8bb65e133907db727a2a825c5400d0a6',
+        '1.40.0' : 'ec3875caeac8c52c7c129802a8483bd7',
+        '1.39.0' : 'a17281fd88c48e0d866e1a12deecbcc0',
+        '1.38.0' : '5eca2116d39d61382b8f8235915cb267',
+        '1.37.0' : '8d9f990bfb7e83769fa5f1d6f065bc92',
+        '1.36.0' : '328bfec66c312150e4c2a78dcecb504b',
+        '1.35.0' : 'dce952a7214e72d6597516bcac84048b',
+        '1.34.1' : '2d938467e8a448a2c9763e0a9f8ca7e5',
+        '1.34.0' : 'ed5b9291ffad776f8757a916e1726ad0'
+    }
+
+    def url_for_version(self, version):
+        """Handle Boost's weird URLs, which write the version two different ways."""
+        parts = [str(p) for p in Version(version)]
+        dots = ".".join(parts)
+        underscores = "_".join(parts)
+        return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % (
+            dots, underscores)
+
+
+    def install(self, spec, prefix):
+        bootstrap = Executable('./bootstrap.sh')
+        bootstrap()
+
+        b2 = Executable('./b2')
+        b2('install',
+           '-j %s' % make_jobs,
+           '--prefix=%s' % prefix)
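For context, the dots/underscores handling in url_for_version produces Boost's two spellings of the same version. A standalone sketch with a plain tuple standing in for Spack's Version object (an assumption made for illustration):

    # A plain tuple stands in for Spack's Version('1.55.0') here.
    version = (1, 55, 0)

    parts = [str(p) for p in version]
    dots = ".".join(parts)          # '1.55.0'  -> used in the directory name
    underscores = "_".join(parts)   # '1_55_0'  -> used in the tarball name

    print("http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2"
          % (dots, underscores))
    # http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2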
@@ -34,6 +34,7 @@ class Dyninst(Package):
 
    depends_on("libelf")
    depends_on("libdwarf")
+   depends_on("boost@1.42:")
 
    def install(self, spec, prefix):
        configure("--prefix=" + prefix)
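For context, "boost@1.42:" is an open-ended version constraint (1.42 or any newer release). A rough illustration of that semantics using plain tuples rather than Spack's spec machinery:

    # Stand-in check: a ":"-terminated range means "this version or newer".
    def at_least(version, minimum=(1, 42)):
        return tuple(version) >= minimum

    print(at_least((1, 41, 0)))   # False: too old for boost@1.42:
    print(at_least((1, 55, 0)))   # True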