make create and checksum consistent.
- create now searches and prompts for checksums.
- makes package creation easier
parent 2f1eae8c0d
commit 354c8a281b
6 changed files with 182 additions and 96 deletions
@@ -12,9 +12,7 @@
 from spack.colify import colify
 from spack.version import *

-default_number_to_fetch = 10
-
-description ="Checksum available versions of a package, print out checksums for addition to a package file."
+description ="Checksum available versions of a package to update a package file."

 def setup_parser(subparser):
     subparser.add_argument(
@@ -23,6 +21,32 @@ def setup_parser(subparser):
         'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')


+def get_checksums(versions, urls, **kwargs):
+    # Allow commands like create() to do some analysis on the first
+    # archive after it is downloaded.
+    first_stage_function = kwargs.get('first_stage_function', None)
+
+    tty.msg("Downloading...")
+    hashes = []
+    for i, (url, version) in enumerate(zip(urls, versions)):
+        stage = Stage(url)
+        try:
+            stage.fetch()
+            if i == 0 and first_stage_function:
+                first_stage_function(stage)
+
+            hashes.append(
+                spack.util.crypto.checksum(hashlib.md5, stage.archive_file))
+        except FailedDownloadError, e:
+            tty.msg("Failed to fetch %s" % url)
+            continue
+
+        finally:
+            stage.destroy()
+
+    return zip(versions, hashes)
+
+
 def checksum(parser, args):
     # get the package we're going to generate checksums for
     pkg = packages.get(args.package)
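The new get_checksums() helper stages each URL, lets the caller inspect the first downloaded archive through an optional callback, and MD5-sums every archive it manages to fetch; the callback is what lets create() (further down, note the added `import spack.cmd.checksum`) reuse this loop to guess the build system without fetching anything twice. Below is a minimal standalone sketch of the same pattern that hashes local files instead of staged downloads; checksum_files and first_file_function are illustrative names, not Spack APIs.

import hashlib

def checksum_files(versions, paths, first_file_function=None):
    # Same shape as get_checksums(): hash each archive, and give the
    # caller a chance to inspect the first one (e.g. to guess the
    # build system) before moving on.
    hashes = []
    for i, (version, path) in enumerate(zip(versions, paths)):
        if i == 0 and first_file_function:
            first_file_function(path)
        md5 = hashlib.md5()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(8192), b''):
                md5.update(chunk)
        hashes.append(md5.hexdigest())
    return zip(versions, hashes)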
@@ -42,47 +66,24 @@ def checksum(parser, args):
     versions = list(reversed(versions))
     urls = [pkg.url_for_version(v) for v in versions]

-    version_listings = ["%-10s%s" % (v,u) for v, u in zip(versions, urls)]
-    tty.msg("Found %s versions to checksum." % len(urls),
-            *version_listings)
+    tty.msg("Found %s versions of %s." % (len(urls), pkg.name),
+            *["%-10s%s" % (v,u) for v, u in zip(versions, urls)])

     print
-    while True:
-        ans = raw_input("How many would you like to checksum? (default 10, 0 to abort) ")
-        try:
-            if not ans:
-                to_download = default_number_to_fetch
-            else:
-                to_download = int(ans)
-            break
-        except ValueError:
-            tty.msg("Please enter a valid number.")
-            pass
+    archives_to_fetch = tty.get_number(
+        "How many would you like to checksum?", default=5, abort='q')

-    if not to_download:
+    if not archives_to_fetch:
         tty.msg("Aborted.")
         return
-    else:
-        urls = urls[:to_download]

-    tty.msg("Downloading...")
-    hashes = []
-    for url, version in zip(urls, versions):
-        stage = Stage(url)
-        try:
-            stage.fetch()
-            hashes.append(spack.util.crypto.checksum(
-                hashlib.md5, stage.archive_file))
-        except FailedDownloadError, e:
-            tty.msg("Failed to fetch %s" % url)
-            continue
+    version_hashes = get_checksums(
+        versions[:archives_to_fetch], urls[:archives_to_fetch])

-        finally:
-            stage.destroy()
+    if not version_hashes:
+        tty.die("Could not fetch any available versions for %s." % pkg.name)

-    dict_string = ["{"]
-    for i, (v, h) in enumerate(zip(versions, hashes)):
-        comma = "" if i == len(hashes) - 1 else ","
-        dict_string.append(" '%s' : '%s'%s" % (str(v), str(h), comma))
-    dict_string.append("}")
+    dict_string = [" '%s' : '%s'," % (v, h) for v, h in version_hashes]
+    dict_string = ['{'] + dict_string + ["}"]
+
     tty.msg("Checksummed new versions of %s:" % pkg.name, *dict_string)
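The checksum command now delegates prompting to tty.get_number() and ends by printing a dict literal ready to paste into a package file. A rough illustration of how that dict_string is assembled; the versions and digests here are placeholders, not real checksums.

version_hashes = [('1.2.0', '0' * 32),   # placeholder md5 digests
                  ('1.1.0', '1' * 32)]

dict_string = ["    '%s' : '%s'," % (v, h) for v, h in version_hashes]
dict_string = ['{'] + dict_string + ['}']
print('\n'.join(dict_string))

The remaining hunks apply the same search-prompt-checksum flow to the create command.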
@@ -2,32 +2,37 @@
 import os
 import hashlib
 import re
+from contextlib import closing

 import spack
+import spack.package
 import spack.packages as packages
 import spack.tty as tty
 import spack.url
 import spack.util.crypto as crypto
+import spack.cmd.checksum

 from spack.util.executable import which
 from spack.stage import Stage
-from contextlib import closing
+

 description = "Create a new package file from an archive URL"

 package_template = string.Template("""\
 # FIXME:
 # This is a template package file for Spack. We've conveniently
-# put giant "FIXME" labels next to all the things you'll probably
-# want to change.
+# put "FIXME" labels next to all the things you'll want to change.
 #
 # Once you've edited all the FIXME's, delete this whole message,
 # save this file, and test out your package like this:
 #
 #     spack install ${name}
 #
-# You can always get back here with 'spack edit ${name}'. See
-# the spack documentation for more information on building
+# You can always get back here to change things with:
+#
+#     spack edit ${name}
+#
+# See the spack documentation for more information on building
 # packages.
 #
 from spack import *
@@ -52,28 +57,31 @@ def install(self, prefix):

 def setup_parser(subparser):
     subparser.add_argument('url', nargs='?', help="url of package archive")
-    subparser.add_argument('-f', '--force', action='store_true', dest='force',
-                           help="Remove existing package file.")
+    subparser.add_argument(
+        '-f', '--force', action='store_true', dest='force',
+        help="Overwrite any existing package file with the same name.")


-def guess_configure(archive_file):
-    """Try to guess the type of build system used by the project, and return
-       an appropriate configure line.
-    """
-    tar = which('tar')
-    output = tar("--exclude=*/*/*", "-tf", archive_file, return_output=True)
-
-    autotools = 'configure("--prefix=%s" % prefix)'
-    cmake = 'cmake(".", *std_cmake_args)'
-    lines = output.split('\n')
-
-    if any(re.search(r'/configure$', l) for l in lines):
-        return autotools
-    elif any(re.search(r'/CMakeLists.txt$', l) for l in lines):
-        return cmake
-    else:
-        # Both, with cmake commented out
-        return '%s\n # %s' % (autotools, cmake)
+class ConfigureGuesser(object):
+    def __call__(self, stage):
+        """Try to guess the type of build system used by the project, and return
+           an appropriate configure line.
+        """
+        tar = which('tar')
+        output = tar(
+            "--exclude=*/*/*", "-tf", stage.archive_file, return_output=True)
+
+        autotools = 'configure("--prefix=%s" % prefix)'
+        cmake = 'cmake(".", *std_cmake_args)'
+        lines = output.split('\n')
+
+        if any(re.search(r'/configure$', l) for l in lines):
+            self.configure = autotools
+        elif any(re.search(r'/CMakeLists.txt$', l) for l in lines):
+            self.configure = cmake
+        else:
+            # Both, with cmake commented out
+            self.configure = '%s\n # %s' % (autotools, cmake)


 def create(parser, args):
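ConfigureGuesser turns the old guess_configure() function into a callable object so it can be handed to get_checksums() as first_stage_function; after the first archive is staged, it records its guess on self.configure. Below is a standalone sketch of the same build-system guess working on a plain list of tarball member names instead of a Spack Stage; guess_configure_line is an illustrative name.

import re

def guess_configure_line(member_names):
    # member_names: lines from a `tar -tf archive.tar.gz` listing.
    autotools = 'configure("--prefix=%s" % prefix)'
    cmake = 'cmake(".", *std_cmake_args)'

    if any(re.search(r'/configure$', name) for name in member_names):
        return autotools
    elif any(re.search(r'/CMakeLists.txt$', name) for name in member_names):
        return cmake
    # Neither found: emit both, with the cmake call commented out.
    return '%s\n    # %s' % (autotools, cmake)

print(guess_configure_line(['foo-1.0/', 'foo-1.0/configure', 'foo-1.0/src/']))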
@@ -82,43 +90,67 @@ def create(parser, args):
     # Try to deduce name and version of the new package from the URL
     name, version = spack.url.parse_name_and_version(url)
     if not name:
-        print "Couldn't guess a name for this package."
+        tty.msg("Couldn't guess a name for this package.")
         while not name:
             new_name = raw_input("Name: ")
             if packages.valid_name(name):
                 name = new_name
             else:
-                print "Package names must contain letters, numbers, and '_' or '-'"
+                print "Package name can only contain A-Z, a-z, 0-9, '_' and '-'"

     if not version:
         tty.die("Couldn't guess a version string from %s." % url)

-    path = packages.filename_for_package_name(name)
-    if not args.force and os.path.exists(path):
-        tty.die("%s already exists." % path)
+    tty.msg("Creating template for package %s" % name)

-    # make a stage and fetch the archive.
-    try:
-        stage = Stage(url)
-        archive_file = stage.fetch()
-    except spack.FailedDownloadException, e:
-        tty.die(e.message)
+    pkg_path = packages.filename_for_package_name(name)
+    if os.path.exists(pkg_path) and not args.force:
+        tty.die("%s already exists." % pkg_path)

-    md5 = crypto.checksum(hashlib.md5, archive_file)
-    versions = '{ "%s" : "%s" }' % (version, md5)
     class_name = packages.class_name_for_package_name(name)
-    configure = guess_configure(archive_file)
+    versions = list(reversed(spack.package.find_versions_of_archive(url)))

+    archives_to_fetch = 1
+    if not versions:
+        # If the fetch failed for some reason, revert to what the user provided
+        versions = [version]
+        urls = [url]
+    else:
+        urls = [spack.url.substitute_version(url, v) for v in versions]
+        if len(urls) > 1:
+            tty.msg("Found %s versions of %s to checksum." % (len(urls), name),
+                    *["%-10s%s" % (v,u) for v, u in zip(versions, urls)])
+            print
+            archives_to_fetch = tty.get_number(
+                "Include how many checksums in the package file?",
+                default=5, abort='q')
+
+            if not archives_to_fetch:
+                tty.msg("Aborted.")
+                return
+
+    guesser = ConfigureGuesser()
+    version_hashes = spack.cmd.checksum.get_checksums(
+        versions[:archives_to_fetch], urls[:archives_to_fetch],
+        first_stage_function=guesser)
+
+    if not version_hashes:
+        tty.die("Could not fetch any tarballs for %s." % name)
+
+    sep = '\n '
+    versions_string = '{ ' + sep.join(
+        "'%s' : '%s'," % (v, h) for v, h in version_hashes) + ' }'
+
     # Write out a template for the file
-    tty.msg("Editing %s." % path)
-    with closing(open(path, "w")) as pkg_file:
+    with closing(open(pkg_path, "w")) as pkg_file:
         pkg_file.write(
             package_template.substitute(
                 name=name,
-                configure=configure,
+                configure=guesser.configure,
                 class_name=class_name,
                 url=url,
-                versions=versions))
+                versions=versions_string))

     # If everything checks out, go ahead and edit.
-    spack.editor(path)
+    spack.editor(pkg_path)
+    tty.msg("Created package %s." % pkg_path)
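With these changes, create() scrapes the archive's directory listing for versions, asks how many to checksum, and substitutes a multi-line dict into the package template instead of the old single-entry string. The sketch below shows roughly how the ${versions} placeholder could end up rendered, assuming the template contains a line like `versions = ${versions}` (that part of the template is not shown in the hunks above), and using placeholder versions and digests.

import string

version_hashes = [('8.2.1', 'a' * 32),   # placeholder digests, not real md5s
                  ('8.2.0', 'b' * 32)]

sep = '\n    '
versions_string = '{ ' + sep.join(
    "'%s' : '%s'," % (v, h) for v, h in version_hashes) + ' }'

snippet = string.Template("    versions = ${versions}\n")
print(snippet.substitute(versions=versions_string))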
@@ -338,7 +338,7 @@ def __init__(self, spec):

         # Set a default list URL (place to find available versions)
         if not hasattr(self, 'list_url'):
-            self.list_url = os.path.dirname(self.url)
+            self.list_url = None

         if not hasattr(self, 'list_depth'):
             self.list_depth = 1
@@ -733,21 +733,12 @@ def do_clean_dist(self):
     def fetch_available_versions(self):
         # If not, then try to fetch using list_url
         if not self._available_versions:
-            self._available_versions = VersionList()
-            url_regex = os.path.basename(url.wildcard_version(self.url))
-            wildcard = self.default_version.wildcard()
-
             try:
-                page_map = get_pages(self.list_url, depth=self.list_depth)
-
-                for site, page in page_map.iteritems():
-                    strings = re.findall(url_regex, page)
-
-                    for s in strings:
-                        match = re.search(wildcard, s)
-                        if match:
-                            v = match.group(0)
-                            self._available_versions.add(Version(v))
+                self._available_versions = find_versions_of_archive(
+                    self.url,
+                    list_url=self.list_url,
+                    list_depth=self.list_depth,
+                    wildcard=self.default_version.wildcard())

                 if not self._available_versions:
                     tty.warn("Found no versions for %s" % self.name,
@@ -774,6 +765,33 @@ def available_versions(self):
         return vlist


+def find_versions_of_archive(archive_url, **kwargs):
+    list_url = kwargs.get('list_url', None)
+    list_depth = kwargs.get('list_depth', 1)
+    wildcard = kwargs.get('wildcard', None)
+
+    if not list_url:
+        list_url = os.path.dirname(archive_url)
+    if not wildcard:
+        wildcard = url.parse_version(archive_url).wildcard()
+
+    versions = VersionList()
+    url_regex = os.path.basename(url.wildcard_version(archive_url))
+
+    page_map = get_pages(list_url, depth=list_depth)
+
+    for site, page in page_map.iteritems():
+        strings = re.findall(url_regex, page)
+
+        for s in strings:
+            match = re.search(wildcard, s)
+            if match:
+                v = match.group(0)
+                versions.add(Version(v))
+
+    return versions
+
+
 class MakeExecutable(Executable):
     """Special Executable for make so the user can specify parallel or
        not on a per-invocation basis. Using 'parallel' as a kwarg will
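find_versions_of_archive() pulls the version-scraping loop out of Package.fetch_available_versions() so the create command can call it too, as spack.package.find_versions_of_archive in the hunks above. Here is a simplified, self-contained sketch of the idea (scan a listing for archive links, then extract version strings), with a canned page and made-up regexes standing in for spack.url and get_pages().

import re

def versions_from_listing(page_text, link_regex, version_regex):
    # Stand-in for find_versions_of_archive(): find archive links in a
    # directory listing, then pull a version string out of each link.
    found = set()
    for link in re.findall(link_regex, page_text):
        match = re.search(version_regex, link)
        if match:
            found.add(match.group(0))
    return sorted(found)

page = '<a href="foo-1.0.tar.gz">foo-1.0.tar.gz</a> <a href="foo-1.1.tar.gz">foo-1.1.tar.gz</a>'
print(versions_from_listing(page, r'foo-[\d.]+\.tar\.gz', r'\d+(\.\d+)*'))
# -> ['1.0', '1.1']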
@@ -267,6 +267,11 @@ def destroy(self):
         """Remove this stage directory."""
         remove_linked_tree(self.path)

+        # Make sure we don't end up in a removed directory
+        try:
+            os.getcwd()
+        except OSError:
+            os.chdir(os.path.dirname(self.path))


 def ensure_access(file=spack.stage_path):
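The Stage.destroy() change guards against a subtle failure: after remove_linked_tree() the process may be sitting in a directory that no longer exists, and later relative-path work would fail. The guard in isolation, with an illustrative name (ensure_valid_cwd is not a Spack function):

import os

def ensure_valid_cwd(fallback):
    # os.getcwd() raises OSError if the current directory was deleted;
    # in that case, move to a directory that still exists.
    try:
        os.getcwd()
    except OSError:
        os.chdir(fallback)

ensure_valid_cwd(os.path.expanduser('~'))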
@@ -51,3 +51,33 @@ def pkg(message):
     else:
         cwrite('@*g{[+]} ')
     print message
+
+
+def get_number(prompt, **kwargs):
+    default = kwargs.get('default', None)
+    abort = kwargs.get('abort', None)
+
+    if default is not None and abort is not None:
+        prompt += ' (default is %s, %s to abort) ' % (default, abort)
+    elif default is not None:
+        prompt += ' (default is %s) ' % default
+    elif abort is not None:
+        prompt += ' (%s to abort) ' % abort
+
+    number = None
+    while number is None:
+        ans = raw_input(prompt)
+        if ans == str(abort):
+            return None
+
+        if ans:
+            try:
+                number = int(ans)
+                if number < 1:
+                    msg("Please enter a valid number.")
+                    number = None
+            except ValueError:
+                msg("Please enter a valid number.")
+        elif default is not None:
+            number = default
+    return number
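tty.get_number() centralizes the prompt/validate/retry loop that checksum and create previously open-coded: it returns None when the user types the abort character, the default when the user just presses Enter, and otherwise a positive integer. Assuming Spack's lib directory is on the Python path (this is the spack-internal module shown above, not a public API), the call sites in this commit use it roughly like so:

import spack.tty as tty

count = tty.get_number("How many would you like to checksum?",
                       default=5, abort='q')
if not count:
    print("Aborted.")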
@@ -168,7 +168,7 @@ def substitute_version(path, new_version):
        the new version for it.
     """
     ver, start, end = parse_version_string_with_indices(path)
-    return path[:start] + new_version + path[end:]
+    return path[:start] + str(new_version) + path[end:]


 def wildcard_version(path):
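Wrapping new_version in str() lets callers pass a Version object, as create() now does through spack.url.substitute_version(), instead of only a plain string. A toy illustration with a hypothetical URL and hard-coded slice indices standing in for parse_version_string_with_indices():

class FakeVersion(object):
    # Minimal stand-in for spack.version.Version, illustration only.
    def __init__(self, s):
        self.s = s
    def __str__(self):
        return self.s

path = 'http://example.com/foo-1.0.tar.gz'
start = len('http://example.com/foo-')
end = len('http://example.com/foo-1.0')

# str() makes this work for both strings and Version-like objects;
# plain concatenation would raise TypeError for the object case.
print(path[:start] + str(FakeVersion('1.1')) + path[end:])
# -> http://example.com/foo-1.1.tar.gz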