Merge branch 'develop' of https://github.com/RemoteConnectionManager/spack into paraview_fix
Commit 1b6909dd9b
36 changed files with 423 additions and 504 deletions
@@ -25,7 +25,7 @@
__all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree',
           'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
           'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file',
           'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink']
           'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'remove_dead_links', 'remove_linked_tree']

import os
import sys

@@ -240,7 +240,7 @@ def touchp(path):
def force_symlink(src, dest):
    try:
        os.symlink(src, dest)
    except OSError, e:
    except OSError as e:
        os.remove(dest)
        os.symlink(src, dest)

@@ -344,3 +344,34 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
    if order == 'post':
        yield (source_path, dest_path)


def remove_dead_links(root):
    """
    Removes any dead link that is present in root

    Args:
        root: path where to search for dead links
    """
    for file in os.listdir(root):
        path = join_path(root, file)
        if os.path.islink(path):
            real_path = os.path.realpath(path)
            if not os.path.exists(real_path):
                os.unlink(path)


def remove_linked_tree(path):
    """
    Removes a directory and its contents. If the directory is a symlink, follows the link and removes the real
    directory before removing the link.

    Args:
        path: directory to be removed
    """
    if os.path.exists(path):
        if os.path.islink(path):
            shutil.rmtree(os.path.realpath(path), True)
            os.unlink(path)
        else:
            shutil.rmtree(path, True)
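For reference, a minimal sketch of how the two helpers added above might be exercised. Only remove_dead_links and remove_linked_tree come from this diff; the temporary directory layout is illustrative.

```python
# Sketch only: exercising the new filesystem helpers shown in the hunk above.
# The temporary directory and link names are illustrative, not from the diff.
import os
import tempfile

from llnl.util.filesystem import join_path, remove_dead_links, remove_linked_tree

root = tempfile.mkdtemp()
# Create a symlink whose target does not exist.
os.symlink(join_path(root, 'missing-target'), join_path(root, 'dangling'))

remove_dead_links(root)    # drops 'dangling' because its target is gone
remove_linked_tree(root)   # removes the directory (following it first if it is a symlink)
```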
@@ -43,4 +43,4 @@ def clean(parser, args):
    specs = spack.cmd.parse_specs(args.packages, concretize=True)
    for spec in specs:
        package = spack.repo.get(spec)
        package.do_clean()
        package.stage.destroy()
@@ -110,7 +110,6 @@ def suggest_archive_basename(resource):
    return basename


def create(path, specs, **kwargs):
    """Create a directory to be used as a spack mirror, and fill it with
    package archives.

@@ -158,17 +157,29 @@ def create(path, specs, **kwargs):
            "Cannot create directory '%s':" % mirror_root, str(e))

    # Things to keep track of while parsing specs.
    present = []
    mirrored = []
    error = []
    categories = {
        'present': [],
        'mirrored': [],
        'error': []
    }

    # Iterate through packages and download all the safe tarballs for each of them
    everything_already_exists = True
    for spec in version_specs:
        pkg = spec.package
        tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@")))
        try:
            for ii, stage in enumerate(pkg.stage):
        add_single_spec(spec, mirror_root, categories, **kwargs)

    return categories['present'], categories['mirrored'], categories['error']


def add_single_spec(spec, mirror_root, categories, **kwargs):
    tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@")))
    spec_exists_in_mirror = True
    try:
        with spec.package.stage:
            # fetcher = stage.fetcher
            # fetcher.fetch()
            # ...
            # fetcher.archive(archive_path)
            for ii, stage in enumerate(spec.package.stage):
                fetcher = stage.fetcher
                if ii == 0:
                    # create a subdirectory for the current package@version

@@ -184,7 +195,7 @@ def create(path, specs, **kwargs):
                if os.path.exists(archive_path):
                    tty.msg("{name} : already added".format(name=name))
                else:
                    everything_already_exists = False
                    spec_exists_in_mirror = False
                    fetcher.fetch()
                    if not kwargs.get('no_checksum', False):
                        fetcher.check()

@@ -195,20 +206,16 @@ def create(path, specs, **kwargs):
                    fetcher.archive(archive_path)
                    tty.msg("{name} : added".format(name=name))

            if everything_already_exists:
                present.append(spec)
            else:
                mirrored.append(spec)
        except Exception, e:
            if spack.debug:
                sys.excepthook(*sys.exc_info())
            else:
                tty.warn("Error while fetching %s." % spec.format('$_$@'), e.message)
            error.append(spec)
        finally:
            pkg.stage.destroy()

    return (present, mirrored, error)
        if spec_exists_in_mirror:
            categories['present'].append(spec)
        else:
            categories['mirrored'].append(spec)
    except Exception as e:
        if spack.debug:
            sys.excepthook(*sys.exc_info())
        else:
            tty.warn("Error while fetching %s." % spec.format('$_$@'), e.message)
        categories['error'].append(spec)


class MirrorError(spack.error.SpackError):
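A sketch of how a caller might consume the refactored create() above, which now returns the three category lists. The mirror path and spec name are illustrative values, not taken from the diff.

```python
# Sketch: calling the refactored mirror creation shown above.
# '/tmp/my-mirror' and 'libelf' are illustrative, not from the diff.
import spack.cmd
import spack.mirror

specs = spack.cmd.parse_specs(['libelf'], concretize=True)
present, mirrored, error = spack.mirror.create('/tmp/my-mirror', specs, no_checksum=True)

print("already present:", len(present))
print("newly mirrored: ", len(mirrored))
print("failed:         ", len(error))
```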
@@ -293,7 +293,6 @@ class SomePackage(Package):
    .. code-block:: python

       p.do_clean()              # removes the stage directory entirely
       p.do_restage()            # removes the build directory and
                                 # re-expands the archive.

@@ -503,7 +502,6 @@ def fetcher(self):
        self._fetcher = self._make_fetcher()
        return self._fetcher

    @fetcher.setter
    def fetcher(self, f):
        self._fetcher = f

@@ -735,7 +733,7 @@ def do_patch(self):
        # If we encounter an archive that failed to patch, restage it
        # so that we can apply all the patches again.
        if os.path.isfile(bad_file):
            tty.msg("Patching failed last time. Restaging.")
            tty.msg("Patching failed last time. Restaging.")
            self.stage.restage()

        self.stage.chdir_to_source()

@@ -850,102 +848,103 @@ def do_install(self,
            make_jobs=make_jobs)

        start_time = time.time()
        if not fake:
        if not skip_patch:
        self.do_patch()
        else:
        self.do_stage()

        # create the install directory. The install layout
        # handles this in case so that it can use whatever
        # package naming scheme it likes.
        spack.install_layout.create_install_directory(self.spec)

        def cleanup():
        if not keep_prefix:
        # If anything goes wrong, remove the install prefix
        self.remove_prefix()
        else:
        tty.warn("Keeping install prefix in place despite error.",
        "Spack will think this package is installed." +
        "Manually remove this directory to fix:",
        self.prefix, wrap=True)

        def real_work():
        try:
        tty.msg("Building %s." % self.name)

        # Run the pre-install hook in the child process after
        # the directory is created.
        spack.hooks.pre_install(self)

        # Set up process's build environment before running install.
        if fake:
        self.do_fake_install()
        with self.stage:
        if not fake:
        if not skip_patch:
        self.do_patch()
        else:
        # Do the real install in the source directory.
        self.stage.chdir_to_source()
        self.do_stage()

        # Save the build environment in a file before building.
        env_path = join_path(os.getcwd(), 'spack-build.env')
        # create the install directory. The install layout
        # handles this in case so that it can use whatever
        # package naming scheme it likes.
        spack.install_layout.create_install_directory(self.spec)

        # This redirects I/O to a build log (and optionally to the terminal)
        log_path = join_path(os.getcwd(), 'spack-build.out')
        log_file = open(log_path, 'w')
        with log_output(log_file, verbose, sys.stdout.isatty(), True):
        dump_environment(env_path)
        self.install(self.spec, self.prefix)
        def cleanup():
        if not keep_prefix:
        # If anything goes wrong, remove the install prefix
        self.remove_prefix()
        else:
        tty.warn("Keeping install prefix in place despite error.",
        "Spack will think this package is installed." +
        "Manually remove this directory to fix:",
        self.prefix, wrap=True)

        # Ensure that something was actually installed.
        self._sanity_check_install()

        # Move build log into install directory on success
        if not fake:
        log_install_path = spack.install_layout.build_log_path(self.spec)
        env_install_path = spack.install_layout.build_env_path(self.spec)
        install(log_path, log_install_path)
        install(env_path, env_install_path)
        def real_work():
        try:
        tty.msg("Building %s." % self.name)

        packages_dir = spack.install_layout.build_packages_path(self.spec)
        dump_packages(self.spec, packages_dir)
        # Run the pre-install hook in the child process after
        # the directory is created.
        spack.hooks.pre_install(self)

        # On successful install, remove the stage.
        if not keep_stage:
        self.stage.destroy()
        # Set up process's build environment before running install.
        if fake:
        self.do_fake_install()
        else:
        # Do the real install in the source directory.
        self.stage.chdir_to_source()

        # Stop timer.
        self._total_time = time.time() - start_time
        build_time = self._total_time - self._fetch_time
        # Save the build environment in a file before building.
        env_path = join_path(os.getcwd(), 'spack-build.env')

        tty.msg("Successfully installed %s." % self.name,
        "Fetch: %s.  Build: %s.  Total: %s."
        % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
        print_pkg(self.prefix)
        # This redirects I/O to a build log (and optionally to the terminal)
        log_path = join_path(os.getcwd(), 'spack-build.out')
        log_file = open(log_path, 'w')
        with log_output(log_file, verbose, sys.stdout.isatty(), True):
        dump_environment(env_path)
        self.install(self.spec, self.prefix)

        except ProcessError, e:
        # Annotate with location of build log.
        e.build_log = log_path
        cleanup()
        raise e
        # Ensure that something was actually installed.
        self._sanity_check_install()

        except:
        # other exceptions just clean up and raise.
        cleanup()
        raise
        # Move build log into install directory on success
        if not fake:
        log_install_path = spack.install_layout.build_log_path(self.spec)
        env_install_path = spack.install_layout.build_env_path(self.spec)
        install(log_path, log_install_path)
        install(env_path, env_install_path)

        # Set parallelism before starting build.
        self.make_jobs = make_jobs
        packages_dir = spack.install_layout.build_packages_path(self.spec)
        dump_packages(self.spec, packages_dir)

        # Do the build.
        spack.build_environment.fork(self, real_work)
        # On successful install, remove the stage.
        if not keep_stage:
        self.stage.destroy()

        # note: PARENT of the build process adds the new package to
        # the database, so that we don't need to re-read from file.
        spack.installed_db.add(self.spec, self.prefix)
        # Stop timer.
        self._total_time = time.time() - start_time
        build_time = self._total_time - self._fetch_time

        # Once everything else is done, run post install hooks
        spack.hooks.post_install(self)
        tty.msg("Successfully installed %s." % self.name,
        "Fetch: %s.  Build: %s.  Total: %s."
        % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
        print_pkg(self.prefix)

        except ProcessError as e:
        # Annotate with location of build log.
        e.build_log = log_path
        cleanup()
        raise e

        except:
        # other exceptions just clean up and raise.
        cleanup()
        raise

        # Set parallelism before starting build.
        self.make_jobs = make_jobs

        # Do the build.
        spack.build_environment.fork(self, real_work)

        # note: PARENT of the build process adds the new package to
        # the database, so that we don't need to re-read from file.
        spack.installed_db.add(self.spec, self.prefix)

        # Once everything else is done, run post install hooks
        spack.hooks.post_install(self)


    def _sanity_check_install(self):
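The old and new columns of the do_install() hunk above come through interleaved, so the new control flow is easier to follow as a skeleton. This is a simplified, non-verbatim sketch of the refactored shape, in which the build work now runs inside the stage context manager; all names used here appear on the new side of the hunk.

```python
# Simplified skeleton (not verbatim) of the refactored do_install() flow above:
# staging, patching, installation, and the sanity check now run inside the
# stage context manager rather than around hand-managed setup/teardown.
with self.stage:
    if not fake:
        if not skip_patch:
            self.do_patch()
        else:
            self.do_stage()
    spack.install_layout.create_install_directory(self.spec)
    spack.hooks.pre_install(self)
    if fake:
        self.do_fake_install()
    else:
        self.stage.chdir_to_source()
        self.install(self.spec, self.prefix)
    self._sanity_check_install()
```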
@@ -1149,13 +1148,6 @@ def do_restage(self):
        """Reverts expanded/checked out source to a pristine state."""
        self.stage.restage()


    def do_clean(self):
        """Removes the package's build stage and source tarball."""
        if os.path.exists(self.stage.path):
            self.stage.destroy()


    def format_doc(self, **kwargs):
        """Wrap doc string at 72 characters and format nicely"""
        indent = kwargs.get('indent', 0)

@@ -1192,7 +1184,7 @@ def fetch_remote_versions(self):
        try:
            return spack.util.web.find_versions_of_archive(
                *self.all_urls, list_url=self.list_url, list_depth=self.list_depth)
        except spack.error.NoNetworkConnectionError, e:
        except spack.error.NoNetworkConnectionError as e:
            tty.die("Package.fetch_versions couldn't connect to:",
                    e.url, e.message)
@@ -42,33 +42,26 @@

class Stage(object):
    """A Stage object manages a directory where some source code is
       downloaded and built before being installed. It handles
       fetching the source code, either as an archive to be expanded
       or by checking it out of a repository. A stage's lifecycle
       looks like this:
    """
    A Stage object is a context manager that handles a directory where some source code is downloaded and built
    before being installed. It handles fetching the source code, either as an archive to be expanded or by checking
    it out of a repository. A stage's lifecycle looks like this:

    Stage()
        Constructor creates the stage directory.
    fetch()
        Fetch a source archive into the stage.
    expand_archive()
        Expand the source archive.
    <install>
        Build and install the archive. This is handled by the Package class.
    destroy()
        Remove the stage once the package has been installed.
    ```
    with Stage() as stage:  # Context manager creates and destroys the stage directory
        fetch()             # Fetch a source archive into the stage.
        expand_archive()    # Expand the source archive.
        <install>           # Build and install the archive. This is handled by the Package class.
    ```

    If spack.use_tmp_stage is True, spack will attempt to create stages
    in a tmp directory.  Otherwise, stages are created directly in
    spack.stage_path.
    If spack.use_tmp_stage is True, spack will attempt to create stages in a tmp directory.
    Otherwise, stages are created directly in spack.stage_path.

    There are two kinds of stages: named and unnamed. Named stages can
    persist between runs of spack, e.g. if you fetched a tarball but
    didn't finish building it, you won't have to fetch it again.
    There are two kinds of stages: named and unnamed. Named stages can persist between runs of spack, e.g. if you
    fetched a tarball but didn't finish building it, you won't have to fetch it again.

    Unnamed stages are created using standard mkdtemp mechanisms or
    similar, and are intended to persist for only one run of spack.
    Unnamed stages are created using standard mkdtemp mechanisms or similar, and are intended to persist for
    only one run of spack.
    """

    def __init__(self, url_or_fetch_strategy, **kwargs):
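The rewritten docstring above describes the new context-manager lifecycle; a minimal usage sketch follows. The archive URL is a placeholder.

```python
# Minimal sketch of the lifecycle described in the Stage docstring above.
# The archive URL is a placeholder.
from spack.stage import Stage

with Stage('http://example.com/foo-1.0.tar.gz') as stage:
    stage.fetch()            # fetch the source archive into the stage
    stage.expand_archive()   # expand it
    # <install> would happen here, driven by the Package class
# on exit the stage directory is destroyed (unless an exception was raised)
```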
@@ -96,21 +89,46 @@ def __init__(self, url_or_fetch_strategy, **kwargs):
        self.default_fetcher = self.fetcher  # self.fetcher can change with mirrors.
        self.skip_checksum_for_mirror = True  # used for mirrored archives of repositories.

        self.name = kwargs.get('name')
        # TODO : this uses a protected member of tempfile, but seemed the only way to get a temporary name
        # TODO : besides, the temporary link name won't be the same as the temporary stage area in tmp_root
        self.name = kwargs.get('name') if 'name' in kwargs else STAGE_PREFIX + next(tempfile._get_candidate_names())
        self.mirror_path = kwargs.get('mirror_path')
        self.tmp_root = find_tmp_root()

        self.path = None
        self._setup()
        # Try to construct here a temporary name for the stage directory
        # If this is a named stage, then construct a named path.
        self.path = join_path(spack.stage_path, self.name)
        # Flag to decide whether to delete the stage folder on exit or not
        self.delete_on_exit = True

    def _cleanup_dead_links(self):
        """Remove any dead links in the stage directory."""
        for file in os.listdir(spack.stage_path):
            path = join_path(spack.stage_path, file)
            if os.path.islink(path):
                real_path = os.path.realpath(path)
                if not os.path.exists(path):
                    os.unlink(path)

    def __enter__(self):
        """
        Entering a stage context will create the stage directory

        Returns:
            self
        """
        self.create()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Exiting from a stage context will delete the stage directory unless:
        - it was explicitly requested not to do so
        - an exception has been raised

        Args:
            exc_type: exception type
            exc_val: exception value
            exc_tb: exception traceback

        Returns:
            Boolean
        """
        self.delete_on_exit = False if exc_type is not None else self.delete_on_exit

        if self.delete_on_exit:
            self.destroy()

    def _need_to_create_path(self):
        """Makes sure nothing weird has happened since the last time we
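Per the __exit__ docstring above, the stage directory survives when an exception propagates or when deletion was explicitly disabled. A small sketch of the latter, using the delete_on_exit flag set in __init__; the stage name and URL are illustrative.

```python
# Sketch: opt out of cleanup for a named stage, using the delete_on_exit flag
# introduced in __init__ above. The name and URL are illustrative only.
from spack.stage import Stage

with Stage('http://example.com/foo-1.0.tar.gz', name='debug-stage') as stage:
    stage.fetch()
    stage.delete_on_exit = False   # keep the directory around for inspection
# stage.path still exists here because delete_on_exit was False
```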
@@ -148,54 +166,6 @@ def _need_to_create_path(self):

        return False

    def _setup(self):
        """Creates the stage directory.
           If spack.use_tmp_stage is False, the stage directory is created
           directly under spack.stage_path.

           If spack.use_tmp_stage is True, this will attempt to create a
           stage in a temporary directory and link it into spack.stage_path.
           Spack will use the first writable location in spack.tmp_dirs to
           create a stage. If there is no valid location in tmp_dirs, fall
           back to making the stage inside spack.stage_path.
        """
        # Create the top-level stage directory
        mkdirp(spack.stage_path)
        self._cleanup_dead_links()

        # If this is a named stage, then construct a named path.
        if self.name is not None:
            self.path = join_path(spack.stage_path, self.name)

        # If this is a temporary stage, them make the temp directory
        tmp_dir = None
        if self.tmp_root:
            if self.name is None:
                # Unnamed tmp root.  Link the path in
                tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root)
                self.name = os.path.basename(tmp_dir)
                self.path = join_path(spack.stage_path, self.name)
                if self._need_to_create_path():
                    os.symlink(tmp_dir, self.path)

            else:
                if self._need_to_create_path():
                    tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root)
                    os.symlink(tmp_dir, self.path)

        # if we're not using a tmp dir, create the stage directly in the
        # stage dir, rather than linking to it.
        else:
            if self.name is None:
                self.path = tempfile.mkdtemp('', STAGE_PREFIX, spack.stage_path)
                self.name = os.path.basename(self.path)
            else:
                if self._need_to_create_path():
                    mkdirp(self.path)

        # Make sure we can actually do something with the stage we made.
        ensure_access(self.path)

    @property
    def archive_file(self):
        """Path to the source archive within this stage directory."""
@@ -276,7 +246,7 @@ def fetch(self, mirror_only=False):
                self.fetcher = fetcher
                self.fetcher.fetch()
                break
            except spack.error.SpackError, e:
            except spack.error.SpackError as e:
                tty.msg("Fetching from %s failed." % fetcher)
                tty.debug(e)
                continue
@@ -328,8 +298,34 @@ def restage(self):
        """
        self.fetcher.reset()

    def create(self):
        """
        Creates the stage directory

        If self.tmp_root evaluates to False, the stage directory is created directly under spack.stage_path, otherwise
        this will attempt to create a stage in a temporary directory and link it into spack.stage_path.

        Spack will use the first writable location in spack.tmp_dirs to create a stage. If there is no valid location
        in tmp_dirs, fall back to making the stage inside spack.stage_path.
        """
        # Create the top-level stage directory
        mkdirp(spack.stage_path)
        remove_dead_links(spack.stage_path)
        # If a tmp_root exists then create a directory there and then link it in the stage area,
        # otherwise create the stage directory in self.path
        if self._need_to_create_path():
            if self.tmp_root:
                tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root)
                os.symlink(tmp_dir, self.path)
            else:
                mkdirp(self.path)
        # Make sure we can actually do something with the stage we made.
        ensure_access(self.path)

    def destroy(self):
        """Remove this stage directory."""
        """
        Removes this stage directory
        """
        remove_linked_tree(self.path)

        # Make sure we don't end up in a removed directory
@@ -389,6 +385,15 @@ def source_path(self):
    def path(self):
        return self[0].path

    def __enter__(self):
        for item in self:
            item.__enter__()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        for item in reversed(self):
            item.__exit__(exc_type, exc_val, exc_tb)

    def chdir_to_source(self):
        return self[0].chdir_to_source()
@@ -439,19 +444,6 @@ def ensure_access(file=spack.stage_path):
        tty.die("Insufficient permissions for %s" % file)


def remove_linked_tree(path):
    """Removes a directory and its contents.  If the directory is a symlink,
       follows the link and removes the real directory before removing the
       link.
    """
    if os.path.exists(path):
        if os.path.islink(path):
            shutil.rmtree(os.path.realpath(path), True)
            os.unlink(path)
        else:
            shutil.rmtree(path, True)


def purge():
    """Remove all build directories in the top-level stage path."""
    if os.path.isdir(spack.stage_path):
@@ -22,8 +22,6 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest

import spack
from spack.spec import Spec, CompilerSpec
from spack.test.mock_packages_test import *
@@ -22,13 +22,13 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest
import shutil
import os
import shutil
from tempfile import mkdtemp
from ordereddict_backport import OrderedDict

import spack
import spack.config
from ordereddict_backport import OrderedDict
from spack.test.mock_packages_test import *

# Some sample compiler config data
@@ -23,20 +23,15 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import unittest
import shutil
import tempfile
import unittest

from llnl.util.filesystem import *

from spack.cmd.create import ConfigureGuesser
from spack.stage import Stage

from spack.fetch_strategy import URLFetchStrategy
from spack.directory_layout import YamlDirectoryLayout
from spack.util.executable import which
from spack.test.mock_packages_test import *
from spack.test.mock_repo import MockArchive
from spack.util.executable import which


class InstallTest(unittest.TestCase):

@@ -52,8 +47,6 @@ def setUp(self):

    def tearDown(self):
        shutil.rmtree(self.tmpdir, ignore_errors=True)
        if self.stage:
            self.stage.destroy()
        os.chdir(self.orig_dir)

@@ -64,12 +57,12 @@ def check_archive(self, filename, system):

        url = 'file://' + join_path(os.getcwd(), 'archive.tar.gz')
        print url
        self.stage = Stage(url)
        self.stage.fetch()
        with Stage(url) as stage:
            stage.fetch()

        guesser = ConfigureGuesser()
        guesser(self.stage)
        self.assertEqual(system, guesser.build_system)
            guesser = ConfigureGuesser()
            guesser(stage)
            self.assertEqual(system, guesser.build_system)


    def test_python(self):
@@ -26,19 +26,18 @@
These tests check the database is functioning properly,
both in memory and in its file
"""
import tempfile
import shutil
import multiprocessing

from llnl.util.lock import *
from llnl.util.filesystem import join_path
import shutil
import tempfile

import spack
from llnl.util.filesystem import join_path
from llnl.util.lock import *
from llnl.util.tty.colify import colify
from spack.database import Database
from spack.directory_layout import YamlDirectoryLayout
from spack.test.mock_packages_test import *

from llnl.util.tty.colify import colify

def _print_ref_counts():
    """Print out all ref counts for the graph used here, for debugging"""
@@ -25,20 +25,17 @@
"""\
This test verifies that the Spack directory layout works properly.
"""
import unittest
import tempfile
import shutil
import os

from llnl.util.filesystem import *
import shutil
import tempfile

import spack
from spack.spec import Spec
from spack.repository import RepoPath
from llnl.util.filesystem import *
from spack.directory_layout import YamlDirectoryLayout
from spack.repository import RepoPath
from spack.spec import Spec
from spack.test.mock_packages_test import *

# number of packages to test (to reduce test time)
max_packages = 10
@@ -23,19 +23,12 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import unittest
import shutil
import tempfile

from llnl.util.filesystem import *

import spack
from spack.version import ver
from spack.stage import Stage
from spack.util.executable import which

from llnl.util.filesystem import *
from spack.test.mock_packages_test import *
from spack.test.mock_repo import MockGitRepo
from spack.version import ver


class GitFetchTest(MockPackagesTest):

@@ -52,19 +45,15 @@ def setUp(self):
        spec.concretize()
        self.pkg = spack.repo.get(spec, new=True)

    def tearDown(self):
        """Destroy the stage space used by this test."""
        super(GitFetchTest, self).tearDown()
        self.repo.destroy()
        self.pkg.do_clean()

    def assert_rev(self, rev):
        """Check that the current git revision is equal to the supplied rev."""
        self.assertEqual(self.repo.rev_hash('HEAD'), self.repo.rev_hash(rev))

    def try_fetch(self, rev, test_file, args):
        """Tries to:
           1. Fetch the repo using a fetch strategy constructed with

@@ -76,26 +65,27 @@ def try_fetch(self, rev, test_file, args):
        """
        self.pkg.versions[ver('git')] = args

        self.pkg.do_stage()
        self.assert_rev(rev)
        with self.pkg.stage:
            self.pkg.do_stage()
            self.assert_rev(rev)

        file_path = join_path(self.pkg.stage.source_path, test_file)
        self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
        self.assertTrue(os.path.isfile(file_path))
            file_path = join_path(self.pkg.stage.source_path, test_file)
            self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
            self.assertTrue(os.path.isfile(file_path))

        os.unlink(file_path)
        self.assertFalse(os.path.isfile(file_path))
            os.unlink(file_path)
            self.assertFalse(os.path.isfile(file_path))

        untracked_file = 'foobarbaz'
        touch(untracked_file)
        self.assertTrue(os.path.isfile(untracked_file))
        self.pkg.do_restage()
        self.assertFalse(os.path.isfile(untracked_file))
            untracked_file = 'foobarbaz'
            touch(untracked_file)
            self.assertTrue(os.path.isfile(untracked_file))
            self.pkg.do_restage()
            self.assertFalse(os.path.isfile(untracked_file))

        self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
        self.assertTrue(os.path.isfile(file_path))
            self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
            self.assertTrue(os.path.isfile(file_path))

        self.assert_rev(rev)
            self.assert_rev(rev)


    def test_fetch_master(self):
@@ -23,16 +23,12 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import unittest

from llnl.util.filesystem import *

import spack

from spack.version import ver
from spack.stage import Stage
from spack.util.executable import which
from spack.test.mock_packages_test import *
from spack.test.mock_repo import MockHgRepo
from llnl.util.filesystem import *
from spack.test.mock_packages_test import *


class HgFetchTest(MockPackagesTest):

@@ -49,13 +45,10 @@ def setUp(self):
        spec.concretize()
        self.pkg = spack.repo.get(spec, new=True)

    def tearDown(self):
        """Destroy the stage space used by this test."""
        super(HgFetchTest, self).tearDown()
        self.repo.destroy()
        self.pkg.do_clean()

    def try_fetch(self, rev, test_file, args):
        """Tries to:

@@ -68,26 +61,27 @@ def try_fetch(self, rev, test_file, args):
        """
        self.pkg.versions[ver('hg')] = args

        self.pkg.do_stage()
        self.assertEqual(self.repo.get_rev(), rev)
        with self.pkg.stage:
            self.pkg.do_stage()
            self.assertEqual(self.repo.get_rev(), rev)

        file_path = join_path(self.pkg.stage.source_path, test_file)
        self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
        self.assertTrue(os.path.isfile(file_path))
            file_path = join_path(self.pkg.stage.source_path, test_file)
            self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
            self.assertTrue(os.path.isfile(file_path))

        os.unlink(file_path)
        self.assertFalse(os.path.isfile(file_path))
            os.unlink(file_path)
            self.assertFalse(os.path.isfile(file_path))

        untracked = 'foobarbaz'
        touch(untracked)
        self.assertTrue(os.path.isfile(untracked))
        self.pkg.do_restage()
        self.assertFalse(os.path.isfile(untracked))
            untracked = 'foobarbaz'
            touch(untracked)
            self.assertTrue(os.path.isfile(untracked))
            self.pkg.do_restage()
            self.assertFalse(os.path.isfile(untracked))

        self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
        self.assertTrue(os.path.isfile(file_path))
            self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
            self.assertTrue(os.path.isfile(file_path))

        self.assertEqual(self.repo.get_rev(), rev)
            self.assertEqual(self.repo.get_rev(), rev)


    def test_fetch_default(self):
@@ -22,18 +22,13 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import unittest
import shutil
import tempfile

from llnl.util.filesystem import *

import spack
from spack.stage import Stage
from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
from llnl.util.filesystem import *
from spack.directory_layout import YamlDirectoryLayout
from spack.util.executable import which
from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
from spack.test.mock_packages_test import *
from spack.test.mock_repo import MockArchive
@@ -24,8 +24,6 @@
##############################################################################
import os
import unittest
import shutil
import tempfile

from llnl.util.filesystem import *
from llnl.util.link_tree import LinkTree

@@ -38,6 +36,7 @@ class LinkTreeTest(unittest.TestCase):

    def setUp(self):
        self.stage = Stage('link-tree-test')
        self.stage.create()

        with working_dir(self.stage.path):
            touchp('source/1')

@@ -51,10 +50,8 @@ def setUp(self):
        source_path = os.path.join(self.stage.path, 'source')
        self.link_tree = LinkTree(source_path)

    def tearDown(self):
        if self.stage:
            self.stage.destroy()
        self.stage.destroy()


    def check_file_link(self, filename):
@@ -25,15 +25,13 @@
"""
These tests ensure that our lock works correctly.
"""
import unittest
import os
import tempfile
import shutil
import tempfile
import unittest
from multiprocessing import Process

from llnl.util.lock import *
from llnl.util.filesystem import join_path, touch

from llnl.util.lock import *
from spack.util.multiproc import Barrier

# This is the longest a failed test will take, as the barriers will
@@ -28,13 +28,13 @@
This just tests whether the right args are getting passed to make.
"""
import os
import unittest
import tempfile
import shutil
import tempfile
import unittest

from llnl.util.filesystem import *
from spack.util.environment import path_put_first
from spack.build_environment import MakeExecutable
from spack.util.environment import path_put_first


class MakeExecutableTest(unittest.TestCase):
@@ -23,11 +23,10 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from filecmp import dircmp

import spack
import spack.mirror
from spack.util.compression import decompressor_for

from filecmp import dircmp
from spack.test.mock_packages_test import *
from spack.test.mock_repo import *

@@ -74,14 +73,14 @@ def set_up_package(self, name, MockRepoClass, url_attr):


    def check_mirror(self):
        stage = Stage('spack-mirror-test')
        mirror_root = join_path(stage.path, 'test-mirror')
        with Stage('spack-mirror-test') as stage:
            mirror_root = join_path(stage.path, 'test-mirror')

        # register mirror with spack config
        mirrors = { 'spack-mirror-test' : 'file://' + mirror_root }
        spack.config.update_config('mirrors', mirrors)

            # register mirror with spack config
            mirrors = { 'spack-mirror-test' : 'file://' + mirror_root }
            spack.config.update_config('mirrors', mirrors)

        try:
            os.chdir(stage.path)
            spack.mirror.create(
                mirror_root, self.repos, no_checksum=True)

@@ -97,38 +96,28 @@ def check_mirror(self):
            files = os.listdir(subdir)
            self.assertEqual(len(files), 1)

            # Now try to fetch each package.
            for name, mock_repo in self.repos.items():
                spec = Spec(name).concretized()
                pkg = spec.package
            # Now try to fetch each package.
            for name, mock_repo in self.repos.items():
                spec = Spec(name).concretized()
                pkg = spec.package

                pkg._stage = None
                saved_checksum_setting = spack.do_checksum
                try:
                    # Stage the archive from the mirror and cd to it.
                    spack.do_checksum = False
                    pkg.do_stage(mirror_only=True)

                    # Compare the original repo with the expanded archive
                    original_path = mock_repo.path
                    if 'svn' in name:
                        # have to check out the svn repo to compare.
                        original_path = join_path(mock_repo.path, 'checked_out')
                        svn('checkout', mock_repo.url, original_path)

                    dcmp = dircmp(original_path, pkg.stage.source_path)

                    # make sure there are no new files in the expanded tarball
                    self.assertFalse(dcmp.right_only)

                    # and that all original files are present.
                    self.assertTrue(all(l in exclude for l in dcmp.left_only))

                finally:
                    spack.do_checksum = saved_checksum_setting
                    pkg.do_clean()
        finally:
            stage.destroy()
                saved_checksum_setting = spack.do_checksum
                with pkg.stage:
                    # Stage the archive from the mirror and cd to it.
                    spack.do_checksum = False
                    pkg.do_stage(mirror_only=True)
                    # Compare the original repo with the expanded archive
                    original_path = mock_repo.path
                    if 'svn' in name:
                        # have to check out the svn repo to compare.
                        original_path = join_path(mock_repo.path, 'checked_out')
                        svn('checkout', mock_repo.url, original_path)
                    dcmp = dircmp(original_path, pkg.stage.source_path)
                    # make sure there are no new files in the expanded tarball
                    self.assertFalse(dcmp.right_only)
                    # and that all original files are present.
                    self.assertTrue(all(l in exclude for l in dcmp.left_only))
                spack.do_checksum = saved_checksum_setting


    def test_git_mirror(self):
@@ -22,17 +22,15 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import os
import shutil
import unittest
import tempfile
from ordereddict_backport import OrderedDict

from llnl.util.filesystem import mkdirp
import unittest

import spack
import spack.config
from llnl.util.filesystem import mkdirp
from ordereddict_backport import OrderedDict
from spack.repository import RepoPath
from spack.spec import Spec
|
|||
import shutil
|
||||
|
||||
from llnl.util.filesystem import *
|
||||
|
||||
import spack
|
||||
from spack.version import ver
|
||||
from spack.stage import Stage
|
||||
from spack.util.executable import which
|
||||
|
||||
|
||||
#
|
||||
# VCS Systems used by mock repo code.
|
||||
#
|
||||
|
|
|
@@ -25,14 +25,11 @@
"""
Test for multi_method dispatch.
"""
import unittest

import spack
from spack.multimethod import *
from spack.version import *
from spack.spec import Spec
from spack.multimethod import when
from spack.test.mock_packages_test import *
from spack.version import *


class MultiMethodTest(MockPackagesTest):
@@ -23,6 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest

from spack.util.naming import NamespaceTrie
@@ -22,10 +22,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest

import spack
from spack.spec import Spec, CompilerSpec
from spack.spec import Spec
from spack.test.mock_packages_test import *

class ConcretizeTest(MockPackagesTest):
@@ -22,14 +22,12 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest

from llnl.util.filesystem import join_path

import spack
from llnl.util.filesystem import join_path
from spack.repository import Repo
from spack.util.naming import mod_to_class
from spack.test.mock_packages_test import *
from spack.util.naming import mod_to_class


class PackagesTest(MockPackagesTest):
@@ -28,12 +28,11 @@
Spack was originally 2.7, but enough systems in 2014 are still using
2.6 on their frontend nodes that we need 2.6 to get adopted.
"""
import unittest
import os
import re
import unittest

import llnl.util.tty as tty

import pyqver2
import spack
@@ -31,8 +31,6 @@
import spack
import spack.package

from llnl.util.lang import list_modules

from spack.spec import Spec
from spack.test.mock_packages_test import *
@@ -22,7 +22,6 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest
from spack.spec import *
from spack.test.mock_packages_test import *
@@ -23,9 +23,10 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest

import spack.spec
from spack.spec import *
from spack.parse import Token
from spack.spec import *

# Sample output for a complex lexing.
complex_lex = [Token(ID, 'mvapich_foo'),
@@ -25,15 +25,13 @@
"""\
Test that the Stage class works correctly.
"""
import unittest
import shutil
import os
import getpass
import shutil
import unittest
from contextlib import *

from llnl.util.filesystem import *

import spack
from llnl.util.filesystem import *
from spack.stage import Stage
from spack.util.executable import which

@@ -192,116 +190,90 @@ def check_destroy(self, stage, stage_name):

    def test_setup_and_destroy_name_with_tmp(self):
        with use_tmp(True):
            stage = Stage(archive_url, name=stage_name)
            self.check_setup(stage, stage_name)

            stage.destroy()
            with Stage(archive_url, name=stage_name) as stage:
                self.check_setup(stage, stage_name)
            self.check_destroy(stage, stage_name)


    def test_setup_and_destroy_name_without_tmp(self):
        with use_tmp(False):
            stage = Stage(archive_url, name=stage_name)
            self.check_setup(stage, stage_name)

            stage.destroy()
            with Stage(archive_url, name=stage_name) as stage:
                self.check_setup(stage, stage_name)
            self.check_destroy(stage, stage_name)


    def test_setup_and_destroy_no_name_with_tmp(self):
        with use_tmp(True):
            stage = Stage(archive_url)
            self.check_setup(stage, None)

            stage.destroy()
            with Stage(archive_url) as stage:
                self.check_setup(stage, None)
            self.check_destroy(stage, None)


    def test_setup_and_destroy_no_name_without_tmp(self):
        with use_tmp(False):
            stage = Stage(archive_url)
            self.check_setup(stage, None)

            stage.destroy()
            with Stage(archive_url) as stage:
                self.check_setup(stage, None)
            self.check_destroy(stage, None)


    def test_chdir(self):
        stage = Stage(archive_url, name=stage_name)

        stage.chdir()
        self.check_setup(stage, stage_name)
        self.check_chdir(stage, stage_name)

        stage.destroy()
        with Stage(archive_url, name=stage_name) as stage:
            stage.chdir()
            self.check_setup(stage, stage_name)
            self.check_chdir(stage, stage_name)
        self.check_destroy(stage, stage_name)


    def test_fetch(self):
        stage = Stage(archive_url, name=stage_name)

        stage.fetch()
        self.check_setup(stage, stage_name)
        self.check_chdir(stage, stage_name)
        self.check_fetch(stage, stage_name)

        stage.destroy()
        with Stage(archive_url, name=stage_name) as stage:
            stage.fetch()
            self.check_setup(stage, stage_name)
            self.check_chdir(stage, stage_name)
            self.check_fetch(stage, stage_name)
        self.check_destroy(stage, stage_name)


    def test_expand_archive(self):
        stage = Stage(archive_url, name=stage_name)

        stage.fetch()
        self.check_setup(stage, stage_name)
        self.check_fetch(stage, stage_name)

        stage.expand_archive()
        self.check_expand_archive(stage, stage_name)

        stage.destroy()
        with Stage(archive_url, name=stage_name) as stage:
            stage.fetch()
            self.check_setup(stage, stage_name)
            self.check_fetch(stage, stage_name)
            stage.expand_archive()
            self.check_expand_archive(stage, stage_name)
        self.check_destroy(stage, stage_name)


    def test_expand_archive(self):
        stage = Stage(archive_url, name=stage_name)

        stage.fetch()
        self.check_setup(stage, stage_name)
        self.check_fetch(stage, stage_name)

        stage.expand_archive()
        stage.chdir_to_source()
        self.check_expand_archive(stage, stage_name)
        self.check_chdir_to_source(stage, stage_name)

        stage.destroy()
        with Stage(archive_url, name=stage_name) as stage:
            stage.fetch()
            self.check_setup(stage, stage_name)
            self.check_fetch(stage, stage_name)
            stage.expand_archive()
            stage.chdir_to_source()
            self.check_expand_archive(stage, stage_name)
            self.check_chdir_to_source(stage, stage_name)
        self.check_destroy(stage, stage_name)


    def test_restage(self):
        stage = Stage(archive_url, name=stage_name)
        with Stage(archive_url, name=stage_name) as stage:
            stage.fetch()
            stage.expand_archive()
            stage.chdir_to_source()
            self.check_expand_archive(stage, stage_name)
            self.check_chdir_to_source(stage, stage_name)

        stage.fetch()
        stage.expand_archive()
        stage.chdir_to_source()
        self.check_expand_archive(stage, stage_name)
        self.check_chdir_to_source(stage, stage_name)
            # Try to make a file in the old archive dir
            with open('foobar', 'w') as file:
                file.write("this file is to be destroyed.")

        # Try to make a file in the old archive dir
        with open('foobar', 'w') as file:
            file.write("this file is to be destroyed.")
            self.assertTrue('foobar' in os.listdir(stage.source_path))

        self.assertTrue('foobar' in os.listdir(stage.source_path))

        # Make sure the file is not there after restage.
        stage.restage()
        self.check_chdir(stage, stage_name)
        self.check_fetch(stage, stage_name)

        stage.chdir_to_source()
        self.check_chdir_to_source(stage, stage_name)
        self.assertFalse('foobar' in os.listdir(stage.source_path))

        stage.destroy()
            # Make sure the file is not there after restage.
            stage.restage()
            self.check_chdir(stage, stage_name)
            self.check_fetch(stage, stage_name)
            stage.chdir_to_source()
            self.check_chdir_to_source(stage, stage_name)
            self.assertFalse('foobar' in os.listdir(stage.source_path))
        self.check_destroy(stage, stage_name)
@@ -24,18 +24,12 @@
##############################################################################
import os
import re
import unittest
import shutil
import tempfile

from llnl.util.filesystem import *

import spack
from spack.version import ver
from spack.stage import Stage
from spack.util.executable import which
from spack.test.mock_packages_test import *

from spack.test.mock_repo import svn, MockSvnRepo
from spack.version import ver
from spack.test.mock_packages_test import *
from llnl.util.filesystem import *


class SvnFetchTest(MockPackagesTest):

@@ -51,13 +45,10 @@ def setUp(self):
        spec.concretize()
        self.pkg = spack.repo.get(spec, new=True)

    def tearDown(self):
        """Destroy the stage space used by this test."""
        super(SvnFetchTest, self).tearDown()
        self.repo.destroy()
        self.pkg.do_clean()

    def assert_rev(self, rev):
        """Check that the current revision is equal to the supplied rev."""

@@ -70,7 +61,6 @@ def get_rev():
            return match.group(1)
        self.assertEqual(get_rev(), rev)

    def try_fetch(self, rev, test_file, args):
        """Tries to:
           1. Fetch the repo using a fetch strategy constructed with

@@ -82,26 +72,27 @@ def try_fetch(self, rev, test_file, args):
        """
        self.pkg.versions[ver('svn')] = args

        self.pkg.do_stage()
        self.assert_rev(rev)
        with self.pkg.stage:
            self.pkg.do_stage()
            self.assert_rev(rev)

        file_path = join_path(self.pkg.stage.source_path, test_file)
        self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
        self.assertTrue(os.path.isfile(file_path))
            file_path = join_path(self.pkg.stage.source_path, test_file)
            self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
            self.assertTrue(os.path.isfile(file_path))

        os.unlink(file_path)
        self.assertFalse(os.path.isfile(file_path))
            os.unlink(file_path)
            self.assertFalse(os.path.isfile(file_path))

        untracked = 'foobarbaz'
        touch(untracked)
        self.assertTrue(os.path.isfile(untracked))
        self.pkg.do_restage()
        self.assertFalse(os.path.isfile(untracked))
            untracked = 'foobarbaz'
            touch(untracked)
            self.assertTrue(os.path.isfile(untracked))
            self.pkg.do_restage()
            self.assertFalse(os.path.isfile(untracked))

        self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
        self.assertTrue(os.path.isfile(file_path))
            self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
            self.assertTrue(os.path.isfile(file_path))

        self.assert_rev(rev)
            self.assert_rev(rev)


    def test_fetch_default(self):
@@ -22,10 +22,10 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from nose.plugins import Plugin

import os

from nose.plugins import Plugin

class Tally(Plugin):
    name = 'tally'
@@ -22,10 +22,11 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest
import itertools
import unittest

import spack

test_install = __import__("spack.cmd.test-install",
                          fromlist=["BuildId", "create_test_output", "TestResult"])
@@ -25,10 +25,7 @@
"""\
Tests ability of spack to extrapolate URL versions from existing versions.
"""
import spack
import spack.url as url
from spack.spec import Spec
from spack.version import ver
from spack.test.mock_packages_test import *
@@ -27,8 +27,8 @@
detection in Homebrew.
"""
import unittest

import spack.url as url
from pprint import pprint


class UrlParseTest(unittest.TestCase):
@@ -27,7 +27,6 @@
"""
import unittest

import spack
import spack.url as url
@@ -28,6 +28,7 @@
where it makes sense.
"""
import unittest

from spack.version import *
@@ -26,6 +26,7 @@
Test Spack's custom YAML format.
"""
import unittest

import spack.util.spack_yaml as syaml

test_file = """\