Implement per-version attributes for flexible fetch policies.
- Tests pass with URL fetching and the new scheme.
- Lots of refactoring.
- Infrastructure is there for arbitrary fetch policies and more attributes on the version() call.
- Mirrors do not currently work properly, and they get in the way of a proper git fetch.
parent 52d140c337
commit 0cc79e0564
8 changed files with 275 additions and 89 deletions
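As a rough sketch of the per-version attribute scheme this commit enables (not taken from the diff below; the package name, URLs, and checksum are invented, and per the message above git fetching is not yet fully working), a package might declare one URL-fetched version and one git-fetched version like this:

from spack import *

class Foo(Package):
    """Hypothetical package illustrating per-version fetch attributes."""
    homepage = "http://example.com/foo"
    url      = "http://example.com/foo-1.0.tar.gz"

    # Matched by URLFetchStrategy via its ('url', 'md5') attributes.
    version('1.0', md5='0123456789abcdef0123456789abcdef')

    # Matched by GitFetchStrategy via its ('git', 'ref', 'tag', 'branch') attributes.
    version('develop', git='https://example.com/foo.git', branch='master')

    def install(self, spec, prefix):
        configure('--prefix=%s' % prefix)
        make()
        make('install')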
@@ -47,9 +47,11 @@
import spack
import spack.error
import spack.util.crypto as crypto
from spack.version import Version, ver
from spack.util.compression import decompressor_for


class FetchStrategy(object):
    def __init__(self):
        # The stage is initialized late, so that fetch strategies can be constructed
@@ -63,21 +65,37 @@ def set_stage(self, stage):
        self.stage = stage


    # Subclasses need to implement tehse methods
    # Subclasses need to implement these methods
    def fetch(self): pass      # Return True on success, False on fail
    def check(self): pass
    def expand(self): pass
    def reset(self): pass
    def __str__(self): pass
    def __str__(self):
        return "FetchStrategy.__str___"

    # This method is used to match fetch strategies to version()
    # arguments in packages.
    @classmethod
    def match(kwargs):
        return any(k in kwargs for k in self.attributes)


class URLFetchStrategy(FetchStrategy):
    attributes = ('url', 'md5')

    def __init__(self, url, digest=None):
    def __init__(self, url=None, digest=None, **kwargs):
        super(URLFetchStrategy, self).__init__()
        self.url = url
        self.digest = digest

        # If URL or digest are provided in the kwargs, then prefer
        # those values.
        self.url = kwargs.get('url', None)
        if not self.url: self.url = url

        self.digest = kwargs.get('md5', None)
        if not self.digest: self.digest = digest

        if not self.url:
            raise ValueError("URLFetchStrategy requires a url for fetching.")


    def fetch(self):
@@ -142,9 +160,7 @@ def expand(self):
        self.stage.chdir()
        if not self.archive_file:
            raise NoArchiveFileError("URLFetchStrategy couldn't find archive file",
                                     "Failed on expand() for URL %s" % self.url)

        print self.archive_file
                                     "Failed on expand() for URL %s" % self.url)

        decompress = decompressor_for(self.archive_file)
        decompress(self.archive_file)
@@ -174,20 +190,118 @@ def reset(self):
        self.expand()


    def __str__(self):
        if self.url:
            return self.url
        else:
            return "URLFetchStrategy <no url>"


class VCSFetchStrategy(FetchStrategy):
    def __init__(self, name):
        super(VCSFetchStrategy, self).__init__()
        self.name = name


    def check(self):
        assert(self.stage)
        tty.msg("No check needed when fetching with %s." % self.name)

    def expand(self):
        assert(self.stage)
        tty.debug("Source fetched with %s is already expanded." % self.name)


class GitFetchStrategy(VCSFetchStrategy):
    attributes = ('git', 'ref', 'tag', 'branch')

    def __init__(self, **kwargs):
        super(GitFetchStrategy, self).__init__("git")
        self.url = kwargs.get('git', None)
        if not self.url:
            raise ValueError("GitFetchStrategy requires git argument.")

        if sum((k in kwargs for k in ('ref', 'tag', 'branch'))) > 1:
            raise FetchStrategyError(
                "Git requires exactly one ref, branch, or tag.")

        self._git = None
        self.ref = kwargs.get('ref', None)
        self.branch = kwargs.get('branch', None)
        if not self.branch:
            self.branch = kwargs.get('tag', None)


    @property
    def git_version(self):
        git = which('git', required=True)
        vstring = git('--version', return_output=True).lstrip('git version ')
        return Version(vstring)


    @property
    def git(self):
        if not self._git:
            self._git = which('git', required=True)
        return self._git


    def fetch(self):
        assert(self.stage)
        self.stage.chdir()

        if self.stage.source_path:
            tty.msg("Already fetched %s." % self.source_path)
            return

        tty.msg("Trying to clone git repository: %s" % self.url)

        if self.ref:
            # Need to do a regular clone and check out everything if
            # they asked for a particular ref.
            git('clone', self.url)
            self.chdir_to_source()
            git('checkout', self.ref)

        else:
            # Can be more efficient if not checking out a specific ref.
            args = ['clone']

            # If we want a particular branch ask for it.
            if self.branch:
                args.extend(['--branch', self.branch])

            # Try to be efficient if we're using a new enough git.
            # This checks out only one branch's history
            if self.git_version > ver('1.7.10'):
                args.append('--single-branch')

            args.append(self.url)
            git(*args)
            self.chdir_to_source()


    def reset(self):
        assert(self.stage)
        git = which('git', required=True)

        self.stage.chdir_to_source()
        git('checkout', '.')
        git('clean', '-f')


    def __str__(self):
        return self.url


class GitFetchStrategy(FetchStrategy):
    pass


class SvnFetchStrategy(FetchStrategy):
    attributes = ('svn', 'rev', 'revision')
    pass


def strategy_for_url(url):
def from_url(url):
    """Given a URL, find an appropriate fetch strategy for it.
       Currently just gives you a URLFetchStrategy that uses curl.
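For orientation, constructing the new GitFetchStrategy directly might look like the following (the repository URL and branch are made up; per the commit message, git fetching is not yet fully functional when mirrors are configured):

# kwargs mirror the version() attributes listed above; at most one of
# ref / tag / branch may be given, otherwise FetchStrategyError is raised.
fetcher = GitFetchStrategy(git='https://example.com/proj.git', branch='stable')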
@@ -197,6 +311,27 @@ def strategy_for_url(url):
    return URLFetchStrategy(url)


def args_are_for(args, fetcher):
    return any(arg in args for arg in fetcher.attributes)


def from_args(args, pkg):
    """Determine a fetch strategy based on the arguments supplied to
       version() in the package description."""
    fetchers = (URLFetchStrategy, GitFetchStrategy)
    for fetcher in fetchers:
        if args_are_for(args, fetcher):
            attrs = {}
            for attr in fetcher.attributes:
                default = getattr(pkg, attr, None)
                if default:
                    attrs[attr] = default

            attrs.update(args)
            return fetcher(**attrs)

    return None


class FetchStrategyError(spack.error.SpackError):
    def __init__(self, msg, long_msg):
        super(FetchStrategyError, self).__init__(msg, long_msg)
@@ -220,3 +355,7 @@ def __init__(self, msg, long_msg):
        super(NoDigestError, self).__init__(msg, long_msg)


class InvalidArgsError(FetchStrategyError):
    def __init__(self, msg, long_msg):
        super(InvalidArgsError, self).__init__(msg, long_msg)
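A standalone sketch of the matching idea behind args_are_for() and from_args() above, using plain dicts instead of real packages (the attribute tuples are copied from the diff; everything else is illustrative):

URL_ATTRS = ('url', 'md5')
GIT_ATTRS = ('git', 'ref', 'tag', 'branch')

def pick_fetcher(version_args):
    """Return the name of the first fetcher whose attributes appear in the args."""
    for name, attrs in (('URLFetchStrategy', URL_ATTRS),
                        ('GitFetchStrategy', GIT_ATTRS)):
        if any(k in version_args for k in attrs):
            return name
    return None

print(pick_fetcher({'url': 'http://example.com/foo-1.0.tar.gz', 'md5': 'abc'}))
print(pick_fetcher({'git': 'https://example.com/foo.git', 'branch': 'master'}))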
@@ -52,6 +52,7 @@
import spack.hooks
import spack.build_environment as build_env
import spack.url as url
import spack.fetch_strategy as fs
from spack.version import *
from spack.stage import Stage
from spack.util.web import get_pages
@@ -300,7 +301,7 @@ class SomePackage(Package):
    # These variables are defaults for the various "relations".
    #
    """Map of information about Versions of this package.
       Map goes: Version -> VersionDescriptor"""
       Map goes: Version -> dict of attributes"""
    versions = {}

    """Specs of dependency packages, keyed by name."""
@@ -356,8 +357,7 @@ def ensure_has_dict(attr_name):

        # Check version descriptors
        for v in sorted(self.versions):
            vdesc = self.versions[v]
            assert(isinstance(vdesc, spack.relations.VersionDescriptor))
            assert(isinstance(self.versions[v], dict))

        # Version-ize the keys in versions dict
        try:
@@ -368,9 +368,14 @@ def ensure_has_dict(attr_name):
        # stage used to build this package.
        self._stage = None

        # patch up self.url based on the actual version
        # If there's no default URL provided, set this package's url to None
        if not hasattr(self, 'url'):
            self.url = None

        # Set up a fetch strategy for this package.
        self.fetcher = None
        if self.spec.concrete:
            self.url = self.url_for_version(self.version)
            self.fetcher = fs.from_args(self.versions[self.version], self)

        # Set a default list URL (place to find available versions)
        if not hasattr(self, 'list_url'):
@@ -387,6 +392,61 @@ def version(self):
        return self.spec.versions[0]


    @memoized
    def version_urls(self):
        """Return a list of URLs for different versions of this
           package, sorted by version. A version's URL only appears
           in this list if it has an explicitly defined URL."""
        version_urls = {}
        for v in sorted(self.versions):
            args = self.versions[v]
            if 'url' in args:
                version_urls[v] = args['url']
        return version_urls


    def nearest_url(self, version):
        """Finds the URL for the next lowest version with a URL.
           If there is no lower version with a URL, uses the
           package url property. If that isn't there, uses a
           *higher* URL, and if that isn't there raises an error.
        """
        version_urls = self.version_urls()
        url = self.url

        for v in version_urls:
            if v > version and url:
                break
            if version_urls[v]:
                url = version_urls[v]
        return url


    def has_url(self):
        """Returns whether there is a URL available for this package.
           If there isn't, it's probably fetched some other way (version
           control, etc.)"""
        return self.url or self.version_urls()


    # TODO: move this out of here and into some URL extrapolation module.
    def url_for_version(self, version):
        """Returns a URL that you can download a new version of this package from."""
        if not isinstance(version, Version):
            version = Version(version)

        if not self.has_url():
            raise NoURLError(self.__class__)

        # If we have a specific URL for this version, don't extrapolate.
        version_urls = self.version_urls()
        if version in version_urls:
            return version_urls[version]
        else:
            return url.substitute_version(self.nearest_url(version),
                                          self.url_version(version))


    @property
    def stage(self):
        if not self.spec.concrete:
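The nearest_url() logic above can be illustrated with a small self-contained example (version tuples and URLs invented): a version without an explicit URL borrows the URL of the closest lower version that has one, which url_for_version() then rewrites with substitute_version().

version_urls = {
    (8, 1, 2): 'http://example.com/foo-8.1.2.tar.gz',
    (8, 2, 1): 'http://example.com/alt/foo-8.2.1.tar.gz',
}

def nearest_url(version):
    # Walk versions in ascending order; keep the last URL seen at or
    # below the requested version, falling forward to a higher one if
    # nothing lower has a URL.
    url = None
    for v in sorted(version_urls):
        if v > version and url:
            break
        url = version_urls[v]
    return url

print(nearest_url((8, 1, 9)))   # borrows the 8.1.2 URL
print(nearest_url((8, 0, 0)))   # nothing lower, so falls forward to 8.1.2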
@@ -397,11 +457,12 @@ def stage(self):
                raise PackageVersionError(self.version)

            # TODO: move this logic into a mirror module.
            # TODO: get rid of dependence on extension.
            mirror_path = "%s/%s" % (self.name, "%s-%s.%s" % (
                self.name, self.version, extension(self.url)))

            self._stage = Stage(
                self.url, mirror_path=mirror_path, name=self.spec.short_spec)
                self.fetcher, mirror_path=mirror_path, name=self.spec.short_spec)
            return self._stage
@@ -523,36 +584,6 @@ def url_version(self, version):
        return str(version)


    def url_for_version(self, version):
        """Returns a URL that you can download a new version of this package from."""
        if not isinstance(version, Version):
            version = Version(version)

        def nearest_url(version):
            """Finds the URL for the next lowest version with a URL.
               If there is no lower version with a URL, uses the
               package url property. If that isn't there, uses a
               *higher* URL, and if that isn't there raises an error.
            """
            url = getattr(self, 'url', None)
            for v in sorted(self.versions):
                if v > version and url:
                    break
                if self.versions[v].url:
                    url = self.versions[v].url
            return url

        if version in self.versions:
            vdesc = self.versions[version]
            if not vdesc.url:
                base_url = nearest_url(version)
                vdesc.url = url.substitute_version(
                    base_url, self.url_version(version))
            return vdesc.url
        else:
            return nearest_url(version)


    @property
    def default_url(self):
        if self.concrete:
@@ -605,7 +636,7 @@ def do_stage(self):
            tty.msg("Created stage directory in %s." % self.stage.path)
        else:
            tty.msg("Already staged %s in %s." % (self.name, self.stage.path))
        self.stage.chdir_to_archive()
        self.stage.chdir_to_source()


    def do_patch(self):
@@ -628,7 +659,7 @@ def do_patch(self):
            tty.msg("Patching failed last time. Restaging.")
            self.stage.restage()

        self.stage.chdir_to_archive()
        self.stage.chdir_to_source()

        # If this file exists, then we already applied all the patches.
        if os.path.isfile(good_file):
@@ -788,7 +819,7 @@ def do_uninstall(self, **kwargs):

    def do_clean(self):
        if self.stage.expanded_archive_path:
            self.stage.chdir_to_archive()
            self.stage.chdir_to_source()
            self.clean()
@@ -944,3 +975,10 @@ def __init__(self, cls):
        super(VersionFetchError, self).__init__(
            "Cannot fetch version for package %s " % cls.__name__ +
            "because it does not define a default url.")


class NoURLError(PackageError):
    """Raised when someone tries to build a URL for a package with no URLs."""
    def __init__(self, cls):
        super(NoURLError, self).__init__(
            "Package %s has no version with a URL." % cls.__name__)
@@ -64,7 +64,7 @@ def apply(self, stage):
        """Fetch this patch, if necessary, and apply it to the source
           code in the supplied stage.
        """
        stage.chdir_to_archive()
        stage.chdir_to_source()

        patch_stage = None
        try:
@@ -79,32 +79,28 @@ class Mpileaks(Package):
import spack.spec
import spack.error
import spack.url

from spack.version import Version
from spack.patch import Patch
from spack.spec import Spec, parse_anonymous_spec


class VersionDescriptor(object):
    """A VersionDescriptor contains information to describe a
       particular version of a package. That currently includes a URL
       for the version along with a checksum."""
    def __init__(self, checksum, url):
        self.checksum = checksum
        self.url = url


def version(ver, checksum, **kwargs):
    """Adds a version and metadata describing how to fetch it."""
def version(ver, checksum=None, **kwargs):
    """Adds a version and metadata describing how to fetch it.
       Metadata is just stored as a dict in the package's versions
       dictionary. Package must turn it into a valid fetch strategy
       later.
    """
    pkg = caller_locals()

    versions = pkg.setdefault('versions', {})
    patches = pkg.setdefault('patches', {})

    ver = Version(ver)
    url = kwargs.get('url', None)
    # special case checksum for backward compatibility
    if checksum:
        kwargs['md5'] = checksum

    versions[ver] = VersionDescriptor(checksum, url)
    # Store the kwargs for the package to use later when constructing
    # a fetch strategy.
    versions[Version(ver)] = kwargs


def depends_on(*specs):
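After this change, a package's versions dictionary simply maps each Version to the keyword arguments given to version(). For the hypothetical declarations sketched near the top of this page it would hold roughly the following (assuming Spack's modules are importable; the checksum and URL are invented):

from spack.version import Version

versions = {
    Version('1.0'):     {'md5': '0123456789abcdef0123456789abcdef'},
    Version('develop'): {'git': 'https://example.com/foo.git', 'branch': 'master'},
}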
@@ -32,11 +32,10 @@

import spack
import spack.config
from spack.fetch_strategy import strategy_for_url, URLFetchStrategy
import spack.fetch_strategy as fetch_strategy
import spack.error


STAGE_PREFIX = 'spack-stage-'
@@ -70,7 +69,7 @@ class Stage(object):
       similar, and are intended to persist for only one run of spack.
    """

    def __init__(self, url, **kwargs):
    def __init__(self, url_or_fetch_strategy, **kwargs):
        """Create a stage object.
           Parameters:
           url_or_fetch_strategy
@@ -83,10 +82,16 @@ def __init__(self, url, **kwargs):
             stage object later). If name is not provided, then this
             stage will be given a unique name automatically.
        """
        if isinstance(url, basestring):
            self.fetcher = strategy_for_url(url)
        if isinstance(url_or_fetch_strategy, basestring):
            self.fetcher = fetch_strategy.from_url(url_or_fetch_strategy)
            self.fetcher.set_stage(self)

        elif isinstance(url_or_fetch_strategy, fetch_strategy.FetchStrategy):
            self.fetcher = url_or_fetch_strategy

        else:
            raise ValueError("Can't construct Stage without url or fetch strategy")

        self.name = kwargs.get('name')
        self.mirror_path = kwargs.get('mirror_path')
@@ -237,10 +242,12 @@ def fetch(self):
        # TODO: move mirror logic out of here and clean it up!
        if self.mirror_path:
            urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()]

            digest = None
            if isinstance(self.fetcher, URLFetchStrategy):
            if isinstance(self.fetcher, fetch_strategy.URLFetchStrategy):
                digest = self.fetcher.digest
            fetchers = [URLFetchStrategy(url, digest) for url in urls] + fetchers
            fetchers = [fetch_strategy.URLFetchStrategy(url, digest)
                        for url in urls] + fetchers
        for f in fetchers:
            f.set_stage(self)
@@ -267,7 +274,7 @@ def expand_archive(self):
        self.fetcher.expand()


    def chdir_to_archive(self):
    def chdir_to_source(self):
        """Changes directory to the expanded archive directory.
           Dies with an error if there was no expanded archive.
        """
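With the reworked constructor, a Stage can now be built either from a plain URL string or from an existing fetch strategy. A minimal sketch, assuming a Spack checkout on PYTHONPATH and a made-up URL and checksum:

from spack.stage import Stage
from spack.fetch_strategy import URLFetchStrategy

# 1) From a URL string: Stage builds a URLFetchStrategy via from_url().
stage_from_url = Stage('http://example.com/foo-1.0.tar.gz')

# 2) From an explicit fetch strategy, e.g. one carrying a checksum.
fetcher = URLFetchStrategy(url='http://example.com/foo-1.0.tar.gz',
                           md5='0123456789abcdef0123456789abcdef')
stage_from_fetcher = Stage(fetcher)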
@@ -32,6 +32,7 @@

import spack
from spack.stage import Stage
from spack.fetch_strategy import URLFetchStrategy
from spack.directory_layout import SpecHashDirectoryLayout
from spack.util.executable import which
from spack.test.mock_packages_test import *
@@ -100,11 +101,16 @@ def test_install_and_uninstall(self):
        self.assertTrue(spec.concrete)

        # Get the package
        print
        print "======== GETTING PACKAGE ========"
        pkg = spack.db.get(spec)

        print "======== GOT PACKAGE ========"
        print

        # Fake the URL for the package so it downloads from a file.
        archive_path = join_path(self.stage.path, archive_name)
        pkg.url = 'file://' + archive_path
        pkg.fetcher = URLFetchStrategy('file://' + archive_path)

        try:
            pkg.do_install()
@@ -56,8 +56,8 @@ def test_get_all_mock_packages(self):
    def test_url_versions(self):
        """Check URLs for regular packages, if they are explicitly defined."""
        for pkg in spack.db.all_packages():
            for v, vdesc in pkg.versions.items():
                if vdesc.url:
            for v, vattrs in pkg.versions.items():
                if 'url' in vattrs:
                    # If there is a url for the version check it.
                    v_url = pkg.url_for_version(v)
                    self.assertEqual(vdesc.url, v_url)
                    self.assertEqual(vattrs['url'], v_url)
@@ -170,7 +170,7 @@ def check_chdir(self, stage, stage_name):
        self.assertEqual(os.path.realpath(stage_path), os.getcwd())


    def check_chdir_to_archive(self, stage, stage_name):
    def check_chdir_to_source(self, stage, stage_name):
        stage_path = self.get_stage_path(stage, stage_name)
        self.assertEqual(
            join_path(os.path.realpath(stage_path), archive_dir),
@@ -271,9 +271,9 @@ def test_expand_archive(self):
        self.check_fetch(stage, stage_name)

        stage.expand_archive()
        stage.chdir_to_archive()
        stage.chdir_to_source()
        self.check_expand_archive(stage, stage_name)
        self.check_chdir_to_archive(stage, stage_name)
        self.check_chdir_to_source(stage, stage_name)

        stage.destroy()
        self.check_destroy(stage, stage_name)
@@ -284,9 +284,9 @@ def test_restage(self):

        stage.fetch()
        stage.expand_archive()
        stage.chdir_to_archive()
        stage.chdir_to_source()
        self.check_expand_archive(stage, stage_name)
        self.check_chdir_to_archive(stage, stage_name)
        self.check_chdir_to_source(stage, stage_name)

        # Try to make a file in the old archive dir
        with closing(open('foobar', 'w')) as file:
@@ -299,8 +299,8 @@ def test_restage(self):
        self.check_chdir(stage, stage_name)
        self.check_fetch(stage, stage_name)

        stage.chdir_to_archive()
        self.check_chdir_to_archive(stage, stage_name)
        stage.chdir_to_source()
        self.check_chdir_to_source(stage, stage_name)
        self.assertFalse('foobar' in os.listdir(stage.source_path))

        stage.destroy()