Standardize subcommand help strings (#38804)
### Rationale

While working on #29549, I noticed a lot of inconsistencies in our argparse help messages. This matters for fish, where these help messages end up as descriptions in the tab completion menu. See https://github.com/spack/spack/pull/29549#issuecomment-1627596477 for some examples of longer or more stylized help messages.

### Implementation

This PR makes the following changes:

- [x] Help messages start with a lowercase letter
- [x] Help messages do not end with a period
- [x] The first line of a help message is short and simple; longer text is separated by an empty line
- [x] Help messages do not use triple quotes (docstrings excepted)
- [x] Drop parentheses that are not needed for string concatenation inside a function call
- [x] Remove "..." "..." string concatenation left over from black reformatting
- [x] Remove Sphinx argument docs from help messages

The first two choices aren't very controversial and are designed to match the syntax of the `--help` flag automatically added by argparse. The third choice is more up for debate and is designed to match our package/module docstrings. The fourth choice is designed to avoid excessive newline characters and indentation. We may actually want to go even further and disallow docstrings altogether. A minimal sketch of the resulting style is shown below.

### Alternatives

Choice 3 in particular has a lot of alternatives. My goal is solely to ensure that fish tab completion looks reasonable. Alternatives include:

1. Get rid of long help messages; only allow short, simple messages
2. Move longer help messages to the epilog
3. Separate by two newline characters instead of one
4. Separate by a period instead of a newline; the first sentence goes into the tab completion description

The number of commands with long help text is actually rather small and is mostly relegated to `spack ci` and `spack buildcache`, so option 1 isn't as ridiculous as it sounds. Let me know if there are any other standardizations or alternatives you would like to suggest.
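To make the convention concrete, here is a minimal, hypothetical argparse sketch. It is not taken from this PR; the `frobnicate` subcommand and its options are invented for illustration. It shows the standardized style: lowercase first line, no trailing period, longer detail after a blank line, and implicit string concatenation without wrapping parentheses.

```python
import argparse

# Hypothetical "frobnicate" subcommand written in the standardized help-string style.
parser = argparse.ArgumentParser(prog="spack-demo")
subparsers = parser.add_subparsers(help="demo sub-commands")

frob = subparsers.add_parser("frobnicate", help="frobnicate installed specs")
frob.add_argument("-f", "--force", action="store_true", help="overwrite results if they exist")
frob.add_argument(
    "--mode",
    choices=["fast", "thorough"],
    help="select the frobnication mode\n\n"
    "the default is fast. thorough mode re-checks every spec and is noticeably slower",
)

if __name__ == "__main__":
    # argparse's stock HelpFormatter re-wraps help text, so the blank line mainly
    # matters to tools that read the raw string, such as the fish completion
    # generator, which keeps only the short first line as the description.
    parser.parse_args(["frobnicate", "--help"])
```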
parent 6312ae8464, commit bb7f437bf5
42 changed files with 295 additions and 348 deletions
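The diff below also leans on the first-line convention when wiring subcommand functions to their parsers: the full docstring becomes the parser description while only its first line becomes the short help that fish turns into a completion description (see the `deindent`/`first_line` calls in the `ci reproduce` hunk further down). The helper below is a simplified, hypothetical stand-in for that first-line extraction, not Spack's actual implementation; the example docstring is copied from the reworked `check_fn` in this diff.

```python
def first_line(docstring: str) -> str:
    """Return the first line of a docstring, suitable as a short argparse help string."""
    return docstring.lstrip().splitlines()[0]


def check_fn(args):
    """check specs against remote binary mirror(s) to see if any need to be rebuilt

    either a single spec from --spec, or else the full set of release specs
    """


# What fish would show as the tab-completion description for "spack buildcache check":
print(first_line(check_fn.__doc__))
```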
@@ -59,7 +59,7 @@ def setup_parser(subparser):
subparser.add_argument(
    "package_or_file",
    help="name of package to show contributions for, " "or path to a file in the spack repo",
    help="name of package to show contributions for, or path to a file in the spack repo",
)
@@ -43,7 +43,7 @@ def setup_parser(subparser):
subparsers = subparser.add_subparsers(help="buildcache sub-commands")

push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
push.add_argument(
    "-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
)
@@ -53,42 +53,37 @@ def setup_parser(subparser):
    action="store_true",
    help="allow install root string in binary files after RPATH substitution",
)
push.add_argument(
    "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
)
push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
push.add_argument("-k", "--key", metavar="key", type=str, default=None, help="key for signing")
push.add_argument("mirror", type=str, help="mirror name, path, or URL")
push.add_argument(
    "--update-index",
    "--rebuild-index",
    action="store_true",
    default=False,
    help="Regenerate buildcache index after building package(s)",
    help="regenerate buildcache index after building package(s)",
)
push.add_argument(
    "--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
    "--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
)
push.add_argument(
    "--only",
    default="package,dependencies",
    dest="things_to_install",
    choices=["package", "dependencies"],
    help=(
        "Select the buildcache mode. the default is to"
        " build a cache for the package along with all"
        " its dependencies. Alternatively, one can"
        " decide to build a cache for only the package"
        " or only the dependencies"
    ),
    help="select the buildcache mode\n\n"
    "the default is to build a cache for the package along with all its dependencies. "
    "alternatively, one can decide to build a cache for only the package or only the "
    "dependencies",
)
arguments.add_common_arguments(push, ["specs"])
push.set_defaults(func=push_fn)

install = subparsers.add_parser("install", help=install_fn.__doc__)
install.add_argument(
    "-f", "--force", action="store_true", help="overwrite install directory if it exists."
    "-f", "--force", action="store_true", help="overwrite install directory if it exists"
)
install.add_argument(
    "-m", "--multiple", action="store_true", help="allow all matching packages "
    "-m", "--multiple", action="store_true", help="allow all matching packages"
)
install.add_argument(
    "-u",
@@ -142,11 +137,11 @@ def setup_parser(subparser):
    "-m",
    "--mirror-url",
    default=None,
    help="Override any configured mirrors with this mirror URL",
    help="override any configured mirrors with this mirror URL",
)

check.add_argument(
    "-o", "--output-file", default=None, help="File where rebuild info should be written"
    "-o", "--output-file", default=None, help="file where rebuild info should be written"
)

# used to construct scope arguments below

@@ -162,13 +157,13 @@ def setup_parser(subparser):
)

check.add_argument(
    "-s", "--spec", default=None, help="Check single spec instead of release specs file"
    "-s", "--spec", default=None, help="check single spec instead of release specs file"
)

check.add_argument(
    "--spec-file",
    default=None,
    help=("Check single spec from json or yaml file instead of release specs file"),
    help="check single spec from json or yaml file instead of release specs file",
)

check.set_defaults(func=check_fn)
@ -176,15 +171,15 @@ def setup_parser(subparser):
|
|||
# Download tarball and specfile
|
||||
download = subparsers.add_parser("download", help=download_fn.__doc__)
|
||||
download.add_argument(
|
||||
"-s", "--spec", default=None, help="Download built tarball for spec from mirror"
|
||||
"-s", "--spec", default=None, help="download built tarball for spec from mirror"
|
||||
)
|
||||
download.add_argument(
|
||||
"--spec-file",
|
||||
default=None,
|
||||
help=("Download built tarball for spec (from json or yaml file) from mirror"),
|
||||
help="download built tarball for spec (from json or yaml file) from mirror",
|
||||
)
|
||||
download.add_argument(
|
||||
"-p", "--path", default=None, help="Path to directory where tarball should be downloaded"
|
||||
"-p", "--path", default=None, help="path to directory where tarball should be downloaded"
|
||||
)
|
||||
download.set_defaults(func=download_fn)
|
||||
|
||||
|
@ -193,52 +188,52 @@ def setup_parser(subparser):
|
|||
"get-buildcache-name", help=get_buildcache_name_fn.__doc__
|
||||
)
|
||||
getbuildcachename.add_argument(
|
||||
"-s", "--spec", default=None, help="Spec string for which buildcache name is desired"
|
||||
"-s", "--spec", default=None, help="spec string for which buildcache name is desired"
|
||||
)
|
||||
getbuildcachename.add_argument(
|
||||
"--spec-file",
|
||||
default=None,
|
||||
help=("Path to spec json or yaml file for which buildcache name is desired"),
|
||||
help="path to spec json or yaml file for which buildcache name is desired",
|
||||
)
|
||||
getbuildcachename.set_defaults(func=get_buildcache_name_fn)
|
||||
|
||||
# Given the root spec, save the yaml of the dependent spec to a file
|
||||
savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
|
||||
savespecfile.add_argument("--root-spec", default=None, help="Root spec of dependent spec")
|
||||
savespecfile.add_argument("--root-spec", default=None, help="root spec of dependent spec")
|
||||
savespecfile.add_argument(
|
||||
"--root-specfile",
|
||||
default=None,
|
||||
help="Path to json or yaml file containing root spec of dependent spec",
|
||||
help="path to json or yaml file containing root spec of dependent spec",
|
||||
)
|
||||
savespecfile.add_argument(
|
||||
"-s",
|
||||
"--specs",
|
||||
default=None,
|
||||
help="List of dependent specs for which saved yaml is desired",
|
||||
help="list of dependent specs for which saved yaml is desired",
|
||||
)
|
||||
savespecfile.add_argument(
|
||||
"--specfile-dir", default=None, help="Path to directory where spec yamls should be saved"
|
||||
"--specfile-dir", default=None, help="path to directory where spec yamls should be saved"
|
||||
)
|
||||
savespecfile.set_defaults(func=save_specfile_fn)
|
||||
|
||||
# Sync buildcache entries from one mirror to another
|
||||
sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
|
||||
sync.add_argument(
|
||||
"--manifest-glob", help="A quoted glob pattern identifying copy manifest files"
|
||||
"--manifest-glob", help="a quoted glob pattern identifying copy manifest files"
|
||||
)
|
||||
sync.add_argument(
|
||||
"src_mirror",
|
||||
metavar="source mirror",
|
||||
type=arguments.mirror_name_or_url,
|
||||
nargs="?",
|
||||
help="Source mirror name, path, or URL",
|
||||
help="source mirror name, path, or URL",
|
||||
)
|
||||
sync.add_argument(
|
||||
"dest_mirror",
|
||||
metavar="destination mirror",
|
||||
type=arguments.mirror_name_or_url,
|
||||
nargs="?",
|
||||
help="Destination mirror name, path, or URL",
|
||||
help="destination mirror name, path, or URL",
|
||||
)
|
||||
sync.set_defaults(func=sync_fn)
|
||||
|
||||
|
@ -247,14 +242,14 @@ def setup_parser(subparser):
|
|||
"update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
|
||||
)
|
||||
update_index.add_argument(
|
||||
"mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
|
||||
"mirror", type=arguments.mirror_name_or_url, help="destination mirror name, path, or URL"
|
||||
)
|
||||
update_index.add_argument(
|
||||
"-k",
|
||||
"--keys",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="If provided, key index will be updated as well as package index",
|
||||
help="if provided, key index will be updated as well as package index",
|
||||
)
|
||||
update_index.set_defaults(func=update_index_fn)
|
||||
|
||||
|
@ -411,9 +406,7 @@ def keys_fn(args):
|
|||
|
||||
|
||||
def preview_fn(args):
|
||||
"""analyze an installed spec and reports whether executables
|
||||
and libraries are relocatable
|
||||
"""
|
||||
"""analyze an installed spec and reports whether executables and libraries are relocatable"""
|
||||
constraints = spack.cmd.parse_specs(args.specs)
|
||||
specs = spack.store.find(constraints, multiple=True)
|
||||
|
||||
|
@ -425,11 +418,11 @@ def preview_fn(args):
|
|||
|
||||
|
||||
def check_fn(args):
|
||||
"""Check specs (either a single spec from --spec, or else the full set
|
||||
of release specs) against remote binary mirror(s) to see if any need
|
||||
to be rebuilt. This command uses the process exit code to indicate
|
||||
its result, specifically, if the exit code is non-zero, then at least
|
||||
one of the indicated specs needs to be rebuilt.
|
||||
"""check specs against remote binary mirror(s) to see if any need to be rebuilt
|
||||
|
||||
either a single spec from --spec, or else the full set of release specs. this command uses the
|
||||
process exit code to indicate its result, specifically, if the exit code is non-zero, then at
|
||||
least one of the indicated specs needs to be rebuilt
|
||||
"""
|
||||
if args.spec or args.spec_file:
|
||||
specs = [_concrete_spec_from_args(args)]
|
||||
|
@ -460,10 +453,12 @@ def check_fn(args):
|
|||
|
||||
|
||||
def download_fn(args):
|
||||
"""Download buildcache entry from a remote mirror to local folder. This
|
||||
command uses the process exit code to indicate its result, specifically,
|
||||
a non-zero exit code indicates that the command failed to download at
|
||||
least one of the required buildcache components."""
|
||||
"""download buildcache entry from a remote mirror to local folder
|
||||
|
||||
this command uses the process exit code to indicate its result, specifically, a non-zero exit
|
||||
code indicates that the command failed to download at least one of the required buildcache
|
||||
components
|
||||
"""
|
||||
if not args.spec and not args.spec_file:
|
||||
tty.msg("No specs provided, exiting.")
|
||||
return
|
||||
|
@ -480,19 +475,18 @@ def download_fn(args):
|
|||
|
||||
|
||||
def get_buildcache_name_fn(args):
|
||||
"""Get name (prefix) of buildcache entries for this spec"""
|
||||
"""get name (prefix) of buildcache entries for this spec"""
|
||||
spec = _concrete_spec_from_args(args)
|
||||
buildcache_name = bindist.tarball_name(spec, "")
|
||||
print("{0}".format(buildcache_name))
|
||||
|
||||
|
||||
def save_specfile_fn(args):
|
||||
"""Get full spec for dependencies, relative to root spec, and write them
|
||||
to files in the specified output directory. Uses exit code to signal
|
||||
success or failure. An exit code of zero means the command was likely
|
||||
successful. If any errors or exceptions are encountered, or if expected
|
||||
command-line arguments are not provided, then the exit code will be
|
||||
non-zero.
|
||||
"""get full spec for dependencies and write them to files in the specified output directory
|
||||
|
||||
uses exit code to signal success or failure. an exit code of zero means the command was likely
|
||||
successful. if any errors or exceptions are encountered, or if expected command-line arguments
|
||||
are not provided, then the exit code will be non-zero
|
||||
"""
|
||||
if not args.root_spec and not args.root_specfile:
|
||||
tty.msg("No root spec provided, exiting.")
|
||||
|
@ -546,12 +540,9 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
|
|||
|
||||
|
||||
def sync_fn(args):
|
||||
"""Syncs binaries (and associated metadata) from one mirror to another.
|
||||
Requires an active environment in order to know which specs to sync.
|
||||
"""sync binaries (and associated metadata) from one mirror to another
|
||||
|
||||
Args:
|
||||
src (str): Source mirror URL
|
||||
dest (str): Destination mirror URL
|
||||
requires an active environment in order to know which specs to sync
|
||||
"""
|
||||
if args.manifest_glob:
|
||||
manifest_copy(glob.glob(args.manifest_glob))
|
||||
|
@ -639,7 +630,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
|
|||
|
||||
|
||||
def update_index_fn(args):
|
||||
"""Update a buildcache index."""
|
||||
"""update a buildcache index"""
|
||||
update_index(args.mirror, update_keys=args.keys)
|
||||
|
||||
|
||||
|
|
|
@ -47,40 +47,36 @@ def setup_parser(subparser):
|
|||
generate.add_argument(
|
||||
"--output-file",
|
||||
default=None,
|
||||
help="""pathname for the generated gitlab ci yaml file
|
||||
Path to the file where generated jobs file should
|
||||
be written. Default is .gitlab-ci.yml in the root of
|
||||
the repository.""",
|
||||
help="pathname for the generated gitlab ci yaml file\n\n"
|
||||
"path to the file where generated jobs file should be written. "
|
||||
"default is .gitlab-ci.yml in the root of the repository",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--copy-to",
|
||||
default=None,
|
||||
help="""path to additional directory for job files
|
||||
This option provides an absolute path to a directory
|
||||
where the generated jobs yaml file should be copied.
|
||||
Default is not to copy.""",
|
||||
help="path to additional directory for job files\n\n"
|
||||
"this option provides an absolute path to a directory where the generated "
|
||||
"jobs yaml file should be copied. default is not to copy",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--optimize",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="""(Experimental) optimize the gitlab yaml file for size
|
||||
Run the generated document through a series of
|
||||
optimization passes designed to reduce the size
|
||||
of the generated file.""",
|
||||
help="(experimental) optimize the gitlab yaml file for size\n\n"
|
||||
"run the generated document through a series of optimization passes "
|
||||
"designed to reduce the size of the generated file",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--dependencies",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="(Experimental) disable DAG scheduling; use " ' "plain" dependencies.',
|
||||
help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--buildcache-destination",
|
||||
default=None,
|
||||
help="Override the mirror configured in the environment (spack.yaml) "
|
||||
+ "in order to push binaries from the generated pipeline to a "
|
||||
+ "different location.",
|
||||
help="override the mirror configured in the environment\n\n"
|
||||
"allows for pushing binaries from the generated pipeline to a different location",
|
||||
)
|
||||
prune_group = generate.add_mutually_exclusive_group()
|
||||
prune_group.add_argument(
|
||||
|
@ -88,45 +84,37 @@ def setup_parser(subparser):
|
|||
action="store_true",
|
||||
dest="prune_dag",
|
||||
default=True,
|
||||
help="""skip up-to-date specs
|
||||
Do not generate jobs for specs that are up-to-date
|
||||
on the mirror.""",
|
||||
help="skip up-to-date specs\n\n"
|
||||
"do not generate jobs for specs that are up-to-date on the mirror",
|
||||
)
|
||||
prune_group.add_argument(
|
||||
"--no-prune-dag",
|
||||
action="store_false",
|
||||
dest="prune_dag",
|
||||
default=True,
|
||||
help="""process up-to-date specs
|
||||
Generate jobs for specs even when they are up-to-date
|
||||
on the mirror.""",
|
||||
help="process up-to-date specs\n\n"
|
||||
"generate jobs for specs even when they are up-to-date on the mirror",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--check-index-only",
|
||||
action="store_true",
|
||||
dest="index_only",
|
||||
default=False,
|
||||
help="""only check spec state from buildcache indices
|
||||
Spack always checks specs against configured binary
|
||||
mirrors, regardless of the DAG pruning option.
|
||||
If enabled, Spack will assume all remote buildcache
|
||||
indices are up-to-date when assessing whether the spec
|
||||
on the mirror, if present, is up-to-date. This has the
|
||||
benefit of reducing pipeline generation time but at the
|
||||
potential cost of needlessly rebuilding specs when the
|
||||
indices are outdated.
|
||||
If not enabled, Spack will fetch remote spec files
|
||||
directly to assess whether the spec on the mirror is
|
||||
up-to-date.""",
|
||||
help="only check spec state from buildcache indices\n\n"
|
||||
"Spack always checks specs against configured binary mirrors, regardless of the DAG "
|
||||
"pruning option. if enabled, Spack will assume all remote buildcache indices are "
|
||||
"up-to-date when assessing whether the spec on the mirror, if present, is up-to-date. "
|
||||
"this has the benefit of reducing pipeline generation time but at the potential cost of "
|
||||
"needlessly rebuilding specs when the indices are outdated. if not enabled, Spack will "
|
||||
"fetch remote spec files directly to assess whether the spec on the mirror is up-to-date",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--artifacts-root",
|
||||
default=None,
|
||||
help="""path to the root of the artifacts directory
|
||||
If provided, concrete environment files (spack.yaml,
|
||||
spack.lock) will be generated under this directory.
|
||||
Their location will be passed to generated child jobs
|
||||
through the SPACK_CONCRETE_ENVIRONMENT_PATH variable.""",
|
||||
help="path to the root of the artifacts directory\n\n"
|
||||
"if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
|
||||
"this directory. their location will be passed to generated child jobs through the "
|
||||
"SPACK_CONCRETE_ENVIRONMENT_PATH variable",
|
||||
)
|
||||
generate.set_defaults(func=ci_generate)
|
||||
|
||||
|
@ -150,13 +138,13 @@ def setup_parser(subparser):
|
|||
"--tests",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="""run stand-alone tests after the build""",
|
||||
help="run stand-alone tests after the build",
|
||||
)
|
||||
rebuild.add_argument(
|
||||
"--fail-fast",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="""stop stand-alone tests after the first failure""",
|
||||
help="stop stand-alone tests after the first failure",
|
||||
)
|
||||
rebuild.set_defaults(func=ci_rebuild)
|
||||
|
||||
|
@ -166,10 +154,10 @@ def setup_parser(subparser):
|
|||
description=deindent(ci_reproduce.__doc__),
|
||||
help=spack.cmd.first_line(ci_reproduce.__doc__),
|
||||
)
|
||||
reproduce.add_argument("job_url", help="Url of job artifacts bundle")
|
||||
reproduce.add_argument("job_url", help="URL of job artifacts bundle")
|
||||
reproduce.add_argument(
|
||||
"--working-dir",
|
||||
help="Where to unpack artifacts",
|
||||
help="where to unpack artifacts",
|
||||
default=os.path.join(os.getcwd(), "ci_reproduction"),
|
||||
)
|
||||
|
||||
|
@ -177,12 +165,12 @@ def setup_parser(subparser):
|
|||
|
||||
|
||||
def ci_generate(args):
|
||||
"""Generate jobs file from a CI-aware spack file.
|
||||
"""generate jobs file from a CI-aware spack file
|
||||
|
||||
If you want to report the results on CDash, you will need to set
|
||||
the SPACK_CDASH_AUTH_TOKEN before invoking this command. The
|
||||
value must be the CDash authorization token needed to create a
|
||||
build group and register all generated jobs under it."""
|
||||
if you want to report the results on CDash, you will need to set the SPACK_CDASH_AUTH_TOKEN
|
||||
before invoking this command. the value must be the CDash authorization token needed to create
|
||||
a build group and register all generated jobs under it
|
||||
"""
|
||||
env = spack.cmd.require_active_env(cmd_name="ci generate")
|
||||
|
||||
output_file = args.output_file
|
||||
|
@ -223,10 +211,11 @@ def ci_generate(args):
|
|||
|
||||
|
||||
def ci_reindex(args):
|
||||
"""Rebuild the buildcache index for the remote mirror.
|
||||
"""rebuild the buildcache index for the remote mirror
|
||||
|
||||
Use the active, gitlab-enabled environment to rebuild the buildcache
|
||||
index for the associated mirror."""
|
||||
use the active, gitlab-enabled environment to rebuild the buildcache index for the associated
|
||||
mirror
|
||||
"""
|
||||
env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
|
||||
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
|
||||
|
||||
|
@ -242,10 +231,11 @@ def ci_reindex(args):
|
|||
|
||||
|
||||
def ci_rebuild(args):
|
||||
"""Rebuild a spec if it is not on the remote mirror.
|
||||
"""rebuild a spec if it is not on the remote mirror
|
||||
|
||||
Check a single spec against the remote mirror, and rebuild it from
|
||||
source if the mirror does not contain the hash."""
|
||||
check a single spec against the remote mirror, and rebuild it from source if the mirror does
|
||||
not contain the hash
|
||||
"""
|
||||
env = spack.cmd.require_active_env(cmd_name="ci rebuild")
|
||||
|
||||
# Make sure the environment is "gitlab-enabled", or else there's nothing
|
||||
|
@ -606,7 +596,7 @@ def ci_rebuild(args):
|
|||
)
|
||||
reports_dir = fs.join_path(os.getcwd(), "cdash_report")
|
||||
if args.tests and broken_tests:
|
||||
tty.warn("Unable to run stand-alone tests since listed in " "ci's 'broken-tests-packages'")
|
||||
tty.warn("Unable to run stand-alone tests since listed in ci's 'broken-tests-packages'")
|
||||
if cdash_handler:
|
||||
msg = "Package is listed in ci's broken-tests-packages"
|
||||
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
|
||||
|
@ -649,7 +639,7 @@ def ci_rebuild(args):
|
|||
tty.warn("No recognized test results reporting option")
|
||||
|
||||
else:
|
||||
tty.warn("Unable to run stand-alone tests due to unsuccessful " "installation")
|
||||
tty.warn("Unable to run stand-alone tests due to unsuccessful installation")
|
||||
if cdash_handler:
|
||||
msg = "Failed to install the package"
|
||||
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
|
||||
|
@ -728,10 +718,11 @@ def ci_rebuild(args):
|
|||
|
||||
|
||||
def ci_reproduce(args):
|
||||
"""Generate instructions for reproducing the spec rebuild job.
|
||||
"""generate instructions for reproducing the spec rebuild job
|
||||
|
||||
Artifacts of the provided gitlab pipeline rebuild job's URL will be
|
||||
used to derive instructions for reproducing the build locally."""
|
||||
artifacts of the provided gitlab pipeline rebuild job's URL will be used to derive
|
||||
instructions for reproducing the build locally
|
||||
"""
|
||||
job_url = args.job_url
|
||||
work_dir = args.working_dir
|
||||
|
||||
|
|
|
@ -48,7 +48,7 @@ def get_origin_info(remote):
|
|||
)
|
||||
except ProcessError:
|
||||
origin_url = _SPACK_UPSTREAM
|
||||
tty.warn("No git repository found; " "using default upstream URL: %s" % origin_url)
|
||||
tty.warn("No git repository found; using default upstream URL: %s" % origin_url)
|
||||
return (origin_url.strip(), branch.strip())
|
||||
|
||||
|
||||
|
@ -69,7 +69,7 @@ def clone(parser, args):
|
|||
files_in_the_way = os.listdir(prefix)
|
||||
if files_in_the_way:
|
||||
tty.die(
|
||||
"There are already files there! " "Delete these files before boostrapping spack.",
|
||||
"There are already files there! Delete these files before boostrapping spack.",
|
||||
*files_in_the_way,
|
||||
)
|
||||
|
||||
|
|
|
@ -265,7 +265,7 @@ def recurse_dependents():
|
|||
"--dependents",
|
||||
action="store_true",
|
||||
dest="dependents",
|
||||
help="also uninstall any packages that depend on the ones given " "via command line",
|
||||
help="also uninstall any packages that depend on the ones given via command line",
|
||||
)
|
||||
|
||||
|
||||
|
@ -286,7 +286,7 @@ def deptype():
|
|||
"--deptype",
|
||||
action=DeptypeAction,
|
||||
default=dep.all_deptypes,
|
||||
help="comma-separated list of deptypes to traverse\ndefault=%s"
|
||||
help="comma-separated list of deptypes to traverse\n\ndefault=%s"
|
||||
% ",".join(dep.all_deptypes),
|
||||
)
|
||||
|
||||
|
@ -350,9 +350,9 @@ def install_status():
|
|||
"--install-status",
|
||||
action="store_true",
|
||||
default=True,
|
||||
help="show install status of packages. packages can be: "
|
||||
help="show install status of packages\n\npackages can be: "
|
||||
"installed [+], missing and needed by an installed package [-], "
|
||||
"installed in and upstream instance [^], "
|
||||
"installed in an upstream instance [^], "
|
||||
"or not installed (no annotation)",
|
||||
)
|
||||
|
||||
|
@ -393,24 +393,23 @@ def add_cdash_args(subparser, add_help):
|
|||
cdash_help = {}
|
||||
if add_help:
|
||||
cdash_help["upload-url"] = "CDash URL where reports will be uploaded"
|
||||
cdash_help[
|
||||
"build"
|
||||
] = """The name of the build that will be reported to CDash.
|
||||
Defaults to spec of the package to operate on."""
|
||||
cdash_help[
|
||||
"site"
|
||||
] = """The site name that will be reported to CDash.
|
||||
Defaults to current system hostname."""
|
||||
cdash_help[
|
||||
"track"
|
||||
] = """Results will be reported to this group on CDash.
|
||||
Defaults to Experimental."""
|
||||
cdash_help[
|
||||
"buildstamp"
|
||||
] = """Instead of letting the CDash reporter prepare the
|
||||
buildstamp which, when combined with build name, site and project,
|
||||
uniquely identifies the build, provide this argument to identify
|
||||
the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
|
||||
cdash_help["build"] = (
|
||||
"name of the build that will be reported to CDash\n\n"
|
||||
"defaults to spec of the package to operate on"
|
||||
)
|
||||
cdash_help["site"] = (
|
||||
"site name that will be reported to CDash\n\n" "defaults to current system hostname"
|
||||
)
|
||||
cdash_help["track"] = (
|
||||
"results will be reported to this group on CDash\n\n" "defaults to Experimental"
|
||||
)
|
||||
cdash_help["buildstamp"] = (
|
||||
"use custom buildstamp\n\n"
|
||||
"instead of letting the CDash reporter prepare the "
|
||||
"buildstamp which, when combined with build name, site and project, "
|
||||
"uniquely identifies the build, provide this argument to identify "
|
||||
"the build yourself. format: %%Y%%m%%d-%%H%%M-[cdash-track]"
|
||||
)
|
||||
else:
|
||||
cdash_help["upload-url"] = argparse.SUPPRESS
|
||||
cdash_help["build"] = argparse.SUPPRESS
|
||||
|
@ -542,16 +541,16 @@ def add_s3_connection_args(subparser, add_help):
|
|||
"--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--s3-access-key-secret", help="Secret string to use to connect to this S3 mirror"
|
||||
"--s3-access-key-secret", help="secret string to use to connect to this S3 mirror"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--s3-access-token", help="Access Token to use to connect to this S3 mirror"
|
||||
"--s3-access-token", help="access token to use to connect to this S3 mirror"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--s3-endpoint-url", help="Endpoint URL to use to connect to this S3 mirror"
|
||||
"--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -14,18 +14,16 @@
|
|||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
"-f", "--force", action="store_true", help="Re-concretize even if already concretized."
|
||||
"-f", "--force", action="store_true", help="re-concretize even if already concretized"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--test",
|
||||
default=None,
|
||||
choices=["root", "all"],
|
||||
help="""Concretize with test dependencies. When 'root' is chosen, test
|
||||
dependencies are only added for the environment's root specs. When 'all' is
|
||||
chosen, test dependencies are enabled for all packages in the environment.""",
|
||||
help="concretize with test dependencies of only root packages or all packages",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-q", "--quiet", action="store_true", help="Don't print concretized specs"
|
||||
"-q", "--quiet", action="store_true", help="don't print concretized specs"
|
||||
)
|
||||
|
||||
spack.cmd.common.arguments.add_concretizer_args(subparser)
|
||||
|
|
|
@ -42,7 +42,7 @@ def setup_parser(subparser):
|
|||
get_parser = sp.add_parser("get", help="print configuration values")
|
||||
get_parser.add_argument(
|
||||
"section",
|
||||
help="configuration section to print. " "options: %(choices)s",
|
||||
help="configuration section to print\n\noptions: %(choices)s",
|
||||
nargs="?",
|
||||
metavar="section",
|
||||
choices=spack.config.section_schemas,
|
||||
|
@ -53,7 +53,7 @@ def setup_parser(subparser):
|
|||
)
|
||||
blame_parser.add_argument(
|
||||
"section",
|
||||
help="configuration section to print. " "options: %(choices)s",
|
||||
help="configuration section to print\n\noptions: %(choices)s",
|
||||
metavar="section",
|
||||
choices=spack.config.section_schemas,
|
||||
)
|
||||
|
@ -61,7 +61,7 @@ def setup_parser(subparser):
|
|||
edit_parser = sp.add_parser("edit", help="edit configuration file")
|
||||
edit_parser.add_argument(
|
||||
"section",
|
||||
help="configuration section to edit. " "options: %(choices)s",
|
||||
help="configuration section to edit\n\noptions: %(choices)s",
|
||||
metavar="section",
|
||||
nargs="?",
|
||||
choices=spack.config.section_schemas,
|
||||
|
@ -76,7 +76,7 @@ def setup_parser(subparser):
|
|||
add_parser.add_argument(
|
||||
"path",
|
||||
nargs="?",
|
||||
help="colon-separated path to config that should be added," " e.g. 'config:default:true'",
|
||||
help="colon-separated path to config that should be added, e.g. 'config:default:true'",
|
||||
)
|
||||
add_parser.add_argument("-f", "--file", help="file from which to set all config values")
|
||||
|
||||
|
@ -88,7 +88,7 @@ def setup_parser(subparser):
|
|||
"--local",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Set packages preferences based on local installs, rather " "than upstream.",
|
||||
help="set packages preferences based on local installs, rather than upstream",
|
||||
)
|
||||
|
||||
remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove configuration parameters")
|
||||
|
@ -157,7 +157,7 @@ def config_get(args):
|
|||
tty.die("environment has no %s file" % ev.manifest_name)
|
||||
|
||||
else:
|
||||
tty.die("`spack config get` requires a section argument " "or an active environment.")
|
||||
tty.die("`spack config get` requires a section argument or an active environment.")
|
||||
|
||||
|
||||
def config_blame(args):
|
||||
|
@ -180,7 +180,7 @@ def config_edit(args):
|
|||
# If we aren't editing a spack.yaml file, get config path from scope.
|
||||
scope, section = _get_scope_and_section(args)
|
||||
if not scope and not section:
|
||||
tty.die("`spack config edit` requires a section argument " "or an active environment.")
|
||||
tty.die("`spack config edit` requires a section argument or an active environment.")
|
||||
config_file = spack.config.config.get_config_filename(scope, section)
|
||||
|
||||
if args.print_file:
|
||||
|
@ -374,7 +374,7 @@ def config_revert(args):
|
|||
|
||||
proceed = True
|
||||
if not args.yes_to_all:
|
||||
msg = "The following scopes will be restored from the corresponding" " backup files:\n"
|
||||
msg = "The following scopes will be restored from the corresponding backup files:\n"
|
||||
for entry in to_be_restored:
|
||||
msg += "\t[scope={0.scope}, bkp={0.bkp}]\n".format(entry)
|
||||
msg += "This operation cannot be undone."
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
import spack.container
|
||||
import spack.container.images
|
||||
|
||||
description = "creates recipes to build images for different" " container runtimes"
|
||||
description = "creates recipes to build images for different container runtimes"
|
||||
section = "container"
|
||||
level = "long"
|
||||
|
||||
|
|
|
@ -612,7 +612,7 @@ def setup_parser(subparser):
|
|||
"--template",
|
||||
metavar="TEMPLATE",
|
||||
choices=sorted(templates.keys()),
|
||||
help="build system template to use. options: %(choices)s",
|
||||
help="build system template to use\n\noptions: %(choices)s",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-r", "--repo", help="path to a repository where the package should be created"
|
||||
|
@ -620,7 +620,7 @@ def setup_parser(subparser):
|
|||
subparser.add_argument(
|
||||
"-N",
|
||||
"--namespace",
|
||||
help="specify a namespace for the package. must be the namespace of "
|
||||
help="specify a namespace for the package\n\nmust be the namespace of "
|
||||
"a repository registered with Spack",
|
||||
)
|
||||
subparser.add_argument(
|
||||
|
@ -878,7 +878,7 @@ def get_build_system(template, url, guesser):
|
|||
# Use whatever build system the guesser detected
|
||||
selected_template = guesser.build_system
|
||||
if selected_template == "generic":
|
||||
tty.warn("Unable to detect a build system. " "Using a generic package template.")
|
||||
tty.warn("Unable to detect a build system. Using a generic package template.")
|
||||
else:
|
||||
msg = "This package looks like it uses the {0} build system"
|
||||
tty.msg(msg.format(selected_template))
|
||||
|
|
|
@ -26,8 +26,8 @@ def setup_parser(subparser):
|
|||
"--installed",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="List installed dependencies of an installed spec, "
|
||||
"instead of possible dependencies of a package.",
|
||||
help="list installed dependencies of an installed spec "
|
||||
"instead of possible dependencies of a package",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-t",
|
||||
|
|
|
@ -25,15 +25,15 @@ def setup_parser(subparser):
|
|||
"--installed",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="List installed dependents of an installed spec, "
|
||||
"instead of possible dependents of a package.",
|
||||
help="list installed dependents of an installed spec "
|
||||
"instead of possible dependents of a package",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-t",
|
||||
"--transitive",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Show all transitive dependents.",
|
||||
help="show all transitive dependents",
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["spec"])
|
||||
|
||||
|
|
|
@ -26,7 +26,7 @@
|
|||
from spack.database import InstallStatuses
|
||||
from spack.error import SpackError
|
||||
|
||||
description = "Replace one package with another via symlinks"
|
||||
description = "replace one package with another via symlinks"
|
||||
section = "admin"
|
||||
level = "long"
|
||||
|
||||
|
@ -46,7 +46,7 @@ def setup_parser(sp):
|
|||
action="store_true",
|
||||
default=True,
|
||||
dest="dependencies",
|
||||
help="Deprecate dependencies (default)",
|
||||
help="deprecate dependencies (default)",
|
||||
)
|
||||
deps.add_argument(
|
||||
"-D",
|
||||
|
@ -54,7 +54,7 @@ def setup_parser(sp):
|
|||
action="store_false",
|
||||
default=True,
|
||||
dest="dependencies",
|
||||
help="Do not deprecate dependencies",
|
||||
help="do not deprecate dependencies",
|
||||
)
|
||||
|
||||
install = sp.add_mutually_exclusive_group()
|
||||
|
@ -64,7 +64,7 @@ def setup_parser(sp):
|
|||
action="store_true",
|
||||
default=False,
|
||||
dest="install",
|
||||
help="Concretize and install deprecator spec",
|
||||
help="concretize and install deprecator spec",
|
||||
)
|
||||
install.add_argument(
|
||||
"-I",
|
||||
|
@ -72,7 +72,7 @@ def setup_parser(sp):
|
|||
action="store_false",
|
||||
default=False,
|
||||
dest="install",
|
||||
help="Deprecator spec must already be installed (default)",
|
||||
help="deprecator spec must already be installed (default)",
|
||||
)
|
||||
|
||||
sp.add_argument(
|
||||
|
@ -81,7 +81,7 @@ def setup_parser(sp):
|
|||
type=str,
|
||||
default="soft",
|
||||
choices=["soft", "hard"],
|
||||
help="Type of filesystem link to use for deprecation (default soft)",
|
||||
help="type of filesystem link to use for deprecation (default soft)",
|
||||
)
|
||||
|
||||
sp.add_argument(
|
||||
|
|
|
@ -25,14 +25,14 @@ def setup_parser(subparser):
|
|||
"--source-path",
|
||||
dest="source_path",
|
||||
default=None,
|
||||
help="path to source directory. defaults to the current directory",
|
||||
help="path to source directory (defaults to the current directory)",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-i",
|
||||
"--ignore-dependencies",
|
||||
action="store_true",
|
||||
dest="ignore_deps",
|
||||
help="don't try to install dependencies of requested packages",
|
||||
help="do not try to install dependencies of requested packages",
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
|
||||
subparser.add_argument(
|
||||
|
@ -55,16 +55,13 @@ def setup_parser(subparser):
|
|||
type=str,
|
||||
dest="shell",
|
||||
default=None,
|
||||
help="drop into a build environment in a new shell, e.g. bash, zsh",
|
||||
help="drop into a build environment in a new shell, e.g., bash",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--test",
|
||||
default=None,
|
||||
choices=["root", "all"],
|
||||
help="""If 'root' is chosen, run package tests during
|
||||
installation for top-level packages (but skip tests for dependencies).
|
||||
if 'all' is chosen, run package tests during installation for all
|
||||
packages. If neither are chosen, don't run tests for any packages.""",
|
||||
help="run tests on only root packages or all packages",
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["spec"])
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument("-p", "--path", help="Source location of package")
|
||||
subparser.add_argument("-p", "--path", help="source location of package")
|
||||
|
||||
clone_group = subparser.add_mutually_exclusive_group()
|
||||
clone_group.add_argument(
|
||||
|
@ -28,18 +28,18 @@ def setup_parser(subparser):
|
|||
action="store_false",
|
||||
dest="clone",
|
||||
default=None,
|
||||
help="Do not clone. The package already exists at the source path",
|
||||
help="do not clone, the package already exists at the source path",
|
||||
)
|
||||
clone_group.add_argument(
|
||||
"--clone",
|
||||
action="store_true",
|
||||
dest="clone",
|
||||
default=None,
|
||||
help="Clone the package even if the path already exists",
|
||||
help="clone the package even if the path already exists",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
"-f", "--force", help="Remove any files or directories that block cloning source code"
|
||||
"-f", "--force", help="remove any files or directories that block cloning source code"
|
||||
)
|
||||
|
||||
arguments.add_common_arguments(subparser, ["spec"])
|
||||
|
|
|
@ -29,7 +29,7 @@ def setup_parser(subparser):
|
|||
action="store_true",
|
||||
default=False,
|
||||
dest="dump_json",
|
||||
help="Dump json output instead of pretty printing.",
|
||||
help="dump json output instead of pretty printing",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--first",
|
||||
|
|
|
@ -62,7 +62,7 @@ def setup_parser(subparser):
|
|||
dest="path",
|
||||
action="store_const",
|
||||
const=spack.paths.build_systems_path,
|
||||
help="Edit the build system with the supplied name.",
|
||||
help="edit the build system with the supplied name",
|
||||
)
|
||||
excl_args.add_argument(
|
||||
"-c",
|
||||
|
|
|
@ -102,7 +102,7 @@ def env_activate_setup_parser(subparser):
|
|||
dest="with_view",
|
||||
const=True,
|
||||
default=True,
|
||||
help="update PATH etc. with associated view",
|
||||
help="update PATH, etc., with associated view",
|
||||
)
|
||||
view_options.add_argument(
|
||||
"-V",
|
||||
|
@ -111,7 +111,7 @@ def env_activate_setup_parser(subparser):
|
|||
dest="with_view",
|
||||
const=False,
|
||||
default=True,
|
||||
help="do not update PATH etc. with associated view",
|
||||
help="do not update PATH, etc., with associated view",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
|
@ -161,7 +161,7 @@ def env_activate(args):
|
|||
|
||||
# Error out when -e, -E, -D flags are given, cause they are ambiguous.
|
||||
if args.env or args.no_env or args.env_dir:
|
||||
tty.die("Calling spack env activate with --env, --env-dir and --no-env " "is ambiguous")
|
||||
tty.die("Calling spack env activate with --env, --env-dir and --no-env is ambiguous")
|
||||
|
||||
env_name_or_dir = args.activate_env or args.dir
|
||||
|
||||
|
@ -250,7 +250,7 @@ def env_deactivate(args):
|
|||
|
||||
# Error out when -e, -E, -D flags are given, cause they are ambiguous.
|
||||
if args.env or args.no_env or args.env_dir:
|
||||
tty.die("Calling spack env deactivate with --env, --env-dir and --no-env " "is ambiguous")
|
||||
tty.die("Calling spack env deactivate with --env, --env-dir and --no-env is ambiguous")
|
||||
|
||||
if ev.active_environment() is None:
|
||||
tty.die("No environment is currently active.")
|
||||
|
@ -290,7 +290,7 @@ def env_create_setup_parser(subparser):
|
|||
"envfile",
|
||||
nargs="?",
|
||||
default=None,
|
||||
help="either a lockfile (must end with '.json' or '.lock') or a manifest file.",
|
||||
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
|
||||
)
|
||||
|
||||
|
||||
|
@ -608,16 +608,16 @@ def env_depfile_setup_parser(subparser):
|
|||
"--make-target-prefix",
|
||||
default=None,
|
||||
metavar="TARGET",
|
||||
help="prefix Makefile targets (and variables) with <TARGET>/<name>. By default "
|
||||
help="prefix Makefile targets (and variables) with <TARGET>/<name>\n\nby default "
|
||||
"the absolute path to the directory makedeps under the environment metadata dir is "
|
||||
"used. Can be set to an empty string --make-prefix ''.",
|
||||
"used. can be set to an empty string --make-prefix ''",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--make-disable-jobserver",
|
||||
default=True,
|
||||
action="store_false",
|
||||
dest="jobserver",
|
||||
help="disable POSIX jobserver support.",
|
||||
help="disable POSIX jobserver support",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--use-buildcache",
|
||||
|
@ -625,8 +625,8 @@ def env_depfile_setup_parser(subparser):
|
|||
type=arguments.use_buildcache,
|
||||
default="package:auto,dependencies:auto",
|
||||
metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
|
||||
help="When using `only`, redundant build dependencies are pruned from the DAG. "
|
||||
"This flag is passed on to the generated spack install commands.",
|
||||
help="when using `only`, redundant build dependencies are pruned from the DAG\n\n"
|
||||
"this flag is passed on to the generated spack install commands",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-o",
|
||||
|
@ -640,7 +640,7 @@ def env_depfile_setup_parser(subparser):
|
|||
"--generator",
|
||||
default="make",
|
||||
choices=("make",),
|
||||
help="specify the depfile type. Currently only make is supported.",
|
||||
help="specify the depfile type\n\ncurrently only make is supported",
|
||||
)
|
||||
subparser.add_argument(
|
||||
metavar="specs",
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
|
||||
def setup_parser(subparser):
|
||||
subparser.epilog = (
|
||||
"If called without argument returns " "the list of all valid extendable packages"
|
||||
"If called without argument returns the list of all valid extendable packages"
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long"])
|
||||
subparser.add_argument(
|
||||
|
|
|
@ -42,7 +42,7 @@ def setup_parser(subparser):
|
|||
"--path",
|
||||
default=None,
|
||||
action="append",
|
||||
help="Alternative search paths for finding externals. May be repeated",
|
||||
help="one or more alternative search paths for finding externals",
|
||||
)
|
||||
find_parser.add_argument(
|
||||
"--scope",
|
||||
|
@ -66,10 +66,8 @@ def setup_parser(subparser):
|
|||
|
||||
read_cray_manifest = sp.add_parser(
|
||||
"read-cray-manifest",
|
||||
help=(
|
||||
"consume a Spack-compatible description of externally-installed "
|
||||
"packages, including dependency relationships"
|
||||
),
|
||||
help="consume a Spack-compatible description of externally-installed packages, including "
|
||||
"dependency relationships",
|
||||
)
|
||||
read_cray_manifest.add_argument(
|
||||
"--file", default=None, help="specify a location other than the default"
|
||||
|
@ -92,7 +90,7 @@ def setup_parser(subparser):
|
|||
read_cray_manifest.add_argument(
|
||||
"--fail-on-error",
|
||||
action="store_true",
|
||||
help=("if a manifest file cannot be parsed, fail and report the " "full stack trace"),
|
||||
help="if a manifest file cannot be parsed, fail and report the full stack trace",
|
||||
)
|
||||
|
||||
|
||||
|
@ -111,14 +109,14 @@ def external_find(args):
|
|||
# For most exceptions, just print a warning and continue.
|
||||
# Note that KeyboardInterrupt does not subclass Exception
|
||||
# (so CTRL-C will terminate the program as expected).
|
||||
skip_msg = "Skipping manifest and continuing with other external " "checks"
|
||||
skip_msg = "Skipping manifest and continuing with other external checks"
|
||||
if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
|
||||
errno.EPERM,
|
||||
errno.EACCES,
|
||||
]:
|
||||
# The manifest file does not have sufficient permissions enabled:
|
||||
# print a warning and keep going
|
||||
tty.warn("Unable to read manifest due to insufficient " "permissions.", skip_msg)
|
||||
tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
|
||||
else:
|
||||
tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)
|
||||
|
||||
|
@ -168,7 +166,7 @@ def external_find(args):
|
|||
)
|
||||
if new_entries:
|
||||
path = spack.config.config.get_config_filename(args.scope, "packages")
|
||||
msg = "The following specs have been detected on this system " "and added to {0}"
|
||||
msg = "The following specs have been detected on this system and added to {0}"
|
||||
tty.msg(msg.format(path))
|
||||
spack.cmd.display_specs(new_entries)
|
||||
else:
|
||||
|
@ -236,7 +234,7 @@ def _collect_and_consume_cray_manifest_files(
|
|||
if fail_on_error:
|
||||
raise
|
||||
else:
|
||||
tty.warn("Failure reading manifest file: {0}" "\n\t{1}".format(path, str(e)))
|
||||
tty.warn("Failure reading manifest file: {0}\n\t{1}".format(path, str(e)))
|
||||
|
||||
|
||||
def external_list(args):
|
||||
|
|
|
@ -51,9 +51,7 @@ def fetch(parser, args):
|
|||
else:
|
||||
specs = env.all_specs()
|
||||
if specs == []:
|
||||
tty.die(
|
||||
"No uninstalled specs in environment. Did you " "run `spack concretize` yet?"
|
||||
)
|
||||
tty.die("No uninstalled specs in environment. Did you run `spack concretize` yet?")
|
||||
else:
|
||||
tty.die("fetch requires at least one spec argument")
|
||||
|
||||
|
|
|
@ -68,7 +68,7 @@ def setup_parser(subparser):
|
|||
metavar="DEST",
|
||||
type=str,
|
||||
dest="secret",
|
||||
help="export the private key to a file.",
|
||||
help="export the private key to a file",
|
||||
)
|
||||
create.set_defaults(func=gpg_create)
|
||||
|
||||
|
@ -86,7 +86,7 @@ def setup_parser(subparser):
|
|||
export = subparsers.add_parser("export", help=gpg_export.__doc__)
|
||||
export.add_argument("location", type=str, help="where to export keys")
|
||||
export.add_argument(
|
||||
"keys", nargs="*", help="the keys to export; " "all public keys if unspecified"
|
||||
"keys", nargs="*", help="the keys to export (all public keys if unspecified)"
|
||||
)
|
||||
export.add_argument("--secret", action="store_true", help="export secret keys")
|
||||
export.set_defaults(func=gpg_export)
|
||||
|
@ -99,29 +99,29 @@ def setup_parser(subparser):
|
|||
"--directory",
|
||||
metavar="directory",
|
||||
type=str,
|
||||
help="local directory where keys will be published.",
|
||||
help="local directory where keys will be published",
|
||||
)
|
||||
output.add_argument(
|
||||
"-m",
|
||||
"--mirror-name",
|
||||
metavar="mirror-name",
|
||||
type=str,
|
||||
help="name of the mirror where " + "keys will be published.",
|
||||
help="name of the mirror where keys will be published",
|
||||
)
|
||||
output.add_argument(
|
||||
"--mirror-url",
|
||||
metavar="mirror-url",
|
||||
type=str,
|
||||
help="URL of the mirror where " + "keys will be published.",
|
||||
help="URL of the mirror where keys will be published",
|
||||
)
|
||||
publish.add_argument(
|
||||
"--rebuild-index",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=("Regenerate buildcache key index " "after publishing key(s)"),
|
||||
help="regenerate buildcache key index after publishing key(s)",
|
||||
)
|
||||
publish.add_argument(
|
||||
"keys", nargs="*", help="the keys to publish; " "all public keys if unspecified"
|
||||
"keys", nargs="*", help="keys to publish (all public keys if unspecified)"
|
||||
)
|
||||
publish.set_defaults(func=gpg_publish)
|
||||
|
||||
|
@ -146,7 +146,7 @@ def gpg_create(args):
|
|||
|
||||
|
||||
def gpg_export(args):
|
||||
"""export a gpg key, optionally including secret key."""
|
||||
"""export a gpg key, optionally including secret key"""
|
||||
keys = args.keys
|
||||
if not keys:
|
||||
keys = spack.util.gpg.signing_keys()
|
||||
|
@ -168,7 +168,7 @@ def gpg_sign(args):
|
|||
elif not keys:
|
||||
raise RuntimeError("no signing keys are available")
|
||||
else:
|
||||
raise RuntimeError("multiple signing keys are available; " "please choose one")
|
||||
raise RuntimeError("multiple signing keys are available; please choose one")
|
||||
output = args.output
|
||||
if not output:
|
||||
output = args.spec[0] + ".asc"
|
||||
|
|
|
@ -75,10 +75,9 @@ def setup_parser(subparser):
|
|||
default="package,dependencies",
|
||||
dest="things_to_install",
|
||||
choices=["package", "dependencies"],
|
||||
help="""select the mode of installation.
|
||||
the default is to install the package along with all its dependencies.
|
||||
alternatively one can decide to install only the package or only
|
||||
the dependencies""",
|
||||
help="select the mode of installation\n\n"
|
||||
"default is to install the package along with all its dependencies. "
|
||||
"alternatively, one can decide to install only the package or only the dependencies",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-u",
|
||||
|
@ -143,12 +142,11 @@ def setup_parser(subparser):
|
|||
type=arguments.use_buildcache,
|
||||
default="package:auto,dependencies:auto",
|
||||
metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
|
||||
help="""select the mode of buildcache for the 'package' and 'dependencies'.
|
||||
Default: package:auto,dependencies:auto
|
||||
- `auto` behaves like --use-cache
|
||||
- `only` behaves like --cache-only
|
||||
- `never` behaves like --no-cache
|
||||
""",
|
||||
help="select the mode of buildcache for the 'package' and 'dependencies'\n\n"
|
||||
"default: package:auto,dependencies:auto\n\n"
|
||||
"- `auto` behaves like --use-cache\n"
|
||||
"- `only` behaves like --cache-only\n"
|
||||
"- `never` behaves like --no-cache",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
|
@ -156,8 +154,8 @@ def setup_parser(subparser):
|
|||
action="store_true",
|
||||
dest="include_build_deps",
|
||||
default=False,
|
||||
help="""include build deps when installing from cache,
|
||||
which is useful for CI pipeline troubleshooting""",
|
||||
help="include build deps when installing from cache, "
|
||||
"useful for CI pipeline troubleshooting",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
|
@ -186,7 +184,7 @@ def setup_parser(subparser):
|
|||
dest="install_verbose",
|
||||
help="display verbose build output while installing",
|
||||
)
|
||||
subparser.add_argument("--fake", action="store_true", help="fake install for debug purposes.")
|
||||
subparser.add_argument("--fake", action="store_true", help="fake install for debug purposes")
|
||||
subparser.add_argument(
|
||||
"--only-concrete",
|
||||
action="store_true",
|
||||
|
@ -199,14 +197,13 @@ def setup_parser(subparser):
|
|||
"--add",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="""(with environment) add spec to the environment as a root.""",
|
||||
help="(with environment) add spec to the environment as a root",
|
||||
)
|
||||
updateenv_group.add_argument(
|
||||
"--no-add",
|
||||
action="store_false",
|
||||
dest="add",
|
||||
help="""(with environment) do not add spec to the environment as a
|
||||
root (the default behavior).""",
|
||||
help="(with environment) do not add spec to the environment as a root",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
|
@ -216,7 +213,7 @@ def setup_parser(subparser):
|
|||
default=[],
|
||||
dest="specfiles",
|
||||
metavar="SPEC_YAML_FILE",
|
||||
help="install from file. Read specs to install from .yaml files",
|
||||
help="read specs to install from .yaml files",
|
||||
)
|
||||
|
||||
cd_group = subparser.add_mutually_exclusive_group()
|
||||
|
@ -227,19 +224,12 @@ def setup_parser(subparser):
"--test",
default=None,
choices=["root", "all"],
help="""If 'root' is chosen, run package tests during
installation for top-level packages (but skip tests for dependencies).
if 'all' is chosen, run package tests during installation for all
packages. If neither are chosen, don't run tests for any packages.""",
help="run tests on only root packages or all packages",
)
arguments.add_common_arguments(subparser, ["log_format"])
subparser.add_argument("--log-file", default=None, help="filename for the log file")
subparser.add_argument(
"--log-file",
default=None,
help="filename for the log file. if not passed a default will be used",
)
subparser.add_argument(
"--help-cdash", action="store_true", help="Show usage instructions for CDash reporting"
"--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
)
arguments.add_cdash_args(subparser, False)
arguments.add_common_arguments(subparser, ["yes_to_all", "spec"])
@ -280,7 +270,7 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):
display_args = {"long": True, "show_flags": True, "variants": True}

if installed:
tty.msg("The following package specs will be " "reinstalled:\n")
tty.msg("The following package specs will be reinstalled:\n")
spack.cmd.display_specs(installed, **display_args)

not_installed = list(filter(lambda x: x not in installed, concrete_specs))

@ -66,10 +66,9 @@ def setup_parser(subparser):
default="package,dependencies",
dest="things_to_load",
choices=["package", "dependencies"],
help="""select whether to load the package and its dependencies
the default is to load the package and all dependencies
alternatively one can decide to load only the package or only
the dependencies""",
help="select whether to load the package and its dependencies\n\n"
"the default is to load the package and all dependencies. alternatively, "
"one can decide to load only the package or only the dependencies",
)

subparser.add_argument(

@ -55,13 +55,13 @@ def setup_parser(subparser):
directories.add_argument(
"--source-dir",
action="store_true",
help="source directory for a spec " "(requires it to be staged first)",
help="source directory for a spec (requires it to be staged first)",
)
directories.add_argument(
"-b",
"--build-dir",
action="store_true",
help="build directory for a spec " "(requires it to be staged first)",
help="build directory for a spec (requires it to be staged first)",
)
directories.add_argument(
"-e",
@ -162,7 +162,7 @@ def location(parser, args):
# source dir remains, which requires the spec to be staged
if not pkg.stage.expanded:
tty.die(
"Source directory does not exist yet. " "Run this to create it:",
"Source directory does not exist yet. Run this to create it:",
"spack stage " + " ".join(args.spec),
)

@ -39,7 +39,7 @@ def line_to_rtf(str):
def setup_parser(subparser):
spack_source_group = subparser.add_mutually_exclusive_group(required=True)
spack_source_group.add_argument(
"-v", "--spack-version", default="", help="download given spack version e.g. 0.16.0"
"-v", "--spack-version", default="", help="download given spack version"
)
spack_source_group.add_argument(
"-s", "--spack-source", default="", help="full path to spack source"
@ -50,7 +50,7 @@ def setup_parser(subparser):
"--git-installer-verbosity",
default="",
choices=["SILENT", "VERYSILENT"],
help="Level of verbosity provided by bundled Git Installer. Default is fully verbose",
help="level of verbosity provided by bundled git installer (default is fully verbose)",
required=False,
action="store",
dest="git_verbosity",

@ -35,10 +35,7 @@ def setup_parser(subparser):
"--all",
action="store_true",
dest="all",
help="Mark ALL installed packages that match each "
"supplied spec. If you `mark --all libelf`,"
" ALL versions of `libelf` are marked. If no spec is "
"supplied, all installed packages will be marked.",
help="mark ALL installed packages that match each supplied spec",
)
exim = subparser.add_mutually_exclusive_group(required=True)
exim.add_argument(
@ -46,14 +43,14 @@ def setup_parser(subparser):
"--explicit",
action="store_true",
dest="explicit",
help="Mark packages as explicitly installed.",
help="mark packages as explicitly installed",
)
exim.add_argument(
"-i",
"--implicit",
action="store_true",
dest="implicit",
help="Mark packages as implicitly installed.",
help="mark packages as implicitly installed",
)

@ -55,13 +55,13 @@ def setup_parser(subparser):
)
create_parser.add_argument(
"--exclude-specs",
help="specs which Spack should not try to add to a mirror" " (specified on command line)",
help="specs which Spack should not try to add to a mirror (specified on command line)",
)

create_parser.add_argument(
"--skip-unstable-versions",
action="store_true",
help="don't cache versions unless they identify a stable (unchanging)" " source code",
help="don't cache versions unless they identify a stable (unchanging) source code",
)
create_parser.add_argument(
"-D", "--dependencies", action="store_true", help="also fetch all dependencies"
@ -144,7 +144,7 @@ def setup_parser(subparser):


def mirror_add(args):
"""Add a mirror to Spack."""
"""add a mirror to Spack"""
if (
args.s3_access_key_id
or args.s3_access_key_secret
@ -168,12 +168,12 @@ def mirror_add(args):


def mirror_remove(args):
"""Remove a mirror by name."""
"""remove a mirror by name"""
spack.mirror.remove(args.name, args.scope)


def mirror_set_url(args):
"""Change the URL of a mirror."""
"""change the URL of a mirror"""
url = args.url
mirrors = spack.config.get("mirrors", scope=args.scope)
if not mirrors:
@ -242,7 +242,7 @@ def mirror_set_url(args):


def mirror_list(args):
"""Print out available mirrors to the console."""
"""print out available mirrors to the console"""

mirrors = spack.mirror.MirrorCollection(scope=args.scope)
if not mirrors:
@ -395,9 +395,7 @@ def process_mirror_stats(present, mirrored, error):


def mirror_create(args):
"""Create a directory to be used as a spack mirror, and fill it with
package archives.
"""
"""create a directory to be used as a spack mirror, and fill it with package archives"""
if args.specs and args.all:
raise SpackError(
"cannot specify specs on command line if you chose to mirror all specs with '--all'"
@ -470,7 +468,7 @@ def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions,


def mirror_destroy(args):
"""Given a url, recursively delete everything under it."""
"""given a url, recursively delete everything under it"""
mirror_url = None

if args.mirror_name:

@ -31,7 +31,7 @@ def setup_parser(subparser):
action="store",
dest="module_set_name",
default="default",
help="Named module set to use from modules configuration.",
help="named module set to use from modules configuration",
)
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="subparser_name")

@ -30,7 +30,7 @@ def add_command(parser, command_dict):


def setdefault(module_type, specs, args):
"""Set the default module file, when multiple are present"""
"""set the default module file, when multiple are present"""
# For details on the underlying mechanism see:
#
# https://lmod.readthedocs.io/en/latest/060_locating.html#marking-a-version-as-default

@ -29,7 +29,7 @@ def add_command(parser, command_dict):


def setdefault(module_type, specs, args):
"""Set the default module file, when multiple are present"""
"""set the default module file, when multiple are present"""
# Currently, accepts only a single matching spec
spack.cmd.modules.one_spec_or_raise(specs)
spec = specs[0]

@ -58,7 +58,7 @@ def setup_parser(subparser):
"--type",
action="store",
default="C",
help="Types of changes to show (A: added, R: removed, " "C: changed); default is 'C'",
help="types of changes to show (A: added, R: removed, C: changed); default is 'C'",
)

rm_parser = sp.add_parser("removed", help=pkg_removed.__doc__)
@ -81,7 +81,7 @@ def setup_parser(subparser):
"--canonical",
action="store_true",
default=False,
help="dump canonical source as used by package hash.",
help="dump canonical source as used by package hash",
)
arguments.add_common_arguments(source_parser, ["spec"])

@ -17,9 +17,7 @@


def setup_parser(subparser):
subparser.epilog = (
"If called without argument returns " "the list of all valid virtual packages"
)
subparser.epilog = "If called without argument returns the list of all valid virtual packages"
subparser.add_argument(
"virtual_package", nargs="*", help="find packages that provide this virtual package"
)

@ -27,7 +27,7 @@ def setup_parser(subparser):
create_parser.add_argument("directory", help="directory to create the repo in")
create_parser.add_argument(
"namespace",
help="namespace to identify packages in the repository. " "defaults to the directory name",
help="namespace to identify packages in the repository (defaults to the directory name)",
nargs="?",
)
create_parser.add_argument(
@ -36,10 +36,8 @@ def setup_parser(subparser):
action="store",
dest="subdir",
default=spack.repo.packages_dir_name,
help=(
"subdirectory to store packages in the repository."
" Default 'packages'. Use an empty string for no subdirectory."
),
help="subdirectory to store packages in the repository\n\n"
"default 'packages'. use an empty string for no subdirectory",
)

# List
@ -78,14 +76,14 @@ def setup_parser(subparser):


def repo_create(args):
"""Create a new package repository."""
"""create a new package repository"""
full_path, namespace = spack.repo.create_repo(args.directory, args.namespace, args.subdir)
tty.msg("Created repo with namespace '%s'." % namespace)
tty.msg("To register it with spack, run this command:", "spack repo add %s" % full_path)


def repo_add(args):
"""Add a package source to Spack's configuration."""
"""add a package source to Spack's configuration"""
path = args.path

# real_path is absolute and handles substitution.
@ -116,7 +114,7 @@ def repo_add(args):


def repo_remove(args):
"""Remove a repository from Spack's configuration."""
"""remove a repository from Spack's configuration"""
repos = spack.config.get("repos", scope=args.scope)
namespace_or_path = args.namespace_or_path

@ -146,7 +144,7 @@ def repo_remove(args):


def repo_list(args):
"""Show registered repositories and their namespaces."""
"""show registered repositories and their namespaces"""
roots = spack.config.get("repos", scope=args.scope)
repos = []
for r in roots:

@ -33,7 +33,7 @@ def setup_parser(subparser):
"--show",
action="store",
default="opt,solutions",
help="select outputs: comma-separated list of: \n"
help="select outputs\n\ncomma-separated list of:\n"
" asp asp program text\n"
" opt optimization criteria for best model\n"
" output raw clingo output\n"

@ -12,7 +12,7 @@
import spack.store
import spack.tag

description = "Show package tags and associated packages"
description = "show package tags and associated packages"
section = "basic"
level = "long"

@ -35,39 +35,35 @@ def setup_parser(subparser):
"run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__)
)

alias_help_msg = "Provide an alias for this test-suite"
alias_help_msg += " for subsequent access."
run_parser.add_argument("--alias", help=alias_help_msg)
run_parser.add_argument(
"--alias", help="provide an alias for this test-suite for subsequent access"
)

run_parser.add_argument(
"--fail-fast",
action="store_true",
help="Stop tests for each package after the first failure.",
help="stop tests for each package after the first failure",
)
run_parser.add_argument(
"--fail-first", action="store_true", help="Stop after the first failed package."
"--fail-first", action="store_true", help="stop after the first failed package"
)
run_parser.add_argument(
"--externals", action="store_true", help="Test packages that are externally installed."
"--externals", action="store_true", help="test packages that are externally installed"
)
run_parser.add_argument(
"-x",
"--explicit",
action="store_true",
help="Only test packages that are explicitly installed.",
help="only test packages that are explicitly installed",
)
run_parser.add_argument(
"--keep-stage", action="store_true", help="Keep testing directory for debugging"
"--keep-stage", action="store_true", help="keep testing directory for debugging"
)
arguments.add_common_arguments(run_parser, ["log_format"])
run_parser.add_argument(
"--log-file",
default=None,
help="filename for the log file. if not passed a default will be used",
)
run_parser.add_argument("--log-file", default=None, help="filename for the log file")
arguments.add_cdash_args(run_parser, False)
run_parser.add_argument(
"--help-cdash", action="store_true", help="Show usage instructions for CDash reporting"
"--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
)

cd_group = run_parser.add_mutually_exclusive_group()
@ -96,7 +92,7 @@ def setup_parser(subparser):
find_parser.add_argument(
"filter",
nargs=argparse.REMAINDER,
help="optional case-insensitive glob patterns to filter results.",
help="optional case-insensitive glob patterns to filter results",
)

# Status
@ -104,7 +100,7 @@ def setup_parser(subparser):
"status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__)
)
status_parser.add_argument(
"names", nargs=argparse.REMAINDER, help="Test suites for which to print status"
"names", nargs=argparse.REMAINDER, help="test suites for which to print status"
)

# Results
@ -142,15 +138,15 @@ def setup_parser(subparser):
)
arguments.add_common_arguments(remove_parser, ["yes_to_all"])
remove_parser.add_argument(
"names", nargs=argparse.REMAINDER, help="Test suites to remove from test stage"
"names", nargs=argparse.REMAINDER, help="test suites to remove from test stage"
)


def test_run(args):
"""Run tests for the specified installed packages.
"""run tests for the specified installed packages

If no specs are listed, run tests for all packages in the current
environment or all installed packages if there is no active environment.
if no specs are listed, run tests for all packages in the current
environment or all installed packages if there is no active environment
"""
if args.alias:
suites = spack.install_test.get_named_test_suites(args.alias)
@ -231,7 +227,7 @@ def create_reporter(args, specs_to_test, test_suite):


def test_list(args):
"""List installed packages with available tests."""
"""list installed packages with available tests"""
tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()

def has_test_and_tags(pkg_class):
@ -263,10 +259,10 @@ def has_test_and_tags(pkg_class):


def test_find(args): # TODO: merge with status (noargs)
"""Find tests that are running or have available results.
"""find tests that are running or have available results

Displays aliases for tests that have them, otherwise test suite content
hashes."""
displays aliases for tests that have them, otherwise test suite content hashes
"""
test_suites = spack.install_test.get_all_test_suites()

# Filter tests by filter argument
@ -302,7 +298,7 @@ def match(t, f):


def test_status(args):
"""Get the current status for the specified Spack test suite(s)."""
"""get the current status for the specified Spack test suite(s)"""
if args.names:
test_suites = []
for name in args.names:
@ -387,7 +383,7 @@ def _report_suite_results(test_suite, args, constraints):


def test_results(args):
"""Get the results from Spack test suite(s) (default all)."""
"""get the results from Spack test suite(s) (default all)"""
if args.names:
try:
sep_index = args.names.index("--")
@ -414,12 +410,13 @@ def test_results(args):


def test_remove(args):
"""Remove results from Spack test suite(s) (default all).
"""remove results from Spack test suite(s) (default all)

If no test suite is listed, remove results for all suites.
if no test suite is listed, remove results for all suites.

Removed tests can no longer be accessed for results or status, and will not
appear in `spack test list` results."""
removed tests can no longer be accessed for results or status, and will not
appear in `spack test list` results
"""
if args.names:
test_suites = []
for name in args.names:

@ -54,7 +54,7 @@ def setup_parser(subparser):
"--force",
action="store_true",
dest="force",
help="remove regardless of whether other packages or environments " "depend on this one",
help="remove regardless of whether other packages or environments depend on this one",
)
subparser.add_argument(
"--remove",

@ -53,15 +53,15 @@ def setup_parser(subparser):
)

subparser.add_argument(
"-a", "--all", action="store_true", help="unload all loaded Spack packages."
"-a", "--all", action="store_true", help="unload all loaded Spack packages"
)


def unload(parser, args):
"""Unload spack packages from the user environment."""
"""unload spack packages from the user environment"""
if args.specs and args.all:
raise spack.error.SpackError(
"Cannot specify specs on command line" " when unloading all specs with '--all'"
"Cannot specify specs on command line when unloading all specs with '--all'"
)

hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")

@ -10,7 +10,7 @@
import spack.store
import spack.verify

description = "Check that all spack packages are on disk as installed"
description = "check that all spack packages are on disk as installed"
section = "admin"
level = "long"

@ -19,14 +19,14 @@ def setup_parser(subparser):
setup_parser.parser = subparser

subparser.add_argument(
"-l", "--local", action="store_true", help="Verify only locally installed packages"
"-l", "--local", action="store_true", help="verify only locally installed packages"
)
subparser.add_argument(
"-j", "--json", action="store_true", help="Ouptut json-formatted errors"
"-j", "--json", action="store_true", help="ouptut json-formatted errors"
)
subparser.add_argument("-a", "--all", action="store_true", help="Verify all packages")
subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
subparser.add_argument(
"specs_or_files", nargs=argparse.REMAINDER, help="Specs or files to verify"
"specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
)

type = subparser.add_mutually_exclusive_group()
@ -37,7 +37,7 @@ def setup_parser(subparser):
const="specs",
dest="type",
default="specs",
help="Treat entries as specs (default)",
help="treat entries as specs (default)",
)
type.add_argument(
"-f",
@ -46,7 +46,7 @@ def setup_parser(subparser):
const="files",
dest="type",
default="specs",
help="Treat entries as absolute filenames. Cannot be used with '-a'",
help="treat entries as absolute filenames\n\ncannot be used with '-a'",
)

@ -26,7 +26,7 @@ def setup_parser(subparser):
output.add_argument(
"--safe-only",
action="store_true",
help="[deprecated] only list safe versions " "of the package",
help="[deprecated] only list safe versions of the package",
)
output.add_argument(
"-r", "--remote", action="store_true", help="only list remote versions of the package"
@ -35,7 +35,7 @@ def setup_parser(subparser):
"-n",
"--new",
action="store_true",
help="only list remote versions newer than " "the latest checksummed version",
help="only list remote versions newer than the latest checksummed version",
)
subparser.add_argument(
"-c", "--concurrency", default=32, type=int, help="number of concurrent requests"

@ -44,7 +44,7 @@
from spack.filesystem_view import YamlFilesystemView, view_func_parser
from spack.util import spack_yaml as s_yaml

description = "project packages to a compact naming scheme on the filesystem."
description = "project packages to a compact naming scheme on the filesystem"
section = "environments"
level = "short"

@ -81,7 +81,7 @@ def setup_parser(sp):
"--verbose",
action="store_true",
default=False,
help="If not verbose only warnings/errors will be printed.",
help="if not verbose only warnings/errors will be printed",
)
sp.add_argument(
"-e",
@ -95,7 +95,7 @@ def setup_parser(sp):
"--dependencies",
choices=["true", "false", "yes", "no"],
default="true",
help="Link/remove/list dependencies.",
help="link/remove/list dependencies",
)

ssp = sp.add_subparsers(metavar="ACTION", dest="action")
@ -137,12 +137,11 @@ def setup_parser(sp):
if cmd in ("symlink", "hardlink", "copy"):
# invalid for remove/statlink, for those commands the view needs to
# already know its own projections.
help_msg = "Initialize view using projections from file."
act.add_argument(
"--projection-file",
dest="projection_file",
type=spack.cmd.extant_file,
help=help_msg,
help="initialize view using projections from file",
)

if cmd == "remove":
@ -150,7 +149,7 @@ def setup_parser(sp):
act.add_argument(
"--no-remove-dependents",
action="store_true",
help="Do not remove dependents of specified specs.",
help="do not remove dependents of specified specs",
)

# with all option, spec is an optional argument

@ -436,7 +436,7 @@ def make_argument_parser(**kwargs):
default=None,
action="append",
dest="config_vars",
help="add one or more custom, one off config settings.",
help="add one or more custom, one off config settings",
)
parser.add_argument(
"-C",
@ -451,9 +451,9 @@ def make_argument_parser(**kwargs):
"--debug",
action="count",
default=0,
help="write out debug messages " "(more d's for more verbosity: -d, -dd, -ddd, etc.)",
help="write out debug messages\n\n(more d's for more verbosity: -d, -dd, -ddd, etc.)",
)
parser.add_argument("--timestamp", action="store_true", help="Add a timestamp to tty output")
parser.add_argument("--timestamp", action="store_true", help="add a timestamp to tty output")
parser.add_argument("--pdb", action="store_true", help="run spack under the pdb debugger")

env_group = parser.add_mutually_exclusive_group()
@ -527,8 +527,7 @@ def make_argument_parser(**kwargs):
"--sorted-profile",
default=None,
metavar="STAT",
help="profile and sort by one or more of:\n[%s]"
% ",\n ".join([", ".join(line) for line in stat_lines]),
help=f"profile and sort\n\none or more of: {stat_lines[0]}",
)
parser.add_argument(
"--lines",
@ -555,7 +554,7 @@ def make_argument_parser(**kwargs):
"-V", "--version", action="store_true", help="show version number and exit"
)
parser.add_argument(
"--print-shell-vars", action="store", help="print info needed by setup-env.[c]sh"
"--print-shell-vars", action="store", help="print info needed by setup-env.*sh"
)

return parser
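For reference, here is a minimal sketch (not part of this diff) of the help-string style the changed lines above converge on: a short lowercase first line with no trailing period, plain adjacent string literals instead of parenthesized concatenation, and longer text separated from the first line by a blank line (`\n\n`). The `--mode` option and its choices are hypothetical, and `argparse.RawTextHelpFormatter` is used here only so the embedded newlines are preserved when the help is printed; Spack's own help formatter may behave differently.

```python
import argparse

# Hypothetical parser, for illustration only. RawTextHelpFormatter keeps the
# "\n" characters in the help text instead of re-wrapping it.
parser = argparse.ArgumentParser(
    prog="example", formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument(
    "--mode",
    default="auto",
    choices=["auto", "only", "never"],
    # short lowercase first line, no trailing period; details after a blank line
    help="select the cache mode\n\n"
    "default: auto\n"
    "- `auto` use the cache when possible\n"
    "- `only` fail if the cache cannot be used\n"
    "- `never` ignore the cache",
)

if __name__ == "__main__":
    parser.print_help()
```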