Standardize subcommand help strings (#38804)

### Rationale

While working on #29549, I noticed a lot of inconsistencies in our argparse help messages. This matters for fish, where these help messages end up as descriptions in the tab-completion menu. See https://github.com/spack/spack/pull/29549#issuecomment-1627596477 for some examples of longer or more stylized help messages.

### Implementation

This PR makes the following changes:

- [x] help messages start with a lowercase letter
- [x] help messages do not end with a period
- [x] the first line of a help message is short and simple

    longer text is separated by an empty line
- [x] "help messages do not use triple quotes" 

    """(except docstrings)"""
- [x] parentheses are not needed for string concatenation inside a function call
- [x] remove `"..." "..."` string concatenation left over from black reformatting
- [x] remove Sphinx argument docs from help messages

The first two choices aren't very controversial and are designed to match the style of the `--help` message that argparse adds automatically. The third choice is more up for debate and is designed to match our package/module docstrings. The fourth choice is designed to avoid excessive newline characters and indentation. We may actually want to go even further and disallow docstrings altogether.
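For concreteness, here is a minimal, runnable sketch of what choices 1–4 look like in practice, borrowing the `--only` flag from the `spack buildcache push` hunk further down: lowercase, no trailing period, a short first line, the longer text separated by a blank line, and adjacent string literals concatenated without wrapping parentheses.

```python
import argparse

parser = argparse.ArgumentParser(prog="spack buildcache push")
parser.add_argument(
    "--only",
    default="package,dependencies",
    dest="things_to_install",
    choices=["package", "dependencies"],
    # short lowercase first line, then a blank line, then the longer text;
    # adjacent string literals are concatenated implicitly, no parentheses needed
    help="select the buildcache mode\n\n"
    "the default is to build a cache for the package along with all its dependencies. "
    "alternatively, one can decide to build a cache for only the package or only the "
    "dependencies",
)
print(parser.format_help())
```

Note that argparse's stock `HelpFormatter` re-wraps help text and collapses the blank line, so the `\n\n` only survives in `--help` output under a raw-text style formatter; either way, the raw string is what the completion generator reads.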

### Alternatives

Choice 3 in particular has a lot of alternatives. My goal is solely to ensure that fish tab completion looks reasonable. Alternatives include:

1. Get rid of long help messages, only allow short simple messages
2. Move longer help messages to epilog
3. Separate by 2 newline characters instead of 1
4. Separate by period instead of newline. First sentence goes into tab completion description

The number of commands with long help text is actually rather small, and is mostly limited to `spack ci` and `spack buildcache`, so option 1 isn't as ridiculous as it sounds.
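Whichever alternative wins out, the completion generator only ever needs the first line; the diff below already passes `spack.cmd.first_line(...)` as subparser help. A rough stand-in for that helper (my own simplified version, not Spack's actual implementation) shows the idea:

```python
def first_line(text: str) -> str:
    """Return the first line of a help string or docstring.

    Simplified stand-in for spack.cmd.first_line: only this line would end up
    as the description shown in a fish tab-completion entry.
    """
    return text.strip().splitlines()[0]


help_text = (
    "check specs against remote binary mirror(s) to see if any need to be rebuilt\n\n"
    "either a single spec from --spec, or else the full set of release specs"
)
print(first_line(help_text))
# -> check specs against remote binary mirror(s) to see if any need to be rebuilt
```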

Let me know if there are any other standardizations or alternatives you would like to suggest.
Commit bb7f437bf5 (parent 6312ae8464) by Adam J. Stewart, 2023-07-13 02:18:23 -05:00, committed by GitHub.
42 changed files with 295 additions and 348 deletions


```diff
@@ -59,7 +59,7 @@ def setup_parser(subparser):
     subparser.add_argument(
         "package_or_file",
-        help="name of package to show contributions for, " "or path to a file in the spack repo",
+        help="name of package to show contributions for, or path to a file in the spack repo",
     )
```


```diff
@@ -43,7 +43,7 @@ def setup_parser(subparser):
     subparsers = subparser.add_subparsers(help="buildcache sub-commands")
     push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
-    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
+    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
     push.add_argument(
         "-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
     )
@@ -53,42 +53,37 @@ def setup_parser(subparser):
         action="store_true",
         help="allow install root string in binary files after RPATH substitution",
     )
-    push.add_argument(
-        "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
-    )
-    push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
+    push.add_argument("-k", "--key", metavar="key", type=str, default=None, help="key for signing")
+    push.add_argument("mirror", type=str, help="mirror name, path, or URL")
     push.add_argument(
         "--update-index",
         "--rebuild-index",
         action="store_true",
         default=False,
-        help="Regenerate buildcache index after building package(s)",
+        help="regenerate buildcache index after building package(s)",
     )
     push.add_argument(
-        "--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
+        "--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
     )
     push.add_argument(
         "--only",
         default="package,dependencies",
         dest="things_to_install",
         choices=["package", "dependencies"],
-        help=(
-            "Select the buildcache mode. the default is to"
-            " build a cache for the package along with all"
-            " its dependencies. Alternatively, one can"
-            " decide to build a cache for only the package"
-            " or only the dependencies"
-        ),
+        help="select the buildcache mode\n\n"
+        "the default is to build a cache for the package along with all its dependencies. "
+        "alternatively, one can decide to build a cache for only the package or only the "
+        "dependencies",
     )
     arguments.add_common_arguments(push, ["specs"])
     push.set_defaults(func=push_fn)
     install = subparsers.add_parser("install", help=install_fn.__doc__)
     install.add_argument(
-        "-f", "--force", action="store_true", help="overwrite install directory if it exists."
+        "-f", "--force", action="store_true", help="overwrite install directory if it exists"
     )
     install.add_argument(
-        "-m", "--multiple", action="store_true", help="allow all matching packages "
+        "-m", "--multiple", action="store_true", help="allow all matching packages"
     )
     install.add_argument(
         "-u",
@@ -142,11 +137,11 @@ def setup_parser(subparser):
         "-m",
         "--mirror-url",
         default=None,
-        help="Override any configured mirrors with this mirror URL",
+        help="override any configured mirrors with this mirror URL",
     )
     check.add_argument(
-        "-o", "--output-file", default=None, help="File where rebuild info should be written"
+        "-o", "--output-file", default=None, help="file where rebuild info should be written"
     )
     # used to construct scope arguments below
@@ -162,13 +157,13 @@ def setup_parser(subparser):
     )
     check.add_argument(
-        "-s", "--spec", default=None, help="Check single spec instead of release specs file"
+        "-s", "--spec", default=None, help="check single spec instead of release specs file"
     )
     check.add_argument(
         "--spec-file",
         default=None,
-        help=("Check single spec from json or yaml file instead of release specs file"),
+        help="check single spec from json or yaml file instead of release specs file",
     )
     check.set_defaults(func=check_fn)
@@ -176,15 +171,15 @@ def setup_parser(subparser):
     # Download tarball and specfile
     download = subparsers.add_parser("download", help=download_fn.__doc__)
     download.add_argument(
-        "-s", "--spec", default=None, help="Download built tarball for spec from mirror"
+        "-s", "--spec", default=None, help="download built tarball for spec from mirror"
    )
     download.add_argument(
         "--spec-file",
         default=None,
-        help=("Download built tarball for spec (from json or yaml file) from mirror"),
+        help="download built tarball for spec (from json or yaml file) from mirror",
     )
     download.add_argument(
-        "-p", "--path", default=None, help="Path to directory where tarball should be downloaded"
+        "-p", "--path", default=None, help="path to directory where tarball should be downloaded"
     )
     download.set_defaults(func=download_fn)
@@ -193,52 +188,52 @@ def setup_parser(subparser):
         "get-buildcache-name", help=get_buildcache_name_fn.__doc__
     )
     getbuildcachename.add_argument(
-        "-s", "--spec", default=None, help="Spec string for which buildcache name is desired"
+        "-s", "--spec", default=None, help="spec string for which buildcache name is desired"
     )
     getbuildcachename.add_argument(
         "--spec-file",
         default=None,
-        help=("Path to spec json or yaml file for which buildcache name is desired"),
+        help="path to spec json or yaml file for which buildcache name is desired",
     )
     getbuildcachename.set_defaults(func=get_buildcache_name_fn)
     # Given the root spec, save the yaml of the dependent spec to a file
     savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
-    savespecfile.add_argument("--root-spec", default=None, help="Root spec of dependent spec")
+    savespecfile.add_argument("--root-spec", default=None, help="root spec of dependent spec")
     savespecfile.add_argument(
         "--root-specfile",
         default=None,
-        help="Path to json or yaml file containing root spec of dependent spec",
+        help="path to json or yaml file containing root spec of dependent spec",
     )
     savespecfile.add_argument(
         "-s",
         "--specs",
         default=None,
-        help="List of dependent specs for which saved yaml is desired",
+        help="list of dependent specs for which saved yaml is desired",
     )
     savespecfile.add_argument(
-        "--specfile-dir", default=None, help="Path to directory where spec yamls should be saved"
+        "--specfile-dir", default=None, help="path to directory where spec yamls should be saved"
     )
     savespecfile.set_defaults(func=save_specfile_fn)
     # Sync buildcache entries from one mirror to another
     sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
     sync.add_argument(
-        "--manifest-glob", help="A quoted glob pattern identifying copy manifest files"
+        "--manifest-glob", help="a quoted glob pattern identifying copy manifest files"
     )
     sync.add_argument(
         "src_mirror",
         metavar="source mirror",
         type=arguments.mirror_name_or_url,
         nargs="?",
-        help="Source mirror name, path, or URL",
+        help="source mirror name, path, or URL",
     )
     sync.add_argument(
         "dest_mirror",
         metavar="destination mirror",
         type=arguments.mirror_name_or_url,
         nargs="?",
-        help="Destination mirror name, path, or URL",
+        help="destination mirror name, path, or URL",
     )
     sync.set_defaults(func=sync_fn)
@@ -247,14 +242,14 @@ def setup_parser(subparser):
         "update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
     )
     update_index.add_argument(
-        "mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
+        "mirror", type=arguments.mirror_name_or_url, help="destination mirror name, path, or URL"
     )
     update_index.add_argument(
         "-k",
         "--keys",
         default=False,
         action="store_true",
-        help="If provided, key index will be updated as well as package index",
+        help="if provided, key index will be updated as well as package index",
     )
     update_index.set_defaults(func=update_index_fn)
@@ -411,9 +406,7 @@ def keys_fn(args):
 def preview_fn(args):
-    """analyze an installed spec and reports whether executables
-    and libraries are relocatable
-    """
+    """analyze an installed spec and reports whether executables and libraries are relocatable"""
     constraints = spack.cmd.parse_specs(args.specs)
     specs = spack.store.find(constraints, multiple=True)
@@ -425,11 +418,11 @@ def preview_fn(args):
 def check_fn(args):
-    """Check specs (either a single spec from --spec, or else the full set
-    of release specs) against remote binary mirror(s) to see if any need
-    to be rebuilt. This command uses the process exit code to indicate
-    its result, specifically, if the exit code is non-zero, then at least
-    one of the indicated specs needs to be rebuilt.
+    """check specs against remote binary mirror(s) to see if any need to be rebuilt
+
+    either a single spec from --spec, or else the full set of release specs. this command uses the
+    process exit code to indicate its result, specifically, if the exit code is non-zero, then at
+    least one of the indicated specs needs to be rebuilt
     """
     if args.spec or args.spec_file:
         specs = [_concrete_spec_from_args(args)]
@@ -460,10 +453,12 @@ def check_fn(args):
 def download_fn(args):
-    """Download buildcache entry from a remote mirror to local folder. This
-    command uses the process exit code to indicate its result, specifically,
-    a non-zero exit code indicates that the command failed to download at
-    least one of the required buildcache components."""
+    """download buildcache entry from a remote mirror to local folder
+
+    this command uses the process exit code to indicate its result, specifically, a non-zero exit
+    code indicates that the command failed to download at least one of the required buildcache
+    components
+    """
     if not args.spec and not args.spec_file:
         tty.msg("No specs provided, exiting.")
         return
@@ -480,19 +475,18 @@ def download_fn(args):
 def get_buildcache_name_fn(args):
-    """Get name (prefix) of buildcache entries for this spec"""
+    """get name (prefix) of buildcache entries for this spec"""
     spec = _concrete_spec_from_args(args)
     buildcache_name = bindist.tarball_name(spec, "")
     print("{0}".format(buildcache_name))
 def save_specfile_fn(args):
-    """Get full spec for dependencies, relative to root spec, and write them
-    to files in the specified output directory. Uses exit code to signal
-    success or failure. An exit code of zero means the command was likely
-    successful. If any errors or exceptions are encountered, or if expected
-    command-line arguments are not provided, then the exit code will be
-    non-zero.
+    """get full spec for dependencies and write them to files in the specified output directory
+
+    uses exit code to signal success or failure. an exit code of zero means the command was likely
+    successful. if any errors or exceptions are encountered, or if expected command-line arguments
+    are not provided, then the exit code will be non-zero
     """
     if not args.root_spec and not args.root_specfile:
         tty.msg("No root spec provided, exiting.")
@@ -546,12 +540,9 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
 def sync_fn(args):
-    """Syncs binaries (and associated metadata) from one mirror to another.
-    Requires an active environment in order to know which specs to sync.
-    Args:
-        src (str): Source mirror URL
-        dest (str): Destination mirror URL
+    """sync binaries (and associated metadata) from one mirror to another
+
+    requires an active environment in order to know which specs to sync
     """
     if args.manifest_glob:
         manifest_copy(glob.glob(args.manifest_glob))
@@ -639,7 +630,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
 def update_index_fn(args):
-    """Update a buildcache index."""
+    """update a buildcache index"""
     update_index(args.mirror, update_keys=args.keys)
```


```diff
@@ -47,40 +47,36 @@ def setup_parser(subparser):
     generate.add_argument(
         "--output-file",
         default=None,
-        help="""pathname for the generated gitlab ci yaml file
-        Path to the file where generated jobs file should
-        be written. Default is .gitlab-ci.yml in the root of
-        the repository.""",
+        help="pathname for the generated gitlab ci yaml file\n\n"
+        "path to the file where generated jobs file should be written. "
+        "default is .gitlab-ci.yml in the root of the repository",
     )
     generate.add_argument(
         "--copy-to",
         default=None,
-        help="""path to additional directory for job files
-        This option provides an absolute path to a directory
-        where the generated jobs yaml file should be copied.
-        Default is not to copy.""",
+        help="path to additional directory for job files\n\n"
+        "this option provides an absolute path to a directory where the generated "
+        "jobs yaml file should be copied. default is not to copy",
     )
     generate.add_argument(
         "--optimize",
         action="store_true",
         default=False,
-        help="""(Experimental) optimize the gitlab yaml file for size
-        Run the generated document through a series of
-        optimization passes designed to reduce the size
-        of the generated file.""",
+        help="(experimental) optimize the gitlab yaml file for size\n\n"
+        "run the generated document through a series of optimization passes "
+        "designed to reduce the size of the generated file",
     )
     generate.add_argument(
         "--dependencies",
         action="store_true",
         default=False,
-        help="(Experimental) disable DAG scheduling; use " ' "plain" dependencies.',
+        help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
     )
     generate.add_argument(
         "--buildcache-destination",
         default=None,
-        help="Override the mirror configured in the environment (spack.yaml) "
-        + "in order to push binaries from the generated pipeline to a "
-        + "different location.",
+        help="override the mirror configured in the environment\n\n"
+        "allows for pushing binaries from the generated pipeline to a different location",
     )
     prune_group = generate.add_mutually_exclusive_group()
     prune_group.add_argument(
@@ -88,45 +84,37 @@ def setup_parser(subparser):
         action="store_true",
         dest="prune_dag",
         default=True,
-        help="""skip up-to-date specs
-        Do not generate jobs for specs that are up-to-date
-        on the mirror.""",
+        help="skip up-to-date specs\n\n"
+        "do not generate jobs for specs that are up-to-date on the mirror",
     )
     prune_group.add_argument(
         "--no-prune-dag",
         action="store_false",
         dest="prune_dag",
         default=True,
-        help="""process up-to-date specs
-        Generate jobs for specs even when they are up-to-date
-        on the mirror.""",
+        help="process up-to-date specs\n\n"
+        "generate jobs for specs even when they are up-to-date on the mirror",
     )
     generate.add_argument(
         "--check-index-only",
         action="store_true",
         dest="index_only",
         default=False,
-        help="""only check spec state from buildcache indices
-        Spack always checks specs against configured binary
-        mirrors, regardless of the DAG pruning option.
-        If enabled, Spack will assume all remote buildcache
-        indices are up-to-date when assessing whether the spec
-        on the mirror, if present, is up-to-date. This has the
-        benefit of reducing pipeline generation time but at the
-        potential cost of needlessly rebuilding specs when the
-        indices are outdated.
-        If not enabled, Spack will fetch remote spec files
-        directly to assess whether the spec on the mirror is
-        up-to-date.""",
+        help="only check spec state from buildcache indices\n\n"
+        "Spack always checks specs against configured binary mirrors, regardless of the DAG "
+        "pruning option. if enabled, Spack will assume all remote buildcache indices are "
+        "up-to-date when assessing whether the spec on the mirror, if present, is up-to-date. "
+        "this has the benefit of reducing pipeline generation time but at the potential cost of "
+        "needlessly rebuilding specs when the indices are outdated. if not enabled, Spack will "
+        "fetch remote spec files directly to assess whether the spec on the mirror is up-to-date",
     )
     generate.add_argument(
         "--artifacts-root",
         default=None,
-        help="""path to the root of the artifacts directory
-        If provided, concrete environment files (spack.yaml,
-        spack.lock) will be generated under this directory.
-        Their location will be passed to generated child jobs
-        through the SPACK_CONCRETE_ENVIRONMENT_PATH variable.""",
+        help="path to the root of the artifacts directory\n\n"
+        "if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
+        "this directory. their location will be passed to generated child jobs through the "
+        "SPACK_CONCRETE_ENVIRONMENT_PATH variable",
     )
     generate.set_defaults(func=ci_generate)
@@ -150,13 +138,13 @@ def setup_parser(subparser):
         "--tests",
         action="store_true",
         default=False,
-        help="""run stand-alone tests after the build""",
+        help="run stand-alone tests after the build",
     )
     rebuild.add_argument(
         "--fail-fast",
         action="store_true",
         default=False,
-        help="""stop stand-alone tests after the first failure""",
+        help="stop stand-alone tests after the first failure",
     )
     rebuild.set_defaults(func=ci_rebuild)
@@ -166,10 +154,10 @@ def setup_parser(subparser):
         description=deindent(ci_reproduce.__doc__),
         help=spack.cmd.first_line(ci_reproduce.__doc__),
     )
-    reproduce.add_argument("job_url", help="Url of job artifacts bundle")
+    reproduce.add_argument("job_url", help="URL of job artifacts bundle")
     reproduce.add_argument(
         "--working-dir",
-        help="Where to unpack artifacts",
+        help="where to unpack artifacts",
         default=os.path.join(os.getcwd(), "ci_reproduction"),
     )
@@ -177,12 +165,12 @@ def setup_parser(subparser):
 def ci_generate(args):
-    """Generate jobs file from a CI-aware spack file.
-    If you want to report the results on CDash, you will need to set
-    the SPACK_CDASH_AUTH_TOKEN before invoking this command. The
-    value must be the CDash authorization token needed to create a
-    build group and register all generated jobs under it."""
+    """generate jobs file from a CI-aware spack file
+
+    if you want to report the results on CDash, you will need to set the SPACK_CDASH_AUTH_TOKEN
+    before invoking this command. the value must be the CDash authorization token needed to create
+    a build group and register all generated jobs under it
+    """
     env = spack.cmd.require_active_env(cmd_name="ci generate")
     output_file = args.output_file
@@ -223,10 +211,11 @@ def ci_generate(args):
 def ci_reindex(args):
-    """Rebuild the buildcache index for the remote mirror.
-    Use the active, gitlab-enabled environment to rebuild the buildcache
-    index for the associated mirror."""
+    """rebuild the buildcache index for the remote mirror
+
+    use the active, gitlab-enabled environment to rebuild the buildcache index for the associated
+    mirror
+    """
     env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
     yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
@@ -242,10 +231,11 @@ def ci_reindex(args):
 def ci_rebuild(args):
-    """Rebuild a spec if it is not on the remote mirror.
-    Check a single spec against the remote mirror, and rebuild it from
-    source if the mirror does not contain the hash."""
+    """rebuild a spec if it is not on the remote mirror
+
+    check a single spec against the remote mirror, and rebuild it from source if the mirror does
+    not contain the hash
+    """
     env = spack.cmd.require_active_env(cmd_name="ci rebuild")
     # Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -606,7 +596,7 @@ def ci_rebuild(args):
     )
     reports_dir = fs.join_path(os.getcwd(), "cdash_report")
     if args.tests and broken_tests:
-        tty.warn("Unable to run stand-alone tests since listed in " "ci's 'broken-tests-packages'")
+        tty.warn("Unable to run stand-alone tests since listed in ci's 'broken-tests-packages'")
         if cdash_handler:
             msg = "Package is listed in ci's broken-tests-packages"
             cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
@@ -649,7 +639,7 @@ def ci_rebuild(args):
         tty.warn("No recognized test results reporting option")
     else:
-        tty.warn("Unable to run stand-alone tests due to unsuccessful " "installation")
+        tty.warn("Unable to run stand-alone tests due to unsuccessful installation")
         if cdash_handler:
             msg = "Failed to install the package"
             cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
@@ -728,10 +718,11 @@ def ci_rebuild(args):
 def ci_reproduce(args):
-    """Generate instructions for reproducing the spec rebuild job.
-    Artifacts of the provided gitlab pipeline rebuild job's URL will be
-    used to derive instructions for reproducing the build locally."""
+    """generate instructions for reproducing the spec rebuild job
+
+    artifacts of the provided gitlab pipeline rebuild job's URL will be used to derive
+    instructions for reproducing the build locally
+    """
     job_url = args.job_url
     work_dir = args.working_dir
```


```diff
@@ -48,7 +48,7 @@ def get_origin_info(remote):
         )
     except ProcessError:
         origin_url = _SPACK_UPSTREAM
-        tty.warn("No git repository found; " "using default upstream URL: %s" % origin_url)
+        tty.warn("No git repository found; using default upstream URL: %s" % origin_url)
     return (origin_url.strip(), branch.strip())
@@ -69,7 +69,7 @@ def clone(parser, args):
     files_in_the_way = os.listdir(prefix)
     if files_in_the_way:
         tty.die(
-            "There are already files there! " "Delete these files before boostrapping spack.",
+            "There are already files there! Delete these files before boostrapping spack.",
             *files_in_the_way,
         )
```


```diff
@@ -265,7 +265,7 @@ def recurse_dependents():
         "--dependents",
         action="store_true",
         dest="dependents",
-        help="also uninstall any packages that depend on the ones given " "via command line",
+        help="also uninstall any packages that depend on the ones given via command line",
     )
@@ -286,7 +286,7 @@ def deptype():
         "--deptype",
         action=DeptypeAction,
         default=dep.all_deptypes,
-        help="comma-separated list of deptypes to traverse\ndefault=%s"
+        help="comma-separated list of deptypes to traverse\n\ndefault=%s"
         % ",".join(dep.all_deptypes),
     )
@@ -350,9 +350,9 @@ def install_status():
         "--install-status",
         action="store_true",
         default=True,
-        help="show install status of packages. packages can be: "
+        help="show install status of packages\n\npackages can be: "
         "installed [+], missing and needed by an installed package [-], "
-        "installed in and upstream instance [^], "
+        "installed in an upstream instance [^], "
         "or not installed (no annotation)",
     )
@@ -393,24 +393,23 @@ def add_cdash_args(subparser, add_help):
     cdash_help = {}
     if add_help:
         cdash_help["upload-url"] = "CDash URL where reports will be uploaded"
-        cdash_help[
-            "build"
-        ] = """The name of the build that will be reported to CDash.
-        Defaults to spec of the package to operate on."""
-        cdash_help[
-            "site"
-        ] = """The site name that will be reported to CDash.
-        Defaults to current system hostname."""
-        cdash_help[
-            "track"
-        ] = """Results will be reported to this group on CDash.
-        Defaults to Experimental."""
-        cdash_help[
-            "buildstamp"
-        ] = """Instead of letting the CDash reporter prepare the
-        buildstamp which, when combined with build name, site and project,
-        uniquely identifies the build, provide this argument to identify
-        the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
+        cdash_help["build"] = (
+            "name of the build that will be reported to CDash\n\n"
+            "defaults to spec of the package to operate on"
+        )
+        cdash_help["site"] = (
+            "site name that will be reported to CDash\n\n" "defaults to current system hostname"
+        )
+        cdash_help["track"] = (
+            "results will be reported to this group on CDash\n\n" "defaults to Experimental"
+        )
+        cdash_help["buildstamp"] = (
+            "use custom buildstamp\n\n"
+            "instead of letting the CDash reporter prepare the "
+            "buildstamp which, when combined with build name, site and project, "
+            "uniquely identifies the build, provide this argument to identify "
+            "the build yourself. format: %%Y%%m%%d-%%H%%M-[cdash-track]"
+        )
     else:
         cdash_help["upload-url"] = argparse.SUPPRESS
         cdash_help["build"] = argparse.SUPPRESS
@@ -542,16 +541,16 @@ def add_s3_connection_args(subparser, add_help):
         "--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
     )
     subparser.add_argument(
-        "--s3-access-key-secret", help="Secret string to use to connect to this S3 mirror"
+        "--s3-access-key-secret", help="secret string to use to connect to this S3 mirror"
     )
     subparser.add_argument(
-        "--s3-access-token", help="Access Token to use to connect to this S3 mirror"
+        "--s3-access-token", help="access token to use to connect to this S3 mirror"
     )
     subparser.add_argument(
         "--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
     )
     subparser.add_argument(
-        "--s3-endpoint-url", help="Endpoint URL to use to connect to this S3 mirror"
+        "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
     )
```


```diff
@@ -14,18 +14,16 @@
 def setup_parser(subparser):
     subparser.add_argument(
-        "-f", "--force", action="store_true", help="Re-concretize even if already concretized."
+        "-f", "--force", action="store_true", help="re-concretize even if already concretized"
     )
     subparser.add_argument(
         "--test",
         default=None,
         choices=["root", "all"],
-        help="""Concretize with test dependencies. When 'root' is chosen, test
-        dependencies are only added for the environment's root specs. When 'all' is
-        chosen, test dependencies are enabled for all packages in the environment.""",
+        help="concretize with test dependencies of only root packages or all packages",
     )
     subparser.add_argument(
-        "-q", "--quiet", action="store_true", help="Don't print concretized specs"
+        "-q", "--quiet", action="store_true", help="don't print concretized specs"
     )
     spack.cmd.common.arguments.add_concretizer_args(subparser)
```


```diff
@@ -42,7 +42,7 @@ def setup_parser(subparser):
     get_parser = sp.add_parser("get", help="print configuration values")
     get_parser.add_argument(
         "section",
-        help="configuration section to print. " "options: %(choices)s",
+        help="configuration section to print\n\noptions: %(choices)s",
         nargs="?",
         metavar="section",
         choices=spack.config.section_schemas,
@@ -53,7 +53,7 @@ def setup_parser(subparser):
     )
     blame_parser.add_argument(
         "section",
-        help="configuration section to print. " "options: %(choices)s",
+        help="configuration section to print\n\noptions: %(choices)s",
         metavar="section",
         choices=spack.config.section_schemas,
     )
@@ -61,7 +61,7 @@ def setup_parser(subparser):
     edit_parser = sp.add_parser("edit", help="edit configuration file")
     edit_parser.add_argument(
         "section",
-        help="configuration section to edit. " "options: %(choices)s",
+        help="configuration section to edit\n\noptions: %(choices)s",
         metavar="section",
         nargs="?",
         choices=spack.config.section_schemas,
@@ -76,7 +76,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "path",
         nargs="?",
-        help="colon-separated path to config that should be added," " e.g. 'config:default:true'",
+        help="colon-separated path to config that should be added, e.g. 'config:default:true'",
     )
     add_parser.add_argument("-f", "--file", help="file from which to set all config values")
@@ -88,7 +88,7 @@ def setup_parser(subparser):
         "--local",
         action="store_true",
         default=False,
-        help="Set packages preferences based on local installs, rather " "than upstream.",
+        help="set packages preferences based on local installs, rather than upstream",
     )
     remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove configuration parameters")
@@ -157,7 +157,7 @@ def config_get(args):
             tty.die("environment has no %s file" % ev.manifest_name)
     else:
-        tty.die("`spack config get` requires a section argument " "or an active environment.")
+        tty.die("`spack config get` requires a section argument or an active environment.")
 def config_blame(args):
@@ -180,7 +180,7 @@ def config_edit(args):
         # If we aren't editing a spack.yaml file, get config path from scope.
         scope, section = _get_scope_and_section(args)
         if not scope and not section:
-            tty.die("`spack config edit` requires a section argument " "or an active environment.")
+            tty.die("`spack config edit` requires a section argument or an active environment.")
     config_file = spack.config.config.get_config_filename(scope, section)
     if args.print_file:
@@ -374,7 +374,7 @@ def config_revert(args):
     proceed = True
     if not args.yes_to_all:
-        msg = "The following scopes will be restored from the corresponding" " backup files:\n"
+        msg = "The following scopes will be restored from the corresponding backup files:\n"
         for entry in to_be_restored:
             msg += "\t[scope={0.scope}, bkp={0.bkp}]\n".format(entry)
         msg += "This operation cannot be undone."
```


```diff
@@ -10,7 +10,7 @@
 import spack.container
 import spack.container.images
-description = "creates recipes to build images for different" " container runtimes"
+description = "creates recipes to build images for different container runtimes"
 section = "container"
 level = "long"
```


```diff
@@ -612,7 +612,7 @@ def setup_parser(subparser):
         "--template",
         metavar="TEMPLATE",
         choices=sorted(templates.keys()),
-        help="build system template to use. options: %(choices)s",
+        help="build system template to use\n\noptions: %(choices)s",
     )
     subparser.add_argument(
         "-r", "--repo", help="path to a repository where the package should be created"
@@ -620,7 +620,7 @@ def setup_parser(subparser):
     subparser.add_argument(
         "-N",
         "--namespace",
-        help="specify a namespace for the package. must be the namespace of "
+        help="specify a namespace for the package\n\nmust be the namespace of "
         "a repository registered with Spack",
     )
     subparser.add_argument(
@@ -878,7 +878,7 @@ def get_build_system(template, url, guesser):
         # Use whatever build system the guesser detected
         selected_template = guesser.build_system
         if selected_template == "generic":
-            tty.warn("Unable to detect a build system. " "Using a generic package template.")
+            tty.warn("Unable to detect a build system. Using a generic package template.")
         else:
             msg = "This package looks like it uses the {0} build system"
             tty.msg(msg.format(selected_template))
```


```diff
@@ -26,8 +26,8 @@ def setup_parser(subparser):
         "--installed",
         action="store_true",
         default=False,
-        help="List installed dependencies of an installed spec, "
-        "instead of possible dependencies of a package.",
+        help="list installed dependencies of an installed spec "
+        "instead of possible dependencies of a package",
     )
     subparser.add_argument(
         "-t",
```


```diff
@@ -25,15 +25,15 @@ def setup_parser(subparser):
         "--installed",
         action="store_true",
         default=False,
-        help="List installed dependents of an installed spec, "
-        "instead of possible dependents of a package.",
+        help="list installed dependents of an installed spec "
+        "instead of possible dependents of a package",
     )
     subparser.add_argument(
         "-t",
         "--transitive",
         action="store_true",
         default=False,
-        help="Show all transitive dependents.",
+        help="show all transitive dependents",
     )
     arguments.add_common_arguments(subparser, ["spec"])
```


```diff
@@ -26,7 +26,7 @@
 from spack.database import InstallStatuses
 from spack.error import SpackError
-description = "Replace one package with another via symlinks"
+description = "replace one package with another via symlinks"
 section = "admin"
 level = "long"
@@ -46,7 +46,7 @@ def setup_parser(sp):
         action="store_true",
         default=True,
         dest="dependencies",
-        help="Deprecate dependencies (default)",
+        help="deprecate dependencies (default)",
     )
     deps.add_argument(
         "-D",
@@ -54,7 +54,7 @@ def setup_parser(sp):
         action="store_false",
         default=True,
         dest="dependencies",
-        help="Do not deprecate dependencies",
+        help="do not deprecate dependencies",
     )
     install = sp.add_mutually_exclusive_group()
@@ -64,7 +64,7 @@ def setup_parser(sp):
         action="store_true",
         default=False,
         dest="install",
-        help="Concretize and install deprecator spec",
+        help="concretize and install deprecator spec",
     )
     install.add_argument(
         "-I",
@@ -72,7 +72,7 @@ def setup_parser(sp):
         action="store_false",
         default=False,
         dest="install",
-        help="Deprecator spec must already be installed (default)",
+        help="deprecator spec must already be installed (default)",
     )
     sp.add_argument(
@@ -81,7 +81,7 @@ def setup_parser(sp):
         type=str,
         default="soft",
         choices=["soft", "hard"],
-        help="Type of filesystem link to use for deprecation (default soft)",
+        help="type of filesystem link to use for deprecation (default soft)",
     )
     sp.add_argument(
```


```diff
@@ -25,14 +25,14 @@ def setup_parser(subparser):
         "--source-path",
         dest="source_path",
         default=None,
-        help="path to source directory. defaults to the current directory",
+        help="path to source directory (defaults to the current directory)",
     )
     subparser.add_argument(
         "-i",
         "--ignore-dependencies",
         action="store_true",
         dest="ignore_deps",
-        help="don't try to install dependencies of requested packages",
+        help="do not try to install dependencies of requested packages",
     )
     arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
     subparser.add_argument(
@@ -55,16 +55,13 @@ def setup_parser(subparser):
         type=str,
         dest="shell",
         default=None,
-        help="drop into a build environment in a new shell, e.g. bash, zsh",
+        help="drop into a build environment in a new shell, e.g., bash",
     )
     subparser.add_argument(
         "--test",
         default=None,
         choices=["root", "all"],
-        help="""If 'root' is chosen, run package tests during
-        installation for top-level packages (but skip tests for dependencies).
-        if 'all' is chosen, run package tests during installation for all
-        packages. If neither are chosen, don't run tests for any packages.""",
+        help="run tests on only root packages or all packages",
     )
     arguments.add_common_arguments(subparser, ["spec"])
```


```diff
@@ -20,7 +20,7 @@
 def setup_parser(subparser):
-    subparser.add_argument("-p", "--path", help="Source location of package")
+    subparser.add_argument("-p", "--path", help="source location of package")
     clone_group = subparser.add_mutually_exclusive_group()
     clone_group.add_argument(
@@ -28,18 +28,18 @@ def setup_parser(subparser):
         action="store_false",
         dest="clone",
         default=None,
-        help="Do not clone. The package already exists at the source path",
+        help="do not clone, the package already exists at the source path",
     )
     clone_group.add_argument(
         "--clone",
         action="store_true",
         dest="clone",
         default=None,
-        help="Clone the package even if the path already exists",
+        help="clone the package even if the path already exists",
     )
     subparser.add_argument(
-        "-f", "--force", help="Remove any files or directories that block cloning source code"
+        "-f", "--force", help="remove any files or directories that block cloning source code"
     )
     arguments.add_common_arguments(subparser, ["spec"])
```


```diff
@@ -29,7 +29,7 @@ def setup_parser(subparser):
         action="store_true",
         default=False,
         dest="dump_json",
-        help="Dump json output instead of pretty printing.",
+        help="dump json output instead of pretty printing",
     )
     subparser.add_argument(
         "--first",
```


```diff
@@ -62,7 +62,7 @@ def setup_parser(subparser):
         dest="path",
         action="store_const",
         const=spack.paths.build_systems_path,
-        help="Edit the build system with the supplied name.",
+        help="edit the build system with the supplied name",
     )
     excl_args.add_argument(
         "-c",
```


```diff
@@ -102,7 +102,7 @@ def env_activate_setup_parser(subparser):
         dest="with_view",
         const=True,
         default=True,
-        help="update PATH etc. with associated view",
+        help="update PATH, etc., with associated view",
     )
     view_options.add_argument(
         "-V",
@@ -111,7 +111,7 @@ def env_activate_setup_parser(subparser):
         dest="with_view",
         const=False,
         default=True,
-        help="do not update PATH etc. with associated view",
+        help="do not update PATH, etc., with associated view",
     )
     subparser.add_argument(
@@ -161,7 +161,7 @@ def env_activate(args):
     # Error out when -e, -E, -D flags are given, cause they are ambiguous.
     if args.env or args.no_env or args.env_dir:
-        tty.die("Calling spack env activate with --env, --env-dir and --no-env " "is ambiguous")
+        tty.die("Calling spack env activate with --env, --env-dir and --no-env is ambiguous")
     env_name_or_dir = args.activate_env or args.dir
@@ -250,7 +250,7 @@ def env_deactivate(args):
     # Error out when -e, -E, -D flags are given, cause they are ambiguous.
     if args.env or args.no_env or args.env_dir:
-        tty.die("Calling spack env deactivate with --env, --env-dir and --no-env " "is ambiguous")
+        tty.die("Calling spack env deactivate with --env, --env-dir and --no-env is ambiguous")
     if ev.active_environment() is None:
         tty.die("No environment is currently active.")
@@ -290,7 +290,7 @@ def env_create_setup_parser(subparser):
         "envfile",
         nargs="?",
         default=None,
-        help="either a lockfile (must end with '.json' or '.lock') or a manifest file.",
+        help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
     )
@@ -608,16 +608,16 @@ def env_depfile_setup_parser(subparser):
         "--make-target-prefix",
         default=None,
         metavar="TARGET",
-        help="prefix Makefile targets (and variables) with <TARGET>/<name>. By default "
+        help="prefix Makefile targets (and variables) with <TARGET>/<name>\n\nby default "
         "the absolute path to the directory makedeps under the environment metadata dir is "
-        "used. Can be set to an empty string --make-prefix ''.",
+        "used. can be set to an empty string --make-prefix ''",
     )
     subparser.add_argument(
         "--make-disable-jobserver",
         default=True,
         action="store_false",
         dest="jobserver",
-        help="disable POSIX jobserver support.",
+        help="disable POSIX jobserver support",
     )
     subparser.add_argument(
         "--use-buildcache",
@@ -625,8 +625,8 @@ def env_depfile_setup_parser(subparser):
         type=arguments.use_buildcache,
         default="package:auto,dependencies:auto",
         metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
-        help="When using `only`, redundant build dependencies are pruned from the DAG. "
-        "This flag is passed on to the generated spack install commands.",
+        help="when using `only`, redundant build dependencies are pruned from the DAG\n\n"
+        "this flag is passed on to the generated spack install commands",
     )
     subparser.add_argument(
         "-o",
@@ -640,7 +640,7 @@ def env_depfile_setup_parser(subparser):
         "--generator",
         default="make",
         choices=("make",),
-        help="specify the depfile type. Currently only make is supported.",
+        help="specify the depfile type\n\ncurrently only make is supported",
     )
     subparser.add_argument(
         metavar="specs",
```


```diff
@@ -22,7 +22,7 @@
 def setup_parser(subparser):
     subparser.epilog = (
-        "If called without argument returns " "the list of all valid extendable packages"
+        "If called without argument returns the list of all valid extendable packages"
     )
     arguments.add_common_arguments(subparser, ["long", "very_long"])
     subparser.add_argument(
```


```diff
@@ -42,7 +42,7 @@ def setup_parser(subparser):
         "--path",
         default=None,
         action="append",
-        help="Alternative search paths for finding externals. May be repeated",
+        help="one or more alternative search paths for finding externals",
     )
     find_parser.add_argument(
         "--scope",
@@ -66,10 +66,8 @@ def setup_parser(subparser):
     read_cray_manifest = sp.add_parser(
         "read-cray-manifest",
-        help=(
-            "consume a Spack-compatible description of externally-installed "
-            "packages, including dependency relationships"
-        ),
+        help="consume a Spack-compatible description of externally-installed packages, including "
+        "dependency relationships",
     )
     read_cray_manifest.add_argument(
         "--file", default=None, help="specify a location other than the default"
@@ -92,7 +90,7 @@ def setup_parser(subparser):
     read_cray_manifest.add_argument(
         "--fail-on-error",
         action="store_true",
-        help=("if a manifest file cannot be parsed, fail and report the " "full stack trace"),
+        help="if a manifest file cannot be parsed, fail and report the full stack trace",
     )
@@ -111,14 +109,14 @@ def external_find(args):
         # For most exceptions, just print a warning and continue.
         # Note that KeyboardInterrupt does not subclass Exception
         # (so CTRL-C will terminate the program as expected).
-        skip_msg = "Skipping manifest and continuing with other external " "checks"
+        skip_msg = "Skipping manifest and continuing with other external checks"
         if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
             errno.EPERM,
             errno.EACCES,
         ]:
             # The manifest file does not have sufficient permissions enabled:
             # print a warning and keep going
-            tty.warn("Unable to read manifest due to insufficient " "permissions.", skip_msg)
+            tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
         else:
             tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)
@@ -168,7 +166,7 @@ def external_find(args):
     )
     if new_entries:
         path = spack.config.config.get_config_filename(args.scope, "packages")
-        msg = "The following specs have been detected on this system " "and added to {0}"
+        msg = "The following specs have been detected on this system and added to {0}"
         tty.msg(msg.format(path))
         spack.cmd.display_specs(new_entries)
     else:
@@ -236,7 +234,7 @@ def _collect_and_consume_cray_manifest_files(
         if fail_on_error:
             raise
         else:
-            tty.warn("Failure reading manifest file: {0}" "\n\t{1}".format(path, str(e)))
+            tty.warn("Failure reading manifest file: {0}\n\t{1}".format(path, str(e)))
 def external_list(args):
```


```diff
@@ -51,9 +51,7 @@ def fetch(parser, args):
         else:
             specs = env.all_specs()
         if specs == []:
-            tty.die(
-                "No uninstalled specs in environment. Did you " "run `spack concretize` yet?"
-            )
+            tty.die("No uninstalled specs in environment. Did you run `spack concretize` yet?")
     else:
         tty.die("fetch requires at least one spec argument")
```


@ -68,7 +68,7 @@ def setup_parser(subparser):
metavar="DEST", metavar="DEST",
type=str, type=str,
dest="secret", dest="secret",
help="export the private key to a file.", help="export the private key to a file",
) )
create.set_defaults(func=gpg_create) create.set_defaults(func=gpg_create)
@ -86,7 +86,7 @@ def setup_parser(subparser):
export = subparsers.add_parser("export", help=gpg_export.__doc__) export = subparsers.add_parser("export", help=gpg_export.__doc__)
export.add_argument("location", type=str, help="where to export keys") export.add_argument("location", type=str, help="where to export keys")
export.add_argument( export.add_argument(
"keys", nargs="*", help="the keys to export; " "all public keys if unspecified" "keys", nargs="*", help="the keys to export (all public keys if unspecified)"
) )
export.add_argument("--secret", action="store_true", help="export secret keys") export.add_argument("--secret", action="store_true", help="export secret keys")
export.set_defaults(func=gpg_export) export.set_defaults(func=gpg_export)
@ -99,29 +99,29 @@ def setup_parser(subparser):
"--directory", "--directory",
metavar="directory", metavar="directory",
type=str, type=str,
help="local directory where keys will be published.", help="local directory where keys will be published",
) )
output.add_argument( output.add_argument(
"-m", "-m",
"--mirror-name", "--mirror-name",
metavar="mirror-name", metavar="mirror-name",
type=str, type=str,
help="name of the mirror where " + "keys will be published.", help="name of the mirror where keys will be published",
) )
output.add_argument( output.add_argument(
"--mirror-url", "--mirror-url",
metavar="mirror-url", metavar="mirror-url",
type=str, type=str,
help="URL of the mirror where " + "keys will be published.", help="URL of the mirror where keys will be published",
) )
publish.add_argument( publish.add_argument(
"--rebuild-index", "--rebuild-index",
action="store_true", action="store_true",
default=False, default=False,
help=("Regenerate buildcache key index " "after publishing key(s)"), help="regenerate buildcache key index after publishing key(s)",
) )
publish.add_argument( publish.add_argument(
"keys", nargs="*", help="the keys to publish; " "all public keys if unspecified" "keys", nargs="*", help="keys to publish (all public keys if unspecified)"
) )
publish.set_defaults(func=gpg_publish) publish.set_defaults(func=gpg_publish)
@ -146,7 +146,7 @@ def gpg_create(args):
def gpg_export(args): def gpg_export(args):
"""export a gpg key, optionally including secret key.""" """export a gpg key, optionally including secret key"""
keys = args.keys keys = args.keys
if not keys: if not keys:
keys = spack.util.gpg.signing_keys() keys = spack.util.gpg.signing_keys()
@ -168,7 +168,7 @@ def gpg_sign(args):
elif not keys: elif not keys:
raise RuntimeError("no signing keys are available") raise RuntimeError("no signing keys are available")
else: else:
raise RuntimeError("multiple signing keys are available; " "please choose one") raise RuntimeError("multiple signing keys are available; please choose one")
output = args.output output = args.output
if not output: if not output:
output = args.spec[0] + ".asc" output = args.spec[0] + ".asc"
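As an aside, the lowercase, period-free style above lines up with the help text argparse generates on its own; a standalone sketch (not Spack code) of that built-in baseline:

import argparse

parser = argparse.ArgumentParser(prog="spack gpg export")
parser.add_argument("location", type=str, help="where to export keys")
parser.add_argument(
    "keys", nargs="*", help="the keys to export (all public keys if unspecified)"
)
parser.print_help()
# the auto-added option prints in the same style:
#   -h, --help  show this help message and exit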

View file

@ -75,10 +75,9 @@ def setup_parser(subparser):
default="package,dependencies", default="package,dependencies",
dest="things_to_install", dest="things_to_install",
choices=["package", "dependencies"], choices=["package", "dependencies"],
help="""select the mode of installation. help="select the mode of installation\n\n"
the default is to install the package along with all its dependencies. "default is to install the package along with all its dependencies. "
alternatively one can decide to install only the package or only "alternatively, one can decide to install only the package or only the dependencies",
the dependencies""",
) )
subparser.add_argument( subparser.add_argument(
"-u", "-u",
@ -143,12 +142,11 @@ def setup_parser(subparser):
type=arguments.use_buildcache, type=arguments.use_buildcache,
default="package:auto,dependencies:auto", default="package:auto,dependencies:auto",
metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]", metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
help="""select the mode of buildcache for the 'package' and 'dependencies'. help="select the mode of buildcache for the 'package' and 'dependencies'\n\n"
Default: package:auto,dependencies:auto "default: package:auto,dependencies:auto\n\n"
- `auto` behaves like --use-cache "- `auto` behaves like --use-cache\n"
- `only` behaves like --cache-only "- `only` behaves like --cache-only\n"
- `never` behaves like --no-cache "- `never` behaves like --no-cache",
""",
) )
subparser.add_argument( subparser.add_argument(
@ -156,8 +154,8 @@ def setup_parser(subparser):
action="store_true", action="store_true",
dest="include_build_deps", dest="include_build_deps",
default=False, default=False,
help="""include build deps when installing from cache, help="include build deps when installing from cache, "
which is useful for CI pipeline troubleshooting""", "useful for CI pipeline troubleshooting",
) )
subparser.add_argument( subparser.add_argument(
@ -186,7 +184,7 @@ def setup_parser(subparser):
dest="install_verbose", dest="install_verbose",
help="display verbose build output while installing", help="display verbose build output while installing",
) )
subparser.add_argument("--fake", action="store_true", help="fake install for debug purposes.") subparser.add_argument("--fake", action="store_true", help="fake install for debug purposes")
subparser.add_argument( subparser.add_argument(
"--only-concrete", "--only-concrete",
action="store_true", action="store_true",
@ -199,14 +197,13 @@ def setup_parser(subparser):
"--add", "--add",
action="store_true", action="store_true",
default=False, default=False,
help="""(with environment) add spec to the environment as a root.""", help="(with environment) add spec to the environment as a root",
) )
updateenv_group.add_argument( updateenv_group.add_argument(
"--no-add", "--no-add",
action="store_false", action="store_false",
dest="add", dest="add",
help="""(with environment) do not add spec to the environment as a help="(with environment) do not add spec to the environment as a root",
root (the default behavior).""",
) )
subparser.add_argument( subparser.add_argument(
@ -216,7 +213,7 @@ def setup_parser(subparser):
default=[], default=[],
dest="specfiles", dest="specfiles",
metavar="SPEC_YAML_FILE", metavar="SPEC_YAML_FILE",
help="install from file. Read specs to install from .yaml files", help="read specs to install from .yaml files",
) )
cd_group = subparser.add_mutually_exclusive_group() cd_group = subparser.add_mutually_exclusive_group()
@ -227,19 +224,12 @@ def setup_parser(subparser):
"--test", "--test",
default=None, default=None,
choices=["root", "all"], choices=["root", "all"],
help="""If 'root' is chosen, run package tests during help="run tests on only root packages or all packages",
installation for top-level packages (but skip tests for dependencies).
if 'all' is chosen, run package tests during installation for all
packages. If neither are chosen, don't run tests for any packages.""",
) )
arguments.add_common_arguments(subparser, ["log_format"]) arguments.add_common_arguments(subparser, ["log_format"])
subparser.add_argument("--log-file", default=None, help="filename for the log file")
subparser.add_argument( subparser.add_argument(
"--log-file", "--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
default=None,
help="filename for the log file. if not passed a default will be used",
)
subparser.add_argument(
"--help-cdash", action="store_true", help="Show usage instructions for CDash reporting"
) )
arguments.add_cdash_args(subparser, False) arguments.add_cdash_args(subparser, False)
arguments.add_common_arguments(subparser, ["yes_to_all", "spec"]) arguments.add_common_arguments(subparser, ["yes_to_all", "spec"])
@ -280,7 +270,7 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):
display_args = {"long": True, "show_flags": True, "variants": True} display_args = {"long": True, "show_flags": True, "variants": True}
if installed: if installed:
tty.msg("The following package specs will be " "reinstalled:\n") tty.msg("The following package specs will be reinstalled:\n")
spack.cmd.display_specs(installed, **display_args) spack.cmd.display_specs(installed, **display_args)
not_installed = list(filter(lambda x: x not in installed, concrete_specs)) not_installed = list(filter(lambda x: x not in installed, concrete_specs))
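The `\n\n`-separated help strings above only keep their line breaks if the parser's formatter preserves newlines; the stock HelpFormatter re-wraps everything into one paragraph. A sketch under that assumption (not the actual Spack parser setup):

import argparse

parser = argparse.ArgumentParser(
    prog="demo install", formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument(
    "--use-buildcache",
    metavar="MODE",
    help="select the mode of buildcache for the 'package' and 'dependencies'\n\n"
    "default: package:auto,dependencies:auto",
)
parser.print_help()  # the blank line inside the help text is preserved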

View file

@ -66,10 +66,9 @@ def setup_parser(subparser):
default="package,dependencies", default="package,dependencies",
dest="things_to_load", dest="things_to_load",
choices=["package", "dependencies"], choices=["package", "dependencies"],
help="""select whether to load the package and its dependencies help="select whether to load the package and its dependencies\n\n"
the default is to load the package and all dependencies "the default is to load the package and all dependencies. alternatively, "
alternatively one can decide to load only the package or only "one can decide to load only the package or only the dependencies",
the dependencies""",
) )
subparser.add_argument( subparser.add_argument(

View file

@ -55,13 +55,13 @@ def setup_parser(subparser):
directories.add_argument( directories.add_argument(
"--source-dir", "--source-dir",
action="store_true", action="store_true",
help="source directory for a spec " "(requires it to be staged first)", help="source directory for a spec (requires it to be staged first)",
) )
directories.add_argument( directories.add_argument(
"-b", "-b",
"--build-dir", "--build-dir",
action="store_true", action="store_true",
help="build directory for a spec " "(requires it to be staged first)", help="build directory for a spec (requires it to be staged first)",
) )
directories.add_argument( directories.add_argument(
"-e", "-e",
@ -162,7 +162,7 @@ def location(parser, args):
# source dir remains, which requires the spec to be staged # source dir remains, which requires the spec to be staged
if not pkg.stage.expanded: if not pkg.stage.expanded:
tty.die( tty.die(
"Source directory does not exist yet. " "Run this to create it:", "Source directory does not exist yet. Run this to create it:",
"spack stage " + " ".join(args.spec), "spack stage " + " ".join(args.spec),
) )

View file

@ -39,7 +39,7 @@ def line_to_rtf(str):
def setup_parser(subparser): def setup_parser(subparser):
spack_source_group = subparser.add_mutually_exclusive_group(required=True) spack_source_group = subparser.add_mutually_exclusive_group(required=True)
spack_source_group.add_argument( spack_source_group.add_argument(
"-v", "--spack-version", default="", help="download given spack version e.g. 0.16.0" "-v", "--spack-version", default="", help="download given spack version"
) )
spack_source_group.add_argument( spack_source_group.add_argument(
"-s", "--spack-source", default="", help="full path to spack source" "-s", "--spack-source", default="", help="full path to spack source"
@ -50,7 +50,7 @@ def setup_parser(subparser):
"--git-installer-verbosity", "--git-installer-verbosity",
default="", default="",
choices=["SILENT", "VERYSILENT"], choices=["SILENT", "VERYSILENT"],
help="Level of verbosity provided by bundled Git Installer. Default is fully verbose", help="level of verbosity provided by bundled git installer (default is fully verbose)",
required=False, required=False,
action="store", action="store",
dest="git_verbosity", dest="git_verbosity",

View file

@ -35,10 +35,7 @@ def setup_parser(subparser):
"--all", "--all",
action="store_true", action="store_true",
dest="all", dest="all",
help="Mark ALL installed packages that match each " help="mark ALL installed packages that match each supplied spec",
"supplied spec. If you `mark --all libelf`,"
" ALL versions of `libelf` are marked. If no spec is "
"supplied, all installed packages will be marked.",
) )
exim = subparser.add_mutually_exclusive_group(required=True) exim = subparser.add_mutually_exclusive_group(required=True)
exim.add_argument( exim.add_argument(
@ -46,14 +43,14 @@ def setup_parser(subparser):
"--explicit", "--explicit",
action="store_true", action="store_true",
dest="explicit", dest="explicit",
help="Mark packages as explicitly installed.", help="mark packages as explicitly installed",
) )
exim.add_argument( exim.add_argument(
"-i", "-i",
"--implicit", "--implicit",
action="store_true", action="store_true",
dest="implicit", dest="implicit",
help="Mark packages as implicitly installed.", help="mark packages as implicitly installed",
) )

View file

@ -55,13 +55,13 @@ def setup_parser(subparser):
) )
create_parser.add_argument( create_parser.add_argument(
"--exclude-specs", "--exclude-specs",
help="specs which Spack should not try to add to a mirror" " (specified on command line)", help="specs which Spack should not try to add to a mirror (specified on command line)",
) )
create_parser.add_argument( create_parser.add_argument(
"--skip-unstable-versions", "--skip-unstable-versions",
action="store_true", action="store_true",
help="don't cache versions unless they identify a stable (unchanging)" " source code", help="don't cache versions unless they identify a stable (unchanging) source code",
) )
create_parser.add_argument( create_parser.add_argument(
"-D", "--dependencies", action="store_true", help="also fetch all dependencies" "-D", "--dependencies", action="store_true", help="also fetch all dependencies"
@ -144,7 +144,7 @@ def setup_parser(subparser):
def mirror_add(args): def mirror_add(args):
"""Add a mirror to Spack.""" """add a mirror to Spack"""
if ( if (
args.s3_access_key_id args.s3_access_key_id
or args.s3_access_key_secret or args.s3_access_key_secret
@ -168,12 +168,12 @@ def mirror_add(args):
def mirror_remove(args): def mirror_remove(args):
"""Remove a mirror by name.""" """remove a mirror by name"""
spack.mirror.remove(args.name, args.scope) spack.mirror.remove(args.name, args.scope)
def mirror_set_url(args): def mirror_set_url(args):
"""Change the URL of a mirror.""" """change the URL of a mirror"""
url = args.url url = args.url
mirrors = spack.config.get("mirrors", scope=args.scope) mirrors = spack.config.get("mirrors", scope=args.scope)
if not mirrors: if not mirrors:
@ -242,7 +242,7 @@ def mirror_set_url(args):
def mirror_list(args): def mirror_list(args):
"""Print out available mirrors to the console.""" """print out available mirrors to the console"""
mirrors = spack.mirror.MirrorCollection(scope=args.scope) mirrors = spack.mirror.MirrorCollection(scope=args.scope)
if not mirrors: if not mirrors:
@ -395,9 +395,7 @@ def process_mirror_stats(present, mirrored, error):
def mirror_create(args): def mirror_create(args):
"""Create a directory to be used as a spack mirror, and fill it with """create a directory to be used as a spack mirror, and fill it with package archives"""
package archives.
"""
if args.specs and args.all: if args.specs and args.all:
raise SpackError( raise SpackError(
"cannot specify specs on command line if you chose to mirror all specs with '--all'" "cannot specify specs on command line if you chose to mirror all specs with '--all'"
@ -470,7 +468,7 @@ def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions,
def mirror_destroy(args): def mirror_destroy(args):
"""Given a url, recursively delete everything under it.""" """given a url, recursively delete everything under it"""
mirror_url = None mirror_url = None
if args.mirror_name: if args.mirror_name:
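The one-line docstrings above double as subcommand help; a minimal sketch (not Spack code) of how a handler's docstring ends up in `--help` output and, through shell completions, in a tab-completion description:

import argparse

def mirror_add(args):
    """add a mirror to Spack"""

parser = argparse.ArgumentParser(prog="spack mirror")
subparsers = parser.add_subparsers(metavar="SUBCOMMAND")
subparsers.add_parser("add", help=mirror_add.__doc__, description=mirror_add.__doc__)
parser.print_help()  # the SUBCOMMAND list shows: add  add a mirror to Spack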

View file

@ -31,7 +31,7 @@ def setup_parser(subparser):
action="store", action="store",
dest="module_set_name", dest="module_set_name",
default="default", default="default",
help="Named module set to use from modules configuration.", help="named module set to use from modules configuration",
) )
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="subparser_name") sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="subparser_name")

View file

@ -30,7 +30,7 @@ def add_command(parser, command_dict):
def setdefault(module_type, specs, args): def setdefault(module_type, specs, args):
"""Set the default module file, when multiple are present""" """set the default module file, when multiple are present"""
# For details on the underlying mechanism see: # For details on the underlying mechanism see:
# #
# https://lmod.readthedocs.io/en/latest/060_locating.html#marking-a-version-as-default # https://lmod.readthedocs.io/en/latest/060_locating.html#marking-a-version-as-default

View file

@ -29,7 +29,7 @@ def add_command(parser, command_dict):
def setdefault(module_type, specs, args): def setdefault(module_type, specs, args):
"""Set the default module file, when multiple are present""" """set the default module file, when multiple are present"""
# Currently, accepts only a single matching spec # Currently, accepts only a single matching spec
spack.cmd.modules.one_spec_or_raise(specs) spack.cmd.modules.one_spec_or_raise(specs)
spec = specs[0] spec = specs[0]

View file

@ -58,7 +58,7 @@ def setup_parser(subparser):
"--type", "--type",
action="store", action="store",
default="C", default="C",
help="Types of changes to show (A: added, R: removed, " "C: changed); default is 'C'", help="types of changes to show (A: added, R: removed, C: changed); default is 'C'",
) )
rm_parser = sp.add_parser("removed", help=pkg_removed.__doc__) rm_parser = sp.add_parser("removed", help=pkg_removed.__doc__)
@ -81,7 +81,7 @@ def setup_parser(subparser):
"--canonical", "--canonical",
action="store_true", action="store_true",
default=False, default=False,
help="dump canonical source as used by package hash.", help="dump canonical source as used by package hash",
) )
arguments.add_common_arguments(source_parser, ["spec"]) arguments.add_common_arguments(source_parser, ["spec"])

View file

@ -17,9 +17,7 @@
def setup_parser(subparser): def setup_parser(subparser):
subparser.epilog = ( subparser.epilog = "If called without argument returns the list of all valid virtual packages"
"If called without argument returns " "the list of all valid virtual packages"
)
subparser.add_argument( subparser.add_argument(
"virtual_package", nargs="*", help="find packages that provide this virtual package" "virtual_package", nargs="*", help="find packages that provide this virtual package"
) )

View file

@ -27,7 +27,7 @@ def setup_parser(subparser):
create_parser.add_argument("directory", help="directory to create the repo in") create_parser.add_argument("directory", help="directory to create the repo in")
create_parser.add_argument( create_parser.add_argument(
"namespace", "namespace",
help="namespace to identify packages in the repository. " "defaults to the directory name", help="namespace to identify packages in the repository (defaults to the directory name)",
nargs="?", nargs="?",
) )
create_parser.add_argument( create_parser.add_argument(
@ -36,10 +36,8 @@ def setup_parser(subparser):
action="store", action="store",
dest="subdir", dest="subdir",
default=spack.repo.packages_dir_name, default=spack.repo.packages_dir_name,
help=( help="subdirectory to store packages in the repository\n\n"
"subdirectory to store packages in the repository." "default 'packages'. use an empty string for no subdirectory",
" Default 'packages'. Use an empty string for no subdirectory."
),
) )
# List # List
@ -78,14 +76,14 @@ def setup_parser(subparser):
def repo_create(args): def repo_create(args):
"""Create a new package repository.""" """create a new package repository"""
full_path, namespace = spack.repo.create_repo(args.directory, args.namespace, args.subdir) full_path, namespace = spack.repo.create_repo(args.directory, args.namespace, args.subdir)
tty.msg("Created repo with namespace '%s'." % namespace) tty.msg("Created repo with namespace '%s'." % namespace)
tty.msg("To register it with spack, run this command:", "spack repo add %s" % full_path) tty.msg("To register it with spack, run this command:", "spack repo add %s" % full_path)
def repo_add(args): def repo_add(args):
"""Add a package source to Spack's configuration.""" """add a package source to Spack's configuration"""
path = args.path path = args.path
# real_path is absolute and handles substitution. # real_path is absolute and handles substitution.
@ -116,7 +114,7 @@ def repo_add(args):
def repo_remove(args): def repo_remove(args):
"""Remove a repository from Spack's configuration.""" """remove a repository from Spack's configuration"""
repos = spack.config.get("repos", scope=args.scope) repos = spack.config.get("repos", scope=args.scope)
namespace_or_path = args.namespace_or_path namespace_or_path = args.namespace_or_path
@ -146,7 +144,7 @@ def repo_remove(args):
def repo_list(args): def repo_list(args):
"""Show registered repositories and their namespaces.""" """show registered repositories and their namespaces"""
roots = spack.config.get("repos", scope=args.scope) roots = spack.config.get("repos", scope=args.scope)
repos = [] repos = []
for r in roots: for r in roots:

View file

@ -33,7 +33,7 @@ def setup_parser(subparser):
"--show", "--show",
action="store", action="store",
default="opt,solutions", default="opt,solutions",
help="select outputs: comma-separated list of: \n" help="select outputs\n\ncomma-separated list of:\n"
" asp asp program text\n" " asp asp program text\n"
" opt optimization criteria for best model\n" " opt optimization criteria for best model\n"
" output raw clingo output\n" " output raw clingo output\n"

View file

@ -12,7 +12,7 @@
import spack.store import spack.store
import spack.tag import spack.tag
description = "Show package tags and associated packages" description = "show package tags and associated packages"
section = "basic" section = "basic"
level = "long" level = "long"

View file

@ -35,39 +35,35 @@ def setup_parser(subparser):
"run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__) "run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__)
) )
alias_help_msg = "Provide an alias for this test-suite" run_parser.add_argument(
alias_help_msg += " for subsequent access." "--alias", help="provide an alias for this test-suite for subsequent access"
run_parser.add_argument("--alias", help=alias_help_msg) )
run_parser.add_argument( run_parser.add_argument(
"--fail-fast", "--fail-fast",
action="store_true", action="store_true",
help="Stop tests for each package after the first failure.", help="stop tests for each package after the first failure",
) )
run_parser.add_argument( run_parser.add_argument(
"--fail-first", action="store_true", help="Stop after the first failed package." "--fail-first", action="store_true", help="stop after the first failed package"
) )
run_parser.add_argument( run_parser.add_argument(
"--externals", action="store_true", help="Test packages that are externally installed." "--externals", action="store_true", help="test packages that are externally installed"
) )
run_parser.add_argument( run_parser.add_argument(
"-x", "-x",
"--explicit", "--explicit",
action="store_true", action="store_true",
help="Only test packages that are explicitly installed.", help="only test packages that are explicitly installed",
) )
run_parser.add_argument( run_parser.add_argument(
"--keep-stage", action="store_true", help="Keep testing directory for debugging" "--keep-stage", action="store_true", help="keep testing directory for debugging"
) )
arguments.add_common_arguments(run_parser, ["log_format"]) arguments.add_common_arguments(run_parser, ["log_format"])
run_parser.add_argument( run_parser.add_argument("--log-file", default=None, help="filename for the log file")
"--log-file",
default=None,
help="filename for the log file. if not passed a default will be used",
)
arguments.add_cdash_args(run_parser, False) arguments.add_cdash_args(run_parser, False)
run_parser.add_argument( run_parser.add_argument(
"--help-cdash", action="store_true", help="Show usage instructions for CDash reporting" "--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
) )
cd_group = run_parser.add_mutually_exclusive_group() cd_group = run_parser.add_mutually_exclusive_group()
@ -96,7 +92,7 @@ def setup_parser(subparser):
find_parser.add_argument( find_parser.add_argument(
"filter", "filter",
nargs=argparse.REMAINDER, nargs=argparse.REMAINDER,
help="optional case-insensitive glob patterns to filter results.", help="optional case-insensitive glob patterns to filter results",
) )
# Status # Status
@ -104,7 +100,7 @@ def setup_parser(subparser):
"status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__) "status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__)
) )
status_parser.add_argument( status_parser.add_argument(
"names", nargs=argparse.REMAINDER, help="Test suites for which to print status" "names", nargs=argparse.REMAINDER, help="test suites for which to print status"
) )
# Results # Results
@ -142,15 +138,15 @@ def setup_parser(subparser):
) )
arguments.add_common_arguments(remove_parser, ["yes_to_all"]) arguments.add_common_arguments(remove_parser, ["yes_to_all"])
remove_parser.add_argument( remove_parser.add_argument(
"names", nargs=argparse.REMAINDER, help="Test suites to remove from test stage" "names", nargs=argparse.REMAINDER, help="test suites to remove from test stage"
) )
def test_run(args): def test_run(args):
"""Run tests for the specified installed packages. """run tests for the specified installed packages
If no specs are listed, run tests for all packages in the current if no specs are listed, run tests for all packages in the current
environment or all installed packages if there is no active environment. environment or all installed packages if there is no active environment
""" """
if args.alias: if args.alias:
suites = spack.install_test.get_named_test_suites(args.alias) suites = spack.install_test.get_named_test_suites(args.alias)
@ -231,7 +227,7 @@ def create_reporter(args, specs_to_test, test_suite):
def test_list(args): def test_list(args):
"""List installed packages with available tests.""" """list installed packages with available tests"""
tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set() tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()
def has_test_and_tags(pkg_class): def has_test_and_tags(pkg_class):
@ -263,10 +259,10 @@ def has_test_and_tags(pkg_class):
def test_find(args): # TODO: merge with status (noargs) def test_find(args): # TODO: merge with status (noargs)
"""Find tests that are running or have available results. """find tests that are running or have available results
Displays aliases for tests that have them, otherwise test suite content displays aliases for tests that have them, otherwise test suite content hashes
hashes.""" """
test_suites = spack.install_test.get_all_test_suites() test_suites = spack.install_test.get_all_test_suites()
# Filter tests by filter argument # Filter tests by filter argument
@ -302,7 +298,7 @@ def match(t, f):
def test_status(args): def test_status(args):
"""Get the current status for the specified Spack test suite(s).""" """get the current status for the specified Spack test suite(s)"""
if args.names: if args.names:
test_suites = [] test_suites = []
for name in args.names: for name in args.names:
@ -387,7 +383,7 @@ def _report_suite_results(test_suite, args, constraints):
def test_results(args): def test_results(args):
"""Get the results from Spack test suite(s) (default all).""" """get the results from Spack test suite(s) (default all)"""
if args.names: if args.names:
try: try:
sep_index = args.names.index("--") sep_index = args.names.index("--")
@ -414,12 +410,13 @@ def test_results(args):
def test_remove(args): def test_remove(args):
"""Remove results from Spack test suite(s) (default all). """remove results from Spack test suite(s) (default all)
If no test suite is listed, remove results for all suites. if no test suite is listed, remove results for all suites.
Removed tests can no longer be accessed for results or status, and will not removed tests can no longer be accessed for results or status, and will not
appear in `spack test list` results.""" appear in `spack test list` results
"""
if args.names: if args.names:
test_suites = [] test_suites = []
for name in args.names: for name in args.names:
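`spack.cmd.first_line` (used above when registering the `run` and `status` subparsers) is why only the summary line of these docstrings shows up as help; a hypothetical stand-in for it, since the real helper may differ:

def first_line(docstring):
    """return the first non-empty line of a docstring"""
    return docstring.strip().split("\n", 1)[0]

doc = """run tests for the specified installed packages

if no specs are listed, run tests for all packages in the current
environment or all installed packages if there is no active environment
"""
assert first_line(doc) == "run tests for the specified installed packages"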

View file

@ -54,7 +54,7 @@ def setup_parser(subparser):
"--force", "--force",
action="store_true", action="store_true",
dest="force", dest="force",
help="remove regardless of whether other packages or environments " "depend on this one", help="remove regardless of whether other packages or environments depend on this one",
) )
subparser.add_argument( subparser.add_argument(
"--remove", "--remove",

View file

@ -53,15 +53,15 @@ def setup_parser(subparser):
) )
subparser.add_argument( subparser.add_argument(
"-a", "--all", action="store_true", help="unload all loaded Spack packages." "-a", "--all", action="store_true", help="unload all loaded Spack packages"
) )
def unload(parser, args): def unload(parser, args):
"""Unload spack packages from the user environment.""" """unload spack packages from the user environment"""
if args.specs and args.all: if args.specs and args.all:
raise spack.error.SpackError( raise spack.error.SpackError(
"Cannot specify specs on command line" " when unloading all specs with '--all'" "Cannot specify specs on command line when unloading all specs with '--all'"
) )
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":") hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")

View file

@ -10,7 +10,7 @@
import spack.store import spack.store
import spack.verify import spack.verify
description = "Check that all spack packages are on disk as installed" description = "check that all spack packages are on disk as installed"
section = "admin" section = "admin"
level = "long" level = "long"
@ -19,14 +19,14 @@ def setup_parser(subparser):
setup_parser.parser = subparser setup_parser.parser = subparser
subparser.add_argument( subparser.add_argument(
"-l", "--local", action="store_true", help="Verify only locally installed packages" "-l", "--local", action="store_true", help="verify only locally installed packages"
) )
subparser.add_argument( subparser.add_argument(
"-j", "--json", action="store_true", help="Ouptut json-formatted errors" "-j", "--json", action="store_true", help="ouptut json-formatted errors"
) )
subparser.add_argument("-a", "--all", action="store_true", help="Verify all packages") subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
subparser.add_argument( subparser.add_argument(
"specs_or_files", nargs=argparse.REMAINDER, help="Specs or files to verify" "specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
) )
type = subparser.add_mutually_exclusive_group() type = subparser.add_mutually_exclusive_group()
@ -37,7 +37,7 @@ def setup_parser(subparser):
const="specs", const="specs",
dest="type", dest="type",
default="specs", default="specs",
help="Treat entries as specs (default)", help="treat entries as specs (default)",
) )
type.add_argument( type.add_argument(
"-f", "-f",
@ -46,7 +46,7 @@ def setup_parser(subparser):
const="files", const="files",
dest="type", dest="type",
default="specs", default="specs",
help="Treat entries as absolute filenames. Cannot be used with '-a'", help="treat entries as absolute filenames\n\ncannot be used with '-a'",
) )

View file

@ -26,7 +26,7 @@ def setup_parser(subparser):
output.add_argument( output.add_argument(
"--safe-only", "--safe-only",
action="store_true", action="store_true",
help="[deprecated] only list safe versions " "of the package", help="[deprecated] only list safe versions of the package",
) )
output.add_argument( output.add_argument(
"-r", "--remote", action="store_true", help="only list remote versions of the package" "-r", "--remote", action="store_true", help="only list remote versions of the package"
@ -35,7 +35,7 @@ def setup_parser(subparser):
"-n", "-n",
"--new", "--new",
action="store_true", action="store_true",
help="only list remote versions newer than " "the latest checksummed version", help="only list remote versions newer than the latest checksummed version",
) )
subparser.add_argument( subparser.add_argument(
"-c", "--concurrency", default=32, type=int, help="number of concurrent requests" "-c", "--concurrency", default=32, type=int, help="number of concurrent requests"

View file

@ -44,7 +44,7 @@
from spack.filesystem_view import YamlFilesystemView, view_func_parser from spack.filesystem_view import YamlFilesystemView, view_func_parser
from spack.util import spack_yaml as s_yaml from spack.util import spack_yaml as s_yaml
description = "project packages to a compact naming scheme on the filesystem." description = "project packages to a compact naming scheme on the filesystem"
section = "environments" section = "environments"
level = "short" level = "short"
@ -81,7 +81,7 @@ def setup_parser(sp):
"--verbose", "--verbose",
action="store_true", action="store_true",
default=False, default=False,
help="If not verbose only warnings/errors will be printed.", help="if not verbose only warnings/errors will be printed",
) )
sp.add_argument( sp.add_argument(
"-e", "-e",
@ -95,7 +95,7 @@ def setup_parser(sp):
"--dependencies", "--dependencies",
choices=["true", "false", "yes", "no"], choices=["true", "false", "yes", "no"],
default="true", default="true",
help="Link/remove/list dependencies.", help="link/remove/list dependencies",
) )
ssp = sp.add_subparsers(metavar="ACTION", dest="action") ssp = sp.add_subparsers(metavar="ACTION", dest="action")
@ -137,12 +137,11 @@ def setup_parser(sp):
if cmd in ("symlink", "hardlink", "copy"): if cmd in ("symlink", "hardlink", "copy"):
# invalid for remove/statlink, for those commands the view needs to # invalid for remove/statlink, for those commands the view needs to
# already know its own projections. # already know its own projections.
help_msg = "Initialize view using projections from file."
act.add_argument( act.add_argument(
"--projection-file", "--projection-file",
dest="projection_file", dest="projection_file",
type=spack.cmd.extant_file, type=spack.cmd.extant_file,
help=help_msg, help="initialize view using projections from file",
) )
if cmd == "remove": if cmd == "remove":
@ -150,7 +149,7 @@ def setup_parser(sp):
act.add_argument( act.add_argument(
"--no-remove-dependents", "--no-remove-dependents",
action="store_true", action="store_true",
help="Do not remove dependents of specified specs.", help="do not remove dependents of specified specs",
) )
# with all option, spec is an optional argument # with all option, spec is an optional argument

View file

@ -436,7 +436,7 @@ def make_argument_parser(**kwargs):
default=None, default=None,
action="append", action="append",
dest="config_vars", dest="config_vars",
help="add one or more custom, one off config settings.", help="add one or more custom, one off config settings",
) )
parser.add_argument( parser.add_argument(
"-C", "-C",
@ -451,9 +451,9 @@ def make_argument_parser(**kwargs):
"--debug", "--debug",
action="count", action="count",
default=0, default=0,
help="write out debug messages " "(more d's for more verbosity: -d, -dd, -ddd, etc.)", help="write out debug messages\n\n(more d's for more verbosity: -d, -dd, -ddd, etc.)",
) )
parser.add_argument("--timestamp", action="store_true", help="Add a timestamp to tty output") parser.add_argument("--timestamp", action="store_true", help="add a timestamp to tty output")
parser.add_argument("--pdb", action="store_true", help="run spack under the pdb debugger") parser.add_argument("--pdb", action="store_true", help="run spack under the pdb debugger")
env_group = parser.add_mutually_exclusive_group() env_group = parser.add_mutually_exclusive_group()
@ -527,8 +527,7 @@ def make_argument_parser(**kwargs):
"--sorted-profile", "--sorted-profile",
default=None, default=None,
metavar="STAT", metavar="STAT",
help="profile and sort by one or more of:\n[%s]" help=f"profile and sort\n\none or more of: {stat_lines[0]}",
% ",\n ".join([", ".join(line) for line in stat_lines]),
) )
parser.add_argument( parser.add_argument(
"--lines", "--lines",
@ -555,7 +554,7 @@ def make_argument_parser(**kwargs):
"-V", "--version", action="store_true", help="show version number and exit" "-V", "--version", action="store_true", help="show version number and exit"
) )
parser.add_argument( parser.add_argument(
"--print-shell-vars", action="store", help="print info needed by setup-env.[c]sh" "--print-shell-vars", action="store", help="print info needed by setup-env.*sh"
) )
return parser return parser
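A rough self-check, not part of the change set, for the style the hunks above apply to flags like `--timestamp` (lowercase start, no trailing period); the helper name is made up:

def help_is_standardized(msg):
    stripped = msg.strip()
    return bool(stripped) and not stripped[0].isupper() and not stripped.endswith(".")

assert help_is_standardized("add a timestamp to tty output")
assert not help_is_standardized("Add a timestamp to tty output")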