Windows: Non config changes to support Gitlab CI (#43965)
* Quote python for shlex
* Remove python path quoting patch
* spack env: allow `C` "protocol" for config_path (when running Spack on Windows, a path beginning with `C://...` is a valid path)
* Remove makefile from ci rebuild
* GPG: use llnl.util.filesystem.getuid
* Cleanup process_command
* Remove unused lines
* Fix typo in encode_path
* Double quote arguments
* Cleanup process_command
* Pass cdash args with =
* Escape parens in CMD script
* Escaping parens doesn't only apply to paths
* Install deps
* sfn prefix
* Use sfn with libxml2
* Add hash to dep install
* WIP
* Review
* Changes missed in prior review commit
* Style
* Ensure we handle Windows paths with config scopes
* Clarify docstring
* No more MAKE_COMMAND
* Syntax cleanup
* Actually correct is_path_url
* Correct call
* Raise on other errors
* url2path behaves differently on unix
* Ensure proper quoting
* Actually prepend slash in slash_hash

---------

Co-authored-by: Ryan Krattiger <ryan.krattiger@kitware.com>
Co-authored-by: Mike VanDenburgh <michael.vandenburgh@kitware.com>
parent a70ea11e69, commit 74ef630241
9 changed files with 118 additions and 118 deletions
@@ -187,12 +187,18 @@ def polite_filename(filename: str) -> str:
     return _polite_antipattern().sub("_", filename)


-def getuid():
+def getuid() -> Union[str, int]:
+    """Returns os getuid on non Windows
+    On Windows returns 0 for admin users, login string otherwise
+    This is in line with behavior from get_owner_uid which
+    always returns the login string on Windows
+    """
     if sys.platform == "win32":
         import ctypes

+        # If not admin, use the string name of the login as a unique ID
         if ctypes.windll.shell32.IsUserAnAdmin() == 0:
-            return 1
+            return os.getlogin()
         return 0
     else:
         return os.getuid()
@@ -213,6 +219,15 @@ def _win_rename(src, dst):
         os.replace(src, dst)


+@system_path_filter
+def msdos_escape_parens(path):
+    """MS-DOS interprets parens as grouping parameters even in a quoted string"""
+    if sys.platform == "win32":
+        return path.replace("(", "^(").replace(")", "^)")
+    else:
+        return path
+
+
 @system_path_filter
 def rename(src, dst):
     # On Windows, os.rename will fail if the destination file already exists
@@ -553,7 +568,13 @@ def exploding_archive_handler(tarball_container, stage):


 @system_path_filter(arg_slice=slice(1))
-def get_owner_uid(path, err_msg=None):
+def get_owner_uid(path, err_msg=None) -> Union[str, int]:
+    """Returns owner UID of path destination
+    On non Windows this is the value of st_uid
+    On Windows this is the login string associated with the
+    owning user.
+
+    """
     if not os.path.exists(path):
         mkdirp(path, mode=stat.S_IRWXU)

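A quick illustration of the two helpers touched above (illustrative only; the values shown assume a Windows session and are not part of the change):

from llnl.util.filesystem import getuid, msdos_escape_parens

# Parens are caret-escaped for cmd.exe only on Windows; elsewhere the path is unchanged.
print(msdos_escape_parens("C:/Program Files (x86)/CMake/bin"))
# On Windows: C:/Program Files ^(x86^)/CMake/bin

# getuid() now returns the login string for non-admin Windows users, 0 for admins,
# and the usual numeric uid on other platforms, hence the Union[str, int] annotation.
print(getuid())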
@@ -145,7 +145,7 @@ def install(self, pkg, spec, prefix):
         opts += self.nmake_install_args()
         if self.makefile_name:
             opts.append("/F{}".format(self.makefile_name))
-        opts.append(self.define("PREFIX", prefix))
+        opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
                 *opts, *self.install_targets, ignore_quotes=self.ignore_quotes

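For context, fs.windows_sfn maps a path to its Windows 8.3 short file name ("sfn") so that nmake and the MSVC tooling never see spaces or parentheses in PREFIX. The sketch below only illustrates that idea via the Win32 GetShortPathNameW call; it is not Spack's implementation.

import ctypes

def short_path_name(path: str) -> str:
    # Windows-only sketch: ask the OS for the 8.3 form of an existing path,
    # e.g. "C:\\Program Files (x86)" -> "C:\\PROGRA~2".
    buf = ctypes.create_unicode_buffer(260)
    if ctypes.windll.kernel32.GetShortPathNameW(path, buf, 260) == 0:
        return path  # conversion failed; fall back to the original path
    return buf.value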
@@ -1497,6 +1497,12 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)


+def win_quote(quote_str: str) -> str:
+    if IS_WINDOWS:
+        quote_str = f'"{quote_str}"'
+    return quote_str
+
+
 def download_and_extract_artifacts(url, work_dir):
     """Look for gitlab artifacts.zip at the given url, and attempt to download
     and extract the contents into the given work_dir
@@ -1961,9 +1967,9 @@ def compose_command_err_handling(args):
         # but we need to handle EXEs (git, etc) ourselves
         catch_exe_failure = (
             """
-if ($LASTEXITCODE -ne 0){
-throw "Command {} has failed"
-}
+if ($LASTEXITCODE -ne 0){{
+throw 'Command {} has failed'
+}}
 """
             if IS_WINDOWS
             else ""
@@ -2195,13 +2201,13 @@ def __init__(self, ci_cdash):
     def args(self):
         return [
             "--cdash-upload-url",
-            self.upload_url,
+            win_quote(self.upload_url),
             "--cdash-build",
-            self.build_name,
+            win_quote(self.build_name),
             "--cdash-site",
-            self.site,
+            win_quote(self.site),
             "--cdash-buildstamp",
-            self.build_stamp,
+            win_quote(self.build_stamp),
         ]

     @property  # type: ignore

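With the change above, the CDash reporting arguments are emitted pre-quoted on Windows so they survive cmd.exe argument splitting; on other platforms win_quote is a pass-through. Illustrative values only:

IS_WINDOWS = True  # pretend we are on a Windows runner for this sketch

def win_quote(quote_str: str) -> str:
    if IS_WINDOWS:
        quote_str = f'"{quote_str}"'
    return quote_str

args = [
    "--cdash-upload-url", win_quote("https://cdash.example.com/submit.php?project=Spack"),
    "--cdash-build", win_quote("windows-vs2022 (pr)"),
]
print(" ".join(args))
# --cdash-upload-url "https://cdash.example.com/submit.php?project=Spack" --cdash-build "windows-vs2022 (pr)"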
@@ -31,7 +31,6 @@
 level = "long"

 SPACK_COMMAND = "spack"
-MAKE_COMMAND = "make"
 INSTALL_FAIL_CODE = 1
 FAILED_CREATE_BUILDCACHE_CODE = 100

@@ -40,6 +39,12 @@ def deindent(desc):
     return desc.replace("    ", "")


+def unicode_escape(path: str) -> str:
+    """Returns transformed path with any unicode
+    characters replaced with their corresponding escapes"""
+    return path.encode("unicode-escape").decode("utf-8")
+
+
 def setup_parser(subparser):
     setup_parser.parser = subparser
     subparsers = subparser.add_subparsers(help="CI sub-commands")

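The new unicode_escape helper spells out backslashes and non-ASCII characters in a path as escape sequences; it is applied to env.path in the bootstrap command below. For example (hypothetical path):

def unicode_escape(path: str) -> str:
    # Same transformation as above: backslashes become "\\" and
    # non-ASCII characters become \xNN / \uNNNN escapes.
    return path.encode("unicode-escape").decode("utf-8")

print(unicode_escape("C:\\Users\\büilder\\spack-env"))
# C:\\Users\\b\xfcilder\\spack-env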
@@ -551,75 +556,35 @@ def ci_rebuild(args):
     # No hash match anywhere means we need to rebuild spec

     # Start with spack arguments
-    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
+    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]

     config = cfg.get("config")
     if not config["verify_ssl"]:
         spack_cmd.append("-k")

-    install_args = []
+    install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']

     can_verify = spack_ci.can_verify_binaries()
     verify_binaries = can_verify and spack_is_pr_pipeline is False
     if not verify_binaries:
         install_args.append("--no-check-signature")

-    slash_hash = "/{}".format(job_spec.dag_hash())
-
-    # Arguments when installing dependencies from cache
-    deps_install_args = install_args
+    slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())

-    # Arguments when installing the root from sources
-    root_install_args = install_args + [
-        "--keep-stage",
-        "--only=package",
-        "--use-buildcache=package:never,dependencies:only",
-    ]
+    deps_install_args = install_args + ["--only=dependencies"]
+    root_install_args = install_args + ["--keep-stage", "--only=package"]

     if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
         root_install_args.extend(cdash_handler.args())

-    root_install_args.append(slash_hash)
-
-    # ["x", "y"] -> "'x' 'y'"
-    args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)
-
     commands = [
         # apparently there's a race when spack bootstraps? do it up front once
-        [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
-        [
-            SPACK_COMMAND,
-            "-e",
-            env.path,
-            "env",
-            "depfile",
-            "-o",
-            "Makefile",
-            "--use-buildcache=package:never,dependencies:only",
-            slash_hash,  # limit to spec we're building
-        ],
-        [
-            # --output-sync requires GNU make 4.x.
-            # Old make errors when you pass it a flag it doesn't recognize,
-            # but it doesn't error or warn when you set unrecognized flags in
-            # this variable.
-            "export",
-            "GNUMAKEFLAGS=--output-sync=recurse",
-        ],
-        [
-            MAKE_COMMAND,
-            "SPACK={}".format(args_to_string(spack_cmd)),
-            "SPACK_COLOR=always",
-            "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
-            "-j$(nproc)",
-            "install-deps/{}".format(
-                spack.environment.depfile.MakefileSpec(job_spec).safe_format(
-                    "{name}-{version}-{hash}"
-                )
-            ),
-        ],
-        spack_cmd + ["install"] + root_install_args,
+        [SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
+        spack_cmd + deps_install_args + [slash_hash],
+        spack_cmd + root_install_args + [slash_hash],
     ]

     tty.debug("Installing {0} from source".format(job_spec.name))
     install_exit_code = spack_ci.process_command("install", commands, repro_dir)

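Net effect of the hunk above: the depfile/Makefile indirection is gone and ci_rebuild drives `spack install` directly, once for the dependencies (buildcache only) and once for the root spec (from source). With a hypothetical environment path and hash, the command lists handed to process_command look roughly like this (CDash and signature flags omitted):

env_path = "C:\\gitlab\\builds\\spack-env"   # hypothetical
slash_hash = '"/abc123def456"'               # win_quote("/" + dag hash) on Windows

spack_cmd = ["spack", "--color=always", "--backtrace", "--verbose", "install"]
install_args = ['--use-buildcache="package:never,dependencies:only"']

commands = [
    ["spack", "-e", env_path.encode("unicode-escape").decode("utf-8"), "bootstrap", "now"],
    spack_cmd + install_args + ["--only=dependencies", slash_hash],
    spack_cmd + install_args + ["--keep-stage", "--only=package", slash_hash],
]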
@@ -3036,54 +3036,56 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
         for i, config_path in enumerate(reversed(includes)):
             # allow paths to contain spack config/environment variables, etc.
             config_path = substitute_path_variables(config_path)

             include_url = urllib.parse.urlparse(config_path)

-            # Transform file:// URLs to direct includes.
-            if include_url.scheme == "file":
-                config_path = urllib.request.url2pathname(include_url.path)
+            # If scheme is not valid, config_path is not a url
+            # of a type Spack is generally aware
+            if spack.util.url.validate_scheme(include_url.scheme):
+                # Transform file:// URLs to direct includes.
+                if include_url.scheme == "file":
+                    config_path = urllib.request.url2pathname(include_url.path)

-            # Any other URL should be fetched.
-            elif include_url.scheme in ("http", "https", "ftp"):
-                # Stage any remote configuration file(s)
-                staged_configs = (
-                    os.listdir(self.config_stage_dir)
-                    if os.path.exists(self.config_stage_dir)
-                    else []
-                )
-                remote_path = urllib.request.url2pathname(include_url.path)
-                basename = os.path.basename(remote_path)
-                if basename in staged_configs:
-                    # Do NOT re-stage configuration files over existing
-                    # ones with the same name since there is a risk of
-                    # losing changes (e.g., from 'spack config update').
-                    tty.warn(
-                        "Will not re-stage configuration from {0} to avoid "
-                        "losing changes to the already staged file of the "
-                        "same name.".format(remote_path)
-                    )
+                # Any other URL should be fetched.
+                elif include_url.scheme in ("http", "https", "ftp"):
+                    # Stage any remote configuration file(s)
+                    staged_configs = (
+                        os.listdir(self.config_stage_dir)
+                        if os.path.exists(self.config_stage_dir)
+                        else []
+                    )
+                    remote_path = urllib.request.url2pathname(include_url.path)
+                    basename = os.path.basename(remote_path)
+                    if basename in staged_configs:
+                        # Do NOT re-stage configuration files over existing
+                        # ones with the same name since there is a risk of
+                        # losing changes (e.g., from 'spack config update').
+                        tty.warn(
+                            "Will not re-stage configuration from {0} to avoid "
+                            "losing changes to the already staged file of the "
+                            "same name.".format(remote_path)
+                        )

-                    # Recognize the configuration stage directory
-                    # is flattened to ensure a single copy of each
-                    # configuration file.
-                    config_path = self.config_stage_dir
-                    if basename.endswith(".yaml"):
-                        config_path = os.path.join(config_path, basename)
-                else:
-                    staged_path = spack.config.fetch_remote_configs(
-                        config_path, str(self.config_stage_dir), skip_existing=True
-                    )
-                    if not staged_path:
-                        raise SpackEnvironmentError(
-                            "Unable to fetch remote configuration {0}".format(config_path)
-                        )
-                    config_path = staged_path
+                        # Recognize the configuration stage directory
+                        # is flattened to ensure a single copy of each
+                        # configuration file.
+                        config_path = self.config_stage_dir
+                        if basename.endswith(".yaml"):
+                            config_path = os.path.join(config_path, basename)
+                    else:
+                        staged_path = spack.config.fetch_remote_configs(
+                            config_path, str(self.config_stage_dir), skip_existing=True
+                        )
+                        if not staged_path:
+                            raise SpackEnvironmentError(
+                                "Unable to fetch remote configuration {0}".format(config_path)
+                            )
+                        config_path = staged_path

-            elif include_url.scheme:
-                raise ValueError(
-                    f"Unsupported URL scheme ({include_url.scheme}) for "
-                    f"environment include: {config_path}"
-                )
+                elif include_url.scheme:
+                    raise ValueError(
+                        f"Unsupported URL scheme ({include_url.scheme}) for "
+                        f"environment include: {config_path}"
+                    )

             # treat relative paths as relative to the environment
             if not os.path.isabs(config_path):

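The reason for gating on validate_scheme: on Windows an included config path such as `C:\...` (or `C://...`, as noted in the commit message) parses with a one-letter "scheme", so the old code treated it as a URL rather than a plain path. A small illustration with hypothetical paths; the exact set of schemes validate_scheme accepts is defined in spack.util.url:

import urllib.parse

for candidate in ("C:\\spack-env\\extra.yaml", "C://spack-env/extra.yaml", "https://example.com/extra.yaml"):
    print(candidate, "->", repr(urllib.parse.urlparse(candidate).scheme))
# C:\spack-env\extra.yaml -> 'c'
# C://spack-env/extra.yaml -> 'c'
# https://example.com/extra.yaml -> 'https'
# A bare drive letter is not a scheme Spack recognizes, so such includes now fall
# through to the plain-path handling in the rest of included_config_scopes.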
@@ -760,7 +760,6 @@ def test_ci_rebuild_mock_success(
     rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests)

     monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "echo")
-    monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "echo")

     with rebuild_env.env_dir.as_cwd():
         activate_rebuild_env(tmpdir, pkg_name, rebuild_env)
@@ -843,7 +842,6 @@ def test_ci_rebuild(
     ci_cmd("rebuild", "--tests", fail_on_error=False)

     monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "notcommand")
-    monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "notcommand")
     monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127)

     with rebuild_env.env_dir.as_cwd():

@@ -8,6 +8,8 @@
 import os
 import re

+import llnl.util.filesystem
+
 import spack.error
 import spack.paths
 import spack.util.executable
@@ -385,7 +387,7 @@ def _socket_dir(gpgconf):
         os.mkdir(var_run_user)
         os.chmod(var_run_user, 0o777)

-    user_dir = os.path.join(var_run_user, str(os.getuid()))
+    user_dir = os.path.join(var_run_user, str(llnl.util.filesystem.getuid()))

     if not os.path.exists(user_dir):
         os.mkdir(user_dir)

@@ -78,14 +78,7 @@ def is_path_instead_of_url(path_or_url):
     """Historically some config files and spack commands used paths
     where urls should be used. This utility can be used to validate
     and promote paths to urls."""
-    scheme = urllib.parse.urlparse(path_or_url).scheme
-
-    # On non-Windows, no scheme means it's likely a path
-    if not sys.platform == "win32":
-        return not scheme
-
-    # On Windows, we may have drive letters.
-    return "A" <= scheme <= "Z"
+    return not validate_scheme(urllib.parse.urlparse(path_or_url).scheme)


 def format(parsed_url):

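After the rewrite, a value counts as a path exactly when urlparse yields a scheme that validate_scheme does not recognize, which treats schemeless POSIX paths and Windows drive-letter paths uniformly. Expected behavior, assuming validate_scheme accepts the usual http/https/file style schemes and rejects a bare drive letter:

from spack.util.url import is_path_instead_of_url

assert is_path_instead_of_url("/tmp/local-mirror")             # no scheme -> path
assert is_path_instead_of_url("C:\\mirrors\\local")            # drive letter, not a real scheme
assert not is_path_instead_of_url("https://mirror.example.com/buildcache")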
@@ -4,6 +4,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os

+import llnl.util.filesystem as fs
+
 import spack.builder
 from spack.build_systems import autotools, nmake
 from spack.package import *
@@ -244,19 +246,30 @@ def makefile_name(self):

     @property
     def build_directory(self):
-        return os.path.join(self.stage.source_path, "win32")
+        return fs.windows_sfn(os.path.join(self.stage.source_path, "win32"))

     def configure(self, pkg, spec, prefix):
         with working_dir(self.build_directory):
             opts = [
-                "prefix=%s" % prefix,
+                "prefix=%s" % fs.windows_sfn(prefix),
                 "compiler=msvc",
                 "iconv=no",
                 "zlib=yes",
                 "lzma=yes",
-                "lib=%s" % ";".join((spec["zlib-api"].prefix.lib, spec["xz"].prefix.lib)),
+                "lib=%s"
+                % ";".join(
+                    (
+                        fs.windows_sfn(spec["zlib-api"].prefix.lib),
+                        fs.windows_sfn(spec["xz"].prefix.lib),
+                    )
+                ),
                 "include=%s"
-                % ";".join((spec["zlib-api"].prefix.include, spec["xz"].prefix.include)),
+                % ";".join(
+                    (
+                        fs.windows_sfn(spec["zlib-api"].prefix.include),
+                        fs.windows_sfn(spec["xz"].prefix.include),
+                    )
+                ),
             ]
             if "+python" in spec:
                 opts.append("python=yes")