Path handling (#28402)
Consolidate Spack's internal filepath logic into a select few places and refactor to consistent internal usage of os.path utilities. Creates a prefix and a series of utilities in the path utility module that facilitate handling paths in a platform-agnostic manner.

- Convert Windows paths to POSIX paths internally
- Prefer posixpath.join instead of os.path.join
- Update the util/ directory to account for Windows integration

Co-authored-by: Stephen Crowell <stephen.crowell@khq.kitware.com>
Co-authored-by: John Parent <john.parent@kitware.com>

Module template format for windows (#23041)
This commit is contained in:
parent df4129d395
commit e4d4a5193f
36 changed files with 495 additions and 234 deletions
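The diff below leans heavily on two helpers imported from spack.util.path — path_to_os_path and system_path_filter — whose implementations are not part of this commit view. The following is a minimal, hypothetical sketch of how such helpers could behave, written only to make the decorator usages in the diff easier to follow; the names mirror the imports in the diff, but the bodies are assumptions, not Spack's actual code. The decorator pattern keeps per-platform path normalization in one place instead of scattering sys.platform checks through every filesystem utility.

import os
import sys
from functools import wraps


def path_to_os_path(*paths):
    """Hypothetical sketch: normalize each string argument that looks like a
    path into the platform's native form, leaving other values untouched."""
    converted = []
    for p in paths:
        if isinstance(p, str) and sys.platform == "win32":
            p = os.path.normpath(p)
        converted.append(p)
    return converted


def system_path_filter(func=None, arg_slice=None):
    """Hypothetical sketch: decorator that runs positional arguments through
    path_to_os_path before the wrapped function sees them. The optional
    arg_slice limits which positional arguments are filtered, mirroring
    usages such as @system_path_filter(arg_slice=slice(1)) in the diff."""
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            args = list(args)
            sl = arg_slice if arg_slice is not None else slice(None)
            args[sl] = path_to_os_path(*args[sl])
            return f(*args, **kwargs)
        return wrapper

    # Support both the bare @system_path_filter form and the parameterized form.
    if callable(func):
        return decorator(func)
    return decorator


@system_path_filter
def ancestor_example(directory, n=1):
    # Example consumer: behaves the same whether the caller passed a POSIX
    # or a Windows-style path, because the decorator normalized it first.
    parent = os.path.abspath(directory)
    for _ in range(n):
        parent = os.path.dirname(parent)
    return parent.replace("\\", "/")


if __name__ == "__main__":
    sample = "C:\\spack\\lib\\spack" if sys.platform == "win32" else "/spack/lib/spack"
    print(ancestor_example(sample, n=2))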
@@ -5,19 +5,17 @@
import collections
import errno
import glob
import grp
import ctypes
import hashlib
import itertools
import numbers
import os
import pwd
import re
import shutil
import stat
import sys
import tempfile
from contextlib import contextmanager
from sys import platform as _platform

import six

@@ -27,6 +25,27 @@
from llnl.util.symlink import symlink

from spack.util.executable import Executable
from spack.util.path import path_to_os_path, system_path_filter

is_windows = _platform == 'win32'

if not is_windows:
    import grp
    import pwd
else:
    import win32security


is_windows = _platform == 'win32'

if not is_windows:
    import grp
    import pwd

if sys.version_info >= (3, 3):
    from collections.abc import Sequence  # novm
else:
    from collections import Sequence

__all__ = [
    'FileFilter',
@@ -76,7 +95,8 @@


def getuid():
    if _platform == "win32":
    if is_windows:
        import ctypes
        if ctypes.windll.shell32.IsUserAnAdmin() == 0:
            return 1
        return 0

@@ -84,6 +104,7 @@ def getuid():
        return os.getuid()


@system_path_filter
def rename(src, dst):
    # On Windows, os.rename will fail if the destination file already exists
    if is_windows:

@@ -92,6 +113,7 @@ def rename(src, dst):
    os.rename(src, dst)


@system_path_filter
def path_contains_subdirectory(path, root):
    norm_root = os.path.abspath(root).rstrip(os.path.sep) + os.path.sep
    norm_path = os.path.abspath(path).rstrip(os.path.sep) + os.path.sep
@ -116,6 +138,7 @@ def paths_containing_libs(paths, library_names):
|
|||
required_lib_fnames = possible_library_filenames(library_names)
|
||||
|
||||
rpaths_to_include = []
|
||||
paths = path_to_os_path(*paths)
|
||||
for path in paths:
|
||||
fnames = set(os.listdir(path))
|
||||
if fnames & required_lib_fnames:
|
||||
|
@ -124,6 +147,7 @@ def paths_containing_libs(paths, library_names):
|
|||
return rpaths_to_include
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def same_path(path1, path2):
|
||||
norm1 = os.path.abspath(path1).rstrip(os.path.sep)
|
||||
norm2 = os.path.abspath(path2).rstrip(os.path.sep)
|
||||
|
@ -174,7 +198,7 @@ def groupid_to_group(x):
|
|||
|
||||
if string:
|
||||
regex = re.escape(regex)
|
||||
|
||||
filenames = path_to_os_path(*filenames)
|
||||
for filename in filenames:
|
||||
|
||||
msg = 'FILTER FILE: {0} [replacing "{1}"]'
|
||||
|
@@ -284,13 +308,39 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):

    repl = r's@\1@\2@g'
    repl = repl.replace('@', new_delim)

    filenames = path_to_os_path(*filenames)
    for f in filenames:
        filter_file(whole_lines, repl, f)
        filter_file(single_quoted, "'%s'" % repl, f)
        filter_file(double_quoted, '"%s"' % repl, f)


@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None):
    if not os.path.exists(path):
        mkdirp(path, mode=stat.S_IRWXU)

        p_stat = os.stat(path)
        if p_stat.st_mode & stat.S_IRWXU != stat.S_IRWXU:
            tty.error("Expected {0} to support mode {1}, but it is {2}"
                      .format(path, stat.S_IRWXU, p_stat.st_mode))

            raise OSError(errno.EACCES,
                          err_msg.format(path, path) if err_msg else "")
    else:
        p_stat = os.stat(path)

    if _platform != "win32":
        owner_uid = p_stat.st_uid
    else:
        sid = win32security.GetFileSecurity(
            path, win32security.OWNER_SECURITY_INFORMATION) \
            .GetSecurityDescriptorOwner()
        owner_uid = win32security.LookupAccountSid(None, sid)[0]
    return owner_uid


@system_path_filter
def set_install_permissions(path):
    """Set appropriate permissions on the installed file."""
    # If this points to a file maintained in a Spack prefix, it is assumed that

@@ -313,12 +363,17 @@ def group_ids(uid=None):
    Returns:
        (list of int): gids of groups the user is a member of
    """
    if is_windows:
        tty.warn("Function is not supported on Windows")
        return []

    if uid is None:
        uid = getuid()
    user = pwd.getpwuid(uid).pw_name
    return [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]


@system_path_filter(arg_slice=slice(1))
def chgrp(path, group):
    """Implement the bash chgrp function on a single path"""
    if is_windows:
@ -331,6 +386,7 @@ def chgrp(path, group):
|
|||
os.chown(path, -1, gid)
|
||||
|
||||
|
||||
@system_path_filter(arg_slice=slice(1))
|
||||
def chmod_x(entry, perms):
|
||||
"""Implements chmod, treating all executable bits as set using the chmod
|
||||
utility's `+X` option.
|
||||
|
@ -344,6 +400,7 @@ def chmod_x(entry, perms):
|
|||
os.chmod(entry, perms)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def copy_mode(src, dest):
|
||||
"""Set the mode of dest to that of src unless it is a link.
|
||||
"""
|
||||
|
@ -360,6 +417,7 @@ def copy_mode(src, dest):
|
|||
os.chmod(dest, dest_mode)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def unset_executable_mode(path):
|
||||
mode = os.stat(path).st_mode
|
||||
mode &= ~stat.S_IXUSR
|
||||
|
@ -368,6 +426,7 @@ def unset_executable_mode(path):
|
|||
os.chmod(path, mode)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def copy(src, dest, _permissions=False):
|
||||
"""Copy the file(s) *src* to the file or directory *dest*.
|
||||
|
||||
|
@ -412,6 +471,7 @@ def copy(src, dest, _permissions=False):
|
|||
copy_mode(src, dst)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def install(src, dest):
|
||||
"""Install the file(s) *src* to the file or directory *dest*.
|
||||
|
||||
|
@ -430,6 +490,7 @@ def install(src, dest):
|
|||
copy(src, dest, _permissions=True)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def resolve_link_target_relative_to_the_link(link):
|
||||
"""
|
||||
os.path.isdir uses os.path.exists, which for links will check
|
||||
|
@ -444,6 +505,7 @@ def resolve_link_target_relative_to_the_link(link):
|
|||
return os.path.join(link_dir, target)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
|
||||
"""Recursively copy an entire directory tree rooted at *src*.
|
||||
|
||||
|
@ -528,6 +590,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
|
|||
copy_mode(s, d)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def install_tree(src, dest, symlinks=True, ignore=None):
|
||||
"""Recursively install an entire directory tree rooted at *src*.
|
||||
|
||||
|
@ -547,11 +610,13 @@ def install_tree(src, dest, symlinks=True, ignore=None):
|
|||
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def is_exe(path):
|
||||
"""True if path is an executable file."""
|
||||
return os.path.isfile(path) and os.access(path, os.X_OK)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def get_filetype(path_name):
|
||||
"""
|
||||
Return the output of file path_name as a string to identify file type.
|
||||
|
@ -563,6 +628,7 @@ def get_filetype(path_name):
|
|||
return output.strip()
|
||||
|
||||
|
||||
@system_path_filter(arg_slice=slice(1))
|
||||
def chgrp_if_not_world_writable(path, group):
|
||||
"""chgrp path to group if path is not world writable"""
|
||||
mode = os.stat(path).st_mode
|
||||
|
@ -592,7 +658,7 @@ def mkdirp(*paths, **kwargs):
|
|||
mode = kwargs.get('mode', None)
|
||||
group = kwargs.get('group', None)
|
||||
default_perms = kwargs.get('default_perms', 'args')
|
||||
|
||||
paths = path_to_os_path(*paths)
|
||||
for path in paths:
|
||||
if not os.path.exists(path):
|
||||
try:
|
||||
|
@ -653,6 +719,7 @@ def mkdirp(*paths, **kwargs):
|
|||
raise OSError(errno.EEXIST, "File already exists", path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def force_remove(*paths):
|
||||
"""Remove files without printing errors. Like ``rm -f``, does NOT
|
||||
remove directories."""
|
||||
|
@ -664,6 +731,7 @@ def force_remove(*paths):
|
|||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def working_dir(dirname, **kwargs):
|
||||
if kwargs.get('create', False):
|
||||
mkdirp(dirname)
|
||||
|
@ -683,6 +751,7 @@ def __init__(self, inner_exception, outer_exception):
|
|||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
"""Moves a directory to a temporary space. If the operations executed
|
||||
within the context manager don't raise an exception, the directory is
|
||||
|
@ -738,6 +807,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):
|
|||
tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def hash_directory(directory, ignore=[]):
|
||||
"""Hashes recursively the content of a directory.
|
||||
|
||||
|
@ -766,6 +836,7 @@ def hash_directory(directory, ignore=[]):
|
|||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def write_tmp_and_move(filename):
|
||||
"""Write to a temporary file, then move into place."""
|
||||
dirname = os.path.dirname(filename)
|
||||
|
@ -777,6 +848,7 @@ def write_tmp_and_move(filename):
|
|||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def open_if_filename(str_or_file, mode='r'):
|
||||
"""Takes either a path or a file object, and opens it if it is a path.
|
||||
|
||||
|
@ -789,8 +861,12 @@ def open_if_filename(str_or_file, mode='r'):
|
|||
yield str_or_file
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def touch(path):
|
||||
"""Creates an empty file at the specified path."""
|
||||
if is_windows:
|
||||
perms = (os.O_WRONLY | os.O_CREAT)
|
||||
else:
|
||||
perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
|
||||
fd = None
|
||||
try:
|
||||
|
@ -801,6 +877,7 @@ def touch(path):
|
|||
os.close(fd)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def touchp(path):
|
||||
"""Like ``touch``, but creates any parent directories needed for the file.
|
||||
"""
|
||||
|
@ -808,6 +885,7 @@ def touchp(path):
|
|||
touch(path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def force_symlink(src, dest):
|
||||
try:
|
||||
symlink(src, dest)
|
||||
|
@ -816,6 +894,7 @@ def force_symlink(src, dest):
|
|||
symlink(src, dest)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def join_path(prefix, *args):
|
||||
path = str(prefix)
|
||||
for elt in args:
|
||||
|
@ -823,14 +902,16 @@ def join_path(prefix, *args):
|
|||
return path
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def ancestor(dir, n=1):
|
||||
"""Get the nth ancestor of a directory."""
|
||||
parent = os.path.abspath(dir)
|
||||
for i in range(n):
|
||||
parent = os.path.dirname(parent)
|
||||
return parent
|
||||
return parent.replace("\\", "/")
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def get_single_file(directory):
|
||||
fnames = os.listdir(directory)
|
||||
if len(fnames) != 1:
|
||||
|
@ -850,6 +931,7 @@ def temp_cwd():
|
|||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def temp_rename(orig_path, temp_path):
|
||||
same_path = os.path.realpath(orig_path) == os.path.realpath(temp_path)
|
||||
if not same_path:
|
||||
|
@ -861,11 +943,13 @@ def temp_rename(orig_path, temp_path):
|
|||
shutil.move(temp_path, orig_path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def can_access(file_name):
|
||||
"""True if we have read/write access to the file."""
|
||||
return os.access(file_name, os.R_OK | os.W_OK)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
|
||||
"""Traverse two filesystem trees simultaneously.
|
||||
|
||||
|
@ -948,6 +1032,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
|
|||
yield (source_path, dest_path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def set_executable(path):
|
||||
mode = os.stat(path).st_mode
|
||||
if mode & stat.S_IRUSR:
|
||||
|
@ -959,6 +1044,7 @@ def set_executable(path):
|
|||
os.chmod(path, mode)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def last_modification_time_recursive(path):
|
||||
path = os.path.abspath(path)
|
||||
times = [os.stat(path).st_mtime]
|
||||
|
@ -968,6 +1054,7 @@ def last_modification_time_recursive(path):
|
|||
return max(times)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_empty_directories(root):
|
||||
"""Ascend up from the leaves accessible from `root` and remove empty
|
||||
directories.
|
||||
|
@ -984,6 +1071,7 @@ def remove_empty_directories(root):
|
|||
pass
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_dead_links(root):
|
||||
"""Recursively removes any dead link that is present in root.
|
||||
|
||||
|
@ -996,6 +1084,7 @@ def remove_dead_links(root):
|
|||
remove_if_dead_link(path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_if_dead_link(path):
|
||||
"""Removes the argument if it is a dead link.
|
||||
|
||||
|
@ -1006,6 +1095,7 @@ def remove_if_dead_link(path):
|
|||
os.unlink(path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_linked_tree(path):
|
||||
"""Removes a directory and its contents.
|
||||
|
||||
|
@ -1024,6 +1114,7 @@ def remove_linked_tree(path):
|
|||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def safe_remove(*files_or_dirs):
|
||||
"""Context manager to remove the files passed as input, but restore
|
||||
them in case any exception is raised in the context block.
|
||||
|
@ -1070,6 +1161,7 @@ def safe_remove(*files_or_dirs):
|
|||
raise
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def fix_darwin_install_name(path):
|
||||
"""Fix install name of dynamic libraries on Darwin to have full path.
|
||||
|
||||
|
@ -1156,6 +1248,10 @@ def find(root, files, recursive=True):
|
|||
return _find_non_recursive(root, files)
|
||||
|
||||
|
||||
# here and in _find_non_recursive below we only take the first
|
||||
# index to check for system path safety as glob handles this
|
||||
# w.r.t. search_files
|
||||
@system_path_filter
|
||||
def _find_recursive(root, search_files):
|
||||
|
||||
# The variable here is **on purpose** a defaultdict. The idea is that
|
||||
|
@ -1166,7 +1262,6 @@ def _find_recursive(root, search_files):
|
|||
|
||||
# Make the path absolute to have os.walk also return an absolute path
|
||||
root = os.path.abspath(root)
|
||||
|
||||
for path, _, list_files in os.walk(root):
|
||||
for search_file in search_files:
|
||||
matches = glob.glob(os.path.join(path, search_file))
|
||||
|
@ -1180,6 +1275,7 @@ def _find_recursive(root, search_files):
|
|||
return answer
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def _find_non_recursive(root, search_files):
|
||||
# The variable here is **on purpose** a defaultdict as os.list_dir
|
||||
# can return files in any order (does not preserve stability)
|
||||
|
@ -1311,7 +1407,7 @@ def directories(self, value):
|
|||
if isinstance(value, six.string_types):
|
||||
value = [value]
|
||||
|
||||
self._directories = [os.path.normpath(x) for x in value]
|
||||
self._directories = [path_to_os_path(os.path.normpath(x))[0] for x in value]
|
||||
|
||||
def _default_directories(self):
|
||||
"""Default computation of directories based on the list of
|
||||
|
@ -1469,6 +1565,7 @@ def find_headers(headers, root, recursive=False):
|
|||
return HeaderList(find(root, headers, recursive))
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def find_all_headers(root):
|
||||
"""Convenience function that returns the list of all headers found
|
||||
in the directory passed as argument.
|
||||
|
@ -1696,6 +1793,7 @@ def find_libraries(libraries, root, shared=True, recursive=False):
|
|||
return LibraryList(found_libs)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@memoized
|
||||
def can_access_dir(path):
|
||||
"""Returns True if the argument is an accessible directory.
|
||||
|
@ -1709,6 +1807,7 @@ def can_access_dir(path):
|
|||
return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@memoized
|
||||
def can_write_to_dir(path):
|
||||
"""Return True if the argument is a directory in which we can write.
|
||||
|
@ -1722,6 +1821,7 @@ def can_write_to_dir(path):
|
|||
return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK | os.W_OK)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@memoized
|
||||
def files_in(*search_paths):
|
||||
"""Returns all the files in paths passed as arguments.
|
||||
|
@ -1743,6 +1843,7 @@ def files_in(*search_paths):
|
|||
return files
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def search_paths_for_executables(*path_hints):
|
||||
"""Given a list of path hints returns a list of paths where
|
||||
to search for an executable.
|
||||
|
@ -1770,6 +1871,7 @@ def search_paths_for_executables(*path_hints):
|
|||
return executable_paths
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def partition_path(path, entry=None):
|
||||
"""
|
||||
Split the prefixes of the path at the first occurrence of entry and
|
||||
|
@@ -1786,7 +1888,11 @@ def partition_path(path, entry=None):
    # Derive the index of entry within paths, which will correspond to
    # the location of the entry in within the path.
    try:
        entries = path.split(os.sep)
        sep = os.sep
        entries = path.split(sep)
        if entries[0].endswith(":"):
            # Handle drive letters e.g. C:/ on Windows
            entries[0] = entries[0] + sep
        i = entries.index(entry)
        if '' in entries:
            i -= 1
@ -1797,6 +1903,7 @@ def partition_path(path, entry=None):
|
|||
return paths, '', []
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def prefixes(path):
|
||||
"""
|
||||
Returns a list containing the path and its ancestors, top-to-bottom.
|
||||
|
@ -1810,6 +1917,9 @@ def prefixes(path):
|
|||
For example, path ``./hi/jkl/mn`` results in a list with the following
|
||||
paths, in order: ``./hi``, ``./hi/jkl``, and ``./hi/jkl/mn``.
|
||||
|
||||
On Windows, paths will be normalized to use ``/`` and ``/`` will always
|
||||
be used as the separator instead of ``os.sep``.
|
||||
|
||||
Parameters:
|
||||
path (str): the string used to derive ancestor paths
|
||||
|
||||
|
@@ -1818,14 +1928,17 @@ def prefixes(path):
    """
    if not path:
        return []

    parts = path.strip(os.sep).split(os.sep)
    if path.startswith(os.sep):
        parts.insert(0, os.sep)
    sep = os.sep
    parts = path.strip(sep).split(sep)
    if path.startswith(sep):
        parts.insert(0, sep)
    elif parts[0].endswith(":"):
        # Handle drive letters e.g. C:/ on Windows
        parts[0] = parts[0] + sep
    paths = [os.path.join(*parts[:i + 1]) for i in range(len(parts))]

    try:
        paths.remove(os.sep)
        paths.remove(sep)
    except ValueError:
        pass

@ -1837,6 +1950,7 @@ def prefixes(path):
|
|||
return paths
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def md5sum(file):
|
||||
"""Compute the MD5 sum of a file.
|
||||
|
||||
|
@ -1852,6 +1966,7 @@ def md5sum(file):
|
|||
return md5.digest()
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_directory_contents(dir):
|
||||
"""Remove all contents of a directory."""
|
||||
if os.path.exists(dir):
|
||||
|
@ -1863,6 +1978,7 @@ def remove_directory_contents(dir):
|
|||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def keep_modification_time(*filenames):
|
||||
"""
|
||||
Context manager to keep the modification timestamps of the input files.
|
||||
|
|
|
@ -265,16 +265,16 @@ def set_compiler_environment_variables(pkg, env):
|
|||
# Set SPACK compiler variables so that our wrapper knows what to call
|
||||
if compiler.cc:
|
||||
env.set('SPACK_CC', compiler.cc)
|
||||
env.set('CC', os.path.join(link_dir, compiler.link_paths['cc']))
|
||||
env.set('CC', os.path.join(link_dir, os.path.join(compiler.link_paths['cc'])))
|
||||
if compiler.cxx:
|
||||
env.set('SPACK_CXX', compiler.cxx)
|
||||
env.set('CXX', os.path.join(link_dir, compiler.link_paths['cxx']))
|
||||
env.set('CXX', os.path.join(link_dir, os.path.join(compiler.link_paths['cxx'])))
|
||||
if compiler.f77:
|
||||
env.set('SPACK_F77', compiler.f77)
|
||||
env.set('F77', os.path.join(link_dir, compiler.link_paths['f77']))
|
||||
env.set('F77', os.path.join(link_dir, os.path.join(compiler.link_paths['f77'])))
|
||||
if compiler.fc:
|
||||
env.set('SPACK_FC', compiler.fc)
|
||||
env.set('FC', os.path.join(link_dir, compiler.link_paths['fc']))
|
||||
env.set('FC', os.path.join(link_dir, os.path.join(compiler.link_paths['fc'])))
|
||||
|
||||
# Set SPACK compiler rpath flags so that our wrapper knows what to use
|
||||
env.set('SPACK_CC_RPATH_ARG', compiler.cc_rpath_arg)
|
||||
|
@ -374,7 +374,8 @@ def set_wrapper_variables(pkg, env):
|
|||
# directory. Add that to the path too.
|
||||
env_paths = []
|
||||
compiler_specific = os.path.join(
|
||||
spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths['cc']))
|
||||
spack.paths.build_env_path,
|
||||
os.path.dirname(os.path.join(pkg.compiler.link_paths['cc'])))
|
||||
for item in [spack.paths.build_env_path, compiler_specific]:
|
||||
env_paths.append(item)
|
||||
ci = os.path.join(item, 'case-insensitive')
|
||||
|
@ -537,10 +538,10 @@ def _set_variables_for_single_module(pkg, module):
|
|||
|
||||
# Put spack compiler paths in module scope.
|
||||
link_dir = spack.paths.build_env_path
|
||||
m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths['cc'])
|
||||
m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths['cxx'])
|
||||
m.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths['f77'])
|
||||
m.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths['fc'])
|
||||
m.spack_cc = os.path.join(link_dir, os.path.join(pkg.compiler.link_paths['cc']))
|
||||
m.spack_cxx = os.path.join(link_dir, os.path.join(pkg.compiler.link_paths['cxx']))
|
||||
m.spack_f77 = os.path.join(link_dir, os.path.join(pkg.compiler.link_paths['f77']))
|
||||
m.spack_fc = os.path.join(link_dir, os.path.join(pkg.compiler.link_paths['fc']))
|
||||
|
||||
# Emulate some shell commands for convenience
|
||||
m.pwd = os.getcwd
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
import spack.util.module_cmd
|
||||
import spack.version
|
||||
from spack.util.environment import filter_system_paths
|
||||
from spack.util.path import system_path_filter
|
||||
|
||||
__all__ = ['Compiler']
|
||||
|
||||
|
@ -157,6 +158,7 @@ def _parse_link_paths(string):
|
|||
return implicit_link_dirs
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def _parse_non_system_link_dirs(string):
|
||||
"""Parses link paths out of compiler debug output.
|
||||
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
@ -42,10 +43,10 @@ def opt_flags(self):
|
|||
|
||||
@property
|
||||
def link_paths(self):
|
||||
link_paths = {'cc': 'aocc/clang',
|
||||
'cxx': 'aocc/clang++',
|
||||
'f77': 'aocc/flang',
|
||||
'fc': 'aocc/flang'}
|
||||
link_paths = {'cc': os.path.join('aocc', 'clang'),
|
||||
'cxx': os.path.join('aocc', 'clang++'),
|
||||
'f77': os.path.join('aocc', 'flang'),
|
||||
'fc': os.path.join('aocc', 'flang')}
|
||||
|
||||
return link_paths
|
||||
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
import spack.compiler
|
||||
|
@ -22,10 +23,10 @@ class Arm(spack.compiler.Compiler):
|
|||
fc_names = ['armflang']
|
||||
|
||||
# Named wrapper links within lib/spack/env
|
||||
link_paths = {'cc': 'arm/armclang',
|
||||
'cxx': 'arm/armclang++',
|
||||
'f77': 'arm/armflang',
|
||||
'fc': 'arm/armflang'}
|
||||
link_paths = {'cc': os.path.join('arm', 'armclang'),
|
||||
'cxx': os.path.join('arm', 'armclang++'),
|
||||
'f77': os.path.join('arm', 'armflang'),
|
||||
'fc': os.path.join('arm', 'armflang')}
|
||||
|
||||
# The ``--version`` option seems to be the most consistent one for
|
||||
# arm compilers. Output looks like this:
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
|
||||
from spack.compiler import Compiler, UnsupportedCompilerFlag
|
||||
from spack.version import ver
|
||||
|
||||
|
@ -27,10 +29,10 @@ class Cce(Compiler):
|
|||
PrgEnv = 'PrgEnv-cray'
|
||||
PrgEnv_compiler = 'cce'
|
||||
|
||||
link_paths = {'cc': 'cce/cc',
|
||||
'cxx': 'cce/case-insensitive/CC',
|
||||
'f77': 'cce/ftn',
|
||||
'fc': 'cce/ftn'}
|
||||
link_paths = {'cc': os.path.join('cce', 'cc'),
|
||||
'cxx': os.path.join('cce', 'case-insensitive', 'CC'),
|
||||
'f77': os.path.join('cce', 'ftn'),
|
||||
'fc': os.path.join('cce', 'ftn')}
|
||||
|
||||
@property
|
||||
def is_clang_based(self):
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
@ -13,20 +14,20 @@
|
|||
|
||||
#: compiler symlink mappings for mixed f77 compilers
|
||||
f77_mapping = [
|
||||
('gfortran', 'clang/gfortran'),
|
||||
('xlf_r', 'xl_r/xlf_r'),
|
||||
('xlf', 'xl/xlf'),
|
||||
('pgfortran', 'pgi/pgfortran'),
|
||||
('ifort', 'intel/ifort')
|
||||
('gfortran', ('clang', 'gfortran')),
|
||||
('xlf_r', ('xl_r', 'xlf_r')),
|
||||
('xlf', ('xl', 'xlf')),
|
||||
('pgfortran', ('pgi', 'pgfortran')),
|
||||
('ifort', ('intel', 'ifort'))
|
||||
]
|
||||
|
||||
#: compiler symlink mappings for mixed f90/fc compilers
|
||||
fc_mapping = [
|
||||
('gfortran', 'clang/gfortran'),
|
||||
('xlf90_r', 'xl_r/xlf90_r'),
|
||||
('xlf90', 'xl/xlf90'),
|
||||
('pgfortran', 'pgi/pgfortran'),
|
||||
('ifort', 'intel/ifort')
|
||||
('gfortran', ('clang', 'gfortran')),
|
||||
('xlf90_r', ('xl_r', 'xlf90_r')),
|
||||
('xlf90', ('xl', 'xlf90')),
|
||||
('pgfortran', ('pgi', 'pgfortran')),
|
||||
('ifort', ('intel', 'ifort'))
|
||||
]
|
||||
|
||||
|
||||
|
@ -60,8 +61,8 @@ def opt_flags(self):
|
|||
@property
|
||||
def link_paths(self):
|
||||
# clang links are always the same
|
||||
link_paths = {'cc': 'clang/clang',
|
||||
'cxx': 'clang/clang++'}
|
||||
link_paths = {'cc': os.path.join('clang', 'clang'),
|
||||
'cxx': os.path.join('clang', 'clang++')}
|
||||
|
||||
# fortran links need to look at the actual compiler names from
|
||||
# compilers.yaml to figure out which named symlink to use
|
||||
|
@ -70,14 +71,14 @@ def link_paths(self):
|
|||
link_paths['f77'] = link_path
|
||||
break
|
||||
else:
|
||||
link_paths['f77'] = 'clang/flang'
|
||||
link_paths['f77'] = os.path.join('clang', 'flang')
|
||||
|
||||
for compiler_name, link_path in fc_mapping:
|
||||
if self.fc and compiler_name in self.fc:
|
||||
link_paths['fc'] = link_path
|
||||
break
|
||||
else:
|
||||
link_paths['fc'] = 'clang/flang'
|
||||
link_paths['fc'] = os.path.join('clang', 'flang')
|
||||
|
||||
return link_paths
|
||||
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
|
||||
import spack.compilers.oneapi
|
||||
|
||||
|
||||
|
@ -23,7 +25,7 @@ class Dpcpp(spack.compilers.oneapi.Oneapi):
|
|||
cxx_names = ['dpcpp']
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {'cc': 'oneapi/icx',
|
||||
'cxx': 'oneapi/dpcpp',
|
||||
'f77': 'oneapi/ifx',
|
||||
'fc': 'oneapi/ifx'}
|
||||
link_paths = {'cc': os.path.join('oneapi', 'icx'),
|
||||
'cxx': os.path.join('oneapi', 'dpcpp'),
|
||||
'f77': os.path.join('oneapi', 'ifx'),
|
||||
'fc': os.path.join('oneapi', 'ifx')}
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
|
||||
import spack.compiler
|
||||
|
||||
|
||||
|
@ -20,10 +22,10 @@ class Fj(spack.compiler.Compiler):
|
|||
fc_names = ['frt']
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {'cc': 'fj/fcc',
|
||||
'cxx': 'fj/case-insensitive/FCC',
|
||||
'f77': 'fj/frt',
|
||||
'fc': 'fj/frt'}
|
||||
link_paths = {'cc': os.path.join('fj', 'fcc'),
|
||||
'cxx': os.path.join('fj', 'case-insensitive', 'FCC'),
|
||||
'f77': os.path.join('fj', 'frt'),
|
||||
'fc': os.path.join('fj', 'frt')}
|
||||
|
||||
version_argument = '--version'
|
||||
version_regex = r'\((?:FCC|FRT)\) ([a-z\d.]+)'
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
import spack.compiler
|
||||
|
@ -29,10 +30,10 @@ class Gcc(spack.compiler.Compiler):
|
|||
suffixes = [r'-mp-\d+(?:\.\d+)?', r'-\d+(?:\.\d+)?', r'\d\d']
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {'cc': 'gcc/gcc',
|
||||
'cxx': 'gcc/g++',
|
||||
'f77': 'gcc/gfortran',
|
||||
'fc': 'gcc/gfortran'}
|
||||
link_paths = {'cc': os.path.join('gcc', 'gcc'),
|
||||
'cxx': os.path.join('gcc', 'g++'),
|
||||
'f77': os.path.join('gcc', 'gfortran'),
|
||||
'fc': os.path.join('gcc', 'gfortran')}
|
||||
|
||||
PrgEnv = 'PrgEnv-gnu'
|
||||
PrgEnv_compiler = 'gcc'
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from spack.compiler import Compiler, UnsupportedCompilerFlag
|
||||
|
@ -23,10 +24,10 @@ class Intel(Compiler):
|
|||
fc_names = ['ifort']
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {'cc': 'intel/icc',
|
||||
'cxx': 'intel/icpc',
|
||||
'f77': 'intel/ifort',
|
||||
'fc': 'intel/ifort'}
|
||||
link_paths = {'cc': os.path.join('intel', 'icc'),
|
||||
'cxx': os.path.join('intel', 'icpc'),
|
||||
'f77': os.path.join('intel', 'ifort'),
|
||||
'fc': os.path.join('intel', 'ifort')}
|
||||
|
||||
PrgEnv = 'PrgEnv-intel'
|
||||
PrgEnv_compiler = 'intel'
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
from typing import List # novm
|
||||
|
||||
import spack.compiler
|
||||
|
@ -26,8 +27,8 @@ class Nag(spack.compiler.Compiler):
|
|||
link_paths = {
|
||||
'cc': 'cc',
|
||||
'cxx': 'c++',
|
||||
'f77': 'nag/nagfor',
|
||||
'fc': 'nag/nagfor'}
|
||||
'f77': os.path.join('nag', 'nagfor'),
|
||||
'fc': os.path.join('nag', 'nagfor')}
|
||||
|
||||
version_argument = '-V'
|
||||
version_regex = r'NAG Fortran Compiler Release ([0-9.]+)'
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
|
||||
from spack.compiler import Compiler
|
||||
|
||||
|
||||
|
@ -20,10 +22,10 @@ class Nvhpc(Compiler):
|
|||
fc_names = ['nvfortran']
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {'cc': 'nvhpc/nvc',
|
||||
'cxx': 'nvhpc/nvc++',
|
||||
'f77': 'nvhpc/nvfortran',
|
||||
'fc': 'nvhpc/nvfortran'}
|
||||
link_paths = {'cc': os.path.join('nvhpc', 'nvc'),
|
||||
'cxx': os.path.join('nvhpc', 'nvc++'),
|
||||
'f77': os.path.join('nvhpc', 'nvfortran'),
|
||||
'fc': os.path.join('nvhpc', 'nvfortran')}
|
||||
|
||||
PrgEnv = 'PrgEnv-nvhpc'
|
||||
PrgEnv_compiler = 'nvhpc'
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
from os.path import dirname
|
||||
|
||||
from spack.compiler import Compiler
|
||||
|
@ -22,10 +23,10 @@ class Oneapi(Compiler):
|
|||
fc_names = ['ifx']
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {'cc': 'oneapi/icx',
|
||||
'cxx': 'oneapi/icpx',
|
||||
'f77': 'oneapi/ifx',
|
||||
'fc': 'oneapi/ifx'}
|
||||
link_paths = {'cc': os.path.join('oneapi', 'icx'),
|
||||
'cxx': os.path.join('oneapi', 'icpx'),
|
||||
'f77': os.path.join('oneapi', 'ifx'),
|
||||
'fc': os.path.join('oneapi', 'ifx')}
|
||||
|
||||
PrgEnv = 'PrgEnv-oneapi'
|
||||
PrgEnv_compiler = 'oneapi'
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
|
||||
from spack.compiler import Compiler, UnsupportedCompilerFlag
|
||||
from spack.version import ver
|
||||
|
||||
|
@ -21,10 +23,10 @@ class Pgi(Compiler):
|
|||
fc_names = ['pgfortran', 'pgf95', 'pgf90']
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {'cc': 'pgi/pgcc',
|
||||
'cxx': 'pgi/pgc++',
|
||||
'f77': 'pgi/pgfortran',
|
||||
'fc': 'pgi/pgfortran'}
|
||||
link_paths = {'cc': os.path.join('pgi', 'pgcc'),
|
||||
'cxx': os.path.join('pgi', 'pgc++'),
|
||||
'f77': os.path.join('pgi', 'pgfortran'),
|
||||
'fc': os.path.join('pgi', 'pgfortran')}
|
||||
|
||||
PrgEnv = 'PrgEnv-pgi'
|
||||
PrgEnv_compiler = 'pgi'
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
|
||||
from spack.compiler import Compiler, UnsupportedCompilerFlag
|
||||
from spack.version import ver
|
||||
|
||||
|
@ -21,10 +23,10 @@ class Xl(Compiler):
|
|||
fc_names = ['xlf90', 'xlf95', 'xlf2003', 'xlf2008']
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {'cc': 'xl/xlc',
|
||||
'cxx': 'xl/xlc++',
|
||||
'f77': 'xl/xlf',
|
||||
'fc': 'xl/xlf90'}
|
||||
link_paths = {'cc': os.path.join('xl', 'xlc'),
|
||||
'cxx': os.path.join('xl', 'xlc++'),
|
||||
'f77': os.path.join('xl', 'xlf'),
|
||||
'fc': os.path.join('xl', 'xlf90')}
|
||||
|
||||
version_argument = '-qversion'
|
||||
version_regex = r'([0-9]?[0-9]\.[0-9])'
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
|
||||
import spack.compilers.xl
|
||||
|
||||
|
||||
|
@ -20,7 +22,7 @@ class XlR(spack.compilers.xl.Xl):
|
|||
fc_names = ['xlf90_r', 'xlf95_r', 'xlf2003_r', 'xlf2008_r']
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {'cc': 'xl_r/xlc_r',
|
||||
'cxx': 'xl_r/xlc++_r',
|
||||
'f77': 'xl_r/xlf_r',
|
||||
'fc': 'xl_r/xlf90_r'}
|
||||
link_paths = {'cc': os.path.join('xl_r', 'xlc_r'),
|
||||
'cxx': os.path.join('xl_r', 'xlc++_r'),
|
||||
'f77': os.path.join('xl_r', 'xlf_r'),
|
||||
'fc': os.path.join('xl_r', 'xlf90_r')}
|
||||
|
|
|
@ -218,6 +218,8 @@ def compute_windows_program_path_for_package(pkg):
|
|||
pkg (spack.package.Package): package for which
|
||||
Program Files location is to be computed
|
||||
"""
|
||||
# note windows paths are fine here as this method should only ever be invoked
|
||||
# to interact with Windows
|
||||
program_files = 'C:\\Program Files {}\\{}'
|
||||
|
||||
return[program_files.format(arch, name) for
|
||||
|
|
|
@ -308,6 +308,9 @@ def candidate_urls(self):
|
|||
url = url.replace("\\", "/")
|
||||
if sys.platform != "win32" and url.startswith('file://'):
|
||||
path = urllib_parse.quote(url[len('file://'):])
|
||||
if sys.platform == "win32":
|
||||
if not path.startswith("/"):
|
||||
path = "/" + path
|
||||
url = 'file://' + path
|
||||
urls.append(url)
|
||||
|
||||
|
@ -681,6 +684,9 @@ def __init__(self, **kwargs):
|
|||
raise ValueError(
|
||||
"%s requires %s argument." % (self.__class__, self.url_attr))
|
||||
|
||||
if sys.platform == "win32":
|
||||
self.url = self.url.replace('\\', '/')
|
||||
|
||||
for attr in self.optional_attrs:
|
||||
setattr(self, attr, kwargs.get(attr, None))
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
import collections
|
||||
import itertools
|
||||
import os.path
|
||||
import posixpath
|
||||
from typing import Any, Dict # novm
|
||||
|
||||
import llnl.util.lang as lang
|
||||
|
@ -99,7 +100,7 @@ def guess_core_compilers(name, store=False):
|
|||
|
||||
class LmodConfiguration(BaseConfiguration):
|
||||
"""Configuration class for lmod module files."""
|
||||
default_projections = {'all': os.path.join('{name}', '{version}')}
|
||||
default_projections = {'all': posixpath.join('{name}', '{version}')}
|
||||
|
||||
@property
|
||||
def core_compilers(self):
|
||||
|
@ -449,7 +450,7 @@ def manipulate_path(token):
|
|||
|
||||
class LmodModulefileWriter(BaseModuleFileWriter):
|
||||
"""Writer class for lmod module files."""
|
||||
default_template = os.path.join('modules', 'modulefile.lua')
|
||||
default_template = posixpath.join('modules', 'modulefile.lua')
|
||||
|
||||
|
||||
class CoreCompilersNotFoundError(spack.error.SpackError, KeyError):
|
||||
|
|
|
@@ -6,7 +6,7 @@
"""This module implements the classes necessary to generate TCL
non-hierarchical modules.
"""
import os.path
import posixpath
import string
from typing import Any, Dict  # novm

@@ -106,4 +106,4 @@ def conflicts(self):

class TclModulefileWriter(BaseModuleFileWriter):
    """Writer class for tcl module files."""
    default_template = os.path.join('modules', 'modulefile.tcl')
    default_template = posixpath.join('modules', 'modulefile.tcl')
@ -10,6 +10,7 @@
|
|||
dependencies.
|
||||
"""
|
||||
import os
|
||||
import posixpath
|
||||
|
||||
import llnl.util.filesystem
|
||||
|
||||
|
@ -20,41 +21,41 @@
|
|||
spack_root = prefix
|
||||
|
||||
#: bin directory in the spack prefix
|
||||
bin_path = os.path.join(prefix, "bin")
|
||||
bin_path = posixpath.join(prefix, "bin")
|
||||
|
||||
#: The spack script itself
|
||||
spack_script = os.path.join(bin_path, "spack")
|
||||
spack_script = posixpath.join(bin_path, "spack")
|
||||
|
||||
#: The sbang script in the spack installation
|
||||
sbang_script = os.path.join(bin_path, "sbang")
|
||||
sbang_script = posixpath.join(bin_path, "sbang")
|
||||
|
||||
# spack directory hierarchy
|
||||
lib_path = os.path.join(prefix, "lib", "spack")
|
||||
external_path = os.path.join(lib_path, "external")
|
||||
build_env_path = os.path.join(lib_path, "env")
|
||||
module_path = os.path.join(lib_path, "spack")
|
||||
command_path = os.path.join(module_path, "cmd")
|
||||
analyzers_path = os.path.join(module_path, "analyzers")
|
||||
platform_path = os.path.join(module_path, 'platforms')
|
||||
compilers_path = os.path.join(module_path, "compilers")
|
||||
build_systems_path = os.path.join(module_path, 'build_systems')
|
||||
operating_system_path = os.path.join(module_path, 'operating_systems')
|
||||
test_path = os.path.join(module_path, "test")
|
||||
hooks_path = os.path.join(module_path, "hooks")
|
||||
opt_path = os.path.join(prefix, "opt")
|
||||
share_path = os.path.join(prefix, "share", "spack")
|
||||
etc_path = os.path.join(prefix, "etc")
|
||||
lib_path = posixpath.join(prefix, "lib", "spack")
|
||||
external_path = posixpath.join(lib_path, "external")
|
||||
build_env_path = posixpath.join(lib_path, "env")
|
||||
module_path = posixpath.join(lib_path, "spack")
|
||||
command_path = posixpath.join(module_path, "cmd")
|
||||
analyzers_path = posixpath.join(module_path, "analyzers")
|
||||
platform_path = posixpath.join(module_path, 'platforms')
|
||||
compilers_path = posixpath.join(module_path, "compilers")
|
||||
build_systems_path = posixpath.join(module_path, 'build_systems')
|
||||
operating_system_path = posixpath.join(module_path, 'operating_systems')
|
||||
test_path = posixpath.join(module_path, "test")
|
||||
hooks_path = posixpath.join(module_path, "hooks")
|
||||
opt_path = posixpath.join(prefix, "opt")
|
||||
share_path = posixpath.join(prefix, "share", "spack")
|
||||
etc_path = posixpath.join(prefix, "etc")
|
||||
|
||||
|
||||
#
|
||||
# Things in $spack/var/spack
|
||||
#
|
||||
var_path = os.path.join(prefix, "var", "spack")
|
||||
var_path = posixpath.join(prefix, "var", "spack")
|
||||
|
||||
# read-only things in $spack/var/spack
|
||||
repos_path = os.path.join(var_path, "repos")
|
||||
packages_path = os.path.join(repos_path, "builtin")
|
||||
mock_packages_path = os.path.join(repos_path, "builtin.mock")
|
||||
repos_path = posixpath.join(var_path, "repos")
|
||||
packages_path = posixpath.join(repos_path, "builtin")
|
||||
mock_packages_path = posixpath.join(repos_path, "builtin.mock")
|
||||
|
||||
#
|
||||
# Writable things in $spack/var/spack
|
||||
|
@ -62,13 +63,13 @@
|
|||
# TODO: These should probably move to user cache, or some other location.
|
||||
#
|
||||
# fetch cache for downloaded files
|
||||
default_fetch_cache_path = os.path.join(var_path, "cache")
|
||||
default_fetch_cache_path = posixpath.join(var_path, "cache")
|
||||
|
||||
# GPG paths.
|
||||
gpg_keys_path = os.path.join(var_path, "gpg")
|
||||
mock_gpg_data_path = os.path.join(var_path, "gpg.mock", "data")
|
||||
mock_gpg_keys_path = os.path.join(var_path, "gpg.mock", "keys")
|
||||
gpg_path = os.path.join(opt_path, "spack", "gpg")
|
||||
gpg_keys_path = posixpath.join(var_path, "gpg")
|
||||
mock_gpg_data_path = posixpath.join(var_path, "gpg.mock", "data")
|
||||
mock_gpg_keys_path = posixpath.join(var_path, "gpg.mock", "keys")
|
||||
gpg_path = posixpath.join(opt_path, "spack", "gpg")
|
||||
|
||||
|
||||
# Below paths are where Spack can write information for the user.
|
||||
|
@ -87,22 +88,22 @@ def _get_user_cache_path():
|
|||
user_cache_path = _get_user_cache_path()
|
||||
|
||||
#: junit, cdash, etc. reports about builds
|
||||
reports_path = os.path.join(user_cache_path, "reports")
|
||||
reports_path = posixpath.join(user_cache_path, "reports")
|
||||
|
||||
#: installation test (spack test) output
|
||||
default_test_path = os.path.join(user_cache_path, "test")
|
||||
default_test_path = posixpath.join(user_cache_path, "test")
|
||||
|
||||
#: spack monitor analysis directories
|
||||
default_monitor_path = os.path.join(reports_path, "monitor")
|
||||
default_monitor_path = posixpath.join(reports_path, "monitor")
|
||||
|
||||
#: git repositories fetched to compare commits to versions
|
||||
user_repos_cache_path = os.path.join(user_cache_path, 'git_repos')
|
||||
user_repos_cache_path = posixpath.join(user_cache_path, 'git_repos')
|
||||
|
||||
#: bootstrap store for bootstrapping clingo and other tools
|
||||
default_user_bootstrap_path = os.path.join(user_cache_path, 'bootstrap')
|
||||
default_user_bootstrap_path = posixpath.join(user_cache_path, 'bootstrap')
|
||||
|
||||
#: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
|
||||
default_misc_cache_path = os.path.join(user_cache_path, 'cache')
|
||||
default_misc_cache_path = posixpath.join(user_cache_path, 'cache')
|
||||
|
||||
|
||||
# Below paths pull configuration from the host environment.
|
||||
|
|
|
@ -11,6 +11,7 @@
|
|||
import inspect
|
||||
import itertools
|
||||
import os
|
||||
import posixpath
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
|
@ -137,6 +138,8 @@ class FastPackageChecker(Mapping):
|
|||
def __init__(self, packages_path):
|
||||
# The path of the repository managed by this instance
|
||||
self.packages_path = packages_path
|
||||
if sys.platform == 'win32':
|
||||
self.packages_path = self.packages_path.replace("\\", "/")
|
||||
|
||||
# If the cache we need is not there yet, then build it appropriately
|
||||
if packages_path not in self._paths_cache:
|
||||
|
@ -163,7 +166,7 @@ def _create_new_cache(self): # type: () -> Dict[str, os.stat_result]
|
|||
cache = {} # type: Dict[str, os.stat_result]
|
||||
for pkg_name in os.listdir(self.packages_path):
|
||||
# Skip non-directories in the package root.
|
||||
pkg_dir = os.path.join(self.packages_path, pkg_name)
|
||||
pkg_dir = posixpath.join(self.packages_path, pkg_name)
|
||||
|
||||
# Warn about invalid names that look like packages.
|
||||
if not nm.valid_module_name(pkg_name):
|
||||
|
@ -174,7 +177,7 @@ def _create_new_cache(self): # type: () -> Dict[str, os.stat_result]
|
|||
continue
|
||||
|
||||
# Construct the file name from the directory
|
||||
pkg_file = os.path.join(
|
||||
pkg_file = posixpath.join(
|
||||
self.packages_path, pkg_name, package_file_name
|
||||
)
|
||||
|
||||
|
@ -328,6 +331,8 @@ class RepoIndex(object):
|
|||
def __init__(self, package_checker, namespace):
|
||||
self.checker = package_checker
|
||||
self.packages_path = self.checker.packages_path
|
||||
if sys.platform == 'win32':
|
||||
self.packages_path = self.packages_path.replace("\\", "/")
|
||||
self.namespace = namespace
|
||||
|
||||
self.indexers = {}
|
||||
|
@ -705,6 +710,8 @@ def __init__(self, root):
|
|||
# Root directory, containing _repo.yaml and package dirs
|
||||
# Allow roots to by spack-relative by starting with '$spack'
|
||||
self.root = spack.util.path.canonicalize_path(root)
|
||||
if sys.platform == 'win32':
|
||||
self.root = self.root.replace("\\", "/")
|
||||
|
||||
# check and raise BadRepoError on fail.
|
||||
def check(condition, msg):
|
||||
|
@ -712,18 +719,18 @@ def check(condition, msg):
|
|||
raise BadRepoError(msg)
|
||||
|
||||
# Validate repository layout.
|
||||
self.config_file = os.path.join(self.root, repo_config_name)
|
||||
self.config_file = posixpath.join(self.root, repo_config_name)
|
||||
check(os.path.isfile(self.config_file),
|
||||
"No %s found in '%s'" % (repo_config_name, root))
|
||||
|
||||
self.packages_path = os.path.join(self.root, packages_dir_name)
|
||||
self.packages_path = posixpath.join(self.root, packages_dir_name)
|
||||
check(os.path.isdir(self.packages_path),
|
||||
"No directory '%s' found in '%s'" % (packages_dir_name, root))
|
||||
|
||||
# Read configuration and validate namespace
|
||||
config = self._read_config()
|
||||
check('namespace' in config, '%s must define a namespace.'
|
||||
% os.path.join(root, repo_config_name))
|
||||
% posixpath.join(root, repo_config_name))
|
||||
|
||||
self.namespace = config['namespace']
|
||||
check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace),
|
||||
|
@ -972,7 +979,7 @@ def extensions_for(self, extendee_spec):
|
|||
def dirname_for_package_name(self, pkg_name):
|
||||
"""Get the directory name for a particular package. This is the
|
||||
directory that contains its package.py file."""
|
||||
return os.path.join(self.packages_path, pkg_name)
|
||||
return posixpath.join(self.packages_path, pkg_name)
|
||||
|
||||
def filename_for_package_name(self, pkg_name):
|
||||
"""Get the filename for the module we should load for a particular
|
||||
|
@ -984,7 +991,7 @@ def filename_for_package_name(self, pkg_name):
|
|||
the package exists before importing.
|
||||
"""
|
||||
pkg_dir = self.dirname_for_package_name(pkg_name)
|
||||
return os.path.join(pkg_dir, package_file_name)
|
||||
return posixpath.join(pkg_dir, package_file_name)
|
||||
|
||||
@property
|
||||
def _pkg_checker(self):
|
||||
|
@ -1160,8 +1167,8 @@ def create_repo(root, namespace=None):
|
|||
"Cannot create repository in %s: can't access parent!" % root)
|
||||
|
||||
try:
|
||||
config_path = os.path.join(root, repo_config_name)
|
||||
packages_path = os.path.join(root, packages_dir_name)
|
||||
config_path = posixpath.join(root, repo_config_name)
|
||||
packages_path = posixpath.join(root, packages_dir_name)
|
||||
|
||||
fs.mkdirp(packages_path)
|
||||
with open(config_path, 'w') as config:
|
||||
|
|
|
@ -22,6 +22,7 @@
|
|||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import (
|
||||
can_access,
|
||||
get_owner_uid,
|
||||
getuid,
|
||||
install,
|
||||
install_tree,
|
||||
|
@ -90,7 +91,7 @@ def create_stage_root(path):
|
|||
for p in user_paths:
|
||||
# Ensure access controls of subdirs from `$user` on down are
|
||||
# restricted to the user.
|
||||
owner_uid = sup.get_owner_uid(p)
|
||||
owner_uid = get_owner_uid(p)
|
||||
if user_uid != owner_uid:
|
||||
tty.warn("Expected user {0} to own {1}, but it is owned by {2}"
|
||||
.format(user_uid, p, owner_uid))
|
||||
|
|
|
@ -205,7 +205,7 @@ def test_spack_paths_before_module_paths(
|
|||
s.concretize()
|
||||
pkg = s.package
|
||||
|
||||
module_path = '/path/to/module'
|
||||
module_path = os.path.join('path', 'to', 'module')
|
||||
|
||||
def _set_wrong_cc(x):
|
||||
os.environ['PATH'] = module_path + os.pathsep + os.environ['PATH']
|
||||
|
@ -219,11 +219,9 @@ def _set_wrong_cc(x):
|
|||
|
||||
spack.build_environment.setup_package(pkg, False)
|
||||
|
||||
spack_path = posixpath.join(spack.paths.prefix, 'lib/spack/env')
|
||||
spack_path = os.path.join(spack.paths.prefix, os.path.join('lib', 'spack', 'env'))
|
||||
|
||||
paths = os.environ['PATH'].split(os.pathsep)
|
||||
if sys.platform == 'win32':
|
||||
paths = [p.replace("\\", "/") for p in paths]
|
||||
|
||||
assert paths.index(spack_path) < paths.index(module_path)
|
||||
|
||||
|
|
|
@ -5,7 +5,6 @@
|
|||
|
||||
import filecmp
|
||||
import os
|
||||
import posixpath
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
|
@ -92,9 +91,6 @@ def test_changed_files(flake8_package):
|
|||
for path in changed_files()
|
||||
]
|
||||
|
||||
if sys.platform == "win32":
|
||||
files = [f.replace("\\", "/") for f in files]
|
||||
|
||||
# There will likely be other files that have changed
|
||||
# when these tests are run
|
||||
assert flake8_package in files
|
||||
|
@ -125,9 +121,6 @@ def test_changed_files_all_files(flake8_package):
|
|||
for path in changed_files(all_files=True)
|
||||
])
|
||||
|
||||
if sys.platform == "win32":
|
||||
files = [f.replace("\\", "/") for f in files]
|
||||
|
||||
# spack has a lot of files -- check that we're in the right ballpark
|
||||
assert len(files) > 6000
|
||||
|
||||
|
@ -136,13 +129,13 @@ def test_changed_files_all_files(flake8_package):
|
|||
assert zlib.module.__file__ in files
|
||||
|
||||
# a core spack file
|
||||
assert posixpath.join(spack.paths.module_path, "spec.py") in files
|
||||
assert os.path.join(spack.paths.module_path, "spec.py") in files
|
||||
|
||||
# a mock package
|
||||
assert flake8_package in files
|
||||
|
||||
# this test
|
||||
assert __file__.replace("\\", "/") in files
|
||||
assert __file__ in files
|
||||
|
||||
# ensure externals are excluded
|
||||
assert not any(f.startswith(spack.paths.external_path) for f in files)
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import fnmatch
|
||||
import posixpath
|
||||
import os.path
|
||||
|
||||
import pytest
|
||||
import six
|
||||
|
@ -198,7 +198,7 @@ def test_add(self, header_list):
|
|||
|
||||
|
||||
#: Directory where the data for the test below is stored
|
||||
search_dir = posixpath.join(spack.paths.test_path, 'data', 'directory_search')
|
||||
search_dir = os.path.join(spack.paths.test_path, 'data', 'directory_search')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('search_fn,search_list,root,kwargs', [
|
||||
|
@ -229,12 +229,12 @@ def test_add(self, header_list):
|
|||
(find_headers, ['a', 'c'], search_dir, {'recursive': True}),
|
||||
(find_libraries,
|
||||
['liba', 'libd'],
|
||||
posixpath.join(search_dir, 'b'),
|
||||
os.path.join(search_dir, 'b'),
|
||||
{'recursive': False}
|
||||
),
|
||||
(find_headers,
|
||||
['b', 'd'],
|
||||
posixpath.join(search_dir, 'b'),
|
||||
os.path.join(search_dir, 'b'),
|
||||
{'recursive': False}
|
||||
),
|
||||
])
|
||||
|
@ -270,14 +270,14 @@ def test_searching_order(search_fn, search_list, root, kwargs):
|
|||
|
||||
@pytest.mark.parametrize('root,search_list,kwargs,expected', [
|
||||
(search_dir, '*/*bar.tx?', {'recursive': False}, [
|
||||
posixpath.join(search_dir, posixpath.join('a', 'foobar.txt')),
|
||||
posixpath.join(search_dir, posixpath.join('b', 'bar.txp')),
|
||||
posixpath.join(search_dir, posixpath.join('c', 'bar.txt')),
|
||||
os.path.join(search_dir, os.path.join('a', 'foobar.txt')),
|
||||
os.path.join(search_dir, os.path.join('b', 'bar.txp')),
|
||||
os.path.join(search_dir, os.path.join('c', 'bar.txt')),
|
||||
]),
|
||||
(search_dir, '*/*bar.tx?', {'recursive': True}, [
|
||||
posixpath.join(search_dir, posixpath.join('a', 'foobar.txt')),
|
||||
posixpath.join(search_dir, posixpath.join('b', 'bar.txp')),
|
||||
posixpath.join(search_dir, posixpath.join('c', 'bar.txt')),
|
||||
os.path.join(search_dir, os.path.join('a', 'foobar.txt')),
|
||||
os.path.join(search_dir, os.path.join('b', 'bar.txp')),
|
||||
os.path.join(search_dir, os.path.join('c', 'bar.txt')),
|
||||
])
|
||||
])
|
||||
def test_find_with_globbing(root, search_list, kwargs, expected):
|
||||
|
|
|
@ -415,8 +415,7 @@ def test_computation_of_header_directories(
|
|||
|
||||
def test_headers_directory_setter():
|
||||
if sys.platform == "win32":
|
||||
# TODO: Test with \\'s
|
||||
root = "C:/pfx/include/subdir"
|
||||
root = r'C:\pfx\include\subdir'
|
||||
else:
|
||||
root = "/pfx/include/subdir"
|
||||
hl = fs.HeaderList(
|
||||
|
@ -453,14 +452,14 @@ def test_headers_directory_setter():
|
|||
if sys.platform == "win32":
|
||||
# TODO: Test \\s
|
||||
paths = [
|
||||
('C:/user/root', None,
|
||||
(['C:/', 'C:/user', 'C:/user/root'], '', [])),
|
||||
('C:/user/root', 'C:/', ([], 'C:/', ['C:/user', 'C:/user/root'])),
|
||||
('C:/user/root', 'user', (['C:/'], 'C:/user', ['C:/user/root'])),
|
||||
('C:/user/root', 'root', (['C:/', 'C:/user'], 'C:/user/root', [])),
|
||||
('relative/path', None, (['relative', 'relative/path'], '', [])),
|
||||
('relative/path', 'relative', ([], 'relative', ['relative/path'])),
|
||||
('relative/path', 'path', (['relative'], 'relative/path', []))
|
||||
(r'C:\user\root', None,
|
||||
(['C:\\', r'C:\user', r'C:\user\root'], '', [])),
|
||||
(r'C:\user\root', 'C:\\', ([], 'C:\\', [r'C:\user', r'C:\user\root'])),
|
||||
(r'C:\user\root', r'user', (['C:\\'], r'C:\user', [r'C:\user\root'])),
|
||||
(r'C:\user\root', r'root', (['C:\\', r'C:\user'], r'C:\user\root', [])),
|
||||
(r'relative\path', None, ([r'relative', r'relative\path'], '', [])),
|
||||
(r'relative\path', r'relative', ([], r'relative', [r'relative\path'])),
|
||||
(r'relative\path', r'path', ([r'relative'], r'relative\path', []))
|
||||
]
|
||||
else:
|
||||
paths = [
|
||||
|
@ -483,9 +482,8 @@ def test_partition_path(path, entry, expected):
|
|||
if sys.platform == "win32":
|
||||
path_list = [
|
||||
('', []),
|
||||
('C:\\user\\dir', ['C:/', 'C:/user', 'C:/user/dir']),
|
||||
('./some/sub/dir', ['./some', './some/sub', './some/sub/dir']),
|
||||
('another/sub/dir', ['another', 'another/sub', 'another/sub/dir'])
|
||||
(r'.\some\sub\dir', [r'.\some', r'.\some\sub', r'.\some\sub\dir']),
|
||||
(r'another\sub\dir', [r'another', r'another\sub', r'another\sub\dir'])
|
||||
]
|
||||
else:
|
||||
path_list = [
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import posixpath
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
@ -29,7 +29,7 @@ def test_package_name(self):
|
|||
def test_package_filename(self):
|
||||
repo = spack.repo.Repo(mock_packages_path)
|
||||
filename = repo.filename_for_package_name('mpich')
|
||||
assert filename == posixpath.join(
|
||||
assert filename == os.path.join(
|
||||
mock_packages_path,
|
||||
'packages',
|
||||
'mpich',
|
||||
|
@ -39,7 +39,7 @@ def test_package_filename(self):
|
|||
def test_nonexisting_package_filename(self):
|
||||
repo = spack.repo.Repo(mock_packages_path)
|
||||
filename = repo.filename_for_package_name('some-nonexisting-package')
|
||||
assert filename == posixpath.join(
|
||||
assert filename == os.path.join(
|
||||
mock_packages_path,
|
||||
'packages',
|
||||
'some-nonexisting-package',
|
||||
|
|
|
@ -11,10 +11,10 @@
|
|||
"""
|
||||
import code
|
||||
import os
|
||||
import signal
|
||||
import traceback
|
||||
import sys
|
||||
import pdb
|
||||
import signal
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
|
||||
def debug_handler(sig, frame):
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
"""
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
|
@ -39,8 +40,13 @@ def _find_exe_from_env_var(var):
|
|||
if not exe:
|
||||
return None, []
|
||||
|
||||
if sys.platform == "win32":
|
||||
# Fix separators
|
||||
exe = exe.replace('\\', '/')
|
||||
|
||||
# split env var into executable and args if needed
|
||||
args = shlex.split(str(exe))
|
||||
|
||||
if not args:
|
||||
return None, []
|
||||
|
||||
|
|
|
@ -26,6 +26,7 @@
import spack.spec
import spack.util.executable as executable
import spack.util.spack_json as sjson
from spack.util.path import path_to_os_path, system_path_filter

system_paths = ['/', '/usr', '/usr/local']
suffixes = ['bin', 'bin64', 'include', 'lib', 'lib64']

@ -130,6 +131,7 @@ def env_var_to_source_line(var, val):
return source_line


@system_path_filter(arg_slice=slice(1))
def dump_environment(path, environment=None):
"""Dump an environment dictionary to a source-able file."""
use_env = environment or os.environ

@ -143,6 +145,7 @@ def dump_environment(path, environment=None):
'\n']))


@system_path_filter(arg_slice=slice(1))
def pickle_environment(path, environment=None):
"""Pickle an environment dictionary to a file."""
cPickle.dump(dict(environment if environment else os.environ),

@ -307,7 +310,7 @@ def execute(self, env):
environment_value = env.get(self.name, '')
directories = environment_value.split(
self.separator) if environment_value else []
directories.append(os.path.normpath(self.value))
directories.append(path_to_os_path(os.path.normpath(self.value)).pop())
env[self.name] = self.separator.join(directories)


@ -319,7 +322,8 @@ def execute(self, env):
environment_value = env.get(self.name, '')
directories = environment_value.split(
self.separator) if environment_value else []
directories = [os.path.normpath(self.value)] + directories
directories = [path_to_os_path(os.path.normpath(self.value)).pop()] \
+ directories
env[self.name] = self.separator.join(directories)


@ -331,8 +335,9 @@ def execute(self, env):
environment_value = env.get(self.name, '')
directories = environment_value.split(
self.separator) if environment_value else []
directories = [os.path.normpath(x) for x in directories
if x != os.path.normpath(self.value)]
directories = [path_to_os_path(os.path.normpath(x)).pop()
for x in directories
if x != path_to_os_path(os.path.normpath(self.value)).pop()]
env[self.name] = self.separator.join(directories)


@ -343,8 +348,8 @@ def execute(self, env):
environment_value = env.get(self.name, '')
directories = environment_value.split(
self.separator) if environment_value else []
directories = deprioritize_system_paths([os.path.normpath(x)
for x in directories])
directories = deprioritize_system_paths(
[path_to_os_path(os.path.normpath(x)).pop() for x in directories])
env[self.name] = self.separator.join(directories)


@ -356,7 +361,7 @@ def execute(self, env):
environment_value = env.get(self.name, '')
directories = environment_value.split(
self.separator) if environment_value else []
directories = prune_duplicate_paths([os.path.normpath(x)
directories = prune_duplicate_paths([path_to_os_path(os.path.normpath(x)).pop()
for x in directories])
env[self.name] = self.separator.join(directories)

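The path_to_os_path(...).pop() idiom that now runs through these execute() methods works because path_to_os_path always returns a list, even for a single argument, so pop() pulls the one converted value back out. A rough standalone sketch of that behaviour (the separator swap below is a simplified stand-in for the real helper, which routes through marshall_path and skips URLs):

    import os

    def path_to_os_path(*pths):
        # Simplified stand-in: force every string onto the current platform's separator.
        other = '\\' if os.sep == '/' else '/'
        return [p.replace(other, os.sep) if isinstance(p, str) else p for p in pths]

    value = path_to_os_path(os.path.normpath('C:/spack/bin')).pop()
    print(value)   # C:\spack\bin on Windows, C:/spack/bin elsewhere
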
@ -14,6 +14,7 @@
import llnl.util.tty as tty

import spack.error
from spack.util.path import Path, marshall_path, path_to_os_path, system_path_filter

__all__ = ['Executable', 'which', 'ProcessError']

@ -22,9 +23,11 @@ class Executable(object):
"""Class representing a program that can be run on the command line."""

def __init__(self, name):
if sys.platform == 'win32':
name = name.replace('\\', '/')
# necessary here for the shlex call to succeed
name = marshall_path(name, mode=Path.unix)
self.exe = shlex.split(str(name))
# filter back to platform dependent path
self.exe = path_to_os_path(*self.exe)
self.default_env = {}
from spack.util.environment import EnvironmentModifications  # no cycle
self.default_envmod = EnvironmentModifications()

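The marshall_path call before shlex.split is what the "necessary here for the shlex call to succeed" comment is about: shlex treats backslashes as escape characters, so an unconverted Windows path would be mangled before it could be split into arguments. A small standard-library illustration (the path is hypothetical):

    import shlex

    raw = r'C:\Users\me\git.exe --version'
    print(shlex.split(raw))                      # ['C:Usersmegit.exe', '--version']
    print(shlex.split(raw.replace('\\', '/')))   # ['C:/Users/me/git.exe', '--version']

Converting back with path_to_os_path afterwards restores platform-native separators for the actual subprocess call.
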
@ -33,10 +36,12 @@ def __init__(self, name):
if not self.exe:
raise ProcessError("Cannot construct executable for '%s'" % name)

@system_path_filter
def add_default_arg(self, arg):
"""Add a default argument to the command."""
self.exe.append(arg)

@system_path_filter
def add_default_env(self, key, value):
"""Set an environment variable when the command is run.

@ -77,6 +82,7 @@ def path(self):
"""
return self.exe[0]

# needs a small fixup to better handle URLS and the like
def __call__(self, *args, **kwargs):
"""Run this executable in a subprocess.

@ -204,11 +210,17 @@ def streamify(arg, mode):
if output in (str, str.split) or error in (str, str.split):
result = ''
if output in (str, str.split):
if sys.platform == 'win32':
outstr = text_type(out.decode('ISO-8859-1'))
else:
outstr = text_type(out.decode('utf-8'))
result += outstr
if output is str.split:
sys.stdout.write(outstr)
if error in (str, str.split):
if sys.platform == 'win32':
errstr = text_type(err.decode('ISO-8859-1'))
else:
errstr = text_type(err.decode('utf-8'))
result += errstr
if error is str.split:

@ -263,6 +275,7 @@ def __str__(self):
return ' '.join(self.exe)


@system_path_filter
def which_string(*args, **kwargs):
"""Like ``which()``, but return a string instead of an ``Executable``."""
path = kwargs.get('path', os.environ.get('PATH', ''))

@ -272,13 +285,21 @@ def which_string(*args, **kwargs):
path = path.split(os.pathsep)

for name in args:
if os.path.sep in name:
exe = os.path.abspath(name)
win_candidates = []
if sys.platform == "win32" and (not name.endswith(".exe")
and not name.endswith(".bat")):
win_candidates = [name + ext for ext in ['.exe', '.bat']]
candidate_names = [name] if not win_candidates else win_candidates

for candidate_name in candidate_names:
if os.path.sep in candidate_name:
exe = os.path.abspath(candidate_name)
if os.path.isfile(exe) and os.access(exe, os.X_OK):
return exe
else:
for directory in path:
exe = os.path.join(directory, name)
directory = path_to_os_path(directory).pop()
exe = os.path.join(directory, candidate_name)
if os.path.isfile(exe) and os.access(exe, os.X_OK):
return exe

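A standalone sketch of the candidate-name expansion added to which_string above: on Windows a bare name is expanded to the common executable suffixes before the PATH search, mirroring the win_candidates logic (the names used here are illustrative):

    import sys

    def candidate_names(name):
        # Mirror of the win_candidates expansion: bare names get .exe/.bat appended.
        if sys.platform == 'win32' and not (name.endswith('.exe') or name.endswith('.bat')):
            return [name + ext for ext in ['.exe', '.bat']]
        return [name]

    print(candidate_names('cmake'))      # ['cmake.exe', 'cmake.bat'] on Windows, ['cmake'] elsewhere
    print(candidate_names('cmake.exe'))  # ['cmake.exe'] everywhere
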
@ -8,37 +8,38 @@
TODO: this is really part of spack.config. Consolidate it.
"""
import contextlib
import errno
import getpass
import os
import re
import stat
import subprocess
import sys
import tempfile
from sys import platform as _platform

from six.moves.urllib.parse import urlparse

import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
from llnl.util.lang import memoized

import spack.paths
import spack.util.spack_yaml as syaml

if _platform == "win32":
import win32security
is_windows = sys.platform == 'win32'

__all__ = [
'substitute_config_variables',
'substitute_path_variables',
'canonicalize_path']


# Substitutions to perform
replacements = {
def replacements():
# break circular import from spack.util.executable
import spack.paths
return {
'spack': spack.paths.prefix,
'user': getpass.getuser(),
'tempdir': tempfile.gettempdir(),
'user_cache_path': spack.paths.user_cache_path,
}
'user_cache_path': spack.paths.user_cache_path}


# This is intended to be longer than the part of the install path
# spack generates from the root path we give it. Included in the

@ -60,6 +61,62 @@
SPACK_PATH_PADDING_CHARS = '__spack_path_placeholder__'


def is_path_url(path):
if '\\' in path:
return False
url_tuple = urlparse(path)
return bool(url_tuple.scheme) and len(url_tuple.scheme) > 1


def path_to_os_path(*pths):
"""
Takes an arbitrary number of positional parameters,
converts each argument of type string to use a normalized
filepath separator, and returns a list of all values
"""
ret_pths = []
for pth in pths:
if type(pth) is str and\
not is_path_url(pth):
pth = marshall_path(pth, mode=Path.platform_path)
ret_pths.append(pth)
return ret_pths


def system_path_filter(_func=None, arg_slice=None):
"""
Filters function arguments to account for platform path separators.
Optional slicing range can be specified to select specific arguments

This decorator takes all (or a slice) of a method's positional arguments
and normalizes usage of filepath separators on a per platform basis.

Note: **kwargs, urls, and any type that is not a string are ignored
so in such cases where path normalization is required, that should be
handled by calling path_to_os_path directly as needed.

Parameters:
arg_slice (slice): a slice object specifying the slice of arguments
in the decorated method over which filepath separators are
normalized
"""
from functools import wraps

def holder_func(func):
@wraps(func)
def path_filter_caller(*args, **kwargs):
args = list(args)
if arg_slice:
args[arg_slice] = path_to_os_path(*args[arg_slice])
else:
args = path_to_os_path(*args)
return func(*args, **kwargs)
return path_filter_caller
if _func:
return holder_func(_func)
return holder_func


@memoized
def get_system_path_max():
# Choose a conservative default

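The decorator and helper added in this hunk are the core of the platform-agnostic path handling. A minimal standalone sketch of the same pattern (the separator swap stands in for marshall_path, which is introduced in the next hunk, and the URL detection is omitted):

    import os
    from functools import wraps

    def path_to_os_path(*pths):
        # Convert every string argument to the current platform's separator.
        other = '\\' if os.sep == '/' else '/'
        return [p.replace(other, os.sep) if isinstance(p, str) else p for p in pths]

    def system_path_filter(_func=None, arg_slice=None):
        # Decorate a function so that all (or a slice of) its positional
        # arguments are separator-normalized before the call.
        def holder_func(func):
            @wraps(func)
            def path_filter_caller(*args, **kwargs):
                args = list(args)
                if arg_slice:
                    args[arg_slice] = path_to_os_path(*args[arg_slice])
                else:
                    args = path_to_os_path(*args)
                return func(*args, **kwargs)
            return path_filter_caller
        return holder_func(_func) if _func else holder_func

    @system_path_filter
    def join_two(a, b):
        return os.path.join(a, b)

    print(join_two('C:\\user', 'root'))   # C:\user\root on Windows, C:/user/root elsewhere

The dump_environment and pickle_environment hunks earlier in this diff use the arg_slice=slice(1) form so that only the path argument is normalized and the environment dict passed alongside it is left untouched.
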
@ -77,28 +134,37 @@ def get_system_path_max():
return sys_max_path_length


def get_owner_uid(path, err_msg=None):
if not os.path.exists(path):
mkdirp(path, mode=stat.S_IRWXU)
class Path:
"""
Describes the filepath separator types
in an enum style
with a helper attribute
exposing the path type of
the current platform.
"""
unix = 0
windows = 1
platform_path = windows if is_windows\
else unix

p_stat = os.stat(path)
if p_stat.st_mode & stat.S_IRWXU != stat.S_IRWXU:
tty.error("Expected {0} to support mode {1}, but it is {2}"
.format(path, stat.S_IRWXU, p_stat.st_mode))

raise OSError(errno.EACCES,
err_msg.format(path, path) if err_msg else "")
def marshall_path(path, mode=Path.unix):
"""
Format path to use consistent, platform specific
separators.

Parameters:
path (str): the path to be normalized, must be a string
or expose the replace method.
mode (Path): the path file separator style to normalize the
passed path to. Default is unix style, i.e. '/'

"""
if mode == Path.windows:
path = path.replace('/', '\\')
else:
p_stat = os.stat(path)

if _platform != "win32":
owner_uid = p_stat.st_uid
else:
sid = win32security.GetFileSecurity(
path, win32security.OWNER_SECURITY_INFORMATION) \
.GetSecurityDescriptorOwner()
owner_uid = win32security.LookupAccountSid(None, sid)[0]
return owner_uid
path = path.replace('\\', '/')
return path

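A rough standalone sketch of the Path/marshall_path pair defined above, showing the two conversion directions (is_windows mirrors the module-level flag):

    import sys

    is_windows = sys.platform == 'win32'

    class Path:
        # Enum-style separator styles, plus the style of the running platform.
        unix = 0
        windows = 1
        platform_path = windows if is_windows else unix

    def marshall_path(path, mode=Path.unix):
        # Rewrite separators toward the requested style.
        if mode == Path.windows:
            return path.replace('/', '\\')
        return path.replace('\\', '/')

    print(marshall_path(r'C:\user\root'))                    # C:/user/root
    print(marshall_path('C:/user/root', mode=Path.windows))  # C:\user\root
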
def substitute_config_variables(path):

@ -118,17 +184,18 @@ def substitute_config_variables(path):
environment yaml files.
"""
import spack.environment as ev  # break circular
_replacements = replacements()
env = ev.active_environment()
if env:
replacements.update({'env': env.path})
_replacements.update({'env': env.path})
else:
# If a previous invocation added env, remove it
replacements.pop('env', None)
_replacements.pop('env', None)

# Look up replacements
def repl(match):
m = match.group(0).strip('${}')
return replacements.get(m.lower(), match.group(0))
return _replacements.get(m.lower(), match.group(0))

# Replace $var or ${var}.
return re.sub(r'(\$\w+\b|\$\{\w+\})', repl, path)

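Because replacements() now returns a fresh dict per call, the 'env' entry can come and go between invocations without mutating module state. A standalone sketch of the lookup loop above (the sample variables are illustrative):

    import re

    def substitute(path, _replacements):
        # Resolve $var or ${var} against the lookup table; unknown names pass through.
        def repl(match):
            m = match.group(0).strip('${}')
            return _replacements.get(m.lower(), match.group(0))
        return re.sub(r'(\$\w+\b|\$\{\w+\})', repl, path)

    print(substitute('$tempdir/$user/stage', {'tempdir': '/tmp', 'user': 'alice'}))
    # /tmp/alice/stage
    print(substitute('${unknown}/x', {}))
    # ${unknown}/x
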
@ -247,7 +314,7 @@ def padding_filter(string):
"""
global _filter_re

pad = spack.util.path.SPACK_PATH_PADDING_CHARS
pad = SPACK_PATH_PADDING_CHARS
if not _filter_re:
longest_prefix = longest_prefix_re(pad)
regex = (

@ -275,6 +342,7 @@ def filter_padding():
This is needed because Spack's debug output gets extremely long when we use a
long padded installation path.
"""
import spack.config
padding = spack.config.get("config:install_tree:padded_length", None)
if padding:
# filter out all padding from the install command output

@ -11,9 +11,11 @@
import ntpath
import posixpath
import re
import sys

import six.moves.urllib.parse as urllib_parse
from six import string_types
from six.moves.urllib.request import url2pathname

import spack.util.path

@ -75,6 +77,7 @@ def parse(url, scheme='file'):
if isinstance(url, string_types) else url)

(scheme, netloc, path, params, query, _) = url_obj

scheme = (scheme or 'file').lower()

# This is the first way that a windows path can be parsed.

@ -120,6 +123,9 @@ def parse(url, scheme='file'):
if update_netloc:
netloc, path = path[:2], path[2:]

if sys.platform == "win32":
path = path.replace('\\', '/')

return urllib_parse.ParseResult(scheme=scheme,
netloc=netloc,
path=path,

@ -194,7 +200,8 @@ def join(base_url, path, *extra, **kwargs):
'file:///opt/spack'
"""
paths = [
(x if isinstance(x, string_types) else x.geturl())
(x.replace('\\', '/') if isinstance(x, string_types)
else x.geturl().replace('\\', '/'))
for x in itertools.chain((base_url, path), extra)]
n = len(paths)
last_abs_component = None

@ -289,6 +296,9 @@ def _join(base_url, path, *extra, **kwargs):
netloc = path_tokens.pop(0)
base_path = posixpath.join('', *path_tokens)

if sys.platform == "win32":
base_path = base_path.replace('\\', '/')

return format(urllib_parse.ParseResult(scheme=scheme,
netloc=netloc,
path=base_path,

@ -165,6 +165,9 @@ def warn_no_ssl_cert_checking():

def push_to_url(
local_file_path, remote_path, keep_original=True, extra_args=None):
if sys.platform == "win32":
if remote_path[1] == ':':
remote_path = "file:///" + remote_path
remote_url = url_util.parse(remote_path)
verify_ssl = spack.config.get('config:verify_ssl')

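The drive-letter check added to push_to_url guards against a bare Windows path being misread as a URL, since the text before the first colon would otherwise be taken as the scheme. A small illustration of what the rewrite does (the path is hypothetical; the real code goes through url_util.parse rather than urllib directly):

    from urllib.parse import urlparse

    remote_path = r'C:\mirror\build_cache'
    print(urlparse(remote_path).scheme)    # 'c'  -- drive letter misread as a scheme
    if remote_path[1] == ':':
        remote_path = 'file:///' + remote_path
    print(urlparse(remote_path).scheme)    # 'file'
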
@ -649,6 +652,7 @@ def find_versions_of_archive(
versions = {}
matched = set()
for url in archive_urls + sorted(links):
url = url.replace("\\", "/")
if any(re.search(r, url) for r in regexes):
try:
ver = spack.url.parse_version(url)