From d9b945f663d815273590e21e1f4caed61fc11453 Mon Sep 17 00:00:00 2001
From: Rui Xue
Date: Wed, 2 Sep 2020 02:15:39 -0500
Subject: [PATCH] Mac OS: support Python >= 3.8 by using fork-based
 multiprocessing (#18124)

As detailed in https://bugs.python.org/issue33725, starting new processes
with 'fork' on Mac OS is not guaranteed to work in general. As of Python 3.8
the default process spawning mechanism was changed to avoid this issue.

Spack depends on the fork-based method to preserve file descriptors
transparently, to preserve global state, and to avoid pickling some objects.
An effort is underway to remove dependence on fork-based process spawning
(see #18205). In the meantime, this allows Spack to run with Python 3.8 on
Mac OS by explicitly choosing to use 'fork'.

Co-authored-by: Peter Josef Scheibel
Co-authored-by: Adam J. Stewart
Co-authored-by: Todd Gamblin
---
 .github/workflows/macos_python.yml      |  8 ++++----
 .github/workflows/macos_unit_tests.yaml |  2 +-
 .github/workflows/style_and_docs.yaml   |  2 +-
 lib/spack/llnl/util/lang.py             | 18 ++++++++++++++++++
 lib/spack/llnl/util/tty/log.py          |  3 ++-
 lib/spack/llnl/util/tty/pty.py          |  3 ++-
 lib/spack/spack/build_environment.py    |  4 ++--
 lib/spack/spack/test/database.py        |  4 ++--
 lib/spack/spack/test/llnl/util/lock.py  |  5 +++--
 9 files changed, 35 insertions(+), 14 deletions(-)

diff --git a/.github/workflows/macos_python.yml b/.github/workflows/macos_python.yml
index feb9dcef04..856d850cc8 100644
--- a/.github/workflows/macos_python.yml
+++ b/.github/workflows/macos_python.yml
@@ -27,7 +27,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh
@@ -42,7 +42,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh
@@ -56,7 +56,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: spack install
      run: |
        . .github/workflows/install_spack.sh
@@ -71,7 +71,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh
diff --git a/.github/workflows/macos_unit_tests.yaml b/.github/workflows/macos_unit_tests.yaml
index 1e60f76918..24affa8327 100644
--- a/.github/workflows/macos_unit_tests.yaml
+++ b/.github/workflows/macos_unit_tests.yaml
@@ -18,7 +18,7 @@ jobs:
         fetch-depth: 0
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: Install Python packages
       run: |
         pip install --upgrade pip six setuptools
diff --git a/.github/workflows/style_and_docs.yaml b/.github/workflows/style_and_docs.yaml
index 2277d5598f..84eb0efeab 100644
--- a/.github/workflows/style_and_docs.yaml
+++ b/.github/workflows/style_and_docs.yaml
@@ -16,7 +16,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: Install Python Packages
       run: |
         pip install --upgrade pip
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index e746ce096c..0bcbebba72 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -5,6 +5,7 @@
 
 from __future__ import division
 
+import multiprocessing
 import os
 import re
 import functools
@@ -19,6 +20,23 @@
 ignore_modules = [r'^\.#', '~$']
 
 
+# On macOS, Python 3.8 multiprocessing now defaults to the 'spawn' start
+# method. Spack cannot currently handle this, so force the process to start
+# using the 'fork' start method.
+#
+# TODO: This solution is not ideal, as the 'fork' start method can lead to
+# crashes of the subprocess. Figure out how to make 'spawn' work.
+#
+# See:
+# * https://github.com/spack/spack/pull/18124
+# * https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods  # noqa: E501
+# * https://bugs.python.org/issue33725
+if sys.version_info >= (3,):  # novm
+    fork_context = multiprocessing.get_context('fork')
+else:
+    fork_context = multiprocessing
+
+
 def index_by(objects, *funcs):
     """Create a hierarchy of dictionaries by splitting the supplied set of
     objects on unique values of the supplied functions.
diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py
index de5ffa8eec..97fba0592c 100644
--- a/lib/spack/llnl/util/tty/log.py
+++ b/lib/spack/llnl/util/tty/log.py
@@ -21,6 +21,7 @@
 from six import StringIO
 
 import llnl.util.tty as tty
+from llnl.util.lang import fork_context
 
 try:
     import termios
@@ -430,7 +431,7 @@ def __enter__(self):
             except BaseException:
                 input_stream = None  # just don't forward input if this fails
 
-            self.process = multiprocessing.Process(
+            self.process = fork_context.Process(
                 target=_writer_daemon,
                 args=(
                     input_stream, read_fd, write_fd, self.echo, self.log_file,
diff --git a/lib/spack/llnl/util/tty/pty.py b/lib/spack/llnl/util/tty/pty.py
index 84c272a6e2..79f4ce20cf 100644
--- a/lib/spack/llnl/util/tty/pty.py
+++ b/lib/spack/llnl/util/tty/pty.py
@@ -24,6 +24,7 @@
 import traceback
 
 import llnl.util.tty.log as log
+from llnl.util.lang import fork_context
 
 from spack.util.executable import which
 
@@ -233,7 +234,7 @@ def start(self, **kwargs):
         ``minion_function``.
 
         """
-        self.proc = multiprocessing.Process(
+        self.proc = fork_context.Process(
            target=PseudoShell._set_up_and_run_controller_function,
            args=(self.controller_function, self.minion_function,
                  self.controller_timeout, self.sleep_time),
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 4a57dde77b..be6b2cc1ab 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -45,7 +45,7 @@
 import llnl.util.tty as tty
 from llnl.util.tty.color import cescape, colorize
 from llnl.util.filesystem import mkdirp, install, install_tree
-from llnl.util.lang import dedupe
+from llnl.util.lang import dedupe, fork_context
 
 import spack.build_systems.cmake
 import spack.build_systems.meson
@@ -886,7 +886,7 @@ def child_process(child_pipe, input_stream):
     if sys.stdin.isatty() and hasattr(sys.stdin, 'fileno'):
         input_stream = os.fdopen(os.dup(sys.stdin.fileno()))
 
-    p = multiprocessing.Process(
+    p = fork_context.Process(
         target=child_process, args=(child_pipe, input_stream))
     p.start()
 
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index a161a22908..a97243ae9c 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -9,7 +9,6 @@
 """
 import datetime
 import functools
-import multiprocessing
 import os
 import pytest
 import json
@@ -24,6 +23,7 @@
 
 import llnl.util.lock as lk
 from llnl.util.tty.colify import colify
+from llnl.util.lang import fork_context
 
 import spack.repo
 import spack.store
@@ -524,7 +524,7 @@ def read_and_modify():
         with mutable_database.write_transaction():
             _mock_remove('mpileaks ^zmpi')
 
-    p = multiprocessing.Process(target=read_and_modify, args=())
+    p = fork_context.Process(target=read_and_modify, args=())
     p.start()
     p.join()
 
diff --git a/lib/spack/spack/test/llnl/util/lock.py b/lib/spack/spack/test/llnl/util/lock.py
index b2b7cf85ac..99c1eba588 100644
--- a/lib/spack/spack/test/llnl/util/lock.py
+++ b/lib/spack/spack/test/llnl/util/lock.py
@@ -51,13 +51,14 @@
 import glob
 import getpass
 from contextlib import contextmanager
-from multiprocessing import Process, Queue
+from multiprocessing import Queue
 
 import pytest
 
 import llnl.util.lock as lk
 import llnl.util.multiproc as mp
 from llnl.util.filesystem import touch
+from llnl.util.lang import fork_context
 
 
 #
@@ -214,7 +215,7 @@ def local_multiproc_test(*functions, **kwargs):
     b = mp.Barrier(len(functions), timeout=barrier_timeout)
 
     args = (b,) + tuple(kwargs.get('extra_args', ()))
-    procs = [Process(target=f, args=args, name=f.__name__)
+    procs = [fork_context.Process(target=f, args=args, name=f.__name__)
             for f in functions]
 
     for p in procs: