Mac OS: support Python >= 3.8 by using fork-based multiprocessing (#18124)

As detailed in https://bugs.python.org/issue33725, starting new
processes with 'fork' on Mac OS is not guaranteed to work in general.
As of Python 3.8, the default start method on Mac OS was therefore
changed to 'spawn' to avoid this issue.

Spack depends on the fork-based method to preserve file descriptors
transparently, to preserve global state, and to avoid pickling some
objects. An effort is underway to remove the dependence on fork-based
process spawning (see #18205). In the meantime, this change allows Spack
to run with Python 3.8 on Mac OS by explicitly choosing the 'fork'
start method.
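
The reliance on fork can be illustrated with a minimal sketch (illustrative
only, not Spack code; the CONFIG, log_fd, and build.log names are made up):
globals and raw file descriptors set up by the parent are visible to a
forked child, while a spawned child starts from a fresh interpreter and
sees neither.

    import multiprocessing
    import os
    import sys

    CONFIG = {}     # global state the parent mutates before starting a child
    log_fd = None   # raw file descriptor opened by the parent


    def child():
        # With the 'fork' start method, CONFIG and log_fd were copied into
        # the child at fork time, so this write succeeds. With 'spawn', the
        # module is re-imported from scratch: CONFIG is empty and log_fd is
        # still None.
        os.write(log_fd, ('child sees %s\n' % CONFIG).encode())


    if __name__ == '__main__':
        CONFIG['verbose'] = True
        log_fd = os.open('build.log', os.O_WRONLY | os.O_CREAT | os.O_APPEND)

        ctx = (multiprocessing.get_context('fork')
               if sys.version_info >= (3,) else multiprocessing)
        p = ctx.Process(target=child)
        p.start()
        p.join()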

Co-authored-by: Peter Josef Scheibel <scheibel1@llnl.gov>
Co-authored-by: Adam J. Stewart <ajstewart426@gmail.com>
Co-authored-by: Todd Gamblin <tgamblin@llnl.gov>
Author: Rui Xue
Date:   2020-09-02 02:15:39 -05:00 (committed by GitHub)
Commit: d9b945f663 (parent 0740a4ac7e)
9 changed files with 35 additions and 14 deletions

@@ -27,7 +27,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh
@@ -42,7 +42,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh
@@ -56,7 +56,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh
@@ -71,7 +71,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: spack install
       run: |
         . .github/workflows/install_spack.sh

@@ -18,7 +18,7 @@ jobs:
         fetch-depth: 0
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: Install Python packages
       run: |
         pip install --upgrade pip six setuptools

@@ -16,7 +16,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: 3.8
     - name: Install Python Packages
       run: |
         pip install --upgrade pip

@@ -5,6 +5,7 @@
 from __future__ import division
 
+import multiprocessing
 import os
 import re
 import functools
@@ -19,6 +20,23 @@
 ignore_modules = [r'^\.#', '~$']
 
 
+# On macOS, Python 3.8 multiprocessing now defaults to the 'spawn' start
+# method. Spack cannot currently handle this, so force the process to start
+# using the 'fork' start method.
+#
+# TODO: This solution is not ideal, as the 'fork' start method can lead to
+# crashes of the subprocess. Figure out how to make 'spawn' work.
+#
+# See:
+# * https://github.com/spack/spack/pull/18124
+# * https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods  # noqa: E501
+# * https://bugs.python.org/issue33725
+if sys.version_info >= (3,):  # novm
+    fork_context = multiprocessing.get_context('fork')
+else:
+    fork_context = multiprocessing
+
+
 def index_by(objects, *funcs):
     """Create a hierarchy of dictionaries by splitting the supplied
     set of objects on unique values of the supplied functions.
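
For callers, fork_context is meant to stand in for the multiprocessing
module when creating processes, as the hunks below show. A hedged usage
sketch (the worker function and its message are made up):

    from llnl.util.lang import fork_context


    def worker(message):
        # Hypothetical worker; real callers pass writer daemons, build
        # functions, test helpers, and so on.
        print(message)


    if __name__ == '__main__':
        p = fork_context.Process(target=worker, args=('started via fork',))
        p.start()
        p.join()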

@@ -21,6 +21,7 @@
 from six import StringIO
 
 import llnl.util.tty as tty
+from llnl.util.lang import fork_context
 
 try:
     import termios
@@ -430,7 +431,7 @@ def __enter__(self):
         except BaseException:
             input_stream = None  # just don't forward input if this fails
 
-        self.process = multiprocessing.Process(
+        self.process = fork_context.Process(
             target=_writer_daemon,
             args=(
                 input_stream, read_fd, write_fd, self.echo, self.log_file,

@@ -24,6 +24,7 @@
 import traceback
 
 import llnl.util.tty.log as log
+from llnl.util.lang import fork_context
 
 from spack.util.executable import which
@@ -233,7 +234,7 @@ def start(self, **kwargs):
         ``minion_function``.
         """
-        self.proc = multiprocessing.Process(
+        self.proc = fork_context.Process(
            target=PseudoShell._set_up_and_run_controller_function,
            args=(self.controller_function, self.minion_function,
                  self.controller_timeout, self.sleep_time),

@@ -45,7 +45,7 @@
 import llnl.util.tty as tty
 from llnl.util.tty.color import cescape, colorize
 from llnl.util.filesystem import mkdirp, install, install_tree
-from llnl.util.lang import dedupe
+from llnl.util.lang import dedupe, fork_context
 
 import spack.build_systems.cmake
 import spack.build_systems.meson
@@ -886,7 +886,7 @@ def child_process(child_pipe, input_stream):
     if sys.stdin.isatty() and hasattr(sys.stdin, 'fileno'):
         input_stream = os.fdopen(os.dup(sys.stdin.fileno()))
 
-    p = multiprocessing.Process(
+    p = fork_context.Process(
         target=child_process, args=(child_pipe, input_stream))
     p.start()

@@ -9,7 +9,6 @@
 """
 import datetime
 import functools
-import multiprocessing
 import os
 import pytest
 import json
@@ -24,6 +23,7 @@
 import llnl.util.lock as lk
 from llnl.util.tty.colify import colify
+from llnl.util.lang import fork_context
 
 import spack.repo
 import spack.store
@@ -524,7 +524,7 @@ def read_and_modify():
         with mutable_database.write_transaction():
             _mock_remove('mpileaks ^zmpi')
 
-    p = multiprocessing.Process(target=read_and_modify, args=())
+    p = fork_context.Process(target=read_and_modify, args=())
     p.start()
     p.join()

@@ -51,13 +51,14 @@
 import glob
 import getpass
 from contextlib import contextmanager
-from multiprocessing import Process, Queue
+from multiprocessing import Queue
 
 import pytest
 
 import llnl.util.lock as lk
 import llnl.util.multiproc as mp
 from llnl.util.filesystem import touch
+from llnl.util.lang import fork_context
 
 #
@@ -214,7 +215,7 @@ def local_multiproc_test(*functions, **kwargs):
     b = mp.Barrier(len(functions), timeout=barrier_timeout)
     args = (b,) + tuple(kwargs.get('extra_args', ()))
 
-    procs = [Process(target=f, args=args, name=f.__name__)
+    procs = [fork_context.Process(target=f, args=args, name=f.__name__)
              for f in functions]
 
     for p in procs: