Skip to content

Commit

Permalink
SuperSize: Better Ctrl-C & remove _TerminatePools
Browse files Browse the repository at this point in the history
* _TerminatePools no longer needed with Python 3
* Ctrl-C made to spew fewer backtraces
* Also tweaks an assert message & comment

Bug: None
Change-Id: Ie18e9b99acc9176e79843031f0ec9ef87b63aa4b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2956948
Auto-Submit: Andrew Grieve <agrieve@chromium.org>
Reviewed-by: Samuel Huang <huangs@chromium.org>
Commit-Queue: Andrew Grieve <agrieve@chromium.org>
Cr-Commit-Position: refs/heads/master@{#892176}
  • Loading branch information
agrieve authored and Chromium LUCI CQ committed Jun 14, 2021
1 parent 4897f16 commit d7f5312
Show file tree
Hide file tree
Showing 3 changed files with 23 additions and 50 deletions.
2 changes: 1 addition & 1 deletion tools/binary_size/libsupersize/nm.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ def RunNmOnIntermediates(target, tool_prefix, output_directory):
# llvm-nm can print 'no symbols' to stderr. Capture and count the number of
# lines, to be returned to the caller.
stdout, stderr = proc.communicate()
assert proc.returncode == 0
assert proc.returncode == 0, 'NM failed: ' + ' '.join(args)
num_no_symbols = len(stderr.splitlines())
lines = stdout.splitlines()
# Empty .a file has no output.
Expand Down
67 changes: 20 additions & 47 deletions tools/binary_size/libsupersize/parallel.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
# found in the LICENSE file.
"""Helpers related to multiprocessing."""

import atexit
import builtins
import itertools
import logging
Expand All @@ -18,7 +17,6 @@
if DISABLE_ASYNC:
logging.debug('Running in synchronous mode.')

_all_pools = None
_is_child_process = False
_silence_exceptions = False

Expand All @@ -27,6 +25,23 @@
_fork_kwargs = None


# Suppress the per-worker backtrace spam that Ctrl-C otherwise produces.
def _PatchMultiprocessing():
  """Monkeypatches BaseProcess.run so interrupted workers exit quietly.

  Without this, every pool worker prints its own KeyboardInterrupt traceback
  when the user hits Ctrl-C. The wrapper converts that (and the BrokenPipeError
  seen when the parent goes away) into a plain non-zero exit.
  """
  from multiprocessing import process
  original_run = process.BaseProcess.run

  def _quiet_run(self):
    try:
      return original_run(self)
    except (BrokenPipeError, KeyboardInterrupt):
      # Exit with failure status, but no traceback.
      sys.exit(1)

  process.BaseProcess.run = _quiet_run


_PatchMultiprocessing()


class _ImmediateResult(object):
def __init__(self, value):
self._value = value
Expand Down Expand Up @@ -68,7 +83,7 @@ def __init__(self, func):
def __call__(self, index, _=None):
try:
return self._func(*_fork_params[index], **_fork_kwargs)
except Exception as e:
except BaseException as e:
# Only keep the exception type for builtin exception types or else risk
# further marshalling exceptions.
exception_type = None
Expand All @@ -77,21 +92,17 @@ def __call__(self, index, _=None):
# multiprocessing is supposed to catch and return exceptions automatically
# but it doesn't seem to work properly :(.
return _ExceptionWrapper(traceback.format_exc(), exception_type)
except: # pylint: disable=bare-except
return _ExceptionWrapper(traceback.format_exc())


class _WrappedResult(object):
"""Allows for host-side logic to be run after child process has terminated.
* Unregisters associated pool _all_pools.
* Raises exception caught by _FuncWrapper.
* Allows for custom unmarshalling of return value.
"""

def __init__(self, result, pool=None, decode_func=None):
def __init__(self, result, decode_func=None):
self._result = result
self._pool = pool
self._decode_func = decode_func

def get(self):
Expand All @@ -104,9 +115,6 @@ def get(self):

def wait(self):
self._result.wait()
if self._pool:
_all_pools.remove(self._pool)
self._pool = None

def ready(self):
return self._result.ready()
Expand All @@ -115,34 +123,6 @@ def successful(self):
return self._result.successful()


def _TerminatePools():
  """Best-effort .terminate() of every live process pool.

  The multiprocessing docs say this should not be necessary, but in practice
  it is required when a child process raises or the user hits Ctrl-C.
  """
  global _silence_exceptions
  _silence_exceptions = True
  # atexit registered this handler before fork(), so child processes run it
  # too even though a child can never own a pool.
  if _is_child_process:
    return

  def _terminate_quietly(p):
    try:
      p.terminate()
    except:  # pylint: disable=bare-except
      pass

  # terminate() can block forever when invoked on the current thread, so
  # hand each pool off to its own daemon thread.
  for idx, pool in enumerate(_all_pools):
    worker = threading.Thread(name='Pool-Terminate-{}'.format(idx),
                              target=_terminate_quietly,
                              args=(pool, ))
    worker.daemon = True
    worker.start()


def _CheckForException(value):
if isinstance(value, _ExceptionWrapper):
global _silence_exceptions
Expand All @@ -154,7 +134,6 @@ def _CheckForException(value):


def _MakeProcessPool(job_params, **job_kwargs):
global _all_pools
global _fork_params
global _fork_kwargs
assert _fork_params is None
Expand All @@ -165,10 +144,6 @@ def _MakeProcessPool(job_params, **job_kwargs):
ret = multiprocessing.Pool(pool_size)
_fork_params = None
_fork_kwargs = None
if _all_pools is None:
_all_pools = []
atexit.register(_TerminatePools)
_all_pools.append(ret)
return ret


Expand All @@ -179,13 +154,12 @@ def ForkAndCall(func, args, decode_func=None):
A Result object (call .get() to get the return value)
"""
if DISABLE_ASYNC:
pool = None
result = _ImmediateResult(func(*args))
else:
pool = _MakeProcessPool([args]) # Omit |kwargs|.
result = pool.apply_async(_FuncWrapper(func), (0, ))
pool.close()
return _WrappedResult(result, pool=pool, decode_func=decode_func)
return _WrappedResult(result, decode_func=decode_func)


def BulkForkAndCall(func, arg_tuples, **kwargs):
Expand Down Expand Up @@ -214,7 +188,6 @@ def BulkForkAndCall(func, arg_tuples, **kwargs):
finally:
pool.close()
pool.join()
_all_pools.remove(pool)


def CallOnThread(func, *args, **kwargs):
Expand Down
4 changes: 2 additions & 2 deletions tools/binary_size/libsupersize/zip_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ def UnzipToTemp(zip_path, inner_path):
"""
try:
_, suffix = os.path.splitext(inner_path)
# Can't use NamedTemporaryFile() because it uses atexit, which does not play
# well with fork().
# Can't use NamedTemporaryFile() because it deletes via __del__, which will
# trigger in both this and the fork()'ed processes.
fd, temp_file = tempfile.mkstemp(suffix=suffix)
logging.debug('Extracting %s', inner_path)
with zipfile.ZipFile(zip_path) as z:
Expand Down

0 comments on commit d7f5312

Please sign in to comment.