Merge pull request #3056 from dnephin/fix_shutdown_errors

Fix signal handling with pyinstaller.
Aanand Prasad 2016-03-01 14:56:27 -08:00
commit af12c78915
3 changed files with 26 additions and 15 deletions
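All three files rely on ShutdownException from compose.cli.signals, which is imported but not shown in this diff. As a rough orientation only, here is a minimal sketch of what such a signals module might look like (the handler names and wiring below are assumptions, not taken from this commit):

# Hypothetical sketch of a compose.cli.signals module; not part of this diff.
import signal


class ShutdownException(Exception):
    # Raised in place of KeyboardInterrupt so shutdown can unwind the stack
    # reliably, including in pyinstaller-built binaries.
    pass


def shutdown(signal_number, frame):
    # Translate SIGINT/SIGTERM into an exception that the top-level dispatch
    # code can catch alongside KeyboardInterrupt.
    raise ShutdownException()


def set_signal_handlers(handler=shutdown):
    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGTERM, handler)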


@@ -54,7 +54,7 @@ def main():
     try:
         command = TopLevelCommand()
         command.sys_dispatch()
-    except KeyboardInterrupt:
+    except (KeyboardInterrupt, signals.ShutdownException):
         log.error("Aborting.")
         sys.exit(1)
     except (UserError, NoSuchService, ConfigurationError) as e:


@@ -10,6 +10,7 @@ try:
 except ImportError:
     from queue import Queue, Empty  # Python 3.x

+from compose.cli.signals import ShutdownException

 STOP = object()
@@ -47,7 +48,7 @@ class Multiplexer(object):
                 pass
             # See https://github.com/docker/compose/issues/189
             except thread.error:
-                raise KeyboardInterrupt()
+                raise ShutdownException()

     def _init_readers(self):
         for iterator in self.iterators:
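With this change, code that drains the Multiplexer sees ShutdownException instead of KeyboardInterrupt when the queue read fails with thread.error. A hedged consumer sketch (the constructor argument and the loop() method are assumptions based on how the class is used elsewhere in compose; neither appears in this diff):

# Hypothetical fragment; Multiplexer and ShutdownException come from this file,
# everything else (containers, the logs() call) is assumed for illustration.
mux = Multiplexer([container.logs(stream=True) for container in containers])
try:
    for line in mux.loop():
        print(line)
except ShutdownException:
    # Interrupted while blocked on the internal queue; exit quietly.
    pass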


@@ -6,9 +6,11 @@ import sys
 from threading import Thread

 from docker.errors import APIError
+from six.moves import _thread as thread
 from six.moves.queue import Empty
 from six.moves.queue import Queue

+from compose.cli.signals import ShutdownException
 from compose.utils import get_output_stream
@@ -26,19 +28,7 @@ def parallel_execute(objects, func, index_func, msg):
     objects = list(objects)
     stream = get_output_stream(sys.stderr)
     writer = ParallelStreamWriter(stream, msg)
-
-    for obj in objects:
-        writer.initialize(index_func(obj))
-
-    q = Queue()
-
-    # TODO: limit the number of threads #1828
-    for obj in objects:
-        t = Thread(
-            target=perform_operation,
-            args=(func, obj, q.put, index_func(obj)))
-        t.daemon = True
-        t.start()
+    q = setup_queue(writer, objects, func, index_func)

     done = 0
     errors = {}
@@ -48,6 +38,9 @@ def parallel_execute(objects, func, index_func, msg):
             msg_index, result = q.get(timeout=1)
         except Empty:
             continue
+        # See https://github.com/docker/compose/issues/189
+        except thread.error:
+            raise ShutdownException()

         if isinstance(result, APIError):
             errors[msg_index] = "error", result.explanation
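The final hunk below factors the thread setup out of parallel_execute into a setup_queue helper. For context, parallel_execute is invoked elsewhere in compose with a collection of objects, a per-object operation, a labelling function, and a progress message; a hedged call-site sketch (the container objects and the stop call are illustrative assumptions, not from this diff):

# Hypothetical call site; only the parallel_execute signature is taken from the diff above.
parallel_execute(
    objects=containers,
    func=lambda c: c.stop(timeout=10),
    index_func=lambda c: c.name,
    msg='Stopping',
)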
@@ -68,6 +61,23 @@ def parallel_execute(objects, func, index_func, msg):
         raise error


+def setup_queue(writer, objects, func, index_func):
+    for obj in objects:
+        writer.initialize(index_func(obj))
+
+    q = Queue()
+
+    # TODO: limit the number of threads #1828
+    for obj in objects:
+        t = Thread(
+            target=perform_operation,
+            args=(func, obj, q.put, index_func(obj)))
+        t.daemon = True
+        t.start()
+
+    return q
+
+
 class ParallelStreamWriter(object):
     """Write out messages for operations happening in parallel.