Mirror of https://github.com/docker/compose.git (synced 2025-07-23 13:45:00 +02:00)
Rename a couple of functions in parallel.py
Signed-off-by: Aanand Prasad <aanand.prasad@gmail.com>
parent 720dc893e2
commit ebae76bee8
--- a/compose/parallel.py
+++ b/compose/parallel.py
@@ -32,7 +32,7 @@ def parallel_execute(objects, func, get_name, msg, get_deps=None):
     for obj in objects:
         writer.initialize(get_name(obj))
 
-    events = parallel_execute_stream(objects, func, get_deps)
+    events = parallel_execute_iter(objects, func, get_deps)
 
     errors = {}
     results = []
@@ -86,7 +86,7 @@ class State(object):
         return set(self.objects) - self.started - self.finished - self.failed
 
 
-def parallel_execute_stream(objects, func, get_deps):
+def parallel_execute_iter(objects, func, get_deps):
     """
     Runs func on objects in parallel while ensuring that func is
     ran on object only after it is ran on all its dependencies.
@@ -130,7 +130,7 @@ def parallel_execute_stream(objects, func, get_deps):
         yield event
 
 
-def queue_producer(obj, func, results):
+def producer(obj, func, results):
     """
     The entry point for a producer thread which runs func on a single object.
     Places a tuple on the results queue once func has either returned or raised.
@@ -165,7 +165,7 @@ def feed_queue(objects, func, get_deps, results, state):
             for dep in deps
         ):
             log.debug('Starting producer thread for {}'.format(obj))
-            t = Thread(target=queue_producer, args=(obj, func, results))
+            t = Thread(target=producer, args=(obj, func, results))
             t.daemon = True
             t.start()
             state.started.add(obj)
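For orientation, the renamed `producer` is the thread entry point described by the docstring above: it runs `func` on one object and puts a tuple on a shared results queue whether `func` returns or raises, while `feed_queue` only starts a producer thread once an object's dependencies are done. Below is a minimal standalone sketch of that pattern, not the compose implementation itself; the `queue.Queue` results container and the exact `(obj, result, exception)` tuple layout inside `producer` are assumptions based on the docstring and on what the test below consumes.

```python
from queue import Queue
from threading import Thread


def producer(obj, func, results):
    # Run func on a single object and report either its return value
    # or the exception it raised, as the docstring above describes.
    try:
        results.put((obj, func(obj), None))
    except Exception as e:
        results.put((obj, None, e))


results = Queue()
objects = ['db', 'web']

for obj in objects:
    t = Thread(target=producer, args=(obj, str.upper, results))
    t.daemon = True
    t.start()

for _ in objects:
    obj, result, exception = results.get()
    print(obj, result, exception)   # e.g. "db DB None"; order is not guaranteed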
--- a/tests/unit/parallel_test.py
+++ b/tests/unit/parallel_test.py
@@ -5,7 +5,7 @@ import six
 from docker.errors import APIError
 
 from compose.parallel import parallel_execute
-from compose.parallel import parallel_execute_stream
+from compose.parallel import parallel_execute_iter
 from compose.parallel import UpstreamError
 
 
@@ -81,7 +81,7 @@ def test_parallel_execute_with_upstream_errors():
     events = [
         (obj, result, type(exception))
         for obj, result, exception
-        in parallel_execute_stream(objects, process, get_deps)
+        in parallel_execute_iter(objects, process, get_deps)
     ]
 
     assert (cache, None, type(None)) in events
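The test above already shows the shape of the events that `parallel_execute_iter` yields. For completeness, here is a minimal call-site sketch against the three-argument signature shown in this diff, assuming `compose` is importable and that `get_deps` returns the collection of objects a given object depends on (an inference from the docstring, not spelled out in this diff); later compose versions may change the signature.

```python
from compose.parallel import parallel_execute_iter

objects = ['db', 'web']
dependencies = {'db': [], 'web': ['db']}   # 'web' waits for 'db'


def process(obj):
    return obj.upper()


def get_deps(obj):
    return dependencies[obj]


# Events arrive as (obj, result, exception) tuples in completion order;
# an object is only processed after all of its dependencies have finished.
for obj, result, exception in parallel_execute_iter(objects, process, get_deps):
    print(obj, result, exception)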