Mirror of https://github.com/docker/compose.git (synced 2025-07-08 14:24:26 +02:00)

Merge pull request #6501 from chris-crone/build-fixes

Various build fixes

Commit: 3cddd1b670
@@ -17,6 +17,8 @@ ENV LANG en_US.UTF-8
RUN useradd -d /home/user -m -s /bin/bash user
WORKDIR /code/

+# FIXME(chris-crone): virtualenv 16.3.0 breaks build, force 16.2.0 until fixed
+RUN pip install virtualenv==16.2.0
RUN pip install tox==2.1.1

ADD requirements.txt /code/
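The hunk above (the repository Dockerfile, judging by the instructions) pins virtualenv to sidestep a regression in 16.3.0. A minimal sketch of how one might confirm the pin inside the built image, assuming pkg_resources is available there:

# Hypothetical sanity check, not part of the repository: verify that the image
# really carries the pinned virtualenv so the FIXME can be revisited later.
import pkg_resources

version = pkg_resources.get_distribution("virtualenv").version
assert version == "16.2.0", "expected pinned virtualenv 16.2.0, got %s" % version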
@@ -25,6 +27,7 @@ ADD .pre-commit-config.yaml /code/
ADD setup.py /code/
ADD tox.ini /code/
ADD compose /code/compose/
+ADD README.md /code/
RUN tox --notest

ADD . /code/
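ADD README.md /code/ is most likely needed because Python packaging reads the README at build time; a common pattern is sketched below with hypothetical names (the real setup.py may differ):

# Illustrative setup.py fragment showing why README.md must be in the build
# context; the name and metadata here are placeholders, not compose's values.
import os
from setuptools import setup

here = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(here, "README.md")) as f:
    long_description = f.read()

setup(
    name="example-package",
    version="0.0.0",
    long_description=long_description,
    long_description_content_type="text/markdown",
)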
@@ -291,7 +291,7 @@ class Service(object):
                c for c in stopped_containers if self._containers_have_diverged([c])
            ]
            for c in divergent_containers:
                c.remove()

            all_containers = list(set(all_containers) - set(divergent_containers))

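For context, the code in this hunk removes stopped containers that have diverged from the current configuration and drops them from the working set. A self-contained sketch of that step with a stand-in container class (compose's real Container lives in compose.container):

# Stand-in objects; only the set-difference step mirrors the code above.
class FakeContainer(object):
    def __init__(self, name, diverged):
        self.name = name
        self.diverged = diverged

all_containers = [FakeContainer("db_1", False), FakeContainer("web_1", True)]
stopped_containers = list(all_containers)
divergent_containers = [c for c in stopped_containers if c.diverged]

all_containers = list(set(all_containers) - set(divergent_containers))
print([c.name for c in all_containers])  # ['db_1']; note set() does not preserve order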
@@ -467,50 +467,50 @@ class Service(object):

    def _execute_convergence_recreate(self, containers, scale, timeout, detached, start,
                                      renew_anonymous_volumes):
        if scale is not None and len(containers) > scale:
            self._downscale(containers[scale:], timeout)
            containers = containers[:scale]

        def recreate(container):
            return self.recreate_container(
                container, timeout=timeout, attach_logs=not detached,
                start_new_container=start, renew_anonymous_volumes=renew_anonymous_volumes
            )
        containers, errors = parallel_execute(
            containers,
            recreate,
            lambda c: c.name,
            "Recreating",
        )
        for error in errors.values():
            raise OperationFailedError(error)

        if scale is not None and len(containers) < scale:
            containers.extend(self._execute_convergence_create(
                scale - len(containers), detached, start
            ))
        return containers

    def _execute_convergence_start(self, containers, scale, timeout, detached, start):
        if scale is not None and len(containers) > scale:
            self._downscale(containers[scale:], timeout)
            containers = containers[:scale]
        if start:
            _, errors = parallel_execute(
                containers,
                lambda c: self.start_container_if_stopped(c, attach_logs=not detached, quiet=True),
                lambda c: c.name,
                "Starting",
            )

            for error in errors.values():
                raise OperationFailedError(error)

        if scale is not None and len(containers) < scale:
            containers.extend(self._execute_convergence_create(
                scale - len(containers), detached, start
            ))
        return containers

    def _downscale(self, containers, timeout=None):
        def stop_and_remove(container):
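Both convergence methods above lean on the same pattern: run one job per container through parallel_execute, collect per-container errors, then fail fast. A self-contained sketch of that pattern, using a sequential stand-in for compose.parallel.parallel_execute (the real helper runs the jobs concurrently and prints the "Recreating <name>" progress lines):

class OperationFailedError(Exception):
    pass

def parallel_execute(objects, func, get_name, msg):
    # Sequential stand-in: compose's real helper fans the calls out to threads.
    results, errors = [], {}
    for obj in objects:
        try:
            results.append(func(obj))
        except Exception as exc:
            errors[get_name(obj)] = exc
    return results, errors

class FakeContainer(object):
    def __init__(self, name):
        self.name = name

def recreate(container):
    # Placeholder for Service.recreate_container(...)
    return FakeContainer(container.name + "-recreated")

containers, errors = parallel_execute(
    [FakeContainer("web_1"), FakeContainer("web_2")],
    recreate,
    lambda c: c.name,
    "Recreating",
)
for error in errors.values():
    raise OperationFailedError(error)  # the first collected error aborts convergence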
@@ -1,6 +1,6 @@
coverage==4.4.2
ddt==1.2.0
flake8==3.5.0
-mock>=1.0.1
+mock==2.0.0
pytest==3.6.3
pytest-cov==2.5.1
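mock is now pinned exactly rather than floating; it is the PyPI backport of unittest.mock, and typical test usage looks like this (illustrative only, not taken from the compose test suite):

import os
import mock  # the pinned PyPI backport of unittest.mock

with mock.patch("os.path.exists", return_value=True) as fake_exists:
    assert os.path.exists("/definitely/not/there")
fake_exists.assert_called_once_with("/definitely/not/there")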
@@ -5,7 +5,7 @@ set -ex
./script/clean

TAG="docker-compose"
-docker build -t "$TAG" . | tail -n 200
+docker build -t "$TAG" .
docker run \
    --rm --entrypoint="script/build/linux-entrypoint" \
    -v $(pwd)/dist:/code/dist \
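One effect of dropping the "| tail -n 200" pipe above: under "set -e" without pipefail, a pipeline reports only the last command's exit status, so a failing docker build could previously slip through unnoticed. A small demonstration of that shell behaviour, driven from Python (assumes a POSIX shell with "false" and "tail" available):

import subprocess

print(subprocess.call("false | tail -n 200", shell=True))
# -> 0: the pipeline's status is tail's, so the failure of "false" is hidden

print(subprocess.call(["/bin/bash", "-c", "set -o pipefail; false | tail -n 200"]))
# -> 1: with pipefail the failure propagates, as it also does for an unpiped command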
@@ -13,13 +13,13 @@ if ! [ ${DEPLOYMENT_TARGET} == "$(macos_version)" ]; then
    SDK_SHA1=dd228a335194e3392f1904ce49aff1b1da26ca62
fi

-OPENSSL_VERSION=1.1.0h
+OPENSSL_VERSION=1.1.0j
OPENSSL_URL=https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz
-OPENSSL_SHA1=0fc39f6aa91b6e7f4d05018f7c5e991e1d2491fd
+OPENSSL_SHA1=dcad1efbacd9a4ed67d4514470af12bbe2a1d60a

-PYTHON_VERSION=3.6.6
+PYTHON_VERSION=3.6.8
PYTHON_URL=https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz
-PYTHON_SHA1=ae1fc9ddd29ad8c1d5f7b0d799ff0787efeb9652
+PYTHON_SHA1=09fcc4edaef0915b4dedbfb462f1cd15f82d3a6f

#
# Install prerequisites.
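The OpenSSL and Python bumps above come with refreshed SHA1 pins, which let the build verify each downloaded tarball before unpacking it. A sketch of that check (hypothetical helper; the actual script does the equivalent in shell):

import hashlib

def sha1_of(path, chunk_size=1 << 20):
    digest = hashlib.sha1()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Usage against the pinned values above (file paths are examples):
# assert sha1_of("openssl-1.1.0j.tar.gz") == "dcad1efbacd9a4ed67d4514470af12bbe2a1d60a"
# assert sha1_of("Python-3.6.8.tgz") == "09fcc4edaef0915b4dedbfb462f1cd15f82d3a6f"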
@@ -193,7 +193,7 @@ class TestConsumeQueue(object):
        queue.put(item)

        generator = consume_queue(queue, True)
-        assert next(generator) is 'foobar-1'
+        assert next(generator) == 'foobar-1'

    def test_item_is_none_when_timeout_is_hit(self):
        queue = Queue()
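The test fix above replaces an identity check with an equality check: comparing a computed string to a literal with "is" only passes when CPython happens to intern both objects, and newer flake8 releases flag the pattern (F632). A short illustration:

a = "foobar-1"
b = "foobar-" + str(1)   # equal value, built at runtime, typically a distinct object
print(a == b)            # True: value equality, which is what the test intends
print(a is b)            # usually False: identity depends on interning, an implementation detail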