mirror of https://github.com/docker/compose.git
commit e35be8b0d4

.circleci/config.yml
@@ -7,7 +7,7 @@ jobs:
       - checkout
       - run:
           name: install python3
-          command: brew update > /dev/null && brew install python3
+          command: brew update > /dev/null && brew upgrade python
       - run:
           name: install tox
           command: sudo pip install --upgrade tox==2.1.1

CHANGELOG.md
@@ -38,10 +38,12 @@ Change log
 - Proxy configuration found in the `~/.docker/config.json` file now populates
   environment and build args for containers created by Compose

-- Added a `--use-aliases` flag to `docker-compose run`, indicating that
+- Added the `--use-aliases` flag to `docker-compose run`, indicating that
   network aliases declared in the service's config should be used for the
   running container

+- Added the `--include-deps` flag to `docker-compose pull`
+
 - `docker-compose run` now kills and removes the running container upon
   receiving `SIGHUP`

@@ -55,6 +57,9 @@ Change log
 - Fixed `.dockerignore` handling, notably with regard to absolute paths
   and last-line precedence rules

+- Fixed an issue where Compose would make costly DNS lookups when connecting
+  to the Engine when using Docker For Mac
+
 - Fixed a bug introduced in 1.19.0 which caused the default certificate path
   to not be honored by Compose

@@ -70,6 +75,9 @@ Change log
 - A `seccomp:<filepath>` entry in the `security_opt` config now correctly
   sends the contents of the file to the engine

+- ANSI output for `up` and `down` operations should no longer affect the wrong
+  lines
+
 - Improved support for non-unicode locales

 - Fixed a crash occurring on Windows when the user's home directory name
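
For illustration of the proxy entry above, a minimal Python sketch (the key names follow the docker client's `proxies` stanza in ~/.docker/config.json; the values here are hypothetical):

    # Given a ~/.docker/config.json stanza like this, Compose now injects
    # the matching variables (HTTP_PROXY, NO_PROXY, ...) into each
    # container's environment and build args.
    proxies_stanza = {
        "proxies": {
            "default": {
                "httpProxy": "http://proxy.example.com:3128",
                "noProxy": "*.internal",
            }
        }
    }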

Dockerfile.run
@@ -1,42 +1,20 @@
-FROM sgerrand/glibc-builder as glibc
-RUN apt-get install -yq bison
-
-ENV PKGDIR /pkgdata
-
-RUN mkdir -p /usr/glibc-compat/etc && touch /usr/glibc-compat/etc/ld.so.conf
-RUN /builder 2.27 /usr/glibc-compat || true
-RUN mkdir -p $PKGDIR
-RUN tar -xf /glibc-bin-2.27.tar.gz -C $PKGDIR
-RUN rm "$PKGDIR"/usr/glibc-compat/etc/rpc && \
-    rm -rf "$PKGDIR"/usr/glibc-compat/bin && \
-    rm -rf "$PKGDIR"/usr/glibc-compat/sbin && \
-    rm -rf "$PKGDIR"/usr/glibc-compat/lib/gconv && \
-    rm -rf "$PKGDIR"/usr/glibc-compat/lib/getconf && \
-    rm -rf "$PKGDIR"/usr/glibc-compat/lib/audit && \
-    rm -rf "$PKGDIR"/usr/glibc-compat/share && \
-    rm -rf "$PKGDIR"/usr/glibc-compat/var
-
-
 FROM alpine:3.6

-RUN apk update && apk add --no-cache openssl ca-certificates
-COPY --from=glibc /pkgdata/ /
+ENV GLIBC 2.27-r0
+ENV DOCKERBINS_SHA 1270dce1bd7e1838d62ae21d2505d87f16efc1d9074645571daaefdfd0c14054

-RUN mkdir -p /lib /lib64 /usr/glibc-compat/lib/locale /etc && \
+RUN apk update && apk add --no-cache openssl ca-certificates curl && \
+    curl -fsSL -o /etc/apk/keys/sgerrand.rsa.pub https://raw.githubusercontent.com/sgerrand/alpine-pkg-glibc/master/sgerrand.rsa.pub && \
+    curl -fsSL -o glibc-$GLIBC.apk https://github.com/sgerrand/alpine-pkg-glibc/releases/download/$GLIBC/glibc-$GLIBC.apk && \
+    apk add --no-cache glibc-$GLIBC.apk && \
     ln -s /lib/libz.so.1 /usr/glibc-compat/lib/ && \
     ln -s /lib/libc.musl-x86_64.so.1 /usr/glibc-compat/lib && \
-    ln -s /usr/glibc-compat/lib/ld-linux-x86-64.so.2 /lib/ld-linux-x86-64.so.2 && \
-    ln -s /usr/glibc-compat/lib/ld-linux-x86-64.so.2 /lib64/ld-linux-x86-64.so.2 && \
-    ln -s /usr/glibc-compat/etc/ld.so.cache /etc/ld.so.cache
-
-RUN apk add --no-cache curl && \
-    curl -fsSL -o dockerbins.tgz "https://download.docker.com/linux/static/stable/x86_64/docker-17.12.0-ce.tgz" && \
-    SHA256=692e1c72937f6214b1038def84463018d8e320c8eaf8530546c84c2f8f9c767d; \
-    echo "${SHA256}  dockerbins.tgz" | sha256sum -c - && \
+    curl -fsSL -o dockerbins.tgz "https://download.docker.com/linux/static/stable/x86_64/docker-17.12.1-ce.tgz" && \
+    echo "${DOCKERBINS_SHA}  dockerbins.tgz" | sha256sum -c - && \
     tar xvf dockerbins.tgz docker/docker --strip-components 1 && \
     mv docker /usr/local/bin/docker && \
     chmod +x /usr/local/bin/docker && \
-    rm dockerbins.tgz && \
+    rm dockerbins.tgz /etc/apk/keys/sgerrand.rsa.pub glibc-$GLIBC.apk && \
     apk del curl

 COPY dist/docker-compose-Linux-x86_64 /usr/local/bin/docker-compose
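
The binary download is now pinned through the DOCKERBINS_SHA ENV instead of an inline SHA256 assignment. As a rough Python illustration of what the `sha256sum -c` line verifies (the image itself just uses sha256sum):

    import hashlib

    DOCKERBINS_SHA = '1270dce1bd7e1838d62ae21d2505d87f16efc1d9074645571daaefdfd0c14054'

    def verify(path, expected):
        # compare the file's sha256 digest against the pinned value
        with open(path, 'rb') as f:
            digest = hashlib.sha256(f.read()).hexdigest()
        if digest != expected:
            raise SystemExit('checksum mismatch for {}'.format(path))

    verify('dockerbins.tgz', DOCKERBINS_SHA)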

compose/__init__.py
@@ -1,4 +1,4 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals

-__version__ = '1.20.0-rc1'
+__version__ = '1.20.0-rc2'

compose/config/config.py
@@ -1039,6 +1039,7 @@ def merge_service_dicts(base, override, version):
     md.merge_sequence('links', ServiceLink.parse)
     md.merge_sequence('secrets', types.ServiceSecret.parse)
     md.merge_sequence('configs', types.ServiceConfig.parse)
+    md.merge_sequence('security_opt', types.SecurityOpt.parse)
     md.merge_mapping('extra_hosts', parse_extra_hosts)

     for field in ['volumes', 'devices']:
@@ -1046,7 +1047,7 @@ def merge_service_dicts(base, override, version):

     for field in [
         'cap_add', 'cap_drop', 'expose', 'external_links',
-        'security_opt', 'volumes_from', 'device_cgroup_rules',
+        'volumes_from', 'device_cgroup_rules',
     ]:
         md.merge_field(field, merge_unique_items_lists, default=[])
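
The effect of moving `security_opt` out of the unique-items group: entries now merge through `merge_sequence`, which parses each item and keys it by its `merge_field` attribute (hence the `merge_field` property added to SecurityOpt below). A hedged sketch of the resulting behavior, with import paths assumed from the compose test suite:

    from compose.config import types
    from compose.config.config import merge_service_dicts
    from compose.const import COMPOSEFILE_V2_0 as V2_0

    base = {'security_opt': ['apparmor:unconfined']}
    override = {'security_opt': ['seccomp:unconfined']}
    merged = merge_service_dicts(base, override, V2_0)

    # both entries survive the merge, now as parsed SecurityOpt values
    assert types.SecurityOpt.parse('apparmor:unconfined') in merged['security_opt']
    assert types.SecurityOpt.parse('seccomp:unconfined') in merged['security_opt']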

compose/config/types.py
@@ -464,6 +464,8 @@ def normalize_port_dict(port):
 class SecurityOpt(namedtuple('_SecurityOpt', 'value src_file')):
     @classmethod
     def parse(cls, value):
+        if not isinstance(value, six.string_types):
+            return value
         # based on https://github.com/docker/cli/blob/9de1b162f/cli/command/container/opts.go#L673-L697
         con = value.split('=', 2)
         if len(con) == 1 and con[0] != 'no-new-privileges':
@@ -486,3 +488,7 @@ class SecurityOpt(namedtuple('_SecurityOpt', 'value src_file')):
         if self.src_file is not None:
             return 'seccomp:{}'.format(self.src_file)
         return self.value
+
+    @property
+    def merge_field(self):
+        return self.value
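
The new isinstance guard makes parse idempotent, which matters when a service extends another and its `security_opt` entries arrive already parsed (previously a non-string value would hit value.split and fail). A minimal sketch, assuming compose is importable:

    from compose.config import types

    opt = types.SecurityOpt.parse('apparmor:unconfined')
    # re-parsing an already-parsed value is now a no-op
    assert types.SecurityOpt.parse(opt) is opt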

compose/parallel.py
@@ -4,6 +4,7 @@ from __future__ import unicode_literals
 import logging
 import operator
 import sys
+from threading import Lock
 from threading import Semaphore
 from threading import Thread

@@ -42,7 +43,36 @@ class GlobalLimit(object):
         cls.global_limiter = Semaphore(value)


-def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None, parent_objects=None):
+def parallel_execute_watch(events, writer, errors, results, msg, get_name):
+    """ Watch events from a parallel execution, update status and fill errors and results.
+        Returns exception to re-raise.
+    """
+    error_to_reraise = None
+    for obj, result, exception in events:
+        if exception is None:
+            writer.write(msg, get_name(obj), 'done', green)
+            results.append(result)
+        elif isinstance(exception, ImageNotFound):
+            # This is to bubble up ImageNotFound exceptions to the client so we
+            # can prompt the user if they want to rebuild.
+            errors[get_name(obj)] = exception.explanation
+            writer.write(msg, get_name(obj), 'error', red)
+            error_to_reraise = exception
+        elif isinstance(exception, APIError):
+            errors[get_name(obj)] = exception.explanation
+            writer.write(msg, get_name(obj), 'error', red)
+        elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured)):
+            errors[get_name(obj)] = exception.msg
+            writer.write(msg, get_name(obj), 'error', red)
+        elif isinstance(exception, UpstreamError):
+            writer.write(msg, get_name(obj), 'error', red)
+        else:
+            errors[get_name(obj)] = exception
+            error_to_reraise = exception
+    return error_to_reraise
+
+
+def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None):
     """Runs func on objects in parallel while ensuring that func is
     ran on object only after it is ran on all its dependencies.

@@ -52,45 +82,21 @@ def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None, parent_objects=None):
     objects = list(objects)
     stream = get_output_stream(sys.stderr)

-    writer = ParallelStreamWriter(stream, msg)
+    if ParallelStreamWriter.instance:
+        writer = ParallelStreamWriter.instance
+    else:
+        writer = ParallelStreamWriter(stream)

-    display_objects = list(parent_objects) if parent_objects else objects
-
-    for obj in display_objects:
-        writer.add_object(get_name(obj))
-
-    # write data in a second loop to consider all objects for width alignment
-    # and avoid duplicates when parent_objects exists
     for obj in objects:
-        writer.write_initial(get_name(obj))
+        writer.add_object(msg, get_name(obj))
+    for obj in objects:
+        writer.write_initial(msg, get_name(obj))

     events = parallel_execute_iter(objects, func, get_deps, limit)

     errors = {}
     results = []
-    error_to_reraise = None
-
-    for obj, result, exception in events:
-        if exception is None:
-            writer.write(get_name(obj), 'done', green)
-            results.append(result)
-        elif isinstance(exception, ImageNotFound):
-            # This is to bubble up ImageNotFound exceptions to the client so we
-            # can prompt the user if they want to rebuild.
-            errors[get_name(obj)] = exception.explanation
-            writer.write(get_name(obj), 'error', red)
-            error_to_reraise = exception
-        elif isinstance(exception, APIError):
-            errors[get_name(obj)] = exception.explanation
-            writer.write(get_name(obj), 'error', red)
-        elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured)):
-            errors[get_name(obj)] = exception.msg
-            writer.write(get_name(obj), 'error', red)
-        elif isinstance(exception, UpstreamError):
-            writer.write(get_name(obj), 'error', red)
-        else:
-            errors[get_name(obj)] = exception
-            error_to_reraise = exception
+    error_to_reraise = parallel_execute_watch(events, writer, errors, results, msg, get_name)

     for obj_name, error in errors.items():
         stream.write("\nERROR: for {} {}\n".format(obj_name, error))

@@ -251,53 +257,59 @@ class ParallelStreamWriter(object):
     """

     noansi = False
+    lock = Lock()
+    instance = None

     @classmethod
     def set_noansi(cls, value=True):
         cls.noansi = value

-    def __init__(self, stream, msg):
+    def __init__(self, stream):
         self.stream = stream
-        self.msg = msg
         self.lines = []
         self.width = 0
+        ParallelStreamWriter.instance = self

-    def add_object(self, obj_index):
-        self.lines.append(obj_index)
-        self.width = max(self.width, len(obj_index))
-
-    def write_initial(self, obj_index):
-        if self.msg is None:
+    def add_object(self, msg, obj_index):
+        if msg is None:
             return
-        self.stream.write("{} {:<{width}} ... \r\n".format(
-            self.msg, self.lines[self.lines.index(obj_index)], width=self.width))
+        self.lines.append(msg + obj_index)
+        self.width = max(self.width, len(msg + ' ' + obj_index))
+
+    def write_initial(self, msg, obj_index):
+        if msg is None:
+            return
+        self.stream.write("{:<{width}} ... \r\n".format(
+            msg + ' ' + obj_index, width=self.width))
         self.stream.flush()

-    def _write_ansi(self, obj_index, status):
-        position = self.lines.index(obj_index)
+    def _write_ansi(self, msg, obj_index, status):
+        self.lock.acquire()
+        position = self.lines.index(msg + obj_index)
         diff = len(self.lines) - position
         # move up
         self.stream.write("%c[%dA" % (27, diff))
         # erase
         self.stream.write("%c[2K\r" % 27)
-        self.stream.write("{} {:<{width}} ... {}\r".format(self.msg, obj_index,
+        self.stream.write("{:<{width}} ... {}\r".format(msg + ' ' + obj_index,
                           status, width=self.width))
         # move back down
         self.stream.write("%c[%dB" % (27, diff))
         self.stream.flush()
+        self.lock.release()

-    def _write_noansi(self, obj_index, status):
-        self.stream.write("{} {:<{width}} ... {}\r\n".format(self.msg, obj_index,
+    def _write_noansi(self, msg, obj_index, status):
+        self.stream.write("{:<{width}} ... {}\r\n".format(msg + ' ' + obj_index,
                           status, width=self.width))
         self.stream.flush()

-    def write(self, obj_index, status, color_func):
-        if self.msg is None:
+    def write(self, msg, obj_index, status, color_func):
+        if msg is None:
             return
         if self.noansi:
-            self._write_noansi(obj_index, status)
+            self._write_noansi(msg, obj_index, status)
         else:
-            self._write_ansi(obj_index, color_func(status))
+            self._write_ansi(msg, obj_index, color_func(status))


 def parallel_operation(containers, operation, options, message):
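
A hedged usage sketch of the refactored API, modeled on the unit tests in this commit: the writer is now a shared singleton (which is why the tests reset ParallelStreamWriter.instance), and the status message travels with each call instead of living on the writer. The object list and message below are hypothetical:

    from compose.parallel import ParallelStreamWriter, parallel_execute

    ParallelStreamWriter.instance = None  # start from a fresh writer, as the tests do
    results, errors = parallel_execute(
        objects=['db', 'web'],
        func=lambda name: name.upper(),   # the work to run per object
        get_name=lambda name: name,       # label shown on the status line
        msg='Recreating',                 # passed per call, no longer stored on the writer
    )
    assert errors == {}
    assert sorted(results) == ['DB', 'WEB']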

compose/service.py
@@ -402,8 +402,7 @@ class Service(object):
             [ServiceName(self.project, self.name, index) for index in range(i, i + scale)],
             lambda service_name: create_and_start(self, service_name.number),
             lambda service_name: self.get_container_name(service_name.service, service_name.number),
-            "Creating",
-            parent_objects=project_services
+            "Creating"
         )
         for error in errors.values():
             raise OperationFailedError(error)

contrib/completion/bash/docker-compose
@@ -179,18 +179,22 @@ _docker_compose_docker_compose() {
 			_filedir "y?(a)ml"
 			return
 			;;
 		--log-level)
 			COMPREPLY=( $( compgen -W "debug info warning error critical" -- "$cur" ) )
 			return
 			;;
+		--project-directory)
+			_filedir -d
+			return
+			;;
-		$(__docker_compose_to_extglob "$top_level_options_with_args") )
+		$(__docker_compose_to_extglob "$daemon_options_with_args") )
 			return
 			;;
 	esac

 	case "$cur" in
 		-*)
-			COMPREPLY=( $( compgen -W "$top_level_boolean_options $top_level_options_with_args --help -h --no-ansi --verbose --version -v" -- "$cur" ) )
+			COMPREPLY=( $( compgen -W "$daemon_boolean_options $daemon_options_with_args $top_level_options_with_args --help -h --no-ansi --verbose --version -v" -- "$cur" ) )
 			;;
 		*)
 			COMPREPLY=( $( compgen -W "${commands[*]}" -- "$cur" ) )

@@ -375,7 +379,7 @@ _docker_compose_ps() {
 _docker_compose_pull() {
 	case "$cur" in
 		-*)
-			COMPREPLY=( $( compgen -W "--help --ignore-pull-failures --parallel --quiet -q" -- "$cur" ) )
+			COMPREPLY=( $( compgen -W "--help --ignore-pull-failures --include-deps --parallel --quiet -q" -- "$cur" ) )
 			;;
 		*)
 			__docker_compose_services_from_image

@@ -444,7 +448,7 @@ _docker_compose_run() {

 	case "$cur" in
 		-*)
-			COMPREPLY=( $( compgen -W "-d --detach --entrypoint -e --help --label -l --name --no-deps --publish -p --rm --service-ports -T --user -u --volume -v --workdir -w" -- "$cur" ) )
+			COMPREPLY=( $( compgen -W "--detach -d --entrypoint -e --help --label -l --name --no-deps --publish -p --rm --service-ports -T --use-aliases --user -u --volume -v --workdir -w" -- "$cur" ) )
 			;;
 		*)
 			__docker_compose_services_all

@@ -605,14 +609,12 @@ _docker_compose() {

 	# Options for the docker daemon that have to be passed to secondary calls to
 	# docker-compose executed by this script.
 	# Other global options that are not relevant for secondary calls are defined in
 	# `_docker_compose_docker_compose`.
-	local top_level_boolean_options="
+	local daemon_boolean_options="
 		--skip-hostname-check
 		--tls
 		--tlsverify
 	"
-	local top_level_options_with_args="
+	local daemon_options_with_args="
 		--file -f
 		--host -H
 		--project-directory

@@ -622,6 +624,11 @@ _docker_compose() {
 		--tlskey
 	"

+	# These options require special treatment when searching the command.
+	local top_level_options_with_args="
+		--log-level
+	"
+
 	COMPREPLY=()
 	local cur prev words cword
 	_get_comp_words_by_ref -n : cur prev words cword

@@ -634,15 +641,18 @@ _docker_compose() {

 	while [ $counter -lt $cword ]; do
 		case "${words[$counter]}" in
-			$(__docker_compose_to_extglob "$top_level_boolean_options") )
+			$(__docker_compose_to_extglob "$daemon_boolean_options") )
 				local opt=${words[counter]}
 				top_level_options+=($opt)
 				;;
-			$(__docker_compose_to_extglob "$top_level_options_with_args") )
+			$(__docker_compose_to_extglob "$daemon_options_with_args") )
 				local opt=${words[counter]}
 				local arg=${words[++counter]}
 				top_level_options+=($opt $arg)
 				;;
+			$(__docker_compose_to_extglob "$top_level_options_with_args") )
+				(( counter++ ))
+				;;
 			-*)
 				;;
 			*)

requirements.txt
@@ -2,7 +2,7 @@ backports.ssl-match-hostname==3.5.0.1; python_version < '3'
 cached-property==1.3.0
 certifi==2017.4.17
 chardet==3.0.4
-docker==3.1.0
+docker==3.1.1
 docker-pycreds==0.2.1
 dockerpty==0.4.1
 docopt==0.6.2

script/run/run.sh
@@ -15,7 +15,7 @@

 set -e

-VERSION="1.20.0-rc1"
+VERSION="1.20.0-rc2"
 IMAGE="docker/compose:$VERSION"


setup.py
@@ -36,7 +36,7 @@ install_requires = [
     'requests >= 2.6.1, != 2.11.0, != 2.12.2, != 2.18.0, < 2.19',
     'texttable >= 0.9.0, < 0.10',
     'websocket-client >= 0.32.0, < 1.0',
-    'docker >= 3.1.0, < 4.0',
+    'docker >= 3.1.1, < 4.0',
     'dockerpty >= 0.4.1, < 0.5',
     'six >= 1.3.0, < 2',
     'jsonschema >= 2.5.1, < 3',

tests/integration/service_test.py
@@ -35,6 +35,7 @@ from compose.const import LABEL_SERVICE
 from compose.const import LABEL_VERSION
 from compose.container import Container
 from compose.errors import OperationFailedError
+from compose.parallel import ParallelStreamWriter
 from compose.project import OneOffFilter
 from compose.service import ConvergencePlan
 from compose.service import ConvergenceStrategy

@@ -1197,6 +1198,7 @@ class ServiceTest(DockerClientTestCase):
         service.create_container(number=next_number)
         service.create_container(number=next_number + 1)

+        ParallelStreamWriter.instance = None
         with mock.patch('sys.stderr', new_callable=StringIO) as mock_stderr:
             service.scale(2)
         for container in service.containers():

@@ -1220,6 +1222,7 @@ class ServiceTest(DockerClientTestCase):
         for container in service.containers():
             assert not container.is_running

+        ParallelStreamWriter.instance = None
         with mock.patch('sys.stderr', new_callable=StringIO) as mock_stderr:
             service.scale(2)

tests/unit/config/config_test.py
@@ -4508,6 +4508,29 @@ class ExtendsTest(unittest.TestCase):
         for svc in services:
             assert svc['ports'] == [types.ServicePort('80', None, None, None, None)]

+    def test_extends_with_security_opt(self):
+        tmpdir = py.test.ensuretemp('test_extends_with_ports')
+        self.addCleanup(tmpdir.remove)
+        tmpdir.join('docker-compose.yml').write("""
+            version: '2'
+
+            services:
+              a:
+                image: nginx
+                security_opt:
+                  - apparmor:unconfined
+                  - seccomp:unconfined
+
+              b:
+                extends:
+                  service: a
+        """)
+        services = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+        assert len(services) == 2
+        for svc in services:
+            assert types.SecurityOpt.parse('apparmor:unconfined') in svc['security_opt']
+            assert types.SecurityOpt.parse('seccomp:unconfined') in svc['security_opt']
+
+
 @pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason='paths use slash')
 class ExpandPathTest(unittest.TestCase):

tests/unit/parallel_test.py
@@ -143,6 +143,7 @@ class ParallelTest(unittest.TestCase):


 def test_parallel_execute_alignment(capsys):
+    ParallelStreamWriter.instance = None
     results, errors = parallel_execute(
         objects=["short", "a very long name"],
         func=lambda x: x,

@@ -158,6 +159,7 @@ def test_parallel_execute_alignment(capsys):


 def test_parallel_execute_ansi(capsys):
+    ParallelStreamWriter.instance = None
     ParallelStreamWriter.set_noansi(value=False)
     results, errors = parallel_execute(
         objects=["something", "something more"],

@@ -173,6 +175,7 @@ def test_parallel_execute_ansi(capsys):


 def test_parallel_execute_noansi(capsys):
+    ParallelStreamWriter.instance = None
     ParallelStreamWriter.set_noansi()
     results, errors = parallel_execute(
         objects=["something", "something more"],

tests/unit/service_test.py
@@ -20,6 +20,7 @@ from compose.const import LABEL_PROJECT
 from compose.const import LABEL_SERVICE
 from compose.const import SECRETS_PATH
 from compose.container import Container
+from compose.parallel import ParallelStreamWriter
 from compose.project import OneOffFilter
 from compose.service import build_ulimits
 from compose.service import build_volume_binding

@@ -727,6 +728,7 @@ class ServiceTest(unittest.TestCase):
     @mock.patch('compose.service.log', autospec=True)
     def test_only_log_warning_when_host_ports_clash(self, mock_log):
         self.mock_client.inspect_image.return_value = {'Id': 'abcd'}
+        ParallelStreamWriter.instance = None
         name = 'foo'
         service = Service(
             name,