Merge pull request #3967 from docker/bump-1.8.1

Bump 1.8.1
Joffrey F 2016-10-05 14:21:15 -07:00 committed by GitHub
commit 08f1ea7a93
28 changed files with 318 additions and 69 deletions

View File

@@ -1,6 +1,47 @@
 Change log
 ==========
+1.8.1 (2016-09-22)
+-----------------
+Bug Fixes
+- Fixed a bug where users using a credentials store were not able
+  to access their private images.
+- Fixed a bug where users using identity tokens to authenticate
+  were not able to access their private images.
+- Fixed a bug where an `HttpHeaders` entry in the docker configuration
+  file would cause Compose to crash when trying to build an image.
+- Fixed a few bugs related to the handling of Windows paths in volume
+  binding declarations.
+- Fixed a bug where Compose would sometimes crash while trying to
+  read a streaming response from the engine.
+- Fixed an issue where Compose would crash when encountering an API error
+  while streaming container logs.
+- Fixed an issue where Compose would erroneously try to output logs from
+  drivers not handled by the Engine's API.
+- Fixed a bug where options from the `docker-machine config` command would
+  not be properly interpreted by Compose.
+- Fixed a bug where the connection to the Docker Engine would
+  sometimes fail when running a large number of services simultaneously.
+- Fixed an issue where Compose would sometimes print a misleading
+  suggestion message when running the `bundle` command.
+- Fixed a bug where connection errors would not be handled properly by
+  Compose during the project initialization phase.
+- Fixed a bug where a misleading error would appear when encountering
+  a connection timeout.
 1.8.0 (2016-06-14)
 -----------------

View File

@@ -1,4 +1,4 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals
-__version__ = '1.8.0'
+__version__ = '1.8.1'

View File

@@ -46,8 +46,9 @@ class NeedsPush(Exception):
 class NeedsPull(Exception):
-    def __init__(self, image_name):
+    def __init__(self, image_name, service_name):
         self.image_name = image_name
+        self.service_name = service_name
 class MissingDigests(Exception):
@@ -74,7 +75,7 @@ def get_image_digests(project, allow_push=False):
         except NeedsPush as e:
             needs_push.add(e.image_name)
         except NeedsPull as e:
-            needs_pull.add(e.image_name)
+            needs_pull.add(e.service_name)
     if needs_push or needs_pull:
         raise MissingDigests(needs_push, needs_pull)
@@ -109,7 +110,7 @@ def get_image_digest(service, allow_push=False):
         return image['RepoDigests'][0]
     if 'build' not in service.options:
-        raise NeedsPull(service.image_name)
+        raise NeedsPull(service.image_name, service.name)
     if not allow_push:
         raise NeedsPush(service.image_name)

View File

@@ -9,17 +9,19 @@ from docker.tls import TLSConfig
 from docker.utils import kwargs_from_env
 from ..const import HTTP_TIMEOUT
+from ..const import IS_WINDOWS_PLATFORM
 from .errors import UserError
 from .utils import generate_user_agent
+from .utils import unquote_path
 log = logging.getLogger(__name__)
 def tls_config_from_options(options):
     tls = options.get('--tls', False)
-    ca_cert = options.get('--tlscacert')
-    cert = options.get('--tlscert')
-    key = options.get('--tlskey')
+    ca_cert = unquote_path(options.get('--tlscacert'))
+    cert = unquote_path(options.get('--tlscert'))
+    key = unquote_path(options.get('--tlskey'))
     verify = options.get('--tlsverify')
     skip_hostname_check = options.get('--skip-hostname-check', False)
@@ -70,4 +72,9 @@ def docker_client(environment, version=None, tls_config=None, host=None,
     kwargs['user_agent'] = generate_user_agent()
+    if 'base_url' not in kwargs and IS_WINDOWS_PLATFORM:
+        # docker-py 1.10 defaults to using npipes, but we don't want that
+        # change in compose yet - use the default TCP connection instead.
+        kwargs['base_url'] = 'tcp://127.0.0.1:2375'
     return Client(**kwargs)
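
For context, a minimal sketch of what the new Windows default means in practice. It mirrors the unit test added later in this PR rather than a real Windows session: with no DOCKER_HOST configured, docker-py 1.10 would otherwise switch to its npipe transport, so Compose pins the classic TCP endpoint.

```python
# Sketch only: patches IS_WINDOWS_PLATFORM the same way the new unit test does.
import os
from unittest import mock  # the external `mock` package on Python 2

from compose.cli.docker_client import docker_client

with mock.patch('compose.cli.docker_client.IS_WINDOWS_PLATFORM', True):
    os.environ.pop('DOCKER_HOST', None)   # no host configured explicitly
    client = docker_client(os.environ)
    assert client.base_url == 'http://127.0.0.1:2375'  # docker-py normalizes tcp:// to http://
```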

View File

@@ -53,7 +53,7 @@ def handle_connection_errors(client):
         log_api_error(e, client.api_version)
         raise ConnectionError()
     except (ReadTimeout, socket.timeout) as e:
-        log_timeout_error()
+        log_timeout_error(client.timeout)
         raise ConnectionError()

View File

@@ -6,6 +6,7 @@ from collections import namedtuple
 from itertools import cycle
 from threading import Thread
+from docker.errors import APIError
 from six.moves import _thread as thread
 from six.moves.queue import Empty
 from six.moves.queue import Queue
@@ -176,8 +177,14 @@ def build_log_generator(container, log_args):
 def wait_on_exit(container):
-    exit_code = container.wait()
-    return "%s exited with code %s\n" % (container.name, exit_code)
+    try:
+        exit_code = container.wait()
+        return "%s exited with code %s\n" % (container.name, exit_code)
+    except APIError as e:
+        return "Unexpected API error for %s (HTTP code %s)\nResponse body:\n%s\n" % (
+            container.name, e.response.status_code,
+            e.response.text or '[empty]'
+        )
 def start_producer_thread(thread_args):
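
To make the new fallback concrete, here is the message format `wait_on_exit()` now returns instead of letting the `APIError` crash the log printer (the container name, status code, and body below are hypothetical):

```python
# Hypothetical values, purely to illustrate the message layout.
name, status_code, body = 'web_1', 500, ''
print("Unexpected API error for %s (HTTP code %s)\nResponse body:\n%s\n" % (
    name, status_code, body or '[empty]'))
# Unexpected API error for web_1 (HTTP code 500)
# Response body:
# [empty]
```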

View File

@@ -23,6 +23,7 @@ from ..config.environment import Environment
 from ..config.serialize import serialize_config
 from ..const import DEFAULT_TIMEOUT
 from ..const import IS_WINDOWS_PLATFORM
+from ..errors import StreamParseError
 from ..progress_stream import StreamOutputError
 from ..project import NoSuchService
 from ..project import OneOffFilter
@@ -75,7 +76,7 @@ def main():
     except NeedsBuildError as e:
         log.error("Service '%s' needs to be built, but --no-build was passed." % e.service.name)
         sys.exit(1)
-    except errors.ConnectionError:
+    except (errors.ConnectionError, StreamParseError):
         sys.exit(1)
@@ -615,8 +616,7 @@ class TopLevelCommand(object):
         Options:
             -f, --force   Don't ask to confirm removal
            -v            Remove any anonymous volumes attached to containers
-            -a, --all     Obsolete. Also remove one-off containers created by
-                          docker-compose run
+            -a, --all     Deprecated - no effect.
         """
         if options.get('--all'):
             log.warn(

View File

@@ -122,3 +122,11 @@ def generate_user_agent():
     else:
         parts.append("{}/{}".format(p_system, p_release))
     return " ".join(parts)
+def unquote_path(s):
+    if not s:
+        return s
+    if s[0] == '"' and s[-1] == '"':
+        return s[1:-1]
+    return s
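
A brief usage sketch of the new helper: `unquote_path` strips one pair of surrounding double quotes, such as the quoted TLS paths emitted by `docker-machine config`, and passes everything else through unchanged (the paths below are illustrative only):

```python
from compose.cli.utils import unquote_path

print(unquote_path('"/machine/certs/ca.pem"'))  # /machine/certs/ca.pem
print(unquote_path('/plain/path/key.pem'))      # unchanged: no surrounding quotes
print(unquote_path('"unbalanced'))              # unchanged: quotes must match at both ends
print(unquote_path(None))                       # falsy values pass straight through
```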

View File

@@ -3,7 +3,6 @@ from __future__ import unicode_literals
 import functools
 import logging
-import ntpath
 import os
 import string
 import sys
@@ -16,6 +15,7 @@ from cached_property import cached_property
 from ..const import COMPOSEFILE_V1 as V1
 from ..const import COMPOSEFILE_V2_0 as V2_0
 from ..utils import build_string_dict
+from ..utils import splitdrive
 from .environment import env_vars_from_file
 from .environment import Environment
 from .environment import split_env
@@ -940,13 +940,7 @@ def split_path_mapping(volume_path):
     path. Using splitdrive so windows absolute paths won't cause issues with
     splitting on ':'.
     """
-    # splitdrive is very naive, so handle special cases where we can be sure
-    # the first character is not a drive.
-    if (volume_path.startswith('.') or volume_path.startswith('~') or
-            volume_path.startswith('/')):
-        drive, volume_config = '', volume_path
-    else:
-        drive, volume_config = ntpath.splitdrive(volume_path)
+    drive, volume_config = splitdrive(volume_path)
     if ':' in volume_config:
         (host, container) = volume_config.split(':', 1)

View File

@@ -12,6 +12,7 @@ import six
 from compose.config.config import V1
 from compose.config.errors import ConfigurationError
 from compose.const import IS_WINDOWS_PLATFORM
+from compose.utils import splitdrive
 class VolumeFromSpec(namedtuple('_VolumeFromSpec', 'source mode type')):
@@ -114,41 +115,23 @@ def parse_extra_hosts(extra_hosts_config):
     return extra_hosts_dict
-def normalize_paths_for_engine(external_path, internal_path):
+def normalize_path_for_engine(path):
     """Windows paths, c:\my\path\shiny, need to be changed to be compatible with
     the Engine. Volume paths are expected to be linux style /c/my/path/shiny/
     """
-    if not IS_WINDOWS_PLATFORM:
-        return external_path, internal_path
-    if external_path:
-        drive, tail = os.path.splitdrive(external_path)
-        if drive:
-            external_path = '/' + drive.lower().rstrip(':') + tail
-        external_path = external_path.replace('\\', '/')
-    return external_path, internal_path.replace('\\', '/')
+    drive, tail = splitdrive(path)
+    if drive:
+        path = '/' + drive.lower().rstrip(':') + tail
+    return path.replace('\\', '/')
 class VolumeSpec(namedtuple('_VolumeSpec', 'external internal mode')):
     @classmethod
-    def parse(cls, volume_config):
-        """Parse a volume_config path and split it into external:internal[:mode]
-        parts to be returned as a valid VolumeSpec.
-        """
-        if IS_WINDOWS_PLATFORM:
-            # relative paths in windows expand to include the drive, eg C:\
-            # so we join the first 2 parts back together to count as one
-            drive, tail = os.path.splitdrive(volume_config)
-            parts = tail.split(":")
-            if drive:
-                parts[0] = drive + parts[0]
-        else:
-            parts = volume_config.split(':')
+    def _parse_unix(cls, volume_config):
+        parts = volume_config.split(':')
         if len(parts) > 3:
             raise ConfigurationError(
@@ -156,13 +139,11 @@ class VolumeSpec(namedtuple('_VolumeSpec', 'external internal mode')):
                 "external:internal[:mode]" % volume_config)
         if len(parts) == 1:
-            external, internal = normalize_paths_for_engine(
-                None,
-                os.path.normpath(parts[0]))
+            external = None
+            internal = os.path.normpath(parts[0])
         else:
-            external, internal = normalize_paths_for_engine(
-                os.path.normpath(parts[0]),
-                os.path.normpath(parts[1]))
+            external = os.path.normpath(parts[0])
+            internal = os.path.normpath(parts[1])
         mode = 'rw'
         if len(parts) == 3:
@@ -170,6 +151,48 @@ class VolumeSpec(namedtuple('_VolumeSpec', 'external internal mode')):
         return cls(external, internal, mode)
+    @classmethod
+    def _parse_win32(cls, volume_config):
+        # relative paths in windows expand to include the drive, eg C:\
+        # so we join the first 2 parts back together to count as one
+        mode = 'rw'
+        def separate_next_section(volume_config):
+            drive, tail = splitdrive(volume_config)
+            parts = tail.split(':', 1)
+            if drive:
+                parts[0] = drive + parts[0]
+            return parts
+        parts = separate_next_section(volume_config)
+        if len(parts) == 1:
+            internal = normalize_path_for_engine(os.path.normpath(parts[0]))
+            external = None
+        else:
+            external = parts[0]
+            parts = separate_next_section(parts[1])
+            external = normalize_path_for_engine(os.path.normpath(external))
+            internal = normalize_path_for_engine(os.path.normpath(parts[0]))
+        if len(parts) > 1:
+            if ':' in parts[1]:
+                raise ConfigurationError(
+                    "Volume %s has incorrect format, should be "
+                    "external:internal[:mode]" % volume_config
+                )
+            mode = parts[1]
+        return cls(external, internal, mode)
+    @classmethod
+    def parse(cls, volume_config):
+        """Parse a volume_config path and split it into external:internal[:mode]
+        parts to be returned as a valid VolumeSpec.
+        """
+        if IS_WINDOWS_PLATFORM:
+            return cls._parse_win32(volume_config)
+        else:
+            return cls._parse_unix(volume_config)
     def repr(self):
         external = self.external + ':' if self.external else ''
         return '{ext}{v.internal}:{v.mode}'.format(ext=external, v=self)
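
As a quick illustration of the Windows-specific parser (the expected values mirror the unit tests added later in this PR), `_parse_win32` reattaches the drive letter before splitting on `:`, then normalizes both sides for the Engine:

```python
from compose.config.types import VolumeSpec

# A drive letter on both the host and container side is handled correctly.
VolumeSpec._parse_win32('C:\\Users\\reimu\\scarlet:C:\\scarlet\\app:ro')
# -> VolumeSpec(external='/c/Users/reimu/scarlet', internal='/c/scarlet/app', mode='ro')

# Mixed notations also work; the mode defaults to 'rw' when omitted.
VolumeSpec._parse_win32('/c/Foo:C:\\bar')
# -> VolumeSpec(external='/c/Foo', internal='/c/bar', mode='rw')
```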

View File

@@ -163,7 +163,7 @@ class Container(object):
     @property
     def has_api_logs(self):
         log_type = self.log_driver
-        return not log_type or log_type != 'none'
+        return not log_type or log_type in ('json-file', 'journald')
     def attach_log_stream(self):
         """A log stream can only be attached if the container uses a json-file

View File

@@ -5,3 +5,8 @@ from __future__ import unicode_literals
 class OperationFailedError(Exception):
     def __init__(self, reason):
         self.msg = reason
+class StreamParseError(RuntimeError):
+    def __init__(self, reason):
+        self.msg = reason

View File

@@ -5,11 +5,16 @@ import codecs
 import hashlib
 import json
 import json.decoder
+import logging
+import ntpath
 import six
+from .errors import StreamParseError
 json_decoder = json.JSONDecoder()
+log = logging.getLogger(__name__)
 def get_output_stream(stream):
@@ -60,13 +65,21 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
         yield item
     if buffered:
-        yield decoder(buffered)
+        try:
+            yield decoder(buffered)
+        except Exception as e:
+            log.error(
+                'Compose tried decoding the following data chunk, but failed:'
+                '\n%s' % repr(buffered)
+            )
+            raise StreamParseError(e)
 def json_splitter(buffer):
     """Attempt to parse a json object from a buffer. If there is at least one
     object, return it and the rest of the buffer, otherwise return None.
     """
+    buffer = buffer.strip()
     try:
         obj, index = json_decoder.raw_decode(buffer)
         rest = buffer[json.decoder.WHITESPACE.match(buffer, index).end():]
@@ -96,3 +109,11 @@ def microseconds_from_time_nano(time_nano):
 def build_string_dict(source_dict):
     return dict((k, str(v if v is not None else '')) for k, v in source_dict.items())
+def splitdrive(path):
+    if len(path) == 0:
+        return ('', '')
+    if path[0] in ['.', '\\', '/', '~']:
+        return ('', path)
+    return ntpath.splitdrive(path)
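
A short sketch of why the `splitdrive` wrapper exists (the comment removed from `config.py` above called `ntpath.splitdrive` "very naive"): any path whose first character cannot start a drive is short-circuited before `ntpath` is consulted, so volume strings like `.:/app` keep their host side intact:

```python
import ntpath
from compose.utils import splitdrive

# ntpath treats any single character followed by ':' as a drive letter,
# which would split the host side of '.:/app' (mount the current directory).
print(ntpath.splitdrive('.:/app'))   # ('.:', '/app')  -- '.' mistaken for a drive
print(splitdrive('.:/app'))          # ('', '.:/app')  -- short-circuited

# Real drive letters still go through ntpath as before.
print(splitdrive('C:\\Users\\me'))   # ('C:', '\\Users\\me')
print(splitdrive(''))                # ('', '')
```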

View File

@@ -117,7 +117,7 @@ _docker_compose_bundle() {
             ;;
     esac
-    COMPREPLY=( $( compgen -W "--fetch-digests --help --output -o" -- "$cur" ) )
+    COMPREPLY=( $( compgen -W "--push-images --help --output -o" -- "$cur" ) )
 }

View File

@@ -207,6 +207,7 @@ __docker-compose_subcommand() {
         (bundle)
             _arguments \
                 $opts_help \
+                '--push-images[Automatically push images for any services which have a `build` option specified.]' \
                 '(--output -o)'{--output,-o}'[Path to write the bundle file to. Defaults to "<project name>.dab".]:file:_files' && ret=0
             ;;
         (config)

View File

@@ -39,7 +39,7 @@ which the release page specifies, in your terminal.
 The following is an example command illustrating the format:
-    curl -L https://github.com/docker/compose/releases/download/1.8.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
+    curl -L https://github.com/docker/compose/releases/download/1.8.1/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
 If you have problems installing with `curl`, see
 [Alternative Install Options](#alternative-install-options).
@@ -54,7 +54,7 @@ which the release page specifies, in your terminal.
 7. Test the installation.
     $ docker-compose --version
-    docker-compose version: 1.8.0
+    docker-compose version: 1.8.1
 ## Alternative install options
@@ -77,7 +77,7 @@ to get started.
 Compose can also be run inside a container, from a small bash script wrapper.
 To install compose as a container run:
-    $ curl -L https://github.com/docker/compose/releases/download/1.8.0/run.sh > /usr/local/bin/docker-compose
+    $ curl -L https://github.com/docker/compose/releases/download/1.8.1/run.sh > /usr/local/bin/docker-compose
     $ chmod +x /usr/local/bin/docker-compose
 ## Master builds

View File

@@ -17,8 +17,7 @@ Usage: rm [options] [SERVICE...]
 Options:
     -f, --force   Don't ask to confirm removal
     -v            Remove any anonymous volumes attached to containers
-    -a, --all     Also remove one-off containers created by
-                  docker-compose run
+    -a, --all     Deprecated - no effect.
 ```
 Removes stopped service containers.

View File

@@ -1,14 +1,15 @@
 PyYAML==3.11
 backports.ssl-match-hostname==3.5.0.1; python_version < '3'
 cached-property==1.2.0
-docker-py==1.9.0
+docker-py==1.10.3
 dockerpty==0.4.1
 docopt==0.6.1
 enum34==1.0.4; python_version < '3.4'
 functools32==3.2.3.post2; python_version < '3.2'
 ipaddress==1.0.16
 jsonschema==2.5.1
+pypiwin32==219; sys_platform == 'win32'
 requests==2.7.0
-six==1.7.3
+six==1.10.0
 texttable==0.8.4
 websocket-client==0.32.0

View File

@@ -15,7 +15,7 @@
 set -e
-VERSION="1.8.0"
+VERSION="1.8.1"
 IMAGE="docker/compose:$VERSION"

View File

@@ -15,7 +15,7 @@ desired_python_brew_version="2.7.9"
 python_formula="https://raw.githubusercontent.com/Homebrew/homebrew/1681e193e4d91c9620c4901efd4458d9b6fcda8e/Library/Formula/python.rb"
 desired_openssl_version="1.0.2h"
-desired_openssl_brew_version="1.0.2h"
+desired_openssl_brew_version="1.0.2h_1"
 openssl_formula="https://raw.githubusercontent.com/Homebrew/homebrew-core/30d3766453347f6e22b3ed6c74bb926d6def2eb5/Formula/openssl.rb"
 PATH="/usr/local/bin:$PATH"

View File

@@ -34,7 +34,7 @@ install_requires = [
     'requests >= 2.6.1, < 2.8',
     'texttable >= 0.8.1, < 0.9',
     'websocket-client >= 0.32.0, < 1.0',
-    'docker-py >= 1.9.0, < 2.0',
+    'docker-py >= 1.10.3, < 2.0',
     'dockerpty >= 0.4.1, < 0.5',
     'six >= 1.3.0, < 2',
     'jsonschema >= 2.5.1, < 3',

View File

@@ -42,6 +42,14 @@ class DockerClientTestCase(unittest.TestCase):
             assert fake_log.error.call_count == 1
             assert '123' in fake_log.error.call_args[0][0]
+        with mock.patch('compose.cli.errors.log') as fake_log:
+            with pytest.raises(errors.ConnectionError):
+                with errors.handle_connection_errors(client):
+                    raise errors.ReadTimeout()
+            assert fake_log.error.call_count == 1
+            assert '123' in fake_log.error.call_args[0][0]
     def test_user_agent(self):
         client = docker_client(os.environ)
         expected = "docker-compose/{0} docker-py/{1} {2}/{3}".format(
@@ -52,6 +60,14 @@ class DockerClientTestCase(unittest.TestCase):
         )
         self.assertEqual(client.headers['User-Agent'], expected)
+    @mock.patch.dict(os.environ)
+    def test_docker_client_default_windows_host(self):
+        with mock.patch('compose.cli.docker_client.IS_WINDOWS_PLATFORM', True):
+            if 'DOCKER_HOST' in os.environ:
+                del os.environ['DOCKER_HOST']
+            client = docker_client(os.environ)
+            assert client.base_url == 'http://127.0.0.1:2375'
 class TLSConfigTestCase(unittest.TestCase):
     ca_cert = 'tests/fixtures/tls/ca.pem'
@@ -136,3 +152,16 @@ class TLSConfigTestCase(unittest.TestCase):
         result = tls_config_from_options(options)
         assert isinstance(result, docker.tls.TLSConfig)
         assert result.assert_hostname is False
+    def test_tls_client_and_ca_quoted_paths(self):
+        options = {
+            '--tlscacert': '"{0}"'.format(self.ca_cert),
+            '--tlscert': '"{0}"'.format(self.client_cert),
+            '--tlskey': '"{0}"'.format(self.key),
+            '--tlsverify': True
+        }
+        result = tls_config_from_options(options)
+        assert isinstance(result, docker.tls.TLSConfig)
+        assert result.cert == (self.client_cert, self.key)
+        assert result.ca_cert == self.ca_cert
+        assert result.verify is True

View File

@@ -32,7 +32,7 @@ class TestHandleConnectionErrors(object):
                 raise ConnectionError()
         _, args, _ = mock_logging.error.mock_calls[0]
-        assert "Couldn't connect to Docker daemon at" in args[0]
+        assert "Couldn't connect to Docker daemon" in args[0]
     def test_api_error_version_mismatch(self, mock_logging):
         with pytest.raises(errors.ConnectionError):

View File

@@ -4,7 +4,9 @@ from __future__ import unicode_literals
 import itertools
 import pytest
+import requests
 import six
+from docker.errors import APIError
 from six.moves.queue import Queue
 from compose.cli.log_printer import build_log_generator
@@ -56,6 +58,26 @@ def test_wait_on_exit():
     assert expected == wait_on_exit(mock_container)
+def test_wait_on_exit_raises():
+    status_code = 500
+    def mock_wait():
+        resp = requests.Response()
+        resp.status_code = status_code
+        raise APIError('Bad server', resp)
+    mock_container = mock.Mock(
+        spec=Container,
+        name='cname',
+        wait=mock_wait
+    )
+    expected = 'Unexpected API error for {} (HTTP code {})\n'.format(
+        mock_container.name, status_code,
+    )
+    assert expected in wait_on_exit(mock_container)
 def test_build_no_log_generator(mock_container):
     mock_container.has_api_logs = False
     mock_container.log_driver = 'none'

View File

@@ -0,0 +1,23 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import unittest
+from compose.cli.utils import unquote_path
+class UnquotePathTest(unittest.TestCase):
+    def test_no_quotes(self):
+        assert unquote_path('hello') == 'hello'
+    def test_simple_quotes(self):
+        assert unquote_path('"hello"') == 'hello'
+    def test_uneven_quotes(self):
+        assert unquote_path('"hello') == '"hello'
+        assert unquote_path('hello"') == 'hello"'
+    def test_nested_quotes(self):
+        assert unquote_path('""hello""') == '"hello"'
+        assert unquote_path('"hel"lo"') == 'hel"lo'
+        assert unquote_path('"hello""') == 'hello"'

View File

@@ -9,7 +9,6 @@ from compose.config.errors import ConfigurationError
 from compose.config.types import parse_extra_hosts
 from compose.config.types import VolumeFromSpec
 from compose.config.types import VolumeSpec
-from compose.const import IS_WINDOWS_PLATFORM
 def test_parse_extra_hosts_list():
@@ -64,15 +63,38 @@ class TestVolumeSpec(object):
             VolumeSpec.parse('one:two:three:four')
         assert 'has incorrect format' in exc.exconly()
-    @pytest.mark.xfail((not IS_WINDOWS_PLATFORM), reason='does not have a drive')
     def test_parse_volume_windows_absolute_path(self):
         windows_path = "c:\\Users\\me\\Documents\\shiny\\config:\\opt\\shiny\\config:ro"
-        assert VolumeSpec.parse(windows_path) == (
+        assert VolumeSpec._parse_win32(windows_path) == (
             "/c/Users/me/Documents/shiny/config",
             "/opt/shiny/config",
             "ro"
         )
+    def test_parse_volume_windows_internal_path(self):
+        windows_path = 'C:\\Users\\reimu\\scarlet:C:\\scarlet\\app:ro'
+        assert VolumeSpec._parse_win32(windows_path) == (
+            '/c/Users/reimu/scarlet',
+            '/c/scarlet/app',
+            'ro'
+        )
+    def test_parse_volume_windows_just_drives(self):
+        windows_path = 'E:\\:C:\\:ro'
+        assert VolumeSpec._parse_win32(windows_path) == (
+            '/e/',
+            '/c/',
+            'ro'
+        )
+    def test_parse_volume_windows_mixed_notations(self):
+        windows_path = '/c/Foo:C:\\bar'
+        assert VolumeSpec._parse_win32(windows_path) == (
+            '/c/Foo',
+            '/c/bar',
+            'rw'
+        )
 class TestVolumesFromSpec(object):

View File

@@ -150,6 +150,34 @@ class ContainerTest(unittest.TestCase):
         container = Container(None, self.container_dict, has_been_inspected=True)
         assert container.short_id == self.container_id[:12]
+    def test_has_api_logs(self):
+        container_dict = {
+            'HostConfig': {
+                'LogConfig': {
+                    'Type': 'json-file'
+                }
+            }
+        }
+        container = Container(None, container_dict, has_been_inspected=True)
+        assert container.has_api_logs is True
+        container_dict['HostConfig']['LogConfig']['Type'] = 'none'
+        container = Container(None, container_dict, has_been_inspected=True)
+        assert container.has_api_logs is False
+        container_dict['HostConfig']['LogConfig']['Type'] = 'syslog'
+        container = Container(None, container_dict, has_been_inspected=True)
+        assert container.has_api_logs is False
+        container_dict['HostConfig']['LogConfig']['Type'] = 'journald'
+        container = Container(None, container_dict, has_been_inspected=True)
+        assert container.has_api_logs is True
+        container_dict['HostConfig']['LogConfig']['Type'] = 'foobar'
+        container = Container(None, container_dict, has_been_inspected=True)
+        assert container.has_api_logs is False
 class GetContainerNameTestCase(unittest.TestCase):

View File

@@ -15,6 +15,10 @@ class TestJsonSplitter(object):
         data = '{"foo": "bar"}\n \n{"next": "obj"}'
         assert utils.json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}')
+    def test_json_splitter_leading_whitespace(self):
+        data = '\n \r{"foo": "bar"}\n\n {"next": "obj"}'
+        assert utils.json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}')
 class TestStreamAsText(object):
@@ -43,3 +47,16 @@ class TestJsonStream(object):
             [1, 2, 3],
             [],
         ]
+    def test_with_leading_whitespace(self):
+        stream = [
+            '\n \r\n {"one": "two"}{"x": 1}',
+            ' {"three": "four"}\t\t{"x": 2}'
+        ]
+        output = list(utils.json_stream(stream))
+        assert output == [
+            {'one': 'two'},
+            {'x': 1},
+            {'three': 'four'},
+            {'x': 2}
+        ]