Merge branch 'master' into 3191_2_help_cli_feature

twitherspoon 2016-04-07 12:43:34 -04:00
commit 9f47e43b5c
35 changed files with 539 additions and 226 deletions

View File

@ -10,7 +10,7 @@
- id: end-of-file-fixer
- id: flake8
- id: name-tests-test
exclude: 'tests/(helpers\.py|integration/testcases\.py)'
exclude: 'tests/(integration/testcases\.py|helpers\.py)'
- id: requirements-txt-fixer
- id: trailing-whitespace
- repo: git://github.com/asottile/reorder_python_imports

View File

@ -1,13 +1,21 @@
# Roadmap
## An even better tool for development environments
Compose is a great tool for development environments, but it could be even better. For example:
- It should be possible to define hostnames for containers which work from the host machine, e.g. “mywebcontainer.local”. This is needed by apps comprising multiple web services which generate links to one another (e.g. a frontend website and a separate admin webapp)
## More than just development environments
Over time we will extend Compose's remit to cover test, staging and production environments. This is not a simple task, and will take many incremental improvements such as:
Compose currently works really well in development, but we want to make the Compose file format better for test, staging, and production environments. To support these use cases, there will need to be improvements to the file format, improvements to the command-line tool, integrations with other tools, and perhaps new tools altogether.
Some specific things we are considering:
- Compose currently will attempt to get your application into the correct state when running `up`, but it has a number of shortcomings:
- It should roll back to a known good state if it fails.
- It should allow a user to check the actions it is about to perform before running them.
- It should be possible to partially modify the config file for different environments (dev/test/staging/prod), passing in e.g. custom ports or volume mount paths. ([#1377](https://github.com/docker/compose/issues/1377))
- It should be possible to partially modify the config file for different environments (dev/test/staging/prod), passing in e.g. custom ports, volume mount paths, or volume drivers. ([#1377](https://github.com/docker/compose/issues/1377))
- Compose should recommend a technique for zero-downtime deploys.
- It should be possible to continuously attempt to keep an application in the correct state, instead of just performing `up` a single time.
@ -22,10 +30,3 @@ The current state of integration is documented in [SWARM.md](SWARM.md).
Compose works well for applications that are in a single repository and depend on services that are hosted on Docker Hub. If your application depends on another application within your organisation, Compose doesn't work as well.
There are several ideas about how this could work, such as [including external files](https://github.com/docker/fig/issues/318).
## An even better tool for development environments
Compose is a great tool for development environments, but it could be even better. For example:
- [Compose could watch your code and automatically kick off builds when something changes.](https://github.com/docker/fig/issues/184)
- It should be possible to define hostnames for containers which work from the host machine, e.g. “mywebcontainer.local”. This is needed by apps comprising multiple web services which generate links to one another (e.g. a frontend website and a separate admin webapp)

View File

@ -9,6 +9,7 @@ import six
from . import verbose_proxy
from .. import config
from ..config.environment import Environment
from ..const import API_VERSIONS
from ..project import Project
from .docker_client import docker_client
@ -19,29 +20,34 @@ log = logging.getLogger(__name__)
def project_from_options(project_dir, options):
environment = Environment.from_env_file(project_dir)
return get_project(
project_dir,
get_config_path_from_options(options),
get_config_path_from_options(project_dir, options, environment),
project_name=options.get('--project-name'),
verbose=options.get('--verbose'),
host=options.get('--host'),
tls_config=tls_config_from_options(options),
environment=environment
)
def get_config_path_from_options(options):
def get_config_path_from_options(base_dir, options, environment):
file_option = options.get('--file')
if file_option:
return file_option
config_files = os.environ.get('COMPOSE_FILE')
config_files = environment.get('COMPOSE_FILE')
if config_files:
return config_files.split(os.pathsep)
return None
def get_client(verbose=False, version=None, tls_config=None, host=None):
client = docker_client(version=version, tls_config=tls_config, host=host)
def get_client(environment, verbose=False, version=None, tls_config=None, host=None):
client = docker_client(
version=version, tls_config=tls_config, host=host,
environment=environment
)
if verbose:
version_info = six.iteritems(client.version())
log.info(get_version_info('full'))
@ -53,27 +59,33 @@ def get_client(verbose=False, version=None, tls_config=None, host=None):
def get_project(project_dir, config_path=None, project_name=None, verbose=False,
host=None, tls_config=None):
config_details = config.find(project_dir, config_path)
project_name = get_project_name(config_details.working_dir, project_name)
host=None, tls_config=None, environment=None):
if not environment:
environment = Environment.from_env_file(project_dir)
config_details = config.find(project_dir, config_path, environment)
project_name = get_project_name(
config_details.working_dir, project_name, environment
)
config_data = config.load(config_details)
api_version = os.environ.get(
api_version = environment.get(
'COMPOSE_API_VERSION',
API_VERSIONS[config_data.version])
client = get_client(
verbose=verbose, version=api_version, tls_config=tls_config,
host=host
host=host, environment=environment
)
return Project.from_config(project_name, config_data, client)
def get_project_name(working_dir, project_name=None):
def get_project_name(working_dir, project_name=None, environment=None):
def normalize_name(name):
return re.sub(r'[^a-z0-9]', '', name.lower())
project_name = project_name or os.environ.get('COMPOSE_PROJECT_NAME')
if not environment:
environment = Environment.from_env_file(working_dir)
project_name = project_name or environment.get('COMPOSE_PROJECT_NAME')
if project_name:
return normalize_name(project_name)
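
For readers tracking the CLI change above: `get_config_path_from_options` now receives the project directory and an explicit `Environment`, and reads `COMPOSE_FILE` from that environment rather than from `os.environ`. A minimal, self-contained sketch of the lookup order (a stand-in function, not the real `compose.cli.command` module):

```python
# Sketch of the new lookup order: an explicit --file option wins,
# otherwise COMPOSE_FILE is read from the computed environment and split
# on the platform path separator, otherwise None is returned so that
# config.find() falls back to the default docker-compose.yml.
import os


def get_config_path(options, environment):
    file_option = options.get('--file')
    if file_option:
        return file_option
    config_files = environment.get('COMPOSE_FILE')
    if config_files:
        return config_files.split(os.pathsep)
    return None


if __name__ == '__main__':
    env = {'COMPOSE_FILE': os.pathsep.join(['base.yml', 'override.yml'])}
    print(get_config_path({}, env))                        # ['base.yml', 'override.yml']
    print(get_config_path({'--file': ['one.yml']}, env))   # ['one.yml']
```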

View File

@ -2,13 +2,11 @@ from __future__ import absolute_import
from __future__ import unicode_literals
import logging
import os
from docker import Client
from docker.errors import TLSParameterError
from docker.tls import TLSConfig
from docker.utils import kwargs_from_env
from requests.utils import urlparse
from ..const import HTTP_TIMEOUT
from .errors import UserError
@ -22,37 +20,36 @@ def tls_config_from_options(options):
cert = options.get('--tlscert')
key = options.get('--tlskey')
verify = options.get('--tlsverify')
hostname = urlparse(options.get('--host') or '').hostname
skip_hostname_check = options.get('--skip-hostname-check', False)
advanced_opts = any([ca_cert, cert, key, verify])
if tls is True and not advanced_opts:
return True
elif advanced_opts:
elif advanced_opts: # --tls is a noop
client_cert = None
if cert or key:
client_cert = (cert, key)
return TLSConfig(
client_cert=client_cert, verify=verify, ca_cert=ca_cert,
assert_hostname=(
hostname or not options.get('--skip-hostname-check', False)
)
assert_hostname=False if skip_hostname_check else None
)
else:
return None
return None
def docker_client(version=None, tls_config=None, host=None):
def docker_client(environment, version=None, tls_config=None, host=None):
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
if 'DOCKER_CLIENT_TIMEOUT' in environment:
log.warn("The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. "
"Please use COMPOSE_HTTP_TIMEOUT instead.")
try:
kwargs = kwargs_from_env(assert_hostname=False)
kwargs = kwargs_from_env(environment=environment)
except TLSParameterError:
raise UserError(
"TLS configuration is invalid - make sure your DOCKER_TLS_VERIFY "
@ -67,6 +64,10 @@ def docker_client(version=None, tls_config=None, host=None):
if version:
kwargs['version'] = version
kwargs['timeout'] = HTTP_TIMEOUT
timeout = environment.get('COMPOSE_HTTP_TIMEOUT')
if timeout:
kwargs['timeout'] = int(timeout)
else:
kwargs['timeout'] = HTTP_TIMEOUT
return Client(**kwargs)
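
The client factory now takes the computed environment and resolves `COMPOSE_HTTP_TIMEOUT` per call, instead of relying solely on the module-level `HTTP_TIMEOUT` constant (which, per the `const.py` change further down, no longer consults `COMPOSE_HTTP_TIMEOUT`). A hedged sketch of just that timeout resolution, with a stand-in default:

```python
# Sketch of the timeout resolution only; the real function also handles
# TLS options and constructs the docker-py Client. HTTP_TIMEOUT here is
# a stand-in for the compose.const default.
HTTP_TIMEOUT = 60


def resolve_timeout(environment):
    timeout = environment.get('COMPOSE_HTTP_TIMEOUT')
    if timeout:
        return int(timeout)
    return HTTP_TIMEOUT


print(resolve_timeout({}))                               # 60
print(resolve_timeout({'COMPOSE_HTTP_TIMEOUT': '300'}))  # 300
```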

View File

@ -17,6 +17,7 @@ from .. import __version__
from ..config import config
from ..config import ConfigurationError
from ..config import parse_environment
from ..config.environment import Environment
from ..config.serialize import serialize_config
from ..const import DEFAULT_TIMEOUT
from ..const import IS_WINDOWS_PLATFORM
@ -222,8 +223,13 @@ class TopLevelCommand(object):
--services Print the service names, one per line.
"""
config_path = get_config_path_from_options(config_options)
compose_config = config.load(config.find(self.project_dir, config_path))
environment = Environment.from_env_file(self.project_dir)
config_path = get_config_path_from_options(
self.project_dir, config_options, environment
)
compose_config = config.load(
config.find(self.project_dir, config_path, environment)
)
if options['--quiet']:
return
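
Taken together with the `command.py` change above, the `config` command now computes the environment once from the project directory and threads it through path resolution and loading. A usage sketch, assuming this branch is installed and the script is run from a directory containing a `docker-compose.yml` (and optionally a `.env` file):

```python
# Hedged sketch of the new flow: environment first, then config paths,
# then load. Module paths match the imports added in this diff.
from compose.cli.command import get_config_path_from_options
from compose.config import config
from compose.config.environment import Environment

project_dir = '.'
environment = Environment.from_env_file(project_dir)
config_path = get_config_path_from_options(project_dir, {}, environment)
compose_config = config.load(
    config.find(project_dir, config_path, environment)
)
print([service['name'] for service in compose_config.services])
```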

View File

@ -2,6 +2,7 @@
from __future__ import absolute_import
from __future__ import unicode_literals
from . import environment
from .config import ConfigurationError
from .config import DOCKER_CONFIG_KEYS
from .config import find

View File

@ -1,7 +1,6 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import codecs
import functools
import logging
import operator
@ -17,6 +16,9 @@ from cached_property import cached_property
from ..const import COMPOSEFILE_V1 as V1
from ..const import COMPOSEFILE_V2_0 as V2_0
from ..utils import build_string_dict
from .environment import env_vars_from_file
from .environment import Environment
from .environment import split_env
from .errors import CircularReference
from .errors import ComposeFileNotFound
from .errors import ConfigurationError
@ -113,13 +115,21 @@ DEFAULT_OVERRIDE_FILENAME = 'docker-compose.override.yml'
log = logging.getLogger(__name__)
class ConfigDetails(namedtuple('_ConfigDetails', 'working_dir config_files')):
class ConfigDetails(namedtuple('_ConfigDetails', 'working_dir config_files environment')):
"""
:param working_dir: the directory to use for relative paths in the config
:type working_dir: string
:param config_files: list of configuration files to load
:type config_files: list of :class:`ConfigFile`
:param environment: computed environment values for this project
:type environment: :class:`environment.Environment`
"""
def __new__(cls, working_dir, config_files, environment=None):
if environment is None:
environment = Environment.from_env_file(working_dir)
return super(ConfigDetails, cls).__new__(
cls, working_dir, config_files, environment
)
class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
@ -207,11 +217,13 @@ class ServiceConfig(namedtuple('_ServiceConfig', 'working_dir filename name conf
config)
def find(base_dir, filenames):
def find(base_dir, filenames, environment):
if filenames == ['-']:
return ConfigDetails(
os.getcwd(),
[ConfigFile(None, yaml.safe_load(sys.stdin))])
[ConfigFile(None, yaml.safe_load(sys.stdin))],
environment
)
if filenames:
filenames = [os.path.join(base_dir, f) for f in filenames]
@ -221,7 +233,9 @@ def find(base_dir, filenames):
log.debug("Using configuration files: {}".format(",".join(filenames)))
return ConfigDetails(
os.path.dirname(filenames[0]),
[ConfigFile.from_filename(f) for f in filenames])
[ConfigFile.from_filename(f) for f in filenames],
environment
)
def validate_config_version(config_files):
@ -289,7 +303,7 @@ def load(config_details):
validate_config_version(config_details.config_files)
processed_files = [
process_config_file(config_file)
process_config_file(config_file, config_details.environment)
for config_file in config_details.config_files
]
config_details = config_details._replace(config_files=processed_files)
@ -301,10 +315,7 @@ def load(config_details):
networks = load_mapping(
config_details.config_files, 'get_networks', 'Network'
)
service_dicts = load_services(
config_details.working_dir,
main_file,
[file.get_service_dicts() for file in config_details.config_files])
service_dicts = load_services(config_details, main_file)
if main_file.version != V1:
for service_dict in service_dicts:
@ -348,14 +359,16 @@ def load_mapping(config_files, get_func, entity_type):
return mapping
def load_services(working_dir, config_file, service_configs):
def load_services(config_details, config_file):
def build_service(service_name, service_dict, service_names):
service_config = ServiceConfig.with_abs_paths(
working_dir,
config_details.working_dir,
config_file.filename,
service_name,
service_dict)
resolver = ServiceExtendsResolver(service_config, config_file)
resolver = ServiceExtendsResolver(
service_config, config_file, environment=config_details.environment
)
service_dict = process_service(resolver.run())
service_config = service_config._replace(config=service_dict)
@ -363,7 +376,8 @@ def load_services(working_dir, config_file, service_configs):
service_dict = finalize_service(
service_config,
service_names,
config_file.version)
config_file.version,
config_details.environment)
return service_dict
def build_services(service_config):
@ -383,6 +397,10 @@ def load_services(working_dir, config_file, service_configs):
for name in all_service_names
}
service_configs = [
file.get_service_dicts() for file in config_details.config_files
]
service_config = service_configs[0]
for next_config in service_configs[1:]:
service_config = merge_services(service_config, next_config)
@ -390,16 +408,17 @@ def load_services(working_dir, config_file, service_configs):
return build_services(service_config)
def interpolate_config_section(filename, config, section):
def interpolate_config_section(filename, config, section, environment):
validate_config_section(filename, config, section)
return interpolate_environment_variables(config, section)
return interpolate_environment_variables(config, section, environment)
def process_config_file(config_file, service_name=None):
def process_config_file(config_file, environment, service_name=None):
services = interpolate_config_section(
config_file.filename,
config_file.get_service_dicts(),
'service')
'service',
environment,)
if config_file.version == V2_0:
processed_config = dict(config_file.config)
@ -407,11 +426,13 @@ def process_config_file(config_file, service_name=None):
processed_config['volumes'] = interpolate_config_section(
config_file.filename,
config_file.get_volumes(),
'volume')
'volume',
environment,)
processed_config['networks'] = interpolate_config_section(
config_file.filename,
config_file.get_networks(),
'network')
'network',
environment,)
if config_file.version == V1:
processed_config = services
@ -428,11 +449,12 @@ def process_config_file(config_file, service_name=None):
class ServiceExtendsResolver(object):
def __init__(self, service_config, config_file, already_seen=None):
def __init__(self, service_config, config_file, environment, already_seen=None):
self.service_config = service_config
self.working_dir = service_config.working_dir
self.already_seen = already_seen or []
self.config_file = config_file
self.environment = environment
@property
def signature(self):
@ -462,8 +484,8 @@ class ServiceExtendsResolver(object):
extends_file = ConfigFile.from_filename(config_path)
validate_config_version([self.config_file, extends_file])
extended_file = process_config_file(
extends_file,
service_name=service_name)
extends_file, self.environment, service_name=service_name
)
service_config = extended_file.get_service(service_name)
return config_path, service_config, service_name
@ -476,7 +498,9 @@ class ServiceExtendsResolver(object):
service_name,
service_dict),
self.config_file,
already_seen=self.already_seen + [self.signature])
already_seen=self.already_seen + [self.signature],
environment=self.environment
)
service_config = resolver.run()
other_service_dict = process_service(service_config)
@ -505,7 +529,7 @@ class ServiceExtendsResolver(object):
return filename
def resolve_environment(service_dict):
def resolve_environment(service_dict, environment=None):
"""Unpack any environment variables from an env_file, if set.
Interpolate environment values if set.
"""
@ -514,12 +538,12 @@ def resolve_environment(service_dict):
env.update(env_vars_from_file(env_file))
env.update(parse_environment(service_dict.get('environment')))
return dict(resolve_env_var(k, v) for k, v in six.iteritems(env))
return dict(resolve_env_var(k, v, environment) for k, v in six.iteritems(env))
def resolve_build_args(build):
def resolve_build_args(build, environment):
args = parse_build_arguments(build.get('args'))
return dict(resolve_env_var(k, v) for k, v in six.iteritems(args))
return dict(resolve_env_var(k, v, environment) for k, v in six.iteritems(args))
def validate_extended_service_dict(service_dict, filename, service):
@ -598,11 +622,11 @@ def process_service(service_config):
return service_dict
def finalize_service(service_config, service_names, version):
def finalize_service(service_config, service_names, version, environment):
service_dict = dict(service_config.config)
if 'environment' in service_dict or 'env_file' in service_dict:
service_dict['environment'] = resolve_environment(service_dict)
service_dict['environment'] = resolve_environment(service_dict, environment)
service_dict.pop('env_file', None)
if 'volumes_from' in service_dict:
@ -629,7 +653,7 @@ def finalize_service(service_config, service_names, version):
if 'restart' in service_dict:
service_dict['restart'] = parse_restart_spec(service_dict['restart'])
normalize_build(service_dict, service_config.working_dir)
normalize_build(service_dict, service_config.working_dir, environment)
service_dict['name'] = service_config.name
return normalize_v1_service_format(service_dict)
@ -777,15 +801,6 @@ def merge_environment(base, override):
return env
def split_env(env):
if isinstance(env, six.binary_type):
env = env.decode('utf-8', 'replace')
if '=' in env:
return env.split('=', 1)
else:
return env, None
def split_label(label):
if '=' in label:
return label.split('=', 1)
@ -823,30 +838,15 @@ def parse_ulimits(ulimits):
return dict(ulimits)
def resolve_env_var(key, val):
def resolve_env_var(key, val, environment):
if val is not None:
return key, val
elif key in os.environ:
return key, os.environ[key]
elif environment and key in environment:
return key, environment[key]
else:
return key, None
def env_vars_from_file(filename):
"""
Read in a line delimited file of environment variables.
"""
if not os.path.exists(filename):
raise ConfigurationError("Couldn't find env file: %s" % filename)
env = {}
for line in codecs.open(filename, 'r', 'utf-8'):
line = line.strip()
if line and not line.startswith('#'):
k, v = split_env(line)
env[k] = v
return env
def resolve_volume_paths(working_dir, service_dict):
return [
resolve_volume_path(working_dir, volume)
@ -866,7 +866,7 @@ def resolve_volume_path(working_dir, volume):
return container_path
def normalize_build(service_dict, working_dir):
def normalize_build(service_dict, working_dir, environment):
if 'build' in service_dict:
build = {}
@ -876,7 +876,9 @@ def normalize_build(service_dict, working_dir):
else:
build.update(service_dict['build'])
if 'args' in build:
build['args'] = build_string_dict(resolve_build_args(build))
build['args'] = build_string_dict(
resolve_build_args(build, environment)
)
service_dict['build'] = build
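
`resolve_env_var`, `resolve_environment` and `resolve_build_args` now take the computed environment explicitly instead of reading `os.environ`. A simplified stand-in showing the resolution rule (an explicit value wins, a bare key is filled from the passed environment, otherwise `None`):

```python
# Simplified stand-in for the new resolve_env_var behaviour; not the
# real compose.config.config module.
def resolve_env_var(key, val, environment):
    if val is not None:
        return key, val
    if environment and key in environment:
        return key, environment[key]
    return key, None


env = {'TERM': 'xterm'}
declared = {'FOO': 'bar', 'TERM': None, 'MISSING': None}
print(dict(resolve_env_var(k, v, env) for k, v in declared.items()))
# {'FOO': 'bar', 'TERM': 'xterm', 'MISSING': None}
```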

View File

@ -0,0 +1,93 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import codecs
import logging
import os
import six
from ..const import IS_WINDOWS_PLATFORM
from .errors import ConfigurationError
log = logging.getLogger(__name__)
def split_env(env):
if isinstance(env, six.binary_type):
env = env.decode('utf-8', 'replace')
if '=' in env:
return env.split('=', 1)
else:
return env, None
def env_vars_from_file(filename):
"""
Read in a line delimited file of environment variables.
"""
if not os.path.exists(filename):
raise ConfigurationError("Couldn't find env file: %s" % filename)
env = {}
for line in codecs.open(filename, 'r', 'utf-8'):
line = line.strip()
if line and not line.startswith('#'):
k, v = split_env(line)
env[k] = v
return env
class Environment(dict):
def __init__(self, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
self.missing_keys = []
@classmethod
def from_env_file(cls, base_dir):
def _initialize():
result = cls()
if base_dir is None:
return result
env_file_path = os.path.join(base_dir, '.env')
try:
return cls(env_vars_from_file(env_file_path))
except ConfigurationError:
pass
return result
instance = _initialize()
instance.update(os.environ)
return instance
def __getitem__(self, key):
try:
return super(Environment, self).__getitem__(key)
except KeyError:
if IS_WINDOWS_PLATFORM:
try:
return super(Environment, self).__getitem__(key.upper())
except KeyError:
pass
if key not in self.missing_keys:
log.warn(
"The {} variable is not set. Defaulting to a blank string."
.format(key)
)
self.missing_keys.append(key)
return ""
def __contains__(self, key):
result = super(Environment, self).__contains__(key)
if IS_WINDOWS_PLATFORM:
return (
result or super(Environment, self).__contains__(key.upper())
)
return result
def get(self, key, *args, **kwargs):
if IS_WINDOWS_PLATFORM:
return super(Environment, self).get(
key,
super(Environment, self).get(key.upper(), *args, **kwargs)
)
return super(Environment, self).get(key, *args, **kwargs)
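
A usage sketch for the new `Environment` class added above, assuming this branch is importable; the temporary directory stands in for a project directory. It illustrates the intended precedence: `.env` values are loaded first, the process environment overrides them, and missing keys default to an empty string with a one-time warning.

```python
# Usage sketch only; paths are illustrative, not part of this commit.
import os
import tempfile

from compose.config.environment import Environment

project_dir = tempfile.mkdtemp()
with open(os.path.join(project_dir, '.env'), 'w') as f:
    f.write('COMPOSE_PROJECT_NAME=fromenvfile\n')

env = Environment.from_env_file(project_dir)
print(env['COMPOSE_PROJECT_NAME'])   # 'fromenvfile'

# Values already present in the process environment win over the .env file.
os.environ['COMPOSE_PROJECT_NAME'] = 'fromshell'
env = Environment.from_env_file(project_dir)
print(env['COMPOSE_PROJECT_NAME'])   # 'fromshell'

# Missing keys default to an empty string (logged once per key).
print(repr(env['NOT_SET']))          # ''
```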

View File

@ -2,7 +2,6 @@ from __future__ import absolute_import
from __future__ import unicode_literals
import logging
import os
from string import Template
import six
@ -11,12 +10,11 @@ from .errors import ConfigurationError
log = logging.getLogger(__name__)
def interpolate_environment_variables(config, section):
mapping = BlankDefaultDict(os.environ)
def interpolate_environment_variables(config, section, environment):
def process_item(name, config_dict):
return dict(
(key, interpolate_value(name, key, val, section, mapping))
(key, interpolate_value(name, key, val, section, environment))
for key, val in (config_dict or {}).items()
)
@ -60,25 +58,6 @@ def interpolate(string, mapping):
raise InvalidInterpolation(string)
class BlankDefaultDict(dict):
def __init__(self, *args, **kwargs):
super(BlankDefaultDict, self).__init__(*args, **kwargs)
self.missing_keys = []
def __getitem__(self, key):
try:
return super(BlankDefaultDict, self).__getitem__(key)
except KeyError:
if key not in self.missing_keys:
log.warn(
"The {} variable is not set. Defaulting to a blank string."
.format(key)
)
self.missing_keys.append(key)
return ""
class InvalidInterpolation(Exception):
def __init__(self, string):
self.string = string
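
Interpolation now receives the environment explicitly, and `BlankDefaultDict` is gone (its blank-default behaviour moved into `Environment`, as the test rename near the end of this diff shows). A sketch of the substitution rule using `string.Template`, which this module continues to import; `BlankDefault` below is a stand-in for that behaviour, not compose code:

```python
from string import Template


class BlankDefault(dict):
    # Missing variables resolve to an empty string, mirroring the
    # Environment default described above.
    def __missing__(self, key):
        return ''


environment = BlankDefault({'USER': 'jenny', 'FOO': 'bar'})
print(Template('example:${USER}').substitute(environment))     # example:jenny
print(Template('$FOO:/target').substitute(environment))        # bar:/target
print(Template('${MISSING}:/target').substitute(environment))  # :/target
```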

View File

@ -5,7 +5,7 @@ import os
import sys
DEFAULT_TIMEOUT = 10
HTTP_TIMEOUT = int(os.environ.get('COMPOSE_HTTP_TIMEOUT', os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)))
HTTP_TIMEOUT = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
IMAGE_EVENTS = ['delete', 'import', 'pull', 'push', 'tag', 'untag']
IS_WINDOWS_PLATFORM = (sys.platform == "win32")
LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number'

View File

@ -39,7 +39,7 @@ class Container(object):
@classmethod
def from_id(cls, client, id):
return cls(client, client.inspect_container(id))
return cls(client, client.inspect_container(id), has_been_inspected=True)
@classmethod
def create(cls, client, **options):

View File

@ -18,11 +18,22 @@
__docker_compose_q() {
local file_args
if [ ${#compose_files[@]} -ne 0 ] ; then
file_args="${compose_files[@]/#/-f }"
fi
docker-compose 2>/dev/null $file_args ${compose_project:+-p $compose_project} "$@"
docker-compose 2>/dev/null $daemon_options "$@"
}
# Transforms a multiline list of strings into a single line string
# with the words separated by "|".
__docker_compose_to_alternatives() {
local parts=( $1 )
local IFS='|'
echo "${parts[*]}"
}
# Transforms a multiline list of options into an extglob pattern
# suitable for use in case statements.
__docker_compose_to_extglob() {
local extglob=$( __docker_compose_to_alternatives "$1" )
echo "@($extglob)"
}
# suppress trailing whitespace
@ -31,20 +42,6 @@ __docker_compose_nospace() {
type compopt &>/dev/null && compopt -o nospace
}
# For compatibility reasons, Compose and therefore its completion supports several
# stack composition files as listed here, in descending priority.
# Support for these filenames might be dropped in some future version.
__docker_compose_compose_file() {
local file
for file in docker-compose.y{,a}ml ; do
[ -e $file ] && {
echo $file
return
}
done
echo docker-compose.yml
}
# Extracts all service names from the compose file.
___docker_compose_all_services_in_compose_file() {
__docker_compose_q config --services
@ -131,18 +128,22 @@ _docker_compose_create() {
_docker_compose_docker_compose() {
case "$prev" in
--tlscacert|--tlscert|--tlskey)
_filedir
return
;;
--file|-f)
_filedir "y?(a)ml"
return
;;
--project-name|-p)
$(__docker_compose_to_extglob "$daemon_options_with_args") )
return
;;
esac
case "$cur" in
-*)
COMPREPLY=( $( compgen -W "--file -f --help -h --project-name -p --verbose --version -v" -- "$cur" ) )
COMPREPLY=( $( compgen -W "$daemon_boolean_options $daemon_options_with_args --help -h --verbose --version -v" -- "$cur" ) )
;;
*)
COMPREPLY=( $( compgen -W "${commands[*]}" -- "$cur" ) )
@ -185,6 +186,24 @@ _docker_compose_events() {
}
_docker_compose_exec() {
case "$prev" in
--index|--user)
return
;;
esac
case "$cur" in
-*)
COMPREPLY=( $( compgen -W "-d --help --index --privileged -T --user" -- "$cur" ) )
;;
*)
__docker_compose_services_running
;;
esac
}
_docker_compose_help() {
COMPREPLY=( $( compgen -W "${commands[*]}" -- "$cur" ) )
}
@ -210,9 +229,15 @@ _docker_compose_kill() {
_docker_compose_logs() {
case "$prev" in
--tail)
return
;;
esac
case "$cur" in
-*)
COMPREPLY=( $( compgen -W "--help --no-color" -- "$cur" ) )
COMPREPLY=( $( compgen -W "--follow -f --help --no-color --tail --timestamps -t" -- "$cur" ) )
;;
*)
__docker_compose_services_all
@ -300,7 +325,7 @@ _docker_compose_restart() {
_docker_compose_rm() {
case "$cur" in
-*)
COMPREPLY=( $( compgen -W "--force -f --help -v" -- "$cur" ) )
COMPREPLY=( $( compgen -W "--all -a --force -f --help -v" -- "$cur" ) )
;;
*)
__docker_compose_services_stopped
@ -406,7 +431,7 @@ _docker_compose_up() {
case "$cur" in
-*)
COMPREPLY=( $( compgen -W "--abort-on-container-exit -d --force-recreate --help --no-build --no-color --no-deps --no-recreate --timeout -t --remove-orphans" -- "$cur" ) )
COMPREPLY=( $( compgen -W "--abort-on-container-exit --build -d --force-recreate --help --no-build --no-color --no-deps --no-recreate --timeout -t --remove-orphans" -- "$cur" ) )
;;
*)
__docker_compose_services_all
@ -434,6 +459,7 @@ _docker_compose() {
create
down
events
exec
help
kill
logs
@ -452,6 +478,22 @@ _docker_compose() {
version
)
# options for the docker daemon that have to be passed to secondary calls to
# docker-compose executed by this script
local daemon_boolean_options="
--skip-hostname-check
--tls
--tlsverify
"
local daemon_options_with_args="
--file -f
--host -H
--project-name -p
--tlscacert
--tlscert
--tlskey
"
COMPREPLY=()
local cur prev words cword
_get_comp_words_by_ref -n : cur prev words cword
@ -459,17 +501,19 @@ _docker_compose() {
# search subcommand and invoke its handler.
# special treatment of some top-level options
local command='docker_compose'
local daemon_options=()
local counter=1
local compose_files=() compose_project
while [ $counter -lt $cword ]; do
case "${words[$counter]}" in
--file|-f)
(( counter++ ))
compose_files+=(${words[$counter]})
$(__docker_compose_to_extglob "$daemon_boolean_options") )
local opt=${words[counter]}
daemon_options+=($opt)
;;
--project-name|-p)
(( counter++ ))
compose_project="${words[$counter]}"
$(__docker_compose_to_extglob "$daemon_options_with_args") )
local opt=${words[counter]}
local arg=${words[++counter]}
daemon_options+=($opt $arg)
;;
-*)
;;

View File

@ -223,6 +223,18 @@ __docker-compose_subcommand() {
'--json[Output events as a stream of json objects.]' \
'*:services:__docker-compose_services_all' && ret=0
;;
(exec)
_arguments \
$opts_help \
'-d[Detached mode: Run command in the background.]' \
'--privileged[Give extended privileges to the process.]' \
'--user=[Run the command as this user.]:username:_users' \
'-T[Disable pseudo-tty allocation. By default `docker-compose exec` allocates a TTY.]' \
'--index=[Index of the container if there are multiple instances of a service (default: 1)]:index: ' \
'(-):running services:__docker-compose_runningservices' \
'(-):command: _command_names -e' \
'*::arguments: _normal' && ret=0
;;
(help)
_arguments ':subcommand:__docker-compose_commands' && ret=0
;;
@ -235,7 +247,10 @@ __docker-compose_subcommand() {
(logs)
_arguments \
$opts_help \
'(-f --follow)'{-f,--follow}'[Follow log output]' \
'--no-color[Produce monochrome output.]' \
'--tail=[Number of lines to show from the end of the logs for each container.]:number of lines: ' \
'(-t --timestamps)'{-t,--timestamps}'[Show timestamps]' \
'*:services:__docker-compose_services_all' && ret=0
;;
(pause)
@ -266,6 +281,7 @@ __docker-compose_subcommand() {
(rm)
_arguments \
$opts_help \
'(-a --all)'{-a,--all}"[Also remove one-off containers]" \
'(-f --force)'{-f,--force}"[Don't ask to confirm removal]" \
'-v[Remove volumes associated with containers]' \
'*:stopped services:__docker-compose_stoppedservices' && ret=0
@ -274,15 +290,16 @@ __docker-compose_subcommand() {
_arguments \
$opts_help \
'-d[Detached mode: Run container in the background, print new container name.]' \
'--name[Assign a name to the container]:name: ' \
'--entrypoint[Overwrite the entrypoint of the image.]:entry point: ' \
'*-e[KEY=VAL Set an environment variable (can be used multiple times)]:environment variable KEY=VAL: ' \
'(-u --user)'{-u,--user=-}'[Run as specified username or uid]:username or uid:_users' \
'--entrypoint[Overwrite the entrypoint of the image.]:entry point: ' \
'--name[Assign a name to the container]:name: ' \
"--no-deps[Don't start linked services.]" \
'(-p --publish)'{-p,--publish=-}"[Run command with manually mapped container's port(s) to the host.]" \
'--rm[Remove container after run. Ignored in detached mode.]' \
"--service-ports[Run command with the service's ports enabled and mapped to the host.]" \
'(-p --publish)'{-p,--publish=-}"[Run command with manually mapped container's port(s) to the host.]" \
'-T[Disable pseudo-tty allocation. By default `docker-compose run` allocates a TTY.]' \
'(-u --user)'{-u,--user=-}'[Run as specified username or uid]:username or uid:_users' \
'(-w --workdir)'{-w=,--workdir=}'[Working directory inside the container]:workdir: ' \
'(-):services:__docker-compose_services' \
'(-):command: _command_names -e' \
'*::arguments: _normal' && ret=0
@ -313,6 +330,7 @@ __docker-compose_subcommand() {
_arguments \
$opts_help \
'(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names.]' \
'--build[Build images before starting containers.]' \
'--no-color[Produce monochrome output.]' \
"--no-deps[Don't start linked services.]" \
"--force-recreate[Recreate containers even if their configuration and image haven't changed. Incompatible with --no-recreate.]" \

View File

@ -744,7 +744,7 @@ While it is possible to declare volumes on the fly as part of the service
declaration, this section allows you to create named volumes that can be
reused across multiple services (without relying on `volumes_from`), and are
easily retrieved and inspected using the docker command line or API.
See the [docker volume](/engine/reference/commandline/volume_create.md)
See the [docker volume](https://docs.docker.com/engine/reference/commandline/volume_create/)
subcommand documentation for more information.
### driver
@ -1089,21 +1089,24 @@ It's more complicated if you're using particular configuration features:
## Variable substitution
Your configuration options can contain environment variables. Compose uses the
variable values from the shell environment in which `docker-compose` is run. For
example, suppose the shell contains `POSTGRES_VERSION=9.3` and you supply this
configuration:
variable values from the shell environment in which `docker-compose` is run.
For example, suppose the shell contains `EXTERNAL_PORT=8000` and you supply
this configuration:
db:
image: "postgres:${POSTGRES_VERSION}"
web:
build: .
ports:
- "${EXTERNAL_PORT}:5000"
When you run `docker-compose up` with this configuration, Compose looks for the
`POSTGRES_VERSION` environment variable in the shell and substitutes its value
in. For this example, Compose resolves the `image` to `postgres:9.3` before
running the configuration.
When you run `docker-compose up` with this configuration, Compose looks for
the `EXTERNAL_PORT` environment variable in the shell and substitutes its
value in. In this example, Compose resolves the port mapping to `"8000:5000"`
before creating the `web` container.
If an environment variable is not set, Compose substitutes with an empty
string. In the example above, if `POSTGRES_VERSION` is not set, the value for
the `image` option is `postgres:`.
string. In the example above, if `EXTERNAL_PORT` is not set, the value for the
port mapping is `:5000` (which is of course an invalid port mapping, and will
result in an error when attempting to create the container).
Both `$VARIABLE` and `${VARIABLE}` syntax are supported. Extended shell-style
features, such as `${VARIABLE-default}` and `${VARIABLE/foo/bar}`, are not

43
docs/env-file.md Normal file
View File

@ -0,0 +1,43 @@
<!--[metadata]>
+++
title = "Environment file"
description = "Declaring default environment variables in file"
keywords = ["fig, composition, compose, docker, orchestration, environment, env file"]
[menu.main]
parent = "workw_compose"
weight=10
+++
<![end-metadata]-->
# Environment file
Compose supports declaring default environment variables in an environment
file named `.env` and placed in the same folder as your
[compose file](compose-file.md).
Compose expects each line in an env file to be in `VAR=VAL` format. Lines
beginning with `#` (i.e. comments) are ignored, as are blank lines.
> Note: Values present in the environment at runtime will always override
> those defined inside the `.env` file. Similarly, values passed via
> command-line arguments take precedence as well.
Those environment variables will be used for
[variable substitution](compose-file.md#variable-substitution) in your Compose
file, but can also be used to define the following
[CLI variables](reference/envvars.md):
- `COMPOSE_API_VERSION`
- `COMPOSE_FILE`
- `COMPOSE_HTTP_TIMEOUT`
- `COMPOSE_PROJECT_NAME`
- `DOCKER_CERT_PATH`
- `DOCKER_HOST`
- `DOCKER_TLS_VERIFY`
## More Compose documentation
- [User guide](index.md)
- [Command line reference](./reference/index.md)
- [Compose file reference](compose-file.md)
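
For the `VAR=VAL` format described in this new page, a minimal parser sketch (the real implementation is `env_vars_from_file` in `compose/config/environment.py`, added earlier in this commit); it skips blank lines and treats lines starting with `#` as comments:

```python
# Minimal, simplified parser for the .env format; illustrative only.
def parse_env_file(lines):
    env = {}
    for line in lines:
        line = line.strip()
        if line and not line.startswith('#'):
            key, _, value = line.partition('=')
            env[key] = value
    return env


print(parse_env_file([
    '# defaults for local development',
    'COMPOSE_PROJECT_NAME=myapp',
    '',
    'COMPOSE_FILE=docker-compose.yml',
]))
# {'COMPOSE_PROJECT_NAME': 'myapp', 'COMPOSE_FILE': 'docker-compose.yml'}
```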

View File

@ -23,6 +23,7 @@ Compose is a tool for defining and running multi-container Docker applications.
- [Frequently asked questions](faq.md)
- [Command line reference](./reference/index.md)
- [Compose file reference](compose-file.md)
- [Environment file](env-file.md)
To see a detailed list of changes for past and current releases of Docker
Compose, please refer to the

View File

@ -15,7 +15,7 @@ weight=21
> **Note:** This document only applies if you're using [version 2 of the Compose file format](compose-file.md#versioning). Networking features are not supported for version 1 (legacy) Compose files.
By default Compose sets up a single
[network](/engine/reference/commandline/network_create.md) for your app. Each
[network](https://docs.docker.com/engine/reference/commandline/network_create/) for your app. Each
container for a service joins the default network and is both *reachable* by
other containers on that network, and *discoverable* by them at a hostname
identical to the container name.
@ -78,11 +78,11 @@ See the [links reference](compose-file.md#links) for more information.
When [deploying a Compose application to a Swarm cluster](swarm.md), you can make use of the built-in `overlay` driver to enable multi-host communication between containers with no changes to your Compose file or application code.
Consult the [Getting started with multi-host networking](/engine/userguide/networking/get-started-overlay.md) to see how to set up a Swarm cluster. The cluster will use the `overlay` driver by default, but you can specify it explicitly if you prefer - see below for how to do this.
Consult the [Getting started with multi-host networking](https://docs.docker.com/engine/userguide/networking/get-started-overlay/) to see how to set up a Swarm cluster. The cluster will use the `overlay` driver by default, but you can specify it explicitly if you prefer - see below for how to do this.
## Specifying custom networks
Instead of just using the default app network, you can specify your own networks with the top-level `networks` key. This lets you create more complex topologies and specify [custom network drivers](/engine/extend/plugins_network.md) and options. You can also use it to connect services to externally-created networks which aren't managed by Compose.
Instead of just using the default app network, you can specify your own networks with the top-level `networks` key. This lets you create more complex topologies and specify [custom network drivers](https://docs.docker.com/engine/extend/plugins_network/) and options. You can also use it to connect services to externally-created networks which aren't managed by Compose.
Each service can specify what networks to connect to with the *service-level* `networks` key, which is a list of names referencing entries under the *top-level* `networks` key.

View File

@ -17,6 +17,9 @@ Several environment variables are available for you to configure the Docker Comp
Variables starting with `DOCKER_` are the same as those used to configure the
Docker command-line client. If you're using `docker-machine`, then the `eval "$(docker-machine env my-docker-vm)"` command should set them to their correct values. (In this example, `my-docker-vm` is the name of a machine you created.)
> Note: Some of these variables can also be provided using an
> [environment file](../env-file.md)
## COMPOSE\_PROJECT\_NAME
Sets the project name. This value is prepended, along with the service name, to the container's name on start up. For example, if your project name is `myapp` and it includes two services `db` and `web`, then Compose starts containers named `myapp_db_1` and `myapp_web_1` respectively.
@ -81,3 +84,4 @@ it failed. Defaults to 60 seconds.
- [User guide](../index.md)
- [Installing Compose](../install.md)
- [Compose file reference](../compose-file.md)
- [Environment file](../env-file.md)

View File

@ -25,10 +25,20 @@ Usage:
docker-compose -h|--help
Options:
-f, --file FILE Specify an alternate compose file (default: docker-compose.yml)
-p, --project-name NAME Specify an alternate project name (default: directory name)
--verbose Show more output
-v, --version Print version and exit
-f, --file FILE Specify an alternate compose file (default: docker-compose.yml)
-p, --project-name NAME Specify an alternate project name (default: directory name)
--verbose Show more output
-v, --version Print version and exit
-H, --host HOST Daemon socket to connect to
--tls Use TLS; implied by --tlsverify
--tlscacert CA_PATH Trust certs signed only by this CA
--tlscert CLIENT_CERT_PATH Path to TLS certificate file
--tlskey TLS_KEY_PATH Path to TLS key file
--tlsverify Use TLS and verify the remote
--skip-hostname-check Don't check the daemon's hostname against the name specified
in the client certificate (for example if your docker host
is an IP address)
Commands:
build Build or rebuild services

View File

@ -26,14 +26,11 @@ format](compose-file.md#versioning) you are using:
- subject to the [limitations](#limitations) described below,
- as long as the Swarm cluster is configured to use the [overlay
driver](/engine/userguide/networking/dockernetworks.md#an-overlay-network),
- as long as the Swarm cluster is configured to use the [overlay driver](https://docs.docker.com/engine/userguide/networking/dockernetworks/#an-overlay-network),
or a custom driver which supports multi-host networking.
Read the [Getting started with multi-host
networking](/engine/userguide/networking/get-started-overlay.md) to see how to
set up a Swarm cluster with [Docker Machine](/machine/overview) and the overlay driver.
Once you've got it running, deploying your app to it should be as simple as:
Read [Get started with multi-host networking](https://docs.docker.com/engine/userguide/networking/get-started-overlay/) to see how to
set up a Swarm cluster with [Docker Machine](/machine/overview) and the overlay driver. Once you've got it running, deploying your app to it should be as simple as:
$ eval "$(docker-machine env --swarm <name of swarm master machine>)"
$ docker-compose up

View File

@ -36,8 +36,10 @@ with Docker containers. This quick-start guide demonstrates how to use Compose t
In this case, your Dockerfile should include these two lines:
FROM orchardup/php5
FROM php:5.6-fpm
RUN docker-php-ext-install mysql
ADD . /code
CMD php -S 0.0.0.0:8000 -t /code/wordpress/
This tells the Docker Engine daemon how to build an image defining a container that contains PHP and WordPress.
@ -47,7 +49,6 @@ with Docker containers. This quick-start guide demonstrates how to use Compose t
services:
web:
build: .
command: php -S 0.0.0.0:8000 -t /code/wordpress/
ports:
- "8000:8000"
depends_on:
@ -55,9 +56,12 @@ with Docker containers. This quick-start guide demonstrates how to use Compose t
volumes:
- .:/code
db:
image: orchardup/mysql
image: mysql
environment:
MYSQL_ROOT_PASSWORD: wordpress
MYSQL_DATABASE: wordpress
MYSQL_USER: wordpress
MYSQL_PASSWORD: wordpress
5. Download WordPress into the current directory:
@ -71,8 +75,8 @@ with Docker containers. This quick-start guide demonstrates how to use Compose t
<?php
define('DB_NAME', 'wordpress');
define('DB_USER', 'root');
define('DB_PASSWORD', '');
define('DB_USER', 'wordpress');
define('DB_PASSWORD', 'wordpress');
define('DB_HOST', "db:3306");
define('DB_CHARSET', 'utf8');
define('DB_COLLATE', '');

View File

@ -1,9 +1,9 @@
PyYAML==3.11
cached-property==1.2.0
docker-py==1.8.0rc5
dockerpty==0.4.1
docopt==0.6.1
enum34==1.0.4
git+https://github.com/docker/docker-py.git@5c1c42397cf0fdb74182df2d69822b82df8f2a6a#egg=docker-py
jsonschema==2.5.1
requests==2.7.0
six==1.7.3

View File

@ -34,7 +34,7 @@ install_requires = [
'requests >= 2.6.1, < 2.8',
'texttable >= 0.8.1, < 0.9',
'websocket-client >= 0.32.0, < 1.0',
'docker-py >= 1.7.0, < 2',
'docker-py > 1.7.2, < 2',
'dockerpty >= 0.4.1, < 0.5',
'six >= 1.3.0, < 2',
'jsonschema >= 2.5.1, < 3',

4
tests/fixtures/default-env-file/.env vendored Normal file
View File

@ -0,0 +1,4 @@
IMAGE=alpine:latest
COMMAND=true
PORT1=5643
PORT2=9999

View File

@ -0,0 +1,6 @@
web:
image: ${IMAGE}
command: ${COMMAND}
ports:
- $PORT1
- $PORT2

View File

@ -13,4 +13,5 @@ def build_config(contents, **kwargs):
def build_config_details(contents, working_dir='working_dir', filename='filename.yml'):
return ConfigDetails(
working_dir,
[ConfigFile(filename, contents)])
[ConfigFile(filename, contents)],
)

View File

@ -6,6 +6,7 @@ import shutil
import tempfile
from os import path
import pytest
from docker.errors import APIError
from six import StringIO
from six import text_type
@ -768,17 +769,17 @@ class ServiceTest(DockerClientTestCase):
container = service.create_container(number=next_number, quiet=True)
container.start()
self.assertTrue(container.is_running)
self.assertEqual(len(service.containers()), 1)
container.inspect()
assert container.is_running
assert len(service.containers()) == 1
service.scale(1)
self.assertEqual(len(service.containers()), 1)
assert len(service.containers()) == 1
container.inspect()
self.assertTrue(container.is_running)
assert container.is_running
captured_output = mock_log.info.call_args[0]
self.assertIn('Desired container number already achieved', captured_output)
assert 'Desired container number already achieved' in captured_output
@mock.patch('compose.service.log')
def test_scale_with_custom_container_name_outputs_warning(self, mock_log):
@ -985,6 +986,7 @@ class ServiceTest(DockerClientTestCase):
one_off_container = service.create_container(one_off=True)
self.assertNotEqual(one_off_container.name, 'my-web-container')
@pytest.mark.skipif(True, reason="Broken on 1.11.0rc1")
def test_log_drive_invalid(self):
service = self.create_service('web', logging={'driver': 'xxx'})
expected_error_msg = "logger: no log driver named 'xxx' is registered"

View File

@ -12,6 +12,7 @@ from compose.cli.docker_client import docker_client
from compose.config.config import resolve_environment
from compose.config.config import V1
from compose.config.config import V2_0
from compose.config.environment import Environment
from compose.const import API_VERSIONS
from compose.const import LABEL_PROJECT
from compose.progress_stream import stream_output
@ -60,7 +61,7 @@ class DockerClientTestCase(unittest.TestCase):
else:
version = API_VERSIONS[V2_0]
cls.client = docker_client(version)
cls.client = docker_client(Environment(), version)
def tearDown(self):
for c in self.client.containers(
@ -89,7 +90,9 @@ class DockerClientTestCase(unittest.TestCase):
if 'command' not in kwargs:
kwargs['command'] = ["top"]
kwargs['environment'] = resolve_environment(kwargs)
kwargs['environment'] = resolve_environment(
kwargs, Environment.from_env_file(None)
)
labels = dict(kwargs.setdefault('labels', {}))
labels['com.docker.compose.test-name'] = self.id()

View File

@ -6,6 +6,7 @@ import os
import pytest
from compose.cli.command import get_config_path_from_options
from compose.config.environment import Environment
from compose.const import IS_WINDOWS_PLATFORM
from tests import mock
@ -15,24 +16,33 @@ class TestGetConfigPathFromOptions(object):
def test_path_from_options(self):
paths = ['one.yml', 'two.yml']
opts = {'--file': paths}
assert get_config_path_from_options(opts) == paths
environment = Environment.from_env_file('.')
assert get_config_path_from_options('.', opts, environment) == paths
def test_single_path_from_env(self):
with mock.patch.dict(os.environ):
os.environ['COMPOSE_FILE'] = 'one.yml'
assert get_config_path_from_options({}) == ['one.yml']
environment = Environment.from_env_file('.')
assert get_config_path_from_options('.', {}, environment) == ['one.yml']
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='posix separator')
def test_multiple_path_from_env(self):
with mock.patch.dict(os.environ):
os.environ['COMPOSE_FILE'] = 'one.yml:two.yml'
assert get_config_path_from_options({}) == ['one.yml', 'two.yml']
environment = Environment.from_env_file('.')
assert get_config_path_from_options(
'.', {}, environment
) == ['one.yml', 'two.yml']
@pytest.mark.skipif(not IS_WINDOWS_PLATFORM, reason='windows separator')
def test_multiple_path_from_env_windows(self):
with mock.patch.dict(os.environ):
os.environ['COMPOSE_FILE'] = 'one.yml;two.yml'
assert get_config_path_from_options({}) == ['one.yml', 'two.yml']
environment = Environment.from_env_file('.')
assert get_config_path_from_options(
'.', {}, environment
) == ['one.yml', 'two.yml']
def test_no_path(self):
assert not get_config_path_from_options({})
environment = Environment.from_env_file('.')
assert not get_config_path_from_options('.', {}, environment)

View File

@ -17,12 +17,12 @@ class DockerClientTestCase(unittest.TestCase):
def test_docker_client_no_home(self):
with mock.patch.dict(os.environ):
del os.environ['HOME']
docker_client()
docker_client(os.environ)
def test_docker_client_with_custom_timeout(self):
timeout = 300
with mock.patch('compose.cli.docker_client.HTTP_TIMEOUT', 300):
client = docker_client()
client = docker_client(os.environ)
self.assertEqual(client.timeout, int(timeout))
@ -103,3 +103,9 @@ class TLSConfigTestCase(unittest.TestCase):
options = {'--tlskey': self.key}
with pytest.raises(docker.errors.TLSParameterError):
tls_config_from_options(options)
def test_assert_hostname_explicit_skip(self):
options = {'--tlscacert': self.ca_cert, '--skip-hostname-check': True}
result = tls_config_from_options(options)
assert isinstance(result, docker.tls.TLSConfig)
assert result.assert_hostname is False

View File

@ -3,6 +3,8 @@ from __future__ import absolute_import
from __future__ import unicode_literals
import os
import shutil
import tempfile
import docker
import py
@ -43,11 +45,11 @@ class CLITestCase(unittest.TestCase):
project_name = get_project_name(None, project_name=name)
self.assertEquals('explicitprojectname', project_name)
@mock.patch.dict(os.environ)
def test_project_name_from_environment_new_var(self):
name = 'namefromenv'
with mock.patch.dict(os.environ):
os.environ['COMPOSE_PROJECT_NAME'] = name
project_name = get_project_name(None)
os.environ['COMPOSE_PROJECT_NAME'] = name
project_name = get_project_name(None)
self.assertEquals(project_name, name)
def test_project_name_with_empty_environment_var(self):
@ -57,6 +59,22 @@ class CLITestCase(unittest.TestCase):
project_name = get_project_name(base_dir)
self.assertEquals('simplecomposefile', project_name)
@mock.patch.dict(os.environ)
def test_project_name_with_environment_file(self):
base_dir = tempfile.mkdtemp()
try:
name = 'namefromenvfile'
with open(os.path.join(base_dir, '.env'), 'w') as f:
f.write('COMPOSE_PROJECT_NAME={}'.format(name))
project_name = get_project_name(base_dir)
assert project_name == name
# Environment has priority over .env file
os.environ['COMPOSE_PROJECT_NAME'] = 'namefromenv'
assert get_project_name(base_dir) == os.environ['COMPOSE_PROJECT_NAME']
finally:
shutil.rmtree(base_dir)
def test_get_project(self):
base_dir = 'tests/fixtures/longer-filename-composefile'
project = get_project(base_dir)

View File

@ -17,6 +17,7 @@ from compose.config.config import resolve_build_args
from compose.config.config import resolve_environment
from compose.config.config import V1
from compose.config.config import V2_0
from compose.config.environment import Environment
from compose.config.errors import ConfigurationError
from compose.config.errors import VERSION_EXPLANATION
from compose.config.types import VolumeSpec
@ -36,7 +37,9 @@ def make_service_dict(name, service_dict, working_dir, filename=None):
filename=filename,
name=name,
config=service_dict),
config.ConfigFile(filename=filename, config={}))
config.ConfigFile(filename=filename, config={}),
environment=Environment.from_env_file(working_dir)
)
return config.process_service(resolver.run())
@ -1581,8 +1584,25 @@ class PortsTest(unittest.TestCase):
class InterpolationTest(unittest.TestCase):
@mock.patch.dict(os.environ)
def test_config_file_with_environment_file(self):
project_dir = 'tests/fixtures/default-env-file'
service_dicts = config.load(
config.find(
project_dir, None, Environment.from_env_file(project_dir)
)
).services
self.assertEqual(service_dicts[0], {
'name': 'web',
'image': 'alpine:latest',
'ports': ['5643', '9999'],
'command': 'true'
})
@mock.patch.dict(os.environ)
def test_config_file_with_environment_variable(self):
project_dir = 'tests/fixtures/environment-interpolation'
os.environ.update(
IMAGE="busybox",
HOST_PORT="80",
@ -1590,7 +1610,9 @@ class InterpolationTest(unittest.TestCase):
)
service_dicts = config.load(
config.find('tests/fixtures/environment-interpolation', None),
config.find(
project_dir, None, Environment.from_env_file(project_dir)
)
).services
self.assertEqual(service_dicts, [
@ -1620,7 +1642,7 @@ class InterpolationTest(unittest.TestCase):
None,
)
with mock.patch('compose.config.interpolation.log') as log:
with mock.patch('compose.config.environment.log') as log:
config.load(config_details)
self.assertEqual(2, log.warn.call_count)
@ -2041,7 +2063,9 @@ class EnvTest(unittest.TestCase):
},
}
self.assertEqual(
resolve_environment(service_dict),
resolve_environment(
service_dict, Environment.from_env_file(None)
),
{'FILE_DEF': 'F1', 'FILE_DEF_EMPTY': '', 'ENV_DEF': 'E3', 'NO_DEF': None},
)
@ -2078,7 +2102,10 @@ class EnvTest(unittest.TestCase):
os.environ['FILE_DEF_EMPTY'] = 'E2'
os.environ['ENV_DEF'] = 'E3'
self.assertEqual(
resolve_environment({'env_file': ['tests/fixtures/env/resolve.env']}),
resolve_environment(
{'env_file': ['tests/fixtures/env/resolve.env']},
Environment.from_env_file(None)
),
{
'FILE_DEF': u'bär',
'FILE_DEF_EMPTY': '',
@ -2101,7 +2128,7 @@ class EnvTest(unittest.TestCase):
}
}
self.assertEqual(
resolve_build_args(build),
resolve_build_args(build, Environment.from_env_file(build['context'])),
{'arg1': 'value1', 'empty_arg': '', 'env_arg': 'value2', 'no_env': None},
)
@ -2133,7 +2160,9 @@ class EnvTest(unittest.TestCase):
def load_from_filename(filename):
return config.load(config.find('.', [filename])).services
return config.load(
config.find('.', [filename], Environment.from_env_file('.'))
).services
class ExtendsTest(unittest.TestCase):
@ -2465,6 +2494,7 @@ class ExtendsTest(unittest.TestCase):
},
]))
@mock.patch.dict(os.environ)
def test_extends_with_environment_and_env_files(self):
tmpdir = py.test.ensuretemp('test_extends_with_environment')
self.addCleanup(tmpdir.remove)
@ -2520,12 +2550,12 @@ class ExtendsTest(unittest.TestCase):
},
},
]
with mock.patch.dict(os.environ):
os.environ['SECRET'] = 'secret'
os.environ['THING'] = 'thing'
os.environ['COMMON_ENV_FILE'] = 'secret'
os.environ['TOP_ENV_FILE'] = 'secret'
config = load_from_filename(str(tmpdir.join('docker-compose.yml')))
os.environ['SECRET'] = 'secret'
os.environ['THING'] = 'thing'
os.environ['COMMON_ENV_FILE'] = 'secret'
os.environ['TOP_ENV_FILE'] = 'secret'
config = load_from_filename(str(tmpdir.join('docker-compose.yml')))
assert config == expected

View File

@ -6,6 +6,7 @@ import os
import mock
import pytest
from compose.config.environment import Environment
from compose.config.interpolation import interpolate_environment_variables
@ -19,7 +20,7 @@ def mock_env():
def test_interpolate_environment_variables_in_services(mock_env):
services = {
'servivea': {
'servicea': {
'image': 'example:${USER}',
'volumes': ['$FOO:/target'],
'logging': {
@ -31,7 +32,7 @@ def test_interpolate_environment_variables_in_services(mock_env):
}
}
expected = {
'servivea': {
'servicea': {
'image': 'example:jenny',
'volumes': ['bar:/target'],
'logging': {
@ -42,7 +43,9 @@ def test_interpolate_environment_variables_in_services(mock_env):
}
}
}
assert interpolate_environment_variables(services, 'service') == expected
assert interpolate_environment_variables(
services, 'service', Environment.from_env_file(None)
) == expected
def test_interpolate_environment_variables_in_volumes(mock_env):
@ -66,4 +69,6 @@ def test_interpolate_environment_variables_in_volumes(mock_env):
},
'other': {},
}
assert interpolate_environment_variables(volumes, 'volume') == expected
assert interpolate_environment_variables(
volumes, 'volume', Environment.from_env_file(None)
) == expected

View File

@ -3,7 +3,7 @@ from __future__ import unicode_literals
import unittest
from compose.config.interpolation import BlankDefaultDict as bddict
from compose.config.environment import Environment as bddict
from compose.config.interpolation import interpolate
from compose.config.interpolation import InvalidInterpolation

View File

@ -270,12 +270,21 @@ class ProjectTest(unittest.TestCase):
'time': 1420092061,
'timeNano': 14200920610000004000,
},
{
'status': 'destroy',
'from': 'example/db',
'id': 'eeeee',
'time': 1420092061,
'timeNano': 14200920610000004000,
},
])
def dt_with_microseconds(dt, us):
return datetime.datetime.fromtimestamp(dt).replace(microsecond=us)
def get_container(cid):
if cid == 'eeeee':
raise NotFound(None, None, "oops")
if cid == 'abcde':
name = 'web'
labels = {LABEL_SERVICE: name}