Merge pull request #6034 from docker/bump-1.22.0-rc1

Bump 1.22.0-rc1
This commit is contained in:
Joffrey F 2018-06-21 14:30:26 -07:00 committed by GitHub
commit bdd7d47640
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
28 changed files with 909 additions and 56 deletions

.gitignore vendored
View File

@ -12,3 +12,4 @@ compose/GITSHA
*.swo
*.swp
.DS_Store
.cache

View File

@ -1,6 +1,55 @@
Change log
==========
1.22.0 (2018-06-30)
-------------------
### Features
#### Compose format version 3.7
- Introduced version 3.7 of the `docker-compose.yml` specification.
This version requires Docker Engine 18.06.0 or above.
- Added support for `rollback_config` in the deploy configuration
- Added support for the `init` parameter in service configurations
- Added support for extension fields in service, network, volume, secret,
  and config configurations (see the combined sketch after this entry)
#### Compose format version 2.4
- Added support for extension fields in service, network,
and volume configurations
### Bugfixes
- Fixed a bug that prevented deployment with some Compose files when
`DOCKER_DEFAULT_PLATFORM` was set
- Compose will no longer try to create containers or volumes with
invalid starting characters
- Fixed several bugs that prevented Compose commands from working properly
with containers created with an older version of Compose
- Fixed an issue with the output of `docker-compose config` with the
`--compatibility-mode` flag enabled when the source file contains
attachable networks
- Fixed a bug that prevented the `gcloud` credential store from working
properly when used with the Compose binary on UNIX
- Fixed a bug that caused connection errors when trying to operate
over a non-HTTPS TCP connection on Windows
- Fixed a bug that caused builds to fail on Windows if the Dockerfile
was located in a subdirectory of the build context
- Fixed an issue that prevented proper parsing of UTF-8 BOM encoded
Compose files on Windows
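
For illustration only (not part of the changelog), here is a minimal, hypothetical Compose configuration exercising the 1.22.0 additions above: format version 3.7, `rollback_config`, the `init` flag, and `x-` extension fields. It is expressed as a Python mapping and dumped with PyYAML (already a Compose dependency); the service name and image are placeholders.

    import yaml  # PyYAML

    # Hypothetical configuration showing the new 3.7 features.
    config = {
        'version': '3.7',
        'x-site-defaults': {'logging_driver': 'json-file'},  # top-level extension field
        'services': {
            'web': {
                'image': 'nginx:alpine',   # placeholder image
                'init': True,              # run an init process inside the container
                'deploy': {
                    'replicas': 2,
                    'rollback_config': {
                        'parallelism': 1,
                        'delay': '5s',
                        'order': 'stop-first',
                    },
                },
                'x-team': 'platform',      # service-level extension field
            },
        },
    }

    print(yaml.safe_dump(config, default_flow_style=False))
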
1.21.2 (2018-05-03)
-------------------

View File

@ -1,4 +1,4 @@
from __future__ import absolute_import
from __future__ import unicode_literals
__version__ = '1.21.2'
__version__ = '1.22.0-rc1'

View File

@ -117,6 +117,13 @@ def docker_client(environment, version=None, tls_config=None, host=None,
kwargs['user_agent'] = generate_user_agent()
# Workaround for
# https://pyinstaller.readthedocs.io/en/v3.3.1/runtime-information.html#ld-library-path-libpath-considerations
if 'LD_LIBRARY_PATH_ORIG' in environment:
kwargs['credstore_env'] = {
'LD_LIBRARY_PATH': environment.get('LD_LIBRARY_PATH_ORIG'),
}
client = APIClient(**kwargs)
client._original_base_url = kwargs.get('base_url')
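
For context, a hedged standalone sketch (not part of the diff) of what this workaround amounts to: PyInstaller bundles overwrite LD_LIBRARY_PATH and save the original value in LD_LIBRARY_PATH_ORIG, and the `credstore_env` keyword from docker SDK 3.4.0 lets Compose restore it for credential-helper subprocesses such as docker-credential-gcloud. The socket path below is a placeholder.

    import os

    from docker import APIClient  # docker SDK for Python >= 3.4.0

    kwargs = {'base_url': 'unix:///var/run/docker.sock'}  # placeholder
    # Restore the pre-PyInstaller library path for credential helpers.
    if 'LD_LIBRARY_PATH_ORIG' in os.environ:
        kwargs['credstore_env'] = {
            'LD_LIBRARY_PATH': os.environ['LD_LIBRARY_PATH_ORIG'],
        }
    client = APIClient(**kwargs)
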

View File

@ -918,12 +918,17 @@ def convert_restart_policy(name):
def translate_deploy_keys_to_container_config(service_dict):
if 'credential_spec' in service_dict:
del service_dict['credential_spec']
if 'configs' in service_dict:
del service_dict['configs']
if 'deploy' not in service_dict:
return service_dict, []
deploy_dict = service_dict['deploy']
ignored_keys = [
k for k in ['endpoint_mode', 'labels', 'update_config', 'placement']
k for k in ['endpoint_mode', 'labels', 'update_config', 'rollback_config', 'placement']
if k in deploy_dict
]
@ -946,10 +951,6 @@ def translate_deploy_keys_to_container_config(service_dict):
)
del service_dict['deploy']
if 'credential_spec' in service_dict:
del service_dict['credential_spec']
if 'configs' in service_dict:
del service_dict['configs']
return service_dict, ignored_keys
@ -1135,6 +1136,7 @@ def merge_deploy(base, override):
md.merge_scalar('replicas')
md.merge_mapping('labels', parse_labels)
md.merge_mapping('update_config')
md.merge_mapping('rollback_config')
md.merge_mapping('restart_policy')
if md.needs_merge('resources'):
resources_md = MergeDict(md.base.get('resources') or {}, md.override.get('resources') or {})
@ -1434,15 +1436,15 @@ def has_uppercase(name):
return any(char in string.ascii_uppercase for char in name)
def load_yaml(filename, encoding=None):
def load_yaml(filename, encoding=None, binary=True):
try:
with io.open(filename, 'r', encoding=encoding) as fh:
with io.open(filename, 'rb' if binary else 'r', encoding=encoding) as fh:
return yaml.safe_load(fh)
except (IOError, yaml.YAMLError, UnicodeDecodeError) as e:
if encoding is None:
# Sometimes the user's locale sets an encoding that doesn't match
# the YAML files. In such cases, retry once with the "default"
# UTF-8 encoding
return load_yaml(filename, encoding='utf-8')
return load_yaml(filename, encoding='utf-8-sig', binary=False)
error_name = getattr(e, '__module__', '') + '.' + e.__class__.__name__
raise ConfigurationError(u"{}: {}".format(error_name, e))
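
A standalone sketch of the encoding behaviour the `load_yaml` change relies on (the file name is hypothetical): reading the file in binary mode lets PyYAML detect and strip a UTF-8 byte-order mark itself, and when a text encoding has to be forced on retry, 'utf-8-sig' strips the BOM where plain 'utf-8' would leave a U+FEFF character at the start of the stream.

    import io

    import yaml

    path = 'bom-example.yml'  # hypothetical file
    with io.open(path, 'wb') as fh:
        fh.write(b'\xef\xbb\xbf' + b'version: "3.7"\n')  # UTF-8 BOM + content

    # Binary mode: PyYAML detects the BOM and strips it.
    with io.open(path, 'rb') as fh:
        print(yaml.safe_load(fh))   # {'version': '3.7'}

    # Forced text decoding on retry: 'utf-8-sig' also strips the BOM.
    with io.open(path, 'r', encoding='utf-8-sig') as fh:
        print(yaml.safe_load(fh))   # {'version': '3.7'}
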

View File

@ -346,6 +346,7 @@
"dependencies": {
"memswap_limit": ["mem_limit"]
},
"patternProperties": {"^x-": {}},
"additionalProperties": false
},
@ -409,6 +410,7 @@
"labels": {"$ref": "#/definitions/labels"},
"name": {"type": "string"}
},
"patternProperties": {"^x-": {}},
"additionalProperties": false
},
@ -451,6 +453,7 @@
"labels": {"$ref": "#/definitions/labels"},
"name": {"type": "string"}
},
"patternProperties": {"^x-": {}},
"additionalProperties": false
},

View File

@ -0,0 +1,602 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"id": "config_schema_v3.7.json",
"type": "object",
"required": ["version"],
"properties": {
"version": {
"type": "string"
},
"services": {
"id": "#/properties/services",
"type": "object",
"patternProperties": {
"^[a-zA-Z0-9._-]+$": {
"$ref": "#/definitions/service"
}
},
"additionalProperties": false
},
"networks": {
"id": "#/properties/networks",
"type": "object",
"patternProperties": {
"^[a-zA-Z0-9._-]+$": {
"$ref": "#/definitions/network"
}
}
},
"volumes": {
"id": "#/properties/volumes",
"type": "object",
"patternProperties": {
"^[a-zA-Z0-9._-]+$": {
"$ref": "#/definitions/volume"
}
},
"additionalProperties": false
},
"secrets": {
"id": "#/properties/secrets",
"type": "object",
"patternProperties": {
"^[a-zA-Z0-9._-]+$": {
"$ref": "#/definitions/secret"
}
},
"additionalProperties": false
},
"configs": {
"id": "#/properties/configs",
"type": "object",
"patternProperties": {
"^[a-zA-Z0-9._-]+$": {
"$ref": "#/definitions/config"
}
},
"additionalProperties": false
}
},
"patternProperties": {"^x-": {}},
"additionalProperties": false,
"definitions": {
"service": {
"id": "#/definitions/service",
"type": "object",
"properties": {
"deploy": {"$ref": "#/definitions/deployment"},
"build": {
"oneOf": [
{"type": "string"},
{
"type": "object",
"properties": {
"context": {"type": "string"},
"dockerfile": {"type": "string"},
"args": {"$ref": "#/definitions/list_or_dict"},
"labels": {"$ref": "#/definitions/list_or_dict"},
"cache_from": {"$ref": "#/definitions/list_of_strings"},
"network": {"type": "string"},
"target": {"type": "string"},
"shm_size": {"type": ["integer", "string"]}
},
"additionalProperties": false
}
]
},
"cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"cgroup_parent": {"type": "string"},
"command": {
"oneOf": [
{"type": "string"},
{"type": "array", "items": {"type": "string"}}
]
},
"configs": {
"type": "array",
"items": {
"oneOf": [
{"type": "string"},
{
"type": "object",
"properties": {
"source": {"type": "string"},
"target": {"type": "string"},
"uid": {"type": "string"},
"gid": {"type": "string"},
"mode": {"type": "number"}
}
}
]
}
},
"container_name": {"type": "string"},
"credential_spec": {"type": "object", "properties": {
"file": {"type": "string"},
"registry": {"type": "string"}
}},
"depends_on": {"$ref": "#/definitions/list_of_strings"},
"devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"dns": {"$ref": "#/definitions/string_or_list"},
"dns_search": {"$ref": "#/definitions/string_or_list"},
"domainname": {"type": "string"},
"entrypoint": {
"oneOf": [
{"type": "string"},
{"type": "array", "items": {"type": "string"}}
]
},
"env_file": {"$ref": "#/definitions/string_or_list"},
"environment": {"$ref": "#/definitions/list_or_dict"},
"expose": {
"type": "array",
"items": {
"type": ["string", "number"],
"format": "expose"
},
"uniqueItems": true
},
"external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"extra_hosts": {"$ref": "#/definitions/list_or_dict"},
"healthcheck": {"$ref": "#/definitions/healthcheck"},
"hostname": {"type": "string"},
"image": {"type": "string"},
"init": {"type": "boolean"},
"ipc": {"type": "string"},
"isolation": {"type": "string"},
"labels": {"$ref": "#/definitions/list_or_dict"},
"links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"logging": {
"type": "object",
"properties": {
"driver": {"type": "string"},
"options": {
"type": "object",
"patternProperties": {
"^.+$": {"type": ["string", "number", "null"]}
}
}
},
"additionalProperties": false
},
"mac_address": {"type": "string"},
"network_mode": {"type": "string"},
"networks": {
"oneOf": [
{"$ref": "#/definitions/list_of_strings"},
{
"type": "object",
"patternProperties": {
"^[a-zA-Z0-9._-]+$": {
"oneOf": [
{
"type": "object",
"properties": {
"aliases": {"$ref": "#/definitions/list_of_strings"},
"ipv4_address": {"type": "string"},
"ipv6_address": {"type": "string"}
},
"additionalProperties": false
},
{"type": "null"}
]
}
},
"additionalProperties": false
}
]
},
"pid": {"type": ["string", "null"]},
"ports": {
"type": "array",
"items": {
"oneOf": [
{"type": "number", "format": "ports"},
{"type": "string", "format": "ports"},
{
"type": "object",
"properties": {
"mode": {"type": "string"},
"target": {"type": "integer"},
"published": {"type": "integer"},
"protocol": {"type": "string"}
},
"additionalProperties": false
}
]
},
"uniqueItems": true
},
"privileged": {"type": "boolean"},
"read_only": {"type": "boolean"},
"restart": {"type": "string"},
"security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"shm_size": {"type": ["number", "string"]},
"secrets": {
"type": "array",
"items": {
"oneOf": [
{"type": "string"},
{
"type": "object",
"properties": {
"source": {"type": "string"},
"target": {"type": "string"},
"uid": {"type": "string"},
"gid": {"type": "string"},
"mode": {"type": "number"}
}
}
]
}
},
"sysctls": {"$ref": "#/definitions/list_or_dict"},
"stdin_open": {"type": "boolean"},
"stop_grace_period": {"type": "string", "format": "duration"},
"stop_signal": {"type": "string"},
"tmpfs": {"$ref": "#/definitions/string_or_list"},
"tty": {"type": "boolean"},
"ulimits": {
"type": "object",
"patternProperties": {
"^[a-z]+$": {
"oneOf": [
{"type": "integer"},
{
"type":"object",
"properties": {
"hard": {"type": "integer"},
"soft": {"type": "integer"}
},
"required": ["soft", "hard"],
"additionalProperties": false
}
]
}
}
},
"user": {"type": "string"},
"userns_mode": {"type": "string"},
"volumes": {
"type": "array",
"items": {
"oneOf": [
{"type": "string"},
{
"type": "object",
"required": ["type"],
"properties": {
"type": {"type": "string"},
"source": {"type": "string"},
"target": {"type": "string"},
"read_only": {"type": "boolean"},
"consistency": {"type": "string"},
"bind": {
"type": "object",
"properties": {
"propagation": {"type": "string"}
}
},
"volume": {
"type": "object",
"properties": {
"nocopy": {"type": "boolean"}
}
},
"tmpfs": {
"type": "object",
"properties": {
"size": {
"type": "integer",
"minimum": 0
}
}
}
},
"additionalProperties": false
}
],
"uniqueItems": true
}
},
"working_dir": {"type": "string"}
},
"patternProperties": {"^x-": {}},
"additionalProperties": false
},
"healthcheck": {
"id": "#/definitions/healthcheck",
"type": "object",
"additionalProperties": false,
"properties": {
"disable": {"type": "boolean"},
"interval": {"type": "string", "format": "duration"},
"retries": {"type": "number"},
"test": {
"oneOf": [
{"type": "string"},
{"type": "array", "items": {"type": "string"}}
]
},
"timeout": {"type": "string", "format": "duration"},
"start_period": {"type": "string", "format": "duration"}
}
},
"deployment": {
"id": "#/definitions/deployment",
"type": ["object", "null"],
"properties": {
"mode": {"type": "string"},
"endpoint_mode": {"type": "string"},
"replicas": {"type": "integer"},
"labels": {"$ref": "#/definitions/list_or_dict"},
"rollback_config": {
"type": "object",
"properties": {
"parallelism": {"type": "integer"},
"delay": {"type": "string", "format": "duration"},
"failure_action": {"type": "string"},
"monitor": {"type": "string", "format": "duration"},
"max_failure_ratio": {"type": "number"},
"order": {"type": "string", "enum": [
"start-first", "stop-first"
]}
},
"additionalProperties": false
},
"update_config": {
"type": "object",
"properties": {
"parallelism": {"type": "integer"},
"delay": {"type": "string", "format": "duration"},
"failure_action": {"type": "string"},
"monitor": {"type": "string", "format": "duration"},
"max_failure_ratio": {"type": "number"},
"order": {"type": "string", "enum": [
"start-first", "stop-first"
]}
},
"additionalProperties": false
},
"resources": {
"type": "object",
"properties": {
"limits": {
"type": "object",
"properties": {
"cpus": {"type": "string"},
"memory": {"type": "string"}
},
"additionalProperties": false
},
"reservations": {
"type": "object",
"properties": {
"cpus": {"type": "string"},
"memory": {"type": "string"},
"generic_resources": {"$ref": "#/definitions/generic_resources"}
},
"additionalProperties": false
}
},
"additionalProperties": false
},
"restart_policy": {
"type": "object",
"properties": {
"condition": {"type": "string"},
"delay": {"type": "string", "format": "duration"},
"max_attempts": {"type": "integer"},
"window": {"type": "string", "format": "duration"}
},
"additionalProperties": false
},
"placement": {
"type": "object",
"properties": {
"constraints": {"type": "array", "items": {"type": "string"}},
"preferences": {
"type": "array",
"items": {
"type": "object",
"properties": {
"spread": {"type": "string"}
},
"additionalProperties": false
}
}
},
"additionalProperties": false
}
},
"additionalProperties": false
},
"generic_resources": {
"id": "#/definitions/generic_resources",
"type": "array",
"items": {
"type": "object",
"properties": {
"discrete_resource_spec": {
"type": "object",
"properties": {
"kind": {"type": "string"},
"value": {"type": "number"}
},
"additionalProperties": false
}
},
"additionalProperties": false
}
},
"network": {
"id": "#/definitions/network",
"type": ["object", "null"],
"properties": {
"name": {"type": "string"},
"driver": {"type": "string"},
"driver_opts": {
"type": "object",
"patternProperties": {
"^.+$": {"type": ["string", "number"]}
}
},
"ipam": {
"type": "object",
"properties": {
"driver": {"type": "string"},
"config": {
"type": "array",
"items": {
"type": "object",
"properties": {
"subnet": {"type": "string"}
},
"additionalProperties": false
}
}
},
"additionalProperties": false
},
"external": {
"type": ["boolean", "object"],
"properties": {
"name": {"type": "string"}
},
"additionalProperties": false
},
"internal": {"type": "boolean"},
"attachable": {"type": "boolean"},
"labels": {"$ref": "#/definitions/list_or_dict"}
},
"patternProperties": {"^x-": {}},
"additionalProperties": false
},
"volume": {
"id": "#/definitions/volume",
"type": ["object", "null"],
"properties": {
"name": {"type": "string"},
"driver": {"type": "string"},
"driver_opts": {
"type": "object",
"patternProperties": {
"^.+$": {"type": ["string", "number"]}
}
},
"external": {
"type": ["boolean", "object"],
"properties": {
"name": {"type": "string"}
},
"additionalProperties": false
},
"labels": {"$ref": "#/definitions/list_or_dict"}
},
"patternProperties": {"^x-": {}},
"additionalProperties": false
},
"secret": {
"id": "#/definitions/secret",
"type": "object",
"properties": {
"name": {"type": "string"},
"file": {"type": "string"},
"external": {
"type": ["boolean", "object"],
"properties": {
"name": {"type": "string"}
}
},
"labels": {"$ref": "#/definitions/list_or_dict"}
},
"patternProperties": {"^x-": {}},
"additionalProperties": false
},
"config": {
"id": "#/definitions/config",
"type": "object",
"properties": {
"name": {"type": "string"},
"file": {"type": "string"},
"external": {
"type": ["boolean", "object"],
"properties": {
"name": {"type": "string"}
}
},
"labels": {"$ref": "#/definitions/list_or_dict"}
},
"patternProperties": {"^x-": {}},
"additionalProperties": false
},
"string_or_list": {
"oneOf": [
{"type": "string"},
{"$ref": "#/definitions/list_of_strings"}
]
},
"list_of_strings": {
"type": "array",
"items": {"type": "string"},
"uniqueItems": true
},
"list_or_dict": {
"oneOf": [
{
"type": "object",
"patternProperties": {
".+": {
"type": ["string", "number", "null"]
}
},
"additionalProperties": false
},
{"type": "array", "items": {"type": "string"}, "uniqueItems": true}
]
},
"constraints": {
"service": {
"id": "#/definitions/constraints/service",
"anyOf": [
{"required": ["build"]},
{"required": ["image"]}
],
"properties": {
"build": {
"required": ["context"]
}
}
}
}
}
}
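
A small sketch of how this schema can be exercised directly with the jsonschema library (a Compose dependency); the file path is assumed relative to a checkout, the sample mapping is hypothetical, and custom formats such as "duration" are only enforced when a format checker is supplied.

    import json

    import jsonschema

    with open('compose/config/config_schema_v3.7.json') as fh:  # assumed path
        schema = json.load(fh)

    sample = {
        'version': '3.7',
        'services': {
            'web': {
                'image': 'nginx:alpine',
                'init': True,
                'deploy': {'rollback_config': {'order': 'stop-first'}},
            },
        },
        'x-owner': 'platform-team',  # allowed by the "^x-" patternProperties
    }

    # Raises jsonschema.ValidationError if the mapping violates the schema.
    jsonschema.validate(sample, schema)
    print('sample validates against config_schema_v3.7.json')
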

View File

@ -248,6 +248,8 @@ class ConversionMap(object):
service_path('deploy', 'replicas'): to_int,
service_path('deploy', 'update_config', 'parallelism'): to_int,
service_path('deploy', 'update_config', 'max_failure_ratio'): to_float,
service_path('deploy', 'rollback_config', 'parallelism'): to_int,
service_path('deploy', 'rollback_config', 'max_failure_ratio'): to_float,
service_path('deploy', 'restart_policy', 'max_attempts'): to_int,
service_path('mem_swappiness'): to_int,
service_path('labels', FULL_JOKER): to_str,
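
For background, a generic sketch of the idea behind these entries (names here are hypothetical, not the Compose internals): values that arrive as strings, for instance after environment-variable interpolation, are coerced back to numbers keyed by their path in the configuration, and the two added lines extend that map to `rollback_config`.

    # Hypothetical path-keyed coercion map.
    CONVERSIONS = {
        ('deploy', 'rollback_config', 'parallelism'): int,
        ('deploy', 'rollback_config', 'max_failure_ratio'): float,
    }

    def coerce(mapping, path=()):
        """Apply the converter registered for each nested path."""
        result = {}
        for key, value in mapping.items():
            current = path + (key,)
            if isinstance(value, dict):
                result[key] = coerce(value, current)
            else:
                result[key] = CONVERSIONS.get(current, lambda v: v)(value)
        return result

    service = {'deploy': {'rollback_config': {'parallelism': '2', 'max_failure_ratio': '0.3'}}}
    print(coerce(service))  # parallelism becomes 2, max_failure_ratio becomes 0.3
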

View File

@ -80,6 +80,10 @@ def denormalize_config(config, image_digests=None):
elif 'external' in conf:
conf['external'] = True
if 'attachable' in conf and config.version < V3_2:
# For compatibility mode, this option is invalid in v2
del conf['attachable']
return result
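
For orientation, a hedged standalone sketch of the rule applied here (helper names are hypothetical): when output is downgraded for compatibility mode, keys that only exist in newer format versions, such as `attachable` (introduced in 3.2), must be dropped.

    # Hypothetical helper: drop keys the target format version predates.
    KEY_INTRODUCED_IN = {'attachable': (3, 2)}

    def as_tuple(version):
        return tuple(int(part) for part in version.split('.'))

    def downgrade_network(conf, target_version):
        return {
            key: value for key, value in conf.items()
            if as_tuple(target_version) >= KEY_INTRODUCED_IN.get(key, (0,))
        }

    net = {'driver': 'bridge', 'attachable': True}
    print(downgrade_network(net, '2.4'))  # {'driver': 'bridge'}
    print(downgrade_network(net, '3.7'))  # {'driver': 'bridge', 'attachable': True}
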

View File

@ -36,6 +36,7 @@ COMPOSEFILE_V3_3 = ComposeVersion('3.3')
COMPOSEFILE_V3_4 = ComposeVersion('3.4')
COMPOSEFILE_V3_5 = ComposeVersion('3.5')
COMPOSEFILE_V3_6 = ComposeVersion('3.6')
COMPOSEFILE_V3_7 = ComposeVersion('3.7')
API_VERSIONS = {
COMPOSEFILE_V1: '1.21',
@ -51,6 +52,7 @@ API_VERSIONS = {
COMPOSEFILE_V3_4: '1.30',
COMPOSEFILE_V3_5: '1.30',
COMPOSEFILE_V3_6: '1.36',
COMPOSEFILE_V3_7: '1.38',
}
API_VERSION_TO_ENGINE_VERSION = {
@ -67,4 +69,5 @@ API_VERSION_TO_ENGINE_VERSION = {
API_VERSIONS[COMPOSEFILE_V3_4]: '17.06.0',
API_VERSIONS[COMPOSEFILE_V3_5]: '17.06.0',
API_VERSIONS[COMPOSEFILE_V3_6]: '18.02.0',
API_VERSIONS[COMPOSEFILE_V3_7]: '18.06.0',
}
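
For reference, a short sketch chaining the two tables extended here (assumes a checkout containing this change on PYTHONPATH): compose file format, then minimum API version, then minimum Engine version.

    from compose.const import (
        API_VERSIONS,
        API_VERSION_TO_ENGINE_VERSION,
        COMPOSEFILE_V3_7,
    )

    api_version = API_VERSIONS[COMPOSEFILE_V3_7]              # '1.38'
    min_engine = API_VERSION_TO_ENGINE_VERSION[api_version]   # '18.06.0'
    print('format 3.7 needs API %s (Engine >= %s)' % (api_version, min_engine))
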

View File

@ -9,6 +9,8 @@ from docker.errors import ImageNotFound
from .const import LABEL_CONTAINER_NUMBER
from .const import LABEL_PROJECT
from .const import LABEL_SERVICE
from .const import LABEL_VERSION
from .version import ComposeVersion
class Container(object):
@ -283,6 +285,12 @@ class Container(object):
def attach(self, *args, **kwargs):
return self.client.attach(self.id, *args, **kwargs)
def has_legacy_proj_name(self, project_name):
return (
ComposeVersion(self.labels.get(LABEL_VERSION)) < ComposeVersion('1.21.0') and
self.project != project_name
)
def __repr__(self):
return '<Container: %s (%s)>' % (self.name, self.id[:6])
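
A standalone sketch of the check added above, with plain values standing in for a Container (and assuming ComposeVersion orders version strings numerically, which the comparison relies on): a container has a legacy project name when it was created by Compose older than 1.21.0, read from its com.docker.compose.version label, and its recorded project differs from the current one.

    from compose.version import ComposeVersion  # same class the diff imports

    def has_legacy_proj_name(labels, container_project, current_project):
        """Stand-in for Container.has_legacy_proj_name using plain values."""
        return (
            ComposeVersion(labels.get('com.docker.compose.version')) < ComposeVersion('1.21.0')
            and container_project != current_project
        )

    print(has_legacy_proj_name({'com.docker.compose.version': '1.19.0'}, 'myproj', 'my-proj'))  # True
    print(has_legacy_proj_name({'com.docker.compose.version': '1.21.2'}, 'myproj', 'my-proj'))  # False
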

View File

@ -4,6 +4,7 @@ from __future__ import unicode_literals
import datetime
import logging
import operator
import re
from functools import reduce
import enum
@ -70,8 +71,11 @@ class Project(object):
self.networks = networks or ProjectNetworks({}, False)
self.config_version = config_version
def labels(self, one_off=OneOffFilter.exclude):
labels = ['{0}={1}'.format(LABEL_PROJECT, self.name)]
def labels(self, one_off=OneOffFilter.exclude, legacy=False):
name = self.name
if legacy:
name = re.sub(r'[_-]', '', name)
labels = ['{0}={1}'.format(LABEL_PROJECT, name)]
OneOffFilter.update_labels(one_off, labels)
return labels
@ -128,7 +132,8 @@ class Project(object):
volumes_from=volumes_from,
secrets=secrets,
pid_mode=pid_mode,
platform=service_dict.pop('platform', default_platform),
platform=service_dict.pop('platform', None),
default_platform=default_platform,
**service_dict)
)
@ -570,12 +575,21 @@ class Project(object):
service.push(ignore_push_failures)
def _labeled_containers(self, stopped=False, one_off=OneOffFilter.exclude):
return list(filter(None, [
ctnrs = list(filter(None, [
Container.from_ps(self.client, container)
for container in self.client.containers(
all=stopped,
filters={'label': self.labels(one_off=one_off)})])
)
if ctnrs:
return ctnrs
return list(filter(lambda c: c.has_legacy_proj_name(self.name), filter(None, [
Container.from_ps(self.client, container)
for container in self.client.containers(
all=stopped,
filters={'label': self.labels(one_off=one_off, legacy=True)})])
))
def containers(self, service_names=None, stopped=False, one_off=OneOffFilter.exclude):
if service_names:
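
A hedged sketch of what the legacy fallback queries for (the label key is assumed to be com.docker.compose.project): releases before 1.21.0 stripped dashes and underscores from the project name before labelling containers, so a second lookup with the normalised name finds containers created by those versions.

    import re

    LABEL_PROJECT = 'com.docker.compose.project'  # assumed label key

    def project_label(name, legacy=False):
        """Mirror Project.labels(): strip '-' and '_' for the legacy lookup."""
        if legacy:
            name = re.sub(r'[_-]', '', name)
        return '{0}={1}'.format(LABEL_PROJECT, name)

    print(project_label('my_project'))               # com.docker.compose.project=my_project
    print(project_label('my_project', legacy=True))  # com.docker.compose.project=myproject
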

View File

@ -1,6 +1,7 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import itertools
import logging
import os
import re
@ -51,7 +52,6 @@ from .progress_stream import StreamOutputError
from .utils import json_hash
from .utils import parse_bytes
from .utils import parse_seconds_float
from .version import ComposeVersion
log = logging.getLogger(__name__)
@ -172,6 +172,7 @@ class Service(object):
secrets=None,
scale=None,
pid_mode=None,
default_platform=None,
**options
):
self.name = name
@ -185,13 +186,14 @@ class Service(object):
self.networks = networks or {}
self.secrets = secrets or []
self.scale_num = scale or 1
self.default_platform = default_platform
self.options = options
def __repr__(self):
return '<Service: {}>'.format(self.name)
def containers(self, stopped=False, one_off=False, filters={}):
filters.update({'label': self.labels(one_off=one_off)})
def containers(self, stopped=False, one_off=False, filters={}, labels=None):
filters.update({'label': self.labels(one_off=one_off) + (labels or [])})
result = list(filter(None, [
Container.from_ps(self.client, container)
@ -202,10 +204,10 @@ class Service(object):
if result:
return result
filters.update({'label': self.labels(one_off=one_off, legacy=True)})
filters.update({'label': self.labels(one_off=one_off, legacy=True) + (labels or [])})
return list(
filter(
self.has_legacy_proj_name, filter(None, [
lambda c: c.has_legacy_proj_name(self.project), filter(None, [
Container.from_ps(self.client, container)
for container in self.client.containers(
all=stopped,
@ -217,9 +219,9 @@ class Service(object):
"""Return a :class:`compose.container.Container` for this service. The
container must be active, and match `number`.
"""
labels = self.labels() + ['{0}={1}'.format(LABEL_CONTAINER_NUMBER, number)]
for container in self.client.containers(filters={'label': labels}):
return Container.from_ps(self.client, container)
for container in self.containers(labels=['{0}={1}'.format(LABEL_CONTAINER_NUMBER, number)]):
return container
raise ValueError("No container found for %s_%s" % (self.name, number))
@ -256,6 +258,11 @@ class Service(object):
running_containers = self.containers(stopped=False)
num_running = len(running_containers)
for c in running_containers:
if not c.has_legacy_proj_name(self.project):
continue
log.info('Recreating container with legacy name %s' % c.name)
self.recreate_container(c, timeout, start_new_container=False)
if desired_num == num_running:
# do nothing as we already have the desired number
@ -358,6 +365,13 @@ class Service(object):
def image_name(self):
return self.options.get('image', '{s.project}_{s.name}'.format(s=self))
@property
def platform(self):
platform = self.options.get('platform')
if not platform and version_gte(self.client.api_version, '1.35'):
platform = self.default_platform
return platform
def convergence_plan(self, strategy=ConvergenceStrategy.changed):
containers = self.containers(stopped=True)
@ -395,7 +409,7 @@ class Service(object):
has_diverged = False
for c in containers:
if self.has_legacy_proj_name(c):
if c.has_legacy_proj_name(self.project):
log.debug('%s has diverged: Legacy project name' % c.name)
has_diverged = True
continue
@ -704,9 +718,14 @@ class Service(object):
# TODO: this would benefit from github.com/docker/docker/pull/14699
# to remove the need to inspect every container
def _next_container_number(self, one_off=False):
containers = self._fetch_containers(
all=True,
filters={'label': self.labels(one_off=one_off)}
containers = itertools.chain(
self._fetch_containers(
all=True,
filters={'label': self.labels(one_off=one_off)}
), self._fetch_containers(
all=True,
filters={'label': self.labels(one_off=one_off, legacy=True)}
)
)
numbers = [c.number for c in containers]
return 1 if not numbers else max(numbers) + 1
@ -1018,8 +1037,7 @@ class Service(object):
if not six.PY3 and not IS_WINDOWS_PLATFORM:
path = path.encode('utf8')
platform = self.options.get('platform')
if platform and version_lt(self.client.api_version, '1.35'):
if self.platform and version_lt(self.client.api_version, '1.35'):
raise OperationFailedError(
'Impossible to perform platform-targeted builds for API version < 1.35'
)
@ -1044,7 +1062,7 @@ class Service(object):
},
gzip=gzip,
isolation=build_opts.get('isolation', self.options.get('isolation', None)),
platform=platform,
platform=self.platform,
)
try:
@ -1150,14 +1168,14 @@ class Service(object):
kwargs = {
'tag': tag or 'latest',
'stream': True,
'platform': self.options.get('platform'),
'platform': self.platform,
}
if not silent:
log.info('Pulling %s (%s%s%s)...' % (self.name, repo, separator, tag))
if kwargs['platform'] and version_lt(self.client.api_version, '1.35'):
raise OperationFailedError(
'Impossible to perform platform-targeted builds for API version < 1.35'
'Impossible to perform platform-targeted pulls for API version < 1.35'
)
try:
output = self.client.pull(repo, **kwargs)
@ -1235,12 +1253,6 @@ class Service(object):
return result
def has_legacy_proj_name(self, ctnr):
return (
ComposeVersion(ctnr.labels.get(LABEL_VERSION)) < ComposeVersion('1.21.0') and
ctnr.project != self.project
)
def short_id_alias_exists(container, network):
aliases = container.get(
@ -1347,7 +1359,7 @@ class ServiceNetworkMode(object):
def build_container_name(project, service, number, one_off=False):
bits = [project, service]
bits = [project.lstrip('-_'), service]
if one_off:
bits.append('run')
return '_'.join(bits + [str(number)])
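
To summarise the precedence the new Service.platform property implements (a standalone sketch with plain arguments, not the class itself): an explicit service-level platform always wins, and DOCKER_DEFAULT_PLATFORM, passed down as default_platform, is only honoured when the daemon API is at least 1.35, the first version that accepts a platform parameter for builds and pulls.

    def effective_platform(service_platform, default_platform, api_version):
        """Stand-in for Service.platform with plain arguments."""
        if service_platform:
            return service_platform
        # The platform parameter only exists from API 1.35 onwards.
        if tuple(int(p) for p in api_version.split('.')) >= (1, 35):
            return default_platform
        return None

    print(effective_platform('linux/arm', 'windows', '1.35'))  # linux/arm
    print(effective_platform(None, 'windows', '1.35'))         # windows
    print(effective_platform(None, 'windows', '1.32'))         # None
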

View File

@ -60,7 +60,7 @@ class Volume(object):
def full_name(self):
if self.custom_name:
return self.name
return '{0}_{1}'.format(self.project, self.name)
return '{0}_{1}'.format(self.project.lstrip('-_'), self.name)
@property
def legacy_full_name(self):

View File

@ -98,7 +98,7 @@ __docker_compose_complete_services() {
# The services for which at least one running container exists
__docker_compose_complete_running_services() {
local names=$(__docker_compose_complete_services --filter status=running)
local names=$(__docker_compose_services --filter status=running)
COMPREPLY=( $(compgen -W "$names" -- "$cur") )
}

View File

@ -82,6 +82,11 @@ exe = EXE(pyz,
'compose/config/config_schema_v3.6.json',
'DATA'
),
(
'compose/config/config_schema_v3.7.json',
'compose/config/config_schema_v3.7.json',
'DATA'
),
(
'compose/GITSHA',
'compose/GITSHA',

View File

@ -2,8 +2,8 @@ backports.ssl-match-hostname==3.5.0.1; python_version < '3'
cached-property==1.3.0
certifi==2017.4.17
chardet==3.0.4
docker==3.3.0
docker-pycreds==0.2.3
docker==3.4.0
docker-pycreds==0.3.0
dockerpty==0.4.1
docopt==0.6.2
enum34==1.1.6; python_version < '3.4'

View File

@ -58,8 +58,11 @@ def create_bump_commit(repository, release_branch, bintray_user, bintray_org):
repository.push_branch_to_remote(release_branch)
bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], bintray_user)
print('Creating data repository {} on bintray'.format(release_branch.name))
bintray_api.create_repository(bintray_org, release_branch.name, 'generic')
if not bintray_api.repository_exists(bintray_org, release_branch.name):
print('Creating data repository {} on bintray'.format(release_branch.name))
bintray_api.create_repository(bintray_org, release_branch.name, 'generic')
else:
print('Bintray repository {} already exists. Skipping'.format(release_branch.name))
def monitor_pr_status(pr_data):

View File

@ -15,12 +15,12 @@ if test -z $BINTRAY_TOKEN; then
exit 1
fi
docker run -e GITHUB_TOKEN=$GITHUB_TOKEN -e BINTRAY_TOKEN=$BINTRAY_TOKEN -it \
docker run -e GITHUB_TOKEN=$GITHUB_TOKEN -e BINTRAY_TOKEN=$BINTRAY_TOKEN -e SSH_AUTH_SOCK=$SSH_AUTH_SOCK -it \
--mount type=bind,source=$(pwd),target=/src \
--mount type=bind,source=$(pwd)/.git,target=/src/.git \
--mount type=bind,source=$HOME/.docker,target=/root/.docker \
--mount type=bind,source=$HOME/.gitconfig,target=/root/.gitconfig \
--mount type=bind,source=/var/run/docker.sock,target=/var/run/docker.sock \
--mount type=bind,source=$HOME/.ssh,target=/root/.ssh \
--mount type=bind,source=/tmp,target=/tmp \
-v $HOME/.pypirc:/root/.pypirc \
compose/release-tool $*

View File

@ -25,7 +25,19 @@ class BintrayAPI(requests.Session):
'desc': 'Automated release for {}: {}'.format(NAME, repo_name),
'labels': ['docker-compose', 'docker', 'release-bot'],
}
return self.post_json(url, data)
result = self.post_json(url, data)
result.raise_for_status()
return result
def repository_exists(self, subject, repo_name):
url = '{base}/repos/{subject}/{repo_name}'.format(
base=self.base_url, subject=subject, repo_name=repo_name,
)
result = self.get(url)
if result.status_code == 404:
return False
result.raise_for_status()
return True
def delete_repository(self, subject, repo_name):
url = '{base}/repos/{subject}/{repo_name}'.format(

View File

@ -48,7 +48,7 @@ class ImageManager(object):
container = docker_client.create_container(
'docker-compose-tests:tmp', entrypoint='tox'
)
docker_client.commit(container, 'docker/compose-tests:latest')
docker_client.commit(container, 'docker/compose-tests', 'latest')
docker_client.tag('docker/compose-tests:latest', 'docker/compose-tests:{}'.format(self.version))
docker_client.remove_container(container, force=True)
docker_client.remove_image('docker-compose-tests:tmp', force=True)

View File

@ -15,7 +15,7 @@
set -e
VERSION="1.21.2"
VERSION="1.22.0-rc1"
IMAGE="docker/compose:$VERSION"

View File

@ -36,7 +36,7 @@ install_requires = [
'requests >= 2.6.1, != 2.11.0, != 2.12.2, != 2.18.0, < 2.19',
'texttable >= 0.9.0, < 0.10',
'websocket-client >= 0.32.0, < 1.0',
'docker >= 3.3.0, < 4.0',
'docker >= 3.4.0, < 4.0',
'dockerpty >= 0.4.1, < 0.5',
'six >= 1.3.0, < 2',
'jsonschema >= 2.5.1, < 3',

View File

@ -481,6 +481,7 @@ class CLITestCase(DockerClientTestCase):
assert yaml.load(result.stdout) == {
'version': '2.3',
'volumes': {'foo': {'driver': 'default'}},
'networks': {'bar': {}},
'services': {
'foo': {
'command': '/bin/true',
@ -490,9 +491,10 @@ class CLITestCase(DockerClientTestCase):
'mem_limit': '300M',
'mem_reservation': '100M',
'cpus': 0.7,
'volumes': ['foo:/bar:rw']
'volumes': ['foo:/bar:rw'],
'networks': {'bar': None},
}
}
},
}
def test_ps(self):

View File

@ -16,7 +16,13 @@ services:
memory: 100M
volumes:
- foo:/bar
networks:
- bar
volumes:
foo:
driver: default
networks:
bar:
attachable: true

View File

@ -1915,3 +1915,65 @@ class ProjectTest(DockerClientTestCase):
assert len(remote_secopts) == 1
assert remote_secopts[0].startswith('seccomp=')
assert json.loads(remote_secopts[0].lstrip('seccomp=')) == seccomp_data
@no_cluster('inspect volume by name defect on Swarm Classic')
def test_project_up_name_starts_with_illegal_char(self):
config_dict = {
'version': '2.3',
'services': {
'svc1': {
'image': 'busybox:latest',
'command': 'ls',
'volumes': ['foo:/foo:rw'],
'networks': ['bar'],
},
},
'volumes': {
'foo': {},
},
'networks': {
'bar': {},
}
}
config_data = load_config(config_dict)
project = Project.from_config(
name='_underscoretest', config_data=config_data, client=self.client
)
project.up()
self.addCleanup(project.down, None, True)
containers = project.containers(stopped=True)
assert len(containers) == 1
assert containers[0].name == 'underscoretest_svc1_1'
assert containers[0].project == '_underscoretest'
full_vol_name = 'underscoretest_foo'
vol_data = self.get_volume_data(full_vol_name)
assert vol_data
assert vol_data['Labels'][LABEL_PROJECT] == '_underscoretest'
full_net_name = '_underscoretest_bar'
net_data = self.client.inspect_network(full_net_name)
assert net_data
assert net_data['Labels'][LABEL_PROJECT] == '_underscoretest'
project2 = Project.from_config(
name='-dashtest', config_data=config_data, client=self.client
)
project2.up()
self.addCleanup(project2.down, None, True)
containers = project2.containers(stopped=True)
assert len(containers) == 1
assert containers[0].name == 'dashtest_svc1_1'
assert containers[0].project == '-dashtest'
full_vol_name = 'dashtest_foo'
vol_data = self.get_volume_data(full_vol_name)
assert vol_data
assert vol_data['Labels'][LABEL_PROJECT] == '-dashtest'
full_net_name = '-dashtest_bar'
net_data = self.client.inspect_network(full_net_name)
assert net_data
assert net_data['Labels'][LABEL_PROJECT] == '-dashtest'

View File

@ -29,6 +29,7 @@ class ProjectTest(unittest.TestCase):
def setUp(self):
self.mock_client = mock.create_autospec(docker.APIClient)
self.mock_client._general_configs = {}
self.mock_client.api_version = docker.constants.DEFAULT_DOCKER_API_VERSION
def test_from_config_v1(self):
config = Config(
@ -578,21 +579,21 @@ class ProjectTest(unittest.TestCase):
)
project = Project.from_config(name='test', client=self.mock_client, config_data=config_data)
assert project.get_service('web').options.get('platform') is None
assert project.get_service('web').platform is None
project = Project.from_config(
name='test', client=self.mock_client, config_data=config_data, default_platform='windows'
)
assert project.get_service('web').options.get('platform') == 'windows'
assert project.get_service('web').platform == 'windows'
service_config['platform'] = 'linux/s390x'
project = Project.from_config(name='test', client=self.mock_client, config_data=config_data)
assert project.get_service('web').options.get('platform') == 'linux/s390x'
assert project.get_service('web').platform == 'linux/s390x'
project = Project.from_config(
name='test', client=self.mock_client, config_data=config_data, default_platform='windows'
)
assert project.get_service('web').options.get('platform') == 'linux/s390x'
assert project.get_service('web').platform == 'linux/s390x'
@mock.patch('compose.parallel.ParallelStreamWriter._write_noansi')
def test_error_parallel_pull(self, mock_write):

View File

@ -446,6 +446,20 @@ class ServiceTest(unittest.TestCase):
with pytest.raises(OperationFailedError):
service.pull()
def test_pull_image_with_default_platform(self):
self.mock_client.api_version = '1.35'
service = Service(
'foo', client=self.mock_client, image='someimage:sometag',
default_platform='linux'
)
assert service.platform == 'linux'
service.pull()
assert self.mock_client.pull.call_count == 1
call_args = self.mock_client.pull.call_args
assert call_args[1]['platform'] == 'linux'
@mock.patch('compose.service.Container', autospec=True)
def test_recreate_container(self, _):
mock_container = mock.create_autospec(Container)
@ -538,7 +552,7 @@ class ServiceTest(unittest.TestCase):
assert self.mock_client.build.call_count == 1
assert not self.mock_client.build.call_args[1]['pull']
def test_build_does_with_platform(self):
def test_build_with_platform(self):
self.mock_client.api_version = '1.35'
self.mock_client.build.return_value = [
b'{"stream": "Successfully built 12345"}',
@ -551,6 +565,47 @@ class ServiceTest(unittest.TestCase):
call_args = self.mock_client.build.call_args
assert call_args[1]['platform'] == 'linux'
def test_build_with_default_platform(self):
self.mock_client.api_version = '1.35'
self.mock_client.build.return_value = [
b'{"stream": "Successfully built 12345"}',
]
service = Service(
'foo', client=self.mock_client, build={'context': '.'},
default_platform='linux'
)
assert service.platform == 'linux'
service.build()
assert self.mock_client.build.call_count == 1
call_args = self.mock_client.build.call_args
assert call_args[1]['platform'] == 'linux'
def test_service_platform_precedence(self):
self.mock_client.api_version = '1.35'
service = Service(
'foo', client=self.mock_client, platform='linux/arm',
default_platform='osx'
)
assert service.platform == 'linux/arm'
def test_service_ignore_default_platform_with_unsupported_api(self):
self.mock_client.api_version = '1.32'
self.mock_client.build.return_value = [
b'{"stream": "Successfully built 12345"}',
]
service = Service(
'foo', client=self.mock_client, default_platform='windows', build={'context': '.'}
)
assert service.platform is None
service.build()
assert self.mock_client.build.call_count == 1
call_args = self.mock_client.build.call_args
assert call_args[1]['platform'] is None
def test_build_with_override_build_args(self):
self.mock_client.build.return_value = [
b'{"stream": "Successfully built 12345"}',