From 1315b51e447f2b15ee9273c0973f08f3c5f9e149 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 23 Apr 2018 18:16:58 -0700 Subject: [PATCH 01/26] Incorrect key name for IPAM options check Signed-off-by: Joffrey F --- compose/network.py | 2 +- tests/unit/network_test.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/compose/network.py b/compose/network.py index 1a080c40c..7803db979 100644 --- a/compose/network.py +++ b/compose/network.py @@ -169,7 +169,7 @@ def check_remote_ipam_config(remote, local): raise NetworkConfigChangedError(local.full_name, 'IPAM config aux_addresses') remote_opts = remote_ipam.get('Options') or {} - local_opts = local.ipam.get('options') or {} + local_opts = local.ipam.get('Options') or {} for k in set.union(set(remote_opts.keys()), set(local_opts.keys())): if remote_opts.get(k) != local_opts.get(k): raise NetworkConfigChangedError(local.full_name, 'IPAM option "{}"'.format(k)) diff --git a/tests/unit/network_test.py b/tests/unit/network_test.py index b27339af8..0e03fc10e 100644 --- a/tests/unit/network_test.py +++ b/tests/unit/network_test.py @@ -23,7 +23,10 @@ class NetworkTest(unittest.TestCase): 'aux_addresses': ['11.0.0.1', '24.25.26.27'], 'ip_range': '156.0.0.1-254' } - ] + ], + 'options': { + 'iface': 'eth0', + } } labels = { 'com.project.tests.istest': 'true', @@ -57,6 +60,9 @@ class NetworkTest(unittest.TestCase): 'Subnet': '172.0.0.1/16', 'Gateway': '172.0.0.1' }], + 'Options': { + 'iface': 'eth0', + }, }, 'Labels': remote_labels }, From 385b65032db5106124ac3f62a04efc9ef843968f Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 23 Apr 2018 16:41:10 -0700 Subject: [PATCH 02/26] Retrieve objects using legacy (< 1.21) project names Signed-off-by: Joffrey F --- compose/network.py | 66 ++++++++++++++++++++++++++++++++++------------ compose/service.py | 36 +++++++++++++++++++++---- compose/volume.py | 43 +++++++++++++++++++++++++----- 3 files changed, 117 insertions(+), 28 deletions(-) diff --git a/compose/network.py b/compose/network.py index 7803db979..98b8f7b1d 100644 --- a/compose/network.py +++ b/compose/network.py @@ -2,6 +2,7 @@ from __future__ import absolute_import from __future__ import unicode_literals import logging +import re from collections import OrderedDict from docker.errors import NotFound @@ -10,9 +11,11 @@ from docker.types import IPAMPool from docker.utils import version_gte from docker.utils import version_lt +from . 
import __version__ from .config import ConfigurationError from .const import LABEL_NETWORK from .const import LABEL_PROJECT +from .const import LABEL_VERSION log = logging.getLogger(__name__) @@ -39,6 +42,7 @@ class Network(object): self.enable_ipv6 = enable_ipv6 self.labels = labels self.custom_name = custom_name + self.legacy = False def ensure(self): if self.external: @@ -68,6 +72,14 @@ class Network(object): data = self.inspect() check_remote_network_config(data, self) except NotFound: + try: + data = self.inspect(legacy=True) + self.legacy = True + check_remote_network_config(data, self) + return + except NotFound: + pass + driver_name = 'the default driver' if self.driver: driver_name = 'driver "{}"'.format(self.driver) @@ -94,18 +106,37 @@ class Network(object): log.info("Network %s is external, skipping", self.full_name) return - log.info("Removing network {}".format(self.full_name)) - self.client.remove_network(self.full_name) + log.info("Removing network {}".format(self.true_name)) + try: + self.client.remove_network(self.full_name) + except NotFound: + self.client.remove_network(self.legacy_full_name) - def inspect(self): + def inspect(self, legacy=False): + if legacy: + return self.client.inspect_network(self.legacy_full_name) return self.client.inspect_network(self.full_name) + @property + def legacy_full_name(self): + if self.custom_name: + return self.name + return '{0}_{1}'.format( + re.sub(r'[_-]', '', self.project), self.name + ) + @property def full_name(self): if self.custom_name: return self.name return '{0}_{1}'.format(self.project, self.name) + @property + def true_name(self): + if self.legacy: + return self.legacy_full_name + return self.full_name + @property def _labels(self): if version_lt(self.client._version, '1.23'): @@ -114,6 +145,7 @@ class Network(object): labels.update({ LABEL_PROJECT: self.project, LABEL_NETWORK: self.name, + LABEL_VERSION: __version__, }) return labels @@ -150,49 +182,49 @@ def check_remote_ipam_config(remote, local): remote_ipam = remote.get('IPAM') ipam_dict = create_ipam_config_from_dict(local.ipam) if local.ipam.get('driver') and local.ipam.get('driver') != remote_ipam.get('Driver'): - raise NetworkConfigChangedError(local.full_name, 'IPAM driver') + raise NetworkConfigChangedError(local.true_name, 'IPAM driver') if len(ipam_dict['Config']) != 0: if len(ipam_dict['Config']) != len(remote_ipam['Config']): - raise NetworkConfigChangedError(local.full_name, 'IPAM configs') + raise NetworkConfigChangedError(local.true_name, 'IPAM configs') remote_configs = sorted(remote_ipam['Config'], key='Subnet') local_configs = sorted(ipam_dict['Config'], key='Subnet') while local_configs: lc = local_configs.pop() rc = remote_configs.pop() if lc.get('Subnet') != rc.get('Subnet'): - raise NetworkConfigChangedError(local.full_name, 'IPAM config subnet') + raise NetworkConfigChangedError(local.true_name, 'IPAM config subnet') if lc.get('Gateway') is not None and lc.get('Gateway') != rc.get('Gateway'): - raise NetworkConfigChangedError(local.full_name, 'IPAM config gateway') + raise NetworkConfigChangedError(local.true_name, 'IPAM config gateway') if lc.get('IPRange') != rc.get('IPRange'): - raise NetworkConfigChangedError(local.full_name, 'IPAM config ip_range') + raise NetworkConfigChangedError(local.true_name, 'IPAM config ip_range') if sorted(lc.get('AuxiliaryAddresses')) != sorted(rc.get('AuxiliaryAddresses')): - raise NetworkConfigChangedError(local.full_name, 'IPAM config aux_addresses') + raise NetworkConfigChangedError(local.true_name, 'IPAM 
config aux_addresses') remote_opts = remote_ipam.get('Options') or {} local_opts = local.ipam.get('Options') or {} for k in set.union(set(remote_opts.keys()), set(local_opts.keys())): if remote_opts.get(k) != local_opts.get(k): - raise NetworkConfigChangedError(local.full_name, 'IPAM option "{}"'.format(k)) + raise NetworkConfigChangedError(local.true_name, 'IPAM option "{}"'.format(k)) def check_remote_network_config(remote, local): if local.driver and remote.get('Driver') != local.driver: - raise NetworkConfigChangedError(local.full_name, 'driver') + raise NetworkConfigChangedError(local.true_name, 'driver') local_opts = local.driver_opts or {} remote_opts = remote.get('Options') or {} for k in set.union(set(remote_opts.keys()), set(local_opts.keys())): if k in OPTS_EXCEPTIONS: continue if remote_opts.get(k) != local_opts.get(k): - raise NetworkConfigChangedError(local.full_name, 'option "{}"'.format(k)) + raise NetworkConfigChangedError(local.true_name, 'option "{}"'.format(k)) if local.ipam is not None: check_remote_ipam_config(remote, local) if local.internal is not None and local.internal != remote.get('Internal', False): - raise NetworkConfigChangedError(local.full_name, 'internal') + raise NetworkConfigChangedError(local.true_name, 'internal') if local.enable_ipv6 is not None and local.enable_ipv6 != remote.get('EnableIPv6', False): - raise NetworkConfigChangedError(local.full_name, 'enable_ipv6') + raise NetworkConfigChangedError(local.true_name, 'enable_ipv6') local_labels = local.labels or {} remote_labels = remote.get('Labels', {}) @@ -202,7 +234,7 @@ def check_remote_network_config(remote, local): if remote_labels.get(k) != local_labels.get(k): log.warn( 'Network {}: label "{}" has changed. It may need to be' - ' recreated.'.format(local.full_name, k) + ' recreated.'.format(local.true_name, k) ) @@ -257,7 +289,7 @@ class ProjectNetworks(object): try: network.remove() except NotFound: - log.warn("Network %s not found.", network.full_name) + log.warn("Network %s not found.", network.true_name) def initialize(self): if not self.use_networking: @@ -286,7 +318,7 @@ def get_networks(service_dict, network_definitions): for name, netdef in get_network_defs_for_service(service_dict).items(): network = network_definitions.get(name) if network: - networks[network.full_name] = netdef + networks[network.true_name] = netdef else: raise ConfigurationError( 'Service "{}" uses an undefined network "{}"' diff --git a/compose/service.py b/compose/service.py index bb9e26baa..0a866161c 100644 --- a/compose/service.py +++ b/compose/service.py @@ -51,6 +51,7 @@ from .progress_stream import StreamOutputError from .utils import json_hash from .utils import parse_bytes from .utils import parse_seconds_float +from .version import ComposeVersion log = logging.getLogger(__name__) @@ -192,11 +193,25 @@ class Service(object): def containers(self, stopped=False, one_off=False, filters={}): filters.update({'label': self.labels(one_off=one_off)}) - return list(filter(None, [ + result = list(filter(None, [ Container.from_ps(self.client, container) for container in self.client.containers( all=stopped, - filters=filters)])) + filters=filters)]) + ) + if result: + return result + + filters.update({'label': self.labels(one_off=one_off, legacy=True)}) + return list( + filter( + self.has_legacy_proj_name, filter(None, [ + Container.from_ps(self.client, container) + for container in self.client.containers( + all=stopped, + filters=filters)]) + ) + ) def get_container(self, number=1): """Return a 
:class:`compose.container.Container` for this service. The @@ -380,6 +395,10 @@ class Service(object): has_diverged = False for c in containers: + if self.has_legacy_proj_name(c): + log.debug('%s has diverged: Legacy project name' % c.name) + has_diverged = True + continue container_config_hash = c.labels.get(LABEL_CONFIG_HASH, None) if container_config_hash != config_hash: log.debug( @@ -1053,11 +1072,12 @@ class Service(object): def can_be_built(self): return 'build' in self.options - def labels(self, one_off=False): + def labels(self, one_off=False, legacy=False): + proj_name = self.project if not legacy else re.sub(r'[_-]', '', self.project) return [ - '{0}={1}'.format(LABEL_PROJECT, self.project), + '{0}={1}'.format(LABEL_PROJECT, proj_name), '{0}={1}'.format(LABEL_SERVICE, self.name), - '{0}={1}'.format(LABEL_ONE_OFF, "True" if one_off else "False") + '{0}={1}'.format(LABEL_ONE_OFF, "True" if one_off else "False"), ] @property @@ -1214,6 +1234,12 @@ class Service(object): return result + def has_legacy_proj_name(self, ctnr): + return ( + ComposeVersion(ctnr.labels.get(LABEL_VERSION)) < ComposeVersion('1.21.0') and + ctnr.project != self.project + ) + def short_id_alias_exists(container, network): aliases = container.get( diff --git a/compose/volume.py b/compose/volume.py index 6bf184045..6cad1e0de 100644 --- a/compose/volume.py +++ b/compose/volume.py @@ -2,15 +2,19 @@ from __future__ import absolute_import from __future__ import unicode_literals import logging +import re from docker.errors import NotFound from docker.utils import version_lt +from . import __version__ from .config import ConfigurationError from .config.types import VolumeSpec from .const import LABEL_PROJECT +from .const import LABEL_VERSION from .const import LABEL_VOLUME + log = logging.getLogger(__name__) @@ -25,6 +29,7 @@ class Volume(object): self.external = external self.labels = labels self.custom_name = custom_name + self.legacy = False def create(self): return self.client.create_volume( @@ -36,15 +41,26 @@ class Volume(object): log.info("Volume %s is external, skipping", self.full_name) return log.info("Removing volume %s", self.full_name) - return self.client.remove_volume(self.full_name) + try: + return self.client.remove_volume(self.full_name) + except NotFound: + self.client.remove_volume(self.legacy_full_name) - def inspect(self): + def inspect(self, legacy=False): + if legacy: + return self.client.inspect_volume(self.legacy_full_name) return self.client.inspect_volume(self.full_name) def exists(self): try: self.inspect() except NotFound: + try: + self.inspect(legacy=True) + self.legacy = True + return True + except NotFound: + pass return False return True @@ -54,6 +70,20 @@ class Volume(object): return self.name return '{0}_{1}'.format(self.project, self.name) + @property + def legacy_full_name(self): + if self.custom_name: + return self.name + return '{0}_{1}'.format( + re.sub(r'[_-]', '', self.project), self.name + ) + + @property + def true_name(self): + if self.legacy: + return self.legacy_full_name + return self.full_name + @property def _labels(self): if version_lt(self.client._version, '1.23'): @@ -62,6 +92,7 @@ class Volume(object): labels.update({ LABEL_PROJECT: self.project, LABEL_VOLUME: self.name, + LABEL_VERSION: __version__, }) return labels @@ -94,7 +125,7 @@ class ProjectVolumes(object): try: volume.remove() except NotFound: - log.warn("Volume %s not found.", volume.full_name) + log.warn("Volume %s not found.", volume.true_name) def initialize(self): try: @@ -136,9 +167,9 @@ class 
ProjectVolumes(object): if isinstance(volume_spec, VolumeSpec): volume = self.volumes[volume_spec.external] - return volume_spec._replace(external=volume.full_name) + return volume_spec._replace(external=volume.true_name) else: - volume_spec.source = self.volumes[volume_spec.source].full_name + volume_spec.source = self.volumes[volume_spec.source].true_name return volume_spec @@ -152,7 +183,7 @@ class VolumeConfigChangedError(ConfigurationError): 'first:\n$ docker volume rm {full_name}'.format( vol_name=local.name, property_name=property_name, local_value=local_value, remote_value=remote_value, - full_name=local.full_name + full_name=local.true_name ) ) From d9a6d30f6dccb28cf5a3d5a83e01c8701e172fd7 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Tue, 24 Apr 2018 15:48:02 -0700 Subject: [PATCH 03/26] Improve legacy network and volume detection Signed-off-by: Joffrey F --- compose/network.py | 26 ++++++++++++++------------ compose/volume.py | 25 +++++++++++++++---------- 2 files changed, 29 insertions(+), 22 deletions(-) diff --git a/compose/network.py b/compose/network.py index 98b8f7b1d..5e9d929d9 100644 --- a/compose/network.py +++ b/compose/network.py @@ -42,7 +42,7 @@ class Network(object): self.enable_ipv6 = enable_ipv6 self.labels = labels self.custom_name = custom_name - self.legacy = False + self.legacy = None def ensure(self): if self.external: @@ -68,25 +68,17 @@ class Network(object): ) return + self._set_legacy_flag() try: - data = self.inspect() + data = self.inspect(legacy=self.legacy) check_remote_network_config(data, self) except NotFound: - try: - data = self.inspect(legacy=True) - self.legacy = True - check_remote_network_config(data, self) - return - except NotFound: - pass - driver_name = 'the default driver' if self.driver: driver_name = 'driver "{}"'.format(self.driver) log.info( - 'Creating network "{}" with {}' - .format(self.full_name, driver_name) + 'Creating network "{}" with {}'.format(self.full_name, driver_name) ) self.client.create_network( @@ -133,6 +125,7 @@ class Network(object): @property def true_name(self): + self._set_legacy_flag() if self.legacy: return self.legacy_full_name return self.full_name @@ -149,6 +142,15 @@ class Network(object): }) return labels + def _set_legacy_flag(self): + if self.legacy is not None: + return + try: + data = self.inspect(legacy=True) + self.legacy = data is not None + except NotFound: + self.legacy = False + def create_ipam_config_from_dict(ipam_dict): if not ipam_dict: diff --git a/compose/volume.py b/compose/volume.py index 6cad1e0de..56ff601cd 100644 --- a/compose/volume.py +++ b/compose/volume.py @@ -29,7 +29,7 @@ class Volume(object): self.external = external self.labels = labels self.custom_name = custom_name - self.legacy = False + self.legacy = None def create(self): return self.client.create_volume( @@ -46,21 +46,16 @@ class Volume(object): except NotFound: self.client.remove_volume(self.legacy_full_name) - def inspect(self, legacy=False): + def inspect(self, legacy=None): if legacy: return self.client.inspect_volume(self.legacy_full_name) return self.client.inspect_volume(self.full_name) def exists(self): + self._set_legacy_flag() try: - self.inspect() + self.inspect(legacy=self.legacy) except NotFound: - try: - self.inspect(legacy=True) - self.legacy = True - return True - except NotFound: - pass return False return True @@ -80,6 +75,7 @@ class Volume(object): @property def true_name(self): + self._set_legacy_flag() if self.legacy: return self.legacy_full_name return self.full_name @@ -96,6 +92,15 @@ class 
Volume(object): }) return labels + def _set_legacy_flag(self): + if self.legacy is not None: + return + try: + data = self.inspect(legacy=True) + self.legacy = data is not None + except NotFound: + self.legacy = False + class ProjectVolumes(object): @@ -155,7 +160,7 @@ class ProjectVolumes(object): ) volume.create() else: - check_remote_volume_config(volume.inspect(), volume) + check_remote_volume_config(volume.inspect(legacy=volume.legacy), volume) except NotFound: raise ConfigurationError( 'Volume %s specifies nonexistent driver %s' % (volume.name, volume.driver) From 7341dba5696e47e8b7c67b6b041ba91a9e0376ea Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Tue, 24 Apr 2018 16:10:59 -0700 Subject: [PATCH 04/26] Use true_name for remove operation Signed-off-by: Joffrey F --- compose/network.py | 7 ++----- compose/volume.py | 9 +++------ tests/unit/network_test.py | 3 +++ tests/unit/project_test.py | 2 ++ 4 files changed, 10 insertions(+), 11 deletions(-) diff --git a/compose/network.py b/compose/network.py index 5e9d929d9..9751f2037 100644 --- a/compose/network.py +++ b/compose/network.py @@ -95,14 +95,11 @@ class Network(object): def remove(self): if self.external: - log.info("Network %s is external, skipping", self.full_name) + log.info("Network %s is external, skipping", self.true_name) return log.info("Removing network {}".format(self.true_name)) - try: - self.client.remove_network(self.full_name) - except NotFound: - self.client.remove_network(self.legacy_full_name) + self.client.remove_network(self.true_name) def inspect(self, legacy=False): if legacy: diff --git a/compose/volume.py b/compose/volume.py index 56ff601cd..7618417ff 100644 --- a/compose/volume.py +++ b/compose/volume.py @@ -38,13 +38,10 @@ class Volume(object): def remove(self): if self.external: - log.info("Volume %s is external, skipping", self.full_name) + log.info("Volume %s is external, skipping", self.true_name) return - log.info("Removing volume %s", self.full_name) - try: - return self.client.remove_volume(self.full_name) - except NotFound: - self.client.remove_volume(self.legacy_full_name) + log.info("Removing volume %s", self.true_name) + return self.client.remove_volume(self.true_name) def inspect(self, legacy=None): if legacy: diff --git a/tests/unit/network_test.py b/tests/unit/network_test.py index 0e03fc10e..d7ffa2894 100644 --- a/tests/unit/network_test.py +++ b/tests/unit/network_test.py @@ -84,6 +84,7 @@ class NetworkTest(unittest.TestCase): {'Driver': 'overlay', 'Options': remote_options}, net ) + @mock.patch('compose.network.Network.true_name', lambda n: n.full_name) def test_check_remote_network_config_driver_mismatch(self): net = Network(None, 'compose_test', 'net1', 'overlay') with pytest.raises(NetworkConfigChangedError) as e: @@ -93,6 +94,7 @@ class NetworkTest(unittest.TestCase): assert 'driver has changed' in str(e.value) + @mock.patch('compose.network.Network.true_name', lambda n: n.full_name) def test_check_remote_network_config_options_mismatch(self): net = Network(None, 'compose_test', 'net1', 'overlay') with pytest.raises(NetworkConfigChangedError) as e: @@ -146,6 +148,7 @@ class NetworkTest(unittest.TestCase): net ) + @mock.patch('compose.network.Network.true_name', lambda n: n.full_name) def test_check_remote_network_labels_mismatch(self): net = Network(None, 'compose_test', 'net1', 'overlay', labels={ 'com.project.touhou.character': 'sakuya.izayoi' diff --git a/tests/unit/project_test.py b/tests/unit/project_test.py index 83a014758..1b6b6651f 100644 --- a/tests/unit/project_test.py +++ 
b/tests/unit/project_test.py @@ -60,6 +60,7 @@ class ProjectTest(unittest.TestCase): assert project.get_service('db').options['image'] == 'busybox:latest' assert not project.networks.use_networking + @mock.patch('compose.network.Network.true_name', lambda n: n.full_name) def test_from_config_v2(self): config = Config( version=V2_0, @@ -217,6 +218,7 @@ class ProjectTest(unittest.TestCase): ) assert project.get_service('test')._get_volumes_from() == [container_name + ":rw"] + @mock.patch('compose.network.Network.true_name', lambda n: n.full_name) def test_use_volumes_from_service_container(self): container_ids = ['aabbccddee', '12345'] From 398b13d345dbd013eb92234a90134bc3b4572a43 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 23 Apr 2018 19:08:55 -0700 Subject: [PATCH 05/26] Clearly define IPAM config schema for validation Signed-off-by: Joffrey F --- compose/config/config_schema_v2.0.json | 21 ++++++++++++++++++++- compose/config/config_schema_v2.1.json | 21 ++++++++++++++++++++- compose/config/config_schema_v2.2.json | 21 ++++++++++++++++++++- compose/config/config_schema_v2.3.json | 21 ++++++++++++++++++++- compose/config/config_schema_v2.4.json | 21 ++++++++++++++++++++- tests/unit/config/config_test.py | 22 ++++++++++++++++++++++ 6 files changed, 122 insertions(+), 5 deletions(-) diff --git a/compose/config/config_schema_v2.0.json b/compose/config/config_schema_v2.0.json index eddf787ea..793cef1d6 100644 --- a/compose/config/config_schema_v2.0.json +++ b/compose/config/config_schema_v2.0.json @@ -281,7 +281,8 @@ "properties": { "driver": {"type": "string"}, "config": { - "type": "array" + "type": "array", + "items": {"$ref": "#/definitions/ipam_config"} }, "options": { "type": "object", @@ -305,6 +306,24 @@ "additionalProperties": false }, + "ipam_config": { + "id": "#/definitions/ipam_config", + "type": "object", + "properties": { + "subnet": {"type": "string"}, + "iprange": {"type": "string"}, + "gateway": {"type": "string"}, + "aux_addresses": { + "type": "object", + "patternProperties": { + "^.+$": {"type": "string"} + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "volume": { "id": "#/definitions/volume", "type": ["object", "null"], diff --git a/compose/config/config_schema_v2.1.json b/compose/config/config_schema_v2.1.json index 5ad5a20ea..5ea763544 100644 --- a/compose/config/config_schema_v2.1.json +++ b/compose/config/config_schema_v2.1.json @@ -332,7 +332,8 @@ "properties": { "driver": {"type": "string"}, "config": { - "type": "array" + "type": "array", + "items": {"$ref": "#/definitions/ipam_config"} }, "options": { "type": "object", @@ -359,6 +360,24 @@ "additionalProperties": false }, + "ipam_config": { + "id": "#/definitions/ipam_config", + "type": "object", + "properties": { + "subnet": {"type": "string"}, + "iprange": {"type": "string"}, + "gateway": {"type": "string"}, + "aux_addresses": { + "type": "object", + "patternProperties": { + "^.+$": {"type": "string"} + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "volume": { "id": "#/definitions/volume", "type": ["object", "null"], diff --git a/compose/config/config_schema_v2.2.json b/compose/config/config_schema_v2.2.json index 26044b651..a19d4c945 100644 --- a/compose/config/config_schema_v2.2.json +++ b/compose/config/config_schema_v2.2.json @@ -341,7 +341,8 @@ "properties": { "driver": {"type": "string"}, "config": { - "type": "array" + "type": "array", + "items": {"$ref": "#/definitions/ipam_config"} }, "options": { "type": "object", @@ 
-368,6 +369,24 @@ "additionalProperties": false }, + "ipam_config": { + "id": "#/definitions/ipam_config", + "type": "object", + "properties": { + "subnet": {"type": "string"}, + "iprange": {"type": "string"}, + "gateway": {"type": "string"}, + "aux_addresses": { + "type": "object", + "patternProperties": { + "^.+$": {"type": "string"} + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "volume": { "id": "#/definitions/volume", "type": ["object", "null"], diff --git a/compose/config/config_schema_v2.3.json b/compose/config/config_schema_v2.3.json index ac0778f2a..78b716a7a 100644 --- a/compose/config/config_schema_v2.3.json +++ b/compose/config/config_schema_v2.3.json @@ -385,7 +385,8 @@ "properties": { "driver": {"type": "string"}, "config": { - "type": "array" + "type": "array", + "items": {"$ref": "#/definitions/ipam_config"} }, "options": { "type": "object", @@ -412,6 +413,24 @@ "additionalProperties": false }, + "ipam_config": { + "id": "#/definitions/ipam_config", + "type": "object", + "properties": { + "subnet": {"type": "string"}, + "iprange": {"type": "string"}, + "gateway": {"type": "string"}, + "aux_addresses": { + "type": "object", + "patternProperties": { + "^.+$": {"type": "string"} + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "volume": { "id": "#/definitions/volume", "type": ["object", "null"], diff --git a/compose/config/config_schema_v2.4.json b/compose/config/config_schema_v2.4.json index 731fa2f9b..a5796d5b1 100644 --- a/compose/config/config_schema_v2.4.json +++ b/compose/config/config_schema_v2.4.json @@ -384,7 +384,8 @@ "properties": { "driver": {"type": "string"}, "config": { - "type": "array" + "type": "array", + "items": {"$ref": "#/definitions/ipam_config"} }, "options": { "type": "object", @@ -411,6 +412,24 @@ "additionalProperties": false }, + "ipam_config": { + "id": "#/definitions/ipam_config", + "type": "object", + "properties": { + "subnet": {"type": "string"}, + "iprange": {"type": "string"}, + "gateway": {"type": "string"}, + "aux_addresses": { + "type": "object", + "patternProperties": { + "^.+$": {"type": "string"} + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "volume": { "id": "#/definitions/volume", "type": ["object", "null"], diff --git a/tests/unit/config/config_test.py b/tests/unit/config/config_test.py index 8a75648ac..4562a99ca 100644 --- a/tests/unit/config/config_test.py +++ b/tests/unit/config/config_test.py @@ -1322,6 +1322,28 @@ class ConfigTest(unittest.TestCase): assert mount.type == 'bind' assert mount.source == expected_source + def test_config_invalid_ipam_config(self): + with pytest.raises(ConfigurationError) as excinfo: + config.load( + build_config_details( + { + 'version': str(V2_1), + 'networks': { + 'foo': { + 'driver': 'default', + 'ipam': { + 'driver': 'default', + 'config': ['172.18.0.0/16'], + } + } + } + }, + filename='filename.yml', + ) + ) + assert ('networks.foo.ipam.config contains an invalid type,' + ' it should be an object') in excinfo.exconly() + def test_config_valid_service_names(self): for valid_name in ['_', '-', '.__.', '_what-up.', 'what_.up----', 'whatup']: services = config.load( From 41417aa379a9ff3167d3dec7f276d654ee500877 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 25 Apr 2018 18:08:34 -0700 Subject: [PATCH 06/26] Bump SDK version to latest Signed-off-by: Joffrey F --- requirements.txt | 4 ++-- setup.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements.txt 
b/requirements.txt index 7dce40246..93a0cce35 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,8 +2,8 @@ backports.ssl-match-hostname==3.5.0.1; python_version < '3' cached-property==1.3.0 certifi==2017.4.17 chardet==3.0.4 -docker==3.2.1 -docker-pycreds==0.2.1 +docker==3.3.0 +docker-pycreds==0.2.3 dockerpty==0.4.1 docopt==0.6.2 enum34==1.1.6; python_version < '3.4' diff --git a/setup.py b/setup.py index a7a333634..422ba5466 100644 --- a/setup.py +++ b/setup.py @@ -36,7 +36,7 @@ install_requires = [ 'requests >= 2.6.1, != 2.11.0, != 2.12.2, != 2.18.0, < 2.19', 'texttable >= 0.9.0, < 0.10', 'websocket-client >= 0.32.0, < 1.0', - 'docker >= 3.2.1, < 4.0', + 'docker >= 3.3.0, < 4.0', 'dockerpty >= 0.4.1, < 0.5', 'six >= 1.3.0, < 2', 'jsonschema >= 2.5.1, < 3', From b1a1c6a2345f05e3905e908e00abc81c8374a4db Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 26 Apr 2018 15:20:45 -0700 Subject: [PATCH 07/26] Prevent duplicate binds in generated container config Signed-off-by: Joffrey F --- compose/service.py | 7 ++++--- tests/unit/service_test.py | 19 +++++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/compose/service.py b/compose/service.py index 0a866161c..ae9e0bb08 100644 --- a/compose/service.py +++ b/compose/service.py @@ -877,7 +877,6 @@ class Service(object): container_volumes, self.options.get('tmpfs') or [], previous_container, container_mounts ) - override_options['binds'] = binds container_options['environment'].update(affinity) container_options['volumes'] = dict((v.internal, {}) for v in container_volumes or {}) @@ -890,13 +889,13 @@ class Service(object): if m.is_tmpfs: override_options['tmpfs'].append(m.target) else: - override_options['binds'].append(m.legacy_repr()) + binds.append(m.legacy_repr()) container_options['volumes'][m.target] = {} secret_volumes = self.get_secret_volumes() if secret_volumes: if version_lt(self.client.api_version, '1.30'): - override_options['binds'].extend(v.legacy_repr() for v in secret_volumes) + binds.extend(v.legacy_repr() for v in secret_volumes) container_options['volumes'].update( (v.target, {}) for v in secret_volumes ) @@ -904,6 +903,8 @@ class Service(object): override_options['mounts'] = override_options.get('mounts') or [] override_options['mounts'].extend([build_mount(v) for v in secret_volumes]) + # Remove possible duplicates (see e.g. https://github.com/docker/compose/issues/5885) + override_options['binds'] = list(set(binds)) return container_options, override_options def _get_container_host_config(self, override_options, one_off=False): diff --git a/tests/unit/service_test.py b/tests/unit/service_test.py index 4ccc48653..d50db9044 100644 --- a/tests/unit/service_test.py +++ b/tests/unit/service_test.py @@ -10,6 +10,7 @@ from docker.errors import NotFound from .. import mock from .. 
import unittest from compose.config.errors import DependencyError +from compose.config.types import MountSpec from compose.config.types import ServicePort from compose.config.types import ServiceSecret from compose.config.types import VolumeFromSpec @@ -955,6 +956,24 @@ class ServiceTest(unittest.TestCase): assert service.create_container().id == 'new_cont_id' + def test_build_volume_options_duplicate_binds(self): + self.mock_client.api_version = '1.29' # Trigger 3.2 format workaround + service = Service('foo', client=self.mock_client) + ctnr_opts, override_opts = service._build_container_volume_options( + previous_container=None, + container_options={ + 'volumes': [ + MountSpec.parse({'source': 'vol', 'target': '/data', 'type': 'volume'}), + VolumeSpec.parse('vol:/data:rw'), + ], + 'environment': {}, + }, + override_options={}, + ) + assert 'binds' in override_opts + assert len(override_opts['binds']) == 1 + assert override_opts['binds'][0] == 'vol:/data:rw' + class TestServiceNetwork(unittest.TestCase): def setUp(self): From 192a6655694620a8eaafed721c02e433b2a5e8fa Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Fri, 27 Apr 2018 19:03:45 +0000 Subject: [PATCH 08/26] "Bump 1.21.1" Signed-off-by: Joffrey F --- CHANGELOG.md | 26 ++++++++++++++++++++++++++ compose/__init__.py | 2 +- script/run/run.sh | 2 +- 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3709e263d..18742324f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,32 @@ Change log ========== +1.21.1 (2018-04-27) +------------------- + +### Bugfixes + +- In 1.21.0, we introduced a change to how project names are sanitized for + internal use in resource names. This caused issues when manipulating an + existing, deployed application whose name had changed as a result. + This release properly detects resources using "legacy" naming conventions. + +- Fixed an issue where specifying an in-context Dockerfile using an absolute + path would fail despite being valid. + +- Fixed a bug where IPAM option changes were incorrectly detected, preventing + redeployments. + +- Validation of v2 files now properly checks the structure of IPAM configs. + +- Improved support for credentials stores on Windows to include binaries using + extensions other than `.exe`. The list of valid extensions is determined by + the contents of the `PATHEXT` environment variable. + +- Fixed a bug where Compose would generate invalid binds containing duplicate + elements with some v3.2 files, triggering errors at the Engine level during + deployment. 
+ 1.21.0 (2018-04-10) ------------------- diff --git a/compose/__init__.py b/compose/__init__.py index 693a1ab18..6baeabc14 100644 --- a/compose/__init__.py +++ b/compose/__init__.py @@ -1,4 +1,4 @@ from __future__ import absolute_import from __future__ import unicode_literals -__version__ = '1.21.0' +__version__ = '1.21.1' diff --git a/script/run/run.sh b/script/run/run.sh index 1e4bd9853..45e74febd 100755 --- a/script/run/run.sh +++ b/script/run/run.sh @@ -15,7 +15,7 @@ set -e -VERSION="1.21.0" +VERSION="1.21.1" IMAGE="docker/compose:$VERSION" From 4691515420b2e62912b3ededef252677ec10a184 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 18 Apr 2018 16:01:52 -0700 Subject: [PATCH 09/26] Inital pass on comprehensive automated release script Signed-off-by: Joffrey F --- script/release/release.md.tmpl | 34 +++++ script/release/release.py | 197 +++++++++++++++++++++++++++ script/release/release/__init__.py | 0 script/release/release/bintray.py | 40 ++++++ script/release/release/const.py | 9 ++ script/release/release/downloader.py | 72 ++++++++++ script/release/release/repository.py | 161 ++++++++++++++++++++++ script/release/release/utils.py | 63 +++++++++ 8 files changed, 576 insertions(+) create mode 100644 script/release/release.md.tmpl create mode 100755 script/release/release.py create mode 100644 script/release/release/__init__.py create mode 100644 script/release/release/bintray.py create mode 100644 script/release/release/const.py create mode 100644 script/release/release/downloader.py create mode 100644 script/release/release/repository.py create mode 100644 script/release/release/utils.py diff --git a/script/release/release.md.tmpl b/script/release/release.md.tmpl new file mode 100644 index 000000000..ee97ef104 --- /dev/null +++ b/script/release/release.md.tmpl @@ -0,0 +1,34 @@ +If you're a Mac or Windows user, the best way to install Compose and keep it up-to-date is **[Docker for Mac and Windows](https://www.docker.com/products/docker)**. + +Docker for Mac and Windows will automatically install the latest version of Docker Engine for you. + +Alternatively, you can use the usual commands to install or upgrade Compose: + +``` +curl -L https://github.com/docker/compose/releases/download/{{version}}/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose +chmod +x /usr/local/bin/docker-compose +``` + +See the [install docs](https://docs.docker.com/compose/install/) for more install options and instructions. + +## Compose file format compatibility matrix + +| Compose file format | Docker Engine | +| --- | --- | +{% for engine, formats in compat_matrix.items() -%} +| {% for format in formats %}{{format}}{% if not loop.last %}, {% endif %}{% endfor %} | {{engine}}+ | +{% endfor -%} + +## Changes + +{{changelog}} + +Thanks to {% for name in contributors %}@{{name}}{% if not loop.last %}, {% endif %}{% endfor %} for contributing to this release! 
+ +## Integrity check + +Binary name | SHA-256 sum +| --- | --- | +{% for filename, sha in integrity.items() -%} +| `{{filename}}` | `{{sha[1]}}` | +{% endfor -%} diff --git a/script/release/release.py b/script/release/release.py new file mode 100755 index 000000000..f23146288 --- /dev/null +++ b/script/release/release.py @@ -0,0 +1,197 @@ +from __future__ import absolute_import +from __future__ import print_function +from __future__ import unicode_literals + +import argparse +import os +import sys +import time + +from jinja2 import Template +from release.bintray import BintrayAPI +from release.const import BINTRAY_ORG +from release.const import NAME +from release.const import REPO_ROOT +from release.downloader import BinaryDownloader +from release.repository import get_contributors +from release.repository import Repository +from release.repository import upload_assets +from release.utils import branch_name +from release.utils import compatibility_matrix +from release.utils import read_release_notes_from_changelog +from release.utils import ScriptError +from release.utils import update_init_py_version +from release.utils import update_run_sh_version + + +def create_initial_branch(repository, release, base, bintray_user): + release_branch = repository.create_release_branch(release, base) + print('Updating version info in __init__.py and run.sh') + update_run_sh_version(release) + update_init_py_version(release) + + input('Please add the release notes to the CHANGELOG.md file, then press Enter to continue.') + proceed = '' + while proceed.lower() != 'y': + print(repository.diff()) + proceed = input('Are these changes ok? y/N ') + + repository.create_bump_commit(release_branch, release) + repository.push_branch_to_remote(release_branch) + + bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], bintray_user) + print('Creating data repository {} on bintray'.format(release_branch.name)) + bintray_api.create_repository(BINTRAY_ORG, release_branch.name, 'generic') + + +def monitor_pr_status(pr_data): + print('Waiting for CI to complete...') + last_commit = pr_data.get_commits().reversed[0] + while True: + status = last_commit.get_combined_status() + if status.state == 'pending': + summary = { + 'pending': 0, + 'success': 0, + 'failure': 0, + } + for detail in status.statuses: + summary[detail.state] += 1 + print('{pending} pending, {success} successes, {failure} failures'.format(**summary)) + if status.total_count == 0: + # Mostly for testing purposes against repos with no CI setup + return True + time.sleep(30) + elif status.state == 'success': + print('{} successes: all clear!'.format(status.total_count)) + return True + else: + raise ScriptError('CI failure detected') + + +def create_release_draft(repository, version, pr_data, files): + print('Creating Github release draft') + with open(os.path.join(os.path.dirname(__file__), 'release.md.tmpl'), 'r') as f: + template = Template(f.read()) + print('Rendering release notes based on template') + release_notes = template.render( + version=version, + compat_matrix=compatibility_matrix(), + integrity=files, + contributors=get_contributors(pr_data), + changelog=read_release_notes_from_changelog(), + ) + gh_release = repository.create_release( + version, release_notes, draft=True, prerelease='-rc' in version, + target_commitish='release' + ) + print('Release draft initialized') + return gh_release + + +def resume(args): + raise NotImplementedError() + try: + repository = Repository(REPO_ROOT, args.repo or NAME) + br_name = branch_name(args.release) + 
if not repository.branch_exists(br_name): + raise ScriptError('No local branch exists for this release.') + # release_branch = repository.checkout_branch(br_name) + except ScriptError as e: + print(e) + return 1 + return 0 + + +def cancel(args): + try: + repository = Repository(REPO_ROOT, args.repo or NAME) + repository.close_release_pr(args.release) + repository.remove_release(args.release) + repository.remove_bump_branch(args.release) + # TODO: uncomment after testing is complete + # bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], args.bintray_user) + # print('Removing Bintray data repository for {}'.format(args.release)) + # bintray_api.delete_repository(BINTRAY_ORG, branch_name(args.release)) + except ScriptError as e: + print(e) + return 1 + print('Release cancellation complete.') + return 0 + + +def start(args): + try: + repository = Repository(REPO_ROOT, args.repo or NAME) + create_initial_branch(repository, args.release, args.base, args.bintray_user) + pr_data = repository.create_release_pull_request(args.release) + monitor_pr_status(pr_data) + downloader = BinaryDownloader(args.destination) + files = downloader.download_all(args.release) + gh_release = create_release_draft(repository, args.release, pr_data, files) + upload_assets(gh_release, files) + except ScriptError as e: + print(e) + return 1 + + return 0 + + +def main(): + if 'GITHUB_TOKEN' not in os.environ: + print('GITHUB_TOKEN environment variable must be set') + return 1 + + if 'BINTRAY_TOKEN' not in os.environ: + print('BINTRAY_TOKEN environment variable must be set') + return 1 + + parser = argparse.ArgumentParser( + description='Orchestrate a new release of docker/compose. This tool assumes that you have' + 'obtained a Github API token and Bintray API key and set the GITHUB_TOKEN and' + 'BINTRAY_TOKEN environment variables accordingly.', + epilog='''Example uses: + * Start a new feature release (includes all changes currently in master) + release.py -b user start 1.23.0 + * Start a new patch release + release.py -b user --patch 1.21.0 start 1.21.1 + * Cancel / rollback an existing release draft + release.py -b user cancel 1.23.0 + * Restart a previously aborted patch release + release.py -b user -p 1.21.0 resume 1.21.1 + ''', formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument( + 'action', choices=['start', 'resume', 'cancel'], + help='The action to be performed for this release' + ) + parser.add_argument('release', help='Release number, e.g. 
1.9.0-rc1, 2.1.1') + parser.add_argument( + '--patch', '-p', dest='base', + help='Which version is being patched by this release' + ) + parser.add_argument( + '--repo', '-r', dest='repo', + help='Start a release for the given repo (default: {})'.format(NAME) + ) + parser.add_argument( + '-b', dest='bintray_user', required=True, metavar='USER', + help='Username associated with the Bintray API key' + ) + parser.add_argument( + '--destination', '-o', metavar='DIR', default='binaries', + help='Directory where release binaries will be downloaded relative to the project root' + ) + args = parser.parse_args() + + if args.action == 'start': + return start(args) + elif args.action == 'resume': + return resume(args) + elif args.action == 'cancel': + return cancel(args) + print('Unexpected action "{}"'.format(args.action), file=sys.stderr) + return 1 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/script/release/release/__init__.py b/script/release/release/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/script/release/release/bintray.py b/script/release/release/bintray.py new file mode 100644 index 000000000..d99d372c6 --- /dev/null +++ b/script/release/release/bintray.py @@ -0,0 +1,40 @@ +from __future__ import absolute_import +from __future__ import unicode_literals + +import json + +import requests + +from .const import NAME + + +class BintrayAPI(requests.Session): + def __init__(self, api_key, user, *args, **kwargs): + super(BintrayAPI, self).__init__(*args, **kwargs) + self.auth = (user, api_key) + self.base_url = 'https://api.bintray.com/' + + def create_repository(self, subject, repo_name, repo_type='generic'): + url = '{base}/repos/{subject}/{repo_name}'.format( + base=self.base_url, subject=subject, repo_name=repo_name, + ) + data = { + 'name': repo_name, + 'type': repo_type, + 'private': False, + 'desc': 'Automated release for {}: {}'.format(NAME, repo_name), + 'labels': ['docker-compose', 'docker', 'release-bot'], + } + return self.post_json(url, data) + + def delete_repository(self, subject, repo_name): + url = '{base}/repos/{subject}/{repo_name}'.format( + base=self.base_url, subject=subject, repo_name=repo_name, + ) + return self.delete(url) + + def post_json(self, url, data, **kwargs): + if 'headers' not in kwargs: + kwargs['headers'] = {} + kwargs['headers']['Content-Type'] = 'application/json' + return self.post(url, data=json.dumps(data), **kwargs) diff --git a/script/release/release/const.py b/script/release/release/const.py new file mode 100644 index 000000000..34f338a89 --- /dev/null +++ b/script/release/release/const.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import +from __future__ import unicode_literals + +import os + + +REPO_ROOT = os.path.join(os.path.dirname(__file__), '..', '..', '..') +NAME = 'shin-/compose' +BINTRAY_ORG = 'shin-compose' diff --git a/script/release/release/downloader.py b/script/release/release/downloader.py new file mode 100644 index 000000000..cd43bc993 --- /dev/null +++ b/script/release/release/downloader.py @@ -0,0 +1,72 @@ +from __future__ import absolute_import +from __future__ import print_function +from __future__ import unicode_literals + +import hashlib +import os + +import requests + +from .const import BINTRAY_ORG +from .const import NAME +from .const import REPO_ROOT +from .utils import branch_name + + +class BinaryDownloader(requests.Session): + base_bintray_url = 'https://dl.bintray.com/{}'.format(BINTRAY_ORG) + base_appveyor_url = 
'https://ci.appveyor.com/api/projects/{}/artifacts/'.format(NAME) + + def __init__(self, destination, *args, **kwargs): + super(BinaryDownloader, self).__init__(*args, **kwargs) + self.destination = destination + os.makedirs(self.destination, exist_ok=True) + + def download_from_bintray(self, repo_name, filename): + print('Downloading {} from bintray'.format(filename)) + url = '{base}/{repo_name}/{filename}'.format( + base=self.base_bintray_url, repo_name=repo_name, filename=filename + ) + full_dest = os.path.join(REPO_ROOT, self.destination, filename) + return self._download(url, full_dest) + + def download_from_appveyor(self, branch_name, filename): + print('Downloading {} from appveyor'.format(filename)) + url = '{base}/dist%2F{filename}?branch={branch_name}'.format( + base=self.base_appveyor_url, filename=filename, branch_name=branch_name + ) + full_dest = os.path.join(REPO_ROOT, self.destination, filename) + return self.download(url, full_dest) + + def _download(self, url, full_dest): + m = hashlib.sha256() + with open(full_dest, 'wb') as f: + r = self.get(url, stream=True) + for chunk in r.iter_content(chunk_size=1024 * 600, decode_unicode=False): + print('.', end='', flush=True) + m.update(chunk) + f.write(chunk) + + print(' download complete') + hex_digest = m.hexdigest() + with open(full_dest + '.sha256', 'w') as f: + f.write('{} {}\n'.format(hex_digest, os.path.basename(full_dest))) + return full_dest, hex_digest + + def download_all(self, version): + files = { + 'docker-compose-Darwin-x86_64': None, + 'docker-compose-Linux-x86_64': None, + # 'docker-compose-Windows-x86_64.exe': None, + } + + for filename in files.keys(): + if 'Windows' in filename: + files[filename] = self.download_from_appveyor( + branch_name(version), filename + ) + else: + files[filename] = self.download_from_bintray( + branch_name(version), filename + ) + return files diff --git a/script/release/release/repository.py b/script/release/release/repository.py new file mode 100644 index 000000000..77c697a99 --- /dev/null +++ b/script/release/release/repository.py @@ -0,0 +1,161 @@ +from __future__ import absolute_import +from __future__ import unicode_literals + +import os + +from git import GitCommandError +from git import Repo +from github import Github + +from .const import NAME +from .const import REPO_ROOT +from .utils import branch_name +from .utils import read_release_notes_from_changelog +from .utils import ScriptError + + +class Repository(object): + def __init__(self, root=None, gh_name=None): + if root is None: + root = REPO_ROOT + if gh_name is None: + gh_name = NAME + self.git_repo = Repo(root) + self.gh_client = Github(os.environ['GITHUB_TOKEN']) + self.gh_repo = self.gh_client.get_repo(gh_name) + + def create_release_branch(self, version, base=None): + print('Creating release branch {} based on {}...'.format(version, base or 'master')) + remote = self.find_remote(self.gh_repo.full_name) + remote.fetch() + if self.branch_exists(branch_name(version)): + raise ScriptError( + "Branch {} already exists locally. 
" + "Please remove it before running the release script.".format(branch_name(version)) + ) + if base is not None: + base = self.git_repo.tag('refs/tags/{}'.format(base)) + else: + base = 'refs/remotes/{}/master'.format(remote.name) + release_branch = self.git_repo.create_head(branch_name(version), commit=base) + release_branch.checkout() + self.git_repo.git.merge('--strategy=ours', '--no-edit', '{}/release'.format(remote.name)) + with release_branch.config_writer() as cfg: + cfg.set_value('release', version) + return release_branch + + def find_remote(self, remote_name=None): + if not remote_name: + remote_name = self.gh_repo.full_name + for remote in self.git_repo.remotes: + for url in remote.urls: + if remote_name in url: + return remote + return None + + def create_bump_commit(self, bump_branch, version): + print('Creating bump commit...') + bump_branch.checkout() + self.git_repo.git.commit('-a', '-s', '-m "Bump {}"'.format(version), '--no-verify') + + def diff(self): + return self.git_repo.git.diff() + + def checkout_branch(self, name): + return self.git_repo.branches[name].checkout() + + def push_branch_to_remote(self, branch, remote_name=None): + print('Pushing branch {} to remote...'.format(branch.name)) + remote = self.find_remote(remote_name) + remote.push(refspec=branch) + + def branch_exists(self, name): + return name in [h.name for h in self.git_repo.heads] + + def create_release_pull_request(self, version): + return self.gh_repo.create_pull( + title='Bump {}'.format(version), + body='Automated release for docker-compose {}\n\n{}'.format( + version, read_release_notes_from_changelog() + ), + base='release', + head=branch_name(version), + ) + + def create_release(self, version, release_notes, **kwargs): + return self.gh_repo.create_git_release( + tag=version, name=version, message=release_notes, **kwargs + ) + + def remove_release(self, version): + print('Removing release draft for {}'.format(version)) + releases = self.gh_repo.get_releases() + for release in releases: + if release.tag_name == version and release.title == version: + if not release.draft: + print( + 'The release at {} is no longer a draft. If you TRULY intend ' + 'to remove it, please do so manually.' + ) + continue + release.delete_release() + + def remove_bump_branch(self, version, remote_name=None): + name = branch_name(version) + if not self.branch_exists(name): + return False + print('Removing local branch "{}"'.format(name)) + if self.git_repo.active_branch.name == name: + print('Active branch is about to be deleted. Checking out to master...') + try: + self.checkout_branch('master') + except GitCommandError: + raise ScriptError( + 'Unable to checkout master. Try stashing local changes before proceeding.' 
+ ) + self.git_repo.branches[name].delete(self.git_repo, name, force=True) + print('Removing remote branch "{}"'.format(name)) + remote = self.find_remote(remote_name) + try: + remote.push(name, delete=True) + except GitCommandError as e: + if 'remote ref does not exist' in str(e): + return False + raise ScriptError( + 'Error trying to remove remote branch: {}'.format(e) + ) + return True + + def close_release_pr(self, version): + print('Retrieving and closing release PR for {}'.format(version)) + name = branch_name(version) + open_prs = self.gh_repo.get_pulls(state='open') + count = 0 + for pr in open_prs: + if pr.head.ref == name: + print('Found matching PR #{}'.format(pr.number)) + pr.edit(state='closed') + count += 1 + if count == 0: + print('No open PR for this release branch.') + return count + + +def get_contributors(pr_data): + commits = pr_data.get_commits() + authors = {} + for commit in commits: + author = commit.author.login + authors[author] = authors.get(author, 0) + 1 + return [x[0] for x in sorted(list(authors.items()), key=lambda x: x[1])] + + +def upload_assets(gh_release, files): + print('Uploading binaries and hash sums') + for filename, filedata in files.items(): + print('Uploading {}...'.format(filename)) + gh_release.upload_asset(filedata[0], content_type='application/octet-stream') + gh_release.upload_asset('{}.sha256'.format(filedata[0]), content_type='text/plain') + gh_release.upload_asset( + os.path.join(REPO_ROOT, 'script', 'run', 'run.sh'), content_type='text/plain' + ) diff --git a/script/release/release/utils.py b/script/release/release/utils.py new file mode 100644 index 000000000..b0e1f6a84 --- /dev/null +++ b/script/release/release/utils.py @@ -0,0 +1,63 @@ +from __future__ import absolute_import +from __future__ import unicode_literals + +import os +import re + +from .const import REPO_ROOT +from compose import const as compose_const + +section_header_re = re.compile(r'^[0-9]+\.[0-9]+\.[0-9]+ \([0-9]{4}-[01][0-9]-[0-3][0-9]\)$') + + +class ScriptError(Exception): + pass + + +def branch_name(version): + return 'bump-{}'.format(version) + + +def read_release_notes_from_changelog(): + with open(os.path.join(REPO_ROOT, 'CHANGELOG.md'), 'r') as f: + lines = f.readlines() + i = 0 + while i < len(lines): + if section_header_re.match(lines[i]): + break + i += 1 + + j = i + 1 + while j < len(lines): + if section_header_re.match(lines[j]): + break + j += 1 + + return ''.join(lines[i + 2:j - 1]) + + +def update_init_py_version(version): + path = os.path.join(REPO_ROOT, 'compose', '__init__.py') + with open(path, 'r') as f: + contents = f.read() + contents = re.sub(r"__version__ = '[0-9a-z.-]+'", "__version__ = '{}'".format(version), contents) + with open(path, 'w') as f: + f.write(contents) + + +def update_run_sh_version(version): + path = os.path.join(REPO_ROOT, 'script', 'run', 'run.sh') + with open(path, 'r') as f: + contents = f.read() + contents = re.sub(r'VERSION="[0-9a-z.-]+"', 'VERSION="{}"'.format(version), contents) + with open(path, 'w') as f: + f.write(contents) + + +def compatibility_matrix(): + result = {} + for engine_version in compose_const.API_VERSION_TO_ENGINE_VERSION.values(): + result[engine_version] = [] + for fmt, api_version in compose_const.API_VERSIONS.items(): + result[compose_const.API_VERSION_TO_ENGINE_VERSION[api_version]].append(fmt.vstring) + return result From e4c5b2a248e239ad6c1720ce2df63a6132c690a8 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 18 Apr 2018 16:58:24 -0700 Subject: [PATCH 10/26] Implement resuming a release 
Signed-off-by: Joffrey F --- script/release/release.py | 52 ++++++++++++++++++++++++---- script/release/release/repository.py | 21 ++++++++++- 2 files changed, 65 insertions(+), 8 deletions(-) diff --git a/script/release/release.py b/script/release/release.py index f23146288..aa6c6198b 100755 --- a/script/release/release.py +++ b/script/release/release.py @@ -26,6 +26,12 @@ from release.utils import update_run_sh_version def create_initial_branch(repository, release, base, bintray_user): release_branch = repository.create_release_branch(release, base) + return create_bump_commit(repository, release_branch, bintray_user) + + +def create_bump_commit(repository, release_branch, bintray_user): + with release_branch.config_reader() as cfg: + release = cfg.get('release') print('Updating version info in __init__.py and run.sh') update_run_sh_version(release) update_init_py_version(release) @@ -36,7 +42,8 @@ def create_initial_branch(repository, release, base, bintray_user): print(repository.diff()) proceed = input('Are these changes ok? y/N ') - repository.create_bump_commit(release_branch, release) + if repository.diff(): + repository.create_bump_commit(release_branch, release) repository.push_branch_to_remote(release_branch) bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], bintray_user) @@ -89,17 +96,48 @@ def create_release_draft(repository, version, pr_data, files): return gh_release +def print_final_instructions(gh_release): + print(""" +You're almost done! The following steps should be executed after you've +verified that everything is in order and are ready to make the release public: +1. +2. +3.""") + + def resume(args): - raise NotImplementedError() try: repository = Repository(REPO_ROOT, args.repo or NAME) br_name = branch_name(args.release) if not repository.branch_exists(br_name): raise ScriptError('No local branch exists for this release.') - # release_branch = repository.checkout_branch(br_name) + release_branch = repository.checkout_branch(br_name) + create_bump_commit(repository, release_branch, args.bintray_user) + pr_data = repository.find_release_pr(args.release) + if not pr_data: + pr_data = repository.create_release_pull_request(args.release) + monitor_pr_status(pr_data) + downloader = BinaryDownloader(args.destination) + files = downloader.download_all(args.release) + gh_release = repository.find_release(args.release) + if not gh_release: + gh_release = create_release_draft(repository, args.release, pr_data, files) + elif not gh_release.draft: + print('WARNING!! Found non-draft (public) release for this version!') + proceed = input( + 'Are you sure you wish to proceed? Modifying an already ' + 'released version is dangerous! y/N' + ) + if proceed.lower() != 'y': + raise ScriptError('Aborting release') + for asset in gh_release.get_assets(): + asset.delete_asset() + upload_assets(gh_release, files) except ScriptError as e: print(e) return 1 + + print_final_instructions(gh_release) return 0 @@ -134,6 +172,7 @@ def start(args): print(e) return 1 + print_final_instructions(gh_release) return 0 @@ -147,8 +186,8 @@ def main(): return 1 parser = argparse.ArgumentParser( - description='Orchestrate a new release of docker/compose. This tool assumes that you have' - 'obtained a Github API token and Bintray API key and set the GITHUB_TOKEN and' + description='Orchestrate a new release of docker/compose. 
This tool assumes that you have ' + 'obtained a Github API token and Bintray API key and set the GITHUB_TOKEN and ' 'BINTRAY_TOKEN environment variables accordingly.', epilog='''Example uses: * Start a new feature release (includes all changes currently in master) @@ -158,8 +197,7 @@ def main(): * Cancel / rollback an existing release draft release.py -b user cancel 1.23.0 * Restart a previously aborted patch release - release.py -b user -p 1.21.0 resume 1.21.1 - ''', formatter_class=argparse.RawTextHelpFormatter) + release.py -b user -p 1.21.0 resume 1.21.1''', formatter_class=argparse.RawTextHelpFormatter) parser.add_argument( 'action', choices=['start', 'resume', 'cancel'], help='The action to be performed for this release' diff --git a/script/release/release/repository.py b/script/release/release/repository.py index 77c697a99..18c2dbf2c 100644 --- a/script/release/release/repository.py +++ b/script/release/release/repository.py @@ -67,7 +67,7 @@ class Repository(object): def push_branch_to_remote(self, branch, remote_name=None): print('Pushing branch {} to remote...'.format(branch.name)) remote = self.find_remote(remote_name) - remote.push(refspec=branch) + remote.push(refspec=branch, force=True) def branch_exists(self, name): return name in [h.name for h in self.git_repo.heads] @@ -87,6 +87,14 @@ class Repository(object): tag=version, name=version, message=release_notes, **kwargs ) + def find_release(self, version): + print('Retrieving release draft for {}'.format(version)) + releases = self.gh_repo.get_releases() + for release in releases: + if release.tag_name == version and release.title == version: + return release + return None + def remove_release(self, version): print('Removing release draft for {}'.format(version)) releases = self.gh_repo.get_releases() @@ -126,6 +134,17 @@ class Repository(object): ) return True + def find_release_pr(self, version): + print('Retrieving release PR for {}'.format(version)) + name = branch_name(version) + open_prs = self.gh_repo.get_pulls(state='open') + for pr in open_prs: + if pr.head.ref == name: + print('Found matching PR #{}'.format(pr.number)) + return pr + print('No open PR for this release branch.') + return None + def close_release_pr(self, version): print('Retrieving and closing release PR for {}'.format(version)) name = branch_name(version) From 0f4dbba0ec2a19593da198fe06ee5947078b2951 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 18 Apr 2018 17:02:15 -0700 Subject: [PATCH 11/26] Temp test Signed-off-by: Joffrey F --- script/release/release/repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/script/release/release/repository.py b/script/release/release/repository.py index 18c2dbf2c..c84c9e1b5 100644 --- a/script/release/release/repository.py +++ b/script/release/release/repository.py @@ -36,7 +36,7 @@ class Repository(object): if base is not None: base = self.git_repo.tag('refs/tags/{}'.format(base)) else: - base = 'refs/remotes/{}/master'.format(remote.name) + base = 'refs/remotes/{}/automated-releases'.format(remote.name) release_branch = self.git_repo.create_head(branch_name(version), commit=base) release_branch.checkout() self.git_repo.git.merge('--strategy=ours', '--no-edit', '{}/release'.format(remote.name)) From f083ef3d17a3eb936bef3266a7b371fd626fc5be Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 18 Apr 2018 17:07:41 -0700 Subject: [PATCH 12/26] Added logging for asset removal Signed-off-by: Joffrey F --- script/release/release.py | 4 ++-- script/release/release/repository.py | 7 +++++++ 
2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/script/release/release.py b/script/release/release.py index aa6c6198b..b72fb2546 100755 --- a/script/release/release.py +++ b/script/release/release.py @@ -13,6 +13,7 @@ from release.const import BINTRAY_ORG from release.const import NAME from release.const import REPO_ROOT from release.downloader import BinaryDownloader +from release.repository import delete_assets from release.repository import get_contributors from release.repository import Repository from release.repository import upload_assets @@ -130,8 +131,7 @@ def resume(args): ) if proceed.lower() != 'y': raise ScriptError('Aborting release') - for asset in gh_release.get_assets(): - asset.delete_asset() + delete_assets(gh_release) upload_assets(gh_release, files) except ScriptError as e: print(e) diff --git a/script/release/release/repository.py b/script/release/release/repository.py index c84c9e1b5..d7034f8bd 100644 --- a/script/release/release/repository.py +++ b/script/release/release/repository.py @@ -178,3 +178,10 @@ def upload_assets(gh_release, files): gh_release.upload_asset( os.path.join(REPO_ROOT, 'script', 'run', 'run.sh'), content_type='text/plain' ) + + +def delete_assets(gh_release): + print('Removing previously uploaded assets') + for asset in gh_release.get_assets(): + print('Deleting asset {}'.format(asset.name)) + asset.delete_asset() From fbbac04fb795b756d870a4607fdb5b57bd65d139 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 18 Apr 2018 18:33:20 -0700 Subject: [PATCH 13/26] Add images build step and finalize placeholder Signed-off-by: Joffrey F --- script/release/release.py | 116 ++++++++++++++++++++++----- script/release/release/repository.py | 5 ++ 2 files changed, 101 insertions(+), 20 deletions(-) diff --git a/script/release/release.py b/script/release/release.py index b72fb2546..848ab9751 100755 --- a/script/release/release.py +++ b/script/release/release.py @@ -4,9 +4,11 @@ from __future__ import unicode_literals import argparse import os +import shutil import sys import time +import docker from jinja2 import Template from release.bintray import BintrayAPI from release.const import BINTRAY_ORG @@ -77,6 +79,15 @@ def monitor_pr_status(pr_data): raise ScriptError('CI failure detected') +def check_pr_mergeable(pr_data): + if not pr_data.mergeable: + print( + 'WARNING!! PR #{} can not currently be merged. You will need to ' + 'resolve the conflicts manually before finalizing the release.'.format(pr_data.number) + ) + return pr_data.mergeable + + def create_release_draft(repository, version, pr_data, files): print('Creating Github release draft') with open(os.path.join(os.path.dirname(__file__), 'release.md.tmpl'), 'r') as f: @@ -97,13 +108,51 @@ def create_release_draft(repository, version, pr_data, files): return gh_release -def print_final_instructions(gh_release): - print(""" -You're almost done! The following steps should be executed after you've -verified that everything is in order and are ready to make the release public: -1. -2. 
-3.""") +def build_images(repository, files, version): + print("Building release images...") + repository.write_git_sha() + docker_client = docker.APIClient(**docker.utils.kwargs_from_env()) + distdir = os.path.join(REPO_ROOT, 'dist') + os.makedirs(distdir, exist_ok=True) + shutil.copy(files['docker-compose-Linux-x86_64'][0], distdir) + print('Building docker/compose image') + logstream = docker_client.build( + REPO_ROOT, tag='docker/compose:{}'.format(version), dockerfile='Dockerfile.run', + decode=True + ) + for chunk in logstream: + if 'error' in chunk: + raise ScriptError('Build error: {}'.format(chunk['error'])) + if 'stream' in chunk: + print(chunk['stream'], end='') + + print('Building test image (for UCP e2e)') + logstream = docker_client.build( + REPO_ROOT, tag='docker-compose-tests:tmp', decode=True + ) + for chunk in logstream: + if 'error' in chunk: + raise ScriptError('Build error: {}'.format(chunk['error'])) + if 'stream' in chunk: + print(chunk['stream'], end='') + + container = docker_client.create_container( + 'docker-compose-tests:tmp', entrypoint='tox' + ) + docker_client.commit(container, 'docker/compose-tests:latest') + docker_client.tag('docker/compose-tests:latest', 'docker/compose-tests:{}'.format(version)) + docker_client.remove_container(container, force=True) + docker_client.remove_image('docker-compose-tests:tmp', force=True) + + +def print_final_instructions(args): + print( + "You're almost done! Please verify that everything is in order and " + "you are ready to make the release public, then run the following " + "command:\n{exe} -b {user} finalize {version}".format( + exe=sys.argv[0], user=args.bintray_user, version=args.release + ) + ) def resume(args): @@ -117,6 +166,7 @@ def resume(args): pr_data = repository.find_release_pr(args.release) if not pr_data: pr_data = repository.create_release_pull_request(args.release) + check_pr_mergeable(pr_data) monitor_pr_status(pr_data) downloader = BinaryDownloader(args.destination) files = downloader.download_all(args.release) @@ -133,11 +183,12 @@ def resume(args): raise ScriptError('Aborting release') delete_assets(gh_release) upload_assets(gh_release, files) + build_images(repository, files, args.release) except ScriptError as e: print(e) return 1 - print_final_instructions(gh_release) + print_final_instructions(args) return 0 @@ -163,19 +214,50 @@ def start(args): repository = Repository(REPO_ROOT, args.repo or NAME) create_initial_branch(repository, args.release, args.base, args.bintray_user) pr_data = repository.create_release_pull_request(args.release) + check_pr_mergeable(pr_data) monitor_pr_status(pr_data) downloader = BinaryDownloader(args.destination) files = downloader.download_all(args.release) gh_release = create_release_draft(repository, args.release, pr_data, files) upload_assets(gh_release, files) + build_images(repository, files, args.release) except ScriptError as e: print(e) return 1 - print_final_instructions(gh_release) + print_final_instructions(args) return 0 +def finalize(args): + try: + raise NotImplementedError() + except ScriptError as e: + print(e) + return 1 + + return 0 + + +ACTIONS = [ + 'start', + 'cancel', + 'resume', + 'finalize', +] + +EPILOG = '''Example uses: + * Start a new feature release (includes all changes currently in master) + release.py -b user start 1.23.0 + * Start a new patch release + release.py -b user --patch 1.21.0 start 1.21.1 + * Cancel / rollback an existing release draft + release.py -b user cancel 1.23.0 + * Restart a previously aborted patch release + 
release.py -b user -p 1.21.0 resume 1.21.1 +''' + + def main(): if 'GITHUB_TOKEN' not in os.environ: print('GITHUB_TOKEN environment variable must be set') @@ -189,18 +271,9 @@ def main(): description='Orchestrate a new release of docker/compose. This tool assumes that you have ' 'obtained a Github API token and Bintray API key and set the GITHUB_TOKEN and ' 'BINTRAY_TOKEN environment variables accordingly.', - epilog='''Example uses: - * Start a new feature release (includes all changes currently in master) - release.py -b user start 1.23.0 - * Start a new patch release - release.py -b user --patch 1.21.0 start 1.21.1 - * Cancel / rollback an existing release draft - release.py -b user cancel 1.23.0 - * Restart a previously aborted patch release - release.py -b user -p 1.21.0 resume 1.21.1''', formatter_class=argparse.RawTextHelpFormatter) + epilog=EPILOG, formatter_class=argparse.RawTextHelpFormatter) parser.add_argument( - 'action', choices=['start', 'resume', 'cancel'], - help='The action to be performed for this release' + 'action', choices=ACTIONS, help='The action to be performed for this release' ) parser.add_argument('release', help='Release number, e.g. 1.9.0-rc1, 2.1.1') parser.add_argument( @@ -227,6 +300,9 @@ def main(): return resume(args) elif args.action == 'cancel': return cancel(args) + elif args.action == 'finalize': + return finalize(args) + print('Unexpected action "{}"'.format(args.action), file=sys.stderr) return 1 diff --git a/script/release/release/repository.py b/script/release/release/repository.py index d7034f8bd..dc4c6c466 100644 --- a/script/release/release/repository.py +++ b/script/release/release/repository.py @@ -159,6 +159,10 @@ class Repository(object): print('No open PR for this release branch.') return count + def write_git_sha(self): + with open(os.path.join(REPO_ROOT, 'compose', 'GITSHA'), 'w') as f: + f.write(self.git_repo.head.commit.hexsha[:7]) + def get_contributors(pr_data): commits = pr_data.get_commits() @@ -175,6 +179,7 @@ def upload_assets(gh_release, files): print('Uploading {}...'.format(filename)) gh_release.upload_asset(filedata[0], content_type='application/octet-stream') gh_release.upload_asset('{}.sha256'.format(filedata[0]), content_type='text/plain') + print('Uploading run.sh...') gh_release.upload_asset( os.path.join(REPO_ROOT, 'script', 'run', 'run.sh'), content_type='text/plain' ) From 05afd5a2dbc5028287233ea680b74fb74c09b196 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 19 Apr 2018 14:47:04 -0700 Subject: [PATCH 14/26] Add finalize step Signed-off-by: Joffrey F --- script/release/release.py | 77 ++++++++++++-------------- script/release/release/images.py | 82 ++++++++++++++++++++++++++++ script/release/release/repository.py | 8 +++ 3 files changed, 125 insertions(+), 42 deletions(-) create mode 100644 script/release/release/images.py diff --git a/script/release/release.py b/script/release/release.py index 848ab9751..92a8c1c0c 100755 --- a/script/release/release.py +++ b/script/release/release.py @@ -4,17 +4,18 @@ from __future__ import unicode_literals import argparse import os -import shutil import sys import time +from distutils.core import run_setup -import docker +import pypandoc from jinja2 import Template from release.bintray import BintrayAPI from release.const import BINTRAY_ORG from release.const import NAME from release.const import REPO_ROOT from release.downloader import BinaryDownloader +from release.images import ImageManager from release.repository import delete_assets from release.repository import 
get_contributors from release.repository import Repository @@ -108,43 +109,6 @@ def create_release_draft(repository, version, pr_data, files): return gh_release -def build_images(repository, files, version): - print("Building release images...") - repository.write_git_sha() - docker_client = docker.APIClient(**docker.utils.kwargs_from_env()) - distdir = os.path.join(REPO_ROOT, 'dist') - os.makedirs(distdir, exist_ok=True) - shutil.copy(files['docker-compose-Linux-x86_64'][0], distdir) - print('Building docker/compose image') - logstream = docker_client.build( - REPO_ROOT, tag='docker/compose:{}'.format(version), dockerfile='Dockerfile.run', - decode=True - ) - for chunk in logstream: - if 'error' in chunk: - raise ScriptError('Build error: {}'.format(chunk['error'])) - if 'stream' in chunk: - print(chunk['stream'], end='') - - print('Building test image (for UCP e2e)') - logstream = docker_client.build( - REPO_ROOT, tag='docker-compose-tests:tmp', decode=True - ) - for chunk in logstream: - if 'error' in chunk: - raise ScriptError('Build error: {}'.format(chunk['error'])) - if 'stream' in chunk: - print(chunk['stream'], end='') - - container = docker_client.create_container( - 'docker-compose-tests:tmp', entrypoint='tox' - ) - docker_client.commit(container, 'docker/compose-tests:latest') - docker_client.tag('docker/compose-tests:latest', 'docker/compose-tests:{}'.format(version)) - docker_client.remove_container(container, force=True) - docker_client.remove_image('docker-compose-tests:tmp', force=True) - - def print_final_instructions(args): print( "You're almost done! Please verify that everything is in order and " @@ -183,7 +147,8 @@ def resume(args): raise ScriptError('Aborting release') delete_assets(gh_release) upload_assets(gh_release, files) - build_images(repository, files, args.release) + img_manager = ImageManager(args.release) + img_manager.build_images(repository, files, args.release) except ScriptError as e: print(e) return 1 @@ -220,7 +185,8 @@ def start(args): files = downloader.download_all(args.release) gh_release = create_release_draft(repository, args.release, pr_data, files) upload_assets(gh_release, files) - build_images(repository, files, args.release) + img_manager = ImageManager(args.release) + img_manager.build_images(repository, files) except ScriptError as e: print(e) return 1 @@ -231,7 +197,34 @@ def start(args): def finalize(args): try: - raise NotImplementedError() + repository = Repository(REPO_ROOT, args.repo or NAME) + img_manager = ImageManager(args.release) + pr_data = repository.find_release_pr(args.release) + if not pr_data: + raise ScriptError('No PR found for {}'.format(args.release)) + if not check_pr_mergeable(pr_data): + raise ScriptError('Can not finalize release with an unmergeable PR') + if not img_manager.check_images(args.release): + raise ScriptError('Missing release image') + br_name = branch_name(args.release) + if not repository.branch_exists(br_name): + raise ScriptError('No local branch exists for this release.') + gh_release = repository.find_release(args.release) + if not gh_release: + raise ScriptError('No Github release draft for this version') + + pypandoc.convert_file( + os.path.join(REPO_ROOT, 'README.md'), 'rst', outputfile=os.path.join(REPO_ROOT, 'README.rst') + ) + run_setup(os.path.join(REPO_ROOT, 'setup.py'), script_args=['sdist', 'bdist_wheel']) + + merge_status = pr_data.merge() + if not merge_status.merged: + raise ScriptError('Unable to merge PR #{}: {}'.format(pr_data.number, merge_status.message)) + print('Uploading to 
PyPi') + run_setup(os.path.join(REPO_ROOT, 'setup.py'), script_args=['upload']) + img_manager.push_images(args.release) + repository.publish_release(gh_release) except ScriptError as e: print(e) return 1 diff --git a/script/release/release/images.py b/script/release/release/images.py new file mode 100644 index 000000000..0c7bb2045 --- /dev/null +++ b/script/release/release/images.py @@ -0,0 +1,82 @@ +from __future__ import absolute_import +from __future__ import print_function +from __future__ import unicode_literals + +import os +import shutil + +import docker + +from .const import REPO_ROOT +from .utils import ScriptError + + +class ImageManager(object): + def __init__(self, version): + self.docker_client = docker.APIClient(**docker.utils.kwargs_from_env()) + self.version = version + + def build_images(self, repository, files): + print("Building release images...") + repository.write_git_sha() + docker_client = docker.APIClient(**docker.utils.kwargs_from_env()) + distdir = os.path.join(REPO_ROOT, 'dist') + os.makedirs(distdir, exist_ok=True) + shutil.copy(files['docker-compose-Linux-x86_64'][0], distdir) + print('Building docker/compose image') + logstream = docker_client.build( + REPO_ROOT, tag='docker/compose:{}'.format(self.version), dockerfile='Dockerfile.run', + decode=True + ) + for chunk in logstream: + if 'error' in chunk: + raise ScriptError('Build error: {}'.format(chunk['error'])) + if 'stream' in chunk: + print(chunk['stream'], end='') + + print('Building test image (for UCP e2e)') + logstream = docker_client.build( + REPO_ROOT, tag='docker-compose-tests:tmp', decode=True + ) + for chunk in logstream: + if 'error' in chunk: + raise ScriptError('Build error: {}'.format(chunk['error'])) + if 'stream' in chunk: + print(chunk['stream'], end='') + + container = docker_client.create_container( + 'docker-compose-tests:tmp', entrypoint='tox' + ) + docker_client.commit(container, 'docker/compose-tests:latest') + docker_client.tag('docker/compose-tests:latest', 'docker/compose-tests:{}'.format(self.version)) + docker_client.remove_container(container, force=True) + docker_client.remove_image('docker-compose-tests:tmp', force=True) + + @property + def image_names(self): + return [ + 'docker/compose-tests:latest', + 'docker/compose-tests:{}'.format(self.version), + 'docker/compose:{}'.format(self.version) + ] + + def check_images(self, version): + docker_client = docker.APIClient(**docker.utils.kwargs_from_env()) + + for name in self.image_names: + try: + docker_client.inspect_image(name) + except docker.errors.ImageNotFound: + print('Expected image {} was not found'.format(name)) + return False + return True + + def push_images(self): + docker_client = docker.APIClient(**docker.utils.kwargs_from_env()) + + for name in self.image_names: + print('Pushing {} to Docker Hub'.format(name)) + logstream = docker_client.push(name, stream=True, decode=True) + for chunk in logstream: + if 'status' in chunk: + print(chunk['status']) diff --git a/script/release/release/repository.py b/script/release/release/repository.py index dc4c6c466..122eada8a 100644 --- a/script/release/release/repository.py +++ b/script/release/release/repository.py @@ -95,6 +95,14 @@ class Repository(object): return release return None + def publish_release(self, release): + release.update_release( + name=release.title, + message=release.body, + draft=False, + prerelease=release.prerelease + ) + def remove_release(self, version): print('Removing release draft for {}'.format(version)) releases = self.gh_repo.get_releases() 
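# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch series above or below): a minimal,
# hand-driven walk through the finalize pieces introduced in PATCH 14, assuming
# GITHUB_TOKEN is exported, a local Docker engine is available, and the release
# branch, images and GitHub release draft already exist. The version string is
# a made-up example; real runs go through `release.py finalize` instead.
# ---------------------------------------------------------------------------
# from release.const import NAME, REPO_ROOT
# from release.images import ImageManager
# from release.repository import Repository
#
# version = '1.22.0-rc1'  # hypothetical release number
#
# repository = Repository(REPO_ROOT, NAME)
# img_manager = ImageManager(version)
#
# # check_images() inspects each expected docker/compose* tag locally, so a
# # missing image is caught before anything is published.
# if not img_manager.check_images(version):
#     raise SystemExit('Release images are missing; run the build step first.')
#
# # publish_release() only clears the draft flag on the existing GitHub
# # release; the title, notes and uploaded assets are left untouched.
# gh_release = repository.find_release(version)
# if gh_release is not None and gh_release.draft:
#     repository.publish_release(gh_release)
# ---------------------------------------------------------------------------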
From f248dbe28062682cdc05535e8af1f2605672d47c Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 19 Apr 2018 14:54:17 -0700 Subject: [PATCH 15/26] Avoid accidental prod push Signed-off-by: Joffrey F --- script/release/release.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/script/release/release.py b/script/release/release.py index 92a8c1c0c..3c154012f 100755 --- a/script/release/release.py +++ b/script/release/release.py @@ -213,6 +213,8 @@ def finalize(args): if not gh_release: raise ScriptError('No Github release draft for this version') + repository.checkout_branch(br_name) + pypandoc.convert_file( os.path.join(REPO_ROOT, 'README.md'), 'rst', outputfile=os.path.join(REPO_ROOT, 'README.rst') ) @@ -222,8 +224,9 @@ def finalize(args): if not merge_status.merged: raise ScriptError('Unable to merge PR #{}: {}'.format(pr_data.number, merge_status.message)) print('Uploading to PyPi') - run_setup(os.path.join(REPO_ROOT, 'setup.py'), script_args=['upload']) - img_manager.push_images(args.release) + # TODO: this will do real stuff. Uncomment when done testing + # run_setup(os.path.join(REPO_ROOT, 'setup.py'), script_args=['upload']) + # img_manager.push_images(args.release) repository.publish_release(gh_release) except ScriptError as e: print(e) From 0621739a86f0eca07811f43915750dcd849b52af Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 19 Apr 2018 15:22:55 -0700 Subject: [PATCH 16/26] Early check for non-draft release in resume Signed-off-by: Joffrey F --- script/release/release.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/script/release/release.py b/script/release/release.py index 3c154012f..06dfdad9a 100755 --- a/script/release/release.py +++ b/script/release/release.py @@ -125,6 +125,15 @@ def resume(args): br_name = branch_name(args.release) if not repository.branch_exists(br_name): raise ScriptError('No local branch exists for this release.') + gh_release = repository.find_release(args.release) + if gh_release and not gh_release.draft: + print('WARNING!! Found non-draft (public) release for this version!') + proceed = input( + 'Are you sure you wish to proceed? Modifying an already ' + 'released version is dangerous! y/N ' + ) + if proceed.lower() != 'y': + raise ScriptError('Aborting release') release_branch = repository.checkout_branch(br_name) create_bump_commit(repository, release_branch, args.bintray_user) pr_data = repository.find_release_pr(args.release) @@ -134,17 +143,8 @@ def resume(args): monitor_pr_status(pr_data) downloader = BinaryDownloader(args.destination) files = downloader.download_all(args.release) - gh_release = repository.find_release(args.release) if not gh_release: gh_release = create_release_draft(repository, args.release, pr_data, files) - elif not gh_release.draft: - print('WARNING!! Found non-draft (public) release for this version!') - proceed = input( - 'Are you sure you wish to proceed? Modifying an already ' - 'released version is dangerous! 
y/N' - ) - if proceed.lower() != 'y': - raise ScriptError('Aborting release') delete_assets(gh_release) upload_assets(gh_release, files) img_manager = ImageManager(args.release) From 4fab78d7e0d81ad00c66ad625566639ff4cef851 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 19 Apr 2018 15:24:41 -0700 Subject: [PATCH 17/26] Default base is master Signed-off-by: Joffrey F --- script/release/release/repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/script/release/release/repository.py b/script/release/release/repository.py index 122eada8a..e0c581bd5 100644 --- a/script/release/release/repository.py +++ b/script/release/release/repository.py @@ -36,7 +36,7 @@ class Repository(object): if base is not None: base = self.git_repo.tag('refs/tags/{}'.format(base)) else: - base = 'refs/remotes/{}/automated-releases'.format(remote.name) + base = 'refs/remotes/{}/master'.format(remote.name) release_branch = self.git_repo.create_head(branch_name(version), commit=base) release_branch.checkout() self.git_repo.git.merge('--strategy=ours', '--no-edit', '{}/release'.format(remote.name)) From b68811fd7f768b9187fd70584bd43c216c01895a Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Fri, 20 Apr 2018 13:06:41 -0700 Subject: [PATCH 18/26] Add support for PR cherry picks Signed-off-by: Joffrey F --- script/release/release.py | 5 +++++ script/release/release/repository.py | 24 +++++++++++++++++++++++- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/script/release/release.py b/script/release/release.py index 06dfdad9a..a38b2aa7d 100755 --- a/script/release/release.py +++ b/script/release/release.py @@ -30,6 +30,11 @@ from release.utils import update_run_sh_version def create_initial_branch(repository, release, base, bintray_user): release_branch = repository.create_release_branch(release, base) + if base: + print('Detected patch version.') + cherries = input('Indicate PR#s to cherry-pick then press Enter:\n') + repository.cherry_pick_prs(release_branch, cherries.split()) + return create_bump_commit(repository, release_branch, bintray_user) diff --git a/script/release/release/repository.py b/script/release/release/repository.py index e0c581bd5..4fcb2712a 100644 --- a/script/release/release/repository.py +++ b/script/release/release/repository.py @@ -2,7 +2,9 @@ from __future__ import absolute_import from __future__ import unicode_literals import os +import tempfile +import requests from git import GitCommandError from git import Repo from github import Github @@ -111,7 +113,7 @@ class Repository(object): if not release.draft: print( 'The release at {} is no longer a draft. If you TRULY intend ' - 'to remove it, please do so manually.' 
+ 'to remove it, please do so manually.'.format(release.url) ) continue release.delete_release() @@ -171,6 +173,26 @@ class Repository(object): with open(os.path.join(REPO_ROOT, 'compose', 'GITSHA'), 'w') as f: f.write(self.git_repo.head.commit.hexsha[:7]) + def cherry_pick_prs(self, release_branch, ids): + if not ids: + return + release_branch.checkout() + for i in ids: + try: + i = int(i) + except ValueError as e: + raise ScriptError('Invalid PR id: {}'.format(e)) + print('Retrieving PR#{}'.format(i)) + pr = self.gh_repo.get_pull(i) + patch_data = requests.get(pr.patch_url).text + self.apply_patch(patch_data) + + def apply_patch(self, patch_data): + with tempfile.NamedTemporaryFile(mode='w', prefix='_compose_cherry', encoding='utf-8') as f: + f.write(patch_data) + f.flush() + self.git_repo.git.am('--3way', f.name) + def get_contributors(pr_data): commits = pr_data.get_commits() From 87b8eaa27cf56c367e25a88e0459e5163bf7f838 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Fri, 20 Apr 2018 13:34:18 -0700 Subject: [PATCH 19/26] Cleanup Signed-off-by: Joffrey F --- script/release/release.py | 61 +++++++++++++++++----------- script/release/release/const.py | 4 +- script/release/release/repository.py | 11 +++-- script/release/release/utils.py | 22 ++++++++++ 4 files changed, 68 insertions(+), 30 deletions(-) diff --git a/script/release/release.py b/script/release/release.py index a38b2aa7d..c1908f94a 100755 --- a/script/release/release.py +++ b/script/release/release.py @@ -26,19 +26,20 @@ from release.utils import read_release_notes_from_changelog from release.utils import ScriptError from release.utils import update_init_py_version from release.utils import update_run_sh_version +from release.utils import yesno -def create_initial_branch(repository, release, base, bintray_user): - release_branch = repository.create_release_branch(release, base) - if base: +def create_initial_branch(repository, args): + release_branch = repository.create_release_branch(args.release, args.base) + if args.base and args.cherries: print('Detected patch version.') - cherries = input('Indicate PR#s to cherry-pick then press Enter:\n') + cherries = input('Indicate (space-separated) PR numbers to cherry-pick then press Enter:\n') repository.cherry_pick_prs(release_branch, cherries.split()) - return create_bump_commit(repository, release_branch, bintray_user) + return create_bump_commit(repository, release_branch, args.bintray_user, args.bintray_org) -def create_bump_commit(repository, release_branch, bintray_user): +def create_bump_commit(repository, release_branch, bintray_user, bintray_org): with release_branch.config_reader() as cfg: release = cfg.get('release') print('Updating version info in __init__.py and run.sh') @@ -46,10 +47,10 @@ def create_bump_commit(repository, release_branch, bintray_user): update_init_py_version(release) input('Please add the release notes to the CHANGELOG.md file, then press Enter to continue.') - proceed = '' - while proceed.lower() != 'y': + proceed = None + while not proceed: print(repository.diff()) - proceed = input('Are these changes ok? y/N ') + proceed = yesno('Are these changes ok? 
y/N ', default=False) if repository.diff(): repository.create_bump_commit(release_branch, release) @@ -57,7 +58,7 @@ def create_bump_commit(repository, release_branch, bintray_user): bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], bintray_user) print('Creating data repository {} on bintray'.format(release_branch.name)) - bintray_api.create_repository(BINTRAY_ORG, release_branch.name, 'generic') + bintray_api.create_repository(bintray_org, release_branch.name, 'generic') def monitor_pr_status(pr_data): @@ -126,21 +127,26 @@ def print_final_instructions(args): def resume(args): try: - repository = Repository(REPO_ROOT, args.repo or NAME) + repository = Repository(REPO_ROOT, args.repo) br_name = branch_name(args.release) if not repository.branch_exists(br_name): raise ScriptError('No local branch exists for this release.') gh_release = repository.find_release(args.release) if gh_release and not gh_release.draft: print('WARNING!! Found non-draft (public) release for this version!') - proceed = input( + proceed = yesno( 'Are you sure you wish to proceed? Modifying an already ' - 'released version is dangerous! y/N ' + 'released version is dangerous! y/N ', default=False ) - if proceed.lower() != 'y': + if proceed.lower() is not True: raise ScriptError('Aborting release') + release_branch = repository.checkout_branch(br_name) - create_bump_commit(repository, release_branch, args.bintray_user) + if args.cherries: + cherries = input('Indicate (space-separated) PR numbers to cherry-pick then press Enter:\n') + repository.cherry_pick_prs(release_branch, cherries.split()) + + create_bump_commit(repository, release_branch, args.bintray_user, args.bintray_org) pr_data = repository.find_release_pr(args.release) if not pr_data: pr_data = repository.create_release_pull_request(args.release) @@ -164,14 +170,13 @@ def resume(args): def cancel(args): try: - repository = Repository(REPO_ROOT, args.repo or NAME) + repository = Repository(REPO_ROOT, args.repo) repository.close_release_pr(args.release) repository.remove_release(args.release) repository.remove_bump_branch(args.release) - # TODO: uncomment after testing is complete - # bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], args.bintray_user) - # print('Removing Bintray data repository for {}'.format(args.release)) - # bintray_api.delete_repository(BINTRAY_ORG, branch_name(args.release)) + bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], args.bintray_user) + print('Removing Bintray data repository for {}'.format(args.release)) + bintray_api.delete_repository(args.bintray_org, branch_name(args.release)) except ScriptError as e: print(e) return 1 @@ -181,8 +186,8 @@ def cancel(args): def start(args): try: - repository = Repository(REPO_ROOT, args.repo or NAME) - create_initial_branch(repository, args.release, args.base, args.bintray_user) + repository = Repository(REPO_ROOT, args.repo) + create_initial_branch(repository, args) pr_data = repository.create_release_pull_request(args.release) check_pr_mergeable(pr_data) monitor_pr_status(pr_data) @@ -202,7 +207,7 @@ def start(args): def finalize(args): try: - repository = Repository(REPO_ROOT, args.repo or NAME) + repository = Repository(REPO_ROOT, args.repo) img_manager = ImageManager(args.release) pr_data = repository.find_release_pr(args.release) if not pr_data: @@ -282,17 +287,25 @@ def main(): help='Which version is being patched by this release' ) parser.add_argument( - '--repo', '-r', dest='repo', + '--repo', '-r', dest='repo', default=NAME, help='Start a release for the given repo 
(default: {})'.format(NAME) ) parser.add_argument( '-b', dest='bintray_user', required=True, metavar='USER', help='Username associated with the Bintray API key' ) + parser.add_argument( + '--bintray-org', dest='bintray_org', metavar='ORG', default=BINTRAY_ORG, + help='Organization name on bintray where the data repository will be created.' + ) parser.add_argument( '--destination', '-o', metavar='DIR', default='binaries', help='Directory where release binaries will be downloaded relative to the project root' ) + parser.add_argument( + '--no-cherries', '-C', dest='cherries', action='store_false', + help='If set, the program will not prompt the user for PR numbers to cherry-pick' + ) args = parser.parse_args() if args.action == 'start': diff --git a/script/release/release/const.py b/script/release/release/const.py index 34f338a89..5a72bde41 100644 --- a/script/release/release/const.py +++ b/script/release/release/const.py @@ -5,5 +5,5 @@ import os REPO_ROOT = os.path.join(os.path.dirname(__file__), '..', '..', '..') -NAME = 'shin-/compose' -BINTRAY_ORG = 'shin-compose' +NAME = 'docker/compose' +BINTRAY_ORG = 'docker-compose' diff --git a/script/release/release/repository.py b/script/release/release/repository.py index 4fcb2712a..d4d1c7201 100644 --- a/script/release/release/repository.py +++ b/script/release/release/repository.py @@ -29,17 +29,20 @@ class Repository(object): def create_release_branch(self, version, base=None): print('Creating release branch {} based on {}...'.format(version, base or 'master')) remote = self.find_remote(self.gh_repo.full_name) + br_name = branch_name(version) remote.fetch() - if self.branch_exists(branch_name(version)): + if self.branch_exists(br_name): raise ScriptError( - "Branch {} already exists locally. " - "Please remove it before running the release script.".format(branch_name(version)) + "Branch {} already exists locally. Please remove it before " + "running the release script, or use `resume` instead.".format( + br_name + ) ) if base is not None: base = self.git_repo.tag('refs/tags/{}'.format(base)) else: base = 'refs/remotes/{}/master'.format(remote.name) - release_branch = self.git_repo.create_head(branch_name(version), commit=base) + release_branch = self.git_repo.create_head(br_name, commit=base) release_branch.checkout() self.git_repo.git.merge('--strategy=ours', '--no-edit', '{}/release'.format(remote.name)) with release_branch.config_writer() as cfg: diff --git a/script/release/release/utils.py b/script/release/release/utils.py index b0e1f6a84..977a0a712 100644 --- a/script/release/release/utils.py +++ b/script/release/release/utils.py @@ -61,3 +61,25 @@ def compatibility_matrix(): for fmt, api_version in compose_const.API_VERSIONS.items(): result[compose_const.API_VERSION_TO_ENGINE_VERSION[api_version]].append(fmt.vstring) return result + + +def yesno(prompt, default=None): + """ + Prompt the user for a yes or no. + + Can optionally specify a default value, which will only be + used if they enter a blank line. + + Unrecognised input (anything other than "y", "n", "yes", + "no" or "") will return None. 
+ """ + answer = input(prompt).strip().lower() + + if answer == "y" or answer == "yes": + return True + elif answer == "n" or answer == "no": + return False + elif answer == "": + return default + else: + return None From 28f7f79fea91e8af27bd8fd3a454ad1d0199e3c7 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Fri, 20 Apr 2018 14:23:58 -0700 Subject: [PATCH 20/26] Improve monitor function Signed-off-by: Joffrey F --- script/release/release.py | 13 +++++++++---- script/release/release/downloader.py | 4 ++-- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/script/release/release.py b/script/release/release.py index c1908f94a..338e73af2 100755 --- a/script/release/release.py +++ b/script/release/release.py @@ -66,24 +66,29 @@ def monitor_pr_status(pr_data): last_commit = pr_data.get_commits().reversed[0] while True: status = last_commit.get_combined_status() - if status.state == 'pending': + if status.state == 'pending' or status.state == 'failure': summary = { 'pending': 0, 'success': 0, 'failure': 0, } for detail in status.statuses: + if detail.context == 'dco-signed': + # dco-signed check breaks on merge remote-tracking ; ignore it + continue summary[detail.state] += 1 print('{pending} pending, {success} successes, {failure} failures'.format(**summary)) if status.total_count == 0: # Mostly for testing purposes against repos with no CI setup return True + elif summary['pending'] == 0 and summary['failure'] == 0: + return True + elif summary['failure'] > 0: + raise ScriptError('CI failures detected!') time.sleep(30) elif status.state == 'success': print('{} successes: all clear!'.format(status.total_count)) return True - else: - raise ScriptError('CI failure detected') def check_pr_mergeable(pr_data): @@ -159,7 +164,7 @@ def resume(args): delete_assets(gh_release) upload_assets(gh_release, files) img_manager = ImageManager(args.release) - img_manager.build_images(repository, files, args.release) + img_manager.build_images(repository, files) except ScriptError as e: print(e) return 1 diff --git a/script/release/release/downloader.py b/script/release/release/downloader.py index cd43bc993..d92ae78b5 100644 --- a/script/release/release/downloader.py +++ b/script/release/release/downloader.py @@ -36,7 +36,7 @@ class BinaryDownloader(requests.Session): base=self.base_appveyor_url, filename=filename, branch_name=branch_name ) full_dest = os.path.join(REPO_ROOT, self.destination, filename) - return self.download(url, full_dest) + return self._download(url, full_dest) def _download(self, url, full_dest): m = hashlib.sha256() @@ -57,7 +57,7 @@ class BinaryDownloader(requests.Session): files = { 'docker-compose-Darwin-x86_64': None, 'docker-compose-Linux-x86_64': None, - # 'docker-compose-Windows-x86_64.exe': None, + 'docker-compose-Windows-x86_64.exe': None, } for filename in files.keys(): From 4faf7c19b6f10941645a577637ecf9ba2c7f82f9 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Fri, 20 Apr 2018 16:21:13 -0700 Subject: [PATCH 21/26] Containerize release tool Signed-off-by: Joffrey F --- script/release/Dockerfile | 14 ++++++++++++++ script/release/release.sh | 25 +++++++++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 script/release/Dockerfile create mode 100755 script/release/release.sh diff --git a/script/release/Dockerfile b/script/release/Dockerfile new file mode 100644 index 000000000..0d4ec27e1 --- /dev/null +++ b/script/release/Dockerfile @@ -0,0 +1,14 @@ +FROM python:3.6 +RUN mkdir -p /src && pip install -U Jinja2==2.10 \ + PyGithub==1.39 \ + pypandoc==1.4 \ 
+ GitPython==2.1.9 \ + requests==2.18.4 && \ + apt-get update && apt-get install -y pandoc + +VOLUME /src/script/release +WORKDIR /src +COPY . /src +RUN python setup.py develop +ENTRYPOINT ["python", "script/release/release.py"] +CMD ["--help"] diff --git a/script/release/release.sh b/script/release/release.sh new file mode 100755 index 000000000..2310429aa --- /dev/null +++ b/script/release/release.sh @@ -0,0 +1,25 @@ +#!/bin/sh + +docker image inspect compose/release-tool > /dev/null +if test $? -ne 0; then + docker build -t compose/release-tool -f $(pwd)/script/release/Dockerfile $(pwd) +fi + +if test -z $GITHUB_TOKEN; then + echo "GITHUB_TOKEN environment variable must be set" + exit 1 +fi + +if test -z $BINTRAY_TOKEN; then + echo "BINTRAY_TOKEN environment variable must be set" + exit 1 +fi + +docker run -e GITHUB_TOKEN=$GITHUB_TOKEN -e BINTRAY_TOKEN=$BINTRAY_TOKEN -it \ + --mount type=bind,source=$(pwd),target=/src \ + --mount type=bind,source=$(pwd)/.git,target=/src/.git \ + --mount type=bind,source=$HOME/.docker,target=/root/.docker \ + --mount type=bind,source=/var/run/docker.sock,target=/var/run/docker.sock \ + --mount type=bind,source=$HOME/.ssh,target=/root/.ssh \ + -v $HOME/.pypirc:/root/.pypirc \ + compose/release-tool $* From a50c056d7cc932967626160f9a832878955053b4 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Fri, 20 Apr 2018 17:15:45 -0700 Subject: [PATCH 22/26] Uncomment deploy steps Signed-off-by: Joffrey F --- script/release/release.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/script/release/release.py b/script/release/release.py index 338e73af2..add8fb2d3 100755 --- a/script/release/release.py +++ b/script/release/release.py @@ -239,9 +239,8 @@ def finalize(args): if not merge_status.merged: raise ScriptError('Unable to merge PR #{}: {}'.format(pr_data.number, merge_status.message)) print('Uploading to PyPi') - # TODO: this will do real stuff. Uncomment when done testing - # run_setup(os.path.join(REPO_ROOT, 'setup.py'), script_args=['upload']) - # img_manager.push_images(args.release) + run_setup(os.path.join(REPO_ROOT, 'setup.py'), script_args=['upload']) + img_manager.push_images(args.release) repository.publish_release(gh_release) except ScriptError as e: print(e) From 7503a2eddd886b7871b47c18fdddd587a2836122 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 23 Apr 2018 14:37:34 -0700 Subject: [PATCH 23/26] Document new release process Signed-off-by: Joffrey F --- project/RELEASE-PROCESS.md | 149 +----------------------------- script/release/README.md | 184 +++++++++++++++++++++++++++++++++++++ 2 files changed, 185 insertions(+), 148 deletions(-) mode change 100644 => 120000 project/RELEASE-PROCESS.md create mode 100644 script/release/README.md diff --git a/project/RELEASE-PROCESS.md b/project/RELEASE-PROCESS.md deleted file mode 100644 index d4afb87b9..000000000 --- a/project/RELEASE-PROCESS.md +++ /dev/null @@ -1,148 +0,0 @@ -Building a Compose release -========================== - -## Prerequisites - -The release scripts require the following tools installed on the host: - -* https://hub.github.com/ -* https://stedolan.github.io/jq/ -* http://pandoc.org/ - -## To get started with a new release - -Create a branch, update version, and add release notes by running `make-branch` - - ./script/release/make-branch $VERSION [$BASE_VERSION] - -`$BASE_VERSION` will default to master. Use the last version tag for a bug fix -release. - -As part of this script you'll be asked to: - -1. 
Update the version in `compose/__init__.py` and `script/run/run.sh`. - - If the next release will be an RC, append `-rcN`, e.g. `1.4.0-rc1`. - -2. Write release notes in `CHANGELOG.md`. - - Almost every feature enhancement should be mentioned, with the most - visible/exciting ones first. Use descriptive sentences and give context - where appropriate. - - Bug fixes are worth mentioning if it's likely that they've affected lots - of people, or if they were regressions in the previous version. - - Improvements to the code are not worth mentioning. - -3. Create a new repository on [bintray](https://bintray.com/docker-compose). - The name has to match the name of the branch (e.g. `bump-1.9.0`) and the - type should be "Generic". Other fields can be left blank. - -4. Check that the `vnext-compose` branch on - [the docs repo](https://github.com/docker/docker.github.io/) has - documentation for all the new additions in the upcoming release, and create - a PR there for what needs to be amended. - - -## When a PR is merged into master that we want in the release - -1. Check out the bump branch and run the cherry pick script - - git checkout bump-$VERSION - ./script/release/cherry-pick-pr $PR_NUMBER - -2. When you are done cherry-picking branches move the bump version commit to HEAD - - ./script/release/rebase-bump-commit - git push --force $USERNAME bump-$VERSION - - -## To release a version (whether RC or stable) - -Check out the bump branch and run the `build-binaries` script - - git checkout bump-$VERSION - ./script/release/build-binaries - -When prompted build the non-linux binaries and test them. - -1. Download the different platform binaries by running the following script: - - `./script/release/download-binaries $VERSION` - - The binaries for Linux, OSX and Windows will be downloaded in the `binaries-$VERSION` folder. - -3. Draft a release from the tag on GitHub (the `build-binaries` script will open the window for - you) - - The tag will only be present on Github when you run the `push-release` - script in step 7, but you can pre-fill it at that point. - -4. Paste in installation instructions and release notes. Here's an example - - change the Compose version and Docker version as appropriate: - - If you're a Mac or Windows user, the best way to install Compose and keep it up-to-date is **[Docker for Mac and Windows](https://www.docker.com/products/docker)**. - - Docker for Mac and Windows will automatically install the latest version of Docker Engine for you. - - Alternatively, you can use the usual commands to install or upgrade Compose: - - ``` - curl -L https://github.com/docker/compose/releases/download/1.16.0/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose - chmod +x /usr/local/bin/docker-compose - ``` - - See the [install docs](https://docs.docker.com/compose/install/) for more install options and instructions. - - ## Compose file format compatibility matrix - - | Compose file format | Docker Engine | - | --- | --- | - | 3.3 | 17.06.0+ | - | 3.0 – 3.2 | 1.13.0+ | - | 2.3| 17.06.0+ | - | 2.2 | 1.13.0+ | - | 2.1 | 1.12.0+ | - | 2.0 | 1.10.0+ | - | 1.0 | 1.9.1+ | - - ## Changes - - ...release notes go here... - -5. Attach the binaries and `script/run/run.sh` - -6. Add "Thanks" with a list of contributors. The contributor list can be generated - by running `./script/release/contributors`. - -7. If everything looks good, it's time to push the release. - - - ./script/release/push-release - - -8. Merge the bump PR. - -8. Publish the release on GitHub. - -9. 
Check that all the binaries download (following the install instructions) and run. - -10. Announce the release on the appropriate Slack channel(s). - -## If it’s a stable release (not an RC) - -1. Close the release’s milestone. - -## If it’s a minor release (1.x.0), rather than a patch release (1.x.y) - -1. Open a PR against `master` to: - - - update `CHANGELOG.md` to bring it in line with `release` - - bump the version in `compose/__init__.py` to the *next* minor version number with `dev` appended. For example, if you just released `1.4.0`, update it to `1.5.0dev`. - -2. Get the PR merged. - -## Finally - -1. Celebrate, however you’d like. diff --git a/project/RELEASE-PROCESS.md b/project/RELEASE-PROCESS.md new file mode 120000 index 000000000..c8457671a --- /dev/null +++ b/project/RELEASE-PROCESS.md @@ -0,0 +1 @@ +../script/release/README.md \ No newline at end of file diff --git a/script/release/README.md b/script/release/README.md new file mode 100644 index 000000000..c5136c764 --- /dev/null +++ b/script/release/README.md @@ -0,0 +1,184 @@ +# Release HOWTO + +This file describes the process of making a public release of `docker-compose`. +Please read it carefully before proceeding! + +## Prerequisites + +The following things are required to bring a release to a successful conclusion + +### Local Docker engine (Linux Containers) + +The release script runs inside a container and builds images that will be part +of the release. + +### Docker Hub account + +You should be logged into a Docker Hub account that allows pushing to the +following repositories: + +- docker/compose +- docker/compose-tests + +### A Github account and Github API token + +Your Github account needs to have write access on the `docker/compose` repo. +To generate a Github token, head over to the +[Personal access tokens](https://github.com/settings/tokens) page in your +Github settings and select "Generate new token". Your token should include +(at minimum) the following scopes: + +- `repo:status` +- `public_repo` + +This API token should be exposed to the release script through the +`GITHUB_TOKEN` environment variable. + +### A Bintray account and Bintray API key + +Your Bintray account will need to be an admin member of the +[docker-compose organization](https://github.com/settings/tokens). +Additionally, you should generate a personal API key. To do so, click your +username in the top-right hand corner and select "Edit profile" ; on the new +page, select "API key" in the left-side menu. + +This API key should be exposed to the release script through the +`BINTRAY_TOKEN` environment variable. + +### A PyPi account + +Said account needs to be a member of the maintainers group for the +[`docker-compose` project](https://pypi.org/project/docker-compose/). + +Moreover, the `~/.pypirc` file should exist on your host and contain the +relevant pypi credentials. + +## Start a feature release + +A feature release is a release that includes all changes present in the +`master` branch when initiated. It's typically versioned `X.Y.0-rc1`, where +Y is the minor version of the previous release incremented by one. A series +of one or more Release Candidates (RCs) should be made available to the public +to find and squash potential bugs. + +From the root of the Compose repository, run the following command: +``` +./script/release/release.sh -b start X.Y.0-rc1 +``` + +After a short initialization period, the script will invite you to edit the +`CHANGELOG.md` file. 
Do so by being careful to respect the same format as
+previous releases. Once done, the script will display a `diff` of the staged
+changes for the bump commit. Once you validate these, a bump commit will be
+created on the newly created release branch and pushed remotely.
+
+The release tool then waits for the CI to conclude before proceeding.
+If failures are reported, the release will be aborted until these are fixed.
+Please refer to the "Resume a draft release" section below for more details.
+
+Once all resources have been prepared, the release script will exit with a
+message resembling this one:
+
+```
+You're almost done! Please verify that everything is in order and you are ready
+to make the release public, then run the following command:
+./script/release/release.sh -b user finalize X.Y.0-rc1
+```
+
+Once you are ready to finalize the release (making binaries and other versioned
+assets public), proceed to the "Finalize a release" section of this guide.
+
+## Start a patch release
+
+A patch release is a release that builds off a previous release with discrete
+additions. This can be an RC release after RC1 (`X.Y.0-rcZ`, `Z > 1`), a GA release
+based off the final RC (`X.Y.0`), or a bugfix release based off a previous
+GA release (`X.Y.Z`, `Z > 0`).
+
+From the root of the Compose repository, run the following command:
+```
+./script/release/release.sh -b start --patch=BASE_VERSION RELEASE_VERSION
+```
+
+The process of starting a patch release is identical to starting a feature
+release except for one difference; at the beginning, the script will ask for
+PR numbers you wish to cherry-pick into the release. These numbers should
+correspond to existing PRs on the docker/compose repository. Multiple numbers
+should be separated by whitespace.
+
+Once you are ready to finalize the release (making binaries and other versioned
+assets public), proceed to the "Finalize a release" section of this guide.
+
+## Finalize a release
+
+Once you're ready to make your release public, you may execute the following
+command from the root of the Compose repository:
+```
+./script/release/release.sh -b finalize RELEASE_VERSION
+```
+
+Note that this command will create and publish versioned assets to the public.
+As a result, it cannot be reverted. The command will perform some basic
+sanity checks before doing so, but it is your responsibility to ensure
+everything is in order before pushing the button.
+
+After the command exits, you should make sure:
+
+- The `docker/compose:VERSION` image is available on Docker Hub and functional
+- The `pip install -U docker-compose==VERSION` command correctly installs the
+  specified version
+- The install command on the Github release page installs the new release
+
+## Resume a draft release
+
+"Resuming" a release lets you address the following situations occurring before
+a release is made final:
+
+- Cherry-pick additional PRs to include in the release
+- Resume a release that was aborted because of CI failures after they've been
+  addressed
+- Rebuild / redownload assets after manual changes have been made to the
+  release branch
+- etc.
+
+From the root of the Compose repository, run the following command:
+```
+./script/release/release.sh -b resume RELEASE_VERSION
+```
+
+The release tool will attempt to determine what steps it's already been through
+for the specified release and pick up where it left off.
Some steps are +executed again no matter what as it's assumed they'll produce different +results, like building images or downloading binaries. + +## Cancel a draft release + +If issues snuck into your release branch, it is sometimes easier to start from +scratch. Before a release has been finalized, it is possible to cancel it using +the following command: +``` +./script/release/release.sh -b cancel RELEASE_VERSION +``` + +This will remove the release branch with this release (locally and remotely), +close the associated PR, remove the release page draft on Github and delete +the Bintray repository for it, allowing you to start fresh. + +## Manual operations + +Some common, release-related operations are not covered by this tool and should +be handled manually by the operator: + +- After any release: + - Announce new release on Slack +- After a GA release: + - Close the release milestone + - Merge back `CHANGELOG.md` changes from the `release` branch into `master` + - Bump the version in `compose/__init__.py` to the *next* minor version + number with `dev` appended. For example, if you just released `1.4.0`, + update it to `1.5.0dev` + +## Advanced options + +You can consult the full list of options for the release tool by executing +`./script/release/release.sh --help`. From fe20526d05ab83ccc60b7ce35026073537b1edb0 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 23 Apr 2018 15:01:30 -0700 Subject: [PATCH 24/26] Remove obsolete release scripts Signed-off-by: Joffrey F --- script/release/build-binaries | 40 --------------- script/release/contributors | 30 ----------- script/release/download-binaries | 39 --------------- script/release/make-branch | 86 -------------------------------- 4 files changed, 195 deletions(-) delete mode 100755 script/release/build-binaries delete mode 100755 script/release/contributors delete mode 100755 script/release/download-binaries delete mode 100755 script/release/make-branch diff --git a/script/release/build-binaries b/script/release/build-binaries deleted file mode 100755 index a39b186d9..000000000 --- a/script/release/build-binaries +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash -# -# Build the release binaries -# - -. "$(dirname "${BASH_SOURCE[0]}")/utils.sh" - -function usage() { - >&2 cat << EOM -Build binaries for the release. - -This script requires that 'git config branch.${BRANCH}.release' is set to the -release version for the release branch. - -EOM - exit 1 -} - -BRANCH="$(git rev-parse --abbrev-ref HEAD)" -VERSION="$(git config "branch.${BRANCH}.release")" || usage -REPO=docker/compose - -# Build the binaries -script/clean -script/build/linux - -echo "Building the container distribution" -script/build/image $VERSION - -echo "Building the compose-tests image" -script/build/test-image $VERSION - -echo "Create a github release" -# TODO: script more of this https://developer.github.com/v3/repos/releases/ -browser https://github.com/$REPO/releases/new - -echo "Don't forget to download the osx and windows binaries from appveyor/bintray\!" 
-echo "https://dl.bintray.com/docker-compose/$BRANCH/" -echo "https://ci.appveyor.com/project/docker/compose" -echo diff --git a/script/release/contributors b/script/release/contributors deleted file mode 100755 index 4657dd805..000000000 --- a/script/release/contributors +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash -set -e - - -function usage() { - >&2 cat << EOM -Print the list of github contributors for the release - -Usage: - - $0 -EOM - exit 1 -} - -[[ -n "$1" ]] || usage -PREV_RELEASE=$1 -BRANCH="$(git rev-parse --abbrev-ref HEAD)" -URL="https://api.github.com/repos/docker/compose/compare" - -contribs=$(curl -sf "$URL/$PREV_RELEASE...$BRANCH" | \ - jq -r '.commits[].author.login' | \ - sort | \ - uniq -c | \ - sort -nr) - -echo "Contributions by user: " -echo "$contribs" -echo -echo "$contribs" | awk '{print "@"$2","}' | xargs diff --git a/script/release/download-binaries b/script/release/download-binaries deleted file mode 100755 index 0b187f6c2..000000000 --- a/script/release/download-binaries +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/bash - -function usage() { - >&2 cat << EOM -Download Linux, Mac OS and Windows binaries from remote endpoints - -Usage: - - $0 - -Options: - - version version string for the release (ex: 1.6.0) - -EOM - exit 1 -} - - -[ -n "$1" ] || usage -VERSION=$1 -BASE_BINTRAY_URL=https://dl.bintray.com/docker-compose/bump-$VERSION/ -DESTINATION=binaries-$VERSION -APPVEYOR_URL=https://ci.appveyor.com/api/projects/docker/compose/\ -artifacts/dist%2Fdocker-compose-Windows-x86_64.exe?branch=bump-$VERSION - -mkdir $DESTINATION - - -wget -O $DESTINATION/docker-compose-Darwin-x86_64 $BASE_BINTRAY_URL/docker-compose-Darwin-x86_64 -wget -O $DESTINATION/docker-compose-Linux-x86_64 $BASE_BINTRAY_URL/docker-compose-Linux-x86_64 -wget -O $DESTINATION/docker-compose-Windows-x86_64.exe $APPVEYOR_URL - -echo -e "\n\nCopy the following lines into the integrity check table in the release notes:\n\n" -cd $DESTINATION -rm -rf *.sha256 -ls | xargs sha256sum | sed 's/ / | /g' | sed -r 's/([^ |]+)/`\1`/g' -ls | xargs -I@ bash -c "sha256sum @ | cut -d' ' -f1 > @.sha256" -cd - diff --git a/script/release/make-branch b/script/release/make-branch deleted file mode 100755 index b8a0cd31e..000000000 --- a/script/release/make-branch +++ /dev/null @@ -1,86 +0,0 @@ -#!/bin/bash -# -# Prepare a new release branch -# - -. "$(dirname "${BASH_SOURCE[0]}")/utils.sh" - -function usage() { - >&2 cat << EOM -Create a new release branch 'release-' - -Usage: - - $0 [] - -Options: - - version version string for the release (ex: 1.6.0) - base_version branch or tag to start from. Defaults to master. For - bug-fix releases use the previous stage release tag. - -EOM - exit 1 -} - - -[ -n "$1" ] || usage -VERSION=$1 -BRANCH=bump-$VERSION -REPO=docker/compose -GITHUB_REPO=git@github.com:$REPO - -if [ -z "$2" ]; then - BASE_VERSION="master" -else - BASE_VERSION=$2 -fi - - -DEFAULT_REMOTE=release -REMOTE="$(find_remote "$GITHUB_REPO")" -# If we don't have a docker remote add one -if [ -z "$REMOTE" ]; then - echo "Creating $DEFAULT_REMOTE remote" - git remote add ${DEFAULT_REMOTE} ${GITHUB_REPO} -fi - -# handle the difference between a branch and a tag -if [ -z "$(git name-rev --tags $BASE_VERSION | grep tags)" ]; then - BASE_VERSION=$REMOTE/$BASE_VERSION -fi - -echo "Creating a release branch $VERSION from $BASE_VERSION" -read -n1 -r -p "Continue? 
(ctrl+c to cancel)" -git fetch $REMOTE -p -git checkout -b $BRANCH $BASE_VERSION - -echo "Merging remote release branch into new release branch" -git merge --strategy=ours --no-edit $REMOTE/release - -# Store the release version for this branch in git, so that other release -# scripts can use it -git config "branch.${BRANCH}.release" $VERSION - - -editor=${EDITOR:-vim} - -echo "Update versions in compose/__init__.py, script/run/run.sh" -$editor compose/__init__.py -$editor script/run/run.sh - - -echo "Write release notes in CHANGELOG.md" -browser "https://github.com/docker/compose/issues?q=milestone%3A$VERSION+is%3Aclosed" -$editor CHANGELOG.md - - -git diff -echo "Verify changes before commit. Exit the shell to commit changes" -$SHELL || true -git commit -a -m "Bump $VERSION" --signoff --no-verify - - -echo "Push branch to docker remote" -git push $REMOTE -browser https://github.com/$REPO/compare/docker:release...$BRANCH?expand=1 From 7db13582f1ae4274fd4b559c9707a43c4cab3883 Mon Sep 17 00:00:00 2001 From: Harald Albers Date: Thu, 12 Apr 2018 08:52:20 +0200 Subject: [PATCH 25/26] Add support for features added in 1.21.0 to bash completion - add support for `docker-compose exec --workdir|-w` - add support for `docker-compose build --compress` - add support for `docker-compose pull --no-parallel`, drop deprecated option `--parallel` Signed-off-by: Harald Albers --- contrib/completion/bash/docker-compose | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/contrib/completion/bash/docker-compose b/contrib/completion/bash/docker-compose index 90c9ce5fc..0713486d3 100644 --- a/contrib/completion/bash/docker-compose +++ b/contrib/completion/bash/docker-compose @@ -131,7 +131,7 @@ _docker_compose_build() { case "$cur" in -*) - COMPREPLY=( $( compgen -W "--build-arg --force-rm --help --memory --no-cache --pull" -- "$cur" ) ) + COMPREPLY=( $( compgen -W "--build-arg --compress --force-rm --help --memory --no-cache --pull" -- "$cur" ) ) ;; *) __docker_compose_services_from_build @@ -242,14 +242,14 @@ _docker_compose_events() { _docker_compose_exec() { case "$prev" in - --index|--user|-u) + --index|--user|-u|--workdir|-w) return ;; esac case "$cur" in -*) - COMPREPLY=( $( compgen -W "-d --detach --help --index --privileged -T --user -u" -- "$cur" ) ) + COMPREPLY=( $( compgen -W "-d --detach --help --index --privileged -T --user -u --workdir -w" -- "$cur" ) ) ;; *) __docker_compose_services_running @@ -379,7 +379,7 @@ _docker_compose_ps() { _docker_compose_pull() { case "$cur" in -*) - COMPREPLY=( $( compgen -W "--help --ignore-pull-failures --include-deps --parallel --quiet -q" -- "$cur" ) ) + COMPREPLY=( $( compgen -W "--help --ignore-pull-failures --include-deps --no-parallel --quiet -q" -- "$cur" ) ) ;; *) __docker_compose_services_from_image From 5a3f1a3cca0ba256ae2c6ee248b3b7a5e5e428bb Mon Sep 17 00:00:00 2001 From: Harald Albers Date: Wed, 11 Apr 2018 12:47:10 +0200 Subject: [PATCH 26/26] Refactor bash completion for services Signed-off-by: Harald Albers --- contrib/completion/bash/docker-compose | 91 +++++++++++--------------- 1 file changed, 37 insertions(+), 54 deletions(-) diff --git a/contrib/completion/bash/docker-compose b/contrib/completion/bash/docker-compose index 0713486d3..7aa69a463 100644 --- a/contrib/completion/bash/docker-compose +++ b/contrib/completion/bash/docker-compose @@ -81,41 +81,24 @@ __docker_compose_nospace() { type compopt &>/dev/null && compopt -o nospace } -# Extracts all service names from the compose file. 
-___docker_compose_all_services_in_compose_file() { - __docker_compose_q config --services + +# Outputs a list of all defined services, regardless of their running state. +# Arguments for `docker-compose ps` may be passed in order to filter the service list, +# e.g. `status=running`. +__docker_compose_services() { + __docker_compose_q ps --services "$@" } -# All services, even those without an existing container -__docker_compose_services_all() { - COMPREPLY=( $(compgen -W "$(___docker_compose_all_services_in_compose_file)" -- "$cur") ) -} - -# All services that are defined by a Dockerfile reference -__docker_compose_services_from_build() { - COMPREPLY=( $(compgen -W "$(__docker_compose_q ps --services --filter "source=build")" -- "$cur") ) -} - -# All services that are defined by an image -__docker_compose_services_from_image() { - COMPREPLY=( $(compgen -W "$(__docker_compose_q ps --services --filter "source=image")" -- "$cur") ) -} - -# The services for which at least one paused container exists -__docker_compose_services_paused() { - names=$(__docker_compose_q ps --services --filter "status=paused") - COMPREPLY=( $(compgen -W "$names" -- "$cur") ) +# Applies completion of services based on the current value of `$cur`. +# Arguments for `docker-compose ps` may be passed in order to filter the service list, +# see `__docker_compose_services`. +__docker_compose_complete_services() { + COMPREPLY=( $(compgen -W "$(__docker_compose_services "$@")" -- "$cur") ) } # The services for which at least one running container exists -__docker_compose_services_running() { - names=$(__docker_compose_q ps --services --filter "status=running") - COMPREPLY=( $(compgen -W "$names" -- "$cur") ) -} - -# The services for which at least one stopped container exists -__docker_compose_services_stopped() { - names=$(__docker_compose_q ps --services --filter "status=stopped") +__docker_compose_complete_running_services() { + local names=$(__docker_compose_complete_services --filter status=running) COMPREPLY=( $(compgen -W "$names" -- "$cur") ) } @@ -134,7 +117,7 @@ _docker_compose_build() { COMPREPLY=( $( compgen -W "--build-arg --compress --force-rm --help --memory --no-cache --pull" -- "$cur" ) ) ;; *) - __docker_compose_services_from_build + __docker_compose_complete_services --filter source=build ;; esac } @@ -163,7 +146,7 @@ _docker_compose_create() { COMPREPLY=( $( compgen -W "--build --force-recreate --help --no-build --no-recreate" -- "$cur" ) ) ;; *) - __docker_compose_services_all + __docker_compose_complete_services ;; esac } @@ -234,7 +217,7 @@ _docker_compose_events() { COMPREPLY=( $( compgen -W "--help --json" -- "$cur" ) ) ;; *) - __docker_compose_services_all + __docker_compose_complete_services ;; esac } @@ -252,7 +235,7 @@ _docker_compose_exec() { COMPREPLY=( $( compgen -W "-d --detach --help --index --privileged -T --user -u --workdir -w" -- "$cur" ) ) ;; *) - __docker_compose_services_running + __docker_compose_complete_running_services ;; esac } @@ -268,7 +251,7 @@ _docker_compose_images() { COMPREPLY=( $( compgen -W "--help --quiet -q" -- "$cur" ) ) ;; *) - __docker_compose_services_all + __docker_compose_complete_services ;; esac } @@ -286,7 +269,7 @@ _docker_compose_kill() { COMPREPLY=( $( compgen -W "--help -s" -- "$cur" ) ) ;; *) - __docker_compose_services_running + __docker_compose_complete_running_services ;; esac } @@ -304,7 +287,7 @@ _docker_compose_logs() { COMPREPLY=( $( compgen -W "--follow -f --help --no-color --tail --timestamps -t" -- "$cur" ) ) ;; *) - 
__docker_compose_services_all + __docker_compose_complete_services ;; esac } @@ -316,7 +299,7 @@ _docker_compose_pause() { COMPREPLY=( $( compgen -W "--help" -- "$cur" ) ) ;; *) - __docker_compose_services_running + __docker_compose_complete_running_services ;; esac } @@ -338,7 +321,7 @@ _docker_compose_port() { COMPREPLY=( $( compgen -W "--help --index --protocol" -- "$cur" ) ) ;; *) - __docker_compose_services_all + __docker_compose_complete_services ;; esac } @@ -370,7 +353,7 @@ _docker_compose_ps() { COMPREPLY=( $( compgen -W "--help --quiet -q --services --filter" -- "$cur" ) ) ;; *) - __docker_compose_services_all + __docker_compose_complete_services ;; esac } @@ -382,7 +365,7 @@ _docker_compose_pull() { COMPREPLY=( $( compgen -W "--help --ignore-pull-failures --include-deps --no-parallel --quiet -q" -- "$cur" ) ) ;; *) - __docker_compose_services_from_image + __docker_compose_complete_services --filter source=image ;; esac } @@ -394,7 +377,7 @@ _docker_compose_push() { COMPREPLY=( $( compgen -W "--help --ignore-push-failures" -- "$cur" ) ) ;; *) - __docker_compose_services_all + __docker_compose_complete_services ;; esac } @@ -412,7 +395,7 @@ _docker_compose_restart() { COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) ) ;; *) - __docker_compose_services_running + __docker_compose_complete_running_services ;; esac } @@ -425,9 +408,9 @@ _docker_compose_rm() { ;; *) if __docker_compose_has_option "--stop|-s" ; then - __docker_compose_services_all + __docker_compose_complete_services else - __docker_compose_services_stopped + __docker_compose_complete_services --filter status=stopped fi ;; esac @@ -451,7 +434,7 @@ _docker_compose_run() { COMPREPLY=( $( compgen -W "--detach -d --entrypoint -e --help --label -l --name --no-deps --publish -p --rm --service-ports -T --use-aliases --user -u --volume -v --workdir -w" -- "$cur" ) ) ;; *) - __docker_compose_services_all + __docker_compose_complete_services ;; esac } @@ -473,7 +456,7 @@ _docker_compose_scale() { COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) ) ;; *) - COMPREPLY=( $(compgen -S "=" -W "$(___docker_compose_all_services_in_compose_file)" -- "$cur") ) + COMPREPLY=( $(compgen -S "=" -W "$(__docker_compose_services)" -- "$cur") ) __docker_compose_nospace ;; esac @@ -486,7 +469,7 @@ _docker_compose_start() { COMPREPLY=( $( compgen -W "--help" -- "$cur" ) ) ;; *) - __docker_compose_services_stopped + __docker_compose_complete_services --filter status=stopped ;; esac } @@ -504,7 +487,7 @@ _docker_compose_stop() { COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) ) ;; *) - __docker_compose_services_running + __docker_compose_complete_running_services ;; esac } @@ -516,7 +499,7 @@ _docker_compose_top() { COMPREPLY=( $( compgen -W "--help" -- "$cur" ) ) ;; *) - __docker_compose_services_running + __docker_compose_complete_running_services ;; esac } @@ -528,7 +511,7 @@ _docker_compose_unpause() { COMPREPLY=( $( compgen -W "--help" -- "$cur" ) ) ;; *) - __docker_compose_services_paused + __docker_compose_complete_services --filter status=paused ;; esac } @@ -541,11 +524,11 @@ _docker_compose_up() { return ;; --exit-code-from) - __docker_compose_services_all + __docker_compose_complete_services return ;; --scale) - COMPREPLY=( $(compgen -S "=" -W "$(___docker_compose_all_services_in_compose_file)" -- "$cur") ) + COMPREPLY=( $(compgen -S "=" -W "$(__docker_compose_services)" -- "$cur") ) __docker_compose_nospace return ;; @@ -559,7 +542,7 @@ _docker_compose_up() { COMPREPLY=( $( compgen -W 
"--abort-on-container-exit --always-recreate-deps --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) ) ;; *) - __docker_compose_services_all + __docker_compose_complete_services ;; esac }