Remove "bundle" subcommand and support for DAB files
Deploying stacks using the "Docker Application Bundle" (`.dab`) file format was introduced as an experimental feature in Docker 1.13 / 17.03, but was superseded by support for Docker Compose files in the CLI.

With no development being done on this feature, and no active use of the file format, support for the DAB file format and the top-level `docker deploy` command (hidden by default in 19.03) will be removed from the CLI, in favour of `docker stack deploy` using compose files.

This patch removes the `docker-compose bundle` subcommand from Docker Compose, which was used to convert compose files into DAB files and, given the above, will no longer be needed.

Signed-off-by: Sebastiaan van Stijn <github@gone.nl>
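As context for the migration the commit message describes, the same Compose file can be deployed directly as a swarm stack instead of being converted to a bundle; a minimal sketch, assuming swarm mode is available and using placeholder stack/file names:

    # Deploy the Compose file as a swarm stack instead of generating a .dab bundle
    docker swarm init                                    # once, to enable swarm mode
    docker stack deploy --compose-file docker-compose.yml mystack
    docker stack services mystack                        # verify the deployed services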
This commit is contained in:
parent 36790fc0e8
commit 33eeef41ab
compose/bundle.py (deleted)
@@ -1,275 +0,0 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import json
import logging

import six
from docker.utils import split_command
from docker.utils.ports import split_port

from .cli.errors import UserError
from .config.serialize import denormalize_config
from .network import get_network_defs_for_service
from .service import format_environment
from .service import NoSuchImageError
from .service import parse_repository_tag


log = logging.getLogger(__name__)


SERVICE_KEYS = {
    'working_dir': 'WorkingDir',
    'user': 'User',
    'labels': 'Labels',
}

IGNORED_KEYS = {'build'}

SUPPORTED_KEYS = {
    'image',
    'ports',
    'expose',
    'networks',
    'command',
    'environment',
    'entrypoint',
} | set(SERVICE_KEYS)

VERSION = '0.1'


class NeedsPush(Exception):
    def __init__(self, image_name):
        self.image_name = image_name


class NeedsPull(Exception):
    def __init__(self, image_name, service_name):
        self.image_name = image_name
        self.service_name = service_name


class MissingDigests(Exception):
    def __init__(self, needs_push, needs_pull):
        self.needs_push = needs_push
        self.needs_pull = needs_pull


def serialize_bundle(config, image_digests):
    return json.dumps(to_bundle(config, image_digests), indent=2, sort_keys=True)


def get_image_digests(project, allow_push=False):
    digests = {}
    needs_push = set()
    needs_pull = set()

    for service in project.services:
        try:
            digests[service.name] = get_image_digest(
                service,
                allow_push=allow_push,
            )
        except NeedsPush as e:
            needs_push.add(e.image_name)
        except NeedsPull as e:
            needs_pull.add(e.service_name)

    if needs_push or needs_pull:
        raise MissingDigests(needs_push, needs_pull)

    return digests


def get_image_digest(service, allow_push=False):
    if 'image' not in service.options:
        raise UserError(
            "Service '{s.name}' doesn't define an image tag. An image name is "
            "required to generate a proper image digest for the bundle. Specify "
            "an image repo and tag with the 'image' option.".format(s=service))

    _, _, separator = parse_repository_tag(service.options['image'])
    # Compose file already uses a digest, no lookup required
    if separator == '@':
        return service.options['image']

    digest = get_digest(service)

    if digest:
        return digest

    if 'build' not in service.options:
        raise NeedsPull(service.image_name, service.name)

    if not allow_push:
        raise NeedsPush(service.image_name)

    return push_image(service)


def get_digest(service):
    digest = None
    try:
        image = service.image()
        # TODO: pick a digest based on the image tag if there are multiple
        # digests
        if image['RepoDigests']:
            digest = image['RepoDigests'][0]
    except NoSuchImageError:
        try:
            # Fetch the image digest from the registry
            distribution = service.get_image_registry_data()

            if distribution['Descriptor']['digest']:
                digest = '{image_name}@{digest}'.format(
                    image_name=service.image_name,
                    digest=distribution['Descriptor']['digest']
                )
        except NoSuchImageError:
            raise UserError(
                "Digest not found for service '{service}'. "
                "Repository does not exist or may require 'docker login'"
                .format(service=service.name))
    return digest


def push_image(service):
    try:
        digest = service.push()
    except Exception:
        log.error(
            "Failed to push image for service '{s.name}'. Please use an "
            "image tag that can be pushed to a Docker "
            "registry.".format(s=service))
        raise

    if not digest:
        raise ValueError("Failed to get digest for %s" % service.name)

    repo, _, _ = parse_repository_tag(service.options['image'])
    identifier = '{repo}@{digest}'.format(repo=repo, digest=digest)

    # only do this if RepoDigests isn't already populated
    image = service.image()
    if not image['RepoDigests']:
        # Pull by digest so that image['RepoDigests'] is populated for next time
        # and we don't have to pull/push again
        service.client.pull(identifier)
        log.info("Stored digest for {}".format(service.image_name))

    return identifier


def to_bundle(config, image_digests):
    if config.networks:
        log.warning("Unsupported top level key 'networks' - ignoring")

    if config.volumes:
        log.warning("Unsupported top level key 'volumes' - ignoring")

    config = denormalize_config(config)

    return {
        'Version': VERSION,
        'Services': {
            name: convert_service_to_bundle(
                name,
                service_dict,
                image_digests[name],
            )
            for name, service_dict in config['services'].items()
        },
    }


def convert_service_to_bundle(name, service_dict, image_digest):
    container_config = {'Image': image_digest}

    for key, value in service_dict.items():
        if key in IGNORED_KEYS:
            continue

        if key not in SUPPORTED_KEYS:
            log.warning("Unsupported key '{}' in services.{} - ignoring".format(key, name))
            continue

        if key == 'environment':
            container_config['Env'] = format_environment({
                envkey: envvalue for envkey, envvalue in value.items()
                if envvalue
            })
            continue

        if key in SERVICE_KEYS:
            container_config[SERVICE_KEYS[key]] = value
            continue

    set_command_and_args(
        container_config,
        service_dict.get('entrypoint', []),
        service_dict.get('command', []))
    container_config['Networks'] = make_service_networks(name, service_dict)

    ports = make_port_specs(service_dict)
    if ports:
        container_config['Ports'] = ports

    return container_config


# See https://github.com/docker/swarmkit/blob/agent/exec/container/container.go#L95
def set_command_and_args(config, entrypoint, command):
    if isinstance(entrypoint, six.string_types):
        entrypoint = split_command(entrypoint)
    if isinstance(command, six.string_types):
        command = split_command(command)

    if entrypoint:
        config['Command'] = entrypoint + command
        return

    if command:
        config['Args'] = command


def make_service_networks(name, service_dict):
    networks = []

    for network_name, network_def in get_network_defs_for_service(service_dict).items():
        for key in network_def.keys():
            log.warning(
                "Unsupported key '{}' in services.{}.networks.{} - ignoring"
                .format(key, name, network_name))

        networks.append(network_name)

    return networks


def make_port_specs(service_dict):
    ports = []

    internal_ports = [
        internal_port
        for port_def in service_dict.get('ports', [])
        for internal_port in split_port(port_def)[0]
    ]

    internal_ports += service_dict.get('expose', [])

    for internal_port in internal_ports:
        spec = make_port_spec(internal_port)
        if spec not in ports:
            ports.append(spec)

    return ports


def make_port_spec(value):
    components = six.text_type(value).partition('/')
    return {
        'Protocol': components[2] or 'tcp',
        'Port': int(components[0]),
    }
compose/cli/main.py
@@ -15,14 +15,12 @@ from distutils.spawn import find_executable
 from inspect import getdoc
 from operator import attrgetter
 
-import docker
+import docker.errors
+import docker.utils
 
 from . import errors
 from . import signals
 from .. import __version__
-from ..bundle import get_image_digests
-from ..bundle import MissingDigests
-from ..bundle import serialize_bundle
 from ..config import ConfigurationError
 from ..config import parse_environment
 from ..config import parse_labels
@@ -34,6 +32,8 @@ from ..const import COMPOSEFILE_V2_2 as V2_2
 from ..const import IS_WINDOWS_PLATFORM
 from ..errors import StreamParseError
 from ..progress_stream import StreamOutputError
+from ..project import get_image_digests
+from ..project import MissingDigests
 from ..project import NoSuchService
 from ..project import OneOffFilter
 from ..project import ProjectError
@@ -213,7 +213,6 @@ class TopLevelCommand(object):
 
     Commands:
       build              Build or rebuild services
-      bundle             Generate a Docker bundle from the Compose file
       config             Validate and view the Compose file
       create             Create services
       down               Stop and remove containers, networks, images, and volumes
@@ -304,38 +303,6 @@ class TopLevelCommand(object):
             progress=options.get('--progress'),
         )
 
-    def bundle(self, options):
-        """
-        Generate a Distributed Application Bundle (DAB) from the Compose file.
-
-        Images must have digests stored, which requires interaction with a
-        Docker registry. If digests aren't stored for all images, you can fetch
-        them with `docker-compose pull` or `docker-compose push`. To push images
-        automatically when bundling, pass `--push-images`. Only services with
-        a `build` option specified will have their images pushed.
-
-        Usage: bundle [options]
-
-        Options:
-            --push-images              Automatically push images for any services
-                                       which have a `build` option specified.
-
-            -o, --output PATH          Path to write the bundle file to.
-                                       Defaults to "<project name>.dab".
-        """
-        compose_config = get_config_from_options('.', self.toplevel_options)
-
-        output = options["--output"]
-        if not output:
-            output = "{}.dab".format(self.project.name)
-
-        image_digests = image_digests_for_project(self.project, options['--push-images'])
-
-        with open(output, 'w') as f:
-            f.write(serialize_bundle(compose_config, image_digests))
-
-        log.info("Wrote bundle to {}".format(output))
-
     def config(self, options):
         """
         Validate and view the Compose file.
@@ -1216,12 +1183,10 @@ def timeout_from_opts(options):
     return None if timeout is None else int(timeout)
 
 
-def image_digests_for_project(project, allow_push=False):
+def image_digests_for_project(project):
     try:
-        return get_image_digests(
-            project,
-            allow_push=allow_push
-        )
+        return get_image_digests(project)
     except MissingDigests as e:
         def list_images(images):
             return "\n".join("    {}".format(name) for name in sorted(images))
compose/project.py
@@ -16,6 +16,7 @@ from docker.errors import NotFound
 from docker.utils import version_lt
 
 from . import parallel
+from .cli.errors import UserError
 from .config import ConfigurationError
 from .config.config import V1
 from .config.sort_services import get_container_name_from_network_mode
@@ -33,6 +34,7 @@ from .service import ContainerNetworkMode
 from .service import ContainerPidMode
 from .service import ConvergenceStrategy
 from .service import NetworkMode
+from .service import NoSuchImageError
 from .service import parse_repository_tag
 from .service import PidMode
 from .service import Service
@@ -42,7 +44,6 @@ from .utils import microseconds_from_time_nano
 from .utils import truncate_string
 from .volume import ProjectVolumes
 
 
 log = logging.getLogger(__name__)
 
 
@@ -381,6 +382,7 @@ class Project(object):
 
         def build_service(service):
             service.build(no_cache, pull, force_rm, memory, build_args, gzip, rm, silent, cli, progress)
 
         if parallel_build:
             _, errors = parallel.parallel_execute(
                 services,
@@ -844,6 +846,91 @@ def get_secrets(service, service_secrets, secret_defs):
     return secrets
 
 
+def get_image_digests(project):
+    digests = {}
+    needs_push = set()
+    needs_pull = set()
+
+    for service in project.services:
+        try:
+            digests[service.name] = get_image_digest(service)
+        except NeedsPush as e:
+            needs_push.add(e.image_name)
+        except NeedsPull as e:
+            needs_pull.add(e.service_name)
+
+    if needs_push or needs_pull:
+        raise MissingDigests(needs_push, needs_pull)
+
+    return digests
+
+
+def get_image_digest(service):
+    if 'image' not in service.options:
+        raise UserError(
+            "Service '{s.name}' doesn't define an image tag. An image name is "
+            "required to generate a proper image digest. Specify an image repo "
+            "and tag with the 'image' option.".format(s=service))
+
+    _, _, separator = parse_repository_tag(service.options['image'])
+    # Compose file already uses a digest, no lookup required
+    if separator == '@':
+        return service.options['image']
+
+    digest = get_digest(service)
+
+    if digest:
+        return digest
+
+    if 'build' not in service.options:
+        raise NeedsPull(service.image_name, service.name)
+
+    raise NeedsPush(service.image_name)
+
+
+def get_digest(service):
+    digest = None
+    try:
+        image = service.image()
+        # TODO: pick a digest based on the image tag if there are multiple
+        # digests
+        if image['RepoDigests']:
+            digest = image['RepoDigests'][0]
+    except NoSuchImageError:
+        try:
+            # Fetch the image digest from the registry
+            distribution = service.get_image_registry_data()
+
+            if distribution['Descriptor']['digest']:
+                digest = '{image_name}@{digest}'.format(
+                    image_name=service.image_name,
+                    digest=distribution['Descriptor']['digest']
+                )
+        except NoSuchImageError:
+            raise UserError(
+                "Digest not found for service '{service}'. "
+                "Repository does not exist or may require 'docker login'"
+                .format(service=service.name))
+    return digest
+
+
+class MissingDigests(Exception):
+    def __init__(self, needs_push, needs_pull):
+        self.needs_push = needs_push
+        self.needs_pull = needs_pull
+
+
+class NeedsPush(Exception):
+    def __init__(self, image_name):
+        self.image_name = image_name
+
+
+class NeedsPull(Exception):
+    def __init__(self, image_name, service_name):
+        self.image_name = image_name
+        self.service_name = service_name
+
+
 class NoSuchService(Exception):
     def __init__(self, name):
         if isinstance(name, six.binary_type):
contrib/completion/bash/docker-compose
@@ -126,18 +126,6 @@ _docker_compose_build() {
 }
 
 
-_docker_compose_bundle() {
-    case "$prev" in
-        --output|-o)
-            _filedir
-            return
-            ;;
-    esac
-
-    COMPREPLY=( $( compgen -W "--push-images --help --output -o" -- "$cur" ) )
-}
-
-
 _docker_compose_config() {
     case "$prev" in
         --hash)
@@ -581,7 +569,6 @@ _docker_compose() {
 
     local commands=(
         build
-        bundle
         config
         create
         down
contrib/completion/zsh/_docker-compose
@@ -121,12 +121,6 @@ __docker-compose_subcommand() {
                 '--parallel[Build images in parallel.]' \
                 '*:services:__docker-compose_services_from_build' && ret=0
             ;;
-        (bundle)
-            _arguments \
-                $opts_help \
-                '--push-images[Automatically push images for any services which have a `build` option specified.]' \
-                '(--output -o)'{--output,-o}'[Path to write the bundle file to. Defaults to "<project name>.dab".]:file:_files' && ret=0
-            ;;
         (config)
             _arguments \
                 $opts_help \
tests/acceptance/cli_test.py
@@ -855,32 +855,6 @@ services:
         )
         assert 'Favorite Touhou Character: hong.meiling' in result.stdout
 
-    def test_bundle_with_digests(self):
-        self.base_dir = 'tests/fixtures/bundle-with-digests/'
-        tmpdir = pytest.ensuretemp('cli_test_bundle')
-        self.addCleanup(tmpdir.remove)
-        filename = str(tmpdir.join('example.dab'))
-
-        self.dispatch(['bundle', '--output', filename])
-        with open(filename, 'r') as fh:
-            bundle = json.load(fh)
-
-        assert bundle == {
-            'Version': '0.1',
-            'Services': {
-                'web': {
-                    'Image': ('dockercloud/hello-world@sha256:fe79a2cfbd17eefc3'
-                              '44fb8419420808df95a1e22d93b7f621a7399fd1e9dca1d'),
-                    'Networks': ['default'],
-                },
-                'redis': {
-                    'Image': ('redis@sha256:a84cb8f53a70e19f61ff2e1d5e73fb7ae62d'
-                              '374b2b7392de1e7d77be26ef8f7b'),
-                    'Networks': ['default'],
-                }
-            },
-        }
-
     def test_build_override_dir(self):
         self.base_dir = 'tests/fixtures/build-path-override-dir'
         self.override_dir = os.path.abspath('tests/fixtures')
tests/fixtures/bundle-with-digests/docker-compose.yml (deleted)
@@ -1,9 +0,0 @@
version: '2.0'

services:
  web:
    image: dockercloud/hello-world@sha256:fe79a2cfbd17eefc344fb8419420808df95a1e22d93b7f621a7399fd1e9dca1d

  redis:
    image: redis@sha256:a84cb8f53a70e19f61ff2e1d5e73fb7ae62d374b2b7392de1e7d77be26ef8f7b
tests/unit/bundle_test.py (deleted)
@@ -1,233 +0,0 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import docker
import pytest

from .. import mock
from compose import bundle
from compose import service
from compose.cli.errors import UserError
from compose.config.config import Config
from compose.const import COMPOSEFILE_V2_0 as V2_0
from compose.service import NoSuchImageError


@pytest.fixture
def mock_service():
    return mock.create_autospec(
        service.Service,
        client=mock.create_autospec(docker.APIClient),
        options={})


def test_get_image_digest_exists(mock_service):
    mock_service.options['image'] = 'abcd'
    mock_service.image.return_value = {'RepoDigests': ['digest1']}
    digest = bundle.get_image_digest(mock_service)
    assert digest == 'digest1'


def test_get_image_digest_image_uses_digest(mock_service):
    mock_service.options['image'] = image_id = 'redis@sha256:digest'

    digest = bundle.get_image_digest(mock_service)
    assert digest == image_id
    assert not mock_service.image.called


def test_get_image_digest_from_repository(mock_service):
    mock_service.options['image'] = 'abcd'
    mock_service.image_name = 'abcd'
    mock_service.image.side_effect = NoSuchImageError(None)
    mock_service.get_image_registry_data.return_value = {'Descriptor': {'digest': 'digest'}}

    digest = bundle.get_image_digest(mock_service)
    assert digest == 'abcd@digest'


def test_get_image_digest_no_image(mock_service):
    with pytest.raises(UserError) as exc:
        bundle.get_image_digest(service.Service(name='theservice'))

    assert "doesn't define an image tag" in exc.exconly()


def test_push_image_with_saved_digest(mock_service):
    mock_service.options['build'] = '.'
    mock_service.options['image'] = image_id = 'abcd'
    mock_service.push.return_value = expected = 'sha256:thedigest'
    mock_service.image.return_value = {'RepoDigests': ['digest1']}

    digest = bundle.push_image(mock_service)
    assert digest == image_id + '@' + expected

    mock_service.push.assert_called_once_with()
    assert not mock_service.client.push.called


def test_push_image(mock_service):
    mock_service.options['build'] = '.'
    mock_service.options['image'] = image_id = 'abcd'
    mock_service.push.return_value = expected = 'sha256:thedigest'
    mock_service.image.return_value = {'RepoDigests': []}

    digest = bundle.push_image(mock_service)
    assert digest == image_id + '@' + expected

    mock_service.push.assert_called_once_with()
    mock_service.client.pull.assert_called_once_with(digest)


def test_to_bundle():
    image_digests = {'a': 'aaaa', 'b': 'bbbb'}
    services = [
        {'name': 'a', 'build': '.', },
        {'name': 'b', 'build': './b'},
    ]
    config = Config(
        version=V2_0,
        services=services,
        volumes={'special': {}},
        networks={'extra': {}},
        secrets={},
        configs={}
    )

    with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
        output = bundle.to_bundle(config, image_digests)

    assert mock_log.mock_calls == [
        mock.call("Unsupported top level key 'networks' - ignoring"),
        mock.call("Unsupported top level key 'volumes' - ignoring"),
    ]

    assert output == {
        'Version': '0.1',
        'Services': {
            'a': {'Image': 'aaaa', 'Networks': ['default']},
            'b': {'Image': 'bbbb', 'Networks': ['default']},
        }
    }


def test_convert_service_to_bundle():
    name = 'theservice'
    image_digest = 'thedigest'
    service_dict = {
        'ports': ['80'],
        'expose': ['1234'],
        'networks': {'extra': {}},
        'command': 'foo',
        'entrypoint': 'entry',
        'environment': {'BAZ': 'ENV'},
        'build': '.',
        'working_dir': '/tmp',
        'user': 'root',
        'labels': {'FOO': 'LABEL'},
        'privileged': True,
    }

    with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
        config = bundle.convert_service_to_bundle(name, service_dict, image_digest)

    mock_log.assert_called_once_with(
        "Unsupported key 'privileged' in services.theservice - ignoring")

    assert config == {
        'Image': image_digest,
        'Ports': [
            {'Protocol': 'tcp', 'Port': 80},
            {'Protocol': 'tcp', 'Port': 1234},
        ],
        'Networks': ['extra'],
        'Command': ['entry', 'foo'],
        'Env': ['BAZ=ENV'],
        'WorkingDir': '/tmp',
        'User': 'root',
        'Labels': {'FOO': 'LABEL'},
    }


def test_set_command_and_args_none():
    config = {}
    bundle.set_command_and_args(config, [], [])
    assert config == {}


def test_set_command_and_args_from_command():
    config = {}
    bundle.set_command_and_args(config, [], "echo ok")
    assert config == {'Args': ['echo', 'ok']}


def test_set_command_and_args_from_entrypoint():
    config = {}
    bundle.set_command_and_args(config, "echo entry", [])
    assert config == {'Command': ['echo', 'entry']}


def test_set_command_and_args_from_both():
    config = {}
    bundle.set_command_and_args(config, "echo entry", ["extra", "arg"])
    assert config == {'Command': ['echo', 'entry', "extra", "arg"]}


def test_make_service_networks_default():
    name = 'theservice'
    service_dict = {}

    with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
        networks = bundle.make_service_networks(name, service_dict)

    assert not mock_log.called
    assert networks == ['default']


def test_make_service_networks():
    name = 'theservice'
    service_dict = {
        'networks': {
            'foo': {
                'aliases': ['one', 'two'],
            },
            'bar': {}
        },
    }

    with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
        networks = bundle.make_service_networks(name, service_dict)

    mock_log.assert_called_once_with(
        "Unsupported key 'aliases' in services.theservice.networks.foo - ignoring")
    assert sorted(networks) == sorted(service_dict['networks'])


def test_make_port_specs():
    service_dict = {
        'expose': ['80', '500/udp'],
        'ports': [
            '400:80',
            '222',
            '127.0.0.1:8001:8001',
            '127.0.0.1:5000-5001:3000-3001'],
    }
    port_specs = bundle.make_port_specs(service_dict)
    assert port_specs == [
        {'Protocol': 'tcp', 'Port': 80},
        {'Protocol': 'tcp', 'Port': 222},
        {'Protocol': 'tcp', 'Port': 8001},
        {'Protocol': 'tcp', 'Port': 3000},
        {'Protocol': 'tcp', 'Port': 3001},
        {'Protocol': 'udp', 'Port': 500},
    ]


def test_make_port_spec_with_protocol():
    port_spec = bundle.make_port_spec("5000/udp")
    assert port_spec == {'Protocol': 'udp', 'Port': 5000}


def test_make_port_spec_default_protocol():
    port_spec = bundle.make_port_spec("50000")
    assert port_spec == {'Protocol': 'tcp', 'Port': 50000}