Merge pull request #7294 from alexrecuenco/remove-python2-1.26goal

Clean up python2 style code
This commit is contained in:
Anca Iordache 2020-08-11 14:19:28 +02:00 committed by GitHub
commit b01601a53c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
61 changed files with 350 additions and 385 deletions

View File

@ -20,3 +20,9 @@
language_version: 'python3.7' language_version: 'python3.7'
args: args:
- --py3-plus - --py3-plus
- repo: https://github.com/asottile/pyupgrade
rev: v2.1.0
hooks:
- id: pyupgrade
args:
- --py3-plus

View File

@ -1,2 +1 @@
__version__ = '1.27.0dev' __version__ = '1.27.0dev'

View File

@ -14,16 +14,16 @@ NAMES = [
def get_pairs(): def get_pairs():
for i, name in enumerate(NAMES): for i, name in enumerate(NAMES):
yield(name, str(30 + i)) yield (name, str(30 + i))
yield('intense_' + name, str(30 + i) + ';1') yield ('intense_' + name, str(30 + i) + ';1')
def ansi(code): def ansi(code):
return '\033[{0}m'.format(code) return '\033[{}m'.format(code)
def ansi_color(code, s): def ansi_color(code, s):
return '{0}{1}{2}'.format(ansi(code), s, ansi(0)) return '{}{}{}'.format(ansi(code), s, ansi(0))
def make_color_fn(code): def make_color_fn(code):

View File

@ -147,15 +147,17 @@ def get_project(project_dir, config_path=None, project_name=None, verbose=False,
def execution_context_labels(config_details, environment_file): def execution_context_labels(config_details, environment_file):
extra_labels = [ extra_labels = [
'{0}={1}'.format(LABEL_WORKING_DIR, os.path.abspath(config_details.working_dir)) '{}={}'.format(LABEL_WORKING_DIR, os.path.abspath(config_details.working_dir))
] ]
if not use_config_from_stdin(config_details): if not use_config_from_stdin(config_details):
extra_labels.append('{0}={1}'.format(LABEL_CONFIG_FILES, config_files_label(config_details))) extra_labels.append('{}={}'.format(LABEL_CONFIG_FILES, config_files_label(config_details)))
if environment_file is not None: if environment_file is not None:
extra_labels.append('{0}={1}'.format(LABEL_ENVIRONMENT_FILE, extra_labels.append('{}={}'.format(
os.path.normpath(environment_file))) LABEL_ENVIRONMENT_FILE,
os.path.normpath(environment_file))
)
return extra_labels return extra_labels
@ -168,7 +170,8 @@ def use_config_from_stdin(config_details):
def config_files_label(config_details): def config_files_label(config_details):
return ",".join( return ",".join(
map(str, (os.path.normpath(c.filename) for c in config_details.config_files))) os.path.normpath(c.filename) for c in config_details.config_files
)
def get_project_name(working_dir, project_name=None, environment=None): def get_project_name(working_dir, project_name=None, environment=None):

View File

@ -11,7 +11,7 @@ def docopt_full_help(docstring, *args, **kwargs):
raise SystemExit(docstring) raise SystemExit(docstring)
class DocoptDispatcher(object): class DocoptDispatcher:
def __init__(self, command_class, options): def __init__(self, command_class, options):
self.command_class = command_class self.command_class = command_class
@ -50,7 +50,7 @@ def get_handler(command_class, command):
class NoSuchCommand(Exception): class NoSuchCommand(Exception):
def __init__(self, command, supercommand): def __init__(self, command, supercommand):
super(NoSuchCommand, self).__init__("No such command: %s" % command) super().__init__("No such command: %s" % command)
self.command = command self.command = command
self.supercommand = supercommand self.supercommand = supercommand

View File

@ -26,11 +26,9 @@ class UserError(Exception):
def __init__(self, msg): def __init__(self, msg):
self.msg = dedent(msg).strip() self.msg = dedent(msg).strip()
def __unicode__(self): def __str__(self):
return self.msg return self.msg
__str__ = __unicode__
class ConnectionError(Exception): class ConnectionError(Exception):
pass pass

View File

@ -1,15 +1,10 @@
import logging import logging
import shutil from shutil import get_terminal_size
import texttable import texttable
from compose.cli import colors from compose.cli import colors
if hasattr(shutil, "get_terminal_size"):
from shutil import get_terminal_size
else:
from backports.shutil_get_terminal_size import get_terminal_size
def get_tty_width(): def get_tty_width():
try: try:
@ -45,15 +40,15 @@ class ConsoleWarningFormatter(logging.Formatter):
def get_level_message(self, record): def get_level_message(self, record):
separator = ': ' separator = ': '
if record.levelno == logging.WARNING: if record.levelno >= logging.ERROR:
return colors.yellow(record.levelname) + separator
if record.levelno == logging.ERROR:
return colors.red(record.levelname) + separator return colors.red(record.levelname) + separator
if record.levelno >= logging.WARNING:
return colors.yellow(record.levelname) + separator
return '' return ''
def format(self, record): def format(self, record):
if isinstance(record.msg, bytes): if isinstance(record.msg, bytes):
record.msg = record.msg.decode('utf-8') record.msg = record.msg.decode('utf-8')
message = super(ConsoleWarningFormatter, self).format(record) message = super().format(record)
return '{0}{1}'.format(self.get_level_message(record), message) return '{}{}'.format(self.get_level_message(record), message)

View File

@ -2,6 +2,7 @@ import _thread as thread
import sys import sys
from collections import namedtuple from collections import namedtuple
from itertools import cycle from itertools import cycle
from operator import attrgetter
from queue import Empty from queue import Empty
from queue import Queue from queue import Queue
from threading import Thread from threading import Thread
@ -13,7 +14,7 @@ from compose.cli.signals import ShutdownException
from compose.utils import split_buffer from compose.utils import split_buffer
class LogPresenter(object): class LogPresenter:
def __init__(self, prefix_width, color_func): def __init__(self, prefix_width, color_func):
self.prefix_width = prefix_width self.prefix_width = prefix_width
@ -50,7 +51,7 @@ def max_name_width(service_names, max_index_width=3):
return max(len(name) for name in service_names) + max_index_width return max(len(name) for name in service_names) + max_index_width
class LogPrinter(object): class LogPrinter:
"""Print logs from many containers to a single output stream.""" """Print logs from many containers to a single output stream."""
def __init__(self, def __init__(self,
@ -133,7 +134,7 @@ def build_thread_map(initial_containers, presenters, thread_args):
# Container order is unspecified, so they are sorted by name in order to make # Container order is unspecified, so they are sorted by name in order to make
# container:presenter (log color) assignment deterministic when given a list of containers # container:presenter (log color) assignment deterministic when given a list of containers
# with the same names. # with the same names.
for container in sorted(initial_containers, key=lambda c: c.name) for container in sorted(initial_containers, key=attrgetter('name'))
} }
@ -194,9 +195,9 @@ def build_log_generator(container, log_args):
def wait_on_exit(container): def wait_on_exit(container):
try: try:
exit_code = container.wait() exit_code = container.wait()
return "%s exited with code %s\n" % (container.name, exit_code) return "{} exited with code {}\n".format(container.name, exit_code)
except APIError as e: except APIError as e:
return "Unexpected API error for %s (HTTP code %s)\nResponse body:\n%s\n" % ( return "Unexpected API error for {} (HTTP code {})\nResponse body:\n{}\n".format(
container.name, e.response.status_code, container.name, e.response.status_code,
e.response.text or '[empty]' e.response.text or '[empty]'
) )

View File

@ -73,7 +73,7 @@ def main():
log.error(e.msg) log.error(e.msg)
sys.exit(1) sys.exit(1)
except BuildError as e: except BuildError as e:
log.error("Service '%s' failed to build: %s" % (e.service.name, e.reason)) log.error("Service '{}' failed to build: {}".format(e.service.name, e.reason))
sys.exit(1) sys.exit(1)
except StreamOutputError as e: except StreamOutputError as e:
log.error(e) log.error(e)
@ -175,7 +175,7 @@ def parse_doc_section(name, source):
return [s.strip() for s in pattern.findall(source)] return [s.strip() for s in pattern.findall(source)]
class TopLevelCommand(object): class TopLevelCommand:
"""Define and run multi-container applications with Docker. """Define and run multi-container applications with Docker.
Usage: Usage:
@ -546,7 +546,7 @@ class TopLevelCommand(object):
key=attrgetter('name')) key=attrgetter('name'))
if options['--quiet']: if options['--quiet']:
for image in set(c.image for c in containers): for image in {c.image for c in containers}:
print(image.split(':')[1]) print(image.split(':')[1])
return return
@ -1130,7 +1130,7 @@ def compute_service_exit_code(exit_value_from, attached_containers):
attached_containers)) attached_containers))
if not candidates: if not candidates:
log.error( log.error(
'No containers matching the spec "{0}" ' 'No containers matching the spec "{}" '
'were run.'.format(exit_value_from) 'were run.'.format(exit_value_from)
) )
return 2 return 2
@ -1453,10 +1453,7 @@ def call_docker(args, dockeropts, environment):
args = [executable_path] + tls_options + args args = [executable_path] + tls_options + args
log.debug(" ".join(map(pipes.quote, args))) log.debug(" ".join(map(pipes.quote, args)))
filtered_env = {} filtered_env = {k: v for k, v in environment.items() if v is not None}
for k, v in environment.items():
if v is not None:
filtered_env[k] = environment[k]
return subprocess.call(args, env=filtered_env) return subprocess.call(args, env=filtered_env)

View File

@ -11,13 +11,6 @@ import docker
import compose import compose
from ..const import IS_WINDOWS_PLATFORM from ..const import IS_WINDOWS_PLATFORM
# WindowsError is not defined on non-win32 platforms. Avoid runtime errors by
# defining it as OSError (its parent class) if missing.
try:
WindowsError
except NameError:
WindowsError = OSError
def yesno(prompt, default=None): def yesno(prompt, default=None):
""" """
@ -58,7 +51,7 @@ def call_silently(*args, **kwargs):
with open(os.devnull, 'w') as shutup: with open(os.devnull, 'w') as shutup:
try: try:
return subprocess.call(*args, stdout=shutup, stderr=shutup, **kwargs) return subprocess.call(*args, stdout=shutup, stderr=shutup, **kwargs)
except WindowsError: except OSError:
# On Windows, subprocess.call() can still raise exceptions. Normalize # On Windows, subprocess.call() can still raise exceptions. Normalize
# to POSIXy behaviour by returning a nonzero exit code. # to POSIXy behaviour by returning a nonzero exit code.
return 1 return 1
@ -120,7 +113,7 @@ def generate_user_agent():
try: try:
p_system = platform.system() p_system = platform.system()
p_release = platform.release() p_release = platform.release()
except IOError: except OSError:
pass pass
else: else:
parts.append("{}/{}".format(p_system, p_release)) parts.append("{}/{}".format(p_system, p_release))
@ -133,7 +126,7 @@ def human_readable_file_size(size):
if order >= len(suffixes): if order >= len(suffixes):
order = len(suffixes) - 1 order = len(suffixes) - 1
return '{0:.4g} {1}'.format( return '{:.4g} {}'.format(
size / pow(10, order * 3), size / pow(10, order * 3),
suffixes[order] suffixes[order]
) )

View File

@ -6,13 +6,13 @@ from itertools import chain
def format_call(args, kwargs): def format_call(args, kwargs):
args = (repr(a) for a in args) args = (repr(a) for a in args)
kwargs = ("{0!s}={1!r}".format(*item) for item in kwargs.items()) kwargs = ("{!s}={!r}".format(*item) for item in kwargs.items())
return "({0})".format(", ".join(chain(args, kwargs))) return "({})".format(", ".join(chain(args, kwargs)))
def format_return(result, max_lines): def format_return(result, max_lines):
if isinstance(result, (list, tuple, set)): if isinstance(result, (list, tuple, set)):
return "({0} with {1} items)".format(type(result).__name__, len(result)) return "({} with {} items)".format(type(result).__name__, len(result))
if result: if result:
lines = pprint.pformat(result).split('\n') lines = pprint.pformat(result).split('\n')
@ -22,7 +22,7 @@ def format_return(result, max_lines):
return result return result
class VerboseProxy(object): class VerboseProxy:
"""Proxy all function calls to another class and log method name, arguments """Proxy all function calls to another class and log method name, arguments
and return values for each call. and return values for each call.
""" """

View File

@ -1,12 +1,13 @@
import functools import functools
import io
import logging import logging
import os import os
import re import re
import string import string
import sys import sys
from collections import namedtuple from collections import namedtuple
from itertools import chain
from operator import attrgetter from operator import attrgetter
from operator import itemgetter
import yaml import yaml
from cached_property import cached_property from cached_property import cached_property
@ -166,7 +167,7 @@ class ConfigDetails(namedtuple('_ConfigDetails', 'working_dir config_files envir
def __new__(cls, working_dir, config_files, environment=None): def __new__(cls, working_dir, config_files, environment=None):
if environment is None: if environment is None:
environment = Environment.from_env_file(working_dir) environment = Environment.from_env_file(working_dir)
return super(ConfigDetails, cls).__new__( return super().__new__(
cls, working_dir, config_files, environment cls, working_dir, config_files, environment
) )
@ -315,8 +316,8 @@ def validate_config_version(config_files):
if main_file.version != next_file.version: if main_file.version != next_file.version:
raise ConfigurationError( raise ConfigurationError(
"Version mismatch: file {0} specifies version {1} but " "Version mismatch: file {} specifies version {} but "
"extension file {2} uses version {3}".format( "extension file {} uses version {}".format(
main_file.filename, main_file.filename,
main_file.version, main_file.version,
next_file.filename, next_file.filename,
@ -595,7 +596,7 @@ def process_config_file(config_file, environment, service_name=None, interpolate
return config_file return config_file
class ServiceExtendsResolver(object): class ServiceExtendsResolver:
def __init__(self, service_config, config_file, environment, already_seen=None): def __init__(self, service_config, config_file, environment, already_seen=None):
self.service_config = service_config self.service_config = service_config
self.working_dir = service_config.working_dir self.working_dir = service_config.working_dir
@ -703,7 +704,7 @@ def resolve_build_args(buildargs, environment):
def validate_extended_service_dict(service_dict, filename, service): def validate_extended_service_dict(service_dict, filename, service):
error_prefix = "Cannot extend service '%s' in %s:" % (service, filename) error_prefix = "Cannot extend service '{}' in {}:".format(service, filename)
if 'links' in service_dict: if 'links' in service_dict:
raise ConfigurationError( raise ConfigurationError(
@ -826,9 +827,9 @@ def process_ports(service_dict):
def process_depends_on(service_dict): def process_depends_on(service_dict):
if 'depends_on' in service_dict and not isinstance(service_dict['depends_on'], dict): if 'depends_on' in service_dict and not isinstance(service_dict['depends_on'], dict):
service_dict['depends_on'] = dict([ service_dict['depends_on'] = {
(svc, {'condition': 'service_started'}) for svc in service_dict['depends_on'] svc: {'condition': 'service_started'} for svc in service_dict['depends_on']
]) }
return service_dict return service_dict
@ -1071,9 +1072,9 @@ def merge_service_dicts(base, override, version):
def merge_unique_items_lists(base, override): def merge_unique_items_lists(base, override):
override = [str(o) for o in override] override = (str(o) for o in override)
base = [str(b) for b in base] base = (str(b) for b in base)
return sorted(set().union(base, override)) return sorted(set(chain(base, override)))
def merge_healthchecks(base, override): def merge_healthchecks(base, override):
@ -1086,9 +1087,7 @@ def merge_healthchecks(base, override):
def merge_ports(md, base, override): def merge_ports(md, base, override):
def parse_sequence_func(seq): def parse_sequence_func(seq):
acc = [] acc = [s for item in seq for s in ServicePort.parse(item)]
for item in seq:
acc.extend(ServicePort.parse(item))
return to_mapping(acc, 'merge_field') return to_mapping(acc, 'merge_field')
field = 'ports' field = 'ports'
@ -1098,7 +1097,7 @@ def merge_ports(md, base, override):
merged = parse_sequence_func(md.base.get(field, [])) merged = parse_sequence_func(md.base.get(field, []))
merged.update(parse_sequence_func(md.override.get(field, []))) merged.update(parse_sequence_func(md.override.get(field, [])))
md[field] = [item for item in sorted(merged.values(), key=lambda x: x.target)] md[field] = [item for item in sorted(merged.values(), key=attrgetter("target"))]
def merge_build(output, base, override): def merge_build(output, base, override):
@ -1170,8 +1169,8 @@ def merge_reservations(base, override):
def merge_unique_objects_lists(base, override): def merge_unique_objects_lists(base, override):
result = dict((json_hash(i), i) for i in base + override) result = {json_hash(i): i for i in base + override}
return [i[1] for i in sorted([(k, v) for k, v in result.items()], key=lambda x: x[0])] return [i[1] for i in sorted(((k, v) for k, v in result.items()), key=itemgetter(0))]
def merge_blkio_config(base, override): def merge_blkio_config(base, override):
@ -1179,11 +1178,11 @@ def merge_blkio_config(base, override):
md.merge_scalar('weight') md.merge_scalar('weight')
def merge_blkio_limits(base, override): def merge_blkio_limits(base, override):
index = dict((b['path'], b) for b in base) get_path = itemgetter('path')
for o in override: index = {get_path(b): b for b in base}
index[o['path']] = o index.update((get_path(o), o) for o in override)
return sorted(list(index.values()), key=lambda x: x['path']) return sorted(index.values(), key=get_path)
for field in [ for field in [
"device_read_bps", "device_read_iops", "device_write_bps", "device_read_bps", "device_read_iops", "device_write_bps",
@ -1304,7 +1303,7 @@ def resolve_volume_path(working_dir, volume):
if host_path.startswith('.'): if host_path.startswith('.'):
host_path = expand_path(working_dir, host_path) host_path = expand_path(working_dir, host_path)
host_path = os.path.expanduser(host_path) host_path = os.path.expanduser(host_path)
return u"{}:{}{}".format(host_path, container_path, (':' + mode if mode else '')) return "{}:{}{}".format(host_path, container_path, (':' + mode if mode else ''))
return container_path return container_path
@ -1447,13 +1446,13 @@ def has_uppercase(name):
def load_yaml(filename, encoding=None, binary=True): def load_yaml(filename, encoding=None, binary=True):
try: try:
with io.open(filename, 'rb' if binary else 'r', encoding=encoding) as fh: with open(filename, 'rb' if binary else 'r', encoding=encoding) as fh:
return yaml.safe_load(fh) return yaml.safe_load(fh)
except (IOError, yaml.YAMLError, UnicodeDecodeError) as e: except (OSError, yaml.YAMLError, UnicodeDecodeError) as e:
if encoding is None: if encoding is None:
# Sometimes the user's locale sets an encoding that doesn't match # Sometimes the user's locale sets an encoding that doesn't match
# the YAML files. In such cases, retry once with the "default" # the YAML files. In such cases, retry once with the "default"
# UTF-8 encoding # UTF-8 encoding
return load_yaml(filename, encoding='utf-8-sig', binary=False) return load_yaml(filename, encoding='utf-8-sig', binary=False)
error_name = getattr(e, '__module__', '') + '.' + e.__class__.__name__ error_name = getattr(e, '__module__', '') + '.' + e.__class__.__name__
raise ConfigurationError(u"{}: {}".format(error_name, e)) raise ConfigurationError("{}: {}".format(error_name, e))

View File

@ -43,7 +43,7 @@ def env_vars_from_file(filename, interpolate=True):
class Environment(dict): class Environment(dict):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.missing_keys = [] self.missing_keys = []
self.silent = False self.silent = False
@ -81,11 +81,11 @@ class Environment(dict):
def __getitem__(self, key): def __getitem__(self, key):
try: try:
return super(Environment, self).__getitem__(key) return super().__getitem__(key)
except KeyError: except KeyError:
if IS_WINDOWS_PLATFORM: if IS_WINDOWS_PLATFORM:
try: try:
return super(Environment, self).__getitem__(key.upper()) return super().__getitem__(key.upper())
except KeyError: except KeyError:
pass pass
if not self.silent and key not in self.missing_keys: if not self.silent and key not in self.missing_keys:
@ -98,20 +98,20 @@ class Environment(dict):
return "" return ""
def __contains__(self, key): def __contains__(self, key):
result = super(Environment, self).__contains__(key) result = super().__contains__(key)
if IS_WINDOWS_PLATFORM: if IS_WINDOWS_PLATFORM:
return ( return (
result or super(Environment, self).__contains__(key.upper()) result or super().__contains__(key.upper())
) )
return result return result
def get(self, key, *args, **kwargs): def get(self, key, *args, **kwargs):
if IS_WINDOWS_PLATFORM: if IS_WINDOWS_PLATFORM:
return super(Environment, self).get( return super().get(
key, key,
super(Environment, self).get(key.upper(), *args, **kwargs) super().get(key.upper(), *args, **kwargs)
) )
return super(Environment, self).get(key, *args, **kwargs) return super().get(key, *args, **kwargs)
def get_boolean(self, key): def get_boolean(self, key):
# Convert a value to a boolean using "common sense" rules. # Convert a value to a boolean using "common sense" rules.

View File

@ -1,5 +1,3 @@
VERSION_EXPLANATION = ( VERSION_EXPLANATION = (
'You might be seeing this error because you\'re using the wrong Compose file version. ' 'You might be seeing this error because you\'re using the wrong Compose file version. '
'Either specify a supported version (e.g "2.2" or "3.3") and place ' 'Either specify a supported version (e.g "2.2" or "3.3") and place '
@ -40,7 +38,7 @@ class CircularReference(ConfigurationError):
class ComposeFileNotFound(ConfigurationError): class ComposeFileNotFound(ConfigurationError):
def __init__(self, supported_filenames): def __init__(self, supported_filenames):
super(ComposeFileNotFound, self).__init__(""" super().__init__("""
Can't find a suitable configuration file in this directory or any Can't find a suitable configuration file in this directory or any
parent. Are you in the right directory? parent. Are you in the right directory?
@ -51,7 +49,7 @@ class ComposeFileNotFound(ConfigurationError):
class DuplicateOverrideFileFound(ConfigurationError): class DuplicateOverrideFileFound(ConfigurationError):
def __init__(self, override_filenames): def __init__(self, override_filenames):
self.override_filenames = override_filenames self.override_filenames = override_filenames
super(DuplicateOverrideFileFound, self).__init__( super().__init__(
"Multiple override files found: {}. You may only use a single " "Multiple override files found: {}. You may only use a single "
"override file.".format(", ".join(override_filenames)) "override file.".format(", ".join(override_filenames))
) )

View File

@ -11,7 +11,7 @@ from compose.utils import parse_nanoseconds_int
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class Interpolator(object): class Interpolator:
def __init__(self, templater, mapping): def __init__(self, templater, mapping):
self.templater = templater self.templater = templater
@ -31,15 +31,15 @@ def interpolate_environment_variables(version, config, section, environment):
interpolator = Interpolator(TemplateWithDefaults, environment) interpolator = Interpolator(TemplateWithDefaults, environment)
def process_item(name, config_dict): def process_item(name, config_dict):
return dict( return {
(key, interpolate_value(name, key, val, section, interpolator)) key: interpolate_value(name, key, val, section, interpolator)
for key, val in (config_dict or {}).items() for key, val in (config_dict or {}).items()
) }
return dict( return {
(name, process_item(name, config_dict or {})) name: process_item(name, config_dict or {})
for name, config_dict in config.items() for name, config_dict in config.items()
) }
def get_config_path(config_key, section, name): def get_config_path(config_key, section, name):
@ -75,10 +75,10 @@ def recursive_interpolate(obj, interpolator, config_path):
if isinstance(obj, str): if isinstance(obj, str):
return converter.convert(config_path, interpolator.interpolate(obj)) return converter.convert(config_path, interpolator.interpolate(obj))
if isinstance(obj, dict): if isinstance(obj, dict):
return dict( return {
(key, recursive_interpolate(val, interpolator, append(config_path, key))) key: recursive_interpolate(val, interpolator, append(config_path, key))
for (key, val) in obj.items() for key, val in obj.items()
) }
if isinstance(obj, list): if isinstance(obj, list):
return [recursive_interpolate(val, interpolator, config_path) for val in obj] return [recursive_interpolate(val, interpolator, config_path) for val in obj]
return converter.convert(config_path, obj) return converter.convert(config_path, obj)
@ -135,7 +135,7 @@ class TemplateWithDefaults(Template):
val = mapping[named] val = mapping[named]
if isinstance(val, bytes): if isinstance(val, bytes):
val = val.decode('utf-8') val = val.decode('utf-8')
return '%s' % (val,) return '{}'.format(val)
if mo.group('escaped') is not None: if mo.group('escaped') is not None:
return self.delimiter return self.delimiter
if mo.group('invalid') is not None: if mo.group('invalid') is not None:
@ -224,7 +224,7 @@ def to_microseconds(v):
return int(parse_nanoseconds_int(v) / 1000) return int(parse_nanoseconds_int(v) / 1000)
class ConversionMap(object): class ConversionMap:
map = { map = {
service_path('blkio_config', 'weight'): to_int, service_path('blkio_config', 'weight'): to_int,
service_path('blkio_config', 'weight_device', 'weight'): to_int, service_path('blkio_config', 'weight_device', 'weight'): to_int,

View File

@ -104,7 +104,7 @@ def serialize_ns_time_value(value):
result = (int(value), stage[1]) result = (int(value), stage[1])
else: else:
break break
return '{0}{1}'.format(*result) return '{}{}'.format(*result)
def denormalize_service_dict(service_dict, version, image_digest=None): def denormalize_service_dict(service_dict, version, image_digest=None):

View File

@ -21,7 +21,7 @@ def get_source_name_from_network_mode(network_mode, source_type):
def get_service_names(links): def get_service_names(links):
return [link.split(':')[0] for link in links] return [link.split(':', 1)[0] for link in links]
def get_service_names_from_volumes_from(volumes_from): def get_service_names_from_volumes_from(volumes_from):

View File

@ -146,7 +146,7 @@ def normpath(path, win_host=False):
return path return path
class MountSpec(object): class MountSpec:
options_map = { options_map = {
'volume': { 'volume': {
'nocopy': 'no_copy' 'nocopy': 'no_copy'
@ -338,9 +338,9 @@ class ServiceConfigBase(namedtuple('_ServiceConfigBase', 'source target uid gid
return self.source return self.source
def repr(self): def repr(self):
return dict( return {
[(k, v) for k, v in zip(self._fields, self) if v is not None] k: v for k, v in zip(self._fields, self) if v is not None
) }
class ServiceSecret(ServiceConfigBase): class ServiceSecret(ServiceConfigBase):
@ -362,10 +362,7 @@ class ServicePort(namedtuple('_ServicePort', 'target published protocol mode ext
if published: if published:
if isinstance(published, str) and '-' in published: # "x-y:z" format if isinstance(published, str) and '-' in published: # "x-y:z" format
a, b = published.split('-', 1) a, b = published.split('-', 1)
try: if not a.isdigit() or not b.isdigit():
int(a)
int(b)
except ValueError:
raise ConfigurationError('Invalid published port: {}'.format(published)) raise ConfigurationError('Invalid published port: {}'.format(published))
else: else:
try: try:
@ -373,7 +370,7 @@ class ServicePort(namedtuple('_ServicePort', 'target published protocol mode ext
except ValueError: except ValueError:
raise ConfigurationError('Invalid published port: {}'.format(published)) raise ConfigurationError('Invalid published port: {}'.format(published))
return super(ServicePort, cls).__new__( return super().__new__(
cls, target, published, *args, **kwargs cls, target, published, *args, **kwargs
) )
@ -422,9 +419,9 @@ class ServicePort(namedtuple('_ServicePort', 'target published protocol mode ext
return (self.target, self.published, self.external_ip, self.protocol) return (self.target, self.published, self.external_ip, self.protocol)
def repr(self): def repr(self):
return dict( return {
[(k, v) for k, v in zip(self._fields, self) if v is not None] k: v for k, v in zip(self._fields, self) if v is not None
) }
def legacy_repr(self): def legacy_repr(self):
return normalize_port_dict(self.repr()) return normalize_port_dict(self.repr())
@ -484,9 +481,9 @@ class SecurityOpt(namedtuple('_SecurityOpt', 'value src_file')):
if con[0] == 'seccomp' and con[1] != 'unconfined': if con[0] == 'seccomp' and con[1] != 'unconfined':
try: try:
with open(unquote_path(con[1]), 'r') as f: with open(unquote_path(con[1])) as f:
seccomp_data = json.load(f) seccomp_data = json.load(f)
except (IOError, ValueError) as e: except (OSError, ValueError) as e:
raise ConfigurationError('Error reading seccomp profile: {}'.format(e)) raise ConfigurationError('Error reading seccomp profile: {}'.format(e))
return cls( return cls(
'seccomp={}'.format(json.dumps(seccomp_data)), con[1] 'seccomp={}'.format(json.dumps(seccomp_data)), con[1]

View File

@ -100,7 +100,7 @@ def match_named_volumes(service_dict, project_volumes):
for volume_spec in service_volumes: for volume_spec in service_volumes:
if volume_spec.is_named_volume and volume_spec.external not in project_volumes: if volume_spec.is_named_volume and volume_spec.external not in project_volumes:
raise ConfigurationError( raise ConfigurationError(
'Named volume "{0}" is used in service "{1}" but no' 'Named volume "{}" is used in service "{}" but no'
' declaration was found in the volumes section.'.format( ' declaration was found in the volumes section.'.format(
volume_spec.repr(), service_dict.get('name') volume_spec.repr(), service_dict.get('name')
) )
@ -508,13 +508,13 @@ def load_jsonschema(version):
filename = os.path.join( filename = os.path.join(
get_schema_path(), get_schema_path(),
"config_schema_{0}.json".format(suffix)) "config_schema_{}.json".format(suffix))
if not os.path.exists(filename): if not os.path.exists(filename):
raise ConfigurationError( raise ConfigurationError(
'Version in "{}" is unsupported. {}' 'Version in "{}" is unsupported. {}'
.format(filename, VERSION_EXPLANATION)) .format(filename, VERSION_EXPLANATION))
with open(filename, "r") as fh: with open(filename) as fh:
return json.load(fh) return json.load(fh)
@ -534,7 +534,7 @@ def handle_errors(errors, format_error_func, filename):
gone wrong. Process each error and pull out relevant information and re-write gone wrong. Process each error and pull out relevant information and re-write
helpful error messages that are relevant. helpful error messages that are relevant.
""" """
errors = list(sorted(errors, key=str)) errors = sorted(errors, key=str)
if not errors: if not errors:
return return

View File

@ -12,7 +12,7 @@ from .utils import truncate_id
from .version import ComposeVersion from .version import ComposeVersion
class Container(object): class Container:
""" """
Represents a Docker container, constructed from the output of Represents a Docker container, constructed from the output of
GET /containers/:id:/json. GET /containers/:id:/json.
@ -78,8 +78,8 @@ class Container(object):
@property @property
def name_without_project(self): def name_without_project(self):
if self.name.startswith('{0}_{1}'.format(self.project, self.service)): if self.name.startswith('{}_{}'.format(self.project, self.service)):
return '{0}_{1}'.format(self.service, self.number if self.number is not None else self.slug) return '{}_{}'.format(self.service, self.number if self.number is not None else self.slug)
else: else:
return self.name return self.name
@ -91,7 +91,7 @@ class Container(object):
number = self.labels.get(LABEL_CONTAINER_NUMBER) number = self.labels.get(LABEL_CONTAINER_NUMBER)
if not number: if not number:
raise ValueError("Container {0} does not have a {1} label".format( raise ValueError("Container {} does not have a {} label".format(
self.short_id, LABEL_CONTAINER_NUMBER)) self.short_id, LABEL_CONTAINER_NUMBER))
return int(number) return int(number)
@ -224,7 +224,7 @@ class Container(object):
return reduce(get_value, key.split('.'), self.dictionary) return reduce(get_value, key.split('.'), self.dictionary)
def get_local_port(self, port, protocol='tcp'): def get_local_port(self, port, protocol='tcp'):
port = self.ports.get("%s/%s" % (port, protocol)) port = self.ports.get("{}/{}".format(port, protocol))
return "{HostIp}:{HostPort}".format(**port[0]) if port else None return "{HostIp}:{HostPort}".format(**port[0]) if port else None
def get_mount(self, mount_dest): def get_mount(self, mount_dest):
@ -266,7 +266,7 @@ class Container(object):
""" """
if not self.name.startswith(self.short_id): if not self.name.startswith(self.short_id):
self.client.rename( self.client.rename(
self.id, '{0}_{1}'.format(self.short_id, self.name) self.id, '{}_{}'.format(self.short_id, self.name)
) )
def inspect_if_not_inspected(self): def inspect_if_not_inspected(self):
@ -309,7 +309,7 @@ class Container(object):
) )
def __repr__(self): def __repr__(self):
return '<Container: %s (%s)>' % (self.name, self.id[:6]) return '<Container: {} ({})>'.format(self.name, self.id[:6])
def __eq__(self, other): def __eq__(self, other):
if type(self) != type(other): if type(self) != type(other):

View File

@ -1,5 +1,3 @@
class OperationFailedError(Exception): class OperationFailedError(Exception):
def __init__(self, reason): def __init__(self, reason):
self.msg = reason self.msg = reason
@ -17,14 +15,14 @@ class HealthCheckException(Exception):
class HealthCheckFailed(HealthCheckException): class HealthCheckFailed(HealthCheckException):
def __init__(self, container_id): def __init__(self, container_id):
super(HealthCheckFailed, self).__init__( super().__init__(
'Container "{}" is unhealthy.'.format(container_id) 'Container "{}" is unhealthy.'.format(container_id)
) )
class NoHealthCheckConfigured(HealthCheckException): class NoHealthCheckConfigured(HealthCheckException):
def __init__(self, service_name): def __init__(self, service_name):
super(NoHealthCheckConfigured, self).__init__( super().__init__(
'Service "{}" is missing a healthcheck configuration'.format( 'Service "{}" is missing a healthcheck configuration'.format(
service_name service_name
) )

View File

@ -1,6 +1,7 @@
import logging import logging
import re import re
from collections import OrderedDict from collections import OrderedDict
from operator import itemgetter
from docker.errors import NotFound from docker.errors import NotFound
from docker.types import IPAMConfig from docker.types import IPAMConfig
@ -24,7 +25,7 @@ OPTS_EXCEPTIONS = [
] ]
class Network(object): class Network:
def __init__(self, client, project, name, driver=None, driver_opts=None, def __init__(self, client, project, name, driver=None, driver_opts=None,
ipam=None, external=False, internal=False, enable_ipv6=False, ipam=None, external=False, internal=False, enable_ipv6=False,
labels=None, custom_name=False): labels=None, custom_name=False):
@ -51,7 +52,7 @@ class Network(object):
try: try:
self.inspect() self.inspect()
log.debug( log.debug(
'Network {0} declared as external. No new ' 'Network {} declared as external. No new '
'network will be created.'.format(self.name) 'network will be created.'.format(self.name)
) )
except NotFound: except NotFound:
@ -107,7 +108,7 @@ class Network(object):
def legacy_full_name(self): def legacy_full_name(self):
if self.custom_name: if self.custom_name:
return self.name return self.name
return '{0}_{1}'.format( return '{}_{}'.format(
re.sub(r'[_-]', '', self.project), self.name re.sub(r'[_-]', '', self.project), self.name
) )
@ -115,7 +116,7 @@ class Network(object):
def full_name(self): def full_name(self):
if self.custom_name: if self.custom_name:
return self.name return self.name
return '{0}_{1}'.format(self.project, self.name) return '{}_{}'.format(self.project, self.name)
@property @property
def true_name(self): def true_name(self):
@ -167,7 +168,7 @@ def create_ipam_config_from_dict(ipam_dict):
class NetworkConfigChangedError(ConfigurationError): class NetworkConfigChangedError(ConfigurationError):
def __init__(self, net_name, property_name): def __init__(self, net_name, property_name):
super(NetworkConfigChangedError, self).__init__( super().__init__(
'Network "{}" needs to be recreated - {} has changed'.format( 'Network "{}" needs to be recreated - {} has changed'.format(
net_name, property_name net_name, property_name
) )
@ -258,7 +259,7 @@ def build_networks(name, config_data, client):
return networks return networks
class ProjectNetworks(object): class ProjectNetworks:
def __init__(self, networks, use_networking): def __init__(self, networks, use_networking):
self.networks = networks or {} self.networks = networks or {}
@ -299,10 +300,10 @@ def get_network_defs_for_service(service_dict):
if 'network_mode' in service_dict: if 'network_mode' in service_dict:
return {} return {}
networks = service_dict.get('networks', {'default': None}) networks = service_dict.get('networks', {'default': None})
return dict( return {
(net, (config or {})) net: (config or {})
for net, config in networks.items() for net, config in networks.items()
) }
def get_network_names_for_service(service_dict): def get_network_names_for_service(service_dict):
@ -328,4 +329,4 @@ def get_networks(service_dict, network_definitions):
else: else:
# Ensure Compose will pick a consistent primary network if no # Ensure Compose will pick a consistent primary network if no
# priority is set # priority is set
return OrderedDict(sorted(networks.items(), key=lambda t: t[0])) return OrderedDict(sorted(networks.items(), key=itemgetter(0)))

View File

@ -25,7 +25,7 @@ log = logging.getLogger(__name__)
STOP = object() STOP = object()
class GlobalLimit(object): class GlobalLimit:
"""Simple class to hold a global semaphore limiter for a project. This class """Simple class to hold a global semaphore limiter for a project. This class
should be treated as a singleton that is instantiated when the project is. should be treated as a singleton that is instantiated when the project is.
""" """
@ -114,7 +114,7 @@ def _no_deps(x):
return [] return []
class State(object): class State:
""" """
Holds the state of a partially-complete parallel operation. Holds the state of a partially-complete parallel operation.
@ -136,7 +136,7 @@ class State(object):
return set(self.objects) - self.started - self.finished - self.failed return set(self.objects) - self.started - self.finished - self.failed
class NoLimit(object): class NoLimit:
def __enter__(self): def __enter__(self):
pass pass
@ -252,7 +252,7 @@ class UpstreamError(Exception):
pass pass
class ParallelStreamWriter(object): class ParallelStreamWriter:
"""Write out messages for operations happening in parallel. """Write out messages for operations happening in parallel.
Each operation has its own line, and ANSI code characters are used Each operation has its own line, and ANSI code characters are used

View File

@ -79,19 +79,19 @@ def print_output_event(event, stream, is_terminal):
status = event.get('status', '') status = event.get('status', '')
if 'progress' in event: if 'progress' in event:
write_to_stream("%s %s%s" % (status, event['progress'], terminator), stream) write_to_stream("{} {}{}".format(status, event['progress'], terminator), stream)
elif 'progressDetail' in event: elif 'progressDetail' in event:
detail = event['progressDetail'] detail = event['progressDetail']
total = detail.get('total') total = detail.get('total')
if 'current' in detail and total: if 'current' in detail and total:
percentage = float(detail['current']) / float(total) * 100 percentage = float(detail['current']) / float(total) * 100
write_to_stream('%s (%.1f%%)%s' % (status, percentage, terminator), stream) write_to_stream('{} ({:.1f}%){}'.format(status, percentage, terminator), stream)
else: else:
write_to_stream('%s%s' % (status, terminator), stream) write_to_stream('{}{}'.format(status, terminator), stream)
elif 'stream' in event: elif 'stream' in event:
write_to_stream("%s%s" % (event['stream'], terminator), stream) write_to_stream("{}{}".format(event['stream'], terminator), stream)
else: else:
write_to_stream("%s%s\n" % (status, terminator), stream) write_to_stream("{}{}\n".format(status, terminator), stream)
def get_digest_from_pull(events): def get_digest_from_pull(events):

View File

@ -55,16 +55,16 @@ class OneOffFilter(enum.Enum):
@classmethod @classmethod
def update_labels(cls, value, labels): def update_labels(cls, value, labels):
if value == cls.only: if value == cls.only:
labels.append('{0}={1}'.format(LABEL_ONE_OFF, "True")) labels.append('{}={}'.format(LABEL_ONE_OFF, "True"))
elif value == cls.exclude: elif value == cls.exclude:
labels.append('{0}={1}'.format(LABEL_ONE_OFF, "False")) labels.append('{}={}'.format(LABEL_ONE_OFF, "False"))
elif value == cls.include: elif value == cls.include:
pass pass
else: else:
raise ValueError("Invalid value for one_off: {}".format(repr(value))) raise ValueError("Invalid value for one_off: {}".format(repr(value)))
class Project(object): class Project:
""" """
A collection of services. A collection of services.
""" """
@ -80,7 +80,7 @@ class Project(object):
name = self.name name = self.name
if legacy: if legacy:
name = re.sub(r'[_-]', '', name) name = re.sub(r'[_-]', '', name)
labels = ['{0}={1}'.format(LABEL_PROJECT, name)] labels = ['{}={}'.format(LABEL_PROJECT, name)]
OneOffFilter.update_labels(one_off, labels) OneOffFilter.update_labels(one_off, labels)
return labels return labels
@ -549,10 +549,10 @@ class Project(object):
'action': event['status'], 'action': event['status'],
'id': event['Actor']['ID'], 'id': event['Actor']['ID'],
'service': container_attrs.get(LABEL_SERVICE), 'service': container_attrs.get(LABEL_SERVICE),
'attributes': dict([ 'attributes': {
(k, v) for k, v in container_attrs.items() k: v for k, v in container_attrs.items()
if not k.startswith('com.docker.compose.') if not k.startswith('com.docker.compose.')
]), },
'container': container, 'container': container,
} }
@ -812,7 +812,7 @@ class Project(object):
return return
if remove_orphans: if remove_orphans:
for ctnr in orphans: for ctnr in orphans:
log.info('Removing orphan container "{0}"'.format(ctnr.name)) log.info('Removing orphan container "{}"'.format(ctnr.name))
try: try:
ctnr.kill() ctnr.kill()
except APIError: except APIError:
@ -820,7 +820,7 @@ class Project(object):
ctnr.remove(force=True) ctnr.remove(force=True)
else: else:
log.warning( log.warning(
'Found orphan containers ({0}) for this project. If ' 'Found orphan containers ({}) for this project. If '
'you removed or renamed this service in your compose ' 'you removed or renamed this service in your compose '
'file, you can run this command with the ' 'file, you can run this command with the '
'--remove-orphans flag to clean it up.'.format( '--remove-orphans flag to clean it up.'.format(
@ -966,16 +966,16 @@ def get_secrets(service, service_secrets, secret_defs):
.format(service=service, secret=secret.source)) .format(service=service, secret=secret.source))
if secret_def.get('external'): if secret_def.get('external'):
log.warning("Service \"{service}\" uses secret \"{secret}\" which is external. " log.warning('Service "{service}" uses secret "{secret}" which is external. '
"External secrets are not available to containers created by " 'External secrets are not available to containers created by '
"docker-compose.".format(service=service, secret=secret.source)) 'docker-compose.'.format(service=service, secret=secret.source))
continue continue
if secret.uid or secret.gid or secret.mode: if secret.uid or secret.gid or secret.mode:
log.warning( log.warning(
"Service \"{service}\" uses secret \"{secret}\" with uid, " 'Service "{service}" uses secret "{secret}" with uid, '
"gid, or mode. These fields are not supported by this " 'gid, or mode. These fields are not supported by this '
"implementation of the Compose file".format( 'implementation of the Compose file'.format(
service=service, secret=secret.source service=service, secret=secret.source
) )
) )
@ -983,8 +983,8 @@ def get_secrets(service, service_secrets, secret_defs):
secret_file = secret_def.get('file') secret_file = secret_def.get('file')
if not path.isfile(str(secret_file)): if not path.isfile(str(secret_file)):
log.warning( log.warning(
"Service \"{service}\" uses an undefined secret file \"{secret_file}\", " 'Service "{service}" uses an undefined secret file "{secret_file}", '
"the following file should be created \"{secret_file}\"".format( 'the following file should be created "{secret_file}"'.format(
service=service, secret_file=secret_file service=service, secret_file=secret_file
) )
) )

View File

@ -163,7 +163,7 @@ class BuildAction(enum.Enum):
skip = 2 skip = 2
class Service(object): class Service:
def __init__( def __init__(
self, self,
name, name,
@ -230,10 +230,10 @@ class Service(object):
"""Return a :class:`compose.container.Container` for this service. The """Return a :class:`compose.container.Container` for this service. The
container must be active, and match `number`. container must be active, and match `number`.
""" """
for container in self.containers(labels=['{0}={1}'.format(LABEL_CONTAINER_NUMBER, number)]): for container in self.containers(labels=['{}={}'.format(LABEL_CONTAINER_NUMBER, number)]):
return container return container
raise ValueError("No container found for %s_%s" % (self.name, number)) raise ValueError("No container found for {}_{}".format(self.name, number))
def start(self, **options): def start(self, **options):
containers = self.containers(stopped=True) containers = self.containers(stopped=True)
@ -642,7 +642,7 @@ class Service(object):
expl = binarystr_to_unicode(ex.explanation) expl = binarystr_to_unicode(ex.explanation)
if "driver failed programming external connectivity" in expl: if "driver failed programming external connectivity" in expl:
log.warn("Host is already in use by another container") log.warn("Host is already in use by another container")
raise OperationFailedError("Cannot start service %s: %s" % (self.name, expl)) raise OperationFailedError("Cannot start service {}: {}".format(self.name, expl))
return container return container
@property @property
@ -736,12 +736,12 @@ class Service(object):
pid_namespace = self.pid_mode.service_name pid_namespace = self.pid_mode.service_name
ipc_namespace = self.ipc_mode.service_name ipc_namespace = self.ipc_mode.service_name
configs = dict( configs = {
[(name, None) for name in self.get_linked_service_names()] name: None for name in self.get_linked_service_names()
}
configs.update(
(name, None) for name in self.get_volumes_from_names()
) )
configs.update(dict(
[(name, None) for name in self.get_volumes_from_names()]
))
configs.update({net_name: None} if net_name else {}) configs.update({net_name: None} if net_name else {})
configs.update({pid_namespace: None} if pid_namespace else {}) configs.update({pid_namespace: None} if pid_namespace else {})
configs.update({ipc_namespace: None} if ipc_namespace else {}) configs.update({ipc_namespace: None} if ipc_namespace else {})
@ -863,9 +863,9 @@ class Service(object):
add_config_hash = (not one_off and not override_options) add_config_hash = (not one_off and not override_options)
slug = generate_random_id() if one_off else None slug = generate_random_id() if one_off else None
container_options = dict( container_options = {
(k, self.options[k]) k: self.options[k]
for k in DOCKER_CONFIG_KEYS if k in self.options) for k in DOCKER_CONFIG_KEYS if k in self.options}
override_volumes = override_options.pop('volumes', []) override_volumes = override_options.pop('volumes', [])
container_options.update(override_options) container_options.update(override_options)
@ -957,7 +957,7 @@ class Service(object):
) )
container_options['environment'].update(affinity) container_options['environment'].update(affinity)
container_options['volumes'] = dict((v.internal, {}) for v in container_volumes or {}) container_options['volumes'] = {v.internal: {} for v in container_volumes or {}}
if version_gte(self.client.api_version, '1.30'): if version_gte(self.client.api_version, '1.30'):
override_options['mounts'] = [build_mount(v) for v in container_mounts] or None override_options['mounts'] = [build_mount(v) for v in container_mounts] or None
else: else:
@ -1159,9 +1159,9 @@ class Service(object):
def labels(self, one_off=False, legacy=False): def labels(self, one_off=False, legacy=False):
proj_name = self.project if not legacy else re.sub(r'[_-]', '', self.project) proj_name = self.project if not legacy else re.sub(r'[_-]', '', self.project)
return [ return [
'{0}={1}'.format(LABEL_PROJECT, proj_name), '{}={}'.format(LABEL_PROJECT, proj_name),
'{0}={1}'.format(LABEL_SERVICE, self.name), '{}={}'.format(LABEL_SERVICE, self.name),
'{0}={1}'.format(LABEL_ONE_OFF, "True" if one_off else "False"), '{}={}'.format(LABEL_ONE_OFF, "True" if one_off else "False"),
] ]
@property @property
@ -1178,7 +1178,7 @@ class Service(object):
ext_links_origins = [link.split(':')[0] for link in self.options.get('external_links', [])] ext_links_origins = [link.split(':')[0] for link in self.options.get('external_links', [])]
if container_name in ext_links_origins: if container_name in ext_links_origins:
raise DependencyError( raise DependencyError(
'Service {0} has a self-referential external link: {1}'.format( 'Service {} has a self-referential external link: {}'.format(
self.name, container_name self.name, container_name
) )
) )
@ -1233,11 +1233,9 @@ class Service(object):
output = self.client.pull(repo, **pull_kwargs) output = self.client.pull(repo, **pull_kwargs)
if silent: if silent:
with open(os.devnull, 'w') as devnull: with open(os.devnull, 'w') as devnull:
for event in stream_output(output, devnull): yield from stream_output(output, devnull)
yield event
else: else:
for event in stream_output(output, sys.stdout): yield from stream_output(output, sys.stdout)
yield event
except (StreamOutputError, NotFound) as e: except (StreamOutputError, NotFound) as e:
if not ignore_pull_failures: if not ignore_pull_failures:
raise raise
@ -1255,7 +1253,7 @@ class Service(object):
'platform': self.platform, 'platform': self.platform,
} }
if not silent: if not silent:
log.info('Pulling %s (%s%s%s)...' % (self.name, repo, separator, tag)) log.info('Pulling {} ({}{}{})...'.format(self.name, repo, separator, tag))
if kwargs['platform'] and version_lt(self.client.api_version, '1.35'): if kwargs['platform'] and version_lt(self.client.api_version, '1.35'):
raise OperationFailedError( raise OperationFailedError(
@ -1273,7 +1271,7 @@ class Service(object):
repo, tag, separator = parse_repository_tag(self.options['image']) repo, tag, separator = parse_repository_tag(self.options['image'])
tag = tag or 'latest' tag = tag or 'latest'
log.info('Pushing %s (%s%s%s)...' % (self.name, repo, separator, tag)) log.info('Pushing {} ({}{}{})...'.format(self.name, repo, separator, tag))
output = self.client.push(repo, tag=tag, stream=True) output = self.client.push(repo, tag=tag, stream=True)
try: try:
@ -1335,7 +1333,7 @@ def short_id_alias_exists(container, network):
return container.short_id in aliases return container.short_id in aliases
class IpcMode(object): class IpcMode:
def __init__(self, mode): def __init__(self, mode):
self._mode = mode self._mode = mode
@ -1375,7 +1373,7 @@ class ContainerIpcMode(IpcMode):
self._mode = 'container:{}'.format(container.id) self._mode = 'container:{}'.format(container.id)
class PidMode(object): class PidMode:
def __init__(self, mode): def __init__(self, mode):
self._mode = mode self._mode = mode
@ -1415,7 +1413,7 @@ class ContainerPidMode(PidMode):
self._mode = 'container:{}'.format(container.id) self._mode = 'container:{}'.format(container.id)
class NetworkMode(object): class NetworkMode:
"""A `standard` network mode (ex: host, bridge)""" """A `standard` network mode (ex: host, bridge)"""
service_name = None service_name = None
@ -1430,7 +1428,7 @@ class NetworkMode(object):
mode = id mode = id
class ContainerNetworkMode(object): class ContainerNetworkMode:
"""A network mode that uses a container's network stack.""" """A network mode that uses a container's network stack."""
service_name = None service_name = None
@ -1447,7 +1445,7 @@ class ContainerNetworkMode(object):
return 'container:' + self.container.id return 'container:' + self.container.id
class ServiceNetworkMode(object): class ServiceNetworkMode:
"""A network mode that uses a service's network stack.""" """A network mode that uses a service's network stack."""
def __init__(self, service): def __init__(self, service):
@ -1552,10 +1550,10 @@ def get_container_data_volumes(container, volumes_option, tmpfs_option, mounts_o
volumes = [] volumes = []
volumes_option = volumes_option or [] volumes_option = volumes_option or []
container_mounts = dict( container_mounts = {
(mount['Destination'], mount) mount['Destination']: mount
for mount in container.get('Mounts') or {} for mount in container.get('Mounts') or {}
) }
image_volumes = [ image_volumes = [
VolumeSpec.parse(volume) VolumeSpec.parse(volume)
@ -1607,9 +1605,9 @@ def get_container_data_volumes(container, volumes_option, tmpfs_option, mounts_o
def warn_on_masked_volume(volumes_option, container_volumes, service): def warn_on_masked_volume(volumes_option, container_volumes, service):
container_volumes = dict( container_volumes = {
(volume.internal, volume.external) volume.internal: volume.external
for volume in container_volumes) for volume in container_volumes}
for volume in volumes_option: for volume in volumes_option:
if ( if (
@ -1759,7 +1757,7 @@ def convert_blkio_config(blkio_config):
continue continue
arr = [] arr = []
for item in blkio_config[field]: for item in blkio_config[field]:
arr.append(dict([(k.capitalize(), v) for k, v in item.items()])) arr.append({k.capitalize(): v for k, v in item.items()})
result[field] = arr result[field] = arr
return result return result
@ -1771,7 +1769,7 @@ def rewrite_build_path(path):
return path return path
class _CLIBuilder(object): class _CLIBuilder:
def __init__(self, progress): def __init__(self, progress):
self._progress = progress self._progress = progress
@ -1879,7 +1877,7 @@ class _CLIBuilder(object):
yield json.dumps({"stream": "{}{}\n".format(magic_word, image_id)}) yield json.dumps({"stream": "{}{}\n".format(magic_word, image_id)})
class _CommandBuilder(object): class _CommandBuilder:
def __init__(self): def __init__(self):
self._args = ["docker", "build"] self._args = ["docker", "build"]

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# -*- coding: utf-8 -*-
''' '''
timeparse.py timeparse.py
(c) Will Roberts <wildwilhelm@gmail.com> 1 February, 2014 (c) Will Roberts <wildwilhelm@gmail.com> 1 February, 2014
@ -54,14 +53,14 @@ TIMEFORMAT = r'{HOURS}{MINS}{SECS}{MILLI}{MICRO}{NANO}'.format(
NANO=opt(NANO), NANO=opt(NANO),
) )
MULTIPLIERS = dict([ MULTIPLIERS = {
('hours', 60 * 60), 'hours': 60 * 60,
('mins', 60), 'mins': 60,
('secs', 1), 'secs': 1,
('milli', 1.0 / 1000), 'milli': 1.0 / 1000,
('micro', 1.0 / 1000.0 / 1000), 'micro': 1.0 / 1000.0 / 1000,
('nano', 1.0 / 1000.0 / 1000.0 / 1000.0), 'nano': 1.0 / 1000.0 / 1000.0 / 1000.0,
]) }
def timeparse(sval): def timeparse(sval):
@ -90,4 +89,4 @@ def timeparse(sval):
def cast(value): def cast(value):
return int(value, 10) if value.isdigit() else float(value) return int(value) if value.isdigit() else float(value)

View File

@ -29,7 +29,7 @@ def stream_as_text(stream):
yield data yield data
def line_splitter(buffer, separator=u'\n'): def line_splitter(buffer, separator='\n'):
index = buffer.find(str(separator)) index = buffer.find(str(separator))
if index == -1: if index == -1:
return None return None
@ -45,7 +45,7 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
of the input. of the input.
""" """
splitter = splitter or line_splitter splitter = splitter or line_splitter
buffered = str('') buffered = ''
for data in stream_as_text(stream): for data in stream_as_text(stream):
buffered += data buffered += data
@ -116,7 +116,7 @@ def parse_nanoseconds_int(value):
def build_string_dict(source_dict): def build_string_dict(source_dict):
return dict((k, str(v if v is not None else '')) for k, v in source_dict.items()) return {k: str(v if v is not None else '') for k, v in source_dict.items()}
def splitdrive(path): def splitdrive(path):

View File

@ -1,5 +1,6 @@
import logging import logging
import re import re
from itertools import chain
from docker.errors import NotFound from docker.errors import NotFound
from docker.utils import version_lt from docker.utils import version_lt
@ -15,7 +16,7 @@ from .const import LABEL_VOLUME
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class Volume(object): class Volume:
def __init__(self, client, project, name, driver=None, driver_opts=None, def __init__(self, client, project, name, driver=None, driver_opts=None,
external=False, labels=None, custom_name=False): external=False, labels=None, custom_name=False):
self.client = client self.client = client
@ -57,13 +58,13 @@ class Volume(object):
def full_name(self): def full_name(self):
if self.custom_name: if self.custom_name:
return self.name return self.name
return '{0}_{1}'.format(self.project.lstrip('-_'), self.name) return '{}_{}'.format(self.project.lstrip('-_'), self.name)
@property @property
def legacy_full_name(self): def legacy_full_name(self):
if self.custom_name: if self.custom_name:
return self.name return self.name
return '{0}_{1}'.format( return '{}_{}'.format(
re.sub(r'[_-]', '', self.project), self.name re.sub(r'[_-]', '', self.project), self.name
) )
@ -96,7 +97,7 @@ class Volume(object):
self.legacy = False self.legacy = False
class ProjectVolumes(object): class ProjectVolumes:
def __init__(self, volumes): def __init__(self, volumes):
self.volumes = volumes self.volumes = volumes
@ -132,7 +133,7 @@ class ProjectVolumes(object):
volume_exists = volume.exists() volume_exists = volume.exists()
if volume.external: if volume.external:
log.debug( log.debug(
'Volume {0} declared as external. No new ' 'Volume {} declared as external. No new '
'volume will be created.'.format(volume.name) 'volume will be created.'.format(volume.name)
) )
if not volume_exists: if not volume_exists:
@ -148,7 +149,7 @@ class ProjectVolumes(object):
if not volume_exists: if not volume_exists:
log.info( log.info(
'Creating volume "{0}" with {1} driver'.format( 'Creating volume "{}" with {} driver'.format(
volume.full_name, volume.driver or 'default' volume.full_name, volume.driver or 'default'
) )
) )
@ -157,7 +158,7 @@ class ProjectVolumes(object):
check_remote_volume_config(volume.inspect(legacy=volume.legacy), volume) check_remote_volume_config(volume.inspect(legacy=volume.legacy), volume)
except NotFound: except NotFound:
raise ConfigurationError( raise ConfigurationError(
'Volume %s specifies nonexistent driver %s' % (volume.name, volume.driver) 'Volume {} specifies nonexistent driver {}'.format(volume.name, volume.driver)
) )
def namespace_spec(self, volume_spec): def namespace_spec(self, volume_spec):
@ -174,7 +175,7 @@ class ProjectVolumes(object):
class VolumeConfigChangedError(ConfigurationError): class VolumeConfigChangedError(ConfigurationError):
def __init__(self, local, property_name, local_value, remote_value): def __init__(self, local, property_name, local_value, remote_value):
super(VolumeConfigChangedError, self).__init__( super().__init__(
'Configuration for volume {vol_name} specifies {property_name} ' 'Configuration for volume {vol_name} specifies {property_name} '
'{local_value}, but a volume with the same name uses a different ' '{local_value}, but a volume with the same name uses a different '
'{property_name} ({remote_value}). If you wish to use the new ' '{property_name} ({remote_value}). If you wish to use the new '
@ -192,7 +193,7 @@ def check_remote_volume_config(remote, local):
raise VolumeConfigChangedError(local, 'driver', local.driver, remote.get('Driver')) raise VolumeConfigChangedError(local, 'driver', local.driver, remote.get('Driver'))
local_opts = local.driver_opts or {} local_opts = local.driver_opts or {}
remote_opts = remote.get('Options') or {} remote_opts = remote.get('Options') or {}
for k in set.union(set(remote_opts.keys()), set(local_opts.keys())): for k in set(chain(remote_opts, local_opts)):
if k.startswith('com.docker.'): # These options are set internally if k.startswith('com.docker.'): # These options are set internally
continue continue
if remote_opts.get(k) != local_opts.get(k): if remote_opts.get(k) != local_opts.get(k):
@ -202,7 +203,7 @@ def check_remote_volume_config(remote, local):
local_labels = local.labels or {} local_labels = local.labels or {}
remote_labels = remote.get('Labels') or {} remote_labels = remote.get('Labels') or {}
for k in set.union(set(remote_labels.keys()), set(local_labels.keys())): for k in set(chain(remote_labels, local_labels)):
if k.startswith('com.docker.'): # We are only interested in user-specified labels if k.startswith('com.docker.'): # We are only interested in user-specified labels
continue continue
if remote_labels.get(k) != local_labels.get(k): if remote_labels.get(k) != local_labels.get(k):

View File

@ -156,7 +156,7 @@ def main(args):
opts = parse_opts(args) opts = parse_opts(args)
with open(opts.filename, 'r') as fh: with open(opts.filename) as fh:
new_format = migrate(fh.read()) new_format = migrate(fh.read())
if opts.in_place: if opts.in_place:

View File

@ -1,4 +1,4 @@
# -*- mode: python ; coding: utf-8 -*- # -*- mode: python -*-
block_cipher = None block_cipher = None

View File

@ -8,8 +8,6 @@ docker==4.3.0
docker-pycreds==0.4.0 docker-pycreds==0.4.0
dockerpty==0.4.1 dockerpty==0.4.1
docopt==0.6.2 docopt==0.6.2
enum34==1.1.6; python_version < '3.4'
functools32==3.2.3.post2; python_version < '3.2'
idna==2.10 idna==2.10
ipaddress==1.0.23 ipaddress==1.0.23
jsonschema==3.2.0 jsonschema==3.2.0

View File

@ -6,7 +6,7 @@ from const import REPO_ROOT
def update_init_py_version(version): def update_init_py_version(version):
path = os.path.join(REPO_ROOT, 'compose', '__init__.py') path = os.path.join(REPO_ROOT, 'compose', '__init__.py')
with open(path, 'r') as f: with open(path) as f:
contents = f.read() contents = f.read()
contents = re.sub(r"__version__ = '[0-9a-z.-]+'", "__version__ = '{}'".format(version), contents) contents = re.sub(r"__version__ = '[0-9a-z.-]+'", "__version__ = '{}'".format(version), contents)
with open(path, 'w') as f: with open(path, 'w') as f:
@ -15,7 +15,7 @@ def update_init_py_version(version):
def update_run_sh_version(version): def update_run_sh_version(version):
path = os.path.join(REPO_ROOT, 'script', 'run', 'run.sh') path = os.path.join(REPO_ROOT, 'script', 'run', 'run.sh')
with open(path, 'r') as f: with open(path) as f:
contents = f.read() contents = f.read()
contents = re.sub(r'VERSION="[0-9a-z.-]+"', 'VERSION="{}"'.format(version), contents) contents = re.sub(r'VERSION="[0-9a-z.-]+"', 'VERSION="{}"'.format(version), contents)
with open(path, 'w') as f: with open(path, 'w') as f:

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# -*- coding: utf-8 -*-
import codecs import codecs
import os import os
import re import re
@ -50,11 +49,7 @@ if sys.version_info[:2] < (3, 4):
tests_require.append('mock >= 1.0.1, < 4') tests_require.append('mock >= 1.0.1, < 4')
extras_require = { extras_require = {
':python_version < "3.2"': ['subprocess32 >= 3.5.4, < 4'],
':python_version < "3.4"': ['enum34 >= 1.0.4, < 2'],
':python_version < "3.5"': ['backports.ssl_match_hostname >= 3.5, < 4'], ':python_version < "3.5"': ['backports.ssl_match_hostname >= 3.5, < 4'],
':python_version < "3.3"': ['backports.shutil_get_terminal_size == 1.0.0',
'ipaddress >= 1.0.16, < 2'],
':sys_platform == "win32"': ['colorama >= 0.4, < 1'], ':sys_platform == "win32"': ['colorama >= 0.4, < 1'],
'socks': ['PySocks >= 1.5.6, != 1.5.7, < 2'], 'socks': ['PySocks >= 1.5.6, != 1.5.7, < 2'],
'tests': tests_require, 'tests': tests_require,
@ -94,7 +89,7 @@ setup(
install_requires=install_requires, install_requires=install_requires,
extras_require=extras_require, extras_require=extras_require,
tests_require=tests_require, tests_require=tests_require,
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', python_requires='>=3.4',
entry_points={ entry_points={
'console_scripts': ['docker-compose=compose.cli.main:main'], 'console_scripts': ['docker-compose=compose.cli.main:main'],
}, },

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import datetime import datetime
import json import json
import os.path import os.path
@ -99,7 +98,7 @@ def kill_service(service):
container.kill() container.kill()
class ContainerCountCondition(object): class ContainerCountCondition:
def __init__(self, project, expected): def __init__(self, project, expected):
self.project = project self.project = project
@ -112,7 +111,7 @@ class ContainerCountCondition(object):
return "waiting for counter count == %s" % self.expected return "waiting for counter count == %s" % self.expected
class ContainerStateCondition(object): class ContainerStateCondition:
def __init__(self, client, name, status): def __init__(self, client, name, status):
self.client = client self.client = client
@ -140,7 +139,7 @@ class ContainerStateCondition(object):
class CLITestCase(DockerClientTestCase): class CLITestCase(DockerClientTestCase):
def setUp(self): def setUp(self):
super(CLITestCase, self).setUp() super().setUp()
self.base_dir = 'tests/fixtures/simple-composefile' self.base_dir = 'tests/fixtures/simple-composefile'
self.override_dir = None self.override_dir = None
@ -162,7 +161,7 @@ class CLITestCase(DockerClientTestCase):
if hasattr(self, '_project'): if hasattr(self, '_project'):
del self._project del self._project
super(CLITestCase, self).tearDown() super().tearDown()
@property @property
def project(self): def project(self):
@ -206,14 +205,14 @@ class CLITestCase(DockerClientTestCase):
def test_shorthand_host_opt(self): def test_shorthand_host_opt(self):
self.dispatch( self.dispatch(
['-H={0}'.format(os.environ.get('DOCKER_HOST', 'unix://')), ['-H={}'.format(os.environ.get('DOCKER_HOST', 'unix://')),
'up', '-d'], 'up', '-d'],
returncode=0 returncode=0
) )
def test_shorthand_host_opt_interactive(self): def test_shorthand_host_opt_interactive(self):
self.dispatch( self.dispatch(
['-H={0}'.format(os.environ.get('DOCKER_HOST', 'unix://')), ['-H={}'.format(os.environ.get('DOCKER_HOST', 'unix://')),
'run', 'another', 'ls'], 'run', 'another', 'ls'],
returncode=0 returncode=0
) )
@ -1453,7 +1452,7 @@ services:
if v['Name'].split('/')[-1].startswith('{}_'.format(self.project.name)) if v['Name'].split('/')[-1].startswith('{}_'.format(self.project.name))
] ]
assert set([v['Name'].split('/')[-1] for v in volumes]) == {volume_with_label} assert {v['Name'].split('/')[-1] for v in volumes} == {volume_with_label}
assert 'label_key' in volumes[0]['Labels'] assert 'label_key' in volumes[0]['Labels']
assert volumes[0]['Labels']['label_key'] == 'label_val' assert volumes[0]['Labels']['label_key'] == 'label_val'
@ -1866,12 +1865,12 @@ services:
self.dispatch(['run', 'implicit']) self.dispatch(['run', 'implicit'])
service = self.project.get_service('implicit') service = self.project.get_service('implicit')
containers = service.containers(stopped=True, one_off=OneOffFilter.only) containers = service.containers(stopped=True, one_off=OneOffFilter.only)
assert [c.human_readable_command for c in containers] == [u'/bin/sh -c echo "success"'] assert [c.human_readable_command for c in containers] == ['/bin/sh -c echo "success"']
self.dispatch(['run', 'explicit']) self.dispatch(['run', 'explicit'])
service = self.project.get_service('explicit') service = self.project.get_service('explicit')
containers = service.containers(stopped=True, one_off=OneOffFilter.only) containers = service.containers(stopped=True, one_off=OneOffFilter.only)
assert [c.human_readable_command for c in containers] == [u'/bin/true'] assert [c.human_readable_command for c in containers] == ['/bin/true']
@pytest.mark.skipif(SWARM_SKIP_RM_VOLUMES, reason='Swarm DELETE /containers/<id> bug') @pytest.mark.skipif(SWARM_SKIP_RM_VOLUMES, reason='Swarm DELETE /containers/<id> bug')
def test_run_rm(self): def test_run_rm(self):
@ -2701,7 +2700,7 @@ services:
str_iso_date, str_iso_time, container_info = string.split(' ', 2) str_iso_date, str_iso_time, container_info = string.split(' ', 2)
try: try:
return isinstance(datetime.datetime.strptime( return isinstance(datetime.datetime.strptime(
'%s %s' % (str_iso_date, str_iso_time), '{} {}'.format(str_iso_date, str_iso_time),
'%Y-%m-%d %H:%M:%S.%f'), '%Y-%m-%d %H:%M:%S.%f'),
datetime.datetime) datetime.datetime)
except ValueError: except ValueError:
@ -2790,7 +2789,7 @@ services:
self.base_dir = 'tests/fixtures/extends' self.base_dir = 'tests/fixtures/extends'
self.dispatch(['up', '-d'], None) self.dispatch(['up', '-d'], None)
assert set([s.name for s in self.project.services]) == {'mydb', 'myweb'} assert {s.name for s in self.project.services} == {'mydb', 'myweb'}
# Sort by name so we get [db, web] # Sort by name so we get [db, web]
containers = sorted( containers = sorted(

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import os import os
import shutil import shutil
import unittest import unittest

View File

@ -49,7 +49,7 @@ def create_custom_host_file(client, filename, content):
def create_host_file(client, filename): def create_host_file(client, filename):
with open(filename, 'r') as fh: with open(filename) as fh:
content = fh.read() content = fh.read()
return create_custom_host_file(client, filename, content) return create_custom_host_file(client, filename, content)

View File

@ -15,7 +15,7 @@ from tests.integration.testcases import DockerClientTestCase
class EnvironmentTest(DockerClientTestCase): class EnvironmentTest(DockerClientTestCase):
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
super(EnvironmentTest, cls).setUpClass() super().setUpClass()
cls.compose_file = tempfile.NamedTemporaryFile(mode='w+b') cls.compose_file = tempfile.NamedTemporaryFile(mode='w+b')
cls.compose_file.write(bytes("""version: '3.2' cls.compose_file.write(bytes("""version: '3.2'
services: services:
@ -27,7 +27,7 @@ services:
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
super(EnvironmentTest, cls).tearDownClass() super().tearDownClass()
cls.compose_file.close() cls.compose_file.close()
@data('events', @data('events',

View File

@ -289,19 +289,19 @@ class ProjectTest(DockerClientTestCase):
db_container = db.create_container() db_container = db.create_container()
project.start(service_names=['web']) project.start(service_names=['web'])
assert set(c.name for c in project.containers() if c.is_running) == { assert {c.name for c in project.containers() if c.is_running} == {
web_container_1.name, web_container_2.name} web_container_1.name, web_container_2.name}
project.start() project.start()
assert set(c.name for c in project.containers() if c.is_running) == { assert {c.name for c in project.containers() if c.is_running} == {
web_container_1.name, web_container_2.name, db_container.name} web_container_1.name, web_container_2.name, db_container.name}
project.pause(service_names=['web']) project.pause(service_names=['web'])
assert set([c.name for c in project.containers() if c.is_paused]) == { assert {c.name for c in project.containers() if c.is_paused} == {
web_container_1.name, web_container_2.name} web_container_1.name, web_container_2.name}
project.pause() project.pause()
assert set([c.name for c in project.containers() if c.is_paused]) == { assert {c.name for c in project.containers() if c.is_paused} == {
web_container_1.name, web_container_2.name, db_container.name} web_container_1.name, web_container_2.name, db_container.name}
project.unpause(service_names=['db']) project.unpause(service_names=['db'])
@ -311,7 +311,7 @@ class ProjectTest(DockerClientTestCase):
assert len([c.name for c in project.containers() if c.is_paused]) == 0 assert len([c.name for c in project.containers() if c.is_paused]) == 0
project.stop(service_names=['web'], timeout=1) project.stop(service_names=['web'], timeout=1)
assert set(c.name for c in project.containers() if c.is_running) == {db_container.name} assert {c.name for c in project.containers() if c.is_running} == {db_container.name}
project.kill(service_names=['db']) project.kill(service_names=['db'])
assert len([c for c in project.containers() if c.is_running]) == 0 assert len([c for c in project.containers() if c.is_running]) == 0
@ -1177,8 +1177,8 @@ class ProjectTest(DockerClientTestCase):
assert networks[0]['Labels']['label_key'] == 'label_val' assert networks[0]['Labels']['label_key'] == 'label_val'
def test_project_up_volumes(self): def test_project_up_volumes(self):
vol_name = '{0:x}'.format(random.getrandbits(32)) vol_name = '{:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name) full_vol_name = 'composetest_{}'.format(vol_name)
config_data = build_config( config_data = build_config(
services=[{ services=[{
'name': 'web', 'name': 'web',
@ -1232,9 +1232,9 @@ class ProjectTest(DockerClientTestCase):
if v['Name'].split('/')[-1].startswith('composetest_') if v['Name'].split('/')[-1].startswith('composetest_')
] ]
assert set([v['Name'].split('/')[-1] for v in volumes]) == set( assert {v['Name'].split('/')[-1] for v in volumes} == {
['composetest_{}'.format(volume_name)] 'composetest_{}'.format(volume_name)
) }
assert 'label_key' in volumes[0]['Labels'] assert 'label_key' in volumes[0]['Labels']
assert volumes[0]['Labels']['label_key'] == 'label_val' assert volumes[0]['Labels']['label_key'] == 'label_val'
@ -1348,8 +1348,8 @@ class ProjectTest(DockerClientTestCase):
assert len(project.containers()) == 3 assert len(project.containers()) == 3
def test_initialize_volumes(self): def test_initialize_volumes(self):
vol_name = '{0:x}'.format(random.getrandbits(32)) vol_name = '{:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name) full_vol_name = 'composetest_{}'.format(vol_name)
config_data = build_config( config_data = build_config(
services=[{ services=[{
'name': 'web', 'name': 'web',
@ -1370,8 +1370,8 @@ class ProjectTest(DockerClientTestCase):
assert volume_data['Driver'] == 'local' assert volume_data['Driver'] == 'local'
def test_project_up_implicit_volume_driver(self): def test_project_up_implicit_volume_driver(self):
vol_name = '{0:x}'.format(random.getrandbits(32)) vol_name = '{:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name) full_vol_name = 'composetest_{}'.format(vol_name)
config_data = build_config( config_data = build_config(
services=[{ services=[{
'name': 'web', 'name': 'web',
@ -1479,7 +1479,7 @@ class ProjectTest(DockerClientTestCase):
assert output == b"This is the secret\n" assert output == b"This is the secret\n"
def test_initialize_volumes_invalid_volume_driver(self): def test_initialize_volumes_invalid_volume_driver(self):
vol_name = '{0:x}'.format(random.getrandbits(32)) vol_name = '{:x}'.format(random.getrandbits(32))
config_data = build_config( config_data = build_config(
version=VERSION, version=VERSION,
@ -1500,8 +1500,8 @@ class ProjectTest(DockerClientTestCase):
@no_cluster('inspect volume by name defect on Swarm Classic') @no_cluster('inspect volume by name defect on Swarm Classic')
def test_initialize_volumes_updated_driver(self): def test_initialize_volumes_updated_driver(self):
vol_name = '{0:x}'.format(random.getrandbits(32)) vol_name = '{:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name) full_vol_name = 'composetest_{}'.format(vol_name)
config_data = build_config( config_data = build_config(
services=[{ services=[{
@ -1531,14 +1531,14 @@ class ProjectTest(DockerClientTestCase):
) )
with pytest.raises(config.ConfigurationError) as e: with pytest.raises(config.ConfigurationError) as e:
project.volumes.initialize() project.volumes.initialize()
assert 'Configuration for volume {0} specifies driver smb'.format( assert 'Configuration for volume {} specifies driver smb'.format(
vol_name vol_name
) in str(e.value) ) in str(e.value)
@no_cluster('inspect volume by name defect on Swarm Classic') @no_cluster('inspect volume by name defect on Swarm Classic')
def test_initialize_volumes_updated_driver_opts(self): def test_initialize_volumes_updated_driver_opts(self):
vol_name = '{0:x}'.format(random.getrandbits(32)) vol_name = '{:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name) full_vol_name = 'composetest_{}'.format(vol_name)
tmpdir = tempfile.mkdtemp(prefix='compose_test_') tmpdir = tempfile.mkdtemp(prefix='compose_test_')
self.addCleanup(shutil.rmtree, tmpdir) self.addCleanup(shutil.rmtree, tmpdir)
driver_opts = {'o': 'bind', 'device': tmpdir, 'type': 'none'} driver_opts = {'o': 'bind', 'device': tmpdir, 'type': 'none'}
@ -1575,13 +1575,13 @@ class ProjectTest(DockerClientTestCase):
) )
with pytest.raises(config.ConfigurationError) as e: with pytest.raises(config.ConfigurationError) as e:
project.volumes.initialize() project.volumes.initialize()
assert 'Configuration for volume {0} specifies "device" driver_opt {1}'.format( assert 'Configuration for volume {} specifies "device" driver_opt {}'.format(
vol_name, driver_opts['device'] vol_name, driver_opts['device']
) in str(e.value) ) in str(e.value)
def test_initialize_volumes_updated_blank_driver(self): def test_initialize_volumes_updated_blank_driver(self):
vol_name = '{0:x}'.format(random.getrandbits(32)) vol_name = '{:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name) full_vol_name = 'composetest_{}'.format(vol_name)
config_data = build_config( config_data = build_config(
services=[{ services=[{
@ -1617,8 +1617,8 @@ class ProjectTest(DockerClientTestCase):
@no_cluster('inspect volume by name defect on Swarm Classic') @no_cluster('inspect volume by name defect on Swarm Classic')
def test_initialize_volumes_external_volumes(self): def test_initialize_volumes_external_volumes(self):
# Use composetest_ prefix so it gets garbage-collected in tearDown() # Use composetest_ prefix so it gets garbage-collected in tearDown()
vol_name = 'composetest_{0:x}'.format(random.getrandbits(32)) vol_name = 'composetest_{:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name) full_vol_name = 'composetest_{}'.format(vol_name)
self.client.create_volume(vol_name) self.client.create_volume(vol_name)
config_data = build_config( config_data = build_config(
services=[{ services=[{
@ -1640,7 +1640,7 @@ class ProjectTest(DockerClientTestCase):
self.client.inspect_volume(full_vol_name) self.client.inspect_volume(full_vol_name)
def test_initialize_volumes_inexistent_external_volume(self): def test_initialize_volumes_inexistent_external_volume(self):
vol_name = '{0:x}'.format(random.getrandbits(32)) vol_name = '{:x}'.format(random.getrandbits(32))
config_data = build_config( config_data = build_config(
services=[{ services=[{
@ -1658,13 +1658,13 @@ class ProjectTest(DockerClientTestCase):
) )
with pytest.raises(config.ConfigurationError) as e: with pytest.raises(config.ConfigurationError) as e:
project.volumes.initialize() project.volumes.initialize()
assert 'Volume {0} declared as external'.format( assert 'Volume {} declared as external'.format(
vol_name vol_name
) in str(e.value) ) in str(e.value)
def test_project_up_named_volumes_in_binds(self): def test_project_up_named_volumes_in_binds(self):
vol_name = '{0:x}'.format(random.getrandbits(32)) vol_name = '{:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name) full_vol_name = 'composetest_{}'.format(vol_name)
base_file = config.ConfigFile( base_file = config.ConfigFile(
'base.yml', 'base.yml',
@ -1673,7 +1673,7 @@ class ProjectTest(DockerClientTestCase):
'simple': { 'simple': {
'image': BUSYBOX_IMAGE_WITH_TAG, 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top', 'command': 'top',
'volumes': ['{0}:/data'.format(vol_name)] 'volumes': ['{}:/data'.format(vol_name)]
}, },
}, },
'volumes': { 'volumes': {

View File

@ -22,7 +22,7 @@ class ResilienceTest(DockerClientTestCase):
def tearDown(self): def tearDown(self):
del self.project del self.project
del self.db del self.db
super(ResilienceTest, self).tearDown() super().tearDown()
def test_successful_recreate(self): def test_successful_recreate(self):
self.project.up(strategy=ConvergenceStrategy.always) self.project.up(strategy=ConvergenceStrategy.always)

View File

@ -248,7 +248,7 @@ class ServiceTest(DockerClientTestCase):
service = self.create_service('db', security_opt=security_opt) service = self.create_service('db', security_opt=security_opt)
container = service.create_container() container = service.create_container()
service.start_container(container) service.start_container(container)
assert set(container.get('HostConfig.SecurityOpt')) == set([o.repr() for o in security_opt]) assert set(container.get('HostConfig.SecurityOpt')) == {o.repr() for o in security_opt}
@pytest.mark.xfail(True, reason='Not supported on most drivers') @pytest.mark.xfail(True, reason='Not supported on most drivers')
def test_create_container_with_storage_opt(self): def test_create_container_with_storage_opt(self):
@ -290,7 +290,7 @@ class ServiceTest(DockerClientTestCase):
actual_host_path = container.get_mount(container_path)['Source'] actual_host_path = container.get_mount(container_path)['Source']
assert path.basename(actual_host_path) == path.basename(host_path), ( assert path.basename(actual_host_path) == path.basename(host_path), (
"Last component differs: %s, %s" % (actual_host_path, host_path) "Last component differs: {}, {}".format(actual_host_path, host_path)
) )
def test_create_container_with_host_mount(self): def test_create_container_with_host_mount(self):
@ -844,11 +844,11 @@ class ServiceTest(DockerClientTestCase):
db2 = create_and_start_container(db) db2 = create_and_start_container(db)
create_and_start_container(web) create_and_start_container(web)
assert set(get_links(web.containers()[0])) == set([ assert set(get_links(web.containers()[0])) == {
db1.name, db1.name_without_project, db1.name, db1.name_without_project,
db2.name, db2.name_without_project, db2.name, db2.name_without_project,
'db' 'db'
]) }
@no_cluster('No legacy links support in Swarm') @no_cluster('No legacy links support in Swarm')
def test_start_container_creates_links_with_names(self): def test_start_container_creates_links_with_names(self):
@ -859,11 +859,11 @@ class ServiceTest(DockerClientTestCase):
db2 = create_and_start_container(db) db2 = create_and_start_container(db)
create_and_start_container(web) create_and_start_container(web)
assert set(get_links(web.containers()[0])) == set([ assert set(get_links(web.containers()[0])) == {
db1.name, db1.name_without_project, db1.name, db1.name_without_project,
db2.name, db2.name_without_project, db2.name, db2.name_without_project,
'custom_link_name' 'custom_link_name'
]) }
@no_cluster('No legacy links support in Swarm') @no_cluster('No legacy links support in Swarm')
def test_start_container_with_external_links(self): def test_start_container_with_external_links(self):
@ -879,11 +879,11 @@ class ServiceTest(DockerClientTestCase):
create_and_start_container(web) create_and_start_container(web)
assert set(get_links(web.containers()[0])) == set([ assert set(get_links(web.containers()[0])) == {
db_ctnrs[0].name, db_ctnrs[0].name,
db_ctnrs[1].name, db_ctnrs[1].name,
'db_3' 'db_3'
]) }
@no_cluster('No legacy links support in Swarm') @no_cluster('No legacy links support in Swarm')
def test_start_normal_container_does_not_create_links_to_its_own_service(self): def test_start_normal_container_does_not_create_links_to_its_own_service(self):
@ -893,7 +893,7 @@ class ServiceTest(DockerClientTestCase):
create_and_start_container(db) create_and_start_container(db)
c = create_and_start_container(db) c = create_and_start_container(db)
assert set(get_links(c)) == set([]) assert set(get_links(c)) == set()
@no_cluster('No legacy links support in Swarm') @no_cluster('No legacy links support in Swarm')
def test_start_one_off_container_creates_links_to_its_own_service(self): def test_start_one_off_container_creates_links_to_its_own_service(self):
@ -904,11 +904,11 @@ class ServiceTest(DockerClientTestCase):
c = create_and_start_container(db, one_off=OneOffFilter.only) c = create_and_start_container(db, one_off=OneOffFilter.only)
assert set(get_links(c)) == set([ assert set(get_links(c)) == {
db1.name, db1.name_without_project, db1.name, db1.name_without_project,
db2.name, db2.name_without_project, db2.name, db2.name_without_project,
'db' 'db'
]) }
def test_start_container_builds_images(self): def test_start_container_builds_images(self):
service = Service( service = Service(
@ -1719,14 +1719,14 @@ class ServiceTest(DockerClientTestCase):
options = service._get_container_create_options({}, service._next_container_number()) options = service._get_container_create_options({}, service._next_container_number())
original = Container.create(service.client, **options) original = Container.create(service.client, **options)
assert set(service.containers(stopped=True)) == set([original]) assert set(service.containers(stopped=True)) == {original}
assert set(service.duplicate_containers()) == set() assert set(service.duplicate_containers()) == set()
options['name'] = 'temporary_container_name' options['name'] = 'temporary_container_name'
duplicate = Container.create(service.client, **options) duplicate = Container.create(service.client, **options)
assert set(service.containers(stopped=True)) == set([original, duplicate]) assert set(service.containers(stopped=True)) == {original, duplicate}
assert set(service.duplicate_containers()) == set([duplicate]) assert set(service.duplicate_containers()) == {duplicate}
def converge(service, strategy=ConvergenceStrategy.changed): def converge(service, strategy=ConvergenceStrategy.changed):

View File

@ -39,7 +39,7 @@ class ProjectTestCase(DockerClientTestCase):
class BasicProjectTest(ProjectTestCase): class BasicProjectTest(ProjectTestCase):
def setUp(self): def setUp(self):
super(BasicProjectTest, self).setUp() super().setUp()
self.cfg = { self.cfg = {
'db': {'image': BUSYBOX_IMAGE_WITH_TAG, 'command': 'top'}, 'db': {'image': BUSYBOX_IMAGE_WITH_TAG, 'command': 'top'},
@ -95,7 +95,7 @@ class BasicProjectTest(ProjectTestCase):
class ProjectWithDependenciesTest(ProjectTestCase): class ProjectWithDependenciesTest(ProjectTestCase):
def setUp(self): def setUp(self):
super(ProjectWithDependenciesTest, self).setUp() super().setUp()
self.cfg = { self.cfg = {
'db': { 'db': {
@ -116,7 +116,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
def test_up(self): def test_up(self):
containers = self.run_up(self.cfg) containers = self.run_up(self.cfg)
assert set(c.service for c in containers) == set(['db', 'web', 'nginx']) assert {c.service for c in containers} == {'db', 'web', 'nginx'}
def test_change_leaf(self): def test_change_leaf(self):
old_containers = self.run_up(self.cfg) old_containers = self.run_up(self.cfg)
@ -124,7 +124,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
self.cfg['nginx']['environment'] = {'NEW_VAR': '1'} self.cfg['nginx']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg) new_containers = self.run_up(self.cfg)
assert set(c.service for c in new_containers - old_containers) == set(['nginx']) assert {c.service for c in new_containers - old_containers} == {'nginx'}
def test_change_middle(self): def test_change_middle(self):
old_containers = self.run_up(self.cfg) old_containers = self.run_up(self.cfg)
@ -132,7 +132,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
self.cfg['web']['environment'] = {'NEW_VAR': '1'} self.cfg['web']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg) new_containers = self.run_up(self.cfg)
assert set(c.service for c in new_containers - old_containers) == set(['web']) assert {c.service for c in new_containers - old_containers} == {'web'}
def test_change_middle_always_recreate_deps(self): def test_change_middle_always_recreate_deps(self):
old_containers = self.run_up(self.cfg, always_recreate_deps=True) old_containers = self.run_up(self.cfg, always_recreate_deps=True)
@ -140,7 +140,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
self.cfg['web']['environment'] = {'NEW_VAR': '1'} self.cfg['web']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg, always_recreate_deps=True) new_containers = self.run_up(self.cfg, always_recreate_deps=True)
assert set(c.service for c in new_containers - old_containers) == {'web', 'nginx'} assert {c.service for c in new_containers - old_containers} == {'web', 'nginx'}
def test_change_root(self): def test_change_root(self):
old_containers = self.run_up(self.cfg) old_containers = self.run_up(self.cfg)
@ -148,7 +148,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
self.cfg['db']['environment'] = {'NEW_VAR': '1'} self.cfg['db']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg) new_containers = self.run_up(self.cfg)
assert set(c.service for c in new_containers - old_containers) == set(['db']) assert {c.service for c in new_containers - old_containers} == {'db'}
def test_change_root_always_recreate_deps(self): def test_change_root_always_recreate_deps(self):
old_containers = self.run_up(self.cfg, always_recreate_deps=True) old_containers = self.run_up(self.cfg, always_recreate_deps=True)
@ -156,7 +156,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
self.cfg['db']['environment'] = {'NEW_VAR': '1'} self.cfg['db']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg, always_recreate_deps=True) new_containers = self.run_up(self.cfg, always_recreate_deps=True)
assert set(c.service for c in new_containers - old_containers) == { assert {c.service for c in new_containers - old_containers} == {
'db', 'web', 'nginx' 'db', 'web', 'nginx'
} }
@ -213,7 +213,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
class ProjectWithDependsOnDependenciesTest(ProjectTestCase): class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
def setUp(self): def setUp(self):
super(ProjectWithDependsOnDependenciesTest, self).setUp() super().setUp()
self.cfg = { self.cfg = {
'version': '2', 'version': '2',
@ -238,7 +238,7 @@ class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
def test_up(self): def test_up(self):
local_cfg = copy.deepcopy(self.cfg) local_cfg = copy.deepcopy(self.cfg)
containers = self.run_up(local_cfg) containers = self.run_up(local_cfg)
assert set(c.service for c in containers) == set(['db', 'web', 'nginx']) assert {c.service for c in containers} == {'db', 'web', 'nginx'}
def test_change_leaf(self): def test_change_leaf(self):
local_cfg = copy.deepcopy(self.cfg) local_cfg = copy.deepcopy(self.cfg)
@ -247,7 +247,7 @@ class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
local_cfg['services']['nginx']['environment'] = {'NEW_VAR': '1'} local_cfg['services']['nginx']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(local_cfg) new_containers = self.run_up(local_cfg)
assert set(c.service for c in new_containers - old_containers) == set(['nginx']) assert {c.service for c in new_containers - old_containers} == {'nginx'}
def test_change_middle(self): def test_change_middle(self):
local_cfg = copy.deepcopy(self.cfg) local_cfg = copy.deepcopy(self.cfg)
@ -256,7 +256,7 @@ class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
local_cfg['services']['web']['environment'] = {'NEW_VAR': '1'} local_cfg['services']['web']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(local_cfg) new_containers = self.run_up(local_cfg)
assert set(c.service for c in new_containers - old_containers) == set(['web']) assert {c.service for c in new_containers - old_containers} == {'web'}
def test_change_middle_always_recreate_deps(self): def test_change_middle_always_recreate_deps(self):
local_cfg = copy.deepcopy(self.cfg) local_cfg = copy.deepcopy(self.cfg)
@ -265,7 +265,7 @@ class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
local_cfg['services']['web']['environment'] = {'NEW_VAR': '1'} local_cfg['services']['web']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(local_cfg, always_recreate_deps=True) new_containers = self.run_up(local_cfg, always_recreate_deps=True)
assert set(c.service for c in new_containers - old_containers) == set(['web', 'nginx']) assert {c.service for c in new_containers - old_containers} == {'web', 'nginx'}
def test_change_root(self): def test_change_root(self):
local_cfg = copy.deepcopy(self.cfg) local_cfg = copy.deepcopy(self.cfg)
@ -274,7 +274,7 @@ class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
local_cfg['services']['db']['environment'] = {'NEW_VAR': '1'} local_cfg['services']['db']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(local_cfg) new_containers = self.run_up(local_cfg)
assert set(c.service for c in new_containers - old_containers) == set(['db']) assert {c.service for c in new_containers - old_containers} == {'db'}
def test_change_root_always_recreate_deps(self): def test_change_root_always_recreate_deps(self):
local_cfg = copy.deepcopy(self.cfg) local_cfg = copy.deepcopy(self.cfg)
@ -283,7 +283,7 @@ class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
local_cfg['services']['db']['environment'] = {'NEW_VAR': '1'} local_cfg['services']['db']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(local_cfg, always_recreate_deps=True) new_containers = self.run_up(local_cfg, always_recreate_deps=True)
assert set(c.service for c in new_containers - old_containers) == set(['db', 'web', 'nginx']) assert {c.service for c in new_containers - old_containers} == {'db', 'web', 'nginx'}
def test_change_root_no_recreate(self): def test_change_root_no_recreate(self):
local_cfg = copy.deepcopy(self.cfg) local_cfg = copy.deepcopy(self.cfg)
@ -303,24 +303,24 @@ class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
del next_cfg['services']['web']['depends_on'] del next_cfg['services']['web']['depends_on']
containers = self.run_up(local_cfg) containers = self.run_up(local_cfg)
assert set(c.service for c in containers) == set(['db', 'web', 'nginx']) assert {c.service for c in containers} == {'db', 'web', 'nginx'}
project = self.make_project(local_cfg) project = self.make_project(local_cfg)
project.stop(timeout=1) project.stop(timeout=1)
next_containers = self.run_up(next_cfg) next_containers = self.run_up(next_cfg)
assert set(c.service for c in next_containers) == set(['web', 'nginx']) assert {c.service for c in next_containers} == {'web', 'nginx'}
def test_service_removed_while_up(self): def test_service_removed_while_up(self):
local_cfg = copy.deepcopy(self.cfg) local_cfg = copy.deepcopy(self.cfg)
containers = self.run_up(local_cfg) containers = self.run_up(local_cfg)
assert set(c.service for c in containers) == set(['db', 'web', 'nginx']) assert {c.service for c in containers} == {'db', 'web', 'nginx'}
del local_cfg['services']['db'] del local_cfg['services']['db']
del local_cfg['services']['web']['depends_on'] del local_cfg['services']['web']['depends_on']
containers = self.run_up(local_cfg) containers = self.run_up(local_cfg)
assert set(c.service for c in containers) == set(['web', 'nginx']) assert {c.service for c in containers} == {'web', 'nginx'}
def test_dependency_removed(self): def test_dependency_removed(self):
local_cfg = copy.deepcopy(self.cfg) local_cfg = copy.deepcopy(self.cfg)
@ -328,24 +328,24 @@ class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
del next_cfg['services']['nginx']['depends_on'] del next_cfg['services']['nginx']['depends_on']
containers = self.run_up(local_cfg, service_names=['nginx']) containers = self.run_up(local_cfg, service_names=['nginx'])
assert set(c.service for c in containers) == set(['db', 'web', 'nginx']) assert {c.service for c in containers} == {'db', 'web', 'nginx'}
project = self.make_project(local_cfg) project = self.make_project(local_cfg)
project.stop(timeout=1) project.stop(timeout=1)
next_containers = self.run_up(next_cfg, service_names=['nginx']) next_containers = self.run_up(next_cfg, service_names=['nginx'])
assert set(c.service for c in next_containers if c.is_running) == set(['nginx']) assert {c.service for c in next_containers if c.is_running} == {'nginx'}
def test_dependency_added(self): def test_dependency_added(self):
local_cfg = copy.deepcopy(self.cfg) local_cfg = copy.deepcopy(self.cfg)
del local_cfg['services']['nginx']['depends_on'] del local_cfg['services']['nginx']['depends_on']
containers = self.run_up(local_cfg, service_names=['nginx']) containers = self.run_up(local_cfg, service_names=['nginx'])
assert set(c.service for c in containers) == set(['nginx']) assert {c.service for c in containers} == {'nginx'}
local_cfg['services']['nginx']['depends_on'] = ['db'] local_cfg['services']['nginx']['depends_on'] = ['db']
containers = self.run_up(local_cfg, service_names=['nginx']) containers = self.run_up(local_cfg, service_names=['nginx'])
assert set(c.service for c in containers) == set(['nginx', 'db']) assert {c.service for c in containers} == {'nginx', 'db'}
class ServiceStateTest(DockerClientTestCase): class ServiceStateTest(DockerClientTestCase):

View File

@ -18,7 +18,7 @@ class VolumeTest(DockerClientTestCase):
except DockerException: except DockerException:
pass pass
del self.tmp_volumes del self.tmp_volumes
super(VolumeTest, self).tearDown() super().tearDown()
def create_volume(self, name, driver=None, opts=None, external=None, custom_name=False): def create_volume(self, name, driver=None, opts=None, external=None, custom_name=False):
if external: if external:

View File

@ -1,4 +1,3 @@
# ~*~ encoding: utf-8 ~*~
import os import os
import pytest import pytest
@ -9,7 +8,7 @@ from compose.const import IS_WINDOWS_PLATFORM
from tests import mock from tests import mock
class TestGetConfigPathFromOptions(object): class TestGetConfigPathFromOptions:
def test_path_from_options(self): def test_path_from_options(self):
paths = ['one.yml', 'two.yml'] paths = ['one.yml', 'two.yml']

View File

@ -55,7 +55,7 @@ class DockerClientTestCase(unittest.TestCase):
def test_user_agent(self): def test_user_agent(self):
client = docker_client(os.environ) client = docker_client(os.environ)
expected = "docker-compose/{0} docker-py/{1} {2}/{3}".format( expected = "docker-compose/{} docker-py/{} {}/{}".format(
compose.__version__, compose.__version__,
docker.__version__, docker.__version__,
platform.system(), platform.system(),
@ -151,9 +151,9 @@ class TLSConfigTestCase(unittest.TestCase):
def test_tls_client_and_ca_quoted_paths(self): def test_tls_client_and_ca_quoted_paths(self):
options = { options = {
'--tlscacert': '"{0}"'.format(self.ca_cert), '--tlscacert': '"{}"'.format(self.ca_cert),
'--tlscert': '"{0}"'.format(self.client_cert), '--tlscert': '"{}"'.format(self.client_cert),
'--tlskey': '"{0}"'.format(self.key), '--tlskey': '"{}"'.format(self.key),
'--tlsverify': True '--tlsverify': True
} }
result = tls_config_from_options(options) result = tls_config_from_options(options)
@ -185,9 +185,9 @@ class TLSConfigTestCase(unittest.TestCase):
'DOCKER_TLS_VERIFY': 'false' 'DOCKER_TLS_VERIFY': 'false'
}) })
options = { options = {
'--tlscacert': '"{0}"'.format(self.ca_cert), '--tlscacert': '"{}"'.format(self.ca_cert),
'--tlscert': '"{0}"'.format(self.client_cert), '--tlscert': '"{}"'.format(self.client_cert),
'--tlskey': '"{0}"'.format(self.key), '--tlskey': '"{}"'.format(self.key),
'--tlsverify': True '--tlsverify': True
} }
@ -230,7 +230,7 @@ class TLSConfigTestCase(unittest.TestCase):
assert result.cert == (self.client_cert, self.key) assert result.cert == (self.client_cert, self.key)
class TestGetTlsVersion(object): class TestGetTlsVersion:
def test_get_tls_version_default(self): def test_get_tls_version_default(self):
environment = {} environment = {}
assert get_tls_version(environment) is None assert get_tls_version(environment) is None

View File

@ -21,7 +21,7 @@ def patch_find_executable(side_effect):
side_effect=side_effect) side_effect=side_effect)
class TestHandleConnectionErrors(object): class TestHandleConnectionErrors:
def test_generic_connection_error(self, mock_logging): def test_generic_connection_error(self, mock_logging):
with pytest.raises(errors.ConnectionError): with pytest.raises(errors.ConnectionError):
@ -43,7 +43,7 @@ class TestHandleConnectionErrors(object):
def test_api_error_version_mismatch_unicode_explanation(self, mock_logging): def test_api_error_version_mismatch_unicode_explanation(self, mock_logging):
with pytest.raises(errors.ConnectionError): with pytest.raises(errors.ConnectionError):
with handle_connection_errors(mock.Mock(api_version='1.38')): with handle_connection_errors(mock.Mock(api_version='1.38')):
raise APIError(None, None, u"client is newer than server") raise APIError(None, None, "client is newer than server")
_, args, _ = mock_logging.error.mock_calls[0] _, args, _ = mock_logging.error.mock_calls[0]
assert "Docker Engine of version 18.06.0 or greater" in args[0] assert "Docker Engine of version 18.06.0 or greater" in args[0]
@ -57,7 +57,7 @@ class TestHandleConnectionErrors(object):
mock_logging.error.assert_called_once_with(msg.decode('utf-8')) mock_logging.error.assert_called_once_with(msg.decode('utf-8'))
def test_api_error_version_other_unicode_explanation(self, mock_logging): def test_api_error_version_other_unicode_explanation(self, mock_logging):
msg = u"Something broke!" msg = "Something broke!"
with pytest.raises(errors.ConnectionError): with pytest.raises(errors.ConnectionError):
with handle_connection_errors(mock.Mock(api_version='1.22')): with handle_connection_errors(mock.Mock(api_version='1.22')):
raise APIError(None, None, msg) raise APIError(None, None, msg)

View File

@ -40,10 +40,10 @@ class ConsoleWarningFormatterTestCase(unittest.TestCase):
message = b'\xec\xa0\x95\xec\x88\x98\xec\xa0\x95' message = b'\xec\xa0\x95\xec\x88\x98\xec\xa0\x95'
output = self.formatter.format(make_log_record(logging.WARN, message)) output = self.formatter.format(make_log_record(logging.WARN, message))
expected = colors.yellow('WARNING') + ': ' expected = colors.yellow('WARNING') + ': '
assert output == '{0}{1}'.format(expected, message.decode('utf-8')) assert output == '{}{}'.format(expected, message.decode('utf-8'))
def test_format_unicode_error(self): def test_format_unicode_error(self):
message = b'\xec\xa0\x95\xec\x88\x98\xec\xa0\x95' message = b'\xec\xa0\x95\xec\x88\x98\xec\xa0\x95'
output = self.formatter.format(make_log_record(logging.ERROR, message)) output = self.formatter.format(make_log_record(logging.ERROR, message))
expected = colors.red('ERROR') + ': ' expected = colors.red('ERROR') + ': '
assert output == '{0}{1}'.format(expected, message.decode('utf-8')) assert output == '{}{}'.format(expected, message.decode('utf-8'))

View File

@ -29,7 +29,7 @@ def mock_container():
return mock.Mock(spec=Container, name_without_project='web_1') return mock.Mock(spec=Container, name_without_project='web_1')
class TestLogPresenter(object): class TestLogPresenter:
def test_monochrome(self, mock_container): def test_monochrome(self, mock_container):
presenters = build_log_presenters(['foo', 'bar'], True) presenters = build_log_presenters(['foo', 'bar'], True)
@ -83,7 +83,7 @@ def test_build_no_log_generator(mock_container):
assert "exited with code" not in output assert "exited with code" not in output
class TestBuildLogGenerator(object): class TestBuildLogGenerator:
def test_no_log_stream(self, mock_container): def test_no_log_stream(self, mock_container):
mock_container.log_stream = None mock_container.log_stream = None
@ -108,7 +108,7 @@ class TestBuildLogGenerator(object):
assert next(generator) == "world" assert next(generator) == "world"
def test_unicode(self, output_stream): def test_unicode(self, output_stream):
glyph = u'\u2022\n' glyph = '\u2022\n'
mock_container.log_stream = iter([glyph.encode('utf-8')]) mock_container.log_stream = iter([glyph.encode('utf-8')])
generator = build_log_generator(mock_container, {}) generator = build_log_generator(mock_container, {})
@ -125,7 +125,7 @@ def mock_presenters():
return itertools.cycle([mock.Mock()]) return itertools.cycle([mock.Mock()])
class TestWatchEvents(object): class TestWatchEvents:
def test_stop_event(self, thread_map, mock_presenters): def test_stop_event(self, thread_map, mock_presenters):
event_stream = [{'action': 'stop', 'id': 'cid'}] event_stream = [{'action': 'stop', 'id': 'cid'}]
@ -167,7 +167,7 @@ class TestWatchEvents(object):
assert container_id not in thread_map assert container_id not in thread_map
class TestConsumeQueue(object): class TestConsumeQueue:
def test_item_is_an_exception(self): def test_item_is_an_exception(self):

View File

@ -22,7 +22,7 @@ def mock_container(service, number):
container.Container, container.Container,
service=service, service=service,
number=number, number=number,
name_without_project='{0}_{1}'.format(service, number)) name_without_project='{}_{}'.format(service, number))
@pytest.fixture @pytest.fixture
@ -32,7 +32,7 @@ def logging_handler():
return logging.StreamHandler(stream=stream) return logging.StreamHandler(stream=stream)
class TestCLIMainTestCase(object): class TestCLIMainTestCase:
def test_filter_attached_containers(self): def test_filter_attached_containers(self):
containers = [ containers = [
@ -135,7 +135,7 @@ class TestCLIMainTestCase(object):
assert expected_docker_start_call == docker_start_call assert expected_docker_start_call == docker_start_call
class TestSetupConsoleHandlerTestCase(object): class TestSetupConsoleHandlerTestCase:
def test_with_tty_verbose(self, logging_handler): def test_with_tty_verbose(self, logging_handler):
setup_console_handler(logging_handler, True) setup_console_handler(logging_handler, True)
@ -155,7 +155,7 @@ class TestSetupConsoleHandlerTestCase(object):
assert type(logging_handler.formatter) == logging.Formatter assert type(logging_handler.formatter) == logging.Formatter
class TestConvergeStrategyFromOptsTestCase(object): class TestConvergeStrategyFromOptsTestCase:
def test_invalid_opts(self): def test_invalid_opts(self):
options = {'--force-recreate': True, '--no-recreate': True} options = {'--force-recreate': True, '--no-recreate': True}
@ -189,7 +189,7 @@ def mock_find_executable(exe):
@mock.patch('compose.cli.main.find_executable', mock_find_executable) @mock.patch('compose.cli.main.find_executable', mock_find_executable)
class TestCallDocker(object): class TestCallDocker:
def test_simple_no_options(self): def test_simple_no_options(self):
with mock.patch('subprocess.call') as fake_call: with mock.patch('subprocess.call') as fake_call:
call_docker(['ps'], {}, {}) call_docker(['ps'], {}, {})

View File

@ -1,4 +1,3 @@
# encoding: utf-8
import os import os
import shutil import shutil
import tempfile import tempfile

View File

@ -1,4 +1,3 @@
# encoding: utf-8
import codecs import codecs
import os import os
import shutil import shutil
@ -3885,12 +3884,12 @@ class VolumeConfigTest(unittest.TestCase):
assert d['volumes'] == ['~:/data'] assert d['volumes'] == ['~:/data']
def test_volume_path_with_non_ascii_directory(self): def test_volume_path_with_non_ascii_directory(self):
volume = u'/Füü/data:/data' volume = '/Füü/data:/data'
container_path = config.resolve_volume_path(".", volume) container_path = config.resolve_volume_path(".", volume)
assert container_path == volume assert container_path == volume
class MergePathMappingTest(object): class MergePathMappingTest:
config_name = "" config_name = ""
def test_empty(self): def test_empty(self):
@ -3963,7 +3962,7 @@ class BuildOrImageMergeTest(unittest.TestCase):
assert config.merge_service_dicts({'image': 'redis'}, {'build': '.'}, V1) == {'build': '.'} assert config.merge_service_dicts({'image': 'redis'}, {'build': '.'}, V1) == {'build': '.'}
class MergeListsTest(object): class MergeListsTest:
config_name = "" config_name = ""
base_config = [] base_config = []
override_config = [] override_config = []
@ -4396,7 +4395,7 @@ class EnvTest(unittest.TestCase):
{'env_file': ['tests/fixtures/env/resolve.env']}, {'env_file': ['tests/fixtures/env/resolve.env']},
Environment.from_env_file(None) Environment.from_env_file(None)
) == { ) == {
'FILE_DEF': u'bär', 'FILE_DEF': 'bär',
'FILE_DEF_EMPTY': '', 'FILE_DEF_EMPTY': '',
'ENV_DEF': 'E3', 'ENV_DEF': 'E3',
'NO_DEF': None 'NO_DEF': None
@ -5042,14 +5041,14 @@ class VolumePathTest(unittest.TestCase):
container_path = 'c:\\scarletdevil\\data' container_path = 'c:\\scarletdevil\\data'
expected_mapping = (container_path, (host_path, None)) expected_mapping = (container_path, (host_path, None))
mapping = config.split_path_mapping('{0}:{1}'.format(host_path, container_path)) mapping = config.split_path_mapping('{}:{}'.format(host_path, container_path))
assert mapping == expected_mapping assert mapping == expected_mapping
def test_split_path_mapping_with_root_mount(self): def test_split_path_mapping_with_root_mount(self):
host_path = '/' host_path = '/'
container_path = '/var/hostroot' container_path = '/var/hostroot'
expected_mapping = (container_path, (host_path, None)) expected_mapping = (container_path, (host_path, None))
mapping = config.split_path_mapping('{0}:{1}'.format(host_path, container_path)) mapping = config.split_path_mapping('{}:{}'.format(host_path, container_path))
assert mapping == expected_mapping assert mapping == expected_mapping

View File

@ -1,4 +1,3 @@
# encoding: utf-8
import codecs import codecs
import os import os
import shutil import shutil

View File

@ -1,4 +1,3 @@
# encoding: utf-8
import pytest import pytest
from compose.config.environment import Environment from compose.config.environment import Environment
@ -439,7 +438,7 @@ def test_unbraced_separators(defaults_interpolator):
def test_interpolate_unicode_values(): def test_interpolate_unicode_values():
variable_mapping = { variable_mapping = {
'FOO': '十六夜 咲夜'.encode('utf-8'), 'FOO': '十六夜 咲夜'.encode(),
'BAR': '十六夜 咲夜' 'BAR': '十六夜 咲夜'
} }
interpol = Interpolator(TemplateWithDefaults, variable_mapping).interpolate interpol = Interpolator(TemplateWithDefaults, variable_mapping).interpolate

View File

@ -5,7 +5,7 @@ from compose.config.sort_services import sort_service_dicts
from compose.config.types import VolumeFromSpec from compose.config.types import VolumeFromSpec
class TestSortService(object): class TestSortService:
def test_sort_service_dicts_1(self): def test_sort_service_dicts_1(self):
services = [ services = [
{ {

View File

@ -39,7 +39,7 @@ def test_parse_extra_hosts_dict():
} }
class TestServicePort(object): class TestServicePort:
def test_parse_dict(self): def test_parse_dict(self):
data = { data = {
'target': 8000, 'target': 8000,
@ -129,7 +129,7 @@ class TestServicePort(object):
ServicePort.parse(port_def) ServicePort.parse(port_def)
class TestVolumeSpec(object): class TestVolumeSpec:
def test_parse_volume_spec_only_one_path(self): def test_parse_volume_spec_only_one_path(self):
spec = VolumeSpec.parse('/the/volume') spec = VolumeSpec.parse('/the/volume')
@ -216,7 +216,7 @@ class TestVolumeSpec(object):
) )
class TestVolumesFromSpec(object): class TestVolumesFromSpec:
services = ['servicea', 'serviceb'] services = ['servicea', 'serviceb']

View File

@ -1,5 +1,3 @@
# ~*~ encoding: utf-8 ~*~
import io
import os import os
import random import random
import shutil import shutil
@ -75,7 +73,7 @@ class ProgressStreamTestCase(unittest.TestCase):
def mktempfile(encoding): def mktempfile(encoding):
fname = os.path.join(tmpdir, hex(random.getrandbits(128))[2:-1]) fname = os.path.join(tmpdir, hex(random.getrandbits(128))[2:-1])
return io.open(fname, mode='w+', encoding=encoding) return open(fname, mode='w+', encoding=encoding)
text = '就吃饭' text = '就吃饭'
with mktempfile(encoding='utf-8') as tf: with mktempfile(encoding='utf-8') as tf:

View File

@ -1,4 +1,3 @@
# encoding: utf-8
import datetime import datetime
import os import os
import tempfile import tempfile
@ -739,7 +738,7 @@ class ProjectTest(unittest.TestCase):
assert fake_log.warn.call_count == 0 assert fake_log.warn.call_count == 0
def test_no_such_service_unicode(self): def test_no_such_service_unicode(self):
assert NoSuchService('十六夜 咲夜'.encode('utf-8')).msg == 'No such service: 十六夜 咲夜' assert NoSuchService('十六夜 咲夜'.encode()).msg == 'No such service: 十六夜 咲夜'
assert NoSuchService('十六夜 咲夜').msg == 'No such service: 十六夜 咲夜' assert NoSuchService('十六夜 咲夜').msg == 'No such service: 十六夜 咲夜'
def test_project_platform_value(self): def test_project_platform_value(self):

View File

@ -63,9 +63,9 @@ class ServiceTest(unittest.TestCase):
assert [c.id for c in service.containers()] == list(range(3)) assert [c.id for c in service.containers()] == list(range(3))
expected_labels = [ expected_labels = [
'{0}=myproject'.format(LABEL_PROJECT), '{}=myproject'.format(LABEL_PROJECT),
'{0}=db'.format(LABEL_SERVICE), '{}=db'.format(LABEL_SERVICE),
'{0}=False'.format(LABEL_ONE_OFF), '{}=False'.format(LABEL_ONE_OFF),
] ]
self.mock_client.containers.assert_called_once_with( self.mock_client.containers.assert_called_once_with(

View File

@ -36,7 +36,7 @@ class SplitBufferTest(unittest.TestCase):
self.assert_produces(reader, ['abc\n', 'd']) self.assert_produces(reader, ['abc\n', 'd'])
def test_preserves_unicode_sequences_within_lines(self): def test_preserves_unicode_sequences_within_lines(self):
string = u"a\u2022c\n" string = "a\u2022c\n"
def reader(): def reader():
yield string.encode('utf-8') yield string.encode('utf-8')

View File

@ -1,8 +1,7 @@
# encoding: utf-8
from compose import utils from compose import utils
class TestJsonSplitter(object): class TestJsonSplitter:
def test_json_splitter_no_object(self): def test_json_splitter_no_object(self):
data = '{"foo": "bar' data = '{"foo": "bar'
@ -17,7 +16,7 @@ class TestJsonSplitter(object):
assert utils.json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}') assert utils.json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}')
class TestStreamAsText(object): class TestStreamAsText:
def test_stream_with_non_utf_unicode_character(self): def test_stream_with_non_utf_unicode_character(self):
stream = [b'\xed\xf3\xf3'] stream = [b'\xed\xf3\xf3']
@ -25,12 +24,12 @@ class TestStreamAsText(object):
assert output == '<EFBFBD><EFBFBD><EFBFBD>' assert output == '<EFBFBD><EFBFBD><EFBFBD>'
def test_stream_with_utf_character(self): def test_stream_with_utf_character(self):
stream = ['ěĝ'.encode('utf-8')] stream = ['ěĝ'.encode()]
output, = utils.stream_as_text(stream) output, = utils.stream_as_text(stream)
assert output == 'ěĝ' assert output == 'ěĝ'
class TestJsonStream(object): class TestJsonStream:
def test_with_falsy_entries(self): def test_with_falsy_entries(self):
stream = [ stream = [
@ -59,7 +58,7 @@ class TestJsonStream(object):
] ]
class TestParseBytes(object): class TestParseBytes:
def test_parse_bytes(self): def test_parse_bytes(self):
assert utils.parse_bytes('123kb') == 123 * 1024 assert utils.parse_bytes('123kb') == 123 * 1024
assert utils.parse_bytes(123) == 123 assert utils.parse_bytes(123) == 123
@ -67,7 +66,7 @@ class TestParseBytes(object):
assert utils.parse_bytes('123') == 123 assert utils.parse_bytes('123') == 123
class TestMoreItertools(object): class TestMoreItertools:
def test_unique_everseen(self): def test_unique_everseen(self):
unique = utils.unique_everseen unique = utils.unique_everseen
assert list(unique([2, 1, 2, 1])) == [2, 1] assert list(unique([2, 1, 2, 1])) == [2, 1]

View File

@ -10,7 +10,7 @@ def mock_client():
return mock.create_autospec(docker.APIClient) return mock.create_autospec(docker.APIClient)
class TestVolume(object): class TestVolume:
def test_remove_local_volume(self, mock_client): def test_remove_local_volume(self, mock_client):
vol = volume.Volume(mock_client, 'foo', 'project') vol = volume.Volume(mock_client, 'foo', 'project')