Merge pull request #6286 from docker/bump-1.23.0-rc3

Bump 1.23.0-rc3
Joffrey F 2018-10-17 14:48:37 -07:00 committed by GitHub
commit 7bd4291f90
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 217 additions and 123 deletions

View File

@@ -1,11 +1,13 @@
*.egg-info
.coverage
.git
.github
.tox
build
binaries
coverage-html
docs/_site
venv
*venv
.tox
**/__pycache__
*.pyc

.gitignore vendored (18 lines changed)
View File

@@ -1,16 +1,18 @@
*.egg-info
*.pyc
*.swo
*.swp
.cache
.coverage*
.DS_Store
.idea
/.tox
/binaries
/build
/compose/GITSHA
/coverage-html
/dist
/docs/_site
/venv
README.rst
compose/GITSHA
*.swo
*.swp
.DS_Store
.cache
.idea
/README.rst
/*venv

View File

@@ -48,9 +48,16 @@ naming scheme accordingly before upgrading.
the actual exit code even when the watched container isn't the cause of the
exit.
- Fixed an issue that would prevent recreating a service in some cases where
a volume would be mapped to the same mountpoint as a volume declared inside
the image's Dockerfile.
- Fixed a bug that caused the configuration hash for services with multiple
networks to be computed inconsistently, causing some services to be unnecessarily restarted.
- Fixed a bug that would cause failures with variable substitution for services
with a name containing one or more dot characters.
- Fixed a pipe handling issue when using the containerized version of Compose.
- Fixed a bug causing `external: false` entries in the Compose file to be

View File

@@ -1,20 +1,14 @@
FROM docker:18.06.1 as docker
FROM python:3.6
RUN set -ex; \
apt-get update -qq; \
apt-get install -y \
locales \
curl \
python-dev \
git
RUN curl -fsSL -o dockerbins.tgz "https://download.docker.com/linux/static/stable/x86_64/docker-17.12.0-ce.tgz" && \
SHA256=692e1c72937f6214b1038def84463018d8e320c8eaf8530546c84c2f8f9c767d; \
echo "${SHA256} dockerbins.tgz" | sha256sum -c - && \
tar xvf dockerbins.tgz docker/docker --strip-components 1 && \
mv docker /usr/local/bin/docker && \
chmod +x /usr/local/bin/docker && \
rm dockerbins.tgz
COPY --from=docker /usr/local/bin/docker /usr/local/bin/docker
# Python3 requires a valid locale
RUN echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && locale-gen

View File

@@ -1,7 +1,7 @@
FROM alpine:3.6
FROM docker:18.06.1 as docker
FROM alpine:3.8
ENV GLIBC 2.27-r0
ENV DOCKERBINS_SHA 1270dce1bd7e1838d62ae21d2505d87f16efc1d9074645571daaefdfd0c14054
ENV GLIBC 2.28-r0
RUN apk update && apk add --no-cache openssl ca-certificates curl libgcc && \
curl -fsSL -o /etc/apk/keys/sgerrand.rsa.pub https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub && \
@@ -10,14 +10,10 @@ RUN apk update && apk add --no-cache openssl ca-certificates curl libgcc && \
ln -s /lib/libz.so.1 /usr/glibc-compat/lib/ && \
ln -s /lib/libc.musl-x86_64.so.1 /usr/glibc-compat/lib && \
ln -s /usr/lib/libgcc_s.so.1 /usr/glibc-compat/lib && \
curl -fsSL -o dockerbins.tgz "https://download.docker.com/linux/static/stable/x86_64/docker-17.12.1-ce.tgz" && \
echo "${DOCKERBINS_SHA} dockerbins.tgz" | sha256sum -c - && \
tar xvf dockerbins.tgz docker/docker --strip-components 1 && \
mv docker /usr/local/bin/docker && \
chmod +x /usr/local/bin/docker && \
rm dockerbins.tgz /etc/apk/keys/sgerrand.rsa.pub glibc-$GLIBC.apk && \
rm /etc/apk/keys/sgerrand.rsa.pub glibc-$GLIBC.apk && \
apk del curl
COPY --from=docker /usr/local/bin/docker /usr/local/bin/docker
COPY dist/docker-compose-Linux-x86_64 /usr/local/bin/docker-compose
ENTRYPOINT ["docker-compose"]

View File

@@ -1,4 +1,4 @@
from __future__ import absolute_import
from __future__ import unicode_literals
__version__ = '1.23.0-rc2'
__version__ = '1.23.0-rc3'

View File

@@ -48,7 +48,7 @@ def interpolate_environment_variables(version, config, section, environment):
def get_config_path(config_key, section, name):
return '{}.{}.{}'.format(section, name, config_key)
return '{}/{}/{}'.format(section, name, config_key)
def interpolate_value(name, config_key, value, section, interpolator):
@@ -75,7 +75,7 @@ def interpolate_value(name, config_key, value, section, interpolator):
def recursive_interpolate(obj, interpolator, config_path):
def append(config_path, key):
return '{}.{}'.format(config_path, key)
return '{}/{}'.format(config_path, key)
if isinstance(obj, six.string_types):
return converter.convert(config_path, interpolator.interpolate(obj))
@@ -160,12 +160,12 @@ class UnsetRequiredSubstitution(Exception):
self.err = custom_err_msg
PATH_JOKER = '[^.]+'
PATH_JOKER = '[^/]+'
FULL_JOKER = '.+'
def re_path(*args):
return re.compile('^{}$'.format('\.'.join(args)))
return re.compile('^{}$'.format('/'.join(args)))
def re_path_basic(section, name):
@@ -288,7 +288,7 @@ class ConversionMap(object):
except ValueError as e:
raise ConfigurationError(
'Error while attempting to convert {} to appropriate type: {}'.format(
path, e
path.replace('/', '.'), e
)
)
return value
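
The path-separator switch above (from `.` to `/`) is the fix for the dotted service name issue listed in the changelog. A minimal sketch of the underlying problem, assuming a simplified local `re_path` that mirrors the one in the diff (an illustration, not the Compose implementation itself):

```
import re

# Illustrative jokers mirroring the diff above.
OLD_JOKER = '[^.]+'   # old PATH_JOKER: a path segment may not contain '.'
NEW_JOKER = '[^/]+'   # new PATH_JOKER: dots inside a service name are allowed

def re_path(sep, *args):
    # Join path components with the chosen separator, as re_path does above.
    return re.compile('^{}$'.format(re.escape(sep).join(args)))

old = re_path('.', 'service', OLD_JOKER, 'ports')
new = re_path('/', 'service', NEW_JOKER, 'ports')

print(bool(old.match('service.web.1.ports')))  # False: 'web.1' splits into two segments
print(bool(new.match('service/web.1/ports')))  # True: 'web.1' stays a single segment
```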

View File

@@ -1489,6 +1489,11 @@ def get_container_data_volumes(container, volumes_option, tmpfs_option, mounts_o
if not mount.get('Name'):
continue
# Volume (probably an image volume) is overridden by a mount in the service's config
# and would cause a duplicate mountpoint error
if volume.internal in [m.target for m in mounts_option]:
continue
# Copy existing volume from old container
volume = volume._replace(external=mount['Name'])
volumes.append(volume)
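
The new guard above skips an image-declared volume whenever an explicit mount in the service configuration already targets the same container path, so the engine is never asked to attach two volumes at one mountpoint. A rough sketch of that filter, using simplified stand-ins for Compose's volume and mount types (the type and field names here are assumptions, not the real classes):

```
from collections import namedtuple

# Simplified stand-ins; Compose's own VolumeSpec/MountSpec carry more fields.
ImageVolume = namedtuple('ImageVolume', ['external', 'internal', 'mode'])
ServiceMount = namedtuple('ServiceMount', ['type', 'target'])

def reusable_image_volumes(image_volumes, mounts_option):
    # Drop image volumes whose container path is already claimed by a declared mount.
    claimed = {m.target for m in mounts_option}
    return [v for v in image_volumes if v.internal not in claimed]

image_volumes = [ImageVolume(None, '/data', 'rw'), ImageVolume(None, '/var/lib/app', 'rw')]
mounts = [ServiceMount('volume', '/data')]
print(reusable_image_volumes(image_volumes, mounts))  # only the /var/lib/app volume is kept
```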

View File

@@ -2,13 +2,13 @@ backports.ssl-match-hostname==3.5.0.1; python_version < '3'
cached-property==1.3.0
certifi==2017.4.17
chardet==3.0.4
colorama==0.4.0; sys_platform == 'win32'
docker==3.5.0
docker-pycreds==0.3.0
dockerpty==0.4.1
docopt==0.6.2
enum34==1.1.6; python_version < '3.4'
functools32==3.2.3.post2; python_version < '3.2'
git+git://github.com/tartley/colorama.git@bd378c725b45eba0b8e5cc091c3ca76a954c92ff; sys_platform == 'win32'
idna==2.5
ipaddress==1.0.18
jsonschema==2.6.0

View File

@@ -1,15 +0,0 @@
FROM python:3.6
RUN mkdir -p /src && pip install -U Jinja2==2.10 \
PyGithub==1.39 \
pypandoc==1.4 \
GitPython==2.1.9 \
requests==2.18.4 \
twine==1.11.0 && \
apt-get update && apt-get install -y pandoc
VOLUME /src/script/release
WORKDIR /src
COPY . /src
RUN python setup.py develop
ENTRYPOINT ["python", "script/release/release.py"]
CMD ["--help"]

View File

@@ -9,8 +9,7 @@ The following things are required to bring a release to a successful conclusion
### Local Docker engine (Linux Containers)
The release script runs inside a container and builds images that will be part
of the release.
The release script builds images that will be part of the release.
### Docker Hub account
@@ -20,11 +19,9 @@ following repositories:
- docker/compose
- docker/compose-tests
### A local Python environment
### Python
While most of the release script is running inside a Docker container,
fetching local Docker credentials depends on the `docker` Python package
being available locally.
The release script is written in Python and requires Python 3.3 at minimum.
### A Github account and Github API token
@@ -59,6 +56,18 @@ Said account needs to be a member of the maintainers group for the
Moreover, the `~/.pypirc` file should exist on your host and contain the
relevant pypi credentials.
The following is a sample `.pypirc` provided as a guideline:
```
[distutils]
index-servers =
pypi
[pypi]
username = user
password = pass
```
## Start a feature release
A feature release is a release that includes all changes present in the

View File

@@ -17,6 +17,8 @@ from release.const import NAME
from release.const import REPO_ROOT
from release.downloader import BinaryDownloader
from release.images import ImageManager
from release.pypi import check_pypirc
from release.pypi import pypi_upload
from release.repository import delete_assets
from release.repository import get_contributors
from release.repository import Repository
@@ -28,8 +30,6 @@ from release.utils import ScriptError
from release.utils import update_init_py_version
from release.utils import update_run_sh_version
from release.utils import yesno
from requests.exceptions import HTTPError
from twine.commands.upload import main as twine_upload
def create_initial_branch(repository, args):
@@ -170,25 +170,6 @@ def distclean():
shutil.rmtree(folder, ignore_errors=True)
def pypi_upload(args):
print('Uploading to PyPi')
try:
rel = args.release.replace('-rc', 'rc')
twine_upload([
'dist/docker_compose-{}*.whl'.format(rel),
'dist/docker-compose-{}*.tar.gz'.format(rel)
])
except HTTPError as e:
if e.response.status_code == 400 and 'File already exists' in e.message:
if not args.finalize_resume:
raise ScriptError(
'Package already uploaded on PyPi.'
)
print('Skipping PyPi upload - package already uploaded')
else:
raise ScriptError('Unexpected HTTP error uploading package to PyPi: {}'.format(e))
def resume(args):
try:
distclean()
@@ -277,6 +258,7 @@ def start(args):
def finalize(args):
distclean()
try:
check_pypirc()
repository = Repository(REPO_ROOT, args.repo)
img_manager = ImageManager(args.release)
pr_data = repository.find_release_pr(args.release)
@@ -284,7 +266,7 @@
raise ScriptError('No PR found for {}'.format(args.release))
if not check_pr_mergeable(pr_data):
raise ScriptError('Can not finalize release with an unmergeable PR')
if not img_manager.check_images(args.release):
if not img_manager.check_images():
raise ScriptError('Missing release image')
br_name = branch_name(args.release)
if not repository.branch_exists(br_name):

View File

@@ -1,36 +1,13 @@
#!/bin/sh
docker image inspect compose/release-tool > /dev/null
if test $? -ne 0; then
docker build -t compose/release-tool -f $(pwd)/script/release/Dockerfile $(pwd)
if test -d ${VENV_DIR:-./.release-venv}; then
true
else
./script/release/setup-venv.sh
fi
if test -z $GITHUB_TOKEN; then
echo "GITHUB_TOKEN environment variable must be set"
exit 1
if test -z "$*"; then
args="--help"
fi
if test -z $BINTRAY_TOKEN; then
echo "BINTRAY_TOKEN environment variable must be set"
exit 1
fi
if test -z $(python -c "import docker; print(docker.version)" 2>/dev/null); then
echo "This script requires the 'docker' Python package to be installed locally"
exit 1
fi
hub_credentials=$(python -c "from docker import auth; cfg = auth.load_config(); print(auth.encode_header(auth.resolve_authconfig(cfg, 'docker.io')).decode('ascii'))")
docker run -it \
-e GITHUB_TOKEN=$GITHUB_TOKEN \
-e BINTRAY_TOKEN=$BINTRAY_TOKEN \
-e SSH_AUTH_SOCK=$SSH_AUTH_SOCK \
-e HUB_CREDENTIALS=$hub_credentials \
--mount type=bind,source=$(pwd),target=/src \
--mount type=bind,source=$HOME/.gitconfig,target=/root/.gitconfig \
--mount type=bind,source=/var/run/docker.sock,target=/var/run/docker.sock \
--mount type=bind,source=$HOME/.ssh,target=/root/.ssh \
--mount type=bind,source=/tmp,target=/tmp \
-v $HOME/.pypirc:/root/.pypirc \
compose/release-tool $*
${VENV_DIR:-./.release-venv}/bin/python ./script/release/release.py "$@"

View File

@@ -27,13 +27,12 @@ class ImageManager(object):
def build_images(self, repository, files):
print("Building release images...")
repository.write_git_sha()
docker_client = docker.APIClient(**docker.utils.kwargs_from_env())
distdir = os.path.join(REPO_ROOT, 'dist')
os.makedirs(distdir, exist_ok=True)
shutil.copy(files['docker-compose-Linux-x86_64'][0], distdir)
os.chmod(os.path.join(distdir, 'docker-compose-Linux-x86_64'), 0o755)
print('Building docker/compose image')
logstream = docker_client.build(
logstream = self.docker_client.build(
REPO_ROOT, tag='docker/compose:{}'.format(self.version), dockerfile='Dockerfile.run',
decode=True
)
@@ -44,7 +43,7 @@
print(chunk['stream'], end='')
print('Building test image (for UCP e2e)')
logstream = docker_client.build(
logstream = self.docker_client.build(
REPO_ROOT, tag='docker-compose-tests:tmp', decode=True
)
for chunk in logstream:
@@ -53,13 +52,15 @@
if 'stream' in chunk:
print(chunk['stream'], end='')
container = docker_client.create_container(
container = self.docker_client.create_container(
'docker-compose-tests:tmp', entrypoint='tox'
)
docker_client.commit(container, 'docker/compose-tests', 'latest')
docker_client.tag('docker/compose-tests:latest', 'docker/compose-tests:{}'.format(self.version))
docker_client.remove_container(container, force=True)
docker_client.remove_image('docker-compose-tests:tmp', force=True)
self.docker_client.commit(container, 'docker/compose-tests', 'latest')
self.docker_client.tag(
'docker/compose-tests:latest', 'docker/compose-tests:{}'.format(self.version)
)
self.docker_client.remove_container(container, force=True)
self.docker_client.remove_image('docker-compose-tests:tmp', force=True)
@property
def image_names(self):
@@ -69,23 +70,19 @@ class ImageManager(object):
'docker/compose:{}'.format(self.version)
]
def check_images(self, version):
docker_client = docker.APIClient(**docker.utils.kwargs_from_env())
def check_images(self):
for name in self.image_names:
try:
docker_client.inspect_image(name)
self.docker_client.inspect_image(name)
except docker.errors.ImageNotFound:
print('Expected image {} was not found'.format(name))
return False
return True
def push_images(self):
docker_client = docker.APIClient(**docker.utils.kwargs_from_env())
for name in self.image_names:
print('Pushing {} to Docker Hub'.format(name))
logstream = docker_client.push(name, stream=True, decode=True)
logstream = self.docker_client.push(name, stream=True, decode=True)
for chunk in logstream:
if 'status' in chunk:
print(chunk['status'])

View File

@@ -0,0 +1,44 @@
from __future__ import absolute_import
from __future__ import unicode_literals
from configparser import Error
from requests.exceptions import HTTPError
from twine.commands.upload import main as twine_upload
from twine.utils import get_config
from .utils import ScriptError
def pypi_upload(args):
print('Uploading to PyPi')
try:
rel = args.release.replace('-rc', 'rc')
twine_upload([
'dist/docker_compose-{}*.whl'.format(rel),
'dist/docker-compose-{}*.tar.gz'.format(rel)
])
except HTTPError as e:
if e.response.status_code == 400 and 'File already exists' in e.message:
if not args.finalize_resume:
raise ScriptError(
'Package already uploaded on PyPi.'
)
print('Skipping PyPi upload - package already uploaded')
else:
raise ScriptError('Unexpected HTTP error uploading package to PyPi: {}'.format(e))
def check_pypirc():
try:
config = get_config()
except Error as e:
raise ScriptError('Failed to parse .pypirc file: {}'.format(e))
if config is None:
raise ScriptError('Failed to parse .pypirc file')
if 'pypi' not in config:
raise ScriptError('Missing [pypi] section in .pypirc file')
if not (config['pypi'].get('username') and config['pypi'].get('password')):
raise ScriptError('Missing login/password pair for pypi repo')

script/release/setup-venv.sh Executable file (47 lines changed)
View File

@@ -0,0 +1,47 @@
#!/bin/bash
debian_based() { test -f /etc/debian_version; }
if test -z $VENV_DIR; then
VENV_DIR=./.release-venv
fi
if test -z $PYTHONBIN; then
PYTHONBIN=$(which python3)
if test -z $PYTHONBIN; then
PYTHONBIN=$(which python)
fi
fi
VERSION=$($PYTHONBIN -c "import sys; print('{}.{}'.format(*sys.version_info[0:2]))")
if test $(echo $VERSION | cut -d. -f1) -lt 3; then
echo "Python 3.3 or above is required"
fi
if test $(echo $VERSION | cut -d. -f2) -lt 3; then
echo "Python 3.3 or above is required"
fi
# Debian / Ubuntu workaround:
# https://askubuntu.com/questions/879437/ensurepip-is-disabled-in-debian-ubuntu-for-the-system-python
if debian_based; then
VENV_FLAGS="$VENV_FLAGS --without-pip"
fi
$PYTHONBIN -m venv $VENV_DIR $VENV_FLAGS
VENV_PYTHONBIN=$VENV_DIR/bin/python
if debian_based; then
curl https://bootstrap.pypa.io/get-pip.py -o $VENV_DIR/get-pip.py
$VENV_PYTHONBIN $VENV_DIR/get-pip.py
fi
$VENV_PYTHONBIN -m pip install -U Jinja2==2.10 \
PyGithub==1.39 \
pypandoc==1.4 \
GitPython==2.1.9 \
requests==2.18.4 \
twine==1.11.0
$VENV_PYTHONBIN setup.py develop

View File

@@ -15,7 +15,7 @@
set -e
VERSION="1.23.0-rc2"
VERSION="1.23.0-rc3"
IMAGE="docker/compose:$VERSION"

View File

@@ -55,7 +55,7 @@ extras_require = {
':python_version < "3.4"': ['enum34 >= 1.0.4, < 2'],
':python_version < "3.5"': ['backports.ssl_match_hostname >= 3.5'],
':python_version < "3.3"': ['ipaddress >= 1.0.16'],
':sys_platform == "win32"': ['colorama >= 0.3.9, < 0.4'],
':sys_platform == "win32"': ['colorama >= 0.4, < 0.5'],
'socks': ['PySocks >= 1.5.6, != 1.5.7, < 2'],
}

View File

@@ -425,6 +425,22 @@ class ServiceTest(DockerClientTestCase):
new_container = service.recreate_container(old_container)
assert new_container.get_mount('/data')['Source'] == volume_path
def test_recreate_volume_to_mount(self):
# https://github.com/docker/compose/issues/6280
service = Service(
project='composetest',
name='db',
client=self.client,
build={'context': 'tests/fixtures/dockerfile-with-volume'},
volumes=[MountSpec.parse({
'type': 'volume',
'target': '/data',
})]
)
old_container = create_and_start_container(service)
new_container = service.recreate_container(old_container)
assert new_container.get_mount('/data')['Source']
def test_duplicate_volume_trailing_slash(self):
"""
When an image specifies a volume, and the Compose file specifies a host path

View File

@@ -332,6 +332,37 @@ def test_interpolate_environment_external_resource_convert_types(mock_env):
assert value == expected
def test_interpolate_service_name_uses_dot(mock_env):
entry = {
'service.1': {
'image': 'busybox',
'ulimits': {
'nproc': '${POSINT}',
'nofile': {
'soft': '${POSINT}',
'hard': '${DEFAULT:-40000}'
},
},
}
}
expected = {
'service.1': {
'image': 'busybox',
'ulimits': {
'nproc': 50,
'nofile': {
'soft': 50,
'hard': 40000
},
},
}
}
value = interpolate_environment_variables(V3_4, entry, 'service', mock_env)
assert value == expected
def test_escaped_interpolation(defaults_interpolator):
assert defaults_interpolator('$${foo}') == '${foo}'