diff --git a/Release.Jenkinsfile b/Release.Jenkinsfile index c4d5ad1a5..96aa01530 100644 --- a/Release.Jenkinsfile +++ b/Release.Jenkinsfile @@ -2,7 +2,7 @@ def dockerVersions = ['19.03.5', '18.09.9'] def baseImages = ['alpine', 'debian'] -def pythonVersions = ['py27', 'py37'] +def pythonVersions = ['py37'] pipeline { agent none @@ -75,7 +75,7 @@ pipeline { steps { checkout scm sh './script/setup/osx' - sh 'tox -e py27,py37 -- tests/unit' + sh 'tox -e py37 -- tests/unit' sh './script/build/osx' dir ('dist') { checksum('docker-compose-Darwin-x86_64') @@ -112,7 +112,7 @@ pipeline { } steps { checkout scm - bat 'tox.exe -e py27,py37 -- tests/unit' + bat 'tox.exe -e py37 -- tests/unit' powershell '.\\script\\build\\windows.ps1' dir ('dist') { checksum('docker-compose-Windows-x86_64.exe') @@ -159,6 +159,9 @@ pipeline { agent { label 'linux' } + environment { + GITHUB_TOKEN = credentials('github-release-token') + } steps { checkout scm sh 'mkdir -p dist' @@ -167,7 +170,20 @@ pipeline { unstash "bin-linux" unstash "bin-win" unstash "changelog" - githubRelease() + sh(""" + curl -SfL https://github.com/github/hub/releases/download/v2.13.0/hub-linux-amd64-2.13.0.tgz | tar xzv --wildcards 'hub-*/bin/hub' --strip=2 + ./hub release create --draft --prerelease=${env.TAG_NAME !=~ /v[0-9\.]+/} \\ + -a docker-compose-Darwin-x86_64 \\ + -a docker-compose-Darwin-x86_64.sha256 \\ + -a docker-compose-Darwin-x86_64.tgz \\ + -a docker-compose-Darwin-x86_64.tgz.sha256 \\ + -a docker-compose-Linux-x86_64 \\ + -a docker-compose-Linux-x86_64.sha256 \\ + -a docker-compose-Windows-x86_64.exe \\ + -a docker-compose-Windows-x86_64.exe.sha256 \\ + -a ../script/run/run.sh \\ + -F CHANGELOG.md \${TAG_NAME} + """) } } } @@ -175,20 +191,18 @@ pipeline { agent { label 'linux' } + environment { + PYPIRC = credentials('pypirc-docker-dsg-cibot') + } steps { checkout scm - withCredentials([[$class: "FileBinding", credentialsId: 'pypirc-docker-dsg-cibot', variable: 'PYPIRC']]) { - sh """ - virtualenv 
venv-publish - source venv-publish/bin/activate - python setup.py sdist bdist_wheel - pip install twine - twine upload --config-file ${PYPIRC} ./dist/docker-compose-${env.TAG_NAME}.tar.gz ./dist/docker_compose-${env.TAG_NAME}-py2.py3-none-any.whl - """ - } - } - post { - sh 'deactivate; rm -rf venv-publish' + sh """ + rm -rf build/ dist/ + pip install wheel + python setup.py sdist bdist_wheel + pip install twine + ~/.local/bin/twine upload --config-file ${PYPIRC} ./dist/docker-compose-*.tar.gz ./dist/docker_compose-*-py2.py3-none-any.whl + """ } } } @@ -268,9 +282,8 @@ def buildRuntimeImage(baseImage) { def pushRuntimeImage(baseImage) { unstash "compose-${baseImage}" - sh 'echo -n "${DOCKERHUB_CREDS_PSW}" | docker login --username "${DOCKERHUB_CREDS_USR}" --password-stdin' sh "docker load -i dist/docker-compose-${baseImage}.tar" - withDockerRegistry(credentialsId: 'dockerbuildbot-hub.docker.com') { + withDockerRegistry(credentialsId: 'dockerhub-dockerdsgcibot') { sh "docker push docker/compose:${baseImage}-${env.TAG_NAME}" if (baseImage == "alpine" && env.TAG_NAME != null) { sh "docker tag docker/compose:alpine-${env.TAG_NAME} docker/compose:${env.TAG_NAME}" @@ -279,33 +292,6 @@ def pushRuntimeImage(baseImage) { } } -def githubRelease() { - withCredentials([string(credentialsId: 'github-compose-release-test-token', variable: 'GITHUB_TOKEN')]) { - def prerelease = !( env.TAG_NAME ==~ /v[0-9\.]+/ ) - changelog = readFile "CHANGELOG.md" - def data = """{ - \"tag_name\": \"${env.TAG_NAME}\", - \"name\": \"${env.TAG_NAME}\", - \"draft\": true, - \"prerelease\": ${prerelease}, - \"body\" : \"${changelog}\" - """ - echo $data - - def url = "https://api.github.com/repos/docker/compose/releases" - def upload_url = sh(returnStdout: true, script: """ - curl -sSf -H 'Authorization: token ${GITHUB_TOKEN}' -H 'Accept: application/json' -H 'Content-type: application/json' -X POST -d '$data' $url") \\ - | jq '.upload_url | .[:rindex("{")]' - """) - sh(""" - for f in * ; do - curl 
-sf -H 'Authorization: token ${GITHUB_TOKEN}' -H 'Accept: application/json' -H 'Content-type: application/octet-stream' \\ - -X POST --data-binary @\$f ${upload_url}?name=\$f; - done - """) - } -} - def checksum(filepath) { if (isUnix()) { sh "openssl sha256 -r -out ${filepath}.sha256 ${filepath}" diff --git a/script/release/README.md b/script/release/README.md index 97168d376..d53f67606 100644 --- a/script/release/README.md +++ b/script/release/README.md @@ -1,201 +1,9 @@ # Release HOWTO -This file describes the process of making a public release of `docker-compose`. -Please read it carefully before proceeding! +The release process is fully automated by `Release.Jenkinsfile`. -## Prerequisites +## Usage -The following things are required to bring a release to a successful conclusion - -### Local Docker engine (Linux Containers) - -The release script builds images that will be part of the release. - -### Docker Hub account - -You should be logged into a Docker Hub account that allows pushing to the -following repositories: - -- docker/compose -- docker/compose-tests - -### Python - -The release script is written in Python and requires Python 3.3 at minimum. - -### A Github account and Github API token - -Your Github account needs to have write access on the `docker/compose` repo. -To generate a Github token, head over to the -[Personal access tokens](https://github.com/settings/tokens) page in your -Github settings and select "Generate new token". Your token should include -(at minimum) the following scopes: - -- `repo:status` -- `public_repo` - -This API token should be exposed to the release script through the -`GITHUB_TOKEN` environment variable. - -### A Bintray account and Bintray API key - -Your Bintray account will need to be an admin member of the -[docker-compose organization](https://bintray.com/docker-compose). -Additionally, you should generate a personal API key. 
To do so, click your -username in the top-right hand corner and select "Edit profile" ; on the new -page, select "API key" in the left-side menu. - -This API key should be exposed to the release script through the -`BINTRAY_TOKEN` environment variable. - -### A PyPi account - -Said account needs to be a member of the maintainers group for the -[`docker-compose` project](https://pypi.org/project/docker-compose/). - -Moreover, the `~/.pypirc` file should exist on your host and contain the -relevant pypi credentials. - -The following is a sample `.pypirc` provided as a guideline: - -``` -[distutils] -index-servers = - pypi - -[pypi] -username = user -password = pass -``` - -## Start a feature release - -A feature release is a release that includes all changes present in the -`master` branch when initiated. It's typically versioned `X.Y.0-rc1`, where -Y is the minor version of the previous release incremented by one. A series -of one or more Release Candidates (RCs) should be made available to the public -to find and squash potential bugs. - -From the root of the Compose repository, run the following command: -``` -./script/release/release.sh -b start X.Y.0-rc1 -``` - -After a short initialization period, the script will invite you to edit the -`CHANGELOG.md` file. Do so by being careful to respect the same format as -previous releases. Once done, the script will display a `diff` of the staged -changes for the bump commit. Once you validate these, a bump commit will be -created on the newly created release branch and pushed remotely. - -The release tool then waits for the CI to conclude before proceeding. -If failures are reported, the release will be aborted until these are fixed. -Please refer to the "Resume a draft release" section below for more details. - -Once all resources have been prepared, the release script will exit with a -message resembling this one: - -``` -You're almost done! 
Please verify that everything is in order and you are ready -to make the release public, then run the following command: -./script/release/release.sh -b user finalize X.Y.0-rc1 -``` - -Once you are ready to finalize the release (making binaries and other versioned -assets public), proceed to the "Finalize a release" section of this guide. - -## Start a patch release - -A patch release is a release that builds off a previous release with discrete -additions. This can be an RC release after RC1 (`X.Y.0-rcZ`, `Z > 1`), a GA release -based off the final RC (`X.Y.0`), or a bugfix release based off a previous -GA release (`X.Y.Z`, `Z > 0`). - -From the root of the Compose repository, run the following command: -``` -./script/release/release.sh -b start --patch=BASE_VERSION RELEASE_VERSION -``` - -The process of starting a patch release is identical to starting a feature -release except for one difference ; at the beginning, the script will ask for -PR numbers you wish to cherry-pick into the release. These numbers should -correspond to existing PRs on the docker/compose repository. Multiple numbers -should be separated by whitespace. - -Once you are ready to finalize the release (making binaries and other versioned -assets public), proceed to the "Finalize a release" section of this guide. - -## Finalize a release - -Once you're ready to make your release public, you may execute the following -command from the root of the Compose repository: -``` -./script/release/release.sh -b finalize RELEASE_VERSION -``` - -Note that this command will create and publish versioned assets to the public. -As a result, it can not be reverted. The command will perform some basic -sanity checks before doing so, but it is your responsibility to ensure -everything is in order before pushing the button. 
- -After the command exits, you should make sure: - -- The `docker/compose:VERSION` image is available on Docker Hub and functional -- The `pip install -U docker-compose==VERSION` command correctly installs the - specified version -- The install command on the Github release page installs the new release - -## Resume a draft release - -"Resuming" a release lets you address the following situations occurring before -a release is made final: - -- Cherry-pick additional PRs to include in the release -- Resume a release that was aborted because of CI failures after they've been - addressed -- Rebuild / redownload assets after manual changes have been made to the - release branch -- etc. - -From the root of the Compose repository, run the following command: -``` -./script/release/release.sh -b resume RELEASE_VERSION -``` - -The release tool will attempt to determine what steps it's already been through -for the specified release and pick up where it left off. Some steps are -executed again no matter what as it's assumed they'll produce different -results, like building images or downloading binaries. - -## Cancel a draft release - -If issues snuck into your release branch, it is sometimes easier to start from -scratch. Before a release has been finalized, it is possible to cancel it using -the following command: -``` -./script/release/release.sh -b cancel RELEASE_VERSION -``` - -This will remove the release branch with this release (locally and remotely), -close the associated PR, remove the release page draft on Github and delete -the Bintray repository for it, allowing you to start fresh. 
- -## Manual operations - -Some common, release-related operations are not covered by this tool and should -be handled manually by the operator: - -- After any release: - - Announce new release on Slack -- After a GA release: - - Close the release milestone - - Merge back `CHANGELOG.md` changes from the `release` branch into `master` - - Bump the version in `compose/__init__.py` to the *next* minor version - number with `dev` appended. For example, if you just released `1.4.0`, - update it to `1.5.0dev` - - Update compose_version in [github.com/docker/docker.github.io/blob/master/_config.yml](https://github.com/docker/docker.github.io/blob/master/_config.yml) and [github.com/docker/docker.github.io/blob/master/_config_authoring.yml](https://github.com/docker/docker.github.io/blob/master/_config_authoring.yml) - - Update the release note in [github.com/docker/docker.github.io](https://github.com/docker/docker.github.io/blob/master/release-notes/docker-compose.md) - -## Advanced options - -You can consult the full list of options for the release tool by executing -`./script/release/release.sh --help`. +1. edit `compose/__init__.py` to set release version number +1. commit and tag as `v{major}.{minor}.{patch}` +1. 
edit `compose/__init__.py` again to set next development version number diff --git a/script/release/generate_changelog.sh b/script/release/generate_changelog.sh index 783e74400..8c3b3da8d 100755 --- a/script/release/generate_changelog.sh +++ b/script/release/generate_changelog.sh @@ -26,14 +26,17 @@ changes=$(pullrequests | uniq) echo "pull requests merged within range:" echo $changes -echo '#Features' > CHANGELOG.md +echo '#Features' > FEATURES.md +echo '#Bugs' > BUGS.md for pr in $changes; do - curl -fs -H "Authorization: token ${GITHUB_TOKEN}" https://api.github.com/repos/docker/compose/pulls/${pr} \ - | jq -r ' select( .labels[].name | contains("kind/feature") ) | "* "+.title' >> CHANGELOG.md + curl -fs -H "Authorization: token ${GITHUB_TOKEN}" https://api.github.com/repos/docker/compose/pulls/${pr} -o PR.json + + cat PR.json | jq -r ' select( .labels[].name | contains("kind/feature") ) | "- "+.title' >> FEATURES.md + cat PR.json | jq -r ' select( .labels[].name | contains("kind/bug") ) | "- "+.title' >> BUGS.md done -echo '#Bugs' >> CHANGELOG.md -for pr in $changes; do - curl -fs -H "Authorization: token ${GITHUB_TOKEN}" https://api.github.com/repos/docker/compose/pulls/${pr} \ - | jq -r ' select( .labels[].name | contains("kind/bug") ) | "* "+.title' >> CHANGELOG.md -done +echo ${TAG_NAME} > CHANGELOG.md +echo >> CHANGELOG.md +cat FEATURES.md >> CHANGELOG.md +echo >> CHANGELOG.md +cat BUGS.md >> CHANGELOG.md diff --git a/script/release/push-release b/script/release/push-release deleted file mode 100755 index f28c1d4fe..000000000 --- a/script/release/push-release +++ /dev/null @@ -1,74 +0,0 @@ -#!/bin/bash -# -# Create the official release -# - -. "$(dirname "${BASH_SOURCE[0]}")/utils.sh" - -function usage() { - >&2 cat << EOM -Publish a release by building all artifacts and pushing them. - -This script requires that 'git config branch.${BRANCH}.release' is set to the -release version for the release branch. 
- -EOM - exit 1 -} - -BRANCH="$(git rev-parse --abbrev-ref HEAD)" -VERSION="$(git config "branch.${BRANCH}.release")" || usage - -if [ -z "$(command -v jq 2> /dev/null)" ]; then - >&2 echo "$0 requires https://stedolan.github.io/jq/" - >&2 echo "Please install it and make sure it is available on your \$PATH." - exit 2 -fi - - -API=https://api.github.com/repos -REPO=docker/compose -GITHUB_REPO=git@github.com:$REPO - -# Check the build status is green -sha=$(git rev-parse HEAD) -url=$API/$REPO/statuses/$sha -build_status=$(curl -s $url | jq -r '.[0].state') -if [ -n "$SKIP_BUILD_CHECK" ]; then - echo "Skipping build status check..." -elif [[ "$build_status" != "success" ]]; then - >&2 echo "Build status is $build_status, but it should be success." - exit -1 -fi - -echo "Tagging the release as $VERSION" -git tag $VERSION -git push $GITHUB_REPO $VERSION - -echo "Uploading the docker image" -docker push docker/compose:$VERSION - -echo "Uploading the compose-tests image" -docker push docker/compose-tests:latest -docker push docker/compose-tests:$VERSION - -echo "Uploading package to PyPI" -./script/build/write-git-sha -python setup.py sdist bdist_wheel -if [ "$(command -v twine 2> /dev/null)" ]; then - twine upload ./dist/docker-compose-${VERSION/-/}.tar.gz ./dist/docker_compose-${VERSION/-/}-py2.py3-none-any.whl -else - python setup.py upload -fi - -echo "Testing pip package" -deactivate || true -virtualenv venv-test -source venv-test/bin/activate -pip install docker-compose==$VERSION -docker-compose version -deactivate -rm -rf venv-test - -echo "Now publish the github release, and test the downloads." -echo "Email maintainers@dockerproject.org and engineering@docker.com about the new release." 
diff --git a/script/release/rebase-bump-commit b/script/release/rebase-bump-commit deleted file mode 100755 index 3c2ae72b1..000000000 --- a/script/release/rebase-bump-commit +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# -# Move the "bump to " commit to the HEAD of the branch -# - -. "$(dirname "${BASH_SOURCE[0]}")/utils.sh" - -function usage() { - >&2 cat << EOM -Move the "bump to " commit to the HEAD of the branch - -This script requires that 'git config branch.${BRANCH}.release' is set to the -release version for the release branch. - -EOM - exit 1 -} - - -BRANCH="$(git rev-parse --abbrev-ref HEAD)" -VERSION="$(git config "branch.${BRANCH}.release")" || usage - - -COMMIT_MSG="Bump $VERSION" -sha="$(git log --grep "$COMMIT_MSG\$" --format="%H")" -if [ -z "$sha" ]; then - >&2 echo "No commit with message \"$COMMIT_MSG\"" - exit 2 -fi -if [[ "$sha" == "$(git rev-parse HEAD)" ]]; then - >&2 echo "Bump commit already at HEAD" - exit 0 -fi - -commits=$(git log --format="%H" "$sha..HEAD" | wc -l | xargs echo) - -git rebase --onto $sha~1 HEAD~$commits $BRANCH -git cherry-pick $sha diff --git a/script/release/release.py b/script/release/release.py deleted file mode 100755 index 82bc9a0a6..000000000 --- a/script/release/release.py +++ /dev/null @@ -1,387 +0,0 @@ -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -import argparse -import os -import shutil -import sys -import time - -from jinja2 import Template -from release.bintray import BintrayAPI -from release.const import BINTRAY_ORG -from release.const import NAME -from release.const import REPO_ROOT -from release.downloader import BinaryDownloader -from release.images import ImageManager -from release.images import is_tag_latest -from release.pypi import check_pypirc -from release.pypi import pypi_upload -from release.repository import delete_assets -from release.repository import get_contributors -from release.repository import Repository -from 
release.repository import upload_assets -from release.utils import branch_name -from release.utils import compatibility_matrix -from release.utils import read_release_notes_from_changelog -from release.utils import ScriptError -from release.utils import update_init_py_version -from release.utils import update_run_sh_version -from release.utils import yesno - - -def create_initial_branch(repository, args): - release_branch = repository.create_release_branch(args.release, args.base) - if args.base and args.cherries: - print('Detected patch version.') - cherries = input('Indicate (space-separated) PR numbers to cherry-pick then press Enter:\n') - repository.cherry_pick_prs(release_branch, cherries.split()) - - return create_bump_commit(repository, release_branch, args.bintray_user, args.bintray_org) - - -def create_bump_commit(repository, release_branch, bintray_user, bintray_org): - with release_branch.config_reader() as cfg: - release = cfg.get('release') - print('Updating version info in __init__.py and run.sh') - update_run_sh_version(release) - update_init_py_version(release) - - input('Please add the release notes to the CHANGELOG.md file, then press Enter to continue.') - proceed = None - while not proceed: - print(repository.diff()) - proceed = yesno('Are these changes ok? y/N ', default=False) - - if repository.diff(): - repository.create_bump_commit(release_branch, release) - repository.push_branch_to_remote(release_branch) - - bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], bintray_user) - if not bintray_api.repository_exists(bintray_org, release_branch.name): - print('Creating data repository {} on bintray'.format(release_branch.name)) - bintray_api.create_repository(bintray_org, release_branch.name, 'generic') - else: - print('Bintray repository {} already exists. 
Skipping'.format(release_branch.name)) - - -def monitor_pr_status(pr_data): - print('Waiting for CI to complete...') - last_commit = pr_data.get_commits().reversed[0] - while True: - status = last_commit.get_combined_status() - if status.state == 'pending' or status.state == 'failure': - summary = { - 'pending': 0, - 'success': 0, - 'failure': 0, - 'error': 0, - } - for detail in status.statuses: - if detail.context == 'dco-signed': - # dco-signed check breaks on merge remote-tracking ; ignore it - continue - if detail.state in summary: - summary[detail.state] += 1 - print( - '{pending} pending, {success} successes, {failure} failures, ' - '{error} errors'.format(**summary) - ) - if summary['failure'] > 0 or summary['error'] > 0: - raise ScriptError('CI failures detected!') - elif summary['pending'] == 0 and summary['success'] > 0: - # This check assumes at least 1 non-DCO CI check to avoid race conditions. - # If testing on a repo without CI, use --skip-ci-check to avoid looping eternally - return True - time.sleep(30) - elif status.state == 'success': - print('{} successes: all clear!'.format(status.total_count)) - return True - - -def check_pr_mergeable(pr_data): - if pr_data.mergeable is False: - # mergeable can also be null, in which case the warning would be a false positive. - print( - 'WARNING!! PR #{} can not currently be merged. 
You will need to ' - 'resolve the conflicts manually before finalizing the release.'.format(pr_data.number) - ) - - return pr_data.mergeable is True - - -def create_release_draft(repository, version, pr_data, files): - print('Creating Github release draft') - with open(os.path.join(os.path.dirname(__file__), 'release.md.tmpl'), 'r') as f: - template = Template(f.read()) - print('Rendering release notes based on template') - release_notes = template.render( - version=version, - compat_matrix=compatibility_matrix(), - integrity=files, - contributors=get_contributors(pr_data), - changelog=read_release_notes_from_changelog(), - ) - gh_release = repository.create_release( - version, release_notes, draft=True, prerelease='-rc' in version, - target_commitish='release' - ) - print('Release draft initialized') - return gh_release - - -def print_final_instructions(args): - print( - "You're almost done! Please verify that everything is in order and " - "you are ready to make the release public, then run the following " - "command:\n{exe} -b {user} finalize {version}".format( - exe='./script/release/release.sh', user=args.bintray_user, version=args.release - ) - ) - - -def distclean(): - print('Running distclean...') - dirs = [ - os.path.join(REPO_ROOT, 'build'), os.path.join(REPO_ROOT, 'dist'), - os.path.join(REPO_ROOT, 'docker-compose.egg-info') - ] - files = [] - for base, dirnames, fnames in os.walk(REPO_ROOT): - for fname in fnames: - path = os.path.normpath(os.path.join(base, fname)) - if fname.endswith('.pyc'): - files.append(path) - elif fname.startswith('.coverage.'): - files.append(path) - for dirname in dirnames: - path = os.path.normpath(os.path.join(base, dirname)) - if dirname == '__pycache__': - dirs.append(path) - elif dirname == '.coverage-binfiles': - dirs.append(path) - - for file in files: - os.unlink(file) - - for folder in dirs: - shutil.rmtree(folder, ignore_errors=True) - - -def resume(args): - try: - distclean() - repository = Repository(REPO_ROOT, 
args.repo) - br_name = branch_name(args.release) - if not repository.branch_exists(br_name): - raise ScriptError('No local branch exists for this release.') - gh_release = repository.find_release(args.release) - if gh_release and not gh_release.draft: - print('WARNING!! Found non-draft (public) release for this version!') - proceed = yesno( - 'Are you sure you wish to proceed? Modifying an already ' - 'released version is dangerous! y/N ', default=False - ) - if proceed.lower() is not True: - raise ScriptError('Aborting release') - - release_branch = repository.checkout_branch(br_name) - if args.cherries: - cherries = input('Indicate (space-separated) PR numbers to cherry-pick then press Enter:\n') - repository.cherry_pick_prs(release_branch, cherries.split()) - - create_bump_commit(repository, release_branch, args.bintray_user, args.bintray_org) - pr_data = repository.find_release_pr(args.release) - if not pr_data: - pr_data = repository.create_release_pull_request(args.release) - check_pr_mergeable(pr_data) - if not args.skip_ci: - monitor_pr_status(pr_data) - downloader = BinaryDownloader(args.destination) - files = downloader.download_all(args.release) - if not gh_release: - gh_release = create_release_draft(repository, args.release, pr_data, files) - delete_assets(gh_release) - upload_assets(gh_release, files) - tag_as_latest = is_tag_latest(args.release) - img_manager = ImageManager(args.release, tag_as_latest) - img_manager.build_images(repository) - except ScriptError as e: - print(e) - return 1 - - print_final_instructions(args) - return 0 - - -def cancel(args): - try: - repository = Repository(REPO_ROOT, args.repo) - repository.close_release_pr(args.release) - repository.remove_release(args.release) - repository.remove_bump_branch(args.release) - bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], args.bintray_user) - print('Removing Bintray data repository for {}'.format(args.release)) - bintray_api.delete_repository(args.bintray_org, 
branch_name(args.release)) - distclean() - except ScriptError as e: - print(e) - return 1 - print('Release cancellation complete.') - return 0 - - -def start(args): - distclean() - try: - repository = Repository(REPO_ROOT, args.repo) - create_initial_branch(repository, args) - pr_data = repository.create_release_pull_request(args.release) - check_pr_mergeable(pr_data) - if not args.skip_ci: - monitor_pr_status(pr_data) - downloader = BinaryDownloader(args.destination) - files = downloader.download_all(args.release) - gh_release = create_release_draft(repository, args.release, pr_data, files) - upload_assets(gh_release, files) - tag_as_latest = is_tag_latest(args.release) - img_manager = ImageManager(args.release, tag_as_latest) - img_manager.build_images(repository) - except ScriptError as e: - print(e) - return 1 - - print_final_instructions(args) - return 0 - - -def finalize(args): - distclean() - try: - check_pypirc() - repository = Repository(REPO_ROOT, args.repo) - tag_as_latest = is_tag_latest(args.release) - img_manager = ImageManager(args.release, tag_as_latest) - pr_data = repository.find_release_pr(args.release) - if not pr_data: - raise ScriptError('No PR found for {}'.format(args.release)) - if not check_pr_mergeable(pr_data): - raise ScriptError('Can not finalize release with an unmergeable PR') - if not img_manager.check_images(): - raise ScriptError('Missing release image') - br_name = branch_name(args.release) - if not repository.branch_exists(br_name): - raise ScriptError('No local branch exists for this release.') - gh_release = repository.find_release(args.release) - if not gh_release: - raise ScriptError('No Github release draft for this version') - - repository.checkout_branch(br_name) - - os.system('python {setup_script} sdist bdist_wheel'.format( - setup_script=os.path.join(REPO_ROOT, 'setup.py'))) - - merge_status = pr_data.merge() - if not merge_status.merged and not args.finalize_resume: - raise ScriptError( - 'Unable to merge PR #{}: 
{}'.format(pr_data.number, merge_status.message) - ) - - pypi_upload(args) - - img_manager.push_images() - repository.publish_release(gh_release) - except ScriptError as e: - print(e) - return 1 - - return 0 - - -ACTIONS = [ - 'start', - 'cancel', - 'resume', - 'finalize', -] - -EPILOG = '''Example uses: - * Start a new feature release (includes all changes currently in master) - release.sh -b user start 1.23.0 - * Start a new patch release - release.sh -b user --patch 1.21.0 start 1.21.1 - * Cancel / rollback an existing release draft - release.sh -b user cancel 1.23.0 - * Restart a previously aborted patch release - release.sh -b user -p 1.21.0 resume 1.21.1 -''' - - -def main(): - if 'GITHUB_TOKEN' not in os.environ: - print('GITHUB_TOKEN environment variable must be set') - return 1 - - if 'BINTRAY_TOKEN' not in os.environ: - print('BINTRAY_TOKEN environment variable must be set') - return 1 - - parser = argparse.ArgumentParser( - description='Orchestrate a new release of docker/compose. This tool assumes that you have ' - 'obtained a Github API token and Bintray API key and set the GITHUB_TOKEN and ' - 'BINTRAY_TOKEN environment variables accordingly.', - epilog=EPILOG, formatter_class=argparse.RawTextHelpFormatter) - parser.add_argument( - 'action', choices=ACTIONS, help='The action to be performed for this release' - ) - parser.add_argument('release', help='Release number, e.g. 
1.9.0-rc1, 2.1.1') - parser.add_argument( - '--patch', '-p', dest='base', - help='Which version is being patched by this release' - ) - parser.add_argument( - '--repo', '-r', dest='repo', default=NAME, - help='Start a release for the given repo (default: {})'.format(NAME) - ) - parser.add_argument( - '-b', dest='bintray_user', required=True, metavar='USER', - help='Username associated with the Bintray API key' - ) - parser.add_argument( - '--bintray-org', dest='bintray_org', metavar='ORG', default=BINTRAY_ORG, - help='Organization name on bintray where the data repository will be created.' - ) - parser.add_argument( - '--destination', '-o', metavar='DIR', default='binaries', - help='Directory where release binaries will be downloaded relative to the project root' - ) - parser.add_argument( - '--no-cherries', '-C', dest='cherries', action='store_false', - help='If set, the program will not prompt the user for PR numbers to cherry-pick' - ) - parser.add_argument( - '--skip-ci-checks', dest='skip_ci', action='store_true', - help='If set, the program will not wait for CI jobs to complete' - ) - parser.add_argument( - '--finalize-resume', dest='finalize_resume', action='store_true', - help='If set, finalize will continue through steps that have already been completed.' 
- ) - args = parser.parse_args() - - if args.action == 'start': - return start(args) - elif args.action == 'resume': - return resume(args) - elif args.action == 'cancel': - return cancel(args) - elif args.action == 'finalize': - return finalize(args) - - print('Unexpected action "{}"'.format(args.action), file=sys.stderr) - return 1 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/script/release/release.sh b/script/release/release.sh deleted file mode 100755 index 5f853808b..000000000 --- a/script/release/release.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/sh - -if test -d ${VENV_DIR:-./.release-venv}; then - true -else - ./script/release/setup-venv.sh -fi - -if test -z "$*"; then - args="--help" -fi - -${VENV_DIR:-./.release-venv}/bin/python ./script/release/release.py "$@" diff --git a/script/release/release/__init__.py b/script/release/release/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/script/release/release/bintray.py b/script/release/release/bintray.py deleted file mode 100644 index fb4008ad0..000000000 --- a/script/release/release/bintray.py +++ /dev/null @@ -1,50 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - -import json - -import requests - -from .const import NAME - - -class BintrayAPI(requests.Session): - def __init__(self, api_key, user, *args, **kwargs): - super(BintrayAPI, self).__init__(*args, **kwargs) - self.auth = (user, api_key) - self.base_url = 'https://api.bintray.com/' - - def create_repository(self, subject, repo_name, repo_type='generic'): - url = '{base}repos/{subject}/{repo_name}'.format( - base=self.base_url, subject=subject, repo_name=repo_name, - ) - data = { - 'name': repo_name, - 'type': repo_type, - 'private': False, - 'desc': 'Automated release for {}: {}'.format(NAME, repo_name), - 'labels': ['docker-compose', 'docker', 'release-bot'], - } - return self.post_json(url, data) - - def repository_exists(self, subject, repo_name): - url = 
'{base}/repos/{subject}/{repo_name}'.format( - base=self.base_url, subject=subject, repo_name=repo_name, - ) - result = self.get(url) - if result.status_code == 404: - return False - result.raise_for_status() - return True - - def delete_repository(self, subject, repo_name): - url = '{base}repos/{subject}/{repo_name}'.format( - base=self.base_url, subject=subject, repo_name=repo_name, - ) - return self.delete(url) - - def post_json(self, url, data, **kwargs): - if 'headers' not in kwargs: - kwargs['headers'] = {} - kwargs['headers']['Content-Type'] = 'application/json' - return self.post(url, data=json.dumps(data), **kwargs) diff --git a/script/release/release/const.py b/script/release/release/const.py deleted file mode 100644 index 52458ea14..000000000 --- a/script/release/release/const.py +++ /dev/null @@ -1,10 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - -import os - - -REPO_ROOT = os.path.join(os.path.dirname(__file__), '..', '..', '..') -NAME = 'docker/compose' -COMPOSE_TESTS_IMAGE_BASE_NAME = NAME + '-tests' -BINTRAY_ORG = 'docker-compose' diff --git a/script/release/release/downloader.py b/script/release/release/downloader.py deleted file mode 100644 index 0e9b80130..000000000 --- a/script/release/release/downloader.py +++ /dev/null @@ -1,73 +0,0 @@ -from __future__ import absolute_import -from __future__ import print_function -from __future__ import unicode_literals - -import hashlib -import os - -import requests - -from .const import BINTRAY_ORG -from .const import NAME -from .const import REPO_ROOT -from .utils import branch_name - - -class BinaryDownloader(requests.Session): - base_bintray_url = 'https://dl.bintray.com/{}'.format(BINTRAY_ORG) - base_appveyor_url = 'https://ci.appveyor.com/api/projects/{}/artifacts/'.format(NAME) - - def __init__(self, destination, *args, **kwargs): - super(BinaryDownloader, self).__init__(*args, **kwargs) - self.destination = destination - os.makedirs(self.destination, 
exist_ok=True) - - def download_from_bintray(self, repo_name, filename): - print('Downloading {} from bintray'.format(filename)) - url = '{base}/{repo_name}/{filename}'.format( - base=self.base_bintray_url, repo_name=repo_name, filename=filename - ) - full_dest = os.path.join(REPO_ROOT, self.destination, filename) - return self._download(url, full_dest) - - def download_from_appveyor(self, branch_name, filename): - print('Downloading {} from appveyor'.format(filename)) - url = '{base}/dist%2F{filename}?branch={branch_name}'.format( - base=self.base_appveyor_url, filename=filename, branch_name=branch_name - ) - full_dest = os.path.join(REPO_ROOT, self.destination, filename) - return self._download(url, full_dest) - - def _download(self, url, full_dest): - m = hashlib.sha256() - with open(full_dest, 'wb') as f: - r = self.get(url, stream=True) - for chunk in r.iter_content(chunk_size=1024 * 600, decode_unicode=False): - print('.', end='', flush=True) - m.update(chunk) - f.write(chunk) - - print(' download complete') - hex_digest = m.hexdigest() - with open(full_dest + '.sha256', 'w') as f: - f.write('{} {}\n'.format(hex_digest, os.path.basename(full_dest))) - return full_dest, hex_digest - - def download_all(self, version): - files = { - 'docker-compose-Darwin-x86_64.tgz': None, - 'docker-compose-Darwin-x86_64': None, - 'docker-compose-Linux-x86_64': None, - 'docker-compose-Windows-x86_64.exe': None, - } - - for filename in files.keys(): - if 'Windows' in filename: - files[filename] = self.download_from_appveyor( - branch_name(version), filename - ) - else: - files[filename] = self.download_from_bintray( - branch_name(version), filename - ) - return files diff --git a/script/release/release/images.py b/script/release/release/images.py deleted file mode 100644 index 17d572df3..000000000 --- a/script/release/release/images.py +++ /dev/null @@ -1,157 +0,0 @@ -from __future__ import absolute_import -from __future__ import print_function -from __future__ import 
unicode_literals - -import base64 -import json -import os - -import docker -from enum import Enum - -from .const import NAME -from .const import REPO_ROOT -from .utils import ScriptError -from .utils import yesno -from script.release.release.const import COMPOSE_TESTS_IMAGE_BASE_NAME - - -class Platform(Enum): - ALPINE = 'alpine' - DEBIAN = 'debian' - - def __str__(self): - return self.value - - -# Checks if this version respects the GA version format ('x.y.z') and not an RC -def is_tag_latest(version): - ga_version = all(n.isdigit() for n in version.split('.')) and version.count('.') == 2 - return ga_version and yesno('Should this release be tagged as \"latest\"? [Y/n]: ', default=True) - - -class ImageManager(object): - def __init__(self, version, latest=False): - self.docker_client = docker.APIClient(**docker.utils.kwargs_from_env()) - self.version = version - self.latest = latest - if 'HUB_CREDENTIALS' in os.environ: - print('HUB_CREDENTIALS found in environment, issuing login') - credentials = json.loads(base64.urlsafe_b64decode(os.environ['HUB_CREDENTIALS'])) - self.docker_client.login( - username=credentials['Username'], password=credentials['Password'] - ) - - def _tag(self, image, existing_tag, new_tag): - existing_repo_tag = '{image}:{tag}'.format(image=image, tag=existing_tag) - new_repo_tag = '{image}:{tag}'.format(image=image, tag=new_tag) - self.docker_client.tag(existing_repo_tag, new_repo_tag) - - def get_full_version(self, platform=None): - return self.version + '-' + platform.__str__() if platform else self.version - - def get_runtime_image_tag(self, tag): - return '{image_base_image}:{tag}'.format( - image_base_image=NAME, - tag=self.get_full_version(tag) - ) - - def build_runtime_image(self, repository, platform): - git_sha = repository.write_git_sha() - compose_image_base_name = NAME - print('Building {image} image ({platform} based)'.format( - image=compose_image_base_name, - platform=platform - )) - full_version = 
self.get_full_version(platform) - build_tag = self.get_runtime_image_tag(platform) - logstream = self.docker_client.build( - REPO_ROOT, - tag=build_tag, - buildargs={ - 'BUILD_PLATFORM': platform.value, - 'GIT_COMMIT': git_sha, - }, - decode=True - ) - for chunk in logstream: - if 'error' in chunk: - raise ScriptError('Build error: {}'.format(chunk['error'])) - if 'stream' in chunk: - print(chunk['stream'], end='') - - if platform == Platform.ALPINE: - self._tag(compose_image_base_name, full_version, self.version) - if self.latest: - self._tag(compose_image_base_name, full_version, platform) - if platform == Platform.ALPINE: - self._tag(compose_image_base_name, full_version, 'latest') - - def get_ucp_test_image_tag(self, tag=None): - return '{image}:{tag}'.format( - image=COMPOSE_TESTS_IMAGE_BASE_NAME, - tag=tag or self.version - ) - - # Used for producing a test image for UCP - def build_ucp_test_image(self, repository): - print('Building test image (debian based for UCP e2e)') - git_sha = repository.write_git_sha() - ucp_test_image_tag = self.get_ucp_test_image_tag() - logstream = self.docker_client.build( - REPO_ROOT, - tag=ucp_test_image_tag, - target='build', - buildargs={ - 'BUILD_PLATFORM': Platform.DEBIAN.value, - 'GIT_COMMIT': git_sha, - }, - decode=True - ) - for chunk in logstream: - if 'error' in chunk: - raise ScriptError('Build error: {}'.format(chunk['error'])) - if 'stream' in chunk: - print(chunk['stream'], end='') - - self._tag(COMPOSE_TESTS_IMAGE_BASE_NAME, self.version, 'latest') - - def build_images(self, repository): - self.build_runtime_image(repository, Platform.ALPINE) - self.build_runtime_image(repository, Platform.DEBIAN) - self.build_ucp_test_image(repository) - - def check_images(self): - for name in self.get_images_to_push(): - try: - self.docker_client.inspect_image(name) - except docker.errors.ImageNotFound: - print('Expected image {} was not found'.format(name)) - return False - return True - - def get_images_to_push(self): - 
tags_to_push = { - "{}:{}".format(NAME, self.version), - self.get_runtime_image_tag(Platform.ALPINE), - self.get_runtime_image_tag(Platform.DEBIAN), - self.get_ucp_test_image_tag(), - self.get_ucp_test_image_tag('latest'), - } - if is_tag_latest(self.version): - tags_to_push.add("{}:latest".format(NAME)) - return tags_to_push - - def push_images(self): - tags_to_push = self.get_images_to_push() - print('Build tags to push {}'.format(tags_to_push)) - for name in tags_to_push: - print('Pushing {} to Docker Hub'.format(name)) - logstream = self.docker_client.push(name, stream=True, decode=True) - for chunk in logstream: - if 'status' in chunk: - print(chunk['status']) - if 'error' in chunk: - raise ScriptError( - 'Error pushing {name}: {err}'.format(name=name, err=chunk['error']) - ) diff --git a/script/release/release/pypi.py b/script/release/release/pypi.py deleted file mode 100644 index dc0b0cb97..000000000 --- a/script/release/release/pypi.py +++ /dev/null @@ -1,44 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - -from configparser import Error -from requests.exceptions import HTTPError -from twine.commands.upload import main as twine_upload -from twine.utils import get_config - -from .utils import ScriptError - - -def pypi_upload(args): - print('Uploading to PyPi') - try: - rel = args.release.replace('-rc', 'rc') - twine_upload([ - 'dist/docker_compose-{}*.whl'.format(rel), - 'dist/docker-compose-{}*.tar.gz'.format(rel) - ]) - except HTTPError as e: - if e.response.status_code == 400 and 'File already exists' in str(e): - if not args.finalize_resume: - raise ScriptError( - 'Package already uploaded on PyPi.' 
- ) - print('Skipping PyPi upload - package already uploaded') - else: - raise ScriptError('Unexpected HTTP error uploading package to PyPi: {}'.format(e)) - - -def check_pypirc(): - try: - config = get_config() - except Error as e: - raise ScriptError('Failed to parse .pypirc file: {}'.format(e)) - - if config is None: - raise ScriptError('Failed to parse .pypirc file') - - if 'pypi' not in config: - raise ScriptError('Missing [pypi] section in .pypirc file') - - if not (config['pypi'].get('username') and config['pypi'].get('password')): - raise ScriptError('Missing login/password pair for pypi repo') diff --git a/script/release/release/repository.py b/script/release/release/repository.py deleted file mode 100644 index a0281eaa3..000000000 --- a/script/release/release/repository.py +++ /dev/null @@ -1,246 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - -import os -import tempfile - -import requests -from git import GitCommandError -from git import Repo -from github import Github - -from .const import NAME -from .const import REPO_ROOT -from .utils import branch_name -from .utils import read_release_notes_from_changelog -from .utils import ScriptError - - -class Repository(object): - def __init__(self, root=None, gh_name=None): - if root is None: - root = REPO_ROOT - if gh_name is None: - gh_name = NAME - self.git_repo = Repo(root) - self.gh_client = Github(os.environ['GITHUB_TOKEN']) - self.gh_repo = self.gh_client.get_repo(gh_name) - - def create_release_branch(self, version, base=None): - print('Creating release branch {} based on {}...'.format(version, base or 'master')) - remote = self.find_remote(self.gh_repo.full_name) - br_name = branch_name(version) - remote.fetch() - if self.branch_exists(br_name): - raise ScriptError( - "Branch {} already exists locally. 
Please remove it before " - "running the release script, or use `resume` instead.".format( - br_name - ) - ) - if base is not None: - base = self.git_repo.tag('refs/tags/{}'.format(base)) - else: - base = 'refs/remotes/{}/master'.format(remote.name) - release_branch = self.git_repo.create_head(br_name, commit=base) - release_branch.checkout() - self.git_repo.git.merge('--strategy=ours', '--no-edit', '{}/release'.format(remote.name)) - with release_branch.config_writer() as cfg: - cfg.set_value('release', version) - return release_branch - - def find_remote(self, remote_name=None): - if not remote_name: - remote_name = self.gh_repo.full_name - for remote in self.git_repo.remotes: - for url in remote.urls: - if remote_name in url: - return remote - return None - - def create_bump_commit(self, bump_branch, version): - print('Creating bump commit...') - bump_branch.checkout() - self.git_repo.git.commit('-a', '-s', '-m "Bump {}"'.format(version), '--no-verify') - - def diff(self): - return self.git_repo.git.diff() - - def checkout_branch(self, name): - return self.git_repo.branches[name].checkout() - - def push_branch_to_remote(self, branch, remote_name=None): - print('Pushing branch {} to remote...'.format(branch.name)) - remote = self.find_remote(remote_name) - remote.push(refspec=branch, force=True) - - def branch_exists(self, name): - return name in [h.name for h in self.git_repo.heads] - - def create_release_pull_request(self, version): - return self.gh_repo.create_pull( - title='Bump {}'.format(version), - body='Automated release for docker-compose {}\n\n{}'.format( - version, read_release_notes_from_changelog() - ), - base='release', - head=branch_name(version), - ) - - def create_release(self, version, release_notes, **kwargs): - return self.gh_repo.create_git_release( - tag=version, name=version, message=release_notes, **kwargs - ) - - def find_release(self, version): - print('Retrieving release draft for {}'.format(version)) - releases = 
self.gh_repo.get_releases() - for release in releases: - if release.tag_name == version and release.title == version: - return release - return None - - def publish_release(self, release): - release.update_release( - name=release.title, - message=release.body, - draft=False, - prerelease=release.prerelease - ) - - def remove_release(self, version): - print('Removing release draft for {}'.format(version)) - releases = self.gh_repo.get_releases() - for release in releases: - if release.tag_name == version and release.title == version: - if not release.draft: - print( - 'The release at {} is no longer a draft. If you TRULY intend ' - 'to remove it, please do so manually.'.format(release.url) - ) - continue - release.delete_release() - - def remove_bump_branch(self, version, remote_name=None): - name = branch_name(version) - if not self.branch_exists(name): - return False - print('Removing local branch "{}"'.format(name)) - if self.git_repo.active_branch.name == name: - print('Active branch is about to be deleted. Checking out to master...') - try: - self.checkout_branch('master') - except GitCommandError: - raise ScriptError( - 'Unable to checkout master. Try stashing local changes before proceeding.' 
- ) - self.git_repo.branches[name].delete(self.git_repo, name, force=True) - print('Removing remote branch "{}"'.format(name)) - remote = self.find_remote(remote_name) - try: - remote.push(name, delete=True) - except GitCommandError as e: - if 'remote ref does not exist' in str(e): - return False - raise ScriptError( - 'Error trying to remove remote branch: {}'.format(e) - ) - return True - - def find_release_pr(self, version): - print('Retrieving release PR for {}'.format(version)) - name = branch_name(version) - open_prs = self.gh_repo.get_pulls(state='open') - for pr in open_prs: - if pr.head.ref == name: - print('Found matching PR #{}'.format(pr.number)) - return pr - print('No open PR for this release branch.') - return None - - def close_release_pr(self, version): - print('Retrieving and closing release PR for {}'.format(version)) - name = branch_name(version) - open_prs = self.gh_repo.get_pulls(state='open') - count = 0 - for pr in open_prs: - if pr.head.ref == name: - print('Found matching PR #{}'.format(pr.number)) - pr.edit(state='closed') - count += 1 - if count == 0: - print('No open PR for this release branch.') - return count - - def write_git_sha(self): - with open(os.path.join(REPO_ROOT, 'compose', 'GITSHA'), 'w') as f: - f.write(self.git_repo.head.commit.hexsha[:7]) - return self.git_repo.head.commit.hexsha[:7] - - def cherry_pick_prs(self, release_branch, ids): - if not ids: - return - release_branch.checkout() - for i in ids: - try: - i = int(i) - except ValueError as e: - raise ScriptError('Invalid PR id: {}'.format(e)) - print('Retrieving PR#{}'.format(i)) - pr = self.gh_repo.get_pull(i) - patch_data = requests.get(pr.patch_url).text - self.apply_patch(patch_data) - - def apply_patch(self, patch_data): - with tempfile.NamedTemporaryFile(mode='w', prefix='_compose_cherry', encoding='utf-8') as f: - f.write(patch_data) - f.flush() - self.git_repo.git.am('--3way', f.name) - - def get_prs_in_milestone(self, version): - milestones = 
self.gh_repo.get_milestones(state='open') - milestone = None - for ms in milestones: - if ms.title == version: - milestone = ms - break - if not milestone: - print('Didn\'t find a milestone matching "{}"'.format(version)) - return None - - issues = self.gh_repo.get_issues(milestone=milestone, state='all') - prs = [] - for issue in issues: - if issue.pull_request is not None: - prs.append(issue.number) - return sorted(prs) - - -def get_contributors(pr_data): - commits = pr_data.get_commits() - authors = {} - for commit in commits: - if not commit or not commit.author or not commit.author.login: - continue - author = commit.author.login - authors[author] = authors.get(author, 0) + 1 - return [x[0] for x in sorted(list(authors.items()), key=lambda x: x[1])] - - -def upload_assets(gh_release, files): - print('Uploading binaries and hash sums') - for filename, filedata in files.items(): - print('Uploading {}...'.format(filename)) - gh_release.upload_asset(filedata[0], content_type='application/octet-stream') - gh_release.upload_asset('{}.sha256'.format(filedata[0]), content_type='text/plain') - print('Uploading run.sh...') - gh_release.upload_asset( - os.path.join(REPO_ROOT, 'script', 'run', 'run.sh'), content_type='text/plain' - ) - - -def delete_assets(gh_release): - print('Removing previously uploaded assets') - for asset in gh_release.get_assets(): - print('Deleting asset {}'.format(asset.name)) - asset.delete_asset() diff --git a/script/release/release/utils.py b/script/release/release/utils.py deleted file mode 100644 index 977a0a712..000000000 --- a/script/release/release/utils.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - -import os -import re - -from .const import REPO_ROOT -from compose import const as compose_const - -section_header_re = re.compile(r'^[0-9]+\.[0-9]+\.[0-9]+ \([0-9]{4}-[01][0-9]-[0-3][0-9]\)$') - - -class ScriptError(Exception): - pass - - -def branch_name(version): - 
return 'bump-{}'.format(version) - - -def read_release_notes_from_changelog(): - with open(os.path.join(REPO_ROOT, 'CHANGELOG.md'), 'r') as f: - lines = f.readlines() - i = 0 - while i < len(lines): - if section_header_re.match(lines[i]): - break - i += 1 - - j = i + 1 - while j < len(lines): - if section_header_re.match(lines[j]): - break - j += 1 - - return ''.join(lines[i + 2:j - 1]) - - -def update_init_py_version(version): - path = os.path.join(REPO_ROOT, 'compose', '__init__.py') - with open(path, 'r') as f: - contents = f.read() - contents = re.sub(r"__version__ = '[0-9a-z.-]+'", "__version__ = '{}'".format(version), contents) - with open(path, 'w') as f: - f.write(contents) - - -def update_run_sh_version(version): - path = os.path.join(REPO_ROOT, 'script', 'run', 'run.sh') - with open(path, 'r') as f: - contents = f.read() - contents = re.sub(r'VERSION="[0-9a-z.-]+"', 'VERSION="{}"'.format(version), contents) - with open(path, 'w') as f: - f.write(contents) - - -def compatibility_matrix(): - result = {} - for engine_version in compose_const.API_VERSION_TO_ENGINE_VERSION.values(): - result[engine_version] = [] - for fmt, api_version in compose_const.API_VERSIONS.items(): - result[compose_const.API_VERSION_TO_ENGINE_VERSION[api_version]].append(fmt.vstring) - return result - - -def yesno(prompt, default=None): - """ - Prompt the user for a yes or no. - - Can optionally specify a default value, which will only be - used if they enter a blank line. - - Unrecognised input (anything other than "y", "n", "yes", - "no" or "") will return None. 
- """ - answer = input(prompt).strip().lower() - - if answer == "y" or answer == "yes": - return True - elif answer == "n" or answer == "no": - return False - elif answer == "": - return default - else: - return None diff --git a/script/release/setup-venv.sh b/script/release/setup-venv.sh deleted file mode 100755 index ab419be0c..000000000 --- a/script/release/setup-venv.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -debian_based() { test -f /etc/debian_version; } - -if test -z $VENV_DIR; then - VENV_DIR=./.release-venv -fi - -if test -z $PYTHONBIN; then - PYTHONBIN=$(which python3) - if test -z $PYTHONBIN; then - PYTHONBIN=$(which python) - fi -fi - -VERSION=$($PYTHONBIN -c "import sys; print('{}.{}'.format(*sys.version_info[0:2]))") -if test $(echo $VERSION | cut -d. -f1) -lt 3; then - echo "Python 3.3 or above is required" -fi - -if test $(echo $VERSION | cut -d. -f2) -lt 3; then - echo "Python 3.3 or above is required" -fi - -# Debian / Ubuntu workaround: -# https://askubuntu.com/questions/879437/ensurepip-is-disabled-in-debian-ubuntu-for-the-system-python -if debian_based; then - VENV_FLAGS="$VENV_FLAGS --without-pip" -fi - -$PYTHONBIN -m venv $VENV_DIR $VENV_FLAGS - -VENV_PYTHONBIN=$VENV_DIR/bin/python - -if debian_based; then - curl https://bootstrap.pypa.io/get-pip.py -o $VENV_DIR/get-pip.py - $VENV_PYTHONBIN $VENV_DIR/get-pip.py -fi - -$VENV_PYTHONBIN -m pip install -U Jinja2==2.10 \ - PyGithub==1.39 \ - GitPython==2.1.9 \ - requests==2.18.4 \ - setuptools==40.6.2 \ - twine==1.11.0 - -$VENV_PYTHONBIN setup.py develop