From 101ee1cd62ec0f4018a9c1ac46b7de8d71f98215 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2019 13:27:05 +0000 Subject: [PATCH 01/46] Bump idna from 2.5 to 2.8 Bumps [idna](https://github.com/kjd/idna) from 2.5 to 2.8. - [Release notes](https://github.com/kjd/idna/releases) - [Changelog](https://github.com/kjd/idna/blob/master/HISTORY.rst) - [Commits](https://github.com/kjd/idna/compare/v2.5...v2.8) Signed-off-by: dependabot-preview[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1627cca9e..b08f6221b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ dockerpty==0.4.1 docopt==0.6.2 enum34==1.1.6; python_version < '3.4' functools32==3.2.3.post2; python_version < '3.2' -idna==2.5 +idna==2.8 ipaddress==1.0.18 jsonschema==3.0.1 paramiko==2.6.0 From e6e9263260f92249ed4f71d9bf62cefc0d186e3d Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2019 07:10:30 +0000 Subject: [PATCH 02/46] Bump ddt from 1.2.0 to 1.2.2 Bumps [ddt](https://github.com/datadriventests/ddt) from 1.2.0 to 1.2.2. - [Release notes](https://github.com/datadriventests/ddt/releases) - [Commits](https://github.com/datadriventests/ddt/compare/1.2.0...1.2.2) Signed-off-by: dependabot-preview[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 00a2c6447..e40cbc43c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,5 @@ coverage==4.5.4 -ddt==1.2.0 +ddt==1.2.2 flake8==3.7.9 mock==3.0.5 pytest==3.6.3 From 025002260bb9ac45353d97bc8fd2f493147ffe37 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 9 Dec 2019 13:31:00 +0000 Subject: [PATCH 03/46] Bump colorama from 0.4.0 to 0.4.3 Bumps [colorama](https://github.com/tartley/colorama) from 0.4.0 to 0.4.3. 
- [Release notes](https://github.com/tartley/colorama/releases) - [Changelog](https://github.com/tartley/colorama/blob/master/CHANGELOG.rst) - [Commits](https://github.com/tartley/colorama/compare/0.4.0...0.4.3) Signed-off-by: dependabot-preview[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1627cca9e..05fd18806 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ backports.ssl-match-hostname==3.5.0.1; python_version < '3' cached-property==1.3.0 certifi==2017.4.17 chardet==3.0.4 -colorama==0.4.0; sys_platform == 'win32' +colorama==0.4.3; sys_platform == 'win32' docker==4.1.0 docker-pycreds==0.4.0 dockerpty==0.4.1 From 67cce913a6ab960b7ddc476fa9a16adb39a69862 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 7 Jan 2020 16:44:42 +0100 Subject: [PATCH 04/46] Set dev version to 1.26.0dev after releasing 1.25.1 Signed-off-by: Ulysses Souza --- compose/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compose/__init__.py b/compose/__init__.py index 8112b4e16..69c4e0e49 100644 --- a/compose/__init__.py +++ b/compose/__init__.py @@ -1,4 +1,4 @@ from __future__ import absolute_import from __future__ import unicode_literals -__version__ = '1.25.1' +__version__ = '1.26.0dev' From 7f49bbb998546d6850c2ea185157aed567db5deb Mon Sep 17 00:00:00 2001 From: ulyssessouza Date: Thu, 12 Dec 2019 01:03:19 +0100 Subject: [PATCH 05/46] Validate version format on formats 2+ Signed-off-by: ulyssessouza --- compose/config/config.py | 7 +++++++ tests/unit/config/config_test.py | 23 +++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/compose/config/config.py b/compose/config/config.py index f64dc04a0..84933e9c9 100644 --- a/compose/config/config.py +++ b/compose/config/config.py @@ -5,6 +5,7 @@ import functools import io import logging import os +import re import string import sys from collections import namedtuple @@ -214,6 +215,12 @@ class ConfigFile(namedtuple('_ConfigFile', 'filename config')): .format(self.filename, VERSION_EXPLANATION) ) + version_pattern = re.compile(r"^[2-9]+(\.\d+)?$") + if not version_pattern.match(version): + raise ConfigurationError( + 'Version "{}" in "{}" is invalid.' 
+ .format(version, self.filename)) + if version == '2': return const.COMPOSEFILE_V2_0 diff --git a/tests/unit/config/config_test.py b/tests/unit/config/config_test.py index 0d3f49b99..0f744e22a 100644 --- a/tests/unit/config/config_test.py +++ b/tests/unit/config/config_test.py @@ -13,6 +13,8 @@ from random import shuffle import py import pytest import yaml +from ddt import data +from ddt import ddt from ...helpers import build_config_details from ...helpers import BUSYBOX_IMAGE_WITH_TAG @@ -68,6 +70,7 @@ def secret_sort(secrets): return sorted(secrets, key=itemgetter('source')) +@ddt class ConfigTest(unittest.TestCase): def test_load(self): @@ -1885,6 +1888,26 @@ class ConfigTest(unittest.TestCase): } ] + @data( + '2 ', + '3.', + '3.0.0', + '3.0.a', + '3.a', + '3a') + def test_invalid_version_formats(self, version): + content = { + 'version': version, + 'services': { + 'web': { + 'image': 'alpine', + } + } + } + with pytest.raises(ConfigurationError) as exc: + config.load(build_config_details(content)) + assert 'Version "{}" in "filename.yml" is invalid.'.format(version) in exc.exconly() + def test_group_add_option(self): actual = config.load(build_config_details({ 'version': '2', From 3df4ba1544e90f7dc7f01b018e92d018295b73c9 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Mon, 6 Jan 2020 17:25:25 +0100 Subject: [PATCH 06/46] Assume infinite terminal width when not running in a terminal Close https://github.com/docker/compose/issues/7119 Signed-off-by: Nicolas De Loof --- compose/cli/formatter.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/compose/cli/formatter.py b/compose/cli/formatter.py index c1f43ed7a..9651fb4da 100644 --- a/compose/cli/formatter.py +++ b/compose/cli/formatter.py @@ -17,7 +17,12 @@ else: def get_tty_width(): try: - width, _ = get_terminal_size() + # get_terminal_size can't determine the size if compose is piped + # to another command. But in such case it doesn't make sense to + # try format the output by terminal size as this output is consumed + # by another command. So let's pretend we have a huge terminal so + # output is single-lined + width, _ = get_terminal_size(fallback=(999, 0)) return int(width) except OSError: return 0 From dd889b990b723edf08c21b7442d02f603cf8eec1 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Tue, 19 Nov 2019 17:26:34 +0100 Subject: [PATCH 07/46] Prepare drop of python 2.x support see https://github.com/docker/compose/issues/6890 Signed-off-by: Nicolas De Loof --- Jenkinsfile | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 1d7c348e3..db9f601d7 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -48,7 +48,7 @@ def runTests = { Map settings -> def imageName = settings.get("image", null) if (!pythonVersions) { - throw new Exception("Need Python versions to test. e.g.: `runTests(pythonVersions: 'py27,py37')`") + throw new Exception("Need Python versions to test. e.g.: `runTests(pythonVersions: 'py37')`") } if (!dockerVersions) { throw new Exception("Need Docker versions to test. 
e.g.: `runTests(dockerVersions: 'all')`") @@ -82,13 +82,10 @@ def runTests = { Map settings -> def testMatrix = [failFast: true] def baseImages = ['alpine', 'debian'] -def pythonVersions = ['py27', 'py37'] baseImages.each { baseImage -> def imageName = buildImage(baseImage) get_versions(imageName, 2).each { dockerVersion -> - pythonVersions.each { pyVersion -> - testMatrix["${baseImage}_${dockerVersion}_${pyVersion}"] = runTests([baseImage: baseImage, image: imageName, dockerVersions: dockerVersion, pythonVersions: pyVersion]) - } + testMatrix["${baseImage}_${dockerVersion}"] = runTests([baseImage: baseImage, image: imageName, dockerVersions: dockerVersion, pythonVersions: 'py37']) } } From c5c287db5c31c2107bfd5778205e54fe9bf30b84 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Tue, 3 Dec 2019 11:44:30 +0100 Subject: [PATCH 08/46] We don't use FOSSA anymore Signed-off-by: Nicolas De Loof --- script/Jenkinsfile.fossa | 20 -------------------- script/fossa.mk | 16 ---------------- 2 files changed, 36 deletions(-) delete mode 100644 script/Jenkinsfile.fossa delete mode 100644 script/fossa.mk diff --git a/script/Jenkinsfile.fossa b/script/Jenkinsfile.fossa deleted file mode 100644 index 480e98efa..000000000 --- a/script/Jenkinsfile.fossa +++ /dev/null @@ -1,20 +0,0 @@ -pipeline { - agent any - stages { - stage("License Scan") { - agent { - label 'ubuntu-1604-aufs-edge' - } - - steps { - withCredentials([ - string(credentialsId: 'fossa-api-key', variable: 'FOSSA_API_KEY') - ]) { - checkout scm - sh "FOSSA_API_KEY='${FOSSA_API_KEY}' BRANCH_NAME='${env.BRANCH_NAME}' make -f script/fossa.mk fossa-analyze" - sh "FOSSA_API_KEY='${FOSSA_API_KEY}' make -f script/fossa.mk fossa-test" - } - } - } - } -} diff --git a/script/fossa.mk b/script/fossa.mk deleted file mode 100644 index 8d7af49d8..000000000 --- a/script/fossa.mk +++ /dev/null @@ -1,16 +0,0 @@ -# Variables for Fossa -BUILD_ANALYZER?=docker/fossa-analyzer -FOSSA_OPTS?=--option all-tags:true --option allow-unresolved:true - -fossa-analyze: - docker run --rm -e FOSSA_API_KEY=$(FOSSA_API_KEY) \ - -v $(CURDIR)/$*:/go/src/github.com/docker/compose \ - -w /go/src/github.com/docker/compose \ - $(BUILD_ANALYZER) analyze ${FOSSA_OPTS} --branch ${BRANCH_NAME} - - # This command is used to run the fossa test command -fossa-test: - docker run -i -e FOSSA_API_KEY=$(FOSSA_API_KEY) \ - -v $(CURDIR)/$*:/go/src/github.com/docker/compose \ - -w /go/src/github.com/docker/compose \ - $(BUILD_ANALYZER) test From 912d90832c88ab401115db59a5df9ace4c1bd3d2 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Tue, 3 Dec 2019 13:45:05 +0100 Subject: [PATCH 09/46] Use a simple script to get docker-ce releases Signed-off-by: Nicolas De Loof --- .dockerignore | 1 + Jenkinsfile | 27 ++++++++++++--------------- 2 files changed, 13 insertions(+), 15 deletions(-) diff --git a/.dockerignore b/.dockerignore index 65ad588d9..c1323a918 100644 --- a/.dockerignore +++ b/.dockerignore @@ -11,3 +11,4 @@ docs/_site .tox **/__pycache__ *.pyc +Jenkinsfile diff --git a/Jenkinsfile b/Jenkinsfile index db9f601d7..95a8bb522 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,20 +1,19 @@ #!groovy -def buildImage = { String baseImage -> +def buildImage(baseImage) { def image wrappedNode(label: "ubuntu && amd64 && !zfs", cleanWorkspace: true) { stage("build image for \"${baseImage}\"") { - checkout(scm) - def imageName = "dockerbuildbot/compose:${baseImage}-${gitCommit()}" + def scmvar = checkout(scm) + def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}" image = 
docker.image(imageName) try { image.pull() } catch (Exception exc) { - sh """GIT_COMMIT=\$(script/build/write-git-sha) && \\ - docker build -t ${imageName} \\ + sh """docker build -t ${imageName} \\ --target build \\ --build-arg BUILD_PLATFORM="${baseImage}" \\ - --build-arg GIT_COMMIT="${GIT_COMMIT}" \\ + --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT}" \\ .\\ """ sh "docker push ${imageName}" @@ -27,16 +26,14 @@ def buildImage = { String baseImage -> return image.id } -def get_versions = { String imageId, int number -> +def get_versions(imageId, number) { def docker_versions wrappedNode(label: "ubuntu && amd64 && !zfs") { - def result = sh(script: """docker run --rm \\ - --entrypoint=/code/.tox/py27/bin/python \\ - ${imageId} \\ - /code/script/test/versions.py -n ${number} docker/docker-ce recent - """, returnStdout: true - ) - docker_versions = result.split() + docker_versions = sh(script:""" + curl https://api.github.com/repos/docker/docker-ce/releases \ + | jq -r -c '.[] | select (.prerelease == false ) | .tag_name | ltrimstr("v")' > /tmp/versions.txt + for v in \$(cut -f1 -d"." /tmp/versions.txt | uniq | head -${number}); do grep -m 1 "\$v" /tmp/versions.txt ; done + """, returnStdout: true) } return docker_versions } @@ -84,7 +81,7 @@ def testMatrix = [failFast: true] def baseImages = ['alpine', 'debian'] baseImages.each { baseImage -> def imageName = buildImage(baseImage) - get_versions(imageName, 2).each { dockerVersion -> + get_versions(imageName, 2).eachLine { dockerVersion -> testMatrix["${baseImage}_${dockerVersion}"] = runTests([baseImage: baseImage, image: imageName, dockerVersions: dockerVersion, pythonVersions: 'py37']) } } From 644c55c4f7d0630755f45de1a80561f98267af17 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Tue, 3 Dec 2019 16:47:52 +0100 Subject: [PATCH 10/46] Use declarative syntax when possible Signed-off-by: Nicolas De Loof --- Jenkinsfile | 90 +++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 64 insertions(+), 26 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 95a8bb522..5b62363cc 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,32 +1,70 @@ #!groovy -def buildImage(baseImage) { - def image - wrappedNode(label: "ubuntu && amd64 && !zfs", cleanWorkspace: true) { - stage("build image for \"${baseImage}\"") { - def scmvar = checkout(scm) - def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}" - image = docker.image(imageName) - try { - image.pull() - } catch (Exception exc) { - sh """docker build -t ${imageName} \\ - --target build \\ - --build-arg BUILD_PLATFORM="${baseImage}" \\ - --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT}" \\ - .\\ - """ - sh "docker push ${imageName}" - echo "${imageName}" - return imageName - } +pipeline { + agent none + + options { + skipDefaultCheckout(true) + buildDiscarder(logRotator(daysToKeepStr: '30')) + timeout(time: 2, unit: 'HOURS') + timestamps() + } + + environment { + TAG = tag() + BUILD_TAG = tag() + } + + stages { + stage('Build test images') { + parallel { + stage('alpine') { + agent { + label 'ubuntu && amd64 && !zfs' + } + steps { + buildImage('alpine') + } + } + stage('debian') { + agent { + label 'ubuntu && amd64 && !zfs' + } + steps { + buildImage('debian') + } + } + } + } } - } - echo "image.id: ${image.id}" - return image.id } -def get_versions(imageId, number) { + +def buildImage(baseImage) { + def scmvar = checkout(scm) + def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}" + image = docker.image(imageName) + + 
withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') { + try { + image.pull() + } catch (Exception exc) { + ansiColor('xterm') { + sh """docker build -t ${imageName} \\ + --target build \\ + --build-arg BUILD_PLATFORM="${baseImage}" \\ + --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT}" \\ + .\\ + """ + sh "docker push ${imageName}" + } + echo "${imageName}" + return imageName + } + } +} + +def get_versions(number) { def docker_versions wrappedNode(label: "ubuntu && amd64 && !zfs") { docker_versions = sh(script:""" @@ -42,7 +80,6 @@ def runTests = { Map settings -> def dockerVersions = settings.get("dockerVersions", null) def pythonVersions = settings.get("pythonVersions", null) def baseImage = settings.get("baseImage", null) - def imageName = settings.get("image", null) if (!pythonVersions) { throw new Exception("Need Python versions to test. e.g.: `runTests(pythonVersions: 'py37')`") @@ -54,7 +91,8 @@ def runTests = { Map settings -> { -> wrappedNode(label: "ubuntu && amd64 && !zfs", cleanWorkspace: true) { stage("test python=${pythonVersions} / docker=${dockerVersions} / baseImage=${baseImage}") { - checkout(scm) + def scmvar = checkout(scm) + def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}" def storageDriver = sh(script: 'docker info | awk -F \': \' \'$1 == "Storage Driver" { print $2; exit }\'', returnStdout: true).trim() echo "Using local system's storage driver: ${storageDriver}" sh """docker run \\ From 2955f48468574f783207baeff9754655310af719 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Wed, 4 Dec 2019 08:43:28 +0100 Subject: [PATCH 11/46] Get docker versions using a plain command line Signed-off-by: Nicolas De Loof --- Jenkinsfile | 73 +++++++++++++++++++++++++---------------------------- 1 file changed, 34 insertions(+), 39 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 5b62363cc..694f12f47 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,5 +1,9 @@ #!groovy +def dockerVersions +def baseImages = ['alpine', 'debian'] +def pythonVersions = ['py27', 'py37'] + pipeline { agent none @@ -17,6 +21,7 @@ pipeline { stages { stage('Build test images') { + // TODO use declarative 1.5.0 `matrix` once available on CI parallel { stage('alpine') { agent { @@ -36,6 +41,20 @@ pipeline { } } } + stage('Get Docker versions') { + agent { + label 'ubuntu' + } + steps { + script { + dockerVersions = sh(script:""" + curl https://api.github.com/repos/docker/docker-ce/releases \ + | jq -r -c '.[] | select (.prerelease == false ) | .tag_name | ltrimstr("v")' > /tmp/versions.txt + for v in \$(cut -f1 -d"." /tmp/versions.txt | uniq | head -2); do grep -m 1 "\$v" /tmp/versions.txt ; done + """, returnStdout: true) + } + } + } } } @@ -64,33 +83,9 @@ def buildImage(baseImage) { } } -def get_versions(number) { - def docker_versions - wrappedNode(label: "ubuntu && amd64 && !zfs") { - docker_versions = sh(script:""" - curl https://api.github.com/repos/docker/docker-ce/releases \ - | jq -r -c '.[] | select (.prerelease == false ) | .tag_name | ltrimstr("v")' > /tmp/versions.txt - for v in \$(cut -f1 -d"." /tmp/versions.txt | uniq | head -${number}); do grep -m 1 "\$v" /tmp/versions.txt ; done - """, returnStdout: true) - } - return docker_versions -} - -def runTests = { Map settings -> - def dockerVersions = settings.get("dockerVersions", null) - def pythonVersions = settings.get("pythonVersions", null) - def baseImage = settings.get("baseImage", null) - - if (!pythonVersions) { - throw new Exception("Need Python versions to test. 
e.g.: `runTests(pythonVersions: 'py37')`") - } - if (!dockerVersions) { - throw new Exception("Need Docker versions to test. e.g.: `runTests(dockerVersions: 'all')`") - } - - { -> +def runTests(dockerVersion, pythonVersion, baseImage) { wrappedNode(label: "ubuntu && amd64 && !zfs", cleanWorkspace: true) { - stage("test python=${pythonVersions} / docker=${dockerVersions} / baseImage=${baseImage}") { + stage("test python=${pythonVersion} / docker=${dockerVersion} / baseImage=${baseImage}") { def scmvar = checkout(scm) def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}" def storageDriver = sh(script: 'docker info | awk -F \': \' \'$1 == "Storage Driver" { print $2; exit }\'', returnStdout: true).trim() @@ -103,25 +98,25 @@ def runTests = { Map settings -> --volume="/var/run/docker.sock:/var/run/docker.sock" \\ -e "TAG=${imageName}" \\ -e "STORAGE_DRIVER=${storageDriver}" \\ - -e "DOCKER_VERSIONS=${dockerVersions}" \\ + -e "DOCKER_VERSIONS=${dockerVersion}" \\ -e "BUILD_NUMBER=\$BUILD_TAG" \\ - -e "PY_TEST_VERSIONS=${pythonVersions}" \\ + -e "PY_TEST_VERSIONS=${pythonVersion}" \\ --entrypoint="script/test/ci" \\ ${imageName} \\ --verbose """ - } + } + } +} + +def testMatrix = [failFast: true] + +baseImages.each { baseImage -> + dockerVersions.eachLine { dockerVersion -> + pythonVersions.each { pythonVersion -> + testMatrix["${baseImage}_${dockerVersion}_${pythonVersion}"] = runTests(dockerVersion, pythonVersion, baseImage) } } } -def testMatrix = [failFast: true] -def baseImages = ['alpine', 'debian'] -baseImages.each { baseImage -> - def imageName = buildImage(baseImage) - get_versions(imageName, 2).eachLine { dockerVersion -> - testMatrix["${baseImage}_${dockerVersion}"] = runTests([baseImage: baseImage, image: imageName, dockerVersions: dockerVersion, pythonVersions: 'py37']) - } -} - -parallel(testMatrix) +parallel testMatrix From 9478725a70fd80622395518753d944e0ed594b7c Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Wed, 4 Dec 2019 09:40:33 +0100 Subject: [PATCH 12/46] Fix tested docker releases in Pipeline This allows Engine team to trigger a compose build by pushing a PR changing the `dockerVersions` variable to test Release Candidates Signed-off-by: Nicolas De Loof --- Jenkinsfile | 34 ++++++++++++---------------------- 1 file changed, 12 insertions(+), 22 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 694f12f47..cae52e147 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,6 +1,6 @@ #!groovy -def dockerVersions +def dockerVersions = ['19.03.5', '18.09.9'] def baseImages = ['alpine', 'debian'] def pythonVersions = ['py27', 'py37'] @@ -41,17 +41,19 @@ pipeline { } } } - stage('Get Docker versions') { - agent { - label 'ubuntu' - } + stage('Test') { steps { script { - dockerVersions = sh(script:""" - curl https://api.github.com/repos/docker/docker-ce/releases \ - | jq -r -c '.[] | select (.prerelease == false ) | .tag_name | ltrimstr("v")' > /tmp/versions.txt - for v in \$(cut -f1 -d"." 
/tmp/versions.txt | uniq | head -2); do grep -m 1 "\$v" /tmp/versions.txt ; done - """, returnStdout: true) + def testMatrix = [:] + baseImages.each { baseImage -> + dockerVersions.each { dockerVersion -> + pythonVersions.each { pythonVersion -> + testMatrix["${baseImage}_${dockerVersion}_${pythonVersion}"] = runTests(dockerVersion, pythonVersion, baseImage) + } + } + } + + parallel testMatrix } } } @@ -108,15 +110,3 @@ def runTests(dockerVersion, pythonVersion, baseImage) { } } } - -def testMatrix = [failFast: true] - -baseImages.each { baseImage -> - dockerVersions.eachLine { dockerVersion -> - pythonVersions.each { pythonVersion -> - testMatrix["${baseImage}_${dockerVersion}_${pythonVersion}"] = runTests(dockerVersion, pythonVersion, baseImage) - } - } -} - -parallel testMatrix From 8859ab0d66a1996af3be11ddd2b289996f7cd70b Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Wed, 4 Dec 2019 10:10:37 +0100 Subject: [PATCH 13/46] Use gotemplate formater to extract specific data Signed-off-by: Nicolas De Loof --- Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index cae52e147..2cf2a0e28 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -90,7 +90,7 @@ def runTests(dockerVersion, pythonVersion, baseImage) { stage("test python=${pythonVersion} / docker=${dockerVersion} / baseImage=${baseImage}") { def scmvar = checkout(scm) def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}" - def storageDriver = sh(script: 'docker info | awk -F \': \' \'$1 == "Storage Driver" { print $2; exit }\'', returnStdout: true).trim() + def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim() echo "Using local system's storage driver: ${storageDriver}" sh """docker run \\ -t \\ @@ -101,7 +101,7 @@ def runTests(dockerVersion, pythonVersion, baseImage) { -e "TAG=${imageName}" \\ -e "STORAGE_DRIVER=${storageDriver}" \\ -e "DOCKER_VERSIONS=${dockerVersion}" \\ - -e "BUILD_NUMBER=\$BUILD_TAG" \\ + -e "BUILD_NUMBER=${env.BUILD_NUMBER}" \\ -e "PY_TEST_VERSIONS=${pythonVersion}" \\ --entrypoint="script/test/ci" \\ ${imageName} \\ From 6b0acc9ecbcf7b769612b775ce895a4f0b53277b Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Wed, 4 Dec 2019 10:33:54 +0100 Subject: [PATCH 14/46] tests don't run in parallel Signed-off-by: Nicolas De Loof --- Jenkinsfile | 49 +++++++++++++++++++++++++++---------------------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 2cf2a0e28..87ff15b7f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -43,6 +43,7 @@ pipeline { } stage('Test') { steps { + // TODO use declarative 1.5.0 `matrix` once available on CI script { def testMatrix = [:] baseImages.each { baseImage -> @@ -86,27 +87,31 @@ def buildImage(baseImage) { } def runTests(dockerVersion, pythonVersion, baseImage) { - wrappedNode(label: "ubuntu && amd64 && !zfs", cleanWorkspace: true) { - stage("test python=${pythonVersion} / docker=${dockerVersion} / baseImage=${baseImage}") { - def scmvar = checkout(scm) - def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}" - def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim() - echo "Using local system's storage driver: ${storageDriver}" - sh """docker run \\ - -t \\ - --rm \\ - --privileged \\ - --volume="\$(pwd)/.git:/code/.git" \\ - --volume="/var/run/docker.sock:/var/run/docker.sock" \\ - -e "TAG=${imageName}" \\ - -e "STORAGE_DRIVER=${storageDriver}" \\ - -e 
"DOCKER_VERSIONS=${dockerVersion}" \\ - -e "BUILD_NUMBER=${env.BUILD_NUMBER}" \\ - -e "PY_TEST_VERSIONS=${pythonVersion}" \\ - --entrypoint="script/test/ci" \\ - ${imageName} \\ - --verbose - """ - } + return { + stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") { + node("ubuntu && amd64 && !zfs") { + def scmvar = checkout(scm) + def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}" + def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim() + echo "Using local system's storage driver: ${storageDriver}" + withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') { + sh """docker run \\ + -t \\ + --rm \\ + --privileged \\ + --volume="\$(pwd)/.git:/code/.git" \\ + --volume="/var/run/docker.sock:/var/run/docker.sock" \\ + -e "TAG=${imageName}" \\ + -e "STORAGE_DRIVER=${storageDriver}" \\ + -e "DOCKER_VERSIONS=${dockerVersion}" \\ + -e "BUILD_NUMBER=${env.BUILD_NUMBER}" \\ + -e "PY_TEST_VERSIONS=${pythonVersion}" \\ + --entrypoint="script/test/ci" \\ + ${imageName} \\ + --verbose + """ + } + } + } } } From 7be66baaa74fe9e32ab303c91a177fa0db16e7dd Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Wed, 4 Dec 2019 13:54:05 +0100 Subject: [PATCH 15/46] TAG and BUILD_TAG are obsolete Signed-off-by: Nicolas De Loof --- Jenkinsfile | 5 ----- 1 file changed, 5 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 87ff15b7f..35998bf96 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -14,11 +14,6 @@ pipeline { timestamps() } - environment { - TAG = tag() - BUILD_TAG = tag() - } - stages { stage('Build test images') { // TODO use declarative 1.5.0 `matrix` once available on CI From da5567715479612264d58384fd9449ae0effffae Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Thu, 5 Dec 2019 12:11:06 +0100 Subject: [PATCH 16/46] Release pipeline Signed-off-by: Nicolas De Loof --- Jenkinsfile.release | 267 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 267 insertions(+) create mode 100644 Jenkinsfile.release diff --git a/Jenkinsfile.release b/Jenkinsfile.release new file mode 100644 index 000000000..d089a8c98 --- /dev/null +++ b/Jenkinsfile.release @@ -0,0 +1,267 @@ +#!groovy + +def dockerVersions = ['19.03.5', '18.09.9'] +def baseImages = ['alpine', 'debian'] +def pythonVersions = ['py27', 'py37'] + +pipeline { + agent none + + options { + skipDefaultCheckout(true) + buildDiscarder(logRotator(daysToKeepStr: '30')) + timeout(time: 2, unit: 'HOURS') + timestamps() + } + + stages { + stage('Build test images') { + // TODO use declarative 1.5.0 `matrix` once available on CI + parallel { + stage('alpine') { + agent { + label 'linux' + } + steps { + buildImage('alpine') + } + } + stage('debian') { + agent { + label 'linux' + } + steps { + buildImage('debian') + } + } + } + } + stage('Test') { + steps { + // TODO use declarative 1.5.0 `matrix` once available on CI + script { + def testMatrix = [:] + baseImages.each { baseImage -> + dockerVersions.each { dockerVersion -> + pythonVersions.each { pythonVersion -> + testMatrix["${baseImage}_${dockerVersion}_${pythonVersion}"] = runTests(dockerVersion, pythonVersion, baseImage) + } + } + } + + parallel testMatrix + } + } + } + stage('Package') { + parallel { + stage('macosx binary') { + agent { + label 'mac-python' + } + steps { + checkout scm + sh 'script/setup/osx-ci' + sh 'tox -e py27,py37 -- tests/unit' + sh './script/build/osx' + checksum("dist/docker-compose-Darwin-x86_64") + checksum("dist/docker-compose-Darwin-x86_64.tgz") + archiveArtifacts artifacts: 
'dist/*', fingerprint: true + dir("dist") { + stash name: "bin-darwin" + } + } + } + stage('linux binary') { + agent { + label 'linux' + } + steps { + checkout scm + sh ' ./script/build/linux' + checksum("dist/docker-compose-Linux-x86_64") + archiveArtifacts artifacts: 'dist/*', fingerprint: true + dir("dist") { + stash name: "bin-linux" + } + } + } + stage('windows binary') { + agent { + label 'windows-python' + } + environment { + PATH = "$PATH;C:\\Python37;C:\\Python37\\Scripts" + } + steps { + checkout scm + bat 'tox.exe -e py27,py37 -- tests/unit' + powershell '.\\script\\build\\windows.ps1' + checksum("dist/docker-compose-Windows-x86_64.exe") + archiveArtifacts artifacts: 'dist/*', fingerprint: true + dir("dist") { + stash name: "bin-win" + } + } + } + stage('alpine image') { + agent { + label 'linux' + } + steps { + buildRuntimeImage('alpine') + } + } + stage('debian image') { + agent { + label 'linux' + } + steps { + buildRuntimeImage('debian') + } + } + } + } + stage('Release') { + when { + buildingTag() + } + parallel { + stage('Pushing images') { + agent { + label 'linux' + } + steps { + pushRuntimeImage('alpine') + pushRuntimeImage('debian') + } + } + stage('Creating Github Release') { + agent { + label 'linux' + } + steps { + checkout scm + sh 'mkdir -p dist' + dir("dist") { + unstash "bin-darwin" + unstash "bin-linux" + unstash "bin-win" + githubRelease("docker/compose") + } + } + } + stage('Publishing Python packages') { + agent { + label 'linux' + } + steps { + checkout scm + withCredentials([[$class: "FileBinding", credentialsId: 'pypirc-docker-dsg-cibot', variable: 'PYPIRC']]) { + sh './script/release/python-package' + } + archiveArtifacts artifacts: 'dist/*', fingerprint: true + } + } + stage('Publishing binaries to Bintray') { + agent { + label 'linux' + } + steps { + checkout scm + dir("dist") { + unstash "bin-darwin" + unstash "bin-linux" + unstash "bin-win" + } + withCredentials([usernamePassword(credentialsId: 'bintray-docker-dsg-cibot', usernameVariable: 'BINTRAY_USER', passwordVariable: 'BINTRAY_TOKEN')]) { + sh './script/release/push-binaries' + } + } + } + } + } + } +} + + +def buildImage(baseImage) { + def scmvar = checkout(scm) + def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}" + image = docker.image(imageName) + + withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') { + try { + image.pull() + } catch (Exception exc) { + ansiColor('xterm') { + sh """docker build -t ${imageName} \\ + --target build \\ + --build-arg BUILD_PLATFORM="${baseImage}" \\ + --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT}" \\ + .\\ + """ + sh "docker push ${imageName}" + } + echo "${imageName}" + return imageName + } + } +} + +def runTests(dockerVersion, pythonVersion, baseImage) { + return { + stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") { + node("linux") { + def scmvar = checkout(scm) + def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}" + def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim() + echo "Using local system's storage driver: ${storageDriver}" + withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') { + sh """docker run \\ + -t \\ + --rm \\ + --privileged \\ + --volume="\$(pwd)/.git:/code/.git" \\ + --volume="/var/run/docker.sock:/var/run/docker.sock" \\ + -e "TAG=${imageName}" \\ + -e "STORAGE_DRIVER=${storageDriver}" \\ + -e "DOCKER_VERSIONS=${dockerVersion}" \\ + -e "BUILD_NUMBER=${env.BUILD_NUMBER}" \\ + -e 
"PY_TEST_VERSIONS=${pythonVersion}" \\ + --entrypoint="script/test/ci" \\ + ${imageName} \\ + --verbose + """ + } + } + } + } +} + +def buildRuntimeImage(baseImage) { + scmvar = checkout scm + def imageName = "docker/compose:${baseImage}-${env.BRANCH_NAME}" + ansiColor('xterm') { + sh """docker build -t ${imageName} \\ + --build-arg BUILD_PLATFORM="${baseImage}" \\ + --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT.take(7)}" + """ + } + sh "mkdir -p dist" + sh "docker save ${imageName} -o dist/docker-compose-${baseImage}.tar" + stash name: "compose-${baseImage}", includes: "dist/docker-compose-${baseImage}.tar" +} + +def pushRuntimeImage(baseImage) { + unstash "compose-${baseImage}" + sh 'echo -n "${DOCKERHUB_CREDS_PSW}" | docker login --username "${DOCKERHUB_CREDS_USR}" --password-stdin' + sh "docker load -i dist/docker-compose-${baseImage}.tar" + withDockerRegistry(credentialsId: 'dockerbuildbot-hub.docker.com') { + sh "docker push docker/compose:${baseImage}-${env.TAG_NAME}" + if (baseImage == "alpine" && env.TAG_NAME != null) { + sh "docker tag docker/compose:alpine-${env.TAG_NAME} docker/compose:${env.TAG_NAME}" + sh "docker push docker/compose:${env.TAG_NAME}" + } + } +} From bdb11849b15848244050238f3c8dd3ce4362ab90 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Thu, 5 Dec 2019 12:45:57 +0100 Subject: [PATCH 17/46] Use .Jenkinsfile extension for IDE support Signed-off-by: Nicolas De Loof --- Jenkinsfile.release => Release.Jenkinsfile | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename Jenkinsfile.release => Release.Jenkinsfile (100%) diff --git a/Jenkinsfile.release b/Release.Jenkinsfile similarity index 100% rename from Jenkinsfile.release rename to Release.Jenkinsfile From 417d72ea3d9e0080dc0180c6a7c0da0f3e4a9840 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Thu, 5 Dec 2019 14:23:41 +0100 Subject: [PATCH 18/46] Compute checksum Signed-off-by: Nicolas De Loof --- Release.Jenkinsfile | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/Release.Jenkinsfile b/Release.Jenkinsfile index d089a8c98..8958ada61 100644 --- a/Release.Jenkinsfile +++ b/Release.Jenkinsfile @@ -61,11 +61,13 @@ pipeline { } steps { checkout scm - sh 'script/setup/osx-ci' + sh './script/setup/osx' sh 'tox -e py27,py37 -- tests/unit' sh './script/build/osx' - checksum("dist/docker-compose-Darwin-x86_64") - checksum("dist/docker-compose-Darwin-x86_64.tgz") + dir ('dist') { + sh 'openssl sha256 -r -out docker-compose-Darwin-x86_64.sha256 docker-compose-Darwin-x86_64' + sh 'openssl sha256 -r -out docker-compose-Darwin-x86_64.tgz.sha256 docker-compose-Darwin-x86_64.tgz' + } archiveArtifacts artifacts: 'dist/*', fingerprint: true dir("dist") { stash name: "bin-darwin" @@ -79,7 +81,9 @@ pipeline { steps { checkout scm sh ' ./script/build/linux' - checksum("dist/docker-compose-Linux-x86_64") + dir ('dist') { + sh 'openssl sha256 -r -out docker-compose-Linux-x86_64.sha256 docker-compose-Linux-x86_64' + } archiveArtifacts artifacts: 'dist/*', fingerprint: true dir("dist") { stash name: "bin-linux" @@ -97,7 +101,9 @@ pipeline { checkout scm bat 'tox.exe -e py27,py37 -- tests/unit' powershell '.\\script\\build\\windows.ps1' - checksum("dist/docker-compose-Windows-x86_64.exe") + dir ('dist') { + sh 'openssl sha256 -r -out docker-compose-Windows-x86_64.exe.sha256 docker-compose-Windows-x86_64.exe' + } archiveArtifacts artifacts: 'dist/*', fingerprint: true dir("dist") { stash name: "bin-win" @@ -245,7 +251,8 @@ def buildRuntimeImage(baseImage) { ansiColor('xterm') { sh """docker 
build -t ${imageName} \\ --build-arg BUILD_PLATFORM="${baseImage}" \\ - --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT.take(7)}" + --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT.take(7)}" \\ + ." """ } sh "mkdir -p dist" From 9c6db546e8a52970e7ee9e4f5d3971a91c67fd33 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Fri, 6 Dec 2019 09:47:30 +0100 Subject: [PATCH 19/46] Remove Circle-CI and AppVeyor config files Signed-off-by: Nicolas De Loof --- .circleci/config.yml | 66 -------------------------------------------- Release.Jenkinsfile | 2 +- appveyor.yml | 24 ---------------- 3 files changed, 1 insertion(+), 91 deletions(-) delete mode 100644 .circleci/config.yml delete mode 100644 appveyor.yml diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 36dd8d57e..000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,66 +0,0 @@ -version: 2 -jobs: - test: - macos: - xcode: "9.4.1" - steps: - - checkout - - run: - name: setup script - command: ./script/setup/osx - - run: - name: install tox - command: sudo pip install --upgrade tox==2.1.1 virtualenv==16.2.0 - - run: - name: unit tests - command: tox -e py27,py37 -- tests/unit - - build-osx-binary: - macos: - xcode: "9.4.1" - steps: - - checkout - - run: - name: upgrade python tools - command: sudo pip install --upgrade pip virtualenv==16.2.0 - - run: - name: setup script - command: DEPLOYMENT_TARGET=10.11 ./script/setup/osx - - run: - name: build script - command: ./script/build/osx - - store_artifacts: - path: dist/docker-compose-Darwin-x86_64 - destination: docker-compose-Darwin-x86_64 - - store_artifacts: - path: dist/docker-compose-Darwin-x86_64.tgz - destination: docker-compose-Darwin-x86_64.tgz - - deploy: - name: Deploy binary to bintray - command: | - OS_NAME=Darwin PKG_NAME=osx ./script/circle/bintray-deploy.sh - - build-linux-binary: - machine: - enabled: true - steps: - - checkout - - run: - name: build Linux binary - command: ./script/build/linux - - store_artifacts: - path: dist/docker-compose-Linux-x86_64 - destination: docker-compose-Linux-x86_64 - - deploy: - name: Deploy binary to bintray - command: | - OS_NAME=Linux PKG_NAME=linux ./script/circle/bintray-deploy.sh - - -workflows: - version: 2 - all: - jobs: - - test - - build-linux-binary - - build-osx-binary diff --git a/Release.Jenkinsfile b/Release.Jenkinsfile index 8958ada61..76a990e0d 100644 --- a/Release.Jenkinsfile +++ b/Release.Jenkinsfile @@ -252,7 +252,7 @@ def buildRuntimeImage(baseImage) { sh """docker build -t ${imageName} \\ --build-arg BUILD_PLATFORM="${baseImage}" \\ --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT.take(7)}" \\ - ." + . 
""" } sh "mkdir -p dist" diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 04a40e9c2..000000000 --- a/appveyor.yml +++ /dev/null @@ -1,24 +0,0 @@ - -version: '{branch}-{build}' - -install: - - "SET PATH=C:\\Python37-x64;C:\\Python37-x64\\Scripts;%PATH%" - - "python --version" - - "pip install tox==2.9.1 virtualenv==16.2.0" - -# Build the binary after tests -build: false - -test_script: - - "tox -e py27,py37 -- tests/unit" - - ps: ".\\script\\build\\windows.ps1" - -artifacts: - - path: .\dist\docker-compose-Windows-x86_64.exe - name: "Compose Windows binary" - -deploy: - - provider: Environment - name: master-builds - on: - branch: master From 1af385227722bfd32cb0456bf6edc3f708928504 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Fri, 6 Dec 2019 18:00:33 +0100 Subject: [PATCH 20/46] Generate changelog Signed-off-by: Nicolas De Loof --- Release.Jenkinsfile | 41 +++++++++++++++++++++++++++- script/release/generate_changelog.sh | 39 ++++++++++++++++++++++++++ 2 files changed, 79 insertions(+), 1 deletion(-) create mode 100755 script/release/generate_changelog.sh diff --git a/Release.Jenkinsfile b/Release.Jenkinsfile index 76a990e0d..aa205c6c8 100644 --- a/Release.Jenkinsfile +++ b/Release.Jenkinsfile @@ -53,6 +53,19 @@ pipeline { } } } + stage('Generate Changelog') { + agent { + label 'linux' + } + steps { + checkout scm + withCredentials([string(credentialsId: 'github-compose-release-test-token', variable: 'GITHUB_TOKEN')]) { + sh "./script/release/generate_changelog.sh" + } + archiveArtifacts artifacts: 'CHANGELOG.md' + stash( name: "changelog", includes: 'CHANGELOG.md' ) + } + } stage('Package') { parallel { stage('macosx binary') { @@ -153,7 +166,7 @@ pipeline { unstash "bin-darwin" unstash "bin-linux" unstash "bin-win" - githubRelease("docker/compose") + githubRelease() } } } @@ -272,3 +285,29 @@ def pushRuntimeImage(baseImage) { } } } + +def githubRelease() { + withCredentials([string(credentialsId: 'github-compose-release-test-token', variable: 'GITHUB_TOKEN')]) { + def prerelease = !( env.TAG_NAME ==~ /v[0-9\.]+/ ) + def data = """{ + \"tag_name\": \"${env.TAG_NAME}\", + \"name\": \"${env.TAG_NAME}\", + \"draft\": true, + \"prerelease\": ${prerelease}, + \"body\" : \"${changelog}\" + """ + echo $data + + def url = "https://api.github.com/repos/docker/compose/releases" + def upload_url = sh(returnStdout: true, script: """ + curl -sSf -H 'Authorization: token ${GITHUB_TOKEN}' -H 'Accept: application/json' -H 'Content-type: application/json' -X POST -d '$data' $url") \\ + | jq '.upload_url | .[:rindex("{")]' + """) + sh(""" + for f in * ; do + curl -sf -H 'Authorization: token ${GITHUB_TOKEN}' -H 'Accept: application/json' -H 'Content-type: application/octet-stream' \\ + -X POST --data-binary @\$f ${upload_url}?name=\$f; + done + """) + } +} diff --git a/script/release/generate_changelog.sh b/script/release/generate_changelog.sh new file mode 100755 index 000000000..783e74400 --- /dev/null +++ b/script/release/generate_changelog.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +set -e +set -x + +## Usage : +## changelog PREVIOUS_TAG..HEAD + +# configure refs so we get pull-requests metadata +git config --add remote.origin.fetch +refs/pull/*/head:refs/remotes/origin/pull/* +git fetch origin + +RANGE=${1:-"$(git describe --tags --abbrev=0)..HEAD"} +echo "Generate changelog for range ${RANGE}" +echo + +pullrequests() { + for commit in $(git log ${RANGE} --format='format:%H'); do + # Get the oldest remotes/origin/pull/* branch to include this commit, i.e. 
the one to introduce it + git branch -a --sort=committerdate --contains $commit --list 'origin/pull/*' | head -1 | cut -d'/' -f4 + done +} + +changes=$(pullrequests | uniq) + +echo "pull requests merged within range:" +echo $changes + +echo '#Features' > CHANGELOG.md +for pr in $changes; do + curl -fs -H "Authorization: token ${GITHUB_TOKEN}" https://api.github.com/repos/docker/compose/pulls/${pr} \ + | jq -r ' select( .labels[].name | contains("kind/feature") ) | "* "+.title' >> CHANGELOG.md +done + +echo '#Bugs' >> CHANGELOG.md +for pr in $changes; do + curl -fs -H "Authorization: token ${GITHUB_TOKEN}" https://api.github.com/repos/docker/compose/pulls/${pr} \ + | jq -r ' select( .labels[].name | contains("kind/bug") ) | "* "+.title' >> CHANGELOG.md +done From 0e826efee5b4b14f28a19666073b22b5837e622e Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Sat, 7 Dec 2019 14:01:07 +0100 Subject: [PATCH 21/46] attempt to fix windows build Signed-off-by: Nicolas De Loof --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 1627cca9e..fccd7c4ce 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,6 +20,7 @@ PySocks==1.6.7 PyYAML==4.2b1 requests==2.22.0 six==1.12.0 +subprocess32==3.5.4; python_version < '3.2' texttable==1.6.2 urllib3==1.24.2; python_version == '3.3' websocket-client==0.32.0 From d6c13b69c32173ac264a6ace6454058ab9132fe9 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Mon, 9 Dec 2019 08:22:04 +0100 Subject: [PATCH 22/46] compute sha256sum windows nodes don't have openssl installed:'( Signed-off-by: Nicolas De Loof --- Release.Jenkinsfile | 34 ++++++++++++++-------------------- 1 file changed, 14 insertions(+), 20 deletions(-) diff --git a/Release.Jenkinsfile b/Release.Jenkinsfile index aa205c6c8..def799072 100644 --- a/Release.Jenkinsfile +++ b/Release.Jenkinsfile @@ -78,8 +78,8 @@ pipeline { sh 'tox -e py27,py37 -- tests/unit' sh './script/build/osx' dir ('dist') { - sh 'openssl sha256 -r -out docker-compose-Darwin-x86_64.sha256 docker-compose-Darwin-x86_64' - sh 'openssl sha256 -r -out docker-compose-Darwin-x86_64.tgz.sha256 docker-compose-Darwin-x86_64.tgz' + checksum('docker-compose-Darwin-x86_64') + checksum('docker-compose-Darwin-x86_64.tgz') } archiveArtifacts artifacts: 'dist/*', fingerprint: true dir("dist") { @@ -95,7 +95,7 @@ pipeline { checkout scm sh ' ./script/build/linux' dir ('dist') { - sh 'openssl sha256 -r -out docker-compose-Linux-x86_64.sha256 docker-compose-Linux-x86_64' + checksum('docker-compose-Linux-x86_64') } archiveArtifacts artifacts: 'dist/*', fingerprint: true dir("dist") { @@ -115,7 +115,7 @@ pipeline { bat 'tox.exe -e py27,py37 -- tests/unit' powershell '.\\script\\build\\windows.ps1' dir ('dist') { - sh 'openssl sha256 -r -out docker-compose-Windows-x86_64.exe.sha256 docker-compose-Windows-x86_64.exe' + checksum('docker-compose-Windows-x86_64.exe') } archiveArtifacts artifacts: 'dist/*', fingerprint: true dir("dist") { @@ -166,6 +166,7 @@ pipeline { unstash "bin-darwin" unstash "bin-linux" unstash "bin-win" + unstash "changelog" githubRelease() } } @@ -182,22 +183,6 @@ pipeline { archiveArtifacts artifacts: 'dist/*', fingerprint: true } } - stage('Publishing binaries to Bintray') { - agent { - label 'linux' - } - steps { - checkout scm - dir("dist") { - unstash "bin-darwin" - unstash "bin-linux" - unstash "bin-win" - } - withCredentials([usernamePassword(credentialsId: 'bintray-docker-dsg-cibot', usernameVariable: 'BINTRAY_USER', passwordVariable: 'BINTRAY_TOKEN')]) { - sh 
'./script/release/push-binaries' - } - } - } } } } @@ -289,6 +274,7 @@ def pushRuntimeImage(baseImage) { def githubRelease() { withCredentials([string(credentialsId: 'github-compose-release-test-token', variable: 'GITHUB_TOKEN')]) { def prerelease = !( env.TAG_NAME ==~ /v[0-9\.]+/ ) + changelog = readFile "CHANGELOG.md" def data = """{ \"tag_name\": \"${env.TAG_NAME}\", \"name\": \"${env.TAG_NAME}\", @@ -311,3 +297,11 @@ def githubRelease() { """) } } + +def checksum(filepath) { + if (isUnix()) { + sh "openssl sha256 -r -out ${filepath}.sha256 ${filepath}" + } else { + powershell "(Get-FileHash -Path ${filepath} -Algorithm SHA256 | % hash) + ' *${filepath}' > ${filepath}.sha256" + } +} From 31396786baf768acad4ec121976c31b2d6775885 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Mon, 9 Dec 2019 16:41:05 +0100 Subject: [PATCH 23/46] publish package on PyPI Signed-off-by: Nicolas De Loof --- Release.Jenkinsfile | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/Release.Jenkinsfile b/Release.Jenkinsfile index def799072..c4d5ad1a5 100644 --- a/Release.Jenkinsfile +++ b/Release.Jenkinsfile @@ -178,9 +178,17 @@ pipeline { steps { checkout scm withCredentials([[$class: "FileBinding", credentialsId: 'pypirc-docker-dsg-cibot', variable: 'PYPIRC']]) { - sh './script/release/python-package' + sh """ + virtualenv venv-publish + source venv-publish/bin/activate + python setup.py sdist bdist_wheel + pip install twine + twine upload --config-file ${PYPIRC} ./dist/docker-compose-${env.TAG_NAME}.tar.gz ./dist/docker_compose-${env.TAG_NAME}-py2.py3-none-any.whl + """ } - archiveArtifacts artifacts: 'dist/*', fingerprint: true + } + post { + sh 'deactivate; rm -rf venv-publish' } } } From 75c45c27dfc2429cf985855a7bd1e0cd2f195480 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 8 Jan 2020 09:17:04 +0000 Subject: [PATCH 24/46] Bump pysocks from 1.6.7 to 1.7.1 Bumps [pysocks](https://github.com/Anorov/PySocks) from 1.6.7 to 1.7.1. - [Release notes](https://github.com/Anorov/PySocks/releases) - [Changelog](https://github.com/Anorov/PySocks/blob/master/CHANGELOG.md) - [Commits](https://github.com/Anorov/PySocks/commits) Signed-off-by: dependabot-preview[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index fccd7c4ce..161404e52 100644 --- a/requirements.txt +++ b/requirements.txt @@ -16,7 +16,7 @@ jsonschema==3.0.1 paramiko==2.6.0 pypiwin32==219; sys_platform == 'win32' and python_version < '3.6' pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6' -PySocks==1.6.7 +PySocks==1.7.1 PyYAML==4.2b1 requests==2.22.0 six==1.12.0 From f2f6b3035099d7f42a799f11f1032df822594503 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 8 Jan 2020 09:17:12 +0000 Subject: [PATCH 25/46] Bump websocket-client from 0.32.0 to 0.57.0 Bumps [websocket-client](https://github.com/websocket-client/websocket-client) from 0.32.0 to 0.57.0. 
- [Release notes](https://github.com/websocket-client/websocket-client/releases) - [Changelog](https://github.com/websocket-client/websocket-client/blob/master/ChangeLog) - [Commits](https://github.com/websocket-client/websocket-client/compare/v0.32.0...v0.57.0) Signed-off-by: dependabot-preview[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index fccd7c4ce..0d3bdc98f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,4 +23,4 @@ six==1.12.0 subprocess32==3.5.4; python_version < '3.2' texttable==1.6.2 urllib3==1.24.2; python_version == '3.3' -websocket-client==0.32.0 +websocket-client==0.57.0 From 81e3566ebd71ea06497c2f2882d503c852516469 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 8 Jan 2020 14:42:12 +0000 Subject: [PATCH 26/46] Bump urllib3 from 1.24.2 to 1.25.7 Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.2 to 1.25.7. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/master/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/1.24.2...1.25.7) Signed-off-by: dependabot-preview[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 0d3bdc98f..cffa88958 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,5 +22,5 @@ requests==2.22.0 six==1.12.0 subprocess32==3.5.4; python_version < '3.2' texttable==1.6.2 -urllib3==1.24.2; python_version == '3.3' +urllib3==1.25.7; python_version == '3.3' websocket-client==0.57.0 From 4ace98acbe4fe98fa9190b445a02c371f3498675 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 8 Jan 2020 15:01:58 +0000 Subject: [PATCH 27/46] Bump jsonschema from 3.0.1 to 3.2.0 Bumps [jsonschema](https://github.com/Julian/jsonschema) from 3.0.1 to 3.2.0. - [Release notes](https://github.com/Julian/jsonschema/releases) - [Changelog](https://github.com/Julian/jsonschema/blob/master/CHANGELOG.rst) - [Commits](https://github.com/Julian/jsonschema/compare/v3.0.1...v3.2.0) Signed-off-by: dependabot-preview[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 128b18cf6..9260d29a0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,7 +12,7 @@ enum34==1.1.6; python_version < '3.4' functools32==3.2.3.post2; python_version < '3.2' idna==2.8 ipaddress==1.0.18 -jsonschema==3.0.1 +jsonschema==3.2.0 paramiko==2.6.0 pypiwin32==219; sys_platform == 'win32' and python_version < '3.6' pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6' From 33eeef41abc6c463fa494db6766395a80f37d9d9 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 6 Dec 2019 16:49:45 +0100 Subject: [PATCH 28/46] Remove "bundle" subcommand and support for DAB files Deploying stacks using the "Docker Application Bundle" (`.dab`) file format was introduced as an experimental feature in Docker 1.13 / 17.03, but superseded by support for Docker Compose files in the CLI. With no development being done on this feature, and no active use of the file format, support for the DAB file format and the top-level `docker deploy` command (hidden by default in 19.03), will be removed from the CLI, in favour of `docker stack deploy` using compose files. 
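For reference, the replacement workflow deploys a compose file directly with the stack commands — a rough sketch, assuming a swarm-mode manager and a placeholder stack name (`mystack`):

    # previously: docker-compose bundle && docker deploy <name>.dab
    docker stack deploy --compose-file docker-compose.yml mystack
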
This patch removes the `docker-compose bundle` subcommand from Docker Compose, which was used to convert compose files into DAB files (and given the above, will no longer be needed). Signed-off-by: Sebastiaan van Stijn --- compose/bundle.py | 275 ------------------ compose/cli/main.py | 49 +--- compose/project.py | 89 +++++- contrib/completion/bash/docker-compose | 13 - contrib/completion/zsh/_docker-compose | 6 - tests/acceptance/cli_test.py | 26 -- .../bundle-with-digests/docker-compose.yml | 9 - tests/unit/bundle_test.py | 233 --------------- 8 files changed, 95 insertions(+), 605 deletions(-) delete mode 100644 compose/bundle.py delete mode 100644 tests/fixtures/bundle-with-digests/docker-compose.yml delete mode 100644 tests/unit/bundle_test.py diff --git a/compose/bundle.py b/compose/bundle.py deleted file mode 100644 index 77cb37aa9..000000000 --- a/compose/bundle.py +++ /dev/null @@ -1,275 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - -import json -import logging - -import six -from docker.utils import split_command -from docker.utils.ports import split_port - -from .cli.errors import UserError -from .config.serialize import denormalize_config -from .network import get_network_defs_for_service -from .service import format_environment -from .service import NoSuchImageError -from .service import parse_repository_tag - - -log = logging.getLogger(__name__) - - -SERVICE_KEYS = { - 'working_dir': 'WorkingDir', - 'user': 'User', - 'labels': 'Labels', -} - -IGNORED_KEYS = {'build'} - -SUPPORTED_KEYS = { - 'image', - 'ports', - 'expose', - 'networks', - 'command', - 'environment', - 'entrypoint', -} | set(SERVICE_KEYS) - -VERSION = '0.1' - - -class NeedsPush(Exception): - def __init__(self, image_name): - self.image_name = image_name - - -class NeedsPull(Exception): - def __init__(self, image_name, service_name): - self.image_name = image_name - self.service_name = service_name - - -class MissingDigests(Exception): - def __init__(self, needs_push, needs_pull): - self.needs_push = needs_push - self.needs_pull = needs_pull - - -def serialize_bundle(config, image_digests): - return json.dumps(to_bundle(config, image_digests), indent=2, sort_keys=True) - - -def get_image_digests(project, allow_push=False): - digests = {} - needs_push = set() - needs_pull = set() - - for service in project.services: - try: - digests[service.name] = get_image_digest( - service, - allow_push=allow_push, - ) - except NeedsPush as e: - needs_push.add(e.image_name) - except NeedsPull as e: - needs_pull.add(e.service_name) - - if needs_push or needs_pull: - raise MissingDigests(needs_push, needs_pull) - - return digests - - -def get_image_digest(service, allow_push=False): - if 'image' not in service.options: - raise UserError( - "Service '{s.name}' doesn't define an image tag. An image name is " - "required to generate a proper image digest for the bundle. 
Specify " - "an image repo and tag with the 'image' option.".format(s=service)) - - _, _, separator = parse_repository_tag(service.options['image']) - # Compose file already uses a digest, no lookup required - if separator == '@': - return service.options['image'] - - digest = get_digest(service) - - if digest: - return digest - - if 'build' not in service.options: - raise NeedsPull(service.image_name, service.name) - - if not allow_push: - raise NeedsPush(service.image_name) - - return push_image(service) - - -def get_digest(service): - digest = None - try: - image = service.image() - # TODO: pick a digest based on the image tag if there are multiple - # digests - if image['RepoDigests']: - digest = image['RepoDigests'][0] - except NoSuchImageError: - try: - # Fetch the image digest from the registry - distribution = service.get_image_registry_data() - - if distribution['Descriptor']['digest']: - digest = '{image_name}@{digest}'.format( - image_name=service.image_name, - digest=distribution['Descriptor']['digest'] - ) - except NoSuchImageError: - raise UserError( - "Digest not found for service '{service}'. " - "Repository does not exist or may require 'docker login'" - .format(service=service.name)) - return digest - - -def push_image(service): - try: - digest = service.push() - except Exception: - log.error( - "Failed to push image for service '{s.name}'. Please use an " - "image tag that can be pushed to a Docker " - "registry.".format(s=service)) - raise - - if not digest: - raise ValueError("Failed to get digest for %s" % service.name) - - repo, _, _ = parse_repository_tag(service.options['image']) - identifier = '{repo}@{digest}'.format(repo=repo, digest=digest) - - # only do this if RepoDigests isn't already populated - image = service.image() - if not image['RepoDigests']: - # Pull by digest so that image['RepoDigests'] is populated for next time - # and we don't have to pull/push again - service.client.pull(identifier) - log.info("Stored digest for {}".format(service.image_name)) - - return identifier - - -def to_bundle(config, image_digests): - if config.networks: - log.warning("Unsupported top level key 'networks' - ignoring") - - if config.volumes: - log.warning("Unsupported top level key 'volumes' - ignoring") - - config = denormalize_config(config) - - return { - 'Version': VERSION, - 'Services': { - name: convert_service_to_bundle( - name, - service_dict, - image_digests[name], - ) - for name, service_dict in config['services'].items() - }, - } - - -def convert_service_to_bundle(name, service_dict, image_digest): - container_config = {'Image': image_digest} - - for key, value in service_dict.items(): - if key in IGNORED_KEYS: - continue - - if key not in SUPPORTED_KEYS: - log.warning("Unsupported key '{}' in services.{} - ignoring".format(key, name)) - continue - - if key == 'environment': - container_config['Env'] = format_environment({ - envkey: envvalue for envkey, envvalue in value.items() - if envvalue - }) - continue - - if key in SERVICE_KEYS: - container_config[SERVICE_KEYS[key]] = value - continue - - set_command_and_args( - container_config, - service_dict.get('entrypoint', []), - service_dict.get('command', [])) - container_config['Networks'] = make_service_networks(name, service_dict) - - ports = make_port_specs(service_dict) - if ports: - container_config['Ports'] = ports - - return container_config - - -# See https://github.com/docker/swarmkit/blob/agent/exec/container/container.go#L95 -def set_command_and_args(config, entrypoint, command): - if 
isinstance(entrypoint, six.string_types): - entrypoint = split_command(entrypoint) - if isinstance(command, six.string_types): - command = split_command(command) - - if entrypoint: - config['Command'] = entrypoint + command - return - - if command: - config['Args'] = command - - -def make_service_networks(name, service_dict): - networks = [] - - for network_name, network_def in get_network_defs_for_service(service_dict).items(): - for key in network_def.keys(): - log.warning( - "Unsupported key '{}' in services.{}.networks.{} - ignoring" - .format(key, name, network_name)) - - networks.append(network_name) - - return networks - - -def make_port_specs(service_dict): - ports = [] - - internal_ports = [ - internal_port - for port_def in service_dict.get('ports', []) - for internal_port in split_port(port_def)[0] - ] - - internal_ports += service_dict.get('expose', []) - - for internal_port in internal_ports: - spec = make_port_spec(internal_port) - if spec not in ports: - ports.append(spec) - - return ports - - -def make_port_spec(value): - components = six.text_type(value).partition('/') - return { - 'Protocol': components[2] or 'tcp', - 'Port': int(components[0]), - } diff --git a/compose/cli/main.py b/compose/cli/main.py index fde4fd035..200d4eeac 100644 --- a/compose/cli/main.py +++ b/compose/cli/main.py @@ -15,14 +15,12 @@ from distutils.spawn import find_executable from inspect import getdoc from operator import attrgetter -import docker +import docker.errors +import docker.utils from . import errors from . import signals from .. import __version__ -from ..bundle import get_image_digests -from ..bundle import MissingDigests -from ..bundle import serialize_bundle from ..config import ConfigurationError from ..config import parse_environment from ..config import parse_labels @@ -34,6 +32,8 @@ from ..const import COMPOSEFILE_V2_2 as V2_2 from ..const import IS_WINDOWS_PLATFORM from ..errors import StreamParseError from ..progress_stream import StreamOutputError +from ..project import get_image_digests +from ..project import MissingDigests from ..project import NoSuchService from ..project import OneOffFilter from ..project import ProjectError @@ -213,7 +213,6 @@ class TopLevelCommand(object): Commands: build Build or rebuild services - bundle Generate a Docker bundle from the Compose file config Validate and view the Compose file create Create services down Stop and remove containers, networks, images, and volumes @@ -304,38 +303,6 @@ class TopLevelCommand(object): progress=options.get('--progress'), ) - def bundle(self, options): - """ - Generate a Distributed Application Bundle (DAB) from the Compose file. - - Images must have digests stored, which requires interaction with a - Docker registry. If digests aren't stored for all images, you can fetch - them with `docker-compose pull` or `docker-compose push`. To push images - automatically when bundling, pass `--push-images`. Only services with - a `build` option specified will have their images pushed. - - Usage: bundle [options] - - Options: - --push-images Automatically push images for any services - which have a `build` option specified. - - -o, --output PATH Path to write the bundle file to. - Defaults to ".dab". 
- """ - compose_config = get_config_from_options('.', self.toplevel_options) - - output = options["--output"] - if not output: - output = "{}.dab".format(self.project.name) - - image_digests = image_digests_for_project(self.project, options['--push-images']) - - with open(output, 'w') as f: - f.write(serialize_bundle(compose_config, image_digests)) - - log.info("Wrote bundle to {}".format(output)) - def config(self, options): """ Validate and view the Compose file. @@ -1216,12 +1183,10 @@ def timeout_from_opts(options): return None if timeout is None else int(timeout) -def image_digests_for_project(project, allow_push=False): +def image_digests_for_project(project): try: - return get_image_digests( - project, - allow_push=allow_push - ) + return get_image_digests(project) + except MissingDigests as e: def list_images(images): return "\n".join(" {}".format(name) for name in sorted(images)) diff --git a/compose/project.py b/compose/project.py index d7405defd..a7770ddc9 100644 --- a/compose/project.py +++ b/compose/project.py @@ -16,6 +16,7 @@ from docker.errors import NotFound from docker.utils import version_lt from . import parallel +from .cli.errors import UserError from .config import ConfigurationError from .config.config import V1 from .config.sort_services import get_container_name_from_network_mode @@ -33,6 +34,7 @@ from .service import ContainerNetworkMode from .service import ContainerPidMode from .service import ConvergenceStrategy from .service import NetworkMode +from .service import NoSuchImageError from .service import parse_repository_tag from .service import PidMode from .service import Service @@ -42,7 +44,6 @@ from .utils import microseconds_from_time_nano from .utils import truncate_string from .volume import ProjectVolumes - log = logging.getLogger(__name__) @@ -381,6 +382,7 @@ class Project(object): def build_service(service): service.build(no_cache, pull, force_rm, memory, build_args, gzip, rm, silent, cli, progress) + if parallel_build: _, errors = parallel.parallel_execute( services, @@ -844,6 +846,91 @@ def get_secrets(service, service_secrets, secret_defs): return secrets +def get_image_digests(project): + digests = {} + needs_push = set() + needs_pull = set() + + for service in project.services: + try: + digests[service.name] = get_image_digest(service) + except NeedsPush as e: + needs_push.add(e.image_name) + except NeedsPull as e: + needs_pull.add(e.service_name) + + if needs_push or needs_pull: + raise MissingDigests(needs_push, needs_pull) + + return digests + + +def get_image_digest(service): + if 'image' not in service.options: + raise UserError( + "Service '{s.name}' doesn't define an image tag. An image name is " + "required to generate a proper image digest. 
Specify an image repo " + "and tag with the 'image' option.".format(s=service)) + + _, _, separator = parse_repository_tag(service.options['image']) + # Compose file already uses a digest, no lookup required + if separator == '@': + return service.options['image'] + + digest = get_digest(service) + + if digest: + return digest + + if 'build' not in service.options: + raise NeedsPull(service.image_name, service.name) + + raise NeedsPush(service.image_name) + + +def get_digest(service): + digest = None + try: + image = service.image() + # TODO: pick a digest based on the image tag if there are multiple + # digests + if image['RepoDigests']: + digest = image['RepoDigests'][0] + except NoSuchImageError: + try: + # Fetch the image digest from the registry + distribution = service.get_image_registry_data() + + if distribution['Descriptor']['digest']: + digest = '{image_name}@{digest}'.format( + image_name=service.image_name, + digest=distribution['Descriptor']['digest'] + ) + except NoSuchImageError: + raise UserError( + "Digest not found for service '{service}'. " + "Repository does not exist or may require 'docker login'" + .format(service=service.name)) + return digest + + +class MissingDigests(Exception): + def __init__(self, needs_push, needs_pull): + self.needs_push = needs_push + self.needs_pull = needs_pull + + +class NeedsPush(Exception): + def __init__(self, image_name): + self.image_name = image_name + + +class NeedsPull(Exception): + def __init__(self, image_name, service_name): + self.image_name = image_name + self.service_name = service_name + + class NoSuchService(Exception): def __init__(self, name): if isinstance(name, six.binary_type): diff --git a/contrib/completion/bash/docker-compose b/contrib/completion/bash/docker-compose index 6dc47799d..23c48b7f4 100644 --- a/contrib/completion/bash/docker-compose +++ b/contrib/completion/bash/docker-compose @@ -126,18 +126,6 @@ _docker_compose_build() { } -_docker_compose_bundle() { - case "$prev" in - --output|-o) - _filedir - return - ;; - esac - - COMPREPLY=( $( compgen -W "--push-images --help --output -o" -- "$cur" ) ) -} - - _docker_compose_config() { case "$prev" in --hash) @@ -581,7 +569,6 @@ _docker_compose() { local commands=( build - bundle config create down diff --git a/contrib/completion/zsh/_docker-compose b/contrib/completion/zsh/_docker-compose index faf405988..277bf0d3c 100755 --- a/contrib/completion/zsh/_docker-compose +++ b/contrib/completion/zsh/_docker-compose @@ -121,12 +121,6 @@ __docker-compose_subcommand() { '--parallel[Build images in parallel.]' \ '*:services:__docker-compose_services_from_build' && ret=0 ;; - (bundle) - _arguments \ - $opts_help \ - '--push-images[Automatically push images for any services which have a `build` option specified.]' \ - '(--output -o)'{--output,-o}'[Path to write the bundle file to. 
Defaults to ".dab".]:file:_files' && ret=0 - ;; (config) _arguments \ $opts_help \ diff --git a/tests/acceptance/cli_test.py b/tests/acceptance/cli_test.py index b729e7d76..ffa055744 100644 --- a/tests/acceptance/cli_test.py +++ b/tests/acceptance/cli_test.py @@ -855,32 +855,6 @@ services: ) assert 'Favorite Touhou Character: hong.meiling' in result.stdout - def test_bundle_with_digests(self): - self.base_dir = 'tests/fixtures/bundle-with-digests/' - tmpdir = pytest.ensuretemp('cli_test_bundle') - self.addCleanup(tmpdir.remove) - filename = str(tmpdir.join('example.dab')) - - self.dispatch(['bundle', '--output', filename]) - with open(filename, 'r') as fh: - bundle = json.load(fh) - - assert bundle == { - 'Version': '0.1', - 'Services': { - 'web': { - 'Image': ('dockercloud/hello-world@sha256:fe79a2cfbd17eefc3' - '44fb8419420808df95a1e22d93b7f621a7399fd1e9dca1d'), - 'Networks': ['default'], - }, - 'redis': { - 'Image': ('redis@sha256:a84cb8f53a70e19f61ff2e1d5e73fb7ae62d' - '374b2b7392de1e7d77be26ef8f7b'), - 'Networks': ['default'], - } - }, - } - def test_build_override_dir(self): self.base_dir = 'tests/fixtures/build-path-override-dir' self.override_dir = os.path.abspath('tests/fixtures') diff --git a/tests/fixtures/bundle-with-digests/docker-compose.yml b/tests/fixtures/bundle-with-digests/docker-compose.yml deleted file mode 100644 index b70135120..000000000 --- a/tests/fixtures/bundle-with-digests/docker-compose.yml +++ /dev/null @@ -1,9 +0,0 @@ - -version: '2.0' - -services: - web: - image: dockercloud/hello-world@sha256:fe79a2cfbd17eefc344fb8419420808df95a1e22d93b7f621a7399fd1e9dca1d - - redis: - image: redis@sha256:a84cb8f53a70e19f61ff2e1d5e73fb7ae62d374b2b7392de1e7d77be26ef8f7b diff --git a/tests/unit/bundle_test.py b/tests/unit/bundle_test.py deleted file mode 100644 index 8faebb7f1..000000000 --- a/tests/unit/bundle_test.py +++ /dev/null @@ -1,233 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - -import docker -import pytest - -from .. 
import mock -from compose import bundle -from compose import service -from compose.cli.errors import UserError -from compose.config.config import Config -from compose.const import COMPOSEFILE_V2_0 as V2_0 -from compose.service import NoSuchImageError - - -@pytest.fixture -def mock_service(): - return mock.create_autospec( - service.Service, - client=mock.create_autospec(docker.APIClient), - options={}) - - -def test_get_image_digest_exists(mock_service): - mock_service.options['image'] = 'abcd' - mock_service.image.return_value = {'RepoDigests': ['digest1']} - digest = bundle.get_image_digest(mock_service) - assert digest == 'digest1' - - -def test_get_image_digest_image_uses_digest(mock_service): - mock_service.options['image'] = image_id = 'redis@sha256:digest' - - digest = bundle.get_image_digest(mock_service) - assert digest == image_id - assert not mock_service.image.called - - -def test_get_image_digest_from_repository(mock_service): - mock_service.options['image'] = 'abcd' - mock_service.image_name = 'abcd' - mock_service.image.side_effect = NoSuchImageError(None) - mock_service.get_image_registry_data.return_value = {'Descriptor': {'digest': 'digest'}} - - digest = bundle.get_image_digest(mock_service) - assert digest == 'abcd@digest' - - -def test_get_image_digest_no_image(mock_service): - with pytest.raises(UserError) as exc: - bundle.get_image_digest(service.Service(name='theservice')) - - assert "doesn't define an image tag" in exc.exconly() - - -def test_push_image_with_saved_digest(mock_service): - mock_service.options['build'] = '.' - mock_service.options['image'] = image_id = 'abcd' - mock_service.push.return_value = expected = 'sha256:thedigest' - mock_service.image.return_value = {'RepoDigests': ['digest1']} - - digest = bundle.push_image(mock_service) - assert digest == image_id + '@' + expected - - mock_service.push.assert_called_once_with() - assert not mock_service.client.push.called - - -def test_push_image(mock_service): - mock_service.options['build'] = '.' 
- mock_service.options['image'] = image_id = 'abcd' - mock_service.push.return_value = expected = 'sha256:thedigest' - mock_service.image.return_value = {'RepoDigests': []} - - digest = bundle.push_image(mock_service) - assert digest == image_id + '@' + expected - - mock_service.push.assert_called_once_with() - mock_service.client.pull.assert_called_once_with(digest) - - -def test_to_bundle(): - image_digests = {'a': 'aaaa', 'b': 'bbbb'} - services = [ - {'name': 'a', 'build': '.', }, - {'name': 'b', 'build': './b'}, - ] - config = Config( - version=V2_0, - services=services, - volumes={'special': {}}, - networks={'extra': {}}, - secrets={}, - configs={} - ) - - with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log: - output = bundle.to_bundle(config, image_digests) - - assert mock_log.mock_calls == [ - mock.call("Unsupported top level key 'networks' - ignoring"), - mock.call("Unsupported top level key 'volumes' - ignoring"), - ] - - assert output == { - 'Version': '0.1', - 'Services': { - 'a': {'Image': 'aaaa', 'Networks': ['default']}, - 'b': {'Image': 'bbbb', 'Networks': ['default']}, - } - } - - -def test_convert_service_to_bundle(): - name = 'theservice' - image_digest = 'thedigest' - service_dict = { - 'ports': ['80'], - 'expose': ['1234'], - 'networks': {'extra': {}}, - 'command': 'foo', - 'entrypoint': 'entry', - 'environment': {'BAZ': 'ENV'}, - 'build': '.', - 'working_dir': '/tmp', - 'user': 'root', - 'labels': {'FOO': 'LABEL'}, - 'privileged': True, - } - - with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log: - config = bundle.convert_service_to_bundle(name, service_dict, image_digest) - - mock_log.assert_called_once_with( - "Unsupported key 'privileged' in services.theservice - ignoring") - - assert config == { - 'Image': image_digest, - 'Ports': [ - {'Protocol': 'tcp', 'Port': 80}, - {'Protocol': 'tcp', 'Port': 1234}, - ], - 'Networks': ['extra'], - 'Command': ['entry', 'foo'], - 'Env': ['BAZ=ENV'], - 'WorkingDir': '/tmp', - 'User': 'root', - 'Labels': {'FOO': 'LABEL'}, - } - - -def test_set_command_and_args_none(): - config = {} - bundle.set_command_and_args(config, [], []) - assert config == {} - - -def test_set_command_and_args_from_command(): - config = {} - bundle.set_command_and_args(config, [], "echo ok") - assert config == {'Args': ['echo', 'ok']} - - -def test_set_command_and_args_from_entrypoint(): - config = {} - bundle.set_command_and_args(config, "echo entry", []) - assert config == {'Command': ['echo', 'entry']} - - -def test_set_command_and_args_from_both(): - config = {} - bundle.set_command_and_args(config, "echo entry", ["extra", "arg"]) - assert config == {'Command': ['echo', 'entry', "extra", "arg"]} - - -def test_make_service_networks_default(): - name = 'theservice' - service_dict = {} - - with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log: - networks = bundle.make_service_networks(name, service_dict) - - assert not mock_log.called - assert networks == ['default'] - - -def test_make_service_networks(): - name = 'theservice' - service_dict = { - 'networks': { - 'foo': { - 'aliases': ['one', 'two'], - }, - 'bar': {} - }, - } - - with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log: - networks = bundle.make_service_networks(name, service_dict) - - mock_log.assert_called_once_with( - "Unsupported key 'aliases' in services.theservice.networks.foo - ignoring") - assert sorted(networks) == sorted(service_dict['networks']) - - -def test_make_port_specs(): - service_dict = { - 
'expose': ['80', '500/udp'], - 'ports': [ - '400:80', - '222', - '127.0.0.1:8001:8001', - '127.0.0.1:5000-5001:3000-3001'], - } - port_specs = bundle.make_port_specs(service_dict) - assert port_specs == [ - {'Protocol': 'tcp', 'Port': 80}, - {'Protocol': 'tcp', 'Port': 222}, - {'Protocol': 'tcp', 'Port': 8001}, - {'Protocol': 'tcp', 'Port': 3000}, - {'Protocol': 'tcp', 'Port': 3001}, - {'Protocol': 'udp', 'Port': 500}, - ] - - -def test_make_port_spec_with_protocol(): - port_spec = bundle.make_port_spec("5000/udp") - assert port_spec == {'Protocol': 'udp', 'Port': 5000} - - -def test_make_port_spec_default_protocol(): - port_spec = bundle.make_port_spec("50000") - assert port_spec == {'Protocol': 'tcp', 'Port': 50000} From fb14f41ddb1479d9990e147c9d18bf047928164d Mon Sep 17 00:00:00 2001 From: Lumir Balhar Date: Tue, 27 Aug 2019 12:24:05 +0200 Subject: [PATCH 29/46] Move to the latest pytest versions for Python 2 and 3 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Lumír Balhar --- requirements-dev.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index e40cbc43c..496cefe15 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,5 +2,6 @@ coverage==4.5.4 ddt==1.2.2 flake8==3.7.9 mock==3.0.5 -pytest==3.6.3 +pytest==5.1.1; python_version >= '3.5' +pytest==4.6.5; python_version < '3.5' pytest-cov==2.8.1 From 60458c8ae7c3d99a4dd408bc7fbced3b4a8cd7de Mon Sep 17 00:00:00 2001 From: Lumir Balhar Date: Tue, 27 Aug 2019 12:25:20 +0200 Subject: [PATCH 30/46] Implement custom context manager for changing CWD MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Lumír Balhar --- tests/helpers.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/tests/helpers.py b/tests/helpers.py index 327715ee2..1365c5bcf 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -1,6 +1,7 @@ from __future__ import absolute_import from __future__ import unicode_literals +import contextlib import os from compose.config.config import ConfigDetails @@ -55,3 +56,17 @@ def create_host_file(client, filename): content = fh.read() return create_custom_host_file(client, filename, content) + + +@contextlib.contextmanager +def cd(path): + """ + A context manager which changes the working directory to the given + path, and then changes it back to its previous value on exit. 
+ """ + prev_cwd = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(prev_cwd) From 73cc89c15ffb808047b1562e35a4c61c86c9a572 Mon Sep 17 00:00:00 2001 From: Lumir Balhar Date: Tue, 27 Aug 2019 12:26:01 +0200 Subject: [PATCH 31/46] Use stdlib modules instead of deprecated pytest fixtures MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Lumír Balhar --- tests/integration/project_test.py | 8 +- tests/integration/state_test.py | 29 ++- tests/unit/config/config_test.py | 317 ++++++++++++++------------ tests/unit/config/environment_test.py | 15 +- 4 files changed, 196 insertions(+), 173 deletions(-) diff --git a/tests/integration/project_test.py b/tests/integration/project_test.py index 4c88f3d6b..cb620a8c9 100644 --- a/tests/integration/project_test.py +++ b/tests/integration/project_test.py @@ -8,7 +8,6 @@ import random import shutil import tempfile -import py import pytest from docker.errors import APIError from docker.errors import NotFound @@ -16,6 +15,7 @@ from docker.errors import NotFound from .. import mock from ..helpers import build_config as load_config from ..helpers import BUSYBOX_IMAGE_WITH_TAG +from ..helpers import cd from ..helpers import create_host_file from .testcases import DockerClientTestCase from .testcases import SWARM_SKIP_CONTAINERS_ALL @@ -1329,9 +1329,9 @@ class ProjectTest(DockerClientTestCase): }) details = config.ConfigDetails('.', [base_file, override_file]) - tmpdir = py.test.ensuretemp('logging_test') - self.addCleanup(tmpdir.remove) - with tmpdir.as_cwd(): + tmpdir = tempfile.mkdtemp('logging_test') + self.addCleanup(shutil.rmtree, tmpdir) + with cd(tmpdir): config_data = config.load(details) project = Project.from_config( name='composetest', config_data=config_data, client=self.client diff --git a/tests/integration/state_test.py b/tests/integration/state_test.py index 714945ee5..492de7b8a 100644 --- a/tests/integration/state_test.py +++ b/tests/integration/state_test.py @@ -6,8 +6,10 @@ from __future__ import absolute_import from __future__ import unicode_literals import copy +import os +import shutil +import tempfile -import py from docker.errors import ImageNotFound from ..helpers import BUSYBOX_IMAGE_WITH_TAG @@ -426,29 +428,32 @@ class ServiceStateTest(DockerClientTestCase): @no_cluster('Can not guarantee the build will be run on the same node the service is deployed') def test_trigger_recreate_with_build(self): - context = py.test.ensuretemp('test_trigger_recreate_with_build') - self.addCleanup(context.remove) + context = tempfile.mkdtemp('test_trigger_recreate_with_build') + self.addCleanup(shutil.rmtree, context) base_image = "FROM busybox\nLABEL com.docker.compose.test_image=true\n" - dockerfile = context.join('Dockerfile') - dockerfile.write(base_image) + dockerfile = os.path.join(context, 'Dockerfile') + with open(dockerfile, mode="w") as dockerfile_fh: + dockerfile_fh.write(base_image) web = self.create_service('web', build={'context': str(context)}) container = web.create_container() - dockerfile.write(base_image + 'CMD echo hello world\n') + with open(dockerfile, mode="w") as dockerfile_fh: + dockerfile_fh.write(base_image + 'CMD echo hello world\n') web.build() web = self.create_service('web', build={'context': str(context)}) assert ('recreate', [container]) == web.convergence_plan() def test_image_changed_to_build(self): - context = py.test.ensuretemp('test_image_changed_to_build') - self.addCleanup(context.remove) - context.join('Dockerfile').write(""" - FROM busybox - 
LABEL com.docker.compose.test_image=true - """) + context = tempfile.mkdtemp('test_image_changed_to_build') + self.addCleanup(shutil.rmtree, context) + with open(os.path.join(context, 'Dockerfile'), mode="w") as dockerfile: + dockerfile.write(""" + FROM busybox + LABEL com.docker.compose.test_image=true + """) web = self.create_service('web', image='busybox') container = web.create_container() diff --git a/tests/unit/config/config_test.py b/tests/unit/config/config_test.py index 0f744e22a..f1398e84f 100644 --- a/tests/unit/config/config_test.py +++ b/tests/unit/config/config_test.py @@ -10,7 +10,6 @@ import tempfile from operator import itemgetter from random import shuffle -import py import pytest import yaml from ddt import data @@ -18,6 +17,7 @@ from ddt import ddt from ...helpers import build_config_details from ...helpers import BUSYBOX_IMAGE_WITH_TAG +from ...helpers import cd from compose.config import config from compose.config import types from compose.config.config import ConfigFile @@ -780,13 +780,14 @@ class ConfigTest(unittest.TestCase): }) details = config.ConfigDetails('.', [base_file, override_file]) - tmpdir = py.test.ensuretemp('config_test') - self.addCleanup(tmpdir.remove) - tmpdir.join('common.yml').write(""" - base: - labels: ['label=one'] - """) - with tmpdir.as_cwd(): + tmpdir = tempfile.mkdtemp('config_test') + self.addCleanup(shutil.rmtree, tmpdir) + with open(os.path.join(tmpdir, 'common.yml'), mode="w") as common_fh: + common_fh.write(""" + base: + labels: ['label=one'] + """) + with cd(tmpdir): service_dicts = config.load(details).services expected = [ @@ -815,19 +816,20 @@ class ConfigTest(unittest.TestCase): } ) - tmpdir = pytest.ensuretemp('config_test') - self.addCleanup(tmpdir.remove) - tmpdir.join('base.yml').write(""" - version: '2.2' - services: - base: - image: base - web: - extends: base - """) + tmpdir = tempfile.mkdtemp('config_test') + self.addCleanup(shutil.rmtree, tmpdir) + with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh: + base_fh.write(""" + version: '2.2' + services: + base: + image: base + web: + extends: base + """) details = config.ConfigDetails('.', [main_file]) - with tmpdir.as_cwd(): + with cd(tmpdir): service_dicts = config.load(details).services assert service_dicts[0] == { 'name': 'prodweb', @@ -1765,22 +1767,23 @@ class ConfigTest(unittest.TestCase): assert services[0]['environment']['SPRING_JPA_HIBERNATE_DDL-AUTO'] == 'none' def test_load_yaml_with_yaml_error(self): - tmpdir = py.test.ensuretemp('invalid_yaml_test') - self.addCleanup(tmpdir.remove) - invalid_yaml_file = tmpdir.join('docker-compose.yml') - invalid_yaml_file.write(""" - web: - this is bogus: ok: what - """) + tmpdir = tempfile.mkdtemp('invalid_yaml_test') + self.addCleanup(shutil.rmtree, tmpdir) + invalid_yaml_file = os.path.join(tmpdir, 'docker-compose.yml') + with open(invalid_yaml_file, mode="w") as invalid_yaml_file_fh: + invalid_yaml_file_fh.write(""" + web: + this is bogus: ok: what + """) with pytest.raises(ConfigurationError) as exc: config.load_yaml(str(invalid_yaml_file)) - assert 'line 3, column 32' in exc.exconly() + assert 'line 3, column 36' in exc.exconly() def test_load_yaml_with_bom(self): - tmpdir = py.test.ensuretemp('bom_yaml') - self.addCleanup(tmpdir.remove) - bom_yaml = tmpdir.join('docker-compose.yml') + tmpdir = tempfile.mkdtemp('bom_yaml') + self.addCleanup(shutil.rmtree, tmpdir) + bom_yaml = os.path.join(tmpdir, 'docker-compose.yml') with codecs.open(str(bom_yaml), 'w', encoding='utf-8') as f: f.write('''\ufeff version: 
'2.3' @@ -4724,43 +4727,48 @@ class ExtendsTest(unittest.TestCase): @mock.patch.dict(os.environ) def test_extends_with_environment_and_env_files(self): - tmpdir = py.test.ensuretemp('test_extends_with_environment') - self.addCleanup(tmpdir.remove) - commondir = tmpdir.mkdir('common') - commondir.join('base.yml').write(""" - app: - image: 'example/app' - env_file: - - 'envs' - environment: - - SECRET - - TEST_ONE=common - - TEST_TWO=common - """) - tmpdir.join('docker-compose.yml').write(""" - ext: - extends: - file: common/base.yml - service: app - env_file: - - 'envs' - environment: - - THING - - TEST_ONE=top - """) - commondir.join('envs').write(""" - COMMON_ENV_FILE - TEST_ONE=common-env-file - TEST_TWO=common-env-file - TEST_THREE=common-env-file - TEST_FOUR=common-env-file - """) - tmpdir.join('envs').write(""" - TOP_ENV_FILE - TEST_ONE=top-env-file - TEST_TWO=top-env-file - TEST_THREE=top-env-file - """) + tmpdir = tempfile.mkdtemp('test_extends_with_environment') + self.addCleanup(shutil.rmtree, tmpdir) + commondir = os.path.join(tmpdir, 'common') + os.mkdir(commondir) + with open(os.path.join(commondir, 'base.yml'), mode="w") as base_fh: + base_fh.write(""" + app: + image: 'example/app' + env_file: + - 'envs' + environment: + - SECRET + - TEST_ONE=common + - TEST_TWO=common + """) + with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh: + docker_compose_fh.write(""" + ext: + extends: + file: common/base.yml + service: app + env_file: + - 'envs' + environment: + - THING + - TEST_ONE=top + """) + with open(os.path.join(commondir, 'envs'), mode="w") as envs_fh: + envs_fh.write(""" + COMMON_ENV_FILE + TEST_ONE=common-env-file + TEST_TWO=common-env-file + TEST_THREE=common-env-file + TEST_FOUR=common-env-file + """) + with open(os.path.join(tmpdir, 'envs'), mode="w") as envs_fh: + envs_fh.write(""" + TOP_ENV_FILE + TEST_ONE=top-env-file + TEST_TWO=top-env-file + TEST_THREE=top-env-file + """) expected = [ { @@ -4783,72 +4791,77 @@ class ExtendsTest(unittest.TestCase): os.environ['THING'] = 'thing' os.environ['COMMON_ENV_FILE'] = 'secret' os.environ['TOP_ENV_FILE'] = 'secret' - config = load_from_filename(str(tmpdir.join('docker-compose.yml'))) + config = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml'))) assert config == expected def test_extends_with_mixed_versions_is_error(self): - tmpdir = py.test.ensuretemp('test_extends_with_mixed_version') - self.addCleanup(tmpdir.remove) - tmpdir.join('docker-compose.yml').write(""" - version: "2" - services: - web: - extends: - file: base.yml - service: base - image: busybox - """) - tmpdir.join('base.yml').write(""" - base: - volumes: ['/foo'] - ports: ['3000:3000'] - """) - - with pytest.raises(ConfigurationError) as exc: - load_from_filename(str(tmpdir.join('docker-compose.yml'))) - assert 'Version mismatch' in exc.exconly() - - def test_extends_with_defined_version_passes(self): - tmpdir = py.test.ensuretemp('test_extends_with_defined_version') - self.addCleanup(tmpdir.remove) - tmpdir.join('docker-compose.yml').write(""" - version: "2" - services: - web: - extends: - file: base.yml - service: base - image: busybox - """) - tmpdir.join('base.yml').write(""" - version: "2" - services: + tmpdir = tempfile.mkdtemp('test_extends_with_mixed_version') + self.addCleanup(shutil.rmtree, tmpdir) + with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh: + docker_compose_fh.write(""" + version: "2" + services: + web: + extends: + file: base.yml + service: base + image: busybox 
+ """) + with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh: + base_fh.write(""" base: volumes: ['/foo'] ports: ['3000:3000'] - command: top - """) + """) - service = load_from_filename(str(tmpdir.join('docker-compose.yml'))) + with pytest.raises(ConfigurationError) as exc: + load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml'))) + assert 'Version mismatch' in exc.exconly() + + def test_extends_with_defined_version_passes(self): + tmpdir = tempfile.mkdtemp('test_extends_with_defined_version') + self.addCleanup(shutil.rmtree, tmpdir) + with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh: + docker_compose_fh.write(""" + version: "2" + services: + web: + extends: + file: base.yml + service: base + image: busybox + """) + with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh: + base_fh.write(""" + version: "2" + services: + base: + volumes: ['/foo'] + ports: ['3000:3000'] + command: top + """) + + service = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml'))) assert service[0]['command'] == "top" def test_extends_with_depends_on(self): - tmpdir = py.test.ensuretemp('test_extends_with_depends_on') - self.addCleanup(tmpdir.remove) - tmpdir.join('docker-compose.yml').write(""" - version: "2" - services: - base: - image: example - web: - extends: base - image: busybox - depends_on: ['other'] - other: - image: example - """) - services = load_from_filename(str(tmpdir.join('docker-compose.yml'))) + tmpdir = tempfile.mkdtemp('test_extends_with_depends_on') + self.addCleanup(shutil.rmtree, tmpdir) + with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh: + docker_compose_fh.write(""" + version: "2" + services: + base: + image: example + web: + extends: base + image: busybox + depends_on: ['other'] + other: + image: example + """) + services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml'))) assert service_sort(services)[2]['depends_on'] == { 'other': {'condition': 'service_started'} } @@ -4867,45 +4880,47 @@ class ExtendsTest(unittest.TestCase): }] def test_extends_with_ports(self): - tmpdir = py.test.ensuretemp('test_extends_with_ports') - self.addCleanup(tmpdir.remove) - tmpdir.join('docker-compose.yml').write(""" - version: '2' + tmpdir = tempfile.mkdtemp('test_extends_with_ports') + self.addCleanup(shutil.rmtree, tmpdir) + with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh: + docker_compose_fh.write(""" + version: '2' - services: - a: - image: nginx - ports: - - 80 + services: + a: + image: nginx + ports: + - 80 - b: - extends: - service: a - """) - services = load_from_filename(str(tmpdir.join('docker-compose.yml'))) + b: + extends: + service: a + """) + services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml'))) assert len(services) == 2 for svc in services: assert svc['ports'] == [types.ServicePort('80', None, None, None, None)] def test_extends_with_security_opt(self): - tmpdir = py.test.ensuretemp('test_extends_with_ports') - self.addCleanup(tmpdir.remove) - tmpdir.join('docker-compose.yml').write(""" - version: '2' + tmpdir = tempfile.mkdtemp('test_extends_with_ports') + self.addCleanup(shutil.rmtree, tmpdir) + with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh: + docker_compose_fh.write(""" + version: '2' - services: - a: - image: nginx - security_opt: - - apparmor:unconfined - - seccomp:unconfined + services: + a: + image: nginx + security_opt: + - apparmor:unconfined + - 
seccomp:unconfined - b: - extends: - service: a - """) - services = load_from_filename(str(tmpdir.join('docker-compose.yml'))) + b: + extends: + service: a + """) + services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml'))) assert len(services) == 2 for svc in services: assert types.SecurityOpt.parse('apparmor:unconfined') in svc['security_opt'] diff --git a/tests/unit/config/environment_test.py b/tests/unit/config/environment_test.py index 88eb0d6e1..186702db1 100644 --- a/tests/unit/config/environment_test.py +++ b/tests/unit/config/environment_test.py @@ -4,6 +4,9 @@ from __future__ import print_function from __future__ import unicode_literals import codecs +import os +import shutil +import tempfile import pytest @@ -46,19 +49,19 @@ class EnvironmentTest(unittest.TestCase): assert env.get_boolean('UNDEFINED') is False def test_env_vars_from_file_bom(self): - tmpdir = pytest.ensuretemp('env_file') - self.addCleanup(tmpdir.remove) + tmpdir = tempfile.mkdtemp('env_file') + self.addCleanup(shutil.rmtree, tmpdir) with codecs.open('{}/bom.env'.format(str(tmpdir)), 'w', encoding='utf-8') as f: f.write('\ufeffPARK_BOM=박봄\n') - assert env_vars_from_file(str(tmpdir.join('bom.env'))) == { + assert env_vars_from_file(str(os.path.join(tmpdir, 'bom.env'))) == { 'PARK_BOM': '박봄' } def test_env_vars_from_file_whitespace(self): - tmpdir = pytest.ensuretemp('env_file') - self.addCleanup(tmpdir.remove) + tmpdir = tempfile.mkdtemp('env_file') + self.addCleanup(shutil.rmtree, tmpdir) with codecs.open('{}/whitespace.env'.format(str(tmpdir)), 'w', encoding='utf-8') as f: f.write('WHITESPACE =yes\n') with pytest.raises(ConfigurationError) as exc: - env_vars_from_file(str(tmpdir.join('whitespace.env'))) + env_vars_from_file(str(os.path.join(tmpdir, 'whitespace.env'))) assert 'environment variable' in exc.exconly() From c818bfc62c0574009175d832c1a8a2857bf1b1bf Mon Sep 17 00:00:00 2001 From: Sergey Fursov Date: Sun, 31 Mar 2019 12:45:50 +0700 Subject: [PATCH 32/46] support PyYAML up to 5.x version Signed-off-by: Sergey Fursov --- requirements.txt | 2 +- setup.py | 2 +- tests/acceptance/cli_test.py | 28 ++++++++++++++-------------- tests/unit/config/config_test.py | 20 ++++++++++---------- 4 files changed, 26 insertions(+), 26 deletions(-) diff --git a/requirements.txt b/requirements.txt index 128b18cf6..0cef32af5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,7 +17,7 @@ paramiko==2.6.0 pypiwin32==219; sys_platform == 'win32' and python_version < '3.6' pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6' PySocks==1.7.1 -PyYAML==4.2b1 +PyYAML==5.3 requests==2.22.0 six==1.12.0 subprocess32==3.5.4; python_version < '3.2' diff --git a/setup.py b/setup.py index 23ae08a12..110441dca 100644 --- a/setup.py +++ b/setup.py @@ -32,7 +32,7 @@ def find_version(*file_paths): install_requires = [ 'cached-property >= 1.2.0, < 2', 'docopt >= 0.6.1, < 1', - 'PyYAML >= 3.10, < 5', + 'PyYAML >= 3.10, < 6', 'requests >= 2.20.0, < 3', 'texttable >= 0.9.0, < 2', 'websocket-client >= 0.32.0, < 1', diff --git a/tests/acceptance/cli_test.py b/tests/acceptance/cli_test.py index ffa055744..d1e96fdc8 100644 --- a/tests/acceptance/cli_test.py +++ b/tests/acceptance/cli_test.py @@ -269,7 +269,7 @@ services: # assert there are no python objects encoded in the output assert '!!' 
not in result.stdout - output = yaml.load(result.stdout) + output = yaml.safe_load(result.stdout) expected = { 'version': '2.0', 'volumes': {'data': {'driver': 'local'}}, @@ -294,7 +294,7 @@ services: def test_config_restart(self): self.base_dir = 'tests/fixtures/restart' result = self.dispatch(['config']) - assert yaml.load(result.stdout) == { + assert yaml.safe_load(result.stdout) == { 'version': '2.0', 'services': { 'never': { @@ -323,7 +323,7 @@ services: def test_config_external_network(self): self.base_dir = 'tests/fixtures/networks' result = self.dispatch(['-f', 'external-networks.yml', 'config']) - json_result = yaml.load(result.stdout) + json_result = yaml.safe_load(result.stdout) assert 'networks' in json_result assert json_result['networks'] == { 'networks_foo': { @@ -337,7 +337,7 @@ services: def test_config_with_dot_env(self): self.base_dir = 'tests/fixtures/default-env-file' result = self.dispatch(['config']) - json_result = yaml.load(result.stdout) + json_result = yaml.safe_load(result.stdout) assert json_result == { 'services': { 'web': { @@ -352,7 +352,7 @@ services: def test_config_with_env_file(self): self.base_dir = 'tests/fixtures/default-env-file' result = self.dispatch(['--env-file', '.env2', 'config']) - json_result = yaml.load(result.stdout) + json_result = yaml.safe_load(result.stdout) assert json_result == { 'services': { 'web': { @@ -367,7 +367,7 @@ services: def test_config_with_dot_env_and_override_dir(self): self.base_dir = 'tests/fixtures/default-env-file' result = self.dispatch(['--project-directory', 'alt/', 'config']) - json_result = yaml.load(result.stdout) + json_result = yaml.safe_load(result.stdout) assert json_result == { 'services': { 'web': { @@ -382,7 +382,7 @@ services: def test_config_external_volume_v2(self): self.base_dir = 'tests/fixtures/volumes' result = self.dispatch(['-f', 'external-volumes-v2.yml', 'config']) - json_result = yaml.load(result.stdout) + json_result = yaml.safe_load(result.stdout) assert 'volumes' in json_result assert json_result['volumes'] == { 'foo': { @@ -398,7 +398,7 @@ services: def test_config_external_volume_v2_x(self): self.base_dir = 'tests/fixtures/volumes' result = self.dispatch(['-f', 'external-volumes-v2-x.yml', 'config']) - json_result = yaml.load(result.stdout) + json_result = yaml.safe_load(result.stdout) assert 'volumes' in json_result assert json_result['volumes'] == { 'foo': { @@ -414,7 +414,7 @@ services: def test_config_external_volume_v3_x(self): self.base_dir = 'tests/fixtures/volumes' result = self.dispatch(['-f', 'external-volumes-v3-x.yml', 'config']) - json_result = yaml.load(result.stdout) + json_result = yaml.safe_load(result.stdout) assert 'volumes' in json_result assert json_result['volumes'] == { 'foo': { @@ -430,7 +430,7 @@ services: def test_config_external_volume_v3_4(self): self.base_dir = 'tests/fixtures/volumes' result = self.dispatch(['-f', 'external-volumes-v3-4.yml', 'config']) - json_result = yaml.load(result.stdout) + json_result = yaml.safe_load(result.stdout) assert 'volumes' in json_result assert json_result['volumes'] == { 'foo': { @@ -446,7 +446,7 @@ services: def test_config_external_network_v3_5(self): self.base_dir = 'tests/fixtures/networks' result = self.dispatch(['-f', 'external-networks-v3-5.yml', 'config']) - json_result = yaml.load(result.stdout) + json_result = yaml.safe_load(result.stdout) assert 'networks' in json_result assert json_result['networks'] == { 'foo': { @@ -462,7 +462,7 @@ services: def test_config_v1(self): self.base_dir = 'tests/fixtures/v1-config' 
result = self.dispatch(['config']) - assert yaml.load(result.stdout) == { + assert yaml.safe_load(result.stdout) == { 'version': '2.1', 'services': { 'net': { @@ -487,7 +487,7 @@ services: self.base_dir = 'tests/fixtures/v3-full' result = self.dispatch(['config']) - assert yaml.load(result.stdout) == { + assert yaml.safe_load(result.stdout) == { 'version': '3.5', 'volumes': { 'foobar': { @@ -564,7 +564,7 @@ services: self.base_dir = 'tests/fixtures/compatibility-mode' result = self.dispatch(['--compatibility', 'config']) - assert yaml.load(result.stdout) == { + assert yaml.safe_load(result.stdout) == { 'version': '2.3', 'volumes': {'foo': {'driver': 'default'}}, 'networks': {'bar': {}}, diff --git a/tests/unit/config/config_test.py b/tests/unit/config/config_test.py index 0f744e22a..fc76a2b9c 100644 --- a/tests/unit/config/config_test.py +++ b/tests/unit/config/config_test.py @@ -5060,7 +5060,7 @@ class HealthcheckTest(unittest.TestCase): }) ) - serialized_config = yaml.load(serialize_config(config_dict)) + serialized_config = yaml.safe_load(serialize_config(config_dict)) serialized_service = serialized_config['services']['test'] assert serialized_service['healthcheck'] == { @@ -5087,7 +5087,7 @@ class HealthcheckTest(unittest.TestCase): }) ) - serialized_config = yaml.load(serialize_config(config_dict)) + serialized_config = yaml.safe_load(serialize_config(config_dict)) serialized_service = serialized_config['services']['test'] assert serialized_service['healthcheck'] == { @@ -5294,7 +5294,7 @@ class SerializeTest(unittest.TestCase): 'secrets': secrets_dict })) - serialized_config = yaml.load(serialize_config(config_dict)) + serialized_config = yaml.safe_load(serialize_config(config_dict)) serialized_service = serialized_config['services']['web'] assert secret_sort(serialized_service['secrets']) == secret_sort(service_dict['secrets']) assert 'secrets' in serialized_config @@ -5309,7 +5309,7 @@ class SerializeTest(unittest.TestCase): } ], volumes={}, networks={}, secrets={}, configs={}) - serialized_config = yaml.load(serialize_config(config_dict)) + serialized_config = yaml.safe_load(serialize_config(config_dict)) assert '8080:80/tcp' in serialized_config['services']['web']['ports'] def test_serialize_ports_with_ext_ip(self): @@ -5321,7 +5321,7 @@ class SerializeTest(unittest.TestCase): } ], volumes={}, networks={}, secrets={}, configs={}) - serialized_config = yaml.load(serialize_config(config_dict)) + serialized_config = yaml.safe_load(serialize_config(config_dict)) assert '127.0.0.1:8080:80/tcp' in serialized_config['services']['web']['ports'] def test_serialize_configs(self): @@ -5349,7 +5349,7 @@ class SerializeTest(unittest.TestCase): 'configs': configs_dict })) - serialized_config = yaml.load(serialize_config(config_dict)) + serialized_config = yaml.safe_load(serialize_config(config_dict)) serialized_service = serialized_config['services']['web'] assert secret_sort(serialized_service['configs']) == secret_sort(service_dict['configs']) assert 'configs' in serialized_config @@ -5389,7 +5389,7 @@ class SerializeTest(unittest.TestCase): } config_dict = config.load(build_config_details(cfg)) - serialized_config = yaml.load(serialize_config(config_dict)) + serialized_config = yaml.safe_load(serialize_config(config_dict)) serialized_service = serialized_config['services']['web'] assert serialized_service['environment']['CURRENCY'] == '$$' assert serialized_service['command'] == 'echo $$FOO' @@ -5411,7 +5411,7 @@ class SerializeTest(unittest.TestCase): } config_dict = 
config.load(build_config_details(cfg), interpolate=False) - serialized_config = yaml.load(serialize_config(config_dict, escape_dollar=False)) + serialized_config = yaml.safe_load(serialize_config(config_dict, escape_dollar=False)) serialized_service = serialized_config['services']['web'] assert serialized_service['environment']['CURRENCY'] == '$' assert serialized_service['command'] == 'echo $FOO' @@ -5430,7 +5430,7 @@ class SerializeTest(unittest.TestCase): config_dict = config.load(build_config_details(cfg)) - serialized_config = yaml.load(serialize_config(config_dict)) + serialized_config = yaml.safe_load(serialize_config(config_dict)) serialized_service = serialized_config['services']['web'] assert serialized_service['command'] == 'echo 十六夜 咲夜' @@ -5446,6 +5446,6 @@ class SerializeTest(unittest.TestCase): } config_dict = config.load(build_config_details(cfg)) - serialized_config = yaml.load(serialize_config(config_dict)) + serialized_config = yaml.safe_load(serialize_config(config_dict)) serialized_volume = serialized_config['volumes']['test'] assert serialized_volume['external'] is False From a436fb953c4843f44dd9305ae188eb7d714fa49e Mon Sep 17 00:00:00 2001 From: Lumir Balhar Date: Fri, 10 Jan 2020 08:39:54 +0100 Subject: [PATCH 33/46] Remove indentation from test YAML Signed-off-by: Lumir Balhar --- tests/unit/config/config_test.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/config/config_test.py b/tests/unit/config/config_test.py index f1398e84f..03d868880 100644 --- a/tests/unit/config/config_test.py +++ b/tests/unit/config/config_test.py @@ -1772,13 +1772,13 @@ class ConfigTest(unittest.TestCase): invalid_yaml_file = os.path.join(tmpdir, 'docker-compose.yml') with open(invalid_yaml_file, mode="w") as invalid_yaml_file_fh: invalid_yaml_file_fh.write(""" - web: - this is bogus: ok: what +web: + this is bogus: ok: what """) with pytest.raises(ConfigurationError) as exc: config.load_yaml(str(invalid_yaml_file)) - assert 'line 3, column 36' in exc.exconly() + assert 'line 3, column 22' in exc.exconly() def test_load_yaml_with_bom(self): tmpdir = tempfile.mkdtemp('bom_yaml') From 3ea84fd9bcd185c30fe97655b6281ed781b11ce9 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Fri, 10 Jan 2020 16:10:59 +0000 Subject: [PATCH 34/46] Bump pytest from 3.6.3 to 5.3.2 Bumps [pytest](https://github.com/pytest-dev/pytest) from 3.6.3 to 5.3.2. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/3.6.3...5.3.2) Signed-off-by: dependabot-preview[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 496cefe15..a641868f4 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,6 +2,6 @@ coverage==4.5.4 ddt==1.2.2 flake8==3.7.9 mock==3.0.5 -pytest==5.1.1; python_version >= '3.5' +pytest==5.3.2; python_version >= '3.5' pytest==4.6.5; python_version < '3.5' pytest-cov==2.8.1 From 707a340304be39fd6885c8d26df3151eb28f14e9 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 13 Jan 2020 13:32:31 +0000 Subject: [PATCH 35/46] Bump certifi from 2017.4.17 to 2019.11.28 Bumps [certifi](https://github.com/certifi/python-certifi) from 2017.4.17 to 2019.11.28. 
- [Release notes](https://github.com/certifi/python-certifi/releases) - [Commits](https://github.com/certifi/python-certifi/compare/2017.04.17...2019.11.28) Signed-off-by: dependabot-preview[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 50a2e6fc0..3f960ef6c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ backports.shutil_get_terminal_size==1.0.0 backports.ssl-match-hostname==3.5.0.1; python_version < '3' cached-property==1.3.0 -certifi==2017.4.17 +certifi==2019.11.28 chardet==3.0.4 colorama==0.4.3; sys_platform == 'win32' docker==4.1.0 From 2cdd2f626b6a2131afdd1f8c542a17c350948b33 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 13 Jan 2020 13:33:13 +0000 Subject: [PATCH 36/46] Bump coverage from 4.5.4 to 5.0.3 Bumps [coverage](https://github.com/nedbat/coveragepy) from 4.5.4 to 5.0.3. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/coverage-4.5.4...coverage-5.0.3) Signed-off-by: dependabot-preview[bot] --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a641868f4..6f670a380 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ -coverage==4.5.4 +coverage==5.0.3 ddt==1.2.2 flake8==3.7.9 mock==3.0.5 From 661afb400330db289cf9002b19c183d3ec562fd2 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 13 Jan 2020 13:34:32 +0000 Subject: [PATCH 37/46] Bump paramiko from 2.6.0 to 2.7.1 Bumps [paramiko](https://github.com/paramiko/paramiko) from 2.6.0 to 2.7.1. 
- [Release notes](https://github.com/paramiko/paramiko/releases) - [Changelog](https://github.com/paramiko/paramiko/blob/master/NEWS) - [Commits](https://github.com/paramiko/paramiko/compare/2.6.0...2.7.1) Signed-off-by: dependabot-preview[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 50a2e6fc0..281c03ba3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,7 +13,7 @@ functools32==3.2.3.post2; python_version < '3.2' idna==2.8 ipaddress==1.0.18 jsonschema==3.2.0 -paramiko==2.6.0 +paramiko==2.7.1 pypiwin32==219; sys_platform == 'win32' and python_version < '3.6' pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6' PySocks==1.7.1 From 093cc2c089ab77978ca993eee09c2f53111730b1 Mon Sep 17 00:00:00 2001 From: Kevin Roy Date: Mon, 18 Nov 2019 15:30:35 +0100 Subject: [PATCH 38/46] Allow setting compatibility options from environment Signed-off-by: Kevin Roy --- compose/cli/command.py | 14 ++++++++-- tests/acceptance/cli_test.py | 53 ++++++++++++++++++++++++------------ 2 files changed, 47 insertions(+), 20 deletions(-) diff --git a/compose/cli/command.py b/compose/cli/command.py index 1fa8a17a2..cf77237ac 100644 --- a/compose/cli/command.py +++ b/compose/cli/command.py @@ -59,7 +59,7 @@ def project_from_options(project_dir, options, additional_options={}): tls_config=tls_config_from_options(options, environment), environment=environment, override_dir=override_dir, - compatibility=options.get('--compatibility'), + compatibility=compatibility_from_options(project_dir, options, environment), interpolate=(not additional_options.get('--no-interpolate')), environment_file=environment_file ) @@ -90,7 +90,7 @@ def get_config_from_options(base_dir, options, additional_options={}): ) return config.load( config.find(base_dir, config_path, environment, override_dir), - options.get('--compatibility'), + compatibility_from_options(config_path, options, environment), not additional_options.get('--no-interpolate') ) @@ -198,3 +198,13 @@ def get_project_name(working_dir, project_name=None, environment=None): return normalize_name(project) return 'default' + + +def compatibility_from_options(working_dir, options=None, environment=None): + """Get compose v3 compatibility from --compatibility option + or from COMPOSE_COMPATIBILITY environment variable.""" + + compatibility_option = options.get('--compatibility') + compatibility_environment = environment.get_boolean('COMPOSE_COMPATIBILITY') + + return compatibility_option or compatibility_environment diff --git a/tests/acceptance/cli_test.py b/tests/acceptance/cli_test.py index d1e96fdc8..3a207c83a 100644 --- a/tests/acceptance/cli_test.py +++ b/tests/acceptance/cli_test.py @@ -43,6 +43,24 @@ ProcessResult = namedtuple('ProcessResult', 'stdout stderr') BUILD_CACHE_TEXT = 'Using cache' BUILD_PULL_TEXT = 'Status: Image is up to date for busybox:1.27.2' +COMPOSE_COMPATIBILITY_DICT = { + 'version': '2.3', + 'volumes': {'foo': {'driver': 'default'}}, + 'networks': {'bar': {}}, + 'services': { + 'foo': { + 'command': '/bin/true', + 'image': 'alpine:3.10.1', + 'scale': 3, + 'restart': 'always:7', + 'mem_limit': '300M', + 'mem_reservation': '100M', + 'cpus': 0.7, + 'volumes': ['foo:/bar:rw'], + 'networks': {'bar': None}, + } + }, +} def start_process(base_dir, options): @@ -564,24 +582,23 @@ services: self.base_dir = 'tests/fixtures/compatibility-mode' result = self.dispatch(['--compatibility', 'config']) - assert yaml.safe_load(result.stdout) == { - 'version': '2.3', 
- 'volumes': {'foo': {'driver': 'default'}}, - 'networks': {'bar': {}}, - 'services': { - 'foo': { - 'command': '/bin/true', - 'image': 'alpine:3.10.1', - 'scale': 3, - 'restart': 'always:7', - 'mem_limit': '300M', - 'mem_reservation': '100M', - 'cpus': 0.7, - 'volumes': ['foo:/bar:rw'], - 'networks': {'bar': None}, - } - }, - } + assert yaml.load(result.stdout) == COMPOSE_COMPATIBILITY_DICT + + @mock.patch.dict(os.environ) + def test_config_compatibility_mode_from_env(self): + self.base_dir = 'tests/fixtures/compatibility-mode' + os.environ['COMPOSE_COMPATIBILITY'] = 'true' + result = self.dispatch(['config']) + + assert yaml.load(result.stdout) == COMPOSE_COMPATIBILITY_DICT + + @mock.patch.dict(os.environ) + def test_config_compatibility_mode_from_env_and_option_precedence(self): + self.base_dir = 'tests/fixtures/compatibility-mode' + os.environ['COMPOSE_COMPATIBILITY'] = 'false' + result = self.dispatch(['--compatibility', 'config']) + + assert yaml.load(result.stdout) == COMPOSE_COMPATIBILITY_DICT def test_ps(self): self.project.get_service('simple').create_container() From 120a7b1b067cf5c4c68afeadecd5541eafeaed68 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Tue, 14 Jan 2020 17:23:07 +0000 Subject: [PATCH 39/46] Bump ipaddress from 1.0.18 to 1.0.23 Bumps [ipaddress](https://github.com/phihag/ipaddress) from 1.0.18 to 1.0.23. - [Release notes](https://github.com/phihag/ipaddress/releases) - [Commits](https://github.com/phihag/ipaddress/compare/v1.0.18...v1.0.23) Signed-off-by: dependabot-preview[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 281c03ba3..914931242 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,7 +11,7 @@ docopt==0.6.2 enum34==1.1.6; python_version < '3.4' functools32==3.2.3.post2; python_version < '3.2' idna==2.8 -ipaddress==1.0.18 +ipaddress==1.0.23 jsonschema==3.2.0 paramiko==2.7.1 pypiwin32==219; sys_platform == 'win32' and python_version < '3.6' From dafece4ae56b0649cb349fcf6929c1b10f87c522 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Tue, 14 Jan 2020 17:26:05 +0000 Subject: [PATCH 40/46] Bump cached-property from 1.3.0 to 1.5.1 Bumps [cached-property](https://github.com/pydanny/cached-property) from 1.3.0 to 1.5.1. 
- [Release notes](https://github.com/pydanny/cached-property/releases) - [Changelog](https://github.com/pydanny/cached-property/blob/master/HISTORY.rst) - [Commits](https://github.com/pydanny/cached-property/compare/1.3.0...1.5.1) Signed-off-by: dependabot-preview[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 486307e77..eedda62b5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ backports.shutil_get_terminal_size==1.0.0 backports.ssl-match-hostname==3.5.0.1; python_version < '3' -cached-property==1.3.0 +cached-property==1.5.1 certifi==2019.11.28 chardet==3.0.4 colorama==0.4.3; sys_platform == 'win32' From a92a8eb508456effd5388c8eb7c172ec54ca9936 Mon Sep 17 00:00:00 2001 From: Christopher Crone Date: Fri, 29 Nov 2019 09:58:03 +0100 Subject: [PATCH 41/46] Bump macOS dependencies - Python 3.7.5 - OpenSSL 1.1.1d Signed-off-by: Christopher Crone --- script/setup/osx | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/script/setup/osx b/script/setup/osx index 08420fb23..077b5b497 100755 --- a/script/setup/osx +++ b/script/setup/osx @@ -13,13 +13,13 @@ if ! [ ${DEPLOYMENT_TARGET} == "$(macos_version)" ]; then SDK_SHA1=dd228a335194e3392f1904ce49aff1b1da26ca62 fi -OPENSSL_VERSION=1.1.1c +OPENSSL_VERSION=1.1.1d OPENSSL_URL=https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz -OPENSSL_SHA1=71b830a077276cbeccc994369538617a21bee808 +OPENSSL_SHA1=056057782325134b76d1931c48f2c7e6595d7ef4 -PYTHON_VERSION=3.7.4 +PYTHON_VERSION=3.7.5 PYTHON_URL=https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz -PYTHON_SHA1=fb1d764be8a9dcd40f2f152a610a0ab04e0d0ed3 +PYTHON_SHA1=8b0311d4cca19f0ea9181731189fa33c9f5aedf9 # # Install prerequisites. From a2cdffeeee4802882ac88ccc0159e6f2297fe52f Mon Sep 17 00:00:00 2001 From: Christopher Crone Date: Fri, 29 Nov 2019 11:57:18 +0100 Subject: [PATCH 42/46] Bump Linux dependencies - Alpine 3.10.3 - Debian Stretch 20191118 - Python 3.7.5 Signed-off-by: Christopher Crone --- Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 64de77890..ed270eb4a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,9 @@ -ARG DOCKER_VERSION=18.09.7 -ARG PYTHON_VERSION=3.7.4 +ARG DOCKER_VERSION=19.03.5 +ARG PYTHON_VERSION=3.7.5 ARG BUILD_ALPINE_VERSION=3.10 ARG BUILD_DEBIAN_VERSION=slim-stretch -ARG RUNTIME_ALPINE_VERSION=3.10.1 -ARG RUNTIME_DEBIAN_VERSION=stretch-20190812-slim +ARG RUNTIME_ALPINE_VERSION=3.10.3 +ARG RUNTIME_DEBIAN_VERSION=stretch-20191118-slim ARG BUILD_PLATFORM=alpine From 53d00f76777d111c192b0674eb89f2073da73cf0 Mon Sep 17 00:00:00 2001 From: yukihira1992 Date: Tue, 14 Jan 2020 20:13:10 +0900 Subject: [PATCH 43/46] Refactored mutable default values. 
Signed-off-by: yukihira1992 --- compose/cli/command.py | 6 ++++-- compose/project.py | 3 ++- compose/service.py | 4 ++-- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/compose/cli/command.py b/compose/cli/command.py index cf77237ac..2fabbe18a 100644 --- a/compose/cli/command.py +++ b/compose/cli/command.py @@ -40,7 +40,8 @@ SILENT_COMMANDS = { } -def project_from_options(project_dir, options, additional_options={}): +def project_from_options(project_dir, options, additional_options=None): + additional_options = additional_options or {} override_dir = options.get('--project-directory') environment_file = options.get('--env-file') environment = Environment.from_env_file(override_dir or project_dir, environment_file) @@ -81,7 +82,8 @@ def set_parallel_limit(environment): parallel.GlobalLimit.set_global_limit(parallel_limit) -def get_config_from_options(base_dir, options, additional_options={}): +def get_config_from_options(base_dir, options, additional_options=None): + additional_options = additional_options or {} override_dir = options.get('--project-directory') environment_file = options.get('--env-file') environment = Environment.from_env_file(override_dir or base_dir, environment_file) diff --git a/compose/project.py b/compose/project.py index a7770ddc9..696c8b040 100644 --- a/compose/project.py +++ b/compose/project.py @@ -87,10 +87,11 @@ class Project(object): return labels @classmethod - def from_config(cls, name, config_data, client, default_platform=None, extra_labels=[]): + def from_config(cls, name, config_data, client, default_platform=None, extra_labels=None): """ Construct a Project from a config.Config object. """ + extra_labels = extra_labels or [] use_networking = (config_data.version and config_data.version != V1) networks = build_networks(name, config_data, client) project_networks = ProjectNetworks.from_services( diff --git a/compose/service.py b/compose/service.py index 024e7fbde..ebe237b8c 100644 --- a/compose/service.py +++ b/compose/service.py @@ -185,7 +185,7 @@ class Service(object): scale=1, pid_mode=None, default_platform=None, - extra_labels=[], + extra_labels=None, **options ): self.name = name @@ -201,7 +201,7 @@ class Service(object): self.scale_num = scale self.default_platform = default_platform self.options = options - self.extra_labels = extra_labels + self.extra_labels = extra_labels or [] def __repr__(self): return ''.format(self.name) From 387f5e4c96ea841a8855d4e887b6f2a0e54c5cc9 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Thu, 16 Jan 2020 13:46:47 +0100 Subject: [PATCH 44/46] Bump pyinstaller to 3.6 Signed-off-by: Ulysses Souza --- requirements-build.txt | 2 +- script/build/linux-entrypoint | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-build.txt b/requirements-build.txt index 2a1cd7d6b..9126f8af9 100644 --- a/requirements-build.txt +++ b/requirements-build.txt @@ -1 +1 @@ -pyinstaller==3.5 +pyinstaller==3.6 diff --git a/script/build/linux-entrypoint b/script/build/linux-entrypoint index d607dd5c2..d75b9927d 100755 --- a/script/build/linux-entrypoint +++ b/script/build/linux-entrypoint @@ -24,7 +24,7 @@ if [ ! -z "${BUILD_BOOTLOADER}" ]; then git clone --single-branch --branch develop https://github.com/pyinstaller/pyinstaller.git /tmp/pyinstaller cd /tmp/pyinstaller/bootloader # Checkout commit corresponding to version in requirements-build - git checkout v3.5 + git checkout v3.6 "${VENV}"/bin/python3 ./waf configure --no-lsb all "${VENV}"/bin/pip3 install .. 
cd "${CODE_PATH}" From a6b602d086dee4b2d17b3cf731163a77f2d617db Mon Sep 17 00:00:00 2001 From: Ben Thorner Date: Tue, 8 Oct 2019 17:14:23 +0100 Subject: [PATCH 45/46] Support attaching to dependencies on up When using the 'up' command, only services listed as arguments are attached to, which can be very different to the 'no argument' case if a service has many and deep dependencies: - It's not clear when dependencies have failed to start. Have to run 'compose ps' separately to find out. - It's not clear when dependencies are erroring. Have to run 'compose logs' separately to find out. With a simple setup, it's possible to work around theses issue by using the 'up' command without arguments. But when there are lots of 'top-level' services, with common dependencies, in a single config, using 'up' without arguments isn't practical due to resource limits and the sheer volume of output from other services. This introduces a new '--attach-dependencies' flag to optionally attach dependent containers as part of the 'up' command. This makes their logs visible in the output, alongside the listed services. It also means we benefit from the '--abort-on-container-exit' behaviour when dependencies fail to start, giving more visibility of the failure. Signed-off-by: Ben Thorner --- compose/cli/main.py | 18 ++++++++++----- contrib/completion/bash/docker-compose | 2 +- contrib/completion/zsh/_docker-compose | 3 ++- tests/acceptance/cli_test.py | 20 +++++++++++++++++ .../docker-compose.yml | 10 +++++++++ .../docker-compose.yml | 10 +++++++++ tests/unit/cli/main_test.py | 22 ++++++++++++++----- 7 files changed, 72 insertions(+), 13 deletions(-) create mode 100644 tests/fixtures/abort-on-container-exit-dependencies/docker-compose.yml create mode 100644 tests/fixtures/echo-services-dependencies/docker-compose.yml diff --git a/compose/cli/main.py b/compose/cli/main.py index 200d4eeac..b6ad404dc 100644 --- a/compose/cli/main.py +++ b/compose/cli/main.py @@ -1012,6 +1012,7 @@ class TopLevelCommand(object): --build Build images before starting containers. --abort-on-container-exit Stops all containers if any container was stopped. Incompatible with -d. + --attach-dependencies Attach to dependent containers -t, --timeout TIMEOUT Use this timeout in seconds for container shutdown when attached or when containers are already running. 
(default: 10) @@ -1033,16 +1034,18 @@ class TopLevelCommand(object): remove_orphans = options['--remove-orphans'] detached = options.get('--detach') no_start = options.get('--no-start') + attach_dependencies = options.get('--attach-dependencies') - if detached and (cascade_stop or exit_value_from): - raise UserError("--abort-on-container-exit and -d cannot be combined.") + if detached and (cascade_stop or exit_value_from or attach_dependencies): + raise UserError( + "-d cannot be combined with --abort-on-container-exit or --attach-dependencies.") ignore_orphans = self.toplevel_environment.get_boolean('COMPOSE_IGNORE_ORPHANS') if ignore_orphans and remove_orphans: raise UserError("COMPOSE_IGNORE_ORPHANS and --remove-orphans cannot be combined.") - opts = ['--detach', '--abort-on-container-exit', '--exit-code-from'] + opts = ['--detach', '--abort-on-container-exit', '--exit-code-from', '--attach-dependencies'] for excluded in [x for x in opts if options.get(x) and no_start]: raise UserError('--no-start and {} cannot be combined.'.format(excluded)) @@ -1087,7 +1090,10 @@ class TopLevelCommand(object): if detached or no_start: return - attached_containers = filter_containers_to_service_names(to_attach, service_names) + attached_containers = filter_attached_containers( + to_attach, + service_names, + attach_dependencies) log_printer = log_printer_from_project( self.project, @@ -1392,8 +1398,8 @@ def log_printer_from_project( log_args=log_args) -def filter_containers_to_service_names(containers, service_names): - if not service_names: +def filter_attached_containers(containers, service_names, attach_dependencies=False): + if attach_dependencies or not service_names: return containers return [ diff --git a/contrib/completion/bash/docker-compose b/contrib/completion/bash/docker-compose index 23c48b7f4..ad0ce44c1 100644 --- a/contrib/completion/bash/docker-compose +++ b/contrib/completion/bash/docker-compose @@ -545,7 +545,7 @@ _docker_compose_up() { case "$cur" in -*) - COMPREPLY=( $( compgen -W "--abort-on-container-exit --always-recreate-deps --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) ) + COMPREPLY=( $( compgen -W "--abort-on-container-exit --always-recreate-deps --attach-dependencies --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) ) ;; *) __docker_compose_complete_services diff --git a/contrib/completion/zsh/_docker-compose b/contrib/completion/zsh/_docker-compose index 277bf0d3c..de1414984 100755 --- a/contrib/completion/zsh/_docker-compose +++ b/contrib/completion/zsh/_docker-compose @@ -284,7 +284,7 @@ __docker-compose_subcommand() { (up) _arguments \ $opts_help \ - '(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit.]' \ + '(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit and --attach-dependencies.]' \ $opts_no_color \ $opts_no_deps \ $opts_force_recreate \ @@ -292,6 +292,7 @@ __docker-compose_subcommand() { $opts_no_build \ "(--no-build)--build[Build images before starting containers.]" \ "(-d)--abort-on-container-exit[Stops all containers if any container was stopped. 
Incompatible with -d.]" \ + "(-d)--attach-dependencies[Attach to dependent containers. Incompatible with -d.]" \ '(-t --timeout)'{-t,--timeout}"[Use this timeout in seconds for container shutdown when attached or when containers are already running. (default: 10)]:seconds: " \ '--scale[SERVICE=NUM Scale SERVICE to NUM instances. Overrides the `scale` setting in the Compose file if present.]:service scale SERVICE=NUM: ' \ '--exit-code-from=[Return the exit code of the selected service container. Implies --abort-on-container-exit]:service:__docker-compose_services' \ diff --git a/tests/acceptance/cli_test.py b/tests/acceptance/cli_test.py index 3a207c83a..7fa7fc548 100644 --- a/tests/acceptance/cli_test.py +++ b/tests/acceptance/cli_test.py @@ -1571,6 +1571,26 @@ services: assert len(db.containers()) == 0 assert len(console.containers()) == 0 + def test_up_with_attach_dependencies(self): + self.base_dir = 'tests/fixtures/echo-services-dependencies' + result = self.dispatch(['up', '--attach-dependencies', '--no-color', 'simple'], None) + simple_name = self.project.get_service('simple').containers(stopped=True)[0].name_without_project + another_name = self.project.get_service('another').containers( + stopped=True + )[0].name_without_project + + assert '{} | simple'.format(simple_name) in result.stdout + assert '{} | another'.format(another_name) in result.stdout + + def test_up_handles_aborted_dependencies(self): + self.base_dir = 'tests/fixtures/abort-on-container-exit-dependencies' + proc = start_process( + self.base_dir, + ['up', 'simple', '--attach-dependencies', '--abort-on-container-exit']) + wait_on_condition(ContainerCountCondition(self.project, 0)) + proc.wait() + assert proc.returncode == 1 + def test_up_with_force_recreate(self): self.dispatch(['up', '-d'], None) service = self.project.get_service('simple') diff --git a/tests/fixtures/abort-on-container-exit-dependencies/docker-compose.yml b/tests/fixtures/abort-on-container-exit-dependencies/docker-compose.yml new file mode 100644 index 000000000..cd10c851c --- /dev/null +++ b/tests/fixtures/abort-on-container-exit-dependencies/docker-compose.yml @@ -0,0 +1,10 @@ +version: "2.0" +services: + simple: + image: busybox:1.31.0-uclibc + command: top + depends_on: + - another + another: + image: busybox:1.31.0-uclibc + command: ls /thecakeisalie diff --git a/tests/fixtures/echo-services-dependencies/docker-compose.yml b/tests/fixtures/echo-services-dependencies/docker-compose.yml new file mode 100644 index 000000000..5329e0033 --- /dev/null +++ b/tests/fixtures/echo-services-dependencies/docker-compose.yml @@ -0,0 +1,10 @@ +version: "2.0" +services: + simple: + image: busybox:1.31.0-uclibc + command: echo simple + depends_on: + - another + another: + image: busybox:1.31.0-uclibc + command: echo another diff --git a/tests/unit/cli/main_test.py b/tests/unit/cli/main_test.py index aadb9d459..067c74f0b 100644 --- a/tests/unit/cli/main_test.py +++ b/tests/unit/cli/main_test.py @@ -12,7 +12,7 @@ from compose.cli.formatter import ConsoleWarningFormatter from compose.cli.main import build_one_off_container_options from compose.cli.main import call_docker from compose.cli.main import convergence_strategy_from_opts -from compose.cli.main import filter_containers_to_service_names +from compose.cli.main import filter_attached_containers from compose.cli.main import get_docker_start_call from compose.cli.main import setup_console_handler from compose.cli.main import warn_for_swarm_mode @@ -37,7 +37,7 @@ def logging_handler(): class 
TestCLIMainTestCase(object): - def test_filter_containers_to_service_names(self): + def test_filter_attached_containers(self): containers = [ mock_container('web', 1), mock_container('web', 2), @@ -46,17 +46,29 @@ class TestCLIMainTestCase(object): mock_container('another', 1), ] service_names = ['web', 'db'] - actual = filter_containers_to_service_names(containers, service_names) + actual = filter_attached_containers(containers, service_names) assert actual == containers[:3] - def test_filter_containers_to_service_names_all(self): + def test_filter_attached_containers_with_dependencies(self): + containers = [ + mock_container('web', 1), + mock_container('web', 2), + mock_container('db', 1), + mock_container('other', 1), + mock_container('another', 1), + ] + service_names = ['web', 'db'] + actual = filter_attached_containers(containers, service_names, attach_dependencies=True) + assert actual == containers + + def test_filter_attached_containers_all(self): containers = [ mock_container('web', 1), mock_container('db', 1), mock_container('other', 1), ] service_names = [] - actual = filter_containers_to_service_names(containers, service_names) + actual = filter_attached_containers(containers, service_names) assert actual == containers def test_warning_in_swarm_mode(self): From 634eb501f8bac9469482039fe3f9067dea62cb4e Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Fri, 17 Jan 2020 19:33:03 +0100 Subject: [PATCH 46/46] "Bump 1.25.2-rc1" Signed-off-by: Ulysses Souza --- CHANGELOG.md | 35 +++++++++++++++++++++++++++++++++++ compose/__init__.py | 2 +- script/run/run.sh | 2 +- 3 files changed, 37 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e8cd889f0..ccc44f77e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,41 @@ Change log ========== +1.25.2 (2020-01-17) +------------------- + +### Features + +- Allow compatibility option with `COMPOSE_COMPATIBILITY` environment variable + +- Bump PyInstaller from 3.5 to 3.6 + +- Bump pysocks from 1.6.7 to 1.7.1 + +- Bump websocket-client from 0.32.0 to 0.57.0 + +- Bump urllib3 from 1.24.2 to 1.25.7 + +- Bump jsonschema from 3.0.1 to 3.2.0 + +- Bump PyYAML from 4.2b1 to 5.3 + +- Bump certifi from 2017.4.17 to 2019.11.28 + +- Bump coverage from 4.5.4 to 5.0.3 + +- Bump paramiko from 2.6.0 to 2.7.1 + +- Bump cached-property from 1.3.0 to 1.5.1 + +- Bump minor Linux and MacOSX dependencies + +### Bugfixes + +- Validate version format on formats 2+ + +- Assume infinite terminal width when not running in a terminal + 1.25.1 (2020-01-06) ------------------- diff --git a/compose/__init__.py b/compose/__init__.py index 69c4e0e49..2728a8329 100644 --- a/compose/__init__.py +++ b/compose/__init__.py @@ -1,4 +1,4 @@ from __future__ import absolute_import from __future__ import unicode_literals -__version__ = '1.26.0dev' +__version__ = '1.25.2-rc1' diff --git a/script/run/run.sh b/script/run/run.sh index 7df5fe979..f521b280d 100755 --- a/script/run/run.sh +++ b/script/run/run.sh @@ -15,7 +15,7 @@ set -e -VERSION="1.25.1" +VERSION="1.25.2-rc1" IMAGE="docker/compose:$VERSION"
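As a closing note on the behaviour introduced in PATCH 45: the sketch below is a simplified, self-contained illustration of how filter_attached_containers decides which containers to attach during 'up'. The Container stand-in is an assumption made for the example; the real function lives in compose/cli/main.py and operates on Compose's own container objects.

from collections import namedtuple

# Illustrative stand-in for Compose's container objects; only the service name matters here.
Container = namedtuple('Container', 'service')

def filter_attached_containers(containers, service_names, attach_dependencies=False):
    # --attach-dependencies (or running `up` with no services listed) attaches to everything.
    if attach_dependencies or not service_names:
        return containers
    # Otherwise only containers of the services named on the command line are attached to.
    return [c for c in containers if c.service in service_names]

containers = [Container('simple'), Container('another')]
print(filter_attached_containers(containers, ['simple']))                            # simple only
print(filter_attached_containers(containers, ['simple'], attach_dependencies=True))  # both services

On the command line this corresponds to something like `docker-compose up --attach-dependencies simple`, which is the invocation exercised by the new acceptance tests in tests/acceptance/cli_test.py.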