From ab21746d8f988af474f251792a39fcccd98839cc Mon Sep 17 00:00:00 2001 From: Ronald van Zantvoort Date: Sat, 24 Feb 2018 13:07:29 +0100 Subject: build_prune Signed-off-by: Ronald van Zantvoort --- docker/api/build.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docker/api/build.py b/docker/api/build.py index 56f1fcf..62b92c9 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -292,3 +292,19 @@ class BuildApiMixin(object): ) else: log.debug('No auth config found') + + @utils.minimum_version('1.31') + def prune_build(self): + """ + Delete builder cache + + Returns: + (dict): A dict containing + the amount of disk space reclaimed in bytes. + + Raises: + :py:class:`docker.errors.APIError` + If the server returns an error. + """ + url = self._url("/build/prune") + return self._result(self._post(url), True) -- cgit v1.2.1 From 110672d1a8cb5b5f418e4fb7fd7cdff775191122 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 5 Mar 2018 15:26:59 -0800 Subject: Bump test engine versions Signed-off-by: Joffrey F --- Jenkinsfile | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 6d9d343..c548492 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -5,7 +5,12 @@ def imageNamePy2 def imageNamePy3 def images = [:] -def dockerVersions = ["17.06.2-ce", "17.12.0-ce", "18.01.0-ce"] +def dockerVersions = [ + "17.06.2-ce", // Latest EE + "17.12.1-ce", // Latest CE stable + "18.02.0-ce", // Latest CE edge + "18.03.0-ce-rc1" // Latest CE RC +] def buildImage = { name, buildargs, pyTag -> img = docker.image(name) @@ -33,7 +38,7 @@ def buildImages = { -> } def getAPIVersion = { engineVersion -> - def versionMap = ['17.06': '1.30', '17.12': '1.35', '18.01': '1.35'] + def versionMap = ['17.06': '1.30', '17.12': '1.35', '18.02': '1.36', '18.03': '1.37'] return versionMap[engineVersion.substring(0, 5)] } -- cgit v1.2.1 From b75799d33a053b6dc99e45721d280e99b21436a6 Mon Sep 17 00:00:00 2001 From: Matthieu Nottale Date: Wed, 14 Mar 2018 14:14:20 +0100 Subject: Add close() method to DockerClient. Signed-off-by: Matthieu Nottale --- docker/client.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker/client.py b/docker/client.py index 467583e..b4364c3 100644 --- a/docker/client.py +++ b/docker/client.py @@ -186,6 +186,10 @@ class DockerClient(object): return self.api.version(*args, **kwargs) version.__doc__ = APIClient.version.__doc__ + def close(self): + return self.api.close() + close.__doc__ = APIClient.close.__doc__ + def __getattr__(self, name): s = ["'DockerClient' object has no attribute '{}'".format(name)] # If a user calls a method on APIClient, they -- cgit v1.2.1 From 1829bd26991a179bfec70d0fe6c28c406fe0c7ee Mon Sep 17 00:00:00 2001 From: Matthieu Nottale Date: Wed, 14 Mar 2018 15:30:30 +0100 Subject: Add sparse argument to DockerClient.containers.list(). Signed-off-by: Matthieu Nottale --- docker/models/containers.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/docker/models/containers.py b/docker/models/containers.py index 895080c..d4ed1aa 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -833,7 +833,8 @@ class ContainerCollection(Collection): resp = self.client.api.inspect_container(container_id) return self.prepare_model(resp) - def list(self, all=False, before=None, filters=None, limit=-1, since=None): + def list(self, all=False, before=None, filters=None, limit=-1, since=None, + sparse=False): """ List containers. Similar to the ``docker ps`` command. 
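# Illustrative sketch (not part of the patches): exercising the two client-level
# additions above: APIClient.prune_build() (requires engine API >= 1.31) and the
# new DockerClient.close() wrapper. Assumes a local daemon reachable via
# docker.from_env().
import docker

client = docker.from_env()
try:
    reclaimed = client.api.prune_build()
    print('builder cache pruned:', reclaimed)  # e.g. {'SpaceReclaimed': 0, ...}
finally:
    client.close()  # delegates to the underlying APIClient.close()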
@@ -862,6 +863,9 @@ class ContainerCollection(Collection): container. Give the container name or id. - `since` (str): Only containers created after a particular container. Give container name or id. + sparse (bool): Do not inspect containers. Returns partial + informations, but guaranteed not to block. Use reload() on + each container to get the full list of attributes. A comprehensive list can be found in the documentation for `docker ps @@ -877,7 +881,10 @@ class ContainerCollection(Collection): resp = self.client.api.containers(all=all, before=before, filters=filters, limit=limit, since=since) - return [self.get(r['Id']) for r in resp] + if sparse: + return [self.prepare_model(r) for r in resp] + else: + return [self.get(r['Id']) for r in resp] def prune(self, filters=None): return self.client.api.prune_containers(filters=filters) -- cgit v1.2.1 From 33f1ca9a48dc79661a774fe6ac79b3feba39ed0e Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 14 Mar 2018 14:11:42 -0700 Subject: Use same split rules for Dockerfile as other include/exclude patterns Signed-off-by: Joffrey F --- docker/utils/build.py | 7 +++++-- tests/unit/utils_test.py | 5 +++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/docker/utils/build.py b/docker/utils/build.py index 1da56fb..1622ec3 100644 --- a/docker/utils/build.py +++ b/docker/utils/build.py @@ -31,18 +31,21 @@ def exclude_paths(root, patterns, dockerfile=None): if dockerfile is None: dockerfile = 'Dockerfile' + def split_path(p): + return [pt for pt in re.split(_SEP, p) if pt and pt != '.'] + def normalize(p): # Leading and trailing slashes are not relevant. Yes, # "foo.py/" must exclude the "foo.py" regular file. "." # components are not relevant either, even if the whole # pattern is only ".", as the Docker reference states: "For # historical reasons, the pattern . is ignored." - split = [pt for pt in re.split(_SEP, p) if pt and pt != '.'] # ".." component must be cleared with the potential previous # component, regardless of whether it exists: "A preprocessing # step [...] eliminates . and .. elements using Go's # filepath.". i = 0 + split = split_path(p) while i < len(split): if split[i] == '..': del split[i] @@ -62,7 +65,7 @@ def exclude_paths(root, patterns, dockerfile=None): # Exclude empty patterns such as "." or the empty string. 
filter(lambda p: p[1], patterns), # Always include the Dockerfile and .dockerignore - [(True, dockerfile.split('/')), (True, ['.dockerignore'])])))) + [(True, split_path(dockerfile)), (True, ['.dockerignore'])])))) return set(walk(root, patterns)) diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index c2dd502..56800f9 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -698,6 +698,11 @@ class ExcludePathsTest(unittest.TestCase): ['*'], dockerfile='foo/Dockerfile3' ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore'])) + # https://github.com/docker/docker-py/issues/1956 + assert self.exclude( + ['*'], dockerfile='./foo/Dockerfile3' + ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore'])) + def test_exclude_dockerfile_child(self): includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3') assert convert_path('foo/Dockerfile3') in includes -- cgit v1.2.1 From 90c0dbe5f8df7ef6d8fd0ccc581fd64cc2ecd1ab Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 14 Mar 2018 16:53:56 -0700 Subject: Add test for container list with sparse=True Signed-off-by: Joffrey F --- docker/models/containers.py | 30 ++++++++++++++++++++--------- tests/integration/models_containers_test.py | 22 +++++++++++++++++++++ 2 files changed, 43 insertions(+), 9 deletions(-) diff --git a/docker/models/containers.py b/docker/models/containers.py index d4ed1aa..1e06ed6 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -4,8 +4,10 @@ from collections import namedtuple from ..api import APIClient from ..constants import DEFAULT_DATA_CHUNK_SIZE -from ..errors import (ContainerError, ImageNotFound, - create_unexpected_kwargs_error) +from ..errors import ( + ContainerError, DockerException, ImageNotFound, + create_unexpected_kwargs_error +) from ..types import HostConfig from ..utils import version_gte from .images import Image @@ -27,7 +29,7 @@ class Container(Model): """ The image of the container. """ - image_id = self.attrs['Image'] + image_id = self.attrs.get('ImageID', self.attrs['Image']) if image_id is None: return None return self.client.images.get(image_id.split(':')[1]) @@ -37,15 +39,23 @@ class Container(Model): """ The labels of a container as dictionary. """ - result = self.attrs['Config'].get('Labels') - return result or {} + try: + result = self.attrs['Config'].get('Labels') + return result or {} + except KeyError: + raise DockerException( + 'Label data is not available for sparse objects. Call reload()' + ' to retrieve all information' + ) @property def status(self): """ The status of the container. For example, ``running``, or ``exited``. """ - return self.attrs['State']['Status'] + if isinstance(self.attrs['State'], dict): + return self.attrs['State']['Status'] + return self.attrs['State'] def attach(self, **kwargs): """ @@ -863,14 +873,16 @@ class ContainerCollection(Collection): container. Give the container name or id. - `since` (str): Only containers created after a particular container. Give container name or id. - sparse (bool): Do not inspect containers. Returns partial - informations, but guaranteed not to block. Use reload() on - each container to get the full list of attributes. A comprehensive list can be found in the documentation for `docker ps `_. + sparse (bool): Do not inspect containers. Returns partial + information, but guaranteed not to block. Use + :py:meth:`Container.reload` on resulting objects to retrieve + all attributes. 
Default: ``False`` + Returns: (list of :py:class:`Container`) diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py index fac4de2..38aae4d 100644 --- a/tests/integration/models_containers_test.py +++ b/tests/integration/models_containers_test.py @@ -159,6 +159,28 @@ class ContainerCollectionTest(BaseIntegrationTest): container = containers[0] assert container.attrs['Config']['Image'] == 'alpine' + assert container.status == 'running' + assert container.image == client.images.get('alpine') + + container.kill() + container.remove() + assert container_id not in [c.id for c in client.containers.list()] + + def test_list_sparse(self): + client = docker.from_env(version=TEST_API_VERSION) + container_id = client.containers.run( + "alpine", "sleep 300", detach=True).id + self.tmp_containers.append(container_id) + containers = [c for c in client.containers.list(sparse=True) if c.id == + container_id] + assert len(containers) == 1 + + container = containers[0] + assert container.attrs['Image'] == 'alpine' + assert container.status == 'running' + assert container.image == client.images.get('alpine') + with pytest.raises(docker.errors.DockerException): + container.labels container.kill() container.remove() -- cgit v1.2.1 From 16ccf377a38548ecdbfe8e2317e2ddcc599c6aea Mon Sep 17 00:00:00 2001 From: James Date: Thu, 15 Mar 2018 15:01:13 +0100 Subject: Updates docs for rename of `name` to `repository` Signed-off-by: James Meakin --- docker/models/images.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/models/images.py b/docker/models/images.py index 58d5d93..d4c2813 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -284,7 +284,7 @@ class ImageCollection(Collection): low-level API. 
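# Illustrative sketch (not part of the patches): with the new sparse=True flag,
# containers.list() skips the per-container inspect round-trip and returns partial
# objects; reload() fetches the full attribute set on demand. Assumes a local
# daemon reachable via docker.from_env().
import docker

client = docker.from_env()
for container in client.containers.list(sparse=True):
    print(container.id, container.status)  # available in the partial record
    container.reload()                      # populates Config, labels, etc.
    print(container.labels)
client.close()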
Args: - name (str): The repository to pull + repository (str): The repository to pull tag (str): The tag to pull auth_config (dict): Override the credentials that :py:meth:`~docker.client.DockerClient.login` has set for -- cgit v1.2.1 From 884261e24103f6732d4f529c19e6f7b56ccf199c Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 15 Mar 2018 14:36:39 -0700 Subject: Fix socket tests for TLS-enabled tests Signed-off-by: Joffrey F --- tests/helpers.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/helpers.py b/tests/helpers.py index c4ea364..b6b493b 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -108,21 +108,21 @@ def swarm_listen_addr(): def assert_cat_socket_detached_with_keys(sock, inputs): - if six.PY3: + if six.PY3 and hasattr(sock, '_sock'): sock = sock._sock for i in inputs: - sock.send(i) + sock.sendall(i) time.sleep(0.5) # If we're using a Unix socket, the sock.send call will fail with a # BrokenPipeError ; INET sockets will just stop receiving / sending data # but will not raise an error - if sock.family == getattr(socket, 'AF_UNIX', -1): + if getattr(sock, 'family', -9) == getattr(socket, 'AF_UNIX', -1): with pytest.raises(socket.error): - sock.send(b'make sure the socket is closed\n') + sock.sendall(b'make sure the socket is closed\n') else: - sock.send(b"make sure the socket is closed\n") + sock.sendall(b"make sure the socket is closed\n") assert sock.recv(32) == b'' -- cgit v1.2.1 From a4e642b015c50d9c628413341ed00c89599f66be Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 15 Mar 2018 14:37:02 -0700 Subject: Use networks instead of legacy links for test setup Signed-off-by: Joffrey F --- Jenkinsfile | 14 +++++++++----- Makefile | 36 +++++++++++++++++++++--------------- 2 files changed, 30 insertions(+), 20 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index c548492..1323f4b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -9,7 +9,7 @@ def dockerVersions = [ "17.06.2-ce", // Latest EE "17.12.1-ce", // Latest CE stable "18.02.0-ce", // Latest CE edge - "18.03.0-ce-rc1" // Latest CE RC + "18.03.0-ce-rc4" // Latest CE RC ] def buildImage = { name, buildargs, pyTag -> @@ -64,15 +64,18 @@ def runTests = { Map settings -> checkout(scm) def dindContainerName = "dpy-dind-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" def testContainerName = "dpy-tests-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" + def testNetwork = "dpy-testnet-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" try { - sh """docker run -d --name ${dindContainerName} -v /tmp --privileged \\ + sh """docker network create ${testNetwork}""" + sh """docker run -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ dockerswarm/dind:${dockerVersion} dockerd -H tcp://0.0.0.0:2375 """ sh """docker run \\ - --name ${testContainerName} --volumes-from ${dindContainerName} \\ - -e 'DOCKER_HOST=tcp://docker:2375' \\ + --name ${testContainerName} \\ + -e "DOCKER_HOST=tcp://${dindContainerName}:2375" \\ -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ - --link=${dindContainerName}:docker \\ + --network ${testNetwork} \\ + --volumes-from ${dindContainerName} \\ ${testImage} \\ py.test -v -rxs tests/integration """ @@ -80,6 +83,7 @@ def runTests = { Map settings -> sh """ docker stop ${dindContainerName} ${testContainerName} docker rm -vf ${dindContainerName} ${testContainerName} + docker network rm ${testNetwork} """ } } diff --git a/Makefile b/Makefile index f491993..434d40e 100644 --- a/Makefile +++ 
b/Makefile @@ -3,7 +3,7 @@ all: test .PHONY: clean clean: - -docker rm -f dpy-dind-py2 dpy-dind-py3 + -docker rm -f dpy-dind-py2 dpy-dind-py3 dpy-dind-certs dpy-dind-ssl find -name "__pycache__" | xargs rm -rf .PHONY: build @@ -44,41 +44,47 @@ integration-test-py3: build-py3 TEST_API_VERSION ?= 1.35 TEST_ENGINE_VERSION ?= 17.12.0-ce +.PHONY: setup-network +setup-network: + docker network inspect dpy-tests || docker network create dpy-tests + .PHONY: integration-dind integration-dind: integration-dind-py2 integration-dind-py3 .PHONY: integration-dind-py2 -integration-dind-py2: build +integration-dind-py2: build setup-network docker rm -vf dpy-dind-py2 || : - docker run -d --name dpy-dind-py2 --privileged dockerswarm/dind:${TEST_ENGINE_VERSION} dockerd\ - -H tcp://0.0.0.0:2375 --experimental - docker run -t --rm --env="DOCKER_HOST=tcp://docker:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --link=dpy-dind-py2:docker docker-sdk-python py.test tests/integration + docker run -d --network dpy-tests --name dpy-dind-py2 --privileged\ + dockerswarm/dind:${TEST_ENGINE_VERSION} dockerd -H tcp://0.0.0.0:2375 --experimental + docker run -t --rm --env="DOCKER_HOST=tcp://dpy-dind-py2:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ + --network dpy-tests docker-sdk-python py.test tests/integration docker rm -vf dpy-dind-py2 .PHONY: integration-dind-py3 -integration-dind-py3: build-py3 +integration-dind-py3: build-py3 setup-network docker rm -vf dpy-dind-py3 || : - docker run -d --name dpy-dind-py3 --privileged dockerswarm/dind:${TEST_ENGINE_VERSION} dockerd\ - -H tcp://0.0.0.0:2375 --experimental - docker run -t --rm --env="DOCKER_HOST=tcp://docker:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --link=dpy-dind-py3:docker docker-sdk-python3 py.test tests/integration + docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\ + dockerswarm/dind:${TEST_ENGINE_VERSION} dockerd -H tcp://0.0.0.0:2375 --experimental + docker run -t --rm --env="DOCKER_HOST=tcp://dpy-dind-py3:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ + --network dpy-tests docker-sdk-python3 py.test tests/integration docker rm -vf dpy-dind-py3 .PHONY: integration-dind-ssl integration-dind-ssl: build-dind-certs build build-py3 + docker rm -vf dpy-dind-certs dpy-dind-ssl || : docker run -d --name dpy-dind-certs dpy-dind-certs docker run -d --env="DOCKER_HOST=tcp://localhost:2375" --env="DOCKER_TLS_VERIFY=1"\ --env="DOCKER_CERT_PATH=/certs" --volumes-from dpy-dind-certs --name dpy-dind-ssl\ - -v /tmp --privileged dockerswarm/dind:${TEST_ENGINE_VERSION} dockerd --tlsverify\ - --tlscacert=/certs/ca.pem --tlscert=/certs/server-cert.pem\ + --network dpy-tests --network-alias docker -v /tmp --privileged\ + dockerswarm/dind:${TEST_ENGINE_VERSION}\ + dockerd --tlsverify --tlscacert=/certs/ca.pem --tlscert=/certs/server-cert.pem\ --tlskey=/certs/server-key.pem -H tcp://0.0.0.0:2375 --experimental docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\ --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --link=dpy-dind-ssl:docker docker-sdk-python py.test tests/integration + --network dpy-tests docker-sdk-python py.test tests/integration docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\ --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --link=dpy-dind-ssl:docker docker-sdk-python3 py.test tests/integration 
+ --network dpy-tests docker-sdk-python3 py.test tests/integration docker rm -vf dpy-dind-ssl dpy-dind-certs .PHONY: flake8 -- cgit v1.2.1 From 719d4e9e2091edef8c084857051a751bb8f97ea2 Mon Sep 17 00:00:00 2001 From: Viktor Adam Date: Wed, 21 Feb 2018 22:16:21 +0000 Subject: Allow cancelling the streams from other threads Signed-off-by: Viktor Adam --- docker/api/container.py | 17 ++++++-- docker/api/daemon.py | 21 ++++++---- docker/types/__init__.py | 1 + docker/types/daemon.py | 63 +++++++++++++++++++++++++++++ tests/integration/api_container_test.py | 48 ++++++++++++++++++++++ tests/integration/client_test.py | 20 +++++++++ tests/integration/models_containers_test.py | 21 ++++++++++ 7 files changed, 181 insertions(+), 10 deletions(-) create mode 100644 docker/types/daemon.py diff --git a/docker/api/container.py b/docker/api/container.py index f8d52de..cb97b79 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -5,7 +5,8 @@ from .. import errors from .. import utils from ..constants import DEFAULT_DATA_CHUNK_SIZE from ..types import ( - ContainerConfig, EndpointConfig, HostConfig, NetworkingConfig + CancellableStream, ContainerConfig, EndpointConfig, HostConfig, + NetworkingConfig ) @@ -52,10 +53,15 @@ class ContainerApiMixin(object): u = self._url("/containers/{0}/attach", container) response = self._post(u, headers=headers, params=params, stream=True) - return self._read_from_socket( + output = self._read_from_socket( response, stream, self._check_is_tty(container) ) + if stream: + return CancellableStream(output, response) + else: + return output + @utils.check_resource('container') def attach_socket(self, container, params=None, ws=False): """ @@ -815,7 +821,12 @@ class ContainerApiMixin(object): url = self._url("/containers/{0}/logs", container) res = self._get(url, params=params, stream=stream) - return self._get_result(container, stream, res) + output = self._get_result(container, stream, res) + + if stream: + return CancellableStream(output, res) + else: + return output @utils.check_resource('container') def pause(self, container): diff --git a/docker/api/daemon.py b/docker/api/daemon.py index 0e1c753..fc3692c 100644 --- a/docker/api/daemon.py +++ b/docker/api/daemon.py @@ -1,7 +1,7 @@ import os from datetime import datetime -from .. import auth, utils +from .. import auth, types, utils class DaemonApiMixin(object): @@ -34,8 +34,7 @@ class DaemonApiMixin(object): the fly. False by default. Returns: - (generator): A blocking generator you can iterate over to retrieve - events as they happen. + A :py:class:`docker.types.daemon.CancellableStream` generator Raises: :py:class:`docker.errors.APIError` @@ -50,6 +49,14 @@ class DaemonApiMixin(object): u'status': u'start', u'time': 1423339459} ... + + or + + >>> events = client.events() + >>> for event in events: + ... 
print event + >>> # and cancel from another thread + >>> events.close() """ if isinstance(since, datetime): @@ -68,10 +75,10 @@ class DaemonApiMixin(object): } url = self._url('/events') - return self._stream_helper( - self._get(url, params=params, stream=True, timeout=None), - decode=decode - ) + response = self._get(url, params=params, stream=True, timeout=None) + stream = self._stream_helper(response, decode=decode) + + return types.CancellableStream(stream, response) def info(self): """ diff --git a/docker/types/__init__.py b/docker/types/__init__.py index 39c93e3..0b0d847 100644 --- a/docker/types/__init__.py +++ b/docker/types/__init__.py @@ -1,5 +1,6 @@ # flake8: noqa from .containers import ContainerConfig, HostConfig, LogConfig, Ulimit +from .daemon import CancellableStream from .healthcheck import Healthcheck from .networks import EndpointConfig, IPAMConfig, IPAMPool, NetworkingConfig from .services import ( diff --git a/docker/types/daemon.py b/docker/types/daemon.py new file mode 100644 index 0000000..ba0334d --- /dev/null +++ b/docker/types/daemon.py @@ -0,0 +1,63 @@ +import socket + +try: + import requests.packages.urllib3 as urllib3 +except ImportError: + import urllib3 + + +class CancellableStream(object): + """ + Stream wrapper for real-time events, logs, etc. from the server. + + Example: + >>> events = client.events() + >>> for event in events: + ... print event + >>> # and cancel from another thread + >>> events.close() + """ + + def __init__(self, stream, response): + self._stream = stream + self._response = response + + def __iter__(self): + return self + + def __next__(self): + try: + return next(self._stream) + except urllib3.exceptions.ProtocolError: + raise StopIteration + except socket.error: + raise StopIteration + + next = __next__ + + def close(self): + """ + Closes the event streaming. 
+ """ + + if not self._response.raw.closed: + # find the underlying socket object + # based on api.client._get_raw_response_socket + + sock_fp = self._response.raw._fp.fp + + if hasattr(sock_fp, 'raw'): + sock_raw = sock_fp.raw + + if hasattr(sock_raw, 'sock'): + sock = sock_raw.sock + + elif hasattr(sock_raw, '_sock'): + sock = sock_raw._sock + + else: + sock = sock_fp._sock + + sock.shutdown(socket.SHUT_RDWR) + sock.makefile().close() + sock.close() diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 8447aa5..cc2c071 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -2,6 +2,7 @@ import os import re import signal import tempfile +import threading from datetime import datetime import docker @@ -880,6 +881,30 @@ Line2''' assert logs == (snippet + '\n').encode(encoding='ascii') + def test_logs_streaming_and_follow_and_cancel(self): + snippet = 'Flowering Nights (Sakuya Iyazoi)' + container = self.client.create_container( + BUSYBOX, 'sh -c "echo \\"{0}\\" && sleep 3"'.format(snippet) + ) + id = container['Id'] + self.tmp_containers.append(id) + self.client.start(id) + logs = six.binary_type() + + generator = self.client.logs(id, stream=True, follow=True) + + exit_timer = threading.Timer(3, os._exit, args=[1]) + exit_timer.start() + + threading.Timer(1, generator.close).start() + + for chunk in generator: + logs += chunk + + exit_timer.cancel() + + assert logs == (snippet + '\n').encode(encoding='ascii') + def test_logs_with_dict_instead_of_id(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( @@ -1226,6 +1251,29 @@ class AttachContainerTest(BaseAPIIntegrationTest): output = self.client.attach(container, stream=False, logs=True) assert output == 'hello\n'.encode(encoding='ascii') + def test_attach_stream_and_cancel(self): + container = self.client.create_container( + BUSYBOX, 'sh -c "echo hello && sleep 60"', + tty=True + ) + self.tmp_containers.append(container) + self.client.start(container) + output = self.client.attach(container, stream=True, logs=True) + + exit_timer = threading.Timer(3, os._exit, args=[1]) + exit_timer.start() + + threading.Timer(1, output.close).start() + + lines = [] + for line in output: + lines.append(line) + + exit_timer.cancel() + + assert len(lines) == 1 + assert lines[0] == 'hello\r\n'.encode(encoding='ascii') + def test_detach_with_default(self): container = self.client.create_container( BUSYBOX, 'cat', diff --git a/tests/integration/client_test.py b/tests/integration/client_test.py index 8f6bd86..7df172c 100644 --- a/tests/integration/client_test.py +++ b/tests/integration/client_test.py @@ -1,7 +1,10 @@ +import threading import unittest import docker +from datetime import datetime, timedelta + from ..helpers import requires_api_version from .base import TEST_API_VERSION @@ -27,3 +30,20 @@ class ClientTest(unittest.TestCase): assert 'Containers' in data assert 'Volumes' in data assert 'Images' in data + + +class CancellableEventsTest(unittest.TestCase): + client = docker.from_env(version=TEST_API_VERSION) + + def test_cancel_events(self): + start = datetime.now() + + events = self.client.events(until=start + timedelta(seconds=5)) + + cancel_thread = threading.Timer(2, events.close) + cancel_thread.start() + + for _ in events: + pass + + self.assertLess(datetime.now() - start, timedelta(seconds=3)) diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py index 
38aae4d..41faff3 100644 --- a/tests/integration/models_containers_test.py +++ b/tests/integration/models_containers_test.py @@ -1,4 +1,6 @@ +import os import tempfile +import threading import docker import pytest @@ -141,6 +143,25 @@ class ContainerCollectionTest(BaseIntegrationTest): assert logs[0] == b'hello\n' assert logs[1] == b'world\n' + def test_run_with_streamed_logs_and_cancel(self): + client = docker.from_env(version=TEST_API_VERSION) + out = client.containers.run( + 'alpine', 'sh -c "echo hello && echo world"', stream=True + ) + + exit_timer = threading.Timer(3, os._exit, args=[1]) + exit_timer.start() + + threading.Timer(1, out.close).start() + + logs = [line for line in out] + + exit_timer.cancel() + + assert len(logs) == 2 + assert logs[0] == b'hello\n' + assert logs[1] == b'world\n' + def test_get(self): client = docker.from_env(version=TEST_API_VERSION) container = client.containers.run("alpine", "sleep 300", detach=True) -- cgit v1.2.1 From 284c3d90d6ab1c49410d5622ca8cd3f37dcbe296 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 19 Mar 2018 14:40:49 +0100 Subject: Remove redundant single-socket select call Clean up + use pytest-timeout Signed-off-by: Joffrey F --- docker/types/daemon.py | 1 - docker/utils/socket.py | 3 +-- test-requirements.txt | 5 +++-- tests/integration/api_container_test.py | 13 ++----------- tests/integration/models_containers_test.py | 7 +------ 5 files changed, 7 insertions(+), 22 deletions(-) diff --git a/docker/types/daemon.py b/docker/types/daemon.py index ba0334d..852f3d8 100644 --- a/docker/types/daemon.py +++ b/docker/types/daemon.py @@ -59,5 +59,4 @@ class CancellableStream(object): sock = sock_fp._sock sock.shutdown(socket.SHUT_RDWR) - sock.makefile().close() sock.close() diff --git a/docker/utils/socket.py b/docker/utils/socket.py index c3a5f90..0945f0a 100644 --- a/docker/utils/socket.py +++ b/docker/utils/socket.py @@ -22,8 +22,7 @@ def read(socket, n=4096): recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK) - # wait for data to become available - if not isinstance(socket, NpipeSocket): + if six.PY3 and not isinstance(socket, NpipeSocket): select.select([socket], [], []) try: diff --git a/test-requirements.txt b/test-requirements.txt index f79e815..09680b6 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,5 +1,6 @@ +coverage==3.7.1 +flake8==3.4.1 mock==1.0.1 pytest==2.9.1 -coverage==3.7.1 pytest-cov==2.1.0 -flake8==3.4.1 +pytest-timeout==1.2.1 diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index cc2c071..e212518 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -881,6 +881,7 @@ Line2''' assert logs == (snippet + '\n').encode(encoding='ascii') + @pytest.mark.timeout(5) def test_logs_streaming_and_follow_and_cancel(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( @@ -892,17 +893,11 @@ Line2''' logs = six.binary_type() generator = self.client.logs(id, stream=True, follow=True) - - exit_timer = threading.Timer(3, os._exit, args=[1]) - exit_timer.start() - threading.Timer(1, generator.close).start() for chunk in generator: logs += chunk - exit_timer.cancel() - assert logs == (snippet + '\n').encode(encoding='ascii') def test_logs_with_dict_instead_of_id(self): @@ -1251,6 +1246,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): output = self.client.attach(container, stream=False, logs=True) assert output == 'hello\n'.encode(encoding='ascii') + 
@pytest.mark.timeout(5) def test_attach_stream_and_cancel(self): container = self.client.create_container( BUSYBOX, 'sh -c "echo hello && sleep 60"', @@ -1260,17 +1256,12 @@ class AttachContainerTest(BaseAPIIntegrationTest): self.client.start(container) output = self.client.attach(container, stream=True, logs=True) - exit_timer = threading.Timer(3, os._exit, args=[1]) - exit_timer.start() - threading.Timer(1, output.close).start() lines = [] for line in output: lines.append(line) - exit_timer.cancel() - assert len(lines) == 1 assert lines[0] == 'hello\r\n'.encode(encoding='ascii') diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py index 41faff3..6ddb034 100644 --- a/tests/integration/models_containers_test.py +++ b/tests/integration/models_containers_test.py @@ -1,4 +1,3 @@ -import os import tempfile import threading @@ -143,21 +142,17 @@ class ContainerCollectionTest(BaseIntegrationTest): assert logs[0] == b'hello\n' assert logs[1] == b'world\n' + @pytest.mark.timeout(5) def test_run_with_streamed_logs_and_cancel(self): client = docker.from_env(version=TEST_API_VERSION) out = client.containers.run( 'alpine', 'sh -c "echo hello && echo world"', stream=True ) - exit_timer = threading.Timer(3, os._exit, args=[1]) - exit_timer.start() - threading.Timer(1, out.close).start() logs = [line for line in out] - exit_timer.cancel() - assert len(logs) == 2 assert logs[0] == b'hello\n' assert logs[1] == b'world\n' -- cgit v1.2.1 From 7a28cad58ec7c279b91c75a3aa701bb89e0e75cd Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 21 Mar 2018 13:54:43 +0100 Subject: Don't descend into symlinks when building context tar Signed-off-by: Joffrey F --- docker/utils/build.py | 2 +- tests/unit/utils_test.py | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/docker/utils/build.py b/docker/utils/build.py index 1622ec3..894b299 100644 --- a/docker/utils/build.py +++ b/docker/utils/build.py @@ -93,7 +93,7 @@ def walk(root, patterns, default=True): # Whether this file is implicitely included / excluded. matched = default if hit is None else hit sub = list(filter(lambda p: p[1], sub)) - if os.path.isdir(cur): + if os.path.isdir(cur) and not os.path.islink(cur): # Entirely skip directories if there are no chance any subfile will # be included. 
if all(not p[0] for p in sub) and not matched: diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index 56800f9..00456e8 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -1058,6 +1058,21 @@ class TarTest(unittest.TestCase): assert tar_data.getnames() == ['th.txt'] assert tar_data.getmember('th.txt').mtime == -3600 + @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows') + def test_tar_directory_link(self): + dirs = ['a', 'b', 'a/c'] + files = ['a/hello.py', 'b/utils.py', 'a/c/descend.py'] + base = make_tree(dirs, files) + self.addCleanup(shutil.rmtree, base) + os.symlink(os.path.join(base, 'b'), os.path.join(base, 'a/c/b')) + with tar(base) as archive: + tar_data = tarfile.open(fileobj=archive) + names = tar_data.getnames() + for member in dirs + files: + assert member in names + assert 'a/c/b' in names + assert 'a/c/b/utils.py' not in names + class FormatEnvironmentTest(unittest.TestCase): def test_format_env_binary_unicode_value(self): -- cgit v1.2.1 From cd9fed108cd06baf318e9a9670fd27298304ef04 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 22 Mar 2018 13:10:35 +0100 Subject: Generate test engines list dynamically Signed-off-by: Joffrey F --- Jenkinsfile | 29 ++++++++++++++++------ scripts/versions.py | 71 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 92 insertions(+), 8 deletions(-) create mode 100644 scripts/versions.py diff --git a/Jenkinsfile b/Jenkinsfile index 1323f4b..211159b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -5,13 +5,6 @@ def imageNamePy2 def imageNamePy3 def images = [:] -def dockerVersions = [ - "17.06.2-ce", // Latest EE - "17.12.1-ce", // Latest CE stable - "18.02.0-ce", // Latest CE edge - "18.03.0-ce-rc4" // Latest CE RC -] - def buildImage = { name, buildargs, pyTag -> img = docker.image(name) try { @@ -37,9 +30,27 @@ def buildImages = { -> } } +def getDockerVersions = { -> + def dockerVersions = ["17.06.2-ce"] + wrappedNode(label: "ubuntu && !zfs") { + def result = sh(script: """docker run --rm \\ + --entrypoint=python \\ + ${imageNamePy3} \\ + /src/scripts/versions.py + """, returnStdout: true + ) + dockerVersions = dockerVersions + result.trim().tokenize(' ') + } + return dockerVersions +} + def getAPIVersion = { engineVersion -> def versionMap = ['17.06': '1.30', '17.12': '1.35', '18.02': '1.36', '18.03': '1.37'] - return versionMap[engineVersion.substring(0, 5)] + def result = versionMap[engineVersion.substring(0, 5)] + if (!result) { + return '1.37' + } + return result } def runTests = { Map settings -> @@ -94,6 +105,8 @@ def runTests = { Map settings -> buildImages() +def dockerVersions = getDockerVersions() + def testMatrix = [failFast: false] for (imgKey in new ArrayList(images.keySet())) { diff --git a/scripts/versions.py b/scripts/versions.py new file mode 100644 index 0000000..77aaf4f --- /dev/null +++ b/scripts/versions.py @@ -0,0 +1,71 @@ +import operator +import re +from collections import namedtuple + +import requests + +base_url = 'https://download.docker.com/linux/static/{0}/x86_64/' +categories = [ + 'edge', + 'stable', + 'test' +] + + +class Version(namedtuple('_Version', 'major minor patch rc edition')): + + @classmethod + def parse(cls, version): + edition = None + version = version.lstrip('v') + version, _, rc = version.partition('-') + if rc: + if 'rc' not in rc: + edition = rc + rc = None + elif '-' in rc: + edition, rc = rc.split('-') + + major, minor, patch = version.split('.', 3) + return cls(major, minor, patch, rc, edition) + + @property + def 
major_minor(self): + return self.major, self.minor + + @property + def order(self): + """Return a representation that allows this object to be sorted + correctly with the default comparator. + """ + # rc releases should appear before official releases + rc = (0, self.rc) if self.rc else (1, ) + return (int(self.major), int(self.minor), int(self.patch)) + rc + + def __str__(self): + rc = '-{}'.format(self.rc) if self.rc else '' + edition = '-{}'.format(self.edition) if self.edition else '' + return '.'.join(map(str, self[:3])) + edition + rc + + +def main(): + results = set() + for url in [base_url.format(cat) for cat in categories]: + res = requests.get(url) + content = res.text + versions = [ + Version.parse( + v.strip('"').lstrip('docker-').rstrip('.tgz').rstrip('-x86_64') + ) for v in re.findall( + r'"docker-[0-9]+\.[0-9]+\.[0-9]+-.*tgz"', content + ) + ] + sorted_versions = sorted( + versions, reverse=True, key=operator.attrgetter('order') + ) + latest = sorted_versions[0] + results.add(str(latest)) + print(' '.join(results)) + +if __name__ == '__main__': + main() -- cgit v1.2.1 From c88db80f01ebef002d3bf9aca49ce273b46c6928 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 22 Mar 2018 09:51:10 +0100 Subject: Add isolation param to build Signed-off-by: Joffrey F --- docker/api/build.py | 11 ++++++++++- docker/models/images.py | 2 ++ tests/integration/api_build_test.py | 15 +++++++++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) diff --git a/docker/api/build.py b/docker/api/build.py index e136a6e..3067c10 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -18,7 +18,7 @@ class BuildApiMixin(object): forcerm=False, dockerfile=None, container_limits=None, decode=False, buildargs=None, gzip=False, shmsize=None, labels=None, cache_from=None, target=None, network_mode=None, - squash=None, extra_hosts=None, platform=None): + squash=None, extra_hosts=None, platform=None, isolation=None): """ Similar to the ``docker build`` command. Either ``path`` or ``fileobj`` needs to be set. ``path`` can be a local path (to a directory @@ -100,6 +100,8 @@ class BuildApiMixin(object): extra_hosts (dict): Extra hosts to add to /etc/hosts in building containers, as a mapping of hostname to IP address. platform (str): Platform in the format ``os[/arch[/variant]]`` + isolation (str): Isolation technology used during build. + Default: `None`. Returns: A generator for the build output. @@ -232,6 +234,13 @@ class BuildApiMixin(object): ) params['platform'] = platform + if isolation is not None: + if utils.version_lt(self._version, '1.24'): + raise errors.InvalidVersion( + 'isolation was only introduced in API version 1.24' + ) + params['isolation'] = isolation + if context is not None: headers = {'Content-Type': 'application/tar'} if encoding: diff --git a/docker/models/images.py b/docker/models/images.py index d4c2813..bb24eb5 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -164,6 +164,8 @@ class ImageCollection(Collection): extra_hosts (dict): Extra hosts to add to /etc/hosts in building containers, as a mapping of hostname to IP address. platform (str): Platform in the format ``os[/arch[/variant]]``. + isolation (str): Isolation technology used during build. + Default: `None`. 
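# Illustrative sketch (not part of the patches): the Version helper added in
# scripts/versions.py a few hunks above ranks versions numerically, with a release
# candidate ordering below the final release of the same x.y.z. Assumes the
# repository root is on sys.path so that scripts.versions is importable.
import operator
from scripts.versions import Version

candidates = ['17.12.1-ce', '18.02.0-ce', '18.03.0-ce-rc4']
parsed = [Version.parse(v) for v in candidates]
newest = sorted(parsed, key=operator.attrgetter('order'), reverse=True)[0]
print(str(newest))  # -> 18.03.0-ce-rc4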
Returns: (tuple): The first item is the :py:class:`Image` object for the diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index ce587d5..13bd8ac 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -138,6 +138,21 @@ class BuildTest(BaseAPIIntegrationTest): # There is currently no way to get the shmsize # that was used to build the image + @requires_api_version('1.24') + def test_build_isolation(self): + script = io.BytesIO('\n'.join([ + 'FROM scratch', + 'CMD sh -c "echo \'Deaf To All But The Song\'' + ]).encode('ascii')) + + stream = self.client.build( + fileobj=script, tag='isolation', + isolation='default' + ) + + for chunk in stream: + pass + @requires_api_version('1.23') def test_build_labels(self): script = io.BytesIO('\n'.join([ -- cgit v1.2.1 From 12a6833eba4f64be1386d3da0d605156319c5946 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 26 Mar 2018 14:12:42 -0700 Subject: Update MAINTAINERS file Signed-off-by: Joffrey F --- MAINTAINERS | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/MAINTAINERS b/MAINTAINERS index 76aafd8..b857d13 100644 --- a/MAINTAINERS +++ b/MAINTAINERS @@ -10,13 +10,16 @@ # [Org] [Org."Core maintainers"] + people = [ + "shin-", + ] + [Org.Alumni] people = [ "aanand", "bfirsh", "dnephin", "mnowster", "mpetazzoni", - "shin-", ] [people] -- cgit v1.2.1 From 081b78f15e9a7d3702dd61fa9f01c3babf61a819 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 26 Mar 2018 13:36:45 -0700 Subject: Support building with Dockerfile outside of context Signed-off-by: Joffrey F --- docker/api/build.py | 10 ++++++++++ docker/utils/build.py | 22 ++++++++++++++++------ docker/utils/utils.py | 12 +++++++++++- tests/integration/api_build_test.py | 33 +++++++++++++++++++++++++++++++++ 4 files changed, 70 insertions(+), 7 deletions(-) diff --git a/docker/api/build.py b/docker/api/build.py index 3067c10..2a22759 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -1,6 +1,7 @@ import json import logging import os +import random from .. import auth from .. 
import constants @@ -148,6 +149,15 @@ class BuildApiMixin(object): lambda x: x != '' and x[0] != '#', [l.strip() for l in f.read().splitlines()] )) + if dockerfile and os.path.relpath(dockerfile, path).startswith( + '..'): + with open(dockerfile, 'r') as df: + dockerfile = ( + '.dockerfile.{0:x}'.format(random.getrandbits(160)), + df.read() + ) + else: + dockerfile = (dockerfile, None) context = utils.tar( path, exclude=exclude, dockerfile=dockerfile, gzip=gzip ) diff --git a/docker/utils/build.py b/docker/utils/build.py index 894b299..0f17347 100644 --- a/docker/utils/build.py +++ b/docker/utils/build.py @@ -2,23 +2,33 @@ import os import re from ..constants import IS_WINDOWS_PLATFORM +from .utils import create_archive from fnmatch import fnmatch from itertools import chain -from .utils import create_archive + + +_SEP = re.compile('/|\\\\') if IS_WINDOWS_PLATFORM else re.compile('/') def tar(path, exclude=None, dockerfile=None, fileobj=None, gzip=False): root = os.path.abspath(path) exclude = exclude or [] + dockerfile = dockerfile or (None, None) + extra_files = [] + if dockerfile[1] is not None: + dockerignore_contents = '\n'.join( + (exclude or ['.dockerignore']) + [dockerfile[0]] + ) + extra_files = [ + ('.dockerignore', dockerignore_contents), + dockerfile, + ] return create_archive( - files=sorted(exclude_paths(root, exclude, dockerfile=dockerfile)), - root=root, fileobj=fileobj, gzip=gzip + files=sorted(exclude_paths(root, exclude, dockerfile=dockerfile[0])), + root=root, fileobj=fileobj, gzip=gzip, extra_files=extra_files ) -_SEP = re.compile('/|\\\\') if IS_WINDOWS_PLATFORM else re.compile('/') - - def exclude_paths(root, patterns, dockerfile=None): """ Given a root directory path and a list of .dockerignore patterns, return diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 3cd2be8..5024e47 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -88,13 +88,17 @@ def build_file_list(root): return files -def create_archive(root, files=None, fileobj=None, gzip=False): +def create_archive(root, files=None, fileobj=None, gzip=False, + extra_files=None): if not fileobj: fileobj = tempfile.NamedTemporaryFile() t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj) if files is None: files = build_file_list(root) for path in files: + if path in [e[0] for e in extra_files]: + # Extra files override context files with the same name + continue full_path = os.path.join(root, path) i = t.gettarinfo(full_path, arcname=path) @@ -123,6 +127,12 @@ def create_archive(root, files=None, fileobj=None, gzip=False): else: # Directories, FIFOs, symlinks... don't need to be read. 
t.addfile(i, None) + + for name, contents in extra_files: + info = tarfile.TarInfo(name) + info.size = len(contents) + t.addfile(info, io.BytesIO(contents.encode('utf-8'))) + t.close() fileobj.seek(0) return fileobj diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index 13bd8ac..f411efc 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -407,3 +407,36 @@ class BuildTest(BaseAPIIntegrationTest): assert excinfo.value.status_code == 400 assert 'invalid platform' in excinfo.exconly() + + def test_build_out_of_context_dockerfile(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + with open(os.path.join(base_dir, 'file.txt'), 'w') as f: + f.write('hello world') + with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: + f.write('.dockerignore\n') + df = tempfile.NamedTemporaryFile() + self.addCleanup(df.close) + df.write(('\n'.join([ + 'FROM busybox', + 'COPY . /src', + 'WORKDIR /src', + ])).encode('utf-8')) + df.flush() + img_name = random_name() + self.tmp_imgs.append(img_name) + stream = self.client.build( + path=base_dir, dockerfile=df.name, tag=img_name, + decode=True + ) + lines = [] + for chunk in stream: + lines.append(chunk) + assert 'Successfully tagged' in lines[-1]['stream'] + + ctnr = self.client.create_container(img_name, 'ls -a') + self.tmp_containers.append(ctnr) + self.client.start(ctnr) + lsdata = self.client.logs(ctnr).strip().split(b'\n') + assert len(lsdata) == 3 + assert sorted([b'.', b'..', b'file.txt']) == sorted(lsdata) -- cgit v1.2.1 From 3fdc0127c1c42ddde96dbcc1e5611207ba8b8bd7 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 26 Mar 2018 13:38:13 -0700 Subject: Move build utils to appropriate file Signed-off-by: Joffrey F --- docker/utils/__init__.py | 6 ++-- docker/utils/build.py | 91 +++++++++++++++++++++++++++++++++++++++++++++++- docker/utils/utils.py | 89 ---------------------------------------------- 3 files changed, 93 insertions(+), 93 deletions(-) diff --git a/docker/utils/__init__.py b/docker/utils/__init__.py index e70a5e6..81c8186 100644 --- a/docker/utils/__init__.py +++ b/docker/utils/__init__.py @@ -1,13 +1,13 @@ # flake8: noqa -from .build import tar, exclude_paths +from .build import create_archive, exclude_paths, mkbuildcontext, tar from .decorators import check_resource, minimum_version, update_headers from .utils import ( compare_version, convert_port_bindings, convert_volume_binds, - mkbuildcontext, parse_repository_tag, parse_host, + parse_repository_tag, parse_host, kwargs_from_env, convert_filters, datetime_to_timestamp, create_host_config, parse_bytes, parse_env_file, version_lt, version_gte, decode_json_header, split_command, create_ipam_config, create_ipam_pool, parse_devices, normalize_links, convert_service_networks, - format_environment, create_archive, format_extra_hosts + format_environment, format_extra_hosts ) diff --git a/docker/utils/build.py b/docker/utils/build.py index 0f17347..783273e 100644 --- a/docker/utils/build.py +++ b/docker/utils/build.py @@ -1,8 +1,11 @@ +import io import os import re +import six +import tarfile +import tempfile from ..constants import IS_WINDOWS_PLATFORM -from .utils import create_archive from fnmatch import fnmatch from itertools import chain @@ -127,3 +130,89 @@ def walk(root, patterns, default=True): yield f elif matched: yield f + + +def build_file_list(root): + files = [] + for dirname, dirnames, fnames in os.walk(root): + for filename in fnames + dirnames: + longpath = 
os.path.join(dirname, filename) + files.append( + longpath.replace(root, '', 1).lstrip('/') + ) + + return files + + +def create_archive(root, files=None, fileobj=None, gzip=False, + extra_files=None): + extra_files = extra_files or [] + if not fileobj: + fileobj = tempfile.NamedTemporaryFile() + t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj) + if files is None: + files = build_file_list(root) + for path in files: + if path in [e[0] for e in extra_files]: + # Extra files override context files with the same name + continue + full_path = os.path.join(root, path) + + i = t.gettarinfo(full_path, arcname=path) + if i is None: + # This happens when we encounter a socket file. We can safely + # ignore it and proceed. + continue + + # Workaround https://bugs.python.org/issue32713 + if i.mtime < 0 or i.mtime > 8**11 - 1: + i.mtime = int(i.mtime) + + if IS_WINDOWS_PLATFORM: + # Windows doesn't keep track of the execute bit, so we make files + # and directories executable by default. + i.mode = i.mode & 0o755 | 0o111 + + if i.isfile(): + try: + with open(full_path, 'rb') as f: + t.addfile(i, f) + except IOError: + raise IOError( + 'Can not read file in context: {}'.format(full_path) + ) + else: + # Directories, FIFOs, symlinks... don't need to be read. + t.addfile(i, None) + + for name, contents in extra_files: + info = tarfile.TarInfo(name) + info.size = len(contents) + t.addfile(info, io.BytesIO(contents.encode('utf-8'))) + + t.close() + fileobj.seek(0) + return fileobj + + +def mkbuildcontext(dockerfile): + f = tempfile.NamedTemporaryFile() + t = tarfile.open(mode='w', fileobj=f) + if isinstance(dockerfile, io.StringIO): + dfinfo = tarfile.TarInfo('Dockerfile') + if six.PY3: + raise TypeError('Please use io.BytesIO to create in-memory ' + 'Dockerfiles with Python 3') + else: + dfinfo.size = len(dockerfile.getvalue()) + dockerfile.seek(0) + elif isinstance(dockerfile, io.BytesIO): + dfinfo = tarfile.TarInfo('Dockerfile') + dfinfo.size = len(dockerfile.getvalue()) + dockerfile.seek(0) + else: + dfinfo = t.gettarinfo(fileobj=dockerfile, arcname='Dockerfile') + t.addfile(dfinfo, dockerfile) + t.close() + f.seek(0) + return f diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 5024e47..fe3b9a5 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -1,17 +1,13 @@ import base64 -import io import os import os.path import json import shlex -import tarfile -import tempfile from distutils.version import StrictVersion from datetime import datetime import six -from .. import constants from .. import errors from .. 
import tls @@ -46,29 +42,6 @@ def create_ipam_config(*args, **kwargs): ) -def mkbuildcontext(dockerfile): - f = tempfile.NamedTemporaryFile() - t = tarfile.open(mode='w', fileobj=f) - if isinstance(dockerfile, io.StringIO): - dfinfo = tarfile.TarInfo('Dockerfile') - if six.PY3: - raise TypeError('Please use io.BytesIO to create in-memory ' - 'Dockerfiles with Python 3') - else: - dfinfo.size = len(dockerfile.getvalue()) - dockerfile.seek(0) - elif isinstance(dockerfile, io.BytesIO): - dfinfo = tarfile.TarInfo('Dockerfile') - dfinfo.size = len(dockerfile.getvalue()) - dockerfile.seek(0) - else: - dfinfo = t.gettarinfo(fileobj=dockerfile, arcname='Dockerfile') - t.addfile(dfinfo, dockerfile) - t.close() - f.seek(0) - return f - - def decode_json_header(header): data = base64.b64decode(header) if six.PY3: @@ -76,68 +49,6 @@ def decode_json_header(header): return json.loads(data) -def build_file_list(root): - files = [] - for dirname, dirnames, fnames in os.walk(root): - for filename in fnames + dirnames: - longpath = os.path.join(dirname, filename) - files.append( - longpath.replace(root, '', 1).lstrip('/') - ) - - return files - - -def create_archive(root, files=None, fileobj=None, gzip=False, - extra_files=None): - if not fileobj: - fileobj = tempfile.NamedTemporaryFile() - t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj) - if files is None: - files = build_file_list(root) - for path in files: - if path in [e[0] for e in extra_files]: - # Extra files override context files with the same name - continue - full_path = os.path.join(root, path) - - i = t.gettarinfo(full_path, arcname=path) - if i is None: - # This happens when we encounter a socket file. We can safely - # ignore it and proceed. - continue - - # Workaround https://bugs.python.org/issue32713 - if i.mtime < 0 or i.mtime > 8**11 - 1: - i.mtime = int(i.mtime) - - if constants.IS_WINDOWS_PLATFORM: - # Windows doesn't keep track of the execute bit, so we make files - # and directories executable by default. - i.mode = i.mode & 0o755 | 0o111 - - if i.isfile(): - try: - with open(full_path, 'rb') as f: - t.addfile(i, f) - except IOError: - raise IOError( - 'Can not read file in context: {}'.format(full_path) - ) - else: - # Directories, FIFOs, symlinks... don't need to be read. 
- t.addfile(i, None) - - for name, contents in extra_files: - info = tarfile.TarInfo(name) - info.size = len(contents) - t.addfile(info, io.BytesIO(contents.encode('utf-8'))) - - t.close() - fileobj.seek(0) - return fileobj - - def compare_version(v1, v2): """Compare docker versions -- cgit v1.2.1 From 899f3cf5a86784dc63eda9545bde73cffc236f0b Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Tue, 27 Mar 2018 10:22:17 -0700 Subject: Improve extra_files override check Signed-off-by: Joffrey F --- docker/utils/build.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/utils/build.py b/docker/utils/build.py index 783273e..b644c9f 100644 --- a/docker/utils/build.py +++ b/docker/utils/build.py @@ -152,8 +152,9 @@ def create_archive(root, files=None, fileobj=None, gzip=False, t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj) if files is None: files = build_file_list(root) + extra_names = set(e[0] for e in extra_files) for path in files: - if path in [e[0] for e in extra_files]: + if path in extra_names: # Extra files override context files with the same name continue full_path = os.path.join(root, path) -- cgit v1.2.1 From 9ff787cb5f6b2ad21669c54a7fa089498538dd2a Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 26 Mar 2018 19:01:50 -0700 Subject: Add methods for /distribution//json endpoint Signed-off-by: Joffrey F --- docker/api/image.py | 21 +++++++ docker/models/images.py | 107 +++++++++++++++++++++++++++++++++++- docs/images.rst | 19 +++++++ tests/integration/api_image_test.py | 9 +++ 4 files changed, 155 insertions(+), 1 deletion(-) diff --git a/docker/api/image.py b/docker/api/image.py index 3ebca32..5f05d88 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -245,6 +245,27 @@ class ImageApiMixin(object): self._get(self._url("/images/{0}/json", image)), True ) + @utils.minimum_version('1.30') + @utils.check_resource('image') + def inspect_distribution(self, image): + """ + Get image digest and platform information by contacting the registry. + + Args: + image (str): The image name to inspect + + Returns: + (dict): A dict containing distribution data + + Raises: + :py:class:`docker.errors.APIError` + If the server returns an error. + """ + + return self._result( + self._get(self._url("/distribution/{0}/json", image)), True + ) + def load_image(self, data, quiet=None): """ Load an image that was previously saved using diff --git a/docker/models/images.py b/docker/models/images.py index bb24eb5..d4893bb 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -5,7 +5,7 @@ import six from ..api import APIClient from ..constants import DEFAULT_DATA_CHUNK_SIZE -from ..errors import BuildError, ImageLoadError +from ..errors import BuildError, ImageLoadError, InvalidArgument from ..utils import parse_repository_tag from ..utils.json_stream import json_stream from .resource import Collection, Model @@ -105,6 +105,81 @@ class Image(Model): return self.client.api.tag(self.id, repository, tag=tag, **kwargs) +class RegistryData(Model): + """ + Image metadata stored on the registry, including available platforms. + """ + def __init__(self, image_name, *args, **kwargs): + super(RegistryData, self).__init__(*args, **kwargs) + self.image_name = image_name + + @property + def id(self): + """ + The ID of the object. + """ + return self.attrs['Descriptor']['digest'] + + @property + def short_id(self): + """ + The ID of the image truncated to 10 characters, plus the ``sha256:`` + prefix. 
+ """ + return self.id[:17] + + def pull(self, platform=None): + """ + Pull the image digest. + + Args: + platform (str): The platform to pull the image for. + Default: ``None`` + + Returns: + (:py:class:`Image`): A reference to the pulled image. + """ + repository, _ = parse_repository_tag(self.image_name) + return self.collection.pull(repository, tag=self.id, platform=platform) + + def has_platform(self, platform): + """ + Check whether the given platform identifier is available for this + digest. + + Args: + platform (str or dict): A string using the ``os[/arch[/variant]]`` + format, or a platform dictionary. + + Returns: + (bool): ``True`` if the platform is recognized as available, + ``False`` otherwise. + + Raises: + :py:class:`docker.errors.InvalidArgument` + If the platform argument is not a valid descriptor. + """ + if platform and not isinstance(platform, dict): + parts = platform.split('/') + if len(parts) > 3 or len(parts) < 1: + raise InvalidArgument( + '"{0}" is not a valid platform descriptor'.format(platform) + ) + platform = {'os': parts[0]} + if len(parts) > 2: + platform['variant'] = parts[2] + if len(parts) > 1: + platform['architecture'] = parts[1] + return normalize_platform( + platform, self.client.version() + ) in self.attrs['Platforms'] + + def reload(self): + self.attrs = self.client.api.inspect_distribution(self.image_name) + + reload.__doc__ = Model.reload.__doc__ + + class ImageCollection(Collection): model = Image @@ -219,6 +294,26 @@ class ImageCollection(Collection): """ return self.prepare_model(self.client.api.inspect_image(name)) + def get_registry_data(self, name): + """ + Gets the registry data for an image. + + Args: + name (str): The name of the image. + + Returns: + (:py:class:`RegistryData`): The data object. + Raises: + :py:class:`docker.errors.APIError` + If the server returns an error. + """ + return RegistryData( + image_name=name, + attrs=self.client.api.inspect_distribution(name), + client=self.client, + collection=self, + ) + def list(self, name=None, all=False, filters=None): """ List images on the server. @@ -336,3 +431,13 @@ class ImageCollection(Collection): def prune(self, filters=None): return self.client.api.prune_images(filters=filters) prune.__doc__ = APIClient.prune_images.__doc__ + + +def normalize_platform(platform, engine_info): + if platform is None: + platform = {} + if 'os' not in platform: + platform['os'] = engine_info['Os'] + if 'architecture' not in platform: + platform['architecture'] = engine_info['Arch'] + return platform diff --git a/docs/images.rst b/docs/images.rst index 12b0fd1..4d425e9 100644 --- a/docs/images.rst +++ b/docs/images.rst @@ -12,6 +12,7 @@ Methods available on ``client.images``: .. automethod:: build .. automethod:: get + .. automethod:: get_registry_data .. automethod:: list(**kwargs) .. automethod:: load .. automethod:: prune @@ -41,3 +42,21 @@ Image objects .. automethod:: reload .. automethod:: save .. automethod:: tag + +RegistryData objects +-------------------- + +.. autoclass:: RegistryData() + + .. py:attribute:: attrs + + The raw representation of this object from the server. + + .. autoattribute:: id + .. autoattribute:: short_id + + + + .. automethod:: has_platform + .. automethod:: pull + .. 
automethod:: reload diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py index ab638c9..050e7f3 100644 --- a/tests/integration/api_image_test.py +++ b/tests/integration/api_image_test.py @@ -357,3 +357,12 @@ class SaveLoadImagesTest(BaseAPIIntegrationTest): success = True break assert success is True + + +@requires_api_version('1.30') +class InspectDistributionTest(BaseAPIIntegrationTest): + def test_inspect_distribution(self): + data = self.client.inspect_distribution('busybox:latest') + assert data is not None + assert 'Platforms' in data + assert {'os': 'linux', 'architecture': 'amd64'} in data['Platforms'] -- cgit v1.2.1 From bdee6e308734dfb0d1cd959575222b081e150d2f Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 29 Mar 2018 16:37:52 -0700 Subject: dev version Signed-off-by: Joffrey F --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 5460e16..c949131 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "3.2.0" +version = "3.3.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) -- cgit v1.2.1 From 16751ac509b4bbe75293847fe87099ff51a74013 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Fri, 30 Mar 2018 10:22:39 -0700 Subject: Properly handle relative Dockerfile paths and Dockerfile on different drives Signed-off-by: Joffrey F --- docker/api/build.py | 29 ++++++++++++++++++++--------- tests/integration/api_build_test.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 9 deletions(-) diff --git a/docker/api/build.py b/docker/api/build.py index 2a22759..d69985e 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -149,15 +149,7 @@ class BuildApiMixin(object): lambda x: x != '' and x[0] != '#', [l.strip() for l in f.read().splitlines()] )) - if dockerfile and os.path.relpath(dockerfile, path).startswith( - '..'): - with open(dockerfile, 'r') as df: - dockerfile = ( - '.dockerfile.{0:x}'.format(random.getrandbits(160)), - df.read() - ) - else: - dockerfile = (dockerfile, None) + dockerfile = process_dockerfile(dockerfile, path) context = utils.tar( path, exclude=exclude, dockerfile=dockerfile, gzip=gzip ) @@ -312,3 +304,22 @@ class BuildApiMixin(object): ) else: log.debug('No auth config found') + + +def process_dockerfile(dockerfile, path): + if not dockerfile: + return (None, None) + + abs_dockerfile = dockerfile + if not os.path.isabs(dockerfile): + abs_dockerfile = os.path.join(path, dockerfile) + + if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or + os.path.relpath(abs_dockerfile, path).startswith('..')): + with open(abs_dockerfile, 'r') as df: + return ( + '.dockerfile.{0:x}'.format(random.getrandbits(160)), + df.read() + ) + else: + return (dockerfile, None) diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index f411efc..8910eb7 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -440,3 +440,35 @@ class BuildTest(BaseAPIIntegrationTest): lsdata = self.client.logs(ctnr).strip().split(b'\n') assert len(lsdata) == 3 assert sorted([b'.', b'..', b'file.txt']) == sorted(lsdata) + + def test_build_in_context_dockerfile(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + with open(os.path.join(base_dir, 'file.txt'), 'w') as f: + f.write('hello world') + with open(os.path.join(base_dir, 'custom.dockerfile'), 'w') as df: + df.write('\n'.join([ + 'FROM 
busybox', + 'COPY . /src', + 'WORKDIR /src', + ])) + print(os.path.join(base_dir, 'custom.dockerfile')) + img_name = random_name() + self.tmp_imgs.append(img_name) + stream = self.client.build( + path=base_dir, dockerfile='custom.dockerfile', tag=img_name, + decode=True + ) + lines = [] + for chunk in stream: + lines.append(chunk) + assert 'Successfully tagged' in lines[-1]['stream'] + + ctnr = self.client.create_container(img_name, 'ls -a') + self.tmp_containers.append(ctnr) + self.client.start(ctnr) + lsdata = self.client.logs(ctnr).strip().split(b'\n') + assert len(lsdata) == 4 + assert sorted( + [b'.', b'..', b'file.txt', b'custom.dockerfile'] + ) == sorted(lsdata) -- cgit v1.2.1 From 1d6f8ecf9277ef1c77f3d530efaafd39323cc8e7 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 12 Apr 2018 12:38:27 -0700 Subject: Support absolute paths for in-context Dockerfiles Signed-off-by: Joffrey F --- docker/api/build.py | 6 ++++-- setup.py | 16 ++++++++++------ tests/integration/api_build_test.py | 33 ++++++++++++++++++++++++++++++++- 3 files changed, 46 insertions(+), 9 deletions(-) diff --git a/docker/api/build.py b/docker/api/build.py index d69985e..a76e32c 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -316,10 +316,12 @@ def process_dockerfile(dockerfile, path): if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or os.path.relpath(abs_dockerfile, path).startswith('..')): + # Dockerfile not in context - read data to insert into tar later with open(abs_dockerfile, 'r') as df: return ( '.dockerfile.{0:x}'.format(random.getrandbits(160)), df.read() ) - else: - return (dockerfile, None) + + # Dockerfile is inside the context - return path relative to context root + return (os.path.relpath(abs_dockerfile, path), None) diff --git a/setup.py b/setup.py index 271d94f..1153f78 100644 --- a/setup.py +++ b/setup.py @@ -9,12 +9,16 @@ import pip from setuptools import setup, find_packages -if 'docker-py' in [x.project_name for x in pip.get_installed_distributions()]: - print( - 'ERROR: "docker-py" needs to be uninstalled before installing this' - ' package:\npip uninstall docker-py', file=sys.stderr - ) - sys.exit(1) +try: + if 'docker-py' in [ + x.project_name for x in pip.get_installed_distributions()]: + print( + 'ERROR: "docker-py" needs to be uninstalled before installing this' + ' package:\npip uninstall docker-py', file=sys.stderr + ) + sys.exit(1) +except AttributeError: + pass ROOT_DIR = os.path.dirname(__file__) SOURCE_DIR = os.path.join(ROOT_DIR) diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index 8910eb7..9423012 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -452,7 +452,6 @@ class BuildTest(BaseAPIIntegrationTest): 'COPY . /src', 'WORKDIR /src', ])) - print(os.path.join(base_dir, 'custom.dockerfile')) img_name = random_name() self.tmp_imgs.append(img_name) stream = self.client.build( @@ -472,3 +471,35 @@ class BuildTest(BaseAPIIntegrationTest): assert sorted( [b'.', b'..', b'file.txt', b'custom.dockerfile'] ) == sorted(lsdata) + + def test_build_in_context_abs_dockerfile(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + abs_dockerfile_path = os.path.join(base_dir, 'custom.dockerfile') + with open(os.path.join(base_dir, 'file.txt'), 'w') as f: + f.write('hello world') + with open(abs_dockerfile_path, 'w') as df: + df.write('\n'.join([ + 'FROM busybox', + 'COPY . 
/src', + 'WORKDIR /src', + ])) + img_name = random_name() + self.tmp_imgs.append(img_name) + stream = self.client.build( + path=base_dir, dockerfile=abs_dockerfile_path, tag=img_name, + decode=True + ) + lines = [] + for chunk in stream: + lines.append(chunk) + assert 'Successfully tagged' in lines[-1]['stream'] + + ctnr = self.client.create_container(img_name, 'ls -a') + self.tmp_containers.append(ctnr) + self.client.start(ctnr) + lsdata = self.client.logs(ctnr).strip().split(b'\n') + assert len(lsdata) == 4 + assert sorted( + [b'.', b'..', b'file.txt', b'custom.dockerfile'] + ) == sorted(lsdata) -- cgit v1.2.1 From e1ab5457ca05e22c4e1f80b60c3a038c0da6d19e Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 25 Apr 2018 14:02:53 -0700 Subject: Bump docker-pycreds dependency Signed-off-by: Joffrey F --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 2b281ae..9079315 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ asn1crypto==0.22.0 backports.ssl-match-hostname==3.5.0.1 cffi==1.10.0 cryptography==1.9 -docker-pycreds==0.2.2 +docker-pycreds==0.2.3 enum34==1.1.6 idna==2.5 ipaddress==1.0.18 diff --git a/setup.py b/setup.py index 1153f78..a65437e 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ requirements = [ 'requests >= 2.14.2, != 2.18.0', 'six >= 1.4.0', 'websocket-client >= 0.32.0', - 'docker-pycreds >= 0.2.2' + 'docker-pycreds >= 0.2.3' ] extras_require = { -- cgit v1.2.1 From accb9de52f6e383ad0335807f73c8c35bd6e7426 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 25 Apr 2018 14:49:18 -0700 Subject: Remove obsolete docker-py check in setup.py Signed-off-by: Joffrey F --- setup.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/setup.py b/setup.py index a65437e..c1eabcf 100644 --- a/setup.py +++ b/setup.py @@ -3,23 +3,9 @@ from __future__ import print_function import codecs import os -import sys - -import pip from setuptools import setup, find_packages -try: - if 'docker-py' in [ - x.project_name for x in pip.get_installed_distributions()]: - print( - 'ERROR: "docker-py" needs to be uninstalled before installing this' - ' package:\npip uninstall docker-py', file=sys.stderr - ) - sys.exit(1) -except AttributeError: - pass - ROOT_DIR = os.path.dirname(__file__) SOURCE_DIR = os.path.join(ROOT_DIR) -- cgit v1.2.1 From caf0f37927a296174c3f00e9505d87f70ac8fa0d Mon Sep 17 00:00:00 2001 From: John Hu Date: Fri, 13 Apr 2018 14:40:39 +0800 Subject: Set minimum version for configs api to 1.30 See: https://docs.docker.com/engine/reference/commandline/config/ https://docs.docker.com/engine/api/v1.30/ Signed-off-by: John Hu --- docker/api/config.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/api/config.py b/docker/api/config.py index b46b09c..767bef2 100644 --- a/docker/api/config.py +++ b/docker/api/config.py @@ -6,7 +6,7 @@ from .. 
import utils class ConfigApiMixin(object): - @utils.minimum_version('1.25') + @utils.minimum_version('1.30') def create_config(self, name, data, labels=None): """ Create a config @@ -35,7 +35,7 @@ class ConfigApiMixin(object): self._post_json(url, data=body), True ) - @utils.minimum_version('1.25') + @utils.minimum_version('1.30') @utils.check_resource('id') def inspect_config(self, id): """ @@ -53,7 +53,7 @@ class ConfigApiMixin(object): url = self._url('/configs/{0}', id) return self._result(self._get(url), True) - @utils.minimum_version('1.25') + @utils.minimum_version('1.30') @utils.check_resource('id') def remove_config(self, id): """ @@ -73,7 +73,7 @@ class ConfigApiMixin(object): self._raise_for_status(res) return True - @utils.minimum_version('1.25') + @utils.minimum_version('1.30') def configs(self, filters=None): """ List configs -- cgit v1.2.1 From cef9940ed3d993145c6db075b2f7f0f005415ff2 Mon Sep 17 00:00:00 2001 From: Matthieu Nottale Date: Tue, 13 Mar 2018 15:31:36 +0100 Subject: stop(), restart(): Adjust request timeout. Signed-off-by: Matthieu Nottale --- docker/api/container.py | 11 ++++++++--- tests/integration/api_container_test.py | 11 +++++++++++ tests/unit/api_container_test.py | 4 ++-- 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index cb97b79..144a6d9 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -1018,7 +1018,10 @@ class ContainerApiMixin(object): """ params = {'t': timeout} url = self._url("/containers/{0}/restart", container) - res = self._post(url, params=params) + conn_timeout = self.timeout + if conn_timeout: + conn_timeout = max(conn_timeout, timeout+15) + res = self._post(url, params=params, timeout=conn_timeout) self._raise_for_status(res) @utils.check_resource('container') @@ -1107,9 +1110,11 @@ class ContainerApiMixin(object): else: params = {'t': timeout} url = self._url("/containers/{0}/stop", container) - + conn_timeout = self.timeout + if conn_timeout: + conn_timeout = max(conn_timeout, timeout + 15) res = self._post(url, params=params, - timeout=(timeout + (self.timeout or 0))) + timeout=conn_timeout) self._raise_for_status(res) @utils.check_resource('container') diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index e212518..3d985a4 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1165,6 +1165,17 @@ class RestartContainerTest(BaseAPIIntegrationTest): assert info2['State']['Running'] is True self.client.kill(id) + def test_restart_with_hight_timeout(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + id = container['Id'] + self.client.start(id) + self.client.timeout = 1 + self.client.restart(id, timeout=3) + self.client.timeout = None + self.client.restart(id, timeout=3) + self.client.timeout = 1 + self.client.stop(id, timeout=3) + def test_restart_with_dict_instead_of_id(self): container = self.client.create_container(BUSYBOX, ['sleep', '9999']) assert 'Id' in container diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py index c33f129..1aed967 100644 --- a/tests/unit/api_container_test.py +++ b/tests/unit/api_container_test.py @@ -1264,7 +1264,7 @@ class ContainerTest(BaseAPIClientTest): 'POST', url_prefix + 'containers/3cc2351ab11b/stop', params={'t': timeout}, - timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) + timeout=(DEFAULT_TIMEOUT_SECONDS) ) def test_stop_container_with_dict_instead_of_id(self): @@ 
-1277,7 +1277,7 @@ class ContainerTest(BaseAPIClientTest): 'POST', url_prefix + 'containers/3cc2351ab11b/stop', params={'t': timeout}, - timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) + timeout=(DEFAULT_TIMEOUT_SECONDS) ) def test_pause_container(self): -- cgit v1.2.1 From da028d88a2f133d038fa4a651318b60bed770ba5 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 25 Apr 2018 14:36:41 -0700 Subject: Total timeout should be HTTP timeout + operation timeout Signed-off-by: Joffrey F --- docker/api/container.py | 12 ++++++------ tests/integration/api_container_test.py | 2 +- tests/unit/api_container_test.py | 8 ++++---- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 144a6d9..4a49bab 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -1018,10 +1018,9 @@ class ContainerApiMixin(object): """ params = {'t': timeout} url = self._url("/containers/{0}/restart", container) - conn_timeout = self.timeout - if conn_timeout: - conn_timeout = max(conn_timeout, timeout+15) - res = self._post(url, params=params, timeout=conn_timeout) + res = self._post( + url, params=params, timeout=timeout + (self.timeout or 0) + ) self._raise_for_status(res) @utils.check_resource('container') @@ -1113,8 +1112,9 @@ class ContainerApiMixin(object): conn_timeout = self.timeout if conn_timeout: conn_timeout = max(conn_timeout, timeout + 15) - res = self._post(url, params=params, - timeout=conn_timeout) + res = self._post( + url, params=params, timeout=timeout + (self.timeout or 0) + ) self._raise_for_status(res) @utils.check_resource('container') diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 3d985a4..da9b3ec 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1165,7 +1165,7 @@ class RestartContainerTest(BaseAPIIntegrationTest): assert info2['State']['Running'] is True self.client.kill(id) - def test_restart_with_hight_timeout(self): + def test_restart_with_high_timeout(self): container = self.client.create_container(BUSYBOX, ['sleep', '9999']) id = container['Id'] self.client.start(id) diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py index 1aed967..a7e183c 100644 --- a/tests/unit/api_container_test.py +++ b/tests/unit/api_container_test.py @@ -1264,7 +1264,7 @@ class ContainerTest(BaseAPIClientTest): 'POST', url_prefix + 'containers/3cc2351ab11b/stop', params={'t': timeout}, - timeout=(DEFAULT_TIMEOUT_SECONDS) + timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) ) def test_stop_container_with_dict_instead_of_id(self): @@ -1277,7 +1277,7 @@ class ContainerTest(BaseAPIClientTest): 'POST', url_prefix + 'containers/3cc2351ab11b/stop', params={'t': timeout}, - timeout=(DEFAULT_TIMEOUT_SECONDS) + timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) ) def test_pause_container(self): @@ -1335,7 +1335,7 @@ class ContainerTest(BaseAPIClientTest): 'POST', url_prefix + 'containers/3cc2351ab11b/restart', params={'t': 2}, - timeout=DEFAULT_TIMEOUT_SECONDS + timeout=(DEFAULT_TIMEOUT_SECONDS + 2) ) def test_restart_container_with_dict_instead_of_id(self): @@ -1345,7 +1345,7 @@ class ContainerTest(BaseAPIClientTest): 'POST', url_prefix + 'containers/3cc2351ab11b/restart', params={'t': 2}, - timeout=DEFAULT_TIMEOUT_SECONDS + timeout=(DEFAULT_TIMEOUT_SECONDS + 2) ) def test_remove_container(self): -- cgit v1.2.1 From ae8f77737c164d2474681f839c43f51400b9e119 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 25 Apr 2018 15:12:49 -0700 
Subject: Fix session timeout = None case Signed-off-by: Joffrey F --- docker/api/container.py | 15 +++++++-------- tests/integration/api_container_test.py | 12 +++++------- 2 files changed, 12 insertions(+), 15 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 4a49bab..05676f1 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -1018,9 +1018,10 @@ class ContainerApiMixin(object): """ params = {'t': timeout} url = self._url("/containers/{0}/restart", container) - res = self._post( - url, params=params, timeout=timeout + (self.timeout or 0) - ) + conn_timeout = self.timeout + if conn_timeout is not None: + conn_timeout += timeout + res = self._post(url, params=params, timeout=conn_timeout) self._raise_for_status(res) @utils.check_resource('container') @@ -1110,11 +1111,9 @@ class ContainerApiMixin(object): params = {'t': timeout} url = self._url("/containers/{0}/stop", container) conn_timeout = self.timeout - if conn_timeout: - conn_timeout = max(conn_timeout, timeout + 15) - res = self._post( - url, params=params, timeout=timeout + (self.timeout or 0) - ) + if conn_timeout is not None: + conn_timeout += timeout + res = self._post(url, params=params, timeout=conn_timeout) self._raise_for_status(res) @utils.check_resource('container') diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index da9b3ec..afd439f 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1165,16 +1165,14 @@ class RestartContainerTest(BaseAPIIntegrationTest): assert info2['State']['Running'] is True self.client.kill(id) - def test_restart_with_high_timeout(self): + def test_restart_with_low_timeout(self): container = self.client.create_container(BUSYBOX, ['sleep', '9999']) - id = container['Id'] - self.client.start(id) + self.client.start(container) self.client.timeout = 1 - self.client.restart(id, timeout=3) + self.client.restart(container, timeout=3) self.client.timeout = None - self.client.restart(id, timeout=3) - self.client.timeout = 1 - self.client.stop(id, timeout=3) + self.client.restart(container, timeout=3) + self.client.kill(container) def test_restart_with_dict_instead_of_id(self): container = self.client.create_container(BUSYBOX, ['sleep', '9999']) -- cgit v1.2.1 From 8360ecae973a84bbd203cfe145657618a5659415 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 25 Apr 2018 16:29:41 -0700 Subject: prune_builds test Signed-off-by: Joffrey F --- docker/api/build.py | 33 +++++++++++++++++---------------- tests/integration/api_build_test.py | 6 ++++++ 2 files changed, 23 insertions(+), 16 deletions(-) diff --git a/docker/api/build.py b/docker/api/build.py index 3d83b98..f62a731 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -264,6 +264,23 @@ class BuildApiMixin(object): return self._stream_helper(response, decode=decode) + @utils.minimum_version('1.31') + def prune_builds(self): + """ + Delete the builder cache + + Returns: + (dict): A dictionary containing information about the operation's + result. The ``SpaceReclaimed`` key indicates the amount of + bytes of disk space reclaimed. + + Raises: + :py:class:`docker.errors.APIError` + If the server returns an error. 
+ """ + url = self._url("/build/prune") + return self._result(self._post(url), True) + def _set_auth_headers(self, headers): log.debug('Looking for auth config') @@ -305,22 +322,6 @@ class BuildApiMixin(object): else: log.debug('No auth config found') - @utils.minimum_version('1.31') - def prune_build(self): - """ - Delete builder cache - - Returns: - (dict): A dict containing - the amount of disk space reclaimed in bytes. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url("/build/prune") - return self._result(self._post(url), True) - def process_dockerfile(dockerfile, path): if not dockerfile: diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index 9423012..92e0062 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -503,3 +503,9 @@ class BuildTest(BaseAPIIntegrationTest): assert sorted( [b'.', b'..', b'file.txt', b'custom.dockerfile'] ) == sorted(lsdata) + + @requires_api_version('1.31') + def test_prune_builds(self): + prune_result = self.client.prune_builds() + assert 'SpaceReclaimed' in prune_result + assert isinstance(prune_result['SpaceReclaimed'], int) -- cgit v1.2.1 From b3ae4d6ebd0674b9dff0abfef001e8fe47ccfd22 Mon Sep 17 00:00:00 2001 From: Ben Doan Date: Thu, 15 Mar 2018 18:21:52 -0500 Subject: avoid race condition in containers.list Signed-off-by: Ben Doan --- docker/models/containers.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/docker/models/containers.py b/docker/models/containers.py index 1e06ed6..789fa93 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -6,7 +6,7 @@ from ..api import APIClient from ..constants import DEFAULT_DATA_CHUNK_SIZE from ..errors import ( ContainerError, DockerException, ImageNotFound, - create_unexpected_kwargs_error + NotFound, create_unexpected_kwargs_error ) from ..types import HostConfig from ..utils import version_gte @@ -896,7 +896,14 @@ class ContainerCollection(Collection): if sparse: return [self.prepare_model(r) for r in resp] else: - return [self.get(r['Id']) for r in resp] + containers = [] + for r in resp: + try: + containers.append(self.get(r['Id'])) + # a container may have been removed while iterating + except NotFound: + pass + return containers def prune(self, filters=None): return self.client.api.prune_containers(filters=filters) -- cgit v1.2.1 From 9709dd454b0ce23db5af55ad4b1d35a2fb67cc45 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 25 Apr 2018 16:55:40 -0700 Subject: Add ignore_removed param to containers.list() to control whether to raise or ignore NotFound Signed-off-by: Joffrey F --- docker/models/containers.py | 9 +++++++-- tests/unit/fake_api_client.py | 16 +++++++++++----- tests/unit/models_containers_test.py | 12 ++++++++++++ 3 files changed, 30 insertions(+), 7 deletions(-) diff --git a/docker/models/containers.py b/docker/models/containers.py index 789fa93..b33a718 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -844,7 +844,7 @@ class ContainerCollection(Collection): return self.prepare_model(resp) def list(self, all=False, before=None, filters=None, limit=-1, since=None, - sparse=False): + sparse=False, ignore_removed=False): """ List containers. Similar to the ``docker ps`` command. @@ -882,6 +882,10 @@ class ContainerCollection(Collection): information, but guaranteed not to block. Use :py:meth:`Container.reload` on resulting objects to retrieve all attributes. 
Default: ``False`` + ignore_removed (bool): Ignore failures due to missing containers + when attempting to inspect containers from the original list. + Set to ``True`` if race conditions are likely. Has no effect + if ``sparse=True``. Default: ``False`` Returns: (list of :py:class:`Container`) @@ -902,7 +906,8 @@ class ContainerCollection(Collection): containers.append(self.get(r['Id'])) # a container may have been removed while iterating except NotFound: - pass + if not ignore_removed: + raise return containers def prune(self, filters=None): diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py index 15b60ea..2147bfd 100644 --- a/tests/unit/fake_api_client.py +++ b/tests/unit/fake_api_client.py @@ -20,15 +20,18 @@ class CopyReturnMagicMock(mock.MagicMock): return ret -def make_fake_api_client(): +def make_fake_api_client(overrides=None): """ Returns non-complete fake APIClient. This returns most of the default cases correctly, but most arguments that change behaviour will not work. """ + + if overrides is None: + overrides = {} api_client = docker.APIClient() - mock_client = CopyReturnMagicMock(**{ + mock_attrs = { 'build.return_value': fake_api.FAKE_IMAGE_ID, 'commit.return_value': fake_api.post_fake_commit()[1], 'containers.return_value': fake_api.get_fake_containers()[1], @@ -47,15 +50,18 @@ def make_fake_api_client(): 'networks.return_value': fake_api.get_fake_network_list()[1], 'start.return_value': None, 'wait.return_value': {'StatusCode': 0}, - }) + } + mock_attrs.update(overrides) + mock_client = CopyReturnMagicMock(**mock_attrs) + mock_client._version = docker.constants.DEFAULT_DOCKER_API_VERSION return mock_client -def make_fake_client(): +def make_fake_client(overrides=None): """ Returns a Client with a fake APIClient. 
""" client = docker.DockerClient() - client.api = make_fake_api_client() + client.api = make_fake_api_client(overrides) return client diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py index 2b0b499..48a5288 100644 --- a/tests/unit/models_containers_test.py +++ b/tests/unit/models_containers_test.py @@ -359,6 +359,18 @@ class ContainerCollectionTest(unittest.TestCase): assert isinstance(containers[0], Container) assert containers[0].id == FAKE_CONTAINER_ID + def test_list_ignore_removed(self): + def side_effect(*args, **kwargs): + raise docker.errors.NotFound('Container not found') + client = make_fake_client({ + 'inspect_container.side_effect': side_effect + }) + + with pytest.raises(docker.errors.NotFound): + client.containers.list(all=True, ignore_removed=False) + + assert client.containers.list(all=True, ignore_removed=True) == [] + class ContainerTest(unittest.TestCase): def test_name(self): -- cgit v1.2.1 From d5693ed903217950f37c32ef6f35a1a0a10f55d9 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 25 Apr 2018 17:28:51 -0700 Subject: Add prune_builds to DockerClient Signed-off-by: Joffrey F --- docker/models/images.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker/models/images.py b/docker/models/images.py index d4893bb..41632c6 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -432,6 +432,10 @@ class ImageCollection(Collection): return self.client.api.prune_images(filters=filters) prune.__doc__ = APIClient.prune_images.__doc__ + def prune_builds(self, *args, **kwargs): + return self.client.api.prune_builds(*args, **kwargs) + prune_builds.__doc__ = APIClient.prune_builds.__doc__ + def normalize_platform(platform, engine_info): if platform is None: -- cgit v1.2.1 From 467cacb00d8dce68aa8ff2bdacc85acecd2d1207 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 25 Apr 2018 18:03:15 -0700 Subject: 3.4.0-dev Signed-off-by: Joffrey F --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 8f6e651..04fd3c2 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "3.3.0" +version = "3.4.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) -- cgit v1.2.1 From 8228059f1e070e490b690dea539b065c449b0194 Mon Sep 17 00:00:00 2001 From: Srinivas Reddy Thatiparthy Date: Thu, 24 May 2018 23:31:31 +0530 Subject: return the pruned networks Signed-off-by: Srinivas Reddy Thatiparthy --- docker/models/networks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/models/networks.py b/docker/models/networks.py index 1c2fbf2..be3291a 100644 --- a/docker/models/networks.py +++ b/docker/models/networks.py @@ -211,5 +211,5 @@ class NetworkCollection(Collection): return networks def prune(self, filters=None): - self.client.api.prune_networks(filters=filters) + return self.client.api.prune_networks(filters=filters) prune.__doc__ = APIClient.prune_networks.__doc__ -- cgit v1.2.1 From 17f41b56726957177724711b5bff6d51b02e6d93 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 24 May 2018 17:19:18 -0700 Subject: Avoid unwanted modification of dockerfile path Signed-off-by: Joffrey F --- docker/api/build.py | 7 ++++- tests/integration/api_build_test.py | 53 ++++++++++++++++++++++++++++++------- 2 files changed, 50 insertions(+), 10 deletions(-) diff --git a/docker/api/build.py b/docker/api/build.py index f62a731..3c3f130 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -341,4 +341,9 
@@ def process_dockerfile(dockerfile, path): ) # Dockerfile is inside the context - return path relative to context root - return (os.path.relpath(abs_dockerfile, path), None) + if dockerfile == abs_dockerfile: + # Only calculate relpath if necessary to avoid errors + # on Windows client -> Linux Docker + # see https://github.com/docker/compose/issues/5969 + dockerfile = os.path.relpath(abs_dockerfile, path) + return (dockerfile, None) diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index 92e0062..baaf33e 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -415,18 +415,20 @@ class BuildTest(BaseAPIIntegrationTest): f.write('hello world') with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: f.write('.dockerignore\n') - df = tempfile.NamedTemporaryFile() - self.addCleanup(df.close) - df.write(('\n'.join([ - 'FROM busybox', - 'COPY . /src', - 'WORKDIR /src', - ])).encode('utf-8')) - df.flush() + df_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, df_dir) + df_name = os.path.join(df_dir, 'Dockerfile') + with open(df_name, 'wb') as df: + df.write(('\n'.join([ + 'FROM busybox', + 'COPY . /src', + 'WORKDIR /src', + ])).encode('utf-8')) + df.flush() img_name = random_name() self.tmp_imgs.append(img_name) stream = self.client.build( - path=base_dir, dockerfile=df.name, tag=img_name, + path=base_dir, dockerfile=df_name, tag=img_name, decode=True ) lines = [] @@ -472,6 +474,39 @@ class BuildTest(BaseAPIIntegrationTest): [b'.', b'..', b'file.txt', b'custom.dockerfile'] ) == sorted(lsdata) + def test_build_in_context_nested_dockerfile(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + with open(os.path.join(base_dir, 'file.txt'), 'w') as f: + f.write('hello world') + subdir = os.path.join(base_dir, 'hello', 'world') + os.makedirs(subdir) + with open(os.path.join(subdir, 'custom.dockerfile'), 'w') as df: + df.write('\n'.join([ + 'FROM busybox', + 'COPY . 
/src', + 'WORKDIR /src', + ])) + img_name = random_name() + self.tmp_imgs.append(img_name) + stream = self.client.build( + path=base_dir, dockerfile='hello/world/custom.dockerfile', + tag=img_name, decode=True + ) + lines = [] + for chunk in stream: + lines.append(chunk) + assert 'Successfully tagged' in lines[-1]['stream'] + + ctnr = self.client.create_container(img_name, 'ls -a') + self.tmp_containers.append(ctnr) + self.client.start(ctnr) + lsdata = self.client.logs(ctnr).strip().split(b'\n') + assert len(lsdata) == 4 + assert sorted( + [b'.', b'..', b'file.txt', b'hello'] + ) == sorted(lsdata) + def test_build_in_context_abs_dockerfile(self): base_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, base_dir) -- cgit v1.2.1 From 95ad903c35fc6781c18191b5cbc586ed4abd1d41 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 24 May 2018 17:20:45 -0700 Subject: Fix create_plugin on Windows Signed-off-by: Joffrey F --- docker/api/plugin.py | 5 ++++- tests/integration/api_plugin_test.py | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/docker/api/plugin.py b/docker/api/plugin.py index 73f1852..33d7419 100644 --- a/docker/api/plugin.py +++ b/docker/api/plugin.py @@ -44,7 +44,10 @@ class PluginApiMixin(object): """ url = self._url('/plugins/create') - with utils.create_archive(root=plugin_data_dir, gzip=gzip) as archv: + with utils.create_archive( + root=plugin_data_dir, gzip=gzip, + files=set(utils.build.walk(plugin_data_dir, [])) + ) as archv: res = self._post(url, params={'name': name}, data=archv) self._raise_for_status(res) return True diff --git a/tests/integration/api_plugin_test.py b/tests/integration/api_plugin_test.py index 433d44d..1150b09 100644 --- a/tests/integration/api_plugin_test.py +++ b/tests/integration/api_plugin_test.py @@ -135,7 +135,7 @@ class PluginTest(BaseAPIIntegrationTest): def test_create_plugin(self): plugin_data_dir = os.path.join( - os.path.dirname(__file__), 'testdata/dummy-plugin' + os.path.dirname(__file__), os.path.join('testdata', 'dummy-plugin') ) assert self.client.create_plugin( 'docker-sdk-py/dummy', plugin_data_dir -- cgit v1.2.1 From 40711cb50189c4c39ed5a60c16910646a00f9acc Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 24 May 2018 17:21:17 -0700 Subject: Fix cancellable streams on Windows clients + HTTPS transport Signed-off-by: Joffrey F --- docker/types/daemon.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/types/daemon.py b/docker/types/daemon.py index 852f3d8..ee8624e 100644 --- a/docker/types/daemon.py +++ b/docker/types/daemon.py @@ -57,6 +57,8 @@ class CancellableStream(object): else: sock = sock_fp._sock + if isinstance(sock, urllib3.contrib.pyopenssl.WrappedSocket): + sock = sock.socket sock.shutdown(socket.SHUT_RDWR) sock.close() -- cgit v1.2.1 From b4efdc1b28062c835d04ac56995cb293d74de92b Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 24 May 2018 17:21:53 -0700 Subject: Fix several integration tests on Windows Signed-off-by: Joffrey F --- tests/integration/api_container_test.py | 19 ++++--------------- tests/integration/models_containers_test.py | 3 +++ 2 files changed, 7 insertions(+), 15 deletions(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index afd439f..ff70148 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -491,6 +491,9 @@ class CreateContainerTest(BaseAPIIntegrationTest): assert rule in self.client.logs(ctnr).decode('utf-8') +@pytest.mark.xfail( + IS_WINDOWS_PLATFORM, reason='Test 
not designed for Windows platform' +) class VolumeBindTest(BaseAPIIntegrationTest): def setUp(self): super(VolumeBindTest, self).setUp() @@ -507,9 +510,6 @@ class VolumeBindTest(BaseAPIIntegrationTest): ['touch', os.path.join(self.mount_dest, self.filename)], ) - @pytest.mark.xfail( - IS_WINDOWS_PLATFORM, reason='Test not designed for Windows platform' - ) def test_create_with_binds_rw(self): container = self.run_with_volume( @@ -525,9 +525,6 @@ class VolumeBindTest(BaseAPIIntegrationTest): inspect_data = self.client.inspect_container(container) self.check_container_data(inspect_data, True) - @pytest.mark.xfail( - IS_WINDOWS_PLATFORM, reason='Test not designed for Windows platform' - ) def test_create_with_binds_ro(self): self.run_with_volume( False, @@ -548,9 +545,6 @@ class VolumeBindTest(BaseAPIIntegrationTest): inspect_data = self.client.inspect_container(container) self.check_container_data(inspect_data, False) - @pytest.mark.xfail( - IS_WINDOWS_PLATFORM, reason='Test not designed for Windows platform' - ) @requires_api_version('1.30') def test_create_with_mounts(self): mount = docker.types.Mount( @@ -569,9 +563,6 @@ class VolumeBindTest(BaseAPIIntegrationTest): inspect_data = self.client.inspect_container(container) self.check_container_data(inspect_data, True) - @pytest.mark.xfail( - IS_WINDOWS_PLATFORM, reason='Test not designed for Windows platform' - ) @requires_api_version('1.30') def test_create_with_mounts_ro(self): mount = docker.types.Mount( @@ -1116,9 +1107,7 @@ class ContainerTopTest(BaseAPIIntegrationTest): self.client.start(container) res = self.client.top(container) - if IS_WINDOWS_PLATFORM: - assert res['Titles'] == ['PID', 'USER', 'TIME', 'COMMAND'] - else: + if not IS_WINDOWS_PLATFORM: assert res['Titles'] == [ 'UID', 'PID', 'PPID', 'C', 'STIME', 'TTY', 'TIME', 'CMD' ] diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py index 6ddb034..ab41ea5 100644 --- a/tests/integration/models_containers_test.py +++ b/tests/integration/models_containers_test.py @@ -36,6 +36,9 @@ class ContainerCollectionTest(BaseIntegrationTest): with pytest.raises(docker.errors.ImageNotFound): client.containers.run("dockerpytest_does_not_exist") + @pytest.mark.skipif( + docker.constants.IS_WINDOWS_PLATFORM, reason="host mounts on Windows" + ) def test_run_with_volume(self): client = docker.from_env(version=TEST_API_VERSION) path = tempfile.mkdtemp() -- cgit v1.2.1 From 22b7b76142bd735c6be4f678dda8cf9d413e9f1c Mon Sep 17 00:00:00 2001 From: Anthony Sottile Date: Tue, 1 May 2018 11:55:48 -0400 Subject: Use six.moves to handle a py2+py3 import Signed-off-by: Anthony Sottile --- docker/transport/unixconn.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py index cc35d00..c59821a 100644 --- a/docker/transport/unixconn.py +++ b/docker/transport/unixconn.py @@ -1,14 +1,10 @@ import six import requests.adapters import socket +from six.moves import http_client as httplib from .. 
import constants -if six.PY3: - import http.client as httplib -else: - import httplib - try: import requests.packages.urllib3 as urllib3 except ImportError: -- cgit v1.2.1 From 49bb7386a3a3752dca64b411a6996663ed04ea1e Mon Sep 17 00:00:00 2001 From: Mike Lee Date: Wed, 6 Jun 2018 02:57:16 +0800 Subject: query plugin privilege with registry auth header Signed-off-by: Mike Lee --- docker/api/plugin.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docker/api/plugin.py b/docker/api/plugin.py index 33d7419..f6c0b13 100644 --- a/docker/api/plugin.py +++ b/docker/api/plugin.py @@ -170,8 +170,16 @@ class PluginApiMixin(object): 'remote': name, } + headers = {} + registry, repo_name = auth.resolve_repository_name(name) + header = auth.get_config_header(self, registry) + if header: + headers['X-Registry-Auth'] = header + url = self._url('/plugins/privileges') - return self._result(self._get(url, params=params), True) + return self._result( + self._get(url, params=params, headers=headers), True + ) @utils.minimum_version('1.25') @utils.check_resource('name') -- cgit v1.2.1 From dbe52dcb7d5765352faa43ab0210fddbcb546431 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Tue, 5 Jun 2018 10:51:54 -0700 Subject: Fix socket reading function for TCP (non-HTTPS) connections on Windows Signed-off-by: Joffrey F --- docker/utils/socket.py | 3 +++ tests/unit/api_test.py | 58 ++++++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 59 insertions(+), 2 deletions(-) diff --git a/docker/utils/socket.py b/docker/utils/socket.py index 0945f0a..7b96d4f 100644 --- a/docker/utils/socket.py +++ b/docker/utils/socket.py @@ -1,6 +1,7 @@ import errno import os import select +import socket as pysocket import struct import six @@ -28,6 +29,8 @@ def read(socket, n=4096): try: if hasattr(socket, 'recv'): return socket.recv(n) + if six.PY3 and isinstance(socket, getattr(pysocket, 'SocketIO')): + return socket.read(n) return os.read(socket.fileno(), n) except EnvironmentError as e: if e.errno not in recoverable_errors: diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py index 46cbd68..ba80840 100644 --- a/tests/unit/api_test.py +++ b/tests/unit/api_test.py @@ -365,7 +365,7 @@ class DockerApiTest(BaseAPIClientTest): assert result == content -class StreamTest(unittest.TestCase): +class UnixSocketStreamTest(unittest.TestCase): def setUp(self): socket_dir = tempfile.mkdtemp() self.build_context = tempfile.mkdtemp() @@ -462,7 +462,61 @@ class StreamTest(unittest.TestCase): raise e assert list(stream) == [ - str(i).encode() for i in range(50)] + str(i).encode() for i in range(50) + ] + + +class TCPSocketStreamTest(unittest.TestCase): + text_data = b''' + Now, those children out there, they're jumping through the + flames in the hope that the god of the fire will make them fruitful. + Really, you can't blame them. After all, what girl would not prefer the + child of a god to that of some acne-scarred artisan? 
+ ''' + + def setUp(self): + + self.server = six.moves.socketserver.ThreadingTCPServer( + ('', 0), self.get_handler_class() + ) + self.thread = threading.Thread(target=self.server.serve_forever) + self.thread.setDaemon(True) + self.thread.start() + self.address = 'http://{}:{}'.format( + socket.gethostname(), self.server.server_address[1] + ) + + def tearDown(self): + self.server.shutdown() + self.server.server_close() + self.thread.join() + + def get_handler_class(self): + text_data = self.text_data + + class Handler(six.moves.BaseHTTPServer.BaseHTTPRequestHandler, object): + def do_POST(self): + self.send_response(101) + self.send_header( + 'Content-Type', 'application/vnd.docker.raw-stream' + ) + self.send_header('Connection', 'Upgrade') + self.send_header('Upgrade', 'tcp') + self.end_headers() + self.wfile.flush() + time.sleep(0.2) + self.wfile.write(text_data) + self.wfile.flush() + + return Handler + + def test_read_from_socket(self): + with APIClient(base_url=self.address) as client: + resp = client._post(client._url('/dummy'), stream=True) + data = client._read_from_socket(resp, stream=True, tty=True) + results = b''.join(data) + + assert results == self.text_data class UserAgentTest(unittest.TestCase): -- cgit v1.2.1 From 2d0c5dd484e7621a9859ab40ac43d25a1f5f5078 Mon Sep 17 00:00:00 2001 From: Chris Mark Date: Tue, 5 Jun 2018 13:22:07 +0000 Subject: Adding missing comma in spec list. Fixing #2046, syntax error caused by missing comma on CONTAINER_SPEC_KWARGS list. Signed-off-by: Chris Mark --- docker/models/services.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/models/services.py b/docker/models/services.py index 125896b..2834dd7 100644 --- a/docker/models/services.py +++ b/docker/models/services.py @@ -276,7 +276,7 @@ CONTAINER_SPEC_KWARGS = [ 'labels', 'mounts', 'open_stdin', - 'privileges' + 'privileges', 'read_only', 'secrets', 'stop_grace_period', -- cgit v1.2.1 From f1189bfb4b1f2ecb6adc77f7349a085bdca1a824 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Fri, 8 Jun 2018 15:14:06 -0700 Subject: Allow passing of env overrides to credstore through APIClient ctor Signed-off-by: Joffrey F --- docker/api/build.py | 3 ++- docker/api/client.py | 6 +++++- docker/api/daemon.py | 4 +++- docker/auth.py | 13 ++++++++----- docker/client.py | 9 +++++++-- requirements.txt | 2 +- setup.py | 2 +- tests/unit/api_test.py | 2 +- 8 files changed, 28 insertions(+), 13 deletions(-) diff --git a/docker/api/build.py b/docker/api/build.py index 3c3f130..419255f 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -302,7 +302,8 @@ class BuildApiMixin(object): # credentials/native_store.go#L68-L83 for registry in self._auth_configs.get('auths', {}).keys(): auth_data[registry] = auth.resolve_authconfig( - self._auth_configs, registry + self._auth_configs, registry, + credstore_env=self.credstore_env, ) else: auth_data = self._auth_configs.get('auths', {}).copy() diff --git a/docker/api/client.py b/docker/api/client.py index 13c292a..91da1c8 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -83,6 +83,8 @@ class APIClient( :py:class:`~docker.tls.TLSConfig` object to use custom configuration. user_agent (str): Set a custom user agent for requests to the server. + credstore_env (dict): Override environment variables when calling the + credential store process. 
""" __attrs__ = requests.Session.__attrs__ + ['_auth_configs', @@ -93,7 +95,8 @@ class APIClient( def __init__(self, base_url=None, version=None, timeout=DEFAULT_TIMEOUT_SECONDS, tls=False, - user_agent=DEFAULT_USER_AGENT, num_pools=DEFAULT_NUM_POOLS): + user_agent=DEFAULT_USER_AGENT, num_pools=DEFAULT_NUM_POOLS, + credstore_env=None): super(APIClient, self).__init__() if tls and not base_url: @@ -109,6 +112,7 @@ class APIClient( self._auth_configs = auth.load_config( config_dict=self._general_configs ) + self.credstore_env = credstore_env base_url = utils.parse_host( base_url, IS_WINDOWS_PLATFORM, tls=bool(tls) diff --git a/docker/api/daemon.py b/docker/api/daemon.py index fc3692c..76a94cf 100644 --- a/docker/api/daemon.py +++ b/docker/api/daemon.py @@ -128,7 +128,9 @@ class DaemonApiMixin(object): elif not self._auth_configs: self._auth_configs = auth.load_config() - authcfg = auth.resolve_authconfig(self._auth_configs, registry) + authcfg = auth.resolve_authconfig( + self._auth_configs, registry, credstore_env=self.credstore_env, + ) # If we found an existing auth config for this registry and username # combination, we can return it immediately unless reauth is requested. if authcfg and authcfg.get('username', None) == username \ diff --git a/docker/auth.py b/docker/auth.py index 48fcd8b..0c0cb20 100644 --- a/docker/auth.py +++ b/docker/auth.py @@ -44,7 +44,9 @@ def get_config_header(client, registry): "No auth config in memory - loading from filesystem" ) client._auth_configs = load_config() - authcfg = resolve_authconfig(client._auth_configs, registry) + authcfg = resolve_authconfig( + client._auth_configs, registry, credstore_env=client.credstore_env + ) # Do not fail here if no authentication exists for this # specific registry as we can have a readonly pull. Just # put the header if we can. @@ -76,7 +78,7 @@ def get_credential_store(authconfig, registry): ) -def resolve_authconfig(authconfig, registry=None): +def resolve_authconfig(authconfig, registry=None, credstore_env=None): """ Returns the authentication data from the given auth configuration for a specific registry. As with the Docker client, legacy entries in the config @@ -91,7 +93,7 @@ def resolve_authconfig(authconfig, registry=None): 'Using credentials store "{0}"'.format(store_name) ) cfg = _resolve_authconfig_credstore( - authconfig, registry, store_name + authconfig, registry, store_name, env=credstore_env ) if cfg is not None: return cfg @@ -115,13 +117,14 @@ def resolve_authconfig(authconfig, registry=None): return None -def _resolve_authconfig_credstore(authconfig, registry, credstore_name): +def _resolve_authconfig_credstore(authconfig, registry, credstore_name, + env=None): if not registry or registry == INDEX_NAME: # The ecosystem is a little schizophrenic with index.docker.io VS # docker.io - in that case, it seems the full URL is necessary. registry = INDEX_URL log.debug("Looking for auth entry for {0}".format(repr(registry))) - store = dockerpycreds.Store(credstore_name) + store = dockerpycreds.Store(credstore_name, environment=env) try: data = store.get(registry) res = { diff --git a/docker/client.py b/docker/client.py index b4364c3..8d4a52b 100644 --- a/docker/client.py +++ b/docker/client.py @@ -33,6 +33,8 @@ class DockerClient(object): :py:class:`~docker.tls.TLSConfig` object to use custom configuration. user_agent (str): Set a custom user agent for requests to the server. + credstore_env (dict): Override environment variables when calling the + credential store process. 
""" def __init__(self, *args, **kwargs): self.api = APIClient(*args, **kwargs) @@ -66,6 +68,8 @@ class DockerClient(object): assert_hostname (bool): Verify the hostname of the server. environment (dict): The environment to read environment variables from. Default: the value of ``os.environ`` + credstore_env (dict): Override environment variables when calling + the credential store process. Example: @@ -77,8 +81,9 @@ class DockerClient(object): """ timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT_SECONDS) version = kwargs.pop('version', None) - return cls(timeout=timeout, version=version, - **kwargs_from_env(**kwargs)) + return cls( + timeout=timeout, version=version, **kwargs_from_env(**kwargs) + ) # Resources @property diff --git a/requirements.txt b/requirements.txt index 9079315..6c5e7d0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ asn1crypto==0.22.0 backports.ssl-match-hostname==3.5.0.1 cffi==1.10.0 cryptography==1.9 -docker-pycreds==0.2.3 +docker-pycreds==0.3.0 enum34==1.1.6 idna==2.5 ipaddress==1.0.18 diff --git a/setup.py b/setup.py index c1eabcf..57b2b5a 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ requirements = [ 'requests >= 2.14.2, != 2.18.0', 'six >= 1.4.0', 'websocket-client >= 0.32.0', - 'docker-pycreds >= 0.2.3' + 'docker-pycreds >= 0.3.0' ] extras_require = { diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py index ba80840..af2bb1c 100644 --- a/tests/unit/api_test.py +++ b/tests/unit/api_test.py @@ -44,7 +44,7 @@ def response(status_code=200, content='', headers=None, reason=None, elapsed=0, return res -def fake_resolve_authconfig(authconfig, registry=None): +def fake_resolve_authconfig(authconfig, registry=None, *args, **kwargs): return None -- cgit v1.2.1 From 76471c6519204e2c761a57fbefc565f0ea23dc21 Mon Sep 17 00:00:00 2001 From: Alex Lloyd Date: Tue, 12 Jun 2018 12:45:33 +0100 Subject: Fixed typo in ContainerSpec Docs Signed-off-by: Alexander Lloyd --- docker/types/services.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/types/services.py b/docker/types/services.py index 09eb05e..31f4750 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -82,7 +82,7 @@ class ContainerSpec(dict): args (:py:class:`list`): Arguments to the command. hostname (string): The hostname to set on the container. env (dict): Environment variables. - dir (string): The working directory for commands to run in. + workdir (string): The working directory for commands to run in. user (string): The user inside the container. labels (dict): A map of labels to associate with the service. mounts (:py:class:`list`): A list of specifications for mounts to be -- cgit v1.2.1 From 000331cfc1443c61e1a7ac58b9ea8dbeb09d110d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=BCray=20Y=C4=B1ld=C4=B1r=C4=B1m?= Date: Sun, 29 Apr 2018 04:43:11 +0300 Subject: Swarm Mode service scaling parameter mistake is fixed. 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Güray Yıldırım --- docker/models/services.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/models/services.py b/docker/models/services.py index 2834dd7..458d2c8 100644 --- a/docker/models/services.py +++ b/docker/models/services.py @@ -126,7 +126,7 @@ class Service(Model): service_mode = ServiceMode('replicated', replicas) return self.client.api.update_service(self.id, self.version, - service_mode, + mode=service_mode, fetch_current_spec=True) def force_update(self): -- cgit v1.2.1 From e5f56247e3d6f6f0f325aab507d9845ad2c4c097 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 18 Jun 2018 15:11:12 -0700 Subject: Bump 3.4.0 Signed-off-by: Joffrey F --- docker/version.py | 2 +- docs/change-log.md | 25 +++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 04fd3c2..c504327 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "3.4.0-dev" +version = "3.4.0" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/docs/change-log.md b/docs/change-log.md index 0065c62..5a0d55a 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,31 @@ Change log ========== +3.4.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/51?closed=1) + +### Features + +* The `APIClient` and `DockerClient` constructors now accept a `credstore_env` + parameter. When set, values in this dictionary are added to the environment + when executing the credential store process. + +### Bugfixes + +* `DockerClient.networks.prune` now properly returns the operation's result +* Fixed a bug that caused custom Dockerfile paths in a subfolder of the build + context to be invalidated, preventing these builds from working +* The `plugin_privileges` method can now be called for plugins requiring + authentication to access +* Fixed a bug that caused attempts to read a data stream over an unsecured TCP + socket to crash on Windows clients +* Fixed a bug where using the `read_only` parameter when creating a service using + the `DockerClient` was being ignored +* Fixed an issue where `Service.scale` would not properly update the service's + mode, causing the operation to fail silently + 3.3.0 ----- -- cgit v1.2.1
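

The additions in this series can be tried end to end with a short script. What follows is a
minimal illustrative sketch, not part of the patch set itself: it assumes a daemon reachable on
the default Unix socket, API version 1.31 or newer for the build-cache prune, public registry
access for ``busybox``, and placeholder values for the socket path and the ``credstore_env``
override.

    import docker

    # credstore_env is passed through to the credential helper process when it
    # is invoked; the value below is only a placeholder showing the argument's shape.
    client = docker.DockerClient(
        base_url='unix://var/run/docker.sock',
        credstore_env={'PATH': '/usr/local/bin'},
    )

    # ignore_removed=True swallows NotFound errors raised when a container
    # disappears between the list call and the per-container inspect calls.
    containers = client.containers.list(all=True, ignore_removed=True)

    # Query the registry for digest and platform data without pulling the image,
    # then pull only if the desired platform is published.
    regdata = client.images.get_registry_data('busybox:latest')
    if regdata.has_platform('linux/amd64'):
        image = regdata.pull(platform='linux/amd64')

    # Clear the builder cache; the response reports reclaimed disk space in bytes.
    result = client.images.prune_builds()
    print(result.get('SpaceReclaimed', 0))

    # networks.prune() now returns the engine's response instead of discarding it.
    removed = client.networks.prune()

    client.close()

Note that ``APIClient.stop()`` and ``APIClient.restart()`` now extend the HTTP request timeout
by the per-call operation timeout, so long stop grace periods no longer require raising the
client-wide timeout.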