author     Joffrey F <f.joffrey@gmail.com>  2018-01-31 17:48:45 -0800
committer  GitHub <noreply@github.com>      2018-01-31 17:48:45 -0800
commit     91bc75cc92f578ae9d659ad7e8ed11a0877b70aa (patch)
tree       87f97de6b2fae79782157404f17c2e1024c1b06e
parent     5bed7b8f0dde3949779b60ec0f6305a02dd35a87 (diff)
parent     9a87f80f85557ee3ef808677f982548b33a490c8 (diff)
download   docker-py-91bc75cc92f578ae9d659ad7e8ed11a0877b70aa.tar.gz
Merge pull request #1891 from docker/3.0.0-release (tag: 3.0.0)
3.0.0 release
-rw-r--r--  .travis.yml  23
-rw-r--r--  Dockerfile-py3  2
-rw-r--r--  Jenkinsfile  6
-rw-r--r--  Makefile  28
-rw-r--r--  appveyor.yml  2
-rw-r--r--  docker/api/build.py  70
-rw-r--r--  docker/api/client.py  35
-rw-r--r--  docker/api/container.py  200
-rw-r--r--  docker/api/daemon.py  11
-rw-r--r--  docker/api/exec_api.py  32
-rw-r--r--  docker/api/image.py  100
-rw-r--r--  docker/api/network.py  6
-rw-r--r--  docker/api/service.py  41
-rw-r--r--  docker/api/volume.py  4
-rw-r--r--  docker/auth.py  120
-rw-r--r--  docker/constants.py  2
-rw-r--r--  docker/errors.py  9
-rw-r--r--  docker/models/containers.py  80
-rw-r--r--  docker/models/images.py  73
-rw-r--r--  docker/models/services.py  26
-rw-r--r--  docker/tls.py  34
-rw-r--r--  docker/types/containers.py  123
-rw-r--r--  docker/types/services.py  25
-rw-r--r--  docker/utils/__init__.py  2
-rw-r--r--  docker/utils/config.py  66
-rw-r--r--  docker/utils/decorators.py  6
-rw-r--r--  docker/utils/utils.py  28
-rw-r--r--  docker/version.py  2
-rw-r--r--  docs/change-log.md  77
-rw-r--r--  docs/images.rst  12
-rw-r--r--  requirements.txt  2
-rw-r--r--  setup.py  10
-rw-r--r--  tests/helpers.py  29
-rw-r--r--  tests/integration/api_build_test.py  68
-rw-r--r--  tests/integration/api_client_test.py  43
-rw-r--r--  tests/integration/api_container_test.py  583
-rw-r--r--  tests/integration/api_exec_test.py  105
-rw-r--r--  tests/integration/api_healthcheck_test.py  5
-rw-r--r--  tests/integration/api_image_test.py  95
-rw-r--r--  tests/integration/api_network_test.py  107
-rw-r--r--  tests/integration/api_service_test.py  27
-rw-r--r--  tests/integration/api_volume_test.py  15
-rw-r--r--  tests/integration/base.py  6
-rw-r--r--  tests/integration/errors_test.py  5
-rw-r--r--  tests/integration/models_containers_test.py  49
-rw-r--r--  tests/integration/models_images_test.py  46
-rw-r--r--  tests/integration/models_services_test.py  72
-rw-r--r--  tests/integration/models_swarm_test.py  9
-rw-r--r--  tests/integration/regression_test.py  30
-rw-r--r--  tests/unit/api_build_test.py  72
-rw-r--r--  tests/unit/api_container_test.py  852
-rw-r--r--  tests/unit/api_exec_test.py  90
-rw-r--r--  tests/unit/api_image_test.py  52
-rw-r--r--  tests/unit/api_network_test.py  108
-rw-r--r--  tests/unit/api_test.py  80
-rw-r--r--  tests/unit/api_volume_test.py  77
-rw-r--r--  tests/unit/auth_test.py  379
-rw-r--r--  tests/unit/client_test.py  23
-rw-r--r--  tests/unit/dockertypes_test.py  299
-rw-r--r--  tests/unit/fake_api.py  39
-rw-r--r--  tests/unit/fake_api_client.py  2
-rw-r--r--  tests/unit/models_containers_test.py  46
-rw-r--r--  tests/unit/models_images_test.py  17
-rw-r--r--  tests/unit/ssladapter_test.py  11
-rw-r--r--  tests/unit/swarm_test.py  12
-rw-r--r--  tests/unit/utils_config_test.py  123
-rw-r--r--  tests/unit/utils_test.py  333
-rw-r--r--  tox.ini  2
-rw-r--r--  win32-requirements.txt  1
69 files changed, 2634 insertions, 2535 deletions
diff --git a/.travis.yml b/.travis.yml
index cd64b44..842e352 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,15 +1,18 @@
sudo: false
language: python
-python:
- - "3.5"
-env:
- - TOX_ENV=py27
-# FIXME: default travis worker does not carry py33 anymore. Can this be configured?
-# - TOX_ENV=py33
- - TOX_ENV=py34
- - TOX_ENV=py35
- - TOX_ENV=flake8
+matrix:
+ include:
+ - python: 2.7
+ env: TOXENV=py27
+ - python: 3.4
+ env: TOXENV=py34
+ - python: 3.5
+ env: TOXENV=py35
+ - python: 3.6
+ env: TOXENV=py36
+ - env: TOXENV=flake8
+
install:
- pip install tox
script:
- - tox -e $TOX_ENV
+ - tox
diff --git a/Dockerfile-py3 b/Dockerfile-py3
index 543cf4d..d558ba3 100644
--- a/Dockerfile-py3
+++ b/Dockerfile-py3
@@ -1,4 +1,4 @@
-FROM python:3.5
+FROM python:3.6
RUN mkdir /src
WORKDIR /src
diff --git a/Jenkinsfile b/Jenkinsfile
index e3168cd..6d9d343 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -5,7 +5,7 @@ def imageNamePy2
def imageNamePy3
def images = [:]
-def dockerVersions = ["17.06.2-ce", "17.09.0-ce", "17.10.0-ce"]
+def dockerVersions = ["17.06.2-ce", "17.12.0-ce", "18.01.0-ce"]
def buildImage = { name, buildargs, pyTag ->
img = docker.image(name)
@@ -27,13 +27,13 @@ def buildImages = { ->
imageNamePy3 = "${imageNameBase}:py3-${gitCommit()}"
buildImage(imageNamePy2, ".", "py2.7")
- buildImage(imageNamePy3, "-f Dockerfile-py3 .", "py3.5")
+ buildImage(imageNamePy3, "-f Dockerfile-py3 .", "py3.6")
}
}
}
def getAPIVersion = { engineVersion ->
- def versionMap = ['17.06': '1.30', '17.09': '1.32', '17.10': '1.33']
+ def versionMap = ['17.06': '1.30', '17.12': '1.35', '18.01': '1.35']
return versionMap[engineVersion.substring(0, 5)]
}
diff --git a/Makefile b/Makefile
index 32ef510..f491993 100644
--- a/Makefile
+++ b/Makefile
@@ -3,7 +3,7 @@ all: test
.PHONY: clean
clean:
- -docker rm -vf dpy-dind
+ -docker rm -f dpy-dind-py2 dpy-dind-py3
find -name "__pycache__" | xargs rm -rf
.PHONY: build
@@ -41,19 +41,29 @@ integration-test: build
integration-test-py3: build-py3
docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test tests/integration/${file}
-TEST_API_VERSION ?= 1.33
-TEST_ENGINE_VERSION ?= 17.10.0-ce
+TEST_API_VERSION ?= 1.35
+TEST_ENGINE_VERSION ?= 17.12.0-ce
.PHONY: integration-dind
-integration-dind: build build-py3
- docker rm -vf dpy-dind || :
- docker run -d --name dpy-dind --privileged dockerswarm/dind:${TEST_ENGINE_VERSION} dockerd\
+integration-dind: integration-dind-py2 integration-dind-py3
+
+.PHONY: integration-dind-py2
+integration-dind-py2: build
+ docker rm -vf dpy-dind-py2 || :
+ docker run -d --name dpy-dind-py2 --privileged dockerswarm/dind:${TEST_ENGINE_VERSION} dockerd\
-H tcp://0.0.0.0:2375 --experimental
docker run -t --rm --env="DOCKER_HOST=tcp://docker:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
- --link=dpy-dind:docker docker-sdk-python py.test tests/integration
+ --link=dpy-dind-py2:docker docker-sdk-python py.test tests/integration
+ docker rm -vf dpy-dind-py2
+
+.PHONY: integration-dind-py3
+integration-dind-py3: build-py3
+ docker rm -vf dpy-dind-py3 || :
+ docker run -d --name dpy-dind-py3 --privileged dockerswarm/dind:${TEST_ENGINE_VERSION} dockerd\
+ -H tcp://0.0.0.0:2375 --experimental
docker run -t --rm --env="DOCKER_HOST=tcp://docker:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
- --link=dpy-dind:docker docker-sdk-python3 py.test tests/integration
- docker rm -vf dpy-dind
+ --link=dpy-dind-py3:docker docker-sdk-python3 py.test tests/integration
+ docker rm -vf dpy-dind-py3
.PHONY: integration-dind-ssl
integration-dind-ssl: build-dind-certs build build-py3
diff --git a/appveyor.yml b/appveyor.yml
index 41cde62..d659b58 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -3,7 +3,7 @@ version: '{branch}-{build}'
install:
- "SET PATH=C:\\Python27-x64;C:\\Python27-x64\\Scripts;%PATH%"
- "python --version"
- - "pip install tox==2.7.0 virtualenv==15.1.0"
+ - "pip install tox==2.9.1"
# Build the binary after tests
build: false
diff --git a/docker/api/build.py b/docker/api/build.py
index 9ff2dfb..56f1fcf 100644
--- a/docker/api/build.py
+++ b/docker/api/build.py
@@ -1,7 +1,6 @@
import json
import logging
import os
-import re
from .. import auth
from .. import constants
@@ -14,12 +13,12 @@ log = logging.getLogger(__name__)
class BuildApiMixin(object):
def build(self, path=None, tag=None, quiet=False, fileobj=None,
- nocache=False, rm=False, stream=False, timeout=None,
+ nocache=False, rm=False, timeout=None,
custom_context=False, encoding=None, pull=False,
forcerm=False, dockerfile=None, container_limits=None,
decode=False, buildargs=None, gzip=False, shmsize=None,
labels=None, cache_from=None, target=None, network_mode=None,
- squash=None, extra_hosts=None):
+ squash=None, extra_hosts=None, platform=None):
"""
Similar to the ``docker build`` command. Either ``path`` or ``fileobj``
needs to be set. ``path`` can be a local path (to a directory
@@ -67,9 +66,6 @@ class BuildApiMixin(object):
rm (bool): Remove intermediate containers. The ``docker build``
command now defaults to ``--rm=true``, but we have kept the old
default of `False` to preserve backward compatibility
- stream (bool): *Deprecated for API version > 1.8 (always True)*.
- Return a blocking generator you can iterate over to retrieve
- build output as it happens
timeout (int): HTTP timeout
custom_context (bool): Optional if using ``fileobj``
encoding (str): The encoding for a stream. Set to ``gzip`` for
@@ -103,6 +99,7 @@ class BuildApiMixin(object):
single layer.
extra_hosts (dict): Extra hosts to add to /etc/hosts in building
containers, as a mapping of hostname to IP address.
+ platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
A generator for the build output.
@@ -145,23 +142,14 @@ class BuildApiMixin(object):
exclude = None
if os.path.exists(dockerignore):
with open(dockerignore, 'r') as f:
- exclude = list(filter(bool, f.read().splitlines()))
+ exclude = list(filter(
+ bool, [l.strip() for l in f.read().splitlines()]
+ ))
context = utils.tar(
path, exclude=exclude, dockerfile=dockerfile, gzip=gzip
)
encoding = 'gzip' if gzip else encoding
- if utils.compare_version('1.8', self._version) >= 0:
- stream = True
-
- if dockerfile and utils.compare_version('1.17', self._version) < 0:
- raise errors.InvalidVersion(
- 'dockerfile was only introduced in API version 1.17'
- )
-
- if utils.compare_version('1.19', self._version) < 0:
- pull = 1 if pull else 0
-
u = self._url('/build')
params = {
't': tag,
@@ -176,12 +164,7 @@ class BuildApiMixin(object):
params.update(container_limits)
if buildargs:
- if utils.version_gte(self._version, '1.21'):
- params.update({'buildargs': json.dumps(buildargs)})
- else:
- raise errors.InvalidVersion(
- 'buildargs was only introduced in API version 1.21'
- )
+ params.update({'buildargs': json.dumps(buildargs)})
if shmsize:
if utils.version_gte(self._version, '1.22'):
@@ -241,35 +224,33 @@ class BuildApiMixin(object):
extra_hosts = utils.format_extra_hosts(extra_hosts)
params.update({'extrahosts': extra_hosts})
+ if platform is not None:
+ if utils.version_lt(self._version, '1.32'):
+ raise errors.InvalidVersion(
+ 'platform was only introduced in API version 1.32'
+ )
+ params['platform'] = platform
+
if context is not None:
headers = {'Content-Type': 'application/tar'}
if encoding:
headers['Content-Encoding'] = encoding
- if utils.compare_version('1.9', self._version) >= 0:
- self._set_auth_headers(headers)
+ self._set_auth_headers(headers)
response = self._post(
u,
data=context,
params=params,
headers=headers,
- stream=stream,
+ stream=True,
timeout=timeout,
)
if context is not None and not custom_context:
context.close()
- if stream:
- return self._stream_helper(response, decode=decode)
- else:
- output = self._result(response)
- srch = r'Successfully built ([0-9a-f]+)'
- match = re.search(srch, output)
- if not match:
- return None, output
- return match.group(1), output
+ return self._stream_helper(response, decode=decode)
def _set_auth_headers(self, headers):
log.debug('Looking for auth config')
@@ -290,14 +271,12 @@ class BuildApiMixin(object):
# Matches CLI behavior: https://github.com/docker/docker/blob/
# 67b85f9d26f1b0b2b240f2d794748fac0f45243c/cliconfig/
# credentials/native_store.go#L68-L83
- for registry in self._auth_configs.keys():
- if registry == 'credsStore' or registry == 'HttpHeaders':
- continue
+ for registry in self._auth_configs.get('auths', {}).keys():
auth_data[registry] = auth.resolve_authconfig(
self._auth_configs, registry
)
else:
- auth_data = self._auth_configs.copy()
+ auth_data = self._auth_configs.get('auths', {}).copy()
# See https://github.com/docker/docker-py/issues/1683
if auth.INDEX_NAME in auth_data:
auth_data[auth.INDEX_URL] = auth_data[auth.INDEX_NAME]
@@ -308,13 +287,8 @@ class BuildApiMixin(object):
)
)
- if utils.compare_version('1.19', self._version) >= 0:
- headers['X-Registry-Config'] = auth.encode_header(
- auth_data
- )
- else:
- headers['X-Registry-Config'] = auth.encode_header({
- 'configs': auth_data
- })
+ headers['X-Registry-Config'] = auth.encode_header(
+ auth_data
+ )
else:
log.debug('No auth config found')
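
With the deprecated ``stream`` argument gone, the low-level build call above always returns a blocking generator, and the new ``platform`` parameter is forwarded to daemons speaking API 1.32 or later. A minimal sketch of the updated call (the path, tag and platform values are illustrative, not taken from this diff):

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    # build() now always streams; decode=True yields JSON-decoded dicts
    for chunk in client.build(path='.', tag='example/app:latest',
                              platform='linux/amd64', decode=True):
        if 'stream' in chunk:
            print(chunk['stream'], end='')
        elif 'error' in chunk:
            raise RuntimeError(chunk['error'])
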
diff --git a/docker/api/client.py b/docker/api/client.py
index f0a86d4..e69d143 100644
--- a/docker/api/client.py
+++ b/docker/api/client.py
@@ -1,6 +1,5 @@
import json
import struct
-import warnings
from functools import partial
import requests
@@ -27,12 +26,12 @@ from ..constants import (
MINIMUM_DOCKER_API_VERSION
)
from ..errors import (
- DockerException, TLSParameterError,
+ DockerException, InvalidVersion, TLSParameterError,
create_api_error_from_http_exception
)
from ..tls import TLSConfig
from ..transport import SSLAdapter, UnixAdapter
-from ..utils import utils, check_resource, update_headers
+from ..utils import utils, check_resource, update_headers, config
from ..utils.socket import frames_iter, socket_raw_iter
from ..utils.json_stream import json_stream
try:
@@ -87,6 +86,7 @@ class APIClient(
"""
__attrs__ = requests.Session.__attrs__ + ['_auth_configs',
+ '_general_configs',
'_version',
'base_url',
'timeout']
@@ -105,7 +105,10 @@ class APIClient(
self.timeout = timeout
self.headers['User-Agent'] = user_agent
- self._auth_configs = auth.load_config()
+ self._general_configs = config.load_general_config()
+ self._auth_configs = auth.load_config(
+ config_dict=self._general_configs
+ )
base_url = utils.parse_host(
base_url, IS_WINDOWS_PLATFORM, tls=bool(tls)
@@ -156,11 +159,9 @@ class APIClient(
)
)
if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION):
- warnings.warn(
- 'The minimum API version supported is {}, but you are using '
- 'version {}. It is recommended you either upgrade Docker '
- 'Engine or use an older version of Docker SDK for '
- 'Python.'.format(MINIMUM_DOCKER_API_VERSION, self._version)
+ raise InvalidVersion(
+ 'API versions below {} are no longer supported by this '
+ 'library.'.format(MINIMUM_DOCKER_API_VERSION)
)
def _retrieve_server_version(self):
@@ -349,17 +350,8 @@ class APIClient(
break
yield data
- def _stream_raw_result_old(self, response):
- ''' Stream raw output for API versions below 1.6 '''
- self._raise_for_status(response)
- for line in response.iter_lines(chunk_size=1,
- decode_unicode=True):
- # filter out keep-alive new lines
- if line:
- yield line
-
def _stream_raw_result(self, response):
- ''' Stream result for TTY-enabled container above API 1.6 '''
+ ''' Stream result for TTY-enabled container '''
self._raise_for_status(response)
for out in response.iter_content(chunk_size=1, decode_unicode=True):
yield out
@@ -415,11 +407,6 @@ class APIClient(
return self._get_result_tty(stream, res, self._check_is_tty(container))
def _get_result_tty(self, stream, res, is_tty):
- # Stream multi-plexing was only introduced in API v1.6. Anything
- # before that needs old-style streaming.
- if utils.compare_version('1.6', self._version) < 0:
- return self._stream_raw_result_old(res)
-
# We should also use raw streaming (without keep-alives)
# if we're dealing with a tty-enabled container.
if is_tty:
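
API versions below MINIMUM_DOCKER_API_VERSION now fail fast instead of emitting a warning. A short sketch of the new behavior (assuming a default local socket; no request is actually made before the version check):

    import docker
    from docker.errors import InvalidVersion

    try:
        client = docker.APIClient(version='1.20')  # below the 1.21 minimum
    except InvalidVersion as exc:
        print('rejected:', exc)
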
diff --git a/docker/api/container.py b/docker/api/container.py
index 5d58851..962d8cb 100644
--- a/docker/api/container.py
+++ b/docker/api/container.py
@@ -1,5 +1,4 @@
import six
-import warnings
from datetime import datetime
from .. import errors
@@ -66,6 +65,7 @@ class ContainerApiMixin(object):
container (str): The container to attach to.
params (dict): Dictionary of request parameters (e.g. ``stdout``,
``stderr``, ``stream``).
+ For ``detachKeys``, ~/.docker/config.json is used by default.
ws (bool): Use websockets instead of raw HTTP.
Raises:
@@ -79,6 +79,11 @@ class ContainerApiMixin(object):
'stream': 1
}
+ if 'detachKeys' not in params \
+ and 'detachKeys' in self._general_configs:
+
+ params['detachKeys'] = self._general_configs['detachKeys']
+
if ws:
return self._attach_websocket(container, params)
@@ -198,50 +203,14 @@ class ContainerApiMixin(object):
x['Id'] = x['Id'][:12]
return res
- @utils.check_resource('container')
- def copy(self, container, resource):
- """
- Identical to the ``docker cp`` command. Get files/folders from the
- container.
-
- **Deprecated for API version >= 1.20.** Use
- :py:meth:`~ContainerApiMixin.get_archive` instead.
-
- Args:
- container (str): The container to copy from
- resource (str): The path within the container
-
- Returns:
- The contents of the file as a string
-
- Raises:
- :py:class:`docker.errors.APIError`
- If the server returns an error.
- """
- if utils.version_gte(self._version, '1.20'):
- warnings.warn(
- 'APIClient.copy() is deprecated for API version >= 1.20, '
- 'please use get_archive() instead',
- DeprecationWarning
- )
- res = self._post_json(
- self._url("/containers/{0}/copy", container),
- data={"Resource": resource},
- stream=True
- )
- self._raise_for_status(res)
- return res.raw
-
def create_container(self, image, command=None, hostname=None, user=None,
- detach=False, stdin_open=False, tty=False,
- mem_limit=None, ports=None, environment=None,
- dns=None, volumes=None, volumes_from=None,
+ detach=False, stdin_open=False, tty=False, ports=None,
+ environment=None, volumes=None,
network_disabled=False, name=None, entrypoint=None,
- cpu_shares=None, working_dir=None, domainname=None,
- memswap_limit=None, cpuset=None, host_config=None,
- mac_address=None, labels=None, volume_driver=None,
- stop_signal=None, networking_config=None,
- healthcheck=None, stop_timeout=None, runtime=None):
+ working_dir=None, domainname=None, host_config=None,
+ mac_address=None, labels=None, stop_signal=None,
+ networking_config=None, healthcheck=None,
+ stop_timeout=None, runtime=None):
"""
Creates a container. Parameters are similar to those for the ``docker
run`` command except it doesn't support the attach options (``-a``).
@@ -383,27 +352,17 @@ class ContainerApiMixin(object):
return container ID
stdin_open (bool): Keep STDIN open even if not attached
tty (bool): Allocate a pseudo-TTY
- mem_limit (float or str): Memory limit. Accepts float values (which
- represent the memory limit of the created container in bytes)
- or a string with a units identification char (``100000b``,
- ``1000k``, ``128m``, ``1g``). If a string is specified without
- a units character, bytes are assumed as an intended unit.
ports (list of ints): A list of port numbers
environment (dict or list): A dictionary or a list of strings in
the following format ``["PASSWORD=xxx"]`` or
``{"PASSWORD": "xxx"}``.
- dns (:py:class:`list`): DNS name servers. Deprecated since API
- version 1.10. Use ``host_config`` instead.
volumes (str or list): List of paths inside the container to use
as volumes.
- volumes_from (:py:class:`list`): List of container names or Ids to
- get volumes from.
network_disabled (bool): Disable networking
name (str): A name for the container
entrypoint (str or list): An entrypoint
working_dir (str): Path to the working directory
domainname (str): The domain name to use for the container
- memswap_limit (int):
host_config (dict): A dictionary created with
:py:meth:`create_host_config`.
mac_address (str): The Mac Address to assign the container
@@ -411,7 +370,6 @@ class ContainerApiMixin(object):
``{"label1": "value1", "label2": "value2"}``) or a list of
names of labels to set with empty values (e.g.
``["label1", "label2"]``)
- volume_driver (str): The name of a volume driver/plugin.
stop_signal (str): The stop signal to use to stop the container
(e.g. ``SIGINT``).
stop_timeout (int): Timeout to stop the container, in seconds.
@@ -434,17 +392,12 @@ class ContainerApiMixin(object):
if isinstance(volumes, six.string_types):
volumes = [volumes, ]
- if host_config and utils.compare_version('1.15', self._version) < 0:
- raise errors.InvalidVersion(
- 'host_config is not supported in API < 1.15'
- )
-
config = self.create_container_config(
- image, command, hostname, user, detach, stdin_open, tty, mem_limit,
- ports, dns, environment, volumes, volumes_from,
- network_disabled, entrypoint, cpu_shares, working_dir, domainname,
- memswap_limit, cpuset, host_config, mac_address, labels,
- volume_driver, stop_signal, networking_config, healthcheck,
+ image, command, hostname, user, detach, stdin_open, tty,
+ ports, environment, volumes,
+ network_disabled, entrypoint, working_dir, domainname,
+ host_config, mac_address, labels,
+ stop_signal, networking_config, healthcheck,
stop_timeout, runtime
)
return self.create_container_from_config(config, name)
@@ -698,7 +651,7 @@ class ContainerApiMixin(object):
container (str): The container to export
Returns:
- (str): The filesystem tar archive
+ (generator): The archived filesystem data stream
Raises:
:py:class:`docker.errors.APIError`
@@ -707,11 +660,9 @@ class ContainerApiMixin(object):
res = self._get(
self._url("/containers/{0}/export", container), stream=True
)
- self._raise_for_status(res)
- return res.raw
+ return self._stream_raw_result(res)
@utils.check_resource('container')
- @utils.minimum_version('1.20')
def get_archive(self, container, path):
"""
Retrieve a file or folder from a container in the form of a tar
@@ -737,7 +688,7 @@ class ContainerApiMixin(object):
self._raise_for_status(res)
encoded_stat = res.headers.get('x-docker-container-path-stat')
return (
- res.raw,
+ self._stream_raw_result(res),
utils.decode_json_header(encoded_stat) if encoded_stat else None
)
@@ -786,7 +737,8 @@ class ContainerApiMixin(object):
@utils.check_resource('container')
def logs(self, container, stdout=True, stderr=True, stream=False,
- timestamps=False, tail='all', since=None, follow=None):
+ timestamps=False, tail='all', since=None, follow=None,
+ until=None):
"""
Get logs from a container. Similar to the ``docker logs`` command.
@@ -805,6 +757,8 @@ class ContainerApiMixin(object):
since (datetime or int): Show logs since a given datetime or
integer epoch (in seconds)
follow (bool): Follow log output
+ until (datetime or int): Show logs that occurred before the given
+ datetime or integer epoch (in seconds)
Returns:
(generator or str)
@@ -813,44 +767,46 @@ class ContainerApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
- if utils.compare_version('1.11', self._version) >= 0:
- if follow is None:
- follow = stream
- params = {'stderr': stderr and 1 or 0,
- 'stdout': stdout and 1 or 0,
- 'timestamps': timestamps and 1 or 0,
- 'follow': follow and 1 or 0,
- }
- if utils.compare_version('1.13', self._version) >= 0:
- if tail != 'all' and (not isinstance(tail, int) or tail < 0):
- tail = 'all'
- params['tail'] = tail
-
- if since is not None:
- if utils.compare_version('1.19', self._version) < 0:
- raise errors.InvalidVersion(
- 'since is not supported in API < 1.19'
- )
- else:
- if isinstance(since, datetime):
- params['since'] = utils.datetime_to_timestamp(since)
- elif (isinstance(since, int) and since > 0):
- params['since'] = since
- else:
- raise errors.InvalidArgument(
- 'since value should be datetime or int, not {}'.
- format(type(since))
- )
- url = self._url("/containers/{0}/logs", container)
- res = self._get(url, params=params, stream=stream)
- return self._get_result(container, stream, res)
- return self.attach(
- container,
- stdout=stdout,
- stderr=stderr,
- stream=stream,
- logs=True
- )
+ if follow is None:
+ follow = stream
+ params = {'stderr': stderr and 1 or 0,
+ 'stdout': stdout and 1 or 0,
+ 'timestamps': timestamps and 1 or 0,
+ 'follow': follow and 1 or 0,
+ }
+ if tail != 'all' and (not isinstance(tail, int) or tail < 0):
+ tail = 'all'
+ params['tail'] = tail
+
+ if since is not None:
+ if isinstance(since, datetime):
+ params['since'] = utils.datetime_to_timestamp(since)
+ elif (isinstance(since, int) and since > 0):
+ params['since'] = since
+ else:
+ raise errors.InvalidArgument(
+ 'since value should be datetime or positive int, '
+ 'not {}'.format(type(since))
+ )
+
+ if until is not None:
+ if utils.version_lt(self._version, '1.35'):
+ raise errors.InvalidVersion(
+ 'until is not supported for API version < 1.35'
+ )
+ if isinstance(until, datetime):
+ params['until'] = utils.datetime_to_timestamp(until)
+ elif (isinstance(until, int) and until > 0):
+ params['until'] = until
+ else:
+ raise errors.InvalidArgument(
+ 'until value should be datetime or positive int, '
+ 'not {}'.format(type(until))
+ )
+
+ url = self._url("/containers/{0}/logs", container)
+ res = self._get(url, params=params, stream=stream)
+ return self._get_result(container, stream, res)
@utils.check_resource('container')
def pause(self, container):
@@ -918,7 +874,6 @@ class ContainerApiMixin(object):
return h_ports
@utils.check_resource('container')
- @utils.minimum_version('1.20')
def put_archive(self, container, path, data):
"""
Insert a file or folder in an existing container using a tar archive as
@@ -988,7 +943,6 @@ class ContainerApiMixin(object):
)
self._raise_for_status(res)
- @utils.minimum_version('1.17')
@utils.check_resource('container')
def rename(self, container, name):
"""
@@ -1085,7 +1039,6 @@ class ContainerApiMixin(object):
res = self._post(url)
self._raise_for_status(res)
- @utils.minimum_version('1.17')
@utils.check_resource('container')
def stats(self, container, decode=None, stream=True):
"""
@@ -1240,7 +1193,7 @@ class ContainerApiMixin(object):
return self._result(res, True)
@utils.check_resource('container')
- def wait(self, container, timeout=None):
+ def wait(self, container, timeout=None, condition=None):
"""
Block until a container stops, then return its exit code. Similar to
the ``docker wait`` command.
@@ -1249,10 +1202,13 @@ class ContainerApiMixin(object):
container (str or dict): The container to wait on. If a dict, the
``Id`` key is used.
timeout (int): Request timeout
+ condition (str): Wait until a container state reaches the given
+ condition, either ``not-running`` (default), ``next-exit``,
+ or ``removed``
Returns:
- (int): The exit code of the container. Returns ``-1`` if the API
- responds without a ``StatusCode`` attribute.
+ (dict): The API's response as a Python dictionary, including
+ the container's exit code under the ``StatusCode`` attribute.
Raises:
:py:class:`requests.exceptions.ReadTimeout`
@@ -1261,9 +1217,13 @@ class ContainerApiMixin(object):
If the server returns an error.
"""
url = self._url("/containers/{0}/wait", container)
- res = self._post(url, timeout=timeout)
- self._raise_for_status(res)
- json_ = res.json()
- if 'StatusCode' in json_:
- return json_['StatusCode']
- return -1
+ params = {}
+ if condition is not None:
+ if utils.version_lt(self._version, '1.30'):
+ raise errors.InvalidVersion(
+ 'wait condition is not supported for API version < 1.30'
+ )
+ params['condition'] = condition
+
+ res = self._post(url, timeout=timeout, params=params)
+ return self._result(res, True)
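
Taken together, the container API changes above mean ``wait()`` now returns the raw API response (with the exit code under ``StatusCode``) and accepts a ``condition``, while ``logs()`` gains an ``until`` bound on API 1.35+. A hedged sketch using a throwaway container (image and command are placeholders):

    import time
    import docker

    client = docker.APIClient()
    container = client.create_container('alpine', ['echo', 'done'])
    client.start(container)

    result = client.wait(container, condition='not-running')  # API >= 1.30
    print('exit code:', result['StatusCode'])

    # `until` raises InvalidVersion against daemons older than API 1.35
    print(client.logs(container, until=int(time.time())))
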
diff --git a/docker/api/daemon.py b/docker/api/daemon.py
index 285b742..033dbf1 100644
--- a/docker/api/daemon.py
+++ b/docker/api/daemon.py
@@ -1,9 +1,7 @@
import os
-import warnings
from datetime import datetime
from .. import auth, utils
-from ..constants import INSECURE_REGISTRY_DEPRECATION_WARNING
class DaemonApiMixin(object):
@@ -90,7 +88,7 @@ class DaemonApiMixin(object):
return self._result(self._get(self._url("/info")), True)
def login(self, username, password=None, email=None, registry=None,
- reauth=False, insecure_registry=False, dockercfg_path=None):
+ reauth=False, dockercfg_path=None):
"""
Authenticate with a registry. Similar to the ``docker login`` command.
@@ -113,11 +111,6 @@ class DaemonApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
- if insecure_registry:
- warnings.warn(
- INSECURE_REGISTRY_DEPRECATION_WARNING.format('login()'),
- DeprecationWarning
- )
# If we don't have any auth data so far, try reloading the config file
# one more time in case anything showed up in there.
@@ -144,6 +137,8 @@ class DaemonApiMixin(object):
response = self._post_json(self._url('/auth'), data=req_data)
if response.status_code == 200:
+ if 'auths' not in self._auth_configs:
+ self._auth_configs['auths'] = {}
self._auth_configs[registry or auth.INDEX_NAME] = req_data
return self._result(response, json=True)
diff --git a/docker/api/exec_api.py b/docker/api/exec_api.py
index cff5cfa..986d87f 100644
--- a/docker/api/exec_api.py
+++ b/docker/api/exec_api.py
@@ -5,11 +5,10 @@ from .. import utils
class ExecApiMixin(object):
- @utils.minimum_version('1.15')
@utils.check_resource('container')
def exec_create(self, container, cmd, stdout=True, stderr=True,
stdin=False, tty=False, privileged=False, user='',
- environment=None):
+ environment=None, workdir=None, detach_keys=None):
"""
Sets up an exec instance in a running container.
@@ -26,6 +25,12 @@ class ExecApiMixin(object):
environment (dict or list): A dictionary or a list of strings in
the following format ``["PASSWORD=xxx"]`` or
``{"PASSWORD": "xxx"}``.
+ workdir (str): Path to working directory for this exec session
+ detach_keys (str): Override the key sequence for detaching
+ a container. Format is a single character `[a-Z]`
+ or `ctrl-<value>` where `<value>` is one of:
+ `a-z`, `@`, `^`, `[`, `,` or `_`.
+ ~/.docker/config.json is used by default.
Returns:
(dict): A dictionary with an exec ``Id`` key.
@@ -35,14 +40,6 @@ class ExecApiMixin(object):
If the server returns an error.
"""
- if privileged and utils.version_lt(self._version, '1.19'):
- raise errors.InvalidVersion(
- 'Privileged exec is not supported in API < 1.19'
- )
- if user and utils.version_lt(self._version, '1.19'):
- raise errors.InvalidVersion(
- 'User-specific exec is not supported in API < 1.19'
- )
if environment is not None and utils.version_lt(self._version, '1.25'):
raise errors.InvalidVersion(
'Setting environment for exec is not supported in API < 1.25'
@@ -66,11 +63,22 @@ class ExecApiMixin(object):
'Env': environment,
}
+ if workdir is not None:
+ if utils.version_lt(self._version, '1.35'):
+ raise errors.InvalidVersion(
+ 'workdir is not supported for API version < 1.35'
+ )
+ data['WorkingDir'] = workdir
+
+ if detach_keys:
+ data['detachKeys'] = detach_keys
+ elif 'detachKeys' in self._general_configs:
+ data['detachKeys'] = self._general_configs['detachKeys']
+
url = self._url('/containers/{0}/exec', container)
res = self._post_json(url, data=data)
return self._result(res, True)
- @utils.minimum_version('1.16')
def exec_inspect(self, exec_id):
"""
Return low-level information about an exec command.
@@ -90,7 +98,6 @@ class ExecApiMixin(object):
res = self._get(self._url("/exec/{0}/json", exec_id))
return self._result(res, True)
- @utils.minimum_version('1.15')
def exec_resize(self, exec_id, height=None, width=None):
"""
Resize the tty session used by the specified exec command.
@@ -109,7 +116,6 @@ class ExecApiMixin(object):
res = self._post(url, params=params)
self._raise_for_status(res)
- @utils.minimum_version('1.15')
@utils.check_resource('exec_id')
def exec_start(self, exec_id, detach=False, tty=False, stream=False,
socket=False):
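
The exec API keeps its surface but gains ``workdir`` (API 1.35+) and ``detach_keys``, the latter falling back to the value in ~/.docker/config.json when unset. An illustrative call against a running container (the container name is a placeholder):

    import docker

    client = docker.APIClient()
    exec_id = client.exec_create(
        'my-running-container', ['pwd'],
        workdir='/tmp',                    # requires API >= 1.35
        detach_keys='ctrl-p,ctrl-q',
    )
    print(client.exec_start(exec_id))      # b'/tmp\n'
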
diff --git a/docker/api/image.py b/docker/api/image.py
index 7755312..fa832a3 100644
--- a/docker/api/image.py
+++ b/docker/api/image.py
@@ -1,11 +1,9 @@
import logging
import os
-import warnings
import six
from .. import auth, errors, utils
-from ..constants import INSECURE_REGISTRY_DEPRECATION_WARNING
log = logging.getLogger(__name__)
@@ -21,8 +19,7 @@ class ImageApiMixin(object):
image (str): Image name to get
Returns:
- (urllib3.response.HTTPResponse object): The response from the
- daemon.
+ (generator): A stream of raw archive data.
Raises:
:py:class:`docker.errors.APIError`
@@ -30,14 +27,14 @@ class ImageApiMixin(object):
Example:
- >>> image = cli.get_image("fedora:latest")
- >>> f = open('/tmp/fedora-latest.tar', 'w')
- >>> f.write(image.data)
+ >>> image = cli.get_image("busybox:latest")
+ >>> f = open('/tmp/busybox-latest.tar', 'w')
+ >>> for chunk in image:
+ >>> f.write(chunk)
>>> f.close()
"""
res = self._get(self._url("/images/{0}/get", image), stream=True)
- self._raise_for_status(res)
- return res.raw
+ return self._stream_raw_result(res)
@utils.check_resource('image')
def history(self, image):
@@ -57,8 +54,7 @@ class ImageApiMixin(object):
res = self._get(self._url("/images/{0}/history", image))
return self._result(res, True)
- def images(self, name=None, quiet=False, all=False, viz=False,
- filters=None):
+ def images(self, name=None, quiet=False, all=False, filters=None):
"""
List images. Similar to the ``docker images`` command.
@@ -79,10 +75,6 @@ class ImageApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
- if viz:
- if utils.compare_version('1.7', self._version) >= 0:
- raise Exception('Viz output is not supported in API >= 1.7!')
- return self._result(self._get(self._url("images/viz")))
params = {
'filter': name,
'only_ids': 1 if quiet else 0,
@@ -229,19 +221,6 @@ class ImageApiMixin(object):
)
@utils.check_resource('image')
- def insert(self, image, url, path):
- if utils.compare_version('1.12', self._version) >= 0:
- raise errors.DeprecatedMethod(
- 'insert is not available for API version >=1.12'
- )
- api_url = self._url("/images/{0}/insert", image)
- params = {
- 'url': url,
- 'path': path
- }
- return self._result(self._post(api_url, params=params))
-
- @utils.check_resource('image')
def inspect_image(self, image):
"""
Get detailed information about an image. Similar to the ``docker
@@ -322,8 +301,8 @@ class ImageApiMixin(object):
params['filters'] = utils.convert_filters(filters)
return self._result(self._post(url, params=params), True)
- def pull(self, repository, tag=None, stream=False,
- insecure_registry=False, auth_config=None, decode=False):
+ def pull(self, repository, tag=None, stream=False, auth_config=None,
+ decode=False, platform=None):
"""
Pulls an image. Similar to the ``docker pull`` command.
@@ -331,11 +310,13 @@ class ImageApiMixin(object):
repository (str): The repository to pull
tag (str): The tag to pull
stream (bool): Stream the output as a generator
- insecure_registry (bool): Use an insecure registry
auth_config (dict): Override the credentials that
:py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
+ decode (bool): Decode the JSON data from the server into dicts.
+ Only applies with ``stream=True``
+ platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
(generator or str): The output
@@ -360,12 +341,6 @@ class ImageApiMixin(object):
}
"""
- if insecure_registry:
- warnings.warn(
- INSECURE_REGISTRY_DEPRECATION_WARNING.format('pull()'),
- DeprecationWarning
- )
-
if not tag:
repository, tag = utils.parse_repository_tag(repository)
registry, repo_name = auth.resolve_repository_name(repository)
@@ -376,14 +351,20 @@ class ImageApiMixin(object):
}
headers = {}
- if utils.compare_version('1.5', self._version) >= 0:
- if auth_config is None:
- header = auth.get_config_header(self, registry)
- if header:
- headers['X-Registry-Auth'] = header
- else:
- log.debug('Sending supplied auth config')
- headers['X-Registry-Auth'] = auth.encode_header(auth_config)
+ if auth_config is None:
+ header = auth.get_config_header(self, registry)
+ if header:
+ headers['X-Registry-Auth'] = header
+ else:
+ log.debug('Sending supplied auth config')
+ headers['X-Registry-Auth'] = auth.encode_header(auth_config)
+
+ if platform is not None:
+ if utils.version_lt(self._version, '1.32'):
+ raise errors.InvalidVersion(
+ 'platform was only introduced in API version 1.32'
+ )
+ params['platform'] = platform
response = self._post(
self._url('/images/create'), params=params, headers=headers,
@@ -397,8 +378,8 @@ class ImageApiMixin(object):
return self._result(response)
- def push(self, repository, tag=None, stream=False,
- insecure_registry=False, auth_config=None, decode=False):
+ def push(self, repository, tag=None, stream=False, auth_config=None,
+ decode=False):
"""
Push an image or a repository to the registry. Similar to the ``docker
push`` command.
@@ -407,12 +388,12 @@ class ImageApiMixin(object):
repository (str): The repository to push to
tag (str): An optional tag to push
stream (bool): Stream the output as a blocking generator
- insecure_registry (bool): Use ``http://`` to connect to the
- registry
auth_config (dict): Override the credentials that
:py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
+ decode (bool): Decode the JSON data from the server into dicts.
+ Only applies with ``stream=True``
Returns:
(generator or str): The output from the server.
@@ -431,12 +412,6 @@ class ImageApiMixin(object):
...
"""
- if insecure_registry:
- warnings.warn(
- INSECURE_REGISTRY_DEPRECATION_WARNING.format('push()'),
- DeprecationWarning
- )
-
if not tag:
repository, tag = utils.parse_repository_tag(repository)
registry, repo_name = auth.resolve_repository_name(repository)
@@ -446,14 +421,13 @@ class ImageApiMixin(object):
}
headers = {}
- if utils.compare_version('1.5', self._version) >= 0:
- if auth_config is None:
- header = auth.get_config_header(self, registry)
- if header:
- headers['X-Registry-Auth'] = header
- else:
- log.debug('Sending supplied auth config')
- headers['X-Registry-Auth'] = auth.encode_header(auth_config)
+ if auth_config is None:
+ header = auth.get_config_header(self, registry)
+ if header:
+ headers['X-Registry-Auth'] = header
+ else:
+ log.debug('Sending supplied auth config')
+ headers['X-Registry-Auth'] = auth.encode_header(auth_config)
response = self._post_json(
u, None, headers=headers, stream=stream, params=params
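
On the image side, ``pull()`` and ``push()`` drop ``insecure_registry``, ``pull()`` accepts ``platform``, and ``get_image()`` now yields raw archive chunks. A brief sketch of a platform-specific pull followed by a streamed save (repository and file names are illustrative):

    import docker

    client = docker.APIClient()
    for status in client.pull('busybox', tag='latest', stream=True,
                              decode=True, platform='linux/arm64'):
        print(status.get('status', ''))

    with open('/tmp/busybox-latest.tar', 'wb') as f:
        for chunk in client.get_image('busybox:latest'):
            f.write(chunk)
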
diff --git a/docker/api/network.py b/docker/api/network.py
index 7977808..57ed8d3 100644
--- a/docker/api/network.py
+++ b/docker/api/network.py
@@ -5,7 +5,6 @@ from .. import utils
class NetworkApiMixin(object):
- @minimum_version('1.21')
def networks(self, names=None, ids=None, filters=None):
"""
List networks. Similar to the ``docker networks ls`` command.
@@ -38,7 +37,6 @@ class NetworkApiMixin(object):
res = self._get(url, params=params)
return self._result(res, json=True)
- @minimum_version('1.21')
def create_network(self, name, driver=None, options=None, ipam=None,
check_duplicate=None, internal=False, labels=None,
enable_ipv6=False, attachable=None, scope=None,
@@ -175,7 +173,6 @@ class NetworkApiMixin(object):
url = self._url('/networks/prune')
return self._result(self._post(url, params=params), True)
- @minimum_version('1.21')
@check_resource('net_id')
def remove_network(self, net_id):
"""
@@ -188,7 +185,6 @@ class NetworkApiMixin(object):
res = self._delete(url)
self._raise_for_status(res)
- @minimum_version('1.21')
@check_resource('net_id')
def inspect_network(self, net_id, verbose=None, scope=None):
"""
@@ -216,7 +212,6 @@ class NetworkApiMixin(object):
return self._result(res, json=True)
@check_resource('container')
- @minimum_version('1.21')
def connect_container_to_network(self, container, net_id,
ipv4_address=None, ipv6_address=None,
aliases=None, links=None,
@@ -253,7 +248,6 @@ class NetworkApiMixin(object):
self._raise_for_status(res)
@check_resource('container')
- @minimum_version('1.21')
def disconnect_container_from_network(self, container, net_id,
force=False):
"""
diff --git a/docker/api/service.py b/docker/api/service.py
index 86f4b07..ceae8fc 100644
--- a/docker/api/service.py
+++ b/docker/api/service.py
@@ -1,9 +1,8 @@
-import warnings
from .. import auth, errors, utils
from ..types import ServiceMode
-def _check_api_features(version, task_template, update_config):
+def _check_api_features(version, task_template, update_config, endpoint_spec):
def raise_version_error(param, min_version):
raise errors.InvalidVersion(
@@ -23,6 +22,11 @@ def _check_api_features(version, task_template, update_config):
if 'Order' in update_config:
raise_version_error('UpdateConfig.order', '1.29')
+ if endpoint_spec is not None:
+ if utils.version_lt(version, '1.32') and 'Ports' in endpoint_spec:
+ if any(p.get('PublishMode') for p in endpoint_spec['Ports']):
+ raise_version_error('EndpointSpec.Ports[].mode', '1.32')
+
if task_template is not None:
if 'ForceUpdate' in task_template and utils.version_lt(
version, '1.25'):
@@ -65,6 +69,10 @@ def _check_api_features(version, task_template, update_config):
if container_spec.get('Privileges') is not None:
raise_version_error('ContainerSpec.privileges', '1.30')
+ if utils.version_lt(version, '1.35'):
+ if container_spec.get('Isolation') is not None:
+ raise_version_error('ContainerSpec.isolation', '1.35')
+
def _merge_task_template(current, override):
merged = current.copy()
@@ -114,14 +122,10 @@ class ServiceApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
- if endpoint_config is not None:
- warnings.warn(
- 'endpoint_config has been renamed to endpoint_spec.',
- DeprecationWarning
- )
- endpoint_spec = endpoint_config
- _check_api_features(self._version, task_template, update_config)
+ _check_api_features(
+ self._version, task_template, update_config, endpoint_spec
+ )
url = self._url('/services/create')
headers = {}
@@ -137,6 +141,8 @@ class ServiceApiMixin(object):
auth_header = auth.get_config_header(self, registry)
if auth_header:
headers['X-Registry-Auth'] = auth_header
+ if utils.version_lt(self._version, '1.25'):
+ networks = networks or task_template.pop('Networks', None)
data = {
'Name': name,
'Labels': labels,
@@ -357,14 +363,10 @@ class ServiceApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
- if endpoint_config is not None:
- warnings.warn(
- 'endpoint_config has been renamed to endpoint_spec.',
- DeprecationWarning
- )
- endpoint_spec = endpoint_config
- _check_api_features(self._version, task_template, update_config)
+ _check_api_features(
+ self._version, task_template, update_config, endpoint_spec
+ )
if fetch_current_spec:
inspect_defaults = True
@@ -411,7 +413,12 @@ class ServiceApiMixin(object):
if networks is not None:
converted_networks = utils.convert_service_networks(networks)
- data['TaskTemplate']['Networks'] = converted_networks
+ if utils.version_lt(self._version, '1.25'):
+ data['Networks'] = converted_networks
+ else:
+ data['TaskTemplate']['Networks'] = converted_networks
+ elif utils.version_lt(self._version, '1.25'):
+ data['Networks'] = current.get('Networks')
elif data['TaskTemplate'].get('Networks') is None:
current_task_template = current.get('TaskTemplate', {})
current_networks = current_task_template.get('Networks')
diff --git a/docker/api/volume.py b/docker/api/volume.py
index ce911c8..900a608 100644
--- a/docker/api/volume.py
+++ b/docker/api/volume.py
@@ -3,7 +3,6 @@ from .. import utils
class VolumeApiMixin(object):
- @utils.minimum_version('1.21')
def volumes(self, filters=None):
"""
List volumes currently registered by the docker daemon. Similar to the
@@ -37,7 +36,6 @@ class VolumeApiMixin(object):
url = self._url('/volumes')
return self._result(self._get(url, params=params), True)
- @utils.minimum_version('1.21')
def create_volume(self, name=None, driver=None, driver_opts=None,
labels=None):
"""
@@ -90,7 +88,6 @@ class VolumeApiMixin(object):
return self._result(self._post_json(url, data=data), True)
- @utils.minimum_version('1.21')
def inspect_volume(self, name):
"""
Retrieve volume info by name.
@@ -138,7 +135,6 @@ class VolumeApiMixin(object):
url = self._url('/volumes/prune')
return self._result(self._post(url, params=params), True)
- @utils.minimum_version('1.21')
def remove_volume(self, name, force=False):
"""
Remove a volume. Similar to the ``docker volume rm`` command.
diff --git a/docker/auth.py b/docker/auth.py
index c0cae5d..91be2b8 100644
--- a/docker/auth.py
+++ b/docker/auth.py
@@ -1,18 +1,15 @@
import base64
import json
import logging
-import os
import dockerpycreds
import six
from . import errors
-from .constants import IS_WINDOWS_PLATFORM
+from .utils import config
INDEX_NAME = 'docker.io'
INDEX_URL = 'https://index.{0}/v1/'.format(INDEX_NAME)
-DOCKER_CONFIG_FILENAME = os.path.join('.docker', 'config.json')
-LEGACY_DOCKER_CONFIG_FILENAME = '.dockercfg'
TOKEN_USERNAME = '<token>'
log = logging.getLogger(__name__)
@@ -101,14 +98,15 @@ def resolve_authconfig(authconfig, registry=None):
registry = resolve_index_name(registry) if registry else INDEX_NAME
log.debug("Looking for auth entry for {0}".format(repr(registry)))
- if registry in authconfig:
+ authdict = authconfig.get('auths', {})
+ if registry in authdict:
log.debug("Found {0}".format(repr(registry)))
- return authconfig[registry]
+ return authdict[registry]
- for key, config in six.iteritems(authconfig):
+ for key, conf in six.iteritems(authdict):
if resolve_index_name(key) == registry:
log.debug("Found {0}".format(repr(key)))
- return config
+ return conf
log.debug("No entry found")
return None
@@ -223,45 +221,7 @@ def parse_auth(entries, raise_on_error=False):
return conf
-def find_config_file(config_path=None):
- paths = list(filter(None, [
- config_path, # 1
- config_path_from_environment(), # 2
- os.path.join(home_dir(), DOCKER_CONFIG_FILENAME), # 3
- os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4
- ]))
-
- log.debug("Trying paths: {0}".format(repr(paths)))
-
- for path in paths:
- if os.path.exists(path):
- log.debug("Found file at path: {0}".format(path))
- return path
-
- log.debug("No config file found")
-
- return None
-
-
-def config_path_from_environment():
- config_dir = os.environ.get('DOCKER_CONFIG')
- if not config_dir:
- return None
- return os.path.join(config_dir, os.path.basename(DOCKER_CONFIG_FILENAME))
-
-
-def home_dir():
- """
- Get the user's home directory, using the same logic as the Docker Engine
- client - use %USERPROFILE% on Windows, $HOME/getuid on POSIX.
- """
- if IS_WINDOWS_PLATFORM:
- return os.environ.get('USERPROFILE', '')
- else:
- return os.path.expanduser('~')
-
-
-def load_config(config_path=None):
+def load_config(config_path=None, config_dict=None):
"""
Loads authentication data from a Docker configuration file in the given
root directory or if config_path is passed use given path.
@@ -269,39 +229,45 @@ def load_config(config_path=None):
explicit config_path parameter > DOCKER_CONFIG environment variable >
~/.docker/config.json > ~/.dockercfg
"""
- config_file = find_config_file(config_path)
- if not config_file:
- return {}
+ if not config_dict:
+ config_file = config.find_config_file(config_path)
+
+ if not config_file:
+ return {}
+ try:
+ with open(config_file) as f:
+ config_dict = json.load(f)
+ except (IOError, KeyError, ValueError) as e:
+ # Likely missing new Docker config file or it's in an
+ # unknown format, continue to attempt to read old location
+ # and format.
+ log.debug(e)
+ return _load_legacy_config(config_file)
+
+ res = {}
+ if config_dict.get('auths'):
+ log.debug("Found 'auths' section")
+ res.update({
+ 'auths': parse_auth(config_dict.pop('auths'), raise_on_error=True)
+ })
+ if config_dict.get('credsStore'):
+ log.debug("Found 'credsStore' section")
+ res.update({'credsStore': config_dict.pop('credsStore')})
+ if config_dict.get('credHelpers'):
+ log.debug("Found 'credHelpers' section")
+ res.update({'credHelpers': config_dict.pop('credHelpers')})
+ if res:
+ return res
+
+ log.debug(
+ "Couldn't find auth-related section ; attempting to interpret"
+ "as auth-only file"
+ )
+ return parse_auth(config_dict)
- try:
- with open(config_file) as f:
- data = json.load(f)
- res = {}
- if data.get('auths'):
- log.debug("Found 'auths' section")
- res.update(parse_auth(data['auths'], raise_on_error=True))
- if data.get('HttpHeaders'):
- log.debug("Found 'HttpHeaders' section")
- res.update({'HttpHeaders': data['HttpHeaders']})
- if data.get('credsStore'):
- log.debug("Found 'credsStore' section")
- res.update({'credsStore': data['credsStore']})
- if data.get('credHelpers'):
- log.debug("Found 'credHelpers' section")
- res.update({'credHelpers': data['credHelpers']})
- if res:
- return res
- else:
- log.debug("Couldn't find 'auths' or 'HttpHeaders' sections")
- f.seek(0)
- return parse_auth(json.load(f))
- except (IOError, KeyError, ValueError) as e:
- # Likely missing new Docker config file or it's in an
- # unknown format, continue to attempt to read old location
- # and format.
- log.debug(e)
+def _load_legacy_config(config_file):
log.debug("Attempting to parse legacy auth file format")
try:
data = []
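
After this refactor the parsed configuration nests registry credentials under an ``auths`` key, and config-file discovery lives in ``docker.utils.config``. A small sketch of reading and resolving credentials with the new shape (the registry name is a placeholder):

    from docker import auth

    cfg = auth.load_config()   # e.g. {'auths': {...}, 'credsStore': '...'}
    entry = auth.resolve_authconfig(cfg, registry='registry.example.com')
    if entry:
        print('found credentials for', entry.get('username'))
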
diff --git a/docker/constants.py b/docker/constants.py
index 6de8fad..9ab3673 100644
--- a/docker/constants.py
+++ b/docker/constants.py
@@ -1,7 +1,7 @@
import sys
from .version import version
-DEFAULT_DOCKER_API_VERSION = '1.30'
+DEFAULT_DOCKER_API_VERSION = '1.35'
MINIMUM_DOCKER_API_VERSION = '1.21'
DEFAULT_TIMEOUT_SECONDS = 60
STREAM_HEADER_SIZE_BYTES = 8
diff --git a/docker/errors.py b/docker/errors.py
index 50423a2..0253695 100644
--- a/docker/errors.py
+++ b/docker/errors.py
@@ -140,7 +140,14 @@ class StreamParseError(RuntimeError):
self.msg = reason
-class BuildError(Exception):
+class BuildError(DockerException):
+ def __init__(self, reason, build_log):
+ super(BuildError, self).__init__(reason)
+ self.msg = reason
+ self.build_log = build_log
+
+
+class ImageLoadError(DockerException):
pass
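
``BuildError`` is now a ``DockerException`` and carries the build log, so callers of the high-level build can report the daemon output on failure. A hedged sketch (path and tag are placeholders):

    import docker
    from docker.errors import BuildError

    client = docker.from_env()
    try:
        image, logs = client.images.build(path='.', tag='example/app:latest')
    except BuildError as err:
        print('build failed:', err.msg)
        for entry in err.build_log:
            if 'stream' in entry:
                print(entry['stream'], end='')
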
diff --git a/docker/models/containers.py b/docker/models/containers.py
index 6ba308e..107a020 100644
--- a/docker/models/containers.py
+++ b/docker/models/containers.py
@@ -1,4 +1,6 @@
import copy
+import ntpath
+from collections import namedtuple
from ..api import APIClient
from ..errors import (ContainerError, ImageNotFound,
@@ -126,7 +128,7 @@ class Container(Model):
def exec_run(self, cmd, stdout=True, stderr=True, stdin=False, tty=False,
privileged=False, user='', detach=False, stream=False,
- socket=False, environment=None):
+ socket=False, environment=None, workdir=None):
"""
Run a command inside this container. Similar to
``docker exec``.
@@ -147,23 +149,37 @@ class Container(Model):
environment (dict or list): A dictionary or a list of strings in
the following format ``["PASSWORD=xxx"]`` or
``{"PASSWORD": "xxx"}``.
+ workdir (str): Path to working directory for this exec session
Returns:
- (generator or str):
- If ``stream=True``, a generator yielding response chunks.
- If ``socket=True``, a socket object for the connection.
- A string containing response data otherwise.
+ (ExecResult): A tuple of (exit_code, output)
+ exit_code: (int):
+ Exit code for the executed command or ``None`` if
+                    either ``stream`` or ``socket`` is ``True``.
+ output: (generator or str):
+ If ``stream=True``, a generator yielding response chunks.
+ If ``socket=True``, a socket object for the connection.
+ A string containing response data otherwise.
+
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
resp = self.client.api.exec_create(
self.id, cmd, stdout=stdout, stderr=stderr, stdin=stdin, tty=tty,
- privileged=privileged, user=user, environment=environment
+ privileged=privileged, user=user, environment=environment,
+ workdir=workdir
)
- return self.client.api.exec_start(
+ exec_output = self.client.api.exec_start(
resp['Id'], detach=detach, tty=tty, stream=stream, socket=socket
)
+ if socket or stream:
+ return ExecResult(None, exec_output)
+
+ return ExecResult(
+ self.client.api.exec_inspect(resp['Id'])['ExitCode'],
+ exec_output
+ )
def export(self):
"""
@@ -228,6 +244,8 @@ class Container(Model):
since (datetime or int): Show logs since a given datetime or
integer epoch (in seconds)
follow (bool): Follow log output
+ until (datetime or int): Show logs that occurred before the given
+ datetime or integer epoch (in seconds)
Returns:
(generator or str): Logs from the container.
@@ -427,10 +445,13 @@ class Container(Model):
Args:
timeout (int): Request timeout
+ condition (str): Wait until a container state reaches the given
+ condition, either ``not-running`` (default), ``next-exit``,
+ or ``removed``
Returns:
- (int): The exit code of the container. Returns ``-1`` if the API
- responds without a ``StatusCode`` attribute.
+ (dict): The API's response as a Python dictionary, including
+ the container's exit code under the ``StatusCode`` attribute.
Raises:
:py:class:`requests.exceptions.ReadTimeout`
@@ -557,7 +578,7 @@ class ContainerCollection(Collection):
item in the list is expected to be a
:py:class:`docker.types.Mount` object.
name (str): The name for this container.
- nano_cpus (int): CPU quota in units of 10-9 CPUs.
+ nano_cpus (int): CPU quota in units of 1e-9 CPUs.
network (str): Name of the network this container will be connected
to at creation time. You can connect to additional networks
using :py:meth:`Network.connect`. Incompatible with
@@ -571,6 +592,7 @@ class ContainerCollection(Collection):
- ``container:<name|id>`` Reuse another container's network
stack.
- ``host`` Use the host network stack.
+
Incompatible with ``network``.
oom_kill_disable (bool): Whether to disable OOM killer.
oom_score_adj (int): An integer value containing the score given
@@ -579,6 +601,8 @@ class ContainerCollection(Collection):
inside the container.
pids_limit (int): Tune a container's pids limit. Set ``-1`` for
unlimited.
+ platform (str): Platform in the format ``os[/arch[/variant]]``.
+ Only used if the method needs to pull the requested image.
ports (dict): Ports to bind inside the container.
The keys of the dictionary are the ports to bind inside the
@@ -700,7 +724,9 @@ class ContainerCollection(Collection):
if isinstance(image, Image):
image = image.id
stream = kwargs.pop('stream', False)
- detach = kwargs.pop("detach", False)
+ detach = kwargs.pop('detach', False)
+ platform = kwargs.pop('platform', None)
+
if detach and remove:
if version_gte(self.client.api._version, '1.25'):
kwargs["auto_remove"] = True
@@ -718,7 +744,7 @@ class ContainerCollection(Collection):
container = self.create(image=image, command=command,
detach=detach, **kwargs)
except ImageNotFound:
- self.client.images.pull(image)
+ self.client.images.pull(image, platform=platform)
container = self.create(image=image, command=command,
detach=detach, **kwargs)
@@ -735,7 +761,7 @@ class ContainerCollection(Collection):
stdout=stdout, stderr=stderr, stream=True, follow=True
)
- exit_status = container.wait()
+ exit_status = container.wait()['StatusCode']
if exit_status != 0:
out = None
if not kwargs.get('auto_remove'):
@@ -973,17 +999,27 @@ def _create_container_args(kwargs):
# sort to make consistent for tests
create_kwargs['ports'] = [tuple(p.split('/', 1))
for p in sorted(port_bindings.keys())]
- binds = create_kwargs['host_config'].get('Binds')
- if binds:
- create_kwargs['volumes'] = [_host_volume_from_bind(v) for v in binds]
+ if volumes:
+ if isinstance(volumes, dict):
+ create_kwargs['volumes'] = [
+ v.get('bind') for v in volumes.values()
+ ]
+ else:
+ create_kwargs['volumes'] = [
+ _host_volume_from_bind(v) for v in volumes
+ ]
return create_kwargs
def _host_volume_from_bind(bind):
- bits = bind.split(':')
- if len(bits) == 1:
- return bits[0]
- elif len(bits) == 2 and bits[1] in ('ro', 'rw'):
- return bits[0]
+ drive, rest = ntpath.splitdrive(bind)
+ bits = rest.split(':', 1)
+ if len(bits) == 1 or bits[1] in ('ro', 'rw'):
+ return drive + bits[0]
else:
- return bits[1]
+ return bits[1].rstrip(':ro').rstrip(':rw')
+
+
+ExecResult = namedtuple('ExecResult', 'exit_code,output')
+""" A result of Container.exec_run with the properties ``exit_code`` and
+ ``output``. """
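
With the change above, ``Container.exec_run()`` returns an ``ExecResult`` namedtuple and ``Container.wait()`` mirrors the low-level API by returning a dict. A short sketch on a disposable container (image and command are illustrative):

    import docker

    client = docker.from_env()
    container = client.containers.run('alpine', ['sleep', '30'], detach=True)

    exit_code, output = container.exec_run(['pwd'], workdir='/etc')
    print(exit_code, output.decode())      # 0 /etc

    container.stop()
    result = container.wait()              # dict, not an int
    print('exit code:', result['StatusCode'])
    container.remove()
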
diff --git a/docker/models/images.py b/docker/models/images.py
index 82ca541..0f3c71a 100644
--- a/docker/models/images.py
+++ b/docker/models/images.py
@@ -1,9 +1,11 @@
+import itertools
import re
import six
from ..api import APIClient
-from ..errors import BuildError
+from ..errors import BuildError, ImageLoadError
+from ..utils import parse_repository_tag
from ..utils.json_stream import json_stream
from .resource import Collection, Model
@@ -61,8 +63,7 @@ class Image(Model):
Get a tarball of an image. Similar to the ``docker save`` command.
Returns:
- (urllib3.response.HTTPResponse object): The response from the
- daemon.
+ (generator): A stream of raw archive data.
Raises:
:py:class:`docker.errors.APIError`
@@ -70,11 +71,10 @@ class Image(Model):
Example:
- >>> image = cli.images.get("fedora:latest")
- >>> resp = image.save()
- >>> f = open('/tmp/fedora-latest.tar', 'w')
- >>> for chunk in resp.stream():
- >>> f.write(chunk)
+ >>> image = cli.get_image("busybox:latest")
+ >>> f = open('/tmp/busybox-latest.tar', 'w')
+ >>> for chunk in image:
+ >>> f.write(chunk)
>>> f.close()
"""
return self.client.api.get_image(self.id)
@@ -157,9 +157,12 @@ class ImageCollection(Collection):
single layer.
extra_hosts (dict): Extra hosts to add to /etc/hosts in building
containers, as a mapping of hostname to IP address.
+ platform (str): Platform in the format ``os[/arch[/variant]]``.
Returns:
- (:py:class:`Image`): The built image.
+ (tuple): The first item is the :py:class:`Image` object for the
+ image that was built. The second item is a generator of the
+ build logs as JSON-decoded objects.
Raises:
:py:class:`docker.errors.BuildError`
@@ -174,9 +177,10 @@ class ImageCollection(Collection):
return self.get(resp)
last_event = None
image_id = None
- for chunk in json_stream(resp):
+ result_stream, internal_stream = itertools.tee(json_stream(resp))
+ for chunk in internal_stream:
if 'error' in chunk:
- raise BuildError(chunk['error'])
+ raise BuildError(chunk['error'], result_stream)
if 'stream' in chunk:
match = re.search(
r'(^Successfully built |sha256:)([0-9a-f]+)$',
@@ -186,8 +190,8 @@ class ImageCollection(Collection):
image_id = match.group(2)
last_event = chunk
if image_id:
- return self.get(image_id)
- raise BuildError(last_event or 'Unknown')
+ return (self.get(image_id), result_stream)
+ raise BuildError(last_event or 'Unknown', result_stream)
def get(self, name):
"""
@@ -240,18 +244,34 @@ class ImageCollection(Collection):
data (binary): Image data to be loaded.
Returns:
- (generator): Progress output as JSON objects
+ (list of :py:class:`Image`): The images.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
- return self.client.api.load_image(data)
+ resp = self.client.api.load_image(data)
+ images = []
+ for chunk in resp:
+ if 'stream' in chunk:
+ match = re.search(
+ r'(^Loaded image ID: |^Loaded image: )(.+)$',
+ chunk['stream']
+ )
+ if match:
+ image_id = match.group(2)
+ images.append(image_id)
+ if 'error' in chunk:
+ raise ImageLoadError(chunk['error'])
+
+ return [self.get(i) for i in images]
- def pull(self, name, tag=None, **kwargs):
+ def pull(self, repository, tag=None, **kwargs):
"""
Pull an image of the given name and return it. Similar to the
``docker pull`` command.
+ If no tag is specified, all tags from that repository will be
+ pulled.
If you want to get the raw pull output, use the
:py:meth:`~docker.api.image.ImageApiMixin.pull` method in the
@@ -260,14 +280,16 @@ class ImageCollection(Collection):
Args:
name (str): The repository to pull
tag (str): The tag to pull
- insecure_registry (bool): Use an insecure registry
auth_config (dict): Override the credentials that
:py:meth:`~docker.client.DockerClient.login` has set for
this request. ``auth_config`` should contain the ``username``
and ``password`` keys to be valid.
+ platform (str): Platform in the format ``os[/arch[/variant]]``
Returns:
- (:py:class:`Image`): The image that has been pulled.
+ (:py:class:`Image` or list): The image that has been pulled.
+ If no ``tag`` was specified, the method will return a list
+ of :py:class:`Image` objects belonging to this repository.
Raises:
:py:class:`docker.errors.APIError`
@@ -275,10 +297,19 @@ class ImageCollection(Collection):
Example:
- >>> image = client.images.pull('busybox')
+ >>> # Pull the image tagged `latest` in the busybox repo
+ >>> image = client.images.pull('busybox:latest')
+
+ >>> # Pull all tags in the busybox repo
+ >>> images = client.images.pull('busybox')
"""
- self.client.api.pull(name, tag=tag, **kwargs)
- return self.get('{0}:{1}'.format(name, tag) if tag else name)
+ if not tag:
+ repository, tag = parse_repository_tag(repository)
+
+ self.client.api.pull(repository, tag=tag, **kwargs)
+ if tag:
+ return self.get('{0}:{1}'.format(repository, tag))
+ return self.list(repository)
def push(self, repository, tag=None, **kwargs):
return self.client.api.push(repository, tag=tag, **kwargs)
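With the images.py changes above, `images.build()` returns an `(Image, build_logs)` tuple and `images.pull()` without a tag returns a list. A rough sketch, assuming a Dockerfile exists in the current directory (the tag name here is arbitrary):

    import docker

    client = docker.from_env()

    # build() now returns a (Image, build_logs) tuple instead of just the Image
    image, build_logs = client.images.build(path='.', tag='myimage:latest')
    for chunk in build_logs:
        if 'stream' in chunk:
            print(chunk['stream'], end='')

    # pull() returns a single Image when a tag is given, and a list otherwise
    latest = client.images.pull('busybox:latest')
    all_tags = client.images.pull('busybox')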
diff --git a/docker/models/services.py b/docker/models/services.py
index 009e455..8a633df 100644
--- a/docker/models/services.py
+++ b/docker/models/services.py
@@ -1,6 +1,6 @@
import copy
-from docker.errors import create_unexpected_kwargs_error
-from docker.types import TaskTemplate, ContainerSpec
+from docker.errors import create_unexpected_kwargs_error, InvalidArgument
+from docker.types import TaskTemplate, ContainerSpec, ServiceMode
from .resource import Model, Collection
@@ -105,6 +105,25 @@ class Service(Model):
)
return self.client.api.service_logs(self.id, is_tty=is_tty, **kwargs)
+ def scale(self, replicas):
+ """
+ Scale service container.
+
+ Args:
+ replicas (int): The number of containers that should be running.
+
+ Returns:
+ ``True`` if successful.
+ """
+
+ if 'Global' in self.attrs['Spec']['Mode'].keys():
+ raise InvalidArgument('Cannot scale a global container')
+
+ service_mode = ServiceMode('replicated', replicas)
+ return self.client.api.update_service(self.id, self.version,
+ service_mode,
+ fetch_current_spec=True)
+
class ServiceCollection(Collection):
"""Services on the Docker server."""
@@ -125,6 +144,8 @@ class ServiceCollection(Collection):
env (list of str): Environment variables, in the form
``KEY=val``.
hostname (string): Hostname to set on the container.
+ isolation (string): Isolation technology used by the service's
+ containers. Only used for Windows containers.
labels (dict): Labels to apply to the service.
log_driver (str): Log driver to use for containers.
log_driver_options (dict): Log driver options.
@@ -236,6 +257,7 @@ CONTAINER_SPEC_KWARGS = [
'hostname',
'hosts',
'image',
+ 'isolation',
'labels',
'mounts',
'open_stdin',
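The new `Service.scale()` helper shown above is a shorthand around `update_service` with a replicated mode spec. A rough sketch, assuming the daemon is already part of a swarm and can pull `nginx:alpine` (the service name is arbitrary):

    import docker

    client = docker.from_env()
    service = client.services.create('nginx:alpine', name='web')

    # scale() only applies to replicated services; global services raise
    # InvalidArgument
    service.scale(3)
    service.reload()
    print(service.attrs['Spec']['Mode'])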
diff --git a/docker/tls.py b/docker/tls.py
index 6488bbc..4900e9f 100644
--- a/docker/tls.py
+++ b/docker/tls.py
@@ -37,13 +37,33 @@ class TLSConfig(object):
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
- # TLS v1.0 seems to be the safest default; SSLv23 fails in mysterious
- # ways: https://github.com/docker/docker-py/issues/963
-
- self.ssl_version = ssl_version or ssl.PROTOCOL_TLSv1
-
- # "tls" and "tls_verify" must have both or neither cert/key files
- # In either case, Alert the user when both are expected, but any are
+ # TODO(dperny): according to the python docs, PROTOCOL_TLSvWhatever is
+ # deprecated, and it's recommended to use OPT_NO_TLSvWhatever instead
+ # to exclude versions. But I think that might require a bigger
+ # architectural change, so I've opted not to pursue it at this time
+
+ # If the user provides an SSL version, we should use their preference
+ if ssl_version:
+ self.ssl_version = ssl_version
+ else:
+ # If the user provides no ssl version, we should default to
+ # TLSv1_2. This option is the most secure, and will work for the
+ # majority of users with reasonably up-to-date software. However,
+ # before doing so, detect openssl version to ensure we can support
+ # it.
+ if ssl.OPENSSL_VERSION_INFO[:3] >= (1, 0, 1) and hasattr(
+ ssl, 'PROTOCOL_TLSv1_2'):
+ # If the OpenSSL version is high enough to support TLSv1_2,
+ # then we should use it.
+ self.ssl_version = getattr(ssl, 'PROTOCOL_TLSv1_2')
+ else:
+ # Otherwise, TLS v1.0 seems to be the safest default;
+ # SSLv23 fails in mysterious ways:
+ # https://github.com/docker/docker-py/issues/963
+ self.ssl_version = ssl.PROTOCOL_TLSv1
+
+ # "tls" and "tls_verify" must have both or neither cert/key files In
+ # either case, Alert the user when both are expected, but any are
# missing.
if client_cert:
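The `TLSConfig` change above only affects the default protocol selection; an explicit `ssl_version` still wins. A small sketch of both paths (no daemon connection is made):

    import ssl
    from docker.tls import TLSConfig

    # Without ssl_version, the constructor now prefers PROTOCOL_TLSv1_2 when
    # the local OpenSSL supports it, falling back to PROTOCOL_TLSv1 otherwise
    default_tls = TLSConfig(verify=False)
    print(default_tls.ssl_version)

    # An explicit preference still takes priority over the new default
    legacy_tls = TLSConfig(verify=False, ssl_version=ssl.PROTOCOL_TLSv1)
    print(legacy_tls.ssl_version)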
diff --git a/docker/types/containers.py b/docker/types/containers.py
index 15dd86c..b4a329c 100644
--- a/docker/types/containers.py
+++ b/docker/types/containers.py
@@ -1,5 +1,4 @@
import six
-import warnings
from .. import errors
from ..utils.utils import (
@@ -130,20 +129,12 @@ class HostConfig(dict):
self['MemorySwap'] = parse_bytes(memswap_limit)
if mem_reservation:
- if version_lt(version, '1.21'):
- raise host_config_version_error('mem_reservation', '1.21')
-
self['MemoryReservation'] = parse_bytes(mem_reservation)
if kernel_memory:
- if version_lt(version, '1.21'):
- raise host_config_version_error('kernel_memory', '1.21')
-
self['KernelMemory'] = parse_bytes(kernel_memory)
if mem_swappiness is not None:
- if version_lt(version, '1.20'):
- raise host_config_version_error('mem_swappiness', '1.20')
if not isinstance(mem_swappiness, int):
raise host_config_type_error(
'mem_swappiness', mem_swappiness, 'int'
@@ -169,9 +160,6 @@ class HostConfig(dict):
self['Privileged'] = privileged
if oom_kill_disable:
- if version_lt(version, '1.20'):
- raise host_config_version_error('oom_kill_disable', '1.19')
-
self['OomKillDisable'] = oom_kill_disable
if oom_score_adj:
@@ -194,7 +182,7 @@ class HostConfig(dict):
if network_mode:
self['NetworkMode'] = network_mode
- elif network_mode is None and version_gte(version, '1.20'):
+ elif network_mode is None:
self['NetworkMode'] = 'default'
if restart_policy:
@@ -215,18 +203,12 @@ class HostConfig(dict):
self['Devices'] = parse_devices(devices)
if group_add:
- if version_lt(version, '1.20'):
- raise host_config_version_error('group_add', '1.20')
-
self['GroupAdd'] = [six.text_type(grp) for grp in group_add]
if dns is not None:
self['Dns'] = dns
if dns_opt is not None:
- if version_lt(version, '1.21'):
- raise host_config_version_error('dns_opt', '1.21')
-
self['DnsOptions'] = dns_opt
if security_opt is not None:
@@ -299,38 +281,23 @@ class HostConfig(dict):
if cpu_quota:
if not isinstance(cpu_quota, int):
raise host_config_type_error('cpu_quota', cpu_quota, 'int')
- if version_lt(version, '1.19'):
- raise host_config_version_error('cpu_quota', '1.19')
-
self['CpuQuota'] = cpu_quota
if cpu_period:
if not isinstance(cpu_period, int):
raise host_config_type_error('cpu_period', cpu_period, 'int')
- if version_lt(version, '1.19'):
- raise host_config_version_error('cpu_period', '1.19')
-
self['CpuPeriod'] = cpu_period
if cpu_shares:
- if version_lt(version, '1.18'):
- raise host_config_version_error('cpu_shares', '1.18')
-
if not isinstance(cpu_shares, int):
raise host_config_type_error('cpu_shares', cpu_shares, 'int')
self['CpuShares'] = cpu_shares
if cpuset_cpus:
- if version_lt(version, '1.18'):
- raise host_config_version_error('cpuset_cpus', '1.18')
-
self['CpusetCpus'] = cpuset_cpus
if cpuset_mems:
- if version_lt(version, '1.19'):
- raise host_config_version_error('cpuset_mems', '1.19')
-
if not isinstance(cpuset_mems, str):
raise host_config_type_error(
'cpuset_mems', cpuset_mems, 'str'
@@ -463,8 +430,6 @@ class HostConfig(dict):
self['InitPath'] = init_path
if volume_driver is not None:
- if version_lt(version, '1.21'):
- raise host_config_version_error('volume_driver', '1.21')
self['VolumeDriver'] = volume_driver
if cpu_count:
@@ -521,67 +486,12 @@ def host_config_value_error(param, param_value):
class ContainerConfig(dict):
def __init__(
self, version, image, command, hostname=None, user=None, detach=False,
- stdin_open=False, tty=False, mem_limit=None, ports=None, dns=None,
- environment=None, volumes=None, volumes_from=None,
- network_disabled=False, entrypoint=None, cpu_shares=None,
- working_dir=None, domainname=None, memswap_limit=None, cpuset=None,
- host_config=None, mac_address=None, labels=None, volume_driver=None,
- stop_signal=None, networking_config=None, healthcheck=None,
- stop_timeout=None, runtime=None
+ stdin_open=False, tty=False, ports=None, environment=None,
+ volumes=None, network_disabled=False, entrypoint=None,
+ working_dir=None, domainname=None, host_config=None, mac_address=None,
+ labels=None, stop_signal=None, networking_config=None,
+ healthcheck=None, stop_timeout=None, runtime=None
):
- if version_gte(version, '1.10'):
- message = ('{0!r} parameter has no effect on create_container().'
- ' It has been moved to host_config')
- if dns is not None:
- raise errors.InvalidVersion(message.format('dns'))
- if volumes_from is not None:
- raise errors.InvalidVersion(message.format('volumes_from'))
-
- if version_lt(version, '1.18'):
- if labels is not None:
- raise errors.InvalidVersion(
- 'labels were only introduced in API version 1.18'
- )
- else:
- if cpuset is not None or cpu_shares is not None:
- warnings.warn(
- 'The cpuset_cpus and cpu_shares options have been moved to'
- ' host_config in API version 1.18, and will be removed',
- DeprecationWarning
- )
-
- if version_lt(version, '1.19'):
- if volume_driver is not None:
- raise errors.InvalidVersion(
- 'Volume drivers were only introduced in API version 1.19'
- )
- mem_limit = mem_limit if mem_limit is not None else 0
- memswap_limit = memswap_limit if memswap_limit is not None else 0
- else:
- if mem_limit is not None:
- raise errors.InvalidVersion(
- 'mem_limit has been moved to host_config in API version'
- ' 1.19'
- )
-
- if memswap_limit is not None:
- raise errors.InvalidVersion(
- 'memswap_limit has been moved to host_config in API '
- 'version 1.19'
- )
-
- if version_lt(version, '1.21'):
- if stop_signal is not None:
- raise errors.InvalidVersion(
- 'stop_signal was only introduced in API version 1.21'
- )
- else:
- if volume_driver is not None:
- warnings.warn(
- 'The volume_driver option has been moved to'
- ' host_config in API version 1.21, and will be removed',
- DeprecationWarning
- )
if stop_timeout is not None and version_lt(version, '1.25'):
raise errors.InvalidVersion(
@@ -612,12 +522,6 @@ class ContainerConfig(dict):
if isinstance(labels, list):
labels = dict((lbl, six.text_type('')) for lbl in labels)
- if mem_limit is not None:
- mem_limit = parse_bytes(mem_limit)
-
- if memswap_limit is not None:
- memswap_limit = parse_bytes(memswap_limit)
-
if isinstance(ports, list):
exposed_ports = {}
for port_definition in ports:
@@ -639,13 +543,6 @@ class ContainerConfig(dict):
volumes_dict[vol] = {}
volumes = volumes_dict
- if volumes_from:
- if not isinstance(volumes_from, six.string_types):
- volumes_from = ','.join(volumes_from)
- else:
- # Force None, an empty list or dict causes client.start to fail
- volumes_from = None
-
if healthcheck and isinstance(healthcheck, dict):
healthcheck = Healthcheck(**healthcheck)
@@ -670,28 +567,20 @@ class ContainerConfig(dict):
'Tty': tty,
'OpenStdin': stdin_open,
'StdinOnce': stdin_once,
- 'Memory': mem_limit,
'AttachStdin': attach_stdin,
'AttachStdout': attach_stdout,
'AttachStderr': attach_stderr,
'Env': environment,
'Cmd': command,
- 'Dns': dns,
'Image': image,
'Volumes': volumes,
- 'VolumesFrom': volumes_from,
'NetworkDisabled': network_disabled,
'Entrypoint': entrypoint,
- 'CpuShares': cpu_shares,
- 'Cpuset': cpuset,
- 'CpusetCpus': cpuset,
'WorkingDir': working_dir,
- 'MemorySwap': memswap_limit,
'HostConfig': host_config,
'NetworkingConfig': networking_config,
'MacAddress': mac_address,
'Labels': labels,
- 'VolumeDriver': volume_driver,
'StopSignal': stop_signal,
'Healthcheck': healthcheck,
'StopTimeout': stop_timeout,
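The `ContainerConfig` parameters removed above (`mem_limit`, `dns`, `volumes_from`, `cpu_shares`, ...) are now accepted only by `create_host_config()`. A minimal sketch with the low-level `APIClient`, assuming a local daemon and the `busybox` image:

    import docker

    api = docker.APIClient()

    # mem_limit, dns, volumes_from, cpu_shares, ... now all go through
    # create_host_config() rather than create_container()
    host_config = api.create_host_config(mem_limit='256m', dns=['8.8.8.8'])
    container = api.create_container('busybox', 'true', host_config=host_config)
    api.remove_container(container)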
diff --git a/docker/types/services.py b/docker/types/services.py
index 18d4d2a..d530e61 100644
--- a/docker/types/services.py
+++ b/docker/types/services.py
@@ -102,19 +102,21 @@ class ContainerSpec(dict):
healthcheck (Healthcheck): Healthcheck
configuration for this service.
hosts (:py:class:`dict`): A set of host to IP mappings to add to
- the container's `hosts` file.
+ the container's ``hosts`` file.
dns_config (DNSConfig): Specification for DNS
related configurations in resolver configuration file.
configs (:py:class:`list`): List of :py:class:`ConfigReference` that
will be exposed to the service.
privileges (Privileges): Security options for the service's containers.
+ isolation (string): Isolation technology used by the service's
+ containers. Only used for Windows containers.
"""
def __init__(self, image, command=None, args=None, hostname=None, env=None,
workdir=None, user=None, labels=None, mounts=None,
stop_grace_period=None, secrets=None, tty=None, groups=None,
open_stdin=None, read_only=None, stop_signal=None,
healthcheck=None, hosts=None, dns_config=None, configs=None,
- privileges=None):
+ privileges=None, isolation=None):
self['Image'] = image
if isinstance(command, six.string_types):
@@ -178,6 +180,9 @@ class ContainerSpec(dict):
if read_only is not None:
self['ReadOnly'] = read_only
+ if isolation is not None:
+ self['Isolation'] = isolation
+
class Mount(dict):
"""
@@ -444,9 +449,10 @@ class EndpointSpec(dict):
balancing between tasks (``'vip'`` or ``'dnsrr'``). Defaults to
``'vip'`` if not provided.
ports (dict): Exposed ports that this service is accessible on from the
- outside, in the form of ``{ target_port: published_port }`` or
- ``{ target_port: (published_port, protocol) }``. Ports can only be
- provided if the ``vip`` resolution mode is used.
+ outside, in the form of ``{ published_port: target_port }`` or
+ ``{ published_port: <port_config_tuple> }``. Port config tuple format
+ is ``(target_port [, protocol [, publish_mode]])``.
+ Ports can only be provided if the ``vip`` resolution mode is used.
"""
def __init__(self, mode=None, ports=None):
if ports:
@@ -472,8 +478,15 @@ def convert_service_ports(ports):
if isinstance(v, tuple):
port_spec['TargetPort'] = v[0]
- if len(v) == 2:
+ if len(v) >= 2 and v[1] is not None:
port_spec['Protocol'] = v[1]
+ if len(v) == 3:
+ port_spec['PublishMode'] = v[2]
+ if len(v) > 3:
+ raise ValueError(
+ 'Service port configuration can have at most 3 elements: '
+ '(target_port, protocol, mode)'
+ )
else:
port_spec['TargetPort'] = v
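The `EndpointSpec` docstring above documents the new port tuple form `(target_port[, protocol[, publish_mode]])`. A short sketch of the accepted shapes; the resulting spec would be passed to `services.create(..., endpoint_spec=...)`:

    from docker.types import EndpointSpec

    # {published_port: target_port} or
    # {published_port: (target_port, protocol, publish_mode)}; protocol and
    # publish mode may be omitted
    endpoint_spec = EndpointSpec(ports={
        8080: (80, 'tcp', 'host'),
        8443: 443,
    })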
diff --git a/docker/utils/__init__.py b/docker/utils/__init__.py
index c162e3b..e70a5e6 100644
--- a/docker/utils/__init__.py
+++ b/docker/utils/__init__.py
@@ -5,7 +5,7 @@ from .utils import (
compare_version, convert_port_bindings, convert_volume_binds,
mkbuildcontext, parse_repository_tag, parse_host,
kwargs_from_env, convert_filters, datetime_to_timestamp,
- create_host_config, parse_bytes, ping_registry, parse_env_file, version_lt,
+ create_host_config, parse_bytes, parse_env_file, version_lt,
version_gte, decode_json_header, split_command, create_ipam_config,
create_ipam_pool, parse_devices, normalize_links, convert_service_networks,
format_environment, create_archive, format_extra_hosts
diff --git a/docker/utils/config.py b/docker/utils/config.py
new file mode 100644
index 0000000..82a0e2a
--- /dev/null
+++ b/docker/utils/config.py
@@ -0,0 +1,66 @@
+import json
+import logging
+import os
+
+from ..constants import IS_WINDOWS_PLATFORM
+
+DOCKER_CONFIG_FILENAME = os.path.join('.docker', 'config.json')
+LEGACY_DOCKER_CONFIG_FILENAME = '.dockercfg'
+
+log = logging.getLogger(__name__)
+
+
+def find_config_file(config_path=None):
+ paths = list(filter(None, [
+ config_path, # 1
+ config_path_from_environment(), # 2
+ os.path.join(home_dir(), DOCKER_CONFIG_FILENAME), # 3
+ os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4
+ ]))
+
+ log.debug("Trying paths: {0}".format(repr(paths)))
+
+ for path in paths:
+ if os.path.exists(path):
+ log.debug("Found file at path: {0}".format(path))
+ return path
+
+ log.debug("No config file found")
+
+ return None
+
+
+def config_path_from_environment():
+ config_dir = os.environ.get('DOCKER_CONFIG')
+ if not config_dir:
+ return None
+ return os.path.join(config_dir, os.path.basename(DOCKER_CONFIG_FILENAME))
+
+
+def home_dir():
+ """
+ Get the user's home directory, using the same logic as the Docker Engine
+ client - use %USERPROFILE% on Windows, $HOME/getuid on POSIX.
+ """
+ if IS_WINDOWS_PLATFORM:
+ return os.environ.get('USERPROFILE', '')
+ else:
+ return os.path.expanduser('~')
+
+
+def load_general_config(config_path=None):
+ config_file = find_config_file(config_path)
+
+ if not config_file:
+ return {}
+
+ try:
+ with open(config_file) as f:
+ return json.load(f)
+ except (IOError, ValueError) as e:
+ # In the case of a legacy `.dockercfg` file, we won't
+ # be able to load any JSON data.
+ log.debug(e)
+
+ log.debug("All parsing attempts failed - returning empty config")
+ return {}
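The new `docker/utils/config.py` module above centralizes config-file discovery. A quick sketch of the two public helpers; the `HttpHeaders` key is only present if the user's `config.json` defines it:

    from docker.utils import config

    # Lookup order: explicit path, $DOCKER_CONFIG, ~/.docker/config.json,
    # then the legacy ~/.dockercfg; returns {} when nothing usable is found
    path = config.find_config_file()
    general = config.load_general_config()
    print(path, general.get('HttpHeaders'))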
diff --git a/docker/utils/decorators.py b/docker/utils/decorators.py
index 5e195c0..c975d4b 100644
--- a/docker/utils/decorators.py
+++ b/docker/utils/decorators.py
@@ -38,10 +38,10 @@ def minimum_version(version):
def update_headers(f):
def inner(self, *args, **kwargs):
- if 'HttpHeaders' in self._auth_configs:
+ if 'HttpHeaders' in self._general_configs:
if not kwargs.get('headers'):
- kwargs['headers'] = self._auth_configs['HttpHeaders']
+ kwargs['headers'] = self._general_configs['HttpHeaders']
else:
- kwargs['headers'].update(self._auth_configs['HttpHeaders'])
+ kwargs['headers'].update(self._general_configs['HttpHeaders'])
return f(self, *args, **kwargs)
return inner
diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index 2de995c..e4e2c0d 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -6,11 +6,9 @@ import json
import shlex
import tarfile
import tempfile
-import warnings
from distutils.version import StrictVersion
from datetime import datetime
-import requests
import six
from .. import constants
@@ -98,7 +96,8 @@ def create_archive(root, files=None, fileobj=None, gzip=False):
files = build_file_list(root)
for path in files:
full_path = os.path.join(root, path)
- if not os.access(full_path, os.R_OK):
+
+ if os.lstat(full_path).st_mode & os.R_OK == 0:
raise IOError(
'Can not access file in context: {}'.format(full_path)
)
@@ -157,29 +156,6 @@ def version_gte(v1, v2):
return not version_lt(v1, v2)
-def ping_registry(url):
- warnings.warn(
- 'The `ping_registry` method is deprecated and will be removed.',
- DeprecationWarning
- )
-
- return ping(url + '/v2/', [401]) or ping(url + '/v1/_ping')
-
-
-def ping(url, valid_4xx_statuses=None):
- try:
- res = requests.get(url, timeout=3)
- except Exception:
- return False
- else:
- # We don't send yet auth headers
- # and a v2 registry will respond with status 401
- return (
- res.status_code < 400 or
- (valid_4xx_statuses and res.status_code in valid_4xx_statuses)
- )
-
-
def _convert_port_binding(binding):
result = {'HostIp': '', 'HostPort': ''}
if isinstance(binding, tuple):
diff --git a/docker/version.py b/docker/version.py
index 2502183..f141747 100644
--- a/docker/version.py
+++ b/docker/version.py
@@ -1,2 +1,2 @@
-version = "2.7.0"
+version = "3.0.0"
version_info = tuple([int(d) for d in version.split("-")[0].split(".")])
diff --git a/docs/change-log.md b/docs/change-log.md
index b8298a7..08d4e8f 100644
--- a/docs/change-log.md
+++ b/docs/change-log.md
@@ -1,6 +1,83 @@
Change log
==========
+3.0.0
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/39?closed=1)
+
+### Breaking changes
+
+* Support for API version < 1.21 has been removed.
+* The following methods have been removed:
+ * `APIClient.copy` has been removed. Users should use `APIClient.get_archive`
+ instead.
+ * `APIClient.insert` has been removed. Users may use `APIClient.put_archive`
+ combined with `APIClient.commit` to replicate the method's behavior.
+ * `utils.ping_registry` and `utils.ping` have been removed.
+* The following parameters have been removed:
+ * `stream` in `APIClient.build`
+ * `cpu_shares`, `cpuset`, `dns`, `mem_limit`, `memswap_limit`,
+ `volume_driver`, `volumes_from` in `APIClient.create_container`. These are
+ all replaced by their equivalent in `create_host_config`
+ * `insecure_registry` in `APIClient.login`, `APIClient.pull`,
+ `APIClient.push`, `DockerClient.images.push` and `DockerClient.images.pull`
+ * `viz` in `APIClient.images`
+* The following parameters have been renamed:
+ * `endpoint_config` in `APIClient.create_service` and
+ `APIClient.update_service` is now `endpoint_spec`
+ * `name` in `DockerClient.images.pull` is now `repository`
+* The return value for the following methods has changed:
+ * `APIClient.wait` and `Container.wait` now return a ``dict`` representing
+ the API's response instead of returning the status code directly.
+* `DockerClient.images.load` now returns a list of `Image` objects for the
+ images that were loaded, instead of a log stream.
+ * `Container.exec_run` now returns a tuple of (exit_code, output) instead of
+ just the output.
+ * `DockerClient.images.build` now returns a tuple of (image, build_logs)
+ instead of just the image object.
+ * `APIClient.export`, `APIClient.get_archive` and `APIClient.get_image` now
+ return generators streaming the raw binary data from the server's response.
+ * When no tag is provided, `DockerClient.images.pull` now returns a list of
+ `Image`s associated to the pulled repository instead of just the `latest`
+ image.
+
+### Features
+
+* The Docker Python SDK is now officially supported on Python 3.6
+* Added `scale` method to the `Service` model; this method is a shorthand
+ that calls `update_service` with the required number of replicas
+* Added support for the `platform` parameter in `APIClient.build`,
+ `DockerClient.images.build`, `APIClient.pull` and `DockerClient.images.pull`
+* Added support for the `until` parameter in `APIClient.logs` and
+ `Container.logs`
+* Added support for the `workdir` argument in `APIClient.exec_create` and
+ `Container.exec_run`
+* Added support for the `condition` argument in `APIClient.wait` and
+ `Container.wait`
+* Users can now specify a publish mode for ports in `EndpointSpec` using
+ the `{published_port: (target_port, protocol, publish_mode)}` syntax.
+* Added support for the `isolation` parameter in `ContainerSpec`,
+ `DockerClient.services.create` and `Service.update`
+* `APIClient.attach_socket` and `APIClient.exec_create` now allow specifying a
+ `detach_keys` combination. If unspecified, the value from the `config.json`
+ file will be used
+* TLS connections now default to using the TLSv1.2 protocol when available
+
+
+### Bugfixes
+
+* Fixed a bug where whitespace-only lines in `.dockerignore` would break builds
+ on Windows
+* Fixed a bug where broken symlinks inside a build context would cause the
+ build to fail
+* Fixed a bug where specifying volumes with Windows drives would cause
+ incorrect parsing in `DockerClient.containers.run`
+* Fixed a bug where the `networks` data provided to `create_service` and
+ `update_service` would be sent incorrectly to the Engine with API < 1.25
+* Pulling all tags from a repository with no `latest` tag using the
+ `DockerClient` will no longer raise a `NotFound` exception
+
2.7.0
-----
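The feature list above also mentions the new `until`, `workdir` and `condition` parameters. A combined sketch using the high-level client, assuming a daemon with API >= 1.35 and the `busybox` image:

    import docker
    from datetime import datetime

    client = docker.from_env()
    container = client.containers.run(
        'busybox', 'sh -c "echo hi; sleep 30"', detach=True
    )

    logs = container.logs(until=datetime.utcnow())                 # new `until`
    exit_code, output = container.exec_run('pwd', workdir='/tmp')  # new `workdir`

    container.stop(timeout=1)
    container.wait(condition='not-running')                        # new `condition`
    container.remove()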
diff --git a/docs/images.rst b/docs/images.rst
index 3ba0601..12b0fd1 100644
--- a/docs/images.rst
+++ b/docs/images.rst
@@ -26,14 +26,16 @@ Image objects
.. autoclass:: Image()
-.. py:attribute:: attrs
-.. autoattribute:: id
-.. autoattribute:: labels
-.. autoattribute:: short_id
-.. autoattribute:: tags
+ .. py:attribute:: attrs
The raw representation of this object from the server.
+ .. autoattribute:: id
+ .. autoattribute:: labels
+ .. autoattribute:: short_id
+ .. autoattribute:: tags
+
+
.. automethod:: history
.. automethod:: reload
diff --git a/requirements.txt b/requirements.txt
index f3c61e7..1602750 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -11,6 +11,8 @@ packaging==16.8
pycparser==2.17
pyOpenSSL==17.0.0
pyparsing==2.2.0
+pypiwin32==219; sys_platform == 'win32' and python_version < '3.6'
+pypiwin32==220; sys_platform == 'win32' and python_version >= '3.6'
requests==2.14.2
six==1.10.0
websocket-client==0.40.0
diff --git a/setup.py b/setup.py
index d59d812..b628f4a 100644
--- a/setup.py
+++ b/setup.py
@@ -26,9 +26,6 @@ requirements = [
'docker-pycreds >= 0.2.1'
]
-if sys.platform == 'win32':
- requirements.append('pypiwin32 >= 219')
-
extras_require = {
':python_version < "3.5"': 'backports.ssl_match_hostname >= 3.5',
# While not imported explicitly, the ipaddress module is required for
@@ -36,6 +33,12 @@ extras_require = {
# ServerAltname: https://pypi.python.org/pypi/backports.ssl_match_hostname
':python_version < "3.3"': 'ipaddress >= 1.0.16',
+ # win32 APIs if on Windows (required for npipe support)
+ # Python 3.6 is only compatible with v220; Python < 3.5 is not supported
+ # on v220; ALL versions are broken for v222 (as of 2018-01-26)
+ ':sys_platform == "win32" and python_version < "3.6"': 'pypiwin32==219',
+ ':sys_platform == "win32" and python_version >= "3.6"': 'pypiwin32==220',
+
# If using docker-py over TLS, highly recommend this option is
# pip-installed or pinned.
@@ -87,6 +90,7 @@ setup(
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
'Topic :: Utilities',
'License :: OSI Approved :: Apache Software License',
],
diff --git a/tests/helpers.py b/tests/helpers.py
index 124ae2d..c4ea364 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -5,6 +5,9 @@ import random
import tarfile
import tempfile
import time
+import re
+import six
+import socket
import docker
import pytest
@@ -102,3 +105,29 @@ def force_leave_swarm(client):
def swarm_listen_addr():
return '0.0.0.0:{0}'.format(random.randrange(10000, 25000))
+
+
+def assert_cat_socket_detached_with_keys(sock, inputs):
+ if six.PY3:
+ sock = sock._sock
+
+ for i in inputs:
+ sock.send(i)
+ time.sleep(0.5)
+
+ # If we're using a Unix socket, the sock.send call will fail with a
+ # BrokenPipeError; INET sockets will just stop receiving / sending data
+ # but will not raise an error
+ if sock.family == getattr(socket, 'AF_UNIX', -1):
+ with pytest.raises(socket.error):
+ sock.send(b'make sure the socket is closed\n')
+ else:
+ sock.send(b"make sure the socket is closed\n")
+ assert sock.recv(32) == b''
+
+
+def ctrl_with(char):
+ if re.match('[a-z]', char):
+ return chr(ord(char) - ord('a') + 1).encode('ascii')
+ else:
+ raise(Exception('char must be [a-z]'))
diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py
index 8e98cc9..4c2b992 100644
--- a/tests/integration/api_build_test.py
+++ b/tests/integration/api_build_test.py
@@ -21,7 +21,7 @@ class BuildTest(BaseAPIIntegrationTest):
'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
' /tmp/silence.tar.gz'
]).encode('ascii'))
- stream = self.client.build(fileobj=script, stream=True, decode=True)
+ stream = self.client.build(fileobj=script, decode=True)
logs = []
for chunk in stream:
logs.append(chunk)
@@ -37,15 +37,14 @@ class BuildTest(BaseAPIIntegrationTest):
'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
' /tmp/silence.tar.gz'
]))
- stream = self.client.build(fileobj=script, stream=True)
+ stream = self.client.build(fileobj=script)
logs = ''
for chunk in stream:
if six.PY3:
chunk = chunk.decode('utf-8')
logs += chunk
- self.assertNotEqual(logs, '')
+ assert logs != ''
- @requires_api_version('1.8')
def test_build_with_dockerignore(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
@@ -92,13 +91,11 @@ class BuildTest(BaseAPIIntegrationTest):
if six.PY3:
logs = logs.decode('utf-8')
- self.assertEqual(
- sorted(list(filter(None, logs.split('\n')))),
- sorted(['/test/ignored/subdir/excepted-file',
- '/test/not-ignored']),
- )
+ assert sorted(list(filter(None, logs.split('\n')))) == sorted([
+ '/test/ignored/subdir/excepted-file',
+ '/test/not-ignored'
+ ])
- @requires_api_version('1.21')
def test_build_with_buildargs(self):
script = io.BytesIO('\n'.join([
'FROM scratch',
@@ -114,7 +111,7 @@ class BuildTest(BaseAPIIntegrationTest):
pass
info = self.client.inspect_image('buildargs')
- self.assertEqual(info['Config']['User'], 'OK')
+ assert info['Config']['User'] == 'OK'
@requires_api_version('1.22')
def test_build_shmsize(self):
@@ -152,7 +149,7 @@ class BuildTest(BaseAPIIntegrationTest):
pass
info = self.client.inspect_image('labels')
- self.assertEqual(info['Config']['Labels'], labels)
+ assert info['Config']['Labels'] == labels
@requires_api_version('1.25')
def test_build_with_cache_from(self):
@@ -309,8 +306,8 @@ class BuildTest(BaseAPIIntegrationTest):
non_squashed = build_squashed(False)
squashed = build_squashed(True)
- self.assertEqual(len(non_squashed['RootFS']['Layers']), 4)
- self.assertEqual(len(squashed['RootFS']['Layers']), 2)
+ assert len(non_squashed['RootFS']['Layers']) == 4
+ assert len(squashed['RootFS']['Layers']) == 2
def test_build_stderr_data(self):
control_chars = ['\x1b[91m', '\x1b[0m']
@@ -321,7 +318,7 @@ class BuildTest(BaseAPIIntegrationTest):
]))
stream = self.client.build(
- fileobj=script, stream=True, decode=True, nocache=True
+ fileobj=script, decode=True, nocache=True
)
lines = []
for chunk in stream:
@@ -329,7 +326,7 @@ class BuildTest(BaseAPIIntegrationTest):
expected = '{0}{2}\n{1}'.format(
control_chars[0], control_chars[1], snippet
)
- self.assertTrue(any([line == expected for line in lines]))
+ assert any([line == expected for line in lines])
def test_build_gzip_encoding(self):
base_dir = tempfile.mkdtemp()
@@ -342,7 +339,7 @@ class BuildTest(BaseAPIIntegrationTest):
]))
stream = self.client.build(
- path=base_dir, stream=True, decode=True, nocache=True,
+ path=base_dir, decode=True, nocache=True,
gzip=True
)
@@ -352,6 +349,41 @@ class BuildTest(BaseAPIIntegrationTest):
assert 'Successfully built' in lines[-1]['stream']
+ def test_build_with_dockerfile_empty_lines(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write('FROM busybox\n')
+ with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
+ f.write('\n'.join([
+ ' ',
+ '',
+ '\t\t',
+ '\t ',
+ ]))
+
+ stream = self.client.build(
+ path=base_dir, decode=True, nocache=True
+ )
+
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully built' in lines[-1]['stream']
+
def test_build_gzip_custom_encoding(self):
- with self.assertRaises(errors.DockerException):
+ with pytest.raises(errors.DockerException):
self.client.build(path='.', gzip=True, encoding='text/html')
+
+ @requires_api_version('1.32')
+ @requires_experimental(until=None)
+ def test_build_invalid_platform(self):
+ script = io.BytesIO('FROM busybox\n'.encode('ascii'))
+
+ with pytest.raises(errors.APIError) as excinfo:
+ stream = self.client.build(fileobj=script, platform='foobar')
+ for _ in stream:
+ pass
+
+ assert excinfo.value.status_code == 400
+ assert 'invalid platform' in excinfo.exconly()
diff --git a/tests/integration/api_client_test.py b/tests/integration/api_client_test.py
index cfb45a3..05281f8 100644
--- a/tests/integration/api_client_test.py
+++ b/tests/integration/api_client_test.py
@@ -14,14 +14,14 @@ from .base import BaseAPIIntegrationTest
class InformationTest(BaseAPIIntegrationTest):
def test_version(self):
res = self.client.version()
- self.assertIn('GoVersion', res)
- self.assertIn('Version', res)
+ assert 'GoVersion' in res
+ assert 'Version' in res
def test_info(self):
res = self.client.info()
- self.assertIn('Containers', res)
- self.assertIn('Images', res)
- self.assertIn('Debug', res)
+ assert 'Containers' in res
+ assert 'Images' in res
+ assert 'Debug' in res
class LoadConfigTest(BaseAPIIntegrationTest):
@@ -35,12 +35,12 @@ class LoadConfigTest(BaseAPIIntegrationTest):
f.write('email = sakuya@scarlet.net')
f.close()
cfg = docker.auth.load_config(cfg_path)
- self.assertNotEqual(cfg[docker.auth.INDEX_NAME], None)
+ assert cfg[docker.auth.INDEX_NAME] is not None
cfg = cfg[docker.auth.INDEX_NAME]
- self.assertEqual(cfg['username'], 'sakuya')
- self.assertEqual(cfg['password'], 'izayoi')
- self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
- self.assertEqual(cfg.get('Auth'), None)
+ assert cfg['username'] == 'sakuya'
+ assert cfg['password'] == 'izayoi'
+ assert cfg['email'] == 'sakuya@scarlet.net'
+ assert cfg.get('Auth') is None
def test_load_json_config(self):
folder = tempfile.mkdtemp()
@@ -53,12 +53,12 @@ class LoadConfigTest(BaseAPIIntegrationTest):
docker.auth.INDEX_URL, auth_, email_))
f.close()
cfg = docker.auth.load_config(cfg_path)
- self.assertNotEqual(cfg[docker.auth.INDEX_URL], None)
+ assert cfg[docker.auth.INDEX_URL] is not None
cfg = cfg[docker.auth.INDEX_URL]
- self.assertEqual(cfg['username'], 'sakuya')
- self.assertEqual(cfg['password'], 'izayoi')
- self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
- self.assertEqual(cfg.get('Auth'), None)
+ assert cfg['username'] == 'sakuya'
+ assert cfg['password'] == 'izayoi'
+ assert cfg['email'] == 'sakuya@scarlet.net'
+ assert cfg.get('Auth') is None
class AutoDetectVersionTest(unittest.TestCase):
@@ -66,9 +66,9 @@ class AutoDetectVersionTest(unittest.TestCase):
client = docker.APIClient(version='auto', **kwargs_from_env())
client_version = client._version
api_version = client.version(api_version=False)['ApiVersion']
- self.assertEqual(client_version, api_version)
+ assert client_version == api_version
api_version_2 = client.version()['ApiVersion']
- self.assertEqual(client_version, api_version_2)
+ assert client_version == api_version_2
client.close()
@@ -90,8 +90,8 @@ class ConnectionTimeoutTest(unittest.TestCase):
except:
pass
end = time.time()
- self.assertTrue(res is None)
- self.assertTrue(end - start < 2 * self.timeout)
+ assert res is None
+ assert end - start < 2 * self.timeout
class UnixconnTest(unittest.TestCase):
@@ -112,5 +112,6 @@ class UnixconnTest(unittest.TestCase):
client.close()
del client
- assert len(w) == 0, \
- "No warnings produced: {0}".format(w[0].message)
+ assert len(w) == 0, "No warnings produced: {0}".format(
+ w[0].message
+ )
diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py
index 5e30eee..01780a7 100644
--- a/tests/integration/api_container_test.py
+++ b/tests/integration/api_container_test.py
@@ -1,6 +1,8 @@
import os
+import re
import signal
import tempfile
+from datetime import datetime
import docker
from docker.constants import IS_WINDOWS_PLATFORM
@@ -9,11 +11,14 @@ from docker.utils.socket import read_exactly
import pytest
+import requests
import six
from .base import BUSYBOX, BaseAPIIntegrationTest
from .. import helpers
-from ..helpers import requires_api_version
+from ..helpers import (
+ requires_api_version, ctrl_with, assert_cat_socket_detached_with_keys
+)
class ListContainersTest(BaseAPIIntegrationTest):
@@ -21,26 +26,26 @@ class ListContainersTest(BaseAPIIntegrationTest):
res0 = self.client.containers(all=True)
size = len(res0)
res1 = self.client.create_container(BUSYBOX, 'true')
- self.assertIn('Id', res1)
+ assert 'Id' in res1
self.client.start(res1['Id'])
self.tmp_containers.append(res1['Id'])
res2 = self.client.containers(all=True)
- self.assertEqual(size + 1, len(res2))
+ assert size + 1 == len(res2)
retrieved = [x for x in res2 if x['Id'].startswith(res1['Id'])]
- self.assertEqual(len(retrieved), 1)
+ assert len(retrieved) == 1
retrieved = retrieved[0]
- self.assertIn('Command', retrieved)
- self.assertEqual(retrieved['Command'], six.text_type('true'))
- self.assertIn('Image', retrieved)
- self.assertRegex(retrieved['Image'], r'busybox:.*')
- self.assertIn('Status', retrieved)
+ assert 'Command' in retrieved
+ assert retrieved['Command'] == six.text_type('true')
+ assert 'Image' in retrieved
+ assert re.search(r'busybox:.*', retrieved['Image'])
+ assert 'Status' in retrieved
class CreateContainerTest(BaseAPIIntegrationTest):
def test_create(self):
res = self.client.create_container(BUSYBOX, 'true')
- self.assertIn('Id', res)
+ assert 'Id' in res
self.tmp_containers.append(res['Id'])
def test_create_with_host_pid_mode(self):
@@ -49,14 +54,14 @@ class CreateContainerTest(BaseAPIIntegrationTest):
pid_mode='host', network_mode='none'
)
)
- self.assertIn('Id', ctnr)
+ assert 'Id' in ctnr
self.tmp_containers.append(ctnr['Id'])
self.client.start(ctnr)
inspect = self.client.inspect_container(ctnr)
- self.assertIn('HostConfig', inspect)
+ assert 'HostConfig' in inspect
host_config = inspect['HostConfig']
- self.assertIn('PidMode', host_config)
- self.assertEqual(host_config['PidMode'], 'host')
+ assert 'PidMode' in host_config
+ assert host_config['PidMode'] == 'host'
def test_create_with_links(self):
res0 = self.client.create_container(
@@ -97,15 +102,15 @@ class CreateContainerTest(BaseAPIIntegrationTest):
container3_id = res2['Id']
self.tmp_containers.append(container3_id)
self.client.start(container3_id)
- self.assertEqual(self.client.wait(container3_id), 0)
+ assert self.client.wait(container3_id)['StatusCode'] == 0
logs = self.client.logs(container3_id)
if six.PY3:
logs = logs.decode('utf-8')
- self.assertIn('{0}_NAME='.format(link_env_prefix1), logs)
- self.assertIn('{0}_ENV_FOO=1'.format(link_env_prefix1), logs)
- self.assertIn('{0}_NAME='.format(link_env_prefix2), logs)
- self.assertIn('{0}_ENV_FOO=1'.format(link_env_prefix2), logs)
+ assert '{0}_NAME='.format(link_env_prefix1) in logs
+ assert '{0}_ENV_FOO=1'.format(link_env_prefix1) in logs
+ assert '{0}_NAME='.format(link_env_prefix2) in logs
+ assert '{0}_ENV_FOO=1'.format(link_env_prefix2) in logs
def test_create_with_restart_policy(self):
container = self.client.create_container(
@@ -118,12 +123,10 @@ class CreateContainerTest(BaseAPIIntegrationTest):
id = container['Id']
self.client.start(id)
self.client.wait(id)
- with self.assertRaises(docker.errors.APIError) as exc:
+ with pytest.raises(docker.errors.APIError) as exc:
self.client.remove_container(id)
- err = exc.exception.explanation
- self.assertIn(
- 'You cannot remove ', err
- )
+ err = exc.value.explanation
+ assert 'You cannot remove ' in err
self.client.remove_container(id, force=True)
def test_create_container_with_volumes_from(self):
@@ -142,23 +145,19 @@ class CreateContainerTest(BaseAPIIntegrationTest):
container2_id = res1['Id']
self.tmp_containers.append(container2_id)
self.client.start(container2_id)
- with self.assertRaises(docker.errors.DockerException):
- self.client.create_container(
- BUSYBOX, 'cat', detach=True, stdin_open=True,
- volumes_from=vol_names
- )
- res2 = self.client.create_container(
+
+ res = self.client.create_container(
BUSYBOX, 'cat', detach=True, stdin_open=True,
host_config=self.client.create_host_config(
volumes_from=vol_names, network_mode='none'
)
)
- container3_id = res2['Id']
+ container3_id = res['Id']
self.tmp_containers.append(container3_id)
self.client.start(container3_id)
- info = self.client.inspect_container(res2['Id'])
- self.assertCountEqual(info['HostConfig']['VolumesFrom'], vol_names)
+ info = self.client.inspect_container(res['Id'])
+ assert len(info['HostConfig']['VolumesFrom']) == len(vol_names)
def create_container_readonly_fs(self):
ctnr = self.client.create_container(
@@ -167,19 +166,19 @@ class CreateContainerTest(BaseAPIIntegrationTest):
read_only=True, network_mode='none'
)
)
- self.assertIn('Id', ctnr)
+ assert 'Id' in ctnr
self.tmp_containers.append(ctnr['Id'])
self.client.start(ctnr)
- res = self.client.wait(ctnr)
- self.assertNotEqual(res, 0)
+ res = self.client.wait(ctnr)['StatusCode']
+ assert res != 0
def create_container_with_name(self):
res = self.client.create_container(BUSYBOX, 'true', name='foobar')
- self.assertIn('Id', res)
+ assert 'Id' in res
self.tmp_containers.append(res['Id'])
inspect = self.client.inspect_container(res['Id'])
- self.assertIn('Name', inspect)
- self.assertEqual('/foobar', inspect['Name'])
+ assert 'Name' in inspect
+ assert '/foobar' == inspect['Name']
def create_container_privileged(self):
res = self.client.create_container(
@@ -187,24 +186,24 @@ class CreateContainerTest(BaseAPIIntegrationTest):
privileged=True, network_mode='none'
)
)
- self.assertIn('Id', res)
+ assert 'Id' in res
self.tmp_containers.append(res['Id'])
self.client.start(res['Id'])
inspect = self.client.inspect_container(res['Id'])
- self.assertIn('Config', inspect)
- self.assertIn('Id', inspect)
- self.assertTrue(inspect['Id'].startswith(res['Id']))
- self.assertIn('Image', inspect)
- self.assertIn('State', inspect)
- self.assertIn('Running', inspect['State'])
+ assert 'Config' in inspect
+ assert 'Id' in inspect
+ assert inspect['Id'].startswith(res['Id'])
+ assert 'Image' in inspect
+ assert 'State' in inspect
+ assert 'Running' in inspect['State']
if not inspect['State']['Running']:
- self.assertIn('ExitCode', inspect['State'])
- self.assertEqual(inspect['State']['ExitCode'], 0)
+ assert 'ExitCode' in inspect['State']
+ assert inspect['State']['ExitCode'] == 0
# Since Nov 2013, the Privileged flag is no longer part of the
# container's config exposed via the API (safety concerns?).
#
if 'Privileged' in inspect['Config']:
- self.assertEqual(inspect['Config']['Privileged'], True)
+ assert inspect['Config']['Privileged'] is True
def test_create_with_mac_address(self):
mac_address_expected = "02:42:ac:11:00:0a"
@@ -215,12 +214,10 @@ class CreateContainerTest(BaseAPIIntegrationTest):
self.client.start(container)
res = self.client.inspect_container(container['Id'])
- self.assertEqual(mac_address_expected,
- res['NetworkSettings']['MacAddress'])
+ assert mac_address_expected == res['NetworkSettings']['MacAddress']
self.client.kill(id)
- @requires_api_version('1.20')
def test_group_id_ints(self):
container = self.client.create_container(
BUSYBOX, 'id -G',
@@ -234,10 +231,9 @@ class CreateContainerTest(BaseAPIIntegrationTest):
if six.PY3:
logs = logs.decode('utf-8')
groups = logs.strip().split(' ')
- self.assertIn('1000', groups)
- self.assertIn('1001', groups)
+ assert '1000' in groups
+ assert '1001' in groups
- @requires_api_version('1.20')
def test_group_id_strings(self):
container = self.client.create_container(
BUSYBOX, 'id -G', host_config=self.client.create_host_config(
@@ -253,8 +249,8 @@ class CreateContainerTest(BaseAPIIntegrationTest):
logs = logs.decode('utf-8')
groups = logs.strip().split(' ')
- self.assertIn('1000', groups)
- self.assertIn('1001', groups)
+ assert '1000' in groups
+ assert '1001' in groups
def test_valid_log_driver_and_log_opt(self):
log_config = docker.types.LogConfig(
@@ -272,8 +268,8 @@ class CreateContainerTest(BaseAPIIntegrationTest):
info = self.client.inspect_container(container)
container_log_config = info['HostConfig']['LogConfig']
- self.assertEqual(container_log_config['Type'], log_config.type)
- self.assertEqual(container_log_config['Config'], log_config.config)
+ assert container_log_config['Type'] == log_config.type
+ assert container_log_config['Config'] == log_config.config
def test_invalid_log_driver_raises_exception(self):
log_config = docker.types.LogConfig(
@@ -309,8 +305,8 @@ class CreateContainerTest(BaseAPIIntegrationTest):
info = self.client.inspect_container(container)
container_log_config = info['HostConfig']['LogConfig']
- self.assertEqual(container_log_config['Type'], "json-file")
- self.assertEqual(container_log_config['Config'], log_config.config)
+ assert container_log_config['Type'] == "json-file"
+ assert container_log_config['Config'] == log_config.config
def test_valid_no_config_specified(self):
log_config = docker.types.LogConfig(
@@ -328,8 +324,8 @@ class CreateContainerTest(BaseAPIIntegrationTest):
info = self.client.inspect_container(container)
container_log_config = info['HostConfig']['LogConfig']
- self.assertEqual(container_log_config['Type'], "json-file")
- self.assertEqual(container_log_config['Config'], {})
+ assert container_log_config['Type'] == "json-file"
+ assert container_log_config['Config'] == {}
def test_create_with_memory_constraints_with_str(self):
ctnr = self.client.create_container(
@@ -339,29 +335,29 @@ class CreateContainerTest(BaseAPIIntegrationTest):
mem_limit='700M'
)
)
- self.assertIn('Id', ctnr)
+ assert 'Id' in ctnr
self.tmp_containers.append(ctnr['Id'])
self.client.start(ctnr)
inspect = self.client.inspect_container(ctnr)
- self.assertIn('HostConfig', inspect)
+ assert 'HostConfig' in inspect
host_config = inspect['HostConfig']
for limit in ['Memory', 'MemorySwap']:
- self.assertIn(limit, host_config)
+ assert limit in host_config
def test_create_with_memory_constraints_with_int(self):
ctnr = self.client.create_container(
BUSYBOX, 'true',
host_config=self.client.create_host_config(mem_swappiness=40)
)
- self.assertIn('Id', ctnr)
+ assert 'Id' in ctnr
self.tmp_containers.append(ctnr['Id'])
self.client.start(ctnr)
inspect = self.client.inspect_container(ctnr)
- self.assertIn('HostConfig', inspect)
+ assert 'HostConfig' in inspect
host_config = inspect['HostConfig']
- self.assertIn('MemorySwappiness', host_config)
+ assert 'MemorySwappiness' in host_config
def test_create_with_environment_variable_no_value(self):
container = self.client.create_container(
@@ -509,7 +505,7 @@ class VolumeBindTest(BaseAPIIntegrationTest):
if six.PY3:
logs = logs.decode('utf-8')
- self.assertIn(self.filename, logs)
+ assert self.filename in logs
inspect_data = self.client.inspect_container(container)
self.check_container_data(inspect_data, True)
@@ -531,7 +527,7 @@ class VolumeBindTest(BaseAPIIntegrationTest):
if six.PY3:
logs = logs.decode('utf-8')
- self.assertIn(self.filename, logs)
+ assert self.filename in logs
inspect_data = self.client.inspect_container(container)
self.check_container_data(inspect_data, False)
@@ -602,24 +598,15 @@ class VolumeBindTest(BaseAPIIntegrationTest):
assert mount_data['RW'] is True
def check_container_data(self, inspect_data, rw):
- if docker.utils.compare_version('1.20', self.client._version) < 0:
- self.assertIn('Volumes', inspect_data)
- self.assertIn(self.mount_dest, inspect_data['Volumes'])
- self.assertEqual(
- self.mount_origin, inspect_data['Volumes'][self.mount_dest]
- )
- self.assertIn(self.mount_dest, inspect_data['VolumesRW'])
- self.assertFalse(inspect_data['VolumesRW'][self.mount_dest])
- else:
- self.assertIn('Mounts', inspect_data)
- filtered = list(filter(
- lambda x: x['Destination'] == self.mount_dest,
- inspect_data['Mounts']
- ))
- self.assertEqual(len(filtered), 1)
- mount_data = filtered[0]
- self.assertEqual(mount_data['Source'], self.mount_origin)
- self.assertEqual(mount_data['RW'], rw)
+ assert 'Mounts' in inspect_data
+ filtered = list(filter(
+ lambda x: x['Destination'] == self.mount_dest,
+ inspect_data['Mounts']
+ ))
+ assert len(filtered) == 1
+ mount_data = filtered[0]
+ assert mount_data['Source'] == self.mount_origin
+ assert mount_data['RW'] == rw
def run_with_volume(self, ro, *args, **kwargs):
return self.run_container(
@@ -638,7 +625,6 @@ class VolumeBindTest(BaseAPIIntegrationTest):
)
-@requires_api_version('1.20')
class ArchiveTest(BaseAPIIntegrationTest):
def test_get_file_archive_from_container(self):
data = 'The Maid and the Pocket Watch of Blood'
@@ -657,7 +643,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
retrieved_data = helpers.untar_file(destination, 'data.txt')
if six.PY3:
retrieved_data = retrieved_data.decode('utf-8')
- self.assertEqual(data, retrieved_data.strip())
+ assert data == retrieved_data.strip()
def test_get_file_stat_from_container(self):
data = 'The Maid and the Pocket Watch of Blood'
@@ -669,10 +655,10 @@ class ArchiveTest(BaseAPIIntegrationTest):
self.client.start(ctnr)
self.client.wait(ctnr)
strm, stat = self.client.get_archive(ctnr, '/vol1/data.txt')
- self.assertIn('name', stat)
- self.assertEqual(stat['name'], 'data.txt')
- self.assertIn('size', stat)
- self.assertEqual(stat['size'], len(data))
+ assert 'name' in stat
+ assert stat['name'] == 'data.txt'
+ assert 'size' in stat
+ assert stat['size'] == len(data)
def test_copy_file_to_container(self):
data = b'Deaf To All But The Song'
@@ -695,7 +681,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
if six.PY3:
logs = logs.decode('utf-8')
data = data.decode('utf-8')
- self.assertEqual(logs.strip(), data)
+ assert logs.strip() == data
def test_copy_directory_to_container(self):
files = ['a.py', 'b.py', 'foo/b.py']
@@ -713,10 +699,10 @@ class ArchiveTest(BaseAPIIntegrationTest):
if six.PY3:
logs = logs.decode('utf-8')
results = logs.strip().split()
- self.assertIn('a.py', results)
- self.assertIn('b.py', results)
- self.assertIn('foo/', results)
- self.assertIn('bar/', results)
+ assert 'a.py' in results
+ assert 'b.py' in results
+ assert 'foo/' in results
+ assert 'bar/' in results
class RenameContainerTest(BaseAPIIntegrationTest):
@@ -724,49 +710,49 @@ class RenameContainerTest(BaseAPIIntegrationTest):
version = self.client.version()['Version']
name = 'hong_meiling'
res = self.client.create_container(BUSYBOX, 'true')
- self.assertIn('Id', res)
+ assert 'Id' in res
self.tmp_containers.append(res['Id'])
self.client.rename(res, name)
inspect = self.client.inspect_container(res['Id'])
- self.assertIn('Name', inspect)
+ assert 'Name' in inspect
if version == '1.5.0':
- self.assertEqual(name, inspect['Name'])
+ assert name == inspect['Name']
else:
- self.assertEqual('/{0}'.format(name), inspect['Name'])
+ assert '/{0}'.format(name) == inspect['Name']
class StartContainerTest(BaseAPIIntegrationTest):
def test_start_container(self):
res = self.client.create_container(BUSYBOX, 'true')
- self.assertIn('Id', res)
+ assert 'Id' in res
self.tmp_containers.append(res['Id'])
self.client.start(res['Id'])
inspect = self.client.inspect_container(res['Id'])
- self.assertIn('Config', inspect)
- self.assertIn('Id', inspect)
- self.assertTrue(inspect['Id'].startswith(res['Id']))
- self.assertIn('Image', inspect)
- self.assertIn('State', inspect)
- self.assertIn('Running', inspect['State'])
+ assert 'Config' in inspect
+ assert 'Id' in inspect
+ assert inspect['Id'].startswith(res['Id'])
+ assert 'Image' in inspect
+ assert 'State' in inspect
+ assert 'Running' in inspect['State']
if not inspect['State']['Running']:
- self.assertIn('ExitCode', inspect['State'])
- self.assertEqual(inspect['State']['ExitCode'], 0)
+ assert 'ExitCode' in inspect['State']
+ assert inspect['State']['ExitCode'] == 0
def test_start_container_with_dict_instead_of_id(self):
res = self.client.create_container(BUSYBOX, 'true')
- self.assertIn('Id', res)
+ assert 'Id' in res
self.tmp_containers.append(res['Id'])
self.client.start(res)
inspect = self.client.inspect_container(res['Id'])
- self.assertIn('Config', inspect)
- self.assertIn('Id', inspect)
- self.assertTrue(inspect['Id'].startswith(res['Id']))
- self.assertIn('Image', inspect)
- self.assertIn('State', inspect)
- self.assertIn('Running', inspect['State'])
+ assert 'Config' in inspect
+ assert 'Id' in inspect
+ assert inspect['Id'].startswith(res['Id'])
+ assert 'Image' in inspect
+ assert 'State' in inspect
+ assert 'Running' in inspect['State']
if not inspect['State']['Running']:
- self.assertIn('ExitCode', inspect['State'])
- self.assertEqual(inspect['State']['ExitCode'], 0)
+ assert 'ExitCode' in inspect['State']
+ assert inspect['State']['ExitCode'] == 0
def test_run_shlex_commands(self):
commands = [
@@ -785,8 +771,8 @@ class StartContainerTest(BaseAPIIntegrationTest):
id = container['Id']
self.client.start(id)
self.tmp_containers.append(id)
- exitcode = self.client.wait(id)
- self.assertEqual(exitcode, 0, msg=cmd)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode == 0, cmd
class WaitTest(BaseAPIIntegrationTest):
@@ -795,26 +781,43 @@ class WaitTest(BaseAPIIntegrationTest):
id = res['Id']
self.tmp_containers.append(id)
self.client.start(id)
- exitcode = self.client.wait(id)
- self.assertEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode == 0
inspect = self.client.inspect_container(id)
- self.assertIn('Running', inspect['State'])
- self.assertEqual(inspect['State']['Running'], False)
- self.assertIn('ExitCode', inspect['State'])
- self.assertEqual(inspect['State']['ExitCode'], exitcode)
+ assert 'Running' in inspect['State']
+ assert inspect['State']['Running'] is False
+ assert 'ExitCode' in inspect['State']
+ assert inspect['State']['ExitCode'] == exitcode
def test_wait_with_dict_instead_of_id(self):
res = self.client.create_container(BUSYBOX, ['sleep', '3'])
id = res['Id']
self.tmp_containers.append(id)
self.client.start(res)
- exitcode = self.client.wait(res)
- self.assertEqual(exitcode, 0)
+ exitcode = self.client.wait(res)['StatusCode']
+ assert exitcode == 0
inspect = self.client.inspect_container(res)
- self.assertIn('Running', inspect['State'])
- self.assertEqual(inspect['State']['Running'], False)
- self.assertIn('ExitCode', inspect['State'])
- self.assertEqual(inspect['State']['ExitCode'], exitcode)
+ assert 'Running' in inspect['State']
+ assert inspect['State']['Running'] is False
+ assert 'ExitCode' in inspect['State']
+ assert inspect['State']['ExitCode'] == exitcode
+
+ @requires_api_version('1.30')
+ def test_wait_with_condition(self):
+ ctnr = self.client.create_container(BUSYBOX, 'true')
+ self.tmp_containers.append(ctnr)
+ with pytest.raises(requests.exceptions.ConnectionError):
+ self.client.wait(ctnr, condition='removed', timeout=1)
+
+ ctnr = self.client.create_container(
+ BUSYBOX, ['sleep', '3'],
+ host_config=self.client.create_host_config(auto_remove=True)
+ )
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ assert self.client.wait(
+ ctnr, condition='removed', timeout=5
+ )['StatusCode'] == 0
class LogsTest(BaseAPIIntegrationTest):
@@ -826,10 +829,10 @@ class LogsTest(BaseAPIIntegrationTest):
id = container['Id']
self.tmp_containers.append(id)
self.client.start(id)
- exitcode = self.client.wait(id)
- self.assertEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode == 0
logs = self.client.logs(id)
- self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii'))
+ assert logs == (snippet + '\n').encode(encoding='ascii')
def test_logs_tail_option(self):
snippet = '''Line1
@@ -840,10 +843,10 @@ Line2'''
id = container['Id']
self.tmp_containers.append(id)
self.client.start(id)
- exitcode = self.client.wait(id)
- self.assertEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode == 0
logs = self.client.logs(id, tail=1)
- self.assertEqual(logs, 'Line2\n'.encode(encoding='ascii'))
+ assert logs == 'Line2\n'.encode(encoding='ascii')
def test_logs_streaming_and_follow(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
@@ -857,10 +860,10 @@ Line2'''
for chunk in self.client.logs(id, stream=True, follow=True):
logs += chunk
- exitcode = self.client.wait(id)
- self.assertEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode == 0
- self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii'))
+ assert logs == (snippet + '\n').encode(encoding='ascii')
def test_logs_with_dict_instead_of_id(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
@@ -870,10 +873,10 @@ Line2'''
id = container['Id']
self.tmp_containers.append(id)
self.client.start(id)
- exitcode = self.client.wait(id)
- self.assertEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode == 0
logs = self.client.logs(container)
- self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii'))
+ assert logs == (snippet + '\n').encode(encoding='ascii')
def test_logs_with_tail_0(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
@@ -883,10 +886,26 @@ Line2'''
id = container['Id']
self.tmp_containers.append(id)
self.client.start(id)
- exitcode = self.client.wait(id)
- self.assertEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode == 0
logs = self.client.logs(id, tail=0)
- self.assertEqual(logs, ''.encode(encoding='ascii'))
+ assert logs == ''.encode(encoding='ascii')
+
+ @requires_api_version('1.35')
+ def test_logs_with_until(self):
+ snippet = 'Shanghai Teahouse (Hong Meiling)'
+ container = self.client.create_container(
+ BUSYBOX, 'echo "{0}"'.format(snippet)
+ )
+
+ self.tmp_containers.append(container)
+ self.client.start(container)
+ exitcode = self.client.wait(container)['StatusCode']
+ assert exitcode == 0
+ logs_until_1 = self.client.logs(container, until=1)
+ assert logs_until_1 == b''
+ logs_until_now = self.client.logs(container, until=datetime.now())
+ assert logs_until_now == (snippet + '\n').encode(encoding='ascii')
class DiffTest(BaseAPIIntegrationTest):
@@ -895,26 +914,26 @@ class DiffTest(BaseAPIIntegrationTest):
id = container['Id']
self.client.start(id)
self.tmp_containers.append(id)
- exitcode = self.client.wait(id)
- self.assertEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode == 0
diff = self.client.diff(id)
test_diff = [x for x in diff if x.get('Path', None) == '/test']
- self.assertEqual(len(test_diff), 1)
- self.assertIn('Kind', test_diff[0])
- self.assertEqual(test_diff[0]['Kind'], 1)
+ assert len(test_diff) == 1
+ assert 'Kind' in test_diff[0]
+ assert test_diff[0]['Kind'] == 1
def test_diff_with_dict_instead_of_id(self):
container = self.client.create_container(BUSYBOX, ['touch', '/test'])
id = container['Id']
self.client.start(id)
self.tmp_containers.append(id)
- exitcode = self.client.wait(id)
- self.assertEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode == 0
diff = self.client.diff(container)
test_diff = [x for x in diff if x.get('Path', None) == '/test']
- self.assertEqual(len(test_diff), 1)
- self.assertIn('Kind', test_diff[0])
- self.assertEqual(test_diff[0]['Kind'], 1)
+ assert len(test_diff) == 1
+ assert 'Kind' in test_diff[0]
+ assert test_diff[0]['Kind'] == 1
class StopTest(BaseAPIIntegrationTest):
@@ -925,23 +944,23 @@ class StopTest(BaseAPIIntegrationTest):
self.tmp_containers.append(id)
self.client.stop(id, timeout=2)
container_info = self.client.inspect_container(id)
- self.assertIn('State', container_info)
+ assert 'State' in container_info
state = container_info['State']
- self.assertIn('Running', state)
- self.assertEqual(state['Running'], False)
+ assert 'Running' in state
+ assert state['Running'] is False
def test_stop_with_dict_instead_of_id(self):
container = self.client.create_container(BUSYBOX, ['sleep', '9999'])
- self.assertIn('Id', container)
+ assert 'Id' in container
id = container['Id']
self.client.start(container)
self.tmp_containers.append(id)
self.client.stop(container, timeout=2)
container_info = self.client.inspect_container(id)
- self.assertIn('State', container_info)
+ assert 'State' in container_info
state = container_info['State']
- self.assertIn('Running', state)
- self.assertEqual(state['Running'], False)
+ assert 'Running' in state
+ assert state['Running'] is False
class KillTest(BaseAPIIntegrationTest):
@@ -952,12 +971,12 @@ class KillTest(BaseAPIIntegrationTest):
self.tmp_containers.append(id)
self.client.kill(id)
container_info = self.client.inspect_container(id)
- self.assertIn('State', container_info)
+ assert 'State' in container_info
state = container_info['State']
- self.assertIn('ExitCode', state)
- self.assertNotEqual(state['ExitCode'], 0)
- self.assertIn('Running', state)
- self.assertEqual(state['Running'], False)
+ assert 'ExitCode' in state
+ assert state['ExitCode'] != 0
+ assert 'Running' in state
+ assert state['Running'] is False
def test_kill_with_dict_instead_of_id(self):
container = self.client.create_container(BUSYBOX, ['sleep', '9999'])
@@ -966,12 +985,12 @@ class KillTest(BaseAPIIntegrationTest):
self.tmp_containers.append(id)
self.client.kill(container)
container_info = self.client.inspect_container(id)
- self.assertIn('State', container_info)
+ assert 'State' in container_info
state = container_info['State']
- self.assertIn('ExitCode', state)
- self.assertNotEqual(state['ExitCode'], 0)
- self.assertIn('Running', state)
- self.assertEqual(state['Running'], False)
+ assert 'ExitCode' in state
+ assert state['ExitCode'] != 0
+ assert 'Running' in state
+ assert state['Running'] is False
def test_kill_with_signal(self):
id = self.client.create_container(BUSYBOX, ['sleep', '60'])
@@ -980,45 +999,45 @@ class KillTest(BaseAPIIntegrationTest):
self.client.kill(
id, signal=signal.SIGKILL if not IS_WINDOWS_PLATFORM else 9
)
- exitcode = self.client.wait(id)
- self.assertNotEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode != 0
container_info = self.client.inspect_container(id)
- self.assertIn('State', container_info)
+ assert 'State' in container_info
state = container_info['State']
- self.assertIn('ExitCode', state)
- self.assertNotEqual(state['ExitCode'], 0)
- self.assertIn('Running', state)
- self.assertEqual(state['Running'], False, state)
+ assert 'ExitCode' in state
+ assert state['ExitCode'] != 0
+ assert 'Running' in state
+ assert state['Running'] is False, state
def test_kill_with_signal_name(self):
id = self.client.create_container(BUSYBOX, ['sleep', '60'])
self.client.start(id)
self.tmp_containers.append(id)
self.client.kill(id, signal='SIGKILL')
- exitcode = self.client.wait(id)
- self.assertNotEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode != 0
container_info = self.client.inspect_container(id)
- self.assertIn('State', container_info)
+ assert 'State' in container_info
state = container_info['State']
- self.assertIn('ExitCode', state)
- self.assertNotEqual(state['ExitCode'], 0)
- self.assertIn('Running', state)
- self.assertEqual(state['Running'], False, state)
+ assert 'ExitCode' in state
+ assert state['ExitCode'] != 0
+ assert 'Running' in state
+ assert state['Running'] is False, state
def test_kill_with_signal_integer(self):
id = self.client.create_container(BUSYBOX, ['sleep', '60'])
self.client.start(id)
self.tmp_containers.append(id)
self.client.kill(id, signal=9)
- exitcode = self.client.wait(id)
- self.assertNotEqual(exitcode, 0)
+ exitcode = self.client.wait(id)['StatusCode']
+ assert exitcode != 0
container_info = self.client.inspect_container(id)
- self.assertIn('State', container_info)
+ assert 'State' in container_info
state = container_info['State']
- self.assertIn('ExitCode', state)
- self.assertNotEqual(state['ExitCode'], 0)
- self.assertIn('Running', state)
- self.assertEqual(state['Running'], False, state)
+ assert 'ExitCode' in state
+ assert state['ExitCode'] != 0
+ assert 'Running' in state
+ assert state['Running'] is False, state
class PortTest(BaseAPIIntegrationTest):
@@ -1046,8 +1065,8 @@ class PortTest(BaseAPIIntegrationTest):
ip, host_port = port_binding['HostIp'], port_binding['HostPort']
- self.assertEqual(ip, port_bindings[port][0])
- self.assertEqual(host_port, port_bindings[port][1])
+ assert ip == port_bindings[port][0]
+ assert host_port == port_bindings[port][1]
self.client.kill(id)
@@ -1083,13 +1102,12 @@ class ContainerTopTest(BaseAPIIntegrationTest):
self.client.start(container)
res = self.client.top(container, 'waux')
- self.assertEqual(
- res['Titles'],
- ['USER', 'PID', '%CPU', '%MEM', 'VSZ', 'RSS',
- 'TTY', 'STAT', 'START', 'TIME', 'COMMAND'],
- )
- self.assertEqual(len(res['Processes']), 1)
- self.assertEqual(res['Processes'][0][10], 'sleep 60')
+ assert res['Titles'] == [
+ 'USER', 'PID', '%CPU', '%MEM', 'VSZ', 'RSS',
+ 'TTY', 'STAT', 'START', 'TIME', 'COMMAND'
+ ]
+ assert len(res['Processes']) == 1
+ assert res['Processes'][0][10] == 'sleep 60'
class RestartContainerTest(BaseAPIIntegrationTest):
@@ -1099,37 +1117,37 @@ class RestartContainerTest(BaseAPIIntegrationTest):
self.client.start(id)
self.tmp_containers.append(id)
info = self.client.inspect_container(id)
- self.assertIn('State', info)
- self.assertIn('StartedAt', info['State'])
+ assert 'State' in info
+ assert 'StartedAt' in info['State']
start_time1 = info['State']['StartedAt']
self.client.restart(id, timeout=2)
info2 = self.client.inspect_container(id)
- self.assertIn('State', info2)
- self.assertIn('StartedAt', info2['State'])
+ assert 'State' in info2
+ assert 'StartedAt' in info2['State']
start_time2 = info2['State']['StartedAt']
- self.assertNotEqual(start_time1, start_time2)
- self.assertIn('Running', info2['State'])
- self.assertEqual(info2['State']['Running'], True)
+ assert start_time1 != start_time2
+ assert 'Running' in info2['State']
+ assert info2['State']['Running'] is True
self.client.kill(id)
def test_restart_with_dict_instead_of_id(self):
container = self.client.create_container(BUSYBOX, ['sleep', '9999'])
- self.assertIn('Id', container)
+ assert 'Id' in container
id = container['Id']
self.client.start(container)
self.tmp_containers.append(id)
info = self.client.inspect_container(id)
- self.assertIn('State', info)
- self.assertIn('StartedAt', info['State'])
+ assert 'State' in info
+ assert 'StartedAt' in info['State']
start_time1 = info['State']['StartedAt']
self.client.restart(container, timeout=2)
info2 = self.client.inspect_container(id)
- self.assertIn('State', info2)
- self.assertIn('StartedAt', info2['State'])
+ assert 'State' in info2
+ assert 'StartedAt' in info2['State']
start_time2 = info2['State']['StartedAt']
- self.assertNotEqual(start_time1, start_time2)
- self.assertIn('Running', info2['State'])
- self.assertEqual(info2['State']['Running'], True)
+ assert start_time1 != start_time2
+ assert 'Running' in info2['State']
+ assert info2['State']['Running'] is True
self.client.kill(id)
@@ -1142,7 +1160,7 @@ class RemoveContainerTest(BaseAPIIntegrationTest):
self.client.remove_container(id)
containers = self.client.containers(all=True)
res = [x for x in containers if 'Id' in x and x['Id'].startswith(id)]
- self.assertEqual(len(res), 0)
+ assert len(res) == 0
def test_remove_with_dict_instead_of_id(self):
container = self.client.create_container(BUSYBOX, ['true'])
@@ -1152,7 +1170,7 @@ class RemoveContainerTest(BaseAPIIntegrationTest):
self.client.remove_container(container)
containers = self.client.containers(all=True)
res = [x for x in containers if 'Id' in x and x['Id'].startswith(id)]
- self.assertEqual(len(res), 0)
+ assert len(res) == 0
class AttachContainerTest(BaseAPIIntegrationTest):
@@ -1163,7 +1181,7 @@ class AttachContainerTest(BaseAPIIntegrationTest):
self.tmp_containers.append(id)
self.client.start(id)
sock = self.client.attach_socket(container, ws=False)
- self.assertTrue(sock.fileno() > -1)
+ assert sock.fileno() > -1
def test_run_container_reading_socket(self):
line = 'hi there and stuff and things, words!'
@@ -1180,9 +1198,9 @@ class AttachContainerTest(BaseAPIIntegrationTest):
self.client.start(container)
next_size = next_frame_size(pty_stdout)
- self.assertEqual(next_size, len(line))
+ assert next_size == len(line)
data = read_exactly(pty_stdout, next_size)
- self.assertEqual(data.decode('utf-8'), line)
+ assert data.decode('utf-8') == line
def test_attach_no_stream(self):
container = self.client.create_container(
@@ -1193,6 +1211,57 @@ class AttachContainerTest(BaseAPIIntegrationTest):
output = self.client.attach(container, stream=False, logs=True)
assert output == 'hello\n'.encode(encoding='ascii')
+ def test_detach_with_default(self):
+ container = self.client.create_container(
+ BUSYBOX, 'cat',
+ detach=True, stdin_open=True, tty=True
+ )
+ self.tmp_containers.append(container)
+ self.client.start(container)
+
+ sock = self.client.attach_socket(
+ container,
+ {'stdin': True, 'stream': True}
+ )
+
+ assert_cat_socket_detached_with_keys(
+ sock, [ctrl_with('p'), ctrl_with('q')]
+ )
+
+ def test_detach_with_config_file(self):
+ self.client._general_configs['detachKeys'] = 'ctrl-p'
+
+ container = self.client.create_container(
+ BUSYBOX, 'cat',
+ detach=True, stdin_open=True, tty=True
+ )
+ self.tmp_containers.append(container)
+ self.client.start(container)
+
+ sock = self.client.attach_socket(
+ container,
+ {'stdin': True, 'stream': True}
+ )
+
+ assert_cat_socket_detached_with_keys(sock, [ctrl_with('p')])
+
+ def test_detach_with_arg(self):
+ self.client._general_configs['detachKeys'] = 'ctrl-p'
+
+ container = self.client.create_container(
+ BUSYBOX, 'cat',
+ detach=True, stdin_open=True, tty=True
+ )
+ self.tmp_containers.append(container)
+ self.client.start(container)
+
+ sock = self.client.attach_socket(
+ container,
+ {'stdin': True, 'stream': True, 'detachKeys': 'ctrl-x'}
+ )
+
+ assert_cat_socket_detached_with_keys(sock, [ctrl_with('x')])
+
class PauseTest(BaseAPIIntegrationTest):
def test_pause_unpause(self):
@@ -1202,25 +1271,25 @@ class PauseTest(BaseAPIIntegrationTest):
self.client.start(container)
self.client.pause(id)
container_info = self.client.inspect_container(id)
- self.assertIn('State', container_info)
+ assert 'State' in container_info
state = container_info['State']
- self.assertIn('ExitCode', state)
- self.assertEqual(state['ExitCode'], 0)
- self.assertIn('Running', state)
- self.assertEqual(state['Running'], True)
- self.assertIn('Paused', state)
- self.assertEqual(state['Paused'], True)
+ assert 'ExitCode' in state
+ assert state['ExitCode'] == 0
+ assert 'Running' in state
+ assert state['Running'] is True
+ assert 'Paused' in state
+ assert state['Paused'] is True
self.client.unpause(id)
container_info = self.client.inspect_container(id)
- self.assertIn('State', container_info)
+ assert 'State' in container_info
state = container_info['State']
- self.assertIn('ExitCode', state)
- self.assertEqual(state['ExitCode'], 0)
- self.assertIn('Running', state)
- self.assertEqual(state['Running'], True)
- self.assertIn('Paused', state)
- self.assertEqual(state['Paused'], False)
+ assert 'ExitCode' in state
+ assert state['ExitCode'] == 0
+ assert 'Running' in state
+ assert state['Running'] is True
+ assert 'Paused' in state
+ assert state['Paused'] is False
class PruneTest(BaseAPIIntegrationTest):
@@ -1240,7 +1309,6 @@ class PruneTest(BaseAPIIntegrationTest):
class GetContainerStatsTest(BaseAPIIntegrationTest):
- @requires_api_version('1.19')
def test_get_container_stats_no_stream(self):
container = self.client.create_container(
BUSYBOX, ['sleep', '60'],
@@ -1250,12 +1318,11 @@ class GetContainerStatsTest(BaseAPIIntegrationTest):
response = self.client.stats(container, stream=0)
self.client.kill(container)
- self.assertEqual(type(response), dict)
+ assert type(response) == dict
for key in ['read', 'networks', 'precpu_stats', 'cpu_stats',
'memory_stats', 'blkio_stats']:
- self.assertIn(key, response)
+ assert key in response
- @requires_api_version('1.17')
def test_get_container_stats_stream(self):
container = self.client.create_container(
BUSYBOX, ['sleep', '60'],
@@ -1264,10 +1331,10 @@ class GetContainerStatsTest(BaseAPIIntegrationTest):
self.client.start(container)
stream = self.client.stats(container)
for chunk in stream:
- self.assertEqual(type(chunk), dict)
+ assert type(chunk) == dict
for key in ['read', 'network', 'precpu_stats', 'cpu_stats',
'memory_stats', 'blkio_stats']:
- self.assertIn(key, chunk)
+ assert key in chunk
class ContainerUpdateTest(BaseAPIIntegrationTest):
@@ -1284,7 +1351,7 @@ class ContainerUpdateTest(BaseAPIIntegrationTest):
self.client.start(container)
self.client.update_container(container, mem_limit=new_mem_limit)
inspect_data = self.client.inspect_container(container)
- self.assertEqual(inspect_data['HostConfig']['Memory'], new_mem_limit)
+ assert inspect_data['HostConfig']['Memory'] == new_mem_limit
@requires_api_version('1.23')
def test_restart_policy_update(self):
@@ -1307,18 +1374,17 @@ class ContainerUpdateTest(BaseAPIIntegrationTest):
self.client.update_container(container,
restart_policy=new_restart_policy)
inspect_data = self.client.inspect_container(container)
- self.assertEqual(
- inspect_data['HostConfig']['RestartPolicy']['MaximumRetryCount'],
+ assert (
+ inspect_data['HostConfig']['RestartPolicy']['MaximumRetryCount'] ==
new_restart_policy['MaximumRetryCount']
)
- self.assertEqual(
- inspect_data['HostConfig']['RestartPolicy']['Name'],
+ assert (
+ inspect_data['HostConfig']['RestartPolicy']['Name'] ==
new_restart_policy['Name']
)
class ContainerCPUTest(BaseAPIIntegrationTest):
- @requires_api_version('1.18')
def test_container_cpu_shares(self):
cpu_shares = 512
container = self.client.create_container(
@@ -1329,9 +1395,8 @@ class ContainerCPUTest(BaseAPIIntegrationTest):
self.tmp_containers.append(container)
self.client.start(container)
inspect_data = self.client.inspect_container(container)
- self.assertEqual(inspect_data['HostConfig']['CpuShares'], 512)
+ assert inspect_data['HostConfig']['CpuShares'] == 512
- @requires_api_version('1.18')
def test_container_cpuset(self):
cpuset_cpus = "0,1"
container = self.client.create_container(
@@ -1342,7 +1407,7 @@ class ContainerCPUTest(BaseAPIIntegrationTest):
self.tmp_containers.append(container)
self.client.start(container)
inspect_data = self.client.inspect_container(container)
- self.assertEqual(inspect_data['HostConfig']['CpusetCpus'], cpuset_cpus)
+ assert inspect_data['HostConfig']['CpusetCpus'] == cpuset_cpus
@requires_api_version('1.25')
def test_create_with_runtime(self):
@@ -1386,11 +1451,11 @@ class LinkTest(BaseAPIIntegrationTest):
# Link is gone
containers = self.client.containers(all=True)
retrieved = [x for x in containers if link_name in x['Names']]
- self.assertEqual(len(retrieved), 0)
+ assert len(retrieved) == 0
# Containers are still there
retrieved = [
x for x in containers if x['Id'].startswith(container1_id) or
x['Id'].startswith(container2_id)
]
- self.assertEqual(len(retrieved), 2)
+ assert len(retrieved) == 2
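
Note: the api_container_test hunks above all track the same breaking change in docker-py 3.0: APIClient.wait() now returns the engine's JSON response (a dict carrying 'StatusCode') instead of a bare integer, gains a condition parameter on API 1.30+, and logs() accepts an until filter on API 1.35+. A minimal sketch of the new call pattern, assuming a reachable daemon and a locally available busybox image (the image name is only an example):

import docker

client = docker.APIClient(version='auto')
ctnr = client.create_container('busybox', ['true'])
client.start(ctnr)
result = client.wait(ctnr)            # 3.x: {'StatusCode': 0, ...}, not an int
assert result['StatusCode'] == 0
# wait() can also block on other lifecycle states on API >= 1.30, e.g.:
#   client.wait(ctnr, condition='removed', timeout=5)
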
diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py
index 7a65041..1a5a4e5 100644
--- a/tests/integration/api_exec_test.py
+++ b/tests/integration/api_exec_test.py
@@ -2,7 +2,9 @@ from docker.utils.socket import next_frame_size
from docker.utils.socket import read_exactly
from .base import BaseAPIIntegrationTest, BUSYBOX
-from ..helpers import requires_api_version
+from ..helpers import (
+ requires_api_version, ctrl_with, assert_cat_socket_detached_with_keys
+)
class ExecTest(BaseAPIIntegrationTest):
@@ -14,10 +16,10 @@ class ExecTest(BaseAPIIntegrationTest):
self.tmp_containers.append(id)
res = self.client.exec_create(id, ['echo', 'hello'])
- self.assertIn('Id', res)
+ assert 'Id' in res
exec_log = self.client.exec_start(res)
- self.assertEqual(exec_log, b'hello\n')
+ assert exec_log == b'hello\n'
def test_exec_command_string(self):
container = self.client.create_container(BUSYBOX, 'cat',
@@ -27,10 +29,10 @@ class ExecTest(BaseAPIIntegrationTest):
self.tmp_containers.append(id)
res = self.client.exec_create(id, 'echo hello world')
- self.assertIn('Id', res)
+ assert 'Id' in res
exec_log = self.client.exec_start(res)
- self.assertEqual(exec_log, b'hello world\n')
+ assert exec_log == b'hello world\n'
def test_exec_command_as_user(self):
container = self.client.create_container(BUSYBOX, 'cat',
@@ -40,10 +42,10 @@ class ExecTest(BaseAPIIntegrationTest):
self.tmp_containers.append(id)
res = self.client.exec_create(id, 'whoami', user='default')
- self.assertIn('Id', res)
+ assert 'Id' in res
exec_log = self.client.exec_start(res)
- self.assertEqual(exec_log, b'default\n')
+ assert exec_log == b'default\n'
def test_exec_command_as_root(self):
container = self.client.create_container(BUSYBOX, 'cat',
@@ -53,10 +55,10 @@ class ExecTest(BaseAPIIntegrationTest):
self.tmp_containers.append(id)
res = self.client.exec_create(id, 'whoami')
- self.assertIn('Id', res)
+ assert 'Id' in res
exec_log = self.client.exec_start(res)
- self.assertEqual(exec_log, b'root\n')
+ assert exec_log == b'root\n'
def test_exec_command_streaming(self):
container = self.client.create_container(BUSYBOX, 'cat',
@@ -66,12 +68,12 @@ class ExecTest(BaseAPIIntegrationTest):
self.client.start(id)
exec_id = self.client.exec_create(id, ['echo', 'hello\nworld'])
- self.assertIn('Id', exec_id)
+ assert 'Id' in exec_id
res = b''
for chunk in self.client.exec_start(exec_id, stream=True):
res += chunk
- self.assertEqual(res, b'hello\nworld\n')
+ assert res == b'hello\nworld\n'
def test_exec_start_socket(self):
container = self.client.create_container(BUSYBOX, 'cat',
@@ -84,15 +86,15 @@ class ExecTest(BaseAPIIntegrationTest):
# `echo` appends CRLF, `printf` doesn't
exec_id = self.client.exec_create(
container_id, ['printf', line], tty=True)
- self.assertIn('Id', exec_id)
+ assert 'Id' in exec_id
socket = self.client.exec_start(exec_id, socket=True)
self.addCleanup(socket.close)
next_size = next_frame_size(socket)
- self.assertEqual(next_size, len(line))
+ assert next_size == len(line)
data = read_exactly(socket, next_size)
- self.assertEqual(data.decode('utf-8'), line)
+ assert data.decode('utf-8') == line
def test_exec_start_detached(self):
container = self.client.create_container(BUSYBOX, 'cat',
@@ -103,11 +105,11 @@ class ExecTest(BaseAPIIntegrationTest):
exec_id = self.client.exec_create(
container_id, ['printf', "asdqwe"])
- self.assertIn('Id', exec_id)
+ assert 'Id' in exec_id
response = self.client.exec_start(exec_id, detach=True)
- self.assertEqual(response, "")
+ assert response == ""
def test_exec_inspect(self):
container = self.client.create_container(BUSYBOX, 'cat',
@@ -117,11 +119,11 @@ class ExecTest(BaseAPIIntegrationTest):
self.tmp_containers.append(id)
exec_id = self.client.exec_create(id, ['mkdir', '/does/not/exist'])
- self.assertIn('Id', exec_id)
+ assert 'Id' in exec_id
self.client.exec_start(exec_id)
exec_info = self.client.exec_inspect(exec_id)
- self.assertIn('ExitCode', exec_info)
- self.assertNotEqual(exec_info['ExitCode'], 0)
+ assert 'ExitCode' in exec_info
+ assert exec_info['ExitCode'] != 0
@requires_api_version('1.25')
def test_exec_command_with_env(self):
@@ -136,3 +138,68 @@ class ExecTest(BaseAPIIntegrationTest):
exec_log = self.client.exec_start(res)
assert b'X=Y\n' in exec_log
+
+ @requires_api_version('1.35')
+ def test_exec_command_with_workdir(self):
+ container = self.client.create_container(
+ BUSYBOX, 'cat', detach=True, stdin_open=True
+ )
+ self.tmp_containers.append(container)
+ self.client.start(container)
+
+ res = self.client.exec_create(container, 'pwd', workdir='/var/www')
+ exec_log = self.client.exec_start(res)
+ assert exec_log == b'/var/www\n'
+
+ def test_detach_with_default(self):
+ container = self.client.create_container(
+ BUSYBOX, 'cat', detach=True, stdin_open=True
+ )
+ id = container['Id']
+ self.client.start(id)
+ self.tmp_containers.append(id)
+
+ exec_id = self.client.exec_create(
+ id, 'cat', stdin=True, tty=True, stdout=True
+ )
+ sock = self.client.exec_start(exec_id, tty=True, socket=True)
+ self.addCleanup(sock.close)
+
+ assert_cat_socket_detached_with_keys(
+ sock, [ctrl_with('p'), ctrl_with('q')]
+ )
+
+ def test_detach_with_config_file(self):
+ self.client._general_configs['detachKeys'] = 'ctrl-p'
+ container = self.client.create_container(
+ BUSYBOX, 'cat', detach=True, stdin_open=True
+ )
+ id = container['Id']
+ self.client.start(id)
+ self.tmp_containers.append(id)
+
+ exec_id = self.client.exec_create(
+ id, 'cat', stdin=True, tty=True, stdout=True
+ )
+ sock = self.client.exec_start(exec_id, tty=True, socket=True)
+ self.addCleanup(sock.close)
+
+ assert_cat_socket_detached_with_keys(sock, [ctrl_with('p')])
+
+ def test_detach_with_arg(self):
+ self.client._general_configs['detachKeys'] = 'ctrl-p'
+ container = self.client.create_container(
+ BUSYBOX, 'cat', detach=True, stdin_open=True
+ )
+ id = container['Id']
+ self.client.start(id)
+ self.tmp_containers.append(id)
+
+ exec_id = self.client.exec_create(
+ id, 'cat',
+ stdin=True, tty=True, detach_keys='ctrl-x', stdout=True
+ )
+ sock = self.client.exec_start(exec_id, tty=True, socket=True)
+ self.addCleanup(sock.close)
+
+ assert_cat_socket_detached_with_keys(sock, [ctrl_with('x')])
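
As a hedged illustration of the exec additions exercised above: exec_create() gained a workdir keyword (engine API 1.35+), and the detach tests rely on either the client's detachKeys config or an explicit detach_keys argument. The sketch below mirrors the new workdir test; the image name is a placeholder:

import docker

client = docker.APIClient(version='auto')
ctnr = client.create_container('busybox', 'cat', detach=True, stdin_open=True)
client.start(ctnr)
exec_id = client.exec_create(ctnr, 'pwd', workdir='/var/www')  # workdir needs API >= 1.35
assert client.exec_start(exec_id) == b'/var/www\n'
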
diff --git a/tests/integration/api_healthcheck_test.py b/tests/integration/api_healthcheck_test.py
index 211042d..5dbac37 100644
--- a/tests/integration/api_healthcheck_test.py
+++ b/tests/integration/api_healthcheck_test.py
@@ -20,8 +20,9 @@ class HealthcheckTest(BaseAPIIntegrationTest):
self.tmp_containers.append(container)
res = self.client.inspect_container(container)
- assert res['Config']['Healthcheck']['Test'] == \
- ['CMD-SHELL', 'echo "hello world"']
+ assert res['Config']['Healthcheck']['Test'] == [
+ 'CMD-SHELL', 'echo "hello world"'
+ ]
@helpers.requires_api_version('1.24')
def test_healthcheck_passes(self):
diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py
index 14fb77a..ab638c9 100644
--- a/tests/integration/api_image_test.py
+++ b/tests/integration/api_image_test.py
@@ -14,26 +14,26 @@ from six.moves import socketserver
import docker
-from ..helpers import requires_api_version
+from ..helpers import requires_api_version, requires_experimental
from .base import BaseAPIIntegrationTest, BUSYBOX
class ListImagesTest(BaseAPIIntegrationTest):
def test_images(self):
res1 = self.client.images(all=True)
- self.assertIn('Id', res1[0])
+ assert 'Id' in res1[0]
res10 = res1[0]
- self.assertIn('Created', res10)
- self.assertIn('RepoTags', res10)
+ assert 'Created' in res10
+ assert 'RepoTags' in res10
distinct = []
for img in res1:
if img['Id'] not in distinct:
distinct.append(img['Id'])
- self.assertEqual(len(distinct), self.client.info()['Images'])
+ assert len(distinct) == self.client.info()['Images']
def test_images_quiet(self):
res1 = self.client.images(quiet=True)
- self.assertEqual(type(res1[0]), six.text_type)
+ assert type(res1[0]) == six.text_type
class PullImageTest(BaseAPIIntegrationTest):
@@ -44,12 +44,10 @@ class PullImageTest(BaseAPIIntegrationTest):
pass
res = self.client.pull('hello-world', tag='latest')
self.tmp_imgs.append('hello-world')
- self.assertEqual(type(res), six.text_type)
- self.assertGreaterEqual(
- len(self.client.images('hello-world')), 1
- )
+ assert type(res) == six.text_type
+ assert len(self.client.images('hello-world')) >= 1
img_info = self.client.inspect_image('hello-world')
- self.assertIn('Id', img_info)
+ assert 'Id' in img_info
def test_pull_streaming(self):
try:
@@ -61,11 +59,18 @@ class PullImageTest(BaseAPIIntegrationTest):
self.tmp_imgs.append('hello-world')
for chunk in stream:
assert isinstance(chunk, dict)
- self.assertGreaterEqual(
- len(self.client.images('hello-world')), 1
- )
+ assert len(self.client.images('hello-world')) >= 1
img_info = self.client.inspect_image('hello-world')
- self.assertIn('Id', img_info)
+ assert 'Id' in img_info
+
+ @requires_api_version('1.32')
+ @requires_experimental(until=None)
+ def test_pull_invalid_platform(self):
+ with pytest.raises(docker.errors.APIError) as excinfo:
+ self.client.pull('hello-world', platform='foobar')
+
+ assert excinfo.value.status_code == 500
+ assert 'invalid platform' in excinfo.exconly()
class CommitTest(BaseAPIIntegrationTest):
@@ -75,18 +80,18 @@ class CommitTest(BaseAPIIntegrationTest):
self.client.start(id)
self.tmp_containers.append(id)
res = self.client.commit(id)
- self.assertIn('Id', res)
+ assert 'Id' in res
img_id = res['Id']
self.tmp_imgs.append(img_id)
img = self.client.inspect_image(img_id)
- self.assertIn('Container', img)
- self.assertTrue(img['Container'].startswith(id))
- self.assertIn('ContainerConfig', img)
- self.assertIn('Image', img['ContainerConfig'])
- self.assertEqual(BUSYBOX, img['ContainerConfig']['Image'])
+ assert 'Container' in img
+ assert img['Container'].startswith(id)
+ assert 'ContainerConfig' in img
+ assert 'Image' in img['ContainerConfig']
+ assert BUSYBOX == img['ContainerConfig']['Image']
busybox_id = self.client.inspect_image(BUSYBOX)['Id']
- self.assertIn('Parent', img)
- self.assertEqual(img['Parent'], busybox_id)
+ assert 'Parent' in img
+ assert img['Parent'] == busybox_id
def test_commit_with_changes(self):
cid = self.client.create_container(BUSYBOX, ['touch', '/test'])
@@ -110,14 +115,14 @@ class RemoveImageTest(BaseAPIIntegrationTest):
self.client.start(id)
self.tmp_containers.append(id)
res = self.client.commit(id)
- self.assertIn('Id', res)
+ assert 'Id' in res
img_id = res['Id']
self.tmp_imgs.append(img_id)
logs = self.client.remove_image(img_id, force=True)
- self.assertIn({"Deleted": img_id}, logs)
+ assert {"Deleted": img_id} in logs
images = self.client.images(all=True)
res = [x for x in images if x['Id'].startswith(img_id)]
- self.assertEqual(len(res), 0)
+ assert len(res) == 0
class ImportImageTest(BaseAPIIntegrationTest):
@@ -171,7 +176,7 @@ class ImportImageTest(BaseAPIIntegrationTest):
result_text = statuses.splitlines()[-1]
result = json.loads(result_text)
- self.assertNotIn('error', result)
+ assert 'error' not in result
img_id = result['status']
self.tmp_imgs.append(img_id)
@@ -186,9 +191,9 @@ class ImportImageTest(BaseAPIIntegrationTest):
result_text = statuses.splitlines()[-1]
result = json.loads(result_text)
- self.assertNotIn('error', result)
+ assert 'error' not in result
- self.assertIn('status', result)
+ assert 'status' in result
img_id = result['status']
self.tmp_imgs.append(img_id)
@@ -201,9 +206,9 @@ class ImportImageTest(BaseAPIIntegrationTest):
result_text = statuses.splitlines()[-1]
result = json.loads(result_text)
- self.assertNotIn('error', result)
+ assert 'error' not in result
- self.assertIn('status', result)
+ assert 'status' in result
img_id = result['status']
self.tmp_imgs.append(img_id)
@@ -296,9 +301,9 @@ class ImportImageTest(BaseAPIIntegrationTest):
result_text = statuses.splitlines()[-1]
result = json.loads(result_text)
- self.assertNotIn('error', result)
+ assert 'error' not in result
- self.assertIn('status', result)
+ assert 'status' in result
img_id = result['status']
self.tmp_imgs.append(img_id)
@@ -320,7 +325,7 @@ class PruneImagesTest(BaseAPIIntegrationTest):
img_id = self.client.inspect_image('hello-world')['Id']
result = self.client.prune_images()
assert img_id not in [
- img.get('Deleted') for img in result['ImagesDeleted']
+ img.get('Deleted') for img in result.get('ImagesDeleted') or []
]
result = self.client.prune_images({'dangling': False})
assert result['SpaceReclaimed'] > 0
@@ -330,3 +335,25 @@ class PruneImagesTest(BaseAPIIntegrationTest):
assert img_id in [
img.get('Deleted') for img in result['ImagesDeleted']
]
+
+
+class SaveLoadImagesTest(BaseAPIIntegrationTest):
+ @requires_api_version('1.23')
+ def test_get_image_load_image(self):
+ with tempfile.TemporaryFile() as f:
+ stream = self.client.get_image(BUSYBOX)
+ for chunk in stream:
+ f.write(chunk)
+
+ f.seek(0)
+ result = self.client.load_image(f.read())
+
+ success = False
+ result_line = 'Loaded image: {}\n'.format(BUSYBOX)
+ for data in result:
+ print(data)
+ if 'stream' in data:
+ if data['stream'] == result_line:
+ success = True
+ break
+ assert success is True
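
A rough sketch of the save/load round trip the new SaveLoadImagesTest covers: get_image() streams the image as a tar archive in chunks, and load_image() yields progress dicts such as {'stream': 'Loaded image: ...'}. The image name below is a placeholder:

import tempfile

import docker

client = docker.APIClient(version='auto')
with tempfile.TemporaryFile() as f:
    for chunk in client.get_image('busybox'):   # streamed tar archive
        f.write(chunk)
    f.seek(0)
    for line in client.load_image(f.read()):    # progress dicts from the daemon
        if 'stream' in line:
            print(line['stream'])
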
diff --git a/tests/integration/api_network_test.py b/tests/integration/api_network_test.py
index 10e09dd..b6726d0 100644
--- a/tests/integration/api_network_test.py
+++ b/tests/integration/api_network_test.py
@@ -17,33 +17,30 @@ class TestNetworks(BaseAPIIntegrationTest):
self.tmp_networks.append(net_id)
return (net_name, net_id)
- @requires_api_version('1.21')
def test_list_networks(self):
networks = self.client.networks()
net_name, net_id = self.create_network()
networks = self.client.networks()
- self.assertTrue(net_id in [n['Id'] for n in networks])
+ assert net_id in [n['Id'] for n in networks]
networks_by_name = self.client.networks(names=[net_name])
- self.assertEqual([n['Id'] for n in networks_by_name], [net_id])
+ assert [n['Id'] for n in networks_by_name] == [net_id]
networks_by_partial_id = self.client.networks(ids=[net_id[:8]])
- self.assertEqual([n['Id'] for n in networks_by_partial_id], [net_id])
+ assert [n['Id'] for n in networks_by_partial_id] == [net_id]
- @requires_api_version('1.21')
def test_inspect_network(self):
net_name, net_id = self.create_network()
net = self.client.inspect_network(net_id)
- self.assertEqual(net['Id'], net_id)
- self.assertEqual(net['Name'], net_name)
- self.assertEqual(net['Driver'], 'bridge')
- self.assertEqual(net['Scope'], 'local')
- self.assertEqual(net['IPAM']['Driver'], 'default')
+ assert net['Id'] == net_id
+ assert net['Name'] == net_name
+ assert net['Driver'] == 'bridge'
+ assert net['Scope'] == 'local'
+ assert net['IPAM']['Driver'] == 'default'
- @requires_api_version('1.21')
def test_create_network_with_ipam_config(self):
_, net_id = self.create_network(
ipam=IPAMConfig(
@@ -81,12 +78,10 @@ class TestNetworks(BaseAPIIntegrationTest):
},
}]
- @requires_api_version('1.21')
def test_create_network_with_host_driver_fails(self):
with pytest.raises(docker.errors.APIError):
self.client.create_network(random_name(), driver='host')
- @requires_api_version('1.21')
def test_remove_network(self):
net_name, net_id = self.create_network()
assert net_name in [n['Name'] for n in self.client.networks()]
@@ -94,7 +89,6 @@ class TestNetworks(BaseAPIIntegrationTest):
self.client.remove_network(net_id)
assert net_name not in [n['Name'] for n in self.client.networks()]
- @requires_api_version('1.21')
def test_connect_and_disconnect_container(self):
net_name, net_id = self.create_network()
@@ -103,21 +97,20 @@ class TestNetworks(BaseAPIIntegrationTest):
self.client.start(container)
network_data = self.client.inspect_network(net_id)
- self.assertFalse(network_data.get('Containers'))
+ assert not network_data.get('Containers')
self.client.connect_container_to_network(container, net_id)
network_data = self.client.inspect_network(net_id)
- self.assertEqual(
- list(network_data['Containers'].keys()),
- [container['Id']]
- )
+ assert list(network_data['Containers'].keys()) == [
+ container['Id']
+ ]
with pytest.raises(docker.errors.APIError):
self.client.connect_container_to_network(container, net_id)
self.client.disconnect_container_from_network(container, net_id)
network_data = self.client.inspect_network(net_id)
- self.assertFalse(network_data.get('Containers'))
+ assert not network_data.get('Containers')
with pytest.raises(docker.errors.APIError):
self.client.disconnect_container_from_network(container, net_id)
@@ -131,18 +124,16 @@ class TestNetworks(BaseAPIIntegrationTest):
self.client.start(container)
network_data = self.client.inspect_network(net_id)
- self.assertFalse(network_data.get('Containers'))
+ assert not network_data.get('Containers')
self.client.connect_container_to_network(container, net_id)
network_data = self.client.inspect_network(net_id)
- self.assertEqual(
- list(network_data['Containers'].keys()),
+ assert list(network_data['Containers'].keys()) == \
[container['Id']]
- )
self.client.disconnect_container_from_network(container, net_id, True)
network_data = self.client.inspect_network(net_id)
- self.assertFalse(network_data.get('Containers'))
+ assert not network_data.get('Containers')
with pytest.raises(docker.errors.APIError):
self.client.disconnect_container_from_network(
@@ -166,7 +157,6 @@ class TestNetworks(BaseAPIIntegrationTest):
assert 'foo' in aliases
assert 'bar' in aliases
- @requires_api_version('1.21')
def test_connect_on_container_create(self):
net_name, net_id = self.create_network()
@@ -179,13 +169,12 @@ class TestNetworks(BaseAPIIntegrationTest):
self.client.start(container)
network_data = self.client.inspect_network(net_id)
- self.assertEqual(
- list(network_data['Containers'].keys()),
- [container['Id']])
+ assert list(network_data['Containers'].keys()) == \
+ [container['Id']]
self.client.disconnect_container_from_network(container, net_id)
network_data = self.client.inspect_network(net_id)
- self.assertFalse(network_data.get('Containers'))
+ assert not network_data.get('Containers')
@requires_api_version('1.22')
def test_create_with_aliases(self):
@@ -233,14 +222,11 @@ class TestNetworks(BaseAPIIntegrationTest):
self.tmp_containers.append(container)
self.client.start(container)
- container_data = self.client.inspect_container(container)
- self.assertEqual(
- container_data[
- 'NetworkSettings']['Networks'][net_name]['IPAMConfig'][
- 'IPv4Address'
- ],
- '132.124.0.23'
- )
+ net_settings = self.client.inspect_container(container)[
+ 'NetworkSettings'
+ ]
+ assert net_settings['Networks'][net_name]['IPAMConfig']['IPv4Address']\
+ == '132.124.0.23'
@requires_api_version('1.22')
def test_create_with_ipv6_address(self):
@@ -262,14 +248,11 @@ class TestNetworks(BaseAPIIntegrationTest):
self.tmp_containers.append(container)
self.client.start(container)
- container_data = self.client.inspect_container(container)
- self.assertEqual(
- container_data[
- 'NetworkSettings']['Networks'][net_name]['IPAMConfig'][
- 'IPv6Address'
- ],
- '2001:389::f00d'
- )
+ net_settings = self.client.inspect_container(container)[
+ 'NetworkSettings'
+ ]
+ assert net_settings['Networks'][net_name]['IPAMConfig']['IPv6Address']\
+ == '2001:389::f00d'
@requires_api_version('1.24')
def test_create_with_linklocal_ips(self):
@@ -305,10 +288,12 @@ class TestNetworks(BaseAPIIntegrationTest):
}),
)
- container_data = self.client.inspect_container(container)
- self.assertEqual(
- container_data['NetworkSettings']['Networks'][net_name]['Links'],
- ['docker-py-test-upstream:bar'])
+ net_settings = self.client.inspect_container(container)[
+ 'NetworkSettings'
+ ]
+ assert net_settings['Networks'][net_name]['Links'] == [
+ 'docker-py-test-upstream:bar'
+ ]
self.create_and_start(
name='docker-py-test-upstream',
@@ -317,10 +302,9 @@ class TestNetworks(BaseAPIIntegrationTest):
self.execute(container, ['nslookup', 'bar'])
- @requires_api_version('1.21')
def test_create_check_duplicate(self):
net_name, net_id = self.create_network()
- with self.assertRaises(docker.errors.APIError):
+ with pytest.raises(docker.errors.APIError):
self.client.create_network(net_name, check_duplicate=True)
net_id = self.client.create_network(net_name, check_duplicate=False)
self.tmp_networks.append(net_id['Id'])
@@ -337,10 +321,12 @@ class TestNetworks(BaseAPIIntegrationTest):
container, net_name,
links=[('docker-py-test-upstream', 'bar')])
- container_data = self.client.inspect_container(container)
- self.assertEqual(
- container_data['NetworkSettings']['Networks'][net_name]['Links'],
- ['docker-py-test-upstream:bar'])
+ net_settings = self.client.inspect_container(container)[
+ 'NetworkSettings'
+ ]
+ assert net_settings['Networks'][net_name]['Links'] == [
+ 'docker-py-test-upstream:bar'
+ ]
self.create_and_start(
name='docker-py-test-upstream',
@@ -373,9 +359,7 @@ class TestNetworks(BaseAPIIntegrationTest):
container_data = self.client.inspect_container(container)
net_data = container_data['NetworkSettings']['Networks'][net_name]
- self.assertEqual(
- net_data['IPAMConfig']['IPv4Address'], '172.28.5.24'
- )
+ assert net_data['IPAMConfig']['IPv4Address'] == '172.28.5.24'
@requires_api_version('1.22')
def test_connect_with_ipv6_address(self):
@@ -401,9 +385,7 @@ class TestNetworks(BaseAPIIntegrationTest):
container_data = self.client.inspect_container(container)
net_data = container_data['NetworkSettings']['Networks'][net_name]
- self.assertEqual(
- net_data['IPAMConfig']['IPv6Address'], '2001:389::f00d'
- )
+ assert net_data['IPAMConfig']['IPv6Address'] == '2001:389::f00d'
@requires_api_version('1.23')
def test_create_internal_networks(self):
@@ -485,7 +467,6 @@ class TestNetworks(BaseAPIIntegrationTest):
with pytest.raises(docker.errors.NotFound):
self.client.inspect_network(net_name_swarm, scope='local')
- @requires_api_version('1.21')
def test_create_remove_network_with_space_in_name(self):
net_id = self.client.create_network('test 01')
self.tmp_networks.append(net_id)
diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py
index 4a2093d..5cc3fc1 100644
--- a/tests/integration/api_service_test.py
+++ b/tests/integration/api_service_test.py
@@ -353,7 +353,6 @@ class ServiceTest(BaseAPIIntegrationTest):
task_tmpl, name=name, endpoint_spec=endpoint_spec
)
svc_info = self.client.inspect_service(svc_id)
- print(svc_info)
ports = svc_info['Spec']['EndpointSpec']['Ports']
for port in ports:
if port['PublishedPort'] == 12562:
@@ -370,6 +369,26 @@ class ServiceTest(BaseAPIIntegrationTest):
assert len(ports) == 3
+ @requires_api_version('1.32')
+ def test_create_service_with_endpoint_spec_host_publish_mode(self):
+ container_spec = docker.types.ContainerSpec(BUSYBOX, ['true'])
+ task_tmpl = docker.types.TaskTemplate(container_spec)
+ name = self.get_service_name()
+ endpoint_spec = docker.types.EndpointSpec(ports={
+ 12357: (1990, None, 'host'),
+ })
+ svc_id = self.client.create_service(
+ task_tmpl, name=name, endpoint_spec=endpoint_spec
+ )
+ svc_info = self.client.inspect_service(svc_id)
+ ports = svc_info['Spec']['EndpointSpec']['Ports']
+ assert len(ports) == 1
+ port = ports[0]
+ assert port['PublishedPort'] == 12357
+ assert port['TargetPort'] == 1990
+ assert port['Protocol'] == 'tcp'
+ assert port['PublishMode'] == 'host'
+
def test_create_service_with_env(self):
container_spec = docker.types.ContainerSpec(
BUSYBOX, ['true'], env={'DOCKER_PY_TEST': 1}
@@ -1096,11 +1115,13 @@ class ServiceTest(BaseAPIIntegrationTest):
)
task_tmpl = docker.types.TaskTemplate(container_spec)
net1 = self.client.create_network(
- 'dockerpytest_1', driver='overlay', ipam={'Driver': 'default'}
+ self.get_service_name(), driver='overlay',
+ ipam={'Driver': 'default'}
)
self.tmp_networks.append(net1['Id'])
net2 = self.client.create_network(
- 'dockerpytest_2', driver='overlay', ipam={'Driver': 'default'}
+ self.get_service_name(), driver='overlay',
+ ipam={'Driver': 'default'}
)
self.tmp_networks.append(net2['Id'])
name = self.get_service_name()
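
The new host-publish-mode test relies on the extended EndpointSpec port syntax added in 3.0, where a mapping value may be a (target_port, protocol, publish_mode) tuple rather than a plain integer. A minimal sketch of just the spec construction, using the same values as the test above:

import docker

# {published_port: (target_port, protocol, publish_mode)}; None falls back to 'tcp'
endpoint_spec = docker.types.EndpointSpec(ports={12357: (1990, None, 'host')})
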
diff --git a/tests/integration/api_volume_test.py b/tests/integration/api_volume_test.py
index 5a4bb1e..8e7dd3a 100644
--- a/tests/integration/api_volume_test.py
+++ b/tests/integration/api_volume_test.py
@@ -5,16 +5,15 @@ from ..helpers import requires_api_version
from .base import BaseAPIIntegrationTest
-@requires_api_version('1.21')
class TestVolumes(BaseAPIIntegrationTest):
def test_create_volume(self):
name = 'perfectcherryblossom'
self.tmp_volumes.append(name)
result = self.client.create_volume(name)
- self.assertIn('Name', result)
- self.assertEqual(result['Name'], name)
- self.assertIn('Driver', result)
- self.assertEqual(result['Driver'], 'local')
+ assert 'Name' in result
+ assert result['Name'] == name
+ assert 'Driver' in result
+ assert result['Driver'] == 'local'
def test_create_volume_invalid_driver(self):
driver_name = 'invalid.driver'
@@ -27,16 +26,16 @@ class TestVolumes(BaseAPIIntegrationTest):
self.tmp_volumes.append(name)
volume_info = self.client.create_volume(name)
result = self.client.volumes()
- self.assertIn('Volumes', result)
+ assert 'Volumes' in result
volumes = result['Volumes']
- self.assertIn(volume_info, volumes)
+ assert volume_info in volumes
def test_inspect_volume(self):
name = 'embodimentofscarletdevil'
self.tmp_volumes.append(name)
volume_info = self.client.create_volume(name)
result = self.client.inspect_volume(name)
- self.assertEqual(volume_info, result)
+ assert volume_info == result
def test_inspect_nonexistent_volume(self):
name = 'embodimentofscarletdevil'
diff --git a/tests/integration/base.py b/tests/integration/base.py
index 4f92901..c22126d 100644
--- a/tests/integration/base.py
+++ b/tests/integration/base.py
@@ -4,7 +4,6 @@ import unittest
import docker
from docker.utils import kwargs_from_env
-import six
from .. import helpers
@@ -19,9 +18,6 @@ class BaseIntegrationTest(unittest.TestCase):
"""
def setUp(self):
- if six.PY2:
- self.assertRegex = self.assertRegexpMatches
- self.assertCountEqual = self.assertItemsEqual
self.tmp_imgs = []
self.tmp_containers = []
self.tmp_folders = []
@@ -100,7 +96,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
container = self.client.create_container(*args, **kwargs)
self.tmp_containers.append(container)
self.client.start(container)
- exitcode = self.client.wait(container)
+ exitcode = self.client.wait(container)['StatusCode']
if exitcode != 0:
output = self.client.logs(container)
diff --git a/tests/integration/errors_test.py b/tests/integration/errors_test.py
index dc5cef4..ac74d72 100644
--- a/tests/integration/errors_test.py
+++ b/tests/integration/errors_test.py
@@ -1,14 +1,15 @@
from docker.errors import APIError
from .base import BaseAPIIntegrationTest, BUSYBOX
+import pytest
class ErrorsTest(BaseAPIIntegrationTest):
def test_api_error_parses_json(self):
container = self.client.create_container(BUSYBOX, ['sleep', '10'])
self.client.start(container['Id'])
- with self.assertRaises(APIError) as cm:
+ with pytest.raises(APIError) as cm:
self.client.remove_container(container['Id'])
- explanation = cm.exception.explanation
+ explanation = cm.value.explanation
assert 'You cannot remove a running container' in explanation
assert '{"message":' not in explanation
self.client.remove_container(container['Id'], force=True)
diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py
index d246189..a4d9f9c 100644
--- a/tests/integration/models_containers_test.py
+++ b/tests/integration/models_containers_test.py
@@ -10,10 +10,9 @@ class ContainerCollectionTest(BaseIntegrationTest):
def test_run(self):
client = docker.from_env(version=TEST_API_VERSION)
- self.assertEqual(
- client.containers.run("alpine", "echo hello world", remove=True),
- b'hello world\n'
- )
+ assert client.containers.run(
+ "alpine", "echo hello world", remove=True
+ ) == b'hello world\n'
def test_run_detach(self):
client = docker.from_env(version=TEST_API_VERSION)
@@ -24,16 +23,16 @@ class ContainerCollectionTest(BaseIntegrationTest):
def test_run_with_error(self):
client = docker.from_env(version=TEST_API_VERSION)
- with self.assertRaises(docker.errors.ContainerError) as cm:
+ with pytest.raises(docker.errors.ContainerError) as cm:
client.containers.run("alpine", "cat /test", remove=True)
- assert cm.exception.exit_status == 1
- assert "cat /test" in str(cm.exception)
- assert "alpine" in str(cm.exception)
- assert "No such file or directory" in str(cm.exception)
+ assert cm.value.exit_status == 1
+ assert "cat /test" in cm.exconly()
+ assert "alpine" in cm.exconly()
+ assert "No such file or directory" in cm.exconly()
def test_run_with_image_that_does_not_exist(self):
client = docker.from_env(version=TEST_API_VERSION)
- with self.assertRaises(docker.errors.ImageNotFound):
+ with pytest.raises(docker.errors.ImageNotFound):
client.containers.run("dockerpytest_does_not_exist")
def test_run_with_volume(self):
@@ -52,7 +51,7 @@ class ContainerCollectionTest(BaseIntegrationTest):
"alpine", "cat /insidecontainer/test",
volumes=["%s:/insidecontainer" % path]
)
- self.assertEqual(out, b'hello\n')
+ assert out == b'hello\n'
def test_run_with_named_volume(self):
client = docker.from_env(version=TEST_API_VERSION)
@@ -70,7 +69,7 @@ class ContainerCollectionTest(BaseIntegrationTest):
"alpine", "cat /insidecontainer/test",
volumes=["somevolume:/insidecontainer"]
)
- self.assertEqual(out, b'hello\n')
+ assert out == b'hello\n'
def test_run_with_network(self):
net_name = random_name()
@@ -170,10 +169,9 @@ class ContainerTest(BaseIntegrationTest):
self.tmp_containers.append(container.id)
container.wait()
image = container.commit()
- self.assertEqual(
- client.containers.run(image.id, "cat /test", remove=True),
- b"hello\n"
- )
+ assert client.containers.run(
+ image.id, "cat /test", remove=True
+ ) == b"hello\n"
def test_diff(self):
client = docker.from_env(version=TEST_API_VERSION)
@@ -182,13 +180,24 @@ class ContainerTest(BaseIntegrationTest):
container.wait()
assert container.diff() == [{'Path': '/test', 'Kind': 1}]
- def test_exec_run(self):
+ def test_exec_run_success(self):
client = docker.from_env(version=TEST_API_VERSION)
container = client.containers.run(
"alpine", "sh -c 'echo \"hello\" > /test; sleep 60'", detach=True
)
self.tmp_containers.append(container.id)
- assert container.exec_run("cat /test") == b"hello\n"
+ exec_output = container.exec_run("cat /test")
+ assert exec_output[0] == 0
+ assert exec_output[1] == b"hello\n"
+
+ def test_exec_run_failed(self):
+ client = docker.from_env(version=TEST_API_VERSION)
+ container = client.containers.run(
+ "alpine", "sh -c 'sleep 60'", detach=True
+ )
+ self.tmp_containers.append(container.id)
+ exec_output = container.exec_run("docker ps")
+ assert exec_output[0] == 126
def test_kill(self):
client = docker.from_env(version=TEST_API_VERSION)
@@ -300,8 +309,8 @@ class ContainerTest(BaseIntegrationTest):
container = client.containers.run("alpine", "sh -c 'exit 0'",
detach=True)
self.tmp_containers.append(container.id)
- assert container.wait() == 0
+ assert container.wait()['StatusCode'] == 0
container = client.containers.run("alpine", "sh -c 'exit 1'",
detach=True)
self.tmp_containers.append(container.id)
- assert container.wait() == 1
+ assert container.wait()['StatusCode'] == 1
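
In the high-level models API the same 3.0 changes surface as richer return values: Container.exec_run() now returns an (exit_code, output) pair and Container.wait() returns the raw API response dict. A minimal sketch, with the image name as a placeholder:

import docker

client = docker.from_env()
container = client.containers.run(
    'alpine', "sh -c 'echo hello > /test; sleep 60'", detach=True
)
exit_code, output = container.exec_run('cat /test')  # 3.x: tuple, not bare bytes
assert exit_code == 0 and output == b'hello\n'
container.kill()
assert container.wait()['StatusCode'] != 0            # wait() now returns a dict
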
diff --git a/tests/integration/models_images_test.py b/tests/integration/models_images_test.py
index 8f812d9..2fa71a7 100644
--- a/tests/integration/models_images_test.py
+++ b/tests/integration/models_images_test.py
@@ -1,36 +1,39 @@
import io
+import tempfile
import docker
import pytest
-from .base import BaseIntegrationTest, TEST_API_VERSION
+from .base import BaseIntegrationTest, BUSYBOX, TEST_API_VERSION
class ImageCollectionTest(BaseIntegrationTest):
def test_build(self):
client = docker.from_env(version=TEST_API_VERSION)
- image = client.images.build(fileobj=io.BytesIO(
+ image, _ = client.images.build(fileobj=io.BytesIO(
"FROM alpine\n"
"CMD echo hello world".encode('ascii')
))
self.tmp_imgs.append(image.id)
assert client.containers.run(image) == b"hello world\n"
- @pytest.mark.xfail(reason='Engine 1.13 responds with status 500')
+ # @pytest.mark.xfail(reason='Engine 1.13 responds with status 500')
def test_build_with_error(self):
client = docker.from_env(version=TEST_API_VERSION)
- with self.assertRaises(docker.errors.BuildError) as cm:
+ with pytest.raises(docker.errors.BuildError) as cm:
client.images.build(fileobj=io.BytesIO(
"FROM alpine\n"
- "NOTADOCKERFILECOMMAND".encode('ascii')
+ "RUN exit 1".encode('ascii')
))
- assert str(cm.exception) == ("Unknown instruction: "
- "NOTADOCKERFILECOMMAND")
+ assert (
+ "The command '/bin/sh -c exit 1' returned a non-zero code: 1"
+ ) in cm.exconly()
+ assert cm.value.build_log
def test_build_with_multiple_success(self):
client = docker.from_env(version=TEST_API_VERSION)
- image = client.images.build(
+ image, _ = client.images.build(
tag='some-tag', fileobj=io.BytesIO(
"FROM alpine\n"
"CMD echo hello world".encode('ascii')
@@ -41,7 +44,7 @@ class ImageCollectionTest(BaseIntegrationTest):
def test_build_with_success_build_output(self):
client = docker.from_env(version=TEST_API_VERSION)
- image = client.images.build(
+ image, _ = client.images.build(
tag='dup-txt-tag', fileobj=io.BytesIO(
"FROM alpine\n"
"CMD echo Successfully built abcd1234".encode('ascii')
@@ -71,6 +74,31 @@ class ImageCollectionTest(BaseIntegrationTest):
image = client.images.pull('alpine', tag='3.3')
assert 'alpine:3.3' in image.attrs['RepoTags']
+ def test_pull_multiple(self):
+ client = docker.from_env(version=TEST_API_VERSION)
+ images = client.images.pull('hello-world')
+ assert len(images) == 1
+ assert 'hello-world:latest' in images[0].attrs['RepoTags']
+
+ def test_load_error(self):
+ client = docker.from_env(version=TEST_API_VERSION)
+ with pytest.raises(docker.errors.ImageLoadError):
+ client.images.load('abc')
+
+ def test_save_and_load(self):
+ client = docker.from_env(version=TEST_API_VERSION)
+ image = client.images.get(BUSYBOX)
+ with tempfile.TemporaryFile() as f:
+ stream = image.save()
+ for chunk in stream:
+ f.write(chunk)
+
+ f.seek(0)
+ result = client.images.load(f.read())
+
+ assert len(result) == 1
+ assert result[0].id == image.id
+
class ImageTest(BaseIntegrationTest):
diff --git a/tests/integration/models_services_test.py b/tests/integration/models_services_test.py
index ca8be48..cb8eca2 100644
--- a/tests/integration/models_services_test.py
+++ b/tests/integration/models_services_test.py
@@ -1,9 +1,12 @@
import unittest
import docker
+import pytest
from .. import helpers
from .base import TEST_API_VERSION
+from docker.errors import InvalidArgument
+from docker.types.services import ServiceMode
class ServiceTest(unittest.TestCase):
@@ -179,6 +182,32 @@ class ServiceTest(unittest.TestCase):
service.reload()
assert not service.attrs['Spec'].get('Labels')
+ @pytest.mark.xfail(reason='Flaky test')
+ def test_update_retains_networks(self):
+ client = docker.from_env(version=TEST_API_VERSION)
+ network_name = helpers.random_name()
+ network = client.networks.create(
+ network_name, driver='overlay'
+ )
+ service = client.services.create(
+ # create arguments
+ name=helpers.random_name(),
+ networks=[network.id],
+ # ContainerSpec arguments
+ image="alpine",
+ command="sleep 300"
+ )
+ service.reload()
+ service.update(
+ # create argument
+ name=service.name,
+ # ContainerSpec argument
+ command="sleep 600"
+ )
+ service.reload()
+ networks = service.attrs['Spec']['TaskTemplate']['Networks']
+ assert networks == [{'Target': network.id}]
+
def test_scale_service(self):
client = docker.from_env(version=TEST_API_VERSION)
service = client.services.create(
@@ -203,6 +232,49 @@ class ServiceTest(unittest.TestCase):
spec = service.attrs['Spec']['TaskTemplate']['ContainerSpec']
assert spec.get('Command') == ['sleep', '300']
+ def test_scale_method_service(self):
+ client = docker.from_env(version=TEST_API_VERSION)
+ service = client.services.create(
+ # create arguments
+ name=helpers.random_name(),
+ # ContainerSpec arguments
+ image="alpine",
+ command="sleep 300",
+ )
+ tasks = []
+ while len(tasks) == 0:
+ tasks = service.tasks()
+ assert len(tasks) == 1
+ service.scale(2)
+ while len(tasks) == 1:
+ tasks = service.tasks()
+ assert len(tasks) >= 2
+ # check that the container spec is not overridden with None
+ service.reload()
+ spec = service.attrs['Spec']['TaskTemplate']['ContainerSpec']
+ assert spec.get('Command') == ['sleep', '300']
+
+ def test_scale_method_global_service(self):
+ client = docker.from_env(version=TEST_API_VERSION)
+ mode = ServiceMode('global')
+ service = client.services.create(
+ name=helpers.random_name(),
+ image="alpine",
+ command="sleep 300",
+ mode=mode
+ )
+ tasks = []
+ while len(tasks) == 0:
+ tasks = service.tasks()
+ assert len(tasks) == 1
+ with pytest.raises(InvalidArgument):
+ service.scale(2)
+
+ assert len(tasks) == 1
+ service.reload()
+ spec = service.attrs['Spec']['TaskTemplate']['ContainerSpec']
+ assert spec.get('Command') == ['sleep', '300']
+
@helpers.requires_api_version('1.25')
def test_restart_service(self):
client = docker.from_env(version=TEST_API_VERSION)
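
The scale tests above use Service.scale(), new in 3.0: it updates the replica count of a replicated-mode service and raises docker.errors.InvalidArgument for global-mode services. A minimal sketch, assuming the client is connected to a swarm manager and the service name is arbitrary:

import docker

client = docker.from_env()
service = client.services.create(
    image='alpine', command='sleep 300', name='scale-demo'
)
service.scale(2)   # replicated services only; InvalidArgument for mode='global'
service.reload()
assert service.attrs['Spec']['Mode']['Replicated']['Replicas'] == 2
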
diff --git a/tests/integration/models_swarm_test.py b/tests/integration/models_swarm_test.py
index dadd77d..f39f0d3 100644
--- a/tests/integration/models_swarm_test.py
+++ b/tests/integration/models_swarm_test.py
@@ -4,6 +4,7 @@ import docker
from .. import helpers
from .base import TEST_API_VERSION
+import pytest
class SwarmTest(unittest.TestCase):
@@ -24,11 +25,9 @@ class SwarmTest(unittest.TestCase):
assert client.swarm.attrs['Spec']['Raft']['SnapshotInterval'] == 10000
assert client.swarm.id
assert client.swarm.leave(force=True)
- with self.assertRaises(docker.errors.APIError) as cm:
+ with pytest.raises(docker.errors.APIError) as cm:
client.swarm.reload()
assert (
- # FIXME: test for both until
- # https://github.com/docker/docker/issues/29192 is resolved
- cm.exception.response.status_code == 406 or
- cm.exception.response.status_code == 503
+ cm.value.response.status_code == 406 or
+ cm.value.response.status_code == 503
)
diff --git a/tests/integration/regression_test.py b/tests/integration/regression_test.py
index e3e6d9b..0fd4e43 100644
--- a/tests/integration/regression_test.py
+++ b/tests/integration/regression_test.py
@@ -5,15 +5,16 @@ import docker
import six
from .base import BaseAPIIntegrationTest, BUSYBOX
+import pytest
class TestRegressions(BaseAPIIntegrationTest):
def test_443_handle_nonchunked_response_in_stream(self):
dfile = io.BytesIO()
- with self.assertRaises(docker.errors.APIError) as exc:
+ with pytest.raises(docker.errors.APIError) as exc:
for line in self.client.build(fileobj=dfile, tag="a/b/c"):
pass
- self.assertEqual(exc.exception.response.status_code, 500)
+ assert exc.value.response.status_code == 500
dfile.close()
def test_542_truncate_ids_client_side(self):
@@ -21,10 +22,10 @@ class TestRegressions(BaseAPIIntegrationTest):
self.client.create_container(BUSYBOX, ['true'])
)
result = self.client.containers(all=True, trunc=True)
- self.assertEqual(len(result[0]['Id']), 12)
+ assert len(result[0]['Id']) == 12
def test_647_support_doubleslash_in_image_names(self):
- with self.assertRaises(docker.errors.APIError):
+ with pytest.raises(docker.errors.APIError):
self.client.inspect_image('gensokyo.jp//kirisame')
def test_649_handle_timeout_value_none(self):
@@ -53,15 +54,12 @@ class TestRegressions(BaseAPIIntegrationTest):
)
self.tmp_containers.append(ctnr)
self.client.start(ctnr)
- self.assertEqual(
- self.client.port(ctnr, 2000)[0]['HostPort'],
- six.text_type(tcp_port)
- )
- self.assertEqual(
- self.client.port(ctnr, '2000/tcp')[0]['HostPort'],
- six.text_type(tcp_port)
- )
- self.assertEqual(
- self.client.port(ctnr, '2000/udp')[0]['HostPort'],
- six.text_type(udp_port)
- )
+ assert self.client.port(
+ ctnr, 2000
+ )[0]['HostPort'] == six.text_type(tcp_port)
+ assert self.client.port(
+ ctnr, '2000/tcp'
+ )[0]['HostPort'] == six.text_type(tcp_port)
+ assert self.client.port(
+ ctnr, '2000/udp'
+ )[0]['HostPort'] == six.text_type(udp_port)
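Note: these hunks follow the same mechanical migration applied across the suite: self.assertRaises/assertEqual become pytest.raises and bare assert, and the caught exception is reached through excinfo.value instead of cm.exception. A minimal sketch of the pattern, using a purely illustrative helper:

    import pytest

    def parse_port(value):
        # illustrative helper, not part of docker-py
        if not value.isdigit():
            raise ValueError("expected a numeric port")
        return int(value)

    def test_parse_port_rejects_garbage():
        with pytest.raises(ValueError) as excinfo:
            parse_port("abc")
        # pytest exposes the raised exception as excinfo.value, where
        # unittest's assertRaises context manager used cm.exception
        assert str(excinfo.value) == "expected a numeric port"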
diff --git a/tests/unit/api_build_test.py b/tests/unit/api_build_test.py
index 927aa97..a7f34fd 100644
--- a/tests/unit/api_build_test.py
+++ b/tests/unit/api_build_test.py
@@ -5,6 +5,7 @@ import docker
from docker import auth
from .api_test import BaseAPIClientTest, fake_request, url_prefix
+import pytest
class BuildTest(BaseAPIClientTest):
@@ -30,17 +31,6 @@ class BuildTest(BaseAPIClientTest):
self.client.build(fileobj=script, pull=True)
- def test_build_container_stream(self):
- script = io.BytesIO('\n'.join([
- 'FROM busybox',
- 'RUN mkdir -p /tmp/test',
- 'EXPOSE 8080',
- 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
- ' /tmp/silence.tar.gz'
- ]).encode('ascii'))
-
- self.client.build(fileobj=script, stream=True)
-
def test_build_container_custom_context(self):
script = io.BytesIO('\n'.join([
'FROM busybox',
@@ -72,10 +62,12 @@ class BuildTest(BaseAPIClientTest):
def test_build_remote_with_registry_auth(self):
self.client._auth_configs = {
- 'https://example.com': {
- 'user': 'example',
- 'password': 'example',
- 'email': 'example@example.com'
+ 'auths': {
+ 'https://example.com': {
+ 'user': 'example',
+ 'password': 'example',
+ 'email': 'example@example.com'
+ }
}
}
@@ -84,7 +76,10 @@ class BuildTest(BaseAPIClientTest):
'forcerm': False,
'remote': 'https://github.com/docker-library/mongo'}
expected_headers = {
- 'X-Registry-Config': auth.encode_header(self.client._auth_configs)}
+ 'X-Registry-Config': auth.encode_header(
+ self.client._auth_configs['auths']
+ )
+ }
self.client.build(path='https://github.com/docker-library/mongo')
@@ -110,44 +105,53 @@ class BuildTest(BaseAPIClientTest):
})
def test_build_container_invalid_container_limits(self):
- self.assertRaises(
- docker.errors.DockerException,
- lambda: self.client.build('.', container_limits={
+ with pytest.raises(docker.errors.DockerException):
+ self.client.build('.', container_limits={
'foo': 'bar'
})
- )
def test_set_auth_headers_with_empty_dict_and_auth_configs(self):
self.client._auth_configs = {
- 'https://example.com': {
- 'user': 'example',
- 'password': 'example',
- 'email': 'example@example.com'
+ 'auths': {
+ 'https://example.com': {
+ 'user': 'example',
+ 'password': 'example',
+ 'email': 'example@example.com'
+ }
}
}
headers = {}
expected_headers = {
- 'X-Registry-Config': auth.encode_header(self.client._auth_configs)}
+ 'X-Registry-Config': auth.encode_header(
+ self.client._auth_configs['auths']
+ )
+ }
+
self.client._set_auth_headers(headers)
- self.assertEqual(headers, expected_headers)
+ assert headers == expected_headers
def test_set_auth_headers_with_dict_and_auth_configs(self):
self.client._auth_configs = {
- 'https://example.com': {
- 'user': 'example',
- 'password': 'example',
- 'email': 'example@example.com'
+ 'auths': {
+ 'https://example.com': {
+ 'user': 'example',
+ 'password': 'example',
+ 'email': 'example@example.com'
+ }
}
}
headers = {'foo': 'bar'}
expected_headers = {
- 'foo': 'bar',
- 'X-Registry-Config': auth.encode_header(self.client._auth_configs)}
+ 'X-Registry-Config': auth.encode_header(
+ self.client._auth_configs['auths']
+ ),
+ 'foo': 'bar'
+ }
self.client._set_auth_headers(headers)
- self.assertEqual(headers, expected_headers)
+ assert headers == expected_headers
def test_set_auth_headers_with_dict_and_no_auth_configs(self):
headers = {'foo': 'bar'}
@@ -156,4 +160,4 @@ class BuildTest(BaseAPIClientTest):
}
self.client._set_auth_headers(headers)
- self.assertEqual(headers, expected_headers)
+ assert headers == expected_headers
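Note: the _auth_configs changes above reflect 3.0's new credential layout: the registry map now sits under an 'auths' key (matching the on-disk ~/.docker/config.json layout), and only that inner mapping is serialized into the X-Registry-Config header. A rough sketch of the shape change, with illustrative values:

    # docker-py < 3.0: registries keyed at the top level
    old_auth_configs = {
        'https://example.com': {
            'user': 'example', 'password': 'example',
        },
    }

    # docker-py >= 3.0: the same data nested under 'auths'
    new_auth_configs = {
        'auths': {
            'https://example.com': {
                'user': 'example', 'password': 'example',
            },
        },
    }

    # The build/auth header is built from the inner mapping only,
    # i.e. auth.encode_header(new_auth_configs['auths']) as asserted above.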
diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py
index 3b135a8..c33f129 100644
--- a/tests/unit/api_container_test.py
+++ b/tests/unit/api_container_test.py
@@ -30,31 +30,20 @@ class StartContainerTest(BaseAPIClientTest):
self.client.start(fake_api.FAKE_CONTAINER_ID)
args = fake_request.call_args
- self.assertEqual(
- args[0][1],
- url_prefix + 'containers/3cc2351ab11b/start'
- )
+ assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/start'
assert 'data' not in args[1]
- self.assertEqual(
- args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
- )
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_start_container_none(self):
with pytest.raises(ValueError) as excinfo:
self.client.start(container=None)
- self.assertEqual(
- str(excinfo.value),
- 'Resource ID was not provided',
- )
+ assert str(excinfo.value) == 'Resource ID was not provided'
with pytest.raises(ValueError) as excinfo:
self.client.start(None)
- self.assertEqual(
- str(excinfo.value),
- 'Resource ID was not provided',
- )
+ assert str(excinfo.value) == 'Resource ID was not provided'
def test_start_container_regression_573(self):
self.client.start(**{'container': fake_api.FAKE_CONTAINER_ID})
@@ -134,14 +123,9 @@ class StartContainerTest(BaseAPIClientTest):
self.client.start({'Id': fake_api.FAKE_CONTAINER_ID})
args = fake_request.call_args
- self.assertEqual(
- args[0][1],
- url_prefix + 'containers/3cc2351ab11b/start'
- )
+ assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/start'
assert 'data' not in args[1]
- self.assertEqual(
- args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
- )
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
class CreateContainerTest(BaseAPIClientTest):
@@ -149,17 +133,15 @@ class CreateContainerTest(BaseAPIClientTest):
self.client.create_container('busybox', 'true')
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox", "Cmd": ["true"],
- "AttachStdin": false,
- "AttachStderr": true, "AttachStdout": true,
- "StdinOnce": false,
- "OpenStdin": false, "NetworkDisabled": false}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox", "Cmd": ["true"],
+ "AttachStdin": false,
+ "AttachStderr": true, "AttachStdout": true,
+ "StdinOnce": false,
+ "OpenStdin": false, "NetworkDisabled": false}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_create_container_with_binds(self):
mount_dest = '/mnt'
@@ -168,19 +150,17 @@ class CreateContainerTest(BaseAPIClientTest):
volumes=[mount_dest])
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls", "/mnt"], "AttachStdin": false,
- "Volumes": {"/mnt": {}},
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls", "/mnt"], "AttachStdin": false,
+ "Volumes": {"/mnt": {}},
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_create_container_with_volume_string(self):
mount_dest = '/mnt'
@@ -189,82 +169,56 @@ class CreateContainerTest(BaseAPIClientTest):
volumes=mount_dest)
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls", "/mnt"], "AttachStdin": false,
- "Volumes": {"/mnt": {}},
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls", "/mnt"], "AttachStdin": false,
+ "Volumes": {"/mnt": {}},
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_create_container_with_ports(self):
self.client.create_container('busybox', 'ls',
ports=[1111, (2222, 'udp'), (3333,)])
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls"], "AttachStdin": false,
- "ExposedPorts": {
- "1111/tcp": {},
- "2222/udp": {},
- "3333/tcp": {}
- },
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "ExposedPorts": {
+ "1111/tcp": {},
+ "2222/udp": {},
+ "3333/tcp": {}
+ },
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_create_container_with_entrypoint(self):
self.client.create_container('busybox', 'hello',
entrypoint='cowsay entry')
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["hello"], "AttachStdin": false,
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false,
- "Entrypoint": ["cowsay", "entry"]}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
-
- def test_create_container_with_cpu_shares(self):
- with pytest.deprecated_call():
- self.client.create_container('busybox', 'ls', cpu_shares=5)
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["hello"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "Entrypoint": ["cowsay", "entry"]}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
- args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls"], "AttachStdin": false,
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false,
- "CpuShares": 5}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
-
- @requires_api_version('1.18')
def test_create_container_with_host_config_cpu_shares(self):
self.client.create_container(
'busybox', 'ls', host_config=self.client.create_host_config(
@@ -273,45 +227,22 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
-
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls"], "AttachStdin": false,
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false,
- "HostConfig": {
- "CpuShares": 512,
- "NetworkMode": "default"
- }}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
-
- def test_create_container_with_cpuset(self):
- with pytest.deprecated_call():
- self.client.create_container('busybox', 'ls', cpuset='0,1')
+ assert args[0][1] == url_prefix + 'containers/create'
+
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "HostConfig": {
+ "CpuShares": 512,
+ "NetworkMode": "default"
+ }}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
- args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls"], "AttachStdin": false,
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false,
- "Cpuset": "0,1",
- "CpusetCpus": "0,1"}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
-
- @requires_api_version('1.18')
def test_create_container_with_host_config_cpuset(self):
self.client.create_container(
'busybox', 'ls', host_config=self.client.create_host_config(
@@ -320,25 +251,22 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
-
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls"], "AttachStdin": false,
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false,
- "HostConfig": {
- "CpusetCpus": "0,1",
- "NetworkMode": "default"
- }}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
-
- @requires_api_version('1.19')
+ assert args[0][1] == url_prefix + 'containers/create'
+
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "HostConfig": {
+ "CpusetCpus": "0,1",
+ "NetworkMode": "default"
+ }}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+
def test_create_container_with_host_config_cpuset_mems(self):
self.client.create_container(
'busybox', 'ls', host_config=self.client.create_host_config(
@@ -347,23 +275,21 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
-
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls"], "AttachStdin": false,
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false,
- "HostConfig": {
- "CpusetMems": "0",
- "NetworkMode": "default"
- }}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
+ assert args[0][1] == url_prefix + 'containers/create'
+
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "HostConfig": {
+ "CpusetMems": "0",
+ "NetworkMode": "default"
+ }}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_create_container_with_cgroup_parent(self):
self.client.create_container(
@@ -373,87 +299,58 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
data = json.loads(args[1]['data'])
- self.assertIn('HostConfig', data)
- self.assertIn('CgroupParent', data['HostConfig'])
- self.assertEqual(data['HostConfig']['CgroupParent'], 'test')
+ assert 'HostConfig' in data
+ assert 'CgroupParent' in data['HostConfig']
+ assert data['HostConfig']['CgroupParent'] == 'test'
def test_create_container_with_working_dir(self):
self.client.create_container('busybox', 'ls',
working_dir='/root')
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls"], "AttachStdin": false,
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false,
- "WorkingDir": "/root"}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "WorkingDir": "/root"}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_create_container_with_stdin_open(self):
self.client.create_container('busybox', 'true', stdin_open=True)
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox", "Cmd": ["true"],
- "AttachStdin": true,
- "AttachStderr": true, "AttachStdout": true,
- "StdinOnce": true,
- "OpenStdin": true, "NetworkDisabled": false}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
-
- def test_create_container_with_volumes_from(self):
- vol_names = ['foo', 'bar']
- try:
- self.client.create_container('busybox', 'true',
- volumes_from=vol_names)
- except docker.errors.DockerException:
- self.assertTrue(
- docker.utils.compare_version('1.10', self.client._version) >= 0
- )
- return
-
- args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data'])['VolumesFrom'],
- ','.join(vol_names))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
-
- def test_create_container_empty_volumes_from(self):
- with pytest.raises(docker.errors.InvalidVersion):
- self.client.create_container('busybox', 'true', volumes_from=[])
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox", "Cmd": ["true"],
+ "AttachStdin": true,
+ "AttachStderr": true, "AttachStdout": true,
+ "StdinOnce": true,
+ "OpenStdin": true, "NetworkDisabled": false}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_create_named_container(self):
self.client.create_container('busybox', 'true',
name='marisa-kirisame')
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox", "Cmd": ["true"],
- "AttachStdin": false,
- "AttachStderr": true, "AttachStdout": true,
- "StdinOnce": false,
- "OpenStdin": false, "NetworkDisabled": false}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(args[1]['params'], {'name': 'marisa-kirisame'})
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox", "Cmd": ["true"],
+ "AttachStdin": false,
+ "AttachStderr": true, "AttachStdout": true,
+ "StdinOnce": false,
+ "OpenStdin": false, "NetworkDisabled": false}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['params'] == {'name': 'marisa-kirisame'}
def test_create_container_with_mem_limit_as_int(self):
self.client.create_container(
@@ -464,7 +361,7 @@ class CreateContainerTest(BaseAPIClientTest):
args = fake_request.call_args
data = json.loads(args[1]['data'])
- self.assertEqual(data['HostConfig']['Memory'], 128.0)
+ assert data['HostConfig']['Memory'] == 128.0
def test_create_container_with_mem_limit_as_string(self):
self.client.create_container(
@@ -475,7 +372,7 @@ class CreateContainerTest(BaseAPIClientTest):
args = fake_request.call_args
data = json.loads(args[1]['data'])
- self.assertEqual(data['HostConfig']['Memory'], 128.0)
+ assert data['HostConfig']['Memory'] == 128.0
def test_create_container_with_mem_limit_as_string_with_k_unit(self):
self.client.create_container(
@@ -486,7 +383,7 @@ class CreateContainerTest(BaseAPIClientTest):
args = fake_request.call_args
data = json.loads(args[1]['data'])
- self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024)
+ assert data['HostConfig']['Memory'] == 128.0 * 1024
def test_create_container_with_mem_limit_as_string_with_m_unit(self):
self.client.create_container(
@@ -497,7 +394,7 @@ class CreateContainerTest(BaseAPIClientTest):
args = fake_request.call_args
data = json.loads(args[1]['data'])
- self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024 * 1024)
+ assert data['HostConfig']['Memory'] == 128.0 * 1024 * 1024
def test_create_container_with_mem_limit_as_string_with_g_unit(self):
self.client.create_container(
@@ -508,20 +405,14 @@ class CreateContainerTest(BaseAPIClientTest):
args = fake_request.call_args
data = json.loads(args[1]['data'])
- self.assertEqual(
- data['HostConfig']['Memory'], 128.0 * 1024 * 1024 * 1024
- )
+ assert data['HostConfig']['Memory'] == 128.0 * 1024 * 1024 * 1024
def test_create_container_with_mem_limit_as_string_with_wrong_value(self):
- self.assertRaises(
- docker.errors.DockerException,
- self.client.create_host_config, mem_limit='128p'
- )
+ with pytest.raises(docker.errors.DockerException):
+ self.client.create_host_config(mem_limit='128p')
- self.assertRaises(
- docker.errors.DockerException,
- self.client.create_host_config, mem_limit='1f28'
- )
+ with pytest.raises(docker.errors.DockerException):
+ self.client.create_host_config(mem_limit='1f28')
def test_create_container_with_lxc_conf(self):
self.client.create_container(
@@ -531,25 +422,16 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(
- args[0][1],
- url_prefix + 'containers/create'
- )
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['LxcConf'] = [
{"Value": "lxc.conf.value", "Key": "lxc.conf.k"}
]
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(
- args[1]['headers'],
- {'Content-Type': 'application/json'}
- )
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_lxc_conf_compat(self):
self.client.create_container(
@@ -559,20 +441,15 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['LxcConf'] = [
{"Value": "lxc.conf.value", "Key": "lxc.conf.k"}
]
- self.assertEqual(
- json.loads(args[1]['data']), expected_payload)
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_binds_ro(self):
mount_dest = '/mnt'
@@ -588,18 +465,13 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix +
- 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:ro"]
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_binds_rw(self):
mount_dest = '/mnt'
@@ -615,18 +487,13 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix +
- 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:rw"]
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_binds_mode(self):
mount_dest = '/mnt'
@@ -642,18 +509,13 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix +
- 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:z"]
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_binds_mode_and_ro_error(self):
with pytest.raises(ValueError):
@@ -680,21 +542,16 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix +
- 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Binds'] = [
"/tmp:/mnt/1:ro",
"/tmp:/mnt/2",
]
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_port_binds(self):
self.maxDiff = None
@@ -713,42 +570,31 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
data = json.loads(args[1]['data'])
port_bindings = data['HostConfig']['PortBindings']
- self.assertTrue('1111/tcp' in port_bindings)
- self.assertTrue('2222/tcp' in port_bindings)
- self.assertTrue('3333/udp' in port_bindings)
- self.assertTrue('4444/tcp' in port_bindings)
- self.assertTrue('5555/tcp' in port_bindings)
- self.assertTrue('6666/tcp' in port_bindings)
- self.assertEqual(
- [{"HostPort": "", "HostIp": ""}],
- port_bindings['1111/tcp']
- )
- self.assertEqual(
- [{"HostPort": "2222", "HostIp": ""}],
- port_bindings['2222/tcp']
- )
- self.assertEqual(
- [{"HostPort": "3333", "HostIp": ""}],
- port_bindings['3333/udp']
- )
- self.assertEqual(
- [{"HostPort": "", "HostIp": "127.0.0.1"}],
- port_bindings['4444/tcp']
- )
- self.assertEqual(
- [{"HostPort": "5555", "HostIp": "127.0.0.1"}],
- port_bindings['5555/tcp']
- )
- self.assertEqual(len(port_bindings['6666/tcp']), 2)
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert '1111/tcp' in port_bindings
+ assert '2222/tcp' in port_bindings
+ assert '3333/udp' in port_bindings
+ assert '4444/tcp' in port_bindings
+ assert '5555/tcp' in port_bindings
+ assert '6666/tcp' in port_bindings
+ assert [{"HostPort": "", "HostIp": ""}] == port_bindings['1111/tcp']
+ assert [
+ {"HostPort": "2222", "HostIp": ""}
+ ] == port_bindings['2222/tcp']
+ assert [
+ {"HostPort": "3333", "HostIp": ""}
+ ] == port_bindings['3333/udp']
+ assert [
+ {"HostPort": "", "HostIp": "127.0.0.1"}
+ ] == port_bindings['4444/tcp']
+ assert [
+ {"HostPort": "5555", "HostIp": "127.0.0.1"}
+ ] == port_bindings['5555/tcp']
+ assert len(port_bindings['6666/tcp']) == 2
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_mac_address(self):
expected = "02:42:ac:11:00:0a"
@@ -760,7 +606,7 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
data = json.loads(args[1]['data'])
assert data['MacAddress'] == expected
@@ -775,17 +621,13 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(
- args[0][1], url_prefix + 'containers/create'
- )
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Links'] = ['path:alias']
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_create_container_with_multiple_links(self):
link_path = 'path'
@@ -801,16 +643,14 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Links'] = [
'path1:alias1', 'path2:alias2'
]
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_create_container_with_links_as_list_of_tuples(self):
link_path = 'path'
@@ -823,15 +663,13 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Links'] = ['path:alias']
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_create_container_privileged(self):
self.client.create_container(
@@ -843,14 +681,10 @@ class CreateContainerTest(BaseAPIClientTest):
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Privileged'] = True
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_restart_policy(self):
self.client.create_container(
@@ -863,21 +697,17 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['RestartPolicy'] = {
"MaximumRetryCount": 0, "Name": "always"
}
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ assert json.loads(args[1]['data']) == expected_payload
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
- self.assertEqual(
- args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
- )
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_added_capabilities(self):
self.client.create_container(
@@ -886,17 +716,13 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['CapAdd'] = ['MKNOD']
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
- self.assertEqual(
- args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_dropped_capabilities(self):
self.client.create_container(
@@ -905,17 +731,13 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['CapDrop'] = ['MKNOD']
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
- self.assertEqual(
- args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_devices(self):
self.client.create_container(
@@ -927,7 +749,7 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Devices'] = [
@@ -941,13 +763,9 @@ class CreateContainerTest(BaseAPIClientTest):
'PathInContainer': '/dev/sdc',
'PathOnHost': '/dev/sdc'}
]
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
- self.assertEqual(
- args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_labels_dict(self):
labels_dict = {
@@ -961,14 +779,10 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict)
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
- self.assertEqual(
- args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
- )
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data'])['Labels'] == labels_dict
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_labels_list(self):
labels_list = [
@@ -986,14 +800,10 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict)
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
- self.assertEqual(
- args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
- )
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data'])['Labels'] == labels_dict
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_named_volume(self):
mount_dest = '/mnt'
@@ -1010,39 +820,31 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(
- args[0][1], url_prefix + 'containers/create'
- )
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['VolumeDriver'] = 'foodriver'
expected_payload['HostConfig']['Binds'] = ["name:/mnt:rw"]
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_stop_signal(self):
self.client.create_container('busybox', 'ls',
stop_signal='SIGINT')
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls"], "AttachStdin": false,
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false,
- "StopSignal": "SIGINT"}'''))
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "StopSignal": "SIGINT"}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
@requires_api_version('1.22')
def test_create_container_with_aliases(self):
@@ -1059,22 +861,22 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls"], "AttachStdin": false,
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false,
- "HostConfig": {
- "NetworkMode": "some-network"
- },
- "NetworkingConfig": {
- "EndpointsConfig": {
- "some-network": {"Aliases": ["foo", "bar"]}
- }
- }}'''))
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "HostConfig": {
+ "NetworkMode": "some-network"
+ },
+ "NetworkingConfig": {
+ "EndpointsConfig": {
+ "some-network": {"Aliases": ["foo", "bar"]}
+ }
+ }}
+ ''')
@requires_api_version('1.22')
def test_create_container_with_tmpfs_list(self):
@@ -1089,21 +891,16 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix +
- 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Tmpfs'] = {
"/tmp": "",
"/mnt": "size=3G,uid=100"
}
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
@requires_api_version('1.22')
def test_create_container_with_tmpfs_dict(self):
@@ -1118,21 +915,16 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix +
- 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Tmpfs'] = {
"/tmp": "",
"/mnt": "size=3G,uid=100"
}
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
- self.assertEqual(
- args[1]['timeout'],
- DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
@requires_api_version('1.24')
def test_create_container_with_sysctl(self):
@@ -1147,19 +939,15 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ assert args[0][1] == url_prefix + 'containers/create'
expected_payload = self.base_create_payload()
expected_payload['HostConfig'] = self.client.create_host_config()
expected_payload['HostConfig']['Sysctls'] = {
'net.core.somaxconn': '1024', 'net.ipv4.tcp_syncookies': '0',
}
- self.assertEqual(json.loads(args[1]['data']), expected_payload)
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
- self.assertEqual(
- args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
- )
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
def test_create_container_with_unicode_envvars(self):
envvars_dict = {
@@ -1176,8 +964,8 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1], url_prefix + 'containers/create')
- self.assertEqual(json.loads(args[1]['data'])['Env'], expected)
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data'])['Env'] == expected
@requires_api_version('1.25')
def test_create_container_with_host_config_cpus(self):
@@ -1190,26 +978,23 @@ class CreateContainerTest(BaseAPIClientTest):
)
args = fake_request.call_args
- self.assertEqual(args[0][1],
- url_prefix + 'containers/create')
-
- self.assertEqual(json.loads(args[1]['data']),
- json.loads('''
- {"Tty": false, "Image": "busybox",
- "Cmd": ["ls"], "AttachStdin": false,
- "AttachStderr": true,
- "AttachStdout": true, "OpenStdin": false,
- "StdinOnce": false,
- "NetworkDisabled": false,
- "HostConfig": {
- "CpuCount": 1,
- "CpuPercent": 20,
- "NanoCpus": 1000,
- "NetworkMode": "default"
- }}'''))
- self.assertEqual(
- args[1]['headers'], {'Content-Type': 'application/json'}
- )
+ assert args[0][1] == url_prefix + 'containers/create'
+
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "HostConfig": {
+ "CpuCount": 1,
+ "CpuPercent": 20,
+ "NanoCpus": 1000,
+ "NetworkMode": "default"
+ }}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
class ContainerTest(BaseAPIClientTest):
@@ -1263,7 +1048,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'containers/3cc2351ab11b/wait',
- timeout=None
+ timeout=None,
+ params={}
)
def test_wait_with_dict_instead_of_id(self):
@@ -1272,7 +1058,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'containers/3cc2351ab11b/wait',
- timeout=None
+ timeout=None,
+ params={}
)
def test_logs(self):
@@ -1289,10 +1076,7 @@ class ContainerTest(BaseAPIClientTest):
stream=False
)
- self.assertEqual(
- logs,
- 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii')
- )
+ assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii')
def test_logs_with_dict_instead_of_id(self):
with mock.patch('docker.api.client.APIClient.inspect_container',
@@ -1308,10 +1092,7 @@ class ContainerTest(BaseAPIClientTest):
stream=False
)
- self.assertEqual(
- logs,
- 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii')
- )
+ assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii')
def test_log_streaming(self):
with mock.patch('docker.api.client.APIClient.inspect_container',
@@ -1424,7 +1205,7 @@ class ContainerTest(BaseAPIClientTest):
def test_log_since_with_invalid_value_raises_error(self):
with mock.patch('docker.api.client.APIClient.inspect_container',
fake_inspect_container):
- with self.assertRaises(docker.errors.InvalidArgument):
+ with pytest.raises(docker.errors.InvalidArgument):
self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
follow=False, since=42.42)
@@ -1437,7 +1218,7 @@ class ContainerTest(BaseAPIClientTest):
self.client.logs(fake_api.FAKE_CONTAINER_ID,
follow=True, stream=True)
- self.assertTrue(m.called)
+ assert m.called
fake_request.assert_called_with(
'GET',
url_prefix + 'containers/3cc2351ab11b/logs',
@@ -1621,9 +1402,7 @@ class ContainerTest(BaseAPIClientTest):
with pytest.raises(docker.errors.NullResource) as excinfo:
self.client.inspect_container(arg)
- self.assertEqual(
- excinfo.value.args[0], 'Resource ID was not provided'
- )
+ assert excinfo.value.args[0] == 'Resource ID was not provided'
def test_container_stats(self):
self.client.stats(fake_api.FAKE_CONTAINER_ID)
@@ -1662,13 +1441,8 @@ class ContainerTest(BaseAPIClientTest):
blkio_weight=345
)
args = fake_request.call_args
- self.assertEqual(
- args[0][1], url_prefix + 'containers/3cc2351ab11b/update'
- )
- self.assertEqual(
- json.loads(args[1]['data']),
- {'Memory': 2 * 1024, 'CpuShares': 124, 'BlkioWeight': 345}
- )
- self.assertEqual(
- args[1]['headers']['Content-Type'], 'application/json'
- )
+ assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/update'
+ assert json.loads(args[1]['data']) == {
+ 'Memory': 2 * 1024, 'CpuShares': 124, 'BlkioWeight': 345
+ }
+ assert args[1]['headers']['Content-Type'] == 'application/json'
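Note: the container tests above standardize on comparing request bodies as parsed JSON rather than as raw strings, so whitespace and key order in the expected literal do not matter. A small self-contained illustration of why that comparison is robust:

    import json

    sent = '{"Image": "busybox", "Cmd": ["true"], "Tty": false}'
    expected = '''
        {"Tty": false,
         "Image": "busybox",
         "Cmd": ["true"]}
    '''

    # A string comparison would fail on ordering and indentation; comparing
    # the parsed dicts only checks the payload's actual content.
    assert json.loads(sent) == json.loads(expected)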
diff --git a/tests/unit/api_exec_test.py b/tests/unit/api_exec_test.py
index 41ee940..a9d2dd5 100644
--- a/tests/unit/api_exec_test.py
+++ b/tests/unit/api_exec_test.py
@@ -11,85 +11,65 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1'])
args = fake_request.call_args
- self.assertEqual(
- 'POST',
- args[0][0], url_prefix + 'containers/{0}/exec'.format(
- fake_api.FAKE_CONTAINER_ID
- )
+ assert 'POST' == args[0][0], url_prefix + 'containers/{0}/exec'.format(
+ fake_api.FAKE_CONTAINER_ID
)
- self.assertEqual(
- json.loads(args[1]['data']), {
- 'Tty': False,
- 'AttachStdout': True,
- 'Container': fake_api.FAKE_CONTAINER_ID,
- 'Cmd': ['ls', '-1'],
- 'Privileged': False,
- 'AttachStdin': False,
- 'AttachStderr': True,
- 'User': ''
- }
- )
+ assert json.loads(args[1]['data']) == {
+ 'Tty': False,
+ 'AttachStdout': True,
+ 'Container': fake_api.FAKE_CONTAINER_ID,
+ 'Cmd': ['ls', '-1'],
+ 'Privileged': False,
+ 'AttachStdin': False,
+ 'AttachStderr': True,
+ 'User': ''
+ }
- self.assertEqual(args[1]['headers'],
- {'Content-Type': 'application/json'})
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
def test_exec_start(self):
self.client.exec_start(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args
- self.assertEqual(
- args[0][1], url_prefix + 'exec/{0}/start'.format(
- fake_api.FAKE_EXEC_ID
- )
+ assert args[0][1] == url_prefix + 'exec/{0}/start'.format(
+ fake_api.FAKE_EXEC_ID
)
- self.assertEqual(
- json.loads(args[1]['data']), {
- 'Tty': False,
- 'Detach': False,
- }
- )
+ assert json.loads(args[1]['data']) == {
+ 'Tty': False,
+ 'Detach': False,
+ }
- self.assertEqual(
- args[1]['headers'], {
- 'Content-Type': 'application/json',
- 'Connection': 'Upgrade',
- 'Upgrade': 'tcp'
- }
- )
+ assert args[1]['headers'] == {
+ 'Content-Type': 'application/json',
+ 'Connection': 'Upgrade',
+ 'Upgrade': 'tcp'
+ }
def test_exec_start_detached(self):
self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True)
args = fake_request.call_args
- self.assertEqual(
- args[0][1], url_prefix + 'exec/{0}/start'.format(
- fake_api.FAKE_EXEC_ID
- )
+ assert args[0][1] == url_prefix + 'exec/{0}/start'.format(
+ fake_api.FAKE_EXEC_ID
)
- self.assertEqual(
- json.loads(args[1]['data']), {
- 'Tty': False,
- 'Detach': True
- }
- )
+ assert json.loads(args[1]['data']) == {
+ 'Tty': False,
+ 'Detach': True
+ }
- self.assertEqual(
- args[1]['headers'], {
- 'Content-Type': 'application/json'
- }
- )
+ assert args[1]['headers'] == {
+ 'Content-Type': 'application/json'
+ }
def test_exec_inspect(self):
self.client.exec_inspect(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args
- self.assertEqual(
- args[0][1], url_prefix + 'exec/{0}/json'.format(
- fake_api.FAKE_EXEC_ID
- )
+ assert args[0][1] == url_prefix + 'exec/{0}/json'.format(
+ fake_api.FAKE_EXEC_ID
)
def test_exec_resize(self):
diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py
index f1e42cc..1e2315d 100644
--- a/tests/unit/api_image_test.py
+++ b/tests/unit/api_image_test.py
@@ -65,29 +65,21 @@ class ImageTest(BaseAPIClientTest):
self.client.pull('joffrey/test001')
args = fake_request.call_args
- self.assertEqual(
- args[0][1],
- url_prefix + 'images/create'
- )
- self.assertEqual(
- args[1]['params'],
- {'tag': None, 'fromImage': 'joffrey/test001'}
- )
- self.assertFalse(args[1]['stream'])
+ assert args[0][1] == url_prefix + 'images/create'
+ assert args[1]['params'] == {
+ 'tag': None, 'fromImage': 'joffrey/test001'
+ }
+ assert not args[1]['stream']
def test_pull_stream(self):
self.client.pull('joffrey/test001', stream=True)
args = fake_request.call_args
- self.assertEqual(
- args[0][1],
- url_prefix + 'images/create'
- )
- self.assertEqual(
- args[1]['params'],
- {'tag': None, 'fromImage': 'joffrey/test001'}
- )
- self.assertTrue(args[1]['stream'])
+ assert args[0][1] == url_prefix + 'images/create'
+ assert args[1]['params'] == {
+ 'tag': None, 'fromImage': 'joffrey/test001'
+ }
+ assert args[1]['stream']
def test_commit(self):
self.client.commit(fake_api.FAKE_CONTAINER_ID)
@@ -203,29 +195,7 @@ class ImageTest(BaseAPIClientTest):
with pytest.raises(docker.errors.NullResource) as excinfo:
self.client.inspect_image(arg)
- self.assertEqual(
- excinfo.value.args[0], 'Resource ID was not provided'
- )
-
- def test_insert_image(self):
- try:
- self.client.insert(fake_api.FAKE_IMAGE_NAME,
- fake_api.FAKE_URL, fake_api.FAKE_PATH)
- except docker.errors.DeprecatedMethod:
- self.assertTrue(
- docker.utils.compare_version('1.12', self.client._version) >= 0
- )
- return
-
- fake_request.assert_called_with(
- 'POST',
- url_prefix + 'images/test_image/insert',
- params={
- 'url': fake_api.FAKE_URL,
- 'path': fake_api.FAKE_PATH
- },
- timeout=DEFAULT_TIMEOUT_SECONDS
- )
+ assert excinfo.value.args[0] == 'Resource ID was not provided'
def test_push_image(self):
with mock.patch('docker.auth.resolve_authconfig',
diff --git a/tests/unit/api_network_test.py b/tests/unit/api_network_test.py
index 96cdc4b..c78554d 100644
--- a/tests/unit/api_network_test.py
+++ b/tests/unit/api_network_test.py
@@ -3,7 +3,6 @@ import json
import six
from .api_test import BaseAPIClientTest, url_prefix, response
-from ..helpers import requires_api_version
from docker.types import IPAMConfig, IPAMPool
try:
@@ -13,7 +12,6 @@ except ImportError:
class NetworkTest(BaseAPIClientTest):
- @requires_api_version('1.21')
def test_list_networks(self):
networks = [
{
@@ -34,22 +32,21 @@ class NetworkTest(BaseAPIClientTest):
status_code=200, content=json.dumps(networks).encode('utf-8')))
with mock.patch('docker.api.client.APIClient.get', get):
- self.assertEqual(self.client.networks(), networks)
+ assert self.client.networks() == networks
- self.assertEqual(get.call_args[0][0], url_prefix + 'networks')
+ assert get.call_args[0][0] == url_prefix + 'networks'
filters = json.loads(get.call_args[1]['params']['filters'])
- self.assertFalse(filters)
+ assert not filters
self.client.networks(names=['foo'])
filters = json.loads(get.call_args[1]['params']['filters'])
- self.assertEqual(filters, {'name': ['foo']})
+ assert filters == {'name': ['foo']}
self.client.networks(ids=['123'])
filters = json.loads(get.call_args[1]['params']['filters'])
- self.assertEqual(filters, {'id': ['123']})
+ assert filters == {'id': ['123']}
- @requires_api_version('1.21')
def test_create_network(self):
network_data = {
"id": 'abc12345',
@@ -61,15 +58,11 @@ class NetworkTest(BaseAPIClientTest):
with mock.patch('docker.api.client.APIClient.post', post):
result = self.client.create_network('foo')
- self.assertEqual(result, network_data)
+ assert result == network_data
- self.assertEqual(
- post.call_args[0][0],
- url_prefix + 'networks/create')
+ assert post.call_args[0][0] == url_prefix + 'networks/create'
- self.assertEqual(
- json.loads(post.call_args[1]['data']),
- {"Name": "foo"})
+ assert json.loads(post.call_args[1]['data']) == {"Name": "foo"}
opts = {
'com.docker.network.bridge.enable_icc': False,
@@ -77,9 +70,9 @@ class NetworkTest(BaseAPIClientTest):
}
self.client.create_network('foo', 'bridge', opts)
- self.assertEqual(
- json.loads(post.call_args[1]['data']),
- {"Name": "foo", "Driver": "bridge", "Options": opts})
+ assert json.loads(post.call_args[1]['data']) == {
+ "Name": "foo", "Driver": "bridge", "Options": opts
+ }
ipam_pool_config = IPAMPool(subnet="192.168.52.0/24",
gateway="192.168.52.254")
@@ -88,23 +81,20 @@ class NetworkTest(BaseAPIClientTest):
self.client.create_network("bar", driver="bridge",
ipam=ipam_config)
- self.assertEqual(
- json.loads(post.call_args[1]['data']),
- {
- "Name": "bar",
- "Driver": "bridge",
- "IPAM": {
- "Driver": "default",
- "Config": [{
- "IPRange": None,
- "Gateway": "192.168.52.254",
- "Subnet": "192.168.52.0/24",
- "AuxiliaryAddresses": None,
- }],
- }
- })
-
- @requires_api_version('1.21')
+ assert json.loads(post.call_args[1]['data']) == {
+ "Name": "bar",
+ "Driver": "bridge",
+ "IPAM": {
+ "Driver": "default",
+ "Config": [{
+ "IPRange": None,
+ "Gateway": "192.168.52.254",
+ "Subnet": "192.168.52.0/24",
+ "AuxiliaryAddresses": None,
+ }],
+ }
+ }
+
def test_remove_network(self):
network_id = 'abc12345'
delete = mock.Mock(return_value=response(status_code=200))
@@ -113,10 +103,8 @@ class NetworkTest(BaseAPIClientTest):
self.client.remove_network(network_id)
args = delete.call_args
- self.assertEqual(args[0][0],
- url_prefix + 'networks/{0}'.format(network_id))
+ assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id)
- @requires_api_version('1.21')
def test_inspect_network(self):
network_id = 'abc12345'
network_name = 'foo'
@@ -132,13 +120,11 @@ class NetworkTest(BaseAPIClientTest):
with mock.patch('docker.api.client.APIClient.get', get):
result = self.client.inspect_network(network_id)
- self.assertEqual(result, network_data)
+ assert result == network_data
args = get.call_args
- self.assertEqual(args[0][0],
- url_prefix + 'networks/{0}'.format(network_id))
+ assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id)
- @requires_api_version('1.21')
def test_connect_container_to_network(self):
network_id = 'abc12345'
container_id = 'def45678'
@@ -153,21 +139,18 @@ class NetworkTest(BaseAPIClientTest):
links=[('baz', 'quux')]
)
- self.assertEqual(
- post.call_args[0][0],
- url_prefix + 'networks/{0}/connect'.format(network_id))
+ assert post.call_args[0][0] == (
+ url_prefix + 'networks/{0}/connect'.format(network_id)
+ )
+
+ assert json.loads(post.call_args[1]['data']) == {
+ 'Container': container_id,
+ 'EndpointConfig': {
+ 'Aliases': ['foo', 'bar'],
+ 'Links': ['baz:quux'],
+ },
+ }
- self.assertEqual(
- json.loads(post.call_args[1]['data']),
- {
- 'Container': container_id,
- 'EndpointConfig': {
- 'Aliases': ['foo', 'bar'],
- 'Links': ['baz:quux'],
- },
- })
-
- @requires_api_version('1.21')
def test_disconnect_container_from_network(self):
network_id = 'abc12345'
container_id = 'def45678'
@@ -178,10 +161,9 @@ class NetworkTest(BaseAPIClientTest):
self.client.disconnect_container_from_network(
container={'Id': container_id}, net_id=network_id)
- self.assertEqual(
- post.call_args[0][0],
- url_prefix + 'networks/{0}/disconnect'.format(network_id))
-
- self.assertEqual(
- json.loads(post.call_args[1]['data']),
- {'Container': container_id})
+ assert post.call_args[0][0] == (
+ url_prefix + 'networks/{0}/disconnect'.format(network_id)
+ )
+ assert json.loads(post.call_args[1]['data']) == {
+ 'Container': container_id
+ }
diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py
index 6ac92c4..c53a4be 100644
--- a/tests/unit/api_test.py
+++ b/tests/unit/api_test.py
@@ -128,34 +128,27 @@ class DockerApiTest(BaseAPIClientTest):
with pytest.raises(docker.errors.DockerException) as excinfo:
APIClient(version=1.12)
- self.assertEqual(
- str(excinfo.value),
- 'Version parameter must be a string or None. Found float'
- )
+ assert str(
+ excinfo.value
+ ) == 'Version parameter must be a string or None. Found float'
def test_url_valid_resource(self):
url = self.client._url('/hello/{0}/world', 'somename')
- self.assertEqual(
- url, '{0}{1}'.format(url_prefix, 'hello/somename/world')
- )
+ assert url == '{0}{1}'.format(url_prefix, 'hello/somename/world')
url = self.client._url(
'/hello/{0}/world/{1}', 'somename', 'someothername'
)
- self.assertEqual(
- url,
- '{0}{1}'.format(url_prefix, 'hello/somename/world/someothername')
+ assert url == '{0}{1}'.format(
+ url_prefix, 'hello/somename/world/someothername'
)
url = self.client._url('/hello/{0}/world', 'some?name')
- self.assertEqual(
- url, '{0}{1}'.format(url_prefix, 'hello/some%3Fname/world')
- )
+ assert url == '{0}{1}'.format(url_prefix, 'hello/some%3Fname/world')
url = self.client._url("/images/{0}/push", "localhost:5000/image")
- self.assertEqual(
- url,
- '{0}{1}'.format(url_prefix, 'images/localhost:5000/image/push')
+ assert url == '{0}{1}'.format(
+ url_prefix, 'images/localhost:5000/image/push'
)
def test_url_invalid_resource(self):
@@ -164,15 +157,13 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_no_resource(self):
url = self.client._url('/simple')
- self.assertEqual(url, '{0}{1}'.format(url_prefix, 'simple'))
+ assert url == '{0}{1}'.format(url_prefix, 'simple')
def test_url_unversioned_api(self):
url = self.client._url(
'/hello/{0}/world', 'somename', versioned_api=False
)
- self.assertEqual(
- url, '{0}{1}'.format(url_base, 'hello/somename/world')
- )
+ assert url == '{0}{1}'.format(url_base, 'hello/somename/world')
def test_version(self):
self.client.version()
@@ -194,13 +185,13 @@ class DockerApiTest(BaseAPIClientTest):
def test_retrieve_server_version(self):
client = APIClient(version="auto")
- self.assertTrue(isinstance(client._version, six.string_types))
- self.assertFalse(client._version == "auto")
+ assert isinstance(client._version, six.string_types)
+ assert not (client._version == "auto")
client.close()
def test_auto_retrieve_server_version(self):
version = self.client._retrieve_server_version()
- self.assertTrue(isinstance(version, six.string_types))
+ assert isinstance(version, six.string_types)
def test_info(self):
self.client.info()
@@ -313,11 +304,10 @@ class DockerApiTest(BaseAPIClientTest):
def test_create_host_config_secopt(self):
security_opt = ['apparmor:test_profile']
result = self.client.create_host_config(security_opt=security_opt)
- self.assertIn('SecurityOpt', result)
- self.assertEqual(result['SecurityOpt'], security_opt)
- self.assertRaises(
- TypeError, self.client.create_host_config, security_opt='wrong'
- )
+ assert 'SecurityOpt' in result
+ assert result['SecurityOpt'] == security_opt
+ with pytest.raises(TypeError):
+ self.client.create_host_config(security_opt='wrong')
def test_stream_helper_decoding(self):
status_code, content = fake_api.fake_responses[url_prefix + 'events']()
@@ -335,26 +325,26 @@ class DockerApiTest(BaseAPIClientTest):
raw_resp._fp.seek(0)
resp = response(status_code=status_code, content=content, raw=raw_resp)
result = next(self.client._stream_helper(resp))
- self.assertEqual(result, content_str)
+ assert result == content_str
# pass `decode=True` to the helper
raw_resp._fp.seek(0)
resp = response(status_code=status_code, content=content, raw=raw_resp)
result = next(self.client._stream_helper(resp, decode=True))
- self.assertEqual(result, content)
+ assert result == content
# non-chunked response, pass `decode=False` to the helper
setattr(raw_resp._fp, 'chunked', False)
raw_resp._fp.seek(0)
resp = response(status_code=status_code, content=content, raw=raw_resp)
result = next(self.client._stream_helper(resp))
- self.assertEqual(result, content_str.decode('utf-8'))
+ assert result == content_str.decode('utf-8')
# non-chunked response, pass `decode=True` to the helper
raw_resp._fp.seek(0)
resp = response(status_code=status_code, content=content, raw=raw_resp)
result = next(self.client._stream_helper(resp, decode=True))
- self.assertEqual(result, content)
+ assert result == content
class StreamTest(unittest.TestCase):
@@ -442,21 +432,19 @@ class StreamTest(unittest.TestCase):
b'\r\n'
) + b'\r\n'.join(lines)
- with APIClient(base_url="http+unix://" + self.socket_file) \
- as client:
+ with APIClient(base_url="http+unix://" + self.socket_file) as client:
for i in range(5):
try:
stream = client.build(
path=self.build_context,
- stream=True
)
break
except requests.ConnectionError as e:
if i == 4:
raise e
- self.assertEqual(list(stream), [
- str(i).encode() for i in range(50)])
+ assert list(stream) == [
+ str(i).encode() for i in range(50)]
class UserAgentTest(unittest.TestCase):
@@ -475,18 +463,18 @@ class UserAgentTest(unittest.TestCase):
client = APIClient()
client.version()
- self.assertEqual(self.mock_send.call_count, 1)
+ assert self.mock_send.call_count == 1
headers = self.mock_send.call_args[0][0].headers
expected = 'docker-sdk-python/%s' % docker.__version__
- self.assertEqual(headers['User-Agent'], expected)
+ assert headers['User-Agent'] == expected
def test_custom_user_agent(self):
client = APIClient(user_agent='foo/bar')
client.version()
- self.assertEqual(self.mock_send.call_count, 1)
+ assert self.mock_send.call_count == 1
headers = self.mock_send.call_args[0][0].headers
- self.assertEqual(headers['User-Agent'], 'foo/bar')
+ assert headers['User-Agent'] == 'foo/bar'
class DisableSocketTest(unittest.TestCase):
@@ -509,7 +497,7 @@ class DisableSocketTest(unittest.TestCase):
self.client._disable_socket_timeout(socket)
- self.assertEqual(socket.timeout, None)
+ assert socket.timeout is None
def test_disable_socket_timeout2(self):
"""Test that the timeouts are disabled on a generic socket object
@@ -519,8 +507,8 @@ class DisableSocketTest(unittest.TestCase):
self.client._disable_socket_timeout(socket)
- self.assertEqual(socket.timeout, None)
- self.assertEqual(socket._sock.timeout, None)
+ assert socket.timeout is None
+ assert socket._sock.timeout is None
def test_disable_socket_timout_non_blocking(self):
"""Test that a non-blocking socket does not get set to blocking."""
@@ -529,5 +517,5 @@ class DisableSocketTest(unittest.TestCase):
self.client._disable_socket_timeout(socket)
- self.assertEqual(socket.timeout, None)
- self.assertEqual(socket._sock.timeout, 0.0)
+ assert socket.timeout is None
+ assert socket._sock.timeout == 0.0
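[Note] The StreamTest hunk above drops the explicit stream=True argument from client.build(). A minimal usage sketch, assuming build() now always returns a generator of output chunks in this release (image tag and build path are placeholders, not part of the patch):

    import docker

    api = docker.APIClient(base_url='unix://var/run/docker.sock')
    # build() is consumed as a generator; no stream=True flag is passed anymore
    for chunk in api.build(path='.', tag='example:latest'):
        print(chunk)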
diff --git a/tests/unit/api_volume_test.py b/tests/unit/api_volume_test.py
index fc2a556..7850c22 100644
--- a/tests/unit/api_volume_test.py
+++ b/tests/unit/api_volume_test.py
@@ -7,17 +7,15 @@ from .api_test import BaseAPIClientTest, url_prefix, fake_request
class VolumeTest(BaseAPIClientTest):
- @requires_api_version('1.21')
def test_list_volumes(self):
volumes = self.client.volumes()
- self.assertIn('Volumes', volumes)
- self.assertEqual(len(volumes['Volumes']), 2)
+ assert 'Volumes' in volumes
+ assert len(volumes['Volumes']) == 2
args = fake_request.call_args
- self.assertEqual(args[0][0], 'GET')
- self.assertEqual(args[0][1], url_prefix + 'volumes')
+ assert args[0][0] == 'GET'
+ assert args[0][1] == url_prefix + 'volumes'
- @requires_api_version('1.21')
def test_list_volumes_and_filters(self):
volumes = self.client.volumes(filters={'dangling': True})
assert 'Volumes' in volumes
@@ -29,29 +27,28 @@ class VolumeTest(BaseAPIClientTest):
assert args[1] == {'params': {'filters': '{"dangling": ["true"]}'},
'timeout': 60}
- @requires_api_version('1.21')
def test_create_volume(self):
name = 'perfectcherryblossom'
result = self.client.create_volume(name)
- self.assertIn('Name', result)
- self.assertEqual(result['Name'], name)
- self.assertIn('Driver', result)
- self.assertEqual(result['Driver'], 'local')
+ assert 'Name' in result
+ assert result['Name'] == name
+ assert 'Driver' in result
+ assert result['Driver'] == 'local'
args = fake_request.call_args
- self.assertEqual(args[0][0], 'POST')
- self.assertEqual(args[0][1], url_prefix + 'volumes/create')
- self.assertEqual(json.loads(args[1]['data']), {'Name': name})
+ assert args[0][0] == 'POST'
+ assert args[0][1] == url_prefix + 'volumes/create'
+ assert json.loads(args[1]['data']) == {'Name': name}
@requires_api_version('1.23')
def test_create_volume_with_labels(self):
name = 'perfectcherryblossom'
result = self.client.create_volume(name, labels={
- 'com.example.some-label': 'some-value'})
- self.assertEqual(
- result["Labels"],
- {'com.example.some-label': 'some-value'}
- )
+ 'com.example.some-label': 'some-value'
+ })
+ assert result["Labels"] == {
+ 'com.example.some-label': 'some-value'
+ }
@requires_api_version('1.23')
def test_create_volume_with_invalid_labels(self):
@@ -59,20 +56,18 @@ class VolumeTest(BaseAPIClientTest):
with pytest.raises(TypeError):
self.client.create_volume(name, labels=1)
- @requires_api_version('1.21')
def test_create_volume_with_driver(self):
name = 'perfectcherryblossom'
driver_name = 'sshfs'
self.client.create_volume(name, driver=driver_name)
args = fake_request.call_args
- self.assertEqual(args[0][0], 'POST')
- self.assertEqual(args[0][1], url_prefix + 'volumes/create')
+ assert args[0][0] == 'POST'
+ assert args[0][1] == url_prefix + 'volumes/create'
data = json.loads(args[1]['data'])
- self.assertIn('Driver', data)
- self.assertEqual(data['Driver'], driver_name)
+ assert 'Driver' in data
+ assert data['Driver'] == driver_name
- @requires_api_version('1.21')
def test_create_volume_invalid_opts_type(self):
with pytest.raises(TypeError):
self.client.create_volume(
@@ -92,31 +87,29 @@ class VolumeTest(BaseAPIClientTest):
@requires_api_version('1.24')
def test_create_volume_with_no_specified_name(self):
result = self.client.create_volume(name=None)
- self.assertIn('Name', result)
- self.assertNotEqual(result['Name'], None)
- self.assertIn('Driver', result)
- self.assertEqual(result['Driver'], 'local')
- self.assertIn('Scope', result)
- self.assertEqual(result['Scope'], 'local')
-
- @requires_api_version('1.21')
+ assert 'Name' in result
+ assert result['Name'] is not None
+ assert 'Driver' in result
+ assert result['Driver'] == 'local'
+ assert 'Scope' in result
+ assert result['Scope'] == 'local'
+
def test_inspect_volume(self):
name = 'perfectcherryblossom'
result = self.client.inspect_volume(name)
- self.assertIn('Name', result)
- self.assertEqual(result['Name'], name)
- self.assertIn('Driver', result)
- self.assertEqual(result['Driver'], 'local')
+ assert 'Name' in result
+ assert result['Name'] == name
+ assert 'Driver' in result
+ assert result['Driver'] == 'local'
args = fake_request.call_args
- self.assertEqual(args[0][0], 'GET')
- self.assertEqual(args[0][1], '{0}volumes/{1}'.format(url_prefix, name))
+ assert args[0][0] == 'GET'
+ assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name)
- @requires_api_version('1.21')
def test_remove_volume(self):
name = 'perfectcherryblossom'
self.client.remove_volume(name)
args = fake_request.call_args
- self.assertEqual(args[0][0], 'DELETE')
- self.assertEqual(args[0][1], '{0}volumes/{1}'.format(url_prefix, name))
+ assert args[0][0] == 'DELETE'
+ assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name)
diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py
index 56fd50c..d6981cd 100644
--- a/tests/unit/auth_test.py
+++ b/tests/unit/auth_test.py
@@ -9,10 +9,8 @@ import shutil
import tempfile
import unittest
-from py.test import ensuretemp
-from pytest import mark
-
from docker import auth, errors
+import pytest
try:
from unittest import mock
@@ -33,82 +31,68 @@ class RegressionTest(unittest.TestCase):
class ResolveRepositoryNameTest(unittest.TestCase):
def test_resolve_repository_name_hub_library_image(self):
- self.assertEqual(
- auth.resolve_repository_name('image'),
- ('docker.io', 'image'),
+ assert auth.resolve_repository_name('image') == (
+ 'docker.io', 'image'
)
def test_resolve_repository_name_dotted_hub_library_image(self):
- self.assertEqual(
- auth.resolve_repository_name('image.valid'),
- ('docker.io', 'image.valid')
+ assert auth.resolve_repository_name('image.valid') == (
+ 'docker.io', 'image.valid'
)
def test_resolve_repository_name_hub_image(self):
- self.assertEqual(
- auth.resolve_repository_name('username/image'),
- ('docker.io', 'username/image'),
+ assert auth.resolve_repository_name('username/image') == (
+ 'docker.io', 'username/image'
)
def test_explicit_hub_index_library_image(self):
- self.assertEqual(
- auth.resolve_repository_name('docker.io/image'),
- ('docker.io', 'image')
+ assert auth.resolve_repository_name('docker.io/image') == (
+ 'docker.io', 'image'
)
def test_explicit_legacy_hub_index_library_image(self):
- self.assertEqual(
- auth.resolve_repository_name('index.docker.io/image'),
- ('docker.io', 'image')
+ assert auth.resolve_repository_name('index.docker.io/image') == (
+ 'docker.io', 'image'
)
def test_resolve_repository_name_private_registry(self):
- self.assertEqual(
- auth.resolve_repository_name('my.registry.net/image'),
- ('my.registry.net', 'image'),
+ assert auth.resolve_repository_name('my.registry.net/image') == (
+ 'my.registry.net', 'image'
)
def test_resolve_repository_name_private_registry_with_port(self):
- self.assertEqual(
- auth.resolve_repository_name('my.registry.net:5000/image'),
- ('my.registry.net:5000', 'image'),
+ assert auth.resolve_repository_name('my.registry.net:5000/image') == (
+ 'my.registry.net:5000', 'image'
)
def test_resolve_repository_name_private_registry_with_username(self):
- self.assertEqual(
- auth.resolve_repository_name('my.registry.net/username/image'),
- ('my.registry.net', 'username/image'),
- )
+ assert auth.resolve_repository_name(
+ 'my.registry.net/username/image'
+ ) == ('my.registry.net', 'username/image')
def test_resolve_repository_name_no_dots_but_port(self):
- self.assertEqual(
- auth.resolve_repository_name('hostname:5000/image'),
- ('hostname:5000', 'image'),
+ assert auth.resolve_repository_name('hostname:5000/image') == (
+ 'hostname:5000', 'image'
)
def test_resolve_repository_name_no_dots_but_port_and_username(self):
- self.assertEqual(
- auth.resolve_repository_name('hostname:5000/username/image'),
- ('hostname:5000', 'username/image'),
- )
+ assert auth.resolve_repository_name(
+ 'hostname:5000/username/image'
+ ) == ('hostname:5000', 'username/image')
def test_resolve_repository_name_localhost(self):
- self.assertEqual(
- auth.resolve_repository_name('localhost/image'),
- ('localhost', 'image'),
+ assert auth.resolve_repository_name('localhost/image') == (
+ 'localhost', 'image'
)
def test_resolve_repository_name_localhost_with_username(self):
- self.assertEqual(
- auth.resolve_repository_name('localhost/username/image'),
- ('localhost', 'username/image'),
+ assert auth.resolve_repository_name('localhost/username/image') == (
+ 'localhost', 'username/image'
)
def test_invalid_index_name(self):
- self.assertRaises(
- errors.InvalidRepository,
- lambda: auth.resolve_repository_name('-gecko.com/image')
- )
+ with pytest.raises(errors.InvalidRepository):
+ auth.resolve_repository_name('-gecko.com/image')
def encode_auth(auth_info):
@@ -122,154 +106,109 @@ class ResolveAuthTest(unittest.TestCase):
private_config = {'auth': encode_auth({'username': 'privateuser'})}
legacy_config = {'auth': encode_auth({'username': 'legacyauth'})}
- auth_config = auth.parse_auth({
- 'https://index.docker.io/v1/': index_config,
- 'my.registry.net': private_config,
- 'http://legacy.registry.url/v1/': legacy_config,
- })
+ auth_config = {
+ 'auths': auth.parse_auth({
+ 'https://index.docker.io/v1/': index_config,
+ 'my.registry.net': private_config,
+ 'http://legacy.registry.url/v1/': legacy_config,
+ })
+ }
def test_resolve_authconfig_hostname_only(self):
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, 'my.registry.net'
- )['username'],
- 'privateuser'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, 'my.registry.net'
+ )['username'] == 'privateuser'
def test_resolve_authconfig_no_protocol(self):
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, 'my.registry.net/v1/'
- )['username'],
- 'privateuser'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, 'my.registry.net/v1/'
+ )['username'] == 'privateuser'
def test_resolve_authconfig_no_path(self):
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, 'http://my.registry.net'
- )['username'],
- 'privateuser'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, 'http://my.registry.net'
+ )['username'] == 'privateuser'
def test_resolve_authconfig_no_path_trailing_slash(self):
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, 'http://my.registry.net/'
- )['username'],
- 'privateuser'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, 'http://my.registry.net/'
+ )['username'] == 'privateuser'
def test_resolve_authconfig_no_path_wrong_secure_proto(self):
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, 'https://my.registry.net'
- )['username'],
- 'privateuser'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, 'https://my.registry.net'
+ )['username'] == 'privateuser'
def test_resolve_authconfig_no_path_wrong_insecure_proto(self):
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, 'http://index.docker.io'
- )['username'],
- 'indexuser'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, 'http://index.docker.io'
+ )['username'] == 'indexuser'
def test_resolve_authconfig_path_wrong_proto(self):
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, 'https://my.registry.net/v1/'
- )['username'],
- 'privateuser'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, 'https://my.registry.net/v1/'
+ )['username'] == 'privateuser'
def test_resolve_authconfig_default_registry(self):
- self.assertEqual(
- auth.resolve_authconfig(self.auth_config)['username'],
- 'indexuser'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config
+ )['username'] == 'indexuser'
def test_resolve_authconfig_default_explicit_none(self):
- self.assertEqual(
- auth.resolve_authconfig(self.auth_config, None)['username'],
- 'indexuser'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, None
+ )['username'] == 'indexuser'
def test_resolve_authconfig_fully_explicit(self):
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, 'http://my.registry.net/v1/'
- )['username'],
- 'privateuser'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, 'http://my.registry.net/v1/'
+ )['username'] == 'privateuser'
def test_resolve_authconfig_legacy_config(self):
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, 'legacy.registry.url'
- )['username'],
- 'legacyauth'
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, 'legacy.registry.url'
+ )['username'] == 'legacyauth'
def test_resolve_authconfig_no_match(self):
- self.assertTrue(
- auth.resolve_authconfig(self.auth_config, 'does.not.exist') is None
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, 'does.not.exist'
+ ) is None
def test_resolve_registry_and_auth_library_image(self):
image = 'image'
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, auth.resolve_repository_name(image)[0]
- )['username'],
- 'indexuser',
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, auth.resolve_repository_name(image)[0]
+ )['username'] == 'indexuser'
def test_resolve_registry_and_auth_hub_image(self):
image = 'username/image'
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, auth.resolve_repository_name(image)[0]
- )['username'],
- 'indexuser',
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, auth.resolve_repository_name(image)[0]
+ )['username'] == 'indexuser'
def test_resolve_registry_and_auth_explicit_hub(self):
image = 'docker.io/username/image'
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, auth.resolve_repository_name(image)[0]
- )['username'],
- 'indexuser',
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, auth.resolve_repository_name(image)[0]
+ )['username'] == 'indexuser'
def test_resolve_registry_and_auth_explicit_legacy_hub(self):
image = 'index.docker.io/username/image'
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, auth.resolve_repository_name(image)[0]
- )['username'],
- 'indexuser',
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, auth.resolve_repository_name(image)[0]
+ )['username'] == 'indexuser'
def test_resolve_registry_and_auth_private_registry(self):
image = 'my.registry.net/image'
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, auth.resolve_repository_name(image)[0]
- )['username'],
- 'privateuser',
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, auth.resolve_repository_name(image)[0]
+ )['username'] == 'privateuser'
def test_resolve_registry_and_auth_unauthenticated_registry(self):
image = 'other.registry.net/image'
- self.assertEqual(
- auth.resolve_authconfig(
- self.auth_config, auth.resolve_repository_name(image)[0]
- ),
- None,
- )
+ assert auth.resolve_authconfig(
+ self.auth_config, auth.resolve_repository_name(image)[0]
+ ) is None
class CredStoreTest(unittest.TestCase):
@@ -323,62 +262,12 @@ class CredStoreTest(unittest.TestCase):
) == 'truesecret'
-class FindConfigFileTest(unittest.TestCase):
- def tmpdir(self, name):
- tmpdir = ensuretemp(name)
- self.addCleanup(tmpdir.remove)
- return tmpdir
-
- def test_find_config_fallback(self):
- tmpdir = self.tmpdir('test_find_config_fallback')
-
- with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
- assert auth.find_config_file() is None
-
- def test_find_config_from_explicit_path(self):
- tmpdir = self.tmpdir('test_find_config_from_explicit_path')
- config_path = tmpdir.ensure('my-config-file.json')
-
- assert auth.find_config_file(str(config_path)) == str(config_path)
-
- def test_find_config_from_environment(self):
- tmpdir = self.tmpdir('test_find_config_from_environment')
- config_path = tmpdir.ensure('config.json')
-
- with mock.patch.dict(os.environ, {'DOCKER_CONFIG': str(tmpdir)}):
- assert auth.find_config_file() == str(config_path)
-
- @mark.skipif("sys.platform == 'win32'")
- def test_find_config_from_home_posix(self):
- tmpdir = self.tmpdir('test_find_config_from_home_posix')
- config_path = tmpdir.ensure('.docker', 'config.json')
-
- with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
- assert auth.find_config_file() == str(config_path)
-
- @mark.skipif("sys.platform == 'win32'")
- def test_find_config_from_home_legacy_name(self):
- tmpdir = self.tmpdir('test_find_config_from_home_legacy_name')
- config_path = tmpdir.ensure('.dockercfg')
-
- with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
- assert auth.find_config_file() == str(config_path)
-
- @mark.skipif("sys.platform != 'win32'")
- def test_find_config_from_home_windows(self):
- tmpdir = self.tmpdir('test_find_config_from_home_windows')
- config_path = tmpdir.ensure('.docker', 'config.json')
-
- with mock.patch.dict(os.environ, {'USERPROFILE': str(tmpdir)}):
- assert auth.find_config_file() == str(config_path)
-
-
class LoadConfigTest(unittest.TestCase):
def test_load_config_no_file(self):
folder = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, folder)
cfg = auth.load_config(folder)
- self.assertTrue(cfg is not None)
+ assert cfg is not None
def test_load_config(self):
folder = tempfile.mkdtemp()
@@ -390,12 +279,12 @@ class LoadConfigTest(unittest.TestCase):
f.write('email = sakuya@scarlet.net')
cfg = auth.load_config(dockercfg_path)
assert auth.INDEX_NAME in cfg
- self.assertNotEqual(cfg[auth.INDEX_NAME], None)
+ assert cfg[auth.INDEX_NAME] is not None
cfg = cfg[auth.INDEX_NAME]
- self.assertEqual(cfg['username'], 'sakuya')
- self.assertEqual(cfg['password'], 'izayoi')
- self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
- self.assertEqual(cfg.get('auth'), None)
+ assert cfg['username'] == 'sakuya'
+ assert cfg['password'] == 'izayoi'
+ assert cfg['email'] == 'sakuya@scarlet.net'
+ assert cfg.get('auth') is None
def test_load_config_with_random_name(self):
folder = tempfile.mkdtemp()
@@ -418,12 +307,12 @@ class LoadConfigTest(unittest.TestCase):
cfg = auth.load_config(dockercfg_path)
assert registry in cfg
- self.assertNotEqual(cfg[registry], None)
+ assert cfg[registry] is not None
cfg = cfg[registry]
- self.assertEqual(cfg['username'], 'sakuya')
- self.assertEqual(cfg['password'], 'izayoi')
- self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
- self.assertEqual(cfg.get('auth'), None)
+ assert cfg['username'] == 'sakuya'
+ assert cfg['password'] == 'izayoi'
+ assert cfg['email'] == 'sakuya@scarlet.net'
+ assert cfg.get('auth') is None
def test_load_config_custom_config_env(self):
folder = tempfile.mkdtemp()
@@ -445,12 +334,12 @@ class LoadConfigTest(unittest.TestCase):
with mock.patch.dict(os.environ, {'DOCKER_CONFIG': folder}):
cfg = auth.load_config(None)
assert registry in cfg
- self.assertNotEqual(cfg[registry], None)
+ assert cfg[registry] is not None
cfg = cfg[registry]
- self.assertEqual(cfg['username'], 'sakuya')
- self.assertEqual(cfg['password'], 'izayoi')
- self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
- self.assertEqual(cfg.get('auth'), None)
+ assert cfg['username'] == 'sakuya'
+ assert cfg['password'] == 'izayoi'
+ assert cfg['email'] == 'sakuya@scarlet.net'
+ assert cfg.get('auth') is None
def test_load_config_custom_config_env_with_auths(self):
folder = tempfile.mkdtemp()
@@ -473,13 +362,12 @@ class LoadConfigTest(unittest.TestCase):
with mock.patch.dict(os.environ, {'DOCKER_CONFIG': folder}):
cfg = auth.load_config(None)
- assert registry in cfg
- self.assertNotEqual(cfg[registry], None)
- cfg = cfg[registry]
- self.assertEqual(cfg['username'], 'sakuya')
- self.assertEqual(cfg['password'], 'izayoi')
- self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
- self.assertEqual(cfg.get('auth'), None)
+ assert registry in cfg['auths']
+ cfg = cfg['auths'][registry]
+ assert cfg['username'] == 'sakuya'
+ assert cfg['password'] == 'izayoi'
+ assert cfg['email'] == 'sakuya@scarlet.net'
+ assert cfg.get('auth') is None
def test_load_config_custom_config_env_utf8(self):
folder = tempfile.mkdtemp()
@@ -503,37 +391,12 @@ class LoadConfigTest(unittest.TestCase):
with mock.patch.dict(os.environ, {'DOCKER_CONFIG': folder}):
cfg = auth.load_config(None)
- assert registry in cfg
- self.assertNotEqual(cfg[registry], None)
- cfg = cfg[registry]
- self.assertEqual(cfg['username'], b'sakuya\xc3\xa6'.decode('utf8'))
- self.assertEqual(cfg['password'], b'izayoi\xc3\xa6'.decode('utf8'))
- self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
- self.assertEqual(cfg.get('auth'), None)
-
- def test_load_config_custom_config_env_with_headers(self):
- folder = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, folder)
-
- dockercfg_path = os.path.join(folder, 'config.json')
- config = {
- 'HttpHeaders': {
- 'Name': 'Spike',
- 'Surname': 'Spiegel'
- },
- }
-
- with open(dockercfg_path, 'w') as f:
- json.dump(config, f)
-
- with mock.patch.dict(os.environ, {'DOCKER_CONFIG': folder}):
- cfg = auth.load_config(None)
- assert 'HttpHeaders' in cfg
- self.assertNotEqual(cfg['HttpHeaders'], None)
- cfg = cfg['HttpHeaders']
-
- self.assertEqual(cfg['Name'], 'Spike')
- self.assertEqual(cfg['Surname'], 'Spiegel')
+ assert registry in cfg['auths']
+ cfg = cfg['auths'][registry]
+ assert cfg['username'] == b'sakuya\xc3\xa6'.decode('utf8')
+ assert cfg['password'] == b'izayoi\xc3\xa6'.decode('utf8')
+ assert cfg['email'] == 'sakuya@scarlet.net'
+ assert cfg.get('auth') is None
def test_load_config_unknown_keys(self):
folder = tempfile.mkdtemp()
@@ -561,7 +424,7 @@ class LoadConfigTest(unittest.TestCase):
json.dump(config, f)
cfg = auth.load_config(dockercfg_path)
- assert cfg == {'scarlet.net': {}}
+ assert cfg == {'auths': {'scarlet.net': {}}}
def test_load_config_identity_token(self):
folder = tempfile.mkdtemp()
@@ -582,7 +445,7 @@ class LoadConfigTest(unittest.TestCase):
json.dump(config, f)
cfg = auth.load_config(dockercfg_path)
- assert registry in cfg
- cfg = cfg[registry]
+ assert registry in cfg['auths']
+ cfg = cfg['auths'][registry]
assert 'IdentityToken' in cfg
assert cfg['IdentityToken'] == token
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index c4996f1..cce99c5 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -8,6 +8,7 @@ import os
import unittest
from . import fake_api
+import pytest
try:
from unittest import mock
@@ -51,25 +52,25 @@ class ClientTest(unittest.TestCase):
def test_call_api_client_method(self):
client = docker.from_env()
- with self.assertRaises(AttributeError) as cm:
+ with pytest.raises(AttributeError) as cm:
client.create_container()
- s = str(cm.exception)
+ s = cm.exconly()
assert "'DockerClient' object has no attribute 'create_container'" in s
assert "this method is now on the object APIClient" in s
- with self.assertRaises(AttributeError) as cm:
+ with pytest.raises(AttributeError) as cm:
client.abcdef()
- s = str(cm.exception)
+ s = cm.exconly()
assert "'DockerClient' object has no attribute 'abcdef'" in s
assert "this method is now on the object APIClient" not in s
def test_call_containers(self):
client = docker.DockerClient(**kwargs_from_env())
- with self.assertRaises(TypeError) as cm:
+ with pytest.raises(TypeError) as cm:
client.containers()
- s = str(cm.exception)
+ s = cm.exconly()
assert "'ContainerCollection' object is not callable" in s
assert "docker.APIClient" in s
@@ -90,22 +91,22 @@ class FromEnvTest(unittest.TestCase):
DOCKER_CERT_PATH=TEST_CERT_DIR,
DOCKER_TLS_VERIFY='1')
client = docker.from_env()
- self.assertEqual(client.api.base_url, "https://192.168.59.103:2376")
+ assert client.api.base_url == "https://192.168.59.103:2376"
def test_from_env_with_version(self):
os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376',
DOCKER_CERT_PATH=TEST_CERT_DIR,
DOCKER_TLS_VERIFY='1')
client = docker.from_env(version='2.32')
- self.assertEqual(client.api.base_url, "https://192.168.59.103:2376")
- self.assertEqual(client.api._version, '2.32')
+ assert client.api.base_url == "https://192.168.59.103:2376"
+ assert client.api._version == '2.32'
def test_from_env_without_version_uses_default(self):
client = docker.from_env()
- self.assertEqual(client.api._version, DEFAULT_DOCKER_API_VERSION)
+ assert client.api._version == DEFAULT_DOCKER_API_VERSION
def test_from_env_without_timeout_uses_default(self):
client = docker.from_env()
- self.assertEqual(client.api.timeout, DEFAULT_TIMEOUT_SECONDS)
+ assert client.api.timeout == DEFAULT_TIMEOUT_SECONDS
diff --git a/tests/unit/dockertypes_test.py b/tests/unit/dockertypes_test.py
index 40adbb7..2be0578 100644
--- a/tests/unit/dockertypes_test.py
+++ b/tests/unit/dockertypes_test.py
@@ -1,16 +1,16 @@
# -*- coding: utf-8 -*-
import unittest
-import warnings
import pytest
from docker.constants import DEFAULT_DOCKER_API_VERSION
from docker.errors import InvalidArgument, InvalidVersion
from docker.types import (
- ContainerConfig, ContainerSpec, EndpointConfig, HostConfig, IPAMConfig,
+ ContainerSpec, EndpointConfig, HostConfig, IPAMConfig,
IPAMPool, LogConfig, Mount, ServiceMode, Ulimit,
)
+from docker.types.services import convert_service_ports
try:
from unittest import mock
@@ -23,88 +23,75 @@ def create_host_config(*args, **kwargs):
class HostConfigTest(unittest.TestCase):
- def test_create_host_config_no_options(self):
- config = create_host_config(version='1.19')
- self.assertFalse('NetworkMode' in config)
-
def test_create_host_config_no_options_newer_api_version(self):
- config = create_host_config(version='1.20')
- self.assertEqual(config['NetworkMode'], 'default')
+ config = create_host_config(version='1.21')
+ assert config['NetworkMode'] == 'default'
def test_create_host_config_invalid_cpu_cfs_types(self):
with pytest.raises(TypeError):
- create_host_config(version='1.20', cpu_quota='0')
+ create_host_config(version='1.21', cpu_quota='0')
with pytest.raises(TypeError):
- create_host_config(version='1.20', cpu_period='0')
+ create_host_config(version='1.21', cpu_period='0')
with pytest.raises(TypeError):
- create_host_config(version='1.20', cpu_quota=23.11)
+ create_host_config(version='1.21', cpu_quota=23.11)
with pytest.raises(TypeError):
- create_host_config(version='1.20', cpu_period=1999.0)
+ create_host_config(version='1.21', cpu_period=1999.0)
def test_create_host_config_with_cpu_quota(self):
- config = create_host_config(version='1.20', cpu_quota=1999)
- self.assertEqual(config.get('CpuQuota'), 1999)
+ config = create_host_config(version='1.21', cpu_quota=1999)
+ assert config.get('CpuQuota') == 1999
def test_create_host_config_with_cpu_period(self):
- config = create_host_config(version='1.20', cpu_period=1999)
- self.assertEqual(config.get('CpuPeriod'), 1999)
+ config = create_host_config(version='1.21', cpu_period=1999)
+ assert config.get('CpuPeriod') == 1999
def test_create_host_config_with_blkio_constraints(self):
blkio_rate = [{"Path": "/dev/sda", "Rate": 1000}]
- config = create_host_config(version='1.22',
- blkio_weight=1999,
- blkio_weight_device=blkio_rate,
- device_read_bps=blkio_rate,
- device_write_bps=blkio_rate,
- device_read_iops=blkio_rate,
- device_write_iops=blkio_rate)
-
- self.assertEqual(config.get('BlkioWeight'), 1999)
- self.assertTrue(config.get('BlkioWeightDevice') is blkio_rate)
- self.assertTrue(config.get('BlkioDeviceReadBps') is blkio_rate)
- self.assertTrue(config.get('BlkioDeviceWriteBps') is blkio_rate)
- self.assertTrue(config.get('BlkioDeviceReadIOps') is blkio_rate)
- self.assertTrue(config.get('BlkioDeviceWriteIOps') is blkio_rate)
- self.assertEqual(blkio_rate[0]['Path'], "/dev/sda")
- self.assertEqual(blkio_rate[0]['Rate'], 1000)
+ config = create_host_config(
+ version='1.22', blkio_weight=1999, blkio_weight_device=blkio_rate,
+ device_read_bps=blkio_rate, device_write_bps=blkio_rate,
+ device_read_iops=blkio_rate, device_write_iops=blkio_rate
+ )
+
+ assert config.get('BlkioWeight') == 1999
+ assert config.get('BlkioWeightDevice') is blkio_rate
+ assert config.get('BlkioDeviceReadBps') is blkio_rate
+ assert config.get('BlkioDeviceWriteBps') is blkio_rate
+ assert config.get('BlkioDeviceReadIOps') is blkio_rate
+ assert config.get('BlkioDeviceWriteIOps') is blkio_rate
+ assert blkio_rate[0]['Path'] == "/dev/sda"
+ assert blkio_rate[0]['Rate'] == 1000
def test_create_host_config_with_shm_size(self):
config = create_host_config(version='1.22', shm_size=67108864)
- self.assertEqual(config.get('ShmSize'), 67108864)
+ assert config.get('ShmSize') == 67108864
def test_create_host_config_with_shm_size_in_mb(self):
config = create_host_config(version='1.22', shm_size='64M')
- self.assertEqual(config.get('ShmSize'), 67108864)
+ assert config.get('ShmSize') == 67108864
def test_create_host_config_with_oom_kill_disable(self):
- config = create_host_config(version='1.20', oom_kill_disable=True)
- self.assertEqual(config.get('OomKillDisable'), True)
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(version='1.18.3',
- oom_kill_disable=True))
+ config = create_host_config(version='1.21', oom_kill_disable=True)
+ assert config.get('OomKillDisable') is True
def test_create_host_config_with_userns_mode(self):
config = create_host_config(version='1.23', userns_mode='host')
- self.assertEqual(config.get('UsernsMode'), 'host')
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(version='1.22',
- userns_mode='host'))
- self.assertRaises(
- ValueError, lambda: create_host_config(version='1.23',
- userns_mode='host12'))
+ assert config.get('UsernsMode') == 'host'
+ with pytest.raises(InvalidVersion):
+ create_host_config(version='1.22', userns_mode='host')
+ with pytest.raises(ValueError):
+ create_host_config(version='1.23', userns_mode='host12')
def test_create_host_config_with_oom_score_adj(self):
config = create_host_config(version='1.22', oom_score_adj=100)
- self.assertEqual(config.get('OomScoreAdj'), 100)
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(version='1.21',
- oom_score_adj=100))
- self.assertRaises(
- TypeError, lambda: create_host_config(version='1.22',
- oom_score_adj='100'))
+ assert config.get('OomScoreAdj') == 100
+ with pytest.raises(InvalidVersion):
+ create_host_config(version='1.21', oom_score_adj=100)
+ with pytest.raises(TypeError):
+ create_host_config(version='1.22', oom_score_adj='100')
def test_create_host_config_with_dns_opt(self):
@@ -112,30 +99,20 @@ class HostConfigTest(unittest.TestCase):
config = create_host_config(version='1.21', dns_opt=tested_opts)
dns_opts = config.get('DnsOptions')
- self.assertTrue('use-vc' in dns_opts)
- self.assertTrue('no-tld-query' in dns_opts)
-
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(version='1.20',
- dns_opt=tested_opts))
+ assert 'use-vc' in dns_opts
+ assert 'no-tld-query' in dns_opts
def test_create_host_config_with_mem_reservation(self):
config = create_host_config(version='1.21', mem_reservation=67108864)
- self.assertEqual(config.get('MemoryReservation'), 67108864)
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(
- version='1.20', mem_reservation=67108864))
+ assert config.get('MemoryReservation') == 67108864
def test_create_host_config_with_kernel_memory(self):
config = create_host_config(version='1.21', kernel_memory=67108864)
- self.assertEqual(config.get('KernelMemory'), 67108864)
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(
- version='1.20', kernel_memory=67108864))
+ assert config.get('KernelMemory') == 67108864
def test_create_host_config_with_pids_limit(self):
config = create_host_config(version='1.23', pids_limit=1024)
- self.assertEqual(config.get('PidsLimit'), 1024)
+ assert config.get('PidsLimit') == 1024
with pytest.raises(InvalidVersion):
create_host_config(version='1.22', pids_limit=1024)
@@ -144,7 +121,7 @@ class HostConfigTest(unittest.TestCase):
def test_create_host_config_with_isolation(self):
config = create_host_config(version='1.24', isolation='hyperv')
- self.assertEqual(config.get('Isolation'), 'hyperv')
+ assert config.get('Isolation') == 'hyperv'
with pytest.raises(InvalidVersion):
create_host_config(version='1.23', isolation='hyperv')
@@ -167,9 +144,6 @@ class HostConfigTest(unittest.TestCase):
create_host_config(version='1.24', mem_swappiness='40')
def test_create_host_config_with_volume_driver(self):
- with pytest.raises(InvalidVersion):
- create_host_config(version='1.20', volume_driver='local')
-
config = create_host_config(version='1.21', volume_driver='local')
assert config.get('VolumeDriver') == 'local'
@@ -179,10 +153,9 @@ class HostConfigTest(unittest.TestCase):
def test_create_host_config_with_cpu_count(self):
config = create_host_config(version='1.25', cpu_count=2)
- self.assertEqual(config.get('CpuCount'), 2)
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(
- version='1.24', cpu_count=1))
+ assert config.get('CpuCount') == 2
+ with pytest.raises(InvalidVersion):
+ create_host_config(version='1.24', cpu_count=1)
def test_create_host_config_invalid_cpu_percent_types(self):
with pytest.raises(TypeError):
@@ -190,10 +163,9 @@ class HostConfigTest(unittest.TestCase):
def test_create_host_config_with_cpu_percent(self):
config = create_host_config(version='1.25', cpu_percent=15)
- self.assertEqual(config.get('CpuPercent'), 15)
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(
- version='1.24', cpu_percent=10))
+ assert config.get('CpuPercent') == 15
+ with pytest.raises(InvalidVersion):
+ create_host_config(version='1.24', cpu_percent=10)
def test_create_host_config_invalid_nano_cpus_types(self):
with pytest.raises(TypeError):
@@ -201,10 +173,9 @@ class HostConfigTest(unittest.TestCase):
def test_create_host_config_with_nano_cpus(self):
config = create_host_config(version='1.25', nano_cpus=1000)
- self.assertEqual(config.get('NanoCpus'), 1000)
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(
- version='1.24', nano_cpus=1))
+ assert config.get('NanoCpus') == 1000
+ with pytest.raises(InvalidVersion):
+ create_host_config(version='1.24', nano_cpus=1)
def test_create_host_config_with_cpu_rt_period_types(self):
with pytest.raises(TypeError):
@@ -212,10 +183,9 @@ class HostConfigTest(unittest.TestCase):
def test_create_host_config_with_cpu_rt_period(self):
config = create_host_config(version='1.25', cpu_rt_period=1000)
- self.assertEqual(config.get('CPURealtimePeriod'), 1000)
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(
- version='1.24', cpu_rt_period=1000))
+ assert config.get('CPURealtimePeriod') == 1000
+ with pytest.raises(InvalidVersion):
+ create_host_config(version='1.24', cpu_rt_period=1000)
def test_ctrate_host_config_with_cpu_rt_runtime_types(self):
with pytest.raises(TypeError):
@@ -223,23 +193,9 @@ class HostConfigTest(unittest.TestCase):
def test_create_host_config_with_cpu_rt_runtime(self):
config = create_host_config(version='1.25', cpu_rt_runtime=1000)
- self.assertEqual(config.get('CPURealtimeRuntime'), 1000)
- self.assertRaises(
- InvalidVersion, lambda: create_host_config(
- version='1.24', cpu_rt_runtime=1000))
-
-
-class ContainerConfigTest(unittest.TestCase):
- def test_create_container_config_volume_driver_warning(self):
- with warnings.catch_warnings(record=True) as w:
- warnings.simplefilter('always')
- ContainerConfig(
- version='1.21', image='scratch', command=None,
- volume_driver='local'
- )
-
- assert len(w) == 1
- assert 'The volume_driver option has been moved' in str(w[0].message)
+ assert config.get('CPURealtimeRuntime') == 1000
+ with pytest.raises(InvalidVersion):
+ create_host_config(version='1.24', cpu_rt_runtime=1000)
class ContainerSpecTest(unittest.TestCase):
@@ -264,43 +220,46 @@ class UlimitTest(unittest.TestCase):
config = create_host_config(
ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION
)
- self.assertIn('Ulimits', config)
- self.assertEqual(len(config['Ulimits']), 1)
+ assert 'Ulimits' in config
+ assert len(config['Ulimits']) == 1
ulimit_obj = config['Ulimits'][0]
- self.assertTrue(isinstance(ulimit_obj, Ulimit))
- self.assertEqual(ulimit_obj.name, ulimit_dct['name'])
- self.assertEqual(ulimit_obj.soft, ulimit_dct['soft'])
- self.assertEqual(ulimit_obj['Soft'], ulimit_obj.soft)
+ assert isinstance(ulimit_obj, Ulimit)
+ assert ulimit_obj.name == ulimit_dct['name']
+ assert ulimit_obj.soft == ulimit_dct['soft']
+ assert ulimit_obj['Soft'] == ulimit_obj.soft
def test_create_host_config_dict_ulimit_capitals(self):
ulimit_dct = {'Name': 'nofile', 'Soft': 8096, 'Hard': 8096 * 4}
config = create_host_config(
ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION
)
- self.assertIn('Ulimits', config)
- self.assertEqual(len(config['Ulimits']), 1)
+ assert 'Ulimits' in config
+ assert len(config['Ulimits']) == 1
ulimit_obj = config['Ulimits'][0]
- self.assertTrue(isinstance(ulimit_obj, Ulimit))
- self.assertEqual(ulimit_obj.name, ulimit_dct['Name'])
- self.assertEqual(ulimit_obj.soft, ulimit_dct['Soft'])
- self.assertEqual(ulimit_obj.hard, ulimit_dct['Hard'])
- self.assertEqual(ulimit_obj['Soft'], ulimit_obj.soft)
+ assert isinstance(ulimit_obj, Ulimit)
+ assert ulimit_obj.name == ulimit_dct['Name']
+ assert ulimit_obj.soft == ulimit_dct['Soft']
+ assert ulimit_obj.hard == ulimit_dct['Hard']
+ assert ulimit_obj['Soft'] == ulimit_obj.soft
def test_create_host_config_obj_ulimit(self):
ulimit_dct = Ulimit(name='nofile', soft=8096)
config = create_host_config(
ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION
)
- self.assertIn('Ulimits', config)
- self.assertEqual(len(config['Ulimits']), 1)
+ assert 'Ulimits' in config
+ assert len(config['Ulimits']) == 1
ulimit_obj = config['Ulimits'][0]
- self.assertTrue(isinstance(ulimit_obj, Ulimit))
- self.assertEqual(ulimit_obj, ulimit_dct)
+ assert isinstance(ulimit_obj, Ulimit)
+ assert ulimit_obj == ulimit_dct
def test_ulimit_invalid_type(self):
- self.assertRaises(ValueError, lambda: Ulimit(name=None))
- self.assertRaises(ValueError, lambda: Ulimit(name='hello', soft='123'))
- self.assertRaises(ValueError, lambda: Ulimit(name='hello', hard='456'))
+ with pytest.raises(ValueError):
+ Ulimit(name=None)
+ with pytest.raises(ValueError):
+ Ulimit(name='hello', soft='123')
+ with pytest.raises(ValueError):
+ Ulimit(name='hello', hard='456')
class LogConfigTest(unittest.TestCase):
@@ -309,18 +268,18 @@ class LogConfigTest(unittest.TestCase):
config = create_host_config(
version=DEFAULT_DOCKER_API_VERSION, log_config=dct
)
- self.assertIn('LogConfig', config)
- self.assertTrue(isinstance(config['LogConfig'], LogConfig))
- self.assertEqual(dct['type'], config['LogConfig'].type)
+ assert 'LogConfig' in config
+ assert isinstance(config['LogConfig'], LogConfig)
+ assert dct['type'] == config['LogConfig'].type
def test_create_host_config_obj_logconfig(self):
obj = LogConfig(type=LogConfig.types.SYSLOG, config={'key1': 'val1'})
config = create_host_config(
version=DEFAULT_DOCKER_API_VERSION, log_config=obj
)
- self.assertIn('LogConfig', config)
- self.assertTrue(isinstance(config['LogConfig'], LogConfig))
- self.assertEqual(obj, config['LogConfig'])
+ assert 'LogConfig' in config
+ assert isinstance(config['LogConfig'], LogConfig)
+ assert obj == config['LogConfig']
def test_logconfig_invalid_config_type(self):
with pytest.raises(ValueError):
@@ -342,7 +301,7 @@ class IPAMConfigTest(unittest.TestCase):
gateway='192.168.52.254')
ipam_config = IPAMConfig(pool_configs=[ipam_pool])
- self.assertEqual(ipam_config, {
+ assert ipam_config == {
'Driver': 'default',
'Config': [{
'Subnet': '192.168.52.0/24',
@@ -350,7 +309,7 @@ class IPAMConfigTest(unittest.TestCase):
'AuxiliaryAddresses': None,
'IPRange': None,
}]
- })
+ }
class ServiceModeTest(unittest.TestCase):
@@ -435,3 +394,77 @@ class MountTest(unittest.TestCase):
assert mount['Source'] == "C:/foo/bar"
assert mount['Target'] == "/baz"
assert mount['Type'] == 'bind'
+
+
+class ServicePortsTest(unittest.TestCase):
+ def test_convert_service_ports_simple(self):
+ ports = {8080: 80}
+ assert convert_service_ports(ports) == [{
+ 'Protocol': 'tcp',
+ 'PublishedPort': 8080,
+ 'TargetPort': 80,
+ }]
+
+ def test_convert_service_ports_with_protocol(self):
+ ports = {8080: (80, 'udp')}
+
+ assert convert_service_ports(ports) == [{
+ 'Protocol': 'udp',
+ 'PublishedPort': 8080,
+ 'TargetPort': 80,
+ }]
+
+ def test_convert_service_ports_with_protocol_and_mode(self):
+ ports = {8080: (80, 'udp', 'ingress')}
+
+ assert convert_service_ports(ports) == [{
+ 'Protocol': 'udp',
+ 'PublishedPort': 8080,
+ 'TargetPort': 80,
+ 'PublishMode': 'ingress',
+ }]
+
+ def test_convert_service_ports_invalid(self):
+ ports = {8080: ('way', 'too', 'many', 'items', 'here')}
+
+ with pytest.raises(ValueError):
+ convert_service_ports(ports)
+
+ def test_convert_service_ports_no_protocol_and_mode(self):
+ ports = {8080: (80, None, 'host')}
+
+ assert convert_service_ports(ports) == [{
+ 'Protocol': 'tcp',
+ 'PublishedPort': 8080,
+ 'TargetPort': 80,
+ 'PublishMode': 'host',
+ }]
+
+ def test_convert_service_ports_multiple(self):
+ ports = {
+ 8080: (80, None, 'host'),
+ 9999: 99,
+ 2375: (2375,)
+ }
+
+ converted_ports = convert_service_ports(ports)
+ assert {
+ 'Protocol': 'tcp',
+ 'PublishedPort': 8080,
+ 'TargetPort': 80,
+ 'PublishMode': 'host',
+ } in converted_ports
+
+ assert {
+ 'Protocol': 'tcp',
+ 'PublishedPort': 9999,
+ 'TargetPort': 99,
+ } in converted_ports
+
+ assert {
+ 'Protocol': 'tcp',
+ 'PublishedPort': 2375,
+ 'TargetPort': 2375,
+ } in converted_ports
+
+ assert len(converted_ports) == 3
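[Note] The new ServicePortsTest cases pin down how convert_service_ports expands the published-port mapping into the Engine's port spec. A short sketch of the same call outside the test suite:

    from docker.types.services import convert_service_ports

    ports = {
        8080: (80, 'udp', 'ingress'),  # published: (target, protocol, mode)
        9999: 99,                      # published: target only, protocol defaults to tcp
    }
    print(convert_service_ports(ports))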
diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py
index 37154a3..63d7331 100644
--- a/tests/unit/fake_api.py
+++ b/tests/unit/fake_api.py
@@ -21,21 +21,36 @@ FAKE_NODE_ID = '24ifsmvkjbyhk'
# for clarity and readability
-def get_fake_raw_version():
+def get_fake_version():
status_code = 200
response = {
- "ApiVersion": "1.18",
- "GitCommit": "fake-commit",
- "GoVersion": "go1.3.3",
- "Version": "1.5.0"
+ 'ApiVersion': '1.35',
+ 'Arch': 'amd64',
+ 'BuildTime': '2018-01-10T20:09:37.000000000+00:00',
+ 'Components': [{
+ 'Details': {
+ 'ApiVersion': '1.35',
+ 'Arch': 'amd64',
+ 'BuildTime': '2018-01-10T20:09:37.000000000+00:00',
+ 'Experimental': 'false',
+ 'GitCommit': '03596f5',
+ 'GoVersion': 'go1.9.2',
+ 'KernelVersion': '4.4.0-112-generic',
+ 'MinAPIVersion': '1.12',
+ 'Os': 'linux'
+ },
+ 'Name': 'Engine',
+ 'Version': '18.01.0-ce'
+ }],
+ 'GitCommit': '03596f5',
+ 'GoVersion': 'go1.9.2',
+ 'KernelVersion': '4.4.0-112-generic',
+ 'MinAPIVersion': '1.12',
+ 'Os': 'linux',
+ 'Platform': {'Name': ''},
+ 'Version': '18.01.0-ce'
}
- return status_code, response
-
-def get_fake_version():
- status_code = 200
- response = {'GoVersion': '1', 'Version': '1.1.1',
- 'GitCommit': 'deadbeef+CHANGES'}
return status_code, response
@@ -503,7 +518,7 @@ if constants.IS_WINDOWS_PLATFORM:
fake_responses = {
'{0}/version'.format(prefix):
- get_fake_raw_version,
+ get_fake_version,
'{1}/{0}/version'.format(CURRENT_VERSION, prefix):
get_fake_version,
'{1}/{0}/info'.format(CURRENT_VERSION, prefix):
diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py
index f908355..15b60ea 100644
--- a/tests/unit/fake_api_client.py
+++ b/tests/unit/fake_api_client.py
@@ -46,7 +46,7 @@ def make_fake_api_client():
'logs.return_value': [b'hello world\n'],
'networks.return_value': fake_api.get_fake_network_list()[1],
'start.return_value': None,
- 'wait.return_value': 0,
+ 'wait.return_value': {'StatusCode': 0},
})
mock_client._version = docker.constants.DEFAULT_DOCKER_API_VERSION
return mock_client
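[Note] The fake client now answers wait() with a dict rather than a bare integer, matching the 3.0 API. A minimal sketch of reading the exit code from a real client under that contract:

    import docker

    client = docker.from_env()
    container = client.containers.run('alpine', 'true', detach=True)
    result = container.wait()            # e.g. {'StatusCode': 0}
    assert result['StatusCode'] == 0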
diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py
index a479e83..f79f5d5 100644
--- a/tests/unit/models_containers_test.py
+++ b/tests/unit/models_containers_test.py
@@ -5,6 +5,7 @@ import unittest
from .fake_api import FAKE_CONTAINER_ID, FAKE_IMAGE_ID, FAKE_EXEC_ID
from .fake_api_client import make_fake_client
+import pytest
class ContainerCollectionTest(unittest.TestCase):
@@ -101,6 +102,7 @@ class ContainerCollectionTest(unittest.TestCase):
'volumename:/mnt/vol3',
'/volumewithnohostpath',
'/anothervolumewithnohostpath:ro',
+ 'C:\\windows\\path:D:\\hello\\world:rw'
],
volumes_from=['container'],
working_dir='/code'
@@ -119,7 +121,8 @@ class ContainerCollectionTest(unittest.TestCase):
'/var/www:/mnt/vol1:ro',
'volumename:/mnt/vol3',
'/volumewithnohostpath',
- '/anothervolumewithnohostpath:ro'
+ '/anothervolumewithnohostpath:ro',
+ 'C:\\windows\\path:D:\\hello\\world:rw'
],
'BlkioDeviceReadBps': [{'Path': 'foo', 'Rate': 3}],
'BlkioDeviceReadIOps': [{'Path': 'foo', 'Rate': 3}],
@@ -190,7 +193,8 @@ class ContainerCollectionTest(unittest.TestCase):
'/mnt/vol1',
'/mnt/vol3',
'/volumewithnohostpath',
- '/anothervolumewithnohostpath'
+ '/anothervolumewithnohostpath',
+ 'D:\\hello\\world'
],
working_dir='/code'
)
@@ -225,17 +229,17 @@ class ContainerCollectionTest(unittest.TestCase):
container = client.containers.run('alpine', 'sleep 300', detach=True)
assert container.id == FAKE_CONTAINER_ID
- client.api.pull.assert_called_with('alpine', tag=None)
+ client.api.pull.assert_called_with('alpine', platform=None, tag=None)
def test_run_with_error(self):
client = make_fake_client()
client.api.logs.return_value = "some error"
- client.api.wait.return_value = 1
+ client.api.wait.return_value = {'StatusCode': 1}
- with self.assertRaises(docker.errors.ContainerError) as cm:
+ with pytest.raises(docker.errors.ContainerError) as cm:
client.containers.run('alpine', 'echo hello world')
- assert cm.exception.exit_status == 1
- assert "some error" in str(cm.exception)
+ assert cm.value.exit_status == 1
+ assert "some error" in cm.exconly()
def test_run_with_image_object(self):
client = make_fake_client()
@@ -256,8 +260,8 @@ class ContainerCollectionTest(unittest.TestCase):
client.api.remove_container.assert_not_called()
client = make_fake_client()
- client.api.wait.return_value = 1
- with self.assertRaises(docker.errors.ContainerError):
+ client.api.wait.return_value = {'StatusCode': 1}
+ with pytest.raises(docker.errors.ContainerError):
client.containers.run("alpine")
client.api.remove_container.assert_not_called()
@@ -266,19 +270,19 @@ class ContainerCollectionTest(unittest.TestCase):
client.api.remove_container.assert_called_with(FAKE_CONTAINER_ID)
client = make_fake_client()
- client.api.wait.return_value = 1
- with self.assertRaises(docker.errors.ContainerError):
+ client.api.wait.return_value = {'StatusCode': 1}
+ with pytest.raises(docker.errors.ContainerError):
client.containers.run("alpine", remove=True)
client.api.remove_container.assert_called_with(FAKE_CONTAINER_ID)
client = make_fake_client()
client.api._version = '1.24'
- with self.assertRaises(RuntimeError):
+ with pytest.raises(RuntimeError):
client.containers.run("alpine", detach=True, remove=True)
client = make_fake_client()
client.api._version = '1.23'
- with self.assertRaises(RuntimeError):
+ with pytest.raises(RuntimeError):
client.containers.run("alpine", detach=True, remove=True)
client = make_fake_client()
@@ -394,12 +398,26 @@ class ContainerTest(unittest.TestCase):
container.exec_run("echo hello world", privileged=True, stream=True)
client.api.exec_create.assert_called_with(
FAKE_CONTAINER_ID, "echo hello world", stdout=True, stderr=True,
- stdin=False, tty=False, privileged=True, user='', environment=None
+ stdin=False, tty=False, privileged=True, user='', environment=None,
+ workdir=None
)
client.api.exec_start.assert_called_with(
FAKE_EXEC_ID, detach=False, tty=False, stream=True, socket=False
)
+ def test_exec_run_failure(self):
+ client = make_fake_client()
+ container = client.containers.get(FAKE_CONTAINER_ID)
+ container.exec_run("docker ps", privileged=True, stream=False)
+ client.api.exec_create.assert_called_with(
+ FAKE_CONTAINER_ID, "docker ps", stdout=True, stderr=True,
+ stdin=False, tty=False, privileged=True, user='', environment=None,
+ workdir=None
+ )
+ client.api.exec_start.assert_called_with(
+ FAKE_EXEC_ID, detach=False, tty=False, stream=False, socket=False
+ )
+
def test_export(self):
client = make_fake_client()
container = client.containers.get(FAKE_CONTAINER_ID)
diff --git a/tests/unit/models_images_test.py b/tests/unit/models_images_test.py
index 9ecb7e4..dacd72b 100644
--- a/tests/unit/models_images_test.py
+++ b/tests/unit/models_images_test.py
@@ -41,9 +41,22 @@ class ImageCollectionTest(unittest.TestCase):
def test_pull(self):
client = make_fake_client()
- image = client.images.pull('test_image')
+ image = client.images.pull('test_image:latest')
+ client.api.pull.assert_called_with('test_image', tag='latest')
+ client.api.inspect_image.assert_called_with('test_image:latest')
+ assert isinstance(image, Image)
+ assert image.id == FAKE_IMAGE_ID
+
+ def test_pull_multiple(self):
+ client = make_fake_client()
+ images = client.images.pull('test_image')
client.api.pull.assert_called_with('test_image', tag=None)
- client.api.inspect_image.assert_called_with('test_image')
+ client.api.images.assert_called_with(
+ all=False, name='test_image', filters=None
+ )
+ client.api.inspect_image.assert_called_with(FAKE_IMAGE_ID)
+ assert len(images) == 1
+ image = images[0]
assert isinstance(image, Image)
assert image.id == FAKE_IMAGE_ID
diff --git a/tests/unit/ssladapter_test.py b/tests/unit/ssladapter_test.py
index 2b7ce52..73b7336 100644
--- a/tests/unit/ssladapter_test.py
+++ b/tests/unit/ssladapter_test.py
@@ -1,5 +1,6 @@
import unittest
from docker.transport import ssladapter
+import pytest
try:
from backports.ssl_match_hostname import (
@@ -69,11 +70,9 @@ class MatchHostnameTest(unittest.TestCase):
assert match_hostname(self.cert, 'touhou.gensokyo.jp') is None
def test_match_ip_address_failure(self):
- self.assertRaises(
- CertificateError, match_hostname, self.cert, '192.168.0.25'
- )
+ with pytest.raises(CertificateError):
+ match_hostname(self.cert, '192.168.0.25')
def test_match_dns_failure(self):
- self.assertRaises(
- CertificateError, match_hostname, self.cert, 'foobar.co.uk'
- )
+ with pytest.raises(CertificateError):
+ match_hostname(self.cert, 'foobar.co.uk')
diff --git a/tests/unit/swarm_test.py b/tests/unit/swarm_test.py
index 9a66c0c..4385380 100644
--- a/tests/unit/swarm_test.py
+++ b/tests/unit/swarm_test.py
@@ -21,15 +21,11 @@ class SwarmTest(BaseAPIClientTest):
node_id=fake_api.FAKE_NODE_ID, version=1, node_spec=node_spec
)
args = fake_request.call_args
- self.assertEqual(
- args[0][1], url_prefix + 'nodes/24ifsmvkjbyhk/update?version=1'
- )
- self.assertEqual(
- json.loads(args[1]['data']), node_spec
- )
- self.assertEqual(
- args[1]['headers']['Content-Type'], 'application/json'
+ assert args[0][1] == (
+ url_prefix + 'nodes/24ifsmvkjbyhk/update?version=1'
)
+ assert json.loads(args[1]['data']) == node_spec
+ assert args[1]['headers']['Content-Type'] == 'application/json'
@requires_api_version('1.24')
def test_join_swarm(self):
diff --git a/tests/unit/utils_config_test.py b/tests/unit/utils_config_test.py
new file mode 100644
index 0000000..50ba383
--- /dev/null
+++ b/tests/unit/utils_config_test.py
@@ -0,0 +1,123 @@
+import os
+import unittest
+import shutil
+import tempfile
+import json
+
+from py.test import ensuretemp
+from pytest import mark
+from docker.utils import config
+
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
+
+class FindConfigFileTest(unittest.TestCase):
+ def tmpdir(self, name):
+ tmpdir = ensuretemp(name)
+ self.addCleanup(tmpdir.remove)
+ return tmpdir
+
+ def test_find_config_fallback(self):
+ tmpdir = self.tmpdir('test_find_config_fallback')
+
+ with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
+ assert config.find_config_file() is None
+
+ def test_find_config_from_explicit_path(self):
+ tmpdir = self.tmpdir('test_find_config_from_explicit_path')
+ config_path = tmpdir.ensure('my-config-file.json')
+
+ assert config.find_config_file(str(config_path)) == str(config_path)
+
+ def test_find_config_from_environment(self):
+ tmpdir = self.tmpdir('test_find_config_from_environment')
+ config_path = tmpdir.ensure('config.json')
+
+ with mock.patch.dict(os.environ, {'DOCKER_CONFIG': str(tmpdir)}):
+ assert config.find_config_file() == str(config_path)
+
+ @mark.skipif("sys.platform == 'win32'")
+ def test_find_config_from_home_posix(self):
+ tmpdir = self.tmpdir('test_find_config_from_home_posix')
+ config_path = tmpdir.ensure('.docker', 'config.json')
+
+ with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
+ assert config.find_config_file() == str(config_path)
+
+ @mark.skipif("sys.platform == 'win32'")
+ def test_find_config_from_home_legacy_name(self):
+ tmpdir = self.tmpdir('test_find_config_from_home_legacy_name')
+ config_path = tmpdir.ensure('.dockercfg')
+
+ with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
+ assert config.find_config_file() == str(config_path)
+
+ @mark.skipif("sys.platform != 'win32'")
+ def test_find_config_from_home_windows(self):
+ tmpdir = self.tmpdir('test_find_config_from_home_windows')
+ config_path = tmpdir.ensure('.docker', 'config.json')
+
+ with mock.patch.dict(os.environ, {'USERPROFILE': str(tmpdir)}):
+ assert config.find_config_file() == str(config_path)
+
+
+class LoadConfigTest(unittest.TestCase):
+ def test_load_config_no_file(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+ cfg = config.load_general_config(folder)
+ assert cfg is not None
+ assert isinstance(cfg, dict)
+ assert not cfg
+
+ def test_load_config_custom_headers(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+
+ dockercfg_path = os.path.join(folder, 'config.json')
+ config_data = {
+ 'HttpHeaders': {
+ 'Name': 'Spike',
+ 'Surname': 'Spiegel'
+ },
+ }
+
+ with open(dockercfg_path, 'w') as f:
+ json.dump(config_data, f)
+
+ cfg = config.load_general_config(dockercfg_path)
+ assert 'HttpHeaders' in cfg
+ assert cfg['HttpHeaders'] == {
+ 'Name': 'Spike',
+ 'Surname': 'Spiegel'
+ }
+
+ def test_load_config_detach_keys(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+ dockercfg_path = os.path.join(folder, 'config.json')
+ config_data = {
+ 'detachKeys': 'ctrl-q, ctrl-u, ctrl-i'
+ }
+ with open(dockercfg_path, 'w') as f:
+ json.dump(config_data, f)
+
+ cfg = config.load_general_config(dockercfg_path)
+ assert cfg == config_data
+
+ def test_load_config_from_env(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+ dockercfg_path = os.path.join(folder, 'config.json')
+ config_data = {
+ 'detachKeys': 'ctrl-q, ctrl-u, ctrl-i'
+ }
+ with open(dockercfg_path, 'w') as f:
+ json.dump(config_data, f)
+
+ with mock.patch.dict(os.environ, {'DOCKER_CONFIG': folder}):
+ cfg = config.load_general_config(None)
+ assert cfg == config_data
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 2fa1d05..1f9daf6 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -46,7 +46,7 @@ class DecoratorsTest(unittest.TestCase):
return headers
client = APIClient()
- client._auth_configs = {}
+ client._general_configs = {}
g = update_headers(f)
assert g(client, headers=None) is None
@@ -55,7 +55,7 @@ class DecoratorsTest(unittest.TestCase):
'Content-type': 'application/json',
}
- client._auth_configs = {
+ client._general_configs = {
'HttpHeaders': sample_headers
}
@@ -80,25 +80,25 @@ class KwargsFromEnvTest(unittest.TestCase):
os.environ.pop('DOCKER_TLS_VERIFY', None)
kwargs = kwargs_from_env()
- self.assertEqual(None, kwargs.get('base_url'))
- self.assertEqual(None, kwargs.get('tls'))
+ assert kwargs.get('base_url') is None
+ assert kwargs.get('tls') is None
def test_kwargs_from_env_tls(self):
os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376',
DOCKER_CERT_PATH=TEST_CERT_DIR,
DOCKER_TLS_VERIFY='1')
kwargs = kwargs_from_env(assert_hostname=False)
- self.assertEqual('https://192.168.59.103:2376', kwargs['base_url'])
- self.assertTrue('ca.pem' in kwargs['tls'].ca_cert)
- self.assertTrue('cert.pem' in kwargs['tls'].cert[0])
- self.assertTrue('key.pem' in kwargs['tls'].cert[1])
- self.assertEqual(False, kwargs['tls'].assert_hostname)
- self.assertTrue(kwargs['tls'].verify)
+ assert 'https://192.168.59.103:2376' == kwargs['base_url']
+ assert 'ca.pem' in kwargs['tls'].ca_cert
+ assert 'cert.pem' in kwargs['tls'].cert[0]
+ assert 'key.pem' in kwargs['tls'].cert[1]
+ assert kwargs['tls'].assert_hostname is False
+ assert kwargs['tls'].verify
try:
client = APIClient(**kwargs)
- self.assertEqual(kwargs['base_url'], client.base_url)
- self.assertEqual(kwargs['tls'].ca_cert, client.verify)
- self.assertEqual(kwargs['tls'].cert, client.cert)
+ assert kwargs['base_url'] == client.base_url
+ assert kwargs['tls'].ca_cert == client.verify
+ assert kwargs['tls'].cert == client.cert
except TypeError as e:
self.fail(e)
@@ -107,17 +107,17 @@ class KwargsFromEnvTest(unittest.TestCase):
DOCKER_CERT_PATH=TEST_CERT_DIR,
DOCKER_TLS_VERIFY='')
kwargs = kwargs_from_env(assert_hostname=True)
- self.assertEqual('https://192.168.59.103:2376', kwargs['base_url'])
- self.assertTrue('ca.pem' in kwargs['tls'].ca_cert)
- self.assertTrue('cert.pem' in kwargs['tls'].cert[0])
- self.assertTrue('key.pem' in kwargs['tls'].cert[1])
- self.assertEqual(True, kwargs['tls'].assert_hostname)
- self.assertEqual(False, kwargs['tls'].verify)
+ assert 'https://192.168.59.103:2376' == kwargs['base_url']
+ assert 'ca.pem' in kwargs['tls'].ca_cert
+ assert 'cert.pem' in kwargs['tls'].cert[0]
+ assert 'key.pem' in kwargs['tls'].cert[1]
+ assert kwargs['tls'].assert_hostname is True
+ assert kwargs['tls'].verify is False
try:
client = APIClient(**kwargs)
- self.assertEqual(kwargs['base_url'], client.base_url)
- self.assertEqual(kwargs['tls'].cert, client.cert)
- self.assertFalse(kwargs['tls'].verify)
+ assert kwargs['base_url'] == client.base_url
+ assert kwargs['tls'].cert == client.cert
+ assert not kwargs['tls'].verify
except TypeError as e:
self.fail(e)
@@ -131,7 +131,7 @@ class KwargsFromEnvTest(unittest.TestCase):
DOCKER_TLS_VERIFY='')
os.environ.pop('DOCKER_CERT_PATH', None)
kwargs = kwargs_from_env(assert_hostname=True)
- self.assertEqual('tcp://192.168.59.103:2376', kwargs['base_url'])
+ assert 'tcp://192.168.59.103:2376' == kwargs['base_url']
def test_kwargs_from_env_no_cert_path(self):
try:
@@ -144,10 +144,10 @@ class KwargsFromEnvTest(unittest.TestCase):
DOCKER_TLS_VERIFY='1')
kwargs = kwargs_from_env()
- self.assertTrue(kwargs['tls'].verify)
- self.assertIn(cert_dir, kwargs['tls'].ca_cert)
- self.assertIn(cert_dir, kwargs['tls'].cert[0])
- self.assertIn(cert_dir, kwargs['tls'].cert[1])
+ assert kwargs['tls'].verify
+ assert cert_dir in kwargs['tls'].ca_cert
+ assert cert_dir in kwargs['tls'].cert[0]
+ assert cert_dir in kwargs['tls'].cert[1]
finally:
if temp_dir:
shutil.rmtree(temp_dir)
@@ -169,12 +169,12 @@ class KwargsFromEnvTest(unittest.TestCase):
class ConverVolumeBindsTest(unittest.TestCase):
def test_convert_volume_binds_empty(self):
- self.assertEqual(convert_volume_binds({}), [])
- self.assertEqual(convert_volume_binds([]), [])
+ assert convert_volume_binds({}) == []
+ assert convert_volume_binds([]) == []
def test_convert_volume_binds_list(self):
data = ['/a:/a:ro', '/b:/c:z']
- self.assertEqual(convert_volume_binds(data), data)
+ assert convert_volume_binds(data) == data
def test_convert_volume_binds_complete(self):
data = {
@@ -183,13 +183,13 @@ class ConverVolumeBindsTest(unittest.TestCase):
'mode': 'ro'
}
}
- self.assertEqual(convert_volume_binds(data), ['/mnt/vol1:/data:ro'])
+ assert convert_volume_binds(data) == ['/mnt/vol1:/data:ro']
def test_convert_volume_binds_compact(self):
data = {
'/mnt/vol1': '/data'
}
- self.assertEqual(convert_volume_binds(data), ['/mnt/vol1:/data:rw'])
+ assert convert_volume_binds(data) == ['/mnt/vol1:/data:rw']
def test_convert_volume_binds_no_mode(self):
data = {
@@ -197,7 +197,7 @@ class ConverVolumeBindsTest(unittest.TestCase):
'bind': '/data'
}
}
- self.assertEqual(convert_volume_binds(data), ['/mnt/vol1:/data:rw'])
+ assert convert_volume_binds(data) == ['/mnt/vol1:/data:rw']
def test_convert_volume_binds_unicode_bytes_input(self):
expected = [u'/mnt/지연:/unicode/박:rw']
@@ -208,9 +208,7 @@ class ConverVolumeBindsTest(unittest.TestCase):
'mode': 'rw'
}
}
- self.assertEqual(
- convert_volume_binds(data), expected
- )
+ assert convert_volume_binds(data) == expected
def test_convert_volume_binds_unicode_unicode_input(self):
expected = [u'/mnt/지연:/unicode/박:rw']
@@ -221,9 +219,7 @@ class ConverVolumeBindsTest(unittest.TestCase):
'mode': 'rw'
}
}
- self.assertEqual(
- convert_volume_binds(data), expected
- )
+ assert convert_volume_binds(data) == expected
class ParseEnvFileTest(unittest.TestCase):
@@ -242,38 +238,35 @@ class ParseEnvFileTest(unittest.TestCase):
env_file = self.generate_tempfile(
file_content='USER=jdoe\nPASS=secret')
get_parse_env_file = parse_env_file(env_file)
- self.assertEqual(get_parse_env_file,
- {'USER': 'jdoe', 'PASS': 'secret'})
+ assert get_parse_env_file == {'USER': 'jdoe', 'PASS': 'secret'}
os.unlink(env_file)
def test_parse_env_file_with_equals_character(self):
env_file = self.generate_tempfile(
file_content='USER=jdoe\nPASS=sec==ret')
get_parse_env_file = parse_env_file(env_file)
- self.assertEqual(get_parse_env_file,
- {'USER': 'jdoe', 'PASS': 'sec==ret'})
+ assert get_parse_env_file == {'USER': 'jdoe', 'PASS': 'sec==ret'}
os.unlink(env_file)
def test_parse_env_file_commented_line(self):
env_file = self.generate_tempfile(
file_content='USER=jdoe\n#PASS=secret')
get_parse_env_file = parse_env_file(env_file)
- self.assertEqual(get_parse_env_file, {'USER': 'jdoe'})
+ assert get_parse_env_file == {'USER': 'jdoe'}
os.unlink(env_file)
def test_parse_env_file_newline(self):
env_file = self.generate_tempfile(
file_content='\nUSER=jdoe\n\n\nPASS=secret')
get_parse_env_file = parse_env_file(env_file)
- self.assertEqual(get_parse_env_file,
- {'USER': 'jdoe', 'PASS': 'secret'})
+ assert get_parse_env_file == {'USER': 'jdoe', 'PASS': 'secret'}
os.unlink(env_file)
def test_parse_env_file_invalid_line(self):
env_file = self.generate_tempfile(
file_content='USER jdoe')
- self.assertRaises(
- DockerException, parse_env_file, env_file)
+ with pytest.raises(DockerException):
+ parse_env_file(env_file)
os.unlink(env_file)
@@ -343,46 +336,34 @@ class ParseRepositoryTagTest(unittest.TestCase):
sha = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
def test_index_image_no_tag(self):
- self.assertEqual(
- parse_repository_tag("root"), ("root", None)
- )
+ assert parse_repository_tag("root") == ("root", None)
def test_index_image_tag(self):
- self.assertEqual(
- parse_repository_tag("root:tag"), ("root", "tag")
- )
+ assert parse_repository_tag("root:tag") == ("root", "tag")
def test_index_user_image_no_tag(self):
- self.assertEqual(
- parse_repository_tag("user/repo"), ("user/repo", None)
- )
+ assert parse_repository_tag("user/repo") == ("user/repo", None)
def test_index_user_image_tag(self):
- self.assertEqual(
- parse_repository_tag("user/repo:tag"), ("user/repo", "tag")
- )
+ assert parse_repository_tag("user/repo:tag") == ("user/repo", "tag")
def test_private_reg_image_no_tag(self):
- self.assertEqual(
- parse_repository_tag("url:5000/repo"), ("url:5000/repo", None)
- )
+ assert parse_repository_tag("url:5000/repo") == ("url:5000/repo", None)
def test_private_reg_image_tag(self):
- self.assertEqual(
- parse_repository_tag("url:5000/repo:tag"), ("url:5000/repo", "tag")
+ assert parse_repository_tag("url:5000/repo:tag") == (
+ "url:5000/repo", "tag"
)
def test_index_image_sha(self):
- self.assertEqual(
- parse_repository_tag("root@sha256:{0}".format(self.sha)),
- ("root", "sha256:{0}".format(self.sha))
+ assert parse_repository_tag("root@sha256:{0}".format(self.sha)) == (
+ "root", "sha256:{0}".format(self.sha)
)
def test_private_reg_image_sha(self):
- self.assertEqual(
- parse_repository_tag("url:5000/repo@sha256:{0}".format(self.sha)),
- ("url:5000/repo", "sha256:{0}".format(self.sha))
- )
+ assert parse_repository_tag(
+ "url:5000/repo@sha256:{0}".format(self.sha)
+ ) == ("url:5000/repo", "sha256:{0}".format(self.sha))
class ParseDeviceTest(unittest.TestCase):
@@ -392,35 +373,35 @@ class ParseDeviceTest(unittest.TestCase):
'PathInContainer': '/dev/mnt1',
'CgroupPermissions': 'r'
}])
- self.assertEqual(devices[0], {
+ assert devices[0] == {
'PathOnHost': '/dev/sda1',
'PathInContainer': '/dev/mnt1',
'CgroupPermissions': 'r'
- })
+ }
def test_partial_string_definition(self):
devices = parse_devices(['/dev/sda1'])
- self.assertEqual(devices[0], {
+ assert devices[0] == {
'PathOnHost': '/dev/sda1',
'PathInContainer': '/dev/sda1',
'CgroupPermissions': 'rwm'
- })
+ }
def test_permissionless_string_definition(self):
devices = parse_devices(['/dev/sda1:/dev/mnt1'])
- self.assertEqual(devices[0], {
+ assert devices[0] == {
'PathOnHost': '/dev/sda1',
'PathInContainer': '/dev/mnt1',
'CgroupPermissions': 'rwm'
- })
+ }
def test_full_string_definition(self):
devices = parse_devices(['/dev/sda1:/dev/mnt1:r'])
- self.assertEqual(devices[0], {
+ assert devices[0] == {
'PathOnHost': '/dev/sda1',
'PathInContainer': '/dev/mnt1',
'CgroupPermissions': 'r'
- })
+ }
def test_hybrid_list(self):
devices = parse_devices([
@@ -432,36 +413,38 @@ class ParseDeviceTest(unittest.TestCase):
}
])
- self.assertEqual(devices[0], {
+ assert devices[0] == {
'PathOnHost': '/dev/sda1',
'PathInContainer': '/dev/mnt1',
'CgroupPermissions': 'rw'
- })
- self.assertEqual(devices[1], {
+ }
+ assert devices[1] == {
'PathOnHost': '/dev/sda2',
'PathInContainer': '/dev/mnt2',
'CgroupPermissions': 'r'
- })
+ }
class ParseBytesTest(unittest.TestCase):
def test_parse_bytes_valid(self):
- self.assertEqual(parse_bytes("512MB"), 536870912)
- self.assertEqual(parse_bytes("512M"), 536870912)
- self.assertEqual(parse_bytes("512m"), 536870912)
+ assert parse_bytes("512MB") == 536870912
+ assert parse_bytes("512M") == 536870912
+ assert parse_bytes("512m") == 536870912
def test_parse_bytes_invalid(self):
- self.assertRaises(DockerException, parse_bytes, "512MK")
- self.assertRaises(DockerException, parse_bytes, "512L")
- self.assertRaises(DockerException, parse_bytes, "127.0.0.1K")
+ with pytest.raises(DockerException):
+ parse_bytes("512MK")
+ with pytest.raises(DockerException):
+ parse_bytes("512L")
+ with pytest.raises(DockerException):
+ parse_bytes("127.0.0.1K")
def test_parse_bytes_float(self):
- self.assertRaises(DockerException, parse_bytes, "1.5k")
+ with pytest.raises(DockerException):
+ parse_bytes("1.5k")
def test_parse_bytes_maxint(self):
- self.assertEqual(
- parse_bytes("{0}k".format(sys.maxsize)), sys.maxsize * 1024
- )
+ assert parse_bytes("{0}k".format(sys.maxsize)) == sys.maxsize * 1024
class UtilsTest(unittest.TestCase):
@@ -476,7 +459,7 @@ class UtilsTest(unittest.TestCase):
]
for filters, expected in tests:
- self.assertEqual(convert_filters(filters), expected)
+ assert convert_filters(filters) == expected
def test_decode_json_header(self):
obj = {'a': 'b', 'c': 1}
@@ -486,144 +469,144 @@ class UtilsTest(unittest.TestCase):
else:
data = base64.urlsafe_b64encode(json.dumps(obj))
decoded_data = decode_json_header(data)
- self.assertEqual(obj, decoded_data)
+ assert obj == decoded_data
class SplitCommandTest(unittest.TestCase):
def test_split_command_with_unicode(self):
- self.assertEqual(split_command(u'echo μμ'), ['echo', 'μμ'])
+ assert split_command(u'echo μμ') == ['echo', 'μμ']
@pytest.mark.skipif(six.PY3, reason="shlex doesn't support bytes in py3")
def test_split_command_with_bytes(self):
- self.assertEqual(split_command('echo μμ'), ['echo', 'μμ'])
+ assert split_command('echo μμ') == ['echo', 'μμ']
class PortsTest(unittest.TestCase):
def test_split_port_with_host_ip(self):
internal_port, external_port = split_port("127.0.0.1:1000:2000")
- self.assertEqual(internal_port, ["2000"])
- self.assertEqual(external_port, [("127.0.0.1", "1000")])
+ assert internal_port == ["2000"]
+ assert external_port == [("127.0.0.1", "1000")]
def test_split_port_with_protocol(self):
internal_port, external_port = split_port("127.0.0.1:1000:2000/udp")
- self.assertEqual(internal_port, ["2000/udp"])
- self.assertEqual(external_port, [("127.0.0.1", "1000")])
+ assert internal_port == ["2000/udp"]
+ assert external_port == [("127.0.0.1", "1000")]
def test_split_port_with_host_ip_no_port(self):
internal_port, external_port = split_port("127.0.0.1::2000")
- self.assertEqual(internal_port, ["2000"])
- self.assertEqual(external_port, [("127.0.0.1", None)])
+ assert internal_port == ["2000"]
+ assert external_port == [("127.0.0.1", None)]
def test_split_port_range_with_host_ip_no_port(self):
internal_port, external_port = split_port("127.0.0.1::2000-2001")
- self.assertEqual(internal_port, ["2000", "2001"])
- self.assertEqual(external_port,
- [("127.0.0.1", None), ("127.0.0.1", None)])
+ assert internal_port == ["2000", "2001"]
+ assert external_port == [("127.0.0.1", None), ("127.0.0.1", None)]
def test_split_port_with_host_port(self):
internal_port, external_port = split_port("1000:2000")
- self.assertEqual(internal_port, ["2000"])
- self.assertEqual(external_port, ["1000"])
+ assert internal_port == ["2000"]
+ assert external_port == ["1000"]
def test_split_port_range_with_host_port(self):
internal_port, external_port = split_port("1000-1001:2000-2001")
- self.assertEqual(internal_port, ["2000", "2001"])
- self.assertEqual(external_port, ["1000", "1001"])
+ assert internal_port == ["2000", "2001"]
+ assert external_port == ["1000", "1001"]
def test_split_port_random_port_range_with_host_port(self):
internal_port, external_port = split_port("1000-1001:2000")
- self.assertEqual(internal_port, ["2000"])
- self.assertEqual(external_port, ["1000-1001"])
+ assert internal_port == ["2000"]
+ assert external_port == ["1000-1001"]
def test_split_port_no_host_port(self):
internal_port, external_port = split_port("2000")
- self.assertEqual(internal_port, ["2000"])
- self.assertEqual(external_port, None)
+ assert internal_port == ["2000"]
+ assert external_port is None
def test_split_port_range_no_host_port(self):
internal_port, external_port = split_port("2000-2001")
- self.assertEqual(internal_port, ["2000", "2001"])
- self.assertEqual(external_port, None)
+ assert internal_port == ["2000", "2001"]
+ assert external_port is None
def test_split_port_range_with_protocol(self):
internal_port, external_port = split_port(
"127.0.0.1:1000-1001:2000-2001/udp")
- self.assertEqual(internal_port, ["2000/udp", "2001/udp"])
- self.assertEqual(external_port,
- [("127.0.0.1", "1000"), ("127.0.0.1", "1001")])
+ assert internal_port == ["2000/udp", "2001/udp"]
+ assert external_port == [("127.0.0.1", "1000"), ("127.0.0.1", "1001")]
def test_split_port_with_ipv6_address(self):
internal_port, external_port = split_port(
"2001:abcd:ef00::2:1000:2000")
- self.assertEqual(internal_port, ["2000"])
- self.assertEqual(external_port, [("2001:abcd:ef00::2", "1000")])
+ assert internal_port == ["2000"]
+ assert external_port == [("2001:abcd:ef00::2", "1000")]
def test_split_port_invalid(self):
- self.assertRaises(ValueError,
- lambda: split_port("0.0.0.0:1000:2000:tcp"))
+ with pytest.raises(ValueError):
+ split_port("0.0.0.0:1000:2000:tcp")
def test_non_matching_length_port_ranges(self):
- self.assertRaises(
- ValueError,
- lambda: split_port("0.0.0.0:1000-1010:2000-2002/tcp")
- )
+ with pytest.raises(ValueError):
+ split_port("0.0.0.0:1000-1010:2000-2002/tcp")
def test_port_and_range_invalid(self):
- self.assertRaises(ValueError,
- lambda: split_port("0.0.0.0:1000:2000-2002/tcp"))
+ with pytest.raises(ValueError):
+ split_port("0.0.0.0:1000:2000-2002/tcp")
def test_port_only_with_colon(self):
- self.assertRaises(ValueError,
- lambda: split_port(":80"))
+ with pytest.raises(ValueError):
+ split_port(":80")
def test_host_only_with_colon(self):
- self.assertRaises(ValueError,
- lambda: split_port("localhost:"))
+ with pytest.raises(ValueError):
+ split_port("localhost:")
def test_with_no_container_port(self):
- self.assertRaises(ValueError,
- lambda: split_port("localhost:80:"))
+ with pytest.raises(ValueError):
+ split_port("localhost:80:")
def test_split_port_empty_string(self):
- self.assertRaises(ValueError, lambda: split_port(""))
+ with pytest.raises(ValueError):
+ split_port("")
def test_split_port_non_string(self):
assert split_port(1243) == (['1243'], None)
def test_build_port_bindings_with_one_port(self):
port_bindings = build_port_bindings(["127.0.0.1:1000:1000"])
- self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")])
+ assert port_bindings["1000"] == [("127.0.0.1", "1000")]
def test_build_port_bindings_with_matching_internal_ports(self):
port_bindings = build_port_bindings(
["127.0.0.1:1000:1000", "127.0.0.1:2000:1000"])
- self.assertEqual(port_bindings["1000"],
- [("127.0.0.1", "1000"), ("127.0.0.1", "2000")])
+ assert port_bindings["1000"] == [
+ ("127.0.0.1", "1000"), ("127.0.0.1", "2000")
+ ]
def test_build_port_bindings_with_nonmatching_internal_ports(self):
port_bindings = build_port_bindings(
["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"])
- self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")])
- self.assertEqual(port_bindings["2000"], [("127.0.0.1", "2000")])
+ assert port_bindings["1000"] == [("127.0.0.1", "1000")]
+ assert port_bindings["2000"] == [("127.0.0.1", "2000")]
def test_build_port_bindings_with_port_range(self):
port_bindings = build_port_bindings(["127.0.0.1:1000-1001:1000-1001"])
- self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")])
- self.assertEqual(port_bindings["1001"], [("127.0.0.1", "1001")])
+ assert port_bindings["1000"] == [("127.0.0.1", "1000")]
+ assert port_bindings["1001"] == [("127.0.0.1", "1001")]
def test_build_port_bindings_with_matching_internal_port_ranges(self):
port_bindings = build_port_bindings(
["127.0.0.1:1000-1001:1000-1001", "127.0.0.1:2000-2001:1000-1001"])
- self.assertEqual(port_bindings["1000"],
- [("127.0.0.1", "1000"), ("127.0.0.1", "2000")])
- self.assertEqual(port_bindings["1001"],
- [("127.0.0.1", "1001"), ("127.0.0.1", "2001")])
+ assert port_bindings["1000"] == [
+ ("127.0.0.1", "1000"), ("127.0.0.1", "2000")
+ ]
+ assert port_bindings["1001"] == [
+ ("127.0.0.1", "1001"), ("127.0.0.1", "2001")
+ ]
def test_build_port_bindings_with_nonmatching_internal_port_ranges(self):
port_bindings = build_port_bindings(
["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"])
- self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")])
- self.assertEqual(port_bindings["2000"], [("127.0.0.1", "2000")])
+ assert port_bindings["1000"] == [("127.0.0.1", "1000")]
+ assert port_bindings["2000"] == [("127.0.0.1", "2000")]
def convert_paths(collection):
@@ -708,11 +691,13 @@ class ExcludePathsTest(unittest.TestCase):
If we're using a custom Dockerfile, make sure that's not
excluded.
"""
- assert self.exclude(['*'], dockerfile='Dockerfile.alt') == \
- set(['Dockerfile.alt', '.dockerignore'])
+ assert self.exclude(['*'], dockerfile='Dockerfile.alt') == set(
+ ['Dockerfile.alt', '.dockerignore']
+ )
- assert self.exclude(['*'], dockerfile='foo/Dockerfile3') == \
- convert_paths(set(['foo/Dockerfile3', '.dockerignore']))
+ assert self.exclude(
+ ['*'], dockerfile='foo/Dockerfile3'
+ ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore']))
def test_exclude_dockerfile_child(self):
includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3')
@@ -946,7 +931,21 @@ class TarTest(unittest.TestCase):
os.makedirs(os.path.join(base, d))
with tar(base) as archive:
tar_data = tarfile.open(fileobj=archive)
- self.assertEqual(sorted(tar_data.getnames()), ['bar', 'foo'])
+ assert sorted(tar_data.getnames()) == ['bar', 'foo']
+
+ @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No chmod on Windows')
+ def test_tar_with_inaccessible_file(self):
+ base = tempfile.mkdtemp()
+ full_path = os.path.join(base, 'foo')
+ self.addCleanup(shutil.rmtree, base)
+ with open(full_path, 'w') as f:
+ f.write('content')
+ os.chmod(full_path, 0o222)
+ with pytest.raises(IOError) as ei:
+ tar(base)
+
+ assert 'Can not access file in context: {}'.format(full_path) in \
+ ei.exconly()
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
def test_tar_with_file_symlinks(self):
@@ -958,9 +957,7 @@ class TarTest(unittest.TestCase):
os.symlink('../foo', os.path.join(base, 'bar/foo'))
with tar(base) as archive:
tar_data = tarfile.open(fileobj=archive)
- self.assertEqual(
- sorted(tar_data.getnames()), ['bar', 'bar/foo', 'foo']
- )
+ assert sorted(tar_data.getnames()) == ['bar', 'bar/foo', 'foo']
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
def test_tar_with_directory_symlinks(self):
@@ -971,9 +968,19 @@ class TarTest(unittest.TestCase):
os.symlink('../foo', os.path.join(base, 'bar/foo'))
with tar(base) as archive:
tar_data = tarfile.open(fileobj=archive)
- self.assertEqual(
- sorted(tar_data.getnames()), ['bar', 'bar/foo', 'foo']
- )
+ assert sorted(tar_data.getnames()) == ['bar', 'bar/foo', 'foo']
+
+ @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
+ def test_tar_with_broken_symlinks(self):
+ base = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base)
+ for d in ['foo', 'bar']:
+ os.makedirs(os.path.join(base, d))
+
+ os.symlink('../baz', os.path.join(base, 'bar/foo'))
+ with tar(base) as archive:
+ tar_data = tarfile.open(fileobj=archive)
+ assert sorted(tar_data.getnames()) == ['bar', 'bar/foo', 'foo']
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No UNIX sockets on Win32')
def test_tar_socket_file(self):
@@ -986,9 +993,7 @@ class TarTest(unittest.TestCase):
sock.bind(os.path.join(base, 'test.sock'))
with tar(base) as archive:
tar_data = tarfile.open(fileobj=archive)
- self.assertEqual(
- sorted(tar_data.getnames()), ['bar', 'foo']
- )
+ assert sorted(tar_data.getnames()) == ['bar', 'foo']
class ShouldCheckDirectoryTest(unittest.TestCase):
diff --git a/tox.ini b/tox.ini
index 3bf2b71..41d8860 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27, py33, py34, py35, flake8
+envlist = py27, py33, py34, py35, py36, flake8
skipsdist=True
[testenv]
diff --git a/win32-requirements.txt b/win32-requirements.txt
index e77c3d9..bc04b49 100644
--- a/win32-requirements.txt
+++ b/win32-requirements.txt
@@ -1,2 +1 @@
-r requirements.txt
-pypiwin32==219
\ No newline at end of file