summaryrefslogtreecommitdiff
path: root/tests/unit
diff options
context:
space:
mode:
authorJoffrey F <joffrey@docker.com>2015-09-23 17:42:29 -0700
committerJoffrey F <joffrey@docker.com>2015-09-24 15:33:51 -0700
commit15385685323b18671bbfed38ba744ac2b6b01bb9 (patch)
tree4de1fdbd7f0d318f9848becb302166bfeea81f5b /tests/unit
parent36264349912c3192a7712167b901115a123fbada (diff)
downloaddocker-py-efficiency.tar.gz
Reorganize test directoriesefficiency
More clearly separate unit and integration tests. Allow splitting into multiple files. Cleanup. Signed-off-by: Joffrey F <joffrey@docker.com>
Diffstat (limited to 'tests/unit')
-rw-r--r--tests/unit/__init__.py0
-rw-r--r--tests/unit/api_test.py2451
-rw-r--r--tests/unit/fake_api.py513
-rw-r--r--tests/unit/fake_stat.py133
-rw-r--r--tests/unit/testdata/certs/ca.pem0
-rw-r--r--tests/unit/testdata/certs/cert.pem0
-rw-r--r--tests/unit/testdata/certs/key.pem0
-rw-r--r--tests/unit/testdata/context/Dockerfile2
-rw-r--r--tests/unit/testdata/context/ctx.tar.gzbin0 -> 171 bytes
-rw-r--r--tests/unit/testdata/context/custom_dockerfile2
-rw-r--r--tests/unit/utils_test.py651
11 files changed, 3752 insertions, 0 deletions
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/__init__.py
diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py
new file mode 100644
index 0000000..e44e562
--- /dev/null
+++ b/tests/unit/api_test.py
@@ -0,0 +1,2451 @@
+# Copyright 2013 dotCloud inc.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Standard-library imports used across the API unit tests.
import base64
import datetime
import gzip
import io
import json
import os
import re
import shutil
import signal
import socket
import sys
import tarfile
import tempfile
import threading
import time
import random

import docker
# NOTE(review): `docker.efficiency` does not exist in the docker-py
# package — this import looks like an injected/corrupted line and would
# fail at collection time; confirm and remove.
import docker.efficiency
import requests
import six

from .. import base
from . import fake_api
from ..helpers import make_tree

import pytest

# Prefer the stdlib mock (Python 3.3+); fall back to the standalone
# `mock` package on older interpreters.
try:
    from unittest import mock
except ImportError:
    import mock


# Default request timeout, mirrored from docker.constants so the
# assertions below reference the same value the client uses.
DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS
+
+
def response(status_code=200, content='', headers=None, reason=None, elapsed=0,
             request=None):
    """Build a ``requests.Response`` populated with the given fields.

    Content that is not already bytes is JSON-encoded to ASCII bytes,
    mimicking a daemon reply.  ``elapsed`` is handed to
    ``datetime.timedelta`` as its first positional argument.
    """
    resp = requests.Response()
    resp.status_code = status_code
    if isinstance(content, six.binary_type):
        resp._content = content
    else:
        resp._content = json.dumps(content).encode('ascii')
    resp.headers = requests.structures.CaseInsensitiveDict(headers or {})
    resp.reason = reason
    resp.elapsed = datetime.timedelta(elapsed)
    resp.request = request
    return resp
+
+
def fake_resolve_authconfig(authconfig, registry=None):
    """Stand-in for credential resolution: always report no credentials."""
    return None
+
+
def fake_inspect_container(self, container, tty=False):
    """Patched ``inspect_container``: return the canned inspect payload."""
    status_and_payload = fake_api.get_fake_inspect_container(tty=tty)
    return status_and_payload[1]


def fake_inspect_container_tty(self, container):
    """Like ``fake_inspect_container`` but for a TTY-enabled container."""
    return fake_inspect_container(self, container, tty=True)
+
+
def fake_resp(method, url, *args, **kwargs):
    """Dispatch a fake HTTP call to the matching ``fake_api`` response.

    Responses are keyed either by bare URL or by ``(url, method)``.
    An unregistered endpoint raises so that tests fail loudly.

    Fix: the original initialised ``key = None`` and tested ``if not
    key:`` — a registered but *falsy* key (e.g. the empty string) would
    have been treated as missing.  An explicit if/elif/else avoids the
    truthiness trap.
    """
    if url in fake_api.fake_responses:
        key = url
    elif (url, method) in fake_api.fake_responses:
        key = (url, method)
    else:
        raise Exception('{0} {1}'.format(method, url))
    status_code, content = fake_api.fake_responses[key]()
    return response(status_code=status_code, content=content)


# Mock wrapper so tests can assert on calls (assert_called_with, etc.)
# while still executing the dispatcher above via side_effect.
fake_request = mock.Mock(side_effect=fake_resp)
+
+
# Thin HTTP-verb wrappers.  These are patched onto docker.Client by name
# (see the mock.patch.multiple decorator below) so every request the
# client makes is routed through the `fake_request` mock.
def fake_get(self, url, *args, **kwargs):
    return fake_request('GET', url, *args, **kwargs)


def fake_post(self, url, *args, **kwargs):
    return fake_request('POST', url, *args, **kwargs)


def fake_put(self, url, *args, **kwargs):
    return fake_request('PUT', url, *args, **kwargs)


def fake_delete(self, url, *args, **kwargs):
    return fake_request('DELETE', url, *args, **kwargs)

# Base URL every expected request starts with,
# e.g. 'http+docker://localunixsocket/v1.19/'.
url_prefix = 'http+docker://localunixsocket/v{0}/'.format(
    docker.constants.DEFAULT_DOCKER_API_VERSION)
+
+
+@mock.patch.multiple('docker.Client', get=fake_get, post=fake_post,
+ put=fake_put, delete=fake_delete)
+class DockerClientTest(base.Cleanup, base.BaseTestCase):
    def setUp(self):
        """Create a fresh client and clear any auth config picked up from
        the environment so each test is hermetic."""
        self.client = docker.Client()
        # Force-clear authconfig to avoid tampering with the tests
        self.client._cfg = {'Configs': {}}

    def tearDown(self):
        # Close the client's underlying session/adapters.
        self.client.close()

    def assertIn(self, object, collection):
        # Python 2.6's unittest lacks assertIn; emulate it there and
        # defer to the real implementation everywhere else.
        # NOTE(review): the parameter name `object` shadows the builtin.
        if six.PY2 and sys.version_info[1] <= 6:
            return self.assertTrue(object in collection)
        return super(DockerClientTest, self).assertIn(object, collection)
+
+ def base_create_payload(self, img='busybox', cmd=None):
+ if not cmd:
+ cmd = ['true']
+ return {"Tty": False, "Image": img, "Cmd": cmd,
+ "AttachStdin": False,
+ "AttachStderr": True, "AttachStdout": True,
+ "StdinOnce": False,
+ "OpenStdin": False, "NetworkDisabled": False,
+ }
+
+ def test_ctor(self):
+ with pytest.raises(docker.errors.DockerException) as excinfo:
+ docker.Client(version=1.12)
+
+ self.assertEqual(
+ str(excinfo.value),
+ 'Version parameter must be a string or None. Found float'
+ )
+
    def test_url_valid_resource(self):
        # Resource names are URL-quoted into the path ('/' -> %2F, '?' -> %3F).
        url = self.client._url('/hello/{0}/world', 'somename')
        self.assertEqual(
            url, '{0}{1}'.format(url_prefix, 'hello/somename/world')
        )

        url = self.client._url('/hello/{0}/world', '/some?name')
        self.assertEqual(
            url, '{0}{1}'.format(url_prefix, 'hello/%2Fsome%3Fname/world')
        )

    def test_url_invalid_resource(self):
        # Only string resource identifiers are accepted.
        with pytest.raises(ValueError):
            self.client._url('/hello/{0}/world', ['sakuya', 'izayoi'])

    def test_url_no_resource(self):
        # Without a resource id the path is used verbatim; an explicit
        # None argument behaves the same as omitting it.
        url = self.client._url('/simple')
        self.assertEqual(url, '{0}{1}'.format(url_prefix, 'simple'))

        url = self.client._url('/simple', None)
        self.assertEqual(url, '{0}{1}'.format(url_prefix, 'simple'))
+
    #########################
    # INFORMATION TESTS #
    #########################
    def test_version(self):
        # GET /version with the default timeout.
        self.client.version()

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'version',
            timeout=DEFAULT_TIMEOUT_SECONDS
        )

    def test_retrieve_server_version(self):
        # version="auto" must resolve to a concrete version string.
        client = docker.Client(version="auto")
        self.assertTrue(isinstance(client._version, six.string_types))
        self.assertFalse(client._version == "auto")
        client.close()

    def test_auto_retrieve_server_version(self):
        # _retrieve_server_version() returns the daemon's version string.
        version = self.client._retrieve_server_version()
        self.assertTrue(isinstance(version, six.string_types))

    def test_info(self):
        # GET /info with the default timeout.
        self.client.info()

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'info',
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
+
+ def test_search(self):
+ self.client.search('busybox')
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'images/search',
+ params={'term': 'busybox'},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_image_viz(self):
+ with pytest.raises(Exception):
+ self.client.images('busybox', viz=True)
+ self.fail('Viz output should not be supported!')
+
    def test_events(self):
        # GET /events is a streaming endpoint; all params default to None.
        self.client.events()

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'events',
            params={'since': None, 'until': None, 'filters': None},
            stream=True
        )

    def test_events_with_since_until(self):
        # datetime arguments are converted to UTC epoch seconds.
        ts = 1356048000
        now = datetime.datetime.utcfromtimestamp(ts)
        since = now - datetime.timedelta(seconds=10)
        until = now + datetime.timedelta(seconds=10)

        self.client.events(since=since, until=until)

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'events',
            params={
                'since': ts - 10,
                'until': ts + 10,
                'filters': None
            },
            stream=True
        )

    def test_events_with_filters(self):
        # dict filters are serialized via docker.utils.convert_filters
        # before being sent as a query parameter.
        filters = {'event': ['die', 'stop'],
                   'container': fake_api.FAKE_CONTAINER_ID}

        self.client.events(filters=filters)

        expected_filters = docker.utils.convert_filters(filters)
        fake_request.assert_called_with(
            'GET',
            url_prefix + 'events',
            params={
                'since': None,
                'until': None,
                'filters': expected_filters
            },
            stream=True
        )
+
    ###################
    # LISTING TESTS #
    ###################

    def test_images(self):
        # all=True maps to all=1 in the query string.
        self.client.images(all=True)

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'images/json',
            params={'filter': None, 'only_ids': 0, 'all': 1},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )

    def test_images_quiet(self):
        # quiet=True maps to only_ids=1.
        self.client.images(all=True, quiet=True)

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'images/json',
            params={'filter': None, 'only_ids': 1, 'all': 1},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )

    def test_image_ids(self):
        # quiet alone: only ids, not all images.
        self.client.images(quiet=True)

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'images/json',
            params={'filter': None, 'only_ids': 1, 'all': 0},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )

    def test_images_filters(self):
        # Boolean filter values are serialized as JSON string lists.
        self.client.images(filters={'dangling': True})

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'images/json',
            params={'filter': None, 'only_ids': 0, 'all': 0,
                    'filters': '{"dangling": ["true"]}'},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )

    def test_list_containers(self):
        # containers(all=True) -> GET containers/json with default params.
        self.client.containers(all=True)

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/json',
            params={
                'all': 1,
                'since': None,
                'size': 0,
                'limit': -1,
                'trunc_cmd': 0,
                'before': None
            },
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
+
+ #####################
+ # CONTAINER TESTS #
+ #####################
+
    def test_create_container(self):
        # POST containers/create with the default payload (no host config).
        self.client.create_container('busybox', 'true')

        args = fake_request.call_args
        self.assertEqual(args[0][1],
                         url_prefix + 'containers/create')
        self.assertEqual(json.loads(args[1]['data']),
                         json.loads('''
            {"Tty": false, "Image": "busybox", "Cmd": ["true"],
             "AttachStdin": false,
             "AttachStderr": true, "AttachStdout": true,
             "StdinOnce": false,
             "OpenStdin": false, "NetworkDisabled": false}'''))
        self.assertEqual(args[1]['headers'],
                         {'Content-Type': 'application/json'})
+
+ def test_create_container_with_binds(self):
+ mount_dest = '/mnt'
+
+ self.client.create_container('busybox', ['ls', mount_dest],
+ volumes=[mount_dest])
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1],
+ url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data']),
+ json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls", "/mnt"], "AttachStdin": false,
+ "Volumes": {"/mnt": {}},
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false}'''))
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_create_container_with_volume_string(self):
+ mount_dest = '/mnt'
+
+ self.client.create_container('busybox', ['ls', mount_dest],
+ volumes=mount_dest)
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1],
+ url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data']),
+ json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls", "/mnt"], "AttachStdin": false,
+ "Volumes": {"/mnt": {}},
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false}'''))
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_create_container_with_ports(self):
+ self.client.create_container('busybox', 'ls',
+ ports=[1111, (2222, 'udp'), (3333,)])
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1],
+ url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data']),
+ json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "ExposedPorts": {
+ "1111/tcp": {},
+ "2222/udp": {},
+ "3333/tcp": {}
+ },
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false}'''))
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_create_container_with_entrypoint(self):
+ self.client.create_container('busybox', 'hello',
+ entrypoint='cowsay entry')
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1],
+ url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data']),
+ json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["hello"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "Entrypoint": ["cowsay", "entry"]}'''))
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_create_container_with_cpu_shares(self):
+ self.client.create_container('busybox', 'ls',
+ cpu_shares=5)
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1],
+ url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data']),
+ json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "CpuShares": 5}'''))
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_create_container_with_cpuset(self):
+ self.client.create_container('busybox', 'ls',
+ cpuset='0,1')
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1],
+ url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data']),
+ json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "Cpuset": "0,1",
+ "CpusetCpus": "0,1"}'''))
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_create_container_with_cgroup_parent(self):
+ self.client.create_container(
+ 'busybox', 'ls', host_config=self.client.create_host_config(
+ cgroup_parent='test'
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1],
+ url_prefix + 'containers/create')
+ data = json.loads(args[1]['data'])
+ self.assertIn('HostConfig', data)
+ self.assertIn('CgroupParent', data['HostConfig'])
+ self.assertEqual(data['HostConfig']['CgroupParent'], 'test')
+
+ def test_create_container_with_working_dir(self):
+ self.client.create_container('busybox', 'ls',
+ working_dir='/root')
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1],
+ url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data']),
+ json.loads('''
+ {"Tty": false, "Image": "busybox",
+ "Cmd": ["ls"], "AttachStdin": false,
+ "AttachStderr": true,
+ "AttachStdout": true, "OpenStdin": false,
+ "StdinOnce": false,
+ "NetworkDisabled": false,
+ "WorkingDir": "/root"}'''))
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_create_container_with_stdin_open(self):
+ self.client.create_container('busybox', 'true', stdin_open=True)
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1],
+ url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data']),
+ json.loads('''
+ {"Tty": false, "Image": "busybox", "Cmd": ["true"],
+ "AttachStdin": true,
+ "AttachStderr": true, "AttachStdout": true,
+ "StdinOnce": true,
+ "OpenStdin": true, "NetworkDisabled": false}'''))
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_create_container_with_volumes_from(self):
+ vol_names = ['foo', 'bar']
+ try:
+ self.client.create_container('busybox', 'true',
+ volumes_from=vol_names)
+ except docker.errors.DockerException:
+ self.assertTrue(
+ docker.utils.compare_version('1.10', self.client._version) >= 0
+ )
+ return
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data'])['VolumesFrom'],
+ ','.join(vol_names))
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_create_container_empty_volumes_from(self):
+ self.client.create_container('busybox', 'true', volumes_from=[])
+
+ args = fake_request.call_args
+ data = json.loads(args[1]['data'])
+ self.assertTrue('VolumesFrom' not in data)
+
+ def test_create_named_container(self):
+ self.client.create_container('busybox', 'true',
+ name='marisa-kirisame')
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1],
+ url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data']),
+ json.loads('''
+ {"Tty": false, "Image": "busybox", "Cmd": ["true"],
+ "AttachStdin": false,
+ "AttachStderr": true, "AttachStdout": true,
+ "StdinOnce": false,
+ "OpenStdin": false, "NetworkDisabled": false}'''))
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(args[1]['params'], {'name': 'marisa-kirisame'})
+
+ def test_create_container_with_mem_limit_as_int(self):
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ mem_limit=128.0
+ )
+ )
+
+ args = fake_request.call_args
+ data = json.loads(args[1]['data'])
+ self.assertEqual(data['HostConfig']['Memory'], 128.0)
+
+ def test_create_container_with_mem_limit_as_string(self):
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ mem_limit='128'
+ )
+ )
+
+ args = fake_request.call_args
+ data = json.loads(args[1]['data'])
+ self.assertEqual(data['HostConfig']['Memory'], 128.0)
+
+ def test_create_container_with_mem_limit_as_string_with_k_unit(self):
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ mem_limit='128k'
+ )
+ )
+
+ args = fake_request.call_args
+ data = json.loads(args[1]['data'])
+ self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024)
+
+ def test_create_container_with_mem_limit_as_string_with_m_unit(self):
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ mem_limit='128m'
+ )
+ )
+
+ args = fake_request.call_args
+ data = json.loads(args[1]['data'])
+ self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024 * 1024)
+
+ def test_create_container_with_mem_limit_as_string_with_g_unit(self):
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ mem_limit='128g'
+ )
+ )
+
+ args = fake_request.call_args
+ data = json.loads(args[1]['data'])
+ self.assertEqual(
+ data['HostConfig']['Memory'], 128.0 * 1024 * 1024 * 1024
+ )
+
+ def test_create_container_with_mem_limit_as_string_with_wrong_value(self):
+ self.assertRaises(
+ docker.errors.DockerException,
+ self.client.create_host_config, mem_limit='128p'
+ )
+
+ self.assertRaises(
+ docker.errors.DockerException,
+ self.client.create_host_config, mem_limit='1f28'
+ )
+
+ def test_start_container(self):
+ self.client.start(fake_api.FAKE_CONTAINER_ID)
+
+ args = fake_request.call_args
+ self.assertEqual(
+ args[0][1],
+ url_prefix + 'containers/3cc2351ab11b/start'
+ )
+ self.assertEqual(json.loads(args[1]['data']), {})
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+ self.assertEqual(
+ args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_start_container_none(self):
+ with pytest.raises(ValueError) as excinfo:
+ self.client.start(container=None)
+
+ self.assertEqual(
+ str(excinfo.value),
+ 'image or container param is undefined',
+ )
+
+ with pytest.raises(ValueError) as excinfo:
+ self.client.start(None)
+
+ self.assertEqual(
+ str(excinfo.value),
+ 'image or container param is undefined',
+ )
+
+ def test_start_container_regression_573(self):
+ self.client.start(**{'container': fake_api.FAKE_CONTAINER_ID})
+
+ def test_create_container_with_lxc_conf(self):
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ lxc_conf={'lxc.conf.k': 'lxc.conf.value'}
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(
+ args[0][1],
+ url_prefix + 'containers/create'
+ )
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['LxcConf'] = [
+ {"Value": "lxc.conf.value", "Key": "lxc.conf.k"}
+ ]
+
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(
+ args[1]['headers'],
+ {'Content-Type': 'application/json'}
+ )
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_lxc_conf_compat(self):
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}]
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['LxcConf'] = [
+ {"Value": "lxc.conf.value", "Key": "lxc.conf.k"}
+ ]
+ self.assertEqual(
+ json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_binds_ro(self):
+ mount_dest = '/mnt'
+ mount_origin = '/tmp'
+
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ binds={mount_origin: {
+ "bind": mount_dest,
+ "ro": True
+ }}
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix +
+ 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:ro"]
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_binds_rw(self):
+ mount_dest = '/mnt'
+ mount_origin = '/tmp'
+
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ binds={mount_origin: {
+ "bind": mount_dest,
+ "ro": False
+ }}
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix +
+ 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:rw"]
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_binds_mode(self):
+ mount_dest = '/mnt'
+ mount_origin = '/tmp'
+
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ binds={mount_origin: {
+ "bind": mount_dest,
+ "mode": "z",
+ }}
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix +
+ 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:z"]
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_binds_mode_and_ro_error(self):
+ with pytest.raises(ValueError):
+ mount_dest = '/mnt'
+ mount_origin = '/tmp'
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ binds={mount_origin: {
+ "bind": mount_dest,
+ "mode": "z",
+ "ro": True,
+ }}
+ )
+ )
+
+ def test_create_container_with_binds_list(self):
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ binds=[
+ "/tmp:/mnt/1:ro",
+ "/tmp:/mnt/2",
+ ],
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix +
+ 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Binds'] = [
+ "/tmp:/mnt/1:ro",
+ "/tmp:/mnt/2",
+ ]
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_port_binds(self):
+ self.maxDiff = None
+
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ port_bindings={
+ 1111: None,
+ 2222: 2222,
+ '3333/udp': (3333,),
+ 4444: ('127.0.0.1',),
+ 5555: ('127.0.0.1', 5555),
+ 6666: [('127.0.0.1',), ('192.168.0.1',)]
+ }
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ data = json.loads(args[1]['data'])
+ port_bindings = data['HostConfig']['PortBindings']
+ self.assertTrue('1111/tcp' in port_bindings)
+ self.assertTrue('2222/tcp' in port_bindings)
+ self.assertTrue('3333/udp' in port_bindings)
+ self.assertTrue('4444/tcp' in port_bindings)
+ self.assertTrue('5555/tcp' in port_bindings)
+ self.assertTrue('6666/tcp' in port_bindings)
+ self.assertEqual(
+ [{"HostPort": "", "HostIp": ""}],
+ port_bindings['1111/tcp']
+ )
+ self.assertEqual(
+ [{"HostPort": "2222", "HostIp": ""}],
+ port_bindings['2222/tcp']
+ )
+ self.assertEqual(
+ [{"HostPort": "3333", "HostIp": ""}],
+ port_bindings['3333/udp']
+ )
+ self.assertEqual(
+ [{"HostPort": "", "HostIp": "127.0.0.1"}],
+ port_bindings['4444/tcp']
+ )
+ self.assertEqual(
+ [{"HostPort": "5555", "HostIp": "127.0.0.1"}],
+ port_bindings['5555/tcp']
+ )
+ self.assertEqual(len(port_bindings['6666/tcp']), 2)
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_mac_address(self):
+ mac_address_expected = "02:42:ac:11:00:0a"
+
+ container = self.client.create_container(
+ 'busybox', ['sleep', '60'], mac_address=mac_address_expected)
+
+ res = self.client.inspect_container(container['Id'])
+ self.assertEqual(mac_address_expected,
+ res['NetworkSettings']['MacAddress'])
+
+ def test_create_container_with_links(self):
+ link_path = 'path'
+ alias = 'alias'
+
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ links={link_path: alias}
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(
+ args[0][1], url_prefix + 'containers/create'
+ )
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Links'] = ['path:alias']
+
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+
+ def test_create_container_with_multiple_links(self):
+ link_path = 'path'
+ alias = 'alias'
+
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ links={
+ link_path + '1': alias + '1',
+ link_path + '2': alias + '2'
+ }
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Links'] = [
+ 'path1:alias1', 'path2:alias2'
+ ]
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+
+ def test_create_container_with_links_as_list_of_tuples(self):
+ link_path = 'path'
+ alias = 'alias'
+
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ links=[(link_path, alias)]
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Links'] = ['path:alias']
+
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+
+ def test_create_container_privileged(self):
+ self.client.create_container(
+ 'busybox', 'true',
+ host_config=self.client.create_host_config(privileged=True)
+ )
+
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Privileged'] = True
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_start_container_with_lxc_conf(self):
+ def call_start():
+ self.client.start(
+ fake_api.FAKE_CONTAINER_ID,
+ lxc_conf={'lxc.conf.k': 'lxc.conf.value'}
+ )
+
+ pytest.deprecated_call(call_start)
+
+ def test_start_container_with_lxc_conf_compat(self):
+ def call_start():
+ self.client.start(
+ fake_api.FAKE_CONTAINER_ID,
+ lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}]
+ )
+
+ pytest.deprecated_call(call_start)
+
+ def test_start_container_with_binds_ro(self):
+ def call_start():
+ self.client.start(
+ fake_api.FAKE_CONTAINER_ID, binds={
+ '/tmp': {
+ "bind": '/mnt',
+ "ro": True
+ }
+ }
+ )
+
+ pytest.deprecated_call(call_start)
+
+ def test_start_container_with_binds_rw(self):
+ def call_start():
+ self.client.start(
+ fake_api.FAKE_CONTAINER_ID, binds={
+ '/tmp': {"bind": '/mnt', "ro": False}
+ }
+ )
+
+ pytest.deprecated_call(call_start)
+
+ def test_start_container_with_port_binds(self):
+ self.maxDiff = None
+
+ def call_start():
+ self.client.start(fake_api.FAKE_CONTAINER_ID, port_bindings={
+ 1111: None,
+ 2222: 2222,
+ '3333/udp': (3333,),
+ 4444: ('127.0.0.1',),
+ 5555: ('127.0.0.1', 5555),
+ 6666: [('127.0.0.1',), ('192.168.0.1',)]
+ })
+
+ pytest.deprecated_call(call_start)
+
+ def test_start_container_with_links(self):
+ def call_start():
+ self.client.start(
+ fake_api.FAKE_CONTAINER_ID, links={'path': 'alias'}
+ )
+
+ pytest.deprecated_call(call_start)
+
+ def test_start_container_with_multiple_links(self):
+ def call_start():
+ self.client.start(
+ fake_api.FAKE_CONTAINER_ID,
+ links={
+ 'path1': 'alias1',
+ 'path2': 'alias2'
+ }
+ )
+
+ pytest.deprecated_call(call_start)
+
+ def test_start_container_with_links_as_list_of_tuples(self):
+ def call_start():
+ self.client.start(fake_api.FAKE_CONTAINER_ID,
+ links=[('path', 'alias')])
+
+ pytest.deprecated_call(call_start)
+
+ def test_start_container_privileged(self):
+ def call_start():
+ self.client.start(fake_api.FAKE_CONTAINER_ID, privileged=True)
+
+ pytest.deprecated_call(call_start)
+
+ def test_start_container_with_dict_instead_of_id(self):
+ self.client.start({'Id': fake_api.FAKE_CONTAINER_ID})
+
+ args = fake_request.call_args
+ self.assertEqual(
+ args[0][1],
+ url_prefix + 'containers/3cc2351ab11b/start'
+ )
+ self.assertEqual(json.loads(args[1]['data']), {})
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+ self.assertEqual(
+ args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_restart_policy(self):
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ restart_policy={
+ "Name": "always",
+ "MaximumRetryCount": 0
+ }
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['RestartPolicy'] = {
+ "MaximumRetryCount": 0, "Name": "always"
+ }
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+ self.assertEqual(
+ args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_added_capabilities(self):
+ self.client.create_container(
+ 'busybox', 'true',
+ host_config=self.client.create_host_config(cap_add=['MKNOD'])
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['CapAdd'] = ['MKNOD']
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+ self.assertEqual(
+ args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_dropped_capabilities(self):
+ self.client.create_container(
+ 'busybox', 'true',
+ host_config=self.client.create_host_config(cap_drop=['MKNOD'])
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['CapDrop'] = ['MKNOD']
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+ self.assertEqual(
+ args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_devices(self):
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ devices=['/dev/sda:/dev/xvda:rwm',
+ '/dev/sdb:/dev/xvdb',
+ '/dev/sdc']
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Devices'] = [
+ {'CgroupPermissions': 'rwm',
+ 'PathInContainer': '/dev/xvda',
+ 'PathOnHost': '/dev/sda'},
+ {'CgroupPermissions': 'rwm',
+ 'PathInContainer': '/dev/xvdb',
+ 'PathOnHost': '/dev/sdb'},
+ {'CgroupPermissions': 'rwm',
+ 'PathInContainer': '/dev/sdc',
+ 'PathOnHost': '/dev/sdc'}
+ ]
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+ self.assertEqual(
+ args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_labels_dict(self):
+ labels_dict = {
+ six.text_type('foo'): six.text_type('1'),
+ six.text_type('bar'): six.text_type('2'),
+ }
+
+ self.client.create_container(
+ 'busybox', 'true',
+ labels=labels_dict,
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict)
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+ self.assertEqual(
+ args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_labels_list(self):
+ labels_list = [
+ six.text_type('foo'),
+ six.text_type('bar'),
+ ]
+ labels_dict = {
+ six.text_type('foo'): six.text_type(),
+ six.text_type('bar'): six.text_type(),
+ }
+
+ self.client.create_container(
+ 'busybox', 'true',
+ labels=labels_list,
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix + 'containers/create')
+ self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict)
+ self.assertEqual(
+ args[1]['headers'], {'Content-Type': 'application/json'}
+ )
+ self.assertEqual(
+ args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_create_container_with_named_volume(self):
+ mount_dest = '/mnt'
+ volume_name = 'name'
+
+ self.client.create_container(
+ 'busybox', 'true',
+ host_config=self.client.create_host_config(
+ binds={volume_name: {
+ "bind": mount_dest,
+ "ro": False
+ }}),
+ volume_driver='foodriver',
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(
+ args[0][1], url_prefix + 'containers/create'
+ )
+ expected_payload = self.base_create_payload()
+ expected_payload['VolumeDriver'] = 'foodriver'
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Binds'] = ["name:/mnt:rw"]
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_resize_container(self):
+ self.client.resize(
+ {'Id': fake_api.FAKE_CONTAINER_ID},
+ height=15,
+ width=120
+ )
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/resize',
+ params={'h': 15, 'w': 120},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_rename_container(self):
+ self.client.rename(
+ {'Id': fake_api.FAKE_CONTAINER_ID},
+ name='foobar'
+ )
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/rename',
+ params={'name': 'foobar'},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_wait(self):
+ self.client.wait(fake_api.FAKE_CONTAINER_ID)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/wait',
+ timeout=None
+ )
+
+ def test_wait_with_dict_instead_of_id(self):
+ self.client.wait({'Id': fake_api.FAKE_CONTAINER_ID})
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/wait',
+ timeout=None
+ )
+
+ def _socket_path_for_client_session(self, client):
+ socket_adapter = client.get_adapter('http+docker://')
+ return socket_adapter.socket_path
+
+ def test_url_compatibility_unix(self):
+ c = docker.Client(base_url="unix://socket")
+
+ assert self._socket_path_for_client_session(c) == '/socket'
+
+ def test_url_compatibility_unix_triple_slash(self):
+ c = docker.Client(base_url="unix:///socket")
+
+ assert self._socket_path_for_client_session(c) == '/socket'
+
+ def test_url_compatibility_http_unix_triple_slash(self):
+ c = docker.Client(base_url="http+unix:///socket")
+
+ assert self._socket_path_for_client_session(c) == '/socket'
+
+ def test_url_compatibility_http(self):
+ c = docker.Client(base_url="http://hostname:1234")
+
+ assert c.base_url == "http://hostname:1234"
+
+ def test_url_compatibility_tcp(self):
+ c = docker.Client(base_url="tcp://hostname:1234")
+
+ assert c.base_url == "http://hostname:1234"
+
    def test_logs(self):
        # Fetch the full log of a non-tty container; inspect_container is
        # patched so the client treats the container as non-tty.
        with mock.patch('docker.Client.inspect_container',
                        fake_inspect_container):
            logs = self.client.logs(fake_api.FAKE_CONTAINER_ID)

            fake_request.assert_called_with(
                'GET',
                url_prefix + 'containers/3cc2351ab11b/logs',
                params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
                        'tail': 'all'},
                timeout=DEFAULT_TIMEOUT_SECONDS,
                stream=False
            )

            # Canned payload supplied by the fake API.
            self.assertEqual(
                logs,
                'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii')
            )

    def test_logs_with_dict_instead_of_id(self):
        # Same as test_logs but passing an inspect-style dict reference.
        with mock.patch('docker.Client.inspect_container',
                        fake_inspect_container):
            logs = self.client.logs({'Id': fake_api.FAKE_CONTAINER_ID})

            fake_request.assert_called_with(
                'GET',
                url_prefix + 'containers/3cc2351ab11b/logs',
                params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
                        'tail': 'all'},
                timeout=DEFAULT_TIMEOUT_SECONDS,
                stream=False
            )

            self.assertEqual(
                logs,
                'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii')
            )

    def test_log_streaming(self):
        # stream=True must translate into follow=1 and a streamed request.
        with mock.patch('docker.Client.inspect_container',
                        fake_inspect_container):
            self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True)

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
                    'tail': 'all'},
            timeout=DEFAULT_TIMEOUT_SECONDS,
            stream=True
        )

    def test_log_tail(self):
        # An integer tail is passed through instead of the 'all' default.
        with mock.patch('docker.Client.inspect_container',
                        fake_inspect_container):
            self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
                             tail=10)

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
                    'tail': 10},
            timeout=DEFAULT_TIMEOUT_SECONDS,
            stream=False
        )

    def test_log_tty(self):
        # tty containers are raw streams: _stream_raw_result must be used
        # instead of the multiplexed-log demuxer.
        m = mock.Mock()
        with mock.patch('docker.Client.inspect_container',
                        fake_inspect_container_tty):
            with mock.patch('docker.Client._stream_raw_result',
                            m):
                self.client.logs(fake_api.FAKE_CONTAINER_ID,
                                 stream=True)

        self.assertTrue(m.called)
        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
                    'tail': 'all'},
            timeout=DEFAULT_TIMEOUT_SECONDS,
            stream=True
        )
+
+ def test_diff(self):
+ self.client.diff(fake_api.FAKE_CONTAINER_ID)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/changes',
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_diff_with_dict_instead_of_id(self):
+ self.client.diff({'Id': fake_api.FAKE_CONTAINER_ID})
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/changes',
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_port(self):
+ self.client.port({'Id': fake_api.FAKE_CONTAINER_ID}, 1111)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/json',
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_stop_container(self):
+ timeout = 2
+
+ self.client.stop(fake_api.FAKE_CONTAINER_ID, timeout=timeout)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/stop',
+ params={'t': timeout},
+ timeout=(DEFAULT_TIMEOUT_SECONDS + timeout)
+ )
+
+ def test_stop_container_with_dict_instead_of_id(self):
+ timeout = 2
+
+ self.client.stop({'Id': fake_api.FAKE_CONTAINER_ID},
+ timeout=timeout)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/stop',
+ params={'t': timeout},
+ timeout=(DEFAULT_TIMEOUT_SECONDS + timeout)
+ )
+
+ def test_exec_create(self):
+ self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1'])
+
+ args = fake_request.call_args
+ self.assertEqual(
+ 'POST',
+ args[0][0], url_prefix + 'containers/{0}/exec'.format(
+ fake_api.FAKE_CONTAINER_ID
+ )
+ )
+
+ self.assertEqual(
+ json.loads(args[1]['data']), {
+ 'Tty': False,
+ 'AttachStdout': True,
+ 'Container': fake_api.FAKE_CONTAINER_ID,
+ 'Cmd': ['ls', '-1'],
+ 'Privileged': False,
+ 'AttachStdin': False,
+ 'AttachStderr': True,
+ 'User': ''
+ }
+ )
+
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_exec_start(self):
+ self.client.exec_start(fake_api.FAKE_EXEC_ID)
+
+ args = fake_request.call_args
+ self.assertEqual(
+ args[0][1], url_prefix + 'exec/{0}/start'.format(
+ fake_api.FAKE_EXEC_ID
+ )
+ )
+
+ self.assertEqual(
+ json.loads(args[1]['data']), {
+ 'Tty': False,
+ 'Detach': False,
+ }
+ )
+
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+
+ def test_exec_inspect(self):
+ self.client.exec_inspect(fake_api.FAKE_EXEC_ID)
+
+ args = fake_request.call_args
+ self.assertEqual(
+ args[0][1], url_prefix + 'exec/{0}/json'.format(
+ fake_api.FAKE_EXEC_ID
+ )
+ )
+
+ def test_exec_resize(self):
+ self.client.exec_resize(fake_api.FAKE_EXEC_ID, height=20, width=60)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'exec/{0}/resize'.format(fake_api.FAKE_EXEC_ID),
+ params={'h': 20, 'w': 60},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_pause_container(self):
+ self.client.pause(fake_api.FAKE_CONTAINER_ID)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/pause',
+ timeout=(DEFAULT_TIMEOUT_SECONDS)
+ )
+
+ def test_unpause_container(self):
+ self.client.unpause(fake_api.FAKE_CONTAINER_ID)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/unpause',
+ timeout=(DEFAULT_TIMEOUT_SECONDS)
+ )
+
+ def test_kill_container(self):
+ self.client.kill(fake_api.FAKE_CONTAINER_ID)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/kill',
+ params={},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_kill_container_with_dict_instead_of_id(self):
+ self.client.kill({'Id': fake_api.FAKE_CONTAINER_ID})
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/kill',
+ params={},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_kill_container_with_signal(self):
+ self.client.kill(fake_api.FAKE_CONTAINER_ID, signal=signal.SIGTERM)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/kill',
+ params={'signal': signal.SIGTERM},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_restart_container(self):
+ self.client.restart(fake_api.FAKE_CONTAINER_ID, timeout=2)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/restart',
+ params={'t': 2},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_restart_container_with_dict_instead_of_id(self):
+ self.client.restart({'Id': fake_api.FAKE_CONTAINER_ID}, timeout=2)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'containers/3cc2351ab11b/restart',
+ params={'t': 2},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_remove_container(self):
+ self.client.remove_container(fake_api.FAKE_CONTAINER_ID)
+
+ fake_request.assert_called_with(
+ 'DELETE',
+ url_prefix + 'containers/3cc2351ab11b',
+ params={'v': False, 'link': False, 'force': False},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_remove_container_with_dict_instead_of_id(self):
+ self.client.remove_container({'Id': fake_api.FAKE_CONTAINER_ID})
+
+ fake_request.assert_called_with(
+ 'DELETE',
+ url_prefix + 'containers/3cc2351ab11b',
+ params={'v': False, 'link': False, 'force': False},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_remove_link(self):
+ self.client.remove_container(fake_api.FAKE_CONTAINER_ID, link=True)
+
+ fake_request.assert_called_with(
+ 'DELETE',
+ url_prefix + 'containers/3cc2351ab11b',
+ params={'v': False, 'link': True, 'force': False},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_export(self):
+ self.client.export(fake_api.FAKE_CONTAINER_ID)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/export',
+ stream=True,
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_export_with_dict_instead_of_id(self):
+ self.client.export({'Id': fake_api.FAKE_CONTAINER_ID})
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/export',
+ stream=True,
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_inspect_container(self):
+ self.client.inspect_container(fake_api.FAKE_CONTAINER_ID)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/json',
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_inspect_container_undefined_id(self):
+ for arg in None, '', {True: True}:
+ with pytest.raises(docker.errors.NullResource) as excinfo:
+ self.client.inspect_container(arg)
+
+ self.assertEqual(
+ excinfo.value.args[0], 'image or container param is undefined'
+ )
+
+ def test_container_stats(self):
+ self.client.stats(fake_api.FAKE_CONTAINER_ID)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/stats',
+ timeout=60,
+ stream=True
+ )
+
+ ##################
+ # IMAGES TESTS #
+ ##################
+
+ def test_pull(self):
+ self.client.pull('joffrey/test001')
+
+ args = fake_request.call_args
+ self.assertEqual(
+ args[0][1],
+ url_prefix + 'images/create'
+ )
+ self.assertEqual(
+ args[1]['params'],
+ {'tag': None, 'fromImage': 'joffrey/test001'}
+ )
+ self.assertFalse(args[1]['stream'])
+
+ def test_pull_stream(self):
+ self.client.pull('joffrey/test001', stream=True)
+
+ args = fake_request.call_args
+ self.assertEqual(
+ args[0][1],
+ url_prefix + 'images/create'
+ )
+ self.assertEqual(
+ args[1]['params'],
+ {'tag': None, 'fromImage': 'joffrey/test001'}
+ )
+ self.assertTrue(args[1]['stream'])
+
+ def test_commit(self):
+ self.client.commit(fake_api.FAKE_CONTAINER_ID)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'commit',
+ data='{}',
+ headers={'Content-Type': 'application/json'},
+ params={
+ 'repo': None,
+ 'comment': None,
+ 'tag': None,
+ 'container': '3cc2351ab11b',
+ 'author': None
+ },
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_remove_image(self):
+ self.client.remove_image(fake_api.FAKE_IMAGE_ID)
+
+ fake_request.assert_called_with(
+ 'DELETE',
+ url_prefix + 'images/e9aa60c60128',
+ params={'force': False, 'noprune': False},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_image_history(self):
+ self.client.history(fake_api.FAKE_IMAGE_NAME)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'images/test_image/history',
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_import_image(self):
+ self.client.import_image(
+ fake_api.FAKE_TARBALL_PATH,
+ repository=fake_api.FAKE_REPO_NAME,
+ tag=fake_api.FAKE_TAG_NAME
+ )
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/create',
+ params={
+ 'repo': fake_api.FAKE_REPO_NAME,
+ 'tag': fake_api.FAKE_TAG_NAME,
+ 'fromSrc': fake_api.FAKE_TARBALL_PATH
+ },
+ data=None,
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_import_image_from_bytes(self):
+ stream = (i for i in range(0, 100))
+
+ self.client.import_image(
+ stream,
+ repository=fake_api.FAKE_REPO_NAME,
+ tag=fake_api.FAKE_TAG_NAME
+ )
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/create',
+ params={
+ 'repo': fake_api.FAKE_REPO_NAME,
+ 'tag': fake_api.FAKE_TAG_NAME,
+ 'fromSrc': '-',
+ },
+ headers={
+ 'Content-Type': 'application/tar',
+ },
+ data=stream,
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_import_image_from_image(self):
+ self.client.import_image(
+ image=fake_api.FAKE_IMAGE_NAME,
+ repository=fake_api.FAKE_REPO_NAME,
+ tag=fake_api.FAKE_TAG_NAME
+ )
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/create',
+ params={
+ 'repo': fake_api.FAKE_REPO_NAME,
+ 'tag': fake_api.FAKE_TAG_NAME,
+ 'fromImage': fake_api.FAKE_IMAGE_NAME
+ },
+ data=None,
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_inspect_image(self):
+ self.client.inspect_image(fake_api.FAKE_IMAGE_NAME)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'images/test_image/json',
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_inspect_image_undefined_id(self):
+ for arg in None, '', {True: True}:
+ with pytest.raises(docker.errors.NullResource) as excinfo:
+ self.client.inspect_image(arg)
+
+ self.assertEqual(
+ excinfo.value.args[0], 'image or container param is undefined'
+ )
+
+ def test_insert_image(self):
+ try:
+ self.client.insert(fake_api.FAKE_IMAGE_NAME,
+ fake_api.FAKE_URL, fake_api.FAKE_PATH)
+ except docker.errors.DeprecatedMethod:
+ self.assertTrue(
+ docker.utils.compare_version('1.12', self.client._version) >= 0
+ )
+ return
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/test_image/insert',
+ params={
+ 'url': fake_api.FAKE_URL,
+ 'path': fake_api.FAKE_PATH
+ },
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_push_image(self):
+ with mock.patch('docker.auth.auth.resolve_authconfig',
+ fake_resolve_authconfig):
+ self.client.push(fake_api.FAKE_IMAGE_NAME)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/test_image/push',
+ params={
+ 'tag': None
+ },
+ data='{}',
+ headers={'Content-Type': 'application/json'},
+ stream=False,
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_push_image_with_tag(self):
+ with mock.patch('docker.auth.auth.resolve_authconfig',
+ fake_resolve_authconfig):
+ self.client.push(
+ fake_api.FAKE_IMAGE_NAME, tag=fake_api.FAKE_TAG_NAME
+ )
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/test_image/push',
+ params={
+ 'tag': fake_api.FAKE_TAG_NAME,
+ },
+ data='{}',
+ headers={'Content-Type': 'application/json'},
+ stream=False,
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_push_image_stream(self):
+ with mock.patch('docker.auth.auth.resolve_authconfig',
+ fake_resolve_authconfig):
+ self.client.push(fake_api.FAKE_IMAGE_NAME, stream=True)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/test_image/push',
+ params={
+ 'tag': None
+ },
+ data='{}',
+ headers={'Content-Type': 'application/json'},
+ stream=True,
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_tag_image(self):
+ self.client.tag(fake_api.FAKE_IMAGE_ID, fake_api.FAKE_REPO_NAME)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/e9aa60c60128/tag',
+ params={
+ 'tag': None,
+ 'repo': 'repo',
+ 'force': 0
+ },
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_tag_image_tag(self):
+ self.client.tag(
+ fake_api.FAKE_IMAGE_ID,
+ fake_api.FAKE_REPO_NAME,
+ tag=fake_api.FAKE_TAG_NAME
+ )
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/e9aa60c60128/tag',
+ params={
+ 'tag': 'tag',
+ 'repo': 'repo',
+ 'force': 0
+ },
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_tag_image_force(self):
+ self.client.tag(
+ fake_api.FAKE_IMAGE_ID, fake_api.FAKE_REPO_NAME, force=True)
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/e9aa60c60128/tag',
+ params={
+ 'tag': None,
+ 'repo': 'repo',
+ 'force': 1
+ },
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_get_image(self):
+ self.client.get_image(fake_api.FAKE_IMAGE_ID)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'images/e9aa60c60128/get',
+ stream=True,
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_load_image(self):
+ self.client.load_image('Byte Stream....')
+
+ fake_request.assert_called_with(
+ 'POST',
+ url_prefix + 'images/load',
+ data='Byte Stream....',
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ #################
+ # BUILDER TESTS #
+ #################
+
+ def test_build_container(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'MAINTAINER docker-py',
+ 'RUN mkdir -p /tmp/test',
+ 'EXPOSE 8080',
+ 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
+ ' /tmp/silence.tar.gz'
+ ]).encode('ascii'))
+
+ self.client.build(fileobj=script)
+
+ def test_build_container_pull(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'MAINTAINER docker-py',
+ 'RUN mkdir -p /tmp/test',
+ 'EXPOSE 8080',
+ 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
+ ' /tmp/silence.tar.gz'
+ ]).encode('ascii'))
+
+ self.client.build(fileobj=script, pull=True)
+
+ def test_build_container_stream(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'MAINTAINER docker-py',
+ 'RUN mkdir -p /tmp/test',
+ 'EXPOSE 8080',
+ 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
+ ' /tmp/silence.tar.gz'
+ ]).encode('ascii'))
+
+ self.client.build(fileobj=script, stream=True)
+
+ def test_build_container_custom_context(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'MAINTAINER docker-py',
+ 'RUN mkdir -p /tmp/test',
+ 'EXPOSE 8080',
+ 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
+ ' /tmp/silence.tar.gz'
+ ]).encode('ascii'))
+ context = docker.utils.mkbuildcontext(script)
+
+ self.client.build(fileobj=context, custom_context=True)
+
+ def test_build_container_custom_context_gzip(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'MAINTAINER docker-py',
+ 'RUN mkdir -p /tmp/test',
+ 'EXPOSE 8080',
+ 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
+ ' /tmp/silence.tar.gz'
+ ]).encode('ascii'))
+ context = docker.utils.mkbuildcontext(script)
+ gz_context = gzip.GzipFile(fileobj=context)
+
+ self.client.build(
+ fileobj=gz_context,
+ custom_context=True,
+ encoding="gzip"
+ )
+
+ def test_build_remote_with_registry_auth(self):
+ self.client._auth_configs = {
+ 'https://example.com': {
+ 'user': 'example',
+ 'password': 'example',
+ 'email': 'example@example.com'
+ }
+ }
+
+ self.client.build(path='https://github.com/docker-library/mongo')
+
+ def test_build_container_with_named_dockerfile(self):
+ self.client.build('.', dockerfile='nameddockerfile')
+
+ def test_build_container_with_container_limits(self):
+ self.client.build('.', container_limits={
+ 'memory': 1024 * 1024,
+ 'cpusetcpus': 1,
+ 'cpushares': 1000,
+ 'memswap': 1024 * 1024 * 8
+ })
+
+ def test_build_container_invalid_container_limits(self):
+ self.assertRaises(
+ docker.errors.DockerException,
+ lambda: self.client.build('.', container_limits={
+ 'foo': 'bar'
+ })
+ )
+
+ def test_build_container_from_context_object_with_tarball(self):
+ base_path = os.path.join(
+ os.path.dirname(__file__),
+ 'testdata/context'
+ )
+ tarball_path = os.path.join(base_path, 'ctx.tar.gz')
+ context = docker.efficiency.create_context_from_path(tarball_path)
+ try:
+ self.client.build(context.path, **context.job_params)
+ if context.job_params['fileobj'] is not None:
+ context.job_params['fileobj'].close()
+ except Exception as e:
+ self.fail('Command should not raise exception: {0}'.format(e))
+
+ def test_build_container_from_context_object_with_custom_dockerfile(self):
+ base_path = os.path.abspath(os.path.join(
+ os.path.dirname(__file__),
+ 'testdata/context'
+ ))
+ custom_dockerfile = 'custom_dockerfile'
+ try:
+ context = docker.efficiency.create_context_from_path(
+ base_path,
+ dockerfile=custom_dockerfile
+ )
+ self.client.build(context.path, **context.job_params)
+ except docker.errors.ContextError as ce:
+ self.fail(ce.message)
+ except Exception as e:
+ self.fail('Command should not raise exception: {0}'.format(e))
+
+ def test_build_container_from_remote_context(self):
+ ctxurl = 'https://localhost/staging/context.tar.gz'
+ try:
+ context = docker.efficiency.create_context_from_path(ctxurl)
+ self.assertEqual(context.path, ctxurl)
+ self.assertEqual(context.format, 'remote')
+ self.client.build(context.path, **context.job_params)
+ except docker.errors.ContextError as ce:
+ self.fail(ce.message)
+ except Exception as e:
+ self.fail('Command should not raise exception: {0}'.format(e))
+
+ ###################
+ # VOLUMES TESTS #
+ ###################
+
+ @base.requires_api_version('1.21')
+ def test_list_volumes(self):
+ volumes = self.client.volumes()
+ self.assertIn('Volumes', volumes)
+ self.assertEqual(len(volumes['Volumes']), 2)
+ args = fake_request.call_args
+
+ self.assertEqual(args[0][0], 'GET')
+ self.assertEqual(args[0][1], url_prefix + 'volumes')
+
+ @base.requires_api_version('1.21')
+ def test_create_volume(self):
+ name = 'perfectcherryblossom'
+ result = self.client.create_volume(name)
+ self.assertIn('Name', result)
+ self.assertEqual(result['Name'], name)
+ self.assertIn('Driver', result)
+ self.assertEqual(result['Driver'], 'local')
+ args = fake_request.call_args
+
+ self.assertEqual(args[0][0], 'POST')
+ self.assertEqual(args[0][1], url_prefix + 'volumes')
+ self.assertEqual(args[1]['data'], {
+ 'Name': name, 'Driver': None, 'DriverOpts': None
+ })
+
+ @base.requires_api_version('1.21')
+ def test_create_volume_with_driver(self):
+ name = 'perfectcherryblossom'
+ driver_name = 'sshfs'
+ self.client.create_volume(name, driver=driver_name)
+ args = fake_request.call_args
+
+ self.assertEqual(args[0][0], 'POST')
+ self.assertEqual(args[0][1], url_prefix + 'volumes')
+ self.assertIn('Driver', args[1]['data'])
+ self.assertEqual(args[1]['data']['Driver'], driver_name)
+
+ @base.requires_api_version('1.21')
+ def test_create_volume_invalid_opts_type(self):
+ with pytest.raises(TypeError):
+ self.client.create_volume(
+ 'perfectcherryblossom', driver_opts='hello=world'
+ )
+
+ with pytest.raises(TypeError):
+ self.client.create_volume(
+ 'perfectcherryblossom', driver_opts=['hello=world']
+ )
+
+ with pytest.raises(TypeError):
+ self.client.create_volume(
+ 'perfectcherryblossom', driver_opts=''
+ )
+
+ @base.requires_api_version('1.21')
+ def test_inspect_volume(self):
+ name = 'perfectcherryblossom'
+ result = self.client.inspect_volume(name)
+ self.assertIn('Name', result)
+ self.assertEqual(result['Name'], name)
+ self.assertIn('Driver', result)
+ self.assertEqual(result['Driver'], 'local')
+ args = fake_request.call_args
+
+ self.assertEqual(args[0][0], 'GET')
+ self.assertEqual(args[0][1], '{0}volumes/{1}'.format(url_prefix, name))
+
+ @base.requires_api_version('1.21')
+ def test_remove_volume(self):
+ name = 'perfectcherryblossom'
+ result = self.client.remove_volume(name)
+ self.assertIsNone(result)
+ args = fake_request.call_args
+
+ self.assertEqual(args[0][0], 'DELETE')
+ self.assertEqual(args[0][1], '{0}volumes/{1}'.format(url_prefix, name))
+
+ #######################
+ # PY SPECIFIC TESTS #
+ #######################
+
+ def test_load_config_no_file(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+ cfg = docker.auth.load_config(folder)
+ self.assertTrue(cfg is not None)
+
+ def test_load_config(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+ dockercfg_path = os.path.join(folder, '.dockercfg')
+ with open(dockercfg_path, 'w') as f:
+ auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
+ f.write('auth = {0}\n'.format(auth_))
+ f.write('email = sakuya@scarlet.net')
+ cfg = docker.auth.load_config(dockercfg_path)
+ self.assertTrue(docker.auth.INDEX_NAME in cfg)
+ self.assertNotEqual(cfg[docker.auth.INDEX_NAME], None)
+ cfg = cfg[docker.auth.INDEX_NAME]
+ self.assertEqual(cfg['username'], 'sakuya')
+ self.assertEqual(cfg['password'], 'izayoi')
+ self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
+ self.assertEqual(cfg.get('auth'), None)
+
+ def test_load_config_with_random_name(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+
+ dockercfg_path = os.path.join(folder,
+ '.{0}.dockercfg'.format(
+ random.randrange(100000)))
+ registry = 'https://your.private.registry.io'
+ auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
+ config = {
+ registry: {
+ 'auth': '{0}'.format(auth_),
+ 'email': 'sakuya@scarlet.net'
+ }
+ }
+
+ with open(dockercfg_path, 'w') as f:
+ f.write(json.dumps(config))
+
+ cfg = docker.auth.load_config(dockercfg_path)
+ self.assertTrue(registry in cfg)
+ self.assertNotEqual(cfg[registry], None)
+ cfg = cfg[registry]
+ self.assertEqual(cfg['username'], 'sakuya')
+ self.assertEqual(cfg['password'], 'izayoi')
+ self.assertEqual(cfg['email'], 'sakuya@scarlet.net')
+ self.assertEqual(cfg.get('auth'), None)
+
+ def test_tar_with_excludes(self):
+ dirs = [
+ 'foo',
+ 'foo/bar',
+ 'bar',
+ ]
+
+ files = [
+ 'Dockerfile',
+ 'Dockerfile.alt',
+ '.dockerignore',
+ 'a.py',
+ 'a.go',
+ 'b.py',
+ 'cde.py',
+ 'foo/a.py',
+ 'foo/b.py',
+ 'foo/bar/a.py',
+ 'bar/a.py',
+ ]
+
+ exclude = [
+ '*.py',
+ '!b.py',
+ '!a.go',
+ 'foo',
+ 'Dockerfile*',
+ '.dockerignore',
+ ]
+
+ expected_names = set([
+ 'Dockerfile',
+ '.dockerignore',
+ 'a.go',
+ 'b.py',
+ 'bar',
+ 'bar/a.py',
+ ])
+
+ base = make_tree(dirs, files)
+ self.addCleanup(shutil.rmtree, base)
+
+ with docker.utils.tar(base, exclude=exclude) as archive:
+ tar = tarfile.open(fileobj=archive)
+ assert sorted(tar.getnames()) == sorted(expected_names)
+
+ def test_tar_with_empty_directory(self):
+ base = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base)
+ for d in ['foo', 'bar']:
+ os.makedirs(os.path.join(base, d))
+ with docker.utils.tar(base) as archive:
+ tar = tarfile.open(fileobj=archive)
+ self.assertEqual(sorted(tar.getnames()), ['bar', 'foo'])
+
+ def test_tar_with_file_symlinks(self):
+ base = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base)
+ with open(os.path.join(base, 'foo'), 'w') as f:
+ f.write("content")
+ os.makedirs(os.path.join(base, 'bar'))
+ os.symlink('../foo', os.path.join(base, 'bar/foo'))
+ with docker.utils.tar(base) as archive:
+ tar = tarfile.open(fileobj=archive)
+ self.assertEqual(sorted(tar.getnames()), ['bar', 'bar/foo', 'foo'])
+
+ def test_tar_with_directory_symlinks(self):
+ base = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base)
+ for d in ['foo', 'bar']:
+ os.makedirs(os.path.join(base, d))
+ os.symlink('../foo', os.path.join(base, 'bar/foo'))
+ with docker.utils.tar(base) as archive:
+ tar = tarfile.open(fileobj=archive)
+ self.assertEqual(sorted(tar.getnames()), ['bar', 'bar/foo', 'foo'])
+
+ #######################
+ # HOST CONFIG TESTS #
+ #######################
+
+ def test_create_host_config_secopt(self):
+ security_opt = ['apparmor:test_profile']
+ result = self.client.create_host_config(security_opt=security_opt)
+ self.assertIn('SecurityOpt', result)
+ self.assertEqual(result['SecurityOpt'], security_opt)
+
+ self.assertRaises(
+ docker.errors.DockerException, self.client.create_host_config,
+ security_opt='wrong'
+ )
+
+
class StreamTest(base.Cleanup, base.BaseTestCase):
    """Exercise chunked-response streaming against a real unix socket."""

    def setUp(self):
        socket_dir = tempfile.mkdtemp()
        self.build_context = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, socket_dir)
        self.addCleanup(shutil.rmtree, self.build_context)
        self.socket_file = os.path.join(socket_dir, 'test_sock.sock')
        self.server_socket = self._setup_socket()
        self.stop_server = False
        server_thread = threading.Thread(target=self.run_server)
        # BUG FIX: Thread.setDaemon() is deprecated (removed in Python
        # 3.12); assign the daemon attribute directly instead.
        server_thread.daemon = True
        server_thread.start()
        self.response = None
        self.request_handler = None
        # Cleanups run LIFO: stop() flips the flag first, then join()
        # waits for the accept loop to exit.
        self.addCleanup(server_thread.join)
        self.addCleanup(self.stop)

    def stop(self):
        # Signal run_server's accept loop to terminate.
        self.stop_server = True

    def _setup_socket(self):
        """Bind and listen on a fresh AF_UNIX socket."""
        server_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        server_sock.bind(self.socket_file)
        # Non-blocking mode so that we can shut the test down easily
        server_sock.setblocking(0)
        server_sock.listen(5)
        return server_sock

    def run_server(self):
        """Accept loop: hand every connection to self.request_handler."""
        try:
            while not self.stop_server:
                try:
                    connection, client_address = self.server_socket.accept()
                except socket.error:
                    # Probably no connection to accept yet
                    time.sleep(0.01)
                    continue

                connection.setblocking(1)
                try:
                    self.request_handler(connection)
                finally:
                    connection.close()
        finally:
            self.server_socket.close()

    def early_response_sending_handler(self, connection):
        """Send the canned response *before* reading the request body.

        Mimics a daemon that starts streaming build output while the
        client is still uploading the build context.
        """
        data = b''
        headers = None

        connection.sendall(self.response)
        # Read until the end of the request headers.
        while not headers:
            data += connection.recv(2048)
            parts = data.split(b'\r\n\r\n', 1)
            if len(parts) == 2:
                headers, data = parts

        mo = re.search(r'Content-Length: ([0-9]+)', headers.decode())
        assert mo
        content_length = int(mo.group(1))

        # Drain the request body so the client's send() doesn't block.
        while True:
            if len(data) >= content_length:
                break

            data += connection.recv(2048)

    def test_early_stream_response(self):
        self.request_handler = self.early_response_sending_handler
        lines = []
        for i in range(0, 50):
            line = str(i).encode()
            # Chunked transfer-encoding: hex chunk size, then the chunk.
            lines += [('%x' % len(line)).encode(), line]
        lines.append(b'0')
        lines.append(b'')

        self.response = (
            b'HTTP/1.1 200 OK\r\n'
            b'Transfer-Encoding: chunked\r\n'
            b'\r\n'
        ) + b'\r\n'.join(lines)

        with docker.Client(base_url="http+unix://" + self.socket_file) \
                as client:
            # The server comes up asynchronously; retry a few times.
            for i in range(5):
                try:
                    stream = client.build(
                        path=self.build_context,
                        stream=True
                    )
                    break
                except requests.ConnectionError as e:
                    if i == 4:
                        raise e

            self.assertEqual(list(stream), [
                str(i).encode() for i in range(50)])
diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py
new file mode 100644
index 0000000..5a89dee
--- /dev/null
+++ b/tests/unit/fake_api.py
@@ -0,0 +1,513 @@
+# Copyright 2013 dotCloud inc.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import fake_stat
+from docker import constants
+
+CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION)
+
+# Fixed identifiers shared by the fake endpoint callbacks below and the
+# unit tests that assert against them.
+FAKE_CONTAINER_ID = '3cc2351ab11b'
+FAKE_IMAGE_ID = 'e9aa60c60128'
+FAKE_EXEC_ID = 'd5d177f121dc'
+FAKE_IMAGE_NAME = 'test_image'
+FAKE_TARBALL_PATH = '/path/to/tarball'
+FAKE_REPO_NAME = 'repo'
+FAKE_TAG_NAME = 'tag'
+FAKE_FILE_NAME = 'file'
+FAKE_URL = 'myurl'
+FAKE_PATH = '/path'
+FAKE_VOLUME_NAME = 'perfectcherryblossom'
+
+# Each method is prefixed with HTTP method (get, post...)
+# for clarity and readability
+
+
+def get_fake_raw_version():
+    """Fake unversioned ``GET /version`` payload."""
+    status_code = 200
+    response = {
+        "ApiVersion": "1.18",
+        "GitCommit": "fake-commit",
+        "GoVersion": "go1.3.3",
+        "Version": "1.5.0"
+    }
+    return status_code, response
+
+
+def get_fake_version():
+    """Fake versioned ``GET /version`` payload."""
+    status_code = 200
+    response = {'GoVersion': '1', 'Version': '1.1.1',
+                'GitCommit': 'deadbeef+CHANGES'}
+    return status_code, response
+
+
+def get_fake_info():
+    """Fake ``GET /info`` payload."""
+    status_code = 200
+    response = {'Containers': 1, 'Images': 1, 'Debug': False,
+                'MemoryLimit': False, 'SwapLimit': False,
+                'IPv4Forwarding': True}
+    return status_code, response
+
+
+def get_fake_search():
+    """Fake ``GET /images/search`` payload."""
+    status_code = 200
+    response = [{'Name': 'busybox', 'Description': 'Fake Description'}]
+    return status_code, response
+
+
+def get_fake_images():
+    """Fake ``GET /images/json`` listing."""
+    status_code = 200
+    response = [{
+        'Id': FAKE_IMAGE_ID,
+        'Created': '2 days ago',
+        'Repository': 'busybox',
+        'RepoTags': ['busybox:latest', 'busybox:1.0'],
+    }]
+    return status_code, response
+
+
+def get_fake_image_history():
+    """Fake ``GET /images/<name>/history`` payload."""
+    status_code = 200
+    response = [
+        {
+            "Id": "b750fe79269d",
+            "Created": 1364102658,
+            "CreatedBy": "/bin/bash"
+        },
+        {
+            "Id": "27cf78414709",
+            "Created": 1364068391,
+            "CreatedBy": ""
+        }
+    ]
+
+    return status_code, response
+
+
+def post_fake_import_image():
+    """Fake ``POST /images/create`` (import) response body."""
+    status_code = 200
+    response = 'Import messages...'
+
+    return status_code, response
+
+
+def get_fake_containers():
+    """Fake ``GET /containers/json`` listing."""
+    status_code = 200
+    response = [{
+        'Id': FAKE_CONTAINER_ID,
+        'Image': 'busybox:latest',
+        'Created': '2 days ago',
+        'Command': 'true',
+        'Status': 'fake status'
+    }]
+    return status_code, response
+
+
+def post_fake_start_container():
+    """Fake ``POST /containers/<id>/start`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def post_fake_resize_container():
+    """Fake ``POST /containers/<id>/resize`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def post_fake_create_container():
+    """Fake ``POST /containers/create`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def get_fake_inspect_container(tty=False):
+    """Fake ``GET /containers/<id>/json``; *tty* sets Config.Tty so tests
+    can exercise both attached-terminal modes."""
+    status_code = 200
+    response = {
+        'Id': FAKE_CONTAINER_ID,
+        'Config': {'Privileged': True, 'Tty': tty},
+        # NOTE(review): both 'Id' and 'ID' casings are emitted here —
+        # presumably mirroring daemon output quirks; confirm before
+        # removing either.
+        'ID': FAKE_CONTAINER_ID,
+        'Image': 'busybox:latest',
+        "State": {
+            "Running": True,
+            "Pid": 0,
+            "ExitCode": 0,
+            "StartedAt": "2013-09-25T14:01:18.869545111+02:00",
+            "Ghost": False
+        },
+        "MacAddress": "02:42:ac:11:00:0a"
+    }
+    return status_code, response
+
+
+def get_fake_inspect_image():
+    """Fake ``GET /images/<name>/json`` payload."""
+    status_code = 200
+    response = {
+        'id': FAKE_IMAGE_ID,
+        'parent': "27cf784147099545",
+        'created': "2013-03-23T22:24:18.818426-07:00",
+        'container': FAKE_CONTAINER_ID,
+        'container_config':
+        {
+            "Hostname": "",
+            "User": "",
+            "Memory": 0,
+            "MemorySwap": 0,
+            "AttachStdin": False,
+            "AttachStdout": False,
+            "AttachStderr": False,
+            "PortSpecs": "",
+            "Tty": True,
+            "OpenStdin": True,
+            "StdinOnce": False,
+            "Env": "",
+            "Cmd": ["/bin/bash"],
+            "Dns": "",
+            "Image": "base",
+            "Volumes": "",
+            "VolumesFrom": "",
+            "WorkingDir": ""
+        },
+        'Size': 6823592
+    }
+    return status_code, response
+
+
+def get_fake_port():
+    """Fake container-inspect payload focused on port bindings (used for
+    ``Client.port`` tests)."""
+    status_code = 200
+    response = {
+        'HostConfig': {
+            'Binds': None,
+            'ContainerIDFile': '',
+            'Links': None,
+            'LxcConf': None,
+            'PortBindings': {
+                '1111': None,
+                '1111/tcp': [{'HostIp': '127.0.0.1', 'HostPort': '4567'}],
+                '2222': None
+            },
+            'Privileged': False,
+            'PublishAllPorts': False
+        },
+        'NetworkSettings': {
+            'Bridge': 'docker0',
+            'PortMapping': None,
+            'Ports': {
+                '1111': None,
+                '1111/tcp': [{'HostIp': '127.0.0.1', 'HostPort': '4567'}],
+                '2222': None},
+            'MacAddress': '02:42:ac:11:00:0a'
+        }
+    }
+    return status_code, response
+
+
+def get_fake_insert_image():
+    """Fake (legacy) ``POST /images/<name>/insert`` response."""
+    status_code = 200
+    response = {'StatusCode': 0}
+    return status_code, response
+
+
+def get_fake_wait():
+    """Fake ``POST /containers/<id>/wait`` response: exit code 0."""
+    status_code = 200
+    response = {'StatusCode': 0}
+    return status_code, response
+
+
+def get_fake_logs():
+    """Fake ``GET /containers/<id>/logs`` body in Docker's multiplexed
+    stream format: each frame is an 8-byte header (stream type byte,
+    3 pad bytes, 4-byte big-endian payload length) followed by payload
+    (0x11 == 17 == len('Flowering Nights\\n'))."""
+    status_code = 200
+    response = (b'\x01\x00\x00\x00\x00\x00\x00\x11Flowering Nights\n'
+                b'\x01\x00\x00\x00\x00\x00\x00\x10(Sakuya Iyazoi)\n')
+    return status_code, response
+
+
+def get_fake_diff():
+    """Fake ``GET /containers/<id>/changes`` payload."""
+    status_code = 200
+    response = [{'Path': '/test', 'Kind': 1}]
+    return status_code, response
+
+
+def get_fake_events():
+    """Fake ``GET /events`` payload."""
+    status_code = 200
+    response = [{'status': 'stop', 'id': FAKE_CONTAINER_ID,
+                 'from': FAKE_IMAGE_ID, 'time': 1423247867}]
+    return status_code, response
+
+
+def get_fake_export():
+    """Fake ``GET /containers/<id>/export`` (raw tar stream stand-in)."""
+    status_code = 200
+    response = 'Byte Stream....'
+    return status_code, response
+ return status_code, response
+
+
+def post_fake_exec_create():
+    """Fake ``POST /containers/<id>/exec`` response."""
+    status_code = 200
+    response = {'Id': FAKE_EXEC_ID}
+    return status_code, response
+
+
+def post_fake_exec_start():
+    """Fake ``POST /exec/<id>/start`` body, multiplexed-stream framed
+    (same 8-byte header format as get_fake_logs)."""
+    status_code = 200
+    response = (b'\x01\x00\x00\x00\x00\x00\x00\x11bin\nboot\ndev\netc\n'
+                b'\x01\x00\x00\x00\x00\x00\x00\x12lib\nmnt\nproc\nroot\n'
+                b'\x01\x00\x00\x00\x00\x00\x00\x0csbin\nusr\nvar\n')
+    return status_code, response
+
+
+def post_fake_exec_resize():
+    """Fake ``POST /exec/<id>/resize``: 201 Created, empty body."""
+    status_code = 201
+    return status_code, ''
+
+
+def get_fake_exec_inspect():
+    """Fake ``GET /exec/<id>/json`` payload (embeds the fake container
+    inspect dict)."""
+    return 200, {
+        'OpenStderr': True,
+        'OpenStdout': True,
+        'Container': get_fake_inspect_container()[1],
+        'Running': False,
+        'ProcessConfig': {
+            'arguments': ['hello world'],
+            'tty': False,
+            'entrypoint': 'echo',
+            'privileged': False,
+            'user': ''
+        },
+        'ExitCode': 0,
+        'ID': FAKE_EXEC_ID,
+        'OpenStdin': False
+    }
+
+
+def post_fake_stop_container():
+    """Fake ``POST /containers/<id>/stop`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def post_fake_kill_container():
+    """Fake ``POST /containers/<id>/kill`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def post_fake_pause_container():
+    """Fake ``POST /containers/<id>/pause`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def post_fake_unpause_container():
+    """Fake ``POST /containers/<id>/unpause`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def post_fake_restart_container():
+    """Fake ``POST /containers/<id>/restart`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def post_fake_rename_container():
+    """Fake ``POST /containers/<id>/rename``: 204 No Content."""
+    status_code = 204
+    return status_code, None
+
+
+def delete_fake_remove_container():
+    """Fake ``DELETE /containers/<id>`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def post_fake_image_create():
+    """Fake ``POST /images/create`` (pull) response."""
+    status_code = 200
+    response = {'Id': FAKE_IMAGE_ID}
+    return status_code, response
+
+
+def delete_fake_remove_image():
+    """Fake ``DELETE /images/<id>`` response."""
+    status_code = 200
+    response = {'Id': FAKE_IMAGE_ID}
+    return status_code, response
+
+
+def get_fake_get_image():
+    """Fake ``GET /images/<id>/get`` (tarball stream stand-in)."""
+    status_code = 200
+    response = 'Byte Stream....'
+    return status_code, response
+
+
+def post_fake_load_image():
+    """Fake ``POST /images/load`` response."""
+    status_code = 200
+    response = {'Id': FAKE_IMAGE_ID}
+    return status_code, response
+
+
+def post_fake_commit():
+    """Fake ``POST /commit`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def post_fake_push():
+    """Fake ``POST /images/<name>/push`` response."""
+    status_code = 200
+    response = {'Id': FAKE_IMAGE_ID}
+    return status_code, response
+
+
+def post_fake_build_container():
+    """Fake ``POST /build`` response."""
+    status_code = 200
+    response = {'Id': FAKE_CONTAINER_ID}
+    return status_code, response
+
+
+def post_fake_tag_image():
+    """Fake ``POST /images/<id>/tag`` response."""
+    status_code = 200
+    response = {'Id': FAKE_IMAGE_ID}
+    return status_code, response
+
+
+def get_fake_stats():
+    """Fake ``GET /containers/<id>/stats`` payload (see fake_stat.OBJ)."""
+    status_code = 200
+    response = fake_stat.OBJ
+    return status_code, response
+
+
+def get_fake_volume_list():
+    """Fake ``GET /volumes`` listing."""
+    status_code = 200
+    response = {
+        'Volumes': [
+            {
+                'Name': 'perfectcherryblossom',
+                'Driver': 'local',
+                'Mountpoint': '/var/lib/docker/volumes/perfectcherryblossom'
+            }, {
+                'Name': 'subterraneananimism',
+                'Driver': 'local',
+                'Mountpoint': '/var/lib/docker/volumes/subterraneananimism'
+            }
+        ]
+    }
+    return status_code, response
+
+
+def get_fake_volume():
+    """Fake single-volume payload (create and inspect share it)."""
+    status_code = 200
+    response = {
+        'Name': 'perfectcherryblossom',
+        'Driver': 'local',
+        'Mountpoint': '/var/lib/docker/volumes/perfectcherryblossom'
+    }
+    return status_code, response
+
+
+def fake_remove_volume():
+    """Fake ``DELETE /volumes/<name>``: 204 No Content."""
+    return 204, None
+
+# Maps a real API URL -- or an (URL, HTTP-method) pair -- to the fake
+# response callback that produces its (status_code, body).
+prefix = 'http+docker://localunixsocket'
+fake_responses = {
+    '{0}/version'.format(prefix):
+    get_fake_raw_version,
+    '{1}/{0}/version'.format(CURRENT_VERSION, prefix):
+    get_fake_version,
+    '{1}/{0}/info'.format(CURRENT_VERSION, prefix):
+    get_fake_info,
+    '{1}/{0}/images/search'.format(CURRENT_VERSION, prefix):
+    get_fake_search,
+    '{1}/{0}/images/json'.format(CURRENT_VERSION, prefix):
+    get_fake_images,
+    '{1}/{0}/images/test_image/history'.format(CURRENT_VERSION, prefix):
+    get_fake_image_history,
+    '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix):
+    post_fake_import_image,
+    '{1}/{0}/containers/json'.format(CURRENT_VERSION, prefix):
+    get_fake_containers,
+    '{1}/{0}/containers/3cc2351ab11b/start'.format(CURRENT_VERSION, prefix):
+    post_fake_start_container,
+    '{1}/{0}/containers/3cc2351ab11b/resize'.format(CURRENT_VERSION, prefix):
+    post_fake_resize_container,
+    # NOTE(review): this exact key is repeated further down mapped to
+    # get_fake_port; in a dict literal the later entry wins, so this
+    # binding is dead and inspect_container is never served -- TODO
+    # deduplicate (e.g. key on (url, method) or merge the payloads).
+    '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix):
+    get_fake_inspect_container,
+    '{1}/{0}/containers/3cc2351ab11b/rename'.format(CURRENT_VERSION, prefix):
+    post_fake_rename_container,
+    '{1}/{0}/images/e9aa60c60128/tag'.format(CURRENT_VERSION, prefix):
+    post_fake_tag_image,
+    '{1}/{0}/containers/3cc2351ab11b/wait'.format(CURRENT_VERSION, prefix):
+    get_fake_wait,
+    '{1}/{0}/containers/3cc2351ab11b/logs'.format(CURRENT_VERSION, prefix):
+    get_fake_logs,
+    '{1}/{0}/containers/3cc2351ab11b/changes'.format(CURRENT_VERSION, prefix):
+    get_fake_diff,
+    '{1}/{0}/containers/3cc2351ab11b/export'.format(CURRENT_VERSION, prefix):
+    get_fake_export,
+    '{1}/{0}/containers/3cc2351ab11b/exec'.format(CURRENT_VERSION, prefix):
+    post_fake_exec_create,
+    '{1}/{0}/exec/d5d177f121dc/start'.format(CURRENT_VERSION, prefix):
+    post_fake_exec_start,
+    '{1}/{0}/exec/d5d177f121dc/json'.format(CURRENT_VERSION, prefix):
+    get_fake_exec_inspect,
+    '{1}/{0}/exec/d5d177f121dc/resize'.format(CURRENT_VERSION, prefix):
+    post_fake_exec_resize,
+
+    '{1}/{0}/containers/3cc2351ab11b/stats'.format(CURRENT_VERSION, prefix):
+    get_fake_stats,
+    '{1}/{0}/containers/3cc2351ab11b/stop'.format(CURRENT_VERSION, prefix):
+    post_fake_stop_container,
+    '{1}/{0}/containers/3cc2351ab11b/kill'.format(CURRENT_VERSION, prefix):
+    post_fake_kill_container,
+    '{1}/{0}/containers/3cc2351ab11b/pause'.format(CURRENT_VERSION, prefix):
+    post_fake_pause_container,
+    '{1}/{0}/containers/3cc2351ab11b/unpause'.format(CURRENT_VERSION, prefix):
+    post_fake_unpause_container,
+    # NOTE(review): duplicate of the .../json key above -- this entry
+    # (get_fake_port) is the one that actually ends up in the dict.
+    '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix):
+    get_fake_port,
+    '{1}/{0}/containers/3cc2351ab11b/restart'.format(CURRENT_VERSION, prefix):
+    post_fake_restart_container,
+    '{1}/{0}/containers/3cc2351ab11b'.format(CURRENT_VERSION, prefix):
+    delete_fake_remove_container,
+    '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix):
+    post_fake_image_create,
+    '{1}/{0}/images/e9aa60c60128'.format(CURRENT_VERSION, prefix):
+    delete_fake_remove_image,
+    '{1}/{0}/images/e9aa60c60128/get'.format(CURRENT_VERSION, prefix):
+    get_fake_get_image,
+    '{1}/{0}/images/load'.format(CURRENT_VERSION, prefix):
+    post_fake_load_image,
+    '{1}/{0}/images/test_image/json'.format(CURRENT_VERSION, prefix):
+    get_fake_inspect_image,
+    '{1}/{0}/images/test_image/insert'.format(CURRENT_VERSION, prefix):
+    get_fake_insert_image,
+    '{1}/{0}/images/test_image/push'.format(CURRENT_VERSION, prefix):
+    post_fake_push,
+    '{1}/{0}/commit'.format(CURRENT_VERSION, prefix):
+    post_fake_commit,
+    '{1}/{0}/containers/create'.format(CURRENT_VERSION, prefix):
+    post_fake_create_container,
+    '{1}/{0}/build'.format(CURRENT_VERSION, prefix):
+    post_fake_build_container,
+    '{1}/{0}/events'.format(CURRENT_VERSION, prefix):
+    get_fake_events,
+    # Volume endpoints key on (url, method) because the same URL serves
+    # both list (GET) and create (POST).
+    ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'GET'):
+    get_fake_volume_list,
+    ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'POST'):
+    get_fake_volume,
+    ('{1}/{0}/volumes/{2}'.format(
+        CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
+    ), 'GET'):
+    get_fake_volume,
+    ('{1}/{0}/volumes/{2}'.format(
+        CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
+    ), 'DELETE'):
+    fake_remove_volume,
+}
diff --git a/tests/unit/fake_stat.py b/tests/unit/fake_stat.py
new file mode 100644
index 0000000..a7f1029
--- /dev/null
+++ b/tests/unit/fake_stat.py
@@ -0,0 +1,133 @@
+OBJ = {
+    # Canned payload served by fake_api.get_fake_stats for the fake
+    # ``GET /containers/<id>/stats`` endpoint; mirrors the shape of the
+    # daemon's stats JSON (network, cpu, memory and blkio sections).
+    "read": "2015-02-11T19:20:46.667237763+02:00",
+    "network": {
+        "rx_bytes": 567224,
+        "rx_packets": 3773,
+        "rx_errors": 0,
+        "rx_dropped": 0,
+        "tx_bytes": 1176,
+        "tx_packets": 13,
+        "tx_errors": 0,
+        "tx_dropped": 0
+    },
+    "cpu_stats": {
+        "cpu_usage": {
+            "total_usage": 157260874053,
+            "percpu_usage": [
+                52196306950,
+                24118413549,
+                53292684398,
+                27653469156
+            ],
+            "usage_in_kernelmode": 37140000000,
+            "usage_in_usermode": 62140000000
+        },
+        "system_cpu_usage": 3.0881377e+14,
+        "throttling_data": {
+            "periods": 0,
+            "throttled_periods": 0,
+            "throttled_time": 0
+        }
+    },
+    "memory_stats": {
+        "usage": 179314688,
+        "max_usage": 258166784,
+        "stats": {
+            "active_anon": 90804224,
+            "active_file": 2195456,
+            "cache": 3096576,
+            "hierarchical_memory_limit": 1.844674407371e+19,
+            "inactive_anon": 85516288,
+            "inactive_file": 798720,
+            "mapped_file": 2646016,
+            "pgfault": 101034,
+            "pgmajfault": 1207,
+            "pgpgin": 115814,
+            "pgpgout": 75613,
+            "rss": 176218112,
+            "rss_huge": 12582912,
+            "total_active_anon": 90804224,
+            "total_active_file": 2195456,
+            "total_cache": 3096576,
+            "total_inactive_anon": 85516288,
+            "total_inactive_file": 798720,
+            "total_mapped_file": 2646016,
+            "total_pgfault": 101034,
+            "total_pgmajfault": 1207,
+            "total_pgpgin": 115814,
+            "total_pgpgout": 75613,
+            "total_rss": 176218112,
+            "total_rss_huge": 12582912,
+            "total_unevictable": 0,
+            "total_writeback": 0,
+            "unevictable": 0,
+            "writeback": 0
+        },
+        "failcnt": 0,
+        "limit": 8039038976
+    },
+    "blkio_stats": {
+        "io_service_bytes_recursive": [
+            {
+                "major": 8,
+                "minor": 0,
+                "op": "Read",
+                "value": 72843264
+            }, {
+                "major": 8,
+                "minor": 0,
+                "op": "Write",
+                "value": 4096
+            }, {
+                "major": 8,
+                "minor": 0,
+                "op": "Sync",
+                "value": 4096
+            }, {
+                "major": 8,
+                "minor": 0,
+                "op": "Async",
+                "value": 72843264
+            }, {
+                "major": 8,
+                "minor": 0,
+                "op": "Total",
+                "value": 72847360
+            }
+        ],
+        "io_serviced_recursive": [
+            {
+                "major": 8,
+                "minor": 0,
+                "op": "Read",
+                "value": 10581
+            }, {
+                "major": 8,
+                "minor": 0,
+                "op": "Write",
+                "value": 1
+            }, {
+                "major": 8,
+                "minor": 0,
+                "op": "Sync",
+                "value": 1
+            }, {
+                "major": 8,
+                "minor": 0,
+                "op": "Async",
+                "value": 10581
+            }, {
+                "major": 8,
+                "minor": 0,
+                "op": "Total",
+                "value": 10582
+            }
+        ],
+        "io_queue_recursive": [],
+        "io_service_time_recursive": [],
+        "io_wait_time_recursive": [],
+        "io_merged_recursive": [],
+        "io_time_recursive": [],
+        "sectors_recursive": []
+    }
+}
diff --git a/tests/unit/testdata/certs/ca.pem b/tests/unit/testdata/certs/ca.pem
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/testdata/certs/ca.pem
diff --git a/tests/unit/testdata/certs/cert.pem b/tests/unit/testdata/certs/cert.pem
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/testdata/certs/cert.pem
diff --git a/tests/unit/testdata/certs/key.pem b/tests/unit/testdata/certs/key.pem
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/testdata/certs/key.pem
diff --git a/tests/unit/testdata/context/Dockerfile b/tests/unit/testdata/context/Dockerfile
new file mode 100644
index 0000000..d1ceac6
--- /dev/null
+++ b/tests/unit/testdata/context/Dockerfile
@@ -0,0 +1,2 @@
+FROM busybox:latest
+CMD echo "success"
diff --git a/tests/unit/testdata/context/ctx.tar.gz b/tests/unit/testdata/context/ctx.tar.gz
new file mode 100644
index 0000000..c14e5b9
--- /dev/null
+++ b/tests/unit/testdata/context/ctx.tar.gz
Binary files differ
diff --git a/tests/unit/testdata/context/custom_dockerfile b/tests/unit/testdata/context/custom_dockerfile
new file mode 100644
index 0000000..d1ceac6
--- /dev/null
+++ b/tests/unit/testdata/context/custom_dockerfile
@@ -0,0 +1,2 @@
+FROM busybox:latest
+CMD echo "success"
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
new file mode 100644
index 0000000..71a382b
--- /dev/null
+++ b/tests/unit/utils_test.py
@@ -0,0 +1,651 @@
+import os
+import os.path
+import shutil
+import tempfile
+
+from docker.client import Client
+from docker.constants import DEFAULT_DOCKER_API_VERSION
+from docker.errors import DockerException
+from docker.utils import (
+ parse_repository_tag, parse_host, convert_filters, kwargs_from_env,
+ create_host_config, Ulimit, LogConfig, parse_bytes, parse_env_file,
+ exclude_paths,
+)
+from docker.utils.ports import build_port_bindings, split_port
+from docker.auth import resolve_repository_name, resolve_authconfig
+
+from .. import base
+from ..helpers import make_tree
+
+import pytest
+
+TEST_CERT_DIR = os.path.join(
+ os.path.dirname(__file__),
+ 'testdata/certs',
+)
+
+
+class UtilsTest(base.BaseTestCase):
+ longMessage = True
+
+ def generate_tempfile(self, file_content=None):
+ """
+ Generates a temporary file for tests with the content
+ of 'file_content' and returns the filename.
+ Don't forget to unlink the file with os.unlink() after.
+ """
+ local_tempfile = tempfile.NamedTemporaryFile(delete=False)
+ local_tempfile.write(file_content.encode('UTF-8'))
+ local_tempfile.close()
+ return local_tempfile.name
+
+ def setUp(self):
+ self.os_environ = os.environ.copy()
+
+ def tearDown(self):
+ os.environ = self.os_environ
+
+ def test_parse_repository_tag(self):
+ self.assertEqual(parse_repository_tag("root"),
+ ("root", None))
+ self.assertEqual(parse_repository_tag("root:tag"),
+ ("root", "tag"))
+ self.assertEqual(parse_repository_tag("user/repo"),
+ ("user/repo", None))
+ self.assertEqual(parse_repository_tag("user/repo:tag"),
+ ("user/repo", "tag"))
+ self.assertEqual(parse_repository_tag("url:5000/repo"),
+ ("url:5000/repo", None))
+ self.assertEqual(parse_repository_tag("url:5000/repo:tag"),
+ ("url:5000/repo", "tag"))
+
+ def test_parse_bytes(self):
+ self.assertEqual(parse_bytes("512MB"), (536870912))
+ self.assertEqual(parse_bytes("512M"), (536870912))
+ self.assertRaises(DockerException, parse_bytes, "512MK")
+ self.assertRaises(DockerException, parse_bytes, "512L")
+
+ def test_parse_host(self):
+ invalid_hosts = [
+ '0.0.0.0',
+ 'tcp://',
+ 'udp://127.0.0.1',
+ 'udp://127.0.0.1:2375',
+ ]
+
+ valid_hosts = {
+ '0.0.0.1:5555': 'http://0.0.0.1:5555',
+ ':6666': 'http://127.0.0.1:6666',
+ 'tcp://:7777': 'http://127.0.0.1:7777',
+ 'http://:7777': 'http://127.0.0.1:7777',
+ 'https://kokia.jp:2375': 'https://kokia.jp:2375',
+ 'unix:///var/run/docker.sock': 'http+unix:///var/run/docker.sock',
+ 'unix://': 'http+unix://var/run/docker.sock',
+ 'somehost.net:80/service/swarm': (
+ 'http://somehost.net:80/service/swarm'
+ ),
+ }
+
+ for host in invalid_hosts:
+ with pytest.raises(DockerException):
+ parse_host(host, None)
+
+ for host, expected in valid_hosts.items():
+ self.assertEqual(parse_host(host, None), expected, msg=host)
+
+ def test_parse_host_empty_value(self):
+ unix_socket = 'http+unix://var/run/docker.sock'
+ tcp_port = 'http://127.0.0.1:2375'
+
+ for val in [None, '']:
+ for platform in ['darwin', 'linux2', None]:
+ assert parse_host(val, platform) == unix_socket
+
+ assert parse_host(val, 'win32') == tcp_port
+
+ def test_kwargs_from_env_empty(self):
+ os.environ.update(DOCKER_HOST='',
+ DOCKER_CERT_PATH='',
+ DOCKER_TLS_VERIFY='')
+
+ kwargs = kwargs_from_env()
+ self.assertEqual(None, kwargs.get('base_url'))
+ self.assertEqual(None, kwargs.get('tls'))
+
+ def test_kwargs_from_env_tls(self):
+ os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376',
+ DOCKER_CERT_PATH=TEST_CERT_DIR,
+ DOCKER_TLS_VERIFY='1')
+ kwargs = kwargs_from_env(assert_hostname=False)
+ self.assertEqual('https://192.168.59.103:2376', kwargs['base_url'])
+ self.assertTrue('ca.pem' in kwargs['tls'].verify)
+ self.assertTrue('cert.pem' in kwargs['tls'].cert[0])
+ self.assertTrue('key.pem' in kwargs['tls'].cert[1])
+ self.assertEqual(False, kwargs['tls'].assert_hostname)
+ try:
+ client = Client(**kwargs)
+ self.assertEqual(kwargs['base_url'], client.base_url)
+ self.assertEqual(kwargs['tls'].verify, client.verify)
+ self.assertEqual(kwargs['tls'].cert, client.cert)
+ except TypeError as e:
+ self.fail(e)
+
+ def test_kwargs_from_env_no_cert_path(self):
+ try:
+ temp_dir = tempfile.mkdtemp()
+ cert_dir = os.path.join(temp_dir, '.docker')
+ shutil.copytree(TEST_CERT_DIR, cert_dir)
+
+ os.environ.update(HOME=temp_dir,
+ DOCKER_CERT_PATH='',
+ DOCKER_TLS_VERIFY='1')
+
+ kwargs = kwargs_from_env()
+ self.assertIn(cert_dir, kwargs['tls'].verify)
+ self.assertIn(cert_dir, kwargs['tls'].cert[0])
+ self.assertIn(cert_dir, kwargs['tls'].cert[1])
+ finally:
+ if temp_dir:
+ shutil.rmtree(temp_dir)
+
+ def test_parse_env_file_proper(self):
+ env_file = self.generate_tempfile(
+ file_content='USER=jdoe\nPASS=secret')
+ get_parse_env_file = parse_env_file(env_file)
+ self.assertEqual(get_parse_env_file,
+ {'USER': 'jdoe', 'PASS': 'secret'})
+ os.unlink(env_file)
+
+ def test_parse_env_file_commented_line(self):
+ env_file = self.generate_tempfile(
+ file_content='USER=jdoe\n#PASS=secret')
+ get_parse_env_file = parse_env_file((env_file))
+ self.assertEqual(get_parse_env_file, {'USER': 'jdoe'})
+ os.unlink(env_file)
+
+ def test_parse_env_file_invalid_line(self):
+ env_file = self.generate_tempfile(
+ file_content='USER jdoe')
+ self.assertRaises(
+ DockerException, parse_env_file, env_file)
+ os.unlink(env_file)
+
+ def test_convert_filters(self):
+ tests = [
+ ({'dangling': True}, '{"dangling": ["true"]}'),
+ ({'dangling': "true"}, '{"dangling": ["true"]}'),
+ ({'exited': 0}, '{"exited": [0]}'),
+ ({'exited': [0, 1]}, '{"exited": [0, 1]}'),
+ ]
+
+ for filters, expected in tests:
+ self.assertEqual(convert_filters(filters), expected)
+
+ def test_create_host_config_no_options(self):
+ config = create_host_config(version='1.19')
+ self.assertFalse('NetworkMode' in config)
+
+ def test_create_host_config_no_options_newer_api_version(self):
+ config = create_host_config(version='1.20')
+ self.assertEqual(config['NetworkMode'], 'default')
+
+ def test_create_host_config_dict_ulimit(self):
+ ulimit_dct = {'name': 'nofile', 'soft': 8096}
+ config = create_host_config(
+ ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION
+ )
+ self.assertIn('Ulimits', config)
+ self.assertEqual(len(config['Ulimits']), 1)
+ ulimit_obj = config['Ulimits'][0]
+ self.assertTrue(isinstance(ulimit_obj, Ulimit))
+ self.assertEqual(ulimit_obj.name, ulimit_dct['name'])
+ self.assertEqual(ulimit_obj.soft, ulimit_dct['soft'])
+ self.assertEqual(ulimit_obj['Soft'], ulimit_obj.soft)
+
+ def test_create_host_config_dict_ulimit_capitals(self):
+ ulimit_dct = {'Name': 'nofile', 'Soft': 8096, 'Hard': 8096 * 4}
+ config = create_host_config(
+ ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION
+ )
+ self.assertIn('Ulimits', config)
+ self.assertEqual(len(config['Ulimits']), 1)
+ ulimit_obj = config['Ulimits'][0]
+ self.assertTrue(isinstance(ulimit_obj, Ulimit))
+ self.assertEqual(ulimit_obj.name, ulimit_dct['Name'])
+ self.assertEqual(ulimit_obj.soft, ulimit_dct['Soft'])
+ self.assertEqual(ulimit_obj.hard, ulimit_dct['Hard'])
+ self.assertEqual(ulimit_obj['Soft'], ulimit_obj.soft)
+
+ def test_create_host_config_obj_ulimit(self):
+ ulimit_dct = Ulimit(name='nofile', soft=8096)
+ config = create_host_config(
+ ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION
+ )
+ self.assertIn('Ulimits', config)
+ self.assertEqual(len(config['Ulimits']), 1)
+ ulimit_obj = config['Ulimits'][0]
+ self.assertTrue(isinstance(ulimit_obj, Ulimit))
+ self.assertEqual(ulimit_obj, ulimit_dct)
+
+ def test_ulimit_invalid_type(self):
+ self.assertRaises(ValueError, lambda: Ulimit(name=None))
+ self.assertRaises(ValueError, lambda: Ulimit(name='hello', soft='123'))
+ self.assertRaises(ValueError, lambda: Ulimit(name='hello', hard='456'))
+
+ def test_create_host_config_dict_logconfig(self):
+ dct = {'type': LogConfig.types.SYSLOG, 'config': {'key1': 'val1'}}
+ config = create_host_config(
+ version=DEFAULT_DOCKER_API_VERSION, log_config=dct
+ )
+ self.assertIn('LogConfig', config)
+ self.assertTrue(isinstance(config['LogConfig'], LogConfig))
+ self.assertEqual(dct['type'], config['LogConfig'].type)
+
+ def test_create_host_config_obj_logconfig(self):
+ obj = LogConfig(type=LogConfig.types.SYSLOG, config={'key1': 'val1'})
+ config = create_host_config(
+ version=DEFAULT_DOCKER_API_VERSION, log_config=obj
+ )
+ self.assertIn('LogConfig', config)
+ self.assertTrue(isinstance(config['LogConfig'], LogConfig))
+ self.assertEqual(obj, config['LogConfig'])
+
+ def test_logconfig_invalid_config_type(self):
+ with pytest.raises(ValueError):
+ LogConfig(type=LogConfig.types.JSON, config='helloworld')
+
+ def test_resolve_repository_name(self):
+ # docker hub library image
+ self.assertEqual(
+ resolve_repository_name('image'),
+ ('index.docker.io', 'image'),
+ )
+
+ # docker hub image
+ self.assertEqual(
+ resolve_repository_name('username/image'),
+ ('index.docker.io', 'username/image'),
+ )
+
+ # private registry
+ self.assertEqual(
+ resolve_repository_name('my.registry.net/image'),
+ ('my.registry.net', 'image'),
+ )
+
+ # private registry with port
+ self.assertEqual(
+ resolve_repository_name('my.registry.net:5000/image'),
+ ('my.registry.net:5000', 'image'),
+ )
+
+ # private registry with username
+ self.assertEqual(
+ resolve_repository_name('my.registry.net/username/image'),
+ ('my.registry.net', 'username/image'),
+ )
+
+ # no dots but port
+ self.assertEqual(
+ resolve_repository_name('hostname:5000/image'),
+ ('hostname:5000', 'image'),
+ )
+
+ # no dots but port and username
+ self.assertEqual(
+ resolve_repository_name('hostname:5000/username/image'),
+ ('hostname:5000', 'username/image'),
+ )
+
+ # localhost
+ self.assertEqual(
+ resolve_repository_name('localhost/image'),
+ ('localhost', 'image'),
+ )
+
+ # localhost with username
+ self.assertEqual(
+ resolve_repository_name('localhost/username/image'),
+ ('localhost', 'username/image'),
+ )
+
+ def test_resolve_authconfig(self):
+ auth_config = {
+ 'https://index.docker.io/v1/': {'auth': 'indexuser'},
+ 'my.registry.net': {'auth': 'privateuser'},
+ 'http://legacy.registry.url/v1/': {'auth': 'legacyauth'}
+ }
+ # hostname only
+ self.assertEqual(
+ resolve_authconfig(auth_config, 'my.registry.net'),
+ {'auth': 'privateuser'}
+ )
+ # no protocol
+ self.assertEqual(
+ resolve_authconfig(auth_config, 'my.registry.net/v1/'),
+ {'auth': 'privateuser'}
+ )
+ # no path
+ self.assertEqual(
+ resolve_authconfig(auth_config, 'http://my.registry.net'),
+ {'auth': 'privateuser'}
+ )
+ # no path, trailing slash
+ self.assertEqual(
+ resolve_authconfig(auth_config, 'http://my.registry.net/'),
+ {'auth': 'privateuser'}
+ )
+ # no path, wrong secure protocol
+ self.assertEqual(
+ resolve_authconfig(auth_config, 'https://my.registry.net'),
+ {'auth': 'privateuser'}
+ )
+ # no path, wrong insecure protocol
+ self.assertEqual(
+ resolve_authconfig(auth_config, 'http://index.docker.io'),
+ {'auth': 'indexuser'}
+ )
+ # with path, wrong protocol
+ self.assertEqual(
+ resolve_authconfig(auth_config, 'https://my.registry.net/v1/'),
+ {'auth': 'privateuser'}
+ )
+ # default registry
+ self.assertEqual(
+ resolve_authconfig(auth_config), {'auth': 'indexuser'}
+ )
+ # default registry (explicit None)
+ self.assertEqual(
+ resolve_authconfig(auth_config, None), {'auth': 'indexuser'}
+ )
+ # fully explicit
+ self.assertEqual(
+ resolve_authconfig(auth_config, 'http://my.registry.net/v1/'),
+ {'auth': 'privateuser'}
+ )
+ # legacy entry in config
+ self.assertEqual(
+ resolve_authconfig(auth_config, 'legacy.registry.url'),
+ {'auth': 'legacyauth'}
+ )
+ # no matching entry
+ self.assertTrue(
+ resolve_authconfig(auth_config, 'does.not.exist') is None
+ )
+
+ def test_resolve_registry_and_auth(self):
+ auth_config = {
+ 'https://index.docker.io/v1/': {'auth': 'indexuser'},
+ 'my.registry.net': {'auth': 'privateuser'},
+ }
+
+ # library image
+ image = 'image'
+ self.assertEqual(
+ resolve_authconfig(auth_config, resolve_repository_name(image)[0]),
+ {'auth': 'indexuser'},
+ )
+
+ # docker hub image
+ image = 'username/image'
+ self.assertEqual(
+ resolve_authconfig(auth_config, resolve_repository_name(image)[0]),
+ {'auth': 'indexuser'},
+ )
+
+ # private registry
+ image = 'my.registry.net/image'
+ self.assertEqual(
+ resolve_authconfig(auth_config, resolve_repository_name(image)[0]),
+ {'auth': 'privateuser'},
+ )
+
+ # unauthenticated registry
+ image = 'other.registry.net/image'
+ self.assertEqual(
+ resolve_authconfig(auth_config, resolve_repository_name(image)[0]),
+ None,
+ )
+
+ def test_split_port_with_host_ip(self):
+ internal_port, external_port = split_port("127.0.0.1:1000:2000")
+ self.assertEqual(internal_port, ["2000"])
+ self.assertEqual(external_port, [("127.0.0.1", "1000")])
+
+ def test_split_port_with_protocol(self):
+ internal_port, external_port = split_port("127.0.0.1:1000:2000/udp")
+ self.assertEqual(internal_port, ["2000/udp"])
+ self.assertEqual(external_port, [("127.0.0.1", "1000")])
+
+ def test_split_port_with_host_ip_no_port(self):
+ internal_port, external_port = split_port("127.0.0.1::2000")
+ self.assertEqual(internal_port, ["2000"])
+ self.assertEqual(external_port, [("127.0.0.1", None)])
+
+ def test_split_port_range_with_host_ip_no_port(self):
+ internal_port, external_port = split_port("127.0.0.1::2000-2001")
+ self.assertEqual(internal_port, ["2000", "2001"])
+ self.assertEqual(external_port,
+ [("127.0.0.1", None), ("127.0.0.1", None)])
+
+ def test_split_port_with_host_port(self):
+ internal_port, external_port = split_port("1000:2000")
+ self.assertEqual(internal_port, ["2000"])
+ self.assertEqual(external_port, ["1000"])
+
+ def test_split_port_range_with_host_port(self):
+ internal_port, external_port = split_port("1000-1001:2000-2001")
+ self.assertEqual(internal_port, ["2000", "2001"])
+ self.assertEqual(external_port, ["1000", "1001"])
+
+ def test_split_port_no_host_port(self):
+ internal_port, external_port = split_port("2000")
+ self.assertEqual(internal_port, ["2000"])
+ self.assertEqual(external_port, None)
+
+ def test_split_port_range_no_host_port(self):
+ internal_port, external_port = split_port("2000-2001")
+ self.assertEqual(internal_port, ["2000", "2001"])
+ self.assertEqual(external_port, None)
+
+ def test_split_port_range_with_protocol(self):
+ internal_port, external_port = split_port(
+ "127.0.0.1:1000-1001:2000-2001/udp")
+ self.assertEqual(internal_port, ["2000/udp", "2001/udp"])
+ self.assertEqual(external_port,
+ [("127.0.0.1", "1000"), ("127.0.0.1", "1001")])
+
+ def test_split_port_invalid(self):
+ self.assertRaises(ValueError,
+ lambda: split_port("0.0.0.0:1000:2000:tcp"))
+
+ def test_non_matching_length_port_ranges(self):
+ self.assertRaises(
+ ValueError,
+ lambda: split_port("0.0.0.0:1000-1010:2000-2002/tcp")
+ )
+
+ def test_port_and_range_invalid(self):
+ self.assertRaises(ValueError,
+ lambda: split_port("0.0.0.0:1000:2000-2002/tcp"))
+
+    def test_port_only_with_colon(self):
+        """A leading colon (empty host part) raises ValueError."""
+        self.assertRaises(ValueError,
+                          lambda: split_port(":80"))
+
+    def test_host_only_with_colon(self):
+        """A trailing colon (empty port part) raises ValueError."""
+        self.assertRaises(ValueError,
+                          lambda: split_port("localhost:"))
+
+    def test_build_port_bindings_with_one_port(self):
+        """One spec yields a single (ip, port) entry keyed by container port."""
+        port_bindings = build_port_bindings(["127.0.0.1:1000:1000"])
+        self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")])
+
+    def test_build_port_bindings_with_matching_internal_ports(self):
+        """Specs sharing a container port accumulate under the same key."""
+        port_bindings = build_port_bindings(
+            ["127.0.0.1:1000:1000", "127.0.0.1:2000:1000"])
+        self.assertEqual(port_bindings["1000"],
+                         [("127.0.0.1", "1000"), ("127.0.0.1", "2000")])
+
+    def test_build_port_bindings_with_nonmatching_internal_ports(self):
+        """Distinct container ports get separate keys in the mapping."""
+        port_bindings = build_port_bindings(
+            ["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"])
+        self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")])
+        self.assertEqual(port_bindings["2000"], [("127.0.0.1", "2000")])
+
+    def test_build_port_bindings_with_port_range(self):
+        """A range spec expands into one mapping key per container port."""
+        port_bindings = build_port_bindings(["127.0.0.1:1000-1001:1000-1001"])
+        self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")])
+        self.assertEqual(port_bindings["1001"], [("127.0.0.1", "1001")])
+
+    def test_build_port_bindings_with_matching_internal_port_ranges(self):
+        """Ranges that share container ports accumulate per container port."""
+        port_bindings = build_port_bindings(
+            ["127.0.0.1:1000-1001:1000-1001", "127.0.0.1:2000-2001:1000-1001"])
+        self.assertEqual(port_bindings["1000"],
+                         [("127.0.0.1", "1000"), ("127.0.0.1", "2000")])
+        self.assertEqual(port_bindings["1001"],
+                         [("127.0.0.1", "1001"), ("127.0.0.1", "2001")])
+
+ def test_build_port_bindings_with_nonmatching_internal_port_ranges(self):
+ port_bindings = build_port_bindings(
+ ["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"])
+ self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")])
+ self.assertEqual(port_bindings["2000"], [("127.0.0.1", "2000")])
+
+
+class ExcludePathsTest(base.BaseTestCase):
+    """Tests for exclude_paths(): .dockerignore-style pattern matching."""
+
+    # Directory skeleton created on disk before each test.
+    dirs = [
+        'foo',
+        'foo/bar',
+        'bar',
+    ]
+
+    # Files created in the tree: top-level and nested sources, a default
+    # and an alternate Dockerfile, and the .dockerignore itself.
+    files = [
+        'Dockerfile',
+        'Dockerfile.alt',
+        '.dockerignore',
+        'a.py',
+        'a.go',
+        'b.py',
+        'cde.py',
+        'foo/a.py',
+        'foo/b.py',
+        'foo/bar/a.py',
+        'bar/a.py',
+    ]
+
+    # Every path in the tree; tests state expectations as set differences.
+    all_paths = set(dirs + files)
+
+    def setUp(self):
+        # Materialize the dirs/files fixture in a fresh temporary tree.
+        self.base = make_tree(self.dirs, self.files)
+
+    def tearDown(self):
+        # Remove the temporary tree created in setUp.
+        shutil.rmtree(self.base)
+
+    def exclude(self, patterns, dockerfile=None):
+        """Run exclude_paths over the fixture tree; return surviving paths as a set."""
+        return set(exclude_paths(self.base, patterns, dockerfile=dockerfile))
+
+    def test_no_excludes(self):
+        """An empty pattern excludes nothing: all paths survive."""
+        assert self.exclude(['']) == self.all_paths
+
+    def test_no_dupes(self):
+        """An exception pattern must not re-add paths already included."""
+        paths = exclude_paths(self.base, ['!a.py'])
+        assert sorted(paths) == sorted(set(paths))
+
+    def test_wildcard_exclude(self):
+        """'*' excludes everything except Dockerfile and .dockerignore."""
+        assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore'])
+
+    def test_exclude_dockerfile_dockerignore(self):
+        """
+        Even if the .dockerignore file explicitly says to exclude
+        Dockerfile and/or .dockerignore, don't exclude them from
+        the actual tar file — the daemon needs both to build.
+        """
+        assert self.exclude(['Dockerfile', '.dockerignore']) == self.all_paths
+
+    def test_exclude_custom_dockerfile(self):
+        """
+        If we're using a custom Dockerfile, make sure that's not
+        excluded — the dockerfile= name replaces 'Dockerfile' in the
+        always-keep set.
+        """
+        assert self.exclude(['*'], dockerfile='Dockerfile.alt') == \
+            set(['Dockerfile.alt', '.dockerignore'])
+
+    def test_single_filename(self):
+        """A bare filename pattern excludes exactly that file."""
+        assert self.exclude(['a.py']) == self.all_paths - set(['a.py'])
+
+    # As odd as it sounds, a filename pattern with a trailing slash on the
+    # end *will* result in that file being excluded.
+    def test_single_filename_trailing_slash(self):
+        """'a.py/' still excludes the plain file a.py."""
+        assert self.exclude(['a.py/']) == self.all_paths - set(['a.py'])
+
+    def test_wildcard_filename_start(self):
+        """'*.py' matches top-level .py files only, not nested ones."""
+        assert self.exclude(['*.py']) == self.all_paths - set([
+            'a.py', 'b.py', 'cde.py',
+        ])
+
+    def test_wildcard_with_exception(self):
+        """A later '!' pattern re-includes a file excluded by a wildcard."""
+        assert self.exclude(['*.py', '!b.py']) == self.all_paths - set([
+            'a.py', 'cde.py',
+        ])
+
+    def test_wildcard_with_wildcard_exception(self):
+        """An exception pattern may itself be a wildcard ('!*.go')."""
+        assert self.exclude(['*.*', '!*.go']) == self.all_paths - set([
+            'a.py', 'b.py', 'cde.py', 'Dockerfile.alt',
+        ])
+
+    def test_wildcard_filename_end(self):
+        """'a.*' excludes every top-level file named 'a' regardless of extension."""
+        assert self.exclude(['a.*']) == self.all_paths - set(['a.py', 'a.go'])
+
+    def test_question_mark(self):
+        """'?' matches exactly one character ('?.py' skips cde.py)."""
+        assert self.exclude(['?.py']) == self.all_paths - set(['a.py', 'b.py'])
+
+    def test_single_subdir_single_filename(self):
+        """An explicit 'dir/file' pattern excludes just that nested file."""
+        assert self.exclude(['foo/a.py']) == self.all_paths - set(['foo/a.py'])
+
+    def test_single_subdir_wildcard_filename(self):
+        """'foo/*.py' excludes only direct .py children of foo/."""
+        assert self.exclude(['foo/*.py']) == self.all_paths - set([
+            'foo/a.py', 'foo/b.py',
+        ])
+
+    def test_wildcard_subdir_single_filename(self):
+        """'*/a.py' matches a.py one directory deep, in any subdirectory."""
+        assert self.exclude(['*/a.py']) == self.all_paths - set([
+            'foo/a.py', 'bar/a.py',
+        ])
+
+    def test_wildcard_subdir_wildcard_filename(self):
+        """'*/*.py' matches .py files exactly one directory deep."""
+        assert self.exclude(['*/*.py']) == self.all_paths - set([
+            'foo/a.py', 'foo/b.py', 'bar/a.py',
+        ])
+
+    def test_directory(self):
+        """Excluding a directory removes it and everything beneath it."""
+        assert self.exclude(['foo']) == self.all_paths - set([
+            'foo', 'foo/a.py', 'foo/b.py',
+            'foo/bar', 'foo/bar/a.py',
+        ])
+
+ def test_directory_with_trailing_slash(self):
+ assert self.exclude(['foo']) == self.all_paths - set([
+ 'foo', 'foo/a.py', 'foo/b.py',
+ 'foo/bar', 'foo/bar/a.py',
+ ])
+
+    def test_directory_with_single_exception(self):
+        """A '!' exception inside an excluded directory keeps that file (and its parents)."""
+        assert self.exclude(['foo', '!foo/bar/a.py']) == self.all_paths - set([
+            'foo/a.py', 'foo/b.py',
+        ])
+
+    def test_directory_with_subdir_exception(self):
+        """Excepting a subdirectory keeps the whole subtree under it."""
+        assert self.exclude(['foo', '!foo/bar']) == self.all_paths - set([
+            'foo/a.py', 'foo/b.py',
+        ])
+
+    def test_directory_with_wildcard_exception(self):
+        """A wildcard exception keeps matching files but not unmatched subtrees."""
+        assert self.exclude(['foo', '!foo/*.py']) == self.all_paths - set([
+            'foo/bar', 'foo/bar/a.py',
+        ])
+
+    def test_subdirectory(self):
+        """Excluding a nested directory removes it and its contents only."""
+        assert self.exclude(['foo/bar']) == self.all_paths - set([
+            'foo/bar', 'foo/bar/a.py',
+        ])