From 15385685323b18671bbfed38ba744ac2b6b01bb9 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 23 Sep 2015 17:42:29 -0700 Subject: Reorganize test directories More clearly separate unit and integration tests Allow splitting into multiple files Cleaner Signed-off-by: Joffrey F --- Makefile | 14 +- tests/base.py | 25 + tests/fake_api.py | 513 ----- tests/fake_stat.py | 133 -- tests/integration/__init__.py | 2 + tests/integration/api_test.py | 1719 +++++++++++++++++ tests/integration_test.py | 1724 +---------------- tests/test.py | 2476 ------------------------- tests/testdata/certs/ca.pem | 0 tests/testdata/certs/cert.pem | 0 tests/testdata/certs/key.pem | 0 tests/testdata/context/Dockerfile | 2 - tests/testdata/context/ctx.tar.gz | Bin 171 -> 0 bytes tests/testdata/context/custom_dockerfile | 2 - tests/unit/__init__.py | 0 tests/unit/api_test.py | 2451 ++++++++++++++++++++++++ tests/unit/fake_api.py | 513 +++++ tests/unit/fake_stat.py | 133 ++ tests/unit/testdata/certs/ca.pem | 0 tests/unit/testdata/certs/cert.pem | 0 tests/unit/testdata/certs/key.pem | 0 tests/unit/testdata/context/Dockerfile | 2 + tests/unit/testdata/context/ctx.tar.gz | Bin 0 -> 171 bytes tests/unit/testdata/context/custom_dockerfile | 2 + tests/unit/utils_test.py | 651 +++++++ tests/utils_test.py | 651 ------- tox.ini | 2 +- 27 files changed, 5510 insertions(+), 5505 deletions(-) delete mode 100644 tests/fake_api.py delete mode 100644 tests/fake_stat.py create mode 100644 tests/integration/__init__.py create mode 100644 tests/integration/api_test.py delete mode 100644 tests/test.py delete mode 100644 tests/testdata/certs/ca.pem delete mode 100644 tests/testdata/certs/cert.pem delete mode 100644 tests/testdata/certs/key.pem delete mode 100644 tests/testdata/context/Dockerfile delete mode 100644 tests/testdata/context/ctx.tar.gz delete mode 100644 tests/testdata/context/custom_dockerfile create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/api_test.py create mode 100644 tests/unit/fake_api.py create mode 100644 tests/unit/fake_stat.py create mode 100644 tests/unit/testdata/certs/ca.pem create mode 100644 tests/unit/testdata/certs/cert.pem create mode 100644 tests/unit/testdata/certs/key.pem create mode 100644 tests/unit/testdata/context/Dockerfile create mode 100644 tests/unit/testdata/context/ctx.tar.gz create mode 100644 tests/unit/testdata/context/custom_dockerfile create mode 100644 tests/unit/utils_test.py delete mode 100644 tests/utils_test.py diff --git a/Makefile b/Makefile index f98abe7..772c2e2 100644 --- a/Makefile +++ b/Makefile @@ -14,22 +14,22 @@ build-py3: test: flake8 unit-test unit-test-py3 integration-dind unit-test: build - docker run docker-py py.test tests/test.py tests/utils_test.py + docker run docker-py py.test tests/unit unit-test-py3: build-py3 - docker run docker-py3 py.test tests/test.py tests/utils_test.py + docker run docker-py3 py.test tests/unit integration-test: build - docker run -v /var/run/docker.sock:/var/run/docker.sock docker-py py.test -rxs tests/integration_test.py + docker run -v /var/run/docker.sock:/var/run/docker.sock docker-py py.test -rxs tests/integration integration-test-py3: build-py3 - docker run -v /var/run/docker.sock:/var/run/docker.sock docker-py3 py.test -rxs tests/integration_test.py + docker run -v /var/run/docker.sock:/var/run/docker.sock docker-py3 py.test -rxs tests/integration integration-dind: build build-py3 docker run -d --name dpy-dind --privileged dockerswarm/dind:1.8.1 docker -d -H tcp://0.0.0.0:2375 - docker run 
--env="DOCKER_HOST=tcp://docker:2375" --link=dpy-dind:docker docker-py py.test -rxs tests/integration_test.py - docker run --env="DOCKER_HOST=tcp://docker:2375" --link=dpy-dind:docker docker-py3 py.test -rxs tests/integration_test.py + docker run --env="DOCKER_HOST=tcp://docker:2375" --link=dpy-dind:docker docker-py py.test -rxs tests/integration + docker run --env="DOCKER_HOST=tcp://docker:2375" --link=dpy-dind:docker docker-py3 py.test -rxs tests/integration docker rm -vf dpy-dind flake8: build - docker run docker-py flake8 docker tests \ No newline at end of file + docker run docker-py flake8 docker tests diff --git a/tests/base.py b/tests/base.py index 51b2300..a2c01fc 100644 --- a/tests/base.py +++ b/tests/base.py @@ -21,3 +21,28 @@ def requires_api_version(version): ), reason="API version is too low (< {0})".format(version) ) + + +class Cleanup(object): + if sys.version_info < (2, 7): + # Provide a basic implementation of addCleanup for Python < 2.7 + def __init__(self, *args, **kwargs): + super(Cleanup, self).__init__(*args, **kwargs) + self._cleanups = [] + + def tearDown(self): + super(Cleanup, self).tearDown() + ok = True + while self._cleanups: + fn, args, kwargs = self._cleanups.pop(-1) + try: + fn(*args, **kwargs) + except KeyboardInterrupt: + raise + except: + ok = False + if not ok: + raise + + def addCleanup(self, function, *args, **kwargs): + self._cleanups.append((function, args, kwargs)) diff --git a/tests/fake_api.py b/tests/fake_api.py deleted file mode 100644 index 5a89dee..0000000 --- a/tests/fake_api.py +++ /dev/null @@ -1,513 +0,0 @@ -# Copyright 2013 dotCloud inc. - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from . import fake_stat -from docker import constants - -CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION) - -FAKE_CONTAINER_ID = '3cc2351ab11b' -FAKE_IMAGE_ID = 'e9aa60c60128' -FAKE_EXEC_ID = 'd5d177f121dc' -FAKE_IMAGE_NAME = 'test_image' -FAKE_TARBALL_PATH = '/path/to/tarball' -FAKE_REPO_NAME = 'repo' -FAKE_TAG_NAME = 'tag' -FAKE_FILE_NAME = 'file' -FAKE_URL = 'myurl' -FAKE_PATH = '/path' -FAKE_VOLUME_NAME = 'perfectcherryblossom' - -# Each method is prefixed with HTTP method (get, post...) 
-# for clarity and readability - - -def get_fake_raw_version(): - status_code = 200 - response = { - "ApiVersion": "1.18", - "GitCommit": "fake-commit", - "GoVersion": "go1.3.3", - "Version": "1.5.0" - } - return status_code, response - - -def get_fake_version(): - status_code = 200 - response = {'GoVersion': '1', 'Version': '1.1.1', - 'GitCommit': 'deadbeef+CHANGES'} - return status_code, response - - -def get_fake_info(): - status_code = 200 - response = {'Containers': 1, 'Images': 1, 'Debug': False, - 'MemoryLimit': False, 'SwapLimit': False, - 'IPv4Forwarding': True} - return status_code, response - - -def get_fake_search(): - status_code = 200 - response = [{'Name': 'busybox', 'Description': 'Fake Description'}] - return status_code, response - - -def get_fake_images(): - status_code = 200 - response = [{ - 'Id': FAKE_IMAGE_ID, - 'Created': '2 days ago', - 'Repository': 'busybox', - 'RepoTags': ['busybox:latest', 'busybox:1.0'], - }] - return status_code, response - - -def get_fake_image_history(): - status_code = 200 - response = [ - { - "Id": "b750fe79269d", - "Created": 1364102658, - "CreatedBy": "/bin/bash" - }, - { - "Id": "27cf78414709", - "Created": 1364068391, - "CreatedBy": "" - } - ] - - return status_code, response - - -def post_fake_import_image(): - status_code = 200 - response = 'Import messages...' - - return status_code, response - - -def get_fake_containers(): - status_code = 200 - response = [{ - 'Id': FAKE_CONTAINER_ID, - 'Image': 'busybox:latest', - 'Created': '2 days ago', - 'Command': 'true', - 'Status': 'fake status' - }] - return status_code, response - - -def post_fake_start_container(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def post_fake_resize_container(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def post_fake_create_container(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def get_fake_inspect_container(tty=False): - status_code = 200 - response = { - 'Id': FAKE_CONTAINER_ID, - 'Config': {'Privileged': True, 'Tty': tty}, - 'ID': FAKE_CONTAINER_ID, - 'Image': 'busybox:latest', - "State": { - "Running": True, - "Pid": 0, - "ExitCode": 0, - "StartedAt": "2013-09-25T14:01:18.869545111+02:00", - "Ghost": False - }, - "MacAddress": "02:42:ac:11:00:0a" - } - return status_code, response - - -def get_fake_inspect_image(): - status_code = 200 - response = { - 'id': FAKE_IMAGE_ID, - 'parent': "27cf784147099545", - 'created': "2013-03-23T22:24:18.818426-07:00", - 'container': FAKE_CONTAINER_ID, - 'container_config': - { - "Hostname": "", - "User": "", - "Memory": 0, - "MemorySwap": 0, - "AttachStdin": False, - "AttachStdout": False, - "AttachStderr": False, - "PortSpecs": "", - "Tty": True, - "OpenStdin": True, - "StdinOnce": False, - "Env": "", - "Cmd": ["/bin/bash"], - "Dns": "", - "Image": "base", - "Volumes": "", - "VolumesFrom": "", - "WorkingDir": "" - }, - 'Size': 6823592 - } - return status_code, response - - -def get_fake_port(): - status_code = 200 - response = { - 'HostConfig': { - 'Binds': None, - 'ContainerIDFile': '', - 'Links': None, - 'LxcConf': None, - 'PortBindings': { - '1111': None, - '1111/tcp': [{'HostIp': '127.0.0.1', 'HostPort': '4567'}], - '2222': None - }, - 'Privileged': False, - 'PublishAllPorts': False - }, - 'NetworkSettings': { - 'Bridge': 'docker0', - 'PortMapping': None, - 'Ports': { - '1111': None, - '1111/tcp': [{'HostIp': '127.0.0.1', 'HostPort': '4567'}], - '2222': None}, - 
'MacAddress': '02:42:ac:11:00:0a' - } - } - return status_code, response - - -def get_fake_insert_image(): - status_code = 200 - response = {'StatusCode': 0} - return status_code, response - - -def get_fake_wait(): - status_code = 200 - response = {'StatusCode': 0} - return status_code, response - - -def get_fake_logs(): - status_code = 200 - response = (b'\x01\x00\x00\x00\x00\x00\x00\x11Flowering Nights\n' - b'\x01\x00\x00\x00\x00\x00\x00\x10(Sakuya Iyazoi)\n') - return status_code, response - - -def get_fake_diff(): - status_code = 200 - response = [{'Path': '/test', 'Kind': 1}] - return status_code, response - - -def get_fake_events(): - status_code = 200 - response = [{'status': 'stop', 'id': FAKE_CONTAINER_ID, - 'from': FAKE_IMAGE_ID, 'time': 1423247867}] - return status_code, response - - -def get_fake_export(): - status_code = 200 - response = 'Byte Stream....' - return status_code, response - - -def post_fake_exec_create(): - status_code = 200 - response = {'Id': FAKE_EXEC_ID} - return status_code, response - - -def post_fake_exec_start(): - status_code = 200 - response = (b'\x01\x00\x00\x00\x00\x00\x00\x11bin\nboot\ndev\netc\n' - b'\x01\x00\x00\x00\x00\x00\x00\x12lib\nmnt\nproc\nroot\n' - b'\x01\x00\x00\x00\x00\x00\x00\x0csbin\nusr\nvar\n') - return status_code, response - - -def post_fake_exec_resize(): - status_code = 201 - return status_code, '' - - -def get_fake_exec_inspect(): - return 200, { - 'OpenStderr': True, - 'OpenStdout': True, - 'Container': get_fake_inspect_container()[1], - 'Running': False, - 'ProcessConfig': { - 'arguments': ['hello world'], - 'tty': False, - 'entrypoint': 'echo', - 'privileged': False, - 'user': '' - }, - 'ExitCode': 0, - 'ID': FAKE_EXEC_ID, - 'OpenStdin': False - } - - -def post_fake_stop_container(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def post_fake_kill_container(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def post_fake_pause_container(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def post_fake_unpause_container(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def post_fake_restart_container(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def post_fake_rename_container(): - status_code = 204 - return status_code, None - - -def delete_fake_remove_container(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def post_fake_image_create(): - status_code = 200 - response = {'Id': FAKE_IMAGE_ID} - return status_code, response - - -def delete_fake_remove_image(): - status_code = 200 - response = {'Id': FAKE_IMAGE_ID} - return status_code, response - - -def get_fake_get_image(): - status_code = 200 - response = 'Byte Stream....' 
- return status_code, response - - -def post_fake_load_image(): - status_code = 200 - response = {'Id': FAKE_IMAGE_ID} - return status_code, response - - -def post_fake_commit(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def post_fake_push(): - status_code = 200 - response = {'Id': FAKE_IMAGE_ID} - return status_code, response - - -def post_fake_build_container(): - status_code = 200 - response = {'Id': FAKE_CONTAINER_ID} - return status_code, response - - -def post_fake_tag_image(): - status_code = 200 - response = {'Id': FAKE_IMAGE_ID} - return status_code, response - - -def get_fake_stats(): - status_code = 200 - response = fake_stat.OBJ - return status_code, response - - -def get_fake_volume_list(): - status_code = 200 - response = { - 'Volumes': [ - { - 'Name': 'perfectcherryblossom', - 'Driver': 'local', - 'Mountpoint': '/var/lib/docker/volumes/perfectcherryblossom' - }, { - 'Name': 'subterraneananimism', - 'Driver': 'local', - 'Mountpoint': '/var/lib/docker/volumes/subterraneananimism' - } - ] - } - return status_code, response - - -def get_fake_volume(): - status_code = 200 - response = { - 'Name': 'perfectcherryblossom', - 'Driver': 'local', - 'Mountpoint': '/var/lib/docker/volumes/perfectcherryblossom' - } - return status_code, response - - -def fake_remove_volume(): - return 204, None - -# Maps real api url to fake response callback -prefix = 'http+docker://localunixsocket' -fake_responses = { - '{0}/version'.format(prefix): - get_fake_raw_version, - '{1}/{0}/version'.format(CURRENT_VERSION, prefix): - get_fake_version, - '{1}/{0}/info'.format(CURRENT_VERSION, prefix): - get_fake_info, - '{1}/{0}/images/search'.format(CURRENT_VERSION, prefix): - get_fake_search, - '{1}/{0}/images/json'.format(CURRENT_VERSION, prefix): - get_fake_images, - '{1}/{0}/images/test_image/history'.format(CURRENT_VERSION, prefix): - get_fake_image_history, - '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): - post_fake_import_image, - '{1}/{0}/containers/json'.format(CURRENT_VERSION, prefix): - get_fake_containers, - '{1}/{0}/containers/3cc2351ab11b/start'.format(CURRENT_VERSION, prefix): - post_fake_start_container, - '{1}/{0}/containers/3cc2351ab11b/resize'.format(CURRENT_VERSION, prefix): - post_fake_resize_container, - '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix): - get_fake_inspect_container, - '{1}/{0}/containers/3cc2351ab11b/rename'.format(CURRENT_VERSION, prefix): - post_fake_rename_container, - '{1}/{0}/images/e9aa60c60128/tag'.format(CURRENT_VERSION, prefix): - post_fake_tag_image, - '{1}/{0}/containers/3cc2351ab11b/wait'.format(CURRENT_VERSION, prefix): - get_fake_wait, - '{1}/{0}/containers/3cc2351ab11b/logs'.format(CURRENT_VERSION, prefix): - get_fake_logs, - '{1}/{0}/containers/3cc2351ab11b/changes'.format(CURRENT_VERSION, prefix): - get_fake_diff, - '{1}/{0}/containers/3cc2351ab11b/export'.format(CURRENT_VERSION, prefix): - get_fake_export, - '{1}/{0}/containers/3cc2351ab11b/exec'.format(CURRENT_VERSION, prefix): - post_fake_exec_create, - '{1}/{0}/exec/d5d177f121dc/start'.format(CURRENT_VERSION, prefix): - post_fake_exec_start, - '{1}/{0}/exec/d5d177f121dc/json'.format(CURRENT_VERSION, prefix): - get_fake_exec_inspect, - '{1}/{0}/exec/d5d177f121dc/resize'.format(CURRENT_VERSION, prefix): - post_fake_exec_resize, - - '{1}/{0}/containers/3cc2351ab11b/stats'.format(CURRENT_VERSION, prefix): - get_fake_stats, - '{1}/{0}/containers/3cc2351ab11b/stop'.format(CURRENT_VERSION, prefix): - 
post_fake_stop_container, - '{1}/{0}/containers/3cc2351ab11b/kill'.format(CURRENT_VERSION, prefix): - post_fake_kill_container, - '{1}/{0}/containers/3cc2351ab11b/pause'.format(CURRENT_VERSION, prefix): - post_fake_pause_container, - '{1}/{0}/containers/3cc2351ab11b/unpause'.format(CURRENT_VERSION, prefix): - post_fake_unpause_container, - '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix): - get_fake_port, - '{1}/{0}/containers/3cc2351ab11b/restart'.format(CURRENT_VERSION, prefix): - post_fake_restart_container, - '{1}/{0}/containers/3cc2351ab11b'.format(CURRENT_VERSION, prefix): - delete_fake_remove_container, - '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): - post_fake_image_create, - '{1}/{0}/images/e9aa60c60128'.format(CURRENT_VERSION, prefix): - delete_fake_remove_image, - '{1}/{0}/images/e9aa60c60128/get'.format(CURRENT_VERSION, prefix): - get_fake_get_image, - '{1}/{0}/images/load'.format(CURRENT_VERSION, prefix): - post_fake_load_image, - '{1}/{0}/images/test_image/json'.format(CURRENT_VERSION, prefix): - get_fake_inspect_image, - '{1}/{0}/images/test_image/insert'.format(CURRENT_VERSION, prefix): - get_fake_insert_image, - '{1}/{0}/images/test_image/push'.format(CURRENT_VERSION, prefix): - post_fake_push, - '{1}/{0}/commit'.format(CURRENT_VERSION, prefix): - post_fake_commit, - '{1}/{0}/containers/create'.format(CURRENT_VERSION, prefix): - post_fake_create_container, - '{1}/{0}/build'.format(CURRENT_VERSION, prefix): - post_fake_build_container, - '{1}/{0}/events'.format(CURRENT_VERSION, prefix): - get_fake_events, - ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'GET'): - get_fake_volume_list, - ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'POST'): - get_fake_volume, - ('{1}/{0}/volumes/{2}'.format( - CURRENT_VERSION, prefix, FAKE_VOLUME_NAME - ), 'GET'): - get_fake_volume, - ('{1}/{0}/volumes/{2}'.format( - CURRENT_VERSION, prefix, FAKE_VOLUME_NAME - ), 'DELETE'): - fake_remove_volume, -} diff --git a/tests/fake_stat.py b/tests/fake_stat.py deleted file mode 100644 index a7f1029..0000000 --- a/tests/fake_stat.py +++ /dev/null @@ -1,133 +0,0 @@ -OBJ = { - "read": "2015-02-11T19:20:46.667237763+02:00", - "network": { - "rx_bytes": 567224, - "rx_packets": 3773, - "rx_errors": 0, - "rx_dropped": 0, - "tx_bytes": 1176, - "tx_packets": 13, - "tx_errors": 0, - "tx_dropped": 0 - }, - "cpu_stats": { - "cpu_usage": { - "total_usage": 157260874053, - "percpu_usage": [ - 52196306950, - 24118413549, - 53292684398, - 27653469156 - ], - "usage_in_kernelmode": 37140000000, - "usage_in_usermode": 62140000000 - }, - "system_cpu_usage": 3.0881377e+14, - "throttling_data": { - "periods": 0, - "throttled_periods": 0, - "throttled_time": 0 - } - }, - "memory_stats": { - "usage": 179314688, - "max_usage": 258166784, - "stats": { - "active_anon": 90804224, - "active_file": 2195456, - "cache": 3096576, - "hierarchical_memory_limit": 1.844674407371e+19, - "inactive_anon": 85516288, - "inactive_file": 798720, - "mapped_file": 2646016, - "pgfault": 101034, - "pgmajfault": 1207, - "pgpgin": 115814, - "pgpgout": 75613, - "rss": 176218112, - "rss_huge": 12582912, - "total_active_anon": 90804224, - "total_active_file": 2195456, - "total_cache": 3096576, - "total_inactive_anon": 85516288, - "total_inactive_file": 798720, - "total_mapped_file": 2646016, - "total_pgfault": 101034, - "total_pgmajfault": 1207, - "total_pgpgin": 115814, - "total_pgpgout": 75613, - "total_rss": 176218112, - "total_rss_huge": 12582912, - "total_unevictable": 0, - "total_writeback": 0, - 
"unevictable": 0, - "writeback": 0 - }, - "failcnt": 0, - "limit": 8039038976 - }, - "blkio_stats": { - "io_service_bytes_recursive": [ - { - "major": 8, - "minor": 0, - "op": "Read", - "value": 72843264 - }, { - "major": 8, - "minor": 0, - "op": "Write", - "value": 4096 - }, { - "major": 8, - "minor": 0, - "op": "Sync", - "value": 4096 - }, { - "major": 8, - "minor": 0, - "op": "Async", - "value": 72843264 - }, { - "major": 8, - "minor": 0, - "op": "Total", - "value": 72847360 - } - ], - "io_serviced_recursive": [ - { - "major": 8, - "minor": 0, - "op": "Read", - "value": 10581 - }, { - "major": 8, - "minor": 0, - "op": "Write", - "value": 1 - }, { - "major": 8, - "minor": 0, - "op": "Sync", - "value": 1 - }, { - "major": 8, - "minor": 0, - "op": "Async", - "value": 10581 - }, { - "major": 8, - "minor": 0, - "op": "Total", - "value": 10582 - } - ], - "io_queue_recursive": [], - "io_service_time_recursive": [], - "io_wait_time_recursive": [], - "io_merged_recursive": [], - "io_time_recursive": [], - "sectors_recursive": [] - } -} diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..07feaa1 --- /dev/null +++ b/tests/integration/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from .api_test import * diff --git a/tests/integration/api_test.py b/tests/integration/api_test.py new file mode 100644 index 0000000..9080891 --- /dev/null +++ b/tests/integration/api_test.py @@ -0,0 +1,1719 @@ +# Copyright 2013 dotCloud inc. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import base64 +import contextlib +import json +import io +import os +import shutil +import signal +import socket +import tarfile +import tempfile +import threading +import time +import unittest +import warnings + +import pytest +import six +from six.moves import BaseHTTPServer +from six.moves import socketserver + +import docker +from docker.errors import APIError, NotFound +from docker.utils import kwargs_from_env + +from ..base import requires_api_version, Cleanup + + +# FIXME: missing tests for +# export; history; insert; port; push; tag; get; load; stats + +warnings.simplefilter('error') +compare_version = docker.utils.compare_version + +EXEC_DRIVER = [] +BUSYBOX = 'busybox:buildroot-2014.02' + + +def exec_driver_is_native(): + global EXEC_DRIVER + if not EXEC_DRIVER: + c = docker_client() + EXEC_DRIVER = c.info()['ExecutionDriver'] + c.close() + return EXEC_DRIVER.startswith('native') + + +def docker_client(**kwargs): + return docker.Client(**docker_client_kwargs(**kwargs)) + + +def docker_client_kwargs(**kwargs): + client_kwargs = kwargs_from_env(assert_hostname=False) + client_kwargs.update(kwargs) + return client_kwargs + + +def setup_module(): + c = docker_client() + try: + c.inspect_image(BUSYBOX) + except NotFound: + c.pull(BUSYBOX) + c.inspect_image(BUSYBOX) + c.close() + + +class BaseTestCase(unittest.TestCase): + tmp_imgs = [] + tmp_containers = [] + tmp_folders = [] + tmp_volumes = [] + + def setUp(self): + if six.PY2: + self.assertRegex = self.assertRegexpMatches + self.assertCountEqual = self.assertItemsEqual + self.client = docker_client(timeout=60) + self.tmp_imgs = [] + self.tmp_containers = [] + self.tmp_folders = [] + self.tmp_volumes = [] + + def tearDown(self): + for img in self.tmp_imgs: + try: + self.client.remove_image(img) + except docker.errors.APIError: + pass + for container in self.tmp_containers: + try: + self.client.stop(container, timeout=1) + self.client.remove_container(container) + except docker.errors.APIError: + pass + for folder in self.tmp_folders: + shutil.rmtree(folder) + + for volume in self.tmp_volumes: + try: + self.client.remove_volume(volume) + except docker.errors.APIError: + pass + + self.client.close() + + def run_container(self, *args, **kwargs): + container = self.client.create_container(*args, **kwargs) + self.tmp_containers.append(container) + self.client.start(container) + exitcode = self.client.wait(container) + + if exitcode != 0: + output = self.client.logs(container) + raise Exception( + "Container exited with code {}:\n{}" + .format(exitcode, output)) + + return container + + +######################### +# INFORMATION TESTS # +######################### + + +class TestVersion(BaseTestCase): + def runTest(self): + res = self.client.version() + self.assertIn('GoVersion', res) + self.assertIn('Version', res) + self.assertEqual(len(res['Version'].split('.')), 3) + + +class TestInfo(BaseTestCase): + def runTest(self): + res = self.client.info() + self.assertIn('Containers', res) + self.assertIn('Images', res) + self.assertIn('Debug', res) + + +class TestSearch(BaseTestCase): + def runTest(self): + self.client = docker_client(timeout=10) + res = self.client.search('busybox') + self.assertTrue(len(res) >= 1) + base_img = [x for x in res if x['name'] == 'busybox'] + self.assertEqual(len(base_img), 1) + self.assertIn('description', base_img[0]) + +################### +# LISTING TESTS # +################### + + +class TestImages(BaseTestCase): + def runTest(self): + res1 = self.client.images(all=True) + self.assertIn('Id', res1[0]) + 
res10 = res1[0] + self.assertIn('Created', res10) + self.assertIn('RepoTags', res10) + distinct = [] + for img in res1: + if img['Id'] not in distinct: + distinct.append(img['Id']) + self.assertEqual(len(distinct), self.client.info()['Images']) + + +class TestImageIds(BaseTestCase): + def runTest(self): + res1 = self.client.images(quiet=True) + self.assertEqual(type(res1[0]), six.text_type) + + +class TestListContainers(BaseTestCase): + def runTest(self): + res0 = self.client.containers(all=True) + size = len(res0) + res1 = self.client.create_container(BUSYBOX, 'true') + self.assertIn('Id', res1) + self.client.start(res1['Id']) + self.tmp_containers.append(res1['Id']) + res2 = self.client.containers(all=True) + self.assertEqual(size + 1, len(res2)) + retrieved = [x for x in res2 if x['Id'].startswith(res1['Id'])] + self.assertEqual(len(retrieved), 1) + retrieved = retrieved[0] + self.assertIn('Command', retrieved) + self.assertEqual(retrieved['Command'], six.text_type('true')) + self.assertIn('Image', retrieved) + self.assertRegex(retrieved['Image'], r'busybox:.*') + self.assertIn('Status', retrieved) + +##################### +# CONTAINER TESTS # +##################### + + +class TestCreateContainer(BaseTestCase): + def runTest(self): + res = self.client.create_container(BUSYBOX, 'true') + self.assertIn('Id', res) + self.tmp_containers.append(res['Id']) + + +class TestCreateContainerWithBinds(BaseTestCase): + def setUp(self): + super(TestCreateContainerWithBinds, self).setUp() + + self.mount_dest = '/mnt' + + # Get a random pathname - we don't need it to exist locally + self.mount_origin = tempfile.mkdtemp() + shutil.rmtree(self.mount_origin) + + self.filename = 'shared.txt' + + self.run_with_volume( + False, + BUSYBOX, + ['touch', os.path.join(self.mount_dest, self.filename)], + ) + + def run_with_volume(self, ro, *args, **kwargs): + return self.run_container( + *args, + volumes={self.mount_dest: {}}, + host_config=self.client.create_host_config( + binds={ + self.mount_origin: { + 'bind': self.mount_dest, + 'ro': ro, + }, + }, + network_mode='none' + ), + **kwargs + ) + + def test_rw(self): + container = self.run_with_volume( + False, + BUSYBOX, + ['ls', self.mount_dest], + ) + logs = self.client.logs(container) + + if six.PY3: + logs = logs.decode('utf-8') + self.assertIn(self.filename, logs) + inspect_data = self.client.inspect_container(container) + self.check_container_data(inspect_data, True) + + def test_ro(self): + container = self.run_with_volume( + True, + BUSYBOX, + ['ls', self.mount_dest], + ) + logs = self.client.logs(container) + + if six.PY3: + logs = logs.decode('utf-8') + self.assertIn(self.filename, logs) + + inspect_data = self.client.inspect_container(container) + self.check_container_data(inspect_data, False) + + def check_container_data(self, inspect_data, rw): + if docker.utils.compare_version('1.20', self.client._version) < 0: + self.assertIn('Volumes', inspect_data) + self.assertIn(self.mount_dest, inspect_data['Volumes']) + self.assertEqual( + self.mount_origin, inspect_data['Volumes'][self.mount_dest] + ) + self.assertIn(self.mount_dest, inspect_data['VolumesRW']) + self.assertFalse(inspect_data['VolumesRW'][self.mount_dest]) + else: + self.assertIn('Mounts', inspect_data) + filtered = list(filter( + lambda x: x['Destination'] == self.mount_dest, + inspect_data['Mounts'] + )) + self.assertEqual(len(filtered), 1) + mount_data = filtered[0] + self.assertEqual(mount_data['Source'], self.mount_origin) + self.assertEqual(mount_data['RW'], rw) + + 
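
check_container_data above has to look in two places because the inspect payload changed with API 1.20: older daemons report bind mounts under 'Volumes'/'VolumesRW', newer ones under a single 'Mounts' list. A minimal helper sketching that normalization (hypothetical, not part of the patch; it keys off the presence of 'Mounts' instead of calling compare_version as the test does):

    def bind_mount_info(inspect_data, dest):
        """Return (source, read_write) for the bind mount at `dest`."""
        if 'Mounts' in inspect_data:  # API >= 1.20
            mount = next(
                m for m in inspect_data['Mounts'] if m['Destination'] == dest
            )
            return mount['Source'], mount['RW']
        # API < 1.20: separate Volumes / VolumesRW maps
        return inspect_data['Volumes'][dest], inspect_data['VolumesRW'][dest]
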
+@requires_api_version('1.20') +class CreateContainerWithGroupAddTest(BaseTestCase): + def test_group_id_ints(self): + container = self.client.create_container( + BUSYBOX, 'id -G', + host_config=self.client.create_host_config(group_add=[1000, 1001]) + ) + self.tmp_containers.append(container) + self.client.start(container) + self.client.wait(container) + + logs = self.client.logs(container) + if six.PY3: + logs = logs.decode('utf-8') + groups = logs.strip().split(' ') + self.assertIn('1000', groups) + self.assertIn('1001', groups) + + def test_group_id_strings(self): + container = self.client.create_container( + BUSYBOX, 'id -G', host_config=self.client.create_host_config( + group_add=['1000', '1001'] + ) + ) + self.tmp_containers.append(container) + self.client.start(container) + self.client.wait(container) + + logs = self.client.logs(container) + if six.PY3: + logs = logs.decode('utf-8') + + groups = logs.strip().split(' ') + self.assertIn('1000', groups) + self.assertIn('1001', groups) + + +class CreateContainerWithLogConfigTest(BaseTestCase): + def test_valid_log_driver_and_log_opt(self): + log_config = docker.utils.LogConfig( + type='json-file', + config={'max-file': '100'} + ) + + container = self.client.create_container( + BUSYBOX, ['true'], + host_config=self.client.create_host_config(log_config=log_config) + ) + self.tmp_containers.append(container['Id']) + self.client.start(container) + + info = self.client.inspect_container(container) + container_log_config = info['HostConfig']['LogConfig'] + + self.assertEqual(container_log_config['Type'], log_config.type) + self.assertEqual(container_log_config['Config'], log_config.config) + + def test_invalid_log_driver_raises_exception(self): + log_config = docker.utils.LogConfig( + type='asdf-nope', + config={} + ) + + container = self.client.create_container( + BUSYBOX, ['true'], + host_config=self.client.create_host_config(log_config=log_config) + ) + + expected_msg = "logger: no log driver named 'asdf-nope' is registered" + + with pytest.raises(APIError) as excinfo: + # raises an internal server error 500 + self.client.start(container) + + assert expected_msg in str(excinfo.value) + + @pytest.mark.skipif(True, + reason="https://github.com/docker/docker/issues/15633") + def test_valid_no_log_driver_specified(self): + log_config = docker.utils.LogConfig( + type="", + config={'max-file': '100'} + ) + + container = self.client.create_container( + BUSYBOX, ['true'], + host_config=self.client.create_host_config(log_config=log_config) + ) + self.tmp_containers.append(container['Id']) + self.client.start(container) + + info = self.client.inspect_container(container) + container_log_config = info['HostConfig']['LogConfig'] + + self.assertEqual(container_log_config['Type'], "json-file") + self.assertEqual(container_log_config['Config'], log_config.config) + + def test_valid_no_config_specified(self): + log_config = docker.utils.LogConfig( + type="json-file", + config=None + ) + + container = self.client.create_container( + BUSYBOX, ['true'], + host_config=self.client.create_host_config(log_config=log_config) + ) + self.tmp_containers.append(container['Id']) + self.client.start(container) + + info = self.client.inspect_container(container) + container_log_config = info['HostConfig']['LogConfig'] + + self.assertEqual(container_log_config['Type'], "json-file") + self.assertEqual(container_log_config['Config'], {}) + + +class TestCreateContainerReadOnlyFs(BaseTestCase): + def runTest(self): + if not exec_driver_is_native(): + pytest.skip('Exec driver 
not native') + + ctnr = self.client.create_container( + BUSYBOX, ['mkdir', '/shrine'], + host_config=self.client.create_host_config( + read_only=True, network_mode='none' + ) + ) + self.assertIn('Id', ctnr) + self.tmp_containers.append(ctnr['Id']) + self.client.start(ctnr) + res = self.client.wait(ctnr) + self.assertNotEqual(res, 0) + + +class TestCreateContainerWithName(BaseTestCase): + def runTest(self): + res = self.client.create_container(BUSYBOX, 'true', name='foobar') + self.assertIn('Id', res) + self.tmp_containers.append(res['Id']) + inspect = self.client.inspect_container(res['Id']) + self.assertIn('Name', inspect) + self.assertEqual('/foobar', inspect['Name']) + + +class TestRenameContainer(BaseTestCase): + def runTest(self): + version = self.client.version()['Version'] + name = 'hong_meiling' + res = self.client.create_container(BUSYBOX, 'true') + self.assertIn('Id', res) + self.tmp_containers.append(res['Id']) + self.client.rename(res, name) + inspect = self.client.inspect_container(res['Id']) + self.assertIn('Name', inspect) + if version == '1.5.0': + self.assertEqual(name, inspect['Name']) + else: + self.assertEqual('/{0}'.format(name), inspect['Name']) + + +class TestStartContainer(BaseTestCase): + def runTest(self): + res = self.client.create_container(BUSYBOX, 'true') + self.assertIn('Id', res) + self.tmp_containers.append(res['Id']) + self.client.start(res['Id']) + inspect = self.client.inspect_container(res['Id']) + self.assertIn('Config', inspect) + self.assertIn('Id', inspect) + self.assertTrue(inspect['Id'].startswith(res['Id'])) + self.assertIn('Image', inspect) + self.assertIn('State', inspect) + self.assertIn('Running', inspect['State']) + if not inspect['State']['Running']: + self.assertIn('ExitCode', inspect['State']) + self.assertEqual(inspect['State']['ExitCode'], 0) + + +class TestStartContainerWithDictInsteadOfId(BaseTestCase): + def runTest(self): + res = self.client.create_container(BUSYBOX, 'true') + self.assertIn('Id', res) + self.tmp_containers.append(res['Id']) + self.client.start(res) + inspect = self.client.inspect_container(res['Id']) + self.assertIn('Config', inspect) + self.assertIn('Id', inspect) + self.assertTrue(inspect['Id'].startswith(res['Id'])) + self.assertIn('Image', inspect) + self.assertIn('State', inspect) + self.assertIn('Running', inspect['State']) + if not inspect['State']['Running']: + self.assertIn('ExitCode', inspect['State']) + self.assertEqual(inspect['State']['ExitCode'], 0) + + +class TestCreateContainerPrivileged(BaseTestCase): + def runTest(self): + res = self.client.create_container( + BUSYBOX, 'true', host_config=self.client.create_host_config( + privileged=True, network_mode='none' + ) + ) + self.assertIn('Id', res) + self.tmp_containers.append(res['Id']) + self.client.start(res['Id']) + inspect = self.client.inspect_container(res['Id']) + self.assertIn('Config', inspect) + self.assertIn('Id', inspect) + self.assertTrue(inspect['Id'].startswith(res['Id'])) + self.assertIn('Image', inspect) + self.assertIn('State', inspect) + self.assertIn('Running', inspect['State']) + if not inspect['State']['Running']: + self.assertIn('ExitCode', inspect['State']) + self.assertEqual(inspect['State']['ExitCode'], 0) + # Since Nov 2013, the Privileged flag is no longer part of the + # container's config exposed via the API (safety concerns?). 
+ # + if 'Privileged' in inspect['Config']: + self.assertEqual(inspect['Config']['Privileged'], True) + + +class TestWait(BaseTestCase): + def runTest(self): + res = self.client.create_container(BUSYBOX, ['sleep', '3']) + id = res['Id'] + self.tmp_containers.append(id) + self.client.start(id) + exitcode = self.client.wait(id) + self.assertEqual(exitcode, 0) + inspect = self.client.inspect_container(id) + self.assertIn('Running', inspect['State']) + self.assertEqual(inspect['State']['Running'], False) + self.assertIn('ExitCode', inspect['State']) + self.assertEqual(inspect['State']['ExitCode'], exitcode) + + +class TestWaitWithDictInsteadOfId(BaseTestCase): + def runTest(self): + res = self.client.create_container(BUSYBOX, ['sleep', '3']) + id = res['Id'] + self.tmp_containers.append(id) + self.client.start(res) + exitcode = self.client.wait(res) + self.assertEqual(exitcode, 0) + inspect = self.client.inspect_container(res) + self.assertIn('Running', inspect['State']) + self.assertEqual(inspect['State']['Running'], False) + self.assertIn('ExitCode', inspect['State']) + self.assertEqual(inspect['State']['ExitCode'], exitcode) + + +class TestLogs(BaseTestCase): + def runTest(self): + snippet = 'Flowering Nights (Sakuya Iyazoi)' + container = self.client.create_container( + BUSYBOX, 'echo {0}'.format(snippet) + ) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + exitcode = self.client.wait(id) + self.assertEqual(exitcode, 0) + logs = self.client.logs(id) + self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii')) + + +class TestLogsWithTailOption(BaseTestCase): + def runTest(self): + snippet = '''Line1 +Line2''' + container = self.client.create_container( + BUSYBOX, 'echo "{0}"'.format(snippet) + ) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + exitcode = self.client.wait(id) + self.assertEqual(exitcode, 0) + logs = self.client.logs(id, tail=1) + self.assertEqual(logs, ('Line2\n').encode(encoding='ascii')) + + +# class TestLogsStreaming(BaseTestCase): +# def runTest(self): +# snippet = 'Flowering Nights (Sakuya Iyazoi)' +# container = self.client.create_container( +# BUSYBOX, 'echo {0}'.format(snippet) +# ) +# id = container['Id'] +# self.client.start(id) +# self.tmp_containers.append(id) +# logs = bytes() if six.PY3 else str() +# for chunk in self.client.logs(id, stream=True): +# logs += chunk + +# exitcode = self.client.wait(id) +# self.assertEqual(exitcode, 0) + +# self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii')) + + +class TestLogsWithDictInsteadOfId(BaseTestCase): + def runTest(self): + snippet = 'Flowering Nights (Sakuya Iyazoi)' + container = self.client.create_container( + BUSYBOX, 'echo {0}'.format(snippet) + ) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + exitcode = self.client.wait(id) + self.assertEqual(exitcode, 0) + logs = self.client.logs(container) + self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii')) + + +class TestDiff(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['touch', '/test']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + exitcode = self.client.wait(id) + self.assertEqual(exitcode, 0) + diff = self.client.diff(id) + test_diff = [x for x in diff if x.get('Path', None) == '/test'] + self.assertEqual(len(test_diff), 1) + self.assertIn('Kind', test_diff[0]) + self.assertEqual(test_diff[0]['Kind'], 1) + + +class TestDiffWithDictInsteadOfId(BaseTestCase): + 
def runTest(self): + container = self.client.create_container(BUSYBOX, ['touch', '/test']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + exitcode = self.client.wait(id) + self.assertEqual(exitcode, 0) + diff = self.client.diff(container) + test_diff = [x for x in diff if x.get('Path', None) == '/test'] + self.assertEqual(len(test_diff), 1) + self.assertIn('Kind', test_diff[0]) + self.assertEqual(test_diff[0]['Kind'], 1) + + +class TestStop(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + self.client.stop(id, timeout=2) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + if exec_driver_is_native(): + self.assertNotEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], False) + + +class TestStopWithDictInsteadOfId(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + self.assertIn('Id', container) + id = container['Id'] + self.client.start(container) + self.tmp_containers.append(id) + self.client.stop(container, timeout=2) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + if exec_driver_is_native(): + self.assertNotEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], False) + + +class TestKill(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + self.client.kill(id) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + if exec_driver_is_native(): + self.assertNotEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], False) + + +class TestKillWithDictInsteadOfId(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + self.client.kill(container) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + if exec_driver_is_native(): + self.assertNotEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], False) + + +class TestKillWithSignal(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['sleep', '60']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + self.client.kill(id, signal=signal.SIGKILL) + exitcode = self.client.wait(id) + self.assertNotEqual(exitcode, 0) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + self.assertNotEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], False, state) + + +class TestPort(BaseTestCase): + def runTest(self): + + port_bindings = { + '1111': ('127.0.0.1', '4567'), + '2222': ('127.0.0.1', '4568') + } + + container = 
self.client.create_container( + BUSYBOX, ['sleep', '60'], ports=list(port_bindings.keys()), + host_config=self.client.create_host_config( + port_bindings=port_bindings, network_mode='bridge' + ) + ) + id = container['Id'] + + self.client.start(container) + + # Call the port function on each biding and compare expected vs actual + for port in port_bindings: + actual_bindings = self.client.port(container, port) + port_binding = actual_bindings.pop() + + ip, host_port = port_binding['HostIp'], port_binding['HostPort'] + + self.assertEqual(ip, port_bindings[port][0]) + self.assertEqual(host_port, port_bindings[port][1]) + + self.client.kill(id) + + +class TestMacAddress(BaseTestCase): + def runTest(self): + mac_address_expected = "02:42:ac:11:00:0a" + container = self.client.create_container( + BUSYBOX, ['sleep', '60'], mac_address=mac_address_expected) + + id = container['Id'] + + self.client.start(container) + res = self.client.inspect_container(container['Id']) + self.assertEqual(mac_address_expected, + res['NetworkSettings']['MacAddress']) + + self.client.kill(id) + + +class TestRestart(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + info = self.client.inspect_container(id) + self.assertIn('State', info) + self.assertIn('StartedAt', info['State']) + start_time1 = info['State']['StartedAt'] + self.client.restart(id, timeout=2) + info2 = self.client.inspect_container(id) + self.assertIn('State', info2) + self.assertIn('StartedAt', info2['State']) + start_time2 = info2['State']['StartedAt'] + self.assertNotEqual(start_time1, start_time2) + self.assertIn('Running', info2['State']) + self.assertEqual(info2['State']['Running'], True) + self.client.kill(id) + + +class TestRestartWithDictInsteadOfId(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + self.assertIn('Id', container) + id = container['Id'] + self.client.start(container) + self.tmp_containers.append(id) + info = self.client.inspect_container(id) + self.assertIn('State', info) + self.assertIn('StartedAt', info['State']) + start_time1 = info['State']['StartedAt'] + self.client.restart(container, timeout=2) + info2 = self.client.inspect_container(id) + self.assertIn('State', info2) + self.assertIn('StartedAt', info2['State']) + start_time2 = info2['State']['StartedAt'] + self.assertNotEqual(start_time1, start_time2) + self.assertIn('Running', info2['State']) + self.assertEqual(info2['State']['Running'], True) + self.client.kill(id) + + +class TestRemoveContainer(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['true']) + id = container['Id'] + self.client.start(id) + self.client.wait(id) + self.client.remove_container(id) + containers = self.client.containers(all=True) + res = [x for x in containers if 'Id' in x and x['Id'].startswith(id)] + self.assertEqual(len(res), 0) + + +class TestRemoveContainerWithDictInsteadOfId(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['true']) + id = container['Id'] + self.client.start(id) + self.client.wait(id) + self.client.remove_container(container) + containers = self.client.containers(all=True) + res = [x for x in containers if 'Id' in x and x['Id'].startswith(id)] + self.assertEqual(len(res), 0) + + +class TestCreateContainerWithVolumesFrom(BaseTestCase): + def runTest(self): + vol_names = ['foobar_vol0', 'foobar_vol1'] + + 
res0 = self.client.create_container( + BUSYBOX, 'true', name=vol_names[0] + ) + container1_id = res0['Id'] + self.tmp_containers.append(container1_id) + self.client.start(container1_id) + + res1 = self.client.create_container( + BUSYBOX, 'true', name=vol_names[1] + ) + container2_id = res1['Id'] + self.tmp_containers.append(container2_id) + self.client.start(container2_id) + with self.assertRaises(docker.errors.DockerException): + self.client.create_container( + BUSYBOX, 'cat', detach=True, stdin_open=True, + volumes_from=vol_names + ) + res2 = self.client.create_container( + BUSYBOX, 'cat', detach=True, stdin_open=True, + host_config=self.client.create_host_config( + volumes_from=vol_names, network_mode='none' + ) + ) + container3_id = res2['Id'] + self.tmp_containers.append(container3_id) + self.client.start(container3_id) + + info = self.client.inspect_container(res2['Id']) + self.assertCountEqual(info['HostConfig']['VolumesFrom'], vol_names) + + +class TestCreateContainerWithLinks(BaseTestCase): + def runTest(self): + res0 = self.client.create_container( + BUSYBOX, 'cat', + detach=True, stdin_open=True, + environment={'FOO': '1'}) + + container1_id = res0['Id'] + self.tmp_containers.append(container1_id) + + self.client.start(container1_id) + + res1 = self.client.create_container( + BUSYBOX, 'cat', + detach=True, stdin_open=True, + environment={'FOO': '1'}) + + container2_id = res1['Id'] + self.tmp_containers.append(container2_id) + + self.client.start(container2_id) + + # we don't want the first / + link_path1 = self.client.inspect_container(container1_id)['Name'][1:] + link_alias1 = 'mylink1' + link_env_prefix1 = link_alias1.upper() + + link_path2 = self.client.inspect_container(container2_id)['Name'][1:] + link_alias2 = 'mylink2' + link_env_prefix2 = link_alias2.upper() + + res2 = self.client.create_container( + BUSYBOX, 'env', host_config=self.client.create_host_config( + links={link_path1: link_alias1, link_path2: link_alias2}, + network_mode='none' + ) + ) + container3_id = res2['Id'] + self.tmp_containers.append(container3_id) + self.client.start(container3_id) + self.assertEqual(self.client.wait(container3_id), 0) + + logs = self.client.logs(container3_id) + if six.PY3: + logs = logs.decode('utf-8') + self.assertIn('{0}_NAME='.format(link_env_prefix1), logs) + self.assertIn('{0}_ENV_FOO=1'.format(link_env_prefix1), logs) + self.assertIn('{0}_NAME='.format(link_env_prefix2), logs) + self.assertIn('{0}_ENV_FOO=1'.format(link_env_prefix2), logs) + + +class TestRestartingContainer(BaseTestCase): + def runTest(self): + container = self.client.create_container( + BUSYBOX, ['sleep', '2'], + host_config=self.client.create_host_config( + restart_policy={"Name": "always", "MaximumRetryCount": 0}, + network_mode='none' + ) + ) + id = container['Id'] + self.client.start(id) + self.client.wait(id) + with self.assertRaises(docker.errors.APIError) as exc: + self.client.remove_container(id) + err = exc.exception.response.text + self.assertIn( + 'You cannot remove a running container', err + ) + self.client.remove_container(id, force=True) + + +class TestExecuteCommand(BaseTestCase): + def runTest(self): + if not exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + res = self.client.exec_create(id, ['echo', 'hello']) + self.assertIn('Id', res) + + exec_log = self.client.exec_start(res) + 
self.assertEqual(exec_log, b'hello\n') + + +class TestExecuteCommandString(BaseTestCase): + def runTest(self): + if not exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + res = self.client.exec_create(id, 'echo hello world') + self.assertIn('Id', res) + + exec_log = self.client.exec_start(res) + self.assertEqual(exec_log, b'hello world\n') + + +class TestExecuteCommandStringAsUser(BaseTestCase): + def runTest(self): + if not exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + res = self.client.exec_create(id, 'whoami', user='default') + self.assertIn('Id', res) + + exec_log = self.client.exec_start(res) + self.assertEqual(exec_log, b'default\n') + + +class TestExecuteCommandStringAsRoot(BaseTestCase): + def runTest(self): + if not exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + res = self.client.exec_create(id, 'whoami') + self.assertIn('Id', res) + + exec_log = self.client.exec_start(res) + self.assertEqual(exec_log, b'root\n') + + +class TestExecuteCommandStreaming(BaseTestCase): + def runTest(self): + if not exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + exec_id = self.client.exec_create(id, ['echo', 'hello\nworld']) + self.assertIn('Id', exec_id) + + res = b'' + for chunk in self.client.exec_start(exec_id, stream=True): + res += chunk + self.assertEqual(res, b'hello\nworld\n') + + +class TestExecInspect(BaseTestCase): + def runTest(self): + if not exec_driver_is_native(): + pytest.skip('Exec driver not native') + + container = self.client.create_container(BUSYBOX, 'cat', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + + exec_id = self.client.exec_create(id, ['mkdir', '/does/not/exist']) + self.assertIn('Id', exec_id) + self.client.exec_start(exec_id) + exec_info = self.client.exec_inspect(exec_id) + self.assertIn('ExitCode', exec_info) + self.assertNotEqual(exec_info['ExitCode'], 0) + + +class TestRunContainerStreaming(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, '/bin/sh', + detach=True, stdin_open=True) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + sock = self.client.attach_socket(container, ws=False) + self.assertTrue(sock.fileno() > -1) + + +class TestPauseUnpauseContainer(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + id = container['Id'] + self.tmp_containers.append(id) + self.client.start(container) + self.client.pause(id) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + self.assertEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], True) + 
self.assertIn('Paused', state) + self.assertEqual(state['Paused'], True) + + self.client.unpause(id) + container_info = self.client.inspect_container(id) + self.assertIn('State', container_info) + state = container_info['State'] + self.assertIn('ExitCode', state) + self.assertEqual(state['ExitCode'], 0) + self.assertIn('Running', state) + self.assertEqual(state['Running'], True) + self.assertIn('Paused', state) + self.assertEqual(state['Paused'], False) + + +class TestCreateContainerWithHostPidMode(BaseTestCase): + def runTest(self): + ctnr = self.client.create_container( + BUSYBOX, 'true', host_config=self.client.create_host_config( + pid_mode='host', network_mode='none' + ) + ) + self.assertIn('Id', ctnr) + self.tmp_containers.append(ctnr['Id']) + self.client.start(ctnr) + inspect = self.client.inspect_container(ctnr) + self.assertIn('HostConfig', inspect) + host_config = inspect['HostConfig'] + self.assertIn('PidMode', host_config) + self.assertEqual(host_config['PidMode'], 'host') + + +################# +# LINKS TESTS # +################# + + +class TestRemoveLink(BaseTestCase): + def runTest(self): + # Create containers + container1 = self.client.create_container( + BUSYBOX, 'cat', detach=True, stdin_open=True + ) + container1_id = container1['Id'] + self.tmp_containers.append(container1_id) + self.client.start(container1_id) + + # Create Link + # we don't want the first / + link_path = self.client.inspect_container(container1_id)['Name'][1:] + link_alias = 'mylink' + + container2 = self.client.create_container( + BUSYBOX, 'cat', host_config=self.client.create_host_config( + links={link_path: link_alias}, network_mode='none' + ) + ) + container2_id = container2['Id'] + self.tmp_containers.append(container2_id) + self.client.start(container2_id) + + # Remove link + linked_name = self.client.inspect_container(container2_id)['Name'][1:] + link_name = '%s/%s' % (linked_name, link_alias) + self.client.remove_container(link_name, link=True) + + # Link is gone + containers = self.client.containers(all=True) + retrieved = [x for x in containers if link_name in x['Names']] + self.assertEqual(len(retrieved), 0) + + # Containers are still there + retrieved = [ + x for x in containers if x['Id'].startswith(container1_id) or + x['Id'].startswith(container2_id) + ] + self.assertEqual(len(retrieved), 2) + +################## +# IMAGES TESTS # +################## + + +class TestPull(BaseTestCase): + def runTest(self): + try: + self.client.remove_image('hello-world') + except docker.errors.APIError: + pass + res = self.client.pull('hello-world') + self.tmp_imgs.append('hello-world') + self.assertEqual(type(res), six.text_type) + self.assertGreaterEqual( + len(self.client.images('hello-world')), 1 + ) + img_info = self.client.inspect_image('hello-world') + self.assertIn('Id', img_info) + + +class TestPullStream(BaseTestCase): + def runTest(self): + try: + self.client.remove_image('hello-world') + except docker.errors.APIError: + pass + stream = self.client.pull('hello-world', stream=True) + self.tmp_imgs.append('hello-world') + for chunk in stream: + if six.PY3: + chunk = chunk.decode('utf-8') + json.loads(chunk) # ensure chunk is a single, valid JSON blob + self.assertGreaterEqual( + len(self.client.images('hello-world')), 1 + ) + img_info = self.client.inspect_image('hello-world') + self.assertIn('Id', img_info) + + +class TestCommit(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['touch', '/test']) + id = container['Id'] + self.client.start(id) + 
self.tmp_containers.append(id) + res = self.client.commit(id) + self.assertIn('Id', res) + img_id = res['Id'] + self.tmp_imgs.append(img_id) + img = self.client.inspect_image(img_id) + self.assertIn('Container', img) + self.assertTrue(img['Container'].startswith(id)) + self.assertIn('ContainerConfig', img) + self.assertIn('Image', img['ContainerConfig']) + self.assertEqual(BUSYBOX, img['ContainerConfig']['Image']) + busybox_id = self.client.inspect_image(BUSYBOX)['Id'] + self.assertIn('Parent', img) + self.assertEqual(img['Parent'], busybox_id) + + +class TestRemoveImage(BaseTestCase): + def runTest(self): + container = self.client.create_container(BUSYBOX, ['touch', '/test']) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + res = self.client.commit(id) + self.assertIn('Id', res) + img_id = res['Id'] + self.tmp_imgs.append(img_id) + self.client.remove_image(img_id, force=True) + images = self.client.images(all=True) + res = [x for x in images if x['Id'].startswith(img_id)] + self.assertEqual(len(res), 0) + + +################## +# IMPORT TESTS # +################## + + +class ImportTestCase(BaseTestCase): + '''Base class for `docker import` test cases.''' + + TAR_SIZE = 512 * 1024 + + def write_dummy_tar_content(self, n_bytes, tar_fd): + def extend_file(f, n_bytes): + f.seek(n_bytes - 1) + f.write(bytearray([65])) + f.seek(0) + + tar = tarfile.TarFile(fileobj=tar_fd, mode='w') + + with tempfile.NamedTemporaryFile() as f: + extend_file(f, n_bytes) + tarinfo = tar.gettarinfo(name=f.name, arcname='testdata') + tar.addfile(tarinfo, fileobj=f) + + tar.close() + + @contextlib.contextmanager + def dummy_tar_stream(self, n_bytes): + '''Yields a stream that is valid tar data of size n_bytes.''' + with tempfile.NamedTemporaryFile() as tar_file: + self.write_dummy_tar_content(n_bytes, tar_file) + tar_file.seek(0) + yield tar_file + + @contextlib.contextmanager + def dummy_tar_file(self, n_bytes): + '''Yields the name of a valid tar file of size n_bytes.''' + with tempfile.NamedTemporaryFile() as tar_file: + self.write_dummy_tar_content(n_bytes, tar_file) + tar_file.seek(0) + yield tar_file.name + + +class TestImportFromBytes(ImportTestCase): + '''Tests importing an image from in-memory byte data.''' + + def runTest(self): + with self.dummy_tar_stream(n_bytes=500) as f: + content = f.read() + + # The generic import_image() function cannot import in-memory bytes + # data that happens to be represented as a string type, because + # import_image() will try to use it as a filename and usually then + # trigger an exception. So we test the import_image_from_data() + # function instead. 
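TestCommit and TestRemoveImage above cover the round trip from a stopped container to a new image and back out again: commit() returns a dict with the new image 'Id', inspect_image() ties it back to the source container and parent image, and remove_image() deletes it. Condensed into a standalone sketch (the repository name is illustrative):

import docker

client = docker.Client(version='auto')

ctnr = client.create_container('busybox:buildroot-2014.02', ['touch', '/test'])
client.start(ctnr)
client.wait(ctnr)

# 'example/commit-demo' is a hypothetical repository name.
img = client.commit(ctnr['Id'], repository='example/commit-demo')
info = client.inspect_image(img['Id'])
print(info['Container'], info['Parent'])

client.remove_image(img['Id'], force=True)
client.remove_container(ctnr)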
+ statuses = self.client.import_image_from_data( + content, repository='test/import-from-bytes') + + result_text = statuses.splitlines()[-1] + result = json.loads(result_text) + + self.assertNotIn('error', result) + + img_id = result['status'] + self.tmp_imgs.append(img_id) + + +class TestImportFromFile(ImportTestCase): + '''Tests importing an image from a tar file on disk.''' + + def runTest(self): + with self.dummy_tar_file(n_bytes=self.TAR_SIZE) as tar_filename: + # statuses = self.client.import_image( + # src=tar_filename, repository='test/import-from-file') + statuses = self.client.import_image_from_file( + tar_filename, repository='test/import-from-file') + + result_text = statuses.splitlines()[-1] + result = json.loads(result_text) + + self.assertNotIn('error', result) + + self.assertIn('status', result) + img_id = result['status'] + self.tmp_imgs.append(img_id) + + +class TestImportFromStream(ImportTestCase): + '''Tests importing an image from a stream containing tar data.''' + + def runTest(self): + with self.dummy_tar_stream(n_bytes=self.TAR_SIZE) as tar_stream: + statuses = self.client.import_image( + src=tar_stream, repository='test/import-from-stream') + # statuses = self.client.import_image_from_stream( + # tar_stream, repository='test/import-from-stream') + result_text = statuses.splitlines()[-1] + result = json.loads(result_text) + + self.assertNotIn('error', result) + + self.assertIn('status', result) + img_id = result['status'] + self.tmp_imgs.append(img_id) + + +class TestImportFromURL(ImportTestCase): + '''Tests downloading an image over HTTP.''' + + @contextlib.contextmanager + def temporary_http_file_server(self, stream): + '''Serve data from an IO stream over HTTP.''' + + class Handler(BaseHTTPServer.BaseHTTPRequestHandler): + def do_GET(self): + self.send_response(200) + self.send_header('Content-Type', 'application/x-tar') + self.end_headers() + shutil.copyfileobj(stream, self.wfile) + + server = socketserver.TCPServer(('', 0), Handler) + thread = threading.Thread(target=server.serve_forever) + thread.setDaemon(True) + thread.start() + + yield 'http://%s:%s' % (socket.gethostname(), server.server_address[1]) + + server.shutdown() + + @pytest.mark.skipif(True, reason="Doesn't work inside a container - FIXME") + def runTest(self): + # The crappy test HTTP server doesn't handle large files well, so use + # a small file. 
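The import tests above hinge on the distinction spelled out in the comment: import_image() interprets a string src as a filename or URL, so raw tar bytes must go through import_image_from_data(), while an on-disk tarball can use either import_image(src=path) or import_image_from_file(). Both return newline-separated JSON status lines, the last of which carries the new image ID. A small sketch, assuming a tarball named my_image.tar exists and with the repository names chosen for illustration:

import json
import docker

client = docker.Client(version='auto')

# From a file on disk.
out = client.import_image_from_file('my_image.tar', repository='example/import-file')

# From in-memory bytes: import_image() would misread these as a filename.
with open('my_image.tar', 'rb') as f:
    out = client.import_image_from_data(f.read(), repository='example/import-bytes')

result = json.loads(out.splitlines()[-1])
print(result.get('status'))   # the imported image ID, as the tests assert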
+ TAR_SIZE = 10240 + + with self.dummy_tar_stream(n_bytes=TAR_SIZE) as tar_data: + with self.temporary_http_file_server(tar_data) as url: + statuses = self.client.import_image( + src=url, repository='test/import-from-url') + + result_text = statuses.splitlines()[-1] + result = json.loads(result_text) + + self.assertNotIn('error', result) + + self.assertIn('status', result) + img_id = result['status'] + self.tmp_imgs.append(img_id) + + +################# +# VOLUMES TESTS # +################# + +@requires_api_version('1.21') +class TestVolumes(BaseTestCase): + def test_create_volume(self): + name = 'perfectcherryblossom' + self.tmp_volumes.append(name) + result = self.client.create_volume(name) + self.assertIn('Name', result) + self.assertEqual(result['Name'], name) + self.assertIn('Driver', result) + self.assertEqual(result['Driver'], 'local') + + def test_create_volume_invalid_driver(self): + driver_name = 'invalid.driver' + + with pytest.raises(docker.errors.NotFound): + self.client.create_volume('perfectcherryblossom', driver_name) + + def test_list_volumes(self): + name = 'imperishablenight' + self.tmp_volumes.append(name) + volume_info = self.client.create_volume(name) + result = self.client.volumes() + self.assertIn('Volumes', result) + volumes = result['Volumes'] + self.assertIn(volume_info, volumes) + + def test_inspect_volume(self): + name = 'embodimentofscarletdevil' + self.tmp_volumes.append(name) + volume_info = self.client.create_volume(name) + result = self.client.inspect_volume(name) + self.assertEqual(volume_info, result) + + def test_inspect_nonexistent_volume(self): + name = 'embodimentofscarletdevil' + with pytest.raises(docker.errors.NotFound): + self.client.inspect_volume(name) + + def test_remove_volume(self): + name = 'shootthebullet' + self.tmp_volumes.append(name) + self.client.create_volume(name) + result = self.client.remove_volume(name) + self.assertTrue(result) + + def test_remove_nonexistent_volume(self): + name = 'shootthebullet' + with pytest.raises(docker.errors.NotFound): + self.client.remove_volume(name) + + +################# +# BUILDER TESTS # +################# + +class TestBuildStream(BaseTestCase): + def runTest(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + stream = self.client.build(fileobj=script, stream=True) + logs = '' + for chunk in stream: + if six.PY3: + chunk = chunk.decode('utf-8') + json.loads(chunk) # ensure chunk is a single, valid JSON blob + logs += chunk + self.assertNotEqual(logs, '') + + +class TestBuildFromStringIO(BaseTestCase): + def runTest(self): + if six.PY3: + return + script = io.StringIO(six.text_type('\n').join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ])) + stream = self.client.build(fileobj=script, stream=True) + logs = '' + for chunk in stream: + if six.PY3: + chunk = chunk.decode('utf-8') + logs += chunk + self.assertNotEqual(logs, '') + + +@requires_api_version('1.8') +class TestBuildWithDockerignore(Cleanup, BaseTestCase): + def runTest(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + + with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f: + f.write("\n".join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'ADD . 
/test', + ])) + + with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: + f.write("\n".join([ + 'ignored', + 'Dockerfile', + '.dockerignore', + '', # empty line + ])) + + with open(os.path.join(base_dir, 'not-ignored'), 'w') as f: + f.write("this file should not be ignored") + + subdir = os.path.join(base_dir, 'ignored', 'subdir') + os.makedirs(subdir) + with open(os.path.join(subdir, 'file'), 'w') as f: + f.write("this file should be ignored") + + tag = 'docker-py-test-build-with-dockerignore' + stream = self.client.build( + path=base_dir, + tag=tag, + ) + for chunk in stream: + pass + + c = self.client.create_container(tag, ['ls', '-1A', '/test']) + self.client.start(c) + self.client.wait(c) + logs = self.client.logs(c) + + if six.PY3: + logs = logs.decode('utf-8') + + self.assertEqual( + list(filter(None, logs.split('\n'))), + ['not-ignored'], + ) + +####################### +# PY SPECIFIC TESTS # +####################### + + +class TestRunShlex(BaseTestCase): + def runTest(self): + commands = [ + 'true', + 'echo "The Young Descendant of Tepes & Septette for the ' + 'Dead Princess"', + 'echo -n "The Young Descendant of Tepes & Septette for the ' + 'Dead Princess"', + '/bin/sh -c "echo Hello World"', + '/bin/sh -c \'echo "Hello World"\'', + 'echo "\"Night of Nights\""', + 'true && echo "Night of Nights"' + ] + for cmd in commands: + container = self.client.create_container(BUSYBOX, cmd) + id = container['Id'] + self.client.start(id) + self.tmp_containers.append(id) + exitcode = self.client.wait(id) + self.assertEqual(exitcode, 0, msg=cmd) + + +class TestLoadConfig(BaseTestCase): + def runTest(self): + folder = tempfile.mkdtemp() + self.tmp_folders.append(folder) + cfg_path = os.path.join(folder, '.dockercfg') + f = open(cfg_path, 'w') + auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') + f.write('auth = {0}\n'.format(auth_)) + f.write('email = sakuya@scarlet.net') + f.close() + cfg = docker.auth.load_config(cfg_path) + self.assertNotEqual(cfg[docker.auth.INDEX_NAME], None) + cfg = cfg[docker.auth.INDEX_NAME] + self.assertEqual(cfg['username'], 'sakuya') + self.assertEqual(cfg['password'], 'izayoi') + self.assertEqual(cfg['email'], 'sakuya@scarlet.net') + self.assertEqual(cfg.get('Auth'), None) + + +class TestLoadJSONConfig(BaseTestCase): + def runTest(self): + folder = tempfile.mkdtemp() + self.tmp_folders.append(folder) + cfg_path = os.path.join(folder, '.dockercfg') + f = open(os.path.join(folder, '.dockercfg'), 'w') + auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') + email_ = 'sakuya@scarlet.net' + f.write('{{"{0}": {{"auth": "{1}", "email": "{2}"}}}}\n'.format( + docker.auth.INDEX_URL, auth_, email_)) + f.close() + cfg = docker.auth.load_config(cfg_path) + self.assertNotEqual(cfg[docker.auth.INDEX_URL], None) + cfg = cfg[docker.auth.INDEX_URL] + self.assertEqual(cfg['username'], 'sakuya') + self.assertEqual(cfg['password'], 'izayoi') + self.assertEqual(cfg['email'], 'sakuya@scarlet.net') + self.assertEqual(cfg.get('Auth'), None) + + +class TestAutoDetectVersion(unittest.TestCase): + def test_client_init(self): + client = docker_client(version='auto') + client_version = client._version + api_version = client.version(api_version=False)['ApiVersion'] + self.assertEqual(client_version, api_version) + api_version_2 = client.version()['ApiVersion'] + self.assertEqual(client_version, api_version_2) + client.close() + + def test_auto_client(self): + client = docker.AutoVersionClient(**docker_client_kwargs()) + client_version = client._version + api_version = 
client.version(api_version=False)['ApiVersion'] + self.assertEqual(client_version, api_version) + api_version_2 = client.version()['ApiVersion'] + self.assertEqual(client_version, api_version_2) + client.close() + with self.assertRaises(docker.errors.DockerException): + docker.AutoVersionClient(**docker_client_kwargs(version='1.11')) + + +class TestConnectionTimeout(unittest.TestCase): + def setUp(self): + self.timeout = 0.5 + self.client = docker.client.Client(base_url='http://192.168.10.2:4243', + timeout=self.timeout) + + def runTest(self): + start = time.time() + res = None + # This call isn't supposed to complete, and it should fail fast. + try: + res = self.client.inspect_container('id') + except: + pass + end = time.time() + self.assertTrue(res is None) + self.assertTrue(end - start < 2 * self.timeout) + + +class UnixconnTestCase(unittest.TestCase): + """ + Test UNIX socket connection adapter. + """ + + def test_resource_warnings(self): + """ + Test no warnings are produced when using the client. + """ + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + + client = docker_client() + client.images() + client.close() + del client + + assert len(w) == 0, \ + "No warnings produced: {0}".format(w[0].message) + + +#################### +# REGRESSION TESTS # +#################### + +class TestRegressions(BaseTestCase): + def test_443(self): + dfile = io.BytesIO() + with self.assertRaises(docker.errors.APIError) as exc: + for line in self.client.build(fileobj=dfile, tag="a/b/c"): + pass + self.assertEqual(exc.exception.response.status_code, 500) + dfile.close() + + def test_542(self): + self.client.start( + self.client.create_container(BUSYBOX, ['true']) + ) + result = self.client.containers(all=True, trunc=True) + self.assertEqual(len(result[0]['Id']), 12) + + def test_647(self): + with self.assertRaises(docker.errors.APIError): + self.client.inspect_image('gensokyo.jp//kirisame') + + def test_649(self): + self.client.timeout = None + ctnr = self.client.create_container(BUSYBOX, ['sleep', '2']) + self.client.start(ctnr) + self.client.stop(ctnr) + + def test_715(self): + ctnr = self.client.create_container(BUSYBOX, ['id', '-u'], user=1000) + self.client.start(ctnr) + self.client.wait(ctnr) + logs = self.client.logs(ctnr) + if six.PY3: + logs = logs.decode('utf-8') + assert logs == '1000\n' diff --git a/tests/integration_test.py b/tests/integration_test.py index 763c863..01987e7 100644 --- a/tests/integration_test.py +++ b/tests/integration_test.py @@ -1,1720 +1,4 @@ -# Copyright 2013 dotCloud inc. - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
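TestAutoDetectVersion above demonstrates the two equivalent ways of negotiating the API version with the daemon instead of hard-coding one: passing version='auto' to Client, or constructing an AutoVersionClient; in both cases the client's negotiated version matches client.version()['ApiVersion']. A short sketch using the same environment-based connection settings as the test helpers:

import docker
from docker.utils import kwargs_from_env

kwargs = kwargs_from_env(assert_hostname=False)

client = docker.Client(version='auto', **kwargs)
print(client._version, client.version()['ApiVersion'])   # negotiated == reported
client.close()

auto = docker.AutoVersionClient(**kwargs)
print(auto._version)
auto.close()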
-
-import base64
-import contextlib
-import json
-import io
-import os
-import shutil
-import signal
-import socket
-import tarfile
-import tempfile
-import threading
-import time
-import unittest
-import warnings
-
-import pytest
-import six
-from six.moves import BaseHTTPServer
-from six.moves import socketserver
-
-import docker
-from docker.errors import APIError, NotFound
-from docker.utils import kwargs_from_env
-
-from .base import requires_api_version
-from .test import Cleanup
-
-
-# FIXME: missing tests for
-# export; history; insert; port; push; tag; get; load; stats
-
-warnings.simplefilter('error')
-compare_version = docker.utils.compare_version
-
-EXEC_DRIVER = []
-BUSYBOX = 'busybox:buildroot-2014.02'
-
-
-def exec_driver_is_native():
-    global EXEC_DRIVER
-    if not EXEC_DRIVER:
-        c = docker_client()
-        EXEC_DRIVER = c.info()['ExecutionDriver']
-        c.close()
-    return EXEC_DRIVER.startswith('native')
-
-
-def docker_client(**kwargs):
-    return docker.Client(**docker_client_kwargs(**kwargs))
-
-
-def docker_client_kwargs(**kwargs):
-    client_kwargs = kwargs_from_env(assert_hostname=False)
-    client_kwargs.update(kwargs)
-    return client_kwargs
-
-
-def setup_module():
-    c = docker_client()
-    try:
-        c.inspect_image(BUSYBOX)
-    except NotFound:
-        c.pull(BUSYBOX)
-        c.inspect_image(BUSYBOX)
-    c.close()
-
-
-class BaseTestCase(unittest.TestCase):
-    tmp_imgs = []
-    tmp_containers = []
-    tmp_folders = []
-    tmp_volumes = []
-
-    def setUp(self):
-        if six.PY2:
-            self.assertRegex = self.assertRegexpMatches
-            self.assertCountEqual = self.assertItemsEqual
-        self.client = docker_client(timeout=60)
-        self.tmp_imgs = []
-        self.tmp_containers = []
-        self.tmp_folders = []
-        self.tmp_volumes = []
-
-    def tearDown(self):
-        for img in self.tmp_imgs:
-            try:
-                self.client.remove_image(img)
-            except docker.errors.APIError:
-                pass
-        for container in self.tmp_containers:
-            try:
-                self.client.stop(container, timeout=1)
-                self.client.remove_container(container)
-            except docker.errors.APIError:
-                pass
-        for folder in self.tmp_folders:
-            shutil.rmtree(folder)
-
-        for volume in self.tmp_volumes:
-            try:
-                self.client.remove_volume(volume)
-            except docker.errors.APIError:
-                pass
-
-        self.client.close()
-
-    def run_container(self, *args, **kwargs):
-        container = self.client.create_container(*args, **kwargs)
-        self.tmp_containers.append(container)
-        self.client.start(container)
-        exitcode = self.client.wait(container)
-
-        if exitcode != 0:
-            output = self.client.logs(container)
-            raise Exception(
-                "Container exited with code {}:\n{}"
-                .format(exitcode, output))
-
-        return container
-
-
-#########################
-#   INFORMATION TESTS   #
-#########################
-
-
-class TestVersion(BaseTestCase):
-    def runTest(self):
-        res = self.client.version()
-        self.assertIn('GoVersion', res)
-        self.assertIn('Version', res)
-        self.assertEqual(len(res['Version'].split('.')), 3)
-
-
-class TestInfo(BaseTestCase):
-    def runTest(self):
-        res = self.client.info()
-        self.assertIn('Containers', res)
-        self.assertIn('Images', res)
-        self.assertIn('Debug', res)
-
-
-class TestSearch(BaseTestCase):
-    def runTest(self):
-        self.client = docker_client(timeout=10)
-        res = self.client.search('busybox')
-        self.assertTrue(len(res) >= 1)
-        base_img = [x for x in res if x['name'] == 'busybox']
-        self.assertEqual(len(base_img), 1)
-        self.assertIn('description', base_img[0])
-
-###################
-#  LISTING TESTS  #
-###################
-
-
-class TestImages(BaseTestCase):
-    def runTest(self):
-        res1 = self.client.images(all=True)
-
self.assertIn('Id', res1[0]) - res10 = res1[0] - self.assertIn('Created', res10) - self.assertIn('RepoTags', res10) - distinct = [] - for img in res1: - if img['Id'] not in distinct: - distinct.append(img['Id']) - self.assertEqual(len(distinct), self.client.info()['Images']) - - -class TestImageIds(BaseTestCase): - def runTest(self): - res1 = self.client.images(quiet=True) - self.assertEqual(type(res1[0]), six.text_type) - - -class TestListContainers(BaseTestCase): - def runTest(self): - res0 = self.client.containers(all=True) - size = len(res0) - res1 = self.client.create_container(BUSYBOX, 'true') - self.assertIn('Id', res1) - self.client.start(res1['Id']) - self.tmp_containers.append(res1['Id']) - res2 = self.client.containers(all=True) - self.assertEqual(size + 1, len(res2)) - retrieved = [x for x in res2 if x['Id'].startswith(res1['Id'])] - self.assertEqual(len(retrieved), 1) - retrieved = retrieved[0] - self.assertIn('Command', retrieved) - self.assertEqual(retrieved['Command'], six.text_type('true')) - self.assertIn('Image', retrieved) - self.assertRegex(retrieved['Image'], r'busybox:.*') - self.assertIn('Status', retrieved) - -##################### -# CONTAINER TESTS # -##################### - - -class TestCreateContainer(BaseTestCase): - def runTest(self): - res = self.client.create_container(BUSYBOX, 'true') - self.assertIn('Id', res) - self.tmp_containers.append(res['Id']) - - -class TestCreateContainerWithBinds(BaseTestCase): - def setUp(self): - super(TestCreateContainerWithBinds, self).setUp() - - self.mount_dest = '/mnt' - - # Get a random pathname - we don't need it to exist locally - self.mount_origin = tempfile.mkdtemp() - shutil.rmtree(self.mount_origin) - - self.filename = 'shared.txt' - - self.run_with_volume( - False, - BUSYBOX, - ['touch', os.path.join(self.mount_dest, self.filename)], - ) - - def run_with_volume(self, ro, *args, **kwargs): - return self.run_container( - *args, - volumes={self.mount_dest: {}}, - host_config=self.client.create_host_config( - binds={ - self.mount_origin: { - 'bind': self.mount_dest, - 'ro': ro, - }, - }, - network_mode='none' - ), - **kwargs - ) - - def test_rw(self): - container = self.run_with_volume( - False, - BUSYBOX, - ['ls', self.mount_dest], - ) - logs = self.client.logs(container) - - if six.PY3: - logs = logs.decode('utf-8') - self.assertIn(self.filename, logs) - inspect_data = self.client.inspect_container(container) - self.check_container_data(inspect_data, True) - - def test_ro(self): - container = self.run_with_volume( - True, - BUSYBOX, - ['ls', self.mount_dest], - ) - logs = self.client.logs(container) - - if six.PY3: - logs = logs.decode('utf-8') - self.assertIn(self.filename, logs) - - inspect_data = self.client.inspect_container(container) - self.check_container_data(inspect_data, False) - - def check_container_data(self, inspect_data, rw): - if docker.utils.compare_version('1.20', self.client._version) < 0: - self.assertIn('Volumes', inspect_data) - self.assertIn(self.mount_dest, inspect_data['Volumes']) - self.assertEqual( - self.mount_origin, inspect_data['Volumes'][self.mount_dest] - ) - self.assertIn(self.mount_dest, inspect_data['VolumesRW']) - self.assertFalse(inspect_data['VolumesRW'][self.mount_dest]) - else: - self.assertIn('Mounts', inspect_data) - filtered = list(filter( - lambda x: x['Destination'] == self.mount_dest, - inspect_data['Mounts'] - )) - self.assertEqual(len(filtered), 1) - mount_data = filtered[0] - self.assertEqual(mount_data['Source'], self.mount_origin) - 
self.assertEqual(mount_data['RW'], rw) - - -@requires_api_version('1.20') -class CreateContainerWithGroupAddTest(BaseTestCase): - def test_group_id_ints(self): - container = self.client.create_container( - BUSYBOX, 'id -G', - host_config=self.client.create_host_config(group_add=[1000, 1001]) - ) - self.tmp_containers.append(container) - self.client.start(container) - self.client.wait(container) - - logs = self.client.logs(container) - if six.PY3: - logs = logs.decode('utf-8') - groups = logs.strip().split(' ') - self.assertIn('1000', groups) - self.assertIn('1001', groups) - - def test_group_id_strings(self): - container = self.client.create_container( - BUSYBOX, 'id -G', host_config=self.client.create_host_config( - group_add=['1000', '1001'] - ) - ) - self.tmp_containers.append(container) - self.client.start(container) - self.client.wait(container) - - logs = self.client.logs(container) - if six.PY3: - logs = logs.decode('utf-8') - - groups = logs.strip().split(' ') - self.assertIn('1000', groups) - self.assertIn('1001', groups) - - -class CreateContainerWithLogConfigTest(BaseTestCase): - def test_valid_log_driver_and_log_opt(self): - log_config = docker.utils.LogConfig( - type='json-file', - config={'max-file': '100'} - ) - - container = self.client.create_container( - BUSYBOX, ['true'], - host_config=self.client.create_host_config(log_config=log_config) - ) - self.tmp_containers.append(container['Id']) - self.client.start(container) - - info = self.client.inspect_container(container) - container_log_config = info['HostConfig']['LogConfig'] - - self.assertEqual(container_log_config['Type'], log_config.type) - self.assertEqual(container_log_config['Config'], log_config.config) - - def test_invalid_log_driver_raises_exception(self): - log_config = docker.utils.LogConfig( - type='asdf-nope', - config={} - ) - - container = self.client.create_container( - BUSYBOX, ['true'], - host_config=self.client.create_host_config(log_config=log_config) - ) - - expected_msg = "logger: no log driver named 'asdf-nope' is registered" - - with pytest.raises(APIError) as excinfo: - # raises an internal server error 500 - self.client.start(container) - - assert expected_msg in str(excinfo.value) - - @pytest.mark.skipif(True, - reason="https://github.com/docker/docker/issues/15633") - def test_valid_no_log_driver_specified(self): - log_config = docker.utils.LogConfig( - type="", - config={'max-file': '100'} - ) - - container = self.client.create_container( - BUSYBOX, ['true'], - host_config=self.client.create_host_config(log_config=log_config) - ) - self.tmp_containers.append(container['Id']) - self.client.start(container) - - info = self.client.inspect_container(container) - container_log_config = info['HostConfig']['LogConfig'] - - self.assertEqual(container_log_config['Type'], "json-file") - self.assertEqual(container_log_config['Config'], log_config.config) - - def test_valid_no_config_specified(self): - log_config = docker.utils.LogConfig( - type="json-file", - config=None - ) - - container = self.client.create_container( - BUSYBOX, ['true'], - host_config=self.client.create_host_config(log_config=log_config) - ) - self.tmp_containers.append(container['Id']) - self.client.start(container) - - info = self.client.inspect_container(container) - container_log_config = info['HostConfig']['LogConfig'] - - self.assertEqual(container_log_config['Type'], "json-file") - self.assertEqual(container_log_config['Config'], {}) - - -class TestCreateContainerReadOnlyFs(BaseTestCase): - def runTest(self): - if not 
exec_driver_is_native(): - pytest.skip('Exec driver not native') - - ctnr = self.client.create_container( - BUSYBOX, ['mkdir', '/shrine'], - host_config=self.client.create_host_config( - read_only=True, network_mode='none' - ) - ) - self.assertIn('Id', ctnr) - self.tmp_containers.append(ctnr['Id']) - self.client.start(ctnr) - res = self.client.wait(ctnr) - self.assertNotEqual(res, 0) - - -class TestCreateContainerWithName(BaseTestCase): - def runTest(self): - res = self.client.create_container(BUSYBOX, 'true', name='foobar') - self.assertIn('Id', res) - self.tmp_containers.append(res['Id']) - inspect = self.client.inspect_container(res['Id']) - self.assertIn('Name', inspect) - self.assertEqual('/foobar', inspect['Name']) - - -class TestRenameContainer(BaseTestCase): - def runTest(self): - version = self.client.version()['Version'] - name = 'hong_meiling' - res = self.client.create_container(BUSYBOX, 'true') - self.assertIn('Id', res) - self.tmp_containers.append(res['Id']) - self.client.rename(res, name) - inspect = self.client.inspect_container(res['Id']) - self.assertIn('Name', inspect) - if version == '1.5.0': - self.assertEqual(name, inspect['Name']) - else: - self.assertEqual('/{0}'.format(name), inspect['Name']) - - -class TestStartContainer(BaseTestCase): - def runTest(self): - res = self.client.create_container(BUSYBOX, 'true') - self.assertIn('Id', res) - self.tmp_containers.append(res['Id']) - self.client.start(res['Id']) - inspect = self.client.inspect_container(res['Id']) - self.assertIn('Config', inspect) - self.assertIn('Id', inspect) - self.assertTrue(inspect['Id'].startswith(res['Id'])) - self.assertIn('Image', inspect) - self.assertIn('State', inspect) - self.assertIn('Running', inspect['State']) - if not inspect['State']['Running']: - self.assertIn('ExitCode', inspect['State']) - self.assertEqual(inspect['State']['ExitCode'], 0) - - -class TestStartContainerWithDictInsteadOfId(BaseTestCase): - def runTest(self): - res = self.client.create_container(BUSYBOX, 'true') - self.assertIn('Id', res) - self.tmp_containers.append(res['Id']) - self.client.start(res) - inspect = self.client.inspect_container(res['Id']) - self.assertIn('Config', inspect) - self.assertIn('Id', inspect) - self.assertTrue(inspect['Id'].startswith(res['Id'])) - self.assertIn('Image', inspect) - self.assertIn('State', inspect) - self.assertIn('Running', inspect['State']) - if not inspect['State']['Running']: - self.assertIn('ExitCode', inspect['State']) - self.assertEqual(inspect['State']['ExitCode'], 0) - - -class TestCreateContainerPrivileged(BaseTestCase): - def runTest(self): - res = self.client.create_container( - BUSYBOX, 'true', host_config=self.client.create_host_config( - privileged=True, network_mode='none' - ) - ) - self.assertIn('Id', res) - self.tmp_containers.append(res['Id']) - self.client.start(res['Id']) - inspect = self.client.inspect_container(res['Id']) - self.assertIn('Config', inspect) - self.assertIn('Id', inspect) - self.assertTrue(inspect['Id'].startswith(res['Id'])) - self.assertIn('Image', inspect) - self.assertIn('State', inspect) - self.assertIn('Running', inspect['State']) - if not inspect['State']['Running']: - self.assertIn('ExitCode', inspect['State']) - self.assertEqual(inspect['State']['ExitCode'], 0) - # Since Nov 2013, the Privileged flag is no longer part of the - # container's config exposed via the API (safety concerns?). 
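The read-only and privileged test cases above both route container-level switches through create_host_config() rather than create_container() itself; network_mode='none' in the tests simply keeps the throwaway containers off the network. A small sketch of the read-only case, assuming the same busybox image; everything else is illustrative:

import docker

client = docker.Client(version='auto')

# A read-only rootfs makes the mkdir fail, which is what the test asserts.
ctnr = client.create_container(
    'busybox:buildroot-2014.02', ['mkdir', '/shrine'],
    host_config=client.create_host_config(
        read_only=True,          # privileged=True would be passed the same way
        network_mode='none'
    )
)
client.start(ctnr)
print(client.wait(ctnr))   # non-zero exit code
client.remove_container(ctnr)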
- # - if 'Privileged' in inspect['Config']: - self.assertEqual(inspect['Config']['Privileged'], True) - - -class TestWait(BaseTestCase): - def runTest(self): - res = self.client.create_container(BUSYBOX, ['sleep', '3']) - id = res['Id'] - self.tmp_containers.append(id) - self.client.start(id) - exitcode = self.client.wait(id) - self.assertEqual(exitcode, 0) - inspect = self.client.inspect_container(id) - self.assertIn('Running', inspect['State']) - self.assertEqual(inspect['State']['Running'], False) - self.assertIn('ExitCode', inspect['State']) - self.assertEqual(inspect['State']['ExitCode'], exitcode) - - -class TestWaitWithDictInsteadOfId(BaseTestCase): - def runTest(self): - res = self.client.create_container(BUSYBOX, ['sleep', '3']) - id = res['Id'] - self.tmp_containers.append(id) - self.client.start(res) - exitcode = self.client.wait(res) - self.assertEqual(exitcode, 0) - inspect = self.client.inspect_container(res) - self.assertIn('Running', inspect['State']) - self.assertEqual(inspect['State']['Running'], False) - self.assertIn('ExitCode', inspect['State']) - self.assertEqual(inspect['State']['ExitCode'], exitcode) - - -class TestLogs(BaseTestCase): - def runTest(self): - snippet = 'Flowering Nights (Sakuya Iyazoi)' - container = self.client.create_container( - BUSYBOX, 'echo {0}'.format(snippet) - ) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - exitcode = self.client.wait(id) - self.assertEqual(exitcode, 0) - logs = self.client.logs(id) - self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii')) - - -class TestLogsWithTailOption(BaseTestCase): - def runTest(self): - snippet = '''Line1 -Line2''' - container = self.client.create_container( - BUSYBOX, 'echo "{0}"'.format(snippet) - ) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - exitcode = self.client.wait(id) - self.assertEqual(exitcode, 0) - logs = self.client.logs(id, tail=1) - self.assertEqual(logs, ('Line2\n').encode(encoding='ascii')) - - -# class TestLogsStreaming(BaseTestCase): -# def runTest(self): -# snippet = 'Flowering Nights (Sakuya Iyazoi)' -# container = self.client.create_container( -# BUSYBOX, 'echo {0}'.format(snippet) -# ) -# id = container['Id'] -# self.client.start(id) -# self.tmp_containers.append(id) -# logs = bytes() if six.PY3 else str() -# for chunk in self.client.logs(id, stream=True): -# logs += chunk - -# exitcode = self.client.wait(id) -# self.assertEqual(exitcode, 0) - -# self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii')) - - -class TestLogsWithDictInsteadOfId(BaseTestCase): - def runTest(self): - snippet = 'Flowering Nights (Sakuya Iyazoi)' - container = self.client.create_container( - BUSYBOX, 'echo {0}'.format(snippet) - ) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - exitcode = self.client.wait(id) - self.assertEqual(exitcode, 0) - logs = self.client.logs(container) - self.assertEqual(logs, (snippet + '\n').encode(encoding='ascii')) - - -class TestDiff(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['touch', '/test']) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - exitcode = self.client.wait(id) - self.assertEqual(exitcode, 0) - diff = self.client.diff(id) - test_diff = [x for x in diff if x.get('Path', None) == '/test'] - self.assertEqual(len(test_diff), 1) - self.assertIn('Kind', test_diff[0]) - self.assertEqual(test_diff[0]['Kind'], 1) - - -class TestDiffWithDictInsteadOfId(BaseTestCase): - 
def runTest(self): - container = self.client.create_container(BUSYBOX, ['touch', '/test']) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - exitcode = self.client.wait(id) - self.assertEqual(exitcode, 0) - diff = self.client.diff(container) - test_diff = [x for x in diff if x.get('Path', None) == '/test'] - self.assertEqual(len(test_diff), 1) - self.assertIn('Kind', test_diff[0]) - self.assertEqual(test_diff[0]['Kind'], 1) - - -class TestStop(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - self.client.stop(id, timeout=2) - container_info = self.client.inspect_container(id) - self.assertIn('State', container_info) - state = container_info['State'] - self.assertIn('ExitCode', state) - if exec_driver_is_native(): - self.assertNotEqual(state['ExitCode'], 0) - self.assertIn('Running', state) - self.assertEqual(state['Running'], False) - - -class TestStopWithDictInsteadOfId(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) - self.assertIn('Id', container) - id = container['Id'] - self.client.start(container) - self.tmp_containers.append(id) - self.client.stop(container, timeout=2) - container_info = self.client.inspect_container(id) - self.assertIn('State', container_info) - state = container_info['State'] - self.assertIn('ExitCode', state) - if exec_driver_is_native(): - self.assertNotEqual(state['ExitCode'], 0) - self.assertIn('Running', state) - self.assertEqual(state['Running'], False) - - -class TestKill(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - self.client.kill(id) - container_info = self.client.inspect_container(id) - self.assertIn('State', container_info) - state = container_info['State'] - self.assertIn('ExitCode', state) - if exec_driver_is_native(): - self.assertNotEqual(state['ExitCode'], 0) - self.assertIn('Running', state) - self.assertEqual(state['Running'], False) - - -class TestKillWithDictInsteadOfId(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - self.client.kill(container) - container_info = self.client.inspect_container(id) - self.assertIn('State', container_info) - state = container_info['State'] - self.assertIn('ExitCode', state) - if exec_driver_is_native(): - self.assertNotEqual(state['ExitCode'], 0) - self.assertIn('Running', state) - self.assertEqual(state['Running'], False) - - -class TestKillWithSignal(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['sleep', '60']) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - self.client.kill(id, signal=signal.SIGKILL) - exitcode = self.client.wait(id) - self.assertNotEqual(exitcode, 0) - container_info = self.client.inspect_container(id) - self.assertIn('State', container_info) - state = container_info['State'] - self.assertIn('ExitCode', state) - self.assertNotEqual(state['ExitCode'], 0) - self.assertIn('Running', state) - self.assertEqual(state['Running'], False, state) - - -class TestPort(BaseTestCase): - def runTest(self): - - port_bindings = { - '1111': ('127.0.0.1', '4567'), - '2222': ('127.0.0.1', '4568') - } - - container = 
self.client.create_container( - BUSYBOX, ['sleep', '60'], ports=list(port_bindings.keys()), - host_config=self.client.create_host_config( - port_bindings=port_bindings, network_mode='bridge' - ) - ) - id = container['Id'] - - self.client.start(container) - - # Call the port function on each biding and compare expected vs actual - for port in port_bindings: - actual_bindings = self.client.port(container, port) - port_binding = actual_bindings.pop() - - ip, host_port = port_binding['HostIp'], port_binding['HostPort'] - - self.assertEqual(ip, port_bindings[port][0]) - self.assertEqual(host_port, port_bindings[port][1]) - - self.client.kill(id) - - -class TestMacAddress(BaseTestCase): - def runTest(self): - mac_address_expected = "02:42:ac:11:00:0a" - container = self.client.create_container( - BUSYBOX, ['sleep', '60'], mac_address=mac_address_expected) - - id = container['Id'] - - self.client.start(container) - res = self.client.inspect_container(container['Id']) - self.assertEqual(mac_address_expected, - res['NetworkSettings']['MacAddress']) - - self.client.kill(id) - - -class TestRestart(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - info = self.client.inspect_container(id) - self.assertIn('State', info) - self.assertIn('StartedAt', info['State']) - start_time1 = info['State']['StartedAt'] - self.client.restart(id, timeout=2) - info2 = self.client.inspect_container(id) - self.assertIn('State', info2) - self.assertIn('StartedAt', info2['State']) - start_time2 = info2['State']['StartedAt'] - self.assertNotEqual(start_time1, start_time2) - self.assertIn('Running', info2['State']) - self.assertEqual(info2['State']['Running'], True) - self.client.kill(id) - - -class TestRestartWithDictInsteadOfId(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) - self.assertIn('Id', container) - id = container['Id'] - self.client.start(container) - self.tmp_containers.append(id) - info = self.client.inspect_container(id) - self.assertIn('State', info) - self.assertIn('StartedAt', info['State']) - start_time1 = info['State']['StartedAt'] - self.client.restart(container, timeout=2) - info2 = self.client.inspect_container(id) - self.assertIn('State', info2) - self.assertIn('StartedAt', info2['State']) - start_time2 = info2['State']['StartedAt'] - self.assertNotEqual(start_time1, start_time2) - self.assertIn('Running', info2['State']) - self.assertEqual(info2['State']['Running'], True) - self.client.kill(id) - - -class TestRemoveContainer(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['true']) - id = container['Id'] - self.client.start(id) - self.client.wait(id) - self.client.remove_container(id) - containers = self.client.containers(all=True) - res = [x for x in containers if 'Id' in x and x['Id'].startswith(id)] - self.assertEqual(len(res), 0) - - -class TestRemoveContainerWithDictInsteadOfId(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['true']) - id = container['Id'] - self.client.start(id) - self.client.wait(id) - self.client.remove_container(container) - containers = self.client.containers(all=True) - res = [x for x in containers if 'Id' in x and x['Id'].startswith(id)] - self.assertEqual(len(res), 0) - - -class TestCreateContainerWithVolumesFrom(BaseTestCase): - def runTest(self): - vol_names = ['foobar_vol0', 'foobar_vol1'] - - 
res0 = self.client.create_container( - BUSYBOX, 'true', name=vol_names[0] - ) - container1_id = res0['Id'] - self.tmp_containers.append(container1_id) - self.client.start(container1_id) - - res1 = self.client.create_container( - BUSYBOX, 'true', name=vol_names[1] - ) - container2_id = res1['Id'] - self.tmp_containers.append(container2_id) - self.client.start(container2_id) - with self.assertRaises(docker.errors.DockerException): - self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True, - volumes_from=vol_names - ) - res2 = self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True, - host_config=self.client.create_host_config( - volumes_from=vol_names, network_mode='none' - ) - ) - container3_id = res2['Id'] - self.tmp_containers.append(container3_id) - self.client.start(container3_id) - - info = self.client.inspect_container(res2['Id']) - self.assertCountEqual(info['HostConfig']['VolumesFrom'], vol_names) - - -class TestCreateContainerWithLinks(BaseTestCase): - def runTest(self): - res0 = self.client.create_container( - BUSYBOX, 'cat', - detach=True, stdin_open=True, - environment={'FOO': '1'}) - - container1_id = res0['Id'] - self.tmp_containers.append(container1_id) - - self.client.start(container1_id) - - res1 = self.client.create_container( - BUSYBOX, 'cat', - detach=True, stdin_open=True, - environment={'FOO': '1'}) - - container2_id = res1['Id'] - self.tmp_containers.append(container2_id) - - self.client.start(container2_id) - - # we don't want the first / - link_path1 = self.client.inspect_container(container1_id)['Name'][1:] - link_alias1 = 'mylink1' - link_env_prefix1 = link_alias1.upper() - - link_path2 = self.client.inspect_container(container2_id)['Name'][1:] - link_alias2 = 'mylink2' - link_env_prefix2 = link_alias2.upper() - - res2 = self.client.create_container( - BUSYBOX, 'env', host_config=self.client.create_host_config( - links={link_path1: link_alias1, link_path2: link_alias2}, - network_mode='none' - ) - ) - container3_id = res2['Id'] - self.tmp_containers.append(container3_id) - self.client.start(container3_id) - self.assertEqual(self.client.wait(container3_id), 0) - - logs = self.client.logs(container3_id) - if six.PY3: - logs = logs.decode('utf-8') - self.assertIn('{0}_NAME='.format(link_env_prefix1), logs) - self.assertIn('{0}_ENV_FOO=1'.format(link_env_prefix1), logs) - self.assertIn('{0}_NAME='.format(link_env_prefix2), logs) - self.assertIn('{0}_ENV_FOO=1'.format(link_env_prefix2), logs) - - -class TestRestartingContainer(BaseTestCase): - def runTest(self): - container = self.client.create_container( - BUSYBOX, ['sleep', '2'], - host_config=self.client.create_host_config( - restart_policy={"Name": "always", "MaximumRetryCount": 0}, - network_mode='none' - ) - ) - id = container['Id'] - self.client.start(id) - self.client.wait(id) - with self.assertRaises(docker.errors.APIError) as exc: - self.client.remove_container(id) - err = exc.exception.response.text - self.assertIn( - 'You cannot remove a running container', err - ) - self.client.remove_container(id, force=True) - - -class TestExecuteCommand(BaseTestCase): - def runTest(self): - if not exec_driver_is_native(): - pytest.skip('Exec driver not native') - - container = self.client.create_container(BUSYBOX, 'cat', - detach=True, stdin_open=True) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - - res = self.client.exec_create(id, ['echo', 'hello']) - self.assertIn('Id', res) - - exec_log = self.client.exec_start(res) - 
self.assertEqual(exec_log, b'hello\n') - - -class TestExecuteCommandString(BaseTestCase): - def runTest(self): - if not exec_driver_is_native(): - pytest.skip('Exec driver not native') - - container = self.client.create_container(BUSYBOX, 'cat', - detach=True, stdin_open=True) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - - res = self.client.exec_create(id, 'echo hello world') - self.assertIn('Id', res) - - exec_log = self.client.exec_start(res) - self.assertEqual(exec_log, b'hello world\n') - - -class TestExecuteCommandStringAsUser(BaseTestCase): - def runTest(self): - if not exec_driver_is_native(): - pytest.skip('Exec driver not native') - - container = self.client.create_container(BUSYBOX, 'cat', - detach=True, stdin_open=True) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - - res = self.client.exec_create(id, 'whoami', user='default') - self.assertIn('Id', res) - - exec_log = self.client.exec_start(res) - self.assertEqual(exec_log, b'default\n') - - -class TestExecuteCommandStringAsRoot(BaseTestCase): - def runTest(self): - if not exec_driver_is_native(): - pytest.skip('Exec driver not native') - - container = self.client.create_container(BUSYBOX, 'cat', - detach=True, stdin_open=True) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - - res = self.client.exec_create(id, 'whoami') - self.assertIn('Id', res) - - exec_log = self.client.exec_start(res) - self.assertEqual(exec_log, b'root\n') - - -class TestExecuteCommandStreaming(BaseTestCase): - def runTest(self): - if not exec_driver_is_native(): - pytest.skip('Exec driver not native') - - container = self.client.create_container(BUSYBOX, 'cat', - detach=True, stdin_open=True) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - - exec_id = self.client.exec_create(id, ['echo', 'hello\nworld']) - self.assertIn('Id', exec_id) - - res = b'' - for chunk in self.client.exec_start(exec_id, stream=True): - res += chunk - self.assertEqual(res, b'hello\nworld\n') - - -class TestExecInspect(BaseTestCase): - def runTest(self): - if not exec_driver_is_native(): - pytest.skip('Exec driver not native') - - container = self.client.create_container(BUSYBOX, 'cat', - detach=True, stdin_open=True) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - - exec_id = self.client.exec_create(id, ['mkdir', '/does/not/exist']) - self.assertIn('Id', exec_id) - self.client.exec_start(exec_id) - exec_info = self.client.exec_inspect(exec_id) - self.assertIn('ExitCode', exec_info) - self.assertNotEqual(exec_info['ExitCode'], 0) - - -class TestRunContainerStreaming(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, '/bin/sh', - detach=True, stdin_open=True) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - sock = self.client.attach_socket(container, ws=False) - self.assertTrue(sock.fileno() > -1) - - -class TestPauseUnpauseContainer(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) - id = container['Id'] - self.tmp_containers.append(id) - self.client.start(container) - self.client.pause(id) - container_info = self.client.inspect_container(id) - self.assertIn('State', container_info) - state = container_info['State'] - self.assertIn('ExitCode', state) - self.assertEqual(state['ExitCode'], 0) - self.assertIn('Running', state) - self.assertEqual(state['Running'], True) - 
self.assertIn('Paused', state) - self.assertEqual(state['Paused'], True) - - self.client.unpause(id) - container_info = self.client.inspect_container(id) - self.assertIn('State', container_info) - state = container_info['State'] - self.assertIn('ExitCode', state) - self.assertEqual(state['ExitCode'], 0) - self.assertIn('Running', state) - self.assertEqual(state['Running'], True) - self.assertIn('Paused', state) - self.assertEqual(state['Paused'], False) - - -class TestCreateContainerWithHostPidMode(BaseTestCase): - def runTest(self): - ctnr = self.client.create_container( - BUSYBOX, 'true', host_config=self.client.create_host_config( - pid_mode='host', network_mode='none' - ) - ) - self.assertIn('Id', ctnr) - self.tmp_containers.append(ctnr['Id']) - self.client.start(ctnr) - inspect = self.client.inspect_container(ctnr) - self.assertIn('HostConfig', inspect) - host_config = inspect['HostConfig'] - self.assertIn('PidMode', host_config) - self.assertEqual(host_config['PidMode'], 'host') - - -################# -# LINKS TESTS # -################# - - -class TestRemoveLink(BaseTestCase): - def runTest(self): - # Create containers - container1 = self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True - ) - container1_id = container1['Id'] - self.tmp_containers.append(container1_id) - self.client.start(container1_id) - - # Create Link - # we don't want the first / - link_path = self.client.inspect_container(container1_id)['Name'][1:] - link_alias = 'mylink' - - container2 = self.client.create_container( - BUSYBOX, 'cat', host_config=self.client.create_host_config( - links={link_path: link_alias}, network_mode='none' - ) - ) - container2_id = container2['Id'] - self.tmp_containers.append(container2_id) - self.client.start(container2_id) - - # Remove link - linked_name = self.client.inspect_container(container2_id)['Name'][1:] - link_name = '%s/%s' % (linked_name, link_alias) - self.client.remove_container(link_name, link=True) - - # Link is gone - containers = self.client.containers(all=True) - retrieved = [x for x in containers if link_name in x['Names']] - self.assertEqual(len(retrieved), 0) - - # Containers are still there - retrieved = [ - x for x in containers if x['Id'].startswith(container1_id) or - x['Id'].startswith(container2_id) - ] - self.assertEqual(len(retrieved), 2) - -################## -# IMAGES TESTS # -################## - - -class TestPull(BaseTestCase): - def runTest(self): - try: - self.client.remove_image('hello-world') - except docker.errors.APIError: - pass - res = self.client.pull('hello-world') - self.tmp_imgs.append('hello-world') - self.assertEqual(type(res), six.text_type) - self.assertGreaterEqual( - len(self.client.images('hello-world')), 1 - ) - img_info = self.client.inspect_image('hello-world') - self.assertIn('Id', img_info) - - -class TestPullStream(BaseTestCase): - def runTest(self): - try: - self.client.remove_image('hello-world') - except docker.errors.APIError: - pass - stream = self.client.pull('hello-world', stream=True) - self.tmp_imgs.append('hello-world') - for chunk in stream: - if six.PY3: - chunk = chunk.decode('utf-8') - json.loads(chunk) # ensure chunk is a single, valid JSON blob - self.assertGreaterEqual( - len(self.client.images('hello-world')), 1 - ) - img_info = self.client.inspect_image('hello-world') - self.assertIn('Id', img_info) - - -class TestCommit(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['touch', '/test']) - id = container['Id'] - self.client.start(id) - 
self.tmp_containers.append(id) - res = self.client.commit(id) - self.assertIn('Id', res) - img_id = res['Id'] - self.tmp_imgs.append(img_id) - img = self.client.inspect_image(img_id) - self.assertIn('Container', img) - self.assertTrue(img['Container'].startswith(id)) - self.assertIn('ContainerConfig', img) - self.assertIn('Image', img['ContainerConfig']) - self.assertEqual(BUSYBOX, img['ContainerConfig']['Image']) - busybox_id = self.client.inspect_image(BUSYBOX)['Id'] - self.assertIn('Parent', img) - self.assertEqual(img['Parent'], busybox_id) - - -class TestRemoveImage(BaseTestCase): - def runTest(self): - container = self.client.create_container(BUSYBOX, ['touch', '/test']) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - res = self.client.commit(id) - self.assertIn('Id', res) - img_id = res['Id'] - self.tmp_imgs.append(img_id) - self.client.remove_image(img_id, force=True) - images = self.client.images(all=True) - res = [x for x in images if x['Id'].startswith(img_id)] - self.assertEqual(len(res), 0) - - -################## -# IMPORT TESTS # -################## - - -class ImportTestCase(BaseTestCase): - '''Base class for `docker import` test cases.''' - - TAR_SIZE = 512 * 1024 - - def write_dummy_tar_content(self, n_bytes, tar_fd): - def extend_file(f, n_bytes): - f.seek(n_bytes - 1) - f.write(bytearray([65])) - f.seek(0) - - tar = tarfile.TarFile(fileobj=tar_fd, mode='w') - - with tempfile.NamedTemporaryFile() as f: - extend_file(f, n_bytes) - tarinfo = tar.gettarinfo(name=f.name, arcname='testdata') - tar.addfile(tarinfo, fileobj=f) - - tar.close() - - @contextlib.contextmanager - def dummy_tar_stream(self, n_bytes): - '''Yields a stream that is valid tar data of size n_bytes.''' - with tempfile.NamedTemporaryFile() as tar_file: - self.write_dummy_tar_content(n_bytes, tar_file) - tar_file.seek(0) - yield tar_file - - @contextlib.contextmanager - def dummy_tar_file(self, n_bytes): - '''Yields the name of a valid tar file of size n_bytes.''' - with tempfile.NamedTemporaryFile() as tar_file: - self.write_dummy_tar_content(n_bytes, tar_file) - tar_file.seek(0) - yield tar_file.name - - -class TestImportFromBytes(ImportTestCase): - '''Tests importing an image from in-memory byte data.''' - - def runTest(self): - with self.dummy_tar_stream(n_bytes=500) as f: - content = f.read() - - # The generic import_image() function cannot import in-memory bytes - # data that happens to be represented as a string type, because - # import_image() will try to use it as a filename and usually then - # trigger an exception. So we test the import_image_from_data() - # function instead. 
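TestPull and TestPullStream above show the two shapes of pull(): without stream=True the whole progress output comes back as one string, while the streaming form yields one JSON progress object per chunk (the decode step below mirrors the six.PY3 handling in the test). A minimal sketch using the same hello-world image:

import json
import docker

client = docker.Client(version='auto')
for chunk in client.pull('hello-world', stream=True):
    event = json.loads(chunk.decode('utf-8'))   # each chunk is a single JSON object
    if 'error' in event:
        raise RuntimeError(event['error'])
    print(event.get('status'))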
- statuses = self.client.import_image_from_data( - content, repository='test/import-from-bytes') - - result_text = statuses.splitlines()[-1] - result = json.loads(result_text) - - self.assertNotIn('error', result) - - img_id = result['status'] - self.tmp_imgs.append(img_id) - - -class TestImportFromFile(ImportTestCase): - '''Tests importing an image from a tar file on disk.''' - - def runTest(self): - with self.dummy_tar_file(n_bytes=self.TAR_SIZE) as tar_filename: - # statuses = self.client.import_image( - # src=tar_filename, repository='test/import-from-file') - statuses = self.client.import_image_from_file( - tar_filename, repository='test/import-from-file') - - result_text = statuses.splitlines()[-1] - result = json.loads(result_text) - - self.assertNotIn('error', result) - - self.assertIn('status', result) - img_id = result['status'] - self.tmp_imgs.append(img_id) - - -class TestImportFromStream(ImportTestCase): - '''Tests importing an image from a stream containing tar data.''' - - def runTest(self): - with self.dummy_tar_stream(n_bytes=self.TAR_SIZE) as tar_stream: - statuses = self.client.import_image( - src=tar_stream, repository='test/import-from-stream') - # statuses = self.client.import_image_from_stream( - # tar_stream, repository='test/import-from-stream') - result_text = statuses.splitlines()[-1] - result = json.loads(result_text) - - self.assertNotIn('error', result) - - self.assertIn('status', result) - img_id = result['status'] - self.tmp_imgs.append(img_id) - - -class TestImportFromURL(ImportTestCase): - '''Tests downloading an image over HTTP.''' - - @contextlib.contextmanager - def temporary_http_file_server(self, stream): - '''Serve data from an IO stream over HTTP.''' - - class Handler(BaseHTTPServer.BaseHTTPRequestHandler): - def do_GET(self): - self.send_response(200) - self.send_header('Content-Type', 'application/x-tar') - self.end_headers() - shutil.copyfileobj(stream, self.wfile) - - server = socketserver.TCPServer(('', 0), Handler) - thread = threading.Thread(target=server.serve_forever) - thread.setDaemon(True) - thread.start() - - yield 'http://%s:%s' % (socket.gethostname(), server.server_address[1]) - - server.shutdown() - - @pytest.mark.skipif(True, reason="Doesn't work inside a container - FIXME") - def runTest(self): - # The crappy test HTTP server doesn't handle large files well, so use - # a small file. 
- TAR_SIZE = 10240 - - with self.dummy_tar_stream(n_bytes=TAR_SIZE) as tar_data: - with self.temporary_http_file_server(tar_data) as url: - statuses = self.client.import_image( - src=url, repository='test/import-from-url') - - result_text = statuses.splitlines()[-1] - result = json.loads(result_text) - - self.assertNotIn('error', result) - - self.assertIn('status', result) - img_id = result['status'] - self.tmp_imgs.append(img_id) - - -################# -# VOLUMES TESTS # -################# - -@requires_api_version('1.21') -class TestVolumes(BaseTestCase): - def test_create_volume(self): - name = 'perfectcherryblossom' - self.tmp_volumes.append(name) - result = self.client.create_volume(name) - self.assertIn('Name', result) - self.assertEqual(result['Name'], name) - self.assertIn('Driver', result) - self.assertEqual(result['Driver'], 'local') - - def test_create_volume_invalid_driver(self): - driver_name = 'invalid.driver' - - with pytest.raises(docker.errors.NotFound): - self.client.create_volume('perfectcherryblossom', driver_name) - - def test_list_volumes(self): - name = 'imperishablenight' - self.tmp_volumes.append(name) - volume_info = self.client.create_volume(name) - result = self.client.volumes() - self.assertIn('Volumes', result) - volumes = result['Volumes'] - self.assertIn(volume_info, volumes) - - def test_inspect_volume(self): - name = 'embodimentofscarletdevil' - self.tmp_volumes.append(name) - volume_info = self.client.create_volume(name) - result = self.client.inspect_volume(name) - self.assertEqual(volume_info, result) - - def test_inspect_nonexistent_volume(self): - name = 'embodimentofscarletdevil' - with pytest.raises(docker.errors.NotFound): - self.client.inspect_volume(name) - - def test_remove_volume(self): - name = 'shootthebullet' - self.tmp_volumes.append(name) - self.client.create_volume(name) - result = self.client.remove_volume(name) - self.assertTrue(result) - - def test_remove_nonexistent_volume(self): - name = 'shootthebullet' - with pytest.raises(docker.errors.NotFound): - self.client.remove_volume(name) - - -################# -# BUILDER TESTS # -################# - -class TestBuildStream(BaseTestCase): - def runTest(self): - script = io.BytesIO('\n'.join([ - 'FROM busybox', - 'MAINTAINER docker-py', - 'RUN mkdir -p /tmp/test', - 'EXPOSE 8080', - 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' - ' /tmp/silence.tar.gz' - ]).encode('ascii')) - stream = self.client.build(fileobj=script, stream=True) - logs = '' - for chunk in stream: - if six.PY3: - chunk = chunk.decode('utf-8') - json.loads(chunk) # ensure chunk is a single, valid JSON blob - logs += chunk - self.assertNotEqual(logs, '') - - -class TestBuildFromStringIO(BaseTestCase): - def runTest(self): - if six.PY3: - return - script = io.StringIO(six.text_type('\n').join([ - 'FROM busybox', - 'MAINTAINER docker-py', - 'RUN mkdir -p /tmp/test', - 'EXPOSE 8080', - 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' - ' /tmp/silence.tar.gz' - ])) - stream = self.client.build(fileobj=script, stream=True) - logs = '' - for chunk in stream: - if six.PY3: - chunk = chunk.decode('utf-8') - logs += chunk - self.assertNotEqual(logs, '') - - -@requires_api_version('1.8') -class TestBuildWithDockerignore(Cleanup, BaseTestCase): - def runTest(self): - base_dir = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, base_dir) - - with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f: - f.write("\n".join([ - 'FROM busybox', - 'MAINTAINER docker-py', - 'ADD . 
/test', - ])) - - with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: - f.write("\n".join([ - 'ignored', - 'Dockerfile', - '.dockerignore', - '', # empty line - ])) - - with open(os.path.join(base_dir, 'not-ignored'), 'w') as f: - f.write("this file should not be ignored") - - subdir = os.path.join(base_dir, 'ignored', 'subdir') - os.makedirs(subdir) - with open(os.path.join(subdir, 'file'), 'w') as f: - f.write("this file should be ignored") - - tag = 'docker-py-test-build-with-dockerignore' - stream = self.client.build( - path=base_dir, - tag=tag, - ) - for chunk in stream: - pass - - c = self.client.create_container(tag, ['ls', '-1A', '/test']) - self.client.start(c) - self.client.wait(c) - logs = self.client.logs(c) - - if six.PY3: - logs = logs.decode('utf-8') - - self.assertEqual( - list(filter(None, logs.split('\n'))), - ['not-ignored'], - ) - -####################### -# PY SPECIFIC TESTS # -####################### - - -class TestRunShlex(BaseTestCase): - def runTest(self): - commands = [ - 'true', - 'echo "The Young Descendant of Tepes & Septette for the ' - 'Dead Princess"', - 'echo -n "The Young Descendant of Tepes & Septette for the ' - 'Dead Princess"', - '/bin/sh -c "echo Hello World"', - '/bin/sh -c \'echo "Hello World"\'', - 'echo "\"Night of Nights\""', - 'true && echo "Night of Nights"' - ] - for cmd in commands: - container = self.client.create_container(BUSYBOX, cmd) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - exitcode = self.client.wait(id) - self.assertEqual(exitcode, 0, msg=cmd) - - -class TestLoadConfig(BaseTestCase): - def runTest(self): - folder = tempfile.mkdtemp() - self.tmp_folders.append(folder) - cfg_path = os.path.join(folder, '.dockercfg') - f = open(cfg_path, 'w') - auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') - f.write('auth = {0}\n'.format(auth_)) - f.write('email = sakuya@scarlet.net') - f.close() - cfg = docker.auth.load_config(cfg_path) - self.assertNotEqual(cfg[docker.auth.INDEX_NAME], None) - cfg = cfg[docker.auth.INDEX_NAME] - self.assertEqual(cfg['username'], 'sakuya') - self.assertEqual(cfg['password'], 'izayoi') - self.assertEqual(cfg['email'], 'sakuya@scarlet.net') - self.assertEqual(cfg.get('Auth'), None) - - -class TestLoadJSONConfig(BaseTestCase): - def runTest(self): - folder = tempfile.mkdtemp() - self.tmp_folders.append(folder) - cfg_path = os.path.join(folder, '.dockercfg') - f = open(os.path.join(folder, '.dockercfg'), 'w') - auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') - email_ = 'sakuya@scarlet.net' - f.write('{{"{0}": {{"auth": "{1}", "email": "{2}"}}}}\n'.format( - docker.auth.INDEX_URL, auth_, email_)) - f.close() - cfg = docker.auth.load_config(cfg_path) - self.assertNotEqual(cfg[docker.auth.INDEX_URL], None) - cfg = cfg[docker.auth.INDEX_URL] - self.assertEqual(cfg['username'], 'sakuya') - self.assertEqual(cfg['password'], 'izayoi') - self.assertEqual(cfg['email'], 'sakuya@scarlet.net') - self.assertEqual(cfg.get('Auth'), None) - - -class TestAutoDetectVersion(unittest.TestCase): - def test_client_init(self): - client = docker_client(version='auto') - client_version = client._version - api_version = client.version(api_version=False)['ApiVersion'] - self.assertEqual(client_version, api_version) - api_version_2 = client.version()['ApiVersion'] - self.assertEqual(client_version, api_version_2) - client.close() - - def test_auto_client(self): - client = docker.AutoVersionClient(**docker_client_kwargs()) - client_version = client._version - api_version = 
client.version(api_version=False)['ApiVersion'] - self.assertEqual(client_version, api_version) - api_version_2 = client.version()['ApiVersion'] - self.assertEqual(client_version, api_version_2) - client.close() - with self.assertRaises(docker.errors.DockerException): - docker.AutoVersionClient(**docker_client_kwargs(version='1.11')) - - -class TestConnectionTimeout(unittest.TestCase): - def setUp(self): - self.timeout = 0.5 - self.client = docker.client.Client(base_url='http://192.168.10.2:4243', - timeout=self.timeout) - - def runTest(self): - start = time.time() - res = None - # This call isn't supposed to complete, and it should fail fast. - try: - res = self.client.inspect_container('id') - except: - pass - end = time.time() - self.assertTrue(res is None) - self.assertTrue(end - start < 2 * self.timeout) - - -class UnixconnTestCase(unittest.TestCase): - """ - Test UNIX socket connection adapter. - """ - - def test_resource_warnings(self): - """ - Test no warnings are produced when using the client. - """ - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always') - - client = docker_client() - client.images() - client.close() - del client - - assert len(w) == 0, \ - "No warnings produced: {0}".format(w[0].message) - - -#################### -# REGRESSION TESTS # -#################### - -class TestRegressions(BaseTestCase): - def test_443(self): - dfile = io.BytesIO() - with self.assertRaises(docker.errors.APIError) as exc: - for line in self.client.build(fileobj=dfile, tag="a/b/c"): - pass - self.assertEqual(exc.exception.response.status_code, 500) - dfile.close() - - def test_542(self): - self.client.start( - self.client.create_container(BUSYBOX, ['true']) - ) - result = self.client.containers(all=True, trunc=True) - self.assertEqual(len(result[0]['Id']), 12) - - def test_647(self): - with self.assertRaises(docker.errors.APIError): - self.client.inspect_image('gensokyo.jp//kirisame') - - def test_649(self): - self.client.timeout = None - ctnr = self.client.create_container(BUSYBOX, ['sleep', '2']) - self.client.start(ctnr) - self.client.stop(ctnr) - - def test_715(self): - ctnr = self.client.create_container(BUSYBOX, ['id', '-u'], user=1000) - self.client.start(ctnr) - self.client.wait(ctnr) - logs = self.client.logs(ctnr) - if six.PY3: - logs = logs.decode('utf-8') - assert logs == '1000\n' +# FIXME: placeholder while we transition to the new folder architecture +# Remove when merged in master and Jenkins is updated to find the tests +# in the new location. +from integration import * # flake8: noqa diff --git a/tests/test.py b/tests/test.py deleted file mode 100644 index 9993484..0000000 --- a/tests/test.py +++ /dev/null @@ -1,2476 +0,0 @@ -# Copyright 2013 dotCloud inc. - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
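The unit-test module removed below drives docker.Client entirely through a mocked transport: each HTTP verb on the client is patched so it routes through a single mock.Mock whose side_effect looks up a canned response by URL, letting every test assert the exact method, URL and parameters with assert_called_with. A stripped-down sketch of that pattern follows; FakeClient and the canned URL table are placeholders for illustration, not the module's real fixtures.

    try:
        from unittest import mock  # Python 3
    except ImportError:
        import mock  # Python 2 backport


    class FakeClient(object):
        """Placeholder stand-in for the real client (illustration only)."""

        def get(self, url, **kwargs):  # replaced by the patch below
            raise NotImplementedError

        def images(self):
            return self.get('http+fake://api/images/json')


    def fake_resp(method, url, *args, **kwargs):
        # Canned lookup keyed by URL, in the spirit of fake_api.fake_responses.
        return {'http+fake://api/images/json': []}[url]


    fake_request = mock.Mock(side_effect=fake_resp)


    def fake_get(self, url, *args, **kwargs):
        # Record the call on the shared mock, then return the canned response.
        return fake_request('GET', url, *args, **kwargs)


    with mock.patch.object(FakeClient, 'get', fake_get):
        assert FakeClient().images() == []
        fake_request.assert_called_with('GET', 'http+fake://api/images/json')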
- -import base64 -import datetime -import gzip -import io -import json -import os -import re -import shutil -import signal -import socket -import sys -import tarfile -import tempfile -import threading -import time -import random - -import docker -import docker.efficiency -import requests -import six - -from . import base -from . import fake_api -from .helpers import make_tree - -import pytest - -try: - from unittest import mock -except ImportError: - import mock - - -DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS - - -def response(status_code=200, content='', headers=None, reason=None, elapsed=0, - request=None): - res = requests.Response() - res.status_code = status_code - if not isinstance(content, six.binary_type): - content = json.dumps(content).encode('ascii') - res._content = content - res.headers = requests.structures.CaseInsensitiveDict(headers or {}) - res.reason = reason - res.elapsed = datetime.timedelta(elapsed) - res.request = request - return res - - -def fake_resolve_authconfig(authconfig, registry=None): - return None - - -def fake_inspect_container(self, container, tty=False): - return fake_api.get_fake_inspect_container(tty=tty)[1] - - -def fake_inspect_container_tty(self, container): - return fake_inspect_container(self, container, tty=True) - - -def fake_resp(method, url, *args, **kwargs): - key = None - if url in fake_api.fake_responses: - key = url - elif (url, method) in fake_api.fake_responses: - key = (url, method) - if not key: - raise Exception('{0} {1}'.format(method, url)) - status_code, content = fake_api.fake_responses[key]() - return response(status_code=status_code, content=content) - - -fake_request = mock.Mock(side_effect=fake_resp) - - -def fake_get(self, url, *args, **kwargs): - return fake_request('GET', url, *args, **kwargs) - - -def fake_post(self, url, *args, **kwargs): - return fake_request('POST', url, *args, **kwargs) - - -def fake_put(self, url, *args, **kwargs): - return fake_request('PUT', url, *args, **kwargs) - - -def fake_delete(self, url, *args, **kwargs): - return fake_request('DELETE', url, *args, **kwargs) - -url_prefix = 'http+docker://localunixsocket/v{0}/'.format( - docker.constants.DEFAULT_DOCKER_API_VERSION) - - -class Cleanup(object): - if sys.version_info < (2, 7): - # Provide a basic implementation of addCleanup for Python < 2.7 - def __init__(self, *args, **kwargs): - super(Cleanup, self).__init__(*args, **kwargs) - self._cleanups = [] - - def tearDown(self): - super(Cleanup, self).tearDown() - ok = True - while self._cleanups: - fn, args, kwargs = self._cleanups.pop(-1) - try: - fn(*args, **kwargs) - except KeyboardInterrupt: - raise - except: - ok = False - if not ok: - raise - - def addCleanup(self, function, *args, **kwargs): - self._cleanups.append((function, args, kwargs)) - - -@mock.patch.multiple('docker.Client', get=fake_get, post=fake_post, - put=fake_put, delete=fake_delete) -class DockerClientTest(Cleanup, base.BaseTestCase): - def setUp(self): - self.client = docker.Client() - # Force-clear authconfig to avoid tampering with the tests - self.client._cfg = {'Configs': {}} - - def tearDown(self): - self.client.close() - - def assertIn(self, object, collection): - if six.PY2 and sys.version_info[1] <= 6: - return self.assertTrue(object in collection) - return super(DockerClientTest, self).assertIn(object, collection) - - def base_create_payload(self, img='busybox', cmd=None): - if not cmd: - cmd = ['true'] - return {"Tty": False, "Image": img, "Cmd": cmd, - "AttachStdin": False, - "AttachStderr": 
True, "AttachStdout": True, - "StdinOnce": False, - "OpenStdin": False, "NetworkDisabled": False, - } - - def test_ctor(self): - with pytest.raises(docker.errors.DockerException) as excinfo: - docker.Client(version=1.12) - - self.assertEqual( - str(excinfo.value), - 'Version parameter must be a string or None. Found float' - ) - - def test_url_valid_resource(self): - url = self.client._url('/hello/{0}/world', 'somename') - self.assertEqual( - url, '{0}{1}'.format(url_prefix, 'hello/somename/world') - ) - - url = self.client._url('/hello/{0}/world', '/some?name') - self.assertEqual( - url, '{0}{1}'.format(url_prefix, 'hello/%2Fsome%3Fname/world') - ) - - def test_url_invalid_resource(self): - with pytest.raises(ValueError): - self.client._url('/hello/{0}/world', ['sakuya', 'izayoi']) - - def test_url_no_resource(self): - url = self.client._url('/simple') - self.assertEqual(url, '{0}{1}'.format(url_prefix, 'simple')) - - url = self.client._url('/simple', None) - self.assertEqual(url, '{0}{1}'.format(url_prefix, 'simple')) - - ######################### - # INFORMATION TESTS # - ######################### - def test_version(self): - self.client.version() - - fake_request.assert_called_with( - 'GET', - url_prefix + 'version', - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_retrieve_server_version(self): - client = docker.Client(version="auto") - self.assertTrue(isinstance(client._version, six.string_types)) - self.assertFalse(client._version == "auto") - client.close() - - def test_auto_retrieve_server_version(self): - version = self.client._retrieve_server_version() - self.assertTrue(isinstance(version, six.string_types)) - - def test_info(self): - self.client.info() - - fake_request.assert_called_with( - 'GET', - url_prefix + 'info', - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_search(self): - self.client.search('busybox') - - fake_request.assert_called_with( - 'GET', - url_prefix + 'images/search', - params={'term': 'busybox'}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_image_viz(self): - with pytest.raises(Exception): - self.client.images('busybox', viz=True) - self.fail('Viz output should not be supported!') - - def test_events(self): - self.client.events() - - fake_request.assert_called_with( - 'GET', - url_prefix + 'events', - params={'since': None, 'until': None, 'filters': None}, - stream=True - ) - - def test_events_with_since_until(self): - ts = 1356048000 - now = datetime.datetime.utcfromtimestamp(ts) - since = now - datetime.timedelta(seconds=10) - until = now + datetime.timedelta(seconds=10) - - self.client.events(since=since, until=until) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'events', - params={ - 'since': ts - 10, - 'until': ts + 10, - 'filters': None - }, - stream=True - ) - - def test_events_with_filters(self): - filters = {'event': ['die', 'stop'], - 'container': fake_api.FAKE_CONTAINER_ID} - - self.client.events(filters=filters) - - expected_filters = docker.utils.convert_filters(filters) - fake_request.assert_called_with( - 'GET', - url_prefix + 'events', - params={ - 'since': None, - 'until': None, - 'filters': expected_filters - }, - stream=True - ) - - ################### - # LISTING TESTS # - ################### - - def test_images(self): - self.client.images(all=True) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'images/json', - params={'filter': None, 'only_ids': 0, 'all': 1}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_images_quiet(self): - self.client.images(all=True, quiet=True) - - 
fake_request.assert_called_with( - 'GET', - url_prefix + 'images/json', - params={'filter': None, 'only_ids': 1, 'all': 1}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_image_ids(self): - self.client.images(quiet=True) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'images/json', - params={'filter': None, 'only_ids': 1, 'all': 0}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_images_filters(self): - self.client.images(filters={'dangling': True}) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'images/json', - params={'filter': None, 'only_ids': 0, 'all': 0, - 'filters': '{"dangling": ["true"]}'}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_list_containers(self): - self.client.containers(all=True) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/json', - params={ - 'all': 1, - 'since': None, - 'size': 0, - 'limit': -1, - 'trunc_cmd': 0, - 'before': None - }, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - ##################### - # CONTAINER TESTS # - ##################### - - def test_create_container(self): - self.client.create_container('busybox', 'true') - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), - json.loads(''' - {"Tty": false, "Image": "busybox", "Cmd": ["true"], - "AttachStdin": false, - "AttachStderr": true, "AttachStdout": true, - "StdinOnce": false, - "OpenStdin": false, "NetworkDisabled": false}''')) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_create_container_with_binds(self): - mount_dest = '/mnt' - - self.client.create_container('busybox', ['ls', mount_dest], - volumes=[mount_dest]) - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), - json.loads(''' - {"Tty": false, "Image": "busybox", - "Cmd": ["ls", "/mnt"], "AttachStdin": false, - "Volumes": {"/mnt": {}}, - "AttachStderr": true, - "AttachStdout": true, "OpenStdin": false, - "StdinOnce": false, - "NetworkDisabled": false}''')) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_create_container_with_volume_string(self): - mount_dest = '/mnt' - - self.client.create_container('busybox', ['ls', mount_dest], - volumes=mount_dest) - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), - json.loads(''' - {"Tty": false, "Image": "busybox", - "Cmd": ["ls", "/mnt"], "AttachStdin": false, - "Volumes": {"/mnt": {}}, - "AttachStderr": true, - "AttachStdout": true, "OpenStdin": false, - "StdinOnce": false, - "NetworkDisabled": false}''')) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_create_container_with_ports(self): - self.client.create_container('busybox', 'ls', - ports=[1111, (2222, 'udp'), (3333,)]) - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), - json.loads(''' - {"Tty": false, "Image": "busybox", - "Cmd": ["ls"], "AttachStdin": false, - "ExposedPorts": { - "1111/tcp": {}, - "2222/udp": {}, - "3333/tcp": {} - }, - "AttachStderr": true, - "AttachStdout": true, "OpenStdin": false, - "StdinOnce": false, - "NetworkDisabled": false}''')) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def 
test_create_container_with_entrypoint(self): - self.client.create_container('busybox', 'hello', - entrypoint='cowsay entry') - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), - json.loads(''' - {"Tty": false, "Image": "busybox", - "Cmd": ["hello"], "AttachStdin": false, - "AttachStderr": true, - "AttachStdout": true, "OpenStdin": false, - "StdinOnce": false, - "NetworkDisabled": false, - "Entrypoint": ["cowsay", "entry"]}''')) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_create_container_with_cpu_shares(self): - self.client.create_container('busybox', 'ls', - cpu_shares=5) - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), - json.loads(''' - {"Tty": false, "Image": "busybox", - "Cmd": ["ls"], "AttachStdin": false, - "AttachStderr": true, - "AttachStdout": true, "OpenStdin": false, - "StdinOnce": false, - "NetworkDisabled": false, - "CpuShares": 5}''')) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_create_container_with_cpuset(self): - self.client.create_container('busybox', 'ls', - cpuset='0,1') - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), - json.loads(''' - {"Tty": false, "Image": "busybox", - "Cmd": ["ls"], "AttachStdin": false, - "AttachStderr": true, - "AttachStdout": true, "OpenStdin": false, - "StdinOnce": false, - "NetworkDisabled": false, - "Cpuset": "0,1", - "CpusetCpus": "0,1"}''')) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_create_container_with_cgroup_parent(self): - self.client.create_container( - 'busybox', 'ls', host_config=self.client.create_host_config( - cgroup_parent='test' - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - data = json.loads(args[1]['data']) - self.assertIn('HostConfig', data) - self.assertIn('CgroupParent', data['HostConfig']) - self.assertEqual(data['HostConfig']['CgroupParent'], 'test') - - def test_create_container_with_working_dir(self): - self.client.create_container('busybox', 'ls', - working_dir='/root') - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), - json.loads(''' - {"Tty": false, "Image": "busybox", - "Cmd": ["ls"], "AttachStdin": false, - "AttachStderr": true, - "AttachStdout": true, "OpenStdin": false, - "StdinOnce": false, - "NetworkDisabled": false, - "WorkingDir": "/root"}''')) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_create_container_with_stdin_open(self): - self.client.create_container('busybox', 'true', stdin_open=True) - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), - json.loads(''' - {"Tty": false, "Image": "busybox", "Cmd": ["true"], - "AttachStdin": true, - "AttachStderr": true, "AttachStdout": true, - "StdinOnce": true, - "OpenStdin": true, "NetworkDisabled": false}''')) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_create_container_with_volumes_from(self): - vol_names = ['foo', 'bar'] - try: - self.client.create_container('busybox', 'true', 
- volumes_from=vol_names) - except docker.errors.DockerException: - self.assertTrue( - docker.utils.compare_version('1.10', self.client._version) >= 0 - ) - return - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data'])['VolumesFrom'], - ','.join(vol_names)) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_create_container_empty_volumes_from(self): - self.client.create_container('busybox', 'true', volumes_from=[]) - - args = fake_request.call_args - data = json.loads(args[1]['data']) - self.assertTrue('VolumesFrom' not in data) - - def test_create_named_container(self): - self.client.create_container('busybox', 'true', - name='marisa-kirisame') - - args = fake_request.call_args - self.assertEqual(args[0][1], - url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), - json.loads(''' - {"Tty": false, "Image": "busybox", "Cmd": ["true"], - "AttachStdin": false, - "AttachStderr": true, "AttachStdout": true, - "StdinOnce": false, - "OpenStdin": false, "NetworkDisabled": false}''')) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - self.assertEqual(args[1]['params'], {'name': 'marisa-kirisame'}) - - def test_create_container_with_mem_limit_as_int(self): - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - mem_limit=128.0 - ) - ) - - args = fake_request.call_args - data = json.loads(args[1]['data']) - self.assertEqual(data['HostConfig']['Memory'], 128.0) - - def test_create_container_with_mem_limit_as_string(self): - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - mem_limit='128' - ) - ) - - args = fake_request.call_args - data = json.loads(args[1]['data']) - self.assertEqual(data['HostConfig']['Memory'], 128.0) - - def test_create_container_with_mem_limit_as_string_with_k_unit(self): - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - mem_limit='128k' - ) - ) - - args = fake_request.call_args - data = json.loads(args[1]['data']) - self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024) - - def test_create_container_with_mem_limit_as_string_with_m_unit(self): - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - mem_limit='128m' - ) - ) - - args = fake_request.call_args - data = json.loads(args[1]['data']) - self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024 * 1024) - - def test_create_container_with_mem_limit_as_string_with_g_unit(self): - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - mem_limit='128g' - ) - ) - - args = fake_request.call_args - data = json.loads(args[1]['data']) - self.assertEqual( - data['HostConfig']['Memory'], 128.0 * 1024 * 1024 * 1024 - ) - - def test_create_container_with_mem_limit_as_string_with_wrong_value(self): - self.assertRaises( - docker.errors.DockerException, - self.client.create_host_config, mem_limit='128p' - ) - - self.assertRaises( - docker.errors.DockerException, - self.client.create_host_config, mem_limit='1f28' - ) - - def test_start_container(self): - self.client.start(fake_api.FAKE_CONTAINER_ID) - - args = fake_request.call_args - self.assertEqual( - args[0][1], - url_prefix + 'containers/3cc2351ab11b/start' - ) - self.assertEqual(json.loads(args[1]['data']), {}) - self.assertEqual( - args[1]['headers'], {'Content-Type': 
'application/json'} - ) - self.assertEqual( - args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS - ) - - def test_start_container_none(self): - with pytest.raises(ValueError) as excinfo: - self.client.start(container=None) - - self.assertEqual( - str(excinfo.value), - 'image or container param is undefined', - ) - - with pytest.raises(ValueError) as excinfo: - self.client.start(None) - - self.assertEqual( - str(excinfo.value), - 'image or container param is undefined', - ) - - def test_start_container_regression_573(self): - self.client.start(**{'container': fake_api.FAKE_CONTAINER_ID}) - - def test_create_container_with_lxc_conf(self): - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - lxc_conf={'lxc.conf.k': 'lxc.conf.value'} - ) - ) - - args = fake_request.call_args - self.assertEqual( - args[0][1], - url_prefix + 'containers/create' - ) - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['LxcConf'] = [ - {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} - ] - - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual( - args[1]['headers'], - {'Content-Type': 'application/json'} - ) - self.assertEqual( - args[1]['timeout'], - DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_lxc_conf_compat(self): - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['LxcConf'] = [ - {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} - ] - self.assertEqual( - json.loads(args[1]['data']), expected_payload) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - self.assertEqual( - args[1]['timeout'], - DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_binds_ro(self): - mount_dest = '/mnt' - mount_origin = '/tmp' - - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - binds={mount_origin: { - "bind": mount_dest, - "ro": True - }} - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + - 'containers/create') - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:ro"] - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - self.assertEqual( - args[1]['timeout'], - DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_binds_rw(self): - mount_dest = '/mnt' - mount_origin = '/tmp' - - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - binds={mount_origin: { - "bind": mount_dest, - "ro": False - }} - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + - 'containers/create') - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:rw"] - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual(args[1]['headers'], - {'Content-Type': 
'application/json'}) - self.assertEqual( - args[1]['timeout'], - DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_binds_mode(self): - mount_dest = '/mnt' - mount_origin = '/tmp' - - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - binds={mount_origin: { - "bind": mount_dest, - "mode": "z", - }} - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + - 'containers/create') - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:z"] - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - self.assertEqual( - args[1]['timeout'], - DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_binds_mode_and_ro_error(self): - with pytest.raises(ValueError): - mount_dest = '/mnt' - mount_origin = '/tmp' - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - binds={mount_origin: { - "bind": mount_dest, - "mode": "z", - "ro": True, - }} - ) - ) - - def test_create_container_with_binds_list(self): - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - binds=[ - "/tmp:/mnt/1:ro", - "/tmp:/mnt/2", - ], - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + - 'containers/create') - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['Binds'] = [ - "/tmp:/mnt/1:ro", - "/tmp:/mnt/2", - ] - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - self.assertEqual( - args[1]['timeout'], - DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_port_binds(self): - self.maxDiff = None - - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - port_bindings={ - 1111: None, - 2222: 2222, - '3333/udp': (3333,), - 4444: ('127.0.0.1',), - 5555: ('127.0.0.1', 5555), - 6666: [('127.0.0.1',), ('192.168.0.1',)] - } - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - data = json.loads(args[1]['data']) - port_bindings = data['HostConfig']['PortBindings'] - self.assertTrue('1111/tcp' in port_bindings) - self.assertTrue('2222/tcp' in port_bindings) - self.assertTrue('3333/udp' in port_bindings) - self.assertTrue('4444/tcp' in port_bindings) - self.assertTrue('5555/tcp' in port_bindings) - self.assertTrue('6666/tcp' in port_bindings) - self.assertEqual( - [{"HostPort": "", "HostIp": ""}], - port_bindings['1111/tcp'] - ) - self.assertEqual( - [{"HostPort": "2222", "HostIp": ""}], - port_bindings['2222/tcp'] - ) - self.assertEqual( - [{"HostPort": "3333", "HostIp": ""}], - port_bindings['3333/udp'] - ) - self.assertEqual( - [{"HostPort": "", "HostIp": "127.0.0.1"}], - port_bindings['4444/tcp'] - ) - self.assertEqual( - [{"HostPort": "5555", "HostIp": "127.0.0.1"}], - port_bindings['5555/tcp'] - ) - self.assertEqual(len(port_bindings['6666/tcp']), 2) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - self.assertEqual( - args[1]['timeout'], - DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_mac_address(self): - mac_address_expected = "02:42:ac:11:00:0a" - - container = 
self.client.create_container( - 'busybox', ['sleep', '60'], mac_address=mac_address_expected) - - res = self.client.inspect_container(container['Id']) - self.assertEqual(mac_address_expected, - res['NetworkSettings']['MacAddress']) - - def test_create_container_with_links(self): - link_path = 'path' - alias = 'alias' - - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - links={link_path: alias} - ) - ) - - args = fake_request.call_args - self.assertEqual( - args[0][1], url_prefix + 'containers/create' - ) - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['Links'] = ['path:alias'] - - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual( - args[1]['headers'], {'Content-Type': 'application/json'} - ) - - def test_create_container_with_multiple_links(self): - link_path = 'path' - alias = 'alias' - - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - links={ - link_path + '1': alias + '1', - link_path + '2': alias + '2' - } - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['Links'] = [ - 'path1:alias1', 'path2:alias2' - ] - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual( - args[1]['headers'], {'Content-Type': 'application/json'} - ) - - def test_create_container_with_links_as_list_of_tuples(self): - link_path = 'path' - alias = 'alias' - - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - links=[(link_path, alias)] - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['Links'] = ['path:alias'] - - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual( - args[1]['headers'], {'Content-Type': 'application/json'} - ) - - def test_create_container_privileged(self): - self.client.create_container( - 'busybox', 'true', - host_config=self.client.create_host_config(privileged=True) - ) - - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['Privileged'] = True - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - self.assertEqual( - args[1]['timeout'], - DEFAULT_TIMEOUT_SECONDS - ) - - def test_start_container_with_lxc_conf(self): - def call_start(): - self.client.start( - fake_api.FAKE_CONTAINER_ID, - lxc_conf={'lxc.conf.k': 'lxc.conf.value'} - ) - - pytest.deprecated_call(call_start) - - def test_start_container_with_lxc_conf_compat(self): - def call_start(): - self.client.start( - fake_api.FAKE_CONTAINER_ID, - lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] - ) - - pytest.deprecated_call(call_start) - - def test_start_container_with_binds_ro(self): - def call_start(): - self.client.start( - fake_api.FAKE_CONTAINER_ID, binds={ - '/tmp': { - "bind": '/mnt', - "ro": True - } - } - ) 
- - pytest.deprecated_call(call_start) - - def test_start_container_with_binds_rw(self): - def call_start(): - self.client.start( - fake_api.FAKE_CONTAINER_ID, binds={ - '/tmp': {"bind": '/mnt', "ro": False} - } - ) - - pytest.deprecated_call(call_start) - - def test_start_container_with_port_binds(self): - self.maxDiff = None - - def call_start(): - self.client.start(fake_api.FAKE_CONTAINER_ID, port_bindings={ - 1111: None, - 2222: 2222, - '3333/udp': (3333,), - 4444: ('127.0.0.1',), - 5555: ('127.0.0.1', 5555), - 6666: [('127.0.0.1',), ('192.168.0.1',)] - }) - - pytest.deprecated_call(call_start) - - def test_start_container_with_links(self): - def call_start(): - self.client.start( - fake_api.FAKE_CONTAINER_ID, links={'path': 'alias'} - ) - - pytest.deprecated_call(call_start) - - def test_start_container_with_multiple_links(self): - def call_start(): - self.client.start( - fake_api.FAKE_CONTAINER_ID, - links={ - 'path1': 'alias1', - 'path2': 'alias2' - } - ) - - pytest.deprecated_call(call_start) - - def test_start_container_with_links_as_list_of_tuples(self): - def call_start(): - self.client.start(fake_api.FAKE_CONTAINER_ID, - links=[('path', 'alias')]) - - pytest.deprecated_call(call_start) - - def test_start_container_privileged(self): - def call_start(): - self.client.start(fake_api.FAKE_CONTAINER_ID, privileged=True) - - pytest.deprecated_call(call_start) - - def test_start_container_with_dict_instead_of_id(self): - self.client.start({'Id': fake_api.FAKE_CONTAINER_ID}) - - args = fake_request.call_args - self.assertEqual( - args[0][1], - url_prefix + 'containers/3cc2351ab11b/start' - ) - self.assertEqual(json.loads(args[1]['data']), {}) - self.assertEqual( - args[1]['headers'], {'Content-Type': 'application/json'} - ) - self.assertEqual( - args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_restart_policy(self): - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - restart_policy={ - "Name": "always", - "MaximumRetryCount": 0 - } - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['RestartPolicy'] = { - "MaximumRetryCount": 0, "Name": "always" - } - self.assertEqual(json.loads(args[1]['data']), expected_payload) - - self.assertEqual( - args[1]['headers'], {'Content-Type': 'application/json'} - ) - self.assertEqual( - args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_added_capabilities(self): - self.client.create_container( - 'busybox', 'true', - host_config=self.client.create_host_config(cap_add=['MKNOD']) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['CapAdd'] = ['MKNOD'] - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual( - args[1]['headers'], {'Content-Type': 'application/json'} - ) - self.assertEqual( - args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_dropped_capabilities(self): - self.client.create_container( - 'busybox', 'true', - host_config=self.client.create_host_config(cap_drop=['MKNOD']) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - 
expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['CapDrop'] = ['MKNOD'] - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual( - args[1]['headers'], {'Content-Type': 'application/json'} - ) - self.assertEqual( - args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_devices(self): - self.client.create_container( - 'busybox', 'true', host_config=self.client.create_host_config( - devices=['/dev/sda:/dev/xvda:rwm', - '/dev/sdb:/dev/xvdb', - '/dev/sdc'] - ) - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - expected_payload = self.base_create_payload() - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['Devices'] = [ - {'CgroupPermissions': 'rwm', - 'PathInContainer': '/dev/xvda', - 'PathOnHost': '/dev/sda'}, - {'CgroupPermissions': 'rwm', - 'PathInContainer': '/dev/xvdb', - 'PathOnHost': '/dev/sdb'}, - {'CgroupPermissions': 'rwm', - 'PathInContainer': '/dev/sdc', - 'PathOnHost': '/dev/sdc'} - ] - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual( - args[1]['headers'], {'Content-Type': 'application/json'} - ) - self.assertEqual( - args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_labels_dict(self): - labels_dict = { - six.text_type('foo'): six.text_type('1'), - six.text_type('bar'): six.text_type('2'), - } - - self.client.create_container( - 'busybox', 'true', - labels=labels_dict, - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) - self.assertEqual( - args[1]['headers'], {'Content-Type': 'application/json'} - ) - self.assertEqual( - args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_labels_list(self): - labels_list = [ - six.text_type('foo'), - six.text_type('bar'), - ] - labels_dict = { - six.text_type('foo'): six.text_type(), - six.text_type('bar'): six.text_type(), - } - - self.client.create_container( - 'busybox', 'true', - labels=labels_list, - ) - - args = fake_request.call_args - self.assertEqual(args[0][1], url_prefix + 'containers/create') - self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) - self.assertEqual( - args[1]['headers'], {'Content-Type': 'application/json'} - ) - self.assertEqual( - args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS - ) - - def test_create_container_with_named_volume(self): - mount_dest = '/mnt' - volume_name = 'name' - - self.client.create_container( - 'busybox', 'true', - host_config=self.client.create_host_config( - binds={volume_name: { - "bind": mount_dest, - "ro": False - }}), - volume_driver='foodriver', - ) - - args = fake_request.call_args - self.assertEqual( - args[0][1], url_prefix + 'containers/create' - ) - expected_payload = self.base_create_payload() - expected_payload['VolumeDriver'] = 'foodriver' - expected_payload['HostConfig'] = self.client.create_host_config() - expected_payload['HostConfig']['Binds'] = ["name:/mnt:rw"] - self.assertEqual(json.loads(args[1]['data']), expected_payload) - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - self.assertEqual( - args[1]['timeout'], - DEFAULT_TIMEOUT_SECONDS - ) - - def test_resize_container(self): - self.client.resize( - {'Id': fake_api.FAKE_CONTAINER_ID}, - height=15, - 
width=120 - ) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/resize', - params={'h': 15, 'w': 120}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_rename_container(self): - self.client.rename( - {'Id': fake_api.FAKE_CONTAINER_ID}, - name='foobar' - ) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/rename', - params={'name': 'foobar'}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_wait(self): - self.client.wait(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/wait', - timeout=None - ) - - def test_wait_with_dict_instead_of_id(self): - self.client.wait({'Id': fake_api.FAKE_CONTAINER_ID}) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/wait', - timeout=None - ) - - def _socket_path_for_client_session(self, client): - socket_adapter = client.get_adapter('http+docker://') - return socket_adapter.socket_path - - def test_url_compatibility_unix(self): - c = docker.Client(base_url="unix://socket") - - assert self._socket_path_for_client_session(c) == '/socket' - - def test_url_compatibility_unix_triple_slash(self): - c = docker.Client(base_url="unix:///socket") - - assert self._socket_path_for_client_session(c) == '/socket' - - def test_url_compatibility_http_unix_triple_slash(self): - c = docker.Client(base_url="http+unix:///socket") - - assert self._socket_path_for_client_session(c) == '/socket' - - def test_url_compatibility_http(self): - c = docker.Client(base_url="http://hostname:1234") - - assert c.base_url == "http://hostname:1234" - - def test_url_compatibility_tcp(self): - c = docker.Client(base_url="tcp://hostname:1234") - - assert c.base_url == "http://hostname:1234" - - def test_logs(self): - with mock.patch('docker.Client.inspect_container', - fake_inspect_container): - logs = self.client.logs(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', - params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, - 'tail': 'all'}, - timeout=DEFAULT_TIMEOUT_SECONDS, - stream=False - ) - - self.assertEqual( - logs, - 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') - ) - - def test_logs_with_dict_instead_of_id(self): - with mock.patch('docker.Client.inspect_container', - fake_inspect_container): - logs = self.client.logs({'Id': fake_api.FAKE_CONTAINER_ID}) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', - params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, - 'tail': 'all'}, - timeout=DEFAULT_TIMEOUT_SECONDS, - stream=False - ) - - self.assertEqual( - logs, - 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') - ) - - def test_log_streaming(self): - with mock.patch('docker.Client.inspect_container', - fake_inspect_container): - self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', - params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, - 'tail': 'all'}, - timeout=DEFAULT_TIMEOUT_SECONDS, - stream=True - ) - - def test_log_tail(self): - with mock.patch('docker.Client.inspect_container', - fake_inspect_container): - self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, - tail=10) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', - params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, - 'tail': 10}, - 
timeout=DEFAULT_TIMEOUT_SECONDS, - stream=False - ) - - def test_log_tty(self): - m = mock.Mock() - with mock.patch('docker.Client.inspect_container', - fake_inspect_container_tty): - with mock.patch('docker.Client._stream_raw_result', - m): - self.client.logs(fake_api.FAKE_CONTAINER_ID, - stream=True) - - self.assertTrue(m.called) - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', - params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, - 'tail': 'all'}, - timeout=DEFAULT_TIMEOUT_SECONDS, - stream=True - ) - - def test_diff(self): - self.client.diff(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/changes', - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_diff_with_dict_instead_of_id(self): - self.client.diff({'Id': fake_api.FAKE_CONTAINER_ID}) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/changes', - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_port(self): - self.client.port({'Id': fake_api.FAKE_CONTAINER_ID}, 1111) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/json', - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_stop_container(self): - timeout = 2 - - self.client.stop(fake_api.FAKE_CONTAINER_ID, timeout=timeout) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/stop', - params={'t': timeout}, - timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) - ) - - def test_stop_container_with_dict_instead_of_id(self): - timeout = 2 - - self.client.stop({'Id': fake_api.FAKE_CONTAINER_ID}, - timeout=timeout) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/stop', - params={'t': timeout}, - timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) - ) - - def test_exec_create(self): - self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1']) - - args = fake_request.call_args - self.assertEqual( - 'POST', - args[0][0], url_prefix + 'containers/{0}/exec'.format( - fake_api.FAKE_CONTAINER_ID - ) - ) - - self.assertEqual( - json.loads(args[1]['data']), { - 'Tty': False, - 'AttachStdout': True, - 'Container': fake_api.FAKE_CONTAINER_ID, - 'Cmd': ['ls', '-1'], - 'Privileged': False, - 'AttachStdin': False, - 'AttachStderr': True, - 'User': '' - } - ) - - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_exec_start(self): - self.client.exec_start(fake_api.FAKE_EXEC_ID) - - args = fake_request.call_args - self.assertEqual( - args[0][1], url_prefix + 'exec/{0}/start'.format( - fake_api.FAKE_EXEC_ID - ) - ) - - self.assertEqual( - json.loads(args[1]['data']), { - 'Tty': False, - 'Detach': False, - } - ) - - self.assertEqual(args[1]['headers'], - {'Content-Type': 'application/json'}) - - def test_exec_inspect(self): - self.client.exec_inspect(fake_api.FAKE_EXEC_ID) - - args = fake_request.call_args - self.assertEqual( - args[0][1], url_prefix + 'exec/{0}/json'.format( - fake_api.FAKE_EXEC_ID - ) - ) - - def test_exec_resize(self): - self.client.exec_resize(fake_api.FAKE_EXEC_ID, height=20, width=60) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'exec/{0}/resize'.format(fake_api.FAKE_EXEC_ID), - params={'h': 20, 'w': 60}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_pause_container(self): - self.client.pause(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/pause', - timeout=(DEFAULT_TIMEOUT_SECONDS) - ) - - def 
test_unpause_container(self): - self.client.unpause(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/unpause', - timeout=(DEFAULT_TIMEOUT_SECONDS) - ) - - def test_kill_container(self): - self.client.kill(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/kill', - params={}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_kill_container_with_dict_instead_of_id(self): - self.client.kill({'Id': fake_api.FAKE_CONTAINER_ID}) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/kill', - params={}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_kill_container_with_signal(self): - self.client.kill(fake_api.FAKE_CONTAINER_ID, signal=signal.SIGTERM) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/kill', - params={'signal': signal.SIGTERM}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_restart_container(self): - self.client.restart(fake_api.FAKE_CONTAINER_ID, timeout=2) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/restart', - params={'t': 2}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_restart_container_with_dict_instead_of_id(self): - self.client.restart({'Id': fake_api.FAKE_CONTAINER_ID}, timeout=2) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'containers/3cc2351ab11b/restart', - params={'t': 2}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_remove_container(self): - self.client.remove_container(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'DELETE', - url_prefix + 'containers/3cc2351ab11b', - params={'v': False, 'link': False, 'force': False}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_remove_container_with_dict_instead_of_id(self): - self.client.remove_container({'Id': fake_api.FAKE_CONTAINER_ID}) - - fake_request.assert_called_with( - 'DELETE', - url_prefix + 'containers/3cc2351ab11b', - params={'v': False, 'link': False, 'force': False}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_remove_link(self): - self.client.remove_container(fake_api.FAKE_CONTAINER_ID, link=True) - - fake_request.assert_called_with( - 'DELETE', - url_prefix + 'containers/3cc2351ab11b', - params={'v': False, 'link': True, 'force': False}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_export(self): - self.client.export(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/export', - stream=True, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_export_with_dict_instead_of_id(self): - self.client.export({'Id': fake_api.FAKE_CONTAINER_ID}) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/export', - stream=True, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_inspect_container(self): - self.client.inspect_container(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'containers/3cc2351ab11b/json', - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_inspect_container_undefined_id(self): - for arg in None, '', {True: True}: - with pytest.raises(docker.errors.NullResource) as excinfo: - self.client.inspect_container(arg) - - self.assertEqual( - excinfo.value.args[0], 'image or container param is undefined' - ) - - def test_container_stats(self): - self.client.stats(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'GET', - url_prefix + 
'containers/3cc2351ab11b/stats', - timeout=60, - stream=True - ) - - ################## - # IMAGES TESTS # - ################## - - def test_pull(self): - self.client.pull('joffrey/test001') - - args = fake_request.call_args - self.assertEqual( - args[0][1], - url_prefix + 'images/create' - ) - self.assertEqual( - args[1]['params'], - {'tag': None, 'fromImage': 'joffrey/test001'} - ) - self.assertFalse(args[1]['stream']) - - def test_pull_stream(self): - self.client.pull('joffrey/test001', stream=True) - - args = fake_request.call_args - self.assertEqual( - args[0][1], - url_prefix + 'images/create' - ) - self.assertEqual( - args[1]['params'], - {'tag': None, 'fromImage': 'joffrey/test001'} - ) - self.assertTrue(args[1]['stream']) - - def test_commit(self): - self.client.commit(fake_api.FAKE_CONTAINER_ID) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'commit', - data='{}', - headers={'Content-Type': 'application/json'}, - params={ - 'repo': None, - 'comment': None, - 'tag': None, - 'container': '3cc2351ab11b', - 'author': None - }, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_remove_image(self): - self.client.remove_image(fake_api.FAKE_IMAGE_ID) - - fake_request.assert_called_with( - 'DELETE', - url_prefix + 'images/e9aa60c60128', - params={'force': False, 'noprune': False}, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_image_history(self): - self.client.history(fake_api.FAKE_IMAGE_NAME) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'images/test_image/history', - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_import_image(self): - self.client.import_image( - fake_api.FAKE_TARBALL_PATH, - repository=fake_api.FAKE_REPO_NAME, - tag=fake_api.FAKE_TAG_NAME - ) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/create', - params={ - 'repo': fake_api.FAKE_REPO_NAME, - 'tag': fake_api.FAKE_TAG_NAME, - 'fromSrc': fake_api.FAKE_TARBALL_PATH - }, - data=None, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_import_image_from_bytes(self): - stream = (i for i in range(0, 100)) - - self.client.import_image( - stream, - repository=fake_api.FAKE_REPO_NAME, - tag=fake_api.FAKE_TAG_NAME - ) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/create', - params={ - 'repo': fake_api.FAKE_REPO_NAME, - 'tag': fake_api.FAKE_TAG_NAME, - 'fromSrc': '-', - }, - headers={ - 'Content-Type': 'application/tar', - }, - data=stream, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_import_image_from_image(self): - self.client.import_image( - image=fake_api.FAKE_IMAGE_NAME, - repository=fake_api.FAKE_REPO_NAME, - tag=fake_api.FAKE_TAG_NAME - ) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/create', - params={ - 'repo': fake_api.FAKE_REPO_NAME, - 'tag': fake_api.FAKE_TAG_NAME, - 'fromImage': fake_api.FAKE_IMAGE_NAME - }, - data=None, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_inspect_image(self): - self.client.inspect_image(fake_api.FAKE_IMAGE_NAME) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'images/test_image/json', - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_inspect_image_undefined_id(self): - for arg in None, '', {True: True}: - with pytest.raises(docker.errors.NullResource) as excinfo: - self.client.inspect_image(arg) - - self.assertEqual( - excinfo.value.args[0], 'image or container param is undefined' - ) - - def test_insert_image(self): - try: - self.client.insert(fake_api.FAKE_IMAGE_NAME, - fake_api.FAKE_URL, fake_api.FAKE_PATH) - except docker.errors.DeprecatedMethod: - 
self.assertTrue( - docker.utils.compare_version('1.12', self.client._version) >= 0 - ) - return - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/test_image/insert', - params={ - 'url': fake_api.FAKE_URL, - 'path': fake_api.FAKE_PATH - }, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_push_image(self): - with mock.patch('docker.auth.auth.resolve_authconfig', - fake_resolve_authconfig): - self.client.push(fake_api.FAKE_IMAGE_NAME) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/test_image/push', - params={ - 'tag': None - }, - data='{}', - headers={'Content-Type': 'application/json'}, - stream=False, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_push_image_with_tag(self): - with mock.patch('docker.auth.auth.resolve_authconfig', - fake_resolve_authconfig): - self.client.push( - fake_api.FAKE_IMAGE_NAME, tag=fake_api.FAKE_TAG_NAME - ) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/test_image/push', - params={ - 'tag': fake_api.FAKE_TAG_NAME, - }, - data='{}', - headers={'Content-Type': 'application/json'}, - stream=False, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_push_image_stream(self): - with mock.patch('docker.auth.auth.resolve_authconfig', - fake_resolve_authconfig): - self.client.push(fake_api.FAKE_IMAGE_NAME, stream=True) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/test_image/push', - params={ - 'tag': None - }, - data='{}', - headers={'Content-Type': 'application/json'}, - stream=True, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_tag_image(self): - self.client.tag(fake_api.FAKE_IMAGE_ID, fake_api.FAKE_REPO_NAME) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/e9aa60c60128/tag', - params={ - 'tag': None, - 'repo': 'repo', - 'force': 0 - }, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_tag_image_tag(self): - self.client.tag( - fake_api.FAKE_IMAGE_ID, - fake_api.FAKE_REPO_NAME, - tag=fake_api.FAKE_TAG_NAME - ) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/e9aa60c60128/tag', - params={ - 'tag': 'tag', - 'repo': 'repo', - 'force': 0 - }, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_tag_image_force(self): - self.client.tag( - fake_api.FAKE_IMAGE_ID, fake_api.FAKE_REPO_NAME, force=True) - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/e9aa60c60128/tag', - params={ - 'tag': None, - 'repo': 'repo', - 'force': 1 - }, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_get_image(self): - self.client.get_image(fake_api.FAKE_IMAGE_ID) - - fake_request.assert_called_with( - 'GET', - url_prefix + 'images/e9aa60c60128/get', - stream=True, - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - def test_load_image(self): - self.client.load_image('Byte Stream....') - - fake_request.assert_called_with( - 'POST', - url_prefix + 'images/load', - data='Byte Stream....', - timeout=DEFAULT_TIMEOUT_SECONDS - ) - - ################# - # BUILDER TESTS # - ################# - - def test_build_container(self): - script = io.BytesIO('\n'.join([ - 'FROM busybox', - 'MAINTAINER docker-py', - 'RUN mkdir -p /tmp/test', - 'EXPOSE 8080', - 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' - ' /tmp/silence.tar.gz' - ]).encode('ascii')) - - self.client.build(fileobj=script) - - def test_build_container_pull(self): - script = io.BytesIO('\n'.join([ - 'FROM busybox', - 'MAINTAINER docker-py', - 'RUN mkdir -p /tmp/test', - 'EXPOSE 8080', - 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' - ' /tmp/silence.tar.gz' - 
]).encode('ascii')) - - self.client.build(fileobj=script, pull=True) - - def test_build_container_stream(self): - script = io.BytesIO('\n'.join([ - 'FROM busybox', - 'MAINTAINER docker-py', - 'RUN mkdir -p /tmp/test', - 'EXPOSE 8080', - 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' - ' /tmp/silence.tar.gz' - ]).encode('ascii')) - - self.client.build(fileobj=script, stream=True) - - def test_build_container_custom_context(self): - script = io.BytesIO('\n'.join([ - 'FROM busybox', - 'MAINTAINER docker-py', - 'RUN mkdir -p /tmp/test', - 'EXPOSE 8080', - 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' - ' /tmp/silence.tar.gz' - ]).encode('ascii')) - context = docker.utils.mkbuildcontext(script) - - self.client.build(fileobj=context, custom_context=True) - - def test_build_container_custom_context_gzip(self): - script = io.BytesIO('\n'.join([ - 'FROM busybox', - 'MAINTAINER docker-py', - 'RUN mkdir -p /tmp/test', - 'EXPOSE 8080', - 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' - ' /tmp/silence.tar.gz' - ]).encode('ascii')) - context = docker.utils.mkbuildcontext(script) - gz_context = gzip.GzipFile(fileobj=context) - - self.client.build( - fileobj=gz_context, - custom_context=True, - encoding="gzip" - ) - - def test_build_remote_with_registry_auth(self): - self.client._auth_configs = { - 'https://example.com': { - 'user': 'example', - 'password': 'example', - 'email': 'example@example.com' - } - } - - self.client.build(path='https://github.com/docker-library/mongo') - - def test_build_container_with_named_dockerfile(self): - self.client.build('.', dockerfile='nameddockerfile') - - def test_build_container_with_container_limits(self): - self.client.build('.', container_limits={ - 'memory': 1024 * 1024, - 'cpusetcpus': 1, - 'cpushares': 1000, - 'memswap': 1024 * 1024 * 8 - }) - - def test_build_container_invalid_container_limits(self): - self.assertRaises( - docker.errors.DockerException, - lambda: self.client.build('.', container_limits={ - 'foo': 'bar' - }) - ) - - def test_build_container_from_context_object_with_tarball(self): - base_path = os.path.join( - os.path.dirname(__file__), - 'testdata/context' - ) - tarball_path = os.path.join(base_path, 'ctx.tar.gz') - context = docker.efficiency.create_context_from_path(tarball_path) - try: - self.client.build(context.path, **context.job_params) - if context.job_params['fileobj'] is not None: - context.job_params['fileobj'].close() - except Exception as e: - self.fail('Command should not raise exception: {0}'.format(e)) - - def test_build_container_from_context_object_with_custom_dockerfile(self): - base_path = os.path.abspath(os.path.join( - os.path.dirname(__file__), - 'testdata/context' - )) - custom_dockerfile = 'custom_dockerfile' - try: - context = docker.efficiency.create_context_from_path( - base_path, - dockerfile=custom_dockerfile - ) - self.client.build(context.path, **context.job_params) - except docker.errors.ContextError as ce: - self.fail(ce.message) - except Exception as e: - self.fail('Command should not raise exception: {0}'.format(e)) - - def test_build_container_from_remote_context(self): - ctxurl = 'https://localhost/staging/context.tar.gz' - try: - context = docker.efficiency.create_context_from_path(ctxurl) - self.assertEqual(context.path, ctxurl) - self.assertEqual(context.format, 'remote') - self.client.build(context.path, **context.job_params) - except docker.errors.ContextError as ce: - self.fail(ce.message) - except Exception as e: - self.fail('Command should not 
raise exception: {0}'.format(e)) - - ################### - # VOLUMES TESTS # - ################### - - @base.requires_api_version('1.21') - def test_list_volumes(self): - volumes = self.client.volumes() - self.assertIn('Volumes', volumes) - self.assertEqual(len(volumes['Volumes']), 2) - args = fake_request.call_args - - self.assertEqual(args[0][0], 'GET') - self.assertEqual(args[0][1], url_prefix + 'volumes') - - @base.requires_api_version('1.21') - def test_create_volume(self): - name = 'perfectcherryblossom' - result = self.client.create_volume(name) - self.assertIn('Name', result) - self.assertEqual(result['Name'], name) - self.assertIn('Driver', result) - self.assertEqual(result['Driver'], 'local') - args = fake_request.call_args - - self.assertEqual(args[0][0], 'POST') - self.assertEqual(args[0][1], url_prefix + 'volumes') - self.assertEqual(args[1]['data'], { - 'Name': name, 'Driver': None, 'DriverOpts': None - }) - - @base.requires_api_version('1.21') - def test_create_volume_with_driver(self): - name = 'perfectcherryblossom' - driver_name = 'sshfs' - self.client.create_volume(name, driver=driver_name) - args = fake_request.call_args - - self.assertEqual(args[0][0], 'POST') - self.assertEqual(args[0][1], url_prefix + 'volumes') - self.assertIn('Driver', args[1]['data']) - self.assertEqual(args[1]['data']['Driver'], driver_name) - - @base.requires_api_version('1.21') - def test_create_volume_invalid_opts_type(self): - with pytest.raises(TypeError): - self.client.create_volume( - 'perfectcherryblossom', driver_opts='hello=world' - ) - - with pytest.raises(TypeError): - self.client.create_volume( - 'perfectcherryblossom', driver_opts=['hello=world'] - ) - - with pytest.raises(TypeError): - self.client.create_volume( - 'perfectcherryblossom', driver_opts='' - ) - - @base.requires_api_version('1.21') - def test_inspect_volume(self): - name = 'perfectcherryblossom' - result = self.client.inspect_volume(name) - self.assertIn('Name', result) - self.assertEqual(result['Name'], name) - self.assertIn('Driver', result) - self.assertEqual(result['Driver'], 'local') - args = fake_request.call_args - - self.assertEqual(args[0][0], 'GET') - self.assertEqual(args[0][1], '{0}volumes/{1}'.format(url_prefix, name)) - - @base.requires_api_version('1.21') - def test_remove_volume(self): - name = 'perfectcherryblossom' - result = self.client.remove_volume(name) - self.assertIsNone(result) - args = fake_request.call_args - - self.assertEqual(args[0][0], 'DELETE') - self.assertEqual(args[0][1], '{0}volumes/{1}'.format(url_prefix, name)) - - ####################### - # PY SPECIFIC TESTS # - ####################### - - def test_load_config_no_file(self): - folder = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, folder) - cfg = docker.auth.load_config(folder) - self.assertTrue(cfg is not None) - - def test_load_config(self): - folder = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, folder) - dockercfg_path = os.path.join(folder, '.dockercfg') - with open(dockercfg_path, 'w') as f: - auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') - f.write('auth = {0}\n'.format(auth_)) - f.write('email = sakuya@scarlet.net') - cfg = docker.auth.load_config(dockercfg_path) - self.assertTrue(docker.auth.INDEX_NAME in cfg) - self.assertNotEqual(cfg[docker.auth.INDEX_NAME], None) - cfg = cfg[docker.auth.INDEX_NAME] - self.assertEqual(cfg['username'], 'sakuya') - self.assertEqual(cfg['password'], 'izayoi') - self.assertEqual(cfg['email'], 'sakuya@scarlet.net') - self.assertEqual(cfg.get('auth'), None) - - def 
test_load_config_with_random_name(self): - folder = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, folder) - - dockercfg_path = os.path.join(folder, - '.{0}.dockercfg'.format( - random.randrange(100000))) - registry = 'https://your.private.registry.io' - auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') - config = { - registry: { - 'auth': '{0}'.format(auth_), - 'email': 'sakuya@scarlet.net' - } - } - - with open(dockercfg_path, 'w') as f: - f.write(json.dumps(config)) - - cfg = docker.auth.load_config(dockercfg_path) - self.assertTrue(registry in cfg) - self.assertNotEqual(cfg[registry], None) - cfg = cfg[registry] - self.assertEqual(cfg['username'], 'sakuya') - self.assertEqual(cfg['password'], 'izayoi') - self.assertEqual(cfg['email'], 'sakuya@scarlet.net') - self.assertEqual(cfg.get('auth'), None) - - def test_tar_with_excludes(self): - dirs = [ - 'foo', - 'foo/bar', - 'bar', - ] - - files = [ - 'Dockerfile', - 'Dockerfile.alt', - '.dockerignore', - 'a.py', - 'a.go', - 'b.py', - 'cde.py', - 'foo/a.py', - 'foo/b.py', - 'foo/bar/a.py', - 'bar/a.py', - ] - - exclude = [ - '*.py', - '!b.py', - '!a.go', - 'foo', - 'Dockerfile*', - '.dockerignore', - ] - - expected_names = set([ - 'Dockerfile', - '.dockerignore', - 'a.go', - 'b.py', - 'bar', - 'bar/a.py', - ]) - - base = make_tree(dirs, files) - self.addCleanup(shutil.rmtree, base) - - with docker.utils.tar(base, exclude=exclude) as archive: - tar = tarfile.open(fileobj=archive) - assert sorted(tar.getnames()) == sorted(expected_names) - - def test_tar_with_empty_directory(self): - base = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, base) - for d in ['foo', 'bar']: - os.makedirs(os.path.join(base, d)) - with docker.utils.tar(base) as archive: - tar = tarfile.open(fileobj=archive) - self.assertEqual(sorted(tar.getnames()), ['bar', 'foo']) - - def test_tar_with_file_symlinks(self): - base = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, base) - with open(os.path.join(base, 'foo'), 'w') as f: - f.write("content") - os.makedirs(os.path.join(base, 'bar')) - os.symlink('../foo', os.path.join(base, 'bar/foo')) - with docker.utils.tar(base) as archive: - tar = tarfile.open(fileobj=archive) - self.assertEqual(sorted(tar.getnames()), ['bar', 'bar/foo', 'foo']) - - def test_tar_with_directory_symlinks(self): - base = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, base) - for d in ['foo', 'bar']: - os.makedirs(os.path.join(base, d)) - os.symlink('../foo', os.path.join(base, 'bar/foo')) - with docker.utils.tar(base) as archive: - tar = tarfile.open(fileobj=archive) - self.assertEqual(sorted(tar.getnames()), ['bar', 'bar/foo', 'foo']) - - ####################### - # HOST CONFIG TESTS # - ####################### - - def test_create_host_config_secopt(self): - security_opt = ['apparmor:test_profile'] - result = self.client.create_host_config(security_opt=security_opt) - self.assertIn('SecurityOpt', result) - self.assertEqual(result['SecurityOpt'], security_opt) - - self.assertRaises( - docker.errors.DockerException, self.client.create_host_config, - security_opt='wrong' - ) - - -class StreamTest(Cleanup, base.BaseTestCase): - - def setUp(self): - socket_dir = tempfile.mkdtemp() - self.build_context = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, socket_dir) - self.addCleanup(shutil.rmtree, self.build_context) - self.socket_file = os.path.join(socket_dir, 'test_sock.sock') - self.server_socket = self._setup_socket() - self.stop_server = False - server_thread = threading.Thread(target=self.run_server) - 
server_thread.setDaemon(True) - server_thread.start() - self.response = None - self.request_handler = None - self.addCleanup(server_thread.join) - self.addCleanup(self.stop) - - def stop(self): - self.stop_server = True - - def _setup_socket(self): - server_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - server_sock.bind(self.socket_file) - # Non-blocking mode so that we can shut the test down easily - server_sock.setblocking(0) - server_sock.listen(5) - return server_sock - - def run_server(self): - try: - while not self.stop_server: - try: - connection, client_address = self.server_socket.accept() - except socket.error: - # Probably no connection to accept yet - time.sleep(0.01) - continue - - connection.setblocking(1) - try: - self.request_handler(connection) - finally: - connection.close() - finally: - self.server_socket.close() - - def early_response_sending_handler(self, connection): - data = b'' - headers = None - - connection.sendall(self.response) - while not headers: - data += connection.recv(2048) - parts = data.split(b'\r\n\r\n', 1) - if len(parts) == 2: - headers, data = parts - - mo = re.search(r'Content-Length: ([0-9]+)', headers.decode()) - assert mo - content_length = int(mo.group(1)) - - while True: - if len(data) >= content_length: - break - - data += connection.recv(2048) - - def test_early_stream_response(self): - self.request_handler = self.early_response_sending_handler - lines = [] - for i in range(0, 50): - line = str(i).encode() - lines += [('%x' % len(line)).encode(), line] - lines.append(b'0') - lines.append(b'') - - self.response = ( - b'HTTP/1.1 200 OK\r\n' - b'Transfer-Encoding: chunked\r\n' - b'\r\n' - ) + b'\r\n'.join(lines) - - with docker.Client(base_url="http+unix://" + self.socket_file) \ - as client: - for i in range(5): - try: - stream = client.build( - path=self.build_context, - stream=True - ) - break - except requests.ConnectionError as e: - if i == 4: - raise e - - self.assertEqual(list(stream), [ - str(i).encode() for i in range(50)]) diff --git a/tests/testdata/certs/ca.pem b/tests/testdata/certs/ca.pem deleted file mode 100644 index e69de29..0000000 diff --git a/tests/testdata/certs/cert.pem b/tests/testdata/certs/cert.pem deleted file mode 100644 index e69de29..0000000 diff --git a/tests/testdata/certs/key.pem b/tests/testdata/certs/key.pem deleted file mode 100644 index e69de29..0000000 diff --git a/tests/testdata/context/Dockerfile b/tests/testdata/context/Dockerfile deleted file mode 100644 index d1ceac6..0000000 --- a/tests/testdata/context/Dockerfile +++ /dev/null @@ -1,2 +0,0 @@ -FROM busybox:latest -CMD echo "success" diff --git a/tests/testdata/context/ctx.tar.gz b/tests/testdata/context/ctx.tar.gz deleted file mode 100644 index c14e5b9..0000000 Binary files a/tests/testdata/context/ctx.tar.gz and /dev/null differ diff --git a/tests/testdata/context/custom_dockerfile b/tests/testdata/context/custom_dockerfile deleted file mode 100644 index d1ceac6..0000000 --- a/tests/testdata/context/custom_dockerfile +++ /dev/null @@ -1,2 +0,0 @@ -FROM busybox:latest -CMD echo "success" diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py new file mode 100644 index 0000000..e44e562 --- /dev/null +++ b/tests/unit/api_test.py @@ -0,0 +1,2451 @@ +# Copyright 2013 dotCloud inc. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import datetime +import gzip +import io +import json +import os +import re +import shutil +import signal +import socket +import sys +import tarfile +import tempfile +import threading +import time +import random + +import docker +import docker.efficiency +import requests +import six + +from .. import base +from . import fake_api +from ..helpers import make_tree + +import pytest + +try: + from unittest import mock +except ImportError: + import mock + + +DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS + + +def response(status_code=200, content='', headers=None, reason=None, elapsed=0, + request=None): + res = requests.Response() + res.status_code = status_code + if not isinstance(content, six.binary_type): + content = json.dumps(content).encode('ascii') + res._content = content + res.headers = requests.structures.CaseInsensitiveDict(headers or {}) + res.reason = reason + res.elapsed = datetime.timedelta(elapsed) + res.request = request + return res + + +def fake_resolve_authconfig(authconfig, registry=None): + return None + + +def fake_inspect_container(self, container, tty=False): + return fake_api.get_fake_inspect_container(tty=tty)[1] + + +def fake_inspect_container_tty(self, container): + return fake_inspect_container(self, container, tty=True) + + +def fake_resp(method, url, *args, **kwargs): + key = None + if url in fake_api.fake_responses: + key = url + elif (url, method) in fake_api.fake_responses: + key = (url, method) + if not key: + raise Exception('{0} {1}'.format(method, url)) + status_code, content = fake_api.fake_responses[key]() + return response(status_code=status_code, content=content) + + +fake_request = mock.Mock(side_effect=fake_resp) + + +def fake_get(self, url, *args, **kwargs): + return fake_request('GET', url, *args, **kwargs) + + +def fake_post(self, url, *args, **kwargs): + return fake_request('POST', url, *args, **kwargs) + + +def fake_put(self, url, *args, **kwargs): + return fake_request('PUT', url, *args, **kwargs) + + +def fake_delete(self, url, *args, **kwargs): + return fake_request('DELETE', url, *args, **kwargs) + +url_prefix = 'http+docker://localunixsocket/v{0}/'.format( + docker.constants.DEFAULT_DOCKER_API_VERSION) + + +@mock.patch.multiple('docker.Client', get=fake_get, post=fake_post, + put=fake_put, delete=fake_delete) +class DockerClientTest(base.Cleanup, base.BaseTestCase): + def setUp(self): + self.client = docker.Client() + # Force-clear authconfig to avoid tampering with the tests + self.client._cfg = {'Configs': {}} + + def tearDown(self): + self.client.close() + + def assertIn(self, object, collection): + if six.PY2 and sys.version_info[1] <= 6: + return self.assertTrue(object in collection) + return super(DockerClientTest, self).assertIn(object, collection) + + def base_create_payload(self, img='busybox', cmd=None): + if not cmd: + cmd = ['true'] + return {"Tty": False, "Image": img, "Cmd": cmd, + "AttachStdin": False, + "AttachStderr": True, "AttachStdout": True, + "StdinOnce": False, + "OpenStdin": False, "NetworkDisabled": False, + } + + def test_ctor(self): + with 
pytest.raises(docker.errors.DockerException) as excinfo: + docker.Client(version=1.12) + + self.assertEqual( + str(excinfo.value), + 'Version parameter must be a string or None. Found float' + ) + + def test_url_valid_resource(self): + url = self.client._url('/hello/{0}/world', 'somename') + self.assertEqual( + url, '{0}{1}'.format(url_prefix, 'hello/somename/world') + ) + + url = self.client._url('/hello/{0}/world', '/some?name') + self.assertEqual( + url, '{0}{1}'.format(url_prefix, 'hello/%2Fsome%3Fname/world') + ) + + def test_url_invalid_resource(self): + with pytest.raises(ValueError): + self.client._url('/hello/{0}/world', ['sakuya', 'izayoi']) + + def test_url_no_resource(self): + url = self.client._url('/simple') + self.assertEqual(url, '{0}{1}'.format(url_prefix, 'simple')) + + url = self.client._url('/simple', None) + self.assertEqual(url, '{0}{1}'.format(url_prefix, 'simple')) + + ######################### + # INFORMATION TESTS # + ######################### + def test_version(self): + self.client.version() + + fake_request.assert_called_with( + 'GET', + url_prefix + 'version', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_retrieve_server_version(self): + client = docker.Client(version="auto") + self.assertTrue(isinstance(client._version, six.string_types)) + self.assertFalse(client._version == "auto") + client.close() + + def test_auto_retrieve_server_version(self): + version = self.client._retrieve_server_version() + self.assertTrue(isinstance(version, six.string_types)) + + def test_info(self): + self.client.info() + + fake_request.assert_called_with( + 'GET', + url_prefix + 'info', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_search(self): + self.client.search('busybox') + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/search', + params={'term': 'busybox'}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_image_viz(self): + with pytest.raises(Exception): + self.client.images('busybox', viz=True) + self.fail('Viz output should not be supported!') + + def test_events(self): + self.client.events() + + fake_request.assert_called_with( + 'GET', + url_prefix + 'events', + params={'since': None, 'until': None, 'filters': None}, + stream=True + ) + + def test_events_with_since_until(self): + ts = 1356048000 + now = datetime.datetime.utcfromtimestamp(ts) + since = now - datetime.timedelta(seconds=10) + until = now + datetime.timedelta(seconds=10) + + self.client.events(since=since, until=until) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'events', + params={ + 'since': ts - 10, + 'until': ts + 10, + 'filters': None + }, + stream=True + ) + + def test_events_with_filters(self): + filters = {'event': ['die', 'stop'], + 'container': fake_api.FAKE_CONTAINER_ID} + + self.client.events(filters=filters) + + expected_filters = docker.utils.convert_filters(filters) + fake_request.assert_called_with( + 'GET', + url_prefix + 'events', + params={ + 'since': None, + 'until': None, + 'filters': expected_filters + }, + stream=True + ) + + ################### + # LISTING TESTS # + ################### + + def test_images(self): + self.client.images(all=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/json', + params={'filter': None, 'only_ids': 0, 'all': 1}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_images_quiet(self): + self.client.images(all=True, quiet=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/json', + params={'filter': None, 'only_ids': 1, 'all': 1}, + 
timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_image_ids(self): + self.client.images(quiet=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/json', + params={'filter': None, 'only_ids': 1, 'all': 0}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_images_filters(self): + self.client.images(filters={'dangling': True}) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/json', + params={'filter': None, 'only_ids': 0, 'all': 0, + 'filters': '{"dangling": ["true"]}'}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_list_containers(self): + self.client.containers(all=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/json', + params={ + 'all': 1, + 'since': None, + 'size': 0, + 'limit': -1, + 'trunc_cmd': 0, + 'before': None + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + ##################### + # CONTAINER TESTS # + ##################### + + def test_create_container(self): + self.client.create_container('busybox', 'true') + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", "Cmd": ["true"], + "AttachStdin": false, + "AttachStderr": true, "AttachStdout": true, + "StdinOnce": false, + "OpenStdin": false, "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_binds(self): + mount_dest = '/mnt' + + self.client.create_container('busybox', ['ls', mount_dest], + volumes=[mount_dest]) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls", "/mnt"], "AttachStdin": false, + "Volumes": {"/mnt": {}}, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_volume_string(self): + mount_dest = '/mnt' + + self.client.create_container('busybox', ['ls', mount_dest], + volumes=mount_dest) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls", "/mnt"], "AttachStdin": false, + "Volumes": {"/mnt": {}}, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_ports(self): + self.client.create_container('busybox', 'ls', + ports=[1111, (2222, 'udp'), (3333,)]) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls"], "AttachStdin": false, + "ExposedPorts": { + "1111/tcp": {}, + "2222/udp": {}, + "3333/tcp": {} + }, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_entrypoint(self): + self.client.create_container('busybox', 'hello', + entrypoint='cowsay entry') + + args = 
fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["hello"], "AttachStdin": false, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false, + "Entrypoint": ["cowsay", "entry"]}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_cpu_shares(self): + self.client.create_container('busybox', 'ls', + cpu_shares=5) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls"], "AttachStdin": false, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false, + "CpuShares": 5}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_cpuset(self): + self.client.create_container('busybox', 'ls', + cpuset='0,1') + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls"], "AttachStdin": false, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false, + "Cpuset": "0,1", + "CpusetCpus": "0,1"}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_cgroup_parent(self): + self.client.create_container( + 'busybox', 'ls', host_config=self.client.create_host_config( + cgroup_parent='test' + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + data = json.loads(args[1]['data']) + self.assertIn('HostConfig', data) + self.assertIn('CgroupParent', data['HostConfig']) + self.assertEqual(data['HostConfig']['CgroupParent'], 'test') + + def test_create_container_with_working_dir(self): + self.client.create_container('busybox', 'ls', + working_dir='/root') + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", + "Cmd": ["ls"], "AttachStdin": false, + "AttachStderr": true, + "AttachStdout": true, "OpenStdin": false, + "StdinOnce": false, + "NetworkDisabled": false, + "WorkingDir": "/root"}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_stdin_open(self): + self.client.create_container('busybox', 'true', stdin_open=True) + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", "Cmd": ["true"], + "AttachStdin": true, + "AttachStderr": true, "AttachStdout": true, + "StdinOnce": true, + "OpenStdin": true, "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_with_volumes_from(self): + vol_names = ['foo', 'bar'] + try: + self.client.create_container('busybox', 'true', + volumes_from=vol_names) + except docker.errors.DockerException: + self.assertTrue( + docker.utils.compare_version('1.10', 
self.client._version) >= 0 + ) + return + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data'])['VolumesFrom'], + ','.join(vol_names)) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_create_container_empty_volumes_from(self): + self.client.create_container('busybox', 'true', volumes_from=[]) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertTrue('VolumesFrom' not in data) + + def test_create_named_container(self): + self.client.create_container('busybox', 'true', + name='marisa-kirisame') + + args = fake_request.call_args + self.assertEqual(args[0][1], + url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), + json.loads(''' + {"Tty": false, "Image": "busybox", "Cmd": ["true"], + "AttachStdin": false, + "AttachStderr": true, "AttachStdout": true, + "StdinOnce": false, + "OpenStdin": false, "NetworkDisabled": false}''')) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual(args[1]['params'], {'name': 'marisa-kirisame'}) + + def test_create_container_with_mem_limit_as_int(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + mem_limit=128.0 + ) + ) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertEqual(data['HostConfig']['Memory'], 128.0) + + def test_create_container_with_mem_limit_as_string(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + mem_limit='128' + ) + ) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertEqual(data['HostConfig']['Memory'], 128.0) + + def test_create_container_with_mem_limit_as_string_with_k_unit(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + mem_limit='128k' + ) + ) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024) + + def test_create_container_with_mem_limit_as_string_with_m_unit(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + mem_limit='128m' + ) + ) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertEqual(data['HostConfig']['Memory'], 128.0 * 1024 * 1024) + + def test_create_container_with_mem_limit_as_string_with_g_unit(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + mem_limit='128g' + ) + ) + + args = fake_request.call_args + data = json.loads(args[1]['data']) + self.assertEqual( + data['HostConfig']['Memory'], 128.0 * 1024 * 1024 * 1024 + ) + + def test_create_container_with_mem_limit_as_string_with_wrong_value(self): + self.assertRaises( + docker.errors.DockerException, + self.client.create_host_config, mem_limit='128p' + ) + + self.assertRaises( + docker.errors.DockerException, + self.client.create_host_config, mem_limit='1f28' + ) + + def test_start_container(self): + self.client.start(fake_api.FAKE_CONTAINER_ID) + + args = fake_request.call_args + self.assertEqual( + args[0][1], + url_prefix + 'containers/3cc2351ab11b/start' + ) + self.assertEqual(json.loads(args[1]['data']), {}) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def 
test_start_container_none(self): + with pytest.raises(ValueError) as excinfo: + self.client.start(container=None) + + self.assertEqual( + str(excinfo.value), + 'image or container param is undefined', + ) + + with pytest.raises(ValueError) as excinfo: + self.client.start(None) + + self.assertEqual( + str(excinfo.value), + 'image or container param is undefined', + ) + + def test_start_container_regression_573(self): + self.client.start(**{'container': fake_api.FAKE_CONTAINER_ID}) + + def test_create_container_with_lxc_conf(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + lxc_conf={'lxc.conf.k': 'lxc.conf.value'} + ) + ) + + args = fake_request.call_args + self.assertEqual( + args[0][1], + url_prefix + 'containers/create' + ) + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['LxcConf'] = [ + {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} + ] + + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], + {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_lxc_conf_compat(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['LxcConf'] = [ + {"Value": "lxc.conf.value", "Key": "lxc.conf.k"} + ] + self.assertEqual( + json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_binds_ro(self): + mount_dest = '/mnt' + mount_origin = '/tmp' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + binds={mount_origin: { + "bind": mount_dest, + "ro": True + }} + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:ro"] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_binds_rw(self): + mount_dest = '/mnt' + mount_origin = '/tmp' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + binds={mount_origin: { + "bind": mount_dest, + "ro": False + }} + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:rw"] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def 
test_create_container_with_binds_mode(self): + mount_dest = '/mnt' + mount_origin = '/tmp' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + binds={mount_origin: { + "bind": mount_dest, + "mode": "z", + }} + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Binds'] = ["/tmp:/mnt:z"] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_binds_mode_and_ro_error(self): + with pytest.raises(ValueError): + mount_dest = '/mnt' + mount_origin = '/tmp' + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + binds={mount_origin: { + "bind": mount_dest, + "mode": "z", + "ro": True, + }} + ) + ) + + def test_create_container_with_binds_list(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + binds=[ + "/tmp:/mnt/1:ro", + "/tmp:/mnt/2", + ], + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Binds'] = [ + "/tmp:/mnt/1:ro", + "/tmp:/mnt/2", + ] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_port_binds(self): + self.maxDiff = None + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + port_bindings={ + 1111: None, + 2222: 2222, + '3333/udp': (3333,), + 4444: ('127.0.0.1',), + 5555: ('127.0.0.1', 5555), + 6666: [('127.0.0.1',), ('192.168.0.1',)] + } + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + data = json.loads(args[1]['data']) + port_bindings = data['HostConfig']['PortBindings'] + self.assertTrue('1111/tcp' in port_bindings) + self.assertTrue('2222/tcp' in port_bindings) + self.assertTrue('3333/udp' in port_bindings) + self.assertTrue('4444/tcp' in port_bindings) + self.assertTrue('5555/tcp' in port_bindings) + self.assertTrue('6666/tcp' in port_bindings) + self.assertEqual( + [{"HostPort": "", "HostIp": ""}], + port_bindings['1111/tcp'] + ) + self.assertEqual( + [{"HostPort": "2222", "HostIp": ""}], + port_bindings['2222/tcp'] + ) + self.assertEqual( + [{"HostPort": "3333", "HostIp": ""}], + port_bindings['3333/udp'] + ) + self.assertEqual( + [{"HostPort": "", "HostIp": "127.0.0.1"}], + port_bindings['4444/tcp'] + ) + self.assertEqual( + [{"HostPort": "5555", "HostIp": "127.0.0.1"}], + port_bindings['5555/tcp'] + ) + self.assertEqual(len(port_bindings['6666/tcp']), 2) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_mac_address(self): + mac_address_expected = "02:42:ac:11:00:0a" + + container = self.client.create_container( + 'busybox', ['sleep', '60'], mac_address=mac_address_expected) + + res = 
self.client.inspect_container(container['Id']) + self.assertEqual(mac_address_expected, + res['NetworkSettings']['MacAddress']) + + def test_create_container_with_links(self): + link_path = 'path' + alias = 'alias' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + links={link_path: alias} + ) + ) + + args = fake_request.call_args + self.assertEqual( + args[0][1], url_prefix + 'containers/create' + ) + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Links'] = ['path:alias'] + + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + + def test_create_container_with_multiple_links(self): + link_path = 'path' + alias = 'alias' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + links={ + link_path + '1': alias + '1', + link_path + '2': alias + '2' + } + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Links'] = [ + 'path1:alias1', 'path2:alias2' + ] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + + def test_create_container_with_links_as_list_of_tuples(self): + link_path = 'path' + alias = 'alias' + + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + links=[(link_path, alias)] + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Links'] = ['path:alias'] + + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + + def test_create_container_privileged(self): + self.client.create_container( + 'busybox', 'true', + host_config=self.client.create_host_config(privileged=True) + ) + + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Privileged'] = True + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_start_container_with_lxc_conf(self): + def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, + lxc_conf={'lxc.conf.k': 'lxc.conf.value'} + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_lxc_conf_compat(self): + def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, + lxc_conf=[{'Key': 'lxc.conf.k', 'Value': 'lxc.conf.value'}] + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_binds_ro(self): + def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, binds={ + '/tmp': { + "bind": '/mnt', + "ro": True + } + } + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_binds_rw(self): + def 
call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, binds={ + '/tmp': {"bind": '/mnt', "ro": False} + } + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_port_binds(self): + self.maxDiff = None + + def call_start(): + self.client.start(fake_api.FAKE_CONTAINER_ID, port_bindings={ + 1111: None, + 2222: 2222, + '3333/udp': (3333,), + 4444: ('127.0.0.1',), + 5555: ('127.0.0.1', 5555), + 6666: [('127.0.0.1',), ('192.168.0.1',)] + }) + + pytest.deprecated_call(call_start) + + def test_start_container_with_links(self): + def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, links={'path': 'alias'} + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_multiple_links(self): + def call_start(): + self.client.start( + fake_api.FAKE_CONTAINER_ID, + links={ + 'path1': 'alias1', + 'path2': 'alias2' + } + ) + + pytest.deprecated_call(call_start) + + def test_start_container_with_links_as_list_of_tuples(self): + def call_start(): + self.client.start(fake_api.FAKE_CONTAINER_ID, + links=[('path', 'alias')]) + + pytest.deprecated_call(call_start) + + def test_start_container_privileged(self): + def call_start(): + self.client.start(fake_api.FAKE_CONTAINER_ID, privileged=True) + + pytest.deprecated_call(call_start) + + def test_start_container_with_dict_instead_of_id(self): + self.client.start({'Id': fake_api.FAKE_CONTAINER_ID}) + + args = fake_request.call_args + self.assertEqual( + args[0][1], + url_prefix + 'containers/3cc2351ab11b/start' + ) + self.assertEqual(json.loads(args[1]['data']), {}) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_restart_policy(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + restart_policy={ + "Name": "always", + "MaximumRetryCount": 0 + } + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['RestartPolicy'] = { + "MaximumRetryCount": 0, "Name": "always" + } + self.assertEqual(json.loads(args[1]['data']), expected_payload) + + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_added_capabilities(self): + self.client.create_container( + 'busybox', 'true', + host_config=self.client.create_host_config(cap_add=['MKNOD']) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['CapAdd'] = ['MKNOD'] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_dropped_capabilities(self): + self.client.create_container( + 'busybox', 'true', + host_config=self.client.create_host_config(cap_drop=['MKNOD']) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = 
self.client.create_host_config() + expected_payload['HostConfig']['CapDrop'] = ['MKNOD'] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_devices(self): + self.client.create_container( + 'busybox', 'true', host_config=self.client.create_host_config( + devices=['/dev/sda:/dev/xvda:rwm', + '/dev/sdb:/dev/xvdb', + '/dev/sdc'] + ) + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Devices'] = [ + {'CgroupPermissions': 'rwm', + 'PathInContainer': '/dev/xvda', + 'PathOnHost': '/dev/sda'}, + {'CgroupPermissions': 'rwm', + 'PathInContainer': '/dev/xvdb', + 'PathOnHost': '/dev/sdb'}, + {'CgroupPermissions': 'rwm', + 'PathInContainer': '/dev/sdc', + 'PathOnHost': '/dev/sdc'} + ] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_labels_dict(self): + labels_dict = { + six.text_type('foo'): six.text_type('1'), + six.text_type('bar'): six.text_type('2'), + } + + self.client.create_container( + 'busybox', 'true', + labels=labels_dict, + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_labels_list(self): + labels_list = [ + six.text_type('foo'), + six.text_type('bar'), + ] + labels_dict = { + six.text_type('foo'): six.text_type(), + six.text_type('bar'): six.text_type(), + } + + self.client.create_container( + 'busybox', 'true', + labels=labels_list, + ) + + args = fake_request.call_args + self.assertEqual(args[0][1], url_prefix + 'containers/create') + self.assertEqual(json.loads(args[1]['data'])['Labels'], labels_dict) + self.assertEqual( + args[1]['headers'], {'Content-Type': 'application/json'} + ) + self.assertEqual( + args[1]['timeout'], DEFAULT_TIMEOUT_SECONDS + ) + + def test_create_container_with_named_volume(self): + mount_dest = '/mnt' + volume_name = 'name' + + self.client.create_container( + 'busybox', 'true', + host_config=self.client.create_host_config( + binds={volume_name: { + "bind": mount_dest, + "ro": False + }}), + volume_driver='foodriver', + ) + + args = fake_request.call_args + self.assertEqual( + args[0][1], url_prefix + 'containers/create' + ) + expected_payload = self.base_create_payload() + expected_payload['VolumeDriver'] = 'foodriver' + expected_payload['HostConfig'] = self.client.create_host_config() + expected_payload['HostConfig']['Binds'] = ["name:/mnt:rw"] + self.assertEqual(json.loads(args[1]['data']), expected_payload) + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + self.assertEqual( + args[1]['timeout'], + DEFAULT_TIMEOUT_SECONDS + ) + + def test_resize_container(self): + self.client.resize( + {'Id': fake_api.FAKE_CONTAINER_ID}, + height=15, + width=120 + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 
'containers/3cc2351ab11b/resize', + params={'h': 15, 'w': 120}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_rename_container(self): + self.client.rename( + {'Id': fake_api.FAKE_CONTAINER_ID}, + name='foobar' + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/rename', + params={'name': 'foobar'}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_wait(self): + self.client.wait(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/wait', + timeout=None + ) + + def test_wait_with_dict_instead_of_id(self): + self.client.wait({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/wait', + timeout=None + ) + + def _socket_path_for_client_session(self, client): + socket_adapter = client.get_adapter('http+docker://') + return socket_adapter.socket_path + + def test_url_compatibility_unix(self): + c = docker.Client(base_url="unix://socket") + + assert self._socket_path_for_client_session(c) == '/socket' + + def test_url_compatibility_unix_triple_slash(self): + c = docker.Client(base_url="unix:///socket") + + assert self._socket_path_for_client_session(c) == '/socket' + + def test_url_compatibility_http_unix_triple_slash(self): + c = docker.Client(base_url="http+unix:///socket") + + assert self._socket_path_for_client_session(c) == '/socket' + + def test_url_compatibility_http(self): + c = docker.Client(base_url="http://hostname:1234") + + assert c.base_url == "http://hostname:1234" + + def test_url_compatibility_tcp(self): + c = docker.Client(base_url="tcp://hostname:1234") + + assert c.base_url == "http://hostname:1234" + + def test_logs(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + logs = self.client.logs(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=False + ) + + self.assertEqual( + logs, + 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') + ) + + def test_logs_with_dict_instead_of_id(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + logs = self.client.logs({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=False + ) + + self.assertEqual( + logs, + 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') + ) + + def test_log_streaming(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=True + ) + + def test_log_tail(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, + tail=10) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, + 'tail': 10}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=False + ) + + def test_log_tty(self): + m = 
mock.Mock() + with mock.patch('docker.Client.inspect_container', + fake_inspect_container_tty): + with mock.patch('docker.Client._stream_raw_result', + m): + self.client.logs(fake_api.FAKE_CONTAINER_ID, + stream=True) + + self.assertTrue(m.called) + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=True + ) + + def test_diff(self): + self.client.diff(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/changes', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_diff_with_dict_instead_of_id(self): + self.client.diff({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/changes', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_port(self): + self.client.port({'Id': fake_api.FAKE_CONTAINER_ID}, 1111) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/json', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_stop_container(self): + timeout = 2 + + self.client.stop(fake_api.FAKE_CONTAINER_ID, timeout=timeout) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/stop', + params={'t': timeout}, + timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) + ) + + def test_stop_container_with_dict_instead_of_id(self): + timeout = 2 + + self.client.stop({'Id': fake_api.FAKE_CONTAINER_ID}, + timeout=timeout) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/stop', + params={'t': timeout}, + timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) + ) + + def test_exec_create(self): + self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1']) + + args = fake_request.call_args + self.assertEqual( + 'POST', + args[0][0], url_prefix + 'containers/{0}/exec'.format( + fake_api.FAKE_CONTAINER_ID + ) + ) + + self.assertEqual( + json.loads(args[1]['data']), { + 'Tty': False, + 'AttachStdout': True, + 'Container': fake_api.FAKE_CONTAINER_ID, + 'Cmd': ['ls', '-1'], + 'Privileged': False, + 'AttachStdin': False, + 'AttachStderr': True, + 'User': '' + } + ) + + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_exec_start(self): + self.client.exec_start(fake_api.FAKE_EXEC_ID) + + args = fake_request.call_args + self.assertEqual( + args[0][1], url_prefix + 'exec/{0}/start'.format( + fake_api.FAKE_EXEC_ID + ) + ) + + self.assertEqual( + json.loads(args[1]['data']), { + 'Tty': False, + 'Detach': False, + } + ) + + self.assertEqual(args[1]['headers'], + {'Content-Type': 'application/json'}) + + def test_exec_inspect(self): + self.client.exec_inspect(fake_api.FAKE_EXEC_ID) + + args = fake_request.call_args + self.assertEqual( + args[0][1], url_prefix + 'exec/{0}/json'.format( + fake_api.FAKE_EXEC_ID + ) + ) + + def test_exec_resize(self): + self.client.exec_resize(fake_api.FAKE_EXEC_ID, height=20, width=60) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'exec/{0}/resize'.format(fake_api.FAKE_EXEC_ID), + params={'h': 20, 'w': 60}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_pause_container(self): + self.client.pause(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/pause', + timeout=(DEFAULT_TIMEOUT_SECONDS) + ) + + def test_unpause_container(self): + self.client.unpause(fake_api.FAKE_CONTAINER_ID) + + 
fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/unpause', + timeout=(DEFAULT_TIMEOUT_SECONDS) + ) + + def test_kill_container(self): + self.client.kill(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/kill', + params={}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_kill_container_with_dict_instead_of_id(self): + self.client.kill({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/kill', + params={}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_kill_container_with_signal(self): + self.client.kill(fake_api.FAKE_CONTAINER_ID, signal=signal.SIGTERM) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/kill', + params={'signal': signal.SIGTERM}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_restart_container(self): + self.client.restart(fake_api.FAKE_CONTAINER_ID, timeout=2) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/restart', + params={'t': 2}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_restart_container_with_dict_instead_of_id(self): + self.client.restart({'Id': fake_api.FAKE_CONTAINER_ID}, timeout=2) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'containers/3cc2351ab11b/restart', + params={'t': 2}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_remove_container(self): + self.client.remove_container(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'DELETE', + url_prefix + 'containers/3cc2351ab11b', + params={'v': False, 'link': False, 'force': False}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_remove_container_with_dict_instead_of_id(self): + self.client.remove_container({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'DELETE', + url_prefix + 'containers/3cc2351ab11b', + params={'v': False, 'link': False, 'force': False}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_remove_link(self): + self.client.remove_container(fake_api.FAKE_CONTAINER_ID, link=True) + + fake_request.assert_called_with( + 'DELETE', + url_prefix + 'containers/3cc2351ab11b', + params={'v': False, 'link': True, 'force': False}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_export(self): + self.client.export(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/export', + stream=True, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_export_with_dict_instead_of_id(self): + self.client.export({'Id': fake_api.FAKE_CONTAINER_ID}) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/export', + stream=True, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_inspect_container(self): + self.client.inspect_container(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/json', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_inspect_container_undefined_id(self): + for arg in None, '', {True: True}: + with pytest.raises(docker.errors.NullResource) as excinfo: + self.client.inspect_container(arg) + + self.assertEqual( + excinfo.value.args[0], 'image or container param is undefined' + ) + + def test_container_stats(self): + self.client.stats(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/stats', + timeout=60, + stream=True + ) + + ################## + # IMAGES TESTS # 
+ ################## + + def test_pull(self): + self.client.pull('joffrey/test001') + + args = fake_request.call_args + self.assertEqual( + args[0][1], + url_prefix + 'images/create' + ) + self.assertEqual( + args[1]['params'], + {'tag': None, 'fromImage': 'joffrey/test001'} + ) + self.assertFalse(args[1]['stream']) + + def test_pull_stream(self): + self.client.pull('joffrey/test001', stream=True) + + args = fake_request.call_args + self.assertEqual( + args[0][1], + url_prefix + 'images/create' + ) + self.assertEqual( + args[1]['params'], + {'tag': None, 'fromImage': 'joffrey/test001'} + ) + self.assertTrue(args[1]['stream']) + + def test_commit(self): + self.client.commit(fake_api.FAKE_CONTAINER_ID) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'commit', + data='{}', + headers={'Content-Type': 'application/json'}, + params={ + 'repo': None, + 'comment': None, + 'tag': None, + 'container': '3cc2351ab11b', + 'author': None + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_remove_image(self): + self.client.remove_image(fake_api.FAKE_IMAGE_ID) + + fake_request.assert_called_with( + 'DELETE', + url_prefix + 'images/e9aa60c60128', + params={'force': False, 'noprune': False}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_image_history(self): + self.client.history(fake_api.FAKE_IMAGE_NAME) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/test_image/history', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_import_image(self): + self.client.import_image( + fake_api.FAKE_TARBALL_PATH, + repository=fake_api.FAKE_REPO_NAME, + tag=fake_api.FAKE_TAG_NAME + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/create', + params={ + 'repo': fake_api.FAKE_REPO_NAME, + 'tag': fake_api.FAKE_TAG_NAME, + 'fromSrc': fake_api.FAKE_TARBALL_PATH + }, + data=None, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_import_image_from_bytes(self): + stream = (i for i in range(0, 100)) + + self.client.import_image( + stream, + repository=fake_api.FAKE_REPO_NAME, + tag=fake_api.FAKE_TAG_NAME + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/create', + params={ + 'repo': fake_api.FAKE_REPO_NAME, + 'tag': fake_api.FAKE_TAG_NAME, + 'fromSrc': '-', + }, + headers={ + 'Content-Type': 'application/tar', + }, + data=stream, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_import_image_from_image(self): + self.client.import_image( + image=fake_api.FAKE_IMAGE_NAME, + repository=fake_api.FAKE_REPO_NAME, + tag=fake_api.FAKE_TAG_NAME + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/create', + params={ + 'repo': fake_api.FAKE_REPO_NAME, + 'tag': fake_api.FAKE_TAG_NAME, + 'fromImage': fake_api.FAKE_IMAGE_NAME + }, + data=None, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_inspect_image(self): + self.client.inspect_image(fake_api.FAKE_IMAGE_NAME) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/test_image/json', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_inspect_image_undefined_id(self): + for arg in None, '', {True: True}: + with pytest.raises(docker.errors.NullResource) as excinfo: + self.client.inspect_image(arg) + + self.assertEqual( + excinfo.value.args[0], 'image or container param is undefined' + ) + + def test_insert_image(self): + try: + self.client.insert(fake_api.FAKE_IMAGE_NAME, + fake_api.FAKE_URL, fake_api.FAKE_PATH) + except docker.errors.DeprecatedMethod: + self.assertTrue( + docker.utils.compare_version('1.12', self.client._version) >= 0 + ) + return + + 
fake_request.assert_called_with( + 'POST', + url_prefix + 'images/test_image/insert', + params={ + 'url': fake_api.FAKE_URL, + 'path': fake_api.FAKE_PATH + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_push_image(self): + with mock.patch('docker.auth.auth.resolve_authconfig', + fake_resolve_authconfig): + self.client.push(fake_api.FAKE_IMAGE_NAME) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/test_image/push', + params={ + 'tag': None + }, + data='{}', + headers={'Content-Type': 'application/json'}, + stream=False, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_push_image_with_tag(self): + with mock.patch('docker.auth.auth.resolve_authconfig', + fake_resolve_authconfig): + self.client.push( + fake_api.FAKE_IMAGE_NAME, tag=fake_api.FAKE_TAG_NAME + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/test_image/push', + params={ + 'tag': fake_api.FAKE_TAG_NAME, + }, + data='{}', + headers={'Content-Type': 'application/json'}, + stream=False, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_push_image_stream(self): + with mock.patch('docker.auth.auth.resolve_authconfig', + fake_resolve_authconfig): + self.client.push(fake_api.FAKE_IMAGE_NAME, stream=True) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/test_image/push', + params={ + 'tag': None + }, + data='{}', + headers={'Content-Type': 'application/json'}, + stream=True, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_tag_image(self): + self.client.tag(fake_api.FAKE_IMAGE_ID, fake_api.FAKE_REPO_NAME) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/e9aa60c60128/tag', + params={ + 'tag': None, + 'repo': 'repo', + 'force': 0 + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_tag_image_tag(self): + self.client.tag( + fake_api.FAKE_IMAGE_ID, + fake_api.FAKE_REPO_NAME, + tag=fake_api.FAKE_TAG_NAME + ) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/e9aa60c60128/tag', + params={ + 'tag': 'tag', + 'repo': 'repo', + 'force': 0 + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_tag_image_force(self): + self.client.tag( + fake_api.FAKE_IMAGE_ID, fake_api.FAKE_REPO_NAME, force=True) + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/e9aa60c60128/tag', + params={ + 'tag': None, + 'repo': 'repo', + 'force': 1 + }, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_get_image(self): + self.client.get_image(fake_api.FAKE_IMAGE_ID) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/e9aa60c60128/get', + stream=True, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_load_image(self): + self.client.load_image('Byte Stream....') + + fake_request.assert_called_with( + 'POST', + url_prefix + 'images/load', + data='Byte Stream....', + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + ################# + # BUILDER TESTS # + ################# + + def test_build_container(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + + self.client.build(fileobj=script) + + def test_build_container_pull(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + + self.client.build(fileobj=script, pull=True) + + def 
test_build_container_stream(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + + self.client.build(fileobj=script, stream=True) + + def test_build_container_custom_context(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + context = docker.utils.mkbuildcontext(script) + + self.client.build(fileobj=context, custom_context=True) + + def test_build_container_custom_context_gzip(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'MAINTAINER docker-py', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + context = docker.utils.mkbuildcontext(script) + gz_context = gzip.GzipFile(fileobj=context) + + self.client.build( + fileobj=gz_context, + custom_context=True, + encoding="gzip" + ) + + def test_build_remote_with_registry_auth(self): + self.client._auth_configs = { + 'https://example.com': { + 'user': 'example', + 'password': 'example', + 'email': 'example@example.com' + } + } + + self.client.build(path='https://github.com/docker-library/mongo') + + def test_build_container_with_named_dockerfile(self): + self.client.build('.', dockerfile='nameddockerfile') + + def test_build_container_with_container_limits(self): + self.client.build('.', container_limits={ + 'memory': 1024 * 1024, + 'cpusetcpus': 1, + 'cpushares': 1000, + 'memswap': 1024 * 1024 * 8 + }) + + def test_build_container_invalid_container_limits(self): + self.assertRaises( + docker.errors.DockerException, + lambda: self.client.build('.', container_limits={ + 'foo': 'bar' + }) + ) + + def test_build_container_from_context_object_with_tarball(self): + base_path = os.path.join( + os.path.dirname(__file__), + 'testdata/context' + ) + tarball_path = os.path.join(base_path, 'ctx.tar.gz') + context = docker.efficiency.create_context_from_path(tarball_path) + try: + self.client.build(context.path, **context.job_params) + if context.job_params['fileobj'] is not None: + context.job_params['fileobj'].close() + except Exception as e: + self.fail('Command should not raise exception: {0}'.format(e)) + + def test_build_container_from_context_object_with_custom_dockerfile(self): + base_path = os.path.abspath(os.path.join( + os.path.dirname(__file__), + 'testdata/context' + )) + custom_dockerfile = 'custom_dockerfile' + try: + context = docker.efficiency.create_context_from_path( + base_path, + dockerfile=custom_dockerfile + ) + self.client.build(context.path, **context.job_params) + except docker.errors.ContextError as ce: + self.fail(ce.message) + except Exception as e: + self.fail('Command should not raise exception: {0}'.format(e)) + + def test_build_container_from_remote_context(self): + ctxurl = 'https://localhost/staging/context.tar.gz' + try: + context = docker.efficiency.create_context_from_path(ctxurl) + self.assertEqual(context.path, ctxurl) + self.assertEqual(context.format, 'remote') + self.client.build(context.path, **context.job_params) + except docker.errors.ContextError as ce: + self.fail(ce.message) + except Exception as e: + self.fail('Command should not raise exception: {0}'.format(e)) + + ################### + # VOLUMES TESTS # 
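# Aside - illustrative sketch, not part of this patch. The volume tests
# below exercise the /volumes endpoints introduced with API version 1.21
# (hence the requires_api_version guards). Against a real daemon that
# supports that version, the same calls look roughly like this (assumes a
# reachable daemon; not runnable in the mocked unit-test environment):
import docker

client = docker.Client(base_url='unix:///var/run/docker.sock', version='1.21')
vol = client.create_volume(name='perfectcherryblossom', driver='local')
print(vol)                          # {'Name': ..., 'Driver': 'local', 'Mountpoint': ...}
print(client.volumes()['Volumes'])  # includes the volume just created
client.remove_volume('perfectcherryblossom')
# (end of aside)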
+ ################### + + @base.requires_api_version('1.21') + def test_list_volumes(self): + volumes = self.client.volumes() + self.assertIn('Volumes', volumes) + self.assertEqual(len(volumes['Volumes']), 2) + args = fake_request.call_args + + self.assertEqual(args[0][0], 'GET') + self.assertEqual(args[0][1], url_prefix + 'volumes') + + @base.requires_api_version('1.21') + def test_create_volume(self): + name = 'perfectcherryblossom' + result = self.client.create_volume(name) + self.assertIn('Name', result) + self.assertEqual(result['Name'], name) + self.assertIn('Driver', result) + self.assertEqual(result['Driver'], 'local') + args = fake_request.call_args + + self.assertEqual(args[0][0], 'POST') + self.assertEqual(args[0][1], url_prefix + 'volumes') + self.assertEqual(args[1]['data'], { + 'Name': name, 'Driver': None, 'DriverOpts': None + }) + + @base.requires_api_version('1.21') + def test_create_volume_with_driver(self): + name = 'perfectcherryblossom' + driver_name = 'sshfs' + self.client.create_volume(name, driver=driver_name) + args = fake_request.call_args + + self.assertEqual(args[0][0], 'POST') + self.assertEqual(args[0][1], url_prefix + 'volumes') + self.assertIn('Driver', args[1]['data']) + self.assertEqual(args[1]['data']['Driver'], driver_name) + + @base.requires_api_version('1.21') + def test_create_volume_invalid_opts_type(self): + with pytest.raises(TypeError): + self.client.create_volume( + 'perfectcherryblossom', driver_opts='hello=world' + ) + + with pytest.raises(TypeError): + self.client.create_volume( + 'perfectcherryblossom', driver_opts=['hello=world'] + ) + + with pytest.raises(TypeError): + self.client.create_volume( + 'perfectcherryblossom', driver_opts='' + ) + + @base.requires_api_version('1.21') + def test_inspect_volume(self): + name = 'perfectcherryblossom' + result = self.client.inspect_volume(name) + self.assertIn('Name', result) + self.assertEqual(result['Name'], name) + self.assertIn('Driver', result) + self.assertEqual(result['Driver'], 'local') + args = fake_request.call_args + + self.assertEqual(args[0][0], 'GET') + self.assertEqual(args[0][1], '{0}volumes/{1}'.format(url_prefix, name)) + + @base.requires_api_version('1.21') + def test_remove_volume(self): + name = 'perfectcherryblossom' + result = self.client.remove_volume(name) + self.assertIsNone(result) + args = fake_request.call_args + + self.assertEqual(args[0][0], 'DELETE') + self.assertEqual(args[0][1], '{0}volumes/{1}'.format(url_prefix, name)) + + ####################### + # PY SPECIFIC TESTS # + ####################### + + def test_load_config_no_file(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + cfg = docker.auth.load_config(folder) + self.assertTrue(cfg is not None) + + def test_load_config(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + dockercfg_path = os.path.join(folder, '.dockercfg') + with open(dockercfg_path, 'w') as f: + auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') + f.write('auth = {0}\n'.format(auth_)) + f.write('email = sakuya@scarlet.net') + cfg = docker.auth.load_config(dockercfg_path) + self.assertTrue(docker.auth.INDEX_NAME in cfg) + self.assertNotEqual(cfg[docker.auth.INDEX_NAME], None) + cfg = cfg[docker.auth.INDEX_NAME] + self.assertEqual(cfg['username'], 'sakuya') + self.assertEqual(cfg['password'], 'izayoi') + self.assertEqual(cfg['email'], 'sakuya@scarlet.net') + self.assertEqual(cfg.get('auth'), None) + + def test_load_config_with_random_name(self): + folder = tempfile.mkdtemp() + 
self.addCleanup(shutil.rmtree, folder) + + dockercfg_path = os.path.join(folder, + '.{0}.dockercfg'.format( + random.randrange(100000))) + registry = 'https://your.private.registry.io' + auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') + config = { + registry: { + 'auth': '{0}'.format(auth_), + 'email': 'sakuya@scarlet.net' + } + } + + with open(dockercfg_path, 'w') as f: + f.write(json.dumps(config)) + + cfg = docker.auth.load_config(dockercfg_path) + self.assertTrue(registry in cfg) + self.assertNotEqual(cfg[registry], None) + cfg = cfg[registry] + self.assertEqual(cfg['username'], 'sakuya') + self.assertEqual(cfg['password'], 'izayoi') + self.assertEqual(cfg['email'], 'sakuya@scarlet.net') + self.assertEqual(cfg.get('auth'), None) + + def test_tar_with_excludes(self): + dirs = [ + 'foo', + 'foo/bar', + 'bar', + ] + + files = [ + 'Dockerfile', + 'Dockerfile.alt', + '.dockerignore', + 'a.py', + 'a.go', + 'b.py', + 'cde.py', + 'foo/a.py', + 'foo/b.py', + 'foo/bar/a.py', + 'bar/a.py', + ] + + exclude = [ + '*.py', + '!b.py', + '!a.go', + 'foo', + 'Dockerfile*', + '.dockerignore', + ] + + expected_names = set([ + 'Dockerfile', + '.dockerignore', + 'a.go', + 'b.py', + 'bar', + 'bar/a.py', + ]) + + base = make_tree(dirs, files) + self.addCleanup(shutil.rmtree, base) + + with docker.utils.tar(base, exclude=exclude) as archive: + tar = tarfile.open(fileobj=archive) + assert sorted(tar.getnames()) == sorted(expected_names) + + def test_tar_with_empty_directory(self): + base = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base) + for d in ['foo', 'bar']: + os.makedirs(os.path.join(base, d)) + with docker.utils.tar(base) as archive: + tar = tarfile.open(fileobj=archive) + self.assertEqual(sorted(tar.getnames()), ['bar', 'foo']) + + def test_tar_with_file_symlinks(self): + base = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base) + with open(os.path.join(base, 'foo'), 'w') as f: + f.write("content") + os.makedirs(os.path.join(base, 'bar')) + os.symlink('../foo', os.path.join(base, 'bar/foo')) + with docker.utils.tar(base) as archive: + tar = tarfile.open(fileobj=archive) + self.assertEqual(sorted(tar.getnames()), ['bar', 'bar/foo', 'foo']) + + def test_tar_with_directory_symlinks(self): + base = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base) + for d in ['foo', 'bar']: + os.makedirs(os.path.join(base, d)) + os.symlink('../foo', os.path.join(base, 'bar/foo')) + with docker.utils.tar(base) as archive: + tar = tarfile.open(fileobj=archive) + self.assertEqual(sorted(tar.getnames()), ['bar', 'bar/foo', 'foo']) + + ####################### + # HOST CONFIG TESTS # + ####################### + + def test_create_host_config_secopt(self): + security_opt = ['apparmor:test_profile'] + result = self.client.create_host_config(security_opt=security_opt) + self.assertIn('SecurityOpt', result) + self.assertEqual(result['SecurityOpt'], security_opt) + + self.assertRaises( + docker.errors.DockerException, self.client.create_host_config, + security_opt='wrong' + ) + + +class StreamTest(base.Cleanup, base.BaseTestCase): + + def setUp(self): + socket_dir = tempfile.mkdtemp() + self.build_context = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, socket_dir) + self.addCleanup(shutil.rmtree, self.build_context) + self.socket_file = os.path.join(socket_dir, 'test_sock.sock') + self.server_socket = self._setup_socket() + self.stop_server = False + server_thread = threading.Thread(target=self.run_server) + server_thread.setDaemon(True) + server_thread.start() + self.response = None + 
self.request_handler = None + self.addCleanup(server_thread.join) + self.addCleanup(self.stop) + + def stop(self): + self.stop_server = True + + def _setup_socket(self): + server_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + server_sock.bind(self.socket_file) + # Non-blocking mode so that we can shut the test down easily + server_sock.setblocking(0) + server_sock.listen(5) + return server_sock + + def run_server(self): + try: + while not self.stop_server: + try: + connection, client_address = self.server_socket.accept() + except socket.error: + # Probably no connection to accept yet + time.sleep(0.01) + continue + + connection.setblocking(1) + try: + self.request_handler(connection) + finally: + connection.close() + finally: + self.server_socket.close() + + def early_response_sending_handler(self, connection): + data = b'' + headers = None + + connection.sendall(self.response) + while not headers: + data += connection.recv(2048) + parts = data.split(b'\r\n\r\n', 1) + if len(parts) == 2: + headers, data = parts + + mo = re.search(r'Content-Length: ([0-9]+)', headers.decode()) + assert mo + content_length = int(mo.group(1)) + + while True: + if len(data) >= content_length: + break + + data += connection.recv(2048) + + def test_early_stream_response(self): + self.request_handler = self.early_response_sending_handler + lines = [] + for i in range(0, 50): + line = str(i).encode() + lines += [('%x' % len(line)).encode(), line] + lines.append(b'0') + lines.append(b'') + + self.response = ( + b'HTTP/1.1 200 OK\r\n' + b'Transfer-Encoding: chunked\r\n' + b'\r\n' + ) + b'\r\n'.join(lines) + + with docker.Client(base_url="http+unix://" + self.socket_file) \ + as client: + for i in range(5): + try: + stream = client.build( + path=self.build_context, + stream=True + ) + break + except requests.ConnectionError as e: + if i == 4: + raise e + + self.assertEqual(list(stream), [ + str(i).encode() for i in range(50)]) diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py new file mode 100644 index 0000000..5a89dee --- /dev/null +++ b/tests/unit/fake_api.py @@ -0,0 +1,513 @@ +# Copyright 2013 dotCloud inc. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import fake_stat +from docker import constants + +CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION) + +FAKE_CONTAINER_ID = '3cc2351ab11b' +FAKE_IMAGE_ID = 'e9aa60c60128' +FAKE_EXEC_ID = 'd5d177f121dc' +FAKE_IMAGE_NAME = 'test_image' +FAKE_TARBALL_PATH = '/path/to/tarball' +FAKE_REPO_NAME = 'repo' +FAKE_TAG_NAME = 'tag' +FAKE_FILE_NAME = 'file' +FAKE_URL = 'myurl' +FAKE_PATH = '/path' +FAKE_VOLUME_NAME = 'perfectcherryblossom' + +# Each method is prefixed with HTTP method (get, post...) 
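# Aside - illustrative sketch, not part of this patch. Every fake below
# follows the same convention: a zero-argument function, named after the
# HTTP verb it stands in for, returning a (status_code, payload) tuple.
# A new endpoint is faked by adding one such function and wiring it into
# the fake_responses dict at the bottom of this file. Hypothetical example
# (this endpoint is not part of the change):
def post_fake_prune_volumes():
    return 200, {'VolumesDeleted': [], 'SpaceReclaimed': 0}
# fake_responses['{1}/{0}/volumes/prune'.format(CURRENT_VERSION, prefix)] = \
#     post_fake_prune_volumes
# (end of aside)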
+# for clarity and readability + + +def get_fake_raw_version(): + status_code = 200 + response = { + "ApiVersion": "1.18", + "GitCommit": "fake-commit", + "GoVersion": "go1.3.3", + "Version": "1.5.0" + } + return status_code, response + + +def get_fake_version(): + status_code = 200 + response = {'GoVersion': '1', 'Version': '1.1.1', + 'GitCommit': 'deadbeef+CHANGES'} + return status_code, response + + +def get_fake_info(): + status_code = 200 + response = {'Containers': 1, 'Images': 1, 'Debug': False, + 'MemoryLimit': False, 'SwapLimit': False, + 'IPv4Forwarding': True} + return status_code, response + + +def get_fake_search(): + status_code = 200 + response = [{'Name': 'busybox', 'Description': 'Fake Description'}] + return status_code, response + + +def get_fake_images(): + status_code = 200 + response = [{ + 'Id': FAKE_IMAGE_ID, + 'Created': '2 days ago', + 'Repository': 'busybox', + 'RepoTags': ['busybox:latest', 'busybox:1.0'], + }] + return status_code, response + + +def get_fake_image_history(): + status_code = 200 + response = [ + { + "Id": "b750fe79269d", + "Created": 1364102658, + "CreatedBy": "/bin/bash" + }, + { + "Id": "27cf78414709", + "Created": 1364068391, + "CreatedBy": "" + } + ] + + return status_code, response + + +def post_fake_import_image(): + status_code = 200 + response = 'Import messages...' + + return status_code, response + + +def get_fake_containers(): + status_code = 200 + response = [{ + 'Id': FAKE_CONTAINER_ID, + 'Image': 'busybox:latest', + 'Created': '2 days ago', + 'Command': 'true', + 'Status': 'fake status' + }] + return status_code, response + + +def post_fake_start_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_resize_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_create_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def get_fake_inspect_container(tty=False): + status_code = 200 + response = { + 'Id': FAKE_CONTAINER_ID, + 'Config': {'Privileged': True, 'Tty': tty}, + 'ID': FAKE_CONTAINER_ID, + 'Image': 'busybox:latest', + "State": { + "Running": True, + "Pid": 0, + "ExitCode": 0, + "StartedAt": "2013-09-25T14:01:18.869545111+02:00", + "Ghost": False + }, + "MacAddress": "02:42:ac:11:00:0a" + } + return status_code, response + + +def get_fake_inspect_image(): + status_code = 200 + response = { + 'id': FAKE_IMAGE_ID, + 'parent': "27cf784147099545", + 'created': "2013-03-23T22:24:18.818426-07:00", + 'container': FAKE_CONTAINER_ID, + 'container_config': + { + "Hostname": "", + "User": "", + "Memory": 0, + "MemorySwap": 0, + "AttachStdin": False, + "AttachStdout": False, + "AttachStderr": False, + "PortSpecs": "", + "Tty": True, + "OpenStdin": True, + "StdinOnce": False, + "Env": "", + "Cmd": ["/bin/bash"], + "Dns": "", + "Image": "base", + "Volumes": "", + "VolumesFrom": "", + "WorkingDir": "" + }, + 'Size': 6823592 + } + return status_code, response + + +def get_fake_port(): + status_code = 200 + response = { + 'HostConfig': { + 'Binds': None, + 'ContainerIDFile': '', + 'Links': None, + 'LxcConf': None, + 'PortBindings': { + '1111': None, + '1111/tcp': [{'HostIp': '127.0.0.1', 'HostPort': '4567'}], + '2222': None + }, + 'Privileged': False, + 'PublishAllPorts': False + }, + 'NetworkSettings': { + 'Bridge': 'docker0', + 'PortMapping': None, + 'Ports': { + '1111': None, + '1111/tcp': [{'HostIp': '127.0.0.1', 'HostPort': '4567'}], + '2222': None}, + 
'MacAddress': '02:42:ac:11:00:0a' + } + } + return status_code, response + + +def get_fake_insert_image(): + status_code = 200 + response = {'StatusCode': 0} + return status_code, response + + +def get_fake_wait(): + status_code = 200 + response = {'StatusCode': 0} + return status_code, response + + +def get_fake_logs(): + status_code = 200 + response = (b'\x01\x00\x00\x00\x00\x00\x00\x11Flowering Nights\n' + b'\x01\x00\x00\x00\x00\x00\x00\x10(Sakuya Iyazoi)\n') + return status_code, response + + +def get_fake_diff(): + status_code = 200 + response = [{'Path': '/test', 'Kind': 1}] + return status_code, response + + +def get_fake_events(): + status_code = 200 + response = [{'status': 'stop', 'id': FAKE_CONTAINER_ID, + 'from': FAKE_IMAGE_ID, 'time': 1423247867}] + return status_code, response + + +def get_fake_export(): + status_code = 200 + response = 'Byte Stream....' + return status_code, response + + +def post_fake_exec_create(): + status_code = 200 + response = {'Id': FAKE_EXEC_ID} + return status_code, response + + +def post_fake_exec_start(): + status_code = 200 + response = (b'\x01\x00\x00\x00\x00\x00\x00\x11bin\nboot\ndev\netc\n' + b'\x01\x00\x00\x00\x00\x00\x00\x12lib\nmnt\nproc\nroot\n' + b'\x01\x00\x00\x00\x00\x00\x00\x0csbin\nusr\nvar\n') + return status_code, response + + +def post_fake_exec_resize(): + status_code = 201 + return status_code, '' + + +def get_fake_exec_inspect(): + return 200, { + 'OpenStderr': True, + 'OpenStdout': True, + 'Container': get_fake_inspect_container()[1], + 'Running': False, + 'ProcessConfig': { + 'arguments': ['hello world'], + 'tty': False, + 'entrypoint': 'echo', + 'privileged': False, + 'user': '' + }, + 'ExitCode': 0, + 'ID': FAKE_EXEC_ID, + 'OpenStdin': False + } + + +def post_fake_stop_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_kill_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_pause_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_unpause_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_restart_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_rename_container(): + status_code = 204 + return status_code, None + + +def delete_fake_remove_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_image_create(): + status_code = 200 + response = {'Id': FAKE_IMAGE_ID} + return status_code, response + + +def delete_fake_remove_image(): + status_code = 200 + response = {'Id': FAKE_IMAGE_ID} + return status_code, response + + +def get_fake_get_image(): + status_code = 200 + response = 'Byte Stream....' 
+ return status_code, response + + +def post_fake_load_image(): + status_code = 200 + response = {'Id': FAKE_IMAGE_ID} + return status_code, response + + +def post_fake_commit(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_push(): + status_code = 200 + response = {'Id': FAKE_IMAGE_ID} + return status_code, response + + +def post_fake_build_container(): + status_code = 200 + response = {'Id': FAKE_CONTAINER_ID} + return status_code, response + + +def post_fake_tag_image(): + status_code = 200 + response = {'Id': FAKE_IMAGE_ID} + return status_code, response + + +def get_fake_stats(): + status_code = 200 + response = fake_stat.OBJ + return status_code, response + + +def get_fake_volume_list(): + status_code = 200 + response = { + 'Volumes': [ + { + 'Name': 'perfectcherryblossom', + 'Driver': 'local', + 'Mountpoint': '/var/lib/docker/volumes/perfectcherryblossom' + }, { + 'Name': 'subterraneananimism', + 'Driver': 'local', + 'Mountpoint': '/var/lib/docker/volumes/subterraneananimism' + } + ] + } + return status_code, response + + +def get_fake_volume(): + status_code = 200 + response = { + 'Name': 'perfectcherryblossom', + 'Driver': 'local', + 'Mountpoint': '/var/lib/docker/volumes/perfectcherryblossom' + } + return status_code, response + + +def fake_remove_volume(): + return 204, None + +# Maps real api url to fake response callback +prefix = 'http+docker://localunixsocket' +fake_responses = { + '{0}/version'.format(prefix): + get_fake_raw_version, + '{1}/{0}/version'.format(CURRENT_VERSION, prefix): + get_fake_version, + '{1}/{0}/info'.format(CURRENT_VERSION, prefix): + get_fake_info, + '{1}/{0}/images/search'.format(CURRENT_VERSION, prefix): + get_fake_search, + '{1}/{0}/images/json'.format(CURRENT_VERSION, prefix): + get_fake_images, + '{1}/{0}/images/test_image/history'.format(CURRENT_VERSION, prefix): + get_fake_image_history, + '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): + post_fake_import_image, + '{1}/{0}/containers/json'.format(CURRENT_VERSION, prefix): + get_fake_containers, + '{1}/{0}/containers/3cc2351ab11b/start'.format(CURRENT_VERSION, prefix): + post_fake_start_container, + '{1}/{0}/containers/3cc2351ab11b/resize'.format(CURRENT_VERSION, prefix): + post_fake_resize_container, + '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix): + get_fake_inspect_container, + '{1}/{0}/containers/3cc2351ab11b/rename'.format(CURRENT_VERSION, prefix): + post_fake_rename_container, + '{1}/{0}/images/e9aa60c60128/tag'.format(CURRENT_VERSION, prefix): + post_fake_tag_image, + '{1}/{0}/containers/3cc2351ab11b/wait'.format(CURRENT_VERSION, prefix): + get_fake_wait, + '{1}/{0}/containers/3cc2351ab11b/logs'.format(CURRENT_VERSION, prefix): + get_fake_logs, + '{1}/{0}/containers/3cc2351ab11b/changes'.format(CURRENT_VERSION, prefix): + get_fake_diff, + '{1}/{0}/containers/3cc2351ab11b/export'.format(CURRENT_VERSION, prefix): + get_fake_export, + '{1}/{0}/containers/3cc2351ab11b/exec'.format(CURRENT_VERSION, prefix): + post_fake_exec_create, + '{1}/{0}/exec/d5d177f121dc/start'.format(CURRENT_VERSION, prefix): + post_fake_exec_start, + '{1}/{0}/exec/d5d177f121dc/json'.format(CURRENT_VERSION, prefix): + get_fake_exec_inspect, + '{1}/{0}/exec/d5d177f121dc/resize'.format(CURRENT_VERSION, prefix): + post_fake_exec_resize, + + '{1}/{0}/containers/3cc2351ab11b/stats'.format(CURRENT_VERSION, prefix): + get_fake_stats, + '{1}/{0}/containers/3cc2351ab11b/stop'.format(CURRENT_VERSION, prefix): + 
post_fake_stop_container, + '{1}/{0}/containers/3cc2351ab11b/kill'.format(CURRENT_VERSION, prefix): + post_fake_kill_container, + '{1}/{0}/containers/3cc2351ab11b/pause'.format(CURRENT_VERSION, prefix): + post_fake_pause_container, + '{1}/{0}/containers/3cc2351ab11b/unpause'.format(CURRENT_VERSION, prefix): + post_fake_unpause_container, + '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix): + get_fake_port, + '{1}/{0}/containers/3cc2351ab11b/restart'.format(CURRENT_VERSION, prefix): + post_fake_restart_container, + '{1}/{0}/containers/3cc2351ab11b'.format(CURRENT_VERSION, prefix): + delete_fake_remove_container, + '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): + post_fake_image_create, + '{1}/{0}/images/e9aa60c60128'.format(CURRENT_VERSION, prefix): + delete_fake_remove_image, + '{1}/{0}/images/e9aa60c60128/get'.format(CURRENT_VERSION, prefix): + get_fake_get_image, + '{1}/{0}/images/load'.format(CURRENT_VERSION, prefix): + post_fake_load_image, + '{1}/{0}/images/test_image/json'.format(CURRENT_VERSION, prefix): + get_fake_inspect_image, + '{1}/{0}/images/test_image/insert'.format(CURRENT_VERSION, prefix): + get_fake_insert_image, + '{1}/{0}/images/test_image/push'.format(CURRENT_VERSION, prefix): + post_fake_push, + '{1}/{0}/commit'.format(CURRENT_VERSION, prefix): + post_fake_commit, + '{1}/{0}/containers/create'.format(CURRENT_VERSION, prefix): + post_fake_create_container, + '{1}/{0}/build'.format(CURRENT_VERSION, prefix): + post_fake_build_container, + '{1}/{0}/events'.format(CURRENT_VERSION, prefix): + get_fake_events, + ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'GET'): + get_fake_volume_list, + ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'POST'): + get_fake_volume, + ('{1}/{0}/volumes/{2}'.format( + CURRENT_VERSION, prefix, FAKE_VOLUME_NAME + ), 'GET'): + get_fake_volume, + ('{1}/{0}/volumes/{2}'.format( + CURRENT_VERSION, prefix, FAKE_VOLUME_NAME + ), 'DELETE'): + fake_remove_volume, +} diff --git a/tests/unit/fake_stat.py b/tests/unit/fake_stat.py new file mode 100644 index 0000000..a7f1029 --- /dev/null +++ b/tests/unit/fake_stat.py @@ -0,0 +1,133 @@ +OBJ = { + "read": "2015-02-11T19:20:46.667237763+02:00", + "network": { + "rx_bytes": 567224, + "rx_packets": 3773, + "rx_errors": 0, + "rx_dropped": 0, + "tx_bytes": 1176, + "tx_packets": 13, + "tx_errors": 0, + "tx_dropped": 0 + }, + "cpu_stats": { + "cpu_usage": { + "total_usage": 157260874053, + "percpu_usage": [ + 52196306950, + 24118413549, + 53292684398, + 27653469156 + ], + "usage_in_kernelmode": 37140000000, + "usage_in_usermode": 62140000000 + }, + "system_cpu_usage": 3.0881377e+14, + "throttling_data": { + "periods": 0, + "throttled_periods": 0, + "throttled_time": 0 + } + }, + "memory_stats": { + "usage": 179314688, + "max_usage": 258166784, + "stats": { + "active_anon": 90804224, + "active_file": 2195456, + "cache": 3096576, + "hierarchical_memory_limit": 1.844674407371e+19, + "inactive_anon": 85516288, + "inactive_file": 798720, + "mapped_file": 2646016, + "pgfault": 101034, + "pgmajfault": 1207, + "pgpgin": 115814, + "pgpgout": 75613, + "rss": 176218112, + "rss_huge": 12582912, + "total_active_anon": 90804224, + "total_active_file": 2195456, + "total_cache": 3096576, + "total_inactive_anon": 85516288, + "total_inactive_file": 798720, + "total_mapped_file": 2646016, + "total_pgfault": 101034, + "total_pgmajfault": 1207, + "total_pgpgin": 115814, + "total_pgpgout": 75613, + "total_rss": 176218112, + "total_rss_huge": 12582912, + "total_unevictable": 0, + "total_writeback": 
0, + "unevictable": 0, + "writeback": 0 + }, + "failcnt": 0, + "limit": 8039038976 + }, + "blkio_stats": { + "io_service_bytes_recursive": [ + { + "major": 8, + "minor": 0, + "op": "Read", + "value": 72843264 + }, { + "major": 8, + "minor": 0, + "op": "Write", + "value": 4096 + }, { + "major": 8, + "minor": 0, + "op": "Sync", + "value": 4096 + }, { + "major": 8, + "minor": 0, + "op": "Async", + "value": 72843264 + }, { + "major": 8, + "minor": 0, + "op": "Total", + "value": 72847360 + } + ], + "io_serviced_recursive": [ + { + "major": 8, + "minor": 0, + "op": "Read", + "value": 10581 + }, { + "major": 8, + "minor": 0, + "op": "Write", + "value": 1 + }, { + "major": 8, + "minor": 0, + "op": "Sync", + "value": 1 + }, { + "major": 8, + "minor": 0, + "op": "Async", + "value": 10581 + }, { + "major": 8, + "minor": 0, + "op": "Total", + "value": 10582 + } + ], + "io_queue_recursive": [], + "io_service_time_recursive": [], + "io_wait_time_recursive": [], + "io_merged_recursive": [], + "io_time_recursive": [], + "sectors_recursive": [] + } +} diff --git a/tests/unit/testdata/certs/ca.pem b/tests/unit/testdata/certs/ca.pem new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/testdata/certs/cert.pem b/tests/unit/testdata/certs/cert.pem new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/testdata/certs/key.pem b/tests/unit/testdata/certs/key.pem new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/testdata/context/Dockerfile b/tests/unit/testdata/context/Dockerfile new file mode 100644 index 0000000..d1ceac6 --- /dev/null +++ b/tests/unit/testdata/context/Dockerfile @@ -0,0 +1,2 @@ +FROM busybox:latest +CMD echo "success" diff --git a/tests/unit/testdata/context/ctx.tar.gz b/tests/unit/testdata/context/ctx.tar.gz new file mode 100644 index 0000000..c14e5b9 Binary files /dev/null and b/tests/unit/testdata/context/ctx.tar.gz differ diff --git a/tests/unit/testdata/context/custom_dockerfile b/tests/unit/testdata/context/custom_dockerfile new file mode 100644 index 0000000..d1ceac6 --- /dev/null +++ b/tests/unit/testdata/context/custom_dockerfile @@ -0,0 +1,2 @@ +FROM busybox:latest +CMD echo "success" diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py new file mode 100644 index 0000000..71a382b --- /dev/null +++ b/tests/unit/utils_test.py @@ -0,0 +1,651 @@ +import os +import os.path +import shutil +import tempfile + +from docker.client import Client +from docker.constants import DEFAULT_DOCKER_API_VERSION +from docker.errors import DockerException +from docker.utils import ( + parse_repository_tag, parse_host, convert_filters, kwargs_from_env, + create_host_config, Ulimit, LogConfig, parse_bytes, parse_env_file, + exclude_paths, +) +from docker.utils.ports import build_port_bindings, split_port +from docker.auth import resolve_repository_name, resolve_authconfig + +from .. import base +from ..helpers import make_tree + +import pytest + +TEST_CERT_DIR = os.path.join( + os.path.dirname(__file__), + 'testdata/certs', +) + + +class UtilsTest(base.BaseTestCase): + longMessage = True + + def generate_tempfile(self, file_content=None): + """ + Generates a temporary file for tests with the content + of 'file_content' and returns the filename. + Don't forget to unlink the file with os.unlink() after. 
+ """ + local_tempfile = tempfile.NamedTemporaryFile(delete=False) + local_tempfile.write(file_content.encode('UTF-8')) + local_tempfile.close() + return local_tempfile.name + + def setUp(self): + self.os_environ = os.environ.copy() + + def tearDown(self): + os.environ = self.os_environ + + def test_parse_repository_tag(self): + self.assertEqual(parse_repository_tag("root"), + ("root", None)) + self.assertEqual(parse_repository_tag("root:tag"), + ("root", "tag")) + self.assertEqual(parse_repository_tag("user/repo"), + ("user/repo", None)) + self.assertEqual(parse_repository_tag("user/repo:tag"), + ("user/repo", "tag")) + self.assertEqual(parse_repository_tag("url:5000/repo"), + ("url:5000/repo", None)) + self.assertEqual(parse_repository_tag("url:5000/repo:tag"), + ("url:5000/repo", "tag")) + + def test_parse_bytes(self): + self.assertEqual(parse_bytes("512MB"), (536870912)) + self.assertEqual(parse_bytes("512M"), (536870912)) + self.assertRaises(DockerException, parse_bytes, "512MK") + self.assertRaises(DockerException, parse_bytes, "512L") + + def test_parse_host(self): + invalid_hosts = [ + '0.0.0.0', + 'tcp://', + 'udp://127.0.0.1', + 'udp://127.0.0.1:2375', + ] + + valid_hosts = { + '0.0.0.1:5555': 'http://0.0.0.1:5555', + ':6666': 'http://127.0.0.1:6666', + 'tcp://:7777': 'http://127.0.0.1:7777', + 'http://:7777': 'http://127.0.0.1:7777', + 'https://kokia.jp:2375': 'https://kokia.jp:2375', + 'unix:///var/run/docker.sock': 'http+unix:///var/run/docker.sock', + 'unix://': 'http+unix://var/run/docker.sock', + 'somehost.net:80/service/swarm': ( + 'http://somehost.net:80/service/swarm' + ), + } + + for host in invalid_hosts: + with pytest.raises(DockerException): + parse_host(host, None) + + for host, expected in valid_hosts.items(): + self.assertEqual(parse_host(host, None), expected, msg=host) + + def test_parse_host_empty_value(self): + unix_socket = 'http+unix://var/run/docker.sock' + tcp_port = 'http://127.0.0.1:2375' + + for val in [None, '']: + for platform in ['darwin', 'linux2', None]: + assert parse_host(val, platform) == unix_socket + + assert parse_host(val, 'win32') == tcp_port + + def test_kwargs_from_env_empty(self): + os.environ.update(DOCKER_HOST='', + DOCKER_CERT_PATH='', + DOCKER_TLS_VERIFY='') + + kwargs = kwargs_from_env() + self.assertEqual(None, kwargs.get('base_url')) + self.assertEqual(None, kwargs.get('tls')) + + def test_kwargs_from_env_tls(self): + os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376', + DOCKER_CERT_PATH=TEST_CERT_DIR, + DOCKER_TLS_VERIFY='1') + kwargs = kwargs_from_env(assert_hostname=False) + self.assertEqual('https://192.168.59.103:2376', kwargs['base_url']) + self.assertTrue('ca.pem' in kwargs['tls'].verify) + self.assertTrue('cert.pem' in kwargs['tls'].cert[0]) + self.assertTrue('key.pem' in kwargs['tls'].cert[1]) + self.assertEqual(False, kwargs['tls'].assert_hostname) + try: + client = Client(**kwargs) + self.assertEqual(kwargs['base_url'], client.base_url) + self.assertEqual(kwargs['tls'].verify, client.verify) + self.assertEqual(kwargs['tls'].cert, client.cert) + except TypeError as e: + self.fail(e) + + def test_kwargs_from_env_no_cert_path(self): + try: + temp_dir = tempfile.mkdtemp() + cert_dir = os.path.join(temp_dir, '.docker') + shutil.copytree(TEST_CERT_DIR, cert_dir) + + os.environ.update(HOME=temp_dir, + DOCKER_CERT_PATH='', + DOCKER_TLS_VERIFY='1') + + kwargs = kwargs_from_env() + self.assertIn(cert_dir, kwargs['tls'].verify) + self.assertIn(cert_dir, kwargs['tls'].cert[0]) + self.assertIn(cert_dir, kwargs['tls'].cert[1]) 
+ finally: + if temp_dir: + shutil.rmtree(temp_dir) + + def test_parse_env_file_proper(self): + env_file = self.generate_tempfile( + file_content='USER=jdoe\nPASS=secret') + get_parse_env_file = parse_env_file(env_file) + self.assertEqual(get_parse_env_file, + {'USER': 'jdoe', 'PASS': 'secret'}) + os.unlink(env_file) + + def test_parse_env_file_commented_line(self): + env_file = self.generate_tempfile( + file_content='USER=jdoe\n#PASS=secret') + get_parse_env_file = parse_env_file((env_file)) + self.assertEqual(get_parse_env_file, {'USER': 'jdoe'}) + os.unlink(env_file) + + def test_parse_env_file_invalid_line(self): + env_file = self.generate_tempfile( + file_content='USER jdoe') + self.assertRaises( + DockerException, parse_env_file, env_file) + os.unlink(env_file) + + def test_convert_filters(self): + tests = [ + ({'dangling': True}, '{"dangling": ["true"]}'), + ({'dangling': "true"}, '{"dangling": ["true"]}'), + ({'exited': 0}, '{"exited": [0]}'), + ({'exited': [0, 1]}, '{"exited": [0, 1]}'), + ] + + for filters, expected in tests: + self.assertEqual(convert_filters(filters), expected) + + def test_create_host_config_no_options(self): + config = create_host_config(version='1.19') + self.assertFalse('NetworkMode' in config) + + def test_create_host_config_no_options_newer_api_version(self): + config = create_host_config(version='1.20') + self.assertEqual(config['NetworkMode'], 'default') + + def test_create_host_config_dict_ulimit(self): + ulimit_dct = {'name': 'nofile', 'soft': 8096} + config = create_host_config( + ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION + ) + self.assertIn('Ulimits', config) + self.assertEqual(len(config['Ulimits']), 1) + ulimit_obj = config['Ulimits'][0] + self.assertTrue(isinstance(ulimit_obj, Ulimit)) + self.assertEqual(ulimit_obj.name, ulimit_dct['name']) + self.assertEqual(ulimit_obj.soft, ulimit_dct['soft']) + self.assertEqual(ulimit_obj['Soft'], ulimit_obj.soft) + + def test_create_host_config_dict_ulimit_capitals(self): + ulimit_dct = {'Name': 'nofile', 'Soft': 8096, 'Hard': 8096 * 4} + config = create_host_config( + ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION + ) + self.assertIn('Ulimits', config) + self.assertEqual(len(config['Ulimits']), 1) + ulimit_obj = config['Ulimits'][0] + self.assertTrue(isinstance(ulimit_obj, Ulimit)) + self.assertEqual(ulimit_obj.name, ulimit_dct['Name']) + self.assertEqual(ulimit_obj.soft, ulimit_dct['Soft']) + self.assertEqual(ulimit_obj.hard, ulimit_dct['Hard']) + self.assertEqual(ulimit_obj['Soft'], ulimit_obj.soft) + + def test_create_host_config_obj_ulimit(self): + ulimit_dct = Ulimit(name='nofile', soft=8096) + config = create_host_config( + ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION + ) + self.assertIn('Ulimits', config) + self.assertEqual(len(config['Ulimits']), 1) + ulimit_obj = config['Ulimits'][0] + self.assertTrue(isinstance(ulimit_obj, Ulimit)) + self.assertEqual(ulimit_obj, ulimit_dct) + + def test_ulimit_invalid_type(self): + self.assertRaises(ValueError, lambda: Ulimit(name=None)) + self.assertRaises(ValueError, lambda: Ulimit(name='hello', soft='123')) + self.assertRaises(ValueError, lambda: Ulimit(name='hello', hard='456')) + + def test_create_host_config_dict_logconfig(self): + dct = {'type': LogConfig.types.SYSLOG, 'config': {'key1': 'val1'}} + config = create_host_config( + version=DEFAULT_DOCKER_API_VERSION, log_config=dct + ) + self.assertIn('LogConfig', config) + self.assertTrue(isinstance(config['LogConfig'], LogConfig)) + self.assertEqual(dct['type'], 
config['LogConfig'].type) + + def test_create_host_config_obj_logconfig(self): + obj = LogConfig(type=LogConfig.types.SYSLOG, config={'key1': 'val1'}) + config = create_host_config( + version=DEFAULT_DOCKER_API_VERSION, log_config=obj + ) + self.assertIn('LogConfig', config) + self.assertTrue(isinstance(config['LogConfig'], LogConfig)) + self.assertEqual(obj, config['LogConfig']) + + def test_logconfig_invalid_config_type(self): + with pytest.raises(ValueError): + LogConfig(type=LogConfig.types.JSON, config='helloworld') + + def test_resolve_repository_name(self): + # docker hub library image + self.assertEqual( + resolve_repository_name('image'), + ('index.docker.io', 'image'), + ) + + # docker hub image + self.assertEqual( + resolve_repository_name('username/image'), + ('index.docker.io', 'username/image'), + ) + + # private registry + self.assertEqual( + resolve_repository_name('my.registry.net/image'), + ('my.registry.net', 'image'), + ) + + # private registry with port + self.assertEqual( + resolve_repository_name('my.registry.net:5000/image'), + ('my.registry.net:5000', 'image'), + ) + + # private registry with username + self.assertEqual( + resolve_repository_name('my.registry.net/username/image'), + ('my.registry.net', 'username/image'), + ) + + # no dots but port + self.assertEqual( + resolve_repository_name('hostname:5000/image'), + ('hostname:5000', 'image'), + ) + + # no dots but port and username + self.assertEqual( + resolve_repository_name('hostname:5000/username/image'), + ('hostname:5000', 'username/image'), + ) + + # localhost + self.assertEqual( + resolve_repository_name('localhost/image'), + ('localhost', 'image'), + ) + + # localhost with username + self.assertEqual( + resolve_repository_name('localhost/username/image'), + ('localhost', 'username/image'), + ) + + def test_resolve_authconfig(self): + auth_config = { + 'https://index.docker.io/v1/': {'auth': 'indexuser'}, + 'my.registry.net': {'auth': 'privateuser'}, + 'http://legacy.registry.url/v1/': {'auth': 'legacyauth'} + } + # hostname only + self.assertEqual( + resolve_authconfig(auth_config, 'my.registry.net'), + {'auth': 'privateuser'} + ) + # no protocol + self.assertEqual( + resolve_authconfig(auth_config, 'my.registry.net/v1/'), + {'auth': 'privateuser'} + ) + # no path + self.assertEqual( + resolve_authconfig(auth_config, 'http://my.registry.net'), + {'auth': 'privateuser'} + ) + # no path, trailing slash + self.assertEqual( + resolve_authconfig(auth_config, 'http://my.registry.net/'), + {'auth': 'privateuser'} + ) + # no path, wrong secure protocol + self.assertEqual( + resolve_authconfig(auth_config, 'https://my.registry.net'), + {'auth': 'privateuser'} + ) + # no path, wrong insecure protocol + self.assertEqual( + resolve_authconfig(auth_config, 'http://index.docker.io'), + {'auth': 'indexuser'} + ) + # with path, wrong protocol + self.assertEqual( + resolve_authconfig(auth_config, 'https://my.registry.net/v1/'), + {'auth': 'privateuser'} + ) + # default registry + self.assertEqual( + resolve_authconfig(auth_config), {'auth': 'indexuser'} + ) + # default registry (explicit None) + self.assertEqual( + resolve_authconfig(auth_config, None), {'auth': 'indexuser'} + ) + # fully explicit + self.assertEqual( + resolve_authconfig(auth_config, 'http://my.registry.net/v1/'), + {'auth': 'privateuser'} + ) + # legacy entry in config + self.assertEqual( + resolve_authconfig(auth_config, 'legacy.registry.url'), + {'auth': 'legacyauth'} + ) + # no matching entry + self.assertTrue( + resolve_authconfig(auth_config, 
'does.not.exist') is None + ) + + def test_resolve_registry_and_auth(self): + auth_config = { + 'https://index.docker.io/v1/': {'auth': 'indexuser'}, + 'my.registry.net': {'auth': 'privateuser'}, + } + + # library image + image = 'image' + self.assertEqual( + resolve_authconfig(auth_config, resolve_repository_name(image)[0]), + {'auth': 'indexuser'}, + ) + + # docker hub image + image = 'username/image' + self.assertEqual( + resolve_authconfig(auth_config, resolve_repository_name(image)[0]), + {'auth': 'indexuser'}, + ) + + # private registry + image = 'my.registry.net/image' + self.assertEqual( + resolve_authconfig(auth_config, resolve_repository_name(image)[0]), + {'auth': 'privateuser'}, + ) + + # unauthenticated registry + image = 'other.registry.net/image' + self.assertEqual( + resolve_authconfig(auth_config, resolve_repository_name(image)[0]), + None, + ) + + def test_split_port_with_host_ip(self): + internal_port, external_port = split_port("127.0.0.1:1000:2000") + self.assertEqual(internal_port, ["2000"]) + self.assertEqual(external_port, [("127.0.0.1", "1000")]) + + def test_split_port_with_protocol(self): + internal_port, external_port = split_port("127.0.0.1:1000:2000/udp") + self.assertEqual(internal_port, ["2000/udp"]) + self.assertEqual(external_port, [("127.0.0.1", "1000")]) + + def test_split_port_with_host_ip_no_port(self): + internal_port, external_port = split_port("127.0.0.1::2000") + self.assertEqual(internal_port, ["2000"]) + self.assertEqual(external_port, [("127.0.0.1", None)]) + + def test_split_port_range_with_host_ip_no_port(self): + internal_port, external_port = split_port("127.0.0.1::2000-2001") + self.assertEqual(internal_port, ["2000", "2001"]) + self.assertEqual(external_port, + [("127.0.0.1", None), ("127.0.0.1", None)]) + + def test_split_port_with_host_port(self): + internal_port, external_port = split_port("1000:2000") + self.assertEqual(internal_port, ["2000"]) + self.assertEqual(external_port, ["1000"]) + + def test_split_port_range_with_host_port(self): + internal_port, external_port = split_port("1000-1001:2000-2001") + self.assertEqual(internal_port, ["2000", "2001"]) + self.assertEqual(external_port, ["1000", "1001"]) + + def test_split_port_no_host_port(self): + internal_port, external_port = split_port("2000") + self.assertEqual(internal_port, ["2000"]) + self.assertEqual(external_port, None) + + def test_split_port_range_no_host_port(self): + internal_port, external_port = split_port("2000-2001") + self.assertEqual(internal_port, ["2000", "2001"]) + self.assertEqual(external_port, None) + + def test_split_port_range_with_protocol(self): + internal_port, external_port = split_port( + "127.0.0.1:1000-1001:2000-2001/udp") + self.assertEqual(internal_port, ["2000/udp", "2001/udp"]) + self.assertEqual(external_port, + [("127.0.0.1", "1000"), ("127.0.0.1", "1001")]) + + def test_split_port_invalid(self): + self.assertRaises(ValueError, + lambda: split_port("0.0.0.0:1000:2000:tcp")) + + def test_non_matching_length_port_ranges(self): + self.assertRaises( + ValueError, + lambda: split_port("0.0.0.0:1000-1010:2000-2002/tcp") + ) + + def test_port_and_range_invalid(self): + self.assertRaises(ValueError, + lambda: split_port("0.0.0.0:1000:2000-2002/tcp")) + + def test_port_only_with_colon(self): + self.assertRaises(ValueError, + lambda: split_port(":80")) + + def test_host_only_with_colon(self): + self.assertRaises(ValueError, + lambda: split_port("localhost:")) + + def test_build_port_bindings_with_one_port(self): + port_bindings = 
build_port_bindings(["127.0.0.1:1000:1000"]) + self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) + + def test_build_port_bindings_with_matching_internal_ports(self): + port_bindings = build_port_bindings( + ["127.0.0.1:1000:1000", "127.0.0.1:2000:1000"]) + self.assertEqual(port_bindings["1000"], + [("127.0.0.1", "1000"), ("127.0.0.1", "2000")]) + + def test_build_port_bindings_with_nonmatching_internal_ports(self): + port_bindings = build_port_bindings( + ["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"]) + self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) + self.assertEqual(port_bindings["2000"], [("127.0.0.1", "2000")]) + + def test_build_port_bindings_with_port_range(self): + port_bindings = build_port_bindings(["127.0.0.1:1000-1001:1000-1001"]) + self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) + self.assertEqual(port_bindings["1001"], [("127.0.0.1", "1001")]) + + def test_build_port_bindings_with_matching_internal_port_ranges(self): + port_bindings = build_port_bindings( + ["127.0.0.1:1000-1001:1000-1001", "127.0.0.1:2000-2001:1000-1001"]) + self.assertEqual(port_bindings["1000"], + [("127.0.0.1", "1000"), ("127.0.0.1", "2000")]) + self.assertEqual(port_bindings["1001"], + [("127.0.0.1", "1001"), ("127.0.0.1", "2001")]) + + def test_build_port_bindings_with_nonmatching_internal_port_ranges(self): + port_bindings = build_port_bindings( + ["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"]) + self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) + self.assertEqual(port_bindings["2000"], [("127.0.0.1", "2000")]) + + +class ExcludePathsTest(base.BaseTestCase): + dirs = [ + 'foo', + 'foo/bar', + 'bar', + ] + + files = [ + 'Dockerfile', + 'Dockerfile.alt', + '.dockerignore', + 'a.py', + 'a.go', + 'b.py', + 'cde.py', + 'foo/a.py', + 'foo/b.py', + 'foo/bar/a.py', + 'bar/a.py', + ] + + all_paths = set(dirs + files) + + def setUp(self): + self.base = make_tree(self.dirs, self.files) + + def tearDown(self): + shutil.rmtree(self.base) + + def exclude(self, patterns, dockerfile=None): + return set(exclude_paths(self.base, patterns, dockerfile=dockerfile)) + + def test_no_excludes(self): + assert self.exclude(['']) == self.all_paths + + def test_no_dupes(self): + paths = exclude_paths(self.base, ['!a.py']) + assert sorted(paths) == sorted(set(paths)) + + def test_wildcard_exclude(self): + assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore']) + + def test_exclude_dockerfile_dockerignore(self): + """ + Even if the .dockerignore file explicitly says to exclude + Dockerfile and/or .dockerignore, don't exclude them from + the actual tar file. + """ + assert self.exclude(['Dockerfile', '.dockerignore']) == self.all_paths + + def test_exclude_custom_dockerfile(self): + """ + If we're using a custom Dockerfile, make sure that's not + excluded. + """ + assert self.exclude(['*'], dockerfile='Dockerfile.alt') == \ + set(['Dockerfile.alt', '.dockerignore']) + + def test_single_filename(self): + assert self.exclude(['a.py']) == self.all_paths - set(['a.py']) + + # As odd as it sounds, a filename pattern with a trailing slash on the + # end *will* result in that file being excluded. 
+ def test_single_filename_trailing_slash(self): + assert self.exclude(['a.py/']) == self.all_paths - set(['a.py']) + + def test_wildcard_filename_start(self): + assert self.exclude(['*.py']) == self.all_paths - set([ + 'a.py', 'b.py', 'cde.py', + ]) + + def test_wildcard_with_exception(self): + assert self.exclude(['*.py', '!b.py']) == self.all_paths - set([ + 'a.py', 'cde.py', + ]) + + def test_wildcard_with_wildcard_exception(self): + assert self.exclude(['*.*', '!*.go']) == self.all_paths - set([ + 'a.py', 'b.py', 'cde.py', 'Dockerfile.alt', + ]) + + def test_wildcard_filename_end(self): + assert self.exclude(['a.*']) == self.all_paths - set(['a.py', 'a.go']) + + def test_question_mark(self): + assert self.exclude(['?.py']) == self.all_paths - set(['a.py', 'b.py']) + + def test_single_subdir_single_filename(self): + assert self.exclude(['foo/a.py']) == self.all_paths - set(['foo/a.py']) + + def test_single_subdir_wildcard_filename(self): + assert self.exclude(['foo/*.py']) == self.all_paths - set([ + 'foo/a.py', 'foo/b.py', + ]) + + def test_wildcard_subdir_single_filename(self): + assert self.exclude(['*/a.py']) == self.all_paths - set([ + 'foo/a.py', 'bar/a.py', + ]) + + def test_wildcard_subdir_wildcard_filename(self): + assert self.exclude(['*/*.py']) == self.all_paths - set([ + 'foo/a.py', 'foo/b.py', 'bar/a.py', + ]) + + def test_directory(self): + assert self.exclude(['foo']) == self.all_paths - set([ + 'foo', 'foo/a.py', 'foo/b.py', + 'foo/bar', 'foo/bar/a.py', + ]) + + def test_directory_with_trailing_slash(self): + assert self.exclude(['foo']) == self.all_paths - set([ + 'foo', 'foo/a.py', 'foo/b.py', + 'foo/bar', 'foo/bar/a.py', + ]) + + def test_directory_with_single_exception(self): + assert self.exclude(['foo', '!foo/bar/a.py']) == self.all_paths - set([ + 'foo/a.py', 'foo/b.py', + ]) + + def test_directory_with_subdir_exception(self): + assert self.exclude(['foo', '!foo/bar']) == self.all_paths - set([ + 'foo/a.py', 'foo/b.py', + ]) + + def test_directory_with_wildcard_exception(self): + assert self.exclude(['foo', '!foo/*.py']) == self.all_paths - set([ + 'foo/bar', 'foo/bar/a.py', + ]) + + def test_subdirectory(self): + assert self.exclude(['foo/bar']) == self.all_paths - set([ + 'foo/bar', 'foo/bar/a.py', + ]) diff --git a/tests/utils_test.py b/tests/utils_test.py deleted file mode 100644 index b67ac4e..0000000 --- a/tests/utils_test.py +++ /dev/null @@ -1,651 +0,0 @@ -import os -import os.path -import shutil -import tempfile - -from docker.client import Client -from docker.constants import DEFAULT_DOCKER_API_VERSION -from docker.errors import DockerException -from docker.utils import ( - parse_repository_tag, parse_host, convert_filters, kwargs_from_env, - create_host_config, Ulimit, LogConfig, parse_bytes, parse_env_file, - exclude_paths, -) -from docker.utils.ports import build_port_bindings, split_port -from docker.auth import resolve_repository_name, resolve_authconfig - -from . import base -from .helpers import make_tree - -import pytest - -TEST_CERT_DIR = os.path.join( - os.path.dirname(__file__), - 'testdata/certs', -) - - -class UtilsTest(base.BaseTestCase): - longMessage = True - - def generate_tempfile(self, file_content=None): - """ - Generates a temporary file for tests with the content - of 'file_content' and returns the filename. - Don't forget to unlink the file with os.unlink() after. 
- """ - local_tempfile = tempfile.NamedTemporaryFile(delete=False) - local_tempfile.write(file_content.encode('UTF-8')) - local_tempfile.close() - return local_tempfile.name - - def setUp(self): - self.os_environ = os.environ.copy() - - def tearDown(self): - os.environ = self.os_environ - - def test_parse_repository_tag(self): - self.assertEqual(parse_repository_tag("root"), - ("root", None)) - self.assertEqual(parse_repository_tag("root:tag"), - ("root", "tag")) - self.assertEqual(parse_repository_tag("user/repo"), - ("user/repo", None)) - self.assertEqual(parse_repository_tag("user/repo:tag"), - ("user/repo", "tag")) - self.assertEqual(parse_repository_tag("url:5000/repo"), - ("url:5000/repo", None)) - self.assertEqual(parse_repository_tag("url:5000/repo:tag"), - ("url:5000/repo", "tag")) - - def test_parse_bytes(self): - self.assertEqual(parse_bytes("512MB"), (536870912)) - self.assertEqual(parse_bytes("512M"), (536870912)) - self.assertRaises(DockerException, parse_bytes, "512MK") - self.assertRaises(DockerException, parse_bytes, "512L") - - def test_parse_host(self): - invalid_hosts = [ - '0.0.0.0', - 'tcp://', - 'udp://127.0.0.1', - 'udp://127.0.0.1:2375', - ] - - valid_hosts = { - '0.0.0.1:5555': 'http://0.0.0.1:5555', - ':6666': 'http://127.0.0.1:6666', - 'tcp://:7777': 'http://127.0.0.1:7777', - 'http://:7777': 'http://127.0.0.1:7777', - 'https://kokia.jp:2375': 'https://kokia.jp:2375', - 'unix:///var/run/docker.sock': 'http+unix:///var/run/docker.sock', - 'unix://': 'http+unix://var/run/docker.sock', - 'somehost.net:80/service/swarm': ( - 'http://somehost.net:80/service/swarm' - ), - } - - for host in invalid_hosts: - with pytest.raises(DockerException): - parse_host(host, None) - - for host, expected in valid_hosts.items(): - self.assertEqual(parse_host(host, None), expected, msg=host) - - def test_parse_host_empty_value(self): - unix_socket = 'http+unix://var/run/docker.sock' - tcp_port = 'http://127.0.0.1:2375' - - for val in [None, '']: - for platform in ['darwin', 'linux2', None]: - assert parse_host(val, platform) == unix_socket - - assert parse_host(val, 'win32') == tcp_port - - def test_kwargs_from_env_empty(self): - os.environ.update(DOCKER_HOST='', - DOCKER_CERT_PATH='', - DOCKER_TLS_VERIFY='') - - kwargs = kwargs_from_env() - self.assertEqual(None, kwargs.get('base_url')) - self.assertEqual(None, kwargs.get('tls')) - - def test_kwargs_from_env_tls(self): - os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376', - DOCKER_CERT_PATH=TEST_CERT_DIR, - DOCKER_TLS_VERIFY='1') - kwargs = kwargs_from_env(assert_hostname=False) - self.assertEqual('https://192.168.59.103:2376', kwargs['base_url']) - self.assertTrue('ca.pem' in kwargs['tls'].verify) - self.assertTrue('cert.pem' in kwargs['tls'].cert[0]) - self.assertTrue('key.pem' in kwargs['tls'].cert[1]) - self.assertEqual(False, kwargs['tls'].assert_hostname) - try: - client = Client(**kwargs) - self.assertEqual(kwargs['base_url'], client.base_url) - self.assertEqual(kwargs['tls'].verify, client.verify) - self.assertEqual(kwargs['tls'].cert, client.cert) - except TypeError as e: - self.fail(e) - - def test_kwargs_from_env_no_cert_path(self): - try: - temp_dir = tempfile.mkdtemp() - cert_dir = os.path.join(temp_dir, '.docker') - shutil.copytree(TEST_CERT_DIR, cert_dir) - - os.environ.update(HOME=temp_dir, - DOCKER_CERT_PATH='', - DOCKER_TLS_VERIFY='1') - - kwargs = kwargs_from_env() - self.assertIn(cert_dir, kwargs['tls'].verify) - self.assertIn(cert_dir, kwargs['tls'].cert[0]) - self.assertIn(cert_dir, kwargs['tls'].cert[1]) 
- finally: - if temp_dir: - shutil.rmtree(temp_dir) - - def test_parse_env_file_proper(self): - env_file = self.generate_tempfile( - file_content='USER=jdoe\nPASS=secret') - get_parse_env_file = parse_env_file(env_file) - self.assertEqual(get_parse_env_file, - {'USER': 'jdoe', 'PASS': 'secret'}) - os.unlink(env_file) - - def test_parse_env_file_commented_line(self): - env_file = self.generate_tempfile( - file_content='USER=jdoe\n#PASS=secret') - get_parse_env_file = parse_env_file((env_file)) - self.assertEqual(get_parse_env_file, {'USER': 'jdoe'}) - os.unlink(env_file) - - def test_parse_env_file_invalid_line(self): - env_file = self.generate_tempfile( - file_content='USER jdoe') - self.assertRaises( - DockerException, parse_env_file, env_file) - os.unlink(env_file) - - def test_convert_filters(self): - tests = [ - ({'dangling': True}, '{"dangling": ["true"]}'), - ({'dangling': "true"}, '{"dangling": ["true"]}'), - ({'exited': 0}, '{"exited": [0]}'), - ({'exited': [0, 1]}, '{"exited": [0, 1]}'), - ] - - for filters, expected in tests: - self.assertEqual(convert_filters(filters), expected) - - def test_create_host_config_no_options(self): - config = create_host_config(version='1.19') - self.assertFalse('NetworkMode' in config) - - def test_create_host_config_no_options_newer_api_version(self): - config = create_host_config(version='1.20') - self.assertEqual(config['NetworkMode'], 'default') - - def test_create_host_config_dict_ulimit(self): - ulimit_dct = {'name': 'nofile', 'soft': 8096} - config = create_host_config( - ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION - ) - self.assertIn('Ulimits', config) - self.assertEqual(len(config['Ulimits']), 1) - ulimit_obj = config['Ulimits'][0] - self.assertTrue(isinstance(ulimit_obj, Ulimit)) - self.assertEqual(ulimit_obj.name, ulimit_dct['name']) - self.assertEqual(ulimit_obj.soft, ulimit_dct['soft']) - self.assertEqual(ulimit_obj['Soft'], ulimit_obj.soft) - - def test_create_host_config_dict_ulimit_capitals(self): - ulimit_dct = {'Name': 'nofile', 'Soft': 8096, 'Hard': 8096 * 4} - config = create_host_config( - ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION - ) - self.assertIn('Ulimits', config) - self.assertEqual(len(config['Ulimits']), 1) - ulimit_obj = config['Ulimits'][0] - self.assertTrue(isinstance(ulimit_obj, Ulimit)) - self.assertEqual(ulimit_obj.name, ulimit_dct['Name']) - self.assertEqual(ulimit_obj.soft, ulimit_dct['Soft']) - self.assertEqual(ulimit_obj.hard, ulimit_dct['Hard']) - self.assertEqual(ulimit_obj['Soft'], ulimit_obj.soft) - - def test_create_host_config_obj_ulimit(self): - ulimit_dct = Ulimit(name='nofile', soft=8096) - config = create_host_config( - ulimits=[ulimit_dct], version=DEFAULT_DOCKER_API_VERSION - ) - self.assertIn('Ulimits', config) - self.assertEqual(len(config['Ulimits']), 1) - ulimit_obj = config['Ulimits'][0] - self.assertTrue(isinstance(ulimit_obj, Ulimit)) - self.assertEqual(ulimit_obj, ulimit_dct) - - def test_ulimit_invalid_type(self): - self.assertRaises(ValueError, lambda: Ulimit(name=None)) - self.assertRaises(ValueError, lambda: Ulimit(name='hello', soft='123')) - self.assertRaises(ValueError, lambda: Ulimit(name='hello', hard='456')) - - def test_create_host_config_dict_logconfig(self): - dct = {'type': LogConfig.types.SYSLOG, 'config': {'key1': 'val1'}} - config = create_host_config( - version=DEFAULT_DOCKER_API_VERSION, log_config=dct - ) - self.assertIn('LogConfig', config) - self.assertTrue(isinstance(config['LogConfig'], LogConfig)) - self.assertEqual(dct['type'], 
config['LogConfig'].type) - - def test_create_host_config_obj_logconfig(self): - obj = LogConfig(type=LogConfig.types.SYSLOG, config={'key1': 'val1'}) - config = create_host_config( - version=DEFAULT_DOCKER_API_VERSION, log_config=obj - ) - self.assertIn('LogConfig', config) - self.assertTrue(isinstance(config['LogConfig'], LogConfig)) - self.assertEqual(obj, config['LogConfig']) - - def test_logconfig_invalid_config_type(self): - with pytest.raises(ValueError): - LogConfig(type=LogConfig.types.JSON, config='helloworld') - - def test_resolve_repository_name(self): - # docker hub library image - self.assertEqual( - resolve_repository_name('image'), - ('index.docker.io', 'image'), - ) - - # docker hub image - self.assertEqual( - resolve_repository_name('username/image'), - ('index.docker.io', 'username/image'), - ) - - # private registry - self.assertEqual( - resolve_repository_name('my.registry.net/image'), - ('my.registry.net', 'image'), - ) - - # private registry with port - self.assertEqual( - resolve_repository_name('my.registry.net:5000/image'), - ('my.registry.net:5000', 'image'), - ) - - # private registry with username - self.assertEqual( - resolve_repository_name('my.registry.net/username/image'), - ('my.registry.net', 'username/image'), - ) - - # no dots but port - self.assertEqual( - resolve_repository_name('hostname:5000/image'), - ('hostname:5000', 'image'), - ) - - # no dots but port and username - self.assertEqual( - resolve_repository_name('hostname:5000/username/image'), - ('hostname:5000', 'username/image'), - ) - - # localhost - self.assertEqual( - resolve_repository_name('localhost/image'), - ('localhost', 'image'), - ) - - # localhost with username - self.assertEqual( - resolve_repository_name('localhost/username/image'), - ('localhost', 'username/image'), - ) - - def test_resolve_authconfig(self): - auth_config = { - 'https://index.docker.io/v1/': {'auth': 'indexuser'}, - 'my.registry.net': {'auth': 'privateuser'}, - 'http://legacy.registry.url/v1/': {'auth': 'legacyauth'} - } - # hostname only - self.assertEqual( - resolve_authconfig(auth_config, 'my.registry.net'), - {'auth': 'privateuser'} - ) - # no protocol - self.assertEqual( - resolve_authconfig(auth_config, 'my.registry.net/v1/'), - {'auth': 'privateuser'} - ) - # no path - self.assertEqual( - resolve_authconfig(auth_config, 'http://my.registry.net'), - {'auth': 'privateuser'} - ) - # no path, trailing slash - self.assertEqual( - resolve_authconfig(auth_config, 'http://my.registry.net/'), - {'auth': 'privateuser'} - ) - # no path, wrong secure protocol - self.assertEqual( - resolve_authconfig(auth_config, 'https://my.registry.net'), - {'auth': 'privateuser'} - ) - # no path, wrong insecure protocol - self.assertEqual( - resolve_authconfig(auth_config, 'http://index.docker.io'), - {'auth': 'indexuser'} - ) - # with path, wrong protocol - self.assertEqual( - resolve_authconfig(auth_config, 'https://my.registry.net/v1/'), - {'auth': 'privateuser'} - ) - # default registry - self.assertEqual( - resolve_authconfig(auth_config), {'auth': 'indexuser'} - ) - # default registry (explicit None) - self.assertEqual( - resolve_authconfig(auth_config, None), {'auth': 'indexuser'} - ) - # fully explicit - self.assertEqual( - resolve_authconfig(auth_config, 'http://my.registry.net/v1/'), - {'auth': 'privateuser'} - ) - # legacy entry in config - self.assertEqual( - resolve_authconfig(auth_config, 'legacy.registry.url'), - {'auth': 'legacyauth'} - ) - # no matching entry - self.assertTrue( - resolve_authconfig(auth_config, 
'does.not.exist') is None - ) - - def test_resolve_registry_and_auth(self): - auth_config = { - 'https://index.docker.io/v1/': {'auth': 'indexuser'}, - 'my.registry.net': {'auth': 'privateuser'}, - } - - # library image - image = 'image' - self.assertEqual( - resolve_authconfig(auth_config, resolve_repository_name(image)[0]), - {'auth': 'indexuser'}, - ) - - # docker hub image - image = 'username/image' - self.assertEqual( - resolve_authconfig(auth_config, resolve_repository_name(image)[0]), - {'auth': 'indexuser'}, - ) - - # private registry - image = 'my.registry.net/image' - self.assertEqual( - resolve_authconfig(auth_config, resolve_repository_name(image)[0]), - {'auth': 'privateuser'}, - ) - - # unauthenticated registry - image = 'other.registry.net/image' - self.assertEqual( - resolve_authconfig(auth_config, resolve_repository_name(image)[0]), - None, - ) - - def test_split_port_with_host_ip(self): - internal_port, external_port = split_port("127.0.0.1:1000:2000") - self.assertEqual(internal_port, ["2000"]) - self.assertEqual(external_port, [("127.0.0.1", "1000")]) - - def test_split_port_with_protocol(self): - internal_port, external_port = split_port("127.0.0.1:1000:2000/udp") - self.assertEqual(internal_port, ["2000/udp"]) - self.assertEqual(external_port, [("127.0.0.1", "1000")]) - - def test_split_port_with_host_ip_no_port(self): - internal_port, external_port = split_port("127.0.0.1::2000") - self.assertEqual(internal_port, ["2000"]) - self.assertEqual(external_port, [("127.0.0.1", None)]) - - def test_split_port_range_with_host_ip_no_port(self): - internal_port, external_port = split_port("127.0.0.1::2000-2001") - self.assertEqual(internal_port, ["2000", "2001"]) - self.assertEqual(external_port, - [("127.0.0.1", None), ("127.0.0.1", None)]) - - def test_split_port_with_host_port(self): - internal_port, external_port = split_port("1000:2000") - self.assertEqual(internal_port, ["2000"]) - self.assertEqual(external_port, ["1000"]) - - def test_split_port_range_with_host_port(self): - internal_port, external_port = split_port("1000-1001:2000-2001") - self.assertEqual(internal_port, ["2000", "2001"]) - self.assertEqual(external_port, ["1000", "1001"]) - - def test_split_port_no_host_port(self): - internal_port, external_port = split_port("2000") - self.assertEqual(internal_port, ["2000"]) - self.assertEqual(external_port, None) - - def test_split_port_range_no_host_port(self): - internal_port, external_port = split_port("2000-2001") - self.assertEqual(internal_port, ["2000", "2001"]) - self.assertEqual(external_port, None) - - def test_split_port_range_with_protocol(self): - internal_port, external_port = split_port( - "127.0.0.1:1000-1001:2000-2001/udp") - self.assertEqual(internal_port, ["2000/udp", "2001/udp"]) - self.assertEqual(external_port, - [("127.0.0.1", "1000"), ("127.0.0.1", "1001")]) - - def test_split_port_invalid(self): - self.assertRaises(ValueError, - lambda: split_port("0.0.0.0:1000:2000:tcp")) - - def test_non_matching_length_port_ranges(self): - self.assertRaises( - ValueError, - lambda: split_port("0.0.0.0:1000-1010:2000-2002/tcp") - ) - - def test_port_and_range_invalid(self): - self.assertRaises(ValueError, - lambda: split_port("0.0.0.0:1000:2000-2002/tcp")) - - def test_port_only_with_colon(self): - self.assertRaises(ValueError, - lambda: split_port(":80")) - - def test_host_only_with_colon(self): - self.assertRaises(ValueError, - lambda: split_port("localhost:")) - - def test_build_port_bindings_with_one_port(self): - port_bindings = 
build_port_bindings(["127.0.0.1:1000:1000"]) - self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) - - def test_build_port_bindings_with_matching_internal_ports(self): - port_bindings = build_port_bindings( - ["127.0.0.1:1000:1000", "127.0.0.1:2000:1000"]) - self.assertEqual(port_bindings["1000"], - [("127.0.0.1", "1000"), ("127.0.0.1", "2000")]) - - def test_build_port_bindings_with_nonmatching_internal_ports(self): - port_bindings = build_port_bindings( - ["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"]) - self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) - self.assertEqual(port_bindings["2000"], [("127.0.0.1", "2000")]) - - def test_build_port_bindings_with_port_range(self): - port_bindings = build_port_bindings(["127.0.0.1:1000-1001:1000-1001"]) - self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) - self.assertEqual(port_bindings["1001"], [("127.0.0.1", "1001")]) - - def test_build_port_bindings_with_matching_internal_port_ranges(self): - port_bindings = build_port_bindings( - ["127.0.0.1:1000-1001:1000-1001", "127.0.0.1:2000-2001:1000-1001"]) - self.assertEqual(port_bindings["1000"], - [("127.0.0.1", "1000"), ("127.0.0.1", "2000")]) - self.assertEqual(port_bindings["1001"], - [("127.0.0.1", "1001"), ("127.0.0.1", "2001")]) - - def test_build_port_bindings_with_nonmatching_internal_port_ranges(self): - port_bindings = build_port_bindings( - ["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"]) - self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")]) - self.assertEqual(port_bindings["2000"], [("127.0.0.1", "2000")]) - - -class ExcludePathsTest(base.BaseTestCase): - dirs = [ - 'foo', - 'foo/bar', - 'bar', - ] - - files = [ - 'Dockerfile', - 'Dockerfile.alt', - '.dockerignore', - 'a.py', - 'a.go', - 'b.py', - 'cde.py', - 'foo/a.py', - 'foo/b.py', - 'foo/bar/a.py', - 'bar/a.py', - ] - - all_paths = set(dirs + files) - - def setUp(self): - self.base = make_tree(self.dirs, self.files) - - def tearDown(self): - shutil.rmtree(self.base) - - def exclude(self, patterns, dockerfile=None): - return set(exclude_paths(self.base, patterns, dockerfile=dockerfile)) - - def test_no_excludes(self): - assert self.exclude(['']) == self.all_paths - - def test_no_dupes(self): - paths = exclude_paths(self.base, ['!a.py']) - assert sorted(paths) == sorted(set(paths)) - - def test_wildcard_exclude(self): - assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore']) - - def test_exclude_dockerfile_dockerignore(self): - """ - Even if the .dockerignore file explicitly says to exclude - Dockerfile and/or .dockerignore, don't exclude them from - the actual tar file. - """ - assert self.exclude(['Dockerfile', '.dockerignore']) == self.all_paths - - def test_exclude_custom_dockerfile(self): - """ - If we're using a custom Dockerfile, make sure that's not - excluded. - """ - assert self.exclude(['*'], dockerfile='Dockerfile.alt') == \ - set(['Dockerfile.alt', '.dockerignore']) - - def test_single_filename(self): - assert self.exclude(['a.py']) == self.all_paths - set(['a.py']) - - # As odd as it sounds, a filename pattern with a trailing slash on the - # end *will* result in that file being excluded. 
- def test_single_filename_trailing_slash(self): - assert self.exclude(['a.py/']) == self.all_paths - set(['a.py']) - - def test_wildcard_filename_start(self): - assert self.exclude(['*.py']) == self.all_paths - set([ - 'a.py', 'b.py', 'cde.py', - ]) - - def test_wildcard_with_exception(self): - assert self.exclude(['*.py', '!b.py']) == self.all_paths - set([ - 'a.py', 'cde.py', - ]) - - def test_wildcard_with_wildcard_exception(self): - assert self.exclude(['*.*', '!*.go']) == self.all_paths - set([ - 'a.py', 'b.py', 'cde.py', 'Dockerfile.alt', - ]) - - def test_wildcard_filename_end(self): - assert self.exclude(['a.*']) == self.all_paths - set(['a.py', 'a.go']) - - def test_question_mark(self): - assert self.exclude(['?.py']) == self.all_paths - set(['a.py', 'b.py']) - - def test_single_subdir_single_filename(self): - assert self.exclude(['foo/a.py']) == self.all_paths - set(['foo/a.py']) - - def test_single_subdir_wildcard_filename(self): - assert self.exclude(['foo/*.py']) == self.all_paths - set([ - 'foo/a.py', 'foo/b.py', - ]) - - def test_wildcard_subdir_single_filename(self): - assert self.exclude(['*/a.py']) == self.all_paths - set([ - 'foo/a.py', 'bar/a.py', - ]) - - def test_wildcard_subdir_wildcard_filename(self): - assert self.exclude(['*/*.py']) == self.all_paths - set([ - 'foo/a.py', 'foo/b.py', 'bar/a.py', - ]) - - def test_directory(self): - assert self.exclude(['foo']) == self.all_paths - set([ - 'foo', 'foo/a.py', 'foo/b.py', - 'foo/bar', 'foo/bar/a.py', - ]) - - def test_directory_with_trailing_slash(self): - assert self.exclude(['foo']) == self.all_paths - set([ - 'foo', 'foo/a.py', 'foo/b.py', - 'foo/bar', 'foo/bar/a.py', - ]) - - def test_directory_with_single_exception(self): - assert self.exclude(['foo', '!foo/bar/a.py']) == self.all_paths - set([ - 'foo/a.py', 'foo/b.py', - ]) - - def test_directory_with_subdir_exception(self): - assert self.exclude(['foo', '!foo/bar']) == self.all_paths - set([ - 'foo/a.py', 'foo/b.py', - ]) - - def test_directory_with_wildcard_exception(self): - assert self.exclude(['foo', '!foo/*.py']) == self.all_paths - set([ - 'foo/bar', 'foo/bar/a.py', - ]) - - def test_subdirectory(self): - assert self.exclude(['foo/bar']) == self.all_paths - set([ - 'foo/bar', 'foo/bar/a.py', - ]) diff --git a/tox.ini b/tox.ini index eb31bee..96b9177 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,7 @@ skipsdist=True [testenv] usedevelop=True commands = - py.test --cov=docker tests/test.py tests/utils_test.py + py.test --cov=docker tests/unit/ deps = -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt -- cgit v1.2.1