author     Milas Bowman <milas.bowman@docker.com>  2022-07-29 15:56:01 -0400
committer  Milas Bowman <milas.bowman@docker.com>  2022-07-29 15:56:01 -0400
commit     c6c2bbdcda6ebfc6afae55fd696fb83bcd8ebdf5 (patch)
tree       f5123e74d200bb0f22b9a8b193e7e52514850bd1
parent     bf1a3518f92eb845d1e39c8c18d9ee137f896c32 (diff)
parent     73421027be04c97fc6f50da0647ba47388ed60e5 (diff)
Merge remote-tracking branch 'upstream/main' into HEAD
-rw-r--r--  .github/CODEOWNERS | 6
-rw-r--r--  .github/workflows/ci.yml | 51
-rw-r--r--  .readthedocs.yml | 6
-rw-r--r--  .travis.yml | 20
-rw-r--r--  Dockerfile | 8
-rw-r--r--  Dockerfile-docs | 4
-rw-r--r--  Dockerfile-py3 | 15
-rw-r--r--  Jenkinsfile | 92
-rw-r--r--  MAINTAINERS | 14
-rw-r--r--  Makefile | 174
-rw-r--r--  README.md | 9
-rw-r--r--  appveyor.yml | 13
-rw-r--r--  docker/api/build.py | 14
-rw-r--r--  docker/api/client.py | 105
-rw-r--r--  docker/api/config.py | 15
-rw-r--r--  docker/api/container.py | 110
-rw-r--r--  docker/api/daemon.py | 4
-rw-r--r--  docker/api/exec_api.py | 6
-rw-r--r--  docker/api/image.py | 58
-rw-r--r--  docker/api/network.py | 11
-rw-r--r--  docker/api/plugin.py | 11
-rw-r--r--  docker/api/secret.py | 7
-rw-r--r--  docker/api/service.py | 4
-rw-r--r--  docker/api/swarm.py | 14
-rw-r--r--  docker/api/volume.py | 25
-rw-r--r--  docker/auth.py | 39
-rw-r--r--  docker/client.py | 26
-rw-r--r--  docker/constants.py | 6
-rw-r--r--  docker/context/api.py | 6
-rw-r--r--  docker/context/config.py | 4
-rw-r--r--  docker/context/context.py | 106
-rw-r--r--  docker/credentials/store.py | 27
-rw-r--r--  docker/errors.py | 50
-rw-r--r--  docker/models/configs.py | 2
-rw-r--r--  docker/models/containers.py | 32
-rw-r--r--  docker/models/images.py | 71
-rw-r--r--  docker/models/networks.py | 2
-rw-r--r--  docker/models/plugins.py | 18
-rw-r--r--  docker/models/resource.py | 13
-rw-r--r--  docker/models/secrets.py | 3
-rw-r--r--  docker/models/services.py | 10
-rw-r--r--  docker/models/swarm.py | 2
-rw-r--r--  docker/tls.py | 40
-rw-r--r--  docker/transport/basehttpadapter.py | 2
-rw-r--r--  docker/transport/npipeconn.py | 29
-rw-r--r--  docker/transport/npipesocket.py | 8
-rw-r--r--  docker/transport/sshconn.py | 188
-rw-r--r--  docker/transport/ssladapter.py | 16
-rw-r--r--  docker/transport/unixconn.py | 42
-rw-r--r--  docker/types/__init__.py | 4
-rw-r--r--  docker/types/base.py | 5
-rw-r--r--  docker/types/containers.py | 175
-rw-r--r--  docker/types/daemon.py | 4
-rw-r--r--  docker/types/healthcheck.py | 8
-rw-r--r--  docker/types/networks.py | 11
-rw-r--r--  docker/types/services.py | 48
-rw-r--r--  docker/utils/build.py | 33
-rw-r--r--  docker/utils/config.py | 6
-rw-r--r--  docker/utils/decorators.py | 2
-rw-r--r--  docker/utils/fnmatch.py | 2
-rw-r--r--  docker/utils/json_stream.py | 13
-rw-r--r--  docker/utils/ports.py | 4
-rw-r--r--  docker/utils/socket.py | 14
-rw-r--r--  docker/utils/utils.py | 116
-rw-r--r--  docker/version.py | 4
-rw-r--r--  docs-requirements.txt | 4
-rw-r--r--  docs/_static/custom.css | 5
-rw-r--r--  docs/change-log.md | 151
-rw-r--r--  docs/conf.py | 36
-rw-r--r--  docs/index.rst | 2
-rw-r--r--  docs/tls.rst | 2
-rw-r--r--  requirements.txt | 25
-rwxr-xr-x  scripts/versions.py | 4
-rw-r--r--  setup.cfg | 3
-rw-r--r--  setup.py | 46
-rw-r--r--  test-requirements.txt | 13
-rw-r--r--  tests/Dockerfile | 15
-rw-r--r--  tests/Dockerfile-dind-certs | 2
-rw-r--r--  tests/Dockerfile-ssh-dind | 18
-rw-r--r--  tests/helpers.py | 11
-rw-r--r--  tests/integration/api_build_test.py | 29
-rw-r--r--  tests/integration/api_client_test.py | 2
-rw-r--r--  tests/integration/api_config_test.py | 17
-rw-r--r--  tests/integration/api_container_test.py | 118
-rw-r--r--  tests/integration/api_exec_test.py | 2
-rw-r--r--  tests/integration/api_image_test.py | 23
-rw-r--r--  tests/integration/api_network_test.py | 23
-rw-r--r--  tests/integration/api_plugin_test.py | 4
-rw-r--r--  tests/integration/api_secret_test.py | 4
-rw-r--r--  tests/integration/api_service_test.py | 78
-rw-r--r--  tests/integration/api_swarm_test.py | 4
-rw-r--r--  tests/integration/base.py | 4
-rw-r--r--  tests/integration/conftest.py | 6
-rw-r--r--  tests/integration/credentials/store_test.py | 7
-rw-r--r--  tests/integration/credentials/utils_test.py | 6
-rw-r--r--  tests/integration/models_images_test.py | 24
-rw-r--r--  tests/integration/models_services_test.py | 45
-rw-r--r--  tests/integration/regression_test.py | 11
-rw-r--r--  tests/ssh/__init__.py | 0
-rw-r--r--  tests/ssh/api_build_test.py | 590
-rw-r--r--  tests/ssh/base.py | 134
-rw-r--r--  tests/ssh/config/client/id_rsa | 38
-rw-r--r--  tests/ssh/config/client/id_rsa.pub | 1
-rw-r--r--  tests/ssh/config/server/known_ed25519 | 7
-rw-r--r--  tests/ssh/config/server/known_ed25519.pub | 1
-rw-r--r--  tests/ssh/config/server/sshd_config | 3
-rw-r--r--  tests/ssh/config/server/unknown_ed25519 | 7
-rw-r--r--  tests/ssh/config/server/unknown_ed25519.pub | 1
-rw-r--r--  tests/ssh/connect_test.py | 22
-rw-r--r--  tests/unit/api_container_test.py | 196
-rw-r--r--  tests/unit/api_exec_test.py | 10
-rw-r--r--  tests/unit/api_image_test.py | 41
-rw-r--r--  tests/unit/api_network_test.py | 28
-rw-r--r--  tests/unit/api_test.py | 103
-rw-r--r--  tests/unit/api_volume_test.py | 4
-rw-r--r--  tests/unit/auth_test.py | 26
-rw-r--r--  tests/unit/client_test.py | 192
-rw-r--r--  tests/unit/dockertypes_test.py | 8
-rw-r--r--  tests/unit/errors_test.py | 2
-rw-r--r--  tests/unit/fake_api.py | 119
-rw-r--r--  tests/unit/fake_api_client.py | 17
-rw-r--r--  tests/unit/models_containers_test.py | 40
-rw-r--r--  tests/unit/models_images_test.py | 43
-rw-r--r--  tests/unit/models_resources_test.py | 2
-rw-r--r--  tests/unit/models_secrets_test.py | 11
-rw-r--r--  tests/unit/models_services_test.py | 12
-rw-r--r--  tests/unit/sshadapter_test.py | 39
-rw-r--r--  tests/unit/ssladapter_test.py | 51
-rw-r--r--  tests/unit/swarm_test.py | 2
-rw-r--r--  tests/unit/utils_build_test.py | 134
-rw-r--r--  tests/unit/utils_config_test.py | 6
-rw-r--r--  tests/unit/utils_json_stream_test.py | 12
-rw-r--r--  tests/unit/utils_proxy_test.py | 7
-rw-r--r--  tests/unit/utils_test.py | 79
-rw-r--r--  tox.ini | 2
135 files changed, 3303 insertions, 1418 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..5df3014
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,6 @@
+# GitHub code owners
+# See https://help.github.com/articles/about-codeowners/
+#
+# KEEP THIS FILE SORTED. Order is important. Last match takes precedence.
+
+* @aiordache @ulyssessouza
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..296bf0d
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,51 @@
+name: Python package
+
+on: [push, pull_request]
+
+jobs:
+ flake8:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
+ with:
+ python-version: '3.x'
+ - run: pip install -U flake8
+ - name: Run flake8
+ run: flake8 docker/ tests/
+
+ unit-tests:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: ["3.7", "3.8", "3.9", "3.10", "3.11.0-alpha - 3.11.0"]
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install dependencies
+ run: |
+ python3 -m pip install --upgrade pip
+ pip3 install -r test-requirements.txt -r requirements.txt
+ - name: Run unit tests
+ run: |
+ docker logout
+ rm -rf ~/.docker
+ py.test -v --cov=docker tests/unit
+
+ integration-tests:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ variant: [ "integration-dind", "integration-dind-ssl", "integration-dind-ssh" ]
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: make ${{ matrix.variant }}
+ run: |
+ docker logout
+ rm -rf ~/.docker
+ make ${{ matrix.variant }}
diff --git a/.readthedocs.yml b/.readthedocs.yml
index 7679f80..464c782 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -3,8 +3,12 @@ version: 2
sphinx:
configuration: docs/conf.py
+build:
+ os: ubuntu-20.04
+ tools:
+ python: '3.10'
+
python:
- version: 3.5
install:
- requirements: docs-requirements.txt
- requirements: requirements.txt
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 7b3d724..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-sudo: false
-language: python
-matrix:
- include:
- - python: 2.7
- env: TOXENV=py27
- - python: 3.5
- env: TOXENV=py35
- - python: 3.6
- env: TOXENV=py36
- - python: 3.7
- env: TOXENV=py37
- dist: xenial
- sudo: true
- - env: TOXENV=flake8
-
-install:
- - pip install tox==2.9.1
-script:
- - tox
diff --git a/Dockerfile b/Dockerfile
index 124f68c..c158a9d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-ARG PYTHON_VERSION=2.7
+ARG PYTHON_VERSION=3.10
FROM python:${PYTHON_VERSION}
@@ -6,10 +6,10 @@ RUN mkdir /src
WORKDIR /src
COPY requirements.txt /src/requirements.txt
-RUN pip install -r requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
COPY test-requirements.txt /src/test-requirements.txt
-RUN pip install -r test-requirements.txt
+RUN pip install --no-cache-dir -r test-requirements.txt
COPY . /src
-RUN pip install .
+RUN pip install --no-cache-dir .
diff --git a/Dockerfile-docs b/Dockerfile-docs
index 9d11312..e993822 100644
--- a/Dockerfile-docs
+++ b/Dockerfile-docs
@@ -1,4 +1,4 @@
-ARG PYTHON_VERSION=3.7
+ARG PYTHON_VERSION=3.10
FROM python:${PYTHON_VERSION}
@@ -10,6 +10,6 @@ RUN addgroup --gid $gid sphinx \
WORKDIR /src
COPY requirements.txt docs-requirements.txt ./
-RUN pip install -r requirements.txt -r docs-requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt -r docs-requirements.txt
USER sphinx
diff --git a/Dockerfile-py3 b/Dockerfile-py3
deleted file mode 100644
index 22732de..0000000
--- a/Dockerfile-py3
+++ /dev/null
@@ -1,15 +0,0 @@
-ARG PYTHON_VERSION=3.7
-
-FROM python:${PYTHON_VERSION}
-
-RUN mkdir /src
-WORKDIR /src
-
-COPY requirements.txt /src/requirements.txt
-RUN pip install -r requirements.txt
-
-COPY test-requirements.txt /src/test-requirements.txt
-RUN pip install -r test-requirements.txt
-
-COPY . /src
-RUN pip install .
diff --git a/Jenkinsfile b/Jenkinsfile
index 8777214..f9431ea 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,8 +1,8 @@
#!groovy
-def imageNameBase = "dockerbuildbot/docker-py"
-def imageNamePy2
+def imageNameBase = "dockerpinata/docker-py"
def imageNamePy3
+def imageDindSSH
def images = [:]
def buildImage = { name, buildargs, pyTag ->
@@ -13,26 +13,27 @@ def buildImage = { name, buildargs, pyTag ->
img = docker.build(name, buildargs)
img.push()
}
- images[pyTag] = img.id
+ if (pyTag?.trim()) images[pyTag] = img.id
}
def buildImages = { ->
- wrappedNode(label: "amd64 && ubuntu-1804 && overlay2", cleanWorkspace: true) {
+ wrappedNode(label: "amd64 && ubuntu-2004 && overlay2", cleanWorkspace: true) {
stage("build image") {
checkout(scm)
- imageNamePy2 = "${imageNameBase}:py2-${gitCommit()}"
imageNamePy3 = "${imageNameBase}:py3-${gitCommit()}"
-
- buildImage(imageNamePy2, "-f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 .", "py2.7")
- buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.7 .", "py3.7")
+ imageDindSSH = "${imageNameBase}:sshdind-${gitCommit()}"
+ withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
+ buildImage(imageDindSSH, "-f tests/Dockerfile-ssh-dind .", "")
+ buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.10 .", "py3.10")
+ }
}
}
}
def getDockerVersions = { ->
- def dockerVersions = ["19.03.5"]
- wrappedNode(label: "amd64 && ubuntu-1804 && overlay2") {
+ def dockerVersions = ["19.03.12"]
+ wrappedNode(label: "amd64 && ubuntu-2004 && overlay2") {
def result = sh(script: """docker run --rm \\
--entrypoint=python \\
${imageNamePy3} \\
@@ -66,39 +67,64 @@ def runTests = { Map settings ->
throw new Exception("Need test image object, e.g.: `runTests(testImage: img)`")
}
if (!dockerVersion) {
- throw new Exception("Need Docker version to test, e.g.: `runTests(dockerVersion: '1.12.3')`")
+ throw new Exception("Need Docker version to test, e.g.: `runTests(dockerVersion: '19.03.12')`")
}
if (!pythonVersion) {
- throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py2.7')`")
+ throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py3.x')`")
}
{ ->
- wrappedNode(label: "amd64 && ubuntu-1804 && overlay2", cleanWorkspace: true) {
+ wrappedNode(label: "amd64 && ubuntu-2004 && overlay2", cleanWorkspace: true) {
stage("test python=${pythonVersion} / docker=${dockerVersion}") {
checkout(scm)
def dindContainerName = "dpy-dind-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}"
def testContainerName = "dpy-tests-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}"
def testNetwork = "dpy-testnet-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}"
- try {
- sh """docker network create ${testNetwork}"""
- sh """docker run -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\
- docker:${dockerVersion}-dind dockerd -H tcp://0.0.0.0:2375
- """
- sh """docker run \\
- --name ${testContainerName} \\
- -e "DOCKER_HOST=tcp://${dindContainerName}:2375" \\
- -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\
- --network ${testNetwork} \\
- --volumes-from ${dindContainerName} \\
- ${testImage} \\
- py.test -v -rxs --cov=docker tests/
- """
- } finally {
- sh """
- docker stop ${dindContainerName} ${testContainerName}
- docker rm -vf ${dindContainerName} ${testContainerName}
- docker network rm ${testNetwork}
- """
+ withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
+ try {
+ // unit tests
+ sh """docker run --rm \\
+ -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\
+ ${testImage} \\
+ py.test -v -rxs --cov=docker tests/unit
+ """
+ // integration tests
+ sh """docker network create ${testNetwork}"""
+ sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\
+ ${imageDindSSH} dockerd -H tcp://0.0.0.0:2375
+ """
+ sh """docker run --rm \\
+ --name ${testContainerName} \\
+ -e "DOCKER_HOST=tcp://${dindContainerName}:2375" \\
+ -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\
+ --network ${testNetwork} \\
+ --volumes-from ${dindContainerName} \\
+ -v $DOCKER_CONFIG/config.json:/root/.docker/config.json \\
+ ${testImage} \\
+ py.test -v -rxs --cov=docker tests/integration
+ """
+ sh """docker stop ${dindContainerName}"""
+ // start DIND container with SSH
+ sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\
+ ${imageDindSSH} dockerd --experimental"""
+ sh """docker exec ${dindContainerName} sh -c /usr/sbin/sshd """
+ // run SSH tests only
+ sh """docker run --rm \\
+ --name ${testContainerName} \\
+ -e "DOCKER_HOST=ssh://${dindContainerName}:22" \\
+ -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\
+ --network ${testNetwork} \\
+ --volumes-from ${dindContainerName} \\
+ -v $DOCKER_CONFIG/config.json:/root/.docker/config.json \\
+ ${testImage} \\
+ py.test -v -rxs --cov=docker tests/ssh
+ """
+ } finally {
+ sh """
+ docker stop ${dindContainerName}
+ docker network rm ${testNetwork}
+ """
+ }
}
}
}
diff --git a/MAINTAINERS b/MAINTAINERS
index b857d13..b74cb28 100644
--- a/MAINTAINERS
+++ b/MAINTAINERS
@@ -11,7 +11,8 @@
[Org]
[Org."Core maintainers"]
people = [
- "shin-",
+ "aiordache",
+ "ulyssessouza",
]
[Org.Alumni]
people = [
@@ -20,6 +21,7 @@
"dnephin",
"mnowster",
"mpetazzoni",
+ "shin-",
]
[people]
@@ -35,6 +37,11 @@
Email = "aanand@docker.com"
GitHub = "aanand"
+ [people.aiordache]
+ Name = "Anca Iordache"
+ Email = "anca.iordache@docker.com"
+ GitHub = "aiordache"
+
[people.bfirsh]
Name = "Ben Firshman"
Email = "b@fir.sh"
@@ -59,3 +66,8 @@
Name = "Joffrey F"
Email = "joffrey@docker.com"
GitHub = "shin-"
+
+ [people.ulyssessouza]
+ Name = "Ulysses Domiciano Souza"
+ Email = "ulysses.souza@docker.com"
+ GitHub = "ulyssessouza"
diff --git a/Makefile b/Makefile
index 551868e..ae6ae34 100644
--- a/Makefile
+++ b/Makefile
@@ -1,100 +1,176 @@
+TEST_API_VERSION ?= 1.41
+TEST_ENGINE_VERSION ?= 20.10
+
+ifeq ($(OS),Windows_NT)
+ PLATFORM := Windows
+else
+ PLATFORM := $(shell sh -c 'uname -s 2>/dev/null || echo Unknown')
+endif
+
+ifeq ($(PLATFORM),Linux)
+ uid_args := "--build-arg uid=$(shell id -u) --build-arg gid=$(shell id -g)"
+endif
+
.PHONY: all
all: test
.PHONY: clean
clean:
- -docker rm -f dpy-dind-py2 dpy-dind-py3 dpy-dind-certs dpy-dind-ssl
+ -docker rm -f dpy-dind-py3 dpy-dind-certs dpy-dind-ssl
find -name "__pycache__" | xargs rm -rf
-.PHONY: build
-build:
- docker build -t docker-sdk-python -f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 --build-arg APT_MIRROR .
+.PHONY: build-dind-ssh
+build-dind-ssh:
+ docker build \
+ --pull \
+ -t docker-dind-ssh \
+ -f tests/Dockerfile-ssh-dind \
+ --build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} \
+ --build-arg API_VERSION=${TEST_API_VERSION} \
+ --build-arg APT_MIRROR .
.PHONY: build-py3
build-py3:
- docker build -t docker-sdk-python3 -f tests/Dockerfile --build-arg APT_MIRROR .
+ docker build \
+ --pull \
+ -t docker-sdk-python3 \
+ -f tests/Dockerfile \
+ --build-arg APT_MIRROR .
.PHONY: build-docs
build-docs:
- docker build -t docker-sdk-python-docs -f Dockerfile-docs --build-arg uid=$(shell id -u) --build-arg gid=$(shell id -g) .
+ docker build -t docker-sdk-python-docs -f Dockerfile-docs $(uid_args) .
.PHONY: build-dind-certs
build-dind-certs:
docker build -t dpy-dind-certs -f tests/Dockerfile-dind-certs .
.PHONY: test
-test: flake8 unit-test unit-test-py3 integration-dind integration-dind-ssl
-
-.PHONY: unit-test
-unit-test: build
- docker run -t --rm docker-sdk-python py.test tests/unit
+test: flake8 unit-test-py3 integration-dind integration-dind-ssl
.PHONY: unit-test-py3
unit-test-py3: build-py3
docker run -t --rm docker-sdk-python3 py.test tests/unit
-.PHONY: integration-test
-integration-test: build
- docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python py.test -v tests/integration/${file}
-
.PHONY: integration-test-py3
integration-test-py3: build-py3
docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file}
-TEST_API_VERSION ?= 1.35
-TEST_ENGINE_VERSION ?= 19.03.5
-
.PHONY: setup-network
setup-network:
docker network inspect dpy-tests || docker network create dpy-tests
.PHONY: integration-dind
-integration-dind: integration-dind-py2 integration-dind-py3
-
-.PHONY: integration-dind-py2
-integration-dind-py2: build setup-network
- docker rm -vf dpy-dind-py2 || :
- docker run -d --network dpy-tests --name dpy-dind-py2 --privileged\
- docker:${TEST_ENGINE_VERSION}-dind dockerd -H tcp://0.0.0.0:2375 --experimental
- docker run -t --rm --env="DOCKER_HOST=tcp://dpy-dind-py2:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
- --network dpy-tests docker-sdk-python py.test tests/integration
- docker rm -vf dpy-dind-py2
+integration-dind: integration-dind-py3
.PHONY: integration-dind-py3
integration-dind-py3: build-py3 setup-network
docker rm -vf dpy-dind-py3 || :
- docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\
- docker:${TEST_ENGINE_VERSION}-dind dockerd -H tcp://0.0.0.0:2375 --experimental
- docker run -t --rm --env="DOCKER_HOST=tcp://dpy-dind-py3:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
- --network dpy-tests docker-sdk-python3 py.test tests/integration
+
+ docker run \
+ --detach \
+ --name dpy-dind-py3 \
+ --network dpy-tests \
+ --pull=always \
+ --privileged \
+ docker:${TEST_ENGINE_VERSION}-dind \
+ dockerd -H tcp://0.0.0.0:2375 --experimental
+
+ # Wait for Docker-in-Docker to come to life
+ docker run \
+ --network dpy-tests \
+ --rm \
+ --tty \
+ busybox \
+ sh -c 'while ! nc -z dpy-dind-py3 2375; do sleep 1; done'
+
+ docker run \
+ --env="DOCKER_HOST=tcp://dpy-dind-py3:2375" \
+ --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \
+ --network dpy-tests \
+ --rm \
+ --tty \
+ docker-sdk-python3 \
+ py.test tests/integration/${file}
+
docker rm -vf dpy-dind-py3
+
+.PHONY: integration-dind-ssh
+integration-dind-ssh: build-dind-ssh build-py3 setup-network
+ docker rm -vf dpy-dind-ssh || :
+ docker run -d --network dpy-tests --name dpy-dind-ssh --privileged \
+ docker-dind-ssh dockerd --experimental
+ # start SSH daemon for known key
+ docker exec dpy-dind-ssh sh -c "/usr/sbin/sshd -h /etc/ssh/known_ed25519 -p 22"
+ docker exec dpy-dind-ssh sh -c "/usr/sbin/sshd -h /etc/ssh/unknown_ed25519 -p 2222"
+ docker run \
+ --tty \
+ --rm \
+ --env="DOCKER_HOST=ssh://dpy-dind-ssh" \
+ --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \
+ --env="UNKNOWN_DOCKER_SSH_HOST=ssh://dpy-dind-ssh:2222" \
+ --network dpy-tests \
+ docker-sdk-python3 py.test tests/ssh/${file}
+ docker rm -vf dpy-dind-ssh
+
+
.PHONY: integration-dind-ssl
-integration-dind-ssl: build-dind-certs build build-py3
+integration-dind-ssl: build-dind-certs build-py3 setup-network
docker rm -vf dpy-dind-certs dpy-dind-ssl || :
docker run -d --name dpy-dind-certs dpy-dind-certs
- docker run -d --env="DOCKER_HOST=tcp://localhost:2375" --env="DOCKER_TLS_VERIFY=1"\
- --env="DOCKER_CERT_PATH=/certs" --volumes-from dpy-dind-certs --name dpy-dind-ssl\
- --network dpy-tests --network-alias docker -v /tmp --privileged\
- docker:${TEST_ENGINE_VERSION}-dind\
- dockerd --tlsverify --tlscacert=/certs/ca.pem --tlscert=/certs/server-cert.pem\
- --tlskey=/certs/server-key.pem -H tcp://0.0.0.0:2375 --experimental
- docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\
- --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
- --network dpy-tests docker-sdk-python py.test tests/integration
- docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\
- --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
- --network dpy-tests docker-sdk-python3 py.test tests/integration
+
+ docker run \
+ --detach \
+ --env="DOCKER_CERT_PATH=/certs" \
+ --env="DOCKER_HOST=tcp://localhost:2375" \
+ --env="DOCKER_TLS_VERIFY=1" \
+ --name dpy-dind-ssl \
+ --network dpy-tests \
+ --network-alias docker \
+ --pull=always \
+ --privileged \
+ --volume /tmp \
+ --volumes-from dpy-dind-certs \
+ docker:${TEST_ENGINE_VERSION}-dind \
+ dockerd \
+ --tlsverify \
+ --tlscacert=/certs/ca.pem \
+ --tlscert=/certs/server-cert.pem \
+ --tlskey=/certs/server-key.pem \
+ -H tcp://0.0.0.0:2375 \
+ --experimental
+
+ # Wait for Docker-in-Docker to come to life
+ docker run \
+ --network dpy-tests \
+ --rm \
+ --tty \
+ busybox \
+ sh -c 'while ! nc -z dpy-dind-ssl 2375; do sleep 1; done'
+
+ docker run \
+ --env="DOCKER_CERT_PATH=/certs" \
+ --env="DOCKER_HOST=tcp://docker:2375" \
+ --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \
+ --env="DOCKER_TLS_VERIFY=1" \
+ --network dpy-tests \
+ --rm \
+ --volumes-from dpy-dind-ssl \
+ --tty \
+ docker-sdk-python3 \
+ py.test tests/integration/${file}
+
docker rm -vf dpy-dind-ssl dpy-dind-certs
.PHONY: flake8
-flake8: build
- docker run -t --rm docker-sdk-python flake8 docker tests
+flake8: build-py3
+ docker run -t --rm docker-sdk-python3 flake8 docker tests
.PHONY: docs
docs: build-docs
docker run --rm -t -v `pwd`:/src docker-sdk-python-docs sphinx-build docs docs/_build
.PHONY: shell
-shell: build
- docker run -it -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python python
+shell: build-py3
+ docker run -it -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 python
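The new Makefile targets poll with busybox's `nc` until the Docker-in-Docker daemon accepts connections before the test containers start. For reference, here is a minimal sketch of the same guard in Python, assuming the daemon was started with `-H tcp://0.0.0.0:2375` and is reachable on localhost; host, port, and timeout are illustrative values:

    import socket
    import time

    def wait_for_daemon(host='localhost', port=2375, timeout=60):
        """Block until a TCP connection to dockerd succeeds or timeout expires."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            try:
                with socket.create_connection((host, port), timeout=1):
                    return  # the port is accepting connections
            except OSError:
                time.sleep(1)  # daemon not up yet; retry, mirroring the nc loop
        raise TimeoutError(f'dockerd did not come up on {host}:{port}')

    wait_for_daemon()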
diff --git a/README.md b/README.md
index 3ff124d..2db678d 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
# Docker SDK for Python
-[![Build Status](https://travis-ci.org/docker/docker-py.svg?branch=master)](https://travis-ci.org/docker/docker-py)
+[![Build Status](https://github.com/docker/docker-py/actions/workflows/ci.yml/badge.svg?branch=master)](https://github.com/docker/docker-py/actions/workflows/ci.yml/)
A Python library for the Docker Engine API. It lets you do anything the `docker` command does, but from within Python apps – run containers, manage containers, manage Swarms, etc.
@@ -10,9 +10,8 @@ The latest stable version [is available on PyPI](https://pypi.python.org/pypi/do
pip install docker
-If you are intending to connect to a docker host via TLS, add `docker[tls]` to your requirements instead, or install with pip:
-
- pip install docker[tls]
+> Older versions (< 6.0) required installing `docker[tls]` for SSL/TLS support.
+> This is no longer necessary and is a no-op, but is supported for backwards compatibility.
## Usage
@@ -58,7 +57,7 @@ You can stream logs:
```python
>>> for line in container.logs(stream=True):
-... print line.strip()
+... print(line.strip())
Reticulating spline 2...
Reticulating spline 3...
...
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index 144ab35..0000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-version: '{branch}-{build}'
-
-install:
- - "SET PATH=C:\\Python37-x64;C:\\Python37-x64\\Scripts;%PATH%"
- - "python --version"
- - "python -m pip install --upgrade pip"
- - "pip install tox==2.9.1"
-
-# Build the binary after tests
-build: false
-
-test_script:
- - "tox"
diff --git a/docker/api/build.py b/docker/api/build.py
index 365129a..a48204a 100644
--- a/docker/api/build.py
+++ b/docker/api/build.py
@@ -12,7 +12,7 @@ from .. import utils
log = logging.getLogger(__name__)
-class BuildApiMixin(object):
+class BuildApiMixin:
def build(self, path=None, tag=None, quiet=False, fileobj=None,
nocache=False, rm=False, timeout=None,
custom_context=False, encoding=None, pull=False,
@@ -132,7 +132,7 @@ class BuildApiMixin(object):
for key in container_limits.keys():
if key not in constants.CONTAINER_LIMITS_KEYS:
raise errors.DockerException(
- 'Invalid container_limits key {0}'.format(key)
+ f'Invalid container_limits key {key}'
)
if custom_context:
@@ -150,10 +150,10 @@ class BuildApiMixin(object):
dockerignore = os.path.join(path, '.dockerignore')
exclude = None
if os.path.exists(dockerignore):
- with open(dockerignore, 'r') as f:
+ with open(dockerignore) as f:
exclude = list(filter(
lambda x: x != '' and x[0] != '#',
- [l.strip() for l in f.read().splitlines()]
+ [line.strip() for line in f.read().splitlines()]
))
dockerfile = process_dockerfile(dockerfile, path)
context = utils.tar(
@@ -313,7 +313,7 @@ class BuildApiMixin(object):
auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})
log.debug(
- 'Sending auth config ({0})'.format(
+ 'Sending auth config ({})'.format(
', '.join(repr(k) for k in auth_data.keys())
)
)
@@ -344,9 +344,9 @@ def process_dockerfile(dockerfile, path):
if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
os.path.relpath(abs_dockerfile, path).startswith('..')):
# Dockerfile not in context - read data to insert into tar later
- with open(abs_dockerfile, 'r') as df:
+ with open(abs_dockerfile) as df:
return (
- '.dockerfile.{0:x}'.format(random.getrandbits(160)),
+ f'.dockerfile.{random.getrandbits(160):x}',
df.read()
)
diff --git a/docker/api/client.py b/docker/api/client.py
index 35dc84e..7733d33 100644
--- a/docker/api/client.py
+++ b/docker/api/client.py
@@ -1,12 +1,25 @@
import json
import struct
+import urllib
from functools import partial
import requests
import requests.exceptions
-import six
import websocket
+from .. import auth
+from ..constants import (DEFAULT_NUM_POOLS, DEFAULT_NUM_POOLS_SSH,
+ DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS,
+ DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM,
+ MINIMUM_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES)
+from ..errors import (DockerException, InvalidVersion, TLSParameterError,
+ create_api_error_from_http_exception)
+from ..tls import TLSConfig
+from ..transport import SSLHTTPAdapter, UnixHTTPAdapter
+from ..utils import check_resource, config, update_headers, utils
+from ..utils.json_stream import json_stream
+from ..utils.proxy import ProxyConfig
+from ..utils.socket import consume_socket_output, demux_adaptor, frames_iter
from .build import BuildApiMixin
from .config import ConfigApiMixin
from .container import ContainerApiMixin
@@ -19,22 +32,7 @@ from .secret import SecretApiMixin
from .service import ServiceApiMixin
from .swarm import SwarmApiMixin
from .volume import VolumeApiMixin
-from .. import auth
-from ..constants import (
- DEFAULT_TIMEOUT_SECONDS, DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM,
- DEFAULT_DOCKER_API_VERSION, MINIMUM_DOCKER_API_VERSION,
- STREAM_HEADER_SIZE_BYTES, DEFAULT_NUM_POOLS_SSH, DEFAULT_NUM_POOLS
-)
-from ..errors import (
- DockerException, InvalidVersion, TLSParameterError,
- create_api_error_from_http_exception
-)
-from ..tls import TLSConfig
-from ..transport import SSLHTTPAdapter, UnixHTTPAdapter
-from ..utils import utils, check_resource, update_headers, config
-from ..utils.socket import frames_iter, consume_socket_output, demux_adaptor
-from ..utils.json_stream import json_stream
-from ..utils.proxy import ProxyConfig
+
try:
from ..transport import NpipeHTTPAdapter
except ImportError:
@@ -91,6 +89,11 @@ class APIClient(
user_agent (str): Set a custom user agent for requests to the server.
credstore_env (dict): Override environment variables when calling the
credential store process.
+ use_ssh_client (bool): If set to `True`, an ssh connection is made
+ via shelling out to the ssh client. Ensure the ssh client is
+ installed and configured on the host.
+ max_pool_size (int): The maximum number of connections
+ to save in the pool.
"""
__attrs__ = requests.Session.__attrs__ + ['_auth_configs',
@@ -102,8 +105,9 @@ class APIClient(
def __init__(self, base_url=None, version=None,
timeout=DEFAULT_TIMEOUT_SECONDS, tls=False,
user_agent=DEFAULT_USER_AGENT, num_pools=None,
- credstore_env=None):
- super(APIClient, self).__init__()
+ credstore_env=None, use_ssh_client=False,
+ max_pool_size=DEFAULT_MAX_POOL_SIZE):
+ super().__init__()
if tls and not base_url:
raise TLSParameterError(
@@ -138,7 +142,8 @@ class APIClient(
if base_url.startswith('http+unix://'):
self._custom_adapter = UnixHTTPAdapter(
- base_url, timeout, pool_connections=num_pools
+ base_url, timeout, pool_connections=num_pools,
+ max_pool_size=max_pool_size
)
self.mount('http+docker://', self._custom_adapter)
self._unmount('http://', 'https://')
@@ -152,7 +157,8 @@ class APIClient(
)
try:
self._custom_adapter = NpipeHTTPAdapter(
- base_url, timeout, pool_connections=num_pools
+ base_url, timeout, pool_connections=num_pools,
+ max_pool_size=max_pool_size
)
except NameError:
raise DockerException(
@@ -163,7 +169,8 @@ class APIClient(
elif base_url.startswith('ssh://'):
try:
self._custom_adapter = SSHHTTPAdapter(
- base_url, timeout, pool_connections=num_pools
+ base_url, timeout, pool_connections=num_pools,
+ max_pool_size=max_pool_size, shell_out=use_ssh_client
)
except NameError:
raise DockerException(
@@ -183,16 +190,16 @@ class APIClient(
self.base_url = base_url
# version detection needs to be after unix adapter mounting
- if version is None:
- self._version = DEFAULT_DOCKER_API_VERSION
- elif isinstance(version, six.string_types):
- if version.lower() == 'auto':
- self._version = self._retrieve_server_version()
- else:
- self._version = version
+ if version is None or (isinstance(
+ version,
+ str
+ ) and version.lower() == 'auto'):
+ self._version = self._retrieve_server_version()
else:
+ self._version = version
+ if not isinstance(self._version, str):
raise DockerException(
- 'Version parameter must be a string or None. Found {0}'.format(
+ 'Version parameter must be a string or None. Found {}'.format(
type(version).__name__
)
)
@@ -212,7 +219,7 @@ class APIClient(
)
except Exception as e:
raise DockerException(
- 'Error while fetching server API version: {0}'.format(e)
+ f'Error while fetching server API version: {e}'
)
def _set_request_timeout(self, kwargs):
@@ -239,28 +246,28 @@ class APIClient(
def _url(self, pathfmt, *args, **kwargs):
for arg in args:
- if not isinstance(arg, six.string_types):
+ if not isinstance(arg, str):
raise ValueError(
- 'Expected a string but found {0} ({1}) '
+ 'Expected a string but found {} ({}) '
'instead'.format(arg, type(arg))
)
- quote_f = partial(six.moves.urllib.parse.quote, safe="/:")
+ quote_f = partial(urllib.parse.quote, safe="/:")
args = map(quote_f, args)
if kwargs.get('versioned_api', True):
- return '{0}/v{1}{2}'.format(
+ return '{}/v{}{}'.format(
self.base_url, self._version, pathfmt.format(*args)
)
else:
- return '{0}{1}'.format(self.base_url, pathfmt.format(*args))
+ return f'{self.base_url}{pathfmt.format(*args)}'
def _raise_for_status(self, response):
"""Raises stored :class:`APIError`, if one occurred."""
try:
response.raise_for_status()
except requests.exceptions.HTTPError as e:
- raise create_api_error_from_http_exception(e)
+ raise create_api_error_from_http_exception(e) from e
def _result(self, response, json=False, binary=False):
assert not (json and binary)
@@ -277,7 +284,7 @@ class APIClient(
# so we do this disgusting thing here.
data2 = {}
if data is not None and isinstance(data, dict):
- for k, v in six.iteritems(data):
+ for k, v in iter(data.items()):
if v is not None:
data2[k] = v
elif data is not None:
@@ -313,12 +320,10 @@ class APIClient(
sock = response.raw._fp.fp.raw.sock
elif self.base_url.startswith('http+docker://ssh'):
sock = response.raw._fp.fp.channel
- elif six.PY3:
+ else:
sock = response.raw._fp.fp.raw
if self.base_url.startswith("https://"):
sock = sock._sock
- else:
- sock = response.raw._fp.fp._sock
try:
# Keep a reference to the response to stop it being garbage
# collected. If the response is garbage collected, it will
@@ -336,8 +341,7 @@ class APIClient(
if response.raw._fp.chunked:
if decode:
- for chunk in json_stream(self._stream_helper(response, False)):
- yield chunk
+ yield from json_stream(self._stream_helper(response, False))
else:
reader = response.raw
while not reader.closed:
@@ -393,8 +397,13 @@ class APIClient(
def _stream_raw_result(self, response, chunk_size=1, decode=True):
''' Stream result for TTY-enabled container and raw binary data'''
self._raise_for_status(response)
- for out in response.iter_content(chunk_size, decode):
- yield out
+
+ # Disable timeout on the underlying socket to prevent
+ # Read timed out(s) for long running processes
+ socket = self._get_raw_response_socket(response)
+ self._disable_socket_timeout(socket)
+
+ yield from response.iter_content(chunk_size, decode)
def _read_from_socket(self, response, stream, tty=True, demux=False):
socket = self._get_raw_response_socket(response)
@@ -458,7 +467,7 @@ class APIClient(
self._result(res, binary=True)
self._raise_for_status(res)
- sep = six.binary_type()
+ sep = b''
if stream:
return self._multiplexed_response_stream_helper(res)
else:
@@ -472,7 +481,7 @@ class APIClient(
def get_adapter(self, url):
try:
- return super(APIClient, self).get_adapter(url)
+ return super().get_adapter(url)
except requests.exceptions.InvalidSchema as e:
if self._custom_adapter:
return self._custom_adapter
@@ -490,7 +499,7 @@ class APIClient(
Args:
dockercfg_path (str): Use a custom path for the Docker config file
(default ``$HOME/.docker/config.json`` if present,
- otherwise``$HOME/.dockercfg``)
+ otherwise ``$HOME/.dockercfg``)
Returns:
None
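The `APIClient` constructor above gains two options: `use_ssh_client`, which shells out to the installed `ssh` binary for `ssh://` URLs instead of the built-in transport, and `max_pool_size`, which caps the connections each transport adapter keeps pooled. A brief usage sketch (the SSH host and pool size are placeholder values):

    import docker

    # Shell out to the local `ssh` client for ssh:// connections.
    ssh_client = docker.APIClient(
        base_url='ssh://user@example.com',  # hypothetical host
        use_ssh_client=True,
        version='auto',
    )

    # Cap the number of connections kept in the adapter's pool.
    local_client = docker.APIClient(
        base_url='unix://var/run/docker.sock',
        max_pool_size=10,
    )
    print(local_client.version()['ApiVersion'])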
diff --git a/docker/api/config.py b/docker/api/config.py
index 93e5168..88c367e 100644
--- a/docker/api/config.py
+++ b/docker/api/config.py
@@ -1,13 +1,11 @@
import base64
-import six
-
from .. import utils
-class ConfigApiMixin(object):
+class ConfigApiMixin:
@utils.minimum_version('1.30')
- def create_config(self, name, data, labels=None):
+ def create_config(self, name, data, labels=None, templating=None):
"""
Create a config
@@ -15,6 +13,9 @@ class ConfigApiMixin(object):
name (string): Name of the config
data (bytes): Config data to be stored
labels (dict): A mapping of labels to assign to the config
+ templating (dict): dictionary containing the name of the
+ templating driver to be used expressed as
+ { name: <templating_driver_name>}
Returns (dict): ID of the newly created config
"""
@@ -22,12 +23,12 @@ class ConfigApiMixin(object):
data = data.encode('utf-8')
data = base64.b64encode(data)
- if six.PY3:
- data = data.decode('ascii')
+ data = data.decode('ascii')
body = {
'Data': data,
'Name': name,
- 'Labels': labels
+ 'Labels': labels,
+ 'Templating': templating
}
url = self._url('/configs/create')
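A hedged sketch of the new `templating` argument to `create_config()`; it requires the daemon to be in Swarm mode, and `golang` is assumed here as the driver name — the diff itself only specifies the `{ name: <templating_driver_name>}` shape:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    resp = client.create_config(
        name='app-config',
        data=b'listen_addr = {{ .Service.Name }}',
        templating={'name': 'golang'},  # driver name is an assumption
    )
    print(resp['ID'])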
diff --git a/docker/api/container.py b/docker/api/container.py
index 45bd352..f600be1 100644
--- a/docker/api/container.py
+++ b/docker/api/container.py
@@ -1,7 +1,5 @@
from datetime import datetime
-import six
-
from .. import errors
from .. import utils
from ..constants import DEFAULT_DATA_CHUNK_SIZE
@@ -12,7 +10,7 @@ from ..types import HostConfig
from ..types import NetworkingConfig
-class ContainerApiMixin(object):
+class ContainerApiMixin:
@utils.check_resource('container')
def attach(self, container, stdout=True, stderr=True,
stream=False, logs=False, demux=False):
@@ -225,7 +223,7 @@ class ContainerApiMixin(object):
mac_address=None, labels=None, stop_signal=None,
networking_config=None, healthcheck=None,
stop_timeout=None, runtime=None,
- use_config_proxy=True):
+ use_config_proxy=True, platform=None):
"""
Creates a container. Parameters are similar to those for the ``docker
run`` command except it doesn't support the attach options (``-a``).
@@ -244,9 +242,9 @@ class ContainerApiMixin(object):
.. code-block:: python
- container_id = cli.create_container(
+ container_id = client.api.create_container(
'busybox', 'ls', ports=[1111, 2222],
- host_config=cli.create_host_config(port_bindings={
+ host_config=client.api.create_host_config(port_bindings={
1111: 4567,
2222: None
})
@@ -258,22 +256,24 @@ class ContainerApiMixin(object):
.. code-block:: python
- cli.create_host_config(port_bindings={1111: ('127.0.0.1', 4567)})
+ client.api.create_host_config(
+ port_bindings={1111: ('127.0.0.1', 4567)}
+ )
Or without host port assignment:
.. code-block:: python
- cli.create_host_config(port_bindings={1111: ('127.0.0.1',)})
+ client.api.create_host_config(port_bindings={1111: ('127.0.0.1',)})
If you wish to use UDP instead of TCP (default), you need to declare
ports as such in both the config and host config:
.. code-block:: python
- container_id = cli.create_container(
+ container_id = client.api.create_container(
'busybox', 'ls', ports=[(1111, 'udp'), 2222],
- host_config=cli.create_host_config(port_bindings={
+ host_config=client.api.create_host_config(port_bindings={
'1111/udp': 4567, 2222: None
})
)
@@ -283,7 +283,7 @@ class ContainerApiMixin(object):
.. code-block:: python
- cli.create_host_config(port_bindings={
+ client.api.create_host_config(port_bindings={
1111: [1234, 4567]
})
@@ -291,7 +291,7 @@ class ContainerApiMixin(object):
.. code-block:: python
- cli.create_host_config(port_bindings={
+ client.api.create_host_config(port_bindings={
1111: [
('192.168.0.100', 1234),
('192.168.0.101', 1234)
@@ -307,9 +307,9 @@ class ContainerApiMixin(object):
.. code-block:: python
- container_id = cli.create_container(
+ container_id = client.api.create_container(
'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'],
- host_config=cli.create_host_config(binds={
+ host_config=client.api.create_host_config(binds={
'/home/user1/': {
'bind': '/mnt/vol2',
'mode': 'rw',
@@ -326,9 +326,9 @@ class ContainerApiMixin(object):
.. code-block:: python
- container_id = cli.create_container(
+ container_id = client.api.create_container(
'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'],
- host_config=cli.create_host_config(binds=[
+ host_config=client.api.create_host_config(binds=[
'/home/user1/:/mnt/vol2',
'/var/www:/mnt/vol1:ro',
])
@@ -346,15 +346,15 @@ class ContainerApiMixin(object):
.. code-block:: python
- networking_config = docker_client.create_networking_config({
- 'network1': docker_client.create_endpoint_config(
+ networking_config = client.api.create_networking_config({
+ 'network1': client.api.create_endpoint_config(
ipv4_address='172.28.0.124',
aliases=['foo', 'bar'],
links=['container2']
)
})
- ctnr = docker_client.create_container(
+ ctnr = client.api.create_container(
img, command, networking_config=networking_config
)
@@ -398,6 +398,7 @@ class ContainerApiMixin(object):
configuration file (``~/.docker/config.json`` by default)
contains a proxy configuration, the corresponding environment
variables will be set in the container being created.
+ platform (str): Platform in the format ``os[/arch[/variant]]``.
Returns:
A dictionary with an image 'Id' key and a 'Warnings' key.
@@ -408,7 +409,7 @@ class ContainerApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
- if isinstance(volumes, six.string_types):
+ if isinstance(volumes, str):
volumes = [volumes, ]
if isinstance(environment, dict):
@@ -427,16 +428,22 @@ class ContainerApiMixin(object):
stop_signal, networking_config, healthcheck,
stop_timeout, runtime
)
- return self.create_container_from_config(config, name)
+ return self.create_container_from_config(config, name, platform)
def create_container_config(self, *args, **kwargs):
return ContainerConfig(self._version, *args, **kwargs)
- def create_container_from_config(self, config, name=None):
+ def create_container_from_config(self, config, name=None, platform=None):
u = self._url("/containers/create")
params = {
'name': name
}
+ if platform:
+ if utils.version_lt(self._version, '1.41'):
+ raise errors.InvalidVersion(
+ 'platform is not supported for API version < 1.41'
+ )
+ params['platform'] = platform
res = self._post_json(u, data=config, params=params)
return self._result(res, True)
@@ -480,6 +487,9 @@ class ContainerApiMixin(object):
For example, ``/dev/sda:/dev/xvda:rwm`` allows the container
to have read-write access to the host's ``/dev/sda`` via a
node named ``/dev/xvda`` inside the container.
+ device_requests (:py:class:`list`): Expose host resources such as
+ GPUs to the container, as a list of
+ :py:class:`docker.types.DeviceRequest` instances.
dns (:py:class:`list`): Set custom DNS servers.
dns_opt (:py:class:`list`): Additional options to be added to the
container's ``resolv.conf`` file
@@ -503,7 +513,7 @@ class ContainerApiMixin(object):
bytes) or a string with a units identification char
(``100000b``, ``1000k``, ``128m``, ``1g``). If a string is
specified without a units character, bytes are assumed as an
- mem_reservation (int or str): Memory soft limit.
+ mem_reservation (float or str): Memory soft limit.
mem_swappiness (int): Tune a container's memory swappiness
behavior. Accepts number between 0 and 100.
memswap_limit (str or int): Maximum amount of memory + swap a
@@ -520,6 +530,8 @@ class ContainerApiMixin(object):
- ``container:<name|id>`` Reuse another container's network
stack.
- ``host`` Use the host network stack.
+ This mode is incompatible with ``port_bindings``.
+
oom_kill_disable (bool): Whether to disable OOM killer.
oom_score_adj (int): An integer value containing the score given
to the container in order to tune OOM killer preferences.
@@ -528,7 +540,8 @@ class ContainerApiMixin(object):
pids_limit (int): Tune a container's pids limit. Set ``-1`` for
unlimited.
port_bindings (dict): See :py:meth:`create_container`
- for more information.
+ for more information.
+ Incompatible with ``host`` in ``network_mode``.
privileged (bool): Give extended privileges to this container.
publish_all_ports (bool): Publish all ports to the host.
read_only (bool): Mount the container's root filesystem as read
@@ -575,10 +588,13 @@ class ContainerApiMixin(object):
Example:
- >>> cli.create_host_config(privileged=True, cap_drop=['MKNOD'],
- volumes_from=['nostalgic_newton'])
+ >>> client.api.create_host_config(
+ ... privileged=True,
+ ... cap_drop=['MKNOD'],
+ ... volumes_from=['nostalgic_newton'],
+ ... )
{'CapDrop': ['MKNOD'], 'LxcConf': None, 'Privileged': True,
- 'VolumesFrom': ['nostalgic_newton'], 'PublishAllPorts': False}
+ 'VolumesFrom': ['nostalgic_newton'], 'PublishAllPorts': False}
"""
if not kwargs:
@@ -606,11 +622,11 @@ class ContainerApiMixin(object):
Example:
- >>> docker_client.create_network('network1')
- >>> networking_config = docker_client.create_networking_config({
- 'network1': docker_client.create_endpoint_config()
+ >>> client.api.create_network('network1')
+ >>> networking_config = client.api.create_networking_config({
+ 'network1': client.api.create_endpoint_config()
})
- >>> container = docker_client.create_container(
+ >>> container = client.api.create_container(
img, command, networking_config=networking_config
)
@@ -636,13 +652,15 @@ class ContainerApiMixin(object):
network, using the IPv6 protocol. Defaults to ``None``.
link_local_ips (:py:class:`list`): A list of link-local (IPv4/IPv6)
addresses.
+ driver_opt (dict): A dictionary of options to provide to the
+ network driver. Defaults to ``None``.
Returns:
(dict) An endpoint config.
Example:
- >>> endpoint_config = client.create_endpoint_config(
+ >>> endpoint_config = client.api.create_endpoint_config(
aliases=['web', 'app'],
links={'app_db': 'db', 'another': None},
ipv4_address='132.65.0.123'
@@ -694,7 +712,8 @@ class ContainerApiMixin(object):
return self._stream_raw_result(res, chunk_size, False)
@utils.check_resource('container')
- def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
+ def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE,
+ encode_stream=False):
"""
Retrieve a file or folder from a container in the form of a tar
archive.
@@ -705,6 +724,8 @@ class ContainerApiMixin(object):
chunk_size (int): The number of bytes returned by each iteration
of the generator. If ``None``, data will be streamed as it is
received. Default: 2 MB
+ encode_stream (bool): Determines if data should be encoded
+ (gzip-compressed) during transmission. Default: False
Returns:
(tuple): First element is a raw tar data stream. Second element is
@@ -718,7 +739,7 @@ class ContainerApiMixin(object):
>>> c = docker.APIClient()
>>> f = open('./sh_bin.tar', 'wb')
- >>> bits, stat = c.get_archive(container, '/bin/sh')
+ >>> bits, stat = c.api.get_archive(container, '/bin/sh')
>>> print(stat)
{'name': 'sh', 'size': 1075464, 'mode': 493,
'mtime': '2018-10-01T15:37:48-07:00', 'linkTarget': ''}
@@ -729,8 +750,13 @@ class ContainerApiMixin(object):
params = {
'path': path
}
+ headers = {
+ "Accept-Encoding": "gzip, deflate"
+ } if encode_stream else {
+ "Accept-Encoding": "identity"
+ }
url = self._url('/containers/{0}/archive', container)
- res = self._get(url, params=params, stream=True)
+ res = self._get(url, params=params, stream=True, headers=headers)
self._raise_for_status(res)
encoded_stat = res.headers.get('x-docker-container-path-stat')
return (
@@ -774,7 +800,7 @@ class ContainerApiMixin(object):
url = self._url("/containers/{0}/kill", container)
params = {}
if signal is not None:
- if not isinstance(signal, six.string_types):
+ if not isinstance(signal, str):
signal = int(signal)
params['signal'] = signal
res = self._post(url, params=params)
@@ -900,7 +926,7 @@ class ContainerApiMixin(object):
.. code-block:: python
- >>> cli.port('7174d6347063', 80)
+ >>> client.api.port('7174d6347063', 80)
[{'HostIp': '0.0.0.0', 'HostPort': '80'}]
"""
res = self._get(self._url("/containers/{0}/json", container))
@@ -1079,10 +1105,10 @@ class ContainerApiMixin(object):
Example:
- >>> container = cli.create_container(
+ >>> container = client.api.create_container(
... image='busybox:latest',
... command='/bin/sleep 30')
- >>> cli.start(container=container.get('Id'))
+ >>> client.api.start(container=container.get('Id'))
"""
if args or kwargs:
raise errors.DeprecatedMethod(
@@ -1120,7 +1146,7 @@ class ContainerApiMixin(object):
else:
if decode:
raise errors.InvalidArgument(
- "decode is only available in conjuction with stream=True"
+ "decode is only available in conjunction with stream=True"
)
return self._result(self._get(url, params={'stream': False}),
json=True)
@@ -1206,8 +1232,8 @@ class ContainerApiMixin(object):
cpu_shares (int): CPU shares (relative weight)
cpuset_cpus (str): CPUs in which to allow execution
cpuset_mems (str): MEMs in which to allow execution
- mem_limit (int or str): Memory limit
- mem_reservation (int or str): Memory soft limit
+ mem_limit (float or str): Memory limit
+ mem_reservation (float or str): Memory soft limit
memswap_limit (int or str): Total memory (memory + swap), -1 to
disable swap
kernel_memory (int or str): Kernel memory limit
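Two of the additions above in one short sketch: the `platform` kwarg on `create_container()` (rejected with `InvalidVersion` on API < 1.41) and the `encode_stream` flag on `get_archive()`. The image, platform, and paths are illustrative:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')

    # Ask for a specific platform; needs API version 1.41 or later.
    container = client.create_container(
        'alpine:latest', 'sleep 30', platform='linux/arm64',
    )

    # Request gzip compression of the tar stream in transit.
    bits, stat = client.get_archive(
        container['Id'], '/etc/hostname', encode_stream=True,
    )
    with open('hostname.tar', 'wb') as f:
        for chunk in bits:
            f.write(chunk)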
diff --git a/docker/api/daemon.py b/docker/api/daemon.py
index f715a13..a857213 100644
--- a/docker/api/daemon.py
+++ b/docker/api/daemon.py
@@ -4,7 +4,7 @@ from datetime import datetime
from .. import auth, types, utils
-class DaemonApiMixin(object):
+class DaemonApiMixin:
@utils.minimum_version('1.25')
def df(self):
"""
@@ -109,7 +109,7 @@ class DaemonApiMixin(object):
the Docker server.
dockercfg_path (str): Use a custom path for the Docker config file
(default ``$HOME/.docker/config.json`` if present,
- otherwise``$HOME/.dockercfg``)
+ otherwise ``$HOME/.dockercfg``)
Returns:
(dict): The response from the login request
diff --git a/docker/api/exec_api.py b/docker/api/exec_api.py
index 4c49ac3..496308a 100644
--- a/docker/api/exec_api.py
+++ b/docker/api/exec_api.py
@@ -1,10 +1,8 @@
-import six
-
from .. import errors
from .. import utils
-class ExecApiMixin(object):
+class ExecApiMixin:
@utils.check_resource('container')
def exec_create(self, container, cmd, stdout=True, stderr=True,
stdin=False, tty=False, privileged=False, user='',
@@ -45,7 +43,7 @@ class ExecApiMixin(object):
'Setting environment for exec is not supported in API < 1.25'
)
- if isinstance(cmd, six.string_types):
+ if isinstance(cmd, str):
cmd = utils.split_command(cmd)
if isinstance(environment, dict):
diff --git a/docker/api/image.py b/docker/api/image.py
index 11c8cf7..5e1466e 100644
--- a/docker/api/image.py
+++ b/docker/api/image.py
@@ -1,15 +1,13 @@
import logging
import os
-import six
-
from .. import auth, errors, utils
from ..constants import DEFAULT_DATA_CHUNK_SIZE
log = logging.getLogger(__name__)
-class ImageApiMixin(object):
+class ImageApiMixin:
@utils.check_resource('image')
def get_image(self, image, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
@@ -31,7 +29,7 @@ class ImageApiMixin(object):
Example:
- >>> image = cli.get_image("busybox:latest")
+ >>> image = client.api.get_image("busybox:latest")
>>> f = open('/tmp/busybox-latest.tar', 'wb')
>>> for chunk in image:
>>> f.write(chunk)
@@ -81,10 +79,18 @@ class ImageApiMixin(object):
If the server returns an error.
"""
params = {
- 'filter': name,
'only_ids': 1 if quiet else 0,
'all': 1 if all else 0,
}
+ if name:
+ if utils.version_lt(self._version, '1.25'):
+ # only use "filter" on API 1.24 and under, as it is deprecated
+ params['filter'] = name
+ else:
+ if filters:
+ filters['reference'] = name
+ else:
+ filters = {'reference': name}
if filters:
params['filters'] = utils.convert_filters(filters)
res = self._result(self._get(self._url("/images/json"), params=params),
@@ -122,7 +128,7 @@ class ImageApiMixin(object):
params = _import_image_params(
repository, tag, image,
- src=(src if isinstance(src, six.string_types) else None),
+ src=(src if isinstance(src, str) else None),
changes=changes
)
headers = {'Content-Type': 'application/tar'}
@@ -131,7 +137,7 @@ class ImageApiMixin(object):
return self._result(
self._post(u, data=None, params=params)
)
- elif isinstance(src, six.string_types): # from file path
+ elif isinstance(src, str): # from file path
with open(src, 'rb') as f:
return self._result(
self._post(
@@ -343,13 +349,14 @@ class ImageApiMixin(object):
return self._result(self._post(url, params=params), True)
def pull(self, repository, tag=None, stream=False, auth_config=None,
- decode=False, platform=None):
+ decode=False, platform=None, all_tags=False):
"""
Pulls an image. Similar to the ``docker pull`` command.
Args:
repository (str): The repository to pull
- tag (str): The tag to pull
+ tag (str): The tag to pull. If ``tag`` is ``None`` or empty, it
+ is set to ``latest``.
stream (bool): Stream the output as a generator. Make sure to
consume the generator, otherwise pull might get cancelled.
auth_config (dict): Override the credentials that are found in the
@@ -358,6 +365,8 @@ class ImageApiMixin(object):
decode (bool): Decode the JSON data from the server into dicts.
Only applies with ``stream=True``
platform (str): Platform in the format ``os[/arch[/variant]]``
+ all_tags (bool): Pull all image tags, the ``tag`` parameter is
+ ignored.
Returns:
(generator or str): The output
@@ -368,7 +377,8 @@ class ImageApiMixin(object):
Example:
- >>> for line in cli.pull('busybox', stream=True, decode=True):
+ >>> resp = client.api.pull('busybox', stream=True, decode=True)
+ ... for line in resp:
... print(json.dumps(line, indent=4))
{
"status": "Pulling image (latest) from busybox",
@@ -382,8 +392,12 @@ class ImageApiMixin(object):
}
"""
- if not tag:
- repository, tag = utils.parse_repository_tag(repository)
+ repository, image_tag = utils.parse_repository_tag(repository)
+ tag = tag or image_tag or 'latest'
+
+ if all_tags:
+ tag = None
+
registry, repo_name = auth.resolve_repository_name(repository)
params = {
@@ -443,7 +457,12 @@ class ImageApiMixin(object):
If the server returns an error.
Example:
- >>> for line in cli.push('yourname/app', stream=True, decode=True):
+ >>> resp = client.api.push(
+ ... 'yourname/app',
+ ... stream=True,
+ ... decode=True,
+ ... )
+ ... for line in resp:
... print(line)
{'status': 'Pushing repository yourname/app (1 tags)'}
{'status': 'Pushing','progressDetail': {}, 'id': '511136ea3c5a'}
@@ -494,13 +513,14 @@ class ImageApiMixin(object):
res = self._delete(self._url("/images/{0}", image), params=params)
return self._result(res, True)
- def search(self, term):
+ def search(self, term, limit=None):
"""
Search for images on Docker Hub. Similar to the ``docker search``
command.
Args:
term (str): A term to search for.
+ limit (int): The maximum number of results to return.
Returns:
(list of dicts): The response of the search.
@@ -509,8 +529,12 @@ class ImageApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
+ params = {'term': term}
+ if limit is not None:
+ params['limit'] = limit
+
return self._result(
- self._get(self._url("/images/search"), params={'term': term}),
+ self._get(self._url("/images/search"), params=params),
True
)
@@ -534,7 +558,7 @@ class ImageApiMixin(object):
Example:
- >>> client.tag('ubuntu', 'localhost:5000/ubuntu', 'latest',
+ >>> client.api.tag('ubuntu', 'localhost:5000/ubuntu', 'latest',
force=True)
"""
params = {
@@ -551,7 +575,7 @@ class ImageApiMixin(object):
def is_file(src):
try:
return (
- isinstance(src, six.string_types) and
+ isinstance(src, str) and
os.path.isfile(src)
)
except TypeError: # a data string will make isfile() raise a TypeError
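The image API changes above in practice: `pull()` now defaults the tag to `latest` instead of pulling every tag, `all_tags=True` restores the old pull-everything behavior, and `search()` accepts a `limit`:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')

    client.pull('busybox')                 # now implicitly busybox:latest
    client.pull('busybox', all_tags=True)  # tag is ignored, every tag pulled

    for result in client.search('busybox', limit=5):
        print(result['name'], result['star_count'])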
diff --git a/docker/api/network.py b/docker/api/network.py
index 750b91b..e95c5fc 100644
--- a/docker/api/network.py
+++ b/docker/api/network.py
@@ -4,7 +4,7 @@ from ..utils import version_lt
from .. import utils
-class NetworkApiMixin(object):
+class NetworkApiMixin:
def networks(self, names=None, ids=None, filters=None):
"""
List networks. Similar to the ``docker network ls`` command.
@@ -75,7 +75,7 @@ class NetworkApiMixin(object):
Example:
A network using the bridge driver:
- >>> client.create_network("network1", driver="bridge")
+ >>> client.api.create_network("network1", driver="bridge")
You can also create more advanced networks with custom IPAM
configurations. For example, setting the subnet to
@@ -90,7 +90,7 @@ class NetworkApiMixin(object):
>>> ipam_config = docker.types.IPAMConfig(
pool_configs=[ipam_pool]
)
- >>> docker_client.create_network("network1", driver="bridge",
+ >>> client.api.create_network("network1", driver="bridge",
ipam=ipam_config)
"""
if options is not None and not isinstance(options, dict):
@@ -216,7 +216,7 @@ class NetworkApiMixin(object):
def connect_container_to_network(self, container, net_id,
ipv4_address=None, ipv6_address=None,
aliases=None, links=None,
- link_local_ips=None):
+ link_local_ips=None, driver_opt=None):
"""
Connect a container to a network.
@@ -240,7 +240,8 @@ class NetworkApiMixin(object):
"Container": container,
"EndpointConfig": self.create_endpoint_config(
aliases=aliases, links=links, ipv4_address=ipv4_address,
- ipv6_address=ipv6_address, link_local_ips=link_local_ips
+ ipv6_address=ipv6_address, link_local_ips=link_local_ips,
+ driver_opt=driver_opt
),
}
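A minimal sketch of the `driver_opt` passthrough added to `connect_container_to_network()`; the option key and value are made-up examples that a custom network driver might interpret:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    net = client.create_network('appnet', driver='bridge')
    ctnr = client.create_container('alpine:latest', 'sleep 30')
    client.connect_container_to_network(
        ctnr['Id'], net['Id'],
        driver_opt={'com.example.opt': 'value'},  # hypothetical driver option
    )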
diff --git a/docker/api/plugin.py b/docker/api/plugin.py
index f6c0b13..10210c1 100644
--- a/docker/api/plugin.py
+++ b/docker/api/plugin.py
@@ -1,9 +1,7 @@
-import six
-
from .. import auth, utils
-class PluginApiMixin(object):
+class PluginApiMixin:
@utils.minimum_version('1.25')
@utils.check_resource('name')
def configure_plugin(self, name, options):
@@ -21,7 +19,7 @@ class PluginApiMixin(object):
url = self._url('/plugins/{0}/set', name)
data = options
if isinstance(data, dict):
- data = ['{0}={1}'.format(k, v) for k, v in six.iteritems(data)]
+ data = [f'{k}={v}' for k, v in data.items()]
res = self._post_json(url, data=data)
self._raise_for_status(res)
return True
@@ -53,19 +51,20 @@ class PluginApiMixin(object):
return True
@utils.minimum_version('1.25')
- def disable_plugin(self, name):
+ def disable_plugin(self, name, force=False):
"""
Disable an installed plugin.
Args:
name (string): The name of the plugin. The ``:latest`` tag is
optional, and is the default if omitted.
+            force (bool): Disable the plugin even if it is still in use. Default: False
Returns:
``True`` if successful
"""
url = self._url('/plugins/{0}/disable', name)
- res = self._post(url)
+ res = self._post(url, params={'force': force})
self._raise_for_status(res)
return True
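
With ``force=True`` the daemon is asked to disable the plugin even if it is
still in use (the plugin name below is illustrative):

    client.api.disable_plugin('vieux/sshfs:latest', force=True)
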
diff --git a/docker/api/secret.py b/docker/api/secret.py
index e57952b..cd440b9 100644
--- a/docker/api/secret.py
+++ b/docker/api/secret.py
@@ -1,12 +1,10 @@
import base64
-import six
-
from .. import errors
from .. import utils
-class SecretApiMixin(object):
+class SecretApiMixin:
@utils.minimum_version('1.25')
def create_secret(self, name, data, labels=None, driver=None):
"""
@@ -25,8 +23,7 @@ class SecretApiMixin(object):
data = data.encode('utf-8')
data = base64.b64encode(data)
- if six.PY3:
- data = data.decode('ascii')
+ data = data.decode('ascii')
body = {
'Data': data,
'Name': name,
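
With the Python 2 branch gone, the payload is now always decoded to ASCII
text. The same transformation as a standalone sketch:

    import base64

    payload = 'super-secret'.encode('utf-8')
    encoded = base64.b64encode(payload).decode('ascii')  # ends up in body['Data']
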
diff --git a/docker/api/service.py b/docker/api/service.py
index e9027bf..371f541 100644
--- a/docker/api/service.py
+++ b/docker/api/service.py
@@ -45,7 +45,7 @@ def _check_api_features(version, task_template, update_config, endpoint_spec,
if task_template is not None:
if 'ForceUpdate' in task_template and utils.version_lt(
version, '1.25'):
- raise_version_error('force_update', '1.25')
+ raise_version_error('force_update', '1.25')
if task_template.get('Placement'):
if utils.version_lt(version, '1.30'):
@@ -113,7 +113,7 @@ def _merge_task_template(current, override):
return merged
-class ServiceApiMixin(object):
+class ServiceApiMixin:
@utils.minimum_version('1.24')
def create_service(
self, task_template, name=None, labels=None, mode=None,
diff --git a/docker/api/swarm.py b/docker/api/swarm.py
index 897f08e..db40fdd 100644
--- a/docker/api/swarm.py
+++ b/docker/api/swarm.py
@@ -1,5 +1,5 @@
import logging
-from six.moves import http_client
+import http.client as http_client
from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE
from .. import errors
from .. import types
@@ -8,7 +8,7 @@ from .. import utils
log = logging.getLogger(__name__)
-class SwarmApiMixin(object):
+class SwarmApiMixin:
def create_swarm_spec(self, *args, **kwargs):
"""
@@ -58,10 +58,10 @@ class SwarmApiMixin(object):
Example:
- >>> spec = client.create_swarm_spec(
+ >>> spec = client.api.create_swarm_spec(
snapshot_interval=5000, log_entries_for_slow_followers=1200
)
- >>> client.init_swarm(
+ >>> client.api.init_swarm(
advertise_addr='eth0', listen_addr='0.0.0.0:5000',
force_new_cluster=False, swarm_spec=spec
)
@@ -354,8 +354,8 @@ class SwarmApiMixin(object):
Example:
- >>> key = client.get_unlock_key()
- >>> client.unlock_node(key)
+ >>> key = client.api.get_unlock_key()
+            >>> client.api.unlock_swarm(key)
"""
if isinstance(key, dict):
@@ -396,7 +396,7 @@ class SwarmApiMixin(object):
'Role': 'manager',
'Labels': {'foo': 'bar'}
}
- >>> client.update_node(node_id='24ifsmvkjbyhk', version=8,
+ >>> client.api.update_node(node_id='24ifsmvkjbyhk', version=8,
node_spec=node_spec)
"""
diff --git a/docker/api/volume.py b/docker/api/volume.py
index 900a608..98b42a1 100644
--- a/docker/api/volume.py
+++ b/docker/api/volume.py
@@ -2,7 +2,7 @@ from .. import errors
from .. import utils
-class VolumeApiMixin(object):
+class VolumeApiMixin:
def volumes(self, filters=None):
"""
List volumes currently registered by the docker daemon. Similar to the
@@ -21,7 +21,7 @@ class VolumeApiMixin(object):
Example:
- >>> cli.volumes()
+ >>> client.api.volumes()
{u'Volumes': [{u'Driver': u'local',
u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data',
u'Name': u'foobar'},
@@ -56,15 +56,18 @@ class VolumeApiMixin(object):
Example:
- >>> volume = cli.create_volume(name='foobar', driver='local',
- driver_opts={'foo': 'bar', 'baz': 'false'},
- labels={"key": "value"})
- >>> print(volume)
+ >>> volume = client.api.create_volume(
+ ... name='foobar',
+ ... driver='local',
+ ... driver_opts={'foo': 'bar', 'baz': 'false'},
+ ... labels={"key": "value"},
+ ... )
+            >>> print(volume)
{u'Driver': u'local',
- u'Labels': {u'key': u'value'},
- u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data',
- u'Name': u'foobar',
- u'Scope': u'local'}
+ u'Labels': {u'key': u'value'},
+ u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data',
+ u'Name': u'foobar',
+ u'Scope': u'local'}
"""
url = self._url('/volumes/create')
@@ -104,7 +107,7 @@ class VolumeApiMixin(object):
Example:
- >>> cli.inspect_volume('foobar')
+ >>> client.api.inspect_volume('foobar')
{u'Driver': u'local',
u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data',
u'Name': u'foobar'}
diff --git a/docker/auth.py b/docker/auth.py
index 6a07ea2..cb38855 100644
--- a/docker/auth.py
+++ b/docker/auth.py
@@ -2,14 +2,12 @@ import base64
import json
import logging
-import six
-
from . import credentials
from . import errors
from .utils import config
INDEX_NAME = 'docker.io'
-INDEX_URL = 'https://index.{0}/v1/'.format(INDEX_NAME)
+INDEX_URL = f'https://index.{INDEX_NAME}/v1/'
TOKEN_USERNAME = '<token>'
log = logging.getLogger(__name__)
@@ -18,13 +16,13 @@ log = logging.getLogger(__name__)
def resolve_repository_name(repo_name):
if '://' in repo_name:
raise errors.InvalidRepository(
- 'Repository name cannot contain a scheme ({0})'.format(repo_name)
+ f'Repository name cannot contain a scheme ({repo_name})'
)
index_name, remote_name = split_repo_name(repo_name)
if index_name[0] == '-' or index_name[-1] == '-':
raise errors.InvalidRepository(
- 'Invalid index name ({0}). Cannot begin or end with a'
+ 'Invalid index name ({}). Cannot begin or end with a'
' hyphen.'.format(index_name)
)
return resolve_index_name(index_name), remote_name
@@ -98,10 +96,10 @@ class AuthConfig(dict):
"""
conf = {}
- for registry, entry in six.iteritems(entries):
+ for registry, entry in entries.items():
if not isinstance(entry, dict):
log.debug(
- 'Config entry for key {0} is not auth config'.format(
+ 'Config entry for key {} is not auth config'.format(
registry
)
)
@@ -111,14 +109,14 @@ class AuthConfig(dict):
# keys is not formatted properly.
if raise_on_error:
raise errors.InvalidConfigFile(
- 'Invalid configuration for registry {0}'.format(
+ 'Invalid configuration for registry {}'.format(
registry
)
)
return {}
if 'identitytoken' in entry:
log.debug(
- 'Found an IdentityToken entry for registry {0}'.format(
+ 'Found an IdentityToken entry for registry {}'.format(
registry
)
)
@@ -132,7 +130,7 @@ class AuthConfig(dict):
# a valid value in the auths config.
# https://github.com/docker/compose/issues/3265
log.debug(
- 'Auth data for {0} is absent. Client might be using a '
+ 'Auth data for {} is absent. Client might be using a '
'credentials store instead.'.format(registry)
)
conf[registry] = {}
@@ -140,7 +138,7 @@ class AuthConfig(dict):
username, password = decode_auth(entry['auth'])
log.debug(
- 'Found entry (registry={0}, username={1})'
+ 'Found entry (registry={}, username={})'
.format(repr(registry), repr(username))
)
@@ -170,7 +168,7 @@ class AuthConfig(dict):
try:
with open(config_file) as f:
config_dict = json.load(f)
- except (IOError, KeyError, ValueError) as e:
+ except (OSError, KeyError, ValueError) as e:
# Likely missing new Docker config file or it's in an
# unknown format, continue to attempt to read old location
# and format.
@@ -230,7 +228,7 @@ class AuthConfig(dict):
store_name = self.get_credential_store(registry)
if store_name is not None:
log.debug(
- 'Using credentials store "{0}"'.format(store_name)
+ f'Using credentials store "{store_name}"'
)
cfg = self._resolve_authconfig_credstore(registry, store_name)
if cfg is not None:
@@ -239,15 +237,15 @@ class AuthConfig(dict):
# Default to the public index server
registry = resolve_index_name(registry) if registry else INDEX_NAME
- log.debug("Looking for auth entry for {0}".format(repr(registry)))
+ log.debug(f"Looking for auth entry for {repr(registry)}")
if registry in self.auths:
- log.debug("Found {0}".format(repr(registry)))
+ log.debug(f"Found {repr(registry)}")
return self.auths[registry]
- for key, conf in six.iteritems(self.auths):
+ for key, conf in self.auths.items():
if resolve_index_name(key) == registry:
- log.debug("Found {0}".format(repr(key)))
+ log.debug(f"Found {repr(key)}")
return conf
log.debug("No entry found")
@@ -258,7 +256,7 @@ class AuthConfig(dict):
# The ecosystem is a little schizophrenic with index.docker.io VS
# docker.io - in that case, it seems the full URL is necessary.
registry = INDEX_URL
- log.debug("Looking for auth entry for {0}".format(repr(registry)))
+ log.debug(f"Looking for auth entry for {repr(registry)}")
store = self._get_store_instance(credstore_name)
try:
data = store.get(registry)
@@ -278,7 +276,7 @@ class AuthConfig(dict):
return None
except credentials.StoreError as e:
raise errors.DockerException(
- 'Credentials store error: {0}'.format(repr(e))
+ f'Credentials store error: {repr(e)}'
)
def _get_store_instance(self, name):
@@ -329,7 +327,7 @@ def convert_to_hostname(url):
def decode_auth(auth):
- if isinstance(auth, six.string_types):
+ if isinstance(auth, str):
auth = auth.encode('ascii')
s = base64.b64decode(auth)
login, pwd = s.split(b':', 1)
@@ -385,7 +383,6 @@ def _load_legacy_config(config_file):
}}
except Exception as e:
log.debug(e)
- pass
log.debug("All parsing attempts failed - returning empty config")
return {}
diff --git a/docker/client.py b/docker/client.py
index 99ae196..4dbd846 100644
--- a/docker/client.py
+++ b/docker/client.py
@@ -1,5 +1,5 @@
from .api.client import APIClient
-from .constants import DEFAULT_TIMEOUT_SECONDS
+from .constants import (DEFAULT_TIMEOUT_SECONDS, DEFAULT_MAX_POOL_SIZE)
from .models.configs import ConfigCollection
from .models.containers import ContainerCollection
from .models.images import ImageCollection
@@ -13,7 +13,7 @@ from .models.volumes import VolumeCollection
from .utils import kwargs_from_env
-class DockerClient(object):
+class DockerClient:
"""
A client for communicating with a Docker server.
@@ -35,6 +35,11 @@ class DockerClient(object):
user_agent (str): Set a custom user agent for requests to the server.
credstore_env (dict): Override environment variables when calling the
credential store process.
+ use_ssh_client (bool): If set to `True`, an ssh connection is made
+ via shelling out to the ssh client. Ensure the ssh client is
+ installed and configured on the host.
+ max_pool_size (int): The maximum number of connections
+ to save in the pool.
"""
def __init__(self, *args, **kwargs):
self.api = APIClient(*args, **kwargs)
@@ -62,14 +67,19 @@ class DockerClient(object):
Args:
version (str): The version of the API to use. Set to ``auto`` to
- automatically detect the server's version. Default: ``1.35``
+ automatically detect the server's version. Default: ``auto``
timeout (int): Default timeout for API calls, in seconds.
+ max_pool_size (int): The maximum number of connections
+ to save in the pool.
ssl_version (int): A valid `SSL version`_.
assert_hostname (bool): Verify the hostname of the server.
environment (dict): The environment to read environment variables
from. Default: the value of ``os.environ``
credstore_env (dict): Override environment variables when calling
the credential store process.
+ use_ssh_client (bool): If set to `True`, an ssh connection is
+ made via shelling out to the ssh client. Ensure the ssh
+ client is installed and configured on the host.
Example:
@@ -80,9 +90,15 @@ class DockerClient(object):
https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_TLSv1
"""
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT_SECONDS)
+ max_pool_size = kwargs.pop('max_pool_size', DEFAULT_MAX_POOL_SIZE)
version = kwargs.pop('version', None)
+ use_ssh_client = kwargs.pop('use_ssh_client', False)
return cls(
- timeout=timeout, version=version, **kwargs_from_env(**kwargs)
+ timeout=timeout,
+ max_pool_size=max_pool_size,
+ version=version,
+ use_ssh_client=use_ssh_client,
+ **kwargs_from_env(**kwargs)
)
# Resources
@@ -196,7 +212,7 @@ class DockerClient(object):
close.__doc__ = APIClient.close.__doc__
def __getattr__(self, name):
- s = ["'DockerClient' object has no attribute '{}'".format(name)]
+ s = [f"'DockerClient' object has no attribute '{name}'"]
# If a user calls a method on APIClient, they
if hasattr(APIClient, name):
s.append("In Docker SDK for Python 2.0, this method is now on the "
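
A hedged example of the new ``from_env`` keywords (assumes a reachable
daemon; ``use_ssh_client`` only matters for ``ssh://`` hosts):

    import docker

    client = docker.from_env(
        max_pool_size=20,      # forwarded to the connection pools
        use_ssh_client=True,   # shell out to ssh instead of using paramiko
    )
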
diff --git a/docker/constants.py b/docker/constants.py
index e4daed5..d5bfc35 100644
--- a/docker/constants.py
+++ b/docker/constants.py
@@ -1,7 +1,7 @@
import sys
from .version import version
-DEFAULT_DOCKER_API_VERSION = '1.35'
+DEFAULT_DOCKER_API_VERSION = '1.41'
MINIMUM_DOCKER_API_VERSION = '1.21'
DEFAULT_TIMEOUT_SECONDS = 60
STREAM_HEADER_SIZE_BYTES = 8
@@ -28,7 +28,7 @@ INSECURE_REGISTRY_DEPRECATION_WARNING = \
IS_WINDOWS_PLATFORM = (sys.platform == 'win32')
WINDOWS_LONGPATH_PREFIX = '\\\\?\\'
-DEFAULT_USER_AGENT = "docker-sdk-python/{0}".format(version)
+DEFAULT_USER_AGENT = f"docker-sdk-python/{version}"
DEFAULT_NUM_POOLS = 25
# The OpenSSH server default value for MaxSessions is 10 which means we can
@@ -36,6 +36,8 @@ DEFAULT_NUM_POOLS = 25
# For more details see: https://github.com/docker/docker-py/issues/2246
DEFAULT_NUM_POOLS_SSH = 9
+DEFAULT_MAX_POOL_SIZE = 10
+
DEFAULT_DATA_CHUNK_SIZE = 1024 * 2048
DEFAULT_SWARM_ADDR_POOL = ['10.0.0.0/8']
diff --git a/docker/context/api.py b/docker/context/api.py
index c45115b..380e8c4 100644
--- a/docker/context/api.py
+++ b/docker/context/api.py
@@ -9,7 +9,7 @@ from docker.context.config import write_context_name_to_docker_config
from docker.context import Context
-class ContextAPI(object):
+class ContextAPI:
"""Context API.
Contains methods for context management:
create, list, remove, get, inspect.
@@ -109,7 +109,7 @@ class ContextAPI(object):
if filename == METAFILE:
try:
data = json.load(
- open(os.path.join(dirname, filename), "r"))
+ open(os.path.join(dirname, filename)))
names.append(data["Name"])
except Exception as e:
raise errors.ContextException(
@@ -138,7 +138,7 @@ class ContextAPI(object):
err = write_context_name_to_docker_config(name)
if err:
raise errors.ContextException(
- 'Failed to set current context: {}'.format(err))
+ f'Failed to set current context: {err}')
@classmethod
def remove_context(cls, name):
diff --git a/docker/context/config.py b/docker/context/config.py
index baf54f7..d761aef 100644
--- a/docker/context/config.py
+++ b/docker/context/config.py
@@ -15,7 +15,7 @@ def get_current_context_name():
docker_cfg_path = find_config_file()
if docker_cfg_path:
try:
- with open(docker_cfg_path, "r") as f:
+ with open(docker_cfg_path) as f:
name = json.load(f).get("currentContext", "default")
except Exception:
return "default"
@@ -29,7 +29,7 @@ def write_context_name_to_docker_config(name=None):
config = {}
if docker_cfg_path:
try:
- with open(docker_cfg_path, "r") as f:
+ with open(docker_cfg_path) as f:
config = json.load(f)
except Exception as e:
return e
diff --git a/docker/context/context.py b/docker/context/context.py
index b2af20c..dbaa01c 100644
--- a/docker/context/context.py
+++ b/docker/context/context.py
@@ -11,35 +11,48 @@ from docker.context.config import get_context_host
class Context:
"""A context."""
+
def __init__(self, name, orchestrator=None, host=None, endpoints=None,
tls=False):
if not name:
raise Exception("Name not provided")
self.name = name
+ self.context_type = None
self.orchestrator = orchestrator
+ self.endpoints = {}
+ self.tls_cfg = {}
+ self.meta_path = "IN MEMORY"
+ self.tls_path = "IN MEMORY"
+
if not endpoints:
+ # set default docker endpoint if no endpoint is set
default_endpoint = "docker" if (
not orchestrator or orchestrator == "swarm"
) else orchestrator
+
self.endpoints = {
default_endpoint: {
"Host": get_context_host(host, tls),
"SkipTLSVerify": not tls
}
}
- else:
- for k, v in endpoints.items():
- ekeys = v.keys()
- for param in ["Host", "SkipTLSVerify"]:
- if param not in ekeys:
- raise ContextException(
- "Missing parameter {} from endpoint {}".format(
- param, k))
- self.endpoints = endpoints
+ return
- self.tls_cfg = {}
- self.meta_path = "IN MEMORY"
- self.tls_path = "IN MEMORY"
+ # check docker endpoints
+ for k, v in endpoints.items():
+ if not isinstance(v, dict):
+ # unknown format
+                raise ContextException(
+                    f"Unknown endpoint format for context {name}: {v}")
+
+ self.endpoints[k] = v
+ if k != "docker":
+ continue
+
+ self.endpoints[k]["Host"] = v.get("Host", get_context_host(
+ host, tls))
+ self.endpoints[k]["SkipTLSVerify"] = bool(v.get(
+ "SkipTLSVerify", not tls))
def set_endpoint(
self, name="docker", host=None, tls_cfg=None,
@@ -59,9 +72,13 @@ class Context:
@classmethod
def load_context(cls, name):
- name, orchestrator, endpoints = Context._load_meta(name)
- if name:
- instance = cls(name, orchestrator, endpoints=endpoints)
+ meta = Context._load_meta(name)
+ if meta:
+ instance = cls(
+ meta["Name"],
+ orchestrator=meta["Metadata"].get("StackOrchestrator", None),
+ endpoints=meta.get("Endpoints", None))
+ instance.context_type = meta["Metadata"].get("Type", None)
instance._load_certs()
instance.meta_path = get_meta_dir(name)
return instance
@@ -69,26 +86,30 @@ class Context:
@classmethod
def _load_meta(cls, name):
- metadata = {}
meta_file = get_meta_file(name)
- if os.path.isfile(meta_file):
+ if not os.path.isfile(meta_file):
+ return None
+
+ metadata = {}
+ try:
with open(meta_file) as f:
- try:
- with open(meta_file) as f:
- metadata = json.load(f)
- for k, v in metadata["Endpoints"].items():
- metadata["Endpoints"][k]["SkipTLSVerify"] = bool(
- v["SkipTLSVerify"])
- except (IOError, KeyError, ValueError) as e:
- # unknown format
- raise Exception("""Detected corrupted meta file for
- context {} : {}""".format(name, e))
-
- return (
- metadata["Name"],
- metadata["Metadata"].get("StackOrchestrator", None),
- metadata["Endpoints"])
- return None, None, None
+ metadata = json.load(f)
+ except (OSError, KeyError, ValueError) as e:
+ # unknown format
+            raise Exception(
+                f"Detected corrupted meta file for context {name}: {e}")
+
+ # for docker endpoints, set defaults for
+ # Host and SkipTLSVerify fields
+ for k, v in metadata["Endpoints"].items():
+ if k != "docker":
+ continue
+ metadata["Endpoints"][k]["Host"] = v.get(
+ "Host", get_context_host(None, False))
+ metadata["Endpoints"][k]["SkipTLSVerify"] = bool(
+ v.get("SkipTLSVerify", True))
+
+ return metadata
def _load_certs(self):
certs = {}
@@ -107,8 +128,12 @@ class Context:
elif filename.startswith("key"):
key = os.path.join(tls_dir, endpoint, filename)
if all([ca_cert, cert, key]):
+ verify = None
+ if endpoint == "docker" and not self.endpoints["docker"].get(
+ "SkipTLSVerify", False):
+ verify = True
certs[endpoint] = TLSConfig(
- client_cert=(cert, key), ca_cert=ca_cert)
+ client_cert=(cert, key), ca_cert=ca_cert, verify=verify)
self.tls_cfg = certs
self.tls_path = tls_dir
@@ -146,7 +171,7 @@ class Context:
rmtree(self.tls_path)
def __repr__(self):
- return "<%s: '%s'>" % (self.__class__.__name__, self.name)
+ return f"<{self.__class__.__name__}: '{self.name}'>"
def __str__(self):
return json.dumps(self.__call__(), indent=2)
@@ -157,6 +182,9 @@ class Context:
result.update(self.Storage)
return result
+ def is_docker_host(self):
+ return self.context_type is None
+
@property
def Name(self):
return self.name
@@ -164,8 +192,12 @@ class Context:
@property
def Host(self):
if not self.orchestrator or self.orchestrator == "swarm":
- return self.endpoints["docker"]["Host"]
- return self.endpoints[self.orchestrator]["Host"]
+ endpoint = self.endpoints.get("docker", None)
+ if endpoint:
+ return endpoint.get("Host", None)
+ return None
+
+ return self.endpoints[self.orchestrator].get("Host", None)
@property
def Orchestrator(self):
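
A sketch of the reworked loading path; ``load_context`` now returns ``None``
when no metadata file exists ('remote' is a hypothetical context name):

    from docker.context.context import Context

    ctx = Context.load_context('remote')
    if ctx and ctx.is_docker_host():  # context_type is None for plain hosts
        print(ctx.Host)
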
diff --git a/docker/credentials/store.py b/docker/credentials/store.py
index 0017888..e55976f 100644
--- a/docker/credentials/store.py
+++ b/docker/credentials/store.py
@@ -2,15 +2,13 @@ import errno
import json
import subprocess
-import six
-
from . import constants
from . import errors
from .utils import create_environment_dict
from .utils import find_executable
-class Store(object):
+class Store:
def __init__(self, program, environment=None):
""" Create a store object that acts as an interface to
perform the basic operations for storing, retrieving
@@ -30,7 +28,7 @@ class Store(object):
""" Retrieve credentials for `server`. If no credentials are found,
a `StoreError` will be raised.
"""
- if not isinstance(server, six.binary_type):
+ if not isinstance(server, bytes):
server = server.encode('utf-8')
data = self._execute('get', server)
result = json.loads(data.decode('utf-8'))
@@ -41,7 +39,7 @@ class Store(object):
# raise CredentialsNotFound
if result['Username'] == '' and result['Secret'] == '':
raise errors.CredentialsNotFound(
- 'No matching credentials in {}'.format(self.program)
+ f'No matching credentials in {self.program}'
)
return result
@@ -61,7 +59,7 @@ class Store(object):
""" Erase credentials for `server`. Raises a `StoreError` if an error
occurs.
"""
- if not isinstance(server, six.binary_type):
+ if not isinstance(server, bytes):
server = server.encode('utf-8')
self._execute('erase', server)
@@ -75,20 +73,9 @@ class Store(object):
output = None
env = create_environment_dict(self.environment)
try:
- if six.PY3:
- output = subprocess.check_output(
- [self.exe, subcmd], input=data_input, env=env,
- )
- else:
- process = subprocess.Popen(
- [self.exe, subcmd], stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, env=env,
- )
- output, _ = process.communicate(data_input)
- if process.returncode != 0:
- raise subprocess.CalledProcessError(
- returncode=process.returncode, cmd='', output=output
- )
+ output = subprocess.check_output(
+ [self.exe, subcmd], input=data_input, env=env,
+ )
except subprocess.CalledProcessError as e:
raise errors.process_store_error(e, self.program)
except OSError as e:
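
The unified code path is a plain ``subprocess.check_output`` call with
``input=``; roughly equivalent standalone (the helper name is hypothetical):

    import subprocess

    out = subprocess.check_output(
        ['docker-credential-example', 'get'],
        input=b'https://index.docker.io/v1/',
    )
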
diff --git a/docker/errors.py b/docker/errors.py
index e5d07a5..8cf8670 100644
--- a/docker/errors.py
+++ b/docker/errors.py
@@ -1,5 +1,14 @@
import requests
+_image_not_found_explanation_fragments = frozenset(
+ fragment.lower() for fragment in [
+ 'no such image',
+ 'not found: does not exist or no pull access',
+ 'repository does not exist',
+ 'was found but does not match the specified platform',
+ ]
+)
+
class DockerException(Exception):
"""
@@ -21,14 +30,13 @@ def create_api_error_from_http_exception(e):
explanation = (response.content or '').strip()
cls = APIError
if response.status_code == 404:
- if explanation and ('No such image' in str(explanation) or
- 'not found: does not exist or no pull access'
- in str(explanation) or
- 'repository does not exist' in str(explanation)):
+ explanation_msg = (explanation or '').lower()
+ if any(fragment in explanation_msg
+ for fragment in _image_not_found_explanation_fragments):
cls = ImageNotFound
else:
cls = NotFound
- raise cls(e, response=response, explanation=explanation)
+ raise cls(e, response=response, explanation=explanation) from e
class APIError(requests.exceptions.HTTPError, DockerException):
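
The frozenset makes the 404 classification case-insensitive; the membership
test the new code performs, in isolation:

    msg = 'No Such Image: busybox:latest'.lower()
    is_image_not_found = any(
        fragment in msg
        for fragment in _image_not_found_explanation_fragments
    )  # True, so ImageNotFound is raised instead of NotFound
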
@@ -38,23 +46,25 @@ class APIError(requests.exceptions.HTTPError, DockerException):
def __init__(self, message, response=None, explanation=None):
# requests 1.2 supports response as a keyword argument, but
# requests 1.1 doesn't
- super(APIError, self).__init__(message)
+ super().__init__(message)
self.response = response
self.explanation = explanation
def __str__(self):
- message = super(APIError, self).__str__()
+ message = super().__str__()
if self.is_client_error():
- message = '{0} Client Error: {1}'.format(
- self.response.status_code, self.response.reason)
+ message = '{} Client Error for {}: {}'.format(
+ self.response.status_code, self.response.url,
+ self.response.reason)
elif self.is_server_error():
- message = '{0} Server Error: {1}'.format(
- self.response.status_code, self.response.reason)
+ message = '{} Server Error for {}: {}'.format(
+ self.response.status_code, self.response.url,
+ self.response.reason)
if self.explanation:
- message = '{0} ("{1}")'.format(message, self.explanation)
+ message = f'{message} ("{self.explanation}")'
return message
@@ -131,11 +141,11 @@ class ContainerError(DockerException):
self.image = image
self.stderr = stderr
- err = ": {}".format(stderr) if stderr is not None else ""
+ err = f": {stderr}" if stderr is not None else ""
msg = ("Command '{}' in image '{}' returned non-zero exit "
"status {}{}").format(command, image, exit_status, err)
- super(ContainerError, self).__init__(msg)
+ super().__init__(msg)
class StreamParseError(RuntimeError):
@@ -145,7 +155,7 @@ class StreamParseError(RuntimeError):
class BuildError(DockerException):
def __init__(self, reason, build_log):
- super(BuildError, self).__init__(reason)
+ super().__init__(reason)
self.msg = reason
self.build_log = build_log
@@ -155,8 +165,8 @@ class ImageLoadError(DockerException):
def create_unexpected_kwargs_error(name, kwargs):
- quoted_kwargs = ["'{}'".format(k) for k in sorted(kwargs)]
- text = ["{}() ".format(name)]
+ quoted_kwargs = [f"'{k}'" for k in sorted(kwargs)]
+ text = [f"{name}() "]
if len(quoted_kwargs) == 1:
text.append("got an unexpected keyword argument ")
else:
@@ -170,7 +180,7 @@ class MissingContextParameter(DockerException):
self.param = param
def __str__(self):
- return ("missing parameter: {}".format(self.param))
+ return (f"missing parameter: {self.param}")
class ContextAlreadyExists(DockerException):
@@ -178,7 +188,7 @@ class ContextAlreadyExists(DockerException):
self.name = name
def __str__(self):
- return ("context {} already exists".format(self.name))
+ return (f"context {self.name} already exists")
class ContextException(DockerException):
@@ -194,4 +204,4 @@ class ContextNotFound(DockerException):
self.name = name
def __str__(self):
- return ("context '{}' not found".format(self.name))
+ return (f"context '{self.name}' not found")
diff --git a/docker/models/configs.py b/docker/models/configs.py
index 7f23f65..3588c8b 100644
--- a/docker/models/configs.py
+++ b/docker/models/configs.py
@@ -7,7 +7,7 @@ class Config(Model):
id_attribute = 'ID'
def __repr__(self):
- return "<%s: '%s'>" % (self.__class__.__name__, self.name)
+ return f"<{self.__class__.__name__}: '{self.name}'>"
@property
def name(self):
diff --git a/docker/models/containers.py b/docker/models/containers.py
index 19477fe..3d01031 100644
--- a/docker/models/containers.py
+++ b/docker/models/containers.py
@@ -225,7 +225,8 @@ class Container(Model):
"""
return self.client.api.export(self.id, chunk_size)
- def get_archive(self, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
+ def get_archive(self, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE,
+ encode_stream=False):
"""
Retrieve a file or folder from the container in the form of a tar
archive.
@@ -235,6 +236,8 @@ class Container(Model):
chunk_size (int): The number of bytes returned by each iteration
of the generator. If ``None``, data will be streamed as it is
received. Default: 2 MB
+ encode_stream (bool): Determines if data should be encoded
+ (gzip-compressed) during transmission. Default: False
Returns:
(tuple): First element is a raw tar data stream. Second element is
@@ -255,7 +258,8 @@ class Container(Model):
... f.write(chunk)
>>> f.close()
"""
- return self.client.api.get_archive(self.id, path, chunk_size)
+ return self.client.api.get_archive(self.id, path,
+ chunk_size, encode_stream)
def kill(self, signal=None):
"""
@@ -549,6 +553,11 @@ class ContainerCollection(Collection):
``["SYS_ADMIN", "MKNOD"]``.
cap_drop (list of str): Drop kernel capabilities.
cgroup_parent (str): Override the default parent cgroup.
+ cgroupns (str): Override the default cgroup namespace mode for the
+ container. One of:
+ - ``private`` the container runs in its own private cgroup
+ namespace.
+ - ``host`` use the host system's cgroup namespace.
cpu_count (int): Number of usable CPUs (Windows only).
cpu_percent (int): Usable percentage of the available CPUs
(Windows only).
@@ -579,6 +588,9 @@ class ContainerCollection(Collection):
For example, ``/dev/sda:/dev/xvda:rwm`` allows the container
to have read-write access to the host's ``/dev/sda`` via a
node named ``/dev/xvda`` inside the container.
+ device_requests (:py:class:`list`): Expose host resources such as
+ GPUs to the container, as a list of
+ :py:class:`docker.types.DeviceRequest` instances.
dns (:py:class:`list`): Set custom DNS servers.
dns_opt (:py:class:`list`): Additional options to be added to the
container's ``resolv.conf`` file.
@@ -662,6 +674,7 @@ class ContainerCollection(Collection):
- ``container:<name|id>`` Reuse another container's network
stack.
- ``host`` Use the host network stack.
+ This mode is incompatible with ``ports``.
Incompatible with ``network``.
oom_kill_disable (bool): Whether to disable OOM killer.
@@ -695,6 +708,7 @@ class ContainerCollection(Collection):
to a single container port. For example,
``{'1111/tcp': [1234, 4567]}``.
+ Incompatible with ``host`` network mode.
privileged (bool): Give extended privileges to this container.
publish_all_ports (bool): Publish all ports to the host.
read_only (bool): Mount the container's root filesystem as read
@@ -772,6 +786,15 @@ class ContainerCollection(Collection):
{'/home/user1/': {'bind': '/mnt/vol2', 'mode': 'rw'},
'/var/www': {'bind': '/mnt/vol1', 'mode': 'ro'}}
+ Or a list of strings which each one of its elements specifies a
+ mount volume.
+
+ For example:
+
+ .. code-block:: python
+
+                ['/home/user1/:/mnt/vol2', '/var/www:/mnt/vol1']
+
volumes_from (:py:class:`list`): List of container names or IDs to
get volumes from.
working_dir (str): Path to the working directory.
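
A hedged ``run()`` example combining several of the newly documented options
(image name and paths are illustrative; GPU requests need a configured
runtime):

    import docker
    from docker.types import DeviceRequest

    client = docker.from_env()
    client.containers.run(
        'nvidia/cuda:11.0-base',
        'nvidia-smi',
        device_requests=[DeviceRequest(count=-1, capabilities=[['gpu']])],
        cgroupns='private',
        volumes=['/home/user1/:/mnt/vol2', '/var/www:/mnt/vol1'],
        remove=True,
    )
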
@@ -803,7 +826,7 @@ class ContainerCollection(Collection):
image = image.id
stream = kwargs.pop('stream', False)
detach = kwargs.pop('detach', False)
- platform = kwargs.pop('platform', None)
+ platform = kwargs.get('platform', None)
if detach and remove:
if version_gte(self.client.api._version, '1.25'):
@@ -987,6 +1010,7 @@ RUN_CREATE_KWARGS = [
'mac_address',
'name',
'network_disabled',
+ 'platform',
'stdin_open',
'stop_signal',
'tty',
@@ -1003,6 +1027,7 @@ RUN_HOST_CONFIG_KWARGS = [
'cap_add',
'cap_drop',
'cgroup_parent',
+ 'cgroupns',
'cpu_count',
'cpu_percent',
'cpu_period',
@@ -1018,6 +1043,7 @@ RUN_HOST_CONFIG_KWARGS = [
'device_write_bps',
'device_write_iops',
'devices',
+ 'device_requests',
'dns_opt',
'dns_search',
'dns',
diff --git a/docker/models/images.py b/docker/models/images.py
index 757a5a4..79ccbe4 100644
--- a/docker/models/images.py
+++ b/docker/models/images.py
@@ -2,8 +2,6 @@ import itertools
import re
import warnings
-import six
-
from ..api import APIClient
from ..constants import DEFAULT_DATA_CHUNK_SIZE
from ..errors import BuildError, ImageLoadError, InvalidArgument
@@ -17,7 +15,10 @@ class Image(Model):
An image on the server.
"""
def __repr__(self):
- return "<%s: '%s'>" % (self.__class__.__name__, "', '".join(self.tags))
+ return "<{}: '{}'>".format(
+ self.__class__.__name__,
+ "', '".join(self.tags),
+ )
@property
def labels(self):
@@ -30,12 +31,12 @@ class Image(Model):
@property
def short_id(self):
"""
- The ID of the image truncated to 10 characters, plus the ``sha256:``
+ The ID of the image truncated to 12 characters, plus the ``sha256:``
prefix.
"""
if self.id.startswith('sha256:'):
- return self.id[:17]
- return self.id[:10]
+ return self.id[:19]
+ return self.id[:12]
@property
def tags(self):
@@ -60,6 +61,24 @@ class Image(Model):
"""
return self.client.api.history(self.id)
+ def remove(self, force=False, noprune=False):
+ """
+ Remove this image.
+
+ Args:
+ force (bool): Force removal of the image
+ noprune (bool): Do not delete untagged parents
+
+ Raises:
+ :py:class:`docker.errors.APIError`
+ If the server returns an error.
+ """
+ return self.client.api.remove_image(
+ self.id,
+ force=force,
+ noprune=noprune,
+ )
+
def save(self, chunk_size=DEFAULT_DATA_CHUNK_SIZE, named=False):
"""
Get a tarball of an image. Similar to the ``docker save`` command.
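
The new model-level helper simply delegates to ``remove_image``:

    image = client.images.get('busybox:latest')
    image.remove(force=True, noprune=False)
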
@@ -84,19 +103,19 @@ class Image(Model):
Example:
- >>> image = cli.get_image("busybox:latest")
+            >>> image = client.images.get("busybox:latest")
>>> f = open('/tmp/busybox-latest.tar', 'wb')
- >>> for chunk in image:
+ >>> for chunk in image.save():
>>> f.write(chunk)
>>> f.close()
"""
img = self.id
if named:
img = self.tags[0] if self.tags else img
- if isinstance(named, six.string_types):
+ if isinstance(named, str):
if named not in self.tags:
raise InvalidArgument(
- "{} is not a valid tag for this image".format(named)
+ f"{named} is not a valid tag for this image"
)
img = named
@@ -127,7 +146,7 @@ class RegistryData(Model):
Image metadata stored on the registry, including available platforms.
"""
def __init__(self, image_name, *args, **kwargs):
- super(RegistryData, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.image_name = image_name
@property
@@ -140,10 +159,10 @@ class RegistryData(Model):
@property
def short_id(self):
"""
- The ID of the image truncated to 10 characters, plus the ``sha256:``
+ The ID of the image truncated to 12 characters, plus the ``sha256:``
prefix.
"""
- return self.id[:17]
+ return self.id[:19]
def pull(self, platform=None):
"""
@@ -180,7 +199,7 @@ class RegistryData(Model):
parts = platform.split('/')
if len(parts) > 3 or len(parts) < 1:
raise InvalidArgument(
- '"{0}" is not a valid platform descriptor'.format(platform)
+ f'"{platform}" is not a valid platform descriptor'
)
platform = {'os': parts[0]}
if len(parts) > 2:
@@ -277,7 +296,7 @@ class ImageCollection(Collection):
If neither ``path`` nor ``fileobj`` is specified.
"""
resp = self.client.api.build(**kwargs)
- if isinstance(resp, six.string_types):
+ if isinstance(resp, str):
return self.get(resp)
last_event = None
image_id = None
@@ -395,12 +414,13 @@ class ImageCollection(Collection):
return [self.get(i) for i in images]
- def pull(self, repository, tag=None, **kwargs):
+ def pull(self, repository, tag=None, all_tags=False, **kwargs):
"""
Pull an image of the given name and return it. Similar to the
``docker pull`` command.
- If no tag is specified, all tags from that repository will be
- pulled.
+ If ``tag`` is ``None`` or empty, it is set to ``latest``.
+ If ``all_tags`` is set, the ``tag`` parameter is ignored and all image
+ tags will be pulled.
If you want to get the raw pull output, use the
:py:meth:`~docker.api.image.ImageApiMixin.pull` method in the
@@ -413,10 +433,11 @@ class ImageCollection(Collection):
config for this request. ``auth_config`` should contain the
``username`` and ``password`` keys to be valid.
platform (str): Platform in the format ``os[/arch[/variant]]``
+ all_tags (bool): Pull all image tags
Returns:
(:py:class:`Image` or list): The image that has been pulled.
- If no ``tag`` was specified, the method will return a list
+ If ``all_tags`` is True, the method will return a list
of :py:class:`Image` objects belonging to this repository.
Raises:
@@ -426,13 +447,13 @@ class ImageCollection(Collection):
Example:
>>> # Pull the image tagged `latest` in the busybox repo
- >>> image = client.images.pull('busybox:latest')
+ >>> image = client.images.pull('busybox')
>>> # Pull all tags in the busybox repo
- >>> images = client.images.pull('busybox')
+ >>> images = client.images.pull('busybox', all_tags=True)
"""
- if not tag:
- repository, tag = parse_repository_tag(repository)
+ repository, image_tag = parse_repository_tag(repository)
+ tag = tag or image_tag or 'latest'
if 'stream' in kwargs:
warnings.warn(
@@ -442,14 +463,14 @@ class ImageCollection(Collection):
del kwargs['stream']
pull_log = self.client.api.pull(
- repository, tag=tag, stream=True, **kwargs
+ repository, tag=tag, stream=True, all_tags=all_tags, **kwargs
)
for _ in pull_log:
# We don't do anything with the logs, but we need
# to keep the connection alive and wait for the image
# to be pulled.
pass
- if tag:
+ if not all_tags:
return self.get('{0}{2}{1}'.format(
repository, tag, '@' if tag.startswith('sha256:') else ':'
))
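
Both call styles from the updated docstring, side by side:

    image = client.images.pull('alpine')                  # alpine:latest
    images = client.images.pull('alpine', all_tags=True)  # list of Images
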
diff --git a/docker/models/networks.py b/docker/models/networks.py
index f944c8e..093deb7 100644
--- a/docker/models/networks.py
+++ b/docker/models/networks.py
@@ -46,6 +46,8 @@ class Network(Model):
network, using the IPv6 protocol. Defaults to ``None``.
link_local_ips (:py:class:`list`): A list of link-local (IPv4/IPv6)
addresses.
+ driver_opt (dict): A dictionary of options to provide to the
+ network driver. Defaults to ``None``.
Raises:
:py:class:`docker.errors.APIError`
diff --git a/docker/models/plugins.py b/docker/models/plugins.py
index 0688018..16f5245 100644
--- a/docker/models/plugins.py
+++ b/docker/models/plugins.py
@@ -7,7 +7,7 @@ class Plugin(Model):
A plugin on the server.
"""
def __repr__(self):
- return "<%s: '%s'>" % (self.__class__.__name__, self.name)
+ return f"<{self.__class__.__name__}: '{self.name}'>"
@property
def name(self):
@@ -44,16 +44,19 @@ class Plugin(Model):
self.client.api.configure_plugin(self.name, options)
self.reload()
- def disable(self):
+ def disable(self, force=False):
"""
Disable the plugin.
+ Args:
+ force (bool): Force disable. Default: False
+
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
- self.client.api.disable_plugin(self.name)
+ self.client.api.disable_plugin(self.name, force)
self.reload()
def enable(self, timeout=0):
@@ -117,9 +120,12 @@ class Plugin(Model):
if remote is None:
remote = self.name
privileges = self.client.api.plugin_privileges(remote)
- for d in self.client.api.upgrade_plugin(self.name, remote, privileges):
- yield d
- self._reload()
+ yield from self.client.api.upgrade_plugin(
+ self.name,
+ remote,
+ privileges,
+ )
+ self.reload()
class PluginCollection(Collection):
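
Model-level force disable now mirrors the API mixin (plugin name
illustrative):

    plugin = client.plugins.get('vieux/sshfs:latest')
    plugin.disable(force=True)
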
diff --git a/docker/models/resource.py b/docker/models/resource.py
index ed3900a..89030e5 100644
--- a/docker/models/resource.py
+++ b/docker/models/resource.py
@@ -1,5 +1,4 @@
-
-class Model(object):
+class Model:
"""
A base class for representing a single object on the server.
"""
@@ -18,13 +17,13 @@ class Model(object):
self.attrs = {}
def __repr__(self):
- return "<%s: %s>" % (self.__class__.__name__, self.short_id)
+ return f"<{self.__class__.__name__}: {self.short_id}>"
def __eq__(self, other):
return isinstance(other, self.__class__) and self.id == other.id
def __hash__(self):
- return hash("%s:%s" % (self.__class__.__name__, self.id))
+ return hash(f"{self.__class__.__name__}:{self.id}")
@property
def id(self):
@@ -36,9 +35,9 @@ class Model(object):
@property
def short_id(self):
"""
- The ID of the object, truncated to 10 characters.
+ The ID of the object, truncated to 12 characters.
"""
- return self.id[:10]
+ return self.id[:12]
def reload(self):
"""
@@ -49,7 +48,7 @@ class Model(object):
self.attrs = new_model.attrs
-class Collection(object):
+class Collection:
"""
A base class for representing all objects of a particular type on the
server.
diff --git a/docker/models/secrets.py b/docker/models/secrets.py
index ca11ede..da01d44 100644
--- a/docker/models/secrets.py
+++ b/docker/models/secrets.py
@@ -7,7 +7,7 @@ class Secret(Model):
id_attribute = 'ID'
def __repr__(self):
- return "<%s: '%s'>" % (self.__class__.__name__, self.name)
+ return f"<{self.__class__.__name__}: '{self.name}'>"
@property
def name(self):
@@ -30,6 +30,7 @@ class SecretCollection(Collection):
def create(self, **kwargs):
obj = self.client.api.create_secret(**kwargs)
+ obj.setdefault("Spec", {})["Name"] = kwargs.get("name")
return self.prepare_model(obj)
create.__doc__ = APIClient.create_secret.__doc__
diff --git a/docker/models/services.py b/docker/models/services.py
index a35687b..9255068 100644
--- a/docker/models/services.py
+++ b/docker/models/services.py
@@ -157,6 +157,8 @@ class ServiceCollection(Collection):
constraints.
preferences (list of tuple): :py:class:`~docker.types.Placement`
preferences.
+            maxreplicas (int): Maximum number of replicas per node, applied
+                through :py:class:`~docker.types.Placement`.
platforms (list of tuple): A list of platform constraints
expressed as ``(arch, os)`` tuples.
container_labels (dict): Labels to apply to the container.
@@ -211,6 +213,10 @@ class ServiceCollection(Collection):
to the service.
privileges (Privileges): Security options for the service's
containers.
+ cap_add (:py:class:`list`): A list of kernel capabilities to add to
+ the default set for the container.
+ cap_drop (:py:class:`list`): A list of kernel capabilities to drop
+ from the default set for the container.
Returns:
:py:class:`Service`: The created service.
@@ -275,6 +281,8 @@ class ServiceCollection(Collection):
# kwargs to copy straight over to ContainerSpec
CONTAINER_SPEC_KWARGS = [
'args',
+ 'cap_add',
+ 'cap_drop',
'command',
'configs',
'dns_config',
@@ -312,6 +320,7 @@ CREATE_SERVICE_KWARGS = [
'labels',
'mode',
'update_config',
+ 'rollback_config',
'endpoint_spec',
]
@@ -319,6 +328,7 @@ PLACEMENT_KWARGS = [
'constraints',
'preferences',
'platforms',
+ 'maxreplicas',
]
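
A sketch using the new service kwargs (names illustrative; requires a
swarm-enabled daemon):

    client.services.create(
        'nginx:alpine',
        name='web',
        cap_add=['NET_ADMIN'],
        cap_drop=['CHOWN'],
        maxreplicas=1,
    )
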
diff --git a/docker/models/swarm.py b/docker/models/swarm.py
index 755c17d..b0b1a2e 100644
--- a/docker/models/swarm.py
+++ b/docker/models/swarm.py
@@ -11,7 +11,7 @@ class Swarm(Model):
id_attribute = 'ID'
def __init__(self, *args, **kwargs):
- super(Swarm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
if self.client:
try:
self.reload()
diff --git a/docker/tls.py b/docker/tls.py
index d4671d1..f4dffb2 100644
--- a/docker/tls.py
+++ b/docker/tls.py
@@ -5,15 +5,16 @@ from . import errors
from .transport import SSLHTTPAdapter
-class TLSConfig(object):
+class TLSConfig:
"""
TLS configuration.
Args:
client_cert (tuple of str): Path to client cert, path to client key.
ca_cert (str): Path to CA cert file.
- verify (bool or str): This can be ``False`` or a path to a CA cert
- file.
+ verify (bool or str): This can be a bool or a path to a CA cert
+ file to verify against. If ``True``, verify using ca_cert;
+ if ``False`` or not specified, do not verify.
ssl_version (int): A valid `SSL version`_.
assert_hostname (bool): Verify the hostname of the server.
@@ -32,37 +33,18 @@ class TLSConfig(object):
# https://docs.docker.com/engine/articles/https/
# This diverges from the Docker CLI in that users can specify 'tls'
# here, but also disable any public/default CA pool verification by
- # leaving tls_verify=False
+ # leaving verify=False
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
- # TODO(dperny): according to the python docs, PROTOCOL_TLSvWhatever is
- # depcreated, and it's recommended to use OPT_NO_TLSvWhatever instead
- # to exclude versions. But I think that might require a bigger
- # architectural change, so I've opted not to pursue it at this time
-
# If the user provides an SSL version, we should use their preference
if ssl_version:
self.ssl_version = ssl_version
else:
- # If the user provides no ssl version, we should default to
- # TLSv1_2. This option is the most secure, and will work for the
- # majority of users with reasonably up-to-date software. However,
- # before doing so, detect openssl version to ensure we can support
- # it.
- if ssl.OPENSSL_VERSION_INFO[:3] >= (1, 0, 1) and hasattr(
- ssl, 'PROTOCOL_TLSv1_2'):
- # If the OpenSSL version is high enough to support TLSv1_2,
- # then we should use it.
- self.ssl_version = getattr(ssl, 'PROTOCOL_TLSv1_2')
- else:
- # Otherwise, TLS v1.0 seems to be the safest default;
- # SSLv23 fails in mysterious ways:
- # https://github.com/docker/docker-py/issues/963
- self.ssl_version = ssl.PROTOCOL_TLSv1
-
- # "tls" and "tls_verify" must have both or neither cert/key files In
+ self.ssl_version = ssl.PROTOCOL_TLS_CLIENT
+
+ # "client_cert" must have both or neither cert/key files. In
# either case, Alert the user when both are expected, but any are
# missing.
@@ -71,7 +53,7 @@ class TLSConfig(object):
tls_cert, tls_key = client_cert
except ValueError:
raise errors.TLSParameterError(
- 'client_config must be a tuple of'
+ 'client_cert must be a tuple of'
' (client certificate, key file)'
)
@@ -79,7 +61,7 @@ class TLSConfig(object):
not os.path.isfile(tls_key)):
raise errors.TLSParameterError(
'Path to a certificate and key files must be provided'
- ' through the client_config param'
+ ' through the client_cert param'
)
self.cert = (tls_cert, tls_key)
@@ -88,7 +70,7 @@ class TLSConfig(object):
self.ca_cert = ca_cert
if self.verify and self.ca_cert and not os.path.isfile(self.ca_cert):
raise errors.TLSParameterError(
- 'Invalid CA certificate provided for `tls_ca_cert`.'
+ 'Invalid CA certificate provided for `ca_cert`.'
)
def configure_client(self, client):
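
With the corrected parameter names, a valid ``TLSConfig`` call looks like
this (paths are placeholders):

    import docker

    tls_config = docker.tls.TLSConfig(
        client_cert=('/certs/client-cert.pem', '/certs/client-key.pem'),
        ca_cert='/certs/ca.pem',
        verify=True,
    )
    client = docker.DockerClient(base_url='tcp://example.com:2376',
                                 tls=tls_config)
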
diff --git a/docker/transport/basehttpadapter.py b/docker/transport/basehttpadapter.py
index 4d819b6..dfbb193 100644
--- a/docker/transport/basehttpadapter.py
+++ b/docker/transport/basehttpadapter.py
@@ -3,6 +3,6 @@ import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
- super(BaseHTTPAdapter, self).close()
+ super().close()
if hasattr(self, 'pools'):
self.pools.clear()
diff --git a/docker/transport/npipeconn.py b/docker/transport/npipeconn.py
index aa05538..87033cf 100644
--- a/docker/transport/npipeconn.py
+++ b/docker/transport/npipeconn.py
@@ -1,14 +1,11 @@
-import six
+import queue
import requests.adapters
from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants
from .npipesocket import NpipeSocket
-if six.PY3:
- import http.client as httplib
-else:
- import httplib
+import http.client as httplib
try:
import requests.packages.urllib3 as urllib3
@@ -18,9 +15,9 @@ except ImportError:
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
-class NpipeHTTPConnection(httplib.HTTPConnection, object):
+class NpipeHTTPConnection(httplib.HTTPConnection):
def __init__(self, npipe_path, timeout=60):
- super(NpipeHTTPConnection, self).__init__(
+ super().__init__(
'localhost', timeout=timeout
)
self.npipe_path = npipe_path
@@ -35,7 +32,7 @@ class NpipeHTTPConnection(httplib.HTTPConnection, object):
class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, npipe_path, timeout=60, maxsize=10):
- super(NpipeHTTPConnectionPool, self).__init__(
+ super().__init__(
'localhost', timeout=timeout, maxsize=maxsize
)
self.npipe_path = npipe_path
@@ -57,14 +54,14 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
except AttributeError: # self.pool is None
raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
- except six.moves.queue.Empty:
+ except queue.Empty:
if self.block:
raise urllib3.exceptions.EmptyPoolError(
self,
"Pool reached maximum size and no more "
"connections are allowed."
)
- pass # Oh well, we'll create a new connection then
+ # Oh well, we'll create a new connection then
return conn or self._new_conn()
@@ -73,16 +70,19 @@ class NpipeHTTPAdapter(BaseHTTPAdapter):
__attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['npipe_path',
'pools',
- 'timeout']
+ 'timeout',
+ 'max_pool_size']
def __init__(self, base_url, timeout=60,
- pool_connections=constants.DEFAULT_NUM_POOLS):
+ pool_connections=constants.DEFAULT_NUM_POOLS,
+ max_pool_size=constants.DEFAULT_MAX_POOL_SIZE):
self.npipe_path = base_url.replace('npipe://', '')
self.timeout = timeout
+ self.max_pool_size = max_pool_size
self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close()
)
- super(NpipeHTTPAdapter, self).__init__()
+ super().__init__()
def get_connection(self, url, proxies=None):
with self.pools.lock:
@@ -91,7 +91,8 @@ class NpipeHTTPAdapter(BaseHTTPAdapter):
return pool
pool = NpipeHTTPConnectionPool(
- self.npipe_path, self.timeout
+ self.npipe_path, self.timeout,
+ maxsize=self.max_pool_size
)
self.pools[url] = pool
diff --git a/docker/transport/npipesocket.py b/docker/transport/npipesocket.py
index 176b5c8..766372a 100644
--- a/docker/transport/npipesocket.py
+++ b/docker/transport/npipesocket.py
@@ -2,7 +2,6 @@ import functools
import time
import io
-import six
import win32file
import win32pipe
@@ -24,7 +23,7 @@ def check_closed(f):
return wrapped
-class NpipeSocket(object):
+class NpipeSocket:
""" Partial implementation of the socket API over windows named pipes.
This implementation is only designed to be used as a client socket,
and server-specific methods (bind, listen, accept...) are not
@@ -128,9 +127,6 @@ class NpipeSocket(object):
@check_closed
def recv_into(self, buf, nbytes=0):
- if six.PY2:
- return self._recv_into_py2(buf, nbytes)
-
readbuf = buf
if not isinstance(buf, memoryview):
readbuf = memoryview(buf)
@@ -195,7 +191,7 @@ class NpipeFileIOBase(io.RawIOBase):
self.sock = npipe_socket
def close(self):
- super(NpipeFileIOBase, self).close()
+ super().close()
self.sock = None
def fileno(self):
diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py
index 7de0e59..2776406 100644
--- a/docker/transport/sshconn.py
+++ b/docker/transport/sshconn.py
@@ -1,16 +1,17 @@
import paramiko
+import queue
+import urllib.parse
import requests.adapters
-import six
import logging
import os
+import signal
+import socket
+import subprocess
from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants
-if six.PY3:
- import http.client as httplib
-else:
- import httplib
+import http.client as httplib
try:
import requests.packages.urllib3 as urllib3
@@ -20,33 +21,121 @@ except ImportError:
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
-class SSHConnection(httplib.HTTPConnection, object):
- def __init__(self, ssh_transport, timeout=60):
- super(SSHConnection, self).__init__(
+class SSHSocket(socket.socket):
+ def __init__(self, host):
+ super().__init__(
+ socket.AF_INET, socket.SOCK_STREAM)
+ self.host = host
+ self.port = None
+ self.user = None
+ if ':' in self.host:
+ self.host, self.port = self.host.split(':')
+ if '@' in self.host:
+ self.user, self.host = self.host.split('@')
+
+ self.proc = None
+
+ def connect(self, **kwargs):
+ args = ['ssh']
+ if self.user:
+ args = args + ['-l', self.user]
+
+ if self.port:
+ args = args + ['-p', self.port]
+
+ args = args + ['--', self.host, 'docker system dial-stdio']
+
+ preexec_func = None
+ if not constants.IS_WINDOWS_PLATFORM:
+ def f():
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+ preexec_func = f
+
+ env = dict(os.environ)
+
+ # drop LD_LIBRARY_PATH and SSL_CERT_FILE
+ env.pop('LD_LIBRARY_PATH', None)
+ env.pop('SSL_CERT_FILE', None)
+
+ self.proc = subprocess.Popen(
+ args,
+ env=env,
+ stdout=subprocess.PIPE,
+ stdin=subprocess.PIPE,
+ preexec_fn=None if constants.IS_WINDOWS_PLATFORM else preexec_func)
+
+ def _write(self, data):
+ if not self.proc or self.proc.stdin.closed:
+            raise Exception('SSH subprocess not initiated. '
+                            'connect() must be called first.')
+ written = self.proc.stdin.write(data)
+ self.proc.stdin.flush()
+ return written
+
+ def sendall(self, data):
+ self._write(data)
+
+ def send(self, data):
+ return self._write(data)
+
+ def recv(self, n):
+ if not self.proc:
+            raise Exception('SSH subprocess not initiated. '
+                            'connect() must be called first.')
+ return self.proc.stdout.read(n)
+
+ def makefile(self, mode):
+ if not self.proc:
+ self.connect()
+ self.proc.stdout.channel = self
+
+ return self.proc.stdout
+
+ def close(self):
+ if not self.proc or self.proc.stdin.closed:
+ return
+ self.proc.stdin.write(b'\n\n')
+ self.proc.stdin.flush()
+ self.proc.terminate()
+
+
+class SSHConnection(httplib.HTTPConnection):
+ def __init__(self, ssh_transport=None, timeout=60, host=None):
+ super().__init__(
'localhost', timeout=timeout
)
self.ssh_transport = ssh_transport
self.timeout = timeout
+ self.ssh_host = host
def connect(self):
- sock = self.ssh_transport.open_session()
- sock.settimeout(self.timeout)
- sock.exec_command('docker system dial-stdio')
+ if self.ssh_transport:
+ sock = self.ssh_transport.open_session()
+ sock.settimeout(self.timeout)
+ sock.exec_command('docker system dial-stdio')
+ else:
+ sock = SSHSocket(self.ssh_host)
+ sock.settimeout(self.timeout)
+ sock.connect()
+
self.sock = sock
class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
scheme = 'ssh'
- def __init__(self, ssh_client, timeout=60, maxsize=10):
- super(SSHConnectionPool, self).__init__(
+ def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None):
+ super().__init__(
'localhost', timeout=timeout, maxsize=maxsize
)
- self.ssh_transport = ssh_client.get_transport()
+ self.ssh_transport = None
self.timeout = timeout
+ if ssh_client:
+ self.ssh_transport = ssh_client.get_transport()
+ self.ssh_host = host
def _new_conn(self):
- return SSHConnection(self.ssh_transport, self.timeout)
+ return SSHConnection(self.ssh_transport, self.timeout, self.ssh_host)
# When re-using connections, urllib3 calls fileno() on our
# SSH channel instance, quickly overloading our fd limit. To avoid this,
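
How the shell-out path is reached from the public API (host is hypothetical;
requires a local ``ssh`` binary and ``docker`` on the remote side):

    import docker

    client = docker.DockerClient(
        base_url='ssh://user@remote-host',
        use_ssh_client=True,  # SSHSocket + subprocess instead of paramiko
    )
    print(client.ping())
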
@@ -59,14 +148,14 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
except AttributeError: # self.pool is None
raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
- except six.moves.queue.Empty:
+ except queue.Empty:
if self.block:
raise urllib3.exceptions.EmptyPoolError(
self,
"Pool reached maximum size and no more "
"connections are allowed."
)
- pass # Oh well, we'll create a new connection then
+ # Oh well, we'll create a new connection then
return conn or self._new_conn()
@@ -74,14 +163,33 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
class SSHHTTPAdapter(BaseHTTPAdapter):
__attrs__ = requests.adapters.HTTPAdapter.__attrs__ + [
- 'pools', 'timeout', 'ssh_client', 'ssh_params'
+ 'pools', 'timeout', 'ssh_client', 'ssh_params', 'max_pool_size'
]
def __init__(self, base_url, timeout=60,
- pool_connections=constants.DEFAULT_NUM_POOLS):
+ pool_connections=constants.DEFAULT_NUM_POOLS,
+ max_pool_size=constants.DEFAULT_MAX_POOL_SIZE,
+ shell_out=False):
+ self.ssh_client = None
+ if not shell_out:
+ self._create_paramiko_client(base_url)
+ self._connect()
+
+ self.ssh_host = base_url
+ if base_url.startswith('ssh://'):
+ self.ssh_host = base_url[len('ssh://'):]
+
+ self.timeout = timeout
+ self.max_pool_size = max_pool_size
+ self.pools = RecentlyUsedContainer(
+ pool_connections, dispose_func=lambda p: p.close()
+ )
+ super().__init__()
+
+ def _create_paramiko_client(self, base_url):
logging.getLogger("paramiko").setLevel(logging.WARNING)
self.ssh_client = paramiko.SSHClient()
- base_url = six.moves.urllib_parse.urlparse(base_url)
+ base_url = urllib.parse.urlparse(base_url)
self.ssh_params = {
"hostname": base_url.hostname,
"port": base_url.port,
@@ -93,48 +201,54 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
with open(ssh_config_file) as f:
conf.parse(f)
host_config = conf.lookup(base_url.hostname)
- self.ssh_conf = host_config
if 'proxycommand' in host_config:
self.ssh_params["sock"] = paramiko.ProxyCommand(
- self.ssh_conf['proxycommand']
+ host_config['proxycommand']
)
if 'hostname' in host_config:
self.ssh_params['hostname'] = host_config['hostname']
if base_url.port is None and 'port' in host_config:
- self.ssh_params['port'] = self.ssh_conf['port']
+ self.ssh_params['port'] = host_config['port']
if base_url.username is None and 'user' in host_config:
- self.ssh_params['username'] = self.ssh_conf['user']
+ self.ssh_params['username'] = host_config['user']
+ if 'identityfile' in host_config:
+ self.ssh_params['key_filename'] = host_config['identityfile']
self.ssh_client.load_system_host_keys()
- self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy())
-
- self._connect()
- self.timeout = timeout
- self.pools = RecentlyUsedContainer(
- pool_connections, dispose_func=lambda p: p.close()
- )
- super(SSHHTTPAdapter, self).__init__()
+ self.ssh_client.set_missing_host_key_policy(paramiko.RejectPolicy())
def _connect(self):
- self.ssh_client.connect(**self.ssh_params)
+ if self.ssh_client:
+ self.ssh_client.connect(**self.ssh_params)
def get_connection(self, url, proxies=None):
+ if not self.ssh_client:
+ return SSHConnectionPool(
+ ssh_client=self.ssh_client,
+ timeout=self.timeout,
+ maxsize=self.max_pool_size,
+ host=self.ssh_host
+ )
with self.pools.lock:
pool = self.pools.get(url)
if pool:
return pool
# Connection is closed try a reconnect
- if not self.ssh_client.get_transport():
+ if self.ssh_client and not self.ssh_client.get_transport():
self._connect()
pool = SSHConnectionPool(
- self.ssh_client, self.timeout
+ ssh_client=self.ssh_client,
+ timeout=self.timeout,
+ maxsize=self.max_pool_size,
+ host=self.ssh_host
)
self.pools[url] = pool
return pool
def close(self):
- super(SSHHTTPAdapter, self).close()
- self.ssh_client.close()
+ super().close()
+ if self.ssh_client:
+ self.ssh_client.close()
diff --git a/docker/transport/ssladapter.py b/docker/transport/ssladapter.py
index 12de76c..6aa8003 100644
--- a/docker/transport/ssladapter.py
+++ b/docker/transport/ssladapter.py
@@ -2,9 +2,7 @@
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
-import sys
-
-from distutils.version import StrictVersion
+from packaging.version import Version
from requests.adapters import HTTPAdapter
from docker.transport.basehttpadapter import BaseHTTPAdapter
@@ -17,12 +15,6 @@ except ImportError:
PoolManager = urllib3.poolmanager.PoolManager
-# Monkey-patching match_hostname with a version that supports
-# IP-address checking. Not necessary for Python 3.5 and above
-if sys.version_info[0] < 3 or sys.version_info[1] < 5:
- from backports.ssl_match_hostname import match_hostname
- urllib3.connection.match_hostname = match_hostname
-
class SSLHTTPAdapter(BaseHTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
@@ -36,7 +28,7 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
self.ssl_version = ssl_version
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
- super(SSLHTTPAdapter, self).__init__(**kwargs)
+ super().__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
kwargs = {
@@ -59,7 +51,7 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
But we still need to take care of when there is a proxy poolmanager
"""
- conn = super(SSLHTTPAdapter, self).get_connection(*args, **kwargs)
+ conn = super().get_connection(*args, **kwargs)
if conn.assert_hostname != self.assert_hostname:
conn.assert_hostname = self.assert_hostname
return conn
@@ -70,4 +62,4 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
return False
if urllib_ver == 'dev':
return True
- return StrictVersion(urllib_ver) > StrictVersion('1.5')
+ return Version(urllib_ver) > Version('1.5')
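The StrictVersion -> Version swap keeps the ordering semantics needed here; a standalone illustration:

from packaging.version import Version

# same result StrictVersion gave for plain X.Y.Z comparisons...
assert Version('1.26.11') > Version('1.5')
# ...and PEP 440 forms like '1.26.0rc1', which StrictVersion rejected,
# now parse and order correctly
assert Version('1.26.0rc1') < Version('1.26.0')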
diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py
index b619103..1b00762 100644
--- a/docker/transport/unixconn.py
+++ b/docker/transport/unixconn.py
@@ -1,7 +1,6 @@
-import six
import requests.adapters
import socket
-from six.moves import http_client as httplib
+import http.client as httplib
from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants
@@ -15,27 +14,15 @@ except ImportError:
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
-class UnixHTTPResponse(httplib.HTTPResponse, object):
- def __init__(self, sock, *args, **kwargs):
- disable_buffering = kwargs.pop('disable_buffering', False)
- if six.PY2:
- # FIXME: We may need to disable buffering on Py3 as well,
- # but there's no clear way to do it at the moment. See:
- # https://github.com/docker/docker-py/issues/1799
- kwargs['buffering'] = not disable_buffering
- super(UnixHTTPResponse, self).__init__(sock, *args, **kwargs)
-
-
-class UnixHTTPConnection(httplib.HTTPConnection, object):
+class UnixHTTPConnection(httplib.HTTPConnection):
def __init__(self, base_url, unix_socket, timeout=60):
- super(UnixHTTPConnection, self).__init__(
+ super().__init__(
'localhost', timeout=timeout
)
self.base_url = base_url
self.unix_socket = unix_socket
self.timeout = timeout
- self.disable_buffering = False
def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@@ -44,20 +31,15 @@ class UnixHTTPConnection(httplib.HTTPConnection, object):
self.sock = sock
def putheader(self, header, *values):
- super(UnixHTTPConnection, self).putheader(header, *values)
- if header == 'Connection' and 'Upgrade' in values:
- self.disable_buffering = True
+ super().putheader(header, *values)
def response_class(self, sock, *args, **kwargs):
- if self.disable_buffering:
- kwargs['disable_buffering'] = True
-
- return UnixHTTPResponse(sock, *args, **kwargs)
+ return httplib.HTTPResponse(sock, *args, **kwargs)
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, base_url, socket_path, timeout=60, maxsize=10):
- super(UnixHTTPConnectionPool, self).__init__(
+ super().__init__(
'localhost', timeout=timeout, maxsize=maxsize
)
self.base_url = base_url
@@ -74,19 +56,22 @@ class UnixHTTPAdapter(BaseHTTPAdapter):
__attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['pools',
'socket_path',
- 'timeout']
+ 'timeout',
+ 'max_pool_size']
def __init__(self, socket_url, timeout=60,
- pool_connections=constants.DEFAULT_NUM_POOLS):
+ pool_connections=constants.DEFAULT_NUM_POOLS,
+ max_pool_size=constants.DEFAULT_MAX_POOL_SIZE):
socket_path = socket_url.replace('http+unix://', '')
if not socket_path.startswith('/'):
socket_path = '/' + socket_path
self.socket_path = socket_path
self.timeout = timeout
+ self.max_pool_size = max_pool_size
self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close()
)
- super(UnixHTTPAdapter, self).__init__()
+ super().__init__()
def get_connection(self, url, proxies=None):
with self.pools.lock:
@@ -95,7 +80,8 @@ class UnixHTTPAdapter(BaseHTTPAdapter):
return pool
pool = UnixHTTPConnectionPool(
- url, self.socket_path, self.timeout
+ url, self.socket_path, self.timeout,
+ maxsize=self.max_pool_size
)
self.pools[url] = pool
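Caller-side sketch of the new pool-size knob (the socket path is the usual default, shown here explicitly):

import docker

# max_pool_size flows through UnixHTTPAdapter into
# UnixHTTPConnectionPool(maxsize=...)
client = docker.DockerClient(
    base_url='unix:///var/run/docker.sock',
    max_pool_size=8,
)
print(client.version()['ApiVersion'])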
diff --git a/docker/types/__init__.py b/docker/types/__init__.py
index 5db330e..b425746 100644
--- a/docker/types/__init__.py
+++ b/docker/types/__init__.py
@@ -1,5 +1,7 @@
# flake8: noqa
-from .containers import ContainerConfig, HostConfig, LogConfig, Ulimit
+from .containers import (
+ ContainerConfig, HostConfig, LogConfig, Ulimit, DeviceRequest
+)
from .daemon import CancellableStream
from .healthcheck import Healthcheck
from .networks import EndpointConfig, IPAMConfig, IPAMPool, NetworkingConfig
diff --git a/docker/types/base.py b/docker/types/base.py
index 6891062..8851f1e 100644
--- a/docker/types/base.py
+++ b/docker/types/base.py
@@ -1,7 +1,4 @@
-import six
-
-
class DictType(dict):
def __init__(self, init):
- for k, v in six.iteritems(init):
+ for k, v in init.items():
self[k] = v
diff --git a/docker/types/containers.py b/docker/types/containers.py
index fd8cab4..84df0f7 100644
--- a/docker/types/containers.py
+++ b/docker/types/containers.py
@@ -1,5 +1,3 @@
-import six
-
from .. import errors
from ..utils.utils import (
convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds,
@@ -10,7 +8,7 @@ from .base import DictType
from .healthcheck import Healthcheck
-class LogConfigTypesEnum(object):
+class LogConfigTypesEnum:
_values = (
'json-file',
'syslog',
@@ -61,7 +59,7 @@ class LogConfig(DictType):
if config and not isinstance(config, dict):
raise ValueError("LogConfig.config must be a dictionary")
- super(LogConfig, self).__init__({
+ super().__init__({
'Type': log_driver_type,
'Config': config
})
@@ -97,8 +95,8 @@ class Ulimit(DictType):
Args:
- name (str): Which ulimit will this apply to. A list of valid names can
- be found `here <http://tinyurl.me/ZWRkM2Ztwlykf>`_.
+ name (str): Which ulimit this will apply to. Valid names can be
+ found in '/etc/security/limits.conf' on a GNU/Linux system.
soft (int): The soft limit for this ulimit. Optional.
hard (int): The hard limit for this ulimit. Optional.
@@ -117,13 +115,13 @@ class Ulimit(DictType):
name = kwargs.get('name', kwargs.get('Name'))
soft = kwargs.get('soft', kwargs.get('Soft'))
hard = kwargs.get('hard', kwargs.get('Hard'))
- if not isinstance(name, six.string_types):
+ if not isinstance(name, str):
raise ValueError("Ulimit.name must be a string")
if soft and not isinstance(soft, int):
raise ValueError("Ulimit.soft must be an integer")
if hard and not isinstance(hard, int):
raise ValueError("Ulimit.hard must be an integer")
- super(Ulimit, self).__init__({
+ super().__init__({
'Name': name,
'Soft': soft,
'Hard': hard
@@ -154,6 +152,104 @@ class Ulimit(DictType):
self['Hard'] = value
+class DeviceRequest(DictType):
+ """
+ Create a device request to be used with
+ :py:meth:`~docker.api.container.ContainerApiMixin.create_host_config`.
+
+ Args:
+
+ driver (str): Which driver to use for this device. Optional.
+ count (int): Number of devices to request. Optional.
+ Set to -1 to request all available devices.
+ device_ids (list): List of strings for device IDs. Optional.
+ Set either ``count`` or ``device_ids``.
+ capabilities (list): List of lists of strings to request
+ capabilities. Optional. The outer list acts as an OR and each
+ sub-list as an AND: the driver will try to fully satisfy at
+ least one of the sub-lists.
+ Available capabilities for the ``nvidia`` driver can be found
+ `here <https://github.com/NVIDIA/nvidia-container-runtime>`_.
+ options (dict): Driver-specific options. Optional.
+ """
+
+ def __init__(self, **kwargs):
+ driver = kwargs.get('driver', kwargs.get('Driver'))
+ count = kwargs.get('count', kwargs.get('Count'))
+ device_ids = kwargs.get('device_ids', kwargs.get('DeviceIDs'))
+ capabilities = kwargs.get('capabilities', kwargs.get('Capabilities'))
+ options = kwargs.get('options', kwargs.get('Options'))
+
+ if driver is None:
+ driver = ''
+ elif not isinstance(driver, str):
+ raise ValueError('DeviceRequest.driver must be a string')
+ if count is None:
+ count = 0
+ elif not isinstance(count, int):
+ raise ValueError('DeviceRequest.count must be an integer')
+ if device_ids is None:
+ device_ids = []
+ elif not isinstance(device_ids, list):
+ raise ValueError('DeviceRequest.device_ids must be a list')
+ if capabilities is None:
+ capabilities = []
+ elif not isinstance(capabilities, list):
+ raise ValueError('DeviceRequest.capabilities must be a list')
+ if options is None:
+ options = {}
+ elif not isinstance(options, dict):
+ raise ValueError('DeviceRequest.options must be a dict')
+
+ super().__init__({
+ 'Driver': driver,
+ 'Count': count,
+ 'DeviceIDs': device_ids,
+ 'Capabilities': capabilities,
+ 'Options': options
+ })
+
+ @property
+ def driver(self):
+ return self['Driver']
+
+ @driver.setter
+ def driver(self, value):
+ self['Driver'] = value
+
+ @property
+ def count(self):
+ return self['Count']
+
+ @count.setter
+ def count(self, value):
+ self['Count'] = value
+
+ @property
+ def device_ids(self):
+ return self['DeviceIDs']
+
+ @device_ids.setter
+ def device_ids(self, value):
+ self['DeviceIDs'] = value
+
+ @property
+ def capabilities(self):
+ return self['Capabilities']
+
+ @capabilities.setter
+ def capabilities(self, value):
+ self['Capabilities'] = value
+
+ @property
+ def options(self):
+ return self['Options']
+
+ @options.setter
+ def options(self, value):
+ self['Options'] = value
+
+
class HostConfig(dict):
def __init__(self, version, binds=None, port_bindings=None,
lxc_conf=None, publish_all_ports=False, links=None,
@@ -176,7 +272,8 @@ class HostConfig(dict):
volume_driver=None, cpu_count=None, cpu_percent=None,
nano_cpus=None, cpuset_mems=None, runtime=None, mounts=None,
cpu_rt_period=None, cpu_rt_runtime=None,
- device_cgroup_rules=None):
+ device_cgroup_rules=None, device_requests=None,
+ cgroupns=None):
if mem_limit is not None:
self['Memory'] = parse_bytes(mem_limit)
@@ -199,7 +296,7 @@ class HostConfig(dict):
self['MemorySwappiness'] = mem_swappiness
if shm_size is not None:
- if isinstance(shm_size, six.string_types):
+ if isinstance(shm_size, str):
shm_size = parse_bytes(shm_size)
self['ShmSize'] = shm_size
@@ -236,10 +333,11 @@ class HostConfig(dict):
if dns_search:
self['DnsSearch'] = dns_search
- if network_mode:
- self['NetworkMode'] = network_mode
- elif network_mode is None:
- self['NetworkMode'] = 'default'
+ if network_mode == 'host' and port_bindings:
+ raise host_config_incompatible_error(
+ 'network_mode', 'host', 'port_bindings'
+ )
+ self['NetworkMode'] = network_mode or 'default'
if restart_policy:
if not isinstance(restart_policy, dict):
@@ -259,7 +357,7 @@ class HostConfig(dict):
self['Devices'] = parse_devices(devices)
if group_add:
- self['GroupAdd'] = [six.text_type(grp) for grp in group_add]
+ self['GroupAdd'] = [str(grp) for grp in group_add]
if dns is not None:
self['Dns'] = dns
@@ -279,11 +377,11 @@ class HostConfig(dict):
if not isinstance(sysctls, dict):
raise host_config_type_error('sysctls', sysctls, 'dict')
self['Sysctls'] = {}
- for k, v in six.iteritems(sysctls):
- self['Sysctls'][k] = six.text_type(v)
+ for k, v in sysctls.items():
+ self['Sysctls'][k] = str(v)
if volumes_from is not None:
- if isinstance(volumes_from, six.string_types):
+ if isinstance(volumes_from, str):
volumes_from = volumes_from.split(',')
self['VolumesFrom'] = volumes_from
@@ -305,7 +403,7 @@ class HostConfig(dict):
if isinstance(lxc_conf, dict):
formatted = []
- for k, v in six.iteritems(lxc_conf):
+ for k, v in lxc_conf.items():
formatted.append({'Key': k, 'Value': str(v)})
lxc_conf = formatted
@@ -460,7 +558,7 @@ class HostConfig(dict):
self["PidsLimit"] = pids_limit
if isolation:
- if not isinstance(isolation, six.string_types):
+ if not isinstance(isolation, str):
raise host_config_type_error('isolation', isolation, 'string')
if version_lt(version, '1.24'):
raise host_config_version_error('isolation', '1.24')
@@ -510,7 +608,7 @@ class HostConfig(dict):
self['CpuPercent'] = cpu_percent
if nano_cpus:
- if not isinstance(nano_cpus, six.integer_types):
+ if not isinstance(nano_cpus, int):
raise host_config_type_error('nano_cpus', nano_cpus, 'int')
if version_lt(version, '1.25'):
raise host_config_version_error('nano_cpus', '1.25')
@@ -536,6 +634,22 @@ class HostConfig(dict):
)
self['DeviceCgroupRules'] = device_cgroup_rules
+ if device_requests is not None:
+ if version_lt(version, '1.40'):
+ raise host_config_version_error('device_requests', '1.40')
+ if not isinstance(device_requests, list):
+ raise host_config_type_error(
+ 'device_requests', device_requests, 'list'
+ )
+ self['DeviceRequests'] = []
+ for req in device_requests:
+ if not isinstance(req, DeviceRequest):
+ req = DeviceRequest(**req)
+ self['DeviceRequests'].append(req)
+
+ if cgroupns:
+ self['CgroupnsMode'] = cgroupns
+
def host_config_type_error(param, param_value, expected):
error_msg = 'Invalid type for {0} param: expected {1} but found {2}'
@@ -553,6 +667,13 @@ def host_config_value_error(param, param_value):
return ValueError(error_msg.format(param, param_value))
+def host_config_incompatible_error(param, param_value, incompatible_param):
+ error_msg = '"{1}" {0} is incompatible with {2}'
+ return errors.InvalidArgument(
+ error_msg.format(param, param_value, incompatible_param)
+ )
+
+
class ContainerConfig(dict):
def __init__(
self, version, image, command, hostname=None, user=None, detach=False,
@@ -580,17 +701,17 @@ class ContainerConfig(dict):
'version 1.29'
)
- if isinstance(command, six.string_types):
+ if isinstance(command, str):
command = split_command(command)
- if isinstance(entrypoint, six.string_types):
+ if isinstance(entrypoint, str):
entrypoint = split_command(entrypoint)
if isinstance(environment, dict):
environment = format_environment(environment)
if isinstance(labels, list):
- labels = dict((lbl, six.text_type('')) for lbl in labels)
+ labels = {lbl: '' for lbl in labels}
if isinstance(ports, list):
exposed_ports = {}
@@ -601,10 +722,10 @@ class ContainerConfig(dict):
if len(port_definition) == 2:
proto = port_definition[1]
port = port_definition[0]
- exposed_ports['{0}/{1}'.format(port, proto)] = {}
+ exposed_ports[f'{port}/{proto}'] = {}
ports = exposed_ports
- if isinstance(volumes, six.string_types):
+ if isinstance(volumes, str):
volumes = [volumes, ]
if isinstance(volumes, list):
@@ -633,7 +754,7 @@ class ContainerConfig(dict):
'Hostname': hostname,
'Domainname': domainname,
'ExposedPorts': ports,
- 'User': six.text_type(user) if user is not None else None,
+ 'User': str(user) if user is not None else None,
'Tty': tty,
'OpenStdin': stdin_open,
'StdinOnce': stdin_once,
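A usage sketch tying DeviceRequest (and the Ulimit type documented above) to the high-level API; the image and limit values are illustrative, and the GPU request assumes an NVIDIA runtime is installed:

import docker
from docker.types import DeviceRequest, Ulimit

client = docker.from_env()

# count=-1 requests every available device; capabilities is an OR of
# AND-lists, so [['gpu']] means "any device satisfying {gpu}"
gpus = DeviceRequest(driver='nvidia', count=-1, capabilities=[['gpu']])

client.containers.run(
    'nvidia/cuda:11.0-base', 'nvidia-smi',
    device_requests=[gpus],
    ulimits=[Ulimit(name='nofile', soft=1024, hard=2048)],
    remove=True,
)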
diff --git a/docker/types/daemon.py b/docker/types/daemon.py
index af3e5bc..10e8101 100644
--- a/docker/types/daemon.py
+++ b/docker/types/daemon.py
@@ -8,7 +8,7 @@ except ImportError:
from ..errors import DockerException
-class CancellableStream(object):
+class CancellableStream:
"""
Stream wrapper for real-time events, logs, etc. from the server.
@@ -32,7 +32,7 @@ class CancellableStream(object):
return next(self._stream)
except urllib3.exceptions.ProtocolError:
raise StopIteration
- except socket.error:
+ except OSError:
raise StopIteration
next = __next__
diff --git a/docker/types/healthcheck.py b/docker/types/healthcheck.py
index 9815018..dfc88a9 100644
--- a/docker/types/healthcheck.py
+++ b/docker/types/healthcheck.py
@@ -1,7 +1,5 @@
from .base import DictType
-import six
-
class Healthcheck(DictType):
"""
@@ -31,7 +29,7 @@ class Healthcheck(DictType):
"""
def __init__(self, **kwargs):
test = kwargs.get('test', kwargs.get('Test'))
- if isinstance(test, six.string_types):
+ if isinstance(test, str):
test = ["CMD-SHELL", test]
interval = kwargs.get('interval', kwargs.get('Interval'))
@@ -39,7 +37,7 @@ class Healthcheck(DictType):
retries = kwargs.get('retries', kwargs.get('Retries'))
start_period = kwargs.get('start_period', kwargs.get('StartPeriod'))
- super(Healthcheck, self).__init__({
+ super().__init__({
'Test': test,
'Interval': interval,
'Timeout': timeout,
@@ -53,7 +51,7 @@ class Healthcheck(DictType):
@test.setter
def test(self, value):
- if isinstance(value, six.string_types):
+ if isinstance(value, str):
value = ["CMD-SHELL", value]
self['Test'] = value
diff --git a/docker/types/networks.py b/docker/types/networks.py
index 1c7b2c9..1370dc1 100644
--- a/docker/types/networks.py
+++ b/docker/types/networks.py
@@ -4,7 +4,7 @@ from ..utils import normalize_links, version_lt
class EndpointConfig(dict):
def __init__(self, version, aliases=None, links=None, ipv4_address=None,
- ipv6_address=None, link_local_ips=None):
+ ipv6_address=None, link_local_ips=None, driver_opt=None):
if version_lt(version, '1.22'):
raise errors.InvalidVersion(
'Endpoint config is not supported for API version < 1.22'
@@ -33,6 +33,15 @@ class EndpointConfig(dict):
if ipam_config:
self['IPAMConfig'] = ipam_config
+ if driver_opt:
+ if version_lt(version, '1.32'):
+ raise errors.InvalidVersion(
+ 'DriverOpts is not supported for API version < 1.32'
+ )
+ if not isinstance(driver_opt, dict):
+ raise TypeError('driver_opt must be a dictionary')
+ self['DriverOpts'] = driver_opt
+
class NetworkingConfig(dict):
def __init__(self, endpoints_config=None):
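A sketch of the new DriverOpts support from the model layer (the option key is hypothetical; the check above requires API version >= 1.32):

import docker

client = docker.from_env()
network = client.networks.create('example-net')
container = client.containers.create('busybox', 'sleep 300')

# driver_opt is forwarded into EndpointConfig as DriverOpts
network.connect(container, driver_opt={'com.example.opt': 'value'})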
diff --git a/docker/types/services.py b/docker/types/services.py
index 05dda15..c2fce9f 100644
--- a/docker/types/services.py
+++ b/docker/types/services.py
@@ -1,5 +1,3 @@
-import six
-
from .. import errors
from ..constants import IS_WINDOWS_PLATFORM
from ..utils import (
@@ -112,16 +110,21 @@ class ContainerSpec(dict):
containers. Only used for Windows containers.
init (boolean): Run an init inside the container that forwards signals
and reaps processes.
+ cap_add (:py:class:`list`): A list of kernel capabilities to add to the
+ default set for the container.
+ cap_drop (:py:class:`list`): A list of kernel capabilities to drop from
+ the default set for the container.
"""
def __init__(self, image, command=None, args=None, hostname=None, env=None,
workdir=None, user=None, labels=None, mounts=None,
stop_grace_period=None, secrets=None, tty=None, groups=None,
open_stdin=None, read_only=None, stop_signal=None,
healthcheck=None, hosts=None, dns_config=None, configs=None,
- privileges=None, isolation=None, init=None):
+ privileges=None, isolation=None, init=None, cap_add=None,
+ cap_drop=None):
self['Image'] = image
- if isinstance(command, six.string_types):
+ if isinstance(command, str):
command = split_command(command)
self['Command'] = command
self['Args'] = args
@@ -151,7 +154,7 @@ class ContainerSpec(dict):
if mounts is not None:
parsed_mounts = []
for mount in mounts:
- if isinstance(mount, six.string_types):
+ if isinstance(mount, str):
parsed_mounts.append(Mount.parse_mount_string(mount))
else:
# If mount already parsed
@@ -188,6 +191,18 @@ class ContainerSpec(dict):
if init is not None:
self['Init'] = init
+ if cap_add is not None:
+ if not isinstance(cap_add, list):
+ raise TypeError('cap_add must be a list')
+
+ self['CapabilityAdd'] = cap_add
+
+ if cap_drop is not None:
+ if not isinstance(cap_drop, list):
+ raise TypeError('cap_drop must be a list')
+
+ self['CapabilityDrop'] = cap_drop
+
class Mount(dict):
"""
@@ -224,7 +239,7 @@ class Mount(dict):
self['Source'] = source
if type not in ('bind', 'volume', 'tmpfs', 'npipe'):
raise errors.InvalidArgument(
- 'Unsupported mount type: "{}"'.format(type)
+ f'Unsupported mount type: "{type}"'
)
self['Type'] = type
self['ReadOnly'] = read_only
@@ -260,7 +275,7 @@ class Mount(dict):
elif type == 'tmpfs':
tmpfs_opts = {}
if tmpfs_mode:
- if not isinstance(tmpfs_mode, six.integer_types):
+ if not isinstance(tmpfs_mode, int):
raise errors.InvalidArgument(
'tmpfs_mode must be an integer'
)
@@ -280,7 +295,7 @@ class Mount(dict):
parts = string.split(':')
if len(parts) > 3:
raise errors.InvalidArgument(
- 'Invalid mount format "{0}"'.format(string)
+ f'Invalid mount format "{string}"'
)
if len(parts) == 1:
return cls(target=parts[0], source=None)
@@ -347,7 +362,7 @@ def _convert_generic_resources_dict(generic_resources):
' (found {})'.format(type(generic_resources))
)
resources = []
- for kind, value in six.iteritems(generic_resources):
+ for kind, value in generic_resources.items():
resource_type = None
if isinstance(value, int):
resource_type = 'DiscreteResourceSpec'
@@ -421,7 +436,8 @@ class UpdateConfig(dict):
class RollbackConfig(UpdateConfig):
"""
- Used to specify the way containe rollbacks should be performed by a service
+ Used to specify the way container rollbacks should be performed by a
+ service
Args:
parallelism (int): Maximum number of tasks to be rolled back in one
@@ -443,7 +459,7 @@ class RollbackConfig(UpdateConfig):
pass
-class RestartConditionTypesEnum(object):
+class RestartConditionTypesEnum:
_values = (
'none',
'on-failure',
@@ -474,7 +490,7 @@ class RestartPolicy(dict):
max_attempts=0, window=0):
if condition not in self.condition_types._values:
raise TypeError(
- 'Invalid RestartPolicy condition {0}'.format(condition)
+ f'Invalid RestartPolicy condition {condition}'
)
self['Condition'] = condition
@@ -533,7 +549,7 @@ def convert_service_ports(ports):
)
result = []
- for k, v in six.iteritems(ports):
+ for k, v in ports.items():
port_spec = {
'Protocol': 'tcp',
'PublishedPort': k
@@ -659,10 +675,12 @@ class Placement(dict):
are provided in order from highest to lowest precedence and
are expressed as ``(strategy, descriptor)`` tuples. See
:py:class:`PlacementPreference` for details.
+ maxreplicas (int): Maximum number of replicas per node
platforms (:py:class:`list` of tuple): A list of platforms
expressed as ``(arch, os)`` tuples
"""
- def __init__(self, constraints=None, preferences=None, platforms=None):
+ def __init__(self, constraints=None, preferences=None, platforms=None,
+ maxreplicas=None):
if constraints is not None:
self['Constraints'] = constraints
if preferences is not None:
@@ -671,6 +689,8 @@ class Placement(dict):
if isinstance(pref, tuple):
pref = PlacementPreference(*pref)
self['Preferences'].append(pref)
+ if maxreplicas is not None:
+ self['MaxReplicas'] = maxreplicas
if platforms:
self['Platforms'] = []
for plat in platforms:
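A sketch combining the new ContainerSpec capability fields with Placement.MaxReplicas (assumes a swarm-enabled daemon; names are illustrative):

import docker

client = docker.from_env()
client.services.create(
    'nginx:alpine',
    name='web',
    cap_add=['CAP_NET_BIND_SERVICE'],  # ContainerSpec CapabilityAdd
    cap_drop=['CAP_SYS_CHROOT'],       # ContainerSpec CapabilityDrop
    maxreplicas=1,                     # Placement MaxReplicas
)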
diff --git a/docker/utils/build.py b/docker/utils/build.py
index 4fa5751..59564c4 100644
--- a/docker/utils/build.py
+++ b/docker/utils/build.py
@@ -4,8 +4,6 @@ import re
import tarfile
import tempfile
-import six
-
from .fnmatch import fnmatch
from ..constants import IS_WINDOWS_PLATFORM
@@ -69,7 +67,7 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj)
if files is None:
files = build_file_list(root)
- extra_names = set(e[0] for e in extra_files)
+ extra_names = {e[0] for e in extra_files}
for path in files:
if path in extra_names:
# Extra files override context files with the same name
@@ -95,9 +93,9 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
try:
with open(full_path, 'rb') as f:
t.addfile(i, f)
- except IOError:
- raise IOError(
- 'Can not read file in context: {}'.format(full_path)
+ except OSError:
+ raise OSError(
+ f'Can not read file in context: {full_path}'
)
else:
# Directories, FIFOs, symlinks... don't need to be read.
@@ -105,8 +103,9 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
for name, contents in extra_files:
info = tarfile.TarInfo(name)
- info.size = len(contents)
- t.addfile(info, io.BytesIO(contents.encode('utf-8')))
+ contents_encoded = contents.encode('utf-8')
+ info.size = len(contents_encoded)
+ t.addfile(info, io.BytesIO(contents_encoded))
t.close()
fileobj.seek(0)
@@ -118,12 +117,8 @@ def mkbuildcontext(dockerfile):
t = tarfile.open(mode='w', fileobj=f)
if isinstance(dockerfile, io.StringIO):
dfinfo = tarfile.TarInfo('Dockerfile')
- if six.PY3:
- raise TypeError('Please use io.BytesIO to create in-memory '
- 'Dockerfiles with Python 3')
- else:
- dfinfo.size = len(dockerfile.getvalue())
- dockerfile.seek(0)
+ raise TypeError('Please use io.BytesIO to create in-memory '
+ 'Dockerfiles with Python 3')
elif isinstance(dockerfile, io.BytesIO):
dfinfo = tarfile.TarInfo('Dockerfile')
dfinfo.size = len(dockerfile.getvalue())
@@ -153,7 +148,7 @@ def walk(root, patterns, default=True):
# Heavily based on
# https://github.com/moby/moby/blob/master/pkg/fileutils/fileutils.go
-class PatternMatcher(object):
+class PatternMatcher:
def __init__(self, patterns):
self.patterns = list(filter(
lambda p: p.dirs, [Pattern(p) for p in patterns]
@@ -211,13 +206,12 @@ class PatternMatcher(object):
break
if skip:
continue
- for sub in rec_walk(cur):
- yield sub
+ yield from rec_walk(cur)
return rec_walk(root)
-class Pattern(object):
+class Pattern:
def __init__(self, pattern_str):
self.exclusion = False
if pattern_str.startswith('!'):
@@ -230,6 +224,9 @@ class Pattern(object):
@classmethod
def normalize(cls, p):
+ # Remove trailing spaces
+ p = p.strip()
+
# Leading and trailing slashes are not relevant. Yes,
# "foo.py/" must exclude the "foo.py" regular file. "."
# components are not relevant either, even if the whole
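The trailing-space trim in Pattern.normalize is easiest to see with the (internal) helper itself; a minimal sketch:

from docker.utils.build import Pattern

# ' ignored-with-spaces ' is normalized to 'ignored-with-spaces'
p = Pattern(' ignored-with-spaces ')
assert p.match('ignored-with-spaces')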
diff --git a/docker/utils/config.py b/docker/utils/config.py
index 82a0e2a..8e24959 100644
--- a/docker/utils/config.py
+++ b/docker/utils/config.py
@@ -18,11 +18,11 @@ def find_config_file(config_path=None):
os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4
]))
- log.debug("Trying paths: {0}".format(repr(paths)))
+ log.debug(f"Trying paths: {repr(paths)}")
for path in paths:
if os.path.exists(path):
- log.debug("Found file at path: {0}".format(path))
+ log.debug(f"Found file at path: {path}")
return path
log.debug("No config file found")
@@ -57,7 +57,7 @@ def load_general_config(config_path=None):
try:
with open(config_file) as f:
return json.load(f)
- except (IOError, ValueError) as e:
+ except (OSError, ValueError) as e:
# In the case of a legacy `.dockercfg` file, we won't
# be able to load any JSON data.
log.debug(e)
diff --git a/docker/utils/decorators.py b/docker/utils/decorators.py
index c975d4b..cf1baf4 100644
--- a/docker/utils/decorators.py
+++ b/docker/utils/decorators.py
@@ -27,7 +27,7 @@ def minimum_version(version):
def wrapper(self, *args, **kwargs):
if utils.version_lt(self._version, version):
raise errors.InvalidVersion(
- '{0} is not available for version < {1}'.format(
+ '{} is not available for version < {}'.format(
f.__name__, version
)
)
diff --git a/docker/utils/fnmatch.py b/docker/utils/fnmatch.py
index cc940a2..90e9f60 100644
--- a/docker/utils/fnmatch.py
+++ b/docker/utils/fnmatch.py
@@ -108,7 +108,7 @@ def translate(pat):
stuff = '^' + stuff[1:]
elif stuff[0] == '^':
stuff = '\\' + stuff
- res = '%s[%s]' % (res, stuff)
+ res = f'{res}[{stuff}]'
else:
res = res + re.escape(c)
diff --git a/docker/utils/json_stream.py b/docker/utils/json_stream.py
index addffdf..f384175 100644
--- a/docker/utils/json_stream.py
+++ b/docker/utils/json_stream.py
@@ -1,11 +1,6 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import json
import json.decoder
-import six
-
from ..errors import StreamParseError
@@ -20,7 +15,7 @@ def stream_as_text(stream):
instead of byte streams.
"""
for data in stream:
- if not isinstance(data, six.text_type):
+ if not isinstance(data, str):
data = data.decode('utf-8', 'replace')
yield data
@@ -46,8 +41,8 @@ def json_stream(stream):
return split_buffer(stream, json_splitter, json_decoder.decode)
-def line_splitter(buffer, separator=u'\n'):
- index = buffer.find(six.text_type(separator))
+def line_splitter(buffer, separator='\n'):
+ index = buffer.find(str(separator))
if index == -1:
return None
return buffer[:index + 1], buffer[index + 1:]
@@ -61,7 +56,7 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
of the input.
"""
splitter = splitter or line_splitter
- buffered = six.text_type('')
+ buffered = ''
for data in stream_as_text(stream):
buffered += data
diff --git a/docker/utils/ports.py b/docker/utils/ports.py
index a50cc02..e813936 100644
--- a/docker/utils/ports.py
+++ b/docker/utils/ports.py
@@ -3,7 +3,7 @@ import re
PORT_SPEC = re.compile(
"^" # Match full string
"(" # External part
- r"((?P<host>[a-fA-F\d.:]+):)?" # Address
+ r"(\[?(?P<host>[a-fA-F\d.:]+)\]?:)?" # Address
r"(?P<ext>[\d]*)(-(?P<ext_end>[\d]+))?:" # External range
")?"
r"(?P<int>[\d]+)(-(?P<int_end>[\d]+))?" # Internal range
@@ -49,7 +49,7 @@ def port_range(start, end, proto, randomly_available_port=False):
if not end:
return [start + proto]
if randomly_available_port:
- return ['{}-{}'.format(start, end) + proto]
+ return [f'{start}-{end}' + proto]
return [str(port) + proto for port in range(int(start), int(end) + 1)]
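The widened PORT_SPEC accepts (and strips) square brackets around IPv6 bind addresses; a quick sketch of the expected parse:

from docker.utils.ports import split_port

internal, external = split_port('[2001:db8::1]:8080:80/tcp')
print(internal)  # roughly ['80/tcp']
print(external)  # roughly [('2001:db8::1', '8080')]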
diff --git a/docker/utils/socket.py b/docker/utils/socket.py
index 7ba9505..4a2076e 100644
--- a/docker/utils/socket.py
+++ b/docker/utils/socket.py
@@ -4,8 +4,6 @@ import select
import socket as pysocket
import struct
-import six
-
try:
from ..transport import NpipeSocket
except ImportError:
@@ -27,16 +25,16 @@ def read(socket, n=4096):
recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK)
- if six.PY3 and not isinstance(socket, NpipeSocket):
+ if not isinstance(socket, NpipeSocket):
select.select([socket], [], [])
try:
if hasattr(socket, 'recv'):
return socket.recv(n)
- if six.PY3 and isinstance(socket, getattr(pysocket, 'SocketIO')):
+ if isinstance(socket, getattr(pysocket, 'SocketIO')):
return socket.read(n)
return os.read(socket.fileno(), n)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno not in recoverable_errors:
raise
@@ -46,7 +44,7 @@ def read_exactly(socket, n):
Reads exactly n bytes from socket
Raises SocketError if there isn't enough data
"""
- data = six.binary_type()
+ data = bytes()
while len(data) < n:
next_data = read(socket, n - len(data))
if not next_data:
@@ -134,7 +132,7 @@ def consume_socket_output(frames, demux=False):
if demux is False:
# If the streams are multiplexed, the generator returns strings, that
# we just need to concatenate.
- return six.binary_type().join(frames)
+ return bytes().join(frames)
# If the streams are demultiplexed, the generator yields tuples
# (stdout, stderr)
@@ -166,4 +164,4 @@ def demux_adaptor(stream_id, data):
elif stream_id == STDERR:
return (None, data)
else:
- raise ValueError('{0} is not a valid stream'.format(stream_id))
+ raise ValueError(f'{stream_id} is not a valid stream')
diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index 447760b..7b2bbf4 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -1,26 +1,27 @@
import base64
+import collections
import json
import os
import os.path
import shlex
import string
from datetime import datetime
-from distutils.version import StrictVersion
-
-import six
+from packaging.version import Version
from .. import errors
-from .. import tls
from ..constants import DEFAULT_HTTP_HOST
from ..constants import DEFAULT_UNIX_SOCKET
from ..constants import DEFAULT_NPIPE
from ..constants import BYTE_UNITS
+from ..tls import TLSConfig
+
+from urllib.parse import urlparse, urlunparse
+
-if six.PY2:
- from urllib import splitnport
- from urlparse import urlparse
-else:
- from urllib.parse import splitnport, urlparse
+URLComponents = collections.namedtuple(
+ 'URLComponents',
+ 'scheme netloc url params query fragment',
+)
def create_ipam_pool(*args, **kwargs):
@@ -39,8 +40,7 @@ def create_ipam_config(*args, **kwargs):
def decode_json_header(header):
data = base64.b64decode(header)
- if six.PY3:
- data = data.decode('utf-8')
+ data = data.decode('utf-8')
return json.loads(data)
@@ -56,8 +56,8 @@ def compare_version(v1, v2):
>>> compare_version(v2, v2)
0
"""
- s1 = StrictVersion(v1)
- s2 = StrictVersion(v2)
+ s1 = Version(v1)
+ s2 = Version(v2)
if s1 == s2:
return 0
elif s1 > s2:
@@ -80,7 +80,7 @@ def _convert_port_binding(binding):
if len(binding) == 2:
result['HostPort'] = binding[1]
result['HostIp'] = binding[0]
- elif isinstance(binding[0], six.string_types):
+ elif isinstance(binding[0], str):
result['HostIp'] = binding[0]
else:
result['HostPort'] = binding[0]
@@ -104,7 +104,7 @@ def _convert_port_binding(binding):
def convert_port_bindings(port_bindings):
result = {}
- for k, v in six.iteritems(port_bindings):
+ for k, v in iter(port_bindings.items()):
key = str(k)
if '/' not in key:
key += '/tcp'
@@ -121,7 +121,7 @@ def convert_volume_binds(binds):
result = []
for k, v in binds.items():
- if isinstance(k, six.binary_type):
+ if isinstance(k, bytes):
k = k.decode('utf-8')
if isinstance(v, dict):
@@ -132,7 +132,7 @@ def convert_volume_binds(binds):
)
bind = v['bind']
- if isinstance(bind, six.binary_type):
+ if isinstance(bind, bytes):
bind = bind.decode('utf-8')
if 'ro' in v:
@@ -143,13 +143,13 @@ def convert_volume_binds(binds):
mode = 'rw'
result.append(
- six.text_type('{0}:{1}:{2}').format(k, bind, mode)
+ f'{k}:{bind}:{mode}'
)
else:
- if isinstance(v, six.binary_type):
+ if isinstance(v, bytes):
v = v.decode('utf-8')
result.append(
- six.text_type('{0}:{1}:rw').format(k, v)
+ f'{k}:{v}:rw'
)
return result
@@ -166,7 +166,7 @@ def convert_tmpfs_mounts(tmpfs):
result = {}
for mount in tmpfs:
- if isinstance(mount, six.string_types):
+ if isinstance(mount, str):
if ":" in mount:
name, options = mount.split(":", 1)
else:
@@ -191,7 +191,7 @@ def convert_service_networks(networks):
result = []
for n in networks:
- if isinstance(n, six.string_types):
+ if isinstance(n, str):
n = {'Target': n}
result.append(n)
return result
@@ -208,10 +208,6 @@ def parse_repository_tag(repo_name):
def parse_host(addr, is_win32=False, tls=False):
- path = ''
- port = None
- host = None
-
# Sensible defaults
if not addr and is_win32:
return DEFAULT_NPIPE
@@ -240,14 +236,14 @@ def parse_host(addr, is_win32=False, tls=False):
if proto not in ('tcp', 'unix', 'npipe', 'ssh'):
raise errors.DockerException(
- "Invalid bind address protocol: {}".format(addr)
+ f"Invalid bind address protocol: {addr}"
)
if proto == 'tcp' and not parsed_url.netloc:
# "tcp://" is exceptionally disallowed by convention;
# omitting a hostname for other protocols is fine
raise errors.DockerException(
- 'Invalid bind address format: {}'.format(addr)
+ f'Invalid bind address format: {addr}'
)
if any([
@@ -255,7 +251,7 @@ def parse_host(addr, is_win32=False, tls=False):
parsed_url.password
]):
raise errors.DockerException(
- 'Invalid bind address format: {}'.format(addr)
+ f'Invalid bind address format: {addr}'
)
if parsed_url.path and proto == 'ssh':
@@ -270,20 +266,20 @@ def parse_host(addr, is_win32=False, tls=False):
# to be valid and equivalent to unix:///path
path = '/'.join((parsed_url.hostname, path))
+ netloc = parsed_url.netloc
if proto in ('tcp', 'ssh'):
- # parsed_url.hostname strips brackets from IPv6 addresses,
- # which can be problematic hence our use of splitnport() instead.
- host, port = splitnport(parsed_url.netloc)
- if port is None or port < 0:
+ port = parsed_url.port or 0
+ if port <= 0:
if proto != 'ssh':
raise errors.DockerException(
'Invalid bind address format: port is required:'
' {}'.format(addr)
)
port = 22
+ netloc = f'{parsed_url.netloc}:{port}'
- if not host:
- host = DEFAULT_HTTP_HOST
+ if not parsed_url.hostname:
+ netloc = f'{DEFAULT_HTTP_HOST}:{port}'
# Rewrite schemes to fit library internals (requests adapters)
if proto == 'tcp':
@@ -292,8 +288,16 @@ def parse_host(addr, is_win32=False, tls=False):
proto = 'http+unix'
if proto in ('http+unix', 'npipe'):
- return "{}://{}".format(proto, path).rstrip('/')
- return '{0}://{1}:{2}{3}'.format(proto, host, port, path).rstrip('/')
+ return f"{proto}://{path}".rstrip('/')
+
+ return urlunparse(URLComponents(
+ scheme=proto,
+ netloc=netloc,
+ url=path,
+ params='',
+ query='',
+ fragment='',
+ )).rstrip('/')
def parse_devices(devices):
@@ -302,9 +306,9 @@ def parse_devices(devices):
if isinstance(device, dict):
device_list.append(device)
continue
- if not isinstance(device, six.string_types):
+ if not isinstance(device, str):
raise errors.DockerException(
- 'Invalid device type {0}'.format(type(device))
+ f'Invalid device type {type(device)}'
)
device_mapping = device.split(':')
if device_mapping:
@@ -358,7 +362,7 @@ def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None):
# so if it's not set already then set it to false.
assert_hostname = False
- params['tls'] = tls.TLSConfig(
+ params['tls'] = TLSConfig(
client_cert=(os.path.join(cert_path, 'cert.pem'),
os.path.join(cert_path, 'key.pem')),
ca_cert=os.path.join(cert_path, 'ca.pem'),
@@ -372,13 +376,13 @@ def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None):
def convert_filters(filters):
result = {}
- for k, v in six.iteritems(filters):
+ for k, v in iter(filters.items()):
if isinstance(v, bool):
v = 'true' if v else 'false'
if not isinstance(v, list):
v = [v, ]
result[k] = [
- str(item) if not isinstance(item, six.string_types) else item
+ str(item) if not isinstance(item, str) else item
for item in v
]
return json.dumps(result)
@@ -391,7 +395,7 @@ def datetime_to_timestamp(dt):
def parse_bytes(s):
- if isinstance(s, six.integer_types + (float,)):
+ if isinstance(s, (int, float,)):
return s
if len(s) == 0:
return 0
@@ -412,10 +416,10 @@ def parse_bytes(s):
if suffix in units.keys() or suffix.isdigit():
try:
- digits = int(digits_part)
+ digits = float(digits_part)
except ValueError:
raise errors.DockerException(
- 'Failed converting the string value for memory ({0}) to'
+ 'Failed converting the string value for memory ({}) to'
' an integer.'.format(digits_part)
)
@@ -423,7 +427,7 @@ def parse_bytes(s):
s = int(digits * units[suffix])
else:
raise errors.DockerException(
- 'The specified value for memory ({0}) should specify the'
+ 'The specified value for memory ({}) should specify the'
' units. The postfix should be one of the `b` `k` `m` `g`'
' characters'.format(s)
)
@@ -433,9 +437,9 @@ def parse_bytes(s):
def normalize_links(links):
if isinstance(links, dict):
- links = six.iteritems(links)
+ links = iter(links.items())
- return ['{0}:{1}'.format(k, v) if v else k for k, v in sorted(links)]
+ return [f'{k}:{v}' if v else k for k, v in sorted(links)]
def parse_env_file(env_file):
@@ -445,7 +449,7 @@ def parse_env_file(env_file):
"""
environment = {}
- with open(env_file, 'r') as f:
+ with open(env_file) as f:
for line in f:
if line[0] == '#':
@@ -461,15 +465,13 @@ def parse_env_file(env_file):
environment[k] = v
else:
raise errors.DockerException(
- 'Invalid line in environment file {0}:\n{1}'.format(
+ 'Invalid line in environment file {}:\n{}'.format(
env_file, line))
return environment
def split_command(command):
- if six.PY2 and not isinstance(command, six.binary_type):
- command = command.encode('utf-8')
return shlex.split(command)
@@ -477,22 +479,22 @@ def format_environment(environment):
def format_env(key, value):
if value is None:
return key
- if isinstance(value, six.binary_type):
+ if isinstance(value, bytes):
value = value.decode('utf-8')
- return u'{key}={value}'.format(key=key, value=value)
- return [format_env(*var) for var in six.iteritems(environment)]
+ return f'{key}={value}'
+ return [format_env(*var) for var in iter(environment.items())]
def format_extra_hosts(extra_hosts, task=False):
# Use format dictated by Swarm API if container is part of a task
if task:
return [
- '{} {}'.format(v, k) for k, v in sorted(six.iteritems(extra_hosts))
+ f'{v} {k}' for k, v in sorted(iter(extra_hosts.items()))
]
return [
- '{}:{}'.format(k, v) for k, v in sorted(six.iteritems(extra_hosts))
+ f'{k}:{v}' for k, v in sorted(iter(extra_hosts.items()))
]
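Two behavioral notes from this hunk, sketched (hostname illustrative):

from docker.utils import parse_bytes, parse_host

# ssh:// URLs keep their netloc intact and default to port 22
print(parse_host('ssh://user@example.com'))  # ssh://user@example.com:22

# parse_bytes now accepts fractional values thanks to float() conversion
print(parse_bytes('1.5g'))  # 1610612736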
diff --git a/docker/version.py b/docker/version.py
index a754609..88ee8b0 100644
--- a/docker/version.py
+++ b/docker/version.py
@@ -1,2 +1,2 @@
-version = "4.3.0-dev"
-version_info = tuple([int(d) for d in version.split("-")[0].split(".")])
+version = "6.0.0-dev"
+version_info = tuple(int(d) for d in version.split("-")[0].split("."))
diff --git a/docs-requirements.txt b/docs-requirements.txt
index d69373d..04d1aff 100644
--- a/docs-requirements.txt
+++ b/docs-requirements.txt
@@ -1,2 +1,2 @@
-recommonmark==0.4.0
-Sphinx==1.4.6
+myst-parser==0.18.0
+Sphinx==5.1.1
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 5d711ee..b0b2e5d 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,3 +1,8 @@
dl.hide-signature > dt {
display: none;
}
+
+dl.field-list > dt {
+ /* prevent code blocks from forcing wrapping on the "Parameters" header */
+ word-break: initial;
+}
diff --git a/docs/change-log.md b/docs/change-log.md
index ab7065a..91f3fe6 100644
--- a/docs/change-log.md
+++ b/docs/change-log.md
@@ -1,6 +1,151 @@
Change log
==========
+5.0.3
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/76?closed=1)
+
+### Features
+- Add `cap_add` and `cap_drop` parameters to service create and ContainerSpec
+- Add `templating` parameter to config create
+
+### Bugfixes
+- Fix getting a read timeout for logs/attach with a tty and slow output
+
+### Miscellaneous
+- Fix documentation examples
+
+5.0.2
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/75?closed=1)
+
+### Bugfixes
+- Fix `disable_buffering` regression
+
+5.0.1
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/74?closed=1)
+
+### Bugfixes
+- Bring back support for ssh identity file
+- Cleanup remaining python-2 dependencies
+- Fix image save example in docs
+
+### Miscellaneous
+- Bump urllib3 to 1.26.5
+- Bump requests to 2.26.0
+
+5.0.0
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/70?closed=1)
+
+### Breaking changes
+- Remove support for Python 2.7
+- Make Python 3.6 the minimum version supported
+
+### Features
+- Add `limit` parameter to image search endpoint
+
+### Bugfixes
+- Fix `KeyError` exception on secret create
+- Verify TLS keys loaded from docker contexts
+- Update PORT_SPEC regex to allow square brackets for IPv6 addresses
+- Fix containers and images documentation examples
+
+4.4.4
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/73?closed=1)
+
+### Bugfixes
+- Remove `LD_LIBRARY_PATH` and `SSL_CERT_FILE` environment variables when shelling out to the ssh client
+
+4.4.3
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/72?closed=1)
+
+### Features
+- Add support for docker.types.Placement.MaxReplicas
+
+### Bugfixes
+- Fix SSH port parsing when shelling out to the ssh client
+
+4.4.2
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/71?closed=1)
+
+### Bugfixes
+- Fix SSH connection bug where the hostname was incorrectly trimmed and the error was hidden
+- Fix docs example
+
+### Miscellaneous
+- Add Python3.8 and 3.9 in setup.py classifier list
+
+4.4.1
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/69?closed=1)
+
+### Bugfixes
+- Avoid setting an unsupported parameter for subprocess.Popen on Windows
+- Replace use of the deprecated "filter" argument in "docker/api/image"
+
+4.4.0
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/67?closed=1)
+
+### Features
+- Add an alternative SSH connection to the paramiko one, based on shelling out to the SSH client, similar to the behaviour of the Docker CLI
+- Default image tag to `latest` on `pull`
+
+### Bugfixes
+- Fix plugin model upgrade
+- Fix examples URL in ulimits
+
+### Miscellaneous
+- Improve exception messages for server and client errors
+- Bump cryptography from 2.3 to 3.2
+
+4.3.1
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/68?closed=1)
+
+### Miscellaneous
+- Set default API version to `auto`
+- Fix conversion to bytes for `float`
+- Support OpenSSH `identityfile` option
+
+4.3.0
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/64?closed=1)
+
+### Features
+- Add `DeviceRequest` type to expose host resources such as GPUs
+- Add support for `DriverOpts` in EndpointConfig
+- Disable compression by default when using the container.get_archive method
+
+### Miscellaneous
+- Update default API version to v1.39
+- Update test engine version to 19.03.12
+
+4.2.2
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/66?closed=1)
+
+### Bugfixes
+
+- Fix context load for non-docker endpoints
+
4.2.1
-----
@@ -47,7 +192,6 @@ Change log
- Adjust `--platform` tests for changes in docker engine
- Update credentials-helpers to v0.6.3
-
4.0.2
-----
@@ -61,7 +205,6 @@ Change log
- Bumped version of websocket-client
-
4.0.1
-----
@@ -120,7 +263,7 @@ Change log
### Bugfixes
-* Fix base_url to keep TCP protocol on utils.py by letting the responsability of changing the
+* Fix base_url to keep TCP protocol on utils.py by letting the responsibility of changing the
protocol to `parse_host` afterwards, letting `base_url` with the original value.
* XFAIL test_attach_stream_and_cancel on TLS
@@ -1224,7 +1367,7 @@ like the others
(`Client.volumes`, `Client.create_volume`, `Client.inspect_volume`,
`Client.remove_volume`).
* Added support for the `group_add` parameter in `create_host_config`.
-* Added support for the CPU CFS (`cpu_quota` and `cpu_period`) parameteres
+* Added support for the CPU CFS (`cpu_quota` and `cpu_period`) parameters
in `create_host_config`.
* Added support for the archive API endpoint (`Client.get_archive`,
`Client.put_archive`).
diff --git a/docs/conf.py b/docs/conf.py
index f46d1f7..1258a42 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
#
# docker-sdk-python documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 14 15:48:58 2016.
@@ -34,24 +33,19 @@ sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
+ 'myst_parser'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
-from recommonmark.parser import CommonMarkParser
-
-source_parsers = {
- '.md': CommonMarkParser,
+source_suffix = {
+ '.rst': 'restructuredtext',
+ '.txt': 'markdown',
+ '.md': 'markdown',
}
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-#
-source_suffix = ['.rst', '.md']
-# source_suffix = '.md'
-
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
@@ -60,28 +54,28 @@ source_suffix = ['.rst', '.md']
master_doc = 'index'
# General information about the project.
-project = u'Docker SDK for Python'
+project = 'Docker SDK for Python'
year = datetime.datetime.now().year
-copyright = u'%d Docker Inc' % year
-author = u'Docker Inc'
+copyright = '%d Docker Inc' % year
+author = 'Docker Inc'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
-with open('../docker/version.py', 'r') as vfile:
+with open('../docker/version.py') as vfile:
exec(vfile.read())
# The full version, including alpha/beta/rc tags.
release = version
# The short X.Y version.
-version = '{}.{}'.format(version_info[0], version_info[1])
+version = f'{version_info[0]}.{version_info[1]}'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
-language = None
+language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
@@ -283,8 +277,8 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- (master_doc, 'docker-sdk-python.tex', u'docker-sdk-python Documentation',
- u'Docker Inc.', 'manual'),
+ (master_doc, 'docker-sdk-python.tex', 'docker-sdk-python Documentation',
+ 'Docker Inc.', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -325,7 +319,7 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- (master_doc, 'docker-sdk-python', u'docker-sdk-python Documentation',
+ (master_doc, 'docker-sdk-python', 'docker-sdk-python Documentation',
[author], 1)
]
@@ -340,7 +334,7 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- (master_doc, 'docker-sdk-python', u'docker-sdk-python Documentation',
+ (master_doc, 'docker-sdk-python', 'docker-sdk-python Documentation',
author, 'docker-sdk-python', 'One line description of project.',
'Miscellaneous'),
]
diff --git a/docs/index.rst b/docs/index.rst
index 63e85d3..93b30d4 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -58,7 +58,7 @@ You can stream logs:
.. code-block:: python
>>> for line in container.logs(stream=True):
- ... print line.strip()
+ ... print(line.strip())
Reticulating spline 2...
Reticulating spline 3...
...
diff --git a/docs/tls.rst b/docs/tls.rst
index 2e2f1ea..b95b468 100644
--- a/docs/tls.rst
+++ b/docs/tls.rst
@@ -15,7 +15,7 @@ For example, to check the server against a specific CA certificate:
.. code-block:: python
- tls_config = docker.tls.TLSConfig(ca_cert='/path/to/ca.pem')
+ tls_config = docker.tls.TLSConfig(ca_cert='/path/to/ca.pem', verify=True)
client = docker.DockerClient(base_url='<https_url>', tls=tls_config)
This is the equivalent of ``docker --tlsverify --tlscacert /path/to/ca.pem ...``.
diff --git a/requirements.txt b/requirements.txt
index 804a78a..36660b6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,19 +1,6 @@
-appdirs==1.4.3
-asn1crypto==0.22.0
-backports.ssl-match-hostname==3.5.0.1
-cffi==1.10.0
-cryptography==2.3
-enum34==1.1.6
-idna==2.5
-ipaddress==1.0.18
-packaging==16.8
-paramiko==2.4.2
-pycparser==2.17
-pyOpenSSL==18.0.0
-pyparsing==2.2.0
-pypiwin32==219; sys_platform == 'win32' and python_version < '3.6'
-pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6'
-requests==2.20.0
-six==1.10.0
-urllib3==1.24.3
-websocket-client==0.56.0
+packaging==21.3
+paramiko==2.11.0
+pywin32==304; sys_platform == 'win32'
+requests==2.28.1
+urllib3==1.26.11
+websocket-client==1.3.3
diff --git a/scripts/versions.py b/scripts/versions.py
index 4bdcb74..75e5355 100755
--- a/scripts/versions.py
+++ b/scripts/versions.py
@@ -52,8 +52,8 @@ class Version(namedtuple('_Version', 'major minor patch stage edition')):
return (int(self.major), int(self.minor), int(self.patch)) + stage
def __str__(self):
- stage = '-{}'.format(self.stage) if self.stage else ''
- edition = '-{}'.format(self.edition) if self.edition else ''
+ stage = f'-{self.stage}' if self.stage else ''
+ edition = f'-{self.edition}' if self.edition else ''
return '.'.join(map(str, self[:3])) + edition + stage
diff --git a/setup.cfg b/setup.cfg
index 907746f..a37e552 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,3 @@
-[bdist_wheel]
-universal = 1
-
[metadata]
description_file = README.rst
license = Apache License 2.0
diff --git a/setup.py b/setup.py
index c29787b..c6346b0 100644
--- a/setup.py
+++ b/setup.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-from __future__ import print_function
import codecs
import os
@@ -11,37 +10,23 @@ ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
requirements = [
- 'six >= 1.4.0',
+ 'packaging >= 14.0',
+ 'requests >= 2.26.0',
+ 'urllib3 >= 1.26.0',
'websocket-client >= 0.32.0',
- 'requests >= 2.14.2, != 2.18.0',
]
extras_require = {
- ':python_version < "3.5"': 'backports.ssl_match_hostname >= 3.5',
- # While not imported explicitly, the ipaddress module is required for
- # ssl_match_hostname to verify hosts match with certificates via
- # ServerAltname: https://pypi.python.org/pypi/backports.ssl_match_hostname
- ':python_version < "3.3"': 'ipaddress >= 1.0.16',
-
# win32 APIs if on Windows (required for npipe support)
- # Python 3.6 is only compatible with v220 ; Python < 3.5 is not supported
- # on v220 ; ALL versions are broken for v222 (as of 2018-01-26)
- ':sys_platform == "win32" and python_version < "3.6"': 'pypiwin32==219',
- ':sys_platform == "win32" and python_version >= "3.6"': 'pypiwin32==223',
-
- # If using docker-py over TLS, highly recommend this option is
- # pip-installed or pinned.
+ ':sys_platform == "win32"': 'pywin32>=304',
- # TODO: if pip installing both "requests" and "requests[security]", the
- # extra package from the "security" option are not installed (see
- # https://github.com/pypa/pip/issues/4391). Once that's fixed, instead of
- # installing the extra dependencies, install the following instead:
- # 'requests[security] >= 2.5.2, != 2.11.0, != 2.12.2'
- 'tls': ['pyOpenSSL>=17.5.0', 'cryptography>=1.3.4', 'idna>=2.0.0'],
+ # This is now a no-op: the requests[security] extra is likewise a no-op
+ # as of requests 2.26.0, since those features are always available by
+ # default; see https://github.com/psf/requests/pull/5867
+ 'tls': [],
# Only required when connecting using the ssh:// protocol
- 'ssh': ['paramiko>=2.4.2'],
-
+ 'ssh': ['paramiko>=2.4.3'],
}
version = None
@@ -72,7 +57,7 @@ setup(
install_requires=requirements,
tests_require=test_requirements,
extras_require=extras_require,
- python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
+ python_requires='>=3.7',
zip_safe=False,
test_suite='tests',
classifiers=[
@@ -81,16 +66,15 @@ setup(
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
+ 'Programming Language :: Python :: 3.10',
'Topic :: Software Development',
'Topic :: Utilities',
'License :: OSI Approved :: Apache Software License',
],
- maintainer='Joffrey F',
- maintainer_email='joffrey@docker.com',
+ maintainer='Ulysses Souza',
+ maintainer_email='ulysses.souza@docker.com',
)
diff --git a/test-requirements.txt b/test-requirements.txt
index 24078e2..979b291 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,7 +1,6 @@
-setuptools==44.0.0 # last version with python 2.7 support
-coverage==4.5.2
-flake8==3.6.0
-mock==1.0.1
-pytest==4.3.1
-pytest-cov==2.6.1
-pytest-timeout==1.3.3
+setuptools==63.2.0
+coverage==6.4.2
+flake8==4.0.1
+pytest==7.1.2
+pytest-cov==3.0.0
+pytest-timeout==2.1.0
diff --git a/tests/Dockerfile b/tests/Dockerfile
index df8468a..e24da47 100644
--- a/tests/Dockerfile
+++ b/tests/Dockerfile
@@ -1,15 +1,20 @@
-ARG PYTHON_VERSION=3.7
+ARG PYTHON_VERSION=3.10
FROM python:${PYTHON_VERSION}
ARG APT_MIRROR
RUN sed -ri "s/(httpredir|deb).debian.org/${APT_MIRROR:-deb.debian.org}/g" /etc/apt/sources.list \
- && sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list
+ && sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list
-RUN apt-get update && apt-get -y install \
+RUN apt-get update && apt-get -y install --no-install-recommends \
gnupg2 \
- pass \
- curl
+ pass
+
+# Add SSH keys and set permissions
+COPY tests/ssh/config/client /root/.ssh
+COPY tests/ssh/config/server/known_ed25519.pub /root/.ssh/known_hosts
+RUN sed -i '1s;^;dpy-dind-ssh ;' /root/.ssh/known_hosts
+RUN chmod -R 600 /root/.ssh
COPY ./tests/gpg-keys /gpg-keys
RUN gpg2 --import gpg-keys/secret
diff --git a/tests/Dockerfile-dind-certs b/tests/Dockerfile-dind-certs
index 2ab87ef..6e71189 100644
--- a/tests/Dockerfile-dind-certs
+++ b/tests/Dockerfile-dind-certs
@@ -1,4 +1,4 @@
-ARG PYTHON_VERSION=2.7
+ARG PYTHON_VERSION=3.10
FROM python:${PYTHON_VERSION}
RUN mkdir /tmp/certs
diff --git a/tests/Dockerfile-ssh-dind b/tests/Dockerfile-ssh-dind
new file mode 100644
index 0000000..22c707a
--- /dev/null
+++ b/tests/Dockerfile-ssh-dind
@@ -0,0 +1,18 @@
+ARG API_VERSION=1.41
+ARG ENGINE_VERSION=20.10
+
+FROM docker:${ENGINE_VERSION}-dind
+
+RUN apk add --no-cache --upgrade \
+ openssh
+
+COPY tests/ssh/config/server /etc/ssh/
+RUN chmod -R 600 /etc/ssh
+
+# set authorized keys for passwordless client connection
+COPY tests/ssh/config/client/id_rsa.pub /root/.ssh/authorized_keys
+RUN chmod -R 600 /root/.ssh
+
+# RUN echo "root:root" | chpasswd
+RUN ln -s /usr/local/bin/docker /usr/bin/docker
+EXPOSE 22
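The new Dockerfile-ssh-dind image gives the test suite a Docker daemon that is reachable only over SSH. A hedged sketch of pointing a client at such a daemon — the hostname dpy-dind-ssh mirrors the known_hosts entry seeded in tests/Dockerfile; adjust for your environment:

    import docker

    # use_ssh_client=False selects the paramiko transport; True shells out
    # to the local ssh binary instead.
    client = docker.DockerClient(
        base_url='ssh://root@dpy-dind-ssh:22',
        use_ssh_client=False,
    )
    print(client.version()['ApiVersion'])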
diff --git a/tests/helpers.py b/tests/helpers.py
index f344e1c..63cbe2e 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -11,7 +11,6 @@ import time
import docker
import paramiko
import pytest
-import six
def make_tree(dirs, files):
@@ -54,7 +53,7 @@ def requires_api_version(version):
return pytest.mark.skipif(
docker.utils.version_lt(test_version, version),
- reason="API version is too low (< {0})".format(version)
+ reason=f"API version is too low (< {version})"
)
@@ -86,7 +85,7 @@ def wait_on_condition(condition, delay=0.1, timeout=40):
def random_name():
- return u'dockerpytest_{0:x}'.format(random.getrandbits(64))
+ return f'dockerpytest_{random.getrandbits(64):x}'
def force_leave_swarm(client):
@@ -105,11 +104,11 @@ def force_leave_swarm(client):
def swarm_listen_addr():
- return '0.0.0.0:{0}'.format(random.randrange(10000, 25000))
+ return f'0.0.0.0:{random.randrange(10000, 25000)}'
def assert_cat_socket_detached_with_keys(sock, inputs):
- if six.PY3 and hasattr(sock, '_sock'):
+ if hasattr(sock, '_sock'):
sock = sock._sock
for i in inputs:
@@ -128,7 +127,7 @@ def assert_cat_socket_detached_with_keys(sock, inputs):
# of the daemon no longer cause this to raise an error.
try:
sock.sendall(b'make sure the socket is closed\n')
- except socket.error:
+ except OSError:
return
sock.sendall(b"make sure the socket is closed\n")
diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py
index 5712812..606c3b7 100644
--- a/tests/integration/api_build_test.py
+++ b/tests/integration/api_build_test.py
@@ -7,7 +7,6 @@ from docker import errors
from docker.utils.proxy import ProxyConfig
import pytest
-import six
from .base import BaseAPIIntegrationTest, TEST_IMG
from ..helpers import random_name, requires_api_version, requires_experimental
@@ -71,9 +70,8 @@ class BuildTest(BaseAPIIntegrationTest):
assert len(logs) > 0
def test_build_from_stringio(self):
- if six.PY3:
- return
- script = io.StringIO(six.text_type('\n').join([
+ return  # disabled: build() requires a bytes fileobj on Python 3
+ script = io.StringIO('\n'.join([
'FROM busybox',
'RUN mkdir -p /tmp/test',
'EXPOSE 8080',
@@ -83,8 +81,7 @@ class BuildTest(BaseAPIIntegrationTest):
stream = self.client.build(fileobj=script)
logs = ''
for chunk in stream:
- if six.PY3:
- chunk = chunk.decode('utf-8')
+ chunk = chunk.decode('utf-8')
logs += chunk
assert logs != ''
@@ -103,7 +100,9 @@ class BuildTest(BaseAPIIntegrationTest):
'ignored',
'Dockerfile',
'.dockerignore',
+ ' ignored-with-spaces ', # check that spaces are trimmed
'!ignored/subdir/excepted-file',
+ '! ignored/subdir/excepted-with-spaces ',
'', # empty line,
'#*', # comment line
]))
@@ -114,6 +113,9 @@ class BuildTest(BaseAPIIntegrationTest):
with open(os.path.join(base_dir, '#file.txt'), 'w') as f:
f.write('this file should not be ignored')
+ with open(os.path.join(base_dir, 'ignored-with-spaces'), 'w') as f:
+ f.write("this file should be ignored")
+
subdir = os.path.join(base_dir, 'ignored', 'subdir')
os.makedirs(subdir)
with open(os.path.join(subdir, 'file'), 'w') as f:
@@ -122,6 +124,9 @@ class BuildTest(BaseAPIIntegrationTest):
with open(os.path.join(subdir, 'excepted-file'), 'w') as f:
f.write("this file should not be ignored")
+ with open(os.path.join(subdir, 'excepted-with-spaces'), 'w') as f:
+ f.write("this file should not be ignored")
+
tag = 'docker-py-test-build-with-dockerignore'
stream = self.client.build(
path=base_dir,
@@ -135,11 +140,11 @@ class BuildTest(BaseAPIIntegrationTest):
self.client.wait(c)
logs = self.client.logs(c)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = logs.decode('utf-8')
assert sorted(list(filter(None, logs.split('\n')))) == sorted([
'/test/#file.txt',
+ '/test/ignored/subdir/excepted-with-spaces',
'/test/ignored/subdir/excepted-file',
'/test/not-ignored'
])
@@ -339,10 +344,8 @@ class BuildTest(BaseAPIIntegrationTest):
assert self.client.inspect_image(img_name)
ctnr = self.run_container(img_name, 'cat /hosts-file')
- self.tmp_containers.append(ctnr)
logs = self.client.logs(ctnr)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = logs.decode('utf-8')
assert '127.0.0.1\textrahost.local.test' in logs
assert '127.0.0.1\thello.world.test' in logs
@@ -377,7 +380,7 @@ class BuildTest(BaseAPIIntegrationTest):
snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)'
script = io.BytesIO(b'\n'.join([
b'FROM busybox',
- 'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8')
+ f'RUN sh -c ">&2 echo \'{snippet}\'"'.encode('utf-8')
]))
stream = self.client.build(
@@ -441,7 +444,7 @@ class BuildTest(BaseAPIIntegrationTest):
@requires_api_version('1.32')
@requires_experimental(until=None)
def test_build_invalid_platform(self):
- script = io.BytesIO('FROM busybox\n'.encode('ascii'))
+ script = io.BytesIO(b'FROM busybox\n')
with pytest.raises(errors.APIError) as excinfo:
stream = self.client.build(fileobj=script, platform='foobar')
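The dockerignore additions above assert that surrounding whitespace in patterns is trimmed and that `!` negation still re-includes files. A rough sketch of the underlying helper with a hypothetical context directory (exactly where the trimming happens during .dockerignore parsing is an assumption drawn from the test, not shown in this hunk):

    import os
    import tempfile

    from docker.utils.build import exclude_paths  # helper used by APIClient.build

    root = tempfile.mkdtemp()
    os.makedirs(os.path.join(root, 'ignored', 'subdir'))
    for name in ('not-ignored',
                 os.path.join('ignored', 'subdir', 'excepted-file')):
        open(os.path.join(root, name), 'w').close()

    # Negated patterns re-include files beneath an ignored directory.
    kept = exclude_paths(root, ['ignored', '!ignored/subdir/excepted-file'])
    # kept is expected to contain 'not-ignored' and 'ignored/subdir/excepted-file'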
diff --git a/tests/integration/api_client_test.py b/tests/integration/api_client_test.py
index 9e348f3..d1622fa 100644
--- a/tests/integration/api_client_test.py
+++ b/tests/integration/api_client_test.py
@@ -72,6 +72,6 @@ class UnixconnTest(unittest.TestCase):
client.close()
del client
- assert len(w) == 0, "No warnings produced: {0}".format(
+ assert len(w) == 0, "No warnings produced: {}".format(
w[0].message
)
diff --git a/tests/integration/api_config_test.py b/tests/integration/api_config_test.py
index 0ffd767..982ec46 100644
--- a/tests/integration/api_config_test.py
+++ b/tests/integration/api_config_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import docker
import pytest
@@ -31,7 +29,7 @@ class ConfigAPITest(BaseAPIIntegrationTest):
def test_create_config_unicode_data(self):
config_id = self.client.create_config(
- 'favorite_character', u'いざよいさくや'
+ 'favorite_character', 'いざよいさくや'
)
self.tmp_configs.append(config_id)
assert 'ID' in config_id
@@ -70,3 +68,16 @@ class ConfigAPITest(BaseAPIIntegrationTest):
data = self.client.configs(filters={'name': ['favorite_character']})
assert len(data) == 1
assert data[0]['ID'] == config_id['ID']
+
+ @requires_api_version('1.37')
+ def test_create_config_with_templating(self):
+ config_id = self.client.create_config(
+ 'favorite_character', 'sakuya izayoi',
+ templating={'name': 'golang'}
+ )
+ self.tmp_configs.append(config_id)
+ assert 'ID' in config_id
+ data = self.client.inspect_config(config_id)
+ assert data['Spec']['Name'] == 'favorite_character'
+ assert 'Templating' in data['Spec']
+ assert data['Spec']['Templating']['Name'] == 'golang'
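The new templating test exercises the `templating` argument added to `create_config`. A short sketch against a Swarm-enabled daemon — the config payload is hypothetical:

    import docker

    client = docker.APIClient()
    config = client.create_config(
        'app_config',
        'listen_addr: {{ .Service.Name }}\n',  # Go template, rendered per service
        templating={'name': 'golang'},
    )
    print(client.inspect_config(config)['Spec']['Templating'])  # {'Name': 'golang'}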
diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py
index 411d4c2..8f69e41 100644
--- a/tests/integration/api_container_test.py
+++ b/tests/integration/api_container_test.py
@@ -7,7 +7,6 @@ from datetime import datetime
import pytest
import requests
-import six
import docker
from .. import helpers
@@ -35,7 +34,7 @@ class ListContainersTest(BaseAPIIntegrationTest):
assert len(retrieved) == 1
retrieved = retrieved[0]
assert 'Command' in retrieved
- assert retrieved['Command'] == six.text_type('true')
+ assert retrieved['Command'] == 'true'
assert 'Image' in retrieved
assert re.search(r'alpine:.*', retrieved['Image'])
assert 'Status' in retrieved
@@ -104,13 +103,11 @@ class CreateContainerTest(BaseAPIIntegrationTest):
self.client.start(container3_id)
assert self.client.wait(container3_id)['StatusCode'] == 0
- logs = self.client.logs(container3_id)
- if six.PY3:
- logs = logs.decode('utf-8')
- assert '{0}_NAME='.format(link_env_prefix1) in logs
- assert '{0}_ENV_FOO=1'.format(link_env_prefix1) in logs
- assert '{0}_NAME='.format(link_env_prefix2) in logs
- assert '{0}_ENV_FOO=1'.format(link_env_prefix2) in logs
+ logs = self.client.logs(container3_id).decode('utf-8')
+ assert f'{link_env_prefix1}_NAME=' in logs
+ assert f'{link_env_prefix1}_ENV_FOO=1' in logs
+ assert f'{link_env_prefix2}_NAME=' in logs
+ assert f'{link_env_prefix2}_ENV_FOO=1' in logs
def test_create_with_restart_policy(self):
container = self.client.create_container(
@@ -227,9 +224,7 @@ class CreateContainerTest(BaseAPIIntegrationTest):
self.client.start(container)
self.client.wait(container)
- logs = self.client.logs(container)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = self.client.logs(container).decode('utf-8')
groups = logs.strip().split(' ')
assert '1000' in groups
assert '1001' in groups
@@ -244,9 +239,7 @@ class CreateContainerTest(BaseAPIIntegrationTest):
self.client.start(container)
self.client.wait(container)
- logs = self.client.logs(container)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = self.client.logs(container).decode('utf-8')
groups = logs.strip().split(' ')
assert '1000' in groups
@@ -279,7 +272,7 @@ class CreateContainerTest(BaseAPIIntegrationTest):
expected_msgs = [
"logger: no log driver named 'asdf' is registered",
- "looking up logging plugin asdf: plugin \"asdf\" not found",
+ "error looking up logging plugin asdf: plugin \"asdf\" not found",
]
with pytest.raises(docker.errors.APIError) as excinfo:
# raises an internal server error 500
@@ -467,16 +460,13 @@ class CreateContainerTest(BaseAPIIntegrationTest):
def test_create_with_device_cgroup_rules(self):
rule = 'c 7:128 rwm'
ctnr = self.client.create_container(
- TEST_IMG, 'cat /sys/fs/cgroup/devices/devices.list',
- host_config=self.client.create_host_config(
+ TEST_IMG, 'true', host_config=self.client.create_host_config(
device_cgroup_rules=[rule]
)
)
self.tmp_containers.append(ctnr)
config = self.client.inspect_container(ctnr)
assert config['HostConfig']['DeviceCgroupRules'] == [rule]
- self.client.start(ctnr)
- assert rule in self.client.logs(ctnr).decode('utf-8')
def test_create_with_uts_mode(self):
container = self.client.create_container(
@@ -494,7 +484,7 @@ class CreateContainerTest(BaseAPIIntegrationTest):
)
class VolumeBindTest(BaseAPIIntegrationTest):
def setUp(self):
- super(VolumeBindTest, self).setUp()
+ super().setUp()
self.mount_dest = '/mnt'
@@ -515,10 +505,7 @@ class VolumeBindTest(BaseAPIIntegrationTest):
TEST_IMG,
['ls', self.mount_dest],
)
- logs = self.client.logs(container)
-
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = self.client.logs(container).decode('utf-8')
assert self.filename in logs
inspect_data = self.client.inspect_container(container)
self.check_container_data(inspect_data, True)
@@ -534,10 +521,8 @@ class VolumeBindTest(BaseAPIIntegrationTest):
TEST_IMG,
['ls', self.mount_dest],
)
- logs = self.client.logs(container)
+ logs = self.client.logs(container).decode('utf-8')
- if six.PY3:
- logs = logs.decode('utf-8')
assert self.filename in logs
inspect_data = self.client.inspect_container(container)
@@ -554,9 +539,7 @@ class VolumeBindTest(BaseAPIIntegrationTest):
host_config=host_config
)
assert container
- logs = self.client.logs(container)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = self.client.logs(container).decode('utf-8')
assert self.filename in logs
inspect_data = self.client.inspect_container(container)
self.check_container_data(inspect_data, True)
@@ -573,9 +556,7 @@ class VolumeBindTest(BaseAPIIntegrationTest):
host_config=host_config
)
assert container
- logs = self.client.logs(container)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = self.client.logs(container).decode('utf-8')
assert self.filename in logs
inspect_data = self.client.inspect_container(container)
self.check_container_data(inspect_data, False)
@@ -634,7 +615,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
def test_get_file_archive_from_container(self):
data = 'The Maid and the Pocket Watch of Blood'
ctnr = self.client.create_container(
- TEST_IMG, 'sh -c "echo {0} > /vol1/data.txt"'.format(data),
+ TEST_IMG, f'sh -c "echo {data} > /vol1/data.txt"',
volumes=['/vol1']
)
self.tmp_containers.append(ctnr)
@@ -645,15 +626,14 @@ class ArchiveTest(BaseAPIIntegrationTest):
for d in strm:
destination.write(d)
destination.seek(0)
- retrieved_data = helpers.untar_file(destination, 'data.txt')
- if six.PY3:
- retrieved_data = retrieved_data.decode('utf-8')
+ retrieved_data = helpers.untar_file(
+ destination, 'data.txt').decode('utf-8')
assert data == retrieved_data.strip()
def test_get_file_stat_from_container(self):
data = 'The Maid and the Pocket Watch of Blood'
ctnr = self.client.create_container(
- TEST_IMG, 'sh -c "echo -n {0} > /vol1/data.txt"'.format(data),
+ TEST_IMG, f'sh -c "echo -n {data} > /vol1/data.txt"',
volumes=['/vol1']
)
self.tmp_containers.append(ctnr)
@@ -672,7 +652,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
test_file.seek(0)
ctnr = self.client.create_container(
TEST_IMG,
- 'cat {0}'.format(
+ 'cat {}'.format(
os.path.join('/vol1/', os.path.basename(test_file.name))
),
volumes=['/vol1']
@@ -683,9 +663,6 @@ class ArchiveTest(BaseAPIIntegrationTest):
self.client.start(ctnr)
self.client.wait(ctnr)
logs = self.client.logs(ctnr)
- if six.PY3:
- logs = logs.decode('utf-8')
- data = data.decode('utf-8')
assert logs.strip() == data
def test_copy_directory_to_container(self):
@@ -700,9 +677,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
self.client.put_archive(ctnr, '/vol1', test_tar)
self.client.start(ctnr)
self.client.wait(ctnr)
- logs = self.client.logs(ctnr)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = self.client.logs(ctnr).decode('utf-8')
results = logs.strip().split()
assert 'a.py' in results
assert 'b.py' in results
@@ -723,7 +698,7 @@ class RenameContainerTest(BaseAPIIntegrationTest):
if version == '1.5.0':
assert name == inspect['Name']
else:
- assert '/{0}'.format(name) == inspect['Name']
+ assert f'/{name}' == inspect['Name']
class StartContainerTest(BaseAPIIntegrationTest):
@@ -829,7 +804,7 @@ class LogsTest(BaseAPIIntegrationTest):
def test_logs(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
- TEST_IMG, 'echo {0}'.format(snippet)
+ TEST_IMG, f'echo {snippet}'
)
id = container['Id']
self.tmp_containers.append(id)
@@ -843,7 +818,7 @@ class LogsTest(BaseAPIIntegrationTest):
snippet = '''Line1
Line2'''
container = self.client.create_container(
- TEST_IMG, 'echo "{0}"'.format(snippet)
+ TEST_IMG, f'echo "{snippet}"'
)
id = container['Id']
self.tmp_containers.append(id)
@@ -856,12 +831,12 @@ Line2'''
def test_logs_streaming_and_follow(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
- TEST_IMG, 'echo {0}'.format(snippet)
+ TEST_IMG, f'echo {snippet}'
)
id = container['Id']
self.tmp_containers.append(id)
self.client.start(id)
- logs = six.binary_type()
+ logs = b''
for chunk in self.client.logs(id, stream=True, follow=True):
logs += chunk
@@ -876,12 +851,12 @@ Line2'''
def test_logs_streaming_and_follow_and_cancel(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
- TEST_IMG, 'sh -c "echo \\"{0}\\" && sleep 3"'.format(snippet)
+ TEST_IMG, f'sh -c "echo \\"{snippet}\\" && sleep 3"'
)
id = container['Id']
self.tmp_containers.append(id)
self.client.start(id)
- logs = six.binary_type()
+ logs = b''
generator = self.client.logs(id, stream=True, follow=True)
threading.Timer(1, generator.close).start()
@@ -894,7 +869,7 @@ Line2'''
def test_logs_with_dict_instead_of_id(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
- TEST_IMG, 'echo {0}'.format(snippet)
+ TEST_IMG, f'echo {snippet}'
)
id = container['Id']
self.tmp_containers.append(id)
@@ -907,7 +882,7 @@ Line2'''
def test_logs_with_tail_0(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
- TEST_IMG, 'echo "{0}"'.format(snippet)
+ TEST_IMG, f'echo "{snippet}"'
)
id = container['Id']
self.tmp_containers.append(id)
@@ -921,7 +896,7 @@ Line2'''
def test_logs_with_until(self):
snippet = 'Shanghai Teahouse (Hong Meiling)'
container = self.client.create_container(
- TEST_IMG, 'echo "{0}"'.format(snippet)
+ TEST_IMG, f'echo "{snippet}"'
)
self.tmp_containers.append(container)
@@ -1117,7 +1092,7 @@ class ContainerTopTest(BaseAPIIntegrationTest):
self.client.start(container)
res = self.client.top(container)
if not IS_WINDOWS_PLATFORM:
- assert res['Titles'] == [u'PID', u'USER', u'TIME', u'COMMAND']
+ assert res['Titles'] == ['PID', 'USER', 'TIME', 'COMMAND']
assert len(res['Processes']) == 1
assert res['Processes'][0][-1] == 'sleep 60'
self.client.kill(container)
@@ -1135,7 +1110,7 @@ class ContainerTopTest(BaseAPIIntegrationTest):
self.client.start(container)
res = self.client.top(container, '-eopid,user')
- assert res['Titles'] == [u'PID', u'USER']
+ assert res['Titles'] == ['PID', 'USER']
assert len(res['Processes']) == 1
assert res['Processes'][0][10] == 'sleep 60'
@@ -1222,10 +1197,10 @@ class AttachContainerTest(BaseAPIIntegrationTest):
sock = self.client.attach_socket(container, ws=False)
assert sock.fileno() > -1
- def test_run_container_reading_socket(self):
+ def test_run_container_reading_socket_http(self):
line = 'hi there and stuff and things, words!'
# `echo` appends CRLF, `printf` doesn't
- command = "printf '{0}'".format(line)
+ command = f"printf '{line}'"
container = self.client.create_container(TEST_IMG, command,
detach=True, tty=False)
self.tmp_containers.append(container)
@@ -1242,12 +1217,33 @@ class AttachContainerTest(BaseAPIIntegrationTest):
data = read_exactly(pty_stdout, next_size)
assert data.decode('utf-8') == line
+ @pytest.mark.xfail(condition=bool(os.environ.get('DOCKER_CERT_PATH', '')),
+ reason='DOCKER_CERT_PATH not respected for websockets')
+ def test_run_container_reading_socket_ws(self):
+ line = 'hi there and stuff and things, words!'
+ # `echo` appends CRLF, `printf` doesn't
+ command = f"printf '{line}'"
+ container = self.client.create_container(TEST_IMG, command,
+ detach=True, tty=False)
+ self.tmp_containers.append(container)
+
+ opts = {"stdout": 1, "stream": 1, "logs": 1}
+ pty_stdout = self.client.attach_socket(container, opts, ws=True)
+ self.addCleanup(pty_stdout.close)
+
+ self.client.start(container)
+
+ data = pty_stdout.recv()
+ assert data.decode('utf-8') == line
+
+ @pytest.mark.timeout(10)
def test_attach_no_stream(self):
container = self.client.create_container(
TEST_IMG, 'echo hello'
)
self.tmp_containers.append(container)
self.client.start(container)
+ self.client.wait(container, condition='not-running')
output = self.client.attach(container, stream=False, logs=True)
assert output == 'hello\n'.encode(encoding='ascii')
@@ -1509,7 +1505,7 @@ class LinkTest(BaseAPIIntegrationTest):
# Remove link
linked_name = self.client.inspect_container(container2_id)['Name'][1:]
- link_name = '%s/%s' % (linked_name, link_alias)
+ link_name = f'{linked_name}/{link_alias}'
self.client.remove_container(link_name, link=True)
# Link is gone
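test_run_container_reading_socket_ws above is the first coverage of attach over websockets. A minimal sketch of the same flow outside the test harness, assuming a local daemon and the busybox image:

    import docker

    client = docker.APIClient()
    container = client.create_container('busybox', "printf 'hello'")
    ws = client.attach_socket(
        container, {'stdout': 1, 'stream': 1, 'logs': 1}, ws=True
    )
    client.start(container)
    print(ws.recv().decode('utf-8'))  # 'hello'
    ws.close()

Note the xfail in that hunk: certificate material from DOCKER_CERT_PATH is not yet applied to the websocket connection.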
diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py
index 554e862..4d7748f 100644
--- a/tests/integration/api_exec_test.py
+++ b/tests/integration/api_exec_test.py
@@ -239,7 +239,7 @@ class ExecDemuxTest(BaseAPIIntegrationTest):
)
def setUp(self):
- super(ExecDemuxTest, self).setUp()
+ super().setUp()
self.container = self.client.create_container(
TEST_IMG, 'cat', detach=True, stdin_open=True
)
diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py
index 2bc96ab..6a6686e 100644
--- a/tests/integration/api_image_test.py
+++ b/tests/integration/api_image_test.py
@@ -7,9 +7,8 @@ import tempfile
import threading
import pytest
-import six
-from six.moves import BaseHTTPServer
-from six.moves import socketserver
+from http.server import SimpleHTTPRequestHandler
+import socketserver
import docker
@@ -33,7 +32,7 @@ class ListImagesTest(BaseAPIIntegrationTest):
def test_images_quiet(self):
res1 = self.client.images(quiet=True)
- assert type(res1[0]) == six.text_type
+ assert type(res1[0]) == str
class PullImageTest(BaseAPIIntegrationTest):
@@ -42,9 +41,9 @@ class PullImageTest(BaseAPIIntegrationTest):
self.client.remove_image('hello-world')
except docker.errors.APIError:
pass
- res = self.client.pull('hello-world', tag='latest')
+ res = self.client.pull('hello-world')
self.tmp_imgs.append('hello-world')
- assert type(res) == six.text_type
+ assert type(res) == str
assert len(self.client.images('hello-world')) >= 1
img_info = self.client.inspect_image('hello-world')
assert 'Id' in img_info
@@ -55,7 +54,7 @@ class PullImageTest(BaseAPIIntegrationTest):
except docker.errors.APIError:
pass
stream = self.client.pull(
- 'hello-world', tag='latest', stream=True, decode=True)
+ 'hello-world', stream=True, decode=True)
self.tmp_imgs.append('hello-world')
for chunk in stream:
assert isinstance(chunk, dict)
@@ -266,14 +265,14 @@ class ImportImageTest(BaseAPIIntegrationTest):
output = self.client.load_image(data)
assert any([
line for line in output
- if 'Loaded image: {}'.format(test_img) in line.get('stream', '')
+ if f'Loaded image: {test_img}' in line.get('stream', '')
])
@contextlib.contextmanager
def temporary_http_file_server(self, stream):
'''Serve data from an IO stream over HTTP.'''
- class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
+ class Handler(SimpleHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-Type', 'application/x-tar')
@@ -282,10 +281,10 @@ class ImportImageTest(BaseAPIIntegrationTest):
server = socketserver.TCPServer(('', 0), Handler)
thread = threading.Thread(target=server.serve_forever)
- thread.setDaemon(True)
+ thread.daemon = True
thread.start()
- yield 'http://%s:%s' % (socket.gethostname(), server.server_address[1])
+ yield f'http://{socket.gethostname()}:{server.server_address[1]}'
server.shutdown()
@@ -351,7 +350,7 @@ class SaveLoadImagesTest(BaseAPIIntegrationTest):
result = self.client.load_image(f.read())
success = False
- result_line = 'Loaded image: {}\n'.format(TEST_IMG)
+ result_line = f'Loaded image: {TEST_IMG}\n'
for data in result:
print(data)
if 'stream' in data:
diff --git a/tests/integration/api_network_test.py b/tests/integration/api_network_test.py
index 0f26827..2568138 100644
--- a/tests/integration/api_network_test.py
+++ b/tests/integration/api_network_test.py
@@ -9,7 +9,7 @@ from .base import BaseAPIIntegrationTest, TEST_IMG
class TestNetworks(BaseAPIIntegrationTest):
def tearDown(self):
self.client.leave_swarm(force=True)
- super(TestNetworks, self).tearDown()
+ super().tearDown()
def create_network(self, *args, **kwargs):
net_name = random_name()
@@ -275,6 +275,27 @@ class TestNetworks(BaseAPIIntegrationTest):
assert 'LinkLocalIPs' in net_cfg['IPAMConfig']
assert net_cfg['IPAMConfig']['LinkLocalIPs'] == ['169.254.8.8']
+ @requires_api_version('1.32')
+ def test_create_with_driveropt(self):
+ container = self.client.create_container(
+ TEST_IMG, 'top',
+ networking_config=self.client.create_networking_config(
+ {
+ 'bridge': self.client.create_endpoint_config(
+ driver_opt={'com.docker-py.setting': 'on'}
+ )
+ }
+ ),
+ host_config=self.client.create_host_config(network_mode='bridge')
+ )
+ self.tmp_containers.append(container)
+ self.client.start(container)
+ container_data = self.client.inspect_container(container)
+ net_cfg = container_data['NetworkSettings']['Networks']['bridge']
+ assert 'DriverOpts' in net_cfg
+ assert 'com.docker-py.setting' in net_cfg['DriverOpts']
+ assert net_cfg['DriverOpts']['com.docker-py.setting'] == 'on'
+
@requires_api_version('1.22')
def test_create_with_links(self):
net_name, net_id = self.create_network()
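test_create_with_driveropt shows per-endpoint driver options flowing through to the inspect output. A sketch of the client-side calls — the option key is hypothetical, since DriverOpts are passed to the network driver verbatim:

    import docker

    client = docker.APIClient()
    endpoint = client.create_endpoint_config(
        driver_opt={'com.example.nic': 'eth1'}  # hypothetical driver option
    )
    container = client.create_container(
        'busybox', 'top',
        networking_config=client.create_networking_config({'bridge': endpoint}),
        host_config=client.create_host_config(network_mode='bridge'),
    )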
diff --git a/tests/integration/api_plugin_test.py b/tests/integration/api_plugin_test.py
index 38f9d12..3ecb028 100644
--- a/tests/integration/api_plugin_test.py
+++ b/tests/integration/api_plugin_test.py
@@ -22,13 +22,13 @@ class PluginTest(BaseAPIIntegrationTest):
def teardown_method(self, method):
client = self.get_client_instance()
try:
- client.disable_plugin(SSHFS)
+ client.disable_plugin(SSHFS, force=True)
except docker.errors.APIError:
pass
for p in self.tmp_plugins:
try:
- client.remove_plugin(p, force=True)
+ client.remove_plugin(p)
except docker.errors.APIError:
pass
diff --git a/tests/integration/api_secret_test.py b/tests/integration/api_secret_test.py
index b3d93b8..fd98543 100644
--- a/tests/integration/api_secret_test.py
+++ b/tests/integration/api_secret_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import docker
import pytest
@@ -31,7 +29,7 @@ class SecretAPITest(BaseAPIIntegrationTest):
def test_create_secret_unicode_data(self):
secret_id = self.client.create_secret(
- 'favorite_character', u'いざよいさくや'
+ 'favorite_character', 'いざよいさくや'
)
self.tmp_secrets.append(secret_id)
assert 'ID' in secret_id
diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py
index b6b7ec5..dcf195d 100644
--- a/tests/integration/api_service_test.py
+++ b/tests/integration/api_service_test.py
@@ -1,11 +1,8 @@
-# -*- coding: utf-8 -*-
-
import random
import time
import docker
import pytest
-import six
from ..helpers import (
force_leave_swarm, requires_api_version, requires_experimental
@@ -31,10 +28,10 @@ class ServiceTest(BaseAPIIntegrationTest):
self.client.remove_service(service['ID'])
except docker.errors.APIError:
pass
- super(ServiceTest, self).tearDown()
+ super().tearDown()
def get_service_name(self):
- return 'dockerpytest_{0:x}'.format(random.getrandbits(64))
+ return f'dockerpytest_{random.getrandbits(64):x}'
def get_service_container(self, service_name, attempts=20, interval=0.5,
include_stopped=False):
@@ -55,7 +52,7 @@ class ServiceTest(BaseAPIIntegrationTest):
def create_simple_service(self, name=None, labels=None):
if name:
- name = 'dockerpytest_{0}'.format(name)
+ name = f'dockerpytest_{name}'
else:
name = self.get_service_name()
@@ -150,7 +147,7 @@ class ServiceTest(BaseAPIIntegrationTest):
else:
break
- if six.PY3:
+ if log_line is not None:
log_line = log_line.decode('utf-8')
assert 'hello\n' in log_line
@@ -404,20 +401,20 @@ class ServiceTest(BaseAPIIntegrationTest):
node_id = self.client.nodes()[0]['ID']
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
task_tmpl = docker.types.TaskTemplate(
- container_spec, placement=['node.id=={}'.format(node_id)]
+ container_spec, placement=[f'node.id=={node_id}']
)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Placement' in svc_info['Spec']['TaskTemplate']
assert (svc_info['Spec']['TaskTemplate']['Placement'] ==
- {'Constraints': ['node.id=={}'.format(node_id)]})
+ {'Constraints': [f'node.id=={node_id}']})
def test_create_service_with_placement_object(self):
node_id = self.client.nodes()[0]['ID']
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
placemt = docker.types.Placement(
- constraints=['node.id=={}'.format(node_id)]
+ constraints=[f'node.id=={node_id}']
)
task_tmpl = docker.types.TaskTemplate(
container_spec, placement=placemt
@@ -471,6 +468,19 @@ class ServiceTest(BaseAPIIntegrationTest):
assert 'Placement' in svc_info['Spec']['TaskTemplate']
assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt
+ @requires_api_version('1.40')
+ def test_create_service_with_placement_maxreplicas(self):
+ container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
+ placemt = docker.types.Placement(maxreplicas=1)
+ task_tmpl = docker.types.TaskTemplate(
+ container_spec, placement=placemt
+ )
+ name = self.get_service_name()
+ svc_id = self.client.create_service(task_tmpl, name=name)
+ svc_info = self.client.inspect_service(svc_id)
+ assert 'Placement' in svc_info['Spec']['TaskTemplate']
+ assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt
+
def test_create_service_with_endpoint_spec(self):
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
task_tmpl = docker.types.TaskTemplate(container_spec)
@@ -496,7 +506,7 @@ class ServiceTest(BaseAPIIntegrationTest):
assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp'
else:
- self.fail('Invalid port specification: {0}'.format(port))
+ self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3
@@ -658,14 +668,14 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
- container, 'cat /run/secrets/{0}'.format(secret_name)
+ container, f'cat /run/secrets/{secret_name}'
)
assert self.client.exec_start(exec_id) == secret_data
@requires_api_version('1.25')
def test_create_service_with_unicode_secret(self):
secret_name = 'favorite_touhou'
- secret_data = u'東方花映塚'
+ secret_data = '東方花映塚'
secret_id = self.client.create_secret(secret_name, secret_data)
self.tmp_secrets.append(secret_id)
secret_ref = docker.types.SecretReference(secret_id, secret_name)
@@ -683,7 +693,7 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
- container, 'cat /run/secrets/{0}'.format(secret_name)
+ container, f'cat /run/secrets/{secret_name}'
)
container_secret = self.client.exec_start(exec_id)
container_secret = container_secret.decode('utf-8')
@@ -710,14 +720,14 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
- container, 'cat /{0}'.format(config_name)
+ container, f'cat /{config_name}'
)
assert self.client.exec_start(exec_id) == config_data
@requires_api_version('1.30')
def test_create_service_with_unicode_config(self):
config_name = 'favorite_touhou'
- config_data = u'東方花映塚'
+ config_data = '東方花映塚'
config_id = self.client.create_config(config_name, config_data)
self.tmp_configs.append(config_id)
config_ref = docker.types.ConfigReference(config_id, config_name)
@@ -735,7 +745,7 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
- container, 'cat /{0}'.format(config_name)
+ container, f'cat /{config_name}'
)
container_config = self.client.exec_start(exec_id)
container_config = container_config.decode('utf-8')
@@ -1124,7 +1134,7 @@ class ServiceTest(BaseAPIIntegrationTest):
assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp'
else:
- self.fail('Invalid port specification: {0}'.format(port))
+ self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3
@@ -1151,7 +1161,7 @@ class ServiceTest(BaseAPIIntegrationTest):
assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp'
else:
- self.fail('Invalid port specification: {0}'.format(port))
+ self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3
@@ -1346,3 +1356,33 @@ class ServiceTest(BaseAPIIntegrationTest):
self.client.update_service(*args, **kwargs)
else:
raise
+
+ @requires_api_version('1.41')
+ def test_create_service_cap_add(self):
+ name = self.get_service_name()
+ container_spec = docker.types.ContainerSpec(
+ TEST_IMG, ['echo', 'hello'], cap_add=['CAP_SYSLOG']
+ )
+ task_tmpl = docker.types.TaskTemplate(container_spec)
+ svc_id = self.client.create_service(task_tmpl, name=name)
+ assert self.client.inspect_service(svc_id)
+ services = self.client.services(filters={'name': name})
+ assert len(services) == 1
+ assert services[0]['ID'] == svc_id['ID']
+ spec = services[0]['Spec']['TaskTemplate']['ContainerSpec']
+ assert 'CAP_SYSLOG' in spec['CapabilityAdd']
+
+ @requires_api_version('1.41')
+ def test_create_service_cap_drop(self):
+ name = self.get_service_name()
+ container_spec = docker.types.ContainerSpec(
+ TEST_IMG, ['echo', 'hello'], cap_drop=['CAP_SYSLOG']
+ )
+ task_tmpl = docker.types.TaskTemplate(container_spec)
+ svc_id = self.client.create_service(task_tmpl, name=name)
+ assert self.client.inspect_service(svc_id)
+ services = self.client.services(filters={'name': name})
+ assert len(services) == 1
+ assert services[0]['ID'] == svc_id['ID']
+ spec = services[0]['Spec']['TaskTemplate']['ContainerSpec']
+ assert 'CAP_SYSLOG' in spec['CapabilityDrop']
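The two new capability tests map to the cap_add/cap_drop arguments on ContainerSpec (API 1.41+). A hedged sketch of the corresponding service creation:

    import docker
    from docker.types import ContainerSpec, TaskTemplate

    client = docker.APIClient()  # assumes a Swarm manager at API >= 1.41
    spec = ContainerSpec(
        'busybox', ['sleep', '60'],
        cap_add=['CAP_NET_ADMIN'],  # becomes Spec.TaskTemplate.ContainerSpec.CapabilityAdd
        cap_drop=['CAP_CHOWN'],     # becomes ...CapabilityDrop
    )
    svc = client.create_service(TaskTemplate(spec), name='cap-demo')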
diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py
index f1cbc26..48c0592 100644
--- a/tests/integration/api_swarm_test.py
+++ b/tests/integration/api_swarm_test.py
@@ -8,7 +8,7 @@ from .base import BaseAPIIntegrationTest
class SwarmTest(BaseAPIIntegrationTest):
def setUp(self):
- super(SwarmTest, self).setUp()
+ super().setUp()
force_leave_swarm(self.client)
self._unlock_key = None
@@ -19,7 +19,7 @@ class SwarmTest(BaseAPIIntegrationTest):
except docker.errors.APIError:
pass
force_leave_swarm(self.client)
- super(SwarmTest, self).tearDown()
+ super().tearDown()
@requires_api_version('1.24')
def test_init_swarm_simple(self):
diff --git a/tests/integration/base.py b/tests/integration/base.py
index a7613f6..031079c 100644
--- a/tests/integration/base.py
+++ b/tests/integration/base.py
@@ -75,11 +75,11 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
"""
def setUp(self):
- super(BaseAPIIntegrationTest, self).setUp()
+ super().setUp()
self.client = self.get_client_instance()
def tearDown(self):
- super(BaseAPIIntegrationTest, self).tearDown()
+ super().tearDown()
self.client.close()
@staticmethod
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index ec48835..ae94595 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -1,5 +1,3 @@
-from __future__ import print_function
-
import sys
import warnings
@@ -17,11 +15,11 @@ def setup_test_session():
try:
c.inspect_image(TEST_IMG)
except docker.errors.NotFound:
- print("\npulling {0}".format(TEST_IMG), file=sys.stderr)
+ print(f"\npulling {TEST_IMG}", file=sys.stderr)
for data in c.pull(TEST_IMG, stream=True, decode=True):
status = data.get("status")
progress = data.get("progress")
- detail = "{0} - {1}".format(status, progress)
+ detail = f"{status} - {progress}"
print(detail, file=sys.stderr)
# Double make sure we now have busybox
diff --git a/tests/integration/credentials/store_test.py b/tests/integration/credentials/store_test.py
index dd543e2..d0cfd54 100644
--- a/tests/integration/credentials/store_test.py
+++ b/tests/integration/credentials/store_test.py
@@ -3,7 +3,6 @@ import random
import sys
import pytest
-import six
from distutils.spawn import find_executable
from docker.credentials import (
@@ -12,7 +11,7 @@ from docker.credentials import (
)
-class TestStore(object):
+class TestStore:
def teardown_method(self):
for server in self.tmp_keys:
try:
@@ -33,7 +32,7 @@ class TestStore(object):
self.store = Store(DEFAULT_OSX_STORE)
def get_random_servername(self):
- res = 'pycreds_test_{:x}'.format(random.getrandbits(32))
+ res = f'pycreds_test_{random.getrandbits(32):x}'
self.tmp_keys.append(res)
return res
@@ -61,7 +60,7 @@ class TestStore(object):
def test_unicode_strings(self):
key = self.get_random_servername()
- key = six.u(key)
+ key = key  # six.u() removed; str is already unicode on Python 3
self.store.store(server=key, username='user', secret='pass')
data = self.store.get(key)
assert data
diff --git a/tests/integration/credentials/utils_test.py b/tests/integration/credentials/utils_test.py
index ad55f32..acf018d 100644
--- a/tests/integration/credentials/utils_test.py
+++ b/tests/integration/credentials/utils_test.py
@@ -1,11 +1,7 @@
import os
from docker.credentials.utils import create_environment_dict
-
-try:
- from unittest import mock
-except ImportError:
- import mock
+from unittest import mock
@mock.patch.dict(os.environ)
diff --git a/tests/integration/models_images_test.py b/tests/integration/models_images_test.py
index 223d102..94aa201 100644
--- a/tests/integration/models_images_test.py
+++ b/tests/integration/models_images_test.py
@@ -13,8 +13,8 @@ class ImageCollectionTest(BaseIntegrationTest):
def test_build(self):
client = docker.from_env(version=TEST_API_VERSION)
image, _ = client.images.build(fileobj=io.BytesIO(
- "FROM alpine\n"
- "CMD echo hello world".encode('ascii')
+ b"FROM alpine\n"
+ b"CMD echo hello world"
))
self.tmp_imgs.append(image.id)
assert client.containers.run(image) == b"hello world\n"
@@ -24,8 +24,8 @@ class ImageCollectionTest(BaseIntegrationTest):
client = docker.from_env(version=TEST_API_VERSION)
with pytest.raises(docker.errors.BuildError) as cm:
client.images.build(fileobj=io.BytesIO(
- "FROM alpine\n"
- "RUN exit 1".encode('ascii')
+ b"FROM alpine\n"
+ b"RUN exit 1"
))
assert (
"The command '/bin/sh -c exit 1' returned a non-zero code: 1"
@@ -36,8 +36,8 @@ class ImageCollectionTest(BaseIntegrationTest):
client = docker.from_env(version=TEST_API_VERSION)
image, _ = client.images.build(
tag='some-tag', fileobj=io.BytesIO(
- "FROM alpine\n"
- "CMD echo hello world".encode('ascii')
+ b"FROM alpine\n"
+ b"CMD echo hello world"
)
)
self.tmp_imgs.append(image.id)
@@ -47,8 +47,8 @@ class ImageCollectionTest(BaseIntegrationTest):
client = docker.from_env(version=TEST_API_VERSION)
image, _ = client.images.build(
tag='dup-txt-tag', fileobj=io.BytesIO(
- "FROM alpine\n"
- "CMD echo Successfully built abcd1234".encode('ascii')
+ b"FROM alpine\n"
+ b"CMD echo Successfully built abcd1234"
)
)
self.tmp_imgs.append(image.id)
@@ -86,7 +86,7 @@ class ImageCollectionTest(BaseIntegrationTest):
def test_pull_multiple(self):
client = docker.from_env(version=TEST_API_VERSION)
- images = client.images.pull('hello-world')
+ images = client.images.pull('hello-world', all_tags=True)
assert len(images) >= 1
assert any([
'hello-world:latest' in img.attrs['RepoTags'] for img in images
@@ -119,7 +119,7 @@ class ImageCollectionTest(BaseIntegrationTest):
self.tmp_imgs.append(additional_tag)
image.reload()
with tempfile.TemporaryFile() as f:
- stream = image.save(named='{}:latest'.format(additional_tag))
+ stream = image.save(named=f'{additional_tag}:latest')
for chunk in stream:
f.write(chunk)
@@ -129,7 +129,7 @@ class ImageCollectionTest(BaseIntegrationTest):
assert len(result) == 1
assert result[0].id == image.id
- assert '{}:latest'.format(additional_tag) in result[0].tags
+ assert f'{additional_tag}:latest' in result[0].tags
def test_save_name_error(self):
client = docker.from_env(version=TEST_API_VERSION)
@@ -143,7 +143,7 @@ class ImageTest(BaseIntegrationTest):
def test_tag_and_remove(self):
repo = 'dockersdk.tests.images.test_tag'
tag = 'some-tag'
- identifier = '{}:{}'.format(repo, tag)
+ identifier = f'{repo}:{tag}'
client = docker.from_env(version=TEST_API_VERSION)
image = client.images.pull('alpine:latest')
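The all_tags change above reflects a behavior split in ImageCollection.pull: without a tag it now pulls and returns a single image (defaulting to latest), and the old pull-everything behavior requires an explicit flag. A quick sketch:

    import docker

    client = docker.from_env()
    image = client.images.pull('alpine')                  # one Image (alpine:latest)
    images = client.images.pull('alpine', all_tags=True)  # list of Image objects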
diff --git a/tests/integration/models_services_test.py b/tests/integration/models_services_test.py
index 36caa85..f1439a4 100644
--- a/tests/integration/models_services_test.py
+++ b/tests/integration/models_services_test.py
@@ -30,13 +30,18 @@ class ServiceTest(unittest.TestCase):
# ContainerSpec arguments
image="alpine",
command="sleep 300",
- container_labels={'container': 'label'}
+ container_labels={'container': 'label'},
+ rollback_config={'order': 'start-first'}
)
assert service.name == name
assert service.attrs['Spec']['Labels']['foo'] == 'bar'
container_spec = service.attrs['Spec']['TaskTemplate']['ContainerSpec']
assert "alpine" in container_spec['Image']
assert container_spec['Labels'] == {'container': 'label'}
+ spec_rollback = service.attrs['Spec'].get('RollbackConfig', None)
+ assert spec_rollback is not None
+ assert ('Order' in spec_rollback and
+ spec_rollback['Order'] == 'start-first')
def test_create_with_network(self):
client = docker.from_env(version=TEST_API_VERSION)
@@ -333,3 +338,41 @@ class ServiceTest(unittest.TestCase):
assert service.force_update()
service.reload()
assert service.version > initial_version
+
+ @helpers.requires_api_version('1.41')
+ def test_create_cap_add(self):
+ client = docker.from_env(version=TEST_API_VERSION)
+ name = helpers.random_name()
+ service = client.services.create(
+ name=name,
+ labels={'foo': 'bar'},
+ image="alpine",
+ command="sleep 300",
+ container_labels={'container': 'label'},
+ cap_add=["CAP_SYSLOG"]
+ )
+ assert service.name == name
+ assert service.attrs['Spec']['Labels']['foo'] == 'bar'
+ container_spec = service.attrs['Spec']['TaskTemplate']['ContainerSpec']
+ assert "alpine" in container_spec['Image']
+ assert container_spec['Labels'] == {'container': 'label'}
+ assert "CAP_SYSLOG" in container_spec["CapabilityAdd"]
+
+ @helpers.requires_api_version('1.41')
+ def test_create_cap_drop(self):
+ client = docker.from_env(version=TEST_API_VERSION)
+ name = helpers.random_name()
+ service = client.services.create(
+ name=name,
+ labels={'foo': 'bar'},
+ image="alpine",
+ command="sleep 300",
+ container_labels={'container': 'label'},
+ cap_drop=["CAP_SYSLOG"]
+ )
+ assert service.name == name
+ assert service.attrs['Spec']['Labels']['foo'] == 'bar'
+ container_spec = service.attrs['Spec']['TaskTemplate']['ContainerSpec']
+ assert "alpine" in container_spec['Image']
+ assert container_spec['Labels'] == {'container': 'label'}
+ assert "CAP_SYSLOG" in container_spec["CapabilityDrop"]
diff --git a/tests/integration/regression_test.py b/tests/integration/regression_test.py
index a63883c..10313a6 100644
--- a/tests/integration/regression_test.py
+++ b/tests/integration/regression_test.py
@@ -2,13 +2,13 @@ import io
import random
import docker
-import six
from .base import BaseAPIIntegrationTest, TEST_IMG
import pytest
class TestRegressions(BaseAPIIntegrationTest):
+ @pytest.mark.xfail(True, reason='Docker API always returns chunked resp')
def test_443_handle_nonchunked_response_in_stream(self):
dfile = io.BytesIO()
with pytest.raises(docker.errors.APIError) as exc:
@@ -39,8 +39,7 @@ class TestRegressions(BaseAPIIntegrationTest):
self.client.start(ctnr)
self.client.wait(ctnr)
logs = self.client.logs(ctnr)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = logs.decode('utf-8')
assert logs == '1000\n'
def test_792_explicit_port_protocol(self):
@@ -56,10 +55,10 @@ class TestRegressions(BaseAPIIntegrationTest):
self.client.start(ctnr)
assert self.client.port(
ctnr, 2000
- )[0]['HostPort'] == six.text_type(tcp_port)
+ )[0]['HostPort'] == str(tcp_port)
assert self.client.port(
ctnr, '2000/tcp'
- )[0]['HostPort'] == six.text_type(tcp_port)
+ )[0]['HostPort'] == str(tcp_port)
assert self.client.port(
ctnr, '2000/udp'
- )[0]['HostPort'] == six.text_type(udp_port)
+ )[0]['HostPort'] == str(udp_port)
diff --git a/tests/ssh/__init__.py b/tests/ssh/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/ssh/__init__.py
diff --git a/tests/ssh/api_build_test.py b/tests/ssh/api_build_test.py
new file mode 100644
index 0000000..ef48e12
--- /dev/null
+++ b/tests/ssh/api_build_test.py
@@ -0,0 +1,590 @@
+import io
+import os
+import shutil
+import tempfile
+
+from docker import errors
+from docker.utils.proxy import ProxyConfig
+
+import pytest
+
+from .base import BaseAPIIntegrationTest, TEST_IMG
+from ..helpers import random_name, requires_api_version, requires_experimental
+
+
+class BuildTest(BaseAPIIntegrationTest):
+ def test_build_with_proxy(self):
+ self.client._proxy_configs = ProxyConfig(
+ ftp='a', http='b', https='c', no_proxy='d'
+ )
+
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN env | grep "FTP_PROXY=a"',
+ 'RUN env | grep "ftp_proxy=a"',
+ 'RUN env | grep "HTTP_PROXY=b"',
+ 'RUN env | grep "http_proxy=b"',
+ 'RUN env | grep "HTTPS_PROXY=c"',
+ 'RUN env | grep "https_proxy=c"',
+ 'RUN env | grep "NO_PROXY=d"',
+ 'RUN env | grep "no_proxy=d"',
+ ]).encode('ascii'))
+
+ self.client.build(fileobj=script, decode=True)
+
+ def test_build_with_proxy_and_buildargs(self):
+ self.client._proxy_configs = ProxyConfig(
+ ftp='a', http='b', https='c', no_proxy='d'
+ )
+
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN env | grep "FTP_PROXY=XXX"',
+ 'RUN env | grep "ftp_proxy=xxx"',
+ 'RUN env | grep "HTTP_PROXY=b"',
+ 'RUN env | grep "http_proxy=b"',
+ 'RUN env | grep "HTTPS_PROXY=c"',
+ 'RUN env | grep "https_proxy=c"',
+ 'RUN env | grep "NO_PROXY=d"',
+ 'RUN env | grep "no_proxy=d"',
+ ]).encode('ascii'))
+
+ self.client.build(
+ fileobj=script,
+ decode=True,
+ buildargs={'FTP_PROXY': 'XXX', 'ftp_proxy': 'xxx'}
+ )
+
+ def test_build_streaming(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN mkdir -p /tmp/test',
+ 'EXPOSE 8080',
+ 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
+ ' /tmp/silence.tar.gz'
+ ]).encode('ascii'))
+ stream = self.client.build(fileobj=script, decode=True)
+ logs = []
+ for chunk in stream:
+ logs.append(chunk)
+ assert len(logs) > 0
+
+ def test_build_from_stringio(self):
+ return  # disabled: build() requires a bytes fileobj on Python 3
+ script = io.StringIO('\n'.join([
+ 'FROM busybox',
+ 'RUN mkdir -p /tmp/test',
+ 'EXPOSE 8080',
+ 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
+ ' /tmp/silence.tar.gz'
+ ]))
+ stream = self.client.build(fileobj=script)
+ logs = ''
+ for chunk in stream:
+ chunk = chunk.decode('utf-8')
+ logs += chunk
+ assert logs != ''
+
+ def test_build_with_dockerignore(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write("\n".join([
+ 'FROM busybox',
+ 'ADD . /test',
+ ]))
+
+ with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
+ f.write("\n".join([
+ 'ignored',
+ 'Dockerfile',
+ '.dockerignore',
+ '!ignored/subdir/excepted-file',
+ '', # empty line,
+ '#*', # comment line
+ ]))
+
+ with open(os.path.join(base_dir, 'not-ignored'), 'w') as f:
+ f.write("this file should not be ignored")
+
+ with open(os.path.join(base_dir, '#file.txt'), 'w') as f:
+ f.write('this file should not be ignored')
+
+ subdir = os.path.join(base_dir, 'ignored', 'subdir')
+ os.makedirs(subdir)
+ with open(os.path.join(subdir, 'file'), 'w') as f:
+ f.write("this file should be ignored")
+
+ with open(os.path.join(subdir, 'excepted-file'), 'w') as f:
+ f.write("this file should not be ignored")
+
+ tag = 'docker-py-test-build-with-dockerignore'
+ stream = self.client.build(
+ path=base_dir,
+ tag=tag,
+ )
+ for chunk in stream:
+ pass
+
+ c = self.client.create_container(tag, ['find', '/test', '-type', 'f'])
+ self.client.start(c)
+ self.client.wait(c)
+ logs = self.client.logs(c)
+
+ logs = logs.decode('utf-8')
+
+ assert sorted(list(filter(None, logs.split('\n')))) == sorted([
+ '/test/#file.txt',
+ '/test/ignored/subdir/excepted-file',
+ '/test/not-ignored'
+ ])
+
+ def test_build_with_buildargs(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM scratch',
+ 'ARG test',
+ 'USER $test'
+ ]).encode('ascii'))
+
+ stream = self.client.build(
+ fileobj=script, tag='buildargs', buildargs={'test': 'OK'}
+ )
+ self.tmp_imgs.append('buildargs')
+ for chunk in stream:
+ pass
+
+ info = self.client.inspect_image('buildargs')
+ assert info['Config']['User'] == 'OK'
+
+ @requires_api_version('1.22')
+ def test_build_shmsize(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM scratch',
+ 'CMD sh -c "echo \'Hello, World!\'"',
+ ]).encode('ascii'))
+
+ tag = 'shmsize'
+ shmsize = 134217728
+
+ stream = self.client.build(
+ fileobj=script, tag=tag, shmsize=shmsize
+ )
+ self.tmp_imgs.append(tag)
+ for chunk in stream:
+ pass
+
+ # There is currently no way to get the shmsize
+ # that was used to build the image
+
+ @requires_api_version('1.24')
+ def test_build_isolation(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM scratch',
+ 'CMD sh -c "echo \'Deaf To All But The Song\''
+ ]).encode('ascii'))
+
+ stream = self.client.build(
+ fileobj=script, tag='isolation',
+ isolation='default'
+ )
+
+ for chunk in stream:
+ pass
+
+ @requires_api_version('1.23')
+ def test_build_labels(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM scratch',
+ ]).encode('ascii'))
+
+ labels = {'test': 'OK'}
+
+ stream = self.client.build(
+ fileobj=script, tag='labels', labels=labels
+ )
+ self.tmp_imgs.append('labels')
+ for chunk in stream:
+ pass
+
+ info = self.client.inspect_image('labels')
+ assert info['Config']['Labels'] == labels
+
+ @requires_api_version('1.25')
+ def test_build_with_cache_from(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'ENV FOO=bar',
+ 'RUN touch baz',
+ 'RUN touch bax',
+ ]).encode('ascii'))
+
+ stream = self.client.build(fileobj=script, tag='build1')
+ self.tmp_imgs.append('build1')
+ for chunk in stream:
+ pass
+
+ stream = self.client.build(
+ fileobj=script, tag='build2', cache_from=['build1'],
+ decode=True
+ )
+ self.tmp_imgs.append('build2')
+ counter = 0
+ for chunk in stream:
+ if 'Using cache' in chunk.get('stream', ''):
+ counter += 1
+ assert counter == 3
+ self.client.remove_image('build2')
+
+ counter = 0
+ stream = self.client.build(
+ fileobj=script, tag='build2', cache_from=['nosuchtag'],
+ decode=True
+ )
+ for chunk in stream:
+ if 'Using cache' in chunk.get('stream', ''):
+ counter += 1
+ assert counter == 0
+
+ @requires_api_version('1.29')
+ def test_build_container_with_target(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox as first',
+ 'RUN mkdir -p /tmp/test',
+ 'RUN touch /tmp/silence.tar.gz',
+ 'FROM alpine:latest',
+ 'WORKDIR /root/',
+ 'COPY --from=first /tmp/silence.tar.gz .',
+ 'ONBUILD RUN echo "This should not be in the final image"'
+ ]).encode('ascii'))
+
+ stream = self.client.build(
+ fileobj=script, target='first', tag='build1'
+ )
+ self.tmp_imgs.append('build1')
+ for chunk in stream:
+ pass
+
+ info = self.client.inspect_image('build1')
+ assert not info['Config']['OnBuild']
+
+ @requires_api_version('1.25')
+ def test_build_with_network_mode(self):
+ # Set up pingable endpoint on custom network
+ network = self.client.create_network(random_name())['Id']
+ self.tmp_networks.append(network)
+ container = self.client.create_container(TEST_IMG, 'top')
+ self.tmp_containers.append(container)
+ self.client.start(container)
+ self.client.connect_container_to_network(
+ container, network, aliases=['pingtarget.docker']
+ )
+
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN ping -c1 pingtarget.docker'
+ ]).encode('ascii'))
+
+ stream = self.client.build(
+ fileobj=script, network_mode=network,
+ tag='dockerpytest_customnetbuild'
+ )
+
+ self.tmp_imgs.append('dockerpytest_customnetbuild')
+ for chunk in stream:
+ pass
+
+ assert self.client.inspect_image('dockerpytest_customnetbuild')
+
+ script.seek(0)
+ stream = self.client.build(
+ fileobj=script, network_mode='none',
+ tag='dockerpytest_nonebuild', nocache=True, decode=True
+ )
+
+ self.tmp_imgs.append('dockerpytest_nonebuild')
+ logs = [chunk for chunk in stream]
+ assert 'errorDetail' in logs[-1]
+ assert logs[-1]['errorDetail']['code'] == 1
+
+ with pytest.raises(errors.NotFound):
+ self.client.inspect_image('dockerpytest_nonebuild')
+
+ @requires_api_version('1.27')
+ def test_build_with_extra_hosts(self):
+ img_name = 'dockerpytest_extrahost_build'
+ self.tmp_imgs.append(img_name)
+
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN ping -c1 hello.world.test',
+ 'RUN ping -c1 extrahost.local.test',
+ 'RUN cp /etc/hosts /hosts-file'
+ ]).encode('ascii'))
+
+ stream = self.client.build(
+ fileobj=script, tag=img_name,
+ extra_hosts={
+ 'extrahost.local.test': '127.0.0.1',
+ 'hello.world.test': '127.0.0.1',
+ }, decode=True
+ )
+ for chunk in stream:
+ if 'errorDetail' in chunk:
+ pytest.fail(chunk)
+
+ assert self.client.inspect_image(img_name)
+ ctnr = self.run_container(img_name, 'cat /hosts-file')
+ logs = self.client.logs(ctnr)
+ logs = logs.decode('utf-8')
+ assert '127.0.0.1\textrahost.local.test' in logs
+ assert '127.0.0.1\thello.world.test' in logs
+
+ @requires_experimental(until=None)
+ @requires_api_version('1.25')
+ def test_build_squash(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN echo blah > /file_1',
+ 'RUN echo blahblah > /file_2',
+ 'RUN echo blahblahblah > /file_3'
+ ]).encode('ascii'))
+
+ def build_squashed(squash):
+ tag = 'squash' if squash else 'nosquash'
+ stream = self.client.build(
+ fileobj=script, tag=tag, squash=squash
+ )
+ self.tmp_imgs.append(tag)
+ for chunk in stream:
+ pass
+
+ return self.client.inspect_image(tag)
+
+ non_squashed = build_squashed(False)
+ squashed = build_squashed(True)
+ assert len(non_squashed['RootFS']['Layers']) == 4
+ assert len(squashed['RootFS']['Layers']) == 2
+
+ def test_build_stderr_data(self):
+ control_chars = ['\x1b[91m', '\x1b[0m']
+ snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)'
+ script = io.BytesIO(b'\n'.join([
+ b'FROM busybox',
+ f'RUN sh -c ">&2 echo \'{snippet}\'"'.encode('utf-8')
+ ]))
+
+ stream = self.client.build(
+ fileobj=script, decode=True, nocache=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk.get('stream'))
+ expected = (
+ f'{control_chars[0]}{snippet}\n{control_chars[1]}'
+ )
+ assert any([line == expected for line in lines])
+
+ def test_build_gzip_encoding(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write("\n".join([
+ 'FROM busybox',
+ 'ADD . /test',
+ ]))
+
+ stream = self.client.build(
+ path=base_dir, decode=True, nocache=True,
+ gzip=True
+ )
+
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+
+ assert 'Successfully built' in lines[-1]['stream']
+
+ def test_build_with_dockerfile_empty_lines(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write('FROM busybox\n')
+ with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
+ f.write('\n'.join([
+ ' ',
+ '',
+ '\t\t',
+ '\t ',
+ ]))
+
+ stream = self.client.build(
+ path=base_dir, decode=True, nocache=True
+ )
+
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully built' in lines[-1]['stream']
+
+ def test_build_gzip_custom_encoding(self):
+ with pytest.raises(errors.DockerException):
+ self.client.build(path='.', gzip=True, encoding='text/html')
+
+ @requires_api_version('1.32')
+ @requires_experimental(until=None)
+ def test_build_invalid_platform(self):
+ script = io.BytesIO(b'FROM busybox\n')
+
+ with pytest.raises(errors.APIError) as excinfo:
+ stream = self.client.build(fileobj=script, platform='foobar')
+ for _ in stream:
+ pass
+
+ # Some API versions incorrectly return 500 status; assert 4xx or 5xx
+ assert excinfo.value.is_error()
+ assert 'unknown operating system' in excinfo.exconly() \
+ or 'invalid platform' in excinfo.exconly()
+
+ def test_build_out_of_context_dockerfile(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
+ f.write('hello world')
+ with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
+ f.write('.dockerignore\n')
+ df_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, df_dir)
+ df_name = os.path.join(df_dir, 'Dockerfile')
+ with open(df_name, 'wb') as df:
+ df.write(('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ])).encode('utf-8'))
+ df.flush()
+ img_name = random_name()
+ self.tmp_imgs.append(img_name)
+ stream = self.client.build(
+ path=base_dir, dockerfile=df_name, tag=img_name,
+ decode=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully tagged' in lines[-1]['stream']
+
+ ctnr = self.client.create_container(img_name, 'ls -a')
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ lsdata = self.client.logs(ctnr).strip().split(b'\n')
+ assert len(lsdata) == 3
+ assert sorted([b'.', b'..', b'file.txt']) == sorted(lsdata)
+
+ def test_build_in_context_dockerfile(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
+ f.write('hello world')
+ with open(os.path.join(base_dir, 'custom.dockerfile'), 'w') as df:
+ df.write('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ]))
+ img_name = random_name()
+ self.tmp_imgs.append(img_name)
+ stream = self.client.build(
+ path=base_dir, dockerfile='custom.dockerfile', tag=img_name,
+ decode=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully tagged' in lines[-1]['stream']
+
+ ctnr = self.client.create_container(img_name, 'ls -a')
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ lsdata = self.client.logs(ctnr).strip().split(b'\n')
+ assert len(lsdata) == 4
+ assert sorted(
+ [b'.', b'..', b'file.txt', b'custom.dockerfile']
+ ) == sorted(lsdata)
+
+ def test_build_in_context_nested_dockerfile(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
+ f.write('hello world')
+ subdir = os.path.join(base_dir, 'hello', 'world')
+ os.makedirs(subdir)
+ with open(os.path.join(subdir, 'custom.dockerfile'), 'w') as df:
+ df.write('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ]))
+ img_name = random_name()
+ self.tmp_imgs.append(img_name)
+ stream = self.client.build(
+ path=base_dir, dockerfile='hello/world/custom.dockerfile',
+ tag=img_name, decode=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully tagged' in lines[-1]['stream']
+
+ ctnr = self.client.create_container(img_name, 'ls -a')
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ lsdata = self.client.logs(ctnr).strip().split(b'\n')
+ assert len(lsdata) == 4
+ assert sorted(
+ [b'.', b'..', b'file.txt', b'hello']
+ ) == sorted(lsdata)
+
+ def test_build_in_context_abs_dockerfile(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ abs_dockerfile_path = os.path.join(base_dir, 'custom.dockerfile')
+ with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
+ f.write('hello world')
+ with open(abs_dockerfile_path, 'w') as df:
+ df.write('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ]))
+ img_name = random_name()
+ self.tmp_imgs.append(img_name)
+ stream = self.client.build(
+ path=base_dir, dockerfile=abs_dockerfile_path, tag=img_name,
+ decode=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully tagged' in lines[-1]['stream']
+
+ ctnr = self.client.create_container(img_name, 'ls -a')
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ lsdata = self.client.logs(ctnr).strip().split(b'\n')
+ assert len(lsdata) == 4
+ assert sorted(
+ [b'.', b'..', b'file.txt', b'custom.dockerfile']
+ ) == sorted(lsdata)
+
+ @requires_api_version('1.31')
+ @pytest.mark.xfail(
+ True,
+ reason='Currently fails on 18.09: '
+ 'https://github.com/moby/moby/issues/37920'
+ )
+ def test_prune_builds(self):
+ prune_result = self.client.prune_builds()
+ assert 'SpaceReclaimed' in prune_result
+ assert isinstance(prune_result['SpaceReclaimed'], int)
diff --git a/tests/ssh/base.py b/tests/ssh/base.py
new file mode 100644
index 0000000..4b91add
--- /dev/null
+++ b/tests/ssh/base.py
@@ -0,0 +1,134 @@
+import os
+import shutil
+import unittest
+
+import pytest
+
+import docker
+from .. import helpers
+from docker.utils import kwargs_from_env
+
+TEST_IMG = 'alpine:3.10'
+TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION')
+
+
+class BaseIntegrationTest(unittest.TestCase):
+ """
+ A base class for integration test cases. It cleans up the Docker server
+ after itself.
+ """
+
+ def setUp(self):
+ self.tmp_imgs = []
+ self.tmp_containers = []
+ self.tmp_folders = []
+ self.tmp_volumes = []
+ self.tmp_networks = []
+ self.tmp_plugins = []
+ self.tmp_secrets = []
+ self.tmp_configs = []
+
+ def tearDown(self):
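+ # Open a fresh client for cleanup and remove every temporary
+ # resource, swallowing APIError so one failed removal does not
+ # abort the rest.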
+ client = docker.from_env(version=TEST_API_VERSION, use_ssh_client=True)
+ try:
+ for img in self.tmp_imgs:
+ try:
+ client.api.remove_image(img)
+ except docker.errors.APIError:
+ pass
+ for container in self.tmp_containers:
+ try:
+ client.api.remove_container(container, force=True, v=True)
+ except docker.errors.APIError:
+ pass
+ for network in self.tmp_networks:
+ try:
+ client.api.remove_network(network)
+ except docker.errors.APIError:
+ pass
+ for volume in self.tmp_volumes:
+ try:
+ client.api.remove_volume(volume)
+ except docker.errors.APIError:
+ pass
+
+ for secret in self.tmp_secrets:
+ try:
+ client.api.remove_secret(secret)
+ except docker.errors.APIError:
+ pass
+
+ for config in self.tmp_configs:
+ try:
+ client.api.remove_config(config)
+ except docker.errors.APIError:
+ pass
+
+ for folder in self.tmp_folders:
+ shutil.rmtree(folder)
+ finally:
+ client.close()
+
+
+@pytest.mark.skipif(not os.environ.get('DOCKER_HOST', '').startswith('ssh://'),
+ reason='DOCKER_HOST is not an SSH target')
+class BaseAPIIntegrationTest(BaseIntegrationTest):
+ """
+ A test case for `APIClient` integration tests. It sets up an `APIClient`
+ as `self.client`.
+ """
+ @classmethod
+ def setUpClass(cls):
+ cls.client = cls.get_client_instance()
+ cls.client.pull(TEST_IMG)
+
+ def tearDown(self):
+ super().tearDown()
+ self.client.close()
+
+ @staticmethod
+ def get_client_instance():
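+ # kwargs_from_env picks up DOCKER_HOST and related variables;
+ # use_ssh_client shells out to the ssh CLI instead of Paramiko.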
+ return docker.APIClient(
+ version=TEST_API_VERSION,
+ timeout=60,
+ use_ssh_client=True,
+ **kwargs_from_env()
+ )
+
+ @staticmethod
+ def _init_swarm(client, **kwargs):
+ return client.init_swarm(
+ '127.0.0.1', listen_addr=helpers.swarm_listen_addr(), **kwargs
+ )
+
+ def run_container(self, *args, **kwargs):
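+ # Create, start, and wait on the container; surface its logs
+ # if it exits non-zero.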
+ container = self.client.create_container(*args, **kwargs)
+ self.tmp_containers.append(container)
+ self.client.start(container)
+ exitcode = self.client.wait(container)['StatusCode']
+
+ if exitcode != 0:
+ output = self.client.logs(container)
+ raise Exception(
+ "Container exited with code {}:\n{}"
+ .format(exitcode, output))
+
+ return container
+
+ def create_and_start(self, image=TEST_IMG, command='top', **kwargs):
+ container = self.client.create_container(
+ image=image, command=command, **kwargs)
+ self.tmp_containers.append(container)
+ self.client.start(container)
+ return container
+
+ def execute(self, container, cmd, exit_code=0, **kwargs):
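+ # Run cmd inside the container via exec and assert on its
+ # exit code.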
+ exc = self.client.exec_create(container, cmd, **kwargs)
+ output = self.client.exec_start(exc)
+ actual_exit_code = self.client.exec_inspect(exc)['ExitCode']
+ msg = "Expected `{}` to exit with code {} but returned {}:\n{}".format(
+ " ".join(cmd), exit_code, actual_exit_code, output)
+ assert actual_exit_code == exit_code, msg
+
+ def init_swarm(self, **kwargs):
+ return self._init_swarm(self.client, **kwargs)
diff --git a/tests/ssh/config/client/id_rsa b/tests/ssh/config/client/id_rsa
new file mode 100644
index 0000000..0ec063e
--- /dev/null
+++ b/tests/ssh/config/client/id_rsa
@@ -0,0 +1,38 @@
+-----BEGIN OPENSSH PRIVATE KEY-----
+b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABlwAAAAdzc2gtcn
+NhAAAAAwEAAQAAAYEAvwYl5Gy/aBGxNzyb9UtqddlyuQR1t6kE+UX/gmBtAE2MjDyFTOvi
+F1cn90DcaZ7z172zwUCQrNKh3rj8GcthrG7d+UJ5pYK3MxT4l16LAg9jfsK3DkD2Rri40M
+lFD9siUVUky6afM5NhfMN5WhiAdyZNYVHDFBMXpisUGJPy+NG+a1ypGqy5OWsAbonI0UrT
+K3IT0R2dp+9eUxvs0r3/LQf1B0VymD6movyXuXoh98hlMwmOM5/rhKKgBW+FfJaSI/EcNx
+F5gmFcBtL4PuOECENoCZyIU5XJscJMp72Z/e57oODS5RiUPrAwpyLzGqcnB3xpDZQc93xb
+bvzkbMT6WW0zYP/Z6Gt2X/DqSMLxPxRzT6g3LRpbcMRIEMY+XxN+MdH2JxdPLXowFCSQmR
+N2LBoDWm7EuKQ/pEYSPN3hWb4I90NQHkytFfW0TO47o3HPUc/lfRm+c2BBzf5fD8RFZY9D
+pVEX/WZZJzUCvMUYefe4w1031UCgjDv50Wlh9m6tAAAFeM2kMyHNpDMhAAAAB3NzaC1yc2
+EAAAGBAL8GJeRsv2gRsTc8m/VLanXZcrkEdbepBPlF/4JgbQBNjIw8hUzr4hdXJ/dA3Gme
+89e9s8FAkKzSod64/BnLYaxu3flCeaWCtzMU+JdeiwIPY37Ctw5A9ka4uNDJRQ/bIlFVJM
+umnzOTYXzDeVoYgHcmTWFRwxQTF6YrFBiT8vjRvmtcqRqsuTlrAG6JyNFK0ytyE9Ednafv
+XlMb7NK9/y0H9QdFcpg+pqL8l7l6IffIZTMJjjOf64SioAVvhXyWkiPxHDcReYJhXAbS+D
+7jhAhDaAmciFOVybHCTKe9mf3ue6Dg0uUYlD6wMKci8xqnJwd8aQ2UHPd8W2785GzE+llt
+M2D/2ehrdl/w6kjC8T8Uc0+oNy0aW3DESBDGPl8TfjHR9icXTy16MBQkkJkTdiwaA1puxL
+ikP6RGEjzd4Vm+CPdDUB5MrRX1tEzuO6Nxz1HP5X0ZvnNgQc3+Xw/ERWWPQ6VRF/1mWSc1
+ArzFGHn3uMNdN9VAoIw7+dFpYfZurQAAAAMBAAEAAAGBAKtnotyiz+Vb6r57vh2OvEpfAd
+gOrmpMWVArhSfBykz5SOIU9C+fgVIcPJpaMuz7WiX97Ku9eZP5tJGbP2sN2ejV2ovtICZp
+cmV9rcp1ZRpGIKr/oS5DEDlJS1zdHQErSlHcqpWqPzQSTOmcpOk5Dxza25g1u2vp7dCG2x
+NqvhySZ+ECViK/Vby1zL9jFzTlhTJ4vFtpzauA2AyPBCPdpHkNqMoLgNYncXLSYHpnos8p
+m9T+AAFGwBhVrGz0Mr0mhRDnV/PgbKplKT7l+CGceb8LuWmj/vzuP5Wv6dglw3hJnT2V5p
+nTBp3dJ6R006+yvr5T/Xb+ObGqFfgfenjLfHjqbJ/gZdGWt4Le84g8tmSkjJBJ2Yj3kynQ
+sdfv9k7JJ4t5euoje0XW0YVN1ih5DdyO4hHDRD1lSTFYT5Gl2sCTt28qsMC12rWzFkezJo
+Fhewq2Ddtg4AK6SxqH4rFQCmgOR/ci7jv9TXS9xEQxYliyN5aNymRTyXmwqBIzjNKR6QAA
+AMEAxpme2upng9LS6Epa83d1gnWUilYPbpb1C8+1FgpnBv9zkjFE1vY0Vu4i9LcLGlCQ0x
+PB1Z16TQlEluqiSuSA0eyaWSQBF9NyGsOCOZ63lpJs/2FRBfcbUvHhv8/g1fv/xvI+FnE+
+DoAhz8V3byU8HUZer7pQY3hSxisdYdsaromxC8DSSPFQoxpxwh7WuP4c3veWkdL13h4fSN
+khGr3G1XGfsZOu6V6F1i7yMU6OcwBAxzPsHqZv66sT8lE6n4xjAAAAwQDzAaVaJqZ2ROoF
+loltJZUtE7o+zpoDzjOJyGYaCYTU4dHPN1aeYBjw8QfmJhdmZfJp9AeJDB/W0wzoHi2ONI
+chnQ1EdbCLk9pvA7rhfVdZaxPeHwniDp2iA/wZKTRG3hav9nEzS72uXuZprCsbBvGXeR0z
+iuIx5odVXG8qyuI9lDY6B/IoLg7zd+V6iw9mqWYlLLsgHiAvg32LAT4j0KoTufOqpnxqTQ
+P2EguTmxDWkfQmbEHdJvbD2tLQ90zMlwMAAADBAMk88wOA1i/TibH5gm/lAtKPcNKbrHfk
+7O9gdSZd2HL0fLjptpOplS89Y7muTElsRDRGiKq+7KV/sxQRNcITkxdTKu8CKnftFWHrLk
+9WHWVHXbu9h8ttsKeUr9i27ojxpe5I82of8k7fJTg1LxMnGzuDZfq1BGsQnOWrY7r1Yjcd
+8EtSrwOB+J/S4U+rR6kwUEFYeBkhE599P1EtHTCm8kWh368di9Q+Y/VIOa3qRx4hxuiCLI
+qj4ZpdVMk2cCNcjwAAAAAB
+-----END OPENSSH PRIVATE KEY-----
diff --git a/tests/ssh/config/client/id_rsa.pub b/tests/ssh/config/client/id_rsa.pub
new file mode 100644
index 0000000..33252fe
--- /dev/null
+++ b/tests/ssh/config/client/id_rsa.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/BiXkbL9oEbE3PJv1S2p12XK5BHW3qQT5Rf+CYG0ATYyMPIVM6+IXVyf3QNxpnvPXvbPBQJCs0qHeuPwZy2Gsbt35QnmlgrczFPiXXosCD2N+wrcOQPZGuLjQyUUP2yJRVSTLpp8zk2F8w3laGIB3Jk1hUcMUExemKxQYk/L40b5rXKkarLk5awBuicjRStMrchPRHZ2n715TG+zSvf8tB/UHRXKYPqai/Je5eiH3yGUzCY4zn+uEoqAFb4V8lpIj8Rw3EXmCYVwG0vg+44QIQ2gJnIhTlcmxwkynvZn97nug4NLlGJQ+sDCnIvMapycHfGkNlBz3fFtu/ORsxPpZbTNg/9noa3Zf8OpIwvE/FHNPqDctGltwxEgQxj5fE34x0fYnF08tejAUJJCZE3YsGgNabsS4pD+kRhI83eFZvgj3Q1AeTK0V9bRM7jujcc9Rz+V9Gb5zYEHN/l8PxEVlj0OlURf9ZlknNQK8xRh597jDXTfVQKCMO/nRaWH2bq0=
diff --git a/tests/ssh/config/server/known_ed25519 b/tests/ssh/config/server/known_ed25519
new file mode 100644
index 0000000..b79f217
--- /dev/null
+++ b/tests/ssh/config/server/known_ed25519
@@ -0,0 +1,7 @@
+-----BEGIN OPENSSH PRIVATE KEY-----
+b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
+QyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4QAAAJgIMffcCDH3
+3AAAAAtzc2gtZWQyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4Q
+AAAEDeXnt5AuNk4oTHjMU1vUsEwh64fuEPu4hXsG6wCVt/6Iax81dU/Xw3tcLohAa67FdB
+FtPGU8YuP7n8IHKP16DhAAAAEXJvb3RAMGRkZmQyMWRkYjM3AQIDBA==
+-----END OPENSSH PRIVATE KEY-----
diff --git a/tests/ssh/config/server/known_ed25519.pub b/tests/ssh/config/server/known_ed25519.pub
new file mode 100644
index 0000000..ec0296e
--- /dev/null
+++ b/tests/ssh/config/server/known_ed25519.pub
@@ -0,0 +1 @@
+ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIax81dU/Xw3tcLohAa67FdBFtPGU8YuP7n8IHKP16Dh docker-py integration tests known
diff --git a/tests/ssh/config/server/sshd_config b/tests/ssh/config/server/sshd_config
new file mode 100644
index 0000000..970dca3
--- /dev/null
+++ b/tests/ssh/config/server/sshd_config
@@ -0,0 +1,3 @@
+IgnoreUserKnownHosts yes
+PubkeyAuthentication yes
+PermitRootLogin yes
diff --git a/tests/ssh/config/server/unknown_ed25519 b/tests/ssh/config/server/unknown_ed25519
new file mode 100644
index 0000000..b79f217
--- /dev/null
+++ b/tests/ssh/config/server/unknown_ed25519
@@ -0,0 +1,7 @@
+-----BEGIN OPENSSH PRIVATE KEY-----
+b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
+QyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4QAAAJgIMffcCDH3
+3AAAAAtzc2gtZWQyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4Q
+AAAEDeXnt5AuNk4oTHjMU1vUsEwh64fuEPu4hXsG6wCVt/6Iax81dU/Xw3tcLohAa67FdB
+FtPGU8YuP7n8IHKP16DhAAAAEXJvb3RAMGRkZmQyMWRkYjM3AQIDBA==
+-----END OPENSSH PRIVATE KEY-----
diff --git a/tests/ssh/config/server/unknown_ed25519.pub b/tests/ssh/config/server/unknown_ed25519.pub
new file mode 100644
index 0000000..a24403e
--- /dev/null
+++ b/tests/ssh/config/server/unknown_ed25519.pub
@@ -0,0 +1 @@
+ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIax81dU/Xw3tcLohAa67FdBFtPGU8YuP7n8IHKP16Dh docker-py integration tests unknown
diff --git a/tests/ssh/connect_test.py b/tests/ssh/connect_test.py
new file mode 100644
index 0000000..3d33a96
--- /dev/null
+++ b/tests/ssh/connect_test.py
@@ -0,0 +1,22 @@
+import os
+import unittest
+
+import docker
+import paramiko.ssh_exception
+import pytest
+from .base import TEST_API_VERSION
+
+
+class SSHConnectionTest(unittest.TestCase):
+ @pytest.mark.skipif('UNKNOWN_DOCKER_SSH_HOST' not in os.environ,
+ reason='Unknown Docker SSH host not configured')
+ def test_ssh_unknown_host(self):
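+ # The Paramiko transport should reject a host missing from
+ # known_hosts rather than connect silently.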
+ with self.assertRaises(paramiko.ssh_exception.SSHException) as cm:
+ docker.APIClient(
+ version=TEST_API_VERSION,
+ timeout=60,
+ # only valid with the Paramiko transport, not the ssh CLI
+ use_ssh_client=False,
+ base_url=os.environ['UNKNOWN_DOCKER_SSH_HOST'],
+ )
+ self.assertIn('not found in known_hosts', str(cm.exception))
diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py
index a7e183c..3a2fbde 100644
--- a/tests/unit/api_container_test.py
+++ b/tests/unit/api_container_test.py
@@ -1,25 +1,19 @@
-# -*- coding: utf-8 -*-
-
import datetime
import json
import signal
import docker
+from docker.api import APIClient
+from unittest import mock
import pytest
-import six
from . import fake_api
from ..helpers import requires_api_version
from .api_test import (
BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS,
- fake_inspect_container
+ fake_inspect_container, url_base
)
-try:
- from unittest import mock
-except ImportError:
- import mock
-
def fake_inspect_container_tty(self, container):
return fake_inspect_container(self, container, tty=True)
@@ -30,7 +24,8 @@ class StartContainerTest(BaseAPIClientTest):
self.client.start(fake_api.FAKE_CONTAINER_ID)
args = fake_request.call_args
- assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/start'
+ assert args[0][1] == (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/start')
assert 'data' not in args[1]
assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
@@ -123,7 +118,8 @@ class StartContainerTest(BaseAPIClientTest):
self.client.start({'Id': fake_api.FAKE_CONTAINER_ID})
args = fake_request.call_args
- assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/start'
+ assert args[0][1] == (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/start')
assert 'data' not in args[1]
assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
@@ -352,6 +348,22 @@ class CreateContainerTest(BaseAPIClientTest):
assert args[1]['headers'] == {'Content-Type': 'application/json'}
assert args[1]['params'] == {'name': 'marisa-kirisame'}
+ def test_create_container_with_platform(self):
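+ # platform travels as a query parameter, not in the JSON body.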
+ self.client.create_container('busybox', 'true',
+ platform='linux')
+
+ args = fake_request.call_args
+ assert args[0][1] == url_prefix + 'containers/create'
+ assert json.loads(args[1]['data']) == json.loads('''
+ {"Tty": false, "Image": "busybox", "Cmd": ["true"],
+ "AttachStdin": false,
+ "AttachStderr": true, "AttachStdout": true,
+ "StdinOnce": false,
+ "OpenStdin": false, "NetworkDisabled": false}
+ ''')
+ assert args[1]['headers'] == {'Content-Type': 'application/json'}
+ assert args[1]['params'] == {'name': None, 'platform': 'linux'}
+
def test_create_container_with_mem_limit_as_int(self):
self.client.create_container(
'busybox', 'true', host_config=self.client.create_host_config(
@@ -767,10 +779,71 @@ class CreateContainerTest(BaseAPIClientTest):
assert args[1]['headers'] == {'Content-Type': 'application/json'}
assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
+ def test_create_container_with_device_requests(self):
+ client = APIClient(version='1.40')
+ fake_api.fake_responses.setdefault(
+ f'{fake_api.prefix}/v1.40/containers/create',
+ fake_api.post_fake_create_container,
+ )
+ client.create_container(
+ 'busybox', 'true', host_config=client.create_host_config(
+ device_requests=[
+ {
+ 'device_ids': [
+ '0',
+ 'GPU-3a23c669-1f69-c64e-cf85-44e9b07e7a2a'
+ ]
+ },
+ {
+ 'driver': 'nvidia',
+ 'Count': -1,
+ 'capabilities': [
+ ['gpu', 'utility']
+ ],
+ 'options': {
+ 'key': 'value'
+ }
+ }
+ ]
+ )
+ )
+
+ args = fake_request.call_args
+ assert args[0][1] == url_base + 'v1.40/' + 'containers/create'
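+ # create_host_config fills in defaults (empty driver, zero
+ # count, empty lists/dict) for DeviceRequest fields left unset.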
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = client.create_host_config()
+ expected_payload['HostConfig']['DeviceRequests'] = [
+ {
+ 'Driver': '',
+ 'Count': 0,
+ 'DeviceIDs': [
+ '0',
+ 'GPU-3a23c669-1f69-c64e-cf85-44e9b07e7a2a'
+ ],
+ 'Capabilities': [],
+ 'Options': {}
+ },
+ {
+ 'Driver': 'nvidia',
+ 'Count': -1,
+ 'DeviceIDs': [],
+ 'Capabilities': [
+ ['gpu', 'utility']
+ ],
+ 'Options': {
+ 'key': 'value'
+ }
+ }
+ ]
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers']['Content-Type'] == 'application/json'
+ assert set(args[1]['headers']) <= {'Content-Type', 'User-Agent'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
+
def test_create_container_with_labels_dict(self):
labels_dict = {
- six.text_type('foo'): six.text_type('1'),
- six.text_type('bar'): six.text_type('2'),
+ 'foo': '1',
+ 'bar': '2',
}
self.client.create_container(
@@ -786,12 +859,12 @@ class CreateContainerTest(BaseAPIClientTest):
def test_create_container_with_labels_list(self):
labels_list = [
- six.text_type('foo'),
- six.text_type('bar'),
+ 'foo',
+ 'bar',
]
labels_dict = {
- six.text_type('foo'): six.text_type(),
- six.text_type('bar'): six.text_type(),
+ 'foo': '',
+ 'bar': '',
}
self.client.create_container(
@@ -951,11 +1024,11 @@ class CreateContainerTest(BaseAPIClientTest):
def test_create_container_with_unicode_envvars(self):
envvars_dict = {
- 'foo': u'☃',
+ 'foo': '☃',
}
expected = [
- u'foo=☃'
+ 'foo=☃'
]
self.client.create_container(
@@ -1024,7 +1097,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/resize',
+ (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/resize'),
params={'h': 15, 'w': 120},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1037,7 +1111,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/rename',
+ (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/rename'),
params={'name': 'foobar'},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1047,7 +1122,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/wait',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/wait',
timeout=None,
params={}
)
@@ -1057,7 +1132,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/wait',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/wait',
timeout=None,
params={}
)
@@ -1069,14 +1144,14 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/logs',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
'tail': 'all'},
timeout=DEFAULT_TIMEOUT_SECONDS,
stream=False
)
- assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii')
+ assert logs == b'Flowering Nights\n(Sakuya Iyazoi)\n'
def test_logs_with_dict_instead_of_id(self):
with mock.patch('docker.api.client.APIClient.inspect_container',
@@ -1085,14 +1160,14 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/logs',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
'tail': 'all'},
timeout=DEFAULT_TIMEOUT_SECONDS,
stream=False
)
- assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii')
+ assert logs == b'Flowering Nights\n(Sakuya Iyazoi)\n'
def test_log_streaming(self):
with mock.patch('docker.api.client.APIClient.inspect_container',
@@ -1102,7 +1177,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/logs',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
'tail': 'all'},
timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1117,7 +1192,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/logs',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
'tail': 'all'},
timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1131,7 +1206,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/logs',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
'tail': 'all'},
timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1146,7 +1221,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/logs',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
'tail': 'all'},
timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1162,7 +1237,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/logs',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
'tail': 10},
timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1178,7 +1253,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/logs',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
'tail': 'all', 'since': ts},
timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1195,7 +1270,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/logs',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
'tail': 'all', 'since': ts},
timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1221,7 +1296,7 @@ class ContainerTest(BaseAPIClientTest):
assert m.called
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/logs',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
'tail': 'all'},
timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1233,7 +1308,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/changes',
+ (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/changes'),
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1242,7 +1318,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/changes',
+ (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/changes'),
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1251,7 +1328,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/json',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/json',
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1262,7 +1339,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/stop',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/stop',
params={'t': timeout},
timeout=(DEFAULT_TIMEOUT_SECONDS + timeout)
)
@@ -1275,7 +1352,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/stop',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/stop',
params={'t': timeout},
timeout=(DEFAULT_TIMEOUT_SECONDS + timeout)
)
@@ -1285,7 +1362,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/pause',
+ (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/pause'),
timeout=(DEFAULT_TIMEOUT_SECONDS)
)
@@ -1294,7 +1372,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/unpause',
+ (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/unpause'),
timeout=(DEFAULT_TIMEOUT_SECONDS)
)
@@ -1303,7 +1382,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/kill',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/kill',
params={},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1313,7 +1392,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/kill',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/kill',
params={},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1323,7 +1402,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/kill',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/kill',
params={'signal': signal.SIGTERM},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1333,7 +1412,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/restart',
+ (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/restart'),
params={'t': 2},
timeout=(DEFAULT_TIMEOUT_SECONDS + 2)
)
@@ -1343,7 +1423,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'containers/3cc2351ab11b/restart',
+ (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/restart'),
params={'t': 2},
timeout=(DEFAULT_TIMEOUT_SECONDS + 2)
)
@@ -1353,7 +1434,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'DELETE',
- url_prefix + 'containers/3cc2351ab11b',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID,
params={'v': False, 'link': False, 'force': False},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1363,7 +1444,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'DELETE',
- url_prefix + 'containers/3cc2351ab11b',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID,
params={'v': False, 'link': False, 'force': False},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1373,7 +1454,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/export',
+ (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/export'),
stream=True,
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1383,7 +1465,8 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/export',
+ (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/export'),
stream=True,
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1393,7 +1476,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/json',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/json',
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1409,7 +1492,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/stats',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/stats',
timeout=60,
stream=True
)
@@ -1419,7 +1502,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/top',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/top',
params={},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1429,7 +1512,7 @@ class ContainerTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'containers/3cc2351ab11b/top',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/top',
params={'ps_args': 'waux'},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -1441,7 +1524,8 @@ class ContainerTest(BaseAPIClientTest):
blkio_weight=345
)
args = fake_request.call_args
- assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/update'
+ assert args[0][1] == (url_prefix + 'containers/' +
+ fake_api.FAKE_CONTAINER_ID + '/update')
assert json.loads(args[1]['data']) == {
'Memory': 2 * 1024, 'CpuShares': 124, 'BlkioWeight': 345
}
diff --git a/tests/unit/api_exec_test.py b/tests/unit/api_exec_test.py
index a9d2dd5..4504250 100644
--- a/tests/unit/api_exec_test.py
+++ b/tests/unit/api_exec_test.py
@@ -11,7 +11,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1'])
args = fake_request.call_args
- assert 'POST' == args[0][0], url_prefix + 'containers/{0}/exec'.format(
+ assert 'POST' == args[0][0], url_prefix + 'containers/{}/exec'.format(
fake_api.FAKE_CONTAINER_ID
)
@@ -32,7 +32,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_start(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args
- assert args[0][1] == url_prefix + 'exec/{0}/start'.format(
+ assert args[0][1] == url_prefix + 'exec/{}/start'.format(
fake_api.FAKE_EXEC_ID
)
@@ -51,7 +51,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True)
args = fake_request.call_args
- assert args[0][1] == url_prefix + 'exec/{0}/start'.format(
+ assert args[0][1] == url_prefix + 'exec/{}/start'.format(
fake_api.FAKE_EXEC_ID
)
@@ -68,7 +68,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_inspect(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args
- assert args[0][1] == url_prefix + 'exec/{0}/json'.format(
+ assert args[0][1] == url_prefix + 'exec/{}/json'.format(
fake_api.FAKE_EXEC_ID
)
@@ -77,7 +77,7 @@ class ExecTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'exec/{0}/resize'.format(fake_api.FAKE_EXEC_ID),
+ url_prefix + f'exec/{fake_api.FAKE_EXEC_ID}/resize',
params={'h': 20, 'w': 60},
timeout=DEFAULT_TIMEOUT_SECONDS
)
diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py
index 1e2315d..e285932 100644
--- a/tests/unit/api_image_test.py
+++ b/tests/unit/api_image_test.py
@@ -3,16 +3,12 @@ import pytest
from . import fake_api
from docker import auth
+from unittest import mock
from .api_test import (
BaseAPIClientTest, fake_request, DEFAULT_TIMEOUT_SECONDS, url_prefix,
fake_resolve_authconfig
)
-try:
- from unittest import mock
-except ImportError:
- import mock
-
class ImageTest(BaseAPIClientTest):
def test_image_viz(self):
@@ -26,7 +22,18 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
- params={'filter': None, 'only_ids': 0, 'all': 1},
+ params={'only_ids': 0, 'all': 1},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_images_name(self):
+ self.client.images('foo:bar')
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'images/json',
+ params={'only_ids': 0, 'all': 0,
+ 'filters': '{"reference": ["foo:bar"]}'},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -36,7 +43,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
- params={'filter': None, 'only_ids': 1, 'all': 1},
+ params={'only_ids': 1, 'all': 1},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -46,7 +53,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
- params={'filter': None, 'only_ids': 1, 'all': 0},
+ params={'only_ids': 1, 'all': 0},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -56,7 +63,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
- params={'filter': None, 'only_ids': 0, 'all': 0,
+ params={'only_ids': 0, 'all': 0,
'filters': '{"dangling": ["true"]}'},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -67,7 +74,7 @@ class ImageTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][1] == url_prefix + 'images/create'
assert args[1]['params'] == {
- 'tag': None, 'fromImage': 'joffrey/test001'
+ 'tag': 'latest', 'fromImage': 'joffrey/test001'
}
assert not args[1]['stream']
@@ -77,7 +84,7 @@ class ImageTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][1] == url_prefix + 'images/create'
assert args[1]['params'] == {
- 'tag': None, 'fromImage': 'joffrey/test001'
+ 'tag': 'latest', 'fromImage': 'joffrey/test001'
}
assert args[1]['stream']
@@ -93,7 +100,7 @@ class ImageTest(BaseAPIClientTest):
'repo': None,
'comment': None,
'tag': None,
- 'container': '3cc2351ab11b',
+ 'container': fake_api.FAKE_CONTAINER_ID,
'author': None,
'changes': None
},
@@ -105,7 +112,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'DELETE',
- url_prefix + 'images/e9aa60c60128',
+ url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID,
params={'force': False, 'noprune': False},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -280,7 +287,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'images/e9aa60c60128/tag',
+ url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
params={
'tag': None,
'repo': 'repo',
@@ -298,7 +305,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'images/e9aa60c60128/tag',
+ url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
params={
'tag': 'tag',
'repo': 'repo',
@@ -313,7 +320,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'images/e9aa60c60128/tag',
+ url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
params={
'tag': None,
'repo': 'repo',
@@ -327,7 +334,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
- url_prefix + 'images/e9aa60c60128/get',
+ url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/get',
stream=True,
timeout=DEFAULT_TIMEOUT_SECONDS
)
diff --git a/tests/unit/api_network_test.py b/tests/unit/api_network_test.py
index c78554d..8afab73 100644
--- a/tests/unit/api_network_test.py
+++ b/tests/unit/api_network_test.py
@@ -1,14 +1,8 @@
import json
-import six
-
from .api_test import BaseAPIClientTest, url_prefix, response
from docker.types import IPAMConfig, IPAMPool
-
-try:
- from unittest import mock
-except ImportError:
- import mock
+from unittest import mock
class NetworkTest(BaseAPIClientTest):
@@ -103,16 +97,16 @@ class NetworkTest(BaseAPIClientTest):
self.client.remove_network(network_id)
args = delete.call_args
- assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id)
+ assert args[0][0] == url_prefix + f'networks/{network_id}'
def test_inspect_network(self):
network_id = 'abc12345'
network_name = 'foo'
network_data = {
- six.u('name'): network_name,
- six.u('id'): network_id,
- six.u('driver'): 'bridge',
- six.u('containers'): {},
+ 'name': network_name,
+ 'id': network_id,
+ 'driver': 'bridge',
+ 'containers': {},
}
network_response = response(status_code=200, content=network_data)
@@ -123,7 +117,7 @@ class NetworkTest(BaseAPIClientTest):
assert result == network_data
args = get.call_args
- assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id)
+ assert args[0][0] == url_prefix + f'networks/{network_id}'
def test_connect_container_to_network(self):
network_id = 'abc12345'
@@ -136,11 +130,12 @@ class NetworkTest(BaseAPIClientTest):
container={'Id': container_id},
net_id=network_id,
aliases=['foo', 'bar'],
- links=[('baz', 'quux')]
+ links=[('baz', 'quux')],
+ driver_opt={'com.docker-py.setting': 'yes'},
)
assert post.call_args[0][0] == (
- url_prefix + 'networks/{0}/connect'.format(network_id)
+ url_prefix + f'networks/{network_id}/connect'
)
assert json.loads(post.call_args[1]['data']) == {
@@ -148,6 +143,7 @@ class NetworkTest(BaseAPIClientTest):
'EndpointConfig': {
'Aliases': ['foo', 'bar'],
'Links': ['baz:quux'],
+ 'DriverOpts': {'com.docker-py.setting': 'yes'},
},
}
@@ -162,7 +158,7 @@ class NetworkTest(BaseAPIClientTest):
container={'Id': container_id}, net_id=network_id)
assert post.call_args[0][0] == (
- url_prefix + 'networks/{0}/disconnect'.format(network_id)
+ url_prefix + f'networks/{network_id}/disconnect'
)
assert json.loads(post.call_args[1]['data']) == {
'Container': container_id
diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py
index f4d220a..a2348f0 100644
--- a/tests/unit/api_test.py
+++ b/tests/unit/api_test.py
@@ -1,31 +1,28 @@
import datetime
-import json
import io
+import json
import os
import re
import shutil
import socket
+import struct
import tempfile
import threading
import time
import unittest
+import socketserver
+import http.server
import docker
-from docker.api import APIClient
+import pytest
import requests
+from docker.api import APIClient
+from docker.constants import DEFAULT_DOCKER_API_VERSION
from requests.packages import urllib3
-import six
-import struct
+from unittest import mock
from . import fake_api
-import pytest
-
-try:
- from unittest import mock
-except ImportError:
- import mock
-
DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS
@@ -34,7 +31,7 @@ def response(status_code=200, content='', headers=None, reason=None, elapsed=0,
request=None, raw=None):
res = requests.Response()
res.status_code = status_code
- if not isinstance(content, six.binary_type):
+ if not isinstance(content, bytes):
content = json.dumps(content).encode('ascii')
res._content = content
res.headers = requests.structures.CaseInsensitiveDict(headers or {})
@@ -60,7 +57,7 @@ def fake_resp(method, url, *args, **kwargs):
elif (url, method) in fake_api.fake_responses:
key = (url, method)
if not key:
- raise Exception('{0} {1}'.format(method, url))
+ raise Exception(f'{method} {url}')
status_code, content = fake_api.fake_responses[key]()
return response(status_code=status_code, content=content)
@@ -85,11 +82,11 @@ def fake_delete(self, url, *args, **kwargs):
def fake_read_from_socket(self, response, stream, tty=False, demux=False):
- return six.binary_type()
+ return bytes()
-url_base = '{0}/'.format(fake_api.prefix)
-url_prefix = '{0}v{1}/'.format(
+url_base = f'{fake_api.prefix}/'
+url_prefix = '{}v{}/'.format(
url_base,
docker.constants.DEFAULT_DOCKER_API_VERSION)
@@ -105,7 +102,7 @@ class BaseAPIClientTest(unittest.TestCase):
_read_from_socket=fake_read_from_socket
)
self.patcher.start()
- self.client = APIClient()
+ self.client = APIClient(version=DEFAULT_DOCKER_API_VERSION)
def tearDown(self):
self.client.close()
@@ -133,20 +130,20 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_valid_resource(self):
url = self.client._url('/hello/{0}/world', 'somename')
- assert url == '{0}{1}'.format(url_prefix, 'hello/somename/world')
+ assert url == '{}{}'.format(url_prefix, 'hello/somename/world')
url = self.client._url(
'/hello/{0}/world/{1}', 'somename', 'someothername'
)
- assert url == '{0}{1}'.format(
+ assert url == '{}{}'.format(
url_prefix, 'hello/somename/world/someothername'
)
url = self.client._url('/hello/{0}/world', 'some?name')
- assert url == '{0}{1}'.format(url_prefix, 'hello/some%3Fname/world')
+ assert url == '{}{}'.format(url_prefix, 'hello/some%3Fname/world')
url = self.client._url("/images/{0}/push", "localhost:5000/image")
- assert url == '{0}{1}'.format(
+ assert url == '{}{}'.format(
url_prefix, 'images/localhost:5000/image/push'
)
@@ -156,13 +153,13 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_no_resource(self):
url = self.client._url('/simple')
- assert url == '{0}{1}'.format(url_prefix, 'simple')
+ assert url == '{}{}'.format(url_prefix, 'simple')
def test_url_unversioned_api(self):
url = self.client._url(
'/hello/{0}/world', 'somename', versioned_api=False
)
- assert url == '{0}{1}'.format(url_base, 'hello/somename/world')
+ assert url == '{}{}'.format(url_base, 'hello/somename/world')
def test_version(self):
self.client.version()
@@ -184,13 +181,13 @@ class DockerApiTest(BaseAPIClientTest):
def test_retrieve_server_version(self):
client = APIClient(version="auto")
- assert isinstance(client._version, six.string_types)
+ assert isinstance(client._version, str)
assert not (client._version == "auto")
client.close()
def test_auto_retrieve_server_version(self):
version = self.client._retrieve_server_version()
- assert isinstance(version, six.string_types)
+ assert isinstance(version, str)
def test_info(self):
self.client.info()
@@ -282,27 +279,37 @@ class DockerApiTest(BaseAPIClientTest):
return socket_adapter.socket_path
def test_url_compatibility_unix(self):
- c = APIClient(base_url="unix://socket")
+ c = APIClient(
+ base_url="unix://socket",
+ version=DEFAULT_DOCKER_API_VERSION)
assert self._socket_path_for_client_session(c) == '/socket'
def test_url_compatibility_unix_triple_slash(self):
- c = APIClient(base_url="unix:///socket")
+ c = APIClient(
+ base_url="unix:///socket",
+ version=DEFAULT_DOCKER_API_VERSION)
assert self._socket_path_for_client_session(c) == '/socket'
def test_url_compatibility_http_unix_triple_slash(self):
- c = APIClient(base_url="http+unix:///socket")
+ c = APIClient(
+ base_url="http+unix:///socket",
+ version=DEFAULT_DOCKER_API_VERSION)
assert self._socket_path_for_client_session(c) == '/socket'
def test_url_compatibility_http(self):
- c = APIClient(base_url="http://hostname:1234")
+ c = APIClient(
+ base_url="http://hostname:1234",
+ version=DEFAULT_DOCKER_API_VERSION)
assert c.base_url == "http://hostname:1234"
def test_url_compatibility_tcp(self):
- c = APIClient(base_url="tcp://hostname:1234")
+ c = APIClient(
+ base_url="tcp://hostname:1234",
+ version=DEFAULT_DOCKER_API_VERSION)
assert c.base_url == "http://hostname:1234"
@@ -311,7 +318,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with(
'DELETE',
- url_prefix + 'containers/3cc2351ab11b',
+ url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID,
params={'v': False, 'link': True, 'force': False},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -327,8 +334,7 @@ class DockerApiTest(BaseAPIClientTest):
def test_stream_helper_decoding(self):
status_code, content = fake_api.fake_responses[url_prefix + 'events']()
content_str = json.dumps(content)
- if six.PY3:
- content_str = content_str.encode('utf-8')
+ content_str = content_str.encode('utf-8')
body = io.BytesIO(content_str)
# mock a stream interface
@@ -372,7 +378,7 @@ class UnixSocketStreamTest(unittest.TestCase):
self.server_socket = self._setup_socket()
self.stop_server = False
server_thread = threading.Thread(target=self.run_server)
- server_thread.setDaemon(True)
+ server_thread.daemon = True
server_thread.start()
self.response = None
self.request_handler = None
@@ -395,7 +401,7 @@ class UnixSocketStreamTest(unittest.TestCase):
while not self.stop_server:
try:
connection, client_address = self.server_socket.accept()
- except socket.error:
+ except OSError:
# Probably no connection to accept yet
time.sleep(0.01)
continue
@@ -447,7 +453,9 @@ class UnixSocketStreamTest(unittest.TestCase):
b'\r\n'
) + b'\r\n'.join(lines)
- with APIClient(base_url="http+unix://" + self.socket_file) as client:
+ with APIClient(
+ base_url="http+unix://" + self.socket_file,
+ version=DEFAULT_DOCKER_API_VERSION) as client:
for i in range(5):
try:
stream = client.build(
@@ -477,10 +485,10 @@ class TCPSocketStreamTest(unittest.TestCase):
@classmethod
def setup_class(cls):
- cls.server = six.moves.socketserver.ThreadingTCPServer(
+ cls.server = socketserver.ThreadingTCPServer(
('', 0), cls.get_handler_class())
cls.thread = threading.Thread(target=cls.server.serve_forever)
- cls.thread.setDaemon(True)
+ cls.thread.daemon = True
cls.thread.start()
cls.address = 'http://{}:{}'.format(
socket.gethostname(), cls.server.server_address[1])
@@ -496,7 +504,7 @@ class TCPSocketStreamTest(unittest.TestCase):
stdout_data = cls.stdout_data
stderr_data = cls.stderr_data
- class Handler(six.moves.BaseHTTPServer.BaseHTTPRequestHandler, object):
+ class Handler(http.server.BaseHTTPRequestHandler):
def do_POST(self):
resp_data = self.get_resp_data()
self.send_response(101)
@@ -522,7 +530,7 @@ class TCPSocketStreamTest(unittest.TestCase):
data += stderr_data
return data
else:
- raise Exception('Unknown path {0}'.format(path))
+ raise Exception(f'Unknown path {path}')
@staticmethod
def frame_header(stream, data):
@@ -532,7 +540,10 @@ class TCPSocketStreamTest(unittest.TestCase):
def request(self, stream=None, tty=None, demux=None):
assert stream is not None and tty is not None and demux is not None
- with APIClient(base_url=self.address) as client:
+ with APIClient(
+ base_url=self.address,
+ version=DEFAULT_DOCKER_API_VERSION
+ ) as client:
if tty:
url = client._url('/tty')
else:
@@ -597,7 +608,7 @@ class UserAgentTest(unittest.TestCase):
self.patcher.stop()
def test_default_user_agent(self):
- client = APIClient()
+ client = APIClient(version=DEFAULT_DOCKER_API_VERSION)
client.version()
assert self.mock_send.call_count == 1
@@ -606,7 +617,9 @@ class UserAgentTest(unittest.TestCase):
assert headers['User-Agent'] == expected
def test_custom_user_agent(self):
- client = APIClient(user_agent='foo/bar')
+ client = APIClient(
+ user_agent='foo/bar',
+ version=DEFAULT_DOCKER_API_VERSION)
client.version()
assert self.mock_send.call_count == 1
@@ -615,7 +628,7 @@ class UserAgentTest(unittest.TestCase):
class DisableSocketTest(unittest.TestCase):
- class DummySocket(object):
+ class DummySocket:
def __init__(self, timeout=60):
self.timeout = timeout
@@ -626,7 +639,7 @@ class DisableSocketTest(unittest.TestCase):
return self.timeout
def setUp(self):
- self.client = APIClient()
+ self.client = APIClient(version=DEFAULT_DOCKER_API_VERSION)
def test_disable_socket_timeout(self):
"""Test that the timeout is disabled on a generic socket object."""
diff --git a/tests/unit/api_volume_test.py b/tests/unit/api_volume_test.py
index 7850c22..a8d9193 100644
--- a/tests/unit/api_volume_test.py
+++ b/tests/unit/api_volume_test.py
@@ -104,7 +104,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][0] == 'GET'
- assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name)
+ assert args[0][1] == f'{url_prefix}volumes/{name}'
def test_remove_volume(self):
name = 'perfectcherryblossom'
@@ -112,4 +112,4 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][0] == 'DELETE'
- assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name)
+ assert args[0][1] == f'{url_prefix}volumes/{name}'
diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py
index aac8910..dd5b5f8 100644
--- a/tests/unit/auth_test.py
+++ b/tests/unit/auth_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import base64
import json
import os
@@ -10,13 +8,9 @@ import tempfile
import unittest
from docker import auth, credentials, errors
+from unittest import mock
import pytest
-try:
- from unittest import mock
-except ImportError:
- import mock
-
class RegressionTest(unittest.TestCase):
def test_803_urlsafe_encode(self):
@@ -239,7 +233,7 @@ class LoadConfigTest(unittest.TestCase):
cfg_path = os.path.join(folder, '.dockercfg')
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
with open(cfg_path, 'w') as f:
- f.write('auth = {0}\n'.format(auth_))
+ f.write(f'auth = {auth_}\n')
f.write('email = sakuya@scarlet.net')
cfg = auth.load_config(cfg_path)
@@ -297,13 +291,13 @@ class LoadConfigTest(unittest.TestCase):
self.addCleanup(shutil.rmtree, folder)
dockercfg_path = os.path.join(folder,
- '.{0}.dockercfg'.format(
+ '.{}.dockercfg'.format(
random.randrange(100000)))
registry = 'https://your.private.registry.io'
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = {
registry: {
- 'auth': '{0}'.format(auth_),
+ 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}
@@ -329,7 +323,7 @@ class LoadConfigTest(unittest.TestCase):
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = {
registry: {
- 'auth': '{0}'.format(auth_),
+ 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}
@@ -357,7 +351,7 @@ class LoadConfigTest(unittest.TestCase):
config = {
'auths': {
registry: {
- 'auth': '{0}'.format(auth_),
+ 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}
@@ -386,7 +380,7 @@ class LoadConfigTest(unittest.TestCase):
config = {
'auths': {
registry: {
- 'auth': '{0}'.format(auth_),
+ 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}
@@ -794,9 +788,9 @@ class InMemoryStore(credentials.Store):
}
def list(self):
- return dict(
- [(k, v['Username']) for k, v in self.__store.items()]
- )
+ return {
+ k: v['Username'] for k, v in self.__store.items()
+ }
def erase(self, server):
del self.__store[server]
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index cce99c5..e7c7eec 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1,22 +1,20 @@
import datetime
-import docker
-from docker.utils import kwargs_from_env
-from docker.constants import (
- DEFAULT_DOCKER_API_VERSION, DEFAULT_TIMEOUT_SECONDS
-)
import os
import unittest
-from . import fake_api
+import docker
import pytest
+from docker.constants import (
+ DEFAULT_DOCKER_API_VERSION, DEFAULT_TIMEOUT_SECONDS,
+ DEFAULT_MAX_POOL_SIZE, IS_WINDOWS_PLATFORM
+)
+from docker.utils import kwargs_from_env
+from unittest import mock
-try:
- from unittest import mock
-except ImportError:
- import mock
-
+from . import fake_api
TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs')
+POOL_SIZE = 20
class ClientTest(unittest.TestCase):
@@ -25,33 +23,33 @@ class ClientTest(unittest.TestCase):
def test_events(self, mock_func):
since = datetime.datetime(2016, 1, 1, 0, 0)
mock_func.return_value = fake_api.get_fake_events()[1]
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.events(since=since) == mock_func.return_value
mock_func.assert_called_with(since=since)
@mock.patch('docker.api.APIClient.info')
def test_info(self, mock_func):
mock_func.return_value = fake_api.get_fake_info()[1]
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.info() == mock_func.return_value
mock_func.assert_called_with()
@mock.patch('docker.api.APIClient.ping')
def test_ping(self, mock_func):
mock_func.return_value = True
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.ping() is True
mock_func.assert_called_with()
@mock.patch('docker.api.APIClient.version')
def test_version(self, mock_func):
mock_func.return_value = fake_api.get_fake_version()[1]
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.version() == mock_func.return_value
mock_func.assert_called_with()
def test_call_api_client_method(self):
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
with pytest.raises(AttributeError) as cm:
client.create_container()
s = cm.exconly()
@@ -65,7 +63,9 @@ class ClientTest(unittest.TestCase):
assert "this method is now on the object APIClient" not in s
def test_call_containers(self):
- client = docker.DockerClient(**kwargs_from_env())
+ client = docker.DockerClient(
+ version=DEFAULT_DOCKER_API_VERSION,
+ **kwargs_from_env())
with pytest.raises(TypeError) as cm:
client.containers()
@@ -74,6 +74,84 @@ class ClientTest(unittest.TestCase):
assert "'ContainerCollection' object is not callable" in s
assert "docker.APIClient" in s
+ @pytest.mark.skipif(
+ IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
+ )
+ @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
+ def test_default_pool_size_unix(self, mock_obj):
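+ # Without max_pool_size, the Unix transport should fall back to
+ # DEFAULT_MAX_POOL_SIZE for its connection pool.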
+ client = docker.DockerClient(
+ version=DEFAULT_DOCKER_API_VERSION
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ base_url = "{base_url}/v{version}/_ping".format(
+ base_url=client.api.base_url,
+ version=client.api._version
+ )
+
+ mock_obj.assert_called_once_with(base_url,
+ "/var/run/docker.sock",
+ 60,
+ maxsize=DEFAULT_MAX_POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
+ )
+ @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
+ def test_default_pool_size_win(self, mock_obj):
+ client = docker.DockerClient(
+ version=DEFAULT_DOCKER_API_VERSION
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ mock_obj.assert_called_once_with("//./pipe/docker_engine",
+ 60,
+ maxsize=DEFAULT_MAX_POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
+ )
+ @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
+ def test_pool_size_unix(self, mock_obj):
+ client = docker.DockerClient(
+ version=DEFAULT_DOCKER_API_VERSION,
+ max_pool_size=POOL_SIZE
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ base_url = "{base_url}/v{version}/_ping".format(
+ base_url=client.api.base_url,
+ version=client.api._version
+ )
+
+ mock_obj.assert_called_once_with(base_url,
+ "/var/run/docker.sock",
+ 60,
+ maxsize=POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
+ )
+ @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
+ def test_pool_size_win(self, mock_obj):
+ client = docker.DockerClient(
+ version=DEFAULT_DOCKER_API_VERSION,
+ max_pool_size=POOL_SIZE
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ mock_obj.assert_called_once_with("//./pipe/docker_engine",
+ 60,
+ maxsize=POOL_SIZE
+ )
+
class FromEnvTest(unittest.TestCase):
@@ -90,7 +168,7 @@ class FromEnvTest(unittest.TestCase):
os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376',
DOCKER_CERT_PATH=TEST_CERT_DIR,
DOCKER_TLS_VERIFY='1')
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.api.base_url == "https://192.168.59.103:2376"
def test_from_env_with_version(self):
@@ -102,11 +180,85 @@ class FromEnvTest(unittest.TestCase):
assert client.api._version == '2.32'
def test_from_env_without_version_uses_default(self):
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.api._version == DEFAULT_DOCKER_API_VERSION
def test_from_env_without_timeout_uses_default(self):
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.api.timeout == DEFAULT_TIMEOUT_SECONDS
+
+ @pytest.mark.skipif(
+ IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
+ )
+ @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
+ def test_default_pool_size_from_env_unix(self, mock_obj):
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ base_url = "{base_url}/v{version}/_ping".format(
+ base_url=client.api.base_url,
+ version=client.api._version
+ )
+
+ mock_obj.assert_called_once_with(base_url,
+ "/var/run/docker.sock",
+ 60,
+ maxsize=DEFAULT_MAX_POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
+ )
+ @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
+ def test_default_pool_size_from_env_win(self, mock_obj):
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ mock_obj.assert_called_once_with("//./pipe/docker_engine",
+ 60,
+ maxsize=DEFAULT_MAX_POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
+ )
+ @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
+ def test_pool_size_from_env_unix(self, mock_obj):
+ client = docker.from_env(
+ version=DEFAULT_DOCKER_API_VERSION,
+ max_pool_size=POOL_SIZE
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ base_url = "{base_url}/v{version}/_ping".format(
+ base_url=client.api.base_url,
+ version=client.api._version
+ )
+
+ mock_obj.assert_called_once_with(base_url,
+ "/var/run/docker.sock",
+ 60,
+ maxsize=POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
+ )
+ @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
+ def test_pool_size_from_env_win(self, mock_obj):
+ client = docker.from_env(
+ version=DEFAULT_DOCKER_API_VERSION,
+ max_pool_size=POOL_SIZE
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ mock_obj.assert_called_once_with("//./pipe/docker_engine",
+ 60,
+ maxsize=POOL_SIZE
+ )
diff --git a/tests/unit/dockertypes_test.py b/tests/unit/dockertypes_test.py
index 0689d07..76a99a6 100644
--- a/tests/unit/dockertypes_test.py
+++ b/tests/unit/dockertypes_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import unittest
import pytest
@@ -11,11 +9,7 @@ from docker.types import (
IPAMPool, LogConfig, Mount, ServiceMode, Ulimit,
)
from docker.types.services import convert_service_ports
-
-try:
- from unittest import mock
-except: # noqa: E722
- import mock
+from unittest import mock
def create_host_config(*args, **kwargs):
diff --git a/tests/unit/errors_test.py b/tests/unit/errors_test.py
index 54c2ba8..f8c3a66 100644
--- a/tests/unit/errors_test.py
+++ b/tests/unit/errors_test.py
@@ -126,7 +126,7 @@ class ContainerErrorTest(unittest.TestCase):
err = ContainerError(container, exit_status, command, image, stderr)
msg = ("Command '{}' in image '{}' returned non-zero exit status {}"
- ).format(command, image, exit_status, stderr)
+ ).format(command, image, exit_status)
assert str(err) == msg
def test_container_with_stderr(self):
diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py
index e609b64..6acfb64 100644
--- a/tests/unit/fake_api.py
+++ b/tests/unit/fake_api.py
@@ -1,12 +1,13 @@
-from . import fake_stat
from docker import constants
-CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION)
+from . import fake_stat
+
+CURRENT_VERSION = f'v{constants.DEFAULT_DOCKER_API_VERSION}'
-FAKE_CONTAINER_ID = '3cc2351ab11b'
-FAKE_IMAGE_ID = 'e9aa60c60128'
-FAKE_EXEC_ID = 'd5d177f121dc'
-FAKE_NETWORK_ID = '33fb6a3462b8'
+FAKE_CONTAINER_ID = '81cf499cc928ce3fedc250a080d2b9b978df20e4517304c45211e8a68b33e254' # noqa: E501
+FAKE_IMAGE_ID = 'sha256:fe7a8fc91d3f17835cbb3b86a1c60287500ab01a53bc79c4497d09f07a3f0688' # noqa: E501
+FAKE_EXEC_ID = 'b098ec855f10434b5c7c973c78484208223a83f663ddaefb0f02a242840cb1c7' # noqa: E501
+FAKE_NETWORK_ID = '1999cfb42e414483841a125ade3c276c3cb80cb3269b14e339354ac63a31b02c' # noqa: E501
FAKE_IMAGE_NAME = 'test_image'
FAKE_TARBALL_PATH = '/path/to/tarball'
FAKE_REPO_NAME = 'repo'
@@ -16,6 +17,8 @@ FAKE_URL = 'myurl'
FAKE_PATH = '/path'
FAKE_VOLUME_NAME = 'perfectcherryblossom'
FAKE_NODE_ID = '24ifsmvkjbyhk'
+FAKE_SECRET_ID = 'epdyrw4tsi03xy3deu8g8ly6o'
+FAKE_SECRET_NAME = 'super_secret'
# Each method is prefixed with HTTP method (get, post...)
# for clarity and readability
@@ -511,102 +514,108 @@ def post_fake_network_disconnect():
return 200, None
+def post_fake_secret():
+ status_code = 200
+ response = {'ID': FAKE_SECRET_ID}
+ return status_code, response
+
+
# Maps real api url to fake response callback
prefix = 'http+docker://localhost'
if constants.IS_WINDOWS_PLATFORM:
prefix = 'http+docker://localnpipe'
fake_responses = {
- '{0}/version'.format(prefix):
+ f'{prefix}/version':
get_fake_version,
- '{1}/{0}/version'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/version':
get_fake_version,
- '{1}/{0}/info'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/info':
get_fake_info,
- '{1}/{0}/auth'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/auth':
post_fake_auth,
- '{1}/{0}/_ping'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/_ping':
get_fake_ping,
- '{1}/{0}/images/search'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/search':
get_fake_search,
- '{1}/{0}/images/json'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/json':
get_fake_images,
- '{1}/{0}/images/test_image/history'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/test_image/history':
get_fake_image_history,
- '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/create':
post_fake_import_image,
- '{1}/{0}/containers/json'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/json':
get_fake_containers,
- '{1}/{0}/containers/3cc2351ab11b/start'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/start':
post_fake_start_container,
- '{1}/{0}/containers/3cc2351ab11b/resize'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/resize':
post_fake_resize_container,
- '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/json':
get_fake_inspect_container,
- '{1}/{0}/containers/3cc2351ab11b/rename'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/rename':
post_fake_rename_container,
- '{1}/{0}/images/e9aa60c60128/tag'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/{FAKE_IMAGE_ID}/tag':
post_fake_tag_image,
- '{1}/{0}/containers/3cc2351ab11b/wait'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/wait':
get_fake_wait,
- '{1}/{0}/containers/3cc2351ab11b/logs'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/logs':
get_fake_logs,
- '{1}/{0}/containers/3cc2351ab11b/changes'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/changes':
get_fake_diff,
- '{1}/{0}/containers/3cc2351ab11b/export'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/export':
get_fake_export,
- '{1}/{0}/containers/3cc2351ab11b/update'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/update':
post_fake_update_container,
- '{1}/{0}/containers/3cc2351ab11b/exec'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/exec':
post_fake_exec_create,
- '{1}/{0}/exec/d5d177f121dc/start'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/exec/{FAKE_EXEC_ID}/start':
post_fake_exec_start,
- '{1}/{0}/exec/d5d177f121dc/json'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/exec/{FAKE_EXEC_ID}/json':
get_fake_exec_inspect,
- '{1}/{0}/exec/d5d177f121dc/resize'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/exec/{FAKE_EXEC_ID}/resize':
post_fake_exec_resize,
- '{1}/{0}/containers/3cc2351ab11b/stats'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/stats':
get_fake_stats,
- '{1}/{0}/containers/3cc2351ab11b/top'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/top':
get_fake_top,
- '{1}/{0}/containers/3cc2351ab11b/stop'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/stop':
post_fake_stop_container,
- '{1}/{0}/containers/3cc2351ab11b/kill'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/kill':
post_fake_kill_container,
- '{1}/{0}/containers/3cc2351ab11b/pause'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/pause':
post_fake_pause_container,
- '{1}/{0}/containers/3cc2351ab11b/unpause'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/unpause':
post_fake_unpause_container,
- '{1}/{0}/containers/3cc2351ab11b/restart'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/restart':
post_fake_restart_container,
- '{1}/{0}/containers/3cc2351ab11b'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}':
delete_fake_remove_container,
- '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/create':
post_fake_image_create,
- '{1}/{0}/images/e9aa60c60128'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/{FAKE_IMAGE_ID}':
delete_fake_remove_image,
- '{1}/{0}/images/e9aa60c60128/get'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/{FAKE_IMAGE_ID}/get':
get_fake_get_image,
- '{1}/{0}/images/load'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/load':
post_fake_load_image,
- '{1}/{0}/images/test_image/json'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/test_image/json':
get_fake_inspect_image,
- '{1}/{0}/images/test_image/insert'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/test_image/insert':
get_fake_insert_image,
- '{1}/{0}/images/test_image/push'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/test_image/push':
post_fake_push,
- '{1}/{0}/commit'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/commit':
post_fake_commit,
- '{1}/{0}/containers/create'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/create':
post_fake_create_container,
- '{1}/{0}/build'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/build':
post_fake_build_container,
- '{1}/{0}/events'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/events':
get_fake_events,
- ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'GET'):
+ (f'{prefix}/{CURRENT_VERSION}/volumes', 'GET'):
get_fake_volume_list,
- ('{1}/{0}/volumes/create'.format(CURRENT_VERSION, prefix), 'POST'):
+ (f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'):
get_fake_volume,
('{1}/{0}/volumes/{2}'.format(
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
@@ -620,11 +629,11 @@ fake_responses = {
CURRENT_VERSION, prefix, FAKE_NODE_ID
), 'POST'):
post_fake_update_node,
- ('{1}/{0}/swarm/join'.format(CURRENT_VERSION, prefix), 'POST'):
+ (f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'):
post_fake_join_swarm,
- ('{1}/{0}/networks'.format(CURRENT_VERSION, prefix), 'GET'):
+ (f'{prefix}/{CURRENT_VERSION}/networks', 'GET'):
get_fake_network_list,
- ('{1}/{0}/networks/create'.format(CURRENT_VERSION, prefix), 'POST'):
+ (f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'):
post_fake_network,
('{1}/{0}/networks/{2}'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
@@ -642,4 +651,6 @@ fake_responses = {
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'):
post_fake_network_disconnect,
+ f'{prefix}/{CURRENT_VERSION}/secrets/create':
+ post_fake_secret,
}
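
Every key in fake_responses is either a plain URL or a (URL, method) tuple, and every value is a zero-argument callable returning (status_code, body); moving the keys to f-strings is what lets the longer fake IDs above be interpolated in place. A sketch of how a table shaped like this can be consulted (fake_request is illustrative, not a helper the suite actually defines):

    def fake_request(method, url, responses):
        # method-qualified entries win; bare-URL entries answer any verb
        handler = responses.get((url, method)) or responses.get(url)
        if handler is None:
            raise KeyError(f'no fake response registered for {method} {url}')
        return handler()
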
diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py
index 2147bfd..95cf63b 100644
--- a/tests/unit/fake_api_client.py
+++ b/tests/unit/fake_api_client.py
@@ -1,20 +1,17 @@
import copy
-import docker
+import docker
+from docker.constants import DEFAULT_DOCKER_API_VERSION
+from unittest import mock
from . import fake_api
-try:
- from unittest import mock
-except ImportError:
- import mock
-
class CopyReturnMagicMock(mock.MagicMock):
"""
A MagicMock which deep copies every return value.
"""
def _mock_call(self, *args, **kwargs):
- ret = super(CopyReturnMagicMock, self)._mock_call(*args, **kwargs)
+ ret = super()._mock_call(*args, **kwargs)
if isinstance(ret, (dict, list)):
ret = copy.deepcopy(ret)
return ret
@@ -30,7 +27,7 @@ def make_fake_api_client(overrides=None):
if overrides is None:
overrides = {}
- api_client = docker.APIClient()
+ api_client = docker.APIClient(version=DEFAULT_DOCKER_API_VERSION)
mock_attrs = {
'build.return_value': fake_api.FAKE_IMAGE_ID,
'commit.return_value': fake_api.post_fake_commit()[1],
@@ -39,6 +36,7 @@ def make_fake_api_client(overrides=None):
fake_api.post_fake_create_container()[1],
'create_host_config.side_effect': api_client.create_host_config,
'create_network.return_value': fake_api.post_fake_network()[1],
+ 'create_secret.return_value': fake_api.post_fake_secret()[1],
'exec_create.return_value': fake_api.post_fake_exec_create()[1],
'exec_start.return_value': fake_api.post_fake_exec_start()[1],
'images.return_value': fake_api.get_fake_images()[1],
@@ -50,6 +48,7 @@ def make_fake_api_client(overrides=None):
'networks.return_value': fake_api.get_fake_network_list()[1],
'start.return_value': None,
'wait.return_value': {'StatusCode': 0},
+ 'version.return_value': fake_api.get_fake_version()
}
mock_attrs.update(overrides)
mock_client = CopyReturnMagicMock(**mock_attrs)
@@ -62,6 +61,6 @@ def make_fake_client(overrides=None):
"""
Returns a Client with a fake APIClient.
"""
- client = docker.DockerClient()
+ client = docker.DockerClient(version=DEFAULT_DOCKER_API_VERSION)
client.api = make_fake_api_client(overrides)
return client
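
CopyReturnMagicMock earns its keep because a plain MagicMock hands back the very same object on every call: a test that mutates a returned dict silently corrupts the canned fake_api payload for every later call. Deep-copying dict and list return values isolates each caller. The hazard in miniature:

    from unittest import mock

    m = mock.MagicMock()
    m.inspect.return_value = {'State': {'Running': True}}
    first = m.inspect()
    first['State']['Running'] = False                  # one test mutates its result...
    assert m.inspect()['State']['Running'] is False    # ...and every later call sees it
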
diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py
index da5f0ab..101708e 100644
--- a/tests/unit/models_containers_test.py
+++ b/tests/unit/models_containers_test.py
@@ -39,6 +39,7 @@ class ContainerCollectionTest(unittest.TestCase):
cap_add=['foo'],
cap_drop=['bar'],
cgroup_parent='foobar',
+ cgroupns='host',
cpu_period=1,
cpu_quota=2,
cpu_shares=5,
@@ -77,6 +78,7 @@ class ContainerCollectionTest(unittest.TestCase):
oom_score_adj=5,
pid_mode='host',
pids_limit=500,
+ platform='linux',
ports={
1111: 4567,
2222: None
@@ -134,6 +136,7 @@ class ContainerCollectionTest(unittest.TestCase):
'BlkioWeight': 2,
'CapAdd': ['foo'],
'CapDrop': ['bar'],
+ 'CgroupnsMode': 'host',
'CgroupParent': 'foobar',
'CpuPeriod': 1,
'CpuQuota': 2,
@@ -186,6 +189,7 @@ class ContainerCollectionTest(unittest.TestCase):
name='somename',
network_disabled=False,
networking_config={'foo': None},
+ platform='linux',
ports=[('1111', 'tcp'), ('2222', 'tcp')],
stdin_open=True,
stop_signal=9,
@@ -233,7 +237,7 @@ class ContainerCollectionTest(unittest.TestCase):
assert container.id == FAKE_CONTAINER_ID
client.api.pull.assert_called_with(
- 'alpine', platform=None, tag=None, stream=True
+ 'alpine', platform=None, tag='latest', all_tags=False, stream=True
)
def test_run_with_error(self):
@@ -314,6 +318,33 @@ class ContainerCollectionTest(unittest.TestCase):
'NetworkMode': 'default'}
)
+ def test_run_platform(self):
+ client = make_fake_client()
+
+ # raise exception on first call, then return normal value
+ client.api.create_container.side_effect = [
+ docker.errors.ImageNotFound(""),
+ client.api.create_container.return_value
+ ]
+
+ client.containers.run(image='alpine', platform='linux/arm64')
+
+ client.api.pull.assert_called_with(
+ 'alpine',
+ tag='latest',
+ all_tags=False,
+ stream=True,
+ platform='linux/arm64',
+ )
+
+ client.api.create_container.assert_called_with(
+ detach=False,
+ platform='linux/arm64',
+ image='alpine',
+ command=None,
+ host_config={'NetworkMode': 'default'},
+ )
+
def test_create(self):
client = make_fake_client()
container = client.containers.create(
@@ -377,6 +408,11 @@ class ContainerCollectionTest(unittest.TestCase):
class ContainerTest(unittest.TestCase):
+ def test_short_id(self):
+ container = Container(attrs={'Id': '8497fe9244dd45cac543eb3c37d8605077'
+ '6800eebef1f3ec2ee111e8ccf12db6'})
+ assert container.short_id == '8497fe9244dd'
+
def test_name(self):
client = make_fake_client()
container = client.containers.get(FAKE_CONTAINER_ID)
@@ -450,7 +486,7 @@ class ContainerTest(unittest.TestCase):
container = client.containers.get(FAKE_CONTAINER_ID)
container.get_archive('foo')
client.api.get_archive.assert_called_with(
- FAKE_CONTAINER_ID, 'foo', DEFAULT_DATA_CHUNK_SIZE
+ FAKE_CONTAINER_ID, 'foo', DEFAULT_DATA_CHUNK_SIZE, False
)
def test_image(self):
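
test_run_platform above pins down the retry path in ContainerCollection.run: the first create_container raises ImageNotFound, run() pulls with the same platform, then retries the create with platform forwarded. A simplified sketch of that control flow (tag handling and detach logic omitted):

    from docker.errors import ImageNotFound

    def create_with_pull_fallback(client, image, platform=None, **kwargs):
        try:
            return client.api.create_container(
                image=image, platform=platform, **kwargs)
        except ImageNotFound:
            # the pull honours the requested platform; then retry the create once
            client.api.pull(image, tag='latest', all_tags=False,
                            stream=True, platform=platform)
            return client.api.create_container(
                image=image, platform=platform, **kwargs)
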
diff --git a/tests/unit/models_images_test.py b/tests/unit/models_images_test.py
index fd894ab..3478c3f 100644
--- a/tests/unit/models_images_test.py
+++ b/tests/unit/models_images_test.py
@@ -44,9 +44,25 @@ class ImageCollectionTest(unittest.TestCase):
def test_pull(self):
client = make_fake_client()
- image = client.images.pull('test_image:latest')
+ image = client.images.pull('test_image:test')
client.api.pull.assert_called_with(
- 'test_image', tag='latest', stream=True
+ 'test_image', tag='test', all_tags=False, stream=True
+ )
+ client.api.inspect_image.assert_called_with('test_image:test')
+ assert isinstance(image, Image)
+ assert image.id == FAKE_IMAGE_ID
+
+ def test_pull_tag_precedence(self):
+ client = make_fake_client()
+ image = client.images.pull('test_image:latest', tag='test')
+ client.api.pull.assert_called_with(
+ 'test_image', tag='test', all_tags=False, stream=True
+ )
+ client.api.inspect_image.assert_called_with('test_image:test')
+
+ image = client.images.pull('test_image')
+ client.api.pull.assert_called_with(
+ 'test_image', tag='latest', all_tags=False, stream=True
)
client.api.inspect_image.assert_called_with('test_image:latest')
assert isinstance(image, Image)
@@ -54,9 +70,9 @@ class ImageCollectionTest(unittest.TestCase):
def test_pull_multiple(self):
client = make_fake_client()
- images = client.images.pull('test_image')
+ images = client.images.pull('test_image', all_tags=True)
client.api.pull.assert_called_with(
- 'test_image', tag=None, stream=True
+ 'test_image', tag='latest', all_tags=True, stream=True
)
client.api.images.assert_called_with(
all=False, name='test_image', filters=None
@@ -96,16 +112,21 @@ class ImageCollectionTest(unittest.TestCase):
client.images.search('test')
client.api.search.assert_called_with('test')
+ def test_search_limit(self):
+ client = make_fake_client()
+ client.images.search('test', limit=5)
+ client.api.search.assert_called_with('test', limit=5)
+
class ImageTest(unittest.TestCase):
def test_short_id(self):
image = Image(attrs={'Id': 'sha256:b6846070672ce4e8f1f91564ea6782bd675'
'f69d65a6f73ef6262057ad0a15dcd'})
- assert image.short_id == 'sha256:b684607067'
+ assert image.short_id == 'sha256:b6846070672c'
image = Image(attrs={'Id': 'b6846070672ce4e8f1f91564ea6782bd675'
'f69d65a6f73ef6262057ad0a15dcd'})
- assert image.short_id == 'b684607067'
+ assert image.short_id == 'b6846070672c'
def test_tags(self):
image = Image(attrs={
@@ -129,6 +150,16 @@ class ImageTest(unittest.TestCase):
image.history()
client.api.history.assert_called_with(FAKE_IMAGE_ID)
+ def test_remove(self):
+ client = make_fake_client()
+ image = client.images.get(FAKE_IMAGE_ID)
+ image.remove()
+ client.api.remove_image.assert_called_with(
+ FAKE_IMAGE_ID,
+ force=False,
+ noprune=False,
+ )
+
def test_save(self):
client = make_fake_client()
image = client.images.get(FAKE_IMAGE_ID)
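
Both short_id assertions now expect 12 hex characters instead of 10, matching what the docker CLI prints, and image digests keep their sha256: prefix. A sketch of the behavior being asserted (not the models' literal code):

    def short_id(full_id):
        # keep the algorithm prefix for digests, then 12 hex characters
        if full_id.startswith('sha256:'):
            return full_id[:len('sha256:') + 12]
        return full_id[:12]
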
diff --git a/tests/unit/models_resources_test.py b/tests/unit/models_resources_test.py
index 5af24ee..11dea29 100644
--- a/tests/unit/models_resources_test.py
+++ b/tests/unit/models_resources_test.py
@@ -16,7 +16,7 @@ class ModelTest(unittest.TestCase):
def test_hash(self):
client = make_fake_client()
container1 = client.containers.get(FAKE_CONTAINER_ID)
- my_set = set([container1])
+ my_set = {container1}
assert len(my_set) == 1
container2 = client.containers.get(FAKE_CONTAINER_ID)
diff --git a/tests/unit/models_secrets_test.py b/tests/unit/models_secrets_test.py
new file mode 100644
index 0000000..1c261a8
--- /dev/null
+++ b/tests/unit/models_secrets_test.py
@@ -0,0 +1,11 @@
+import unittest
+
+from .fake_api_client import make_fake_client
+from .fake_api import FAKE_SECRET_NAME
+
+
+class CreateServiceTest(unittest.TestCase):
+ def test_secrets_repr(self):
+ client = make_fake_client()
+ secret = client.secrets.create(name="super_secret", data="secret")
+ assert secret.__repr__() == f"<Secret: '{FAKE_SECRET_NAME}'>"
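
This repr check exists because the secrets create endpoint returns only {'ID': ...}: with no name anywhere in attrs, the model's __repr__ had nothing to show. The model-side fix it pins down amounts to grafting the requested name onto the response before wrapping it, roughly:

    def create(self, **kwargs):
        obj = self.client.api.create_secret(**kwargs)
        # the API response carries only an ID; copy the name back on
        obj.setdefault('Spec', {})['Name'] = kwargs.get('name')
        return self.prepare_model(obj)
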
diff --git a/tests/unit/models_services_test.py b/tests/unit/models_services_test.py
index a4ac50c..94a27f0 100644
--- a/tests/unit/models_services_test.py
+++ b/tests/unit/models_services_test.py
@@ -11,6 +11,7 @@ class CreateServiceKwargsTest(unittest.TestCase):
'labels': {'key': 'value'},
'hostname': 'test_host',
'mode': 'global',
+ 'rollback_config': {'rollback': 'config'},
'update_config': {'update': 'config'},
'networks': ['somenet'],
'endpoint_spec': {'blah': 'blah'},
@@ -28,6 +29,7 @@ class CreateServiceKwargsTest(unittest.TestCase):
'constraints': ['foo=bar'],
'preferences': ['bar=baz'],
'platforms': [('x86_64', 'linux')],
+ 'maxreplicas': 1
})
task_template = kwargs.pop('task_template')
@@ -36,24 +38,26 @@ class CreateServiceKwargsTest(unittest.TestCase):
'name': 'somename',
'labels': {'key': 'value'},
'mode': 'global',
+ 'rollback_config': {'rollback': 'config'},
'update_config': {'update': 'config'},
'endpoint_spec': {'blah': 'blah'},
}
- assert set(task_template.keys()) == set([
+ assert set(task_template.keys()) == {
'ContainerSpec', 'Resources', 'RestartPolicy', 'Placement',
'LogDriver', 'Networks'
- ])
+ }
assert task_template['Placement'] == {
'Constraints': ['foo=bar'],
'Preferences': ['bar=baz'],
'Platforms': [{'Architecture': 'x86_64', 'OS': 'linux'}],
+ 'MaxReplicas': 1,
}
assert task_template['LogDriver'] == {
'Name': 'logdriver',
'Options': {'foo': 'bar'}
}
assert task_template['Networks'] == [{'Target': 'somenet'}]
- assert set(task_template['ContainerSpec'].keys()) == set([
+ assert set(task_template['ContainerSpec'].keys()) == {
'Image', 'Command', 'Args', 'Hostname', 'Env', 'Dir', 'User',
'Labels', 'Mounts', 'StopGracePeriod'
- ])
+ }
diff --git a/tests/unit/sshadapter_test.py b/tests/unit/sshadapter_test.py
new file mode 100644
index 0000000..874239a
--- /dev/null
+++ b/tests/unit/sshadapter_test.py
@@ -0,0 +1,39 @@
+import unittest
+import docker
+from docker.transport.sshconn import SSHSocket
+
+
+class SSHAdapterTest(unittest.TestCase):
+ @staticmethod
+ def test_ssh_hostname_prefix_trim():
+ conn = docker.transport.SSHHTTPAdapter(
+ base_url="ssh://user@hostname:1234", shell_out=True)
+ assert conn.ssh_host == "user@hostname:1234"
+
+ @staticmethod
+ def test_ssh_parse_url():
+ c = SSHSocket(host="user@hostname:1234")
+ assert c.host == "hostname"
+ assert c.port == "1234"
+ assert c.user == "user"
+
+ @staticmethod
+ def test_ssh_parse_hostname_only():
+ c = SSHSocket(host="hostname")
+ assert c.host == "hostname"
+ assert c.port is None
+ assert c.user is None
+
+ @staticmethod
+ def test_ssh_parse_user_and_hostname():
+ c = SSHSocket(host="user@hostname")
+ assert c.host == "hostname"
+ assert c.port is None
+ assert c.user == "user"
+
+ @staticmethod
+ def test_ssh_parse_hostname_and_port():
+ c = SSHSocket(host="hostname:22")
+ assert c.host == "hostname"
+ assert c.port == "22"
+ assert c.user is None
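
SSHSocket keeps user, host and port as separate attributes, with port left as a string; the four parse tests cover each permutation of 'user@host:port'. Logic satisfying those assertions looks roughly like this (illustrative, not the transport's actual implementation):

    def parse_ssh_host(host):
        user = port = None
        if '@' in host:
            user, host = host.split('@', 1)
        if ':' in host:
            host, port = host.split(':', 1)   # port stays a string
        return user, host, port

    assert parse_ssh_host('user@hostname:1234') == ('user', 'hostname', '1234')
    assert parse_ssh_host('hostname') == (None, 'hostname', None)
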
diff --git a/tests/unit/ssladapter_test.py b/tests/unit/ssladapter_test.py
index 73b7336..d3f2407 100644
--- a/tests/unit/ssladapter_test.py
+++ b/tests/unit/ssladapter_test.py
@@ -1,15 +1,8 @@
import unittest
-from docker.transport import ssladapter
-import pytest
+from ssl import match_hostname, CertificateError
-try:
- from backports.ssl_match_hostname import (
- match_hostname, CertificateError
- )
-except ImportError:
- from ssl import (
- match_hostname, CertificateError
- )
+import pytest
+from docker.transport import ssladapter
try:
from ssl import OP_NO_SSLv3, OP_NO_SSLv2, OP_NO_TLSv1
@@ -32,30 +25,30 @@ class SSLAdapterTest(unittest.TestCase):
class MatchHostnameTest(unittest.TestCase):
cert = {
'issuer': (
- (('countryName', u'US'),),
- (('stateOrProvinceName', u'California'),),
- (('localityName', u'San Francisco'),),
- (('organizationName', u'Docker Inc'),),
- (('organizationalUnitName', u'Docker-Python'),),
- (('commonName', u'localhost'),),
- (('emailAddress', u'info@docker.com'),)
+ (('countryName', 'US'),),
+ (('stateOrProvinceName', 'California'),),
+ (('localityName', 'San Francisco'),),
+ (('organizationName', 'Docker Inc'),),
+ (('organizationalUnitName', 'Docker-Python'),),
+ (('commonName', 'localhost'),),
+ (('emailAddress', 'info@docker.com'),)
),
'notAfter': 'Mar 25 23:08:23 2030 GMT',
- 'notBefore': u'Mar 25 23:08:23 2016 GMT',
- 'serialNumber': u'BD5F894C839C548F',
+ 'notBefore': 'Mar 25 23:08:23 2016 GMT',
+ 'serialNumber': 'BD5F894C839C548F',
'subject': (
- (('countryName', u'US'),),
- (('stateOrProvinceName', u'California'),),
- (('localityName', u'San Francisco'),),
- (('organizationName', u'Docker Inc'),),
- (('organizationalUnitName', u'Docker-Python'),),
- (('commonName', u'localhost'),),
- (('emailAddress', u'info@docker.com'),)
+ (('countryName', 'US'),),
+ (('stateOrProvinceName', 'California'),),
+ (('localityName', 'San Francisco'),),
+ (('organizationName', 'Docker Inc'),),
+ (('organizationalUnitName', 'Docker-Python'),),
+ (('commonName', 'localhost'),),
+ (('emailAddress', 'info@docker.com'),)
),
'subjectAltName': (
- ('DNS', u'localhost'),
- ('DNS', u'*.gensokyo.jp'),
- ('IP Address', u'127.0.0.1'),
+ ('DNS', 'localhost'),
+ ('DNS', '*.gensokyo.jp'),
+ ('IP Address', '127.0.0.1'),
),
'version': 3
}
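
With the backports package gone, the stdlib ssl.match_hostname is used directly: it returns None when the certificate covers the name and raises CertificateError otherwise (worth noting it was deprecated in Python 3.7 and removed in 3.12). Against the fixture above one would expect, roughly:

    from ssl import CertificateError, match_hostname

    match_hostname(cert, 'localhost')             # in CN and SAN: returns None
    match_hostname(cert, 'touhou.gensokyo.jp')    # matches the *.gensokyo.jp SAN
    try:
        match_hostname(cert, 'other.example')
    except CertificateError:
        pass                                      # covered by no SAN entry
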
diff --git a/tests/unit/swarm_test.py b/tests/unit/swarm_test.py
index 4385380..aee1b9e 100644
--- a/tests/unit/swarm_test.py
+++ b/tests/unit/swarm_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import json
from . import fake_api
diff --git a/tests/unit/utils_build_test.py b/tests/unit/utils_build_test.py
index 012f15b..fa7d833 100644
--- a/tests/unit/utils_build_test.py
+++ b/tests/unit/utils_build_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import os
import os.path
import shutil
@@ -82,7 +80,7 @@ class ExcludePathsTest(unittest.TestCase):
assert sorted(paths) == sorted(set(paths))
def test_wildcard_exclude(self):
- assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore'])
+ assert self.exclude(['*']) == {'Dockerfile', '.dockerignore'}
def test_exclude_dockerfile_dockerignore(self):
"""
@@ -99,18 +97,18 @@ class ExcludePathsTest(unittest.TestCase):
If we're using a custom Dockerfile, make sure that's not
excluded.
"""
- assert self.exclude(['*'], dockerfile='Dockerfile.alt') == set(
- ['Dockerfile.alt', '.dockerignore']
- )
+ assert self.exclude(['*'], dockerfile='Dockerfile.alt') == {
+ 'Dockerfile.alt', '.dockerignore'
+ }
assert self.exclude(
['*'], dockerfile='foo/Dockerfile3'
- ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore']))
+ ) == convert_paths({'foo/Dockerfile3', '.dockerignore'})
# https://github.com/docker/docker-py/issues/1956
assert self.exclude(
['*'], dockerfile='./foo/Dockerfile3'
- ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore']))
+ ) == convert_paths({'foo/Dockerfile3', '.dockerignore'})
def test_exclude_dockerfile_child(self):
includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3')
@@ -119,56 +117,56 @@ class ExcludePathsTest(unittest.TestCase):
def test_single_filename(self):
assert self.exclude(['a.py']) == convert_paths(
- self.all_paths - set(['a.py'])
+ self.all_paths - {'a.py'}
)
def test_single_filename_leading_dot_slash(self):
assert self.exclude(['./a.py']) == convert_paths(
- self.all_paths - set(['a.py'])
+ self.all_paths - {'a.py'}
)
# As odd as it sounds, a filename pattern with a trailing slash on the
# end *will* result in that file being excluded.
def test_single_filename_trailing_slash(self):
assert self.exclude(['a.py/']) == convert_paths(
- self.all_paths - set(['a.py'])
+ self.all_paths - {'a.py'}
)
def test_wildcard_filename_start(self):
assert self.exclude(['*.py']) == convert_paths(
- self.all_paths - set(['a.py', 'b.py', 'cde.py'])
+ self.all_paths - {'a.py', 'b.py', 'cde.py'}
)
def test_wildcard_with_exception(self):
assert self.exclude(['*.py', '!b.py']) == convert_paths(
- self.all_paths - set(['a.py', 'cde.py'])
+ self.all_paths - {'a.py', 'cde.py'}
)
def test_wildcard_with_wildcard_exception(self):
assert self.exclude(['*.*', '!*.go']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'a.py', 'b.py', 'cde.py', 'Dockerfile.alt',
- ])
+ }
)
def test_wildcard_filename_end(self):
assert self.exclude(['a.*']) == convert_paths(
- self.all_paths - set(['a.py', 'a.go'])
+ self.all_paths - {'a.py', 'a.go'}
)
def test_question_mark(self):
assert self.exclude(['?.py']) == convert_paths(
- self.all_paths - set(['a.py', 'b.py'])
+ self.all_paths - {'a.py', 'b.py'}
)
def test_single_subdir_single_filename(self):
assert self.exclude(['foo/a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py'])
+ self.all_paths - {'foo/a.py'}
)
def test_single_subdir_single_filename_leading_slash(self):
assert self.exclude(['/foo/a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py'])
+ self.all_paths - {'foo/a.py'}
)
def test_exclude_include_absolute_path(self):
@@ -176,57 +174,57 @@ class ExcludePathsTest(unittest.TestCase):
assert exclude_paths(
base,
['/*', '!/*.py']
- ) == set(['a.py', 'b.py'])
+ ) == {'a.py', 'b.py'}
def test_single_subdir_with_path_traversal(self):
assert self.exclude(['foo/whoops/../a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py'])
+ self.all_paths - {'foo/a.py'}
)
def test_single_subdir_wildcard_filename(self):
assert self.exclude(['foo/*.py']) == convert_paths(
- self.all_paths - set(['foo/a.py', 'foo/b.py'])
+ self.all_paths - {'foo/a.py', 'foo/b.py'}
)
def test_wildcard_subdir_single_filename(self):
assert self.exclude(['*/a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py', 'bar/a.py'])
+ self.all_paths - {'foo/a.py', 'bar/a.py'}
)
def test_wildcard_subdir_wildcard_filename(self):
assert self.exclude(['*/*.py']) == convert_paths(
- self.all_paths - set(['foo/a.py', 'foo/b.py', 'bar/a.py'])
+ self.all_paths - {'foo/a.py', 'foo/b.py', 'bar/a.py'}
)
def test_directory(self):
assert self.exclude(['foo']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo', 'foo/a.py', 'foo/b.py', 'foo/bar', 'foo/bar/a.py',
'foo/Dockerfile3'
- ])
+ }
)
def test_directory_with_trailing_slash(self):
assert self.exclude(['foo']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo', 'foo/a.py', 'foo/b.py',
'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3'
- ])
+ }
)
def test_directory_with_single_exception(self):
assert self.exclude(['foo', '!foo/bar/a.py']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo/a.py', 'foo/b.py', 'foo', 'foo/bar',
'foo/Dockerfile3'
- ])
+ }
)
def test_directory_with_subdir_exception(self):
assert self.exclude(['foo', '!foo/bar']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3'
- ])
+ }
)
@pytest.mark.skipif(
@@ -234,21 +232,21 @@ class ExcludePathsTest(unittest.TestCase):
)
def test_directory_with_subdir_exception_win32_pathsep(self):
assert self.exclude(['foo', '!foo\\bar']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3'
- ])
+ }
)
def test_directory_with_wildcard_exception(self):
assert self.exclude(['foo', '!foo/*.py']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo/bar', 'foo/bar/a.py', 'foo', 'foo/Dockerfile3'
- ])
+ }
)
def test_subdirectory(self):
assert self.exclude(['foo/bar']) == convert_paths(
- self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
+ self.all_paths - {'foo/bar', 'foo/bar/a.py'}
)
@pytest.mark.skipif(
@@ -256,52 +254,52 @@ class ExcludePathsTest(unittest.TestCase):
)
def test_subdirectory_win32_pathsep(self):
assert self.exclude(['foo\\bar']) == convert_paths(
- self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
+ self.all_paths - {'foo/bar', 'foo/bar/a.py'}
)
def test_double_wildcard(self):
assert self.exclude(['**/a.py']) == convert_paths(
- self.all_paths - set(
- ['a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py']
- )
+ self.all_paths - {
+ 'a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py'
+ }
)
assert self.exclude(['foo/**/bar']) == convert_paths(
- self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
+ self.all_paths - {'foo/bar', 'foo/bar/a.py'}
)
def test_single_and_double_wildcard(self):
assert self.exclude(['**/target/*/*']) == convert_paths(
- self.all_paths - set(
- ['target/subdir/file.txt',
- 'subdir/target/subdir/file.txt',
- 'subdir/subdir2/target/subdir/file.txt']
- )
+ self.all_paths - {
+ 'target/subdir/file.txt',
+ 'subdir/target/subdir/file.txt',
+ 'subdir/subdir2/target/subdir/file.txt'
+ }
)
def test_trailing_double_wildcard(self):
assert self.exclude(['subdir/**']) == convert_paths(
- self.all_paths - set(
- ['subdir/file.txt',
- 'subdir/target/file.txt',
- 'subdir/target/subdir/file.txt',
- 'subdir/subdir2/file.txt',
- 'subdir/subdir2/target/file.txt',
- 'subdir/subdir2/target/subdir/file.txt',
- 'subdir/target',
- 'subdir/target/subdir',
- 'subdir/subdir2',
- 'subdir/subdir2/target',
- 'subdir/subdir2/target/subdir']
- )
+ self.all_paths - {
+ 'subdir/file.txt',
+ 'subdir/target/file.txt',
+ 'subdir/target/subdir/file.txt',
+ 'subdir/subdir2/file.txt',
+ 'subdir/subdir2/target/file.txt',
+ 'subdir/subdir2/target/subdir/file.txt',
+ 'subdir/target',
+ 'subdir/target/subdir',
+ 'subdir/subdir2',
+ 'subdir/subdir2/target',
+ 'subdir/subdir2/target/subdir'
+ }
)
def test_double_wildcard_with_exception(self):
assert self.exclude(['**', '!bar', '!foo/bar']) == convert_paths(
- set([
+ {
'foo/bar', 'foo/bar/a.py', 'bar', 'bar/a.py', 'Dockerfile',
'.dockerignore',
- ])
+ }
)
def test_include_wildcard(self):
@@ -324,7 +322,7 @@ class ExcludePathsTest(unittest.TestCase):
assert exclude_paths(
base,
['*.md', '!README*.md', 'README-secret.md']
- ) == set(['README.md', 'README-bis.md'])
+ ) == {'README.md', 'README-bis.md'}
def test_parent_directory(self):
base = make_tree(
@@ -335,12 +333,12 @@ class ExcludePathsTest(unittest.TestCase):
# Dockerignore reference stipulates that absolute paths are
# equivalent to relative paths, hence /../foo should be
# equivalent to ../foo. It also stipulates that paths are run
- # through Go's filepath.Clean, which explicitely "replace
+ # through Go's filepath.Clean, which explicitly "replace
# "/.." by "/" at the beginning of a path".
assert exclude_paths(
base,
['../a.py', '/../b.py']
- ) == set(['c.py'])
+ ) == {'c.py'}
class TarTest(unittest.TestCase):
@@ -374,14 +372,14 @@ class TarTest(unittest.TestCase):
'.dockerignore',
]
- expected_names = set([
+ expected_names = {
'Dockerfile',
'.dockerignore',
'a.go',
'b.py',
'bar',
'bar/a.py',
- ])
+ }
base = make_tree(dirs, files)
self.addCleanup(shutil.rmtree, base)
@@ -413,7 +411,7 @@ class TarTest(unittest.TestCase):
with pytest.raises(IOError) as ei:
tar(base)
- assert 'Can not read file in context: {}'.format(full_path) in (
+ assert f'Can not read file in context: {full_path}' in (
ei.exconly()
)
diff --git a/tests/unit/utils_config_test.py b/tests/unit/utils_config_test.py
index b0934f9..27d5a7c 100644
--- a/tests/unit/utils_config_test.py
+++ b/tests/unit/utils_config_test.py
@@ -5,14 +5,10 @@ import tempfile
import json
from pytest import mark, fixture
+from unittest import mock
from docker.utils import config
-try:
- from unittest import mock
-except ImportError:
- import mock
-
class FindConfigFileTest(unittest.TestCase):
diff --git a/tests/unit/utils_json_stream_test.py b/tests/unit/utils_json_stream_test.py
index f7aefd0..821ebe4 100644
--- a/tests/unit/utils_json_stream_test.py
+++ b/tests/unit/utils_json_stream_test.py
@@ -1,11 +1,7 @@
-# encoding: utf-8
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from docker.utils.json_stream import json_splitter, stream_as_text, json_stream
-class TestJsonSplitter(object):
+class TestJsonSplitter:
def test_json_splitter_no_object(self):
data = '{"foo": "bar'
@@ -20,7 +16,7 @@ class TestJsonSplitter(object):
assert json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}')
-class TestStreamAsText(object):
+class TestStreamAsText:
def test_stream_with_non_utf_unicode_character(self):
stream = [b'\xed\xf3\xf3']
@@ -28,12 +24,12 @@ class TestStreamAsText(object):
assert output == '���'
def test_stream_with_utf_character(self):
- stream = ['ěĝ'.encode('utf-8')]
+ stream = ['ěĝ'.encode()]
output, = stream_as_text(stream)
assert output == 'ěĝ'
-class TestJsonStream(object):
+class TestJsonStream:
def test_with_falsy_entries(self):
stream = [
diff --git a/tests/unit/utils_proxy_test.py b/tests/unit/utils_proxy_test.py
index ff0e14b..2da6040 100644
--- a/tests/unit/utils_proxy_test.py
+++ b/tests/unit/utils_proxy_test.py
@@ -1,7 +1,4 @@
-# -*- coding: utf-8 -*-
-
import unittest
-import six
from docker.utils.proxy import ProxyConfig
@@ -65,7 +62,7 @@ class ProxyConfigTest(unittest.TestCase):
# Proxy config is non null, env is None.
self.assertSetEqual(
set(CONFIG.inject_proxy_environment(None)),
- set(['{}={}'.format(k, v) for k, v in six.iteritems(ENV)]))
+ {f'{k}={v}' for k, v in ENV.items()})
# Proxy config is null, env is None.
self.assertIsNone(ProxyConfig().inject_proxy_environment(None), None)
@@ -74,7 +71,7 @@ class ProxyConfigTest(unittest.TestCase):
# Proxy config is non null, env is non null
actual = CONFIG.inject_proxy_environment(env)
- expected = ['{}={}'.format(k, v) for k, v in six.iteritems(ENV)] + env
+ expected = [f'{k}={v}' for k, v in ENV.items()] + env
# It's important that the first 8 variables are the ones from the proxy
# config, and the last 2 are the ones from the input environment
self.assertSetEqual(set(actual[:8]), set(expected[:8]))
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index d9cb002..12cb7bd 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -1,31 +1,22 @@
-# -*- coding: utf-8 -*-
-
import base64
import json
import os
import os.path
import shutil
-import sys
import tempfile
import unittest
-
+import pytest
from docker.api.client import APIClient
-from docker.constants import IS_WINDOWS_PLATFORM
+from docker.constants import IS_WINDOWS_PLATFORM, DEFAULT_DOCKER_API_VERSION
from docker.errors import DockerException
-from docker.utils import (
- convert_filters, convert_volume_binds, decode_json_header, kwargs_from_env,
- parse_bytes, parse_devices, parse_env_file, parse_host,
- parse_repository_tag, split_command, update_headers,
-)
-
+from docker.utils import (convert_filters, convert_volume_binds,
+ decode_json_header, kwargs_from_env, parse_bytes,
+ parse_devices, parse_env_file, parse_host,
+ parse_repository_tag, split_command, update_headers)
from docker.utils.ports import build_port_bindings, split_port
from docker.utils.utils import format_environment
-import pytest
-
-import six
-
TEST_CERT_DIR = os.path.join(
os.path.dirname(__file__),
'testdata/certs',
@@ -41,7 +32,7 @@ class DecoratorsTest(unittest.TestCase):
def f(self, headers=None):
return headers
- client = APIClient()
+ client = APIClient(version=DEFAULT_DOCKER_API_VERSION)
client._general_configs = {}
g = update_headers(f)
@@ -92,6 +83,7 @@ class KwargsFromEnvTest(unittest.TestCase):
assert kwargs['tls'].verify
parsed_host = parse_host(kwargs['base_url'], IS_WINDOWS_PLATFORM, True)
+ kwargs['version'] = DEFAULT_DOCKER_API_VERSION
try:
client = APIClient(**kwargs)
assert parsed_host == client.base_url
@@ -112,6 +104,7 @@ class KwargsFromEnvTest(unittest.TestCase):
assert kwargs['tls'].assert_hostname is True
assert kwargs['tls'].verify is False
parsed_host = parse_host(kwargs['base_url'], IS_WINDOWS_PLATFORM, True)
+ kwargs['version'] = DEFAULT_DOCKER_API_VERSION
try:
client = APIClient(**kwargs)
assert parsed_host == client.base_url
@@ -199,22 +192,22 @@ class ConverVolumeBindsTest(unittest.TestCase):
assert convert_volume_binds(data) == ['/mnt/vol1:/data:rw']
def test_convert_volume_binds_unicode_bytes_input(self):
- expected = [u'/mnt/지연:/unicode/박:rw']
+ expected = ['/mnt/지연:/unicode/박:rw']
data = {
- u'/mnt/지연'.encode('utf-8'): {
- 'bind': u'/unicode/박'.encode('utf-8'),
+ '/mnt/지연'.encode(): {
+ 'bind': '/unicode/박'.encode(),
'mode': 'rw'
}
}
assert convert_volume_binds(data) == expected
def test_convert_volume_binds_unicode_unicode_input(self):
- expected = [u'/mnt/지연:/unicode/박:rw']
+ expected = ['/mnt/지연:/unicode/박:rw']
data = {
- u'/mnt/지연': {
- 'bind': u'/unicode/박',
+ '/mnt/지연': {
+ 'bind': '/unicode/박',
'mode': 'rw'
}
}
@@ -303,17 +296,24 @@ class ParseHostTest(unittest.TestCase):
'[fd12::82d1]:2375/docker/engine': (
'http://[fd12::82d1]:2375/docker/engine'
),
+ 'ssh://[fd12::82d1]': 'ssh://[fd12::82d1]:22',
+ 'ssh://user@[fd12::82d1]:8765': 'ssh://user@[fd12::82d1]:8765',
'ssh://': 'ssh://127.0.0.1:22',
'ssh://user@localhost:22': 'ssh://user@localhost:22',
'ssh://user@remote': 'ssh://user@remote:22',
}
for host in invalid_hosts:
- with pytest.raises(DockerException):
+ msg = f'Should have failed to parse invalid host: {host}'
+ with self.assertRaises(DockerException, msg=msg):
parse_host(host, None)
for host, expected in valid_hosts.items():
- assert parse_host(host, None) == expected
+ self.assertEqual(
+ parse_host(host, None),
+ expected,
+ msg=f'Failed to parse valid host: {host}',
+ )
def test_parse_host_empty_value(self):
unix_socket = 'http+unix:///var/run/docker.sock'
@@ -363,14 +363,14 @@ class ParseRepositoryTagTest(unittest.TestCase):
)
def test_index_image_sha(self):
- assert parse_repository_tag("root@sha256:{0}".format(self.sha)) == (
- "root", "sha256:{0}".format(self.sha)
+ assert parse_repository_tag(f"root@sha256:{self.sha}") == (
+ "root", f"sha256:{self.sha}"
)
def test_private_reg_image_sha(self):
assert parse_repository_tag(
- "url:5000/repo@sha256:{0}".format(self.sha)
- ) == ("url:5000/repo", "sha256:{0}".format(self.sha))
+ f"url:5000/repo@sha256:{self.sha}"
+ ) == ("url:5000/repo", f"sha256:{self.sha}")
class ParseDeviceTest(unittest.TestCase):
@@ -447,11 +447,7 @@ class ParseBytesTest(unittest.TestCase):
parse_bytes("127.0.0.1K")
def test_parse_bytes_float(self):
- with pytest.raises(DockerException):
- parse_bytes("1.5k")
-
- def test_parse_bytes_maxint(self):
- assert parse_bytes("{0}k".format(sys.maxsize)) == sys.maxsize * 1024
+ assert parse_bytes("1.5k") == 1536
class UtilsTest(unittest.TestCase):
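
parse_bytes previously rejected fractional quantities; it now resolves them, so "1.5k" is 1.5 × 1024 = 1536 bytes (the sys.maxsize round-trip test went away along with the integer-only constraint):

    from docker.utils import parse_bytes
    assert parse_bytes('1.5k') == 1536   # 1.5 * 1024
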
@@ -471,20 +467,13 @@ class UtilsTest(unittest.TestCase):
def test_decode_json_header(self):
obj = {'a': 'b', 'c': 1}
data = None
- if six.PY3:
- data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8'))
- else:
- data = base64.urlsafe_b64encode(json.dumps(obj))
+ data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8'))
decoded_data = decode_json_header(data)
assert obj == decoded_data
class SplitCommandTest(unittest.TestCase):
def test_split_command_with_unicode(self):
- assert split_command(u'echo μμ') == ['echo', 'μμ']
-
- @pytest.mark.skipif(six.PY3, reason="shlex doesn't support bytes in py3")
- def test_split_command_with_bytes(self):
assert split_command('echo μμ') == ['echo', 'μμ']
@@ -549,6 +538,12 @@ class PortsTest(unittest.TestCase):
assert internal_port == ["2000"]
assert external_port == [("2001:abcd:ef00::2", "1000")]
+ def test_split_port_with_ipv6_square_brackets_address(self):
+ internal_port, external_port = split_port(
+ "[2001:abcd:ef00::2]:1000:2000")
+ assert internal_port == ["2000"]
+ assert external_port == [("2001:abcd:ef00::2", "1000")]
+
def test_split_port_invalid(self):
with pytest.raises(ValueError):
split_port("0.0.0.0:1000:2000:tcp")
@@ -628,7 +623,7 @@ class FormatEnvironmentTest(unittest.TestCase):
env_dict = {
'ARTIST_NAME': b'\xec\x86\xa1\xec\xa7\x80\xec\x9d\x80'
}
- assert format_environment(env_dict) == [u'ARTIST_NAME=송지은']
+ assert format_environment(env_dict) == ['ARTIST_NAME=송지은']
def test_format_env_no_value(self):
env_dict = {
diff --git a/tox.ini b/tox.ini
index df797f4..d35d41a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27, py35, py36, py37, flake8
+envlist = py36, py37, flake8
skipsdist=True
[testenv]